From python-checkins at python.org Sun Jul 1 10:11:36 2007 From: python-checkins at python.org (georg.brandl) Date: Sun, 1 Jul 2007 10:11:36 +0200 (CEST) Subject: [Python-checkins] r56137 - python/trunk/Lib/webbrowser.py Message-ID: <20070701081136.11BD51E4005@bag.python.org> Author: georg.brandl Date: Sun Jul 1 10:11:35 2007 New Revision: 56137 Modified: python/trunk/Lib/webbrowser.py Log: Fix a few webbrowser.py problems. Modified: python/trunk/Lib/webbrowser.py ============================================================================== --- python/trunk/Lib/webbrowser.py (original) +++ python/trunk/Lib/webbrowser.py Sun Jul 1 10:11:35 2007 @@ -1,5 +1,6 @@ #! /usr/bin/env python """Interfaces for launching and remotely controlling Web browsers.""" +# Maintained by Georg Brandl. import os import shlex @@ -160,6 +161,7 @@ def __init__(self, name): if isinstance(name, basestring): self.name = name + self.args = ["%s"] else: # name should be a list with arguments self.name = name[0] From python-checkins at python.org Sun Jul 1 10:11:40 2007 From: python-checkins at python.org (georg.brandl) Date: Sun, 1 Jul 2007 10:11:40 +0200 (CEST) Subject: [Python-checkins] r56138 - python/branches/release25-maint/Lib/webbrowser.py Message-ID: <20070701081140.7BD0B1E400B@bag.python.org> Author: georg.brandl Date: Sun Jul 1 10:11:40 2007 New Revision: 56138 Modified: python/branches/release25-maint/Lib/webbrowser.py Log: Fix a few webbrowser.py problems. (backport from rev. 56137) Modified: python/branches/release25-maint/Lib/webbrowser.py ============================================================================== --- python/branches/release25-maint/Lib/webbrowser.py (original) +++ python/branches/release25-maint/Lib/webbrowser.py Sun Jul 1 10:11:40 2007 @@ -1,5 +1,6 @@ #! /usr/bin/env python """Interfaces for launching and remotely controlling Web browsers.""" +# Maintained by Georg Brandl. 
import os import shlex @@ -160,6 +161,7 @@ def __init__(self, name): if isinstance(name, basestring): self.name = name + self.args = ["%s"] else: # name should be a list with arguments self.name = name[0] @@ -452,7 +454,7 @@ # if successful, register it if retncode is None and commd: - register("gnome", None, BackgroundBrowser(commd.split())) + register("gnome", None, BackgroundBrowser(shlex.split(commd))) # First, the Mozilla/Netscape browsers for browser in ("mozilla-firefox", "firefox", From buildbot at python.org Sun Jul 1 10:27:35 2007 From: buildbot at python.org (buildbot at python.org) Date: Sun, 01 Jul 2007 08:27:35 +0000 Subject: [Python-checkins] buildbot warnings in PPC64 Debian trunk Message-ID: <20070701082735.5431F1E4005@bag.python.org> The Buildbot has detected a new failure of PPC64 Debian trunk. Full details are available at: http://www.python.org/dev/buildbot/all/PPC64%2520Debian%2520trunk/builds/35 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: georg.brandl Build had warnings: warnings test Excerpt from the test logfile: make: *** [buildbottest] Segmentation fault sincerely, -The Buildbot From buildbot at python.org Sun Jul 1 10:51:41 2007 From: buildbot at python.org (buildbot at python.org) Date: Sun, 01 Jul 2007 08:51:41 +0000 Subject: [Python-checkins] buildbot warnings in ppc Debian unstable trunk Message-ID: <20070701085141.84CAB1E4005@bag.python.org> The Buildbot has detected a new failure of ppc Debian unstable trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/ppc%2520Debian%2520unstable%2520trunk/builds/35 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: georg.brandl Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_bsddb3 make: *** [buildbottest] Error 1 sincerely, -The Buildbot From buildbot at python.org Sun Jul 1 11:37:24 2007 From: buildbot at python.org (buildbot at python.org) Date: Sun, 01 Jul 2007 09:37:24 +0000 Subject: [Python-checkins] buildbot warnings in hppa Ubuntu dapper trunk Message-ID: <20070701093724.371D11E4005@bag.python.org> The Buildbot has detected a new failure of hppa Ubuntu dapper trunk. Full details are available at: http://www.python.org/dev/buildbot/all/hppa%2520Ubuntu%2520dapper%2520trunk/builds/0 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: georg.brandl Build had warnings: warnings test Excerpt from the test logfile: sincerely, -The Buildbot From buildbot at python.org Sun Jul 1 12:56:36 2007 From: buildbot at python.org (buildbot at python.org) Date: Sun, 01 Jul 2007 10:56:36 +0000 Subject: [Python-checkins] buildbot warnings in hppa Ubuntu dapper 2.5 Message-ID: <20070701105636.480051E4005@bag.python.org> The Buildbot has detected a new failure of hppa Ubuntu dapper 2.5. 
Full details are available at: http://www.python.org/dev/buildbot/all/hppa%2520Ubuntu%2520dapper%25202.5/builds/0 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch branches/release25-maint] HEAD Blamelist: georg.brandl Build had warnings: warnings test Excerpt from the test logfile: sincerely, -The Buildbot From buildbot at python.org Sun Jul 1 14:22:35 2007 From: buildbot at python.org (buildbot at python.org) Date: Sun, 01 Jul 2007 12:22:35 +0000 Subject: [Python-checkins] buildbot warnings in MIPS Debian trunk Message-ID: <20070701122235.7B89C1E4005@bag.python.org> The Buildbot has detected a new failure of MIPS Debian trunk. Full details are available at: http://www.python.org/dev/buildbot/all/MIPS%2520Debian%2520trunk/builds/0 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: georg.brandl Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_urllib2net ====================================================================== ERROR: test_ftp_NoneNodefault (test.test_urllib2net.TimeoutTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/test/test_urllib2net.py", line 306, in test_ftp_NoneNodefault u = urllib2.urlopen("ftp://ftp.mirror.nl/pub/mirror/gnu/", timeout=None) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 124, in urlopen return _opener.open(url, data, timeout) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 376, in open response = self._open(req, data) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 394, in _open '_open', req) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 354, in _call_chain result = func(*args) File 
"/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 1273, in ftp_open fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 1319, in connect_ftp self.cache[key] = ftpwrapper(user, passwd, host, port, dirs, timeout) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib.py", line 829, in __init__ self.init() File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib.py", line 835, in init self.ftp.connect(self.host, self.port, self.timeout) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/ftplib.py", line 129, in connect self.sock = socket.create_connection((self.host, self.port), self.timeout) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/socket.py", line 443, in create_connection raise error, msg IOError: [Errno ftp error] (145, 'Connection timed out') ====================================================================== ERROR: test_ftp_NoneWithdefault (test.test_urllib2net.TimeoutTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/test/test_urllib2net.py", line 300, in test_ftp_NoneWithdefault u = urllib2.urlopen("ftp://ftp.mirror.nl/pub/mirror/gnu/", timeout=None) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 124, in urlopen return _opener.open(url, data, timeout) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 376, in open response = self._open(req, data) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 394, in _open '_open', req) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 354, in _call_chain result = func(*args) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 1273, in ftp_open fw = self.connect_ftp(user, 
passwd, host, port, dirs, req.timeout) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 1319, in connect_ftp self.cache[key] = ftpwrapper(user, passwd, host, port, dirs, timeout) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib.py", line 829, in __init__ self.init() File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib.py", line 835, in init self.ftp.connect(self.host, self.port, self.timeout) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/ftplib.py", line 129, in connect self.sock = socket.create_connection((self.host, self.port), self.timeout) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/socket.py", line 443, in create_connection raise error, msg IOError: [Errno ftp error] timed out ====================================================================== ERROR: test_ftp_Value (test.test_urllib2net.TimeoutTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/test/test_urllib2net.py", line 310, in test_ftp_Value u = urllib2.urlopen("ftp://ftp.mirror.nl/pub/mirror/gnu/", timeout=60) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 124, in urlopen return _opener.open(url, data, timeout) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 376, in open response = self._open(req, data) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 394, in _open '_open', req) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 354, in _call_chain result = func(*args) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 1273, in ftp_open fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 1319, in connect_ftp self.cache[key] = 
ftpwrapper(user, passwd, host, port, dirs, timeout) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib.py", line 829, in __init__ self.init() File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib.py", line 835, in init self.ftp.connect(self.host, self.port, self.timeout) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/ftplib.py", line 129, in connect self.sock = socket.create_connection((self.host, self.port), self.timeout) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/socket.py", line 443, in create_connection raise error, msg IOError: [Errno ftp error] timed out ====================================================================== ERROR: test_ftp_basic (test.test_urllib2net.TimeoutTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/test/test_urllib2net.py", line 293, in test_ftp_basic u = urllib2.urlopen("ftp://ftp.mirror.nl/pub/mirror/gnu/") File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 124, in urlopen return _opener.open(url, data, timeout) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 376, in open response = self._open(req, data) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 394, in _open '_open', req) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 354, in _call_chain result = func(*args) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 1273, in ftp_open fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib2.py", line 1319, in connect_ftp self.cache[key] = ftpwrapper(user, passwd, host, port, dirs, timeout) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib.py", line 829, in __init__ self.init() File 
"/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/urllib.py", line 835, in init self.ftp.connect(self.host, self.port, self.timeout) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/ftplib.py", line 129, in connect self.sock = socket.create_connection((self.host, self.port), self.timeout) File "/home/pybot/buildarea/trunk.klose-debian-mips/build/Lib/socket.py", line 443, in create_connection raise error, msg IOError: [Errno ftp error] (145, 'Connection timed out') make: *** [buildbottest] Error 1 sincerely, -The Buildbot From python-checkins at python.org Mon Jul 2 05:01:48 2007 From: python-checkins at python.org (brett.cannon) Date: Mon, 2 Jul 2007 05:01:48 +0200 (CEST) Subject: [Python-checkins] r56141 - python/branches/bcannon-objcap/build_secure_py.sh Message-ID: <20070702030148.226951E4007@bag.python.org> Author: brett.cannon Date: Mon Jul 2 05:01:47 2007 New Revision: 56141 Modified: python/branches/bcannon-objcap/build_secure_py.sh Log: Add a comment about what someone might need to change to build. Modified: python/branches/bcannon-objcap/build_secure_py.sh ============================================================================== --- python/branches/bcannon-objcap/build_secure_py.sh (original) +++ python/branches/bcannon-objcap/build_secure_py.sh Mon Jul 2 05:01:47 2007 @@ -1,4 +1,5 @@ echo "Build secure_python.o ..." gcc -Wall -g -c -IInclude -I. secure_python.c echo "Build executable ..." +# If the command below fails, might need to add -lutil or -Wl,-E . gcc -L. 
-lpthread -lm -ldl -o secure_python.exe secure_python.o libpython2.6.a From nnorwitz at gmail.com Mon Jul 2 11:43:35 2007 From: nnorwitz at gmail.com (Neal Norwitz) Date: Mon, 2 Jul 2007 05:43:35 -0400 Subject: [Python-checkins] Python Regression Test Failures refleak (1) Message-ID: <20070702094335.GA22051@python.psfb.org> test_popen2 leaked [-26, 26, -26] references, sum=-26 test_sys leaked [-132, 0, 0] references, sum=-132 test_urllib2_localnet leaked [3, 3, 3] references, sum=9 From python-checkins at python.org Mon Jul 2 13:54:28 2007 From: python-checkins at python.org (georg.brandl) Date: Mon, 2 Jul 2007 13:54:28 +0200 (CEST) Subject: [Python-checkins] r56143 - python/trunk/Doc/lib/libsignal.tex Message-ID: <20070702115428.8BE731E4007@bag.python.org> Author: georg.brandl Date: Mon Jul 2 13:54:28 2007 New Revision: 56143 Modified: python/trunk/Doc/lib/libsignal.tex Log: Remove duplicate sentence from alarm() doc. Modified: python/trunk/Doc/lib/libsignal.tex ============================================================================== --- python/trunk/Doc/lib/libsignal.tex (original) +++ python/trunk/Doc/lib/libsignal.tex Mon Jul 2 13:54:28 2007 @@ -101,8 +101,7 @@ be scheduled at any time). The returned value is then the number of seconds before any previously set alarm was to have been delivered. If \var{time} is zero, no alarm is scheduled, and any scheduled - alarm is canceled. The return value is the number of seconds - remaining before a previously scheduled alarm. If the return value + alarm is canceled. If the return value is zero, no alarm is currently scheduled. (See the \UNIX{} man page \manpage{alarm}{2}.) Availability: \UNIX. 
From python-checkins at python.org Mon Jul 2 13:54:31 2007 From: python-checkins at python.org (georg.brandl) Date: Mon, 2 Jul 2007 13:54:31 +0200 (CEST) Subject: [Python-checkins] r56144 - python/branches/release25-maint/Doc/lib/libsignal.tex Message-ID: <20070702115431.3DBBC1E400D@bag.python.org> Author: georg.brandl Date: Mon Jul 2 13:54:30 2007 New Revision: 56144 Modified: python/branches/release25-maint/Doc/lib/libsignal.tex Log: Remove duplicate sentence from alarm() doc. (backport from rev. 56143) Modified: python/branches/release25-maint/Doc/lib/libsignal.tex ============================================================================== --- python/branches/release25-maint/Doc/lib/libsignal.tex (original) +++ python/branches/release25-maint/Doc/lib/libsignal.tex Mon Jul 2 13:54:30 2007 @@ -101,8 +101,7 @@ be scheduled at any time). The returned value is then the number of seconds before any previously set alarm was to have been delivered. If \var{time} is zero, no alarm is scheduled, and any scheduled - alarm is canceled. The return value is the number of seconds - remaining before a previously scheduled alarm. If the return value + alarm is canceled. If the return value is zero, no alarm is currently scheduled. (See the \UNIX{} man page \manpage{alarm}{2}.) Availability: \UNIX. From python-checkins at python.org Mon Jul 2 15:19:58 2007 From: python-checkins at python.org (guido.van.rossum) Date: Mon, 2 Jul 2007 15:19:58 +0200 (CEST) Subject: [Python-checkins] r56145 - sandbox/trunk/2to3/fixes/fix_dict.py sandbox/trunk/2to3/fixes/fix_xrange.py Message-ID: <20070702131958.9ADC31E4007@bag.python.org> Author: guido.van.rossum Date: Mon Jul 2 15:19:58 2007 New Revision: 56145 Modified: sandbox/trunk/2to3/fixes/fix_dict.py sandbox/trunk/2to3/fixes/fix_xrange.py Log: Complete copyright assignment. 
Modified: sandbox/trunk/2to3/fixes/fix_dict.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_dict.py (original) +++ sandbox/trunk/2to3/fixes/fix_dict.py Mon Jul 2 15:19:58 2007 @@ -1,4 +1,5 @@ # Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. """Fixer for dict methods. Modified: sandbox/trunk/2to3/fixes/fix_xrange.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_xrange.py (original) +++ sandbox/trunk/2to3/fixes/fix_xrange.py Mon Jul 2 15:19:58 2007 @@ -1,4 +1,5 @@ # Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. """Fixer that changes xrange(...) into range(...).""" From python-checkins at python.org Mon Jul 2 15:20:43 2007 From: python-checkins at python.org (guido.van.rossum) Date: Mon, 2 Jul 2007 15:20:43 +0200 (CEST) Subject: [Python-checkins] r56146 - in sandbox/trunk/2to3: example.py fixes/fix_filter.py fixes/fix_map.py fixes/util.py tests/test_fixers.py Message-ID: <20070702132043.CBCAC1E4007@bag.python.org> Author: guido.van.rossum Date: Mon Jul 2 15:20:43 2007 New Revision: 56146 Added: sandbox/trunk/2to3/fixes/fix_filter.py (contents, props changed) sandbox/trunk/2to3/fixes/fix_map.py (contents, props changed) Modified: sandbox/trunk/2to3/example.py sandbox/trunk/2to3/fixes/util.py sandbox/trunk/2to3/tests/test_fixers.py Log: Add fixers for map and filter. Not perfect, but a start. 
Modified: sandbox/trunk/2to3/example.py ============================================================================== --- sandbox/trunk/2to3/example.py (original) +++ sandbox/trunk/2to3/example.py Mon Jul 2 15:20:43 2007 @@ -344,4 +344,17 @@ a = raw_input() b = raw_input(a.rstrip()) +def filter_examples(): + filter(os.unlink, filenames) + filter(None, "whatever") + filter(lambda x: not x, range(4)) + +def map_examples(): + map(None, foo.bar) + map(None, foo.bar,) + map(None, foo, bar) + map(f, foo.bar) + map(lambda x: x+1, range(10)) + + # This is the last line. Added: sandbox/trunk/2to3/fixes/fix_filter.py ============================================================================== --- (empty file) +++ sandbox/trunk/2to3/fixes/fix_filter.py Mon Jul 2 15:20:43 2007 @@ -0,0 +1,107 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that changes filter(F, X) into list(filter(F, X)). + +We avoid the transformation if the filter() call is directly contained +in iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or +for V in <>:. + +NOTE: This is still not correct if the original code was depending on +filter(F, X) to return a string if X is a string and a tuple if X is a +tuple. That would require type inference, which we don't do. Let +Python 2.6 figure it out. 
+""" + +# Local imports +import pytree +import patcomp +from pgen2 import token +from fixes import basefix +from fixes.util import Name, Call, ListComp + +class FixFilter(basefix.BaseFix): + + PATTERN = """ + filter_lambda=power< + 'filter' + trailer< + '(' + arglist< + lambdef< 'lambda' fp=NAME ':' xp=any > + ',' + it=any + > + ')' + > + > + | + power< + 'filter' + args=trailer< '(' [any] ')' > + > + """ + + def transform(self, node): + results = self.match(node) + if "filter_lambda" in results: + new = ListComp(results.get("fp").clone(), + results.get("fp").clone(), + results.get("it").clone(), + results.get("xp").clone()) + else: + if self.in_special_context(node): + return None + new = node.clone() + new.set_prefix("") + new = Call(Name("list"), [new]) + new.set_prefix(node.get_prefix()) + return new + + P0 = """for_stmt< 'for' any 'in' node=any ':' any* > + | list_for< 'for' any 'in' node=any any* > + | gen_for< 'for' any 'in' node=any any* > + """ + p0 = patcomp.PatternCompiler().compile_pattern(P0) + + P1 = """ + power< + NAME< 'iter' | 'list' | 'tuple' | 'sorted' > + trailer< '(' node=any ')' > + any* + > + """ + p1 = patcomp.PatternCompiler().compile_pattern(P1) + + P2 = """ + power< + 'sorted' + trailer< '(' arglist ')' > + any* + > + """ + p2 = patcomp.PatternCompiler().compile_pattern(P2) + + def in_special_context(self, node): + p = node.parent + if p is None: + return False + results = {} + if self.p0.match(p, results) and results["node"] is node: + return True + + pp = p.parent + if pp is None: + return False + results = {} + if self.p1.match(pp, results) and results["node"] is node: + return True + + ppp = pp.parent + if ppp is None: + return False + results = {} + if self.p2.match(ppp, results) and results["node"] is node: + return True + + return False Added: sandbox/trunk/2to3/fixes/fix_map.py ============================================================================== --- (empty file) +++ sandbox/trunk/2to3/fixes/fix_map.py Mon Jul 2 15:20:43 
2007 @@ -0,0 +1,119 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that changes map(F, ...) into list(map(F, ...)). + +As a special case, map(None, X) is changed into list(X). (This is +necessary because the semantics are changed in this case -- the new +map(None, X) is equivalent to [(x,) for x in X].) + +We avoid the transformation (except for the special case mentioned +above) if the map() call is directly contained in iter(<>), list(<>), +tuple(<>), sorted(<>), ...join(<>), or for V in <>:. + +NOTE: This is still not correct if the original code was depending on +map(F, X, Y, ...) to go on until the longest argument is exhausted, +substituting None for missing values -- like zip(), it now stops as +soon as the shortest argument is exhausted. +""" + +# Local imports +import pytree +import patcomp +from pgen2 import token +from fixes import basefix +from fixes.util import Name, Call, ListComp +from pygram import python_symbols as syms + +class FixMap(basefix.BaseFix): + + PATTERN = """ + map_none=power< + 'map' + trailer< '(' arglist< 'None' ',' arg=any [','] > ')' > + > + | + map_lambda=power< + 'map' + trailer< + '(' + arglist< + lambdef< 'lambda' fp=NAME ':' xp=any > + ',' + it=any + > + ')' + > + > + | + power< + 'map' + args=trailer< '(' [any] ')' > + > + """ + + def transform(self, node): + results = self.match(node) + if "map_lambda" in results: + new = ListComp(results.get("xp").clone(), + results.get("fp").clone(), + results.get("it").clone()) + else: + if "map_none" in results: + new = results["arg"].clone() + else: + if self.in_special_context(node): + return None + new = node.clone() + new.set_prefix("") + new = Call(Name("list"), [new]) + new.set_prefix(node.get_prefix()) + return new + + P0 = """for_stmt< 'for' any 'in' node=any ':' any* > + | list_for< 'for' any 'in' node=any any* > + | gen_for< 'for' any 'in' node=any any* > + """ + p0 = 
patcomp.PatternCompiler().compile_pattern(P0) + + P1 = """ + power< + NAME< 'iter' | 'list' | 'tuple' | 'sorted' > + trailer< '(' node=any ')' > + any* + > + """ + p1 = patcomp.PatternCompiler().compile_pattern(P1) + + P2 = """ + power< + 'sorted' + trailer< '(' arglist ')' > + any* + > + """ + p2 = patcomp.PatternCompiler().compile_pattern(P2) + + def in_special_context(self, node): + p = node.parent + if p is None: + return False + results = {} + if self.p0.match(p, results) and results["node"] is node: + return True + + pp = p.parent + if pp is None: + return False + results = {} + if self.p1.match(pp, results) and results["node"] is node: + return True + + ppp = pp.parent + if ppp is None: + return False + results = {} + if self.p2.match(ppp, results) and results["node"] is node: + return True + + return False Modified: sandbox/trunk/2to3/fixes/util.py ============================================================================== --- sandbox/trunk/2to3/fixes/util.py (original) +++ sandbox/trunk/2to3/fixes/util.py Mon Jul 2 15:20:43 2007 @@ -76,6 +76,30 @@ """A string leaf""" return Leaf(token.STRING, string, prefix=prefix) +def ListComp(xp, fp, it, test=None): + """A list comprehension of the form [xp for fp in it if test]. + + If test is None, the "if test" part is omitted. 
+ """ + xp.set_prefix("") + fp.set_prefix(" ") + it.set_prefix(" ") + for_leaf = Leaf(token.NAME, "for") + for_leaf.set_prefix(" ") + in_leaf = Leaf(token.NAME, "in") + in_leaf.set_prefix(" ") + inner_args = [for_leaf, fp, in_leaf, it] + if test: + test.set_prefix(" ") + if_leaf = Leaf(token.NAME, "if") + if_leaf.set_prefix(" ") + inner_args.append(Node(syms.list_if, [if_leaf, test])) + inner = Node(syms.listmaker, [xp, Node(syms.list_for, inner_args)]) + return Node(syms.atom, + [Leaf(token.LBRACE, "["), + inner, + Leaf(token.RBRACE, "]")]) + ########################################################### ### Determine whether a node represents a given literal ########################################################### Modified: sandbox/trunk/2to3/tests/test_fixers.py ============================================================================== --- sandbox/trunk/2to3/tests/test_fixers.py (original) +++ sandbox/trunk/2to3/tests/test_fixers.py Mon Jul 2 15:20:43 2007 @@ -1858,6 +1858,100 @@ a = """callable(x, kw=y)""" self.check(a, a) +class Test_filter(FixerTestCase): + fixer = "filter" + + def test_filter_basic(self): + b = """x = filter(None, 'abc')""" + a = """x = list(filter(None, 'abc'))""" + self.check(b, a) + + b = """x = filter(f, 'abc')""" + a = """x = list(filter(f, 'abc'))""" + self.check(b, a) + + b = """x = filter(lambda x: x%2 == 0, range(10))""" + a = """x = [x for x in range(10) if x%2 == 0]""" + self.check(b, a) + + # XXX This (rare) case is not supported +## b = """x = filter(f, 'abc')[0]""" +## a = """x = list(filter(f, 'abc'))[0]""" +## self.check(b, a) + + def test_filter_nochange(self): + a = """iter(filter(f, 'abc'))""" + self.check(a, a) + a = """list(filter(f, 'abc'))""" + self.check(a, a) + a = """list(filter(f, 'abc'))[0]""" + self.check(a, a) + a = """tuple(filter(f, 'abc'))""" + self.check(a, a) + a = """sorted(filter(f, 'abc'))""" + self.check(a, a) + a = """sorted(filter(f, 'abc'), key=blah)""" + self.check(a, a) + a = 
"""sorted(filter(f, 'abc'), key=blah)[0]""" + self.check(a, a) + a = """for i in filter(f, 'abc'): pass""" + self.check(a, a) + a = """[x for x in filter(f, 'abc')]""" + self.check(a, a) + a = """(x for x in filter(f, 'abc'))""" + self.check(a, a) + +class Test_map(FixerTestCase): + fixer = "map" + + def test_map_basic(self): + b = """x = map(f, 'abc')""" + a = """x = list(map(f, 'abc'))""" + self.check(b, a) + + b = """x = map(f, 'abc', 'def')""" + a = """x = list(map(f, 'abc', 'def'))""" + self.check(b, a) + + b = """x = map(None, 'abc')""" + a = """x = list('abc')""" + self.check(b, a) + + b = """x = map(None, 'abc', 'def')""" + a = """x = list(map(None, 'abc', 'def'))""" + self.check(b, a) + + b = """x = map(lambda x: x+1, range(4))""" + a = """x = [x+1 for x in range(4)]""" + self.check(b, a) + + # XXX This (rare) case is not supported +## b = """x = map(f, 'abc')[0]""" +## a = """x = list(map(f, 'abc'))[0]""" +## self.check(b, a) + + def test_map_nochange(self): + a = """iter(map(f, 'abc'))""" + self.check(a, a) + a = """list(map(f, 'abc'))""" + self.check(a, a) + a = """list(map(f, 'abc'))[0]""" + self.check(a, a) + a = """tuple(map(f, 'abc'))""" + self.check(a, a) + a = """sorted(map(f, 'abc'))""" + self.check(a, a) + a = """sorted(map(f, 'abc'), key=blah)""" + self.check(a, a) + a = """sorted(map(f, 'abc'), key=blah)[0]""" + self.check(a, a) + a = """for i in map(f, 'abc'): pass""" + self.check(a, a) + a = """[x for x in map(f, 'abc')]""" + self.check(a, a) + a = """(x for x in map(f, 'abc'))""" + self.check(a, a) + if __name__ == "__main__": import __main__ From python-checkins at python.org Mon Jul 2 20:13:30 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Mon, 2 Jul 2007 20:13:30 +0200 (CEST) Subject: [Python-checkins] r56148 - python/branches/cpy_merge/Modules/_bytes_iomodule.c Message-ID: <20070702181330.1B8711E4007@bag.python.org> Author: alexandre.vassalotti Date: Mon Jul 2 20:13:29 2007 New Revision: 56148 Modified: 
python/branches/cpy_merge/Modules/_bytes_iomodule.c Log: Use the buffer protocol API for writelines and setvalue. Modified: python/branches/cpy_merge/Modules/_bytes_iomodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_bytes_iomodule.c (original) +++ python/branches/cpy_merge/Modules/_bytes_iomodule.c Mon Jul 2 20:13:29 2007 @@ -146,6 +146,9 @@ static int bytes_io_setvalue(BytesIOObject *self, PyObject *value) { + const char *bytes; + Py_ssize_t len; + if (self->buf == NULL) { err_closed(); return -1; @@ -156,13 +159,11 @@ if (value == NULL) return 0; - - if (!PyString_Check(value)) { - PyErr_SetString(PyExc_TypeError, "need a string"); + + if (PyObject_AsCharBuffer(value, &bytes, &len) == -1) return -1; - } - if ((write_bytes(self, PyString_AsString(value), - PyString_Size(value))) < 0) { + + if (write_bytes(self, bytes, len) < 0) { return -1; /* out of memory */ } /* Reset the position back to beginning-of-file, since @@ -385,6 +386,8 @@ bytes_io_writelines(BytesIOObject *self, PyObject *v) { PyObject *it, *item; + const char *bytes; + Py_ssize_t len; if (self->buf == NULL) return err_closed(); @@ -394,16 +397,14 @@ return NULL; while ((item = PyIter_Next(it)) != NULL) { - Py_ssize_t n; - char *c; - if (PyString_AsStringAndSize(item, &c, &n) == -1) { + if (PyObject_AsCharBuffer(item, &bytes, &len) == -1) { Py_DECREF(it); Py_DECREF(item); return NULL; } Py_DECREF(item); - if (write_bytes(self, c, n) == -1) { + if (write_bytes(self, bytes, len) == -1) { Py_DECREF(it); return NULL; } From python-checkins at python.org Mon Jul 2 20:20:18 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Mon, 2 Jul 2007 20:20:18 +0200 (CEST) Subject: [Python-checkins] r56149 - python/branches/cpy_merge/Lib/test/test_memoryio.py Message-ID: <20070702182018.6143E1E4011@bag.python.org> Author: alexandre.vassalotti Date: Mon Jul 2 20:20:18 2007 New Revision: 56149 Added: 
python/branches/cpy_merge/Lib/test/test_memoryio.py Log: Add unit tests for memory-based IO. Not perfect, but it's a start. Added: python/branches/cpy_merge/Lib/test/test_memoryio.py ============================================================================== --- (empty file) +++ python/branches/cpy_merge/Lib/test/test_memoryio.py Mon Jul 2 20:20:18 2007 @@ -0,0 +1,248 @@ +"""Unit tests for memory-based file-like objects. +StringIO -- for unicode strings +BytesIO -- for bytes +""" + +import unittest +from test import test_support + +import io + +try: + import _string_io, _bytes_io + has_c_implementation = True +except ImportError: + has_c_implementation = False + + +class MemoryTestMixin: + + def write_ops(self, f): + t = self.buftype + self.assertEqual(f.write(t("blah.")), 5) + self.assertEqual(f.seek(0), 0) + self.assertEqual(f.write(t("Hello.")), 6) + self.assertEqual(f.tell(), 6) + self.assertEqual(f.seek(-1, 1), 5) + self.assertEqual(f.tell(), 5) + self.assertEqual(f.write(t(" world\n\n\n")), 9) + self.assertEqual(f.seek(0), 0) + self.assertEqual(f.write(t("h")), 1) + self.assertEqual(f.seek(-1, 2), 13) + self.assertEqual(f.tell(), 13) + self.assertEqual(f.truncate(12), 12) + self.assertEqual(f.tell(), 13) + + def test_write(self): + buf = self.buftype("hello world\n") + memio = self.ioclass(buf) + + self.write_ops(memio) + + def test_write_noinit(self): + memio = self.ioclass() + + self.write_ops(memio) + + def test_writelines(self): + buf = self.buftype("1234567890") + memio = self.ioclass() + + memio.writelines([buf] * 100) + self.assertEqual(memio.getvalue(), buf * 100) + + def test_writelines_error(self): + memio = self.ioclass() + def error_gen(): + yield self.buftype('spam') + raise KeyboardInterrupt + + self.assertRaises(KeyboardInterrupt, memio.writelines, error_gen()) + + def test_truncate(self): + buf = self.buftype("1234567890") + memio = self.ioclass(buf) + + memio.seek(6) + self.assertEqual(memio.truncate(), 6) + 
self.assertEqual(memio.getvalue(), buf[:6]) + self.assertEqual(memio.truncate(4), 4) + self.assertEqual(memio.getvalue(), buf[:4]) + self.assertEqual(memio.tell(), 4) + memio.write(buf) + self.assertEqual(memio.getvalue(), buf[:4] + buf) + self.assertRaises(IOError, memio.truncate, -1) + + def test_close(self): + buf = self.buftype("1234567890") + memio = self.ioclass(buf) + + self.assertEqual(memio.closed, False) + memio.close() + self.assertRaises(ValueError, memio.read) + self.assertRaises(ValueError, memio.write, buf) + self.assertRaises(ValueError, memio.writelines, buf) + self.assertRaises(ValueError, memio.seek, 3) + self.assertRaises(ValueError, memio.truncate) + self.assertRaises(ValueError, memio.getvalue) + self.assertRaises(ValueError, memio.tell) + self.assertEqual(memio.closed, True) + + def test_init(self): + buf = self.buftype("1234567890") + memio = self.ioclass(buf) + + def read_ops(self, f, data): + t = self.buftype + data = f.read(5) + self.assertEqual(data, t("hello")) + self.assertEqual(f.readinto(data), 5) + self.assertEqual(data, t(" worl")) + self.assertEqual(f.readinto(data), 2) + self.assertEqual(len(data), 5) + self.assertEqual(data[:2], t("d\n")) + self.assertEqual(f.seek(0), 0) + self.assertEqual(f.read(20), t("hello world\n")) + self.assertEqual(f.read(1), t("")) + self.assertEqual(f.readinto(t("x")), 0) + self.assertEqual(f.seek(-6, 2), 6) + self.assertEqual(f.read(5), t("world")) + self.assertEqual(f.read(0), t("")) + self.assertEqual(f.readinto(t("")), 0) + self.assertEqual(f.seek(-6, 1), 5) + self.assertEqual(f.read(5), t(" worl")) + self.assertEqual(f.tell(), 10) + + def test_read(self): + buf = self.buftype("1234567890") + memio = self.ioclass(buf) + + self.assertEqual(memio.read(1), buf[:1]) + self.assertEqual(memio.read(4), buf[1:5]) + self.assertEqual(memio.read(900), buf[5:]) + self.assertEqual(memio.read(), self.EOF) + + buf = self.buftype("hello world\n") + memio = self.ioclass(buf) + + self.read_ops(memio, buf) + + def 
test_read_noargs(self): + buf = self.buftype("1234567890") + memio = self.ioclass(buf) + + self.assertEqual(buf, memio.read()) + self.assertEqual(self.EOF, memio.read()) + + def test_readline(self): + pass + + def test_readlines(self): + pass + + def test_iterator(self): + buf = self.buftype("1234567890\n") + memio = self.ioclass(buf * 10) + + self.assertEqual(iter(memio), memio) + self.failUnless(hasattr(memio, '__iter__')) + self.failUnless(hasattr(memio, '__next__')) + i = 0 + for line in memio: + self.assertEqual(line, buf) + i += 1 + self.assertEqual(i, 10) + + memio.seek(0) + i = 0 + for line in memio: + self.assertEqual(line, buf) + i += 1 + self.assertEqual(i, 10) + + def test_getvalue(self): + buf = self.buftype("1234567890") + memio = self.ioclass(buf) + + self.assertEqual(memio.getvalue(), buf) + memio = self.ioclass(buf * 1000) + self.assertEqual(memio.getvalue()[-3:], "890") + + def test_seek(self): + buf = self.buftype("1234567890") + memio = self.ioclass(buf) + + memio.read(5) + memio.seek(0) + self.assertEqual(buf, memio.read()) + + memio.seek(3) + self.assertEqual(buf[3:], memio.read()) + + def test_tell(self): + buf = self.buftype("1234567890") + memio = self.ioclass(buf) + + self.assertEqual(0, memio.tell()) + memio.seek(5) + self.assertEqual(5, memio.tell()) + memio.seek(10000) + self.assertEqual(10000, memio.tell()) + + def test_flags(self): + memio = self.ioclass() + + self.assertEqual(memio.writable(), True) + self.assertEqual(memio.readable(), True) + self.assertEqual(memio.seekable(), True) + self.assertEqual(memio.isatty(), False) + memio.close() + self.assertEqual(memio.writable(), True) + self.assertEqual(memio.readable(), True) + self.assertEqual(memio.seekable(), True) + + +class PythonBytesIOTest(MemoryTestMixin, unittest.TestCase): + """ + Test the Python implementation of BytesIO. 
+ """ + buftype = bytes + ioclass = io._BytesIO + EOF = b"" + + +class PythonStringIOTest(MemoryTestMixin, unittest.TestCase): + """ + Test the Python implementation of StringIO. + """ + buftype = str + ioclass = io._StringIO + EOF = "" + +if has_c_implementation: + class CBytesIOTest(MemoryTestMixin, unittest.TestCase): + """ + Test the C implementation of BytesIO if available. + """ + buftype = bytes + ioclass = _bytes_io.BytesIO + EOF = b"" + + + class CStringIOTest(MemoryTestMixin, unittest.TestCase): + """ + Test the C implementation of StringIO if available. + """ + buftype = unicode + ioclass = _string_io.StringIO + EOF = u"" + +def test_main(): + if has_c_implementation: + test_support.run_unittest(PythonBytesIOTest, PythonStringIOTest, + CBytesIOTest, CStringIOTest) + else: + test_support.run_unittest(PythonBytesIOTest, PythonStringIOTest) + +if __name__ == '__main__': + test_main() From python-checkins at python.org Mon Jul 2 22:08:49 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Mon, 2 Jul 2007 22:08:49 +0200 (CEST) Subject: [Python-checkins] r56150 - python/branches/cpy_merge/Modules/_bytes_iomodule.c Message-ID: <20070702200849.DEF6C1E4007@bag.python.org> Author: alexandre.vassalotti Date: Mon Jul 2 22:08:49 2007 New Revision: 56150 Modified: python/branches/cpy_merge/Modules/_bytes_iomodule.c Log: Add the readinto method. 
Modified: python/branches/cpy_merge/Modules/_bytes_iomodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_bytes_iomodule.c (original) +++ python/branches/cpy_merge/Modules/_bytes_iomodule.c Mon Jul 2 22:08:49 2007 @@ -278,6 +278,24 @@ } static PyObject * +bytes_io_readinto(BytesIOObject *self, PyObject *buffer) +{ + void *raw_buffer; + Py_ssize_t len; + + if (PyObject_AsWriteBuffer(buffer, &raw_buffer, &len) == -1) + return NULL; + + if (len > self->string_size) + len = self->string_size; + + memcpy(raw_buffer, self->buf + self->pos, len); + self->pos += len; + + return PyInt_FromSsize_t(len); +} + +static PyObject * bytes_io_truncate(BytesIOObject *self, PyObject *args) { Py_ssize_t size; @@ -517,6 +535,12 @@ "The optional size argument, if given, is an approximate bound on the\n" "total number of bytes in the lines returned.\n"); +PyDoc_STRVAR(BytesIO_readinto_doc, +"readinto(bytes) -> int. Read up to len(b) bytes into b.\n" +"\n" +"Returns number of bytes read (0 for EOF), or None if the object\n" +"is set not to block as has no data to read." 
+ PyDoc_STRVAR(BytesIO_tell_doc, "tell() -> current file position, an integer\n"); @@ -579,6 +603,8 @@ BytesIO_readline_doc}, {"readlines", (PyCFunction) bytes_io_readlines, METH_VARARGS, BytesIO_readlines_doc}, + {"readinto", (PyCFunction) bytes_io_readinto, METH_O, + BytesIO_readinto_doc}, {"tell", (PyCFunction) bytes_io_tell, METH_NOARGS, BytesIO_tell_doc}, {"truncate", (PyCFunction) bytes_io_truncate, METH_VARARGS, From python-checkins at python.org Mon Jul 2 22:39:49 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Mon, 2 Jul 2007 22:39:49 +0200 (CEST) Subject: [Python-checkins] r56151 - python/branches/cpy_merge/Modules/_bytes_iomodule.c Message-ID: <20070702203949.67CB11E4007@bag.python.org> Author: alexandre.vassalotti Date: Mon Jul 2 22:39:49 2007 New Revision: 56151 Modified: python/branches/cpy_merge/Modules/_bytes_iomodule.c Log: Make readinto set the buffer position correctly. Fix a minor error with the docstring of readinto. Modified: python/branches/cpy_merge/Modules/_bytes_iomodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_bytes_iomodule.c (original) +++ python/branches/cpy_merge/Modules/_bytes_iomodule.c Mon Jul 2 22:39:49 2007 @@ -286,8 +286,8 @@ if (PyObject_AsWriteBuffer(buffer, &raw_buffer, &len) == -1) return NULL; - if (len > self->string_size) - len = self->string_size; + if (self->pos + len > self->string_size) + len = self->string_size - self->pos; memcpy(raw_buffer, self->buf + self->pos, len); self->pos += len; @@ -539,7 +539,7 @@ "readinto(bytes) -> int. Read up to len(b) bytes into b.\n" "\n" "Returns number of bytes read (0 for EOF), or None if the object\n" -"is set not to block as has no data to read." 
+"is set not to block as has no data to read."); PyDoc_STRVAR(BytesIO_tell_doc, "tell() -> current file position, an integer\n"); From python-checkins at python.org Mon Jul 2 22:53:11 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Mon, 2 Jul 2007 22:53:11 +0200 (CEST) Subject: [Python-checkins] r56152 - python/branches/cpy_merge/Lib/test/test_memoryio.py Message-ID: <20070702205311.81BA11E4007@bag.python.org> Author: alexandre.vassalotti Date: Mon Jul 2 22:53:11 2007 New Revision: 56152 Modified: python/branches/cpy_merge/Lib/test/test_memoryio.py Log: Add a unit test for BytesIO.readinto Clean up the main MemoryIOTest. Modified: python/branches/cpy_merge/Lib/test/test_memoryio.py ============================================================================== --- python/branches/cpy_merge/Lib/test/test_memoryio.py (original) +++ python/branches/cpy_merge/Lib/test/test_memoryio.py Mon Jul 2 22:53:11 2007 @@ -31,7 +31,7 @@ self.assertEqual(f.seek(-1, 2), 13) self.assertEqual(f.tell(), 13) self.assertEqual(f.truncate(12), 12) - self.assertEqual(f.tell(), 13) + self.assertEqual(f.tell(), 12) def test_write(self): buf = self.buftype("hello world\n") @@ -92,27 +92,6 @@ buf = self.buftype("1234567890") memio = self.ioclass(buf) - def read_ops(self, f, data): - t = self.buftype - data = f.read(5) - self.assertEqual(data, t("hello")) - self.assertEqual(f.readinto(data), 5) - self.assertEqual(data, t(" worl")) - self.assertEqual(f.readinto(data), 2) - self.assertEqual(len(data), 5) - self.assertEqual(data[:2], t("d\n")) - self.assertEqual(f.seek(0), 0) - self.assertEqual(f.read(20), t("hello world\n")) - self.assertEqual(f.read(1), t("")) - self.assertEqual(f.readinto(t("x")), 0) - self.assertEqual(f.seek(-6, 2), 6) - self.assertEqual(f.read(5), t("world")) - self.assertEqual(f.read(0), t("")) - self.assertEqual(f.readinto(t("")), 0) - self.assertEqual(f.seek(-6, 1), 5) - self.assertEqual(f.read(5), t(" worl")) - self.assertEqual(f.tell(), 10) - def 
test_read(self): buf = self.buftype("1234567890") memio = self.ioclass(buf) @@ -121,11 +100,9 @@ self.assertEqual(memio.read(4), buf[1:5]) self.assertEqual(memio.read(900), buf[5:]) self.assertEqual(memio.read(), self.EOF) - - buf = self.buftype("hello world\n") - memio = self.ioclass(buf) - - self.read_ops(memio, buf) + self.assertEqual(memio.seek(0), 0) + self.assertEqual(memio.read(), buf) + self.assertEqual(memio.tell(), 10) def test_read_noargs(self): buf = self.buftype("1234567890") @@ -202,7 +179,7 @@ self.assertEqual(memio.seekable(), True) -class PythonBytesIOTest(MemoryTestMixin, unittest.TestCase): +class PyBytesIOTest(MemoryTestMixin, unittest.TestCase): """ Test the Python implementation of BytesIO. """ @@ -210,36 +187,50 @@ ioclass = io._BytesIO EOF = b"" + def test_readinto(self): + buf = self.buftype("1234567890") + memio = self.ioclass(buf) + + b = bytes("hello") + self.assertEqual(memio.readinto(b), 5) + self.assertEqual(b, b"12345") + self.assertEqual(memio.readinto(b), 5) + self.assertEqual(b, b"67890") + self.assertEqual(memio.readinto(b), 0) + self.assertEqual(b, b"67890") -class PythonStringIOTest(MemoryTestMixin, unittest.TestCase): + b = bytes("hello world") + self.assertEqual(memio.seek(0), 0) + self.assertEqual(memio.readinto(b), 10) + self.assertEqual(b, "1234567890d") + + +class PyStringIOTest(MemoryTestMixin, unittest.TestCase): """ Test the Python implementation of StringIO. """ - buftype = str + buftype = unicode ioclass = io._StringIO EOF = "" if has_c_implementation: - class CBytesIOTest(MemoryTestMixin, unittest.TestCase): + class CBytesIOTest(PyBytesIOTest): """ Test the C implementation of BytesIO if available. """ - buftype = bytes ioclass = _bytes_io.BytesIO - EOF = b"" - class CStringIOTest(MemoryTestMixin, unittest.TestCase): + class CStringIOTest(PyStringIOTest): """ Test the C implementation of StringIO if available. 
""" - buftype = unicode ioclass = _string_io.StringIO - EOF = u"" + def test_main(): if has_c_implementation: - test_support.run_unittest(PythonBytesIOTest, PythonStringIOTest, + test_support.run_unittest(PyBytesIOTest, PyStringIOTest, CBytesIOTest, CStringIOTest) else: test_support.run_unittest(PythonBytesIOTest, PythonStringIOTest) From python-checkins at python.org Mon Jul 2 23:23:39 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Mon, 2 Jul 2007 23:23:39 +0200 (CEST) Subject: [Python-checkins] r56153 - python/branches/cpy_merge/Lib/test/test_memoryio.py Message-ID: <20070702212339.574671E4007@bag.python.org> Author: alexandre.vassalotti Date: Mon Jul 2 23:23:39 2007 New Revision: 56153 Modified: python/branches/cpy_merge/Lib/test/test_memoryio.py Log: Add test for readline. Add test for readlines. Don't check the return value of seek in other tests. Fix a typo, PythonBytesIOTest -> PyBytesIOTest Modified: python/branches/cpy_merge/Lib/test/test_memoryio.py ============================================================================== --- python/branches/cpy_merge/Lib/test/test_memoryio.py (original) +++ python/branches/cpy_merge/Lib/test/test_memoryio.py Mon Jul 2 23:23:39 2007 @@ -100,7 +100,7 @@ self.assertEqual(memio.read(4), buf[1:5]) self.assertEqual(memio.read(900), buf[5:]) self.assertEqual(memio.read(), self.EOF) - self.assertEqual(memio.seek(0), 0) + memio.seek(0) self.assertEqual(memio.read(), buf) self.assertEqual(memio.tell(), 10) @@ -112,10 +112,26 @@ self.assertEqual(self.EOF, memio.read()) def test_readline(self): - pass + buf = self.buftype("1234567890\n") + memio = self.ioclass(buf * 2) + + self.assertEqual(memio.readline(), buf) + self.assertEqual(memio.readline(), buf) + self.assertEqual(memio.readline(), self.EOF) + memio.seek(0) + self.assertEqual(memio.readline(5), "12345") + self.assertEqual(memio.readline(5), "67890") + self.assertEqual(memio.readline(5), '\n') def test_readlines(self): - pass + buf = 
self.buftype("1234567890\n") + memio = self.ioclass(buf * 10) + + self.assertEqual(memio.readlines(), [buf] * 10) + memio.seek(5) + self.assertEqual(memio.readlines(), ['67890\n'] + [buf] * 9) + memio.seek(0) + self.assertEqual(memio.readlines(15), [buf] * 2) def test_iterator(self): buf = self.buftype("1234567890\n") @@ -200,7 +216,7 @@ self.assertEqual(b, b"67890") b = bytes("hello world") - self.assertEqual(memio.seek(0), 0) + memio.seek(0) self.assertEqual(memio.readinto(b), 10) self.assertEqual(b, "1234567890d") @@ -233,7 +249,7 @@ test_support.run_unittest(PyBytesIOTest, PyStringIOTest, CBytesIOTest, CStringIOTest) else: - test_support.run_unittest(PythonBytesIOTest, PythonStringIOTest) + test_support.run_unittest(PyBytesIOTest, PyStringIOTest) if __name__ == '__main__': test_main() From python-checkins at python.org Mon Jul 2 23:46:52 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Mon, 2 Jul 2007 23:46:52 +0200 (CEST) Subject: [Python-checkins] r56154 - python/branches/cpy_merge/Modules/_bytes_iomodule.c python/branches/cpy_merge/Modules/_string_iomodule.c Message-ID: <20070702214652.C95871E4007@bag.python.org> Author: alexandre.vassalotti Date: Mon Jul 2 23:46:52 2007 New Revision: 56154 Modified: python/branches/cpy_merge/Modules/_bytes_iomodule.c python/branches/cpy_merge/Modules/_string_iomodule.c Log: Fix a slight inefficiency in the seek method. 
Modified: python/branches/cpy_merge/Modules/_bytes_iomodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_bytes_iomodule.c (original) +++ python/branches/cpy_merge/Modules/_bytes_iomodule.c Mon Jul 2 23:46:52 2007 @@ -342,7 +342,7 @@ static PyObject * bytes_io_seek(BytesIOObject *self, PyObject *args) { - Py_ssize_t newpos; + Py_ssize_t newpos, prevpos; int mode = 0; if (self->buf == NULL) @@ -371,12 +371,15 @@ if (resize_buffer(self, newpos) < 0) return NULL; /* out of memory */ + prevpos = self->pos; self->pos = newpos; - /* Pad with zeros the buffer region larger than the string size. - XXX This is inefficient for multiple seeks. */ - while (--newpos >= self->string_size) + /* Pad with zeros the buffer region larger than the string size and + not previously padded with zeros. */ + while (newpos >= self->string_size && newpos >= prevpos) { self->buf[newpos] = 0; + newpos--; + } return PyInt_FromSsize_t(self->pos); } Modified: python/branches/cpy_merge/Modules/_string_iomodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_string_iomodule.c (original) +++ python/branches/cpy_merge/Modules/_string_iomodule.c Mon Jul 2 23:46:52 2007 @@ -322,7 +322,7 @@ static PyObject * string_io_seek(StringIOObject *self, PyObject *args) { - Py_ssize_t newpos; + Py_ssize_t newpos, prevpos; int mode = 0; if (self->buf == NULL) @@ -351,12 +351,15 @@ if (resize_buffer(self, newpos) < 0) return NULL; /* out of memory */ + prevpos = self->pos; self->pos = newpos; - /* Pad with zeros the buffer region larger than the string size. - XXX This is inefficient for multiple seeks. */ - while (--newpos >= self->string_size) + /* Pad with zeros the buffer region larger than the string size and + not previously padded with zeros. 
*/ + while (newpos >= self->string_size && newpos >= prevpos) { self->buf[newpos] = 0; + newpos--; + } return PyInt_FromSsize_t(self->pos); } From python-checkins at python.org Tue Jul 3 08:05:09 2007 From: python-checkins at python.org (neal.norwitz) Date: Tue, 3 Jul 2007 08:05:09 +0200 (CEST) Subject: [Python-checkins] r56156 - peps/trunk/pep-3100.txt Message-ID: <20070703060509.695641E4003@bag.python.org> Author: neal.norwitz Date: Tue Jul 3 08:05:09 2007 New Revision: 56156 Modified: peps/trunk/pep-3100.txt Log: Now map and filter also return an iterator Modified: peps/trunk/pep-3100.txt ============================================================================== --- peps/trunk/pep-3100.txt (original) +++ peps/trunk/pep-3100.txt Tue Jul 3 08:05:09 2007 @@ -168,7 +168,7 @@ ================== * Make built-ins return an iterator where appropriate (e.g. ``range()``, - ``zip()``, ``map()``, ``filter()``, etc.) [zip and range: done] + ``zip()``, ``map()``, ``filter()``, etc.) [done] * Relevant functions should consume iterators (e.g. ``min()``, ``max()``) [They already do, since 2.2.] * Remove ``input()`` and rename ``raw_input()`` to ``input()``. From python-checkins at python.org Tue Jul 3 17:41:23 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Tue, 3 Jul 2007 17:41:23 +0200 (CEST) Subject: [Python-checkins] r56160 - python/branches/cpy_merge/Modules/_bytes_iomodule.c Message-ID: <20070703154123.37C661E4007@bag.python.org> Author: alexandre.vassalotti Date: Tue Jul 3 17:41:22 2007 New Revision: 56160 Modified: python/branches/cpy_merge/Modules/_bytes_iomodule.c Log: Check if the object is closed in the readinto method. 
Modified: python/branches/cpy_merge/Modules/_bytes_iomodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_bytes_iomodule.c (original) +++ python/branches/cpy_merge/Modules/_bytes_iomodule.c Tue Jul 3 17:41:22 2007 @@ -283,6 +283,9 @@ void *raw_buffer; Py_ssize_t len; + if (self->buf == NULL) + return err_closed(); + if (PyObject_AsWriteBuffer(buffer, &raw_buffer, &len) == -1) return NULL; From python-checkins at python.org Tue Jul 3 18:43:37 2007 From: python-checkins at python.org (guido.van.rossum) Date: Tue, 3 Jul 2007 18:43:37 +0200 (CEST) Subject: [Python-checkins] r56163 - in sandbox/trunk/2to3: fixes/fix_filter.py fixes/fix_map.py tests/test_fixers.py Message-ID: <20070703164337.4BB401E4003@bag.python.org> Author: guido.van.rossum Date: Tue Jul 3 18:43:36 2007 New Revision: 56163 Modified: sandbox/trunk/2to3/fixes/fix_filter.py sandbox/trunk/2to3/fixes/fix_map.py sandbox/trunk/2to3/tests/test_fixers.py Log: Fix a problem where a filter() or map() call inside another call was always ignored (instead of only when the called function was iter(), list(), tuple() or sorted()). 
Modified: sandbox/trunk/2to3/fixes/fix_filter.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_filter.py (original) +++ sandbox/trunk/2to3/fixes/fix_filter.py Tue Jul 3 18:43:36 2007 @@ -66,7 +66,7 @@ P1 = """ power< - NAME< 'iter' | 'list' | 'tuple' | 'sorted' > + ( 'iter' | 'list' | 'tuple' | 'sorted' ) trailer< '(' node=any ')' > any* > Modified: sandbox/trunk/2to3/fixes/fix_map.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_map.py (original) +++ sandbox/trunk/2to3/fixes/fix_map.py Tue Jul 3 18:43:36 2007 @@ -78,7 +78,7 @@ P1 = """ power< - NAME< 'iter' | 'list' | 'tuple' | 'sorted' > + ( 'iter' | 'list' | 'tuple' | 'sorted' ) trailer< '(' node=any ')' > any* > Modified: sandbox/trunk/2to3/tests/test_fixers.py ============================================================================== --- sandbox/trunk/2to3/tests/test_fixers.py (original) +++ sandbox/trunk/2to3/tests/test_fixers.py Tue Jul 3 18:43:36 2007 @@ -1866,8 +1866,8 @@ a = """x = list(filter(None, 'abc'))""" self.check(b, a) - b = """x = filter(f, 'abc')""" - a = """x = list(filter(f, 'abc'))""" + b = """x = len(filter(f, 'abc'))""" + a = """x = len(list(filter(f, 'abc')))""" self.check(b, a) b = """x = filter(lambda x: x%2 == 0, range(10))""" @@ -1909,8 +1909,8 @@ a = """x = list(map(f, 'abc'))""" self.check(b, a) - b = """x = map(f, 'abc', 'def')""" - a = """x = list(map(f, 'abc', 'def'))""" + b = """x = len(map(f, 'abc', 'def'))""" + a = """x = len(list(map(f, 'abc', 'def')))""" self.check(b, a) b = """x = map(None, 'abc')""" From python-checkins at python.org Wed Jul 4 01:23:59 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Wed, 4 Jul 2007 01:23:59 +0200 (CEST) Subject: [Python-checkins] r56169 - python/branches/cpy_merge/Lib/test/test_memoryio.py Message-ID: <20070703232359.EFA051E4003@bag.python.org> Author: alexandre.vassalotti Date: Wed 
Jul 4 01:23:59 2007 New Revision: 56169 Modified: python/branches/cpy_merge/Lib/test/test_memoryio.py Log: Clean up the unit tests. Modified: python/branches/cpy_merge/Lib/test/test_memoryio.py ============================================================================== --- python/branches/cpy_merge/Lib/test/test_memoryio.py (original) +++ python/branches/cpy_merge/Lib/test/test_memoryio.py Wed Jul 4 01:23:59 2007 @@ -38,11 +38,10 @@ memio = self.ioclass(buf) self.write_ops(memio) - - def test_write_noinit(self): memio = self.ioclass() - self.write_ops(memio) + memio.close() + self.assertRaises(ValueError, memio.write, buf) def test_writelines(self): buf = self.buftype("1234567890") @@ -50,6 +49,8 @@ memio.writelines([buf] * 100) self.assertEqual(memio.getvalue(), buf * 100) + memio.close() + self.assertRaises(ValueError, memio.writelines, buf) def test_writelines_error(self): memio = self.ioclass() @@ -72,21 +73,8 @@ memio.write(buf) self.assertEqual(memio.getvalue(), buf[:4] + buf) self.assertRaises(IOError, memio.truncate, -1) - - def test_close(self): - buf = self.buftype("1234567890") - memio = self.ioclass(buf) - - self.assertEqual(memio.closed, False) memio.close() - self.assertRaises(ValueError, memio.read) - self.assertRaises(ValueError, memio.write, buf) - self.assertRaises(ValueError, memio.writelines, buf) - self.assertRaises(ValueError, memio.seek, 3) self.assertRaises(ValueError, memio.truncate) - self.assertRaises(ValueError, memio.getvalue) - self.assertRaises(ValueError, memio.tell) - self.assertEqual(memio.closed, True) def test_init(self): buf = self.buftype("1234567890") @@ -102,14 +90,10 @@ self.assertEqual(memio.read(), self.EOF) memio.seek(0) self.assertEqual(memio.read(), buf) + self.assertEqual(memio.read(), self.EOF) self.assertEqual(memio.tell(), 10) - - def test_read_noargs(self): - buf = self.buftype("1234567890") - memio = self.ioclass(buf) - - self.assertEqual(buf, memio.read()) - self.assertEqual(self.EOF, memio.read()) + 
memio.close() + self.assertRaises(ValueError, memio.read) def test_readline(self): buf = self.buftype("1234567890\n") @@ -122,6 +106,8 @@ self.assertEqual(memio.readline(5), "12345") self.assertEqual(memio.readline(5), "67890") self.assertEqual(memio.readline(5), '\n') + memio.close() + self.assertRaises(ValueError, memio.readline) def test_readlines(self): buf = self.buftype("1234567890\n") @@ -132,6 +118,8 @@ self.assertEqual(memio.readlines(), ['67890\n'] + [buf] * 9) memio.seek(0) self.assertEqual(memio.readlines(15), [buf] * 2) + memio.close() + self.assertRaises(ValueError, memio.readlines) def test_iterator(self): buf = self.buftype("1234567890\n") @@ -145,13 +133,14 @@ self.assertEqual(line, buf) i += 1 self.assertEqual(i, 10) - memio.seek(0) i = 0 for line in memio: self.assertEqual(line, buf) i += 1 self.assertEqual(i, 10) + memio.close() + self.assertRaises(ValueError, memio.__next__) def test_getvalue(self): buf = self.buftype("1234567890") @@ -160,6 +149,8 @@ self.assertEqual(memio.getvalue(), buf) memio = self.ioclass(buf * 1000) self.assertEqual(memio.getvalue()[-3:], "890") + memio.close() + self.assertRaises(ValueError, memio.getvalue) def test_seek(self): buf = self.buftype("1234567890") @@ -171,6 +162,8 @@ memio.seek(3) self.assertEqual(buf[3:], memio.read()) + memio.close() + self.assertRaises(ValueError, memio.seek, 3) def test_tell(self): buf = self.buftype("1234567890") @@ -181,6 +174,8 @@ self.assertEqual(5, memio.tell()) memio.seek(10000) self.assertEqual(10000, memio.tell()) + memio.close() + self.assertRaises(ValueError, memio.tell) def test_flags(self): memio = self.ioclass() @@ -189,16 +184,16 @@ self.assertEqual(memio.readable(), True) self.assertEqual(memio.seekable(), True) self.assertEqual(memio.isatty(), False) + self.assertEqual(memio.closed, False) memio.close() self.assertEqual(memio.writable(), True) self.assertEqual(memio.readable(), True) self.assertEqual(memio.seekable(), True) + self.assertRaises(ValueError, memio.isatty) + 
self.assertEqual(memio.closed, True) class PyBytesIOTest(MemoryTestMixin, unittest.TestCase): - """ - Test the Python implementation of BytesIO. - """ buftype = bytes ioclass = io._BytesIO EOF = b"" @@ -214,42 +209,36 @@ self.assertEqual(b, b"67890") self.assertEqual(memio.readinto(b), 0) self.assertEqual(b, b"67890") - b = bytes("hello world") memio.seek(0) self.assertEqual(memio.readinto(b), 10) self.assertEqual(b, "1234567890d") + b = bytes() + memio.seek(0) + self.assertEqual(memio.readinto(b), 0) + self.assertEqual(b, b"") + memio.close() + self.assertRaises(ValueError, memio.readinto, b) class PyStringIOTest(MemoryTestMixin, unittest.TestCase): - """ - Test the Python implementation of StringIO. - """ buftype = unicode ioclass = io._StringIO EOF = "" if has_c_implementation: class CBytesIOTest(PyBytesIOTest): - """ - Test the C implementation of BytesIO if available. - """ ioclass = _bytes_io.BytesIO - class CStringIOTest(PyStringIOTest): - """ - Test the C implementation of StringIO if available. - """ ioclass = _string_io.StringIO def test_main(): + tests = [PyBytesIOTest, PyStringIOTest] if has_c_implementation: - test_support.run_unittest(PyBytesIOTest, PyStringIOTest, - CBytesIOTest, CStringIOTest) - else: - test_support.run_unittest(PyBytesIOTest, PyStringIOTest) + tests.extend([CBytesIOTest, CStringIOTest]) + test_support.run_unittest(*tests) if __name__ == '__main__': test_main() From python-checkins at python.org Wed Jul 4 04:03:10 2007 From: python-checkins at python.org (mark.hammond) Date: Wed, 4 Jul 2007 04:03:10 +0200 (CEST) Subject: [Python-checkins] r56170 - python/trunk/PCbuild8/build.bat Message-ID: <20070704020310.C9C6D1E4003@bag.python.org> Author: mark.hammond Date: Wed Jul 4 04:03:10 2007 New Revision: 56170 Modified: python/trunk/PCbuild8/build.bat Log: copy built files to the PCBuild directory, where tools like distutils or external build processes can find them. 
Modified: python/trunk/PCbuild8/build.bat ============================================================================== --- python/trunk/PCbuild8/build.bat (original) +++ python/trunk/PCbuild8/build.bat Wed Jul 4 04:03:10 2007 @@ -14,4 +14,15 @@ set cmd=devenv pcbuild.sln %build% "%conf%|%platf%" echo %cmd% -%cmd% \ No newline at end of file +%cmd% + +rem Copy whatever was built to the canonical 'PCBuild' directory. +rem This helps extensions which use distutils etc. +rem (Don't check if the build was successful - we expect a few failures +rem due to missing libs) +echo Copying built files to ..\PCBuild +if not exist %platf%%conf%\. (echo %platf%%conf% does not exist - nothing copied & goto xit) +if not exist ..\PCBuild\. (echo ..\PCBuild does not exist - nothing copied & goto xit) +xcopy /q/y %platf%%conf%\* ..\PCBuild\. + +:xit From python-checkins at python.org Wed Jul 4 14:26:46 2007 From: python-checkins at python.org (nick.coghlan) Date: Wed, 4 Jul 2007 14:26:46 +0200 (CEST) Subject: [Python-checkins] r56171 - peps/trunk/pep-0366.txt Message-ID: <20070704122646.E8C061E4018@bag.python.org> Author: nick.coghlan Date: Wed Jul 4 14:26:46 2007 New Revision: 56171 Modified: peps/trunk/pep-0366.txt Log: Add reference to c.l.p discussion of bundling scripts as part of a package Modified: peps/trunk/pep-0366.txt ============================================================================== --- peps/trunk/pep-0366.txt (original) +++ peps/trunk/pep-0366.txt Wed Jul 4 14:26:46 2007 @@ -18,7 +18,9 @@ the use of explicit relative imports from executable modules within packages. Such imports currently fail due to an awkward interaction between PEP 328 and PEP 338 - this behaviour is the subject of at -least one open SF bug report (#1510172)[1]. +least one open SF bug report (#1510172)[1], and has lead to at least +a few queries on comp.lang.python (such as Alan Isaac's question in +[2]). 
With the proposed mechanism, relative imports will work automatically if the module is executed using the ``-m`` switch. A small amount of @@ -173,6 +175,9 @@ .. [2] Guido's rejection of PEP 3122 (http://mail.python.org/pipermail/python-3000/2007-April/006793.html) +.. [3] c.l.p. question about modules and relative imports + (http://groups.google.com/group/comp.lang.python/browse_thread/thread/c44c769a72ca69fa/) + Copyright ========= From python-checkins at python.org Wed Jul 4 14:47:13 2007 From: python-checkins at python.org (nick.coghlan) Date: Wed, 4 Jul 2007 14:47:13 +0200 (CEST) Subject: [Python-checkins] r56172 - peps/trunk/pep-0366.txt Message-ID: <20070704124713.EDB551E4003@bag.python.org> Author: nick.coghlan Date: Wed Jul 4 14:47:13 2007 New Revision: 56172 Modified: peps/trunk/pep-0366.txt Log: Typo fixes, note limitation of direct execution boilerplate Modified: peps/trunk/pep-0366.txt ============================================================================== --- peps/trunk/pep-0366.txt (original) +++ peps/trunk/pep-0366.txt Wed Jul 4 14:47:13 2007 @@ -18,9 +18,9 @@ the use of explicit relative imports from executable modules within packages. Such imports currently fail due to an awkward interaction between PEP 328 and PEP 338 - this behaviour is the subject of at -least one open SF bug report (#1510172)[1], and has lead to at least -a few queries on comp.lang.python (such as Alan Isaac's question in -[2]). +least one open SF bug report (#1510172)[1], and has most likely +been a factor in at least a few queries on comp.lang.python (such +as Alan Isaac's question in [2]). With the proposed mechanism, relative imports will work automatically if the module is executed using the ``-m`` switch. A small amount of @@ -55,8 +55,8 @@ For the 2.5 release, the recommendation is to always use absolute imports in any module that is intended to be used as a main module. 
The ``-m`` switch already provides a benefit here, as it inserts the -current directory into ``sys.path``, instead of the directory contain the -main module. This means that it is possible to run a module from +current directory into ``sys.path``, instead of the directory containing +the main module. This means that it is possible to run a module from inside a package using ``-m`` so long as the current directory contains the top level directory for the package. Absolute imports will work correctly even if the package isn't installed anywhere else on @@ -135,10 +135,15 @@ if __name__ == "__main__" and not __package_name__: __package_name__ = "" -Note that this boilerplate has the same disadvantage as the use of -absolute imports of sibling modules - if the script is moved to a -different package or subpackage, the boilerplate will need to be -updated manually. +Note that this boilerplate is sufficient only if the top level package +is already accessible via sys.path. Additional code that manipulates +sys.path would be needed in order for direct execution to work +without the top level package already being on sys.path. + +This approach also has the same disadvantage as the use of absolute +imports of sibling modules - if the script is moved to a different +package or subpackage, the boilerplate will need to be updated +manually. 
With this feature in place, the test scripts in the package above would be able to change their import lines to something along the From python-checkins at python.org Wed Jul 4 19:13:04 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Wed, 4 Jul 2007 19:13:04 +0200 (CEST) Subject: [Python-checkins] r56173 - python/branches/cpy_merge/Modules/_bytes_iomodule.c python/branches/cpy_merge/Modules/_string_iomodule.c Message-ID: <20070704171304.306231E4012@bag.python.org> Author: alexandre.vassalotti Date: Wed Jul 4 19:13:03 2007 New Revision: 56173 Modified: python/branches/cpy_merge/Modules/_bytes_iomodule.c python/branches/cpy_merge/Modules/_string_iomodule.c Log: Made the object struct slightly clearer. Add assert check to readinto. Modified: python/branches/cpy_merge/Modules/_bytes_iomodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_bytes_iomodule.c (original) +++ python/branches/cpy_merge/Modules/_bytes_iomodule.c Wed Jul 4 19:13:03 2007 @@ -6,12 +6,11 @@ typedef struct { PyObject_HEAD char *buf; - Py_ssize_t pos, string_size; + Py_ssize_t pos; + Py_ssize_t string_size; Py_ssize_t buf_size; } BytesIOObject; -static PyTypeObject BytesIO_Type; - static PyObject * err_closed(void) @@ -293,6 +292,8 @@ len = self->string_size - self->pos; memcpy(raw_buffer, self->buf + self->pos, len); + assert(self->pos + len < PY_SSIZE_T_MAX); + assert(len >= 0); self->pos += len; return PyInt_FromSsize_t(len); @@ -633,7 +634,6 @@ "_bytes_io.BytesIO", /*tp_name*/ sizeof(BytesIOObject), /*tp_basicsize*/ 0, /*tp_itemsize*/ - /* methods */ (destructor)BytesIO_dealloc, /*tp_dealloc*/ 0, /*tp_print*/ 0, /*tp_getattr*/ Modified: python/branches/cpy_merge/Modules/_string_iomodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_string_iomodule.c (original) +++ python/branches/cpy_merge/Modules/_string_iomodule.c Wed Jul 4 
19:13:03 2007 @@ -6,11 +6,11 @@ typedef struct { PyObject_HEAD Py_UNICODE *buf; - Py_ssize_t pos, string_size; + Py_ssize_t pos; + Py_ssize_t string_size; Py_ssize_t buf_size; } StringIOObject; -static PyTypeObject StringIO_Type; static PyObject * err_closed(void) @@ -609,7 +609,6 @@ "_string_io.StringIO", /*tp_name*/ sizeof(StringIOObject), /*tp_basicsize*/ 0, /*tp_itemsize*/ - /* methods */ (destructor)StringIO_dealloc, /*tp_dealloc*/ 0, /*tp_print*/ 0, /*tp_getattr*/ From python-checkins at python.org Wed Jul 4 21:12:03 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Wed, 4 Jul 2007 21:12:03 +0200 (CEST) Subject: [Python-checkins] r56174 - python/branches/cpy_merge/Modules/_bytes_iomodule.c python/branches/cpy_merge/Modules/_string_iomodule.c Message-ID: <20070704191203.E084C1E4004@bag.python.org> Author: alexandre.vassalotti Date: Wed Jul 4 21:12:03 2007 New Revision: 56174 Modified: python/branches/cpy_merge/Modules/_bytes_iomodule.c python/branches/cpy_merge/Modules/_string_iomodule.c Log: Remove useless asserts. Py_ssize_t is an unsigned type, so it can't be negative. 
Modified: python/branches/cpy_merge/Modules/_bytes_iomodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_bytes_iomodule.c (original) +++ python/branches/cpy_merge/Modules/_bytes_iomodule.c Wed Jul 4 21:12:03 2007 @@ -47,7 +47,6 @@ *output = self->buf + self->pos; assert(self->pos + len < PY_SSIZE_T_MAX); - assert(len >= 0); self->pos += len; return len; @@ -293,7 +292,6 @@ memcpy(raw_buffer, self->buf + self->pos, len); assert(self->pos + len < PY_SSIZE_T_MAX); - assert(len >= 0); self->pos += len; return PyInt_FromSsize_t(len); Modified: python/branches/cpy_merge/Modules/_string_iomodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_string_iomodule.c (original) +++ python/branches/cpy_merge/Modules/_string_iomodule.c Wed Jul 4 21:12:03 2007 @@ -47,7 +47,6 @@ *output = self->buf + self->pos; assert(self->pos + len < PY_SSIZE_T_MAX); - assert(len >= 0); self->pos += len; return len; From martin at v.loewis.de Wed Jul 4 21:27:50 2007 From: martin at v.loewis.de (=?ISO-8859-1?Q?=22Martin_v=2E_L=F6wis=22?=) Date: Wed, 04 Jul 2007 21:27:50 +0200 Subject: [Python-checkins] r56174 - python/branches/cpy_merge/Modules/_bytes_iomodule.c python/branches/cpy_merge/Modules/_string_iomodule.c In-Reply-To: <20070704191203.E084C1E4004@bag.python.org> References: <20070704191203.E084C1E4004@bag.python.org> Message-ID: <468BF4B6.50607@v.loewis.de> > Py_ssize_t is an unsigned type, so it can't be negative. That's not true. *s*size_t is meant to indicate that it is a *signed* version of size_t. 
Regards, Martin From alexandre at peadrop.com Wed Jul 4 22:36:16 2007 From: alexandre at peadrop.com (Alexandre Vassalotti) Date: Wed, 4 Jul 2007 16:36:16 -0400 Subject: [Python-checkins] r56174 - python/branches/cpy_merge/Modules/_bytes_iomodule.c python/branches/cpy_merge/Modules/_string_iomodule.c In-Reply-To: <468BF4B6.50607@v.loewis.de> References: <20070704191203.E084C1E4004@bag.python.org> <468BF4B6.50607@v.loewis.de> Message-ID: On 7/4/07, "Martin v. Löwis" wrote: > > Py_ssize_t is an unsigned type, so it can't be negative. > > That's not true. *s*size_t is meant to indicate that it > is a *signed* version of size_t. Ah! I thought Py_ssize_t was just the Python analogue of size_t, which is unsigned. Thanks for the note! From martin at v.loewis.de Wed Jul 4 22:55:05 2007 From: martin at v.loewis.de (=?UTF-8?B?Ik1hcnRpbiB2LiBMw7Z3aXMi?=) Date: Wed, 04 Jul 2007 22:55:05 +0200 Subject: [Python-checkins] r56174 - python/branches/cpy_merge/Modules/_bytes_iomodule.c python/branches/cpy_merge/Modules/_string_iomodule.c In-Reply-To: References: <20070704191203.E084C1E4004@bag.python.org> <468BF4B6.50607@v.loewis.de> Message-ID: <468C0929.5080606@v.loewis.de> Alexandre Vassalotti schrieb: > On 7/4/07, "Martin v. Löwis" wrote: >> > Py_ssize_t is an unsigned type, so it can't be negative. >> >> That's not true. *s*size_t is meant to indicate that it >> is a *signed* version of size_t. > > Ah! I thought Py_ssize_t was just the Python analogue of size_t, which > is unsigned. Thanks for the note! Actually, it is the analogue of ssize_t, which doesn't exist on all platforms (hence the Py_ prefix).
Regards, Martin From python-checkins at python.org Wed Jul 4 22:57:09 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Wed, 4 Jul 2007 22:57:09 +0200 (CEST) Subject: [Python-checkins] r56175 - python/branches/cpy_merge/Modules/_bytes_iomodule.c python/branches/cpy_merge/Modules/_string_iomodule.c Message-ID: <20070704205709.D40E51E4013@bag.python.org> Author: alexandre.vassalotti Date: Wed Jul 4 22:57:09 2007 New Revision: 56175 Modified: python/branches/cpy_merge/Modules/_bytes_iomodule.c python/branches/cpy_merge/Modules/_string_iomodule.c Log: Revert to r56173, as per recommendation of Martin v. L?wis. Modified: python/branches/cpy_merge/Modules/_bytes_iomodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_bytes_iomodule.c (original) +++ python/branches/cpy_merge/Modules/_bytes_iomodule.c Wed Jul 4 22:57:09 2007 @@ -47,6 +47,7 @@ *output = self->buf + self->pos; assert(self->pos + len < PY_SSIZE_T_MAX); + assert(len >= 0); self->pos += len; return len; @@ -292,6 +293,7 @@ memcpy(raw_buffer, self->buf + self->pos, len); assert(self->pos + len < PY_SSIZE_T_MAX); + assert(len >= 0); self->pos += len; return PyInt_FromSsize_t(len); Modified: python/branches/cpy_merge/Modules/_string_iomodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_string_iomodule.c (original) +++ python/branches/cpy_merge/Modules/_string_iomodule.c Wed Jul 4 22:57:09 2007 @@ -47,6 +47,7 @@ *output = self->buf + self->pos; assert(self->pos + len < PY_SSIZE_T_MAX); + assert(len >= 0); self->pos += len; return len; From alexandre at peadrop.com Wed Jul 4 23:02:21 2007 From: alexandre at peadrop.com (Alexandre Vassalotti) Date: Wed, 4 Jul 2007 17:02:21 -0400 Subject: [Python-checkins] r56174 - python/branches/cpy_merge/Modules/_bytes_iomodule.c python/branches/cpy_merge/Modules/_string_iomodule.c In-Reply-To: 
<468C0929.5080606@v.loewis.de> References: <20070704191203.E084C1E4004@bag.python.org> <468BF4B6.50607@v.loewis.de> <468C0929.5080606@v.loewis.de> Message-ID: On 7/4/07, "Martin v. L?wis" wrote: > Actually, it is the analogue of ssize_t, which doesn't exist on all > platforms (hence the Py_ prefix). Just read PEP-353 that explained me everything I wanted to know. Thanks again, -- Alexandre From nnorwitz at gmail.com Thu Jul 5 11:43:46 2007 From: nnorwitz at gmail.com (Neal Norwitz) Date: Thu, 5 Jul 2007 05:43:46 -0400 Subject: [Python-checkins] Python Regression Test Failures refleak (1) Message-ID: <20070705094346.GA1019@python.psfb.org> test_popen2 leaked [-26, 26, -26] references, sum=-26 test_sys leaked [-132, 0, 0] references, sum=-132 test_urllib2_localnet leaked [1, 5, 3] references, sum=9 From python-checkins at python.org Fri Jul 6 00:03:39 2007 From: python-checkins at python.org (kurt.kaiser) Date: Fri, 6 Jul 2007 00:03:39 +0200 (CEST) Subject: [Python-checkins] r56176 - python/trunk/Modules/_tkinter.c Message-ID: <20070705220339.A1AF41E4004@bag.python.org> Author: kurt.kaiser Date: Fri Jul 6 00:03:39 2007 New Revision: 56176 Modified: python/trunk/Modules/_tkinter.c Log: Many calls to tk.call involve an arglist containing a single tuple. Calls using METH_OLDARGS unpack this tuple; calls using METH_VARARG don't. Tcl's concatenation of args was affected; IDLE doesn't start. Modify Tkapp_Call() to unpack single tuple arglists. Bug 1733943 Ref http://mail.python.org/pipermail/python-checkins/2007-May/060454.html Modified: python/trunk/Modules/_tkinter.c ============================================================================== --- python/trunk/Modules/_tkinter.c (original) +++ python/trunk/Modules/_tkinter.c Fri Jul 6 00:03:39 2007 @@ -1285,6 +1285,12 @@ /* Could add TCL_EVAL_GLOBAL if wrapped by GlobalCall... 
*/ int flags = TCL_EVAL_DIRECT; + /* If args is a single tuple, replace with contents of tuple */ + if (1 == PyTuple_Size(args)){ + PyObject* item = PyTuple_GetItem(args, 0); + if (PyTuple_Check(item)) + args = item; + } #ifdef WITH_THREAD if (self->threaded && self->thread_id != Tcl_GetCurrentThread()) { /* We cannot call the command directly. Instead, we must From buildbot at python.org Fri Jul 6 00:30:39 2007 From: buildbot at python.org (buildbot at python.org) Date: Thu, 05 Jul 2007 22:30:39 +0000 Subject: [Python-checkins] buildbot warnings in x86 mvlgcc trunk Message-ID: <20070705223040.04DD01E4004@bag.python.org> The Buildbot has detected a new failure of x86 mvlgcc trunk. Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520mvlgcc%2520trunk/builds/635 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: kurt.kaiser Build had warnings: warnings test Excerpt from the test logfile: Traceback (most recent call last): File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 66, in run self._RequestHandlerClass) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 22, in __init__ RequestHandlerClass) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 331, in __init__ self.server_bind() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/BaseHTTPServer.py", line 101, in server_bind SocketServer.TCPServer.server_bind(self) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 342, in server_bind self.socket.bind(self.server_address) File "", line 1, in bind error: (98, 'Address already in use') Traceback (most recent call last): File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/threading.py", line 465, in __bootstrap 
self.run() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 66, in run self._RequestHandlerClass) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 22, in __init__ RequestHandlerClass) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 331, in __init__ self.server_bind() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/BaseHTTPServer.py", line 101, in server_bind SocketServer.TCPServer.server_bind(self) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 342, in server_bind self.socket.bind(self.server_address) File "", line 1, in bind error: (98, 'Address already in use') Traceback (most recent call last): File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 66, in run self._RequestHandlerClass) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 22, in __init__ RequestHandlerClass) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 331, in __init__ self.server_bind() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/BaseHTTPServer.py", line 101, in server_bind SocketServer.TCPServer.server_bind(self) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 342, in server_bind self.socket.bind(self.server_address) File "", line 1, in bind error: (98, 'Address already in use') Traceback (most recent call last): File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 66, in run self._RequestHandlerClass) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 22, in __init__ 
RequestHandlerClass) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 331, in __init__ self.server_bind() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/BaseHTTPServer.py", line 101, in server_bind SocketServer.TCPServer.server_bind(self) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 342, in server_bind self.socket.bind(self.server_address) File "", line 1, in bind error: (98, 'Address already in use') 1 test failed: test_urllib2_localnet Traceback (most recent call last): File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 66, in run self._RequestHandlerClass) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 22, in __init__ RequestHandlerClass) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 331, in __init__ self.server_bind() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/BaseHTTPServer.py", line 101, in server_bind SocketServer.TCPServer.server_bind(self) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 342, in server_bind self.socket.bind(self.server_address) File "", line 1, in bind error: (98, 'Address already in use') Traceback (most recent call last): File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 66, in run self._RequestHandlerClass) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 22, in __init__ RequestHandlerClass) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 331, in __init__ self.server_bind() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/BaseHTTPServer.py", line 101, in 
server_bind SocketServer.TCPServer.server_bind(self) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 342, in server_bind self.socket.bind(self.server_address) File "", line 1, in bind error: (98, 'Address already in use') Traceback (most recent call last): File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 66, in run self._RequestHandlerClass) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 22, in __init__ RequestHandlerClass) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 331, in __init__ self.server_bind() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/BaseHTTPServer.py", line 101, in server_bind SocketServer.TCPServer.server_bind(self) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 342, in server_bind self.socket.bind(self.server_address) File "", line 1, in bind error: (98, 'Address already in use') ====================================================================== ERROR: test_proxy_qop_auth_int_works_or_throws_urlerror (test.test_urllib2_localnet.ProxyAuthTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 252, in setUp raise self.server.error error: (98, 'Address already in use') ====================================================================== ERROR: test_proxy_qop_auth_works (test.test_urllib2_localnet.ProxyAuthTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 252, in setUp raise self.server.error error: (98, 'Address already in use') 
====================================================================== ERROR: test_proxy_with_bad_password_raises_httperror (test.test_urllib2_localnet.ProxyAuthTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 252, in setUp raise self.server.error error: (98, 'Address already in use') ====================================================================== ERROR: test_proxy_with_no_password_raises_httperror (test.test_urllib2_localnet.ProxyAuthTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 252, in setUp raise self.server.error error: (98, 'Address already in use') Traceback (most recent call last): File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 66, in run self._RequestHandlerClass) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 22, in __init__ RequestHandlerClass) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 331, in __init__ self.server_bind() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/BaseHTTPServer.py", line 101, in server_bind SocketServer.TCPServer.server_bind(self) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 342, in server_bind self.socket.bind(self.server_address) File "", line 1, in bind error: (98, 'Address already in use') make: *** [buildbottest] Error 1 sincerely, -The Buildbot From buildbot at python.org Fri Jul 6 00:37:28 2007 From: buildbot at python.org (buildbot at python.org) Date: Thu, 05 Jul 2007 22:37:28 +0000 Subject: [Python-checkins] buildbot warnings 
in x86 XP trunk Message-ID: <20070705223728.DD64C1E4004@bag.python.org> The Buildbot has detected a new failure of x86 XP trunk. Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520XP%2520trunk/builds/500 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: kurt.kaiser Build had warnings: warnings failed slave lost sincerely, -The Buildbot From buildbot at python.org Fri Jul 6 00:48:08 2007 From: buildbot at python.org (buildbot at python.org) Date: Thu, 05 Jul 2007 22:48:08 +0000 Subject: [Python-checkins] buildbot warnings in sparc solaris10 gcc trunk Message-ID: <20070705224808.ED5A61E4007@bag.python.org> The Buildbot has detected a new failure of sparc solaris10 gcc trunk. Full details are available at: http://www.python.org/dev/buildbot/all/sparc%2520solaris10%2520gcc%2520trunk/builds/2113 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: kurt.kaiser Build had warnings: warnings test Excerpt from the test logfile: Traceback (most recent call last): File "/opt/users/buildbot/slave/trunk.loewis-sun/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/opt/users/buildbot/slave/trunk.loewis-sun/build/Lib/test/test_socketserver.py", line 93, in run svr.serve_a_few() File "/opt/users/buildbot/slave/trunk.loewis-sun/build/Lib/test/test_socketserver.py", line 35, in serve_a_few self.handle_request() File "/opt/users/buildbot/slave/trunk.loewis-sun/build/Lib/SocketServer.py", line 224, in handle_request self.handle_error(request, client_address) File "/opt/users/buildbot/slave/trunk.loewis-sun/build/Lib/SocketServer.py", line 222, in handle_request self.process_request(request, client_address) File "/opt/users/buildbot/slave/trunk.loewis-sun/build/Lib/SocketServer.py", line 429, in process_request self.collect_children() File "/opt/users/buildbot/slave/trunk.loewis-sun/build/Lib/SocketServer.py", 
line 425, in collect_children self.active_children.remove(pid) ValueError: list.remove(x): x not in list 1 test failed: test_socketserver sincerely, -The Buildbot From buildbot at python.org Fri Jul 6 01:55:47 2007 From: buildbot at python.org (buildbot at python.org) Date: Thu, 05 Jul 2007 23:55:47 +0000 Subject: [Python-checkins] buildbot warnings in alpha Debian trunk Message-ID: <20070705235547.CCA4D1E4004@bag.python.org> The Buildbot has detected a new failure of alpha Debian trunk. Full details are available at: http://www.python.org/dev/buildbot/all/alpha%2520Debian%2520trunk/builds/33 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: kurt.kaiser Build had warnings: warnings test Excerpt from the test logfile: sincerely, -The Buildbot From python-checkins at python.org Fri Jul 6 06:13:39 2007 From: python-checkins at python.org (neal.norwitz) Date: Fri, 6 Jul 2007 06:13:39 +0200 (CEST) Subject: [Python-checkins] r56177 - python/trunk/Lib/sre_compile.py Message-ID: <20070706041339.CD7131E4004@bag.python.org> Author: neal.norwitz Date: Fri Jul 6 06:13:39 2007 New Revision: 56177 Modified: python/trunk/Lib/sre_compile.py Log: Fix typo in comment Modified: python/trunk/Lib/sre_compile.py ============================================================================== --- python/trunk/Lib/sre_compile.py (original) +++ python/trunk/Lib/sre_compile.py Fri Jul 6 06:13:39 2007 @@ -280,7 +280,7 @@ # To represent a big charset, first a bitmap of all characters in the # set is constructed. Then, this bitmap is sliced into chunks of 256 -# characters, duplicate chunks are eliminitated, and each chunk is +# characters, duplicate chunks are eliminated, and each chunk is # given a number. 
In the compiled expression, the charset is # represented by a 16-bit word sequence, consisting of one word for # the number of different chunks, a sequence of 256 bytes (128 words) From buildbot at python.org Fri Jul 6 07:02:35 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 06 Jul 2007 05:02:35 +0000 Subject: [Python-checkins] buildbot warnings in g4 osx.4 trunk Message-ID: <20070706050235.E0B0A1E4004@bag.python.org> The Buildbot has detected a new failure of g4 osx.4 trunk. Full details are available at: http://www.python.org/dev/buildbot/all/g4%2520osx.4%2520trunk/builds/2123 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: neal.norwitz Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_urllibnet make: *** [buildbottest] Error 1 sincerely, -The Buildbot From python-checkins at python.org Fri Jul 6 15:22:40 2007 From: python-checkins at python.org (collin.winter) Date: Fri, 6 Jul 2007 15:22:40 +0200 (CEST) Subject: [Python-checkins] r56178 - sandbox/trunk/2to3/fixes/fix_filter.py sandbox/trunk/2to3/fixes/fix_map.py Message-ID: <20070706132240.85F361E4007@bag.python.org> Author: collin.winter Date: Fri Jul 6 15:22:40 2007 New Revision: 56178 Modified: sandbox/trunk/2to3/fixes/fix_filter.py sandbox/trunk/2to3/fixes/fix_map.py Log: Whitespace fixes. 
Modified: sandbox/trunk/2to3/fixes/fix_filter.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_filter.py (original) +++ sandbox/trunk/2to3/fixes/fix_filter.py Fri Jul 6 15:22:40 2007 @@ -103,5 +103,5 @@ results = {} if self.p2.match(ppp, results) and results["node"] is node: return True - + return False Modified: sandbox/trunk/2to3/fixes/fix_map.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_map.py (original) +++ sandbox/trunk/2to3/fixes/fix_map.py Fri Jul 6 15:22:40 2007 @@ -115,5 +115,5 @@ results = {} if self.p2.match(ppp, results) and results["node"] is node: return True - + return False From python-checkins at python.org Fri Jul 6 15:30:54 2007 From: python-checkins at python.org (collin.winter) Date: Fri, 6 Jul 2007 15:30:54 +0200 (CEST) Subject: [Python-checkins] r56179 - in sandbox/trunk/2to3: README fixes/fix_sysexcattrs.py fixes/fix_sysexcinfo.py tests/test_fixers.py Message-ID: <20070706133054.02BCE1E400E@bag.python.org> Author: collin.winter Date: Fri Jul 6 15:30:53 2007 New Revision: 56179 Added: sandbox/trunk/2to3/fixes/fix_sysexcattrs.py - copied, changed from r56177, sandbox/trunk/2to3/fixes/fix_sysexcinfo.py Removed: sandbox/trunk/2to3/fixes/fix_sysexcinfo.py Modified: sandbox/trunk/2to3/README sandbox/trunk/2to3/tests/test_fixers.py Log: Rename the sysexcinfo fixer to fix_sysexcattrs to reflect that it no longer warns on sys.exc_info() (per PEP 3100). Modified: sandbox/trunk/2to3/README ============================================================================== --- sandbox/trunk/2to3/README (original) +++ sandbox/trunk/2to3/README Fri Jul 6 15:30:53 2007 @@ -64,7 +64,7 @@ * **fix_repr** - swap backticks for repr() calls. -* **fix_sysexcinfo** - warn on usage of sys.value, sys.type and +* **fix_sysexcattrs** - warn on usage of sys.value, sys.type and sys.traceback. 
* **fix_throw** - fix generator.throw() calls to be 3.0-compliant (PEP 3109). Copied: sandbox/trunk/2to3/fixes/fix_sysexcattrs.py (from r56177, sandbox/trunk/2to3/fixes/fix_sysexcinfo.py) ============================================================================== --- sandbox/trunk/2to3/fixes/fix_sysexcinfo.py (original) +++ sandbox/trunk/2to3/fixes/fix_sysexcattrs.py Fri Jul 6 15:30:53 2007 @@ -1,4 +1,4 @@ -"""Fixer/warner for sys.exc_{info,value,type,traceback}""" +"""Fixer/warner for sys.exc_{value,type,traceback}""" # Author: Collin Winter # Local imports @@ -6,24 +6,13 @@ from fixes import basefix -class FixSysexcinfo(basefix.BaseFix): +class FixSysexcattrs(basefix.BaseFix): PATTERN = """ - power< 'sys' trailer< '.' attr='exc_info'> any* > - | power< 'sys' - trailer< '.' attr=('exc_value' | 'exc_traceback' | 'exc_type')> + trailer< '.' ('exc_value' | 'exc_traceback' | 'exc_type')> any* > """ def transform(self, node): - results = self.match(node) - assert results - attr = results['attr'] - - if isinstance(attr, Leaf) and attr.value == 'exc_info': - self.cannot_convert(node, - "This function is going away in Python 3") - else: - self.cannot_convert(node, - "This attribute is going away in Python 3") + self.cannot_convert(node, "This attribute is going away in Python 3") Deleted: /sandbox/trunk/2to3/fixes/fix_sysexcinfo.py ============================================================================== --- /sandbox/trunk/2to3/fixes/fix_sysexcinfo.py Fri Jul 6 15:30:53 2007 +++ (empty file) @@ -1,29 +0,0 @@ -"""Fixer/warner for sys.exc_{info,value,type,traceback}""" -# Author: Collin Winter - -# Local imports -from pytree import Leaf -from fixes import basefix - - -class FixSysexcinfo(basefix.BaseFix): - - PATTERN = """ - power< 'sys' trailer< '.' attr='exc_info'> any* > - | - power< 'sys' - trailer< '.' 
attr=('exc_value' | 'exc_traceback' | 'exc_type')> - any* > - """ - - def transform(self, node): - results = self.match(node) - assert results - attr = results['attr'] - - if isinstance(attr, Leaf) and attr.value == 'exc_info': - self.cannot_convert(node, - "This function is going away in Python 3") - else: - self.cannot_convert(node, - "This attribute is going away in Python 3") Modified: sandbox/trunk/2to3/tests/test_fixers.py ============================================================================== --- sandbox/trunk/2to3/tests/test_fixers.py (original) +++ sandbox/trunk/2to3/tests/test_fixers.py Fri Jul 6 15:30:53 2007 @@ -877,25 +877,19 @@ self.check(b, a) -class Test_sysexcinfo(FixerTestCase): - fixer = "sysexcinfo" +class Test_sysexcattrs(FixerTestCase): + fixer = "sysexcattrs" def test_1(self): - s = """sys.exc_info()""" - self.warns(s, s, "This function is going away") + s = """f = sys.exc_type""" + self.warns(s, s, "This attribute is going away") def test_2(self): - s = """if sys.exc_info()[1] == 1: - pass""" - - self.warns(s, s, "This function is going away") + s = """f = sys.exc_value""" + self.warns(s, s, "This attribute is going away") def test_3(self): - s = """f = sys.exc_info""" - self.warns(s, s, "This function is going away") - - def test_4(self): - s = """f = sys.exc_type + ":" + sys.exc_value""" + s = """f = sys.exc_traceback""" self.warns(s, s, "This attribute is going away") From python-checkins at python.org Fri Jul 6 15:42:13 2007 From: python-checkins at python.org (collin.winter) Date: Fri, 6 Jul 2007 15:42:13 +0200 (CEST) Subject: [Python-checkins] r56180 - in sandbox/trunk/2to3: README examples examples/fix_ws_comma.py fixes/fix_ws_comma.py Message-ID: <20070706134213.714C11E4007@bag.python.org> Author: collin.winter Date: Fri Jul 6 15:42:13 2007 New Revision: 56180 Added: sandbox/trunk/2to3/examples/ sandbox/trunk/2to3/examples/fix_ws_comma.py - copied unchanged from r56177, sandbox/trunk/2to3/fixes/fix_ws_comma.py Removed: 
sandbox/trunk/2to3/fixes/fix_ws_comma.py Modified: sandbox/trunk/2to3/README Log: Move fix_ws_comma out of fixes (we don't want it run by -f all). Modified: sandbox/trunk/2to3/README ============================================================================== --- sandbox/trunk/2to3/README (original) +++ sandbox/trunk/2to3/README Fri Jul 6 15:42:13 2007 @@ -24,6 +24,7 @@ pgen2/ - Parser generator and driver ([1]_, [2]_) fixes/ - Individual transformations tests/ - Test files for pytree, fixers, grammar, etc +examples/ - Fixers that while neat, we don't want run by refactor's -f all Capabilities Deleted: /sandbox/trunk/2to3/fixes/fix_ws_comma.py ============================================================================== --- /sandbox/trunk/2to3/fixes/fix_ws_comma.py Fri Jul 6 15:42:13 2007 +++ (empty file) @@ -1,37 +0,0 @@ -"""Fixer that changes 'a ,b' into 'a, b'. - -This also changes '{a :b}' into '{a: b}', but does not touch other -uses of colons. It does not touch other uses of whitespace. 
- -""" - -import pytree -from pgen2 import token -from fixes import basefix - -class FixWsComma(basefix.BaseFix): - - PATTERN = """ - any<(not(',') any)+ ',' ((not(',') any)+ ',')* [not(',') any]> - """ - - COMMA = pytree.Leaf(token.COMMA, ",") - COLON = pytree.Leaf(token.COLON, ":") - SEPS = (COMMA, COLON) - - def transform(self, node): - new = node.clone() - comma = False - for child in new.children: - if child in self.SEPS: - prefix = child.get_prefix() - if prefix.isspace() and "\n" not in prefix: - child.set_prefix("") - comma = True - else: - if comma: - prefix = child.get_prefix() - if not prefix: - child.set_prefix(" ") - comma = False - return new From python-checkins at python.org Sat Jul 7 19:03:03 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Sat, 7 Jul 2007 19:03:03 +0200 (CEST) Subject: [Python-checkins] r56181 - python/branches/cpy_merge/Modules/_picklemodule.c Message-ID: <20070707170303.A9B391E4007@bag.python.org> Author: alexandre.vassalotti Date: Sat Jul 7 19:03:03 2007 New Revision: 56181 Added: python/branches/cpy_merge/Modules/_picklemodule.c - copied unchanged from r56174, python/branches/cpy_merge/Modules/cPickle.c Log: Copy cPickle.c to _picklemodule.c From python-checkins at python.org Sat Jul 7 19:47:04 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Sat, 7 Jul 2007 19:47:04 +0200 (CEST) Subject: [Python-checkins] r56182 - python/branches/cpy_merge/Modules/_picklemodule.c Message-ID: <20070707174704.C4C7C1E4002@bag.python.org> Author: alexandre.vassalotti Date: Sat Jul 7 19:47:03 2007 New Revision: 56182 Modified: python/branches/cpy_merge/Modules/_picklemodule.c Log: Quick mecanical clean up with: indent -i4 -l79 -bap -nbad -ncdb \ -sc -br -nce -cdw -cli0 \ -nss -npcs -saf -sai -saw \ -nbc -di1 -brs -psl -lp \ -nlps -nbbo Modified: python/branches/cpy_merge/Modules/_picklemodule.c ============================================================================== --- 
python/branches/cpy_merge/Modules/_picklemodule.c (original) +++ python/branches/cpy_merge/Modules/_picklemodule.c Sat Jul 7 19:47:03 2007 @@ -3,7 +3,7 @@ #include "structmember.h" PyDoc_STRVAR(cPickle_module_documentation, -"C implementation and optimization of the Python pickle module."); + "C implementation and optimization of the Python pickle module."); #ifndef Py_eval_input #include @@ -64,18 +64,18 @@ #define SETITEMS 'u' /* Protocol 2. */ -#define PROTO '\x80' /* identify pickle protocol */ -#define NEWOBJ '\x81' /* build object by applying cls.__new__ to argtuple */ -#define EXT1 '\x82' /* push object from extension registry; 1-byte index */ -#define EXT2 '\x83' /* ditto, but 2-byte index */ -#define EXT4 '\x84' /* ditto, but 4-byte index */ -#define TUPLE1 '\x85' /* build 1-tuple from stack top */ -#define TUPLE2 '\x86' /* build 2-tuple from two topmost stack items */ -#define TUPLE3 '\x87' /* build 3-tuple from three topmost stack items */ -#define NEWTRUE '\x88' /* push True */ -#define NEWFALSE '\x89' /* push False */ -#define LONG1 '\x8a' /* push long from < 256 bytes */ -#define LONG4 '\x8b' /* push really big long */ +#define PROTO '\x80' /* identify pickle protocol */ +#define NEWOBJ '\x81' /* build object by applying cls.__new__ to argtuple */ +#define EXT1 '\x82' /* push object from extension registry; 1-byte index */ +#define EXT2 '\x83' /* ditto, but 2-byte index */ +#define EXT4 '\x84' /* ditto, but 4-byte index */ +#define TUPLE1 '\x85' /* build 1-tuple from stack top */ +#define TUPLE2 '\x86' /* build 2-tuple from two topmost stack items */ +#define TUPLE3 '\x87' /* build 3-tuple from three topmost stack items */ +#define NEWTRUE '\x88' /* push True */ +#define NEWFALSE '\x89' /* push False */ +#define LONG1 '\x8a' /* push long from < 256 bytes */ +#define LONG4 '\x8b' /* push really big long */ /* There aren't opcodes -- they're ways to pickle bools before protocol 2, * so that unpicklers written before bools were introduced unpickle them 
@@ -120,40 +120,39 @@ static PyObject *two_tuple; static PyObject *__class___str, *__getinitargs___str, *__dict___str, - *__getstate___str, *__setstate___str, *__name___str, *__reduce___str, - *__reduce_ex___str, - *write_str, *append_str, - *read_str, *readline_str, *__main___str, - *copy_reg_str, *dispatch_table_str; + *__getstate___str, *__setstate___str, *__name___str, *__reduce___str, + *__reduce_ex___str, + *write_str, *append_str, + *read_str, *readline_str, *__main___str, + *copy_reg_str, *dispatch_table_str; /************************************************************************* Internal Data type for pickle data. */ typedef struct { - PyObject_HEAD - int length; /* number of initial slots in data currently used */ - int size; /* number of slots in data allocated */ - PyObject **data; + PyObject_HEAD int length; /* number of initial slots in data currently used */ + int size; /* number of slots in data allocated */ + PyObject **data; } Pdata; static void -Pdata_dealloc(Pdata *self) +Pdata_dealloc(Pdata * self) { - int i; - PyObject **p; + int i; + PyObject **p; - for (i = self->length, p = self->data; --i >= 0; p++) { - Py_DECREF(*p); - } - if (self->data) - free(self->data); - PyObject_Del(self); + for (i = self->length, p = self->data; --i >= 0; p++) { + Py_DECREF(*p); + } + if (self->data) + free(self->data); + PyObject_Del(self); } static PyTypeObject PdataType = { - PyObject_HEAD_INIT(NULL) 0, "cPickle.Pdata", sizeof(Pdata), 0, - (destructor)Pdata_dealloc, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0L,0L,0L,0L, "" + PyObject_HEAD_INIT(NULL) 0, "cPickle.Pdata", sizeof(Pdata), 0, + (destructor) Pdata_dealloc, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0L, 0L, 0L, 0L, "" }; #define Pdata_Check(O) ((O)->ob_type == &PdataType) @@ -161,73 +160,73 @@ static PyObject * Pdata_New(void) { - Pdata *self; + Pdata *self; - if (!(self = PyObject_New(Pdata, &PdataType))) - return NULL; - self->size = 8; - self->length = 0; - self->data = malloc(self->size * sizeof(PyObject*)); - 
if (self->data) - return (PyObject*)self; - Py_DECREF(self); - return PyErr_NoMemory(); + if (!(self = PyObject_New(Pdata, &PdataType))) + return NULL; + self->size = 8; + self->length = 0; + self->data = malloc(self->size * sizeof(PyObject *)); + if (self->data) + return (PyObject *) self; + Py_DECREF(self); + return PyErr_NoMemory(); } static int stackUnderflow(void) { - PyErr_SetString(UnpicklingError, "unpickling stack underflow"); - return -1; + PyErr_SetString(UnpicklingError, "unpickling stack underflow"); + return -1; } /* Retain only the initial clearto items. If clearto >= the current * number of items, this is a (non-erroneous) NOP. */ static int -Pdata_clear(Pdata *self, int clearto) +Pdata_clear(Pdata * self, int clearto) { - int i; - PyObject **p; + int i; + PyObject **p; - if (clearto < 0) return stackUnderflow(); - if (clearto >= self->length) return 0; + if (clearto < 0) + return stackUnderflow(); + if (clearto >= self->length) + return 0; - for (i = self->length, p = self->data + clearto; - --i >= clearto; - p++) { - Py_CLEAR(*p); - } - self->length = clearto; + for (i = self->length, p = self->data + clearto; --i >= clearto; p++) { + Py_CLEAR(*p); + } + self->length = clearto; - return 0; + return 0; } static int -Pdata_grow(Pdata *self) +Pdata_grow(Pdata * self) { - int bigger; - size_t nbytes; - PyObject **tmp; + int bigger; + size_t nbytes; + PyObject **tmp; - bigger = self->size << 1; - if (bigger <= 0) /* was 0, or new value overflows */ - goto nomemory; - if ((int)(size_t)bigger != bigger) - goto nomemory; - nbytes = (size_t)bigger * sizeof(PyObject *); - if (nbytes / sizeof(PyObject *) != (size_t)bigger) - goto nomemory; - tmp = realloc(self->data, nbytes); - if (tmp == NULL) - goto nomemory; - self->data = tmp; - self->size = bigger; - return 0; + bigger = self->size << 1; + if (bigger <= 0) /* was 0, or new value overflows */ + goto nomemory; + if ((int) (size_t) bigger != bigger) + goto nomemory; + nbytes = (size_t) bigger 
*sizeof(PyObject *); + if (nbytes / sizeof(PyObject *) != (size_t) bigger) + goto nomemory; + tmp = realloc(self->data, nbytes); + if (tmp == NULL) + goto nomemory; + self->data = tmp; + self->size = bigger; + return 0; nomemory: - PyErr_NoMemory(); - return -1; + PyErr_NoMemory(); + return -1; } /* D is a Pdata*. Pop the topmost element and store it into V, which @@ -272,35 +271,36 @@ static PyObject * -Pdata_popTuple(Pdata *self, int start) +Pdata_popTuple(Pdata * self, int start) { - PyObject *r; - int i, j, l; + PyObject *r; + int i, j, l; - l = self->length-start; - r = PyTuple_New(l); - if (r == NULL) - return NULL; - for (i = start, j = 0 ; j < l; i++, j++) - PyTuple_SET_ITEM(r, j, self->data[i]); + l = self->length - start; + r = PyTuple_New(l); + if (r == NULL) + return NULL; + for (i = start, j = 0; j < l; i++, j++) + PyTuple_SET_ITEM(r, j, self->data[i]); - self->length = start; - return r; + self->length = start; + return r; } static PyObject * -Pdata_popList(Pdata *self, int start) +Pdata_popList(Pdata * self, int start) { - PyObject *r; - int i, j, l; + PyObject *r; + int i, j, l; - l=self->length-start; - if (!( r=PyList_New(l))) return NULL; - for (i=start, j=0 ; j < l; i++, j++) - PyList_SET_ITEM(r, j, self->data[i]); + l = self->length - start; + if (!(r = PyList_New(l))) + return NULL; + for (i = start, j = 0; j < l; i++, j++) + PyList_SET_ITEM(r, j, self->data[i]); - self->length=start; - return r; + self->length = start; + return r; } /*************************************************************************/ @@ -323,29 +323,28 @@ } typedef struct Picklerobject { - PyObject_HEAD - FILE *fp; - PyObject *write; - PyObject *file; - PyObject *memo; - PyObject *arg; - PyObject *pers_func; - PyObject *inst_pers_func; - - /* pickle protocol number, >= 0 */ - int proto; - - /* bool, true if proto > 0 */ - int bin; - - int fast; /* Fast mode doesn't save in memo, don't use if circ ref */ - int nesting; - int (*write_func)(struct Picklerobject *, const 
char *, Py_ssize_t); - char *write_buf; - int buf_size; - PyObject *dispatch_table; - int fast_container; /* count nested container dumps */ - PyObject *fast_memo; + PyObject_HEAD FILE * fp; + PyObject *write; + PyObject *file; + PyObject *memo; + PyObject *arg; + PyObject *pers_func; + PyObject *inst_pers_func; + + /* pickle protocol number, >= 0 */ + int proto; + + /* bool, true if proto > 0 */ + int bin; + + int fast; /* Fast mode doesn't save in memo, don't use if circ ref */ + int nesting; + int (*write_func) (struct Picklerobject *, const char *, Py_ssize_t); + char *write_buf; + int buf_size; + PyObject *dispatch_table; + int fast_container; /* count nested container dumps */ + PyObject *fast_memo; } Picklerobject; #ifndef PY_CPICKLE_FAST_LIMIT @@ -355,25 +354,24 @@ static PyTypeObject Picklertype; typedef struct Unpicklerobject { - PyObject_HEAD - FILE *fp; - PyObject *file; - PyObject *readline; - PyObject *read; - PyObject *memo; - PyObject *arg; - Pdata *stack; - PyObject *mark; - PyObject *pers_func; - PyObject *last_string; - int *marks; - int num_marks; - int marks_size; - Py_ssize_t (*read_func)(struct Unpicklerobject *, char **, Py_ssize_t); - Py_ssize_t (*readline_func)(struct Unpicklerobject *, char **); - int buf_size; - char *buf; - PyObject *find_class; + PyObject_HEAD FILE * fp; + PyObject *file; + PyObject *readline; + PyObject *read; + PyObject *memo; + PyObject *arg; + Pdata *stack; + PyObject *mark; + PyObject *pers_func; + PyObject *last_string; + int *marks; + int num_marks; + int marks_size; + Py_ssize_t(*read_func) (struct Unpicklerobject *, char **, Py_ssize_t); + Py_ssize_t(*readline_func) (struct Unpicklerobject *, char **); + int buf_size; + char *buf; + PyObject *find_class; } Unpicklerobject; static PyTypeObject Unpicklertype; @@ -384,298 +382,304 @@ static PyObject * -cPickle_ErrFormat(PyObject *ErrType, char *stringformat, char *format, ...) +cPickle_ErrFormat(PyObject * ErrType, char *stringformat, char *format, ...) 
{ - va_list va; - PyObject *args=0, *retval=0; - va_start(va, format); - - if (format) args = Py_VaBuildValue(format, va); - va_end(va); - if (format && ! args) return NULL; - if (stringformat && !(retval=PyString_FromString(stringformat))) - return NULL; + va_list va; + PyObject *args = 0, *retval = 0; + va_start(va, format); + + if (format) + args = Py_VaBuildValue(format, va); + va_end(va); + if (format && !args) + return NULL; + if (stringformat && !(retval = PyString_FromString(stringformat))) + return NULL; - if (retval) { - if (args) { - PyObject *v; - v=PyString_Format(retval, args); - Py_DECREF(retval); - Py_DECREF(args); - if (! v) return NULL; - retval=v; - } + if (retval) { + if (args) { + PyObject *v; + v = PyString_Format(retval, args); + Py_DECREF(retval); + Py_DECREF(args); + if (!v) + return NULL; + retval = v; } - else - if (args) retval=args; - else { - PyErr_SetObject(ErrType,Py_None); - return NULL; - } - PyErr_SetObject(ErrType,retval); - Py_DECREF(retval); + } + else if (args) + retval = args; + else { + PyErr_SetObject(ErrType, Py_None); return NULL; + } + PyErr_SetObject(ErrType, retval); + Py_DECREF(retval); + return NULL; } static int -write_file(Picklerobject *self, const char *s, Py_ssize_t n) +write_file(Picklerobject * self, const char *s, Py_ssize_t n) { - size_t nbyteswritten; + size_t nbyteswritten; - if (s == NULL) { - return 0; - } + if (s == NULL) { + return 0; + } - if (n > INT_MAX) { - /* String too large */ - return -1; - } + if (n > INT_MAX) { + /* String too large */ + return -1; + } - Py_BEGIN_ALLOW_THREADS + Py_BEGIN_ALLOW_THREADS nbyteswritten = fwrite(s, sizeof(char), n, self->fp); - Py_END_ALLOW_THREADS - if (nbyteswritten != (size_t)n) { - PyErr_SetFromErrno(PyExc_IOError); - return -1; - } + Py_END_ALLOW_THREADS if (nbyteswritten != (size_t) n) { + PyErr_SetFromErrno(PyExc_IOError); + return -1; + } - return (int)n; + return (int) n; } static int -write_cStringIO(Picklerobject *self, const char *s, Py_ssize_t n) 
+write_cStringIO(Picklerobject * self, const char *s, Py_ssize_t n) { - if (s == NULL) { - return 0; - } + if (s == NULL) { + return 0; + } - if (PycStringIO->cwrite((PyObject *)self->file, s, n) != n) { - return -1; - } + if (PycStringIO->cwrite((PyObject *) self->file, s, n) != n) { + return -1; + } - return (int)n; + return (int) n; } static int -write_none(Picklerobject *self, const char *s, Py_ssize_t n) +write_none(Picklerobject * self, const char *s, Py_ssize_t n) { - if (s == NULL) return 0; - if (n > INT_MAX) return -1; - return (int)n; + if (s == NULL) + return 0; + if (n > INT_MAX) + return -1; + return (int) n; } static int -write_other(Picklerobject *self, const char *s, Py_ssize_t _n) +write_other(Picklerobject * self, const char *s, Py_ssize_t _n) { - PyObject *py_str = 0, *junk = 0; - int n; + PyObject *py_str = 0, *junk = 0; + int n; - if (_n > INT_MAX) + if (_n > INT_MAX) + return -1; + n = (int) _n; + if (s == NULL) { + if (!(self->buf_size)) + return 0; + py_str = PyString_FromStringAndSize(self->write_buf, self->buf_size); + if (!py_str) + return -1; + } + else { + if (self->buf_size && (n + self->buf_size) > WRITE_BUF_SIZE) { + if (write_other(self, NULL, 0) < 0) return -1; - n = (int)_n; - if (s == NULL) { - if (!( self->buf_size )) return 0; - py_str = PyString_FromStringAndSize(self->write_buf, - self->buf_size); - if (!py_str) - return -1; } - else { - if (self->buf_size && (n + self->buf_size) > WRITE_BUF_SIZE) { - if (write_other(self, NULL, 0) < 0) - return -1; - } - if (n > WRITE_BUF_SIZE) { - if (!( py_str = - PyString_FromStringAndSize(s, n))) - return -1; - } - else { - memcpy(self->write_buf + self->buf_size, s, n); - self->buf_size += n; - return n; - } + if (n > WRITE_BUF_SIZE) { + if (!(py_str = PyString_FromStringAndSize(s, n))) + return -1; } - - if (self->write) { - /* object with write method */ - ARG_TUP(self, py_str); - if (self->arg) { - junk = PyObject_Call(self->write, self->arg, NULL); - FREE_ARG_TUP(self); - } - if 
(junk) Py_DECREF(junk); - else return -1; + else { + memcpy(self->write_buf + self->buf_size, s, n); + self->buf_size += n; + return n; + } + } + + if (self->write) { + /* object with write method */ + ARG_TUP(self, py_str); + if (self->arg) { + junk = PyObject_Call(self->write, self->arg, NULL); + FREE_ARG_TUP(self); } + if (junk) + Py_DECREF(junk); else - PDATA_PUSH(self->file, py_str, -1); + return -1; + } + else + PDATA_PUSH(self->file, py_str, -1); - self->buf_size = 0; - return n; + self->buf_size = 0; + return n; } static Py_ssize_t -read_file(Unpicklerobject *self, char **s, Py_ssize_t n) +read_file(Unpicklerobject * self, char **s, Py_ssize_t n) { - size_t nbytesread; + size_t nbytesread; - if (self->buf_size == 0) { - int size; - - size = ((n < 32) ? 32 : n); - if (!( self->buf = (char *)malloc(size))) { - PyErr_NoMemory(); - return -1; - } + if (self->buf_size == 0) { + int size; - self->buf_size = size; + size = ((n < 32) ? 32 : n); + if (!(self->buf = (char *) malloc(size))) { + PyErr_NoMemory(); + return -1; } - else if (n > self->buf_size) { - char *newbuf = (char *)realloc(self->buf, n); - if (!newbuf) { - PyErr_NoMemory(); - return -1; - } - self->buf = newbuf; - self->buf_size = n; + + self->buf_size = size; + } + else if (n > self->buf_size) { + char *newbuf = (char *) realloc(self->buf, n); + if (!newbuf) { + PyErr_NoMemory(); + return -1; } + self->buf = newbuf; + self->buf_size = n; + } - Py_BEGIN_ALLOW_THREADS + Py_BEGIN_ALLOW_THREADS nbytesread = fread(self->buf, sizeof(char), n, self->fp); - Py_END_ALLOW_THREADS - if (nbytesread != (size_t)n) { - if (feof(self->fp)) { - PyErr_SetNone(PyExc_EOFError); - return -1; - } - - PyErr_SetFromErrno(PyExc_IOError); - return -1; + Py_END_ALLOW_THREADS if (nbytesread != (size_t) n) { + if (feof(self->fp)) { + PyErr_SetNone(PyExc_EOFError); + return -1; } - *s = self->buf; + PyErr_SetFromErrno(PyExc_IOError); + return -1; + } + + *s = self->buf; - return n; + return n; } static Py_ssize_t 
-readline_file(Unpicklerobject *self, char **s) +readline_file(Unpicklerobject * self, char **s) { - int i; + int i; - if (self->buf_size == 0) { - if (!( self->buf = (char *)malloc(40))) { - PyErr_NoMemory(); - return -1; - } - self->buf_size = 40; + if (self->buf_size == 0) { + if (!(self->buf = (char *) malloc(40))) { + PyErr_NoMemory(); + return -1; } + self->buf_size = 40; + } - i = 0; - while (1) { - int bigger; - char *newbuf; - for (; i < (self->buf_size - 1); i++) { - if (feof(self->fp) || - (self->buf[i] = getc(self->fp)) == '\n') { - self->buf[i + 1] = '\0'; - *s = self->buf; - return i + 1; - } - } - bigger = self->buf_size << 1; - if (bigger <= 0) { /* overflow */ - PyErr_NoMemory(); - return -1; - } - newbuf = (char *)realloc(self->buf, bigger); - if (!newbuf) { - PyErr_NoMemory(); - return -1; - } - self->buf = newbuf; - self->buf_size = bigger; - } + i = 0; + while (1) { + int bigger; + char *newbuf; + for (; i < (self->buf_size - 1); i++) { + if (feof(self->fp) || (self->buf[i] = getc(self->fp)) == '\n') { + self->buf[i + 1] = '\0'; + *s = self->buf; + return i + 1; + } + } + bigger = self->buf_size << 1; + if (bigger <= 0) { /* overflow */ + PyErr_NoMemory(); + return -1; + } + newbuf = (char *) realloc(self->buf, bigger); + if (!newbuf) { + PyErr_NoMemory(); + return -1; + } + self->buf = newbuf; + self->buf_size = bigger; + } } static Py_ssize_t -read_cStringIO(Unpicklerobject *self, char **s, Py_ssize_t n) +read_cStringIO(Unpicklerobject * self, char **s, Py_ssize_t n) { - char *ptr; + char *ptr; - if (PycStringIO->cread((PyObject *)self->file, &ptr, n) != n) { - PyErr_SetNone(PyExc_EOFError); - return -1; - } + if (PycStringIO->cread((PyObject *) self->file, &ptr, n) != n) { + PyErr_SetNone(PyExc_EOFError); + return -1; + } - *s = ptr; + *s = ptr; - return n; + return n; } static Py_ssize_t -readline_cStringIO(Unpicklerobject *self, char **s) +readline_cStringIO(Unpicklerobject * self, char **s) { - Py_ssize_t n; - char *ptr; + Py_ssize_t n; + 
char *ptr; - if ((n = PycStringIO->creadline((PyObject *)self->file, &ptr)) < 0) { - return -1; - } + if ((n = PycStringIO->creadline((PyObject *) self->file, &ptr)) < 0) { + return -1; + } - *s = ptr; + *s = ptr; - return n; + return n; } static Py_ssize_t -read_other(Unpicklerobject *self, char **s, Py_ssize_t n) +read_other(Unpicklerobject * self, char **s, Py_ssize_t n) { - PyObject *bytes, *str=0; + PyObject *bytes, *str = 0; - if (!( bytes = PyInt_FromSsize_t(n))) return -1; + if (!(bytes = PyInt_FromSsize_t(n))) + return -1; - ARG_TUP(self, bytes); - if (self->arg) { - str = PyObject_Call(self->read, self->arg, NULL); - FREE_ARG_TUP(self); - } - if (! str) return -1; + ARG_TUP(self, bytes); + if (self->arg) { + str = PyObject_Call(self->read, self->arg, NULL); + FREE_ARG_TUP(self); + } + if (!str) + return -1; - Py_XDECREF(self->last_string); - self->last_string = str; + Py_XDECREF(self->last_string); + self->last_string = str; - if (! (*s = PyString_AsString(str))) return -1; - return n; + if (!(*s = PyString_AsString(str))) + return -1; + return n; } static Py_ssize_t -readline_other(Unpicklerobject *self, char **s) +readline_other(Unpicklerobject * self, char **s) { - PyObject *str; - Py_ssize_t str_size; + PyObject *str; + Py_ssize_t str_size; - if (!( str = PyObject_CallObject(self->readline, empty_tuple))) { - return -1; - } + if (!(str = PyObject_CallObject(self->readline, empty_tuple))) { + return -1; + } - if ((str_size = PyString_Size(str)) < 0) - return -1; + if ((str_size = PyString_Size(str)) < 0) + return -1; - Py_XDECREF(self->last_string); - self->last_string = str; + Py_XDECREF(self->last_string); + self->last_string = str; - if (! 
(*s = PyString_AsString(str))) - return -1; + if (!(*s = PyString_AsString(str))) + return -1; - return str_size; + return str_size; } /* Copy the first n bytes from s into newly malloc'ed memory, plus a @@ -685,574 +689,580 @@ static char * pystrndup(const char *s, int n) { - char *r = (char *)malloc(n+1); - if (r == NULL) - return (char*)PyErr_NoMemory(); - memcpy(r, s, n); - r[n] = 0; - return r; + char *r = (char *) malloc(n + 1); + if (r == NULL) + return (char *) PyErr_NoMemory(); + memcpy(r, s, n); + r[n] = 0; + return r; } static int -get(Picklerobject *self, PyObject *id) +get(Picklerobject * self, PyObject * id) { - PyObject *value, *mv; - long c_value; - char s[30]; - size_t len; + PyObject *value, *mv; + long c_value; + char s[30]; + size_t len; - if (!( mv = PyDict_GetItem(self->memo, id))) { - PyErr_SetObject(PyExc_KeyError, id); - return -1; - } + if (!(mv = PyDict_GetItem(self->memo, id))) { + PyErr_SetObject(PyExc_KeyError, id); + return -1; + } - if (!( value = PyTuple_GetItem(mv, 0))) - return -1; + if (!(value = PyTuple_GetItem(mv, 0))) + return -1; - if (!( PyInt_Check(value))) { - PyErr_SetString(PicklingError, "no int where int expected in memo"); - return -1; - } - c_value = PyInt_AsLong(value); - if (c_value == -1 && PyErr_Occurred()) - return -1; + if (!(PyInt_Check(value))) { + PyErr_SetString(PicklingError, "no int where int expected in memo"); + return -1; + } + c_value = PyInt_AsLong(value); + if (c_value == -1 && PyErr_Occurred()) + return -1; - if (!self->bin) { - s[0] = GET; - PyOS_snprintf(s + 1, sizeof(s) - 1, "%ld\n", c_value); - len = strlen(s); - } - else if (Pdata_Check(self->file)) { - if (write_other(self, NULL, 0) < 0) return -1; - PDATA_APPEND(self->file, mv, -1); - return 0; + if (!self->bin) { + s[0] = GET; + PyOS_snprintf(s + 1, sizeof(s) - 1, "%ld\n", c_value); + len = strlen(s); + } + else if (Pdata_Check(self->file)) { + if (write_other(self, NULL, 0) < 0) + return -1; + PDATA_APPEND(self->file, mv, -1); + return 0; 
+ } + else { + if (c_value < 256) { + s[0] = BINGET; + s[1] = (int) (c_value & 0xff); + len = 2; } else { - if (c_value < 256) { - s[0] = BINGET; - s[1] = (int)(c_value & 0xff); - len = 2; - } - else { - s[0] = LONG_BINGET; - s[1] = (int)(c_value & 0xff); - s[2] = (int)((c_value >> 8) & 0xff); - s[3] = (int)((c_value >> 16) & 0xff); - s[4] = (int)((c_value >> 24) & 0xff); - len = 5; - } + s[0] = LONG_BINGET; + s[1] = (int) (c_value & 0xff); + s[2] = (int) ((c_value >> 8) & 0xff); + s[3] = (int) ((c_value >> 16) & 0xff); + s[4] = (int) ((c_value >> 24) & 0xff); + len = 5; } + } - if (self->write_func(self, s, len) < 0) - return -1; + if (self->write_func(self, s, len) < 0) + return -1; - return 0; + return 0; } static int -put(Picklerobject *self, PyObject *ob) +put(Picklerobject * self, PyObject * ob) { - if (ob->ob_refcnt < 2 || self->fast) - return 0; + if (ob->ob_refcnt < 2 || self->fast) + return 0; - return put2(self, ob); + return put2(self, ob); } static int -put2(Picklerobject *self, PyObject *ob) +put2(Picklerobject * self, PyObject * ob) { - char c_str[30]; - int p; - size_t len; - int res = -1; - PyObject *py_ob_id = 0, *memo_len = 0, *t = 0; + char c_str[30]; + int p; + size_t len; + int res = -1; + PyObject *py_ob_id = 0, *memo_len = 0, *t = 0; - if (self->fast) - return 0; + if (self->fast) + return 0; - if ((p = PyDict_Size(self->memo)) < 0) - goto finally; + if ((p = PyDict_Size(self->memo)) < 0) + goto finally; - /* Make sure memo keys are positive! */ - /* XXX Why? - * XXX And does "positive" really mean non-negative? - * XXX pickle.py starts with PUT index 0, not 1. This makes for - * XXX gratuitous differences between the pickling modules. - */ - p++; + /* Make sure memo keys are positive! */ + /* XXX Why? + * XXX And does "positive" really mean non-negative? + * XXX pickle.py starts with PUT index 0, not 1. This makes for + * XXX gratuitous differences between the pickling modules. 
+ */ + p++; - if (!( py_ob_id = PyLong_FromVoidPtr(ob))) - goto finally; + if (!(py_ob_id = PyLong_FromVoidPtr(ob))) + goto finally; - if (!( memo_len = PyInt_FromLong(p))) - goto finally; + if (!(memo_len = PyInt_FromLong(p))) + goto finally; - if (!( t = PyTuple_New(2))) - goto finally; + if (!(t = PyTuple_New(2))) + goto finally; - PyTuple_SET_ITEM(t, 0, memo_len); - Py_INCREF(memo_len); - PyTuple_SET_ITEM(t, 1, ob); - Py_INCREF(ob); + PyTuple_SET_ITEM(t, 0, memo_len); + Py_INCREF(memo_len); + PyTuple_SET_ITEM(t, 1, ob); + Py_INCREF(ob); - if (PyDict_SetItem(self->memo, py_ob_id, t) < 0) - goto finally; + if (PyDict_SetItem(self->memo, py_ob_id, t) < 0) + goto finally; - if (!self->bin) { - c_str[0] = PUT; - PyOS_snprintf(c_str + 1, sizeof(c_str) - 1, "%d\n", p); - len = strlen(c_str); - } - else if (Pdata_Check(self->file)) { - if (write_other(self, NULL, 0) < 0) return -1; - PDATA_APPEND(self->file, memo_len, -1); - res=0; /* Job well done ;) */ - goto finally; + if (!self->bin) { + c_str[0] = PUT; + PyOS_snprintf(c_str + 1, sizeof(c_str) - 1, "%d\n", p); + len = strlen(c_str); + } + else if (Pdata_Check(self->file)) { + if (write_other(self, NULL, 0) < 0) + return -1; + PDATA_APPEND(self->file, memo_len, -1); + res = 0; /* Job well done ;) */ + goto finally; + } + else { + if (p >= 256) { + c_str[0] = LONG_BINPUT; + c_str[1] = (int) (p & 0xff); + c_str[2] = (int) ((p >> 8) & 0xff); + c_str[3] = (int) ((p >> 16) & 0xff); + c_str[4] = (int) ((p >> 24) & 0xff); + len = 5; } else { - if (p >= 256) { - c_str[0] = LONG_BINPUT; - c_str[1] = (int)(p & 0xff); - c_str[2] = (int)((p >> 8) & 0xff); - c_str[3] = (int)((p >> 16) & 0xff); - c_str[4] = (int)((p >> 24) & 0xff); - len = 5; - } - else { - c_str[0] = BINPUT; - c_str[1] = p; - len = 2; - } + c_str[0] = BINPUT; + c_str[1] = p; + len = 2; } + } - if (self->write_func(self, c_str, len) < 0) - goto finally; + if (self->write_func(self, c_str, len) < 0) + goto finally; - res = 0; + res = 0; finally: - 
Py_XDECREF(py_ob_id); - Py_XDECREF(memo_len); - Py_XDECREF(t); + Py_XDECREF(py_ob_id); + Py_XDECREF(memo_len); + Py_XDECREF(t); - return res; + return res; } static PyObject * -whichmodule(PyObject *global, PyObject *global_name) +whichmodule(PyObject * global, PyObject * global_name) { - Py_ssize_t i, j; - PyObject *module = 0, *modules_dict = 0, - *global_name_attr = 0, *name = 0; - - module = PyObject_GetAttrString(global, "__module__"); - if (module) - return module; - if (PyErr_ExceptionMatches(PyExc_AttributeError)) - PyErr_Clear(); - else - return NULL; - - if (!( modules_dict = PySys_GetObject("modules"))) - return NULL; + Py_ssize_t i, j; + PyObject *module = 0, *modules_dict = 0, *global_name_attr = 0, *name = 0; - i = 0; - while ((j = PyDict_Next(modules_dict, &i, &name, &module))) { - - if (PyObject_Compare(name, __main___str)==0) continue; + module = PyObject_GetAttrString(global, "__module__"); + if (module) + return module; + if (PyErr_ExceptionMatches(PyExc_AttributeError)) + PyErr_Clear(); + else + return NULL; - global_name_attr = PyObject_GetAttr(module, global_name); - if (!global_name_attr) { - if (PyErr_ExceptionMatches(PyExc_AttributeError)) - PyErr_Clear(); - else - return NULL; - continue; - } + if (!(modules_dict = PySys_GetObject("modules"))) + return NULL; - if (global_name_attr != global) { - Py_DECREF(global_name_attr); - continue; - } + i = 0; + while ((j = PyDict_Next(modules_dict, &i, &name, &module))) { - Py_DECREF(global_name_attr); + if (PyObject_Compare(name, __main___str) == 0) + continue; - break; + global_name_attr = PyObject_GetAttr(module, global_name); + if (!global_name_attr) { + if (PyErr_ExceptionMatches(PyExc_AttributeError)) + PyErr_Clear(); + else + return NULL; + continue; } - /* The following implements the rule in pickle.py added in 1.5 - that used __main__ if no module is found. I don't actually - like this rule. 
jlf - */ - if (!j) { - j=1; - name=__main___str; + if (global_name_attr != global) { + Py_DECREF(global_name_attr); + continue; } - Py_INCREF(name); - return name; + Py_DECREF(global_name_attr); + + break; + } + + /* The following implements the rule in pickle.py added in 1.5 + * that used __main__ if no module is found. I don't actually + * like this rule. jlf + */ + if (!j) { + j = 1; + name = __main___str; + } + + Py_INCREF(name); + return name; } static int -fast_save_enter(Picklerobject *self, PyObject *obj) +fast_save_enter(Picklerobject * self, PyObject * obj) { - /* if fast_container < 0, we're doing an error exit. */ - if (++self->fast_container >= PY_CPICKLE_FAST_LIMIT) { - PyObject *key = NULL; - if (self->fast_memo == NULL) { - self->fast_memo = PyDict_New(); - if (self->fast_memo == NULL) { - self->fast_container = -1; - return 0; - } - } - key = PyLong_FromVoidPtr(obj); - if (key == NULL) - return 0; - if (PyDict_GetItem(self->fast_memo, key)) { - Py_DECREF(key); - PyErr_Format(PyExc_ValueError, - "fast mode: can't pickle cyclic objects " - "including object type %s at %p", - obj->ob_type->tp_name, obj); - self->fast_container = -1; - return 0; - } - if (PyDict_SetItem(self->fast_memo, key, Py_None) < 0) { - Py_DECREF(key); - self->fast_container = -1; - return 0; - } - Py_DECREF(key); + /* if fast_container < 0, we're doing an error exit. 
*/ + if (++self->fast_container >= PY_CPICKLE_FAST_LIMIT) { + PyObject *key = NULL; + if (self->fast_memo == NULL) { + self->fast_memo = PyDict_New(); + if (self->fast_memo == NULL) { + self->fast_container = -1; + return 0; + } } - return 1; + key = PyLong_FromVoidPtr(obj); + if (key == NULL) + return 0; + if (PyDict_GetItem(self->fast_memo, key)) { + Py_DECREF(key); + PyErr_Format(PyExc_ValueError, + "fast mode: can't pickle cyclic objects " + "including object type %s at %p", + obj->ob_type->tp_name, obj); + self->fast_container = -1; + return 0; + } + if (PyDict_SetItem(self->fast_memo, key, Py_None) < 0) { + Py_DECREF(key); + self->fast_container = -1; + return 0; + } + Py_DECREF(key); + } + return 1; } int -fast_save_leave(Picklerobject *self, PyObject *obj) +fast_save_leave(Picklerobject * self, PyObject * obj) { - if (self->fast_container-- >= PY_CPICKLE_FAST_LIMIT) { - PyObject *key = PyLong_FromVoidPtr(obj); - if (key == NULL) - return 0; - if (PyDict_DelItem(self->fast_memo, key) < 0) { - Py_DECREF(key); - return 0; - } - Py_DECREF(key); + if (self->fast_container-- >= PY_CPICKLE_FAST_LIMIT) { + PyObject *key = PyLong_FromVoidPtr(obj); + if (key == NULL) + return 0; + if (PyDict_DelItem(self->fast_memo, key) < 0) { + Py_DECREF(key); + return 0; } - return 1; + Py_DECREF(key); + } + return 1; } static int -save_none(Picklerobject *self, PyObject *args) +save_none(Picklerobject * self, PyObject * args) { - static char none = NONE; - if (self->write_func(self, &none, 1) < 0) - return -1; + static char none = NONE; + if (self->write_func(self, &none, 1) < 0) + return -1; - return 0; + return 0; } static int -save_bool(Picklerobject *self, PyObject *args) +save_bool(Picklerobject * self, PyObject * args) { - static const char *buf[2] = {FALSE, TRUE}; - static char len[2] = {sizeof(FALSE)-1, sizeof(TRUE)-1}; - long l = args == Py_True; - - if (self->proto >= 2) { - char opcode = l ? 
NEWTRUE : NEWFALSE; - if (self->write_func(self, &opcode, 1) < 0) - return -1; - } - else if (self->write_func(self, buf[l], len[l]) < 0) - return -1; - return 0; + static const char *buf[2] = { FALSE, TRUE }; + static char len[2] = { sizeof(FALSE) - 1, sizeof(TRUE) - 1 }; + long l = args == Py_True; + + if (self->proto >= 2) { + char opcode = l ? NEWTRUE : NEWFALSE; + if (self->write_func(self, &opcode, 1) < 0) + return -1; + } + else if (self->write_func(self, buf[l], len[l]) < 0) + return -1; + return 0; } static int -save_int(Picklerobject *self, long l) +save_int(Picklerobject * self, long l) { - char c_str[32]; - int len = 0; + char c_str[32]; + int len = 0; - if (!self->bin + if (!self->bin #if SIZEOF_LONG > 4 - || l > 0x7fffffffL - || l < -0x80000000L + || l > 0x7fffffffL || l < -0x80000000L #endif - ) { - /* Text-mode pickle, or long too big to fit in the 4-byte - * signed BININT format: store as a string. - */ - c_str[0] = INT; - PyOS_snprintf(c_str + 1, sizeof(c_str) - 1, "%ld\n", l); - if (self->write_func(self, c_str, strlen(c_str)) < 0) - return -1; + ) { + /* Text-mode pickle, or long too big to fit in the 4-byte + * signed BININT format: store as a string. + */ + c_str[0] = INT; + PyOS_snprintf(c_str + 1, sizeof(c_str) - 1, "%ld\n", l); + if (self->write_func(self, c_str, strlen(c_str)) < 0) + return -1; + } + else { + /* Binary pickle and l fits in a signed 4-byte int. */ + c_str[1] = (int) (l & 0xff); + c_str[2] = (int) ((l >> 8) & 0xff); + c_str[3] = (int) ((l >> 16) & 0xff); + c_str[4] = (int) ((l >> 24) & 0xff); + + if ((c_str[4] == 0) && (c_str[3] == 0)) { + if (c_str[2] == 0) { + c_str[0] = BININT1; + len = 2; + } + else { + c_str[0] = BININT2; + len = 3; + } } else { - /* Binary pickle and l fits in a signed 4-byte int. 
*/ - c_str[1] = (int)( l & 0xff); - c_str[2] = (int)((l >> 8) & 0xff); - c_str[3] = (int)((l >> 16) & 0xff); - c_str[4] = (int)((l >> 24) & 0xff); - - if ((c_str[4] == 0) && (c_str[3] == 0)) { - if (c_str[2] == 0) { - c_str[0] = BININT1; - len = 2; - } - else { - c_str[0] = BININT2; - len = 3; - } - } - else { - c_str[0] = BININT; - len = 5; - } - - if (self->write_func(self, c_str, len) < 0) - return -1; + c_str[0] = BININT; + len = 5; } - return 0; + if (self->write_func(self, c_str, len) < 0) + return -1; + } + + return 0; } static int -save_long(Picklerobject *self, PyObject *args) +save_long(Picklerobject * self, PyObject * args) { - Py_ssize_t size; - int res = -1; - PyObject *repr = NULL; - long val = PyInt_AsLong(args); - static char l = LONG; + Py_ssize_t size; + int res = -1; + PyObject *repr = NULL; + long val = PyInt_AsLong(args); + static char l = LONG; - if (val == -1 && PyErr_Occurred()) { - /* out of range for int pickling */ - PyErr_Clear(); - } - else - return save_int(self, val); + if (val == -1 && PyErr_Occurred()) { + /* out of range for int pickling */ + PyErr_Clear(); + } + else + return save_int(self, val); + + if (self->proto >= 2) { + /* Linear-time pickling. */ + size_t nbits; + size_t nbytes; + unsigned char *pdata; + char c_str[5]; + int i; + int sign = _PyLong_Sign(args); - if (self->proto >= 2) { - /* Linear-time pickling. */ - size_t nbits; - size_t nbytes; - unsigned char *pdata; - char c_str[5]; - int i; - int sign = _PyLong_Sign(args); - - if (sign == 0) { - /* It's 0 -- an empty bytestring. */ - c_str[0] = LONG1; - c_str[1] = 0; - i = self->write_func(self, c_str, 2); - if (i < 0) goto finally; - res = 0; - goto finally; - } - nbits = _PyLong_NumBits(args); - if (nbits == (size_t)-1 && PyErr_Occurred()) - goto finally; - /* How many bytes do we need? There are nbits >> 3 full - * bytes of data, and nbits & 7 leftover bits. If there - * are any leftover bits, then we clearly need another - * byte. 
Wnat's not so obvious is that we *probably* - * need another byte even if there aren't any leftovers: - * the most-significant bit of the most-significant byte - * acts like a sign bit, and it's usually got a sense - * opposite of the one we need. The exception is longs - * of the form -(2**(8*j-1)) for j > 0. Such a long is - * its own 256's-complement, so has the right sign bit - * even without the extra byte. That's a pain to check - * for in advance, though, so we always grab an extra - * byte at the start, and cut it back later if possible. - */ - nbytes = (nbits >> 3) + 1; - if (nbytes > INT_MAX) { - PyErr_SetString(PyExc_OverflowError, "long too large " - "to pickle"); - goto finally; - } - repr = PyString_FromStringAndSize(NULL, (int)nbytes); - if (repr == NULL) goto finally; - pdata = (unsigned char *)PyString_AS_STRING(repr); - i = _PyLong_AsByteArray((PyLongObject *)args, - pdata, nbytes, - 1 /* little endian */, 1 /* signed */); - if (i < 0) goto finally; - /* If the long is negative, this may be a byte more than - * needed. This is so iff the MSB is all redundant sign - * bits. - */ - if (sign < 0 && nbytes > 1 && pdata[nbytes - 1] == 0xff && - (pdata[nbytes - 2] & 0x80) != 0) - --nbytes; - - if (nbytes < 256) { - c_str[0] = LONG1; - c_str[1] = (char)nbytes; - size = 2; - } - else { - c_str[0] = LONG4; - size = (int)nbytes; - for (i = 1; i < 5; i++) { - c_str[i] = (char)(size & 0xff); - size >>= 8; - } - size = 5; - } - i = self->write_func(self, c_str, size); - if (i < 0) goto finally; - i = self->write_func(self, (char *)pdata, (int)nbytes); - if (i < 0) goto finally; - res = 0; - goto finally; + if (sign == 0) { + /* It's 0 -- an empty bytestring. */ + c_str[0] = LONG1; + c_str[1] = 0; + i = self->write_func(self, c_str, 2); + if (i < 0) + goto finally; + res = 0; + goto finally; + } + nbits = _PyLong_NumBits(args); + if (nbits == (size_t) - 1 && PyErr_Occurred()) + goto finally; + /* How many bytes do we need? 
There are nbits >> 3 full + * bytes of data, and nbits & 7 leftover bits. If there + * are any leftover bits, then we clearly need another + * byte. Wnat's not so obvious is that we *probably* + * need another byte even if there aren't any leftovers: + * the most-significant bit of the most-significant byte + * acts like a sign bit, and it's usually got a sense + * opposite of the one we need. The exception is longs + * of the form -(2**(8*j-1)) for j > 0. Such a long is + * its own 256's-complement, so has the right sign bit + * even without the extra byte. That's a pain to check + * for in advance, though, so we always grab an extra + * byte at the start, and cut it back later if possible. + */ + nbytes = (nbits >> 3) + 1; + if (nbytes > INT_MAX) { + PyErr_SetString(PyExc_OverflowError, "long too large " + "to pickle"); + goto finally; } - - /* proto < 2: write the repr and newline. This is quadratic-time - * (in the number of digits), in both directions. + repr = PyString_FromStringAndSize(NULL, (int) nbytes); + if (repr == NULL) + goto finally; + pdata = (unsigned char *) PyString_AS_STRING(repr); + i = _PyLong_AsByteArray((PyLongObject *) args, + pdata, nbytes, + 1 /* little endian */ , 1 /* signed */ ); + if (i < 0) + goto finally; + /* If the long is negative, this may be a byte more than + * needed. This is so iff the MSB is all redundant sign + * bits. 
*/ - if (!( repr = PyObject_Repr(args))) - goto finally; - - if ((size = PyString_Size(repr)) < 0) - goto finally; - - if (self->write_func(self, &l, 1) < 0) - goto finally; + if (sign < 0 && nbytes > 1 && pdata[nbytes - 1] == 0xff && + (pdata[nbytes - 2] & 0x80) != 0) + --nbytes; + + if (nbytes < 256) { + c_str[0] = LONG1; + c_str[1] = (char) nbytes; + size = 2; + } + else { + c_str[0] = LONG4; + size = (int) nbytes; + for (i = 1; i < 5; i++) { + c_str[i] = (char) (size & 0xff); + size >>= 8; + } + size = 5; + } + i = self->write_func(self, c_str, size); + if (i < 0) + goto finally; + i = self->write_func(self, (char *) pdata, (int) nbytes); + if (i < 0) + goto finally; + res = 0; + goto finally; + } - if (self->write_func(self, - PyString_AS_STRING((PyStringObject *)repr), - size) < 0) - goto finally; + /* proto < 2: write the repr and newline. This is quadratic-time + * (in the number of digits), in both directions. + */ + if (!(repr = PyObject_Repr(args))) + goto finally; + + if ((size = PyString_Size(repr)) < 0) + goto finally; + + if (self->write_func(self, &l, 1) < 0) + goto finally; + + if (self->write_func(self, + PyString_AS_STRING((PyStringObject *) repr), + size) < 0) + goto finally; - if (self->write_func(self, "\n", 1) < 0) - goto finally; + if (self->write_func(self, "\n", 1) < 0) + goto finally; - res = 0; + res = 0; finally: - Py_XDECREF(repr); - return res; + Py_XDECREF(repr); + return res; } static int -save_float(Picklerobject *self, PyObject *args) +save_float(Picklerobject * self, PyObject * args) { - double x = PyFloat_AS_DOUBLE((PyFloatObject *)args); + double x = PyFloat_AS_DOUBLE((PyFloatObject *) args); - if (self->bin) { - char str[9]; - str[0] = BINFLOAT; - if (_PyFloat_Pack8(x, (unsigned char *)&str[1], 0) < 0) - return -1; - if (self->write_func(self, str, 9) < 0) - return -1; - } - else { - char c_str[250]; - c_str[0] = FLOAT; - PyOS_ascii_formatd(c_str + 1, sizeof(c_str) - 2, "%.17g", x); - /* Extend the formatted string with a 
newline character */ - strcat(c_str, "\n"); + if (self->bin) { + char str[9]; + str[0] = BINFLOAT; + if (_PyFloat_Pack8(x, (unsigned char *) &str[1], 0) < 0) + return -1; + if (self->write_func(self, str, 9) < 0) + return -1; + } + else { + char c_str[250]; + c_str[0] = FLOAT; + PyOS_ascii_formatd(c_str + 1, sizeof(c_str) - 2, "%.17g", x); + /* Extend the formatted string with a newline character */ + strcat(c_str, "\n"); - if (self->write_func(self, c_str, strlen(c_str)) < 0) - return -1; - } + if (self->write_func(self, c_str, strlen(c_str)) < 0) + return -1; + } - return 0; + return 0; } static int -save_string(Picklerobject *self, PyObject *args, int doput) +save_string(Picklerobject * self, PyObject * args, int doput) { - int size, len; - PyObject *repr=0; + int size, len; + PyObject *repr = 0; - if ((size = PyString_Size(args)) < 0) - return -1; + if ((size = PyString_Size(args)) < 0) + return -1; - if (!self->bin) { - char *repr_str; + if (!self->bin) { + char *repr_str; - static char string = STRING; + static char string = STRING; - if (!( repr = PyObject_Repr(args))) - return -1; + if (!(repr = PyObject_Repr(args))) + return -1; - if ((len = PyString_Size(repr)) < 0) - goto err; - repr_str = PyString_AS_STRING((PyStringObject *)repr); + if ((len = PyString_Size(repr)) < 0) + goto err; + repr_str = PyString_AS_STRING((PyStringObject *) repr); - if (self->write_func(self, &string, 1) < 0) - goto err; + if (self->write_func(self, &string, 1) < 0) + goto err; - if (self->write_func(self, repr_str, len) < 0) - goto err; + if (self->write_func(self, repr_str, len) < 0) + goto err; - if (self->write_func(self, "\n", 1) < 0) - goto err; + if (self->write_func(self, "\n", 1) < 0) + goto err; - Py_XDECREF(repr); - } - else { - int i; - char c_str[5]; + Py_XDECREF(repr); + } + else { + int i; + char c_str[5]; - if ((size = PyString_Size(args)) < 0) - return -1; + if ((size = PyString_Size(args)) < 0) + return -1; - if (size < 256) { - c_str[0] = SHORT_BINSTRING; - 
c_str[1] = size; - len = 2; - } - else if (size <= INT_MAX) { - c_str[0] = BINSTRING; - for (i = 1; i < 5; i++) - c_str[i] = (int)(size >> ((i - 1) * 8)); - len = 5; - } - else - return -1; /* string too large */ + if (size < 256) { + c_str[0] = SHORT_BINSTRING; + c_str[1] = size; + len = 2; + } + else if (size <= INT_MAX) { + c_str[0] = BINSTRING; + for (i = 1; i < 5; i++) + c_str[i] = (int) (size >> ((i - 1) * 8)); + len = 5; + } + else + return -1; /* string too large */ - if (self->write_func(self, c_str, len) < 0) - return -1; + if (self->write_func(self, c_str, len) < 0) + return -1; - if (size > 128 && Pdata_Check(self->file)) { - if (write_other(self, NULL, 0) < 0) return -1; - PDATA_APPEND(self->file, args, -1); - } - else { - if (self->write_func(self, - PyString_AS_STRING( - (PyStringObject *)args), - size) < 0) - return -1; - } + if (size > 128 && Pdata_Check(self->file)) { + if (write_other(self, NULL, 0) < 0) + return -1; + PDATA_APPEND(self->file, args, -1); + } + else { + if (self->write_func(self, + PyString_AS_STRING((PyStringObject *) args), + size) < 0) + return -1; } + } - if (doput) - if (put(self, args) < 0) - return -1; + if (doput) + if (put(self, args) < 0) + return -1; - return 0; + return 0; err: - Py_XDECREF(repr); - return -1; + Py_XDECREF(repr); + return -1; } @@ -1260,142 +1270,142 @@ /* A copy of PyUnicode_EncodeRawUnicodeEscape() that also translates backslash and newline characters to \uXXXX escapes. 
*/ static PyObject * -modified_EncodeRawUnicodeEscape(const Py_UNICODE *s, int size) +modified_EncodeRawUnicodeEscape(const Py_UNICODE * s, int size) { - PyObject *repr; - char *p; - char *q; + PyObject *repr; + char *p; + char *q; - static const char *hexdigit = "0123456789ABCDEF"; + static const char *hexdigit = "0123456789ABCDEF"; - repr = PyString_FromStringAndSize(NULL, 6 * size); - if (repr == NULL) - return NULL; - if (size == 0) - return repr; + repr = PyString_FromStringAndSize(NULL, 6 * size); + if (repr == NULL) + return NULL; + if (size == 0) + return repr; - p = q = PyString_AS_STRING(repr); - while (size-- > 0) { - Py_UNICODE ch = *s++; - /* Map 16-bit characters to '\uxxxx' */ - if (ch >= 256 || ch == '\\' || ch == '\n') { - *p++ = '\\'; - *p++ = 'u'; - *p++ = hexdigit[(ch >> 12) & 0xf]; - *p++ = hexdigit[(ch >> 8) & 0xf]; - *p++ = hexdigit[(ch >> 4) & 0xf]; - *p++ = hexdigit[ch & 15]; - } - /* Copy everything else as-is */ - else - *p++ = (char) ch; + p = q = PyString_AS_STRING(repr); + while (size-- > 0) { + Py_UNICODE ch = *s++; + /* Map 16-bit characters to '\uxxxx' */ + if (ch >= 256 || ch == '\\' || ch == '\n') { + *p++ = '\\'; + *p++ = 'u'; + *p++ = hexdigit[(ch >> 12) & 0xf]; + *p++ = hexdigit[(ch >> 8) & 0xf]; + *p++ = hexdigit[(ch >> 4) & 0xf]; + *p++ = hexdigit[ch & 15]; } - *p = '\0'; - _PyString_Resize(&repr, p - q); - return repr; + /* Copy everything else as-is */ + else + *p++ = (char) ch; + } + *p = '\0'; + _PyString_Resize(&repr, p - q); + return repr; } static int -save_unicode(Picklerobject *self, PyObject *args, int doput) +save_unicode(Picklerobject * self, PyObject * args, int doput) { - Py_ssize_t size, len; - PyObject *repr=0; + Py_ssize_t size, len; + PyObject *repr = 0; - if (!PyUnicode_Check(args)) - return -1; + if (!PyUnicode_Check(args)) + return -1; - if (!self->bin) { - char *repr_str; - static char string = UNICODE; + if (!self->bin) { + char *repr_str; + static char string = UNICODE; + + repr = + 
modified_EncodeRawUnicodeEscape(PyUnicode_AS_UNICODE(args), + PyUnicode_GET_SIZE(args)); + if (!repr) + return -1; + + if ((len = PyString_Size(repr)) < 0) + goto err; + repr_str = PyString_AS_STRING((PyStringObject *) repr); - repr = modified_EncodeRawUnicodeEscape( - PyUnicode_AS_UNICODE(args), PyUnicode_GET_SIZE(args)); - if (!repr) - return -1; + if (self->write_func(self, &string, 1) < 0) + goto err; - if ((len = PyString_Size(repr)) < 0) - goto err; - repr_str = PyString_AS_STRING((PyStringObject *)repr); + if (self->write_func(self, repr_str, len) < 0) + goto err; - if (self->write_func(self, &string, 1) < 0) - goto err; + if (self->write_func(self, "\n", 1) < 0) + goto err; - if (self->write_func(self, repr_str, len) < 0) - goto err; + Py_XDECREF(repr); + } + else { + int i; + char c_str[5]; - if (self->write_func(self, "\n", 1) < 0) - goto err; + if (!(repr = PyUnicode_AsUTF8String(args))) + return -1; - Py_XDECREF(repr); - } - else { - int i; - char c_str[5]; + if ((size = PyString_Size(repr)) < 0) + goto err; + if (size > INT_MAX) + return -1; /* string too large */ + + c_str[0] = BINUNICODE; + for (i = 1; i < 5; i++) + c_str[i] = (int) (size >> ((i - 1) * 8)); + len = 5; - if (!( repr = PyUnicode_AsUTF8String(args))) - return -1; + if (self->write_func(self, c_str, len) < 0) + goto err; - if ((size = PyString_Size(repr)) < 0) - goto err; - if (size > INT_MAX) - return -1; /* string too large */ - - c_str[0] = BINUNICODE; - for (i = 1; i < 5; i++) - c_str[i] = (int)(size >> ((i - 1) * 8)); - len = 5; - - if (self->write_func(self, c_str, len) < 0) - goto err; - - if (size > 128 && Pdata_Check(self->file)) { - if (write_other(self, NULL, 0) < 0) - goto err; - PDATA_APPEND(self->file, repr, -1); - } - else { - if (self->write_func(self, PyString_AS_STRING(repr), - size) < 0) - goto err; - } - - Py_DECREF(repr); + if (size > 128 && Pdata_Check(self->file)) { + if (write_other(self, NULL, 0) < 0) + goto err; + PDATA_APPEND(self->file, repr, -1); + } + else { 
+ if (self->write_func(self, PyString_AS_STRING(repr), size) < 0) + goto err; } - if (doput) - if (put(self, args) < 0) - return -1; + Py_DECREF(repr); + } - return 0; + if (doput) + if (put(self, args) < 0) + return -1; + + return 0; err: - Py_XDECREF(repr); - return -1; + Py_XDECREF(repr); + return -1; } #endif /* A helper for save_tuple. Push the len elements in tuple t on the stack. */ static int -store_tuple_elements(Picklerobject *self, PyObject *t, int len) +store_tuple_elements(Picklerobject * self, PyObject * t, int len) { - int i; - int res = -1; /* guilty until proved innocent */ - - assert(PyTuple_Size(t) == len); + int i; + int res = -1; /* guilty until proved innocent */ - for (i = 0; i < len; i++) { - PyObject *element = PyTuple_GET_ITEM(t, i); + assert(PyTuple_Size(t) == len); - if (element == NULL) - goto finally; - if (save(self, element, 0) < 0) - goto finally; - } - res = 0; + for (i = 0; i < len; i++) { + PyObject *element = PyTuple_GET_ITEM(t, i); + + if (element == NULL) + goto finally; + if (save(self, element, 0) < 0) + goto finally; + } + res = 0; finally: - return res; + return res; } /* Tuples are ubiquitous in the pickle protocols, so many techniques are @@ -1405,110 +1415,110 @@ * magic so that it works in all cases. IOW, this is a long routine. */ static int -save_tuple(Picklerobject *self, PyObject *args) +save_tuple(Picklerobject * self, PyObject * args) { - PyObject *py_tuple_id = NULL; - int len, i; - int res = -1; - - static char tuple = TUPLE; - static char pop = POP; - static char pop_mark = POP_MARK; - static char len2opcode[] = {EMPTY_TUPLE, TUPLE1, TUPLE2, TUPLE3}; - - if ((len = PyTuple_Size(args)) < 0) - goto finally; - - if (len == 0) { - char c_str[2]; - - if (self->proto) { - c_str[0] = EMPTY_TUPLE; - len = 1; - } - else { - c_str[0] = MARK; - c_str[1] = TUPLE; - len = 2; - } - if (self->write_func(self, c_str, len) >= 0) - res = 0; - /* Don't memoize an empty tuple. 
*/ - goto finally; + PyObject *py_tuple_id = NULL; + int len, i; + int res = -1; + + static char tuple = TUPLE; + static char pop = POP; + static char pop_mark = POP_MARK; + static char len2opcode[] = { EMPTY_TUPLE, TUPLE1, TUPLE2, TUPLE3 }; + + if ((len = PyTuple_Size(args)) < 0) + goto finally; + + if (len == 0) { + char c_str[2]; + + if (self->proto) { + c_str[0] = EMPTY_TUPLE; + len = 1; } + else { + c_str[0] = MARK; + c_str[1] = TUPLE; + len = 2; + } + if (self->write_func(self, c_str, len) >= 0) + res = 0; + /* Don't memoize an empty tuple. */ + goto finally; + } + + /* A non-empty tuple. */ + + /* id(tuple) isn't in the memo now. If it shows up there after + * saving the tuple elements, the tuple must be recursive, in + * which case we'll pop everything we put on the stack, and fetch + * its value from the memo. + */ + py_tuple_id = PyLong_FromVoidPtr(args); + if (py_tuple_id == NULL) + goto finally; - /* A non-empty tuple. */ - - /* id(tuple) isn't in the memo now. If it shows up there after - * saving the tuple elements, the tuple must be recursive, in - * which case we'll pop everything we put on the stack, and fetch - * its value from the memo. - */ - py_tuple_id = PyLong_FromVoidPtr(args); - if (py_tuple_id == NULL) - goto finally; - - if (len <= 3 && self->proto >= 2) { - /* Use TUPLE{1,2,3} opcodes. */ - if (store_tuple_elements(self, args, len) < 0) - goto finally; - if (PyDict_GetItem(self->memo, py_tuple_id)) { - /* pop the len elements */ - for (i = 0; i < len; ++i) - if (self->write_func(self, &pop, 1) < 0) - goto finally; - /* fetch from memo */ - if (get(self, py_tuple_id) < 0) - goto finally; - res = 0; - goto finally; - } - /* Not recursive. */ - if (self->write_func(self, len2opcode + len, 1) < 0) - goto finally; - goto memoize; - } - - /* proto < 2 and len > 0, or proto >= 2 and len > 3. - * Generate MARK elt1 elt2 ... 
TUPLE - */ - if (self->write_func(self, &MARKv, 1) < 0) - goto finally; - + if (len <= 3 && self->proto >= 2) { + /* Use TUPLE{1,2,3} opcodes. */ if (store_tuple_elements(self, args, len) < 0) - goto finally; - + goto finally; if (PyDict_GetItem(self->memo, py_tuple_id)) { - /* pop the stack stuff we pushed */ - if (self->bin) { - if (self->write_func(self, &pop_mark, 1) < 0) - goto finally; - } - else { - /* Note that we pop one more than len, to remove - * the MARK too. - */ - for (i = 0; i <= len; i++) - if (self->write_func(self, &pop, 1) < 0) - goto finally; - } - /* fetch from memo */ - if (get(self, py_tuple_id) >= 0) - res = 0; + /* pop the len elements */ + for (i = 0; i < len; ++i) + if (self->write_func(self, &pop, 1) < 0) + goto finally; + /* fetch from memo */ + if (get(self, py_tuple_id) < 0) goto finally; + res = 0; + goto finally; } - /* Not recursive. */ - if (self->write_func(self, &tuple, 1) < 0) + if (self->write_func(self, len2opcode + len, 1) < 0) + goto finally; + goto memoize; + } + + /* proto < 2 and len > 0, or proto >= 2 and len > 3. + * Generate MARK elt1 elt2 ... TUPLE + */ + if (self->write_func(self, &MARKv, 1) < 0) + goto finally; + + if (store_tuple_elements(self, args, len) < 0) + goto finally; + + if (PyDict_GetItem(self->memo, py_tuple_id)) { + /* pop the stack stuff we pushed */ + if (self->bin) { + if (self->write_func(self, &pop_mark, 1) < 0) goto finally; + } + else { + /* Note that we pop one more than len, to remove + * the MARK too. + */ + for (i = 0; i <= len; i++) + if (self->write_func(self, &pop, 1) < 0) + goto finally; + } + /* fetch from memo */ + if (get(self, py_tuple_id) >= 0) + res = 0; + goto finally; + } + + /* Not recursive. 
*/ + if (self->write_func(self, &tuple, 1) < 0) + goto finally; memoize: - if (put(self, args) >= 0) - res = 0; + if (put(self, args) >= 0) + res = 0; finally: - Py_XDECREF(py_tuple_id); - return res; + Py_XDECREF(py_tuple_id); + return res; } /* iter is an iterator giving items, and we batch up chunks of @@ -1518,130 +1528,130 @@ * Returns 0 on success, <0 on error. */ static int -batch_list(Picklerobject *self, PyObject *iter) +batch_list(Picklerobject * self, PyObject * iter) { - PyObject *obj; - PyObject *slice[BATCHSIZE]; - int i, n; - - static char append = APPEND; - static char appends = APPENDS; - - assert(iter != NULL); - - if (self->proto == 0) { - /* APPENDS isn't available; do one at a time. */ - for (;;) { - obj = PyIter_Next(iter); - if (obj == NULL) { - if (PyErr_Occurred()) - return -1; - break; - } - i = save(self, obj, 0); - Py_DECREF(obj); - if (i < 0) - return -1; - if (self->write_func(self, &append, 1) < 0) - return -1; - } - return 0; + PyObject *obj; + PyObject *slice[BATCHSIZE]; + int i, n; + + static char append = APPEND; + static char appends = APPENDS; + + assert(iter != NULL); + + if (self->proto == 0) { + /* APPENDS isn't available; do one at a time. */ + for (;;) { + obj = PyIter_Next(iter); + if (obj == NULL) { + if (PyErr_Occurred()) + return -1; + break; + } + i = save(self, obj, 0); + Py_DECREF(obj); + if (i < 0) + return -1; + if (self->write_func(self, &append, 1) < 0) + return -1; } - - /* proto > 0: write in batches of BATCHSIZE. */ - do { - /* Get next group of (no more than) BATCHSIZE elements. */ - for (n = 0; n < BATCHSIZE; ++n) { - obj = PyIter_Next(iter); - if (obj == NULL) { - if (PyErr_Occurred()) - goto BatchFailed; - break; - } - slice[n] = obj; - } - - if (n > 1) { - /* Pump out MARK, slice[0:n], APPENDS. 
*/ - if (self->write_func(self, &MARKv, 1) < 0) - goto BatchFailed; - for (i = 0; i < n; ++i) { - if (save(self, slice[i], 0) < 0) - goto BatchFailed; - } - if (self->write_func(self, &appends, 1) < 0) - goto BatchFailed; - } - else if (n == 1) { - if (save(self, slice[0], 0) < 0) - goto BatchFailed; - if (self->write_func(self, &append, 1) < 0) - goto BatchFailed; - } - - for (i = 0; i < n; ++i) { - Py_DECREF(slice[i]); - } - } while (n == BATCHSIZE); return 0; + } -BatchFailed: - while (--n >= 0) { - Py_DECREF(slice[n]); - } - return -1; -} - -static int -save_list(Picklerobject *self, PyObject *args) -{ - int res = -1; - char s[3]; - int len; - PyObject *iter; - - if (self->fast && !fast_save_enter(self, args)) - goto finally; - - /* Create an empty list. */ - if (self->bin) { - s[0] = EMPTY_LIST; - len = 1; - } - else { - s[0] = MARK; - s[1] = LIST; - len = 2; + /* proto > 0: write in batches of BATCHSIZE. */ + do { + /* Get next group of (no more than) BATCHSIZE elements. */ + for (n = 0; n < BATCHSIZE; ++n) { + obj = PyIter_Next(iter); + if (obj == NULL) { + if (PyErr_Occurred()) + goto BatchFailed; + break; + } + slice[n] = obj; } - if (self->write_func(self, s, len) < 0) - goto finally; - - /* Get list length, and bow out early if empty. */ - if ((len = PyList_Size(args)) < 0) - goto finally; - - /* Memoize. */ - if (len == 0) { - if (put(self, args) >= 0) - res = 0; - goto finally; - } - if (put2(self, args) < 0) - goto finally; + if (n > 1) { + /* Pump out MARK, slice[0:n], APPENDS. 
*/ + if (self->write_func(self, &MARKv, 1) < 0) + goto BatchFailed; + for (i = 0; i < n; ++i) { + if (save(self, slice[i], 0) < 0) + goto BatchFailed; + } + if (self->write_func(self, &appends, 1) < 0) + goto BatchFailed; + } + else if (n == 1) { + if (save(self, slice[0], 0) < 0) + goto BatchFailed; + if (self->write_func(self, &append, 1) < 0) + goto BatchFailed; + } + + for (i = 0; i < n; ++i) { + Py_DECREF(slice[i]); + } + } while (n == BATCHSIZE); + return 0; + + BatchFailed: + while (--n >= 0) { + Py_DECREF(slice[n]); + } + return -1; +} + +static int +save_list(Picklerobject * self, PyObject * args) +{ + int res = -1; + char s[3]; + int len; + PyObject *iter; + + if (self->fast && !fast_save_enter(self, args)) + goto finally; + + /* Create an empty list. */ + if (self->bin) { + s[0] = EMPTY_LIST; + len = 1; + } + else { + s[0] = MARK; + s[1] = LIST; + len = 2; + } + + if (self->write_func(self, s, len) < 0) + goto finally; + + /* Get list length, and bow out early if empty. */ + if ((len = PyList_Size(args)) < 0) + goto finally; - /* Materialize the list elements. */ - iter = PyObject_GetIter(args); - if (iter == NULL) - goto finally; - res = batch_list(self, iter); - Py_DECREF(iter); + /* Memoize. */ + if (len == 0) { + if (put(self, args) >= 0) + res = 0; + goto finally; + } + if (put2(self, args) < 0) + goto finally; + + /* Materialize the list elements. */ + iter = PyObject_GetIter(args); + if (iter == NULL) + goto finally; + res = batch_list(self, iter); + Py_DECREF(iter); finally: - if (self->fast && !fast_save_leave(self, args)) - res = -1; + if (self->fast && !fast_save_leave(self, args)) + res = -1; - return res; + return res; } @@ -1657,1213 +1667,1196 @@ * ugly to bear. 
*/ static int -batch_dict(Picklerobject *self, PyObject *iter) +batch_dict(Picklerobject * self, PyObject * iter) { - PyObject *p; - PyObject *slice[BATCHSIZE]; - int i, n; - - static char setitem = SETITEM; - static char setitems = SETITEMS; - - assert(iter != NULL); - - if (self->proto == 0) { - /* SETITEMS isn't available; do one at a time. */ - for (;;) { - p = PyIter_Next(iter); - if (p == NULL) { - if (PyErr_Occurred()) - return -1; - break; - } - if (!PyTuple_Check(p) || PyTuple_Size(p) != 2) { - PyErr_SetString(PyExc_TypeError, "dict items " - "iterator must return 2-tuples"); - return -1; - } - i = save(self, PyTuple_GET_ITEM(p, 0), 0); - if (i >= 0) - i = save(self, PyTuple_GET_ITEM(p, 1), 0); - Py_DECREF(p); - if (i < 0) - return -1; - if (self->write_func(self, &setitem, 1) < 0) - return -1; - } - return 0; + PyObject *p; + PyObject *slice[BATCHSIZE]; + int i, n; + + static char setitem = SETITEM; + static char setitems = SETITEMS; + + assert(iter != NULL); + + if (self->proto == 0) { + /* SETITEMS isn't available; do one at a time. */ + for (;;) { + p = PyIter_Next(iter); + if (p == NULL) { + if (PyErr_Occurred()) + return -1; + break; + } + if (!PyTuple_Check(p) || PyTuple_Size(p) != 2) { + PyErr_SetString(PyExc_TypeError, "dict items " + "iterator must return 2-tuples"); + return -1; + } + i = save(self, PyTuple_GET_ITEM(p, 0), 0); + if (i >= 0) + i = save(self, PyTuple_GET_ITEM(p, 1), 0); + Py_DECREF(p); + if (i < 0) + return -1; + if (self->write_func(self, &setitem, 1) < 0) + return -1; } - - /* proto > 0: write in batches of BATCHSIZE. */ - do { - /* Get next group of (no more than) BATCHSIZE elements. 
*/ - for (n = 0; n < BATCHSIZE; ++n) { - p = PyIter_Next(iter); - if (p == NULL) { - if (PyErr_Occurred()) - goto BatchFailed; - break; - } - if (!PyTuple_Check(p) || PyTuple_Size(p) != 2) { - PyErr_SetString(PyExc_TypeError, "dict items " - "iterator must return 2-tuples"); - goto BatchFailed; - } - slice[n] = p; - } - - if (n > 1) { - /* Pump out MARK, slice[0:n], SETITEMS. */ - if (self->write_func(self, &MARKv, 1) < 0) - goto BatchFailed; - for (i = 0; i < n; ++i) { - p = slice[i]; - if (save(self, PyTuple_GET_ITEM(p, 0), 0) < 0) - goto BatchFailed; - if (save(self, PyTuple_GET_ITEM(p, 1), 0) < 0) - goto BatchFailed; - } - if (self->write_func(self, &setitems, 1) < 0) - goto BatchFailed; - } - else if (n == 1) { - p = slice[0]; - if (save(self, PyTuple_GET_ITEM(p, 0), 0) < 0) - goto BatchFailed; - if (save(self, PyTuple_GET_ITEM(p, 1), 0) < 0) - goto BatchFailed; - if (self->write_func(self, &setitem, 1) < 0) - goto BatchFailed; - } - - for (i = 0; i < n; ++i) { - Py_DECREF(slice[i]); - } - } while (n == BATCHSIZE); return 0; + } + + /* proto > 0: write in batches of BATCHSIZE. */ + do { + /* Get next group of (no more than) BATCHSIZE elements. */ + for (n = 0; n < BATCHSIZE; ++n) { + p = PyIter_Next(iter); + if (p == NULL) { + if (PyErr_Occurred()) + goto BatchFailed; + break; + } + if (!PyTuple_Check(p) || PyTuple_Size(p) != 2) { + PyErr_SetString(PyExc_TypeError, "dict items " + "iterator must return 2-tuples"); + goto BatchFailed; + } + slice[n] = p; + } + + if (n > 1) { + /* Pump out MARK, slice[0:n], SETITEMS. 
*/ + if (self->write_func(self, &MARKv, 1) < 0) + goto BatchFailed; + for (i = 0; i < n; ++i) { + p = slice[i]; + if (save(self, PyTuple_GET_ITEM(p, 0), 0) < 0) + goto BatchFailed; + if (save(self, PyTuple_GET_ITEM(p, 1), 0) < 0) + goto BatchFailed; + } + if (self->write_func(self, &setitems, 1) < 0) + goto BatchFailed; + } + else if (n == 1) { + p = slice[0]; + if (save(self, PyTuple_GET_ITEM(p, 0), 0) < 0) + goto BatchFailed; + if (save(self, PyTuple_GET_ITEM(p, 1), 0) < 0) + goto BatchFailed; + if (self->write_func(self, &setitem, 1) < 0) + goto BatchFailed; + } + + for (i = 0; i < n; ++i) { + Py_DECREF(slice[i]); + } + } while (n == BATCHSIZE); + return 0; + + BatchFailed: + while (--n >= 0) { + Py_DECREF(slice[n]); + } + return -1; +} + +static int +save_dict(Picklerobject * self, PyObject * args) +{ + int res = -1; + char s[3]; + int len; + PyObject *items, *iter; + + if (self->fast && !fast_save_enter(self, args)) + goto finally; + + /* Create an empty dict. */ + if (self->bin) { + s[0] = EMPTY_DICT; + len = 1; + } + else { + s[0] = MARK; + s[1] = DICT; + len = 2; + } + + if (self->write_func(self, s, len) < 0) + goto finally; + + /* Get dict size, and bow out early if empty. */ + if ((len = PyDict_Size(args)) < 0) + goto finally; -BatchFailed: - while (--n >= 0) { - Py_DECREF(slice[n]); - } - return -1; -} - -static int -save_dict(Picklerobject *self, PyObject *args) -{ - int res = -1; - char s[3]; - int len; - PyObject *items, *iter; - - if (self->fast && !fast_save_enter(self, args)) - goto finally; - - /* Create an empty dict. */ - if (self->bin) { - s[0] = EMPTY_DICT; - len = 1; - } - else { - s[0] = MARK; - s[1] = DICT; - len = 2; - } - - if (self->write_func(self, s, len) < 0) - goto finally; - - /* Get dict size, and bow out early if empty. */ - if ((len = PyDict_Size(args)) < 0) - goto finally; - - if (len == 0) { - if (put(self, args) >= 0) - res = 0; - goto finally; - } - if (put2(self, args) < 0) - goto finally; - - /* Materialize the dict items. 
*/ - items = PyObject_CallMethod(args, "items", "()"); - if (items == NULL) - goto finally; - iter = PyObject_GetIter(items); - Py_DECREF(items); - if (iter == NULL) - goto finally; - res = batch_dict(self, iter); - Py_DECREF(iter); + if (len == 0) { + if (put(self, args) >= 0) + res = 0; + goto finally; + } + if (put2(self, args) < 0) + goto finally; + + /* Materialize the dict items. */ + items = PyObject_CallMethod(args, "items", "()"); + if (items == NULL) + goto finally; + iter = PyObject_GetIter(items); + Py_DECREF(items); + if (iter == NULL) + goto finally; + res = batch_dict(self, iter); + Py_DECREF(iter); finally: - if (self->fast && !fast_save_leave(self, args)) - res = -1; + if (self->fast && !fast_save_leave(self, args)) + res = -1; - return res; + return res; } static int -save_global(Picklerobject *self, PyObject *args, PyObject *name) -{ - PyObject *global_name = 0, *module = 0, *mod = 0, *klass = 0; - char *name_str, *module_str; - int module_size, name_size, res = -1; - - static char global = GLOBAL; +save_global(Picklerobject * self, PyObject * args, PyObject * name) +{ + PyObject *global_name = 0, *module = 0, *mod = 0, *klass = 0; + char *name_str, *module_str; + int module_size, name_size, res = -1; + + static char global = GLOBAL; + + if (name) { + global_name = name; + Py_INCREF(global_name); + } + else { + if (!(global_name = PyObject_GetAttr(args, __name___str))) + goto finally; + } + + if (!(module = whichmodule(args, global_name))) + goto finally; + + if ((module_size = PyString_Size(module)) < 0 || + (name_size = PyString_Size(global_name)) < 0) + goto finally; + + module_str = PyString_AS_STRING((PyStringObject *) module); + name_str = PyString_AS_STRING((PyStringObject *) global_name); + + /* XXX This can be doing a relative import. Clearly it shouldn't, + * but I don't know how to stop it. 
:-( */ + mod = PyImport_ImportModule(module_str); + if (mod == NULL) { + cPickle_ErrFormat(PicklingError, + "Can't pickle %s: import of module %s " + "failed", "OS", args, module); + goto finally; + } + klass = PyObject_GetAttrString(mod, name_str); + if (klass == NULL) { + cPickle_ErrFormat(PicklingError, + "Can't pickle %s: attribute lookup %s.%s " + "failed", "OSS", args, module, global_name); + goto finally; + } + if (klass != args) { + Py_DECREF(klass); + cPickle_ErrFormat(PicklingError, + "Can't pickle %s: it's not the same object " + "as %s.%s", "OSS", args, module, global_name); + goto finally; + } + Py_DECREF(klass); + + if (self->proto >= 2) { + /* See whether this is in the extension registry, and if + * so generate an EXT opcode. + */ + PyObject *py_code; /* extension code as Python object */ + long code; /* extension code as C value */ + char c_str[5]; + int n; - if (name) { - global_name = name; - Py_INCREF(global_name); + PyTuple_SET_ITEM(two_tuple, 0, module); + PyTuple_SET_ITEM(two_tuple, 1, global_name); + py_code = PyDict_GetItem(extension_registry, two_tuple); + if (py_code == NULL) + goto gen_global; /* not registered */ + + /* Verify py_code has the right type and value. */ + if (!PyInt_Check(py_code)) { + cPickle_ErrFormat(PicklingError, "Can't pickle %s: " + "extension code %s isn't an integer", + "OO", args, py_code); + goto finally; + } + code = PyInt_AS_LONG(py_code); + if (code <= 0 || code > 0x7fffffffL) { + cPickle_ErrFormat(PicklingError, "Can't pickle %s: " + "extension code %ld is out of range", + "Ol", args, code); + goto finally; + } + + /* Generate an EXT opcode. 
*/ + if (code <= 0xff) { + c_str[0] = EXT1; + c_str[1] = (char) code; + n = 2; + } + else if (code <= 0xffff) { + c_str[0] = EXT2; + c_str[1] = (char) (code & 0xff); + c_str[2] = (char) ((code >> 8) & 0xff); + n = 3; } else { - if (!( global_name = PyObject_GetAttr(args, __name___str))) - goto finally; - } - - if (!( module = whichmodule(args, global_name))) - goto finally; - - if ((module_size = PyString_Size(module)) < 0 || - (name_size = PyString_Size(global_name)) < 0) - goto finally; - - module_str = PyString_AS_STRING((PyStringObject *)module); - name_str = PyString_AS_STRING((PyStringObject *)global_name); - - /* XXX This can be doing a relative import. Clearly it shouldn't, - but I don't know how to stop it. :-( */ - mod = PyImport_ImportModule(module_str); - if (mod == NULL) { - cPickle_ErrFormat(PicklingError, - "Can't pickle %s: import of module %s " - "failed", - "OS", args, module); - goto finally; - } - klass = PyObject_GetAttrString(mod, name_str); - if (klass == NULL) { - cPickle_ErrFormat(PicklingError, - "Can't pickle %s: attribute lookup %s.%s " - "failed", - "OSS", args, module, global_name); - goto finally; + c_str[0] = EXT4; + c_str[1] = (char) (code & 0xff); + c_str[2] = (char) ((code >> 8) & 0xff); + c_str[3] = (char) ((code >> 16) & 0xff); + c_str[4] = (char) ((code >> 24) & 0xff); + n = 5; } - if (klass != args) { - Py_DECREF(klass); - cPickle_ErrFormat(PicklingError, - "Can't pickle %s: it's not the same object " - "as %s.%s", - "OSS", args, module, global_name); - goto finally; - } - Py_DECREF(klass); - - if (self->proto >= 2) { - /* See whether this is in the extension registry, and if - * so generate an EXT opcode. 
- */ - PyObject *py_code; /* extension code as Python object */ - long code; /* extension code as C value */ - char c_str[5]; - int n; - - PyTuple_SET_ITEM(two_tuple, 0, module); - PyTuple_SET_ITEM(two_tuple, 1, global_name); - py_code = PyDict_GetItem(extension_registry, two_tuple); - if (py_code == NULL) - goto gen_global; /* not registered */ - - /* Verify py_code has the right type and value. */ - if (!PyInt_Check(py_code)) { - cPickle_ErrFormat(PicklingError, "Can't pickle %s: " - "extension code %s isn't an integer", - "OO", args, py_code); - goto finally; - } - code = PyInt_AS_LONG(py_code); - if (code <= 0 || code > 0x7fffffffL) { - cPickle_ErrFormat(PicklingError, "Can't pickle %s: " - "extension code %ld is out of range", - "Ol", args, code); - goto finally; - } - - /* Generate an EXT opcode. */ - if (code <= 0xff) { - c_str[0] = EXT1; - c_str[1] = (char)code; - n = 2; - } - else if (code <= 0xffff) { - c_str[0] = EXT2; - c_str[1] = (char)(code & 0xff); - c_str[2] = (char)((code >> 8) & 0xff); - n = 3; - } - else { - c_str[0] = EXT4; - c_str[1] = (char)(code & 0xff); - c_str[2] = (char)((code >> 8) & 0xff); - c_str[3] = (char)((code >> 16) & 0xff); - c_str[4] = (char)((code >> 24) & 0xff); - n = 5; - } - if (self->write_func(self, c_str, n) >= 0) - res = 0; - goto finally; /* and don't memoize */ - } + if (self->write_func(self, c_str, n) >= 0) + res = 0; + goto finally; /* and don't memoize */ + } gen_global: - if (self->write_func(self, &global, 1) < 0) - goto finally; + if (self->write_func(self, &global, 1) < 0) + goto finally; - if (self->write_func(self, module_str, module_size) < 0) - goto finally; + if (self->write_func(self, module_str, module_size) < 0) + goto finally; - if (self->write_func(self, "\n", 1) < 0) - goto finally; + if (self->write_func(self, "\n", 1) < 0) + goto finally; - if (self->write_func(self, name_str, name_size) < 0) - goto finally; + if (self->write_func(self, name_str, name_size) < 0) + goto finally; - if 
(self->write_func(self, "\n", 1) < 0) - goto finally; + if (self->write_func(self, "\n", 1) < 0) + goto finally; - if (put(self, args) < 0) - goto finally; + if (put(self, args) < 0) + goto finally; - res = 0; + res = 0; finally: - Py_XDECREF(module); - Py_XDECREF(global_name); - Py_XDECREF(mod); + Py_XDECREF(module); + Py_XDECREF(global_name); + Py_XDECREF(mod); - return res; + return res; } static int -save_pers(Picklerobject *self, PyObject *args, PyObject *f) +save_pers(Picklerobject * self, PyObject * args, PyObject * f) { - PyObject *pid = 0; - int size, res = -1; + PyObject *pid = 0; + int size, res = -1; - static char persid = PERSID, binpersid = BINPERSID; + static char persid = PERSID, binpersid = BINPERSID; - Py_INCREF(args); - ARG_TUP(self, args); - if (self->arg) { - pid = PyObject_Call(f, self->arg, NULL); - FREE_ARG_TUP(self); - } - if (! pid) return -1; + Py_INCREF(args); + ARG_TUP(self, args); + if (self->arg) { + pid = PyObject_Call(f, self->arg, NULL); + FREE_ARG_TUP(self); + } + if (!pid) + return -1; - if (pid != Py_None) { - if (!self->bin) { - if (!PyString_Check(pid)) { - PyErr_SetString(PicklingError, - "persistent id must be string"); - goto finally; - } - - if (self->write_func(self, &persid, 1) < 0) - goto finally; - - if ((size = PyString_Size(pid)) < 0) - goto finally; - - if (self->write_func(self, - PyString_AS_STRING( - (PyStringObject *)pid), - size) < 0) - goto finally; + if (pid != Py_None) { + if (!self->bin) { + if (!PyString_Check(pid)) { + PyErr_SetString(PicklingError, "persistent id must be string"); + goto finally; + } - if (self->write_func(self, "\n", 1) < 0) - goto finally; + if (self->write_func(self, &persid, 1) < 0) + goto finally; - res = 1; - goto finally; - } - else if (save(self, pid, 1) >= 0) { - if (self->write_func(self, &binpersid, 1) < 0) - res = -1; - else - res = 1; - } + if ((size = PyString_Size(pid)) < 0) + goto finally; + + if (self->write_func(self, + PyString_AS_STRING((PyStringObject *) pid), + 
size) < 0) + goto finally; + if (self->write_func(self, "\n", 1) < 0) goto finally; + + res = 1; + goto finally; + } + else if (save(self, pid, 1) >= 0) { + if (self->write_func(self, &binpersid, 1) < 0) + res = -1; + else + res = 1; } - res = 0; + goto finally; + } + + res = 0; finally: - Py_XDECREF(pid); + Py_XDECREF(pid); - return res; + return res; } /* We're saving ob, and args is the 2-thru-5 tuple returned by the * appropriate __reduce__ method for ob. */ static int -save_reduce(Picklerobject *self, PyObject *args, PyObject *ob) +save_reduce(Picklerobject * self, PyObject * args, PyObject * ob) { - PyObject *callable; - PyObject *argtup; - PyObject *state = NULL; - PyObject *listitems = NULL; - PyObject *dictitems = NULL; - - int use_newobj = self->proto >= 2; - - static char reduce = REDUCE; - static char build = BUILD; - static char newobj = NEWOBJ; - - if (! PyArg_UnpackTuple(args, "save_reduce", 2, 5, - &callable, - &argtup, - &state, - &listitems, - &dictitems)) - return -1; + PyObject *callable; + PyObject *argtup; + PyObject *state = NULL; + PyObject *listitems = NULL; + PyObject *dictitems = NULL; + + int use_newobj = self->proto >= 2; + + static char reduce = REDUCE; + static char build = BUILD; + static char newobj = NEWOBJ; - if (!PyTuple_Check(argtup)) { - PyErr_SetString(PicklingError, - "args from reduce() should be a tuple"); - return -1; - } - - if (state == Py_None) - state = NULL; - if (listitems == Py_None) - listitems = NULL; - if (dictitems == Py_None) - dictitems = NULL; - - /* Protocol 2 special case: if callable's name is __newobj__, use - * NEWOBJ. This consumes a lot of code. 
- */ - if (use_newobj) { - PyObject *temp = PyObject_GetAttr(callable, __name___str); - - if (temp == NULL) { - if (PyErr_ExceptionMatches(PyExc_AttributeError)) - PyErr_Clear(); - else - return -1; - use_newobj = 0; - } - else { - use_newobj = PyString_Check(temp) && - strcmp(PyString_AS_STRING(temp), - "__newobj__") == 0; - Py_DECREF(temp); - } - } - if (use_newobj) { - PyObject *cls; - PyObject *newargtup; - int n, i; - - /* Sanity checks. */ - n = PyTuple_Size(argtup); - if (n < 1) { - PyErr_SetString(PicklingError, "__newobj__ arglist " - "is empty"); - return -1; - } + if (!PyArg_UnpackTuple(args, "save_reduce", 2, 5, + &callable, &argtup, &state, &listitems, &dictitems)) + return -1; - cls = PyTuple_GET_ITEM(argtup, 0); - if (! PyObject_HasAttrString(cls, "__new__")) { - PyErr_SetString(PicklingError, "args[0] from " - "__newobj__ args has no __new__"); - return -1; - } + if (!PyTuple_Check(argtup)) { + PyErr_SetString(PicklingError, "args from reduce() should be a tuple"); + return -1; + } - /* XXX How could ob be NULL? */ - if (ob != NULL) { - PyObject *ob_dot_class; - - ob_dot_class = PyObject_GetAttr(ob, __class___str); - if (ob_dot_class == NULL) { - if (PyErr_ExceptionMatches( - PyExc_AttributeError)) - PyErr_Clear(); - else - return -1; - } - i = ob_dot_class != cls; /* true iff a problem */ - Py_XDECREF(ob_dot_class); - if (i) { - PyErr_SetString(PicklingError, "args[0] from " - "__newobj__ args has the wrong class"); - return -1; - } - } + if (state == Py_None) + state = NULL; + if (listitems == Py_None) + listitems = NULL; + if (dictitems == Py_None) + dictitems = NULL; + + /* Protocol 2 special case: if callable's name is __newobj__, use + * NEWOBJ. This consumes a lot of code. + */ + if (use_newobj) { + PyObject *temp = PyObject_GetAttr(callable, __name___str); - /* Save the class and its __new__ arguments. 
*/ - if (save(self, cls, 0) < 0) - return -1; - - newargtup = PyTuple_New(n-1); /* argtup[1:] */ - if (newargtup == NULL) - return -1; - for (i = 1; i < n; ++i) { - PyObject *temp = PyTuple_GET_ITEM(argtup, i); - Py_INCREF(temp); - PyTuple_SET_ITEM(newargtup, i-1, temp); - } - i = save(self, newargtup, 0) < 0; - Py_DECREF(newargtup); - if (i < 0) - return -1; - - /* Add NEWOBJ opcode. */ - if (self->write_func(self, &newobj, 1) < 0) - return -1; + if (temp == NULL) { + if (PyErr_ExceptionMatches(PyExc_AttributeError)) + PyErr_Clear(); + else + return -1; + use_newobj = 0; } else { - /* Not using NEWOBJ. */ - if (save(self, callable, 0) < 0 || - save(self, argtup, 0) < 0 || - self->write_func(self, &reduce, 1) < 0) - return -1; + use_newobj = PyString_Check(temp) && + strcmp(PyString_AS_STRING(temp), "__newobj__") == 0; + Py_DECREF(temp); + } + } + if (use_newobj) { + PyObject *cls; + PyObject *newargtup; + int n, i; + + /* Sanity checks. */ + n = PyTuple_Size(argtup); + if (n < 1) { + PyErr_SetString(PicklingError, "__newobj__ arglist " "is empty"); + return -1; + } + + cls = PyTuple_GET_ITEM(argtup, 0); + if (!PyObject_HasAttrString(cls, "__new__")) { + PyErr_SetString(PicklingError, "args[0] from " + "__newobj__ args has no __new__"); + return -1; } - /* Memoize. */ - /* XXX How can ob be NULL? */ + /* XXX How could ob be NULL? */ if (ob != NULL) { - if (state && !PyDict_Check(state)) { - if (put2(self, ob) < 0) - return -1; - } - else if (put(self, ob) < 0) - return -1; + PyObject *ob_dot_class; + + ob_dot_class = PyObject_GetAttr(ob, __class___str); + if (ob_dot_class == NULL) { + if (PyErr_ExceptionMatches(PyExc_AttributeError)) + PyErr_Clear(); + else + return -1; + } + i = ob_dot_class != cls; /* true iff a problem */ + Py_XDECREF(ob_dot_class); + if (i) { + PyErr_SetString(PicklingError, "args[0] from " + "__newobj__ args has the wrong class"); + return -1; + } + } + + /* Save the class and its __new__ arguments. 
*/ + if (save(self, cls, 0) < 0) + return -1; + + newargtup = PyTuple_New(n - 1); /* argtup[1:] */ + if (newargtup == NULL) + return -1; + for (i = 1; i < n; ++i) { + PyObject *temp = PyTuple_GET_ITEM(argtup, i); + Py_INCREF(temp); + PyTuple_SET_ITEM(newargtup, i - 1, temp); } + i = save(self, newargtup, 0) < 0; + Py_DECREF(newargtup); + if (i < 0) + return -1; + /* Add NEWOBJ opcode. */ + if (self->write_func(self, &newobj, 1) < 0) + return -1; + } + else { + /* Not using NEWOBJ. */ + if (save(self, callable, 0) < 0 || + save(self, argtup, 0) < 0 || + self->write_func(self, &reduce, 1) < 0) + return -1; + } + + /* Memoize. */ + /* XXX How can ob be NULL? */ + if (ob != NULL) { + if (state && !PyDict_Check(state)) { + if (put2(self, ob) < 0) + return -1; + } + else if (put(self, ob) < 0) + return -1; + } - if (listitems && batch_list(self, listitems) < 0) - return -1; - if (dictitems && batch_dict(self, dictitems) < 0) - return -1; + if (listitems && batch_list(self, listitems) < 0) + return -1; - if (state) { - if (save(self, state, 0) < 0 || - self->write_func(self, &build, 1) < 0) - return -1; - } + if (dictitems && batch_dict(self, dictitems) < 0) + return -1; - return 0; + if (state) { + if (save(self, state, 0) < 0 || self->write_func(self, &build, 1) < 0) + return -1; + } + + return 0; } static int -save(Picklerobject *self, PyObject *args, int pers_save) +save(Picklerobject * self, PyObject * args, int pers_save) { - PyTypeObject *type; - PyObject *py_ob_id = 0, *__reduce__ = 0, *t = 0; - PyObject *arg_tup; - int res = -1; - int tmp, size; - - if (self->nesting++ > Py_GetRecursionLimit()){ - PyErr_SetString(PyExc_RuntimeError, - "maximum recursion depth exceeded"); - goto finally; - } + PyTypeObject *type; + PyObject *py_ob_id = 0, *__reduce__ = 0, *t = 0; + PyObject *arg_tup; + int res = -1; + int tmp, size; - if (!pers_save && self->pers_func) { - if ((tmp = save_pers(self, args, self->pers_func)) != 0) { - res = tmp; - goto finally; - } - } + if 
(self->nesting++ > Py_GetRecursionLimit()) { + PyErr_SetString(PyExc_RuntimeError, + "maximum recursion depth exceeded"); + goto finally; + } - if (args == Py_None) { - res = save_none(self, args); - goto finally; + if (!pers_save && self->pers_func) { + if ((tmp = save_pers(self, args, self->pers_func)) != 0) { + res = tmp; + goto finally; } + } - type = args->ob_type; - - switch (type->tp_name[0]) { - case 'b': - if (args == Py_False || args == Py_True) { - res = save_bool(self, args); - goto finally; - } - break; - case 'i': - if (type == &PyLong_Type) { - res = save_long(self, args); - goto finally; - } - break; - - case 'f': - if (type == &PyFloat_Type) { - res = save_float(self, args); - goto finally; - } - break; - - case 't': - if (type == &PyTuple_Type && PyTuple_Size(args) == 0) { - res = save_tuple(self, args); - goto finally; - } - break; + if (args == Py_None) { + res = save_none(self, args); + goto finally; + } - case 's': - if ((type == &PyString_Type) && (PyString_GET_SIZE(args) < 2)) { - res = save_string(self, args, 0); - goto finally; - } + type = args->ob_type; -#ifdef Py_USING_UNICODE - case 'u': - if ((type == &PyUnicode_Type) && (PyString_GET_SIZE(args) < 2)) { - res = save_unicode(self, args, 0); - goto finally; - } -#endif + switch (type->tp_name[0]) { + case 'b': + if (args == Py_False || args == Py_True) { + res = save_bool(self, args); + goto finally; } + break; + case 'i': + if (type == &PyLong_Type) { + res = save_long(self, args); + goto finally; + } + break; - if (args->ob_refcnt > 1) { - if (!( py_ob_id = PyLong_FromVoidPtr(args))) - goto finally; - - if (PyDict_GetItem(self->memo, py_ob_id)) { - if (get(self, py_ob_id) < 0) - goto finally; + case 'f': + if (type == &PyFloat_Type) { + res = save_float(self, args); + goto finally; + } + break; - res = 0; - goto finally; - } + case 't': + if (type == &PyTuple_Type && PyTuple_Size(args) == 0) { + res = save_tuple(self, args); + goto finally; } + break; - switch (type->tp_name[0]) { - 
case 's': - if (type == &PyString_Type) { - res = save_string(self, args, 1); - goto finally; - } - break; + case 's': + if ((type == &PyString_Type) && (PyString_GET_SIZE(args) < 2)) { + res = save_string(self, args, 0); + goto finally; + } #ifdef Py_USING_UNICODE - case 'u': - if (type == &PyUnicode_Type) { - res = save_unicode(self, args, 1); - goto finally; - } - break; + case 'u': + if ((type == &PyUnicode_Type) && (PyString_GET_SIZE(args) < 2)) { + res = save_unicode(self, args, 0); + goto finally; + } #endif + } - case 't': - if (type == &PyTuple_Type) { - res = save_tuple(self, args); - goto finally; - } - if (type == &PyType_Type) { - res = save_global(self, args, NULL); - goto finally; - } - break; + if (args->ob_refcnt > 1) { + if (!(py_ob_id = PyLong_FromVoidPtr(args))) + goto finally; - case 'l': - if (type == &PyList_Type) { - res = save_list(self, args); - goto finally; - } - break; - - case 'd': - if (type == &PyDict_Type) { - res = save_dict(self, args); - goto finally; - } - break; - - case 'i': - break; - - case 'c': - break; + if (PyDict_GetItem(self->memo, py_ob_id)) { + if (get(self, py_ob_id) < 0) + goto finally; - case 'f': - if (type == &PyFunction_Type) { - res = save_global(self, args, NULL); - if (res && PyErr_ExceptionMatches(PickleError)) { - /* fall back to reduce */ - PyErr_Clear(); - break; - } - goto finally; - } - break; + res = 0; + goto finally; + } + } - case 'b': - if (type == &PyCFunction_Type) { - res = save_global(self, args, NULL); - goto finally; - } + switch (type->tp_name[0]) { + case 's': + if (type == &PyString_Type) { + res = save_string(self, args, 1); + goto finally; } + break; - if (!pers_save && self->inst_pers_func) { - if ((tmp = save_pers(self, args, self->inst_pers_func)) != 0) { - res = tmp; - goto finally; - } +#ifdef Py_USING_UNICODE + case 'u': + if (type == &PyUnicode_Type) { + res = save_unicode(self, args, 1); + goto finally; } + break; +#endif - if (PyType_IsSubtype(type, &PyType_Type)) { - res = 
save_global(self, args, NULL); - goto finally; + case 't': + if (type == &PyTuple_Type) { + res = save_tuple(self, args); + goto finally; + } + if (type == &PyType_Type) { + res = save_global(self, args, NULL); + goto finally; + } + break; + + case 'l': + if (type == &PyList_Type) { + res = save_list(self, args); + goto finally; + } + break; + + case 'd': + if (type == &PyDict_Type) { + res = save_dict(self, args); + goto finally; + } + break; + + case 'i': + break; + + case 'c': + break; + + case 'f': + if (type == &PyFunction_Type) { + res = save_global(self, args, NULL); + if (res && PyErr_ExceptionMatches(PickleError)) { + /* fall back to reduce */ + PyErr_Clear(); + break; + } + goto finally; } + break; - /* Get a reduction callable, and call it. This may come from - * copy_reg.dispatch_table, the object's __reduce_ex__ method, - * or the object's __reduce__ method. - */ - __reduce__ = PyDict_GetItem(dispatch_table, (PyObject *)type); + case 'b': + if (type == &PyCFunction_Type) { + res = save_global(self, args, NULL); + goto finally; + } + } + + if (!pers_save && self->inst_pers_func) { + if ((tmp = save_pers(self, args, self->inst_pers_func)) != 0) { + res = tmp; + goto finally; + } + } + + if (PyType_IsSubtype(type, &PyType_Type)) { + res = save_global(self, args, NULL); + goto finally; + } + + /* Get a reduction callable, and call it. This may come from + * copy_reg.dispatch_table, the object's __reduce_ex__ method, + * or the object's __reduce__ method. + */ + __reduce__ = PyDict_GetItem(dispatch_table, (PyObject *) type); + if (__reduce__ != NULL) { + Py_INCREF(__reduce__); + Py_INCREF(args); + ARG_TUP(self, args); + if (self->arg) { + t = PyObject_Call(__reduce__, self->arg, NULL); + FREE_ARG_TUP(self); + } + } + else { + /* Check for a __reduce_ex__ method. 
*/ + __reduce__ = PyObject_GetAttr(args, __reduce_ex___str); if (__reduce__ != NULL) { - Py_INCREF(__reduce__); - Py_INCREF(args); - ARG_TUP(self, args); + t = PyInt_FromLong(self->proto); + if (t != NULL) { + ARG_TUP(self, t); + t = NULL; if (self->arg) { - t = PyObject_Call(__reduce__, self->arg, NULL); - FREE_ARG_TUP(self); + t = PyObject_Call(__reduce__, self->arg, NULL); + FREE_ARG_TUP(self); } + } } else { - /* Check for a __reduce_ex__ method. */ - __reduce__ = PyObject_GetAttr(args, __reduce_ex___str); - if (__reduce__ != NULL) { - t = PyInt_FromLong(self->proto); - if (t != NULL) { - ARG_TUP(self, t); - t = NULL; - if (self->arg) { - t = PyObject_Call(__reduce__, - self->arg, NULL); - FREE_ARG_TUP(self); - } - } - } - else { - if (PyErr_ExceptionMatches(PyExc_AttributeError)) - PyErr_Clear(); - else - goto finally; - /* Check for a __reduce__ method. */ - __reduce__ = PyObject_GetAttr(args, __reduce___str); - if (__reduce__ != NULL) { - t = PyObject_Call(__reduce__, - empty_tuple, NULL); - } - else { - PyErr_SetObject(UnpickleableError, args); - goto finally; - } - } - } - - if (t == NULL) - goto finally; - - if (PyString_Check(t)) { - res = save_global(self, args, t); - goto finally; - } - - if (! PyTuple_Check(t)) { - cPickle_ErrFormat(PicklingError, "Value returned by " - "%s must be string or tuple", - "O", __reduce__); - goto finally; - } - - size = PyTuple_Size(t); - if (size < 2 || size > 5) { - cPickle_ErrFormat(PicklingError, "tuple returned by " - "%s must contain 2 through 5 elements", - "O", __reduce__); - goto finally; - } - - arg_tup = PyTuple_GET_ITEM(t, 1); - if (!(PyTuple_Check(arg_tup) || arg_tup == Py_None)) { - cPickle_ErrFormat(PicklingError, "Second element of " - "tuple returned by %s must be a tuple", - "O", __reduce__); + if (PyErr_ExceptionMatches(PyExc_AttributeError)) + PyErr_Clear(); + else goto finally; - } + /* Check for a __reduce__ method. 
*/ + __reduce__ = PyObject_GetAttr(args, __reduce___str); + if (__reduce__ != NULL) { + t = PyObject_Call(__reduce__, empty_tuple, NULL); + } + else { + PyErr_SetObject(UnpickleableError, args); + goto finally; + } + } + } + + if (t == NULL) + goto finally; + + if (PyString_Check(t)) { + res = save_global(self, args, t); + goto finally; + } + + if (!PyTuple_Check(t)) { + cPickle_ErrFormat(PicklingError, "Value returned by " + "%s must be string or tuple", "O", __reduce__); + goto finally; + } + + size = PyTuple_Size(t); + if (size < 2 || size > 5) { + cPickle_ErrFormat(PicklingError, "tuple returned by " + "%s must contain 2 through 5 elements", + "O", __reduce__); + goto finally; + } + + arg_tup = PyTuple_GET_ITEM(t, 1); + if (!(PyTuple_Check(arg_tup) || arg_tup == Py_None)) { + cPickle_ErrFormat(PicklingError, "Second element of " + "tuple returned by %s must be a tuple", + "O", __reduce__); + goto finally; + } - res = save_reduce(self, t, args); + res = save_reduce(self, t, args); finally: - self->nesting--; - Py_XDECREF(py_ob_id); - Py_XDECREF(__reduce__); - Py_XDECREF(t); + self->nesting--; + Py_XDECREF(py_ob_id); + Py_XDECREF(__reduce__); + Py_XDECREF(t); - return res; + return res; } static int -dump(Picklerobject *self, PyObject *args) +dump(Picklerobject * self, PyObject * args) { - static char stop = STOP; + static char stop = STOP; - if (self->proto >= 2) { - char bytes[2]; + if (self->proto >= 2) { + char bytes[2]; - bytes[0] = PROTO; - assert(self->proto >= 0 && self->proto < 256); - bytes[1] = (char)self->proto; - if (self->write_func(self, bytes, 2) < 0) - return -1; - } + bytes[0] = PROTO; + assert(self->proto >= 0 && self->proto < 256); + bytes[1] = (char) self->proto; + if (self->write_func(self, bytes, 2) < 0) + return -1; + } - if (save(self, args, 0) < 0) - return -1; + if (save(self, args, 0) < 0) + return -1; - if (self->write_func(self, &stop, 1) < 0) - return -1; + if (self->write_func(self, &stop, 1) < 0) + return -1; - if 
(self->write_func(self, NULL, 0) < 0) - return -1; + if (self->write_func(self, NULL, 0) < 0) + return -1; - return 0; + return 0; } static PyObject * -Pickle_clear_memo(Picklerobject *self, PyObject *args) +Pickle_clear_memo(Picklerobject * self, PyObject * args) { - if (self->memo) - PyDict_Clear(self->memo); - Py_INCREF(Py_None); - return Py_None; + if (self->memo) + PyDict_Clear(self->memo); + Py_INCREF(Py_None); + return Py_None; } static PyObject * -Pickle_getvalue(Picklerobject *self, PyObject *args) +Pickle_getvalue(Picklerobject * self, PyObject * args) { - int l, i, rsize, ssize, clear=1, lm; - long ik; - PyObject *k, *r; - char *s, *p, *have_get; - Pdata *data; - - /* Can be called by Python code or C code */ - if (args && !PyArg_ParseTuple(args, "|i:getvalue", &clear)) - return NULL; - - /* Check to make sure we are based on a list */ - if (! Pdata_Check(self->file)) { - PyErr_SetString(PicklingError, - "Attempt to getvalue() a non-list-based pickler"); - return NULL; - } + int l, i, rsize, ssize, clear = 1, lm; + long ik; + PyObject *k, *r; + char *s, *p, *have_get; + Pdata *data; - /* flush write buffer */ - if (write_other(self, NULL, 0) < 0) return NULL; + /* Can be called by Python code or C code */ + if (args && !PyArg_ParseTuple(args, "|i:getvalue", &clear)) + return NULL; - data=(Pdata*)self->file; - l=data->length; + /* Check to make sure we are based on a list */ + if (!Pdata_Check(self->file)) { + PyErr_SetString(PicklingError, + "Attempt to getvalue() a non-list-based pickler"); + return NULL; + } - /* set up an array to hold get/put status */ - lm = PyDict_Size(self->memo); - if (lm < 0) return NULL; - lm++; - have_get = malloc(lm); - if (have_get == NULL) return PyErr_NoMemory(); - memset(have_get, 0, lm); - - /* Scan for gets. 
*/ - for (rsize = 0, i = l; --i >= 0; ) { - k = data->data[i]; - - if (PyString_Check(k)) - rsize += PyString_GET_SIZE(k); - - else if (PyInt_Check(k)) { /* put */ - ik = PyInt_AsLong(k); - if (ik == -1 && PyErr_Occurred()) - goto err; - if (ik >= lm || ik == 0) { - PyErr_SetString(PicklingError, - "Invalid get data"); - goto err; - } - if (have_get[ik]) /* with matching get */ - rsize += ik < 256 ? 2 : 5; - } + /* flush write buffer */ + if (write_other(self, NULL, 0) < 0) + return NULL; - else if (! (PyTuple_Check(k) && - PyTuple_GET_SIZE(k) == 2 && - PyInt_Check((k = PyTuple_GET_ITEM(k, 0)))) - ) { - PyErr_SetString(PicklingError, - "Unexpected data in internal list"); - goto err; - } + data = (Pdata *) self->file; + l = data->length; - else { /* put */ - ik = PyInt_AsLong(k); - if (ik == -1 && PyErr_Occurred()) - goto err; - if (ik >= lm || ik == 0) { - PyErr_SetString(PicklingError, - "Invalid get data"); - return NULL; - } - have_get[ik] = 1; - rsize += ik < 256 ? 2 : 5; - } - } - - /* Now generate the result */ - r = PyString_FromStringAndSize(NULL, rsize); - if (r == NULL) goto err; - s = PyString_AS_STRING((PyStringObject *)r); - - for (i = 0; i < l; i++) { - k = data->data[i]; - - if (PyString_Check(k)) { - ssize = PyString_GET_SIZE(k); - if (ssize) { - p=PyString_AS_STRING((PyStringObject *)k); - while (--ssize >= 0) - *s++ = *p++; - } - } + /* set up an array to hold get/put status */ + lm = PyDict_Size(self->memo); + if (lm < 0) + return NULL; + lm++; + have_get = malloc(lm); + if (have_get == NULL) + return PyErr_NoMemory(); + memset(have_get, 0, lm); - else if (PyTuple_Check(k)) { /* get */ - ik = PyLong_AsLong(PyTuple_GET_ITEM(k, 0)); - if (ik == -1 && PyErr_Occurred()) - goto err; - if (ik < 256) { - *s++ = BINGET; - *s++ = (int)(ik & 0xff); - } - else { - *s++ = LONG_BINGET; - *s++ = (int)(ik & 0xff); - *s++ = (int)((ik >> 8) & 0xff); - *s++ = (int)((ik >> 16) & 0xff); - *s++ = (int)((ik >> 24) & 0xff); - } - } + /* Scan for gets. 
*/ + for (rsize = 0, i = l; --i >= 0;) { + k = data->data[i]; + + if (PyString_Check(k)) + rsize += PyString_GET_SIZE(k); + + else if (PyInt_Check(k)) { /* put */ + ik = PyInt_AsLong(k); + if (ik == -1 && PyErr_Occurred()) + goto err; + if (ik >= lm || ik == 0) { + PyErr_SetString(PicklingError, "Invalid get data"); + goto err; + } + if (have_get[ik]) /* with matching get */ + rsize += ik < 256 ? 2 : 5; + } + + else if (!(PyTuple_Check(k) && + PyTuple_GET_SIZE(k) == 2 && + PyInt_Check((k = PyTuple_GET_ITEM(k, 0)))) + ) { + PyErr_SetString(PicklingError, "Unexpected data in internal list"); + goto err; + } + + else { /* put */ + ik = PyInt_AsLong(k); + if (ik == -1 && PyErr_Occurred()) + goto err; + if (ik >= lm || ik == 0) { + PyErr_SetString(PicklingError, "Invalid get data"); + return NULL; + } + have_get[ik] = 1; + rsize += ik < 256 ? 2 : 5; + } + } + + /* Now generate the result */ + r = PyString_FromStringAndSize(NULL, rsize); + if (r == NULL) + goto err; + s = PyString_AS_STRING((PyStringObject *) r); + + for (i = 0; i < l; i++) { + k = data->data[i]; + + if (PyString_Check(k)) { + ssize = PyString_GET_SIZE(k); + if (ssize) { + p = PyString_AS_STRING((PyStringObject *) k); + while (--ssize >= 0) + *s++ = *p++; + } + } + + else if (PyTuple_Check(k)) { /* get */ + ik = PyLong_AsLong(PyTuple_GET_ITEM(k, 0)); + if (ik == -1 && PyErr_Occurred()) + goto err; + if (ik < 256) { + *s++ = BINGET; + *s++ = (int) (ik & 0xff); + } + else { + *s++ = LONG_BINGET; + *s++ = (int) (ik & 0xff); + *s++ = (int) ((ik >> 8) & 0xff); + *s++ = (int) ((ik >> 16) & 0xff); + *s++ = (int) ((ik >> 24) & 0xff); + } + } + + else { /* put */ + ik = PyLong_AsLong(k); + if (ik == -1 && PyErr_Occurred()) + goto err; - else { /* put */ - ik = PyLong_AsLong(k); - if (ik == -1 && PyErr_Occurred()) - goto err; - - if (have_get[ik]) { /* with matching get */ - if (ik < 256) { - *s++ = BINPUT; - *s++ = (int)(ik & 0xff); - } - else { - *s++ = LONG_BINPUT; - *s++ = (int)(ik & 0xff); - *s++ = (int)((ik 
>> 8) & 0xff); - *s++ = (int)((ik >> 16) & 0xff); - *s++ = (int)((ik >> 24) & 0xff); - } - } + if (have_get[ik]) { /* with matching get */ + if (ik < 256) { + *s++ = BINPUT; + *s++ = (int) (ik & 0xff); } - } - - if (clear) { - PyDict_Clear(self->memo); - Pdata_clear(data, 0); - } + else { + *s++ = LONG_BINPUT; + *s++ = (int) (ik & 0xff); + *s++ = (int) ((ik >> 8) & 0xff); + *s++ = (int) ((ik >> 16) & 0xff); + *s++ = (int) ((ik >> 24) & 0xff); + } + } + } + } + + if (clear) { + PyDict_Clear(self->memo); + Pdata_clear(data, 0); + } - free(have_get); - return r; + free(have_get); + return r; err: - free(have_get); - return NULL; + free(have_get); + return NULL; } static PyObject * -Pickler_dump(Picklerobject *self, PyObject *args) +Pickler_dump(Picklerobject * self, PyObject * args) { - PyObject *ob; - int get=0; + PyObject *ob; + int get = 0; - if (!( PyArg_ParseTuple(args, "O|i:dump", &ob, &get))) - return NULL; + if (!(PyArg_ParseTuple(args, "O|i:dump", &ob, &get))) + return NULL; - if (dump(self, ob) < 0) - return NULL; + if (dump(self, ob) < 0) + return NULL; - if (get) return Pickle_getvalue(self, NULL); + if (get) + return Pickle_getvalue(self, NULL); - /* XXX Why does dump() return self? */ - Py_INCREF(self); - return (PyObject*)self; + /* XXX Why does dump() return self? 
*/ + Py_INCREF(self); + return (PyObject *) self; } -static struct PyMethodDef Pickler_methods[] = -{ - {"dump", (PyCFunction)Pickler_dump, METH_VARARGS, - PyDoc_STR("dump(object) -- " - "Write an object in pickle format to the object's pickle stream")}, - {"clear_memo", (PyCFunction)Pickle_clear_memo, METH_NOARGS, - PyDoc_STR("clear_memo() -- Clear the picklers memo")}, - {"getvalue", (PyCFunction)Pickle_getvalue, METH_VARARGS, - PyDoc_STR("getvalue() -- Finish picking a list-based pickle")}, - {NULL, NULL} /* sentinel */ +static struct PyMethodDef Pickler_methods[] = { + {"dump", (PyCFunction) Pickler_dump, METH_VARARGS, + PyDoc_STR("dump(object) -- " + "Write an object in pickle format to the object's pickle stream")}, + {"clear_memo", (PyCFunction) Pickle_clear_memo, METH_NOARGS, + PyDoc_STR("clear_memo() -- Clear the picklers memo")}, + {"getvalue", (PyCFunction) Pickle_getvalue, METH_VARARGS, + PyDoc_STR("getvalue() -- Finish picking a list-based pickle")}, + {NULL, NULL} /* sentinel */ }; static Picklerobject * -newPicklerobject(PyObject *file, int proto) +newPicklerobject(PyObject * file, int proto) { - Picklerobject *self; - - if (proto < 0) - proto = HIGHEST_PROTOCOL; - if (proto > HIGHEST_PROTOCOL) { - PyErr_Format(PyExc_ValueError, "pickle protocol %d asked for; " - "the highest available protocol is %d", - proto, HIGHEST_PROTOCOL); - return NULL; - } + Picklerobject *self; - self = PyObject_GC_New(Picklerobject, &Picklertype); - if (self == NULL) - return NULL; - self->proto = proto; - self->bin = proto > 0; - self->fp = NULL; - self->write = NULL; - self->memo = NULL; - self->arg = NULL; - self->pers_func = NULL; - self->inst_pers_func = NULL; - self->write_buf = NULL; - self->fast = 0; - self->nesting = 0; - self->fast_container = 0; - self->fast_memo = NULL; - self->buf_size = 0; - self->dispatch_table = NULL; - - self->file = NULL; - if (file) - Py_INCREF(file); - else { - file = Pdata_New(); - if (file == NULL) - goto err; - } - self->file = file; 
+ if (proto < 0) + proto = HIGHEST_PROTOCOL; + if (proto > HIGHEST_PROTOCOL) { + PyErr_Format(PyExc_ValueError, "pickle protocol %d asked for; " + "the highest available protocol is %d", + proto, HIGHEST_PROTOCOL); + return NULL; + } - if (!( self->memo = PyDict_New())) + self = PyObject_GC_New(Picklerobject, &Picklertype); + if (self == NULL) + return NULL; + self->proto = proto; + self->bin = proto > 0; + self->fp = NULL; + self->write = NULL; + self->memo = NULL; + self->arg = NULL; + self->pers_func = NULL; + self->inst_pers_func = NULL; + self->write_buf = NULL; + self->fast = 0; + self->nesting = 0; + self->fast_container = 0; + self->fast_memo = NULL; + self->buf_size = 0; + self->dispatch_table = NULL; + + self->file = NULL; + if (file) + Py_INCREF(file); + else { + file = Pdata_New(); + if (file == NULL) + goto err; + } + self->file = file; + + if (!(self->memo = PyDict_New())) + goto err; + + if (PyFile_Check(file)) { + self->fp = PyFile_AsFile(file); + if (self->fp == NULL) { + PyErr_SetString(PyExc_ValueError, "I/O operation on closed file"); + goto err; + } + self->write_func = write_file; + } + else if (PycStringIO_OutputCheck(file)) { + self->write_func = write_cStringIO; + } + else if (file == Py_None) { + self->write_func = write_none; + } + else { + self->write_func = write_other; + + if (!Pdata_Check(file)) { + self->write = PyObject_GetAttr(file, write_str); + if (!self->write) { + PyErr_Clear(); + PyErr_SetString(PyExc_TypeError, + "argument must have 'write' " "attribute"); goto err; - - if (PyFile_Check(file)) { - self->fp = PyFile_AsFile(file); - if (self->fp == NULL) { - PyErr_SetString(PyExc_ValueError, - "I/O operation on closed file"); - goto err; - } - self->write_func = write_file; + } } - else if (PycStringIO_OutputCheck(file)) { - self->write_func = write_cStringIO; - } - else if (file == Py_None) { - self->write_func = write_none; - } - else { - self->write_func = write_other; - if (! 
Pdata_Check(file)) { - self->write = PyObject_GetAttr(file, write_str); - if (!self->write) { - PyErr_Clear(); - PyErr_SetString(PyExc_TypeError, - "argument must have 'write' " - "attribute"); - goto err; - } - } - - self->write_buf = (char *)PyMem_Malloc(WRITE_BUF_SIZE); - if (self->write_buf == NULL) { - PyErr_NoMemory(); - goto err; - } + self->write_buf = (char *) PyMem_Malloc(WRITE_BUF_SIZE); + if (self->write_buf == NULL) { + PyErr_NoMemory(); + goto err; } + } - self->dispatch_table = dispatch_table; - Py_INCREF(dispatch_table); - PyObject_GC_Track(self); + self->dispatch_table = dispatch_table; + Py_INCREF(dispatch_table); + PyObject_GC_Track(self); - return self; + return self; err: - Py_DECREF(self); - return NULL; + Py_DECREF(self); + return NULL; } static PyObject * -get_Pickler(PyObject *self, PyObject *args, PyObject *kwds) +get_Pickler(PyObject * self, PyObject * args, PyObject * kwds) { - static char *kwlist[] = {"file", "protocol", NULL}; - PyObject *file = NULL; - int proto = 0; - - /* XXX - * The documented signature is Pickler(file, protocol=0), but this - * accepts Pickler() and Pickler(integer) too. The meaning then - * is clear as mud, undocumented, and not supported by pickle.py. - * I'm told Zope uses this, but I haven't traced into this code - * far enough to figure out what it means. - */ - if (!PyArg_ParseTuple(args, "|i:Pickler", &proto)) { - PyErr_Clear(); - proto = 0; - if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|i:Pickler", - kwlist, &file, &proto)) - return NULL; - } - return (PyObject *)newPicklerobject(file, proto); + static char *kwlist[] = { "file", "protocol", NULL }; + PyObject *file = NULL; + int proto = 0; + + /* XXX + * The documented signature is Pickler(file, protocol=0), but this + * accepts Pickler() and Pickler(integer) too. The meaning then + * is clear as mud, undocumented, and not supported by pickle.py. + * I'm told Zope uses this, but I haven't traced into this code + * far enough to figure out what it means. 
+ */ + if (!PyArg_ParseTuple(args, "|i:Pickler", &proto)) { + PyErr_Clear(); + proto = 0; + if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|i:Pickler", + kwlist, &file, &proto)) + return NULL; + } + return (PyObject *) newPicklerobject(file, proto); } static void -Pickler_dealloc(Picklerobject *self) +Pickler_dealloc(Picklerobject * self) { - PyObject_GC_UnTrack(self); - Py_XDECREF(self->write); - Py_XDECREF(self->memo); - Py_XDECREF(self->fast_memo); - Py_XDECREF(self->arg); - Py_XDECREF(self->file); - Py_XDECREF(self->pers_func); - Py_XDECREF(self->inst_pers_func); - Py_XDECREF(self->dispatch_table); - PyMem_Free(self->write_buf); - self->ob_type->tp_free((PyObject *)self); + PyObject_GC_UnTrack(self); + Py_XDECREF(self->write); + Py_XDECREF(self->memo); + Py_XDECREF(self->fast_memo); + Py_XDECREF(self->arg); + Py_XDECREF(self->file); + Py_XDECREF(self->pers_func); + Py_XDECREF(self->inst_pers_func); + Py_XDECREF(self->dispatch_table); + PyMem_Free(self->write_buf); + self->ob_type->tp_free((PyObject *) self); } static int -Pickler_traverse(Picklerobject *self, visitproc visit, void *arg) -{ - Py_VISIT(self->write); - Py_VISIT(self->memo); - Py_VISIT(self->fast_memo); - Py_VISIT(self->arg); - Py_VISIT(self->file); - Py_VISIT(self->pers_func); - Py_VISIT(self->inst_pers_func); - Py_VISIT(self->dispatch_table); - return 0; +Pickler_traverse(Picklerobject * self, visitproc visit, void *arg) +{ + Py_VISIT(self->write); + Py_VISIT(self->memo); + Py_VISIT(self->fast_memo); + Py_VISIT(self->arg); + Py_VISIT(self->file); + Py_VISIT(self->pers_func); + Py_VISIT(self->inst_pers_func); + Py_VISIT(self->dispatch_table); + return 0; } static int -Pickler_clear(Picklerobject *self) -{ - Py_CLEAR(self->write); - Py_CLEAR(self->memo); - Py_CLEAR(self->fast_memo); - Py_CLEAR(self->arg); - Py_CLEAR(self->file); - Py_CLEAR(self->pers_func); - Py_CLEAR(self->inst_pers_func); - Py_CLEAR(self->dispatch_table); - return 0; +Pickler_clear(Picklerobject * self) +{ + 
Py_CLEAR(self->write); + Py_CLEAR(self->memo); + Py_CLEAR(self->fast_memo); + Py_CLEAR(self->arg); + Py_CLEAR(self->file); + Py_CLEAR(self->pers_func); + Py_CLEAR(self->inst_pers_func); + Py_CLEAR(self->dispatch_table); + return 0; } static PyObject * -Pickler_get_pers_func(Picklerobject *p) +Pickler_get_pers_func(Picklerobject * p) { - if (p->pers_func == NULL) - PyErr_SetString(PyExc_AttributeError, "persistent_id"); - else - Py_INCREF(p->pers_func); - return p->pers_func; + if (p->pers_func == NULL) + PyErr_SetString(PyExc_AttributeError, "persistent_id"); + else + Py_INCREF(p->pers_func); + return p->pers_func; } static int -Pickler_set_pers_func(Picklerobject *p, PyObject *v) +Pickler_set_pers_func(Picklerobject * p, PyObject * v) { - if (v == NULL) { - PyErr_SetString(PyExc_TypeError, - "attribute deletion is not supported"); - return -1; - } - Py_XDECREF(p->pers_func); - Py_INCREF(v); - p->pers_func = v; - return 0; + if (v == NULL) { + PyErr_SetString(PyExc_TypeError, + "attribute deletion is not supported"); + return -1; + } + Py_XDECREF(p->pers_func); + Py_INCREF(v); + p->pers_func = v; + return 0; } static int -Pickler_set_inst_pers_func(Picklerobject *p, PyObject *v) +Pickler_set_inst_pers_func(Picklerobject * p, PyObject * v) { - if (v == NULL) { - PyErr_SetString(PyExc_TypeError, - "attribute deletion is not supported"); - return -1; - } - Py_XDECREF(p->inst_pers_func); - Py_INCREF(v); - p->inst_pers_func = v; - return 0; + if (v == NULL) { + PyErr_SetString(PyExc_TypeError, + "attribute deletion is not supported"); + return -1; + } + Py_XDECREF(p->inst_pers_func); + Py_INCREF(v); + p->inst_pers_func = v; + return 0; } static PyObject * -Pickler_get_memo(Picklerobject *p) +Pickler_get_memo(Picklerobject * p) { - if (p->memo == NULL) - PyErr_SetString(PyExc_AttributeError, "memo"); - else - Py_INCREF(p->memo); - return p->memo; + if (p->memo == NULL) + PyErr_SetString(PyExc_AttributeError, "memo"); + else + Py_INCREF(p->memo); + return p->memo; } 
static int -Pickler_set_memo(Picklerobject *p, PyObject *v) +Pickler_set_memo(Picklerobject * p, PyObject * v) { - if (v == NULL) { - PyErr_SetString(PyExc_TypeError, - "attribute deletion is not supported"); - return -1; - } - if (!PyDict_Check(v)) { - PyErr_SetString(PyExc_TypeError, "memo must be a dictionary"); - return -1; - } - Py_XDECREF(p->memo); - Py_INCREF(v); - p->memo = v; - return 0; + if (v == NULL) { + PyErr_SetString(PyExc_TypeError, + "attribute deletion is not supported"); + return -1; + } + if (!PyDict_Check(v)) { + PyErr_SetString(PyExc_TypeError, "memo must be a dictionary"); + return -1; + } + Py_XDECREF(p->memo); + Py_INCREF(v); + p->memo = v; + return 0; } static PyObject * -Pickler_get_error(Picklerobject *p) +Pickler_get_error(Picklerobject * p) { - /* why is this an attribute on the Pickler? */ - Py_INCREF(PicklingError); - return PicklingError; + /* why is this an attribute on the Pickler? */ + Py_INCREF(PicklingError); + return PicklingError; } static PyMemberDef Pickler_members[] = { @@ -2873,160 +2866,164 @@ }; static PyGetSetDef Pickler_getsets[] = { - {"persistent_id", (getter)Pickler_get_pers_func, - (setter)Pickler_set_pers_func}, - {"inst_persistent_id", NULL, (setter)Pickler_set_inst_pers_func}, - {"memo", (getter)Pickler_get_memo, (setter)Pickler_set_memo}, - {"PicklingError", (getter)Pickler_get_error, NULL}, + {"persistent_id", (getter) Pickler_get_pers_func, + (setter) Pickler_set_pers_func}, + {"inst_persistent_id", NULL, (setter) Pickler_set_inst_pers_func}, + {"memo", (getter) Pickler_get_memo, (setter) Pickler_set_memo}, + {"PicklingError", (getter) Pickler_get_error, NULL}, {NULL} }; -PyDoc_STRVAR(Picklertype__doc__, -"Objects that know how to pickle objects\n"); +PyDoc_STRVAR(Picklertype__doc__, "Objects that know how to pickle objects\n"); static PyTypeObject Picklertype = { PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ - "cPickle.Pickler", /*tp_name*/ - sizeof(Picklerobject), /*tp_basicsize*/ + 0, /*ob_size */ + 
"cPickle.Pickler", /*tp_name */ + sizeof(Picklerobject), /*tp_basicsize */ 0, - (destructor)Pickler_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_compare */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - PyObject_GenericGetAttr, /* tp_getattro */ - PyObject_GenericSetAttr, /* tp_setattro */ - 0, /* tp_as_buffer */ + (destructor) Pickler_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + PyObject_GenericGetAttr, /* tp_getattro */ + PyObject_GenericSetAttr, /* tp_setattro */ + 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, - Picklertype__doc__, /* tp_doc */ - (traverseproc)Pickler_traverse, /* tp_traverse */ - (inquiry)Pickler_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - Pickler_methods, /* tp_methods */ - Pickler_members, /* tp_members */ - Pickler_getsets, /* tp_getset */ + Picklertype__doc__, /* tp_doc */ + (traverseproc) Pickler_traverse, /* tp_traverse */ + (inquiry) Pickler_clear, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + Pickler_methods, /* tp_methods */ + Pickler_members, /* tp_members */ + Pickler_getsets, /* tp_getset */ }; static PyObject * -find_class(PyObject *py_module_name, PyObject *py_global_name, PyObject *fc) +find_class(PyObject * py_module_name, PyObject * py_global_name, PyObject * fc) { - PyObject *global = 0, *module; + PyObject *global = 0, *module; - if (fc) { - if (fc==Py_None) { - PyErr_SetString(UnpicklingError, "Global and instance " - "pickles are not supported."); - return NULL; - } - 
return PyObject_CallFunctionObjArgs(fc, py_module_name, - py_global_name, NULL); - } + if (fc) { + if (fc == Py_None) { + PyErr_SetString(UnpicklingError, "Global and instance " + "pickles are not supported."); + return NULL; + } + return PyObject_CallFunctionObjArgs(fc, py_module_name, + py_global_name, NULL); + } - module = PySys_GetObject("modules"); - if (module == NULL) - return NULL; + module = PySys_GetObject("modules"); + if (module == NULL) + return NULL; - module = PyDict_GetItem(module, py_module_name); - if (module == NULL) { - module = PyImport_Import(py_module_name); - if (!module) - return NULL; - global = PyObject_GetAttr(module, py_global_name); - Py_DECREF(module); - } - else - global = PyObject_GetAttr(module, py_global_name); - return global; + module = PyDict_GetItem(module, py_module_name); + if (module == NULL) { + module = PyImport_Import(py_module_name); + if (!module) + return NULL; + global = PyObject_GetAttr(module, py_global_name); + Py_DECREF(module); + } + else + global = PyObject_GetAttr(module, py_global_name); + return global; } static int -marker(Unpicklerobject *self) +marker(Unpicklerobject * self) { - if (self->num_marks < 1) { - PyErr_SetString(UnpicklingError, "could not find MARK"); - return -1; - } + if (self->num_marks < 1) { + PyErr_SetString(UnpicklingError, "could not find MARK"); + return -1; + } - return self->marks[--self->num_marks]; + return self->marks[--self->num_marks]; } static int -load_none(Unpicklerobject *self) +load_none(Unpicklerobject * self) { - PDATA_APPEND(self->stack, Py_None, -1); - return 0; + PDATA_APPEND(self->stack, Py_None, -1); + return 0; } static int bad_readline(void) { - PyErr_SetString(UnpicklingError, "pickle data was truncated"); - return -1; + PyErr_SetString(UnpicklingError, "pickle data was truncated"); + return -1; } static int -load_int(Unpicklerobject *self) +load_int(Unpicklerobject * self) { - PyObject *py_int = 0; - char *endptr, *s; - int len, res = -1; - long l; + PyObject 
*py_int = 0; + char *endptr, *s; + int len, res = -1; + long l; - if ((len = self->readline_func(self, &s)) < 0) return -1; - if (len < 2) return bad_readline(); - if (!( s=pystrndup(s,len))) return -1; + if ((len = self->readline_func(self, &s)) < 0) + return -1; + if (len < 2) + return bad_readline(); + if (!(s = pystrndup(s, len))) + return -1; - errno = 0; - l = strtol(s, &endptr, 0); + errno = 0; + l = strtol(s, &endptr, 0); - if (errno || (*endptr != '\n') || (endptr[1] != '\0')) { - /* Hm, maybe we've got something long. Let's try reading - it as a Python long object. */ - errno = 0; - py_int = PyLong_FromString(s, NULL, 0); - if (py_int == NULL) { - PyErr_SetString(PyExc_ValueError, - "could not convert string to int"); - goto finally; - } + if (errno || (*endptr != '\n') || (endptr[1] != '\0')) { + /* Hm, maybe we've got something long. Let's try reading + * it as a Python long object. */ + errno = 0; + py_int = PyLong_FromString(s, NULL, 0); + if (py_int == NULL) { + PyErr_SetString(PyExc_ValueError, + "could not convert string to int"); + goto finally; + } + } + else { + if (len == 3 && (l == 0 || l == 1)) { + if (!(py_int = PyBool_FromLong(l))) + goto finally; } else { - if (len == 3 && (l == 0 || l == 1)) { - if (!( py_int = PyBool_FromLong(l))) goto finally; - } - else { - if (!( py_int = PyInt_FromLong(l))) goto finally; - } + if (!(py_int = PyInt_FromLong(l))) + goto finally; } + } - free(s); - PDATA_PUSH(self->stack, py_int, -1); - return 0; + free(s); + PDATA_PUSH(self->stack, py_int, -1); + return 0; finally: - free(s); + free(s); - return res; + return res; } static int -load_bool(Unpicklerobject *self, PyObject *boolean) +load_bool(Unpicklerobject * self, PyObject * boolean) { - assert(boolean == Py_True || boolean == Py_False); - PDATA_APPEND(self->stack, boolean, -1); - return 0; + assert(boolean == Py_True || boolean == Py_False); + PDATA_APPEND(self->stack, boolean, -1); + return 0; } /* s contains x bytes of a little-endian integer. 
Return its value as a @@ -3037,1526 +3034,1594 @@ static long calc_binint(char *s, int x) { - unsigned char c; - int i; - long l; - - for (i = 0, l = 0L; i < x; i++) { - c = (unsigned char)s[i]; - l |= (long)c << (i * 8); - } + unsigned char c; + int i; + long l; + + for (i = 0, l = 0L; i < x; i++) { + c = (unsigned char) s[i]; + l |= (long) c << (i * 8); + } #if SIZEOF_LONG > 4 - /* Unlike BININT1 and BININT2, BININT (more accurately BININT4) - * is signed, so on a box with longs bigger than 4 bytes we need - * to extend a BININT's sign bit to the full width. - */ - if (x == 4 && l & (1L << 31)) - l |= (~0L) << 32; + /* Unlike BININT1 and BININT2, BININT (more accurately BININT4) + * is signed, so on a box with longs bigger than 4 bytes we need + * to extend a BININT's sign bit to the full width. + */ + if (x == 4 && l & (1L << 31)) + l |= (~0L) << 32; #endif - return l; + return l; } static int -load_binintx(Unpicklerobject *self, char *s, int x) +load_binintx(Unpicklerobject * self, char *s, int x) { - PyObject *py_int = 0; - long l; + PyObject *py_int = 0; + long l; - l = calc_binint(s, x); + l = calc_binint(s, x); - if (!( py_int = PyInt_FromLong(l))) - return -1; + if (!(py_int = PyInt_FromLong(l))) + return -1; - PDATA_PUSH(self->stack, py_int, -1); - return 0; + PDATA_PUSH(self->stack, py_int, -1); + return 0; } static int -load_binint(Unpicklerobject *self) +load_binint(Unpicklerobject * self) { - char *s; + char *s; - if (self->read_func(self, &s, 4) < 0) - return -1; + if (self->read_func(self, &s, 4) < 0) + return -1; - return load_binintx(self, s, 4); + return load_binintx(self, s, 4); } static int -load_binint1(Unpicklerobject *self) +load_binint1(Unpicklerobject * self) { - char *s; + char *s; - if (self->read_func(self, &s, 1) < 0) - return -1; + if (self->read_func(self, &s, 1) < 0) + return -1; - return load_binintx(self, s, 1); + return load_binintx(self, s, 1); } static int -load_binint2(Unpicklerobject *self) +load_binint2(Unpicklerobject * 
self) { - char *s; + char *s; - if (self->read_func(self, &s, 2) < 0) - return -1; + if (self->read_func(self, &s, 2) < 0) + return -1; - return load_binintx(self, s, 2); + return load_binintx(self, s, 2); } static int -load_long(Unpicklerobject *self) +load_long(Unpicklerobject * self) { - PyObject *l = 0; - char *end, *s; - int len, res = -1; + PyObject *l = 0; + char *end, *s; + int len, res = -1; - if ((len = self->readline_func(self, &s)) < 0) return -1; - if (len < 2) return bad_readline(); - if (!( s=pystrndup(s,len))) return -1; + if ((len = self->readline_func(self, &s)) < 0) + return -1; + if (len < 2) + return bad_readline(); + if (!(s = pystrndup(s, len))) + return -1; - if (!( l = PyLong_FromString(s, &end, 0))) - goto finally; + if (!(l = PyLong_FromString(s, &end, 0))) + goto finally; - free(s); - PDATA_PUSH(self->stack, l, -1); - return 0; + free(s); + PDATA_PUSH(self->stack, l, -1); + return 0; finally: - free(s); + free(s); - return res; + return res; } /* 'size' bytes contain the # of bytes of little-endian 256's-complement * data following. */ static int -load_counted_long(Unpicklerobject *self, int size) +load_counted_long(Unpicklerobject * self, int size) { - Py_ssize_t i; - char *nbytes; - unsigned char *pdata; - PyObject *along; + Py_ssize_t i; + char *nbytes; + unsigned char *pdata; + PyObject *along; + + assert(size == 1 || size == 4); + i = self->read_func(self, &nbytes, size); + if (i < 0) + return -1; - assert(size == 1 || size == 4); - i = self->read_func(self, &nbytes, size); - if (i < 0) return -1; - - size = calc_binint(nbytes, size); - if (size < 0) { - /* Corrupt or hostile pickle -- we never write one like - * this. - */ - PyErr_SetString(UnpicklingError, "LONG pickle has negative " - "byte count"); - return -1; - } + size = calc_binint(nbytes, size); + if (size < 0) { + /* Corrupt or hostile pickle -- we never write one like + * this. 
+ */ + PyErr_SetString(UnpicklingError, "LONG pickle has negative " + "byte count"); + return -1; + } - if (size == 0) - along = PyLong_FromLong(0L); - else { - /* Read the raw little-endian bytes & convert. */ - i = self->read_func(self, (char **)&pdata, size); - if (i < 0) return -1; - along = _PyLong_FromByteArray(pdata, (size_t)size, - 1 /* little endian */, 1 /* signed */); - } - if (along == NULL) - return -1; - PDATA_PUSH(self->stack, along, -1); - return 0; + if (size == 0) + along = PyLong_FromLong(0L); + else { + /* Read the raw little-endian bytes & convert. */ + i = self->read_func(self, (char **) &pdata, size); + if (i < 0) + return -1; + along = _PyLong_FromByteArray(pdata, (size_t) size, + 1 /* little endian */ , 1 /* signed */ ); + } + if (along == NULL) + return -1; + PDATA_PUSH(self->stack, along, -1); + return 0; } static int -load_float(Unpicklerobject *self) +load_float(Unpicklerobject * self) { - PyObject *py_float = 0; - char *endptr, *s; - int len, res = -1; - double d; - - if ((len = self->readline_func(self, &s)) < 0) return -1; - if (len < 2) return bad_readline(); - if (!( s=pystrndup(s,len))) return -1; + PyObject *py_float = 0; + char *endptr, *s; + int len, res = -1; + double d; - errno = 0; - d = PyOS_ascii_strtod(s, &endptr); - - if (errno || (endptr[0] != '\n') || (endptr[1] != '\0')) { - PyErr_SetString(PyExc_ValueError, - "could not convert string to float"); - goto finally; - } + if ((len = self->readline_func(self, &s)) < 0) + return -1; + if (len < 2) + return bad_readline(); + if (!(s = pystrndup(s, len))) + return -1; - if (!( py_float = PyFloat_FromDouble(d))) - goto finally; + errno = 0; + d = PyOS_ascii_strtod(s, &endptr); - free(s); - PDATA_PUSH(self->stack, py_float, -1); - return 0; + if (errno || (endptr[0] != '\n') || (endptr[1] != '\0')) { + PyErr_SetString(PyExc_ValueError, "could not convert string to float"); + goto finally; + } + + if (!(py_float = PyFloat_FromDouble(d))) + goto finally; + + free(s); + 
PDATA_PUSH(self->stack, py_float, -1); + return 0; finally: - free(s); + free(s); - return res; + return res; } static int -load_binfloat(Unpicklerobject *self) +load_binfloat(Unpicklerobject * self) { - PyObject *py_float; - double x; - char *p; + PyObject *py_float; + double x; + char *p; - if (self->read_func(self, &p, 8) < 0) - return -1; + if (self->read_func(self, &p, 8) < 0) + return -1; - x = _PyFloat_Unpack8((unsigned char *)p, 0); - if (x == -1.0 && PyErr_Occurred()) - return -1; + x = _PyFloat_Unpack8((unsigned char *) p, 0); + if (x == -1.0 && PyErr_Occurred()) + return -1; - py_float = PyFloat_FromDouble(x); - if (py_float == NULL) - return -1; + py_float = PyFloat_FromDouble(x); + if (py_float == NULL) + return -1; - PDATA_PUSH(self->stack, py_float, -1); - return 0; + PDATA_PUSH(self->stack, py_float, -1); + return 0; } static int -load_string(Unpicklerobject *self) +load_string(Unpicklerobject * self) { - PyObject *str = 0; - int len, res = -1; - char *s, *p; - - if ((len = self->readline_func(self, &s)) < 0) return -1; - if (len < 2) return bad_readline(); - if (!( s=pystrndup(s,len))) return -1; - - - /* Strip outermost quotes */ - while (s[len-1] <= ' ') - len--; - if(s[0]=='"' && s[len-1]=='"'){ - s[len-1] = '\0'; - p = s + 1 ; - len -= 2; - } else if(s[0]=='\'' && s[len-1]=='\''){ - s[len-1] = '\0'; - p = s + 1 ; - len -= 2; - } else - goto insecure; + PyObject *str = 0; + int len, res = -1; + char *s, *p; + + if ((len = self->readline_func(self, &s)) < 0) + return -1; + if (len < 2) + return bad_readline(); + if (!(s = pystrndup(s, len))) + return -1; + + + /* Strip outermost quotes */ + while (s[len - 1] <= ' ') + len--; + if (s[0] == '"' && s[len - 1] == '"') { + s[len - 1] = '\0'; + p = s + 1; + len -= 2; + } + else if (s[0] == '\'' && s[len - 1] == '\'') { + s[len - 1] = '\0'; + p = s + 1; + len -= 2; + } + else + goto insecure; /********************************************/ - str = PyString_DecodeEscape(p, len, NULL, 0, NULL); - free(s); 
- if (str) { - PDATA_PUSH(self->stack, str, -1); - res = 0; - } - return res; + str = PyString_DecodeEscape(p, len, NULL, 0, NULL); + free(s); + if (str) { + PDATA_PUSH(self->stack, str, -1); + res = 0; + } + return res; insecure: - free(s); - PyErr_SetString(PyExc_ValueError,"insecure string pickle"); - return -1; + free(s); + PyErr_SetString(PyExc_ValueError, "insecure string pickle"); + return -1; } static int -load_binstring(Unpicklerobject *self) +load_binstring(Unpicklerobject * self) { - PyObject *py_string = 0; - long l; - char *s; + PyObject *py_string = 0; + long l; + char *s; - if (self->read_func(self, &s, 4) < 0) return -1; + if (self->read_func(self, &s, 4) < 0) + return -1; - l = calc_binint(s, 4); + l = calc_binint(s, 4); - if (self->read_func(self, &s, l) < 0) - return -1; + if (self->read_func(self, &s, l) < 0) + return -1; - if (!( py_string = PyString_FromStringAndSize(s, l))) - return -1; + if (!(py_string = PyString_FromStringAndSize(s, l))) + return -1; - PDATA_PUSH(self->stack, py_string, -1); - return 0; + PDATA_PUSH(self->stack, py_string, -1); + return 0; } static int -load_short_binstring(Unpicklerobject *self) +load_short_binstring(Unpicklerobject * self) { - PyObject *py_string = 0; - unsigned char l; - char *s; + PyObject *py_string = 0; + unsigned char l; + char *s; - if (self->read_func(self, &s, 1) < 0) - return -1; + if (self->read_func(self, &s, 1) < 0) + return -1; - l = (unsigned char)s[0]; + l = (unsigned char) s[0]; - if (self->read_func(self, &s, l) < 0) return -1; + if (self->read_func(self, &s, l) < 0) + return -1; - if (!( py_string = PyString_FromStringAndSize(s, l))) return -1; + if (!(py_string = PyString_FromStringAndSize(s, l))) + return -1; - PDATA_PUSH(self->stack, py_string, -1); - return 0; + PDATA_PUSH(self->stack, py_string, -1); + return 0; } #ifdef Py_USING_UNICODE static int -load_unicode(Unpicklerobject *self) +load_unicode(Unpicklerobject * self) { - PyObject *str = 0; - int len, res = -1; - char *s; + 
PyObject *str = 0; + int len, res = -1; + char *s; - if ((len = self->readline_func(self, &s)) < 0) return -1; - if (len < 1) return bad_readline(); + if ((len = self->readline_func(self, &s)) < 0) + return -1; + if (len < 1) + return bad_readline(); - if (!( str = PyUnicode_DecodeRawUnicodeEscape(s, len - 1, NULL))) - goto finally; + if (!(str = PyUnicode_DecodeRawUnicodeEscape(s, len - 1, NULL))) + goto finally; - PDATA_PUSH(self->stack, str, -1); - return 0; + PDATA_PUSH(self->stack, str, -1); + return 0; finally: - return res; + return res; } #endif #ifdef Py_USING_UNICODE static int -load_binunicode(Unpicklerobject *self) +load_binunicode(Unpicklerobject * self) { - PyObject *unicode; - long l; - char *s; + PyObject *unicode; + long l; + char *s; - if (self->read_func(self, &s, 4) < 0) return -1; + if (self->read_func(self, &s, 4) < 0) + return -1; - l = calc_binint(s, 4); + l = calc_binint(s, 4); - if (self->read_func(self, &s, l) < 0) - return -1; + if (self->read_func(self, &s, l) < 0) + return -1; - if (!( unicode = PyUnicode_DecodeUTF8(s, l, NULL))) - return -1; + if (!(unicode = PyUnicode_DecodeUTF8(s, l, NULL))) + return -1; - PDATA_PUSH(self->stack, unicode, -1); - return 0; + PDATA_PUSH(self->stack, unicode, -1); + return 0; } #endif static int -load_tuple(Unpicklerobject *self) +load_tuple(Unpicklerobject * self) { - PyObject *tup; - int i; + PyObject *tup; + int i; - if ((i = marker(self)) < 0) return -1; - if (!( tup=Pdata_popTuple(self->stack, i))) return -1; - PDATA_PUSH(self->stack, tup, -1); - return 0; + if ((i = marker(self)) < 0) + return -1; + if (!(tup = Pdata_popTuple(self->stack, i))) + return -1; + PDATA_PUSH(self->stack, tup, -1); + return 0; } static int -load_counted_tuple(Unpicklerobject *self, int len) +load_counted_tuple(Unpicklerobject * self, int len) { - PyObject *tup = PyTuple_New(len); + PyObject *tup = PyTuple_New(len); - if (tup == NULL) - return -1; + if (tup == NULL) + return -1; - while (--len >= 0) { - PyObject 
*element; + while (--len >= 0) { + PyObject *element; - PDATA_POP(self->stack, element); - if (element == NULL) - return -1; - PyTuple_SET_ITEM(tup, len, element); - } - PDATA_PUSH(self->stack, tup, -1); - return 0; + PDATA_POP(self->stack, element); + if (element == NULL) + return -1; + PyTuple_SET_ITEM(tup, len, element); + } + PDATA_PUSH(self->stack, tup, -1); + return 0; } static int -load_empty_list(Unpicklerobject *self) +load_empty_list(Unpicklerobject * self) { - PyObject *list; + PyObject *list; - if (!( list=PyList_New(0))) return -1; - PDATA_PUSH(self->stack, list, -1); - return 0; + if (!(list = PyList_New(0))) + return -1; + PDATA_PUSH(self->stack, list, -1); + return 0; } static int -load_empty_dict(Unpicklerobject *self) +load_empty_dict(Unpicklerobject * self) { - PyObject *dict; + PyObject *dict; - if (!( dict=PyDict_New())) return -1; - PDATA_PUSH(self->stack, dict, -1); - return 0; + if (!(dict = PyDict_New())) + return -1; + PDATA_PUSH(self->stack, dict, -1); + return 0; } static int -load_list(Unpicklerobject *self) +load_list(Unpicklerobject * self) { - PyObject *list = 0; - int i; + PyObject *list = 0; + int i; - if ((i = marker(self)) < 0) return -1; - if (!( list=Pdata_popList(self->stack, i))) return -1; - PDATA_PUSH(self->stack, list, -1); - return 0; + if ((i = marker(self)) < 0) + return -1; + if (!(list = Pdata_popList(self->stack, i))) + return -1; + PDATA_PUSH(self->stack, list, -1); + return 0; } static int -load_dict(Unpicklerobject *self) +load_dict(Unpicklerobject * self) { - PyObject *dict, *key, *value; - int i, j, k; + PyObject *dict, *key, *value; + int i, j, k; - if ((i = marker(self)) < 0) return -1; - j=self->stack->length; + if ((i = marker(self)) < 0) + return -1; + j = self->stack->length; - if (!( dict = PyDict_New())) return -1; - - for (k = i+1; k < j; k += 2) { - key =self->stack->data[k-1]; - value=self->stack->data[k ]; - if (PyDict_SetItem(dict, key, value) < 0) { - Py_DECREF(dict); - return -1; - } - } - 
Pdata_clear(self->stack, i); - PDATA_PUSH(self->stack, dict, -1); - return 0; + if (!(dict = PyDict_New())) + return -1; + + for (k = i + 1; k < j; k += 2) { + key = self->stack->data[k - 1]; + value = self->stack->data[k]; + if (PyDict_SetItem(dict, key, value) < 0) { + Py_DECREF(dict); + return -1; + } + } + Pdata_clear(self->stack, i); + PDATA_PUSH(self->stack, dict, -1); + return 0; } static PyObject * -Instance_New(PyObject *cls, PyObject *args) +Instance_New(PyObject * cls, PyObject * args) { - PyObject *r = 0; + PyObject *r = 0; - if ((r=PyObject_CallObject(cls, args))) return r; + if ((r = PyObject_CallObject(cls, args))) + return r; - { - PyObject *tp, *v, *tb, *tmp_value; + { + PyObject *tp, *v, *tb, *tmp_value; - PyErr_Fetch(&tp, &v, &tb); - tmp_value = v; - /* NULL occurs when there was a KeyboardInterrupt */ - if (tmp_value == NULL) - tmp_value = Py_None; - if ((r = PyTuple_Pack(3, tmp_value, cls, args))) { - Py_XDECREF(v); - v=r; - } - PyErr_Restore(tp,v,tb); + PyErr_Fetch(&tp, &v, &tb); + tmp_value = v; + /* NULL occurs when there was a KeyboardInterrupt */ + if (tmp_value == NULL) + tmp_value = Py_None; + if ((r = PyTuple_Pack(3, tmp_value, cls, args))) { + Py_XDECREF(v); + v = r; } - return NULL; + PyErr_Restore(tp, v, tb); + } + return NULL; } static int -load_obj(Unpicklerobject *self) +load_obj(Unpicklerobject * self) { - PyObject *class, *tup, *obj=0; - int i; + PyObject *class, *tup, *obj = 0; + int i; - if ((i = marker(self)) < 0) return -1; - if (!( tup=Pdata_popTuple(self->stack, i+1))) return -1; - PDATA_POP(self->stack, class); - if (class) { - obj = Instance_New(class, tup); - Py_DECREF(class); - } - Py_DECREF(tup); + if ((i = marker(self)) < 0) + return -1; + if (!(tup = Pdata_popTuple(self->stack, i + 1))) + return -1; + PDATA_POP(self->stack, class); + if (class) { + obj = Instance_New(class, tup); + Py_DECREF(class); + } + Py_DECREF(tup); - if (! 
obj) return -1; - PDATA_PUSH(self->stack, obj, -1); - return 0; + if (!obj) + return -1; + PDATA_PUSH(self->stack, obj, -1); + return 0; } static int -load_inst(Unpicklerobject *self) +load_inst(Unpicklerobject * self) { - PyObject *tup, *class=0, *obj=0, *module_name, *class_name; - int i, len; - char *s; - - if ((i = marker(self)) < 0) return -1; - - if ((len = self->readline_func(self, &s)) < 0) return -1; - if (len < 2) return bad_readline(); - module_name = PyString_FromStringAndSize(s, len - 1); - if (!module_name) return -1; - - if ((len = self->readline_func(self, &s)) >= 0) { - if (len < 2) return bad_readline(); - if ((class_name = PyString_FromStringAndSize(s, len - 1))) { - class = find_class(module_name, class_name, - self->find_class); - Py_DECREF(class_name); - } - } - Py_DECREF(module_name); + PyObject *tup, *class = 0, *obj = 0, *module_name, *class_name; + int i, len; + char *s; - if (! class) return -1; + if ((i = marker(self)) < 0) + return -1; + + if ((len = self->readline_func(self, &s)) < 0) + return -1; + if (len < 2) + return bad_readline(); + module_name = PyString_FromStringAndSize(s, len - 1); + if (!module_name) + return -1; - if ((tup=Pdata_popTuple(self->stack, i))) { - obj = Instance_New(class, tup); - Py_DECREF(tup); + if ((len = self->readline_func(self, &s)) >= 0) { + if (len < 2) + return bad_readline(); + if ((class_name = PyString_FromStringAndSize(s, len - 1))) { + class = find_class(module_name, class_name, self->find_class); + Py_DECREF(class_name); } - Py_DECREF(class); + } + Py_DECREF(module_name); - if (! 
obj) return -1; + if (!class) + return -1; - PDATA_PUSH(self->stack, obj, -1); - return 0; + if ((tup = Pdata_popTuple(self->stack, i))) { + obj = Instance_New(class, tup); + Py_DECREF(tup); + } + Py_DECREF(class); + + if (!obj) + return -1; + + PDATA_PUSH(self->stack, obj, -1); + return 0; } static int -load_newobj(Unpicklerobject *self) +load_newobj(Unpicklerobject * self) { - PyObject *args = NULL; - PyObject *clsraw = NULL; - PyTypeObject *cls; /* clsraw cast to its true type */ - PyObject *obj; - - /* Stack is ... cls argtuple, and we want to call - * cls.__new__(cls, *argtuple). - */ - PDATA_POP(self->stack, args); - if (args == NULL) goto Fail; - if (! PyTuple_Check(args)) { - PyErr_SetString(UnpicklingError, "NEWOBJ expected an arg " - "tuple."); - goto Fail; - } - - PDATA_POP(self->stack, clsraw); - cls = (PyTypeObject *)clsraw; - if (cls == NULL) goto Fail; - if (! PyType_Check(cls)) { - PyErr_SetString(UnpicklingError, "NEWOBJ class argument " - "isn't a type object"); - goto Fail; - } - if (cls->tp_new == NULL) { - PyErr_SetString(UnpicklingError, "NEWOBJ class argument " - "has NULL tp_new"); - goto Fail; - } - - /* Call __new__. */ - obj = cls->tp_new(cls, args, NULL); - if (obj == NULL) goto Fail; - - Py_DECREF(args); - Py_DECREF(clsraw); - PDATA_PUSH(self->stack, obj, -1); - return 0; - - Fail: - Py_XDECREF(args); - Py_XDECREF(clsraw); - return -1; + PyObject *args = NULL; + PyObject *clsraw = NULL; + PyTypeObject *cls; /* clsraw cast to its true type */ + PyObject *obj; + + /* Stack is ... cls argtuple, and we want to call + * cls.__new__(cls, *argtuple). 
+ */ + PDATA_POP(self->stack, args); + if (args == NULL) + goto Fail; + if (!PyTuple_Check(args)) { + PyErr_SetString(UnpicklingError, "NEWOBJ expected an arg " "tuple."); + goto Fail; + } + + PDATA_POP(self->stack, clsraw); + cls = (PyTypeObject *) clsraw; + if (cls == NULL) + goto Fail; + if (!PyType_Check(cls)) { + PyErr_SetString(UnpicklingError, "NEWOBJ class argument " + "isn't a type object"); + goto Fail; + } + if (cls->tp_new == NULL) { + PyErr_SetString(UnpicklingError, "NEWOBJ class argument " + "has NULL tp_new"); + goto Fail; + } + + /* Call __new__. */ + obj = cls->tp_new(cls, args, NULL); + if (obj == NULL) + goto Fail; + + Py_DECREF(args); + Py_DECREF(clsraw); + PDATA_PUSH(self->stack, obj, -1); + return 0; + + Fail: + Py_XDECREF(args); + Py_XDECREF(clsraw); + return -1; } static int -load_global(Unpicklerobject *self) -{ - PyObject *class = 0, *module_name = 0, *class_name = 0; - int len; - char *s; - - if ((len = self->readline_func(self, &s)) < 0) return -1; - if (len < 2) return bad_readline(); - module_name = PyString_FromStringAndSize(s, len - 1); - if (!module_name) return -1; - - if ((len = self->readline_func(self, &s)) >= 0) { - if (len < 2) { - Py_DECREF(module_name); - return bad_readline(); - } - if ((class_name = PyString_FromStringAndSize(s, len - 1))) { - class = find_class(module_name, class_name, - self->find_class); - Py_DECREF(class_name); - } +load_global(Unpicklerobject * self) +{ + PyObject *class = 0, *module_name = 0, *class_name = 0; + int len; + char *s; + + if ((len = self->readline_func(self, &s)) < 0) + return -1; + if (len < 2) + return bad_readline(); + module_name = PyString_FromStringAndSize(s, len - 1); + if (!module_name) + return -1; + + if ((len = self->readline_func(self, &s)) >= 0) { + if (len < 2) { + Py_DECREF(module_name); + return bad_readline(); + } + if ((class_name = PyString_FromStringAndSize(s, len - 1))) { + class = find_class(module_name, class_name, self->find_class); + Py_DECREF(class_name); } - 
Py_DECREF(module_name); + } + Py_DECREF(module_name); - if (! class) return -1; - PDATA_PUSH(self->stack, class, -1); - return 0; + if (!class) + return -1; + PDATA_PUSH(self->stack, class, -1); + return 0; } static int -load_persid(Unpicklerobject *self) +load_persid(Unpicklerobject * self) { - PyObject *pid = 0; - int len; - char *s; - - if (self->pers_func) { - if ((len = self->readline_func(self, &s)) < 0) return -1; - if (len < 2) return bad_readline(); - - pid = PyString_FromStringAndSize(s, len - 1); - if (!pid) return -1; - - if (PyList_Check(self->pers_func)) { - if (PyList_Append(self->pers_func, pid) < 0) { - Py_DECREF(pid); - return -1; - } - } - else { - ARG_TUP(self, pid); - if (self->arg) { - pid = PyObject_Call(self->pers_func, self->arg, - NULL); - FREE_ARG_TUP(self); - } - } + PyObject *pid = 0; + int len; + char *s; - if (! pid) return -1; + if (self->pers_func) { + if ((len = self->readline_func(self, &s)) < 0) + return -1; + if (len < 2) + return bad_readline(); - PDATA_PUSH(self->stack, pid, -1); - return 0; + pid = PyString_FromStringAndSize(s, len - 1); + if (!pid) + return -1; + + if (PyList_Check(self->pers_func)) { + if (PyList_Append(self->pers_func, pid) < 0) { + Py_DECREF(pid); + return -1; + } } else { - PyErr_SetString(UnpicklingError, - "A load persistent id instruction was encountered,\n" - "but no persistent_load function was specified."); - return -1; + ARG_TUP(self, pid); + if (self->arg) { + pid = PyObject_Call(self->pers_func, self->arg, NULL); + FREE_ARG_TUP(self); + } } + + if (!pid) + return -1; + + PDATA_PUSH(self->stack, pid, -1); + return 0; + } + else { + PyErr_SetString(UnpicklingError, + "A load persistent id instruction was encountered,\n" + "but no persistent_load function was specified."); + return -1; + } } static int -load_binpersid(Unpicklerobject *self) +load_binpersid(Unpicklerobject * self) { - PyObject *pid = 0; + PyObject *pid = 0; - if (self->pers_func) { - PDATA_POP(self->stack, pid); - if (! 
pid) return -1; - - if (PyList_Check(self->pers_func)) { - if (PyList_Append(self->pers_func, pid) < 0) { - Py_DECREF(pid); - return -1; - } - } - else { - ARG_TUP(self, pid); - if (self->arg) { - pid = PyObject_Call(self->pers_func, self->arg, - NULL); - FREE_ARG_TUP(self); - } - if (! pid) return -1; - } + if (self->pers_func) { + PDATA_POP(self->stack, pid); + if (!pid) + return -1; - PDATA_PUSH(self->stack, pid, -1); - return 0; + if (PyList_Check(self->pers_func)) { + if (PyList_Append(self->pers_func, pid) < 0) { + Py_DECREF(pid); + return -1; + } } else { - PyErr_SetString(UnpicklingError, - "A load persistent id instruction was encountered,\n" - "but no persistent_load function was specified."); + ARG_TUP(self, pid); + if (self->arg) { + pid = PyObject_Call(self->pers_func, self->arg, NULL); + FREE_ARG_TUP(self); + } + if (!pid) return -1; } + + PDATA_PUSH(self->stack, pid, -1); + return 0; + } + else { + PyErr_SetString(UnpicklingError, + "A load persistent id instruction was encountered,\n" + "but no persistent_load function was specified."); + return -1; + } } static int -load_pop(Unpicklerobject *self) +load_pop(Unpicklerobject * self) { - int len; + int len; - if (!( (len=self->stack->length) > 0 )) return stackUnderflow(); + if (!((len = self->stack->length) > 0)) + return stackUnderflow(); - /* Note that we split the (pickle.py) stack into two stacks, - an object stack and a mark stack. We have to be clever and - pop the right one. We do this by looking at the top of the - mark stack. - */ + /* Note that we split the (pickle.py) stack into two stacks, + * an object stack and a mark stack. We have to be clever and + * pop the right one. We do this by looking at the top of the + * mark stack. 
+ */ - if ((self->num_marks > 0) && - (self->marks[self->num_marks - 1] == len)) - self->num_marks--; - else { - len--; - Py_DECREF(self->stack->data[len]); - self->stack->length=len; - } + if ((self->num_marks > 0) && (self->marks[self->num_marks - 1] == len)) + self->num_marks--; + else { + len--; + Py_DECREF(self->stack->data[len]); + self->stack->length = len; + } - return 0; + return 0; } static int -load_pop_mark(Unpicklerobject *self) +load_pop_mark(Unpicklerobject * self) { - int i; + int i; - if ((i = marker(self)) < 0) - return -1; + if ((i = marker(self)) < 0) + return -1; - Pdata_clear(self->stack, i); + Pdata_clear(self->stack, i); - return 0; + return 0; } static int -load_dup(Unpicklerobject *self) +load_dup(Unpicklerobject * self) { - PyObject *last; - int len; + PyObject *last; + int len; - if ((len = self->stack->length) <= 0) return stackUnderflow(); - last=self->stack->data[len-1]; - Py_INCREF(last); - PDATA_PUSH(self->stack, last, -1); - return 0; + if ((len = self->stack->length) <= 0) + return stackUnderflow(); + last = self->stack->data[len - 1]; + Py_INCREF(last); + PDATA_PUSH(self->stack, last, -1); + return 0; } static int -load_get(Unpicklerobject *self) +load_get(Unpicklerobject * self) { - PyObject *py_str = 0, *value = 0; - int len; - char *s; - int rc; - - if ((len = self->readline_func(self, &s)) < 0) return -1; - if (len < 2) return bad_readline(); - - if (!( py_str = PyString_FromStringAndSize(s, len - 1))) return -1; - - value = PyDict_GetItem(self->memo, py_str); - if (! 
value) { - PyErr_SetObject(BadPickleGet, py_str); - rc = -1; - } - else { - PDATA_APPEND(self->stack, value, -1); - rc = 0; - } + PyObject *py_str = 0, *value = 0; + int len; + char *s; + int rc; + + if ((len = self->readline_func(self, &s)) < 0) + return -1; + if (len < 2) + return bad_readline(); + + if (!(py_str = PyString_FromStringAndSize(s, len - 1))) + return -1; + + value = PyDict_GetItem(self->memo, py_str); + if (!value) { + PyErr_SetObject(BadPickleGet, py_str); + rc = -1; + } + else { + PDATA_APPEND(self->stack, value, -1); + rc = 0; + } - Py_DECREF(py_str); - return rc; + Py_DECREF(py_str); + return rc; } static int -load_binget(Unpicklerobject *self) +load_binget(Unpicklerobject * self) { - PyObject *py_key = 0, *value = 0; - unsigned char key; - char *s; - int rc; + PyObject *py_key = 0, *value = 0; + unsigned char key; + char *s; + int rc; - if (self->read_func(self, &s, 1) < 0) return -1; + if (self->read_func(self, &s, 1) < 0) + return -1; - key = (unsigned char)s[0]; - if (!( py_key = PyInt_FromLong((long)key))) return -1; + key = (unsigned char) s[0]; + if (!(py_key = PyInt_FromLong((long) key))) + return -1; - value = PyDict_GetItem(self->memo, py_key); - if (! 
value) { - PyErr_SetObject(BadPickleGet, py_key); - rc = -1; - } - else { - PDATA_APPEND(self->stack, value, -1); - rc = 0; - } + value = PyDict_GetItem(self->memo, py_key); + if (!value) { + PyErr_SetObject(BadPickleGet, py_key); + rc = -1; + } + else { + PDATA_APPEND(self->stack, value, -1); + rc = 0; + } - Py_DECREF(py_key); - return rc; + Py_DECREF(py_key); + return rc; } static int -load_long_binget(Unpicklerobject *self) +load_long_binget(Unpicklerobject * self) { - PyObject *py_key = 0, *value = 0; - unsigned char c; - char *s; - long key; - int rc; + PyObject *py_key = 0, *value = 0; + unsigned char c; + char *s; + long key; + int rc; - if (self->read_func(self, &s, 4) < 0) return -1; + if (self->read_func(self, &s, 4) < 0) + return -1; - c = (unsigned char)s[0]; - key = (long)c; - c = (unsigned char)s[1]; - key |= (long)c << 8; - c = (unsigned char)s[2]; - key |= (long)c << 16; - c = (unsigned char)s[3]; - key |= (long)c << 24; + c = (unsigned char) s[0]; + key = (long) c; + c = (unsigned char) s[1]; + key |= (long) c << 8; + c = (unsigned char) s[2]; + key |= (long) c << 16; + c = (unsigned char) s[3]; + key |= (long) c << 24; - if (!( py_key = PyInt_FromLong((long)key))) return -1; + if (!(py_key = PyInt_FromLong((long) key))) + return -1; - value = PyDict_GetItem(self->memo, py_key); - if (! value) { - PyErr_SetObject(BadPickleGet, py_key); - rc = -1; - } - else { - PDATA_APPEND(self->stack, value, -1); - rc = 0; - } + value = PyDict_GetItem(self->memo, py_key); + if (!value) { + PyErr_SetObject(BadPickleGet, py_key); + rc = -1; + } + else { + PDATA_APPEND(self->stack, value, -1); + rc = 0; + } - Py_DECREF(py_key); - return rc; + Py_DECREF(py_key); + return rc; } /* Push an object from the extension registry (EXT[124]). nbytes is * the number of bytes following the opcode, holding the index (code) value. 
*/ static int -load_extension(Unpicklerobject *self, int nbytes) +load_extension(Unpicklerobject * self, int nbytes) { - char *codebytes; /* the nbytes bytes after the opcode */ - long code; /* calc_binint returns long */ - PyObject *py_code; /* code as a Python int */ - PyObject *obj; /* the object to push */ - PyObject *pair; /* (module_name, class_name) */ - PyObject *module_name, *class_name; - - assert(nbytes == 1 || nbytes == 2 || nbytes == 4); - if (self->read_func(self, &codebytes, nbytes) < 0) return -1; - code = calc_binint(codebytes, nbytes); - if (code <= 0) { /* note that 0 is forbidden */ - /* Corrupt or hostile pickle. */ - PyErr_SetString(UnpicklingError, "EXT specifies code <= 0"); - return -1; - } + char *codebytes; /* the nbytes bytes after the opcode */ + long code; /* calc_binint returns long */ + PyObject *py_code; /* code as a Python int */ + PyObject *obj; /* the object to push */ + PyObject *pair; /* (module_name, class_name) */ + PyObject *module_name, *class_name; - /* Look for the code in the cache. */ - py_code = PyInt_FromLong(code); - if (py_code == NULL) return -1; - obj = PyDict_GetItem(extension_cache, py_code); - if (obj != NULL) { - /* Bingo. */ - Py_DECREF(py_code); - PDATA_APPEND(self->stack, obj, -1); - return 0; - } + assert(nbytes == 1 || nbytes == 2 || nbytes == 4); + if (self->read_func(self, &codebytes, nbytes) < 0) + return -1; + code = calc_binint(codebytes, nbytes); + if (code <= 0) { /* note that 0 is forbidden */ + /* Corrupt or hostile pickle. */ + PyErr_SetString(UnpicklingError, "EXT specifies code <= 0"); + return -1; + } - /* Look up the (module_name, class_name) pair. */ - pair = PyDict_GetItem(inverted_registry, py_code); - if (pair == NULL) { - Py_DECREF(py_code); - PyErr_Format(PyExc_ValueError, "unregistered extension " - "code %ld", code); - return -1; - } - /* Since the extension registry is manipulable via Python code, - * confirm that pair is really a 2-tuple of strings. 
- */ - if (!PyTuple_Check(pair) || PyTuple_Size(pair) != 2 || - !PyString_Check(module_name = PyTuple_GET_ITEM(pair, 0)) || - !PyString_Check(class_name = PyTuple_GET_ITEM(pair, 1))) { - Py_DECREF(py_code); - PyErr_Format(PyExc_ValueError, "_inverted_registry[%ld] " - "isn't a 2-tuple of strings", code); - return -1; - } - /* Load the object. */ - obj = find_class(module_name, class_name, self->find_class); - if (obj == NULL) { - Py_DECREF(py_code); - return -1; - } - /* Cache code -> obj. */ - code = PyDict_SetItem(extension_cache, py_code, obj); + /* Look for the code in the cache. */ + py_code = PyInt_FromLong(code); + if (py_code == NULL) + return -1; + obj = PyDict_GetItem(extension_cache, py_code); + if (obj != NULL) { + /* Bingo. */ Py_DECREF(py_code); - if (code < 0) { - Py_DECREF(obj); - return -1; - } - PDATA_PUSH(self->stack, obj, -1); + PDATA_APPEND(self->stack, obj, -1); return 0; + } + + /* Look up the (module_name, class_name) pair. */ + pair = PyDict_GetItem(inverted_registry, py_code); + if (pair == NULL) { + Py_DECREF(py_code); + PyErr_Format(PyExc_ValueError, "unregistered extension " + "code %ld", code); + return -1; + } + /* Since the extension registry is manipulable via Python code, + * confirm that pair is really a 2-tuple of strings. + */ + if (!PyTuple_Check(pair) || PyTuple_Size(pair) != 2 || + !PyString_Check(module_name = PyTuple_GET_ITEM(pair, 0)) || + !PyString_Check(class_name = PyTuple_GET_ITEM(pair, 1))) { + Py_DECREF(py_code); + PyErr_Format(PyExc_ValueError, "_inverted_registry[%ld] " + "isn't a 2-tuple of strings", code); + return -1; + } + /* Load the object. */ + obj = find_class(module_name, class_name, self->find_class); + if (obj == NULL) { + Py_DECREF(py_code); + return -1; + } + /* Cache code -> obj. 
*/ + code = PyDict_SetItem(extension_cache, py_code, obj); + Py_DECREF(py_code); + if (code < 0) { + Py_DECREF(obj); + return -1; + } + PDATA_PUSH(self->stack, obj, -1); + return 0; } static int -load_put(Unpicklerobject *self) +load_put(Unpicklerobject * self) { - PyObject *py_str = 0, *value = 0; - int len, l; - char *s; + PyObject *py_str = 0, *value = 0; + int len, l; + char *s; - if ((l = self->readline_func(self, &s)) < 0) return -1; - if (l < 2) return bad_readline(); - if (!( len=self->stack->length )) return stackUnderflow(); - if (!( py_str = PyString_FromStringAndSize(s, l - 1))) return -1; - value=self->stack->data[len-1]; - l=PyDict_SetItem(self->memo, py_str, value); - Py_DECREF(py_str); - return l; + if ((l = self->readline_func(self, &s)) < 0) + return -1; + if (l < 2) + return bad_readline(); + if (!(len = self->stack->length)) + return stackUnderflow(); + if (!(py_str = PyString_FromStringAndSize(s, l - 1))) + return -1; + value = self->stack->data[len - 1]; + l = PyDict_SetItem(self->memo, py_str, value); + Py_DECREF(py_str); + return l; } static int -load_binput(Unpicklerobject *self) +load_binput(Unpicklerobject * self) { - PyObject *py_key = 0, *value = 0; - unsigned char key; - char *s; - int len; + PyObject *py_key = 0, *value = 0; + unsigned char key; + char *s; + int len; - if (self->read_func(self, &s, 1) < 0) return -1; - if (!( (len=self->stack->length) > 0 )) return stackUnderflow(); + if (self->read_func(self, &s, 1) < 0) + return -1; + if (!((len = self->stack->length) > 0)) + return stackUnderflow(); - key = (unsigned char)s[0]; + key = (unsigned char) s[0]; - if (!( py_key = PyInt_FromLong((long)key))) return -1; - value=self->stack->data[len-1]; - len=PyDict_SetItem(self->memo, py_key, value); - Py_DECREF(py_key); - return len; + if (!(py_key = PyInt_FromLong((long) key))) + return -1; + value = self->stack->data[len - 1]; + len = PyDict_SetItem(self->memo, py_key, value); + Py_DECREF(py_key); + return len; } static int 
-load_long_binput(Unpicklerobject *self) +load_long_binput(Unpicklerobject * self) { - PyObject *py_key = 0, *value = 0; - long key; - unsigned char c; - char *s; - int len; + PyObject *py_key = 0, *value = 0; + long key; + unsigned char c; + char *s; + int len; - if (self->read_func(self, &s, 4) < 0) return -1; - if (!( len=self->stack->length )) return stackUnderflow(); + if (self->read_func(self, &s, 4) < 0) + return -1; + if (!(len = self->stack->length)) + return stackUnderflow(); - c = (unsigned char)s[0]; - key = (long)c; - c = (unsigned char)s[1]; - key |= (long)c << 8; - c = (unsigned char)s[2]; - key |= (long)c << 16; - c = (unsigned char)s[3]; - key |= (long)c << 24; + c = (unsigned char) s[0]; + key = (long) c; + c = (unsigned char) s[1]; + key |= (long) c << 8; + c = (unsigned char) s[2]; + key |= (long) c << 16; + c = (unsigned char) s[3]; + key |= (long) c << 24; - if (!( py_key = PyInt_FromLong(key))) return -1; - value=self->stack->data[len-1]; - len=PyDict_SetItem(self->memo, py_key, value); - Py_DECREF(py_key); - return len; + if (!(py_key = PyInt_FromLong(key))) + return -1; + value = self->stack->data[len - 1]; + len = PyDict_SetItem(self->memo, py_key, value); + Py_DECREF(py_key); + return len; } static int -do_append(Unpicklerobject *self, int x) +do_append(Unpicklerobject * self, int x) { - PyObject *value = 0, *list = 0, *append_method = 0; - int len, i; + PyObject *value = 0, *list = 0, *append_method = 0; + int len, i; - len=self->stack->length; - if (!( len >= x && x > 0 )) return stackUnderflow(); - /* nothing to do */ - if (len==x) return 0; + len = self->stack->length; + if (!(len >= x && x > 0)) + return stackUnderflow(); + /* nothing to do */ + if (len == x) + return 0; - list=self->stack->data[x-1]; + list = self->stack->data[x - 1]; - if (PyList_Check(list)) { - PyObject *slice; - int list_len; + if (PyList_Check(list)) { + PyObject *slice; + int list_len; - slice=Pdata_popList(self->stack, x); - if (! 
slice) return -1; - list_len = PyList_GET_SIZE(list); - i=PyList_SetSlice(list, list_len, list_len, slice); - Py_DECREF(slice); - return i; - } - else { + slice = Pdata_popList(self->stack, x); + if (!slice) + return -1; + list_len = PyList_GET_SIZE(list); + i = PyList_SetSlice(list, list_len, list_len, slice); + Py_DECREF(slice); + return i; + } + else { - if (!( append_method = PyObject_GetAttr(list, append_str))) - return -1; + if (!(append_method = PyObject_GetAttr(list, append_str))) + return -1; - for (i = x; i < len; i++) { - PyObject *junk; + for (i = x; i < len; i++) { + PyObject *junk; - value=self->stack->data[i]; - junk=0; - ARG_TUP(self, value); - if (self->arg) { - junk = PyObject_Call(append_method, self->arg, - NULL); - FREE_ARG_TUP(self); - } - if (! junk) { - Pdata_clear(self->stack, i+1); - self->stack->length=x; - Py_DECREF(append_method); - return -1; - } - Py_DECREF(junk); - } - self->stack->length=x; + value = self->stack->data[i]; + junk = 0; + ARG_TUP(self, value); + if (self->arg) { + junk = PyObject_Call(append_method, self->arg, NULL); + FREE_ARG_TUP(self); + } + if (!junk) { + Pdata_clear(self->stack, i + 1); + self->stack->length = x; Py_DECREF(append_method); + return -1; + } + Py_DECREF(junk); } + self->stack->length = x; + Py_DECREF(append_method); + } - return 0; + return 0; } static int -load_append(Unpicklerobject *self) +load_append(Unpicklerobject * self) { - return do_append(self, self->stack->length - 1); + return do_append(self, self->stack->length - 1); } static int -load_appends(Unpicklerobject *self) +load_appends(Unpicklerobject * self) { - return do_append(self, marker(self)); + return do_append(self, marker(self)); } static int -do_setitems(Unpicklerobject *self, int x) +do_setitems(Unpicklerobject * self, int x) { - PyObject *value = 0, *key = 0, *dict = 0; - int len, i, r=0; + PyObject *value = 0, *key = 0, *dict = 0; + int len, i, r = 0; - if (!( (len=self->stack->length) >= x - && x > 0 )) return stackUnderflow(); 
+ if (!((len = self->stack->length) >= x && x > 0)) + return stackUnderflow(); - dict=self->stack->data[x-1]; + dict = self->stack->data[x - 1]; - for (i = x+1; i < len; i += 2) { - key =self->stack->data[i-1]; - value=self->stack->data[i ]; - if (PyObject_SetItem(dict, key, value) < 0) { - r=-1; - break; - } + for (i = x + 1; i < len; i += 2) { + key = self->stack->data[i - 1]; + value = self->stack->data[i]; + if (PyObject_SetItem(dict, key, value) < 0) { + r = -1; + break; } + } - Pdata_clear(self->stack, x); + Pdata_clear(self->stack, x); - return r; + return r; } static int -load_setitem(Unpicklerobject *self) +load_setitem(Unpicklerobject * self) { - return do_setitems(self, self->stack->length - 2); + return do_setitems(self, self->stack->length - 2); } static int -load_setitems(Unpicklerobject *self) +load_setitems(Unpicklerobject * self) { - return do_setitems(self, marker(self)); + return do_setitems(self, marker(self)); } static int -load_build(Unpicklerobject *self) +load_build(Unpicklerobject * self) { - PyObject *state, *inst, *slotstate; - PyObject *__setstate__; - PyObject *d_key, *d_value; - Py_ssize_t i; - int res = -1; + PyObject *state, *inst, *slotstate; + PyObject *__setstate__; + PyObject *d_key, *d_value; + Py_ssize_t i; + int res = -1; - /* Stack is ... instance, state. We want to leave instance at - * the stack top, possibly mutated via instance.__setstate__(state). - */ - if (self->stack->length < 2) - return stackUnderflow(); - PDATA_POP(self->stack, state); - if (state == NULL) - return -1; - inst = self->stack->data[self->stack->length - 1]; + /* Stack is ... instance, state. We want to leave instance at + * the stack top, possibly mutated via instance.__setstate__(state). 
+ */ + if (self->stack->length < 2) + return stackUnderflow(); + PDATA_POP(self->stack, state); + if (state == NULL) + return -1; + inst = self->stack->data[self->stack->length - 1]; - __setstate__ = PyObject_GetAttr(inst, __setstate___str); - if (__setstate__ != NULL) { - PyObject *junk = NULL; + __setstate__ = PyObject_GetAttr(inst, __setstate___str); + if (__setstate__ != NULL) { + PyObject *junk = NULL; - /* The explicit __setstate__ is responsible for everything. */ - ARG_TUP(self, state); - if (self->arg) { - junk = PyObject_Call(__setstate__, self->arg, NULL); - FREE_ARG_TUP(self); - } - Py_DECREF(__setstate__); - if (junk == NULL) - return -1; - Py_DECREF(junk); - return 0; + /* The explicit __setstate__ is responsible for everything. */ + ARG_TUP(self, state); + if (self->arg) { + junk = PyObject_Call(__setstate__, self->arg, NULL); + FREE_ARG_TUP(self); } - if (!PyErr_ExceptionMatches(PyExc_AttributeError)) - return -1; - PyErr_Clear(); + Py_DECREF(__setstate__); + if (junk == NULL) + return -1; + Py_DECREF(junk); + return 0; + } + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) + return -1; + PyErr_Clear(); - /* A default __setstate__. First see whether state embeds a - * slot state dict too (a proto 2 addition). - */ - if (PyTuple_Check(state) && PyTuple_Size(state) == 2) { - PyObject *temp = state; - state = PyTuple_GET_ITEM(temp, 0); - slotstate = PyTuple_GET_ITEM(temp, 1); - Py_INCREF(state); - Py_INCREF(slotstate); - Py_DECREF(temp); - } - else - slotstate = NULL; + /* A default __setstate__. First see whether state embeds a + * slot state dict too (a proto 2 addition). + */ + if (PyTuple_Check(state) && PyTuple_Size(state) == 2) { + PyObject *temp = state; + state = PyTuple_GET_ITEM(temp, 0); + slotstate = PyTuple_GET_ITEM(temp, 1); + Py_INCREF(state); + Py_INCREF(slotstate); + Py_DECREF(temp); + } + else + slotstate = NULL; - /* Set inst.__dict__ from the state dict (if any). */ - if (state != Py_None) { - PyObject *dict; - if (! 
PyDict_Check(state)) { - PyErr_SetString(UnpicklingError, "state is not a " - "dictionary"); - goto finally; - } - dict = PyObject_GetAttr(inst, __dict___str); - if (dict == NULL) - goto finally; - - i = 0; - while (PyDict_Next(state, &i, &d_key, &d_value)) { - if (PyObject_SetItem(dict, d_key, d_value) < 0) - goto finally; - } - Py_DECREF(dict); + /* Set inst.__dict__ from the state dict (if any). */ + if (state != Py_None) { + PyObject *dict; + if (!PyDict_Check(state)) { + PyErr_SetString(UnpicklingError, "state is not a " "dictionary"); + goto finally; + } + dict = PyObject_GetAttr(inst, __dict___str); + if (dict == NULL) + goto finally; + + i = 0; + while (PyDict_Next(state, &i, &d_key, &d_value)) { + if (PyObject_SetItem(dict, d_key, d_value) < 0) + goto finally; } + Py_DECREF(dict); + } - /* Also set instance attributes from the slotstate dict (if any). */ - if (slotstate != NULL) { - if (! PyDict_Check(slotstate)) { - PyErr_SetString(UnpicklingError, "slot state is not " - "a dictionary"); - goto finally; - } - i = 0; - while (PyDict_Next(slotstate, &i, &d_key, &d_value)) { - if (PyObject_SetAttr(inst, d_key, d_value) < 0) - goto finally; - } + /* Also set instance attributes from the slotstate dict (if any). */ + if (slotstate != NULL) { + if (!PyDict_Check(slotstate)) { + PyErr_SetString(UnpicklingError, "slot state is not " + "a dictionary"); + goto finally; } - res = 0; + i = 0; + while (PyDict_Next(slotstate, &i, &d_key, &d_value)) { + if (PyObject_SetAttr(inst, d_key, d_value) < 0) + goto finally; + } + } + res = 0; finally: - Py_DECREF(state); - Py_XDECREF(slotstate); - return res; + Py_DECREF(state); + Py_XDECREF(slotstate); + return res; } static int -load_mark(Unpicklerobject *self) +load_mark(Unpicklerobject * self) { - int s; + int s; - /* Note that we split the (pickle.py) stack into two stacks, an - object stack and a mark stack. Here we push a mark onto the - mark stack. 
- */ + /* Note that we split the (pickle.py) stack into two stacks, an + * object stack and a mark stack. Here we push a mark onto the + * mark stack. + */ - if ((self->num_marks + 1) >= self->marks_size) { - int *marks; - s=self->marks_size+20; - if (s <= self->num_marks) s=self->num_marks + 1; - if (self->marks == NULL) - marks=(int *)malloc(s * sizeof(int)); - else - marks=(int *)realloc(self->marks, - s * sizeof(int)); - if (!marks) { - PyErr_NoMemory(); - return -1; - } - self->marks = marks; - self->marks_size = s; + if ((self->num_marks + 1) >= self->marks_size) { + int *marks; + s = self->marks_size + 20; + if (s <= self->num_marks) + s = self->num_marks + 1; + if (self->marks == NULL) + marks = (int *) malloc(s * sizeof(int)); + else + marks = (int *) realloc(self->marks, s * sizeof(int)); + if (!marks) { + PyErr_NoMemory(); + return -1; } + self->marks = marks; + self->marks_size = s; + } - self->marks[self->num_marks++] = self->stack->length; + self->marks[self->num_marks++] = self->stack->length; - return 0; + return 0; } static int -load_reduce(Unpicklerobject *self) +load_reduce(Unpicklerobject * self) { - PyObject *callable = 0, *arg_tup = 0, *ob = 0; + PyObject *callable = 0, *arg_tup = 0, *ob = 0; - PDATA_POP(self->stack, arg_tup); - if (! arg_tup) return -1; - PDATA_POP(self->stack, callable); - if (callable) { - ob = Instance_New(callable, arg_tup); - Py_DECREF(callable); - } - Py_DECREF(arg_tup); + PDATA_POP(self->stack, arg_tup); + if (!arg_tup) + return -1; + PDATA_POP(self->stack, callable); + if (callable) { + ob = Instance_New(callable, arg_tup); + Py_DECREF(callable); + } + Py_DECREF(arg_tup); - if (! ob) return -1; + if (!ob) + return -1; - PDATA_PUSH(self->stack, ob, -1); - return 0; + PDATA_PUSH(self->stack, ob, -1); + return 0; } /* Just raises an error if we don't know the protocol specified. PROTO * is the first opcode for protocols >= 2. 
*/ static int -load_proto(Unpicklerobject *self) +load_proto(Unpicklerobject * self) { - int i; - char *protobyte; + int i; + char *protobyte; - i = self->read_func(self, &protobyte, 1); - if (i < 0) - return -1; + i = self->read_func(self, &protobyte, 1); + if (i < 0) + return -1; - i = calc_binint(protobyte, 1); - /* No point checking for < 0, since calc_binint returns an unsigned - * int when chewing on 1 byte. - */ - assert(i >= 0); - if (i <= HIGHEST_PROTOCOL) - return 0; + i = calc_binint(protobyte, 1); + /* No point checking for < 0, since calc_binint returns an unsigned + * int when chewing on 1 byte. + */ + assert(i >= 0); + if (i <= HIGHEST_PROTOCOL) + return 0; + + PyErr_Format(PyExc_ValueError, "unsupported pickle protocol: %d", i); + return -1; +} + +static PyObject * +load(Unpicklerobject * self) +{ + PyObject *err = 0, *val = 0; + char *s; + + self->num_marks = 0; + if (self->stack->length) + Pdata_clear(self->stack, 0); + + while (1) { + if (self->read_func(self, &s, 1) < 0) + break; + + switch (s[0]) { + case NONE: + if (load_none(self) < 0) + break; + continue; + + case BININT: + if (load_binint(self) < 0) + break; + continue; + + case BININT1: + if (load_binint1(self) < 0) + break; + continue; + + case BININT2: + if (load_binint2(self) < 0) + break; + continue; + + case INT: + if (load_int(self) < 0) + break; + continue; + + case LONG: + if (load_long(self) < 0) + break; + continue; + + case LONG1: + if (load_counted_long(self, 1) < 0) + break; + continue; + + case LONG4: + if (load_counted_long(self, 4) < 0) + break; + continue; + + case FLOAT: + if (load_float(self) < 0) + break; + continue; + + case BINFLOAT: + if (load_binfloat(self) < 0) + break; + continue; + + case BINSTRING: + if (load_binstring(self) < 0) + break; + continue; + + case SHORT_BINSTRING: + if (load_short_binstring(self) < 0) + break; + continue; + + case STRING: + if (load_string(self) < 0) + break; + continue; + +#ifdef Py_USING_UNICODE + case UNICODE: + if 
(load_unicode(self) < 0) + break; + continue; + + case BINUNICODE: + if (load_binunicode(self) < 0) + break; + continue; +#endif + + case EMPTY_TUPLE: + if (load_counted_tuple(self, 0) < 0) + break; + continue; + + case TUPLE1: + if (load_counted_tuple(self, 1) < 0) + break; + continue; + + case TUPLE2: + if (load_counted_tuple(self, 2) < 0) + break; + continue; + + case TUPLE3: + if (load_counted_tuple(self, 3) < 0) + break; + continue; + + case TUPLE: + if (load_tuple(self) < 0) + break; + continue; + + case EMPTY_LIST: + if (load_empty_list(self) < 0) + break; + continue; + + case LIST: + if (load_list(self) < 0) + break; + continue; + + case EMPTY_DICT: + if (load_empty_dict(self) < 0) + break; + continue; + + case DICT: + if (load_dict(self) < 0) + break; + continue; + + case OBJ: + if (load_obj(self) < 0) + break; + continue; + + case INST: + if (load_inst(self) < 0) + break; + continue; + + case NEWOBJ: + if (load_newobj(self) < 0) + break; + continue; + + case GLOBAL: + if (load_global(self) < 0) + break; + continue; + + case APPEND: + if (load_append(self) < 0) + break; + continue; + + case APPENDS: + if (load_appends(self) < 0) + break; + continue; + + case BUILD: + if (load_build(self) < 0) + break; + continue; + + case DUP: + if (load_dup(self) < 0) + break; + continue; + + case BINGET: + if (load_binget(self) < 0) + break; + continue; + + case LONG_BINGET: + if (load_long_binget(self) < 0) + break; + continue; + + case GET: + if (load_get(self) < 0) + break; + continue; + + case EXT1: + if (load_extension(self, 1) < 0) + break; + continue; + + case EXT2: + if (load_extension(self, 2) < 0) + break; + continue; + + case EXT4: + if (load_extension(self, 4) < 0) + break; + continue; + case MARK: + if (load_mark(self) < 0) + break; + continue; + + case BINPUT: + if (load_binput(self) < 0) + break; + continue; + + case LONG_BINPUT: + if (load_long_binput(self) < 0) + break; + continue; + + case PUT: + if (load_put(self) < 0) + break; + continue; + + case 
POP: + if (load_pop(self) < 0) + break; + continue; + + case POP_MARK: + if (load_pop_mark(self) < 0) + break; + continue; + + case SETITEM: + if (load_setitem(self) < 0) + break; + continue; + + case SETITEMS: + if (load_setitems(self) < 0) + break; + continue; - PyErr_Format(PyExc_ValueError, "unsupported pickle protocol: %d", i); - return -1; -} + case STOP: + break; -static PyObject * -load(Unpicklerobject *self) -{ - PyObject *err = 0, *val = 0; - char *s; + case PERSID: + if (load_persid(self) < 0) + break; + continue; - self->num_marks = 0; - if (self->stack->length) Pdata_clear(self->stack, 0); + case BINPERSID: + if (load_binpersid(self) < 0) + break; + continue; - while (1) { - if (self->read_func(self, &s, 1) < 0) - break; - - switch (s[0]) { - case NONE: - if (load_none(self) < 0) - break; - continue; - - case BININT: - if (load_binint(self) < 0) - break; - continue; - - case BININT1: - if (load_binint1(self) < 0) - break; - continue; - - case BININT2: - if (load_binint2(self) < 0) - break; - continue; - - case INT: - if (load_int(self) < 0) - break; - continue; - - case LONG: - if (load_long(self) < 0) - break; - continue; - - case LONG1: - if (load_counted_long(self, 1) < 0) - break; - continue; - - case LONG4: - if (load_counted_long(self, 4) < 0) - break; - continue; - - case FLOAT: - if (load_float(self) < 0) - break; - continue; - - case BINFLOAT: - if (load_binfloat(self) < 0) - break; - continue; - - case BINSTRING: - if (load_binstring(self) < 0) - break; - continue; - - case SHORT_BINSTRING: - if (load_short_binstring(self) < 0) - break; - continue; - - case STRING: - if (load_string(self) < 0) - break; - continue; + case REDUCE: + if (load_reduce(self) < 0) + break; + continue; -#ifdef Py_USING_UNICODE - case UNICODE: - if (load_unicode(self) < 0) - break; - continue; - - case BINUNICODE: - if (load_binunicode(self) < 0) - break; - continue; -#endif + case PROTO: + if (load_proto(self) < 0) + break; + continue; - case EMPTY_TUPLE: - if 
(load_counted_tuple(self, 0) < 0) - break; - continue; - - case TUPLE1: - if (load_counted_tuple(self, 1) < 0) - break; - continue; - - case TUPLE2: - if (load_counted_tuple(self, 2) < 0) - break; - continue; - - case TUPLE3: - if (load_counted_tuple(self, 3) < 0) - break; - continue; - - case TUPLE: - if (load_tuple(self) < 0) - break; - continue; - - case EMPTY_LIST: - if (load_empty_list(self) < 0) - break; - continue; - - case LIST: - if (load_list(self) < 0) - break; - continue; - - case EMPTY_DICT: - if (load_empty_dict(self) < 0) - break; - continue; - - case DICT: - if (load_dict(self) < 0) - break; - continue; - - case OBJ: - if (load_obj(self) < 0) - break; - continue; - - case INST: - if (load_inst(self) < 0) - break; - continue; - - case NEWOBJ: - if (load_newobj(self) < 0) - break; - continue; - - case GLOBAL: - if (load_global(self) < 0) - break; - continue; - - case APPEND: - if (load_append(self) < 0) - break; - continue; - - case APPENDS: - if (load_appends(self) < 0) - break; - continue; - - case BUILD: - if (load_build(self) < 0) - break; - continue; - - case DUP: - if (load_dup(self) < 0) - break; - continue; - - case BINGET: - if (load_binget(self) < 0) - break; - continue; - - case LONG_BINGET: - if (load_long_binget(self) < 0) - break; - continue; - - case GET: - if (load_get(self) < 0) - break; - continue; - - case EXT1: - if (load_extension(self, 1) < 0) - break; - continue; - - case EXT2: - if (load_extension(self, 2) < 0) - break; - continue; - - case EXT4: - if (load_extension(self, 4) < 0) - break; - continue; - case MARK: - if (load_mark(self) < 0) - break; - continue; - - case BINPUT: - if (load_binput(self) < 0) - break; - continue; - - case LONG_BINPUT: - if (load_long_binput(self) < 0) - break; - continue; - - case PUT: - if (load_put(self) < 0) - break; - continue; - - case POP: - if (load_pop(self) < 0) - break; - continue; - - case POP_MARK: - if (load_pop_mark(self) < 0) - break; - continue; - - case SETITEM: - if 
(load_setitem(self) < 0) - break; - continue; - - case SETITEMS: - if (load_setitems(self) < 0) - break; - continue; - - case STOP: - break; - - case PERSID: - if (load_persid(self) < 0) - break; - continue; - - case BINPERSID: - if (load_binpersid(self) < 0) - break; - continue; - - case REDUCE: - if (load_reduce(self) < 0) - break; - continue; - - case PROTO: - if (load_proto(self) < 0) - break; - continue; - - case NEWTRUE: - if (load_bool(self, Py_True) < 0) - break; - continue; - - case NEWFALSE: - if (load_bool(self, Py_False) < 0) - break; - continue; - - case '\0': - /* end of file */ - PyErr_SetNone(PyExc_EOFError); - break; - - default: - cPickle_ErrFormat(UnpicklingError, - "invalid load key, '%s'.", - "c", s[0]); - return NULL; - } + case NEWTRUE: + if (load_bool(self, Py_True) < 0) + break; + continue; + case NEWFALSE: + if (load_bool(self, Py_False) < 0) break; + continue; + + case '\0': + /* end of file */ + PyErr_SetNone(PyExc_EOFError); + break; + + default: + cPickle_ErrFormat(UnpicklingError, + "invalid load key, '%s'.", "c", s[0]); + return NULL; } - if ((err = PyErr_Occurred())) { - if (err == PyExc_EOFError) { - PyErr_SetNone(PyExc_EOFError); - } - return NULL; + break; + } + + if ((err = PyErr_Occurred())) { + if (err == PyExc_EOFError) { + PyErr_SetNone(PyExc_EOFError); } + return NULL; + } - PDATA_POP(self->stack, val); - return val; + PDATA_POP(self->stack, val); + return val; } @@ -4564,630 +4629,639 @@ find persistent references. 
*/ static int -noload_obj(Unpicklerobject *self) +noload_obj(Unpicklerobject * self) { - int i; + int i; - if ((i = marker(self)) < 0) return -1; - return Pdata_clear(self->stack, i+1); + if ((i = marker(self)) < 0) + return -1; + return Pdata_clear(self->stack, i + 1); } static int -noload_inst(Unpicklerobject *self) +noload_inst(Unpicklerobject * self) { - int i; - char *s; + int i; + char *s; - if ((i = marker(self)) < 0) return -1; - Pdata_clear(self->stack, i); - if (self->readline_func(self, &s) < 0) return -1; - if (self->readline_func(self, &s) < 0) return -1; - PDATA_APPEND(self->stack, Py_None, -1); - return 0; + if ((i = marker(self)) < 0) + return -1; + Pdata_clear(self->stack, i); + if (self->readline_func(self, &s) < 0) + return -1; + if (self->readline_func(self, &s) < 0) + return -1; + PDATA_APPEND(self->stack, Py_None, -1); + return 0; } static int -noload_newobj(Unpicklerobject *self) +noload_newobj(Unpicklerobject * self) { - PyObject *obj; + PyObject *obj; - PDATA_POP(self->stack, obj); /* pop argtuple */ - if (obj == NULL) return -1; - Py_DECREF(obj); + PDATA_POP(self->stack, obj); /* pop argtuple */ + if (obj == NULL) + return -1; + Py_DECREF(obj); - PDATA_POP(self->stack, obj); /* pop cls */ - if (obj == NULL) return -1; - Py_DECREF(obj); + PDATA_POP(self->stack, obj); /* pop cls */ + if (obj == NULL) + return -1; + Py_DECREF(obj); - PDATA_APPEND(self->stack, Py_None, -1); - return 0; + PDATA_APPEND(self->stack, Py_None, -1); + return 0; } static int -noload_global(Unpicklerobject *self) +noload_global(Unpicklerobject * self) { - char *s; + char *s; - if (self->readline_func(self, &s) < 0) return -1; - if (self->readline_func(self, &s) < 0) return -1; - PDATA_APPEND(self->stack, Py_None,-1); - return 0; + if (self->readline_func(self, &s) < 0) + return -1; + if (self->readline_func(self, &s) < 0) + return -1; + PDATA_APPEND(self->stack, Py_None, -1); + return 0; } static int -noload_reduce(Unpicklerobject *self) +noload_reduce(Unpicklerobject 
* self) { - if (self->stack->length < 2) return stackUnderflow(); - Pdata_clear(self->stack, self->stack->length-2); - PDATA_APPEND(self->stack, Py_None,-1); - return 0; + if (self->stack->length < 2) + return stackUnderflow(); + Pdata_clear(self->stack, self->stack->length - 2); + PDATA_APPEND(self->stack, Py_None, -1); + return 0; } static int -noload_build(Unpicklerobject *self) { +noload_build(Unpicklerobject * self) +{ - if (self->stack->length < 1) return stackUnderflow(); - Pdata_clear(self->stack, self->stack->length-1); - return 0; + if (self->stack->length < 1) + return stackUnderflow(); + Pdata_clear(self->stack, self->stack->length - 1); + return 0; } static int -noload_extension(Unpicklerobject *self, int nbytes) +noload_extension(Unpicklerobject * self, int nbytes) { - char *codebytes; + char *codebytes; - assert(nbytes == 1 || nbytes == 2 || nbytes == 4); - if (self->read_func(self, &codebytes, nbytes) < 0) return -1; - PDATA_APPEND(self->stack, Py_None, -1); - return 0; + assert(nbytes == 1 || nbytes == 2 || nbytes == 4); + if (self->read_func(self, &codebytes, nbytes) < 0) + return -1; + PDATA_APPEND(self->stack, Py_None, -1); + return 0; } static PyObject * -noload(Unpicklerobject *self) +noload(Unpicklerobject * self) { - PyObject *err = 0, *val = 0; - char *s; + PyObject *err = 0, *val = 0; + char *s; - self->num_marks = 0; - Pdata_clear(self->stack, 0); + self->num_marks = 0; + Pdata_clear(self->stack, 0); + + while (1) { + if (self->read_func(self, &s, 1) < 0) + break; + + switch (s[0]) { + case NONE: + if (load_none(self) < 0) + break; + continue; + + case BININT: + if (load_binint(self) < 0) + break; + continue; + + case BININT1: + if (load_binint1(self) < 0) + break; + continue; + + case BININT2: + if (load_binint2(self) < 0) + break; + continue; + + case INT: + if (load_int(self) < 0) + break; + continue; + + case LONG: + if (load_long(self) < 0) + break; + continue; + + case LONG1: + if (load_counted_long(self, 1) < 0) + break; + 
continue; + + case LONG4: + if (load_counted_long(self, 4) < 0) + break; + continue; + + case FLOAT: + if (load_float(self) < 0) + break; + continue; + + case BINFLOAT: + if (load_binfloat(self) < 0) + break; + continue; + + case BINSTRING: + if (load_binstring(self) < 0) + break; + continue; + + case SHORT_BINSTRING: + if (load_short_binstring(self) < 0) + break; + continue; - while (1) { - if (self->read_func(self, &s, 1) < 0) - break; - - switch (s[0]) { - case NONE: - if (load_none(self) < 0) - break; - continue; - - case BININT: - if (load_binint(self) < 0) - break; - continue; - - case BININT1: - if (load_binint1(self) < 0) - break; - continue; - - case BININT2: - if (load_binint2(self) < 0) - break; - continue; - - case INT: - if (load_int(self) < 0) - break; - continue; - - case LONG: - if (load_long(self) < 0) - break; - continue; - - case LONG1: - if (load_counted_long(self, 1) < 0) - break; - continue; - - case LONG4: - if (load_counted_long(self, 4) < 0) - break; - continue; - - case FLOAT: - if (load_float(self) < 0) - break; - continue; - - case BINFLOAT: - if (load_binfloat(self) < 0) - break; - continue; - - case BINSTRING: - if (load_binstring(self) < 0) - break; - continue; - - case SHORT_BINSTRING: - if (load_short_binstring(self) < 0) - break; - continue; - - case STRING: - if (load_string(self) < 0) - break; - continue; + case STRING: + if (load_string(self) < 0) + break; + continue; #ifdef Py_USING_UNICODE - case UNICODE: - if (load_unicode(self) < 0) - break; - continue; - - case BINUNICODE: - if (load_binunicode(self) < 0) - break; - continue; + case UNICODE: + if (load_unicode(self) < 0) + break; + continue; + + case BINUNICODE: + if (load_binunicode(self) < 0) + break; + continue; #endif - case EMPTY_TUPLE: - if (load_counted_tuple(self, 0) < 0) - break; - continue; - - case TUPLE1: - if (load_counted_tuple(self, 1) < 0) - break; - continue; - - case TUPLE2: - if (load_counted_tuple(self, 2) < 0) - break; - continue; - - case TUPLE3: - if 
(load_counted_tuple(self, 3) < 0) - break; - continue; - - case TUPLE: - if (load_tuple(self) < 0) - break; - continue; - - case EMPTY_LIST: - if (load_empty_list(self) < 0) - break; - continue; - - case LIST: - if (load_list(self) < 0) - break; - continue; - - case EMPTY_DICT: - if (load_empty_dict(self) < 0) - break; - continue; - - case DICT: - if (load_dict(self) < 0) - break; - continue; - - case OBJ: - if (noload_obj(self) < 0) - break; - continue; - - case INST: - if (noload_inst(self) < 0) - break; - continue; - - case NEWOBJ: - if (noload_newobj(self) < 0) - break; - continue; - - case GLOBAL: - if (noload_global(self) < 0) - break; - continue; - - case APPEND: - if (load_append(self) < 0) - break; - continue; - - case APPENDS: - if (load_appends(self) < 0) - break; - continue; - - case BUILD: - if (noload_build(self) < 0) - break; - continue; - - case DUP: - if (load_dup(self) < 0) - break; - continue; - - case BINGET: - if (load_binget(self) < 0) - break; - continue; - - case LONG_BINGET: - if (load_long_binget(self) < 0) - break; - continue; - - case GET: - if (load_get(self) < 0) - break; - continue; - - case EXT1: - if (noload_extension(self, 1) < 0) - break; - continue; - - case EXT2: - if (noload_extension(self, 2) < 0) - break; - continue; - - case EXT4: - if (noload_extension(self, 4) < 0) - break; - continue; - - case MARK: - if (load_mark(self) < 0) - break; - continue; - - case BINPUT: - if (load_binput(self) < 0) - break; - continue; - - case LONG_BINPUT: - if (load_long_binput(self) < 0) - break; - continue; - - case PUT: - if (load_put(self) < 0) - break; - continue; - - case POP: - if (load_pop(self) < 0) - break; - continue; - - case POP_MARK: - if (load_pop_mark(self) < 0) - break; - continue; - - case SETITEM: - if (load_setitem(self) < 0) - break; - continue; - - case SETITEMS: - if (load_setitems(self) < 0) - break; - continue; - - case STOP: - break; - - case PERSID: - if (load_persid(self) < 0) - break; - continue; - - case 
BINPERSID: - if (load_binpersid(self) < 0) - break; - continue; - - case REDUCE: - if (noload_reduce(self) < 0) - break; - continue; - - case PROTO: - if (load_proto(self) < 0) - break; - continue; - - case NEWTRUE: - if (load_bool(self, Py_True) < 0) - break; - continue; - - case NEWFALSE: - if (load_bool(self, Py_False) < 0) - break; - continue; - default: - cPickle_ErrFormat(UnpicklingError, - "invalid load key, '%s'.", - "c", s[0]); - return NULL; - } + case EMPTY_TUPLE: + if (load_counted_tuple(self, 0) < 0) + break; + continue; + + case TUPLE1: + if (load_counted_tuple(self, 1) < 0) + break; + continue; + + case TUPLE2: + if (load_counted_tuple(self, 2) < 0) + break; + continue; + + case TUPLE3: + if (load_counted_tuple(self, 3) < 0) + break; + continue; + + case TUPLE: + if (load_tuple(self) < 0) + break; + continue; + + case EMPTY_LIST: + if (load_empty_list(self) < 0) + break; + continue; + + case LIST: + if (load_list(self) < 0) + break; + continue; + + case EMPTY_DICT: + if (load_empty_dict(self) < 0) + break; + continue; + + case DICT: + if (load_dict(self) < 0) + break; + continue; + + case OBJ: + if (noload_obj(self) < 0) + break; + continue; + + case INST: + if (noload_inst(self) < 0) + break; + continue; + + case NEWOBJ: + if (noload_newobj(self) < 0) + break; + continue; + + case GLOBAL: + if (noload_global(self) < 0) + break; + continue; + + case APPEND: + if (load_append(self) < 0) + break; + continue; + + case APPENDS: + if (load_appends(self) < 0) + break; + continue; + + case BUILD: + if (noload_build(self) < 0) + break; + continue; + + case DUP: + if (load_dup(self) < 0) + break; + continue; + + case BINGET: + if (load_binget(self) < 0) + break; + continue; + + case LONG_BINGET: + if (load_long_binget(self) < 0) + break; + continue; + + case GET: + if (load_get(self) < 0) + break; + continue; + + case EXT1: + if (noload_extension(self, 1) < 0) + break; + continue; + + case EXT2: + if (noload_extension(self, 2) < 0) + break; + continue; + + 
case EXT4: + if (noload_extension(self, 4) < 0) + break; + continue; + + case MARK: + if (load_mark(self) < 0) + break; + continue; + + case BINPUT: + if (load_binput(self) < 0) + break; + continue; + + case LONG_BINPUT: + if (load_long_binput(self) < 0) + break; + continue; + + case PUT: + if (load_put(self) < 0) + break; + continue; + + case POP: + if (load_pop(self) < 0) + break; + continue; + + case POP_MARK: + if (load_pop_mark(self) < 0) + break; + continue; + + case SETITEM: + if (load_setitem(self) < 0) + break; + continue; + + case SETITEMS: + if (load_setitems(self) < 0) + break; + continue; + + case STOP: + break; + + case PERSID: + if (load_persid(self) < 0) + break; + continue; + + case BINPERSID: + if (load_binpersid(self) < 0) + break; + continue; + + case REDUCE: + if (noload_reduce(self) < 0) + break; + continue; + case PROTO: + if (load_proto(self) < 0) break; + continue; + + case NEWTRUE: + if (load_bool(self, Py_True) < 0) + break; + continue; + + case NEWFALSE: + if (load_bool(self, Py_False) < 0) + break; + continue; + default: + cPickle_ErrFormat(UnpicklingError, + "invalid load key, '%s'.", "c", s[0]); + return NULL; } - if ((err = PyErr_Occurred())) { - if (err == PyExc_EOFError) { - PyErr_SetNone(PyExc_EOFError); - } - return NULL; + break; + } + + if ((err = PyErr_Occurred())) { + if (err == PyExc_EOFError) { + PyErr_SetNone(PyExc_EOFError); } + return NULL; + } - PDATA_POP(self->stack, val); - return val; + PDATA_POP(self->stack, val); + return val; } static PyObject * -Unpickler_load(Unpicklerobject *self, PyObject *unused) +Unpickler_load(Unpicklerobject * self, PyObject * unused) { - return load(self); + return load(self); } static PyObject * -Unpickler_noload(Unpicklerobject *self, PyObject *unused) +Unpickler_noload(Unpicklerobject * self, PyObject * unused) { - return noload(self); + return noload(self); } static struct PyMethodDef Unpickler_methods[] = { - {"load", (PyCFunction)Unpickler_load, METH_NOARGS, - PyDoc_STR("load() -- 
Load a pickle") - }, - {"noload", (PyCFunction)Unpickler_noload, METH_NOARGS, - PyDoc_STR( - "noload() -- not load a pickle, but go through most of the motions\n" - "\n" - "This function can be used to read past a pickle without instantiating\n" - "any objects or importing any modules. It can also be used to find all\n" - "persistent references without instantiating any objects or importing\n" - "any modules.\n") - }, - {NULL, NULL} /* sentinel */ + {"load", (PyCFunction) Unpickler_load, METH_NOARGS, + PyDoc_STR("load() -- Load a pickle") + }, + {"noload", (PyCFunction) Unpickler_noload, METH_NOARGS, + PyDoc_STR + ("noload() -- not load a pickle, but go through most of the motions\n" + "\n" + "This function can be used to read past a pickle without instantiating\n" + "any objects or importing any modules. It can also be used to find all\n" + "persistent references without instantiating any objects or importing\n" + "any modules.\n") + }, + {NULL, NULL} /* sentinel */ }; static Unpicklerobject * -newUnpicklerobject(PyObject *f) +newUnpicklerobject(PyObject * f) { - Unpicklerobject *self; - - if (!( self = PyObject_GC_New(Unpicklerobject, &Unpicklertype))) - return NULL; - - self->file = NULL; - self->arg = NULL; - self->stack = (Pdata*)Pdata_New(); - self->pers_func = NULL; - self->last_string = NULL; - self->marks = NULL; - self->num_marks = 0; - self->marks_size = 0; - self->buf_size = 0; - self->read = NULL; - self->readline = NULL; - self->find_class = NULL; - - if (!( self->memo = PyDict_New())) - goto err; + Unpicklerobject *self; - if (!self->stack) - goto err; + if (!(self = PyObject_GC_New(Unpicklerobject, &Unpicklertype))) + return NULL; - Py_INCREF(f); - self->file = f; + self->file = NULL; + self->arg = NULL; + self->stack = (Pdata *) Pdata_New(); + self->pers_func = NULL; + self->last_string = NULL; + self->marks = NULL; + self->num_marks = 0; + self->marks_size = 0; + self->buf_size = 0; + self->read = NULL; + self->readline = NULL; + self->find_class 
= NULL; + + if (!(self->memo = PyDict_New())) + goto err; + + if (!self->stack) + goto err; + + Py_INCREF(f); + self->file = f; + + /* Set read, readline based on type of f */ + if (PyFile_Check(f)) { + self->fp = PyFile_AsFile(f); + if (self->fp == NULL) { + PyErr_SetString(PyExc_ValueError, "I/O operation on closed file"); + goto err; + } + self->read_func = read_file; + self->readline_func = readline_file; + } + else if (PycStringIO_InputCheck(f)) { + self->fp = NULL; + self->read_func = read_cStringIO; + self->readline_func = readline_cStringIO; + } + else { - /* Set read, readline based on type of f */ - if (PyFile_Check(f)) { - self->fp = PyFile_AsFile(f); - if (self->fp == NULL) { - PyErr_SetString(PyExc_ValueError, - "I/O operation on closed file"); - goto err; - } - self->read_func = read_file; - self->readline_func = readline_file; - } - else if (PycStringIO_InputCheck(f)) { - self->fp = NULL; - self->read_func = read_cStringIO; - self->readline_func = readline_cStringIO; - } - else { + self->fp = NULL; + self->read_func = read_other; + self->readline_func = readline_other; - self->fp = NULL; - self->read_func = read_other; - self->readline_func = readline_other; - - if (!( (self->readline = PyObject_GetAttr(f, readline_str)) && - (self->read = PyObject_GetAttr(f, read_str)))) { - PyErr_Clear(); - PyErr_SetString( PyExc_TypeError, - "argument must have 'read' and " - "'readline' attributes" ); - goto err; - } + if (!((self->readline = PyObject_GetAttr(f, readline_str)) && + (self->read = PyObject_GetAttr(f, read_str)))) { + PyErr_Clear(); + PyErr_SetString(PyExc_TypeError, + "argument must have 'read' and " + "'readline' attributes"); + goto err; } - PyObject_GC_Track(self); + } + PyObject_GC_Track(self); - return self; + return self; err: - Py_DECREF((PyObject *)self); - return NULL; + Py_DECREF((PyObject *) self); + return NULL; } static PyObject * -get_Unpickler(PyObject *self, PyObject *file) +get_Unpickler(PyObject * self, PyObject * file) { - return 
(PyObject *)newUnpicklerobject(file); + return (PyObject *) newUnpicklerobject(file); } static void -Unpickler_dealloc(Unpicklerobject *self) +Unpickler_dealloc(Unpicklerobject * self) { - PyObject_GC_UnTrack((PyObject *)self); - Py_XDECREF(self->readline); - Py_XDECREF(self->read); - Py_XDECREF(self->file); - Py_XDECREF(self->memo); - Py_XDECREF(self->stack); - Py_XDECREF(self->pers_func); - Py_XDECREF(self->arg); - Py_XDECREF(self->last_string); - Py_XDECREF(self->find_class); - - if (self->marks) { - free(self->marks); - } - - if (self->buf_size) { - free(self->buf); - } - - self->ob_type->tp_free((PyObject *)self); + PyObject_GC_UnTrack((PyObject *) self); + Py_XDECREF(self->readline); + Py_XDECREF(self->read); + Py_XDECREF(self->file); + Py_XDECREF(self->memo); + Py_XDECREF(self->stack); + Py_XDECREF(self->pers_func); + Py_XDECREF(self->arg); + Py_XDECREF(self->last_string); + Py_XDECREF(self->find_class); + + if (self->marks) { + free(self->marks); + } + + if (self->buf_size) { + free(self->buf); + } + + self->ob_type->tp_free((PyObject *) self); } static int -Unpickler_traverse(Unpicklerobject *self, visitproc visit, void *arg) -{ - Py_VISIT(self->readline); - Py_VISIT(self->read); - Py_VISIT(self->file); - Py_VISIT(self->memo); - Py_VISIT(self->stack); - Py_VISIT(self->pers_func); - Py_VISIT(self->arg); - Py_VISIT(self->last_string); - Py_VISIT(self->find_class); - return 0; +Unpickler_traverse(Unpicklerobject * self, visitproc visit, void *arg) +{ + Py_VISIT(self->readline); + Py_VISIT(self->read); + Py_VISIT(self->file); + Py_VISIT(self->memo); + Py_VISIT(self->stack); + Py_VISIT(self->pers_func); + Py_VISIT(self->arg); + Py_VISIT(self->last_string); + Py_VISIT(self->find_class); + return 0; } static int -Unpickler_clear(Unpicklerobject *self) -{ - Py_CLEAR(self->readline); - Py_CLEAR(self->read); - Py_CLEAR(self->file); - Py_CLEAR(self->memo); - Py_CLEAR(self->stack); - Py_CLEAR(self->pers_func); - Py_CLEAR(self->arg); - Py_CLEAR(self->last_string); - 
Py_CLEAR(self->find_class); - return 0; +Unpickler_clear(Unpicklerobject * self) +{ + Py_CLEAR(self->readline); + Py_CLEAR(self->read); + Py_CLEAR(self->file); + Py_CLEAR(self->memo); + Py_CLEAR(self->stack); + Py_CLEAR(self->pers_func); + Py_CLEAR(self->arg); + Py_CLEAR(self->last_string); + Py_CLEAR(self->find_class); + return 0; } static PyObject * -Unpickler_getattr(Unpicklerobject *self, char *name) +Unpickler_getattr(Unpicklerobject * self, char *name) { - if (!strcmp(name, "persistent_load")) { - if (!self->pers_func) { - PyErr_SetString(PyExc_AttributeError, name); - return NULL; - } - - Py_INCREF(self->pers_func); - return self->pers_func; + if (!strcmp(name, "persistent_load")) { + if (!self->pers_func) { + PyErr_SetString(PyExc_AttributeError, name); + return NULL; } - if (!strcmp(name, "find_global")) { - if (!self->find_class) { - PyErr_SetString(PyExc_AttributeError, name); - return NULL; - } + Py_INCREF(self->pers_func); + return self->pers_func; + } - Py_INCREF(self->find_class); - return self->find_class; + if (!strcmp(name, "find_global")) { + if (!self->find_class) { + PyErr_SetString(PyExc_AttributeError, name); + return NULL; } - if (!strcmp(name, "memo")) { - if (!self->memo) { - PyErr_SetString(PyExc_AttributeError, name); - return NULL; - } + Py_INCREF(self->find_class); + return self->find_class; + } - Py_INCREF(self->memo); - return self->memo; + if (!strcmp(name, "memo")) { + if (!self->memo) { + PyErr_SetString(PyExc_AttributeError, name); + return NULL; } - if (!strcmp(name, "UnpicklingError")) { - Py_INCREF(UnpicklingError); - return UnpicklingError; - } + Py_INCREF(self->memo); + return self->memo; + } + + if (!strcmp(name, "UnpicklingError")) { + Py_INCREF(UnpicklingError); + return UnpicklingError; + } - return Py_FindMethod(Unpickler_methods, (PyObject *)self, name); + return Py_FindMethod(Unpickler_methods, (PyObject *) self, name); } static int -Unpickler_setattr(Unpicklerobject *self, char *name, PyObject *value) 
+Unpickler_setattr(Unpicklerobject * self, char *name, PyObject * value) { - if (!strcmp(name, "persistent_load")) { - Py_XDECREF(self->pers_func); - self->pers_func = value; - Py_XINCREF(value); - return 0; - } + if (!strcmp(name, "persistent_load")) { + Py_XDECREF(self->pers_func); + self->pers_func = value; + Py_XINCREF(value); + return 0; + } - if (!strcmp(name, "find_global")) { - Py_XDECREF(self->find_class); - self->find_class = value; - Py_XINCREF(value); - return 0; - } + if (!strcmp(name, "find_global")) { + Py_XDECREF(self->find_class); + self->find_class = value; + Py_XINCREF(value); + return 0; + } - if (! value) { - PyErr_SetString(PyExc_TypeError, - "attribute deletion is not supported"); - return -1; - } + if (!value) { + PyErr_SetString(PyExc_TypeError, + "attribute deletion is not supported"); + return -1; + } - if (strcmp(name, "memo") == 0) { - if (!PyDict_Check(value)) { - PyErr_SetString(PyExc_TypeError, - "memo must be a dictionary"); - return -1; - } - Py_XDECREF(self->memo); - self->memo = value; - Py_INCREF(value); - return 0; + if (strcmp(name, "memo") == 0) { + if (!PyDict_Check(value)) { + PyErr_SetString(PyExc_TypeError, "memo must be a dictionary"); + return -1; } + Py_XDECREF(self->memo); + self->memo = value; + Py_INCREF(value); + return 0; + } - PyErr_SetString(PyExc_AttributeError, name); - return -1; + PyErr_SetString(PyExc_AttributeError, name); + return -1; } /* --------------------------------------------------------------------------- @@ -5196,388 +5270,389 @@ /* dump(obj, file, protocol=0). 
*/ static PyObject * -cpm_dump(PyObject *self, PyObject *args, PyObject *kwds) +cpm_dump(PyObject * self, PyObject * args, PyObject * kwds) { - static char *kwlist[] = {"obj", "file", "protocol", NULL}; - PyObject *ob, *file, *res = NULL; - Picklerobject *pickler = 0; - int proto = 0; + static char *kwlist[] = { "obj", "file", "protocol", NULL }; + PyObject *ob, *file, *res = NULL; + Picklerobject *pickler = 0; + int proto = 0; - if (!( PyArg_ParseTupleAndKeywords(args, kwds, "OO|i", kwlist, - &ob, &file, &proto))) - goto finally; + if (!(PyArg_ParseTupleAndKeywords(args, kwds, "OO|i", kwlist, + &ob, &file, &proto))) + goto finally; - if (!( pickler = newPicklerobject(file, proto))) - goto finally; + if (!(pickler = newPicklerobject(file, proto))) + goto finally; - if (dump(pickler, ob) < 0) - goto finally; + if (dump(pickler, ob) < 0) + goto finally; - Py_INCREF(Py_None); - res = Py_None; + Py_INCREF(Py_None); + res = Py_None; finally: - Py_XDECREF(pickler); + Py_XDECREF(pickler); - return res; + return res; } /* dumps(obj, protocol=0). 
*/ static PyObject * -cpm_dumps(PyObject *self, PyObject *args, PyObject *kwds) +cpm_dumps(PyObject * self, PyObject * args, PyObject * kwds) { - static char *kwlist[] = {"obj", "protocol", NULL}; - PyObject *ob, *file = 0, *res = NULL; - Picklerobject *pickler = 0; - int proto = 0; + static char *kwlist[] = { "obj", "protocol", NULL }; + PyObject *ob, *file = 0, *res = NULL; + Picklerobject *pickler = 0; + int proto = 0; - if (!( PyArg_ParseTupleAndKeywords(args, kwds, "O|i:dumps", kwlist, - &ob, &proto))) - goto finally; + if (!(PyArg_ParseTupleAndKeywords(args, kwds, "O|i:dumps", kwlist, + &ob, &proto))) + goto finally; - if (!( file = PycStringIO->NewOutput(128))) - goto finally; + if (!(file = PycStringIO->NewOutput(128))) + goto finally; - if (!( pickler = newPicklerobject(file, proto))) - goto finally; + if (!(pickler = newPicklerobject(file, proto))) + goto finally; - if (dump(pickler, ob) < 0) - goto finally; + if (dump(pickler, ob) < 0) + goto finally; - res = PycStringIO->cgetvalue(file); + res = PycStringIO->cgetvalue(file); finally: - Py_XDECREF(pickler); - Py_XDECREF(file); + Py_XDECREF(pickler); + Py_XDECREF(file); - return res; + return res; } /* load(fileobj). 
*/ static PyObject * -cpm_load(PyObject *self, PyObject *ob) +cpm_load(PyObject * self, PyObject * ob) { - Unpicklerobject *unpickler = 0; - PyObject *res = NULL; + Unpicklerobject *unpickler = 0; + PyObject *res = NULL; - if (!( unpickler = newUnpicklerobject(ob))) - goto finally; + if (!(unpickler = newUnpicklerobject(ob))) + goto finally; - res = load(unpickler); + res = load(unpickler); finally: - Py_XDECREF(unpickler); + Py_XDECREF(unpickler); - return res; + return res; } /* loads(string) */ static PyObject * -cpm_loads(PyObject *self, PyObject *args) +cpm_loads(PyObject * self, PyObject * args) { - PyObject *ob, *file = 0, *res = NULL; - Unpicklerobject *unpickler = 0; + PyObject *ob, *file = 0, *res = NULL; + Unpicklerobject *unpickler = 0; - if (!( PyArg_ParseTuple(args, "S:loads", &ob))) - goto finally; + if (!(PyArg_ParseTuple(args, "S:loads", &ob))) + goto finally; - if (!( file = PycStringIO->NewInput(ob))) - goto finally; + if (!(file = PycStringIO->NewInput(ob))) + goto finally; - if (!( unpickler = newUnpicklerobject(file))) - goto finally; + if (!(unpickler = newUnpicklerobject(file))) + goto finally; - res = load(unpickler); + res = load(unpickler); finally: - Py_XDECREF(file); - Py_XDECREF(unpickler); + Py_XDECREF(file); + Py_XDECREF(unpickler); - return res; + return res; } -PyDoc_STRVAR(Unpicklertype__doc__, -"Objects that know how to unpickle"); +PyDoc_STRVAR(Unpicklertype__doc__, "Objects that know how to unpickle"); static PyTypeObject Unpicklertype = { PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ - "cPickle.Unpickler", /*tp_name*/ - sizeof(Unpicklerobject), /*tp_basicsize*/ + 0, /*ob_size */ + "cPickle.Unpickler", /*tp_name */ + sizeof(Unpicklerobject), /*tp_basicsize */ 0, - (destructor)Unpickler_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - (getattrfunc)Unpickler_getattr, /* tp_getattr */ - (setattrfunc)Unpickler_setattr, /* tp_setattr */ - 0, /* tp_compare */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* 
tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ + (destructor) Unpickler_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + (getattrfunc) Unpickler_getattr, /* tp_getattr */ + (setattrfunc) Unpickler_setattr, /* tp_setattr */ + 0, /* tp_compare */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, - Unpicklertype__doc__, /* tp_doc */ - (traverseproc)Unpickler_traverse, /* tp_traverse */ - (inquiry)Unpickler_clear, /* tp_clear */ + Unpicklertype__doc__, /* tp_doc */ + (traverseproc) Unpickler_traverse, /* tp_traverse */ + (inquiry) Unpickler_clear, /* tp_clear */ }; static struct PyMethodDef cPickle_methods[] = { - {"dump", (PyCFunction)cpm_dump, METH_VARARGS | METH_KEYWORDS, - PyDoc_STR("dump(obj, file, protocol=0) -- " - "Write an object in pickle format to the given file.\n" - "\n" - "See the Pickler docstring for the meaning of optional argument proto.") - }, - - {"dumps", (PyCFunction)cpm_dumps, METH_VARARGS | METH_KEYWORDS, - PyDoc_STR("dumps(obj, protocol=0) -- " - "Return a string containing an object in pickle format.\n" - "\n" - "See the Pickler docstring for the meaning of optional argument proto.") - }, - - {"load", (PyCFunction)cpm_load, METH_O, - PyDoc_STR("load(file) -- Load a pickle from the given file")}, - - {"loads", (PyCFunction)cpm_loads, METH_VARARGS, - PyDoc_STR("loads(string) -- Load a pickle from the given string")}, - - {"Pickler", (PyCFunction)get_Pickler, METH_VARARGS | METH_KEYWORDS, - PyDoc_STR("Pickler(file, protocol=0) -- Create a pickler.\n" - "\n" - "This takes a file-like object for writing a pickle data stream.\n" - "The optional proto argument tells the pickler to use the given\n" - "protocol; supported 
protocols are 0, 1, 2. The default\n" - "protocol is 0, to be backwards compatible. (Protocol 0 is the\n" - "only protocol that can be written to a file opened in text\n" - "mode and read back successfully. When using a protocol higher\n" - "than 0, make sure the file is opened in binary mode, both when\n" - "pickling and unpickling.)\n" - "\n" - "Protocol 1 is more efficient than protocol 0; protocol 2 is\n" - "more efficient than protocol 1.\n" - "\n" - "Specifying a negative protocol version selects the highest\n" - "protocol version supported. The higher the protocol used, the\n" - "more recent the version of Python needed to read the pickle\n" - "produced.\n" - "\n" - "The file parameter must have a write() method that accepts a single\n" - "string argument. It can thus be an open file object, a StringIO\n" - "object, or any other custom object that meets this interface.\n") - }, + {"dump", (PyCFunction) cpm_dump, METH_VARARGS | METH_KEYWORDS, + PyDoc_STR("dump(obj, file, protocol=0) -- " + "Write an object in pickle format to the given file.\n" + "\n" + "See the Pickler docstring for the meaning of optional argument proto.") + }, + + {"dumps", (PyCFunction) cpm_dumps, METH_VARARGS | METH_KEYWORDS, + PyDoc_STR("dumps(obj, protocol=0) -- " + "Return a string containing an object in pickle format.\n" + "\n" + "See the Pickler docstring for the meaning of optional argument proto.") + }, + + {"load", (PyCFunction) cpm_load, METH_O, + PyDoc_STR("load(file) -- Load a pickle from the given file")}, + + {"loads", (PyCFunction) cpm_loads, METH_VARARGS, + PyDoc_STR("loads(string) -- Load a pickle from the given string")}, + + {"Pickler", (PyCFunction) get_Pickler, METH_VARARGS | METH_KEYWORDS, + PyDoc_STR("Pickler(file, protocol=0) -- Create a pickler.\n" + "\n" + "This takes a file-like object for writing a pickle data stream.\n" + "The optional proto argument tells the pickler to use the given\n" + "protocol; supported protocols are 0, 1, 2. 
The default\n" + "protocol is 0, to be backwards compatible. (Protocol 0 is the\n" + "only protocol that can be written to a file opened in text\n" + "mode and read back successfully. When using a protocol higher\n" + "than 0, make sure the file is opened in binary mode, both when\n" + "pickling and unpickling.)\n" + "\n" + "Protocol 1 is more efficient than protocol 0; protocol 2 is\n" + "more efficient than protocol 1.\n" + "\n" + "Specifying a negative protocol version selects the highest\n" + "protocol version supported. The higher the protocol used, the\n" + "more recent the version of Python needed to read the pickle\n" + "produced.\n" + "\n" + "The file parameter must have a write() method that accepts a single\n" + "string argument. It can thus be an open file object, a StringIO\n" + "object, or any other custom object that meets this interface.\n") + }, - {"Unpickler", (PyCFunction)get_Unpickler, METH_O, - PyDoc_STR("Unpickler(file) -- Create an unpickler.")}, + {"Unpickler", (PyCFunction) get_Unpickler, METH_O, + PyDoc_STR("Unpickler(file) -- Create an unpickler.")}, - { NULL, NULL } + {NULL, NULL} }; static int -init_stuff(PyObject *module_dict) +init_stuff(PyObject * module_dict) { - PyObject *copy_reg, *t, *r; + PyObject *copy_reg, *t, *r; #define INIT_STR(S) if (!( S ## _str=PyString_InternFromString(#S))) return -1; - if (PyType_Ready(&Unpicklertype) < 0) - return -1; - if (PyType_Ready(&Picklertype) < 0) - return -1; + if (PyType_Ready(&Unpicklertype) < 0) + return -1; + if (PyType_Ready(&Picklertype) < 0) + return -1; - INIT_STR(__class__); - INIT_STR(__getinitargs__); - INIT_STR(__dict__); - INIT_STR(__getstate__); - INIT_STR(__setstate__); - INIT_STR(__name__); - INIT_STR(__main__); - INIT_STR(__reduce__); - INIT_STR(__reduce_ex__); - INIT_STR(write); - INIT_STR(append); - INIT_STR(read); - INIT_STR(readline); - INIT_STR(copy_reg); - INIT_STR(dispatch_table); + INIT_STR(__class__); + INIT_STR(__getinitargs__); + INIT_STR(__dict__); + 
INIT_STR(__getstate__); + INIT_STR(__setstate__); + INIT_STR(__name__); + INIT_STR(__main__); + INIT_STR(__reduce__); + INIT_STR(__reduce_ex__); + INIT_STR(write); + INIT_STR(append); + INIT_STR(read); + INIT_STR(readline); + INIT_STR(copy_reg); + INIT_STR(dispatch_table); - if (!( copy_reg = PyImport_ImportModule("copy_reg"))) - return -1; + if (!(copy_reg = PyImport_ImportModule("copy_reg"))) + return -1; - /* This is special because we want to use a different - one in restricted mode. */ - dispatch_table = PyObject_GetAttr(copy_reg, dispatch_table_str); - if (!dispatch_table) return -1; - - extension_registry = PyObject_GetAttrString(copy_reg, - "_extension_registry"); - if (!extension_registry) return -1; - - inverted_registry = PyObject_GetAttrString(copy_reg, - "_inverted_registry"); - if (!inverted_registry) return -1; - - extension_cache = PyObject_GetAttrString(copy_reg, - "_extension_cache"); - if (!extension_cache) return -1; + /* This is special because we want to use a different + * one in restricted mode. */ + dispatch_table = PyObject_GetAttr(copy_reg, dispatch_table_str); + if (!dispatch_table) + return -1; - Py_DECREF(copy_reg); + extension_registry = PyObject_GetAttrString(copy_reg, + "_extension_registry"); + if (!extension_registry) + return -1; - if (!(empty_tuple = PyTuple_New(0))) - return -1; + inverted_registry = PyObject_GetAttrString(copy_reg, "_inverted_registry"); + if (!inverted_registry) + return -1; - two_tuple = PyTuple_New(2); - if (two_tuple == NULL) - return -1; - /* We use this temp container with no regard to refcounts, or to - * keeping containees alive. Exempt from GC, because we don't - * want anything looking at two_tuple() by magic. 
- */ - PyObject_GC_UnTrack(two_tuple); + extension_cache = PyObject_GetAttrString(copy_reg, "_extension_cache"); + if (!extension_cache) + return -1; - /* Ugh */ - if (!( t=PyImport_ImportModule("__builtin__"))) return -1; - if (PyDict_SetItemString(module_dict, "__builtins__", t) < 0) - return -1; + Py_DECREF(copy_reg); - if (!( t=PyDict_New())) return -1; - if (!( r=PyRun_String( - "def __str__(self):\n" - " return self.args and ('%s' % self.args[0]) or '(what)'\n", - Py_file_input, - module_dict, t) )) return -1; - Py_DECREF(r); + if (!(empty_tuple = PyTuple_New(0))) + return -1; - PickleError = PyErr_NewException("cPickle.PickleError", NULL, t); - if (!PickleError) - return -1; + two_tuple = PyTuple_New(2); + if (two_tuple == NULL) + return -1; + /* We use this temp container with no regard to refcounts, or to + * keeping containees alive. Exempt from GC, because we don't + * want anything looking at two_tuple() by magic. + */ + PyObject_GC_UnTrack(two_tuple); - Py_DECREF(t); + /* Ugh */ + if (!(t = PyImport_ImportModule("__builtin__"))) + return -1; + if (PyDict_SetItemString(module_dict, "__builtins__", t) < 0) + return -1; - PicklingError = PyErr_NewException("cPickle.PicklingError", - PickleError, NULL); - if (!PicklingError) - return -1; + if (!(t = PyDict_New())) + return -1; + if (!(r = PyRun_String("def __str__(self):\n" + " return self.args and ('%s' % self.args[0]) or '(what)'\n", + Py_file_input, module_dict, t))) + return -1; + Py_DECREF(r); - if (!( t=PyDict_New())) return -1; - if (!( r=PyRun_String( - "def __str__(self):\n" - " a=self.args\n" - " a=a and type(a[0]) or '(what)'\n" - " return 'Cannot pickle %s objects' % a\n" - , Py_file_input, - module_dict, t) )) return -1; - Py_DECREF(r); + PickleError = PyErr_NewException("cPickle.PickleError", NULL, t); + if (!PickleError) + return -1; - if (!( UnpickleableError = PyErr_NewException( - "cPickle.UnpickleableError", PicklingError, t))) - return -1; + Py_DECREF(t); - Py_DECREF(t); + PicklingError 
= PyErr_NewException("cPickle.PicklingError", + PickleError, NULL); + if (!PicklingError) + return -1; - if (!( UnpicklingError = PyErr_NewException("cPickle.UnpicklingError", - PickleError, NULL))) - return -1; + if (!(t = PyDict_New())) + return -1; + if (!(r = PyRun_String("def __str__(self):\n" + " a=self.args\n" + " a=a and type(a[0]) or '(what)'\n" + " return 'Cannot pickle %s objects' % a\n", + Py_file_input, module_dict, t))) + return -1; + Py_DECREF(r); - if (!( BadPickleGet = PyErr_NewException("cPickle.BadPickleGet", - UnpicklingError, NULL))) - return -1; + if (! + (UnpickleableError = + PyErr_NewException("cPickle.UnpickleableError", PicklingError, t))) + return -1; - if (PyDict_SetItemString(module_dict, "PickleError", - PickleError) < 0) - return -1; + Py_DECREF(t); - if (PyDict_SetItemString(module_dict, "PicklingError", - PicklingError) < 0) - return -1; + if (!(UnpicklingError = PyErr_NewException("cPickle.UnpicklingError", + PickleError, NULL))) + return -1; - if (PyDict_SetItemString(module_dict, "UnpicklingError", - UnpicklingError) < 0) - return -1; + if (!(BadPickleGet = PyErr_NewException("cPickle.BadPickleGet", + UnpicklingError, NULL))) + return -1; - if (PyDict_SetItemString(module_dict, "UnpickleableError", - UnpickleableError) < 0) - return -1; + if (PyDict_SetItemString(module_dict, "PickleError", PickleError) < 0) + return -1; - if (PyDict_SetItemString(module_dict, "BadPickleGet", - BadPickleGet) < 0) - return -1; + if (PyDict_SetItemString(module_dict, "PicklingError", PicklingError) < 0) + return -1; - PycString_IMPORT; + if (PyDict_SetItemString(module_dict, "UnpicklingError", + UnpicklingError) < 0) + return -1; - return 0; + if (PyDict_SetItemString(module_dict, "UnpickleableError", + UnpickleableError) < 0) + return -1; + + if (PyDict_SetItemString(module_dict, "BadPickleGet", BadPickleGet) < 0) + return -1; + + PycString_IMPORT; + + return 0; } -#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */ +#ifndef 
PyMODINIT_FUNC /* declarations for DLL import/export */ #define PyMODINIT_FUNC void #endif PyMODINIT_FUNC initcPickle(void) { - PyObject *m, *d, *di, *v, *k; - Py_ssize_t i; - char *rev = "1.71"; /* XXX when does this change? */ - PyObject *format_version; - PyObject *compatible_formats; - - Picklertype.ob_type = &PyType_Type; - Unpicklertype.ob_type = &PyType_Type; - PdataType.ob_type = &PyType_Type; - - /* Initialize some pieces. We need to do this before module creation, - * so we're forced to use a temporary dictionary. :( - */ - di = PyDict_New(); - if (!di) return; - if (init_stuff(di) < 0) return; - - /* Create the module and add the functions */ - m = Py_InitModule4("cPickle", cPickle_methods, - cPickle_module_documentation, - (PyObject*)NULL,PYTHON_API_VERSION); - if (m == NULL) - return; - - /* Add some symbolic constants to the module */ - d = PyModule_GetDict(m); - v = PyString_FromString(rev); - PyDict_SetItemString(d, "__version__", v); - Py_XDECREF(v); - - /* Copy data from di. Waaa. */ - for (i=0; PyDict_Next(di, &i, &k, &v); ) { - if (PyObject_SetItem(d, k, v) < 0) { - Py_DECREF(di); - return; - } - } - Py_DECREF(di); - - i = PyModule_AddIntConstant(m, "HIGHEST_PROTOCOL", HIGHEST_PROTOCOL); - if (i < 0) - return; - - /* These are purely informational; no code uses them. */ - /* File format version we write. */ - format_version = PyString_FromString("2.0"); - /* Format versions we can read. */ - compatible_formats = Py_BuildValue("[sssss]", - "1.0", /* Original protocol 0 */ - "1.1", /* Protocol 0 + INST */ - "1.2", /* Original protocol 1 */ - "1.3", /* Protocol 1 + BINFLOAT */ - "2.0"); /* Original protocol 2 */ - PyDict_SetItemString(d, "format_version", format_version); - PyDict_SetItemString(d, "compatible_formats", compatible_formats); - Py_XDECREF(format_version); - Py_XDECREF(compatible_formats); + PyObject *m, *d, *di, *v, *k; + Py_ssize_t i; + char *rev = "1.71"; /* XXX when does this change? 
*/ + PyObject *format_version; + PyObject *compatible_formats; + + Picklertype.ob_type = &PyType_Type; + Unpicklertype.ob_type = &PyType_Type; + PdataType.ob_type = &PyType_Type; + + /* Initialize some pieces. We need to do this before module creation, + * so we're forced to use a temporary dictionary. :( + */ + di = PyDict_New(); + if (!di) + return; + if (init_stuff(di) < 0) + return; + + /* Create the module and add the functions */ + m = Py_InitModule4("cPickle", cPickle_methods, + cPickle_module_documentation, + (PyObject *) NULL, PYTHON_API_VERSION); + if (m == NULL) + return; + + /* Add some symbolic constants to the module */ + d = PyModule_GetDict(m); + v = PyString_FromString(rev); + PyDict_SetItemString(d, "__version__", v); + Py_XDECREF(v); + + /* Copy data from di. Waaa. */ + for (i = 0; PyDict_Next(di, &i, &k, &v);) { + if (PyObject_SetItem(d, k, v) < 0) { + Py_DECREF(di); + return; + } + } + Py_DECREF(di); + + i = PyModule_AddIntConstant(m, "HIGHEST_PROTOCOL", HIGHEST_PROTOCOL); + if (i < 0) + return; + + /* These are purely informational; no code uses them. */ + /* File format version we write. */ + format_version = PyString_FromString("2.0"); + /* Format versions we can read. 
*/ + compatible_formats = Py_BuildValue("[sssss]", "1.0", /* Original protocol 0 */ + "1.1", /* Protocol 0 + INST */ + "1.2", /* Original protocol 1 */ + "1.3", /* Protocol 1 + BINFLOAT */ + "2.0"); /* Original protocol 2 */ + PyDict_SetItemString(d, "format_version", format_version); + PyDict_SetItemString(d, "compatible_formats", compatible_formats); + Py_XDECREF(format_version); + Py_XDECREF(compatible_formats); } From python-checkins at python.org Sat Jul 7 20:02:40 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Sat, 7 Jul 2007 20:02:40 +0200 (CEST) Subject: [Python-checkins] r56183 - python/branches/cpy_merge/Modules/_picklemodule.c Message-ID: <20070707180240.9954E1E4007@bag.python.org> Author: alexandre.vassalotti Date: Sat Jul 7 20:02:39 2007 New Revision: 56183 Modified: python/branches/cpy_merge/Modules/_picklemodule.c Log: Expanded tabs. Modified: python/branches/cpy_merge/Modules/_picklemodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_picklemodule.c (original) +++ python/branches/cpy_merge/Modules/_picklemodule.c Sat Jul 7 20:02:39 2007 @@ -3,7 +3,7 @@ #include "structmember.h" PyDoc_STRVAR(cPickle_module_documentation, - "C implementation and optimization of the Python pickle module."); +"C implementation and optimization of the Python pickle module."); #ifndef Py_eval_input #include @@ -64,18 +64,18 @@ #define SETITEMS 'u' /* Protocol 2. 
*/ -#define PROTO '\x80' /* identify pickle protocol */ -#define NEWOBJ '\x81' /* build object by applying cls.__new__ to argtuple */ -#define EXT1 '\x82' /* push object from extension registry; 1-byte index */ -#define EXT2 '\x83' /* ditto, but 2-byte index */ -#define EXT4 '\x84' /* ditto, but 4-byte index */ -#define TUPLE1 '\x85' /* build 1-tuple from stack top */ -#define TUPLE2 '\x86' /* build 2-tuple from two topmost stack items */ -#define TUPLE3 '\x87' /* build 3-tuple from three topmost stack items */ -#define NEWTRUE '\x88' /* push True */ -#define NEWFALSE '\x89' /* push False */ -#define LONG1 '\x8a' /* push long from < 256 bytes */ -#define LONG4 '\x8b' /* push really big long */ +#define PROTO '\x80' /* identify pickle protocol */ +#define NEWOBJ '\x81' /* build object by applying cls.__new__ to argtuple */ +#define EXT1 '\x82' /* push object from extension registry; 1-byte index */ +#define EXT2 '\x83' /* ditto, but 2-byte index */ +#define EXT4 '\x84' /* ditto, but 4-byte index */ +#define TUPLE1 '\x85' /* build 1-tuple from stack top */ +#define TUPLE2 '\x86' /* build 2-tuple from two topmost stack items */ +#define TUPLE3 '\x87' /* build 3-tuple from three topmost stack items */ +#define NEWTRUE '\x88' /* push True */ +#define NEWFALSE '\x89' /* push False */ +#define LONG1 '\x8a' /* push long from < 256 bytes */ +#define LONG4 '\x8b' /* push really big long */ /* There aren't opcodes -- they're ways to pickle bools before protocol 2, * so that unpicklers written before bools were introduced unpickle them @@ -130,8 +130,8 @@ Internal Data type for pickle data. 
*/ typedef struct { - PyObject_HEAD int length; /* number of initial slots in data currently used */ - int size; /* number of slots in data allocated */ + PyObject_HEAD int length; /* number of initial slots in data currently used */ + int size; /* number of slots in data allocated */ PyObject **data; } Pdata; @@ -142,10 +142,10 @@ PyObject **p; for (i = self->length, p = self->data; --i >= 0; p++) { - Py_DECREF(*p); + Py_DECREF(*p); } if (self->data) - free(self->data); + free(self->data); PyObject_Del(self); } @@ -163,12 +163,12 @@ Pdata *self; if (!(self = PyObject_New(Pdata, &PdataType))) - return NULL; + return NULL; self->size = 8; self->length = 0; self->data = malloc(self->size * sizeof(PyObject *)); if (self->data) - return (PyObject *) self; + return (PyObject *) self; Py_DECREF(self); return PyErr_NoMemory(); } @@ -190,12 +190,12 @@ PyObject **p; if (clearto < 0) - return stackUnderflow(); + return stackUnderflow(); if (clearto >= self->length) - return 0; + return 0; for (i = self->length, p = self->data + clearto; --i >= clearto; p++) { - Py_CLEAR(*p); + Py_CLEAR(*p); } self->length = clearto; @@ -210,16 +210,16 @@ PyObject **tmp; bigger = self->size << 1; - if (bigger <= 0) /* was 0, or new value overflows */ - goto nomemory; + if (bigger <= 0) /* was 0, or new value overflows */ + goto nomemory; if ((int) (size_t) bigger != bigger) - goto nomemory; + goto nomemory; nbytes = (size_t) bigger *sizeof(PyObject *); if (nbytes / sizeof(PyObject *) != (size_t) bigger) - goto nomemory; + goto nomemory; tmp = realloc(self->data, nbytes); if (tmp == NULL) - goto nomemory; + goto nomemory; self->data = tmp; self->size = bigger; return 0; @@ -233,13 +233,13 @@ * must be an lvalue holding PyObject*. On stack underflow, UnpicklingError * is raised and V is set to NULL. D and V may be evaluated several times. 
*/ -#define PDATA_POP(D, V) { \ - if ((D)->length) \ - (V) = (D)->data[--((D)->length)]; \ - else { \ - PyErr_SetString(UnpicklingError, "bad pickle data"); \ - (V) = NULL; \ - } \ +#define PDATA_POP(D, V) { \ + if ((D)->length) \ + (V) = (D)->data[--((D)->length)]; \ + else { \ + PyErr_SetString(UnpicklingError, "bad pickle data"); \ + (V) = NULL; \ + } \ } /* PDATA_PUSH and PDATA_APPEND both push rvalue PyObject* O on to Pdata* @@ -251,22 +251,22 @@ */ /* Push O on stack D, giving ownership of O to the stack. */ -#define PDATA_PUSH(D, O, ER) { \ - if (((Pdata*)(D))->length == ((Pdata*)(D))->size && \ - Pdata_grow((Pdata*)(D)) < 0) { \ - Py_DECREF(O); \ - return ER; \ - } \ - ((Pdata*)(D))->data[((Pdata*)(D))->length++] = (O); \ +#define PDATA_PUSH(D, O, ER) { \ + if (((Pdata*)(D))->length == ((Pdata*)(D))->size && \ + Pdata_grow((Pdata*)(D)) < 0) { \ + Py_DECREF(O); \ + return ER; \ + } \ + ((Pdata*)(D))->data[((Pdata*)(D))->length++] = (O); \ } /* Push O on stack D, pushing a new reference. 
*/ -#define PDATA_APPEND(D, O, ER) { \ - if (((Pdata*)(D))->length == ((Pdata*)(D))->size && \ - Pdata_grow((Pdata*)(D)) < 0) \ - return ER; \ - Py_INCREF(O); \ - ((Pdata*)(D))->data[((Pdata*)(D))->length++] = (O); \ +#define PDATA_APPEND(D, O, ER) { \ + if (((Pdata*)(D))->length == ((Pdata*)(D))->size && \ + Pdata_grow((Pdata*)(D)) < 0) \ + return ER; \ + Py_INCREF(O); \ + ((Pdata*)(D))->data[((Pdata*)(D))->length++] = (O); \ } @@ -279,9 +279,9 @@ l = self->length - start; r = PyTuple_New(l); if (r == NULL) - return NULL; + return NULL; for (i = start, j = 0; j < l; i++, j++) - PyTuple_SET_ITEM(r, j, self->data[i]); + PyTuple_SET_ITEM(r, j, self->data[i]); self->length = start; return r; @@ -295,9 +295,9 @@ l = self->length - start; if (!(r = PyList_New(l))) - return NULL; + return NULL; for (i = start, j = 0; j < l; i++, j++) - PyList_SET_ITEM(r, j, self->data[i]); + PyList_SET_ITEM(r, j, self->data[i]); self->length = start; return r; @@ -337,13 +337,13 @@ /* bool, true if proto > 0 */ int bin; - int fast; /* Fast mode doesn't save in memo, don't use if circ ref */ + int fast; /* Fast mode doesn't save in memo, don't use if circ ref */ int nesting; int (*write_func) (struct Picklerobject *, const char *, Py_ssize_t); char *write_buf; int buf_size; PyObject *dispatch_table; - int fast_container; /* count nested container dumps */ + int fast_container; /* count nested container dumps */ PyObject *fast_memo; } Picklerobject; @@ -389,29 +389,29 @@ va_start(va, format); if (format) - args = Py_VaBuildValue(format, va); + args = Py_VaBuildValue(format, va); va_end(va); if (format && !args) - return NULL; + return NULL; if (stringformat && !(retval = PyString_FromString(stringformat))) - return NULL; + return NULL; if (retval) { - if (args) { - PyObject *v; - v = PyString_Format(retval, args); - Py_DECREF(retval); - Py_DECREF(args); - if (!v) - return NULL; - retval = v; - } + if (args) { + PyObject *v; + v = PyString_Format(retval, args); + Py_DECREF(retval); + 
Py_DECREF(args); + if (!v) + return NULL; + retval = v; + } } else if (args) - retval = args; + retval = args; else { - PyErr_SetObject(ErrType, Py_None); - return NULL; + PyErr_SetObject(ErrType, Py_None); + return NULL; } PyErr_SetObject(ErrType, retval); Py_DECREF(retval); @@ -424,19 +424,19 @@ size_t nbyteswritten; if (s == NULL) { - return 0; + return 0; } if (n > INT_MAX) { - /* String too large */ - return -1; + /* String too large */ + return -1; } Py_BEGIN_ALLOW_THREADS - nbyteswritten = fwrite(s, sizeof(char), n, self->fp); + nbyteswritten = fwrite(s, sizeof(char), n, self->fp); Py_END_ALLOW_THREADS if (nbyteswritten != (size_t) n) { - PyErr_SetFromErrno(PyExc_IOError); - return -1; + PyErr_SetFromErrno(PyExc_IOError); + return -1; } return (int) n; @@ -446,11 +446,11 @@ write_cStringIO(Picklerobject * self, const char *s, Py_ssize_t n) { if (s == NULL) { - return 0; + return 0; } if (PycStringIO->cwrite((PyObject *) self->file, s, n) != n) { - return -1; + return -1; } return (int) n; @@ -460,9 +460,9 @@ write_none(Picklerobject * self, const char *s, Py_ssize_t n) { if (s == NULL) - return 0; + return 0; if (n > INT_MAX) - return -1; + return -1; return (int) n; } @@ -473,46 +473,46 @@ int n; if (_n > INT_MAX) - return -1; + return -1; n = (int) _n; if (s == NULL) { - if (!(self->buf_size)) - return 0; - py_str = PyString_FromStringAndSize(self->write_buf, self->buf_size); - if (!py_str) - return -1; + if (!(self->buf_size)) + return 0; + py_str = PyString_FromStringAndSize(self->write_buf, self->buf_size); + if (!py_str) + return -1; } else { - if (self->buf_size && (n + self->buf_size) > WRITE_BUF_SIZE) { - if (write_other(self, NULL, 0) < 0) - return -1; - } - - if (n > WRITE_BUF_SIZE) { - if (!(py_str = PyString_FromStringAndSize(s, n))) - return -1; - } - else { - memcpy(self->write_buf + self->buf_size, s, n); - self->buf_size += n; - return n; - } + if (self->buf_size && (n + self->buf_size) > WRITE_BUF_SIZE) { + if (write_other(self, NULL, 0) < 
0) + return -1; + } + + if (n > WRITE_BUF_SIZE) { + if (!(py_str = PyString_FromStringAndSize(s, n))) + return -1; + } + else { + memcpy(self->write_buf + self->buf_size, s, n); + self->buf_size += n; + return n; + } } if (self->write) { - /* object with write method */ - ARG_TUP(self, py_str); - if (self->arg) { - junk = PyObject_Call(self->write, self->arg, NULL); - FREE_ARG_TUP(self); - } - if (junk) - Py_DECREF(junk); - else - return -1; + /* object with write method */ + ARG_TUP(self, py_str); + if (self->arg) { + junk = PyObject_Call(self->write, self->arg, NULL); + FREE_ARG_TUP(self); + } + if (junk) + Py_DECREF(junk); + else + return -1; } else - PDATA_PUSH(self->file, py_str, -1); + PDATA_PUSH(self->file, py_str, -1); self->buf_size = 0; return n; @@ -525,36 +525,36 @@ size_t nbytesread; if (self->buf_size == 0) { - int size; + int size; - size = ((n < 32) ? 32 : n); - if (!(self->buf = (char *) malloc(size))) { - PyErr_NoMemory(); - return -1; - } + size = ((n < 32) ? 32 : n); + if (!(self->buf = (char *) malloc(size))) { + PyErr_NoMemory(); + return -1; + } - self->buf_size = size; + self->buf_size = size; } else if (n > self->buf_size) { - char *newbuf = (char *) realloc(self->buf, n); - if (!newbuf) { - PyErr_NoMemory(); - return -1; - } - self->buf = newbuf; - self->buf_size = n; + char *newbuf = (char *) realloc(self->buf, n); + if (!newbuf) { + PyErr_NoMemory(); + return -1; + } + self->buf = newbuf; + self->buf_size = n; } Py_BEGIN_ALLOW_THREADS - nbytesread = fread(self->buf, sizeof(char), n, self->fp); + nbytesread = fread(self->buf, sizeof(char), n, self->fp); Py_END_ALLOW_THREADS if (nbytesread != (size_t) n) { - if (feof(self->fp)) { - PyErr_SetNone(PyExc_EOFError); - return -1; - } + if (feof(self->fp)) { + PyErr_SetNone(PyExc_EOFError); + return -1; + } - PyErr_SetFromErrno(PyExc_IOError); - return -1; + PyErr_SetFromErrno(PyExc_IOError); + return -1; } *s = self->buf; @@ -569,36 +569,36 @@ int i; if (self->buf_size == 0) { - if (!(self->buf 
= (char *) malloc(40))) { - PyErr_NoMemory(); - return -1; - } - self->buf_size = 40; + if (!(self->buf = (char *) malloc(40))) { + PyErr_NoMemory(); + return -1; + } + self->buf_size = 40; } i = 0; while (1) { - int bigger; - char *newbuf; - for (; i < (self->buf_size - 1); i++) { - if (feof(self->fp) || (self->buf[i] = getc(self->fp)) == '\n') { - self->buf[i + 1] = '\0'; - *s = self->buf; - return i + 1; - } - } - bigger = self->buf_size << 1; - if (bigger <= 0) { /* overflow */ - PyErr_NoMemory(); - return -1; - } - newbuf = (char *) realloc(self->buf, bigger); - if (!newbuf) { - PyErr_NoMemory(); - return -1; - } - self->buf = newbuf; - self->buf_size = bigger; + int bigger; + char *newbuf; + for (; i < (self->buf_size - 1); i++) { + if (feof(self->fp) || (self->buf[i] = getc(self->fp)) == '\n') { + self->buf[i + 1] = '\0'; + *s = self->buf; + return i + 1; + } + } + bigger = self->buf_size << 1; + if (bigger <= 0) { /* overflow */ + PyErr_NoMemory(); + return -1; + } + newbuf = (char *) realloc(self->buf, bigger); + if (!newbuf) { + PyErr_NoMemory(); + return -1; + } + self->buf = newbuf; + self->buf_size = bigger; } } @@ -609,8 +609,8 @@ char *ptr; if (PycStringIO->cread((PyObject *) self->file, &ptr, n) != n) { - PyErr_SetNone(PyExc_EOFError); - return -1; + PyErr_SetNone(PyExc_EOFError); + return -1; } *s = ptr; @@ -626,7 +626,7 @@ char *ptr; if ((n = PycStringIO->creadline((PyObject *) self->file, &ptr)) < 0) { - return -1; + return -1; } *s = ptr; @@ -641,21 +641,21 @@ PyObject *bytes, *str = 0; if (!(bytes = PyInt_FromSsize_t(n))) - return -1; + return -1; ARG_TUP(self, bytes); if (self->arg) { - str = PyObject_Call(self->read, self->arg, NULL); - FREE_ARG_TUP(self); + str = PyObject_Call(self->read, self->arg, NULL); + FREE_ARG_TUP(self); } if (!str) - return -1; + return -1; Py_XDECREF(self->last_string); self->last_string = str; if (!(*s = PyString_AsString(str))) - return -1; + return -1; return n; } @@ -667,17 +667,17 @@ Py_ssize_t str_size; if 
(!(str = PyObject_CallObject(self->readline, empty_tuple))) { - return -1; + return -1; } if ((str_size = PyString_Size(str)) < 0) - return -1; + return -1; Py_XDECREF(self->last_string); self->last_string = str; if (!(*s = PyString_AsString(str))) - return -1; + return -1; return str_size; } @@ -691,7 +691,7 @@ { char *r = (char *) malloc(n + 1); if (r == NULL) - return (char *) PyErr_NoMemory(); + return (char *) PyErr_NoMemory(); memcpy(r, s, n); r[n] = 0; return r; @@ -707,50 +707,50 @@ size_t len; if (!(mv = PyDict_GetItem(self->memo, id))) { - PyErr_SetObject(PyExc_KeyError, id); - return -1; + PyErr_SetObject(PyExc_KeyError, id); + return -1; } if (!(value = PyTuple_GetItem(mv, 0))) - return -1; + return -1; if (!(PyInt_Check(value))) { - PyErr_SetString(PicklingError, "no int where int expected in memo"); - return -1; + PyErr_SetString(PicklingError, "no int where int expected in memo"); + return -1; } c_value = PyInt_AsLong(value); if (c_value == -1 && PyErr_Occurred()) - return -1; + return -1; if (!self->bin) { - s[0] = GET; - PyOS_snprintf(s + 1, sizeof(s) - 1, "%ld\n", c_value); - len = strlen(s); + s[0] = GET; + PyOS_snprintf(s + 1, sizeof(s) - 1, "%ld\n", c_value); + len = strlen(s); } else if (Pdata_Check(self->file)) { - if (write_other(self, NULL, 0) < 0) - return -1; - PDATA_APPEND(self->file, mv, -1); - return 0; + if (write_other(self, NULL, 0) < 0) + return -1; + PDATA_APPEND(self->file, mv, -1); + return 0; } else { - if (c_value < 256) { - s[0] = BINGET; - s[1] = (int) (c_value & 0xff); - len = 2; - } - else { - s[0] = LONG_BINGET; - s[1] = (int) (c_value & 0xff); - s[2] = (int) ((c_value >> 8) & 0xff); - s[3] = (int) ((c_value >> 16) & 0xff); - s[4] = (int) ((c_value >> 24) & 0xff); - len = 5; - } + if (c_value < 256) { + s[0] = BINGET; + s[1] = (int) (c_value & 0xff); + len = 2; + } + else { + s[0] = LONG_BINGET; + s[1] = (int) (c_value & 0xff); + s[2] = (int) ((c_value >> 8) & 0xff); + s[3] = (int) ((c_value >> 16) & 0xff); + s[4] = (int) 
((c_value >> 24) & 0xff); + len = 5; + } } if (self->write_func(self, s, len) < 0) - return -1; + return -1; return 0; } @@ -760,7 +760,7 @@ put(Picklerobject * self, PyObject * ob) { if (ob->ob_refcnt < 2 || self->fast) - return 0; + return 0; return put2(self, ob); } @@ -776,10 +776,10 @@ PyObject *py_ob_id = 0, *memo_len = 0, *t = 0; if (self->fast) - return 0; + return 0; if ((p = PyDict_Size(self->memo)) < 0) - goto finally; + goto finally; /* Make sure memo keys are positive! */ /* XXX Why? @@ -790,13 +790,13 @@ p++; if (!(py_ob_id = PyLong_FromVoidPtr(ob))) - goto finally; + goto finally; if (!(memo_len = PyInt_FromLong(p))) - goto finally; + goto finally; if (!(t = PyTuple_New(2))) - goto finally; + goto finally; PyTuple_SET_ITEM(t, 0, memo_len); Py_INCREF(memo_len); @@ -804,38 +804,38 @@ Py_INCREF(ob); if (PyDict_SetItem(self->memo, py_ob_id, t) < 0) - goto finally; + goto finally; if (!self->bin) { - c_str[0] = PUT; - PyOS_snprintf(c_str + 1, sizeof(c_str) - 1, "%d\n", p); - len = strlen(c_str); + c_str[0] = PUT; + PyOS_snprintf(c_str + 1, sizeof(c_str) - 1, "%d\n", p); + len = strlen(c_str); } else if (Pdata_Check(self->file)) { - if (write_other(self, NULL, 0) < 0) - return -1; - PDATA_APPEND(self->file, memo_len, -1); - res = 0; /* Job well done ;) */ - goto finally; + if (write_other(self, NULL, 0) < 0) + return -1; + PDATA_APPEND(self->file, memo_len, -1); + res = 0; /* Job well done ;) */ + goto finally; } else { - if (p >= 256) { - c_str[0] = LONG_BINPUT; - c_str[1] = (int) (p & 0xff); - c_str[2] = (int) ((p >> 8) & 0xff); - c_str[3] = (int) ((p >> 16) & 0xff); - c_str[4] = (int) ((p >> 24) & 0xff); - len = 5; - } - else { - c_str[0] = BINPUT; - c_str[1] = p; - len = 2; - } + if (p >= 256) { + c_str[0] = LONG_BINPUT; + c_str[1] = (int) (p & 0xff); + c_str[2] = (int) ((p >> 8) & 0xff); + c_str[3] = (int) ((p >> 16) & 0xff); + c_str[4] = (int) ((p >> 24) & 0xff); + len = 5; + } + else { + c_str[0] = BINPUT; + c_str[1] = p; + len = 2; + } } if 
(self->write_func(self, c_str, len) < 0) - goto finally; + goto finally; res = 0; @@ -855,38 +855,38 @@ module = PyObject_GetAttrString(global, "__module__"); if (module) - return module; + return module; if (PyErr_ExceptionMatches(PyExc_AttributeError)) - PyErr_Clear(); + PyErr_Clear(); else - return NULL; + return NULL; if (!(modules_dict = PySys_GetObject("modules"))) - return NULL; + return NULL; i = 0; while ((j = PyDict_Next(modules_dict, &i, &name, &module))) { - if (PyObject_Compare(name, __main___str) == 0) - continue; + if (PyObject_Compare(name, __main___str) == 0) + continue; - global_name_attr = PyObject_GetAttr(module, global_name); - if (!global_name_attr) { - if (PyErr_ExceptionMatches(PyExc_AttributeError)) - PyErr_Clear(); - else - return NULL; - continue; - } - - if (global_name_attr != global) { - Py_DECREF(global_name_attr); - continue; - } + global_name_attr = PyObject_GetAttr(module, global_name); + if (!global_name_attr) { + if (PyErr_ExceptionMatches(PyExc_AttributeError)) + PyErr_Clear(); + else + return NULL; + continue; + } + + if (global_name_attr != global) { + Py_DECREF(global_name_attr); + continue; + } - Py_DECREF(global_name_attr); + Py_DECREF(global_name_attr); - break; + break; } /* The following implements the rule in pickle.py added in 1.5 @@ -894,8 +894,8 @@ * like this rule. jlf */ if (!j) { - j = 1; - name = __main___str; + j = 1; + name = __main___str; } Py_INCREF(name); @@ -908,32 +908,32 @@ { /* if fast_container < 0, we're doing an error exit. 
*/ if (++self->fast_container >= PY_CPICKLE_FAST_LIMIT) { - PyObject *key = NULL; - if (self->fast_memo == NULL) { - self->fast_memo = PyDict_New(); - if (self->fast_memo == NULL) { - self->fast_container = -1; - return 0; - } - } - key = PyLong_FromVoidPtr(obj); - if (key == NULL) - return 0; - if (PyDict_GetItem(self->fast_memo, key)) { - Py_DECREF(key); - PyErr_Format(PyExc_ValueError, - "fast mode: can't pickle cyclic objects " - "including object type %s at %p", - obj->ob_type->tp_name, obj); - self->fast_container = -1; - return 0; - } - if (PyDict_SetItem(self->fast_memo, key, Py_None) < 0) { - Py_DECREF(key); - self->fast_container = -1; - return 0; - } - Py_DECREF(key); + PyObject *key = NULL; + if (self->fast_memo == NULL) { + self->fast_memo = PyDict_New(); + if (self->fast_memo == NULL) { + self->fast_container = -1; + return 0; + } + } + key = PyLong_FromVoidPtr(obj); + if (key == NULL) + return 0; + if (PyDict_GetItem(self->fast_memo, key)) { + Py_DECREF(key); + PyErr_Format(PyExc_ValueError, + "fast mode: can't pickle cyclic objects " + "including object type %s at %p", + obj->ob_type->tp_name, obj); + self->fast_container = -1; + return 0; + } + if (PyDict_SetItem(self->fast_memo, key, Py_None) < 0) { + Py_DECREF(key); + self->fast_container = -1; + return 0; + } + Py_DECREF(key); } return 1; } @@ -942,14 +942,14 @@ fast_save_leave(Picklerobject * self, PyObject * obj) { if (self->fast_container-- >= PY_CPICKLE_FAST_LIMIT) { - PyObject *key = PyLong_FromVoidPtr(obj); - if (key == NULL) - return 0; - if (PyDict_DelItem(self->fast_memo, key) < 0) { - Py_DECREF(key); - return 0; - } - Py_DECREF(key); + PyObject *key = PyLong_FromVoidPtr(obj); + if (key == NULL) + return 0; + if (PyDict_DelItem(self->fast_memo, key) < 0) { + Py_DECREF(key); + return 0; + } + Py_DECREF(key); } return 1; } @@ -959,7 +959,7 @@ { static char none = NONE; if (self->write_func(self, &none, 1) < 0) - return -1; + return -1; return 0; } @@ -972,12 +972,12 @@ long l = args == 
Py_True; if (self->proto >= 2) { - char opcode = l ? NEWTRUE : NEWFALSE; - if (self->write_func(self, &opcode, 1) < 0) - return -1; + char opcode = l ? NEWTRUE : NEWFALSE; + if (self->write_func(self, &opcode, 1) < 0) + return -1; } else if (self->write_func(self, buf[l], len[l]) < 0) - return -1; + return -1; return 0; } @@ -989,41 +989,41 @@ if (!self->bin #if SIZEOF_LONG > 4 - || l > 0x7fffffffL || l < -0x80000000L + || l > 0x7fffffffL || l < -0x80000000L #endif - ) { - /* Text-mode pickle, or long too big to fit in the 4-byte - * signed BININT format: store as a string. - */ - c_str[0] = INT; - PyOS_snprintf(c_str + 1, sizeof(c_str) - 1, "%ld\n", l); - if (self->write_func(self, c_str, strlen(c_str)) < 0) - return -1; + ) { + /* Text-mode pickle, or long too big to fit in the 4-byte + * signed BININT format: store as a string. + */ + c_str[0] = INT; + PyOS_snprintf(c_str + 1, sizeof(c_str) - 1, "%ld\n", l); + if (self->write_func(self, c_str, strlen(c_str)) < 0) + return -1; } else { - /* Binary pickle and l fits in a signed 4-byte int. */ - c_str[1] = (int) (l & 0xff); - c_str[2] = (int) ((l >> 8) & 0xff); - c_str[3] = (int) ((l >> 16) & 0xff); - c_str[4] = (int) ((l >> 24) & 0xff); - - if ((c_str[4] == 0) && (c_str[3] == 0)) { - if (c_str[2] == 0) { - c_str[0] = BININT1; - len = 2; - } - else { - c_str[0] = BININT2; - len = 3; - } - } - else { - c_str[0] = BININT; - len = 5; - } + /* Binary pickle and l fits in a signed 4-byte int. 
*/ + c_str[1] = (int) (l & 0xff); + c_str[2] = (int) ((l >> 8) & 0xff); + c_str[3] = (int) ((l >> 16) & 0xff); + c_str[4] = (int) ((l >> 24) & 0xff); + + if ((c_str[4] == 0) && (c_str[3] == 0)) { + if (c_str[2] == 0) { + c_str[0] = BININT1; + len = 2; + } + else { + c_str[0] = BININT2; + len = 3; + } + } + else { + c_str[0] = BININT; + len = 5; + } - if (self->write_func(self, c_str, len) < 0) - return -1; + if (self->write_func(self, c_str, len) < 0) + return -1; } return 0; @@ -1040,114 +1040,114 @@ static char l = LONG; if (val == -1 && PyErr_Occurred()) { - /* out of range for int pickling */ - PyErr_Clear(); + /* out of range for int pickling */ + PyErr_Clear(); } else - return save_int(self, val); + return save_int(self, val); if (self->proto >= 2) { - /* Linear-time pickling. */ - size_t nbits; - size_t nbytes; - unsigned char *pdata; - char c_str[5]; - int i; - int sign = _PyLong_Sign(args); - - if (sign == 0) { - /* It's 0 -- an empty bytestring. */ - c_str[0] = LONG1; - c_str[1] = 0; - i = self->write_func(self, c_str, 2); - if (i < 0) - goto finally; - res = 0; - goto finally; - } - nbits = _PyLong_NumBits(args); - if (nbits == (size_t) - 1 && PyErr_Occurred()) - goto finally; - /* How many bytes do we need? There are nbits >> 3 full - * bytes of data, and nbits & 7 leftover bits. If there - * are any leftover bits, then we clearly need another - * byte. Wnat's not so obvious is that we *probably* - * need another byte even if there aren't any leftovers: - * the most-significant bit of the most-significant byte - * acts like a sign bit, and it's usually got a sense - * opposite of the one we need. The exception is longs - * of the form -(2**(8*j-1)) for j > 0. Such a long is - * its own 256's-complement, so has the right sign bit - * even without the extra byte. That's a pain to check - * for in advance, though, so we always grab an extra - * byte at the start, and cut it back later if possible. 
- */ - nbytes = (nbits >> 3) + 1; - if (nbytes > INT_MAX) { - PyErr_SetString(PyExc_OverflowError, "long too large " - "to pickle"); - goto finally; - } - repr = PyString_FromStringAndSize(NULL, (int) nbytes); - if (repr == NULL) - goto finally; - pdata = (unsigned char *) PyString_AS_STRING(repr); - i = _PyLong_AsByteArray((PyLongObject *) args, - pdata, nbytes, - 1 /* little endian */ , 1 /* signed */ ); - if (i < 0) - goto finally; - /* If the long is negative, this may be a byte more than - * needed. This is so iff the MSB is all redundant sign - * bits. - */ - if (sign < 0 && nbytes > 1 && pdata[nbytes - 1] == 0xff && - (pdata[nbytes - 2] & 0x80) != 0) - --nbytes; - - if (nbytes < 256) { - c_str[0] = LONG1; - c_str[1] = (char) nbytes; - size = 2; - } - else { - c_str[0] = LONG4; - size = (int) nbytes; - for (i = 1; i < 5; i++) { - c_str[i] = (char) (size & 0xff); - size >>= 8; - } - size = 5; - } - i = self->write_func(self, c_str, size); - if (i < 0) - goto finally; - i = self->write_func(self, (char *) pdata, (int) nbytes); - if (i < 0) - goto finally; - res = 0; - goto finally; + /* Linear-time pickling. */ + size_t nbits; + size_t nbytes; + unsigned char *pdata; + char c_str[5]; + int i; + int sign = _PyLong_Sign(args); + + if (sign == 0) { + /* It's 0 -- an empty bytestring. */ + c_str[0] = LONG1; + c_str[1] = 0; + i = self->write_func(self, c_str, 2); + if (i < 0) + goto finally; + res = 0; + goto finally; + } + nbits = _PyLong_NumBits(args); + if (nbits == (size_t) - 1 && PyErr_Occurred()) + goto finally; + /* How many bytes do we need? There are nbits >> 3 full + * bytes of data, and nbits & 7 leftover bits. If there + * are any leftover bits, then we clearly need another + * byte. Wnat's not so obvious is that we *probably* + * need another byte even if there aren't any leftovers: + * the most-significant bit of the most-significant byte + * acts like a sign bit, and it's usually got a sense + * opposite of the one we need. 
The exception is longs + * of the form -(2**(8*j-1)) for j > 0. Such a long is + * its own 256's-complement, so has the right sign bit + * even without the extra byte. That's a pain to check + * for in advance, though, so we always grab an extra + * byte at the start, and cut it back later if possible. + */ + nbytes = (nbits >> 3) + 1; + if (nbytes > INT_MAX) { + PyErr_SetString(PyExc_OverflowError, "long too large " + "to pickle"); + goto finally; + } + repr = PyString_FromStringAndSize(NULL, (int) nbytes); + if (repr == NULL) + goto finally; + pdata = (unsigned char *) PyString_AS_STRING(repr); + i = _PyLong_AsByteArray((PyLongObject *) args, + pdata, nbytes, + 1 /* little endian */ , 1 /* signed */ ); + if (i < 0) + goto finally; + /* If the long is negative, this may be a byte more than + * needed. This is so iff the MSB is all redundant sign + * bits. + */ + if (sign < 0 && nbytes > 1 && pdata[nbytes - 1] == 0xff && + (pdata[nbytes - 2] & 0x80) != 0) + --nbytes; + + if (nbytes < 256) { + c_str[0] = LONG1; + c_str[1] = (char) nbytes; + size = 2; + } + else { + c_str[0] = LONG4; + size = (int) nbytes; + for (i = 1; i < 5; i++) { + c_str[i] = (char) (size & 0xff); + size >>= 8; + } + size = 5; + } + i = self->write_func(self, c_str, size); + if (i < 0) + goto finally; + i = self->write_func(self, (char *) pdata, (int) nbytes); + if (i < 0) + goto finally; + res = 0; + goto finally; } /* proto < 2: write the repr and newline. This is quadratic-time * (in the number of digits), in both directions. 
*/ if (!(repr = PyObject_Repr(args))) - goto finally; + goto finally; if ((size = PyString_Size(repr)) < 0) - goto finally; + goto finally; if (self->write_func(self, &l, 1) < 0) - goto finally; + goto finally; if (self->write_func(self, - PyString_AS_STRING((PyStringObject *) repr), - size) < 0) - goto finally; + PyString_AS_STRING((PyStringObject *) repr), + size) < 0) + goto finally; if (self->write_func(self, "\n", 1) < 0) - goto finally; + goto finally; res = 0; @@ -1163,22 +1163,22 @@ double x = PyFloat_AS_DOUBLE((PyFloatObject *) args); if (self->bin) { - char str[9]; - str[0] = BINFLOAT; - if (_PyFloat_Pack8(x, (unsigned char *) &str[1], 0) < 0) - return -1; - if (self->write_func(self, str, 9) < 0) - return -1; + char str[9]; + str[0] = BINFLOAT; + if (_PyFloat_Pack8(x, (unsigned char *) &str[1], 0) < 0) + return -1; + if (self->write_func(self, str, 9) < 0) + return -1; } else { - char c_str[250]; - c_str[0] = FLOAT; - PyOS_ascii_formatd(c_str + 1, sizeof(c_str) - 2, "%.17g", x); - /* Extend the formatted string with a newline character */ - strcat(c_str, "\n"); + char c_str[250]; + c_str[0] = FLOAT; + PyOS_ascii_formatd(c_str + 1, sizeof(c_str) - 2, "%.17g", x); + /* Extend the formatted string with a newline character */ + strcat(c_str, "\n"); - if (self->write_func(self, c_str, strlen(c_str)) < 0) - return -1; + if (self->write_func(self, c_str, strlen(c_str)) < 0) + return -1; } return 0; @@ -1192,71 +1192,71 @@ PyObject *repr = 0; if ((size = PyString_Size(args)) < 0) - return -1; + return -1; if (!self->bin) { - char *repr_str; + char *repr_str; - static char string = STRING; + static char string = STRING; - if (!(repr = PyObject_Repr(args))) - return -1; + if (!(repr = PyObject_Repr(args))) + return -1; - if ((len = PyString_Size(repr)) < 0) - goto err; - repr_str = PyString_AS_STRING((PyStringObject *) repr); + if ((len = PyString_Size(repr)) < 0) + goto err; + repr_str = PyString_AS_STRING((PyStringObject *) repr); - if (self->write_func(self, 
&string, 1) < 0) - goto err; + if (self->write_func(self, &string, 1) < 0) + goto err; - if (self->write_func(self, repr_str, len) < 0) - goto err; + if (self->write_func(self, repr_str, len) < 0) + goto err; - if (self->write_func(self, "\n", 1) < 0) - goto err; + if (self->write_func(self, "\n", 1) < 0) + goto err; - Py_XDECREF(repr); + Py_XDECREF(repr); } else { - int i; - char c_str[5]; + int i; + char c_str[5]; - if ((size = PyString_Size(args)) < 0) - return -1; + if ((size = PyString_Size(args)) < 0) + return -1; - if (size < 256) { - c_str[0] = SHORT_BINSTRING; - c_str[1] = size; - len = 2; - } - else if (size <= INT_MAX) { - c_str[0] = BINSTRING; - for (i = 1; i < 5; i++) - c_str[i] = (int) (size >> ((i - 1) * 8)); - len = 5; - } - else - return -1; /* string too large */ - - if (self->write_func(self, c_str, len) < 0) - return -1; - - if (size > 128 && Pdata_Check(self->file)) { - if (write_other(self, NULL, 0) < 0) - return -1; - PDATA_APPEND(self->file, args, -1); - } - else { - if (self->write_func(self, - PyString_AS_STRING((PyStringObject *) args), - size) < 0) - return -1; - } + if (size < 256) { + c_str[0] = SHORT_BINSTRING; + c_str[1] = size; + len = 2; + } + else if (size <= INT_MAX) { + c_str[0] = BINSTRING; + for (i = 1; i < 5; i++) + c_str[i] = (int) (size >> ((i - 1) * 8)); + len = 5; + } + else + return -1; /* string too large */ + + if (self->write_func(self, c_str, len) < 0) + return -1; + + if (size > 128 && Pdata_Check(self->file)) { + if (write_other(self, NULL, 0) < 0) + return -1; + PDATA_APPEND(self->file, args, -1); + } + else { + if (self->write_func(self, + PyString_AS_STRING((PyStringObject *) args), + size) < 0) + return -1; + } } if (doput) - if (put(self, args) < 0) - return -1; + if (put(self, args) < 0) + return -1; return 0; @@ -1280,25 +1280,25 @@ repr = PyString_FromStringAndSize(NULL, 6 * size); if (repr == NULL) - return NULL; + return NULL; if (size == 0) - return repr; + return repr; p = q = PyString_AS_STRING(repr); 
while (size-- > 0) { - Py_UNICODE ch = *s++; - /* Map 16-bit characters to '\uxxxx' */ - if (ch >= 256 || ch == '\\' || ch == '\n') { - *p++ = '\\'; - *p++ = 'u'; - *p++ = hexdigit[(ch >> 12) & 0xf]; - *p++ = hexdigit[(ch >> 8) & 0xf]; - *p++ = hexdigit[(ch >> 4) & 0xf]; - *p++ = hexdigit[ch & 15]; - } - /* Copy everything else as-is */ - else - *p++ = (char) ch; + Py_UNICODE ch = *s++; + /* Map 16-bit characters to '\uxxxx' */ + if (ch >= 256 || ch == '\\' || ch == '\n') { + *p++ = '\\'; + *p++ = 'u'; + *p++ = hexdigit[(ch >> 12) & 0xf]; + *p++ = hexdigit[(ch >> 8) & 0xf]; + *p++ = hexdigit[(ch >> 4) & 0xf]; + *p++ = hexdigit[ch & 15]; + } + /* Copy everything else as-is */ + else + *p++ = (char) ch; } *p = '\0'; _PyString_Resize(&repr, p - q); @@ -1313,69 +1313,69 @@ PyObject *repr = 0; if (!PyUnicode_Check(args)) - return -1; + return -1; if (!self->bin) { - char *repr_str; - static char string = UNICODE; + char *repr_str; + static char string = UNICODE; - repr = - modified_EncodeRawUnicodeEscape(PyUnicode_AS_UNICODE(args), - PyUnicode_GET_SIZE(args)); - if (!repr) - return -1; + repr = + modified_EncodeRawUnicodeEscape(PyUnicode_AS_UNICODE(args), + PyUnicode_GET_SIZE(args)); + if (!repr) + return -1; - if ((len = PyString_Size(repr)) < 0) - goto err; - repr_str = PyString_AS_STRING((PyStringObject *) repr); + if ((len = PyString_Size(repr)) < 0) + goto err; + repr_str = PyString_AS_STRING((PyStringObject *) repr); - if (self->write_func(self, &string, 1) < 0) - goto err; + if (self->write_func(self, &string, 1) < 0) + goto err; - if (self->write_func(self, repr_str, len) < 0) - goto err; + if (self->write_func(self, repr_str, len) < 0) + goto err; - if (self->write_func(self, "\n", 1) < 0) - goto err; + if (self->write_func(self, "\n", 1) < 0) + goto err; - Py_XDECREF(repr); + Py_XDECREF(repr); } else { - int i; - char c_str[5]; + int i; + char c_str[5]; - if (!(repr = PyUnicode_AsUTF8String(args))) - return -1; + if (!(repr = PyUnicode_AsUTF8String(args))) + 
return -1; - if ((size = PyString_Size(repr)) < 0) - goto err; - if (size > INT_MAX) - return -1; /* string too large */ - - c_str[0] = BINUNICODE; - for (i = 1; i < 5; i++) - c_str[i] = (int) (size >> ((i - 1) * 8)); - len = 5; - - if (self->write_func(self, c_str, len) < 0) - goto err; - - if (size > 128 && Pdata_Check(self->file)) { - if (write_other(self, NULL, 0) < 0) - goto err; - PDATA_APPEND(self->file, repr, -1); - } - else { - if (self->write_func(self, PyString_AS_STRING(repr), size) < 0) - goto err; - } + if ((size = PyString_Size(repr)) < 0) + goto err; + if (size > INT_MAX) + return -1; /* string too large */ + + c_str[0] = BINUNICODE; + for (i = 1; i < 5; i++) + c_str[i] = (int) (size >> ((i - 1) * 8)); + len = 5; + + if (self->write_func(self, c_str, len) < 0) + goto err; + + if (size > 128 && Pdata_Check(self->file)) { + if (write_other(self, NULL, 0) < 0) + goto err; + PDATA_APPEND(self->file, repr, -1); + } + else { + if (self->write_func(self, PyString_AS_STRING(repr), size) < 0) + goto err; + } - Py_DECREF(repr); + Py_DECREF(repr); } if (doput) - if (put(self, args) < 0) - return -1; + if (put(self, args) < 0) + return -1; return 0; @@ -1390,17 +1390,17 @@ store_tuple_elements(Picklerobject * self, PyObject * t, int len) { int i; - int res = -1; /* guilty until proved innocent */ + int res = -1; /* guilty until proved innocent */ assert(PyTuple_Size(t) == len); for (i = 0; i < len; i++) { - PyObject *element = PyTuple_GET_ITEM(t, i); + PyObject *element = PyTuple_GET_ITEM(t, i); - if (element == NULL) - goto finally; - if (save(self, element, 0) < 0) - goto finally; + if (element == NULL) + goto finally; + if (save(self, element, 0) < 0) + goto finally; } res = 0; @@ -1427,24 +1427,24 @@ static char len2opcode[] = { EMPTY_TUPLE, TUPLE1, TUPLE2, TUPLE3 }; if ((len = PyTuple_Size(args)) < 0) - goto finally; + goto finally; if (len == 0) { - char c_str[2]; + char c_str[2]; - if (self->proto) { - c_str[0] = EMPTY_TUPLE; - len = 1; - } - else { - 
c_str[0] = MARK; - c_str[1] = TUPLE; - len = 2; - } - if (self->write_func(self, c_str, len) >= 0) - res = 0; - /* Don't memoize an empty tuple. */ - goto finally; + if (self->proto) { + c_str[0] = EMPTY_TUPLE; + len = 1; + } + else { + c_str[0] = MARK; + c_str[1] = TUPLE; + len = 2; + } + if (self->write_func(self, c_str, len) >= 0) + res = 0; + /* Don't memoize an empty tuple. */ + goto finally; } /* A non-empty tuple. */ @@ -1456,65 +1456,65 @@ */ py_tuple_id = PyLong_FromVoidPtr(args); if (py_tuple_id == NULL) - goto finally; + goto finally; if (len <= 3 && self->proto >= 2) { - /* Use TUPLE{1,2,3} opcodes. */ - if (store_tuple_elements(self, args, len) < 0) - goto finally; - if (PyDict_GetItem(self->memo, py_tuple_id)) { - /* pop the len elements */ - for (i = 0; i < len; ++i) - if (self->write_func(self, &pop, 1) < 0) - goto finally; - /* fetch from memo */ - if (get(self, py_tuple_id) < 0) - goto finally; - res = 0; - goto finally; - } - /* Not recursive. */ - if (self->write_func(self, len2opcode + len, 1) < 0) - goto finally; - goto memoize; + /* Use TUPLE{1,2,3} opcodes. */ + if (store_tuple_elements(self, args, len) < 0) + goto finally; + if (PyDict_GetItem(self->memo, py_tuple_id)) { + /* pop the len elements */ + for (i = 0; i < len; ++i) + if (self->write_func(self, &pop, 1) < 0) + goto finally; + /* fetch from memo */ + if (get(self, py_tuple_id) < 0) + goto finally; + res = 0; + goto finally; + } + /* Not recursive. */ + if (self->write_func(self, len2opcode + len, 1) < 0) + goto finally; + goto memoize; } /* proto < 2 and len > 0, or proto >= 2 and len > 3. * Generate MARK elt1 elt2 ... 
TUPLE */ if (self->write_func(self, &MARKv, 1) < 0) - goto finally; + goto finally; if (store_tuple_elements(self, args, len) < 0) - goto finally; + goto finally; if (PyDict_GetItem(self->memo, py_tuple_id)) { - /* pop the stack stuff we pushed */ - if (self->bin) { - if (self->write_func(self, &pop_mark, 1) < 0) - goto finally; - } - else { - /* Note that we pop one more than len, to remove - * the MARK too. - */ - for (i = 0; i <= len; i++) - if (self->write_func(self, &pop, 1) < 0) - goto finally; - } - /* fetch from memo */ - if (get(self, py_tuple_id) >= 0) - res = 0; - goto finally; + /* pop the stack stuff we pushed */ + if (self->bin) { + if (self->write_func(self, &pop_mark, 1) < 0) + goto finally; + } + else { + /* Note that we pop one more than len, to remove + * the MARK too. + */ + for (i = 0; i <= len; i++) + if (self->write_func(self, &pop, 1) < 0) + goto finally; + } + /* fetch from memo */ + if (get(self, py_tuple_id) >= 0) + res = 0; + goto finally; } /* Not recursive. */ if (self->write_func(self, &tuple, 1) < 0) - goto finally; + goto finally; memoize: if (put(self, args) >= 0) - res = 0; + res = 0; finally: Py_XDECREF(py_tuple_id); @@ -1540,64 +1540,64 @@ assert(iter != NULL); if (self->proto == 0) { - /* APPENDS isn't available; do one at a time. */ - for (;;) { - obj = PyIter_Next(iter); - if (obj == NULL) { - if (PyErr_Occurred()) - return -1; - break; - } - i = save(self, obj, 0); - Py_DECREF(obj); - if (i < 0) - return -1; - if (self->write_func(self, &append, 1) < 0) - return -1; - } - return 0; + /* APPENDS isn't available; do one at a time. */ + for (;;) { + obj = PyIter_Next(iter); + if (obj == NULL) { + if (PyErr_Occurred()) + return -1; + break; + } + i = save(self, obj, 0); + Py_DECREF(obj); + if (i < 0) + return -1; + if (self->write_func(self, &append, 1) < 0) + return -1; + } + return 0; } /* proto > 0: write in batches of BATCHSIZE. */ do { - /* Get next group of (no more than) BATCHSIZE elements. 
*/ - for (n = 0; n < BATCHSIZE; ++n) { - obj = PyIter_Next(iter); - if (obj == NULL) { - if (PyErr_Occurred()) - goto BatchFailed; - break; - } - slice[n] = obj; - } - - if (n > 1) { - /* Pump out MARK, slice[0:n], APPENDS. */ - if (self->write_func(self, &MARKv, 1) < 0) - goto BatchFailed; - for (i = 0; i < n; ++i) { - if (save(self, slice[i], 0) < 0) - goto BatchFailed; - } - if (self->write_func(self, &appends, 1) < 0) - goto BatchFailed; - } - else if (n == 1) { - if (save(self, slice[0], 0) < 0) - goto BatchFailed; - if (self->write_func(self, &append, 1) < 0) - goto BatchFailed; - } - - for (i = 0; i < n; ++i) { - Py_DECREF(slice[i]); - } + /* Get next group of (no more than) BATCHSIZE elements. */ + for (n = 0; n < BATCHSIZE; ++n) { + obj = PyIter_Next(iter); + if (obj == NULL) { + if (PyErr_Occurred()) + goto BatchFailed; + break; + } + slice[n] = obj; + } + + if (n > 1) { + /* Pump out MARK, slice[0:n], APPENDS. */ + if (self->write_func(self, &MARKv, 1) < 0) + goto BatchFailed; + for (i = 0; i < n; ++i) { + if (save(self, slice[i], 0) < 0) + goto BatchFailed; + } + if (self->write_func(self, &appends, 1) < 0) + goto BatchFailed; + } + else if (n == 1) { + if (save(self, slice[0], 0) < 0) + goto BatchFailed; + if (self->write_func(self, &append, 1) < 0) + goto BatchFailed; + } + + for (i = 0; i < n; ++i) { + Py_DECREF(slice[i]); + } } while (n == BATCHSIZE); return 0; BatchFailed: while (--n >= 0) { - Py_DECREF(slice[n]); + Py_DECREF(slice[n]); } return -1; } @@ -1611,45 +1611,45 @@ PyObject *iter; if (self->fast && !fast_save_enter(self, args)) - goto finally; + goto finally; /* Create an empty list. */ if (self->bin) { - s[0] = EMPTY_LIST; - len = 1; + s[0] = EMPTY_LIST; + len = 1; } else { - s[0] = MARK; - s[1] = LIST; - len = 2; + s[0] = MARK; + s[1] = LIST; + len = 2; } if (self->write_func(self, s, len) < 0) - goto finally; + goto finally; /* Get list length, and bow out early if empty. 
*/ if ((len = PyList_Size(args)) < 0) - goto finally; + goto finally; /* Memoize. */ if (len == 0) { - if (put(self, args) >= 0) - res = 0; - goto finally; + if (put(self, args) >= 0) + res = 0; + goto finally; } if (put2(self, args) < 0) - goto finally; + goto finally; /* Materialize the list elements. */ iter = PyObject_GetIter(args); if (iter == NULL) - goto finally; + goto finally; res = batch_list(self, iter); Py_DECREF(iter); finally: if (self->fast && !fast_save_leave(self, args)) - res = -1; + res = -1; return res; } @@ -1679,82 +1679,82 @@ assert(iter != NULL); if (self->proto == 0) { - /* SETITEMS isn't available; do one at a time. */ - for (;;) { - p = PyIter_Next(iter); - if (p == NULL) { - if (PyErr_Occurred()) - return -1; - break; - } - if (!PyTuple_Check(p) || PyTuple_Size(p) != 2) { - PyErr_SetString(PyExc_TypeError, "dict items " - "iterator must return 2-tuples"); - return -1; - } - i = save(self, PyTuple_GET_ITEM(p, 0), 0); - if (i >= 0) - i = save(self, PyTuple_GET_ITEM(p, 1), 0); - Py_DECREF(p); - if (i < 0) - return -1; - if (self->write_func(self, &setitem, 1) < 0) - return -1; - } - return 0; + /* SETITEMS isn't available; do one at a time. */ + for (;;) { + p = PyIter_Next(iter); + if (p == NULL) { + if (PyErr_Occurred()) + return -1; + break; + } + if (!PyTuple_Check(p) || PyTuple_Size(p) != 2) { + PyErr_SetString(PyExc_TypeError, "dict items " + "iterator must return 2-tuples"); + return -1; + } + i = save(self, PyTuple_GET_ITEM(p, 0), 0); + if (i >= 0) + i = save(self, PyTuple_GET_ITEM(p, 1), 0); + Py_DECREF(p); + if (i < 0) + return -1; + if (self->write_func(self, &setitem, 1) < 0) + return -1; + } + return 0; } /* proto > 0: write in batches of BATCHSIZE. */ do { - /* Get next group of (no more than) BATCHSIZE elements. 
*/ - for (n = 0; n < BATCHSIZE; ++n) { - p = PyIter_Next(iter); - if (p == NULL) { - if (PyErr_Occurred()) - goto BatchFailed; - break; - } - if (!PyTuple_Check(p) || PyTuple_Size(p) != 2) { - PyErr_SetString(PyExc_TypeError, "dict items " - "iterator must return 2-tuples"); - goto BatchFailed; - } - slice[n] = p; - } - - if (n > 1) { - /* Pump out MARK, slice[0:n], SETITEMS. */ - if (self->write_func(self, &MARKv, 1) < 0) - goto BatchFailed; - for (i = 0; i < n; ++i) { - p = slice[i]; - if (save(self, PyTuple_GET_ITEM(p, 0), 0) < 0) - goto BatchFailed; - if (save(self, PyTuple_GET_ITEM(p, 1), 0) < 0) - goto BatchFailed; - } - if (self->write_func(self, &setitems, 1) < 0) - goto BatchFailed; - } - else if (n == 1) { - p = slice[0]; - if (save(self, PyTuple_GET_ITEM(p, 0), 0) < 0) - goto BatchFailed; - if (save(self, PyTuple_GET_ITEM(p, 1), 0) < 0) - goto BatchFailed; - if (self->write_func(self, &setitem, 1) < 0) - goto BatchFailed; - } - - for (i = 0; i < n; ++i) { - Py_DECREF(slice[i]); - } + /* Get next group of (no more than) BATCHSIZE elements. */ + for (n = 0; n < BATCHSIZE; ++n) { + p = PyIter_Next(iter); + if (p == NULL) { + if (PyErr_Occurred()) + goto BatchFailed; + break; + } + if (!PyTuple_Check(p) || PyTuple_Size(p) != 2) { + PyErr_SetString(PyExc_TypeError, "dict items " + "iterator must return 2-tuples"); + goto BatchFailed; + } + slice[n] = p; + } + + if (n > 1) { + /* Pump out MARK, slice[0:n], SETITEMS. 
*/ + if (self->write_func(self, &MARKv, 1) < 0) + goto BatchFailed; + for (i = 0; i < n; ++i) { + p = slice[i]; + if (save(self, PyTuple_GET_ITEM(p, 0), 0) < 0) + goto BatchFailed; + if (save(self, PyTuple_GET_ITEM(p, 1), 0) < 0) + goto BatchFailed; + } + if (self->write_func(self, &setitems, 1) < 0) + goto BatchFailed; + } + else if (n == 1) { + p = slice[0]; + if (save(self, PyTuple_GET_ITEM(p, 0), 0) < 0) + goto BatchFailed; + if (save(self, PyTuple_GET_ITEM(p, 1), 0) < 0) + goto BatchFailed; + if (self->write_func(self, &setitem, 1) < 0) + goto BatchFailed; + } + + for (i = 0; i < n; ++i) { + Py_DECREF(slice[i]); + } } while (n == BATCHSIZE); return 0; BatchFailed: while (--n >= 0) { - Py_DECREF(slice[n]); + Py_DECREF(slice[n]); } return -1; } @@ -1768,48 +1768,48 @@ PyObject *items, *iter; if (self->fast && !fast_save_enter(self, args)) - goto finally; + goto finally; /* Create an empty dict. */ if (self->bin) { - s[0] = EMPTY_DICT; - len = 1; + s[0] = EMPTY_DICT; + len = 1; } else { - s[0] = MARK; - s[1] = DICT; - len = 2; + s[0] = MARK; + s[1] = DICT; + len = 2; } if (self->write_func(self, s, len) < 0) - goto finally; + goto finally; /* Get dict size, and bow out early if empty. */ if ((len = PyDict_Size(args)) < 0) - goto finally; + goto finally; if (len == 0) { - if (put(self, args) >= 0) - res = 0; - goto finally; + if (put(self, args) >= 0) + res = 0; + goto finally; } if (put2(self, args) < 0) - goto finally; + goto finally; /* Materialize the dict items. 
*/ items = PyObject_CallMethod(args, "items", "()"); if (items == NULL) - goto finally; + goto finally; iter = PyObject_GetIter(items); Py_DECREF(items); if (iter == NULL) - goto finally; + goto finally; res = batch_dict(self, iter); Py_DECREF(iter); finally: if (self->fast && !fast_save_leave(self, args)) - res = -1; + res = -1; return res; } @@ -1825,20 +1825,20 @@ static char global = GLOBAL; if (name) { - global_name = name; - Py_INCREF(global_name); + global_name = name; + Py_INCREF(global_name); } else { - if (!(global_name = PyObject_GetAttr(args, __name___str))) - goto finally; + if (!(global_name = PyObject_GetAttr(args, __name___str))) + goto finally; } if (!(module = whichmodule(args, global_name))) - goto finally; + goto finally; if ((module_size = PyString_Size(module)) < 0 || - (name_size = PyString_Size(global_name)) < 0) - goto finally; + (name_size = PyString_Size(global_name)) < 0) + goto finally; module_str = PyString_AS_STRING((PyStringObject *) module); name_str = PyString_AS_STRING((PyStringObject *) global_name); @@ -1847,101 +1847,101 @@ * but I don't know how to stop it. 
:-( */ mod = PyImport_ImportModule(module_str); if (mod == NULL) { - cPickle_ErrFormat(PicklingError, - "Can't pickle %s: import of module %s " - "failed", "OS", args, module); - goto finally; + cPickle_ErrFormat(PicklingError, + "Can't pickle %s: import of module %s " + "failed", "OS", args, module); + goto finally; } klass = PyObject_GetAttrString(mod, name_str); if (klass == NULL) { - cPickle_ErrFormat(PicklingError, - "Can't pickle %s: attribute lookup %s.%s " - "failed", "OSS", args, module, global_name); - goto finally; + cPickle_ErrFormat(PicklingError, + "Can't pickle %s: attribute lookup %s.%s " + "failed", "OSS", args, module, global_name); + goto finally; } if (klass != args) { - Py_DECREF(klass); - cPickle_ErrFormat(PicklingError, - "Can't pickle %s: it's not the same object " - "as %s.%s", "OSS", args, module, global_name); - goto finally; + Py_DECREF(klass); + cPickle_ErrFormat(PicklingError, + "Can't pickle %s: it's not the same object " + "as %s.%s", "OSS", args, module, global_name); + goto finally; } Py_DECREF(klass); if (self->proto >= 2) { - /* See whether this is in the extension registry, and if - * so generate an EXT opcode. - */ - PyObject *py_code; /* extension code as Python object */ - long code; /* extension code as C value */ - char c_str[5]; - int n; - - PyTuple_SET_ITEM(two_tuple, 0, module); - PyTuple_SET_ITEM(two_tuple, 1, global_name); - py_code = PyDict_GetItem(extension_registry, two_tuple); - if (py_code == NULL) - goto gen_global; /* not registered */ - - /* Verify py_code has the right type and value. */ - if (!PyInt_Check(py_code)) { - cPickle_ErrFormat(PicklingError, "Can't pickle %s: " - "extension code %s isn't an integer", - "OO", args, py_code); - goto finally; - } - code = PyInt_AS_LONG(py_code); - if (code <= 0 || code > 0x7fffffffL) { - cPickle_ErrFormat(PicklingError, "Can't pickle %s: " - "extension code %ld is out of range", - "Ol", args, code); - goto finally; - } - - /* Generate an EXT opcode. 
*/ - if (code <= 0xff) { - c_str[0] = EXT1; - c_str[1] = (char) code; - n = 2; - } - else if (code <= 0xffff) { - c_str[0] = EXT2; - c_str[1] = (char) (code & 0xff); - c_str[2] = (char) ((code >> 8) & 0xff); - n = 3; - } - else { - c_str[0] = EXT4; - c_str[1] = (char) (code & 0xff); - c_str[2] = (char) ((code >> 8) & 0xff); - c_str[3] = (char) ((code >> 16) & 0xff); - c_str[4] = (char) ((code >> 24) & 0xff); - n = 5; - } - - if (self->write_func(self, c_str, n) >= 0) - res = 0; - goto finally; /* and don't memoize */ + /* See whether this is in the extension registry, and if + * so generate an EXT opcode. + */ + PyObject *py_code; /* extension code as Python object */ + long code; /* extension code as C value */ + char c_str[5]; + int n; + + PyTuple_SET_ITEM(two_tuple, 0, module); + PyTuple_SET_ITEM(two_tuple, 1, global_name); + py_code = PyDict_GetItem(extension_registry, two_tuple); + if (py_code == NULL) + goto gen_global; /* not registered */ + + /* Verify py_code has the right type and value. */ + if (!PyInt_Check(py_code)) { + cPickle_ErrFormat(PicklingError, "Can't pickle %s: " + "extension code %s isn't an integer", + "OO", args, py_code); + goto finally; + } + code = PyInt_AS_LONG(py_code); + if (code <= 0 || code > 0x7fffffffL) { + cPickle_ErrFormat(PicklingError, "Can't pickle %s: " + "extension code %ld is out of range", + "Ol", args, code); + goto finally; + } + + /* Generate an EXT opcode. 
*/ + if (code <= 0xff) { + c_str[0] = EXT1; + c_str[1] = (char) code; + n = 2; + } + else if (code <= 0xffff) { + c_str[0] = EXT2; + c_str[1] = (char) (code & 0xff); + c_str[2] = (char) ((code >> 8) & 0xff); + n = 3; + } + else { + c_str[0] = EXT4; + c_str[1] = (char) (code & 0xff); + c_str[2] = (char) ((code >> 8) & 0xff); + c_str[3] = (char) ((code >> 16) & 0xff); + c_str[4] = (char) ((code >> 24) & 0xff); + n = 5; + } + + if (self->write_func(self, c_str, n) >= 0) + res = 0; + goto finally; /* and don't memoize */ } gen_global: if (self->write_func(self, &global, 1) < 0) - goto finally; + goto finally; if (self->write_func(self, module_str, module_size) < 0) - goto finally; + goto finally; if (self->write_func(self, "\n", 1) < 0) - goto finally; + goto finally; if (self->write_func(self, name_str, name_size) < 0) - goto finally; + goto finally; if (self->write_func(self, "\n", 1) < 0) - goto finally; + goto finally; if (put(self, args) < 0) - goto finally; + goto finally; res = 0; @@ -1964,44 +1964,44 @@ Py_INCREF(args); ARG_TUP(self, args); if (self->arg) { - pid = PyObject_Call(f, self->arg, NULL); - FREE_ARG_TUP(self); + pid = PyObject_Call(f, self->arg, NULL); + FREE_ARG_TUP(self); } if (!pid) - return -1; + return -1; if (pid != Py_None) { - if (!self->bin) { - if (!PyString_Check(pid)) { - PyErr_SetString(PicklingError, "persistent id must be string"); - goto finally; - } - - if (self->write_func(self, &persid, 1) < 0) - goto finally; - - if ((size = PyString_Size(pid)) < 0) - goto finally; - - if (self->write_func(self, - PyString_AS_STRING((PyStringObject *) pid), - size) < 0) - goto finally; - - if (self->write_func(self, "\n", 1) < 0) - goto finally; - - res = 1; - goto finally; - } - else if (save(self, pid, 1) >= 0) { - if (self->write_func(self, &binpersid, 1) < 0) - res = -1; - else - res = 1; - } + if (!self->bin) { + if (!PyString_Check(pid)) { + PyErr_SetString(PicklingError, "persistent id must be string"); + goto finally; + } + + if 
(self->write_func(self, &persid, 1) < 0) + goto finally; + + if ((size = PyString_Size(pid)) < 0) + goto finally; + + if (self->write_func(self, + PyString_AS_STRING((PyStringObject *) pid), + size) < 0) + goto finally; + + if (self->write_func(self, "\n", 1) < 0) + goto finally; + + res = 1; + goto finally; + } + else if (save(self, pid, 1) >= 0) { + if (self->write_func(self, &binpersid, 1) < 0) + res = -1; + else + res = 1; + } - goto finally; + goto finally; } res = 0; @@ -2031,129 +2031,129 @@ static char newobj = NEWOBJ; if (!PyArg_UnpackTuple(args, "save_reduce", 2, 5, - &callable, &argtup, &state, &listitems, &dictitems)) - return -1; + &callable, &argtup, &state, &listitems, &dictitems)) + return -1; if (!PyTuple_Check(argtup)) { - PyErr_SetString(PicklingError, "args from reduce() should be a tuple"); - return -1; + PyErr_SetString(PicklingError, "args from reduce() should be a tuple"); + return -1; } if (state == Py_None) - state = NULL; + state = NULL; if (listitems == Py_None) - listitems = NULL; + listitems = NULL; if (dictitems == Py_None) - dictitems = NULL; + dictitems = NULL; /* Protocol 2 special case: if callable's name is __newobj__, use * NEWOBJ. This consumes a lot of code. */ if (use_newobj) { - PyObject *temp = PyObject_GetAttr(callable, __name___str); + PyObject *temp = PyObject_GetAttr(callable, __name___str); - if (temp == NULL) { - if (PyErr_ExceptionMatches(PyExc_AttributeError)) - PyErr_Clear(); - else - return -1; - use_newobj = 0; - } - else { - use_newobj = PyString_Check(temp) && - strcmp(PyString_AS_STRING(temp), "__newobj__") == 0; - Py_DECREF(temp); - } + if (temp == NULL) { + if (PyErr_ExceptionMatches(PyExc_AttributeError)) + PyErr_Clear(); + else + return -1; + use_newobj = 0; + } + else { + use_newobj = PyString_Check(temp) && + strcmp(PyString_AS_STRING(temp), "__newobj__") == 0; + Py_DECREF(temp); + } } if (use_newobj) { - PyObject *cls; - PyObject *newargtup; - int n, i; - - /* Sanity checks. 
*/ - n = PyTuple_Size(argtup); - if (n < 1) { - PyErr_SetString(PicklingError, "__newobj__ arglist " "is empty"); - return -1; - } - - cls = PyTuple_GET_ITEM(argtup, 0); - if (!PyObject_HasAttrString(cls, "__new__")) { - PyErr_SetString(PicklingError, "args[0] from " - "__newobj__ args has no __new__"); - return -1; - } - - /* XXX How could ob be NULL? */ - if (ob != NULL) { - PyObject *ob_dot_class; - - ob_dot_class = PyObject_GetAttr(ob, __class___str); - if (ob_dot_class == NULL) { - if (PyErr_ExceptionMatches(PyExc_AttributeError)) - PyErr_Clear(); - else - return -1; - } - i = ob_dot_class != cls; /* true iff a problem */ - Py_XDECREF(ob_dot_class); - if (i) { - PyErr_SetString(PicklingError, "args[0] from " - "__newobj__ args has the wrong class"); - return -1; - } - } - - /* Save the class and its __new__ arguments. */ - if (save(self, cls, 0) < 0) - return -1; - - newargtup = PyTuple_New(n - 1); /* argtup[1:] */ - if (newargtup == NULL) - return -1; - for (i = 1; i < n; ++i) { - PyObject *temp = PyTuple_GET_ITEM(argtup, i); - Py_INCREF(temp); - PyTuple_SET_ITEM(newargtup, i - 1, temp); - } - i = save(self, newargtup, 0) < 0; - Py_DECREF(newargtup); - if (i < 0) - return -1; - - /* Add NEWOBJ opcode. */ - if (self->write_func(self, &newobj, 1) < 0) - return -1; + PyObject *cls; + PyObject *newargtup; + int n, i; + + /* Sanity checks. */ + n = PyTuple_Size(argtup); + if (n < 1) { + PyErr_SetString(PicklingError, "__newobj__ arglist " "is empty"); + return -1; + } + + cls = PyTuple_GET_ITEM(argtup, 0); + if (!PyObject_HasAttrString(cls, "__new__")) { + PyErr_SetString(PicklingError, "args[0] from " + "__newobj__ args has no __new__"); + return -1; + } + + /* XXX How could ob be NULL? 
*/ + if (ob != NULL) { + PyObject *ob_dot_class; + + ob_dot_class = PyObject_GetAttr(ob, __class___str); + if (ob_dot_class == NULL) { + if (PyErr_ExceptionMatches(PyExc_AttributeError)) + PyErr_Clear(); + else + return -1; + } + i = ob_dot_class != cls; /* true iff a problem */ + Py_XDECREF(ob_dot_class); + if (i) { + PyErr_SetString(PicklingError, "args[0] from " + "__newobj__ args has the wrong class"); + return -1; + } + } + + /* Save the class and its __new__ arguments. */ + if (save(self, cls, 0) < 0) + return -1; + + newargtup = PyTuple_New(n - 1); /* argtup[1:] */ + if (newargtup == NULL) + return -1; + for (i = 1; i < n; ++i) { + PyObject *temp = PyTuple_GET_ITEM(argtup, i); + Py_INCREF(temp); + PyTuple_SET_ITEM(newargtup, i - 1, temp); + } + i = save(self, newargtup, 0) < 0; + Py_DECREF(newargtup); + if (i < 0) + return -1; + + /* Add NEWOBJ opcode. */ + if (self->write_func(self, &newobj, 1) < 0) + return -1; } else { - /* Not using NEWOBJ. */ - if (save(self, callable, 0) < 0 || - save(self, argtup, 0) < 0 || - self->write_func(self, &reduce, 1) < 0) - return -1; + /* Not using NEWOBJ. */ + if (save(self, callable, 0) < 0 || + save(self, argtup, 0) < 0 || + self->write_func(self, &reduce, 1) < 0) + return -1; } /* Memoize. */ /* XXX How can ob be NULL? 
*/ if (ob != NULL) { - if (state && !PyDict_Check(state)) { - if (put2(self, ob) < 0) - return -1; - } - else if (put(self, ob) < 0) - return -1; + if (state && !PyDict_Check(state)) { + if (put2(self, ob) < 0) + return -1; + } + else if (put(self, ob) < 0) + return -1; } if (listitems && batch_list(self, listitems) < 0) - return -1; + return -1; if (dictitems && batch_dict(self, dictitems) < 0) - return -1; + return -1; if (state) { - if (save(self, state, 0) < 0 || self->write_func(self, &build, 1) < 0) - return -1; + if (save(self, state, 0) < 0 || self->write_func(self, &build, 1) < 0) + return -1; } return 0; @@ -2169,158 +2169,158 @@ int tmp, size; if (self->nesting++ > Py_GetRecursionLimit()) { - PyErr_SetString(PyExc_RuntimeError, - "maximum recursion depth exceeded"); - goto finally; + PyErr_SetString(PyExc_RuntimeError, + "maximum recursion depth exceeded"); + goto finally; } if (!pers_save && self->pers_func) { - if ((tmp = save_pers(self, args, self->pers_func)) != 0) { - res = tmp; - goto finally; - } + if ((tmp = save_pers(self, args, self->pers_func)) != 0) { + res = tmp; + goto finally; + } } if (args == Py_None) { - res = save_none(self, args); - goto finally; + res = save_none(self, args); + goto finally; } type = args->ob_type; switch (type->tp_name[0]) { case 'b': - if (args == Py_False || args == Py_True) { - res = save_bool(self, args); - goto finally; - } - break; + if (args == Py_False || args == Py_True) { + res = save_bool(self, args); + goto finally; + } + break; case 'i': - if (type == &PyLong_Type) { - res = save_long(self, args); - goto finally; - } - break; + if (type == &PyLong_Type) { + res = save_long(self, args); + goto finally; + } + break; case 'f': - if (type == &PyFloat_Type) { - res = save_float(self, args); - goto finally; - } - break; + if (type == &PyFloat_Type) { + res = save_float(self, args); + goto finally; + } + break; case 't': - if (type == &PyTuple_Type && PyTuple_Size(args) == 0) { - res = save_tuple(self, args); 
- goto finally; - } - break; + if (type == &PyTuple_Type && PyTuple_Size(args) == 0) { + res = save_tuple(self, args); + goto finally; + } + break; case 's': - if ((type == &PyString_Type) && (PyString_GET_SIZE(args) < 2)) { - res = save_string(self, args, 0); - goto finally; - } + if ((type == &PyString_Type) && (PyString_GET_SIZE(args) < 2)) { + res = save_string(self, args, 0); + goto finally; + } #ifdef Py_USING_UNICODE case 'u': - if ((type == &PyUnicode_Type) && (PyString_GET_SIZE(args) < 2)) { - res = save_unicode(self, args, 0); - goto finally; - } + if ((type == &PyUnicode_Type) && (PyString_GET_SIZE(args) < 2)) { + res = save_unicode(self, args, 0); + goto finally; + } #endif } if (args->ob_refcnt > 1) { - if (!(py_ob_id = PyLong_FromVoidPtr(args))) - goto finally; + if (!(py_ob_id = PyLong_FromVoidPtr(args))) + goto finally; - if (PyDict_GetItem(self->memo, py_ob_id)) { - if (get(self, py_ob_id) < 0) - goto finally; - - res = 0; - goto finally; - } + if (PyDict_GetItem(self->memo, py_ob_id)) { + if (get(self, py_ob_id) < 0) + goto finally; + + res = 0; + goto finally; + } } switch (type->tp_name[0]) { case 's': - if (type == &PyString_Type) { - res = save_string(self, args, 1); - goto finally; - } - break; + if (type == &PyString_Type) { + res = save_string(self, args, 1); + goto finally; + } + break; #ifdef Py_USING_UNICODE case 'u': - if (type == &PyUnicode_Type) { - res = save_unicode(self, args, 1); - goto finally; - } - break; + if (type == &PyUnicode_Type) { + res = save_unicode(self, args, 1); + goto finally; + } + break; #endif case 't': - if (type == &PyTuple_Type) { - res = save_tuple(self, args); - goto finally; - } - if (type == &PyType_Type) { - res = save_global(self, args, NULL); - goto finally; - } - break; + if (type == &PyTuple_Type) { + res = save_tuple(self, args); + goto finally; + } + if (type == &PyType_Type) { + res = save_global(self, args, NULL); + goto finally; + } + break; case 'l': - if (type == &PyList_Type) { - res = 
save_list(self, args); - goto finally; - } - break; + if (type == &PyList_Type) { + res = save_list(self, args); + goto finally; + } + break; case 'd': - if (type == &PyDict_Type) { - res = save_dict(self, args); - goto finally; - } - break; + if (type == &PyDict_Type) { + res = save_dict(self, args); + goto finally; + } + break; case 'i': - break; + break; case 'c': - break; + break; case 'f': - if (type == &PyFunction_Type) { - res = save_global(self, args, NULL); - if (res && PyErr_ExceptionMatches(PickleError)) { - /* fall back to reduce */ - PyErr_Clear(); - break; - } - goto finally; - } - break; + if (type == &PyFunction_Type) { + res = save_global(self, args, NULL); + if (res && PyErr_ExceptionMatches(PickleError)) { + /* fall back to reduce */ + PyErr_Clear(); + break; + } + goto finally; + } + break; case 'b': - if (type == &PyCFunction_Type) { - res = save_global(self, args, NULL); - goto finally; - } + if (type == &PyCFunction_Type) { + res = save_global(self, args, NULL); + goto finally; + } } if (!pers_save && self->inst_pers_func) { - if ((tmp = save_pers(self, args, self->inst_pers_func)) != 0) { - res = tmp; - goto finally; - } + if ((tmp = save_pers(self, args, self->inst_pers_func)) != 0) { + res = tmp; + goto finally; + } } if (PyType_IsSubtype(type, &PyType_Type)) { - res = save_global(self, args, NULL); - goto finally; + res = save_global(self, args, NULL); + goto finally; } /* Get a reduction callable, and call it. This may come from @@ -2329,73 +2329,73 @@ */ __reduce__ = PyDict_GetItem(dispatch_table, (PyObject *) type); if (__reduce__ != NULL) { - Py_INCREF(__reduce__); - Py_INCREF(args); - ARG_TUP(self, args); - if (self->arg) { - t = PyObject_Call(__reduce__, self->arg, NULL); - FREE_ARG_TUP(self); - } + Py_INCREF(__reduce__); + Py_INCREF(args); + ARG_TUP(self, args); + if (self->arg) { + t = PyObject_Call(__reduce__, self->arg, NULL); + FREE_ARG_TUP(self); + } } else { - /* Check for a __reduce_ex__ method. 
*/ - __reduce__ = PyObject_GetAttr(args, __reduce_ex___str); - if (__reduce__ != NULL) { - t = PyInt_FromLong(self->proto); - if (t != NULL) { - ARG_TUP(self, t); - t = NULL; - if (self->arg) { - t = PyObject_Call(__reduce__, self->arg, NULL); - FREE_ARG_TUP(self); - } - } - } - else { - if (PyErr_ExceptionMatches(PyExc_AttributeError)) - PyErr_Clear(); - else - goto finally; - /* Check for a __reduce__ method. */ - __reduce__ = PyObject_GetAttr(args, __reduce___str); - if (__reduce__ != NULL) { - t = PyObject_Call(__reduce__, empty_tuple, NULL); - } - else { - PyErr_SetObject(UnpickleableError, args); - goto finally; - } - } + /* Check for a __reduce_ex__ method. */ + __reduce__ = PyObject_GetAttr(args, __reduce_ex___str); + if (__reduce__ != NULL) { + t = PyInt_FromLong(self->proto); + if (t != NULL) { + ARG_TUP(self, t); + t = NULL; + if (self->arg) { + t = PyObject_Call(__reduce__, self->arg, NULL); + FREE_ARG_TUP(self); + } + } + } + else { + if (PyErr_ExceptionMatches(PyExc_AttributeError)) + PyErr_Clear(); + else + goto finally; + /* Check for a __reduce__ method. 
*/ + __reduce__ = PyObject_GetAttr(args, __reduce___str); + if (__reduce__ != NULL) { + t = PyObject_Call(__reduce__, empty_tuple, NULL); + } + else { + PyErr_SetObject(UnpickleableError, args); + goto finally; + } + } } if (t == NULL) - goto finally; + goto finally; if (PyString_Check(t)) { - res = save_global(self, args, t); - goto finally; + res = save_global(self, args, t); + goto finally; } if (!PyTuple_Check(t)) { - cPickle_ErrFormat(PicklingError, "Value returned by " - "%s must be string or tuple", "O", __reduce__); - goto finally; + cPickle_ErrFormat(PicklingError, "Value returned by " + "%s must be string or tuple", "O", __reduce__); + goto finally; } size = PyTuple_Size(t); if (size < 2 || size > 5) { - cPickle_ErrFormat(PicklingError, "tuple returned by " - "%s must contain 2 through 5 elements", - "O", __reduce__); - goto finally; + cPickle_ErrFormat(PicklingError, "tuple returned by " + "%s must contain 2 through 5 elements", + "O", __reduce__); + goto finally; } arg_tup = PyTuple_GET_ITEM(t, 1); if (!(PyTuple_Check(arg_tup) || arg_tup == Py_None)) { - cPickle_ErrFormat(PicklingError, "Second element of " - "tuple returned by %s must be a tuple", - "O", __reduce__); - goto finally; + cPickle_ErrFormat(PicklingError, "Second element of " + "tuple returned by %s must be a tuple", + "O", __reduce__); + goto finally; } res = save_reduce(self, t, args); @@ -2416,23 +2416,23 @@ static char stop = STOP; if (self->proto >= 2) { - char bytes[2]; + char bytes[2]; - bytes[0] = PROTO; - assert(self->proto >= 0 && self->proto < 256); - bytes[1] = (char) self->proto; - if (self->write_func(self, bytes, 2) < 0) - return -1; + bytes[0] = PROTO; + assert(self->proto >= 0 && self->proto < 256); + bytes[1] = (char) self->proto; + if (self->write_func(self, bytes, 2) < 0) + return -1; } if (save(self, args, 0) < 0) - return -1; + return -1; if (self->write_func(self, &stop, 1) < 0) - return -1; + return -1; if (self->write_func(self, NULL, 0) < 0) - return -1; + return 
-1; return 0; } @@ -2441,7 +2441,7 @@ Pickle_clear_memo(Picklerobject * self, PyObject * args) { if (self->memo) - PyDict_Clear(self->memo); + PyDict_Clear(self->memo); Py_INCREF(Py_None); return Py_None; } @@ -2457,18 +2457,18 @@ /* Can be called by Python code or C code */ if (args && !PyArg_ParseTuple(args, "|i:getvalue", &clear)) - return NULL; + return NULL; /* Check to make sure we are based on a list */ if (!Pdata_Check(self->file)) { - PyErr_SetString(PicklingError, - "Attempt to getvalue() a non-list-based pickler"); - return NULL; + PyErr_SetString(PicklingError, + "Attempt to getvalue() a non-list-based pickler"); + return NULL; } /* flush write buffer */ if (write_other(self, NULL, 0) < 0) - return NULL; + return NULL; data = (Pdata *) self->file; l = data->length; @@ -2476,112 +2476,112 @@ /* set up an array to hold get/put status */ lm = PyDict_Size(self->memo); if (lm < 0) - return NULL; + return NULL; lm++; have_get = malloc(lm); if (have_get == NULL) - return PyErr_NoMemory(); + return PyErr_NoMemory(); memset(have_get, 0, lm); /* Scan for gets. */ for (rsize = 0, i = l; --i >= 0;) { - k = data->data[i]; + k = data->data[i]; - if (PyString_Check(k)) - rsize += PyString_GET_SIZE(k); + if (PyString_Check(k)) + rsize += PyString_GET_SIZE(k); - else if (PyInt_Check(k)) { /* put */ - ik = PyInt_AsLong(k); - if (ik == -1 && PyErr_Occurred()) - goto err; - if (ik >= lm || ik == 0) { - PyErr_SetString(PicklingError, "Invalid get data"); - goto err; - } - if (have_get[ik]) /* with matching get */ - rsize += ik < 256 ? 
2 : 5; - } - - else if (!(PyTuple_Check(k) && - PyTuple_GET_SIZE(k) == 2 && - PyInt_Check((k = PyTuple_GET_ITEM(k, 0)))) - ) { - PyErr_SetString(PicklingError, "Unexpected data in internal list"); - goto err; - } - - else { /* put */ - ik = PyInt_AsLong(k); - if (ik == -1 && PyErr_Occurred()) - goto err; - if (ik >= lm || ik == 0) { - PyErr_SetString(PicklingError, "Invalid get data"); - return NULL; - } - have_get[ik] = 1; - rsize += ik < 256 ? 2 : 5; - } + else if (PyInt_Check(k)) { /* put */ + ik = PyInt_AsLong(k); + if (ik == -1 && PyErr_Occurred()) + goto err; + if (ik >= lm || ik == 0) { + PyErr_SetString(PicklingError, "Invalid get data"); + goto err; + } + if (have_get[ik]) /* with matching get */ + rsize += ik < 256 ? 2 : 5; + } + + else if (!(PyTuple_Check(k) && + PyTuple_GET_SIZE(k) == 2 && + PyInt_Check((k = PyTuple_GET_ITEM(k, 0)))) + ) { + PyErr_SetString(PicklingError, "Unexpected data in internal list"); + goto err; + } + + else { /* put */ + ik = PyInt_AsLong(k); + if (ik == -1 && PyErr_Occurred()) + goto err; + if (ik >= lm || ik == 0) { + PyErr_SetString(PicklingError, "Invalid get data"); + return NULL; + } + have_get[ik] = 1; + rsize += ik < 256 ? 
2 : 5; + } } /* Now generate the result */ r = PyString_FromStringAndSize(NULL, rsize); if (r == NULL) - goto err; + goto err; s = PyString_AS_STRING((PyStringObject *) r); for (i = 0; i < l; i++) { - k = data->data[i]; + k = data->data[i]; - if (PyString_Check(k)) { - ssize = PyString_GET_SIZE(k); - if (ssize) { - p = PyString_AS_STRING((PyStringObject *) k); - while (--ssize >= 0) - *s++ = *p++; - } - } - - else if (PyTuple_Check(k)) { /* get */ - ik = PyLong_AsLong(PyTuple_GET_ITEM(k, 0)); - if (ik == -1 && PyErr_Occurred()) - goto err; - if (ik < 256) { - *s++ = BINGET; - *s++ = (int) (ik & 0xff); - } - else { - *s++ = LONG_BINGET; - *s++ = (int) (ik & 0xff); - *s++ = (int) ((ik >> 8) & 0xff); - *s++ = (int) ((ik >> 16) & 0xff); - *s++ = (int) ((ik >> 24) & 0xff); - } - } - - else { /* put */ - ik = PyLong_AsLong(k); - if (ik == -1 && PyErr_Occurred()) - goto err; - - if (have_get[ik]) { /* with matching get */ - if (ik < 256) { - *s++ = BINPUT; - *s++ = (int) (ik & 0xff); - } - else { - *s++ = LONG_BINPUT; - *s++ = (int) (ik & 0xff); - *s++ = (int) ((ik >> 8) & 0xff); - *s++ = (int) ((ik >> 16) & 0xff); - *s++ = (int) ((ik >> 24) & 0xff); - } - } - } + if (PyString_Check(k)) { + ssize = PyString_GET_SIZE(k); + if (ssize) { + p = PyString_AS_STRING((PyStringObject *) k); + while (--ssize >= 0) + *s++ = *p++; + } + } + + else if (PyTuple_Check(k)) { /* get */ + ik = PyLong_AsLong(PyTuple_GET_ITEM(k, 0)); + if (ik == -1 && PyErr_Occurred()) + goto err; + if (ik < 256) { + *s++ = BINGET; + *s++ = (int) (ik & 0xff); + } + else { + *s++ = LONG_BINGET; + *s++ = (int) (ik & 0xff); + *s++ = (int) ((ik >> 8) & 0xff); + *s++ = (int) ((ik >> 16) & 0xff); + *s++ = (int) ((ik >> 24) & 0xff); + } + } + + else { /* put */ + ik = PyLong_AsLong(k); + if (ik == -1 && PyErr_Occurred()) + goto err; + + if (have_get[ik]) { /* with matching get */ + if (ik < 256) { + *s++ = BINPUT; + *s++ = (int) (ik & 0xff); + } + else { + *s++ = LONG_BINPUT; + *s++ = (int) (ik & 0xff); + *s++ = 
(int) ((ik >> 8) & 0xff); + *s++ = (int) ((ik >> 16) & 0xff); + *s++ = (int) ((ik >> 24) & 0xff); + } + } + } } if (clear) { - PyDict_Clear(self->memo); - Pdata_clear(data, 0); + PyDict_Clear(self->memo); + Pdata_clear(data, 0); } free(have_get); @@ -2598,13 +2598,13 @@ int get = 0; if (!(PyArg_ParseTuple(args, "O|i:dump", &ob, &get))) - return NULL; + return NULL; if (dump(self, ob) < 0) - return NULL; + return NULL; if (get) - return Pickle_getvalue(self, NULL); + return Pickle_getvalue(self, NULL); /* XXX Why does dump() return self? */ Py_INCREF(self); @@ -2615,12 +2615,12 @@ static struct PyMethodDef Pickler_methods[] = { {"dump", (PyCFunction) Pickler_dump, METH_VARARGS, PyDoc_STR("dump(object) -- " - "Write an object in pickle format to the object's pickle stream")}, + "Write an object in pickle format to the object's pickle stream")}, {"clear_memo", (PyCFunction) Pickle_clear_memo, METH_NOARGS, PyDoc_STR("clear_memo() -- Clear the picklers memo")}, {"getvalue", (PyCFunction) Pickle_getvalue, METH_VARARGS, PyDoc_STR("getvalue() -- Finish picking a list-based pickle")}, - {NULL, NULL} /* sentinel */ + {NULL, NULL} /* sentinel */ }; @@ -2630,17 +2630,17 @@ Picklerobject *self; if (proto < 0) - proto = HIGHEST_PROTOCOL; + proto = HIGHEST_PROTOCOL; if (proto > HIGHEST_PROTOCOL) { - PyErr_Format(PyExc_ValueError, "pickle protocol %d asked for; " - "the highest available protocol is %d", - proto, HIGHEST_PROTOCOL); - return NULL; + PyErr_Format(PyExc_ValueError, "pickle protocol %d asked for; " + "the highest available protocol is %d", + proto, HIGHEST_PROTOCOL); + return NULL; } self = PyObject_GC_New(Picklerobject, &Picklertype); if (self == NULL) - return NULL; + return NULL; self->proto = proto; self->bin = proto > 0; self->fp = NULL; @@ -2659,49 +2659,49 @@ self->file = NULL; if (file) - Py_INCREF(file); + Py_INCREF(file); else { - file = Pdata_New(); - if (file == NULL) - goto err; + file = Pdata_New(); + if (file == NULL) + goto err; } self->file = file; if 
(!(self->memo = PyDict_New())) - goto err; + goto err; if (PyFile_Check(file)) { - self->fp = PyFile_AsFile(file); - if (self->fp == NULL) { - PyErr_SetString(PyExc_ValueError, "I/O operation on closed file"); - goto err; - } - self->write_func = write_file; + self->fp = PyFile_AsFile(file); + if (self->fp == NULL) { + PyErr_SetString(PyExc_ValueError, "I/O operation on closed file"); + goto err; + } + self->write_func = write_file; } else if (PycStringIO_OutputCheck(file)) { - self->write_func = write_cStringIO; + self->write_func = write_cStringIO; } else if (file == Py_None) { - self->write_func = write_none; + self->write_func = write_none; } else { - self->write_func = write_other; + self->write_func = write_other; - if (!Pdata_Check(file)) { - self->write = PyObject_GetAttr(file, write_str); - if (!self->write) { - PyErr_Clear(); - PyErr_SetString(PyExc_TypeError, - "argument must have 'write' " "attribute"); - goto err; - } - } - - self->write_buf = (char *) PyMem_Malloc(WRITE_BUF_SIZE); - if (self->write_buf == NULL) { - PyErr_NoMemory(); - goto err; - } + if (!Pdata_Check(file)) { + self->write = PyObject_GetAttr(file, write_str); + if (!self->write) { + PyErr_Clear(); + PyErr_SetString(PyExc_TypeError, + "argument must have 'write' " "attribute"); + goto err; + } + } + + self->write_buf = (char *) PyMem_Malloc(WRITE_BUF_SIZE); + if (self->write_buf == NULL) { + PyErr_NoMemory(); + goto err; + } } self->dispatch_table = dispatch_table; @@ -2731,11 +2731,11 @@ * far enough to figure out what it means. 
*/ if (!PyArg_ParseTuple(args, "|i:Pickler", &proto)) { - PyErr_Clear(); - proto = 0; - if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|i:Pickler", - kwlist, &file, &proto)) - return NULL; + PyErr_Clear(); + proto = 0; + if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|i:Pickler", + kwlist, &file, &proto)) + return NULL; } return (PyObject *) newPicklerobject(file, proto); } @@ -2789,9 +2789,9 @@ Pickler_get_pers_func(Picklerobject * p) { if (p->pers_func == NULL) - PyErr_SetString(PyExc_AttributeError, "persistent_id"); + PyErr_SetString(PyExc_AttributeError, "persistent_id"); else - Py_INCREF(p->pers_func); + Py_INCREF(p->pers_func); return p->pers_func; } @@ -2799,9 +2799,9 @@ Pickler_set_pers_func(Picklerobject * p, PyObject * v) { if (v == NULL) { - PyErr_SetString(PyExc_TypeError, - "attribute deletion is not supported"); - return -1; + PyErr_SetString(PyExc_TypeError, + "attribute deletion is not supported"); + return -1; } Py_XDECREF(p->pers_func); Py_INCREF(v); @@ -2813,9 +2813,9 @@ Pickler_set_inst_pers_func(Picklerobject * p, PyObject * v) { if (v == NULL) { - PyErr_SetString(PyExc_TypeError, - "attribute deletion is not supported"); - return -1; + PyErr_SetString(PyExc_TypeError, + "attribute deletion is not supported"); + return -1; } Py_XDECREF(p->inst_pers_func); Py_INCREF(v); @@ -2827,9 +2827,9 @@ Pickler_get_memo(Picklerobject * p) { if (p->memo == NULL) - PyErr_SetString(PyExc_AttributeError, "memo"); + PyErr_SetString(PyExc_AttributeError, "memo"); else - Py_INCREF(p->memo); + Py_INCREF(p->memo); return p->memo; } @@ -2837,13 +2837,13 @@ Pickler_set_memo(Picklerobject * p, PyObject * v) { if (v == NULL) { - PyErr_SetString(PyExc_TypeError, - "attribute deletion is not supported"); - return -1; + PyErr_SetString(PyExc_TypeError, + "attribute deletion is not supported"); + return -1; } if (!PyDict_Check(v)) { - PyErr_SetString(PyExc_TypeError, "memo must be a dictionary"); - return -1; + PyErr_SetString(PyExc_TypeError, "memo must be a dictionary"); 
+ return -1; } Py_XDECREF(p->memo); Py_INCREF(v); @@ -2878,36 +2878,36 @@ static PyTypeObject Picklertype = { PyObject_HEAD_INIT(NULL) - 0, /*ob_size */ - "cPickle.Pickler", /*tp_name */ - sizeof(Picklerobject), /*tp_basicsize */ + 0, /*ob_size */ + "cPickle.Pickler", /*tp_name */ + sizeof(Picklerobject), /*tp_basicsize */ 0, - (destructor) Pickler_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_compare */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - PyObject_GenericGetAttr, /* tp_getattro */ - PyObject_GenericSetAttr, /* tp_setattro */ - 0, /* tp_as_buffer */ + (destructor) Pickler_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_compare */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + PyObject_GenericGetAttr, /* tp_getattro */ + PyObject_GenericSetAttr, /* tp_setattro */ + 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, - Picklertype__doc__, /* tp_doc */ - (traverseproc) Pickler_traverse, /* tp_traverse */ - (inquiry) Pickler_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - Pickler_methods, /* tp_methods */ - Pickler_members, /* tp_members */ - Pickler_getsets, /* tp_getset */ + Picklertype__doc__, /* tp_doc */ + (traverseproc) Pickler_traverse, /* tp_traverse */ + (inquiry) Pickler_clear, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + Pickler_methods, /* tp_methods */ + Pickler_members, /* tp_members */ + Pickler_getsets, /* tp_getset */ }; static PyObject * @@ -2916,29 +2916,29 @@ PyObject *global = 0, *module; if (fc) { - if (fc == Py_None) { - 
PyErr_SetString(UnpicklingError, "Global and instance " - "pickles are not supported."); - return NULL; - } - return PyObject_CallFunctionObjArgs(fc, py_module_name, - py_global_name, NULL); + if (fc == Py_None) { + PyErr_SetString(UnpicklingError, "Global and instance " + "pickles are not supported."); + return NULL; + } + return PyObject_CallFunctionObjArgs(fc, py_module_name, + py_global_name, NULL); } module = PySys_GetObject("modules"); if (module == NULL) - return NULL; + return NULL; module = PyDict_GetItem(module, py_module_name); if (module == NULL) { - module = PyImport_Import(py_module_name); - if (!module) - return NULL; - global = PyObject_GetAttr(module, py_global_name); - Py_DECREF(module); + module = PyImport_Import(py_module_name); + if (!module) + return NULL; + global = PyObject_GetAttr(module, py_global_name); + Py_DECREF(module); } else - global = PyObject_GetAttr(module, py_global_name); + global = PyObject_GetAttr(module, py_global_name); return global; } @@ -2946,8 +2946,8 @@ marker(Unpicklerobject * self) { if (self->num_marks < 1) { - PyErr_SetString(UnpicklingError, "could not find MARK"); - return -1; + PyErr_SetString(UnpicklingError, "could not find MARK"); + return -1; } return self->marks[--self->num_marks]; @@ -2977,35 +2977,35 @@ long l; if ((len = self->readline_func(self, &s)) < 0) - return -1; + return -1; if (len < 2) - return bad_readline(); + return bad_readline(); if (!(s = pystrndup(s, len))) - return -1; + return -1; errno = 0; l = strtol(s, &endptr, 0); if (errno || (*endptr != '\n') || (endptr[1] != '\0')) { - /* Hm, maybe we've got something long. Let's try reading - * it as a Python long object. */ - errno = 0; - py_int = PyLong_FromString(s, NULL, 0); - if (py_int == NULL) { - PyErr_SetString(PyExc_ValueError, - "could not convert string to int"); - goto finally; - } + /* Hm, maybe we've got something long. Let's try reading + * it as a Python long object. 
*/ + errno = 0; + py_int = PyLong_FromString(s, NULL, 0); + if (py_int == NULL) { + PyErr_SetString(PyExc_ValueError, + "could not convert string to int"); + goto finally; + } } else { - if (len == 3 && (l == 0 || l == 1)) { - if (!(py_int = PyBool_FromLong(l))) - goto finally; - } - else { - if (!(py_int = PyInt_FromLong(l))) - goto finally; - } + if (len == 3 && (l == 0 || l == 1)) { + if (!(py_int = PyBool_FromLong(l))) + goto finally; + } + else { + if (!(py_int = PyInt_FromLong(l))) + goto finally; + } } free(s); @@ -3039,8 +3039,8 @@ long l; for (i = 0, l = 0L; i < x; i++) { - c = (unsigned char) s[i]; - l |= (long) c << (i * 8); + c = (unsigned char) s[i]; + l |= (long) c << (i * 8); } #if SIZEOF_LONG > 4 /* Unlike BININT1 and BININT2, BININT (more accurately BININT4) @@ -3048,7 +3048,7 @@ * to extend a BININT's sign bit to the full width. */ if (x == 4 && l & (1L << 31)) - l |= (~0L) << 32; + l |= (~0L) << 32; #endif return l; } @@ -3063,7 +3063,7 @@ l = calc_binint(s, x); if (!(py_int = PyInt_FromLong(l))) - return -1; + return -1; PDATA_PUSH(self->stack, py_int, -1); return 0; @@ -3076,7 +3076,7 @@ char *s; if (self->read_func(self, &s, 4) < 0) - return -1; + return -1; return load_binintx(self, s, 4); } @@ -3088,7 +3088,7 @@ char *s; if (self->read_func(self, &s, 1) < 0) - return -1; + return -1; return load_binintx(self, s, 1); } @@ -3100,7 +3100,7 @@ char *s; if (self->read_func(self, &s, 2) < 0) - return -1; + return -1; return load_binintx(self, s, 2); } @@ -3113,14 +3113,14 @@ int len, res = -1; if ((len = self->readline_func(self, &s)) < 0) - return -1; + return -1; if (len < 2) - return bad_readline(); + return bad_readline(); if (!(s = pystrndup(s, len))) - return -1; + return -1; if (!(l = PyLong_FromString(s, &end, 0))) - goto finally; + goto finally; free(s); PDATA_PUSH(self->stack, l, -1); @@ -3146,30 +3146,30 @@ assert(size == 1 || size == 4); i = self->read_func(self, &nbytes, size); if (i < 0) - return -1; + return -1; size = 
calc_binint(nbytes, size); if (size < 0) { - /* Corrupt or hostile pickle -- we never write one like - * this. - */ - PyErr_SetString(UnpicklingError, "LONG pickle has negative " - "byte count"); - return -1; + /* Corrupt or hostile pickle -- we never write one like + * this. + */ + PyErr_SetString(UnpicklingError, "LONG pickle has negative " + "byte count"); + return -1; } if (size == 0) - along = PyLong_FromLong(0L); + along = PyLong_FromLong(0L); else { - /* Read the raw little-endian bytes & convert. */ - i = self->read_func(self, (char **) &pdata, size); - if (i < 0) - return -1; - along = _PyLong_FromByteArray(pdata, (size_t) size, - 1 /* little endian */ , 1 /* signed */ ); + /* Read the raw little-endian bytes & convert. */ + i = self->read_func(self, (char **) &pdata, size); + if (i < 0) + return -1; + along = _PyLong_FromByteArray(pdata, (size_t) size, + 1 /* little endian */ , 1 /* signed */ ); } if (along == NULL) - return -1; + return -1; PDATA_PUSH(self->stack, along, -1); return 0; } @@ -3183,22 +3183,22 @@ double d; if ((len = self->readline_func(self, &s)) < 0) - return -1; + return -1; if (len < 2) - return bad_readline(); + return bad_readline(); if (!(s = pystrndup(s, len))) - return -1; + return -1; errno = 0; d = PyOS_ascii_strtod(s, &endptr); if (errno || (endptr[0] != '\n') || (endptr[1] != '\0')) { - PyErr_SetString(PyExc_ValueError, "could not convert string to float"); - goto finally; + PyErr_SetString(PyExc_ValueError, "could not convert string to float"); + goto finally; } if (!(py_float = PyFloat_FromDouble(d))) - goto finally; + goto finally; free(s); PDATA_PUSH(self->stack, py_float, -1); @@ -3218,15 +3218,15 @@ char *p; if (self->read_func(self, &p, 8) < 0) - return -1; + return -1; x = _PyFloat_Unpack8((unsigned char *) p, 0); if (x == -1.0 && PyErr_Occurred()) - return -1; + return -1; py_float = PyFloat_FromDouble(x); if (py_float == NULL) - return -1; + return -1; PDATA_PUSH(self->stack, py_float, -1); return 0; @@ -3240,35 
+3240,35 @@ char *s, *p; if ((len = self->readline_func(self, &s)) < 0) - return -1; + return -1; if (len < 2) - return bad_readline(); + return bad_readline(); if (!(s = pystrndup(s, len))) - return -1; + return -1; /* Strip outermost quotes */ while (s[len - 1] <= ' ') - len--; + len--; if (s[0] == '"' && s[len - 1] == '"') { - s[len - 1] = '\0'; - p = s + 1; - len -= 2; + s[len - 1] = '\0'; + p = s + 1; + len -= 2; } else if (s[0] == '\'' && s[len - 1] == '\'') { - s[len - 1] = '\0'; - p = s + 1; - len -= 2; + s[len - 1] = '\0'; + p = s + 1; + len -= 2; } else - goto insecure; - /********************************************/ + goto insecure; + /********************************************/ str = PyString_DecodeEscape(p, len, NULL, 0, NULL); free(s); if (str) { - PDATA_PUSH(self->stack, str, -1); - res = 0; + PDATA_PUSH(self->stack, str, -1); + res = 0; } return res; @@ -3287,15 +3287,15 @@ char *s; if (self->read_func(self, &s, 4) < 0) - return -1; + return -1; l = calc_binint(s, 4); if (self->read_func(self, &s, l) < 0) - return -1; + return -1; if (!(py_string = PyString_FromStringAndSize(s, l))) - return -1; + return -1; PDATA_PUSH(self->stack, py_string, -1); return 0; @@ -3310,15 +3310,15 @@ char *s; if (self->read_func(self, &s, 1) < 0) - return -1; + return -1; l = (unsigned char) s[0]; if (self->read_func(self, &s, l) < 0) - return -1; + return -1; if (!(py_string = PyString_FromStringAndSize(s, l))) - return -1; + return -1; PDATA_PUSH(self->stack, py_string, -1); return 0; @@ -3334,12 +3334,12 @@ char *s; if ((len = self->readline_func(self, &s)) < 0) - return -1; + return -1; if (len < 1) - return bad_readline(); + return bad_readline(); if (!(str = PyUnicode_DecodeRawUnicodeEscape(s, len - 1, NULL))) - goto finally; + goto finally; PDATA_PUSH(self->stack, str, -1); return 0; @@ -3359,15 +3359,15 @@ char *s; if (self->read_func(self, &s, 4) < 0) - return -1; + return -1; l = calc_binint(s, 4); if (self->read_func(self, &s, l) < 0) - return -1; + 
return -1; if (!(unicode = PyUnicode_DecodeUTF8(s, l, NULL))) - return -1; + return -1; PDATA_PUSH(self->stack, unicode, -1); return 0; @@ -3382,9 +3382,9 @@ int i; if ((i = marker(self)) < 0) - return -1; + return -1; if (!(tup = Pdata_popTuple(self->stack, i))) - return -1; + return -1; PDATA_PUSH(self->stack, tup, -1); return 0; } @@ -3395,15 +3395,15 @@ PyObject *tup = PyTuple_New(len); if (tup == NULL) - return -1; + return -1; while (--len >= 0) { - PyObject *element; + PyObject *element; - PDATA_POP(self->stack, element); - if (element == NULL) - return -1; - PyTuple_SET_ITEM(tup, len, element); + PDATA_POP(self->stack, element); + if (element == NULL) + return -1; + PyTuple_SET_ITEM(tup, len, element); } PDATA_PUSH(self->stack, tup, -1); return 0; @@ -3415,7 +3415,7 @@ PyObject *list; if (!(list = PyList_New(0))) - return -1; + return -1; PDATA_PUSH(self->stack, list, -1); return 0; } @@ -3426,7 +3426,7 @@ PyObject *dict; if (!(dict = PyDict_New())) - return -1; + return -1; PDATA_PUSH(self->stack, dict, -1); return 0; } @@ -3439,9 +3439,9 @@ int i; if ((i = marker(self)) < 0) - return -1; + return -1; if (!(list = Pdata_popList(self->stack, i))) - return -1; + return -1; PDATA_PUSH(self->stack, list, -1); return 0; } @@ -3453,19 +3453,19 @@ int i, j, k; if ((i = marker(self)) < 0) - return -1; + return -1; j = self->stack->length; if (!(dict = PyDict_New())) - return -1; + return -1; for (k = i + 1; k < j; k += 2) { - key = self->stack->data[k - 1]; - value = self->stack->data[k]; - if (PyDict_SetItem(dict, key, value) < 0) { - Py_DECREF(dict); - return -1; - } + key = self->stack->data[k - 1]; + value = self->stack->data[k]; + if (PyDict_SetItem(dict, key, value) < 0) { + Py_DECREF(dict); + return -1; + } } Pdata_clear(self->stack, i); PDATA_PUSH(self->stack, dict, -1); @@ -3478,21 +3478,21 @@ PyObject *r = 0; if ((r = PyObject_CallObject(cls, args))) - return r; + return r; { - PyObject *tp, *v, *tb, *tmp_value; + PyObject *tp, *v, *tb, *tmp_value; - 
PyErr_Fetch(&tp, &v, &tb); - tmp_value = v; - /* NULL occurs when there was a KeyboardInterrupt */ - if (tmp_value == NULL) - tmp_value = Py_None; - if ((r = PyTuple_Pack(3, tmp_value, cls, args))) { - Py_XDECREF(v); - v = r; - } - PyErr_Restore(tp, v, tb); + PyErr_Fetch(&tp, &v, &tb); + tmp_value = v; + /* NULL occurs when there was a KeyboardInterrupt */ + if (tmp_value == NULL) + tmp_value = Py_None; + if ((r = PyTuple_Pack(3, tmp_value, cls, args))) { + Py_XDECREF(v); + v = r; + } + PyErr_Restore(tp, v, tb); } return NULL; } @@ -3505,18 +3505,18 @@ int i; if ((i = marker(self)) < 0) - return -1; + return -1; if (!(tup = Pdata_popTuple(self->stack, i + 1))) - return -1; + return -1; PDATA_POP(self->stack, class); if (class) { - obj = Instance_New(class, tup); - Py_DECREF(class); + obj = Instance_New(class, tup); + Py_DECREF(class); } Py_DECREF(tup); if (!obj) - return -1; + return -1; PDATA_PUSH(self->stack, obj, -1); return 0; } @@ -3530,37 +3530,37 @@ char *s; if ((i = marker(self)) < 0) - return -1; + return -1; if ((len = self->readline_func(self, &s)) < 0) - return -1; + return -1; if (len < 2) - return bad_readline(); + return bad_readline(); module_name = PyString_FromStringAndSize(s, len - 1); if (!module_name) - return -1; + return -1; if ((len = self->readline_func(self, &s)) >= 0) { - if (len < 2) - return bad_readline(); - if ((class_name = PyString_FromStringAndSize(s, len - 1))) { - class = find_class(module_name, class_name, self->find_class); - Py_DECREF(class_name); - } + if (len < 2) + return bad_readline(); + if ((class_name = PyString_FromStringAndSize(s, len - 1))) { + class = find_class(module_name, class_name, self->find_class); + Py_DECREF(class_name); + } } Py_DECREF(module_name); if (!class) - return -1; + return -1; if ((tup = Pdata_popTuple(self->stack, i))) { - obj = Instance_New(class, tup); - Py_DECREF(tup); + obj = Instance_New(class, tup); + Py_DECREF(tup); } Py_DECREF(class); if (!obj) - return -1; + return -1; 
PDATA_PUSH(self->stack, obj, -1); return 0; @@ -3571,7 +3571,7 @@ { PyObject *args = NULL; PyObject *clsraw = NULL; - PyTypeObject *cls; /* clsraw cast to its true type */ + PyTypeObject *cls; /* clsraw cast to its true type */ PyObject *obj; /* Stack is ... cls argtuple, and we want to call @@ -3579,31 +3579,31 @@ */ PDATA_POP(self->stack, args); if (args == NULL) - goto Fail; + goto Fail; if (!PyTuple_Check(args)) { - PyErr_SetString(UnpicklingError, "NEWOBJ expected an arg " "tuple."); - goto Fail; + PyErr_SetString(UnpicklingError, "NEWOBJ expected an arg " "tuple."); + goto Fail; } PDATA_POP(self->stack, clsraw); cls = (PyTypeObject *) clsraw; if (cls == NULL) - goto Fail; + goto Fail; if (!PyType_Check(cls)) { - PyErr_SetString(UnpicklingError, "NEWOBJ class argument " - "isn't a type object"); - goto Fail; + PyErr_SetString(UnpicklingError, "NEWOBJ class argument " + "isn't a type object"); + goto Fail; } if (cls->tp_new == NULL) { - PyErr_SetString(UnpicklingError, "NEWOBJ class argument " - "has NULL tp_new"); - goto Fail; + PyErr_SetString(UnpicklingError, "NEWOBJ class argument " + "has NULL tp_new"); + goto Fail; } /* Call __new__. 
*/ obj = cls->tp_new(cls, args, NULL); if (obj == NULL) - goto Fail; + goto Fail; Py_DECREF(args); Py_DECREF(clsraw); @@ -3624,27 +3624,27 @@ char *s; if ((len = self->readline_func(self, &s)) < 0) - return -1; + return -1; if (len < 2) - return bad_readline(); + return bad_readline(); module_name = PyString_FromStringAndSize(s, len - 1); if (!module_name) - return -1; + return -1; if ((len = self->readline_func(self, &s)) >= 0) { - if (len < 2) { - Py_DECREF(module_name); - return bad_readline(); - } - if ((class_name = PyString_FromStringAndSize(s, len - 1))) { - class = find_class(module_name, class_name, self->find_class); - Py_DECREF(class_name); - } + if (len < 2) { + Py_DECREF(module_name); + return bad_readline(); + } + if ((class_name = PyString_FromStringAndSize(s, len - 1))) { + class = find_class(module_name, class_name, self->find_class); + Py_DECREF(class_name); + } } Py_DECREF(module_name); if (!class) - return -1; + return -1; PDATA_PUSH(self->stack, class, -1); return 0; } @@ -3658,40 +3658,40 @@ char *s; if (self->pers_func) { - if ((len = self->readline_func(self, &s)) < 0) - return -1; - if (len < 2) - return bad_readline(); - - pid = PyString_FromStringAndSize(s, len - 1); - if (!pid) - return -1; - - if (PyList_Check(self->pers_func)) { - if (PyList_Append(self->pers_func, pid) < 0) { - Py_DECREF(pid); - return -1; - } - } - else { - ARG_TUP(self, pid); - if (self->arg) { - pid = PyObject_Call(self->pers_func, self->arg, NULL); - FREE_ARG_TUP(self); - } - } + if ((len = self->readline_func(self, &s)) < 0) + return -1; + if (len < 2) + return bad_readline(); + + pid = PyString_FromStringAndSize(s, len - 1); + if (!pid) + return -1; + + if (PyList_Check(self->pers_func)) { + if (PyList_Append(self->pers_func, pid) < 0) { + Py_DECREF(pid); + return -1; + } + } + else { + ARG_TUP(self, pid); + if (self->arg) { + pid = PyObject_Call(self->pers_func, self->arg, NULL); + FREE_ARG_TUP(self); + } + } - if (!pid) - return -1; + if (!pid) + return -1; - 
PDATA_PUSH(self->stack, pid, -1); - return 0; + PDATA_PUSH(self->stack, pid, -1); + return 0; } else { - PyErr_SetString(UnpicklingError, - "A load persistent id instruction was encountered,\n" - "but no persistent_load function was specified."); - return -1; + PyErr_SetString(UnpicklingError, + "A load persistent id instruction was encountered,\n" + "but no persistent_load function was specified."); + return -1; } } @@ -3701,34 +3701,34 @@ PyObject *pid = 0; if (self->pers_func) { - PDATA_POP(self->stack, pid); - if (!pid) - return -1; - - if (PyList_Check(self->pers_func)) { - if (PyList_Append(self->pers_func, pid) < 0) { - Py_DECREF(pid); - return -1; - } - } - else { - ARG_TUP(self, pid); - if (self->arg) { - pid = PyObject_Call(self->pers_func, self->arg, NULL); - FREE_ARG_TUP(self); - } - if (!pid) - return -1; - } + PDATA_POP(self->stack, pid); + if (!pid) + return -1; + + if (PyList_Check(self->pers_func)) { + if (PyList_Append(self->pers_func, pid) < 0) { + Py_DECREF(pid); + return -1; + } + } + else { + ARG_TUP(self, pid); + if (self->arg) { + pid = PyObject_Call(self->pers_func, self->arg, NULL); + FREE_ARG_TUP(self); + } + if (!pid) + return -1; + } - PDATA_PUSH(self->stack, pid, -1); - return 0; + PDATA_PUSH(self->stack, pid, -1); + return 0; } else { - PyErr_SetString(UnpicklingError, - "A load persistent id instruction was encountered,\n" - "but no persistent_load function was specified."); - return -1; + PyErr_SetString(UnpicklingError, + "A load persistent id instruction was encountered,\n" + "but no persistent_load function was specified."); + return -1; } } @@ -3739,7 +3739,7 @@ int len; if (!((len = self->stack->length) > 0)) - return stackUnderflow(); + return stackUnderflow(); /* Note that we split the (pickle.py) stack into two stacks, * an object stack and a mark stack. 
We have to be clever and @@ -3748,11 +3748,11 @@ */ if ((self->num_marks > 0) && (self->marks[self->num_marks - 1] == len)) - self->num_marks--; + self->num_marks--; else { - len--; - Py_DECREF(self->stack->data[len]); - self->stack->length = len; + len--; + Py_DECREF(self->stack->data[len]); + self->stack->length = len; } return 0; @@ -3765,7 +3765,7 @@ int i; if ((i = marker(self)) < 0) - return -1; + return -1; Pdata_clear(self->stack, i); @@ -3780,7 +3780,7 @@ int len; if ((len = self->stack->length) <= 0) - return stackUnderflow(); + return stackUnderflow(); last = self->stack->data[len - 1]; Py_INCREF(last); PDATA_PUSH(self->stack, last, -1); @@ -3797,21 +3797,21 @@ int rc; if ((len = self->readline_func(self, &s)) < 0) - return -1; + return -1; if (len < 2) - return bad_readline(); + return bad_readline(); if (!(py_str = PyString_FromStringAndSize(s, len - 1))) - return -1; + return -1; value = PyDict_GetItem(self->memo, py_str); if (!value) { - PyErr_SetObject(BadPickleGet, py_str); - rc = -1; + PyErr_SetObject(BadPickleGet, py_str); + rc = -1; } else { - PDATA_APPEND(self->stack, value, -1); - rc = 0; + PDATA_APPEND(self->stack, value, -1); + rc = 0; } Py_DECREF(py_str); @@ -3828,20 +3828,20 @@ int rc; if (self->read_func(self, &s, 1) < 0) - return -1; + return -1; key = (unsigned char) s[0]; if (!(py_key = PyInt_FromLong((long) key))) - return -1; + return -1; value = PyDict_GetItem(self->memo, py_key); if (!value) { - PyErr_SetObject(BadPickleGet, py_key); - rc = -1; + PyErr_SetObject(BadPickleGet, py_key); + rc = -1; } else { - PDATA_APPEND(self->stack, value, -1); - rc = 0; + PDATA_APPEND(self->stack, value, -1); + rc = 0; } Py_DECREF(py_key); @@ -3859,7 +3859,7 @@ int rc; if (self->read_func(self, &s, 4) < 0) - return -1; + return -1; c = (unsigned char) s[0]; key = (long) c; @@ -3871,16 +3871,16 @@ key |= (long) c << 24; if (!(py_key = PyInt_FromLong((long) key))) - return -1; + return -1; value = PyDict_GetItem(self->memo, py_key); if (!value) { - 
PyErr_SetObject(BadPickleGet, py_key); - rc = -1; + PyErr_SetObject(BadPickleGet, py_key); + rc = -1; } else { - PDATA_APPEND(self->stack, value, -1); - rc = 0; + PDATA_APPEND(self->stack, value, -1); + rc = 0; } Py_DECREF(py_key); @@ -3893,66 +3893,66 @@ static int load_extension(Unpicklerobject * self, int nbytes) { - char *codebytes; /* the nbytes bytes after the opcode */ - long code; /* calc_binint returns long */ - PyObject *py_code; /* code as a Python int */ - PyObject *obj; /* the object to push */ - PyObject *pair; /* (module_name, class_name) */ + char *codebytes; /* the nbytes bytes after the opcode */ + long code; /* calc_binint returns long */ + PyObject *py_code; /* code as a Python int */ + PyObject *obj; /* the object to push */ + PyObject *pair; /* (module_name, class_name) */ PyObject *module_name, *class_name; assert(nbytes == 1 || nbytes == 2 || nbytes == 4); if (self->read_func(self, &codebytes, nbytes) < 0) - return -1; + return -1; code = calc_binint(codebytes, nbytes); - if (code <= 0) { /* note that 0 is forbidden */ - /* Corrupt or hostile pickle. */ - PyErr_SetString(UnpicklingError, "EXT specifies code <= 0"); - return -1; + if (code <= 0) { /* note that 0 is forbidden */ + /* Corrupt or hostile pickle. */ + PyErr_SetString(UnpicklingError, "EXT specifies code <= 0"); + return -1; } /* Look for the code in the cache. */ py_code = PyInt_FromLong(code); if (py_code == NULL) - return -1; + return -1; obj = PyDict_GetItem(extension_cache, py_code); if (obj != NULL) { - /* Bingo. */ - Py_DECREF(py_code); - PDATA_APPEND(self->stack, obj, -1); - return 0; + /* Bingo. */ + Py_DECREF(py_code); + PDATA_APPEND(self->stack, obj, -1); + return 0; } /* Look up the (module_name, class_name) pair. 
*/ pair = PyDict_GetItem(inverted_registry, py_code); if (pair == NULL) { - Py_DECREF(py_code); - PyErr_Format(PyExc_ValueError, "unregistered extension " - "code %ld", code); - return -1; + Py_DECREF(py_code); + PyErr_Format(PyExc_ValueError, "unregistered extension " + "code %ld", code); + return -1; } /* Since the extension registry is manipulable via Python code, * confirm that pair is really a 2-tuple of strings. */ if (!PyTuple_Check(pair) || PyTuple_Size(pair) != 2 || - !PyString_Check(module_name = PyTuple_GET_ITEM(pair, 0)) || - !PyString_Check(class_name = PyTuple_GET_ITEM(pair, 1))) { - Py_DECREF(py_code); - PyErr_Format(PyExc_ValueError, "_inverted_registry[%ld] " - "isn't a 2-tuple of strings", code); - return -1; + !PyString_Check(module_name = PyTuple_GET_ITEM(pair, 0)) || + !PyString_Check(class_name = PyTuple_GET_ITEM(pair, 1))) { + Py_DECREF(py_code); + PyErr_Format(PyExc_ValueError, "_inverted_registry[%ld] " + "isn't a 2-tuple of strings", code); + return -1; } /* Load the object. */ obj = find_class(module_name, class_name, self->find_class); if (obj == NULL) { - Py_DECREF(py_code); - return -1; + Py_DECREF(py_code); + return -1; } /* Cache code -> obj. 
*/ code = PyDict_SetItem(extension_cache, py_code, obj); Py_DECREF(py_code); if (code < 0) { - Py_DECREF(obj); - return -1; + Py_DECREF(obj); + return -1; } PDATA_PUSH(self->stack, obj, -1); return 0; @@ -3966,13 +3966,13 @@ char *s; if ((l = self->readline_func(self, &s)) < 0) - return -1; + return -1; if (l < 2) - return bad_readline(); + return bad_readline(); if (!(len = self->stack->length)) - return stackUnderflow(); + return stackUnderflow(); if (!(py_str = PyString_FromStringAndSize(s, l - 1))) - return -1; + return -1; value = self->stack->data[len - 1]; l = PyDict_SetItem(self->memo, py_str, value); Py_DECREF(py_str); @@ -3989,14 +3989,14 @@ int len; if (self->read_func(self, &s, 1) < 0) - return -1; + return -1; if (!((len = self->stack->length) > 0)) - return stackUnderflow(); + return stackUnderflow(); key = (unsigned char) s[0]; if (!(py_key = PyInt_FromLong((long) key))) - return -1; + return -1; value = self->stack->data[len - 1]; len = PyDict_SetItem(self->memo, py_key, value); Py_DECREF(py_key); @@ -4014,9 +4014,9 @@ int len; if (self->read_func(self, &s, 4) < 0) - return -1; + return -1; if (!(len = self->stack->length)) - return stackUnderflow(); + return stackUnderflow(); c = (unsigned char) s[0]; key = (long) c; @@ -4028,7 +4028,7 @@ key |= (long) c << 24; if (!(py_key = PyInt_FromLong(key))) - return -1; + return -1; value = self->stack->data[len - 1]; len = PyDict_SetItem(self->memo, py_key, value); Py_DECREF(py_key); @@ -4044,50 +4044,50 @@ len = self->stack->length; if (!(len >= x && x > 0)) - return stackUnderflow(); + return stackUnderflow(); /* nothing to do */ if (len == x) - return 0; + return 0; list = self->stack->data[x - 1]; if (PyList_Check(list)) { - PyObject *slice; - int list_len; + PyObject *slice; + int list_len; - slice = Pdata_popList(self->stack, x); - if (!slice) - return -1; - list_len = PyList_GET_SIZE(list); - i = PyList_SetSlice(list, list_len, list_len, slice); - Py_DECREF(slice); - return i; + slice = 
Pdata_popList(self->stack, x); + if (!slice) + return -1; + list_len = PyList_GET_SIZE(list); + i = PyList_SetSlice(list, list_len, list_len, slice); + Py_DECREF(slice); + return i; } else { - if (!(append_method = PyObject_GetAttr(list, append_str))) - return -1; + if (!(append_method = PyObject_GetAttr(list, append_str))) + return -1; - for (i = x; i < len; i++) { - PyObject *junk; + for (i = x; i < len; i++) { + PyObject *junk; - value = self->stack->data[i]; - junk = 0; - ARG_TUP(self, value); - if (self->arg) { - junk = PyObject_Call(append_method, self->arg, NULL); - FREE_ARG_TUP(self); - } - if (!junk) { - Pdata_clear(self->stack, i + 1); - self->stack->length = x; - Py_DECREF(append_method); - return -1; - } - Py_DECREF(junk); - } - self->stack->length = x; - Py_DECREF(append_method); + value = self->stack->data[i]; + junk = 0; + ARG_TUP(self, value); + if (self->arg) { + junk = PyObject_Call(append_method, self->arg, NULL); + FREE_ARG_TUP(self); + } + if (!junk) { + Pdata_clear(self->stack, i + 1); + self->stack->length = x; + Py_DECREF(append_method); + return -1; + } + Py_DECREF(junk); + } + self->stack->length = x; + Py_DECREF(append_method); } return 0; @@ -4115,17 +4115,17 @@ int len, i, r = 0; if (!((len = self->stack->length) >= x && x > 0)) - return stackUnderflow(); + return stackUnderflow(); dict = self->stack->data[x - 1]; for (i = x + 1; i < len; i += 2) { - key = self->stack->data[i - 1]; - value = self->stack->data[i]; - if (PyObject_SetItem(dict, key, value) < 0) { - r = -1; - break; - } + key = self->stack->data[i - 1]; + value = self->stack->data[i]; + if (PyObject_SetItem(dict, key, value) < 0) { + r = -1; + break; + } } Pdata_clear(self->stack, x); @@ -4160,77 +4160,77 @@ * the stack top, possibly mutated via instance.__setstate__(state). 
*/ if (self->stack->length < 2) - return stackUnderflow(); + return stackUnderflow(); PDATA_POP(self->stack, state); if (state == NULL) - return -1; + return -1; inst = self->stack->data[self->stack->length - 1]; __setstate__ = PyObject_GetAttr(inst, __setstate___str); if (__setstate__ != NULL) { - PyObject *junk = NULL; + PyObject *junk = NULL; - /* The explicit __setstate__ is responsible for everything. */ - ARG_TUP(self, state); - if (self->arg) { - junk = PyObject_Call(__setstate__, self->arg, NULL); - FREE_ARG_TUP(self); - } - Py_DECREF(__setstate__); - if (junk == NULL) - return -1; - Py_DECREF(junk); - return 0; + /* The explicit __setstate__ is responsible for everything. */ + ARG_TUP(self, state); + if (self->arg) { + junk = PyObject_Call(__setstate__, self->arg, NULL); + FREE_ARG_TUP(self); + } + Py_DECREF(__setstate__); + if (junk == NULL) + return -1; + Py_DECREF(junk); + return 0; } if (!PyErr_ExceptionMatches(PyExc_AttributeError)) - return -1; + return -1; PyErr_Clear(); /* A default __setstate__. First see whether state embeds a * slot state dict too (a proto 2 addition). */ if (PyTuple_Check(state) && PyTuple_Size(state) == 2) { - PyObject *temp = state; - state = PyTuple_GET_ITEM(temp, 0); - slotstate = PyTuple_GET_ITEM(temp, 1); - Py_INCREF(state); - Py_INCREF(slotstate); - Py_DECREF(temp); + PyObject *temp = state; + state = PyTuple_GET_ITEM(temp, 0); + slotstate = PyTuple_GET_ITEM(temp, 1); + Py_INCREF(state); + Py_INCREF(slotstate); + Py_DECREF(temp); } else - slotstate = NULL; + slotstate = NULL; /* Set inst.__dict__ from the state dict (if any). 
*/ if (state != Py_None) { - PyObject *dict; - if (!PyDict_Check(state)) { - PyErr_SetString(UnpicklingError, "state is not a " "dictionary"); - goto finally; - } - dict = PyObject_GetAttr(inst, __dict___str); - if (dict == NULL) - goto finally; - - i = 0; - while (PyDict_Next(state, &i, &d_key, &d_value)) { - if (PyObject_SetItem(dict, d_key, d_value) < 0) - goto finally; - } - Py_DECREF(dict); + PyObject *dict; + if (!PyDict_Check(state)) { + PyErr_SetString(UnpicklingError, "state is not a " "dictionary"); + goto finally; + } + dict = PyObject_GetAttr(inst, __dict___str); + if (dict == NULL) + goto finally; + + i = 0; + while (PyDict_Next(state, &i, &d_key, &d_value)) { + if (PyObject_SetItem(dict, d_key, d_value) < 0) + goto finally; + } + Py_DECREF(dict); } /* Also set instance attributes from the slotstate dict (if any). */ if (slotstate != NULL) { - if (!PyDict_Check(slotstate)) { - PyErr_SetString(UnpicklingError, "slot state is not " - "a dictionary"); - goto finally; - } - i = 0; - while (PyDict_Next(slotstate, &i, &d_key, &d_value)) { - if (PyObject_SetAttr(inst, d_key, d_value) < 0) - goto finally; - } + if (!PyDict_Check(slotstate)) { + PyErr_SetString(UnpicklingError, "slot state is not " + "a dictionary"); + goto finally; + } + i = 0; + while (PyDict_Next(slotstate, &i, &d_key, &d_value)) { + if (PyObject_SetAttr(inst, d_key, d_value) < 0) + goto finally; + } } res = 0; @@ -4252,20 +4252,20 @@ */ if ((self->num_marks + 1) >= self->marks_size) { - int *marks; - s = self->marks_size + 20; - if (s <= self->num_marks) - s = self->num_marks + 1; - if (self->marks == NULL) - marks = (int *) malloc(s * sizeof(int)); - else - marks = (int *) realloc(self->marks, s * sizeof(int)); - if (!marks) { - PyErr_NoMemory(); - return -1; - } - self->marks = marks; - self->marks_size = s; + int *marks; + s = self->marks_size + 20; + if (s <= self->num_marks) + s = self->num_marks + 1; + if (self->marks == NULL) + marks = (int *) malloc(s * sizeof(int)); + else + marks 
= (int *) realloc(self->marks, s * sizeof(int)); + if (!marks) { + PyErr_NoMemory(); + return -1; + } + self->marks = marks; + self->marks_size = s; } self->marks[self->num_marks++] = self->stack->length; @@ -4280,16 +4280,16 @@ PDATA_POP(self->stack, arg_tup); if (!arg_tup) - return -1; + return -1; PDATA_POP(self->stack, callable); if (callable) { - ob = Instance_New(callable, arg_tup); - Py_DECREF(callable); + ob = Instance_New(callable, arg_tup); + Py_DECREF(callable); } Py_DECREF(arg_tup); if (!ob) - return -1; + return -1; PDATA_PUSH(self->stack, ob, -1); return 0; @@ -4306,7 +4306,7 @@ i = self->read_func(self, &protobyte, 1); if (i < 0) - return -1; + return -1; i = calc_binint(protobyte, 1); /* No point checking for < 0, since calc_binint returns an unsigned @@ -4314,7 +4314,7 @@ */ assert(i >= 0); if (i <= HIGHEST_PROTOCOL) - return 0; + return 0; PyErr_Format(PyExc_ValueError, "unsupported pickle protocol: %d", i); return -1; @@ -4328,296 +4328,296 @@ self->num_marks = 0; if (self->stack->length) - Pdata_clear(self->stack, 0); + Pdata_clear(self->stack, 0); while (1) { - if (self->read_func(self, &s, 1) < 0) - break; + if (self->read_func(self, &s, 1) < 0) + break; - switch (s[0]) { - case NONE: - if (load_none(self) < 0) - break; - continue; - - case BININT: - if (load_binint(self) < 0) - break; - continue; - - case BININT1: - if (load_binint1(self) < 0) - break; - continue; - - case BININT2: - if (load_binint2(self) < 0) - break; - continue; - - case INT: - if (load_int(self) < 0) - break; - continue; - - case LONG: - if (load_long(self) < 0) - break; - continue; - - case LONG1: - if (load_counted_long(self, 1) < 0) - break; - continue; - - case LONG4: - if (load_counted_long(self, 4) < 0) - break; - continue; - - case FLOAT: - if (load_float(self) < 0) - break; - continue; - - case BINFLOAT: - if (load_binfloat(self) < 0) - break; - continue; - - case BINSTRING: - if (load_binstring(self) < 0) - break; - continue; - - case SHORT_BINSTRING: - if 
(load_short_binstring(self) < 0) - break; - continue; - - case STRING: - if (load_string(self) < 0) - break; - continue; + switch (s[0]) { + case NONE: + if (load_none(self) < 0) + break; + continue; + + case BININT: + if (load_binint(self) < 0) + break; + continue; + + case BININT1: + if (load_binint1(self) < 0) + break; + continue; + + case BININT2: + if (load_binint2(self) < 0) + break; + continue; + + case INT: + if (load_int(self) < 0) + break; + continue; + + case LONG: + if (load_long(self) < 0) + break; + continue; + + case LONG1: + if (load_counted_long(self, 1) < 0) + break; + continue; + + case LONG4: + if (load_counted_long(self, 4) < 0) + break; + continue; + + case FLOAT: + if (load_float(self) < 0) + break; + continue; + + case BINFLOAT: + if (load_binfloat(self) < 0) + break; + continue; + + case BINSTRING: + if (load_binstring(self) < 0) + break; + continue; + + case SHORT_BINSTRING: + if (load_short_binstring(self) < 0) + break; + continue; + + case STRING: + if (load_string(self) < 0) + break; + continue; #ifdef Py_USING_UNICODE - case UNICODE: - if (load_unicode(self) < 0) - break; - continue; - - case BINUNICODE: - if (load_binunicode(self) < 0) - break; - continue; + case UNICODE: + if (load_unicode(self) < 0) + break; + continue; + + case BINUNICODE: + if (load_binunicode(self) < 0) + break; + continue; #endif - case EMPTY_TUPLE: - if (load_counted_tuple(self, 0) < 0) - break; - continue; - - case TUPLE1: - if (load_counted_tuple(self, 1) < 0) - break; - continue; - - case TUPLE2: - if (load_counted_tuple(self, 2) < 0) - break; - continue; - - case TUPLE3: - if (load_counted_tuple(self, 3) < 0) - break; - continue; - - case TUPLE: - if (load_tuple(self) < 0) - break; - continue; - - case EMPTY_LIST: - if (load_empty_list(self) < 0) - break; - continue; - - case LIST: - if (load_list(self) < 0) - break; - continue; - - case EMPTY_DICT: - if (load_empty_dict(self) < 0) - break; - continue; - - case DICT: - if (load_dict(self) < 0) - break; - 
continue; - - case OBJ: - if (load_obj(self) < 0) - break; - continue; - - case INST: - if (load_inst(self) < 0) - break; - continue; - - case NEWOBJ: - if (load_newobj(self) < 0) - break; - continue; - - case GLOBAL: - if (load_global(self) < 0) - break; - continue; - - case APPEND: - if (load_append(self) < 0) - break; - continue; - - case APPENDS: - if (load_appends(self) < 0) - break; - continue; - - case BUILD: - if (load_build(self) < 0) - break; - continue; - - case DUP: - if (load_dup(self) < 0) - break; - continue; - - case BINGET: - if (load_binget(self) < 0) - break; - continue; - - case LONG_BINGET: - if (load_long_binget(self) < 0) - break; - continue; - - case GET: - if (load_get(self) < 0) - break; - continue; - - case EXT1: - if (load_extension(self, 1) < 0) - break; - continue; - - case EXT2: - if (load_extension(self, 2) < 0) - break; - continue; - - case EXT4: - if (load_extension(self, 4) < 0) - break; - continue; - case MARK: - if (load_mark(self) < 0) - break; - continue; - - case BINPUT: - if (load_binput(self) < 0) - break; - continue; - - case LONG_BINPUT: - if (load_long_binput(self) < 0) - break; - continue; - - case PUT: - if (load_put(self) < 0) - break; - continue; - - case POP: - if (load_pop(self) < 0) - break; - continue; - - case POP_MARK: - if (load_pop_mark(self) < 0) - break; - continue; - - case SETITEM: - if (load_setitem(self) < 0) - break; - continue; - - case SETITEMS: - if (load_setitems(self) < 0) - break; - continue; - - case STOP: - break; - - case PERSID: - if (load_persid(self) < 0) - break; - continue; - - case BINPERSID: - if (load_binpersid(self) < 0) - break; - continue; - - case REDUCE: - if (load_reduce(self) < 0) - break; - continue; - - case PROTO: - if (load_proto(self) < 0) - break; - continue; - - case NEWTRUE: - if (load_bool(self, Py_True) < 0) - break; - continue; - - case NEWFALSE: - if (load_bool(self, Py_False) < 0) - break; - continue; - - case '\0': - /* end of file */ - 
PyErr_SetNone(PyExc_EOFError); - break; - - default: - cPickle_ErrFormat(UnpicklingError, - "invalid load key, '%s'.", "c", s[0]); - return NULL; - } + case EMPTY_TUPLE: + if (load_counted_tuple(self, 0) < 0) + break; + continue; + + case TUPLE1: + if (load_counted_tuple(self, 1) < 0) + break; + continue; + + case TUPLE2: + if (load_counted_tuple(self, 2) < 0) + break; + continue; + + case TUPLE3: + if (load_counted_tuple(self, 3) < 0) + break; + continue; + + case TUPLE: + if (load_tuple(self) < 0) + break; + continue; + + case EMPTY_LIST: + if (load_empty_list(self) < 0) + break; + continue; + + case LIST: + if (load_list(self) < 0) + break; + continue; + + case EMPTY_DICT: + if (load_empty_dict(self) < 0) + break; + continue; + + case DICT: + if (load_dict(self) < 0) + break; + continue; + + case OBJ: + if (load_obj(self) < 0) + break; + continue; + + case INST: + if (load_inst(self) < 0) + break; + continue; + + case NEWOBJ: + if (load_newobj(self) < 0) + break; + continue; + + case GLOBAL: + if (load_global(self) < 0) + break; + continue; + + case APPEND: + if (load_append(self) < 0) + break; + continue; + + case APPENDS: + if (load_appends(self) < 0) + break; + continue; + + case BUILD: + if (load_build(self) < 0) + break; + continue; + + case DUP: + if (load_dup(self) < 0) + break; + continue; + + case BINGET: + if (load_binget(self) < 0) + break; + continue; + + case LONG_BINGET: + if (load_long_binget(self) < 0) + break; + continue; + + case GET: + if (load_get(self) < 0) + break; + continue; + + case EXT1: + if (load_extension(self, 1) < 0) + break; + continue; + + case EXT2: + if (load_extension(self, 2) < 0) + break; + continue; + + case EXT4: + if (load_extension(self, 4) < 0) + break; + continue; + case MARK: + if (load_mark(self) < 0) + break; + continue; + + case BINPUT: + if (load_binput(self) < 0) + break; + continue; + + case LONG_BINPUT: + if (load_long_binput(self) < 0) + break; + continue; + + case PUT: + if (load_put(self) < 0) + break; + 
continue; + + case POP: + if (load_pop(self) < 0) + break; + continue; + + case POP_MARK: + if (load_pop_mark(self) < 0) + break; + continue; + + case SETITEM: + if (load_setitem(self) < 0) + break; + continue; + + case SETITEMS: + if (load_setitems(self) < 0) + break; + continue; + + case STOP: + break; + + case PERSID: + if (load_persid(self) < 0) + break; + continue; + + case BINPERSID: + if (load_binpersid(self) < 0) + break; + continue; + + case REDUCE: + if (load_reduce(self) < 0) + break; + continue; + + case PROTO: + if (load_proto(self) < 0) + break; + continue; + + case NEWTRUE: + if (load_bool(self, Py_True) < 0) + break; + continue; + + case NEWFALSE: + if (load_bool(self, Py_False) < 0) + break; + continue; + + case '\0': + /* end of file */ + PyErr_SetNone(PyExc_EOFError); + break; + + default: + cPickle_ErrFormat(UnpicklingError, + "invalid load key, '%s'.", "c", s[0]); + return NULL; + } - break; + break; } if ((err = PyErr_Occurred())) { - if (err == PyExc_EOFError) { - PyErr_SetNone(PyExc_EOFError); - } - return NULL; + if (err == PyExc_EOFError) { + PyErr_SetNone(PyExc_EOFError); + } + return NULL; } PDATA_POP(self->stack, val); @@ -4634,7 +4634,7 @@ int i; if ((i = marker(self)) < 0) - return -1; + return -1; return Pdata_clear(self->stack, i + 1); } @@ -4646,12 +4646,12 @@ char *s; if ((i = marker(self)) < 0) - return -1; + return -1; Pdata_clear(self->stack, i); if (self->readline_func(self, &s) < 0) - return -1; + return -1; if (self->readline_func(self, &s) < 0) - return -1; + return -1; PDATA_APPEND(self->stack, Py_None, -1); return 0; } @@ -4661,14 +4661,14 @@ { PyObject *obj; - PDATA_POP(self->stack, obj); /* pop argtuple */ + PDATA_POP(self->stack, obj); /* pop argtuple */ if (obj == NULL) - return -1; + return -1; Py_DECREF(obj); - PDATA_POP(self->stack, obj); /* pop cls */ + PDATA_POP(self->stack, obj); /* pop cls */ if (obj == NULL) - return -1; + return -1; Py_DECREF(obj); PDATA_APPEND(self->stack, Py_None, -1); @@ -4681,9 +4681,9 @@ 
char *s; if (self->readline_func(self, &s) < 0) - return -1; + return -1; if (self->readline_func(self, &s) < 0) - return -1; + return -1; PDATA_APPEND(self->stack, Py_None, -1); return 0; } @@ -4693,7 +4693,7 @@ { if (self->stack->length < 2) - return stackUnderflow(); + return stackUnderflow(); Pdata_clear(self->stack, self->stack->length - 2); PDATA_APPEND(self->stack, Py_None, -1); return 0; @@ -4704,7 +4704,7 @@ { if (self->stack->length < 1) - return stackUnderflow(); + return stackUnderflow(); Pdata_clear(self->stack, self->stack->length - 1); return 0; } @@ -4716,7 +4716,7 @@ assert(nbytes == 1 || nbytes == 2 || nbytes == 4); if (self->read_func(self, &codebytes, nbytes) < 0) - return -1; + return -1; PDATA_APPEND(self->stack, Py_None, -1); return 0; } @@ -4732,288 +4732,288 @@ Pdata_clear(self->stack, 0); while (1) { - if (self->read_func(self, &s, 1) < 0) - break; + if (self->read_func(self, &s, 1) < 0) + break; - switch (s[0]) { - case NONE: - if (load_none(self) < 0) - break; - continue; - - case BININT: - if (load_binint(self) < 0) - break; - continue; - - case BININT1: - if (load_binint1(self) < 0) - break; - continue; - - case BININT2: - if (load_binint2(self) < 0) - break; - continue; - - case INT: - if (load_int(self) < 0) - break; - continue; - - case LONG: - if (load_long(self) < 0) - break; - continue; - - case LONG1: - if (load_counted_long(self, 1) < 0) - break; - continue; - - case LONG4: - if (load_counted_long(self, 4) < 0) - break; - continue; - - case FLOAT: - if (load_float(self) < 0) - break; - continue; - - case BINFLOAT: - if (load_binfloat(self) < 0) - break; - continue; - - case BINSTRING: - if (load_binstring(self) < 0) - break; - continue; - - case SHORT_BINSTRING: - if (load_short_binstring(self) < 0) - break; - continue; - - case STRING: - if (load_string(self) < 0) - break; - continue; + switch (s[0]) { + case NONE: + if (load_none(self) < 0) + break; + continue; + + case BININT: + if (load_binint(self) < 0) + break; + 
continue; + + case BININT1: + if (load_binint1(self) < 0) + break; + continue; + + case BININT2: + if (load_binint2(self) < 0) + break; + continue; + + case INT: + if (load_int(self) < 0) + break; + continue; + + case LONG: + if (load_long(self) < 0) + break; + continue; + + case LONG1: + if (load_counted_long(self, 1) < 0) + break; + continue; + + case LONG4: + if (load_counted_long(self, 4) < 0) + break; + continue; + + case FLOAT: + if (load_float(self) < 0) + break; + continue; + + case BINFLOAT: + if (load_binfloat(self) < 0) + break; + continue; + + case BINSTRING: + if (load_binstring(self) < 0) + break; + continue; + + case SHORT_BINSTRING: + if (load_short_binstring(self) < 0) + break; + continue; + + case STRING: + if (load_string(self) < 0) + break; + continue; #ifdef Py_USING_UNICODE - case UNICODE: - if (load_unicode(self) < 0) - break; - continue; - - case BINUNICODE: - if (load_binunicode(self) < 0) - break; - continue; + case UNICODE: + if (load_unicode(self) < 0) + break; + continue; + + case BINUNICODE: + if (load_binunicode(self) < 0) + break; + continue; #endif - case EMPTY_TUPLE: - if (load_counted_tuple(self, 0) < 0) - break; - continue; - - case TUPLE1: - if (load_counted_tuple(self, 1) < 0) - break; - continue; - - case TUPLE2: - if (load_counted_tuple(self, 2) < 0) - break; - continue; - - case TUPLE3: - if (load_counted_tuple(self, 3) < 0) - break; - continue; - - case TUPLE: - if (load_tuple(self) < 0) - break; - continue; - - case EMPTY_LIST: - if (load_empty_list(self) < 0) - break; - continue; - - case LIST: - if (load_list(self) < 0) - break; - continue; - - case EMPTY_DICT: - if (load_empty_dict(self) < 0) - break; - continue; - - case DICT: - if (load_dict(self) < 0) - break; - continue; - - case OBJ: - if (noload_obj(self) < 0) - break; - continue; - - case INST: - if (noload_inst(self) < 0) - break; - continue; - - case NEWOBJ: - if (noload_newobj(self) < 0) - break; - continue; - - case GLOBAL: - if (noload_global(self) < 0) - 
break; - continue; - - case APPEND: - if (load_append(self) < 0) - break; - continue; - - case APPENDS: - if (load_appends(self) < 0) - break; - continue; - - case BUILD: - if (noload_build(self) < 0) - break; - continue; - - case DUP: - if (load_dup(self) < 0) - break; - continue; - - case BINGET: - if (load_binget(self) < 0) - break; - continue; - - case LONG_BINGET: - if (load_long_binget(self) < 0) - break; - continue; - - case GET: - if (load_get(self) < 0) - break; - continue; - - case EXT1: - if (noload_extension(self, 1) < 0) - break; - continue; - - case EXT2: - if (noload_extension(self, 2) < 0) - break; - continue; - - case EXT4: - if (noload_extension(self, 4) < 0) - break; - continue; - - case MARK: - if (load_mark(self) < 0) - break; - continue; - - case BINPUT: - if (load_binput(self) < 0) - break; - continue; - - case LONG_BINPUT: - if (load_long_binput(self) < 0) - break; - continue; - - case PUT: - if (load_put(self) < 0) - break; - continue; - - case POP: - if (load_pop(self) < 0) - break; - continue; - - case POP_MARK: - if (load_pop_mark(self) < 0) - break; - continue; - - case SETITEM: - if (load_setitem(self) < 0) - break; - continue; - - case SETITEMS: - if (load_setitems(self) < 0) - break; - continue; - - case STOP: - break; - - case PERSID: - if (load_persid(self) < 0) - break; - continue; - - case BINPERSID: - if (load_binpersid(self) < 0) - break; - continue; - - case REDUCE: - if (noload_reduce(self) < 0) - break; - continue; - - case PROTO: - if (load_proto(self) < 0) - break; - continue; - - case NEWTRUE: - if (load_bool(self, Py_True) < 0) - break; - continue; - - case NEWFALSE: - if (load_bool(self, Py_False) < 0) - break; - continue; - default: - cPickle_ErrFormat(UnpicklingError, - "invalid load key, '%s'.", "c", s[0]); - return NULL; - } + case EMPTY_TUPLE: + if (load_counted_tuple(self, 0) < 0) + break; + continue; + + case TUPLE1: + if (load_counted_tuple(self, 1) < 0) + break; + continue; + + case TUPLE2: + if 
(load_counted_tuple(self, 2) < 0) + break; + continue; + + case TUPLE3: + if (load_counted_tuple(self, 3) < 0) + break; + continue; + + case TUPLE: + if (load_tuple(self) < 0) + break; + continue; + + case EMPTY_LIST: + if (load_empty_list(self) < 0) + break; + continue; + + case LIST: + if (load_list(self) < 0) + break; + continue; + + case EMPTY_DICT: + if (load_empty_dict(self) < 0) + break; + continue; + + case DICT: + if (load_dict(self) < 0) + break; + continue; + + case OBJ: + if (noload_obj(self) < 0) + break; + continue; + + case INST: + if (noload_inst(self) < 0) + break; + continue; + + case NEWOBJ: + if (noload_newobj(self) < 0) + break; + continue; + + case GLOBAL: + if (noload_global(self) < 0) + break; + continue; + + case APPEND: + if (load_append(self) < 0) + break; + continue; + + case APPENDS: + if (load_appends(self) < 0) + break; + continue; + + case BUILD: + if (noload_build(self) < 0) + break; + continue; + + case DUP: + if (load_dup(self) < 0) + break; + continue; + + case BINGET: + if (load_binget(self) < 0) + break; + continue; + + case LONG_BINGET: + if (load_long_binget(self) < 0) + break; + continue; + + case GET: + if (load_get(self) < 0) + break; + continue; + + case EXT1: + if (noload_extension(self, 1) < 0) + break; + continue; + + case EXT2: + if (noload_extension(self, 2) < 0) + break; + continue; + + case EXT4: + if (noload_extension(self, 4) < 0) + break; + continue; + + case MARK: + if (load_mark(self) < 0) + break; + continue; + + case BINPUT: + if (load_binput(self) < 0) + break; + continue; + + case LONG_BINPUT: + if (load_long_binput(self) < 0) + break; + continue; + + case PUT: + if (load_put(self) < 0) + break; + continue; + + case POP: + if (load_pop(self) < 0) + break; + continue; + + case POP_MARK: + if (load_pop_mark(self) < 0) + break; + continue; + + case SETITEM: + if (load_setitem(self) < 0) + break; + continue; + + case SETITEMS: + if (load_setitems(self) < 0) + break; + continue; + + case STOP: + break; + + case 
PERSID: + if (load_persid(self) < 0) + break; + continue; + + case BINPERSID: + if (load_binpersid(self) < 0) + break; + continue; + + case REDUCE: + if (noload_reduce(self) < 0) + break; + continue; + + case PROTO: + if (load_proto(self) < 0) + break; + continue; + + case NEWTRUE: + if (load_bool(self, Py_True) < 0) + break; + continue; + + case NEWFALSE: + if (load_bool(self, Py_False) < 0) + break; + continue; + default: + cPickle_ErrFormat(UnpicklingError, + "invalid load key, '%s'.", "c", s[0]); + return NULL; + } - break; + break; } if ((err = PyErr_Occurred())) { - if (err == PyExc_EOFError) { - PyErr_SetNone(PyExc_EOFError); - } - return NULL; + if (err == PyExc_EOFError) { + PyErr_SetNone(PyExc_EOFError); + } + return NULL; } PDATA_POP(self->stack, val); @@ -5047,7 +5047,7 @@ "persistent references without instantiating any objects or importing\n" "any modules.\n") }, - {NULL, NULL} /* sentinel */ + {NULL, NULL} /* sentinel */ }; @@ -5057,7 +5057,7 @@ Unpicklerobject *self; if (!(self = PyObject_GC_New(Unpicklerobject, &Unpicklertype))) - return NULL; + return NULL; self->file = NULL; self->arg = NULL; @@ -5073,43 +5073,43 @@ self->find_class = NULL; if (!(self->memo = PyDict_New())) - goto err; + goto err; if (!self->stack) - goto err; + goto err; Py_INCREF(f); self->file = f; /* Set read, readline based on type of f */ if (PyFile_Check(f)) { - self->fp = PyFile_AsFile(f); - if (self->fp == NULL) { - PyErr_SetString(PyExc_ValueError, "I/O operation on closed file"); - goto err; - } - self->read_func = read_file; - self->readline_func = readline_file; + self->fp = PyFile_AsFile(f); + if (self->fp == NULL) { + PyErr_SetString(PyExc_ValueError, "I/O operation on closed file"); + goto err; + } + self->read_func = read_file; + self->readline_func = readline_file; } else if (PycStringIO_InputCheck(f)) { - self->fp = NULL; - self->read_func = read_cStringIO; - self->readline_func = readline_cStringIO; + self->fp = NULL; + self->read_func = read_cStringIO; + 
self->readline_func = readline_cStringIO; } else { - self->fp = NULL; - self->read_func = read_other; - self->readline_func = readline_other; - - if (!((self->readline = PyObject_GetAttr(f, readline_str)) && - (self->read = PyObject_GetAttr(f, read_str)))) { - PyErr_Clear(); - PyErr_SetString(PyExc_TypeError, - "argument must have 'read' and " - "'readline' attributes"); - goto err; - } + self->fp = NULL; + self->read_func = read_other; + self->readline_func = readline_other; + + if (!((self->readline = PyObject_GetAttr(f, readline_str)) && + (self->read = PyObject_GetAttr(f, read_str)))) { + PyErr_Clear(); + PyErr_SetString(PyExc_TypeError, + "argument must have 'read' and " + "'readline' attributes"); + goto err; + } } PyObject_GC_Track(self); @@ -5143,11 +5143,11 @@ Py_XDECREF(self->find_class); if (self->marks) { - free(self->marks); + free(self->marks); } if (self->buf_size) { - free(self->buf); + free(self->buf); } self->ob_type->tp_free((PyObject *) self); @@ -5187,38 +5187,38 @@ Unpickler_getattr(Unpicklerobject * self, char *name) { if (!strcmp(name, "persistent_load")) { - if (!self->pers_func) { - PyErr_SetString(PyExc_AttributeError, name); - return NULL; - } + if (!self->pers_func) { + PyErr_SetString(PyExc_AttributeError, name); + return NULL; + } - Py_INCREF(self->pers_func); - return self->pers_func; + Py_INCREF(self->pers_func); + return self->pers_func; } if (!strcmp(name, "find_global")) { - if (!self->find_class) { - PyErr_SetString(PyExc_AttributeError, name); - return NULL; - } + if (!self->find_class) { + PyErr_SetString(PyExc_AttributeError, name); + return NULL; + } - Py_INCREF(self->find_class); - return self->find_class; + Py_INCREF(self->find_class); + return self->find_class; } if (!strcmp(name, "memo")) { - if (!self->memo) { - PyErr_SetString(PyExc_AttributeError, name); - return NULL; - } + if (!self->memo) { + PyErr_SetString(PyExc_AttributeError, name); + return NULL; + } - Py_INCREF(self->memo); - return self->memo; + 
Py_INCREF(self->memo); + return self->memo; } if (!strcmp(name, "UnpicklingError")) { - Py_INCREF(UnpicklingError); - return UnpicklingError; + Py_INCREF(UnpicklingError); + return UnpicklingError; } return Py_FindMethod(Unpickler_methods, (PyObject *) self, name); @@ -5230,34 +5230,34 @@ { if (!strcmp(name, "persistent_load")) { - Py_XDECREF(self->pers_func); - self->pers_func = value; - Py_XINCREF(value); - return 0; + Py_XDECREF(self->pers_func); + self->pers_func = value; + Py_XINCREF(value); + return 0; } if (!strcmp(name, "find_global")) { - Py_XDECREF(self->find_class); - self->find_class = value; - Py_XINCREF(value); - return 0; + Py_XDECREF(self->find_class); + self->find_class = value; + Py_XINCREF(value); + return 0; } if (!value) { - PyErr_SetString(PyExc_TypeError, - "attribute deletion is not supported"); - return -1; + PyErr_SetString(PyExc_TypeError, + "attribute deletion is not supported"); + return -1; } if (strcmp(name, "memo") == 0) { - if (!PyDict_Check(value)) { - PyErr_SetString(PyExc_TypeError, "memo must be a dictionary"); - return -1; - } - Py_XDECREF(self->memo); - self->memo = value; - Py_INCREF(value); - return 0; + if (!PyDict_Check(value)) { + PyErr_SetString(PyExc_TypeError, "memo must be a dictionary"); + return -1; + } + Py_XDECREF(self->memo); + self->memo = value; + Py_INCREF(value); + return 0; } PyErr_SetString(PyExc_AttributeError, name); @@ -5278,14 +5278,14 @@ int proto = 0; if (!(PyArg_ParseTupleAndKeywords(args, kwds, "OO|i", kwlist, - &ob, &file, &proto))) - goto finally; + &ob, &file, &proto))) + goto finally; if (!(pickler = newPicklerobject(file, proto))) - goto finally; + goto finally; if (dump(pickler, ob) < 0) - goto finally; + goto finally; Py_INCREF(Py_None); res = Py_None; @@ -5307,17 +5307,17 @@ int proto = 0; if (!(PyArg_ParseTupleAndKeywords(args, kwds, "O|i:dumps", kwlist, - &ob, &proto))) - goto finally; + &ob, &proto))) + goto finally; if (!(file = PycStringIO->NewOutput(128))) - goto finally; + goto 
finally; if (!(pickler = newPicklerobject(file, proto))) - goto finally; + goto finally; if (dump(pickler, ob) < 0) - goto finally; + goto finally; res = PycStringIO->cgetvalue(file); @@ -5337,7 +5337,7 @@ PyObject *res = NULL; if (!(unpickler = newUnpicklerobject(ob))) - goto finally; + goto finally; res = load(unpickler); @@ -5356,13 +5356,13 @@ Unpicklerobject *unpickler = 0; if (!(PyArg_ParseTuple(args, "S:loads", &ob))) - goto finally; + goto finally; if (!(file = PycStringIO->NewInput(ob))) - goto finally; + goto finally; if (!(unpickler = newUnpicklerobject(file))) - goto finally; + goto finally; res = load(unpickler); @@ -5378,44 +5378,44 @@ static PyTypeObject Unpicklertype = { PyObject_HEAD_INIT(NULL) - 0, /*ob_size */ - "cPickle.Unpickler", /*tp_name */ - sizeof(Unpicklerobject), /*tp_basicsize */ + 0, /*ob_size */ + "cPickle.Unpickler", /*tp_name */ + sizeof(Unpicklerobject), /*tp_basicsize */ 0, - (destructor) Unpickler_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - (getattrfunc) Unpickler_getattr, /* tp_getattr */ - (setattrfunc) Unpickler_setattr, /* tp_setattr */ - 0, /* tp_compare */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ + (destructor) Unpickler_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + (getattrfunc) Unpickler_getattr, /* tp_getattr */ + (setattrfunc) Unpickler_setattr, /* tp_setattr */ + 0, /* tp_compare */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, - Unpicklertype__doc__, /* tp_doc */ - (traverseproc) Unpickler_traverse, /* tp_traverse */ - (inquiry) Unpickler_clear, /* tp_clear */ + Unpicklertype__doc__, /* tp_doc */ + 
(traverseproc) Unpickler_traverse, /* tp_traverse */ + (inquiry) Unpickler_clear, /* tp_clear */ }; static struct PyMethodDef cPickle_methods[] = { {"dump", (PyCFunction) cpm_dump, METH_VARARGS | METH_KEYWORDS, PyDoc_STR("dump(obj, file, protocol=0) -- " - "Write an object in pickle format to the given file.\n" - "\n" - "See the Pickler docstring for the meaning of optional argument proto.") + "Write an object in pickle format to the given file.\n" + "\n" + "See the Pickler docstring for the meaning of optional argument proto.") }, {"dumps", (PyCFunction) cpm_dumps, METH_VARARGS | METH_KEYWORDS, PyDoc_STR("dumps(obj, protocol=0) -- " - "Return a string containing an object in pickle format.\n" - "\n" - "See the Pickler docstring for the meaning of optional argument proto.") + "Return a string containing an object in pickle format.\n" + "\n" + "See the Pickler docstring for the meaning of optional argument proto.") }, {"load", (PyCFunction) cpm_load, METH_O, @@ -5426,27 +5426,27 @@ {"Pickler", (PyCFunction) get_Pickler, METH_VARARGS | METH_KEYWORDS, PyDoc_STR("Pickler(file, protocol=0) -- Create a pickler.\n" - "\n" - "This takes a file-like object for writing a pickle data stream.\n" - "The optional proto argument tells the pickler to use the given\n" - "protocol; supported protocols are 0, 1, 2. The default\n" - "protocol is 0, to be backwards compatible. (Protocol 0 is the\n" - "only protocol that can be written to a file opened in text\n" - "mode and read back successfully. When using a protocol higher\n" - "than 0, make sure the file is opened in binary mode, both when\n" - "pickling and unpickling.)\n" - "\n" - "Protocol 1 is more efficient than protocol 0; protocol 2 is\n" - "more efficient than protocol 1.\n" - "\n" - "Specifying a negative protocol version selects the highest\n" - "protocol version supported. 
The higher the protocol used, the\n" - "more recent the version of Python needed to read the pickle\n" - "produced.\n" - "\n" - "The file parameter must have a write() method that accepts a single\n" - "string argument. It can thus be an open file object, a StringIO\n" - "object, or any other custom object that meets this interface.\n") + "\n" + "This takes a file-like object for writing a pickle data stream.\n" + "The optional proto argument tells the pickler to use the given\n" + "protocol; supported protocols are 0, 1, 2. The default\n" + "protocol is 0, to be backwards compatible. (Protocol 0 is the\n" + "only protocol that can be written to a file opened in text\n" + "mode and read back successfully. When using a protocol higher\n" + "than 0, make sure the file is opened in binary mode, both when\n" + "pickling and unpickling.)\n" + "\n" + "Protocol 1 is more efficient than protocol 0; protocol 2 is\n" + "more efficient than protocol 1.\n" + "\n" + "Specifying a negative protocol version selects the highest\n" + "protocol version supported. The higher the protocol used, the\n" + "more recent the version of Python needed to read the pickle\n" + "produced.\n" + "\n" + "The file parameter must have a write() method that accepts a single\n" + "string argument. It can thus be an open file object, a StringIO\n" + "object, or any other custom object that meets this interface.\n") }, {"Unpickler", (PyCFunction) get_Unpickler, METH_O, @@ -5463,9 +5463,9 @@ #define INIT_STR(S) if (!( S ## _str=PyString_InternFromString(#S))) return -1; if (PyType_Ready(&Unpicklertype) < 0) - return -1; + return -1; if (PyType_Ready(&Picklertype) < 0) - return -1; + return -1; INIT_STR(__class__); INIT_STR(__getinitargs__); @@ -5484,35 +5484,35 @@ INIT_STR(dispatch_table); if (!(copy_reg = PyImport_ImportModule("copy_reg"))) - return -1; + return -1; /* This is special because we want to use a different * one in restricted mode. 
*/ dispatch_table = PyObject_GetAttr(copy_reg, dispatch_table_str); if (!dispatch_table) - return -1; + return -1; extension_registry = PyObject_GetAttrString(copy_reg, - "_extension_registry"); + "_extension_registry"); if (!extension_registry) - return -1; + return -1; inverted_registry = PyObject_GetAttrString(copy_reg, "_inverted_registry"); if (!inverted_registry) - return -1; + return -1; extension_cache = PyObject_GetAttrString(copy_reg, "_extension_cache"); if (!extension_cache) - return -1; + return -1; Py_DECREF(copy_reg); if (!(empty_tuple = PyTuple_New(0))) - return -1; + return -1; two_tuple = PyTuple_New(2); if (two_tuple == NULL) - return -1; + return -1; /* We use this temp container with no regard to refcounts, or to * keeping containees alive. Exempt from GC, because we don't * want anything looking at two_tuple() by magic. @@ -5521,77 +5521,77 @@ /* Ugh */ if (!(t = PyImport_ImportModule("__builtin__"))) - return -1; + return -1; if (PyDict_SetItemString(module_dict, "__builtins__", t) < 0) - return -1; + return -1; if (!(t = PyDict_New())) - return -1; + return -1; if (!(r = PyRun_String("def __str__(self):\n" - " return self.args and ('%s' % self.args[0]) or '(what)'\n", - Py_file_input, module_dict, t))) - return -1; + " return self.args and ('%s' % self.args[0]) or '(what)'\n", + Py_file_input, module_dict, t))) + return -1; Py_DECREF(r); PickleError = PyErr_NewException("cPickle.PickleError", NULL, t); if (!PickleError) - return -1; + return -1; Py_DECREF(t); PicklingError = PyErr_NewException("cPickle.PicklingError", - PickleError, NULL); + PickleError, NULL); if (!PicklingError) - return -1; + return -1; if (!(t = PyDict_New())) - return -1; + return -1; if (!(r = PyRun_String("def __str__(self):\n" - " a=self.args\n" - " a=a and type(a[0]) or '(what)'\n" - " return 'Cannot pickle %s objects' % a\n", - Py_file_input, module_dict, t))) - return -1; + " a=self.args\n" + " a=a and type(a[0]) or '(what)'\n" + " return 'Cannot pickle %s 
objects' % a\n", + Py_file_input, module_dict, t))) + return -1; Py_DECREF(r); if (! - (UnpickleableError = - PyErr_NewException("cPickle.UnpickleableError", PicklingError, t))) - return -1; + (UnpickleableError = + PyErr_NewException("cPickle.UnpickleableError", PicklingError, t))) + return -1; Py_DECREF(t); if (!(UnpicklingError = PyErr_NewException("cPickle.UnpicklingError", - PickleError, NULL))) - return -1; + PickleError, NULL))) + return -1; if (!(BadPickleGet = PyErr_NewException("cPickle.BadPickleGet", - UnpicklingError, NULL))) - return -1; + UnpicklingError, NULL))) + return -1; if (PyDict_SetItemString(module_dict, "PickleError", PickleError) < 0) - return -1; + return -1; if (PyDict_SetItemString(module_dict, "PicklingError", PicklingError) < 0) - return -1; + return -1; if (PyDict_SetItemString(module_dict, "UnpicklingError", - UnpicklingError) < 0) - return -1; + UnpicklingError) < 0) + return -1; if (PyDict_SetItemString(module_dict, "UnpickleableError", - UnpickleableError) < 0) - return -1; + UnpickleableError) < 0) + return -1; if (PyDict_SetItemString(module_dict, "BadPickleGet", BadPickleGet) < 0) - return -1; + return -1; PycString_IMPORT; return 0; } -#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */ +#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */ #define PyMODINIT_FUNC void #endif PyMODINIT_FUNC @@ -5599,7 +5599,7 @@ { PyObject *m, *d, *di, *v, *k; Py_ssize_t i; - char *rev = "1.71"; /* XXX when does this change? */ + char *rev = "1.71"; /* XXX when does this change? 
*/ PyObject *format_version; PyObject *compatible_formats; @@ -5612,16 +5612,16 @@ */ di = PyDict_New(); if (!di) - return; + return; if (init_stuff(di) < 0) - return; + return; /* Create the module and add the functions */ m = Py_InitModule4("cPickle", cPickle_methods, - cPickle_module_documentation, - (PyObject *) NULL, PYTHON_API_VERSION); + cPickle_module_documentation, + (PyObject *) NULL, PYTHON_API_VERSION); if (m == NULL) - return; + return; /* Add some symbolic constants to the module */ d = PyModule_GetDict(m); @@ -5631,26 +5631,26 @@ /* Copy data from di. Waaa. */ for (i = 0; PyDict_Next(di, &i, &k, &v);) { - if (PyObject_SetItem(d, k, v) < 0) { - Py_DECREF(di); - return; - } + if (PyObject_SetItem(d, k, v) < 0) { + Py_DECREF(di); + return; + } } Py_DECREF(di); i = PyModule_AddIntConstant(m, "HIGHEST_PROTOCOL", HIGHEST_PROTOCOL); if (i < 0) - return; + return; /* These are purely informational; no code uses them. */ /* File format version we write. */ format_version = PyString_FromString("2.0"); /* Format versions we can read. 
*/ - compatible_formats = Py_BuildValue("[sssss]", "1.0", /* Original protocol 0 */ - "1.1", /* Protocol 0 + INST */ - "1.2", /* Original protocol 1 */ - "1.3", /* Protocol 1 + BINFLOAT */ - "2.0"); /* Original protocol 2 */ + compatible_formats = Py_BuildValue("[sssss]", "1.0", /* Original protocol 0 */ + "1.1", /* Protocol 0 + INST */ + "1.2", /* Original protocol 1 */ + "1.3", /* Protocol 1 + BINFLOAT */ + "2.0"); /* Original protocol 2 */ PyDict_SetItemString(d, "format_version", format_version); PyDict_SetItemString(d, "compatible_formats", compatible_formats); Py_XDECREF(format_version); From python-checkins at python.org Sat Jul 7 20:22:41 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Sat, 7 Jul 2007 20:22:41 +0200 (CEST) Subject: [Python-checkins] r56184 - in python/branches/cpy_merge: Modules/_picklemodule.c setup.py Message-ID: <20070707182241.D38C41E4002@bag.python.org> Author: alexandre.vassalotti Date: Sat Jul 7 20:22:41 2007 New Revision: 56184 Modified: python/branches/cpy_merge/Modules/_picklemodule.c python/branches/cpy_merge/setup.py Log: Finish renaming cPickle to _pickle. Add build instruction to setup.py. 
Modified: python/branches/cpy_merge/Modules/_picklemodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_picklemodule.c (original) +++ python/branches/cpy_merge/Modules/_picklemodule.c Sat Jul 7 20:22:41 2007 @@ -2,7 +2,7 @@ #include "cStringIO.h" #include "structmember.h" -PyDoc_STRVAR(cPickle_module_documentation, +PyDoc_STRVAR(pickle_module_documentation, "C implementation and optimization of the Python pickle module."); #ifndef Py_eval_input @@ -150,7 +150,7 @@ } static PyTypeObject PdataType = { - PyObject_HEAD_INIT(NULL) 0, "cPickle.Pdata", sizeof(Pdata), 0, + PyObject_HEAD_INIT(NULL) 0, "pickle.Pdata", sizeof(Pdata), 0, (destructor) Pdata_dealloc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0L, 0L, 0L, 0L, "" }; @@ -382,7 +382,7 @@ static PyObject * -cPickle_ErrFormat(PyObject * ErrType, char *stringformat, char *format, ...) +pickle_ErrFormat(PyObject * ErrType, char *stringformat, char *format, ...) { va_list va; PyObject *args = 0, *retval = 0; @@ -1847,21 +1847,21 @@ * but I don't know how to stop it. :-( */ mod = PyImport_ImportModule(module_str); if (mod == NULL) { - cPickle_ErrFormat(PicklingError, + pickle_ErrFormat(PicklingError, "Can't pickle %s: import of module %s " "failed", "OS", args, module); goto finally; } klass = PyObject_GetAttrString(mod, name_str); if (klass == NULL) { - cPickle_ErrFormat(PicklingError, + pickle_ErrFormat(PicklingError, "Can't pickle %s: attribute lookup %s.%s " "failed", "OSS", args, module, global_name); goto finally; } if (klass != args) { Py_DECREF(klass); - cPickle_ErrFormat(PicklingError, + pickle_ErrFormat(PicklingError, "Can't pickle %s: it's not the same object " "as %s.%s", "OSS", args, module, global_name); goto finally; @@ -1885,14 +1885,14 @@ /* Verify py_code has the right type and value. 
*/ if (!PyInt_Check(py_code)) { - cPickle_ErrFormat(PicklingError, "Can't pickle %s: " + pickle_ErrFormat(PicklingError, "Can't pickle %s: " "extension code %s isn't an integer", "OO", args, py_code); goto finally; } code = PyInt_AS_LONG(py_code); if (code <= 0 || code > 0x7fffffffL) { - cPickle_ErrFormat(PicklingError, "Can't pickle %s: " + pickle_ErrFormat(PicklingError, "Can't pickle %s: " "extension code %ld is out of range", "Ol", args, code); goto finally; @@ -2377,14 +2377,14 @@ } if (!PyTuple_Check(t)) { - cPickle_ErrFormat(PicklingError, "Value returned by " + pickle_ErrFormat(PicklingError, "Value returned by " "%s must be string or tuple", "O", __reduce__); goto finally; } size = PyTuple_Size(t); if (size < 2 || size > 5) { - cPickle_ErrFormat(PicklingError, "tuple returned by " + pickle_ErrFormat(PicklingError, "tuple returned by " "%s must contain 2 through 5 elements", "O", __reduce__); goto finally; @@ -2392,7 +2392,7 @@ arg_tup = PyTuple_GET_ITEM(t, 1); if (!(PyTuple_Check(arg_tup) || arg_tup == Py_None)) { - cPickle_ErrFormat(PicklingError, "Second element of " + pickle_ErrFormat(PicklingError, "Second element of " "tuple returned by %s must be a tuple", "O", __reduce__); goto finally; @@ -2879,7 +2879,7 @@ static PyTypeObject Picklertype = { PyObject_HEAD_INIT(NULL) 0, /*ob_size */ - "cPickle.Pickler", /*tp_name */ + "pickle.Pickler", /*tp_name */ sizeof(Picklerobject), /*tp_basicsize */ 0, (destructor) Pickler_dealloc, /* tp_dealloc */ @@ -4605,7 +4605,7 @@ break; default: - cPickle_ErrFormat(UnpicklingError, + pickle_ErrFormat(UnpicklingError, "invalid load key, '%s'.", "c", s[0]); return NULL; } @@ -5001,7 +5001,7 @@ break; continue; default: - cPickle_ErrFormat(UnpicklingError, + pickle_ErrFormat(UnpicklingError, "invalid load key, '%s'.", "c", s[0]); return NULL; } @@ -5379,7 +5379,7 @@ static PyTypeObject Unpicklertype = { PyObject_HEAD_INIT(NULL) 0, /*ob_size */ - "cPickle.Unpickler", /*tp_name */ + "pickle.Unpickler", /*tp_name */ 
sizeof(Unpicklerobject), /*tp_basicsize */ 0, (destructor) Unpickler_dealloc, /* tp_dealloc */ @@ -5403,7 +5403,7 @@ (inquiry) Unpickler_clear, /* tp_clear */ }; -static struct PyMethodDef cPickle_methods[] = { +static struct PyMethodDef pickle_methods[] = { {"dump", (PyCFunction) cpm_dump, METH_VARARGS | METH_KEYWORDS, PyDoc_STR("dump(obj, file, protocol=0) -- " "Write an object in pickle format to the given file.\n" @@ -5533,13 +5533,13 @@ return -1; Py_DECREF(r); - PickleError = PyErr_NewException("cPickle.PickleError", NULL, t); + PickleError = PyErr_NewException("pickle.PickleError", NULL, t); if (!PickleError) return -1; Py_DECREF(t); - PicklingError = PyErr_NewException("cPickle.PicklingError", + PicklingError = PyErr_NewException("pickle.PicklingError", PickleError, NULL); if (!PicklingError) return -1; @@ -5556,16 +5556,16 @@ if (! (UnpickleableError = - PyErr_NewException("cPickle.UnpickleableError", PicklingError, t))) + PyErr_NewException("pickle.UnpickleableError", PicklingError, t))) return -1; Py_DECREF(t); - if (!(UnpicklingError = PyErr_NewException("cPickle.UnpicklingError", + if (!(UnpicklingError = PyErr_NewException("pickle.UnpicklingError", PickleError, NULL))) return -1; - if (!(BadPickleGet = PyErr_NewException("cPickle.BadPickleGet", + if (!(BadPickleGet = PyErr_NewException("pickle.BadPickleGet", UnpicklingError, NULL))) return -1; @@ -5595,7 +5595,7 @@ #define PyMODINIT_FUNC void #endif PyMODINIT_FUNC -initcPickle(void) +init_pickle(void) { PyObject *m, *d, *di, *v, *k; Py_ssize_t i; @@ -5617,8 +5617,8 @@ return; /* Create the module and add the functions */ - m = Py_InitModule4("cPickle", cPickle_methods, - cPickle_module_documentation, + m = Py_InitModule4("_pickle", pickle_methods, + pickle_module_documentation, (PyObject *) NULL, PYTHON_API_VERSION); if (m == NULL) return; Modified: python/branches/cpy_merge/setup.py ============================================================================== --- python/branches/cpy_merge/setup.py 
(original) +++ python/branches/cpy_merge/setup.py Sat Jul 7 20:22:41 2007 @@ -472,6 +472,9 @@ exts.append( Extension('cStringIO', ['cStringIO.c']) ) exts.append( Extension('cPickle', ['cPickle.c']) ) + # Optimization for pickle + exts.append( Extension('_pickle', ['_picklemodule.c']) ) + # Fast implementation of BytesIO and StringIO exts.append( Extension('_bytes_io', ['_bytes_iomodule.c']) ) exts.append( Extension('_string_io', ['_string_iomodule.c']) ) From python-checkins at python.org Sat Jul 7 20:36:09 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Sat, 7 Jul 2007 20:36:09 +0200 (CEST) Subject: [Python-checkins] r56185 - python/branches/cpy_merge/Modules/_picklemodule.c Message-ID: <20070707183609.774DC1E4017@bag.python.org> Author: alexandre.vassalotti Date: Sat Jul 7 20:36:09 2007 New Revision: 56185 Modified: python/branches/cpy_merge/Modules/_picklemodule.c Log: Remove dependency on cStringIO. Remove the dumps and loads methods. Modified: python/branches/cpy_merge/Modules/_picklemodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_picklemodule.c (original) +++ python/branches/cpy_merge/Modules/_picklemodule.c Sat Jul 7 20:36:09 2007 @@ -1,5 +1,4 @@ #include "Python.h" -#include "cStringIO.h" #include "structmember.h" PyDoc_STRVAR(pickle_module_documentation, @@ -443,20 +442,6 @@ } static int -write_cStringIO(Picklerobject * self, const char *s, Py_ssize_t n) -{ - if (s == NULL) { - return 0; - } - - if (PycStringIO->cwrite((PyObject *) self->file, s, n) != n) { - return -1; - } - - return (int) n; -} - -static int write_none(Picklerobject * self, const char *s, Py_ssize_t n) { if (s == NULL) @@ -602,39 +587,6 @@ } } - -static Py_ssize_t -read_cStringIO(Unpicklerobject * self, char **s, Py_ssize_t n) -{ - char *ptr; - - if (PycStringIO->cread((PyObject *) self->file, &ptr, n) != n) { - PyErr_SetNone(PyExc_EOFError); - return -1; - } - - *s = ptr; - - return n; 
-} - - -static Py_ssize_t -readline_cStringIO(Unpicklerobject * self, char **s) -{ - Py_ssize_t n; - char *ptr; - - if ((n = PycStringIO->creadline((PyObject *) self->file, &ptr)) < 0) { - return -1; - } - - *s = ptr; - - return n; -} - - static Py_ssize_t read_other(Unpicklerobject * self, char **s, Py_ssize_t n) { @@ -2678,9 +2630,6 @@ } self->write_func = write_file; } - else if (PycStringIO_OutputCheck(file)) { - self->write_func = write_cStringIO; - } else if (file == Py_None) { self->write_func = write_none; } @@ -5091,11 +5040,6 @@ self->read_func = read_file; self->readline_func = readline_file; } - else if (PycStringIO_InputCheck(f)) { - self->fp = NULL; - self->read_func = read_cStringIO; - self->readline_func = readline_cStringIO; - } else { self->fp = NULL; @@ -5296,39 +5240,6 @@ return res; } - -/* dumps(obj, protocol=0). */ -static PyObject * -cpm_dumps(PyObject * self, PyObject * args, PyObject * kwds) -{ - static char *kwlist[] = { "obj", "protocol", NULL }; - PyObject *ob, *file = 0, *res = NULL; - Picklerobject *pickler = 0; - int proto = 0; - - if (!(PyArg_ParseTupleAndKeywords(args, kwds, "O|i:dumps", kwlist, - &ob, &proto))) - goto finally; - - if (!(file = PycStringIO->NewOutput(128))) - goto finally; - - if (!(pickler = newPicklerobject(file, proto))) - goto finally; - - if (dump(pickler, ob) < 0) - goto finally; - - res = PycStringIO->cgetvalue(file); - - finally: - Py_XDECREF(pickler); - Py_XDECREF(file); - - return res; -} - - /* load(fileobj). 
*/ static PyObject * cpm_load(PyObject * self, PyObject * ob) @@ -5347,33 +5258,6 @@ return res; } - -/* loads(string) */ -static PyObject * -cpm_loads(PyObject * self, PyObject * args) -{ - PyObject *ob, *file = 0, *res = NULL; - Unpicklerobject *unpickler = 0; - - if (!(PyArg_ParseTuple(args, "S:loads", &ob))) - goto finally; - - if (!(file = PycStringIO->NewInput(ob))) - goto finally; - - if (!(unpickler = newUnpicklerobject(file))) - goto finally; - - res = load(unpickler); - - finally: - Py_XDECREF(file); - Py_XDECREF(unpickler); - - return res; -} - - PyDoc_STRVAR(Unpicklertype__doc__, "Objects that know how to unpickle"); static PyTypeObject Unpicklertype = { @@ -5411,19 +5295,9 @@ "See the Pickler docstring for the meaning of optional argument proto.") }, - {"dumps", (PyCFunction) cpm_dumps, METH_VARARGS | METH_KEYWORDS, - PyDoc_STR("dumps(obj, protocol=0) -- " - "Return a string containing an object in pickle format.\n" - "\n" - "See the Pickler docstring for the meaning of optional argument proto.") - }, - {"load", (PyCFunction) cpm_load, METH_O, PyDoc_STR("load(file) -- Load a pickle from the given file")}, - {"loads", (PyCFunction) cpm_loads, METH_VARARGS, - PyDoc_STR("loads(string) -- Load a pickle from the given string")}, - {"Pickler", (PyCFunction) get_Pickler, METH_VARARGS | METH_KEYWORDS, PyDoc_STR("Pickler(file, protocol=0) -- Create a pickler.\n" "\n" @@ -5586,8 +5460,6 @@ if (PyDict_SetItemString(module_dict, "BadPickleGet", BadPickleGet) < 0) return -1; - PycString_IMPORT; - return 0; } From python-checkins at python.org Sat Jul 7 21:04:38 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Sat, 7 Jul 2007 21:04:38 +0200 (CEST) Subject: [Python-checkins] r56186 - python/branches/cpy_merge/Modules/_picklemodule.c Message-ID: <20070707190438.489AB1E4002@bag.python.org> Author: alexandre.vassalotti Date: Sat Jul 7 21:04:37 2007 New Revision: 56186 Modified: python/branches/cpy_merge/Modules/_picklemodule.c Log: Fix remaining 
formatting problems with typedefs. Modified: python/branches/cpy_merge/Modules/_picklemodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_picklemodule.c (original) +++ python/branches/cpy_merge/Modules/_picklemodule.c Sat Jul 7 21:04:37 2007 @@ -129,13 +129,14 @@ Internal Data type for pickle data. */ typedef struct { - PyObject_HEAD int length; /* number of initial slots in data currently used */ + PyObject_HEAD + int length; /* number of initial slots in data currently used */ int size; /* number of slots in data allocated */ PyObject **data; } Pdata; static void -Pdata_dealloc(Pdata * self) +Pdata_dealloc(Pdata *self) { int i; PyObject **p; @@ -183,7 +184,7 @@ * number of items, this is a (non-erroneous) NOP. */ static int -Pdata_clear(Pdata * self, int clearto) +Pdata_clear(Pdata *self, int clearto) { int i; PyObject **p; @@ -202,7 +203,7 @@ } static int -Pdata_grow(Pdata * self) +Pdata_grow(Pdata *self) { int bigger; size_t nbytes; @@ -270,7 +271,7 @@ static PyObject * -Pdata_popTuple(Pdata * self, int start) +Pdata_popTuple(Pdata *self, int start) { PyObject *r; int i, j, l; @@ -287,7 +288,7 @@ } static PyObject * -Pdata_popList(Pdata * self, int start) +Pdata_popList(Pdata *self, int start) { PyObject *r; int i, j, l; @@ -322,7 +323,8 @@ } typedef struct Picklerobject { - PyObject_HEAD FILE * fp; + PyObject_HEAD + FILE *fp; PyObject *write; PyObject *file; PyObject *memo; @@ -353,7 +355,8 @@ static PyTypeObject Picklertype; typedef struct Unpicklerobject { - PyObject_HEAD FILE * fp; + PyObject_HEAD + FILE *fp; PyObject *file; PyObject *readline; PyObject *read; @@ -366,8 +369,8 @@ int *marks; int num_marks; int marks_size; - Py_ssize_t(*read_func) (struct Unpicklerobject *, char **, Py_ssize_t); - Py_ssize_t(*readline_func) (struct Unpicklerobject *, char **); + Py_ssize_t(*read_func) (struct Unpicklerobject *, char **, Py_ssize_t); + Py_ssize_t(*readline_func) (struct Unpicklerobject 
*, char **); int buf_size; char *buf; PyObject *find_class; @@ -381,7 +384,7 @@ static PyObject * -pickle_ErrFormat(PyObject * ErrType, char *stringformat, char *format, ...) +pickle_ErrFormat(PyObject *ErrType, char *stringformat, char *format, ...) { va_list va; PyObject *args = 0, *retval = 0; @@ -418,7 +421,7 @@ } static int -write_file(Picklerobject * self, const char *s, Py_ssize_t n) +write_file(Picklerobject *self, const char *s, Py_ssize_t n) { size_t nbyteswritten; @@ -433,7 +436,9 @@ Py_BEGIN_ALLOW_THREADS nbyteswritten = fwrite(s, sizeof(char), n, self->fp); - Py_END_ALLOW_THREADS if (nbyteswritten != (size_t) n) { + Py_END_ALLOW_THREADS + + if (nbyteswritten != (size_t) n) { PyErr_SetFromErrno(PyExc_IOError); return -1; } @@ -442,7 +447,7 @@ } static int -write_none(Picklerobject * self, const char *s, Py_ssize_t n) +write_none(Picklerobject *self, const char *s, Py_ssize_t n) { if (s == NULL) return 0; @@ -452,7 +457,7 @@ } static int -write_other(Picklerobject * self, const char *s, Py_ssize_t _n) +write_other(Picklerobject *self, const char *s, Py_ssize_t _n) { PyObject *py_str = 0, *junk = 0; int n; @@ -505,7 +510,7 @@ static Py_ssize_t -read_file(Unpicklerobject * self, char **s, Py_ssize_t n) +read_file(Unpicklerobject *self, char **s, Py_ssize_t n) { size_t nbytesread; @@ -532,7 +537,8 @@ Py_BEGIN_ALLOW_THREADS nbytesread = fread(self->buf, sizeof(char), n, self->fp); - Py_END_ALLOW_THREADS if (nbytesread != (size_t) n) { + Py_END_ALLOW_THREADS + if (nbytesread != (size_t) n) { if (feof(self->fp)) { PyErr_SetNone(PyExc_EOFError); return -1; @@ -549,7 +555,7 @@ static Py_ssize_t -readline_file(Unpicklerobject * self, char **s) +readline_file(Unpicklerobject *self, char **s) { int i; @@ -588,7 +594,7 @@ } static Py_ssize_t -read_other(Unpicklerobject * self, char **s, Py_ssize_t n) +read_other(Unpicklerobject *self, char **s, Py_ssize_t n) { PyObject *bytes, *str = 0; @@ -613,7 +619,7 @@ static Py_ssize_t -readline_other(Unpicklerobject * self, 
char **s) +readline_other(Unpicklerobject *self, char **s) { PyObject *str; Py_ssize_t str_size; @@ -651,7 +657,7 @@ static int -get(Picklerobject * self, PyObject * id) +get(Picklerobject *self, PyObject * id) { PyObject *value, *mv; long c_value; @@ -709,7 +715,7 @@ static int -put(Picklerobject * self, PyObject * ob) +put(Picklerobject *self, PyObject *ob) { if (ob->ob_refcnt < 2 || self->fast) return 0; @@ -719,7 +725,7 @@ static int -put2(Picklerobject * self, PyObject * ob) +put2(Picklerobject *self, PyObject *ob) { char c_str[30]; int p; @@ -800,7 +806,7 @@ } static PyObject * -whichmodule(PyObject * global, PyObject * global_name) +whichmodule(PyObject *global, PyObject *global_name) { Py_ssize_t i, j; PyObject *module = 0, *modules_dict = 0, *global_name_attr = 0, *name = 0; @@ -856,7 +862,7 @@ static int -fast_save_enter(Picklerobject * self, PyObject * obj) +fast_save_enter(Picklerobject *self, PyObject *obj) { /* if fast_container < 0, we're doing an error exit. */ if (++self->fast_container >= PY_CPICKLE_FAST_LIMIT) { @@ -891,7 +897,7 @@ } int -fast_save_leave(Picklerobject * self, PyObject * obj) +fast_save_leave(Picklerobject *self, PyObject *obj) { if (self->fast_container-- >= PY_CPICKLE_FAST_LIMIT) { PyObject *key = PyLong_FromVoidPtr(obj); @@ -907,7 +913,7 @@ } static int -save_none(Picklerobject * self, PyObject * args) +save_none(Picklerobject *self, PyObject *args) { static char none = NONE; if (self->write_func(self, &none, 1) < 0) @@ -917,7 +923,7 @@ } static int -save_bool(Picklerobject * self, PyObject * args) +save_bool(Picklerobject *self, PyObject *args) { static const char *buf[2] = { FALSE, TRUE }; static char len[2] = { sizeof(FALSE) - 1, sizeof(TRUE) - 1 }; @@ -934,7 +940,7 @@ } static int -save_int(Picklerobject * self, long l) +save_int(Picklerobject *self, long l) { char c_str[32]; int len = 0; @@ -983,7 +989,7 @@ static int -save_long(Picklerobject * self, PyObject * args) +save_long(Picklerobject *self, PyObject *args) { 
Py_ssize_t size; int res = -1; @@ -1110,7 +1116,7 @@ static int -save_float(Picklerobject * self, PyObject * args) +save_float(Picklerobject *self, PyObject *args) { double x = PyFloat_AS_DOUBLE((PyFloatObject *) args); @@ -1138,7 +1144,7 @@ static int -save_string(Picklerobject * self, PyObject * args, int doput) +save_string(Picklerobject *self, PyObject *args, int doput) { int size, len; PyObject *repr = 0; @@ -1222,7 +1228,7 @@ /* A copy of PyUnicode_EncodeRawUnicodeEscape() that also translates backslash and newline characters to \uXXXX escapes. */ static PyObject * -modified_EncodeRawUnicodeEscape(const Py_UNICODE * s, int size) +modified_EncodeRawUnicodeEscape(const Py_UNICODE *s, int size) { PyObject *repr; char *p; @@ -1259,7 +1265,7 @@ static int -save_unicode(Picklerobject * self, PyObject * args, int doput) +save_unicode(Picklerobject *self, PyObject *args, int doput) { Py_ssize_t size, len; PyObject *repr = 0; @@ -1339,7 +1345,7 @@ /* A helper for save_tuple. Push the len elements in tuple t on the stack. */ static int -store_tuple_elements(Picklerobject * self, PyObject * t, int len) +store_tuple_elements(Picklerobject *self, PyObject *t, int len) { int i; int res = -1; /* guilty until proved innocent */ @@ -1367,7 +1373,7 @@ * magic so that it works in all cases. IOW, this is a long routine. */ static int -save_tuple(Picklerobject * self, PyObject * args) +save_tuple(Picklerobject *self, PyObject *args) { PyObject *py_tuple_id = NULL; int len, i; @@ -1480,7 +1486,7 @@ * Returns 0 on success, <0 on error. */ static int -batch_list(Picklerobject * self, PyObject * iter) +batch_list(Picklerobject *self, PyObject *iter) { PyObject *obj; PyObject *slice[BATCHSIZE]; @@ -1555,7 +1561,7 @@ } static int -save_list(Picklerobject * self, PyObject * args) +save_list(Picklerobject *self, PyObject *args) { int res = -1; char s[3]; @@ -1619,7 +1625,7 @@ * ugly to bear. 
*/ static int -batch_dict(Picklerobject * self, PyObject * iter) +batch_dict(Picklerobject *self, PyObject *iter) { PyObject *p; PyObject *slice[BATCHSIZE]; @@ -1712,7 +1718,7 @@ } static int -save_dict(Picklerobject * self, PyObject * args) +save_dict(Picklerobject *self, PyObject *args) { int res = -1; char s[3]; @@ -1768,7 +1774,7 @@ static int -save_global(Picklerobject * self, PyObject * args, PyObject * name) +save_global(Picklerobject *self, PyObject *args, PyObject *name) { PyObject *global_name = 0, *module = 0, *mod = 0, *klass = 0; char *name_str, *module_str; @@ -1906,7 +1912,7 @@ } static int -save_pers(Picklerobject * self, PyObject * args, PyObject * f) +save_pers(Picklerobject *self, PyObject *args, PyObject *f) { PyObject *pid = 0; int size, res = -1; @@ -1968,7 +1974,7 @@ * appropriate __reduce__ method for ob. */ static int -save_reduce(Picklerobject * self, PyObject * args, PyObject * ob) +save_reduce(Picklerobject *self, PyObject *args, PyObject *ob) { PyObject *callable; PyObject *argtup; @@ -2112,7 +2118,7 @@ } static int -save(Picklerobject * self, PyObject * args, int pers_save) +save(Picklerobject *self, PyObject *args, int pers_save) { PyTypeObject *type; PyObject *py_ob_id = 0, *__reduce__ = 0, *t = 0; @@ -2363,7 +2369,7 @@ static int -dump(Picklerobject * self, PyObject * args) +dump(Picklerobject *self, PyObject *args) { static char stop = STOP; @@ -2390,7 +2396,7 @@ } static PyObject * -Pickle_clear_memo(Picklerobject * self, PyObject * args) +Pickle_clear_memo(Picklerobject *self, PyObject *args) { if (self->memo) PyDict_Clear(self->memo); @@ -2399,7 +2405,7 @@ } static PyObject * -Pickle_getvalue(Picklerobject * self, PyObject * args) +Pickle_getvalue(Picklerobject *self, PyObject *args) { int l, i, rsize, ssize, clear = 1, lm; long ik; @@ -2544,7 +2550,7 @@ } static PyObject * -Pickler_dump(Picklerobject * self, PyObject * args) +Pickler_dump(Picklerobject *self, PyObject *args) { PyObject *ob; int get = 0; @@ -2577,7 +2583,7 @@ 
static Picklerobject * -newPicklerobject(PyObject * file, int proto) +newPicklerobject(PyObject *file, int proto) { Picklerobject *self; @@ -2666,7 +2672,7 @@ static PyObject * -get_Pickler(PyObject * self, PyObject * args, PyObject * kwds) +get_Pickler(PyObject *self, PyObject *args, PyObject *kwds) { static char *kwlist[] = { "file", "protocol", NULL }; PyObject *file = NULL; @@ -2691,7 +2697,7 @@ static void -Pickler_dealloc(Picklerobject * self) +Pickler_dealloc(Picklerobject *self) { PyObject_GC_UnTrack(self); Py_XDECREF(self->write); @@ -2707,7 +2713,7 @@ } static int -Pickler_traverse(Picklerobject * self, visitproc visit, void *arg) +Pickler_traverse(Picklerobject *self, visitproc visit, void *arg) { Py_VISIT(self->write); Py_VISIT(self->memo); @@ -2721,7 +2727,7 @@ } static int -Pickler_clear(Picklerobject * self) +Pickler_clear(Picklerobject *self) { Py_CLEAR(self->write); Py_CLEAR(self->memo); @@ -2735,7 +2741,7 @@ } static PyObject * -Pickler_get_pers_func(Picklerobject * p) +Pickler_get_pers_func(Picklerobject *p) { if (p->pers_func == NULL) PyErr_SetString(PyExc_AttributeError, "persistent_id"); @@ -2745,7 +2751,7 @@ } static int -Pickler_set_pers_func(Picklerobject * p, PyObject * v) +Pickler_set_pers_func(Picklerobject *p, PyObject *v) { if (v == NULL) { PyErr_SetString(PyExc_TypeError, @@ -2759,7 +2765,7 @@ } static int -Pickler_set_inst_pers_func(Picklerobject * p, PyObject * v) +Pickler_set_inst_pers_func(Picklerobject *p, PyObject *v) { if (v == NULL) { PyErr_SetString(PyExc_TypeError, @@ -2773,7 +2779,7 @@ } static PyObject * -Pickler_get_memo(Picklerobject * p) +Pickler_get_memo(Picklerobject *p) { if (p->memo == NULL) PyErr_SetString(PyExc_AttributeError, "memo"); @@ -2783,7 +2789,7 @@ } static int -Pickler_set_memo(Picklerobject * p, PyObject * v) +Pickler_set_memo(Picklerobject *p, PyObject *v) { if (v == NULL) { PyErr_SetString(PyExc_TypeError, @@ -2801,7 +2807,7 @@ } static PyObject * -Pickler_get_error(Picklerobject * p) 
+Pickler_get_error(Picklerobject *p) { /* why is this an attribute on the Pickler? */ Py_INCREF(PicklingError); @@ -2860,7 +2866,7 @@ }; static PyObject * -find_class(PyObject * py_module_name, PyObject * py_global_name, PyObject * fc) +find_class(PyObject *py_module_name, PyObject *py_global_name, PyObject *fc) { PyObject *global = 0, *module; @@ -2892,7 +2898,7 @@ } static int -marker(Unpicklerobject * self) +marker(Unpicklerobject *self) { if (self->num_marks < 1) { PyErr_SetString(UnpicklingError, "could not find MARK"); @@ -2904,7 +2910,7 @@ static int -load_none(Unpicklerobject * self) +load_none(Unpicklerobject *self) { PDATA_APPEND(self->stack, Py_None, -1); return 0; @@ -2918,7 +2924,7 @@ } static int -load_int(Unpicklerobject * self) +load_int(Unpicklerobject *self) { PyObject *py_int = 0; char *endptr, *s; @@ -2968,7 +2974,7 @@ } static int -load_bool(Unpicklerobject * self, PyObject * boolean) +load_bool(Unpicklerobject *self, PyObject *boolean) { assert(boolean == Py_True || boolean == Py_False); PDATA_APPEND(self->stack, boolean, -1); @@ -3004,7 +3010,7 @@ static int -load_binintx(Unpicklerobject * self, char *s, int x) +load_binintx(Unpicklerobject *self, char *s, int x) { PyObject *py_int = 0; long l; @@ -3020,7 +3026,7 @@ static int -load_binint(Unpicklerobject * self) +load_binint(Unpicklerobject *self) { char *s; @@ -3032,7 +3038,7 @@ static int -load_binint1(Unpicklerobject * self) +load_binint1(Unpicklerobject *self) { char *s; @@ -3044,7 +3050,7 @@ static int -load_binint2(Unpicklerobject * self) +load_binint2(Unpicklerobject *self) { char *s; @@ -3055,7 +3061,7 @@ } static int -load_long(Unpicklerobject * self) +load_long(Unpicklerobject *self) { PyObject *l = 0; char *end, *s; @@ -3085,7 +3091,7 @@ * data following. 
*/ static int -load_counted_long(Unpicklerobject * self, int size) +load_counted_long(Unpicklerobject *self, int size) { Py_ssize_t i; char *nbytes; @@ -3124,7 +3130,7 @@ } static int -load_float(Unpicklerobject * self) +load_float(Unpicklerobject *self) { PyObject *py_float = 0; char *endptr, *s; @@ -3160,7 +3166,7 @@ } static int -load_binfloat(Unpicklerobject * self) +load_binfloat(Unpicklerobject *self) { PyObject *py_float; double x; @@ -3182,7 +3188,7 @@ } static int -load_string(Unpicklerobject * self) +load_string(Unpicklerobject *self) { PyObject *str = 0; int len, res = -1; @@ -3229,7 +3235,7 @@ static int -load_binstring(Unpicklerobject * self) +load_binstring(Unpicklerobject *self) { PyObject *py_string = 0; long l; @@ -3252,7 +3258,7 @@ static int -load_short_binstring(Unpicklerobject * self) +load_short_binstring(Unpicklerobject *self) { PyObject *py_string = 0; unsigned char l; @@ -3276,7 +3282,7 @@ #ifdef Py_USING_UNICODE static int -load_unicode(Unpicklerobject * self) +load_unicode(Unpicklerobject *self) { PyObject *str = 0; int len, res = -1; @@ -3301,7 +3307,7 @@ #ifdef Py_USING_UNICODE static int -load_binunicode(Unpicklerobject * self) +load_binunicode(Unpicklerobject *self) { PyObject *unicode; long l; @@ -3325,7 +3331,7 @@ static int -load_tuple(Unpicklerobject * self) +load_tuple(Unpicklerobject *self) { PyObject *tup; int i; @@ -3339,7 +3345,7 @@ } static int -load_counted_tuple(Unpicklerobject * self, int len) +load_counted_tuple(Unpicklerobject *self, int len) { PyObject *tup = PyTuple_New(len); @@ -3359,7 +3365,7 @@ } static int -load_empty_list(Unpicklerobject * self) +load_empty_list(Unpicklerobject *self) { PyObject *list; @@ -3370,7 +3376,7 @@ } static int -load_empty_dict(Unpicklerobject * self) +load_empty_dict(Unpicklerobject *self) { PyObject *dict; @@ -3382,7 +3388,7 @@ static int -load_list(Unpicklerobject * self) +load_list(Unpicklerobject *self) { PyObject *list = 0; int i; @@ -3396,7 +3402,7 @@ } static int 
-load_dict(Unpicklerobject * self) +load_dict(Unpicklerobject *self) { PyObject *dict, *key, *value; int i, j, k; @@ -3422,7 +3428,7 @@ } static PyObject * -Instance_New(PyObject * cls, PyObject * args) +Instance_New(PyObject *cls, PyObject *args) { PyObject *r = 0; @@ -3448,7 +3454,7 @@ static int -load_obj(Unpicklerobject * self) +load_obj(Unpicklerobject *self) { PyObject *class, *tup, *obj = 0; int i; @@ -3472,7 +3478,7 @@ static int -load_inst(Unpicklerobject * self) +load_inst(Unpicklerobject *self) { PyObject *tup, *class = 0, *obj = 0, *module_name, *class_name; int i, len; @@ -3516,7 +3522,7 @@ } static int -load_newobj(Unpicklerobject * self) +load_newobj(Unpicklerobject *self) { PyObject *args = NULL; PyObject *clsraw = NULL; @@ -3566,7 +3572,7 @@ } static int -load_global(Unpicklerobject * self) +load_global(Unpicklerobject *self) { PyObject *class = 0, *module_name = 0, *class_name = 0; int len; @@ -3600,7 +3606,7 @@ static int -load_persid(Unpicklerobject * self) +load_persid(Unpicklerobject *self) { PyObject *pid = 0; int len; @@ -3645,7 +3651,7 @@ } static int -load_binpersid(Unpicklerobject * self) +load_binpersid(Unpicklerobject *self) { PyObject *pid = 0; @@ -3683,7 +3689,7 @@ static int -load_pop(Unpicklerobject * self) +load_pop(Unpicklerobject *self) { int len; @@ -3709,7 +3715,7 @@ static int -load_pop_mark(Unpicklerobject * self) +load_pop_mark(Unpicklerobject *self) { int i; @@ -3723,7 +3729,7 @@ static int -load_dup(Unpicklerobject * self) +load_dup(Unpicklerobject *self) { PyObject *last; int len; @@ -3738,7 +3744,7 @@ static int -load_get(Unpicklerobject * self) +load_get(Unpicklerobject *self) { PyObject *py_str = 0, *value = 0; int len; @@ -3769,7 +3775,7 @@ static int -load_binget(Unpicklerobject * self) +load_binget(Unpicklerobject *self) { PyObject *py_key = 0, *value = 0; unsigned char key; @@ -3799,7 +3805,7 @@ static int -load_long_binget(Unpicklerobject * self) +load_long_binget(Unpicklerobject *self) { PyObject *py_key = 0, 
*value = 0; unsigned char c; @@ -3840,7 +3846,7 @@ * the number of bytes following the opcode, holding the index (code) value. */ static int -load_extension(Unpicklerobject * self, int nbytes) +load_extension(Unpicklerobject *self, int nbytes) { char *codebytes; /* the nbytes bytes after the opcode */ long code; /* calc_binint returns long */ @@ -3908,7 +3914,7 @@ } static int -load_put(Unpicklerobject * self) +load_put(Unpicklerobject *self) { PyObject *py_str = 0, *value = 0; int len, l; @@ -3930,7 +3936,7 @@ static int -load_binput(Unpicklerobject * self) +load_binput(Unpicklerobject *self) { PyObject *py_key = 0, *value = 0; unsigned char key; @@ -3954,7 +3960,7 @@ static int -load_long_binput(Unpicklerobject * self) +load_long_binput(Unpicklerobject *self) { PyObject *py_key = 0, *value = 0; long key; @@ -3986,7 +3992,7 @@ static int -do_append(Unpicklerobject * self, int x) +do_append(Unpicklerobject *self, int x) { PyObject *value = 0, *list = 0, *append_method = 0; int len, i; @@ -4044,21 +4050,21 @@ static int -load_append(Unpicklerobject * self) +load_append(Unpicklerobject *self) { return do_append(self, self->stack->length - 1); } static int -load_appends(Unpicklerobject * self) +load_appends(Unpicklerobject *self) { return do_append(self, marker(self)); } static int -do_setitems(Unpicklerobject * self, int x) +do_setitems(Unpicklerobject *self, int x) { PyObject *value = 0, *key = 0, *dict = 0; int len, i, r = 0; @@ -4084,20 +4090,20 @@ static int -load_setitem(Unpicklerobject * self) +load_setitem(Unpicklerobject *self) { return do_setitems(self, self->stack->length - 2); } static int -load_setitems(Unpicklerobject * self) +load_setitems(Unpicklerobject *self) { return do_setitems(self, marker(self)); } static int -load_build(Unpicklerobject * self) +load_build(Unpicklerobject *self) { PyObject *state, *inst, *slotstate; PyObject *__setstate__; @@ -4191,7 +4197,7 @@ static int -load_mark(Unpicklerobject * self) +load_mark(Unpicklerobject *self) { int 
s; @@ -4223,7 +4229,7 @@ } static int -load_reduce(Unpicklerobject * self) +load_reduce(Unpicklerobject *self) { PyObject *callable = 0, *arg_tup = 0, *ob = 0; @@ -4248,7 +4254,7 @@ * is the first opcode for protocols >= 2. */ static int -load_proto(Unpicklerobject * self) +load_proto(Unpicklerobject *self) { int i; char *protobyte; @@ -4270,7 +4276,7 @@ } static PyObject * -load(Unpicklerobject * self) +load(Unpicklerobject *self) { PyObject *err = 0, *val = 0; char *s; @@ -4625,7 +4631,7 @@ } static int -noload_global(Unpicklerobject * self) +noload_global(Unpicklerobject *self) { char *s; @@ -4638,7 +4644,7 @@ } static int -noload_reduce(Unpicklerobject * self) +noload_reduce(Unpicklerobject *self) { if (self->stack->length < 2) @@ -4649,7 +4655,7 @@ } static int -noload_build(Unpicklerobject * self) +noload_build(Unpicklerobject *self) { if (self->stack->length < 1) @@ -4659,7 +4665,7 @@ } static int -noload_extension(Unpicklerobject * self, int nbytes) +noload_extension(Unpicklerobject *self, int nbytes) { char *codebytes; @@ -4672,7 +4678,7 @@ static PyObject * -noload(Unpicklerobject * self) +noload(Unpicklerobject *self) { PyObject *err = 0, *val = 0; char *s; @@ -4977,7 +4983,7 @@ } static PyObject * -Unpickler_noload(Unpicklerobject * self, PyObject * unused) +Unpickler_noload(Unpicklerobject *self, PyObject *unused) { return noload(self); } @@ -5001,7 +5007,7 @@ static Unpicklerobject * -newUnpicklerobject(PyObject * f) +newUnpicklerobject(PyObject *f) { Unpicklerobject *self; @@ -5066,14 +5072,14 @@ static PyObject * -get_Unpickler(PyObject * self, PyObject * file) +get_Unpickler(PyObject *self, PyObject *file) { return (PyObject *) newUnpicklerobject(file); } static void -Unpickler_dealloc(Unpicklerobject * self) +Unpickler_dealloc(Unpicklerobject *self) { PyObject_GC_UnTrack((PyObject *) self); Py_XDECREF(self->readline); @@ -5098,7 +5104,7 @@ } static int -Unpickler_traverse(Unpicklerobject * self, visitproc visit, void *arg) 
+Unpickler_traverse(Unpicklerobject *self, visitproc visit, void *arg) { Py_VISIT(self->readline); Py_VISIT(self->read); @@ -5113,7 +5119,7 @@ } static int -Unpickler_clear(Unpicklerobject * self) +Unpickler_clear(Unpicklerobject *self) { Py_CLEAR(self->readline); Py_CLEAR(self->read); @@ -5128,7 +5134,7 @@ } static PyObject * -Unpickler_getattr(Unpicklerobject * self, char *name) +Unpickler_getattr(Unpicklerobject *self, char *name) { if (!strcmp(name, "persistent_load")) { if (!self->pers_func) { @@ -5170,7 +5176,7 @@ static int -Unpickler_setattr(Unpicklerobject * self, char *name, PyObject * value) +Unpickler_setattr(Unpicklerobject *self, char *name, PyObject *value) { if (!strcmp(name, "persistent_load")) { @@ -5214,7 +5220,7 @@ /* dump(obj, file, protocol=0). */ static PyObject * -cpm_dump(PyObject * self, PyObject * args, PyObject * kwds) +cpm_dump(PyObject *self, PyObject *args, PyObject *kwds) { static char *kwlist[] = { "obj", "file", "protocol", NULL }; PyObject *ob, *file, *res = NULL; @@ -5242,7 +5248,7 @@ /* load(fileobj). 
*/ static PyObject * -cpm_load(PyObject * self, PyObject * ob) +cpm_load(PyObject *self, PyObject *ob) { Unpicklerobject *unpickler = 0; PyObject *res = NULL; @@ -5262,29 +5268,29 @@ static PyTypeObject Unpicklertype = { PyObject_HEAD_INIT(NULL) - 0, /*ob_size */ - "pickle.Unpickler", /*tp_name */ - sizeof(Unpicklerobject), /*tp_basicsize */ + 0, /*ob_size */ + "pickle.Unpickler", /*tp_name */ + sizeof(Unpicklerobject), /*tp_basicsize */ 0, (destructor) Unpickler_dealloc, /* tp_dealloc */ - 0, /* tp_print */ + 0, /* tp_print */ (getattrfunc) Unpickler_getattr, /* tp_getattr */ (setattrfunc) Unpickler_setattr, /* tp_setattr */ - 0, /* tp_compare */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ + 0, /* tp_compare */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, - Unpicklertype__doc__, /* tp_doc */ + Unpicklertype__doc__, /* tp_doc */ (traverseproc) Unpickler_traverse, /* tp_traverse */ - (inquiry) Unpickler_clear, /* tp_clear */ + (inquiry) Unpickler_clear, /* tp_clear */ }; static struct PyMethodDef pickle_methods[] = { @@ -5325,12 +5331,11 @@ {"Unpickler", (PyCFunction) get_Unpickler, METH_O, PyDoc_STR("Unpickler(file) -- Create an unpickler.")}, - {NULL, NULL} }; static int -init_stuff(PyObject * module_dict) +init_stuff(PyObject *module_dict) { PyObject *copy_reg, *t, *r; @@ -5463,9 +5468,6 @@ return 0; } -#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */ -#define PyMODINIT_FUNC void -#endif PyMODINIT_FUNC init_pickle(void) { From python-checkins at python.org Sun Jul 8 03:01:55 2007 From: python-checkins at python.org (brett.cannon) Date: Sun, 8 
Jul 2007 03:01:55 +0200 (CEST) Subject: [Python-checkins] r56187 - sandbox/trunk/import_in_py/importlib.py Message-ID: <20070708010155.27D781E4002@bag.python.org> Author: brett.cannon Date: Sun Jul 8 03:01:54 2007 New Revision: 56187 Modified: sandbox/trunk/import_in_py/importlib.py Log: Remove an unneeded import. Modified: sandbox/trunk/import_in_py/importlib.py ============================================================================== --- sandbox/trunk/import_in_py/importlib.py (original) +++ sandbox/trunk/import_in_py/importlib.py Sun Jul 8 03:01:54 2007 @@ -38,12 +38,10 @@ # (Neal). import errno import os -import contextlib import warnings _importlib.errno = errno _importlib.os = os -_importlib.contextlib = contextlib _importlib.warnings = warnings del _importlib From python-checkins at python.org Sun Jul 8 03:02:13 2007 From: python-checkins at python.org (brett.cannon) Date: Sun, 8 Jul 2007 03:02:13 +0200 (CEST) Subject: [Python-checkins] r56188 - sandbox/trunk/import_in_py/tests/__init__.py Message-ID: <20070708010213.7FBA91E4002@bag.python.org> Author: brett.cannon Date: Sun Jul 8 03:02:13 2007 New Revision: 56188 Modified: sandbox/trunk/import_in_py/tests/__init__.py Log: Add a visual separator between tests. Modified: sandbox/trunk/import_in_py/tests/__init__.py ============================================================================== --- sandbox/trunk/import_in_py/tests/__init__.py (original) +++ sandbox/trunk/import_in_py/tests/__init__.py Sun Jul 8 03:02:13 2007 @@ -10,3 +10,4 @@ module_name = os.path.splitext(filename)[0] module = __import__('tests.' 
+ module_name) getattr(module, module_name).test_main() + print '~' * 20 From python-checkins at python.org Sun Jul 8 03:13:21 2007 From: python-checkins at python.org (brett.cannon) Date: Sun, 8 Jul 2007 03:13:21 +0200 (CEST) Subject: [Python-checkins] r56189 - sandbox/trunk/import_in_py/_importlib.py Message-ID: <20070708011321.9DFE01E4002@bag.python.org> Author: brett.cannon Date: Sun Jul 8 03:13:21 2007 New Revision: 56189 Modified: sandbox/trunk/import_in_py/_importlib.py Log: Remove use of os.path.splitext. Modified: sandbox/trunk/import_in_py/_importlib.py ============================================================================== --- sandbox/trunk/import_in_py/_importlib.py (original) +++ sandbox/trunk/import_in_py/_importlib.py Sun Jul 8 03:13:21 2007 @@ -329,7 +329,8 @@ This method is required for PyPycHandler. """ - return os.path.splitext(path) + splits = path.rpartition('.') + return splits[0], splits[1] + splits[2] def create_path(self, base_path, type_, must_exist=False): """Create a new path based on a base path and requested path type. 
From python-checkins at python.org Sun Jul 8 09:45:47 2007 From: python-checkins at python.org (nick.coghlan) Date: Sun, 8 Jul 2007 09:45:47 +0200 (CEST) Subject: [Python-checkins] r56190 - peps/trunk/pep-0366.txt Message-ID: <20070708074547.2284B1E4002@bag.python.org> Author: nick.coghlan Date: Sun Jul 8 09:45:46 2007 New Revision: 56190 Modified: peps/trunk/pep-0366.txt Log: Major rewrite to simplify things and to incorporate python-dev feedback Modified: peps/trunk/pep-0366.txt ============================================================================== --- peps/trunk/pep-0366.txt (original) +++ peps/trunk/pep-0366.txt Sun Jul 8 09:45:46 2007 @@ -7,8 +7,8 @@ Type: Standards Track Content-Type: text/x-rst Created: 1-May-2007 -Python-Version: 2.6 -Post-History: 1-May-2007 +Python-Version: 2.6, 3.0 +Post-History: 1-May-2007, 4-Jul-2007, 7-Jul-2007 Abstract @@ -17,158 +17,95 @@ This PEP proposes a backwards compatible mechanism that permits the use of explicit relative imports from executable modules within packages. Such imports currently fail due to an awkward interaction -between PEP 328 and PEP 338 - this behaviour is the subject of at -least one open SF bug report (#1510172)[1], and has most likely -been a factor in at least a few queries on comp.lang.python (such -as Alan Isaac's question in [2]). - -With the proposed mechanism, relative imports will work automatically -if the module is executed using the ``-m`` switch. A small amount of -boilerplate will be needed in the module itself to allow the relative +between PEP 328 and PEP 338. + +By adding a new module level attribute, this PEP allows relative imports +to work automatically if the module is executed using the ``-m``switch. +A small amount of boilerplate in the module itself will allow the relative imports to work when the file is executed by name. 
-Import Statements and the Main Module -===================================== +Proposed Change +=============== + +The major proposed change is the introduction of a new module level +attribute, ``__package__``. When it is present, relative imports will +be based on this attribute rather than the module ``__name__`` +attribute. + +As with the current ``__name__`` attribute, setting ``__package__`` will +be the responsibility of the PEP 302 loader used to import a module. +Loaders which use ``imp.new_module()`` to create the module object will +have the new attribute set automatically to +``__name__.rpartition('.')[0]``. + +``runpy.run_module`` will also set the new attribute, basing it off the +``mod_name`` argument, rather than the ``run_name`` argument. This will +allow relative imports to work correctly from main modules executed with +the ``-m`` switch. + +When the main module is specified by its filename, then the +``__package__`` attribute will be set to the empty string. To allow +relative imports when the module is executed directly, boilerplate +similar to the following would be needed before the first relative +import statement: + + if __name__ == "__main__" and not __package_name__: + __package_name__ = "" -(This section is taken from the final revision of PEP 338) +Note that this boilerplate is sufficient only if the top level package +is already accessible via ``sys.path``. Additional code that manipulates +``sys.path`` would be needed in order for direct execution to work +without the top level package already being importable. -The release of 2.5b1 showed a surprising (although obvious in -retrospect) interaction between PEP 338 and PEP 328 - explicit -relative imports don't work from a main module. This is due to -the fact that relative imports rely on ``__name__`` to determine -the current module's position in the package hierarchy. 
In a main -module, the value of ``__name__`` is always ``'__main__'``, so -explicit relative imports will always fail (as they only work for -a module inside a package). - -Investigation into why implicit relative imports *appear* to work when -a main module is executed directly but fail when executed using ``-m`` -showed that such imports are actually always treated as absolute -imports. Because of the way direct execution works, the package -containing the executed module is added to sys.path, so its sibling -modules are actually imported as top level modules. This can easily -lead to multiple copies of the sibling modules in the application if -implicit relative imports are used in modules that may be directly -executed (e.g. test modules or utility scripts). - -For the 2.5 release, the recommendation is to always use absolute -imports in any module that is intended to be used as a main module. -The ``-m`` switch already provides a benefit here, as it inserts the -current directory into ``sys.path``, instead of the directory containing -the main module. This means that it is possible to run a module from -inside a package using ``-m`` so long as the current directory contains -the top level directory for the package. Absolute imports will work -correctly even if the package isn't installed anywhere else on -sys.path. If the module is executed directly and uses absolute imports -to retrieve its sibling modules, then the top level package directory -needs to be installed somewhere on sys.path (since the current directory -won't be added automatically). 
- -Here's an example file layout:: - - devel/ - pkg/ - __init__.py - moduleA.py - moduleB.py - test/ - __init__.py - test_A.py - test_B.py - -So long as the current directory is ``devel``, or ``devel`` is already -on ``sys.path`` and the test modules use absolute imports (such as -``import pkg.moduleA`` to retrieve the module under test, PEP 338 -allows the tests to be run as:: +This approach also has the same disadvantage as the use of absolute +imports of sibling modules - if the script is moved to a different +package or subpackage, the boilerplate will need to be updated +manually. - python -m pkg.test.test_A - python -m pkg.test.test_B Rationale for Change ==================== -In rejecting PEP 3122 (which proposed a higher impact solution to this -problem), Guido has indicated that he still isn't particularly keen on -the idea of executing modules inside packages as scripts [2]. Despite -these misgivings he has previously approved the addition of the ``-m`` -switch in Python 2.4, and the ``runpy`` module based enhancements -described in PEP 338 for Python 2.5. - -The philosophy that motivated those previous additions (i.e. access to -utility or testing scripts without needing to worry about name clashes in -either the OS executable namespace or the top level Python namespace) is -also the motivation behind fixing what I see as a bug in the current -implementation. +The current inability to use explicit relative imports from the main +module is the subject of at least one open SF bug report (#1510172)[1], +and has most likely been a factor in at least a few queries on +comp.lang.python (such as Alan Isaac's question in [2]). This PEP is intended to provide a solution which permits explicit relative imports from main modules, without incurring any significant costs during interpreter startup or normal module import. +The section in PEP 338 on relative imports and the main module provides +further details and background on this problem. 
-Proposed Solution -================= -The heart of the proposed solution is a new module attribute -``__package_name__``. This attribute will be defined only in -the main module (i.e. modules where ``__name__ == "__main__"``). - -For a directly executed main module, this attribute will be set -to the empty string. For a module executed using -``runpy.run_module()`` with the ``run_name`` parameter set to -``"__main__"``, the attribute will be set to -``mod_name.rpartition('.')[0]`` (i.e., everything up to -but not including the last period). - -In the import machinery there is an error handling path which -deals with the case where an explicit relative reference attempts -to go higher than the top level in the package hierarchy. This -error path would be changed to fall back on the ``__package_name__`` -attribute for explicit relative imports when the importing module -is called ``"__main__"``. - -With this change, explicit relative imports will work automatically -from a script executed with the ``-m`` switch. To allow direct -execution of the module, the following boilerplate would be needed at -the top of the script:: +Reference Implementation +======================== - if __name__ == "__main__" and not __package_name__: - __package_name__ = "" - -Note that this boilerplate is sufficient only if the top level package -is already accessible via sys.path. Additional code that manipulates -sys.path would be needed in order for direct execution to work -without the top level package already being on sys.path. - -This approach also has the same disadvantage as the use of absolute -imports of sibling modules - if the script is moved to a different -package or subpackage, the boilerplate will need to be updated -manually. - -With this feature in place, the test scripts in the package above -would be able to change their import lines to something along the -lines of ``import ..moduleA``. 
The scripts could then be -executed unmodified even if the name of the package was changed. - -(Rev 47142 in SVN implemented an early variant of this proposal +Rev 47142 in SVN implemented an early variant of this proposal which stored the main module's real module name in the '__module_name__' attribute. It was reverted due to the fact -that 2.5 was already in beta by that time.) +that 2.5 was already in beta by that time. + +A new patch will be developed for 2.6, and forward ported to +Py3k via svnmerge. Alternative Proposals ===================== PEP 3122 proposed addressing this problem by changing the way -the main module is identified. That's a huge compatibility cost +the main module is identified. That's a significant compatibility cost to incur to fix something that is a pretty minor bug in the overall -scheme of things. +scheme of things, and the PEP was rejected [3]. The advantage of the proposal in this PEP is that its only impact on -normal code is the tiny amount of time needed at startup to set the extra -attribute in the main module. The changes to the import machinery are all -in an existing error handling path, so normal imports don't incur any -performance penalty at all. +normal code is the small amount of time needed to set the extra +attribute when importing a module. Relative imports themselves should +be sped up fractionally, as the package name is stored in the module +globals, rather than having to be worked out again for each relative +import. References @@ -177,11 +114,12 @@ .. [1] Absolute/relative import not working? (http://www.python.org/sf/1510172) -.. [2] Guido's rejection of PEP 3122 +.. [2] c.l.p. question about modules and relative imports + (http://groups.google.com/group/comp.lang.python/browse_thread/thread/c44c769a72ca69fa/) + +.. [3] Guido's rejection of PEP 3122 (http://mail.python.org/pipermail/python-3000/2007-April/006793.html) -.. [3] c.l.p. 
question about modules and relative imports - (http://groups.google.com/group/comp.lang.python/browse_thread/thread/c44c769a72ca69fa/) Copyright ========= From python-checkins at python.org Sun Jul 8 10:49:55 2007 From: python-checkins at python.org (martin.v.loewis) Date: Sun, 8 Jul 2007 10:49:55 +0200 (CEST) Subject: [Python-checkins] r56191 - peps/trunk/PyRSS2Gen.py peps/trunk/pep2rss.py Message-ID: <20070708084955.353BC1E4002@bag.python.org> Author: martin.v.loewis Date: Sun Jul 8 10:49:54 2007 New Revision: 56191 Added: peps/trunk/PyRSS2Gen.py (contents, props changed) peps/trunk/pep2rss.py (contents, props changed) Log: Add pep2rss, contributed by Jonathan Ellis. Added: peps/trunk/PyRSS2Gen.py ============================================================================== --- (empty file) +++ peps/trunk/PyRSS2Gen.py Sun Jul 8 10:49:54 2007 @@ -0,0 +1,443 @@ +"""PyRSS2Gen - A Python library for generating RSS 2.0 feeds.""" + +__name__ = "PyRSS2Gen" +__version__ = (1, 0, 0) +__author__ = "Andrew Dalke " + +_generator_name = __name__ + "-" + ".".join(map(str, __version__)) + +import datetime + +# Could make this the base class; will need to add 'publish' +class WriteXmlMixin: + def write_xml(self, outfile, encoding = "iso-8859-1"): + from xml.sax import saxutils + handler = saxutils.XMLGenerator(outfile, encoding) + handler.startDocument() + self.publish(handler) + handler.endDocument() + + def to_xml(self, encoding = "iso-8859-1"): + try: + import cStringIO as StringIO + except ImportError: + import StringIO + f = StringIO.StringIO() + self.write_xml(f, encoding) + return f.getvalue() + + +def _element(handler, name, obj, d = {}): + if isinstance(obj, basestring) or obj is None: + # special-case handling to make the API easier + # to use for the common case. + handler.startElement(name, d) + if obj is not None: + handler.characters(obj) + handler.endElement(name) + else: + # It better know how to emit the correct XML. 
+ obj.publish(handler) + +def _opt_element(handler, name, obj): + if obj is None: + return + _element(handler, name, obj) + + +def _format_date(dt): + """convert a datetime into an RFC 822 formatted date + + Input date must be in GMT. + """ + # Looks like: + # Sat, 07 Sep 2002 00:00:01 GMT + # Can't use strftime because that's locale dependent + # + # Isn't there a standard way to do this for Python? The + # rfc822 and email.Utils modules assume a timestamp. The + # following is based on the rfc822 module. + return "%s, %02d %s %04d %02d:%02d:%02d GMT" % ( + ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"][dt.weekday()], + dt.day, + ["Jan", "Feb", "Mar", "Apr", "May", "Jun", + "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"][dt.month-1], + dt.year, dt.hour, dt.minute, dt.second) + + +## +# A couple simple wrapper objects for the fields which +# take a simple value other than a string. +class IntElement: + """implements the 'publish' API for integers + + Takes the tag name and the integer value to publish. + + (Could be used for anything which uses str() to be published + to text for XML.) + """ + element_attrs = {} + def __init__(self, name, val): + self.name = name + self.val = val + def publish(self, handler): + handler.startElement(self.name, self.element_attrs) + handler.characters(str(self.val)) + handler.endElement(self.name) + +class DateElement: + """implements the 'publish' API for a datetime.datetime + + Takes the tag name and the datetime to publish. + + Converts the datetime to RFC 2822 timestamp (4-digit year). 
+ """ + def __init__(self, name, dt): + self.name = name + self.dt = dt + def publish(self, handler): + _element(handler, self.name, _format_date(self.dt)) +#### + +class Category: + """Publish a category element""" + def __init__(self, category, domain = None): + self.category = category + self.domain = domain + def publish(self, handler): + d = {} + if self.domain is not None: + d["domain"] = self.domain + _element(handler, "category", self.category, d) + +class Cloud: + """Publish a cloud""" + def __init__(self, domain, port, path, + registerProcedure, protocol): + self.domain = domain + self.port = port + self.path = path + self.registerProcedure = registerProcedure + self.protocol = protocol + def publish(self, handler): + _element(handler, "cloud", None, { + "domain": self.domain, + "port": str(self.port), + "path": self.path, + "registerProcedure": self.registerProcedure, + "protocol": self.protocol}) + +class Image: + """Publish a channel Image""" + element_attrs = {} + def __init__(self, url, title, link, + width = None, height = None, description = None): + self.url = url + self.title = title + self.link = link + self.width = width + self.height = height + self.description = description + + def publish(self, handler): + handler.startElement("image", self.element_attrs) + + _element(handler, "url", self.url) + _element(handler, "title", self.title) + _element(handler, "link", self.link) + + width = self.width + if isinstance(width, int): + width = IntElement("width", width) + _opt_element(handler, "width", width) + + height = self.height + if isinstance(height, int): + height = IntElement("height", height) + _opt_element(handler, "height", height) + + _opt_element(handler, "description", self.description) + + handler.endElement("image") + +class Guid: + """Publish a guid + + Defaults to being a permalink, which is the assumption if it's + omitted. Hence strings are always permalinks. 
+ """ + def __init__(self, guid, isPermaLink = 1): + self.guid = guid + self.isPermaLink = isPermaLink + def publish(self, handler): + d = {} + if self.isPermaLink: + d["isPermaLink"] = "true" + else: + d["isPermaLink"] = "false" + _element(handler, "guid", self.guid, d) + +class TextInput: + """Publish a textInput + + Apparently this is rarely used. + """ + element_attrs = {} + def __init__(self, title, description, name, link): + self.title = title + self.description = description + self.name = name + self.link = link + + def publish(self, handler): + handler.startElement("textInput", self.element_attrs) + _element(handler, "title", self.title) + _element(handler, "description", self.description) + _element(handler, "name", self.name) + _element(handler, "link", self.link) + handler.endElement("textInput") + + +class Enclosure: + """Publish an enclosure""" + def __init__(self, url, length, type): + self.url = url + self.length = length + self.type = type + def publish(self, handler): + _element(handler, "enclosure", None, + {"url": self.url, + "length": str(self.length), + "type": self.type, + }) + +class Source: + """Publish the item's original source, used by aggregators""" + def __init__(self, name, url): + self.name = name + self.url = url + def publish(self, handler): + _element(handler, "source", self.name, {"url": self.url}) + +class SkipHours: + """Publish the skipHours + + This takes a list of hours, as integers. + """ + element_attrs = {} + def __init__(self, hours): + self.hours = hours + def publish(self, handler): + if self.hours: + handler.startElement("skipHours", self.element_attrs) + for hour in self.hours: + _element(handler, "hour", str(hour)) + handler.endElement("skipHours") + +class SkipDays: + """Publish the skipDays + + This takes a list of days as strings. 
+ """ + element_attrs = {} + def __init__(self, days): + self.days = days + def publish(self, handler): + if self.days: + handler.startElement("skipDays", self.element_attrs) + for day in self.days: + _element(handler, "day", day) + handler.endElement("skipDays") + +class RSS2(WriteXmlMixin): + """The main RSS class. + + Stores the channel attributes, with the "category" elements under + ".categories" and the RSS items under ".items". + """ + + rss_attrs = {"version": "2.0"} + element_attrs = {} + def __init__(self, + title, + link, + description, + + language = None, + copyright = None, + managingEditor = None, + webMaster = None, + pubDate = None, # a datetime, *in* *GMT* + lastBuildDate = None, # a datetime + + categories = None, # list of strings or Category + generator = _generator_name, + docs = "http://blogs.law.harvard.edu/tech/rss", + cloud = None, # a Cloud + ttl = None, # integer number of minutes + + image = None, # an Image + rating = None, # a string; I don't know how it's used + textInput = None, # a TextInput + skipHours = None, # a SkipHours with a list of integers + skipDays = None, # a SkipDays with a list of strings + + items = None, # list of RSSItems + ): + self.title = title + self.link = link + self.description = description + self.language = language + self.copyright = copyright + self.managingEditor = managingEditor + + self.webMaster = webMaster + self.pubDate = pubDate + self.lastBuildDate = lastBuildDate + + if categories is None: + categories = [] + self.categories = categories + self.generator = generator + self.docs = docs + self.cloud = cloud + self.ttl = ttl + self.image = image + self.rating = rating + self.textInput = textInput + self.skipHours = skipHours + self.skipDays = skipDays + + if items is None: + items = [] + self.items = items + + def publish(self, handler): + handler.startElement("rss", self.rss_attrs) + handler.startElement("channel", self.element_attrs) + _element(handler, "title", self.title) + _element(handler, 
"link", self.link) + _element(handler, "description", self.description) + + self.publish_extensions(handler) + + _opt_element(handler, "language", self.language) + _opt_element(handler, "copyright", self.copyright) + _opt_element(handler, "managingEditor", self.managingEditor) + _opt_element(handler, "webMaster", self.webMaster) + + pubDate = self.pubDate + if isinstance(pubDate, datetime.datetime): + pubDate = DateElement("pubDate", pubDate) + _opt_element(handler, "pubDate", pubDate) + + lastBuildDate = self.lastBuildDate + if isinstance(lastBuildDate, datetime.datetime): + lastBuildDate = DateElement("lastBuildDate", lastBuildDate) + _opt_element(handler, "lastBuildDate", lastBuildDate) + + for category in self.categories: + if isinstance(category, basestring): + category = Category(category) + category.publish(handler) + + _opt_element(handler, "generator", self.generator) + _opt_element(handler, "docs", self.docs) + + if self.cloud is not None: + self.cloud.publish(handler) + + ttl = self.ttl + if isinstance(self.ttl, int): + ttl = IntElement("ttl", ttl) + _opt_element(handler, "tt", ttl) + + if self.image is not None: + self.image.publish(handler) + + _opt_element(handler, "rating", self.rating) + if self.textInput is not None: + self.textInput.publish(handler) + if self.skipHours is not None: + self.skipHours.publish(handler) + if self.skipDays is not None: + self.skipDays.publish(handler) + + for item in self.items: + item.publish(handler) + + handler.endElement("channel") + handler.endElement("rss") + + def publish_extensions(self, handler): + # Derived classes can hook into this to insert + # output after the three required fields. 
+ pass + + + +class RSSItem(WriteXmlMixin): + """Publish an RSS Item""" + element_attrs = {} + def __init__(self, + title = None, # string + link = None, # url as string + description = None, # string + author = None, # email address as string + categories = None, # list of string or Category + comments = None, # url as string + enclosure = None, # an Enclosure + guid = None, # a unique string + pubDate = None, # a datetime + source = None, # a Source + ): + + if title is None and description is None: + raise TypeError( + "must define at least one of 'title' or 'description'") + self.title = title + self.link = link + self.description = description + self.author = author + if categories is None: + categories = [] + self.categories = categories + self.comments = comments + self.enclosure = enclosure + self.guid = guid + self.pubDate = pubDate + self.source = source + # It sure does get tedious typing these names three times... + + def publish(self, handler): + handler.startElement("item", self.element_attrs) + _opt_element(handler, "title", self.title) + _opt_element(handler, "link", self.link) + self.publish_extensions(handler) + _opt_element(handler, "description", self.description) + _opt_element(handler, "author", self.author) + + for category in self.categories: + if isinstance(category, basestring): + category = Category(category) + category.publish(handler) + + _opt_element(handler, "comments", self.comments) + if self.enclosure is not None: + self.enclosure.publish(handler) + _opt_element(handler, "guid", self.guid) + + pubDate = self.pubDate + if isinstance(pubDate, datetime.datetime): + pubDate = DateElement("pubDate", pubDate) + _opt_element(handler, "pubDate", pubDate) + + if self.source is not None: + self.source.publish(handler) + + handler.endElement("item") + + def publish_extensions(self, handler): + # Derived classes can hook into this to insert + # output after the title and link elements + pass Added: peps/trunk/pep2rss.py 
============================================================================== --- (empty file) +++ peps/trunk/pep2rss.py Sun Jul 8 10:49:54 2007 @@ -0,0 +1,71 @@ +#!/usr/bin/env python + +# usage: pep-hook.py $REPOS $REV +# (standard post-commit args) + +import os, glob, time, datetime, stat, re, sys +from subprocess import Popen, PIPE +import PyRSS2Gen as rssgen + +RSS_PATH = os.path.join(sys.argv[1], 'peps.rss') + +def firstline_startingwith(full_path, text): + for line in file(full_path): + if line.startswith(text): + return line[len(text):].strip() + return None + +# get list of peps with creation time (from "Created:" string in pep .txt) +peps = glob.glob('pep-*.txt') +def pep_creation_dt(full_path): + created_str = firstline_startingwith(full_path, 'Created:') + # bleh, I was hoping to avoid re but some PEPs editorialize + # on the Created line + m = re.search(r'''(\d+-\w+-\d{4})''', created_str) + if not m: + # some older ones have an empty line, that's okay, if it's old + # we ipso facto don't care about it. + # "return None" would make the most sense but datetime objects + # refuse to compare with that. 
:-| + return datetime.datetime(*time.localtime(0)[:6]) + created_str = m.group(1) + try: + t = time.strptime(created_str, '%d-%b-%Y') + except ValueError: + t = time.strptime(created_str, '%d-%B-%Y') + return datetime.datetime(*t[:6]) +peps_with_dt = [(pep_creation_dt(full_path), full_path) for full_path in peps] +peps_with_dt.sort() + +# generate rss items for 10 most recent peps +items = [] +for dt, full_path in peps_with_dt[-10:]: + try: + n = int(full_path.split('-')[-1].split('.')[0]) + except ValueError: + pass + title = firstline_startingwith(full_path, 'Title:') + author = firstline_startingwith(full_path, 'Author:') + url = 'http://www.python.org/dev/peps/pep-%d' % n + item = rssgen.RSSItem( + title = 'PEP %d: %s' % (n, title), + link = url, + description = 'Author: %s' % author, + guid = rssgen.Guid(url), + pubDate = dt) + items.append(item) + +# the rss envelope +desc = """ +Newest Python Enhancement Proposals (PEPs) - Information on new +language features, and some meta-information like release +procedure and schedules +""".strip() +rss = rssgen.RSS2( + title = 'Newest Python PEPs', + link = 'http://www.python.org/dev/peps', + description = desc, + lastBuildDate = datetime.datetime.now(), + items = items) + +file(RSS_PATH, 'w').write(rss.to_xml()) From python-checkins at python.org Sun Jul 8 10:59:16 2007 From: python-checkins at python.org (martin.v.loewis) Date: Sun, 8 Jul 2007 10:59:16 +0200 (CEST) Subject: [Python-checkins] r56192 - peps/trunk/pep-0353.txt Message-ID: <20070708085916.B3DBE1E4002@bag.python.org> Author: martin.v.loewis Date: Sun Jul 8 10:59:16 2007 New Revision: 56192 Modified: peps/trunk/pep-0353.txt Log: Fix typo reported by Lino Mastrodomenico. 
Modified: peps/trunk/pep-0353.txt ============================================================================== --- peps/trunk/pep-0353.txt (original) +++ peps/trunk/pep-0353.txt Sun Jul 8 10:59:16 2007 @@ -167,7 +167,7 @@ Particular care must be taken for PyArg_ParseTuple calls: they need all be checked for s# and t# converters, and -PY_SIZE_T_CLEAN must be defined before including Python.h +PY_SSIZE_T_CLEAN must be defined before including Python.h if the calls have been updated accordingly. Fredrik Lundh has written a scanner_ which checks the code From python-checkins at python.org Sun Jul 8 20:53:53 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Sun, 8 Jul 2007 20:53:53 +0200 (CEST) Subject: [Python-checkins] r56193 - python/branches/cpy_merge/Modules/_picklemodule.c Message-ID: <20070708185353.AAAA41E4002@bag.python.org> Author: alexandre.vassalotti Date: Sun Jul 8 20:53:53 2007 New Revision: 56193 Modified: python/branches/cpy_merge/Modules/_picklemodule.c Log: Fix the indentation of macros. Annotate PdataType. Move the INIT_STR to the top of the module. Modified: python/branches/cpy_merge/Modules/_picklemodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_picklemodule.c (original) +++ python/branches/cpy_merge/Modules/_picklemodule.c Sun Jul 8 20:53:53 2007 @@ -118,12 +118,26 @@ /* For looking up name pairs in copy_reg._extension_registry. 
*/ static PyObject *two_tuple; -static PyObject *__class___str, *__getinitargs___str, *__dict___str, - *__getstate___str, *__setstate___str, *__name___str, *__reduce___str, +#define INIT_STR(S) \ + if (!(S##_str = PyString_InternFromString(#S))) \ + return -1; + +static PyObject \ + *__class___str, + *__getinitargs___str, + *__dict___str, + *__getstate___str, + *__setstate___str, + *__name___str, + *__reduce___str, *__reduce_ex___str, - *write_str, *append_str, - *read_str, *readline_str, *__main___str, - *copy_reg_str, *dispatch_table_str; + *write_str, + *append_str, + *read_str, + *readline_str, + *__main___str, + *copy_reg_str, + *dispatch_table_str; /************************************************************************* Internal Data type for pickle data. */ @@ -131,7 +145,7 @@ typedef struct { PyObject_HEAD int length; /* number of initial slots in data currently used */ - int size; /* number of slots in data allocated */ + int size; /* number of slots in data allocated */ PyObject **data; } Pdata; @@ -150,9 +164,12 @@ } static PyTypeObject PdataType = { - PyObject_HEAD_INIT(NULL) 0, "pickle.Pdata", sizeof(Pdata), 0, - (destructor) Pdata_dealloc, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0L, 0L, 0L, 0L, "" + PyObject_HEAD_INIT(NULL) + 0, /*ob_size*/ + "_pickle.Pdata", /*tp_name*/ + sizeof(Pdata), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + (destructor) Pdata_dealloc, /*tp_dealloc*/ }; #define Pdata_Check(O) ((O)->ob_type == &PdataType) @@ -233,14 +250,14 @@ * must be an lvalue holding PyObject*. On stack underflow, UnpicklingError * is raised and V is set to NULL. D and V may be evaluated several times. 
*/ -#define PDATA_POP(D, V) { \ - if ((D)->length) \ - (V) = (D)->data[--((D)->length)]; \ - else { \ - PyErr_SetString(UnpicklingError, "bad pickle data"); \ - (V) = NULL; \ - } \ -} +#define PDATA_POP(D, V) { \ + if ((D)->length) \ + (V) = (D)->data[--((D)->length)]; \ + else { \ + PyErr_SetString(UnpicklingError, "bad pickle data"); \ + (V) = NULL; \ + } \ + } /* PDATA_PUSH and PDATA_APPEND both push rvalue PyObject* O on to Pdata* * D. If the Pdata stack can't be grown to hold the new value, both @@ -254,17 +271,17 @@ #define PDATA_PUSH(D, O, ER) { \ if (((Pdata*)(D))->length == ((Pdata*)(D))->size && \ Pdata_grow((Pdata*)(D)) < 0) { \ - Py_DECREF(O); \ - return ER; \ + Py_DECREF(O); \ + return ER; \ } \ ((Pdata*)(D))->data[((Pdata*)(D))->length++] = (O); \ -} + } /* Push O on stack D, pushing a new reference. */ #define PDATA_APPEND(D, O, ER) { \ if (((Pdata*)(D))->length == ((Pdata*)(D))->size && \ Pdata_grow((Pdata*)(D)) < 0) \ - return ER; \ + return ER; \ Py_INCREF(O); \ ((Pdata*)(D))->data[((Pdata*)(D))->length++] = (O); \ } @@ -305,22 +322,22 @@ /*************************************************************************/ -#define ARG_TUP(self, o) { \ - if (self->arg || (self->arg=PyTuple_New(1))) { \ - Py_XDECREF(PyTuple_GET_ITEM(self->arg,0)); \ - PyTuple_SET_ITEM(self->arg,0,o); \ - } \ - else { \ - Py_DECREF(o); \ - } \ -} +#define ARG_TUP(self, o) { \ + if (self->arg || (self->arg=PyTuple_New(1))) { \ + Py_XDECREF(PyTuple_GET_ITEM(self->arg,0)); \ + PyTuple_SET_ITEM(self->arg,0,o); \ + } \ + else { \ + Py_DECREF(o); \ + } \ + } #define FREE_ARG_TUP(self) { \ - if (self->arg->ob_refcnt > 1) { \ - Py_DECREF(self->arg); \ - self->arg=NULL; \ - } \ - } + if (self->arg->ob_refcnt > 1) { \ + Py_DECREF(self->arg); \ + self->arg=NULL; \ + } \ + } typedef struct Picklerobject { PyObject_HEAD @@ -338,7 +355,7 @@ /* bool, true if proto > 0 */ int bin; - int fast; /* Fast mode doesn't save in memo, don't use if circ ref */ + int fast; /* Fast mode doesn't save in 
memo, don't use if circ ref */ int nesting; int (*write_func) (struct Picklerobject *, const char *, Py_ssize_t); char *write_buf; @@ -434,9 +451,9 @@ return -1; } - Py_BEGIN_ALLOW_THREADS - nbyteswritten = fwrite(s, sizeof(char), n, self->fp); - Py_END_ALLOW_THREADS + Py_BEGIN_ALLOW_THREADS; + nbyteswritten = fwrite(s, sizeof(char), n, self->fp); + Py_END_ALLOW_THREADS; if (nbyteswritten != (size_t) n) { PyErr_SetFromErrno(PyExc_IOError); @@ -5269,7 +5286,7 @@ static PyTypeObject Unpicklertype = { PyObject_HEAD_INIT(NULL) 0, /*ob_size */ - "pickle.Unpickler", /*tp_name */ + "_pickle.Unpickler", /*tp_name */ sizeof(Unpicklerobject), /*tp_basicsize */ 0, (destructor) Unpickler_dealloc, /* tp_dealloc */ @@ -5339,8 +5356,6 @@ { PyObject *copy_reg, *t, *r; -#define INIT_STR(S) if (!( S ## _str=PyString_InternFromString(#S))) return -1; - if (PyType_Ready(&Unpicklertype) < 0) return -1; if (PyType_Ready(&Picklertype) < 0) @@ -5433,9 +5448,8 @@ return -1; Py_DECREF(r); - if (! - (UnpickleableError = - PyErr_NewException("pickle.UnpickleableError", PicklingError, t))) + if (!(UnpickleableError = PyErr_NewException("pickle.UnpickleableError", + PicklingError, t))) return -1; Py_DECREF(t); From python-checkins at python.org Sun Jul 8 21:29:28 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Sun, 8 Jul 2007 21:29:28 +0200 (CEST) Subject: [Python-checkins] r56194 - python/branches/cpy_merge/Modules/_picklemodule.c Message-ID: <20070708192928.122B51E4002@bag.python.org> Author: alexandre.vassalotti Date: Sun Jul 8 21:29:27 2007 New Revision: 56194 Modified: python/branches/cpy_merge/Modules/_picklemodule.c Log: Make fast_save_leave() static. 
Modified: python/branches/cpy_merge/Modules/_picklemodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_picklemodule.c (original) +++ python/branches/cpy_merge/Modules/_picklemodule.c Sun Jul 8 21:29:27 2007 @@ -913,7 +913,7 @@ return 1; } -int +static int fast_save_leave(Picklerobject *self, PyObject *obj) { if (self->fast_container-- >= PY_CPICKLE_FAST_LIMIT) { From python-checkins at python.org Sun Jul 8 21:33:11 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Sun, 8 Jul 2007 21:33:11 +0200 (CEST) Subject: [Python-checkins] r56195 - python/branches/cpy_merge/Modules/_picklemodule.c Message-ID: <20070708193311.7BE5B1E4002@bag.python.org> Author: alexandre.vassalotti Date: Sun Jul 8 21:33:11 2007 New Revision: 56195 Modified: python/branches/cpy_merge/Modules/_picklemodule.c Log: Remove the unused DEL_LIST_SLICE macro. Modified: python/branches/cpy_merge/Modules/_picklemodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_picklemodule.c (original) +++ python/branches/cpy_merge/Modules/_picklemodule.c Sun Jul 8 21:33:11 2007 @@ -9,8 +9,6 @@ #define Py_eval_input eval_input #endif /* Py_eval_input */ -#define DEL_LIST_SLICE(list, from, to) (PyList_SetSlice(list, from, to, NULL)) - #define WRITE_BUF_SIZE 256 /* Bump this when new opcodes are added to the pickle protocol. */ From python-checkins at python.org Sun Jul 8 21:53:06 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Sun, 8 Jul 2007 21:53:06 +0200 (CEST) Subject: [Python-checkins] r56196 - python/branches/cpy_merge/Modules/_picklemodule.c Message-ID: <20070708195306.53DD81E400F@bag.python.org> Author: alexandre.vassalotti Date: Sun Jul 8 21:53:06 2007 New Revision: 56196 Modified: python/branches/cpy_merge/Modules/_picklemodule.c Log: Remove write_none. pickle.py doesn't allow to use None as the output file, anyway. 
Modified: python/branches/cpy_merge/Modules/_picklemodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_picklemodule.c (original) +++ python/branches/cpy_merge/Modules/_picklemodule.c Sun Jul 8 21:53:06 2007 @@ -462,16 +462,6 @@ } static int -write_none(Picklerobject *self, const char *s, Py_ssize_t n) -{ - if (s == NULL) - return 0; - if (n > INT_MAX) - return -1; - return (int) n; -} - -static int write_other(Picklerobject *self, const char *s, Py_ssize_t _n) { PyObject *py_str = 0, *junk = 0; @@ -2651,9 +2641,6 @@ } self->write_func = write_file; } - else if (file == Py_None) { - self->write_func = write_none; - } else { self->write_func = write_other; From python-checkins at python.org Sun Jul 8 22:17:58 2007 From: python-checkins at python.org (collin.winter) Date: Sun, 8 Jul 2007 22:17:58 +0200 (CEST) Subject: [Python-checkins] r56197 - sandbox/trunk/2to3/tests/test_grammar.py sandbox/trunk/2to3/tests/test_pytree.py Message-ID: <20070708201758.C4E371E4002@bag.python.org> Author: collin.winter Date: Sun Jul 8 22:17:58 2007 New Revision: 56197 Modified: sandbox/trunk/2to3/tests/test_grammar.py sandbox/trunk/2to3/tests/test_pytree.py Log: Whitespace cleanup. 
Modified: sandbox/trunk/2to3/tests/test_grammar.py ============================================================================== --- sandbox/trunk/2to3/tests/test_grammar.py (original) +++ sandbox/trunk/2to3/tests/test_grammar.py Sun Jul 8 22:17:58 2007 @@ -21,7 +21,7 @@ class GrammarTest(support.TestCase): def validate(self, code): support.parse_string(code) - + def invalid_syntax(self, code): try: self.validate(code) @@ -46,16 +46,16 @@ def test_3x_style(self): self.validate("raise E1 from E2") - + def test_3x_style_invalid_1(self): self.invalid_syntax("raise E, V from E1") - + def test_3x_style_invalid_2(self): self.invalid_syntax("raise E from E1, E2") - + def test_3x_style_invalid_3(self): self.invalid_syntax("raise from E1, E2") - + def test_3x_style_invalid_4(self): self.invalid_syntax("raise E from") @@ -64,25 +64,25 @@ class TestFunctionAnnotations(GrammarTest): def test_1(self): self.validate("""def f(x) -> list: pass""") - + def test_2(self): self.validate("""def f(x:int): pass""") - + def test_3(self): self.validate("""def f(*x:str): pass""") - + def test_4(self): self.validate("""def f(**x:float): pass""") - + def test_5(self): self.validate("""def f(x, y:1+2): pass""") - + def test_6(self): self.validate("""def f(a, (b:1, c:2, d)): pass""") - + def test_7(self): self.validate("""def f(a, (b:1, c:2, d), e:3=4, f=5, *g:6): pass""") - + def test_8(self): s = """def f(a, (b:1, c:2, d), e:3=4, f=5, *g:6, h:7, i=8, j:9=10, **k:11) -> 12: pass""" @@ -95,15 +95,15 @@ try: x except E as N: - y""" + y""" self.validate(s) - + def test_old(self): s = """ try: x except E, N: - y""" + y""" self.validate(s) @@ -111,13 +111,13 @@ class TestSetLiteral(GrammarTest): def test_1(self): self.validate("""x = {'one'}""") - + def test_2(self): self.validate("""x = {'one', 1,}""") - + def test_3(self): self.validate("""x = {'one', 'two', 'three'}""") - + def test_4(self): self.validate("""x = {2, 3, 4,}""") @@ -126,12 +126,12 @@ def test_new_octal_notation(self): 
self.validate("""0o7777777777777""") self.invalid_syntax("""0o7324528887""") - + def test_new_binary_notation(self): self.validate("""0b101010""") self.invalid_syntax("""0b0101021""") - + class TestClassDef(GrammarTest): def test_new_syntax(self): self.validate("class B(t=7): pass") @@ -145,7 +145,7 @@ def test_python2(self): f = os.path.join(test_dir, "data", "py2_test_grammar.py") driver.parse_file(f) - + def test_python3(self): f = os.path.join(test_dir, "data", "py3_test_grammar.py") driver.parse_file(f) Modified: sandbox/trunk/2to3/tests/test_pytree.py ============================================================================== --- sandbox/trunk/2to3/tests/test_pytree.py (original) +++ sandbox/trunk/2to3/tests/test_pytree.py Sun Jul 8 22:17:58 2007 @@ -47,7 +47,7 @@ self.assertEqual(str(l1), "foo") l2 = pytree.Leaf(100, "foo", context=(" ", (10, 1))) self.assertEqual(str(l2), " foo") - + def testLeafStrNumericValue(self): # Make sure that the Leaf's value is stringified. Failing to # do this can cause a TypeError in certain situations. 
@@ -103,11 +103,11 @@ l1 = pytree.Leaf(100, "foo", prefix="a") l2 = pytree.Leaf(100, "bar", prefix="b") n1 = pytree.Node(1000, [l1, l2]) - + self.assertEqual(l1.get_suffix(), l2.get_prefix()) self.assertEqual(l2.get_suffix(), "") self.assertEqual(n1.get_suffix(), "") - + l3 = pytree.Leaf(100, "bar", prefix="c") n2 = pytree.Node(1000, [n1, l3]) @@ -147,22 +147,22 @@ def testConvert(self): # XXX pass - + def testChangedLeaf(self): l1 = pytree.Leaf(100, "f") self.failIf(l1.was_changed) - + l1.changed() self.failUnless(l1.was_changed) - + def testChangedNode(self): l1 = pytree.Leaf(100, "f") n1 = pytree.Node(1000, [l1]) self.failIf(n1.was_changed) - + n1.changed() self.failUnless(n1.was_changed) - + def testChangedRecursive(self): l1 = pytree.Leaf(100, "foo") l2 = pytree.Leaf(100, "+") @@ -172,18 +172,18 @@ self.failIf(l1.was_changed) self.failIf(n1.was_changed) self.failIf(n2.was_changed) - + n1.changed() self.failUnless(n1.was_changed) self.failUnless(n2.was_changed) self.failIf(l1.was_changed) - + def testLeafConstructorPrefix(self): for prefix in ("xyz_", ""): l1 = pytree.Leaf(100, "self", prefix=prefix) self.failUnless(str(l1), prefix + "self") self.assertEqual(l1.get_prefix(), prefix) - + def testNodeConstructorPrefix(self): for prefix in ("xyz_", ""): l1 = pytree.Leaf(100, "self") @@ -193,7 +193,7 @@ self.assertEqual(n1.get_prefix(), prefix) self.assertEqual(l1.get_prefix(), prefix) self.assertEqual(l2.get_prefix(), "_") - + def testRemove(self): l1 = pytree.Leaf(100, "foo") l2 = pytree.Leaf(100, "foo") @@ -216,83 +216,83 @@ self.assertEqual(n2.parent, None) self.failUnless(n1.was_changed) self.failUnless(n2.was_changed) - + def testRemoveParentless(self): n1 = pytree.Node(1000, []) n1.remove() self.assertEqual(n1.parent, None) - + l1 = pytree.Leaf(100, "foo") l1.remove() self.assertEqual(l1.parent, None) - + def testNodeSetChild(self): l1 = pytree.Leaf(100, "foo") n1 = pytree.Node(1000, [l1]) - + l2 = pytree.Leaf(100, "bar") n1.set_child(0, l2) 
self.assertEqual(l1.parent, None) self.assertEqual(l2.parent, n1) self.assertEqual(n1.children, [l2]) - + n2 = pytree.Node(1000, [l1]) n2.set_child(0, n1) self.assertEqual(l1.parent, None) self.assertEqual(n1.parent, n2) self.assertEqual(n2.parent, None) self.assertEqual(n2.children, [n1]) - + self.assertRaises(IndexError, n1.set_child, 4, l2) # I don't care what it raises, so long as it's an exception self.assertRaises(Exception, n1.set_child, 0, list) - + def testNodeInsertChild(self): l1 = pytree.Leaf(100, "foo") n1 = pytree.Node(1000, [l1]) - + l2 = pytree.Leaf(100, "bar") n1.insert_child(0, l2) self.assertEqual(l2.parent, n1) self.assertEqual(n1.children, [l2, l1]) - + l3 = pytree.Leaf(100, "abc") n1.insert_child(2, l3) self.assertEqual(n1.children, [l2, l1, l3]) - + # I don't care what it raises, so long as it's an exception self.assertRaises(Exception, n1.insert_child, 0, list) - + def testNodeAppendChild(self): n1 = pytree.Node(1000, []) - + l1 = pytree.Leaf(100, "foo") n1.append_child(l1) self.assertEqual(l1.parent, n1) self.assertEqual(n1.children, [l1]) - + l2 = pytree.Leaf(100, "bar") n1.append_child(l2) self.assertEqual(l2.parent, n1) self.assertEqual(n1.children, [l1, l2]) - + # I don't care what it raises, so long as it's an exception self.assertRaises(Exception, n1.append_child, list) - + def testNodeNextSibling(self): n1 = pytree.Node(1000, []) n2 = pytree.Node(1000, []) p1 = pytree.Node(1000, [n1, n2]) - + self.failUnless(n1.get_next_sibling() is n2) self.assertEqual(n2.get_next_sibling(), None) self.assertEqual(p1.get_next_sibling(), None) - + def testLeafNextSibling(self): l1 = pytree.Leaf(100, "a") l2 = pytree.Leaf(100, "b") p1 = pytree.Node(1000, [l1, l2]) - + self.failUnless(l1.get_next_sibling() is l2) self.assertEqual(l2.get_next_sibling(), None) self.assertEqual(p1.get_next_sibling(), None) From python-checkins at python.org Sun Jul 8 22:35:52 2007 From: python-checkins at python.org (brett.cannon) Date: Sun, 8 Jul 2007 22:35:52 +0200 
(CEST) Subject: [Python-checkins] r56198 - sandbox/trunk/import_in_py/_importlib.py Message-ID: <20070708203552.516B21E400C@bag.python.org> Author: brett.cannon Date: Sun Jul 8 22:35:52 2007 New Revision: 56198 Modified: sandbox/trunk/import_in_py/_importlib.py Log: Minor whitespace cleanup. Modified: sandbox/trunk/import_in_py/_importlib.py ============================================================================== --- sandbox/trunk/import_in_py/_importlib.py (original) +++ sandbox/trunk/import_in_py/_importlib.py Sun Jul 8 22:35:52 2007 @@ -69,11 +69,13 @@ original__import__ = __import__ __builtins__['__import__'] = Import() + def _reset__import__(): """Set __import__ back to the original implementation (assumes _set__import__ was called previously).""" __builtins__['__import__'] = original__import__ + def _w_long(x): """Convert a 32-bit integer to little-endian. @@ -87,6 +89,7 @@ bytes.append((x >> 24) & 0xFF) return ''.join(chr(x) for x in bytes) + def _r_long(bytes): """Convert 4 bytes in little-endian to an integer. @@ -99,6 +102,7 @@ x |= ord(bytes[3]) << 24 return x + def _case_ok(directory, file_name): """Verify that file_name (as found in 'directory') has the proper case. From python-checkins at python.org Sun Jul 8 22:45:19 2007 From: python-checkins at python.org (brett.cannon) Date: Sun, 8 Jul 2007 22:45:19 +0200 (CEST) Subject: [Python-checkins] r56199 - sandbox/trunk/import_in_py/_importlib.py sandbox/trunk/import_in_py/importlib.py Message-ID: <20070708204519.BC7811E4002@bag.python.org> Author: brett.cannon Date: Sun Jul 8 22:45:19 2007 New Revision: 56199 Modified: sandbox/trunk/import_in_py/_importlib.py sandbox/trunk/import_in_py/importlib.py Log: Create an os.path.join replacement. 
Modified: sandbox/trunk/import_in_py/_importlib.py ============================================================================== --- sandbox/trunk/import_in_py/_importlib.py (original) +++ sandbox/trunk/import_in_py/_importlib.py Sun Jul 8 22:45:19 2007 @@ -103,6 +103,11 @@ return x +def _path_join(*args): + """Replacement for os.path.join so as to remove dependency on os module.""" + return path_sep.join(args) + + def _case_ok(directory, file_name): """Verify that file_name (as found in 'directory') has the proper case. @@ -251,7 +256,7 @@ """ tail_module = fullname.rsplit('.', 1)[-1] - package_directory = os.path.join(self.path_entry, tail_module) + package_directory = _path_join(self.path_entry, tail_module) for handler in self.handlers: if handler.cannot_handle(fullname): continue @@ -259,7 +264,7 @@ # XXX Backwards-incompatible to use anything but .py/.pyc # files for __init__? init_filename = '__init__' + file_ext - package_init = os.path.join(package_directory, init_filename) + package_init = _path_join(package_directory, init_filename) # Check if it is a package with an __init__ file. if (os.path.isfile(package_init) and _case_ok(self.path_entry, tail_module) and @@ -267,7 +272,7 @@ return self.loader(package_init, handler, package_directory) # See if it is a module. file_name = tail_module + file_ext - file_path = os.path.join(self.path_entry, file_name) + file_path = _path_join(self.path_entry, file_name) if (os.path.isfile(file_path) and _case_ok(self.path_entry, file_name)): return self.loader(file_path, handler) Modified: sandbox/trunk/import_in_py/importlib.py ============================================================================== --- sandbox/trunk/import_in_py/importlib.py (original) +++ sandbox/trunk/import_in_py/importlib.py Sun Jul 8 22:45:19 2007 @@ -30,6 +30,7 @@ """ import _importlib +# Import needed built-in modules. 
for builtin_name in _importlib._required_builtins: module = __import__(builtin_name) _importlib.__dict__[builtin_name] = module @@ -42,6 +43,7 @@ _importlib.errno = errno _importlib.os = os +_importlib.path_sep = os.sep # For os.path.join replacement. _importlib.warnings = warnings del _importlib From python-checkins at python.org Sun Jul 8 22:53:11 2007 From: python-checkins at python.org (brett.cannon) Date: Sun, 8 Jul 2007 22:53:11 +0200 (CEST) Subject: [Python-checkins] r56200 - in sandbox/trunk/import_in_py: _importlib.py importlib.py tests/mock_importlib.py tests/test_py_handler.py tests/test_regression.py Message-ID: <20070708205311.A182B1E4002@bag.python.org> Author: brett.cannon Date: Sun Jul 8 22:53:11 2007 New Revision: 56200 Modified: sandbox/trunk/import_in_py/_importlib.py sandbox/trunk/import_in_py/importlib.py sandbox/trunk/import_in_py/tests/mock_importlib.py sandbox/trunk/import_in_py/tests/test_py_handler.py sandbox/trunk/import_in_py/tests/test_regression.py Log: Move hand-coded functions that should really come from C version out to importlib. Modified: sandbox/trunk/import_in_py/_importlib.py ============================================================================== --- sandbox/trunk/import_in_py/_importlib.py (original) +++ sandbox/trunk/import_in_py/_importlib.py Sun Jul 8 22:53:11 2007 @@ -63,76 +63,11 @@ _required_builtins = ['imp', 'sys', 'marshal'] -def _set__import__(): - """Set __import__ to an instance of Import.""" - global original__import__ - original__import__ = __import__ - __builtins__['__import__'] = Import() - - -def _reset__import__(): - """Set __import__ back to the original implementation (assumes - _set__import__ was called previously).""" - __builtins__['__import__'] = original__import__ - - -def _w_long(x): - """Convert a 32-bit integer to little-endian. - - XXX Temporary until marshal's long functions are exposed. 
- - """ - bytes = [] - bytes.append(x & 0xFF) - bytes.append((x >> 8) & 0xFF) - bytes.append((x >> 16) & 0xFF) - bytes.append((x >> 24) & 0xFF) - return ''.join(chr(x) for x in bytes) - - -def _r_long(bytes): - """Convert 4 bytes in little-endian to an integer. - - XXX Temporary until marshal's long function are exposed. - - """ - x = ord(bytes[0]) - x |= ord(bytes[1]) << 8 - x |= ord(bytes[2]) << 16 - x |= ord(bytes[3]) << 24 - return x - - def _path_join(*args): """Replacement for os.path.join so as to remove dependency on os module.""" return path_sep.join(args) -def _case_ok(directory, file_name): - """Verify that file_name (as found in 'directory') has the proper case. - - The path is assumed to already exist. - - XXX Temporary until imp's case_ok function can be exposed. - - XXX Better to roll this into a single function some how so that existence - check can be part of case check and thus cut down on stat calls? - - """ - # If platform is not case-sensitive *or* the environment variable - # PYTHONCASEOK is defined, then os.path.exists already handled the case by - # either doing a case-sensitive check or from the user saying he does not - # want case-sensitivity, respectively. - if sys.platform not in ('win32', 'mac', 'riscos', 'darwin', 'cygwin', - 'os2emx') or os.environ.get('PYTHONCASEOK'): - return True - directory_contents = os.listdir(directory) - if file_name in directory_contents: - return True - else: - return False - - class _BuiltinFrozenBaseImporter(object): """Base class for meta_path importers for built-in and frozen modules. Modified: sandbox/trunk/import_in_py/importlib.py ============================================================================== --- sandbox/trunk/import_in_py/importlib.py (original) +++ sandbox/trunk/import_in_py/importlib.py Sun Jul 8 22:53:11 2007 @@ -30,6 +30,72 @@ """ import _importlib +#XXX Temporary functions that should eventually be removed. 
+def _set__import__(): + """Set __import__ to an instance of Import.""" + global original__import__ + original__import__ = __import__ + __builtins__['__import__'] = Import() + + +def _reset__import__(): + """Set __import__ back to the original implementation (assumes + _set__import__ was called previously).""" + __builtins__['__import__'] = original__import__ + + +def _w_long(x): + """Convert a 32-bit integer to little-endian. + + XXX Temporary until marshal's long functions are exposed. + + """ + bytes = [] + bytes.append(x & 0xFF) + bytes.append((x >> 8) & 0xFF) + bytes.append((x >> 16) & 0xFF) + bytes.append((x >> 24) & 0xFF) + return ''.join(chr(x) for x in bytes) + + +def _r_long(bytes): + """Convert 4 bytes in little-endian to an integer. + + XXX Temporary until marshal's long function are exposed. + + """ + x = ord(bytes[0]) + x |= ord(bytes[1]) << 8 + x |= ord(bytes[2]) << 16 + x |= ord(bytes[3]) << 24 + return x + + +def _case_ok(directory, file_name): + """Verify that file_name (as found in 'directory') has the proper case. + + The path is assumed to already exist. + + XXX Temporary until imp's case_ok function can be exposed. + + XXX Better to roll this into a single function some how so that existence + check can be part of case check and thus cut down on stat calls? + + """ + # If platform is not case-sensitive *or* the environment variable + # PYTHONCASEOK is defined, then os.path.exists already handled the case by + # either doing a case-sensitive check or from the user saying he does not + # want case-sensitivity, respectively. + if sys.platform not in ('win32', 'mac', 'riscos', 'darwin', 'cygwin', + 'os2emx') or os.environ.get('PYTHONCASEOK'): + return True + directory_contents = os.listdir(directory) + if file_name in directory_contents: + return True + else: + return False + + # Import needed built-in modules. 
for builtin_name in _importlib._required_builtins: module = __import__(builtin_name) @@ -41,9 +107,13 @@ import os import warnings +_importlib._r_long = _r_long #XXX Expose original from marshal. +_importlib._w_long = _w_long #XXX Expose original from marshal. +_importlib._case_ok = _case_ok #XXX Expose original from imp. +_importlib.path_sep = os.sep # For os.path.join replacement. + _importlib.errno = errno _importlib.os = os -_importlib.path_sep = os.sep # For os.path.join replacement. _importlib.warnings = warnings del _importlib Modified: sandbox/trunk/import_in_py/tests/mock_importlib.py ============================================================================== --- sandbox/trunk/import_in_py/tests/mock_importlib.py (original) +++ sandbox/trunk/import_in_py/tests/mock_importlib.py Sun Jul 8 22:53:11 2007 @@ -2,7 +2,7 @@ import marshal import imp from test import test_support -from _importlib import _w_long +from importlib import _w_long def log_call(method): """Log method calls to self.log.""" Modified: sandbox/trunk/import_in_py/tests/test_py_handler.py ============================================================================== --- sandbox/trunk/import_in_py/tests/test_py_handler.py (original) +++ sandbox/trunk/import_in_py/tests/test_py_handler.py Sun Jul 8 22:53:11 2007 @@ -4,7 +4,7 @@ from tests import mock_importlib from tests.py_help import TestPyPycFiles -from _importlib import _r_long +from importlib import _r_long import imp import marshal Modified: sandbox/trunk/import_in_py/tests/test_regression.py ============================================================================== --- sandbox/trunk/import_in_py/tests/test_regression.py (original) +++ sandbox/trunk/import_in_py/tests/test_regression.py Sun Jul 8 22:53:11 2007 @@ -2,7 +2,7 @@ from tests import mock_importlib from tests.py_help import TestPyPycPackages -from _importlib import _r_long +from importlib import _r_long import imp import marshal From python-checkins at python.org Sun Jul 8 
23:21:49 2007 From: python-checkins at python.org (collin.winter) Date: Sun, 8 Jul 2007 23:21:49 +0200 (CEST) Subject: [Python-checkins] r56201 - in sandbox/trunk/2to3: fixes/fix_dict.py fixes/fix_raise.py tests/test_fixers.py Message-ID: <20070708212149.762E61E4007@bag.python.org> Author: collin.winter Date: Sun Jul 8 23:21:49 2007 New Revision: 56201 Modified: sandbox/trunk/2to3/fixes/fix_dict.py sandbox/trunk/2to3/fixes/fix_raise.py sandbox/trunk/2to3/tests/test_fixers.py Log: Add a whole slew of prefix-related tests; fix two prefix-related bugs in the dict and raise fixers; minor test suite cleanup. Modified: sandbox/trunk/2to3/fixes/fix_dict.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_dict.py (original) +++ sandbox/trunk/2to3/fixes/fix_dict.py Sun Jul 8 23:21:49 2007 @@ -27,14 +27,14 @@ import patcomp from pgen2 import token from fixes import basefix -from fixes.util import Name, Call, LParen, RParen +from fixes.util import Name, Call, LParen, RParen, ArgList class FixDict(basefix.BaseFix): PATTERN = """ power< head=any+ trailer< '.' 
method=('keys'|'items'|'values'| 'iterkeys'|'iteritems'|'itervalues') > - trailer< '(' ')' > + parens=trailer< '(' ')' > tail=any* > """ @@ -42,20 +42,22 @@ def transform(self, node): results = self.match(node) head = results["head"] - method = results["method"][0].value # Extract method name + method = results["method"][0] # Extract node for method name tail = results["tail"] syms = self.syms - isiter = method.startswith("iter") + method_name = method.value + isiter = method_name.startswith("iter") if isiter: - method = method[4:] - assert method in ("keys", "items", "values"), repr(method) + method_name = method_name[4:] + assert method_name in ("keys", "items", "values"), repr(method) head = [n.clone() for n in head] tail = [n.clone() for n in tail] special = not tail and self.in_special_context(node, isiter) args = head + [pytree.Node(syms.trailer, [pytree.Leaf(token.DOT, '.'), - Name(method)]), - pytree.Node(syms.trailer, [LParen(), RParen()])] + Name(method_name, + prefix=method.get_prefix())]), + results["parens"].clone()] new = pytree.Node(syms.power, args) if not special: new.set_prefix("") Modified: sandbox/trunk/2to3/fixes/fix_raise.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_raise.py (original) +++ sandbox/trunk/2to3/fixes/fix_raise.py Sun Jul 8 23:21:49 2007 @@ -37,7 +37,7 @@ syms = self.syms results = self.match(node) assert results - + exc = results["exc"].clone() if exc.type is token.STRING: self.cannot_convert(node, "Python 3 does not support string exceptions") @@ -49,18 +49,19 @@ # raise E1, V # Since Python 3 will not support this, we recurse down any tuple # literals, always taking the first element. 
- while is_tuple(exc): - # exc.children[1:-1] is the unparenthesized tuple - # exc.children[1].children[0] is the first element of the tuple - exc = exc.children[1].children[0].clone() - exc.set_prefix(" ") + if is_tuple(exc): + while is_tuple(exc): + # exc.children[1:-1] is the unparenthesized tuple + # exc.children[1].children[0] is the first element of the tuple + exc = exc.children[1].children[0].clone() + exc.set_prefix(" ") if "val" not in results: # One-argument raise new = pytree.Node(syms.raise_stmt, [Name("raise"), exc]) new.set_prefix(node.get_prefix()) return new - + val = results["val"].clone() if is_tuple(val): args = [c.clone() for c in val.children[1:-1]] @@ -71,7 +72,7 @@ if "tb" in results: tb = results["tb"].clone() tb.set_prefix("") - + e = Call(exc, args) with_tb = Attr(e, Name('with_traceback')) + [ArgList([tb])] new = pytree.Node(syms.simple_stmt, [Name("raise")] + with_tb) Modified: sandbox/trunk/2to3/tests/test_fixers.py ============================================================================== --- sandbox/trunk/2to3/tests/test_fixers.py (original) +++ sandbox/trunk/2to3/tests/test_fixers.py Sun Jul 8 23:21:49 2007 @@ -277,49 +277,60 @@ class Test_intern(FixerTestCase): fixer = "intern" - def test_1(self): - b = """x = intern(a)""" - a = """x = sys.intern(a)""" + def test_prefix_preservation(self): + b = """x = intern( a )""" + a = """x = sys.intern( a )""" self.check(b, a) - def test_2(self): b = """y = intern("b" # test )""" a = """y = sys.intern("b" # test )""" self.check(b, a) - def test_3(self): + b = """z = intern(a+b+c.d, )""" + a = """z = sys.intern(a+b+c.d, )""" + self.check(b, a) + + def test(self): + b = """x = intern(a)""" + a = """x = sys.intern(a)""" + self.check(b, a) + b = """z = intern(a+b+c.d,)""" a = """z = sys.intern(a+b+c.d,)""" self.check(b, a) - def test_4(self): b = """intern("y%s" % 5).replace("y", "")""" a = """sys.intern("y%s" % 5).replace("y", "")""" self.check(b, a) # These should not be refactored - def 
test_unchanged_1(self): + def test_unchanged(self): s = """intern(a=1)""" self.check(s, s) - def test_unchanged_2(self): s = """intern(f, g)""" self.check(s, s) - def test_unchanged_3(self): s = """intern(*h)""" self.check(s, s) - def test_unchanged_4(self): s = """intern(**i)""" self.check(s, s) + s = """intern()""" + self.check(s, s) + class Test_print(FixerTestCase): fixer = "print" + def test_prefix_preservation(self): + b = """print 1, 1+1, 1+1+1""" + a = """print(1, 1+1, 1+1+1)""" + self.check(b, a) + def test_1(self): b = """print 1, 1+1, 1+1+1""" a = """print(1, 1+1, 1+1+1)""" @@ -378,6 +389,11 @@ class Test_exec(FixerTestCase): fixer = "exec" + def test_prefix_preservation(self): + b = """ exec code in ns1, ns2""" + a = """ exec(code, ns1, ns2)""" + self.check(b, a) + def test_basic(self): b = """exec code""" a = """exec(code)""" @@ -425,6 +441,11 @@ class Test_repr(FixerTestCase): fixer = "repr" + def test_prefix_preservation(self): + b = """x = `1 + 2`""" + a = """x = repr(1 + 2)""" + self.check(b, a) + def test_simple_1(self): b = """x = `1 + 2`""" a = """x = repr(1 + 2)""" @@ -458,6 +479,19 @@ class Test_except(FixerTestCase): fixer = "except" + def test_prefix_preservation(self): + b = """ + try: + pass + except (RuntimeError, ImportError), e: + pass""" + a = """ + try: + pass + except (RuntimeError, ImportError) as e: + pass""" + self.check(b, a) + def test_tuple_unpack(self): b = """ def foo(): @@ -606,11 +640,30 @@ a = """raise Exception(5)""" self.check(b, a) - def test_prefix(self): + def test_prefix_preservation(self): b = """raise Exception,5""" a = """raise Exception(5)""" self.check(b, a) + b = """raise Exception, 5""" + a = """raise Exception(5)""" + self.check(b, a) + + def test_with_comments(self): + b = """raise Exception, 5 # foo""" + a = """raise Exception(5) # foo""" + self.check(b, a) + + b = """raise E, (5, 6) % (a, b) # foo""" + a = """raise E((5, 6) % (a, b)) # foo""" + self.check(b, a) + + b = """def foo(): + raise Exception, 5, 6 
# foo""" + a = """def foo(): + raise Exception(5).with_traceback(6) # foo""" + self.check(b, a) + def test_tuple_value(self): b = """raise Exception, (5, 6, 7)""" a = """raise Exception(5, 6, 7)""" @@ -859,36 +912,34 @@ a = """b = 0x12""" self.check(b, a) - # These should not be touched - - def test_6(self): + def test_unchanged_1(self): b = """a = 12""" - a = """a = 12""" - self.check(b, a) + self.check(b, b) - def test_7(self): + def test_unchanged_2(self): b = """b = 0x12""" - a = """b = 0x12""" - self.check(b, a) + self.check(b, b) - def test_8(self): + def test_unchanged_3(self): b = """c = 3.14""" - a = """c = 3.14""" + self.check(b, b) + + def test_prefix_preservation(self): + b = """x = long( x )""" + a = """x = int( x )""" self.check(b, a) class Test_sysexcattrs(FixerTestCase): fixer = "sysexcattrs" - def test_1(self): + def test(self): s = """f = sys.exc_type""" self.warns(s, s, "This attribute is going away") - def test_2(self): s = """f = sys.exc_value""" self.warns(s, s, "This attribute is going away") - def test_3(self): s = """f = sys.exc_traceback""" self.warns(s, s, "This attribute is going away") @@ -896,12 +947,47 @@ class Test_dict(FixerTestCase): fixer = "dict" + def test_prefix_preservation(self): + b = "if d. keys ( ) : pass" + a = "if list(d. keys ( )) : pass" + self.check(b, a) + + b = "if d. items ( ) : pass" + a = "if list(d. items ( )) : pass" + self.check(b, a) + + b = "if d. iterkeys ( ) : pass" + a = "if iter(d. keys ( )) : pass" + self.check(b, a) + + b = "[i for i in d. iterkeys( ) ]" + a = "[i for i in d. 
keys( ) ]" + self.check(b, a) + + def test_trailing_comment(self): + b = "d.keys() # foo" + a = "list(d.keys()) # foo" + self.check(b, a) + + b = "d.items() # foo" + a = "list(d.items()) # foo" + self.check(b, a) + + b = "d.iterkeys() # foo" + a = "iter(d.keys()) # foo" + self.check(b, a) + + b = """[i for i in d.iterkeys() # foo + ]""" + a = """[i for i in d.keys() # foo + ]""" + self.check(b, a) + def test_01(self): b = "d.keys()" a = "list(d.keys())" self.check(b, a) - def test_01a(self): b = "a[0].foo().keys()" a = "list(a[0].foo().keys())" self.check(b, a) @@ -933,13 +1019,11 @@ def test_07(self): b = "list(d.keys())" - a = b - self.check(b, a) + self.check(b, b) def test_08(self): b = "sorted(d.keys())" - a = b - self.check(b, a) + self.check(b, b) def test_09(self): b = "iter(d.keys())" @@ -1024,6 +1108,19 @@ class Test_xrange(FixerTestCase): fixer = "xrange" + def test_prefix_preservation(self): + b = """x = xrange( 10 )""" + a = """x = range( 10 )""" + self.check(b, a) + + b = """x = xrange( 1 , 10 )""" + a = """x = range( 1 , 10 )""" + self.check(b, a) + + b = """x = xrange( 0 , 10 , 2 )""" + a = """x = range( 0 , 10 , 2 )""" + self.check(b, a) + def test_1(self): b = """x = xrange(10)""" a = """x = range(10)""" @@ -1048,6 +1145,15 @@ class Test_raw_input(FixerTestCase): fixer = "raw_input" + def test_prefix_preservation(self): + b = """x = raw_input( )""" + a = """x = input( )""" + self.check(b, a) + + b = """x = raw_input( '' )""" + a = """x = input( '' )""" + self.check(b, a) + def test_1(self): b = """x = raw_input()""" a = """x = input()""" @@ -1067,6 +1173,20 @@ class Test_input(FixerTestCase): fixer = "input" + def test_prefix_preservation(self): + b = """x = input( )""" + a = """x = eval(input( ))""" + self.check(b, a) + + b = """x = input( '' )""" + a = """x = eval(input( '' ))""" + self.check(b, a) + + def test_trailing_comment(self): + b = """x = input() # foo""" + a = """x = eval(input()) # foo""" + self.check(b, a) + def test_1(self): b = 
"""x = input()""" a = """x = eval(input())""" @@ -1837,6 +1957,15 @@ class Test_callable(FixerTestCase): fixer = "callable" + def test_prefix_preservation(self): + b = """callable( x)""" + a = """hasattr( x, '__call__')""" + self.check(b, a) + + b = """if callable(x): pass""" + a = """if hasattr(x, '__call__'): pass""" + self.check(b, a) + def test_callable_call(self): b = """callable(x)""" a = """hasattr(x, '__call__')""" @@ -1852,9 +1981,17 @@ a = """callable(x, kw=y)""" self.check(a, a) + a = """callable()""" + self.check(a, a) + class Test_filter(FixerTestCase): fixer = "filter" + def test_prefix_preservation(self): + b = """x = filter( None, 'abc' )""" + a = """x = list(filter( None, 'abc' ))""" + self.check(b, a) + def test_filter_basic(self): b = """x = filter(None, 'abc')""" a = """x = list(filter(None, 'abc'))""" @@ -1898,6 +2035,16 @@ class Test_map(FixerTestCase): fixer = "map" + def test_prefix_preservation(self): + b = """x = map( f, 'abc' )""" + a = """x = list(map( f, 'abc' ))""" + self.check(b, a) + + def test_trailing_comment(self): + b = """x = map(f, 'abc') # foo""" + a = """x = list(map(f, 'abc')) # foo""" + self.check(b, a) + def test_map_basic(self): b = """x = map(f, 'abc')""" a = """x = list(map(f, 'abc'))""" From python-checkins at python.org Mon Jul 9 07:46:04 2007 From: python-checkins at python.org (ronald.oussoren) Date: Mon, 9 Jul 2007 07:46:04 +0200 (CEST) Subject: [Python-checkins] r56203 - python/branches/release25-maint/Lib/idlelib/macosxSupport.py Message-ID: <20070709054604.F36411E4002@bag.python.org> Author: ronald.oussoren Date: Mon Jul 9 07:46:04 2007 New Revision: 56203 Modified: python/branches/release25-maint/Lib/idlelib/macosxSupport.py Log: Fixes IDLE crash on OSX: some versions of Tcl/Tk on OSX don't have a console object, avoid crashing in that case. 
Modified: python/branches/release25-maint/Lib/idlelib/macosxSupport.py ============================================================================== --- python/branches/release25-maint/Lib/idlelib/macosxSupport.py (original) +++ python/branches/release25-maint/Lib/idlelib/macosxSupport.py Mon Jul 9 07:46:04 2007 @@ -3,6 +3,7 @@ GUI application (as opposed to an X11 application). """ import sys +import Tkinter def runningAsOSXApp(): """ Returns True iff running from the IDLE.app bundle on OSX """ @@ -23,7 +24,11 @@ root.createcommand("::tk::mac::OpenDocument", doOpenFile) def hideTkConsole(root): - root.tk.call('console', 'hide') + try: + root.tk.call('console', 'hide') + except Tkinter.TclError: + # Some versions of the Tk framework don't have a console object + pass def overrideRootMenu(root, flist): """ From buildbot at python.org Mon Jul 9 07:53:23 2007 From: buildbot at python.org (buildbot at python.org) Date: Mon, 09 Jul 2007 05:53:23 +0000 Subject: [Python-checkins] buildbot failure in amd64 XP 2.5 Message-ID: <20070709055323.50EB21E4002@bag.python.org> The Buildbot has detected a new failure of amd64 XP 2.5. 
Full details are available at: http://www.python.org/dev/buildbot/all/amd64%2520XP%25202.5/builds/7 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch branches/release25-maint] HEAD Blamelist: ronald.oussoren BUILD FAILED: failed compile sincerely, -The Buildbot From python-checkins at python.org Mon Jul 9 08:02:21 2007 From: python-checkins at python.org (ronald.oussoren) Date: Mon, 9 Jul 2007 08:02:21 +0200 (CEST) Subject: [Python-checkins] r56204 - python/trunk/Lib/idlelib/macosxSupport.py Message-ID: <20070709060221.69BAA1E4016@bag.python.org> Author: ronald.oussoren Date: Mon Jul 9 08:02:21 2007 New Revision: 56204 Modified: python/trunk/Lib/idlelib/macosxSupport.py Log: Patch 1693258: Fix for duplicate "preferences" menu-OS X Modified: python/trunk/Lib/idlelib/macosxSupport.py ============================================================================== --- python/trunk/Lib/idlelib/macosxSupport.py (original) +++ python/trunk/Lib/idlelib/macosxSupport.py Mon Jul 9 08:02:21 2007 @@ -3,6 +3,7 @@ GUI application (as opposed to an X11 application). 
""" import sys +import Tkinter def runningAsOSXApp(): """ Returns True iff running from the IDLE.app bundle on OSX """ @@ -23,7 +24,11 @@ root.createcommand("::tk::mac::OpenDocument", doOpenFile) def hideTkConsole(root): - root.tk.call('console', 'hide') + try: + root.tk.call('console', 'hide') + except Tkinter.TclError: + # Some versions of the Tk framework don't have a console object + pass def overrideRootMenu(root, flist): """ @@ -75,32 +80,40 @@ import configDialog configDialog.ConfigDialog(root, 'Settings') + root.bind('<>', about_dialog) root.bind('<>', config_dialog) if flist: root.bind('<>', flist.close_all_callback) - for mname, entrylist in Bindings.menudefs: - menu = menudict.get(mname) - if not menu: - continue - for entry in entrylist: - if not entry: - menu.add_separator() + + ###check if Tk version >= 8.4.14; if so, use hard-coded showprefs binding + tkversion = root.tk.eval('info patchlevel') + if tkversion >= '8.4.14': + Bindings.menudefs[0] = ('application', [ + ('About IDLE', '<>'), + None, + ]) + root.createcommand('::tk::mac::ShowPreferences', config_dialog) + else: + for mname, entrylist in Bindings.menudefs: + menu = menudict.get(mname) + if not menu: + continue else: - label, eventname = entry - underline, label = prepstr(label) - accelerator = get_accelerator(Bindings.default_keydefs, + for entry in entrylist: + if not entry: + menu.add_separator() + else: + label, eventname = entry + underline, label = prepstr(label) + accelerator = get_accelerator(Bindings.default_keydefs, eventname) - def command(text=root, eventname=eventname): - text.event_generate(eventname) - menu.add_command(label=label, underline=underline, + def command(text=root, eventname=eventname): + text.event_generate(eventname) + menu.add_command(label=label, underline=underline, command=command, accelerator=accelerator) - - - - def setupApp(root, flist): """ Perform setup for the OSX application bundle. 
From python-checkins at python.org Mon Jul 9 08:03:48 2007 From: python-checkins at python.org (ronald.oussoren) Date: Mon, 9 Jul 2007 08:03:48 +0200 (CEST) Subject: [Python-checkins] r56205 - python/branches/release25-maint/Lib/idlelib/macosxSupport.py Message-ID: <20070709060348.18CC01E4002@bag.python.org> Author: ronald.oussoren Date: Mon Jul 9 08:03:47 2007 New Revision: 56205 Modified: python/branches/release25-maint/Lib/idlelib/macosxSupport.py Log: Patch 1693258: Fix for duplicate "preferences" menu-OS X Backport of 56204. Modified: python/branches/release25-maint/Lib/idlelib/macosxSupport.py ============================================================================== --- python/branches/release25-maint/Lib/idlelib/macosxSupport.py (original) +++ python/branches/release25-maint/Lib/idlelib/macosxSupport.py Mon Jul 9 08:03:47 2007 @@ -80,32 +80,40 @@ import configDialog configDialog.ConfigDialog(root, 'Settings') + root.bind('<>', about_dialog) root.bind('<>', config_dialog) if flist: root.bind('<>', flist.close_all_callback) - for mname, entrylist in Bindings.menudefs: - menu = menudict.get(mname) - if not menu: - continue - for entry in entrylist: - if not entry: - menu.add_separator() + + ###check if Tk version >= 8.4.14; if so, use hard-coded showprefs binding + tkversion = root.tk.eval('info patchlevel') + if tkversion >= '8.4.14': + Bindings.menudefs[0] = ('application', [ + ('About IDLE', '<>'), + None, + ]) + root.createcommand('::tk::mac::ShowPreferences', config_dialog) + else: + for mname, entrylist in Bindings.menudefs: + menu = menudict.get(mname) + if not menu: + continue else: - label, eventname = entry - underline, label = prepstr(label) - accelerator = get_accelerator(Bindings.default_keydefs, + for entry in entrylist: + if not entry: + menu.add_separator() + else: + label, eventname = entry + underline, label = prepstr(label) + accelerator = get_accelerator(Bindings.default_keydefs, eventname) - def command(text=root, eventname=eventname): 
- text.event_generate(eventname) - menu.add_command(label=label, underline=underline, + def command(text=root, eventname=eventname): + text.event_generate(eventname) + menu.add_command(label=label, underline=underline, command=command, accelerator=accelerator) - - - - def setupApp(root, flist): """ Perform setup for the OSX application bundle. From buildbot at python.org Mon Jul 9 08:27:33 2007 From: buildbot at python.org (buildbot at python.org) Date: Mon, 09 Jul 2007 06:27:33 +0000 Subject: [Python-checkins] buildbot warnings in x86 gentoo trunk Message-ID: <20070709062733.D51611E4002@bag.python.org> The Buildbot has detected a new failure of x86 gentoo trunk. Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520gentoo%2520trunk/builds/2294 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: ronald.oussoren Build had warnings: warnings test Excerpt from the test logfile: 2 tests failed: test_urllib2 test_urllib2net ====================================================================== ERROR: test_trivial (test.test_urllib2.TrivialTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2.py", line 19, in test_trivial self.assertRaises(ValueError, urllib2.urlopen, 'bogus url') File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/unittest.py", line 329, in failUnlessRaises callableObj(*args, **kwargs) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 123, in urlopen _opener = build_opener() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 457, in build_opener opener.add_handler(klass()) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 666, in __init__ proxies = getproxies() File 
"/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib.py", line 1267, in getproxies_environment for name, value in os.environ.items(): AttributeError: 'NoneType' object has no attribute 'environ' ====================================================================== ERROR: test_file (test.test_urllib2.HandlerTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2.py", line 613, in test_file r = h.file_open(Request(url)) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 1200, in file_open return self.open_local_file(req) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 1219, in open_local_file localfile = url2pathname(file) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib.py", line 55, in url2pathname return unquote(pathname) TypeError: 'NoneType' object is not callable ====================================================================== ERROR: test_http (test.test_urllib2.HandlerTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2.py", line 719, in test_http r.read; r.readline # wrapped MockFile methods AttributeError: addinfourl instance has no attribute 'read' ====================================================================== ERROR: test_build_opener (test.test_urllib2.MiscTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2.py", line 1025, in test_build_opener o = build_opener(FooHandler, BarHandler) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 457, in build_opener opener.add_handler(klass()) File 
"/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 666, in __init__ proxies = getproxies() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib.py", line 1267, in getproxies_environment for name, value in os.environ.items(): AttributeError: 'NoneType' object has no attribute 'environ' ====================================================================== ERROR: testURLread (test.test_urllib2net.URLTimeoutTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2net.py", line 24, in testURLread f = urllib2.urlopen("http://www.python.org/") File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 123, in urlopen _opener = build_opener() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 457, in build_opener opener.add_handler(klass()) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 666, in __init__ proxies = getproxies() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib.py", line 1267, in getproxies_environment for name, value in os.environ.items(): AttributeError: 'NoneType' object has no attribute 'environ' ====================================================================== ERROR: test_bad_address (test.test_urllib2net.urlopenNetworkTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2net.py", line 147, in test_bad_address urllib2.urlopen, "http://www.python.invalid./") File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/unittest.py", line 329, in failUnlessRaises callableObj(*args, **kwargs) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 123, in urlopen _opener = build_opener() File 
"/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 457, in build_opener opener.add_handler(klass()) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 666, in __init__ proxies = getproxies() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib.py", line 1267, in getproxies_environment for name, value in os.environ.items(): AttributeError: 'NoneType' object has no attribute 'environ' ====================================================================== ERROR: test_basic (test.test_urllib2net.urlopenNetworkTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2net.py", line 105, in test_basic open_url = urllib2.urlopen("http://www.python.org/") File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 123, in urlopen _opener = build_opener() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 457, in build_opener opener.add_handler(klass()) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 666, in __init__ proxies = getproxies() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib.py", line 1267, in getproxies_environment for name, value in os.environ.items(): AttributeError: 'NoneType' object has no attribute 'environ' ====================================================================== ERROR: test_geturl (test.test_urllib2net.urlopenNetworkTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2net.py", line 129, in test_geturl open_url = urllib2.urlopen(URL) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 123, in urlopen _opener = build_opener() File 
"/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 457, in build_opener opener.add_handler(klass()) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 666, in __init__ proxies = getproxies() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib.py", line 1267, in getproxies_environment for name, value in os.environ.items(): AttributeError: 'NoneType' object has no attribute 'environ' ====================================================================== ERROR: test_info (test.test_urllib2net.urlopenNetworkTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2net.py", line 116, in test_info open_url = urllib2.urlopen("http://www.python.org/") File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 123, in urlopen _opener = build_opener() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 457, in build_opener opener.add_handler(klass()) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 666, in __init__ proxies = getproxies() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib.py", line 1267, in getproxies_environment for name, value in os.environ.items(): AttributeError: 'NoneType' object has no attribute 'environ' ====================================================================== ERROR: test_file (test.test_urllib2net.OtherNetworkTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2net.py", line 189, in test_file self._test_urls(urls, self._extra_handlers()) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2net.py", line 237, in _test_urls 
urllib2.install_opener(urllib2.build_opener(*handlers)) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 457, in build_opener opener.add_handler(klass()) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 666, in __init__ proxies = getproxies() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib.py", line 1267, in getproxies_environment for name, value in os.environ.items(): AttributeError: 'NoneType' object has no attribute 'environ' ====================================================================== ERROR: test_ftp (test.test_urllib2net.OtherNetworkTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2net.py", line 174, in test_ftp self._test_urls(urls, self._extra_handlers()) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2net.py", line 237, in _test_urls urllib2.install_opener(urllib2.build_opener(*handlers)) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 457, in build_opener opener.add_handler(klass()) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 666, in __init__ proxies = getproxies() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib.py", line 1267, in getproxies_environment for name, value in os.environ.items(): AttributeError: 'NoneType' object has no attribute 'environ' ====================================================================== ERROR: test_http (test.test_urllib2net.OtherNetworkTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2net.py", line 201, in test_http self._test_urls(urls, self._extra_handlers()) File 
"/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2net.py", line 237, in _test_urls urllib2.install_opener(urllib2.build_opener(*handlers)) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 457, in build_opener opener.add_handler(klass()) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 666, in __init__ proxies = getproxies() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib.py", line 1267, in getproxies_environment for name, value in os.environ.items(): AttributeError: 'NoneType' object has no attribute 'environ' ====================================================================== ERROR: test_range (test.test_urllib2net.OtherNetworkTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2net.py", line 160, in test_range result = urllib2.urlopen(req) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 123, in urlopen _opener = build_opener() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 457, in build_opener opener.add_handler(klass()) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 666, in __init__ proxies = getproxies() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib.py", line 1267, in getproxies_environment for name, value in os.environ.items(): AttributeError: 'NoneType' object has no attribute 'environ' ====================================================================== ERROR: test_close (test.test_urllib2net.CloseSocketTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2net.py", line 76, in test_close response = urllib2.urlopen("http://www.python.org/") 
File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 123, in urlopen _opener = build_opener() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 457, in build_opener opener.add_handler(klass()) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 666, in __init__ proxies = getproxies() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib.py", line 1267, in getproxies_environment for name, value in os.environ.items(): AttributeError: 'NoneType' object has no attribute 'environ' ====================================================================== ERROR: test_ftp_NoneNodefault (test.test_urllib2net.TimeoutTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2net.py", line 306, in test_ftp_NoneNodefault u = urllib2.urlopen("ftp://ftp.mirror.nl/pub/mirror/gnu/", timeout=None) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 123, in urlopen _opener = build_opener() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 457, in build_opener opener.add_handler(klass()) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 666, in __init__ proxies = getproxies() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib.py", line 1267, in getproxies_environment for name, value in os.environ.items(): AttributeError: 'NoneType' object has no attribute 'environ' ====================================================================== ERROR: test_ftp_NoneWithdefault (test.test_urllib2net.TimeoutTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2net.py", line 300, in test_ftp_NoneWithdefault u = 
urllib2.urlopen("ftp://ftp.mirror.nl/pub/mirror/gnu/", timeout=None) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 123, in urlopen _opener = build_opener() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 457, in build_opener opener.add_handler(klass()) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 666, in __init__ proxies = getproxies() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib.py", line 1267, in getproxies_environment for name, value in os.environ.items(): AttributeError: 'NoneType' object has no attribute 'environ' ====================================================================== ERROR: test_ftp_Value (test.test_urllib2net.TimeoutTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2net.py", line 310, in test_ftp_Value u = urllib2.urlopen("ftp://ftp.mirror.nl/pub/mirror/gnu/", timeout=60) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 123, in urlopen _opener = build_opener() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 457, in build_opener opener.add_handler(klass()) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 666, in __init__ proxies = getproxies() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib.py", line 1267, in getproxies_environment for name, value in os.environ.items(): AttributeError: 'NoneType' object has no attribute 'environ' ====================================================================== ERROR: test_ftp_basic (test.test_urllib2net.TimeoutTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2net.py", line 
293, in test_ftp_basic u = urllib2.urlopen("ftp://ftp.mirror.nl/pub/mirror/gnu/") File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 123, in urlopen _opener = build_opener() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 457, in build_opener opener.add_handler(klass()) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 666, in __init__ proxies = getproxies() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib.py", line 1267, in getproxies_environment for name, value in os.environ.items(): AttributeError: 'NoneType' object has no attribute 'environ' ====================================================================== ERROR: test_http_NoneNodefault (test.test_urllib2net.TimeoutTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2net.py", line 289, in test_http_NoneNodefault u = urllib2.urlopen("http://www.python.org", timeout=None) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 123, in urlopen _opener = build_opener() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 457, in build_opener opener.add_handler(klass()) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 666, in __init__ proxies = getproxies() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib.py", line 1267, in getproxies_environment for name, value in os.environ.items(): AttributeError: 'NoneType' object has no attribute 'environ' ====================================================================== ERROR: test_http_NoneWithdefault (test.test_urllib2net.TimeoutTest) ---------------------------------------------------------------------- Traceback (most recent call last): File 
"/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2net.py", line 279, in test_http_NoneWithdefault u = urllib2.urlopen("http://www.python.org", timeout=None) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 123, in urlopen _opener = build_opener() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 457, in build_opener opener.add_handler(klass()) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 666, in __init__ proxies = getproxies() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib.py", line 1267, in getproxies_environment for name, value in os.environ.items(): AttributeError: 'NoneType' object has no attribute 'environ' ====================================================================== ERROR: test_http_Value (test.test_urllib2net.TimeoutTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2net.py", line 285, in test_http_Value u = urllib2.urlopen("http://www.python.org", timeout=120) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 123, in urlopen _opener = build_opener() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 457, in build_opener opener.add_handler(klass()) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 666, in __init__ proxies = getproxies() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib.py", line 1267, in getproxies_environment for name, value in os.environ.items(): AttributeError: 'NoneType' object has no attribute 'environ' ====================================================================== ERROR: test_http_basic (test.test_urllib2net.TimeoutTest) ---------------------------------------------------------------------- Traceback (most recent 
call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_urllib2net.py", line 272, in test_http_basic u = urllib2.urlopen("http://www.python.org") File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 123, in urlopen _opener = build_opener() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 457, in build_opener opener.add_handler(klass()) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib2.py", line 666, in __init__ proxies = getproxies() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/urllib.py", line 1267, in getproxies_environment for name, value in os.environ.items(): AttributeError: 'NoneType' object has no attribute 'environ' make: *** [buildbottest] Error 1 sincerely, -The Buildbot From buildbot at python.org Mon Jul 9 08:29:21 2007 From: buildbot at python.org (buildbot at python.org) Date: Mon, 09 Jul 2007 06:29:21 +0000 Subject: [Python-checkins] buildbot warnings in x86 mvlgcc 2.5 Message-ID: <20070709062921.56CEF1E4002@bag.python.org> The Buildbot has detected a new failure of x86 mvlgcc 2.5. 
Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520mvlgcc%25202.5/builds/257 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch branches/release25-maint] HEAD Blamelist: ronald.oussoren Build had warnings: warnings test Excerpt from the test logfile: Traceback (most recent call last): File "/home2/buildbot/slave/2.5.loewis-linux/build/Lib/threading.py", line 460, in __bootstrap self.run() File "/home2/buildbot/slave/2.5.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 64, in run self._RequestHandlerClass) File "/home2/buildbot/slave/2.5.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 22, in __init__ RequestHandlerClass) File "/home2/buildbot/slave/2.5.loewis-linux/build/Lib/SocketServer.py", line 330, in __init__ self.server_bind() File "/home2/buildbot/slave/2.5.loewis-linux/build/Lib/BaseHTTPServer.py", line 101, in server_bind SocketServer.TCPServer.server_bind(self) File "/home2/buildbot/slave/2.5.loewis-linux/build/Lib/SocketServer.py", line 341, in server_bind self.socket.bind(self.server_address) File "", line 1, in bind error: (98, 'Address already in use') sincerely, -The Buildbot From python-checkins at python.org Mon Jul 9 10:40:34 2007 From: python-checkins at python.org (ronald.oussoren) Date: Mon, 9 Jul 2007 10:40:34 +0200 (CEST) Subject: [Python-checkins] r56206 - in python/branches/release25-maint: Makefile.pre.in Misc/NEWS Message-ID: <20070709084034.6DECB1E4013@bag.python.org> Author: ronald.oussoren Date: Mon Jul 9 10:40:34 2007 New Revision: 56206 Modified: python/branches/release25-maint/Makefile.pre.in python/branches/release25-maint/Misc/NEWS Log: Patch 1673122: be explicit about which libtool to use, to avoid name clashes when a users install GNU libtool early in his PATH Modified: python/branches/release25-maint/Makefile.pre.in ============================================================================== --- 
python/branches/release25-maint/Makefile.pre.in (original) +++ python/branches/release25-maint/Makefile.pre.in Mon Jul 9 10:40:34 2007 @@ -391,7 +391,7 @@ -compatibility_version $(VERSION) \ -current_version $(VERSION); \ else \ - libtool -o $(LDLIBRARY) -dynamic $(OTHER_LIBTOOL_OPT) $(LIBRARY) \ + /usr/bin/libtool -o $(LDLIBRARY) -dynamic $(OTHER_LIBTOOL_OPT) $(LIBRARY) \ @LIBTOOL_CRUFT@ ;\ fi $(INSTALL) -d -m $(DIRMODE) \ Modified: python/branches/release25-maint/Misc/NEWS ============================================================================== --- python/branches/release25-maint/Misc/NEWS (original) +++ python/branches/release25-maint/Misc/NEWS Mon Jul 9 10:40:34 2007 @@ -57,6 +57,9 @@ - Fix test_pty.py to not hang on OS X (and theoretically other *nixes) when run in verbose mode. +- Bug #1693258: IDLE would show two "Preferences" menu's with some versions + of Tcl/Tk + Extension Modules ----------------- @@ -74,6 +77,12 @@ - Bug #1569057: Document that calling file.next() on a file open for writing has undefined behaviour. Backport of r54712. +Build +----- + +- Patch #1673122: Use an explicit path to libtool when building a framework. + This avoids picking up GNU libtool from a users PATH. + What's New in Python 2.5.1? 
============================= From python-checkins at python.org Mon Jul 9 10:41:15 2007 From: python-checkins at python.org (ronald.oussoren) Date: Mon, 9 Jul 2007 10:41:15 +0200 (CEST) Subject: [Python-checkins] r56207 - python/trunk/Makefile.pre.in Message-ID: <20070709084115.70B201E4017@bag.python.org> Author: ronald.oussoren Date: Mon Jul 9 10:41:15 2007 New Revision: 56207 Modified: python/trunk/Makefile.pre.in Log: Patch 1673122: be explicit about which libtool to use, to avoid name clashes when a users install GNU libtool early in his PATH Modified: python/trunk/Makefile.pre.in ============================================================================== --- python/trunk/Makefile.pre.in (original) +++ python/trunk/Makefile.pre.in Mon Jul 9 10:41:15 2007 @@ -392,7 +392,7 @@ -compatibility_version $(VERSION) \ -current_version $(VERSION); \ else \ - libtool -o $(LDLIBRARY) -dynamic $(OTHER_LIBTOOL_OPT) $(LIBRARY) \ + /usr/bin/libtool -o $(LDLIBRARY) -dynamic $(OTHER_LIBTOOL_OPT) $(LIBRARY) \ @LIBTOOL_CRUFT@ ;\ fi $(INSTALL) -d -m $(DIRMODE) \ From buildbot at python.org Mon Jul 9 11:20:46 2007 From: buildbot at python.org (buildbot at python.org) Date: Mon, 09 Jul 2007 09:20:46 +0000 Subject: [Python-checkins] buildbot warnings in alpha Tru64 5.1 trunk Message-ID: <20070709092046.EF4281E4013@bag.python.org> The Buildbot has detected a new failure of alpha Tru64 5.1 trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/alpha%2520Tru64%25205.1%2520trunk/builds/1702 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: ronald.oussoren Build had warnings: warnings test Excerpt from the test logfile: sincerely, -The Buildbot From buildbot at python.org Mon Jul 9 12:32:55 2007 From: buildbot at python.org (buildbot at python.org) Date: Mon, 09 Jul 2007 10:32:55 +0000 Subject: [Python-checkins] buildbot warnings in S-390 Debian trunk Message-ID: <20070709103255.51F901E4002@bag.python.org> The Buildbot has detected a new failure of S-390 Debian trunk. Full details are available at: http://www.python.org/dev/buildbot/all/S-390%2520Debian%2520trunk/builds/1035 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: ronald.oussoren Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_socket_ssl make: *** [buildbottest] Error 1 sincerely, -The Buildbot From python-checkins at python.org Mon Jul 9 13:33:39 2007 From: python-checkins at python.org (collin.winter) Date: Mon, 9 Jul 2007 13:33:39 +0200 (CEST) Subject: [Python-checkins] r56212 - sandbox/trunk/2to3/README Message-ID: <20070709113339.F0F311E4002@bag.python.org> Author: collin.winter Date: Mon Jul 9 13:33:39 2007 New Revision: 56212 Modified: sandbox/trunk/2to3/README Log: Add another caveat to the README description of fix_raise. Modified: sandbox/trunk/2to3/README ============================================================================== --- sandbox/trunk/2to3/README (original) +++ sandbox/trunk/2to3/README Mon Jul 9 13:33:39 2007 @@ -139,6 +139,14 @@ but since we can't detect instance-hood by syntax alone and since any client code would have to be changed as well, we don't automate this. 
+Another translation problem is this: :: + + t = ((E, E2), E3) + raise t + +2to3 has no way of knowing that t is a tuple, and so this code will raise an +exception at runtime since the ability to raise tuples is going away. + Notes ===== From buildbot at python.org Mon Jul 9 15:35:14 2007 From: buildbot at python.org (buildbot at python.org) Date: Mon, 09 Jul 2007 13:35:14 +0000 Subject: [Python-checkins] buildbot warnings in alpha Debian trunk Message-ID: <20070709133514.2A9C71E4002@bag.python.org> The Buildbot has detected a new failure of alpha Debian trunk. Full details are available at: http://www.python.org/dev/buildbot/all/alpha%2520Debian%2520trunk/builds/36 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: ronald.oussoren Build had warnings: warnings test Excerpt from the test logfile: 2 tests failed: test_pow test_resource ====================================================================== ERROR: test_bug705231 (test.test_pow.PowTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/pybot/buildarea/trunk.klose-debian-alpha/build/Lib/test/test_pow.py", line 109, in test_bug705231 eq(pow(a, 1.23e167), 1.0) ValueError: negative number cannot be raised to a fractional power ====================================================================== ERROR: test_fsize_enforced (test.test_resource.ResourceTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/pybot/buildarea/trunk.klose-debian-alpha/build/Lib/test/test_resource.py", line 59, in test_fsize_enforced f.close() IOError: [Errno 27] File too large make: *** [buildbottest] Error 1 sincerely, -The Buildbot From python-checkins at python.org Mon Jul 9 16:17:01 2007 From: python-checkins at python.org (collin.winter) Date: Mon, 9 Jul 2007 16:17:01 +0200 (CEST) Subject: [Python-checkins] r56214 - 
sandbox/trunk/2to3/README Message-ID: <20070709141701.925D51E4002@bag.python.org> Author: collin.winter Date: Mon Jul 9 16:17:01 2007 New Revision: 56214 Modified: sandbox/trunk/2to3/README Log: Update the README with more recent fixers. Modified: sandbox/trunk/2to3/README ============================================================================== --- sandbox/trunk/2to3/README (original) +++ sandbox/trunk/2to3/README Mon Jul 9 16:17:01 2007 @@ -34,6 +34,8 @@ * **fix_apply** - convert apply() calls to real function calls. +* **fix_callable** - converts callable(obj) into hasattr(obj, '__call__'). + * **fix_dict** - fix up dict.keys(), .values(), .items() and their iterator versions. @@ -41,6 +43,8 @@ * **fix_exec** - convert "exec" statements to exec() function calls. +* **fix_filter** - changes filter(F, X) into list(filter(F, X)). + * **fix_has_key** - "d.has_key(x)" -> "x in d". * **fix_input** - "input()" -> "eval(input())" (PEP 3111). @@ -49,6 +53,8 @@ * **fix_long** - remove all usage of explicit longs in favor of ints. +* **fix_map** - generally changes map(F, ...) into list(map(F, ...)). + * **fix_ne** - convert the "<>" operator to "!=". * **fix_next** - fixer for it.next() -> next(it) (PEP 3114). @@ -73,6 +79,8 @@ * **fix_tuple_params** - remove tuple parameters from function, method and lambda declarations (PEP 3113). +* **fix_unicode** - convert, e.g., u"..." to "...", unicode(x) to str(x), etc. + * **fix_xrange** - "xrange()" -> "range()". @@ -119,6 +127,15 @@ This is seen frequently when dealing with OSError. +fix_filter +'''''''''' + +The transformation is not correct if the original code depended on +filter(F, X) returning a string if X is a string (or a tuple if X is a +tuple, etc). That would require type inference, which we don't do. Python +2.6's Python 3 compatibility mode should be used to detect such cases. + + fix_has_key ''''''''''' @@ -128,6 +145,15 @@ advised to pay close attention when using this fixer. 
+fix_map +''''''' + +The transformation is not correct if the original code was depending on +map(F, X, Y, ...) to go on until the longest argument is exhausted, +substituting None for missing values -- like zip(), it now stops as +soon as the shortest argument is exhausted. + + fix_raise ''''''''' From python-checkins at python.org Mon Jul 9 19:44:47 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Mon, 9 Jul 2007 19:44:47 +0200 (CEST) Subject: [Python-checkins] r56218 - python/branches/cpy_merge/Modules/_picklemodule.c Message-ID: <20070709174447.DE95D1E4003@bag.python.org> Author: alexandre.vassalotti Date: Mon Jul 9 19:44:47 2007 New Revision: 56218 Modified: python/branches/cpy_merge/Modules/_picklemodule.c Log: Remove the undocumented "list-based" pickler. Start PUT index at 0. Modified: python/branches/cpy_merge/Modules/_picklemodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_picklemodule.c (original) +++ python/branches/cpy_merge/Modules/_picklemodule.c Mon Jul 9 19:44:47 2007 @@ -170,8 +170,6 @@ (destructor) Pdata_dealloc, /*tp_dealloc*/ }; -#define Pdata_Check(O) ((O)->ob_type == &PdataType) - static PyObject * Pdata_New(void) { @@ -397,8 +395,7 @@ static int save(Picklerobject *, PyObject *, int); static int put2(Picklerobject *, PyObject *); -static -PyObject * +static PyObject * pickle_ErrFormat(PyObject *ErrType, char *stringformat, char *format, ...) 
{ va_list va; @@ -540,9 +537,9 @@ self->buf_size = n; } - Py_BEGIN_ALLOW_THREADS - nbytesread = fread(self->buf, sizeof(char), n, self->fp); - Py_END_ALLOW_THREADS + Py_BEGIN_ALLOW_THREADS; + nbytesread = fread(self->buf, sizeof(char), n, self->fp); + Py_END_ALLOW_THREADS; if (nbytesread != (size_t) n) { if (feof(self->fp)) { PyErr_SetNone(PyExc_EOFError); @@ -690,12 +687,6 @@ PyOS_snprintf(s + 1, sizeof(s) - 1, "%ld\n", c_value); len = strlen(s); } - else if (Pdata_Check(self->file)) { - if (write_other(self, NULL, 0) < 0) - return -1; - PDATA_APPEND(self->file, mv, -1); - return 0; - } else { if (c_value < 256) { s[0] = BINGET; @@ -744,14 +735,6 @@ if ((p = PyDict_Size(self->memo)) < 0) goto finally; - /* Make sure memo keys are positive! */ - /* XXX Why? - * XXX And does "positive" really mean non-negative? - * XXX pickle.py starts with PUT index 0, not 1. This makes for - * XXX gratuitous differences between the pickling modules. - */ - p++; - if (!(py_ob_id = PyLong_FromVoidPtr(ob))) goto finally; @@ -774,13 +757,6 @@ PyOS_snprintf(c_str + 1, sizeof(c_str) - 1, "%d\n", p); len = strlen(c_str); } - else if (Pdata_Check(self->file)) { - if (write_other(self, NULL, 0) < 0) - return -1; - PDATA_APPEND(self->file, memo_len, -1); - res = 0; /* Job well done ;) */ - goto finally; - } else { if (p >= 256) { c_str[0] = LONG_BINPUT; @@ -1204,17 +1180,10 @@ if (self->write_func(self, c_str, len) < 0) return -1; - if (size > 128 && Pdata_Check(self->file)) { - if (write_other(self, NULL, 0) < 0) - return -1; - PDATA_APPEND(self->file, args, -1); - } - else { - if (self->write_func(self, - PyString_AS_STRING((PyStringObject *) args), - size) < 0) - return -1; - } + if (self->write_func(self, + PyString_AS_STRING((PyStringObject *) args), + size) < 0) + return -1; } if (doput) @@ -1323,15 +1292,8 @@ if (self->write_func(self, c_str, len) < 0) goto err; - if (size > 128 && Pdata_Check(self->file)) { - if (write_other(self, NULL, 0) < 0) - goto err; - PDATA_APPEND(self->file, 
repr, -1); - } - else { - if (self->write_func(self, PyString_AS_STRING(repr), size) < 0) - goto err; - } + if (self->write_func(self, PyString_AS_STRING(repr), size) < 0) + goto err; Py_DECREF(repr); } @@ -2405,188 +2367,33 @@ { if (self->memo) PyDict_Clear(self->memo); - Py_INCREF(Py_None); - return Py_None; -} - -static PyObject * -Pickle_getvalue(Picklerobject *self, PyObject *args) -{ - int l, i, rsize, ssize, clear = 1, lm; - long ik; - PyObject *k, *r; - char *s, *p, *have_get; - Pdata *data; - - /* Can be called by Python code or C code */ - if (args && !PyArg_ParseTuple(args, "|i:getvalue", &clear)) - return NULL; - - /* Check to make sure we are based on a list */ - if (!Pdata_Check(self->file)) { - PyErr_SetString(PicklingError, - "Attempt to getvalue() a non-list-based pickler"); - return NULL; - } - /* flush write buffer */ - if (write_other(self, NULL, 0) < 0) - return NULL; - - data = (Pdata *) self->file; - l = data->length; - - /* set up an array to hold get/put status */ - lm = PyDict_Size(self->memo); - if (lm < 0) - return NULL; - lm++; - have_get = malloc(lm); - if (have_get == NULL) - return PyErr_NoMemory(); - memset(have_get, 0, lm); - - /* Scan for gets. */ - for (rsize = 0, i = l; --i >= 0;) { - k = data->data[i]; - - if (PyString_Check(k)) - rsize += PyString_GET_SIZE(k); - - else if (PyInt_Check(k)) { /* put */ - ik = PyInt_AsLong(k); - if (ik == -1 && PyErr_Occurred()) - goto err; - if (ik >= lm || ik == 0) { - PyErr_SetString(PicklingError, "Invalid get data"); - goto err; - } - if (have_get[ik]) /* with matching get */ - rsize += ik < 256 ? 
2 : 5; - } - - else if (!(PyTuple_Check(k) && - PyTuple_GET_SIZE(k) == 2 && - PyInt_Check((k = PyTuple_GET_ITEM(k, 0)))) - ) { - PyErr_SetString(PicklingError, "Unexpected data in internal list"); - goto err; - } - - else { /* put */ - ik = PyInt_AsLong(k); - if (ik == -1 && PyErr_Occurred()) - goto err; - if (ik >= lm || ik == 0) { - PyErr_SetString(PicklingError, "Invalid get data"); - return NULL; - } - have_get[ik] = 1; - rsize += ik < 256 ? 2 : 5; - } - } - - /* Now generate the result */ - r = PyString_FromStringAndSize(NULL, rsize); - if (r == NULL) - goto err; - s = PyString_AS_STRING((PyStringObject *) r); - - for (i = 0; i < l; i++) { - k = data->data[i]; - - if (PyString_Check(k)) { - ssize = PyString_GET_SIZE(k); - if (ssize) { - p = PyString_AS_STRING((PyStringObject *) k); - while (--ssize >= 0) - *s++ = *p++; - } - } - - else if (PyTuple_Check(k)) { /* get */ - ik = PyLong_AsLong(PyTuple_GET_ITEM(k, 0)); - if (ik == -1 && PyErr_Occurred()) - goto err; - if (ik < 256) { - *s++ = BINGET; - *s++ = (int) (ik & 0xff); - } - else { - *s++ = LONG_BINGET; - *s++ = (int) (ik & 0xff); - *s++ = (int) ((ik >> 8) & 0xff); - *s++ = (int) ((ik >> 16) & 0xff); - *s++ = (int) ((ik >> 24) & 0xff); - } - } - - else { /* put */ - ik = PyLong_AsLong(k); - if (ik == -1 && PyErr_Occurred()) - goto err; - - if (have_get[ik]) { /* with matching get */ - if (ik < 256) { - *s++ = BINPUT; - *s++ = (int) (ik & 0xff); - } - else { - *s++ = LONG_BINPUT; - *s++ = (int) (ik & 0xff); - *s++ = (int) ((ik >> 8) & 0xff); - *s++ = (int) ((ik >> 16) & 0xff); - *s++ = (int) ((ik >> 24) & 0xff); - } - } - } - } - - if (clear) { - PyDict_Clear(self->memo); - Pdata_clear(data, 0); - } - - free(have_get); - return r; - err: - free(have_get); - return NULL; + Py_RETURN_NONE; } static PyObject * Pickler_dump(Picklerobject *self, PyObject *args) { PyObject *ob; - int get = 0; - if (!(PyArg_ParseTuple(args, "O|i:dump", &ob, &get))) + if (!(PyArg_ParseTuple(args, "O:dump", &ob))) return NULL; if 
(dump(self, ob) < 0) return NULL; - if (get) - return Pickle_getvalue(self, NULL); - - /* XXX Why does dump() return self? */ - Py_INCREF(self); - return (PyObject *) self; + Py_RETURN_NONE; } - static struct PyMethodDef Pickler_methods[] = { {"dump", (PyCFunction) Pickler_dump, METH_VARARGS, PyDoc_STR("dump(object) -- " "Write an object in pickle format to the object's pickle stream")}, {"clear_memo", (PyCFunction) Pickle_clear_memo, METH_NOARGS, PyDoc_STR("clear_memo() -- Clear the picklers memo")}, - {"getvalue", (PyCFunction) Pickle_getvalue, METH_VARARGS, - PyDoc_STR("getvalue() -- Finish picking a list-based pickle")}, {NULL, NULL} /* sentinel */ }; - static Picklerobject * newPicklerobject(PyObject *file, int proto) { @@ -2623,11 +2430,9 @@ self->file = NULL; if (file) Py_INCREF(file); - else { - file = Pdata_New(); - if (file == NULL) - goto err; - } + else + goto err; + self->file = file; if (!(self->memo = PyDict_New())) @@ -2644,14 +2449,12 @@ else { self->write_func = write_other; - if (!Pdata_Check(file)) { - self->write = PyObject_GetAttr(file, write_str); - if (!self->write) { - PyErr_Clear(); - PyErr_SetString(PyExc_TypeError, - "argument must have 'write' " "attribute"); - goto err; - } + self->write = PyObject_GetAttr(file, write_str); + if (!self->write) { + PyErr_Clear(); + PyErr_SetString(PyExc_TypeError, + "argument must have 'write' " "attribute"); + goto err; } self->write_buf = (char *) PyMem_Malloc(WRITE_BUF_SIZE); @@ -2680,20 +2483,10 @@ PyObject *file = NULL; int proto = 0; - /* XXX - * The documented signature is Pickler(file, protocol=0), but this - * accepts Pickler() and Pickler(integer) too. The meaning then - * is clear as mud, undocumented, and not supported by pickle.py. - * I'm told Zope uses this, but I haven't traced into this code - * far enough to figure out what it means. 
- */ - if (!PyArg_ParseTuple(args, "|i:Pickler", &proto)) { - PyErr_Clear(); - proto = 0; - if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|i:Pickler", - kwlist, &file, &proto)) - return NULL; - } + if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|i:Pickler", + kwlist, &file, &proto)) + return NULL; + return (PyObject *) newPicklerobject(file, proto); } @@ -3219,7 +3012,6 @@ } else goto insecure; - /********************************************/ str = PyString_DecodeEscape(p, len, NULL, 0, NULL); free(s); From python-checkins at python.org Mon Jul 9 21:36:01 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Mon, 9 Jul 2007 21:36:01 +0200 (CEST) Subject: [Python-checkins] r56219 - python/branches/cpy_merge/Modules/_picklemodule.c Message-ID: <20070709193601.9ACE31E4002@bag.python.org> Author: alexandre.vassalotti Date: Mon Jul 9 21:36:01 2007 New Revision: 56219 Modified: python/branches/cpy_merge/Modules/_picklemodule.c Log: Add basic subclassing support to Pickler. Remove all module-level function. Remove a left-over from the "list-based" pickle in write_other(). Clean up Picklertype. 
Modified: python/branches/cpy_merge/Modules/_picklemodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_picklemodule.c (original) +++ python/branches/cpy_merge/Modules/_picklemodule.c Mon Jul 9 21:36:01 2007 @@ -461,7 +461,7 @@ static int write_other(Picklerobject *self, const char *s, Py_ssize_t _n) { - PyObject *py_str = 0, *junk = 0; + PyObject *py_str, *result; int n; if (_n > INT_MAX) @@ -491,21 +491,16 @@ } } - if (self->write) { - /* object with write method */ - ARG_TUP(self, py_str); - if (self->arg) { - junk = PyObject_Call(self->write, self->arg, NULL); - FREE_ARG_TUP(self); - } - if (junk) - Py_DECREF(junk); - else - return -1; + /* object with write method */ + ARG_TUP(self, py_str); + if (self->arg) { + result = PyObject_Call(self->write, self->arg, NULL); + FREE_ARG_TUP(self); } - else - PDATA_PUSH(self->file, py_str, -1); + if (result == NULL) + return -1; + Py_DECREF(result); self->buf_size = 0; return n; } @@ -2394,10 +2389,17 @@ {NULL, NULL} /* sentinel */ }; -static Picklerobject * -newPicklerobject(PyObject *file, int proto) +static PyObject * +Pickler_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { + static char *kwlist[] = { "file", "protocol", NULL }; Picklerobject *self; + PyObject *file; + int proto = 0; + + if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|i:Pickler", + kwlist, &file, &proto)) + return NULL; if (proto < 0) proto = HIGHEST_PROTOCOL; @@ -2411,86 +2413,66 @@ self = PyObject_GC_New(Picklerobject, &Picklertype); if (self == NULL) return NULL; - self->proto = proto; - self->bin = proto > 0; - self->fp = NULL; - self->write = NULL; - self->memo = NULL; - self->arg = NULL; - self->pers_func = NULL; - self->inst_pers_func = NULL; - self->write_buf = NULL; - self->fast = 0; - self->nesting = 0; - self->fast_container = 0; - self->fast_memo = NULL; - self->buf_size = 0; - self->dispatch_table = NULL; - - self->file = NULL; - if (file) - 
Py_INCREF(file); - else - goto err; - self->file = file; + self->proto = proto; + self->bin = proto > 0; + self->fp = NULL; + self->write = NULL; + self->memo = NULL; + self->arg = NULL; + self->pers_func = NULL; + self->inst_pers_func = NULL; + self->write_buf = NULL; + self->fast = 0; + self->nesting = 0; + self->fast_container = 0; + self->fast_memo = NULL; + self->buf_size = 0; + self->dispatch_table = NULL; + + self->memo = PyDict_New(); + if (self->memo == NULL) + goto error; - if (!(self->memo = PyDict_New())) - goto err; + Py_INCREF(dispatch_table); + self->dispatch_table = dispatch_table; + Py_INCREF(file); if (PyFile_Check(file)) { self->fp = PyFile_AsFile(file); if (self->fp == NULL) { PyErr_SetString(PyExc_ValueError, "I/O operation on closed file"); - goto err; + goto io_error; } self->write_func = write_file; } else { - self->write_func = write_other; - self->write = PyObject_GetAttr(file, write_str); - if (!self->write) { + if (self->write == NULL) { PyErr_Clear(); PyErr_SetString(PyExc_TypeError, "argument must have 'write' " "attribute"); - goto err; + goto io_error; } self->write_buf = (char *) PyMem_Malloc(WRITE_BUF_SIZE); if (self->write_buf == NULL) { PyErr_NoMemory(); - goto err; + goto io_error; } + self->write_func = write_other; } - self->dispatch_table = dispatch_table; - Py_INCREF(dispatch_table); PyObject_GC_Track(self); + return (PyObject *) self; - return self; - - err: + io_error: + Py_DECREF(file); + error: Py_DECREF(self); return NULL; } - -static PyObject * -get_Pickler(PyObject *self, PyObject *args, PyObject *kwds) -{ - static char *kwlist[] = { "file", "protocol", NULL }; - PyObject *file = NULL; - int proto = 0; - - if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|i:Pickler", - kwlist, &file, &proto)) - return NULL; - - return (PyObject *) newPicklerobject(file, proto); -} - - static void Pickler_dealloc(Picklerobject *self) { @@ -2499,7 +2481,6 @@ Py_XDECREF(self->memo); Py_XDECREF(self->fast_memo); Py_XDECREF(self->arg); - 
Py_XDECREF(self->file); Py_XDECREF(self->pers_func); Py_XDECREF(self->inst_pers_func); Py_XDECREF(self->dispatch_table); @@ -2514,7 +2495,6 @@ Py_VISIT(self->memo); Py_VISIT(self->fast_memo); Py_VISIT(self->arg); - Py_VISIT(self->file); Py_VISIT(self->pers_func); Py_VISIT(self->inst_pers_func); Py_VISIT(self->dispatch_table); @@ -2528,7 +2508,6 @@ Py_CLEAR(self->memo); Py_CLEAR(self->fast_memo); Py_CLEAR(self->arg); - Py_CLEAR(self->file); Py_CLEAR(self->pers_func); Py_CLEAR(self->inst_pers_func); Py_CLEAR(self->dispatch_table); @@ -2624,40 +2603,73 @@ {NULL} }; -PyDoc_STRVAR(Picklertype__doc__, "Objects that know how to pickle objects\n"); +PyDoc_STRVAR(Pickler_doc, +"Pickler(file, protocol=0) -- Create a pickler.\n" +"\n" +"This takes a file-like object for writing a pickle data stream.\n" +"The optional proto argument tells the pickler to use the given\n" +"protocol; supported protocols are 0, 1, 2. The default\n" +"protocol is 0, to be backwards compatible. (Protocol 0 is the\n" +"only protocol that can be written to a file opened in text\n" +"mode and read back successfully. When using a protocol higher\n" +"than 0, make sure the file is opened in binary mode, both when\n" +"pickling and unpickling.)\n" +"\n" +"Protocol 1 is more efficient than protocol 0; protocol 2 is\n" +"more efficient than protocol 1.\n" +"\n" +"Specifying a negative protocol version selects the highest\n" +"protocol version supported. The higher the protocol used, the\n" +"more recent the version of Python needed to read the pickle\n" +"produced.\n" +"\n" +"The file parameter must have a write() method that accepts a single\n" +"string argument. 
It can thus be an open file object, a StringIO\n" +"object, or any other custom object that meets this interface.\n"); + static PyTypeObject Picklertype = { PyObject_HEAD_INIT(NULL) - 0, /*ob_size */ - "pickle.Pickler", /*tp_name */ - sizeof(Picklerobject), /*tp_basicsize */ - 0, - (destructor) Pickler_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_compare */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - PyObject_GenericGetAttr, /* tp_getattro */ - PyObject_GenericSetAttr, /* tp_setattro */ - 0, /* tp_as_buffer */ + 0, /*ob_size*/ + "_pickle.Pickler" , /*tp_name*/ + sizeof(Picklerobject), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + (destructor)Pickler_dealloc, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + 0, /*tp_compare*/ + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + PyObject_GenericGetAttr, /*tp_getattro*/ + PyObject_GenericSetAttr, /*tp_setattro*/ + 0, /*tp_as_buffer*/ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, - Picklertype__doc__, /* tp_doc */ - (traverseproc) Pickler_traverse, /* tp_traverse */ - (inquiry) Pickler_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - Pickler_methods, /* tp_methods */ - Pickler_members, /* tp_members */ - Pickler_getsets, /* tp_getset */ + Pickler_doc, /*tp_doc*/ + (traverseproc)Pickler_traverse, /*tp_traverse*/ + (inquiry)Pickler_clear, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + Pickler_methods, /*tp_methods*/ + Pickler_members, /*tp_members*/ + Pickler_getsets, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ 
+ Pickler_new, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ }; static PyObject * @@ -5008,56 +5020,6 @@ return -1; } -/* --------------------------------------------------------------------------- - * Module-level functions. - */ - -/* dump(obj, file, protocol=0). */ -static PyObject * -cpm_dump(PyObject *self, PyObject *args, PyObject *kwds) -{ - static char *kwlist[] = { "obj", "file", "protocol", NULL }; - PyObject *ob, *file, *res = NULL; - Picklerobject *pickler = 0; - int proto = 0; - - if (!(PyArg_ParseTupleAndKeywords(args, kwds, "OO|i", kwlist, - &ob, &file, &proto))) - goto finally; - - if (!(pickler = newPicklerobject(file, proto))) - goto finally; - - if (dump(pickler, ob) < 0) - goto finally; - - Py_INCREF(Py_None); - res = Py_None; - - finally: - Py_XDECREF(pickler); - - return res; -} - -/* load(fileobj). */ -static PyObject * -cpm_load(PyObject *self, PyObject *ob) -{ - Unpicklerobject *unpickler = 0; - PyObject *res = NULL; - - if (!(unpickler = newUnpicklerobject(ob))) - goto finally; - - res = load(unpickler); - - finally: - Py_XDECREF(unpickler); - - return res; -} - PyDoc_STRVAR(Unpicklertype__doc__, "Objects that know how to unpickle"); static PyTypeObject Unpicklertype = { @@ -5087,47 +5049,6 @@ (inquiry) Unpickler_clear, /* tp_clear */ }; -static struct PyMethodDef pickle_methods[] = { - {"dump", (PyCFunction) cpm_dump, METH_VARARGS | METH_KEYWORDS, - PyDoc_STR("dump(obj, file, protocol=0) -- " - "Write an object in pickle format to the given file.\n" - "\n" - "See the Pickler docstring for the meaning of optional argument proto.") - }, - - {"load", (PyCFunction) cpm_load, METH_O, - PyDoc_STR("load(file) -- Load a pickle from the given file")}, - - {"Pickler", (PyCFunction) get_Pickler, METH_VARARGS | METH_KEYWORDS, - PyDoc_STR("Pickler(file, protocol=0) -- Create a pickler.\n" - "\n" - "This takes a file-like object for writing a pickle data stream.\n" - "The optional proto argument tells the pickler to use the given\n" - "protocol; 
supported protocols are 0, 1, 2. The default\n" - "protocol is 0, to be backwards compatible. (Protocol 0 is the\n" - "only protocol that can be written to a file opened in text\n" - "mode and read back successfully. When using a protocol higher\n" - "than 0, make sure the file is opened in binary mode, both when\n" - "pickling and unpickling.)\n" - "\n" - "Protocol 1 is more efficient than protocol 0; protocol 2 is\n" - "more efficient than protocol 1.\n" - "\n" - "Specifying a negative protocol version selects the highest\n" - "protocol version supported. The higher the protocol used, the\n" - "more recent the version of Python needed to read the pickle\n" - "produced.\n" - "\n" - "The file parameter must have a write() method that accepts a single\n" - "string argument. It can thus be an open file object, a StringIO\n" - "object, or any other custom object that meets this interface.\n") - }, - - {"Unpickler", (PyCFunction) get_Unpickler, METH_O, - PyDoc_STR("Unpickler(file) -- Create an unpickler.")}, - {NULL, NULL} -}; - static int init_stuff(PyObject *module_dict) { @@ -5282,12 +5203,12 @@ return; /* Create the module and add the functions */ - m = Py_InitModule4("_pickle", pickle_methods, - pickle_module_documentation, - (PyObject *) NULL, PYTHON_API_VERSION); + m = Py_InitModule3("_pickle", NULL, pickle_module_documentation); if (m == NULL) return; + PyModule_AddObject(m, "Pickler", (PyObject *)&Picklertype); + /* Add some symbolic constants to the module */ d = PyModule_GetDict(m); v = PyString_FromString(rev); From python-checkins at python.org Mon Jul 9 21:37:19 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Mon, 9 Jul 2007 21:37:19 +0200 (CEST) Subject: [Python-checkins] r56220 - python/branches/cpy_merge/Modules/_picklemodule.c Message-ID: <20070709193719.E07C91E4002@bag.python.org> Author: alexandre.vassalotti Date: Mon Jul 9 21:37:19 2007 New Revision: 56220 Modified: python/branches/cpy_merge/Modules/_picklemodule.c Log: Rename 
Picklertype to Pickler_Type. Rename Picklerobject to PicklerObject. Modified: python/branches/cpy_merge/Modules/_picklemodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_picklemodule.c (original) +++ python/branches/cpy_merge/Modules/_picklemodule.c Mon Jul 9 21:37:19 2007 @@ -335,7 +335,7 @@ } \ } -typedef struct Picklerobject { +typedef struct PicklerObject { PyObject_HEAD FILE *fp; PyObject *write; @@ -353,19 +353,19 @@ int fast; /* Fast mode doesn't save in memo, don't use if circ ref */ int nesting; - int (*write_func) (struct Picklerobject *, const char *, Py_ssize_t); + int (*write_func) (struct PicklerObject *, const char *, Py_ssize_t); char *write_buf; int buf_size; PyObject *dispatch_table; int fast_container; /* count nested container dumps */ PyObject *fast_memo; -} Picklerobject; +} PicklerObject; #ifndef PY_CPICKLE_FAST_LIMIT #define PY_CPICKLE_FAST_LIMIT 50 #endif -static PyTypeObject Picklertype; +static PyTypeObject Pickler_Type; typedef struct Unpicklerobject { PyObject_HEAD @@ -392,8 +392,8 @@ static PyTypeObject Unpicklertype; /* Forward decls that need the above structs */ -static int save(Picklerobject *, PyObject *, int); -static int put2(Picklerobject *, PyObject *); +static int save(PicklerObject *, PyObject *, int); +static int put2(PicklerObject *, PyObject *); static PyObject * pickle_ErrFormat(PyObject *ErrType, char *stringformat, char *format, ...) 
@@ -433,7 +433,7 @@ } static int -write_file(Picklerobject *self, const char *s, Py_ssize_t n) +write_file(PicklerObject *self, const char *s, Py_ssize_t n) { size_t nbyteswritten; @@ -459,7 +459,7 @@ } static int -write_other(Picklerobject *self, const char *s, Py_ssize_t _n) +write_other(PicklerObject *self, const char *s, Py_ssize_t _n) { PyObject *py_str, *result; int n; @@ -654,7 +654,7 @@ static int -get(Picklerobject *self, PyObject * id) +get(PicklerObject *self, PyObject * id) { PyObject *value, *mv; long c_value; @@ -706,7 +706,7 @@ static int -put(Picklerobject *self, PyObject *ob) +put(PicklerObject *self, PyObject *ob) { if (ob->ob_refcnt < 2 || self->fast) return 0; @@ -716,7 +716,7 @@ static int -put2(Picklerobject *self, PyObject *ob) +put2(PicklerObject *self, PyObject *ob) { char c_str[30]; int p; @@ -838,7 +838,7 @@ static int -fast_save_enter(Picklerobject *self, PyObject *obj) +fast_save_enter(PicklerObject *self, PyObject *obj) { /* if fast_container < 0, we're doing an error exit. 
*/ if (++self->fast_container >= PY_CPICKLE_FAST_LIMIT) { @@ -873,7 +873,7 @@ } static int -fast_save_leave(Picklerobject *self, PyObject *obj) +fast_save_leave(PicklerObject *self, PyObject *obj) { if (self->fast_container-- >= PY_CPICKLE_FAST_LIMIT) { PyObject *key = PyLong_FromVoidPtr(obj); @@ -889,7 +889,7 @@ } static int -save_none(Picklerobject *self, PyObject *args) +save_none(PicklerObject *self, PyObject *args) { static char none = NONE; if (self->write_func(self, &none, 1) < 0) @@ -899,7 +899,7 @@ } static int -save_bool(Picklerobject *self, PyObject *args) +save_bool(PicklerObject *self, PyObject *args) { static const char *buf[2] = { FALSE, TRUE }; static char len[2] = { sizeof(FALSE) - 1, sizeof(TRUE) - 1 }; @@ -916,7 +916,7 @@ } static int -save_int(Picklerobject *self, long l) +save_int(PicklerObject *self, long l) { char c_str[32]; int len = 0; @@ -965,7 +965,7 @@ static int -save_long(Picklerobject *self, PyObject *args) +save_long(PicklerObject *self, PyObject *args) { Py_ssize_t size; int res = -1; @@ -1092,7 +1092,7 @@ static int -save_float(Picklerobject *self, PyObject *args) +save_float(PicklerObject *self, PyObject *args) { double x = PyFloat_AS_DOUBLE((PyFloatObject *) args); @@ -1120,7 +1120,7 @@ static int -save_string(Picklerobject *self, PyObject *args, int doput) +save_string(PicklerObject *self, PyObject *args, int doput) { int size, len; PyObject *repr = 0; @@ -1234,7 +1234,7 @@ static int -save_unicode(Picklerobject *self, PyObject *args, int doput) +save_unicode(PicklerObject *self, PyObject *args, int doput) { Py_ssize_t size, len; PyObject *repr = 0; @@ -1307,7 +1307,7 @@ /* A helper for save_tuple. Push the len elements in tuple t on the stack. */ static int -store_tuple_elements(Picklerobject *self, PyObject *t, int len) +store_tuple_elements(PicklerObject *self, PyObject *t, int len) { int i; int res = -1; /* guilty until proved innocent */ @@ -1335,7 +1335,7 @@ * magic so that it works in all cases. 
IOW, this is a long routine. */ static int -save_tuple(Picklerobject *self, PyObject *args) +save_tuple(PicklerObject *self, PyObject *args) { PyObject *py_tuple_id = NULL; int len, i; @@ -1448,7 +1448,7 @@ * Returns 0 on success, <0 on error. */ static int -batch_list(Picklerobject *self, PyObject *iter) +batch_list(PicklerObject *self, PyObject *iter) { PyObject *obj; PyObject *slice[BATCHSIZE]; @@ -1523,7 +1523,7 @@ } static int -save_list(Picklerobject *self, PyObject *args) +save_list(PicklerObject *self, PyObject *args) { int res = -1; char s[3]; @@ -1587,7 +1587,7 @@ * ugly to bear. */ static int -batch_dict(Picklerobject *self, PyObject *iter) +batch_dict(PicklerObject *self, PyObject *iter) { PyObject *p; PyObject *slice[BATCHSIZE]; @@ -1680,7 +1680,7 @@ } static int -save_dict(Picklerobject *self, PyObject *args) +save_dict(PicklerObject *self, PyObject *args) { int res = -1; char s[3]; @@ -1736,7 +1736,7 @@ static int -save_global(Picklerobject *self, PyObject *args, PyObject *name) +save_global(PicklerObject *self, PyObject *args, PyObject *name) { PyObject *global_name = 0, *module = 0, *mod = 0, *klass = 0; char *name_str, *module_str; @@ -1874,7 +1874,7 @@ } static int -save_pers(Picklerobject *self, PyObject *args, PyObject *f) +save_pers(PicklerObject *self, PyObject *args, PyObject *f) { PyObject *pid = 0; int size, res = -1; @@ -1936,7 +1936,7 @@ * appropriate __reduce__ method for ob. 
*/ static int -save_reduce(Picklerobject *self, PyObject *args, PyObject *ob) +save_reduce(PicklerObject *self, PyObject *args, PyObject *ob) { PyObject *callable; PyObject *argtup; @@ -2080,7 +2080,7 @@ } static int -save(Picklerobject *self, PyObject *args, int pers_save) +save(PicklerObject *self, PyObject *args, int pers_save) { PyTypeObject *type; PyObject *py_ob_id = 0, *__reduce__ = 0, *t = 0; @@ -2331,7 +2331,7 @@ static int -dump(Picklerobject *self, PyObject *args) +dump(PicklerObject *self, PyObject *args) { static char stop = STOP; @@ -2358,7 +2358,7 @@ } static PyObject * -Pickle_clear_memo(Picklerobject *self, PyObject *args) +Pickle_clear_memo(PicklerObject *self, PyObject *args) { if (self->memo) PyDict_Clear(self->memo); @@ -2367,7 +2367,7 @@ } static PyObject * -Pickler_dump(Picklerobject *self, PyObject *args) +Pickler_dump(PicklerObject *self, PyObject *args) { PyObject *ob; @@ -2393,7 +2393,7 @@ Pickler_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { static char *kwlist[] = { "file", "protocol", NULL }; - Picklerobject *self; + PicklerObject *self; PyObject *file; int proto = 0; @@ -2410,7 +2410,7 @@ return NULL; } - self = PyObject_GC_New(Picklerobject, &Picklertype); + self = PyObject_GC_New(PicklerObject, &Pickler_Type); if (self == NULL) return NULL; @@ -2474,7 +2474,7 @@ } static void -Pickler_dealloc(Picklerobject *self) +Pickler_dealloc(PicklerObject *self) { PyObject_GC_UnTrack(self); Py_XDECREF(self->write); @@ -2489,7 +2489,7 @@ } static int -Pickler_traverse(Picklerobject *self, visitproc visit, void *arg) +Pickler_traverse(PicklerObject *self, visitproc visit, void *arg) { Py_VISIT(self->write); Py_VISIT(self->memo); @@ -2502,7 +2502,7 @@ } static int -Pickler_clear(Picklerobject *self) +Pickler_clear(PicklerObject *self) { Py_CLEAR(self->write); Py_CLEAR(self->memo); @@ -2515,7 +2515,7 @@ } static PyObject * -Pickler_get_pers_func(Picklerobject *p) +Pickler_get_pers_func(PicklerObject *p) { if (p->pers_func == NULL) 
PyErr_SetString(PyExc_AttributeError, "persistent_id"); @@ -2525,7 +2525,7 @@ } static int -Pickler_set_pers_func(Picklerobject *p, PyObject *v) +Pickler_set_pers_func(PicklerObject *p, PyObject *v) { if (v == NULL) { PyErr_SetString(PyExc_TypeError, @@ -2539,7 +2539,7 @@ } static int -Pickler_set_inst_pers_func(Picklerobject *p, PyObject *v) +Pickler_set_inst_pers_func(PicklerObject *p, PyObject *v) { if (v == NULL) { PyErr_SetString(PyExc_TypeError, @@ -2553,7 +2553,7 @@ } static PyObject * -Pickler_get_memo(Picklerobject *p) +Pickler_get_memo(PicklerObject *p) { if (p->memo == NULL) PyErr_SetString(PyExc_AttributeError, "memo"); @@ -2563,7 +2563,7 @@ } static int -Pickler_set_memo(Picklerobject *p, PyObject *v) +Pickler_set_memo(PicklerObject *p, PyObject *v) { if (v == NULL) { PyErr_SetString(PyExc_TypeError, @@ -2581,7 +2581,7 @@ } static PyObject * -Pickler_get_error(Picklerobject *p) +Pickler_get_error(PicklerObject *p) { /* why is this an attribute on the Pickler? */ Py_INCREF(PicklingError); @@ -2589,8 +2589,8 @@ } static PyMemberDef Pickler_members[] = { - {"binary", T_INT, offsetof(Picklerobject, bin)}, - {"fast", T_INT, offsetof(Picklerobject, fast)}, + {"binary", T_INT, offsetof(PicklerObject, bin)}, + {"fast", T_INT, offsetof(PicklerObject, fast)}, {NULL} }; @@ -2628,11 +2628,11 @@ "object, or any other custom object that meets this interface.\n"); -static PyTypeObject Picklertype = { +static PyTypeObject Pickler_Type = { PyObject_HEAD_INIT(NULL) 0, /*ob_size*/ "_pickle.Pickler" , /*tp_name*/ - sizeof(Picklerobject), /*tp_basicsize*/ + sizeof(PicklerObject), /*tp_basicsize*/ 0, /*tp_itemsize*/ (destructor)Pickler_dealloc, /*tp_dealloc*/ 0, /*tp_print*/ @@ -5056,7 +5056,7 @@ if (PyType_Ready(&Unpicklertype) < 0) return -1; - if (PyType_Ready(&Picklertype) < 0) + if (PyType_Ready(&Pickler_Type) < 0) return -1; INIT_STR(__class__); @@ -5189,7 +5189,7 @@ PyObject *format_version; PyObject *compatible_formats; - Picklertype.ob_type = &PyType_Type; + 
Pickler_Type.ob_type = &PyType_Type; Unpicklertype.ob_type = &PyType_Type; PdataType.ob_type = &PyType_Type; @@ -5207,7 +5207,7 @@ if (m == NULL) return; - PyModule_AddObject(m, "Pickler", (PyObject *)&Picklertype); + PyModule_AddObject(m, "Pickler", (PyObject *)&Pickler_Type); /* Add some symbolic constants to the module */ d = PyModule_GetDict(m); From python-checkins at python.org Mon Jul 9 21:56:36 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Mon, 9 Jul 2007 21:56:36 +0200 (CEST) Subject: [Python-checkins] r56221 - python/branches/cpy_merge/Modules/_picklemodule.c Message-ID: <20070709195636.3C5331E4007@bag.python.org> Author: alexandre.vassalotti Date: Mon Jul 9 21:56:35 2007 New Revision: 56221 Modified: python/branches/cpy_merge/Modules/_picklemodule.c Log: Rename goto labels "err" to "error". Modified: python/branches/cpy_merge/Modules/_picklemodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_picklemodule.c (original) +++ python/branches/cpy_merge/Modules/_picklemodule.c Mon Jul 9 21:56:35 2007 @@ -1137,17 +1137,17 @@ return -1; if ((len = PyString_Size(repr)) < 0) - goto err; + goto error; repr_str = PyString_AS_STRING((PyStringObject *) repr); if (self->write_func(self, &string, 1) < 0) - goto err; + goto error; if (self->write_func(self, repr_str, len) < 0) - goto err; + goto error; if (self->write_func(self, "\n", 1) < 0) - goto err; + goto error; Py_XDECREF(repr); } @@ -1187,7 +1187,7 @@ return 0; - err: + error: Py_XDECREF(repr); return -1; } @@ -1253,17 +1253,17 @@ return -1; if ((len = PyString_Size(repr)) < 0) - goto err; + goto error; repr_str = PyString_AS_STRING((PyStringObject *) repr); if (self->write_func(self, &string, 1) < 0) - goto err; + goto error; if (self->write_func(self, repr_str, len) < 0) - goto err; + goto error; if (self->write_func(self, "\n", 1) < 0) - goto err; + goto error; Py_XDECREF(repr); } @@ -1275,7 +1275,7 @@ return -1; 
if ((size = PyString_Size(repr)) < 0) - goto err; + goto error; if (size > INT_MAX) return -1; /* string too large */ @@ -1285,10 +1285,10 @@ len = 5; if (self->write_func(self, c_str, len) < 0) - goto err; + goto error; if (self->write_func(self, PyString_AS_STRING(repr), size) < 0) - goto err; + goto error; Py_DECREF(repr); } @@ -1299,7 +1299,7 @@ return 0; - err: + error: Py_XDECREF(repr); return -1; } @@ -4834,10 +4834,10 @@ self->find_class = NULL; if (!(self->memo = PyDict_New())) - goto err; + goto error; if (!self->stack) - goto err; + goto error; Py_INCREF(f); self->file = f; @@ -4847,7 +4847,7 @@ self->fp = PyFile_AsFile(f); if (self->fp == NULL) { PyErr_SetString(PyExc_ValueError, "I/O operation on closed file"); - goto err; + goto error; } self->read_func = read_file; self->readline_func = readline_file; @@ -4864,14 +4864,14 @@ PyErr_SetString(PyExc_TypeError, "argument must have 'read' and " "'readline' attributes"); - goto err; + goto error; } } PyObject_GC_Track(self); return self; - err: + error: Py_DECREF((PyObject *) self); return NULL; } From python-checkins at python.org Mon Jul 9 22:00:09 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Mon, 9 Jul 2007 22:00:09 +0200 (CEST) Subject: [Python-checkins] r56222 - python/branches/cpy_merge/Modules/_picklemodule.c Message-ID: <20070709200009.BCED41E400C@bag.python.org> Author: alexandre.vassalotti Date: Mon Jul 9 22:00:09 2007 New Revision: 56222 Modified: python/branches/cpy_merge/Modules/_picklemodule.c Log: Rename Unpicklerobject to UnpicklerObject. Rename Unpicklertype to Unpickler_Type. 
Modified: python/branches/cpy_merge/Modules/_picklemodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_picklemodule.c (original) +++ python/branches/cpy_merge/Modules/_picklemodule.c Mon Jul 9 22:00:09 2007 @@ -367,7 +367,7 @@ static PyTypeObject Pickler_Type; -typedef struct Unpicklerobject { +typedef struct UnpicklerObject { PyObject_HEAD FILE *fp; PyObject *file; @@ -382,14 +382,14 @@ int *marks; int num_marks; int marks_size; - Py_ssize_t(*read_func) (struct Unpicklerobject *, char **, Py_ssize_t); - Py_ssize_t(*readline_func) (struct Unpicklerobject *, char **); + Py_ssize_t(*read_func) (struct UnpicklerObject *, char **, Py_ssize_t); + Py_ssize_t(*readline_func) (struct UnpicklerObject *, char **); int buf_size; char *buf; PyObject *find_class; -} Unpicklerobject; +} UnpicklerObject; -static PyTypeObject Unpicklertype; +static PyTypeObject Unpickler_Type; /* Forward decls that need the above structs */ static int save(PicklerObject *, PyObject *, int); @@ -507,7 +507,7 @@ static Py_ssize_t -read_file(Unpicklerobject *self, char **s, Py_ssize_t n) +read_file(UnpicklerObject *self, char **s, Py_ssize_t n) { size_t nbytesread; @@ -552,7 +552,7 @@ static Py_ssize_t -readline_file(Unpicklerobject *self, char **s) +readline_file(UnpicklerObject *self, char **s) { int i; @@ -591,7 +591,7 @@ } static Py_ssize_t -read_other(Unpicklerobject *self, char **s, Py_ssize_t n) +read_other(UnpicklerObject *self, char **s, Py_ssize_t n) { PyObject *bytes, *str = 0; @@ -616,7 +616,7 @@ static Py_ssize_t -readline_other(Unpicklerobject *self, char **s) +readline_other(UnpicklerObject *self, char **s) { PyObject *str; Py_ssize_t str_size; @@ -2705,7 +2705,7 @@ } static int -marker(Unpicklerobject *self) +marker(UnpicklerObject *self) { if (self->num_marks < 1) { PyErr_SetString(UnpicklingError, "could not find MARK"); @@ -2717,7 +2717,7 @@ static int -load_none(Unpicklerobject *self) 
+load_none(UnpicklerObject *self) { PDATA_APPEND(self->stack, Py_None, -1); return 0; @@ -2731,7 +2731,7 @@ } static int -load_int(Unpicklerobject *self) +load_int(UnpicklerObject *self) { PyObject *py_int = 0; char *endptr, *s; @@ -2781,7 +2781,7 @@ } static int -load_bool(Unpicklerobject *self, PyObject *boolean) +load_bool(UnpicklerObject *self, PyObject *boolean) { assert(boolean == Py_True || boolean == Py_False); PDATA_APPEND(self->stack, boolean, -1); @@ -2817,7 +2817,7 @@ static int -load_binintx(Unpicklerobject *self, char *s, int x) +load_binintx(UnpicklerObject *self, char *s, int x) { PyObject *py_int = 0; long l; @@ -2833,7 +2833,7 @@ static int -load_binint(Unpicklerobject *self) +load_binint(UnpicklerObject *self) { char *s; @@ -2845,7 +2845,7 @@ static int -load_binint1(Unpicklerobject *self) +load_binint1(UnpicklerObject *self) { char *s; @@ -2857,7 +2857,7 @@ static int -load_binint2(Unpicklerobject *self) +load_binint2(UnpicklerObject *self) { char *s; @@ -2868,7 +2868,7 @@ } static int -load_long(Unpicklerobject *self) +load_long(UnpicklerObject *self) { PyObject *l = 0; char *end, *s; @@ -2898,7 +2898,7 @@ * data following. 
*/ static int -load_counted_long(Unpicklerobject *self, int size) +load_counted_long(UnpicklerObject *self, int size) { Py_ssize_t i; char *nbytes; @@ -2937,7 +2937,7 @@ } static int -load_float(Unpicklerobject *self) +load_float(UnpicklerObject *self) { PyObject *py_float = 0; char *endptr, *s; @@ -2973,7 +2973,7 @@ } static int -load_binfloat(Unpicklerobject *self) +load_binfloat(UnpicklerObject *self) { PyObject *py_float; double x; @@ -2995,7 +2995,7 @@ } static int -load_string(Unpicklerobject *self) +load_string(UnpicklerObject *self) { PyObject *str = 0; int len, res = -1; @@ -3041,7 +3041,7 @@ static int -load_binstring(Unpicklerobject *self) +load_binstring(UnpicklerObject *self) { PyObject *py_string = 0; long l; @@ -3064,7 +3064,7 @@ static int -load_short_binstring(Unpicklerobject *self) +load_short_binstring(UnpicklerObject *self) { PyObject *py_string = 0; unsigned char l; @@ -3088,7 +3088,7 @@ #ifdef Py_USING_UNICODE static int -load_unicode(Unpicklerobject *self) +load_unicode(UnpicklerObject *self) { PyObject *str = 0; int len, res = -1; @@ -3113,7 +3113,7 @@ #ifdef Py_USING_UNICODE static int -load_binunicode(Unpicklerobject *self) +load_binunicode(UnpicklerObject *self) { PyObject *unicode; long l; @@ -3137,7 +3137,7 @@ static int -load_tuple(Unpicklerobject *self) +load_tuple(UnpicklerObject *self) { PyObject *tup; int i; @@ -3151,7 +3151,7 @@ } static int -load_counted_tuple(Unpicklerobject *self, int len) +load_counted_tuple(UnpicklerObject *self, int len) { PyObject *tup = PyTuple_New(len); @@ -3171,7 +3171,7 @@ } static int -load_empty_list(Unpicklerobject *self) +load_empty_list(UnpicklerObject *self) { PyObject *list; @@ -3182,7 +3182,7 @@ } static int -load_empty_dict(Unpicklerobject *self) +load_empty_dict(UnpicklerObject *self) { PyObject *dict; @@ -3194,7 +3194,7 @@ static int -load_list(Unpicklerobject *self) +load_list(UnpicklerObject *self) { PyObject *list = 0; int i; @@ -3208,7 +3208,7 @@ } static int -load_dict(Unpicklerobject 
*self) +load_dict(UnpicklerObject *self) { PyObject *dict, *key, *value; int i, j, k; @@ -3260,7 +3260,7 @@ static int -load_obj(Unpicklerobject *self) +load_obj(UnpicklerObject *self) { PyObject *class, *tup, *obj = 0; int i; @@ -3284,7 +3284,7 @@ static int -load_inst(Unpicklerobject *self) +load_inst(UnpicklerObject *self) { PyObject *tup, *class = 0, *obj = 0, *module_name, *class_name; int i, len; @@ -3328,7 +3328,7 @@ } static int -load_newobj(Unpicklerobject *self) +load_newobj(UnpicklerObject *self) { PyObject *args = NULL; PyObject *clsraw = NULL; @@ -3378,7 +3378,7 @@ } static int -load_global(Unpicklerobject *self) +load_global(UnpicklerObject *self) { PyObject *class = 0, *module_name = 0, *class_name = 0; int len; @@ -3412,7 +3412,7 @@ static int -load_persid(Unpicklerobject *self) +load_persid(UnpicklerObject *self) { PyObject *pid = 0; int len; @@ -3457,7 +3457,7 @@ } static int -load_binpersid(Unpicklerobject *self) +load_binpersid(UnpicklerObject *self) { PyObject *pid = 0; @@ -3495,7 +3495,7 @@ static int -load_pop(Unpicklerobject *self) +load_pop(UnpicklerObject *self) { int len; @@ -3521,7 +3521,7 @@ static int -load_pop_mark(Unpicklerobject *self) +load_pop_mark(UnpicklerObject *self) { int i; @@ -3535,7 +3535,7 @@ static int -load_dup(Unpicklerobject *self) +load_dup(UnpicklerObject *self) { PyObject *last; int len; @@ -3550,7 +3550,7 @@ static int -load_get(Unpicklerobject *self) +load_get(UnpicklerObject *self) { PyObject *py_str = 0, *value = 0; int len; @@ -3581,7 +3581,7 @@ static int -load_binget(Unpicklerobject *self) +load_binget(UnpicklerObject *self) { PyObject *py_key = 0, *value = 0; unsigned char key; @@ -3611,7 +3611,7 @@ static int -load_long_binget(Unpicklerobject *self) +load_long_binget(UnpicklerObject *self) { PyObject *py_key = 0, *value = 0; unsigned char c; @@ -3652,7 +3652,7 @@ * the number of bytes following the opcode, holding the index (code) value. 
*/ static int -load_extension(Unpicklerobject *self, int nbytes) +load_extension(UnpicklerObject *self, int nbytes) { char *codebytes; /* the nbytes bytes after the opcode */ long code; /* calc_binint returns long */ @@ -3720,7 +3720,7 @@ } static int -load_put(Unpicklerobject *self) +load_put(UnpicklerObject *self) { PyObject *py_str = 0, *value = 0; int len, l; @@ -3742,7 +3742,7 @@ static int -load_binput(Unpicklerobject *self) +load_binput(UnpicklerObject *self) { PyObject *py_key = 0, *value = 0; unsigned char key; @@ -3766,7 +3766,7 @@ static int -load_long_binput(Unpicklerobject *self) +load_long_binput(UnpicklerObject *self) { PyObject *py_key = 0, *value = 0; long key; @@ -3798,7 +3798,7 @@ static int -do_append(Unpicklerobject *self, int x) +do_append(UnpicklerObject *self, int x) { PyObject *value = 0, *list = 0, *append_method = 0; int len, i; @@ -3856,21 +3856,21 @@ static int -load_append(Unpicklerobject *self) +load_append(UnpicklerObject *self) { return do_append(self, self->stack->length - 1); } static int -load_appends(Unpicklerobject *self) +load_appends(UnpicklerObject *self) { return do_append(self, marker(self)); } static int -do_setitems(Unpicklerobject *self, int x) +do_setitems(UnpicklerObject *self, int x) { PyObject *value = 0, *key = 0, *dict = 0; int len, i, r = 0; @@ -3896,20 +3896,20 @@ static int -load_setitem(Unpicklerobject *self) +load_setitem(UnpicklerObject *self) { return do_setitems(self, self->stack->length - 2); } static int -load_setitems(Unpicklerobject *self) +load_setitems(UnpicklerObject *self) { return do_setitems(self, marker(self)); } static int -load_build(Unpicklerobject *self) +load_build(UnpicklerObject *self) { PyObject *state, *inst, *slotstate; PyObject *__setstate__; @@ -4003,7 +4003,7 @@ static int -load_mark(Unpicklerobject *self) +load_mark(UnpicklerObject *self) { int s; @@ -4035,7 +4035,7 @@ } static int -load_reduce(Unpicklerobject *self) +load_reduce(UnpicklerObject *self) { PyObject *callable = 0, 
*arg_tup = 0, *ob = 0; @@ -4060,7 +4060,7 @@ * is the first opcode for protocols >= 2. */ static int -load_proto(Unpicklerobject *self) +load_proto(UnpicklerObject *self) { int i; char *protobyte; @@ -4082,7 +4082,7 @@ } static PyObject * -load(Unpicklerobject *self) +load(UnpicklerObject *self) { PyObject *err = 0, *val = 0; char *s; @@ -4390,7 +4390,7 @@ find persistent references. */ static int -noload_obj(Unpicklerobject * self) +noload_obj(UnpicklerObject * self) { int i; @@ -4401,7 +4401,7 @@ static int -noload_inst(Unpicklerobject * self) +noload_inst(UnpicklerObject * self) { int i; char *s; @@ -4418,7 +4418,7 @@ } static int -noload_newobj(Unpicklerobject * self) +noload_newobj(UnpicklerObject * self) { PyObject *obj; @@ -4437,7 +4437,7 @@ } static int -noload_global(Unpicklerobject *self) +noload_global(UnpicklerObject *self) { char *s; @@ -4450,7 +4450,7 @@ } static int -noload_reduce(Unpicklerobject *self) +noload_reduce(UnpicklerObject *self) { if (self->stack->length < 2) @@ -4461,7 +4461,7 @@ } static int -noload_build(Unpicklerobject *self) +noload_build(UnpicklerObject *self) { if (self->stack->length < 1) @@ -4471,7 +4471,7 @@ } static int -noload_extension(Unpicklerobject *self, int nbytes) +noload_extension(UnpicklerObject *self, int nbytes) { char *codebytes; @@ -4484,7 +4484,7 @@ static PyObject * -noload(Unpicklerobject *self) +noload(UnpicklerObject *self) { PyObject *err = 0, *val = 0; char *s; @@ -4783,13 +4783,13 @@ static PyObject * -Unpickler_load(Unpicklerobject * self, PyObject * unused) +Unpickler_load(UnpicklerObject * self, PyObject * unused) { return load(self); } static PyObject * -Unpickler_noload(Unpicklerobject *self, PyObject *unused) +Unpickler_noload(UnpicklerObject *self, PyObject *unused) { return noload(self); } @@ -4812,12 +4812,12 @@ }; -static Unpicklerobject * -newUnpicklerobject(PyObject *f) +static UnpicklerObject * +newUnpicklerObject(PyObject *f) { - Unpicklerobject *self; + UnpicklerObject *self; - if (!(self = 
PyObject_GC_New(Unpicklerobject, &Unpicklertype))) + if (!(self = PyObject_GC_New(UnpicklerObject, &Unpickler_Type))) return NULL; self->file = NULL; @@ -4880,12 +4880,12 @@ static PyObject * get_Unpickler(PyObject *self, PyObject *file) { - return (PyObject *) newUnpicklerobject(file); + return (PyObject *) newUnpicklerObject(file); } static void -Unpickler_dealloc(Unpicklerobject *self) +Unpickler_dealloc(UnpicklerObject *self) { PyObject_GC_UnTrack((PyObject *) self); Py_XDECREF(self->readline); @@ -4910,7 +4910,7 @@ } static int -Unpickler_traverse(Unpicklerobject *self, visitproc visit, void *arg) +Unpickler_traverse(UnpicklerObject *self, visitproc visit, void *arg) { Py_VISIT(self->readline); Py_VISIT(self->read); @@ -4925,7 +4925,7 @@ } static int -Unpickler_clear(Unpicklerobject *self) +Unpickler_clear(UnpicklerObject *self) { Py_CLEAR(self->readline); Py_CLEAR(self->read); @@ -4940,7 +4940,7 @@ } static PyObject * -Unpickler_getattr(Unpicklerobject *self, char *name) +Unpickler_getattr(UnpicklerObject *self, char *name) { if (!strcmp(name, "persistent_load")) { if (!self->pers_func) { @@ -4982,7 +4982,7 @@ static int -Unpickler_setattr(Unpicklerobject *self, char *name, PyObject *value) +Unpickler_setattr(UnpicklerObject *self, char *name, PyObject *value) { if (!strcmp(name, "persistent_load")) { @@ -5020,13 +5020,13 @@ return -1; } -PyDoc_STRVAR(Unpicklertype__doc__, "Objects that know how to unpickle"); +PyDoc_STRVAR(Unpickler_Type__doc__, "Objects that know how to unpickle"); -static PyTypeObject Unpicklertype = { +static PyTypeObject Unpickler_Type = { PyObject_HEAD_INIT(NULL) 0, /*ob_size */ "_pickle.Unpickler", /*tp_name */ - sizeof(Unpicklerobject), /*tp_basicsize */ + sizeof(UnpicklerObject), /*tp_basicsize */ 0, (destructor) Unpickler_dealloc, /* tp_dealloc */ 0, /* tp_print */ @@ -5044,7 +5044,7 @@ 0, /* tp_setattro */ 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, - Unpicklertype__doc__, /* tp_doc */ + 
Unpickler_Type__doc__, /* tp_doc */ (traverseproc) Unpickler_traverse, /* tp_traverse */ (inquiry) Unpickler_clear, /* tp_clear */ }; @@ -5054,7 +5054,7 @@ { PyObject *copy_reg, *t, *r; - if (PyType_Ready(&Unpicklertype) < 0) + if (PyType_Ready(&Unpickler_Type) < 0) return -1; if (PyType_Ready(&Pickler_Type) < 0) return -1; @@ -5190,7 +5190,7 @@ PyObject *compatible_formats; Pickler_Type.ob_type = &PyType_Type; - Unpicklertype.ob_type = &PyType_Type; + Unpickler_Type.ob_type = &PyType_Type; PdataType.ob_type = &PyType_Type; /* Initialize some pieces. We need to do this before module creation, From python-checkins at python.org Mon Jul 9 23:10:34 2007 From: python-checkins at python.org (brett.cannon) Date: Mon, 9 Jul 2007 23:10:34 +0200 (CEST) Subject: [Python-checkins] r56223 - sandbox/trunk/import_in_py/_importlib.py Message-ID: <20070709211034.A5C6C1E4003@bag.python.org> Author: brett.cannon Date: Mon Jul 9 23:10:34 2007 New Revision: 56223 Modified: sandbox/trunk/import_in_py/_importlib.py Log: Remove os.path.exists dependency. Modified: sandbox/trunk/import_in_py/_importlib.py ============================================================================== --- sandbox/trunk/import_in_py/_importlib.py (original) +++ sandbox/trunk/import_in_py/_importlib.py Mon Jul 9 23:10:34 2007 @@ -60,13 +60,22 @@ from __future__ import with_statement # Built-in modules required by this module to work. 
-_required_builtins = ['imp', 'sys', 'marshal'] +_required_builtins = ['imp', 'sys', 'marshal', 'posix'] def _path_join(*args): """Replacement for os.path.join so as to remove dependency on os module.""" return path_sep.join(args) +def _path_exists(path): + """Replacement for os.path.exists to help remove dependency on 'os'.""" + try: + posix.stat(path) + except OSError: + return False + else: + return True + class _BuiltinFrozenBaseImporter(object): @@ -287,7 +296,7 @@ """ path = base_path + type_ if must_exist: - path = path if os.path.exists(path) else None + path = path if _path_exists(path) else None return path def read_data(self, path, binary=False): From python-checkins at python.org Mon Jul 9 23:16:00 2007 From: python-checkins at python.org (brett.cannon) Date: Mon, 9 Jul 2007 23:16:00 +0200 (CEST) Subject: [Python-checkins] r56224 - sandbox/trunk/import_in_py/_importlib.py Message-ID: <20070709211600.000EE1E4002@bag.python.org> Author: brett.cannon Date: Mon Jul 9 23:16:00 2007 New Revision: 56224 Modified: sandbox/trunk/import_in_py/_importlib.py Log: Remove os.stat dependency. Assumes posix is the name of the built-in module. This is not the case on Windows or OS/2. Also don't know how it works out with other platforms such as RISC. Modified: sandbox/trunk/import_in_py/_importlib.py ============================================================================== --- sandbox/trunk/import_in_py/_importlib.py (original) +++ sandbox/trunk/import_in_py/_importlib.py Mon Jul 9 23:16:00 2007 @@ -60,6 +60,8 @@ from __future__ import with_statement # Built-in modules required by this module to work. +# XXX posix can be named os2 or nt. +# XXX posix guaranteed on all platforms (especially posix.stat)? _required_builtins = ['imp', 'sys', 'marshal', 'posix'] @@ -273,7 +275,7 @@ PyPycHandler. 
""" - return int(os.stat(path).st_mtime) + return int(posix.stat(path).st_mtime) def split_path(self, path): """Split the specified path into a base path and the type of the From python-checkins at python.org Tue Jul 10 09:33:58 2007 From: python-checkins at python.org (collin.winter) Date: Tue, 10 Jul 2007 09:33:58 +0200 (CEST) Subject: [Python-checkins] r56227 - sandbox/trunk/2to3/HACKING Message-ID: <20070710073358.0977C1E4003@bag.python.org> Author: collin.winter Date: Tue Jul 10 09:33:57 2007 New Revision: 56227 Modified: sandbox/trunk/2to3/HACKING Log: Add the examples/ directory to HACKING, minor style changes. Modified: sandbox/trunk/2to3/HACKING ============================================================================== --- sandbox/trunk/2to3/HACKING (original) +++ sandbox/trunk/2to3/HACKING Tue Jul 10 09:33:57 2007 @@ -13,7 +13,6 @@ just see what the parser does. - Putting 2to3 to work somewhere else: * By default, 2to3 uses a merger of Python 2.x and Python 3's grammars. @@ -28,6 +27,8 @@ could leverage 90% of the existing infrastructure with primarily cosmetic changes (e.g., fixes/fix_*.py -> styles/style_*.py). + * The examples/ directory contains fixers that show off 2to3's flexibility, + such as a fixer for whitespace. TODO @@ -35,7 +36,7 @@ Simple: ####### - * Refactor common code out of fixes/fix_*.py into fixes.util (ongoing). + * Refactor common code out of fixes/fix_*.py into fixes.util (on-going). Complex: From python-checkins at python.org Tue Jul 10 09:51:46 2007 From: python-checkins at python.org (collin.winter) Date: Tue, 10 Jul 2007 09:51:46 +0200 (CEST) Subject: [Python-checkins] r56228 - sandbox/trunk/2to3/tests/test_all_fixers.py Message-ID: <20070710075146.25A9C1E4017@bag.python.org> Author: collin.winter Date: Tue Jul 10 09:51:45 2007 New Revision: 56228 Modified: sandbox/trunk/2to3/tests/test_all_fixers.py Log: Tweak test_all_fixers's status report messages. 
Modified: sandbox/trunk/2to3/tests/test_all_fixers.py ============================================================================== --- sandbox/trunk/2to3/tests/test_all_fixers.py (original) +++ sandbox/trunk/2to3/tests/test_all_fixers.py Tue Jul 10 09:51:45 2007 @@ -51,9 +51,8 @@ fixerdir = os.path.join(basedir, "fixes") for filename in os.listdir(fixerdir): if not filename.endswith(".py"): - print "Skipping %s" % filename continue - print "Testing fixer %s..." % filename + print "Fixing %s..." % filename fixer = os.path.join(fixerdir, filename) self.refactor_stream(fixer, open(fixer)) From python-checkins at python.org Tue Jul 10 09:52:18 2007 From: python-checkins at python.org (collin.winter) Date: Tue, 10 Jul 2007 09:52:18 +0200 (CEST) Subject: [Python-checkins] r56229 - sandbox/trunk/2to3/tests/test_parser.py Message-ID: <20070710075218.A89F11E400D@bag.python.org> Author: collin.winter Date: Tue Jul 10 09:52:18 2007 New Revision: 56229 Added: sandbox/trunk/2to3/tests/test_parser.py Log: Add a cut-down version of pytree_idempotency.py as test_parser.py. Added: sandbox/trunk/2to3/tests/test_parser.py ============================================================================== --- (empty file) +++ sandbox/trunk/2to3/tests/test_parser.py Tue Jul 10 09:52:18 2007 @@ -0,0 +1,48 @@ +#!/usr/bin/env python2.5 +"""Test suite for 2to3's parser.""" +# Author: Collin Winter + +# Testing imports +import support +from support import driver, test_dir + +# Python imports +import os +import os.path + +# Local imports +from pgen2.parse import ParseError + + +class TestParserIdempotency(support.TestCase): + + """A cut-down version of pytree_idempotency.py.""" + + def test_2to3_files(self): + proj_dir = os.path.join(test_dir, "..") + + for dirpath, dirnames, filenames in os.walk(proj_dir): + for filename in filenames: + if filename.endswith(".py"): + filepath = os.path.join(dirpath, filename) + print "Parsing %s..." 
% os.path.normpath(filepath) + tree = driver.parse_file(filepath, debug=True) + if diff(filepath, tree): + self.fail("Idempotency failed: %s" % filename) + + +def diff(fn, tree): + f = open("@", "w") + try: + f.write(str(tree)) + finally: + f.close() + try: + return os.system("diff -u %s @" % fn) + finally: + os.remove("@") + + +if __name__ == "__main__": + import __main__ + support.run_all_tests(__main__) From python-checkins at python.org Tue Jul 10 16:13:31 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Tue, 10 Jul 2007 16:13:31 +0200 (CEST) Subject: [Python-checkins] r56240 - python/branches/cpy_merge/Modules/_picklemodule.c Message-ID: <20070710141331.DA7DD1E4003@bag.python.org> Author: alexandre.vassalotti Date: Tue Jul 10 16:13:31 2007 New Revision: 56240 Modified: python/branches/cpy_merge/Modules/_picklemodule.c Log: Add basic subclassing support to Unpickler. Fix docstring of Pickler and Unpickler. Fix a reference leak in Pickler. Modified: python/branches/cpy_merge/Modules/_picklemodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_picklemodule.c (original) +++ python/branches/cpy_merge/Modules/_picklemodule.c Tue Jul 10 16:13:31 2007 @@ -2438,11 +2438,12 @@ self->dispatch_table = dispatch_table; Py_INCREF(file); + self->file = file; if (PyFile_Check(file)) { self->fp = PyFile_AsFile(file); if (self->fp == NULL) { PyErr_SetString(PyExc_ValueError, "I/O operation on closed file"); - goto io_error; + goto error; } self->write_func = write_file; } @@ -2452,13 +2453,13 @@ PyErr_Clear(); PyErr_SetString(PyExc_TypeError, "argument must have 'write' " "attribute"); - goto io_error; + goto error; } self->write_buf = (char *) PyMem_Malloc(WRITE_BUF_SIZE); if (self->write_buf == NULL) { PyErr_NoMemory(); - goto io_error; + goto error; } self->write_func = write_other; } @@ -2466,8 +2467,6 @@ PyObject_GC_Track(self); return (PyObject *) self; - io_error: - 
Py_DECREF(file); error: Py_DECREF(self); return NULL; @@ -2477,6 +2476,7 @@ Pickler_dealloc(PicklerObject *self) { PyObject_GC_UnTrack(self); + Py_XDECREF(self->file); Py_XDECREF(self->write); Py_XDECREF(self->memo); Py_XDECREF(self->fast_memo); @@ -2604,11 +2604,12 @@ }; PyDoc_STRVAR(Pickler_doc, -"Pickler(file, protocol=0) -- Create a pickler.\n" +"Pickler(file, protocol=0) -> new pickler object" "\n" "This takes a file-like object for writing a pickle data stream.\n" -"The optional proto argument tells the pickler to use the given\n" -"protocol; supported protocols are 0, 1, 2. The default\n" +"\n" +"The optional protocol argument tells the pickler to use the\n" +"given protocol; supported protocols are 0, 1, 2. The default\n" "protocol is 0, to be backwards compatible. (Protocol 0 is the\n" "only protocol that can be written to a file opened in text\n" "mode and read back successfully. When using a protocol higher\n" @@ -4812,17 +4813,23 @@ }; -static UnpicklerObject * -newUnpicklerObject(PyObject *f) +static PyObject * +Unpickler_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { + static char *kwlist[] = { "file", NULL }; UnpicklerObject *self; + PyObject *file; + + if (!PyArg_ParseTupleAndKeywords(args, kwds, "O:Unpickler", + kwlist, &file)) + return NULL; - if (!(self = PyObject_GC_New(UnpicklerObject, &Unpickler_Type))) + self = PyObject_GC_New(UnpicklerObject, &Unpickler_Type); + if (self == NULL) return NULL; - self->file = NULL; + self->fp = NULL; self->arg = NULL; - self->stack = (Pdata *) Pdata_New(); self->pers_func = NULL; self->last_string = NULL; self->marks = NULL; @@ -4833,18 +4840,19 @@ self->readline = NULL; self->find_class = NULL; - if (!(self->memo = PyDict_New())) + self->memo = PyDict_New(); + if (self->memo == NULL) goto error; - if (!self->stack) + self->stack = (Pdata *) Pdata_New(); + if (self->stack == NULL) goto error; - Py_INCREF(f); - self->file = f; - - /* Set read, readline based on type of f */ - if (PyFile_Check(f)) { - 
self->fp = PyFile_AsFile(f); + Py_INCREF(file); + self->file = file; + /* Set read, readline based on type of file */ + if (PyFile_Check(file)) { + self->fp = PyFile_AsFile(file); if (self->fp == NULL) { PyErr_SetString(PyExc_ValueError, "I/O operation on closed file"); goto error; @@ -4853,37 +4861,27 @@ self->readline_func = readline_file; } else { - - self->fp = NULL; - self->read_func = read_other; - self->readline_func = readline_other; - - if (!((self->readline = PyObject_GetAttr(f, readline_str)) && - (self->read = PyObject_GetAttr(f, read_str)))) { + self->readline = PyObject_GetAttr(file, readline_str); + self->read = PyObject_GetAttr(file, read_str); + if (self->readline == NULL || self->read == NULL) { PyErr_Clear(); PyErr_SetString(PyExc_TypeError, "argument must have 'read' and " "'readline' attributes"); goto error; } + self->read_func = read_other; + self->readline_func = readline_other; } - PyObject_GC_Track(self); - return self; + PyObject_GC_Track(self); + return (PyObject *)self; error: - Py_DECREF((PyObject *) self); + Py_DECREF(self); return NULL; } - -static PyObject * -get_Unpickler(PyObject *self, PyObject *file) -{ - return (PyObject *) newUnpicklerObject(file); -} - - static void Unpickler_dealloc(UnpicklerObject *self) { @@ -5020,33 +5018,62 @@ return -1; } -PyDoc_STRVAR(Unpickler_Type__doc__, "Objects that know how to unpickle"); +PyDoc_STRVAR(Unpickler_doc, +"Unpickler(file) -> new unpickler object" +"\n" +"This takes a file-like object for reading a pickle data stream.\n" +"\n" +"The protocol version of the pickle is detected automatically, so no\n" +"proto argument is needed.\n" +"\n" +"The file-like object must have two methods, a read() method that\n" +"takes an integer argument, and a readline() method that requires no\n" +"arguments. Both methods should return a string. 
Thus file-like\n" +"object can be a file object opened for reading, a StringIO object,\n" +"or any other custom object that meets this interface.\n"); static PyTypeObject Unpickler_Type = { PyObject_HEAD_INIT(NULL) - 0, /*ob_size */ - "_pickle.Unpickler", /*tp_name */ - sizeof(UnpicklerObject), /*tp_basicsize */ - 0, - (destructor) Unpickler_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - (getattrfunc) Unpickler_getattr, /* tp_getattr */ - (setattrfunc) Unpickler_setattr, /* tp_setattr */ - 0, /* tp_compare */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ + 0, /*ob_size*/ + "_pickle.Unpickler", /*tp_name*/ + sizeof(UnpicklerObject), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + (destructor)Unpickler_dealloc, /*tp_dealloc*/ + 0, /*tp_print*/ + (getattrfunc)Unpickler_getattr, /*tp_getattr*/ + (setattrfunc)Unpickler_setattr, /*tp_setattr*/ + 0, /*tp_compare*/ + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, - Unpickler_Type__doc__, /* tp_doc */ - (traverseproc) Unpickler_traverse, /* tp_traverse */ - (inquiry) Unpickler_clear, /* tp_clear */ + Unpickler_doc, /*tp_doc*/ + (traverseproc)Unpickler_traverse, /*tp_traverse*/ + (inquiry)Unpickler_clear, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + 0, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + 0, /*tp_init*/ + 0, /*tp_alloc*/ + Unpickler_new, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ }; static int @@ -5208,6 +5235,7 @@ return; PyModule_AddObject(m, "Pickler", (PyObject 
*)&Pickler_Type); + PyModule_AddObject(m, "Unpickler", (PyObject *)&Unpickler_Type); /* Add some symbolic constants to the module */ d = PyModule_GetDict(m); From python-checkins at python.org Tue Jul 10 17:32:19 2007 From: python-checkins at python.org (collin.winter) Date: Tue, 10 Jul 2007 17:32:19 +0200 (CEST) Subject: [Python-checkins] r56241 - sandbox/trunk/2to3/pytree.py Message-ID: <20070710153219.5B30E1E4003@bag.python.org> Author: collin.winter Date: Tue Jul 10 17:32:19 2007 New Revision: 56241 Modified: sandbox/trunk/2to3/pytree.py Log: Whitespace cleanup. Modified: sandbox/trunk/2to3/pytree.py ============================================================================== --- sandbox/trunk/2to3/pytree.py (original) +++ sandbox/trunk/2to3/pytree.py Tue Jul 10 17:32:19 2007 @@ -126,12 +126,12 @@ return node = node.children[0] return node.lineno - + def changed(self): if self.parent: self.parent.changed() self.was_changed = True - + def remove(self): """Remove the node from the tree. Returns the position of the node in its parent's children before it was removed.""" @@ -149,7 +149,7 @@ sibling, return None.""" if self.parent is None: return None - + # Can't use index(); we need to test by identity for i, sibling in enumerate(self.parent.children): if sibling is self: @@ -157,7 +157,7 @@ return self.parent.children[i+1] except IndexError: return None - + def get_suffix(self): """Return the string immediately following the invocant node. This is effectively equivalent to node.get_next_sibling().get_prefix()""" @@ -232,20 +232,20 @@ if not self.children: return "" return self.children[0].get_prefix() - + def set_child(self, i, child): """Equivalent to 'node.children[i] = child'. This method also sets the child's parent attribute appropriately.""" child.parent = self self.children[i].parent = None self.children[i] = child - + def insert_child(self, i, child): """Equivalent to 'node.children.insert(i, child)'. 
This method also sets the child's parent attribute appropriately.""" child.parent = self self.children.insert(i, child) - + def append_child(self, child): """Equivalent to 'node.children.append(child)'. This method also sets the child's parent attribute appropriately.""" From python-checkins at python.org Tue Jul 10 17:34:18 2007 From: python-checkins at python.org (collin.winter) Date: Tue, 10 Jul 2007 17:34:18 +0200 (CEST) Subject: [Python-checkins] r56242 - in sandbox/trunk/2to3: pytree.py tests/test_pytree.py Message-ID: <20070710153418.93AAC1E4003@bag.python.org> Author: collin.winter Date: Tue Jul 10 17:34:18 2007 New Revision: 56242 Modified: sandbox/trunk/2to3/pytree.py sandbox/trunk/2to3/tests/test_pytree.py Log: Fix a bug where set_prefix() didn't register that the node had changed. Modified: sandbox/trunk/2to3/pytree.py ============================================================================== --- sandbox/trunk/2to3/pytree.py (original) +++ sandbox/trunk/2to3/pytree.py Tue Jul 10 17:34:18 2007 @@ -304,6 +304,7 @@ def set_prefix(self, prefix): """Sets the prefix for the node.""" + self.changed() self.prefix = prefix def get_prefix(self): Modified: sandbox/trunk/2to3/tests/test_pytree.py ============================================================================== --- sandbox/trunk/2to3/tests/test_pytree.py (original) +++ sandbox/trunk/2to3/tests/test_pytree.py Tue Jul 10 17:34:18 2007 @@ -67,8 +67,10 @@ def testLeafPrefix(self): l1 = pytree.Leaf(100, "foo") self.assertEqual(l1.get_prefix(), "") + self.failIf(l1.was_changed) l1.set_prefix(" ##\n\n") self.assertEqual(l1.get_prefix(), " ##\n\n") + self.failUnless(l1.was_changed) def testNode(self): l1 = pytree.Leaf(100, "foo") From python-checkins at python.org Wed Jul 11 01:48:48 2007 From: python-checkins at python.org (brett.cannon) Date: Wed, 11 Jul 2007 01:48:48 +0200 (CEST) Subject: [Python-checkins] r56246 - sandbox/trunk/import_in_py/_importlib.py sandbox/trunk/import_in_py/importlib.py 
Message-ID: <20070710234848.88A5A1E4007@bag.python.org> Author: brett.cannon Date: Wed Jul 11 01:48:48 2007 New Revision: 56246 Modified: sandbox/trunk/import_in_py/_importlib.py sandbox/trunk/import_in_py/importlib.py Log: Rework importing of built-in modules. Modified: sandbox/trunk/import_in_py/_importlib.py ============================================================================== --- sandbox/trunk/import_in_py/_importlib.py (original) +++ sandbox/trunk/import_in_py/_importlib.py Wed Jul 11 01:48:48 2007 @@ -58,12 +58,7 @@ """ from __future__ import with_statement - -# Built-in modules required by this module to work. -# XXX posix can be named os2 or nt. -# XXX posix guaranteed on all platforms (especially posix.stat)? -_required_builtins = ['imp', 'sys', 'marshal', 'posix'] - +# The injected modules are 'imp', 'sys', 'marshal', 'posix' (aka 'nt' & 'os2'). def _path_join(*args): """Replacement for os.path.join so as to remove dependency on os module.""" Modified: sandbox/trunk/import_in_py/importlib.py ============================================================================== --- sandbox/trunk/import_in_py/importlib.py (original) +++ sandbox/trunk/import_in_py/importlib.py Wed Jul 11 01:48:48 2007 @@ -96,10 +96,22 @@ return False -# Import needed built-in modules. -for builtin_name in _importlib._required_builtins: - module = __import__(builtin_name) - _importlib.__dict__[builtin_name] = module +# Required built-in modules. +import imp, sys, marshal +_importlib.imp = imp +_importlib.sys = sys +_importlib.marshal = marshal +try: + import posix +except ImportError: + try: + import nt as posix + except ImportError: + try: + import os2 as posix + except ImportError: + raise ImportError('posix, nt, or os2 required for importlib') +_importlib.posix = posix # XXX These all need to either go away or become built-in modules # (Neal). 
From python-checkins at python.org Wed Jul 11 01:59:30 2007 From: python-checkins at python.org (brett.cannon) Date: Wed, 11 Jul 2007 01:59:30 +0200 (CEST) Subject: [Python-checkins] r56247 - sandbox/trunk/import_in_py/_importlib.py sandbox/trunk/import_in_py/importlib.py Message-ID: <20070710235930.9794D1E4011@bag.python.org> Author: brett.cannon Date: Wed Jul 11 01:59:30 2007 New Revision: 56247 Modified: sandbox/trunk/import_in_py/_importlib.py sandbox/trunk/import_in_py/importlib.py Log: Remove os.path.(isfile | isdir) dependency. Modified: sandbox/trunk/import_in_py/_importlib.py ============================================================================== --- sandbox/trunk/import_in_py/_importlib.py (original) +++ sandbox/trunk/import_in_py/_importlib.py Wed Jul 11 01:59:30 2007 @@ -74,6 +74,23 @@ return True +def _path_is_mode_type(path, mode): + """Test whether the path is the specified mode type.""" + try: + stat_info = posix.stat(path) + except OSError: + return False + return (stat_info.st_mode & 0170000) == mode + +def _path_isfile(path): + """Replacement for os.path.isfile.""" + return _path_is_mode_type(path, 0100000) + +def _path_isdir(path): + """Replacement for os.path.isdir.""" + return _path_is_mode_type(path, 0040000) + + class _BuiltinFrozenBaseImporter(object): """Base class for meta_path importers for built-in and frozen modules. @@ -164,7 +181,7 @@ """ absolute_path = os.path.abspath(path_entry) - if os.path.isdir(absolute_path): + if _path_isdir(absolute_path): return FileSystemImporter(absolute_path, *self.handlers) else: raise ImportError("can only handle directories") @@ -207,19 +224,19 @@ init_filename = '__init__' + file_ext package_init = _path_join(package_directory, init_filename) # Check if it is a package with an __init__ file. 
- if (os.path.isfile(package_init) and + if (_path_isfile(package_init) and _case_ok(self.path_entry, tail_module) and _case_ok(package_directory, init_filename)): return self.loader(package_init, handler, package_directory) # See if it is a module. file_name = tail_module + file_ext file_path = _path_join(self.path_entry, file_name) - if (os.path.isfile(file_path) and + if (_path_isfile(file_path) and _case_ok(self.path_entry, file_name)): return self.loader(file_path, handler) else: # Raise a warning if it matches a directory w/o an __init__ file. - if (os.path.isdir(package_directory) and + if (_path_isdir(package_directory) and _case_ok(self.path_entry, tail_module)): warnings.warn("Not importing directory %s: missing __init__.py" % package_directory, Modified: sandbox/trunk/import_in_py/importlib.py ============================================================================== --- sandbox/trunk/import_in_py/importlib.py (original) +++ sandbox/trunk/import_in_py/importlib.py Wed Jul 11 01:59:30 2007 @@ -96,6 +96,7 @@ return False + # Required built-in modules. import imp, sys, marshal _importlib.imp = imp From python-checkins at python.org Wed Jul 11 02:03:48 2007 From: python-checkins at python.org (brett.cannon) Date: Wed, 11 Jul 2007 02:03:48 +0200 (CEST) Subject: [Python-checkins] r56248 - sandbox/trunk/import_in_py/_importlib.py sandbox/trunk/import_in_py/importlib.py Message-ID: <20070711000348.2A9271E4007@bag.python.org> Author: brett.cannon Date: Wed Jul 11 02:03:47 2007 New Revision: 56248 Modified: sandbox/trunk/import_in_py/_importlib.py sandbox/trunk/import_in_py/importlib.py Log: Isolate attribute required from errno. 
Modified: sandbox/trunk/import_in_py/_importlib.py ============================================================================== --- sandbox/trunk/import_in_py/_importlib.py (original) +++ sandbox/trunk/import_in_py/_importlib.py Wed Jul 11 02:03:47 2007 @@ -336,7 +336,7 @@ with open(path, 'wb' if binary else 'w') as data_file: data_file.write(data) except IOError as exc: - if exc.errno == errno.EACCES: + if exc.errno == EACCES: pass else: raise Modified: sandbox/trunk/import_in_py/importlib.py ============================================================================== --- sandbox/trunk/import_in_py/importlib.py (original) +++ sandbox/trunk/import_in_py/importlib.py Wed Jul 11 02:03:47 2007 @@ -116,7 +116,7 @@ # XXX These all need to either go away or become built-in modules # (Neal). -import errno +from errno import EACCES import os import warnings @@ -124,8 +124,9 @@ _importlib._w_long = _w_long #XXX Expose original from marshal. _importlib._case_ok = _case_ok #XXX Expose original from imp. _importlib.path_sep = os.sep # For os.path.join replacement. +# For allowing silent failure of .pyc creation when permission is denied. +_importlib.EACCES = EACCES -_importlib.errno = errno _importlib.os = os _importlib.warnings = warnings From python-checkins at python.org Wed Jul 11 02:27:42 2007 From: python-checkins at python.org (brett.cannon) Date: Wed, 11 Jul 2007 02:27:42 +0200 (CEST) Subject: [Python-checkins] r56249 - sandbox/trunk/import_in_py/_importlib.py sandbox/trunk/import_in_py/importlib.py Message-ID: <20070711002742.D22821E4007@bag.python.org> Author: brett.cannon Date: Wed Jul 11 02:27:42 2007 New Revision: 56249 Modified: sandbox/trunk/import_in_py/_importlib.py sandbox/trunk/import_in_py/importlib.py Log: Remove os.path.abspath dependency. That should remove os module dependency entirely from _importlib (still have one in _case_ok, but that will be exposed through imp). 
Modified: sandbox/trunk/import_in_py/_importlib.py ============================================================================== --- sandbox/trunk/import_in_py/_importlib.py (original) +++ sandbox/trunk/import_in_py/_importlib.py Wed Jul 11 02:27:42 2007 @@ -60,6 +60,7 @@ from __future__ import with_statement # The injected modules are 'imp', 'sys', 'marshal', 'posix' (aka 'nt' & 'os2'). +# XXX Could also expose Modules/getpath.c:joinpath() def _path_join(*args): """Replacement for os.path.join so as to remove dependency on os module.""" return path_sep.join(args) @@ -82,14 +83,26 @@ return False return (stat_info.st_mode & 0170000) == mode +# XXX Could also expose Modules/getpath.c:isfile() def _path_isfile(path): """Replacement for os.path.isfile.""" return _path_is_mode_type(path, 0100000) +# XXX Could also expose Modules/getpath.c:isdir() def _path_isdir(path): """Replacement for os.path.isdir.""" return _path_is_mode_type(path, 0040000) +def _path_absolute(path): + """Replacement for os.path.abspath.""" + try: + return posix._getfullpathname(path) + except AttributeError: + if path.startswith('/'): + return path + else: + return _path_join(posix.getcwd(), path) + class _BuiltinFrozenBaseImporter(object): @@ -180,7 +193,7 @@ interactive interpreter usage). """ - absolute_path = os.path.abspath(path_entry) + absolute_path = _path_absolute(path_entry) if _path_isdir(absolute_path): return FileSystemImporter(absolute_path, *self.handlers) else: Modified: sandbox/trunk/import_in_py/importlib.py ============================================================================== --- sandbox/trunk/import_in_py/importlib.py (original) +++ sandbox/trunk/import_in_py/importlib.py Wed Jul 11 02:27:42 2007 @@ -30,7 +30,9 @@ """ import _importlib -#XXX Temporary functions that should eventually be removed. +# XXX Temporary functions that should eventually be removed. 
+import os + def _set__import__(): """Set __import__ to an instance of Import.""" global original__import__ @@ -117,17 +119,16 @@ # XXX These all need to either go away or become built-in modules # (Neal). from errno import EACCES -import os +from os import sep import warnings _importlib._r_long = _r_long #XXX Expose original from marshal. _importlib._w_long = _w_long #XXX Expose original from marshal. _importlib._case_ok = _case_ok #XXX Expose original from imp. -_importlib.path_sep = os.sep # For os.path.join replacement. +_importlib.path_sep = sep # For os.path.join replacement. # For allowing silent failure of .pyc creation when permission is denied. _importlib.EACCES = EACCES -_importlib.os = os _importlib.warnings = warnings del _importlib From python-checkins at python.org Wed Jul 11 14:35:47 2007 From: python-checkins at python.org (thomas.wouters) Date: Wed, 11 Jul 2007 14:35:47 +0200 (CEST) Subject: [Python-checkins] r56262 - peps/trunk/pep-3100.txt Message-ID: <20070711123547.74AE41E4005@bag.python.org> Author: thomas.wouters Date: Wed Jul 11 14:35:47 2007 New Revision: 56262 Modified: peps/trunk/pep-3100.txt Log: Update for dict comprehensions. Modified: peps/trunk/pep-3100.txt ============================================================================== --- peps/trunk/pep-3100.txt (original) +++ peps/trunk/pep-3100.txt Wed Jul 11 14:35:47 2007 @@ -112,6 +112,8 @@ * The ``__nonzero__`` special method will be renamed to ``__bool__`` and have to return a bool. The typeobject slot will be called ``tp_bool`` [23]_ [done] +* Dict comprehensions, as first proposed in [#pep274]_ [done] + {K(x): V(x) for x in S if P(x)} means dict((K(x), V(x)) for x in S if P(x)). To be removed: @@ -371,6 +373,9 @@ .. [#pep238] PEP 238 (Changing the Division Operator) http://www.python.org/dev/peps/pep-0238 +.. [#pep274] PEP 274 (Dict Comprehensions) + http://www.python.org/dev/peps/pep-0274 + .. 
[#pep289] PEP 289 ("Generator Expressions") http://www.python.org/dev/peps/pep-0289 From python-checkins at python.org Wed Jul 11 15:00:58 2007 From: python-checkins at python.org (collin.winter) Date: Wed, 11 Jul 2007 15:00:58 +0200 (CEST) Subject: [Python-checkins] r56265 - sandbox/trunk/2to3/fixes/fix_next.py Message-ID: <20070711130058.C4F7C1E4017@bag.python.org> Author: collin.winter Date: Wed Jul 11 15:00:58 2007 New Revision: 56265 Modified: sandbox/trunk/2to3/fixes/fix_next.py Log: Whitespace cleanup in fix_next. Modified: sandbox/trunk/2to3/fixes/fix_next.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_next.py (original) +++ sandbox/trunk/2to3/fixes/fix_next.py Wed Jul 11 15:00:58 2007 @@ -107,7 +107,7 @@ if n: self.warning(n, bind_warning) self.shadowed_next = True - + def finish_tree(self, tree, filename): super(FixNext, self).finish_tree(tree, filename) if self.shadowed_next: @@ -119,17 +119,17 @@ ### target. def is_assign_target(node): - assign = find_assign(node) + assign = find_assign(node) if assign is None: return False - + for child in assign.children: if child.type == token.EQUAL: return False elif is_subtree(child, node): return True return False - + def find_assign(node): if node.type == syms.expr_stmt: return node From python-checkins at python.org Wed Jul 11 17:11:45 2007 From: python-checkins at python.org (collin.winter) Date: Wed, 11 Jul 2007 17:11:45 +0200 (CEST) Subject: [Python-checkins] r56275 - sandbox/trunk/2to3/fixes/fix_dict.py sandbox/trunk/2to3/fixes/util.py Message-ID: <20070711151145.981FB1E4005@bag.python.org> Author: collin.winter Date: Wed Jul 11 17:11:45 2007 New Revision: 56275 Modified: sandbox/trunk/2to3/fixes/fix_dict.py sandbox/trunk/2to3/fixes/util.py Log: Add a Dot() node macro. 
Modified: sandbox/trunk/2to3/fixes/fix_dict.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_dict.py (original) +++ sandbox/trunk/2to3/fixes/fix_dict.py Wed Jul 11 17:11:45 2007 @@ -27,7 +27,7 @@ import patcomp from pgen2 import token from fixes import basefix -from fixes.util import Name, Call, LParen, RParen, ArgList +from fixes.util import Name, Call, LParen, RParen, ArgList, Dot class FixDict(basefix.BaseFix): PATTERN = """ @@ -54,7 +54,7 @@ tail = [n.clone() for n in tail] special = not tail and self.in_special_context(node, isiter) args = head + [pytree.Node(syms.trailer, - [pytree.Leaf(token.DOT, '.'), + [Dot(), Name(method_name, prefix=method.get_prefix())]), results["parens"].clone()] Modified: sandbox/trunk/2to3/fixes/util.py ============================================================================== --- sandbox/trunk/2to3/fixes/util.py (original) +++ sandbox/trunk/2to3/fixes/util.py Wed Jul 11 17:11:45 2007 @@ -41,13 +41,16 @@ def Attr(obj, attr): """A node tuple for obj.attr""" - return [obj, - Node(syms.trailer, [Leaf(token.DOT, '.'), attr])] + return [obj, Node(syms.trailer, [Dot(), attr])] def Comma(): """A comma leaf""" return Leaf(token.COMMA, ",") +def Dot(): + """A period (.) 
leaf""" + return Leaf(token.DOT, ".") + def ArgList(args, lparen=LParen(), rparen=RParen()): """A parenthesised argument list, used by Call()""" return Node(syms.trailer, From python-checkins at python.org Wed Jul 11 19:34:03 2007 From: python-checkins at python.org (phillip.eby) Date: Wed, 11 Jul 2007 19:34:03 +0200 (CEST) Subject: [Python-checkins] r56276 - in sandbox/trunk/setuptools/setuptools: __init__.py command/egg_info.py Message-ID: <20070711173403.132BA1E400A@bag.python.org> Author: phillip.eby Date: Wed Jul 11 19:34:02 2007 New Revision: 56276 Modified: sandbox/trunk/setuptools/setuptools/__init__.py sandbox/trunk/setuptools/setuptools/command/egg_info.py Log: Fix distutils.filelist.findall() crashing on broken symlinks. Fix egg_info failures on new, uncommitted SVN directories. Modified: sandbox/trunk/setuptools/setuptools/__init__.py ============================================================================== --- sandbox/trunk/setuptools/setuptools/__init__.py (original) +++ sandbox/trunk/setuptools/setuptools/__init__.py Wed Jul 11 19:34:02 2007 @@ -62,3 +62,21 @@ import distutils.core distutils.core.Command = Command # we can't patch distutils.cmd, alas + +def findall(dir = os.curdir): + """Find all files under 'dir' and return the list of full filenames + (relative to 'dir'). + """ + all_files = [] + for base, dirs, files in os.walk(dir): + if base!=os.curdir: + files = [os.path.join(base, f) for f in files] + all_files.extend(filter(os.path.isfile, files)) + return all_files + +import distutils.filelist +distutils.filelist.findall = findall # fix findall bug in distutils. 
+ + + + Modified: sandbox/trunk/setuptools/setuptools/command/egg_info.py ============================================================================== --- sandbox/trunk/setuptools/setuptools/command/egg_info.py (original) +++ sandbox/trunk/setuptools/setuptools/command/egg_info.py Wed Jul 11 19:34:02 2007 @@ -221,10 +221,10 @@ data = map(str.splitlines,data.split('\n\x0c\n')) del data[0][0] # get rid of the '8' dirurl = data[0][3] - localrev = max([int(d[9]) for d in data if len(d)>9 and d[9]]) + localrev = max([int(d[9]) for d in data if len(d)>9 and d[9]]+[0]) elif data.startswith(' Author: phillip.eby Date: Wed Jul 11 19:37:17 2007 New Revision: 56277 Modified: sandbox/branches/setuptools-0.6/setuptools.egg-info/entry_points.txt sandbox/branches/setuptools-0.6/setuptools/__init__.py sandbox/branches/setuptools-0.6/setuptools/command/egg_info.py Log: Fix distutils.filelist.findall() crashing on broken symlinks. Fix egg_info failures on new, uncommitted SVN directories. Modified: sandbox/branches/setuptools-0.6/setuptools.egg-info/entry_points.txt ============================================================================== --- sandbox/branches/setuptools-0.6/setuptools.egg-info/entry_points.txt (original) +++ sandbox/branches/setuptools-0.6/setuptools.egg-info/entry_points.txt Wed Jul 11 19:37:17 2007 @@ -1,38 +1,3 @@ -[distutils.setup_keywords] -dependency_links = setuptools.dist:assert_string_list -entry_points = setuptools.dist:check_entry_points -extras_require = setuptools.dist:check_extras -package_data = setuptools.dist:check_package_data -install_requires = setuptools.dist:check_requirements -include_package_data = setuptools.dist:assert_bool -exclude_package_data = setuptools.dist:check_package_data -namespace_packages = setuptools.dist:check_nsp -test_suite = setuptools.dist:check_test_suite -eager_resources = setuptools.dist:assert_string_list -zip_safe = setuptools.dist:assert_bool -test_loader = setuptools.dist:check_importable -tests_require = 
setuptools.dist:check_requirements - -[setuptools.file_finders] -svn_cvs = setuptools.command.sdist:_default_revctrl - -[egg_info.writers] -dependency_links.txt = setuptools.command.egg_info:overwrite_arg -requires.txt = setuptools.command.egg_info:write_requirements -PKG-INFO = setuptools.command.egg_info:write_pkg_info -eager_resources.txt = setuptools.command.egg_info:overwrite_arg -top_level.txt = setuptools.command.egg_info:write_toplevel_names -namespace_packages.txt = setuptools.command.egg_info:overwrite_arg -entry_points.txt = setuptools.command.egg_info:write_entries -depends.txt = setuptools.command.egg_info:warn_depends_obsolete - -[console_scripts] -easy_install = setuptools.command.easy_install:main -easy_install-2.4 = setuptools.command.easy_install:main - -[setuptools.installation] -eggsecutable = setuptools.command.easy_install:bootstrap - [distutils.commands] bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm rotate = setuptools.command.rotate:rotate @@ -42,7 +7,6 @@ saveopts = setuptools.command.saveopts:saveopts egg_info = setuptools.command.egg_info:egg_info register = setuptools.command.register:register -upload = setuptools.command.upload:upload install_egg_info = setuptools.command.install_egg_info:install_egg_info alias = setuptools.command.alias:alias easy_install = setuptools.command.easy_install:easy_install @@ -54,3 +18,39 @@ install_lib = setuptools.command.install_lib:install_lib build_ext = setuptools.command.build_ext:build_ext sdist = setuptools.command.sdist:sdist + +[egg_info.writers] +dependency_links.txt = setuptools.command.egg_info:overwrite_arg +requires.txt = setuptools.command.egg_info:write_requirements +PKG-INFO = setuptools.command.egg_info:write_pkg_info +eager_resources.txt = setuptools.command.egg_info:overwrite_arg +top_level.txt = setuptools.command.egg_info:write_toplevel_names +namespace_packages.txt = setuptools.command.egg_info:overwrite_arg +entry_points.txt = setuptools.command.egg_info:write_entries 
+depends.txt = setuptools.command.egg_info:warn_depends_obsolete + +[console_scripts] +easy_install = setuptools.command.easy_install:main +easy_install-2.5 = setuptools.command.easy_install:main + +[setuptools.file_finders] +svn_cvs = setuptools.command.sdist:_default_revctrl + +[distutils.setup_keywords] +dependency_links = setuptools.dist:assert_string_list +entry_points = setuptools.dist:check_entry_points +extras_require = setuptools.dist:check_extras +package_data = setuptools.dist:check_package_data +install_requires = setuptools.dist:check_requirements +include_package_data = setuptools.dist:assert_bool +exclude_package_data = setuptools.dist:check_package_data +namespace_packages = setuptools.dist:check_nsp +test_suite = setuptools.dist:check_test_suite +eager_resources = setuptools.dist:assert_string_list +zip_safe = setuptools.dist:assert_bool +test_loader = setuptools.dist:check_importable +tests_require = setuptools.dist:check_requirements + +[setuptools.installation] +eggsecutable = setuptools.command.easy_install:bootstrap + Modified: sandbox/branches/setuptools-0.6/setuptools/__init__.py ============================================================================== --- sandbox/branches/setuptools-0.6/setuptools/__init__.py (original) +++ sandbox/branches/setuptools-0.6/setuptools/__init__.py Wed Jul 11 19:37:17 2007 @@ -63,19 +63,19 @@ import distutils.core distutils.core.Command = Command # we can't patch distutils.cmd, alas +def findall(dir = os.curdir): + """Find all files under 'dir' and return the list of full filenames + (relative to 'dir'). + """ + all_files = [] + for base, dirs, files in os.walk(dir): + if base!=os.curdir: + files = [os.path.join(base, f) for f in files] + all_files.extend(filter(os.path.isfile, files)) + return all_files - - - - - - - - - - - - +import distutils.filelist +distutils.filelist.findall = findall # fix findall bug in distutils. 
Modified: sandbox/branches/setuptools-0.6/setuptools/command/egg_info.py ============================================================================== --- sandbox/branches/setuptools-0.6/setuptools/command/egg_info.py (original) +++ sandbox/branches/setuptools-0.6/setuptools/command/egg_info.py Wed Jul 11 19:37:17 2007 @@ -221,10 +221,10 @@ data = map(str.splitlines,data.split('\n\x0c\n')) del data[0][0] # get rid of the '8' dirurl = data[0][3] - localrev = max([int(d[9]) for d in data if len(d)>9 and d[9]]) + localrev = max([int(d[9]) for d in data if len(d)>9 and d[9]]+[0]) elif data.startswith(' Author: phillip.eby Date: Wed Jul 11 19:38:18 2007 New Revision: 56278 Modified: sandbox/branches/setuptools-0.6/setuptools.txt Log: Update release notes w/fixes. Modified: sandbox/branches/setuptools-0.6/setuptools.txt ============================================================================== --- sandbox/branches/setuptools-0.6/setuptools.txt (original) +++ sandbox/branches/setuptools-0.6/setuptools.txt Wed Jul 11 19:38:18 2007 @@ -2611,6 +2611,11 @@ Release Notes/Change History ---------------------------- +0.6c7 + + * Fixed ``distutils.filelist.findall()`` crashing on broken symlinks, and + ``egg_info`` command failing on new, uncommitted SVN directories. + 0.6c6 * Added ``--egg-path`` option to ``develop`` command, allowing you to force ``.egg-link`` files to use relative paths (allowing them to be shared across From python-checkins at python.org Wed Jul 11 21:41:49 2007 From: python-checkins at python.org (georg.brandl) Date: Wed, 11 Jul 2007 21:41:49 +0200 (CEST) Subject: [Python-checkins] r56280 - python/trunk/Include/opcode.h Message-ID: <20070711194149.78A201E400E@bag.python.org> Author: georg.brandl Date: Wed Jul 11 21:41:49 2007 New Revision: 56280 Modified: python/trunk/Include/opcode.h Log: Fix #1752132: wrong comment in opcode description. 
Modified: python/trunk/Include/opcode.h ============================================================================== --- python/trunk/Include/opcode.h (original) +++ python/trunk/Include/opcode.h Wed Jul 11 21:41:49 2007 @@ -112,7 +112,7 @@ #define LOAD_GLOBAL 116 /* Index in name list */ #define CONTINUE_LOOP 119 /* Start of loop (absolute) */ -#define SETUP_LOOP 120 /* Target address (absolute) */ +#define SETUP_LOOP 120 /* Target address (relative) */ #define SETUP_EXCEPT 121 /* "" */ #define SETUP_FINALLY 122 /* "" */ From python-checkins at python.org Wed Jul 11 21:41:53 2007 From: python-checkins at python.org (georg.brandl) Date: Wed, 11 Jul 2007 21:41:53 +0200 (CEST) Subject: [Python-checkins] r56281 - python/branches/release25-maint/Include/opcode.h Message-ID: <20070711194153.5A0AB1E400C@bag.python.org> Author: georg.brandl Date: Wed Jul 11 21:41:53 2007 New Revision: 56281 Modified: python/branches/release25-maint/Include/opcode.h Log: Fix #1752132: wrong comment in opcode description. (backport from rev. 56280) Modified: python/branches/release25-maint/Include/opcode.h ============================================================================== --- python/branches/release25-maint/Include/opcode.h (original) +++ python/branches/release25-maint/Include/opcode.h Wed Jul 11 21:41:53 2007 @@ -112,7 +112,7 @@ #define LOAD_GLOBAL 116 /* Index in name list */ #define CONTINUE_LOOP 119 /* Start of loop (absolute) */ -#define SETUP_LOOP 120 /* Target address (absolute) */ +#define SETUP_LOOP 120 /* Target address (relative) */ #define SETUP_EXCEPT 121 /* "" */ #define SETUP_FINALLY 122 /* "" */ From buildbot at python.org Wed Jul 11 22:00:19 2007 From: buildbot at python.org (buildbot at python.org) Date: Wed, 11 Jul 2007 20:00:19 +0000 Subject: [Python-checkins] buildbot warnings in x86 W2k trunk Message-ID: <20070711200032.E8F5C1E4005@bag.python.org> The Buildbot has detected a new failure of x86 W2k trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520W2k%2520trunk/builds/388 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: georg.brandl Build had warnings: warnings failed slave lost sincerely, -The Buildbot From buildbot at python.org Wed Jul 11 22:20:22 2007 From: buildbot at python.org (buildbot at python.org) Date: Wed, 11 Jul 2007 20:20:22 +0000 Subject: [Python-checkins] buildbot warnings in x86 XP trunk Message-ID: <20070711202023.108641E400D@bag.python.org> The Buildbot has detected a new failure of x86 XP trunk. Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520XP%2520trunk/builds/504 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: georg.brandl Build had warnings: warnings failed slave lost sincerely, -The Buildbot From buildbot at python.org Wed Jul 11 22:30:31 2007 From: buildbot at python.org (buildbot at python.org) Date: Wed, 11 Jul 2007 20:30:31 +0000 Subject: [Python-checkins] buildbot warnings in ppc Debian unstable trunk Message-ID: <20070711203031.71D0C1E4005@bag.python.org> The Buildbot has detected a new failure of ppc Debian unstable trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/ppc%2520Debian%2520unstable%2520trunk/builds/41 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: georg.brandl Build had warnings: warnings test Excerpt from the test logfile: Traceback (most recent call last): File "/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/test/test_socketserver.py", line 93, in run svr.serve_a_few() File "/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/test/test_socketserver.py", line 35, in serve_a_few self.handle_request() File "/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/SocketServer.py", line 224, in handle_request self.handle_error(request, client_address) File "/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/SocketServer.py", line 222, in handle_request self.process_request(request, client_address) File "/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/SocketServer.py", line 429, in process_request self.collect_children() File "/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/SocketServer.py", line 425, in collect_children self.active_children.remove(pid) ValueError: list.remove(x): x not in list 1 test failed: test_socketserver Traceback (most recent call last): File "./Lib/test/regrtest.py", line 557, in runtest_inner indirect_test() File "/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/test/test_socketserver.py", line 216, in test_main testall() File "/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/test/test_socketserver.py", line 199, in testall testloop(socket.AF_INET, tcpservers, MyStreamHandler, teststream) File "/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/test/test_socketserver.py", line 148, in testloop testfunc(proto, addr) File 
"/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/test/test_socketserver.py", line 62, in teststream s.connect(addr) File "", line 1, in connect error: (111, 'Connection refused') make: *** [buildbottest] Error 1 sincerely, -The Buildbot From python-checkins at python.org Thu Jul 12 10:05:45 2007 From: python-checkins at python.org (georg.brandl) Date: Thu, 12 Jul 2007 10:05:45 +0200 (CEST) Subject: [Python-checkins] r56293 - in python/trunk/Lib: test/test_urllib2.py urllib2.py Message-ID: <20070712080545.AB6061E400A@bag.python.org> Author: georg.brandl Date: Thu Jul 12 10:05:45 2007 New Revision: 56293 Modified: python/trunk/Lib/test/test_urllib2.py python/trunk/Lib/urllib2.py Log: Patch #1752270, #1750931: complain if urllib2 add_handler called without handler. Modified: python/trunk/Lib/test/test_urllib2.py ============================================================================== --- python/trunk/Lib/test/test_urllib2.py (original) +++ python/trunk/Lib/test/test_urllib2.py Thu Jul 12 10:05:45 2007 @@ -381,6 +381,12 @@ class OpenerDirectorTests(unittest.TestCase): + def test_add_non_handler(self): + class NonHandler(object): + pass + self.assertRaises(TypeError, + OpenerDirector().add_handler, NonHandler()) + def test_badly_named_methods(self): # test work-around for three methods that accidentally follow the # naming conventions for handler methods Modified: python/trunk/Lib/urllib2.py ============================================================================== --- python/trunk/Lib/urllib2.py (original) +++ python/trunk/Lib/urllib2.py Thu Jul 12 10:05:45 2007 @@ -295,6 +295,10 @@ self.process_request = {} def add_handler(self, handler): + if not hasattr(handler, "add_parent"): + raise TypeError("expected BaseHandler instance, got %r" % + type(handler)) + added = False for meth in dir(handler): if meth in ["redirect_request", "do_open", "proxy_open"]: From python-checkins at python.org Thu Jul 12 10:05:48 2007 From: python-checkins at python.org 
(georg.brandl) Date: Thu, 12 Jul 2007 10:05:48 +0200 (CEST) Subject: [Python-checkins] r56294 - in python/branches/release25-maint/Lib: test/test_urllib2.py urllib2.py Message-ID: <20070712080548.ADDAE1E400E@bag.python.org> Author: georg.brandl Date: Thu Jul 12 10:05:48 2007 New Revision: 56294 Modified: python/branches/release25-maint/Lib/test/test_urllib2.py python/branches/release25-maint/Lib/urllib2.py Log: Patch #1752270, #1750931: complain if urllib2 add_handler called without handler. (backport from rev. 56293) Modified: python/branches/release25-maint/Lib/test/test_urllib2.py ============================================================================== --- python/branches/release25-maint/Lib/test/test_urllib2.py (original) +++ python/branches/release25-maint/Lib/test/test_urllib2.py Thu Jul 12 10:05:48 2007 @@ -381,6 +381,12 @@ class OpenerDirectorTests(unittest.TestCase): + def test_add_non_handler(self): + class NonHandler(object): + pass + self.assertRaises(TypeError, + OpenerDirector().add_handler, NonHandler()) + def test_badly_named_methods(self): # test work-around for three methods that accidentally follow the # naming conventions for handler methods Modified: python/branches/release25-maint/Lib/urllib2.py ============================================================================== --- python/branches/release25-maint/Lib/urllib2.py (original) +++ python/branches/release25-maint/Lib/urllib2.py Thu Jul 12 10:05:48 2007 @@ -298,6 +298,10 @@ self.process_request = {} def add_handler(self, handler): + if not hasattr(handler, "add_parent"): + raise TypeError("expected BaseHandler instance, got %r" % + type(handler)) + added = False for meth in dir(handler): if meth in ["redirect_request", "do_open", "proxy_open"]: From python-checkins at python.org Thu Jul 12 10:11:30 2007 From: python-checkins at python.org (georg.brandl) Date: Thu, 12 Jul 2007 10:11:30 +0200 (CEST) Subject: [Python-checkins] r56296 - python/trunk/Lib/inspect.py Message-ID: 
<20070712081130.3DF991E400B@bag.python.org> Author: georg.brandl Date: Thu Jul 12 10:11:29 2007 New Revision: 56296 Modified: python/trunk/Lib/inspect.py Log: Patch #1739696: use code.co_code only if really necessary Modified: python/trunk/Lib/inspect.py ============================================================================== --- python/trunk/Lib/inspect.py (original) +++ python/trunk/Lib/inspect.py Thu Jul 12 10:11:29 2007 @@ -679,7 +679,6 @@ if not iscode(co): raise TypeError('arg is not a code object') - code = co.co_code nargs = co.co_argcount names = co.co_varnames args = list(names[:nargs]) @@ -689,12 +688,12 @@ for i in range(nargs): if args[i][:1] in ('', '.'): stack, remain, count = [], [], [] - while step < len(code): - op = ord(code[step]) + while step < len(co.co_code): + op = ord(co.co_code[step]) step = step + 1 if op >= dis.HAVE_ARGUMENT: opname = dis.opname[op] - value = ord(code[step]) + ord(code[step+1])*256 + value = ord(co.co_code[step]) + ord(co.co_code[step+1])*256 step = step + 2 if opname in ('UNPACK_TUPLE', 'UNPACK_SEQUENCE'): remain.append(value) From python-checkins at python.org Thu Jul 12 10:11:33 2007 From: python-checkins at python.org (georg.brandl) Date: Thu, 12 Jul 2007 10:11:33 +0200 (CEST) Subject: [Python-checkins] r56297 - python/branches/release25-maint/Lib/inspect.py Message-ID: <20070712081133.437E51E400B@bag.python.org> Author: georg.brandl Date: Thu Jul 12 10:11:32 2007 New Revision: 56297 Modified: python/branches/release25-maint/Lib/inspect.py Log: Patch #1739696: use code.co_code only if really necessary (backport from rev. 
56296) Modified: python/branches/release25-maint/Lib/inspect.py ============================================================================== --- python/branches/release25-maint/Lib/inspect.py (original) +++ python/branches/release25-maint/Lib/inspect.py Thu Jul 12 10:11:32 2007 @@ -679,7 +679,6 @@ if not iscode(co): raise TypeError('arg is not a code object') - code = co.co_code nargs = co.co_argcount names = co.co_varnames args = list(names[:nargs]) @@ -689,12 +688,12 @@ for i in range(nargs): if args[i][:1] in ('', '.'): stack, remain, count = [], [], [] - while step < len(code): - op = ord(code[step]) + while step < len(co.co_code): + op = ord(co.co_code[step]) step = step + 1 if op >= dis.HAVE_ARGUMENT: opname = dis.opname[op] - value = ord(code[step]) + ord(code[step+1])*256 + value = ord(co.co_code[step]) + ord(co.co_code[step+1])*256 step = step + 2 if opname in ('UNPACK_TUPLE', 'UNPACK_SEQUENCE'): remain.append(value) From python-checkins at python.org Thu Jul 12 10:38:01 2007 From: python-checkins at python.org (georg.brandl) Date: Thu, 12 Jul 2007 10:38:01 +0200 (CEST) Subject: [Python-checkins] r56298 - in python/trunk: Lib/test/test_format.py Misc/NEWS Objects/stringobject.c Objects/unicodeobject.c Message-ID: <20070712083801.A686A1E400B@bag.python.org> Author: georg.brandl Date: Thu Jul 12 10:38:00 2007 New Revision: 56298 Modified: python/trunk/Lib/test/test_format.py python/trunk/Misc/NEWS python/trunk/Objects/stringobject.c python/trunk/Objects/unicodeobject.c Log: Patch #1673759: add a missing overflow check when formatting floats with %G. 
Modified: python/trunk/Lib/test/test_format.py ============================================================================== --- python/trunk/Lib/test/test_format.py (original) +++ python/trunk/Lib/test/test_format.py Thu Jul 12 10:38:00 2007 @@ -9,6 +9,7 @@ # test on unicode strings as well overflowok = 1 +overflowrequired = 0 def testformat(formatstr, args, output=None): if verbose: @@ -25,11 +26,16 @@ if verbose: print 'overflow (this is fine)' else: - if output and result != output: + if overflowrequired: if verbose: print 'no' - print "%s %% %s == %s != %s" %\ - (repr(formatstr), repr(args), repr(result), repr(output)) + print "overflow expected on %s %% %s" % \ + (repr(formatstr), repr(args)) + elif output and result != output: + if verbose: + print 'no' + print "%s %% %s == %s != %s" % \ + (repr(formatstr), repr(args), repr(result), repr(output)) else: if verbose: print 'yes' @@ -57,6 +63,14 @@ # test some ridiculously large precision, expect overflow testboth('%12.*f', (123456, 1.0)) +# check for internal overflow validation on length of precision +overflowrequired = 1 +testboth("%#.*g", (110, -1.e+100/3.)) +testboth("%#.*G", (110, -1.e+100/3.)) +testboth("%#.*f", (110, -1.e+100/3.)) +testboth("%#.*F", (110, -1.e+100/3.)) +overflowrequired = 0 + # Formatting of long integers. Overflow is not ok overflowok = 0 testboth("%x", 10L, "a") Modified: python/trunk/Misc/NEWS ============================================================================== --- python/trunk/Misc/NEWS (original) +++ python/trunk/Misc/NEWS Thu Jul 12 10:38:00 2007 @@ -12,6 +12,9 @@ Core and builtins ----------------- +- Patch #1673759: add a missing overflow check when formatting floats + with %G. + - Patch #1733960: Allow T_LONGLONG to accept ints. - T_PYSSIZET can now be used in PyMemberDef lists for Py_ssize_t members. 
Modified: python/trunk/Objects/stringobject.c ============================================================================== --- python/trunk/Objects/stringobject.c (original) +++ python/trunk/Objects/stringobject.c Thu Jul 12 10:38:00 2007 @@ -4198,7 +4198,8 @@ always given), therefore increase the length by one. */ - if ((type == 'g' && buflen <= (size_t)10 + (size_t)prec) || + if (((type == 'g' || type == 'G') && + buflen <= (size_t)10 + (size_t)prec) || (type == 'f' && buflen <= (size_t)53 + (size_t)prec)) { PyErr_SetString(PyExc_OverflowError, "formatted float is too long (precision too large?)"); Modified: python/trunk/Objects/unicodeobject.c ============================================================================== --- python/trunk/Objects/unicodeobject.c (original) +++ python/trunk/Objects/unicodeobject.c Thu Jul 12 10:38:00 2007 @@ -7294,7 +7294,8 @@ always given), therefore increase the length by one. */ - if ((type == 'g' && buflen <= (size_t)10 + (size_t)prec) || + if (((type == 'g' || type == 'G') && + buflen <= (size_t)10 + (size_t)prec) || (type == 'f' && buflen <= (size_t)53 + (size_t)prec)) { PyErr_SetString(PyExc_OverflowError, "formatted float is too long (precision too large?)"); From python-checkins at python.org Thu Jul 12 10:38:05 2007 From: python-checkins at python.org (georg.brandl) Date: Thu, 12 Jul 2007 10:38:05 +0200 (CEST) Subject: [Python-checkins] r56299 - in python/branches/release25-maint: Lib/test/test_format.py Misc/NEWS Objects/stringobject.c Objects/unicodeobject.c Message-ID: <20070712083805.28FEB1E4010@bag.python.org> Author: georg.brandl Date: Thu Jul 12 10:38:04 2007 New Revision: 56299 Modified: python/branches/release25-maint/Lib/test/test_format.py python/branches/release25-maint/Misc/NEWS python/branches/release25-maint/Objects/stringobject.c python/branches/release25-maint/Objects/unicodeobject.c Log: Patch #1673759: add a missing overflow check when formatting floats with %G. (backport from rev. 
56298) Modified: python/branches/release25-maint/Lib/test/test_format.py ============================================================================== --- python/branches/release25-maint/Lib/test/test_format.py (original) +++ python/branches/release25-maint/Lib/test/test_format.py Thu Jul 12 10:38:04 2007 @@ -9,6 +9,7 @@ # test on unicode strings as well overflowok = 1 +overflowrequired = 0 def testformat(formatstr, args, output=None): if verbose: @@ -25,11 +26,16 @@ if verbose: print 'overflow (this is fine)' else: - if output and result != output: + if overflowrequired: if verbose: print 'no' - print "%s %% %s == %s != %s" %\ - (repr(formatstr), repr(args), repr(result), repr(output)) + print "overflow expected on %s %% %s" % \ + (repr(formatstr), repr(args)) + elif output and result != output: + if verbose: + print 'no' + print "%s %% %s == %s != %s" % \ + (repr(formatstr), repr(args), repr(result), repr(output)) else: if verbose: print 'yes' @@ -57,6 +63,14 @@ # test some ridiculously large precision, expect overflow testboth('%12.*f', (123456, 1.0)) +# check for internal overflow validation on length of precision +overflowrequired = 1 +testboth("%#.*g", (110, -1.e+100/3.)) +testboth("%#.*G", (110, -1.e+100/3.)) +testboth("%#.*f", (110, -1.e+100/3.)) +testboth("%#.*F", (110, -1.e+100/3.)) +overflowrequired = 0 + # Formatting of long integers. Overflow is not ok overflowok = 0 testboth("%x", 10L, "a") Modified: python/branches/release25-maint/Misc/NEWS ============================================================================== --- python/branches/release25-maint/Misc/NEWS (original) +++ python/branches/release25-maint/Misc/NEWS Thu Jul 12 10:38:04 2007 @@ -12,6 +12,9 @@ Core and builtins ----------------- +- Patch #1673759: add a missing overflow check when formatting floats + with %G. + - Patch #1733960: Allow T_LONGLONG to accept ints. 
- Prevent expandtabs() on string and unicode objects from causing a segfault Modified: python/branches/release25-maint/Objects/stringobject.c ============================================================================== --- python/branches/release25-maint/Objects/stringobject.c (original) +++ python/branches/release25-maint/Objects/stringobject.c Thu Jul 12 10:38:04 2007 @@ -4188,7 +4188,8 @@ always given), therefore increase the length by one. */ - if ((type == 'g' && buflen <= (size_t)10 + (size_t)prec) || + if (((type == 'g' || type == 'G') && + buflen <= (size_t)10 + (size_t)prec) || (type == 'f' && buflen <= (size_t)53 + (size_t)prec)) { PyErr_SetString(PyExc_OverflowError, "formatted float is too long (precision too large?)"); Modified: python/branches/release25-maint/Objects/unicodeobject.c ============================================================================== --- python/branches/release25-maint/Objects/unicodeobject.c (original) +++ python/branches/release25-maint/Objects/unicodeobject.c Thu Jul 12 10:38:04 2007 @@ -7290,7 +7290,8 @@ always given), therefore increase the length by one. */ - if ((type == 'g' && buflen <= (size_t)10 + (size_t)prec) || + if (((type == 'g' || type == 'G') && + buflen <= (size_t)10 + (size_t)prec) || (type == 'f' && buflen <= (size_t)53 + (size_t)prec)) { PyErr_SetString(PyExc_OverflowError, "formatted float is too long (precision too large?)"); From buildbot at python.org Thu Jul 12 10:42:48 2007 From: buildbot at python.org (buildbot at python.org) Date: Thu, 12 Jul 2007 08:42:48 +0000 Subject: [Python-checkins] buildbot warnings in amd64 XP trunk Message-ID: <20070712084248.A35BA1E400A@bag.python.org> The Buildbot has detected a new failure of amd64 XP trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/amd64%2520XP%2520trunk/builds/39 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: georg.brandl Build had warnings: warnings test Excerpt from the test logfile: 2 tests failed: test_ctypes test_winsound ====================================================================== ERROR: test_extremes (test.test_winsound.BeepTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 18, in test_extremes winsound.Beep(37, 75) RuntimeError: Failed to beep ====================================================================== ERROR: test_increasingfrequency (test.test_winsound.BeepTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 23, in test_increasingfrequency winsound.Beep(i, 75) RuntimeError: Failed to beep ====================================================================== ERROR: test_alias_asterisk (test.test_winsound.PlaySoundTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 64, in test_alias_asterisk winsound.PlaySound('SystemAsterisk', winsound.SND_ALIAS) RuntimeError: Failed to play sound ====================================================================== ERROR: test_alias_exclamation (test.test_winsound.PlaySoundTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 74, in test_alias_exclamation winsound.PlaySound('SystemExclamation', winsound.SND_ALIAS) 
RuntimeError: Failed to play sound ====================================================================== ERROR: test_alias_exit (test.test_winsound.PlaySoundTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 84, in test_alias_exit winsound.PlaySound('SystemExit', winsound.SND_ALIAS) RuntimeError: Failed to play sound ====================================================================== ERROR: test_alias_hand (test.test_winsound.PlaySoundTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 94, in test_alias_hand winsound.PlaySound('SystemHand', winsound.SND_ALIAS) RuntimeError: Failed to play sound ====================================================================== ERROR: test_alias_question (test.test_winsound.PlaySoundTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 104, in test_alias_question winsound.PlaySound('SystemQuestion', winsound.SND_ALIAS) RuntimeError: Failed to play sound sincerely, -The Buildbot From python-checkins at python.org Thu Jul 12 11:06:41 2007 From: python-checkins at python.org (georg.brandl) Date: Thu, 12 Jul 2007 11:06:41 +0200 (CEST) Subject: [Python-checkins] r56302 - python/trunk/Doc/lib/libtime.tex Message-ID: <20070712090641.C8B1E1E400A@bag.python.org> Author: georg.brandl Date: Thu Jul 12 11:06:41 2007 New Revision: 56302 Modified: python/trunk/Doc/lib/libtime.tex Log: Patch #1731659: improve time.strptime docs. 
Modified: python/trunk/Doc/lib/libtime.tex ============================================================================== --- python/trunk/Doc/lib/libtime.tex (original) +++ python/trunk/Doc/lib/libtime.tex Thu Jul 12 11:06:41 2007 @@ -309,15 +309,23 @@ \begin{funcdesc}{strptime}{string\optional{, format}} Parse a string representing a time according to a format. The return value is a \class{struct_time} as returned by \function{gmtime()} or -\function{localtime()}. The \var{format} parameter uses the same -directives as those used by \function{strftime()}; it defaults to -\code{"\%a \%b \%d \%H:\%M:\%S \%Y"} which matches the formatting -returned by \function{ctime()}. If \var{string} cannot be parsed -according to \var{format}, \exception{ValueError} is raised. If the -string to be parsed has excess data after parsing, -\exception{ValueError} is raised. The default values used to fill in -any missing data when more accurate values cannot be inferred are -\code{(1900, 1, 1, 0, 0, 0, 0, 1, -1)} . +\function{localtime()}. + +The \var{format} parameter uses the same directives as those used by +\function{strftime()}; it defaults to \code{"\%a \%b \%d \%H:\%M:\%S + \%Y"} which matches the formatting returned by \function{ctime()}. +If \var{string} cannot be parsed according to \var{format}, or if it +has excess data after parsing, \exception{ValueError} is raised. The +default values used to fill in any missing data when more accurate +values cannot be inferred are \code{(1900, 1, 1, 0, 0, 0, 0, 1, -1)}. + +For example: + +\begin{verbatim} +>>> import time +>>> time.strptime("30 Nov 00", "%d %b %y") +(2000, 11, 30, 0, 0, 0, 3, 335, -1) +\end{verbatim} Support for the \code{\%Z} directive is based on the values contained in \code{tzname} and whether \code{daylight} is true. 
Because of this, From python-checkins at python.org Thu Jul 12 11:06:44 2007 From: python-checkins at python.org (georg.brandl) Date: Thu, 12 Jul 2007 11:06:44 +0200 (CEST) Subject: [Python-checkins] r56303 - python/branches/release25-maint/Doc/lib/libtime.tex Message-ID: <20070712090644.110091E400A@bag.python.org> Author: georg.brandl Date: Thu Jul 12 11:06:43 2007 New Revision: 56303 Modified: python/branches/release25-maint/Doc/lib/libtime.tex Log: Patch #1731659: improve time.strptime docs. (backport from rev. 56302) Modified: python/branches/release25-maint/Doc/lib/libtime.tex ============================================================================== --- python/branches/release25-maint/Doc/lib/libtime.tex (original) +++ python/branches/release25-maint/Doc/lib/libtime.tex Thu Jul 12 11:06:43 2007 @@ -309,15 +309,23 @@ \begin{funcdesc}{strptime}{string\optional{, format}} Parse a string representing a time according to a format. The return value is a \class{struct_time} as returned by \function{gmtime()} or -\function{localtime()}. The \var{format} parameter uses the same -directives as those used by \function{strftime()}; it defaults to -\code{"\%a \%b \%d \%H:\%M:\%S \%Y"} which matches the formatting -returned by \function{ctime()}. If \var{string} cannot be parsed -according to \var{format}, \exception{ValueError} is raised. If the -string to be parsed has excess data after parsing, -\exception{ValueError} is raised. The default values used to fill in -any missing data when more accurate values cannot be inferred are -\code{(1900, 1, 1, 0, 0, 0, 0, 1, -1)} . +\function{localtime()}. + +The \var{format} parameter uses the same directives as those used by +\function{strftime()}; it defaults to \code{"\%a \%b \%d \%H:\%M:\%S + \%Y"} which matches the formatting returned by \function{ctime()}. +If \var{string} cannot be parsed according to \var{format}, or if it +has excess data after parsing, \exception{ValueError} is raised. 
The +default values used to fill in any missing data when more accurate +values cannot be inferred are \code{(1900, 1, 1, 0, 0, 0, 0, 1, -1)}. + +For example: + +\begin{verbatim} +>>> import time +>>> time.strptime("30 Nov 00", "%d %b %y") +(2000, 11, 30, 0, 0, 0, 3, 335, -1) +\end{verbatim} Support for the \code{\%Z} directive is based on the values contained in \code{tzname} and whether \code{daylight} is true. Because of this, From buildbot at python.org Thu Jul 12 11:16:48 2007 From: buildbot at python.org (buildbot at python.org) Date: Thu, 12 Jul 2007 09:16:48 +0000 Subject: [Python-checkins] buildbot warnings in alpha Tru64 5.1 2.5 Message-ID: <20070712091648.509A61E400A@bag.python.org> The Buildbot has detected a new failure of alpha Tru64 5.1 2.5. Full details are available at: http://www.python.org/dev/buildbot/all/alpha%2520Tru64%25205.1%25202.5/builds/278 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch branches/release25-maint] HEAD Blamelist: georg.brandl Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_socket ====================================================================== FAIL: testInterruptedTimeout (test.test_socket.TCPTimeoutTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/net/taipan/scratch1/nnorwitz/python/2.5.norwitz-tru64/build/Lib/test/test_socket.py", line 879, in testInterruptedTimeout self.fail("got Alarm in wrong place") AssertionError: got Alarm in wrong place sincerely, -The Buildbot From python-checkins at python.org Thu Jul 12 11:24:05 2007 From: python-checkins at python.org (georg.brandl) Date: Thu, 12 Jul 2007 11:24:05 +0200 (CEST) Subject: [Python-checkins] r56304 - python/trunk/Lib/test/regrtest.py Message-ID: <20070712092405.452B61E400A@bag.python.org> Author: georg.brandl Date: Thu Jul 12 11:24:04 2007 New Revision: 56304 Modified: python/trunk/Lib/test/regrtest.py Log: 
Patch #1731169: clean up expected skips list. Modified: python/trunk/Lib/test/regrtest.py ============================================================================== --- python/trunk/Lib/test/regrtest.py (original) +++ python/trunk/Lib/test/regrtest.py Thu Jul 12 11:24:04 2007 @@ -806,17 +806,16 @@ # test_timeout # Controlled by test_timeout.skip_expected. Requires the network # resource and a socket module. +# +# Tests that are expected to be skipped everywhere except on one platform +# are also handled separately. _expectations = { 'win32': """ test__locale - test_applesingle - test_al test_bsddb185 test_bsddb3 - test_cd - test_cl test_commands test_crypt test_curses @@ -825,14 +824,10 @@ test_fcntl test_fork1 test_gdbm - test_gl test_grp - test_imgfile test_ioctl test_largefile - test_linuxaudiodev test_mhlib - test_nis test_openpty test_ossaudiodev test_poll @@ -841,7 +836,6 @@ test_pwd test_resource test_signal - test_sunaudiodev test_threadsignals test_timing test_wait3 @@ -849,34 +843,19 @@ """, 'linux2': """ - test_al - test_applesingle test_bsddb185 - test_cd - test_cl test_curses test_dl - test_gl - test_imgfile test_largefile - test_linuxaudiodev - test_nis - test_ntpath test_ossaudiodev - test_sqlite - test_startfile - test_sunaudiodev """, 'mac': """ - test_al test_atexit test_bsddb test_bsddb185 test_bsddb3 test_bz2 - test_cd - test_cl test_commands test_crypt test_curses @@ -884,16 +863,11 @@ test_dl test_fcntl test_fork1 - test_gl test_grp test_ioctl - test_imgfile test_largefile - test_linuxaudiodev test_locale test_mmap - test_nis - test_ntpath test_openpty test_ossaudiodev test_poll @@ -904,88 +878,49 @@ test_pwd test_resource test_signal - test_sqlite - test_startfile - test_sunaudiodev test_sundry test_tarfile test_timing """, 'unixware7': """ - test_al - test_applesingle test_bsddb test_bsddb185 - test_cd - test_cl test_dl - test_gl - test_imgfile test_largefile - test_linuxaudiodev test_minidom - test_nis - test_ntpath test_openpty 
test_pyexpat test_sax - test_startfile - test_sqlite - test_sunaudiodev test_sundry """, 'openunix8': """ - test_al - test_applesingle test_bsddb test_bsddb185 - test_cd - test_cl test_dl - test_gl - test_imgfile test_largefile - test_linuxaudiodev test_minidom - test_nis - test_ntpath test_openpty test_pyexpat test_sax - test_sqlite - test_startfile - test_sunaudiodev test_sundry """, 'sco_sv3': """ - test_al - test_applesingle test_asynchat test_bsddb test_bsddb185 - test_cd - test_cl test_dl test_fork1 test_gettext - test_gl - test_imgfile test_largefile - test_linuxaudiodev test_locale test_minidom - test_nis - test_ntpath test_openpty test_pyexpat test_queue test_sax - test_sqlite - test_startfile - test_sunaudiodev test_sundry test_thread test_threaded_import @@ -994,15 +929,11 @@ """, 'riscos': """ - test_al - test_applesingle test_asynchat test_atexit test_bsddb test_bsddb185 test_bsddb3 - test_cd - test_cl test_commands test_crypt test_dbm @@ -1010,24 +941,16 @@ test_fcntl test_fork1 test_gdbm - test_gl test_grp - test_imgfile test_largefile - test_linuxaudiodev test_locale test_mmap - test_nis - test_ntpath test_openpty test_poll test_popen2 test_pty test_pwd test_strop - test_sqlite - test_startfile - test_sunaudiodev test_sundry test_thread test_threaded_import @@ -1038,274 +961,143 @@ 'darwin': """ test__locale - test_al test_bsddb test_bsddb3 - test_cd - test_cl test_curses test_gdbm - test_gl - test_imgfile test_largefile - test_linuxaudiodev test_locale test_minidom - test_nis - test_ntpath test_ossaudiodev test_poll - test_sqlite - test_startfile - test_sunaudiodev """, 'sunos5': """ - test_al - test_applesingle test_bsddb test_bsddb185 - test_cd - test_cl test_curses test_dbm test_gdbm - test_gl test_gzip - test_imgfile - test_linuxaudiodev test_openpty - test_sqlite - test_startfile test_zipfile test_zlib """, 'hp-ux11': """ - test_al - test_applesingle test_bsddb test_bsddb185 - test_cd - test_cl test_curses test_dl test_gdbm - test_gl test_gzip 
- test_imgfile test_largefile - test_linuxaudiodev test_locale test_minidom - test_nis - test_ntpath test_openpty test_pyexpat test_sax - test_sqlite - test_startfile - test_sunaudiodev test_zipfile test_zlib """, 'atheos': """ - test_al - test_applesingle test_bsddb185 - test_cd - test_cl test_curses test_dl test_gdbm - test_gl - test_imgfile test_largefile - test_linuxaudiodev test_locale test_mhlib test_mmap - test_nis test_poll test_popen2 test_resource - test_sqlite - test_startfile - test_sunaudiodev """, 'cygwin': """ - test_al - test_applesingle test_bsddb185 test_bsddb3 - test_cd - test_cl test_curses test_dbm - test_gl - test_imgfile test_ioctl test_largefile - test_linuxaudiodev test_locale - test_nis test_ossaudiodev test_socketserver - test_sqlite - test_sunaudiodev """, 'os2emx': """ - test_al - test_applesingle test_audioop test_bsddb185 test_bsddb3 - test_cd - test_cl test_commands test_curses test_dl - test_gl - test_imgfile test_largefile - test_linuxaudiodev test_mhlib test_mmap - test_nis test_openpty test_ossaudiodev test_pty test_resource test_signal - test_sqlite - test_startfile - test_sunaudiodev """, 'freebsd4': """ - test_aepack - test_al - test_applesingle test_bsddb test_bsddb3 - test_cd - test_cl test_gdbm - test_gl - test_imgfile - test_linuxaudiodev test_locale - test_macostools - test_nis test_ossaudiodev test_pep277 - test_plistlib test_pty - test_scriptpackages test_socket_ssl test_socketserver - test_sqlite - test_startfile - test_sunaudiodev test_tcl test_timeout - test_unicode_file test_urllibnet - test_winreg - test_winsound """, 'aix5': """ - test_aepack - test_al - test_applesingle test_bsddb test_bsddb185 test_bsddb3 test_bz2 - test_cd - test_cl test_dl test_gdbm - test_gl test_gzip - test_imgfile - test_linuxaudiodev - test_macostools - test_nis test_ossaudiodev - test_sqlite - test_startfile - test_sunaudiodev test_tcl - test_winreg - test_winsound test_zipimport test_zlib """, 'openbsd3': """ - test_aepack - test_al - 
test_applesingle test_bsddb test_bsddb3 - test_cd - test_cl test_ctypes test_dl test_gdbm - test_gl - test_imgfile - test_linuxaudiodev test_locale - test_macostools - test_nis test_normalization test_ossaudiodev test_pep277 - test_plistlib - test_scriptpackages test_tcl - test_sqlite - test_startfile - test_sunaudiodev - test_unicode_file - test_winreg - test_winsound """, 'netbsd3': """ - test_aepack - test_al - test_applesingle test_bsddb test_bsddb185 test_bsddb3 - test_cd - test_cl test_ctypes test_curses test_dl test_gdbm - test_gl - test_imgfile - test_linuxaudiodev test_locale - test_macostools - test_nis test_ossaudiodev test_pep277 - test_sqlite - test_startfile - test_sunaudiodev test_tcl - test_unicode_file - test_winreg - test_winsound """, } _expectations['freebsd5'] = _expectations['freebsd4'] @@ -1323,6 +1115,9 @@ s = _expectations[sys.platform] self.expected = set(s.split()) + # expected to be skipped on every platform, even Linux + self.expected.add('test_linuxaudiodev') + if not os.path.supports_unicode_filenames: self.expected.add('test_pep277') @@ -1337,21 +1132,30 @@ if not sys.platform in ("mac", "darwin"): MAC_ONLY = ["test_macostools", "test_aepack", - "test_plistlib", "test_scriptpackages"] + "test_plistlib", "test_scriptpackages", + "test_applesingle"] for skip in MAC_ONLY: self.expected.add(skip) if sys.platform != "win32": + # test_sqlite is only reliable on Windows where the library + # is distributed with Python WIN_ONLY = ["test_unicode_file", "test_winreg", - "test_winsound"] + "test_winsound", "test_startfile", + "test_sqlite"] for skip in WIN_ONLY: self.expected.add(skip) if sys.platform != 'irix': - IRIX_ONLY =["test_imageop"] + IRIX_ONLY = ["test_imageop", "test_al", "test_cd", "test_cl", + "test_gl", "test_imgfile"] for skip in IRIX_ONLY: self.expected.add(skip) + if sys.platform != 'sunos5': + self.expected.add('test_sunaudiodev') + self.expected.add('test_nis') + self.valid = True def isvalid(self): From python-checkins at 
python.org Thu Jul 12 11:37:49 2007 From: python-checkins at python.org (georg.brandl) Date: Thu, 12 Jul 2007 11:37:49 +0200 (CEST) Subject: [Python-checkins] r56306 - in python/trunk: Doc/tut/tut.tex Misc/NEWS Message-ID: <20070712093749.CB1201E401C@bag.python.org> Author: georg.brandl Date: Thu Jul 12 11:37:49 2007 New Revision: 56306 Modified: python/trunk/Doc/tut/tut.tex python/trunk/Misc/NEWS Log: Bug #1637365: add subsection about "__name__ == __main__" to the Python tutorial. Modified: python/trunk/Doc/tut/tut.tex ============================================================================== --- python/trunk/Doc/tut/tut.tex (original) +++ python/trunk/Doc/tut/tut.tex Thu Jul 12 11:37:49 2007 @@ -2522,6 +2522,44 @@ This imports all names except those beginning with an underscore (\code{_}). +\subsection{Executing modules as scripts \label{modulesAsScripts}} + +When you run a Python module with + +\begin{verbatim} +python fibo.py +\end{verbatim} + +the code in the module will be executed, just as if you imported it, but +with the \code{__name__} set to \code{"__main__"}. That means that by +adding this code at the end of your module: + +\begin{verbatim} +if __name__ == "__main__": + import sys + fib(int(sys.argv[1])) +\end{verbatim} + +you can make the file usable as a script as well as an importable module, +because the code that parses the command line only runs if the module is +executed as the ``main'' file: + +\begin{verbatim} +$ python fibo.py 50 +1 1 2 3 5 8 13 21 34 +\end{verbatim} + +If the module is imported, the code is not run: + +\begin{verbatim} +>>> import fibo +>>> +\end{verbatim} + +This is often used either to provide a convenient user interface to a +module, or for testing purposes (running the module as a script executes +a test suite). 
+ \subsection{The Module Search Path \label{searchPath}} Modified: python/trunk/Misc/NEWS ============================================================================== --- python/trunk/Misc/NEWS (original) +++ python/trunk/Misc/NEWS Thu Jul 12 11:37:49 2007 @@ -851,6 +851,9 @@ Documentation ------------- +- Bug #1637365: add subsection about "__name__ == __main__" to the + Python tutorial. + - Patch #1698768: updated the "using Python on the Mac" intro. - Bug #1569057: Document that calling file.next() when the file is open for From python-checkins at python.org Thu Jul 12 11:37:53 2007 From: python-checkins at python.org (georg.brandl) Date: Thu, 12 Jul 2007 11:37:53 +0200 (CEST) Subject: [Python-checkins] r56307 - in python/branches/release25-maint: Doc/tut/tut.tex Misc/NEWS Message-ID: <20070712093753.C6C001E400F@bag.python.org> Author: georg.brandl Date: Thu Jul 12 11:37:53 2007 New Revision: 56307 Modified: python/branches/release25-maint/Doc/tut/tut.tex python/branches/release25-maint/Misc/NEWS Log: Bug #1637365: add subsection about "__name__ == __main__" to the Python tutorial. (backport from rev. 56306) Modified: python/branches/release25-maint/Doc/tut/tut.tex ============================================================================== --- python/branches/release25-maint/Doc/tut/tut.tex (original) +++ python/branches/release25-maint/Doc/tut/tut.tex Thu Jul 12 11:37:53 2007 @@ -2522,6 +2522,44 @@ This imports all names except those beginning with an underscore (\code{_}). +\subsection{Executing modules as scripts \label{modulesAsScripts}} + +When you run a Python module with + +\begin{verbatim} +python fibo.py +\end{verbatim} + +the code in the module will be executed, just as if you imported it, but +with the \code{__name__} set to \code{"__main__"}. 
That means that by +adding this code at the end of your module: + +\begin{verbatim} +if __name__ == "__main__": + import sys + fib(int(sys.argv[1])) +\end{verbatim} + +you can make the file usable as a script as well as an importable module, +because the code that parses the command line only runs if the module is +executed as the ``main'' file: + +\begin{verbatim} +$ python fibo.py 50 +1 1 2 3 5 8 13 21 34 +\end{verbatim} + +If the module is imported, the code is not run: + +\begin{verbatim} +>>> import fibo +>>> +\end{verbatim} + +This is often used either to provide a convenient user interface to a +module, or for testing purposes (running the module as a script executes +a test suite). + \subsection{The Module Search Path \label{searchPath}} Modified: python/branches/release25-maint/Misc/NEWS ============================================================================== --- python/branches/release25-maint/Misc/NEWS (original) +++ python/branches/release25-maint/Misc/NEWS Thu Jul 12 11:37:53 2007 @@ -77,6 +77,9 @@ Documentation ------------- +- Bug #1637365: add subsection about "__name__ == __main__" to the + Python tutorial. + - Bug #1569057: Document that calling file.next() on a file open for writing has undefined behaviour. Backport of r54712. From buildbot at python.org Thu Jul 12 11:44:19 2007 From: buildbot at python.org (buildbot at python.org) Date: Thu, 12 Jul 2007 09:44:19 +0000 Subject: [Python-checkins] buildbot warnings in ppc Debian unstable trunk Message-ID: <20070712094419.D156A1E4011@bag.python.org> The Buildbot has detected a new failure of ppc Debian unstable trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/ppc%2520Debian%2520unstable%2520trunk/builds/43 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: georg.brandl Build had warnings: warnings test Excerpt from the test logfile: Traceback (most recent call last): File "/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/test/test_socketserver.py", line 93, in run svr.serve_a_few() File "/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/test/test_socketserver.py", line 35, in serve_a_few self.handle_request() File "/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/SocketServer.py", line 224, in handle_request self.handle_error(request, client_address) File "/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/SocketServer.py", line 222, in handle_request self.process_request(request, client_address) File "/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/SocketServer.py", line 429, in process_request self.collect_children() File "/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/SocketServer.py", line 425, in collect_children self.active_children.remove(pid) ValueError: list.remove(x): x not in list 1 test failed: test_socketserver Traceback (most recent call last): File "./Lib/test/regrtest.py", line 557, in runtest_inner indirect_test() File "/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/test/test_socketserver.py", line 216, in test_main testall() File "/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/test/test_socketserver.py", line 199, in testall testloop(socket.AF_INET, tcpservers, MyStreamHandler, teststream) File "/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/test/test_socketserver.py", line 148, in testloop testfunc(proto, addr) File 
"/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/test/test_socketserver.py", line 62, in teststream s.connect(addr) File "", line 1, in connect error: (111, 'Connection refused') make: *** [buildbottest] Error 1 sincerely, -The Buildbot From python-checkins at python.org Thu Jul 12 11:59:22 2007 From: python-checkins at python.org (georg.brandl) Date: Thu, 12 Jul 2007 11:59:22 +0200 (CEST) Subject: [Python-checkins] r56308 - in python/trunk: Doc/lib/libzipfile.tex Lib/test/test_zipfile.py Lib/zipfile.py Misc/NEWS Message-ID: <20070712095922.8E6961E400A@bag.python.org> Author: georg.brandl Date: Thu Jul 12 11:59:22 2007 New Revision: 56308 Modified: python/trunk/Doc/lib/libzipfile.tex python/trunk/Lib/test/test_zipfile.py python/trunk/Lib/zipfile.py python/trunk/Misc/NEWS Log: Patch #1675424: Added tests for uncovered code in the zipfile module. The KeyError raised by Zipfile.getinfo for nonexistent names now has a descriptive message. Modified: python/trunk/Doc/lib/libzipfile.tex ============================================================================== --- python/trunk/Doc/lib/libzipfile.tex (original) +++ python/trunk/Doc/lib/libzipfile.tex Thu Jul 12 11:59:22 2007 @@ -24,8 +24,8 @@ The available attributes of this module are: -\begin{excdesc}{error} - The error raised for bad ZIP files. +\begin{excdesc}{BadZipfile} + The error raised for bad ZIP files (old name: \code{zipfile.error}). \end{excdesc} \begin{excdesc}{LargeZipFile} @@ -90,7 +90,7 @@ (a string) or a file-like object. The \var{mode} parameter should be \code{'r'} to read an existing file, \code{'w'} to truncate and write a new file, or \code{'a'} to append to an - existing file. For \var{mode} is \code{'a'} and \var{file} + existing file. If \var{mode} is \code{'a'} and \var{file} refers to an existing ZIP file, then additional files are added to it. If \var{file} does not refer to a ZIP file, then a new ZIP archive is appended to the file. 
This is meant for adding a ZIP @@ -128,7 +128,8 @@ \begin{methoddesc}{getinfo}{name} Return a \class{ZipInfo} object with information about the archive - member \var{name}. + member \var{name}. Calling \method{getinfo()} for a name not currently + contained in the archive will raise a \exception{KeyError}. \end{methoddesc} \begin{methoddesc}{infolist}{} @@ -147,7 +148,9 @@ parameter, if included, must be one of the following: \code{'r'} (the default), \code{'U'}, or \code{'rU'}. Choosing \code{'U'} or \code{'rU'} will enable universal newline support in the read-only - object. \var{pwd} is the password used for encrypted files. + object. \var{pwd} is the password used for encrypted files. Calling + \method{open()} on a closed ZipFile will raise a + \exception{RuntimeError}. \begin{notice} The file-like object is read-only and provides the following methods: \method{read()}, \method{readline()}, \method{readlines()}, @@ -182,7 +185,8 @@ Return the bytes of the file in the archive. The archive must be open for read or append. \var{pwd} is the password used for encrypted files and, if specified, it will override the default password set with - \method{setpassword()}. + \method{setpassword()}. Calling \method{read()} on a closed ZipFile + will raise a \exception{RuntimeError}. \versionchanged[\var{pwd} was added]{2.6} \end{methoddesc} @@ -190,6 +194,8 @@ \begin{methoddesc}{testzip}{} Read all the files in the archive and check their CRC's and file headers. Return the name of the first bad file, or else return \code{None}. + Calling \method{testzip()} on a closed ZipFile will raise a + \exception{RuntimeError}. \end{methoddesc} \begin{methoddesc}{write}{filename\optional{, arcname\optional{, @@ -200,7 +206,10 @@ separators removed). If given, \var{compress_type} overrides the value given for the \var{compression} parameter to the constructor for the new entry. The archive must be open with mode \code{'w'} - or \code{'a'}. 
+ or \code{'a'} -- calling \method{write()} on a ZipFile created with + mode \code{'r'} will raise a \exception{RuntimeError}. Calling + \method{write()} on a closed ZipFile will raise a + \exception{RuntimeError}. \note{There is no official file name encoding for ZIP files. If you have unicode file names, please convert them to byte strings @@ -210,6 +219,11 @@ \note{Archive names should be relative to the archive root, that is, they should not start with a path separator.} + + \note{If \code{arcname} (or \code{filename}, if \code{arcname} is + not given) contains a null byte, the name of the file in the archive will + be truncated at the null byte.} + \end{methoddesc} \begin{methoddesc}{writestr}{zinfo_or_arcname, bytes} @@ -218,7 +232,10 @@ \class{ZipInfo} instance. If it's an instance, at least the filename, date, and time must be given. If it's a name, the date and time is set to the current date and time. The archive must be - opened with mode \code{'w'} or \code{'a'}. + opened with mode \code{'w'} or \code{'a'} -- calling + \method{writestr()} on a ZipFile created with mode \code{'r'} + will raise a \exception{RuntimeError}. Calling \method{writestr()} + on a closed ZipFile will raise a \exception{RuntimeError}. \end{methoddesc} @@ -243,12 +260,13 @@ available, else a \file{*.pyc} file, compiling if necessary. If the pathname is a file, the filename must end with \file{.py}, and just the (corresponding \file{*.py[co]}) file is added at the top level - (no path information). 
If the pathname is a file that does not end with + \file{.py}, a \exception{RuntimeError} will be raised. If it is a + directory, and the directory is not a package directory, then all the + files \file{*.py[co]} are added at the top level. If the directory is + a package directory, then all \file{*.py[co]} are added under the package + name as a file path, and if any subdirectories are package directories, all + of these are added recursively. \var{basename} is intended for internal use only. The \method{writepy()} method makes archives with file names like this: Modified: python/trunk/Lib/test/test_zipfile.py ============================================================================== --- python/trunk/Lib/test/test_zipfile.py (original) +++ python/trunk/Lib/test/test_zipfile.py Thu Jul 12 11:59:22 2007 @@ -14,12 +14,12 @@ from test.test_support import TESTFN, run_unittest TESTFN2 = TESTFN + "2" -FIXEDTEST_SIZE = 10 +FIXEDTEST_SIZE = 1000 class TestsWithSourceFile(unittest.TestCase): def setUp(self): - self.line_gen = ("Zipfile test line %d. random float: %f" % (i, random()) - for i in xrange(FIXEDTEST_SIZE)) + self.line_gen = ["Zipfile test line %d. 
random float: %f" % (i, random()) + for i in xrange(FIXEDTEST_SIZE)] self.data = '\n'.join(self.line_gen) + '\n' # Make a source file with some lines @@ -239,6 +239,63 @@ self.assertEqual(zipfp.namelist(), ["absolute"]) zipfp.close() + def testAppendToZipFile(self): + # Test appending to an existing zipfile + zipfp = zipfile.ZipFile(TESTFN2, "w", zipfile.ZIP_STORED) + zipfp.write(TESTFN, TESTFN) + zipfp.close() + zipfp = zipfile.ZipFile(TESTFN2, "a", zipfile.ZIP_STORED) + zipfp.writestr("strfile", self.data) + self.assertEqual(zipfp.namelist(), [TESTFN, "strfile"]) + zipfp.close() + + def testAppendToNonZipFile(self): + # Test appending to an existing file that is not a zipfile + # NOTE: this test fails if len(d) < 22 because of the first + # line "fpin.seek(-22, 2)" in _EndRecData + d = 'I am not a ZipFile!'*10 + f = file(TESTFN2, 'wb') + f.write(d) + f.close() + zipfp = zipfile.ZipFile(TESTFN2, "a", zipfile.ZIP_STORED) + zipfp.write(TESTFN, TESTFN) + zipfp.close() + + f = file(TESTFN2, 'rb') + f.seek(len(d)) + zipfp = zipfile.ZipFile(f, "r") + self.assertEqual(zipfp.namelist(), [TESTFN]) + zipfp.close() + f.close() + + def test_WriteDefaultName(self): + # Check that calling ZipFile.write without arcname specified produces the expected result + zipfp = zipfile.ZipFile(TESTFN2, "w") + zipfp.write(TESTFN) + self.assertEqual(zipfp.read(TESTFN), file(TESTFN).read()) + zipfp.close() + + def test_PerFileCompression(self): + # Check that files within a Zip archive can have different compression options + zipfp = zipfile.ZipFile(TESTFN2, "w") + zipfp.write(TESTFN, 'storeme', zipfile.ZIP_STORED) + zipfp.write(TESTFN, 'deflateme', zipfile.ZIP_DEFLATED) + sinfo = zipfp.getinfo('storeme') + dinfo = zipfp.getinfo('deflateme') + self.assertEqual(sinfo.compress_type, zipfile.ZIP_STORED) + self.assertEqual(dinfo.compress_type, zipfile.ZIP_DEFLATED) + zipfp.close() + + def test_WriteToReadonly(self): + # Check that trying to call write() on a readonly ZipFile object + # raises a 
RuntimeError + zipf = zipfile.ZipFile(TESTFN2, mode="w") + zipf.writestr("somefile.txt", "bogus") + zipf.close() + zipf = zipfile.ZipFile(TESTFN2, mode="r") + self.assertRaises(RuntimeError, zipf.write, TESTFN) + zipf.close() + def tearDown(self): os.remove(TESTFN) os.remove(TESTFN2) @@ -361,7 +418,6 @@ self.assertEqual(zipfp.namelist(), ["absolute"]) zipfp.close() - def tearDown(self): zipfile.ZIP64_LIMIT = self._limit os.remove(TESTFN) @@ -432,6 +488,11 @@ finally: shutil.rmtree(TESTFN2) + def testWriteNonPyfile(self): + zipfp = zipfile.PyZipFile(TemporaryFile(), "w") + file(TESTFN, 'w').write('most definitely not a python file') + self.assertRaises(RuntimeError, zipfp.writepy, TESTFN) + os.remove(TESTFN) class OtherTests(unittest.TestCase): @@ -513,7 +574,56 @@ # a RuntimeError, and so should calling .testzip. An earlier # version of .testzip would swallow this exception (and any other) # and report that the first file in the archive was corrupt. + self.assertRaises(RuntimeError, zipf.read, "foo.txt") + self.assertRaises(RuntimeError, zipf.open, "foo.txt") self.assertRaises(RuntimeError, zipf.testzip) + self.assertRaises(RuntimeError, zipf.writestr, "bogus.txt", "bogus") + file(TESTFN, 'w').write('zipfile test data') + self.assertRaises(RuntimeError, zipf.write, TESTFN) + + def test_BadConstructorMode(self): + # Check that bad modes passed to ZipFile constructor are caught + self.assertRaises(RuntimeError, zipfile.ZipFile, TESTFN, "q") + + def test_BadOpenMode(self): + # Check that bad modes passed to ZipFile.open are caught + zipf = zipfile.ZipFile(TESTFN, mode="w") + zipf.writestr("foo.txt", "O, for a Muse of Fire!") + zipf.close() + zipf = zipfile.ZipFile(TESTFN, mode="r") + # read the data to make sure the file is there + zipf.read("foo.txt") + self.assertRaises(RuntimeError, zipf.open, "foo.txt", "q") + zipf.close() + + def test_Read0(self): + # Check that calling read(0) on a ZipExtFile object returns an empty + # string and doesn't advance file pointer + 
zipf = zipfile.ZipFile(TESTFN, mode="w") + zipf.writestr("foo.txt", "O, for a Muse of Fire!") + # read the data to make sure the file is there + f = zipf.open("foo.txt") + for i in xrange(FIXEDTEST_SIZE): + self.assertEqual(f.read(0), '') + + self.assertEqual(f.read(), "O, for a Muse of Fire!") + zipf.close() + + def test_OpenNonexistentItem(self): + # Check that attempting to call open() for an item that doesn't + # exist in the archive raises a RuntimeError + zipf = zipfile.ZipFile(TESTFN, mode="w") + self.assertRaises(KeyError, zipf.open, "foo.txt", "r") + + def test_BadCompressionMode(self): + # Check that bad compression methods passed to ZipFile.open are caught + self.assertRaises(RuntimeError, zipfile.ZipFile, TESTFN, "w", -1) + + def test_NullByteInFilename(self): + # Check that a filename containing a null byte is properly terminated + zipf = zipfile.ZipFile(TESTFN, mode="w") + zipf.writestr("foo.txt\x00qqq", "O, for a Muse of Fire!") + self.assertEqual(zipf.namelist(), ['foo.txt']) def tearDown(self): support.unlink(TESTFN) Modified: python/trunk/Lib/zipfile.py ============================================================================== --- python/trunk/Lib/zipfile.py (original) +++ python/trunk/Lib/zipfile.py Thu Jul 12 11:59:22 2007 @@ -568,8 +568,9 @@ def __init__(self, file, mode="r", compression=ZIP_STORED, allowZip64=False): """Open the ZIP file with mode read "r", write "w" or append "a".""" - self._allowZip64 = allowZip64 - self._didModify = False + if mode not in ("r", "w", "a"): + raise RuntimeError('ZipFile() requires mode "r", "w", or "a"') + if compression == ZIP_STORED: pass elif compression == ZIP_DEFLATED: @@ -578,6 +579,9 @@ "Compression requires the (missing) zlib module" else: raise RuntimeError, "That compression method is not supported" + + self._allowZip64 = allowZip64 + self._didModify = False self.debug = 0 # Level of printing: 0 through 3 self.NameToInfo = {} # Find file info given name self.filelist = [] # List of ZipInfo 
instances for archive @@ -720,7 +724,12 @@ def getinfo(self, name): """Return the instance of ZipInfo given 'name'.""" - return self.NameToInfo[name] + info = self.NameToInfo.get(name) + if info is None: + raise KeyError( + 'There is no item named %r in the archive' % name) + + return info def setpassword(self, pwd): """Set default password for encrypted files.""" @@ -824,6 +833,10 @@ def write(self, filename, arcname=None, compress_type=None): """Put the bytes from filename into the archive under the name arcname.""" + if not self.fp: + raise RuntimeError( + "Attempt to write to ZIP archive that was already closed") + st = os.stat(filename) mtime = time.localtime(st.st_mtime) date_time = mtime[0:6] @@ -896,6 +909,11 @@ zinfo.compress_type = self.compression else: zinfo = zinfo_or_arcname + + if not self.fp: + raise RuntimeError( + "Attempt to write to ZIP archive that was already closed") + zinfo.file_size = len(bytes) # Uncompressed size zinfo.header_offset = self.fp.tell() # Start of header bytes self._writecheck(zinfo) Modified: python/trunk/Misc/NEWS ============================================================================== --- python/trunk/Misc/NEWS (original) +++ python/trunk/Misc/NEWS Thu Jul 12 11:59:22 2007 @@ -420,6 +420,10 @@ - Patch #1481079: add support for HTTP_REFERER to CGIHTTPServer. +- Patch #1675424: Added tests for uncovered code in the zipfile module. + The KeyError raised by Zipfile.getinfo for nonexistent names now has + a descriptive message. + - Bug #1115886: os.path.splitext('.cshrc') gives now ('.cshrc', ''). - unittest now verifies more of its assumptions. 
In particular, TestCase From python-checkins at python.org Thu Jul 12 19:31:06 2007 From: python-checkins at python.org (phillip.eby) Date: Thu, 12 Jul 2007 19:31:06 +0200 (CEST) Subject: [Python-checkins] r56320 - sandbox/trunk/setuptools/setuptools/__init__.py Message-ID: <20070712173106.C4C401E400F@bag.python.org> Author: phillip.eby Date: Thu Jul 12 19:31:06 2007 New Revision: 56320 Modified: sandbox/trunk/setuptools/setuptools/__init__.py Log: Fix a problem with the findall() fix. :( Modified: sandbox/trunk/setuptools/setuptools/__init__.py ============================================================================== --- sandbox/trunk/setuptools/setuptools/__init__.py (original) +++ sandbox/trunk/setuptools/setuptools/__init__.py Thu Jul 12 19:31:06 2007 @@ -69,7 +69,9 @@ """ all_files = [] for base, dirs, files in os.walk(dir): - if base!=os.curdir: + if base==os.curdir or base.startswith(os.curdir+os.sep): + base = base[2:] + if base: files = [os.path.join(base, f) for f in files] all_files.extend(filter(os.path.isfile, files)) return all_files From python-checkins at python.org Thu Jul 12 19:32:02 2007 From: python-checkins at python.org (phillip.eby) Date: Thu, 12 Jul 2007 19:32:02 +0200 (CEST) Subject: [Python-checkins] r56321 - sandbox/branches/setuptools-0.6/setuptools/__init__.py Message-ID: <20070712173202.AA3381E400A@bag.python.org> Author: phillip.eby Date: Thu Jul 12 19:32:02 2007 New Revision: 56321 Modified: sandbox/branches/setuptools-0.6/setuptools/__init__.py Log: Backport findall() fix fix from trunk. 
Modified: sandbox/branches/setuptools-0.6/setuptools/__init__.py ============================================================================== --- sandbox/branches/setuptools-0.6/setuptools/__init__.py (original) +++ sandbox/branches/setuptools-0.6/setuptools/__init__.py Thu Jul 12 19:32:02 2007 @@ -69,7 +69,9 @@ """ all_files = [] for base, dirs, files in os.walk(dir): - if base!=os.curdir: + if base==os.curdir or base.startswith(os.curdir+os.sep): + base = base[2:] + if base: files = [os.path.join(base, f) for f in files] all_files.extend(filter(os.path.isfile, files)) return all_files From python-checkins at python.org Fri Jul 13 12:43:44 2007 From: python-checkins at python.org (facundo.batista) Date: Fri, 13 Jul 2007 12:43:44 +0200 (CEST) Subject: [Python-checkins] r56340 - python/trunk/Lib/test/test_xmlrpc.py Message-ID: <20070713104344.E65E21E4013@bag.python.org> Author: facundo.batista Date: Fri Jul 13 12:43:44 2007 New Revision: 56340 Modified: python/trunk/Lib/test/test_xmlrpc.py Log: Added tests for basic behavior of DateTime, Binary, and Fault classes and the escape function. Check that marshalling recursive sequences & dicts raises TypeError. 
Check that marshalling out-of-range ints raises OverflowError [Alan McIntyre - GSoC] Modified: python/trunk/Lib/test/test_xmlrpc.py ============================================================================== --- python/trunk/Lib/test/test_xmlrpc.py (original) +++ python/trunk/Lib/test/test_xmlrpc.py Fri Jul 13 12:43:44 2007 @@ -1,5 +1,7 @@ +import base64 import datetime import sys +import time import unittest import xmlrpclib from test import test_support @@ -25,6 +27,10 @@ (2005, 02, 10, 11, 41, 23, 0, 1, -1)), 'datetime3': xmlrpclib.DateTime( datetime.datetime(2005, 02, 10, 11, 41, 23)), + 'datetime4': xmlrpclib.DateTime( + datetime.date(2005, 02, 10)), + 'datetime5': xmlrpclib.DateTime( + datetime.time(11, 41, 23)), }] class XMLRPCTestCase(unittest.TestCase): @@ -101,11 +107,37 @@ def test_dump_bad_dict(self): self.assertRaises(TypeError, xmlrpclib.dumps, ({(1,2,3): 1},)) + def test_dump_recursive_seq(self): + l = [1,2,3] + t = [3,4,5,l] + l.append(t) + self.assertRaises(TypeError, xmlrpclib.dumps, (l,)) + + def test_dump_recursive_dict(self): + d = {'1':1, '2':1} + t = {'3':3, 'd':d} + d['t'] = t + self.assertRaises(TypeError, xmlrpclib.dumps, (d,)) + def test_dump_big_int(self): if sys.maxint > 2L**31-1: self.assertRaises(OverflowError, xmlrpclib.dumps, (int(2L**34),)) + xmlrpclib.dumps((xmlrpclib.MAXINT, xmlrpclib.MININT)) + self.assertRaises(OverflowError, xmlrpclib.dumps, (xmlrpclib.MAXINT+1,)) + self.assertRaises(OverflowError, xmlrpclib.dumps, (xmlrpclib.MININT-1,)) + + def dummy_write(s): + pass + + m = xmlrpclib.Marshaller() + m.dump_int(xmlrpclib.MAXINT, dummy_write) + m.dump_int(xmlrpclib.MININT, dummy_write) + self.assertRaises(OverflowError, m.dump_int, xmlrpclib.MAXINT+1, dummy_write) + self.assertRaises(OverflowError, m.dump_int, xmlrpclib.MININT-1, dummy_write) + + def test_dump_none(self): value = alist + [None] arg1 = (alist + [None],) @@ -156,8 +188,109 @@ self.assertEquals(s, "abc \xc2\x95") self.assertEquals(items, [("def \xc2\x96", "ghi 
\xc2\x97")]) + +class HelperTestCase(unittest.TestCase): + def test_escape(self): + self.assertEqual(xmlrpclib.escape("a&b"), "a&amp;b") + self.assertEqual(xmlrpclib.escape("a<b"), "a&lt;b") + self.assertEqual(xmlrpclib.escape("a>b"), "a&gt;b") + +class FaultTestCase(unittest.TestCase): + def test_repr(self): + f = xmlrpclib.Fault(42, 'Test Fault') + self.assertEqual(repr(f), "<Fault 42: 'Test Fault'>") + self.assertEqual(repr(f), str(f)) + + def test_dump_fault(self): + f = xmlrpclib.Fault(42, 'Test Fault') + s = xmlrpclib.dumps((f,)) + (newf,), m = xmlrpclib.loads(s) + self.assertEquals(newf, {'faultCode': 42, 'faultString': 'Test Fault'}) + self.assertEquals(m, None) + + s = xmlrpclib.Marshaller().dumps(f) + self.assertRaises(xmlrpclib.Fault, xmlrpclib.loads, s) + + +class DateTimeTestCase(unittest.TestCase): + def test_default(self): + t = xmlrpclib.DateTime() + + def test_time(self): + d = 1181399930.036952 + t = xmlrpclib.DateTime(d) + self.assertEqual(str(t), time.strftime("%Y%m%dT%H:%M:%S", time.localtime(d))) + + def test_time_tuple(self): + d = (2007,6,9,10,38,50,5,160,0) + t = xmlrpclib.DateTime(d) + self.assertEqual(str(t), '20070609T10:38:50') + + def test_time_struct(self): + d = time.localtime(1181399930.036952) + t = xmlrpclib.DateTime(d) + self.assertEqual(str(t), time.strftime("%Y%m%dT%H:%M:%S", d)) + + def test_datetime_datetime(self): + d = datetime.datetime(2007,1,2,3,4,5) + t = xmlrpclib.DateTime(d) + self.assertEqual(str(t), '20070102T03:04:05') + + def test_datetime_date(self): + d = datetime.date(2007,9,8) + t = xmlrpclib.DateTime(d) + self.assertEqual(str(t), '20070908T00:00:00') + + def test_datetime_time(self): + d = datetime.time(13,17,19) + # allow for date rollover by checking today's or tomorrow's dates + dd1 = datetime.datetime.now().date() + dd2 = dd1 + datetime.timedelta(days=1) + vals = (dd1.strftime('%Y%m%dT13:17:19'), + dd2.strftime('%Y%m%dT13:17:19')) + t = xmlrpclib.DateTime(d) + self.assertEqual(str(t) in vals, True) + + def test_repr(self): + d = datetime.datetime(2007,1,2,3,4,5) + t = xmlrpclib.DateTime(d)
+ val = "<DateTime '20070102T03:04:05' at %x>" % id(t) + self.assertEqual(repr(t), val) + + def test_decode(self): + d = ' 20070908T07:11:13 ' + t1 = xmlrpclib.DateTime() + t1.decode(d) + tref = xmlrpclib.DateTime(datetime.datetime(2007,9,8,7,11,13)) + self.assertEqual(t1, tref) + + t2 = xmlrpclib._datetime(d) + self.assertEqual(t1, tref) + +class BinaryTestCase(unittest.TestCase): + def test_default(self): + t = xmlrpclib.Binary() + self.assertEqual(str(t), '') + + def test_string(self): + d = '\x01\x02\x03abc123\xff\xfe' + t = xmlrpclib.Binary(d) + self.assertEqual(str(t), d) + + def test_decode(self): + d = '\x01\x02\x03abc123\xff\xfe' + de = base64.encodestring(d) + t1 = xmlrpclib.Binary() + t1.decode(de) + self.assertEqual(str(t1), d) + + t2 = xmlrpclib._binary(de) + self.assertEqual(str(t2), d) + + def test_main(): - test_support.run_unittest(XMLRPCTestCase) + test_support.run_unittest(XMLRPCTestCase, HelperTestCase, + DateTimeTestCase, BinaryTestCase, FaultTestCase) if __name__ == "__main__": From buildbot at python.org Fri Jul 13 12:48:50 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 10:48:50 +0000 Subject: [Python-checkins] buildbot failure in x86 gentoo trunk Message-ID: <20070713104850.02F621E4010@bag.python.org> The Buildbot has detected a new failure of x86 gentoo trunk. Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520gentoo%2520trunk/builds/2301 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista BUILD FAILED: failed svn sincerely, -The Buildbot From buildbot at python.org Fri Jul 13 12:48:56 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 10:48:56 +0000 Subject: [Python-checkins] buildbot failure in x86 mvlgcc trunk Message-ID: <20070713104857.0C1A81E4022@bag.python.org> The Buildbot has detected a new failure of x86 mvlgcc trunk.
Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520mvlgcc%2520trunk/builds/644 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista BUILD FAILED: failed svn sincerely, -The Buildbot From buildbot at python.org Fri Jul 13 12:48:57 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 10:48:57 +0000 Subject: [Python-checkins] buildbot failure in S-390 Debian trunk Message-ID: <20070713104857.6A3911E401D@bag.python.org> The Buildbot has detected a new failure of S-390 Debian trunk. Full details are available at: http://www.python.org/dev/buildbot/all/S-390%2520Debian%2520trunk/builds/1037 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista BUILD FAILED: failed svn sincerely, -The Buildbot From buildbot at python.org Fri Jul 13 12:49:12 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 10:49:12 +0000 Subject: [Python-checkins] buildbot failure in x86 W2k trunk Message-ID: <20070713104912.8BE991E400E@bag.python.org> The Buildbot has detected a new failure of x86 W2k trunk. Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520W2k%2520trunk/builds/393 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista BUILD FAILED: failed svn sincerely, -The Buildbot From buildbot at python.org Fri Jul 13 12:49:12 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 10:49:12 +0000 Subject: [Python-checkins] buildbot failure in PPC64 Debian trunk Message-ID: <20070713104912.E88DE1E400E@bag.python.org> The Buildbot has detected a new failure of PPC64 Debian trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/PPC64%2520Debian%2520trunk/builds/46 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista BUILD FAILED: failed svn sincerely, -The Buildbot From buildbot at python.org Fri Jul 13 12:49:14 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 10:49:14 +0000 Subject: [Python-checkins] buildbot failure in ppc Debian unstable trunk Message-ID: <20070713104914.6277D1E400E@bag.python.org> The Buildbot has detected a new failure of ppc Debian unstable trunk. Full details are available at: http://www.python.org/dev/buildbot/all/ppc%2520Debian%2520unstable%2520trunk/builds/46 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista BUILD FAILED: failed svn sincerely, -The Buildbot From buildbot at python.org Fri Jul 13 12:49:17 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 10:49:17 +0000 Subject: [Python-checkins] buildbot failure in g4 osx.4 trunk Message-ID: <20070713104917.527A01E400A@bag.python.org> The Buildbot has detected a new failure of g4 osx.4 trunk. Full details are available at: http://www.python.org/dev/buildbot/all/g4%2520osx.4%2520trunk/builds/2130 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista BUILD FAILED: failed svn sincerely, -The Buildbot From buildbot at python.org Fri Jul 13 12:49:18 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 10:49:18 +0000 Subject: [Python-checkins] buildbot failure in alpha Debian trunk Message-ID: <20070713104918.D12971E400A@bag.python.org> The Buildbot has detected a new failure of alpha Debian trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/alpha%2520Debian%2520trunk/builds/41 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista BUILD FAILED: failed svn sincerely, -The Buildbot From buildbot at python.org Fri Jul 13 12:49:25 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 10:49:25 +0000 Subject: [Python-checkins] buildbot failure in sparc solaris10 gcc trunk Message-ID: <20070713104925.5B66A1E400A@bag.python.org> The Buildbot has detected a new failure of sparc solaris10 gcc trunk. Full details are available at: http://www.python.org/dev/buildbot/all/sparc%2520solaris10%2520gcc%2520trunk/builds/2121 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista BUILD FAILED: failed svn sincerely, -The Buildbot From buildbot at python.org Fri Jul 13 12:50:49 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 10:50:49 +0000 Subject: [Python-checkins] buildbot failure in alpha Tru64 5.1 trunk Message-ID: <20070713105049.E42F51E400A@bag.python.org> The Buildbot has detected a new failure of alpha Tru64 5.1 trunk. Full details are available at: http://www.python.org/dev/buildbot/all/alpha%2520Tru64%25205.1%2520trunk/builds/1708 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista BUILD FAILED: failed svn sincerely, -The Buildbot From buildbot at python.org Fri Jul 13 12:51:31 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 10:51:31 +0000 Subject: [Python-checkins] buildbot failure in x86 XP trunk Message-ID: <20070713105131.D26571E4006@bag.python.org> The Buildbot has detected a new failure of x86 XP trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520XP%2520trunk/builds/509 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista BUILD FAILED: failed svn sincerely, -The Buildbot From buildbot at python.org Fri Jul 13 12:53:21 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 10:53:21 +0000 Subject: [Python-checkins] buildbot failure in x86 XP-3 trunk Message-ID: <20070713105322.02A7E1E4006@bag.python.org> The Buildbot has detected a new failure of x86 XP-3 trunk. Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520XP-3%2520trunk/builds/76 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista BUILD FAILED: failed svn sincerely, -The Buildbot From buildbot at python.org Fri Jul 13 12:54:16 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 10:54:16 +0000 Subject: [Python-checkins] buildbot failure in amd64 XP trunk Message-ID: <20070713105417.15FF81E4006@bag.python.org> The Buildbot has detected a new failure of amd64 XP trunk. Full details are available at: http://www.python.org/dev/buildbot/all/amd64%2520XP%2520trunk/builds/44 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista BUILD FAILED: failed svn sincerely, -The Buildbot From buildbot at python.org Fri Jul 13 12:55:07 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 10:55:07 +0000 Subject: [Python-checkins] buildbot failure in ia64 Ubuntu trunk trunk Message-ID: <20070713105507.A4AB01E4006@bag.python.org> The Buildbot has detected a new failure of ia64 Ubuntu trunk trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/ia64%2520Ubuntu%2520trunk%2520trunk/builds/745 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista BUILD FAILED: failed svn sincerely, -The Buildbot From buildbot at python.org Fri Jul 13 12:55:09 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 10:55:09 +0000 Subject: [Python-checkins] buildbot failure in hppa Ubuntu dapper trunk Message-ID: <20070713105509.B1CD91E400D@bag.python.org> The Buildbot has detected a new failure of hppa Ubuntu dapper trunk. Full details are available at: http://www.python.org/dev/buildbot/all/hppa%2520Ubuntu%2520dapper%2520trunk/builds/10 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista BUILD FAILED: failed svn sincerely, -The Buildbot From python-checkins at python.org Fri Jul 13 14:09:42 2007 From: python-checkins at python.org (raymond.hettinger) Date: Fri, 13 Jul 2007 14:09:42 +0200 (CEST) Subject: [Python-checkins] r56345 - python/trunk/Doc/lib/libitertools.tex Message-ID: <20070713120942.35CA61E4011@bag.python.org> Author: raymond.hettinger Date: Fri Jul 13 14:09:41 2007 New Revision: 56345 Modified: python/trunk/Doc/lib/libitertools.tex Log: Correct the docs for takewhile(). Improve the recipe for nth(). Should be backported Modified: python/trunk/Doc/lib/libitertools.tex ============================================================================== --- python/trunk/Doc/lib/libitertools.tex (original) +++ python/trunk/Doc/lib/libitertools.tex Fri Jul 13 14:09:41 2007 @@ -117,7 +117,7 @@ Make an iterator that drops elements from the iterable as long as the predicate is true; afterwards, returns every element. Note, the iterator does not produce \emph{any} output until the predicate - is true, so it may have a lengthy start-up time. 
Equivalent to: + first becomes false, so it may have a lengthy start-up time. Equivalent to: \begin{verbatim} def dropwhile(predicate, iterable): @@ -509,8 +509,8 @@ return izip(mapping.iterkeys(), mapping.itervalues()) def nth(iterable, n): - "Returns the nth item or raise IndexError" - return list(islice(iterable, n, n+1))[0] + "Returns the nth item or raise StopIteration" + return islice(iterable, n, None).next() def all(seq, pred=None): "Returns True if pred(x) is true for every element in the iterable" From python-checkins at python.org Fri Jul 13 15:59:40 2007 From: python-checkins at python.org (thomas.heller) Date: Fri, 13 Jul 2007 15:59:40 +0200 (CEST) Subject: [Python-checkins] r56348 - python/trunk/Modules/_ctypes/_ctypes.c Message-ID: <20070713135940.2054A1E400A@bag.python.org> Author: thomas.heller Date: Fri Jul 13 15:59:39 2007 New Revision: 56348 Modified: python/trunk/Modules/_ctypes/_ctypes.c Log: Repair COMError. Since exceptions are new style classes now, setting the methods and docstring after the type creation does not work, they must be in the dictionary before creating the type. 
Modified: python/trunk/Modules/_ctypes/_ctypes.c ============================================================================== --- python/trunk/Modules/_ctypes/_ctypes.c (original) +++ python/trunk/Modules/_ctypes/_ctypes.c Fri Jul 13 15:59:39 2007 @@ -4520,11 +4520,6 @@ PyObject *s; int status; - ComError = PyErr_NewException("_ctypes.COMError", - NULL, - dict); - if (ComError == NULL) - return -1; while (methods->ml_name) { /* get a wrapper for the built-in function */ PyObject *func = PyCFunction_New(methods, NULL); @@ -4539,13 +4534,24 @@ Py_DECREF(meth); ++methods; } - Py_INCREF(ComError); + s = PyString_FromString(comerror_doc); if (s == NULL) return -1; status = PyDict_SetItemString(dict, "__doc__", s); Py_DECREF(s); - return status; + if (status == -1) { + Py_DECREF(dict); + return -1; + } + + ComError = PyErr_NewException("_ctypes.COMError", + NULL, + dict); + if (ComError == NULL) + return -1; + + return 0; } #endif From python-checkins at python.org Fri Jul 13 16:18:06 2007 From: python-checkins at python.org (thomas.heller) Date: Fri, 13 Jul 2007 16:18:06 +0200 (CEST) Subject: [Python-checkins] r56349 - python/trunk/Lib/ctypes/test/test_win32.py Message-ID: <20070713141806.5F2AF1E400A@bag.python.org> Author: thomas.heller Date: Fri Jul 13 16:18:06 2007 New Revision: 56349 Modified: python/trunk/Lib/ctypes/test/test_win32.py Log: Add tests for _ctypes.COMError. 
Modified: python/trunk/Lib/ctypes/test/test_win32.py ============================================================================== --- python/trunk/Lib/ctypes/test/test_win32.py (original) +++ python/trunk/Lib/ctypes/test/test_win32.py Fri Jul 13 16:18:06 2007 @@ -58,6 +58,15 @@ self.failUnlessEqual(sizeof(wintypes.LPARAM), sizeof(c_void_p)) + def test_COMError(self): + from _ctypes import COMError + self.assertEqual(COMError.__doc__, "Raised when a COM method call failed.") + + ex = COMError(-1, "text", ("details",)) + self.assertEqual(ex.hresult, -1) + self.assertEqual(ex.text, "text") + self.assertEqual(ex.details, ("details",)) + class Structures(unittest.TestCase): def test_struct_by_value(self): From python-checkins at python.org Fri Jul 13 18:50:44 2007 From: python-checkins at python.org (thomas.heller) Date: Fri, 13 Jul 2007 18:50:44 +0200 (CEST) Subject: [Python-checkins] r56350 - python/trunk/Lib/ctypes/test/test_find.py Message-ID: <20070713165044.7BFFC1E4014@bag.python.org> Author: thomas.heller Date: Fri Jul 13 18:50:43 2007 New Revision: 56350 Modified: python/trunk/Lib/ctypes/test/test_find.py Log: Do not try to load the GLUT library in the ctypes tests. This test adds little value, but has a large problem on OS X, as explained in SF# 1581906. 
Modified: python/trunk/Lib/ctypes/test/test_find.py ============================================================================== --- python/trunk/Lib/ctypes/test/test_find.py (original) +++ python/trunk/Lib/ctypes/test/test_find.py Fri Jul 13 18:50:43 2007 @@ -7,25 +7,21 @@ if sys.platform == "win32": lib_gl = find_library("OpenGL32") lib_glu = find_library("Glu32") - lib_glut = find_library("glut32") lib_gle = None elif sys.platform == "darwin": lib_gl = lib_glu = find_library("OpenGL") - lib_glut = find_library("GLUT") lib_gle = None else: lib_gl = find_library("GL") lib_glu = find_library("GLU") - lib_glut = find_library("glut") lib_gle = find_library("gle") ## print, for debugging if is_resource_enabled("printing"): - if lib_gl or lib_glu or lib_glut or lib_gle: + if lib_gl or lib_glu or lib_gle: print "OpenGL libraries:" for item in (("GL", lib_gl), ("GLU", lib_glu), - ("glut", lib_glut), ("gle", lib_gle)): print "\t", item @@ -33,24 +29,11 @@ # On some systems, loading the OpenGL libraries needs the RTLD_GLOBAL mode. class Test_OpenGL_libs(unittest.TestCase): def setUp(self): - self.gl = self.glu = self.gle = self.glut = None + self.gl = self.glu = self.gle = None if lib_gl: self.gl = CDLL(lib_gl, mode=RTLD_GLOBAL) if lib_glu: self.glu = CDLL(lib_glu, RTLD_GLOBAL) - if lib_glut: - # On some systems, additional libraries seem to be - # required, loading glut fails with - # "OSError: /usr/lib/libglut.so.3: undefined symbol: XGetExtensionVersion" - # I cannot figure out how to repair the test on these - # systems (red hat), so we ignore it when the glut or gle - # libraries cannot be loaded. 
See also: - # https://sourceforge.net/tracker/?func=detail&atid=105470&aid=1478253&group_id=5470 - # http://mail.python.org/pipermail/python-dev/2006-May/064789.html - try: - self.glut = CDLL(lib_glut) - except OSError: - pass if lib_gle: try: self.gle = CDLL(lib_gle) @@ -67,11 +50,6 @@ if self.glu: self.glu.gluBeginCurve - if lib_glut: - def test_glut(self): - if self.glut: - self.glut.glutWireTetrahedron - if lib_gle: def test_gle(self): if self.gle: From python-checkins at python.org Fri Jul 13 19:07:55 2007 From: python-checkins at python.org (thomas.heller) Date: Fri, 13 Jul 2007 19:07:55 +0200 (CEST) Subject: [Python-checkins] r56351 - in python/branches/release25-maint: Lib/ctypes/__init__.py Misc/NEWS Message-ID: <20070713170755.B4A5B1E400A@bag.python.org> Author: thomas.heller Date: Fri Jul 13 19:07:55 2007 New Revision: 56351 Modified: python/branches/release25-maint/Lib/ctypes/__init__.py python/branches/release25-maint/Misc/NEWS Log: Fix for SF# 1701409: segfault in c_char_p of ctypes. The repr output of c_char_p and c_wchar_p has changed as a sideeffect. 
Modified: python/branches/release25-maint/Lib/ctypes/__init__.py ============================================================================== --- python/branches/release25-maint/Lib/ctypes/__init__.py (original) +++ python/branches/release25-maint/Lib/ctypes/__init__.py Fri Jul 13 19:07:55 2007 @@ -226,6 +226,14 @@ class c_char_p(_SimpleCData): _type_ = "z" + if _os.name == "nt": + def __repr__(self): + if not windll.kernel32.IsBadStringPtrA(self, -1): + return "%s(%r)" % (self.__class__.__name__, self.value) + return "%s(%s)" % (self.__class__.__name__, cast(self, c_void_p).value) + else: + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, cast(self, c_void_p).value) _check_size(c_char_p, "P") class c_void_p(_SimpleCData): Modified: python/branches/release25-maint/Misc/NEWS ============================================================================== --- python/branches/release25-maint/Misc/NEWS (original) +++ python/branches/release25-maint/Misc/NEWS Fri Jul 13 19:07:55 2007 @@ -26,6 +26,10 @@ Library ------- +- Bug #1701409: Fix a segfault in printing ctypes.c_char_p and + ctypes.c_wchar_p when they point to an invalid location. As a + sideeffect the representation of these instances has changed. + - Bug #1734723: Fix repr.Repr() so it doesn't ignore the maxtuple attribute. - Bug #1728403: Fix a bug that CJKCodecs StreamReader hangs when it From buildbot at python.org Fri Jul 13 19:08:30 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 17:08:30 +0000 Subject: [Python-checkins] buildbot warnings in alpha Tru64 5.1 trunk Message-ID: <20070713170830.813A61E400A@bag.python.org> The Buildbot has detected a new failure of alpha Tru64 5.1 trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/alpha%2520Tru64%25205.1%2520trunk/builds/1711 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: thomas.heller Build had warnings: warnings test Excerpt from the test logfile: sincerely, -The Buildbot From python-checkins at python.org Fri Jul 13 19:12:24 2007 From: python-checkins at python.org (thomas.heller) Date: Fri, 13 Jul 2007 19:12:24 +0200 (CEST) Subject: [Python-checkins] r56352 - in python/trunk: Lib/ctypes/__init__.py Misc/NEWS Message-ID: <20070713171224.25DC11E400A@bag.python.org> Author: thomas.heller Date: Fri Jul 13 19:12:23 2007 New Revision: 56352 Modified: python/trunk/Lib/ctypes/__init__.py python/trunk/Misc/NEWS Log: Fix for SF# 1701409: segfault in c_char_p of ctypes. The repr output of c_char_p and c_wchar_p has changed as a sideeffect. Modified: python/trunk/Lib/ctypes/__init__.py ============================================================================== --- python/trunk/Lib/ctypes/__init__.py (original) +++ python/trunk/Lib/ctypes/__init__.py Fri Jul 13 19:12:23 2007 @@ -226,6 +226,14 @@ class c_char_p(_SimpleCData): _type_ = "z" + if _os.name == "nt": + def __repr__(self): + if not windll.kernel32.IsBadStringPtrA(self, -1): + return "%s(%r)" % (self.__class__.__name__, self.value) + return "%s(%s)" % (self.__class__.__name__, cast(self, c_void_p).value) + else: + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, cast(self, c_void_p).value) _check_size(c_char_p, "P") class c_void_p(_SimpleCData): Modified: python/trunk/Misc/NEWS ============================================================================== --- python/trunk/Misc/NEWS (original) +++ python/trunk/Misc/NEWS Fri Jul 13 19:12:23 2007 @@ -234,6 +234,10 @@ Library ------- +- Bug #1701409: Fix a segfault in printing ctypes.c_char_p and + ctypes.c_wchar_p when they point to an invalid location. 
As a + sideeffect the representation of these instances has changed. + - tarfile.py: Added "exclude" keyword argument to TarFile.add(). - Bug #1734723: Fix repr.Repr() so it doesn't ignore the maxtuple attribute. From buildbot at python.org Fri Jul 13 19:18:06 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 17:18:06 +0000 Subject: [Python-checkins] buildbot warnings in x86 mvlgcc trunk Message-ID: <20070713171806.9336E1E4012@bag.python.org> The Buildbot has detected a new failure of x86 mvlgcc trunk. Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520mvlgcc%2520trunk/builds/647 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: thomas.heller Build had warnings: warnings test Excerpt from the test logfile: Traceback (most recent call last): File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 66, in run self._RequestHandlerClass) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 22, in __init__ RequestHandlerClass) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 331, in __init__ self.server_bind() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/BaseHTTPServer.py", line 101, in server_bind SocketServer.TCPServer.server_bind(self) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 342, in server_bind self.socket.bind(self.server_address) File "", line 1, in bind error: (98, 'Address already in use') Traceback (most recent call last): File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 66, in run self._RequestHandlerClass) File 
"/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 22, in __init__ RequestHandlerClass) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 331, in __init__ self.server_bind() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/BaseHTTPServer.py", line 101, in server_bind SocketServer.TCPServer.server_bind(self) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 342, in server_bind self.socket.bind(self.server_address) File "", line 1, in bind error: (98, 'Address already in use') Traceback (most recent call last): File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 66, in run self._RequestHandlerClass) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 22, in __init__ RequestHandlerClass) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 331, in __init__ self.server_bind() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/BaseHTTPServer.py", line 101, in server_bind SocketServer.TCPServer.server_bind(self) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 342, in server_bind self.socket.bind(self.server_address) File "", line 1, in bind error: (98, 'Address already in use') Traceback (most recent call last): File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 66, in run self._RequestHandlerClass) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 22, in __init__ RequestHandlerClass) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 331, in __init__ self.server_bind() File 
"/home2/buildbot/slave/trunk.loewis-linux/build/Lib/BaseHTTPServer.py", line 101, in server_bind SocketServer.TCPServer.server_bind(self) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 342, in server_bind self.socket.bind(self.server_address) File "", line 1, in bind error: (98, 'Address already in use') 1 test failed: test_urllib2_localnet ====================================================================== ERROR: test_proxy_qop_auth_int_works_or_throws_urlerror (test.test_urllib2_localnet.ProxyAuthTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 252, in setUp raise self.server.error error: (98, 'Address already in use') ====================================================================== ERROR: test_proxy_qop_auth_works (test.test_urllib2_localnet.ProxyAuthTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 252, in setUp raise self.server.error error: (98, 'Address already in use') ====================================================================== ERROR: test_proxy_with_bad_password_raises_httperror (test.test_urllib2_localnet.ProxyAuthTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urllib2_localnet.py", line 252, in setUp raise self.server.error error: (98, 'Address already in use') ====================================================================== ERROR: test_proxy_with_no_password_raises_httperror (test.test_urllib2_localnet.ProxyAuthTests) ---------------------------------------------------------------------- Traceback (most recent call last): File 
"/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_urlException in thread Thread-119: Traceback (most recent call last): Traceback (most recent call last): File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/threading.py", line 478, in __bootstrap (self.getName(), _format_exc())) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/traceback.py", line 236, in format_exc return ''.join(format_exception(etype, value, tb, limit)) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/traceback.py", line 142, in format_exception list = list + format_tb(tb, limit) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/traceback.py", line 76, in format_tb return format_list(extract_tb(tb, limit)) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/traceback.py", line 101, in extract_tb line = linecache.getline(filename, lineno, f.f_globals) AttributeError: 'NoneType' object has no attribute 'getline' Traceback (most recent call last): File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/threading.py", line 478, in __bootstrap (self.getName(), _format_exc())) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/traceback.py", line 236, in format_exc return ''.join(format_exception(etype, value, tb, limit)) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/traceback.py", line 142, in format_exception list = list + format_tb(tb, limit) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/traceback.py", line 76, in format_tb return format_list(extract_tb(tb, limit)) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/traceback.py", line 101, in extract_tb line = linecache.getline(filename, lineno, f.f_globals) AttributeError: 'NoneType' object has no attribute 'getline' Traceback (most recent call last): File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/threading.py", line 478, in __bootstrap (self.getName(), _format_exc())) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/traceback.py", line 236, in 
format_exc return ''.join(format_exception(etype, value, tb, limit)) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/traceback.py", line 142, in format_exception list = list + format_tb(tb, limit) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/traceback.py", line 76, in format_tb return format_list(extract_tb(tb, limit)) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/traceback.py", line 101, in extract_tb line = linecache.getline(filename, lineno, f.f_globals) AttributeError: 'NoneType' object has no attribute 'getline' make: *** [buildbottest] Error 1 sincerely, -The Buildbot From python-checkins at python.org Fri Jul 13 19:33:13 2007 From: python-checkins at python.org (thomas.heller) Date: Fri, 13 Jul 2007 19:33:13 +0200 (CEST) Subject: [Python-checkins] r56353 - python/branches/release25-maint/Modules/_ctypes/callbacks.c python/branches/release25-maint/Modules/_ctypes/ctypes.h Message-ID: <20070713173313.CAA161E400A@bag.python.org> Author: thomas.heller Date: Fri Jul 13 19:33:13 2007 New Revision: 56353 Modified: python/branches/release25-maint/Modules/_ctypes/callbacks.c python/branches/release25-maint/Modules/_ctypes/ctypes.h Log: Fix for SF# 1649098: avoid zero-sized array declaration in structure. 
Modified: python/branches/release25-maint/Modules/_ctypes/callbacks.c ============================================================================== --- python/branches/release25-maint/Modules/_ctypes/callbacks.c (original) +++ python/branches/release25-maint/Modules/_ctypes/callbacks.c Fri Jul 13 19:33:13 2007 @@ -268,7 +268,7 @@ ffi_abi cc; nArgs = PySequence_Size(converters); - p = (ffi_info *)PyMem_Malloc(sizeof(ffi_info) + sizeof(ffi_type) * (nArgs + 1)); + p = (ffi_info *)PyMem_Malloc(sizeof(ffi_info) + sizeof(ffi_type) * (nArgs)); if (p == NULL) { PyErr_NoMemory(); return NULL; Modified: python/branches/release25-maint/Modules/_ctypes/ctypes.h ============================================================================== --- python/branches/release25-maint/Modules/_ctypes/ctypes.h (original) +++ python/branches/release25-maint/Modules/_ctypes/ctypes.h Fri Jul 13 19:33:13 2007 @@ -74,7 +74,7 @@ PyObject *callable; SETFUNC setfunc; ffi_type *restype; - ffi_type *atypes[0]; + ffi_type *atypes[1]; } ffi_info; typedef struct { From buildbot at python.org Fri Jul 13 19:34:43 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 17:34:43 +0000 Subject: [Python-checkins] buildbot warnings in ppc Debian unstable trunk Message-ID: <20070713173443.D6BC91E4012@bag.python.org> The Buildbot has detected a new failure of ppc Debian unstable trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/ppc%2520Debian%2520unstable%2520trunk/builds/49 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: thomas.heller Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_resource ====================================================================== ERROR: test_fsize_enforced (test.test_resource.ResourceTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/test/test_resource.py", line 59, in test_fsize_enforced f.close() IOError: [Errno 27] File too large make: *** [buildbottest] Error 1 sincerely, -The Buildbot From python-checkins at python.org Fri Jul 13 19:36:01 2007 From: python-checkins at python.org (thomas.heller) Date: Fri, 13 Jul 2007 19:36:01 +0200 (CEST) Subject: [Python-checkins] r56354 - python/branches/release25-maint/Misc/NEWS Message-ID: <20070713173601.B1CD81E400C@bag.python.org> Author: thomas.heller Date: Fri Jul 13 19:36:01 2007 New Revision: 56354 Modified: python/branches/release25-maint/Misc/NEWS Log: News entry for Fix for SF# 1649098: avoid zero-sized array declaration in structure. Modified: python/branches/release25-maint/Misc/NEWS ============================================================================== --- python/branches/release25-maint/Misc/NEWS (original) +++ python/branches/release25-maint/Misc/NEWS Fri Jul 13 19:36:01 2007 @@ -71,6 +71,9 @@ Extension Modules ----------------- +- Bug #1649098: Avoid declaration of zero-sized array declaration in + structure. + - Bug #1703286: ctypes no longer truncates 64-bit pointers. - Bug #1721309: prevent bsddb module from freeing random memory. 
From python-checkins at python.org Fri Jul 13 19:46:55 2007 From: python-checkins at python.org (thomas.heller) Date: Fri, 13 Jul 2007 19:46:55 +0200 (CEST) Subject: [Python-checkins] r56355 - in python/trunk: Misc/NEWS Modules/_ctypes/callbacks.c Modules/_ctypes/ctypes.h Message-ID: <20070713174655.4E69A1E4015@bag.python.org> Author: thomas.heller Date: Fri Jul 13 19:46:54 2007 New Revision: 56355 Modified: python/trunk/Misc/NEWS python/trunk/Modules/_ctypes/callbacks.c python/trunk/Modules/_ctypes/ctypes.h Log: Fix for SF# 1649098: avoid zero-sized array declaration in structure. Modified: python/trunk/Misc/NEWS ============================================================================== --- python/trunk/Misc/NEWS (original) +++ python/trunk/Misc/NEWS Fri Jul 13 19:46:54 2007 @@ -708,6 +708,9 @@ Extension Modules ----------------- +- Bug #1649098: Avoid declaration of zero-sized array declaration in + structure. + - Removed the rgbimg module; been deprecated since Python 2.5. - Bug #1721309: prevent bsddb module from freeing random memory. 
Modified: python/trunk/Modules/_ctypes/callbacks.c ============================================================================== --- python/trunk/Modules/_ctypes/callbacks.c (original) +++ python/trunk/Modules/_ctypes/callbacks.c Fri Jul 13 19:46:54 2007 @@ -268,7 +268,7 @@ ffi_abi cc; nArgs = PySequence_Size(converters); - p = (ffi_info *)PyMem_Malloc(sizeof(ffi_info) + sizeof(ffi_type) * (nArgs + 1)); + p = (ffi_info *)PyMem_Malloc(sizeof(ffi_info) + sizeof(ffi_type) * (nArgs)); if (p == NULL) { PyErr_NoMemory(); return NULL; Modified: python/trunk/Modules/_ctypes/ctypes.h ============================================================================== --- python/trunk/Modules/_ctypes/ctypes.h (original) +++ python/trunk/Modules/_ctypes/ctypes.h Fri Jul 13 19:46:54 2007 @@ -75,7 +75,7 @@ PyObject *callable; SETFUNC setfunc; ffi_type *restype; - ffi_type *atypes[0]; + ffi_type *atypes[1]; } ffi_info; typedef struct { From buildbot at python.org Fri Jul 13 20:09:05 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 18:09:05 +0000 Subject: [Python-checkins] buildbot warnings in ppc Debian unstable 2.5 Message-ID: <20070713180905.59CED1E400F@bag.python.org> The Buildbot has detected a new failure of ppc Debian unstable 2.5. 
Full details are available at: http://www.python.org/dev/buildbot/all/ppc%2520Debian%2520unstable%25202.5/builds/18 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch branches/release25-maint] HEAD Blamelist: georg.brandl,thomas.heller Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_resource Traceback (most recent call last): File "./Lib/test/regrtest.py", line 549, in runtest_inner the_package = __import__(abstest, globals(), locals(), []) File "/home/pybot/buildarea/2.5.klose-debian-ppc/build/Lib/test/test_resource.py", line 42, in f.close() IOError: [Errno 27] File too large make: *** [buildbottest] Error 1 sincerely, -The Buildbot From buildbot at python.org Fri Jul 13 20:24:03 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 18:24:03 +0000 Subject: [Python-checkins] buildbot warnings in hppa Ubuntu dapper trunk Message-ID: <20070713182403.EF1001E400A@bag.python.org> The Buildbot has detected a new failure of hppa Ubuntu dapper trunk. Full details are available at: http://www.python.org/dev/buildbot/all/hppa%2520Ubuntu%2520dapper%2520trunk/builds/13 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: thomas.heller Build had warnings: warnings test Excerpt from the test logfile: make: *** [buildbottest] Segmentation fault sincerely, -The Buildbot From buildbot at python.org Fri Jul 13 21:25:44 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 19:25:44 +0000 Subject: [Python-checkins] buildbot warnings in alpha Tru64 5.1 2.5 Message-ID: <20070713192544.D79001E4014@bag.python.org> The Buildbot has detected a new failure of alpha Tru64 5.1 2.5. 
Full details are available at: http://www.python.org/dev/buildbot/all/alpha%2520Tru64%25205.1%25202.5/builds/281 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch branches/release25-maint] HEAD Blamelist: thomas.heller Build had warnings: warnings test Excerpt from the test logfile: 2 tests failed: test_signal test_socket Traceback (most recent call last): File "./Lib/test/regrtest.py", line 549, in runtest_inner the_package = __import__(abstest, globals(), locals(), []) File "/net/taipan/scratch1/nnorwitz/python/2.5.norwitz-tru64/build/Lib/test/test_signal.py", line 143, in print "KeyboardInterrupt (the alarm() went off)" File "/net/taipan/scratch1/nnorwitz/python/2.5.norwitz-tru64/build/Lib/test/test_signal.py", line 49, in handlerB raise HandlerBCalled, args HandlerBCalled: (30, ) sincerely, -The Buildbot From buildbot at python.org Fri Jul 13 21:33:30 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 19:33:30 +0000 Subject: [Python-checkins] buildbot warnings in alpha Debian trunk Message-ID: <20070713193330.8E0201E4010@bag.python.org> The Buildbot has detected a new failure of alpha Debian trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/alpha%2520Debian%2520trunk/builds/44 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: thomas.heller Build had warnings: warnings test Excerpt from the test logfile: 2 tests failed: test_pow test_resource ====================================================================== ERROR: test_bug705231 (test.test_pow.PowTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/pybot/buildarea/trunk.klose-debian-alpha/build/Lib/test/test_pow.py", line 109, in test_bug705231 eq(pow(a, 1.23e167), 1.0) ValueError: negative number cannot be raised to a fractional power ====================================================================== ERROR: test_fsize_enforced (test.test_resource.ResourceTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/pybot/buildarea/trunk.klose-debian-alpha/build/Lib/test/test_resource.py", line 59, in test_fsize_enforced f.close() IOError: [Errno 27] File too large sincerely, -The Buildbot From python-checkins at python.org Fri Jul 13 21:51:55 2007 From: python-checkins at python.org (thomas.heller) Date: Fri, 13 Jul 2007 21:51:55 +0200 (CEST) Subject: [Python-checkins] r56357 - python/trunk/Modules/_ctypes/stgdict.c Message-ID: <20070713195155.596481E400A@bag.python.org> Author: thomas.heller Date: Fri Jul 13 21:51:55 2007 New Revision: 56357 Modified: python/trunk/Modules/_ctypes/stgdict.c Log: PyType_stgdict() returns a borrowed reference which must not be Py_DECREF'd. 
Modified: python/trunk/Modules/_ctypes/stgdict.c ============================================================================== --- python/trunk/Modules/_ctypes/stgdict.c (original) +++ python/trunk/Modules/_ctypes/stgdict.c Fri Jul 13 21:51:55 2007 @@ -469,13 +469,11 @@ if (!prop) { Py_DECREF(pair); - Py_DECREF((PyObject *)stgdict); return -1; } if (-1 == PyDict_SetItem(realdict, name, prop)) { Py_DECREF(prop); Py_DECREF(pair); - Py_DECREF((PyObject *)stgdict); return -1; } Py_DECREF(pair); From python-checkins at python.org Fri Jul 13 21:52:42 2007 From: python-checkins at python.org (thomas.heller) Date: Fri, 13 Jul 2007 21:52:42 +0200 (CEST) Subject: [Python-checkins] r56358 - python/branches/release25-maint/Modules/_ctypes/stgdict.c Message-ID: <20070713195242.10E431E400A@bag.python.org> Author: thomas.heller Date: Fri Jul 13 21:52:41 2007 New Revision: 56358 Modified: python/branches/release25-maint/Modules/_ctypes/stgdict.c Log: PyType_stgdict() returns a borrowed reference which must not be Py_DECREF'd. Modified: python/branches/release25-maint/Modules/_ctypes/stgdict.c ============================================================================== --- python/branches/release25-maint/Modules/_ctypes/stgdict.c (original) +++ python/branches/release25-maint/Modules/_ctypes/stgdict.c Fri Jul 13 21:52:41 2007 @@ -465,13 +465,11 @@ if (!prop) { Py_DECREF(pair); - Py_DECREF((PyObject *)stgdict); return -1; } if (-1 == PyDict_SetItem(realdict, name, prop)) { Py_DECREF(prop); Py_DECREF(pair); - Py_DECREF((PyObject *)stgdict); return -1; } Py_DECREF(pair); From buildbot at python.org Fri Jul 13 22:57:12 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 20:57:12 +0000 Subject: [Python-checkins] buildbot warnings in amd64 XP trunk Message-ID: <20070713205712.748051E400A@bag.python.org> The Buildbot has detected a new failure of amd64 XP trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/amd64%2520XP%2520trunk/builds/47 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: thomas.heller Build had warnings: warnings test Excerpt from the test logfile: 2 tests failed: test_ctypes test_winsound ====================================================================== ERROR: test_extremes (test.test_winsound.BeepTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 18, in test_extremes winsound.Beep(37, 75) RuntimeError: Failed to beep ====================================================================== ERROR: test_increasingfrequency (test.test_winsound.BeepTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 23, in test_increasingfrequency winsound.Beep(i, 75) RuntimeError: Failed to beep ====================================================================== ERROR: test_alias_asterisk (test.test_winsound.PlaySoundTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 64, in test_alias_asterisk winsound.PlaySound('SystemAsterisk', winsound.SND_ALIAS) RuntimeError: Failed to play sound ====================================================================== ERROR: test_alias_exclamation (test.test_winsound.PlaySoundTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 74, in test_alias_exclamation winsound.PlaySound('SystemExclamation', winsound.SND_ALIAS) 
RuntimeError: Failed to play sound ====================================================================== ERROR: test_alias_exit (test.test_winsound.PlaySoundTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 84, in test_alias_exit winsound.PlaySound('SystemExit', winsound.SND_ALIAS) RuntimeError: Failed to play sound ====================================================================== ERROR: test_alias_hand (test.test_winsound.PlaySoundTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 94, in test_alias_hand winsound.PlaySound('SystemHand', winsound.SND_ALIAS) RuntimeError: Failed to play sound ====================================================================== ERROR: test_alias_question (test.test_winsound.PlaySoundTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 104, in test_alias_question winsound.PlaySound('SystemQuestion', winsound.SND_ALIAS) RuntimeError: Failed to play sound sincerely, -The Buildbot From python-checkins at python.org Fri Jul 13 23:20:38 2007 From: python-checkins at python.org (barry.warsaw) Date: Fri, 13 Jul 2007 23:20:38 +0200 (CEST) Subject: [Python-checkins] r56359 - python/branches/release25-maint/Lib/email/test/test_email.py python/branches/release25-maint/Lib/email/test/test_email_renamed.py Message-ID: <20070713212038.CEEC21E4010@bag.python.org> Author: barry.warsaw Date: Fri Jul 13 23:20:38 2007 New Revision: 56359 Modified: python/branches/release25-maint/Lib/email/test/test_email.py python/branches/release25-maint/Lib/email/test/test_email_renamed.py Log: In response to this SF bug: [ 1752723 ] 
email.message_from_string: initial line gets discarded I added a test to assert that when the first line of text passed to message_from_string() contains a leading space, the message ends up with the appropriate FirstHeaderLineIsContinuationDefect on its defects list. The bug is invalid. Modified: python/branches/release25-maint/Lib/email/test/test_email.py ============================================================================== --- python/branches/release25-maint/Lib/email/test/test_email.py (original) +++ python/branches/release25-maint/Lib/email/test/test_email.py Fri Jul 13 23:20:38 2007 @@ -1492,6 +1492,18 @@ self.failUnless(isinstance(bad.defects[0], Errors.StartBoundaryNotFoundDefect)) + def test_first_line_is_continuation_header(self): + eq = self.assertEqual + m = ' Line 1\nLine 2\nLine 3' + msg = email.message_from_string(m) + eq(msg.keys(), []) + eq(msg.get_payload(), 'Line 2\nLine 3') + eq(len(msg.defects), 1) + self.failUnless(isinstance(msg.defects[0], + Errors.FirstHeaderLineIsContinuationDefect)) + eq(msg.defects[0].line, ' Line 1\n') + + # Test RFC 2047 header encoding and decoding Modified: python/branches/release25-maint/Lib/email/test/test_email_renamed.py ============================================================================== --- python/branches/release25-maint/Lib/email/test/test_email_renamed.py (original) +++ python/branches/release25-maint/Lib/email/test/test_email_renamed.py Fri Jul 13 23:20:38 2007 @@ -1490,6 +1490,17 @@ self.failUnless(isinstance(bad.defects[0], errors.StartBoundaryNotFoundDefect)) + def test_first_line_is_continuation_header(self): + eq = self.assertEqual + m = ' Line 1\nLine 2\nLine 3' + msg = email.message_from_string(m) + eq(msg.keys(), []) + eq(msg.get_payload(), 'Line 2\nLine 3') + eq(len(msg.defects), 1) + self.failUnless(isinstance(msg.defects[0], + errors.FirstHeaderLineIsContinuationDefect)) + eq(msg.defects[0].line, ' Line 1\n') + # Test RFC 2047 header encoding and decoding From python-checkins 
at python.org Sat Jul 14 00:12:58 2007 From: python-checkins at python.org (barry.warsaw) Date: Sat, 14 Jul 2007 00:12:58 +0200 (CEST) Subject: [Python-checkins] r56360 - python/trunk/Lib/email/test/test_email.py python/trunk/Lib/email/test/test_email_renamed.py Message-ID: <20070713221258.B60D91E4011@bag.python.org> Author: barry.warsaw Date: Sat Jul 14 00:12:58 2007 New Revision: 56360 Modified: python/trunk/Lib/email/test/test_email.py python/trunk/Lib/email/test/test_email_renamed.py Log: In response to this SF bug: [ 1752723 ] email.message_from_string: initial line gets discarded I added a test to assert that when the first line of text passed to message_from_string() contains a leading space, the message ends up with the appropriate FirstHeaderLineIsContinuationDefect on its defects list. The bug is invalid. Modified: python/trunk/Lib/email/test/test_email.py ============================================================================== --- python/trunk/Lib/email/test/test_email.py (original) +++ python/trunk/Lib/email/test/test_email.py Sat Jul 14 00:12:58 2007 @@ -1492,6 +1492,18 @@ self.failUnless(isinstance(bad.defects[0], Errors.StartBoundaryNotFoundDefect)) + def test_first_line_is_continuation_header(self): + eq = self.assertEqual + m = ' Line 1\nLine 2\nLine 3' + msg = email.message_from_string(m) + eq(msg.keys(), []) + eq(msg.get_payload(), 'Line 2\nLine 3') + eq(len(msg.defects), 1) + self.failUnless(isinstance(msg.defects[0], + Errors.FirstHeaderLineIsContinuationDefect)) + eq(msg.defects[0].line, ' Line 1\n') + + # Test RFC 2047 header encoding and decoding Modified: python/trunk/Lib/email/test/test_email_renamed.py ============================================================================== --- python/trunk/Lib/email/test/test_email_renamed.py (original) +++ python/trunk/Lib/email/test/test_email_renamed.py Sat Jul 14 00:12:58 2007 @@ -1490,6 +1490,17 @@ self.failUnless(isinstance(bad.defects[0], errors.StartBoundaryNotFoundDefect)) + def 
test_first_line_is_continuation_header(self): + eq = self.assertEqual + m = ' Line 1\nLine 2\nLine 3' + msg = email.message_from_string(m) + eq(msg.keys(), []) + eq(msg.get_payload(), 'Line 2\nLine 3') + eq(len(msg.defects), 1) + self.failUnless(isinstance(msg.defects[0], + errors.FirstHeaderLineIsContinuationDefect)) + eq(msg.defects[0].line, ' Line 1\n') + # Test RFC 2047 header encoding and decoding From buildbot at python.org Sat Jul 14 00:59:30 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 13 Jul 2007 22:59:30 +0000 Subject: [Python-checkins] buildbot warnings in hppa Ubuntu dapper 2.5 Message-ID: <20070713225931.0D60F1E4066@bag.python.org> The Buildbot has detected a new failure of hppa Ubuntu dapper 2.5. Full details are available at: http://www.python.org/dev/buildbot/all/hppa%2520Ubuntu%2520dapper%25202.5/builds/9 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch branches/release25-maint] HEAD Blamelist: thomas.heller Build had warnings: warnings test Excerpt from the test logfile: sincerely, -The Buildbot From python-checkins at python.org Sat Jul 14 13:31:35 2007 From: python-checkins at python.org (raymond.hettinger) Date: Sat, 14 Jul 2007 13:31:35 +0200 (CEST) Subject: [Python-checkins] r56363 - python/branches/release25-maint/Doc/lib/libitertools.tex Message-ID: <20070714113135.64BA31E4015@bag.python.org> Author: raymond.hettinger Date: Sat Jul 14 13:31:35 2007 New Revision: 56363 Modified: python/branches/release25-maint/Doc/lib/libitertools.tex Log: Backport 56345 Modified: python/branches/release25-maint/Doc/lib/libitertools.tex ============================================================================== --- python/branches/release25-maint/Doc/lib/libitertools.tex (original) +++ python/branches/release25-maint/Doc/lib/libitertools.tex Sat Jul 14 13:31:35 2007 @@ -117,7 +117,7 @@ Make an iterator that drops elements from the iterable as long as the predicate is true; afterwards, 
returns every element. Note, the iterator does not produce \emph{any} output until the predicate - is true, so it may have a lengthy start-up time. Equivalent to: + first becomes false, so it may have a lengthy start-up time. Equivalent to: \begin{verbatim} def dropwhile(predicate, iterable): @@ -474,8 +474,8 @@ return izip(mapping.iterkeys(), mapping.itervalues()) def nth(iterable, n): - "Returns the nth item or raise IndexError" - return list(islice(iterable, n, n+1))[0] + "Returns the nth item or raise StopIteration" + return islice(iterable, n, None).next() def all(seq, pred=None): "Returns True if pred(x) is true for every element in the iterable" From python-checkins at python.org Sat Jul 14 19:12:23 2007 From: python-checkins at python.org (georg.brandl) Date: Sat, 14 Jul 2007 19:12:23 +0200 (CEST) Subject: [Python-checkins] r56364 - python/trunk/Doc/lib/libsubprocess.tex Message-ID: <20070714171223.552201E400B@bag.python.org> Author: georg.brandl Date: Sat Jul 14 19:12:23 2007 New Revision: 56364 Modified: python/trunk/Doc/lib/libsubprocess.tex Log: Bug #1753406: missing \versionadded for subprocess.check_call. 
Modified: python/trunk/Doc/lib/libsubprocess.tex ============================================================================== --- python/trunk/Doc/lib/libsubprocess.tex (original) +++ python/trunk/Doc/lib/libsubprocess.tex Sat Jul 14 19:12:23 2007 @@ -147,6 +147,8 @@ \begin{verbatim} check_call(["ls", "-l"]) \end{verbatim} + +\versionadded{2.5} \end{funcdesc} \subsubsection{Exceptions} From python-checkins at python.org Sat Jul 14 19:12:27 2007 From: python-checkins at python.org (georg.brandl) Date: Sat, 14 Jul 2007 19:12:27 +0200 (CEST) Subject: [Python-checkins] r56365 - python/branches/release25-maint/Doc/lib/libsubprocess.tex Message-ID: <20070714171227.79B831E4010@bag.python.org> Author: georg.brandl Date: Sat Jul 14 19:12:27 2007 New Revision: 56365 Modified: python/branches/release25-maint/Doc/lib/libsubprocess.tex Log: Bug #1753406: missing \versionadded for subprocess.check_call. (backport from rev. 56364) Modified: python/branches/release25-maint/Doc/lib/libsubprocess.tex ============================================================================== --- python/branches/release25-maint/Doc/lib/libsubprocess.tex (original) +++ python/branches/release25-maint/Doc/lib/libsubprocess.tex Sat Jul 14 19:12:27 2007 @@ -144,6 +144,8 @@ \begin{verbatim} check_call(["ls", "-l"]) \end{verbatim} + +\versionadded{2.5} \end{funcdesc} \subsubsection{Exceptions} From python-checkins at python.org Sat Jul 14 19:32:42 2007 From: python-checkins at python.org (georg.brandl) Date: Sat, 14 Jul 2007 19:32:42 +0200 (CEST) Subject: [Python-checkins] r56366 - python/trunk/Doc/lib/libwebbrowser.tex Message-ID: <20070714173242.118C81E400B@bag.python.org> Author: georg.brandl Date: Sat Jul 14 19:32:41 2007 New Revision: 56366 Modified: python/trunk/Doc/lib/libwebbrowser.tex Log: Clarify webbrowser.open description. 
Modified: python/trunk/Doc/lib/libwebbrowser.tex ============================================================================== --- python/trunk/Doc/lib/libwebbrowser.tex (original) +++ python/trunk/Doc/lib/libwebbrowser.tex Sat Jul 14 19:32:41 2007 @@ -47,7 +47,7 @@ \begin{funcdesc}{open}{url\optional{, new=0\optional{, autoraise=1}}} Display \var{url} using the default browser. If \var{new} is 0, the - \var{url} is opened in the same browser window. If \var{new} is 1, + \var{url} is opened in the same browser window if possible. If \var{new} is 1, a new browser window is opened if possible. If \var{new} is 2, a new browser page ("tab") is opened if possible. If \var{autoraise} is true, the window is raised if possible (note that under many window From python-checkins at python.org Sat Jul 14 19:32:47 2007 From: python-checkins at python.org (georg.brandl) Date: Sat, 14 Jul 2007 19:32:47 +0200 (CEST) Subject: [Python-checkins] r56367 - python/branches/release25-maint/Doc/lib/libwebbrowser.tex Message-ID: <20070714173247.561F31E400B@bag.python.org> Author: georg.brandl Date: Sat Jul 14 19:32:47 2007 New Revision: 56367 Modified: python/branches/release25-maint/Doc/lib/libwebbrowser.tex Log: Clarify webbrowser.open description. (backport from rev. 56366) Modified: python/branches/release25-maint/Doc/lib/libwebbrowser.tex ============================================================================== --- python/branches/release25-maint/Doc/lib/libwebbrowser.tex (original) +++ python/branches/release25-maint/Doc/lib/libwebbrowser.tex Sat Jul 14 19:32:47 2007 @@ -47,7 +47,7 @@ \begin{funcdesc}{open}{url\optional{, new=0\optional{, autoraise=1}}} Display \var{url} using the default browser. If \var{new} is 0, the - \var{url} is opened in the same browser window. If \var{new} is 1, + \var{url} is opened in the same browser window if possible. If \var{new} is 1, a new browser window is opened if possible. If \var{new} is 2, a new browser page ("tab") is opened if possible. 
If \var{autoraise} is true, the window is raised if possible (note that under many window From python-checkins at python.org Sat Jul 14 20:22:12 2007 From: python-checkins at python.org (collin.winter) Date: Sat, 14 Jul 2007 20:22:12 +0200 (CEST) Subject: [Python-checkins] r56368 - in sandbox/trunk/2to3: fixes/fix_dict.py fixes/fix_filter.py fixes/fix_map.py patcomp.py Message-ID: <20070714182212.94A461E400C@bag.python.org> Author: collin.winter Date: Sat Jul 14 20:22:12 2007 New Revision: 56368 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/fixes/fix_dict.py sandbox/trunk/2to3/fixes/fix_filter.py sandbox/trunk/2to3/fixes/fix_map.py sandbox/trunk/2to3/patcomp.py Log: Add a compile_pattern() shortcut function to patcomp.py (and use it in several fixers). Modified: sandbox/trunk/2to3/fixes/fix_dict.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_dict.py (original) +++ sandbox/trunk/2to3/fixes/fix_dict.py Sat Jul 14 20:22:12 2007 @@ -68,13 +68,13 @@ return new P1 = "power< func=NAME trailer< '(' node=any ')' > any* >" - p1 = patcomp.PatternCompiler().compile_pattern(P1) + p1 = patcomp.compile_pattern(P1) P2 = """for_stmt< 'for' any 'in' node=any ':' any* > | list_for< 'for' any 'in' node=any any* > | gen_for< 'for' any 'in' node=any any* > """ - p2 = patcomp.PatternCompiler().compile_pattern(P2) + p2 = patcomp.compile_pattern(P2) def in_special_context(self, node, isiter): if node.parent is None: Modified: sandbox/trunk/2to3/fixes/fix_filter.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_filter.py (original) +++ sandbox/trunk/2to3/fixes/fix_filter.py Sat Jul 14 20:22:12 2007 @@ -62,7 +62,7 @@ | list_for< 'for' any 'in' node=any any* > | gen_for< 'for' any 'in' node=any any* > """ - p0 = patcomp.PatternCompiler().compile_pattern(P0) + p0 = patcomp.compile_pattern(P0) P1 = """ power< @@ -71,7 +71,7 @@ any* > """ - p1 = 
patcomp.PatternCompiler().compile_pattern(P1) + p1 = patcomp.compile_pattern(P1) P2 = """ power< @@ -80,7 +80,7 @@ any* > """ - p2 = patcomp.PatternCompiler().compile_pattern(P2) + p2 = patcomp.compile_pattern(P2) def in_special_context(self, node): p = node.parent Modified: sandbox/trunk/2to3/fixes/fix_map.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_map.py (original) +++ sandbox/trunk/2to3/fixes/fix_map.py Sat Jul 14 20:22:12 2007 @@ -74,7 +74,7 @@ | list_for< 'for' any 'in' node=any any* > | gen_for< 'for' any 'in' node=any any* > """ - p0 = patcomp.PatternCompiler().compile_pattern(P0) + p0 = patcomp.compile_pattern(P0) P1 = """ power< @@ -83,7 +83,7 @@ any* > """ - p1 = patcomp.PatternCompiler().compile_pattern(P1) + p1 = patcomp.compile_pattern(P1) P2 = """ power< @@ -92,7 +92,7 @@ any* > """ - p2 = patcomp.PatternCompiler().compile_pattern(P2) + p2 = patcomp.compile_pattern(P2) def in_special_context(self, node): p = node.parent Modified: sandbox/trunk/2to3/patcomp.py ============================================================================== --- sandbox/trunk/2to3/patcomp.py (original) +++ sandbox/trunk/2to3/patcomp.py Sat Jul 14 20:22:12 2007 @@ -182,15 +182,5 @@ return pytree.Leaf(type, value, context=context) -_SAMPLE = """(a=(power< ('apply' trailer<'(' b=(not STRING) ')'> ) >){1}) -{1,1}""" - - -def _test(): - pc = PatternCompiler() - pat = pc.compile_pattern(_SAMPLE) - print pat - - -if __name__ == "__main__": - _test() +def compile_pattern(pattern): + return PatternCompiler().compile_pattern(pattern) From python-checkins at python.org Sat Jul 14 20:22:28 2007 From: python-checkins at python.org (collin.winter) Date: Sat, 14 Jul 2007 20:22:28 +0200 (CEST) Subject: [Python-checkins] r56369 - in sandbox/trunk/2to3: tests/test_fixers.py Message-ID: <20070714182228.1593E1E400C@bag.python.org> Author: collin.winter Date: Sat Jul 14 20:22:27 2007 New Revision: 56369 Modified: 
sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/tests/test_fixers.py Log: Tighten down the fixer tests to make sure we're tracking all warnings. Modified: sandbox/trunk/2to3/tests/test_fixers.py ============================================================================== --- sandbox/trunk/2to3/tests/test_fixers.py (original) +++ sandbox/trunk/2to3/tests/test_fixers.py Sat Jul 14 20:22:27 2007 @@ -49,15 +49,22 @@ sh.setFormatter(logging.Formatter("%(message)s")) self.refactor.fixers = [Fixer(f, sh) for f in self.refactor.fixers] - def check(self, before, after): + def tearDown(self): + self.logging_stream = None + + def _check(self, before, after): before = support.reformat(before) after = support.reformat(after) refactored = self.refactor_stream("", StringIO(before)) self.failUnlessEqual(after, refactored) - def warns(self, before, after, message): - self.check(before, after) + def check(self, before, after, ignore_warnings=False): + self._check(before, after) + if not ignore_warnings: + self.failUnlessEqual(self.logging_stream.getvalue(), "") + def warns(self, before, after, message): + self._check(before, after) self.failUnless(message in self.logging_stream.getvalue()) def refactor_stream(self, stream_name, stream): @@ -1422,7 +1429,7 @@ foo(a) a.__next__() """ - self.check(b, a) + self.check(b, a, ignore_warnings=True) def test_prefix_preservation_4(self): b = """ @@ -1439,7 +1446,7 @@ # def a.__next__() """ - self.check(b, a) + self.check(b, a, ignore_warnings=True) def test_prefix_preservation_5(self): b = """ @@ -1454,7 +1461,7 @@ foo(foo(a), # abc a.__next__()) """ - self.check(b, a) + self.check(b, a, ignore_warnings=True) def test_prefix_preservation_6(self): b = """ From python-checkins at python.org Sat Jul 14 20:22:44 2007 From: python-checkins at python.org (collin.winter) Date: Sat, 14 Jul 2007 20:22:44 +0200 (CEST) Subject: [Python-checkins] r56370 - in sandbox/trunk/2to3: fixes/fix_apply.py fixes/fix_callable.py fixes/fix_dict.py 
fixes/fix_dummy.py fixes/fix_except.py fixes/fix_exec.py fixes/fix_filter.py fixes/fix_has_key.py fixes/fix_input.py fixes/fix_intern.py fixes/fix_long.py fixes/fix_map.py fixes/fix_ne.py fixes/fix_next.py fixes/fix_nonzero.py fixes/fix_numliterals.py fixes/fix_print.py fixes/fix_raise.py fixes/fix_raw_input.py fixes/fix_repr.py fixes/fix_sysexcattrs.py fixes/fix_throw.py fixes/fix_tuple_params.py fixes/fix_unicode.py fixes/fix_xrange.py refactor.py Message-ID: <20070714182244.7F3381E400C@bag.python.org> Author: collin.winter Date: Sat Jul 14 20:22:43 2007 New Revision: 56370 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/fixes/fix_apply.py sandbox/trunk/2to3/fixes/fix_callable.py sandbox/trunk/2to3/fixes/fix_dict.py sandbox/trunk/2to3/fixes/fix_dummy.py sandbox/trunk/2to3/fixes/fix_except.py sandbox/trunk/2to3/fixes/fix_exec.py sandbox/trunk/2to3/fixes/fix_filter.py sandbox/trunk/2to3/fixes/fix_has_key.py sandbox/trunk/2to3/fixes/fix_input.py sandbox/trunk/2to3/fixes/fix_intern.py sandbox/trunk/2to3/fixes/fix_long.py sandbox/trunk/2to3/fixes/fix_map.py sandbox/trunk/2to3/fixes/fix_ne.py sandbox/trunk/2to3/fixes/fix_next.py sandbox/trunk/2to3/fixes/fix_nonzero.py sandbox/trunk/2to3/fixes/fix_numliterals.py sandbox/trunk/2to3/fixes/fix_print.py sandbox/trunk/2to3/fixes/fix_raise.py sandbox/trunk/2to3/fixes/fix_raw_input.py sandbox/trunk/2to3/fixes/fix_repr.py sandbox/trunk/2to3/fixes/fix_sysexcattrs.py sandbox/trunk/2to3/fixes/fix_throw.py sandbox/trunk/2to3/fixes/fix_tuple_params.py sandbox/trunk/2to3/fixes/fix_unicode.py sandbox/trunk/2to3/fixes/fix_xrange.py sandbox/trunk/2to3/refactor.py Log: Pass the results dict from the fixer's match() method to transform() via refactor.py; this saves having to do the match twice. 
Modified: sandbox/trunk/2to3/fixes/fix_apply.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_apply.py (original) +++ sandbox/trunk/2to3/fixes/fix_apply.py Sat Jul 14 20:22:43 2007 @@ -27,9 +27,8 @@ > """ - def transform(self, node): + def transform(self, node, results): syms = self.syms - results = self.match(node) assert results func = results["func"] args = results["args"] Modified: sandbox/trunk/2to3/fixes/fix_callable.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_callable.py (original) +++ sandbox/trunk/2to3/fixes/fix_callable.py Sat Jul 14 20:22:43 2007 @@ -24,8 +24,7 @@ > """ - def transform(self, node): - results = self.match(node) + def transform(self, node, results): func = results["func"] args = [func.clone(), String(', '), String("'__call__'")] Modified: sandbox/trunk/2to3/fixes/fix_dict.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_dict.py (original) +++ sandbox/trunk/2to3/fixes/fix_dict.py Sat Jul 14 20:22:43 2007 @@ -39,8 +39,7 @@ > """ - def transform(self, node): - results = self.match(node) + def transform(self, node, results): head = results["head"] method = results["method"][0] # Extract node for method name tail = results["tail"] Modified: sandbox/trunk/2to3/fixes/fix_dummy.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_dummy.py (original) +++ sandbox/trunk/2to3/fixes/fix_dummy.py Sat Jul 14 20:22:43 2007 @@ -12,5 +12,5 @@ def match(self, node): return True - def transform(self, node): + def transform(self, node, results): node.changed() Modified: sandbox/trunk/2to3/fixes/fix_except.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_except.py (original) +++ sandbox/trunk/2to3/fixes/fix_except.py Sat Jul 14 20:22:43 
2007 @@ -42,10 +42,9 @@ | 'finally' ':' suite) > """ - def transform(self, node): - syms = self.syms - results = self.match(node) + def transform(self, node, results): assert results + syms = self.syms try_cleanup = [ch.clone() for ch in results['cleanup']] for except_clause, e_suite in find_excepts(try_cleanup): Modified: sandbox/trunk/2to3/fixes/fix_exec.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_exec.py (original) +++ sandbox/trunk/2to3/fixes/fix_exec.py Sat Jul 14 20:22:43 2007 @@ -23,10 +23,9 @@ exec_stmt< 'exec' (not atom<'(' [any] ')'>) a=any > """ - def transform(self, node): - syms = self.syms - results = self.match(node) + def transform(self, node, results): assert results + syms = self.syms a = results["a"] b = results.get("b") c = results.get("c") Modified: sandbox/trunk/2to3/fixes/fix_filter.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_filter.py (original) +++ sandbox/trunk/2to3/fixes/fix_filter.py Sat Jul 14 20:22:43 2007 @@ -42,8 +42,7 @@ > """ - def transform(self, node): - results = self.match(node) + def transform(self, node, results): if "filter_lambda" in results: new = ListComp(results.get("fp").clone(), results.get("fp").clone(), Modified: sandbox/trunk/2to3/fixes/fix_has_key.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_has_key.py (original) +++ sandbox/trunk/2to3/fixes/fix_has_key.py Sat Jul 14 20:22:43 2007 @@ -68,10 +68,9 @@ > """ - def transform(self, node): - syms = self.syms - results = self.match(node) + def transform(self, node, results): assert results + syms = self.syms if (node.parent.type == syms.not_test and self.pattern.match(node.parent)): # Don't transform a node matching the first alternative of the Modified: sandbox/trunk/2to3/fixes/fix_input.py 
============================================================================== --- sandbox/trunk/2to3/fixes/fix_input.py (original) +++ sandbox/trunk/2to3/fixes/fix_input.py Sat Jul 14 20:22:43 2007 @@ -14,7 +14,7 @@ > """ - def transform(self, node): + def transform(self, node, results): new = node.clone() new.set_prefix("") new = Call(Name("eval"), [new]) Modified: sandbox/trunk/2to3/fixes/fix_intern.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_intern.py (original) +++ sandbox/trunk/2to3/fixes/fix_intern.py Sat Jul 14 20:22:43 2007 @@ -23,10 +23,9 @@ > """ - def transform(self, node): - syms = self.syms - results = self.match(node) + def transform(self, node, results): assert results + syms = self.syms obj = results["obj"].clone() if obj.type == syms.arglist: newarglist = obj.clone() Modified: sandbox/trunk/2to3/fixes/fix_long.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_long.py (original) +++ sandbox/trunk/2to3/fixes/fix_long.py Sat Jul 14 20:22:43 2007 @@ -21,8 +21,7 @@ static_long = Name("long") static_int = Name("int") - def transform(self, node): - results = self.match(node) + def transform(self, node, results): long_type = results.get("long_type") number = results.get("number") new = None Modified: sandbox/trunk/2to3/fixes/fix_map.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_map.py (original) +++ sandbox/trunk/2to3/fixes/fix_map.py Sat Jul 14 20:22:43 2007 @@ -52,8 +52,7 @@ > """ - def transform(self, node): - results = self.match(node) + def transform(self, node, results): if "map_lambda" in results: new = ListComp(results.get("xp").clone(), results.get("fp").clone(), Modified: sandbox/trunk/2to3/fixes/fix_ne.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_ne.py (original) +++ 
sandbox/trunk/2to3/fixes/fix_ne.py Sat Jul 14 20:22:43 2007 @@ -16,7 +16,7 @@ # Override return node.type == token.NOTEQUAL and node.value == "<>" - def transform(self, node): + def transform(self, node, results): new = pytree.Leaf(token.NOTEQUAL, "!=") new.set_prefix(node.get_prefix()) return new Modified: sandbox/trunk/2to3/fixes/fix_next.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_next.py (original) +++ sandbox/trunk/2to3/fixes/fix_next.py Sat Jul 14 20:22:43 2007 @@ -73,8 +73,7 @@ self.shadowed_next = False self.delayed = [] - def transform(self, node): - results = self.match(node) + def transform(self, node, results): assert results base = results.get("base") Modified: sandbox/trunk/2to3/fixes/fix_nonzero.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_nonzero.py (original) +++ sandbox/trunk/2to3/fixes/fix_nonzero.py Sat Jul 14 20:22:43 2007 @@ -13,9 +13,8 @@ parameters< '(' NAME ')' > any+ > any* > > """ - - def transform(self, node): - results = self.match(node) + + def transform(self, node, results): assert results name = results["name"] Modified: sandbox/trunk/2to3/fixes/fix_numliterals.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_numliterals.py (original) +++ sandbox/trunk/2to3/fixes/fix_numliterals.py Sat Jul 14 20:22:43 2007 @@ -17,7 +17,7 @@ return (node.type == token.NUMBER and (node.value.startswith("0") or node.value[-1] in "Ll")) - def transform(self, node): + def transform(self, node, results): val = node.value if val[-1] in 'Ll': val = val[:-1] Modified: sandbox/trunk/2to3/fixes/fix_print.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_print.py (original) +++ sandbox/trunk/2to3/fixes/fix_print.py Sat Jul 14 20:22:43 2007 @@ -30,10 +30,9 @@ return None return 
self.pattern.match(node) - def transform(self, node): - syms = self.syms - results = self.match(node) + def transform(self, node, results): assert results + syms = self.syms if node == Name("print"): # Special-case print all by itself Modified: sandbox/trunk/2to3/fixes/fix_raise.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_raise.py (original) +++ sandbox/trunk/2to3/fixes/fix_raise.py Sat Jul 14 20:22:43 2007 @@ -33,10 +33,9 @@ raise_stmt< 'raise' exc=any [',' val=any [',' tb=any]] > """ - def transform(self, node): - syms = self.syms - results = self.match(node) + def transform(self, node, results): assert results + syms = self.syms exc = results["exc"].clone() if exc.type is token.STRING: Modified: sandbox/trunk/2to3/fixes/fix_raw_input.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_raw_input.py (original) +++ sandbox/trunk/2to3/fixes/fix_raw_input.py Sat Jul 14 20:22:43 2007 @@ -12,8 +12,7 @@ power< 'raw_input' args=trailer< '(' [any] ')' > > """ - def transform(self, node): - results = self.match(node) + def transform(self, node, results): args = results["args"] new = pytree.Node(self.syms.power, [Name("input"), args.clone()]) Modified: sandbox/trunk/2to3/fixes/fix_repr.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_repr.py (original) +++ sandbox/trunk/2to3/fixes/fix_repr.py Sat Jul 14 20:22:43 2007 @@ -14,9 +14,7 @@ atom < '`' expr=any '`' > """ - def transform(self, node): - results = self.match(node) - assert results + def transform(self, node, results): expr = results["expr"].clone() if expr.type == self.syms.testlist1: Modified: sandbox/trunk/2to3/fixes/fix_sysexcattrs.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_sysexcattrs.py (original) +++ sandbox/trunk/2to3/fixes/fix_sysexcattrs.py 
Sat Jul 14 20:22:43 2007 @@ -14,5 +14,5 @@ any* > """ - def transform(self, node): + def transform(self, node, results): self.cannot_convert(node, "This attribute is going away in Python 3") Modified: sandbox/trunk/2to3/fixes/fix_throw.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_throw.py (original) +++ sandbox/trunk/2to3/fixes/fix_throw.py Sat Jul 14 20:22:43 2007 @@ -23,10 +23,8 @@ power< any trailer< '.' 'throw' > trailer< '(' exc=any ')' > > """ - def transform(self, node): + def transform(self, node, results): syms = self.syms - results = self.match(node) - assert results exc = results["exc"].clone() if exc.type is token.STRING: Modified: sandbox/trunk/2to3/fixes/fix_tuple_params.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_tuple_params.py (original) +++ sandbox/trunk/2to3/fixes/fix_tuple_params.py Sat Jul 14 20:22:43 2007 @@ -32,8 +32,7 @@ | lambda=lambdef< 'lambda' args=vfpdef< any+ > ':' body=any >""" - def transform(self, node): - results = self.match(node) + def transform(self, node, results): assert results if "lambda" in results: Modified: sandbox/trunk/2to3/fixes/fix_unicode.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_unicode.py (original) +++ sandbox/trunk/2to3/fixes/fix_unicode.py Sat Jul 14 20:22:43 2007 @@ -11,7 +11,7 @@ PATTERN = "STRING | NAME<'unicode' | 'unichr'>" - def transform(self, node): + def transform(self, node, results): if node.type == token.NAME: if node.value == "unicode": new = node.clone() Modified: sandbox/trunk/2to3/fixes/fix_xrange.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_xrange.py (original) +++ sandbox/trunk/2to3/fixes/fix_xrange.py Sat Jul 14 20:22:43 2007 @@ -18,8 +18,7 @@ > """ - def transform(self, node): - results = self.match(node) + def 
transform(self, node, results): args = results["args"] new = pytree.Node(self.syms.power, [Name("range"), args.clone()]) Modified: sandbox/trunk/2to3/refactor.py ============================================================================== --- sandbox/trunk/2to3/refactor.py (original) +++ sandbox/trunk/2to3/refactor.py Sat Jul 14 20:22:43 2007 @@ -254,8 +254,9 @@ changes = 0 for node in tree.post_order(): for fixer in self.fixers: - if fixer.match(node): - new = fixer.transform(node) + results = fixer.match(node) + if results: + new = fixer.transform(node, results) if new is not None and (new != node or str(new) != str(node)): node.replace(new) From python-checkins at python.org Sat Jul 14 20:23:00 2007 From: python-checkins at python.org (collin.winter) Date: Sat, 14 Jul 2007 20:23:00 +0200 (CEST) Subject: [Python-checkins] r56371 - in sandbox/trunk/2to3: fixes/util.py Message-ID: <20070714182300.82C411E400C@bag.python.org> Author: collin.winter Date: Sat Jul 14 20:23:00 2007 New Revision: 56371 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/fixes/util.py Log: Add an attr_chain() utility function. Modified: sandbox/trunk/2to3/fixes/util.py ============================================================================== --- sandbox/trunk/2to3/fixes/util.py (original) +++ sandbox/trunk/2to3/fixes/util.py Sat Jul 14 20:23:00 2007 @@ -152,6 +152,29 @@ return l[::-1] ########################################################### +### Misc +########################################################### + +def attr_chain(obj, attr): + """Follow an attribute chain. + + If you have a chain of objects where a.foo -> b, b.foo-> c, etc, + use this to iterate over all objects in the chain. Iteration is + terminated by getattr(x, attr) is None. + + Args: + obj: the starting object + attr: the name of the chaining attribute + + Yields: + Each successive object in the chain. 
+ """ + next = getattr(obj, attr) + while next: + yield next + next = getattr(next, attr) + +########################################################### ### The following functions are to find bindings in a suite ########################################################### From python-checkins at python.org Sat Jul 14 20:23:16 2007 From: python-checkins at python.org (collin.winter) Date: Sat, 14 Jul 2007 20:23:16 +0200 (CEST) Subject: [Python-checkins] r56372 - in sandbox/trunk/2to3: README fixes/fix_stringio.py tests/test_fixers.py Message-ID: <20070714182316.A70F51E400C@bag.python.org> Author: collin.winter Date: Sat Jul 14 20:23:16 2007 New Revision: 56372 Added: sandbox/trunk/2to3/fixes/fix_stringio.py Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/README sandbox/trunk/2to3/tests/test_fixers.py Log: First draft of StringIO fixer. Modified: sandbox/trunk/2to3/README ============================================================================== --- sandbox/trunk/2to3/README (original) +++ sandbox/trunk/2to3/README Sat Jul 14 20:23:16 2007 @@ -71,6 +71,8 @@ * **fix_repr** - swap backticks for repr() calls. +* **fix_stringio** - StringIO.StringIO -> io.StringIO (imports, too). + * **fix_sysexcattrs** - warn on usage of sys.value, sys.type and sys.traceback. Added: sandbox/trunk/2to3/fixes/fix_stringio.py ============================================================================== --- (empty file) +++ sandbox/trunk/2to3/fixes/fix_stringio.py Sat Jul 14 20:23:16 2007 @@ -0,0 +1,81 @@ +"""StringIO.StringIO -> io.StringIO (imports, too). + +Imports this fixer picks up on: +* "import StringIO" -> "import io" +* "from StringIO import StringIO" -> "from io import StringIO" +* "import StringIO as foo" -> "import io as foo" + +If the fixer finds "import StringIO", all "StringIO.StringIO" attribute +lookups will be translated to "io.StringIO" and all "StringIO" names +will be translated to "io". 
+""" +# Author: Collin Winter + +# Local imports +import patcomp +from fixes import basefix +from fixes.util import Name, attr_chain, any + + +class DelayedStrLeaf(object): + def __init__(self, fixer, leaf): + self.fixer = fixer + self.leaf = leaf + self.parent = None + + def __getattr__(self, attr): + return getattr(self.leaf, attr) + + def __str__(self): + if self.fixer.module_import: + return self.leaf.get_prefix() + "io" + else: + return str(self.leaf) + + def clone(self): + return DelayedStrLeaf(self.fixer, self.leaf) + + +class FixStringio(basefix.BaseFix): + PATTERN = """ + import_name< 'import' (module='StringIO' + | dotted_as_names< any* module='StringIO' any* >) > + | + import_from< 'from' module_name='StringIO' 'import' + ( 'StringIO' | import_as_name< 'StringIO' 'as' any >) > + | + import_name< 'import' dotted_as_name< module_name='StringIO' 'as' any > > + | + power< module_name='StringIO' trailer< '.' 'StringIO' > any* > + | + bare_name='StringIO' + """ + + # Don't match 'StringIO' if it's within another match + def match(self, node): + match = super(FixStringio, self).match + results = match(node) + if results: + if any([match(obj) for obj in attr_chain(node, "parent")]): + return False + return results + return False + + def start_tree(self, tree, filename): + super(FixStringio, self).start_tree(tree, filename) + self.module_import = False + + def transform(self, node, results): + import_mod = results.get("module") + module_name = results.get("module_name") + bare_name = results.get("bare_name") + + if import_mod: + self.module_import = True + import_mod.replace(Name("io", prefix=import_mod.get_prefix())) + elif module_name: + module_name.replace(Name("io", prefix=module_name.get_prefix())) + elif bare_name: + bare_name.replace(DelayedStrLeaf(self, bare_name)) + else: + raise RuntimeError("Hmm, shouldn't have gotten here") Modified: sandbox/trunk/2to3/tests/test_fixers.py ============================================================================== 
--- sandbox/trunk/2to3/tests/test_fixers.py (original) +++ sandbox/trunk/2to3/tests/test_fixers.py Sat Jul 14 20:23:16 2007 @@ -1177,6 +1177,63 @@ self.check(b, a) +class Test_stringio(FixerTestCase): + fixer = "stringio" + + def test_import_module(self): + b = "import StringIO" + a = "import io" + self.check(b, a) + + b = "import foo, StringIO, bar" + a = "import foo, io, bar" + self.check(b, a) + + def test_import_from(self): + b = "from StringIO import StringIO" + a = "from io import StringIO" + self.check(b, a) + + s = "from foo import StringIO" + self.check(s, s) + + def test_import_module_as(self): + b = "import StringIO as foo_bar" + a = "import io as foo_bar" + self.check(b, a) + + b = "import StringIO as foo_bar" + a = "import io as foo_bar" + self.check(b, a) + + def test_import_from_as(self): + b = "from StringIO import StringIO as foo_bar" + a = "from io import StringIO as foo_bar" + self.check(b, a) + + def test_import_module_usage(self): + b = """ + import StringIO + foo(StringIO, StringIO.StringIO) + """ + a = """ + import io + foo(io, io.StringIO) + """ + self.check(b, a) + + def test_from_import_usage(self): + b = """ + from StringIO import StringIO + foo(StringIO, StringIO()) + """ + a = """ + from io import StringIO + foo(StringIO, StringIO()) + """ + self.check(b, a) + + class Test_input(FixerTestCase): fixer = "input" From python-checkins at python.org Sat Jul 14 20:23:32 2007 From: python-checkins at python.org (collin.winter) Date: Sat, 14 Jul 2007 20:23:32 +0200 (CEST) Subject: [Python-checkins] r56373 - in sandbox/trunk/2to3: fixes/basefix.py fixes/fix_next.py fixes/fix_stringio.py pytree.py refactor.py tests/test_fixers.py tests/test_pytree.py Message-ID: <20070714182332.97CFA1E400F@bag.python.org> Author: collin.winter Date: Sat Jul 14 20:23:32 2007 New Revision: 56373 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/fixes/basefix.py sandbox/trunk/2to3/fixes/fix_next.py sandbox/trunk/2to3/fixes/fix_stringio.py 
sandbox/trunk/2to3/pytree.py sandbox/trunk/2to3/refactor.py sandbox/trunk/2to3/tests/test_fixers.py sandbox/trunk/2to3/tests/test_pytree.py Log: Add the ability for fixers to indicate whether they want a pre- or post-order traversal of the AST; change the StringIO and next fixers to take advantage of this (all the delayed-stringification crap is gone). Modified: sandbox/trunk/2to3/fixes/basefix.py ============================================================================== --- sandbox/trunk/2to3/fixes/basefix.py (original) +++ sandbox/trunk/2to3/fixes/basefix.py Sat Jul 14 20:23:32 2007 @@ -34,6 +34,7 @@ logger = None # A logger (set by set_filename) numbers = itertools.count(1) # For new_name() used_names = set() # A set of all used NAMEs + order = "post" # Does the fixer prefer pre- or post-order traversal # Shortcut for access to Python grammar symbols syms = pygram.python_symbols Modified: sandbox/trunk/2to3/fixes/fix_next.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_next.py (original) +++ sandbox/trunk/2to3/fixes/fix_next.py Sat Jul 14 20:23:32 2007 @@ -6,7 +6,6 @@ # - "with" statement targets aren't checked # Local imports -import pytree from pgen2 import token from pygram import python_symbols as syms from fixes import basefix @@ -15,44 +14,9 @@ bind_warning = "Calls to builtin next() possibly shadowed by global binding" -class DelayedStrNode(object): - - def __init__(self, type, base): - self.parent = None - self.shadowed_next = False - self.base = base - self.type = type - self.value = "" - self.prefix = "" - - def __str__(self): - if self.shadowed_next: - b = "".join([str(n) for n in self.base]) - return self.prefix + "%s.__next__()" % b - else: - b_prefix = prefix = self.base[0].get_prefix() - self.base[0].set_prefix("") - b = "".join([str(n) for n in self.base]) - self.base[0].set_prefix(b_prefix) - return self.prefix + prefix + "next(%s)" % b - - def clone(self): - node = 
DelayedStrNode(self.type, self.base) - node.shadowed_next = self.shadowed_next - node.value = self.value - node.prefix = self.prefix - return node - - def set_prefix(self, prefix): - self.prefix = prefix - - def get_prefix(self): - return self.prefix - - class FixNext(basefix.BaseFix): PATTERN = """ - power< base=any+ trailer< '.' 'next' > trailer< '(' ')' > > + power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > > | power< head=any+ trailer< '.' attr='next' > not trailer< '(' ')' > > | @@ -68,10 +32,11 @@ mod=file_input< any+ > """ + order = "pre" # Pre-order tree traversal + def start_tree(self, tree, filename): super(FixNext, self).start_tree(tree, filename) self.shadowed_next = False - self.delayed = [] def transform(self, node, results): assert results @@ -82,9 +47,12 @@ mod = results.get("mod") if base: - n = DelayedStrNode(syms.power, base) - node.replace(n) - self.delayed.append(n) + if self.shadowed_next: + attr.replace(Name("__next__", prefix=attr.get_prefix())) + else: + base = [n.clone() for n in base] + base[0].set_prefix("") + node.replace(Call(Name("next", prefix=node.get_prefix()), base)) elif name: n = Name("__next__", prefix=name.get_prefix()) name.replace(n) @@ -107,12 +75,6 @@ self.warning(n, bind_warning) self.shadowed_next = True - def finish_tree(self, tree, filename): - super(FixNext, self).finish_tree(tree, filename) - if self.shadowed_next: - for node in self.delayed: - node.shadowed_next = True - ### The following functions help test if node is part of an assignment ### target. 
Modified: sandbox/trunk/2to3/fixes/fix_stringio.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_stringio.py (original) +++ sandbox/trunk/2to3/fixes/fix_stringio.py Sat Jul 14 20:23:32 2007 @@ -12,30 +12,10 @@ # Author: Collin Winter # Local imports -import patcomp from fixes import basefix from fixes.util import Name, attr_chain, any -class DelayedStrLeaf(object): - def __init__(self, fixer, leaf): - self.fixer = fixer - self.leaf = leaf - self.parent = None - - def __getattr__(self, attr): - return getattr(self.leaf, attr) - - def __str__(self): - if self.fixer.module_import: - return self.leaf.get_prefix() + "io" - else: - return str(self.leaf) - - def clone(self): - return DelayedStrLeaf(self.fixer, self.leaf) - - class FixStringio(basefix.BaseFix): PATTERN = """ import_name< 'import' (module='StringIO' @@ -51,6 +31,8 @@ bare_name='StringIO' """ + order = "pre" # Pre-order tree traversal + # Don't match 'StringIO' if it's within another match def match(self, node): match = super(FixStringio, self).match @@ -75,7 +57,5 @@ import_mod.replace(Name("io", prefix=import_mod.get_prefix())) elif module_name: module_name.replace(Name("io", prefix=module_name.get_prefix())) - elif bare_name: - bare_name.replace(DelayedStrLeaf(self, bare_name)) - else: - raise RuntimeError("Hmm, shouldn't have gotten here") + elif bare_name and self.module_import: + bare_name.replace(Name("io", prefix=bare_name.get_prefix())) Modified: sandbox/trunk/2to3/pytree.py ============================================================================== --- sandbox/trunk/2to3/pytree.py (original) +++ sandbox/trunk/2to3/pytree.py Sat Jul 14 20:23:32 2007 @@ -79,6 +79,13 @@ """ raise NotImplementedError + def pre_order(self): + """Returns a pre-order iterator for the tree. + + This must be implemented by the concrete subclass. 
+ """ + raise NotImplementedError + def set_prefix(self, prefix): """Sets the prefix for the node (see Leaf class). @@ -216,6 +223,13 @@ yield node yield self + def pre_order(self): + """Returns a pre-order iterator for the tree.""" + yield self + for child in self.children: + for node in child.post_order(): + yield node + def set_prefix(self, prefix): """Sets the prefix for the node. @@ -302,6 +316,10 @@ """Returns a post-order iterator for the tree.""" yield self + def pre_order(self): + """Returns a pre-order iterator for the tree.""" + yield self + def set_prefix(self, prefix): """Sets the prefix for the node.""" self.changed() Modified: sandbox/trunk/2to3/refactor.py ============================================================================== --- sandbox/trunk/2to3/refactor.py (original) +++ sandbox/trunk/2to3/refactor.py Sat Jul 14 20:23:32 2007 @@ -111,12 +111,19 @@ self.driver = driver.Driver(pygram.python_grammar, convert=pytree.convert, logger=self.logger) - self.fixers = self.get_fixers() + self.pre_order, self.post_order = self.get_fixers() self.files = [] # List of files that were or should be modified def get_fixers(self): - """Inspects the options to load the requested patterns and handlers.""" - fixers = [] + """Inspects the options to load the requested patterns and handlers. + + Returns: + (pre_order, post_order), where pre_order is the list of fixers that + want a pre-order AST traversal, and post_order is the list that want + post-order traversal. 
+ """ + pre_order_fixers = [] + post_order_fixers = [] fix_names = self.options.fix if not fix_names or "all" in fix_names: fix_names = get_all_fix_names() @@ -142,8 +149,14 @@ continue if self.options.verbose: self.log_message("Adding transformation: %s", fix_name) - fixers.append(fixer) - return fixers + + if fixer.order == "pre": + pre_order_fixers.append(fixer) + elif fixer.order == "post": + post_order_fixers.append(fixer) + else: + raise ValueError("Illegal fixer order: %r" % fixer.order) + return (pre_order_fixers, post_order_fixers) def log_error(self, msg, *args, **kwds): """Increments error count and log a message.""" @@ -249,11 +262,26 @@ def refactor_tree(self, tree, filename): """Refactors a parse tree (modifying the tree in place).""" - for fixer in self.fixers: + changed = False + all_fixers = self.pre_order + self.post_order + for fixer in all_fixers: fixer.start_tree(tree, filename) - changes = 0 - for node in tree.post_order(): - for fixer in self.fixers: + + changed |= self.traverse_by(self.pre_order, tree.pre_order()) + changed |= self.traverse_by(self.post_order, tree.post_order()) + if tree.was_changed: + changes = True + + for fixer in all_fixers: + fixer.finish_tree(tree, filename) + return changed + + def traverse_by(self, fixers, traversal): + changed = False + if not fixers: + return changed + for node in traversal: + for fixer in fixers: results = fixer.match(node) if results: new = fixer.transform(node, results) @@ -261,12 +289,8 @@ str(new) != str(node)): node.replace(new) node = new - changes += 1 - elif tree.was_changed: - changes += 1 - for fixer in self.fixers: - fixer.finish_tree(tree, filename) - return changes + changed = True + return changed def write_file(self, new_text, filename, old_text=None): """Writes a string to a file. 
Modified: sandbox/trunk/2to3/tests/test_fixers.py ============================================================================== --- sandbox/trunk/2to3/tests/test_fixers.py (original) +++ sandbox/trunk/2to3/tests/test_fixers.py Sat Jul 14 20:23:32 2007 @@ -47,7 +47,10 @@ self.logging_stream = StringIO() sh = logging.StreamHandler(self.logging_stream) sh.setFormatter(logging.Formatter("%(message)s")) - self.refactor.fixers = [Fixer(f, sh) for f in self.refactor.fixers] + self.refactor.pre_order = [Fixer(f, sh) for f + in self.refactor.pre_order] + self.refactor.post_order = [Fixer(f, sh) for f + in self.refactor.post_order] def tearDown(self): self.logging_stream = None Modified: sandbox/trunk/2to3/tests/test_pytree.py ============================================================================== --- sandbox/trunk/2to3/tests/test_pytree.py (original) +++ sandbox/trunk/2to3/tests/test_pytree.py Sat Jul 14 20:23:32 2007 @@ -150,6 +150,18 @@ # XXX pass + def testPostOrder(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "bar") + n1 = pytree.Node(1000, [l1, l2]) + self.assertEqual(list(n1.post_order()), [l1, l2, n1]) + + def testPreOrder(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "bar") + n1 = pytree.Node(1000, [l1, l2]) + self.assertEqual(list(n1.pre_order()), [n1, l1, l2]) + def testChangedLeaf(self): l1 = pytree.Leaf(100, "f") self.failIf(l1.was_changed) From python-checkins at python.org Sat Jul 14 20:23:48 2007 From: python-checkins at python.org (collin.winter) Date: Sat, 14 Jul 2007 20:23:48 +0200 (CEST) Subject: [Python-checkins] r56374 - in sandbox/trunk/2to3: tests/support.py tests/test_all_fixers.py tests/test_parser.py Message-ID: <20070714182348.A19AA1E400C@bag.python.org> Author: collin.winter Date: Sat Jul 14 20:23:48 2007 New Revision: 56374 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/tests/support.py sandbox/trunk/2to3/tests/test_all_fixers.py sandbox/trunk/2to3/tests/test_parser.py Log: Create a 
helper function for tests that use the project's files as input data (fixer and parser tests). Modified: sandbox/trunk/2to3/tests/support.py ============================================================================== --- sandbox/trunk/2to3/tests/support.py (original) +++ sandbox/trunk/2to3/tests/support.py Sat Jul 14 20:23:48 2007 @@ -4,6 +4,7 @@ # Python imports import unittest import sys +import os import os.path import re from textwrap import dedent @@ -15,6 +16,7 @@ from pgen2 import driver test_dir = os.path.dirname(__file__) +proj_dir = os.path.normpath(os.path.join(test_dir, "..")) grammar_path = os.path.join(test_dir, "..", "Grammar.txt") grammar = driver.load_grammar(grammar_path) driver = driver.Driver(grammar, convert=pytree.convert) @@ -36,4 +38,10 @@ def reformat(string): return dedent(string) + "\n\n" +def all_project_files(): + for dirpath, dirnames, filenames in os.walk(proj_dir): + for filename in filenames: + if filename.endswith(".py"): + yield os.path.join(dirpath, filename) + TestCase = unittest.TestCase Modified: sandbox/trunk/2to3/tests/test_all_fixers.py ============================================================================== --- sandbox/trunk/2to3/tests/test_all_fixers.py (original) +++ sandbox/trunk/2to3/tests/test_all_fixers.py Sat Jul 14 20:23:48 2007 @@ -39,22 +39,10 @@ self.refactor.refactor_tree(tree, stream_name) return str(tree) - def test_examples_file(self): - # Just test that we can parse examples.py without failing - basedir = os.path.dirname(refactor.__file__) - example = os.path.join(basedir, "example.py") - self.refactor_stream("example.py", open(example)) - - def test_fixers(self): - # Just test that we can parse all the fixers without failing - basedir = os.path.dirname(refactor.__file__) - fixerdir = os.path.join(basedir, "fixes") - for filename in os.listdir(fixerdir): - if not filename.endswith(".py"): - continue - print "Fixing %s..." 
% filename - fixer = os.path.join(fixerdir, filename) - self.refactor_stream(fixer, open(fixer)) + def test_all_project_files(self): + for filepath in support.all_project_files(): + print "Fixing %s..." % filepath + self.refactor_stream(filepath, open(filepath)) if __name__ == "__main__": Modified: sandbox/trunk/2to3/tests/test_parser.py ============================================================================== --- sandbox/trunk/2to3/tests/test_parser.py (original) +++ sandbox/trunk/2to3/tests/test_parser.py Sat Jul 14 20:23:48 2007 @@ -18,17 +18,12 @@ """A cut-down version of pytree_idempotency.py.""" - def test_2to3_files(self): - proj_dir = os.path.join(test_dir, "..") - - for dirpath, dirnames, filenames in os.walk(proj_dir): - for filename in filenames: - if filename.endswith(".py"): - filepath = os.path.join(dirpath, filename) - print "Parsing %s..." % os.path.normpath(filepath) - tree = driver.parse_file(filepath, debug=True) - if diff(filepath, tree): - self.fail("Idempotency failed: %s" % filename) + def test_all_project_files(self): + for filepath in support.all_project_files(): + print "Parsing %s..." % filepath + tree = driver.parse_file(filepath, debug=True) + if diff(filepath, tree): + self.fail("Idempotency failed: %s" % filepath) def diff(fn, tree): From python-checkins at python.org Sat Jul 14 21:23:08 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Sat, 14 Jul 2007 21:23:08 +0200 (CEST) Subject: [Python-checkins] r56378 - python/branches/cpy_merge/Modules/_picklemodule.c Message-ID: <20070714192308.25BF31E4008@bag.python.org> Author: alexandre.vassalotti Date: Sat Jul 14 21:23:07 2007 New Revision: 56378 Modified: python/branches/cpy_merge/Modules/_picklemodule.c Log: Remove the no-load functions. 
Modified: python/branches/cpy_merge/Modules/_picklemodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_picklemodule.c (original) +++ python/branches/cpy_merge/Modules/_picklemodule.c Sat Jul 14 21:23:07 2007 @@ -2382,10 +2382,10 @@ static struct PyMethodDef Pickler_methods[] = { {"dump", (PyCFunction) Pickler_dump, METH_VARARGS, - PyDoc_STR("dump(object) -- " + PyDoc_STR("dump(object) -> None.\n\n" "Write an object in pickle format to the object's pickle stream")}, {"clear_memo", (PyCFunction) Pickle_clear_memo, METH_NOARGS, - PyDoc_STR("clear_memo() -- Clear the picklers memo")}, + PyDoc_STR("clear_memo() -> None. Clear the picklers memo.")}, {NULL, NULL} /* sentinel */ }; @@ -4387,428 +4387,16 @@ } -/* No-load functions to support noload, which is used to - find persistent references. */ - -static int -noload_obj(UnpicklerObject * self) -{ - int i; - - if ((i = marker(self)) < 0) - return -1; - return Pdata_clear(self->stack, i + 1); -} - - -static int -noload_inst(UnpicklerObject * self) -{ - int i; - char *s; - - if ((i = marker(self)) < 0) - return -1; - Pdata_clear(self->stack, i); - if (self->readline_func(self, &s) < 0) - return -1; - if (self->readline_func(self, &s) < 0) - return -1; - PDATA_APPEND(self->stack, Py_None, -1); - return 0; -} - -static int -noload_newobj(UnpicklerObject * self) -{ - PyObject *obj; - - PDATA_POP(self->stack, obj); /* pop argtuple */ - if (obj == NULL) - return -1; - Py_DECREF(obj); - - PDATA_POP(self->stack, obj); /* pop cls */ - if (obj == NULL) - return -1; - Py_DECREF(obj); - - PDATA_APPEND(self->stack, Py_None, -1); - return 0; -} - -static int -noload_global(UnpicklerObject *self) -{ - char *s; - - if (self->readline_func(self, &s) < 0) - return -1; - if (self->readline_func(self, &s) < 0) - return -1; - PDATA_APPEND(self->stack, Py_None, -1); - return 0; -} - -static int -noload_reduce(UnpicklerObject *self) -{ - - if (self->stack->length < 2) - 
return stackUnderflow(); - Pdata_clear(self->stack, self->stack->length - 2); - PDATA_APPEND(self->stack, Py_None, -1); - return 0; -} - -static int -noload_build(UnpicklerObject *self) -{ - - if (self->stack->length < 1) - return stackUnderflow(); - Pdata_clear(self->stack, self->stack->length - 1); - return 0; -} - -static int -noload_extension(UnpicklerObject *self, int nbytes) -{ - char *codebytes; - - assert(nbytes == 1 || nbytes == 2 || nbytes == 4); - if (self->read_func(self, &codebytes, nbytes) < 0) - return -1; - PDATA_APPEND(self->stack, Py_None, -1); - return 0; -} - - static PyObject * -noload(UnpicklerObject *self) -{ - PyObject *err = 0, *val = 0; - char *s; - - self->num_marks = 0; - Pdata_clear(self->stack, 0); - - while (1) { - if (self->read_func(self, &s, 1) < 0) - break; - - switch (s[0]) { - case NONE: - if (load_none(self) < 0) - break; - continue; - - case BININT: - if (load_binint(self) < 0) - break; - continue; - - case BININT1: - if (load_binint1(self) < 0) - break; - continue; - - case BININT2: - if (load_binint2(self) < 0) - break; - continue; - - case INT: - if (load_int(self) < 0) - break; - continue; - - case LONG: - if (load_long(self) < 0) - break; - continue; - - case LONG1: - if (load_counted_long(self, 1) < 0) - break; - continue; - - case LONG4: - if (load_counted_long(self, 4) < 0) - break; - continue; - - case FLOAT: - if (load_float(self) < 0) - break; - continue; - - case BINFLOAT: - if (load_binfloat(self) < 0) - break; - continue; - - case BINSTRING: - if (load_binstring(self) < 0) - break; - continue; - - case SHORT_BINSTRING: - if (load_short_binstring(self) < 0) - break; - continue; - - case STRING: - if (load_string(self) < 0) - break; - continue; - -#ifdef Py_USING_UNICODE - case UNICODE: - if (load_unicode(self) < 0) - break; - continue; - - case BINUNICODE: - if (load_binunicode(self) < 0) - break; - continue; -#endif - - case EMPTY_TUPLE: - if (load_counted_tuple(self, 0) < 0) - break; - continue; - - case TUPLE1: 
- if (load_counted_tuple(self, 1) < 0) - break; - continue; - - case TUPLE2: - if (load_counted_tuple(self, 2) < 0) - break; - continue; - - case TUPLE3: - if (load_counted_tuple(self, 3) < 0) - break; - continue; - - case TUPLE: - if (load_tuple(self) < 0) - break; - continue; - - case EMPTY_LIST: - if (load_empty_list(self) < 0) - break; - continue; - - case LIST: - if (load_list(self) < 0) - break; - continue; - - case EMPTY_DICT: - if (load_empty_dict(self) < 0) - break; - continue; - - case DICT: - if (load_dict(self) < 0) - break; - continue; - - case OBJ: - if (noload_obj(self) < 0) - break; - continue; - - case INST: - if (noload_inst(self) < 0) - break; - continue; - - case NEWOBJ: - if (noload_newobj(self) < 0) - break; - continue; - - case GLOBAL: - if (noload_global(self) < 0) - break; - continue; - - case APPEND: - if (load_append(self) < 0) - break; - continue; - - case APPENDS: - if (load_appends(self) < 0) - break; - continue; - - case BUILD: - if (noload_build(self) < 0) - break; - continue; - - case DUP: - if (load_dup(self) < 0) - break; - continue; - - case BINGET: - if (load_binget(self) < 0) - break; - continue; - - case LONG_BINGET: - if (load_long_binget(self) < 0) - break; - continue; - - case GET: - if (load_get(self) < 0) - break; - continue; - - case EXT1: - if (noload_extension(self, 1) < 0) - break; - continue; - - case EXT2: - if (noload_extension(self, 2) < 0) - break; - continue; - - case EXT4: - if (noload_extension(self, 4) < 0) - break; - continue; - - case MARK: - if (load_mark(self) < 0) - break; - continue; - - case BINPUT: - if (load_binput(self) < 0) - break; - continue; - - case LONG_BINPUT: - if (load_long_binput(self) < 0) - break; - continue; - - case PUT: - if (load_put(self) < 0) - break; - continue; - - case POP: - if (load_pop(self) < 0) - break; - continue; - - case POP_MARK: - if (load_pop_mark(self) < 0) - break; - continue; - - case SETITEM: - if (load_setitem(self) < 0) - break; - continue; - - case SETITEMS: - 
if (load_setitems(self) < 0) - break; - continue; - - case STOP: - break; - - case PERSID: - if (load_persid(self) < 0) - break; - continue; - - case BINPERSID: - if (load_binpersid(self) < 0) - break; - continue; - - case REDUCE: - if (noload_reduce(self) < 0) - break; - continue; - - case PROTO: - if (load_proto(self) < 0) - break; - continue; - - case NEWTRUE: - if (load_bool(self, Py_True) < 0) - break; - continue; - - case NEWFALSE: - if (load_bool(self, Py_False) < 0) - break; - continue; - default: - pickle_ErrFormat(UnpicklingError, - "invalid load key, '%s'.", "c", s[0]); - return NULL; - } - - break; - } - - if ((err = PyErr_Occurred())) { - if (err == PyExc_EOFError) { - PyErr_SetNone(PyExc_EOFError); - } - return NULL; - } - - PDATA_POP(self->stack, val); - return val; -} - - -static PyObject * -Unpickler_load(UnpicklerObject * self, PyObject * unused) +Unpickler_load(UnpicklerObject *self) { return load(self); } -static PyObject * -Unpickler_noload(UnpicklerObject *self, PyObject *unused) -{ - return noload(self); -} - static struct PyMethodDef Unpickler_methods[] = { - {"load", (PyCFunction) Unpickler_load, METH_NOARGS, - PyDoc_STR("load() -- Load a pickle") - }, - {"noload", (PyCFunction) Unpickler_noload, METH_NOARGS, - PyDoc_STR - ("noload() -- not load a pickle, but go through most of the motions\n" - "\n" - "This function can be used to read past a pickle without instantiating\n" - "any objects or importing any modules. It can also be used to find all\n" - "persistent references without instantiating any objects or importing\n" - "any modules.\n") - }, + {"load", (PyCFunction)Unpickler_load, METH_NOARGS, + PyDoc_STR("load() -> None. 
Load a pickle")}, {NULL, NULL} /* sentinel */ }; From python-checkins at python.org Sat Jul 14 21:37:21 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Sat, 14 Jul 2007 21:37:21 +0200 (CEST) Subject: [Python-checkins] r56379 - python/branches/cpy_merge/Modules/_bytes_iomodule.c python/branches/cpy_merge/Modules/_string_iomodule.c Message-ID: <20070714193721.7F2781E4008@bag.python.org> Author: alexandre.vassalotti Date: Sat Jul 14 21:37:21 2007 New Revision: 56379 Modified: python/branches/cpy_merge/Modules/_bytes_iomodule.c python/branches/cpy_merge/Modules/_string_iomodule.c Log: Fix a minor reference leak. Modified: python/branches/cpy_merge/Modules/_bytes_iomodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_bytes_iomodule.c (original) +++ python/branches/cpy_merge/Modules/_bytes_iomodule.c Sat Jul 14 21:37:21 2007 @@ -491,8 +491,10 @@ self->buf_size = size + 1; if (size > 0) { - if (write_bytes(self, buf, size) == -1) + if (write_bytes(self, buf, size) == -1) { + Py_DECREF(self); return NULL; + } self->pos = 0; } if (self->buf == NULL) { Modified: python/branches/cpy_merge/Modules/_string_iomodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_string_iomodule.c (original) +++ python/branches/cpy_merge/Modules/_string_iomodule.c Sat Jul 14 21:37:21 2007 @@ -474,8 +474,10 @@ self->buf_size = size + 1; if (size> 0) { - if (write_str(self, buf, size) == -1) + if (write_str(self, buf, size) == -1) { + Py_DECREF(self); return NULL; + } self->pos = 0; } if (self->buf == NULL) { From python-checkins at python.org Sat Jul 14 22:58:21 2007 From: python-checkins at python.org (andrew.kuchling) Date: Sat, 14 Jul 2007 22:58:21 +0200 (CEST) Subject: [Python-checkins] r56380 - python/trunk/Doc/lib/email.tex Message-ID: <20070714205821.C29AA1E4008@bag.python.org> Author: andrew.kuchling Date: Sat Jul 14 
22:58:21 2007 New Revision: 56380 Modified: python/trunk/Doc/lib/email.tex Log: Typo fix Modified: python/trunk/Doc/lib/email.tex ============================================================================== --- python/trunk/Doc/lib/email.tex (original) +++ python/trunk/Doc/lib/email.tex Sat Jul 14 22:58:21 2007 @@ -94,7 +94,7 @@ This table describes the release history of the email package, corresponding to the version of Python that the package was released with. For purposes of this document, when you see a note about change or added versions, these refer -to the Python version the change was made it, \emph{not} the email package +to the Python version the change was made in, \emph{not} the email package version. This table also describes the Python compatibility of each version of the package. From python-checkins at python.org Sat Jul 14 22:58:24 2007 From: python-checkins at python.org (andrew.kuchling) Date: Sat, 14 Jul 2007 22:58:24 +0200 (CEST) Subject: [Python-checkins] r56381 - python/branches/release25-maint/Doc/lib/email.tex Message-ID: <20070714205824.162731E4008@bag.python.org> Author: andrew.kuchling Date: Sat Jul 14 22:58:23 2007 New Revision: 56381 Modified: python/branches/release25-maint/Doc/lib/email.tex Log: Typo fix Modified: python/branches/release25-maint/Doc/lib/email.tex ============================================================================== --- python/branches/release25-maint/Doc/lib/email.tex (original) +++ python/branches/release25-maint/Doc/lib/email.tex Sat Jul 14 22:58:23 2007 @@ -94,7 +94,7 @@ This table describes the release history of the email package, corresponding to the version of Python that the package was released with. For purposes of this document, when you see a note about change or added versions, these refer -to the Python version the change was made it, \emph{not} the email package +to the Python version the change was made in, \emph{not} the email package version. 
This table also describes the Python compatibility of each version of the package. From python-checkins at python.org Sat Jul 14 23:56:19 2007 From: python-checkins at python.org (andrew.kuchling) Date: Sat, 14 Jul 2007 23:56:19 +0200 (CEST) Subject: [Python-checkins] r56382 - in python/trunk/Lib: mailbox.py test/test_mailbox.py Message-ID: <20070714215619.773881E4008@bag.python.org> Author: andrew.kuchling Date: Sat Jul 14 23:56:19 2007 New Revision: 56382 Modified: python/trunk/Lib/mailbox.py python/trunk/Lib/test/test_mailbox.py Log: Avoid exception if there's a stray directory inside a Maildir folder. The Maildir specification doesn't seem to say anything about this situation, and it can happen if you're keeping a Maildir mailbox in Subversion (.svn directories) or some similar system. The patch just ignores directories in the cur/, new/, tmp/ folders. Modified: python/trunk/Lib/mailbox.py ============================================================================== --- python/trunk/Lib/mailbox.py (original) +++ python/trunk/Lib/mailbox.py Sat Jul 14 23:56:19 2007 @@ -459,7 +459,11 @@ """Update table of contents mapping.""" self._toc = {} for subdir in ('new', 'cur'): - for entry in os.listdir(os.path.join(self._path, subdir)): + subdir_path = os.path.join(self._path, subdir) + for entry in os.listdir(subdir_path): + p = os.path.join(subdir_path, entry) + if os.path.isdir(p): + continue uniq = entry.split(self.colon)[0] self._toc[uniq] = os.path.join(subdir, entry) Modified: python/trunk/Lib/test/test_mailbox.py ============================================================================== --- python/trunk/Lib/test/test_mailbox.py (original) +++ python/trunk/Lib/test/test_mailbox.py Sat Jul 14 23:56:19 2007 @@ -686,7 +686,18 @@ folder1_alias = box.get_folder('folder1') self.assert_(folder1_alias._factory is dummy_factory) - + def test_directory_in_folder (self): + # Test that mailboxes still work if there's a stray extra directory + # in a folder. 
+ for i in range(10): + self._box.add(mailbox.Message(_sample_message)) + + # Create a stray directory + os.mkdir(os.path.join(self._path, 'cur', 'stray-dir')) + + # Check that looping still works with the directory present. + for msg in self._box: + pass class _TestMboxMMDF(TestMailbox): From python-checkins at python.org Sun Jul 15 00:06:32 2007 From: python-checkins at python.org (andrew.kuchling) Date: Sun, 15 Jul 2007 00:06:32 +0200 (CEST) Subject: [Python-checkins] r56383 - in python/branches/release25-maint: Lib/mailbox.py Lib/test/test_mailbox.py Misc/NEWS Message-ID: <20070714220632.131081E4008@bag.python.org> Author: andrew.kuchling Date: Sun Jul 15 00:06:30 2007 New Revision: 56383 Modified: python/branches/release25-maint/Lib/mailbox.py python/branches/release25-maint/Lib/test/test_mailbox.py python/branches/release25-maint/Misc/NEWS Log: [Backport of r56382] Avoid exception if there's a stray directory inside a Maildir folder. The Maildir specification doesn't seem to say anything about this situation, and it can happen if you're keeping a Maildir mailbox in Subversion (.svn directories) or some similar system. The patch just ignores directories in the cur/, new/, tmp/ folders. 
Modified: python/branches/release25-maint/Lib/mailbox.py ============================================================================== --- python/branches/release25-maint/Lib/mailbox.py (original) +++ python/branches/release25-maint/Lib/mailbox.py Sun Jul 15 00:06:30 2007 @@ -459,7 +459,11 @@ """Update table of contents mapping.""" self._toc = {} for subdir in ('new', 'cur'): - for entry in os.listdir(os.path.join(self._path, subdir)): + subdir_path = os.path.join(self._path, subdir) + for entry in os.listdir(subdir_path): + p = os.path.join(subdir_path, entry) + if os.path.isdir(p): + continue uniq = entry.split(self.colon)[0] self._toc[uniq] = os.path.join(subdir, entry) Modified: python/branches/release25-maint/Lib/test/test_mailbox.py ============================================================================== --- python/branches/release25-maint/Lib/test/test_mailbox.py (original) +++ python/branches/release25-maint/Lib/test/test_mailbox.py Sun Jul 15 00:06:30 2007 @@ -685,7 +685,18 @@ folder1_alias = box.get_folder('folder1') self.assert_(folder1_alias._factory is dummy_factory) - + def test_directory_in_folder (self): + # Test that mailboxes still work if there's a stray extra directory + # in a folder. + for i in range(10): + self._box.add(mailbox.Message(_sample_message)) + + # Create a stray directory + os.mkdir(os.path.join(self._path, 'cur', 'stray-dir')) + + # Check that looping still works with the directory present. + for msg in self._box: + pass class _TestMboxMMDF(TestMailbox): Modified: python/branches/release25-maint/Misc/NEWS ============================================================================== --- python/branches/release25-maint/Misc/NEWS (original) +++ python/branches/release25-maint/Misc/NEWS Sun Jul 15 00:06:30 2007 @@ -45,6 +45,9 @@ - Fix bug in marshal where bad data would cause a segfault due to lack of an infinite recursion check. +- mailbox.py: Ignore stray directories found in Maildir's cur/new/tmp + subdirectories. 
+ - HTML-escape the plain traceback in cgitb's HTML output, to prevent the traceback inadvertently or maliciously closing the comment and injecting HTML into the error page. From python-checkins at python.org Sun Jul 15 00:33:12 2007 From: python-checkins at python.org (collin.winter) Date: Sun, 15 Jul 2007 00:33:12 +0200 (CEST) Subject: [Python-checkins] r56384 - in sandbox/trunk/2to3: fixes/fix_stringio.py tests/test_fixers.py Message-ID: <20070714223312.185B91E4008@bag.python.org> Author: collin.winter Date: Sun Jul 15 00:33:11 2007 New Revision: 56384 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/fixes/fix_stringio.py sandbox/trunk/2to3/tests/test_fixers.py Log: Add support for 'from StringIO import *' to StringIO fixer. Modified: sandbox/trunk/2to3/fixes/fix_stringio.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_stringio.py (original) +++ sandbox/trunk/2to3/fixes/fix_stringio.py Sun Jul 15 00:33:11 2007 @@ -24,6 +24,8 @@ import_from< 'from' module_name='StringIO' 'import' ( 'StringIO' | import_as_name< 'StringIO' 'as' any >) > | + import_from< 'from' module_name='StringIO' 'import' star='*' > + | import_name< 'import' dotted_as_name< module_name='StringIO' 'as' any > > | power< module_name='StringIO' trailer< '.' 
'StringIO' > any* > @@ -51,11 +53,14 @@ import_mod = results.get("module") module_name = results.get("module_name") bare_name = results.get("bare_name") + star = results.get("star") if import_mod: self.module_import = True import_mod.replace(Name("io", prefix=import_mod.get_prefix())) elif module_name: module_name.replace(Name("io", prefix=module_name.get_prefix())) + if star: + star.replace(Name("StringIO", prefix=star.get_prefix())) elif bare_name and self.module_import: bare_name.replace(Name("io", prefix=bare_name.get_prefix())) Modified: sandbox/trunk/2to3/tests/test_fixers.py ============================================================================== --- sandbox/trunk/2to3/tests/test_fixers.py (original) +++ sandbox/trunk/2to3/tests/test_fixers.py Sun Jul 15 00:33:11 2007 @@ -1197,6 +1197,10 @@ a = "from io import StringIO" self.check(b, a) + b = "from StringIO import *" + a = "from io import StringIO" + self.check(b, a) + s = "from foo import StringIO" self.check(s, s) From python-checkins at python.org Sun Jul 15 00:33:31 2007 From: python-checkins at python.org (collin.winter) Date: Sun, 15 Jul 2007 00:33:31 +0200 (CEST) Subject: [Python-checkins] r56385 - in sandbox/trunk/2to3: tests/test_grammar.py tests/test_parser.py Message-ID: <20070714223331.B86EE1E400C@bag.python.org> Author: collin.winter Date: Sun Jul 15 00:33:29 2007 New Revision: 56385 Removed: sandbox/trunk/2to3/tests/test_grammar.py Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/tests/test_parser.py Log: Merge test_grammar into test_parser. Deleted: /sandbox/trunk/2to3/tests/test_grammar.py ============================================================================== --- /sandbox/trunk/2to3/tests/test_grammar.py Sun Jul 15 00:33:29 2007 +++ (empty file) @@ -1,156 +0,0 @@ -#!/usr/bin/env python2.5 -""" Test suite for Grammar.txt. This is the place to add tests for -changes to 2to3's grammar, such as those merging the grammars for -Python 2 and 3. 
- -In addition to specific tests for parts of the grammar we've changed, -TestGrammarFiles also attempts to process the test_grammar.py files -from Python 2 and Python 3. """ -# Author: Collin Winter - -# Testing imports -import support -from support import driver, test_dir - -# Python imports -import os.path - -# Local imports -from pgen2.parse import ParseError - -class GrammarTest(support.TestCase): - def validate(self, code): - support.parse_string(code) - - def invalid_syntax(self, code): - try: - self.validate(code) - except ParseError: - pass - else: - raise AssertionError("Syntax shouldn't have been valid") - - -class TestRaiseChanges(GrammarTest): - def test_2x_style_1(self): - self.validate("raise") - - def test_2x_style_2(self): - self.validate("raise E, V") - - def test_2x_style_3(self): - self.validate("raise E, V, T") - - def test_2x_style_invalid_1(self): - self.invalid_syntax("raise E, V, T, Z") - - def test_3x_style(self): - self.validate("raise E1 from E2") - - def test_3x_style_invalid_1(self): - self.invalid_syntax("raise E, V from E1") - - def test_3x_style_invalid_2(self): - self.invalid_syntax("raise E from E1, E2") - - def test_3x_style_invalid_3(self): - self.invalid_syntax("raise from E1, E2") - - def test_3x_style_invalid_4(self): - self.invalid_syntax("raise E from") - - -# Adapated from Python 3's Lib/test/test_grammar.py:GrammarTests.testFuncdef -class TestFunctionAnnotations(GrammarTest): - def test_1(self): - self.validate("""def f(x) -> list: pass""") - - def test_2(self): - self.validate("""def f(x:int): pass""") - - def test_3(self): - self.validate("""def f(*x:str): pass""") - - def test_4(self): - self.validate("""def f(**x:float): pass""") - - def test_5(self): - self.validate("""def f(x, y:1+2): pass""") - - def test_6(self): - self.validate("""def f(a, (b:1, c:2, d)): pass""") - - def test_7(self): - self.validate("""def f(a, (b:1, c:2, d), e:3=4, f=5, *g:6): pass""") - - def test_8(self): - s = """def f(a, (b:1, c:2, d), 
e:3=4, f=5, - *g:6, h:7, i=8, j:9=10, **k:11) -> 12: pass""" - self.validate(s) - - -class TestExcept(GrammarTest): - def test_new(self): - s = """ - try: - x - except E as N: - y""" - self.validate(s) - - def test_old(self): - s = """ - try: - x - except E, N: - y""" - self.validate(s) - - -# Adapted from Python 3's Lib/test/test_grammar.py:GrammarTests.testAtoms -class TestSetLiteral(GrammarTest): - def test_1(self): - self.validate("""x = {'one'}""") - - def test_2(self): - self.validate("""x = {'one', 1,}""") - - def test_3(self): - self.validate("""x = {'one', 'two', 'three'}""") - - def test_4(self): - self.validate("""x = {2, 3, 4,}""") - - -class TestNumericLiterals(GrammarTest): - def test_new_octal_notation(self): - self.validate("""0o7777777777777""") - self.invalid_syntax("""0o7324528887""") - - def test_new_binary_notation(self): - self.validate("""0b101010""") - self.invalid_syntax("""0b0101021""") - - -class TestClassDef(GrammarTest): - def test_new_syntax(self): - self.validate("class B(t=7): pass") - self.validate("class B(t, *args): pass") - self.validate("class B(t, **kwargs): pass") - self.validate("class B(t, *args, **kwargs): pass") - self.validate("class B(t, y=9, *args, **kwargs): pass") - - -class TestGrammarFiles(GrammarTest): - def test_python2(self): - f = os.path.join(test_dir, "data", "py2_test_grammar.py") - driver.parse_file(f) - - def test_python3(self): - f = os.path.join(test_dir, "data", "py3_test_grammar.py") - driver.parse_file(f) - - -if __name__ == "__main__": - import __main__ - support.run_all_tests(__main__) Modified: sandbox/trunk/2to3/tests/test_parser.py ============================================================================== --- sandbox/trunk/2to3/tests/test_parser.py (original) +++ sandbox/trunk/2to3/tests/test_parser.py Sun Jul 15 00:33:29 2007 @@ -1,5 +1,11 @@ #!/usr/bin/env python2.5 -"""Test suite for 2to3's parser.""" +"""Test suite for 2to3's parser and grammar files. 
+ +This is the place to add tests for changes to 2to3's grammar, such as those +merging the grammars for Python 2 and 3. In addition to specific tests for +parts of the grammar we've changed, we also make sure we can parse the +test_grammar.py files from both Python 2 and Python 3. +""" # Author: Collin Winter # Testing imports @@ -14,6 +20,129 @@ from pgen2.parse import ParseError +class GrammarTest(support.TestCase): + def validate(self, code): + support.parse_string(code) + + def invalid_syntax(self, code): + try: + self.validate(code) + except ParseError: + pass + else: + raise AssertionError("Syntax shouldn't have been valid") + + +class TestRaiseChanges(GrammarTest): + def test_2x_style_1(self): + self.validate("raise") + + def test_2x_style_2(self): + self.validate("raise E, V") + + def test_2x_style_3(self): + self.validate("raise E, V, T") + + def test_2x_style_invalid_1(self): + self.invalid_syntax("raise E, V, T, Z") + + def test_3x_style(self): + self.validate("raise E1 from E2") + + def test_3x_style_invalid_1(self): + self.invalid_syntax("raise E, V from E1") + + def test_3x_style_invalid_2(self): + self.invalid_syntax("raise E from E1, E2") + + def test_3x_style_invalid_3(self): + self.invalid_syntax("raise from E1, E2") + + def test_3x_style_invalid_4(self): + self.invalid_syntax("raise E from") + + +# Adapated from Python 3's Lib/test/test_grammar.py:GrammarTests.testFuncdef +class TestFunctionAnnotations(GrammarTest): + def test_1(self): + self.validate("""def f(x) -> list: pass""") + + def test_2(self): + self.validate("""def f(x:int): pass""") + + def test_3(self): + self.validate("""def f(*x:str): pass""") + + def test_4(self): + self.validate("""def f(**x:float): pass""") + + def test_5(self): + self.validate("""def f(x, y:1+2): pass""") + + def test_6(self): + self.validate("""def f(a, (b:1, c:2, d)): pass""") + + def test_7(self): + self.validate("""def f(a, (b:1, c:2, d), e:3=4, f=5, *g:6): pass""") + + def test_8(self): + s = """def f(a, 
(b:1, c:2, d), e:3=4, f=5, + *g:6, h:7, i=8, j:9=10, **k:11) -> 12: pass""" + self.validate(s) + + +class TestExcept(GrammarTest): + def test_new(self): + s = """ + try: + x + except E as N: + y""" + self.validate(s) + + def test_old(self): + s = """ + try: + x + except E, N: + y""" + self.validate(s) + + +# Adapted from Python 3's Lib/test/test_grammar.py:GrammarTests.testAtoms +class TestSetLiteral(GrammarTest): + def test_1(self): + self.validate("""x = {'one'}""") + + def test_2(self): + self.validate("""x = {'one', 1,}""") + + def test_3(self): + self.validate("""x = {'one', 'two', 'three'}""") + + def test_4(self): + self.validate("""x = {2, 3, 4,}""") + + +class TestNumericLiterals(GrammarTest): + def test_new_octal_notation(self): + self.validate("""0o7777777777777""") + self.invalid_syntax("""0o7324528887""") + + def test_new_binary_notation(self): + self.validate("""0b101010""") + self.invalid_syntax("""0b0101021""") + + +class TestClassDef(GrammarTest): + def test_new_syntax(self): + self.validate("class B(t=7): pass") + self.validate("class B(t, *args): pass") + self.validate("class B(t, **kwargs): pass") + self.validate("class B(t, *args, **kwargs): pass") + self.validate("class B(t, y=9, *args, **kwargs): pass") + + class TestParserIdempotency(support.TestCase): """A cut-down version of pytree_idempotency.py.""" From buildbot at python.org Sun Jul 15 00:33:36 2007 From: buildbot at python.org (buildbot at python.org) Date: Sat, 14 Jul 2007 22:33:36 +0000 Subject: [Python-checkins] buildbot warnings in x86 XP-3 trunk Message-ID: <20070714223337.1E4181E4008@bag.python.org> The Buildbot has detected a new failure of x86 XP-3 trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520XP-3%2520trunk/builds/81 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: andrew.kuchling,georg.brandl Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_timeout sincerely, -The Buildbot From python-checkins at python.org Sun Jul 15 00:33:47 2007 From: python-checkins at python.org (collin.winter) Date: Sun, 15 Jul 2007 00:33:47 +0200 (CEST) Subject: [Python-checkins] r56386 - in sandbox/trunk/2to3: Grammar.txt fixes/fix_dict.py fixes/fix_filter.py fixes/fix_map.py fixes/util.py tests/data/py3_test_grammar.py Message-ID: <20070714223347.8EC361E4008@bag.python.org> Author: collin.winter Date: Sun Jul 15 00:33:47 2007 New Revision: 56386 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/Grammar.txt sandbox/trunk/2to3/fixes/fix_dict.py sandbox/trunk/2to3/fixes/fix_filter.py sandbox/trunk/2to3/fixes/fix_map.py sandbox/trunk/2to3/fixes/util.py sandbox/trunk/2to3/tests/data/py3_test_grammar.py Log: Sync grammar with p3yk branch, update some fixers accordingly. Modified: sandbox/trunk/2to3/Grammar.txt ============================================================================== --- sandbox/trunk/2to3/Grammar.txt (original) +++ sandbox/trunk/2to3/Grammar.txt Sun Jul 15 00:33:47 2007 @@ -124,8 +124,8 @@ '{' [dictsetmaker] '}' | '`' testlist1 '`' | NAME | NUMBER | STRING+ | '.' '.' '.') -listmaker: test ( list_for | (',' test)* [','] ) -testlist_gexp: test ( gen_for | (',' test)* [','] ) +listmaker: test ( comp_for | (',' test)* [','] ) +testlist_gexp: test ( comp_for | (',' test)* [','] ) lambdef: 'lambda' [varargslist] ':' test trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' 
NAME subscriptlist: subscript (',' subscript)* [','] @@ -133,20 +133,17 @@ sliceop: ':' [test] exprlist: expr (',' expr)* [','] testlist: test (',' test)* [','] -dictsetmaker: (test ':' test (',' test ':' test)* [',']) | (test (',' test)* [',']) +dictsetmaker: ( (test ':' test (comp_for | (',' test ':' test)* [','])) | + (test (comp_for | (',' test)* [','])) ) classdef: 'class' NAME ['(' [arglist] ')'] ':' suite arglist: (argument ',')* (argument [',']| '*' test [',' '**' test] | '**' test) -argument: test [gen_for] | test '=' test # Really [keyword '='] test +argument: test [comp_for] | test '=' test # Really [keyword '='] test -list_iter: list_for | list_if -list_for: 'for' exprlist 'in' testlist_safe [list_iter] -list_if: 'if' old_test [list_iter] - -gen_iter: gen_for | gen_if -gen_for: 'for' exprlist 'in' or_test [gen_iter] -gen_if: 'if' old_test [gen_iter] +comp_iter: comp_for | comp_if +comp_for: 'for' exprlist 'in' testlist_safe [comp_iter] +comp_if: 'if' old_test [comp_iter] testlist1: test (',' test)* Modified: sandbox/trunk/2to3/fixes/fix_dict.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_dict.py (original) +++ sandbox/trunk/2to3/fixes/fix_dict.py Sun Jul 15 00:33:47 2007 @@ -70,8 +70,7 @@ p1 = patcomp.compile_pattern(P1) P2 = """for_stmt< 'for' any 'in' node=any ':' any* > - | list_for< 'for' any 'in' node=any any* > - | gen_for< 'for' any 'in' node=any any* > + | comp_for< 'for' any 'in' node=any any* > """ p2 = patcomp.compile_pattern(P2) Modified: sandbox/trunk/2to3/fixes/fix_filter.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_filter.py (original) +++ sandbox/trunk/2to3/fixes/fix_filter.py Sun Jul 15 00:33:47 2007 @@ -58,8 +58,7 @@ return new P0 = """for_stmt< 'for' any 'in' node=any ':' any* > - | list_for< 'for' any 'in' node=any any* > - | gen_for< 'for' any 'in' node=any any* > + | comp_for< 'for' any 'in' 
node=any any* > """ p0 = patcomp.compile_pattern(P0) Modified: sandbox/trunk/2to3/fixes/fix_map.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_map.py (original) +++ sandbox/trunk/2to3/fixes/fix_map.py Sun Jul 15 00:33:47 2007 @@ -70,8 +70,7 @@ return new P0 = """for_stmt< 'for' any 'in' node=any ':' any* > - | list_for< 'for' any 'in' node=any any* > - | gen_for< 'for' any 'in' node=any any* > + | comp_for< 'for' any 'in' node=any any* > """ p0 = patcomp.compile_pattern(P0) Modified: sandbox/trunk/2to3/fixes/util.py ============================================================================== --- sandbox/trunk/2to3/fixes/util.py (original) +++ sandbox/trunk/2to3/fixes/util.py Sun Jul 15 00:33:47 2007 @@ -96,8 +96,8 @@ test.set_prefix(" ") if_leaf = Leaf(token.NAME, "if") if_leaf.set_prefix(" ") - inner_args.append(Node(syms.list_if, [if_leaf, test])) - inner = Node(syms.listmaker, [xp, Node(syms.list_for, inner_args)]) + inner_args.append(Node(syms.comp_if, [if_leaf, test])) + inner = Node(syms.listmaker, [xp, Node(syms.comp_for, inner_args)]) return Node(syms.atom, [Leaf(token.LBRACE, "["), inner, Modified: sandbox/trunk/2to3/tests/data/py3_test_grammar.py ============================================================================== --- sandbox/trunk/2to3/tests/data/py3_test_grammar.py (original) +++ sandbox/trunk/2to3/tests/data/py3_test_grammar.py Sun Jul 15 00:33:47 2007 @@ -1,5 +1,3 @@ -# Python 3's Lib/test/test_grammar.py (r53781) - # Python test set -- part 1, grammar. # This just tests whether the parser accepts them all. 
@@ -16,7 +14,6 @@ # testing import * from sys import * - at class_decorator class TokenTests(unittest.TestCase): def testBackslash(self): @@ -30,26 +27,32 @@ self.assertEquals(x, 0, 'backslash ending comment') def testPlainIntegers(self): + self.assertEquals(type(000), type(0)) self.assertEquals(0xff, 255) - self.assertEquals(0377, 255) - self.assertEquals(2147483647, 017777777777) + self.assertEquals(0o377, 255) + self.assertEquals(2147483647, 0o17777777777) + self.assertEquals(0b1001, 9) from sys import maxint if maxint == 2147483647: - self.assertEquals(-2147483647-1, -020000000000) + self.assertEquals(-2147483647-1, -0o20000000000) # XXX -2147483648 - self.assert_(037777777777 > 0) + self.assert_(0o37777777777 > 0) self.assert_(0xffffffff > 0) - for s in '2147483648', '040000000000', '0x100000000': + self.assert_(0b1111111111111111111111111111111 > 0) + for s in ('2147483648', '0o40000000000', '0x100000000', + '0b10000000000000000000000000000000'): try: x = eval(s) except OverflowError: self.fail("OverflowError on huge integer literal %r" % s) elif maxint == 9223372036854775807: - self.assertEquals(-9223372036854775807-1, -01000000000000000000000) - self.assert_(01777777777777777777777 > 0) + self.assertEquals(-9223372036854775807-1, -0o1000000000000000000000) + self.assert_(0o1777777777777777777777 > 0) self.assert_(0xffffffffffffffff > 0) - for s in '9223372036854775808', '02000000000000000000000', \ - '0x10000000000000000': + self.assert_(0b11111111111111111111111111111111111111111111111111111111111111 > 0) + for s in '9223372036854775808', '0o2000000000000000000000', \ + '0x10000000000000000', \ + '0b100000000000000000000000000000000000000000000000000000000000000': try: x = eval(s) except OverflowError: @@ -59,13 +62,13 @@ def testLongIntegers(self): x = 0 - x = 0 - x = 0xffffffffffffffff x = 0xffffffffffffffff - x = 077777777777777777 - x = 077777777777777777 - x = 123456789012345678901234567890 + x = 0Xffffffffffffffff + x = 0o77777777777777777 + x = 
0O77777777777777777 x = 123456789012345678901234567890 + x = 0b100000000000000000000000000000000000000000000000000000000000000000000 + x = 0B111111111111111111111111111111111111111111111111111111111111111111111 def testFloats(self): x = 3.14 @@ -124,6 +127,7 @@ def testEllipsis(self): x = ... self.assert_(x is Ellipsis) + self.assertRaises(SyntaxError, eval, ".. .") class GrammarTests(unittest.TestCase): @@ -146,51 +150,32 @@ ### decorators: decorator+ ### parameters: '(' [typedargslist] ')' ### typedargslist: ((tfpdef ['=' test] ',')* - ### ('*' [tname] (',' tname ['=' test])* [',' '**' tname] | '**' tname) + ### ('*' [tfpdef] (',' tfpdef ['=' test])* [',' '**' tfpdef] | '**' tfpdef) ### | tfpdef ['=' test] (',' tfpdef ['=' test])* [',']) - ### tname: NAME [':' test] - ### tfpdef: tname | '(' tfplist ')' - ### tfplist: tfpdef (',' tfpdef)* [','] + ### tfpdef: NAME [':' test] ### varargslist: ((vfpdef ['=' test] ',')* - ### ('*' [vname] (',' vname ['=' test])* [',' '**' vname] | '**' vname) + ### ('*' [vfpdef] (',' vfpdef ['=' test])* [',' '**' vfpdef] | '**' vfpdef) ### | vfpdef ['=' test] (',' vfpdef ['=' test])* [',']) - ### vname: NAME - ### vfpdef: vname | '(' vfplist ')' - ### vfplist: vfpdef (',' vfpdef)* [','] + ### vfpdef: NAME def f1(): pass f1() f1(*()) f1(*(), **{}) def f2(one_argument): pass def f3(two, arguments): pass - def f4(two, (compound, (argument, list))): pass - def f5((compound, first), two): pass - self.assertEquals(f2.func_code.co_varnames, ('one_argument',)) - self.assertEquals(f3.func_code.co_varnames, ('two', 'arguments')) - if sys.platform.startswith('java'): - self.assertEquals(f4.func_code.co_varnames, - ('two', '(compound, (argument, list))', 'compound', 'argument', - 'list',)) - self.assertEquals(f5.func_code.co_varnames, - ('(compound, first)', 'two', 'compound', 'first')) - else: - self.assertEquals(f4.func_code.co_varnames, - ('two', '.1', 'compound', 'argument', 'list')) - self.assertEquals(f5.func_code.co_varnames, - ('.0', 
'two', 'compound', 'first')) + self.assertEquals(f2.__code__.co_varnames, ('one_argument',)) + self.assertEquals(f3.__code__.co_varnames, ('two', 'arguments')) def a1(one_arg,): pass def a2(two, args,): pass def v0(*rest): pass def v1(a, *rest): pass def v2(a, b, *rest): pass - def v3(a, (b, c), *rest): return a, b, c, rest f1() f2(1) f2(1,) f3(1, 2) f3(1, 2,) - f4(1, (2, (3, 4))) v0() v0(1) v0(1,) @@ -205,17 +190,7 @@ v2(1,2,3) v2(1,2,3,4) v2(1,2,3,4,5,6,7,8,9,0) - v3(1,(2,3)) - v3(1,(2,3),4) - v3(1,(2,3),4,5,6,7,8,9,0) - - # ceval unpacks the formal arguments into the first argcount names; - # thus, the names nested inside tuples must appear after these names. - if sys.platform.startswith('java'): - self.assertEquals(v3.func_code.co_varnames, ('a', '(b, c)', 'rest', 'b', 'c')) - else: - self.assertEquals(v3.func_code.co_varnames, ('a', '.1', 'rest', 'b', 'c')) - self.assertEquals(v3(1, (2, 3), 4), (1, 2, 3, (4,))) + def d01(a=1): pass d01() d01(1) @@ -288,10 +263,6 @@ d22v(*(1, 2, 3, 4)) d22v(1, 2, *(3, 4, 5)) d22v(1, *(2, 3), **{'d': 4}) - def d31v((x)): pass - d31v(1) - def d32v((x,)): pass - d32v((1,)) # keyword only argument tests def pos0key1(*, key): return key pos0key1(key=100) @@ -305,25 +276,37 @@ # argument annotation tests def f(x) -> list: pass - self.assertEquals(f.func_annotations, {'return': list}) + self.assertEquals(f.__annotations__, {'return': list}) def f(x:int): pass - self.assertEquals(f.func_annotations, {'x': int}) + self.assertEquals(f.__annotations__, {'x': int}) def f(*x:str): pass - self.assertEquals(f.func_annotations, {'x': str}) + self.assertEquals(f.__annotations__, {'x': str}) def f(**x:float): pass - self.assertEquals(f.func_annotations, {'x': float}) + self.assertEquals(f.__annotations__, {'x': float}) def f(x, y:1+2): pass - self.assertEquals(f.func_annotations, {'y': 3}) - def f(a, (b:1, c:2, d)): pass - self.assertEquals(f.func_annotations, {'b': 1, 'c': 2}) - def f(a, (b:1, c:2, d), e:3=4, f=5, *g:6): pass - 
self.assertEquals(f.func_annotations, + self.assertEquals(f.__annotations__, {'y': 3}) + def f(a, b:1, c:2, d): pass + self.assertEquals(f.__annotations__, {'b': 1, 'c': 2}) + def f(a, b:1, c:2, d, e:3=4, f=5, *g:6): pass + self.assertEquals(f.__annotations__, {'b': 1, 'c': 2, 'e': 3, 'g': 6}) - def f(a, (b:1, c:2, d), e:3=4, f=5, *g:6, h:7, i=8, j:9=10, + def f(a, b:1, c:2, d, e:3=4, f=5, *g:6, h:7, i=8, j:9=10, **k:11) -> 12: pass - self.assertEquals(f.func_annotations, + self.assertEquals(f.__annotations__, {'b': 1, 'c': 2, 'e': 3, 'g': 6, 'h': 7, 'j': 9, 'k': 11, 'return': 12}) + # Check for SF Bug #1697248 - mixing decorators and a return annotation + def null(x): return x + @null + def f(x) -> list: pass + self.assertEquals(f.__annotations__, {'return': list}) + + # test MAKE_CLOSURE with a variety of oparg's + closure = 1 + def f(): return closure + def f(x=1): return closure + def f(*, k=1): return closure + def f() -> int: return closure def testLambdef(self): ### lambdef: 'lambda' [varargslist] ':' test @@ -354,7 +337,7 @@ x = 1; pass; del x; foo() - ### small_stmt: expr_stmt | pass_stmt | del_stmt | flow_stmt | import_stmt | global_stmt | access_stmt + ### small_stmt: expr_stmt | pass_stmt | del_stmt | flow_stmt | import_stmt | global_stmt | access_stmt # Tested below def testExprStmt(self): @@ -714,6 +697,20 @@ def meth2(self, arg): pass def meth3(self, a1, a2): pass + # decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE + # decorators: decorator+ + # decorated: decorators (classdef | funcdef) + def class_decorator(x): return x + @class_decorator + class G: pass + + def testDictcomps(self): + # dictorsetmaker: ( (test ':' test (comp_for | + # (',' test ':' test)* [','])) | + # (test (comp_for | (',' test)* [','])) ) + nums = [1, 2, 3] + self.assertEqual({i:i+1 for i in nums}, {1: 2, 2: 3, 3: 4}) + def testListcomps(self): # list comprehension tests nums = [1, 2, 3, 4, 5] @@ -780,9 +777,9 @@ def testGenexps(self): # generator expression tests g = 
([x for x in range(10)] for x in range(1)) - self.assertEqual(g.next(), [x for x in range(10)]) + self.assertEqual(next(g), [x for x in range(10)]) try: - g.next() + next(g) self.fail('should produce StopIteration exception') except StopIteration: pass @@ -790,7 +787,7 @@ a = 1 try: g = (a for d in a) - g.next() + next(g) self.fail('should produce TypeError') except TypeError: pass @@ -838,7 +835,8 @@ print(x) return ret - self.assertEqual([ x() for x in lambda: True, lambda: False if x() ], [True]) + # the next line is not allowed anymore + #self.assertEqual([ x() for x in lambda: True, lambda: False if x() ], [True]) self.assertEqual([ x() for x in (lambda: True, lambda: False) if x() ], [True]) self.assertEqual([ x(False) for x in (lambda x: False if x else True, lambda x: True if x else False) if x(False) ], [True]) self.assertEqual((5 if 1 else _checkeval("check 1", 0)), 5) From python-checkins at python.org Sun Jul 15 00:34:03 2007 From: python-checkins at python.org (collin.winter) Date: Sun, 15 Jul 2007 00:34:03 +0200 (CEST) Subject: [Python-checkins] r56387 - in sandbox/trunk/2to3: README fixes/fix_funcattrs.py tests/test_fixers.py Message-ID: <20070714223403.44DBE1E4008@bag.python.org> Author: collin.winter Date: Sun Jul 15 00:34:02 2007 New Revision: 56387 Added: sandbox/trunk/2to3/fixes/fix_funcattrs.py Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/README sandbox/trunk/2to3/tests/test_fixers.py Log: Add a fixer for function attributes (f.func_x -> f.__x__). Modified: sandbox/trunk/2to3/README ============================================================================== --- sandbox/trunk/2to3/README (original) +++ sandbox/trunk/2to3/README Sun Jul 15 00:34:02 2007 @@ -45,6 +45,8 @@ * **fix_filter** - changes filter(F, X) into list(filter(F, X)). +* **fix_funcattrs** - fix function attribute names (f.func_x -> f.__x__). + * **fix_has_key** - "d.has_key(x)" -> "x in d". * **fix_input** - "input()" -> "eval(input())" (PEP 3111). 
Added: sandbox/trunk/2to3/fixes/fix_funcattrs.py ============================================================================== --- (empty file) +++ sandbox/trunk/2to3/fixes/fix_funcattrs.py Sun Jul 15 00:34:02 2007 @@ -0,0 +1,19 @@ +"""Fix function attribute names (f.func_x -> f.__x__).""" +# Author: Collin Winter + +# Local imports +from fixes import basefix +from fixes.util import Name + + +class FixFuncattrs(basefix.BaseFix): + PATTERN = """ + power< any trailer< '.' attr=('func_closure' | 'func_doc' | 'func_globals' + | 'func_name' | 'func_defaults' | 'func_code' + | 'func_dict') > any* > + """ + + def transform(self, node, results): + attr = results["attr"][0] + attr.replace(Name(("__%s__" % attr.value[5:]), + prefix=attr.get_prefix())) Modified: sandbox/trunk/2to3/tests/test_fixers.py ============================================================================== --- sandbox/trunk/2to3/tests/test_fixers.py (original) +++ sandbox/trunk/2to3/tests/test_fixers.py Sun Jul 15 00:34:02 2007 @@ -1180,6 +1180,26 @@ self.check(b, a) +class Test_funcattrs(FixerTestCase): + fixer = "funcattrs" + + attrs = ["closure", "doc", "name", "defaults", "code", "globals", "dict"] + + def test(self): + for attr in self.attrs: + b = "a.func_%s" % attr + a = "a.__%s__" % attr + self.check(b, a) + + def test_unchanged(self): + for attr in self.attrs: + s = "foo(func_%s + 5)" % attr + self.check(s, s) + + s = "f(foo.__%s__)" % attr + self.check(s, s) + + class Test_stringio(FixerTestCase): fixer = "stringio" From python-checkins at python.org Sun Jul 15 00:34:18 2007 From: python-checkins at python.org (collin.winter) Date: Sun, 15 Jul 2007 00:34:18 +0200 (CEST) Subject: [Python-checkins] r56388 - in sandbox/trunk/2to3: pytree.py tests/test_pytree.py Message-ID: <20070714223418.7B1201E4008@bag.python.org> Author: collin.winter Date: Sun Jul 15 00:34:18 2007 New Revision: 56388 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/pytree.py 
sandbox/trunk/2to3/tests/test_pytree.py Log: Remove the ability to remove a node by passing None to replace(); add the ability to replace a node with a list of nodes. Modified: sandbox/trunk/2to3/pytree.py ============================================================================== --- sandbox/trunk/2to3/pytree.py (original) +++ sandbox/trunk/2to3/pytree.py Sun Jul 15 00:34:18 2007 @@ -101,28 +101,26 @@ raise NotImplementedError def replace(self, new): - """Replaces this node with a new one in the parent. - - This can also be used to remove this node from the parent by - passing None. - """ + """Replaces this node with a new one in the parent.""" assert self.parent is not None, str(self) - assert new is None or new.parent is None, str(new) + assert new is not None + if not isinstance(new, list): + new = [new] l_children = [] found = False for ch in self.parent.children: if ch is self: assert not found, (self.parent.children, self, new) if new is not None: - l_children.append(new) + l_children.extend(new) found = True else: l_children.append(ch) assert found, (self.children, self, new) - self.changed() - self.parent.children = tuple(l_children) - if new is not None: - new.parent = self.parent + self.parent.changed() + self.parent.children = l_children + for x in new: + x.parent = self.parent self.parent = None def get_lineno(self): Modified: sandbox/trunk/2to3/tests/test_pytree.py ============================================================================== --- sandbox/trunk/2to3/tests/test_pytree.py (original) +++ sandbox/trunk/2to3/tests/test_pytree.py Sun Jul 15 00:34:18 2007 @@ -140,12 +140,24 @@ l3 = pytree.Leaf(100, "bar") n1 = pytree.Node(1000, [l1, l2, l3]) self.assertEqual(n1.children, [l1, l2, l3]) + self.failUnless(isinstance(n1.children, list)) self.failIf(n1.was_changed) l2new = pytree.Leaf(100, "-") l2.replace(l2new) - self.assertEqual(n1.children, (l1, l2new, l3)) + self.assertEqual(n1.children, [l1, l2new, l3]) + 
self.failUnless(isinstance(n1.children, list)) self.failUnless(n1.was_changed) + def testReplaceWithList(self): + l1 = pytree.Leaf(100, "foo") + l2 = pytree.Leaf(100, "+") + l3 = pytree.Leaf(100, "bar") + n1 = pytree.Node(1000, [l1, l2, l3]) + + l2.replace([pytree.Leaf(100, "*"), pytree.Leaf(100, "*")]) + self.assertEqual(str(n1), "foo**bar") + self.failUnless(isinstance(n1.children, list)) + def testConvert(self): # XXX pass From python-checkins at python.org Sun Jul 15 00:34:33 2007 From: python-checkins at python.org (collin.winter) Date: Sun, 15 Jul 2007 00:34:33 +0200 (CEST) Subject: [Python-checkins] r56389 - in sandbox/trunk/2to3: tests/test_pytree.py Message-ID: <20070714223433.6113D1E4008@bag.python.org> Author: collin.winter Date: Sun Jul 15 00:34:33 2007 New Revision: 56389 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/tests/test_pytree.py Log: Remove the stub for a test that's never going to get filled in. Modified: sandbox/trunk/2to3/tests/test_pytree.py ============================================================================== --- sandbox/trunk/2to3/tests/test_pytree.py (original) +++ sandbox/trunk/2to3/tests/test_pytree.py Sun Jul 15 00:34:33 2007 @@ -158,10 +158,6 @@ self.assertEqual(str(n1), "foo**bar") self.failUnless(isinstance(n1.children, list)) - def testConvert(self): - # XXX - pass - def testPostOrder(self): l1 = pytree.Leaf(100, "foo") l2 = pytree.Leaf(100, "bar") From python-checkins at python.org Sun Jul 15 00:34:48 2007 From: python-checkins at python.org (collin.winter) Date: Sun, 15 Jul 2007 00:34:48 +0200 (CEST) Subject: [Python-checkins] r56390 - in sandbox/trunk/2to3: README fixes/fix_xreadlines.py tests/test_fixers.py Message-ID: <20070714223448.808921E4016@bag.python.org> Author: collin.winter Date: Sun Jul 15 00:34:48 2007 New Revision: 56390 Added: sandbox/trunk/2to3/fixes/fix_xreadlines.py Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/README sandbox/trunk/2to3/tests/test_fixers.py Log: 
Add a fixer to convert 'for x in f.xreadlines():' into 'for x in f:'. Modified: sandbox/trunk/2to3/README ============================================================================== --- sandbox/trunk/2to3/README (original) +++ sandbox/trunk/2to3/README Sun Jul 15 00:34:48 2007 @@ -87,6 +87,9 @@ * **fix_xrange** - "xrange()" -> "range()". +* **fix_xreadlines** - "for x in f.xreadlines():" -> "for x in f:". Also, + "g(f.xreadlines)" -> "g(f.__iter__)". + Limitations =========== Added: sandbox/trunk/2to3/fixes/fix_xreadlines.py ============================================================================== --- (empty file) +++ sandbox/trunk/2to3/fixes/fix_xreadlines.py Sun Jul 15 00:34:48 2007 @@ -0,0 +1,24 @@ +"""Fix "for x in f.xreadlines()" -> "for x in f". + +This fixer will also convert g(f.xreadlines) into g(f.__iter__).""" +# Author: Collin Winter + +# Local imports +from fixes import basefix +from fixes.util import Name + + +class FixXreadlines(basefix.BaseFix): + PATTERN = """ + power< call=any+ trailer< '.' 'xreadlines' > trailer< '(' ')' > > + | + power< any+ trailer< '.' 
no_call='xreadlines' > > + """ + + def transform(self, node, results): + no_call = results.get("no_call") + + if no_call: + no_call.replace(Name("__iter__", prefix=no_call.get_prefix())) + else: + node.replace([x.clone() for x in results["call"]]) Modified: sandbox/trunk/2to3/tests/test_fixers.py ============================================================================== --- sandbox/trunk/2to3/tests/test_fixers.py (original) +++ sandbox/trunk/2to3/tests/test_fixers.py Sun Jul 15 00:34:48 2007 @@ -1200,6 +1200,49 @@ self.check(s, s) +class Test_xreadlines(FixerTestCase): + fixer = "xreadlines" + + def test_call(self): + b = "for x in f.xreadlines(): pass" + a = "for x in f: pass" + self.check(b, a) + + b = "for x in foo().xreadlines(): pass" + a = "for x in foo(): pass" + self.check(b, a) + + b = "for x in (5 + foo()).xreadlines(): pass" + a = "for x in (5 + foo()): pass" + self.check(b, a) + + def test_attr_ref(self): + b = "foo(f.xreadlines + 5)" + a = "foo(f.__iter__ + 5)" + self.check(b, a) + + b = "foo(f().xreadlines + 5)" + a = "foo(f().__iter__ + 5)" + self.check(b, a) + + b = "foo((5 + f()).xreadlines + 5)" + a = "foo((5 + f()).__iter__ + 5)" + self.check(b, a) + + def test_unchanged(self): + s = "for x in f.xreadlines(5): pass" + self.check(s, s) + + s = "for x in f.xreadlines(k=5): pass" + self.check(s, s) + + s = "for x in f.xreadlines(*k, **v): pass" + self.check(s, s) + + s = "foo(xreadlines)" + self.check(s, s) + + class Test_stringio(FixerTestCase): fixer = "stringio" From python-checkins at python.org Sun Jul 15 00:35:04 2007 From: python-checkins at python.org (collin.winter) Date: Sun, 15 Jul 2007 00:35:04 +0200 (CEST) Subject: [Python-checkins] r56391 - in sandbox/trunk/2to3: README fixes/fix_sysexcattrs.py tests/test_fixers.py Message-ID: <20070714223504.1D8D71E401C@bag.python.org> Author: collin.winter Date: Sun Jul 15 00:35:03 2007 New Revision: 56391 Removed: sandbox/trunk/2to3/fixes/fix_sysexcattrs.py Modified: sandbox/trunk/2to3/ (props 
changed) sandbox/trunk/2to3/README sandbox/trunk/2to3/tests/test_fixers.py Log: Remove the sysexcattrs fixer; 2.6's warning mode will take care of this. Modified: sandbox/trunk/2to3/README ============================================================================== --- sandbox/trunk/2to3/README (original) +++ sandbox/trunk/2to3/README Sun Jul 15 00:35:03 2007 @@ -75,9 +75,6 @@ * **fix_stringio** - StringIO.StringIO -> io.StringIO (imports, too). -* **fix_sysexcattrs** - warn on usage of sys.value, sys.type and - sys.traceback. - * **fix_throw** - fix generator.throw() calls to be 3.0-compliant (PEP 3109). * **fix_tuple_params** - remove tuple parameters from function, method and Deleted: /sandbox/trunk/2to3/fixes/fix_sysexcattrs.py ============================================================================== --- /sandbox/trunk/2to3/fixes/fix_sysexcattrs.py Sun Jul 15 00:35:03 2007 +++ (empty file) @@ -1,18 +0,0 @@ -"""Fixer/warner for sys.exc_{value,type,traceback}""" -# Author: Collin Winter - -# Local imports -from pytree import Leaf -from fixes import basefix - - -class FixSysexcattrs(basefix.BaseFix): - - PATTERN = """ - power< 'sys' - trailer< '.' 
('exc_value' | 'exc_traceback' | 'exc_type')> - any* > - """ - - def transform(self, node, results): - self.cannot_convert(node, "This attribute is going away in Python 3") Modified: sandbox/trunk/2to3/tests/test_fixers.py ============================================================================== --- sandbox/trunk/2to3/tests/test_fixers.py (original) +++ sandbox/trunk/2to3/tests/test_fixers.py Sun Jul 15 00:35:03 2007 @@ -940,20 +940,6 @@ self.check(b, a) -class Test_sysexcattrs(FixerTestCase): - fixer = "sysexcattrs" - - def test(self): - s = """f = sys.exc_type""" - self.warns(s, s, "This attribute is going away") - - s = """f = sys.exc_value""" - self.warns(s, s, "This attribute is going away") - - s = """f = sys.exc_traceback""" - self.warns(s, s, "This attribute is going away") - - class Test_dict(FixerTestCase): fixer = "dict" From python-checkins at python.org Sun Jul 15 00:41:45 2007 From: python-checkins at python.org (facundo.batista) Date: Sun, 15 Jul 2007 00:41:45 +0200 (CEST) Subject: [Python-checkins] r56392 - python/trunk/Lib/test/test_asyncore.py Message-ID: <20070714224145.9C1A31E4008@bag.python.org> Author: facundo.batista Date: Sun Jul 15 00:41:45 2007 New Revision: 56392 Added: python/trunk/Lib/test/test_asyncore.py Log: First version. Includes tests for helper functions: read, write, _exception, readwrite, closeall, compact_traceback; and for classes dispatcher, dispatcher_with_send, and file_wrapper. 
[Alan McIntyre - GSoC] Added: python/trunk/Lib/test/test_asyncore.py ============================================================================== --- (empty file) +++ python/trunk/Lib/test/test_asyncore.py Sun Jul 15 00:41:45 2007 @@ -0,0 +1,370 @@ +import asyncore +import unittest +import select +import os +import socket +import threading +import sys +import time + +from test import test_support +from test.test_support import TESTFN, run_unittest, unlink +from StringIO import StringIO + +HOST = "127.0.0.1" +PORT = 54322 + +class dummysocket: + def __init__(self): + self.closed = False + + def close(self): + self.closed = True + + def fileno(self): + return 42 + +class dummychannel: + def __init__(self): + self.socket = dummysocket() + +class exitingdummy: + def __init__(self): + pass + + def handle_read_event(self): + raise asyncore.ExitNow() + + handle_write_event = handle_read_event + handle_expt_event = handle_read_event + +class crashingdummy: + def __init__(self): + self.error_handled = False + + def handle_read_event(self): + raise Exception() + + handle_write_event = handle_read_event + handle_expt_event = handle_read_event + + def handle_error(self): + self.error_handled = True + +# used when testing senders; just collects what it gets until newline is sent +class capture_server(threading.Thread): + def run(self): + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + global PORT + PORT = test_support.bind_port(sock, HOST, PORT) + sock.listen(1) + conn, client = sock.accept() + self.captured = "" + while 1: + data = conn.recv(10) + if '\n' in data: + break + self.captured = self.captured + data + + conn.close() + sock.close() + + +class HelperFunctionTests(unittest.TestCase): + def test_readwriteexc(self): + # Check exception handling behavior of read, write and _exception + + # check that ExitNow exceptions in the object handler method + # bubbles all the way up through asyncore 
read/write/_exception calls + tr1 = exitingdummy() + self.assertRaises(asyncore.ExitNow, asyncore.read, tr1) + self.assertRaises(asyncore.ExitNow, asyncore.write, tr1) + self.assertRaises(asyncore.ExitNow, asyncore._exception, tr1) + + # check that an exception other than ExitNow in the object handler + # method causes the handle_error method to get called + tr2 = crashingdummy() + asyncore.read(tr2) + self.assertEqual(tr2.error_handled, True) + + tr2 = crashingdummy() + asyncore.write(tr2) + self.assertEqual(tr2.error_handled, True) + + tr2 = crashingdummy() + asyncore._exception(tr2) + self.assertEqual(tr2.error_handled, True) + + def test_readwrite(self): + # Check that correct methods are called by readwrite() + + class testobj: + def __init__(self): + self.read = False + self.write = False + self.expt = False + + def handle_read_event(self): + self.read = True + + def handle_write_event(self): + self.write = True + + def handle_expt_event(self): + self.expt = True + + def handle_error(self): + self.error_handled = True + + for flag in (select.POLLIN, select.POLLPRI): + tobj = testobj() + self.assertEqual(tobj.read, False) + asyncore.readwrite(tobj, flag) + self.assertEqual(tobj.read, True) + + # check that ExitNow exceptions in the object handler method + # bubbles all the way up through asyncore readwrite call + tr1 = exitingdummy() + self.assertRaises(asyncore.ExitNow, asyncore.readwrite, tr1, flag) + + # check that an exception other than ExitNow in the object handler + # method causes the handle_error method to get called + tr2 = crashingdummy() + asyncore.readwrite(tr2, flag) + self.assertEqual(tr2.error_handled, True) + + tobj = testobj() + self.assertEqual(tobj.write, False) + asyncore.readwrite(tobj, select.POLLOUT) + self.assertEqual(tobj.write, True) + + # check that ExitNow exceptions in the object handler method + # bubbles all the way up through asyncore readwrite call + tr1 = exitingdummy() + self.assertRaises(asyncore.ExitNow, 
asyncore.readwrite, tr1, + select.POLLOUT) + + # check that an exception other than ExitNow in the object handler + # method causes the handle_error method to get called + tr2 = crashingdummy() + asyncore.readwrite(tr2, select.POLLOUT) + self.assertEqual(tr2.error_handled, True) + + for flag in (select.POLLERR, select.POLLHUP, select.POLLNVAL): + tobj = testobj() + self.assertEqual(tobj.expt, False) + asyncore.readwrite(tobj, flag) + self.assertEqual(tobj.expt, True) + + # check that ExitNow exceptions in the object handler method + # bubbles all the way up through asyncore readwrite calls + tr1 = exitingdummy() + self.assertRaises(asyncore.ExitNow, asyncore.readwrite, tr1, flag) + + # check that an exception other than ExitNow in the object handler + # method causes the handle_error method to get called + tr2 = crashingdummy() + asyncore.readwrite(tr2, flag) + self.assertEqual(tr2.error_handled, True) + + def test_closeall(self): + self.closeall_check(False) + + def test_closeall_default(self): + self.closeall_check(True) + + def closeall_check(self, usedefault): + # Check that close_all() closes everything in a given map + + l = [] + testmap = {} + for i in range(10): + c = dummychannel() + l.append(c) + self.assertEqual(c.socket.closed, False) + testmap[i] = c + + if usedefault: + socketmap = asyncore.socket_map + try: + asyncore.socket_map = testmap + asyncore.close_all() + finally: + testmap, asyncore.socket_map = asyncore.socket_map, socketmap + else: + asyncore.close_all(testmap) + + self.assertEqual(len(testmap), 0) + + for c in l: + self.assertEqual(c.socket.closed, True) + + def test_compact_traceback(self): + try: + raise Exception("I don't like spam!") + except: + real_t, real_v, real_tb = sys.exc_info() + r = asyncore.compact_traceback() + else: + self.fail("Expected exception") + + (f, function, line), t, v, info = r + self.assertEqual(os.path.split(f)[-1], 'test_asyncore.py') + self.assertEqual(function, 'test_compact_traceback') + 
self.assertEqual(t, real_t) + self.assertEqual(v, real_v) + self.assertEqual(info, '[%s|%s|%s]' % (f, function, line)) + + +class DispatcherTests(unittest.TestCase): + def test_basic(self): + d = asyncore.dispatcher() + self.assertEqual(d.readable(), True) + self.assertEqual(d.writable(), True) + + def test_repr(self): + d = asyncore.dispatcher() + self.assertEqual(repr(d), '' % id(d)) + + def test_log(self): + d = asyncore.dispatcher() + + # capture output of dispatcher.log() (to stderr) + fp = StringIO() + stderr = sys.stderr + l1 = "Lovely spam! Wonderful spam!" + l2 = "I don't like spam!" + try: + sys.stderr = fp + d.log(l1) + d.log(l2) + finally: + sys.stderr = stderr + + lines = fp.getvalue().splitlines() + self.assertEquals(lines, ['log: %s' % l1, 'log: %s' % l2]) + + def test_log_info(self): + d = asyncore.dispatcher() + + # capture output of dispatcher.log_info() (to stdout via print) + fp = StringIO() + stdout = sys.stdout + l1 = "Have you got anything without spam?" + l2 = "Why can't she have egg bacon spam and sausage?" + l3 = "THAT'S got spam in it!" 
+ try: + sys.stdout = fp + d.log_info(l1, 'EGGS') + d.log_info(l2) + d.log_info(l3, 'SPAM') + finally: + sys.stdout = stdout + + lines = fp.getvalue().splitlines() + expected = ['EGGS: %s' % l1, 'info: %s' % l2, 'SPAM: %s' % l3] + self.assertEquals(lines, expected) + + def test_unhandled(self): + d = asyncore.dispatcher() + + # capture output of dispatcher.log_info() (to stdout via print) + fp = StringIO() + stdout = sys.stdout + try: + sys.stdout = fp + d.handle_expt() + d.handle_read() + d.handle_write() + d.handle_connect() + d.handle_accept() + finally: + sys.stdout = stdout + + lines = fp.getvalue().splitlines() + expected = ['warning: unhandled exception', + 'warning: unhandled read event', + 'warning: unhandled write event', + 'warning: unhandled connect event', + 'warning: unhandled accept event'] + self.assertEquals(lines, expected) + + + +class dispatcherwithsend_noread(asyncore.dispatcher_with_send): + def readable(self): + return False + + def handle_connect(self): + pass + +class DispatcherWithSendTests(unittest.TestCase): + usepoll = False + + def test_send(self): + s = capture_server() + s.start() + time.sleep(1) # Give server time to initialize + + data = "Suppose there isn't a 16-ton weight?"*100 + d = dispatcherwithsend_noread() + d.create_socket(socket.AF_INET, socket.SOCK_STREAM) + d.connect((HOST, PORT)) + d.send(data) + d.send('\n') + + while d.out_buffer: + asyncore.poll() + + s.stopit = True + s.join() + + self.assertEqual(s.captured, data) + + +class DispatcherWithSendTests_UsePoll(DispatcherWithSendTests): + usepoll = True + +if hasattr(asyncore, 'file_wrapper'): + class FileWrapperTest(unittest.TestCase): + def setUp(self): + self.d = "It's not dead, it's sleeping!" 
+ file(TESTFN, 'w').write(self.d) + + def tearDown(self): + unlink(TESTFN) + + def test_recv(self): + fd = os.open(TESTFN, os.O_RDONLY) + w = asyncore.file_wrapper(fd) + + self.assertEqual(w.fd, fd) + self.assertEqual(w.fileno(), fd) + self.assertEqual(w.recv(13), "It's not dead") + self.assertEqual(w.read(6), ", it's") + w.close() + self.assertRaises(OSError, w.read, 1) + + def test_send(self): + d1 = "Come again?" + d2 = "I want to buy some cheese." + fd = os.open(TESTFN, os.O_WRONLY | os.O_APPEND) + w = asyncore.file_wrapper(fd) + + w.write(d1) + w.send(d2) + w.close() + self.assertEqual(file(TESTFN).read(), self.d + d1 + d2) + + +def test_main(): + tests = [HelperFunctionTests, DispatcherTests, DispatcherWithSendTests, + DispatcherWithSendTests_UsePoll] + if hasattr(asyncore, 'file_wrapper'): + tests.append(FileWrapperTest) + + run_unittest(*tests) + +if __name__ == "__main__": + test_main() From python-checkins at python.org Sun Jul 15 01:04:54 2007 From: python-checkins at python.org (collin.winter) Date: Sun, 15 Jul 2007 01:04:54 +0200 (CEST) Subject: [Python-checkins] r56393 - in sandbox/trunk/2to3: fixes/basefix.py fixes/fix_print.py fixes/fix_raise.py fixes/fix_raw_input.py fixes/fix_throw.py fixes/fix_tuple_params.py fixes/fix_unicode.py fixes/fix_xrange.py fixes/util.py Message-ID: <20070714230454.E4BA71E401A@bag.python.org> Author: collin.winter Date: Sun Jul 15 01:04:54 2007 New Revision: 56393 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/fixes/basefix.py sandbox/trunk/2to3/fixes/fix_print.py sandbox/trunk/2to3/fixes/fix_raise.py sandbox/trunk/2to3/fixes/fix_raw_input.py sandbox/trunk/2to3/fixes/fix_throw.py sandbox/trunk/2to3/fixes/fix_tuple_params.py sandbox/trunk/2to3/fixes/fix_unicode.py sandbox/trunk/2to3/fixes/fix_xrange.py sandbox/trunk/2to3/fixes/util.py Log: General fixer clean-up (whitespace, imports, use new utils, etc). 
Modified: sandbox/trunk/2to3/fixes/basefix.py ============================================================================== --- sandbox/trunk/2to3/fixes/basefix.py (original) +++ sandbox/trunk/2to3/fixes/basefix.py Sun Jul 15 01:04:54 2007 @@ -116,7 +116,7 @@ self.logger.warning(msg % (lineno, for_output)) if reason: self.logger.warning(reason) - + def warning(self, node, reason): """Used for warning the user about possible uncertainty in the translation. Modified: sandbox/trunk/2to3/fixes/fix_print.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_print.py (original) +++ sandbox/trunk/2to3/fixes/fix_print.py Sun Jul 15 01:04:54 2007 @@ -20,8 +20,8 @@ class FixPrint(basefix.BaseFix): PATTERN = """ - 'print' | print_stmt - """ + 'print' | print_stmt + """ def match(self, node): # Override Modified: sandbox/trunk/2to3/fixes/fix_raise.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_raise.py (original) +++ sandbox/trunk/2to3/fixes/fix_raise.py Sun Jul 15 01:04:54 2007 @@ -78,6 +78,6 @@ new.set_prefix(node.get_prefix()) return new else: - new = pytree.Node(syms.raise_stmt, [Name("raise"), Call(exc, args)]) - new.set_prefix(node.get_prefix()) - return new + return pytree.Node(syms.raise_stmt, + [Name("raise"), Call(exc, args)], + prefix=node.get_prefix()) Modified: sandbox/trunk/2to3/fixes/fix_raw_input.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_raw_input.py (original) +++ sandbox/trunk/2to3/fixes/fix_raw_input.py Sun Jul 15 01:04:54 2007 @@ -2,19 +2,15 @@ # Author: Andre Roberge # Local imports -import pytree from fixes import basefix from fixes.util import Name class FixRawInput(basefix.BaseFix): PATTERN = """ - power< 'raw_input' args=trailer< '(' [any] ')' > > + power< name='raw_input' trailer< '(' [any] ')' > > """ def transform(self, node, results): - args = 
results["args"] - - new = pytree.Node(self.syms.power, [Name("input"), args.clone()]) - new.set_prefix(node.get_prefix()) - return new + name = results["name"] + name.replace(Name("input", prefix=name.get_prefix())) Modified: sandbox/trunk/2to3/fixes/fix_throw.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_throw.py (original) +++ sandbox/trunk/2to3/fixes/fix_throw.py Sun Jul 15 01:04:54 2007 @@ -30,11 +30,11 @@ if exc.type is token.STRING: self.cannot_convert(node, "Python 3 does not support string exceptions") return - + # Leave "g.throw(E)" alone val = results.get("val") if val is None: - return + return val = val.clone() if is_tuple(val): @@ -48,7 +48,7 @@ if "tb" in results: tb = results["tb"].clone() tb.set_prefix("") - + e = Call(exc, args) with_tb = Attr(e, Name('with_traceback')) + [ArgList([tb])] throw_args.replace(pytree.Node(syms.power, with_tb)) Modified: sandbox/trunk/2to3/fixes/fix_tuple_params.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_tuple_params.py (original) +++ sandbox/trunk/2to3/fixes/fix_tuple_params.py Sun Jul 15 01:04:54 2007 @@ -33,11 +33,9 @@ lambda=lambdef< 'lambda' args=vfpdef< any+ > ':' body=any >""" def transform(self, node, results): - assert results - if "lambda" in results: - return self.transform_lambda(node) - + return self.transform_lambda(node, results) + new_lines = [] suite = results["suite"] args = results["args"] @@ -51,7 +49,7 @@ start = 0 indent = "; " end = pytree.Leaf(token.INDENT, "") - + # We need access to self for new_name(), and making this a method # doesn't feel right. Closing over self and new_lines makes the # code below cleaner. 
@@ -63,8 +61,9 @@ if add_prefix: n.set_prefix(" ") tuple_arg.replace(n) - new_lines.append(pytree.Node(syms.simple_stmt, [stmt, end.clone()])) - + new_lines.append(pytree.Node(syms.simple_stmt, + [stmt, end.clone()])) + if args.type == syms.tfpdef: handle_tuple(args) elif args.type == syms.typedargslist: @@ -73,15 +72,15 @@ # Without add_prefix, the emitted code is correct, # just ugly. handle_tuple(arg, add_prefix=(i > 0)) - + if not new_lines: return node - + # This isn't strictly necessary, but it plays nicely with other fixers. # TODO(cwinter) get rid of this when children becomes a smart list for line in new_lines: line.parent = suite[0] - + # TODO(cwinter) suite-cleanup after = start if start == 0: @@ -89,22 +88,20 @@ elif is_docstring(suite[0].children[start]): new_lines[0].set_prefix(indent) after = start + 1 - + suite[0].children[after:after] = new_lines for i in range(after+1, after+len(new_lines)+1): suite[0].children[i].set_prefix(indent) suite[0].changed() - - def transform_lambda(self, node): - results = self.match(node) - assert results + + def transform_lambda(self, node, results): args = results["args"] body = results["body"] params = find_params(args) to_index = map_to_index(params) tup_name = self.new_name(tuple_name(params)) - + new_param = Name(tup_name) new_param.set_prefix(args.get_prefix()) args.replace(new_param.clone()) Modified: sandbox/trunk/2to3/fixes/fix_unicode.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_unicode.py (original) +++ sandbox/trunk/2to3/fixes/fix_unicode.py Sun Jul 15 01:04:54 2007 @@ -3,7 +3,6 @@ """ import re -import pytree from pgen2 import token from fixes import basefix @@ -27,4 +26,3 @@ new = node.clone() new.value = new.value[1:] return new - return None Modified: sandbox/trunk/2to3/fixes/fix_xrange.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_xrange.py (original) +++ 
sandbox/trunk/2to3/fixes/fix_xrange.py Sun Jul 15 01:04:54 2007 @@ -4,23 +4,15 @@ """Fixer that changes xrange(...) into range(...).""" # Local imports -import pytree -from pgen2 import token from fixes import basefix from fixes.util import Name class FixXrange(basefix.BaseFix): PATTERN = """ - power< - 'xrange' - args=trailer< '(' [any] ')' > - > - """ + power< name='xrange' trailer< '(' [any] ')' > > + """ def transform(self, node, results): - args = results["args"] - new = pytree.Node(self.syms.power, - [Name("range"), args.clone()]) - new.set_prefix(node.get_prefix()) - return new + name = results["name"] + name.replace(Name("range", prefix=name.get_prefix())) Modified: sandbox/trunk/2to3/fixes/util.py ============================================================================== --- sandbox/trunk/2to3/fixes/util.py (original) +++ sandbox/trunk/2to3/fixes/util.py Sun Jul 15 01:04:54 2007 @@ -7,10 +7,6 @@ from pygram import python_symbols as syms -### Constant nodes -ass_leaf = Leaf(token.EQUAL, "=") -ass_leaf.set_prefix(" ") - ########################################################### ### Common node-construction "macros" ########################################################### @@ -33,7 +29,8 @@ source.set_prefix(" ") source = [source] - return Node(syms.atom, target + [ass_leaf.clone()] + source) + return Node(syms.atom, + target + [Leaf(token.EQUAL, "=", prefix=" ")] + source) def Name(name, prefix=None): """Return a NAME leaf""" @@ -102,7 +99,7 @@ [Leaf(token.LBRACE, "["), inner, Leaf(token.RBRACE, "]")]) - + ########################################################### ### Determine whether a node represents a given literal ########################################################### From python-checkins at python.org Sun Jul 15 01:05:10 2007 From: python-checkins at python.org (collin.winter) Date: Sun, 15 Jul 2007 01:05:10 +0200 (CEST) Subject: [Python-checkins] r56394 - in sandbox/trunk/2to3: tests/test_fixers.py Message-ID: 
<20070714230510.945891E4010@bag.python.org> Author: collin.winter Date: Sun Jul 15 01:05:10 2007 New Revision: 56394 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/tests/test_fixers.py Log: Additional tests for fix_raw_input and fix_input. Modified: sandbox/trunk/2to3/tests/test_fixers.py ============================================================================== --- sandbox/trunk/2to3/tests/test_fixers.py (original) +++ sandbox/trunk/2to3/tests/test_fixers.py Sun Jul 15 01:05:10 2007 @@ -1165,6 +1165,11 @@ a = """x = input('prompt')""" self.check(b, a) + def test_4(self): + b = """x = raw_input(foo(a) + 6)""" + a = """x = input(foo(a) + 6)""" + self.check(b, a) + class Test_funcattrs(FixerTestCase): fixer = "funcattrs" @@ -1322,6 +1327,11 @@ a = """x = eval(input('prompt'))""" self.check(b, a) + def test_4(self): + b = """x = input(foo(5) + 9)""" + a = """x = eval(input(foo(5) + 9))""" + self.check(b, a) + class Test_tuple_params(FixerTestCase): fixer = "tuple_params" From buildbot at python.org Sun Jul 15 01:33:59 2007 From: buildbot at python.org (buildbot at python.org) Date: Sat, 14 Jul 2007 23:33:59 +0000 Subject: [Python-checkins] buildbot warnings in x86 gentoo trunk Message-ID: <20070714233400.0AF4E1E4008@bag.python.org> The Buildbot has detected a new failure of x86 gentoo trunk. Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520gentoo%2520trunk/builds/2310 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista Build had warnings: warnings test Excerpt from the test logfile: sincerely, -The Buildbot From buildbot at python.org Sun Jul 15 01:46:12 2007 From: buildbot at python.org (buildbot at python.org) Date: Sat, 14 Jul 2007 23:46:12 +0000 Subject: [Python-checkins] buildbot warnings in x86 XP trunk Message-ID: <20070714234612.827541E4012@bag.python.org> The Buildbot has detected a new failure of x86 XP trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520XP%2520trunk/builds/518 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista Build had warnings: warnings failed slave lost sincerely, -The Buildbot From buildbot at python.org Sun Jul 15 01:57:44 2007 From: buildbot at python.org (buildbot at python.org) Date: Sat, 14 Jul 2007 23:57:44 +0000 Subject: [Python-checkins] buildbot warnings in sparc solaris10 gcc trunk Message-ID: <20070714235744.398861E4008@bag.python.org> The Buildbot has detected a new failure of sparc solaris10 gcc trunk. Full details are available at: http://www.python.org/dev/buildbot/all/sparc%2520solaris10%2520gcc%2520trunk/builds/2130 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista Build had warnings: warnings test Excerpt from the test logfile: sincerely, -The Buildbot From buildbot at python.org Sun Jul 15 02:14:20 2007 From: buildbot at python.org (buildbot at python.org) Date: Sun, 15 Jul 2007 00:14:20 +0000 Subject: [Python-checkins] buildbot warnings in PPC64 Debian trunk Message-ID: <20070715001420.24D351E4002@bag.python.org> The Buildbot has detected a new failure of PPC64 Debian trunk. Full details are available at: http://www.python.org/dev/buildbot/all/PPC64%2520Debian%2520trunk/builds/55 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista Build had warnings: warnings test Excerpt from the test logfile: sincerely, -The Buildbot From buildbot at python.org Sun Jul 15 02:35:06 2007 From: buildbot at python.org (buildbot at python.org) Date: Sun, 15 Jul 2007 00:35:06 +0000 Subject: [Python-checkins] buildbot warnings in g4 osx.4 trunk Message-ID: <20070715003506.4B02C1E4002@bag.python.org> The Buildbot has detected a new failure of g4 osx.4 trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/g4%2520osx.4%2520trunk/builds/2139 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista Build had warnings: warnings test Excerpt from the test logfile: sincerely, -The Buildbot From buildbot at python.org Sun Jul 15 02:44:46 2007 From: buildbot at python.org (buildbot at python.org) Date: Sun, 15 Jul 2007 00:44:46 +0000 Subject: [Python-checkins] buildbot warnings in S-390 Debian trunk Message-ID: <20070715004446.CFF231E4002@bag.python.org> The Buildbot has detected a new failure of S-390 Debian trunk. Full details are available at: http://www.python.org/dev/buildbot/all/S-390%2520Debian%2520trunk/builds/1045 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista Build had warnings: warnings test Excerpt from the test logfile: sincerely, -The Buildbot From buildbot at python.org Sun Jul 15 03:06:44 2007 From: buildbot at python.org (buildbot at python.org) Date: Sun, 15 Jul 2007 01:06:44 +0000 Subject: [Python-checkins] buildbot warnings in ia64 Ubuntu trunk trunk Message-ID: <20070715010644.7D2BC1E4002@bag.python.org> The Buildbot has detected a new failure of ia64 Ubuntu trunk trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/ia64%2520Ubuntu%2520trunk%2520trunk/builds/753 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista Build had warnings: warnings test Excerpt from the test logfile: sincerely, -The Buildbot From nnorwitz at gmail.com Sun Jul 15 10:12:58 2007 From: nnorwitz at gmail.com (Neal Norwitz) Date: Sun, 15 Jul 2007 04:12:58 -0400 Subject: [Python-checkins] Python Regression Test Failures opt (1) Message-ID: <20070715081258.GA3646@python.psfb.org> test_grammar test_opcodes test_dict test_builtin test_exceptions test_types test_unittest test_doctest test_doctest2 test_MimeWriter test_StringIO test___all__ test___future__ test__locale test_aepack test_aepack skipped -- No module named aepack test_al test_al skipped -- No module named al test_anydbm test_applesingle test_applesingle skipped -- No module named macostools test_array test_ast test_asynchat test_asyncore test test_asyncore failed -- Traceback (most recent call last): File "/tmp/python-test/local/lib/python2.6/test/test_asyncore.py", line 267, in test_log_info self.assertEquals(lines, expected) AssertionError: ['EGGS: Have you got anything without spam?', "SPAM: THAT'S got spam in it!"] != ['EGGS: Have you got anything without spam?', "info: Why can't she have egg bacon spam and sausage?", "SPAM: THAT'S got spam in it!"] test_atexit test_audioop test_augassign test_base64 test_bastion test_bigaddrspace test_bigmem test_binascii test_binhex test_binop test_bisect test_bool test_bsddb test_bsddb185 test_bsddb185 skipped -- No module named bsddb185 test_bsddb3 test_bsddb3 skipped -- Use of the `bsddb' resource not enabled test_bufio test_bz2 test_cProfile test_calendar test_call test_capi test_cd test_cd skipped -- No module named cd test_cfgparser test_cgi test_charmapcodec test_cl test_cl skipped -- No module named cl test_class test_cmath test_cmd_line test_code 
test_codeccallbacks test_codecencodings_cn test_codecencodings_hk test_codecencodings_jp test_codecencodings_kr test_codecencodings_tw test_codecmaps_cn test_codecmaps_cn skipped -- Use of the `urlfetch' resource not enabled test_codecmaps_hk test_codecmaps_hk skipped -- Use of the `urlfetch' resource not enabled test_codecmaps_jp test_codecmaps_jp skipped -- Use of the `urlfetch' resource not enabled test_codecmaps_kr test_codecmaps_kr skipped -- Use of the `urlfetch' resource not enabled test_codecmaps_tw test_codecmaps_tw skipped -- Use of the `urlfetch' resource not enabled test_codecs test_codeop test_coding test_coercion test_collections test_colorsys test_commands test_compare test_compile test_compiler test_complex test_complex_args test_contains test_contextlib test_cookie test_cookielib test_copy test_copy_reg test_cpickle test_crypt test_csv test_ctypes test_curses test_curses skipped -- Use of the `curses' resource not enabled test_datetime test_dbm test_decimal test_decorators test_defaultdict test_deque test_descr test_descrtut test_difflib test_dircache test_dis test_distutils [9008 refs] test_dl test_dumbdbm test_dummy_thread test_dummy_threading test_email test_email_codecs test_email_renamed test_enumerate test_eof test_errno test_exception_variations test_extcall test_fcntl test_file test_filecmp test_fileinput test_float test_fnmatch test_fork1 test_format test_fpformat test_frozen test_ftplib test_funcattrs test_functools test_future test_gc test_gdbm test_generators test_genericpath test_genexps test_getargs test_getargs2 test_getopt test_gettext test_gl test_gl skipped -- No module named gl test_glob test_global test_grp test_gzip test_hash test_hashlib test_heapq test_hexoct test_hmac test_hotshot test_htmllib test_htmlparser test_httplib test_imageop test_imageop skipped -- No module named imgfile test_imaplib test_imgfile test_imgfile skipped -- No module named imgfile test_imp test_import test_importhooks test_index test_inspect 
test_ioctl test_ioctl skipped -- Unable to open /dev/tty test_isinstance test_iter test_iterlen test_itertools test_largefile test_linuxaudiodev test_linuxaudiodev skipped -- Use of the `audio' resource not enabled test_list test_locale test_logging test_long test_long_future test_longexp test_macostools test_macostools skipped -- No module named macostools test_macpath test_mailbox test_marshal test_math test_md5 test_mhlib test_mimetools test_mimetypes test_minidom test_mmap test_module test_modulefinder test_multibytecodec test_multibytecodec_support test_multifile test_mutants test_netrc test_new test_nis test_normalization test_normalization skipped -- Use of the `urlfetch' resource not enabled test_ntpath test_old_mailbox test_openpty test_operator test_optparse test_os test_ossaudiodev test_ossaudiodev skipped -- Use of the `audio' resource not enabled test_parser test_peepholer test_pep247 test_pep263 test_pep277 test_pep277 skipped -- test works only on NT+ test_pep292 test_pep352 test_pickle test_pickletools test_pkg test_pkgimport test_platform test_plistlib test_plistlib skipped -- No module named plistlib test_poll test_popen [7328 refs] [7328 refs] [7328 refs] test_popen2 test_poplib test_posix test_posixpath test_pow test_pprint test_profile test_profilehooks test_pty test_pwd test_pyclbr test_pyexpat test_queue test_quopri [7703 refs] [7703 refs] test_random test_re test_repr test_resource test_rfc822 test_richcmp test_robotparser test_runpy test_sax test_scope test_scriptpackages test_scriptpackages skipped -- No module named aetools test_select test_set test_sets test_sgmllib test_sha test_shelve test_shlex test_shutil test_signal test_site test_slice test_smtplib test_socket test_socket_ssl test_socketserver test_socketserver skipped -- Use of the `network' resource not enabled test_softspace test_sort test_sqlite test_startfile test_startfile skipped -- cannot import name startfile test_str test_strftime test_string test_stringprep test_strop 
test_strptime test_struct test_structmembers test_structseq test_subprocess [7323 refs] [7321 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7321 refs] [8869 refs] [7539 refs] [7324 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] . [7323 refs] [7323 refs] this bit of output is from a test of stdout in a different process ... [7323 refs] [7323 refs] [7539 refs] test_sunaudiodev test_sunaudiodev skipped -- No module named sunaudiodev test_sundry test_symtable test_syntax test_sys [7323 refs] [7323 refs] test_tarfile test_tcl test_tcl skipped -- No module named _tkinter test_telnetlib test_tempfile [7327 refs] test_textwrap test_thread test_threaded_import test_threadedtempfile test_threading test_threading_local test_threadsignals test_time test_timeout test_timeout skipped -- Use of the `network' resource not enabled test_tokenize test_trace test_traceback test_transformer test_tuple test_ucn test_unary test_unicode test_unicode_file test_unicode_file skipped -- No Unicode filesystem semantics on this platform. test_unicodedata test_univnewlines test_unpack test_urllib test_urllib2 test_urllib2_localnet test_urllib2net test_urllib2net skipped -- Use of the `network' resource not enabled test_urllibnet test_urllibnet skipped -- Use of the `network' resource not enabled test_urlparse test_userdict test_userlist test_userstring test_uu test_uuid WARNING: uuid.getnode is unreliable on many platforms. It is disabled until the code and/or test can be fixed properly. WARNING: uuid._ifconfig_getnode is unreliable on many platforms. It is disabled until the code and/or test can be fixed properly. WARNING: uuid._unixdll_getnode is unreliable on many platforms. It is disabled until the code and/or test can be fixed properly. 
test_wait3 test_wait4 test_warnings test_wave test_weakref test_whichdb test_winreg test_winreg skipped -- No module named _winreg test_winsound test_winsound skipped -- No module named winsound test_with test_wsgiref test_xdrlib test_xml_etree test_xml_etree_c test_xmllib test_xmlrpc test_xpickle test_xrange test_zipfile test_zipfile64 test_zipfile64 skipped -- test requires loads of disk-space bytes and a long time to run test_zipimport test_zlib 291 tests OK. 1 test failed: test_asyncore 35 tests skipped: test_aepack test_al test_applesingle test_bsddb185 test_bsddb3 test_cd test_cl test_codecmaps_cn test_codecmaps_hk test_codecmaps_jp test_codecmaps_kr test_codecmaps_tw test_curses test_gl test_imageop test_imgfile test_ioctl test_linuxaudiodev test_macostools test_normalization test_ossaudiodev test_pep277 test_plistlib test_scriptpackages test_socketserver test_startfile test_sunaudiodev test_tcl test_timeout test_unicode_file test_urllib2net test_urllibnet test_winreg test_winsound test_zipfile64 1 skip unexpected on linux2: test_ioctl [487550 refs] From nnorwitz at gmail.com Sun Jul 15 22:07:04 2007 From: nnorwitz at gmail.com (Neal Norwitz) Date: Sun, 15 Jul 2007 16:07:04 -0400 Subject: [Python-checkins] Python Regression Test Failures basics (1) Message-ID: <20070715200704.GA8312@python.psfb.org> test_grammar test_opcodes test_dict test_builtin test_exceptions test_types test_unittest test_doctest test_doctest2 test_MimeWriter test_StringIO test___all__ test___future__ test__locale test_aepack test_aepack skipped -- No module named aepack test_al test_al skipped -- No module named al test_anydbm test_applesingle test_applesingle skipped -- No module named macostools test_array test_ast test_asynchat test_asyncore test test_asyncore failed -- Traceback (most recent call last): File "/tmp/python-test/local/lib/python2.6/test/test_asyncore.py", line 323, in test_send self.assertEqual(s.captured, data) AssertionError: "Suppose there isn't a 16-ton 
weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose 
there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton" != "Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton 
weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose 
there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?" 
test_atexit test_audioop test_augassign test_base64 test_bastion test_bigaddrspace test_bigmem test_binascii test_binhex test_binop test_bisect test_bool test_bsddb test_bsddb185 test_bsddb185 skipped -- No module named bsddb185 test_bsddb3 test_bsddb3 skipped -- Use of the `bsddb' resource not enabled test_bufio test_bz2 test_cProfile test_calendar test_call test_capi test_cd test_cd skipped -- No module named cd test_cfgparser test_cgi test_charmapcodec test_cl test_cl skipped -- No module named cl test_class test_cmath test_cmd_line test_code test_codeccallbacks test_codecencodings_cn test_codecencodings_hk test_codecencodings_jp test_codecencodings_kr test_codecencodings_tw test_codecmaps_cn test_codecmaps_cn skipped -- Use of the `urlfetch' resource not enabled test_codecmaps_hk test_codecmaps_hk skipped -- Use of the `urlfetch' resource not enabled test_codecmaps_jp test_codecmaps_jp skipped -- Use of the `urlfetch' resource not enabled test_codecmaps_kr test_codecmaps_kr skipped -- Use of the `urlfetch' resource not enabled test_codecmaps_tw test_codecmaps_tw skipped -- Use of the `urlfetch' resource not enabled test_codecs test_codeop test_coding test_coercion test_collections test_colorsys test_commands test_compare test_compile test_compiler test_complex test_complex_args test_contains test_contextlib test_cookie test_cookielib test_copy test_copy_reg test_cpickle test_crypt test_csv test_ctypes test_curses test_curses skipped -- Use of the `curses' resource not enabled test_datetime test_dbm test_decimal test_decorators test_defaultdict test_deque test_descr test_descrtut test_difflib test_dircache test_dis test_distutils test_dl test_dumbdbm test_dummy_thread test_dummy_threading test_email test_email_codecs test_email_renamed test_enumerate test_eof test_errno test_exception_variations test_extcall test_fcntl test_file test_filecmp test_fileinput test_float test_fnmatch test_fork1 test_format test_fpformat test_frozen test_ftplib test_funcattrs 
test_functools test_future test_gc test_gdbm test_generators test_genericpath test_genexps test_getargs test_getargs2 test_getopt test_gettext test_gl test_gl skipped -- No module named gl test_glob test_global test_grp test_gzip test_hash test_hashlib test_heapq test_hexoct test_hmac test_hotshot test_htmllib test_htmlparser test_httplib test_imageop test_imageop skipped -- No module named imgfile test_imaplib test_imgfile test_imgfile skipped -- No module named imgfile test_imp test_import test_importhooks test_index test_inspect test_ioctl test_ioctl skipped -- Unable to open /dev/tty test_isinstance test_iter test_iterlen test_itertools test_largefile test_linuxaudiodev test_linuxaudiodev skipped -- Use of the `audio' resource not enabled test_list test_locale test_logging test_long test_long_future test_longexp test_macostools test_macostools skipped -- No module named macostools test_macpath test_mailbox test_marshal test_math test_md5 test_mhlib test_mimetools test_mimetypes test_minidom test_mmap test_module test_modulefinder test_multibytecodec test_multibytecodec_support test_multifile test_mutants test_netrc test_new test_nis test_normalization test_normalization skipped -- Use of the `urlfetch' resource not enabled test_ntpath test_old_mailbox test_openpty test_operator test_optparse test_os test_ossaudiodev test_ossaudiodev skipped -- Use of the `audio' resource not enabled test_parser test_peepholer test_pep247 test_pep263 test_pep277 test_pep277 skipped -- test works only on NT+ test_pep292 test_pep352 test_pickle test_pickletools test_pkg test_pkgimport test_platform test_plistlib test_plistlib skipped -- No module named plistlib test_poll test_popen [7328 refs] [7328 refs] [7328 refs] test_popen2 test_poplib test_posix test_posixpath test_pow test_pprint test_profile test_profilehooks test_pty test_pwd test_pyclbr test_pyexpat test_queue test_quopri [7703 refs] [7703 refs] test_random test_re test_repr test_resource test_rfc822 test_richcmp 
test_robotparser test_runpy test_sax test_scope test_scriptpackages test_scriptpackages skipped -- No module named aetools test_select test_set test_sets test_sgmllib test_sha test_shelve test_shlex test_shutil test_signal test_site test_slice test_smtplib test_socket test_socket_ssl test_socketserver test_socketserver skipped -- Use of the `network' resource not enabled test_softspace test_sort test_sqlite test_startfile test_startfile skipped -- cannot import name startfile test_str test_strftime test_string test_stringprep test_strop test_strptime test_struct test_structmembers test_structseq test_subprocess [7323 refs] [7321 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7321 refs] [8869 refs] [7539 refs] [7324 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] . [7323 refs] [7323 refs] this bit of output is from a test of stdout in a different process ... [7323 refs] [7323 refs] [7539 refs] test_sunaudiodev test_sunaudiodev skipped -- No module named sunaudiodev test_sundry test_symtable test_syntax test_sys [7323 refs] [7323 refs] test_tarfile test_tcl test_tcl skipped -- No module named _tkinter test_telnetlib test_tempfile [7327 refs] test_textwrap test_thread test_threaded_import test_threadedtempfile test_threading test_threading_local test_threadsignals test_time test_timeout test_timeout skipped -- Use of the `network' resource not enabled test_tokenize test_trace test_traceback test_transformer test_tuple test_ucn test_unary test_unicode test_unicode_file test_unicode_file skipped -- No Unicode filesystem semantics on this platform. 
test_unicodedata test_univnewlines test_unpack test_urllib test_urllib2 test_urllib2_localnet test_urllib2net test_urllib2net skipped -- Use of the `network' resource not enabled test_urllibnet test_urllibnet skipped -- Use of the `network' resource not enabled test_urlparse test_userdict test_userlist test_userstring test_uu test_uuid WARNING: uuid.getnode is unreliable on many platforms. It is disabled until the code and/or test can be fixed properly. WARNING: uuid._ifconfig_getnode is unreliable on many platforms. It is disabled until the code and/or test can be fixed properly. WARNING: uuid._unixdll_getnode is unreliable on many platforms. It is disabled until the code and/or test can be fixed properly. test_wait3 test_wait4 test_warnings test_wave test_weakref test_whichdb test_winreg test_winreg skipped -- No module named _winreg test_winsound test_winsound skipped -- No module named winsound test_with test_wsgiref test_xdrlib test_xml_etree test_xml_etree_c test_xmllib test_xmlrpc test_xpickle test_xrange test_zipfile test_zipfile64 test_zipfile64 skipped -- test requires loads of disk-space bytes and a long time to run test_zipimport test_zlib 291 tests OK. 
1 test failed: test_asyncore 35 tests skipped: test_aepack test_al test_applesingle test_bsddb185 test_bsddb3 test_cd test_cl test_codecmaps_cn test_codecmaps_hk test_codecmaps_jp test_codecmaps_kr test_codecmaps_tw test_curses test_gl test_imageop test_imgfile test_ioctl test_linuxaudiodev test_macostools test_normalization test_ossaudiodev test_pep277 test_plistlib test_scriptpackages test_socketserver test_startfile test_sunaudiodev test_tcl test_timeout test_unicode_file test_urllib2net test_urllibnet test_winreg test_winsound test_zipfile64 1 skip unexpected on linux2: test_ioctl [487916 refs] From nnorwitz at gmail.com Sun Jul 15 22:12:41 2007 From: nnorwitz at gmail.com (Neal Norwitz) Date: Sun, 15 Jul 2007 16:12:41 -0400 Subject: [Python-checkins] Python Regression Test Failures opt (1) Message-ID: <20070715201241.GA9018@python.psfb.org> test_grammar test_opcodes test_dict test_builtin test_exceptions test_types test_unittest test_doctest test_doctest2 test_MimeWriter test_StringIO test___all__ test___future__ test__locale test_aepack test_aepack skipped -- No module named aepack test_al test_al skipped -- No module named al test_anydbm test_applesingle test_applesingle skipped -- No module named macostools test_array test_ast test_asynchat test_asyncore test test_asyncore failed -- Traceback (most recent call last): File "/tmp/python-test/local/lib/python2.6/test/test_asyncore.py", line 267, in test_log_info self.assertEquals(lines, expected) AssertionError: ['EGGS: Have you got anything without spam?', "SPAM: THAT'S got spam in it!"] != ['EGGS: Have you got anything without spam?', "info: Why can't she have egg bacon spam and sausage?", "SPAM: THAT'S got spam in it!"] test_atexit test_audioop test_augassign test_base64 test_bastion test_bigaddrspace test_bigmem test_binascii test_binhex test_binop test_bisect test_bool test_bsddb test_bsddb185 test_bsddb185 skipped -- No module named bsddb185 test_bsddb3 test_bsddb3 skipped -- Use of the `bsddb' resource 
not enabled test_bufio test_bz2 test_cProfile test_calendar test_call test_capi test_cd test_cd skipped -- No module named cd test_cfgparser test_cgi test_charmapcodec test_cl test_cl skipped -- No module named cl test_class test_cmath test_cmd_line test_code test_codeccallbacks test_codecencodings_cn test_codecencodings_hk test_codecencodings_jp test_codecencodings_kr test_codecencodings_tw test_codecmaps_cn test_codecmaps_cn skipped -- Use of the `urlfetch' resource not enabled test_codecmaps_hk test_codecmaps_hk skipped -- Use of the `urlfetch' resource not enabled test_codecmaps_jp test_codecmaps_jp skipped -- Use of the `urlfetch' resource not enabled test_codecmaps_kr test_codecmaps_kr skipped -- Use of the `urlfetch' resource not enabled test_codecmaps_tw test_codecmaps_tw skipped -- Use of the `urlfetch' resource not enabled test_codecs test_codeop test_coding test_coercion test_collections test_colorsys test_commands test_compare test_compile test_compiler test_complex test_complex_args test_contains test_contextlib test_cookie test_cookielib test_copy test_copy_reg test_cpickle test_crypt test_csv test_ctypes test_curses test_curses skipped -- Use of the `curses' resource not enabled test_datetime test_dbm test_decimal test_decorators test_defaultdict test_deque test_descr test_descrtut test_difflib test_dircache test_dis test_distutils [9008 refs] test_dl test_dumbdbm test_dummy_thread test_dummy_threading test_email test_email_codecs test_email_renamed test_enumerate test_eof test_errno test_exception_variations test_extcall test_fcntl test_file test_filecmp test_fileinput test_float test_fnmatch test_fork1 test_format test_fpformat test_frozen test_ftplib test_funcattrs test_functools test_future test_gc test_gdbm test_generators test_genericpath test_genexps test_getargs test_getargs2 test_getopt test_gettext test_gl test_gl skipped -- No module named gl test_glob test_global test_grp test_gzip test_hash test_hashlib test_heapq test_hexoct test_hmac 
test_hotshot test_htmllib test_htmlparser test_httplib test_imageop test_imageop skipped -- No module named imgfile test_imaplib test_imgfile test_imgfile skipped -- No module named imgfile test_imp test_import test_importhooks test_index test_inspect test_ioctl test_ioctl skipped -- Unable to open /dev/tty test_isinstance test_iter test_iterlen test_itertools test_largefile test_linuxaudiodev test_linuxaudiodev skipped -- Use of the `audio' resource not enabled test_list test_locale test_logging test_long test_long_future test_longexp test_macostools test_macostools skipped -- No module named macostools test_macpath test_mailbox test_marshal test_math test_md5 test_mhlib test_mimetools test_mimetypes test_minidom test_mmap test_module test_modulefinder test_multibytecodec test_multibytecodec_support test_multifile test_mutants test_netrc test_new test_nis test_normalization test_normalization skipped -- Use of the `urlfetch' resource not enabled test_ntpath test_old_mailbox test_openpty test_operator test_optparse test_os test_ossaudiodev test_ossaudiodev skipped -- Use of the `audio' resource not enabled test_parser test_peepholer test_pep247 test_pep263 test_pep277 test_pep277 skipped -- test works only on NT+ test_pep292 test_pep352 test_pickle test_pickletools test_pkg test_pkgimport test_platform test_plistlib test_plistlib skipped -- No module named plistlib test_poll test_popen [7328 refs] [7328 refs] [7328 refs] test_popen2 test_poplib test_posix test_posixpath test_pow test_pprint test_profile test_profilehooks test_pty test_pwd test_pyclbr test_pyexpat test_queue test_quopri [7703 refs] [7703 refs] test_random test_re test_repr test_resource test_rfc822 test_richcmp test_robotparser test_runpy test_sax test_scope test_scriptpackages test_scriptpackages skipped -- No module named aetools test_select test_set test_sets test_sgmllib test_sha test_shelve test_shlex test_shutil test_signal test_site test_slice test_smtplib test_socket test_socket_ssl 
test_socketserver test_socketserver skipped -- Use of the `network' resource not enabled test_softspace test_sort test_sqlite test_startfile test_startfile skipped -- cannot import name startfile test_str test_strftime test_string test_stringprep test_strop test_strptime test_struct test_structmembers test_structseq test_subprocess [7323 refs] [7321 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7321 refs] [8869 refs] [7539 refs] [7324 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] . [7323 refs] [7323 refs] this bit of output is from a test of stdout in a different process ... [7323 refs] [7323 refs] [7539 refs] test_sunaudiodev test_sunaudiodev skipped -- No module named sunaudiodev test_sundry test_symtable test_syntax test_sys [7323 refs] [7323 refs] test_tarfile test_tcl test_tcl skipped -- No module named _tkinter test_telnetlib test_tempfile [7327 refs] test_textwrap test_thread test_threaded_import test_threadedtempfile test_threading test_threading_local test_threadsignals test_time test_timeout test_timeout skipped -- Use of the `network' resource not enabled test_tokenize test_trace test_traceback test_transformer test_tuple test_ucn test_unary test_unicode test_unicode_file test_unicode_file skipped -- No Unicode filesystem semantics on this platform. test_unicodedata test_univnewlines test_unpack test_urllib test_urllib2 test_urllib2_localnet test_urllib2net test_urllib2net skipped -- Use of the `network' resource not enabled test_urllibnet test_urllibnet skipped -- Use of the `network' resource not enabled test_urlparse test_userdict test_userlist test_userstring test_uu test_uuid WARNING: uuid.getnode is unreliable on many platforms. It is disabled until the code and/or test can be fixed properly. WARNING: uuid._ifconfig_getnode is unreliable on many platforms. It is disabled until the code and/or test can be fixed properly. 
WARNING: uuid._unixdll_getnode is unreliable on many platforms. It is disabled until the code and/or test can be fixed properly. test_wait3 test_wait4 test_warnings test_wave test_weakref test_whichdb test_winreg test_winreg skipped -- No module named _winreg test_winsound test_winsound skipped -- No module named winsound test_with test_wsgiref test_xdrlib test_xml_etree test_xml_etree_c test_xmllib test_xmlrpc test_xpickle test_xrange test_zipfile test_zipfile64 test_zipfile64 skipped -- test requires loads of disk-space bytes and a long time to run test_zipimport test_zlib 291 tests OK. 1 test failed: test_asyncore 35 tests skipped: test_aepack test_al test_applesingle test_bsddb185 test_bsddb3 test_cd test_cl test_codecmaps_cn test_codecmaps_hk test_codecmaps_jp test_codecmaps_kr test_codecmaps_tw test_curses test_gl test_imageop test_imgfile test_ioctl test_linuxaudiodev test_macostools test_normalization test_ossaudiodev test_pep277 test_plistlib test_scriptpackages test_socketserver test_startfile test_sunaudiodev test_tcl test_timeout test_unicode_file test_urllib2net test_urllibnet test_winreg test_winsound test_zipfile64 1 skip unexpected on linux2: test_ioctl [487552 refs] From python-checkins at python.org Sun Jul 15 22:30:40 2007 From: python-checkins at python.org (facundo.batista) Date: Sun, 15 Jul 2007 22:30:40 +0200 (CEST) Subject: [Python-checkins] r56399 - python/trunk/Lib/test/test_asyncore.py Message-ID: <20070715203040.2ED271E400D@bag.python.org> Author: facundo.batista Date: Sun Jul 15 22:30:39 2007 New Revision: 56399 Modified: python/trunk/Lib/test/test_asyncore.py Log: Changed the used port and commented out some tests that uses a non documented function that appers to uses resources not present in Windows. 
Modified: python/trunk/Lib/test/test_asyncore.py ============================================================================== --- python/trunk/Lib/test/test_asyncore.py (original) +++ python/trunk/Lib/test/test_asyncore.py Sun Jul 15 22:30:39 2007 @@ -12,7 +12,7 @@ from StringIO import StringIO HOST = "127.0.0.1" -PORT = 54322 +PORT = 54329 class dummysocket: def __init__(self): @@ -96,77 +96,87 @@ asyncore._exception(tr2) self.assertEqual(tr2.error_handled, True) - def test_readwrite(self): - # Check that correct methods are called by readwrite() - - class testobj: - def __init__(self): - self.read = False - self.write = False - self.expt = False - - def handle_read_event(self): - self.read = True - - def handle_write_event(self): - self.write = True - - def handle_expt_event(self): - self.expt = True - - def handle_error(self): - self.error_handled = True - - for flag in (select.POLLIN, select.POLLPRI): - tobj = testobj() - self.assertEqual(tobj.read, False) - asyncore.readwrite(tobj, flag) - self.assertEqual(tobj.read, True) - - # check that ExitNow exceptions in the object handler method - # bubbles all the way up through asyncore readwrite call - tr1 = exitingdummy() - self.assertRaises(asyncore.ExitNow, asyncore.readwrite, tr1, flag) - - # check that an exception other than ExitNow in the object handler - # method causes the handle_error method to get called - tr2 = crashingdummy() - asyncore.readwrite(tr2, flag) - self.assertEqual(tr2.error_handled, True) - - tobj = testobj() - self.assertEqual(tobj.write, False) - asyncore.readwrite(tobj, select.POLLOUT) - self.assertEqual(tobj.write, True) - - # check that ExitNow exceptions in the object handler method - # bubbles all the way up through asyncore readwrite call - tr1 = exitingdummy() - self.assertRaises(asyncore.ExitNow, asyncore.readwrite, tr1, - select.POLLOUT) - - # check that an exception other than ExitNow in the object handler - # method causes the handle_error method to get called - tr2 = 
crashingdummy() - asyncore.readwrite(tr2, select.POLLOUT) - self.assertEqual(tr2.error_handled, True) - - for flag in (select.POLLERR, select.POLLHUP, select.POLLNVAL): - tobj = testobj() - self.assertEqual(tobj.expt, False) - asyncore.readwrite(tobj, flag) - self.assertEqual(tobj.expt, True) - - # check that ExitNow exceptions in the object handler method - # bubbles all the way up through asyncore readwrite calls - tr1 = exitingdummy() - self.assertRaises(asyncore.ExitNow, asyncore.readwrite, tr1, flag) - - # check that an exception other than ExitNow in the object handler - # method causes the handle_error method to get called - tr2 = crashingdummy() - asyncore.readwrite(tr2, flag) - self.assertEqual(tr2.error_handled, True) +## Commented out these tests because test a non-documented function +## (which is actually public, why it's not documented?). Anyway, the +## tests *and* the function uses constants in the select module that +## are not present in Windows systems (see this thread: +## http://mail.python.org/pipermail/python-list/2001-October/109973.html) +## Note even that these constants are mentioned in the select +## documentation, as a parameter of "poll" method "register", but are +## not explicit declared as constants of the module. +## . 
Facundo Batista +## +## def test_readwrite(self): +## # Check that correct methods are called by readwrite() +## +## class testobj: +## def __init__(self): +## self.read = False +## self.write = False +## self.expt = False +## +## def handle_read_event(self): +## self.read = True +## +## def handle_write_event(self): +## self.write = True +## +## def handle_expt_event(self): +## self.expt = True +## +## def handle_error(self): +## self.error_handled = True +## +## for flag in (select.POLLIN, select.POLLPRI): +## tobj = testobj() +## self.assertEqual(tobj.read, False) +## asyncore.readwrite(tobj, flag) +## self.assertEqual(tobj.read, True) +## +## # check that ExitNow exceptions in the object handler method +## # bubbles all the way up through asyncore readwrite call +## tr1 = exitingdummy() +## self.assertRaises(asyncore.ExitNow, asyncore.readwrite, tr1, flag) +## +## # check that an exception other than ExitNow in the object handler +## # method causes the handle_error method to get called +## tr2 = crashingdummy() +## asyncore.readwrite(tr2, flag) +## self.assertEqual(tr2.error_handled, True) +## +## tobj = testobj() +## self.assertEqual(tobj.write, False) +## asyncore.readwrite(tobj, select.POLLOUT) +## self.assertEqual(tobj.write, True) +## +## # check that ExitNow exceptions in the object handler method +## # bubbles all the way up through asyncore readwrite call +## tr1 = exitingdummy() +## self.assertRaises(asyncore.ExitNow, asyncore.readwrite, tr1, +## select.POLLOUT) +## +## # check that an exception other than ExitNow in the object handler +## # method causes the handle_error method to get called +## tr2 = crashingdummy() +## asyncore.readwrite(tr2, select.POLLOUT) +## self.assertEqual(tr2.error_handled, True) +## +## for flag in (select.POLLERR, select.POLLHUP, select.POLLNVAL): +## tobj = testobj() +## self.assertEqual(tobj.expt, False) +## asyncore.readwrite(tobj, flag) +## self.assertEqual(tobj.expt, True) +## +## # check that ExitNow exceptions in 
the object handler method +## # bubbles all the way up through asyncore readwrite calls +## tr1 = exitingdummy() +## self.assertRaises(asyncore.ExitNow, asyncore.readwrite, tr1, flag) +## +## # check that an exception other than ExitNow in the object handler +## # method causes the handle_error method to get called +## tr2 = crashingdummy() +## asyncore.readwrite(tr2, flag) +## self.assertEqual(tr2.error_handled, True) def test_closeall(self): self.closeall_check(False) From nnorwitz at gmail.com Mon Jul 16 10:12:49 2007 From: nnorwitz at gmail.com (Neal Norwitz) Date: Mon, 16 Jul 2007 04:12:49 -0400 Subject: [Python-checkins] Python Regression Test Failures opt (1) Message-ID: <20070716081249.GA26592@python.psfb.org> test_grammar test_opcodes test_dict test_builtin test_exceptions test_types test_unittest test_doctest test_doctest2 test_MimeWriter test_StringIO test___all__ test___future__ test__locale test_aepack test_aepack skipped -- No module named aepack test_al test_al skipped -- No module named al test_anydbm test_applesingle test_applesingle skipped -- No module named macostools test_array test_ast test_asynchat test_asyncore test test_asyncore failed -- Traceback (most recent call last): File "/tmp/python-test/local/lib/python2.6/test/test_asyncore.py", line 277, in test_log_info self.assertEquals(lines, expected) AssertionError: ['EGGS: Have you got anything without spam?', "SPAM: THAT'S got spam in it!"] != ['EGGS: Have you got anything without spam?', "info: Why can't she have egg bacon spam and sausage?", "SPAM: THAT'S got spam in it!"] test_atexit test_audioop test_augassign test_base64 test_bastion test_bigaddrspace test_bigmem test_binascii test_binhex test_binop test_bisect test_bool test_bsddb test_bsddb185 test_bsddb185 skipped -- No module named bsddb185 test_bsddb3 test_bsddb3 skipped -- Use of the `bsddb' resource not enabled test_bufio test_bz2 test_cProfile test_calendar test_call test_capi test_cd test_cd skipped -- No module named cd 
test_cfgparser test_cgi test_charmapcodec test_cl test_cl skipped -- No module named cl test_class test_cmath test_cmd_line test_code test_codeccallbacks test_codecencodings_cn test_codecencodings_hk test_codecencodings_jp test_codecencodings_kr test_codecencodings_tw test_codecmaps_cn test_codecmaps_cn skipped -- Use of the `urlfetch' resource not enabled test_codecmaps_hk test_codecmaps_hk skipped -- Use of the `urlfetch' resource not enabled test_codecmaps_jp test_codecmaps_jp skipped -- Use of the `urlfetch' resource not enabled test_codecmaps_kr test_codecmaps_kr skipped -- Use of the `urlfetch' resource not enabled test_codecmaps_tw test_codecmaps_tw skipped -- Use of the `urlfetch' resource not enabled test_codecs test_codeop test_coding test_coercion test_collections test_colorsys test_commands test_compare test_compile test_compiler test_complex test_complex_args test_contains test_contextlib test_cookie test_cookielib test_copy test_copy_reg test_cpickle test_crypt test_csv test_ctypes test_curses test_curses skipped -- Use of the `curses' resource not enabled test_datetime test_dbm test_decimal test_decorators test_defaultdict test_deque test_descr test_descrtut test_difflib test_dircache test_dis test_distutils [9008 refs] test_dl test_dumbdbm test_dummy_thread test_dummy_threading test_email test_email_codecs test_email_renamed test_enumerate test_eof test_errno test_exception_variations test_extcall test_fcntl test_file test_filecmp test_fileinput test_float test_fnmatch test_fork1 test_format test_fpformat test_frozen test_ftplib test_funcattrs test_functools test_future test_gc test_gdbm test_generators test_genericpath test_genexps test_getargs test_getargs2 test_getopt test_gettext test_gl test_gl skipped -- No module named gl test_glob test_global test_grp test_gzip test_hash test_hashlib test_heapq test_hexoct test_hmac test_hotshot test_htmllib test_htmlparser test_httplib test_imageop test_imageop skipped -- No module named imgfile 
test_imaplib test_imgfile test_imgfile skipped -- No module named imgfile test_imp test_import test_importhooks test_index test_inspect test_ioctl test_ioctl skipped -- Unable to open /dev/tty test_isinstance test_iter test_iterlen test_itertools test_largefile test_linuxaudiodev test_linuxaudiodev skipped -- Use of the `audio' resource not enabled test_list test_locale test_logging test_long test_long_future test_longexp test_macostools test_macostools skipped -- No module named macostools test_macpath test_mailbox test_marshal test_math test_md5 test_mhlib test_mimetools test_mimetypes test_minidom test_mmap test_module test_modulefinder test_multibytecodec test_multibytecodec_support test_multifile test_mutants test_netrc test_new test_nis test_normalization test_normalization skipped -- Use of the `urlfetch' resource not enabled test_ntpath test_old_mailbox test_openpty test_operator test_optparse test_os test_ossaudiodev test_ossaudiodev skipped -- Use of the `audio' resource not enabled test_parser test_peepholer test_pep247 test_pep263 test_pep277 test_pep277 skipped -- test works only on NT+ test_pep292 test_pep352 test_pickle test_pickletools test_pkg test_pkgimport test_platform test_plistlib test_plistlib skipped -- No module named plistlib test_poll test_popen [7328 refs] [7328 refs] [7328 refs] test_popen2 test_poplib test_posix test_posixpath test_pow test_pprint test_profile test_profilehooks test_pty test_pwd test_pyclbr test_pyexpat test_queue test_quopri [7703 refs] [7703 refs] test_random test_re test_repr test_resource test_rfc822 test_richcmp test_robotparser test_runpy test_sax test_scope test_scriptpackages test_scriptpackages skipped -- No module named aetools test_select test_set test_sets test_sgmllib test_sha test_shelve test_shlex test_shutil test_signal test_site test_slice test_smtplib test_socket test_socket_ssl test_socketserver test_socketserver skipped -- Use of the `network' resource not enabled test_softspace test_sort 
test_sqlite test_startfile test_startfile skipped -- cannot import name startfile test_str test_strftime test_string test_stringprep test_strop test_strptime test_struct test_structmembers test_structseq test_subprocess [7323 refs] [7321 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7321 refs] [8869 refs] [7539 refs] [7324 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] . [7323 refs] [7323 refs] this bit of output is from a test of stdout in a different process ... [7323 refs] [7323 refs] [7539 refs] test_sunaudiodev test_sunaudiodev skipped -- No module named sunaudiodev test_sundry test_symtable test_syntax test_sys [7323 refs] [7323 refs] test_tarfile test_tcl test_tcl skipped -- No module named _tkinter test_telnetlib test_tempfile [7327 refs] test_textwrap test_thread test_threaded_import test_threadedtempfile test_threading test_threading_local test_threadsignals test_time test_timeout test_timeout skipped -- Use of the `network' resource not enabled test_tokenize test_trace test_traceback test_transformer test_tuple test_ucn test_unary test_unicode test_unicode_file test_unicode_file skipped -- No Unicode filesystem semantics on this platform. test_unicodedata test_univnewlines test_unpack test_urllib test_urllib2 test_urllib2_localnet test_urllib2net test_urllib2net skipped -- Use of the `network' resource not enabled test_urllibnet test_urllibnet skipped -- Use of the `network' resource not enabled test_urlparse test_userdict test_userlist test_userstring test_uu test_uuid WARNING: uuid.getnode is unreliable on many platforms. It is disabled until the code and/or test can be fixed properly. WARNING: uuid._ifconfig_getnode is unreliable on many platforms. It is disabled until the code and/or test can be fixed properly. WARNING: uuid._unixdll_getnode is unreliable on many platforms. It is disabled until the code and/or test can be fixed properly. 
test_wait3 test_wait4 test_warnings test_wave test_weakref test_whichdb test_winreg test_winreg skipped -- No module named _winreg test_winsound test_winsound skipped -- No module named winsound test_with test_wsgiref test_xdrlib test_xml_etree test_xml_etree_c test_xmllib test_xmlrpc test_xpickle test_xrange test_zipfile test_zipfile64 test_zipfile64 skipped -- test requires loads of disk-space bytes and a long time to run test_zipimport test_zlib 291 tests OK. 1 test failed: test_asyncore 35 tests skipped: test_aepack test_al test_applesingle test_bsddb185 test_bsddb3 test_cd test_cl test_codecmaps_cn test_codecmaps_hk test_codecmaps_jp test_codecmaps_kr test_codecmaps_tw test_curses test_gl test_imageop test_imgfile test_ioctl test_linuxaudiodev test_macostools test_normalization test_ossaudiodev test_pep277 test_plistlib test_scriptpackages test_socketserver test_startfile test_sunaudiodev test_tcl test_timeout test_unicode_file test_urllib2net test_urllibnet test_winreg test_winsound test_zipfile64 1 skip unexpected on linux2: test_ioctl [487407 refs] From buildbot at python.org Mon Jul 16 10:27:35 2007 From: buildbot at python.org (buildbot at python.org) Date: Mon, 16 Jul 2007 08:27:35 +0000 Subject: [Python-checkins] buildbot warnings in x86 XP-3 trunk Message-ID: <20070716082735.6D7CA1E4003@bag.python.org> The Buildbot has detected a new failure of x86 XP-3 trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520XP-3%2520trunk/builds/84 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: The web-page 'force build' button was pressed by 'theller': after reboot Build Source Stamp: [branch trunk] HEAD Blamelist: Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_asyncore ====================================================================== FAIL: test_send (test.test_asyncore.DispatcherWithSendTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\work\trunk.heller-windows\build\lib\test\test_asyncore.py", line 333, in test_send self.assertEqual(s.captured, data) AssertionError: "Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton 
weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose 
there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton w" != "Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 
16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?" 
====================================================================== FAIL: test_send (test.test_asyncore.DispatcherWithSendTests_UsePoll) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\work\trunk.heller-windows\build\lib\test\test_asyncore.py", line 333, in test_send self.assertEqual(s.captured, data) AssertionError: "Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there 
isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton w" 
!= "Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there 
isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?Suppose there isn't a 16-ton weight?" 
sincerely, -The Buildbot From nnorwitz at gmail.com Mon Jul 16 22:12:46 2007 From: nnorwitz at gmail.com (Neal Norwitz) Date: Mon, 16 Jul 2007 16:12:46 -0400 Subject: [Python-checkins] Python Regression Test Failures opt (1) Message-ID: <20070716201246.GA31553@python.psfb.org> test_grammar test_opcodes test_dict test_builtin test_exceptions test_types test_unittest test_doctest test_doctest2 test_MimeWriter test_StringIO test___all__ test___future__ test__locale test_aepack test_aepack skipped -- No module named aepack test_al test_al skipped -- No module named al test_anydbm test_applesingle test_applesingle skipped -- No module named macostools test_array test_ast test_asynchat test_asyncore test test_asyncore failed -- Traceback (most recent call last): File "/tmp/python-test/local/lib/python2.6/test/test_asyncore.py", line 277, in test_log_info self.assertEquals(lines, expected) AssertionError: ['EGGS: Have you got anything without spam?', "SPAM: THAT'S got spam in it!"] != ['EGGS: Have you got anything without spam?', "info: Why can't she have egg bacon spam and sausage?", "SPAM: THAT'S got spam in it!"] test_atexit test_audioop test_augassign test_base64 test_bastion test_bigaddrspace test_bigmem test_binascii test_binhex test_binop test_bisect test_bool test_bsddb test_bsddb185 test_bsddb185 skipped -- No module named bsddb185 test_bsddb3 test_bsddb3 skipped -- Use of the `bsddb' resource not enabled test_bufio test_bz2 test_cProfile test_calendar test_call test_capi test_cd test_cd skipped -- No module named cd test_cfgparser test_cgi test_charmapcodec test_cl test_cl skipped -- No module named cl test_class test_cmath test_cmd_line test_code test_codeccallbacks test_codecencodings_cn test_codecencodings_hk test_codecencodings_jp test_codecencodings_kr test_codecencodings_tw test_codecmaps_cn test_codecmaps_cn skipped -- Use of the `urlfetch' resource not enabled test_codecmaps_hk test_codecmaps_hk skipped -- Use of the `urlfetch' resource not enabled 
test_codecmaps_jp test_codecmaps_jp skipped -- Use of the `urlfetch' resource not enabled test_codecmaps_kr test_codecmaps_kr skipped -- Use of the `urlfetch' resource not enabled test_codecmaps_tw test_codecmaps_tw skipped -- Use of the `urlfetch' resource not enabled test_codecs test_codeop test_coding test_coercion test_collections test_colorsys test_commands test_compare test_compile test_compiler test_complex test_complex_args test_contains test_contextlib test_cookie test_cookielib test_copy test_copy_reg test_cpickle test_crypt test_csv test_ctypes test_curses test_curses skipped -- Use of the `curses' resource not enabled test_datetime test_dbm test_decimal test_decorators test_defaultdict test_deque test_descr test_descrtut test_difflib test_dircache test_dis test_distutils [9008 refs] test_dl test_dumbdbm test_dummy_thread test_dummy_threading test_email test_email_codecs test_email_renamed test_enumerate test_eof test_errno test_exception_variations test_extcall test_fcntl test_file test_filecmp test_fileinput test_float test_fnmatch test_fork1 test_format test_fpformat test_frozen test_ftplib test_funcattrs test_functools test_future test_gc test_gdbm test_generators test_genericpath test_genexps test_getargs test_getargs2 test_getopt test_gettext test_gl test_gl skipped -- No module named gl test_glob test_global test_grp test_gzip test_hash test_hashlib test_heapq test_hexoct test_hmac test_hotshot test_htmllib test_htmlparser test_httplib test_imageop test_imageop skipped -- No module named imgfile test_imaplib test_imgfile test_imgfile skipped -- No module named imgfile test_imp test_import test_importhooks test_index test_inspect test_ioctl test_ioctl skipped -- Unable to open /dev/tty test_isinstance test_iter test_iterlen test_itertools test_largefile test_linuxaudiodev test_linuxaudiodev skipped -- Use of the `audio' resource not enabled test_list test_locale test_logging test_long test_long_future test_longexp test_macostools test_macostools 
skipped -- No module named macostools test_macpath test_mailbox test_marshal test_math test_md5 test_mhlib test_mimetools test_mimetypes test_minidom test_mmap test_module test_modulefinder test_multibytecodec test_multibytecodec_support test_multifile test_mutants test_netrc test_new test_nis test_normalization test_normalization skipped -- Use of the `urlfetch' resource not enabled test_ntpath test_old_mailbox test_openpty test_operator test_optparse test_os test_ossaudiodev test_ossaudiodev skipped -- Use of the `audio' resource not enabled test_parser test_peepholer test_pep247 test_pep263 test_pep277 test_pep277 skipped -- test works only on NT+ test_pep292 test_pep352 test_pickle test_pickletools test_pkg test_pkgimport test_platform test_plistlib test_plistlib skipped -- No module named plistlib test_poll test_popen [7328 refs] [7328 refs] [7328 refs] test_popen2 test_poplib test_posix test_posixpath test_pow test_pprint test_profile test_profilehooks test_pty test_pwd test_pyclbr test_pyexpat test_queue test_quopri [7703 refs] [7703 refs] test_random test_re test_repr test_resource test_rfc822 test_richcmp test_robotparser test_runpy test_sax test_scope test_scriptpackages test_scriptpackages skipped -- No module named aetools test_select test_set test_sets test_sgmllib test_sha test_shelve test_shlex test_shutil test_signal test_site test_slice test_smtplib test_socket test_socket_ssl test_socketserver test_socketserver skipped -- Use of the `network' resource not enabled test_softspace test_sort test_sqlite test_startfile test_startfile skipped -- cannot import name startfile test_str test_strftime test_string test_stringprep test_strop test_strptime test_struct test_structmembers test_structseq test_subprocess [7323 refs] [7321 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7321 refs] [8869 refs] [7539 refs] [7324 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] . 
[7323 refs] [7323 refs] this bit of output is from a test of stdout in a different process ... [7323 refs] [7323 refs] [7539 refs] test_sunaudiodev test_sunaudiodev skipped -- No module named sunaudiodev test_sundry test_symtable test_syntax test_sys [7323 refs] [7323 refs] test_tarfile test_tcl test_tcl skipped -- No module named _tkinter test_telnetlib test_tempfile [7327 refs] test_textwrap test_thread test_threaded_import test_threadedtempfile test_threading test_threading_local test_threadsignals test_time test_timeout test_timeout skipped -- Use of the `network' resource not enabled test_tokenize test_trace test_traceback test_transformer test_tuple test_ucn test_unary test_unicode test_unicode_file test_unicode_file skipped -- No Unicode filesystem semantics on this platform. test_unicodedata test_univnewlines test_unpack test_urllib test_urllib2 test_urllib2_localnet test_urllib2net test_urllib2net skipped -- Use of the `network' resource not enabled test_urllibnet test_urllibnet skipped -- Use of the `network' resource not enabled test_urlparse test_userdict test_userlist test_userstring test_uu test_uuid WARNING: uuid.getnode is unreliable on many platforms. It is disabled until the code and/or test can be fixed properly. WARNING: uuid._ifconfig_getnode is unreliable on many platforms. It is disabled until the code and/or test can be fixed properly. WARNING: uuid._unixdll_getnode is unreliable on many platforms. It is disabled until the code and/or test can be fixed properly. test_wait3 test_wait4 test_warnings test_wave test_weakref test_whichdb test_winreg test_winreg skipped -- No module named _winreg test_winsound test_winsound skipped -- No module named winsound test_with test_wsgiref test_xdrlib test_xml_etree test_xml_etree_c test_xmllib test_xmlrpc test_xpickle test_xrange test_zipfile test_zipfile64 test_zipfile64 skipped -- test requires loads of disk-space bytes and a long time to run test_zipimport test_zlib 291 tests OK. 
1 test failed: test_asyncore 35 tests skipped: test_aepack test_al test_applesingle test_bsddb185 test_bsddb3 test_cd test_cl test_codecmaps_cn test_codecmaps_hk test_codecmaps_jp test_codecmaps_kr test_codecmaps_tw test_curses test_gl test_imageop test_imgfile test_ioctl test_linuxaudiodev test_macostools test_normalization test_ossaudiodev test_pep277 test_plistlib test_scriptpackages test_socketserver test_startfile test_sunaudiodev test_tcl test_timeout test_unicode_file test_urllib2net test_urllibnet test_winreg test_winsound test_zipfile64 1 skip unexpected on linux2: test_ioctl [487401 refs] From python-checkins at python.org Tue Jul 17 04:19:39 2007 From: python-checkins at python.org (facundo.batista) Date: Tue, 17 Jul 2007 04:19:39 +0200 (CEST) Subject: [Python-checkins] r56412 - python/trunk/Lib/test/test_asyncore.py Message-ID: <20070717021939.008551E4007@bag.python.org> Author: facundo.batista Date: Tue Jul 17 04:19:39 2007 New Revision: 56412 Modified: python/trunk/Lib/test/test_asyncore.py Log: Prevent asyncore.dispatcher tests from hanging by adding loop counters to server & client, and by adding asyncore.close_all calls in tearDown. 
Also choose correct expected logging results based on the value of __debug__ [Alan McIntyre - GSoC] Modified: python/trunk/Lib/test/test_asyncore.py ============================================================================== --- python/trunk/Lib/test/test_asyncore.py (original) +++ python/trunk/Lib/test/test_asyncore.py Tue Jul 17 04:19:39 2007 @@ -52,23 +52,31 @@ self.error_handled = True # used when testing senders; just collects what it gets until newline is sent -class capture_server(threading.Thread): - def run(self): - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - global PORT - PORT = test_support.bind_port(sock, HOST, PORT) - sock.listen(1) - conn, client = sock.accept() - self.captured = "" - while 1: +def capture_server(evt, buf): + serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + serv.settimeout(3) + serv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + serv.bind(("", PORT)) + serv.listen(5) + try: + conn, addr = serv.accept() + except socket.timeout: + pass + else: + n = 200 + while n > 0: data = conn.recv(10) + # keep everything except for the newline terminator + buf.write(data.replace('\n', '')) if '\n' in data: break - self.captured = self.captured + data + n -= 1 + time.sleep(0.01) conn.close() - sock.close() + finally: + serv.close() + evt.set() class HelperFunctionTests(unittest.TestCase): @@ -228,6 +236,12 @@ class DispatcherTests(unittest.TestCase): + def setUp(self): + pass + + def tearDown(self): + asyncore.close_all() + def test_basic(self): d = asyncore.dispatcher() self.assertEqual(d.readable(), True) @@ -273,7 +287,11 @@ sys.stdout = stdout lines = fp.getvalue().splitlines() - expected = ['EGGS: %s' % l1, 'info: %s' % l2, 'SPAM: %s' % l3] + if __debug__: + expected = ['EGGS: %s' % l1, 'info: %s' % l2, 'SPAM: %s' % l3] + else: + expected = ['EGGS: %s' % l1, 'SPAM: %s' % l3] + self.assertEquals(lines, expected) def test_unhandled(self): @@ -312,25 
+330,33 @@ class DispatcherWithSendTests(unittest.TestCase): usepoll = False + def setUp(self): + pass + + def tearDown(self): + asyncore.close_all() + def test_send(self): - s = capture_server() - s.start() + self.evt = threading.Event() + cap = StringIO() + threading.Thread(target=capture_server, args=(self.evt,cap)).start() time.sleep(1) # Give server time to initialize - data = "Suppose there isn't a 16-ton weight?"*100 + data = "Suppose there isn't a 16-ton weight?"*5 d = dispatcherwithsend_noread() d.create_socket(socket.AF_INET, socket.SOCK_STREAM) d.connect((HOST, PORT)) d.send(data) d.send('\n') - while d.out_buffer: + n = 1000 + while d.out_buffer and n > 0: asyncore.poll() + n -= 1 - s.stopit = True - s.join() + self.evt.wait() - self.assertEqual(s.captured, data) + self.assertEqual(cap.getvalue(), data) class DispatcherWithSendTests_UsePoll(DispatcherWithSendTests): From buildbot at python.org Tue Jul 17 04:44:29 2007 From: buildbot at python.org (buildbot at python.org) Date: Tue, 17 Jul 2007 02:44:29 +0000 Subject: [Python-checkins] buildbot warnings in amd64 XP trunk Message-ID: <20070717024429.3EC721E4007@bag.python.org> The Buildbot has detected a new failure of amd64 XP trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/amd64%2520XP%2520trunk/builds/52 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista Build had warnings: warnings test Excerpt from the test logfile: 3 tests failed: test_asyncore test_ctypes test_winsound ====================================================================== ERROR: test_send (test.test_asyncore.DispatcherWithSendTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_asyncore.py", line 350, in test_send d.send('\n') File "C:\buildbot\trunk.heller-windows-amd64\build\lib\asyncore.py", line 467, in send self.initiate_send() File "C:\buildbot\trunk.heller-windows-amd64\build\lib\asyncore.py", line 454, in initiate_send num_sent = dispatcher.send(self, self.out_buffer[:512]) File "C:\buildbot\trunk.heller-windows-amd64\build\lib\asyncore.py", line 331, in send result = self.socket.send(data) error: (10053, 'Software caused connection abort') ====================================================================== ERROR: test_send (test.test_asyncore.DispatcherWithSendTests_UsePoll) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_asyncore.py", line 350, in test_send d.send('\n') File "C:\buildbot\trunk.heller-windows-amd64\build\lib\asyncore.py", line 467, in send self.initiate_send() File "C:\buildbot\trunk.heller-windows-amd64\build\lib\asyncore.py", line 454, in initiate_send num_sent = dispatcher.send(self, self.out_buffer[:512]) File "C:\buildbot\trunk.heller-windows-amd64\build\lib\asyncore.py", line 331, in send result = self.socket.send(data) error: (10053, 'Software caused connection abort') 
====================================================================== ERROR: test_extremes (test.test_winsound.BeepTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 18, in test_extremes winsound.Beep(37, 75) RuntimeError: Failed to beep ====================================================================== ERROR: test_increasingfrequency (test.test_winsound.BeepTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 23, in test_increasingfrequency winsound.Beep(i, 75) RuntimeError: Failed to beep ====================================================================== ERROR: test_alias_asterisk (test.test_winsound.PlaySoundTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 64, in test_alias_asterisk winsound.PlaySound('SystemAsterisk', winsound.SND_ALIAS) RuntimeError: Failed to play sound ====================================================================== ERROR: test_alias_exclamation (test.test_winsound.PlaySoundTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 74, in test_alias_exclamation winsound.PlaySound('SystemExclamation', winsound.SND_ALIAS) RuntimeError: Failed to play sound ====================================================================== ERROR: test_alias_exit (test.test_winsound.PlaySoundTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", 
line 84, in test_alias_exit winsound.PlaySound('SystemExit', winsound.SND_ALIAS) RuntimeError: Failed to play sound ====================================================================== ERROR: test_alias_hand (test.test_winsound.PlaySoundTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 94, in test_alias_hand winsound.PlaySound('SystemHand', winsound.SND_ALIAS) RuntimeError: Failed to play sound ====================================================================== ERROR: test_alias_question (test.test_winsound.PlaySoundTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 104, in test_alias_question winsound.PlaySound('SystemQuestion', winsound.SND_ALIAS) RuntimeError: Failed to play sound Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\threading.py", line 465, in __bootstrap self.run() File "C:\buildbot\trunk.heller-windows-amd64\build\lib\threading.py", line 445, in run self.__target(*self.__args, **self.__kwargs) File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_asyncore.py", line 68, in capture_server data = conn.recv(10) error: (10035, 'The socket operation could not complete without blocking') Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\threading.py", line 465, in __bootstrap self.run() File "C:\buildbot\trunk.heller-windows-amd64\build\lib\threading.py", line 445, in run self.__target(*self.__args, **self.__kwargs) File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_asyncore.py", line 68, in capture_server data = conn.recv(10) error: (10035, 'The socket operation could not complete without blocking') Traceback (most recent call last): File 
"C:\buildbot\trunk.heller-windows-amd64\build\lib\threading.py", line 465, in __bootstrap self.run() File "C:\buildbot\trunk.heller-windows-amd64\build\lib\threading.py", line 445, in run self.__target(*self.__args, **self.__kwargs) File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_asyncore.py", line 68, in capture_server data = conn.recv(10) error: (10035, 'The socket operation could not complete without blocking') Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\threading.py", line 465, in __bootstrap self.run() File "C:\buildbot\trunk.heller-windows-amd64\build\lib\threading.py", line 445, in run self.__target(*self.__args, **self.__kwargs) File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_asyncore.py", line 68, in capture_server data = conn.recv(10) error: (10035, 'The socket operation could not complete without blocking') sincerely, -The Buildbot From buildbot at python.org Tue Jul 17 05:03:24 2007 From: buildbot at python.org (buildbot at python.org) Date: Tue, 17 Jul 2007 03:03:24 +0000 Subject: [Python-checkins] buildbot warnings in x86 XP trunk Message-ID: <20070717030324.6D49C1E4011@bag.python.org> The Buildbot has detected a new failure of x86 XP trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520XP%2520trunk/builds/521 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_asyncore ====================================================================== ERROR: test_send (test.test_asyncore.DispatcherWithSendTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot_py25\trunk.mcintyre-windows\build\lib\test\test_asyncore.py", line 350, in test_send d.send('\n') File "C:\buildbot_py25\trunk.mcintyre-windows\build\lib\asyncore.py", line 467, in send self.initiate_send() File "C:\buildbot_py25\trunk.mcintyre-windows\build\lib\asyncore.py", line 454, in initiate_send num_sent = dispatcher.send(self, self.out_buffer[:512]) File "C:\buildbot_py25\trunk.mcintyre-windows\build\lib\asyncore.py", line 331, in send result = self.socket.send(data) error: (10053, 'Software caused connection abort') ====================================================================== ERROR: test_send (test.test_asyncore.DispatcherWithSendTests_UsePoll) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot_py25\trunk.mcintyre-windows\build\lib\test\test_asyncore.py", line 350, in test_send d.send('\n') File "C:\buildbot_py25\trunk.mcintyre-windows\build\lib\asyncore.py", line 467, in send self.initiate_send() File "C:\buildbot_py25\trunk.mcintyre-windows\build\lib\asyncore.py", line 454, in initiate_send num_sent = dispatcher.send(self, self.out_buffer[:512]) File "C:\buildbot_py25\trunk.mcintyre-windows\build\lib\asyncore.py", line 331, in send result = self.socket.send(data) error: (10053, 'Software caused connection abort') Traceback (most recent call last): File 
"C:\buildbot_py25\trunk.mcintyre-windows\build\lib\threading.py", line 465, in __bootstrap self.run() File "C:\buildbot_py25\trunk.mcintyre-windows\build\lib\threading.py", line 445, in run self.__target(*self.__args, **self.__kwargs) File "C:\buildbot_py25\trunk.mcintyre-windows\build\lib\test\test_asyncore.py", line 68, in capture_server data = conn.recv(10) error: (10035, 'The socket operation could not complete without blocking') Traceback (most recent call last): File "C:\buildbot_py25\trunk.mcintyre-windows\build\lib\threading.py", line 465, in __bootstrap self.run() File "C:\buildbot_py25\trunk.mcintyre-windows\build\lib\threading.py", line 445, in run self.__target(*self.__args, **self.__kwargs) File "C:\buildbot_py25\trunk.mcintyre-windows\build\lib\test\test_asyncore.py", line 68, in capture_server data = conn.recv(10) error: (10035, 'The socket operation could not complete without blocking') Traceback (most recent call last): File "C:\buildbot_py25\trunk.mcintyre-windows\build\lib\threading.py", line 465, in __bootstrap self.run() File "C:\buildbot_py25\trunk.mcintyre-windows\build\lib\threading.py", line 445, in run self.__target(*self.__args, **self.__kwargs) File "C:\buildbot_py25\trunk.mcintyre-windows\build\lib\test\test_asyncore.py", line 68, in capture_server data = conn.recv(10) error: (10035, 'The socket operation could not complete without blocking') Traceback (most recent call last): File "C:\buildbot_py25\trunk.mcintyre-windows\build\lib\threading.py", line 465, in __bootstrap self.run() File "C:\buildbot_py25\trunk.mcintyre-windows\build\lib\threading.py", line 445, in run self.__target(*self.__args, **self.__kwargs) File "C:\buildbot_py25\trunk.mcintyre-windows\build\lib\test\test_asyncore.py", line 68, in capture_server data = conn.recv(10) error: (10035, 'The socket operation could not complete without blocking') sincerely, -The Buildbot From python-checkins at python.org Tue Jul 17 19:51:44 2007 From: python-checkins at python.org 
(alexandre.vassalotti) Date: Tue, 17 Jul 2007 19:51:44 +0200 (CEST) Subject: [Python-checkins] r56414 - python/branches/cpy_merge/Modules/_picklemodule.c Message-ID: <20070717175144.B4A851E4013@bag.python.org> Author: alexandre.vassalotti Date: Tue Jul 17 19:51:44 2007 New Revision: 56414 Modified: python/branches/cpy_merge/Modules/_picklemodule.c Log: Small changes to make _pickle API similar to pickle. Use docstrings from pickle.py. Rename 'binary' member to 'bin'. Raise KeyError instead of BadPickleGet. Modified: python/branches/cpy_merge/Modules/_picklemodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_picklemodule.c (original) +++ python/branches/cpy_merge/Modules/_picklemodule.c Tue Jul 17 19:51:44 2007 @@ -4,11 +4,6 @@ PyDoc_STRVAR(pickle_module_documentation, "C implementation and optimization of the Python pickle module."); -#ifndef Py_eval_input -#include -#define Py_eval_input eval_input -#endif /* Py_eval_input */ - #define WRITE_BUF_SIZE 256 /* Bump this when new opcodes are added to the pickle protocol. */ @@ -97,7 +92,6 @@ static PyObject *PicklingError; static PyObject *UnpickleableError; static PyObject *UnpicklingError; -static PyObject *BadPickleGet; /* As the name says, an empty tuple. */ static PyObject *empty_tuple; @@ -2380,12 +2374,22 @@ Py_RETURN_NONE; } +PyDoc_STRVAR(Pickler_dump_doc, +"dump(obj) -> None. Write a pickled representation of obj to the open file."); + +PyDoc_STRVAR(Pickler_clear_memo_doc, +"clear_memo() -> None. Clears the pickler's \"memo\"." +"\n" +"The memo is the data structure that remembers which objects the\n" +"pickler has already seen, so that shared or recursive objects are\n" +"pickled by reference and not by value. 
This method is useful when\n" +"re-using picklers."); + static struct PyMethodDef Pickler_methods[] = { {"dump", (PyCFunction) Pickler_dump, METH_VARARGS, - PyDoc_STR("dump(object) -> None.\n\n" - "Write an object in pickle format to the object's pickle stream")}, + Pickler_dump_doc}, {"clear_memo", (PyCFunction) Pickle_clear_memo, METH_NOARGS, - PyDoc_STR("clear_memo() -> None. Clear the picklers memo.")}, + Pickler_clear_memo_doc}, {NULL, NULL} /* sentinel */ }; @@ -2404,9 +2408,8 @@ if (proto < 0) proto = HIGHEST_PROTOCOL; if (proto > HIGHEST_PROTOCOL) { - PyErr_Format(PyExc_ValueError, "pickle protocol %d asked for; " - "the highest available protocol is %d", - proto, HIGHEST_PROTOCOL); + PyErr_Format(PyExc_ValueError, "pickle protocol must be <= %d", + HIGHEST_PROTOCOL); return NULL; } @@ -2589,7 +2592,7 @@ } static PyMemberDef Pickler_members[] = { - {"binary", T_INT, offsetof(PicklerObject, bin)}, + {"bin", T_INT, offsetof(PicklerObject, bin)}, {"fast", T_INT, offsetof(PicklerObject, fast)}, {NULL} }; @@ -3520,7 +3523,6 @@ return 0; } - static int load_pop_mark(UnpicklerObject *self) { @@ -3534,7 +3536,6 @@ return 0; } - static int load_dup(UnpicklerObject *self) { @@ -3549,68 +3550,61 @@ return 0; } - static int load_get(UnpicklerObject *self) { - PyObject *py_str = 0, *value = 0; + PyObject *py_key, *value; int len; char *s; - int rc; if ((len = self->readline_func(self, &s)) < 0) return -1; if (len < 2) return bad_readline(); - if (!(py_str = PyString_FromStringAndSize(s, len - 1))) + py_key = PyString_FromStringAndSize(s, len - 1); + if (!py_key) return -1; - value = PyDict_GetItem(self->memo, py_str); + value = PyDict_GetItem(self->memo, py_key); if (!value) { - PyErr_SetObject(BadPickleGet, py_str); - rc = -1; + PyErr_SetObject(PyExc_KeyError, py_key); + Py_DECREF(py_key); + return -1; } - else { + PDATA_APPEND(self->stack, value, -1); - rc = 0; + Py_DECREF(py_key); + return 0; } - Py_DECREF(py_str); - return rc; -} - - static int 
load_binget(UnpicklerObject *self) { - PyObject *py_key = 0, *value = 0; + PyObject *py_key, *value; unsigned char key; char *s; - int rc; if (self->read_func(self, &s, 1) < 0) return -1; key = (unsigned char) s[0]; - if (!(py_key = PyInt_FromLong((long) key))) + py_key = PyInt_FromLong((long)key); + if (!py_key) return -1; value = PyDict_GetItem(self->memo, py_key); if (!value) { - PyErr_SetObject(BadPickleGet, py_key); - rc = -1; - } - else { - PDATA_APPEND(self->stack, value, -1); - rc = 0; + PyErr_SetObject(PyExc_KeyError, py_key); + Py_DECREF(py_key); + return -1; } + PDATA_APPEND(self->stack, value, -1); Py_DECREF(py_key); - return rc; + return 0; } - static int load_long_binget(UnpicklerObject *self) { @@ -3618,7 +3612,6 @@ unsigned char c; char *s; long key; - int rc; if (self->read_func(self, &s, 4) < 0) return -1; @@ -3637,16 +3630,13 @@ value = PyDict_GetItem(self->memo, py_key); if (!value) { - PyErr_SetObject(BadPickleGet, py_key); - rc = -1; - } - else { - PDATA_APPEND(self->stack, value, -1); - rc = 0; + Py_DECREF(py_key); + PyErr_SetObject(PyExc_KeyError, py_key); } + PDATA_APPEND(self->stack, value, -1); Py_DECREF(py_key); - return rc; + return 0; } /* Push an object from the extension registry (EXT[124]). nbytes is @@ -4771,10 +4761,6 @@ PickleError, NULL))) return -1; - if (!(BadPickleGet = PyErr_NewException("pickle.BadPickleGet", - UnpicklingError, NULL))) - return -1; - if (PyDict_SetItemString(module_dict, "PickleError", PickleError) < 0) return -1; @@ -4789,9 +4775,6 @@ UnpickleableError) < 0) return -1; - if (PyDict_SetItemString(module_dict, "BadPickleGet", BadPickleGet) < 0) - return -1; - return 0; } From meshafiee at yahoo.com Tue Jul 17 21:19:44 2007 From: meshafiee at yahoo.com (m.ebrahim shafiee) Date: Tue, 17 Jul 2007 12:19:44 -0700 (PDT) Subject: [Python-checkins] University selection underway Message-ID: <674748.12885.qm@web33407.mail.mud.yahoo.com> Please give me more detale. --------------------------------- Need a vacation? 
Get great deals to amazing places on Yahoo! Travel. -------------- next part -------------- An HTML attachment was scrubbed... URL: http://mail.python.org/pipermail/python-checkins/attachments/20070717/34921788/attachment.html From python-checkins at python.org Tue Jul 17 23:09:36 2007 From: python-checkins at python.org (collin.winter) Date: Tue, 17 Jul 2007 23:09:36 +0200 (CEST) Subject: [Python-checkins] r56419 - in sandbox/trunk/2to3: fixes/fix_intern.py fixes/fix_nonzero.py fixes/fix_raise.py Message-ID: <20070717210936.56F4A1E400D@bag.python.org> Author: collin.winter Date: Tue Jul 17 23:09:35 2007 New Revision: 56419 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/fixes/fix_intern.py sandbox/trunk/2to3/fixes/fix_nonzero.py sandbox/trunk/2to3/fixes/fix_raise.py Log: Remove some unneeded assert statements. Modified: sandbox/trunk/2to3/fixes/fix_intern.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_intern.py (original) +++ sandbox/trunk/2to3/fixes/fix_intern.py Tue Jul 17 23:09:35 2007 @@ -24,7 +24,6 @@ """ def transform(self, node, results): - assert results syms = self.syms obj = results["obj"].clone() if obj.type == syms.arglist: Modified: sandbox/trunk/2to3/fixes/fix_nonzero.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_nonzero.py (original) +++ sandbox/trunk/2to3/fixes/fix_nonzero.py Tue Jul 17 23:09:35 2007 @@ -15,8 +15,6 @@ """ def transform(self, node, results): - assert results - name = results["name"] new = Name("__bool__", prefix=name.get_prefix()) name.replace(new) Modified: sandbox/trunk/2to3/fixes/fix_raise.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_raise.py (original) +++ sandbox/trunk/2to3/fixes/fix_raise.py Tue Jul 17 23:09:35 2007 @@ -34,7 +34,6 @@ """ def transform(self, node, results): - assert results syms = self.syms 
exc = results["exc"].clone() From python-checkins at python.org Tue Jul 17 23:09:53 2007 From: python-checkins at python.org (collin.winter) Date: Tue, 17 Jul 2007 23:09:53 +0200 (CEST) Subject: [Python-checkins] r56420 - in sandbox/trunk/2to3: fixes/fix_apply.py fixes/fix_callable.py fixes/fix_exec.py fixes/fix_input.py fixes/fix_print.py fixes/fix_repr.py fixes/util.py Message-ID: <20070717210953.65CE71E401B@bag.python.org> Author: collin.winter Date: Tue Jul 17 23:09:52 2007 New Revision: 56420 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/fixes/fix_apply.py sandbox/trunk/2to3/fixes/fix_callable.py sandbox/trunk/2to3/fixes/fix_exec.py sandbox/trunk/2to3/fixes/fix_input.py sandbox/trunk/2to3/fixes/fix_print.py sandbox/trunk/2to3/fixes/fix_repr.py sandbox/trunk/2to3/fixes/util.py Log: Add a prefix keyword arg to Call() and make use of it. Modified: sandbox/trunk/2to3/fixes/fix_apply.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_apply.py (original) +++ sandbox/trunk/2to3/fixes/fix_apply.py Tue Jul 17 23:09:52 2007 @@ -55,6 +55,4 @@ # XXX Sometimes we could be cleverer, e.g. 
apply(f, (x, y) + t) # can be translated into f(x, y, *t) instead of f(*(x, y) + t) #new = pytree.Node(syms.power, (func, ArgList(l_newargs))) - new = Call(func, l_newargs) - new.set_prefix(prefix) - return new + return Call(func, l_newargs, prefix=prefix) Modified: sandbox/trunk/2to3/fixes/fix_callable.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_callable.py (original) +++ sandbox/trunk/2to3/fixes/fix_callable.py Tue Jul 17 23:09:52 2007 @@ -28,7 +28,4 @@ func = results["func"] args = [func.clone(), String(', '), String("'__call__'")] - new = Call(Name("hasattr"), args) - new.set_prefix(node.get_prefix()) - return new - + return Call(Name("hasattr"), args, prefix=node.get_prefix()) Modified: sandbox/trunk/2to3/fixes/fix_exec.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_exec.py (original) +++ sandbox/trunk/2to3/fixes/fix_exec.py Tue Jul 17 23:09:52 2007 @@ -36,6 +36,4 @@ if c is not None: args.extend([Comma(), c.clone()]) - new = Call(Name("exec"), args) - new.set_prefix(node.get_prefix()) - return new + return Call(Name("exec"), args, prefix=node.get_prefix()) Modified: sandbox/trunk/2to3/fixes/fix_input.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_input.py (original) +++ sandbox/trunk/2to3/fixes/fix_input.py Tue Jul 17 23:09:52 2007 @@ -17,6 +17,4 @@ def transform(self, node, results): new = node.clone() new.set_prefix("") - new = Call(Name("eval"), [new]) - new.set_prefix(node.get_prefix()) - return new + return Call(Name("eval"), [new], prefix=node.get_prefix()) Modified: sandbox/trunk/2to3/fixes/fix_print.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_print.py (original) +++ sandbox/trunk/2to3/fixes/fix_print.py Tue Jul 17 23:09:52 2007 @@ -36,9 +36,7 @@ if node == Name("print"): # 
Special-case print all by itself - new = Call(Name("print"), []) - new.set_prefix(node.get_prefix()) - return new + return Call(Name("print"), [], prefix=node.get_prefix()) assert node.children[0] == Name("print") args = node.children[1:] sep = end = file = None Modified: sandbox/trunk/2to3/fixes/fix_repr.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_repr.py (original) +++ sandbox/trunk/2to3/fixes/fix_repr.py Tue Jul 17 23:09:52 2007 @@ -19,6 +19,4 @@ if expr.type == self.syms.testlist1: expr = self.parenthesize(expr) - new = Call(Name("repr"), [expr]) - new.set_prefix(node.get_prefix()) - return new + return Call(Name("repr"), [expr], prefix=node.get_prefix()) Modified: sandbox/trunk/2to3/fixes/util.py ============================================================================== --- sandbox/trunk/2to3/fixes/util.py (original) +++ sandbox/trunk/2to3/fixes/util.py Tue Jul 17 23:09:52 2007 @@ -55,9 +55,12 @@ Node(syms.arglist, args), rparen.clone()]) -def Call(func_name, args): +def Call(func_name, args, prefix=None): """A function call""" - return Node(syms.power, [func_name, ArgList(args)]) + node = Node(syms.power, [func_name, ArgList(args)]) + if prefix is not None: + node.set_prefix(prefix) + return node def Newline(): """A newline literal""" From python-checkins at python.org Tue Jul 17 23:10:10 2007 From: python-checkins at python.org (collin.winter) Date: Tue, 17 Jul 2007 23:10:10 +0200 (CEST) Subject: [Python-checkins] r56421 - in sandbox/trunk/2to3: fixes/fix_stringio.py fixes/util.py Message-ID: <20070717211010.0ED6F1E400F@bag.python.org> Author: collin.winter Date: Tue Jul 17 23:10:09 2007 New Revision: 56421 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/fixes/fix_stringio.py sandbox/trunk/2to3/fixes/util.py Log: Have fixes.util.attr_chain() yield the starting object (useful for some other fixers). 
Modified: sandbox/trunk/2to3/fixes/fix_stringio.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_stringio.py (original) +++ sandbox/trunk/2to3/fixes/fix_stringio.py Tue Jul 17 23:10:09 2007 @@ -40,7 +40,7 @@ match = super(FixStringio, self).match results = match(node) if results: - if any([match(obj) for obj in attr_chain(node, "parent")]): + if any([match(obj) for obj in attr_chain(node.parent, "parent")]): return False return results return False Modified: sandbox/trunk/2to3/fixes/util.py ============================================================================== --- sandbox/trunk/2to3/fixes/util.py (original) +++ sandbox/trunk/2to3/fixes/util.py Tue Jul 17 23:10:09 2007 @@ -169,6 +169,7 @@ Yields: Each successive object in the chain. """ + yield obj next = getattr(obj, attr) while next: yield next From python-checkins at python.org Tue Jul 17 23:10:37 2007 From: python-checkins at python.org (collin.winter) Date: Tue, 17 Jul 2007 23:10:37 +0200 (CEST) Subject: [Python-checkins] r56422 - in sandbox/trunk/2to3: fixes/fix_stringio.py fixes/util.py Message-ID: <20070717211037.81B281E4023@bag.python.org> Author: collin.winter Date: Tue Jul 17 23:10:36 2007 New Revision: 56422 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/fixes/fix_stringio.py sandbox/trunk/2to3/fixes/util.py Log: Revert last revision (not so useful as I thought. oops). 
Modified: sandbox/trunk/2to3/fixes/fix_stringio.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_stringio.py (original) +++ sandbox/trunk/2to3/fixes/fix_stringio.py Tue Jul 17 23:10:36 2007 @@ -40,7 +40,7 @@ match = super(FixStringio, self).match results = match(node) if results: - if any([match(obj) for obj in attr_chain(node.parent, "parent")]): + if any([match(obj) for obj in attr_chain(node, "parent")]): return False return results return False Modified: sandbox/trunk/2to3/fixes/util.py ============================================================================== --- sandbox/trunk/2to3/fixes/util.py (original) +++ sandbox/trunk/2to3/fixes/util.py Tue Jul 17 23:10:36 2007 @@ -169,7 +169,6 @@ Yields: Each successive object in the chain. """ - yield obj next = getattr(obj, attr) while next: yield next From python-checkins at python.org Tue Jul 17 23:10:48 2007 From: python-checkins at python.org (collin.winter) Date: Tue, 17 Jul 2007 23:10:48 +0200 (CEST) Subject: [Python-checkins] r56423 - in sandbox/trunk/2to3: fixes/fix_filter.py fixes/fix_map.py Message-ID: <20070717211048.DAF6E1E402A@bag.python.org> Author: collin.winter Date: Tue Jul 17 23:10:48 2007 New Revision: 56423 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/fixes/fix_filter.py sandbox/trunk/2to3/fixes/fix_map.py Log: Refactor fix_filter and fix_map to take advantage of attr_chain(). 
Modified: sandbox/trunk/2to3/fixes/fix_filter.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_filter.py (original) +++ sandbox/trunk/2to3/fixes/fix_filter.py Tue Jul 17 23:10:48 2007 @@ -18,7 +18,7 @@ import patcomp from pgen2 import token from fixes import basefix -from fixes.util import Name, Call, ListComp +from fixes.util import Name, Call, ListComp, attr_chain class FixFilter(basefix.BaseFix): @@ -49,7 +49,7 @@ results.get("it").clone(), results.get("xp").clone()) else: - if self.in_special_context(node): + if in_special_context(node): return None new = node.clone() new.set_prefix("") @@ -57,49 +57,33 @@ new.set_prefix(node.get_prefix()) return new - P0 = """for_stmt< 'for' any 'in' node=any ':' any* > - | comp_for< 'for' any 'in' node=any any* > - """ - p0 = patcomp.compile_pattern(P0) - - P1 = """ - power< - ( 'iter' | 'list' | 'tuple' | 'sorted' ) - trailer< '(' node=any ')' > - any* - > - """ - p1 = patcomp.compile_pattern(P1) - - P2 = """ - power< - 'sorted' - trailer< '(' arglist ')' > - any* - > - """ - p2 = patcomp.compile_pattern(P2) - - def in_special_context(self, node): - p = node.parent - if p is None: - return False - results = {} - if self.p0.match(p, results) and results["node"] is node: - return True - - pp = p.parent - if pp is None: - return False +P0 = """for_stmt< 'for' any 'in' node=any ':' any* > + | comp_for< 'for' any 'in' node=any any* > + """ +p0 = patcomp.compile_pattern(P0) + +P1 = """ +power< + ( 'iter' | 'list' | 'tuple' | 'sorted' ) + trailer< '(' node=any ')' > + any* +> +""" +p1 = patcomp.compile_pattern(P1) + +P2 = """ +power< + 'sorted' + trailer< '(' arglist ')' > + any* +> +""" +p2 = patcomp.compile_pattern(P2) + +def in_special_context(node): + patterns = [p0, p1, p2] + for pattern, parent in zip(patterns, attr_chain(node, "parent")): results = {} - if self.p1.match(pp, results) and results["node"] is node: + if pattern.match(parent, results) and 
results["node"] is node: return True - - ppp = pp.parent - if ppp is None: - return False - results = {} - if self.p2.match(ppp, results) and results["node"] is node: - return True - - return False + return False Modified: sandbox/trunk/2to3/fixes/fix_map.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_map.py (original) +++ sandbox/trunk/2to3/fixes/fix_map.py Tue Jul 17 23:10:48 2007 @@ -22,7 +22,7 @@ import patcomp from pgen2 import token from fixes import basefix -from fixes.util import Name, Call, ListComp +from fixes.util import Name, Call, ListComp, attr_chain from pygram import python_symbols as syms class FixMap(basefix.BaseFix): @@ -61,7 +61,7 @@ if "map_none" in results: new = results["arg"].clone() else: - if self.in_special_context(node): + if in_special_context(node): return None new = node.clone() new.set_prefix("") @@ -69,49 +69,33 @@ new.set_prefix(node.get_prefix()) return new - P0 = """for_stmt< 'for' any 'in' node=any ':' any* > - | comp_for< 'for' any 'in' node=any any* > - """ - p0 = patcomp.compile_pattern(P0) - - P1 = """ - power< - ( 'iter' | 'list' | 'tuple' | 'sorted' ) - trailer< '(' node=any ')' > - any* - > - """ - p1 = patcomp.compile_pattern(P1) - - P2 = """ - power< - 'sorted' - trailer< '(' arglist ')' > - any* - > - """ - p2 = patcomp.compile_pattern(P2) - - def in_special_context(self, node): - p = node.parent - if p is None: - return False - results = {} - if self.p0.match(p, results) and results["node"] is node: - return True - - pp = p.parent - if pp is None: - return False +P0 = """for_stmt< 'for' any 'in' node=any ':' any* > + | comp_for< 'for' any 'in' node=any any* > + """ +p0 = patcomp.compile_pattern(P0) + +P1 = """ +power< + ( 'iter' | 'list' | 'tuple' | 'sorted' ) + trailer< '(' node=any ')' > + any* +> +""" +p1 = patcomp.compile_pattern(P1) + +P2 = """ +power< + 'sorted' + trailer< '(' arglist ')' > + any* +> +""" +p2 = patcomp.compile_pattern(P2) + +def 
in_special_context(node): + patterns = [p0, p1, p2] + for pattern, parent in zip(patterns, attr_chain(node, "parent")): results = {} - if self.p1.match(pp, results) and results["node"] is node: + if pattern.match(parent, results) and results["node"] is node: return True - - ppp = pp.parent - if ppp is None: - return False - results = {} - if self.p2.match(ppp, results) and results["node"] is node: - return True - - return False + return False From python-checkins at python.org Tue Jul 17 23:11:08 2007 From: python-checkins at python.org (collin.winter) Date: Tue, 17 Jul 2007 23:11:08 +0200 (CEST) Subject: [Python-checkins] r56424 - in sandbox/trunk/2to3: fixes/fix_except.py tests/test_fixers.py Message-ID: <20070717211108.E296C1E4011@bag.python.org> Author: collin.winter Date: Tue Jul 17 23:11:08 2007 New Revision: 56424 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/fixes/fix_except.py sandbox/trunk/2to3/tests/test_fixers.py Log: Fix a bug in fix_except related to prefix handling. Modified: sandbox/trunk/2to3/fixes/fix_except.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_except.py (original) +++ sandbox/trunk/2to3/fixes/fix_except.py Tue Jul 17 23:11:08 2007 @@ -43,7 +43,6 @@ """ def transform(self, node, results): - assert results syms = self.syms try_cleanup = [ch.clone() for ch in results['cleanup']] @@ -80,6 +79,10 @@ for child in reversed(suite_stmts[:i]): e_suite.insert_child(0, child) e_suite.insert_child(i, assign) + elif N.get_prefix() == "": + # No space after a comma is legal; no space after "as", + # not so much. 
+ N.set_prefix(" ") #TODO(cwinter) fix this when children becomes a smart list children = [c.clone() for c in node.children[:3]] + try_cleanup Modified: sandbox/trunk/2to3/tests/test_fixers.py ============================================================================== --- sandbox/trunk/2to3/tests/test_fixers.py (original) +++ sandbox/trunk/2to3/tests/test_fixers.py Tue Jul 17 23:11:08 2007 @@ -502,6 +502,32 @@ pass""" self.check(b, a) + def test_simple(self): + b = """ + try: + pass + except Foo, e: + pass""" + a = """ + try: + pass + except Foo as e: + pass""" + self.check(b, a) + + def test_simple_no_space_before_target(self): + b = """ + try: + pass + except Foo,e: + pass""" + a = """ + try: + pass + except Foo as e: + pass""" + self.check(b, a) + def test_tuple_unpack(self): b = """ def foo(): From python-checkins at python.org Tue Jul 17 23:11:20 2007 From: python-checkins at python.org (collin.winter) Date: Tue, 17 Jul 2007 23:11:20 +0200 (CEST) Subject: [Python-checkins] r56425 - sandbox/trunk/2to3 sandbox/trunk/2to3/refactor.py Message-ID: <20070717211120.352FD1E4019@bag.python.org> Author: collin.winter Date: Tue Jul 17 23:11:19 2007 New Revision: 56425 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/refactor.py Log: Greatly simplify the scheme for determining whether a tree has changed. 
Modified: sandbox/trunk/2to3/refactor.py ============================================================================== --- sandbox/trunk/2to3/refactor.py (original) +++ sandbox/trunk/2to3/refactor.py Tue Jul 17 23:11:19 2007 @@ -262,24 +262,20 @@ def refactor_tree(self, tree, filename): """Refactors a parse tree (modifying the tree in place).""" - changed = False all_fixers = self.pre_order + self.post_order for fixer in all_fixers: fixer.start_tree(tree, filename) - changed |= self.traverse_by(self.pre_order, tree.pre_order()) - changed |= self.traverse_by(self.post_order, tree.post_order()) - if tree.was_changed: - changes = True + self.traverse_by(self.pre_order, tree.pre_order()) + self.traverse_by(self.post_order, tree.post_order()) for fixer in all_fixers: fixer.finish_tree(tree, filename) - return changed + return tree.was_changed def traverse_by(self, fixers, traversal): - changed = False if not fixers: - return changed + return for node in traversal: for fixer in fixers: results = fixer.match(node) @@ -289,8 +285,6 @@ str(new) != str(node)): node.replace(new) node = new - changed = True - return changed def write_file(self, new_text, filename, old_text=None): """Writes a string to a file. From python-checkins at python.org Tue Jul 17 23:11:31 2007 From: python-checkins at python.org (collin.winter) Date: Tue, 17 Jul 2007 23:11:31 +0200 (CEST) Subject: [Python-checkins] r56426 - in sandbox/trunk/2to3: tests/test_fixers.py Message-ID: <20070717211131.A283C1E401B@bag.python.org> Author: collin.winter Date: Tue Jul 17 23:11:31 2007 New Revision: 56426 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/tests/test_fixers.py Log: Add assertions to make sure that all trees that should change register as 'changed'. 
Modified: sandbox/trunk/2to3/tests/test_fixers.py ============================================================================== --- sandbox/trunk/2to3/tests/test_fixers.py (original) +++ sandbox/trunk/2to3/tests/test_fixers.py Tue Jul 17 23:11:31 2007 @@ -58,17 +58,29 @@ def _check(self, before, after): before = support.reformat(before) after = support.reformat(after) - refactored = self.refactor_stream("", StringIO(before)) + refactored, tree = self.refactor_stream("", StringIO(before)) self.failUnlessEqual(after, refactored) + return tree def check(self, before, after, ignore_warnings=False): - self._check(before, after) + tree = self._check(before, after) + self.failUnless(tree.was_changed) if not ignore_warnings: self.failUnlessEqual(self.logging_stream.getvalue(), "") - def warns(self, before, after, message): - self._check(before, after) + def warns(self, before, after, message, unchanged=False): + tree = self._check(before, after) self.failUnless(message in self.logging_stream.getvalue()) + if not unchanged: + self.failUnless(tree.was_changed) + + def warns_unchanged(self, before, message): + self.warns(before, before, message, unchanged=True) + + def unchanged(self, before, ignore_warnings=False): + self._check(before, before) + if not ignore_warnings: + self.failUnlessEqual(self.logging_stream.getvalue(), "") def refactor_stream(self, stream_name, stream): try: @@ -76,7 +88,7 @@ except Exception, err: raise self.refactor.refactor_tree(tree, stream_name) - return str(tree) + return str(tree), tree class Test_ne(FixerTestCase): @@ -249,39 +261,39 @@ def test_unchanged_1(self): s = """apply()""" - self.check(s, s) + self.unchanged(s) def test_unchanged_2(self): s = """apply(f)""" - self.check(s, s) + self.unchanged(s) def test_unchanged_3(self): s = """apply(f,)""" - self.check(s, s) + self.unchanged(s) def test_unchanged_4(self): s = """apply(f, args, kwds, extras)""" - self.check(s, s) + self.unchanged(s) def test_unchanged_5(self): s = """apply(f, *args, 
**kwds)""" - self.check(s, s) + self.unchanged(s) def test_unchanged_6(self): s = """apply(f, *args)""" - self.check(s, s) + self.unchanged(s) def test_unchanged_7(self): s = """apply(func=f, args=args, kwds=kwds)""" - self.check(s, s) + self.unchanged(s) def test_unchanged_8(self): s = """apply(f, args=args, kwds=kwds)""" - self.check(s, s) + self.unchanged(s) def test_unchanged_9(self): s = """apply(f, args, kwds=kwds)""" - self.check(s, s) + self.unchanged(s) class Test_intern(FixerTestCase): @@ -319,19 +331,19 @@ def test_unchanged(self): s = """intern(a=1)""" - self.check(s, s) + self.unchanged(s) s = """intern(f, g)""" - self.check(s, s) + self.unchanged(s) s = """intern(*h)""" - self.check(s, s) + self.unchanged(s) s = """intern(**i)""" - self.check(s, s) + self.unchanged(s) s = """intern()""" - self.check(s, s) + self.unchanged(s) class Test_print(FixerTestCase): fixer = "print" @@ -433,19 +445,19 @@ def test_unchanged_1(self): s = """exec(code)""" - self.check(s, s) + self.unchanged(s) def test_unchanged_2(self): s = """exec (code)""" - self.check(s, s) + self.unchanged(s) def test_unchanged_3(self): s = """exec(code, ns)""" - self.check(s, s) + self.unchanged(s) def test_unchanged_4(self): s = """exec(code, ns1, ns2)""" - self.check(s, s) + self.unchanged(s) class Test_repr(FixerTestCase): @@ -626,46 +638,28 @@ # These should not be touched: def test_unchanged_1(self): - b = """ - try: - pass - except: - pass""" - - a = """ + s = """ try: pass except: pass""" - self.check(b, a) + self.unchanged(s) def test_unchanged_2(self): - b = """ - try: - pass - except Exception: - pass""" - - a = """ + s = """ try: pass except Exception: pass""" - self.check(b, a) + self.unchanged(s) def test_unchanged_3(self): - b = """ - try: - pass - except (Exception, SystemExit): - pass""" - - a = """ + s = """ try: pass except (Exception, SystemExit): pass""" - self.check(b, a) + self.unchanged(s) class Test_raise(FixerTestCase): @@ -724,15 +718,15 @@ def 
test_string_exc(self): s = """raise 'foo'""" - self.warns(s, s, "Python 3 does not support string exceptions") + self.warns_unchanged(s, "Python 3 does not support string exceptions") def test_string_exc_val(self): s = """raise "foo", 5""" - self.warns(s, s, "Python 3 does not support string exceptions") + self.warns_unchanged(s, "Python 3 does not support string exceptions") def test_string_exc_val_tb(self): s = """raise "foo", 5, 6""" - self.warns(s, s, "Python 3 does not support string exceptions") + self.warns_unchanged(s, "Python 3 does not support string exceptions") # These should result in traceback-assignment @@ -818,32 +812,29 @@ def test_warn_1(self): s = """g.throw("foo")""" - self.warns(s, s, "Python 3 does not support string exceptions") + self.warns_unchanged(s, "Python 3 does not support string exceptions") def test_warn_2(self): s = """g.throw("foo", 5)""" - self.warns(s, s, "Python 3 does not support string exceptions") + self.warns_unchanged(s, "Python 3 does not support string exceptions") def test_warn_3(self): s = """g.throw("foo", 5, 6)""" - self.warns(s, s, "Python 3 does not support string exceptions") + self.warns_unchanged(s, "Python 3 does not support string exceptions") # These should not be touched def test_untouched_1(self): - b = """g.throw(Exception)""" - a = """g.throw(Exception)""" - self.check(b, a) + s = """g.throw(Exception)""" + self.unchanged(s) def test_untouched_2(self): - b = """g.throw(Exception(5, 6))""" - a = """g.throw(Exception(5, 6))""" - self.check(b, a) + s = """g.throw(Exception(5, 6))""" + self.unchanged(s) def test_untouched_3(self): - b = """5 + g.throw(Exception(5, 6))""" - a = """5 + g.throw(Exception(5, 6))""" - self.check(b, a) + s = """5 + g.throw(Exception(5, 6))""" + self.unchanged(s) # These should result in traceback-assignment @@ -949,16 +940,16 @@ self.check(b, a) def test_unchanged_1(self): - b = """a = 12""" - self.check(b, b) + s = """a = 12""" + self.unchanged(s) def test_unchanged_2(self): - b = 
"""b = 0x12""" - self.check(b, b) + s = """b = 0x12""" + self.unchanged(s) def test_unchanged_3(self): - b = """c = 3.14""" - self.check(b, b) + s = """c = 3.14""" + self.unchanged(s) def test_prefix_preservation(self): b = """x = long( x )""" @@ -1040,12 +1031,12 @@ self.check(b, a) def test_07(self): - b = "list(d.keys())" - self.check(b, b) + s = "list(d.keys())" + self.unchanged(s) def test_08(self): - b = "sorted(d.keys())" - self.check(b, b) + s = "sorted(d.keys())" + self.unchanged(s) def test_09(self): b = "iter(d.keys())" @@ -1211,10 +1202,16 @@ def test_unchanged(self): for attr in self.attrs: s = "foo(func_%s + 5)" % attr - self.check(s, s) + self.unchanged(s) s = "f(foo.__%s__)" % attr - self.check(s, s) + self.unchanged(s) + + def test_regressions(self): + # Found in setuptools + b = "extract_constant(f1.func_code,'q', -1)" + a = "extract_constant(f1.__code__,'q', -1)" + self.check(b, a) class Test_xreadlines(FixerTestCase): @@ -1248,16 +1245,16 @@ def test_unchanged(self): s = "for x in f.xreadlines(5): pass" - self.check(s, s) + self.unchanged(s) s = "for x in f.xreadlines(k=5): pass" - self.check(s, s) + self.unchanged(s) s = "for x in f.xreadlines(*k, **v): pass" - self.check(s, s) + self.unchanged(s) s = "foo(xreadlines)" - self.check(s, s) + self.unchanged(s) class Test_stringio(FixerTestCase): @@ -1282,7 +1279,7 @@ self.check(b, a) s = "from foo import StringIO" - self.check(s, s) + self.unchanged(s) def test_import_module_as(self): b = "import StringIO as foo_bar" @@ -1364,15 +1361,15 @@ def test_unchanged_1(self): s = """def foo(): pass""" - self.check(s, s) + self.unchanged(s) def test_unchanged_2(self): s = """def foo(a, b, c): pass""" - self.check(s, s) + self.unchanged(s) def test_unchanged_3(self): s = """def foo(a=3, b=4, c=5): pass""" - self.check(s, s) + self.unchanged(s) def test_1(self): b = """ @@ -1477,7 +1474,7 @@ def test_lambda_no_change(self): s = """lambda x: x + 5""" - self.check(s, s) + self.unchanged(s) def 
test_lambda_simple(self): b = """lambda (x, y): x + f(y)""" @@ -1695,7 +1692,7 @@ def next(self, a, b): pass """ - self.check(s, s) + self.unchanged(s) def test_shadowing_assign_simple(self): s = """ @@ -1705,7 +1702,7 @@ def next(self, a, b): pass """ - self.warns(s, s, "Calls to builtin next() possibly shadowed") + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") def test_shadowing_assign_tuple_1(self): s = """ @@ -1715,7 +1712,7 @@ def next(self, a, b): pass """ - self.warns(s, s, "Calls to builtin next() possibly shadowed") + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") def test_shadowing_assign_tuple_2(self): s = """ @@ -1725,7 +1722,7 @@ def next(self, a, b): pass """ - self.warns(s, s, "Calls to builtin next() possibly shadowed") + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") def test_shadowing_assign_list_1(self): s = """ @@ -1735,7 +1732,7 @@ def next(self, a, b): pass """ - self.warns(s, s, "Calls to builtin next() possibly shadowed") + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") def test_shadowing_assign_list_2(self): s = """ @@ -1745,7 +1742,7 @@ def next(self, a, b): pass """ - self.warns(s, s, "Calls to builtin next() possibly shadowed") + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") def test_builtin_assign(self): s = """ @@ -1756,7 +1753,7 @@ def next(self, a, b): pass """ - self.warns(s, s, "Calls to builtin next() possibly shadowed") + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") def test_builtin_assign_in_tuple(self): s = """ @@ -1767,7 +1764,7 @@ def next(self, a, b): pass """ - self.warns(s, s, "Calls to builtin next() possibly shadowed") + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") def test_builtin_assign_in_list(self): s = """ @@ -1778,7 +1775,7 @@ def next(self, a, b): pass """ - self.warns(s, s, "Calls to builtin next() possibly shadowed") + self.warns_unchanged(s, "Calls to 
builtin next() possibly shadowed") def test_assign_to_next(self): s = """ @@ -1789,7 +1786,7 @@ def next(self, a, b): pass """ - self.check(s, s) + self.unchanged(s) def test_assign_to_next_in_tuple(self): s = """ @@ -1800,7 +1797,7 @@ def next(self, a, b): pass """ - self.check(s, s) + self.unchanged(s) def test_assign_to_next_in_list(self): s = """ @@ -1811,7 +1808,7 @@ def next(self, a, b): pass """ - self.check(s, s) + self.unchanged(s) def test_shadowing_import_1(self): s = """ @@ -1821,7 +1818,7 @@ def next(self, a, b): pass """ - self.warns(s, s, "Calls to builtin next() possibly shadowed") + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") def test_shadowing_import_2(self): s = """ @@ -1831,7 +1828,7 @@ def next(self, a, b): pass """ - self.warns(s, s, "Calls to builtin next() possibly shadowed") + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") def test_shadowing_import_3(self): s = """ @@ -1841,7 +1838,7 @@ def next(self, a, b): pass """ - self.warns(s, s, "Calls to builtin next() possibly shadowed") + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") def test_shadowing_import_from_1(self): s = """ @@ -1851,7 +1848,7 @@ def next(self, a, b): pass """ - self.warns(s, s, "Calls to builtin next() possibly shadowed") + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") def test_shadowing_import_from_2(self): s = """ @@ -1861,7 +1858,7 @@ def next(self, a, b): pass """ - self.warns(s, s, "Calls to builtin next() possibly shadowed") + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") def test_shadowing_import_from_3(self): s = """ @@ -1871,7 +1868,7 @@ def next(self, a, b): pass """ - self.warns(s, s, "Calls to builtin next() possibly shadowed") + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") def test_shadowing_import_from_4(self): s = """ @@ -1881,7 +1878,7 @@ def next(self, a, b): pass """ - self.warns(s, s, "Calls to builtin next() possibly 
shadowed") + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") def test_shadowing_funcdef_1(self): s = """ @@ -1892,7 +1889,7 @@ def next(self, a, b): pass """ - self.warns(s, s, "Calls to builtin next() possibly shadowed") + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") def test_shadowing_funcdef_2(self): b = """ @@ -1923,7 +1920,7 @@ global next next = 5 """ - self.warns(s, s, "Calls to builtin next() possibly shadowed") + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") def test_shadowing_global_2(self): s = """ @@ -1931,7 +1928,7 @@ global a, next, b next = 5 """ - self.warns(s, s, "Calls to builtin next() possibly shadowed") + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") def test_shadowing_for_simple(self): s = """ @@ -1941,7 +1938,7 @@ b = 5 c = 6 """ - self.warns(s, s, "Calls to builtin next() possibly shadowed") + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") def test_shadowing_for_tuple_1(self): s = """ @@ -1951,7 +1948,7 @@ b = 5 c = 6 """ - self.warns(s, s, "Calls to builtin next() possibly shadowed") + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") def test_shadowing_for_tuple_2(self): s = """ @@ -1961,7 +1958,7 @@ b = 5 c = 6 """ - self.warns(s, s, "Calls to builtin next() possibly shadowed") + self.warns_unchanged(s, "Calls to builtin next() possibly shadowed") def test_noncall_access_1(self): b = """gnext = g.next""" @@ -2013,7 +2010,7 @@ def __bool__(self): pass """ - self.check(s, s) + self.unchanged(s) def test_unchanged_2(self): s = """ @@ -2021,14 +2018,14 @@ def __nonzero__(self, a): pass """ - self.check(s, s) + self.unchanged(s) def test_unchanged_func(self): s = """ def __nonzero__(self): pass """ - self.check(s, s) + self.unchanged(s) class Test_numliterals(FixerTestCase): fixer = "numliterals" @@ -2055,37 +2052,37 @@ def test_unchanged_int(self): s = """5""" - self.check(s, s) + self.unchanged(s) def 
test_unchanged_float(self): s = """5.0""" - self.check(s, s) + self.unchanged(s) def test_unchanged_octal(self): s = """0o755""" - self.check(s, s) + self.unchanged(s) def test_unchanged_hex(self): s = """0xABC""" - self.check(s, s) + self.unchanged(s) def test_unchanged_exp(self): s = """5.0e10""" - self.check(s, s) + self.unchanged(s) def test_unchanged_complex_int(self): s = """5 + 4j""" - self.check(s, s) + self.unchanged(s) def test_unchanged_complex_float(self): s = """5.4 + 4.9j""" - self.check(s, s) + self.unchanged(s) def test_unchanged_complex_bare(self): s = """4j""" - self.check(s, s) + self.unchanged(s) s = """4.4j""" - self.check(s, s) + self.unchanged(s) class Test_unicode(FixerTestCase): fixer = "unicode" @@ -2129,16 +2126,16 @@ def test_callable_should_not_change(self): a = """callable(*x)""" - self.check(a, a) + self.unchanged(a) a = """callable(x, y)""" - self.check(a, a) + self.unchanged(a) a = """callable(x, kw=y)""" - self.check(a, a) + self.unchanged(a) a = """callable()""" - self.check(a, a) + self.unchanged(a) class Test_filter(FixerTestCase): fixer = "filter" @@ -2168,25 +2165,25 @@ def test_filter_nochange(self): a = """iter(filter(f, 'abc'))""" - self.check(a, a) + self.unchanged(a, a) a = """list(filter(f, 'abc'))""" - self.check(a, a) + self.unchanged(a, a) a = """list(filter(f, 'abc'))[0]""" - self.check(a, a) + self.unchanged(a, a) a = """tuple(filter(f, 'abc'))""" - self.check(a, a) + self.unchanged(a, a) a = """sorted(filter(f, 'abc'))""" - self.check(a, a) + self.unchanged(a, a) a = """sorted(filter(f, 'abc'), key=blah)""" - self.check(a, a) + self.unchanged(a, a) a = """sorted(filter(f, 'abc'), key=blah)[0]""" - self.check(a, a) + self.unchanged(a, a) a = """for i in filter(f, 'abc'): pass""" - self.check(a, a) + self.unchanged(a, a) a = """[x for x in filter(f, 'abc')]""" - self.check(a, a) + self.unchanged(a, a) a = """(x for x in filter(f, 'abc'))""" - self.check(a, a) + self.unchanged(a, a) class Test_map(FixerTestCase): 
fixer = "map" @@ -2229,25 +2226,25 @@ def test_map_nochange(self): a = """iter(map(f, 'abc'))""" - self.check(a, a) + self.unchanged(a) a = """list(map(f, 'abc'))""" - self.check(a, a) + self.unchanged(a) a = """list(map(f, 'abc'))[0]""" - self.check(a, a) + self.unchanged(a) a = """tuple(map(f, 'abc'))""" - self.check(a, a) + self.unchanged(a) a = """sorted(map(f, 'abc'))""" - self.check(a, a) + self.unchanged(a) a = """sorted(map(f, 'abc'), key=blah)""" - self.check(a, a) + self.unchanged(a) a = """sorted(map(f, 'abc'), key=blah)[0]""" - self.check(a, a) + self.unchanged(a) a = """for i in map(f, 'abc'): pass""" - self.check(a, a) + self.unchanged(a) a = """[x for x in map(f, 'abc')]""" - self.check(a, a) + self.unchanged(a) a = """(x for x in map(f, 'abc'))""" - self.check(a, a) + self.unchanged(a) if __name__ == "__main__": From python-checkins at python.org Tue Jul 17 23:11:44 2007 From: python-checkins at python.org (collin.winter) Date: Tue, 17 Jul 2007 23:11:44 +0200 (CEST) Subject: [Python-checkins] r56427 - in sandbox/trunk/2to3: fixes/fix_funcattrs.py tests/test_fixers.py Message-ID: <20070717211144.1FA5F1E400D@bag.python.org> Author: collin.winter Date: Tue Jul 17 23:11:43 2007 New Revision: 56427 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/fixes/fix_funcattrs.py sandbox/trunk/2to3/tests/test_fixers.py Log: Fix a bug in fix_funcattrs that ignored deeply-nested attribute lookups. Modified: sandbox/trunk/2to3/fixes/fix_funcattrs.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_funcattrs.py (original) +++ sandbox/trunk/2to3/fixes/fix_funcattrs.py Tue Jul 17 23:11:43 2007 @@ -8,7 +8,7 @@ class FixFuncattrs(basefix.BaseFix): PATTERN = """ - power< any trailer< '.' attr=('func_closure' | 'func_doc' | 'func_globals' + power< any+ trailer< '.' 
attr=('func_closure' | 'func_doc' | 'func_globals' | 'func_name' | 'func_defaults' | 'func_code' | 'func_dict') > any* > """ Modified: sandbox/trunk/2to3/tests/test_fixers.py ============================================================================== --- sandbox/trunk/2to3/tests/test_fixers.py (original) +++ sandbox/trunk/2to3/tests/test_fixers.py Tue Jul 17 23:11:43 2007 @@ -1199,6 +1199,10 @@ a = "a.__%s__" % attr self.check(b, a) + b = "self.foo.func_%s.foo_bar" % attr + a = "self.foo.__%s__.foo_bar" % attr + self.check(b, a) + def test_unchanged(self): for attr in self.attrs: s = "foo(func_%s + 5)" % attr @@ -1207,11 +1211,8 @@ s = "f(foo.__%s__)" % attr self.unchanged(s) - def test_regressions(self): - # Found in setuptools - b = "extract_constant(f1.func_code,'q', -1)" - a = "extract_constant(f1.__code__,'q', -1)" - self.check(b, a) + s = "f(foo.__%s__.foo)" % attr + self.unchanged(s) class Test_xreadlines(FixerTestCase): From python-checkins at python.org Tue Jul 17 23:11:56 2007 From: python-checkins at python.org (collin.winter) Date: Tue, 17 Jul 2007 23:11:56 +0200 (CEST) Subject: [Python-checkins] r56428 - in sandbox/trunk/2to3: fixes/util.py tests/test_util.py Message-ID: <20070717211156.1E2561E400F@bag.python.org> Author: collin.winter Date: Tue Jul 17 23:11:55 2007 New Revision: 56428 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/fixes/util.py sandbox/trunk/2to3/tests/test_util.py Log: Make is_tuple() recognize empty tuple literals. 
Modified: sandbox/trunk/2to3/fixes/util.py ============================================================================== --- sandbox/trunk/2to3/fixes/util.py (original) +++ sandbox/trunk/2to3/fixes/util.py Tue Jul 17 23:11:55 2007 @@ -109,6 +109,8 @@ def is_tuple(node): """Does the node represent a tuple literal?""" + if isinstance(node, Node) and node.children == [LParen(), RParen()]: + return True return (isinstance(node, Node) and len(node.children) == 3 and isinstance(node.children[0], Leaf) Modified: sandbox/trunk/2to3/tests/test_util.py ============================================================================== --- sandbox/trunk/2to3/tests/test_util.py (original) +++ sandbox/trunk/2to3/tests/test_util.py Tue Jul 17 23:11:55 2007 @@ -38,6 +38,7 @@ self.failUnless(self.is_tuple("(a, (b, c))")) self.failUnless(self.is_tuple("((a, (b, c)),)")) self.failUnless(self.is_tuple("(a,)")) + self.failUnless(self.is_tuple("()")) def test_invalid(self): self.failIf(self.is_tuple("(a)")) From python-checkins at python.org Tue Jul 17 23:12:07 2007 From: python-checkins at python.org (collin.winter) Date: Tue, 17 Jul 2007 23:12:07 +0200 (CEST) Subject: [Python-checkins] r56429 - in sandbox/trunk/2to3: fixes/fix_print.py tests/test_fixers.py Message-ID: <20070717211207.B38291E4011@bag.python.org> Author: collin.winter Date: Tue Jul 17 23:12:07 2007 New Revision: 56429 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/fixes/fix_print.py sandbox/trunk/2to3/tests/test_fixers.py Log: Simplify print fixer, make it idempotent. 
Modified: sandbox/trunk/2to3/fixes/fix_print.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_print.py (original) +++ sandbox/trunk/2to3/fixes/fix_print.py Tue Jul 17 23:12:07 2007 @@ -14,32 +14,31 @@ import pytree from pgen2 import token from fixes import basefix -from fixes.util import Name, Call, Comma, String +from fixes.util import Name, Call, Comma, String, is_tuple class FixPrint(basefix.BaseFix): PATTERN = """ - 'print' | print_stmt + simple_stmt< bare='print' any > | print_stmt """ - def match(self, node): - # Override - if node.parent is not None and node.parent.type == self.syms.print_stmt: - # Avoid matching 'print' as part of a print_stmt - return None - return self.pattern.match(node) - def transform(self, node, results): assert results - syms = self.syms + bare_print = results.get("bare") - if node == Name("print"): + if bare_print: # Special-case print all by itself - return Call(Name("print"), [], prefix=node.get_prefix()) + bare_print.replace(Call(Name("print"), [], + prefix=bare_print.get_prefix())) + return assert node.children[0] == Name("print") args = node.children[1:] sep = end = file = None + if is_tuple(args[0]): + # We don't want to keep sticking parens around an + # already-parenthesised expression. 
+ return if args and args[-1] == Comma(): args = args[:-1] end = " " Modified: sandbox/trunk/2to3/tests/test_fixers.py ============================================================================== --- sandbox/trunk/2to3/tests/test_fixers.py (original) +++ sandbox/trunk/2to3/tests/test_fixers.py Tue Jul 17 23:12:07 2007 @@ -13,6 +13,7 @@ import unittest # Local imports +import pygram import pytree import refactor @@ -353,6 +354,24 @@ a = """print(1, 1+1, 1+1+1)""" self.check(b, a) + def test_idempotency(self): + s = """print(1, 1+1, 1+1+1)""" + self.unchanged(s) + + s = """print()""" + self.unchanged(s) + + def test_idempotency_print_as_function(self): + print_stmt = pygram.python_grammar.keywords.pop("print") + try: + s = """print(1, 1+1, 1+1+1)""" + self.unchanged(s) + + s = """print()""" + self.unchanged(s) + finally: + pygram.python_grammar.keywords["print"] = print_stmt + def test_1(self): b = """print 1, 1+1, 1+1+1""" a = """print(1, 1+1, 1+1+1)""" From python-checkins at python.org Tue Jul 17 23:12:24 2007 From: python-checkins at python.org (collin.winter) Date: Tue, 17 Jul 2007 23:12:24 +0200 (CEST) Subject: [Python-checkins] r56430 - in sandbox/trunk/2to3: fixes/fix_dummy.py Message-ID: <20070717211224.3E0DC1E4011@bag.python.org> Author: collin.winter Date: Tue Jul 17 23:12:23 2007 New Revision: 56430 Removed: sandbox/trunk/2to3/fixes/fix_dummy.py Modified: sandbox/trunk/2to3/ (props changed) Log: Remove the dummy fixer. Deleted: /sandbox/trunk/2to3/fixes/fix_dummy.py ============================================================================== --- /sandbox/trunk/2to3/fixes/fix_dummy.py Tue Jul 17 23:12:23 2007 +++ (empty file) @@ -1,16 +0,0 @@ -# Copyright 2007 Google, Inc. All Rights Reserved. -# Licensed to PSF under a Contributor Agreement. - -"""This is a pass-through fixer. 
It can be useful when changing certain -parts of the parser or pytree.""" - -# Local imports -from fixes import basefix - -class FixDummy(basefix.BaseFix): - - def match(self, node): - return True - - def transform(self, node, results): - node.changed() From python-checkins at python.org Tue Jul 17 23:12:35 2007 From: python-checkins at python.org (collin.winter) Date: Tue, 17 Jul 2007 23:12:35 +0200 (CEST) Subject: [Python-checkins] r56431 - in sandbox/trunk/2to3: tests/test_util.py Message-ID: <20070717211235.83BFB1E400F@bag.python.org> Author: collin.winter Date: Tue Jul 17 23:12:35 2007 New Revision: 56431 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/tests/test_util.py Log: Whitespace, line length tweaks. Modified: sandbox/trunk/2to3/tests/test_util.py ============================================================================== --- sandbox/trunk/2to3/tests/test_util.py (original) +++ sandbox/trunk/2to3/tests/test_util.py Tue Jul 17 23:12:35 2007 @@ -21,7 +21,7 @@ tree = tree.children[0] tree.parent = None return tree - + class MacroTestCase(support.TestCase): def assertStr(self, node, string): if isinstance(node, (tuple, list)): @@ -39,7 +39,7 @@ self.failUnless(self.is_tuple("((a, (b, c)),)")) self.failUnless(self.is_tuple("(a,)")) self.failUnless(self.is_tuple("()")) - + def test_invalid(self): self.failIf(self.is_tuple("(a)")) self.failIf(self.is_tuple("('foo') % (b, c)")) @@ -55,7 +55,7 @@ self.failUnless(self.is_list("[a, b]")) self.failUnless(self.is_list("[a, [b, c]]")) self.failUnless(self.is_list("[[a, [b, c]],]")) - + def test_invalid(self): self.failIf(self.is_list("[]+[]")) @@ -64,25 +64,25 @@ def test(self): from fixes.util import Attr, Name call = parse("foo()", strip_levels=2) - + self.assertStr(Attr(Name("a"), Name("b")), "a.b") self.assertStr(Attr(call, Name("b")), "foo().b") - + def test_returns(self): from fixes.util import Attr, Name - + attr = Attr(Name("a"), Name("b")) self.assertEqual(type(attr), list) - + class 
Test_Name(MacroTestCase): def test(self): from fixes.util import Name - + self.assertStr(Name("a"), "a") self.assertStr(Name("foo.foo().bar"), "foo.foo().bar") self.assertStr(Name("a", prefix="b"), "ba") - + class Test_find_binding(support.TestCase): def find_binding(self, name, string): @@ -100,7 +100,7 @@ self.failUnless(self.find_binding("a", "(a,) = b")) self.failUnless(self.find_binding("a", "(a, b, c) = [b, c, d]")) self.failUnless(self.find_binding("a", "(c, (d, a), b) = foo()")) - self.failUnless(self.find_binding("a", "(a, b) = foo().foo.foo[6][foo]")) + self.failUnless(self.find_binding("a", "(a, b) = foo().foo[6][foo]")) self.failIf(self.find_binding("a", "(foo, b) = (b, a)")) self.failIf(self.find_binding("a", "(foo, (b, c)) = (a, b, c)")) @@ -108,22 +108,22 @@ self.failUnless(self.find_binding("a", "[a] = b")) self.failUnless(self.find_binding("a", "[a, b, c] = [b, c, d]")) self.failUnless(self.find_binding("a", "[c, [d, a], b] = foo()")) - self.failUnless(self.find_binding("a", "[a, b] = foo().foo.foo[a][foo]")) + self.failUnless(self.find_binding("a", "[a, b] = foo().foo[a][foo]")) self.failIf(self.find_binding("a", "[foo, b] = (b, a)")) self.failIf(self.find_binding("a", "[foo, [b, c]] = (a, b, c)")) - + def test_invalid_assignments(self): self.failIf(self.find_binding("a", "foo.a = 5")) self.failIf(self.find_binding("a", "foo[a] = 5")) self.failIf(self.find_binding("a", "foo(a) = 5")) self.failIf(self.find_binding("a", "foo(a, b) = 5")) - + def test_simple_import(self): self.failUnless(self.find_binding("a", "import a")) self.failUnless(self.find_binding("a", "import b, c, a, d")) self.failIf(self.find_binding("a", "import b")) self.failIf(self.find_binding("a", "import b, c, d")) - + def test_from_import(self): self.failUnless(self.find_binding("a", "from x import a")) self.failUnless(self.find_binding("a", "from a import a")) @@ -133,13 +133,13 @@ self.failIf(self.find_binding("a", "from a import b")) self.failIf(self.find_binding("a", "from a.d 
import b")) self.failIf(self.find_binding("a", "from d.a import b")) - + def test_import_as(self): self.failUnless(self.find_binding("a", "import b as a")) self.failUnless(self.find_binding("a", "import b as a, c, a as f, d")) self.failIf(self.find_binding("a", "import a as f")) self.failIf(self.find_binding("a", "import b, c as f, d as e")) - + def test_from_import_as(self): self.failUnless(self.find_binding("a", "from x import b as a")) self.failUnless(self.find_binding("a", "from x import g as a, d as b")) @@ -148,7 +148,7 @@ self.failIf(self.find_binding("a", "from a import b as t")) self.failIf(self.find_binding("a", "from a.d import b as t")) self.failIf(self.find_binding("a", "from d.a import b as t")) - + def test_function_def(self): self.failUnless(self.find_binding("a", "def a(): pass")) self.failUnless(self.find_binding("a", "def a(b, c, d): pass")) @@ -157,13 +157,13 @@ self.failIf(self.find_binding("a", "def d(a=7): pass")) self.failIf(self.find_binding("a", "def d(a): pass")) self.failIf(self.find_binding("a", "def d(): a = 7")) - + s = """ def d(): def a(): pass""" self.failIf(self.find_binding("a", s)) - + def test_class_def(self): self.failUnless(self.find_binding("a", "class a: pass")) self.failUnless(self.find_binding("a", "class a(): pass")) @@ -175,13 +175,13 @@ self.failIf(self.find_binding("a", "class d(b, *a): pass")) self.failIf(self.find_binding("a", "class d(b, **a): pass")) self.failIf(self.find_binding("a", "class d: a = 7")) - + s = """ class d(): class a(): pass""" self.failIf(self.find_binding("a", s)) - + def test_for(self): self.failUnless(self.find_binding("a", "for a in r: pass")) self.failUnless(self.find_binding("a", "for a, b in r: pass")) @@ -190,90 +190,90 @@ self.failUnless(self.find_binding("a", "for c, (a, b) in r: pass")) self.failUnless(self.find_binding("a", "for c in r: a = c")) self.failIf(self.find_binding("a", "for c in a: pass")) - + def test_for_nested(self): s = """ for b in r: for a in b: pass""" 
self.failUnless(self.find_binding("a", s)) - + s = """ for b in r: for a, c in b: pass""" self.failUnless(self.find_binding("a", s)) - + s = """ for b in r: for (a, c) in b: pass""" self.failUnless(self.find_binding("a", s)) - + s = """ for b in r: for (a,) in b: pass""" self.failUnless(self.find_binding("a", s)) - + s = """ for b in r: for c, (a, d) in b: pass""" self.failUnless(self.find_binding("a", s)) - + s = """ for b in r: for c in b: a = 7""" self.failUnless(self.find_binding("a", s)) - + s = """ for b in r: for c in b: d = a""" self.failIf(self.find_binding("a", s)) - + s = """ for b in r: for c in a: d = 7""" self.failIf(self.find_binding("a", s)) - + def test_if(self): self.failUnless(self.find_binding("a", "if b in r: a = c")) self.failIf(self.find_binding("a", "if a in r: d = e")) - + def test_if_nested(self): s = """ if b in r: if c in d: a = c""" self.failUnless(self.find_binding("a", s)) - + s = """ if b in r: if c in d: c = a""" self.failIf(self.find_binding("a", s)) - + def test_while(self): self.failUnless(self.find_binding("a", "while b in r: a = c")) self.failIf(self.find_binding("a", "while a in r: d = e")) - + def test_while_nested(self): s = """ while b in r: while c in d: a = c""" self.failUnless(self.find_binding("a", s)) - + s = """ while b in r: while c in d: c = a""" self.failIf(self.find_binding("a", s)) - + def test_try_except(self): s = """ try: @@ -281,14 +281,14 @@ except: b = 8""" self.failUnless(self.find_binding("a", s)) - + s = """ try: b = 8 except: a = 6""" self.failUnless(self.find_binding("a", s)) - + s = """ try: b = 8 @@ -297,14 +297,14 @@ except: a = 6""" self.failUnless(self.find_binding("a", s)) - + s = """ try: b = 8 except: b = 6""" self.failIf(self.find_binding("a", s)) - + def test_try_except_nested(self): s = """ try: @@ -315,7 +315,7 @@ except: b = 8""" self.failUnless(self.find_binding("a", s)) - + s = """ try: b = 8 @@ -325,7 +325,7 @@ except: pass""" self.failUnless(self.find_binding("a", s)) - + s = """ try: 
b = 8 @@ -335,7 +335,7 @@ except: a = 6""" self.failUnless(self.find_binding("a", s)) - + s = """ try: try: @@ -347,7 +347,7 @@ except: pass""" self.failUnless(self.find_binding("a", s)) - + s = """ try: pass @@ -359,14 +359,14 @@ except: a = 6""" self.failUnless(self.find_binding("a", s)) - + s = """ try: b = 8 except: b = 6""" self.failIf(self.find_binding("a", s)) - + s = """ try: try: @@ -381,7 +381,7 @@ except: o = y""" self.failIf(self.find_binding("a", s)) - + def test_try_except_finally(self): s = """ try: @@ -391,21 +391,21 @@ finally: a = 9""" self.failUnless(self.find_binding("a", s)) - + s = """ try: b = 8 finally: a = 6""" self.failUnless(self.find_binding("a", s)) - + s = """ try: b = 8 finally: b = 6""" self.failIf(self.find_binding("a", s)) - + s = """ try: b = 8 @@ -414,7 +414,7 @@ finally: b = 6""" self.failIf(self.find_binding("a", s)) - + def test_try_except_finally_nested(self): s = """ try: @@ -429,7 +429,7 @@ finally: c = 9""" self.failUnless(self.find_binding("a", s)) - + s = """ try: b = 8 @@ -439,7 +439,7 @@ finally: a = 6""" self.failUnless(self.find_binding("a", s)) - + s = """ try: b = 8 @@ -449,7 +449,7 @@ finally: b = 7""" self.failIf(self.find_binding("a", s)) - + if __name__ == "__main__": import __main__ From python-checkins at python.org Tue Jul 17 23:12:52 2007 From: python-checkins at python.org (collin.winter) Date: Tue, 17 Jul 2007 23:12:52 +0200 (CEST) Subject: [Python-checkins] r56432 - in sandbox/trunk/2to3: fixes/fix_stringio.py tests/test_fixers.py Message-ID: <20070717211252.83DDD1E401E@bag.python.org> Author: collin.winter Date: Tue Jul 17 23:12:52 2007 New Revision: 56432 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/fixes/fix_stringio.py sandbox/trunk/2to3/tests/test_fixers.py Log: Make fix_stringio also patch up usages of cStringIO. 
Modified: sandbox/trunk/2to3/fixes/fix_stringio.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_stringio.py (original) +++ sandbox/trunk/2to3/fixes/fix_stringio.py Tue Jul 17 23:12:52 2007 @@ -15,23 +15,24 @@ from fixes import basefix from fixes.util import Name, attr_chain, any +MODULE = "('StringIO' | 'cStringIO')" class FixStringio(basefix.BaseFix): PATTERN = """ - import_name< 'import' (module='StringIO' - | dotted_as_names< any* module='StringIO' any* >) > + import_name< 'import' (module=%s + | dotted_as_names< any* module=%s any* >) > | - import_from< 'from' module_name='StringIO' 'import' + import_from< 'from' module_name=%s 'import' ( 'StringIO' | import_as_name< 'StringIO' 'as' any >) > | - import_from< 'from' module_name='StringIO' 'import' star='*' > + import_from< 'from' module_name=%s 'import' star='*' > | - import_name< 'import' dotted_as_name< module_name='StringIO' 'as' any > > + import_name< 'import' dotted_as_name< module_name=%s 'as' any > > | - power< module_name='StringIO' trailer< '.' 'StringIO' > any* > + power< module_name=%s trailer< '.' 
'StringIO' > any* > | - bare_name='StringIO' - """ + bare_name=%s + """ % ((MODULE,) * 7) order = "pre" # Pre-order tree traversal @@ -56,11 +57,14 @@ star = results.get("star") if import_mod: + import_mod = import_mod[0] self.module_import = True import_mod.replace(Name("io", prefix=import_mod.get_prefix())) elif module_name: + module_name = module_name[0] module_name.replace(Name("io", prefix=module_name.get_prefix())) if star: star.replace(Name("StringIO", prefix=star.get_prefix())) elif bare_name and self.module_import: + bare_name = bare_name[0] bare_name.replace(Name("io", prefix=bare_name.get_prefix())) Modified: sandbox/trunk/2to3/tests/test_fixers.py ============================================================================== --- sandbox/trunk/2to3/tests/test_fixers.py (original) +++ sandbox/trunk/2to3/tests/test_fixers.py Tue Jul 17 23:12:52 2007 @@ -1280,62 +1280,70 @@ class Test_stringio(FixerTestCase): fixer = "stringio" - def test_import_module(self): - b = "import StringIO" - a = "import io" - self.check(b, a) + modules = ["StringIO", "cStringIO"] - b = "import foo, StringIO, bar" - a = "import foo, io, bar" - self.check(b, a) + def test_import_module(self): + for module in self.modules: + b = "import %s" % module + a = "import io" + self.check(b, a) + + b = "import foo, %s, bar" % module + a = "import foo, io, bar" + self.check(b, a) def test_import_from(self): - b = "from StringIO import StringIO" - a = "from io import StringIO" - self.check(b, a) + for module in self.modules: + b = "from %s import StringIO" % module + a = "from io import StringIO" + self.check(b, a) + + b = "from %s import *" % module + a = "from io import StringIO" + self.check(b, a) - b = "from StringIO import *" - a = "from io import StringIO" - self.check(b, a) - - s = "from foo import StringIO" - self.unchanged(s) + s = "from foo import StringIO" + self.unchanged(s) def test_import_module_as(self): - b = "import StringIO as foo_bar" - a = "import io as foo_bar" - 
self.check(b, a) - - b = "import StringIO as foo_bar" - a = "import io as foo_bar" - self.check(b, a) + for module in self.modules: + b = "import %s as foo_bar" % module + a = "import io as foo_bar" + self.check(b, a) + + b = "import %s as foo_bar" % module + a = "import io as foo_bar" + self.check(b, a) def test_import_from_as(self): - b = "from StringIO import StringIO as foo_bar" - a = "from io import StringIO as foo_bar" - self.check(b, a) + for module in self.modules: + b = "from %s import StringIO as foo_bar" % module + a = "from io import StringIO as foo_bar" + self.check(b, a) def test_import_module_usage(self): - b = """ - import StringIO - foo(StringIO, StringIO.StringIO) - """ - a = """ - import io - foo(io, io.StringIO) - """ - self.check(b, a) + for module in self.modules: + b = """ + import %s + foo(%s, %s.StringIO) + """ % (module, module, module) + a = """ + import io + foo(io, io.StringIO) + """ + self.check(b, a) def test_from_import_usage(self): - b = """ - from StringIO import StringIO - foo(StringIO, StringIO()) - """ - a = """ - from io import StringIO - foo(StringIO, StringIO()) - """ - self.check(b, a) + for module in self.modules: + b = """ + from %s import StringIO + foo(StringIO, StringIO()) + """ % module + a = """ + from io import StringIO + foo(StringIO, StringIO()) + """ + self.check(b, a) class Test_input(FixerTestCase): From python-checkins at python.org Tue Jul 17 23:13:04 2007 From: python-checkins at python.org (collin.winter) Date: Tue, 17 Jul 2007 23:13:04 +0200 (CEST) Subject: [Python-checkins] r56433 - in sandbox/trunk/2to3: fixes/fix_filter.py fixes/fix_map.py tests/test_fixers.py Message-ID: <20070717211304.825081E401F@bag.python.org> Author: collin.winter Date: Tue Jul 17 23:13:04 2007 New Revision: 56433 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/fixes/fix_filter.py sandbox/trunk/2to3/fixes/fix_map.py sandbox/trunk/2to3/tests/test_fixers.py Log: Add set() to the list of special contexts for the map 
and filter fixers. Modified: sandbox/trunk/2to3/fixes/fix_filter.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_filter.py (original) +++ sandbox/trunk/2to3/fixes/fix_filter.py Tue Jul 17 23:13:04 2007 @@ -64,7 +64,7 @@ P1 = """ power< - ( 'iter' | 'list' | 'tuple' | 'sorted' ) + ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' ) trailer< '(' node=any ')' > any* > Modified: sandbox/trunk/2to3/fixes/fix_map.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_map.py (original) +++ sandbox/trunk/2to3/fixes/fix_map.py Tue Jul 17 23:13:04 2007 @@ -76,7 +76,7 @@ P1 = """ power< - ( 'iter' | 'list' | 'tuple' | 'sorted' ) + ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' ) trailer< '(' node=any ')' > any* > Modified: sandbox/trunk/2to3/tests/test_fixers.py ============================================================================== --- sandbox/trunk/2to3/tests/test_fixers.py (original) +++ sandbox/trunk/2to3/tests/test_fixers.py Tue Jul 17 23:13:04 2007 @@ -2198,6 +2198,10 @@ self.unchanged(a, a) a = """list(filter(f, 'abc'))[0]""" self.unchanged(a, a) + a = """set(filter(f, 'abc'))""" + self.unchanged(a) + a = """set(filter(f, 'abc')).pop()""" + self.unchanged(a) a = """tuple(filter(f, 'abc'))""" self.unchanged(a, a) a = """sorted(filter(f, 'abc'))""" @@ -2259,6 +2263,10 @@ self.unchanged(a) a = """list(map(f, 'abc'))[0]""" self.unchanged(a) + a = """set(map(f, 'abc'))""" + self.unchanged(a) + a = """set(map(f, 'abc')).pop()""" + self.unchanged(a) a = """tuple(map(f, 'abc'))""" self.unchanged(a) a = """sorted(map(f, 'abc'))""" From python-checkins at python.org Wed Jul 18 00:12:42 2007 From: python-checkins at python.org (collin.winter) Date: Wed, 18 Jul 2007 00:12:42 +0200 (CEST) Subject: [Python-checkins] r56434 - in sandbox/trunk/2to3: tests/test_fixers.py Message-ID: <20070717221242.15F391E4019@bag.python.org> Author: collin.winter Date: Wed 
Jul 18 00:12:41 2007 New Revision: 56434 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/tests/test_fixers.py Log: Fix search-and-replace error. Modified: sandbox/trunk/2to3/tests/test_fixers.py ============================================================================== --- sandbox/trunk/2to3/tests/test_fixers.py (original) +++ sandbox/trunk/2to3/tests/test_fixers.py Wed Jul 18 00:12:41 2007 @@ -2193,29 +2193,29 @@ def test_filter_nochange(self): a = """iter(filter(f, 'abc'))""" - self.unchanged(a, a) + self.unchanged(a) a = """list(filter(f, 'abc'))""" - self.unchanged(a, a) + self.unchanged(a) a = """list(filter(f, 'abc'))[0]""" - self.unchanged(a, a) + self.unchanged(a) a = """set(filter(f, 'abc'))""" self.unchanged(a) a = """set(filter(f, 'abc')).pop()""" self.unchanged(a) a = """tuple(filter(f, 'abc'))""" - self.unchanged(a, a) + self.unchanged(a) a = """sorted(filter(f, 'abc'))""" - self.unchanged(a, a) + self.unchanged(a) a = """sorted(filter(f, 'abc'), key=blah)""" - self.unchanged(a, a) + self.unchanged(a) a = """sorted(filter(f, 'abc'), key=blah)[0]""" - self.unchanged(a, a) + self.unchanged(a) a = """for i in filter(f, 'abc'): pass""" - self.unchanged(a, a) + self.unchanged(a) a = """[x for x in filter(f, 'abc')]""" - self.unchanged(a, a) + self.unchanged(a) a = """(x for x in filter(f, 'abc'))""" - self.unchanged(a, a) + self.unchanged(a) class Test_map(FixerTestCase): fixer = "map" From python-checkins at python.org Wed Jul 18 00:12:54 2007 From: python-checkins at python.org (collin.winter) Date: Wed, 18 Jul 2007 00:12:54 +0200 (CEST) Subject: [Python-checkins] r56435 - in sandbox/trunk/2to3: fixes/fix_filter.py fixes/fix_map.py tests/test_fixers.py Message-ID: <20070717221254.7FF811E4015@bag.python.org> Author: collin.winter Date: Wed Jul 18 00:12:54 2007 New Revision: 56435 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/fixes/fix_filter.py sandbox/trunk/2to3/fixes/fix_map.py 
sandbox/trunk/2to3/tests/test_fixers.py Log: Add join() method calls to the set of special contexts for fix_filter and fix_map. Modified: sandbox/trunk/2to3/fixes/fix_filter.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_filter.py (original) +++ sandbox/trunk/2to3/fixes/fix_filter.py Wed Jul 18 00:12:54 2007 @@ -64,7 +64,8 @@ P1 = """ power< - ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' ) + ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | + (any* trailer< '.' 'join' >) ) trailer< '(' node=any ')' > any* > Modified: sandbox/trunk/2to3/fixes/fix_map.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_map.py (original) +++ sandbox/trunk/2to3/fixes/fix_map.py Wed Jul 18 00:12:54 2007 @@ -76,7 +76,8 @@ P1 = """ power< - ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' ) + ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | + (any* trailer< '.' 'join' >) ) trailer< '(' node=any ')' > any* > Modified: sandbox/trunk/2to3/tests/test_fixers.py ============================================================================== --- sandbox/trunk/2to3/tests/test_fixers.py (original) +++ sandbox/trunk/2to3/tests/test_fixers.py Wed Jul 18 00:12:54 2007 @@ -2192,6 +2192,10 @@ ## self.check(b, a) def test_filter_nochange(self): + a = """b.join(filter(f, 'abc'))""" + self.unchanged(a) + a = """(a + foo(5)).join(filter(f, 'abc'))""" + self.unchanged(a) a = """iter(filter(f, 'abc'))""" self.unchanged(a) a = """list(filter(f, 'abc'))""" @@ -2257,6 +2261,10 @@ ## self.check(b, a) def test_map_nochange(self): + a = """b.join(map(f, 'abc'))""" + self.unchanged(a) + a = """(a + foo(5)).join(map(f, 'abc'))""" + self.unchanged(a) a = """iter(map(f, 'abc'))""" self.unchanged(a) a = """list(map(f, 'abc'))""" From python-checkins at python.org Wed Jul 18 00:13:19 2007 From: python-checkins at python.org (collin.winter) Date: Wed, 18 Jul 2007 00:13:19 +0200 (CEST) Subject: 
[Python-checkins] r56436 - in sandbox/trunk/2to3: fixes/fix_filter.py fixes/fix_map.py tests/test_fixers.py Message-ID: <20070717221319.E4F331E4010@bag.python.org> Author: collin.winter Date: Wed Jul 18 00:13:19 2007 New Revision: 56436 Modified: sandbox/trunk/2to3/ (props changed) sandbox/trunk/2to3/fixes/fix_filter.py sandbox/trunk/2to3/fixes/fix_map.py sandbox/trunk/2to3/tests/test_fixers.py Log: Add enumerate() calls to the set of special contexts for fix_filter and fix_map. Modified: sandbox/trunk/2to3/fixes/fix_filter.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_filter.py (original) +++ sandbox/trunk/2to3/fixes/fix_filter.py Wed Jul 18 00:13:19 2007 @@ -64,7 +64,7 @@ P1 = """ power< - ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | + ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | 'enumerate' | (any* trailer< '.' 'join' >) ) trailer< '(' node=any ')' > any* Modified: sandbox/trunk/2to3/fixes/fix_map.py ============================================================================== --- sandbox/trunk/2to3/fixes/fix_map.py (original) +++ sandbox/trunk/2to3/fixes/fix_map.py Wed Jul 18 00:13:19 2007 @@ -76,7 +76,7 @@ P1 = """ power< - ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | + ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | 'enumerate' | (any* trailer< '.' 
'join' >) ) trailer< '(' node=any ')' > any* Modified: sandbox/trunk/2to3/tests/test_fixers.py ============================================================================== --- sandbox/trunk/2to3/tests/test_fixers.py (original) +++ sandbox/trunk/2to3/tests/test_fixers.py Wed Jul 18 00:13:19 2007 @@ -2208,6 +2208,8 @@ self.unchanged(a) a = """tuple(filter(f, 'abc'))""" self.unchanged(a) + a = """enumerate(filter(f, 'abc'))""" + self.unchanged(a) a = """sorted(filter(f, 'abc'))""" self.unchanged(a) a = """sorted(filter(f, 'abc'), key=blah)""" @@ -2277,6 +2279,8 @@ self.unchanged(a) a = """tuple(map(f, 'abc'))""" self.unchanged(a) + a = """enumerate(map(f, 'abc'))""" + self.unchanged(a) a = """sorted(map(f, 'abc'))""" self.unchanged(a) a = """sorted(map(f, 'abc'), key=blah)""" From python-checkins at python.org Wed Jul 18 00:52:08 2007 From: python-checkins at python.org (guido.van.rossum) Date: Wed, 18 Jul 2007 00:52:08 +0200 (CEST) Subject: [Python-checkins] r56437 - peps/trunk/pep-0000.txt peps/trunk/pep-3124.txt Message-ID: <20070717225208.7CDE01E400F@bag.python.org> Author: guido.van.rossum Date: Wed Jul 18 00:52:08 2007 New Revision: 56437 Modified: peps/trunk/pep-0000.txt peps/trunk/pep-3124.txt Log: Defer PEP 3124. 
Modified: peps/trunk/pep-0000.txt ============================================================================== --- peps/trunk/pep-0000.txt (original) +++ peps/trunk/pep-0000.txt Wed Jul 18 00:52:08 2007 @@ -101,7 +101,6 @@ S 3108 Standard Library Reorganization Cannon S 3116 New I/O Stutzbach, Verdone, GvR S 3118 Revising the buffer protocol Oliphant, Banks - S 3124 Overloading, Generic Functions, Interfaces Eby S 3134 Exception Chaining and Embedded Tracebacks Yee S 3135 New Super Spealman, Delaney S 3136 Labeled break and continue Chisholm @@ -266,6 +265,7 @@ SR 3103 A Switch/Case Statement GvR SR 3117 Postfix Type Declarations Brandl SR 3122 Delineation of the main module Cannon + SD 3124 Overloading, Generic Functions, Interfaces Eby SR 3125 Remove Backslash Continuation Jewett SR 3126 Remove Implicit String Concatenation Jewett SR 3128 BList: A Faster List-like Type Stutzbach @@ -496,7 +496,7 @@ SA 3121 Extension Module Initialization & Finalization von L?wis SR 3122 Delineation of the main module Cannon SA 3123 Making PyObject_HEAD conform to standard C von L?wis - S 3124 Overloading, Generic Functions, Interfaces Eby + SD 3124 Overloading, Generic Functions, Interfaces Eby SR 3125 Remove Backslash Continuation Jewett SR 3126 Remove Implicit String Concatenation Jewett SA 3127 Integer Literal Support and Syntax Maupin Modified: peps/trunk/pep-3124.txt ============================================================================== --- peps/trunk/pep-3124.txt (original) +++ peps/trunk/pep-3124.txt Wed Jul 18 00:52:08 2007 @@ -13,6 +13,12 @@ Replaces: 245, 246 +Deferred +======== + +See http://mail.python.org/pipermail/python-3000/2007-July/008784.html. 
+ + Abstract ======== From python-checkins at python.org Wed Jul 18 08:37:56 2007 From: python-checkins at python.org (georg.brandl) Date: Wed, 18 Jul 2007 08:37:56 +0200 (CEST) Subject: [Python-checkins] r56439 - python/trunk/Doc/lib/libnis.tex Message-ID: <20070718063756.5F1391E4010@bag.python.org> Author: georg.brandl Date: Wed Jul 18 08:37:55 2007 New Revision: 56439 Modified: python/trunk/Doc/lib/libnis.tex Log: Use "Unix" as platform name, not "UNIX". Modified: python/trunk/Doc/lib/libnis.tex ============================================================================== --- python/trunk/Doc/lib/libnis.tex (original) +++ python/trunk/Doc/lib/libnis.tex Wed Jul 18 08:37:55 2007 @@ -2,7 +2,7 @@ Interface to Sun's NIS (Yellow Pages)} \declaremodule{extension}{nis} - \platform{UNIX} + \platform{Unix} \moduleauthor{Fred Gansevles}{Fred.Gansevles at cs.utwente.nl} \sectionauthor{Moshe Zadka}{moshez at zadka.site.co.il} \modulesynopsis{Interface to Sun's NIS (Yellow Pages) library.} From python-checkins at python.org Wed Jul 18 19:19:14 2007 From: python-checkins at python.org (guido.van.rossum) Date: Wed, 18 Jul 2007 19:19:14 +0200 (CEST) Subject: [Python-checkins] r56441 - in python/trunk: Lib/test/test_parser.py Python/ast.c Message-ID: <20070718171914.D05071E4006@bag.python.org> Author: guido.van.rossum Date: Wed Jul 18 19:19:14 2007 New Revision: 56441 Modified: python/trunk/Lib/test/test_parser.py python/trunk/Python/ast.c Log: SF patch# 1755885 by Kurt Kaiser: show location of Unicode escape errors. (Slightly tweaked for style and refcounts.) 
Modified: python/trunk/Lib/test/test_parser.py ============================================================================== --- python/trunk/Lib/test/test_parser.py (original) +++ python/trunk/Lib/test/test_parser.py Wed Jul 18 19:19:14 2007 @@ -474,6 +474,12 @@ st = parser.suite('1 = 3 + 4') self.assertRaises(SyntaxError, parser.compilest, st) + def test_compile_badunicode(self): + st = parser.suite('a = u"\U12345678"') + self.assertRaises(SyntaxError, parser.compilest, st) + st = parser.suite('a = u"\u1"') + self.assertRaises(SyntaxError, parser.compilest, st) + def test_main(): test_support.run_unittest( RoundtripLegalSyntaxTestCase, Modified: python/trunk/Python/ast.c ============================================================================== --- python/trunk/Python/ast.c (original) +++ python/trunk/Python/ast.c Wed Jul 18 19:19:14 2007 @@ -1243,9 +1243,26 @@ c->c_arena); case STRING: { PyObject *str = parsestrplus(c, n); - if (!str) + if (!str) { + if (PyErr_ExceptionMatches(PyExc_UnicodeError)){ + PyObject *type, *value, *tback, *errstr; + PyErr_Fetch(&type, &value, &tback); + errstr = ((PyUnicodeErrorObject *)value)->reason; + if (errstr) { + char *s = ""; + char buf[128]; + s = PyString_AsString(errstr); + PyOS_snprintf(buf, sizeof(buf), "(unicode error) %s", s); + ast_error(n, buf); + } else { + ast_error(n, "(unicode error) unknown error"); + } + Py_DECREF(type); + Py_DECREF(value); + Py_XDECREF(tback); + } return NULL; - + } PyArena_AddPyObject(c->c_arena, str); return Str(str, LINENO(n), n->n_col_offset, c->c_arena); } From buildbot at python.org Wed Jul 18 19:30:02 2007 From: buildbot at python.org (buildbot at python.org) Date: Wed, 18 Jul 2007 17:30:02 +0000 Subject: [Python-checkins] buildbot failure in amd64 XP trunk Message-ID: <20070718173002.E98D41E4006@bag.python.org> The Buildbot has detected a new failure of amd64 XP trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/amd64%2520XP%2520trunk/builds/53 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: georg.brandl,guido.van.rossum BUILD FAILED: failed failed slave lost sincerely, -The Buildbot From buildbot at python.org Wed Jul 18 19:44:13 2007 From: buildbot at python.org (buildbot at python.org) Date: Wed, 18 Jul 2007 17:44:13 +0000 Subject: [Python-checkins] buildbot warnings in x86 gentoo trunk Message-ID: <20070718174413.C90761E4006@bag.python.org> The Buildbot has detected a new failure of x86 gentoo trunk. Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520gentoo%2520trunk/builds/2313 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: georg.brandl,guido.van.rossum Build had warnings: warnings test Excerpt from the test logfile: 2 tests failed: test_unicode test_urllib2net ====================================================================== ERROR: test_literals (test.test_unicode.UnicodeTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_unicode.py", line 58, in test_literals self.assertRaises(UnicodeError, eval, 'u\'\\Ufffffffe\'') File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/unittest.py", line 329, in failUnlessRaises callableObj(*args, **kwargs) File "", line 1 SyntaxError: (unicode error) illegal Unicode character (, line 1) make: *** [buildbottest] Error 1 sincerely, -The Buildbot From buildbot at python.org Wed Jul 18 19:48:30 2007 From: buildbot at python.org (buildbot at python.org) Date: Wed, 18 Jul 2007 17:48:30 +0000 Subject: [Python-checkins] buildbot warnings in x86 W2k trunk Message-ID: <20070718174830.21D5D1E4006@bag.python.org> The Buildbot has detected a new failure of x86 
W2k trunk. Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520W2k%2520trunk/builds/405 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: georg.brandl,guido.van.rossum Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_unicode ====================================================================== ERROR: test_literals (test.test_unicode.UnicodeTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\trentm\data\buildbot\python-slave\trunk.mick-windows\build\lib\test\test_unicode.py", line 58, in test_literals self.assertRaises(UnicodeError, eval, 'u\'\\Ufffffffe\'') File "C:\trentm\data\buildbot\python-slave\trunk.mick-windows\build\lib\unittest.py", line 329, in failUnlessRaises callableObj(*args, **kwargs) File "", line 1 SyntaxError: (unicode error) illegal Unicode character (, line 1) sincerely, -The Buildbot From buildbot at python.org Wed Jul 18 20:04:18 2007 From: buildbot at python.org (buildbot at python.org) Date: Wed, 18 Jul 2007 18:04:18 +0000 Subject: [Python-checkins] buildbot warnings in sparc solaris10 gcc trunk Message-ID: <20070718180418.943471E4006@bag.python.org> The Buildbot has detected a new failure of sparc solaris10 gcc trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/sparc%2520solaris10%2520gcc%2520trunk/builds/2133 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: georg.brandl,guido.van.rossum Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_unicode ====================================================================== ERROR: test_literals (test.test_unicode.UnicodeTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/opt/users/buildbot/slave/trunk.loewis-sun/build/Lib/test/test_unicode.py", line 58, in test_literals self.assertRaises(UnicodeError, eval, 'u\'\\Ufffffffe\'') File "/opt/users/buildbot/slave/trunk.loewis-sun/build/Lib/unittest.py", line 329, in failUnlessRaises callableObj(*args, **kwargs) File "", line 1 SyntaxError: (unicode error) illegal Unicode character (, line 1) sincerely, -The Buildbot From buildbot at python.org Wed Jul 18 20:09:03 2007 From: buildbot at python.org (buildbot at python.org) Date: Wed, 18 Jul 2007 18:09:03 +0000 Subject: [Python-checkins] buildbot warnings in PPC64 Debian trunk Message-ID: <20070718180904.159F01E4006@bag.python.org> The Buildbot has detected a new failure of PPC64 Debian trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/PPC64%2520Debian%2520trunk/builds/58 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: georg.brandl,guido.van.rossum Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_unicode ====================================================================== ERROR: test_literals (test.test_unicode.UnicodeTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/pybot/buildarea64/trunk.klose-debian-ppc64/build/Lib/test/test_unicode.py", line 58, in test_literals self.assertRaises(UnicodeError, eval, 'u\'\\Ufffffffe\'') File "/home/pybot/buildarea64/trunk.klose-debian-ppc64/build/Lib/unittest.py", line 329, in failUnlessRaises callableObj(*args, **kwargs) File "", line 1 SyntaxError: (unicode error) illegal Unicode character (, line 1) make: *** [buildbottest] Error 1 sincerely, -The Buildbot From buildbot at python.org Wed Jul 18 20:09:34 2007 From: buildbot at python.org (buildbot at python.org) Date: Wed, 18 Jul 2007 18:09:34 +0000 Subject: [Python-checkins] buildbot warnings in g4 osx.4 trunk Message-ID: <20070718180934.B86031E4006@bag.python.org> The Buildbot has detected a new failure of g4 osx.4 trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/g4%2520osx.4%2520trunk/builds/2142 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: georg.brandl,guido.van.rossum Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_unicode ====================================================================== ERROR: test_literals (test.test_unicode.UnicodeTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/Users/buildslave/bb/trunk.psf-g4/build/Lib/test/test_unicode.py", line 58, in test_literals self.assertRaises(UnicodeError, eval, 'u\'\\Ufffffffe\'') File "/Users/buildslave/bb/trunk.psf-g4/build/Lib/unittest.py", line 329, in failUnlessRaises callableObj(*args, **kwargs) File "", line 1 SyntaxError: (unicode error) illegal Unicode character (, line 1) make: *** [buildbottest] Error 1 sincerely, -The Buildbot From buildbot at python.org Wed Jul 18 20:18:36 2007 From: buildbot at python.org (buildbot at python.org) Date: Wed, 18 Jul 2007 18:18:36 +0000 Subject: [Python-checkins] buildbot warnings in S-390 Debian trunk Message-ID: <20070718181836.DD3E21E4006@bag.python.org> The Buildbot has detected a new failure of S-390 Debian trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/S-390%2520Debian%2520trunk/builds/1048 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: georg.brandl,guido.van.rossum Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_unicode ====================================================================== ERROR: test_literals (test.test_unicode.UnicodeTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/pybot/buildarea/trunk.klose-debian-s390/build/Lib/test/test_unicode.py", line 58, in test_literals self.assertRaises(UnicodeError, eval, 'u\'\\Ufffffffe\'') File "/home/pybot/buildarea/trunk.klose-debian-s390/build/Lib/unittest.py", line 329, in failUnlessRaises callableObj(*args, **kwargs) File "<string>", line 1 SyntaxError: (unicode error) illegal Unicode character (<string>, line 1) make: *** [buildbottest] Error 1 sincerely, -The Buildbot From buildbot at python.org Wed Jul 18 20:25:03 2007 From: buildbot at python.org (buildbot at python.org) Date: Wed, 18 Jul 2007 18:25:03 +0000 Subject: [Python-checkins] buildbot warnings in ia64 Ubuntu trunk trunk Message-ID: <20070718182503.756261E4006@bag.python.org> The Buildbot has detected a new failure of ia64 Ubuntu trunk trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/ia64%2520Ubuntu%2520trunk%2520trunk/builds/756 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: georg.brandl,guido.van.rossum Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_unicode ====================================================================== ERROR: test_literals (test.test_unicode.UnicodeTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/pybot/buildarea/trunk.klose-debian-ia64/build/Lib/test/test_unicode.py", line 58, in test_literals self.assertRaises(UnicodeError, eval, 'u\'\\Ufffffffe\'') File "/home/pybot/buildarea/trunk.klose-debian-ia64/build/Lib/unittest.py", line 329, in failUnlessRaises callableObj(*args, **kwargs) File "<string>", line 1 SyntaxError: (unicode error) illegal Unicode character (<string>, line 1) make: *** [buildbottest] Error 1 sincerely, -The Buildbot From python-checkins at python.org Wed Jul 18 21:58:42 2007 From: python-checkins at python.org (kurt.kaiser) Date: Wed, 18 Jul 2007 21:58:42 +0200 (CEST) Subject: [Python-checkins] r56444 - python/trunk/Lib/test/test_unicode.py Message-ID: <20070718195842.EB6DA1E4006@bag.python.org> Author: kurt.kaiser Date: Wed Jul 18 21:58:42 2007 New Revision: 56444 Modified: python/trunk/Lib/test/test_unicode.py Log: Fix failing unicode test caused by change to ast.c at r56441 Modified: python/trunk/Lib/test/test_unicode.py ============================================================================== --- python/trunk/Lib/test/test_unicode.py (original) +++ python/trunk/Lib/test/test_unicode.py Wed Jul 18 21:58:42 2007 @@ -55,9 +55,9 @@ def test_literals(self): self.assertEqual(u'\xff', u'\u00ff') self.assertEqual(u'\uffff', u'\U0000ffff') - self.assertRaises(UnicodeError, eval, 'u\'\\Ufffffffe\'') - self.assertRaises(UnicodeError, eval, 'u\'\\Uffffffff\'') - 
self.assertRaises(UnicodeError, eval, 'u\'\\U%08x\'' % 0x110000) + self.assertRaises(SyntaxError, eval, 'u\'\\Ufffffffe\'') + self.assertRaises(SyntaxError, eval, 'u\'\\Uffffffff\'') + self.assertRaises(SyntaxError, eval, 'u\'\\U%08x\'' % 0x110000) def test_repr(self): if not sys.platform.startswith('java'): From nnorwitz at gmail.com Wed Jul 18 22:07:41 2007 From: nnorwitz at gmail.com (Neal Norwitz) Date: Wed, 18 Jul 2007 16:07:41 -0400 Subject: [Python-checkins] Python Regression Test Failures basics (1) Message-ID: <20070718200741.GA21484@python.psfb.org> test_grammar test_opcodes test_dict test_builtin test_exceptions test_types test_unittest test_doctest test_doctest2 test_MimeWriter test_StringIO test___all__ test___future__ test__locale test_aepack test_aepack skipped -- No module named aepack test_al test_al skipped -- No module named al test_anydbm test_applesingle test_applesingle skipped -- No module named macostools test_array test_ast test_asynchat test_asyncore test_atexit test_audioop test_augassign test_base64 test_bastion test_bigaddrspace test_bigmem test_binascii test_binhex test_binop test_bisect test_bool test_bsddb test_bsddb185 test_bsddb185 skipped -- No module named bsddb185 test_bsddb3 test_bsddb3 skipped -- Use of the `bsddb' resource not enabled test_bufio test_bz2 test_cProfile test_calendar test_call test_capi test_cd test_cd skipped -- No module named cd test_cfgparser test_cgi test_charmapcodec test_cl test_cl skipped -- No module named cl test_class test_cmath test_cmd_line test_code test_codeccallbacks test_codecencodings_cn test_codecencodings_hk test_codecencodings_jp test_codecencodings_kr test_codecencodings_tw test_codecmaps_cn test_codecmaps_cn skipped -- Use of the `urlfetch' resource not enabled test_codecmaps_hk test_codecmaps_hk skipped -- Use of the `urlfetch' resource not enabled test_codecmaps_jp test_codecmaps_jp skipped -- Use of the `urlfetch' resource not enabled test_codecmaps_kr test_codecmaps_kr skipped -- Use 
of the `urlfetch' resource not enabled test_codecmaps_tw test_codecmaps_tw skipped -- Use of the `urlfetch' resource not enabled test_codecs test_codeop test_coding test_coercion test_collections test_colorsys test_commands test_compare test_compile test_compiler test_complex test_complex_args test_contains test_contextlib test_cookie test_cookielib test_copy test_copy_reg test_cpickle test_crypt test_csv test_ctypes test_curses test_curses skipped -- Use of the `curses' resource not enabled test_datetime test_dbm test_decimal test_decorators test_defaultdict test_deque test_descr test_descrtut test_difflib test_dircache test_dis test_distutils test_dl test_dumbdbm test_dummy_thread test_dummy_threading test_email test_email_codecs test_email_renamed test_enumerate test_eof test_errno test_exception_variations test_extcall test_fcntl test_file test_filecmp test_fileinput test_float test_fnmatch test_fork1 test_format test_fpformat test_frozen test_ftplib test_funcattrs test_functools test_future test_gc test_gdbm test_generators test_genericpath test_genexps test_getargs test_getargs2 test_getopt test_gettext test_gl test_gl skipped -- No module named gl test_glob test_global test_grp test_gzip test_hash test_hashlib test_heapq test_hexoct test_hmac test_hotshot test_htmllib test_htmlparser test_httplib test_imageop test_imageop skipped -- No module named imgfile test_imaplib test_imgfile test_imgfile skipped -- No module named imgfile test_imp test_import test_importhooks test_index test_inspect test_ioctl test_ioctl skipped -- Unable to open /dev/tty test_isinstance test_iter test_iterlen test_itertools test_largefile test_linuxaudiodev test_linuxaudiodev skipped -- Use of the `audio' resource not enabled test_list test_locale test_logging test_long test_long_future test_longexp test_macostools test_macostools skipped -- No module named macostools test_macpath test_mailbox test_marshal test_math test_md5 test_mhlib test_mimetools test_mimetypes test_minidom 
test_mmap test_module test_modulefinder test_multibytecodec test_multibytecodec_support test_multifile test_mutants test_netrc test_new test_nis test_normalization test_normalization skipped -- Use of the `urlfetch' resource not enabled test_ntpath test_old_mailbox test_openpty test_operator test_optparse test_os test_ossaudiodev test_ossaudiodev skipped -- Use of the `audio' resource not enabled test_parser test_peepholer test_pep247 test_pep263 test_pep277 test_pep277 skipped -- test works only on NT+ test_pep292 test_pep352 test_pickle test_pickletools test_pkg test_pkgimport test_platform test_plistlib test_plistlib skipped -- No module named plistlib test_poll test_popen [7328 refs] [7328 refs] [7328 refs] test_popen2 test_poplib test_posix test_posixpath test_pow test_pprint test_profile test_profilehooks test_pty test_pwd test_pyclbr test_pyexpat test_queue test_quopri [7703 refs] [7703 refs] test_random test_re test_repr test_resource test_rfc822 test_richcmp test_robotparser test_runpy test_sax test_scope test_scriptpackages test_scriptpackages skipped -- No module named aetools test_select test_set test_sets test_sgmllib test_sha test_shelve test_shlex test_shutil test_signal test_site test_slice test_smtplib test test_smtplib failed -- Traceback (most recent call last): File "/tmp/python-test/local/lib/python2.6/test/test_smtplib.py", line 59, in testTimeoutNone smtp = smtplib.SMTP("localhost", 9091, timeout=None) File "/tmp/python-test/local/lib/python2.6/smtplib.py", line 248, in __init__ (code, msg) = self.connect(host, port) File "/tmp/python-test/local/lib/python2.6/smtplib.py", line 304, in connect self.sock = self._get_socket(host, port, self.timeout) File "/tmp/python-test/local/lib/python2.6/smtplib.py", line 282, in _get_socket return socket.create_connection((port, host), timeout) File "/tmp/python-test/local/lib/python2.6/socket.py", line 443, in create_connection raise error, msg error: (111, 'Connection refused') test_socket test_socket_ssl 
test_socketserver test_socketserver skipped -- Use of the `network' resource not enabled test_softspace test_sort test_sqlite test_startfile test_startfile skipped -- cannot import name startfile test_str test_strftime test_string test_stringprep test_strop test_strptime test_struct test_structmembers test_structseq test_subprocess [7323 refs] [7321 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7321 refs] [8869 refs] [7539 refs] [7324 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] [7323 refs] . [7323 refs] [7323 refs] this bit of output is from a test of stdout in a different process ... [7323 refs] [7323 refs] [7539 refs] test_sunaudiodev test_sunaudiodev skipped -- No module named sunaudiodev test_sundry test_symtable test_syntax test_sys [7323 refs] [7323 refs] test_tarfile test_tcl test_tcl skipped -- No module named _tkinter test_telnetlib test_tempfile [7327 refs] test_textwrap test_thread test_threaded_import test_threadedtempfile test_threading test_threading_local test_threadsignals test_time test_timeout test_timeout skipped -- Use of the `network' resource not enabled test_tokenize test_trace test_traceback test_transformer test_tuple test_ucn test_unary test_unicode test_unicode_file test_unicode_file skipped -- No Unicode filesystem semantics on this platform. test_unicodedata test_univnewlines test_unpack test_urllib test_urllib2 test_urllib2_localnet test_urllib2net test_urllib2net skipped -- Use of the `network' resource not enabled test_urllibnet test_urllibnet skipped -- Use of the `network' resource not enabled test_urlparse test_userdict test_userlist test_userstring test_uu test_uuid WARNING: uuid.getnode is unreliable on many platforms. It is disabled until the code and/or test can be fixed properly. WARNING: uuid._ifconfig_getnode is unreliable on many platforms. It is disabled until the code and/or test can be fixed properly. 
WARNING: uuid._unixdll_getnode is unreliable on many platforms. It is disabled until the code and/or test can be fixed properly. test_wait3 test_wait4 test_warnings test_wave test_weakref test_whichdb test_winreg test_winreg skipped -- No module named _winreg test_winsound test_winsound skipped -- No module named winsound test_with test_wsgiref test_xdrlib test_xml_etree test_xml_etree_c test_xmllib test_xmlrpc test_xpickle test_xrange test_zipfile test_zipfile64 test_zipfile64 skipped -- test requires loads of disk-space bytes and a long time to run test_zipimport test_zlib 291 tests OK. 1 test failed: test_smtplib 35 tests skipped: test_aepack test_al test_applesingle test_bsddb185 test_bsddb3 test_cd test_cl test_codecmaps_cn test_codecmaps_hk test_codecmaps_jp test_codecmaps_kr test_codecmaps_tw test_curses test_gl test_imageop test_imgfile test_ioctl test_linuxaudiodev test_macostools test_normalization test_ossaudiodev test_pep277 test_plistlib test_scriptpackages test_socketserver test_startfile test_sunaudiodev test_tcl test_timeout test_unicode_file test_urllib2net test_urllibnet test_winreg test_winsound test_zipfile64 1 skip unexpected on linux2: test_ioctl [486718 refs] From python-checkins at python.org Wed Jul 18 22:22:55 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Wed, 18 Jul 2007 22:22:55 +0200 (CEST) Subject: [Python-checkins] r56445 - python/branches/cpy_merge/Lib/test/test_memoryio.py Message-ID: <20070718202255.813F51E4006@bag.python.org> Author: alexandre.vassalotti Date: Wed Jul 18 22:22:55 2007 New Revision: 56445 Modified: python/branches/cpy_merge/Lib/test/test_memoryio.py Log: Test if getvalue() result is constant after a read(). Test the flush method. Test subclassing support. Test if over-seeking with BytesIO inserts null-bytes. 
Modified: python/branches/cpy_merge/Lib/test/test_memoryio.py ============================================================================== --- python/branches/cpy_merge/Lib/test/test_memoryio.py (original) +++ python/branches/cpy_merge/Lib/test/test_memoryio.py Wed Jul 18 22:22:55 2007 @@ -147,6 +147,8 @@ memio = self.ioclass(buf) self.assertEqual(memio.getvalue(), buf) + memio.read() + self.assertEqual(memio.getvalue(), buf) memio = self.ioclass(buf * 1000) self.assertEqual(memio.getvalue()[-3:], "890") memio.close() @@ -169,14 +171,22 @@ buf = self.buftype("1234567890") memio = self.ioclass(buf) - self.assertEqual(0, memio.tell()) + self.assertEqual(memio.tell(), 0) memio.seek(5) - self.assertEqual(5, memio.tell()) + self.assertEqual(memio.tell(), 5) memio.seek(10000) - self.assertEqual(10000, memio.tell()) + self.assertEqual(memio.tell(), 10000) memio.close() self.assertRaises(ValueError, memio.tell) + def test_flush(self): + buf = self.buftype("1234567890") + memio = self.ioclass(buf) + + self.assertEqual(memio.flush(), None) + memio.close() + self.assertRaises(ValueError, memio.flush) + def test_flags(self): memio = self.ioclass() @@ -192,6 +202,15 @@ self.assertRaises(ValueError, memio.isatty) self.assertEqual(memio.closed, True) + def test_subclassing(self): + buf = self.buftype("1234567890") + def test(): + class MemIO(self.ioclass): + pass + m = MemIO(buf) + return m.getvalue() + self.assertEqual(test(), buf) + class PyBytesIOTest(MemoryTestMixin, unittest.TestCase): buftype = bytes @@ -220,6 +239,14 @@ memio.close() self.assertRaises(ValueError, memio.readinto, b) + def test_overseek(self): + buf = self.buftype("1234567890") + memio = self.ioclass() + + memio.seek(2) + memio.write(buf) + self.assertEqual(memio.getvalue(), '\0\0' + buf) + class PyStringIOTest(MemoryTestMixin, unittest.TestCase): buftype = unicode From buildbot at python.org Wed Jul 18 22:43:31 2007 From: buildbot at python.org (buildbot at python.org) Date: Wed, 18 Jul 2007 20:43:31 
+0000 Subject: [Python-checkins] buildbot warnings in amd64 XP trunk Message-ID: <20070718204331.E877B1E4006@bag.python.org> The Buildbot has detected a new failure of amd64 XP trunk. Full details are available at: http://www.python.org/dev/buildbot/all/amd64%2520XP%2520trunk/builds/54 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: kurt.kaiser Build had warnings: warnings test Excerpt from the test logfile: 3 tests failed: test_asyncore test_ctypes test_winsound ====================================================================== ERROR: test_send (test.test_asyncore.DispatcherWithSendTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_asyncore.py", line 350, in test_send d.send('\n') File "C:\buildbot\trunk.heller-windows-amd64\build\lib\asyncore.py", line 467, in send self.initiate_send() File "C:\buildbot\trunk.heller-windows-amd64\build\lib\asyncore.py", line 454, in initiate_send num_sent = dispatcher.send(self, self.out_buffer[:512]) File "C:\buildbot\trunk.heller-windows-amd64\build\lib\asyncore.py", line 331, in send result = self.socket.send(data) error: (10053, 'Software caused connection abort') ====================================================================== ERROR: test_send (test.test_asyncore.DispatcherWithSendTests_UsePoll) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_asyncore.py", line 350, in test_send d.send('\n') File "C:\buildbot\trunk.heller-windows-amd64\build\lib\asyncore.py", line 467, in send self.initiate_send() File "C:\buildbot\trunk.heller-windows-amd64\build\lib\asyncore.py", line 454, in initiate_send num_sent = dispatcher.send(self, self.out_buffer[:512]) File 
"C:\buildbot\trunk.heller-windows-amd64\build\lib\asyncore.py", line 331, in send result = self.socket.send(data) error: (10053, 'Software caused connection abort') ====================================================================== ERROR: test_extremes (test.test_winsound.BeepTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 18, in test_extremes winsound.Beep(37, 75) RuntimeError: Failed to beep ====================================================================== ERROR: test_increasingfrequency (test.test_winsound.BeepTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 23, in test_increasingfrequency winsound.Beep(i, 75) RuntimeError: Failed to beep ====================================================================== ERROR: test_alias_asterisk (test.test_winsound.PlaySoundTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 64, in test_alias_asterisk winsound.PlaySound('SystemAsterisk', winsound.SND_ALIAS) RuntimeError: Failed to play sound ====================================================================== ERROR: test_alias_exclamation (test.test_winsound.PlaySoundTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 74, in test_alias_exclamation winsound.PlaySound('SystemExclamation', winsound.SND_ALIAS) RuntimeError: Failed to play sound ====================================================================== ERROR: test_alias_exit (test.test_winsound.PlaySoundTest) 
---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 84, in test_alias_exit winsound.PlaySound('SystemExit', winsound.SND_ALIAS) RuntimeError: Failed to play sound ====================================================================== ERROR: test_alias_hand (test.test_winsound.PlaySoundTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 94, in test_alias_hand winsound.PlaySound('SystemHand', winsound.SND_ALIAS) RuntimeError: Failed to play sound ====================================================================== ERROR: test_alias_question (test.test_winsound.PlaySoundTest) ---------------------------------------------------------------------- Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_winsound.py", line 104, in test_alias_question winsound.PlaySound('SystemQuestion', winsound.SND_ALIAS) RuntimeError: Failed to play sound Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\threading.py", line 465, in __bootstrap self.run() File "C:\buildbot\trunk.heller-windows-amd64\build\lib\threading.py", line 445, in run self.__target(*self.__args, **self.__kwargs) File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_asyncore.py", line 68, in capture_server data = conn.recv(10) error: (10035, 'The socket operation could not complete without blocking') Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\threading.py", line 465, in __bootstrap self.run() File "C:\buildbot\trunk.heller-windows-amd64\build\lib\threading.py", line 445, in run self.__target(*self.__args, **self.__kwargs) File 
"C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_asyncore.py", line 68, in capture_server data = conn.recv(10) error: (10035, 'The socket operation could not complete without blocking') Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\threading.py", line 465, in __bootstrap self.run() File "C:\buildbot\trunk.heller-windows-amd64\build\lib\threading.py", line 445, in run self.__target(*self.__args, **self.__kwargs) File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_asyncore.py", line 68, in capture_server data = conn.recv(10) error: (10035, 'The socket operation could not complete without blocking') Traceback (most recent call last): File "C:\buildbot\trunk.heller-windows-amd64\build\lib\threading.py", line 465, in __bootstrap self.run() File "C:\buildbot\trunk.heller-windows-amd64\build\lib\threading.py", line 445, in run self.__target(*self.__args, **self.__kwargs) File "C:\buildbot\trunk.heller-windows-amd64\build\lib\test\test_asyncore.py", line 68, in capture_server data = conn.recv(10) error: (10035, 'The socket operation could not complete without blocking') sincerely, -The Buildbot From python-checkins at python.org Thu Jul 19 00:31:31 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Thu, 19 Jul 2007 00:31:31 +0200 (CEST) Subject: [Python-checkins] r56450 - python/branches/cpy_merge/Modules/_picklemodule.c Message-ID: <20070718223131.4628A1E4006@bag.python.org> Author: alexandre.vassalotti Date: Thu Jul 19 00:31:30 2007 New Revision: 56450 Modified: python/branches/cpy_merge/Modules/_picklemodule.c Log: Clean up the global namespace. Convert the #define for opcodes into enums. Remove unused variable: * __getinitargs___str * __getstate___str * copy_reg_str Replace UnpickleableError by PicklingError. Remove Pickler_get_error(). Use svn special revision keyword for __version__. Remove temporary dictionary in module initialization. 
Modified: python/branches/cpy_merge/Modules/_picklemodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_picklemodule.c (original) +++ python/branches/cpy_merge/Modules/_picklemodule.c Thu Jul 19 00:31:30 2007 @@ -13,71 +13,77 @@ * Pickle opcodes. These must be kept in synch with pickle.py. Extensive * docs are in pickletools.py. */ -#define MARK '(' -#define STOP '.' -#define POP '0' -#define POP_MARK '1' -#define DUP '2' -#define FLOAT 'F' -#define BINFLOAT 'G' -#define INT 'I' -#define BININT 'J' -#define BININT1 'K' -#define LONG 'L' -#define BININT2 'M' -#define NONE 'N' -#define PERSID 'P' -#define BINPERSID 'Q' -#define REDUCE 'R' -#define STRING 'S' -#define BINSTRING 'T' -#define SHORT_BINSTRING 'U' -#define UNICODE 'V' -#define BINUNICODE 'X' -#define APPEND 'a' -#define BUILD 'b' -#define GLOBAL 'c' -#define DICT 'd' -#define EMPTY_DICT '}' -#define APPENDS 'e' -#define GET 'g' -#define BINGET 'h' -#define INST 'i' -#define LONG_BINGET 'j' -#define LIST 'l' -#define EMPTY_LIST ']' -#define OBJ 'o' -#define PUT 'p' -#define BINPUT 'q' -#define LONG_BINPUT 'r' -#define SETITEM 's' -#define TUPLE 't' -#define EMPTY_TUPLE ')' -#define SETITEMS 'u' +enum opcodes { + MARK = '(', + STOP = '.', + POP = '0', + POP_MARK = '1', + DUP = '2', + FLOAT = 'F', + BINFLOAT = 'G', + INT = 'I', + BININT = 'J', + BININT1 = 'K', + LONG = 'L', + BININT2 = 'M', + NONE = 'N', + PERSID = 'P', + BINPERSID = 'Q', + REDUCE = 'R', + STRING = 'S', + BINSTRING = 'T', + SHORT_BINSTRING = 'U', + UNICODE = 'V', + BINUNICODE = 'X', + APPEND = 'a', + BUILD = 'b', + GLOBAL = 'c', + DICT = 'd', + EMPTY_DICT = '}', + APPENDS = 'e', + GET = 'g', + BINGET = 'h', + INST = 'i', + LONG_BINGET = 'j', + LIST = 'l', + EMPTY_LIST = ']', + OBJ = 'o', + PUT = 'p', + BINPUT = 'q', + LONG_BINPUT = 'r', + SETITEM = 's', + TUPLE = 't', + EMPTY_TUPLE = ')', + SETITEMS = 'u' +}; /* Protocol 2. 
*/ -#define PROTO '\x80' /* identify pickle protocol */ -#define NEWOBJ '\x81' /* build object by applying cls.__new__ to argtuple */ -#define EXT1 '\x82' /* push object from extension registry; 1-byte index */ -#define EXT2 '\x83' /* ditto, but 2-byte index */ -#define EXT4 '\x84' /* ditto, but 4-byte index */ -#define TUPLE1 '\x85' /* build 1-tuple from stack top */ -#define TUPLE2 '\x86' /* build 2-tuple from two topmost stack items */ -#define TUPLE3 '\x87' /* build 3-tuple from three topmost stack items */ -#define NEWTRUE '\x88' /* push True */ -#define NEWFALSE '\x89' /* push False */ -#define LONG1 '\x8a' /* push long from < 256 bytes */ -#define LONG4 '\x8b' /* push really big long */ +enum { + PROTO = '\x80', /* identify pickle protocol */ + NEWOBJ, /* build object by applying cls.__new__ to argtuple */ + EXT1, /* push object from extension registry; 1-byte index */ + EXT2, /* ditto but 2-byte index */ + EXT4, /* ditto but 4-byte index */ + TUPLE1, /* build 1-tuple from stack top */ + TUPLE2, /* build 2-tuple from two topmost stack items */ + TUPLE3, /* build 3-tuple from three topmost stack items */ + NEWTRUE, /* push True */ + NEWFALSE, /* push False */ + LONG1, /* push long from < 256 bytes */ + LONG4, /* push really big long */ +}; -/* There aren't opcodes -- they're ways to pickle bools before protocol 2, +/* These aren't opcodes -- they're ways to pickle bools before protocol 2 * so that unpicklers written before bools were introduced unpickle them * as ints, but unpicklers after can recognize that bools were intended. * Note that protocol 2 added direct ways to pickle bools. */ #undef TRUE -#define TRUE "I01\n" +#define TRUE "I01\n" #undef FALSE -#define FALSE "I00\n" +#define FALSE "I00\n" + +static const char MARKv = MARK; /* Keep in synch with pickle.Pickler._BATCHSIZE. This is how many elements * batch_list/dict() pumps out before doing APPENDS/SETITEMS. 
Nothing will @@ -86,11 +92,8 @@ */ #define BATCHSIZE 1000 -static char MARKv = MARK; - static PyObject *PickleError; static PyObject *PicklingError; -static PyObject *UnpickleableError; static PyObject *UnpicklingError; /* As the name says, an empty tuple. */ @@ -116,9 +119,7 @@ static PyObject \ *__class___str, - *__getinitargs___str, *__dict___str, - *__getstate___str, *__setstate___str, *__name___str, *__reduce___str, @@ -128,7 +129,6 @@ *read_str, *readline_str, *__main___str, - *copy_reg_str, *dispatch_table_str; /************************************************************************* @@ -2276,7 +2276,8 @@ t = PyObject_Call(__reduce__, empty_tuple, NULL); } else { - PyErr_SetObject(UnpickleableError, args); + pickle_ErrFormat(PicklingError, "Can't pickle '%s' object: %r", + "sO", type->tp_name, args); goto finally; } } @@ -2583,14 +2584,6 @@ return 0; } -static PyObject * -Pickler_get_error(PicklerObject *p) -{ - /* why is this an attribute on the Pickler? */ - Py_INCREF(PicklingError); - return PicklingError; -} - static PyMemberDef Pickler_members[] = { {"bin", T_INT, offsetof(PicklerObject, bin)}, {"fast", T_INT, offsetof(PicklerObject, fast)}, @@ -2602,7 +2595,6 @@ (setter) Pickler_set_pers_func}, {"inst_persistent_id", NULL, (setter) Pickler_set_inst_pers_func}, {"memo", (getter) Pickler_get_memo, (setter) Pickler_set_memo}, - {"PicklingError", (getter) Pickler_get_error, NULL}, {NULL} }; @@ -4665,9 +4657,7 @@ return -1; INIT_STR(__class__); - INIT_STR(__getinitargs__); INIT_STR(__dict__); - INIT_STR(__getstate__); INIT_STR(__setstate__); INIT_STR(__name__); INIT_STR(__main__); @@ -4677,7 +4667,6 @@ INIT_STR(append); INIT_STR(read); INIT_STR(readline); - INIT_STR(copy_reg); INIT_STR(dispatch_table); if (!(copy_reg = PyImport_ImportModule("copy_reg"))) @@ -4725,7 +4714,7 @@ if (!(t = PyDict_New())) return -1; if (!(r = PyRun_String("def __str__(self):\n" - " return self.args and ('%s' % self.args[0]) or '(what)'\n", + " return self.args and ('%s' % 
self.args[0]) or ''\n", Py_file_input, module_dict, t))) return -1; Py_DECREF(r); @@ -4745,16 +4734,11 @@ return -1; if (!(r = PyRun_String("def __str__(self):\n" " a=self.args\n" - " a=a and type(a[0]) or '(what)'\n" + " a=a and type(a[0]) or ''\n" " return 'Cannot pickle %s objects' % a\n", Py_file_input, module_dict, t))) return -1; Py_DECREF(r); - - if (!(UnpickleableError = PyErr_NewException("pickle.UnpickleableError", - PicklingError, t))) - return -1; - Py_DECREF(t); if (!(UnpicklingError = PyErr_NewException("pickle.UnpicklingError", @@ -4771,19 +4755,15 @@ UnpicklingError) < 0) return -1; - if (PyDict_SetItemString(module_dict, "UnpickleableError", - UnpickleableError) < 0) - return -1; - return 0; } PyMODINIT_FUNC init_pickle(void) { - PyObject *m, *d, *di, *v, *k; + PyObject *m, *d, *v; Py_ssize_t i; - char *rev = "1.71"; /* XXX when does this change? */ + char *rev = "$Revision$"; PyObject *format_version; PyObject *compatible_formats; @@ -4791,15 +4771,6 @@ Unpickler_Type.ob_type = &PyType_Type; PdataType.ob_type = &PyType_Type; - /* Initialize some pieces. We need to do this before module creation, - * so we're forced to use a temporary dictionary. :( - */ - di = PyDict_New(); - if (!di) - return; - if (init_stuff(di) < 0) - return; - /* Create the module and add the functions */ m = Py_InitModule3("_pickle", NULL, pickle_module_documentation); if (m == NULL) @@ -4814,14 +4785,8 @@ PyDict_SetItemString(d, "__version__", v); Py_XDECREF(v); - /* Copy data from di. Waaa. 
*/ - for (i = 0; PyDict_Next(di, &i, &k, &v);) { - if (PyObject_SetItem(d, k, v) < 0) { - Py_DECREF(di); - return; - } - } - Py_DECREF(di); + if (init_stuff(d) < 0) + return; i = PyModule_AddIntConstant(m, "HIGHEST_PROTOCOL", HIGHEST_PROTOCOL); if (i < 0) From python-checkins at python.org Thu Jul 19 00:36:53 2007 From: python-checkins at python.org (georg.brandl) Date: Thu, 19 Jul 2007 00:36:53 +0200 (CEST) Subject: [Python-checkins] r56451 - python/trunk/Doc/lib/libwave.tex Message-ID: <20070718223653.DBD7A1E4006@bag.python.org> Author: georg.brandl Date: Thu Jul 19 00:36:53 2007 New Revision: 56451 Modified: python/trunk/Doc/lib/libwave.tex Log: Add description for wave.setcomptype() values Modified: python/trunk/Doc/lib/libwave.tex ============================================================================== --- python/trunk/Doc/lib/libwave.tex (original) +++ python/trunk/Doc/lib/libwave.tex Thu Jul 19 00:36:53 2007 @@ -142,6 +142,8 @@ \begin{methoddesc}[Wave_write]{setcomptype}{type, name} Set the compression type and description. +At the moment, only compression type \samp{NONE} is supported, +meaning no compression. \end{methoddesc} \begin{methoddesc}[Wave_write]{setparams}{tuple} From python-checkins at python.org Thu Jul 19 00:36:56 2007 From: python-checkins at python.org (georg.brandl) Date: Thu, 19 Jul 2007 00:36:56 +0200 (CEST) Subject: [Python-checkins] r56452 - python/branches/release25-maint/Doc/lib/libwave.tex Message-ID: <20070718223656.0F12D1E4011@bag.python.org> Author: georg.brandl Date: Thu Jul 19 00:36:55 2007 New Revision: 56452 Modified: python/branches/release25-maint/Doc/lib/libwave.tex Log: Add description for wave.setcomptype() values (backport from rev. 
56451) Modified: python/branches/release25-maint/Doc/lib/libwave.tex ============================================================================== --- python/branches/release25-maint/Doc/lib/libwave.tex (original) +++ python/branches/release25-maint/Doc/lib/libwave.tex Thu Jul 19 00:36:55 2007 @@ -142,6 +142,8 @@ \begin{methoddesc}[Wave_write]{setcomptype}{type, name} Set the compression type and description. +At the moment, only compression type \samp{NONE} is supported, +meaning no compression. \end{methoddesc} \begin{methoddesc}[Wave_write]{setparams}{tuple} From python-checkins at python.org Thu Jul 19 01:11:31 2007 From: python-checkins at python.org (guido.van.rossum) Date: Thu, 19 Jul 2007 01:11:31 +0200 (CEST) Subject: [Python-checkins] r56453 - peps/trunk/pep-0000.txt peps/trunk/pep-3136.txt Message-ID: <20070718231131.93DDA1E4006@bag.python.org> Author: guido.van.rossum Date: Thu Jul 19 01:11:31 2007 New Revision: 56453 Modified: peps/trunk/pep-0000.txt peps/trunk/pep-3136.txt Log: Reject PEP 3136. 
Modified: peps/trunk/pep-0000.txt ============================================================================== --- peps/trunk/pep-0000.txt (original) +++ peps/trunk/pep-0000.txt Thu Jul 19 01:11:31 2007 @@ -103,7 +103,6 @@ S 3118 Revising the buffer protocol Oliphant, Banks S 3134 Exception Chaining and Embedded Tracebacks Yee S 3135 New Super Spealman, Delaney - S 3136 Labeled break and continue Chisholm S 3141 A Type Hierarchy for Numbers Yasskin Finished PEPs (done, implemented in Subversion) @@ -271,6 +270,7 @@ SR 3128 BList: A Faster List-like Type Stutzbach SR 3130 Access to Current Module/Class/Function Jewett SR 3133 Introducing Roles Winter + SR 3136 Labeled break and continue Chisholm Numerical Index @@ -508,7 +508,7 @@ SR 3133 Introducing Roles Winter S 3134 Exception Chaining and Embedded Tracebacks Yee S 3135 New Super Spealman, Delaney - S 3136 Labeled break and continue Chisholm + SR 3136 Labeled break and continue Chisholm S 3141 A Type Hierarchy for Numbers Yasskin Modified: peps/trunk/pep-3136.txt ============================================================================== --- peps/trunk/pep-3136.txt (original) +++ peps/trunk/pep-3136.txt Thu Jul 19 01:11:31 2007 @@ -3,7 +3,7 @@ Version: $Revision$ Last-Modified: $Date$ Author: Matt Chisholm -Status: Draft +Status: Rejected Type: Standards Track Content-Type: text/x-rst Created: 30-Jun-2007 @@ -11,6 +11,14 @@ Post-History: +Rejection Notice +================ + +This PEP is rejected. +See http://mail.python.org/pipermail/python-3000/2007-July/008663.html. 
+ + + Abstract ======== From python-checkins at python.org Thu Jul 19 15:04:38 2007 From: python-checkins at python.org (walter.doerwald) Date: Thu, 19 Jul 2007 15:04:38 +0200 (CEST) Subject: [Python-checkins] r56456 - python/trunk/Modules/_codecsmodule.c Message-ID: <20070719130438.914621E400A@bag.python.org> Author: walter.doerwald Date: Thu Jul 19 15:04:38 2007 New Revision: 56456 Modified: python/trunk/Modules/_codecsmodule.c Log: Document that codecs.lookup() returns a CodecInfo object. (fixes SF bug #1754453). Modified: python/trunk/Modules/_codecsmodule.c ============================================================================== --- python/trunk/Modules/_codecsmodule.c (original) +++ python/trunk/Modules/_codecsmodule.c Thu Jul 19 15:04:38 2007 @@ -10,7 +10,7 @@ register(search_function) -> None - lookup(encoding) -> (encoder, decoder, stream_reader, stream_writer) + lookup(encoding) -> CodecInfo object The builtin Unicode codecs use the following interface: @@ -45,7 +45,8 @@ \n\ Register a codec search function. 
Search functions are expected to take\n\ one argument, the encoding name in all lower case letters, and return\n\ -a tuple of functions (encoder, decoder, stream_reader, stream_writer)."); +a tuple of functions (encoder, decoder, stream_reader, stream_writer)\n\ +(or a CodecInfo object)."); static PyObject *codec_register(PyObject *self, PyObject *search_function) @@ -57,10 +58,10 @@ } PyDoc_STRVAR(lookup__doc__, -"lookup(encoding) -> (encoder, decoder, stream_reader, stream_writer)\n\ +"lookup(encoding) -> CodecInfo\n\ \n\ Looks up a codec tuple in the Python codec registry and returns\n\ -a tuple of functions."); +a tuple of function (or a CodecInfo object)."); static PyObject *codec_lookup(PyObject *self, PyObject *args) From buildbot at python.org Thu Jul 19 15:33:16 2007 From: buildbot at python.org (buildbot at python.org) Date: Thu, 19 Jul 2007 13:33:16 +0000 Subject: [Python-checkins] buildbot warnings in x86 W2k trunk Message-ID: <20070719133316.6702D1E400A@bag.python.org> The Buildbot has detected a new failure of x86 W2k trunk. Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520W2k%2520trunk/builds/407 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: georg.brandl,walter.doerwald Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_socket_ssl sincerely, -The Buildbot From buildbot at python.org Thu Jul 19 15:35:01 2007 From: buildbot at python.org (buildbot at python.org) Date: Thu, 19 Jul 2007 13:35:01 +0000 Subject: [Python-checkins] buildbot warnings in x86 gentoo trunk Message-ID: <20070719133501.A05A31E400A@bag.python.org> The Buildbot has detected a new failure of x86 gentoo trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520gentoo%2520trunk/builds/2316 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: georg.brandl,walter.doerwald Build had warnings: warnings test Excerpt from the test logfile: Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/threading.py", line 445, in run self.__target(*self.__args, **self.__kwargs) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/bsddb/test/test_thread.py", line 281, in readerThread rec = dbutils.DeadlockWrap(c.next, max_retries=10) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/bsddb/dbutils.py", line 62, in DeadlockWrap return function(*_args, **_kwargs) DBLockDeadlockError: (-30996, 'DB_LOCK_DEADLOCK: Locker killed to resolve a deadlock') Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/threading.py", line 445, in run self.__target(*self.__args, **self.__kwargs) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/bsddb/test/test_thread.py", line 281, in readerThread rec = dbutils.DeadlockWrap(c.next, max_retries=10) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/bsddb/dbutils.py", line 62, in DeadlockWrap return function(*_args, **_kwargs) DBLockDeadlockError: (-30996, 'DB_LOCK_DEADLOCK: Locker killed to resolve a deadlock') Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/threading.py", line 445, in run self.__target(*self.__args, **self.__kwargs) File 
"/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/bsddb/test/test_thread.py", line 281, in readerThread rec = dbutils.DeadlockWrap(c.next, max_retries=10) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/bsddb/dbutils.py", line 62, in DeadlockWrap return function(*_args, **_kwargs) DBLockDeadlockError: (-30996, 'DB_LOCK_DEADLOCK: Locker killed to resolve a deadlock') Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/threading.py", line 445, in run self.__target(*self.__args, **self.__kwargs) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/bsddb/test/test_thread.py", line 281, in readerThread rec = dbutils.DeadlockWrap(c.next, max_retries=10) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/bsddb/dbutils.py", line 62, in DeadlockWrap return function(*_args, **_kwargs) DBLockDeadlockError: (-30996, 'DB_LOCK_DEADLOCK: Locker killed to resolve a deadlock') 1 test failed: test_ftplib make: *** [buildbottest] Error 1 sincerely, -The Buildbot From python-checkins at python.org Thu Jul 19 19:06:47 2007 From: python-checkins at python.org (guido.van.rossum) Date: Thu, 19 Jul 2007 19:06:47 +0200 (CEST) Subject: [Python-checkins] r56458 - peps/trunk/pep-0000.txt peps/trunk/pep-3112.txt Message-ID: <20070719170647.2AF241E4013@bag.python.org> Author: guido.van.rossum Date: Thu Jul 19 19:06:46 2007 New Revision: 56458 Modified: peps/trunk/pep-0000.txt peps/trunk/pep-3112.txt Log: Accept PEP 3112. I don't see anything left over. 
Modified: peps/trunk/pep-0000.txt ============================================================================== --- peps/trunk/pep-0000.txt (original) +++ peps/trunk/pep-0000.txt Thu Jul 19 19:06:46 2007 @@ -77,7 +77,6 @@ SA 358 The "bytes" Object Schemenauer, GvR SA 3106 Revamping dict.keys(), .values() & .items() GvR SA 3109 Raising Exceptions in Python 3000 Winter - SA 3112 Bytes literals in Python 3000 Orendorff SA 3115 Metaclasses in Python 3000 Talin SA 3119 Introducing Abstract Base Classes GvR, Talin SA 3120 Using UTF-8 as the default source encoding von L?wis @@ -173,6 +172,7 @@ SF 3107 Function Annotations Winter, Lownds SF 3110 Catching Exceptions in Python 3000 Winter SF 3111 Simple input built-in in Python 3000 Roberge + SF 3112 Bytes literals in Python 3000 Orendorff SF 3113 Removal of Tuple Parameter Unpacking Cannon SF 3114 Renaming iterator.next() to .__next__() Yee SF 3129 Class Decorators Winter @@ -484,7 +484,7 @@ SA 3109 Raising Exceptions in Python 3000 Winter SF 3110 Catching Exceptions in Python 3000 Winter SF 3111 Simple input built-in in Python 3000 Roberge - SA 3112 Bytes literals in Python 3000 Orendorff + SF 3112 Bytes literals in Python 3000 Orendorff SF 3113 Removal of Tuple Parameter Unpacking Cannon SF 3114 Renaming iterator.next() to .__next__() Yee SA 3115 Metaclasses in Python 3000 Talin Modified: peps/trunk/pep-3112.txt ============================================================================== --- peps/trunk/pep-3112.txt (original) +++ peps/trunk/pep-3112.txt Thu Jul 19 19:06:46 2007 @@ -3,7 +3,7 @@ Version: $Revision$ Last-Modified: $Date$ Author: Jason Orendorff -Status: Accepted +Status: Final Type: Standards Track Content-Type: text/x-rst Requires: 358 From python-checkins at python.org Thu Jul 19 19:10:10 2007 From: python-checkins at python.org (guido.van.rossum) Date: Thu, 19 Jul 2007 19:10:10 +0200 (CEST) Subject: [Python-checkins] r56459 - peps/trunk/pep-0000.txt peps/trunk/pep-3127.txt Message-ID: 
<20070719171010.605091E4010@bag.python.org> Author: guido.van.rossum Date: Thu Jul 19 19:10:10 2007 New Revision: 56459 Modified: peps/trunk/pep-0000.txt peps/trunk/pep-3127.txt Log: Accept 3127. There's nothing left to do. Modified: peps/trunk/pep-0000.txt ============================================================================== --- peps/trunk/pep-0000.txt (original) +++ peps/trunk/pep-0000.txt Thu Jul 19 19:10:10 2007 @@ -82,7 +82,6 @@ SA 3120 Using UTF-8 as the default source encoding von L?wis SA 3121 Extension Module Initialization & Finalization von L?wis SA 3123 Making PyObject_HEAD conform to standard C von L?wis - SA 3127 Integer Literal Support and Syntax Maupin SA 3131 Supporting Non-ASCII Identifiers von L?wis Open PEPs (under consideration) @@ -175,6 +174,7 @@ SF 3112 Bytes literals in Python 3000 Orendorff SF 3113 Removal of Tuple Parameter Unpacking Cannon SF 3114 Renaming iterator.next() to .__next__() Yee + SF 3127 Integer Literal Support and Syntax Maupin SF 3129 Class Decorators Winter SF 3132 Extended Iterable Unpacking Brandl @@ -499,7 +499,7 @@ SD 3124 Overloading, Generic Functions, Interfaces Eby SR 3125 Remove Backslash Continuation Jewett SR 3126 Remove Implicit String Concatenation Jewett - SA 3127 Integer Literal Support and Syntax Maupin + SF 3127 Integer Literal Support and Syntax Maupin SR 3128 BList: A Faster List-like Type Stutzbach SF 3129 Class Decorators Winter SR 3130 Access to Current Module/Class/Function Jewett Modified: peps/trunk/pep-3127.txt ============================================================================== --- peps/trunk/pep-3127.txt (original) +++ peps/trunk/pep-3127.txt Thu Jul 19 19:10:10 2007 @@ -4,7 +4,7 @@ Last-Modified: $Date$ Author: Patrick Maupin Discussions-To: Python-3000 at python.org -Status: Accepted +Status: Final Type: Standards Track Content-Type: text/x-rst Created: 14-Mar-2007 From python-checkins at python.org Fri Jul 20 01:57:38 2007 From: python-checkins at python.org 
(facundo.batista) Date: Fri, 20 Jul 2007 01:57:38 +0200 (CEST) Subject: [Python-checkins] r56463 - python/trunk/Lib/test/test_asyncore.py Message-ID: <20070719235738.B698B1E4019@bag.python.org> Author: facundo.batista Date: Fri Jul 20 01:57:38 2007 New Revision: 56463 Modified: python/trunk/Lib/test/test_asyncore.py Log: Added a select.select call in the test server loop to make sure the socket is ready to be read from before attempting a read (this prevents an error 10035 on some Windows platforms). [GSoC - Alan McIntyre] Modified: python/trunk/Lib/test/test_asyncore.py ============================================================================== --- python/trunk/Lib/test/test_asyncore.py (original) +++ python/trunk/Lib/test/test_asyncore.py Fri Jul 20 01:57:38 2007 @@ -65,11 +65,13 @@ else: n = 200 while n > 0: - data = conn.recv(10) - # keep everything except for the newline terminator - buf.write(data.replace('\n', '')) - if '\n' in data: - break + r, w, e = select.select([conn], [], []) + if r: + data = conn.recv(10) + # keep everything except for the newline terminator + buf.write(data.replace('\n', '')) + if '\n' in data: + break n -= 1 time.sleep(0.01) From python-checkins at python.org Sat Jul 21 08:55:08 2007 From: python-checkins at python.org (martin.v.loewis) Date: Sat, 21 Jul 2007 08:55:08 +0200 (CEST) Subject: [Python-checkins] r56476 - in python/trunk: Include/abstract.h Include/boolobject.h Include/bufferobject.h Include/cStringIO.h Include/cellobject.h Include/cobject.h Include/code.h Include/complexobject.h Include/datetime.h Include/descrobject.h Include/dictobject.h Include/fileobject.h Include/floatobject.h Include/funcobject.h Include/genobject.h Include/iterobject.h Include/listobject.h Include/longobject.h Include/methodobject.h Include/moduleobject.h Include/object.h Include/objimpl.h Include/py_curses.h Include/rangeobject.h Include/setobject.h Include/sliceobject.h Include/stringobject.h Include/symtable.h Include/traceback.h 
Include/tupleobject.h Include/unicodeobject.h Include/weakrefobject.h Misc/NEWS Modules/_bsddb.c Modules/_collectionsmodule.c Modules/_csv.c Modules/_ctypes/_ctypes.c Modules/_ctypes/callproc.c Modules/_ctypes/cfield.c Modules/_ctypes/stgdict.c Modules/_curses_panel.c Modules/_cursesmodule.c Modules/_elementtree.c Modules/_functoolsmodule.c Modules/_hashopenssl.c Modules/_hotshot.c Modules/_lsprof.c Modules/_randommodule.c Modules/_sqlite/cache.c Modules/_sqlite/connection.c Modules/_sqlite/cursor.c Modules/_sqlite/prepare_protocol.c Modules/_sqlite/row.c Modules/_sqlite/statement.c Modules/_sre.c Modules/_ssl.c Modules/_struct.c Modules/_testcapimodule.c Modules/_tkinter.c Modules/_typesmodule.c Modules/_weakref.c Modules/arraymodule.c Modules/bz2module.c Modules/cPickle.c Modules/cStringIO.c Modules/cjkcodecs/multibytecodec.c Modules/datetimemodule.c Modules/dbmmodule.c Modules/dlmodule.c Modules/gcmodule.c Modules/gdbmmodule.c Modules/itertoolsmodule.c Modules/linuxaudiodev.c Modules/md5module.c Modules/mmapmodule.c Modules/operator.c Modules/ossaudiodev.c Modules/parsermodule.c Modules/posixmodule.c Modules/pyexpat.c Modules/selectmodule.c Modules/sha256module.c Modules/sha512module.c Modules/shamodule.c Modules/socketmodule.c Modules/sunaudiodev.c Modules/threadmodule.c Modules/unicodedata.c Modules/xxmodule.c Modules/xxsubtype.c Modules/zipimport.c Modules/zlibmodule.c Objects/boolobject.c Objects/bufferobject.c Objects/cellobject.c Objects/cobject.c Objects/codeobject.c Objects/complexobject.c Objects/descrobject.c Objects/dictobject.c Objects/enumobject.c Objects/exceptions.c Objects/fileobject.c Objects/floatobject.c Objects/frameobject.c Objects/funcobject.c Objects/genobject.c Objects/intobject.c Objects/iterobject.c Objects/listobject.c Objects/longobject.c Objects/methodobject.c Objects/moduleobject.c Objects/object.c Objects/obmalloc.c Objects/setobject.c Objects/sliceobject.c Objects/stringobject.c Objects/structseq.c Objects/tupleobject.c 
Objects/typeobject.c Objects/unicodeobject.c Objects/weakrefobject.c PC/_msi.c PC/_winreg.c Python/ceval.c Python/import.c Python/symtable.c Python/traceback.c Message-ID: <20070721065508.8777E1E4008@bag.python.org> Author: martin.v.loewis Date: Sat Jul 21 08:55:02 2007 New Revision: 56476 Modified: python/trunk/Include/abstract.h python/trunk/Include/boolobject.h python/trunk/Include/bufferobject.h python/trunk/Include/cStringIO.h python/trunk/Include/cellobject.h python/trunk/Include/cobject.h python/trunk/Include/code.h python/trunk/Include/complexobject.h python/trunk/Include/datetime.h python/trunk/Include/descrobject.h python/trunk/Include/dictobject.h python/trunk/Include/fileobject.h python/trunk/Include/floatobject.h python/trunk/Include/funcobject.h python/trunk/Include/genobject.h python/trunk/Include/iterobject.h python/trunk/Include/listobject.h python/trunk/Include/longobject.h python/trunk/Include/methodobject.h python/trunk/Include/moduleobject.h python/trunk/Include/object.h python/trunk/Include/objimpl.h python/trunk/Include/py_curses.h python/trunk/Include/rangeobject.h python/trunk/Include/setobject.h python/trunk/Include/sliceobject.h python/trunk/Include/stringobject.h python/trunk/Include/symtable.h python/trunk/Include/traceback.h python/trunk/Include/tupleobject.h python/trunk/Include/unicodeobject.h python/trunk/Include/weakrefobject.h python/trunk/Misc/NEWS python/trunk/Modules/_bsddb.c python/trunk/Modules/_collectionsmodule.c python/trunk/Modules/_csv.c python/trunk/Modules/_ctypes/_ctypes.c python/trunk/Modules/_ctypes/callproc.c python/trunk/Modules/_ctypes/cfield.c python/trunk/Modules/_ctypes/stgdict.c python/trunk/Modules/_curses_panel.c python/trunk/Modules/_cursesmodule.c python/trunk/Modules/_elementtree.c python/trunk/Modules/_functoolsmodule.c python/trunk/Modules/_hashopenssl.c python/trunk/Modules/_hotshot.c python/trunk/Modules/_lsprof.c python/trunk/Modules/_randommodule.c python/trunk/Modules/_sqlite/cache.c 
python/trunk/Modules/_sqlite/connection.c python/trunk/Modules/_sqlite/cursor.c python/trunk/Modules/_sqlite/prepare_protocol.c python/trunk/Modules/_sqlite/row.c python/trunk/Modules/_sqlite/statement.c python/trunk/Modules/_sre.c python/trunk/Modules/_ssl.c python/trunk/Modules/_struct.c python/trunk/Modules/_testcapimodule.c python/trunk/Modules/_tkinter.c python/trunk/Modules/_typesmodule.c python/trunk/Modules/_weakref.c python/trunk/Modules/arraymodule.c python/trunk/Modules/bz2module.c python/trunk/Modules/cPickle.c python/trunk/Modules/cStringIO.c python/trunk/Modules/cjkcodecs/multibytecodec.c python/trunk/Modules/datetimemodule.c python/trunk/Modules/dbmmodule.c python/trunk/Modules/dlmodule.c python/trunk/Modules/gcmodule.c python/trunk/Modules/gdbmmodule.c python/trunk/Modules/itertoolsmodule.c python/trunk/Modules/linuxaudiodev.c python/trunk/Modules/md5module.c python/trunk/Modules/mmapmodule.c python/trunk/Modules/operator.c python/trunk/Modules/ossaudiodev.c python/trunk/Modules/parsermodule.c python/trunk/Modules/posixmodule.c python/trunk/Modules/pyexpat.c python/trunk/Modules/selectmodule.c python/trunk/Modules/sha256module.c python/trunk/Modules/sha512module.c python/trunk/Modules/shamodule.c python/trunk/Modules/socketmodule.c python/trunk/Modules/sunaudiodev.c python/trunk/Modules/threadmodule.c python/trunk/Modules/unicodedata.c python/trunk/Modules/xxmodule.c python/trunk/Modules/xxsubtype.c python/trunk/Modules/zipimport.c python/trunk/Modules/zlibmodule.c python/trunk/Objects/boolobject.c python/trunk/Objects/bufferobject.c python/trunk/Objects/cellobject.c python/trunk/Objects/cobject.c python/trunk/Objects/codeobject.c python/trunk/Objects/complexobject.c python/trunk/Objects/descrobject.c python/trunk/Objects/dictobject.c python/trunk/Objects/enumobject.c python/trunk/Objects/exceptions.c python/trunk/Objects/fileobject.c python/trunk/Objects/floatobject.c python/trunk/Objects/frameobject.c python/trunk/Objects/funcobject.c 
python/trunk/Objects/genobject.c python/trunk/Objects/intobject.c python/trunk/Objects/iterobject.c python/trunk/Objects/listobject.c python/trunk/Objects/longobject.c python/trunk/Objects/methodobject.c python/trunk/Objects/moduleobject.c python/trunk/Objects/object.c python/trunk/Objects/obmalloc.c python/trunk/Objects/setobject.c python/trunk/Objects/sliceobject.c python/trunk/Objects/stringobject.c python/trunk/Objects/structseq.c python/trunk/Objects/tupleobject.c python/trunk/Objects/typeobject.c python/trunk/Objects/unicodeobject.c python/trunk/Objects/weakrefobject.c python/trunk/PC/_msi.c python/trunk/PC/_winreg.c python/trunk/Python/ceval.c python/trunk/Python/import.c python/trunk/Python/symtable.c python/trunk/Python/traceback.c Log: PEP 3123: Provide forward compatibility with Python 3.0, while keeping backwards compatibility. Add Py_Refcnt, Py_Type, Py_Size, and PyVarObject_HEAD_INIT. Modified: python/trunk/Include/abstract.h ============================================================================== --- python/trunk/Include/abstract.h (original) +++ python/trunk/Include/abstract.h Sat Jul 21 08:55:02 2007 @@ -1064,7 +1064,7 @@ */ #define PySequence_ITEM(o, i)\ - ( o->ob_type->tp_as_sequence->sq_item(o, i) ) + ( Py_Type(o)->tp_as_sequence->sq_item(o, i) ) /* Assume tp_as_sequence and sq_item exist and that i does not need to be corrected for a negative index */ Modified: python/trunk/Include/boolobject.h ============================================================================== --- python/trunk/Include/boolobject.h (original) +++ python/trunk/Include/boolobject.h Sat Jul 21 08:55:02 2007 @@ -11,7 +11,7 @@ PyAPI_DATA(PyTypeObject) PyBool_Type; -#define PyBool_Check(x) ((x)->ob_type == &PyBool_Type) +#define PyBool_Check(x) (Py_Type(x) == &PyBool_Type) /* Py_False and Py_True are the only two bools in existence. Don't forget to apply Py_INCREF() when returning either!!! 
*/ Modified: python/trunk/Include/bufferobject.h ============================================================================== --- python/trunk/Include/bufferobject.h (original) +++ python/trunk/Include/bufferobject.h Sat Jul 21 08:55:02 2007 @@ -12,7 +12,7 @@ PyAPI_DATA(PyTypeObject) PyBuffer_Type; -#define PyBuffer_Check(op) ((op)->ob_type == &PyBuffer_Type) +#define PyBuffer_Check(op) (Py_Type(op) == &PyBuffer_Type) #define Py_END_OF_BUFFER (-1) Modified: python/trunk/Include/cStringIO.h ============================================================================== --- python/trunk/Include/cStringIO.h (original) +++ python/trunk/Include/cStringIO.h Sat Jul 21 08:55:02 2007 @@ -60,9 +60,9 @@ /* These can be used to test if you have one */ #define PycStringIO_InputCheck(O) \ - ((O)->ob_type==PycStringIO->InputType) + (Py_Type(O)==PycStringIO->InputType) #define PycStringIO_OutputCheck(O) \ - ((O)->ob_type==PycStringIO->OutputType) + (Py_Type(O)==PycStringIO->OutputType) #ifdef __cplusplus } Modified: python/trunk/Include/cellobject.h ============================================================================== --- python/trunk/Include/cellobject.h (original) +++ python/trunk/Include/cellobject.h Sat Jul 21 08:55:02 2007 @@ -13,7 +13,7 @@ PyAPI_DATA(PyTypeObject) PyCell_Type; -#define PyCell_Check(op) ((op)->ob_type == &PyCell_Type) +#define PyCell_Check(op) (Py_Type(op) == &PyCell_Type) PyAPI_FUNC(PyObject *) PyCell_New(PyObject *); PyAPI_FUNC(PyObject *) PyCell_Get(PyObject *); Modified: python/trunk/Include/cobject.h ============================================================================== --- python/trunk/Include/cobject.h (original) +++ python/trunk/Include/cobject.h Sat Jul 21 08:55:02 2007 @@ -16,7 +16,7 @@ PyAPI_DATA(PyTypeObject) PyCObject_Type; -#define PyCObject_Check(op) ((op)->ob_type == &PyCObject_Type) +#define PyCObject_Check(op) (Py_Type(op) == &PyCObject_Type) /* Create a PyCObject from a pointer to a C object and an optional destructor 
function. If the second argument is non-null, then it Modified: python/trunk/Include/code.h ============================================================================== --- python/trunk/Include/code.h (original) +++ python/trunk/Include/code.h Sat Jul 21 08:55:02 2007 @@ -60,7 +60,7 @@ PyAPI_DATA(PyTypeObject) PyCode_Type; -#define PyCode_Check(op) ((op)->ob_type == &PyCode_Type) +#define PyCode_Check(op) (Py_Type(op) == &PyCode_Type) #define PyCode_GetNumFree(op) (PyTuple_GET_SIZE((op)->co_freevars)) /* Public interface */ @@ -72,7 +72,7 @@ /* for internal use only */ #define _PyCode_GETCODEPTR(co, pp) \ - ((*(co)->co_code->ob_type->tp_as_buffer->bf_getreadbuffer) \ + ((*Py_Type((co)->co_code)->tp_as_buffer->bf_getreadbuffer) \ ((co)->co_code, 0, (void **)(pp))) typedef struct _addr_pair { Modified: python/trunk/Include/complexobject.h ============================================================================== --- python/trunk/Include/complexobject.h (original) +++ python/trunk/Include/complexobject.h Sat Jul 21 08:55:02 2007 @@ -43,7 +43,7 @@ PyAPI_DATA(PyTypeObject) PyComplex_Type; #define PyComplex_Check(op) PyObject_TypeCheck(op, &PyComplex_Type) -#define PyComplex_CheckExact(op) ((op)->ob_type == &PyComplex_Type) +#define PyComplex_CheckExact(op) (Py_Type(op) == &PyComplex_Type) PyAPI_FUNC(PyObject *) PyComplex_FromCComplex(Py_complex); PyAPI_FUNC(PyObject *) PyComplex_FromDoubles(double real, double imag); Modified: python/trunk/Include/datetime.h ============================================================================== --- python/trunk/Include/datetime.h (original) +++ python/trunk/Include/datetime.h Sat Jul 21 08:55:02 2007 @@ -166,19 +166,19 @@ /* Macros for type checking when building the Python core. 
*/ #define PyDate_Check(op) PyObject_TypeCheck(op, &PyDateTime_DateType) -#define PyDate_CheckExact(op) ((op)->ob_type == &PyDateTime_DateType) +#define PyDate_CheckExact(op) (Py_Type(op) == &PyDateTime_DateType) #define PyDateTime_Check(op) PyObject_TypeCheck(op, &PyDateTime_DateTimeType) -#define PyDateTime_CheckExact(op) ((op)->ob_type == &PyDateTime_DateTimeType) +#define PyDateTime_CheckExact(op) (Py_Type(op) == &PyDateTime_DateTimeType) #define PyTime_Check(op) PyObject_TypeCheck(op, &PyDateTime_TimeType) -#define PyTime_CheckExact(op) ((op)->ob_type == &PyDateTime_TimeType) +#define PyTime_CheckExact(op) (Py_Type(op) == &PyDateTime_TimeType) #define PyDelta_Check(op) PyObject_TypeCheck(op, &PyDateTime_DeltaType) -#define PyDelta_CheckExact(op) ((op)->ob_type == &PyDateTime_DeltaType) +#define PyDelta_CheckExact(op) (Py_Type(op) == &PyDateTime_DeltaType) #define PyTZInfo_Check(op) PyObject_TypeCheck(op, &PyDateTime_TZInfoType) -#define PyTZInfo_CheckExact(op) ((op)->ob_type == &PyDateTime_TZInfoType) +#define PyTZInfo_CheckExact(op) (Py_Type(op) == &PyDateTime_TZInfoType) #else @@ -198,19 +198,19 @@ /* Macros for type checking when not building the Python core. 
*/ #define PyDate_Check(op) PyObject_TypeCheck(op, PyDateTimeAPI->DateType) -#define PyDate_CheckExact(op) ((op)->ob_type == PyDateTimeAPI->DateType) +#define PyDate_CheckExact(op) (Py_Type(op) == PyDateTimeAPI->DateType) #define PyDateTime_Check(op) PyObject_TypeCheck(op, PyDateTimeAPI->DateTimeType) -#define PyDateTime_CheckExact(op) ((op)->ob_type == PyDateTimeAPI->DateTimeType) +#define PyDateTime_CheckExact(op) (Py_Type(op) == PyDateTimeAPI->DateTimeType) #define PyTime_Check(op) PyObject_TypeCheck(op, PyDateTimeAPI->TimeType) -#define PyTime_CheckExact(op) ((op)->ob_type == PyDateTimeAPI->TimeType) +#define PyTime_CheckExact(op) (Py_Type(op) == PyDateTimeAPI->TimeType) #define PyDelta_Check(op) PyObject_TypeCheck(op, PyDateTimeAPI->DeltaType) -#define PyDelta_CheckExact(op) ((op)->ob_type == PyDateTimeAPI->DeltaType) +#define PyDelta_CheckExact(op) (Py_Type(op) == PyDateTimeAPI->DeltaType) #define PyTZInfo_Check(op) PyObject_TypeCheck(op, PyDateTimeAPI->TZInfoType) -#define PyTZInfo_CheckExact(op) ((op)->ob_type == PyDateTimeAPI->TZInfoType) +#define PyTZInfo_CheckExact(op) (Py_Type(op) == PyDateTimeAPI->TZInfoType) /* Macros for accessing constructors in a simplified fashion. 
*/ #define PyDate_FromDate(year, month, day) \ Modified: python/trunk/Include/descrobject.h ============================================================================== --- python/trunk/Include/descrobject.h (original) +++ python/trunk/Include/descrobject.h Sat Jul 21 08:55:02 2007 @@ -77,7 +77,7 @@ struct PyGetSetDef *); PyAPI_FUNC(PyObject *) PyDescr_NewWrapper(PyTypeObject *, struct wrapperbase *, void *); -#define PyDescr_IsData(d) ((d)->ob_type->tp_descr_set != NULL) +#define PyDescr_IsData(d) (Py_Type(d)->tp_descr_set != NULL) PyAPI_FUNC(PyObject *) PyDictProxy_New(PyObject *); PyAPI_FUNC(PyObject *) PyWrapper_New(PyObject *, PyObject *); Modified: python/trunk/Include/dictobject.h ============================================================================== --- python/trunk/Include/dictobject.h (original) +++ python/trunk/Include/dictobject.h Sat Jul 21 08:55:02 2007 @@ -91,8 +91,8 @@ PyAPI_DATA(PyTypeObject) PyDict_Type; #define PyDict_Check(op) \ - PyType_FastSubclass((op)->ob_type, Py_TPFLAGS_DICT_SUBCLASS) -#define PyDict_CheckExact(op) ((op)->ob_type == &PyDict_Type) + PyType_FastSubclass(Py_Type(op), Py_TPFLAGS_DICT_SUBCLASS) +#define PyDict_CheckExact(op) (Py_Type(op) == &PyDict_Type) PyAPI_FUNC(PyObject *) PyDict_New(void); PyAPI_FUNC(PyObject *) PyDict_GetItem(PyObject *mp, PyObject *key); Modified: python/trunk/Include/fileobject.h ============================================================================== --- python/trunk/Include/fileobject.h (original) +++ python/trunk/Include/fileobject.h Sat Jul 21 08:55:02 2007 @@ -30,7 +30,7 @@ PyAPI_DATA(PyTypeObject) PyFile_Type; #define PyFile_Check(op) PyObject_TypeCheck(op, &PyFile_Type) -#define PyFile_CheckExact(op) ((op)->ob_type == &PyFile_Type) +#define PyFile_CheckExact(op) (Py_Type(op) == &PyFile_Type) PyAPI_FUNC(PyObject *) PyFile_FromString(char *, char *); PyAPI_FUNC(void) PyFile_SetBufSize(PyObject *, int); Modified: python/trunk/Include/floatobject.h 
============================================================================== --- python/trunk/Include/floatobject.h (original) +++ python/trunk/Include/floatobject.h Sat Jul 21 08:55:02 2007 @@ -19,7 +19,7 @@ PyAPI_DATA(PyTypeObject) PyFloat_Type; #define PyFloat_Check(op) PyObject_TypeCheck(op, &PyFloat_Type) -#define PyFloat_CheckExact(op) ((op)->ob_type == &PyFloat_Type) +#define PyFloat_CheckExact(op) (Py_Type(op) == &PyFloat_Type) /* Return Python float from string PyObject. Second argument ignored on input, and, if non-NULL, NULL is stored into *junk (this tried to serve a Modified: python/trunk/Include/funcobject.h ============================================================================== --- python/trunk/Include/funcobject.h (original) +++ python/trunk/Include/funcobject.h Sat Jul 21 08:55:02 2007 @@ -39,7 +39,7 @@ PyAPI_DATA(PyTypeObject) PyFunction_Type; -#define PyFunction_Check(op) ((op)->ob_type == &PyFunction_Type) +#define PyFunction_Check(op) (Py_Type(op) == &PyFunction_Type) PyAPI_FUNC(PyObject *) PyFunction_New(PyObject *, PyObject *); PyAPI_FUNC(PyObject *) PyFunction_GetCode(PyObject *); Modified: python/trunk/Include/genobject.h ============================================================================== --- python/trunk/Include/genobject.h (original) +++ python/trunk/Include/genobject.h Sat Jul 21 08:55:02 2007 @@ -26,7 +26,7 @@ PyAPI_DATA(PyTypeObject) PyGen_Type; #define PyGen_Check(op) PyObject_TypeCheck(op, &PyGen_Type) -#define PyGen_CheckExact(op) ((op)->ob_type == &PyGen_Type) +#define PyGen_CheckExact(op) (Py_Type(op) == &PyGen_Type) PyAPI_FUNC(PyObject *) PyGen_New(struct _frame *); PyAPI_FUNC(int) PyGen_NeedsFinalizing(PyGenObject *); Modified: python/trunk/Include/iterobject.h ============================================================================== --- python/trunk/Include/iterobject.h (original) +++ python/trunk/Include/iterobject.h Sat Jul 21 08:55:02 2007 @@ -7,13 +7,13 @@ PyAPI_DATA(PyTypeObject) PySeqIter_Type; 
-#define PySeqIter_Check(op) ((op)->ob_type == &PySeqIter_Type) +#define PySeqIter_Check(op) (Py_Type(op) == &PySeqIter_Type) PyAPI_FUNC(PyObject *) PySeqIter_New(PyObject *); PyAPI_DATA(PyTypeObject) PyCallIter_Type; -#define PyCallIter_Check(op) ((op)->ob_type == &PyCallIter_Type) +#define PyCallIter_Check(op) (Py_Type(op) == &PyCallIter_Type) PyAPI_FUNC(PyObject *) PyCallIter_New(PyObject *, PyObject *); #ifdef __cplusplus Modified: python/trunk/Include/listobject.h ============================================================================== --- python/trunk/Include/listobject.h (original) +++ python/trunk/Include/listobject.h Sat Jul 21 08:55:02 2007 @@ -41,8 +41,8 @@ PyAPI_DATA(PyTypeObject) PyList_Type; #define PyList_Check(op) \ - PyType_FastSubclass((op)->ob_type, Py_TPFLAGS_LIST_SUBCLASS) -#define PyList_CheckExact(op) ((op)->ob_type == &PyList_Type) + PyType_FastSubclass(Py_Type(op), Py_TPFLAGS_LIST_SUBCLASS) +#define PyList_CheckExact(op) (Py_Type(op) == &PyList_Type) PyAPI_FUNC(PyObject *) PyList_New(Py_ssize_t size); PyAPI_FUNC(Py_ssize_t) PyList_Size(PyObject *); @@ -60,7 +60,7 @@ /* Macro, trading safety for speed */ #define PyList_GET_ITEM(op, i) (((PyListObject *)(op))->ob_item[i]) #define PyList_SET_ITEM(op, i, v) (((PyListObject *)(op))->ob_item[i] = (v)) -#define PyList_GET_SIZE(op) (((PyListObject *)(op))->ob_size) +#define PyList_GET_SIZE(op) Py_Size(op) #ifdef __cplusplus } Modified: python/trunk/Include/longobject.h ============================================================================== --- python/trunk/Include/longobject.h (original) +++ python/trunk/Include/longobject.h Sat Jul 21 08:55:02 2007 @@ -12,8 +12,8 @@ PyAPI_DATA(PyTypeObject) PyLong_Type; #define PyLong_Check(op) \ - PyType_FastSubclass((op)->ob_type, Py_TPFLAGS_LONG_SUBCLASS) -#define PyLong_CheckExact(op) ((op)->ob_type == &PyLong_Type) + PyType_FastSubclass(Py_Type(op), Py_TPFLAGS_LONG_SUBCLASS) +#define PyLong_CheckExact(op) (Py_Type(op) == &PyLong_Type) 
PyAPI_FUNC(PyObject *) PyLong_FromLong(long); PyAPI_FUNC(PyObject *) PyLong_FromUnsignedLong(unsigned long); Modified: python/trunk/Include/methodobject.h ============================================================================== --- python/trunk/Include/methodobject.h (original) +++ python/trunk/Include/methodobject.h Sat Jul 21 08:55:02 2007 @@ -13,7 +13,7 @@ PyAPI_DATA(PyTypeObject) PyCFunction_Type; -#define PyCFunction_Check(op) ((op)->ob_type == &PyCFunction_Type) +#define PyCFunction_Check(op) (Py_Type(op) == &PyCFunction_Type) typedef PyObject *(*PyCFunction)(PyObject *, PyObject *); typedef PyObject *(*PyCFunctionWithKeywords)(PyObject *, PyObject *, Modified: python/trunk/Include/moduleobject.h ============================================================================== --- python/trunk/Include/moduleobject.h (original) +++ python/trunk/Include/moduleobject.h Sat Jul 21 08:55:02 2007 @@ -10,7 +10,7 @@ PyAPI_DATA(PyTypeObject) PyModule_Type; #define PyModule_Check(op) PyObject_TypeCheck(op, &PyModule_Type) -#define PyModule_CheckExact(op) ((op)->ob_type == &PyModule_Type) +#define PyModule_CheckExact(op) (Py_Type(op) == &PyModule_Type) PyAPI_FUNC(PyObject *) PyModule_New(const char *); PyAPI_FUNC(PyObject *) PyModule_GetDict(PyObject *); Modified: python/trunk/Include/object.h ============================================================================== --- python/trunk/Include/object.h (original) +++ python/trunk/Include/object.h Sat Jul 21 08:55:02 2007 @@ -84,6 +84,9 @@ _PyObject_EXTRA_INIT \ 1, type, +#define PyVarObject_HEAD_INIT(type, size) \ + PyObject_HEAD_INIT(type) size, + /* PyObject_VAR_HEAD defines the initial segment of all variable-size * container objects. 
These end with a declaration of an array with 1 * element, but enough space is malloc'ed so that the array actually @@ -108,6 +111,9 @@ PyObject_VAR_HEAD } PyVarObject; +#define Py_Refcnt(ob) (((PyObject*)(ob))->ob_refcnt) +#define Py_Type(ob) (((PyObject*)(ob))->ob_type) +#define Py_Size(ob) (((PyVarObject*)(ob))->ob_size) /* Type objects contain a string containing the type name (to help somewhat @@ -364,21 +370,21 @@ /* access macro to the members which are floating "behind" the object */ #define PyHeapType_GET_MEMBERS(etype) \ - ((PyMemberDef *)(((char *)etype) + (etype)->ht_type.ob_type->tp_basicsize)) + ((PyMemberDef *)(((char *)etype) + Py_Type(etype)->tp_basicsize)) /* Generic type check */ PyAPI_FUNC(int) PyType_IsSubtype(PyTypeObject *, PyTypeObject *); #define PyObject_TypeCheck(ob, tp) \ - ((ob)->ob_type == (tp) || PyType_IsSubtype((ob)->ob_type, (tp))) + (Py_Type(ob) == (tp) || PyType_IsSubtype(Py_Type(ob), (tp))) PyAPI_DATA(PyTypeObject) PyType_Type; /* built-in 'type' */ PyAPI_DATA(PyTypeObject) PyBaseObject_Type; /* built-in 'object' */ PyAPI_DATA(PyTypeObject) PySuper_Type; /* built-in 'super' */ #define PyType_Check(op) \ - PyType_FastSubclass((op)->ob_type, Py_TPFLAGS_TYPE_SUBCLASS) -#define PyType_CheckExact(op) ((op)->ob_type == &PyType_Type) + PyType_FastSubclass(Py_Type(op), Py_TPFLAGS_TYPE_SUBCLASS) +#define PyType_CheckExact(op) (Py_Type(op) == &PyType_Type) PyAPI_FUNC(int) PyType_Ready(PyTypeObject *); PyAPI_FUNC(PyObject *) PyType_GenericAlloc(PyTypeObject *, Py_ssize_t); @@ -599,7 +605,7 @@ #define _Py_DEC_REFTOTAL _Py_RefTotal-- #define _Py_REF_DEBUG_COMMA , #define _Py_CHECK_REFCNT(OP) \ -{ if ((OP)->ob_refcnt < 0) \ +{ if (((PyObject*)OP)->ob_refcnt < 0) \ _Py_NegativeRefcount(__FILE__, __LINE__, \ (PyObject *)(OP)); \ } @@ -613,9 +619,9 @@ #ifdef COUNT_ALLOCS PyAPI_FUNC(void) inc_count(PyTypeObject *); PyAPI_FUNC(void) dec_count(PyTypeObject *); -#define _Py_INC_TPALLOCS(OP) inc_count((OP)->ob_type) -#define _Py_INC_TPFREES(OP) 
dec_count((OP)->ob_type) -#define _Py_DEC_TPFREES(OP) (OP)->ob_type->tp_frees-- +#define _Py_INC_TPALLOCS(OP) inc_count(Py_Type(OP)) +#define _Py_INC_TPFREES(OP) dec_count(Py_Type(OP)) +#define _Py_DEC_TPFREES(OP) Py_Type(OP)->tp_frees-- #define _Py_COUNT_ALLOCS_COMMA , #else #define _Py_INC_TPALLOCS(OP) @@ -640,22 +646,22 @@ #define _Py_NewReference(op) ( \ _Py_INC_TPALLOCS(op) _Py_COUNT_ALLOCS_COMMA \ _Py_INC_REFTOTAL _Py_REF_DEBUG_COMMA \ - (op)->ob_refcnt = 1) + Py_Refcnt(op) = 1) #define _Py_ForgetReference(op) _Py_INC_TPFREES(op) #define _Py_Dealloc(op) ( \ _Py_INC_TPFREES(op) _Py_COUNT_ALLOCS_COMMA \ - (*(op)->ob_type->tp_dealloc)((PyObject *)(op))) + (*Py_Type(op)->tp_dealloc)((PyObject *)(op))) #endif /* !Py_TRACE_REFS */ #define Py_INCREF(op) ( \ _Py_INC_REFTOTAL _Py_REF_DEBUG_COMMA \ - (op)->ob_refcnt++) + ((PyObject*)(op))->ob_refcnt++) #define Py_DECREF(op) \ if (_Py_DEC_REFTOTAL _Py_REF_DEBUG_COMMA \ - --(op)->ob_refcnt != 0) \ + --((PyObject*)(op))->ob_refcnt != 0) \ _Py_CHECK_REFCNT(op) \ else \ _Py_Dealloc((PyObject *)(op)) Modified: python/trunk/Include/objimpl.h ============================================================================== --- python/trunk/Include/objimpl.h (original) +++ python/trunk/Include/objimpl.h Sat Jul 21 08:55:02 2007 @@ -154,9 +154,9 @@ /* Macros trading binary compatibility for speed. See also pymem.h. 
Note that these macros expect non-NULL object pointers.*/ #define PyObject_INIT(op, typeobj) \ - ( (op)->ob_type = (typeobj), _Py_NewReference((PyObject *)(op)), (op) ) + ( Py_Type(op) = (typeobj), _Py_NewReference((PyObject *)(op)), (op) ) #define PyObject_INIT_VAR(op, typeobj, size) \ - ( (op)->ob_size = (size), PyObject_INIT((op), (typeobj)) ) + ( Py_Size(op) = (size), PyObject_INIT((op), (typeobj)) ) #define _PyObject_SIZE(typeobj) ( (typeobj)->tp_basicsize ) @@ -231,8 +231,8 @@ #define PyType_IS_GC(t) PyType_HasFeature((t), Py_TPFLAGS_HAVE_GC) /* Test if an object has a GC head */ -#define PyObject_IS_GC(o) (PyType_IS_GC((o)->ob_type) && \ - ((o)->ob_type->tp_is_gc == NULL || (o)->ob_type->tp_is_gc(o))) +#define PyObject_IS_GC(o) (PyType_IS_GC(Py_Type(o)) && \ + (Py_Type(o)->tp_is_gc == NULL || Py_Type(o)->tp_is_gc(o))) PyAPI_FUNC(PyVarObject *) _PyObject_GC_Resize(PyVarObject *, Py_ssize_t); #define PyObject_GC_Resize(type, op, n) \ @@ -328,7 +328,7 @@ && ((t)->tp_weaklistoffset > 0)) #define PyObject_GET_WEAKREFS_LISTPTR(o) \ - ((PyObject **) (((char *) (o)) + (o)->ob_type->tp_weaklistoffset)) + ((PyObject **) (((char *) (o)) + Py_Type(o)->tp_weaklistoffset)) #ifdef __cplusplus } Modified: python/trunk/Include/py_curses.h ============================================================================== --- python/trunk/Include/py_curses.h (original) +++ python/trunk/Include/py_curses.h Sat Jul 21 08:55:02 2007 @@ -73,7 +73,7 @@ WINDOW *win; } PyCursesWindowObject; -#define PyCursesWindow_Check(v) ((v)->ob_type == &PyCursesWindow_Type) +#define PyCursesWindow_Check(v) (Py_Type(v) == &PyCursesWindow_Type) #ifdef CURSES_MODULE /* This section is used when compiling _cursesmodule.c */ Modified: python/trunk/Include/rangeobject.h ============================================================================== --- python/trunk/Include/rangeobject.h (original) +++ python/trunk/Include/rangeobject.h Sat Jul 21 08:55:02 2007 @@ -20,7 +20,7 @@ PyAPI_DATA(PyTypeObject) 
PyRange_Type; -#define PyRange_Check(op) ((op)->ob_type == &PyRange_Type) +#define PyRange_Check(op) (Py_Type(op) == &PyRange_Type) #ifdef __cplusplus } Modified: python/trunk/Include/setobject.h ============================================================================== --- python/trunk/Include/setobject.h (original) +++ python/trunk/Include/setobject.h Sat Jul 21 08:55:02 2007 @@ -66,13 +66,13 @@ * hash is -1 */ -#define PyFrozenSet_CheckExact(ob) ((ob)->ob_type == &PyFrozenSet_Type) +#define PyFrozenSet_CheckExact(ob) (Py_Type(ob) == &PyFrozenSet_Type) #define PyAnySet_CheckExact(ob) \ - ((ob)->ob_type == &PySet_Type || (ob)->ob_type == &PyFrozenSet_Type) + (Py_Type(ob) == &PySet_Type || Py_Type(ob) == &PyFrozenSet_Type) #define PyAnySet_Check(ob) \ - ((ob)->ob_type == &PySet_Type || (ob)->ob_type == &PyFrozenSet_Type || \ - PyType_IsSubtype((ob)->ob_type, &PySet_Type) || \ - PyType_IsSubtype((ob)->ob_type, &PyFrozenSet_Type)) + (Py_Type(ob) == &PySet_Type || Py_Type(ob) == &PyFrozenSet_Type || \ + PyType_IsSubtype(Py_Type(ob), &PySet_Type) || \ + PyType_IsSubtype(Py_Type(ob), &PyFrozenSet_Type)) PyAPI_FUNC(PyObject *) PySet_New(PyObject *); PyAPI_FUNC(PyObject *) PyFrozenSet_New(PyObject *); Modified: python/trunk/Include/sliceobject.h ============================================================================== --- python/trunk/Include/sliceobject.h (original) +++ python/trunk/Include/sliceobject.h Sat Jul 21 08:55:02 2007 @@ -26,7 +26,7 @@ PyAPI_DATA(PyTypeObject) PySlice_Type; -#define PySlice_Check(op) ((op)->ob_type == &PySlice_Type) +#define PySlice_Check(op) (Py_Type(op) == &PySlice_Type) PyAPI_FUNC(PyObject *) PySlice_New(PyObject* start, PyObject* stop, PyObject* step); Modified: python/trunk/Include/stringobject.h ============================================================================== --- python/trunk/Include/stringobject.h (original) +++ python/trunk/Include/stringobject.h Sat Jul 21 08:55:02 2007 @@ -56,8 +56,8 @@ PyAPI_DATA(PyTypeObject) 
PyString_Type; #define PyString_Check(op) \ - PyType_FastSubclass((op)->ob_type, Py_TPFLAGS_STRING_SUBCLASS) -#define PyString_CheckExact(op) ((op)->ob_type == &PyString_Type) + PyType_FastSubclass(Py_Type(op), Py_TPFLAGS_STRING_SUBCLASS) +#define PyString_CheckExact(op) (Py_Type(op) == &PyString_Type) PyAPI_FUNC(PyObject *) PyString_FromStringAndSize(const char *, Py_ssize_t); PyAPI_FUNC(PyObject *) PyString_FromString(const char *); @@ -89,7 +89,7 @@ /* Macro, trading safety for speed */ #define PyString_AS_STRING(op) (((PyStringObject *)(op))->ob_sval) -#define PyString_GET_SIZE(op) (((PyStringObject *)(op))->ob_size) +#define PyString_GET_SIZE(op) Py_Size(op) /* _PyString_Join(sep, x) is like sep.join(x). sep must be PyStringObject*, x must be an iterable object. */ Modified: python/trunk/Include/symtable.h ============================================================================== --- python/trunk/Include/symtable.h (original) +++ python/trunk/Include/symtable.h Sat Jul 21 08:55:02 2007 @@ -49,7 +49,7 @@ PyAPI_DATA(PyTypeObject) PySTEntry_Type; -#define PySTEntry_Check(op) ((op)->ob_type == &PySTEntry_Type) +#define PySTEntry_Check(op) (Py_Type(op) == &PySTEntry_Type) PyAPI_FUNC(int) PyST_GetScope(PySTEntryObject *, PyObject *); Modified: python/trunk/Include/traceback.h ============================================================================== --- python/trunk/Include/traceback.h (original) +++ python/trunk/Include/traceback.h Sat Jul 21 08:55:02 2007 @@ -22,7 +22,7 @@ /* Reveal traceback type so we can typecheck traceback objects */ PyAPI_DATA(PyTypeObject) PyTraceBack_Type; -#define PyTraceBack_Check(v) ((v)->ob_type == &PyTraceBack_Type) +#define PyTraceBack_Check(v) (Py_Type(v) == &PyTraceBack_Type) #ifdef __cplusplus } Modified: python/trunk/Include/tupleobject.h ============================================================================== --- python/trunk/Include/tupleobject.h (original) +++ python/trunk/Include/tupleobject.h Sat Jul 21 08:55:02 
2007 @@ -34,8 +34,8 @@ PyAPI_DATA(PyTypeObject) PyTuple_Type; #define PyTuple_Check(op) \ - PyType_FastSubclass((op)->ob_type, Py_TPFLAGS_TUPLE_SUBCLASS) -#define PyTuple_CheckExact(op) ((op)->ob_type == &PyTuple_Type) + PyType_FastSubclass(Py_Type(op), Py_TPFLAGS_TUPLE_SUBCLASS) +#define PyTuple_CheckExact(op) (Py_Type(op) == &PyTuple_Type) PyAPI_FUNC(PyObject *) PyTuple_New(Py_ssize_t size); PyAPI_FUNC(Py_ssize_t) PyTuple_Size(PyObject *); @@ -47,7 +47,7 @@ /* Macro, trading safety for speed */ #define PyTuple_GET_ITEM(op, i) (((PyTupleObject *)(op))->ob_item[i]) -#define PyTuple_GET_SIZE(op) (((PyTupleObject *)(op))->ob_size) +#define PyTuple_GET_SIZE(op) Py_Size(op) /* Macro, *only* to be used to fill in brand new tuples */ #define PyTuple_SET_ITEM(op, i, v) (((PyTupleObject *)(op))->ob_item[i] = v) Modified: python/trunk/Include/unicodeobject.h ============================================================================== --- python/trunk/Include/unicodeobject.h (original) +++ python/trunk/Include/unicodeobject.h Sat Jul 21 08:55:02 2007 @@ -393,8 +393,8 @@ PyAPI_DATA(PyTypeObject) PyUnicode_Type; #define PyUnicode_Check(op) \ - PyType_FastSubclass((op)->ob_type, Py_TPFLAGS_UNICODE_SUBCLASS) -#define PyUnicode_CheckExact(op) ((op)->ob_type == &PyUnicode_Type) + PyType_FastSubclass(Py_Type(op), Py_TPFLAGS_UNICODE_SUBCLASS) +#define PyUnicode_CheckExact(op) (Py_Type(op) == &PyUnicode_Type) /* Fast access macros */ #define PyUnicode_GET_SIZE(op) \ Modified: python/trunk/Include/weakrefobject.h ============================================================================== --- python/trunk/Include/weakrefobject.h (original) +++ python/trunk/Include/weakrefobject.h Sat Jul 21 08:55:02 2007 @@ -44,10 +44,10 @@ #define PyWeakref_CheckRef(op) PyObject_TypeCheck(op, &_PyWeakref_RefType) #define PyWeakref_CheckRefExact(op) \ - ((op)->ob_type == &_PyWeakref_RefType) + (Py_Type(op) == &_PyWeakref_RefType) #define PyWeakref_CheckProxy(op) \ - (((op)->ob_type == 
&_PyWeakref_ProxyType) || \ - ((op)->ob_type == &_PyWeakref_CallableProxyType)) + ((Py_Type(op) == &_PyWeakref_ProxyType) || \ + (Py_Type(op) == &_PyWeakref_CallableProxyType)) /* This macro calls PyWeakref_CheckRef() last since that can involve a function call; this makes it more likely that the function call Modified: python/trunk/Misc/NEWS ============================================================================== --- python/trunk/Misc/NEWS (original) +++ python/trunk/Misc/NEWS Sat Jul 21 08:55:02 2007 @@ -12,6 +12,10 @@ Core and builtins ----------------- +- PEP 3123: Provide forward compatibility with Python 3.0, while keeping + backwards compatibility. Add Py_Refcnt, Py_Type, Py_Size, and + PyVarObject_HEAD_INIT. + - Patch #1673759: add a missing overflow check when formatting floats with %G. Modified: python/trunk/Modules/_bsddb.c ============================================================================== --- python/trunk/Modules/_bsddb.c (original) +++ python/trunk/Modules/_bsddb.c Sat Jul 21 08:55:02 2007 @@ -300,13 +300,13 @@ staticforward PyTypeObject DB_Type, DBCursor_Type, DBEnv_Type, DBTxn_Type, DBLock_Type; -#define DBObject_Check(v) ((v)->ob_type == &DB_Type) -#define DBCursorObject_Check(v) ((v)->ob_type == &DBCursor_Type) -#define DBEnvObject_Check(v) ((v)->ob_type == &DBEnv_Type) -#define DBTxnObject_Check(v) ((v)->ob_type == &DBTxn_Type) -#define DBLockObject_Check(v) ((v)->ob_type == &DBLock_Type) +#define DBObject_Check(v) (Py_Type(v) == &DB_Type) +#define DBCursorObject_Check(v) (Py_Type(v) == &DBCursor_Type) +#define DBEnvObject_Check(v) (Py_Type(v) == &DBEnv_Type) +#define DBTxnObject_Check(v) (Py_Type(v) == &DBTxn_Type) +#define DBLockObject_Check(v) (Py_Type(v) == &DBLock_Type) #if (DBVER >= 43) -#define DBSequenceObject_Check(v) ((v)->ob_type == &DBSequence_Type) +#define DBSequenceObject_Check(v) (Py_Type(v) == &DBSequence_Type) #endif @@ -461,7 +461,7 @@ else { PyErr_Format(PyExc_TypeError, "String or Integer object expected for 
key, %s found", - keyobj->ob_type->tp_name); + Py_Type(keyobj)->tp_name); return 0; } @@ -616,7 +616,7 @@ static void makeTypeError(char* expected, PyObject* found) { PyErr_Format(PyExc_TypeError, "Expected %s argument, %s found.", - expected, found->ob_type->tp_name); + expected, Py_Type(found)->tp_name); } @@ -5666,13 +5666,13 @@ /* Initialize the type of the new type objects here; doing it here is required for portability to Windows without requiring C++. */ - DB_Type.ob_type = &PyType_Type; - DBCursor_Type.ob_type = &PyType_Type; - DBEnv_Type.ob_type = &PyType_Type; - DBTxn_Type.ob_type = &PyType_Type; - DBLock_Type.ob_type = &PyType_Type; + Py_Type(&DB_Type) = &PyType_Type; + Py_Type(&DBCursor_Type) = &PyType_Type; + Py_Type(&DBEnv_Type) = &PyType_Type; + Py_Type(&DBTxn_Type) = &PyType_Type; + Py_Type(&DBLock_Type) = &PyType_Type; #if (DBVER >= 43) - DBSequence_Type.ob_type = &PyType_Type; + Py_Type(&DBSequence_Type) = &PyType_Type; #endif Modified: python/trunk/Modules/_collectionsmodule.c ============================================================================== --- python/trunk/Modules/_collectionsmodule.c (original) +++ python/trunk/Modules/_collectionsmodule.c Sat Jul 21 08:55:02 2007 @@ -544,7 +544,7 @@ } deque->leftblock = NULL; deque->rightblock = NULL; - deque->ob_type->tp_free(deque); + Py_Type(deque)->tp_free(deque); } static int @@ -579,7 +579,7 @@ static PyObject * deque_copy(PyObject *deque) { - return PyObject_CallFunctionObjArgs((PyObject *)(deque->ob_type), + return PyObject_CallFunctionObjArgs((PyObject *)(Py_Type(deque)), deque, NULL); } @@ -601,7 +601,7 @@ Py_DECREF(dict); return NULL; } - result = Py_BuildValue("O()ON", deque->ob_type, dict, it); + result = Py_BuildValue("O()ON", Py_Type(deque), dict, it); Py_DECREF(dict); return result; } @@ -825,8 +825,7 @@ Build an ordered collection accessible from endpoints only."); static PyTypeObject deque_type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) 
"collections.deque", /* tp_name */ sizeof(dequeobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -903,7 +902,7 @@ dequeiter_dealloc(dequeiterobject *dio) { Py_XDECREF(dio->deque); - dio->ob_type->tp_free(dio); + Py_Type(dio)->tp_free(dio); } static PyObject * @@ -948,8 +947,7 @@ }; PyTypeObject dequeiter_type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "deque_iterator", /* tp_name */ sizeof(dequeiterobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -1031,8 +1029,7 @@ } PyTypeObject dequereviter_type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "deque_reverse_iterator", /* tp_name */ sizeof(dequeiterobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -1113,7 +1110,7 @@ whose class constructor has the same signature. Subclasses that define a different constructor signature must override copy(). */ - return PyObject_CallFunctionObjArgs((PyObject *)dd->dict.ob_type, + return PyObject_CallFunctionObjArgs(Py_Type(dd), dd->default_factory, dd, NULL); } @@ -1156,7 +1153,7 @@ Py_DECREF(args); return NULL; } - result = PyTuple_Pack(5, dd->dict.ob_type, args, + result = PyTuple_Pack(5, Py_Type(dd), args, Py_None, Py_None, items); Py_DECREF(items); Py_DECREF(args); @@ -1288,8 +1285,7 @@ #define DEFERRED_ADDRESS(ADDR) 0 static PyTypeObject defdict_type = { - PyObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type)) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) "collections.defaultdict", /* tp_name */ sizeof(defdictobject), /* tp_basicsize */ 0, /* tp_itemsize */ Modified: python/trunk/Modules/_csv.c ============================================================================== --- python/trunk/Modules/_csv.c (original) +++ python/trunk/Modules/_csv.c Sat Jul 21 08:55:02 2007 @@ -125,7 +125,7 @@ staticforward PyTypeObject Reader_Type; -#define ReaderObject_Check(v) ((v)->ob_type == &Reader_Type) +#define ReaderObject_Check(v) (Py_Type(v) == &Reader_Type) typedef struct { 
PyObject_HEAD @@ -310,7 +310,7 @@ Dialect_dealloc(DialectObj *self) { Py_XDECREF(self->lineterminator); - self->ob_type->tp_free((PyObject *)self); + Py_Type(self)->tp_free((PyObject *)self); } static char *dialect_kws[] = { @@ -460,8 +460,7 @@ "The Dialect type records CSV parsing and generation options.\n"); static PyTypeObject Dialect_Type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "_csv.Dialect", /* tp_name */ sizeof(DialectObj), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -869,8 +868,7 @@ static PyTypeObject Reader_Type = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "_csv.reader", /*tp_name*/ sizeof(ReaderObj), /*tp_basicsize*/ 0, /*tp_itemsize*/ @@ -1280,8 +1278,7 @@ ); static PyTypeObject Writer_Type = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "_csv.writer", /*tp_name*/ sizeof(WriterObj), /*tp_basicsize*/ 0, /*tp_itemsize*/ Modified: python/trunk/Modules/_ctypes/_ctypes.c ============================================================================== --- python/trunk/Modules/_ctypes/_ctypes.c (original) +++ python/trunk/Modules/_ctypes/_ctypes.c Sat Jul 21 08:55:02 2007 @@ -333,7 +333,7 @@ Py_INCREF(value); return value; } - ob_name = (ob) ? ob->ob_type->tp_name : "???"; + ob_name = (ob) ? 
Py_Type(ob)->tp_name : "???"; PyErr_Format(PyExc_TypeError, "expected %s instance instead of pointer to %s", ((PyTypeObject *)type)->tp_name, ob_name); @@ -349,7 +349,7 @@ PyErr_Format(PyExc_TypeError, "expected %s instance instead of %s", ((PyTypeObject *)type)->tp_name, - value->ob_type->tp_name); + Py_Type(value)->tp_name); return NULL; } @@ -435,8 +435,7 @@ PyTypeObject StructType_Type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "_ctypes.StructType", /* tp_name */ 0, /* tp_basicsize */ 0, /* tp_itemsize */ @@ -478,8 +477,7 @@ }; static PyTypeObject UnionType_Type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "_ctypes.UnionType", /* tp_name */ 0, /* tp_basicsize */ 0, /* tp_itemsize */ @@ -693,8 +691,7 @@ }; PyTypeObject PointerType_Type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "_ctypes.PointerType", /* tp_name */ 0, /* tp_basicsize */ 0, /* tp_itemsize */ @@ -751,7 +748,7 @@ char *ptr; Py_ssize_t size; if (PyBuffer_Check(value)) { - size = value->ob_type->tp_as_buffer->bf_getreadbuffer(value, 0, (void *)&ptr); + size = Py_Type(value)->tp_as_buffer->bf_getreadbuffer(value, 0, (void *)&ptr); if (size < 0) return -1; } else if (-1 == PyString_AsStringAndSize(value, &ptr, &size)) { @@ -800,7 +797,7 @@ } else if (!PyString_Check(value)) { PyErr_Format(PyExc_TypeError, "string expected instead of %s instance", - value->ob_type->tp_name); + Py_Type(value)->tp_name); return -1; } else Py_INCREF(value); @@ -855,7 +852,7 @@ } else if (!PyUnicode_Check(value)) { PyErr_Format(PyExc_TypeError, "unicode string expected instead of %s instance", - value->ob_type->tp_name); + Py_Type(value)->tp_name); return -1; } else Py_INCREF(value); @@ -1051,8 +1048,7 @@ } PyTypeObject ArrayType_Type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "_ctypes.ArrayType", /* tp_name */ 0, /* tp_basicsize */ 0, /* tp_itemsize */ @@ -1684,8 
+1680,7 @@ }; PyTypeObject SimpleType_Type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "_ctypes.SimpleType", /* tp_name */ 0, /* tp_basicsize */ 0, /* tp_itemsize */ @@ -1899,8 +1894,7 @@ } PyTypeObject CFuncPtrType_Type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "_ctypes.CFuncPtrType", /* tp_name */ 0, /* tp_basicsize */ 0, /* tp_itemsize */ @@ -2076,7 +2070,7 @@ CData_dealloc(PyObject *self) { CData_clear((CDataObject *)self); - self->ob_type->tp_free(self); + Py_Type(self)->tp_free(self); } static PyMemberDef CData_members[] = { @@ -2143,8 +2137,7 @@ }; PyTypeObject CData_Type = { - PyObject_HEAD_INIT(NULL) - 0, + PyVarObject_HEAD_INIT(NULL, 0) "_ctypes._CData", sizeof(CDataObject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -2350,7 +2343,7 @@ PyErr_Format(PyExc_TypeError, "expected %s instance, got %s", ((PyTypeObject *)type)->tp_name, - value->ob_type->tp_name); + Py_Type(value)->tp_name); return NULL; } } @@ -2381,7 +2374,7 @@ if (p1->proto != p2->proto) { PyErr_Format(PyExc_TypeError, "incompatible types, %s instance instead of %s instance", - value->ob_type->tp_name, + Py_Type(value)->tp_name, ((PyTypeObject *)type)->tp_name); return NULL; } @@ -2400,7 +2393,7 @@ } PyErr_Format(PyExc_TypeError, "incompatible types, %s instance instead of %s instance", - value->ob_type->tp_name, + Py_Type(value)->tp_name, ((PyTypeObject *)type)->tp_name); return NULL; } @@ -2661,7 +2654,7 @@ Py_SAFE_DOWNCAST(index, Py_ssize_t, int), PyType_Check(arg) ? 
((PyTypeObject *)arg)->tp_name : - arg->ob_type->tp_name); + Py_Type(arg)->tp_name); return 0; } @@ -3447,7 +3440,7 @@ CFuncPtr_dealloc(CFuncPtrObject *self) { CFuncPtr_clear(self); - self->ob_type->tp_free((PyObject *)self); + Py_Type(self)->tp_free((PyObject *)self); } static PyObject * @@ -3457,17 +3450,16 @@ if (self->index) return PyString_FromFormat("", self->index - 0x1000, - self->ob_type->tp_name, + Py_Type(self)->tp_name, self); #endif return PyString_FromFormat("<%s object at %p>", - self->ob_type->tp_name, + Py_Type(self)->tp_name, self); } PyTypeObject CFuncPtr_Type = { - PyObject_HEAD_INIT(NULL) - 0, + PyVarObject_HEAD_INIT(NULL, 0) "_ctypes.CFuncPtr", sizeof(CFuncPtrObject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -3595,8 +3587,7 @@ } static PyTypeObject Struct_Type = { - PyObject_HEAD_INIT(NULL) - 0, + PyVarObject_HEAD_INIT(NULL, 0) "_ctypes.Structure", sizeof(CDataObject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -3638,8 +3629,7 @@ }; static PyTypeObject Union_Type = { - PyObject_HEAD_INIT(NULL) - 0, + PyVarObject_HEAD_INIT(NULL, 0) "_ctypes.Union", sizeof(CDataObject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -3871,8 +3861,7 @@ }; PyTypeObject Array_Type = { - PyObject_HEAD_INIT(NULL) - 0, + PyVarObject_HEAD_INIT(NULL, 0) "_ctypes.Array", sizeof(CDataObject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -4025,7 +4014,7 @@ static PyObject * Simple_from_outparm(PyObject *self, PyObject *args) { - if (IsSimpleSubType((PyObject *)self->ob_type)) { + if (IsSimpleSubType((PyObject *)Py_Type(self))) { Py_INCREF(self); return self; } @@ -4090,9 +4079,9 @@ PyObject *val, *name, *args, *result; static PyObject *format; - if (self->ob_type->tp_base != &Simple_Type) { + if (Py_Type(self)->tp_base != &Simple_Type) { return PyString_FromFormat("<%s object at %p>", - self->ob_type->tp_name, self); + Py_Type(self)->tp_name, self); } if (format == NULL) { @@ -4105,7 +4094,7 @@ if (val == NULL) return NULL; - name = PyString_FromString(self->ob_type->tp_name); 
+ name = PyString_FromString(Py_Type(self)->tp_name); if (name == NULL) { Py_DECREF(val); return NULL; @@ -4123,8 +4112,7 @@ } static PyTypeObject Simple_Type = { - PyObject_HEAD_INIT(NULL) - 0, + PyVarObject_HEAD_INIT(NULL, 0) "_ctypes._SimpleCData", sizeof(CDataObject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -4277,7 +4265,7 @@ PyErr_Format(PyExc_TypeError, "expected %s instead of %s", ((PyTypeObject *)(stgdict->proto))->tp_name, - value->ob_type->tp_name); + Py_Type(value)->tp_name); return -1; } @@ -4406,8 +4394,7 @@ }; PyTypeObject Pointer_Type = { - PyObject_HEAD_INIT(NULL) - 0, + PyVarObject_HEAD_INIT(NULL, 0) "_ctypes._Pointer", sizeof(CDataObject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -4585,7 +4572,7 @@ "cast() argument 2 must be a pointer type, not %s", PyType_Check(arg) ? ((PyTypeObject *)arg)->tp_name - : arg->ob_type->tp_name); + : Py_Type(arg)->tp_name); return 0; } @@ -4712,37 +4699,37 @@ if (PyType_Ready(&CData_Type) < 0) return; - Struct_Type.ob_type = &StructType_Type; + Py_Type(&Struct_Type) = &StructType_Type; Struct_Type.tp_base = &CData_Type; if (PyType_Ready(&Struct_Type) < 0) return; PyModule_AddObject(m, "Structure", (PyObject *)&Struct_Type); - Union_Type.ob_type = &UnionType_Type; + Py_Type(&Union_Type) = &UnionType_Type; Union_Type.tp_base = &CData_Type; if (PyType_Ready(&Union_Type) < 0) return; PyModule_AddObject(m, "Union", (PyObject *)&Union_Type); - Pointer_Type.ob_type = &PointerType_Type; + Py_Type(&Pointer_Type) = &PointerType_Type; Pointer_Type.tp_base = &CData_Type; if (PyType_Ready(&Pointer_Type) < 0) return; PyModule_AddObject(m, "_Pointer", (PyObject *)&Pointer_Type); - Array_Type.ob_type = &ArrayType_Type; + Py_Type(&Array_Type) = &ArrayType_Type; Array_Type.tp_base = &CData_Type; if (PyType_Ready(&Array_Type) < 0) return; PyModule_AddObject(m, "Array", (PyObject *)&Array_Type); - Simple_Type.ob_type = &SimpleType_Type; + Py_Type(&Simple_Type) = &SimpleType_Type; Simple_Type.tp_base = &CData_Type; if 
(PyType_Ready(&Simple_Type) < 0) return; PyModule_AddObject(m, "_SimpleCData", (PyObject *)&Simple_Type); - CFuncPtr_Type.ob_type = &CFuncPtrType_Type; + Py_Type(&CFuncPtr_Type) = &CFuncPtrType_Type; CFuncPtr_Type.tp_base = &CData_Type; if (PyType_Ready(&CFuncPtr_Type) < 0) return; Modified: python/trunk/Modules/_ctypes/callproc.c ============================================================================== --- python/trunk/Modules/_ctypes/callproc.c (original) +++ python/trunk/Modules/_ctypes/callproc.c Sat Jul 21 08:55:02 2007 @@ -381,8 +381,7 @@ }; PyTypeObject PyCArg_Type = { - PyObject_HEAD_INIT(NULL) - 0, + PyVarObject_HEAD_INIT(NULL, 0) "CArgObject", sizeof(PyCArgObject), 0, @@ -1180,7 +1179,7 @@ if (!CDataObject_Check(pcom) || (pcom->b_size != sizeof(void *))) { PyErr_Format(PyExc_TypeError, "COM Pointer expected instead of %s instance", - pcom->ob_type->tp_name); + Py_Type(pcom)->tp_name); return NULL; } @@ -1420,7 +1419,7 @@ if (!CDataObject_Check(obj)) { PyErr_Format(PyExc_TypeError, "byref() argument must be a ctypes instance, not '%s'", - obj->ob_type->tp_name); + Py_Type(obj)->tp_name); return NULL; } Modified: python/trunk/Modules/_ctypes/cfield.c ============================================================================== --- python/trunk/Modules/_ctypes/cfield.c (original) +++ python/trunk/Modules/_ctypes/cfield.c Sat Jul 21 08:55:02 2007 @@ -286,8 +286,7 @@ } PyTypeObject CField_Type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "_ctypes.CField", /* tp_name */ sizeof(CFieldObject), /* tp_basicsize */ 0, /* tp_itemsize */ Modified: python/trunk/Modules/_ctypes/stgdict.c ============================================================================== --- python/trunk/Modules/_ctypes/stgdict.c (original) +++ python/trunk/Modules/_ctypes/stgdict.c Sat Jul 21 08:55:02 2007 @@ -83,8 +83,7 @@ } PyTypeObject StgDict_Type = { - PyObject_HEAD_INIT(NULL) - 0, + PyVarObject_HEAD_INIT(NULL, 0) "StgDict", 
sizeof(StgDictObject), 0, @@ -192,7 +191,7 @@ Py_DECREF(fieldlist); return -1; } - if (fdescr->ob_type != &CField_Type) { + if (Py_Type(fdescr) != &CField_Type) { PyErr_SetString(PyExc_TypeError, "unexpected type"); Py_DECREF(fdescr); Py_DECREF(fieldlist); @@ -215,7 +214,7 @@ Py_DECREF(fieldlist); return -1; } - assert(new_descr->ob_type == &CField_Type); + assert(Py_Type(new_descr) == &CField_Type); new_descr->size = fdescr->size; new_descr->offset = fdescr->offset + offset; new_descr->index = fdescr->index + index; @@ -263,7 +262,7 @@ Py_DECREF(anon_names); return -1; } - assert(descr->ob_type == &CField_Type); + assert(Py_Type(descr) == &CField_Type); descr->anonymous = 1; /* descr is in the field descriptor. */ Modified: python/trunk/Modules/_curses_panel.c ============================================================================== --- python/trunk/Modules/_curses_panel.c (original) +++ python/trunk/Modules/_curses_panel.c Sat Jul 21 08:55:02 2007 @@ -56,7 +56,7 @@ PyTypeObject PyCursesPanel_Type; -#define PyCursesPanel_Check(v) ((v)->ob_type == &PyCursesPanel_Type) +#define PyCursesPanel_Check(v) (Py_Type(v) == &PyCursesPanel_Type) /* Some helper functions. The problem is that there's always a window associated with a panel. 
To ensure that Python's GC doesn't pull @@ -338,8 +338,7 @@ /* -------------------------------------------------------*/ PyTypeObject PyCursesPanel_Type = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "_curses_panel.curses panel", /*tp_name*/ sizeof(PyCursesPanelObject), /*tp_basicsize*/ 0, /*tp_itemsize*/ @@ -458,7 +457,7 @@ PyObject *m, *d, *v; /* Initialize object type */ - PyCursesPanel_Type.ob_type = &PyType_Type; + Py_Type(&PyCursesPanel_Type) = &PyType_Type; import_curses(); Modified: python/trunk/Modules/_cursesmodule.c ============================================================================== --- python/trunk/Modules/_cursesmodule.c (original) +++ python/trunk/Modules/_cursesmodule.c Sat Jul 21 08:55:02 2007 @@ -1566,8 +1566,7 @@ /* -------------------------------------------------------*/ PyTypeObject PyCursesWindow_Type = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "_curses.curses window", /*tp_name*/ sizeof(PyCursesWindowObject), /*tp_basicsize*/ 0, /*tp_itemsize*/ @@ -2657,7 +2656,7 @@ static void *PyCurses_API[PyCurses_API_pointers]; /* Initialize object type */ - PyCursesWindow_Type.ob_type = &PyType_Type; + Py_Type(&PyCursesWindow_Type) = &PyType_Type; /* Initialize the C API pointer array */ PyCurses_API[0] = (void *)&PyCursesWindow_Type; Modified: python/trunk/Modules/_elementtree.c ============================================================================== --- python/trunk/Modules/_elementtree.c (original) +++ python/trunk/Modules/_elementtree.c Sat Jul 21 08:55:02 2007 @@ -269,7 +269,7 @@ staticforward PyTypeObject Element_Type; -#define Element_CheckExact(op) ((op)->ob_type == &Element_Type) +#define Element_CheckExact(op) (Py_Type(op) == &Element_Type) /* -------------------------------------------------------------------- */ /* element constructor and destructor */ @@ -1207,7 +1207,7 @@ /* FIXME: support arbitrary sequences? 
*/ PyErr_Format( PyExc_TypeError, - "expected list, not \"%.200s\"", item->ob_type->tp_name + "expected list, not \"%.200s\"", Py_Type(item)->tp_name ); return -1; } @@ -1440,7 +1440,7 @@ staticforward PyTypeObject TreeBuilder_Type; -#define TreeBuilder_CheckExact(op) ((op)->ob_type == &TreeBuilder_Type) +#define TreeBuilder_CheckExact(op) (Py_Type(op) == &TreeBuilder_Type) /* -------------------------------------------------------------------- */ /* constructor and destructor */ @@ -1607,7 +1607,7 @@ Py_INCREF(data); self->data = data; } else { /* more than one item; use a list to collect items */ - if (PyString_CheckExact(self->data) && self->data->ob_refcnt == 1 && + if (PyString_CheckExact(self->data) && Py_Refcnt(self->data) == 1 && PyString_CheckExact(data) && PyString_GET_SIZE(data) == 1) { /* expat often generates single character data sections; handle the most common case by resizing the existing string... */ @@ -2623,9 +2623,9 @@ #endif /* Patch object type */ - Element_Type.ob_type = TreeBuilder_Type.ob_type = &PyType_Type; + Py_Type(&Element_Type) = Py_Type(&TreeBuilder_Type) = &PyType_Type; #if defined(USE_EXPAT) - XMLParser_Type.ob_type = &PyType_Type; + Py_Type(&XMLParser_Type) = &PyType_Type; #endif m = Py_InitModule("_elementtree", _functions); Modified: python/trunk/Modules/_functoolsmodule.c ============================================================================== --- python/trunk/Modules/_functoolsmodule.c (original) +++ python/trunk/Modules/_functoolsmodule.c Sat Jul 21 08:55:02 2007 @@ -81,7 +81,7 @@ Py_XDECREF(pto->args); Py_XDECREF(pto->kw); Py_XDECREF(pto->dict); - pto->ob_type->tp_free(pto); + Py_Type(pto)->tp_free(pto); } static PyObject * @@ -197,8 +197,7 @@ }; static PyTypeObject partial_type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "functools.partial", /* tp_name */ sizeof(partialobject), /* tp_basicsize */ 0, /* tp_itemsize */ Modified: python/trunk/Modules/_hashopenssl.c 
============================================================================== --- python/trunk/Modules/_hashopenssl.c (original) +++ python/trunk/Modules/_hashopenssl.c Sat Jul 21 08:55:02 2007 @@ -281,8 +281,7 @@ digest_size -- number of bytes in this hashes output\n"); static PyTypeObject EVPtype = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "_hashlib.HASH", /*tp_name*/ sizeof(EVPobject), /*tp_basicsize*/ 0, /*tp_itemsize*/ @@ -464,7 +463,7 @@ * but having some be unsupported. Only init appropriate * constants. */ - EVPtype.ob_type = &PyType_Type; + Py_Type(&EVPtype) = &PyType_Type; if (PyType_Ready(&EVPtype) < 0) return; Modified: python/trunk/Modules/_hotshot.c ============================================================================== --- python/trunk/Modules/_hotshot.c (original) +++ python/trunk/Modules/_hotshot.c Sat Jul 21 08:55:02 2007 @@ -1220,8 +1220,7 @@ "linetimings: True if line events collect timing information."); static PyTypeObject ProfilerType = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "_hotshot.ProfilerType", /* tp_name */ (int) sizeof(ProfilerObject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -1305,8 +1304,7 @@ }; static PyTypeObject LogReaderType = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "_hotshot.LogReaderType", /* tp_name */ (int) sizeof(LogReaderObject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -1613,8 +1611,8 @@ { PyObject *module; - LogReaderType.ob_type = &PyType_Type; - ProfilerType.ob_type = &PyType_Type; + Py_Type(&LogReaderType) = &PyType_Type; + Py_Type(&ProfilerType) = &PyType_Type; module = Py_InitModule("_hotshot", functions); if (module != NULL) { char *s = get_version_string(); Modified: python/trunk/Modules/_lsprof.c ============================================================================== --- python/trunk/Modules/_lsprof.c (original) +++ python/trunk/Modules/_lsprof.c Sat Jul 21 08:55:02 2007 @@ 
-120,7 +120,7 @@ staticforward PyTypeObject PyProfiler_Type; #define PyProfiler_Check(op) PyObject_TypeCheck(op, &PyProfiler_Type) -#define PyProfiler_CheckExact(op) ((op)->ob_type == &PyProfiler_Type) +#define PyProfiler_CheckExact(op) (Py_Type(op) == &PyProfiler_Type) /*** External Timers ***/ @@ -207,7 +207,7 @@ PyObject *self = fn->m_self; PyObject *name = PyString_FromString(fn->m_ml->ml_name); if (name != NULL) { - PyObject *mo = _PyType_Lookup(self->ob_type, name); + PyObject *mo = _PyType_Lookup(Py_Type(self), name); Py_XINCREF(mo); Py_DECREF(name); if (mo != NULL) { @@ -744,7 +744,7 @@ flush_unmatched(op); clearEntries(op); Py_XDECREF(op->externalTimer); - op->ob_type->tp_free(op); + Py_Type(op)->tp_free(op); } static int Modified: python/trunk/Modules/_randommodule.c ============================================================================== --- python/trunk/Modules/_randommodule.c (original) +++ python/trunk/Modules/_randommodule.c Sat Jul 21 08:55:02 2007 @@ -84,7 +84,7 @@ static PyTypeObject Random_Type; -#define RandomObject_Check(v) ((v)->ob_type == &Random_Type) +#define RandomObject_Check(v) (Py_Type(v) == &Random_Type) /* Random methods */ @@ -404,7 +404,7 @@ if (!PyInt_Check(n) && !PyLong_Check(n)) { PyErr_Format(PyExc_TypeError, "jumpahead requires an " "integer, not '%s'", - n->ob_type->tp_name); + Py_Type(n)->tp_name); return NULL; } @@ -518,8 +518,7 @@ "Random() -> create a random number generator with its own internal state."); static PyTypeObject Random_Type = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "_random.Random", /*tp_name*/ sizeof(RandomObject), /*tp_basicsize*/ 0, /*tp_itemsize*/ Modified: python/trunk/Modules/_sqlite/cache.c ============================================================================== --- python/trunk/Modules/_sqlite/cache.c (original) +++ python/trunk/Modules/_sqlite/cache.c Sat Jul 21 08:55:02 2007 @@ -51,7 +51,7 @@ Py_DECREF(self->key); Py_DECREF(self->data); - 
self->ob_type->tp_free((PyObject*)self); + Py_Type(self)->tp_free((PyObject*)self); } int pysqlite_cache_init(pysqlite_Cache* self, PyObject* args, PyObject* kwargs) @@ -109,7 +109,7 @@ } Py_DECREF(self->mapping); - self->ob_type->tp_free((PyObject*)self); + Py_Type(self)->tp_free((PyObject*)self); } PyObject* pysqlite_cache_get(pysqlite_Cache* self, PyObject* args) @@ -274,8 +274,7 @@ }; PyTypeObject pysqlite_NodeType = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) MODULE_NAME "Node", /* tp_name */ sizeof(pysqlite_Node), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -317,8 +316,7 @@ }; PyTypeObject pysqlite_CacheType = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) MODULE_NAME ".Cache", /* tp_name */ sizeof(pysqlite_Cache), /* tp_basicsize */ 0, /* tp_itemsize */ Modified: python/trunk/Modules/_sqlite/connection.c ============================================================================== --- python/trunk/Modules/_sqlite/connection.c (original) +++ python/trunk/Modules/_sqlite/connection.c Sat Jul 21 08:55:02 2007 @@ -205,7 +205,7 @@ Py_XDECREF(self->collations); Py_XDECREF(self->statements); - self->ob_type->tp_free((PyObject*)self); + Py_Type(self)->tp_free((PyObject*)self); } PyObject* pysqlite_connection_cursor(pysqlite_Connection* self, PyObject* args, PyObject* kwargs) @@ -1206,8 +1206,7 @@ }; PyTypeObject pysqlite_ConnectionType = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) MODULE_NAME ".Connection", /* tp_name */ sizeof(pysqlite_Connection), /* tp_basicsize */ 0, /* tp_itemsize */ Modified: python/trunk/Modules/_sqlite/cursor.c ============================================================================== --- python/trunk/Modules/_sqlite/cursor.c (original) +++ python/trunk/Modules/_sqlite/cursor.c Sat Jul 21 08:55:02 2007 @@ -134,7 +134,7 @@ Py_XDECREF(self->row_factory); Py_XDECREF(self->next_row); - self->ob_type->tp_free((PyObject*)self); + 
Py_Type(self)->tp_free((PyObject*)self); } PyObject* _pysqlite_get_converter(PyObject* key) @@ -1020,8 +1020,7 @@ PyDoc_STR("SQLite database cursor class."); PyTypeObject pysqlite_CursorType = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) MODULE_NAME ".Cursor", /* tp_name */ sizeof(pysqlite_Cursor), /* tp_basicsize */ 0, /* tp_itemsize */ Modified: python/trunk/Modules/_sqlite/prepare_protocol.c ============================================================================== --- python/trunk/Modules/_sqlite/prepare_protocol.c (original) +++ python/trunk/Modules/_sqlite/prepare_protocol.c Sat Jul 21 08:55:02 2007 @@ -30,12 +30,11 @@ void pysqlite_prepare_protocol_dealloc(pysqlite_PrepareProtocol* self) { - self->ob_type->tp_free((PyObject*)self); + Py_Type(self)->tp_free((PyObject*)self); } PyTypeObject pysqlite_PrepareProtocolType= { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) MODULE_NAME ".PrepareProtocol", /* tp_name */ sizeof(pysqlite_PrepareProtocol), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -79,6 +78,6 @@ extern int pysqlite_prepare_protocol_setup_types(void) { pysqlite_PrepareProtocolType.tp_new = PyType_GenericNew; - pysqlite_PrepareProtocolType.ob_type= &PyType_Type; + Py_Type(&pysqlite_PrepareProtocolType)= &PyType_Type; return PyType_Ready(&pysqlite_PrepareProtocolType); } Modified: python/trunk/Modules/_sqlite/row.c ============================================================================== --- python/trunk/Modules/_sqlite/row.c (original) +++ python/trunk/Modules/_sqlite/row.c Sat Jul 21 08:55:02 2007 @@ -30,7 +30,7 @@ Py_XDECREF(self->data); Py_XDECREF(self->description); - self->ob_type->tp_free((PyObject*)self); + Py_Type(self)->tp_free((PyObject*)self); } int pysqlite_row_init(pysqlite_Row* self, PyObject* args, PyObject* kwargs) @@ -183,8 +183,7 @@ PyTypeObject pysqlite_RowType = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) MODULE_NAME 
".Row", /* tp_name */ sizeof(pysqlite_Row), /* tp_basicsize */ 0, /* tp_itemsize */ Modified: python/trunk/Modules/_sqlite/statement.c ============================================================================== --- python/trunk/Modules/_sqlite/statement.c (original) +++ python/trunk/Modules/_sqlite/statement.c Sat Jul 21 08:55:02 2007 @@ -309,7 +309,7 @@ PyObject_ClearWeakRefs((PyObject*)self); } - self->ob_type->tp_free((PyObject*)self); + Py_Type(self)->tp_free((PyObject*)self); } /* @@ -383,8 +383,7 @@ } PyTypeObject pysqlite_StatementType = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) MODULE_NAME ".Statement", /* tp_name */ sizeof(pysqlite_Statement), /* tp_basicsize */ 0, /* tp_itemsize */ Modified: python/trunk/Modules/_sre.c ============================================================================== --- python/trunk/Modules/_sre.c (original) +++ python/trunk/Modules/_sre.c Sat Jul 21 08:55:02 2007 @@ -1689,7 +1689,7 @@ #endif /* get pointer to string buffer */ - buffer = string->ob_type->tp_as_buffer; + buffer = Py_Type(string)->tp_as_buffer; if (!buffer || !buffer->bf_getreadbuffer || !buffer->bf_getsegcount || buffer->bf_getsegcount(string, NULL) != 1) { PyErr_SetString(PyExc_TypeError, "expected string or buffer"); Modified: python/trunk/Modules/_ssl.c ============================================================================== --- python/trunk/Modules/_ssl.c (original) +++ python/trunk/Modules/_ssl.c Sat Jul 21 08:55:02 2007 @@ -72,7 +72,7 @@ static int check_socket_and_wait_for_timeout(PySocketSockObject *s, int writing); -#define PySSLObject_Check(v) ((v)->ob_type == &PySSL_Type) +#define PySSLObject_Check(v) (Py_Type(v) == &PySSL_Type) typedef enum { SOCKET_IS_NONBLOCKING, @@ -570,8 +570,7 @@ } static PyTypeObject PySSL_Type = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "socket.SSL", /*tp_name*/ sizeof(PySSLObject), /*tp_basicsize*/ 0, /*tp_itemsize*/ @@ -632,7 +631,7 @@ if 
(!PyString_Check(arg)) return PyErr_Format(PyExc_TypeError, "RAND_egd() expected string, found %s", - arg->ob_type->tp_name); + Py_Type(arg)->tp_name); bytes = RAND_egd(PyString_AS_STRING(arg)); if (bytes == -1) { PyErr_SetString(PySSLErrorObject, @@ -678,7 +677,7 @@ { PyObject *m, *d; - PySSL_Type.ob_type = &PyType_Type; + Py_Type(&PySSL_Type) = &PyType_Type; m = Py_InitModule3("_ssl", PySSL_methods, module_doc); if (m == NULL) Modified: python/trunk/Modules/_struct.c ============================================================================== --- python/trunk/Modules/_struct.c (original) +++ python/trunk/Modules/_struct.c Sat Jul 21 08:55:02 2007 @@ -72,7 +72,7 @@ #define PyStruct_Check(op) PyObject_TypeCheck(op, &PyStructType) -#define PyStruct_CheckExact(op) ((op)->ob_type == &PyStructType) +#define PyStruct_CheckExact(op) (Py_Type(op) == &PyStructType) /* Exception */ @@ -133,7 +133,7 @@ Py_INCREF(v); return v; } - m = v->ob_type->tp_as_number; + m = Py_Type(v)->tp_as_number; if (m != NULL && m->nb_long != NULL) { v = m->nb_long(v); if (v == NULL) @@ -1487,7 +1487,7 @@ PyMem_FREE(s->s_codes); } Py_XDECREF(s->s_format); - s->ob_type->tp_free((PyObject *)s); + Py_Type(s)->tp_free((PyObject *)s); } static PyObject * @@ -1806,8 +1806,7 @@ static PyTypeObject PyStructType = { - PyObject_HEAD_INIT(NULL) - 0, + PyVarObject_HEAD_INIT(NULL, 0) "Struct", sizeof(PyStructObject), 0, @@ -1857,7 +1856,7 @@ if (m == NULL) return; - PyStructType.ob_type = &PyType_Type; + Py_Type(&PyStructType) = &PyType_Type; if (PyType_Ready(&PyStructType) < 0) return; Modified: python/trunk/Modules/_testcapimodule.c ============================================================================== --- python/trunk/Modules/_testcapimodule.c (original) +++ python/trunk/Modules/_testcapimodule.c Sat Jul 21 08:55:02 2007 @@ -839,8 +839,7 @@ } static PyTypeObject test_structmembersType = { - PyObject_HEAD_INIT(NULL) - 0, + PyVarObject_HEAD_INIT(NULL, 0) "test_structmembersType", 
sizeof(test_structmembers), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -890,7 +889,7 @@ if (m == NULL) return; - test_structmembersType.ob_type=&PyType_Type; + Py_Type(&test_structmembersType)=&PyType_Type; Py_INCREF(&test_structmembersType); PyModule_AddObject(m, "test_structmembersType", (PyObject *)&test_structmembersType); Modified: python/trunk/Modules/_tkinter.c ============================================================================== --- python/trunk/Modules/_tkinter.c (original) +++ python/trunk/Modules/_tkinter.c Sat Jul 21 08:55:02 2007 @@ -262,12 +262,12 @@ Tcl_ObjType *StringType; } TkappObject; -#define Tkapp_Check(v) ((v)->ob_type == &Tkapp_Type) +#define Tkapp_Check(v) (Py_Type(v) == &Tkapp_Type) #define Tkapp_Interp(v) (((TkappObject *) (v))->interp) #define Tkapp_Result(v) Tcl_GetStringResult(Tkapp_Interp(v)) #define DEBUG_REFCNT(v) (printf("DEBUG: id=%p, refcnt=%i\n", \ -(void *) v, ((PyObject *) v)->ob_refcnt)) +(void *) v, Py_Refcnt(v))) @@ -2420,8 +2420,7 @@ static PyTypeObject Tktt_Type = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "tktimertoken", /*tp_name */ sizeof(TkttObject), /*tp_basicsize */ 0, /*tp_itemsize */ @@ -2765,8 +2764,7 @@ static PyTypeObject Tkapp_Type = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "tkapp", /*tp_name */ sizeof(TkappObject), /*tp_basicsize */ 0, /*tp_itemsize */ @@ -3105,7 +3103,7 @@ { PyObject *m, *d; - Tkapp_Type.ob_type = &PyType_Type; + Py_Type(&Tkapp_Type) = &PyType_Type; #ifdef WITH_THREAD tcl_lock = PyThread_allocate_lock(); @@ -3133,10 +3131,10 @@ PyDict_SetItemString(d, "TkappType", (PyObject *)&Tkapp_Type); - Tktt_Type.ob_type = &PyType_Type; + Py_Type(&Tktt_Type) = &PyType_Type; PyDict_SetItemString(d, "TkttType", (PyObject *)&Tktt_Type); - PyTclObject_Type.ob_type = &PyType_Type; + Py_Type(&PyTclObject_Type) = &PyType_Type; PyDict_SetItemString(d, "Tcl_Obj", (PyObject *)&PyTclObject_Type); #ifdef TK_AQUA Modified: 
python/trunk/Modules/_typesmodule.c ============================================================================== --- python/trunk/Modules/_typesmodule.c (original) +++ python/trunk/Modules/_typesmodule.c Sat Jul 21 08:55:02 2007 @@ -33,8 +33,7 @@ }; static PyTypeObject HelperType = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "_types.Helper", /* tp_name */ sizeof(Helper), /* tp_basicsize */ 0, /* tp_itemsize */ Modified: python/trunk/Modules/_weakref.c ============================================================================== --- python/trunk/Modules/_weakref.c (original) +++ python/trunk/Modules/_weakref.c Sat Jul 21 08:55:02 2007 @@ -14,7 +14,7 @@ { PyObject *result = NULL; - if (PyType_SUPPORTS_WEAKREFS(object->ob_type)) { + if (PyType_SUPPORTS_WEAKREFS(Py_Type(object))) { PyWeakReference **list = GET_WEAKREFS_LISTPTR(object); result = PyInt_FromSsize_t(_PyWeakref_GetWeakrefCount(*list)); @@ -35,7 +35,7 @@ { PyObject *result = NULL; - if (PyType_SUPPORTS_WEAKREFS(object->ob_type)) { + if (PyType_SUPPORTS_WEAKREFS(Py_Type(object))) { PyWeakReference **list = GET_WEAKREFS_LISTPTR(object); Py_ssize_t count = _PyWeakref_GetWeakrefCount(*list); Modified: python/trunk/Modules/arraymodule.c ============================================================================== --- python/trunk/Modules/arraymodule.c (original) +++ python/trunk/Modules/arraymodule.c Sat Jul 21 08:55:02 2007 @@ -39,7 +39,7 @@ static PyTypeObject Arraytype; #define array_Check(op) PyObject_TypeCheck(op, &Arraytype) -#define array_CheckExact(op) ((op)->ob_type == &Arraytype) +#define array_CheckExact(op) (Py_Type(op) == &Arraytype) static int array_resize(arrayobject *self, Py_ssize_t newsize) @@ -53,9 +53,9 @@ */ if (self->allocated >= newsize && - self->ob_size < newsize + 16 && + Py_Size(self) < newsize + 16 && self->ob_item != NULL) { - self->ob_size = newsize; + Py_Size(self) = newsize; return 0; } @@ -71,7 +71,7 @@ * memory critical. 
*/ - _new_size = (newsize >> 4) + (self->ob_size < 8 ? 3 : 7) + newsize; + _new_size = (newsize >> 4) + (Py_Size(self) < 8 ? 3 : 7) + newsize; items = self->ob_item; /* XXX The following multiplication and division does not optimize away like it does for lists since the size is not known at compile time */ @@ -84,7 +84,7 @@ return -1; } self->ob_item = items; - self->ob_size = newsize; + Py_Size(self) = newsize; self->allocated = _new_size; return 0; } @@ -432,7 +432,7 @@ if (op == NULL) { return NULL; } - op->ob_size = size; + Py_Size(op) = size; if (size <= 0) { op->ob_item = NULL; } @@ -455,7 +455,7 @@ register arrayobject *ap; assert(array_Check(op)); ap = (arrayobject *)op; - assert(i>=0 && iob_size); + assert(i>=0 && iob_descr->getitem)(ap, i); } @@ -463,7 +463,7 @@ ins1(arrayobject *self, Py_ssize_t where, PyObject *v) { char *items; - Py_ssize_t n = self->ob_size; + Py_ssize_t n = Py_Size(self); if (v == NULL) { PyErr_BadInternalCall(); return -1; @@ -498,7 +498,7 @@ PyObject_ClearWeakRefs((PyObject *) op); if (op->ob_item != NULL) PyMem_DEL(op->ob_item); - op->ob_type->tp_free((PyObject *)op); + Py_Type(op)->tp_free((PyObject *)op); } static PyObject * @@ -518,7 +518,7 @@ va = (arrayobject *)v; wa = (arrayobject *)w; - if (va->ob_size != wa->ob_size && (op == Py_EQ || op == Py_NE)) { + if (Py_Size(va) != Py_Size(wa) && (op == Py_EQ || op == Py_NE)) { /* Shortcut: if the lengths differ, the arrays differ */ if (op == Py_EQ) res = Py_False; @@ -530,7 +530,7 @@ /* Search for the first index where items are different */ k = 1; - for (i = 0; i < va->ob_size && i < wa->ob_size; i++) { + for (i = 0; i < Py_Size(va) && i < Py_Size(wa); i++) { vi = getarrayitem(v, i); wi = getarrayitem(w, i); if (vi == NULL || wi == NULL) { @@ -549,8 +549,8 @@ if (k) { /* No more items to compare -- compare sizes */ - Py_ssize_t vs = va->ob_size; - Py_ssize_t ws = wa->ob_size; + Py_ssize_t vs = Py_Size(va); + Py_ssize_t ws = Py_Size(wa); int cmp; switch (op) { case Py_LT: cmp = vs 
< ws; break; @@ -590,13 +590,13 @@ static Py_ssize_t array_length(arrayobject *a) { - return a->ob_size; + return Py_Size(a); } static PyObject * array_item(arrayobject *a, Py_ssize_t i) { - if (i < 0 || i >= a->ob_size) { + if (i < 0 || i >= Py_Size(a)) { PyErr_SetString(PyExc_IndexError, "array index out of range"); return NULL; } @@ -609,14 +609,14 @@ arrayobject *np; if (ilow < 0) ilow = 0; - else if (ilow > a->ob_size) - ilow = a->ob_size; + else if (ilow > Py_Size(a)) + ilow = Py_Size(a); if (ihigh < 0) ihigh = 0; if (ihigh < ilow) ihigh = ilow; - else if (ihigh > a->ob_size) - ihigh = a->ob_size; + else if (ihigh > Py_Size(a)) + ihigh = Py_Size(a); np = (arrayobject *) newarrayobject(&Arraytype, ihigh - ilow, a->ob_descr); if (np == NULL) return NULL; @@ -628,7 +628,7 @@ static PyObject * array_copy(arrayobject *a, PyObject *unused) { - return array_slice(a, 0, a->ob_size); + return array_slice(a, 0, Py_Size(a)); } PyDoc_STRVAR(copy_doc, @@ -644,7 +644,7 @@ if (!array_Check(bb)) { PyErr_Format(PyExc_TypeError, "can only append array (not \"%.200s\") to array", - bb->ob_type->tp_name); + Py_Type(bb)->tp_name); return NULL; } #define b ((arrayobject *)bb) @@ -652,14 +652,14 @@ PyErr_BadArgument(); return NULL; } - size = a->ob_size + b->ob_size; + size = Py_Size(a) + Py_Size(b); np = (arrayobject *) newarrayobject(&Arraytype, size, a->ob_descr); if (np == NULL) { return NULL; } - memcpy(np->ob_item, a->ob_item, a->ob_size*a->ob_descr->itemsize); - memcpy(np->ob_item + a->ob_size*a->ob_descr->itemsize, - b->ob_item, b->ob_size*b->ob_descr->itemsize); + memcpy(np->ob_item, a->ob_item, Py_Size(a)*a->ob_descr->itemsize); + memcpy(np->ob_item + Py_Size(a)*a->ob_descr->itemsize, + b->ob_item, Py_Size(b)*b->ob_descr->itemsize); return (PyObject *)np; #undef b } @@ -674,12 +674,12 @@ Py_ssize_t nbytes; if (n < 0) n = 0; - size = a->ob_size * n; + size = Py_Size(a) * n; np = (arrayobject *) newarrayobject(&Arraytype, size, a->ob_descr); if (np == NULL) return NULL; p = 
np->ob_item; - nbytes = a->ob_size * a->ob_descr->itemsize; + nbytes = Py_Size(a) * a->ob_descr->itemsize; for (i = 0; i < n; i++) { memcpy(p, a->ob_item, nbytes); p += nbytes; @@ -697,7 +697,7 @@ if (v == NULL) n = 0; else if (array_Check(v)) { - n = b->ob_size; + n = Py_Size(b); if (a == b) { /* Special case "a[i:j] = a" -- copy b first */ int ret; @@ -716,44 +716,44 @@ else { PyErr_Format(PyExc_TypeError, "can only assign array (not \"%.200s\") to array slice", - v->ob_type->tp_name); + Py_Type(v)->tp_name); return -1; } if (ilow < 0) ilow = 0; - else if (ilow > a->ob_size) - ilow = a->ob_size; + else if (ilow > Py_Size(a)) + ilow = Py_Size(a); if (ihigh < 0) ihigh = 0; if (ihigh < ilow) ihigh = ilow; - else if (ihigh > a->ob_size) - ihigh = a->ob_size; + else if (ihigh > Py_Size(a)) + ihigh = Py_Size(a); item = a->ob_item; d = n - (ihigh-ilow); if (d < 0) { /* Delete -d items */ memmove(item + (ihigh+d)*a->ob_descr->itemsize, item + ihigh*a->ob_descr->itemsize, - (a->ob_size-ihigh)*a->ob_descr->itemsize); - a->ob_size += d; - PyMem_RESIZE(item, char, a->ob_size*a->ob_descr->itemsize); + (Py_Size(a)-ihigh)*a->ob_descr->itemsize); + Py_Size(a) += d; + PyMem_RESIZE(item, char, Py_Size(a)*a->ob_descr->itemsize); /* Can't fail */ a->ob_item = item; - a->allocated = a->ob_size; + a->allocated = Py_Size(a); } else if (d > 0) { /* Insert d items */ PyMem_RESIZE(item, char, - (a->ob_size + d)*a->ob_descr->itemsize); + (Py_Size(a) + d)*a->ob_descr->itemsize); if (item == NULL) { PyErr_NoMemory(); return -1; } memmove(item + (ihigh+d)*a->ob_descr->itemsize, item + ihigh*a->ob_descr->itemsize, - (a->ob_size-ihigh)*a->ob_descr->itemsize); + (Py_Size(a)-ihigh)*a->ob_descr->itemsize); a->ob_item = item; - a->ob_size += d; - a->allocated = a->ob_size; + Py_Size(a) += d; + a->allocated = Py_Size(a); } if (n > 0) memcpy(item + ilow*a->ob_descr->itemsize, b->ob_item, @@ -765,7 +765,7 @@ static int array_ass_item(arrayobject *a, Py_ssize_t i, PyObject *v) { - if (i < 0 || i >= 
a->ob_size) { + if (i < 0 || i >= Py_Size(a)) { PyErr_SetString(PyExc_IndexError, "array assignment index out of range"); return -1; @@ -792,7 +792,7 @@ return -1; while ((v = PyIter_Next(it)) != NULL) { - if (ins1(self, (int) self->ob_size, v) != 0) { + if (ins1(self, (int) Py_Size(self), v) != 0) { Py_DECREF(v); Py_DECREF(it); return -1; @@ -818,16 +818,16 @@ "can only extend with array of same kind"); return -1; } - size = self->ob_size + b->ob_size; + size = Py_Size(self) + Py_Size(b); PyMem_RESIZE(self->ob_item, char, size*self->ob_descr->itemsize); if (self->ob_item == NULL) { PyObject_Del(self); PyErr_NoMemory(); return -1; } - memcpy(self->ob_item + self->ob_size*self->ob_descr->itemsize, - b->ob_item, b->ob_size*b->ob_descr->itemsize); - self->ob_size = size; + memcpy(self->ob_item + Py_Size(self)*self->ob_descr->itemsize, + b->ob_item, Py_Size(b)*b->ob_descr->itemsize); + Py_Size(self) = size; self->allocated = size; return 0; @@ -840,7 +840,7 @@ if (!array_Check(bb)) { PyErr_Format(PyExc_TypeError, "can only extend array with array (not \"%.200s\")", - bb->ob_type->tp_name); + Py_Type(bb)->tp_name); return NULL; } if (array_do_extend(self, bb) == -1) @@ -855,15 +855,15 @@ char *items, *p; Py_ssize_t size, i; - if (self->ob_size > 0) { + if (Py_Size(self) > 0) { if (n < 0) n = 0; items = self->ob_item; - size = self->ob_size * self->ob_descr->itemsize; + size = Py_Size(self) * self->ob_descr->itemsize; if (n == 0) { PyMem_FREE(items); self->ob_item = NULL; - self->ob_size = 0; + Py_Size(self) = 0; self->allocated = 0; } else { @@ -876,8 +876,8 @@ memcpy(p, items, size); } self->ob_item = items; - self->ob_size *= n; - self->allocated = self->ob_size; + Py_Size(self) *= n; + self->allocated = Py_Size(self); } } Py_INCREF(self); @@ -900,7 +900,7 @@ Py_ssize_t count = 0; Py_ssize_t i; - for (i = 0; i < self->ob_size; i++) { + for (i = 0; i < Py_Size(self); i++) { PyObject *selfi = getarrayitem((PyObject *)self, i); int cmp = PyObject_RichCompareBool(selfi, 
v, Py_EQ); Py_DECREF(selfi); @@ -922,7 +922,7 @@ { Py_ssize_t i; - for (i = 0; i < self->ob_size; i++) { + for (i = 0; i < Py_Size(self); i++) { PyObject *selfi = getarrayitem((PyObject *)self, i); int cmp = PyObject_RichCompareBool(selfi, v, Py_EQ); Py_DECREF(selfi); @@ -947,7 +947,7 @@ Py_ssize_t i; int cmp; - for (i = 0, cmp = 0 ; cmp == 0 && i < self->ob_size; i++) { + for (i = 0, cmp = 0 ; cmp == 0 && i < Py_Size(self); i++) { PyObject *selfi = getarrayitem((PyObject *)self, i); cmp = PyObject_RichCompareBool(selfi, v, Py_EQ); Py_DECREF(selfi); @@ -960,7 +960,7 @@ { int i; - for (i = 0; i < self->ob_size; i++) { + for (i = 0; i < Py_Size(self); i++) { PyObject *selfi = getarrayitem((PyObject *)self,i); int cmp = PyObject_RichCompareBool(selfi, v, Py_EQ); Py_DECREF(selfi); @@ -990,14 +990,14 @@ PyObject *v; if (!PyArg_ParseTuple(args, "|n:pop", &i)) return NULL; - if (self->ob_size == 0) { + if (Py_Size(self) == 0) { /* Special-case most common failure cause */ PyErr_SetString(PyExc_IndexError, "pop from empty array"); return NULL; } if (i < 0) - i += self->ob_size; - if (i < 0 || i >= self->ob_size) { + i += Py_Size(self); + if (i < 0 || i >= Py_Size(self)) { PyErr_SetString(PyExc_IndexError, "pop index out of range"); return NULL; } @@ -1053,7 +1053,7 @@ return NULL; PyTuple_SET_ITEM(retval, 0, PyLong_FromVoidPtr(self->ob_item)); - PyTuple_SET_ITEM(retval, 1, PyInt_FromLong((long)(self->ob_size))); + PyTuple_SET_ITEM(retval, 1, PyInt_FromLong((long)(Py_Size(self)))); return retval; } @@ -1070,7 +1070,7 @@ static PyObject * array_append(arrayobject *self, PyObject *v) { - return ins(self, (int) self->ob_size, v); + return ins(self, (int) Py_Size(self), v); } PyDoc_STRVAR(append_doc, @@ -1089,14 +1089,14 @@ case 1: break; case 2: - for (p = self->ob_item, i = self->ob_size; --i >= 0; p += 2) { + for (p = self->ob_item, i = Py_Size(self); --i >= 0; p += 2) { char p0 = p[0]; p[0] = p[1]; p[1] = p0; } break; case 4: - for (p = self->ob_item, i = self->ob_size; --i 
>= 0; p += 4) { + for (p = self->ob_item, i = Py_Size(self); --i >= 0; p += 4) { char p0 = p[0]; char p1 = p[1]; p[0] = p[3]; @@ -1106,7 +1106,7 @@ } break; case 8: - for (p = self->ob_item, i = self->ob_size; --i >= 0; p += 8) { + for (p = self->ob_item, i = Py_Size(self); --i >= 0; p += 8) { char p0 = p[0]; char p1 = p[1]; char p2 = p[2]; @@ -1147,16 +1147,16 @@ dict = Py_None; Py_INCREF(dict); } - if (array->ob_size > 0) { + if (Py_Size(array) > 0) { result = Py_BuildValue("O(cs#)O", - array->ob_type, + Py_Type(array), array->ob_descr->typecode, array->ob_item, - array->ob_size * array->ob_descr->itemsize, + Py_Size(array) * array->ob_descr->itemsize, dict); } else { result = Py_BuildValue("O(c)O", - array->ob_type, + Py_Type(array), array->ob_descr->typecode, dict); } @@ -1175,9 +1175,9 @@ char tmp[256]; /* 8 is probably enough -- but why skimp */ assert((size_t)itemsize <= sizeof(tmp)); - if (self->ob_size > 1) { + if (Py_Size(self) > 1) { for (p = self->ob_item, - q = self->ob_item + (self->ob_size - 1)*itemsize; + q = self->ob_item + (Py_Size(self) - 1)*itemsize; p < q; p += itemsize, q -= itemsize) { /* memory areas guaranteed disjoint, so memcpy @@ -1218,7 +1218,7 @@ Py_ssize_t newlength; size_t newbytes; /* Be careful here about overflow */ - if ((newlength = self->ob_size + n) <= 0 || + if ((newlength = Py_Size(self) + n) <= 0 || (newbytes = newlength * itemsize) / itemsize != (size_t)newlength) goto nomem; @@ -1229,15 +1229,15 @@ return NULL; } self->ob_item = item; - self->ob_size += n; - self->allocated = self->ob_size; - nread = fread(item + (self->ob_size - n) * itemsize, + Py_Size(self) += n; + self->allocated = Py_Size(self); + nread = fread(item + (Py_Size(self) - n) * itemsize, itemsize, n, fp); if (nread < (size_t)n) { - self->ob_size -= (n - nread); - PyMem_RESIZE(item, char, self->ob_size*itemsize); + Py_Size(self) -= (n - nread); + PyMem_RESIZE(item, char, Py_Size(self)*itemsize); self->ob_item = item; - self->allocated = self->ob_size; + 
self->allocated = Py_Size(self); PyErr_SetString(PyExc_EOFError, "not enough items in file"); return NULL; @@ -1297,23 +1297,23 @@ if (n > 0) { char *item = self->ob_item; Py_ssize_t i; - PyMem_RESIZE(item, char, (self->ob_size + n) * itemsize); + PyMem_RESIZE(item, char, (Py_Size(self) + n) * itemsize); if (item == NULL) { PyErr_NoMemory(); return NULL; } self->ob_item = item; - self->ob_size += n; - self->allocated = self->ob_size; + Py_Size(self) += n; + self->allocated = Py_Size(self); for (i = 0; i < n; i++) { PyObject *v = PyList_GetItem(list, i); if ((*self->ob_descr->setitem)(self, - self->ob_size - n + i, v) != 0) { - self->ob_size -= n; + Py_Size(self) - n + i, v) != 0) { + Py_Size(self) -= n; PyMem_RESIZE(item, char, - self->ob_size * itemsize); + Py_Size(self) * itemsize); self->ob_item = item; - self->allocated = self->ob_size; + self->allocated = Py_Size(self); return NULL; } } @@ -1331,12 +1331,12 @@ static PyObject * array_tolist(arrayobject *self, PyObject *unused) { - PyObject *list = PyList_New(self->ob_size); + PyObject *list = PyList_New(Py_Size(self)); Py_ssize_t i; if (list == NULL) return NULL; - for (i = 0; i < self->ob_size; i++) { + for (i = 0; i < Py_Size(self); i++) { PyObject *v = getarrayitem((PyObject *)self, i); if (v == NULL) { Py_DECREF(list); @@ -1369,15 +1369,15 @@ n = n / itemsize; if (n > 0) { char *item = self->ob_item; - PyMem_RESIZE(item, char, (self->ob_size + n) * itemsize); + PyMem_RESIZE(item, char, (Py_Size(self) + n) * itemsize); if (item == NULL) { PyErr_NoMemory(); return NULL; } self->ob_item = item; - self->ob_size += n; - self->allocated = self->ob_size; - memcpy(item + (self->ob_size - n) * itemsize, + Py_Size(self) += n; + self->allocated = Py_Size(self); + memcpy(item + (Py_Size(self) - n) * itemsize, str, itemsize*n); } Py_INCREF(Py_None); @@ -1395,7 +1395,7 @@ array_tostring(arrayobject *self, PyObject *unused) { return PyString_FromStringAndSize(self->ob_item, - self->ob_size * self->ob_descr->itemsize); + 
Py_Size(self) * self->ob_descr->itemsize); } PyDoc_STRVAR(tostring_doc, @@ -1423,15 +1423,15 @@ } if (n > 0) { Py_UNICODE *item = (Py_UNICODE *) self->ob_item; - PyMem_RESIZE(item, Py_UNICODE, self->ob_size + n); + PyMem_RESIZE(item, Py_UNICODE, Py_Size(self) + n); if (item == NULL) { PyErr_NoMemory(); return NULL; } self->ob_item = (char *) item; - self->ob_size += n; - self->allocated = self->ob_size; - memcpy(item + self->ob_size - n, + Py_Size(self) += n; + self->allocated = Py_Size(self); + memcpy(item + Py_Size(self) - n, ustr, n * sizeof(Py_UNICODE)); } @@ -1456,7 +1456,7 @@ "tounicode() may only be called on type 'u' arrays"); return NULL; } - return PyUnicode_FromUnicode((Py_UNICODE *) self->ob_item, self->ob_size); + return PyUnicode_FromUnicode((Py_UNICODE *) self->ob_item, Py_Size(self)); } PyDoc_STRVAR(tounicode_doc, @@ -1554,7 +1554,7 @@ PyObject *s, *t, *v = NULL; Py_ssize_t len; - len = a->ob_size; + len = Py_Size(a); typecode = a->ob_descr->typecode; if (len == 0) { PyOS_snprintf(buf, sizeof(buf), "array('%c')", typecode); @@ -1588,7 +1588,7 @@ return NULL; } if (i < 0) - i += self->ob_size; + i += Py_Size(self); return array_item(self, i); } else if (PySlice_Check(item)) { @@ -1597,7 +1597,7 @@ arrayobject* ar; int itemsize = self->ob_descr->itemsize; - if (PySlice_GetIndicesEx((PySliceObject*)item, self->ob_size, + if (PySlice_GetIndicesEx((PySliceObject*)item, Py_Size(self), &start, &stop, &step, &slicelength) < 0) { return NULL; } @@ -1636,14 +1636,14 @@ if (i==-1 && PyErr_Occurred()) return -1; if (i < 0) - i += self->ob_size; + i += Py_Size(self); return array_ass_item(self, i, value); } else if (PySlice_Check(item)) { Py_ssize_t start, stop, step, slicelength; int itemsize = self->ob_descr->itemsize; - if (PySlice_GetIndicesEx((PySliceObject*)item, self->ob_size, + if (PySlice_GetIndicesEx((PySliceObject*)item, Py_Size(self), &start, &stop, &step, &slicelength) < 0) { return -1; } @@ -1671,17 +1671,17 @@ self->ob_item + (cur + 1)*itemsize, 
(step - 1) * itemsize); } - extra = self->ob_size - (cur + 1); + extra = Py_Size(self) - (cur + 1); if (extra > 0) { memmove(self->ob_item + (cur - i)*itemsize, self->ob_item + (cur + 1)*itemsize, extra*itemsize); } - self->ob_size -= slicelength; + Py_Size(self) -= slicelength; self->ob_item = (char *)PyMem_REALLOC(self->ob_item, - itemsize*self->ob_size); - self->allocated = self->ob_size; + itemsize*Py_Size(self)); + self->allocated = Py_Size(self); return 0; } @@ -1693,16 +1693,16 @@ if (!array_Check(value)) { PyErr_Format(PyExc_TypeError, "must assign array (not \"%.200s\") to slice", - value->ob_type->tp_name); + Py_Type(value)->tp_name); return -1; } av = (arrayobject*)value; - if (av->ob_size != slicelength) { + if (Py_Size(av) != slicelength) { PyErr_Format(PyExc_ValueError, "attempt to assign array of size %ld to extended slice of size %ld", - /*XXX*/(long)av->ob_size, /*XXX*/(long)slicelength); + /*XXX*/(long)Py_Size(av), /*XXX*/(long)slicelength); return -1; } @@ -1711,7 +1711,7 @@ /* protect against a[::-1] = a */ if (self == av) { - value = array_slice(av, 0, av->ob_size); + value = array_slice(av, 0, Py_Size(av)); av = (arrayobject*)value; if (!av) return -1; @@ -1758,7 +1758,7 @@ *ptr = (void *)self->ob_item; if (*ptr == NULL) *ptr = emptybuf; - return self->ob_size*self->ob_descr->itemsize; + return Py_Size(self)*self->ob_descr->itemsize; } static Py_ssize_t @@ -1772,14 +1772,14 @@ *ptr = (void *)self->ob_item; if (*ptr == NULL) *ptr = emptybuf; - return self->ob_size*self->ob_descr->itemsize; + return Py_Size(self)*self->ob_descr->itemsize; } static Py_ssize_t array_buffer_getsegcount(arrayobject *self, Py_ssize_t *lenp) { if ( lenp ) - *lenp = self->ob_size*self->ob_descr->itemsize; + *lenp = Py_Size(self)*self->ob_descr->itemsize; return 1; } @@ -1888,9 +1888,9 @@ return NULL; } self->ob_item = item; - self->ob_size = n / sizeof(Py_UNICODE); + Py_Size(self) = n / sizeof(Py_UNICODE); memcpy(item, PyUnicode_AS_DATA(initial), n); - self->allocated 
= self->ob_size; + self->allocated = Py_Size(self); } #endif } @@ -1978,8 +1978,7 @@ static PyObject *array_iter(arrayobject *ao); static PyTypeObject Arraytype = { - PyObject_HEAD_INIT(NULL) - 0, + PyVarObject_HEAD_INIT(NULL, 0) "array.array", sizeof(arrayobject), 0, @@ -2060,7 +2059,7 @@ arrayiter_next(arrayiterobject *it) { assert(PyArrayIter_Check(it)); - if (it->index < it->ao->ob_size) + if (it->index < Py_Size(it->ao)) return (*it->getitem)(it->ao, it->index++); return NULL; } @@ -2081,8 +2080,7 @@ } static PyTypeObject PyArrayIter_Type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "arrayiterator", /* tp_name */ sizeof(arrayiterobject), /* tp_basicsize */ 0, /* tp_itemsize */ Modified: python/trunk/Modules/bz2module.c ============================================================================== --- python/trunk/Modules/bz2module.c (original) +++ python/trunk/Modules/bz2module.c Sat Jul 21 08:55:02 2007 @@ -41,7 +41,7 @@ #define MODE_READ_EOF 2 #define MODE_WRITE 3 -#define BZ2FileObject_Check(v) ((v)->ob_type == &BZ2File_Type) +#define BZ2FileObject_Check(v) (Py_Type(v) == &BZ2File_Type) #ifdef BZ_CONFIG_ERROR @@ -1418,7 +1418,7 @@ } Util_DropReadAhead(self); Py_XDECREF(self->file); - self->ob_type->tp_free((PyObject *)self); + Py_Type(self)->tp_free((PyObject *)self); } /* This is a hacked version of Python's fileobject.c:file_getiter(). 
*/ @@ -1480,8 +1480,7 @@ ; static PyTypeObject BZ2File_Type = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "bz2.BZ2File", /*tp_name*/ sizeof(BZ2FileObject), /*tp_basicsize*/ 0, /*tp_itemsize*/ @@ -1735,7 +1734,7 @@ PyThread_free_lock(self->lock); #endif BZ2_bzCompressEnd(&self->bzs); - self->ob_type->tp_free((PyObject *)self); + Py_Type(self)->tp_free((PyObject *)self); } @@ -1752,8 +1751,7 @@ "); static PyTypeObject BZ2Comp_Type = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "bz2.BZ2Compressor", /*tp_name*/ sizeof(BZ2CompObject), /*tp_basicsize*/ 0, /*tp_itemsize*/ @@ -1958,7 +1956,7 @@ #endif Py_XDECREF(self->unused_data); BZ2_bzDecompressEnd(&self->bzs); - self->ob_type->tp_free((PyObject *)self); + Py_Type(self)->tp_free((PyObject *)self); } @@ -1974,8 +1972,7 @@ "); static PyTypeObject BZ2Decomp_Type = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "bz2.BZ2Decompressor", /*tp_name*/ sizeof(BZ2DecompObject), /*tp_basicsize*/ 0, /*tp_itemsize*/ @@ -2209,9 +2206,9 @@ { PyObject *m; - BZ2File_Type.ob_type = &PyType_Type; - BZ2Comp_Type.ob_type = &PyType_Type; - BZ2Decomp_Type.ob_type = &PyType_Type; + Py_Type(&BZ2File_Type) = &PyType_Type; + Py_Type(&BZ2Comp_Type) = &PyType_Type; + Py_Type(&BZ2Decomp_Type) = &PyType_Type; m = Py_InitModule3("bz2", bz2_methods, bz2__doc__); if (m == NULL) Modified: python/trunk/Modules/cPickle.c ============================================================================== --- python/trunk/Modules/cPickle.c (original) +++ python/trunk/Modules/cPickle.c Sat Jul 21 08:55:02 2007 @@ -151,12 +151,12 @@ } static PyTypeObject PdataType = { - PyObject_HEAD_INIT(NULL) 0, "cPickle.Pdata", sizeof(Pdata), 0, + PyVarObject_HEAD_INIT(NULL, 0) "cPickle.Pdata", sizeof(Pdata), 0, (destructor)Pdata_dealloc, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0L,0L,0L,0L, "" }; -#define Pdata_Check(O) ((O)->ob_type == &PdataType) +#define Pdata_Check(O) (Py_Type(O) == 
&PdataType) static PyObject * Pdata_New(void) @@ -316,7 +316,7 @@ } #define FREE_ARG_TUP(self) { \ - if (self->arg->ob_refcnt > 1) { \ + if (Py_Refcnt(self->arg) > 1) { \ Py_DECREF(self->arg); \ self->arg=NULL; \ } \ @@ -752,7 +752,7 @@ static int put(Picklerobject *self, PyObject *ob) { - if (ob->ob_refcnt < 2 || self->fast) + if (Py_Refcnt(ob) < 2 || self->fast) return 0; return put2(self, ob); @@ -916,7 +916,7 @@ PyErr_Format(PyExc_ValueError, "fast mode: can't pickle cyclic objects " "including object type %s at %p", - obj->ob_type->tp_name, obj); + Py_Type(obj)->tp_name, obj); self->fast_container = -1; return 0; } @@ -2320,7 +2320,7 @@ goto finally; } - type = args->ob_type; + type = Py_Type(args); switch (type->tp_name[0]) { case 'b': @@ -2372,7 +2372,7 @@ #endif } - if (args->ob_refcnt > 1) { + if (Py_Refcnt(args) > 1) { if (!( py_ob_id = PyLong_FromVoidPtr(args))) goto finally; @@ -2913,7 +2913,7 @@ Py_XDECREF(self->inst_pers_func); Py_XDECREF(self->dispatch_table); PyMem_Free(self->write_buf); - self->ob_type->tp_free((PyObject *)self); + Py_Type(self)->tp_free((PyObject *)self); } static int @@ -3037,8 +3037,7 @@ "Objects that know how to pickle objects\n"); static PyTypeObject Picklertype = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "cPickle.Pickler", /*tp_name*/ sizeof(Picklerobject), /*tp_basicsize*/ 0, @@ -5254,7 +5253,7 @@ free(self->buf); } - self->ob_type->tp_free((PyObject *)self); + Py_Type(self)->tp_free((PyObject *)self); } static int @@ -5483,8 +5482,7 @@ "Objects that know how to unpickle"); static PyTypeObject Unpicklertype = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "cPickle.Unpickler", /*tp_name*/ sizeof(Unpicklerobject), /*tp_basicsize*/ 0, @@ -5708,9 +5706,9 @@ PyObject *format_version; PyObject *compatible_formats; - Picklertype.ob_type = &PyType_Type; - Unpicklertype.ob_type = &PyType_Type; - PdataType.ob_type = &PyType_Type; + Py_Type(&Picklertype) = 
&PyType_Type; + Py_Type(&Unpicklertype) = &PyType_Type; + Py_Type(&PdataType) = &PyType_Type; /* Initialize some pieces. We need to do this before module creation, * so we're forced to use a temporary dictionary. :( Modified: python/trunk/Modules/cStringIO.c ============================================================================== --- python/trunk/Modules/cStringIO.c (original) +++ python/trunk/Modules/cStringIO.c Sat Jul 21 08:55:02 2007 @@ -514,8 +514,7 @@ PyDoc_STRVAR(Otype__doc__, "Simple type for output to strings."); static PyTypeObject Otype = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "cStringIO.StringO", /*tp_name*/ sizeof(Oobject), /*tp_basicsize*/ 0, /*tp_itemsize*/ @@ -635,8 +634,7 @@ "Simple type for treating strings as input file streams"); static PyTypeObject Itype = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "cStringIO.StringI", /*tp_name*/ sizeof(Iobject), /*tp_basicsize*/ 0, /*tp_itemsize*/ @@ -746,8 +744,8 @@ d = PyModule_GetDict(m); /* Export C API */ - Itype.ob_type=&PyType_Type; - Otype.ob_type=&PyType_Type; + Py_Type(&Itype)=&PyType_Type; + Py_Type(&Otype)=&PyType_Type; if (PyType_Ready(&Otype) < 0) return; if (PyType_Ready(&Itype) < 0) return; PyDict_SetItemString(d,"cStringIO_CAPI", Modified: python/trunk/Modules/cjkcodecs/multibytecodec.c ============================================================================== --- python/trunk/Modules/cjkcodecs/multibytecodec.c (original) +++ python/trunk/Modules/cjkcodecs/multibytecodec.c Sat Jul 21 08:55:02 2007 @@ -670,8 +670,7 @@ } static PyTypeObject MultibyteCodec_Type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "MultibyteCodec", /* tp_name */ sizeof(MultibyteCodecObject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -946,12 +945,11 @@ { PyObject_GC_UnTrack(self); ERROR_DECREF(self->errors); - self->ob_type->tp_free(self); + Py_Type(self)->tp_free(self); } static PyTypeObject 
MultibyteIncrementalEncoder_Type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "MultibyteIncrementalEncoder", /* tp_name */ sizeof(MultibyteIncrementalEncoderObject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -1147,12 +1145,11 @@ { PyObject_GC_UnTrack(self); ERROR_DECREF(self->errors); - self->ob_type->tp_free(self); + Py_Type(self)->tp_free(self); } static PyTypeObject MultibyteIncrementalDecoder_Type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "MultibyteIncrementalDecoder", /* tp_name */ sizeof(MultibyteIncrementalDecoderObject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -1464,12 +1461,11 @@ PyObject_GC_UnTrack(self); ERROR_DECREF(self->errors); Py_DECREF(self->stream); - self->ob_type->tp_free(self); + Py_Type(self)->tp_free(self); } static PyTypeObject MultibyteStreamReader_Type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "MultibyteStreamReader", /* tp_name */ sizeof(MultibyteStreamReaderObject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -1667,7 +1663,7 @@ PyObject_GC_UnTrack(self); ERROR_DECREF(self->errors); Py_DECREF(self->stream); - self->ob_type->tp_free(self); + Py_Type(self)->tp_free(self); } static struct PyMethodDef mbstreamwriter_methods[] = { @@ -1688,8 +1684,7 @@ }; static PyTypeObject MultibyteStreamWriter_Type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "MultibyteStreamWriter", /* tp_name */ sizeof(MultibyteStreamWriterObject), /* tp_basicsize */ 0, /* tp_itemsize */ Modified: python/trunk/Modules/datetimemodule.c ============================================================================== --- python/trunk/Modules/datetimemodule.c (original) +++ python/trunk/Modules/datetimemodule.c Sat Jul 21 08:55:02 2007 @@ -764,7 +764,7 @@ PyErr_Format(PyExc_TypeError, "tzinfo argument must be None or of a tzinfo subclass, " "not type '%s'", - p->ob_type->tp_name); + Py_Type(p)->tp_name); return -1; } @@ 
-855,7 +855,7 @@ PyErr_Format(PyExc_TypeError, "tzinfo.%s() must return None or " "timedelta, not '%s'", - name, u->ob_type->tp_name); + name, Py_Type(u)->tp_name); } Py_DECREF(u); @@ -948,7 +948,7 @@ if (result != NULL && result != Py_None && ! PyString_Check(result)) { PyErr_Format(PyExc_TypeError, "tzinfo.tzname() must " "return None or a string, not '%s'", - result->ob_type->tp_name); + Py_Type(result)->tp_name); Py_DECREF(result); result = NULL; } @@ -1421,7 +1421,7 @@ { PyErr_Format(PyExc_TypeError, "can't compare %s to %s", - a->ob_type->tp_name, b->ob_type->tp_name); + Py_Type(a)->tp_name, Py_Type(b)->tp_name); return NULL; } @@ -1876,7 +1876,7 @@ PyErr_Format(PyExc_TypeError, "unsupported type for timedelta %s component: %s", - tag, num->ob_type->tp_name); + tag, Py_Type(num)->tp_name); return NULL; } @@ -1980,18 +1980,18 @@ { if (GET_TD_MICROSECONDS(self) != 0) return PyString_FromFormat("%s(%d, %d, %d)", - self->ob_type->tp_name, + Py_Type(self)->tp_name, GET_TD_DAYS(self), GET_TD_SECONDS(self), GET_TD_MICROSECONDS(self)); if (GET_TD_SECONDS(self) != 0) return PyString_FromFormat("%s(%d, %d)", - self->ob_type->tp_name, + Py_Type(self)->tp_name, GET_TD_DAYS(self), GET_TD_SECONDS(self)); return PyString_FromFormat("%s(%d)", - self->ob_type->tp_name, + Py_Type(self)->tp_name, GET_TD_DAYS(self)); } @@ -2055,7 +2055,7 @@ static PyObject * delta_reduce(PyDateTime_Delta* self) { - return Py_BuildValue("ON", self->ob_type, delta_getstate(self)); + return Py_BuildValue("ON", Py_Type(self), delta_getstate(self)); } #define OFFSET(field) offsetof(PyDateTime_Delta, field) @@ -2125,8 +2125,7 @@ }; static PyTypeObject PyDateTime_DeltaType = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "datetime.timedelta", /* tp_name */ sizeof(PyDateTime_Delta), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -2415,7 +2414,7 @@ char buffer[1028]; const char *type_name; - type_name = self->ob_type->tp_name; + type_name = Py_Type(self)->tp_name; 
PyOS_snprintf(buffer, sizeof(buffer), "%s(%d, %d, %d)", type_name, GET_YEAR(self), GET_MONTH(self), GET_DAY(self)); @@ -2555,7 +2554,7 @@ tuple = Py_BuildValue("iii", year, month, day); if (tuple == NULL) return NULL; - clone = date_new(self->ob_type, tuple, NULL); + clone = date_new(Py_Type(self), tuple, NULL); Py_DECREF(tuple); return clone; } @@ -2605,7 +2604,7 @@ static PyObject * date_reduce(PyDateTime_Date *self, PyObject *arg) { - return Py_BuildValue("(ON)", self->ob_type, date_getstate(self)); + return Py_BuildValue("(ON)", Py_Type(self), date_getstate(self)); } static PyMethodDef date_methods[] = { @@ -2683,8 +2682,7 @@ }; static PyTypeObject PyDateTime_DateType = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "datetime.date", /* tp_name */ sizeof(PyDateTime_Date), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -2908,10 +2906,10 @@ if (state == Py_None) { Py_DECREF(state); - return Py_BuildValue("(ON)", self->ob_type, args); + return Py_BuildValue("(ON)", Py_Type(self), args); } else - return Py_BuildValue("(ONN)", self->ob_type, args, state); + return Py_BuildValue("(ONN)", Py_Type(self), args, state); } static PyMethodDef tzinfo_methods[] = { @@ -3106,7 +3104,7 @@ if (HASTZINFO(self)) { Py_XDECREF(self->tzinfo); } - self->ob_type->tp_free((PyObject *)self); + Py_Type(self)->tp_free((PyObject *)self); } /* @@ -3140,7 +3138,7 @@ time_repr(PyDateTime_Time *self) { char buffer[100]; - const char *type_name = self->ob_type->tp_name; + const char *type_name = Py_Type(self)->tp_name; int h = TIME_GET_HOUR(self); int m = TIME_GET_MINUTE(self); int s = TIME_GET_SECOND(self); @@ -3354,7 +3352,7 @@ tuple = Py_BuildValue("iiiiO", hh, mm, ss, us, tzinfo); if (tuple == NULL) return NULL; - clone = time_new(self->ob_type, tuple, NULL); + clone = time_new(Py_Type(self), tuple, NULL); Py_DECREF(tuple); return clone; } @@ -3408,7 +3406,7 @@ static PyObject * time_reduce(PyDateTime_Time *self, PyObject *arg) { - return Py_BuildValue("(ON)", 
self->ob_type, time_getstate(self)); + return Py_BuildValue("(ON)", Py_Type(self), time_getstate(self)); } static PyMethodDef time_methods[] = { @@ -3902,7 +3900,7 @@ if (HASTZINFO(self)) { Py_XDECREF(self->tzinfo); } - self->ob_type->tp_free((PyObject *)self); + Py_Type(self)->tp_free((PyObject *)self); } /* @@ -4051,7 +4049,7 @@ datetime_repr(PyDateTime_DateTime *self) { char buffer[1000]; - const char *type_name = self->ob_type->tp_name; + const char *type_name = Py_Type(self)->tp_name; PyObject *baserepr; if (DATE_GET_MICROSECOND(self)) { @@ -4272,7 +4270,7 @@ tuple = Py_BuildValue("iiiiiiiO", y, m, d, hh, mm, ss, us, tzinfo); if (tuple == NULL) return NULL; - clone = datetime_new(self->ob_type, tuple, NULL); + clone = datetime_new(Py_Type(self), tuple, NULL); Py_DECREF(tuple); return clone; } @@ -4460,7 +4458,7 @@ static PyObject * datetime_reduce(PyDateTime_DateTime *self, PyObject *arg) { - return Py_BuildValue("(ON)", self->ob_type, datetime_getstate(self)); + return Py_BuildValue("(ON)", Py_Type(self), datetime_getstate(self)); } static PyMethodDef datetime_methods[] = { Modified: python/trunk/Modules/dbmmodule.c ============================================================================== --- python/trunk/Modules/dbmmodule.c (original) +++ python/trunk/Modules/dbmmodule.c Sat Jul 21 08:55:02 2007 @@ -36,7 +36,7 @@ static PyTypeObject Dbmtype; -#define is_dbmobject(v) ((v)->ob_type == &Dbmtype) +#define is_dbmobject(v) (Py_Type(v) == &Dbmtype) #define check_dbmobject_open(v) if ((v)->di_dbm == NULL) \ { PyErr_SetString(DbmError, "DBM object has already been closed"); \ return NULL; } @@ -302,8 +302,7 @@ } static PyTypeObject Dbmtype = { - PyObject_HEAD_INIT(NULL) - 0, + PyVarObject_HEAD_INIT(NULL, 0) "dbm.dbm", sizeof(dbmobject), 0, Modified: python/trunk/Modules/dlmodule.c ============================================================================== --- python/trunk/Modules/dlmodule.c (original) +++ python/trunk/Modules/dlmodule.c Sat Jul 21 08:55:02 
2007 @@ -62,7 +62,7 @@ name = PyString_AS_STRING(args); } else { PyErr_Format(PyExc_TypeError, "expected string, found %.200s", - args->ob_type->tp_name); + Py_Type(args)->tp_name); return NULL; } func = dlsym(xp->dl_handle, name); @@ -141,8 +141,7 @@ static PyTypeObject Dltype = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "dl.dl", /*tp_name*/ sizeof(dlobject), /*tp_basicsize*/ 0, /*tp_itemsize*/ @@ -237,7 +236,7 @@ PyObject *m, *d, *x; /* Initialize object type */ - Dltype.ob_type = &PyType_Type; + Py_Type(&Dltype) = &PyType_Type; /* Create the module and add the functions */ m = Py_InitModule("dl", dl_methods); Modified: python/trunk/Modules/gcmodule.c ============================================================================== --- python/trunk/Modules/gcmodule.c (original) +++ python/trunk/Modules/gcmodule.c Sat Jul 21 08:55:02 2007 @@ -239,7 +239,7 @@ PyGC_Head *gc = containers->gc.gc_next; for (; gc != containers; gc = gc->gc.gc_next) { assert(gc->gc.gc_refs == GC_REACHABLE); - gc->gc.gc_refs = FROM_GC(gc)->ob_refcnt; + gc->gc.gc_refs = Py_Refcnt(FROM_GC(gc)); /* Python's cyclic gc should never see an incoming refcount * of 0: if something decref'ed to 0, it should have been * deallocated immediately at that time. @@ -291,7 +291,7 @@ traverseproc traverse; PyGC_Head *gc = containers->gc.gc_next; for (; gc != containers; gc=gc->gc.gc_next) { - traverse = FROM_GC(gc)->ob_type->tp_traverse; + traverse = Py_Type(FROM_GC(gc))->tp_traverse; (void) traverse(FROM_GC(gc), (visitproc)visit_decref, NULL); @@ -376,7 +376,7 @@ * the next object to visit. */ PyObject *op = FROM_GC(gc); - traverseproc traverse = op->ob_type->tp_traverse; + traverseproc traverse = Py_Type(op)->tp_traverse; assert(gc->gc.gc_refs > 0); gc->gc.gc_refs = GC_REACHABLE; (void) traverse(op, @@ -472,7 +472,7 @@ PyGC_Head *gc = finalizers->gc.gc_next; for (; gc != finalizers; gc = gc->gc.gc_next) { /* Note that the finalizers list may grow during this. 
*/ - traverse = FROM_GC(gc)->ob_type->tp_traverse; + traverse = Py_Type(FROM_GC(gc))->tp_traverse; (void) traverse(FROM_GC(gc), (visitproc)visit_move, (void *)finalizers); @@ -517,7 +517,7 @@ assert(IS_TENTATIVELY_UNREACHABLE(op)); next = gc->gc.gc_next; - if (! PyType_SUPPORTS_WEAKREFS(op->ob_type)) + if (! PyType_SUPPORTS_WEAKREFS(Py_Type(op))) continue; /* It supports weakrefs. Does it have any? */ @@ -654,7 +654,7 @@ } else if (debug & DEBUG_OBJECTS) { PySys_WriteStderr("gc: %.100s <%.100s %p>\n", - msg, op->ob_type->tp_name, op); + msg, Py_Type(op)->tp_name, op); } } @@ -708,7 +708,7 @@ PyList_Append(garbage, op); } else { - if ((clear = op->ob_type->tp_clear) != NULL) { + if ((clear = Py_Type(op)->tp_clear) != NULL) { Py_INCREF(op); clear(op); Py_DECREF(op); @@ -1079,7 +1079,7 @@ traverseproc traverse; for (gc = list->gc.gc_next; gc != list; gc = gc->gc.gc_next) { obj = FROM_GC(gc); - traverse = obj->ob_type->tp_traverse; + traverse = Py_Type(obj)->tp_traverse; if (obj == objs || obj == resultlist) continue; if (traverse(obj, (visitproc)referrersvisit, objs)) { @@ -1136,7 +1136,7 @@ if (! PyObject_IS_GC(obj)) continue; - traverse = obj->ob_type->tp_traverse; + traverse = Py_Type(obj)->tp_traverse; if (! 
traverse) continue; if (traverse(obj, (visitproc)referentsvisit, result)) { @@ -1359,13 +1359,13 @@ PyVarObject * _PyObject_GC_Resize(PyVarObject *op, Py_ssize_t nitems) { - const size_t basicsize = _PyObject_VAR_SIZE(op->ob_type, nitems); + const size_t basicsize = _PyObject_VAR_SIZE(Py_Type(op), nitems); PyGC_Head *g = AS_GC(op); g = (PyGC_Head *)PyObject_REALLOC(g, sizeof(PyGC_Head) + basicsize); if (g == NULL) return (PyVarObject *)PyErr_NoMemory(); op = (PyVarObject *) FROM_GC(g); - op->ob_size = nitems; + Py_Size(op) = nitems; return op; } Modified: python/trunk/Modules/gdbmmodule.c ============================================================================== --- python/trunk/Modules/gdbmmodule.c (original) +++ python/trunk/Modules/gdbmmodule.c Sat Jul 21 08:55:02 2007 @@ -36,7 +36,7 @@ static PyTypeObject Dbmtype; -#define is_dbmobject(v) ((v)->ob_type == &Dbmtype) +#define is_dbmobject(v) (Py_Type(v) == &Dbmtype) #define check_dbmobject_open(v) if ((v)->di_dbm == NULL) \ { PyErr_SetString(DbmError, "GDBM object has already been closed"); \ return NULL; } @@ -370,8 +370,7 @@ } static PyTypeObject Dbmtype = { - PyObject_HEAD_INIT(0) - 0, + PyVarObject_HEAD_INIT(0, 0) "gdbm.gdbm", sizeof(dbmobject), 0, Modified: python/trunk/Modules/itertoolsmodule.c ============================================================================== --- python/trunk/Modules/itertoolsmodule.c (original) +++ python/trunk/Modules/itertoolsmodule.c Sat Jul 21 08:55:02 2007 @@ -59,7 +59,7 @@ Py_XDECREF(gbo->tgtkey); Py_XDECREF(gbo->currkey); Py_XDECREF(gbo->currvalue); - gbo->ob_type->tp_free(gbo); + Py_Type(gbo)->tp_free(gbo); } static int @@ -139,8 +139,7 @@ (key, sub-iterator) grouped by each value of key(value).\n"); static PyTypeObject groupby_type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "itertools.groupby", /* tp_name */ sizeof(groupbyobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -261,8 +260,7 @@ } static PyTypeObject _grouper_type 
= { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "itertools._grouper", /* tp_name */ sizeof(_grouperobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -415,8 +413,7 @@ PyDoc_STRVAR(teedataobject_doc, "Data container common to multiple tee objects."); static PyTypeObject teedataobject_type = { - PyObject_HEAD_INIT(0) /* Must fill in type value later */ - 0, /* ob_size */ + PyVarObject_HEAD_INIT(0, 0) /* Must fill in type value later */ "itertools.tee_dataobject", /* tp_name */ sizeof(teedataobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -572,8 +569,7 @@ }; static PyTypeObject tee_type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "itertools.tee", /* tp_name */ sizeof(teeobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -718,7 +714,7 @@ PyObject_GC_UnTrack(lz); Py_XDECREF(lz->saved); Py_XDECREF(lz->it); - lz->ob_type->tp_free(lz); + Py_Type(lz)->tp_free(lz); } static int @@ -768,8 +764,7 @@ Then repeat the sequence indefinitely."); static PyTypeObject cycle_type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "itertools.cycle", /* tp_name */ sizeof(cycleobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -862,7 +857,7 @@ PyObject_GC_UnTrack(lz); Py_XDECREF(lz->func); Py_XDECREF(lz->it); - lz->ob_type->tp_free(lz); + Py_Type(lz)->tp_free(lz); } static int @@ -882,7 +877,7 @@ PyObject *(*iternext)(PyObject *); assert(PyIter_Check(it)); - iternext = *it->ob_type->tp_iternext; + iternext = *Py_Type(it)->tp_iternext; for (;;) { item = iternext(it); if (item == NULL) @@ -912,8 +907,7 @@ Afterwards, return every element until the iterable is exhausted."); static PyTypeObject dropwhile_type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "itertools.dropwhile", /* tp_name */ sizeof(dropwhileobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -1006,7 +1000,7 @@ PyObject_GC_UnTrack(lz); Py_XDECREF(lz->func); Py_XDECREF(lz->it); - 
lz->ob_type->tp_free(lz); + Py_Type(lz)->tp_free(lz); } static int @@ -1028,7 +1022,7 @@ return NULL; assert(PyIter_Check(it)); - item = (*it->ob_type->tp_iternext)(it); + item = (*Py_Type(it)->tp_iternext)(it); if (item == NULL) return NULL; @@ -1053,8 +1047,7 @@ predicate evaluates to true for each entry."); static PyTypeObject takewhile_type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "itertools.takewhile", /* tp_name */ sizeof(takewhileobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -1197,7 +1190,7 @@ { PyObject_GC_UnTrack(lz); Py_XDECREF(lz->it); - lz->ob_type->tp_free(lz); + Py_Type(lz)->tp_free(lz); } static int @@ -1216,7 +1209,7 @@ PyObject *(*iternext)(PyObject *); assert(PyIter_Check(it)); - iternext = *it->ob_type->tp_iternext; + iternext = *Py_Type(it)->tp_iternext; while (lz->cnt < lz->next) { item = iternext(it); if (item == NULL) @@ -1249,8 +1242,7 @@ but returns an iterator."); static PyTypeObject islice_type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "itertools.islice", /* tp_name */ sizeof(isliceobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -1341,7 +1333,7 @@ PyObject_GC_UnTrack(lz); Py_XDECREF(lz->func); Py_XDECREF(lz->it); - lz->ob_type->tp_free(lz); + Py_Type(lz)->tp_free(lz); } static int @@ -1360,7 +1352,7 @@ PyObject *it = lz->it; assert(PyIter_Check(it)); - args = (*it->ob_type->tp_iternext)(it); + args = (*Py_Type(it)->tp_iternext)(it); if (args == NULL) return NULL; if (!PyTuple_CheckExact(args)) { @@ -1381,8 +1373,7 @@ with a argument tuple taken from the given sequence."); static PyTypeObject starmap_type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "itertools.starmap", /* tp_name */ sizeof(starmapobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -1487,7 +1478,7 @@ PyObject_GC_UnTrack(lz); Py_XDECREF(lz->iters); Py_XDECREF(lz->func); - lz->ob_type->tp_free(lz); + Py_Type(lz)->tp_free(lz); } static int @@ 
-1561,8 +1552,7 @@ iterables."); static PyTypeObject imap_type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "itertools.imap", /* tp_name */ sizeof(imapobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -1666,7 +1656,7 @@ { PyObject_GC_UnTrack(lz); Py_XDECREF(lz->ittuple); - lz->ob_type->tp_free(lz); + Py_Type(lz)->tp_free(lz); } static int @@ -1706,8 +1696,7 @@ iterable, until all of the iterables are exhausted."); static PyTypeObject chain_type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "itertools.chain", /* tp_name */ sizeof(chainobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -1798,7 +1787,7 @@ PyObject_GC_UnTrack(lz); Py_XDECREF(lz->func); Py_XDECREF(lz->it); - lz->ob_type->tp_free(lz); + Py_Type(lz)->tp_free(lz); } static int @@ -1818,7 +1807,7 @@ PyObject *(*iternext)(PyObject *); assert(PyIter_Check(it)); - iternext = *it->ob_type->tp_iternext; + iternext = *Py_Type(it)->tp_iternext; for (;;) { item = iternext(it); if (item == NULL) @@ -1850,8 +1839,7 @@ If function is None, return the items that are true."); static PyTypeObject ifilter_type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "itertools.ifilter", /* tp_name */ sizeof(ifilterobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -1943,7 +1931,7 @@ PyObject_GC_UnTrack(lz); Py_XDECREF(lz->func); Py_XDECREF(lz->it); - lz->ob_type->tp_free(lz); + Py_Type(lz)->tp_free(lz); } static int @@ -1963,7 +1951,7 @@ PyObject *(*iternext)(PyObject *); assert(PyIter_Check(it)); - iternext = *it->ob_type->tp_iternext; + iternext = *Py_Type(it)->tp_iternext; for (;;) { item = iternext(it); if (item == NULL) @@ -1995,8 +1983,7 @@ If function is None, return the items that are false."); static PyTypeObject ifilterfalse_type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "itertools.ifilterfalse", /* tp_name */ sizeof(ifilterfalseobject), /* tp_basicsize */ 0, /* 
tp_itemsize */ @@ -2094,8 +2081,7 @@ integers starting from zero or, if specified, from firstval."); static PyTypeObject count_type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "itertools.count", /* tp_name */ sizeof(countobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -2214,7 +2200,7 @@ PyObject_GC_UnTrack(lz); Py_XDECREF(lz->ittuple); Py_XDECREF(lz->result); - lz->ob_type->tp_free(lz); + Py_Type(lz)->tp_free(lz); } static int @@ -2237,12 +2223,12 @@ if (tuplesize == 0) return NULL; - if (result->ob_refcnt == 1) { + if (Py_Refcnt(result) == 1) { Py_INCREF(result); for (i=0 ; i < tuplesize ; i++) { it = PyTuple_GET_ITEM(lz->ittuple, i); assert(PyIter_Check(it)); - item = (*it->ob_type->tp_iternext)(it); + item = (*Py_Type(it)->tp_iternext)(it); if (item == NULL) { Py_DECREF(result); return NULL; @@ -2258,7 +2244,7 @@ for (i=0 ; i < tuplesize ; i++) { it = PyTuple_GET_ITEM(lz->ittuple, i); assert(PyIter_Check(it)); - item = (*it->ob_type->tp_iternext)(it); + item = (*Py_Type(it)->tp_iternext)(it); if (item == NULL) { Py_DECREF(result); return NULL; @@ -2280,8 +2266,7 @@ a list."); static PyTypeObject izip_type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "itertools.izip", /* tp_name */ sizeof(izipobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -2365,7 +2350,7 @@ { PyObject_GC_UnTrack(ro); Py_XDECREF(ro->element); - ro->ob_type->tp_free(ro); + Py_Type(ro)->tp_free(ro); } static int @@ -2428,8 +2413,7 @@ endlessly."); static PyTypeObject repeat_type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "itertools.repeat", /* tp_name */ sizeof(repeatobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -2561,7 +2545,7 @@ Py_XDECREF(lz->ittuple); Py_XDECREF(lz->result); Py_XDECREF(lz->fillvalue); - lz->ob_type->tp_free(lz); + Py_Type(lz)->tp_free(lz); } static int @@ -2587,7 +2571,7 @@ return NULL; if (lz->numactive == 0) return NULL; - if (result->ob_refcnt == 
1) { + if (Py_Refcnt(result) == 1) { Py_INCREF(result); for (i=0 ; i < tuplesize ; i++) { it = PyTuple_GET_ITEM(lz->ittuple, i); @@ -2596,7 +2580,7 @@ item = lz->fillvalue; } else { assert(PyIter_Check(it)); - item = (*it->ob_type->tp_iternext)(it); + item = (*Py_Type(it)->tp_iternext)(it); if (item == NULL) { lz->numactive -= 1; if (lz->numactive == 0) { @@ -2625,7 +2609,7 @@ item = lz->fillvalue; } else { assert(PyIter_Check(it)); - item = (*it->ob_type->tp_iternext)(it); + item = (*Py_Type(it)->tp_iternext)(it); if (item == NULL) { lz->numactive -= 1; if (lz->numactive == 0) { @@ -2657,8 +2641,7 @@ "); static PyTypeObject iziplongest_type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "itertools.izip_longest", /* tp_name */ sizeof(iziplongestobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -2757,7 +2740,7 @@ NULL }; - teedataobject_type.ob_type = &PyType_Type; + Py_Type(&teedataobject_type) = &PyType_Type; m = Py_InitModule3("itertools", module_methods, module_doc); if (m == NULL) return; Modified: python/trunk/Modules/linuxaudiodev.c ============================================================================== --- python/trunk/Modules/linuxaudiodev.c (original) +++ python/trunk/Modules/linuxaudiodev.c Sat Jul 21 08:55:02 2007 @@ -441,8 +441,7 @@ } static PyTypeObject Ladtype = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "linuxaudiodev.linux_audio_device", /*tp_name*/ sizeof(lad_t), /*tp_size*/ 0, /*tp_itemsize*/ Modified: python/trunk/Modules/md5module.c ============================================================================== --- python/trunk/Modules/md5module.c (original) +++ python/trunk/Modules/md5module.c Sat Jul 21 08:55:02 2007 @@ -221,8 +221,7 @@ copy() -- return a copy of the current md5 object"); static PyTypeObject MD5type = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "_md5.md5", /*tp_name*/ sizeof(md5object), /*tp_size*/ 
0, /*tp_itemsize*/ @@ -299,7 +298,7 @@ { PyObject *m, *d; - MD5type.ob_type = &PyType_Type; + Py_Type(&MD5type) = &PyType_Type; if (PyType_Ready(&MD5type) < 0) return; m = Py_InitModule3("_md5", md5_functions, module_doc); Modified: python/trunk/Modules/mmapmodule.c ============================================================================== --- python/trunk/Modules/mmapmodule.c (original) +++ python/trunk/Modules/mmapmodule.c Sat Jul 21 08:55:02 2007 @@ -782,8 +782,7 @@ }; static PyTypeObject mmap_object_type = { - PyObject_HEAD_INIT(0) /* patched in module init */ - 0, /* ob_size */ + PyVarObject_HEAD_INIT(0, 0) /* patched in module init */ "mmap.mmap", /* tp_name */ sizeof(mmap_object), /* tp_size */ 0, /* tp_itemsize */ @@ -1141,7 +1140,7 @@ PyObject *dict, *module; /* Patch the object type */ - mmap_object_type.ob_type = &PyType_Type; + Py_Type(&mmap_object_type) = &PyType_Type; module = Py_InitModule("mmap", mmap_functions); if (module == NULL) Modified: python/trunk/Modules/operator.c ============================================================================== --- python/trunk/Modules/operator.c (original) +++ python/trunk/Modules/operator.c Sat Jul 21 08:55:02 2007 @@ -397,8 +397,7 @@ After, g=itemgetter(2,5,3), the call g(r) returns (r[2], r[5], r[3])"); static PyTypeObject itemgetter_type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "operator.itemgetter", /* tp_name */ sizeof(itemgetterobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -535,8 +534,7 @@ After, g=attrgetter('name', 'date'), the call g(r) returns (r.name, r.date)."); static PyTypeObject attrgetter_type = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(NULL, 0) "operator.attrgetter", /* tp_name */ sizeof(attrgetterobject), /* tp_basicsize */ 0, /* tp_itemsize */ Modified: python/trunk/Modules/ossaudiodev.c ============================================================================== --- python/trunk/Modules/ossaudiodev.c 
(original) +++ python/trunk/Modules/ossaudiodev.c Sat Jul 21 08:55:02 2007 @@ -841,8 +841,7 @@ } static PyTypeObject OSSAudioType = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "ossaudiodev.oss_audio_device", /*tp_name*/ sizeof(oss_audio_t), /*tp_size*/ 0, /*tp_itemsize*/ @@ -856,8 +855,7 @@ }; static PyTypeObject OSSMixerType = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "ossaudiodev.oss_mixer_device", /*tp_name*/ sizeof(oss_mixer_t), /*tp_size*/ 0, /*tp_itemsize*/ Modified: python/trunk/Modules/parsermodule.c ============================================================================== --- python/trunk/Modules/parsermodule.c (original) +++ python/trunk/Modules/parsermodule.c Sat Jul 21 08:55:02 2007 @@ -166,8 +166,7 @@ static PyTypeObject PyST_Type = { - PyObject_HEAD_INIT(NULL) - 0, + PyVarObject_HEAD_INIT(NULL, 0) "parser.st", /* tp_name */ (int) sizeof(PyST_Object), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -694,7 +693,7 @@ PyErr_Format(parser_error, "second item in terminal node must be a string," " found %s", - temp->ob_type->tp_name); + Py_Type(temp)->tp_name); Py_DECREF(temp); return 0; } @@ -707,7 +706,7 @@ PyErr_Format(parser_error, "third item in terminal node must be an" " integer, found %s", - temp->ob_type->tp_name); + Py_Type(temp)->tp_name); Py_DECREF(o); Py_DECREF(temp); return 0; @@ -3233,7 +3232,7 @@ { PyObject *module, *copyreg; - PyST_Type.ob_type = &PyType_Type; + Py_Type(&PyST_Type) = &PyType_Type; module = Py_InitModule("parser", parser_functions); if (module == NULL) return; Modified: python/trunk/Modules/posixmodule.c ============================================================================== --- python/trunk/Modules/posixmodule.c (original) +++ python/trunk/Modules/posixmodule.c Sat Jul 21 08:55:02 2007 @@ -2593,7 +2593,7 @@ long intval; if (PyFloat_Check(t)) { double tval = PyFloat_AsDouble(t); - PyObject *intobj = 
t->ob_type->tp_as_number->nb_int(t); + PyObject *intobj = Py_Type(t)->tp_as_number->nb_int(t); if (!intobj) return -1; intval = PyInt_AsLong(intobj); Modified: python/trunk/Modules/pyexpat.c ============================================================================== --- python/trunk/Modules/pyexpat.c (original) +++ python/trunk/Modules/pyexpat.c Sat Jul 21 08:55:02 2007 @@ -974,7 +974,7 @@ if (!PyString_Check(str)) { PyErr_Format(PyExc_TypeError, "read() did not return a string object (type=%.400s)", - str->ob_type->tp_name); + Py_Type(str)->tp_name); goto finally; } len = PyString_GET_SIZE(str); @@ -1687,8 +1687,7 @@ PyDoc_STRVAR(Xmlparsetype__doc__, "XML parser"); static PyTypeObject Xmlparsetype = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "pyexpat.xmlparser", /*tp_name*/ sizeof(xmlparseobject) + PyGC_HEAD_SIZE,/*tp_basicsize*/ 0, /*tp_itemsize*/ @@ -1860,7 +1859,7 @@ if (modelmod_name == NULL) return; - Xmlparsetype.ob_type = &PyType_Type; + Py_Type(&Xmlparsetype) = &PyType_Type; /* Create the module and add the functions */ m = Py_InitModule3(MODULE_NAME, pyexpat_methods, Modified: python/trunk/Modules/selectmodule.c ============================================================================== --- python/trunk/Modules/selectmodule.c (original) +++ python/trunk/Modules/selectmodule.c Sat Jul 21 08:55:02 2007 @@ -587,8 +587,7 @@ static PyTypeObject poll_Type = { /* The ob_type field must be initialized in the module init function * to be portable to Windows without using C++. 
*/ - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "select.poll", /*tp_name*/ sizeof(pollObject), /*tp_basicsize*/ 0, /*tp_itemsize*/ @@ -706,7 +705,7 @@ #else { #endif - poll_Type.ob_type = &PyType_Type; + Py_Type(&poll_Type) = &PyType_Type; PyModule_AddIntConstant(m, "POLLIN", POLLIN); PyModule_AddIntConstant(m, "POLLPRI", POLLPRI); PyModule_AddIntConstant(m, "POLLOUT", POLLOUT); Modified: python/trunk/Modules/sha256module.c ============================================================================== --- python/trunk/Modules/sha256module.c (original) +++ python/trunk/Modules/sha256module.c Sat Jul 21 08:55:02 2007 @@ -409,7 +409,7 @@ { SHAobject *newobj; - if (((PyObject*)self)->ob_type == &SHA256type) { + if (Py_Type(self) == &SHA256type) { if ( (newobj = newSHA256object())==NULL) return NULL; } else { @@ -536,8 +536,7 @@ }; static PyTypeObject SHA224type = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "_sha256.sha224", /*tp_name*/ sizeof(SHAobject), /*tp_size*/ 0, /*tp_itemsize*/ @@ -571,8 +570,7 @@ }; static PyTypeObject SHA256type = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "_sha256.sha256", /*tp_name*/ sizeof(SHAobject), /*tp_size*/ 0, /*tp_itemsize*/ @@ -689,10 +687,10 @@ { PyObject *m; - SHA224type.ob_type = &PyType_Type; + Py_Type(&SHA224type) = &PyType_Type; if (PyType_Ready(&SHA224type) < 0) return; - SHA256type.ob_type = &PyType_Type; + Py_Type(&SHA256type) = &PyType_Type; if (PyType_Ready(&SHA256type) < 0) return; m = Py_InitModule("_sha256", SHA_functions); Modified: python/trunk/Modules/sha512module.c ============================================================================== --- python/trunk/Modules/sha512module.c (original) +++ python/trunk/Modules/sha512module.c Sat Jul 21 08:55:02 2007 @@ -602,8 +602,7 @@ }; static PyTypeObject SHA384type = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "_sha512.sha384", 
/*tp_name*/ sizeof(SHAobject), /*tp_size*/ 0, /*tp_itemsize*/ @@ -637,8 +636,7 @@ }; static PyTypeObject SHA512type = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "_sha512.sha512", /*tp_name*/ sizeof(SHAobject), /*tp_size*/ 0, /*tp_itemsize*/ @@ -755,10 +753,10 @@ { PyObject *m; - SHA384type.ob_type = &PyType_Type; + Py_Type(&SHA384type) = &PyType_Type; if (PyType_Ready(&SHA384type) < 0) return; - SHA512type.ob_type = &PyType_Type; + Py_Type(&SHA512type) = &PyType_Type; if (PyType_Ready(&SHA512type) < 0) return; m = Py_InitModule("_sha512", SHA_functions); Modified: python/trunk/Modules/shamodule.c ============================================================================== --- python/trunk/Modules/shamodule.c (original) +++ python/trunk/Modules/shamodule.c Sat Jul 21 08:55:02 2007 @@ -489,8 +489,7 @@ }; static PyTypeObject SHAtype = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "_sha.sha", /*tp_name*/ sizeof(SHAobject), /*tp_size*/ 0, /*tp_itemsize*/ @@ -577,7 +576,7 @@ { PyObject *m; - SHAtype.ob_type = &PyType_Type; + Py_Type(&SHAtype) = &PyType_Type; if (PyType_Ready(&SHAtype) < 0) return; m = Py_InitModule("_sha", SHA_functions); Modified: python/trunk/Modules/socketmodule.c ============================================================================== --- python/trunk/Modules/socketmodule.c (original) +++ python/trunk/Modules/socketmodule.c Sat Jul 21 08:55:02 2007 @@ -1245,7 +1245,7 @@ PyExc_TypeError, "getsockaddrarg: " "AF_NETLINK address must be tuple, not %.500s", - args->ob_type->tp_name); + Py_Type(args)->tp_name); return 0; } if (!PyArg_ParseTuple(args, "II:getsockaddrarg", &pid, &groups)) @@ -1268,7 +1268,7 @@ PyExc_TypeError, "getsockaddrarg: " "AF_INET address must be tuple, not %.500s", - args->ob_type->tp_name); + Py_Type(args)->tp_name); return 0; } if (!PyArg_ParseTuple(args, "eti:getsockaddrarg", @@ -1298,7 +1298,7 @@ PyExc_TypeError, "getsockaddrarg: " "AF_INET6 address must 
be tuple, not %.500s", - args->ob_type->tp_name); + Py_Type(args)->tp_name); return 0; } if (!PyArg_ParseTuple(args, "eti|ii", @@ -1420,7 +1420,7 @@ PyExc_TypeError, "getsockaddrarg: " "AF_PACKET address must be tuple, not %.500s", - args->ob_type->tp_name); + Py_Type(args)->tp_name); return 0; } if (!PyArg_ParseTuple(args, "si|iis#", &interfaceName, @@ -2843,7 +2843,7 @@ { if (s->sock_fd != -1) (void) SOCKETCLOSE(s->sock_fd); - s->ob_type->tp_free((PyObject *)s); + Py_Type(s)->tp_free((PyObject *)s); } @@ -2928,8 +2928,7 @@ /* Type object for socket objects. */ static PyTypeObject sock_type = { - PyObject_HEAD_INIT(0) /* Must fill in type value later */ - 0, /* ob_size */ + PyVarObject_HEAD_INIT(0, 0) /* Must fill in type value later */ "_socket.socket", /* tp_name */ sizeof(PySocketSockObject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -3555,7 +3554,7 @@ else return PyErr_Format(PyExc_TypeError, "expected int/long, %s found", - arg->ob_type->tp_name); + Py_Type(arg)->tp_name); if (x == (unsigned long) -1 && PyErr_Occurred()) return NULL; return PyLong_FromUnsignedLong(ntohl(x)); @@ -3624,7 +3623,7 @@ else return PyErr_Format(PyExc_TypeError, "expected int/long, %s found", - arg->ob_type->tp_name); + Py_Type(arg)->tp_name); return PyLong_FromUnsignedLong(htonl((unsigned long)x)); } @@ -4274,7 +4273,7 @@ if (!os_init()) return; - sock_type.ob_type = &PyType_Type; + Py_Type(&sock_type) = &PyType_Type; m = Py_InitModule3(PySocket_MODULE_NAME, socket_methods, socket_doc); Modified: python/trunk/Modules/sunaudiodev.c ============================================================================== --- python/trunk/Modules/sunaudiodev.c (original) +++ python/trunk/Modules/sunaudiodev.c Sat Jul 21 08:55:02 2007 @@ -42,8 +42,8 @@ static PyObject *SunAudioError; -#define is_sadobject(v) ((v)->ob_type == &Sadtype) -#define is_sadstatusobject(v) ((v)->ob_type == &Sadstatustype) +#define is_sadobject(v) (Py_Type(v) == &Sadtype) +#define is_sadstatusobject(v) (Py_Type(v) == 
&Sadstatustype) static sadobject * @@ -409,8 +409,7 @@ static PyTypeObject Sadtype = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "sunaudiodev.sun_audio_device", /*tp_name*/ sizeof(sadobject), /*tp_size*/ 0, /*tp_itemsize*/ @@ -424,8 +423,7 @@ }; static PyTypeObject Sadstatustype = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "sunaudiodev.sun_audio_device_status", /*tp_name*/ sizeof(sadstatusobject), /*tp_size*/ 0, /*tp_itemsize*/ Modified: python/trunk/Modules/threadmodule.c ============================================================================== --- python/trunk/Modules/threadmodule.c (original) +++ python/trunk/Modules/threadmodule.c Sat Jul 21 08:55:02 2007 @@ -126,8 +126,7 @@ } static PyTypeObject Locktype = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "thread.lock", /*tp_name*/ sizeof(lockobject), /*tp_size*/ 0, /*tp_itemsize*/ @@ -251,7 +250,7 @@ } local_clear(self); - self->ob_type->tp_free((PyObject*)self); + Py_Type(self)->tp_free((PyObject*)self); } static PyObject * @@ -283,8 +282,8 @@ Py_INCREF(ldict); self->dict = ldict; /* still borrowed */ - if (self->ob_type->tp_init != PyBaseObject_Type.tp_init && - self->ob_type->tp_init((PyObject*)self, + if (Py_Type(self)->tp_init != PyBaseObject_Type.tp_init && + Py_Type(self)->tp_init((PyObject*)self, self->args, self->kw) < 0) { /* we need to get rid of ldict from thread so we create a new one the next time we do an attr @@ -336,8 +335,7 @@ static PyObject *local_getattro(localobject *, PyObject *); static PyTypeObject localtype = { - PyObject_HEAD_INIT(NULL) - /* ob_size */ 0, + PyVarObject_HEAD_INIT(NULL, 0) /* tp_name */ "thread._local", /* tp_basicsize */ sizeof(localobject), /* tp_itemsize */ 0, @@ -388,7 +386,7 @@ if (ldict == NULL) return NULL; - if (self->ob_type != &localtype) + if (Py_Type(self) != &localtype) /* use generic lookup for subtypes 
*/ return PyObject_GenericGetAttr((PyObject *)self, name); Modified: python/trunk/Modules/unicodedata.c ============================================================================== --- python/trunk/Modules/unicodedata.c (original) +++ python/trunk/Modules/unicodedata.c Sat Jul 21 08:55:02 2007 @@ -1134,8 +1134,7 @@ static PyTypeObject UCD_Type = { /* The ob_type field must be initialized in the module init function * to be portable to Windows without using C++. */ - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "unicodedata.UCD", /*tp_name*/ sizeof(PreviousDBVersion), /*tp_basicsize*/ 0, /*tp_itemsize*/ @@ -1193,7 +1192,7 @@ { PyObject *m, *v; - UCD_Type.ob_type = &PyType_Type; + Py_Type(&UCD_Type) = &PyType_Type; m = Py_InitModule3( "unicodedata", unicodedata_functions, unicodedata_docstring); Modified: python/trunk/Modules/xxmodule.c ============================================================================== --- python/trunk/Modules/xxmodule.c (original) +++ python/trunk/Modules/xxmodule.c Sat Jul 21 08:55:02 2007 @@ -25,7 +25,7 @@ static PyTypeObject Xxo_Type; -#define XxoObject_Check(v) ((v)->ob_type == &Xxo_Type) +#define XxoObject_Check(v) (Py_Type(v) == &Xxo_Type) static XxoObject * newXxoObject(PyObject *arg) @@ -97,8 +97,7 @@ static PyTypeObject Xxo_Type = { /* The ob_type field must be initialized in the module init function * to be portable to Windows without using C++. */ - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "xxmodule.Xxo", /*tp_name*/ sizeof(XxoObject), /*tp_basicsize*/ 0, /*tp_itemsize*/ @@ -216,8 +215,7 @@ static PyTypeObject Str_Type = { /* The ob_type field must be initialized in the module init function * to be portable to Windows without using C++. 
*/ - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "xxmodule.Str", /*tp_name*/ 0, /*tp_basicsize*/ 0, /*tp_itemsize*/ @@ -272,8 +270,7 @@ static PyTypeObject Null_Type = { /* The ob_type field must be initialized in the module init function * to be portable to Windows without using C++. */ - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "xxmodule.Null", /*tp_name*/ 0, /*tp_basicsize*/ 0, /*tp_itemsize*/ Modified: python/trunk/Modules/xxsubtype.c ============================================================================== --- python/trunk/Modules/xxsubtype.c (original) +++ python/trunk/Modules/xxsubtype.c Sat Jul 21 08:55:02 2007 @@ -101,8 +101,7 @@ }; static PyTypeObject spamlist_type = { - PyObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type)) - 0, + PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) "xxsubtype.spamlist", sizeof(spamlistobject), 0, @@ -193,8 +192,7 @@ }; static PyTypeObject spamdict_type = { - PyObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type)) - 0, + PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) "xxsubtype.spamdict", sizeof(spamdictobject), 0, Modified: python/trunk/Modules/zipimport.c ============================================================================== --- python/trunk/Modules/zipimport.c (original) +++ python/trunk/Modules/zipimport.c Sat Jul 21 08:55:02 2007 @@ -181,7 +181,7 @@ Py_XDECREF(self->archive); Py_XDECREF(self->prefix); Py_XDECREF(self->files); - self->ob_type->tp_free((PyObject *)self); + Py_Type(self)->tp_free((PyObject *)self); } static PyObject * @@ -561,8 +561,7 @@ #define DEFERRED_ADDRESS(ADDR) 0 static PyTypeObject ZipImporter_Type = { - PyObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type)) - 0, + PyVarObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type), 0) "zipimport.zipimporter", sizeof(ZipImporter), 0, /* tp_itemsize */ Modified: python/trunk/Modules/zlibmodule.c ============================================================================== --- 
python/trunk/Modules/zlibmodule.c (original) +++ python/trunk/Modules/zlibmodule.c Sat Jul 21 08:55:02 2007 @@ -935,8 +935,7 @@ }; static PyTypeObject Comptype = { - PyObject_HEAD_INIT(0) - 0, + PyVarObject_HEAD_INIT(0, 0) "zlib.Compress", sizeof(compobject), 0, @@ -952,8 +951,7 @@ }; static PyTypeObject Decomptype = { - PyObject_HEAD_INIT(0) - 0, + PyVarObject_HEAD_INIT(0, 0) "zlib.Decompress", sizeof(compobject), 0, @@ -987,8 +985,8 @@ PyInit_zlib(void) { PyObject *m, *ver; - Comptype.ob_type = &PyType_Type; - Decomptype.ob_type = &PyType_Type; + Py_Type(&Comptype) = &PyType_Type; + Py_Type(&Decomptype) = &PyType_Type; m = Py_InitModule4("zlib", zlib_methods, zlib_module_documentation, (PyObject*)NULL,PYTHON_API_VERSION); Modified: python/trunk/Objects/boolobject.c ============================================================================== --- python/trunk/Objects/boolobject.c (original) +++ python/trunk/Objects/boolobject.c Sat Jul 21 08:55:02 2007 @@ -146,8 +146,7 @@ /* The type object for bool. Note that this cannot be subclassed! 
*/ PyTypeObject PyBool_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "bool", sizeof(PyIntObject), 0, Modified: python/trunk/Objects/bufferobject.c ============================================================================== --- python/trunk/Objects/bufferobject.c (original) +++ python/trunk/Objects/bufferobject.c Sat Jul 21 08:55:02 2007 @@ -664,8 +664,7 @@ }; PyTypeObject PyBuffer_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "buffer", sizeof(PyBufferObject), 0, Modified: python/trunk/Objects/cellobject.c ============================================================================== --- python/trunk/Objects/cellobject.c (original) +++ python/trunk/Objects/cellobject.c Sat Jul 21 08:55:02 2007 @@ -99,8 +99,7 @@ }; PyTypeObject PyCell_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "cell", sizeof(PyCellObject), 0, Modified: python/trunk/Objects/cobject.c ============================================================================== --- python/trunk/Objects/cobject.c (original) +++ python/trunk/Objects/cobject.c Sat Jul 21 08:55:02 2007 @@ -135,8 +135,7 @@ mechanism to link to one another."); PyTypeObject PyCObject_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "PyCObject", /*tp_name*/ sizeof(PyCObject), /*tp_basicsize*/ 0, /*tp_itemsize*/ Modified: python/trunk/Objects/codeobject.c ============================================================================== --- python/trunk/Objects/codeobject.c (original) +++ python/trunk/Objects/codeobject.c Sat Jul 21 08:55:02 2007 @@ -354,8 +354,7 @@ /* XXX code objects need to participate in GC? 
*/ PyTypeObject PyCode_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "code", sizeof(PyCodeObject), 0, Modified: python/trunk/Objects/complexobject.c ============================================================================== --- python/trunk/Objects/complexobject.c (original) +++ python/trunk/Objects/complexobject.c Sat Jul 21 08:55:02 2007 @@ -1074,8 +1074,7 @@ }; PyTypeObject PyComplex_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "complex", sizeof(PyComplexObject), 0, Modified: python/trunk/Objects/descrobject.c ============================================================================== --- python/trunk/Objects/descrobject.c (original) +++ python/trunk/Objects/descrobject.c Sat Jul 21 08:55:02 2007 @@ -382,8 +382,7 @@ } static PyTypeObject PyMethodDescr_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "method_descriptor", sizeof(PyMethodDescrObject), 0, @@ -421,8 +420,7 @@ /* This is for METH_CLASS in C, not for "f = classmethod(f)" in Python! 
*/ static PyTypeObject PyClassMethodDescr_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "classmethod_descriptor", sizeof(PyMethodDescrObject), 0, @@ -459,8 +457,7 @@ }; static PyTypeObject PyMemberDescr_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "member_descriptor", sizeof(PyMemberDescrObject), 0, @@ -497,8 +494,7 @@ }; static PyTypeObject PyGetSetDescr_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "getset_descriptor", sizeof(PyGetSetDescrObject), 0, @@ -535,8 +531,7 @@ }; PyTypeObject PyWrapperDescr_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "wrapper_descriptor", sizeof(PyWrapperDescrObject), 0, @@ -825,8 +820,7 @@ } static PyTypeObject proxytype = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "dictproxy", /* tp_name */ sizeof(proxyobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -1007,8 +1001,7 @@ } static PyTypeObject wrappertype = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "method-wrapper", /* tp_name */ sizeof(wrapperobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -1238,8 +1231,7 @@ } PyTypeObject PyProperty_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "property", /* tp_name */ sizeof(propertyobject), /* tp_basicsize */ 0, /* tp_itemsize */ Modified: python/trunk/Objects/dictobject.c ============================================================================== --- python/trunk/Objects/dictobject.c (original) +++ python/trunk/Objects/dictobject.c Sat Jul 21 08:55:02 2007 @@ -204,7 +204,7 @@ if (num_free_dicts) { mp = free_dicts[--num_free_dicts]; assert (mp != NULL); - assert (mp->ob_type == &PyDict_Type); + assert (Py_Type(mp) == &PyDict_Type); _Py_NewReference((PyObject *)mp); if (mp->ma_fill) { 
EMPTY_TO_MINSIZE(mp); @@ -849,10 +849,10 @@ } if (mp->ma_table != mp->ma_smalltable) PyMem_DEL(mp->ma_table); - if (num_free_dicts < MAXFREEDICTS && mp->ob_type == &PyDict_Type) + if (num_free_dicts < MAXFREEDICTS && Py_Type(mp) == &PyDict_Type) free_dicts[num_free_dicts++] = mp; else - mp->ob_type->tp_free((PyObject *)mp); + Py_Type(mp)->tp_free((PyObject *)mp); Py_TRASHCAN_SAFE_END(mp) } @@ -1011,7 +1011,7 @@ if (missing_str == NULL) missing_str = PyString_InternFromString("__missing__"); - missing = _PyType_Lookup(mp->ob_type, missing_str); + missing = _PyType_Lookup(Py_Type(mp), missing_str); if (missing != NULL) return PyObject_CallFunctionObjArgs(missing, (PyObject *)mp, key, NULL); @@ -2119,8 +2119,7 @@ " in the keyword argument list. For example: dict(one=1, two=2)"); PyTypeObject PyDict_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "dict", sizeof(dictobject), 0, @@ -2302,8 +2301,7 @@ } PyTypeObject PyDictIterKey_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "dictionary-keyiterator", /* tp_name */ sizeof(dictiterobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -2375,8 +2373,7 @@ } PyTypeObject PyDictIterValue_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "dictionary-valueiterator", /* tp_name */ sizeof(dictiterobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -2462,8 +2459,7 @@ } PyTypeObject PyDictIterItem_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "dictionary-itemiterator", /* tp_name */ sizeof(dictiterobject), /* tp_basicsize */ 0, /* tp_itemsize */ Modified: python/trunk/Objects/enumobject.c ============================================================================== --- python/trunk/Objects/enumobject.c (original) +++ python/trunk/Objects/enumobject.c Sat Jul 21 08:55:02 2007 @@ -43,7 +43,7 @@ PyObject_GC_UnTrack(en); 
Py_XDECREF(en->en_sit); Py_XDECREF(en->en_result); - en->ob_type->tp_free(en); + Py_Type(en)->tp_free(en); } static int @@ -68,7 +68,7 @@ return NULL; } - next_item = (*it->ob_type->tp_iternext)(it); + next_item = (*Py_Type(it)->tp_iternext)(it); if (next_item == NULL) return NULL; @@ -105,8 +105,7 @@ "for obtaining an indexed list: (0, seq[0]), (1, seq[1]), (2, seq[2]), ..."); PyTypeObject PyEnum_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "enumerate", /* tp_name */ sizeof(enumobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -195,7 +194,7 @@ { PyObject_GC_UnTrack(ro); Py_XDECREF(ro->seq); - ro->ob_type->tp_free(ro); + Py_Type(ro)->tp_free(ro); } static int @@ -253,8 +252,7 @@ }; PyTypeObject PyReversed_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "reversed", /* tp_name */ sizeof(reversedobject), /* tp_basicsize */ 0, /* tp_itemsize */ Modified: python/trunk/Objects/exceptions.c ============================================================================== --- python/trunk/Objects/exceptions.c (original) +++ python/trunk/Objects/exceptions.c Sat Jul 21 08:55:02 2007 @@ -56,7 +56,7 @@ static int BaseException_init(PyBaseExceptionObject *self, PyObject *args, PyObject *kwds) { - if (!_PyArg_NoKeywords(self->ob_type->tp_name, kwds)) + if (!_PyArg_NoKeywords(Py_Type(self)->tp_name, kwds)) return -1; Py_DECREF(self->args); @@ -85,7 +85,7 @@ { _PyObject_GC_UNTRACK(self); BaseException_clear(self); - self->ob_type->tp_free((PyObject *)self); + Py_Type(self)->tp_free((PyObject *)self); } static int @@ -129,7 +129,7 @@ if (!repr_suffix) return NULL; - name = (char *)self->ob_type->tp_name; + name = (char *)Py_Type(self)->tp_name; dot = strrchr(name, '.'); if (dot != NULL) name = dot+1; @@ -148,9 +148,9 @@ BaseException_reduce(PyBaseExceptionObject *self) { if (self->args && self->dict) - return PyTuple_Pack(3, self->ob_type, self->args, self->dict); + 
return PyTuple_Pack(3, Py_Type(self), self->args, self->dict); else - return PyTuple_Pack(2, self->ob_type, self->args); + return PyTuple_Pack(2, Py_Type(self), self->args); } /* @@ -478,7 +478,7 @@ { _PyObject_GC_UNTRACK(self); SystemExit_clear(self); - self->ob_type->tp_free((PyObject *)self); + Py_Type(self)->tp_free((PyObject *)self); } static int @@ -583,7 +583,7 @@ { _PyObject_GC_UNTRACK(self); EnvironmentError_clear(self); - self->ob_type->tp_free((PyObject *)self); + Py_Type(self)->tp_free((PyObject *)self); } static int @@ -725,9 +725,9 @@ Py_INCREF(args); if (self->dict) - res = PyTuple_Pack(3, self->ob_type, args, self->dict); + res = PyTuple_Pack(3, Py_Type(self), args, self->dict); else - res = PyTuple_Pack(2, self->ob_type, args); + res = PyTuple_Pack(2, Py_Type(self), args); Py_DECREF(args); return res; } @@ -780,7 +780,7 @@ { _PyObject_GC_UNTRACK(self); WindowsError_clear(self); - self->ob_type->tp_free((PyObject *)self); + Py_Type(self)->tp_free((PyObject *)self); } static int @@ -1054,7 +1054,7 @@ { _PyObject_GC_UNTRACK(self); SyntaxError_clear(self); - self->ob_type->tp_free((PyObject *)self); + Py_Type(self)->tp_free((PyObject *)self); } static int @@ -1532,7 +1532,7 @@ { _PyObject_GC_UNTRACK(self); UnicodeError_clear(self); - self->ob_type->tp_free((PyObject *)self); + Py_Type(self)->tp_free((PyObject *)self); } static int Modified: python/trunk/Objects/fileobject.c ============================================================================== --- python/trunk/Objects/fileobject.c (original) +++ python/trunk/Objects/fileobject.c Sat Jul 21 08:55:02 2007 @@ -406,7 +406,7 @@ Py_XDECREF(f->f_mode); Py_XDECREF(f->f_encoding); drop_readahead(f); - f->ob_type->tp_free((PyObject *)f); + Py_Type(f)->tp_free((PyObject *)f); } static PyObject * @@ -2077,8 +2077,7 @@ ); PyTypeObject PyFile_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "file", sizeof(PyFileObject), 0, Modified: python/trunk/Objects/floatobject.c 
============================================================================== --- python/trunk/Objects/floatobject.c (original) +++ python/trunk/Objects/floatobject.c Sat Jul 21 08:55:02 2007 @@ -41,8 +41,8 @@ p = &((PyFloatBlock *)p)->objects[0]; q = p + N_FLOATOBJECTS; while (--q > p) - q->ob_type = (struct _typeobject *)(q-1); - q->ob_type = NULL; + Py_Type(q) = (struct _typeobject *)(q-1); + Py_Type(q) = NULL; return p + N_FLOATOBJECTS - 1; } @@ -56,7 +56,7 @@ } /* Inline PyObject_New */ op = free_list; - free_list = (PyFloatObject *)op->ob_type; + free_list = (PyFloatObject *)Py_Type(op); PyObject_INIT(op, &PyFloat_Type); op->ob_fval = fval; return (PyObject *) op; @@ -175,11 +175,11 @@ float_dealloc(PyFloatObject *op) { if (PyFloat_CheckExact(op)) { - op->ob_type = (struct _typeobject *)free_list; + Py_Type(op) = (struct _typeobject *)free_list; free_list = op; } else - op->ob_type->tp_free((PyObject *)op); + Py_Type(op)->tp_free((PyObject *)op); } double @@ -197,7 +197,7 @@ return -1; } - if ((nb = op->ob_type->tp_as_number) == NULL || nb->nb_float == NULL) { + if ((nb = Py_Type(op)->tp_as_number) == NULL || nb->nb_float == NULL) { PyErr_SetString(PyExc_TypeError, "a float is required"); return -1; } @@ -986,7 +986,7 @@ if (!PyString_Check(arg)) { PyErr_Format(PyExc_TypeError, "__getformat__() argument must be string, not %.500s", - arg->ob_type->tp_name); + Py_Type(arg)->tp_name); return NULL; } s = PyString_AS_STRING(arg); @@ -1152,8 +1152,7 @@ }; PyTypeObject PyFloat_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "float", sizeof(PyFloatObject), 0, @@ -1265,7 +1264,7 @@ for (i = 0, p = &list->objects[0]; i < N_FLOATOBJECTS; i++, p++) { - if (PyFloat_CheckExact(p) && p->ob_refcnt != 0) + if (PyFloat_CheckExact(p) && Py_Refcnt(p) != 0) frem++; } next = list->next; @@ -1276,8 +1275,8 @@ i < N_FLOATOBJECTS; i++, p++) { if (!PyFloat_CheckExact(p) || - p->ob_refcnt == 0) { - p->ob_type = (struct _typeobject *) + 
Py_Refcnt(p) == 0) { + Py_Type(p) = (struct _typeobject *) free_list; free_list = p; } @@ -1309,7 +1308,7 @@ i < N_FLOATOBJECTS; i++, p++) { if (PyFloat_CheckExact(p) && - p->ob_refcnt != 0) { + Py_Refcnt(p) != 0) { char buf[100]; PyFloat_AsString(buf, p); /* XXX(twouters) cast refcount to @@ -1318,7 +1317,7 @@ */ fprintf(stderr, "# \n", - p, (long)p->ob_refcnt, buf); + p, (long)Py_Refcnt(p), buf); } } list = list->next; Modified: python/trunk/Objects/frameobject.c ============================================================================== --- python/trunk/Objects/frameobject.c (original) +++ python/trunk/Objects/frameobject.c Sat Jul 21 08:55:02 2007 @@ -509,8 +509,7 @@ PyTypeObject PyFrame_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "frame", sizeof(PyFrameObject), sizeof(PyObject *), @@ -623,7 +622,7 @@ --numfree; f = free_list; free_list = free_list->f_back; - if (f->ob_size < extras) { + if (Py_Size(f) < extras) { f = PyObject_GC_Resize(PyFrameObject, f, extras); if (f == NULL) { Py_DECREF(builtins); Modified: python/trunk/Objects/funcobject.c ============================================================================== --- python/trunk/Objects/funcobject.c (original) +++ python/trunk/Objects/funcobject.c Sat Jul 21 08:55:02 2007 @@ -544,8 +544,7 @@ } PyTypeObject PyFunction_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "function", sizeof(PyFunctionObject), 0, @@ -615,7 +614,7 @@ { _PyObject_GC_UNTRACK((PyObject *)cm); Py_XDECREF(cm->cm_callable); - cm->ob_type->tp_free((PyObject *)cm); + Py_Type(cm)->tp_free((PyObject *)cm); } static int @@ -644,9 +643,9 @@ return NULL; } if (type == NULL) - type = (PyObject *)(obj->ob_type); + type = (PyObject *)(Py_Type(obj)); return PyMethod_New(cm->cm_callable, - type, (PyObject *)(type->ob_type)); + type, (PyObject *)(Py_Type(type))); } static int @@ -692,8 +691,7 @@ If you want those, see the staticmethod builtin."); 
PyTypeObject PyClassMethod_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "classmethod", sizeof(classmethod), 0, @@ -773,7 +771,7 @@ { _PyObject_GC_UNTRACK((PyObject *)sm); Py_XDECREF(sm->sm_callable); - sm->ob_type->tp_free((PyObject *)sm); + Py_Type(sm)->tp_free((PyObject *)sm); } static int @@ -840,8 +838,7 @@ For a more advanced concept, see the classmethod builtin."); PyTypeObject PyStaticMethod_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "staticmethod", sizeof(staticmethod), 0, Modified: python/trunk/Objects/genobject.c ============================================================================== --- python/trunk/Objects/genobject.c (original) +++ python/trunk/Objects/genobject.c Sat Jul 21 08:55:02 2007 @@ -28,7 +28,7 @@ if (gen->gi_frame != NULL && gen->gi_frame->f_stacktop != NULL) { /* Generator is paused, so we need to close */ - gen->ob_type->tp_del(self); + Py_Type(gen)->tp_del(self); if (self->ob_refcnt > 0) return; /* resurrected. 
:( */ } @@ -296,8 +296,7 @@ }; PyTypeObject PyGen_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "generator", /* tp_name */ sizeof(PyGenObject), /* tp_basicsize */ 0, /* tp_itemsize */ Modified: python/trunk/Objects/intobject.c ============================================================================== --- python/trunk/Objects/intobject.c (original) +++ python/trunk/Objects/intobject.c Sat Jul 21 08:55:02 2007 @@ -56,8 +56,8 @@ p = &((PyIntBlock *)p)->objects[0]; q = p + N_INTOBJECTS; while (--q > p) - q->ob_type = (struct _typeobject *)(q-1); - q->ob_type = NULL; + Py_Type(q) = (struct _typeobject *)(q-1); + Py_Type(q) = NULL; return p + N_INTOBJECTS - 1; } @@ -102,7 +102,7 @@ } /* Inline PyObject_New */ v = free_list; - free_list = (PyIntObject *)v->ob_type; + free_list = (PyIntObject *)Py_Type(v); PyObject_INIT(v, &PyInt_Type); v->ob_ival = ival; return (PyObject *) v; @@ -128,17 +128,17 @@ int_dealloc(PyIntObject *v) { if (PyInt_CheckExact(v)) { - v->ob_type = (struct _typeobject *)free_list; + Py_Type(v) = (struct _typeobject *)free_list; free_list = v; } else - v->ob_type->tp_free((PyObject *)v); + Py_Type(v)->tp_free((PyObject *)v); } static void int_free(PyIntObject *v) { - v->ob_type = (struct _typeobject *)free_list; + Py_Type(v) = (struct _typeobject *)free_list; free_list = v; } @@ -152,7 +152,7 @@ if (op && PyInt_Check(op)) return PyInt_AS_LONG((PyIntObject*) op); - if (op == NULL || (nb = op->ob_type->tp_as_number) == NULL || + if (op == NULL || (nb = Py_Type(op)->tp_as_number) == NULL || nb->nb_int == NULL) { PyErr_SetString(PyExc_TypeError, "an integer is required"); return -1; @@ -207,7 +207,7 @@ return PyInt_AsLong(op); #else - if ((nb = op->ob_type->tp_as_number) == NULL || + if ((nb = Py_Type(op)->tp_as_number) == NULL || (nb->nb_int == NULL && nb->nb_long == 0)) { PyErr_SetString(PyExc_TypeError, "an integer is required"); return -1; @@ -256,7 +256,7 @@ if (op && PyLong_Check(op)) 
return PyLong_AsUnsignedLongMask(op); - if (op == NULL || (nb = op->ob_type->tp_as_number) == NULL || + if (op == NULL || (nb = Py_Type(op)->tp_as_number) == NULL || nb->nb_int == NULL) { PyErr_SetString(PyExc_TypeError, "an integer is required"); return (unsigned long)-1; @@ -301,7 +301,7 @@ if (op && PyLong_Check(op)) return PyLong_AsUnsignedLongLongMask(op); - if (op == NULL || (nb = op->ob_type->tp_as_number) == NULL || + if (op == NULL || (nb = Py_Type(op)->tp_as_number) == NULL || nb->nb_int == NULL) { PyErr_SetString(PyExc_TypeError, "an integer is required"); return (unsigned PY_LONG_LONG)-1; @@ -1116,8 +1116,7 @@ }; PyTypeObject PyInt_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "int", sizeof(PyIntObject), 0, @@ -1170,7 +1169,7 @@ return 0; /* PyObject_New is inlined */ v = free_list; - free_list = (PyIntObject *)v->ob_type; + free_list = (PyIntObject *)Py_Type(v); PyObject_INIT(v, &PyInt_Type); v->ob_ival = ival; small_ints[ival + NSMALLNEGINTS] = v; @@ -1223,7 +1222,7 @@ ctr++, p++) { if (!PyInt_CheckExact(p) || p->ob_refcnt == 0) { - p->ob_type = (struct _typeobject *) + Py_Type(p) = (struct _typeobject *) free_list; free_list = p; } Modified: python/trunk/Objects/iterobject.c ============================================================================== --- python/trunk/Objects/iterobject.c (original) +++ python/trunk/Objects/iterobject.c Sat Jul 21 08:55:02 2007 @@ -94,8 +94,7 @@ }; PyTypeObject PySeqIter_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "iterator", /* tp_name */ sizeof(seqiterobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -199,8 +198,7 @@ } PyTypeObject PyCallIter_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "callable-iterator", /* tp_name */ sizeof(calliterobject), /* tp_basicsize */ 0, /* tp_itemsize */ Modified: python/trunk/Objects/listobject.c 
============================================================================== --- python/trunk/Objects/listobject.c (original) +++ python/trunk/Objects/listobject.c Sat Jul 21 08:55:02 2007 @@ -34,7 +34,7 @@ */ if (allocated >= newsize && newsize >= (allocated >> 1)) { assert(self->ob_item != NULL || newsize == 0); - self->ob_size = newsize; + Py_Size(self) = newsize; return 0; } @@ -58,7 +58,7 @@ return -1; } self->ob_item = items; - self->ob_size = newsize; + Py_Size(self) = newsize; self->allocated = new_allocated; return 0; } @@ -114,7 +114,7 @@ } memset(op->ob_item, 0, nbytes); } - op->ob_size = size; + Py_Size(op) = size; op->allocated = size; _PyObject_GC_TRACK(op); return (PyObject *) op; @@ -128,7 +128,7 @@ return -1; } else - return ((PyListObject *)op) -> ob_size; + return Py_Size(op); } static PyObject *indexerr = NULL; @@ -140,7 +140,7 @@ PyErr_BadInternalCall(); return NULL; } - if (i < 0 || i >= ((PyListObject *)op) -> ob_size) { + if (i < 0 || i >= Py_Size(op)) { if (indexerr == NULL) indexerr = PyString_FromString( "list index out of range"); @@ -161,7 +161,7 @@ PyErr_BadInternalCall(); return -1; } - if (i < 0 || i >= ((PyListObject *)op) -> ob_size) { + if (i < 0 || i >= Py_Size(op)) { Py_XDECREF(newitem); PyErr_SetString(PyExc_IndexError, "list assignment index out of range"); @@ -177,7 +177,7 @@ static int ins1(PyListObject *self, Py_ssize_t where, PyObject *v) { - Py_ssize_t i, n = self->ob_size; + Py_ssize_t i, n = Py_Size(self); PyObject **items; if (v == NULL) { PyErr_BadInternalCall(); @@ -259,7 +259,7 @@ There's a simple test case where somehow this reduces thrashing when a *very* large list is created and immediately deleted. 
*/ - i = op->ob_size; + i = Py_Size(op); while (--i >= 0) { Py_XDECREF(op->ob_item[i]); } @@ -268,7 +268,7 @@ if (num_free_lists < MAXFREELISTS && PyList_CheckExact(op)) free_lists[num_free_lists++] = op; else - op->ob_type->tp_free((PyObject *)op); + Py_Type(op)->tp_free((PyObject *)op); Py_TRASHCAN_SAFE_END(op) } @@ -286,7 +286,7 @@ return 0; } fprintf(fp, "["); - for (i = 0; i < op->ob_size; i++) { + for (i = 0; i < Py_Size(op); i++) { if (i > 0) fprintf(fp, ", "); if (PyObject_Print(op->ob_item[i], fp, 0) != 0) { @@ -311,7 +311,7 @@ return i > 0 ? PyString_FromString("[...]") : NULL; } - if (v->ob_size == 0) { + if (Py_Size(v) == 0) { result = PyString_FromString("[]"); goto Done; } @@ -322,7 +322,7 @@ /* Do repr() on each element. Note that this may mutate the list, so must refetch the list size on each iteration. */ - for (i = 0; i < v->ob_size; ++i) { + for (i = 0; i < Py_Size(v); ++i) { int status; s = PyObject_Repr(v->ob_item[i]); if (s == NULL) @@ -369,7 +369,7 @@ static Py_ssize_t list_length(PyListObject *a) { - return a->ob_size; + return Py_Size(a); } static int @@ -378,7 +378,7 @@ Py_ssize_t i; int cmp; - for (i = 0, cmp = 0 ; cmp == 0 && i < a->ob_size; ++i) + for (i = 0, cmp = 0 ; cmp == 0 && i < Py_Size(a); ++i) cmp = PyObject_RichCompareBool(el, PyList_GET_ITEM(a, i), Py_EQ); return cmp; @@ -387,7 +387,7 @@ static PyObject * list_item(PyListObject *a, Py_ssize_t i) { - if (i < 0 || i >= a->ob_size) { + if (i < 0 || i >= Py_Size(a)) { if (indexerr == NULL) indexerr = PyString_FromString( "list index out of range"); @@ -406,12 +406,12 @@ Py_ssize_t i, len; if (ilow < 0) ilow = 0; - else if (ilow > a->ob_size) - ilow = a->ob_size; + else if (ilow > Py_Size(a)) + ilow = Py_Size(a); if (ihigh < ilow) ihigh = ilow; - else if (ihigh > a->ob_size) - ihigh = a->ob_size; + else if (ihigh > Py_Size(a)) + ihigh = Py_Size(a); len = ihigh - ilow; np = (PyListObject *) PyList_New(len); if (np == NULL) @@ -451,7 +451,7 @@ return NULL; } #define b ((PyListObject 
*)bb) - size = a->ob_size + b->ob_size; + size = Py_Size(a) + Py_Size(b); if (size < 0) return PyErr_NoMemory(); np = (PyListObject *) PyList_New(size); @@ -460,14 +460,14 @@ } src = a->ob_item; dest = np->ob_item; - for (i = 0; i < a->ob_size; i++) { + for (i = 0; i < Py_Size(a); i++) { PyObject *v = src[i]; Py_INCREF(v); dest[i] = v; } src = b->ob_item; - dest = np->ob_item + a->ob_size; - for (i = 0; i < b->ob_size; i++) { + dest = np->ob_item + Py_Size(a); + for (i = 0; i < Py_Size(b); i++) { PyObject *v = src[i]; Py_INCREF(v); dest[i] = v; @@ -486,17 +486,17 @@ PyObject *elem; if (n < 0) n = 0; - size = a->ob_size * n; + size = Py_Size(a) * n; if (size == 0) return PyList_New(0); - if (n && size/n != a->ob_size) + if (n && size/n != Py_Size(a)) return PyErr_NoMemory(); np = (PyListObject *) PyList_New(size); if (np == NULL) return NULL; items = np->ob_item; - if (a->ob_size == 1) { + if (Py_Size(a) == 1) { elem = a->ob_item[0]; for (i = 0; i < n; i++) { items[i] = elem; @@ -507,7 +507,7 @@ p = np->ob_item; items = a->ob_item; for (i = 0; i < n; i++) { - for (j = 0; j < a->ob_size; j++) { + for (j = 0; j < Py_Size(a); j++) { *p = items[j]; Py_INCREF(*p); p++; @@ -524,8 +524,8 @@ if (item != NULL) { /* Because XDECREF can recursively invoke operations on this list, we make it empty first. 
*/ - i = a->ob_size; - a->ob_size = 0; + i = Py_Size(a); + Py_Size(a) = 0; a->ob_item = NULL; a->allocated = 0; while (--i >= 0) { @@ -571,7 +571,7 @@ else { if (a == b) { /* Special case "a[i:j] = a" -- copy b first */ - v = list_slice(b, 0, b->ob_size); + v = list_slice(b, 0, Py_Size(b)); if (v == NULL) return result; result = list_ass_slice(a, ilow, ihigh, v); @@ -586,18 +586,18 @@ } if (ilow < 0) ilow = 0; - else if (ilow > a->ob_size) - ilow = a->ob_size; + else if (ilow > Py_Size(a)) + ilow = Py_Size(a); if (ihigh < ilow) ihigh = ilow; - else if (ihigh > a->ob_size) - ihigh = a->ob_size; + else if (ihigh > Py_Size(a)) + ihigh = Py_Size(a); norig = ihigh - ilow; assert(norig >= 0); d = n - norig; - if (a->ob_size + d == 0) { + if (Py_Size(a) + d == 0) { Py_XDECREF(v_as_SF); return list_clear(a); } @@ -615,12 +615,12 @@ if (d < 0) { /* Delete -d items */ memmove(&item[ihigh+d], &item[ihigh], - (a->ob_size - ihigh)*sizeof(PyObject *)); - list_resize(a, a->ob_size + d); + (Py_Size(a) - ihigh)*sizeof(PyObject *)); + list_resize(a, Py_Size(a) + d); item = a->ob_item; } else if (d > 0) { /* Insert d items */ - k = a->ob_size; + k = Py_Size(a); if (list_resize(a, k+d) < 0) goto Error; item = a->ob_item; @@ -692,7 +692,7 @@ list_ass_item(PyListObject *a, Py_ssize_t i, PyObject *v) { PyObject *old_value; - if (i < 0 || i >= a->ob_size) { + if (i < 0 || i >= Py_Size(a)) { PyErr_SetString(PyExc_IndexError, "list assignment index out of range"); return -1; @@ -751,7 +751,7 @@ Py_DECREF(b); Py_RETURN_NONE; } - m = self->ob_size; + m = Py_Size(self); if (list_resize(self, m + n) == -1) { Py_DECREF(b); return NULL; @@ -789,14 +789,14 @@ PyErr_Clear(); n = 8; /* arbitrary */ } - m = self->ob_size; + m = Py_Size(self); mn = m + n; if (mn >= m) { /* Make room. */ if (list_resize(self, mn) == -1) goto error; /* Make the list sane again. 
*/ - self->ob_size = m; + Py_Size(self) = m; } /* Else m + n overflowed; on the chance that n lied, and there really * is enough room, ignore it. If n was telling the truth, we'll @@ -815,10 +815,10 @@ } break; } - if (self->ob_size < self->allocated) { + if (Py_Size(self) < self->allocated) { /* steals ref */ - PyList_SET_ITEM(self, self->ob_size, item); - ++self->ob_size; + PyList_SET_ITEM(self, Py_Size(self), item); + ++Py_Size(self); } else { int status = app1(self, item); @@ -829,8 +829,8 @@ } /* Cut back result list if initial guess was too large. */ - if (self->ob_size < self->allocated) - list_resize(self, self->ob_size); /* shrinking can't fail */ + if (Py_Size(self) < self->allocated) + list_resize(self, Py_Size(self)); /* shrinking can't fail */ Py_DECREF(it); Py_RETURN_NONE; @@ -869,20 +869,20 @@ if (!PyArg_ParseTuple(args, "|n:pop", &i)) return NULL; - if (self->ob_size == 0) { + if (Py_Size(self) == 0) { /* Special-case most common failure cause */ PyErr_SetString(PyExc_IndexError, "pop from empty list"); return NULL; } if (i < 0) - i += self->ob_size; - if (i < 0 || i >= self->ob_size) { + i += Py_Size(self); + if (i < 0 || i >= Py_Size(self)) { PyErr_SetString(PyExc_IndexError, "pop index out of range"); return NULL; } v = self->ob_item[i]; - if (i == self->ob_size - 1) { - status = list_resize(self, self->ob_size - 1); + if (i == Py_Size(self) - 1) { + status = list_resize(self, Py_Size(self) - 1); assert(status >= 0); return v; /* and v now owns the reference the list had */ } @@ -1812,8 +1812,7 @@ sortwrapper_dealloc(sortwrapperobject *); static PyTypeObject sortwrapper_type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "sortwrapper", /* tp_name */ sizeof(sortwrapperobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -1930,8 +1929,7 @@ PyDoc_STRVAR(cmpwrapper_doc, "cmp() wrapper for sort with custom keys."); static PyTypeObject cmpwrapper_type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* 
ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "cmpwrapper", /* tp_name */ sizeof(cmpwrapperobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -2014,10 +2012,10 @@ * sorting (allowing mutations during sorting is a core-dump * factory, since ob_item may change). */ - saved_ob_size = self->ob_size; + saved_ob_size = Py_Size(self); saved_ob_item = self->ob_item; saved_allocated = self->allocated; - self->ob_size = 0; + Py_Size(self) = 0; self->ob_item = NULL; self->allocated = -1; /* any operation will reset it to >= 0 */ @@ -2123,8 +2121,8 @@ dsu_fail: final_ob_item = self->ob_item; - i = self->ob_size; - self->ob_size = saved_ob_size; + i = Py_Size(self); + Py_Size(self) = saved_ob_size; self->ob_item = saved_ob_item; self->allocated = saved_allocated; if (final_ob_item != NULL) { @@ -2159,8 +2157,8 @@ static PyObject * listreverse(PyListObject *self) { - if (self->ob_size > 1) - reverse_slice(self->ob_item, self->ob_item + self->ob_size); + if (Py_Size(self) > 1) + reverse_slice(self->ob_item, self->ob_item + Py_Size(self)); Py_RETURN_NONE; } @@ -2173,8 +2171,8 @@ PyErr_BadInternalCall(); return -1; } - if (self->ob_size > 1) - reverse_slice(self->ob_item, self->ob_item + self->ob_size); + if (Py_Size(self) > 1) + reverse_slice(self->ob_item, self->ob_item + Py_Size(self)); return 0; } @@ -2188,7 +2186,7 @@ PyErr_BadInternalCall(); return NULL; } - n = ((PyListObject *)v)->ob_size; + n = Py_Size(v); w = PyTuple_New(n); if (w == NULL) return NULL; @@ -2206,7 +2204,7 @@ static PyObject * listindex(PyListObject *self, PyObject *args) { - Py_ssize_t i, start=0, stop=self->ob_size; + Py_ssize_t i, start=0, stop=Py_Size(self); PyObject *v; if (!PyArg_ParseTuple(args, "O|O&O&:index", &v, @@ -2214,16 +2212,16 @@ _PyEval_SliceIndex, &stop)) return NULL; if (start < 0) { - start += self->ob_size; + start += Py_Size(self); if (start < 0) start = 0; } if (stop < 0) { - stop += self->ob_size; + stop += Py_Size(self); if (stop < 0) stop = 0; } - for (i = start; i < stop && 
i < self->ob_size; i++) { + for (i = start; i < stop && i < Py_Size(self); i++) { int cmp = PyObject_RichCompareBool(self->ob_item[i], v, Py_EQ); if (cmp > 0) return PyInt_FromSsize_t(i); @@ -2240,7 +2238,7 @@ Py_ssize_t count = 0; Py_ssize_t i; - for (i = 0; i < self->ob_size; i++) { + for (i = 0; i < Py_Size(self); i++) { int cmp = PyObject_RichCompareBool(self->ob_item[i], v, Py_EQ); if (cmp > 0) count++; @@ -2255,7 +2253,7 @@ { Py_ssize_t i; - for (i = 0; i < self->ob_size; i++) { + for (i = 0; i < Py_Size(self); i++) { int cmp = PyObject_RichCompareBool(self->ob_item[i], v, Py_EQ); if (cmp > 0) { if (list_ass_slice(self, i, i+1, @@ -2275,7 +2273,7 @@ { Py_ssize_t i; - for (i = o->ob_size; --i >= 0; ) + for (i = Py_Size(o); --i >= 0; ) Py_VISIT(o->ob_item[i]); return 0; } @@ -2294,7 +2292,7 @@ vl = (PyListObject *)v; wl = (PyListObject *)w; - if (vl->ob_size != wl->ob_size && (op == Py_EQ || op == Py_NE)) { + if (Py_Size(vl) != Py_Size(wl) && (op == Py_EQ || op == Py_NE)) { /* Shortcut: if the lengths differ, the lists differ */ PyObject *res; if (op == Py_EQ) @@ -2306,7 +2304,7 @@ } /* Search for the first index where items are different */ - for (i = 0; i < vl->ob_size && i < wl->ob_size; i++) { + for (i = 0; i < Py_Size(vl) && i < Py_Size(wl); i++) { int k = PyObject_RichCompareBool(vl->ob_item[i], wl->ob_item[i], Py_EQ); if (k < 0) @@ -2315,10 +2313,10 @@ break; } - if (i >= vl->ob_size || i >= wl->ob_size) { + if (i >= Py_Size(vl) || i >= Py_Size(wl)) { /* No more items to compare -- compare sizes */ - Py_ssize_t vs = vl->ob_size; - Py_ssize_t ws = wl->ob_size; + Py_ssize_t vs = Py_Size(vl); + Py_ssize_t ws = Py_Size(wl); int cmp; PyObject *res; switch (op) { @@ -2362,8 +2360,8 @@ return -1; /* Verify list invariants established by PyType_GenericAlloc() */ - assert(0 <= self->ob_size); - assert(self->ob_size <= self->allocated || self->allocated == -1); + assert(0 <= Py_Size(self)); + assert(Py_Size(self) <= self->allocated || self->allocated == -1); 
assert(self->ob_item != NULL || self->allocated == 0 || self->allocated == -1); @@ -2467,7 +2465,7 @@ PyObject* it; PyObject **src, **dest; - if (PySlice_GetIndicesEx((PySliceObject*)item, self->ob_size, + if (PySlice_GetIndicesEx((PySliceObject*)item, Py_Size(self), &start, &stop, &step, &slicelength) < 0) { return NULL; } @@ -2513,7 +2511,7 @@ else if (PySlice_Check(item)) { Py_ssize_t start, stop, step, slicelength; - if (PySlice_GetIndicesEx((PySliceObject*)item, self->ob_size, + if (PySlice_GetIndicesEx((PySliceObject*)item, Py_Size(self), &start, &stop, &step, &slicelength) < 0) { return -1; } @@ -2552,8 +2550,8 @@ garbage[i] = PyList_GET_ITEM(self, cur); - if (cur + step >= self->ob_size) { - lim = self->ob_size - cur - 1; + if (cur + step >= Py_Size(self)) { + lim = Py_Size(self) - cur - 1; } memmove(self->ob_item + cur - i, @@ -2562,13 +2560,13 @@ } for (cur = start + slicelength*step + 1; - cur < self->ob_size; cur++) { + cur < Py_Size(self); cur++) { PyList_SET_ITEM(self, cur - slicelength, PyList_GET_ITEM(self, cur)); } - self->ob_size -= slicelength; - list_resize(self, self->ob_size); + Py_Size(self) -= slicelength; + list_resize(self, Py_Size(self)); for (i = 0; i < slicelength; i++) { Py_DECREF(garbage[i]); @@ -2651,8 +2649,7 @@ }; PyTypeObject PyList_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "list", sizeof(PyListObject), 0, @@ -2717,8 +2714,7 @@ }; PyTypeObject PyListIter_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "listiterator", /* tp_name */ sizeof(listiterobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -2840,8 +2836,7 @@ }; PyTypeObject PyListRevIter_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "listreverseiterator", /* tp_name */ sizeof(listreviterobject), /* tp_basicsize */ 0, /* tp_itemsize */ Modified: python/trunk/Objects/longobject.c 
============================================================================== --- python/trunk/Objects/longobject.c (original) +++ python/trunk/Objects/longobject.c Sat Jul 21 08:55:02 2007 @@ -50,13 +50,13 @@ static PyLongObject * long_normalize(register PyLongObject *v) { - Py_ssize_t j = ABS(v->ob_size); + Py_ssize_t j = ABS(Py_Size(v)); Py_ssize_t i = j; while (i > 0 && v->ob_digit[i-1] == 0) --i; if (i != j) - v->ob_size = (v->ob_size < 0) ? -(i) : i; + Py_Size(v) = (Py_Size(v) < 0) ? -(i) : i; return v; } @@ -147,7 +147,7 @@ v = _PyLong_New(ndigits); if (v != NULL) { digit *p = v->ob_digit; - v->ob_size = ndigits; + Py_Size(v) = ndigits; while (ival) { *p++ = (digit)(ival & MASK); ival >>= SHIFT; @@ -189,7 +189,7 @@ frac = ldexp(frac, SHIFT); } if (neg) - v->ob_size = -(v->ob_size); + Py_Size(v) = -(Py_Size(v)); return (PyObject *)v; } @@ -323,7 +323,7 @@ return (unsigned long) -1; } v = (PyLongObject *)vv; - i = v->ob_size; + i = Py_Size(v); x = 0; if (i < 0) { PyErr_SetString(PyExc_OverflowError, @@ -381,7 +381,7 @@ assert(v != NULL); assert(PyLong_Check(v)); - return v->ob_size == 0 ? 0 : (v->ob_size < 0 ? -1 : 1); + return Py_Size(v) == 0 ? 0 : (Py_Size(v) < 0 ? -1 : 1); } size_t @@ -393,7 +393,7 @@ assert(v != NULL); assert(PyLong_Check(v)); - ndigits = ABS(v->ob_size); + ndigits = ABS(Py_Size(v)); assert(ndigits == 0 || v->ob_digit[ndigits - 1] != 0); if (ndigits > 0) { digit msd = v->ob_digit[ndigits - 1]; @@ -519,7 +519,7 @@ } } - v->ob_size = is_signed ? -idigit : idigit; + Py_Size(v) = is_signed ? 
-idigit : idigit; return (PyObject *)long_normalize(v); } @@ -540,8 +540,8 @@ assert(v != NULL && PyLong_Check(v)); - if (v->ob_size < 0) { - ndigits = -(v->ob_size); + if (Py_Size(v) < 0) { + ndigits = -(Py_Size(v)); if (!is_signed) { PyErr_SetString(PyExc_TypeError, "can't convert negative long to unsigned"); @@ -550,7 +550,7 @@ do_twos_comp = 1; } else { - ndigits = v->ob_size; + ndigits = Py_Size(v); do_twos_comp = 0; } @@ -686,7 +686,7 @@ return -1; } v = (PyLongObject *)vv; - i = v->ob_size; + i = Py_Size(v); sign = 1; if (i < 0) { sign = -1; @@ -847,7 +847,7 @@ v = _PyLong_New(ndigits); if (v != NULL) { digit *p = v->ob_digit; - v->ob_size = negative ? -ndigits : ndigits; + Py_Size(v) = negative ? -ndigits : ndigits; t = (unsigned PY_LONG_LONG)ival; while (t) { *p++ = (digit)(t & MASK); @@ -875,7 +875,7 @@ v = _PyLong_New(ndigits); if (v != NULL) { digit *p = v->ob_digit; - v->ob_size = ndigits; + Py_Size(v) = ndigits; while (ival) { *p++ = (digit)(ival & MASK); ival >>= SHIFT; @@ -1117,7 +1117,7 @@ static PyLongObject * muladd1(PyLongObject *a, wdigit n, wdigit extra) { - Py_ssize_t size_a = ABS(a->ob_size); + Py_ssize_t size_a = ABS(Py_Size(a)); PyLongObject *z = _PyLong_New(size_a+1); twodigits carry = extra; Py_ssize_t i; @@ -1163,7 +1163,7 @@ static PyLongObject * divrem1(PyLongObject *a, digit n, digit *prem) { - const Py_ssize_t size = ABS(a->ob_size); + const Py_ssize_t size = ABS(Py_Size(a)); PyLongObject *z; assert(n > 0 && n <= MASK); @@ -1194,7 +1194,7 @@ return NULL; } assert(base >= 2 && base <= 36); - size_a = ABS(a->ob_size); + size_a = ABS(Py_Size(a)); /* Compute a rough upper bound for the length of the string */ i = base; @@ -1597,7 +1597,7 @@ z = _PyLong_New(size_z); if (z == NULL) return NULL; - z->ob_size = 0; + Py_Size(z) = 0; /* `convwidth` consecutive input digits are treated as a single * digit in base `convmultmax`. @@ -1627,7 +1627,7 @@ /* Multiply z by convmult, and add c. 
*/ pz = z->ob_digit; - pzstop = pz + z->ob_size; + pzstop = pz + Py_Size(z); for (; pz < pzstop; ++pz) { c += (twodigits)*pz * convmult; *pz = (digit)(c & MASK); @@ -1636,14 +1636,14 @@ /* carry off the current end? */ if (c) { assert(c < BASE); - if (z->ob_size < size_z) { + if (Py_Size(z) < size_z) { *pz = (digit)c; - ++z->ob_size; + ++Py_Size(z); } else { PyLongObject *tmp; /* Extremely rare. Get more space. */ - assert(z->ob_size == size_z); + assert(Py_Size(z) == size_z); tmp = _PyLong_New(size_z + 1); if (tmp == NULL) { Py_DECREF(z); @@ -1665,7 +1665,7 @@ if (str == start) goto onError; if (sign < 0) - z->ob_size = -(z->ob_size); + Py_Size(z) = -(Py_Size(z)); if (*str == 'L' || *str == 'l') str++; while (*str && isspace(Py_CHARMASK(*str))) @@ -1726,7 +1726,7 @@ long_divrem(PyLongObject *a, PyLongObject *b, PyLongObject **pdiv, PyLongObject **prem) { - Py_ssize_t size_a = ABS(a->ob_size), size_b = ABS(b->ob_size); + Py_ssize_t size_a = ABS(Py_Size(a)), size_b = ABS(Py_Size(b)); PyLongObject *z; if (size_b == 0) { @@ -1778,7 +1778,7 @@ static PyLongObject * x_divrem(PyLongObject *v1, PyLongObject *w1, PyLongObject **prem) { - Py_ssize_t size_v = ABS(v1->ob_size), size_w = ABS(w1->ob_size); + Py_ssize_t size_v = ABS(Py_Size(v1)), size_w = ABS(Py_Size(w1)); digit d = (digit) ((twodigits)BASE / (w1->ob_digit[size_w-1] + 1)); PyLongObject *v = mul1(v1, d); PyLongObject *w = mul1(w1, d); @@ -1792,10 +1792,10 @@ } assert(size_v >= size_w && size_w > 1); /* Assert checks by div() */ - assert(v->ob_refcnt == 1); /* Since v will be used as accumulator! */ - assert(size_w == ABS(w->ob_size)); /* That's how d was calculated */ + assert(Py_Refcnt(v) == 1); /* Since v will be used as accumulator! 
*/ + assert(size_w == ABS(Py_Size(w))); /* That's how d was calculated */ - size_v = ABS(v->ob_size); + size_v = ABS(Py_Size(v)); k = size_v - size_w; a = _PyLong_New(k + 1); @@ -1878,7 +1878,7 @@ static void long_dealloc(PyObject *v) { - v->ob_type->tp_free(v); + Py_Type(v)->tp_free(v); } static PyObject * @@ -1898,21 +1898,21 @@ { Py_ssize_t sign; - if (a->ob_size != b->ob_size) { - if (ABS(a->ob_size) == 0 && ABS(b->ob_size) == 0) + if (Py_Size(a) != Py_Size(b)) { + if (ABS(Py_Size(a)) == 0 && ABS(Py_Size(b)) == 0) sign = 0; else - sign = a->ob_size - b->ob_size; + sign = Py_Size(a) - Py_Size(b); } else { - Py_ssize_t i = ABS(a->ob_size); + Py_ssize_t i = ABS(Py_Size(a)); while (--i >= 0 && a->ob_digit[i] == b->ob_digit[i]) ; if (i < 0) sign = 0; else { sign = (int)a->ob_digit[i] - (int)b->ob_digit[i]; - if (a->ob_size < 0) + if (Py_Size(a) < 0) sign = -sign; } } @@ -1955,7 +1955,7 @@ static PyLongObject * x_add(PyLongObject *a, PyLongObject *b) { - Py_ssize_t size_a = ABS(a->ob_size), size_b = ABS(b->ob_size); + Py_ssize_t size_a = ABS(Py_Size(a)), size_b = ABS(Py_Size(b)); PyLongObject *z; int i; digit carry = 0; @@ -1989,7 +1989,7 @@ static PyLongObject * x_sub(PyLongObject *a, PyLongObject *b) { - Py_ssize_t size_a = ABS(a->ob_size), size_b = ABS(b->ob_size); + Py_ssize_t size_a = ABS(Py_Size(a)), size_b = ABS(Py_Size(b)); PyLongObject *z; Py_ssize_t i; int sign = 1; @@ -2099,15 +2099,15 @@ x_mul(PyLongObject *a, PyLongObject *b) { PyLongObject *z; - Py_ssize_t size_a = ABS(a->ob_size); - Py_ssize_t size_b = ABS(b->ob_size); + Py_ssize_t size_a = ABS(Py_Size(a)); + Py_ssize_t size_b = ABS(Py_Size(b)); Py_ssize_t i; z = _PyLong_New(size_a + size_b); if (z == NULL) return NULL; - memset(z->ob_digit, 0, z->ob_size * sizeof(digit)); + memset(z->ob_digit, 0, Py_Size(z) * sizeof(digit)); if (a == b) { /* Efficient squaring per HAC, Algorithm 14.16: * http://www.cacr.math.uwaterloo.ca/hac/about/chap14.pdf @@ -2191,7 +2191,7 @@ { PyLongObject *hi, *lo; Py_ssize_t 
size_lo, size_hi; - const Py_ssize_t size_n = ABS(n->ob_size); + const Py_ssize_t size_n = ABS(Py_Size(n)); size_lo = MIN(size_n, size); size_hi = size_n - size_lo; @@ -2220,8 +2220,8 @@ static PyLongObject * k_mul(PyLongObject *a, PyLongObject *b) { - Py_ssize_t asize = ABS(a->ob_size); - Py_ssize_t bsize = ABS(b->ob_size); + Py_ssize_t asize = ABS(Py_Size(a)); + Py_ssize_t bsize = ABS(Py_Size(b)); PyLongObject *ah = NULL; PyLongObject *al = NULL; PyLongObject *bh = NULL; @@ -2273,7 +2273,7 @@ /* Split a & b into hi & lo pieces. */ shift = bsize >> 1; if (kmul_split(a, shift, &ah, &al) < 0) goto fail; - assert(ah->ob_size > 0); /* the split isn't degenerate */ + assert(Py_Size(ah) > 0); /* the split isn't degenerate */ if (a == b) { bh = ah; @@ -2304,20 +2304,20 @@ if (ret == NULL) goto fail; #ifdef Py_DEBUG /* Fill with trash, to catch reference to uninitialized digits. */ - memset(ret->ob_digit, 0xDF, ret->ob_size * sizeof(digit)); + memset(ret->ob_digit, 0xDF, Py_Size(ret) * sizeof(digit)); #endif /* 2. t1 <- ah*bh, and copy into high digits of result. */ if ((t1 = k_mul(ah, bh)) == NULL) goto fail; - assert(t1->ob_size >= 0); - assert(2*shift + t1->ob_size <= ret->ob_size); + assert(Py_Size(t1) >= 0); + assert(2*shift + Py_Size(t1) <= Py_Size(ret)); memcpy(ret->ob_digit + 2*shift, t1->ob_digit, - t1->ob_size * sizeof(digit)); + Py_Size(t1) * sizeof(digit)); /* Zero-out the digits higher than the ah*bh copy. */ - i = ret->ob_size - 2*shift - t1->ob_size; + i = Py_Size(ret) - 2*shift - Py_Size(t1); if (i) - memset(ret->ob_digit + 2*shift + t1->ob_size, 0, + memset(ret->ob_digit + 2*shift + Py_Size(t1), 0, i * sizeof(digit)); /* 3. t2 <- al*bl, and copy into the low digits. 
*/ @@ -2325,23 +2325,23 @@ Py_DECREF(t1); goto fail; } - assert(t2->ob_size >= 0); - assert(t2->ob_size <= 2*shift); /* no overlap with high digits */ - memcpy(ret->ob_digit, t2->ob_digit, t2->ob_size * sizeof(digit)); + assert(Py_Size(t2) >= 0); + assert(Py_Size(t2) <= 2*shift); /* no overlap with high digits */ + memcpy(ret->ob_digit, t2->ob_digit, Py_Size(t2) * sizeof(digit)); /* Zero out remaining digits. */ - i = 2*shift - t2->ob_size; /* number of uninitialized digits */ + i = 2*shift - Py_Size(t2); /* number of uninitialized digits */ if (i) - memset(ret->ob_digit + t2->ob_size, 0, i * sizeof(digit)); + memset(ret->ob_digit + Py_Size(t2), 0, i * sizeof(digit)); /* 4 & 5. Subtract ah*bh (t1) and al*bl (t2). We do al*bl first * because it's fresher in cache. */ - i = ret->ob_size - shift; /* # digits after shift */ - (void)v_isub(ret->ob_digit + shift, i, t2->ob_digit, t2->ob_size); + i = Py_Size(ret) - shift; /* # digits after shift */ + (void)v_isub(ret->ob_digit + shift, i, t2->ob_digit, Py_Size(t2)); Py_DECREF(t2); - (void)v_isub(ret->ob_digit + shift, i, t1->ob_digit, t1->ob_size); + (void)v_isub(ret->ob_digit + shift, i, t1->ob_digit, Py_Size(t1)); Py_DECREF(t1); /* 6. t3 <- (ah+al)(bh+bl), and add into result. */ @@ -2366,12 +2366,12 @@ Py_DECREF(t1); Py_DECREF(t2); if (t3 == NULL) goto fail; - assert(t3->ob_size >= 0); + assert(Py_Size(t3) >= 0); /* Add t3. It's not obvious why we can't run out of room here. * See the (*) comment after this function. 
*/ - (void)v_iadd(ret->ob_digit + shift, i, t3->ob_digit, t3->ob_size); + (void)v_iadd(ret->ob_digit + shift, i, t3->ob_digit, Py_Size(t3)); Py_DECREF(t3); return long_normalize(ret); @@ -2441,8 +2441,8 @@ static PyLongObject * k_lopsided_mul(PyLongObject *a, PyLongObject *b) { - const Py_ssize_t asize = ABS(a->ob_size); - Py_ssize_t bsize = ABS(b->ob_size); + const Py_ssize_t asize = ABS(Py_Size(a)); + Py_ssize_t bsize = ABS(Py_Size(b)); Py_ssize_t nbdone; /* # of b digits already multiplied */ PyLongObject *ret; PyLongObject *bslice = NULL; @@ -2454,7 +2454,7 @@ ret = _PyLong_New(asize + bsize); if (ret == NULL) return NULL; - memset(ret->ob_digit, 0, ret->ob_size * sizeof(digit)); + memset(ret->ob_digit, 0, Py_Size(ret) * sizeof(digit)); /* Successive slices of b are copied into bslice. */ bslice = _PyLong_New(asize); @@ -2469,14 +2469,14 @@ /* Multiply the next slice of b by a. */ memcpy(bslice->ob_digit, b->ob_digit + nbdone, nbtouse * sizeof(digit)); - bslice->ob_size = nbtouse; + Py_Size(bslice) = nbtouse; product = k_mul(a, bslice); if (product == NULL) goto fail; /* Add into result. 
*/ - (void)v_iadd(ret->ob_digit + nbdone, ret->ob_size - nbdone, - product->ob_digit, product->ob_size); + (void)v_iadd(ret->ob_digit + nbdone, Py_Size(ret) - nbdone, + product->ob_digit, Py_Size(product)); Py_DECREF(product); bsize -= nbtouse; @@ -2540,8 +2540,8 @@ if (long_divrem(v, w, &div, &mod) < 0) return -1; - if ((mod->ob_size < 0 && w->ob_size > 0) || - (mod->ob_size > 0 && w->ob_size < 0)) { + if ((Py_Size(mod) < 0 && Py_Size(w) > 0) || + (Py_Size(mod) > 0 && Py_Size(w) < 0)) { PyLongObject *temp; PyLongObject *one; temp = (PyLongObject *) long_add(mod, w); @@ -2729,7 +2729,7 @@ return Py_NotImplemented; } - if (b->ob_size < 0) { /* if exponent is negative */ + if (Py_Size(b) < 0) { /* if exponent is negative */ if (c) { PyErr_SetString(PyExc_TypeError, "pow() 2nd argument " "cannot be negative when 3rd argument specified"); @@ -2748,7 +2748,7 @@ if (c) { /* if modulus == 0: raise ValueError() */ - if (c->ob_size == 0) { + if (Py_Size(c) == 0) { PyErr_SetString(PyExc_ValueError, "pow() 3rd argument cannot be 0"); goto Error; @@ -2757,7 +2757,7 @@ /* if modulus < 0: negativeOutput = True modulus = -modulus */ - if (c->ob_size < 0) { + if (Py_Size(c) < 0) { negativeOutput = 1; temp = (PyLongObject *)_PyLong_Copy(c); if (temp == NULL) @@ -2770,7 +2770,7 @@ /* if modulus == 1: return 0 */ - if ((c->ob_size == 1) && (c->ob_digit[0] == 1)) { + if ((Py_Size(c) == 1) && (c->ob_digit[0] == 1)) { z = (PyLongObject *)PyLong_FromLong(0L); goto Done; } @@ -2778,7 +2778,7 @@ /* if base < 0: base = base % modulus Having the base positive just makes things easier. 
*/ - if (a->ob_size < 0) { + if (Py_Size(a) < 0) { if (l_divmod(a, c, NULL, &temp) < 0) goto Error; Py_DECREF(a); @@ -2819,10 +2819,10 @@ REDUCE(result) \ } - if (b->ob_size <= FIVEARY_CUTOFF) { + if (Py_Size(b) <= FIVEARY_CUTOFF) { /* Left-to-right binary exponentiation (HAC Algorithm 14.79) */ /* http://www.cacr.math.uwaterloo.ca/hac/about/chap14.pdf */ - for (i = b->ob_size - 1; i >= 0; --i) { + for (i = Py_Size(b) - 1; i >= 0; --i) { digit bi = b->ob_digit[i]; for (j = 1 << (SHIFT-1); j != 0; j >>= 1) { @@ -2839,7 +2839,7 @@ for (i = 1; i < 32; ++i) MULT(table[i-1], a, table[i]) - for (i = b->ob_size - 1; i >= 0; --i) { + for (i = Py_Size(b) - 1; i >= 0; --i) { const digit bi = b->ob_digit[i]; for (j = SHIFT - 5; j >= 0; j -= 5) { @@ -2852,7 +2852,7 @@ } } - if (negativeOutput && (z->ob_size != 0)) { + if (negativeOutput && (Py_Size(z) != 0)) { temp = (PyLongObject *)long_sub(z, c); if (temp == NULL) goto Error; @@ -2869,7 +2869,7 @@ } /* fall through */ Done: - if (b->ob_size > FIVEARY_CUTOFF) { + if (Py_Size(b) > FIVEARY_CUTOFF) { for (i = 0; i < 32; ++i) Py_XDECREF(table[i]); } @@ -2893,7 +2893,7 @@ Py_DECREF(w); if (x == NULL) return NULL; - x->ob_size = -(x->ob_size); + Py_Size(x) = -(Py_Size(x)); return (PyObject *)x; } @@ -2935,7 +2935,7 @@ static int long_nonzero(PyLongObject *v) { - return ABS(v->ob_size) != 0; + return ABS(Py_Size(v)) != 0; } static PyObject * @@ -2949,7 +2949,7 @@ CONVERT_BINOP((PyObject *)v, (PyObject *)w, &a, &b); - if (a->ob_size < 0) { + if (Py_Size(a) < 0) { /* Right shifting negative numbers is harder */ PyLongObject *a1, *a2; a1 = (PyLongObject *) long_invert(a); @@ -2973,7 +2973,7 @@ goto rshift_error; } wordshift = shiftby / SHIFT; - newsize = ABS(a->ob_size) - wordshift; + newsize = ABS(Py_Size(a)) - wordshift; if (newsize <= 0) { z = _PyLong_New(0); Py_DECREF(a); @@ -2987,8 +2987,8 @@ z = _PyLong_New(newsize); if (z == NULL) goto rshift_error; - if (a->ob_size < 0) - z->ob_size = -(z->ob_size); + if (Py_Size(a) < 0) + 
Py_Size(z) = -(Py_Size(z)); for (i = 0, j = wordshift; i < newsize; i++, j++) { z->ob_digit[i] = (a->ob_digit[j] >> loshift) & lomask; if (i+1 < newsize) @@ -3076,7 +3076,7 @@ digit diga, digb; PyObject *v; - if (a->ob_size < 0) { + if (Py_Size(a) < 0) { a = (PyLongObject *) long_invert(a); if (a == NULL) return NULL; @@ -3086,7 +3086,7 @@ Py_INCREF(a); maska = 0; } - if (b->ob_size < 0) { + if (Py_Size(b) < 0) { b = (PyLongObject *) long_invert(b); if (b == NULL) { Py_DECREF(a); @@ -3135,8 +3135,8 @@ whose length should be ignored. */ - size_a = a->ob_size; - size_b = b->ob_size; + size_a = Py_Size(a); + size_b = Py_Size(b); size_z = op == '&' ? (maska ? size_b @@ -3343,7 +3343,7 @@ if (tmp == NULL) return NULL; assert(PyLong_CheckExact(tmp)); - n = tmp->ob_size; + n = Py_Size(tmp); if (n < 0) n = -n; newobj = (PyLongObject *)type->tp_alloc(type, n); @@ -3352,7 +3352,7 @@ return NULL; } assert(PyLong_Check(newobj)); - newobj->ob_size = tmp->ob_size; + Py_Size(newobj) = Py_Size(tmp); for (i = 0; i < n; i++) newobj->ob_digit[i] = tmp->ob_digit[i]; Py_DECREF(tmp); Modified: python/trunk/Objects/methodobject.c ============================================================================== --- python/trunk/Objects/methodobject.c (original) +++ python/trunk/Objects/methodobject.c Sat Jul 21 08:55:02 2007 @@ -231,8 +231,7 @@ PyTypeObject PyCFunction_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "builtin_function_or_method", sizeof(PyCFunctionObject), 0, Modified: python/trunk/Objects/moduleobject.c ============================================================================== --- python/trunk/Objects/moduleobject.c (original) +++ python/trunk/Objects/moduleobject.c Sat Jul 21 08:55:02 2007 @@ -176,7 +176,7 @@ _PyModule_Clear((PyObject *)m); Py_DECREF(m->md_dict); } - m->ob_type->tp_free((PyObject *)m); + Py_Type(m)->tp_free((PyObject *)m); } static PyObject * @@ -215,8 +215,7 @@ The name must be a string; the optional doc 
argument can have any type."); PyTypeObject PyModule_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "module", /* tp_name */ sizeof(PyModuleObject), /* tp_size */ 0, /* tp_itemsize */ Modified: python/trunk/Objects/object.c ============================================================================== --- python/trunk/Objects/object.c (original) +++ python/trunk/Objects/object.c Sat Jul 21 08:55:02 2007 @@ -214,7 +214,7 @@ if (op == NULL) return PyErr_NoMemory(); /* Any changes should be reflected in PyObject_INIT (objimpl.h) */ - op->ob_type = tp; + Py_Type(op) = tp; _Py_NewReference(op); return op; } @@ -226,7 +226,7 @@ return (PyVarObject *) PyErr_NoMemory(); /* Any changes should be reflected in PyObject_INIT_VAR */ op->ob_size = size; - op->ob_type = tp; + Py_Type(op) = tp; _Py_NewReference((PyObject *)op); return op; } @@ -287,7 +287,7 @@ universally available */ fprintf(fp, "", (long)op->ob_refcnt, op); - else if (op->ob_type->tp_print == NULL) { + else if (Py_Type(op)->tp_print == NULL) { PyObject *s; if (flags & Py_PRINT_RAW) s = PyObject_Str(op); @@ -302,7 +302,7 @@ Py_XDECREF(s); } else - ret = (*op->ob_type->tp_print)(op, fp, flags); + ret = (*Py_Type(op)->tp_print)(op, fp, flags); } if (ret == 0) { if (ferror(fp)) { @@ -335,7 +335,7 @@ "type : %s\n" "refcount: %ld\n" "address : %p\n", - op->ob_type==NULL ? "NULL" : op->ob_type->tp_name, + Py_Type(op)==NULL ? 
"NULL" : Py_Type(op)->tp_name, (long)op->ob_refcnt, op); } @@ -354,12 +354,12 @@ #endif if (v == NULL) return PyString_FromString(""); - else if (v->ob_type->tp_repr == NULL) + else if (Py_Type(v)->tp_repr == NULL) return PyString_FromFormat("<%s object at %p>", - v->ob_type->tp_name, v); + Py_Type(v)->tp_name, v); else { PyObject *res; - res = (*v->ob_type->tp_repr)(v); + res = (*Py_Type(v)->tp_repr)(v); if (res == NULL) return NULL; #ifdef Py_USING_UNICODE @@ -376,7 +376,7 @@ if (!PyString_Check(res)) { PyErr_Format(PyExc_TypeError, "__repr__ returned non-string (type %.200s)", - res->ob_type->tp_name); + Py_Type(res)->tp_name); Py_DECREF(res); return NULL; } @@ -401,10 +401,10 @@ return v; } #endif - if (v->ob_type->tp_str == NULL) + if (Py_Type(v)->tp_str == NULL) return PyObject_Repr(v); - res = (*v->ob_type->tp_str)(v); + res = (*Py_Type(v)->tp_str)(v); if (res == NULL) return NULL; type_ok = PyString_Check(res); @@ -414,7 +414,7 @@ if (!type_ok) { PyErr_Format(PyExc_TypeError, "__str__ returned non-string (type %.200s)", - res->ob_type->tp_name); + Py_Type(res)->tp_name); Py_DECREF(res); return NULL; } @@ -488,8 +488,8 @@ res = v; } else { - if (v->ob_type->tp_str != NULL) - res = (*v->ob_type->tp_str)(v); + if (Py_Type(v)->tp_str != NULL) + res = (*Py_Type(v)->tp_str)(v); else res = PyObject_Repr(v); } @@ -1062,8 +1062,8 @@ { PyObject *w, *res; - if (v->ob_type->tp_getattr != NULL) - return (*v->ob_type->tp_getattr)(v, (char*)name); + if (Py_Type(v)->tp_getattr != NULL) + return (*Py_Type(v)->tp_getattr)(v, (char*)name); w = PyString_InternFromString(name); if (w == NULL) return NULL; @@ -1090,8 +1090,8 @@ PyObject *s; int res; - if (v->ob_type->tp_setattr != NULL) - return (*v->ob_type->tp_setattr)(v, (char*)name, w); + if (Py_Type(v)->tp_setattr != NULL) + return (*Py_Type(v)->tp_setattr)(v, (char*)name, w); s = PyString_InternFromString(name); if (s == NULL) return -1; @@ -1103,7 +1103,7 @@ PyObject * PyObject_GetAttr(PyObject *v, PyObject *name) { - 
PyTypeObject *tp = v->ob_type; + PyTypeObject *tp = Py_Type(v); if (!PyString_Check(name)) { #ifdef Py_USING_UNICODE @@ -1120,7 +1120,7 @@ { PyErr_Format(PyExc_TypeError, "attribute name must be string, not '%.200s'", - name->ob_type->tp_name); + Py_Type(name)->tp_name); return NULL; } } @@ -1149,7 +1149,7 @@ int PyObject_SetAttr(PyObject *v, PyObject *name, PyObject *value) { - PyTypeObject *tp = v->ob_type; + PyTypeObject *tp = Py_Type(v); int err; if (!PyString_Check(name)){ @@ -1167,7 +1167,7 @@ { PyErr_Format(PyExc_TypeError, "attribute name must be string, not '%.200s'", - name->ob_type->tp_name); + Py_Type(name)->tp_name); return -1; } } @@ -1209,7 +1209,7 @@ _PyObject_GetDictPtr(PyObject *obj) { Py_ssize_t dictoffset; - PyTypeObject *tp = obj->ob_type; + PyTypeObject *tp = Py_Type(obj); if (!(tp->tp_flags & Py_TPFLAGS_HAVE_CLASS)) return NULL; @@ -1244,7 +1244,7 @@ PyObject * PyObject_GenericGetAttr(PyObject *obj, PyObject *name) { - PyTypeObject *tp = obj->ob_type; + PyTypeObject *tp = Py_Type(obj); PyObject *descr = NULL; PyObject *res = NULL; descrgetfunc f; @@ -1266,7 +1266,7 @@ { PyErr_Format(PyExc_TypeError, "attribute name must be string, not '%.200s'", - name->ob_type->tp_name); + Py_Type(name)->tp_name); return NULL; } } @@ -1346,7 +1346,7 @@ } if (f != NULL) { - res = f(descr, obj, (PyObject *)obj->ob_type); + res = f(descr, obj, (PyObject *)Py_Type(obj)); Py_DECREF(descr); goto done; } @@ -1368,7 +1368,7 @@ int PyObject_GenericSetAttr(PyObject *obj, PyObject *name, PyObject *value) { - PyTypeObject *tp = obj->ob_type; + PyTypeObject *tp = Py_Type(obj); PyObject *descr; descrsetfunc f; PyObject **dictptr; @@ -1389,7 +1389,7 @@ { PyErr_Format(PyExc_TypeError, "attribute name must be string, not '%.200s'", - name->ob_type->tp_name); + Py_Type(name)->tp_name); return -1; } } @@ -1683,7 +1683,7 @@ if (!PyList_Check(names)) { PyErr_Format(PyExc_TypeError, "dir(): expected keys() of locals to be a list, " - "not '%.200s'", names->ob_type->tp_name); + 
"not '%.200s'", Py_Type(names)->tp_name); Py_DECREF(names); return NULL; } @@ -1818,7 +1818,7 @@ if (!PyList_Check(result)) { PyErr_Format(PyExc_TypeError, "__dir__() must return a list, not %.200s", - result->ob_type->tp_name); + Py_Type(result)->tp_name); Py_DECREF(result); result = NULL; } @@ -1880,8 +1880,7 @@ static PyTypeObject PyNone_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "NoneType", 0, 0, @@ -1898,7 +1897,8 @@ }; PyObject _Py_NoneStruct = { - PyObject_HEAD_INIT(&PyNone_Type) + _PyObject_EXTRA_INIT + 1, &PyNone_Type }; /* NotImplemented is an object that can be used to signal that an @@ -1911,8 +1911,7 @@ } static PyTypeObject PyNotImplemented_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "NotImplementedType", 0, 0, @@ -1929,7 +1928,8 @@ }; PyObject _Py_NotImplementedStruct = { - PyObject_HEAD_INIT(&PyNotImplemented_Type) + _PyObject_EXTRA_INIT + 1, &PyNotImplemented_Type }; void @@ -1997,7 +1997,7 @@ void _Py_Dealloc(PyObject *op) { - destructor dealloc = op->ob_type->tp_dealloc; + destructor dealloc = Py_Type(op)->tp_dealloc; _Py_ForgetReference(op); (*dealloc)(op); } @@ -2028,7 +2028,7 @@ fprintf(fp, "Remaining object addresses:\n"); for (op = refchain._ob_next; op != &refchain; op = op->_ob_next) fprintf(fp, "%p [%" PY_FORMAT_SIZE_T "d] %s\n", op, - op->ob_refcnt, op->ob_type->tp_name); + op->ob_refcnt, Py_Type(op)->tp_name); } PyObject * @@ -2046,7 +2046,7 @@ return NULL; for (i = 0; (n == 0 || i < n) && op != &refchain; i++) { while (op == self || op == args || op == res || op == t || - (t != NULL && op->ob_type != (PyTypeObject *) t)) { + (t != NULL && Py_Type(op) != (PyTypeObject *) t)) { op = op->_ob_next; if (op == &refchain) return res; @@ -2189,7 +2189,7 @@ { while (_PyTrash_delete_later) { PyObject *op = _PyTrash_delete_later; - destructor dealloc = op->ob_type->tp_dealloc; + destructor dealloc = Py_Type(op)->tp_dealloc; _PyTrash_delete_later = 
(PyObject*) _Py_AS_GC(op)->gc.gc_prev; Modified: python/trunk/Objects/obmalloc.c ============================================================================== --- python/trunk/Objects/obmalloc.c (original) +++ python/trunk/Objects/obmalloc.c Sat Jul 21 08:55:02 2007 @@ -675,8 +675,8 @@ /* This is only useful when running memory debuggers such as * Purify or Valgrind. Uncomment to use. * -#define Py_USING_MEMORY_DEBUGGER */ +#define Py_USING_MEMORY_DEBUGGER #ifdef Py_USING_MEMORY_DEBUGGER Modified: python/trunk/Objects/setobject.c ============================================================================== --- python/trunk/Objects/setobject.c (original) +++ python/trunk/Objects/setobject.c Sat Jul 21 08:55:02 2007 @@ -3,7 +3,7 @@ Written and maintained by Raymond D. Hettinger Derived from Lib/sets.py and Objects/dictobject.c. - Copyright (c) 2003-6 Python Software Foundation. + Copyright (c) 2003-2007 Python Software Foundation. All rights reserved. */ @@ -561,7 +561,7 @@ if (num_free_sets < MAXFREESETS && PyAnySet_CheckExact(so)) free_sets[num_free_sets++] = so; else - so->ob_type->tp_free(so); + Py_Type(so)->tp_free(so); Py_TRASHCAN_SAFE_END(so) } @@ -860,8 +860,7 @@ } static PyTypeObject PySetIter_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "setiterator", /* tp_name */ sizeof(setiterobject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -987,7 +986,7 @@ (type == &PySet_Type || type == &PyFrozenSet_Type)) { so = free_sets[--num_free_sets]; assert (so != NULL && PyAnySet_CheckExact(so)); - so->ob_type = type; + Py_Type(so) = type; _Py_NewReference((PyObject *)so); EMPTY_TO_MINSIZE(so); PyObject_GC_Track(so); @@ -1113,8 +1112,8 @@ memcpy(b->smalltable, tab, sizeof(tab)); } - if (PyType_IsSubtype(a->ob_type, &PyFrozenSet_Type) && - PyType_IsSubtype(b->ob_type, &PyFrozenSet_Type)) { + if (PyType_IsSubtype(Py_Type(a), &PyFrozenSet_Type) && + PyType_IsSubtype(Py_Type(b), &PyFrozenSet_Type)) { h = a->hash; 
a->hash = b->hash; b->hash = h; } else { a->hash = -1; @@ -1125,7 +1124,7 @@ static PyObject * set_copy(PySetObject *so) { - return make_new_set(so->ob_type, (PyObject *)so); + return make_new_set(Py_Type(so), (PyObject *)so); } static PyObject * @@ -1203,7 +1202,7 @@ if ((PyObject *)so == other) return set_copy(so); - result = (PySetObject *)make_new_set(so->ob_type, NULL); + result = (PySetObject *)make_new_set(Py_Type(so), NULL); if (result == NULL) return NULL; @@ -1390,7 +1389,7 @@ return NULL; } - result = make_new_set(so->ob_type, NULL); + result = make_new_set(Py_Type(so), NULL); if (result == NULL) return NULL; @@ -1491,7 +1490,7 @@ Py_INCREF(other); otherset = (PySetObject *)other; } else { - otherset = (PySetObject *)make_new_set(so->ob_type, other); + otherset = (PySetObject *)make_new_set(Py_Type(so), other); if (otherset == NULL) return NULL; } @@ -1522,7 +1521,7 @@ PyObject *rv; PySetObject *otherset; - otherset = (PySetObject *)make_new_set(so->ob_type, other); + otherset = (PySetObject *)make_new_set(Py_Type(so), other); if (otherset == NULL) return NULL; rv = set_symmetric_difference_update(otherset, (PyObject *)so); @@ -1789,7 +1788,7 @@ dict = Py_None; Py_INCREF(dict); } - result = PyTuple_Pack(3, so->ob_type, args, dict); + result = PyTuple_Pack(3, Py_Type(so), args, dict); done: Py_XDECREF(args); Py_XDECREF(keys); @@ -1806,7 +1805,7 @@ if (!PyAnySet_Check(self)) return -1; - if (!PyArg_UnpackTuple(args, self->ob_type->tp_name, 0, 1, &iterable)) + if (!PyArg_UnpackTuple(args, Py_Type(self)->tp_name, 0, 1, &iterable)) return -1; set_clear_internal(self); self->hash = -1; @@ -1922,8 +1921,7 @@ Build an unordered collection of unique elements."); PyTypeObject PySet_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "set", /* tp_name */ sizeof(PySetObject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -2017,8 +2015,7 @@ Build an immutable unordered collection of unique elements."); PyTypeObject 
PyFrozenSet_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "frozenset", /* tp_name */ sizeof(PySetObject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -2099,7 +2096,7 @@ int PySet_Clear(PyObject *set) { - if (!PyType_IsSubtype(set->ob_type, &PySet_Type)) { + if (!PyType_IsSubtype(Py_Type(set), &PySet_Type)) { PyErr_BadInternalCall(); return -1; } @@ -2119,7 +2116,7 @@ int PySet_Discard(PyObject *set, PyObject *key) { - if (!PyType_IsSubtype(set->ob_type, &PySet_Type)) { + if (!PyType_IsSubtype(Py_Type(set), &PySet_Type)) { PyErr_BadInternalCall(); return -1; } @@ -2129,7 +2126,7 @@ int PySet_Add(PyObject *set, PyObject *key) { - if (!PyType_IsSubtype(set->ob_type, &PySet_Type)) { + if (!PyType_IsSubtype(Py_Type(set), &PySet_Type)) { PyErr_BadInternalCall(); return -1; } @@ -2170,7 +2167,7 @@ PyObject * PySet_Pop(PyObject *set) { - if (!PyType_IsSubtype(set->ob_type, &PySet_Type)) { + if (!PyType_IsSubtype(Py_Type(set), &PySet_Type)) { PyErr_BadInternalCall(); return NULL; } @@ -2180,7 +2177,7 @@ int _PySet_Update(PyObject *set, PyObject *iterable) { - if (!PyType_IsSubtype(set->ob_type, &PySet_Type)) { + if (!PyType_IsSubtype(Py_Type(set), &PySet_Type)) { PyErr_BadInternalCall(); return -1; } Modified: python/trunk/Objects/sliceobject.c ============================================================================== --- python/trunk/Objects/sliceobject.c (original) +++ python/trunk/Objects/sliceobject.c Sat Jul 21 08:55:02 2007 @@ -23,8 +23,7 @@ } static PyTypeObject PyEllipsis_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "ellipsis", /* tp_name */ 0, /* tp_basicsize */ 0, /* tp_itemsize */ @@ -47,7 +46,8 @@ }; PyObject _Py_EllipsisObject = { - PyObject_HEAD_INIT(&PyEllipsis_Type) + _PyObject_EXTRA_INIT + 1, &PyEllipsis_Type }; @@ -277,7 +277,7 @@ static PyObject * slice_reduce(PySliceObject* self) { - return Py_BuildValue("O(OOO)", self->ob_type, 
self->start, self->stop, self->step); + return Py_BuildValue("O(OOO)", Py_Type(self), self->start, self->stop, self->step); } PyDoc_STRVAR(reduce_doc, "Return state information for pickling."); @@ -319,8 +319,7 @@ } PyTypeObject PySlice_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* Number of items for varobject */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "slice", /* Name of this type */ sizeof(PySliceObject), /* Basic object size */ 0, /* Item size for varobject */ Modified: python/trunk/Objects/stringobject.c ============================================================================== --- python/trunk/Objects/stringobject.c (original) +++ python/trunk/Objects/stringobject.c Sat Jul 21 08:55:02 2007 @@ -421,7 +421,7 @@ if (!PyString_Check(v)) { PyErr_Format(PyExc_TypeError, "decoder did not return a string object (type=%.400s)", - v->ob_type->tp_name); + Py_Type(v)->tp_name); Py_DECREF(v); goto onError; } @@ -501,7 +501,7 @@ if (!PyString_Check(v)) { PyErr_Format(PyExc_TypeError, "encoder did not return a string object (type=%.400s)", - v->ob_type->tp_name); + Py_Type(v)->tp_name); Py_DECREF(v); goto onError; } @@ -521,7 +521,7 @@ case SSTATE_INTERNED_MORTAL: /* revive dead object temporarily for DelItem */ - op->ob_refcnt = 3; + Py_Refcnt(op) = 3; if (PyDict_DelItem(interned, op) != 0) Py_FatalError( "deletion of interned string failed"); @@ -533,7 +533,7 @@ default: Py_FatalError("Inconsistent interned string state."); } - op->ob_type->tp_free(op); + Py_Type(op)->tp_free(op); } /* Unescape a backslash-escaped string. 
If unicode is non-zero, @@ -717,7 +717,7 @@ { if (!PyString_Check(op)) return string_getsize(op); - return ((PyStringObject *)op) -> ob_size; + return Py_Size(op); } /*const*/ char * @@ -750,7 +750,7 @@ { PyErr_Format(PyExc_TypeError, "expected string or Unicode object, " - "%.200s found", obj->ob_type->tp_name); + "%.200s found", Py_Type(obj)->tp_name); return -1; } } @@ -805,7 +805,7 @@ } if (flags & Py_PRINT_RAW) { char *data = op->ob_sval; - Py_ssize_t size = op->ob_size; + Py_ssize_t size = Py_Size(op); while (size > INT_MAX) { /* Very long strings cannot be written atomically. * But don't write exactly INT_MAX bytes at a time @@ -826,12 +826,12 @@ /* figure out which quote to use; single is preferred */ quote = '\''; - if (memchr(op->ob_sval, '\'', op->ob_size) && - !memchr(op->ob_sval, '"', op->ob_size)) + if (memchr(op->ob_sval, '\'', Py_Size(op)) && + !memchr(op->ob_sval, '"', Py_Size(op))) quote = '"'; fputc(quote, fp); - for (i = 0; i < op->ob_size; i++) { + for (i = 0; i < Py_Size(op); i++) { c = op->ob_sval[i]; if (c == quote || c == '\\') fprintf(fp, "\\%c", c); @@ -854,9 +854,9 @@ PyString_Repr(PyObject *obj, int smartquotes) { register PyStringObject* op = (PyStringObject*) obj; - size_t newsize = 2 + 4 * op->ob_size; + size_t newsize = 2 + 4 * Py_Size(op); PyObject *v; - if (newsize > PY_SSIZE_T_MAX || newsize / 4 != op->ob_size) { + if (newsize > PY_SSIZE_T_MAX || newsize / 4 != Py_Size(op)) { PyErr_SetString(PyExc_OverflowError, "string is too large to make repr"); } @@ -873,13 +873,13 @@ /* figure out which quote to use; single is preferred */ quote = '\''; if (smartquotes && - memchr(op->ob_sval, '\'', op->ob_size) && - !memchr(op->ob_sval, '"', op->ob_size)) + memchr(op->ob_sval, '\'', Py_Size(op)) && + !memchr(op->ob_sval, '"', Py_Size(op))) quote = '"'; p = PyString_AS_STRING(v); *p++ = quote; - for (i = 0; i < op->ob_size; i++) { + for (i = 0; i < Py_Size(op); i++) { /* There's at least enough room for a hex escape and a closing quote. 
*/ assert(newsize - (p - PyString_AS_STRING(v)) >= 5); @@ -928,14 +928,14 @@ else { /* Subtype -- return genuine string with the same value. */ PyStringObject *t = (PyStringObject *) s; - return PyString_FromStringAndSize(t->ob_sval, t->ob_size); + return PyString_FromStringAndSize(t->ob_sval, Py_Size(t)); } } static Py_ssize_t string_length(PyStringObject *a) { - return a->ob_size; + return Py_Size(a); } static PyObject * @@ -950,21 +950,21 @@ #endif PyErr_Format(PyExc_TypeError, "cannot concatenate 'str' and '%.200s' objects", - bb->ob_type->tp_name); + Py_Type(bb)->tp_name); return NULL; } #define b ((PyStringObject *)bb) /* Optimize cases with empty left or right operand */ - if ((a->ob_size == 0 || b->ob_size == 0) && + if ((Py_Size(a) == 0 || Py_Size(b) == 0) && PyString_CheckExact(a) && PyString_CheckExact(b)) { - if (a->ob_size == 0) { + if (Py_Size(a) == 0) { Py_INCREF(bb); return bb; } Py_INCREF(a); return (PyObject *)a; } - size = a->ob_size + b->ob_size; + size = Py_Size(a) + Py_Size(b); if (size < 0) { PyErr_SetString(PyExc_OverflowError, "strings are too large to concat"); @@ -978,8 +978,8 @@ PyObject_INIT_VAR(op, &PyString_Type, size); op->ob_shash = -1; op->ob_sstate = SSTATE_NOT_INTERNED; - Py_MEMCPY(op->ob_sval, a->ob_sval, a->ob_size); - Py_MEMCPY(op->ob_sval + a->ob_size, b->ob_sval, b->ob_size); + Py_MEMCPY(op->ob_sval, a->ob_sval, Py_Size(a)); + Py_MEMCPY(op->ob_sval + Py_Size(a), b->ob_sval, Py_Size(b)); op->ob_sval[size] = '\0'; return (PyObject *) op; #undef b @@ -998,13 +998,13 @@ /* watch out for overflows: the size can overflow int, * and the # of bytes needed can overflow size_t */ - size = a->ob_size * n; - if (n && size / n != a->ob_size) { + size = Py_Size(a) * n; + if (n && size / n != Py_Size(a)) { PyErr_SetString(PyExc_OverflowError, "repeated string is too long"); return NULL; } - if (size == a->ob_size && PyString_CheckExact(a)) { + if (size == Py_Size(a) && PyString_CheckExact(a)) { Py_INCREF(a); return (PyObject *)a; } @@ 
-1022,14 +1022,14 @@ op->ob_shash = -1; op->ob_sstate = SSTATE_NOT_INTERNED; op->ob_sval[size] = '\0'; - if (a->ob_size == 1 && n > 0) { + if (Py_Size(a) == 1 && n > 0) { memset(op->ob_sval, a->ob_sval[0] , n); return (PyObject *) op; } i = 0; if (i < size) { - Py_MEMCPY(op->ob_sval, a->ob_sval, a->ob_size); - i = a->ob_size; + Py_MEMCPY(op->ob_sval, a->ob_sval, Py_Size(a)); + i = Py_Size(a); } while (i < size) { j = (i <= size-i) ? i : size-i; @@ -1050,9 +1050,9 @@ i = 0; if (j < 0) j = 0; /* Avoid signed/unsigned bug in next line */ - if (j > a->ob_size) - j = a->ob_size; - if (i == 0 && j == a->ob_size && PyString_CheckExact(a)) { + if (j > Py_Size(a)) + j = Py_Size(a); + if (i == 0 && j == Py_Size(a) && PyString_CheckExact(a)) { /* It's the same as a */ Py_INCREF(a); return (PyObject *)a; @@ -1073,7 +1073,7 @@ if (!PyString_Check(sub_obj)) { PyErr_Format(PyExc_TypeError, "'in ' requires string as left operand, " - "not %.200s", sub_obj->ob_type->tp_name); + "not %.200s", Py_Type(sub_obj)->tp_name); return -1; } } @@ -1086,7 +1086,7 @@ { char pchar; PyObject *v; - if (i < 0 || i >= a->ob_size) { + if (i < 0 || i >= Py_Size(a)) { PyErr_SetString(PyExc_IndexError, "string index out of range"); return NULL; } @@ -1129,16 +1129,16 @@ if (op == Py_EQ) { /* Supporting Py_NE here as well does not save much time, since Py_NE is rarely used. */ - if (a->ob_size == b->ob_size + if (Py_Size(a) == Py_Size(b) && (a->ob_sval[0] == b->ob_sval[0] - && memcmp(a->ob_sval, b->ob_sval, a->ob_size) == 0)) { + && memcmp(a->ob_sval, b->ob_sval, Py_Size(a)) == 0)) { result = Py_True; } else { result = Py_False; } goto out; } - len_a = a->ob_size; len_b = b->ob_size; + len_a = Py_Size(a); len_b = Py_Size(b); min_len = (len_a < len_b) ? 
len_a : len_b; if (min_len > 0) { c = Py_CHARMASK(*a->ob_sval) - Py_CHARMASK(*b->ob_sval); @@ -1170,9 +1170,9 @@ { PyStringObject *a = (PyStringObject*) o1; PyStringObject *b = (PyStringObject*) o2; - return a->ob_size == b->ob_size + return Py_Size(a) == Py_Size(b) && *a->ob_sval == *b->ob_sval - && memcmp(a->ob_sval, b->ob_sval, a->ob_size) == 0; + && memcmp(a->ob_sval, b->ob_sval, Py_Size(a)) == 0; } static long @@ -1184,12 +1184,12 @@ if (a->ob_shash != -1) return a->ob_shash; - len = a->ob_size; + len = Py_Size(a); p = (unsigned char *) a->ob_sval; x = *p << 7; while (--len >= 0) x = (1000003*x) ^ *p++; - x ^= a->ob_size; + x ^= Py_Size(a); if (x == -1) x = -2; a->ob_shash = x; @@ -1242,7 +1242,7 @@ else { PyErr_Format(PyExc_TypeError, "string indices must be integers, not %.200s", - item->ob_type->tp_name); + Py_Type(item)->tp_name); return NULL; } } @@ -1256,7 +1256,7 @@ return -1; } *ptr = (void *)self->ob_sval; - return self->ob_size; + return Py_Size(self); } static Py_ssize_t @@ -1271,7 +1271,7 @@ string_buffer_getsegcount(PyStringObject *self, Py_ssize_t *lenp) { if ( lenp ) - *lenp = self->ob_size; + *lenp = Py_Size(self); return 1; } @@ -1284,7 +1284,7 @@ return -1; } *ptr = self->ob_sval; - return self->ob_size; + return Py_Size(self); } static PySequenceMethods string_as_sequence = { @@ -1373,7 +1373,7 @@ count++; } /* Always force the list to the expected size. 
*/ -#define FIX_PREALLOC_SIZE(list) ((PyListObject *)list)->ob_size = count +#define FIX_PREALLOC_SIZE(list) Py_Size(list) = count #define SKIP_SPACE(s, i, len) { while (iob_type->tp_name); + i, Py_Type(item)->tp_name); Py_DECREF(seq); return NULL; } @@ -3248,7 +3248,7 @@ PyErr_Format(PyExc_TypeError, "encoder did not return a string/unicode object " "(type=%.400s)", - v->ob_type->tp_name); + Py_Type(v)->tp_name); Py_DECREF(v); return NULL; } @@ -3285,7 +3285,7 @@ PyErr_Format(PyExc_TypeError, "decoder did not return a string/unicode object " "(type=%.400s)", - v->ob_type->tp_name); + Py_Type(v)->tp_name); Py_DECREF(v); return NULL; } @@ -3843,7 +3843,7 @@ static PyObject * string_getnewargs(PyStringObject *v) { - return Py_BuildValue("(s#)", v->ob_sval, v->ob_size); + return Py_BuildValue("(s#)", v->ob_sval, Py_Size(v)); } @@ -3972,8 +3972,7 @@ PyTypeObject PyBaseString_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "basestring", 0, 0, @@ -4021,8 +4020,7 @@ If the argument is a string, the return value is the same object."); PyTypeObject PyString_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "str", sizeof(PyStringObject), sizeof(char), @@ -4108,7 +4106,7 @@ register PyObject *v; register PyStringObject *sv; v = *pv; - if (!PyString_Check(v) || v->ob_refcnt != 1 || newsize < 0 || + if (!PyString_Check(v) || Py_Refcnt(v) != 1 || newsize < 0 || PyString_CHECK_INTERNED(v)) { *pv = 0; Py_DECREF(v); @@ -4127,7 +4125,7 @@ } _Py_NewReference(*pv); sv = (PyStringObject *) *pv; - sv->ob_size = newsize; + Py_Size(sv) = newsize; sv->ob_sval[newsize] = '\0'; sv->ob_shash = -1; /* invalidate cached hash value */ return 0; @@ -4175,7 +4173,7 @@ x = PyFloat_AsDouble(v); if (x == -1.0 && PyErr_Occurred()) { PyErr_Format(PyExc_TypeError, "float argument required, " - "not %.200s", v->ob_type->tp_name); + "not %.200s", Py_Type(v)->tp_name); return -1; } if (prec < 0) @@ -4249,15 +4247,15 @@ switch 
(type) { case 'd': case 'u': - result = val->ob_type->tp_str(val); + result = Py_Type(val)->tp_str(val); break; case 'o': - result = val->ob_type->tp_as_number->nb_oct(val); + result = Py_Type(val)->tp_as_number->nb_oct(val); break; case 'x': case 'X': numnondigits = 2; - result = val->ob_type->tp_as_number->nb_hex(val); + result = Py_Type(val)->tp_as_number->nb_hex(val); break; default: assert(!"'type' not in [duoxX]"); @@ -4272,7 +4270,7 @@ } /* To modify the string in-place, there can only be one reference. */ - if (result->ob_refcnt != 1) { + if (Py_Refcnt(result) != 1) { PyErr_BadInternalCall(); return NULL; } @@ -4372,7 +4370,7 @@ x = PyInt_AsLong(v); if (x == -1 && PyErr_Occurred()) { PyErr_Format(PyExc_TypeError, "int argument required, not %.200s", - v->ob_type->tp_name); + Py_Type(v)->tp_name); return -1; } if (x < 0 && type == 'u') { @@ -4489,7 +4487,7 @@ arglen = -1; argidx = -2; } - if (args->ob_type->tp_as_mapping && !PyTuple_Check(args) && + if (Py_Type(args)->tp_as_mapping && !PyTuple_Check(args) && !PyObject_TypeCheck(args, &PyBaseString_Type)) dict = args; while (--fmtcnt >= 0) { @@ -4956,7 +4954,7 @@ } /* The two references in interned are not counted by refcnt. 
The string deallocator will take care of this */ - s->ob_refcnt -= 2; + Py_Refcnt(s) -= 2; PyString_CHECK_INTERNED(s) = SSTATE_INTERNED_MORTAL; } @@ -5023,12 +5021,12 @@ /* XXX Shouldn't happen */ break; case SSTATE_INTERNED_IMMORTAL: - s->ob_refcnt += 1; - immortal_size += s->ob_size; + Py_Refcnt(s) += 1; + immortal_size += Py_Size(s); break; case SSTATE_INTERNED_MORTAL: - s->ob_refcnt += 2; - mortal_size += s->ob_size; + Py_Refcnt(s) += 2; + mortal_size += Py_Size(s); break; default: Py_FatalError("Inconsistent interned string state."); Modified: python/trunk/Objects/structseq.c ============================================================================== --- python/trunk/Objects/structseq.c (original) +++ python/trunk/Objects/structseq.c Sat Jul 21 08:55:02 2007 @@ -13,17 +13,17 @@ They are only allowed for indices < n_visible_fields. */ char *PyStructSequence_UnnamedField = "unnamed field"; -#define VISIBLE_SIZE(op) ((op)->ob_size) +#define VISIBLE_SIZE(op) Py_Size(op) #define VISIBLE_SIZE_TP(tp) PyInt_AsLong( \ PyDict_GetItemString((tp)->tp_dict, visible_length_key)) #define REAL_SIZE_TP(tp) PyInt_AsLong( \ PyDict_GetItemString((tp)->tp_dict, real_length_key)) -#define REAL_SIZE(op) REAL_SIZE_TP((op)->ob_type) +#define REAL_SIZE(op) REAL_SIZE_TP(Py_Type(op)) #define UNNAMED_FIELDS_TP(tp) PyInt_AsLong( \ PyDict_GetItemString((tp)->tp_dict, unnamed_fields_key)) -#define UNNAMED_FIELDS(op) UNNAMED_FIELDS_TP((op)->ob_type) +#define UNNAMED_FIELDS(op) UNNAMED_FIELDS_TP(Py_Type(op)) PyObject * @@ -32,7 +32,7 @@ PyStructSequence *obj; obj = PyObject_New(PyStructSequence, type); - obj->ob_size = VISIBLE_SIZE_TP(type); + Py_Size(obj) = VISIBLE_SIZE_TP(type); return (PyObject*) obj; } @@ -274,12 +274,12 @@ } for (; i < n_fields; i++) { - char *n = self->ob_type->tp_members[i-n_unnamed_fields].name; + char *n = Py_Type(self)->tp_members[i-n_unnamed_fields].name; PyDict_SetItemString(dict, n, self->ob_item[i]); } - result = Py_BuildValue("(O(OO))", self->ob_type, tup, 
dict); + result = Py_BuildValue("(O(OO))", Py_Type(self), tup, dict); Py_DECREF(tup); Py_DECREF(dict); @@ -305,8 +305,7 @@ }; static PyTypeObject _struct_sequence_template = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) NULL, /* tp_name */ 0, /* tp_basicsize */ 0, /* tp_itemsize */ Modified: python/trunk/Objects/tupleobject.c ============================================================================== --- python/trunk/Objects/tupleobject.c (original) +++ python/trunk/Objects/tupleobject.c Sat Jul 21 08:55:02 2007 @@ -49,8 +49,8 @@ #endif /* Inline PyObject_InitVar */ #ifdef Py_TRACE_REFS - op->ob_size = size; - op->ob_type = &PyTuple_Type; + Py_Size(op) = size; + Py_Type(op) = &PyTuple_Type; #endif _Py_NewReference((PyObject *)op); } @@ -90,7 +90,7 @@ return -1; } else - return ((PyTupleObject *)op)->ob_size; + return Py_Size(op); } PyObject * @@ -100,7 +100,7 @@ PyErr_BadInternalCall(); return NULL; } - if (i < 0 || i >= ((PyTupleObject *)op) -> ob_size) { + if (i < 0 || i >= Py_Size(op)) { PyErr_SetString(PyExc_IndexError, "tuple index out of range"); return NULL; } @@ -117,7 +117,7 @@ PyErr_BadInternalCall(); return -1; } - if (i < 0 || i >= ((PyTupleObject *)op) -> ob_size) { + if (i < 0 || i >= Py_Size(op)) { Py_XDECREF(newitem); PyErr_SetString(PyExc_IndexError, "tuple assignment index out of range"); @@ -160,7 +160,7 @@ tupledealloc(register PyTupleObject *op) { register Py_ssize_t i; - register Py_ssize_t len = op->ob_size; + register Py_ssize_t len = Py_Size(op); PyObject_GC_UnTrack(op); Py_TRASHCAN_SAFE_BEGIN(op) if (len > 0) { @@ -170,7 +170,7 @@ #if MAXSAVESIZE > 0 if (len < MAXSAVESIZE && num_free_tuples[len] < MAXSAVEDTUPLES && - op->ob_type == &PyTuple_Type) + Py_Type(op) == &PyTuple_Type) { op->ob_item[0] = (PyObject *) free_tuples[len]; num_free_tuples[len]++; @@ -179,7 +179,7 @@ } #endif } - op->ob_type->tp_free((PyObject *)op); + Py_Type(op)->tp_free((PyObject *)op); done: 
Py_TRASHCAN_SAFE_END(op) } @@ -189,13 +189,13 @@ { Py_ssize_t i; fprintf(fp, "("); - for (i = 0; i < op->ob_size; i++) { + for (i = 0; i < Py_Size(op); i++) { if (i > 0) fprintf(fp, ", "); if (PyObject_Print(op->ob_item[i], fp, 0) != 0) return -1; } - if (op->ob_size == 1) + if (Py_Size(op) == 1) fprintf(fp, ","); fprintf(fp, ")"); return 0; @@ -208,7 +208,7 @@ PyObject *s, *temp; PyObject *pieces, *result = NULL; - n = v->ob_size; + n = Py_Size(v); if (n == 0) return PyString_FromString("()"); @@ -268,7 +268,7 @@ tuplehash(PyTupleObject *v) { register long x, y; - register Py_ssize_t len = v->ob_size; + register Py_ssize_t len = Py_Size(v); register PyObject **p; long mult = 1000003L; x = 0x345678L; @@ -290,7 +290,7 @@ static Py_ssize_t tuplelength(PyTupleObject *a) { - return a->ob_size; + return Py_Size(a); } static int @@ -299,7 +299,7 @@ Py_ssize_t i; int cmp; - for (i = 0, cmp = 0 ; cmp == 0 && i < a->ob_size; ++i) + for (i = 0, cmp = 0 ; cmp == 0 && i < Py_Size(a); ++i) cmp = PyObject_RichCompareBool(el, PyTuple_GET_ITEM(a, i), Py_EQ); return cmp; @@ -308,7 +308,7 @@ static PyObject * tupleitem(register PyTupleObject *a, register Py_ssize_t i) { - if (i < 0 || i >= a->ob_size) { + if (i < 0 || i >= Py_Size(a)) { PyErr_SetString(PyExc_IndexError, "tuple index out of range"); return NULL; } @@ -326,11 +326,11 @@ Py_ssize_t len; if (ilow < 0) ilow = 0; - if (ihigh > a->ob_size) - ihigh = a->ob_size; + if (ihigh > Py_Size(a)) + ihigh = Py_Size(a); if (ihigh < ilow) ihigh = ilow; - if (ilow == 0 && ihigh == a->ob_size && PyTuple_CheckExact(a)) { + if (ilow == 0 && ihigh == Py_Size(a) && PyTuple_CheckExact(a)) { Py_INCREF(a); return (PyObject *)a; } @@ -368,11 +368,11 @@ if (!PyTuple_Check(bb)) { PyErr_Format(PyExc_TypeError, "can only concatenate tuple (not \"%.200s\") to tuple", - bb->ob_type->tp_name); + Py_Type(bb)->tp_name); return NULL; } #define b ((PyTupleObject *)bb) - size = a->ob_size + b->ob_size; + size = Py_Size(a) + Py_Size(b); if (size < 0) return 
PyErr_NoMemory(); np = (PyTupleObject *) PyTuple_New(size); @@ -381,14 +381,14 @@ } src = a->ob_item; dest = np->ob_item; - for (i = 0; i < a->ob_size; i++) { + for (i = 0; i < Py_Size(a); i++) { PyObject *v = src[i]; Py_INCREF(v); dest[i] = v; } src = b->ob_item; - dest = np->ob_item + a->ob_size; - for (i = 0; i < b->ob_size; i++) { + dest = np->ob_item + Py_Size(a); + for (i = 0; i < Py_Size(b); i++) { PyObject *v = src[i]; Py_INCREF(v); dest[i] = v; @@ -406,18 +406,18 @@ PyObject **p, **items; if (n < 0) n = 0; - if (a->ob_size == 0 || n == 1) { + if (Py_Size(a) == 0 || n == 1) { if (PyTuple_CheckExact(a)) { /* Since tuples are immutable, we can return a shared copy in this case */ Py_INCREF(a); return (PyObject *)a; } - if (a->ob_size == 0) + if (Py_Size(a) == 0) return PyTuple_New(0); } - size = a->ob_size * n; - if (size/a->ob_size != n) + size = Py_Size(a) * n; + if (size/Py_Size(a) != n) return PyErr_NoMemory(); np = (PyTupleObject *) PyTuple_New(size); if (np == NULL) @@ -425,7 +425,7 @@ p = np->ob_item; items = a->ob_item; for (i = 0; i < n; i++) { - for (j = 0; j < a->ob_size; j++) { + for (j = 0; j < Py_Size(a); j++) { *p = items[j]; Py_INCREF(*p); p++; @@ -439,7 +439,7 @@ { Py_ssize_t i; - for (i = o->ob_size; --i >= 0; ) + for (i = Py_Size(o); --i >= 0; ) Py_VISIT(o->ob_item[i]); return 0; } @@ -459,8 +459,8 @@ vt = (PyTupleObject *)v; wt = (PyTupleObject *)w; - vlen = vt->ob_size; - wlen = wt->ob_size; + vlen = Py_Size(vt); + wlen = Py_Size(wt); /* Note: the corresponding code for lists has an "early out" test * here when op is EQ or NE and the lengths differ. 
That pays there, @@ -622,7 +622,7 @@ else { PyErr_Format(PyExc_TypeError, "tuple indices must be integers, not %.200s", - item->ob_type->tp_name); + Py_Type(item)->tp_name); return NULL; } } @@ -630,7 +630,7 @@ static PyObject * tuple_getnewargs(PyTupleObject *v) { - return Py_BuildValue("(N)", tupleslice(v, 0, v->ob_size)); + return Py_BuildValue("(N)", tupleslice(v, 0, Py_Size(v))); } @@ -648,8 +648,7 @@ static PyObject *tuple_iter(PyObject *seq); PyTypeObject PyTuple_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "tuple", sizeof(PyTupleObject) - sizeof(PyObject *), sizeof(PyObject *), @@ -707,14 +706,14 @@ Py_ssize_t oldsize; v = (PyTupleObject *) *pv; - if (v == NULL || v->ob_type != &PyTuple_Type || - (v->ob_size != 0 && v->ob_refcnt != 1)) { + if (v == NULL || Py_Type(v) != &PyTuple_Type || + (Py_Size(v) != 0 && Py_Refcnt(v) != 1)) { *pv = 0; Py_XDECREF(v); PyErr_BadInternalCall(); return -1; } - oldsize = v->ob_size; + oldsize = Py_Size(v); if (oldsize == newsize) return 0; @@ -838,8 +837,7 @@ }; PyTypeObject PyTupleIter_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "tupleiterator", /* tp_name */ sizeof(tupleiterobject), /* tp_basicsize */ 0, /* tp_itemsize */ Modified: python/trunk/Objects/typeobject.c ============================================================================== --- python/trunk/Objects/typeobject.c (original) +++ python/trunk/Objects/typeobject.c Sat Jul 21 08:55:02 2007 @@ -57,7 +57,7 @@ if (!PyString_Check(value)) { PyErr_Format(PyExc_TypeError, "can only assign string to %s.__name__, not '%s'", - type->tp_name, value->ob_type->tp_name); + type->tp_name, Py_Type(value)->tp_name); return -1; } if (strlen(PyString_AS_STRING(value)) @@ -203,7 +203,7 @@ if (!PyTuple_Check(value)) { PyErr_Format(PyExc_TypeError, "can only assign tuple to %s.__bases__, not %s", - type->tp_name, value->ob_type->tp_name); + type->tp_name, 
Py_Type(value)->tp_name); return -1; } if (PyTuple_GET_SIZE(value) == 0) { @@ -218,7 +218,7 @@ PyErr_Format( PyExc_TypeError, "%s.__bases__ must be tuple of old- or new-style classes, not '%s'", - type->tp_name, ob->ob_type->tp_name); + type->tp_name, Py_Type(ob)->tp_name); return -1; } if (PyType_Check(ob)) { @@ -343,8 +343,8 @@ result = Py_None; Py_INCREF(result); } - else if (result->ob_type->tp_descr_get) { - result = result->ob_type->tp_descr_get(result, NULL, + else if (Py_Type(result)->tp_descr_get) { + result = Py_Type(result)->tp_descr_get(result, NULL, (PyObject *)type); } else { @@ -488,7 +488,7 @@ Py_ssize_t i, n; PyMemberDef *mp; - n = type->ob_size; + n = Py_Size(type); mp = PyHeapType_GET_MEMBERS((PyHeapTypeObject *)type); for (i = 0; i < n; i++, mp++) { if (mp->type == T_OBJECT_EX) { @@ -512,10 +512,10 @@ /* Find the nearest base with a different tp_traverse, and traverse slots while we're at it */ - type = self->ob_type; + type = Py_Type(self); base = type; while ((basetraverse = base->tp_traverse) == subtype_traverse) { - if (base->ob_size) { + if (Py_Size(base)) { int err = traverse_slots(base, self, visit, arg); if (err) return err; @@ -547,7 +547,7 @@ Py_ssize_t i, n; PyMemberDef *mp; - n = type->ob_size; + n = Py_Size(type); mp = PyHeapType_GET_MEMBERS((PyHeapTypeObject *)type); for (i = 0; i < n; i++, mp++) { if (mp->type == T_OBJECT_EX && !(mp->flags & READONLY)) { @@ -569,10 +569,10 @@ /* Find the nearest base with a different tp_clear and clear slots while we're at it */ - type = self->ob_type; + type = Py_Type(self); base = type; while ((baseclear = base->tp_clear) == subtype_clear) { - if (base->ob_size) + if (Py_Size(base)) clear_slots(base, self); base = base->tp_base; assert(base); @@ -593,7 +593,7 @@ destructor basedealloc; /* Extract the type; we expect it to be a heap type */ - type = self->ob_type; + type = Py_Type(self); assert(type->tp_flags & Py_TPFLAGS_HEAPTYPE); /* Test whether the type has GC exactly once */ @@ -615,7 +615,7 
@@ /* Find the nearest base with a different tp_dealloc */ base = type; while ((basedealloc = base->tp_dealloc) == subtype_dealloc) { - assert(base->ob_size == 0); + assert(Py_Size(base) == 0); base = base->tp_base; assert(base); } @@ -683,7 +683,7 @@ /* Clear slots up to the nearest base with a different tp_dealloc */ base = type; while ((basedealloc = base->tp_dealloc) == subtype_dealloc) { - if (base->ob_size) + if (Py_Size(base)) clear_slots(base, self); base = base->tp_base; assert(base); @@ -877,13 +877,13 @@ if (*attrobj == NULL) return NULL; } - res = _PyType_Lookup(self->ob_type, *attrobj); + res = _PyType_Lookup(Py_Type(self), *attrobj); if (res != NULL) { descrgetfunc f; - if ((f = res->ob_type->tp_descr_get) == NULL) + if ((f = Py_Type(res)->tp_descr_get) == NULL) Py_INCREF(res); else - res = f(res, self, (PyObject *)(self->ob_type)); + res = f(res, self, (PyObject *)(Py_Type(self))); } return res; } @@ -1301,7 +1301,7 @@ PyObject *mro, *result, *tuple; int checkit = 0; - if (type->ob_type == &PyType_Type) { + if (Py_Type(type) == &PyType_Type) { result = mro_implementation(type); } else { @@ -1336,7 +1336,7 @@ else if (!PyType_Check(cls)) { PyErr_Format(PyExc_TypeError, "mro() returned a non-class ('%.500s')", - cls->ob_type->tp_name); + Py_Type(cls)->tp_name); Py_DECREF(tuple); return -1; } @@ -1565,7 +1565,7 @@ if (value != NULL && !PyDict_Check(value)) { PyErr_Format(PyExc_TypeError, "__dict__ must be set to a dictionary, " - "not a '%.200s'", value->ob_type->tp_name); + "not a '%.200s'", Py_Type(value)->tp_name); return -1; } dict = *dictptr; @@ -1581,16 +1581,16 @@ PyObject **weaklistptr; PyObject *result; - if (obj->ob_type->tp_weaklistoffset == 0) { + if (Py_Type(obj)->tp_weaklistoffset == 0) { PyErr_SetString(PyExc_AttributeError, "This object has no __weakref__"); return NULL; } - assert(obj->ob_type->tp_weaklistoffset > 0); - assert(obj->ob_type->tp_weaklistoffset + sizeof(PyObject *) <= - (size_t)(obj->ob_type->tp_basicsize)); + 
assert(Py_Type(obj)->tp_weaklistoffset > 0); + assert(Py_Type(obj)->tp_weaklistoffset + sizeof(PyObject *) <= + (size_t)(Py_Type(obj)->tp_basicsize)); weaklistptr = (PyObject **) - ((char *)obj + obj->ob_type->tp_weaklistoffset); + ((char *)obj + Py_Type(obj)->tp_weaklistoffset); if (*weaklistptr == NULL) result = Py_None; else @@ -1630,7 +1630,7 @@ if (!PyString_Check(s)) { PyErr_Format(PyExc_TypeError, "__slots__ items must be strings, not '%.200s'", - s->ob_type->tp_name); + Py_Type(s)->tp_name); return 0; } p = (unsigned char *) PyString_AS_STRING(s); @@ -1740,8 +1740,8 @@ if (PyType_CheckExact(metatype) && nargs == 1 && nkwds == 0) { PyObject *x = PyTuple_GET_ITEM(args, 0); - Py_INCREF(x->ob_type); - return (PyObject *) x->ob_type; + Py_INCREF(Py_Type(x)); + return (PyObject *) Py_Type(x); } /* SF bug 475327 -- if that didn't trigger, we need 3 @@ -2176,7 +2176,7 @@ static PyObject * type_getattro(PyTypeObject *type, PyObject *name) { - PyTypeObject *metatype = type->ob_type; + PyTypeObject *metatype = Py_Type(type); PyObject *meta_attribute, *attribute; descrgetfunc meta_get; @@ -2193,7 +2193,7 @@ meta_attribute = _PyType_Lookup(metatype, name); if (meta_attribute != NULL) { - meta_get = meta_attribute->ob_type->tp_descr_get; + meta_get = Py_Type(meta_attribute)->tp_descr_get; if (meta_get != NULL && PyDescr_IsData(meta_attribute)) { /* Data descriptors implement tp_descr_set to intercept @@ -2211,7 +2211,7 @@ attribute = _PyType_Lookup(type, name); if (attribute != NULL) { /* Implement descriptor functionality, if any */ - descrgetfunc local_get = attribute->ob_type->tp_descr_get; + descrgetfunc local_get = Py_Type(attribute)->tp_descr_get; Py_XDECREF(meta_attribute); @@ -2289,7 +2289,7 @@ PyObject_Free((char *)type->tp_doc); Py_XDECREF(et->ht_name); Py_XDECREF(et->ht_slots); - type->ob_type->tp_free((PyObject *)type); + Py_Type(type)->tp_free((PyObject *)type); } static PyObject * @@ -2397,8 +2397,7 @@ } PyTypeObject PyType_Type = { - 
PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "type", /* tp_name */ sizeof(PyHeapTypeObject), /* tp_basicsize */ sizeof(PyMemberDef), /* tp_itemsize */ @@ -2500,7 +2499,7 @@ { int err = 0; if (excess_args(args, kwds)) { - PyTypeObject *type = self->ob_type; + PyTypeObject *type = Py_Type(self); if (type->tp_init != object_init && type->tp_new != object_new) { @@ -2547,7 +2546,7 @@ static void object_dealloc(PyObject *self) { - self->ob_type->tp_free(self); + Py_Type(self)->tp_free(self); } static PyObject * @@ -2556,7 +2555,7 @@ PyTypeObject *type; PyObject *mod, *name, *rtn; - type = self->ob_type; + type = Py_Type(self); mod = type_module(type, NULL); if (mod == NULL) PyErr_Clear(); @@ -2585,7 +2584,7 @@ { unaryfunc f; - f = self->ob_type->tp_repr; + f = Py_Type(self)->tp_repr; if (f == NULL) f = object_repr; return f(self); @@ -2600,8 +2599,8 @@ static PyObject * object_get_class(PyObject *self, void *closure) { - Py_INCREF(self->ob_type); - return (PyObject *)(self->ob_type); + Py_INCREF(Py_Type(self)); + return (PyObject *)(Py_Type(self)); } static int @@ -2686,7 +2685,7 @@ static int object_set_class(PyObject *self, PyObject *value, void *closure) { - PyTypeObject *oldto = self->ob_type; + PyTypeObject *oldto = Py_Type(self); PyTypeObject *newto; if (value == NULL) { @@ -2697,7 +2696,7 @@ if (!PyType_Check(value)) { PyErr_Format(PyExc_TypeError, "__class__ must be set to new-style class, not '%s' object", - value->ob_type->tp_name); + Py_Type(value)->tp_name); return -1; } newto = (PyTypeObject *)value; @@ -2710,7 +2709,7 @@ } if (compatible_for_assignment(newto, oldto, "__class__")) { Py_INCREF(newto); - self->ob_type = newto; + Py_Type(self) = newto; Py_DECREF(oldto); return 0; } @@ -2806,7 +2805,7 @@ if (args != NULL && !PyTuple_Check(args)) { PyErr_Format(PyExc_TypeError, "__getnewargs__ should return a tuple, " - "not '%.200s'", args->ob_type->tp_name); + "not '%.200s'", Py_Type(args)->tp_name); goto end; } 
} @@ -3018,8 +3017,7 @@ PyTypeObject PyBaseObject_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "object", /* tp_name */ sizeof(PyObject), /* tp_basicsize */ 0, /* tp_itemsize */ @@ -3461,8 +3459,8 @@ NULL when type is &PyBaseObject_Type, and we know its ob_type is not NULL (it's initialized to &PyType_Type). But coverity doesn't know that. */ - if (type->ob_type == NULL && base != NULL) - type->ob_type = base->ob_type; + if (Py_Type(type) == NULL && base != NULL) + Py_Type(type) = Py_Type(base); /* Initialize tp_bases */ bases = type->tp_bases; @@ -3822,7 +3820,7 @@ if (i == -1 && PyErr_Occurred()) return -1; if (i < 0) { - PySequenceMethods *sq = self->ob_type->tp_as_sequence; + PySequenceMethods *sq = Py_Type(self)->tp_as_sequence; if (sq && sq->sq_length) { Py_ssize_t n = (*sq->sq_length)(self); if (n < 0) @@ -3998,14 +3996,14 @@ if (!check_num_args(args, 1)) return NULL; other = PyTuple_GET_ITEM(args, 0); - if (other->ob_type->tp_compare != func && - !PyType_IsSubtype(other->ob_type, self->ob_type)) { + if (Py_Type(other)->tp_compare != func && + !PyType_IsSubtype(Py_Type(other), Py_Type(self))) { PyErr_Format( PyExc_TypeError, "%s.__cmp__(x,y) requires y to be a '%s', not a '%s'", - self->ob_type->tp_name, - self->ob_type->tp_name, - other->ob_type->tp_name); + Py_Type(self)->tp_name, + Py_Type(self)->tp_name, + Py_Type(other)->tp_name); return NULL; } res = (*func)(self, other); @@ -4019,7 +4017,7 @@ static int hackcheck(PyObject *self, setattrofunc func, char *what) { - PyTypeObject *type = self->ob_type; + PyTypeObject *type = Py_Type(self); while (type && type->tp_flags & Py_TPFLAGS_HEAPTYPE) type = type->tp_base; /* If type is NULL now, this is a really weird type. 
@@ -4219,7 +4217,7 @@ PyErr_Format(PyExc_TypeError, "%s.__new__(X): X is not a type object (%s)", type->tp_name, - arg0->ob_type->tp_name); + Py_Type(arg0)->tp_name); return NULL; } subtype = (PyTypeObject *)arg0; @@ -4310,14 +4308,14 @@ PyObject *a, *b; int ok; - b = PyObject_GetAttrString((PyObject *)(right->ob_type), name); + b = PyObject_GetAttrString((PyObject *)(Py_Type(right)), name); if (b == NULL) { PyErr_Clear(); /* If right doesn't have it, it's not overloaded */ return 0; } - a = PyObject_GetAttrString((PyObject *)(left->ob_type), name); + a = PyObject_GetAttrString((PyObject *)(Py_Type(left)), name); if (a == NULL) { PyErr_Clear(); Py_DECREF(b); @@ -4342,14 +4340,14 @@ FUNCNAME(PyObject *self, PyObject *other) \ { \ static PyObject *cache_str, *rcache_str; \ - int do_other = self->ob_type != other->ob_type && \ - other->ob_type->tp_as_number != NULL && \ - other->ob_type->tp_as_number->SLOTNAME == TESTFUNC; \ - if (self->ob_type->tp_as_number != NULL && \ - self->ob_type->tp_as_number->SLOTNAME == TESTFUNC) { \ + int do_other = Py_Type(self) != Py_Type(other) && \ + Py_Type(other)->tp_as_number != NULL && \ + Py_Type(other)->tp_as_number->SLOTNAME == TESTFUNC; \ + if (Py_Type(self)->tp_as_number != NULL && \ + Py_Type(self)->tp_as_number->SLOTNAME == TESTFUNC) { \ PyObject *r; \ if (do_other && \ - PyType_IsSubtype(other->ob_type, self->ob_type) && \ + PyType_IsSubtype(Py_Type(other), Py_Type(self)) && \ method_is_overloaded(self, other, ROPSTR)) { \ r = call_maybe( \ other, ROPSTR, &rcache_str, "(O)", self); \ @@ -4361,7 +4359,7 @@ r = call_maybe( \ self, OPSTR, &cache_str, "(O)", other); \ if (r != Py_NotImplemented || \ - other->ob_type == self->ob_type) \ + Py_Type(other) == Py_Type(self)) \ return r; \ Py_DECREF(r); \ } \ @@ -4419,12 +4417,12 @@ if (getitem_str == NULL) return NULL; } - func = _PyType_Lookup(self->ob_type, getitem_str); + func = _PyType_Lookup(Py_Type(self), getitem_str); if (func != NULL) { - if ((f = func->ob_type->tp_descr_get) 
== NULL) + if ((f = Py_Type(func)->tp_descr_get) == NULL) Py_INCREF(func); else { - func = f(func, self, (PyObject *)(self->ob_type)); + func = f(func, self, (PyObject *)(Py_Type(self))); if (func == NULL) { return NULL; } @@ -4563,8 +4561,8 @@ /* Three-arg power doesn't use __rpow__. But ternary_op can call this when the second argument's type uses slot_nb_power, so check before calling self.__pow__. */ - if (self->ob_type->tp_as_number != NULL && - self->ob_type->tp_as_number->nb_power == slot_nb_power) { + if (Py_Type(self)->tp_as_number != NULL && + Py_Type(self)->tp_as_number->nb_power == slot_nb_power) { return call_method(self, "__pow__", &pow_str, "(OO)", other, modulus); } @@ -4754,12 +4752,12 @@ { int c; - if (self->ob_type->tp_compare == _PyObject_SlotCompare) { + if (Py_Type(self)->tp_compare == _PyObject_SlotCompare) { c = half_compare(self, other); if (c <= 1) return c; } - if (other->ob_type->tp_compare == _PyObject_SlotCompare) { + if (Py_Type(other)->tp_compare == _PyObject_SlotCompare) { c = half_compare(other, self); if (c < -1) return -2; @@ -4784,7 +4782,7 @@ } PyErr_Clear(); return PyString_FromFormat("<%s object at %p>", - self->ob_type->tp_name, self); + Py_Type(self)->tp_name, self); } static PyObject * @@ -4893,7 +4891,7 @@ static PyObject * slot_tp_getattr_hook(PyObject *self, PyObject *name) { - PyTypeObject *tp = self->ob_type; + PyTypeObject *tp = Py_Type(self); PyObject *getattr, *getattribute, *res; static PyObject *getattribute_str = NULL; static PyObject *getattr_str = NULL; @@ -4917,7 +4915,7 @@ } getattribute = _PyType_Lookup(tp, getattribute_str); if (getattribute == NULL || - (getattribute->ob_type == &PyWrapperDescr_Type && + (Py_Type(getattribute) == &PyWrapperDescr_Type && ((PyWrapperDescrObject *)getattribute)->d_wrapped == (void *)PyObject_GenericGetAttr)) res = PyObject_GenericGetAttr(self, name); @@ -4986,13 +4984,13 @@ { PyObject *res; - if (self->ob_type->tp_richcompare == slot_tp_richcompare) { + if 
(Py_Type(self)->tp_richcompare == slot_tp_richcompare) { res = half_richcompare(self, other, op); if (res != Py_NotImplemented) return res; Py_DECREF(res); } - if (other->ob_type->tp_richcompare == slot_tp_richcompare) { + if (Py_Type(other)->tp_richcompare == slot_tp_richcompare) { res = half_richcompare(other, self, _Py_SwappedOp[op]); if (res != Py_NotImplemented) { return res; @@ -5025,7 +5023,7 @@ if (func == NULL) { PyErr_Format(PyExc_TypeError, "'%.200s' object is not iterable", - self->ob_type->tp_name); + Py_Type(self)->tp_name); return NULL; } Py_DECREF(func); @@ -5042,7 +5040,7 @@ static PyObject * slot_tp_descr_get(PyObject *self, PyObject *obj, PyObject *type) { - PyTypeObject *tp = self->ob_type; + PyTypeObject *tp = Py_Type(self); PyObject *get; static PyObject *get_str = NULL; @@ -5100,7 +5098,7 @@ if (res != Py_None) { PyErr_Format(PyExc_TypeError, "__init__() should return None, not '%.200s'", - res->ob_type->tp_name); + Py_Type(res)->tp_name); Py_DECREF(res); return -1; } @@ -5185,7 +5183,7 @@ _Py_NewReference(self); self->ob_refcnt = refcnt; } - assert(!PyType_IS_GC(self->ob_type) || + assert(!PyType_IS_GC(Py_Type(self)) || _Py_AS_GC(self)->gc.gc_refs != _PyGC_REFS_UNTRACKED); /* If Py_REF_DEBUG, _Py_NewReference bumped _Py_RefTotal, so * we need to undo that. */ @@ -5197,8 +5195,8 @@ * undone. 
*/ #ifdef COUNT_ALLOCS - --self->ob_type->tp_frees; - --self->ob_type->tp_allocs; + --Py_Type(self)->tp_frees; + --Py_Type(self)->tp_allocs; #endif } @@ -5560,7 +5558,7 @@ descr = _PyType_Lookup(type, p->name_strobj); if (descr == NULL) continue; - if (descr->ob_type == &PyWrapperDescr_Type) { + if (Py_Type(descr) == &PyWrapperDescr_Type) { void **tptr = resolve_slotdups(type, p->name_strobj); if (tptr == NULL || tptr == ptr) generic = p->function; @@ -5575,7 +5573,7 @@ use_generic = 1; } } - else if (descr->ob_type == &PyCFunction_Type && + else if (Py_Type(descr) == &PyCFunction_Type && PyCFunction_GET_FUNCTION(descr) == (PyCFunction)tp_new_wrapper && strcmp(p->name, "__new__") == 0) @@ -5846,7 +5844,7 @@ Py_XDECREF(su->obj); Py_XDECREF(su->type); Py_XDECREF(su->obj_type); - self->ob_type->tp_free(self); + Py_Type(self)->tp_free(self); } static PyObject * @@ -5911,7 +5909,7 @@ res = PyDict_GetItem(dict, name); if (res != NULL) { Py_INCREF(res); - f = res->ob_type->tp_descr_get; + f = Py_Type(res)->tp_descr_get; if (f != NULL) { tmp = f(res, /* Only pass 'obj' param if @@ -5947,7 +5945,7 @@ the normal case; the return value is obj.__class__. But... when obj is an instance, we want to allow for the case where - obj->ob_type is not a subclass of type, but obj.__class__ is! + Py_Type(obj) is not a subclass of type, but obj.__class__ is! This will allow using super() with a proxy for obj. 
*/ @@ -5958,9 +5956,9 @@ } /* Normal case */ - if (PyType_IsSubtype(obj->ob_type, type)) { - Py_INCREF(obj->ob_type); - return obj->ob_type; + if (PyType_IsSubtype(Py_Type(obj), type)) { + Py_INCREF(Py_Type(obj)); + return Py_Type(obj); } else { /* Try the slow way */ @@ -5977,7 +5975,7 @@ if (class_attr != NULL && PyType_Check(class_attr) && - (PyTypeObject *)class_attr != obj->ob_type) + (PyTypeObject *)class_attr != Py_Type(obj)) { int ok = PyType_IsSubtype( (PyTypeObject *)class_attr, type); @@ -6008,10 +6006,10 @@ Py_INCREF(self); return self; } - if (su->ob_type != &PySuper_Type) + if (Py_Type(su) != &PySuper_Type) /* If su is an instance of a (strict) subclass of super, call its type */ - return PyObject_CallFunctionObjArgs((PyObject *)su->ob_type, + return PyObject_CallFunctionObjArgs((PyObject *)Py_Type(su), su->type, obj, NULL); else { /* Inline the common case */ @@ -6080,8 +6078,7 @@ } PyTypeObject PySuper_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "super", /* tp_name */ sizeof(superobject), /* tp_basicsize */ 0, /* tp_itemsize */ Modified: python/trunk/Objects/unicodeobject.c ============================================================================== --- python/trunk/Objects/unicodeobject.c (original) +++ python/trunk/Objects/unicodeobject.c Sat Jul 21 08:55:02 2007 @@ -312,7 +312,7 @@ else { PyMem_DEL(unicode->str); Py_XDECREF(unicode->defenc); - unicode->ob_type->tp_free((PyObject *)unicode); + Py_Type(unicode)->tp_free((PyObject *)unicode); } } @@ -326,7 +326,7 @@ return -1; } v = (PyUnicodeObject *)*unicode; - if (v == NULL || !PyUnicode_Check(v) || v->ob_refcnt != 1 || length < 0) { + if (v == NULL || !PyUnicode_Check(v) || Py_Refcnt(v) != 1 || length < 0) { PyErr_BadInternalCall(); return -1; } @@ -554,7 +554,7 @@ PyErr_Format(PyExc_TypeError, "coercing to Unicode: need string or buffer, " "%.80s found", - obj->ob_type->tp_name); + Py_Type(obj)->tp_name); goto onError; } @@ -604,7 
+604,7 @@ if (!PyUnicode_Check(unicode)) { PyErr_Format(PyExc_TypeError, "decoder did not return an unicode object (type=%.400s)", - unicode->ob_type->tp_name); + Py_Type(unicode)->tp_name); Py_DECREF(unicode); goto onError; } @@ -714,7 +714,7 @@ if (!PyString_Check(v)) { PyErr_Format(PyExc_TypeError, "encoder did not return a string object (type=%.400s)", - v->ob_type->tp_name); + Py_Type(v)->tp_name); Py_DECREF(v); goto onError; } @@ -3242,8 +3242,7 @@ } static PyTypeObject EncodingMapType = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "EncodingMap", /*tp_name*/ sizeof(struct encoding_map), /*tp_basicsize*/ 0, /*tp_itemsize*/ @@ -3502,7 +3501,7 @@ char *outstart; Py_ssize_t outsize = PyString_GET_SIZE(*outobj); - if (mapping->ob_type == &EncodingMapType) { + if (Py_Type(mapping) == &EncodingMapType) { int res = encoding_map_lookup(c, mapping); Py_ssize_t requiredsize = *outpos+1; if (res == -1) @@ -3574,7 +3573,7 @@ /* find all unencodable characters */ while (collendpos < size) { PyObject *rep; - if (mapping->ob_type == &EncodingMapType) { + if (Py_Type(mapping) == &EncodingMapType) { int res = encoding_map_lookup(p[collendpos], mapping); if (res != -1) break; @@ -4632,7 +4631,7 @@ PyErr_Format(PyExc_TypeError, "sequence item %zd: expected string or Unicode," " %.80s found", - i, item->ob_type->tp_name); + i, Py_Type(item)->tp_name); goto onError; } item = PyUnicode_FromObject(item); @@ -5632,7 +5631,7 @@ PyErr_Format(PyExc_TypeError, "encoder did not return a string/unicode object " "(type=%.400s)", - v->ob_type->tp_name); + Py_Type(v)->tp_name); Py_DECREF(v); return NULL; } @@ -5668,7 +5667,7 @@ PyErr_Format(PyExc_TypeError, "decoder did not return a string/unicode object " "(type=%.400s)", - v->ob_type->tp_name); + Py_Type(v)->tp_name); Py_DECREF(v); return NULL; } @@ -7499,7 +7498,7 @@ arglen = -1; argidx = -2; } - if (args->ob_type->tp_as_mapping && !PyTuple_Check(args) && + if (Py_Type(args)->tp_as_mapping && 
!PyTuple_Check(args) && !PyObject_TypeCheck(args, &PyBaseString_Type)) dict = args; @@ -7963,8 +7962,7 @@ errors can be 'strict', 'replace' or 'ignore' and defaults to 'strict'."); PyTypeObject PyUnicode_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, /* ob_size */ + PyVarObject_HEAD_INIT(&PyType_Type, 0) "unicode", /* tp_name */ sizeof(PyUnicodeObject), /* tp_size */ 0, /* tp_itemsize */ Modified: python/trunk/Objects/weakrefobject.c ============================================================================== --- python/trunk/Objects/weakrefobject.c (original) +++ python/trunk/Objects/weakrefobject.c Sat Jul 21 08:55:02 2007 @@ -105,7 +105,7 @@ { PyObject_GC_UnTrack(self); clear_weakref((PyWeakReference *) self); - self->ob_type->tp_free(self); + Py_Type(self)->tp_free(self); } @@ -172,7 +172,7 @@ name ? "" : "", self, - PyWeakref_GET_OBJECT(self)->ob_type->tp_name, + Py_Type(PyWeakref_GET_OBJECT(self))->tp_name, PyWeakref_GET_OBJECT(self), name); Py_XDECREF(nameobj); @@ -274,10 +274,10 @@ PyWeakReference *ref, *proxy; PyWeakReference **list; - if (!PyType_SUPPORTS_WEAKREFS(ob->ob_type)) { + if (!PyType_SUPPORTS_WEAKREFS(Py_Type(ob))) { PyErr_Format(PyExc_TypeError, "cannot create weak reference to '%s' object", - ob->ob_type->tp_name); + Py_Type(ob)->tp_name); return NULL; } if (callback == Py_None) @@ -332,8 +332,7 @@ PyTypeObject _PyWeakref_RefType = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "weakref", sizeof(PyWeakReference), 0, @@ -447,7 +446,7 @@ char buf[160]; PyOS_snprintf(buf, sizeof(buf), "", proxy, - PyWeakref_GET_OBJECT(proxy)->ob_type->tp_name, + Py_Type(PyWeakref_GET_OBJECT(proxy))->tp_name, PyWeakref_GET_OBJECT(proxy)); return PyString_FromString(buf); } @@ -646,8 +645,7 @@ PyTypeObject _PyWeakref_ProxyType = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "weakproxy", sizeof(PyWeakReference), 0, @@ -681,8 +679,7 @@ PyTypeObject _PyWeakref_CallableProxyType = { - 
PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "weakcallableproxy", sizeof(PyWeakReference), 0, @@ -722,10 +719,10 @@ PyWeakReference **list; PyWeakReference *ref, *proxy; - if (!PyType_SUPPORTS_WEAKREFS(ob->ob_type)) { + if (!PyType_SUPPORTS_WEAKREFS(Py_Type(ob))) { PyErr_Format(PyExc_TypeError, "cannot create weak reference to '%s' object", - ob->ob_type->tp_name); + Py_Type(ob)->tp_name); return NULL; } list = GET_WEAKREFS_LISTPTR(ob); @@ -781,10 +778,10 @@ PyWeakReference **list; PyWeakReference *ref, *proxy; - if (!PyType_SUPPORTS_WEAKREFS(ob->ob_type)) { + if (!PyType_SUPPORTS_WEAKREFS(Py_Type(ob))) { PyErr_Format(PyExc_TypeError, "cannot create weak reference to '%s' object", - ob->ob_type->tp_name); + Py_Type(ob)->tp_name); return NULL; } list = GET_WEAKREFS_LISTPTR(ob); @@ -807,9 +804,9 @@ PyWeakReference *prev; if (PyCallable_Check(ob)) - result->ob_type = &_PyWeakref_CallableProxyType; + Py_Type(result) = &_PyWeakref_CallableProxyType; else - result->ob_type = &_PyWeakref_ProxyType; + Py_Type(result) = &_PyWeakref_ProxyType; get_basic_refs(*list, &ref, &proxy); if (callback == NULL) { if (proxy != NULL) { @@ -874,7 +871,7 @@ PyWeakReference **list; if (object == NULL - || !PyType_SUPPORTS_WEAKREFS(object->ob_type) + || !PyType_SUPPORTS_WEAKREFS(Py_Type(object)) || object->ob_refcnt != 0) { PyErr_BadInternalCall(); return; Modified: python/trunk/PC/_msi.c ============================================================================== --- python/trunk/PC/_msi.c (original) +++ python/trunk/PC/_msi.c Sat Jul 21 08:55:02 2007 @@ -417,8 +417,7 @@ }; static PyTypeObject record_Type = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "_msi.Record", /*tp_name*/ sizeof(msiobj), /*tp_basicsize*/ 0, /*tp_itemsize*/ @@ -584,8 +583,7 @@ }; static PyTypeObject summary_Type = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "_msi.SummaryInformation", /*tp_name*/ sizeof(msiobj), 
/*tp_basicsize*/ 0, /*tp_itemsize*/ @@ -733,8 +731,7 @@ }; static PyTypeObject msiview_Type = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "_msi.View", /*tp_name*/ sizeof(msiobj), /*tp_basicsize*/ 0, /*tp_itemsize*/ @@ -851,8 +848,7 @@ }; static PyTypeObject msidb_Type = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "_msi.Database", /*tp_name*/ sizeof(msiobj), /*tp_basicsize*/ 0, /*tp_itemsize*/ Modified: python/trunk/PC/_winreg.c ============================================================================== --- python/trunk/PC/_winreg.c (original) +++ python/trunk/PC/_winreg.c Sat Jul 21 08:55:02 2007 @@ -460,8 +460,7 @@ /* The type itself */ PyTypeObject PyHKEY_Type = { - PyObject_HEAD_INIT(0) /* fill in type at module init */ - 0, + PyVarObject_HEAD_INIT(0, 0) /* fill in type at module init */ "PyHKEY", sizeof(PyHKEYObject), 0, Modified: python/trunk/Python/ceval.c ============================================================================== --- python/trunk/Python/ceval.c (original) +++ python/trunk/Python/ceval.c Sat Jul 21 08:55:02 2007 @@ -3655,7 +3655,7 @@ } if (argdefs != NULL) { d = &PyTuple_GET_ITEM(argdefs, 0); - nd = ((PyTupleObject *)argdefs)->ob_size; + nd = Py_Size(argdefs); } return PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, (*pp_stack)-n, na, Modified: python/trunk/Python/import.c ============================================================================== --- python/trunk/Python/import.c (original) +++ python/trunk/Python/import.c Sat Jul 21 08:55:02 2007 @@ -3030,8 +3030,7 @@ static PyTypeObject NullImporterType = { - PyObject_HEAD_INIT(NULL) - 0, /*ob_size*/ + PyVarObject_HEAD_INIT(NULL, 0) "imp.NullImporter", /*tp_name*/ sizeof(NullImporter), /*tp_basicsize*/ 0, /*tp_itemsize*/ Modified: python/trunk/Python/symtable.c ============================================================================== --- python/trunk/Python/symtable.c (original) +++ 
python/trunk/Python/symtable.c Sat Jul 21 08:55:02 2007 @@ -117,8 +117,7 @@ }; PyTypeObject PySTEntry_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "symtable entry", sizeof(PySTEntryObject), 0, Modified: python/trunk/Python/traceback.c ============================================================================== --- python/trunk/Python/traceback.c (original) +++ python/trunk/Python/traceback.c Sat Jul 21 08:55:02 2007 @@ -52,8 +52,7 @@ } PyTypeObject PyTraceBack_Type = { - PyObject_HEAD_INIT(&PyType_Type) - 0, + PyVarObject_HEAD_INIT(&PyType_Type, 0) "traceback", sizeof(PyTracebackObject), 0, From python-checkins at python.org Sat Jul 21 11:18:45 2007 From: python-checkins at python.org (georg.brandl) Date: Sat, 21 Jul 2007 11:18:45 +0200 (CEST) Subject: [Python-checkins] r56481 - peps/trunk/pep-3123.txt Message-ID: <20070721091845.B59D71E4008@bag.python.org> Author: georg.brandl Date: Sat Jul 21 11:18:45 2007 New Revision: 56481 Modified: peps/trunk/pep-3123.txt Log: Minor fixes and markup. Modified: peps/trunk/pep-3123.txt ============================================================================== --- peps/trunk/pep-3123.txt (original) +++ peps/trunk/pep-3123.txt Sat Jul 21 11:18:45 2007 @@ -14,7 +14,7 @@ ======== Python currently relies on undefined C behavior, with its -usage of PyObject_HEAD. This PEP proposes to change that +usage of ``PyObject_HEAD``. This PEP proposes to change that into standard C. Rationale @@ -43,18 +43,18 @@ } The problem here is that the storage is both accessed as -if it where struct PyObject, and as struct FooObject. +if it where struct ``PyObject``, and as struct ``FooObject``. Historically, compilers did not have any problems with this code. 
However, modern compilers use that clause as an -optimization opportunity, finding that f->ob_refcnt and -o->ob_refcnt cannot possibly refer to the same memory, and +optimization opportunity, finding that ``f->ob_refcnt`` and +``o->ob_refcnt`` cannot possibly refer to the same memory, and that therefore the function should return 0, without having to fetch the value of ob_refcnt at all in the return -statement. For GCC, Python now uses -fno-strict-aliasing +statement. For GCC, Python now uses ``-fno-strict-aliasing`` to work around that problem; with other compilers, it may just see undefined behavior. Even with GCC, using --fno-strict-aliasing may pessimize the generated code +``-fno-strict-aliasing`` may pessimize the generated code unnecessarily. Specification @@ -63,12 +63,12 @@ Standard C has one specific exception to its aliasing rules precisely designed to support the case of Python: a value of a struct type may also be accessed through a pointer to the first field. E.g. if a -struct starts with an int, the struct\* may also be cast to an int\*, -allowing to write int values into the first field. +struct starts with an ``int``, the ``struct *`` may also be cast to +an ``int *``, allowing to write int values into the first field. 
-For Python, PyObject_HEAD and PyObject_VAR_HEAD will be changed +For Python, ``PyObject_HEAD`` and ``PyObject_VAR_HEAD`` will be changed to not list all fields anymore, but list a single field of type -PyObject/PyVarObject:: +``PyObject``/``PyVarObject``:: typedef struct _object { _PyObject_HEAD_EXTRA @@ -92,14 +92,14 @@ PyObject *start, *stop, *step; } PySliceObject; - typedef struct{ + typedef struct { PyVarObject ob_base; PyObject **ob_item; Py_ssize_t allocated; } PyListObject; -The above definitions of PyObject_HEAD is normative, so extension -authors MAY either use the macro, or put the ob_base field explicitly +The above definitions of ``PyObject_HEAD`` are normative, so extension +authors MAY either use the macro, or put the ``ob_base`` field explicitly into their structs. As a convention, the base field SHOULD be called ob_base. However, all @@ -112,7 +112,7 @@ #define Py_Refcnt(o) (((PyObject*)(o))->ob_refcnt) #define Py_Size(o) (((PyVarObject*)(o))->ob_size) -are added. E.g. the code blocks:: +are added. E.g. the code blocks :: #define PyList_CheckExact(op) ((op)->ob_type == &PyList_Type) @@ -124,12 +124,12 @@ return Py_Type(func)->tp_name; -For initialization of type objects, the current sequence:: +For initialization of type objects, the current sequence :: PyObject_HEAD_INIT(NULL) 0, /* ob_size */ -becomes incorrect, and must be replaced with +becomes incorrect, and must be replaced with :: PyVarObject_HEAD_INIT(NULL, 0) @@ -137,9 +137,9 @@ ============================= To support modules that compile with both Python 2.6 and Python 3.0, -the Py_* macros is added to Python 2.6. The macros Py_INCREF -and Py_DECREF will be changed to cast their argument to PyObject\*, -so that module authors can also explicitly declare the ob_base +the ``Py_*`` macros are added to Python 2.6. 
The macros ``Py_INCREF`` +and ``Py_DECREF`` will be changed to cast their argument to ``PyObject *``, +so that module authors can also explicitly declare the ``ob_base`` field in modules designed for Python 2.6. Copyright From python-checkins at python.org Sun Jul 22 02:13:00 2007 From: python-checkins at python.org (facundo.batista) Date: Sun, 22 Jul 2007 02:13:00 +0200 (CEST) Subject: [Python-checkins] r56485 - python/trunk/Lib/test/test_asyncore.py Message-ID: <20070722001300.930AE1E4008@bag.python.org> Author: facundo.batista Date: Sun Jul 22 02:13:00 2007 New Revision: 56485 Modified: python/trunk/Lib/test/test_asyncore.py Log: Selectively enable tests for asyncore.readwrite based on the presence of poll support in the select module (since this is the only case in which readwrite can be called). [GSoC - Alan McIntyre] Modified: python/trunk/Lib/test/test_asyncore.py ============================================================================== --- python/trunk/Lib/test/test_asyncore.py (original) +++ python/trunk/Lib/test/test_asyncore.py Sun Jul 22 02:13:00 2007 @@ -106,87 +106,83 @@ asyncore._exception(tr2) self.assertEqual(tr2.error_handled, True) -## Commented out these tests because test a non-documented function -## (which is actually public, why it's not documented?). Anyway, the -## tests *and* the function uses constants in the select module that -## are not present in Windows systems (see this thread: -## http://mail.python.org/pipermail/python-list/2001-October/109973.html) -## Note even that these constants are mentioned in the select -## documentation, as a parameter of "poll" method "register", but are -## not explicit declared as constants of the module. -## . 
Facundo Batista -## -## def test_readwrite(self): -## # Check that correct methods are called by readwrite() -## -## class testobj: -## def __init__(self): -## self.read = False -## self.write = False -## self.expt = False -## -## def handle_read_event(self): -## self.read = True -## -## def handle_write_event(self): -## self.write = True -## -## def handle_expt_event(self): -## self.expt = True -## -## def handle_error(self): -## self.error_handled = True -## -## for flag in (select.POLLIN, select.POLLPRI): -## tobj = testobj() -## self.assertEqual(tobj.read, False) -## asyncore.readwrite(tobj, flag) -## self.assertEqual(tobj.read, True) -## -## # check that ExitNow exceptions in the object handler method -## # bubbles all the way up through asyncore readwrite call -## tr1 = exitingdummy() -## self.assertRaises(asyncore.ExitNow, asyncore.readwrite, tr1, flag) -## -## # check that an exception other than ExitNow in the object handler -## # method causes the handle_error method to get called -## tr2 = crashingdummy() -## asyncore.readwrite(tr2, flag) -## self.assertEqual(tr2.error_handled, True) -## -## tobj = testobj() -## self.assertEqual(tobj.write, False) -## asyncore.readwrite(tobj, select.POLLOUT) -## self.assertEqual(tobj.write, True) -## -## # check that ExitNow exceptions in the object handler method -## # bubbles all the way up through asyncore readwrite call -## tr1 = exitingdummy() -## self.assertRaises(asyncore.ExitNow, asyncore.readwrite, tr1, -## select.POLLOUT) -## -## # check that an exception other than ExitNow in the object handler -## # method causes the handle_error method to get called -## tr2 = crashingdummy() -## asyncore.readwrite(tr2, select.POLLOUT) -## self.assertEqual(tr2.error_handled, True) -## -## for flag in (select.POLLERR, select.POLLHUP, select.POLLNVAL): -## tobj = testobj() -## self.assertEqual(tobj.expt, False) -## asyncore.readwrite(tobj, flag) -## self.assertEqual(tobj.expt, True) -## -## # check that ExitNow exceptions in 
the object handler method -## # bubbles all the way up through asyncore readwrite calls -## tr1 = exitingdummy() -## self.assertRaises(asyncore.ExitNow, asyncore.readwrite, tr1, flag) -## -## # check that an exception other than ExitNow in the object handler -## # method causes the handle_error method to get called -## tr2 = crashingdummy() -## asyncore.readwrite(tr2, flag) -## self.assertEqual(tr2.error_handled, True) + # asyncore.readwrite uses constants in the select module that + # are not present in Windows systems (see this thread: + # http://mail.python.org/pipermail/python-list/2001-October/109973.html) + # These constants should be present as long as poll is available + + if hasattr(select, 'poll'): + def test_readwrite(self): + # Check that correct methods are called by readwrite() + + class testobj: + def __init__(self): + self.read = False + self.write = False + self.expt = False + + def handle_read_event(self): + self.read = True + + def handle_write_event(self): + self.write = True + + def handle_expt_event(self): + self.expt = True + + def handle_error(self): + self.error_handled = True + + for flag in (select.POLLIN, select.POLLPRI): + tobj = testobj() + self.assertEqual(tobj.read, False) + asyncore.readwrite(tobj, flag) + self.assertEqual(tobj.read, True) + + # check that ExitNow exceptions in the object handler method + # bubbles all the way up through asyncore readwrite call + tr1 = exitingdummy() + self.assertRaises(asyncore.ExitNow, asyncore.readwrite, tr1, flag) + + # check that an exception other than ExitNow in the object handler + # method causes the handle_error method to get called + tr2 = crashingdummy() + asyncore.readwrite(tr2, flag) + self.assertEqual(tr2.error_handled, True) + + tobj = testobj() + self.assertEqual(tobj.write, False) + asyncore.readwrite(tobj, select.POLLOUT) + self.assertEqual(tobj.write, True) + + # check that ExitNow exceptions in the object handler method + # bubbles all the way up through asyncore readwrite call 
+ tr1 = exitingdummy() + self.assertRaises(asyncore.ExitNow, asyncore.readwrite, tr1, + select.POLLOUT) + + # check that an exception other than ExitNow in the object handler + # method causes the handle_error method to get called + tr2 = crashingdummy() + asyncore.readwrite(tr2, select.POLLOUT) + self.assertEqual(tr2.error_handled, True) + + for flag in (select.POLLERR, select.POLLHUP, select.POLLNVAL): + tobj = testobj() + self.assertEqual(tobj.expt, False) + asyncore.readwrite(tobj, flag) + self.assertEqual(tobj.expt, True) + + # check that ExitNow exceptions in the object handler method + # bubbles all the way up through asyncore readwrite calls + tr1 = exitingdummy() + self.assertRaises(asyncore.ExitNow, asyncore.readwrite, tr1, flag) + + # check that an exception other than ExitNow in the object handler + # method causes the handle_error method to get called + tr2 = crashingdummy() + asyncore.readwrite(tr2, flag) + self.assertEqual(tr2.error_handled, True) def test_closeall(self): self.closeall_check(False) From python-checkins at python.org Sun Jul 22 12:18:07 2007 From: python-checkins at python.org (nick.coghlan) Date: Sun, 22 Jul 2007 12:18:07 +0200 (CEST) Subject: [Python-checkins] r56488 - python/trunk/Lib/test/test_runpy.py Message-ID: <20070722101807.938721E4008@bag.python.org> Author: nick.coghlan Date: Sun Jul 22 12:18:07 2007 New Revision: 56488 Modified: python/trunk/Lib/test/test_runpy.py Log: Add explicit relative import tests for runpy.run_module Modified: python/trunk/Lib/test/test_runpy.py ============================================================================== --- python/trunk/Lib/test/test_runpy.py (original) +++ python/trunk/Lib/test/test_runpy.py Sun Jul 22 12:18:07 2007 @@ -87,9 +87,15 @@ def test_library_module(self): run_module("runpy") + def _add_pkg_dir(self, pkg_dir): + os.mkdir(pkg_dir) + pkg_fname = os.path.join(pkg_dir, "__init__"+os.extsep+"py") + pkg_file = open(pkg_fname, "w") + pkg_file.close() + return pkg_fname + def 
_make_pkg(self, source, depth): pkg_name = "__runpy_pkg__" - init_fname = "__init__"+os.extsep+"py" test_fname = "runpy_test"+os.extsep+"py" pkg_dir = sub_dir = tempfile.mkdtemp() if verbose: print " Package tree in:", sub_dir @@ -97,11 +103,8 @@ if verbose: print " Updated sys.path:", sys.path[0] for i in range(depth): sub_dir = os.path.join(sub_dir, pkg_name) - os.mkdir(sub_dir) + pkg_fname = self._add_pkg_dir(sub_dir) if verbose: print " Next level in:", sub_dir - pkg_fname = os.path.join(sub_dir, init_fname) - pkg_file = open(pkg_fname, "w") - pkg_file.close() if verbose: print " Created:", pkg_fname mod_fname = os.path.join(sub_dir, test_fname) mod_file = open(mod_fname, "w") @@ -146,23 +149,81 @@ try: if verbose: print "Running from source:", mod_name d1 = run_module(mod_name) # Read from source + self.failUnless("x" in d1) self.failUnless(d1["x"] == 1) del d1 # Ensure __loader__ entry doesn't keep file open __import__(mod_name) os.remove(mod_fname) if verbose: print "Running from compiled:", mod_name d2 = run_module(mod_name) # Read from bytecode + self.failUnless("x" in d2) self.failUnless(d2["x"] == 1) del d2 # Ensure __loader__ entry doesn't keep file open finally: self._del_pkg(pkg_dir, depth, mod_name) if verbose: print "Module executed successfully" + def _add_relative_modules(self, base_dir, depth): + if depth <= 1: + raise ValueError("Relative module test needs depth > 1") + pkg_name = "__runpy_pkg__" + module_dir = base_dir + for i in range(depth): + parent_dir = module_dir + module_dir = os.path.join(module_dir, pkg_name) + # Add sibling module + sibling_fname = os.path.join(module_dir, "sibling"+os.extsep+"py") + sibling_file = open(sibling_fname, "w") + sibling_file.close() + if verbose: print " Added sibling module:", sibling_fname + # Add nephew module + uncle_dir = os.path.join(parent_dir, "uncle") + self._add_pkg_dir(uncle_dir) + if verbose: print " Added uncle package:", uncle_dir + cousin_dir = os.path.join(uncle_dir, "cousin") + 
self._add_pkg_dir(cousin_dir) + if verbose: print " Added cousin package:", cousin_dir + nephew_fname = os.path.join(cousin_dir, "nephew"+os.extsep+"py") + nephew_file = open(nephew_fname, "w") + nephew_file.close() + if verbose: print " Added nephew module:", nephew_fname + + def _check_relative_imports(self, depth, run_name=None): + contents = """\ +from __future__ import absolute_import +from . import sibling +from ..uncle.cousin import nephew +""" + pkg_dir, mod_fname, mod_name = ( + self._make_pkg(contents, depth)) + try: + self._add_relative_modules(pkg_dir, depth) + if verbose: print "Running from source:", mod_name + d1 = run_module(mod_name) # Read from source + self.failUnless("sibling" in d1) + self.failUnless("nephew" in d1) + del d1 # Ensure __loader__ entry doesn't keep file open + __import__(mod_name) + os.remove(mod_fname) + if verbose: print "Running from compiled:", mod_name + d2 = run_module(mod_name) # Read from bytecode + self.failUnless("sibling" in d2) + self.failUnless("nephew" in d2) + del d2 # Ensure __loader__ entry doesn't keep file open + finally: + self._del_pkg(pkg_dir, depth, mod_name) + if verbose: print "Module executed successfully" + def test_run_module(self): for depth in range(4): if verbose: print "Testing package depth:", depth self._check_module(depth) + def test_explicit_relative_import(self): + for depth in range(2, 5): + if verbose: print "Testing relative imports at depth:", depth + self._check_relative_imports(depth) + def test_main(): run_unittest(RunModuleCodeTest) From buildbot at python.org Sun Jul 22 13:01:59 2007 From: buildbot at python.org (buildbot at python.org) Date: Sun, 22 Jul 2007 11:01:59 +0000 Subject: [Python-checkins] buildbot warnings in PPC64 Debian trunk Message-ID: <20070722110159.CCAB71E4007@bag.python.org> The Buildbot has detected a new failure of PPC64 Debian trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/PPC64%2520Debian%2520trunk/builds/64 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: nick.coghlan Build had warnings: warnings test Excerpt from the test logfile: Traceback (most recent call last): File "/home/pybot/buildarea64/trunk.klose-debian-ppc64/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home/pybot/buildarea64/trunk.klose-debian-ppc64/build/Lib/test/test_socketserver.py", line 93, in run svr.serve_a_few() File "/home/pybot/buildarea64/trunk.klose-debian-ppc64/build/Lib/test/test_socketserver.py", line 35, in serve_a_few self.handle_request() File "/home/pybot/buildarea64/trunk.klose-debian-ppc64/build/Lib/SocketServer.py", line 224, in handle_request self.handle_error(request, client_address) File "/home/pybot/buildarea64/trunk.klose-debian-ppc64/build/Lib/SocketServer.py", line 222, in handle_request self.process_request(request, client_address) File "/home/pybot/buildarea64/trunk.klose-debian-ppc64/build/Lib/SocketServer.py", line 429, in process_request self.collect_children() File "/home/pybot/buildarea64/trunk.klose-debian-ppc64/build/Lib/SocketServer.py", line 425, in collect_children self.active_children.remove(pid) ValueError: list.remove(x): x not in list 1 test failed: test_socketserver make: *** [buildbottest] Error 1 sincerely, -The Buildbot From python-checkins at python.org Mon Jul 23 07:20:50 2007 From: python-checkins at python.org (george.yoshida) Date: Mon, 23 Jul 2007 07:20:50 +0200 (CEST) Subject: [Python-checkins] r56505 - peps/trunk/pep-0224.txt peps/trunk/pep-0367.txt peps/trunk/pep-3135.txt Message-ID: <20070723052050.4D0DA1E4007@bag.python.org> Author: george.yoshida Date: Mon Jul 23 07:20:49 2007 New Revision: 56505 Modified: peps/trunk/pep-0224.txt peps/trunk/pep-0367.txt peps/trunk/pep-3135.txt Log: fix typos Modified: peps/trunk/pep-0224.txt 
============================================================================== --- peps/trunk/pep-0224.txt (original) +++ peps/trunk/pep-0224.txt Mon Jul 23 07:20:49 2007 @@ -202,7 +202,7 @@ * must start with two underscores (to match __doc__) * must be extractable using some form of inspection (e.g. by using a naming convention which includes some fixed name part) - * must be compatible with class inheritence (i.e. should be + * must be compatible with class inheritance (i.e. should be stored as attribute) Later on in March, Guido pronounced on this PEP in March 2001 (on Modified: peps/trunk/pep-0367.txt ============================================================================== --- peps/trunk/pep-0367.txt (original) +++ peps/trunk/pep-0367.txt Mon Jul 23 07:20:49 2007 @@ -528,7 +528,7 @@ This proposal leaves no room for different names, signatures, or application to other classes, or instances. A way to allow some similar use alongside the normal proposal would be favorable, encouraging good design of multiple -inheritence trees and compatible methods. +inheritance trees and compatible methods. super(\*p, \*\*kw) ------------------ Modified: peps/trunk/pep-3135.txt ============================================================================== --- peps/trunk/pep-3135.txt (original) +++ peps/trunk/pep-3135.txt Mon Jul 23 07:20:49 2007 @@ -528,7 +528,7 @@ This proposal leaves no room for different names, signatures, or application to other classes, or instances. A way to allow some similar use alongside the normal proposal would be favorable, encouraging good design of multiple -inheritence trees and compatible methods. +inheritance trees and compatible methods. 
super(\*p, \*\*kw) ------------------ From python-checkins at python.org Mon Jul 23 10:54:44 2007 From: python-checkins at python.org (georg.brandl) Date: Mon, 23 Jul 2007 10:54:44 +0200 (CEST) Subject: [Python-checkins] r56506 - doctools Message-ID: <20070723085444.313231E4007@bag.python.org> Author: georg.brandl Date: Mon Jul 23 10:54:43 2007 New Revision: 56506 Added: doctools/ Log: Add a new project directory for the documentation tools. From python-checkins at python.org Mon Jul 23 10:56:48 2007 From: python-checkins at python.org (georg.brandl) Date: Mon, 23 Jul 2007 10:56:48 +0200 (CEST) Subject: [Python-checkins] r56507 - in doctools: branches tags trunk Message-ID: <20070723085648.5A2361E4007@bag.python.org> Author: georg.brandl Date: Mon Jul 23 10:56:48 2007 New Revision: 56507 Added: doctools/branches/ doctools/tags/ doctools/trunk/ Log: Make trunk, branches, tags directories. From python-checkins at python.org Mon Jul 23 11:02:29 2007 From: python-checkins at python.org (georg.brandl) Date: Mon, 23 Jul 2007 11:02:29 +0200 (CEST) Subject: [Python-checkins] r56508 - in doctools/trunk: Makefile README TODO convert.py converter converter/__init__.py converter/console.py converter/docnodes.py converter/filenamemap.py converter/latexparser.py converter/newfiles converter/newfiles/TODO converter/newfiles/about.rst converter/newfiles/api_index.rst converter/newfiles/conf.py converter/newfiles/contents.rst converter/newfiles/dist_index.rst converter/newfiles/doc.rst converter/newfiles/doc_intro.rst converter/newfiles/doc_markup.rst converter/newfiles/doc_rest.rst converter/newfiles/doc_sphinx.rst converter/newfiles/doc_style.rst converter/newfiles/ext_index.rst converter/newfiles/mac_index.rst converter/newfiles/modules_index.rst converter/newfiles/ref_index.rst converter/newfiles/tutorial_index.rst converter/restwriter.py converter/scanner.py converter/tokenizer.py converter/util.py etc etc/inst.diff sphinx sphinx-build.py sphinx-web.py sphinx/__init__.py 
sphinx/_jinja.py sphinx/addnodes.py sphinx/builder.py sphinx/console.py sphinx/directives.py sphinx/environment.py sphinx/highlighting.py sphinx/htmlhelp.py sphinx/json.py sphinx/refcounting.py sphinx/roles.py sphinx/search.py sphinx/smartypants.py sphinx/stemmer.py sphinx/style sphinx/style/admin.css sphinx/style/comment.png sphinx/style/default.css sphinx/style/doctools.js sphinx/style/file.png sphinx/style/hovercomment.png sphinx/style/interface.js sphinx/style/jquery.js sphinx/style/minus.png sphinx/style/nocomment.png sphinx/style/plus.png sphinx/style/preview.png sphinx/style/rightsidebar.css sphinx/style/searchtools.js sphinx/style/stickysidebar.css sphinx/style/top.png sphinx/style/traditional.css sphinx/templates sphinx/templates/_commentform.html sphinx/templates/admin sphinx/templates/admin/change_password.html sphinx/templates/admin/index.html sphinx/templates/admin/layout.html sphinx/templates/admin/login.html sphinx/templates/admin/manage_users.html sphinx/templates/admin/moderate_comments.html sphinx/templates/commentform.html sphinx/templates/comments.html sphinx/templates/edit.html sphinx/templates/genindex.html sphinx/templates/index.html sphinx/templates/inlinecomments.html sphinx/templates/keyword_not_found.html sphinx/templates/layout.html sphinx/templates/modindex.html sphinx/templates/not_found.html sphinx/templates/page.html sphinx/templates/search.html sphinx/templates/settings.html sphinx/templates/show_source.html sphinx/templates/sidebar.html sphinx/templates/submitted.html sphinx/util.py sphinx/web sphinx/web/__init__.py sphinx/web/admin.py sphinx/web/antispam.py sphinx/web/application.py sphinx/web/database.py sphinx/web/feed.py sphinx/web/mail.py sphinx/web/markup.py sphinx/web/oldurls.py sphinx/web/serve.py sphinx/web/userdb.py sphinx/web/util.py sphinx/web/webconf.py sphinx/web/wsgiutil.py sphinx/writer.py utils utils/check_sources.py utils/pylintrc utils/reindent.py Message-ID: <20070723090229.7C0961E4007@bag.python.org> Author: 
georg.brandl Date: Mon Jul 23 11:02:25 2007 New Revision: 56508 Added: doctools/trunk/Makefile doctools/trunk/README doctools/trunk/TODO doctools/trunk/convert.py doctools/trunk/converter/ doctools/trunk/converter/__init__.py doctools/trunk/converter/console.py doctools/trunk/converter/docnodes.py doctools/trunk/converter/filenamemap.py doctools/trunk/converter/latexparser.py doctools/trunk/converter/newfiles/ doctools/trunk/converter/newfiles/TODO doctools/trunk/converter/newfiles/about.rst doctools/trunk/converter/newfiles/api_index.rst doctools/trunk/converter/newfiles/conf.py doctools/trunk/converter/newfiles/contents.rst doctools/trunk/converter/newfiles/dist_index.rst doctools/trunk/converter/newfiles/doc.rst doctools/trunk/converter/newfiles/doc_intro.rst doctools/trunk/converter/newfiles/doc_markup.rst doctools/trunk/converter/newfiles/doc_rest.rst doctools/trunk/converter/newfiles/doc_sphinx.rst doctools/trunk/converter/newfiles/doc_style.rst doctools/trunk/converter/newfiles/ext_index.rst doctools/trunk/converter/newfiles/mac_index.rst doctools/trunk/converter/newfiles/modules_index.rst doctools/trunk/converter/newfiles/ref_index.rst doctools/trunk/converter/newfiles/tutorial_index.rst doctools/trunk/converter/restwriter.py doctools/trunk/converter/scanner.py doctools/trunk/converter/tokenizer.py doctools/trunk/converter/util.py doctools/trunk/etc/ doctools/trunk/etc/inst.diff doctools/trunk/sphinx/ (props changed) doctools/trunk/sphinx-build.py doctools/trunk/sphinx-web.py doctools/trunk/sphinx/__init__.py doctools/trunk/sphinx/_jinja.py doctools/trunk/sphinx/addnodes.py doctools/trunk/sphinx/builder.py doctools/trunk/sphinx/console.py doctools/trunk/sphinx/directives.py doctools/trunk/sphinx/environment.py doctools/trunk/sphinx/highlighting.py doctools/trunk/sphinx/htmlhelp.py doctools/trunk/sphinx/json.py doctools/trunk/sphinx/refcounting.py doctools/trunk/sphinx/roles.py doctools/trunk/sphinx/search.py doctools/trunk/sphinx/smartypants.py 
doctools/trunk/sphinx/stemmer.py doctools/trunk/sphinx/style/ doctools/trunk/sphinx/style/admin.css doctools/trunk/sphinx/style/comment.png (contents, props changed) doctools/trunk/sphinx/style/default.css doctools/trunk/sphinx/style/doctools.js doctools/trunk/sphinx/style/file.png (contents, props changed) doctools/trunk/sphinx/style/hovercomment.png (contents, props changed) doctools/trunk/sphinx/style/interface.js doctools/trunk/sphinx/style/jquery.js doctools/trunk/sphinx/style/minus.png (contents, props changed) doctools/trunk/sphinx/style/nocomment.png (contents, props changed) doctools/trunk/sphinx/style/plus.png (contents, props changed) doctools/trunk/sphinx/style/preview.png (contents, props changed) doctools/trunk/sphinx/style/rightsidebar.css doctools/trunk/sphinx/style/searchtools.js doctools/trunk/sphinx/style/stickysidebar.css doctools/trunk/sphinx/style/top.png (contents, props changed) doctools/trunk/sphinx/style/traditional.css doctools/trunk/sphinx/templates/ doctools/trunk/sphinx/templates/_commentform.html doctools/trunk/sphinx/templates/admin/ doctools/trunk/sphinx/templates/admin/change_password.html doctools/trunk/sphinx/templates/admin/index.html doctools/trunk/sphinx/templates/admin/layout.html doctools/trunk/sphinx/templates/admin/login.html doctools/trunk/sphinx/templates/admin/manage_users.html doctools/trunk/sphinx/templates/admin/moderate_comments.html doctools/trunk/sphinx/templates/commentform.html doctools/trunk/sphinx/templates/comments.html doctools/trunk/sphinx/templates/edit.html doctools/trunk/sphinx/templates/genindex.html doctools/trunk/sphinx/templates/index.html doctools/trunk/sphinx/templates/inlinecomments.html doctools/trunk/sphinx/templates/keyword_not_found.html doctools/trunk/sphinx/templates/layout.html doctools/trunk/sphinx/templates/modindex.html doctools/trunk/sphinx/templates/not_found.html doctools/trunk/sphinx/templates/page.html doctools/trunk/sphinx/templates/search.html 
doctools/trunk/sphinx/templates/settings.html doctools/trunk/sphinx/templates/show_source.html doctools/trunk/sphinx/templates/sidebar.html doctools/trunk/sphinx/templates/submitted.html doctools/trunk/sphinx/util.py doctools/trunk/sphinx/web/ doctools/trunk/sphinx/web/__init__.py doctools/trunk/sphinx/web/admin.py doctools/trunk/sphinx/web/antispam.py doctools/trunk/sphinx/web/application.py doctools/trunk/sphinx/web/database.py doctools/trunk/sphinx/web/feed.py doctools/trunk/sphinx/web/mail.py doctools/trunk/sphinx/web/markup.py doctools/trunk/sphinx/web/oldurls.py doctools/trunk/sphinx/web/serve.py doctools/trunk/sphinx/web/userdb.py doctools/trunk/sphinx/web/util.py doctools/trunk/sphinx/web/webconf.py doctools/trunk/sphinx/web/wsgiutil.py doctools/trunk/sphinx/writer.py doctools/trunk/utils/ doctools/trunk/utils/check_sources.py (contents, props changed) doctools/trunk/utils/pylintrc doctools/trunk/utils/reindent.py (contents, props changed) Log: Initial import of the doc tools. Added: doctools/trunk/Makefile ============================================================================== --- (empty file) +++ doctools/trunk/Makefile Mon Jul 23 11:02:25 2007 @@ -0,0 +1,24 @@ +PYTHON ?= python + +export PYTHONPATH = $(shell echo "$$PYTHONPATH"):./sphinx + +.PHONY: all check clean clean-pyc pylint reindent testserver + +all: clean-pyc check + +check: + @$(PYTHON) utils/check_sources.py -i sphinx/style/jquery.js sphinx + @$(PYTHON) utils/check_sources.py converter + +clean: clean-pyc + +clean-pyc: + find . -name '*.pyc' -exec rm -f {} + + find . -name '*.pyo' -exec rm -f {} + + find . -name '*~' -exec rm -f {} + + +pylint: + @pylint --rcfile utils/pylintrc sphinx converter + +reindent: + @$(PYTHON) utils/reindent.py -r -B . 
Added: doctools/trunk/README ============================================================================== --- (empty file) +++ doctools/trunk/README Mon Jul 23 11:02:25 2007 @@ -0,0 +1,79 @@ +py-rest-doc +=========== + +This sandbox project is about moving the official Python documentation +to reStructuredText. + + +What you need to know +--------------------- + +This project uses Python 2.5 features, so you'll need a working Python +2.5 setup. + +If you want code highlighting, you need Pygments >= 0.8, easily +installable from PyPI. Jinja, the template engine, is included as a +SVN external. + +For the rest of this document, let's assume that you have a Python +checkout (you need the 2.6 line, i.e. the trunk) in ~/devel/python and +this checkout in the current directory. + +To convert the LaTeX doc to reST, you first have to apply the patch in +``etc/inst.diff`` to the ``inst/inst.tex`` LaTeX file in the Python +checkout:: + + patch -d ~/devel/python/Doc -p0 < etc/inst.diff + +Then, create a target directory for the reST sources and run the +converter script:: + + mkdir sources + python convert.py ~/devel/python/Doc sources + +This will convert all LaTeX sources to reST files in the ``sources`` +directory. + +The ``sources`` directory contains a ``conf.py`` file which contains +general configuration for the build process, such as the Python +version that should be shown, or the date format for "last updated on" +notes. + + +Building the HTML version +------------------------- + +Then, create a target directory and run :: + + mkdir build-html + python sphinx-build.py -b html sources build-html + +This will create HTML files in the ``build-html`` directory. + +The ``build-html`` directory will also contain a ``.doctrees`` +directory, which caches pickles containing the docutils doctrees for +all source files, as well as an ``environment.pickle`` file that +collects all meta-information and data that's needed to +cross-reference the sources and generate indices. 
+ + +Running the online (web) version +-------------------------------- + +First, you need to build the source with the "web" builder:: + + mkdir build-web + python sphinx-build.py -b web sources build-web + +This will create files with pickled contents for the web application +in the target directory. + +Then, you can run :: + + python sphinx-web.py build-web + +which will start a webserver using wsgiref on ``localhost:3000``. The +web application has a configuration file ``build-web/webconf.py``, +where you can configure the server and port for the application as +well as different other settings specific to the web app. + Added: doctools/trunk/TODO ============================================================================== --- (empty file) +++ doctools/trunk/TODO Mon Jul 23 11:02:25 2007 @@ -0,0 +1,13 @@ +Global TODO +=========== + +- discuss and debug comments system +- write new Makefile, handle automatic version info and checkout +- write a "printable" builder (export to latex, most probably) +- discuss the default role +- discuss lib -> ref section move +- prepare for databases other than sqlite for comments +- look at the old tools/ scripts, what functionality should be rewritten +- add search via Xapian? +- optionally have a contents tree view in the sidebar (AJAX based)? + Added: doctools/trunk/convert.py ============================================================================== --- (empty file) +++ doctools/trunk/convert.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +""" + Convert the Python documentation to Sphinx + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + :copyright: 2007 by Georg Brandl. + :license: Python license. 
+""" + +import sys +import os + +from converter import convert_dir + +if __name__ == '__main__': + try: + rootdir = sys.argv[1] + destdir = os.path.abspath(sys.argv[2]) + except IndexError: + print "usage: convert.py docrootdir destdir" + sys.exit() + + assert os.path.isdir(os.path.join(rootdir, 'texinputs')) + os.chdir(rootdir) + convert_dir(destdir, *sys.argv[3:]) Added: doctools/trunk/converter/__init__.py ============================================================================== --- (empty file) +++ doctools/trunk/converter/__init__.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,144 @@ +# -*- coding: utf-8 -*- +""" + Documentation converter - high level functions + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + :copyright: 2007 by Georg Brandl. + :license: Python license. +""" + +import sys +import os +import glob +import shutil +import codecs +from os import path + +from .tokenizer import Tokenizer +from .latexparser import DocParser +from .restwriter import RestWriter +from .filenamemap import (fn_mapping, copyfiles_mapping, newfiles_mapping, + rename_mapping, dirs_to_make, toctree_mapping, + amendments_mapping) +from .console import red, green + +def convert_file(infile, outfile, doraise=True, splitchap=False, + toctree=None, deflang=None, labelprefix=''): + inf = codecs.open(infile, 'r', 'latin1') + p = DocParser(Tokenizer(inf.read()).tokenize(), infile) + if not splitchap: + outf = codecs.open(outfile, 'w', 'utf-8') + else: + outf = None + r = RestWriter(outf, splitchap, toctree, deflang, labelprefix) + try: + r.write_document(p.parse()) + if splitchap: + for i, chapter in enumerate(r.chapters[1:]): + coutf = codecs.open('%s/%d_%s' % ( + path.dirname(outfile), i+1, path.basename(outfile)), + 'w', 'utf-8') + coutf.write(chapter.getvalue()) + coutf.close() + else: + outf.close() + return 1, r.warnings + except Exception, err: + if doraise: + raise + return 0, str(err) + + +def convert_dir(outdirname, *args): + # make directories + for dirname in 
dirs_to_make: + try: + os.mkdir(path.join(outdirname, dirname)) + except OSError: + pass + + # copy files (currently only non-tex includes) + for oldfn, newfn in copyfiles_mapping.iteritems(): + newpathfn = path.join(outdirname, newfn) + globfns = glob.glob(oldfn) + if len(globfns) == 1 and not path.isdir(newpathfn): + shutil.copyfile(globfns[0], newpathfn) + else: + for globfn in globfns: + shutil.copyfile(globfn, path.join(newpathfn, + path.basename(globfn))) + + # convert tex files + # "doc" is not converted. It must be rewritten anyway. + for subdir in ('api', 'dist', 'ext', 'inst', 'commontex', + 'lib', 'mac', 'ref', 'tut', 'whatsnew'): + if args and subdir not in args: + continue + if subdir not in fn_mapping: + continue + newsubdir = fn_mapping[subdir]['__newname__'] + deflang = fn_mapping[subdir].get('__defaulthighlightlang__') + labelprefix = fn_mapping[subdir].get('__labelprefix__', '') + for filename in sorted(os.listdir(subdir)): + if not filename.endswith('.tex'): + continue + filename = filename[:-4] # strip extension + newname = fn_mapping[subdir][filename] + if newname is None: + continue + if newname.endswith(':split'): + newname = newname[:-6] + splitchap = True + else: + splitchap = False + if '/' not in newname: + outfilename = path.join(outdirname, newsubdir, newname + '.rst') + else: + outfilename = path.join(outdirname, newname + '.rst') + toctree = toctree_mapping.get(path.join(subdir, filename)) + infilename = path.join(subdir, filename + '.tex') + print green(infilename), + success, state = convert_file(infilename, outfilename, False, + splitchap, toctree, deflang, labelprefix) + if not success: + print red("ERROR:") + print red(" " + state) + else: + if state: + print "warnings:" + for warning in state: + print " " + warning + + # rename files, e.g. 
splitted ones + for oldfn, newfn in rename_mapping.iteritems(): + try: + if newfn is None: + os.unlink(path.join(outdirname, oldfn)) + else: + os.rename(path.join(outdirname, oldfn), + path.join(outdirname, newfn)) + except OSError, err: + if err.errno == 2: + continue + raise + + # copy new files + srcdirname = path.join(path.dirname(__file__), 'newfiles') + for fn, newfn in newfiles_mapping.iteritems(): + shutil.copyfile(path.join(srcdirname, fn), + path.join(outdirname, newfn)) + + # make amendments + for newfn, (pre, post) in amendments_mapping.iteritems(): + fn = path.join(outdirname, newfn) + try: + ft = open(fn).read() + except Exception, err: + print "Error making amendments to %s: %s" % (newfn, err) + continue + else: + fw = open(fn, 'w') + fw.write(pre) + fw.write(ft) + fw.write(post) + fw.close() Added: doctools/trunk/converter/console.py ============================================================================== --- (empty file) +++ doctools/trunk/converter/console.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,101 @@ +# -*- coding: utf-8 -*- +""" + Console utils + ~~~~~~~~~~~~~ + + Format colored console output. + + :copyright: 1998-2004 by the Gentoo Foundation. + :copyright: 2006-2007 by Georg Brandl. + :license: GNU GPL. +""" + +esc_seq = "\x1b[" + +codes = {} +codes["reset"] = esc_seq + "39;49;00m" + +codes["bold"] = esc_seq + "01m" +codes["faint"] = esc_seq + "02m" +codes["standout"] = esc_seq + "03m" +codes["underline"] = esc_seq + "04m" +codes["blink"] = esc_seq + "05m" +codes["overline"] = esc_seq + "06m" # Who made this up? Seriously. 
+ +ansi_color_codes = [] +for x in xrange(30, 38): + ansi_color_codes.append("%im" % x) + ansi_color_codes.append("%i;01m" % x) + +rgb_ansi_colors = [ + '0x000000', '0x555555', '0xAA0000', '0xFF5555', + '0x00AA00', '0x55FF55', '0xAA5500', '0xFFFF55', + '0x0000AA', '0x5555FF', '0xAA00AA', '0xFF55FF', + '0x00AAAA', '0x55FFFF', '0xAAAAAA', '0xFFFFFF' +] + +for x in xrange(len(rgb_ansi_colors)): + codes[rgb_ansi_colors[x]] = esc_seq + ansi_color_codes[x] + +del x + +codes["black"] = codes["0x000000"] +codes["darkgray"] = codes["0x555555"] + +codes["red"] = codes["0xFF5555"] +codes["darkred"] = codes["0xAA0000"] + +codes["green"] = codes["0x55FF55"] +codes["darkgreen"] = codes["0x00AA00"] + +codes["yellow"] = codes["0xFFFF55"] +codes["brown"] = codes["0xAA5500"] + +codes["blue"] = codes["0x5555FF"] +codes["darkblue"] = codes["0x0000AA"] + +codes["fuchsia"] = codes["0xFF55FF"] +codes["purple"] = codes["0xAA00AA"] + +codes["teal"] = codes["0x00AAAA"] +codes["turquoise"] = codes["0x55FFFF"] + +codes["white"] = codes["0xFFFFFF"] +codes["lightgray"] = codes["0xAAAAAA"] + +codes["darkteal"] = codes["turquoise"] +codes["darkyellow"] = codes["brown"] +codes["fuscia"] = codes["fuchsia"] +codes["white"] = codes["bold"] + +def nocolor(): + "turn off colorization" + for code in codes: + codes[code] = "" + +def reset_color(): + return codes["reset"] + +def colorize(color_key, text): + return codes[color_key] + text + codes["reset"] + +functions_colors = [ + "bold", "white", "teal", "turquoise", "darkteal", + "fuscia", "fuchsia", "purple", "blue", "darkblue", + "green", "darkgreen", "yellow", "brown", + "darkyellow", "red", "darkred" +] + +def create_color_func(color_key): + """ + Return a function that formats its argument in the given color. 
+ """ + def derived_func(text): + return colorize(color_key, text) + return derived_func + +ns = locals() +for c in functions_colors: + ns[c] = create_color_func(c) + +del c, ns Added: doctools/trunk/converter/docnodes.py ============================================================================== --- (empty file) +++ doctools/trunk/converter/docnodes.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,297 @@ +# -*- coding: utf-8 -*- +""" + Python documentation LaTeX parser - document nodes + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + :copyright: 2007 by Georg Brandl. + :license: Python license. +""" + + +class DocNode(object): + """ A node in the document tree. """ + def __repr__(self): + return '%s()' % self.__class__.__name__ + + def __str__(self): + raise RuntimeError('cannot stringify docnodes') + + def walk(self): + return [] + + +class CommentNode(DocNode): + """ A comment. """ + def __init__(self, comment): + assert isinstance(comment, basestring) + self.comment = comment + + def __repr__(self): + return 'CommentNode(%r)' % self.comment + + +class RootNode(DocNode): + """ A whole document. """ + def __init__(self, filename, children): + self.filename = filename + self.children = children + self.params = {} + self.labels = {} + + def __repr__(self): + return 'RootNode(%r, %r)' % (self.filename, self.children) + + def walk(self): + return self.children + + def transform(self): + """ Do restructurings not possible during parsing. """ + def do_descenvs(node): + r""" Make \xxxlines an attribute of the parent xxxdesc node. """ + for subnode in node.walk(): + do_descenvs(subnode) + if isinstance(node, DescEnvironmentNode): + for subnode in node.content.walk(): + if isinstance(subnode, DescLineCommandNode): + node.additional.append((subnode.cmdname, subnode.args)) + + do_descenvs(self) + + +class NodeList(DocNode, list): + """ A list of subnodes. 
""" + def __init__(self, children=None): + list.__init__(self, children or []) + + def __repr__(self): + return 'NL%s' % list.__repr__(self) + + def walk(self): + return self + + def append(self, node): + assert isinstance(node, DocNode) + if type(node) is EmptyNode: + return + elif self and isinstance(node, TextNode) and \ + type(self[-1]) is TextNode: + self[-1].text += node.text + elif type(node) is NodeList: + list.extend(self, node) + elif type(node) is VerbatimNode and self and \ + isinstance(self[-1], ParaSepNode): + # don't allow a ParaSepNode before VerbatimNode + # because this breaks ReST's '::' + self[-1] = node + else: + list.append(self, node) + + def flatten(self): + if len(self) > 1: + return self + elif len(self) == 1: + return self[0] + else: + return EmptyNode() + + +class ParaSepNode(DocNode): + """ A node for paragraph separator. """ + def __repr__(self): + return 'Para' + + +class TextNode(DocNode): + """ A node containing text. """ + def __init__(self, text): + assert isinstance(text, basestring) + self.text = text + + def __repr__(self): + if type(self) is TextNode: + return 'T%r' % self.text + else: + return '%s(%r)' % (self.__class__.__name__, self.text) + + +class EmptyNode(TextNode): + """ An empty node. """ + def __init__(self, *args): + self.text = '' + + +class NbspNode(TextNode): + """ A non-breaking space. """ + def __init__(self, *args): + # this breaks ReST markup (!) 
+ #self.text = u'\N{NO-BREAK SPACE}' + self.text = ' ' + + def __repr__(self): + return 'NBSP' + + +simplecmd_mapping = { + 'ldots': u'...', + 'moreargs': '...', + 'unspecified': '...', + 'ASCII': 'ASCII', + 'UNIX': 'Unix', + 'Unix': 'Unix', + 'POSIX': 'POSIX', + 'LaTeX': 'LaTeX', + 'EOF': 'EOF', + 'Cpp': 'C++', + 'C': 'C', + 'sub': u'--> ', + 'textbackslash': '\\\\', + 'textunderscore': '_', + 'texteuro': u'\N{EURO SIGN}', + 'textasciicircum': u'^', + 'textasciitilde': u'~', + 'textgreater': '>', + 'textless': '<', + 'textbar': '|', + 'backslash': '\\\\', + 'tilde': '~', + 'copyright': u'\N{COPYRIGHT SIGN}', + # \e is mostly inside \code and therefore not escaped. + 'e': '\\', + 'infinity': u'\N{INFINITY}', + 'plusminus': u'\N{PLUS-MINUS SIGN}', + 'leq': u'\N{LESS-THAN OR EQUAL TO}', + 'geq': u'\N{GREATER-THAN OR EQUAL TO}', + 'pi': u'\N{GREEK SMALL LETTER PI}', + 'AA': u'\N{LATIN CAPITAL LETTER A WITH RING ABOVE}', +} + +class SimpleCmdNode(TextNode): + """ A command resulting in simple text. """ + def __init__(self, cmdname, args): + self.text = simplecmd_mapping[cmdname] + + +class BreakNode(DocNode): + """ A line break. """ + def __repr__(self): + return 'BR' + + +class CommandNode(DocNode): + """ A general command. """ + def __init__(self, cmdname, args): + self.cmdname = cmdname + self.args = args + + def __repr__(self): + return '%s(%r, %r)' % (self.__class__.__name__, self.cmdname, self.args) + + def walk(self): + return self.args + + +class DescLineCommandNode(CommandNode): + """ A \\xxxline command. """ + + +class InlineNode(CommandNode): + """ A node with inline markup. """ + def walk(self): + return [] + + +class IndexNode(InlineNode): + """ An index-generating command. """ + def __init__(self, cmdname, args): + self.cmdname = cmdname + # tricky -- this is to make this silent in paragraphs + # while still generating index entries for textonly() + self.args = [] + self.indexargs = args + + +class SectioningNode(CommandNode): + """ A heading node. 
""" + + +class EnvironmentNode(DocNode): + """ An environment. """ + def __init__(self, envname, args, content): + self.envname = envname + self.args = args + self.content = content + + def __repr__(self): + return 'EnvironmentNode(%r, %r, %r)' % (self.envname, + self.args, self.content) + + def walk(self): + return [self.content] + + +class DescEnvironmentNode(EnvironmentNode): + """ An xxxdesc environment. """ + def __init__(self, envname, args, content): + self.envname = envname + self.args = args + self.additional = [] + self.content = content + + def __repr__(self): + return 'DescEnvironmentNode(%r, %r, %r)' % (self.envname, + self.args, self.content) + + +class TableNode(EnvironmentNode): + def __init__(self, numcols, headings, lines): + self.numcols = numcols + self.headings = headings + self.lines = lines + + def __repr__(self): + return 'TableNode(%r, %r, %r)' % (self.numcols, + self.headings, self.lines) + + def walk(self): + return [] + + +class VerbatimNode(DocNode): + """ A verbatim code block. """ + def __init__(self, content): + self.content = content + + def __repr__(self): + return 'VerbatimNode(%r)' % self.content + + +class ListNode(DocNode): + """ A list. """ + def __init__(self, items): + self.items = items + + def __repr__(self): + return '%s(%r)' % (self.__class__.__name__, self.items) + + def walk(self): + return [item[1] for item in self.items] + + +class ItemizeNode(ListNode): + """ An enumeration with bullets. """ + + +class EnumerateNode(ListNode): + """ An enumeration with numbers. """ + + +class DescriptionNode(ListNode): + """ A description list. """ + + +class DefinitionsNode(ListNode): + """ A definition list. """ + + +class ProductionListNode(ListNode): + """ A grammar production list. 
""" Added: doctools/trunk/converter/filenamemap.py ============================================================================== --- (empty file) +++ doctools/trunk/converter/filenamemap.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,632 @@ +# -*- coding: utf-8 -*- +""" + Map LaTeX filenames to ReST filenames + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + :copyright: 2007 by Georg Brandl. + :license: Python license. +""" + +# '' means: use same name, strip prefix if applicable. +# None means: don't translate at all. + +_mapping = { + 'lib': { + '__newname__' : 'modules', + + 'asttable': '', + 'compiler': '', + 'distutils': '', + 'email': '', + 'emailcharsets': 'email.charset', + 'emailencoders': 'email.encoders', + 'emailexc': 'email.errors', + 'emailgenerator': 'email.generator', + 'emailheaders': 'email.header', + 'emailiter': 'email.iterators', + 'emailmessage': 'email.message', + 'emailmimebase': 'email.mime', + 'emailparser': 'email.parser', + 'emailutil': 'email.util', + 'libaifc': '', + 'libanydbm': '', + 'libarray': '', + 'libascii': 'curses.ascii', + 'libast': '', + 'libasynchat': '', + 'libasyncore': '', + 'libatexit': '', + 'libaudioop': '', + 'libbase64': '', + 'libbasehttp': 'basehttpserver', + 'libbastion': '', + 'libbinascii': '', + 'libbinhex': '', + 'libbisect': '', + 'libbltin': '__builtin__', + 'libbsddb': '', + 'libbz2': '', + 'libcalendar': '', + 'libcfgparser': 'configparser', + 'libcgihttp': 'cgihttpserver', + 'libcgi': '', + 'libcgitb': '', + 'libchunk': '', + 'libcmath': '', + 'libcmd': '', + 'libcodecs': '', + 'libcodeop': '', + 'libcode': '', + 'libcollections': '', + 'libcolorsys': '', + 'libcommands': '', + 'libcompileall': '', + 'libcontextlib': '', + 'libcookielib': '', + 'libcookie': '', + 'libcopyreg': 'copy_reg', + 'libcopy': '', + 'libcrypt': '', + 'libcsv': '', + 'libctypes': '', + 'libcursespanel': 'curses.panel', + 'libcurses': '', + 'libdatetime': '', + 'libdbhash': '', + 'libdbm': '', + 'libdecimal': '', + 'libdifflib': '', + 
'libdircache': '', + 'libdis': '', + 'libdl': '', + 'libdoctest': '', + 'libdocxmlrpc': 'docxmlrpcserver', + 'libdumbdbm': '', + 'libdummythreading': 'dummy_threading', + 'libdummythread': 'dummy_thread', + 'liberrno': '', + 'libetree': 'xml.etree.elementtree', + 'libfcntl': '', + 'libfilecmp': '', + 'libfileinput': '', + 'libfnmatch': '', + 'libformatter': '', + 'libfpectl': '', + 'libfpformat': '', + 'libftplib': '', + 'libfunctools': '', + 'libfuture': '__future__', + 'libgc': '', + 'libgdbm': '', + 'libgetopt': '', + 'libgetpass': '', + 'libgettext': '', + 'libglob': '', + 'libgrp': '', + 'libgzip': '', + 'libhashlib': '', + 'libheapq': '', + 'libhmac': '', + 'libhotshot': '', + 'libhtmllib': '', + 'libhtmlparser': '', + 'libhttplib': '', + 'libimageop': '', + 'libimaplib': '', + 'libimgfile': '', + 'libimghdr': '', + 'libimp': '', + 'libinspect': '', + 'libitertools': '', + 'libjpeg': '', + 'libkeyword': '', + 'liblinecache': '', + 'liblocale': '', + 'liblogging': '', + 'libmailbox': '', + 'libmailcap': '', + 'libmain': '__main__', + 'libmarshal': '', + 'libmath': '', + 'libmd5': '', + 'libmhlib': '', + 'libmimetools': '', + 'libmimetypes': '', + 'libmimewriter': '', + 'libmimify': '', + 'libmmap': '', + 'libmodulefinder': '', + 'libmsilib': '', + 'libmsvcrt': '', + 'libmultifile': '', + 'libmutex': '', + 'libnetrc': '', + 'libnew': '', + 'libnis': '', + 'libnntplib': '', + 'liboperator': '', + 'liboptparse': '', + 'libos': '', + 'libossaudiodev': '', + 'libparser': '', + 'libpdb': '', + 'libpickle': '', + 'libpickletools': '', + 'libpipes': '', + 'libpkgutil': '', + 'libplatform': '', + 'libpopen2': '', + 'libpoplib': '', + 'libposixpath': 'os.path', + 'libposix': '', + 'libpprint': '', + 'libprofile': '', + 'libpty': '', + 'libpwd': '', + 'libpyclbr': '', + 'libpycompile': 'py_compile', + 'libpydoc': '', + 'libpyexpat': '', + 'libqueue': '', + 'libquopri': '', + 'librandom': '', + 'libreadline': '', + 'librepr': '', + 'libre': '', + 'libresource': '', + 
'librexec': '', + 'librfc822': '', + 'librlcompleter': '', + 'librobotparser': '', + 'librunpy': '', + 'libsched': '', + 'libselect': '', + 'libsets': '', + 'libsgmllib': '', + 'libsha': '', + 'libshelve': '', + 'libshlex': '', + 'libshutil': '', + 'libsignal': '', + 'libsimplehttp': 'simplehttpserver', + 'libsimplexmlrpc': 'simplexmlrpcserver', + 'libsite': '', + 'libsmtpd': '', + 'libsmtplib': '', + 'libsndhdr': '', + 'libsocket': '', + 'libsocksvr': 'socketserver', + 'libspwd': '', + 'libsqlite3': '', + 'libstat': '', + 'libstatvfs': '', + 'libstringio': '', + 'libstringprep': '', + 'libstring': '', + 'libstruct': '', + 'libsunaudio': '', + 'libsunau': '', + 'libsubprocess': '', + 'libsymbol': '', + 'libsyslog': '', + 'libsys': '', + 'libtabnanny': '', + 'libtarfile': '', + 'libtelnetlib': '', + 'libtempfile': '', + 'libtermios': '', + 'libtest': '', + 'libtextwrap': '', + 'libthreading': '', + 'libthread': '', + 'libtimeit': '', + 'libtime': '', + 'libtokenize': '', + 'libtoken': '', + 'libtraceback': '', + 'libtrace': '', + 'libtty': '', + 'libturtle': '', + 'libtypes': '', + 'libunicodedata': '', + 'libunittest': '', + 'liburllib2': '', + 'liburllib': '', + 'liburlparse': '', + 'libuserdict': '', + 'libuser': '', + 'libuuid': '', + 'libuu': '', + 'libwarnings': '', + 'libwave': '', + 'libweakref': '', + 'libwebbrowser': '', + 'libwhichdb': '', + 'libwinreg': '_winreg', + 'libwinsound': '', + 'libwsgiref': '', + 'libxdrlib': '', + 'libxmllib': '', + 'libxmlrpclib': '', + 'libzipfile': '', + 'libzipimport': '', + 'libzlib': '', + 'tkinter': '', + 'xmldomminidom': 'xml.dom.minidom', + 'xmldompulldom': 'xml.dom.pulldom', + 'xmldom': 'xml.dom', + 'xmletree': 'xml.etree', + 'xmlsaxhandler': 'xml.sax.handler', + 'xmlsaxreader': 'xml.sax.reader', + 'xmlsax': 'xml.sax', + 'xmlsaxutils': 'xml.sax.utils', + 'libal': '', + 'libcd': '', + 'libfl': '', + 'libfm': '', + 'libgl': '', + 'libposixfile': '', + + # specials + 'libundoc': '', + 'libintro': '', + + # -> ref + 
'libconsts': 'reference/consts', + 'libexcs': 'reference/exceptions', + 'libfuncs': 'reference/functions', + 'libobjs': 'reference/objects', + 'libstdtypes': 'reference/stdtypes', + + # mainfiles + 'lib': None, + 'mimelib': None, + + # obsolete + 'libni': None, + 'libcmpcache': None, + 'libcmp': None, + + # chapter overviews + 'fileformats': '', + 'filesys': '', + 'frameworks': '', + 'i18n': '', + 'internet': '', + 'ipc': '', + 'language': '', + 'archiving': '', + 'custominterp': '', + 'datatypes': '', + 'development': '', + 'markup': '', + 'modules': '', + 'netdata': '', + 'numeric': '', + 'persistence': '', + 'windows': '', + 'libsun': '', + 'libmm': '', + 'liballos': '', + 'libcrypto': '', + 'libsomeos': '', + 'libsgi': '', + 'libmisc': '', + 'libpython': '', + 'librestricted': '', + 'libstrings': '', + 'libunix': '', + }, + + 'ref': { + '__newname__': 'reference', + 'ref': None, + 'ref1': 'introduction', + 'ref2': 'lexical_analysis', + 'ref3': 'datamodel', + 'ref4': 'executionmodel', + 'ref5': 'expressions', + 'ref6': 'simple_stmts', + 'ref7': 'compound_stmts', + 'ref8': 'toplevel_components', + }, + + 'tut': { + '__newname__': 'tutorial', + '__labelprefix__': 'tut-', + 'tut': 'tutorial:split', + 'glossary': 'glossary', + }, + + 'api': { + '__newname__': 'c-api', + '__defaulthighlightlang__': 'c', + 'api': None, + + 'abstract': '', + 'concrete': '', + 'exceptions': '', + 'init': '', + 'intro': '', + 'memory': '', + 'newtypes': '', + 'refcounting': '', + 'utilities': '', + 'veryhigh': '', + }, + + 'ext': { + '__newname__': 'extending', + '__defaulthighlightlang__': 'c', + 'ext': None, + + 'building': '', + 'embedding': '', + 'extending': 'extending', + 'newtypes': '', + 'windows': '', + }, + + 'dist': { + '__newname__': 'distutils', + 'dist': 'distutils:split', + 'sysconfig': '', + }, + + 'mac': { + '__newname__': 'macmodules', + 'mac': None, + + 'libaepack': 'aepack', + 'libaetools': 'aetools', + 'libaetypes': 'aetypes', + 'libautogil': 'autogil', + 
'libcolorpicker': 'colorpicker', + 'libframework': 'framework', + 'libgensuitemodule': 'gensuitemodule', + 'libmacic': 'macic', + 'libmacos': 'macos', + 'libmacostools': 'macostools', + 'libmac': 'mac', + 'libmacui': 'macui', + 'libminiae': 'miniae', + 'libscrap': 'scrap', + 'scripting': '', + 'toolbox': '', + 'undoc': '', + 'using': '', + + }, + + 'inst': { + '__newname__': 'install', + '__defaulthighlightlang__': 'none', + 'inst': 'index', + }, + + 'whatsnew': { + '__newname__': 'whatsnew', + 'whatsnew20': '2.0', + 'whatsnew21': '2.1', + 'whatsnew22': '2.2', + 'whatsnew23': '2.3', + 'whatsnew24': '2.4', + 'whatsnew25': '2.5', + 'whatsnew26': '2.6', + }, + + 'commontex': { + '__newname__': '', + 'boilerplate': None, + 'patchlevel': None, + 'copyright': '', + 'license': '', + 'reportingbugs': 'bugs', + }, +} + +fn_mapping = {} + +for dir, files in _mapping.iteritems(): + newmap = fn_mapping[dir] = {} + for fn in files: + if not fn.startswith('_') and files[fn] == '': + if fn.startswith(dir): + newmap[fn] = fn[len(dir):] + else: + newmap[fn] = fn + else: + newmap[fn] = files[fn] + + +# new directories to create +dirs_to_make = [ + 'c-api', + 'data', + 'distutils', + 'documenting', + 'extending', + 'includes', + 'includes/sqlite3', + 'install', + 'macmodules', + 'modules', + 'reference', + 'tutorial', + 'whatsnew', +] + +# includefiles for \verbatiminput and \input +includes_mapping = { + '../../Parser/Python.asdl': None, # XXX + '../../Lib/test/exception_hierarchy.txt': None, + 'emailmessage': 'email.message.rst', + 'emailparser': 'email.parser.rst', + 'emailgenerator': 'email.generator.rst', + 'emailmimebase': 'email.mime.rst', + 'emailheaders': 'email.header.rst', + 'emailcharsets': 'email.charset.rst', + 'emailencoders': 'email.encoders.rst', + 'emailexc': 'email.errors.rst', + 'emailutil': 'email.util.rst', + 'emailiter': 'email.iterators.rst', +} + +# new files to copy from converter/newfiles +newfiles_mapping = { + 'conf.py': 'conf.py', + 'TODO': 'TODO', + + 
'ref_index.rst': 'reference/index.rst', + 'tutorial_index.rst': 'tutorial/index.rst', + 'modules_index.rst': 'modules/index.rst', + 'mac_index.rst': 'macmodules/index.rst', + 'ext_index.rst': 'extending/index.rst', + 'api_index.rst': 'c-api/index.rst', + 'dist_index.rst': 'distutils/index.rst', + 'contents.rst': 'contents.rst', + 'about.rst': 'about.rst', + + 'doc.rst': 'documenting/index.rst', + 'doc_intro.rst': 'documenting/intro.rst', + 'doc_style.rst': 'documenting/style.rst', + 'doc_sphinx.rst': 'documenting/sphinx.rst', + 'doc_rest.rst': 'documenting/rest.rst', + 'doc_markup.rst': 'documenting/markup.rst', +} + +# copy files from the old doc tree +copyfiles_mapping = { + 'api/refcounts.dat': 'data', + 'lib/email-*.py': 'includes', + 'lib/minidom-example.py': 'includes', + 'lib/tzinfo-examples.py': 'includes', + 'lib/sqlite3/*.py': 'includes/sqlite3', + 'ext/*.c': 'includes', + 'ext/*.py': 'includes', + 'commontex/typestruct.h': 'includes', +} + +# files to rename +rename_mapping = { + 'tutorial/1_tutorial.rst': None, # delete + 'tutorial/2_tutorial.rst': 'tutorial/appetite.rst', + 'tutorial/3_tutorial.rst': 'tutorial/interpreter.rst', + 'tutorial/4_tutorial.rst': 'tutorial/introduction.rst', + 'tutorial/5_tutorial.rst': 'tutorial/controlflow.rst', + 'tutorial/6_tutorial.rst': 'tutorial/datastructures.rst', + 'tutorial/7_tutorial.rst': 'tutorial/modules.rst', + 'tutorial/8_tutorial.rst': 'tutorial/inputoutput.rst', + 'tutorial/9_tutorial.rst': 'tutorial/errors.rst', + 'tutorial/10_tutorial.rst': 'tutorial/classes.rst', + 'tutorial/11_tutorial.rst': 'tutorial/stdlib.rst', + 'tutorial/12_tutorial.rst': 'tutorial/stdlib2.rst', + 'tutorial/13_tutorial.rst': 'tutorial/whatnow.rst', + 'tutorial/14_tutorial.rst': 'tutorial/interactive.rst', + 'tutorial/15_tutorial.rst': 'tutorial/floatingpoint.rst', + 'tutorial/16_tutorial.rst': None, # delete + + 'distutils/1_distutils.rst': 'distutils/introduction.rst', + 'distutils/2_distutils.rst': 'distutils/setupscript.rst', + 
'distutils/3_distutils.rst': 'distutils/configfile.rst', + 'distutils/4_distutils.rst': 'distutils/sourcedist.rst', + 'distutils/5_distutils.rst': 'distutils/builtdist.rst', + 'distutils/6_distutils.rst': 'distutils/packageindex.rst', + 'distutils/7_distutils.rst': 'distutils/uploading.rst', + 'distutils/8_distutils.rst': 'distutils/examples.rst', + 'distutils/9_distutils.rst': 'distutils/extending.rst', + 'distutils/10_distutils.rst': 'distutils/commandref.rst', + 'distutils/11_distutils.rst': 'distutils/apiref.rst', +} + +# toctree entries +toctree_mapping = { + 'mac/scripting': ['gensuitemodule', 'aetools', 'aepack', 'aetypes', 'miniae'], + 'mac/toolbox': ['colorpicker'], + 'lib/libstrings': ['string', 're', 'struct', 'difflib', 'stringio', 'textwrap', + 'codecs', 'unicodedata', 'stringprep', 'fpformat'], + 'lib/datatypes': ['datetime', 'calendar', 'collections', 'heapq', 'bisect', + 'array', 'sets', 'sched', 'mutex', 'queue', 'weakref', + 'userdict', 'types', 'new', 'copy', 'pprint', 'repr'], + 'lib/numeric': ['math', 'cmath', 'decimal', 'random', 'itertools', 'functools', + 'operator'], + 'lib/netdata': ['email', 'mailcap', 'mailbox', 'mhlib', 'mimetools', 'mimetypes', + 'mimewriter', 'mimify', 'multifile', 'rfc822', + 'base64', 'binhex', 'binascii', 'quopri', 'uu'], + 'lib/markup': ['htmlparser', 'sgmllib', 'htmllib', 'pyexpat', 'xml.dom', + 'xml.dom.minidom', 'xml.dom.pulldom', 'xml.sax', 'xml.sax.handler', + 'xml.sax.utils', 'xml.sax.reader', 'xml.etree.elementtree'], + 'lib/fileformats': ['csv', 'configparser', 'robotparser', 'netrc', 'xdrlib'], + 'lib/libcrypto': ['hashlib', 'hmac', 'md5', 'sha'], + 'lib/filesys': ['os.path', 'fileinput', 'stat', 'statvfs', 'filecmp', + 'tempfile', 'glob', 'fnmatch', 'linecache', 'shutil', 'dircache'], + 'lib/archiving': ['zlib', 'gzip', 'bz2', 'zipfile', 'tarfile'], + 'lib/persistence': ['pickle', 'copy_reg', 'shelve', 'marshal', 'anydbm', + 'whichdb', 'dbm', 'gdbm', 'dbhash', 'bsddb', 'dumbdbm', + 'sqlite3'], + 
'lib/liballos': ['os', 'time', 'optparse', 'getopt', 'logging', 'getpass', + 'curses', 'curses.ascii', 'curses.panel', 'platform', + 'errno', 'ctypes'], + 'lib/libsomeos': ['select', 'thread', 'threading', 'dummy_thread', 'dummy_threading', + 'mmap', 'readline', 'rlcompleter'], + 'lib/libunix': ['posix', 'pwd', 'spwd', 'grp', 'crypt', 'dl', 'termios', 'tty', + 'pty', 'fcntl', 'pipes', 'posixfile', 'resource', 'nis', + 'syslog', 'commands'], + 'lib/ipc': ['subprocess', 'socket', 'signal', 'popen2', 'asyncore', 'asynchat'], + 'lib/internet': ['webbrowser', 'cgi', 'cgitb', 'wsgiref', 'urllib', 'urllib2', + 'httplib', 'ftplib', 'poplib', 'imaplib', + 'nntplib', 'smtplib', 'smtpd', 'telnetlib', 'uuid', 'urlparse', + 'socketserver', 'basehttpserver', 'simplehttpserver', + 'cgihttpserver', 'cookielib', 'cookie', 'xmlrpclib', + 'simplexmlrpcserver', 'docxmlrpcserver'], + 'lib/libmm': ['audioop', 'imageop', 'aifc', 'sunau', 'wave', 'chunk', + 'colorsys', 'imghdr', 'sndhdr', 'ossaudiodev'], + 'lib/i18n': ['gettext', 'locale'], + 'lib/frameworks': ['cmd', 'shlex'], + 'lib/development': ['pydoc', 'doctest', 'unittest', 'test'], + 'lib/libpython': ['sys', '__builtin__', '__main__', 'warnings', 'contextlib', + 'atexit', 'traceback', '__future__', 'gc', 'inspect', + 'site', 'user', 'fpectl'], + 'lib/custominterp': ['code', 'codeop'], + 'lib/librestricted': ['rexec', 'bastion'], + 'lib/modules': ['imp', 'zipimport', 'pkgutil', 'modulefinder', 'runpy'], + 'lib/language': ['parser', 'symbol', 'token', 'keyword', 'tokenize', + 'tabnanny', 'pyclbr', 'py_compile', 'compileall', 'dis', + 'pickletools', 'distutils'], + 'lib/compiler': ['ast'], + 'lib/libmisc': ['formatter'], + 'lib/libsgi': ['al', 'cd', 'fl', 'fm', 'gl', 'imgfile', 'jpeg'], + 'lib/libsun': ['sunaudio'], + 'lib/windows': ['msilib', 'msvcrt', '_winreg', 'winsound'], +} + +# map sourcefilename to [pre, post] +amendments_mapping = { + 'license.rst': ['''\ +.. 
highlightlang:: none + +******************* +History and License +******************* + +''', ''], + + 'bugs.rst': ['''\ +************** +Reporting Bugs +************** + +''', ''], + + 'copyright.rst': ['''\ +********* +Copyright +********* + +''', ''], + + 'install/index.rst': ['''\ +.. _install-index: + +''', ''], +} Added: doctools/trunk/converter/latexparser.py ============================================================================== --- (empty file) +++ doctools/trunk/converter/latexparser.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,697 @@ +# -*- coding: utf-8 -*- +""" + Python documentation LaTeX file parser + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + For more documentation, look into the ``restwriter.py`` file. + + :copyright: 2007 by Georg Brandl. + :license: Python license. +""" + +from .docnodes import CommentNode, RootNode, NodeList, ParaSepNode, \ + TextNode, EmptyNode, NbspNode, SimpleCmdNode, BreakNode, CommandNode, \ + DescLineCommandNode, InlineNode, IndexNode, SectioningNode, \ + EnvironmentNode, DescEnvironmentNode, TableNode, VerbatimNode, \ + ListNode, ItemizeNode, EnumerateNode, DescriptionNode, \ + DefinitionsNode, ProductionListNode + +from .util import umlaut, empty + + +class ParserError(Exception): + def __init__(self, msg, lineno): + Exception.__init__(self, msg, lineno) + + def __str__(self): + return '%s, line %s' % self.args + + +def generic_command(name, argspec, nodetype=CommandNode): + def handle(self): + args = self.parse_args('\\'+name, argspec) + return nodetype(name, args) + return handle + +def sectioning_command(name): + """ Special handling for sectioning commands: move labels directly following + a sectioning command before it, as required by reST. 
""" + def handle(self): + args = self.parse_args('\\'+name, 'M') + snode = SectioningNode(name, args) + for l, t, v, r in self.tokens: + if t == 'command' and v == 'label': + largs = self.parse_args('\\label', 'T') + snode.args[0] = NodeList([snode.args[0], CommandNode('label', largs)]) + break + if t == 'text': + if not v.strip(): + # discard whitespace; after a section that's no problem + continue + self.tokens.push((l, t, v, r)) + break + # no label followed + return snode + return handle + +def generic_environment(name, argspec, nodetype=EnvironmentNode): + def handle(self): + args = self.parse_args(name, argspec) + return nodetype(name, args, self.parse_until(self.environment_end)) + return handle + + +class DocParserMeta(type): + def __init__(cls, name, bases, dict): + for nodetype, commands in cls.generic_commands.iteritems(): + for cmdname, argspec in commands.iteritems(): + setattr(cls, 'handle_' + cmdname, + generic_command(cmdname, argspec, nodetype)) + + for cmdname in cls.sectioning_commands: + setattr(cls, 'handle_' + cmdname, sectioning_command(cmdname)) + + for nodetype, envs in cls.generic_envs.iteritems(): + for envname, argspec in envs.iteritems(): + setattr(cls, 'handle_%s_env' % envname, + generic_environment(envname, argspec, nodetype)) + + +class DocParser(object): + """ Parse a Python documentation LaTeX file. 
""" + __metaclass__ = DocParserMeta + + def __init__(self, tokenstream, filename): + self.tokens = tokenstream + self.filename = filename + + def parse(self): + self.rootnode = RootNode(self.filename, None) + self.rootnode.children = self.parse_until(None) + self.rootnode.transform() + return self.rootnode + + def parse_until(self, condition=None, endatbrace=False): + nodelist = NodeList() + bracelevel = 0 + for l, t, v, r in self.tokens: + if condition and condition(t, v, bracelevel): + return nodelist.flatten() + if t == 'command': + if len(v) == 1 and not v.isalpha(): + nodelist.append(self.handle_special_command(v)) + continue + handler = getattr(self, 'handle_' + v, None) + if not handler: + raise ParserError('no handler for \\%s command' % v, l) + nodelist.append(handler()) + elif t == 'bgroup': + bracelevel += 1 + elif t == 'egroup': + if bracelevel == 0 and endatbrace: + return nodelist.flatten() + bracelevel -= 1 + elif t == 'comment': + nodelist.append(CommentNode(v)) + elif t == 'tilde': + nodelist.append(NbspNode()) + elif t == 'mathmode': + pass # ignore math mode + elif t == 'parasep': + nodelist.append(ParaSepNode()) + else: + # includes 'boptional' and 'eoptional' which don't have a + # special meaning in text + nodelist.append(TextNode(v)) + return nodelist.flatten() + + def parse_args(self, cmdname, argspec): + """ Helper to parse arguments of a command. 
""" + # argspec: M = mandatory, T = mandatory, check text-only, + # O = optional, Q = optional, check text-only + args = [] + def optional_end(type, value, bracelevel): + return type == 'eoptional' and bracelevel == 0 + + for i, c in enumerate(argspec): + assert c in 'OMTQ' + nextl, nextt, nextv, nextr = self.tokens.pop() + while nextt == 'comment' or (nextt == 'text' and nextv.isspace()): + nextl, nextt, nextv, nextr = self.tokens.pop() + + if c in 'OQ': + if nextt == 'boptional': + arg = self.parse_until(optional_end) + if c == 'Q' and not isinstance(arg, TextNode): + raise ParserError('%s: argument %d must be text only' % + (cmdname, i), nextl) + args.append(arg) + else: + # not given + args.append(EmptyNode()) + self.tokens.push((nextl, nextt, nextv, nextr)) + continue + + if nextt == 'bgroup': + arg = self.parse_until(None, endatbrace=True) + if c == 'T' and not isinstance(arg, TextNode): + raise ParserError('%s: argument %d must be text only' % + (cmdname, i), nextl) + args.append(arg) + else: + if nextt != 'text': + raise ParserError('%s: non-grouped non-text arguments not ' + 'supported' % cmdname, nextl) + args.append(TextNode(nextv[0])) + self.tokens.push((nextl, nextt, nextv[1:], nextr[1:])) + return args + + sectioning_commands = [ + 'chapter', + 'chapter*', + 'section', + 'subsection', + 'subsubsection', + 'paragraph', + ] + + generic_commands = { + CommandNode: { + 'label': 'T', + + 'localmoduletable': '', + 'verbatiminput': 'T', + 'input': 'T', + 'centerline': 'M', + + # Pydoc specific commands + 'versionadded': 'OT', + 'versionchanged': 'OT', + 'deprecated': 'TM', + 'XX' 'X': 'M', # used in dist.tex ;) + + # module-specific + 'declaremodule': 'QTT', + 'platform': 'T', + 'modulesynopsis': 'M', + 'moduleauthor': 'TT', + 'sectionauthor': 'TT', + + # reference lists + 'seelink': 'TMM', + 'seemodule': 'QTM', + 'seepep': 'TMM', + 'seerfc': 'TTM', + 'seetext': 'M', + 'seetitle': 'OMM', + 'seeurl': 'MM', + }, + + DescLineCommandNode: { + # additional items 
for ...desc + 'funcline': 'TM', + 'funclineni': 'TM', + 'methodline': 'QTM', + 'methodlineni': 'QTM', + 'memberline': 'QT', + 'memberlineni': 'QT', + 'dataline': 'T', + 'datalineni': 'T', + 'cfuncline': 'MTM', + 'cmemberline': 'TTT', + 'csimplemacroline': 'T', + 'ctypeline': 'QT', + 'cvarline': 'TT', + }, + + InlineNode: { + # specials + 'footnote': 'M', + 'frac': 'TT', + 'refmodule': 'QT', + 'citetitle': 'QT', + 'ulink': 'MT', + 'url': 'M', + + # mapped to normal + 'textrm': 'M', + 'b': 'M', + 'email': 'M', # email addresses are recognized by ReST + + # mapped to **strong** + 'textbf': 'M', + 'strong': 'M', + + # mapped to *emphasized* + 'textit': 'M', + 'emph': 'M', + + # mapped to ``code`` + 'bfcode': 'M', + 'code': 'M', + 'samp': 'M', + 'character': 'M', + 'texttt': 'M', + + # mapped to `default role` + 'var': 'M', + + # mapped to [brackets] + 'optional': 'M', + + # mapped to :role:`text` + 'cdata': 'M', + 'cfunction': 'M', # -> :cfunc: + 'class': 'M', + 'command': 'M', + 'constant': 'M', # -> :const: + 'csimplemacro': 'M', # -> :cmacro: + 'ctype': 'M', + 'data': 'M', # NEW + 'dfn': 'M', + 'envvar': 'M', + 'exception': 'M', # -> :exc: + 'file': 'M', + 'filenq': 'M', + 'filevar': 'M', + 'function': 'M', # -> :func: + 'grammartoken': 'M', # -> :token: + 'guilabel': 'M', + 'kbd': 'M', + 'keyword': 'M', + 'mailheader': 'M', + 'makevar': 'M', + 'manpage': 'MM', + 'member': 'M', + 'menuselection': 'M', + 'method': 'M', # -> :meth: + 'mimetype': 'M', + 'module': 'M', # -> :mod: + 'newsgroup': 'M', + 'option': 'M', + 'pep': 'M', + 'program': 'M', + 'programopt': 'M', # -> :option: + 'longprogramopt': 'M', # -> :option: + 'ref': 'T', + 'regexp': 'M', + 'rfc': 'M', + 'token': 'M', + + 'NULL': '', + # these are defined via substitutions + 'shortversion': '', + 'version': '', + 'today': '', + }, + + SimpleCmdNode: { + # these are directly mapped to text + 'AA': '', # A as in Angstrom + 'ASCII': '', + 'C': '', + 'Cpp': '', + 'EOF': '', + 'LaTeX': '', + 'POSIX': '', + 
'UNIX': '', + 'Unix': '', + 'backslash': '', + 'copyright': '', + 'e': '', # backslash + 'geq': '', + 'infinity': '', + 'ldots': '', + 'leq': '', + 'moreargs': '', + 'pi': '', + 'plusminus': '', + 'sub': '', # menu separator + 'textbackslash': '', + 'textunderscore': '', + 'texteuro': '', + 'textasciicircum': '', + 'textasciitilde': '', + 'textgreater': '', + 'textless': '', + 'textbar': '', + 'tilde': '', + 'unspecified': '', + }, + + IndexNode: { + 'bifuncindex': 'T', + 'exindex': 'T', + 'kwindex': 'T', + 'obindex': 'T', + 'opindex': 'T', + 'refmodindex': 'T', + 'refexmodindex': 'T', + 'refbimodindex': 'T', + 'refstmodindex': 'T', + 'stindex': 'T', + 'index': 'M', + 'indexii': 'TT', + 'indexiii': 'TTT', + 'indexiv': 'TTTT', + 'ttindex': 'T', + 'withsubitem': 'TM', + }, + + # These can be safely ignored + EmptyNode: { + 'setindexsubitem': 'T', + 'tableofcontents': '', + 'makeindex': '', + 'makemodindex': '', + 'maketitle': '', + 'appendix': '', + 'documentclass': 'OM', + 'usepackage': 'OM', + 'noindent': '', + 'protect': '', + 'ifhtml': '', + 'fi': '', + }, + } + + generic_envs = { + EnvironmentNode: { + # generic LaTeX environments + 'abstract': '', + 'quote': '', + 'quotation': '', + + 'notice': 'Q', + 'seealso': '', + 'seealso*': '', + }, + + DescEnvironmentNode: { + # information units + 'datadesc': 'T', + 'datadescni': 'T', + 'excclassdesc': 'TM', + 'excdesc': 'T', + 'funcdesc': 'TM', + 'funcdescni': 'TM', + 'classdesc': 'TM', + 'classdesc*': 'T', + 'memberdesc': 'QT', + 'memberdescni': 'QT', + 'methoddesc': 'QMM', + 'methoddescni': 'QMM', + 'opcodedesc': 'TT', + + 'cfuncdesc': 'MTM', + 'cmemberdesc': 'TTT', + 'csimplemacrodesc': 'T', + 'ctypedesc': 'QT', + 'cvardesc': 'TT', + }, + } + + # ------------------------- special handlers ----------------------------- + + def handle_special_command(self, cmdname): + if cmdname in '{}%$^#&_ ': + # these are just escapes for special LaTeX commands + return TextNode(cmdname) + elif cmdname in '\'`~"c': + # accents and 
umlauts + nextl, nextt, nextv, nextr = self.tokens.next() + if nextt == 'bgroup': + _, nextt, _, _ = self.tokens.next() + if nextt != 'egroup': + raise ParserError('wrong argtype for \\%s' % cmdname, nextl) + return TextNode(cmdname) + if nextt != 'text': + # not nice, but {\~} = ~ + self.tokens.push((nextl, nextt, nextv, nextr)) + return TextNode(cmdname) + c = umlaut(cmdname, nextv[0]) + self.tokens.push((nextl, nextt, nextv[1:], nextr[1:])) + return TextNode(c) + elif cmdname == '\\': + return BreakNode() + raise ParserError('no handler for \\%s command' % cmdname, + self.tokens.peek()[0]) + + def handle_begin(self): + envname, = self.parse_args('begin', 'T') + handler = getattr(self, 'handle_%s_env' % envname.text, None) + if not handler: + raise ParserError('no handler for %s environment' % envname.text, + self.tokens.peek()[0]) + return handler() + + # ------------------------- command handlers ----------------------------- + + def mk_metadata_handler(self, name, mdname=None): + if mdname is None: + mdname = name + def handler(self): + data, = self.parse_args('\\'+name, 'M') + self.rootnode.params[mdname] = data + return EmptyNode() + return handler + + handle_title = mk_metadata_handler(None, 'title') + handle_author = mk_metadata_handler(None, 'author') + handle_authoraddress = mk_metadata_handler(None, 'authoraddress') + handle_date = mk_metadata_handler(None, 'date') + handle_release = mk_metadata_handler(None, 'release') + handle_setshortversion = mk_metadata_handler(None, 'setshortversion', + 'shortversion') + handle_setreleaseinfo = mk_metadata_handler(None, 'setreleaseinfo', + 'releaseinfo') + + def handle_note(self): + note = self.parse_args('\\note', 'M')[0] + return EnvironmentNode('notice', [TextNode('note')], note) + + def handle_warning(self): + warning = self.parse_args('\\warning', 'M')[0] + return EnvironmentNode('notice', [TextNode('warning')], warning) + + def handle_ifx(self): + for l, t, v, r in self.tokens: + if t == 'command' and v == 
'fi': + break + return EmptyNode() + + def handle_c(self): + return self.handle_special_command('c') + + def handle_mbox(self): + return self.parse_args('\\mbox', 'M')[0] + + def handle_leftline(self): + return self.parse_args('\\leftline', 'M')[0] + + def handle_Large(self): + return self.parse_args('\\Large', 'M')[0] + + def handle_pytype(self): + # \pytype{x} is synonymous to \class{x} now + return self.handle_class() + + def handle_nodename(self): + return self.handle_label() + + def handle_verb(self): + # skip delimiter + l, t, v, r = self.tokens.next() + l, t, v, r = self.tokens.next() + assert t == 'text' + node = InlineNode('code', [TextNode(r)]) + # skip delimiter + l, t, v, r = self.tokens.next() + return node + + def handle_locallinewidth(self): + return EmptyNode() + + def handle_linewidth(self): + return EmptyNode() + + def handle_setlength(self): + self.parse_args('\\setlength', 'MM') + return EmptyNode() + + def handle_stmodindex(self): + arg, = self.parse_args('\\stmodindex', 'T') + return CommandNode('declaremodule', [EmptyNode(), + TextNode(u'standard'), + arg]) + + def handle_indexname(self): + return EmptyNode() + + def handle_renewcommand(self): + self.parse_args('\\renewcommand', 'MM') + return EmptyNode() + + # ------------------------- environment handlers ------------------------- + + def handle_document_env(self): + return self.parse_until(self.environment_end) + + handle_sloppypar_env = handle_document_env + handle_flushleft_env = handle_document_env + handle_math_env = handle_document_env + + def handle_verbatim_env(self): + text = [] + for l, t, v, r in self.tokens: + if t == 'command' and v == 'end' : + tok = self.tokens.peekmany(3) + if tok[0][1] == 'bgroup' and \ + tok[1][1] == 'text' and \ + tok[1][2] == 'verbatim' and \ + tok[2][1] == 'egroup': + self.tokens.popmany(3) + break + text.append(r) + return VerbatimNode(TextNode(''.join(text))) + + # involved math markup must be corrected manually + def handle_displaymath_env(self): + 
text = ['XXX: translate this math'] + for l, t, v, r in self.tokens: + if t == 'command' and v == 'end' : + tok = self.tokens.peekmany(3) + if tok[0][1] == 'bgroup' and \ + tok[1][1] == 'text' and \ + tok[1][2] == 'displaymath' and \ + tok[2][1] == 'egroup': + self.tokens.popmany(3) + break + text.append(r) + return VerbatimNode(TextNode(''.join(text))) + + # alltt is different from verbatim because it allows markup + def handle_alltt_env(self): + nodelist = NodeList() + for l, t, v, r in self.tokens: + if self.environment_end(t, v): + break + if t == 'command': + if len(v) == 1 and not v.isalpha(): + nodelist.append(self.handle_special_command(v)) + continue + handler = getattr(self, 'handle_' + v, None) + if not handler: + raise ParserError('no handler for \\%s command' % v, l) + nodelist.append(handler()) + elif t == 'comment': + nodelist.append(CommentNode(v)) + else: + # all else is appended raw + nodelist.append(TextNode(r)) + return VerbatimNode(nodelist.flatten()) + + def handle_itemize_env(self, nodetype=ItemizeNode): + items = [] + # a usecase for nonlocal :) + running = [False] + + def item_condition(t, v, bracelevel): + if self.environment_end(t, v): + del running[:] + return True + if t == 'command' and v == 'item': + return True + return False + + # the text until the first \item is discarded + self.parse_until(item_condition) + while running: + itemname, = self.parse_args('\\item', 'O') + itemcontent = self.parse_until(item_condition) + items.append([itemname, itemcontent]) + return nodetype(items) + + def handle_enumerate_env(self): + return self.handle_itemize_env(EnumerateNode) + + def handle_description_env(self): + return self.handle_itemize_env(DescriptionNode) + + def handle_definitions_env(self): + items = [] + running = [False] + + def item_condition(t, v, bracelevel): + if self.environment_end(t, v): + del running[:] + return True + if t == 'command' and v == 'term': + return True + return False + + # the text until the first \item is 
discarded + self.parse_until(item_condition) + while running: + itemname, = self.parse_args('\\term', 'M') + itemcontent = self.parse_until(item_condition) + items.append([itemname, itemcontent]) + return DefinitionsNode(items) + + def mk_table_handler(self, envname, numcols): + def handle_table(self): + args = self.parse_args('table'+envname, 'TT' + 'M'*numcols) + firstcolformat = args[1].text + headings = args[2:] + lines = [] + for l, t, v, r in self.tokens: + # XXX: everything outside of \linexxx is lost here + if t == 'command': + if v == 'line'+envname: + lines.append(self.parse_args('\\line'+envname, + 'M'*numcols)) + elif v == 'end': + arg = self.parse_args('\\end', 'T') + assert arg[0].text.endswith('table'+envname), arg[0].text + break + for line in lines: + if not empty(line[0]): + line[0] = InlineNode(firstcolformat, [line[0]]) + return TableNode(numcols, headings, lines) + return handle_table + + handle_tableii_env = mk_table_handler(None, 'ii', 2) + handle_longtableii_env = handle_tableii_env + handle_tableiii_env = mk_table_handler(None, 'iii', 3) + handle_longtableiii_env = handle_tableiii_env + handle_tableiv_env = mk_table_handler(None, 'iv', 4) + handle_longtableiv_env = handle_tableiv_env + handle_tablev_env = mk_table_handler(None, 'v', 5) + handle_longtablev_env = handle_tablev_env + + def handle_productionlist_env(self): + env_args = self.parse_args('productionlist', 'Q') + items = [] + for l, t, v, r in self.tokens: + # XXX: everything outside of \production is lost here + if t == 'command': + if v == 'production': + items.append(self.parse_args('\\production', 'TM')) + elif v == 'productioncont': + args = self.parse_args('\\productioncont', 'M') + args.insert(0, EmptyNode()) + items.append(args) + elif v == 'end': + arg = self.parse_args('\\end', 'T') + assert arg[0].text == 'productionlist' + break + node = ProductionListNode(items) + # the argument specifies a production group + node.arg = env_args[0] + return node + + def 
environment_end(self, t, v, bracelevel=0): + if t == 'command' and v == 'end': + self.parse_args('\\end', 'T') + return True + return False Added: doctools/trunk/converter/newfiles/TODO ============================================================================== --- (empty file) +++ doctools/trunk/converter/newfiles/TODO Mon Jul 23 11:02:25 2007 @@ -0,0 +1,18 @@ +To do after conversion +====================== + +* fix all references and links marked with `XXX` +* adjust all literal include paths +* remove all non-literal includes +* fix all duplicate labels and undefined label references +* fix the email package docs: add a toctree +* split very large files and add toctrees +* integrate standalone HOWTOs +* find out which files get "comments disabled" metadata +* double backslashes in production lists +* add synopses for each module +* write "About these documents" +* finish "Documenting Python" +* extend copyright.rst +* merge ACKS into about.rst +* fix the "quadruple" index term Added: doctools/trunk/converter/newfiles/about.rst ============================================================================== --- (empty file) +++ doctools/trunk/converter/newfiles/about.rst Mon Jul 23 11:02:25 2007 @@ -0,0 +1,16 @@ +===================== +About these documents +===================== + +These documents are generated from `reStructuredText +`_ sources by *Sphinx*, a document processor +specifically written for the Python documentation. + +In the online version of these documents, you can submit comments and suggest +changes directly on the documentation pages. + +Development of the documentation and its toolchain takes place on the +docs at python.org mailing list. We're always looking for volunteers wanting +to help with the docs, so feel free to send a mail there! + +See :ref:`reporting-bugs` for information how to report bugs in Python itself. 
\ No newline at end of file Added: doctools/trunk/converter/newfiles/api_index.rst ============================================================================== --- (empty file) +++ doctools/trunk/converter/newfiles/api_index.rst Mon Jul 23 11:02:25 2007 @@ -0,0 +1,33 @@ +.. _c-api-index: + +################################## + Python/C API Reference Manual +################################## + +:Release: |version| +:Date: |today| + +This manual documents the API used by C and C++ programmers who want to write +extension modules or embed Python. It is a companion to :ref:`extending-index`, +which describes the general principles of extension writing but does not +document the API functions in detail. + +.. warning:: + + The current version of this document is somewhat incomplete. However, most of + the important functions, types and structures are described. + + +.. toctree:: + :maxdepth: 2 + + intro.rst + veryhigh.rst + refcounting.rst + exceptions.rst + utilities.rst + abstract.rst + concrete.rst + init.rst + memory.rst + newtypes.rst Added: doctools/trunk/converter/newfiles/conf.py ============================================================================== --- (empty file) +++ doctools/trunk/converter/newfiles/conf.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# +# Python documentation build configuration file +# +# The contents of this file are pickled, so don't put values in the namespace +# that aren't pickleable (module imports are okay, they're removed automatically). +# + +# The default replacements for |version| and |release|: +# The short X.Y version. +version = '2.6' +# The full version, including alpha/beta/rc tags. +release = '2.6a0' +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +today = '' +# Else, today_fmt is used as the format for a strftime call. +today_fmt = '%B %d, %Y' + +# List of files that shouldn't be included in the build. 
+unused_files = [ + 'whatsnew/2.0.rst', + 'whatsnew/2.1.rst', + 'whatsnew/2.2.rst', + 'whatsnew/2.3.rst', + 'whatsnew/2.4.rst', + 'whatsnew/2.5.rst', + 'macmodules/scrap.rst', + 'modules/xmllib.rst', +] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +last_updated_format = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +use_smartypants = True + +# If true, trailing '()' will be stripped from :func: etc. cross-references. +strip_trailing_parentheses = False Added: doctools/trunk/converter/newfiles/contents.rst ============================================================================== --- (empty file) +++ doctools/trunk/converter/newfiles/contents.rst Mon Jul 23 11:02:25 2007 @@ -0,0 +1,21 @@ +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + Python Documentation contents +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +.. toctree:: + + whatsnew/2.6.rst + tutorial/index.rst + reference/index.rst + modules/index.rst + macmodules/index.rst + extending/index.rst + c-api/index.rst + distutils/index.rst + install/index.rst + documenting/index.rst + + bugs.rst + about.rst + license.rst + copyright.rst Added: doctools/trunk/converter/newfiles/dist_index.rst ============================================================================== --- (empty file) +++ doctools/trunk/converter/newfiles/dist_index.rst Mon Jul 23 11:02:25 2007 @@ -0,0 +1,28 @@ +.. _distutils-index: + +############################### + Distributing Python Modules +############################### + +:Release: |version| +:Date: |today| + +This document describes the Python Distribution Utilities ("Distutils") from +the module developer's point of view, describing how to use the Distutils to +make Python modules and extensions easily available to a wider audience with +very little overhead for build/release/install mechanics. + +.. 
toctree:: + :maxdepth: 2 + + introduction.rst + setupscript.rst + configfile.rst + sourcedist.rst + builtdist.rst + packageindex.rst + uploading.rst + examples.rst + extending.rst + commandref.rst + apiref.rst Added: doctools/trunk/converter/newfiles/doc.rst ============================================================================== --- (empty file) +++ doctools/trunk/converter/newfiles/doc.rst Mon Jul 23 11:02:25 2007 @@ -0,0 +1,33 @@ +.. _documenting-index: + +###################### + Documenting Python +###################### + + +The Python language has a substantial body of documentation, much of it +contributed by various authors. The markup used for the Python documentation is +`reStructuredText`_, developed by the `docutils`_ project, amended by custom +directives and using a toolset named *Sphinx* to postprocess the HTML output. + +This document describes the style guide for our documentation, the custom +reStructuredText markup introduced to support Python documentation and how it +should be used, as well as the Sphinx build system. + +.. _reStructuredText: http://docutils.sf.net/rst.html +.. _docutils: http://docutils.sf.net/ + +If you're interested in contributing to Python's documentation, there's no need +to write reStructuredText if you're not so inclined; plain text contributions +are more than welcome as well. + +.. toctree:: + + intro.rst + style.rst + rest.rst + markup.rst + sphinx.rst + +.. XXX add credits, thanks etc. + Added: doctools/trunk/converter/newfiles/doc_intro.rst ============================================================================== --- (empty file) +++ doctools/trunk/converter/newfiles/doc_intro.rst Mon Jul 23 11:02:25 2007 @@ -0,0 +1,29 @@ +Introduction +============ + +Python's documentation has long been considered to be good for a free +programming language. 
There are a number of reasons for this, the most +important being the early commitment of Python's creator, Guido van Rossum, to +providing documentation on the language and its libraries, and the continuing +involvement of the user community in providing assistance for creating and +maintaining documentation. + +The involvement of the community takes many forms, from authoring to bug reports +to just plain complaining when the documentation could be more complete or +easier to use. + +This document is aimed at authors and potential authors of documentation for +Python. More specifically, it is for people contributing to the standard +documentation and developing additional documents using the same tools as the +standard documents. This guide will be less useful for authors using the Python +documentation tools for topics other than Python, and less useful still for +authors not using the tools at all. + +If your interest is in contributing to the Python documentation, but you don't +have the time or inclination to learn reStructuredText and the markup structures +documented here, there's a welcoming place for you among the Python contributors +as well. Any time you feel that you can clarify existing documentation or +provide documentation that's missing, the existing documentation team will +gladly work with you to integrate your text, dealing with the markup for you. +Please don't let the material in this document stand between the documentation +and your desire to help out! \ No newline at end of file Added: doctools/trunk/converter/newfiles/doc_markup.rst ============================================================================== --- (empty file) +++ doctools/trunk/converter/newfiles/doc_markup.rst Mon Jul 23 11:02:25 2007 @@ -0,0 +1,738 @@ +.. highlightlang:: rest + +Additional Markup Constructs +============================ + +Sphinx adds a lot of new directives and interpreted text roles to standard reST +markup. 
This section contains the reference material for these facilities. +Documentation for "standard" reST constructs is not included here, though +they are used in the Python documentation. + +XXX: file-wide metadata + +Meta-information markup +----------------------- + +.. describe:: sectionauthor + + Identifies the author of the current section. The argument should include + the author's name such that it can be used for presentation (though it isn't) + and email address. The domain name portion of the address should be lower + case. Example:: + + .. sectionauthor:: Guido van Rossum + + Currently, this markup isn't reflected in the output in any way, but it helps + keep track of contributions. + + +Module-specific markup +---------------------- + +The markup described in this section is used to provide information about a +module being documented. Each module should be documented in its own file. +Normally this markup appears after the title heading of that file; a typical +file might start like this:: + + :mod:`parrot` -- Dead parrot access + =================================== + + .. module:: parrot + :platform: Unix, Windows + :synopsis: Analyze and reanimate dead parrots. + .. moduleauthor:: Eric Cleese + .. moduleauthor:: John Idle + +As you can see, the module-specific markup consists of two directives, the +``module`` directive and the ``moduleauthor`` directive. + +.. describe:: module + + This directive marks the beginning of the description of a module (or package + submodule, in which case the name should be fully qualified, including the + package name). + + The ``platform`` option, if present, is a comma-separated list of the + platforms on which the module is available (if it is available on all + platforms, the option should be omitted). The keys are short identifiers; + examples that are in use include "IRIX", "Mac", "Windows", and "Unix". It is + important to use a key which has already been used when applicable. 
+ + The ``synopsis`` option should consist of one sentence describing the + module's purpose -- it is currently only used in the Global Module Index. + +.. describe:: moduleauthor + + The ``moduleauthor`` directive, which can appear multiple times, names the + authors of the module code, just like ``sectionauthor`` names the author(s) + of a piece of documentation. It too does not result in any output currently. + + +.. note:: + + It is important to make the section title of a module-describing file + meaningful since that value will be inserted in the table-of-contents trees + in overview files. + + +Information units +----------------- + +There are a number of directives used to describe specific features provided by +modules. Each directive requires one or more signatures to provide basic +information about what is being described, and the content should be the +description. The basic version makes entries in the general index; if no index +entry is desired, you can give the directive option flag ``:noindex:``. The +following example shows all of the features of this directive type:: + + .. function:: spam(eggs) + ham(eggs) + :noindex: + + Spam or ham the foo. + +The signatures of object methods or data attributes should always include the +type name (``.. method:: FileInput.input(...)``), even if it is obvious from the +context which type they belong to; this is to enable consistent +cross-references. If you describe methods belonging to an abstract protocol, +such as "context managers", include a (pseudo-)type name too to make the +index entries more informative. + +The directives are: + +.. describe:: cfunction + + Describes a C function. The signature should be given as in C, e.g.:: + + .. cfunction:: PyObject* PyType_GenericAlloc(PyTypeObject *type, Py_ssize_t nitems) + + This is also used to describe function-like preprocessor macros. The names + of the arguments should be given so they may be used in the description. 
+ + Note that you don't have to backslash-escape asterisks in the signature, + as it is not parsed by the reST inliner. + +.. describe:: cmember + + Describes a C struct member. Example signature:: + + .. cmember:: PyObject* PyTypeObject.tp_bases + + The text of the description should include the range of values allowed, how + the value should be interpreted, and whether the value can be changed. + References to structure members in text should use the ``member`` role. + +.. describe:: cmacro + + Describes a "simple" C macro. Simple macros are macros which are used + for code expansion, but which do not take arguments so cannot be described as + functions. This is not to be used for simple constant definitions. Examples + of its use in the Python documentation include :cmacro:`PyObject_HEAD` and + :cmacro:`Py_BEGIN_ALLOW_THREADS`. + +.. describe:: ctype + + Describes a C type. The signature should just be the type name. + +.. describe:: cvar + + Describes a global C variable. The signature should include the type, such + as:: + + .. cvar:: PyObject* PyClass_Type + +.. describe:: data + + Describes global data in a module, including both variables and values used + as "defined constants." Class and object attributes are not documented + using this environment. + +.. describe:: exception + + Describes an exception class. The signature can, but need not include + parentheses with constructor arguments. + +.. describe:: function + + Describes a module-level function. The signature should include the + parameters, enclosing optional parameters in brackets. Default values can be + given if it enhances clarity. For example:: + + .. function:: Timer.repeat([repeat=3[, number=1000000]]) + + Object methods are not documented using this directive. Bound object methods + placed in the module namespace as part of the public interface of the module + are documented using this, as they are equivalent to normal functions for + most purposes. 
+ + The description should include information about the parameters required and + how they are used (especially whether mutable objects passed as parameters + are modified), side effects, and possible exceptions. A small example may be + provided. + +.. describe:: class + + Describes a class. The signature can include parentheses with parameters + which will be shown as the constructor arguments. + +.. describe:: attribute + + Describes an object data attribute. The description should include + information about the type of the data to be expected and whether it may be + changed directly. + +.. describe:: method + + Describes an object method. The parameters should not include the ``self`` + parameter. The description should include similar information to that + described for ``function``. + +.. describe:: opcode + + Describes a Python bytecode instruction. + + +There is also a generic version of these directives: + +.. describe:: describe + + This directive produces the same formatting as the specific ones explained + above but does not create index entries or cross-referencing targets. It is + used, for example, to describe the directives in this document. Example:: + + .. describe:: opcode + + Describes a Python bytecode instruction. + + +Showing code examples +--------------------- + +Examples of Python source code or interactive sessions are represented using +standard reST literal blocks. They are started by a ``::`` at the end of the +preceding paragraph and delimited by indentation. + +Representing an interactive session requires including the prompts and output +along with the Python code. No special markup is required for interactive +sessions. After the last line of input or output presented, there should not be +an "unused" primary prompt; this is an example of what *not* to do:: + + >>> 1 + 1 + 2 + >>> + +Syntax highlighting is handled in a smart way: + +* There is a "highlighting language" for each source file. 
Per default, + this is ``'python'`` as the majority of files will have to highlight Python + snippets. + +* Within Python highlighting mode, interactive sessions are recognized + automatically and highlighted appropriately. + +* The highlighting language can be changed using the ``highlightlang`` + directive, used as follows:: + + .. highlightlang:: c + + This language is used until the next ``highlightlang`` directive is + encountered. + +* The valid values for the highlighting language are: + + * ``python`` (the default) + * ``c`` + * ``rest`` + * ``none`` (no highlighting) + +* If highlighting with the current language fails, the block is not highlighted + in any way. + +Longer displays of verbatim text may be included by storing the example text in +an external file containing only plain text. The file may be included using the +standard ``include`` directive with the ``literal`` option flag. For example, +to include the Python source file :file:`example.py`, use:: + + .. include:: example.py + :literal: + + +Inline markup +------------- + +As said before, Sphinx uses interpreted text roles to insert semantic markup in +documents. + +The default role is ``var``, as that was one of the most common macros used in +the old LaTeX docs. That means that you can use ```var``` to refer to a +variable named "var". + +For all other roles, you have to write ``:rolename:`content```. + +The following roles refer to objects in modules and are possibly hyperlinked if +a matching identifier is found: + +.. describe:: mod + + The name of a module; a dotted name may be used. This should also be used for + package names. + +.. describe:: func + + The name of a Python function; dotted names may be used. The role text + should include trailing parentheses to enhance readability. The parentheses + are stripped when searching for identifiers. + +.. describe:: data + + The name of a module-level variable. + +.. describe:: const + + The name of a "defined" constant. 
This may be a C-language ``#define`` + or a Python variable that is not intended to be changed. + +.. describe:: class + + A class name; a dotted name may be used. + +.. describe:: meth + + The name of a method of an object. The role text should include the type + name, method name and the trailing parentheses. A dotted name may be used. + +.. describe:: attr + + The name of a data attribute of an object. + +.. describe:: exc + + The name of an exception. A dotted name may be used. + +The name enclosed in this markup can include a module name and/or a class name. +For example, ``:func:`filter``` could refer to a function named ``filter`` in +the current module, or the built-in function of that name. In contrast, +``:func:`foo.filter``` clearly refers to the ``filter`` function in the ``foo`` +module. + +A similar heuristic is used to determine whether the name is an attribute of +the currently documented class. + +The following roles create cross-references to C-language constructs if they +are defined in the API documentation: + +.. describe:: cdata + + The name of a C-language variable. + +.. describe:: cfunc + + The name of a C-language function. Should include trailing parentheses. + +.. describe:: cmacro + + The name of a "simple" C macro, as defined above. + +.. describe:: ctype + + The name of a C-language type. + + +The following role does possibly create a cross-reference, but does not refer +to objects: + +.. describe:: token + + The name of a grammar token (used in the reference manual to create links + between production displays). + +--------- + +The following roles don't do anything special except formatting the text +in a different style: + +.. describe:: command + + The name of an OS-level command, such as ``rm``. + +.. describe:: dfn + + Mark the defining instance of a term in the text. (No index entries are + generated.) + +.. describe:: envvar + + An environment variable. Index entries are generated. + +.. 
describe:: file + + The name of a file or directory. + +.. XXX: filenq, filevar + +.. describe:: guilabel + + Labels presented as part of an interactive user interface should be marked + using ``guilabel``. This includes labels from text-based interfaces such as + those created using :mod:`curses` or other text-based libraries. Any label + used in the interface should be marked with this role, including button + labels, window titles, field names, menu and menu selection names, and even + values in selection lists. + +.. describe:: kbd + + Mark a sequence of keystrokes. What form the key sequence takes may depend + on platform- or application-specific conventions. When there are no relevant + conventions, the names of modifier keys should be spelled out, to improve + accessibility for new users and non-native speakers. For example, an + *xemacs* key sequence may be marked like ``:kbd:`C-x C-f```, but without + reference to a specific application or platform, the same sequence should be + marked as ``:kbd:`Control-x Control-f```. + +.. describe:: keyword + + The name of a keyword in a programming language. + +.. describe:: mailheader + + The name of an RFC 822-style mail header. This markup does not imply that + the header is being used in an email message, but can be used to refer to any + header of the same "style." This is also used for headers defined by the + various MIME specifications. The header name should be entered in the same + way it would normally be found in practice, with the camel-casing conventions + being preferred where there is more than one common usage. For example: + ``:mailheader:`Content-Type```. + +.. describe:: makevar + + The name of a :command:`make` variable. + +.. describe:: manpage + + A reference to a Unix manual page including the section, + e.g. ``:manpage:`ls(1)```. + +.. describe:: menuselection + + Menu selections should be marked using the ``menuselection`` role. 
This is + used to mark a complete sequence of menu selections, including selecting + submenus and choosing a specific operation, or any subsequence of such a + sequence. The names of individual selections should be separated by + ``-->``. + + For example, to mark the selection "Start > Programs", use this markup:: + + :menuselection:`Start --> Programs` + + When including a selection that includes some trailing indicator, such as the + ellipsis some operating systems use to indicate that the command opens a + dialog, the indicator should be omitted from the selection name. + +.. describe:: mimetype + + The name of a MIME type, or a component of a MIME type (the major or minor + portion, taken alone). + +.. describe:: newsgroup + + The name of a Usenet newsgroup. + +.. describe:: option + + A command-line option to an executable program. The leading hyphen(s) must + be included. + +.. describe:: program + + The name of an executable program. This may differ from the file name for + the executable for some platforms. In particular, the ``.exe`` (or other) + extension should be omitted for Windows programs. + +.. describe:: regexp + + A regular expression. Quotes should not be included. + +.. describe:: var + + A Python or C variable or parameter name. + + +The following roles generate external links: + +.. describe:: pep + + A reference to a Python Enhancement Proposal. This generates appropriate + index entries. The text "PEP *number*\ " is generated; in the HTML output, + this text is a hyperlink to an online copy of the specified PEP. + +.. describe:: rfc + + A reference to an Internet Request for Comments. This generates appropriate + index entries. The text "RFC *number*\ " is generated; in the HTML output, + this text is a hyperlink to an online copy of the specified RFC. + + +Note that there are no special roles for including hyperlinks as you can use +the standard reST markup for that purpose. + + +.. 
_doc-ref-role: + +Cross-linking markup +-------------------- + +To support cross-referencing to arbitrary sections in the documentation, the +standard reST labels are "abused" a bit: Every label must precede a section +title; and every label name must be unique throughout the entire documentation +source. + +You can then reference to these sections using the ``:ref:`label-name``` role. + +Example:: + + .. _my-reference-label: + + Section to cross-reference + -------------------------- + + This is the text of the section. + + It refers to the section itself, see :ref:`my-reference-label`. + +The ``:ref:`` invocation is replaced with the section title. + + +Paragraph-level markup +---------------------- + +These directives create short paragraphs and can be used inside information +units as well as normal text: + +.. describe:: note + + An especially important bit of information about an API that a user should be + aware of when using whatever bit of API the note pertains to. The content of + the directive should be written in complete sentences and include all + appropriate punctuation. + + Example:: + + .. note:: + + This function is not suitable for sending spam e-mails. + +.. describe:: warning + + An important bit of information about an API that a user should be very aware + of when using whatever bit of API the warning pertains to. The content of + the directive should be written in complete sentences and include all + appropriate punctuation. This differs from ``note`` in that it is recommended + over ``note`` for information regarding security. + +.. describe:: versionadded + + This directive documents the version of Python which added the described + feature to the library or C API. When this applies to an entire module, it + should be placed at the top of the module section before any prose. + + The first argument must be given and is the version in question; you can add + a second argument consisting of a *brief* explanation of the change. 
+ + Example:: + + .. versionadded:: 2.5 + The `spam` parameter. + + Note that there must be no blank line between the directive head and the + explanation; this is to make these blocks visually continuous in the markup. + +.. describe:: versionchanged + + Similar to ``versionadded``, but describes when and what changed in the named + feature in some way (new parameters, changed side effects, etc.). + +-------------- + +.. describe:: seealso + + Many sections include a list of references to module documentation or + external documents. These lists are created using the ``seealso`` directive. + + The ``seealso`` directive is typically placed in a section just before any + sub-sections. For the HTML output, it is shown boxed off from the main flow + of the text. + + The content of the ``seealso`` directive should be a reST definition list. + Example:: + + .. seealso:: + + Module :mod:`zipfile` + Documentation of the :mod:`zipfile` standard module. + + `GNU tar manual, Basic Tar Format `_ + Documentation for tar archive files, including GNU tar extensions. + +.. describe:: rubric + + This directive creates a paragraph heading that is not used to create a + table of contents node. It is currently used for the "Footnotes" caption. + +.. describe:: centered + + This directive creates a centered boldfaced paragraph. Use it as follows:: + + .. centered:: + + Paragraph contents. + + +Table-of-contents markup +------------------------ + +Since reST does not have facilities to interconnect several documents, or split +documents into multiple output files, Sphinx uses a custom directive to add +relations between the single files the documentation is made of, as well as +tables of contents. The ``toctree`` directive is the central element. + +.. describe:: toctree + + This directive inserts a "TOC tree" at the current location, using the + individual TOCs (including "sub-TOC trees") of the files given in the + directive body. 
A numeric ``maxdepth`` option may be given to indicate the + depth of the tree; by default, all levels are included. + + Consider this example (taken from the library reference index):: + + .. toctree:: + :maxdepth: 2 + + intro.rst + strings.rst + datatypes.rst + numeric.rst + (many more files listed here) + + This accomplishes two things: + + * Tables of contents from all those files are inserted, with a maximum depth + of two, that means one nested heading. ``toctree`` directives in those + files are also taken into account. + * Sphinx knows that the relative order of the files ``intro.rst``, + ``strings.rst`` and so forth, and it knows that they are children of the + shown file, the library index. From this information it generates "next + chapter", "previous chapter" and "parent chapter" links. + + In the end, all files included in the build process must occur in one + ``toctree`` directive; Sphinx will emit a warning if it finds a file that is + not included, because that means that this file will not be reachable through + standard navigation. + + The special file ``contents.rst`` at the root of the source directory is the + "root" of the TOC tree hierarchy; from it the "Contents" page is generated. + + +Index-generating markup +----------------------- + +Sphinx automatically creates index entries from all information units (like +functions, classes or attributes) like discussed before. + +However, there is also an explicit directive available, to make the index more +comprehensive and enable index entries in documents where information is not +mainly contained in information units, such as the language reference. + +The directive is ``index`` and contains one or more index entries. Each entry +consists of a type and a value, separated by a colon. + +For example:: + + .. 
index:: + single: execution!context + module: __main__ + module: sys + triple: module; search; path + +This directive contains five entries, which will be converted to entries in the +generated index which link to the exact location of the index statement (or, in +case of offline media, the corresponding page number). + +The possible entry types are: + +single + Creates a single index entry. Can be made a subentry by separating the + subentry text with a semicolon (this is also used below to describe what + entries are created). +pair + ``pair: loop; statement`` is a shortcut that creates two index entries, + namely ``loop; statement`` and ``statement; loop``. +triple + Likewise, ``triple: module; search; path`` is a shortcut that creates three + index entries, which are ``module; search path``, ``search; path, module`` and + ``path; module search``. +module, keyword, operator, object, exception, statement, builtin + These all create two index entries. For example, ``module: hashlib`` creates + the entries ``module; hashlib`` and ``hashlib; module``. + + +Grammar production displays +--------------------------- + +Special markup is available for displaying the productions of a formal grammar. +The markup is simple and does not attempt to model all aspects of BNF (or any +derived forms), but provides enough to allow context-free grammars to be +displayed in a way that causes uses of a symbol to be rendered as hyperlinks to +the definition of the symbol. There is this directive: + +.. describe:: productionlist + + This directive is used to enclose a group of productions. Each production is + given on a single line and consists of a name, separated by a colon from the + following definition. If the definition spans multiple lines, each + continuation line must begin with a colon placed at the same column as in the + first line. + + Blank lines are not allowed within ``productionlist`` directive arguments. 
+ + The definition can contain token names which are marked as interpreted text + (e.g. ``sum ::= `integer` "+" `integer```) -- this generates cross-references + to the productions of these tokens. Note that vertical bars used to indicate + alternatives must be escaped with backslashes because otherwise they would + indicate a substitution reference to the reST parser. + + +.. XXX describe optional first parameter + +The following is an example taken from the Python Reference Manual:: + + .. productionlist:: + try_stmt: try1_stmt \| try2_stmt + try1_stmt: "try" ":" :token:`suite` + : ("except" [:token:`expression` ["," :token:`target`]] ":" :token:`suite`)+ + : ["else" ":" :token:`suite`] + : ["finally" ":" :token:`suite`] + try2_stmt: "try" ":" :token:`suite` + : "finally" ":" :token:`suite` + + +Substitutions +------------- + +The documentation system provides three substitutions that are defined by default. +They are set in the build configuration file, see :ref:`doc-build-config`. + +.. describe:: |release| + + Replaced by the Python release the documentation refers to. This is the full + version string including alpha/beta/release candidate tags, e.g. ``2.5.2b3``. + +.. describe:: |version| + + Replaced by the Python version the documentation refers to. This consists + only of the major and minor version parts, e.g. ``2.5``, even for version + 2.5.1. + +.. describe:: |today| + + Replaced by either today's date, or the date set in the build configuration + file. Normally has the format ``April 14, 2007``. Added: doctools/trunk/converter/newfiles/doc_rest.rst ============================================================================== --- (empty file) +++ doctools/trunk/converter/newfiles/doc_rest.rst Mon Jul 23 11:02:25 2007 @@ -0,0 +1,229 @@ +.. 
highlightlang:: rest + +reStructuredText Primer +======================= + +This section is a brief introduction to reStructuredText (reST) concepts and +syntax, to provide authors enough information to autor documents productively. +Since reST was designed to be a simple, unobtrusive markup language, this will +not take too long. + +.. seealso:: + + The authoritative `reStructuredText User + Documentation `_. + + +Paragraphs +---------- + +The most basic block a reST document is made of. Paragraphs are chunks of text +separated by one ore more blank lines. As in Python, indentation is significant +in reST, so all lines of a paragraph must be left-aligned. + + +Inline markup +------------- + +The standard reST inline markup is quite simple: use + +* one asterisk: ``*text*`` for emphasis (italics), +* two asterisks: ``**text**`` for strong emphasis (boldface), and +* backquotes: ````text```` for code samples. + +If asterisks or backquotes appear in running text and could be confused with +inline markup delimiters, they have to be escaped with a backslash. + +Be aware of some restrictions of this markup: + +* it may not be nested, +* content may not start or end with whitespace: ``* text*`` is wrong, +* it must be separated from surrounding text by non-word characters. Use a + backslash escaped space to work around that: ``thisis\ *one*\ word``. + +These restrictions may be lifted in future versions of the docutils. + +reST also allows for custom "interpreted text roles"', which signify that the +enclosed text should be interpreted in a specific way. Sphinx uses this to +provide semantic markup and cross-referencing of identifiers, as described in +the appropriate section. The general syntax is ``:rolename:`content```. + + +Lists and Quotes +---------------- + +List markup is natural: just place an asterisk at the start of a paragraph and +indent properly. 
The same goes for numbered lists; they can also be +autonumbered using a ``#`` sign:: + + * This is a bulleted list. + * It has two items, the second + item uses two lines. + + #. This is a numbered list. + #. It has two items too. + +Nested lists are possible, but be aware that they must be separated from the +parent list items by blank lines:: + + * this is + * a list + + * with a nested list + * and some subitems + + * and here the parent list continues + +Definition lists are created as follows:: + + term (up to a line of text) + Definition of the term, which must be indented + + and can even consist of multiple paragraphs + + next term + Description. + + +Paragraphs are quoted by just indenting them more than the surrounding +paragraphs. + + +Source Code +----------- + +Literal code blocks are introduced by ending a paragraph with the special marker +``::``. The literal block must be indented, to be able to include blank lines:: + + This is a normal text paragraph. The next paragraph is a code sample:: + + It is not processed in any way, except + that the indentation is removed. + + It can span multiple lines. + + This is a normal text paragraph again. + +The handling of the ``::`` marker is smart: + +* If it occurs as a paragraph of its own, that paragraph is completely left + out of the document. +* If it is preceded by whitespace, the marker is removed. +* If it is preceded by non-whitespace, the marker is replaced by a single + colon. + +That way, the second sentence in the above example's first paragraph would be +rendered as "The next paragraph is a code sample:". + + +Hyperlinks +---------- + +External links +^^^^^^^^^^^^^^ + +Use ```Link text `_`` for inline web links. If the link text +should be the web address, you don't need special markup at all, the parser +finds links and mail addresses in ordinary text. + +Internal links +^^^^^^^^^^^^^^ + +Internal linking is done via a special reST role, see the section on specific +markup, :ref:`doc-ref-role`. 
+ + +Sections +-------- + +Section headers are created by underlining (and optionally overlining) the +section title with a punctuation character, at least as long as the text:: + + ================= + This is a heading + ================= + +Normally, there are no heading levels assigned to certain characters as the +structure is determined from the succession of headings. However, for the +Python documentation, we use this convention: + +* ``#`` with overline, for parts +* ``*`` with overline, for chapters +* ``=``, for sections +* ``-``, for subsections +* ``^``, for subsubsections +* ``"``, for paragraphs + + +Explicit Markup +--------------- + +"Explicit markup" is used in reST for most constructs that need special +handling, such as footnotes, specially-highlighted paragraphs, comments, and +generic directives. + +An explicit markup block begins with a line starting with ``..`` followed by +whitespace and is terminated by the next paragraph at the same level of +indentation. (There needs to be a blank line between explicit markup and normal +paragraphs. This may all sound a bit complicated, but it is intuitive enough +when you write it.) + + +Directives +---------- + +A directive is a generic block of explicit markup. Besides roles, it is one of +the extension mechanisms of reST, and Sphinx makes heavy use of it. + +Basically, a directive consists of a name, arguments, options and content. (Keep +this terminology in mind, it is used in the next chapter describing custom +directives.) Looking at this example, :: + + .. function:: foo(x) + foo(y, z) + :bar: no + + Return a line of text input from the user. + +``function`` is the directive name. It is given two arguments here, the +remainder of the first line and the second line, as well as one option ``bar`` +(as you can see, options are given in the lines immediately following the +arguments and indicated by the colons). 
+ +The directive content follows after a blank line and is indented relative to the +directive start. + + +Footnotes +--------- + +For footnotes, use ``[#]_`` to mark the footnote location, and add the footnote +body at the bottom of the document after a "Footnotes" rubric heading, like so:: + + Lorem ipsum [#]_ dolor sit amet ... [#]_ + + .. rubric:: Footnotes + + .. [#] Text of the first footnote. + .. [#] Text of the second footnote. + + +Comments +-------- + +Every explicit markup block which isn't a valid markup construct (like the +footnotes above) is regared as a comment. + + +Source encoding +--------------- + +Since the easiest way to include special characters like em dashes or copyright +signs in reST is to directly write them as Unicode characters, one has to +specify an encoding: + +All Python documentation source files must be in UTF-8 encoding, and the HTML +documents written from them will be in that encoding as well. + + +XXX: Gotchas \ No newline at end of file Added: doctools/trunk/converter/newfiles/doc_sphinx.rst ============================================================================== --- (empty file) +++ doctools/trunk/converter/newfiles/doc_sphinx.rst Mon Jul 23 11:02:25 2007 @@ -0,0 +1,55 @@ +.. highlightlang:: rest + +The Sphinx build system +======================= + +XXX: intro... + +.. _doc-build-config: + +The build configuration file +---------------------------- + +The documentation root, that is the ``Doc`` subdirectory of the source +distribution, contains a file named ``conf.py``. This file is called the "build +configuration file", and it contains several variables that are read and used +during a build run. + +These variables are: + +release : string + A string that is used as a replacement for the ``|release|`` reST + substitution. It should be the full version string including + alpha/beta/release candidate tags, e.g. ``2.5.2b3``. 
+ +version : string + A string that is used as a replacement for the ``|version|`` reST + substitution. It should be the Python version the documentation refers to. + This consists only of the major and minor version parts, e.g. ``2.5``, even + for version 2.5.1. + +today_fmt : string + A ``strftime`` format that is used to format a replacement for the + ``|today|`` reST substitution. + +today : string + A string that can contain a date that should be written to the documentation + output literally. If this is nonzero, it is used instead of + ``strftime(today_fmt)``. + +unused_file : list of strings + A list of reST filenames that are to be disregarded during building. This + could be docs for temporarily disabled modules or documentation that's not + yet ready for public consumption. + +last_updated_format : string + If this is not an empty string, it will be given to ``time.strftime()`` and + written to each generated output file after "last updated on:". + +use_smartypants : bool + If true, use SmartyPants to convert quotes and dashes to the typographically + correct entities. + +strip_trailing_parentheses : bool + If true, trailing parentheses will be stripped from ``:func:`` etc. + crossreferences. \ No newline at end of file Added: doctools/trunk/converter/newfiles/doc_style.rst ============================================================================== --- (empty file) +++ doctools/trunk/converter/newfiles/doc_style.rst Mon Jul 23 11:02:25 2007 @@ -0,0 +1,57 @@ +.. highlightlang:: rest + +Style Guide +=========== + +The Python documentation should follow the `Apple Publications Style Guide`_ +wherever possible. This particular style guide was selected mostly because it +seems reasonable and is easy to get online. + +.. 
_Apple Publications Style Guide: http://developer.apple.com/documentation/UserExperience/Conceptual/APStyleGuide/AppleStyleGuide2003.pdf + +Topics which are not covered in the Apple's style guide will be discussed in +this document if necessary. + +Footnotes are generally discouraged, though they may be used when they are the +best way to present specific information. When a footnote reference is added at +the end of the sentence, it should follow the sentence-ending punctuation. The +reST markup should appear something like this:: + + This sentence has a footnote reference. [#]_ This is the next sentence. + +Footnotes should be gathered at the end of a file, or if the file is very long, +at the end of a section. The docutils will automatically create backlinks to the +footnote reference. + +Footnotes may appear in the middle of sentences where appropriate. + +Many special names are used in the Python documentation, including the names of +operating systems, programming languages, standards bodies, and the like. Most +of these entities are not assigned any special markup, but the preferred +spellings are given here to aid authors in maintaining the consistency of +presentation in the Python documentation. + +Other terms and words deserve special mention as well; these conventions should +be used to ensure consistency throughout the documentation: + +CPU + For "central processing unit." Many style guides say this should be spelled + out on the first use (and if you must use it, do so!). For the Python + documentation, this abbreviation should be avoided since there's no + reasonable way to predict which occurrence will be the first seen by the + reader. It is better to use the word "processor" instead. + +POSIX + The name assigned to a particular group of standards. This is always + uppercase. + +Python + The name of our favorite programming language is always capitalized. + +Unicode + The name of a character set and matching encoding. 
This is always written + capitalized. + +Unix + The name of the operating system developed at AT&T Bell Labs in the early + 1970s. \ No newline at end of file Added: doctools/trunk/converter/newfiles/ext_index.rst ============================================================================== --- (empty file) +++ doctools/trunk/converter/newfiles/ext_index.rst Mon Jul 23 11:02:25 2007 @@ -0,0 +1,34 @@ +.. _extending-index: + +################################################## + Extending and Embedding the Python Interpreter +################################################## + +:Release: |version| +:Date: |today| + +This document describes how to write modules in C or C++ to extend the Python +interpreter with new modules. Those modules can define new functions but also +new object types and their methods. The document also describes how to embed +the Python interpreter in another application, for use as an extension language. +Finally, it shows how to compile and link extension modules so that they can be +loaded dynamically (at run time) into the interpreter, if the underlying +operating system supports this feature. + +This document assumes basic knowledge about Python. For an informal +introduction to the language, see :ref:`tutorial-index`. :ref:`reference-index` +gives a more formal definition of the language. :ref:`modules-index` documents +the existing object types, functions and modules (both built-in and written in +Python) that give the language its wide application range. + +For a detailed description of the whole Python/C API, see the separate +:ref:`c-api-index`. + +.. toctree:: + :maxdepth: 2 + + extending.rst + newtypes.rst + building.rst + windows.rst + embedding.rst Added: doctools/trunk/converter/newfiles/mac_index.rst ============================================================================== --- (empty file) +++ doctools/trunk/converter/newfiles/mac_index.rst Mon Jul 23 11:02:25 2007 @@ -0,0 +1,34 @@ +.. 
_macmodules-index: + +############################## + Macintosh Library Modules +############################## + +:Release: |version| +:Date: |today| + +This library reference manual documents Python's extensions for the Macintosh. +It should be used in conjunction with :ref:`modules-index`, which documents the +standard library and built-in types. + +This manual assumes basic knowledge about the Python language. For an informal +introduction to Python, see :ref:`tutorial-index`; :ref:`reference-index` +remains the highest authority on syntactic and semantic questions. Finally, the +manual entitled :ref:`extending-index` describes how to add new extensions to +Python and how to embed it in other applications. + +.. toctree:: + :maxdepth: 2 + + using.rst + mac.rst + macic.rst + macos.rst + macostools.rst + macui.rst + framework.rst + autogil.rst + scripting.rst + toolbox.rst + colorpicker.rst + undoc.rst Added: doctools/trunk/converter/newfiles/modules_index.rst ============================================================================== --- (empty file) +++ doctools/trunk/converter/newfiles/modules_index.rst Mon Jul 23 11:02:25 2007 @@ -0,0 +1,67 @@ +.. _modules-index: + +############################### + The Python standard library +############################### + +:Release: |version| +:Date: |today| + +While :ref:`reference-index` describes the exact syntax and semantics of the +language, it does not describe the standard library that is distributed with the +language, and which greatly enhances its immediate usability. This library +contains built-in modules (written in C) that provide access to system +functionality such as file I/O that would otherwise be inaccessible to Python +programmers, as well as modules written in Python that provide standardized +solutions for many problems that occur in everyday programming. Some of these +modules are explicitly designed to encourage and enhance the portability of +Python programs. 
+ +This library reference manual documents Python's standard library, as well as +many optional library modules (which may or may not be available, depending on +whether the underlying platform supports them and on the configuration choices +made at compile time). It also documents the standard types of the language and +its built-in functions and exceptions, many of which are not or incompletely +documented in the Reference Manual. + + +.. toctree:: + :maxdepth: 2 + + intro.rst + strings.rst + datatypes.rst + numeric.rst + netdata.rst + markup.rst + fileformats.rst + crypto.rst + filesys.rst + archiving.rst + persistence.rst + allos.rst + someos.rst + unix.rst + ipc.rst + internet.rst + mm.rst + tkinter.rst + i18n.rst + frameworks.rst + development.rst + pdb.rst + profile.rst + hotshot.rst + timeit.rst + trace.rst + python.rst + custominterp.rst + restricted.rst + modules.rst + language.rst + compiler.rst + misc.rst + sgi.rst + sun.rst + windows.rst + undoc.rst Added: doctools/trunk/converter/newfiles/ref_index.rst ============================================================================== --- (empty file) +++ doctools/trunk/converter/newfiles/ref_index.rst Mon Jul 23 11:02:25 2007 @@ -0,0 +1,34 @@ +.. _reference-index: + +################################# + The Python language reference +################################# + +:Release: |version| +:Date: |today| + +This reference manual describes the syntax and "core semantics" of the +language. It is terse, but attempts to be exact and complete. The semantics of +non-essential built-in object types and of the built-in functions and modules +are described in :ref:`modules-index`. For an informal introduction to the +language, see :ref:`tutorial-index`. For C or C++ programmers, two additional +manuals exist: :ref:`extending-index` describes the high-level picture of how to +write a Python extension module, and the :ref:`c-api-index` describes the +interfaces available to C/C++ programmers in detail. + +.. 
toctree:: + :maxdepth: 2 + + introduction.rst + lexical_analysis.rst + datamodel.rst + executionmodel.rst + expressions.rst + simple_stmts.rst + compound_stmts.rst + toplevel_components.rst + functions.rst + consts.rst + objects.rst + stdtypes.rst + exceptions.rst Added: doctools/trunk/converter/newfiles/tutorial_index.rst ============================================================================== --- (empty file) +++ doctools/trunk/converter/newfiles/tutorial_index.rst Mon Jul 23 11:02:25 2007 @@ -0,0 +1,60 @@ +.. _tutorial-index: + +###################### + The Python tutorial +###################### + +:Release: |version| +:Date: |today| + +Python is an easy to learn, powerful programming language. It has efficient +high-level data structures and a simple but effective approach to +object-oriented programming. Python's elegant syntax and dynamic typing, +together with its interpreted nature, make it an ideal language for scripting +and rapid application development in many areas on most platforms. + +The Python interpreter and the extensive standard library are freely available +in source or binary form for all major platforms from the Python Web site, +http://www.python.org/, and may be freely distributed. The same site also +contains distributions of and pointers to many free third party Python modules, +programs and tools, and additional documentation. + +The Python interpreter is easily extended with new functions and data types +implemented in C or C++ (or other languages callable from C). Python is also +suitable as an extension language for customizable applications. + +This tutorial introduces the reader informally to the basic concepts and +features of the Python language and system. It helps to have a Python +interpreter handy for hands-on experience, but all examples are self-contained, +so the tutorial can be read off-line as well. + +For a description of standard objects and modules, see the Python Library +Reference document. 
The Python Reference Manual gives a more formal definition +of the language. To write extensions in C or C++, read Extending and Embedding +the Python Interpreter and Python/C API Reference. There are also several books +covering Python in depth. + +This tutorial does not attempt to be comprehensive and cover every single +feature, or even every commonly used feature. Instead, it introduces many of +Python's most noteworthy features, and will give you a good idea of the +language's flavor and style. After reading it, you will be able to read and +write Python modules and programs, and you will be ready to learn more about the +various Python library modules described in the Python Library Reference. + +.. toctree:: + + appetite.rst + interpreter.rst + introduction.rst + controlflow.rst + datastructures.rst + modules.rst + inputoutput.rst + errors.rst + classes.rst + stdlib.rst + stdlib2.rst + whatnow.rst + interactive.rst + floatingpoint.rst + glossary.rst Added: doctools/trunk/converter/restwriter.py ============================================================================== --- (empty file) +++ doctools/trunk/converter/restwriter.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,959 @@ +# -*- coding: utf-8 -*- +""" + Python documentation ReST writer + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + How the converter works + ======================= + + A LaTeX document is tokenized by a `Tokenizer`. The tokens are processed by + the `DocParser` class which emits a tree of `DocNode`\s. The `RestWriter` + now walks this node tree and generates ReST from that. + + There are some intricacies while writing ReST: + + - Paragraph text must be rewrapped in order to avoid ragged lines. The + `textwrap` module does that nicely, but it must obviously operate on a + whole paragraph at a time. Therefore the contents of the current paragraph + are cached in `self.curpar`. Every time a block level element is + encountered, its node handler calls `self.flush_par()` which writes out a + paragraph. 
Because this can be detrimental for the markup at several + stages, the `self.noflush` context manager can be used to forbid paragraph + flushing temporarily, which means that no block level nodes can be + processed. + + - There are no inline comments in ReST. Therefore comments are stored in + `self.comments` and written out every time the paragraph is flushed. + + - A similar thing goes for footnotes: `self.footnotes`. + + - Some inline markup cannot contain nested markup. Therefore the function + `textonly()` exists which returns a node similar to its argument, but + stripped of inline markup. + + - Some constructs need to format non-block-level nodes, but without writing + the result to the current paragraph. These use `self.get_node_text()` + which writes to a temporary paragraph and returns the resulting markup. + + - Indentation is important. The `self.indent` context manager helps keeping + track of indentation levels. + + - Some blocks, like lists, need to prevent the first line from being + indented because the indentation space is already filled (e.g. by a + bullet). Therefore the `self.indent` context manager accepts a + `firstline` flag which can be set to ``False``, resulting in the first + line not being indented. + + + There are some restrictions on markup compared to LaTeX: + + - Table cells may not contain blocks. + + - Hard line breaks don't exist. + + - Block level markup inside "alltt" environments doesn't work. + + :copyright: 2007 by Georg Brandl. + :license: Python license. +""" + +# yay! +from __future__ import with_statement + +import re +import StringIO +import textwrap + +WIDTH = 80 +INDENT = 3 + +new_wordsep_re = re.compile( + r'(\s+|' # any whitespace + r'(?<=\s)(?::[a-z-]+:)?`\S+|' # interpreted text start + r'[^\s\w]*\w+[a-zA-Z]-(?=\w+[a-zA-Z])|' # hyphenated words + r'(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))') # em-dash + +import textwrap +# monkey-patch... 
+textwrap.TextWrapper.wordsep_re = new_wordsep_re +wrapper = textwrap.TextWrapper(width=WIDTH, break_long_words=False) + +from .docnodes import RootNode, TextNode, NodeList, InlineNode, \ + CommentNode, EmptyNode +from .util import fixup_text, empty, text, my_make_id, \ + repair_bad_inline_markup +from .filenamemap import includes_mapping + +class WriterError(Exception): + pass + + +class Indenter(object): + """ Context manager factory for indentation. """ + def __init__(self, writer): + class IndenterManager(object): + def __init__(self, indentlevel, flush, firstline): + self.indentlevel = indentlevel + self.flush = flush + self.firstline = firstline + + def __enter__(self): + writer.indentation += (self.indentlevel * ' ') + writer.indentfirstline = self.firstline + return self + + def __exit__(self, *ignored): + if self.flush: + writer.flush_par() + writer.indentation = writer.indentation[:-self.indentlevel] + + self.manager = IndenterManager + + def __call__(self, indentlevel=INDENT, flush=True, firstline=True): + return self.manager(indentlevel, flush, firstline) + + +class NoFlush(object): + """ Convenience context manager. """ + def __init__(self, writer): + self.writer = writer + + def __enter__(self): + self.writer.no_flushing += 1 + + def __exit__(self, *ignored): + self.writer.no_flushing -= 1 + + +class SectionMeta(object): + def __init__(self): + self.modname = '' + self.platform = '' + self.synopsis = [] + self.modauthors = [] + self.sectauthors = [] + + +class RestWriter(object): + """ Write ReST from a node tree. """ + + def __init__(self, fp, splitchap=False, toctree=None, deflang=None, labelprefix=''): + self.splitchap = splitchap # split output at chapters? 
+ if splitchap: + self.fp = StringIO.StringIO() # dummy one + self.chapters = [self.fp] + else: + self.fp = fp # file pointer + self.toctree = toctree # entries for the TOC tree + self.deflang = deflang # default highlighting language + self.labelprefix = labelprefix # prefix for all label names + + # indentation tools + self.indentation = '' # current indentation string + self.indentfirstline = True # indent the first line of next paragraph? + self.indented = Indenter(self) # convenience context manager + + # paragraph flushing tools + self.flush_cb = None # callback run on next paragraph flush, used + # for properly separating field lists from + # the following paragraph + self.no_flushing = 0 # raise an error on paragraph flush? + self.noflush = NoFlush(self) # convenience context manager + + # collected items to output later + self.curpar = [] # text in current paragraph + self.comments = [] # comments to be output after flushing + self.indexentries = [] # indexentries to be output before flushing + self.footnotes = [] # footnotes to be output at document end + self.warnings = [] # warnings while writing + + # specials + self.sectionlabel = '' # most recent \label command + self.thisclass = '' # most recent classdesc name + self.sectionmeta = None # current section metadata + self.noescape = 0 # don't escape text nodes + self.indexsubitem = '' # current \withsubitem text + + def write_document(self, rootnode): + """ Write a document, represented by a RootNode. """ + assert type(rootnode) is RootNode + + if self.deflang: + self.write_directive('highlightlang', self.deflang) + + self.visit_node(rootnode) + self.write_footnotes() + + def new_chapter(self): + """ Called if self.splitchap is True. Create a new file pointer + and set self.fp to it. """ + new_fp = StringIO.StringIO() + self.chapters.append(new_fp) + self.fp = new_fp + + def write(self, text='', nl=True, first=False): + """ Write a string to the output file. 
""" + if first: + self.fp.write((self.indentation if self.indentfirstline else '') + text) + self.indentfirstline = True + elif text: # don't write indentation only + self.fp.write(self.indentation + text) + if nl: + self.fp.write('\n') + + def write_footnotes(self): + """ Write the current footnotes, if any. """ + self.flush_par() + if self.footnotes: + self.write('.. rubric:: Footnotes\n') + footnotes = self.footnotes + self.footnotes = [] # first clear since indented() will flush + for footnode in footnotes: + self.write('.. [#] ', nl=False) + with self.indented(3, firstline=False): + self.visit_node(footnode) + + def write_directive(self, name, args='', node=None, spabove=False, spbelow=True): + """ Helper to write a ReST directive. """ + if spabove: + self.write() + self.write('.. %s::%s' % (name, args and ' '+args)) + if spbelow: + self.write() + with self.indented(): + if node is not None: + self.visit_node(node) + + def write_sectionmeta(self): + mod = self.sectionmeta + self.sectionmeta = None + if not mod: + return + if mod.modname: + self.write('.. module:: %s' % mod.modname) + if mod.platform: + self.write(' :platform: %s' % mod.platform) + if mod.synopsis: + self.write(' :synopsis: %s' % mod.synopsis[0]) + for line in mod.synopsis[1:]: + self.write(' %s' % line) + if mod.modauthors: + for author in mod.modauthors: + self.write('.. moduleauthor:: %s' % author) + if mod.sectauthors: + for author in mod.sectauthors: + self.write('.. 
sectionauthor:: %s' % author) + self.write() + self.write() + + indexentry_mapping = { + 'index': 'single', + 'indexii': 'pair', + 'indexiii': 'triple', + 'indexiv': 'quadruple', + 'stindex': 'statement', + 'ttindex': 'single', + 'obindex': 'object', + 'opindex': 'operator', + 'kwindex': 'keyword', + 'exindex': 'exception', + 'bifuncindex': 'builtin', + 'refmodindex': 'module', + 'refbimodindex': 'module', + 'refexmodindex': 'module', + 'refstmodindex': 'module', + } + + def get_indexentries(self, entries): + """ Return a list of lines for the index entries. """ + def format_entry(cmdname, args, subitem): + textargs = [] + for arg in args: + if isinstance(arg, TextNode): + textarg = text(arg) + else: + textarg = self.get_node_text(self.get_textonly_node(arg, warn=0)) + if ';' in textarg: + raise WriterError("semicolon in index args: " + textarg) + textarg += subitem + textarg = textarg.replace('!', '; ') + textargs.append(textarg) + return '%s: %s' % (self.indexentry_mapping[cmdname], + '; '.join(textarg for textarg in textargs + if not empty(arg))) + + ret = [] + if len(entries) == 1: + ret.append('.. index:: %s' % format_entry(*entries[0])) + else: + ret.append('.. index::') + for entry in entries: + ret.append(' %s' % format_entry(*entry)) + return ret + + def get_par(self, wrap, width=None): + """ Get the contents of the current paragraph. + Returns a list if wrap and not indent, else a string. """ + if not self.curpar: + if wrap: + return [] + else: + return '' + text = ''.join(self.curpar).lstrip() + text = repair_bad_inline_markup(text) + self.curpar = [] + if wrap: + # returns a list! + wrapper.width = width or WIDTH + return wrapper.wrap(text) + else: + return text + + no_warn_textonly = set(( + 'var', 'code', 'textrm', 'emph', 'keyword', 'textit', 'programopt', + 'cfunction', 'texttt', 'email', 'constant', + )) + + def get_textonly_node(self, node, cmd='', warn=1): + """ Return a similar Node or NodeList that only has TextNode subnodes. 
+ + Warning values: + - 0: never warn + - 1: warn for markup losing information + """ + if cmd == 'code': + warn = 0 + def do(subnode): + if isinstance(subnode, TextNode): + return subnode + if isinstance(subnode, NodeList): + return NodeList(do(subsubnode) for subsubnode in subnode) + if isinstance(subnode, CommentNode): + # loses comments, but huh + return EmptyNode() + if isinstance(subnode, InlineNode): + if subnode.cmdname == 'optional': + # this is not mapped to ReST markup + return subnode + if len(subnode.args) == 1: + if warn == 1 and subnode.cmdname not in self.no_warn_textonly: + self.warnings.append('%r: Discarding %s markup in %r' % + (cmd, subnode.cmdname, node)) + return do(subnode.args[0]) + elif len(subnode.args) == 0: + # should only happen for IndexNodes which stay in + return subnode + elif len(subnode.args) == 2 and subnode.cmdname == 'refmodule': + if not warn: + return do(subnode.args[1]) + raise WriterError('get_textonly_node() failed for %r' % subnode) + return do(node) + + def get_node_text(self, node, wrap=False, width=None): + """ Write the node to a temporary paragraph and return the result + as a string. """ + with self.noflush: + self._old_curpar = self.curpar + self.curpar = [] + self.visit_node(node) + ret = self.get_par(wrap, width=width) + self.curpar = self._old_curpar + return ret + + def flush_par(self, nocb=False, nocomments=False): + """ Write the current paragraph to the output file. + Prepend index entries, append comments and footnotes. 
""" + if self.no_flushing: + raise WriterError('called flush_par() while noflush active') + if self.indexentries: + for line in self.get_indexentries(self.indexentries): + self.write(line) + self.write() + self.indexentries = [] + if self.flush_cb and not nocb: + self.flush_cb() + self.flush_cb = None + par = self.get_par(wrap=True) + if par: + for i, line in enumerate(par): + self.write(line, first=(i==0)) + self.write() + if self.comments and not nocomments: + for comment in self.comments: + self.write('.. % ' + comment) + self.write() + self.comments = [] + + def visit_wrapped(self, pre, node, post, noescape=False): + """ Write a node within a paragraph, wrapped with pre and post strings. """ + if noescape: + self.noescape += 1 + self.curpar.append(pre) + with self.noflush: + self.visit_node(node) + self.curpar.append(post) + if noescape: + self.noescape -= 1 + + def visit_node(self, node): + """ "Write" a node (appends to curpar or writes something). """ + visitfunc = getattr(self, 'visit_' + node.__class__.__name__, None) + if not visitfunc: + raise WriterError('no visit function for %s node' % node.__class__) + visitfunc(node) + + # ------------------------- node handlers ----------------------------- + + def visit_RootNode(self, node): + if node.params.get('title'): + title = self.get_node_text(node.params['title']) + hl = len(title) + self.write('*' * (hl+4)) + self.write(' %s ' % title) + self.write('*' * (hl+4)) + self.write() + + if node.params.get('author'): + self.write(':Author: %s%s' % + (self.get_node_text(node.params['author']), + (' <%s>' % self.get_node_text(node.params['authoremail']) + if 'authoremail' in node.params else ''))) + self.write() + + if node.params.get('date'): + self.write(':Date: %s' % self.get_node_text(node.params['date'])) + self.write() + + if node.params.get('release'): + self.write('.. 
|release| replace:: %s' % + self.get_node_text(node.params['release'])) + self.write() + + self.visit_NodeList(node.children) + + def visit_NodeList(self, nodelist): + for node in nodelist: + self.visit_node(node) + + def visit_CommentNode(self, node): + # no inline comments -> they are all output at the start of a new paragraph + self.comments.append(node.comment.strip()) + + sectchars = { + 'chapter': '*', + 'chapter*': '*', + 'section': '=', + 'subsection': '-', + 'subsubsection': '^', + 'paragraph': '"', + } + + sectdoubleline = [ + 'chapter', + 'chapter*', + ] + + def visit_SectioningNode(self, node): + self.flush_par() + self.sectionlabel = '' + self.thisclass = '' + self.write() + + if self.splitchap and node.cmdname.startswith('chapter'): + self.write_footnotes() + self.new_chapter() + + heading = self.get_node_text(node.args[0]).strip() + if self.sectionlabel: + self.write('.. _%s:\n' % self.sectionlabel) + hl = len(heading) + if node.cmdname in self.sectdoubleline: + self.write(self.sectchars[node.cmdname] * hl) + self.write(heading) + self.write(self.sectchars[node.cmdname] * hl) + self.write() + + def visit_EnvironmentNode(self, node): + self.flush_par() + envname = node.envname + if envname == 'notice': + type = text(node.args[0]) or 'note' + self.write_directive(type, '', node.content) + elif envname in ('seealso', 'seealso*'): + self.write_directive('seealso', '', node.content, spabove=True) + elif envname == 'abstract': + self.write_directive('topic', 'Abstract', node.content, spabove=True) + elif envname == 'quote': + with self.indented(): + self.visit_node(node.content) + self.write() + elif envname == 'quotation': + self.write_directive('epigraph', '', node.content, spabove=True) + else: + raise WriterError('no handler for %s environment' % envname) + + descmap = { + 'funcdesc': ('function', '0(1)'), + 'funcdescni': ('function', '0(1)'), + 'classdesc': ('class', '0(1)'), + 'classdesc*': ('class', '0'), + 'methoddesc': ('method', '0.1(2)'), + 
'methoddescni': ('method', '0.1(2)'), + 'excdesc': ('exception', '0'), + 'excclassdesc': ('exception', '0(1)'), + 'datadesc': ('data', '0'), + 'datadescni': ('data', '0'), + 'memberdesc': ('attribute', '0.1'), + 'memberdescni': ('attribute', '0.1'), + 'opcodedesc': ('opcode', '0 (1)'), + + 'cfuncdesc': ('cfunction', '0 1(2)'), + 'cmemberdesc': ('cmember', '1 0.2'), + 'csimplemacrodesc': ('cmacro', '0'), + 'ctypedesc': ('ctype', '1'), + 'cvardesc': ('cvar', '0 1'), + } + + def _write_sig(self, spec, args): + # don't escape "*" in signatures + self.noescape += 1 + for c in spec: + if c.isdigit(): + self.visit_node(self.get_textonly_node(args[int(c)])) + else: + self.curpar.append(c) + self.noescape -= 1 + + def visit_DescEnvironmentNode(self, node): + envname = node.envname + if envname not in self.descmap: + raise WriterError('no handler for %s environment' % envname) + + self.flush_par() + # automatically fill in the class name if not given + if envname[:9] == 'classdesc' or envname[:12] == 'excclassdesc': + self.thisclass = text(node.args[0]) + elif envname[:10] in ('methoddesc', 'memberdesc') and not \ + text(node.args[0]): + if not self.thisclass: + raise WriterError('No current class for %s member' % + text(node.args[1])) + node.args[0] = TextNode(self.thisclass) + directivename, sigspec = self.descmap[envname] + self._write_sig(sigspec, node.args) + signature = self.get_par(wrap=False) + self.write() + self.write('.. 
%s:: %s' % (directivename, signature)) + if node.additional: + for cmdname, add in node.additional: + entry = self.descmap[cmdname.replace('line', 'desc')] + if envname[:10] in ('methoddesc', 'memberdesc') and not \ + text(add[0]): + if not self.thisclass: + raise WriterError('No current class for %s member' % + text(add[1])) + add[0] = TextNode(self.thisclass) + self._write_sig(entry[1], add) + signature = self.get_par(wrap=False) + self.write(' %s%s' % (' ' * (len(directivename) - 2), + signature)) + if envname.endswith('ni'): + self.write(' :noindex:') + self.write() + with self.indented(): + self.visit_node(node.content) + + + def visit_CommandNode(self, node): + cmdname = node.cmdname + if cmdname == 'label': + labelname = self.labelprefix + text(node.args[0]).lower() + if self.no_flushing: + # in section + self.sectionlabel = labelname + else: + self.flush_par() + self.write('.. _%s:\n' % labelname) + return + + elif cmdname in ('declaremodule', 'modulesynopsis', + 'moduleauthor', 'sectionauthor', 'platform'): + self.flush_par(nocb=True, nocomments=True) + if not self.sectionmeta: + self.sectionmeta = SectionMeta() + if cmdname == 'declaremodule': + self.sectionmeta.modname = text(node.args[2]) + elif cmdname == 'modulesynopsis': + self.sectionmeta.synopsis = self.get_node_text( + self.get_textonly_node(node.args[0], warn=0), wrap=True) + elif cmdname == 'moduleauthor': + email = text(node.args[1]) + self.sectionmeta.modauthors.append( + '%s%s' % (text(node.args[0]), (email and ' <%s>' % email))) + elif cmdname == 'sectionauthor': + email = text(node.args[1]) + self.sectionmeta.sectauthors.append( + '%s%s' % (text(node.args[0]), (email and ' <%s>' % email))) + elif cmdname == 'platform': + self.sectionmeta.platform = text(node.args[0]) + self.flush_cb = lambda: self.write_sectionmeta() + return + + self.flush_par() + if cmdname.startswith('see'): + i = 2 + if cmdname == 'seemodule': + self.write('Module :mod:`%s`' % text(node.args[1])) + elif cmdname == 
'seelink': + linktext = self.get_node_text(node.args[1]) + self.write('`%s <%s>`_' % (linktext, text(node.args[0]))) + elif cmdname == 'seepep': + self.write(':pep:`%s` - %s' % (text(node.args[0]), + self.get_node_text(node.args[1]))) + elif cmdname == 'seerfc': + self.write(':rfc:`%s` - %s' % (text(node.args[0]), + text(node.args[1]))) + elif cmdname == 'seetitle': + if empty(node.args[0]): + self.write('%s' % text(node.args[1])) + else: + self.write('`%s <%s>`_' % (text(node.args[1]), + text(node.args[0]))) + elif cmdname == 'seeurl': + i = 1 + self.write('%s' % text(node.args[0])) + elif cmdname == 'seetext': + self.visit_node(node.args[0]) + return + with self.indented(): + self.visit_node(node.args[i]) + elif cmdname in ('versionchanged', 'versionadded'): + self.write('.. %s:: %s' % (cmdname, text(node.args[1]))) + if not empty(node.args[0]): + with self.indented(): + self.visit_node(node.args[0]) + self.curpar.append('.') + else: + self.write() + elif cmdname == 'deprecated': + self.write_directive('deprecated', text(node.args[0]), node.args[1], + spbelow=False) + elif cmdname == 'localmoduletable': + if self.toctree: + self.write_directive('toctree', '', spbelow=True, spabove=True) + with self.indented(): + for entry in self.toctree: + self.write(entry + '.rst') + else: + self.warnings.append('no toctree given, but \\localmoduletable in file') + elif cmdname == 'verbatiminput': + inclname = text(node.args[0]) + newname = includes_mapping.get(inclname, '../includes/' + inclname) + if newname is None: + self.write() + self.write('.. XXX includefile %s' % inclname) + return + self.write() + self.write('.. 
include:: %s' % newname) + self.write(' :literal:') + self.write() + elif cmdname == 'input': + inclname = text(node.args[0]) + newname = includes_mapping.get(inclname, None) + if newname is None: + self.write('X' 'XX: input{%s} :XX' 'X' % inclname) + return + self.write_directive('include', newname, spabove=True) + elif cmdname == 'centerline': + self.write_directive('centered', self.get_node_text(node.args[0]), + spabove=True, spbelow=True) + elif cmdname == 'XX' 'X': + self.visit_wrapped(r'**\*\*** ', node.args[0], ' **\*\***') + else: + raise WriterError('no handler for %s command' % cmdname) + + def visit_DescLineCommandNode(self, node): + # these have already been written as arguments of the corresponding + # DescEnvironmentNode + pass + + def visit_ParaSepNode(self, node): + self.flush_par() + + def visit_VerbatimNode(self, node): + if self.comments: + # these interfer with the literal block + self.flush_par() + if self.curpar: + last = self.curpar[-1].rstrip(' ') + if last.endswith(':'): + self.curpar[-1] = last + ':' + else: + self.curpar.append(' ::') + else: + self.curpar.append('::') + self.flush_par() + with self.indented(): + if isinstance(node.content, TextNode): + # verbatim + lines = textwrap.dedent(text(node.content).lstrip('\n')).split('\n') + if not lines: + return + else: + # alltt, possibly with inline formats + lines = self.get_node_text(self.get_textonly_node( + node.content, warn=0)).split('\n') + [''] + # discard leading blank links + while not lines[0].strip(): + del lines[0] + for line in lines: + self.write(line) + + note_re = re.compile('^\(\d\)$') + + def visit_TableNode(self, node): + self.flush_par() + lines = node.lines[:] + lines.insert(0, node.headings) + fmted_rows = [] + width = WIDTH - len(self.indentation) + realwidths = [0] * node.numcols + colwidth = (width / node.numcols) + 5 + # don't allow paragraphs in table cells for now + with self.noflush: + for line in lines: + cells = [] + for i, cell in enumerate(line): + par = 
self.get_node_text(cell, wrap=True, width=colwidth) + if len(par) == 1 and self.note_re.match(par[0].strip()): + # special case: escape "(1)" to avoid enumeration + par[0] = '\\' + par[0] + maxwidth = max(map(len, par)) if par else 0 + realwidths[i] = max(realwidths[i], maxwidth) + cells.append(par) + fmted_rows.append(cells) + + def writesep(char='-'): + out = ['+'] + for width in realwidths: + out.append(char * (width+2)) + out.append('+') + self.write(''.join(out)) + + def writerow(row): + lines = map(None, *row) + for line in lines: + out = ['|'] + for i, cell in enumerate(line): + if cell: + out.append(' ' + cell.ljust(realwidths[i]+1)) + else: + out.append(' ' * (realwidths[i] + 2)) + out.append('|') + self.write(''.join(out)) + + writesep('-') + writerow(fmted_rows[0]) + writesep('=') + for row in fmted_rows[1:]: + writerow(row) + writesep('-') + self.write() + + def visit_ItemizeNode(self, node): + self.flush_par() + for title, content in node.items: + if not empty(title): + # do it like in a description list + self.write(self.get_node_text(title)) + with self.indented(): + self.visit_node(content) + else: + self.curpar.append('* ') + with self.indented(2, firstline=False): + self.visit_node(content) + + def visit_EnumerateNode(self, node): + self.flush_par() + for title, content in node.items: + assert empty(title) + self.curpar.append('#. ') + with self.indented(3, firstline=False): + self.visit_node(content) + + def visit_DescriptionNode(self, node): + self.flush_par() + for title, content in node.items: + self.write(self.get_node_text(title)) + with self.indented(): + self.visit_node(content) + + visit_DefinitionsNode = visit_DescriptionNode + + def visit_ProductionListNode(self, node): + self.flush_par() + arg = text(node.arg) + self.write('.. 
productionlist::%s' % (' '+arg if arg else '')) + with self.indented(): + for item in node.items: + if not empty(item[0]): + lasttext = text(item[0]) + self.write('%s: %s' % ( + text(item[0]).ljust(len(lasttext)), + self.get_node_text(item[1]))) + self.write() + + def visit_EmptyNode(self, node): + pass + + def visit_TextNode(self, node): + if self.noescape: + self.curpar.append(node.text) + else: + self.curpar.append(fixup_text(node.text)) + + visit_NbspNode = visit_TextNode + visit_SimpleCmdNode = visit_TextNode + + def visit_BreakNode(self, node): + # XXX: linebreaks in ReST? + self.curpar.append(' --- ') + + def visit_IndexNode(self, node): + if node.cmdname == 'withsubitem': + self.indexsubitem = ' ' + text(node.indexargs[0]) + self.visit_node(node.indexargs[1]) + self.indexsubitem = '' + else: + self.indexentries.append((node.cmdname, node.indexargs, + self.indexsubitem)) + + # maps argumentless commands to text + simplecmd_mapping = { + 'NULL': '`NULL`', + 'shortversion': '|version|', + 'version': '|release|', + 'today': '|today|', + } + + # map LaTeX command names to roles: shorter names! 
+ role_mapping = { + 'cfunction': 'cfunc', + 'constant': 'const', + 'csimplemacro': 'cmacro', + 'exception': 'exc', + 'function': 'func', + 'grammartoken': 'token', + 'member': 'attr', + 'method': 'meth', + 'module': 'mod', + 'programopt': 'option', + # these mean: no change + 'cdata': '', + 'class': '', + 'command': '', + 'ctype': '', + 'data': '', # NEW + 'dfn': '', + 'envvar': '', + 'file': '', + 'filenq': '', + 'filevar': '', + 'guilabel': '', + 'kbd': '', + 'keyword': '', + 'mailheader': '', + 'makevar': '', + 'menuselection': '', + 'mimetype': '', + 'newsgroup': '', + 'option': '', + 'pep': '', + 'program': '', + 'ref': '', + 'rfc': '', + } + + # do not warn about nested inline markup in these roles + role_no_warn = set(( + 'cdata', 'cfunction', 'class', 'constant', 'csimplemacro', 'ctype', + 'data', 'exception', 'function', 'member', 'method', 'module', + )) + + def visit_InlineNode(self, node): + # XXX: no nested markup -- docutils doesn't support it + cmdname = node.cmdname + if not node.args: + self.curpar.append(self.simplecmd_mapping[cmdname]) + return + content = node.args[0] + if cmdname in ('code', 'bfcode', 'samp', 'texttt', 'regexp'): + self.visit_wrapped('``', self.get_textonly_node(content, 'code', + warn=1), '``', noescape=True) + elif cmdname in ('emph', 'textit'): + self.visit_wrapped('*', self.get_textonly_node(content, 'emph', + warn=1), '*') + elif cmdname in ('strong', 'textbf'): + self.visit_wrapped('**', self.get_textonly_node(content, 'strong', + warn=1), '**') + elif cmdname in ('b', 'textrm', 'email'): + self.visit_node(content) + elif cmdname in ('var', 'token'): + # \token appears in productionlists only + self.visit_wrapped('`', self.get_textonly_node(content, 'var', + warn=1), '`') + elif cmdname == 'ref': + self.curpar.append(':ref:`%s%s`' % (self.labelprefix, + text(node.args[0]).lower())) + elif cmdname == 'refmodule': + self.visit_wrapped(':mod:`', node.args[1], '`', noescape=True) + elif cmdname == 'optional': + 
self.visit_wrapped('[', content, ']') + elif cmdname == 'url': + self.visit_node(content) + elif cmdname == 'ulink': + target = text(node.args[1]) + if target.startswith('..'): + self.visit_wrapped('', content, ' (X' + 'XX reference: %s)' % target) + elif not target.startswith(('http:', 'mailto:')): + #self.warnings.append('Local \\ulink to %s, use \\ref instead' % target) + self.visit_wrapped('', content, ' (X' 'XX reference: %s)' % target) + else: + self.visit_wrapped('`', self.get_textonly_node(content, 'ulink', warn=1), + ' <%s>`_' % target) + elif cmdname == 'citetitle': + target = text(content) + if not target: + self.visit_node(node.args[1]) + elif target.startswith('..'): + self.visit_wrapped('', node.args[1], + ' (X' + 'XX reference: %s)' % target) + else: + self.visit_wrapped('`', self.get_textonly_node(node.args[1], + 'citetitle', warn=1), + ' <%s>`_' % target) + elif cmdname == 'character': + # ``'a'`` is not longer than :character:`a` + self.visit_wrapped("``'", content, "'``", noescape=True) + elif cmdname == 'manpage': + self.curpar.append(':manpage:`') + self.visit_node(self.get_textonly_node(content, warn=0)) + self.visit_wrapped('(', self.get_textonly_node(node.args[1], warn=0), ')') + self.curpar.append('`') + elif cmdname == 'footnote': + self.curpar.append(' [#]_') + self.footnotes.append(content) + elif cmdname == 'frac': + self.visit_wrapped('(', node.args[0], ')/') + self.visit_wrapped('(', node.args[1], ')') + elif cmdname == 'longprogramopt': + self.visit_wrapped(':option:`--', content, '`') + elif cmdname == '': + self.visit_node(content) + # stray commands from distutils + elif cmdname in ('argument name', 'value', 'attribute', 'option name'): + self.visit_wrapped('`', content, '`') + else: + self.visit_wrapped(':%s:`' % (self.role_mapping[cmdname] or cmdname), + self.get_textonly_node( + content, cmdname, warn=(cmdname not in self.role_no_warn)), '`') Added: doctools/trunk/converter/scanner.py 
============================================================================== --- (empty file) +++ doctools/trunk/converter/scanner.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- +""" + scanner + ~~~~~~~ + + This library implements a regex based scanner. + + :copyright: 2006-2007 by Armin Ronacher, Georg Brandl. + :license: BSD license. +""" +import re + + +class EndOfText(RuntimeError): + """ + Raise if end of text is reached and the user + tried to call a match function. + """ + + +class Scanner(object): + """ + Simple scanner + + All method patterns are regular expression strings (not + compiled expressions!) + """ + + def __init__(self, text, flags=0): + """ + :param text: The text which should be scanned + :param flags: default regular expression flags + """ + self.data = text + self.data_length = len(text) + self.start_pos = 0 + self.pos = 0 + self.flags = flags + self.last = None + self.match = None + self._re_cache = {} + + def eos(self): + """`True` if the scanner reached the end of text.""" + return self.pos >= self.data_length + eos = property(eos, eos.__doc__) + + def check(self, pattern): + """ + Apply `pattern` on the current position and return + the match object. (Doesn't touch pos). Use this for + lookahead. + """ + if self.eos: + raise EndOfText() + if pattern not in self._re_cache: + self._re_cache[pattern] = re.compile(pattern, self.flags) + return self._re_cache[pattern].match(self.data, self.pos) + + def test(self, pattern): + """Apply a pattern on the current position and check + if it matches. Doesn't touch pos.""" + return self.check(pattern) is not None + + def scan(self, pattern): + """ + Scan the text for the given pattern and update pos/match + and related fields. The return value is a boolean that + indicates if the pattern matched. The matched value is + stored on the instance as ``match``, the last value is + stored as ``last``. 
``start_pos`` is the position of the + pointer before the pattern was matched, ``pos`` is the + end position. + """ + if self.eos: + raise EndOfText() + if pattern not in self._re_cache: + self._re_cache[pattern] = re.compile(pattern, self.flags) + self.last = self.match + m = self._re_cache[pattern].match(self.data, self.pos) + if m is None: + return False + self.start_pos = m.start() + self.pos = m.end() + self.match = m + return True + + def get_char(self): + """Scan exactly one char.""" + self.scan('.') + + def __repr__(self): + return '<%s %d/%d>' % ( + self.__class__.__name__, + self.pos, + self.data_length + ) Added: doctools/trunk/converter/tokenizer.py ============================================================================== --- (empty file) +++ doctools/trunk/converter/tokenizer.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,124 @@ +# -*- coding: utf-8 -*- +""" + Python documentation LaTeX file tokenizer + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + For more documentation, look into the ``restwriter.py`` file. + + :copyright: 2007 by Georg Brandl. + :license: Python license. +""" + +import re + +from .scanner import Scanner + +class Tokenizer(Scanner): + """ Lex a Python doc LaTeX document. 
""" + + specials = { + '{': 'bgroup', + '}': 'egroup', + '[': 'boptional', + ']': 'eoptional', + '~': 'tilde', + '$': 'mathmode', + } + + @property + def mtext(self): + return self.match.group() + + def tokenize(self): + return TokenStream(self._tokenize()) + + def _tokenize(self): + lineno = 1 + while not self.eos: + if self.scan(r'\\verb([^a-zA-Z])(.*?)(\1)'): + # specialcase \verb here + yield lineno, 'command', 'verb', '\\verb' + yield lineno, 'text', self.match.group(1), self.match.group(1) + yield lineno, 'text', self.match.group(2), self.match.group(2) + yield lineno, 'text', self.match.group(3), self.match.group(3) + elif self.scan(r'\\([a-zA-Z]+\*?)[ \t]*'): + yield lineno, 'command', self.match.group(1), self.mtext + elif self.scan(r'\\.'): + yield lineno, 'command', self.mtext[1], self.mtext + elif self.scan(r'\\\n'): + yield lineno, 'text', self.mtext, self.mtext + lineno += 1 + elif self.scan(r'%(.*)\n[ \t]*'): + yield lineno, 'comment', self.match.group(1), self.mtext + lineno += 1 + elif self.scan(r'[{}\[\]~$]'): + yield lineno, self.specials[self.mtext], self.mtext, self.mtext + elif self.scan(r'(\n[ \t]*){2,}'): + lines = self.mtext.count('\n') + yield lineno, 'parasep', '\n' * lines, self.mtext + lineno += lines + elif self.scan(r'\n[ \t]*'): + yield lineno, 'text', ' ', self.mtext + lineno += 1 + elif self.scan(r'[^\\%}{\[\]~\n]+'): + yield lineno, 'text', self.mtext, self.mtext + else: + raise RuntimeError('unexpected text on line %d: %r' % + (lineno, self.data[self.pos:self.pos+100])) + + +class TokenStream(object): + """ + A token stream works like a normal generator just that + it supports peeking and pushing tokens back to the stream. + """ + + def __init__(self, generator): + self._generator = generator + self._pushed = [] + self.last = (1, 'initial', '') + + def __iter__(self): + return self + + def __nonzero__(self): + """ Are we at the end of the tokenstream? 
""" + if self._pushed: + return True + try: + self.push(self.next()) + except StopIteration: + return False + return True + + def pop(self): + """ Return the next token from the stream. """ + if self._pushed: + rv = self._pushed.pop() + else: + rv = self._generator.next() + self.last = rv + return rv + + next = pop + + def popmany(self, num=1): + """ Pop a list of tokens. """ + return [self.next() for i in range(num)] + + def peek(self): + """ Pop and push a token, return it. """ + token = self.next() + self.push(token) + return token + + def peekmany(self, num=1): + """ Pop and push a list of tokens. """ + tokens = self.popmany(num) + for tok in tokens: + self.push(tok) + return tokens + + def push(self, item): + """ Push a token back to the stream. """ + self._pushed.append(item) Added: doctools/trunk/converter/util.py ============================================================================== --- (empty file) +++ doctools/trunk/converter/util.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +""" + Python documentation conversion utils + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + :copyright: 2007 by Georg Brandl. + :license: Python license. 
+""" + +import re + +from docutils.nodes import make_id + +from .docnodes import TextNode, EmptyNode, NodeList + + +def umlaut(cmd, c): + try: + if cmd == '"': + return {'o': u'?', + 'a': u'?', + 'u': u'?', + 'i': u'?', + 'O': u'?', + 'A': u'?', + 'U': u'?'}[c] + elif cmd == "'": + return {'a': u'?', + 'e': u'?'}[c] + elif cmd == '~': + return {'n': u'?'}[c] + elif cmd == 'c': + return {'c': u'?'}[c] + elif cmd == '`': + return {'o': u'?'}[c] + else: + from .latexparser import ParserError + raise ParserError('invalid umlaut \\%s' % cmd, 0) + except KeyError: + from .latexparser import ParserError + raise ParserError('unsupported umlaut \\%s%s' % (cmd, c), 0) + +def fixup_text(text): + return text.replace('``', '"').replace("''", '"').replace('`', "'").\ + replace('|', '\\|').replace('*', '\\*') + +def empty(node): + return (type(node) is EmptyNode) + +def text(node): + """ Return the text for a TextNode or raise an error. """ + if isinstance(node, TextNode): + return node.text + elif isinstance(node, NodeList): + restext = '' + for subnode in node: + restext += text(subnode) + return restext + from .restwriter import WriterError + raise WriterError('text() failed for %r' % node) + +markup_re = re.compile(r'(:[a-zA-Z0-9_-]+:)?`(.*?)`') + +def my_make_id(name): + """ Like make_id(), but strip roles first. 
""" + return make_id(markup_re.sub(r'\2', name)) + +alphanum = u'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789' +wordchars_s = alphanum + u'_.-' +wordchars_e = alphanum + u'+`(-' +bad_markup_re = re.compile(r'(:[a-zA-Z0-9_-]+:)?(`{1,2})[ ]*(.+?)[ ]*(\2)') +quoted_code_re = re.compile(r'\\`(``.+?``)\'') + +def repair_bad_inline_markup(text): + # remove quoting from `\code{x}' + xtext = quoted_code_re.sub(r'\1', text) + + # special: the literal backslash + xtext = xtext.replace('``\\``', '\x03') + # special: literal backquotes + xtext = xtext.replace('``````', '\x02') + + ntext = [] + lasti = 0 + l = len(xtext) + for m in bad_markup_re.finditer(xtext): + ntext.append(xtext[lasti:m.start()]) + s, e = m.start(), m.end() + if s != 0 and xtext[s-1:s] in wordchars_s: + ntext.append('\\ ') + ntext.append((m.group(1) or '') + m.group(2) + m.group(3) + m.group(4)) + if e != l and xtext[e:e+1] in wordchars_e: + ntext.append('\\ ') + lasti = m.end() + ntext.append(xtext[lasti:]) + return ''.join(ntext).replace('\x02', '``````').replace('\x03', '``\\``') Added: doctools/trunk/etc/inst.diff ============================================================================== --- (empty file) +++ doctools/trunk/etc/inst.diff Mon Jul 23 11:02:25 2007 @@ -0,0 +1,122 @@ +Index: inst/inst.tex +=================================================================== +--- inst/inst.tex (Revision 54633) ++++ inst/inst.tex (Arbeitskopie) +@@ -324,32 +324,6 @@ + section~\ref{custom-install} on custom installations. + + +-% This rather nasty macro is used to generate the tables that describe +-% each installation scheme. It's nasty because it takes two arguments +-% for each "slot" in an installation scheme, there will soon be more +-% than five of these slots, and TeX has a limit of 10 arguments to a +-% macro. Uh-oh. 
+- +-\newcommand{\installscheme}[8] +- {\begin{tableiii}{l|l|l}{textrm} +- {Type of file} +- {Installation Directory} +- {Override option} +- \lineiii{pure module distribution} +- {\filevar{#1}\filenq{#2}} +- {\longprogramopt{install-purelib}} +- \lineiii{non-pure module distribution} +- {\filevar{#3}\filenq{#4}} +- {\longprogramopt{install-platlib}} +- \lineiii{scripts} +- {\filevar{#5}\filenq{#6}} +- {\longprogramopt{install-scripts}} +- \lineiii{data} +- {\filevar{#7}\filenq{#8}} +- {\longprogramopt{install-data}} +- \end{tableiii}} +- +- + \section{Alternate Installation} + \label{alt-install} + +@@ -399,10 +373,23 @@ + The \longprogramopt{home} option defines the installation base + directory. Files are installed to the following directories under the + installation base as follows: +-\installscheme{home}{/lib/python} +- {home}{/lib/python} +- {home}{/bin} +- {home}{/share} ++\begin{tableiii}{l|l|l}{textrm} ++ {Type of file} ++ {Installation Directory} ++ {Override option} ++ \lineiii{pure module distribution} ++ {\filevar{home}\filenq{/lib/python}} ++ {\longprogramopt{install-purelib}} ++ \lineiii{non-pure module distribution} ++ {\filevar{home}\filenq{/lib/python}} ++ {\longprogramopt{install-platlib}} ++ \lineiii{scripts} ++ {\filevar{home}\filenq{/bin}} ++ {\longprogramopt{install-scripts}} ++ \lineiii{data} ++ {\filevar{home}\filenq{/share}} ++ {\longprogramopt{install-data}} ++\end{tableiii} + + + \versionchanged[The \longprogramopt{home} option used to be supported +@@ -452,10 +439,23 @@ + etc.) If \longprogramopt{exec-prefix} is not supplied, it defaults to + \longprogramopt{prefix}. 
Files are installed as follows: + +-\installscheme{prefix}{/lib/python2.\filevar{X}/site-packages} +- {exec-prefix}{/lib/python2.\filevar{X}/site-packages} +- {prefix}{/bin} +- {prefix}{/share} ++\begin{tableiii}{l|l|l}{textrm} ++ {Type of file} ++ {Installation Directory} ++ {Override option} ++ \lineiii{pure module distribution} ++ {\filevar{prefix}\filenq{/lib/python2.\filevar{X}/site-packages}} ++ {\longprogramopt{install-purelib}} ++ \lineiii{non-pure module distribution} ++ {\filevar{exec-prefix}\filenq{/lib/python2.\filevar{X}/site-packages}} ++ {\longprogramopt{install-platlib}} ++ \lineiii{scripts} ++ {\filevar{prefix}\filenq{/bin}} ++ {\longprogramopt{install-scripts}} ++ \lineiii{data} ++ {\filevar{prefix}\filenq{/share}} ++ {\longprogramopt{install-data}} ++\end{tableiii} + + There is no requirement that \longprogramopt{prefix} or + \longprogramopt{exec-prefix} actually point to an alternate Python +@@ -502,11 +502,24 @@ + The installation base is defined by the \longprogramopt{prefix} option; + the \longprogramopt{exec-prefix} option is not supported under Windows. 
+ Files are installed as follows: +-\installscheme{prefix}{} +- {prefix}{} +- {prefix}{\textbackslash{}Scripts} +- {prefix}{\textbackslash{}Data} + ++\begin{tableiii}{l|l|l}{textrm} ++ {Type of file} ++ {Installation Directory} ++ {Override option} ++ \lineiii{pure module distribution} ++ {\filevar{prefix}\filenq{}} ++ {\longprogramopt{install-purelib}} ++ \lineiii{non-pure module distribution} ++ {\filevar{prefix}\filenq{}} ++ {\longprogramopt{install-platlib}} ++ \lineiii{scripts} ++ {\filevar{prefix}\filenq{\textbackslash{}Scripts}} ++ {\longprogramopt{install-scripts}} ++ \lineiii{data} ++ {\filevar{prefix}\filenq{\textbackslash{}Data}} ++ {\longprogramopt{install-data}} ++\end{tableiii} + + + \section{Custom Installation} Added: doctools/trunk/sphinx-build.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx-build.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +""" + Sphinx - Python documentation toolchain + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + :copyright: 2007 by Georg Brandl. + :license: Python license. +""" + +import sys + +if __name__ == '__main__': + from sphinx import main + try: + sys.exit(main(sys.argv)) + except Exception: + import traceback + traceback.print_exc() + import pdb + pdb.post_mortem(sys.exc_traceback) Added: doctools/trunk/sphinx-web.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx-web.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +""" + Sphinx - Python documentation webserver + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + :copyright: 2007 by Armin Ronacher, Georg Brandl. + :license: Python license. 
+""" +import os +import sys +import getopt + +import sphinx +from sphinx.web.application import setup_app +from sphinx.web.serve import run_simple + +try: + from werkzeug.debug import DebuggedApplication +except ImportError: + DebuggedApplication = lambda x, y: x + + +def main(argv): + opts, args = getopt.getopt(argv[1:], "dhf:") + opts = dict(opts) + if len(args) != 1 or '-h' in opts: + print 'usage: %s [-d] [-f cfg.py] ' % argv[0] + print ' -d: debug mode, use werkzeug debugger if installed' + print ' -f: use "cfg.py" file instead of doc_root/webconf.py' + return 2 + + conffile = opts.get('-f', os.path.join(args[0], 'webconf.py')) + config = {} + execfile(conffile, config) + + port = config.get('listen_port', 3000) + hostname = config.get('listen_addr', 'localhost') + debug = ('-d' in opts) or (hostname == 'localhost') + + config['data_root_path'] = args[0] + config['debug'] = debug + + def make_app(): + app = setup_app(config, check_superuser=True) + if debug: + app = DebuggedApplication(app, True) + return app + + if os.environ.get('RUN_MAIN') != 'true': + print '* Sphinx %s- Python documentation web application' % \ + sphinx.__version__.replace('$', '').replace('Revision:', 'rev.') + if debug: + print '* Running in debug mode' + + run_simple(hostname, port, make_app, use_reloader=debug) + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) Added: doctools/trunk/sphinx/__init__.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/__init__.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,127 @@ +# -*- coding: utf-8 -*- +""" + Sphinx + ~~~~~~ + + The Python documentation toolchain. + + :copyright: 2007 by Georg Brandl. + :license: Python license. 
+""" + +import sys +import getopt +from os import path + +from .builder import builders +from .console import nocolor + +__version__ = '$Revision: 5369 $' + + +def usage(argv, msg=None): + if msg: + print >>sys.stderr, msg + print >>sys.stderr + print >>sys.stderr, """\ +usage: %s [options] sourcedir outdir [filenames...]" +options: -b -- builder to use (one of %s) + -a -- write all files; default is to only write new and changed files + -O -- give option to to the builder (-O help for list) + -D -- override a setting in sourcedir/conf.py + -N -- do not do colored output +modi: +* without -a and without filenames, write new and changed files. +* with -a, write all files. +* with filenames, write these.""" % (argv[0], ', '.join(builders)) + + +def main(argv): + try: + opts, args = getopt.getopt(argv[1:], 'ab:O:D:N') + srcdirname = path.abspath(args[0]) + if not path.isdir(srcdirname): + print >>sys.stderr, 'Error: Cannot find source directory.' + return 1 + if not path.isfile(path.join(srcdirname, 'conf.py')): + print >>sys.stderr, 'Error: Source directory doesn\'t contain conf.py file.' + return 1 + outdirname = path.abspath(args[1]) + if not path.isdir(outdirname): + print >>sys.stderr, 'Error: Cannot find output directory.' + return 1 + except (IndexError, getopt.error): + usage(argv) + return 1 + + filenames = args[2:] + err = 0 + for filename in filenames: + if not path.isfile(filename): + print >>sys.stderr, 'Cannot find file %r.' 
% filename + err = 1 + if err: + return 1 + + builder = all_files = None + opt_help = False + options = {} + confoverrides = {} + for opt, val in opts: + if opt == '-b': + if val not in builders: + usage(argv, 'Invalid builder value specified.') + return 1 + builder = val + elif opt == '-a': + if filenames: + usage(argv, 'Cannot combine -a option and filenames.') + return 1 + all_files = True + elif opt == '-O': + if val == 'help': + opt_help = True + continue + if '=' in val: + key, val = val.split('=') + try: + val = int(val) + except: pass + else: + key, val = val, True + options[key] = val + elif opt == '-D': + key, val = val.split('=') + try: + val = int(val) + except: pass + confoverrides[key] = val + elif opt == '-N': + nocolor() + + if builder is None: + print 'No builder selected, using default: html' + builder = 'html' + + builderobj = builders[builder] + + if opt_help: + print 'Options recognized by the %s builder:' % builder + for optname, description in builderobj.option_spec.iteritems(): + print ' * %s: %s' % (optname, description) + return 0 + + builderobj = builderobj(srcdirname, outdirname, options, + status_stream=sys.stdout, + warning_stream=sys.stderr, + confoverrides=confoverrides) + if all_files: + builderobj.build_all() + elif filenames: + builderobj.build_specific(filenames) + else: + builderobj.build_update() + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) Added: doctools/trunk/sphinx/_jinja.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/_jinja.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +""" + sphinx._jinja + ~~~~~~~~~~~~~ + + Jinja glue. + + :copyright: 2007 by Georg Brandl. + :license: Python license. 
+""" +from __future__ import absolute_import + +import sys +from os import path + +sys.path.insert(0, path.dirname(__file__)) + +from jinja import Environment, FileSystemLoader Added: doctools/trunk/sphinx/addnodes.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/addnodes.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +""" + sphinx.addnodes + ~~~~~~~~~~~~~~~ + + :copyright: 2007 by Georg Brandl. + :license: Python license. +""" + +from docutils import nodes + +# index markup +class index(nodes.Invisible, nodes.Inline, nodes.TextElement): pass + +# description units (classdesc, funcdesc etc.) +class desc(nodes.Admonition, nodes.Element): pass +class desc_content(nodes.General, nodes.Element): pass +class desc_signature(nodes.Part, nodes.Inline, nodes.TextElement): pass +class desc_classname(nodes.Part, nodes.Inline, nodes.TextElement): pass +class desc_name(nodes.Part, nodes.Inline, nodes.TextElement): pass +class desc_parameterlist(nodes.Part, nodes.Inline, nodes.TextElement): pass +class desc_parameter(nodes.Part, nodes.Inline, nodes.TextElement): pass +class desc_optional(nodes.Part, nodes.Inline, nodes.TextElement): pass + +# refcount annotation +class refcount(nodes.emphasis): pass + +# \versionadded, \versionchanged, \deprecated +class versionmodified(nodes.Admonition, nodes.TextElement): pass + +# seealso +class seealso(nodes.Admonition, nodes.Element): pass + +# productionlist +class productionlist(nodes.Admonition, nodes.Element): pass +class production(nodes.Part, nodes.Inline, nodes.TextElement): pass + +# toc tree +class toctree(nodes.General, nodes.Element): pass + +# centered +class centered(nodes.Part, nodes.Element): pass + +# pending xref +class pending_xref(nodes.Element): pass + +# compact paragraph -- never makes a
<p>
+class compact_paragraph(nodes.paragraph): pass + +# sets the highlighting language for literal blocks +class highlightlang(nodes.Element): pass + +# make them known to docutils. this is needed, because the HTMl writer +# will choke at some point if these are not added +nodes._add_node_class_names("""index desc desc_content desc_signature + desc_classname desc_name desc_parameterlist desc_parameter desc_optional + centered versionmodified seealso productionlist production toctree + pending_xref compact_paragraph highlightlang""".split()) Added: doctools/trunk/sphinx/builder.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/builder.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,608 @@ +# -*- coding: utf-8 -*- +""" + sphinx.builder + ~~~~~~~~~~~~~~ + + Builder classes for different output formats. + + :copyright: 2007 by Georg Brandl. + :license: Python license. +""" +from __future__ import with_statement + +import os +import sys +import time +import types +import codecs +import shutil +import cPickle as pickle +import cStringIO as StringIO +from os import path + +from docutils.io import StringOutput, DocTreeInput +from docutils.core import publish_parts +from docutils.utils import new_document +from docutils.readers import doctree +from docutils.frontend import OptionParser + +from .util import (get_matching_files, attrdict, status_iterator, + ensuredir, get_category, relative_uri) +from .writer import HTMLWriter +from .console import bold, purple, green +from .htmlhelp import build_hhx +from .environment import BuildEnvironment +from .highlighting import pygments, get_stylesheet + +# side effect: registers roles and directives +from . import roles +from . 
import directives + +ENV_PICKLE_FILENAME = 'environment.pickle' +LAST_BUILD_FILENAME = 'last_build' + +# Helper objects + +class relpath_to(object): + def __init__(self, builder, filename): + self.baseuri = builder.get_target_uri(filename) + self.builder = builder + def __call__(self, otheruri, resource=False): + if not resource: + otheruri = self.builder.get_target_uri(otheruri) + return relative_uri(self.baseuri, otheruri) + + +class collect_env_warnings(object): + def __init__(self, builder): + self.builder = builder + def __enter__(self): + self.stream = StringIO.StringIO() + self.builder.env.set_warning_stream(self.stream) + def __exit__(self, *args): + self.builder.env.set_warning_stream(self.builder.warning_stream) + warnings = self.stream.getvalue() + if warnings: + print >>self.builder.warning_stream, warnings + + +class Builder(object): + """ + Builds target formats from the reST sources. + """ + + option_spec = { + 'freshenv': 'Don\'t use a pickled environment', + } + + def __init__(self, srcdirname, outdirname, options, env=None, + status_stream=None, warning_stream=None, + confoverrides=None): + self.srcdir = srcdirname + self.outdir = outdirname + if not path.isdir(path.join(outdirname, '.doctrees')): + os.mkdir(path.join(outdirname, '.doctrees')) + + self.options = attrdict(options) + self.validate_options() + + # probably set in load_env() + self.env = env + + self.config = {} + execfile(path.join(srcdirname, 'conf.py'), self.config) + # remove potentially pickling-problematic values + del self.config['__builtins__'] + for key, val in self.config.items(): + if isinstance(val, types.ModuleType): + del self.config[key] + if confoverrides: + self.config.update(confoverrides) + + self.status_stream = status_stream or sys.stdout + self.warning_stream = warning_stream or sys.stderr + + self.init() + + # helper methods + + def validate_options(self): + for option in self.options: + if option not in self.option_spec: + raise ValueError('Got unexpected 
option %s' % option) + for option in self.option_spec: + if option not in self.options: + self.options[option] = False + + def msg(self, message='', nonl=False, nobold=False): + if not nobold: message = bold(message) + if nonl: + print >>self.status_stream, message, + else: + print >>self.status_stream, message + self.status_stream.flush() + + def init(self): + """Load necessary templates and perform initialization.""" + raise NotImplementedError + + def get_target_uri(self, source_filename): + """Return the target URI for a source filename.""" + raise NotImplementedError + + def get_relative_uri(self, from_, to): + """Return a relative URI between two source filenames.""" + return relative_uri(self.get_target_uri(from_), + self.get_target_uri(to)) + + def get_outdated_files(self): + """Return a list of output files that are outdated.""" + raise NotImplementedError + + # build methods + + def load_env(self): + """Set up the build environment. Return True if a pickled file could be + successfully loaded, False if a new environment had to be created.""" + if self.env: + return + if not self.options.freshenv: + try: + self.msg('trying to load pickled env...', nonl=True) + self.env = BuildEnvironment.frompickle( + path.join(self.outdir, ENV_PICKLE_FILENAME)) + self.msg('done', nobold=True) + except Exception, err: + self.msg('failed: %s' % err, nobold=True) + self.env = BuildEnvironment(self.srcdir, + path.join(self.outdir, '.doctrees')) + else: + self.env = BuildEnvironment(self.srcdir, + path.join(self.outdir, '.doctrees')) + + def build_all(self): + """Build all source files.""" + self.load_env() + self.build(None, summary='all source files') + + def build_specific(self, source_filenames): + """Only rebuild as much as needed for changes in the source_filenames.""" + # bring the filenames to the canonical format, that is, + # relative to the source directory. 
+ dirlen = len(self.srcdir) + 1 + to_write = [path.abspath(filename)[dirlen:] for filename in source_filenames] + self.load_env() + self.build(to_write, + summary='%d source files given on command line' % len(to_write)) + + def build_update(self): + """Only rebuild files changed or added since last build.""" + self.load_env() + to_build = list(self.get_outdated_files()) + if not to_build: + self.msg('no files are out of date, exiting.') + return + self.build(to_build, + summary='%d source files that are out of date' % len(to_build)) + + def build(self, filenames, summary=None): + if summary: + self.msg('building [%s]:' % self.name, nonl=1) + self.msg(summary, nobold=1) + + # while reading, collect all warnings from docutils + with collect_env_warnings(self): + self.msg('reading, updating environment:', nonl=1) + iterator = self.env.update(self.config) + self.msg(iterator.next(), nobold=1) + for filename in iterator: + self.msg(purple(filename), nonl=1, nobold=1) + self.msg() + + # save the environment + self.msg('pickling the env...', nonl=True) + self.env.topickle(path.join(self.outdir, ENV_PICKLE_FILENAME)) + self.msg('done', nobold=True) + + # global actions + self.msg('checking consistency...') + self.env.check_consistency() + self.msg('creating index...') + self.env.create_index(self) + + self.prepare_writing() + + if filenames: + # add all TOC files that may have changed + filenames_set = set(filenames) + for filename in filenames: + for tocfilename in self.env.files_to_rebuild.get(filename, []): + filenames_set.add(tocfilename) + filenames_set.add('contents.rst') + else: + # build all + filenames_set = set(self.env.all_files) + + # write target files + with collect_env_warnings(self): + self.msg('writing output...') + for filename in status_iterator(sorted(filenames_set), green, + stream=self.status_stream): + doctree = self.env.get_and_resolve_doctree(filename, self) + self.write_file(filename, doctree) + + # finish (write style files etc.) 
+ self.msg('finishing...') + self.finish() + self.msg('done!') + + def prepare_writing(self): + raise NotImplementedError + + def write_file(self, filename, doctree): + raise NotImplementedError + + def finish(self): + raise NotImplementedError + + +class StandaloneHTMLBuilder(Builder): + """ + Builds standalone HTML docs. + """ + name = 'html' + + option_spec = Builder.option_spec + option_spec.update({ + 'nostyle': 'Don\'t copy style and script files', + 'nosearchindex': 'Don\'t create a JSON search index for offline search', + }) + + copysource = True + + def init(self): + """Load templates.""" + # lazily import this, maybe other builders won't need it + from ._jinja import Environment, FileSystemLoader + + # load templates + self.templates = {} + templates_path = path.join(path.dirname(__file__), 'templates') + jinja_env = Environment(loader=FileSystemLoader(templates_path), + # disable traceback, more likely that something in the + # application is broken than in the templates + friendly_traceback=False) + for fname in os.listdir(templates_path): + if fname.endswith('.html'): + self.templates[fname[:-5]] = jinja_env.get_template(fname) + + def render_partial(self, node): + """Utility: Render a lone doctree node.""" + doc = new_document('foo') + doc.append(node) + return publish_parts( + doc, + source_class=DocTreeInput, + reader=doctree.Reader(), + writer=HTMLWriter(self.config), + settings_overrides={'output_encoding': 'unicode'} + ) + + def prepare_writing(self): + if not self.options.nosearchindex: + from .search import IndexBuilder + self.indexer = IndexBuilder() + else: + self.indexer = None + self.docwriter = HTMLWriter(self.config) + self.docsettings = OptionParser( + defaults=self.env.settings, + components=(self.docwriter,)).get_default_values() + + # format the "last updated on" string, only once is enough since it + # typically doesn't include the time of day + lufmt = self.config.get('last_updated_format') + if lufmt: + self.last_updated = 
time.strftime(lufmt) + else: + self.last_updated = None + + self.globalcontext = dict( + last_updated = self.last_updated, + builder = self.name, + release = self.config['release'], + parents = [], + len = len, + titles = {}, + ) + + def write_file(self, filename, doctree): + destination = StringOutput(encoding='utf-8') + doctree.settings = self.docsettings + + output = self.docwriter.write(doctree, destination) + self.docwriter.assemble_parts() + + prev = next = None + parents = [] + related = self.env.toctree_relations.get(filename) + if related: + prev = {'link': self.get_relative_uri(filename, related[1]), + 'title': self.render_partial(self.env.titles[related[1]])['title']} + next = {'link': self.get_relative_uri(filename, related[2]), + 'title': self.render_partial(self.env.titles[related[2]])['title']} + while related: + parents.append( + {'link': self.get_relative_uri(filename, related[0]), + 'title': self.render_partial(self.env.titles[related[0]])['title']}) + related = self.env.toctree_relations.get(related[0]) + if parents: + parents.pop() # remove link to "contents.rst"; we have a generic + # "back to index" link already + parents.reverse() + + title = self.env.titles.get(filename) + if title: + title = self.render_partial(title)['title'] + else: + title = '' + self.globalcontext['titles'][filename] = title + sourcename = filename[:-4] + '.txt' + context = dict( + title = title, + sourcename = sourcename, + pathto = relpath_to(self, self.get_target_uri(filename)), + body = self.docwriter.parts['fragment'], + toc = self.render_partial(self.env.get_toc_for(filename))['fragment'], + # only display a TOC if there's more than one item to show + display_toc = (self.env.toc_num_entries[filename] > 1), + parents = parents, + prev = prev, + next = next, + ) + + self.index_file(filename, doctree, title) + self.handle_file(filename, context) + + def finish(self): + self.msg('writing additional files...') + + # the global general index + + # the total count of 
lines for each index letter, used to distribute + # the entries into two columns + indexcounts = [] + for key, entries in self.env.index: + indexcounts.append(sum(1 + len(subitems) for _, (_, subitems) in entries)) + + genindexcontext = dict( + genindexentries = self.env.index, + genindexcounts = indexcounts, + current_page_name = 'genindex', + pathto = relpath_to(self, self.get_target_uri('genindex.rst')), + ) + self.handle_file('genindex.rst', genindexcontext, 'genindex') + + # the global module index + + # the sorted list of all modules, for the global module index + modules = sorted(((mn, (self.get_relative_uri('modindex.rst', fn) + + '#module-' + mn, sy, pl)) + for (mn, (fn, sy, pl)) in self.env.modules.iteritems()), + key=lambda x: x[0].lower()) + # collect all platforms + platforms = set() + # sort out collapsable modules + modindexentries = [] + pmn = '' + cg = 0 # collapse group + fl = '' # first letter + for mn, (fn, sy, pl) in modules: + pl = pl.split(', ') if pl else [] + platforms.update(pl) + if fl != mn[0].lower() and mn[0] != '_': + modindexentries.append(['', False, 0, False, mn[0].upper(), '', []]) + tn = mn.partition('.')[0] + if tn != mn: + # submodule + if pmn == tn: + # first submodule - make parent collapsable + modindexentries[-1][1] = True + elif not pmn.startswith(tn): + # submodule without parent in list, add dummy entry + cg += 1 + modindexentries.append([tn, True, cg, False, '', '', []]) + else: + cg += 1 + modindexentries.append([mn, False, cg, (tn != mn), fn, sy, pl]) + pmn = mn + fl = mn[0].lower() + platforms = sorted(platforms) + + modindexcontext = dict( + modindexentries = modindexentries, + platforms = platforms, + current_page_name = 'modindex', + pathto = relpath_to(self, self.get_target_uri('modindex.rst')), + ) + self.handle_file('modindex.rst', modindexcontext, 'modindex') + + # the index page + indexcontext = dict( + pathto = relpath_to(self, self.get_target_uri('index.rst')), + current_page_name = 'index', + ) + 
self.handle_file('index.rst', indexcontext, 'index') + + # the search page + searchcontext = dict( + pathto = relpath_to(self, self.get_target_uri('search.rst')), + current_page_name = 'search', + ) + self.handle_file('search.rst', searchcontext, 'search') + + if not self.options.nostyle: + self.msg('copying style files...') + # copy style files + styledirname = path.join(path.dirname(__file__), 'style') + ensuredir(path.join(self.outdir, 'style')) + for filename in os.listdir(styledirname): + if not filename.startswith('.'): + shutil.copyfile(path.join(styledirname, filename), + path.join(self.outdir, 'style', filename)) + # add pygments style file + f = open(path.join(self.outdir, 'style', 'pygments.css'), 'w') + if pygments: + f.write(get_stylesheet()) + f.close() + + # dump the search index + self.handle_finish() + + # --------- these are overwritten by the Web builder + + def get_target_uri(self, source_filename): + return source_filename[:-4] + '.html' + + def get_outdated_files(self): + for filename in get_matching_files( + self.srcdir, '*.rst', exclude=set(self.config.get('unused_files', ()))): + try: + targetmtime = path.getmtime(path.join(self.outdir, + filename[:-4] + '.html')) + except: + targetmtime = 0 + if path.getmtime(path.join(self.srcdir, filename)) > targetmtime: + yield filename + + def index_file(self, filename, doctree, title): + # only index pages with title + if self.indexer is not None and title: + category = get_category(filename) + if category is not None: + self.indexer.feed(self.get_target_uri(filename)[:-5], # strip '.html' + category, title, doctree) + + def handle_file(self, filename, context, templatename='page'): + ctx = self.globalcontext.copy() + ctx.update(context) + output = self.templates[templatename].render(ctx) + outfilename = path.join(self.outdir, filename[:-4] + '.html') + ensuredir(path.dirname(outfilename)) # normally different from self.outdir + try: + with codecs.open(outfilename, 'w', 'utf-8') as fp: + 
fp.write(output) + except (IOError, OSError), err: + print >>self.warning_stream, "Error writing file %s: %s" % (outfilename, err) + if self.copysource and context.get('sourcename'): + # copy the source file for the "show source" link + shutil.copyfile(path.join(self.srcdir, filename), + path.join(self.outdir, context['sourcename'])) + + def handle_finish(self): + if self.indexer is not None: + self.msg('dumping search index...') + f = open(path.join(self.outdir, 'searchindex.json'), 'w') + self.indexer.dump(f, 'json') + f.close() + + +class WebHTMLBuilder(StandaloneHTMLBuilder): + """ + Builds HTML docs usable with the web-based doc server. + """ + name = 'web' + + # doesn't use the standalone specific options + option_spec = Builder.option_spec.copy() + option_spec.update({ + 'nostyle': 'Don\'t copy style and script files', + 'nosearchindex': 'Don\'t create a search index for the online search', + }) + + def init(self): + # Nothing to do here. + pass + + def get_outdated_files(self): + for filename in get_matching_files( + self.srcdir, '*.rst', exclude=set(self.config.get('unused_files', ()))): + try: + targetmtime = path.getmtime(path.join(self.outdir, + filename[:-4] + '.fpickle')) + except: + targetmtime = 0 + if path.getmtime(path.join(self.srcdir, filename)) > targetmtime: + yield filename + + def get_target_uri(self, source_filename): + if source_filename == 'index.rst': + return '' + if source_filename.endswith('/index.rst'): + return source_filename[:-9] # up to / + return source_filename[:-4] + '/' + + def index_file(self, filename, doctree, title): + # only index pages with title and category + if self.indexer is not None and title: + category = get_category(filename) + if category is not None: + self.indexer.feed(filename, category, title, doctree) + + def handle_file(self, filename, context, templatename='page'): + outfilename = path.join(self.outdir, filename[:-4] + '.fpickle') + ensuredir(path.dirname(outfilename)) + context.pop('pathto', None) # 
can't be pickled + with file(outfilename, 'wb') as fp: + pickle.dump(context, fp, 2) + + # if there is a source file, copy the source file for the "show source" link + if context.get('sourcename'): + source_name = path.join(self.outdir, 'sources', context['sourcename']) + ensuredir(path.dirname(source_name)) + shutil.copyfile(path.join(self.srcdir, filename), source_name) + + def handle_finish(self): + # dump the global context + outfilename = path.join(self.outdir, 'globalcontext.pickle') + with file(outfilename, 'wb') as fp: + pickle.dump(self.globalcontext, fp, 2) + + if self.indexer is not None: + self.msg('dumping search index...') + f = open(path.join(self.outdir, 'searchindex.pickle'), 'w') + self.indexer.dump(f, 'pickle') + f.close() + # touch 'last build' file, used by the web application to determine + # when to reload its environment and clear the cache + open(path.join(self.outdir, LAST_BUILD_FILENAME), 'w').close() + # copy configuration file if not present + if not path.isfile(path.join(self.outdir, 'webconf.py')): + shutil.copyfile(path.join(path.dirname(__file__), 'web', 'webconf.py'), + path.join(self.outdir, 'webconf.py')) + + +class HTMLHelpBuilder(StandaloneHTMLBuilder): + """ + Builder that also outputs Windows HTML help project, contents and index files. + Adapted from the original Doc/tools/prechm.py. 
+ """ + name = 'htmlhelp' + + option_spec = Builder.option_spec.copy() + option_spec.update({ + 'outname': 'Output file base name (default "pydoc")' + }) + + # don't copy the reST source + copysource = False + + def handle_finish(self): + build_hhx(self, self.outdir, self.options.get('outname') or 'pydoc') + + +builders = { + 'html': StandaloneHTMLBuilder, + 'web': WebHTMLBuilder, + 'htmlhelp': HTMLHelpBuilder, +} Added: doctools/trunk/sphinx/console.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/console.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +""" + sphinx.console + ~~~~~~~~~~~~~~ + + Format colored console output. + + :copyright: 2007 by Georg Brandl. + :license: Python license. +""" + +codes = {} + +def nocolor(): + codes.clear() + +def colorize(name, text): + return codes.get(name, '') + text + codes.get('reset', '') + +def create_color_func(name): + def inner(text): + return colorize(name, text) + globals()[name] = inner + +_attrs = { + 'reset': '39;49;00m', + 'bold': '01m', + 'faint': '02m', + 'standout': '03m', + 'underline': '04m', + 'blink': '05m', +} + +for name, value in _attrs.items(): + codes[name] = '\x1b[' + value + +_colors = [ + ('black', 'darkgray'), + ('darkred', 'red'), + ('darkgreen', 'green'), + ('brown', 'yellow'), + ('darkblue', 'blue'), + ('purple', 'fuchsia'), + ('turquoise', 'teal'), + ('lightgray', 'white'), +] + +for i, (dark, light) in enumerate(_colors): + codes[dark] = '\x1b[%im' % (i+30) + codes[light] = '\x1b[%i;01m' % (i+30) + +for name in codes: + create_color_func(name) Added: doctools/trunk/sphinx/directives.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/directives.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,519 @@ +# -*- coding: utf-8 -*- +""" + sphinx.directives + ~~~~~~~~~~~~~~~~~ + + Handlers for additional ReST directives. 
+ + :copyright: 2007 by Georg Brandl. + :license: Python license. +""" + +import re +import string +from os import path + +from docutils import nodes +from docutils.parsers.rst import directives, roles +from docutils.parsers.rst.directives import admonitions + +from . import addnodes + +# ------ index markup -------------------------------------------------------------- + +entrytypes = [ + 'single', 'pair', 'triple', 'quadruple', + 'module', 'keyword', 'operator', 'object', 'exception', 'statement', 'builtin', +] + +def index_directive(name, arguments, options, content, lineno, + content_offset, block_text, state, state_machine): + arguments = arguments[0].split('\n') + env = state.document.settings.env + targetid = 'index-%s' % env.index_num + env.index_num += 1 + targetnode = nodes.target('', '', ids=[targetid]) + state.document.note_explicit_target(targetnode) + indexnode = addnodes.index() + indexnode['entries'] = arguments + for entry in arguments: + try: + type, string = entry.split(':', 1) + env.note_index_entry(type.strip(), string.strip(), + targetid, string.strip()) + except ValueError: + continue + return [indexnode, targetnode] + +index_directive.arguments = (1, 0, 1) +directives.register_directive('index', index_directive) + +# ------ information units --------------------------------------------------------- + +def desc_index_text(desctype, currmodule, name): + if desctype == 'function': + if not currmodule: + return '%s() (built-in function)' % name + return '%s() (in module %s)' % (name, currmodule) + elif desctype == 'data': + if not currmodule: + return '%s (built-in variable)' % name + return '%s (in module %s)' % (name, currmodule) + elif desctype == 'class': + return '%s (class in %s)' % (name, currmodule) + elif desctype == 'exception': + return name + elif desctype == 'method': + try: + clsname, methname = name.rsplit('.', 1) + except: + if currmodule: + return '%s() (in module %s)' % (name, currmodule) + else: + return '%s()' % name + if 
currmodule: + return '%s() (%s.%s method)' % (methname, currmodule, clsname) + else: + return '%s() (%s method)' % (methname, clsname) + elif desctype == 'attribute': + try: + clsname, attrname = name.rsplit('.', 1) + except: + if currmodule: + return '%s (in module %s)' % (name, currmodule) + else: + return name + if currmodule: + return '%s (%s.%s attribute)' % (attrname, currmodule, clsname) + else: + return '%s (%s attribute)' % (attrname, clsname) + elif desctype == 'opcode': + return '%s (opcode)' % name + elif desctype == 'cfunction': + return '%s (C function)' % name + elif desctype == 'cmember': + return '%s (C member)' % name + elif desctype == 'cmacro': + return '%s (C macro)' % name + elif desctype == 'ctype': + return '%s (C type)' % name + elif desctype == 'cvar': + return '%s (C variable)' % name + else: + raise ValueError("unhandled descenv: %s" % desctype) + + +# ------ functions to parse a Python or C signature and create desc_* nodes. + +py_sig_re = re.compile(r'''^([\w.]*\.)? # class names + (\w+) \s* # thing name + (?: \((.*)\) )? $ # optionally arguments + ''', re.VERBOSE) + +py_paramlist_re = re.compile(r'([\[\],])') # split at '[', ']' and ',' + +def parse_py_signature(signode, sig, desctype, currclass): + """ + Transform a python signature into RST nodes. Returns (signode, fullname). + Return the fully qualified name of the thing. + + If inside a class, the current class name is handled intelligently: + * it is stripped from the displayed name if present + * it is added to the full name (return value) if not present + """ + m = py_sig_re.match(sig) + if m is None: raise ValueError + classname, name, arglist = m.groups() + + if currclass: + if classname and classname.startswith(currclass): + fullname = classname + name + classname = classname[len(currclass):].lstrip('.') + elif classname: + fullname = currclass + '.' + classname + name + else: + fullname = currclass + '.' 
+ name + else: + fullname = classname + name if classname else name + + if classname: + signode += addnodes.desc_classname(classname, classname) + signode += addnodes.desc_name(name, name) + if not arglist: + if desctype in ('function', 'method'): + # for callables, add an empty parameter list + signode += addnodes.desc_parameterlist() + return fullname + signode += addnodes.desc_parameterlist() + + stack = [signode[-1]] + arglist = arglist.replace('`', '').replace(r'\ ', '') # remove markup + for token in py_paramlist_re.split(arglist): + if token == '[': + opt = addnodes.desc_optional() + stack[-1] += opt + stack.append(opt) + elif token == ']': + try: stack.pop() + except IndexError: raise ValueError + elif not token or token == ',' or token.isspace(): + pass + else: + token = token.strip() + stack[-1] += addnodes.desc_parameter(token, token) + if len(stack) != 1: raise ValueError + return fullname + + +c_sig_re = re.compile( + r'''^([^(]*?) # return type + (\w+) \s* # thing name + (?: \((.*)\) )? $ # optionally arguments + ''', re.VERBOSE) +c_funcptr_sig_re = re.compile( + r'''^([^(]+?) 
# return type + (\( [^()]+ \)) \s* # name in parentheses + \( (.*) \) $ # arguments + ''', re.VERBOSE) + +# RE to split at word boundaries +wsplit_re = re.compile(r'(\W+)') + +# These C types aren't described in the reference, so don't try to create +# a cross-reference to them +stopwords = set(('const', 'void', 'char', 'int', 'long', 'FILE', 'struct')) + +def parse_c_type(node, ctype): + # add cross-ref nodes for all words + for part in filter(None, wsplit_re.split(ctype)): + tnode = nodes.Text(part, part) + if part[0] in string.letters+'_' and part not in stopwords: + pnode = addnodes.pending_xref( + '', reftype='ctype', reftarget=part, modname=None, classname=None) + pnode += tnode + node += pnode + else: + node += tnode + +def parse_c_signature(signode, sig, desctype): + """Transform a C-language signature into RST nodes.""" + # first try the function pointer signature regex, it's more specific + m = c_funcptr_sig_re.match(sig) + if m is None: + m = c_sig_re.match(sig) + if m is None: + raise ValueError('no match') + rettype, name, arglist = m.groups() + + parse_c_type(signode, rettype) + signode += addnodes.desc_name(name, name) + if not arglist: + if desctype == 'cfunction': + # for functions, add an empty parameter list + signode += addnodes.desc_parameterlist() + return name + + paramlist = addnodes.desc_parameterlist() + arglist = arglist.replace('`', '').replace('\\ ', '') # remove markup + # this messes up function pointer types, but not too badly ;) + args = arglist.split(',') + for arg in args: + arg = arg.strip() + param = addnodes.desc_parameter('', '', noemph=True) + try: + ctype, argname = arg.rsplit(' ', 1) + except ValueError: + # no argument name given, only the type + parse_c_type(param, arg) + else: + parse_c_type(param, ctype) + param += nodes.emphasis(' '+argname, ' '+argname) + paramlist += param + signode += paramlist + return name + + +opcode_sig_re = re.compile(r'(\w+(?:\+\d)?)\s*\((.*)\)') + +def parse_opcode_signature(signode, sig, 
desctype): + """Transform an opcode signature into RST nodes.""" + m = opcode_sig_re.match(sig) + if m is None: raise ValueError + opname, arglist = m.groups() + signode += addnodes.desc_name(opname, opname) + paramlist = addnodes.desc_parameterlist() + signode += paramlist + paramlist += addnodes.desc_parameter(arglist, arglist) + return opname.strip() + + +def add_refcount_annotation(env, node, name): + """Add a reference count annotation. Return None.""" + entry = env.refcounts.get(name) + if not entry: + return + elif entry.result_type not in ("PyObject*", "PyVarObject*"): + return + rc = 'Return value: ' + if entry.result_refs is None: + rc += "Always NULL." + else: + rc += ("New" if entry.result_refs else "Borrowed") + " reference." + node += addnodes.refcount(rc, rc) + + +def desc_directive(desctype, arguments, options, content, lineno, + content_offset, block_text, state, state_machine): + env = state.document.settings.env + node = addnodes.desc() + node['desctype'] = desctype + + noindex = ('noindex' in options) + signatures = map(lambda s: s.strip(), arguments[0].split('\n')) + names = [] + for i, sig in enumerate(signatures): + # add a signature node for each signature in the current unit + # and add a reference target for it + sig = sig.strip() + signode = addnodes.desc_signature(sig, '') + signode['first'] = False + node.append(signode) + try: + if desctype in ('function', 'data', 'class', 'exception', + 'method', 'attribute'): + name = parse_py_signature(signode, sig, desctype, env.currclass) + elif desctype in ('cfunction', 'cmember', 'cmacro', 'ctype', 'cvar'): + name = parse_c_signature(signode, sig, desctype) + elif desctype == 'opcode': + name = parse_opcode_signature(signode, sig, desctype) + else: + # describe: use generic fallback + raise ValueError + except ValueError, err: + signode.clear() + signode += addnodes.desc_name(sig, sig) + continue # we don't want an index entry here + # only add target and index entry if this is the first 
description of the + # function name in this desc block + if not noindex and name not in names: + fullname = (env.currmodule + '.' if env.currmodule else '') + name + # note target + if fullname not in state.document.ids: + signode['names'].append(fullname) + signode['ids'].append(fullname) + signode['first'] = (not names) + state.document.note_explicit_target(signode) + env.note_descref(fullname, desctype) + names.append(name) + + env.note_index_entry('single', + desc_index_text(desctype, env.currmodule, name), + fullname, fullname) + + subnode = addnodes.desc_content() + if desctype == 'cfunction': + add_refcount_annotation(env, subnode, name) + # needed for automatic qualification of members + if desctype == 'class' and names: + env.currclass = names[0] + # needed for association of version{added,changed} directives + if names: + env.currdesc = names[0] + state.nested_parse(content, content_offset, subnode) + if desctype == 'class': + env.currclass = None + env.currdesc = None + node.append(subnode) + return [node] + +desc_directive.content = 1 +desc_directive.arguments = (1, 0, 1) +desc_directive.options = {'noindex': directives.flag} + +desctypes = [ + # the Python ones + 'function', + 'data', + 'class', + 'method', + 'attribute', + 'exception', + # the C ones + 'cfunction', + 'cmember', + 'cmacro', + 'ctype', + 'cvar', + # the odd one + 'opcode', + # the generic one + 'describe', +] + +for name in desctypes: + directives.register_directive(name, desc_directive) + + +# ------ versionadded/versionchanged ----------------------------------------------- + +def version_directive(name, arguments, options, content, lineno, + content_offset, block_text, state, state_machine): + node = addnodes.versionmodified() + node['type'] = name + node['version'] = arguments[0] + if len(arguments) == 2: + inodes, messages = state.inline_text(arguments[1], lineno+1) + node.extend(inodes) + if content: + state.nested_parse(content, content_offset, node) + ret = [node] + messages + 
else: + ret = [node] + env = state.document.settings.env + env.note_versionchange(node['type'], node['version'], node) + return ret + +version_directive.arguments = (1, 1, 1) +version_directive.content = 1 + +directives.register_directive('deprecated', version_directive) +directives.register_directive('versionadded', version_directive) +directives.register_directive('versionchanged', version_directive) + + +# ------ see also ------------------------------------------------------------------ + +def seealso_directive(name, arguments, options, content, lineno, + content_offset, block_text, state, state_machine): + rv = admonitions.make_admonition( + addnodes.seealso, name, ['See also:'], options, content, + lineno, content_offset, block_text, state, state_machine) + return rv + +seealso_directive.content = 1 +seealso_directive.arguments = (0, 0, 0) +directives.register_directive('seealso', seealso_directive) + + +# ------ production list (for the reference) --------------------------------------- + +def productionlist_directive(name, arguments, options, content, lineno, + content_offset, block_text, state, state_machine): + env = state.document.settings.env + node = addnodes.productionlist() + messages = [] + i = 0 + + # use token as the default role while in production list + roles._roles[''] = roles._role_registry['token'] + for rule in arguments[0].split('\n'): + if i == 0 and ':' not in rule: + # production group + continue + i += 1 + try: + name, tokens = rule.split(':', 1) + except ValueError: + break + subnode = addnodes.production() + subnode['tokenname'] = name.strip() + if subnode['tokenname']: + idname = 'grammar-token-%s' % subnode['tokenname'] + if idname not in state.document.ids: + subnode['ids'].append(idname) + state.document.note_implicit_target(subnode, subnode) + env.note_token(subnode['tokenname']) + inodes, imessages = state.inline_text(tokens, lineno+i) + subnode.extend(inodes) + messages.extend(imessages) + node.append(subnode) + del 
roles._roles[''] + return [node] + messages + +productionlist_directive.content = 0 +productionlist_directive.arguments = (1, 0, 1) +directives.register_directive('productionlist', productionlist_directive) + +# ------ section metadata ---------------------------------------------------------- + +def module_directive(name, arguments, options, content, lineno, + content_offset, block_text, state, state_machine): + env = state.document.settings.env + modname = arguments[0].strip() + env.currmodule = modname + env.note_module(modname, options.get('synopsis', ''), options.get('platform', '')) + ret = [] + targetnode = nodes.target('', '', ids=['module-' + modname]) + state.document.note_explicit_target(targetnode) + ret.append(targetnode) + if 'platform' in options: + node = nodes.paragraph() + node += nodes.emphasis('Platforms: ', 'Platforms: ') + node += nodes.Text(options['platform'], options['platform']) + ret.append(node) + # the synopsis isn't printed; in fact, it is only used in the modindex currently + env.note_index_entry('single', '%s (module)' % modname, 'module-' + modname, + modname) + return ret + +module_directive.arguments = (1, 0, 0) +module_directive.options = {'platform': lambda x: x, + 'synopsis': lambda x: x} +directives.register_directive('module', module_directive) + + +def author_directive(name, arguments, options, content, lineno, + content_offset, block_text, state, state_machine): + # The author directives aren't included in the built document + return [] + +author_directive.arguments = (1, 0, 1) +directives.register_directive('sectionauthor', author_directive) +directives.register_directive('moduleauthor', author_directive) + + +# ------ toctree directive --------------------------------------------------------- + +def toctree_directive(name, arguments, options, content, lineno, + content_offset, block_text, state, state_machine): + env = state.document.settings.env + dirname = path.dirname(env.filename) + + subnode = addnodes.toctree() + 
includefiles = filter(None, content) + # absolutize filenames + includefiles = map(lambda x: path.normpath(path.join(dirname, x)), includefiles) + subnode['includefiles'] = includefiles + subnode['maxdepth'] = options.get('maxdepth', -1) + return [subnode] + +toctree_directive.content = 1 +toctree_directive.options = {'maxdepth': int} +directives.register_directive('toctree', toctree_directive) + + +# ------ centered directive --------------------------------------------------------- + +def centered_directive(name, arguments, options, content, lineno, + content_offset, block_text, state, state_machine): + if not arguments: + return [] + subnode = addnodes.centered() + inodes, messages = state.inline_text(arguments[0], lineno) + subnode.extend(inodes) + return [subnode] + messages + +centered_directive.arguments = (1, 0, 1) +directives.register_directive('centered', centered_directive) + + +# ------ highlightlanguage directive ------------------------------------------------ + +def highlightlang_directive(name, arguments, options, content, lineno, + content_offset, block_text, state, state_machine): + return [addnodes.highlightlang(lang=arguments[0].strip())] + +highlightlang_directive.content = 0 +highlightlang_directive.arguments = (1, 0, 0) +directives.register_directive('highlightlang', + highlightlang_directive) Added: doctools/trunk/sphinx/environment.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/environment.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,840 @@ +# -*- coding: utf-8 -*- +""" + sphinx.environment + ~~~~~~~~~~~~~~~~~~ + + Global creation environment. + + :copyright: 2007 by Georg Brandl. + :license: Python license. 
+""" +from __future__ import with_statement + +import re +import os +import time +import heapq +import hashlib +import difflib +import itertools +import cPickle as pickle +from os import path +from string import uppercase + +from docutils import nodes +from docutils.io import FileInput +from docutils.core import publish_doctree +from docutils.utils import Reporter +from docutils.readers import standalone +from docutils.transforms import Transform +from docutils.transforms.parts import ContentsFilter +from docutils.transforms.universal import FilterMessages + +from . import addnodes +from .util import get_matching_files +from .refcounting import Refcounts + +default_settings = { + 'embed_stylesheet': False, + 'cloak_email_addresses': True, + 'pep_base_url': 'http://www.python.org/dev/peps/', + 'input_encoding': 'utf-8', + 'doctitle_xform': False, + 'sectsubtitle_xform': False, +} + +# This is increased every time a new environment attribute is added +# to properly invalidate pickle files. +ENV_VERSION = 9 + + +def walk_depth(node, depth, maxdepth): + """Utility: Cut a TOC at a specified depth.""" + for subnode in node.children[:]: + if isinstance(subnode, (addnodes.compact_paragraph, nodes.list_item)): + walk_depth(subnode, depth, maxdepth) + elif isinstance(subnode, nodes.bullet_list): + if depth > maxdepth: + subnode.parent.replace(subnode, []) + else: + walk_depth(subnode, depth+1, maxdepth) + + +default_substitutions = set([ + 'version', + 'release', + 'today', +]) + + +class DefaultSubstitutions(Transform): + """ + Replace some substitutions if they aren't defined in the document. 
+ """ + # run before the default Substitutions + default_priority = 210 + + def apply(self): + config = self.document.settings.env.config + # only handle those not otherwise defined in the document + to_handle = default_substitutions - set(self.document.substitution_defs) + for ref in self.document.traverse(nodes.substitution_reference): + refname = ref['refname'] + if refname in to_handle: + text = config.get(refname, '') + if refname == 'today' and not text: + # special handling: can also specify a strftime format + text = time.strftime(config.get('today_fmt', '%B %d, %Y')) + ref.replace_self(nodes.Text(text, text)) + + +class MoveModuleTargets(Transform): + """ + Move module targets to their nearest enclosing section title. + """ + default_priority = 210 + + def apply(self): + for node in self.document.traverse(nodes.target): + if not node['ids']: + continue + if node['ids'][0].startswith('module-') and \ + node.parent.__class__ is nodes.section: + node.parent['ids'] = node['ids'] + node.parent.remove(node) + + +class MyStandaloneReader(standalone.Reader): + """ + Add our own Substitutions transform. + """ + def get_transforms(self): + tf = standalone.Reader.get_transforms(self) + return tf + [DefaultSubstitutions, MoveModuleTargets, + FilterMessages] + + +class MyContentsFilter(ContentsFilter): + """ + Used with BuildEnvironment.add_toc_from() to discard cross-file links + within table-of-contents link nodes. + """ + def visit_pending_xref(self, node): + self.parent.append(nodes.literal(node['reftarget'], node['reftarget'])) + raise nodes.SkipNode + + +class BuildEnvironment: + """ + The environment in which the ReST files are translated. + Stores an inventory of cross-file targets and provides doctree + transformations to resolve links to them. + + Not all doctrees are stored in the environment, only those of files + containing a "toctree" directive, because they have to change if sections + are edited in other files. This keeps the environment size moderate. 
+ """ + + # --------- ENVIRONMENT PERSISTENCE ---------------------------------------- + + @staticmethod + def frompickle(filename): + with open(filename, 'rb') as picklefile: + env = pickle.load(picklefile) + if env.version != ENV_VERSION: + raise IOError('env version not current') + return env + + def topickle(self, filename): + # remove unpicklable attributes + wstream = self.warning_stream + self.set_warning_stream(None) + with open(filename, 'wb') as picklefile: + pickle.dump(self, picklefile, pickle.HIGHEST_PROTOCOL) + # reset stream + self.set_warning_stream(wstream) + + # --------- ENVIRONMENT INITIALIZATION ------------------------------------- + + def __init__(self, srcdir, doctreedir): + self.doctreedir = doctreedir + self.srcdir = srcdir + self.config = {} + + # read the refcounts file + self.refcounts = Refcounts.fromfile( + path.join(self.srcdir, 'data', 'refcounts.dat')) + + # the docutils settings for building + self.settings = default_settings.copy() + self.settings['env'] = self + + # the stream to write warning messages to + self.warning_stream = None + + # this is to invalidate old pickles + self.version = ENV_VERSION + + # Build times -- to determine changed files + # Also use this as an inventory of all existing and built filenames. 
+ self.all_files = {} # filename -> (mtime, md5) at the time of build + + # File metadata + self.metadata = {} # filename -> dict of metadata items + + # TOC inventory + self.titles = {} # filename -> title node + self.tocs = {} # filename -> table of contents nodetree + self.toc_num_entries = {} # filename -> number of real entries + # used to determine when to show the TOC in a sidebar + # (don't show if it's only one item) + self.toctree_relations = {} # filename -> ["parent", "previous", "next"] filename + # for navigating in the toctree + self.files_to_rebuild = {} # filename -> list of files (containing its TOCs) + # to rebuild too + + # X-ref target inventory + self.descrefs = {} # fullname -> filename, desctype + self.filemodules = {} # filename -> [modules] + self.modules = {} # modname -> filename, synopsis, platform + self.tokens = {} # tokenname -> filename + self.labels = {} # labelname -> filename, labelid + + # Other inventories + self.indexentries = {} # filename -> list of + # (type, string, target, aliasname) + self.versionchanges = {} # version -> list of + # (type, filename, module, descname, content) + + # These are set while parsing a file + self.filename = None # current file name + self.currmodule = None # current module name + self.currclass = None # current class name + self.currdesc = None # current descref name + self.index_num = 0 # autonumber for index targets + + def set_warning_stream(self, stream): + self.warning_stream = stream + self.settings['warning_stream'] = stream + + def clear_file(self, filename): + """Remove all traces of a source file in the inventory.""" + if filename in self.all_files: + self.all_files.pop(filename, None) + self.metadata.pop(filename, None) + self.titles.pop(filename, None) + self.tocs.pop(filename, None) + self.toc_num_entries.pop(filename, None) + self.files_to_rebuild.pop(filename, None) + + for fullname, (fn, _) in self.descrefs.items(): + if fn == filename: + del self.descrefs[fullname] + 
self.filemodules.pop(filename, None) + for modname, (fn, _, _) in self.modules.items(): + if fn == filename: + del self.modules[modname] + for tokenname, fn in self.tokens.items(): + if fn == filename: + del self.tokens[tokenname] + for labelname, (fn, _, _) in self.labels.items(): + if fn == filename: + del self.labels[labelname] + self.indexentries.pop(filename, None) + for version, changes in self.versionchanges.items(): + new = [change for change in changes if change[1] != filename] + changes[:] = new + + def get_outdated_files(self, config): + """ + Return (removed, changed) iterables. + """ + all_source_files = list(get_matching_files( + self.srcdir, '*.rst', exclude=set(config.get('unused_files', ())))) + + # clear all files no longer present + removed = set(self.all_files) - set(all_source_files) + + if config != self.config: + # config values affect e.g. substitutions + changed = all_source_files + else: + changed = [] + for filename in all_source_files: + if filename not in self.all_files: + changed.append(filename) + else: + # if the doctree file is not there, rebuild + if not path.isfile(path.join(self.doctreedir, + filename[:-3] + 'doctree')): + changed.append(filename) + continue + mtime, md5 = self.all_files[filename] + newmtime = path.getmtime(path.join(self.srcdir, filename)) + if newmtime == mtime: + continue + # check the MD5 + with file(path.join(self.srcdir, filename), 'rb') as f: + newmd5 = hashlib.md5(f.read()).digest() + if newmd5 != md5: + changed.append(filename) + + return removed, changed + + def update(self, config): + """ + (Re-)read all files new or changed since last update. + Yields a summary and then filenames as it processes them. 
+ """ + removed, changed = self.get_outdated_files(config) + msg = '%s removed, %s changed' % (len(removed), len(changed)) + if self.config != config: + msg = '[config changed] ' + msg + yield msg + + self.config = config + + # clear all files no longer present + for filename in removed: + self.clear_file(filename) + + # re-read the refcount file + self.refcounts = Refcounts.fromfile( + path.join(self.srcdir, 'data', 'refcounts.dat')) + + # read all new and changed files + for filename in changed: + yield filename + self.read_file(filename) + + # --------- SINGLE FILE BUILDING ------------------------------------------- + + def read_file(self, filename, src_path=None, save_parsed=True): + """Parse a file and add/update inventory entries for the doctree. + If srcpath is given, read from a different source file.""" + # remove all inventory entries for that file + self.clear_file(filename) + + if src_path is None: + src_path = path.join(self.srcdir, filename) + + self.filename = filename + doctree = publish_doctree(None, src_path, FileInput, + settings_overrides=self.settings, + reader=MyStandaloneReader()) + self.process_metadata(filename, doctree) + self.create_title_from(filename, doctree) + self.note_labels_from(filename, doctree) + self.build_toc_from(filename, doctree) + + # calculate the MD5 of the file at time of build + with file(src_path, 'rb') as f: + md5 = hashlib.md5(f.read()).digest() + self.all_files[filename] = (path.getmtime(src_path), md5) + + # make it picklable + doctree.reporter = None + doctree.transformer = None + doctree.settings.env = None + doctree.settings.warning_stream = None + + # cleanup + self.filename = None + self.currmodule = None + self.currclass = None + + if save_parsed: + # save the parsed doctree + doctree_filename = path.join(self.doctreedir, filename[:-3] + 'doctree') + dirname = path.dirname(doctree_filename) + if not path.isdir(dirname): + os.makedirs(dirname) + with file(doctree_filename, 'wb') as f: + pickle.dump(doctree, 
f, pickle.HIGHEST_PROTOCOL) + else: + return doctree + + def process_metadata(self, filename, doctree): + """ + Process the docinfo part of the doctree as metadata. + """ + self.metadata[filename] = md = {} + docinfo = doctree[0] + if docinfo.__class__ is not nodes.docinfo: + # nothing to see here + return + for node in docinfo: + if node.__class__ is nodes.author: + # handled specially by docutils + md['author'] = node.astext() + elif node.__class__ is nodes.field: + name, body = node + md[name.astext()] = body.astext() + del doctree[0] + + def create_title_from(self, filename, document): + """ + Add a title node to the document (just copy the first section title), + and store that title in the environment. + """ + for node in document.traverse(nodes.section): + titlenode = nodes.title() + visitor = MyContentsFilter(document) + node[0].walkabout(visitor) + titlenode += visitor.get_entry_text() + self.titles[filename] = titlenode + return + + def note_labels_from(self, filename, document): + for name, explicit in document.nametypes.iteritems(): + if not explicit: + continue + labelid = document.nameids[name] + node = document.ids[labelid] + if not isinstance(node, nodes.section): + # e.g. 
desc-signatures + continue + sectname = node[0].astext() # node[0] == title node + if name in self.labels: + print >>self.warning_stream, \ + ('WARNING: duplicate label %s, ' % name + + 'in %s and %s' % (self.labels[name][0], filename)) + self.labels[name] = filename, labelid, sectname + + def note_toctree(self, filename, toctreenode): + """Note a TOC tree directive in a document and gather information about + file relations from it.""" + includefiles = toctreenode['includefiles'] + includefiles_len = len(includefiles) + for i, includefile in enumerate(includefiles): + # the "previous" file for the first toctree item is the parent + previous = includefiles[i-1] if i > 0 else filename + # the "next" file for the last toctree item is the parent again + next = includefiles[i+1] if i < includefiles_len-1 else filename + self.toctree_relations[includefile] = [filename, previous, next] + # note that if the included file is rebuilt, this one must be + # too (since the TOC of the included file could have changed) + self.files_to_rebuild.setdefault(includefile, set()).add(filename) + + + def build_toc_from(self, filename, document): + """Build a TOC from the doctree and store it in the inventory.""" + numentries = [0] # nonlocal again... 
+ + def build_toc(node): + entries = [] + for subnode in node: + if isinstance(subnode, addnodes.toctree): + # just copy the toctree node which is then resolved + # in self.resolve_toctrees + item = subnode.copy() + entries.append(item) + # do the inventory stuff + self.note_toctree(filename, subnode) + continue + if not isinstance(subnode, nodes.section): + continue + title = subnode[0] + # copy the contents of the section title, but without references + # and unnecessary stuff + visitor = MyContentsFilter(document) + title.walkabout(visitor) + nodetext = visitor.get_entry_text() + if not numentries[0]: + # for the very first toc entry, don't add an anchor + # as it is the file's title anyway + anchorname = '' + else: + anchorname = '#' + subnode['ids'][0] + numentries[0] += 1 + reference = nodes.reference('', '', refuri=filename, + anchorname=anchorname, + *nodetext) + para = addnodes.compact_paragraph('', '', reference) + item = nodes.list_item('', para) + item += build_toc(subnode) + entries.append(item) + if entries: + return nodes.bullet_list('', *entries) + return [] + toc = build_toc(document) + if toc: + self.tocs[filename] = toc + else: + self.tocs[filename] = nodes.bullet_list('') + self.toc_num_entries[filename] = numentries[0] + + def get_toc_for(self, filename): + """Return a TOC nodetree -- for use on the same page only!""" + toc = self.tocs[filename].deepcopy() + for node in toc.traverse(nodes.reference): + node['refuri'] = node['anchorname'] + return toc + + # ------- + # these are called from docutils directives and therefore use self.filename + # + def note_descref(self, fullname, desctype): + if fullname in self.descrefs: + print >>self.warning_stream, \ + ('WARNING: duplicate canonical description name %s, ' % fullname + + 'in %s and %s' % (self.descrefs[fullname][0], self.filename)) + self.descrefs[fullname] = (self.filename, desctype) + + def note_module(self, modname, synopsis, platform): + self.modules[modname] = (self.filename, synopsis, 
platform) + self.filemodules.setdefault(self.filename, []).append(modname) + + def note_token(self, tokenname): + self.tokens[tokenname] = self.filename + + + def note_index_entry(self, type, string, targetid, aliasname): + self.indexentries.setdefault(self.filename, []).append( + (type, string, targetid, aliasname)) + + def note_versionchange(self, type, version, node): + self.versionchanges.setdefault(version, []).append( + (type, self.filename, self.currmodule, self.currdesc, node.deepcopy())) + # ------- + + # --------- RESOLVING REFERENCES AND TOCTREES ------------------------------ + + def get_doctree(self, filename): + """Read the doctree for a file from the pickle and return it.""" + doctree_filename = path.join(self.doctreedir, filename[:-3] + 'doctree') + with file(doctree_filename, 'rb') as f: + doctree = pickle.load(f) + doctree.reporter = Reporter(filename, 2, 4, stream=self.warning_stream) + return doctree + + def get_and_resolve_doctree(self, filename, builder, doctree=None): + """Read the doctree from the pickle, resolve cross-references and + toctrees and return it.""" + if doctree is None: + doctree = self.get_doctree(filename) + + # resolve all pending cross-references + self.resolve_references(doctree, filename, builder) + + # now, resolve all toctree nodes + def _entries_from_toctree(toctreenode): + """Return TOC entries for a toctree node.""" + includefiles = map(str, toctreenode['includefiles']) + + entries = [] + for includefile in includefiles: + try: + toc = self.tocs[includefile].deepcopy() + except KeyError, err: + # this is raised if the included file does not exist + print >>self.warning_stream, 'WARNING: %s: toctree contains ' \ + 'ref to nonexisting file %r' % (filename, includefile) + else: + for toctreenode in toc.traverse(addnodes.toctree): + toctreenode.parent.replace_self( + _entries_from_toctree(toctreenode)) + entries.append(toc) + if entries: + return addnodes.compact_paragraph('', '', *entries) + return [] + + for 
toctreenode in doctree.traverse(addnodes.toctree): + maxdepth = toctreenode.get('maxdepth', -1) + newnode = _entries_from_toctree(toctreenode) + # prune the tree to maxdepth + if maxdepth > 0: + walk_depth(newnode, 1, maxdepth) + toctreenode.replace_self(newnode) + + # set the target paths in the toctrees (they are not known + # at TOC generation time) + for node in doctree.traverse(nodes.reference): + if node.hasattr('anchorname'): + # a TOC reference + node['refuri'] = builder.get_relative_uri( + filename, node['refuri']) + node['anchorname'] + + return doctree + + + def resolve_references(self, doctree, docfilename, builder): + for node in doctree.traverse(addnodes.pending_xref): + contnode = node[0].deepcopy() + newnode = None + + typ = node['reftype'] + target = node['reftarget'] + modname = node['modname'] + clsname = node['classname'] + + if typ == 'ref': + filename, labelid, sectname = self.labels.get(target, ('','','')) + if not filename: + newnode = doctree.reporter.system_message( + 2, 'undefined label: %s' % target) + print >>self.warning_stream, \ + '%s: undefined label: %s' % (docfilename, target) + else: + newnode = nodes.reference('', '') + if filename == docfilename: + newnode['refid'] = labelid + else: + newnode['refuri'] = builder.get_relative_uri( + docfilename, filename) + '#' + labelid + newnode.append(nodes.emphasis(sectname, sectname)) + elif typ == 'token': + filename = self.tokens.get(target, '') + if not filename: + newnode = contnode + else: + newnode = nodes.reference('', '') + if filename == docfilename: + newnode['refid'] = 'grammar-token-' + target + else: + newnode['refuri'] = builder.get_relative_uri( + docfilename, filename) + '#grammar-token-' + target + newnode.append(contnode) + elif typ == 'mod': + filename, synopsis, platform = self.modules.get(target, ('','','')) + # just link to an anchor if there are multiple modules in one file + # because the anchor is generally below the heading which is ugly + # but can't be helped 
easily + anchor = '' + if not filename or filename == docfilename: + # don't link to self + newnode = contnode + else: + if len(self.filemodules[filename]) > 1: + anchor = '#' + 'module-' + target + newnode = nodes.reference('', '') + newnode['refuri'] = ( + builder.get_relative_uri(docfilename, filename) + anchor) + newnode.append(contnode) + else: + name, desc = self.find_desc(modname, clsname, target, typ) + if not desc: + newnode = contnode + else: + newnode = nodes.reference('', '') + if desc[0] == docfilename: + newnode['refid'] = name + else: + newnode['refuri'] = ( + builder.get_relative_uri(docfilename, desc[0]) + + '#' + name) + newnode.append(contnode) + + if newnode: + node.replace_self(newnode) + + def create_index(self, builder, _fixre=re.compile(r'(.*) ([(][^()]*[)])')): + """Create the real index from the collected index entries.""" + new = {} + + def add_entry(word, subword, dic=new): + entry = dic.get(word) + if not entry: + dic[word] = entry = [[], {}] + if subword: + add_entry(subword, '', dic=entry[1]) + else: + entry[0].append(builder.get_relative_uri('genindex.rst', fn) + + '#' + tid) + + for fn, entries in self.indexentries.iteritems(): + # new entry types must be listed in directives.py! + for type, string, tid, alias in entries: + if type == 'single': + entry, _, subentry = string.partition('!') + add_entry(entry, subentry) + elif type == 'pair': + first, second = map(lambda x: x.strip(), string.split(';', 1)) + add_entry(first, second) + add_entry(second, first) + elif type == 'triple': + first, second, third = map(lambda x: x.strip(), string.split(';', 2)) + add_entry(first, second+' '+third) + add_entry(second, third+', '+first) + add_entry(third, first+' '+second) +# this is a bit ridiculous... 
+# elif type == 'quadruple': +# first, second, third, fourth = \ +# map(lambda x: x.strip(), string.split(';', 3)) +# add_entry(first, '%s %s %s' % (second, third, fourth)) +# add_entry(second, '%s %s, %s' % (third, fourth, first)) +# add_entry(third, '%s, %s %s' % (fourth, first, second)) +# add_entry(fourth, '%s %s %s' % (first, second, third)) + elif type in ('module', 'keyword', 'operator', 'object', + 'exception', 'statement'): + add_entry(string, type) + add_entry(type, string) + elif type == 'builtin': + add_entry(string, 'built-in function') + add_entry('built-in function', string) + else: + print >>self.warning_stream, \ + "unknown index entry type %r in %s" % (type, fn) + + newlist = new.items() + newlist.sort(key=lambda t: t[0].lower()) + + # fixup entries: transform + # func() (in module foo) + # func() (in module bar) + # into + # func() + # (in module foo) + # (in module bar) + oldkey = '' + oldsubitems = None + i = 0 + while i < len(newlist): + key, (targets, subitems) = newlist[i] + # cannot move if it hassubitems; structure gets too complex + if not subitems: + m = _fixre.match(key) + if m: + if oldkey == m.group(1): + # prefixes match: add entry as subitem of the previous entry + oldsubitems.setdefault(m.group(2), [[], {}])[0].extend(targets) + del newlist[i] + continue + oldkey = m.group(1) + else: + oldkey = key + oldsubitems = subitems + i += 1 + + # group the entries by letter + def keyfunc((k, v), ltrs=uppercase+'_'): + # hack: mutate the subitems dicts to a list in the keyfunc + v[1] = sorted((si, se) for (si, (se, void)) in v[1].iteritems()) + # now calculate the key + letter = k[0].upper() + if letter in ltrs: + return letter + else: + # get all other symbols under one heading + return 'Symbols' + self.index = [(key, list(group)) for (key, group) in + itertools.groupby(newlist, keyfunc)] + + def check_consistency(self): + """Do consistency checks.""" + + for filename in self.all_files: + if filename not in self.toctree_relations: + if 
filename == 'contents.rst': + # the master file is not included anywhere ;) + continue + self.warning_stream.write( + 'WARNING: %s isn\'t included in any toctree\n' % filename) + + # --------- QUERYING ------------------------------------------------------- + + def find_desc(self, modname, classname, name, type): + """Find a description node matching "name", perhaps using + the given module and/or classname.""" + # skip parens + if name[-2:] == '()': + name = name[:-2] + + # don't add module and class names for C things + if type[0] == 'c' and type not in ('class', 'const'): + # skip trailing star and whitespace + name = name.rstrip(' *') + if name in self.descrefs and self.descrefs[name][1][0] == 'c': + return name, self.descrefs[name] + return None, None + + if name in self.descrefs: + newname = name + elif modname and modname + '.' + name in self.descrefs: + newname = modname + '.' + name + elif modname and classname and \ + modname + '.' + classname + '.' + name in self.descrefs: + newname = modname + '.' + classname + '.' + name + # special case: builtin exceptions have module "exceptions" set + elif type == 'exc' and '.' not in name and \ + 'exceptions.' + name in self.descrefs: + newname = 'exceptions.' + name + # special case: object methods + elif type in ('func', 'meth') and '.' not in name and \ + 'object.' + name in self.descrefs: + newname = 'object.' + name + else: + return None, None + return newname, self.descrefs[newname] + + def find_keyword(self, keyword, avoid_fuzzy=False, cutoff=0.6, n=20): + """ + Find keyword matches for a keyword. If there's an exact match, just return + it, else return a list of fuzzy matches if avoid_fuzzy isn't True. + + Keywords searched are: first modules, then descrefs. 
+ + Returns: None if nothing found + (type, filename, anchorname) if exact match found + list of (quality, type, filename, anchorname, description) if fuzzy + """ + + if keyword in self.modules: + filename, title, system = self.modules[keyword] + return 'module', filename, 'module-' + keyword + if keyword in self.descrefs: + filename, ref_type = self.descrefs[keyword] + return ref_type, filename, keyword + # special cases + if '.' not in keyword: + # exceptions are documented in the exceptions module + if 'exceptions.'+keyword in self.descrefs: + filename, ref_type = self.descrefs['exceptions.'+keyword] + return ref_type, filename, 'exceptions.'+keyword + # special methods are documented as object methods + if 'object.'+keyword in self.descrefs: + filename, ref_type = self.descrefs['object.'+keyword] + return ref_type, filename, 'object.'+keyword + + if avoid_fuzzy: + return + + # find fuzzy matches + s = difflib.SequenceMatcher() + s.set_seq2(keyword.lower()) + + def possibilities(): + for title, (fn, desc, _) in self.modules.iteritems(): + yield ('module', fn, 'module-'+title, desc) + for title, (fn, desctype) in self.descrefs.iteritems(): + yield (desctype, fn, title, '') + + def dotsearch(string): + parts = string.lower().split('.') + for idx in xrange(0, len(parts)): + yield '.'.join(parts[idx:]) + + result = [] + for type, filename, title, desc in possibilities(): + best_res = 0 + for part in dotsearch(title): + s.set_seq1(part) + if s.real_quick_ratio() >= cutoff and \ + s.quick_ratio() >= cutoff and \ + s.ratio() >= cutoff and \ + s.ratio() > best_res: + best_res = s.ratio() + if best_res: + result.append((best_res, type, filename, title, desc)) + + return heapq.nlargest(n, result) + + def get_real_filename(self, filename): + """ + Pass this function a filename without .rst extension to get the real + filename. This also resolves the special `index.rst` files. If the file + does not exist the return value will be `None`. 
+ """ + for rstname in filename + '.rst', filename + path.sep + 'index.rst': + if rstname in self.all_files: + return rstname Added: doctools/trunk/sphinx/highlighting.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/highlighting.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +""" + sphinx.highlighting + ~~~~~~~~~~~~~~~~~~~ + + Highlight code blocks using Pygments. + + :copyright: 2007 by Georg Brandl. + :license: Python license. +""" + +import cgi +from collections import defaultdict + +try: + import pygments + from pygments import highlight + from pygments.lexers import PythonLexer, PythonConsoleLexer, CLexer, \ + TextLexer, RstLexer + from pygments.formatters import HtmlFormatter + from pygments.filters import ErrorToken + from pygments.style import Style + from pygments.styles.friendly import FriendlyStyle + from pygments.token import Generic, Comment +except ImportError: + pygments = None +else: + class PythonDocStyle(Style): + """ + Like friendly, but a bit darker to enhance contrast on the green background. + """ + + background_color = '#eeffcc' + default_style = '' + + styles = FriendlyStyle.styles + styles.update({ + Generic.Output: 'italic #333', + Comment: 'italic #408090', + }) + + lexers = defaultdict(TextLexer, + none = TextLexer(), + python = PythonLexer(), + pycon = PythonConsoleLexer(), + rest = RstLexer(), + c = CLexer(), + ) + for _lexer in lexers.values(): + _lexer.add_filter('raiseonerror') + + fmter = HtmlFormatter(style=PythonDocStyle) + + +def highlight_block(source, lang): + if not pygments: + return '

def highlight_block(source, lang):
    """Return *source* highlighted as an HTML snippet via Pygments.

    Falls back to an escaped ``<pre>`` block when Pygments is not installed,
    or when lexing raises ErrorToken (e.g. doctest-like text that is not
    actually Python).

    NOTE(review): the archived copy of this file lost the literal HTML inside
    its string constants; the '<pre>'/'</pre>' wrappers below are restored
    from the function's evident intent -- verify against the repository.
    """
    if not pygments:
        return '<pre>' + cgi.escape(source) + '</pre>\n'
    if lang == 'python':
        if source.startswith('>>>'):
            # an interactive session is lexed differently from plain code
            lexer = lexers['pycon']
        else:
            lexer = lexers['python']
    else:
        lexer = lexers[lang]
    try:
        return highlight(source, lexer, fmter)
    except ErrorToken:
        # this is most probably not Python, so let it pass text-only
        return '<pre>' + cgi.escape(source) + '</pre>\n'


def get_stylesheet():
    """Return the CSS style definitions produced by the HTML formatter."""
    return fmter.get_style_defs()
The ones we're using are marked with 'x': +# +# 0x000002 Hide/Show x +# 0x000004 Back x +# 0x000008 Forward x +# 0x000010 Stop +# 0x000020 Refresh +# 0x000040 Home x +# 0x000080 Forward +# 0x000100 Back +# 0x000200 Notes +# 0x000400 Contents +# 0x000800 Locate x +# 0x001000 Options x +# 0x002000 Print x +# 0x004000 Index +# 0x008000 Search +# 0x010000 History +# 0x020000 Favorites +# 0x040000 Jump 1 +# 0x080000 Jump 2 +# 0x100000 Zoom/Font x +# 0x200000 TOC Next +# 0x400000 TOC Prev + +project_template = '''\ +[OPTIONS] +Compiled file=%(outname)s.chm +Contents file=%(outname)s.hhc +Default Window=%(outname)s +Default topic=index.html +Display compile progress=No +Full text search stop list file=%(outname)s.stp +Full-text search=Yes +Index file=%(outname)s.hhk +Language=0x409 +Title=Python %(version)s Documentation + +[WINDOWS] +%(outname)s="Python %(version)s Documentation","%(outname)s.hhc","%(outname)s.hhk",\ +"index.html","index.html",,,,,0x63520,220,0x10384e,[0,0,1024,768],,,,,,,0 + +[FILES] +''' + +contents_header = '''\ + + + + + + + + + + +
    +''' + +contents_footer = '''\ +
# Sitemap entry for one TOC/index node ('Name' label, 'Local' target URL).
# NOTE(review): the literal HTML in these templates was stripped when this
# file passed through an HTML mail archive; the markup below is restored
# from the HTML Help sitemap format -- verify against a known-good copy.
object_sitemap = '''\
<OBJECT type="text/sitemap">
    <param name="Name" value="%s">
    <param name="Local" value="%s">
</OBJECT>
'''

# List of words the full text search facility shouldn't index.  This
# becomes file outname.stp.  Note that this list must be pretty small!
# Different versions of the MS docs claim the file has a maximum size of
# 256 or 512 bytes (including \r\n at the end of each line).
# Note that "and", "or", "not" and "near" are operators in the search
# language, so no point indexing them even if we wanted to.
stopwords = """
a and are as at
be but by
for
if in into is it
near no not
of on or
such
that the their then there these they this to
was will with
""".split()


def build_hhx(builder, outdir, outname):
    """Write the HTML Help support files for *builder* into *outdir*.

    Produces <outname>.stp (stopwords), .hhp (project), .hhc (table of
    contents) and .hhk (index).  Pure side effects; returns None.
    """
    builder.msg('dumping stopword list...')
    with open(path.join(outdir, outname+'.stp'), 'w') as f:
        for word in sorted(stopwords):
            # was "print >>f, word" -- identical output, Python 3 compatible
            f.write(word + '\n')

    builder.msg('writing project file...')
    with open(path.join(outdir, outname+'.hhp'), 'w') as f:
        f.write(project_template % {'outname': outname,
                                    'version': builder.config['version']})
        if not outdir.endswith(os.sep):
            outdir += os.sep
        olen = len(outdir)
        for root, dirs, files in os.walk(outdir):
            for fn in files:
                if fn.endswith(('.html', '.css', '.js')):
                    # project file wants backslashed, outdir-relative paths
                    f.write(path.join(root, fn)[olen:].replace('/', '\\')
                            + '\n')

    builder.msg('writing TOC file...')
    with open(path.join(outdir, outname+'.hhc'), 'w') as f:
        f.write(contents_header)
        # special books
        f.write('<LI> ' + object_sitemap % ('Main page', 'index.html'))
        f.write('<LI> ' + object_sitemap % ('Global Module Index',
                                            'modindex.html'))
        # the TOC
        toc = builder.env.get_and_resolve_doctree('contents.rst', builder)
        def write_toc(node, ullevel=0):
            # recursively serialize the doctree's bullet lists as sitemap
            # markup; the top-level <UL> comes from contents_header/footer
            if isinstance(node, nodes.list_item):
                f.write('<LI> ')
                for subnode in node:
                    write_toc(subnode, ullevel)
            elif isinstance(node, nodes.reference):
                f.write(object_sitemap % (cgi.escape(node.astext()),
                                          node['refuri']))
            elif isinstance(node, nodes.bullet_list):
                if ullevel != 0:
                    f.write('<UL>\n')
                for subnode in node:
                    write_toc(subnode, ullevel+1)
                if ullevel != 0:
                    f.write('</UL>\n')
            elif isinstance(node, addnodes.compact_paragraph):
                for subnode in node:
                    write_toc(subnode, ullevel)
            elif isinstance(node, nodes.section):
                write_toc(node[1], ullevel)
            elif isinstance(node, nodes.document):
                write_toc(node[0], ullevel)
        write_toc(toc)
        f.write(contents_footer)

    builder.msg('writing index file...')
    with open(path.join(outdir, outname+'.hhk'), 'w') as f:
        f.write('<UL>\n')
        def write_index(title, refs, subitems):
            # one <LI> per entry; extra targets become "[Link]" entries
            if refs:
                f.write('<LI> ')
                f.write(object_sitemap % (cgi.escape(title), refs[0]))
                for ref in refs[1:]:
                    f.write(object_sitemap % ('[Link]', ref))
            if subitems:
                f.write('<UL> ')
                for subitem in subitems:
                    write_index(subitem[0], subitem[1], [])
                f.write('</UL> ')
        for (key, group) in builder.env.index:
            for title, (refs, subitems) in group:
                write_index(title, refs, subitems)
        f.write('</UL>\n')
    \n') Added: doctools/trunk/sphinx/json.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/json.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +""" + sphinx.json + ~~~~~~~~~~~ + + Minimal JSON module that generates small dumps. + + This is not fully JSON compliant but enough for the searchindex. + And the generated files are smaller than the simplejson ones. + + Uses the basestring encode function from simplejson. + + :copyright: 2007 by Armin Ronacher, Bob Ippolito. + :license: Python license. +""" + +import re + +ESCAPE = re.compile(r'[\x00-\x19\\"\b\f\n\r\t]') +ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])') +ESCAPE_DICT = { + '\\': '\\\\', + '"': '\\"', + '\b': '\\b', + '\f': '\\f', + '\n': '\\n', + '\r': '\\r', + '\t': '\\t', +} +for i in range(0x20): + ESCAPE_DICT.setdefault(chr(i), '\\u%04x' % (i,)) + + +def encode_basestring_ascii(s): + def replace(match): + s = match.group(0) + try: + return ESCAPE_DICT[s] + except KeyError: + n = ord(s) + if n < 0x10000: + return '\\u%04x' % (n,) + else: + # surrogate pair + n -= 0x10000 + s1 = 0xd800 | ((n >> 10) & 0x3ff) + s2 = 0xdc00 | (n & 0x3ff) + return '\\u%04x\\u%04x' % (s1, s2) + return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"' + + +def dump_json(obj, key=False): + if key: + if not isinstance(obj, basestring): + obj = str(obj) + return encode_basestring_ascii(obj) + if obj is None: + return 'null' + elif obj is True or obj is False: + return obj and 'true' or 'false' + elif isinstance(obj, (int, long, float)): + return str(obj) + elif isinstance(obj, dict): + return '{%s}' % ','.join('%s:%s' % ( + dump_json(key, True), + dump_json(value) + ) for key, value in obj.iteritems()) + elif isinstance(obj, (tuple, list, set)): + return '[%s]' % ','.join(dump_json(x) for x in obj) + elif isinstance(obj, basestring): + return encode_basestring_ascii(obj) + raise TypeError(type(obj)) Added: 
# -*- coding: utf-8 -*-
"""
    sphinx.refcounting
    ~~~~~~~~~~~~~~~~~~

    Handle reference counting annotations, based on refcount.py
    and anno-api.py.

    :copyright: 2007 by Georg Brandl.
    :license: Python license.
"""
from __future__ import with_statement


class RCEntry:
    """Reference-count data for one documented C API function."""

    def __init__(self, name):
        self.name = name            # C function name
        self.args = []              # list of (arg_name, type, refcount)
        self.result_type = ''       # C type of the return value
        self.result_refs = None     # refcount effect of the result, or None


class Refcounts(dict):
    """Mapping of function name -> RCEntry, loaded from a refcounts file."""

    @classmethod
    def fromfile(cls, filename):
        """Parse *filename* (colon-separated refcount data, one parameter or
        result per line) and return a populated Refcounts mapping.

        Raises ValueError for lines that do not have exactly five fields.
        """
        table = cls()
        with open(filename, 'r') as fp:
            for raw in fp:
                stripped = raw.strip()
                # skip blank lines and comments
                if stripped[:1] in ("", "#"):
                    continue
                fields = stripped.split(":", 4)
                if len(fields) != 5:
                    raise ValueError("Wrong field count in %r" % stripped)
                function, rctype, arg, refcount, _comment = fields
                # get the entry, creating it on first sight of the function
                entry = table.get(function)
                if entry is None:
                    entry = table[function] = RCEntry(function)
                # empty or "null" means "no refcount information"
                refcount = (int(refcount)
                            if refcount and refcount != "null" else None)
                # a named arg describes a parameter; otherwise the result
                if arg:
                    entry.args.append((arg, rctype, refcount))
                else:
                    entry.result_type = rctype
                    entry.result_refs = refcount
        return table
ws_re = re.compile(r'\s+')

# Roles that simply wrap their text in a single docutils node class.
generic_docroles = {
    'command' : nodes.strong,
    'dfn' : nodes.emphasis,
    'file' : nodes.emphasis,
    'filenq' : nodes.emphasis,
    'filevar' : nodes.emphasis,
    'guilabel' : nodes.strong,
    'kbd' : nodes.literal,
    'keyword' : nodes.literal,
    'mailheader' : nodes.emphasis,
    'makevar' : nodes.Text,
    'manpage' : nodes.emphasis,
    'mimetype' : nodes.emphasis,
    'newsgroup' : nodes.emphasis,
    'option' : nodes.emphasis,
    'program' : nodes.strong,
    'regexp' : nodes.literal,
}

for rolename, nodeclass in generic_docroles.items():   # items(): works on 2 and 3
    roles.register_generic_role(rolename, nodeclass)


def indexmarkup_role(typ, rawtext, text, lineno, inliner,
                     options=None, content=None):
    """Role for ``envvar``, ``pep`` and ``rfc``: renders the text and
    records an index entry targeting it.

    Returns the usual docutils role result (nodes, system_messages);
    invalid PEP/RFC numbers yield a problematic node plus an error message.
    """
    # None instead of mutable {}/[] defaults (shared-state pitfall);
    # docutils always passes these explicitly, so behavior is unchanged.
    if options is None:
        options = {}
    if content is None:
        content = []
    env = inliner.document.settings.env
    text = utils.unescape(text)
    targetid = 'index-%s' % env.index_num
    env.index_num += 1
    targetnode = nodes.target('', '', ids=[targetid])
    inliner.document.note_explicit_target(targetnode)
    if typ == 'envvar':
        env.note_index_entry('single', '%s' % text,
                             targetid, text)
        env.note_index_entry('single', 'environment variables!%s' % text,
                             targetid, text)
        textnode = nodes.strong(text, text)
        return [targetnode, textnode], []
    elif typ == 'pep':
        env.note_index_entry('single',
                             'Python Enhancement Proposals!PEP %s' % text,
                             targetid, 'PEP %s' % text)
        try:
            pepnum = int(text)
        except ValueError:
            msg = inliner.reporter.error('invalid PEP number %s' % text,
                                         line=lineno)
            prb = inliner.problematic(rawtext, rawtext, msg)
            return [prb], [msg]
        ref = inliner.document.settings.pep_base_url + 'pep-%04d' % pepnum
        sn = nodes.strong('PEP '+text, 'PEP '+text)
        rn = nodes.reference('', '', refuri=ref)
        rn += sn
        return [targetnode, rn], []
    elif typ == 'rfc':
        env.note_index_entry('single', 'RFC!RFC %s' % text,
                             targetid, 'RFC %s' % text)
        try:
            rfcnum = int(text)
        except ValueError:
            msg = inliner.reporter.error('invalid RFC number %s' % text,
                                         line=lineno)
            prb = inliner.problematic(rawtext, rawtext, msg)
            return [prb], [msg]
        ref = inliner.document.settings.rfc_base_url + inliner.rfc_url % rfcnum
        sn = nodes.strong('RFC '+text, 'RFC '+text)
        rn = nodes.reference('', '', refuri=ref)
        rn += sn
        return [targetnode, rn], []

roles.register_canonical_role('envvar', indexmarkup_role)
roles.register_local_role('pep', indexmarkup_role)
roles.register_local_role('rfc', indexmarkup_role)


# inner node class used for the xref body; default is `literal`
innernodetypes = {
    'ref': nodes.emphasis,
    'token': nodes.strong,
}

def xfileref_role(typ, rawtext, text, lineno, inliner,
                  options=None, content=None):
    """Cross-referencing role: emits a pending_xref node for later resolution."""
    if options is None:
        options = {}
    if content is None:
        content = []
    env = inliner.document.settings.env
    text = utils.unescape(text)
    # 'token' is the default role inside 'productionlist' directives
    if typ == '':
        typ = 'token'
    if env.config.get('strip_trailing_parentheses', False):
        if text[-2:] == '()':
            text = text[:-2]
    pnode = addnodes.pending_xref(rawtext)
    pnode['reftype'] = typ
    pnode['reftarget'] = ws_re.sub('', text)
    pnode['modname'] = env.currmodule
    pnode['classname'] = env.currclass
    pnode += innernodetypes.get(typ, nodes.literal)(rawtext, text,
                                                    classes=['xref'])
    return [pnode], []


def menusel_role(typ, rawtext, text, lineno, inliner,
                 options=None, content=None):
    """Render a menu-selection path, drawing '-->' as a triangular bullet."""
    if options is None:
        options = {}
    if content is None:
        content = []
    return [nodes.emphasis(
        rawtext, text.replace('-->', u'\N{TRIANGULAR BULLET}'))], []


specific_docroles = {
    'data': xfileref_role,
    'exc': xfileref_role,
    'func': xfileref_role,
    'class': xfileref_role,
    'const': xfileref_role,
    'attr': xfileref_role,
    'meth': xfileref_role,

    'cfunc' : xfileref_role,
    'cdata' : xfileref_role,
    'ctype' : xfileref_role,
    'cmacro' : xfileref_role,

    'mod' : xfileref_role,

    'ref': xfileref_role,
    'token' : xfileref_role,

    'menuselection' : menusel_role,
}

for rolename, func in specific_docroles.items():
    roles.register_canonical_role(rolename, func)
# -*- coding: utf-8 -*-
"""
    sphinx.search
    ~~~~~~~~~~~~~

    Create a search index for offline search.

    :copyright: 2007 by Armin Ronacher.
    :license: Python license.
"""
import re
import pickle

from collections import defaultdict
from docutils.nodes import Text, NodeVisitor
from .stemmer import PorterStemmer
from .json import dump_json


# Inline flags must lead the pattern: a trailing global '(?u)' is an
# error on Python >= 3.11 (the match semantics are identical).
word_re = re.compile(r'(?u)\w+')


class Stemmer(PorterStemmer):
    """
    All those porter stemmer implementations look hideous.
    Make at least the stem method nicer.
    """

    def stem(self, word):
        return PorterStemmer.stem(self, word, 0, len(word) - 1)


class WordCollector(NodeVisitor):
    """
    A special visitor that collects words for the `IndexBuilder`.
    """

    def __init__(self, document):
        NodeVisitor.__init__(self, document)
        self.found_words = []

    def dispatch_visit(self, node):
        # only plain Text nodes carry indexable words
        if node.__class__ is Text:
            self.found_words.extend(word_re.findall(node.astext()))


class IndexBuilder(object):
    """
    Helper class that creates a searchindex based on the doctrees
    passed to the `feed` method.
    """
    # serializers usable with dump()
    formats = {
        'json': dump_json,
        'pickle': pickle.dumps
    }

    def __init__(self):
        self._filenames = {}    # filename -> file id
        self._mapping = {}      # stemmed word -> set of file ids
        self._titles = {}       # file id -> document title
        self._categories = {}   # category -> set of file ids
        self._stemmer = Stemmer()

    def dump(self, stream, format):
        """Dump the frozen index to a stream in the given *format*."""
        stream.write(self.formats[format](self.freeze()))

    def freeze(self):
        """
        Create a usable data structure.  You can pass this output
        to the `SearchFrontend` to search the index.
        """
        return [
            [k for k, v in sorted(self._filenames.items(),
                                  key=lambda x: x[1])],
            dict(item for item in sorted(self._categories.items(),
                                         key=lambda x: x[0])),
            [v for k, v in sorted(self._titles.items(),
                                  key=lambda x: x[0])],
            dict(item for item in sorted(self._mapping.items(),
                                         key=lambda x: x[0])),
        ]

    def feed(self, filename, category, title, doctree):
        """Feed a doctree to the index."""
        file_id = self._filenames.setdefault(filename, len(self._filenames))
        self._titles[file_id] = title
        visitor = WordCollector(doctree)
        doctree.walk(visitor)
        self._categories.setdefault(category, set()).add(file_id)
        # index words from the title as well as the body text
        for word in word_re.findall(title) + visitor.found_words:
            self._mapping.setdefault(self._stemmer.stem(word.lower()),
                                     set()).add(file_id)


class SearchFrontend(object):
    """
    This class acts as a frontend for the search index.  It can search
    a searchindex as provided by `IndexBuilder`.
    """

    def __init__(self, index):
        self.filenames, self.areas, self.titles, self.words = index
        self._stemmer = Stemmer()

    def query(self, required, excluded, areas):
        """Return sorted (filename, title) pairs matching the query."""
        file_map = defaultdict(set)
        for word in required:
            if word not in self.words:
                # A required word is unknown -> no file can satisfy
                # "contains all required words".  The original broke out
                # and let the len(words)==len(required) filter below
                # reject everything; returning [] early is identical.
                return []
            for fid in self.words[word]:
                file_map[fid].add(word)

        return sorted(((self.filenames[fid], self.titles[fid])
                       for fid, words in file_map.items()
                       if len(words) == len(required) and
                       any(fid in self.areas.get(area, ())
                           for area in areas) and not
                       any(fid in self.words.get(word, ())
                           for word in excluded)
                       ), key=lambda x: x[1].lower())

    def search(self, searchstring, areas):
        """Parse *searchstring* ('-' prefix excludes a word) and query."""
        required = set()
        excluded = set()
        for word in searchstring.split():
            if word.startswith('-'):
                storage = excluded
                word = word[1:]
            else:
                storage = required
            storage.add(self._stemmer.stem(word.lower()))

        return self.query(required, excluded, areas)
r"""
This is based on SmartyPants.py by `Chad Miller`_.

Copyright and License
=====================

SmartyPants_ license::

    Copyright (c) 2003 John Gruber
    (http://daringfireball.net/)
    All rights reserved.

    Redistribution and use in source and binary forms, with or without
    modification, are permitted provided that the following conditions are
    met:

    *   Redistributions of source code must retain the above copyright
        notice, this list of conditions and the following disclaimer.

    *   Redistributions in binary form must reproduce the above copyright
        notice, this list of conditions and the following disclaimer in
        the documentation and/or other materials provided with the
        distribution.

    *   Neither the name "SmartyPants" nor the names of its contributors
        may be used to endorse or promote products derived from this
        software without specific prior written permission.

    This software is provided by the copyright holders and contributors "as
    is" and any express or implied warranties, including, but not limited
    to, the implied warranties of merchantability and fitness for a
    particular purpose are disclaimed. In no event shall the copyright
    owner or contributors be liable for any direct, indirect, incidental,
    special, exemplary, or consequential damages (including, but not
    limited to, procurement of substitute goods or services; loss of use,
    data, or profits; or business interruption) however caused and on any
    theory of liability, whether in contract, strict liability, or tort
    (including negligence or otherwise) arising in any way out of the use
    of this software, even if advised of the possibility of such damage.


smartypants.py license::

    smartypants.py is a derivative work of SmartyPants.

    Redistribution and use in source and binary forms, with or without
    modification, are permitted provided that the following conditions are
    met:

    *   Redistributions of source code must retain the above copyright
        notice, this list of conditions and the following disclaimer.

    *   Redistributions in binary form must reproduce the above copyright
        notice, this list of conditions and the following disclaimer in
        the documentation and/or other materials provided with the
        distribution.

    This software is provided by the copyright holders and contributors "as
    is" and any express or implied warranties, including, but not limited
    to, the implied warranties of merchantability and fitness for a
    particular purpose are disclaimed. In no event shall the copyright
    owner or contributors be liable for any direct, indirect, incidental,
    special, exemplary, or consequential damages (including, but not
    limited to, procurement of substitute goods or services; loss of use,
    data, or profits; or business interruption) however caused and on any
    theory of liability, whether in contract, strict liability, or tort
    (including negligence or otherwise) arising in any way out of the use
    of this software, even if advised of the possibility of such damage.

.. _Chad Miller: http://web.chad.org/
"""

# NOTE(review): the archived copy of this module had all of its HTML
# entity string literals ('&quot;', '&#8211;', '&#8216;', ...) decoded to
# the raw characters by the mail/HTML pipeline (e.g. the nonsensical
# t.replace('"', '"')).  The entity literals below are restored from the
# upstream SmartyPants/smartypants.py sources -- verify against them.

import re


def sphinx_smarty_pants(t):
    """Educate quotes and dashes in *t*, emitting HTML entities."""
    t = t.replace('&quot;', '"')
    t = educateDashesOldSchool(t)
    t = educateQuotes(t)
    t = t.replace('"', '&quot;')
    return t

# Constants for quote education.

punct_class = r"""[!"#\$\%'()*+,-.\/:;<=>?\@\[\\\]\^_`{|}~]"""
close_class = r"""[^\ \t\r\n\[\{\(\-]"""
dec_dashes = r"""&#8211;|&#8212;"""

# Special case if the very first character is a quote
# followed by punctuation at a non-word-break. Close the quotes by brute force:
# NOTE(review): '\\B' in a raw string matches a literal backslash then 'B',
# not a non-word-boundary; this oddity is inherited from upstream smartypants.
single_quote_start_re = re.compile(r"""^'(?=%s\\B)""" % (punct_class,))
double_quote_start_re = re.compile(r"""^"(?=%s\\B)""" % (punct_class,))

# Special case for double sets of quotes, e.g.:
#   He said, "'Quoted' words in a larger quote."
double_quote_sets_re = re.compile(r""""'(?=\w)""")
single_quote_sets_re = re.compile(r"""'"(?=\w)""")

# Special case for decade abbreviations (the '80s):
decade_abbr_re = re.compile(r"""\b'(?=\d{2}s)""")

# Get most opening double quotes:
opening_double_quotes_regex = re.compile(r"""
                (
                        \s          |   # a whitespace char, or
                        &nbsp;      |   # a non-breaking space entity, or
                        --          |   # dashes, or
                        &[mn]dash;  |   # named dash entities
                        %s          |   # or decimal entities
                        &\#x201[34];    # or hex
                )
                "                 # the quote
                (?=\w)            # followed by a word character
                """ % (dec_dashes,), re.VERBOSE)

# Double closing quotes:
closing_double_quotes_regex = re.compile(r"""
                #(%s)?   # character that indicates the quote should be closing
                "
                (?=\s)
                """ % (close_class,), re.VERBOSE)

closing_double_quotes_regex_2 = re.compile(r"""
                (%s)   # character that indicates the quote should be closing
                "
                """ % (close_class,), re.VERBOSE)

# Get most opening single quotes:
opening_single_quotes_regex = re.compile(r"""
                (
                        \s          |   # a whitespace char, or
                        &nbsp;      |   # a non-breaking space entity, or
                        --          |   # dashes, or
                        &[mn]dash;  |   # named dash entities
                        %s          |   # or decimal entities
                        &\#x201[34];    # or hex
                )
                '                 # the quote
                (?=\w)            # followed by a word character
                """ % (dec_dashes,), re.VERBOSE)

closing_single_quotes_regex = re.compile(r"""
                (%s)
                '
                (?!\s | s\b | \d)
                """ % (close_class,), re.VERBOSE)

closing_single_quotes_regex_2 = re.compile(r"""
                (%s)
                '
                (\s | s\b)
                """ % (close_class,), re.VERBOSE)

def educateQuotes(text):
    """
    Parameter:  String.

    Returns:    The string, with "educated" curly quote HTML entities.

    Example input:  "Isn't this fun?"
    Example output: &#8220;Isn&#8217;t this fun?&#8221;
    """
    # (parameter renamed from 'str', which shadowed the builtin)

    # Special case if the very first character is a quote
    # followed by punctuation at a non-word-break. Close the quotes by
    # brute force:
    text = single_quote_start_re.sub("&#8217;", text)
    text = double_quote_start_re.sub("&#8221;", text)

    # Special case for double sets of quotes, e.g.:
    #   He said, "'Quoted' words in a larger quote."
    text = double_quote_sets_re.sub("&#8220;&#8216;", text)
    text = single_quote_sets_re.sub("&#8216;&#8220;", text)

    # Special case for decade abbreviations (the '80s):
    text = decade_abbr_re.sub("&#8217;", text)

    text = opening_single_quotes_regex.sub(r"\1&#8216;", text)
    text = closing_single_quotes_regex.sub(r"\1&#8217;", text)
    text = closing_single_quotes_regex_2.sub(r"\1&#8217;\2", text)

    # Any remaining single quotes should be opening ones:
    text = text.replace("'", "&#8216;")

    text = opening_double_quotes_regex.sub(r"\1&#8220;", text)
    text = closing_double_quotes_regex.sub(r"&#8221;", text)
    text = closing_double_quotes_regex_2.sub(r"\1&#8221;", text)

    # Any remaining quotes should be opening ones.
    text = text.replace('"', "&#8220;")

    return text


def educateBackticks(text):
    """
    Parameter:  String.
    Returns:    The string, with ``backticks'' -style double quotes
                translated into HTML curly quote entities.
    Example input:  ``Isn't this fun?''
    Example output: &#8220;Isn't this fun?&#8221;
    """
    return text.replace("``", "&#8220;").replace("''", "&#8221;")


def educateSingleBackticks(text):
    """
    Parameter:  String.
    Returns:    The string, with `backticks' -style single quotes
                translated into HTML curly quote entities.

    Example input:  `Isn't this fun?'
    Example output: &#8216;Isn&#8217;t this fun?&#8217;
    """
    return text.replace('`', "&#8216;").replace("'", "&#8217;")


def educateDashesOldSchool(text):
    """
    Parameter:  String.

    Returns:    The string, with each instance of "--" translated to
                an en-dash HTML entity, and each "---" translated to
                an em-dash HTML entity.
    """
    return text.replace('---', "&#8212;").replace('--', "&#8211;")


def educateDashesOldSchoolInverted(text):
    """
    Parameter:  String.

    Returns:    The string, with each instance of "--" translated to
                an em-dash HTML entity, and each "---" translated to
                an en-dash HTML entity. Two reasons why: First, unlike the
                en- and em-dash syntax supported by
                EducateDashesOldSchool(), it's compatible with existing
                entries written before SmartyPants 1.1, back when "--" was
                only used for em-dashes.  Second, em-dashes are more
                common than en-dashes, and so it sort of makes sense that
                the shortcut should be shorter to type. (Thanks to Aaron
                Swartz for the idea.)
    """
    return text.replace('---', "&#8211;").replace('--', "&#8212;")


def educateEllipses(text):
    """
    Parameter:  String.
    Returns:    The string, with each instance of "..." translated to
                an ellipsis HTML entity.

    Example input:  Huh...?
    Example output: Huh&#8230;?
    """
    return text.replace('...', "&#8230;").replace('. . .', "&#8230;")


# NOTE(review): the author's email address was stripped by the archive;
# restored from upstream smartypants.py metadata.
__author__ = "Chad Miller <smartypantspy@chad.org>"
__version__ = "1.5_1.5: Sat, 13 Aug 2005 15:50:24 -0400"
__url__ = "http://wiki.chad.org/SmartyPantsPy"
__description__ = \
    "Smart-quotes, smart-ellipses, and smart-dashes for weblog entries in pyblosxom"
+""" + +class PorterStemmer(object): + + def __init__(self): + """The main part of the stemming algorithm starts here. + b is a buffer holding a word to be stemmed. The letters are in b[k0], + b[k0+1] ... ending at b[k]. In fact k0 = 0 in this demo program. k is + readjusted downwards as the stemming progresses. Zero termination is + not in fact used in the algorithm. + + Note that only lower case sequences are stemmed. Forcing to lower case + should be done before stem(...) is called. + """ + + self.b = "" # buffer for word to be stemmed + self.k = 0 + self.k0 = 0 + self.j = 0 # j is a general offset into the string + + def cons(self, i): + """cons(i) is TRUE <=> b[i] is a consonant.""" + if self.b[i] == 'a' or self.b[i] == 'e' or self.b[i] == 'i' \ + or self.b[i] == 'o' or self.b[i] == 'u': + return 0 + if self.b[i] == 'y': + if i == self.k0: + return 1 + else: + return (not self.cons(i - 1)) + return 1 + + def m(self): + """m() measures the number of consonant sequences between k0 and j. + if c is a consonant sequence and v a vowel sequence, and <..> + indicates arbitrary presence, + + gives 0 + vc gives 1 + vcvc gives 2 + vcvcvc gives 3 + .... 
+ """ + n = 0 + i = self.k0 + while 1: + if i > self.j: + return n + if not self.cons(i): + break + i = i + 1 + i = i + 1 + while 1: + while 1: + if i > self.j: + return n + if self.cons(i): + break + i = i + 1 + i = i + 1 + n = n + 1 + while 1: + if i > self.j: + return n + if not self.cons(i): + break + i = i + 1 + i = i + 1 + + def vowelinstem(self): + """vowelinstem() is TRUE <=> k0,...j contains a vowel""" + for i in range(self.k0, self.j + 1): + if not self.cons(i): + return 1 + return 0 + + def doublec(self, j): + """doublec(j) is TRUE <=> j,(j-1) contain a double consonant.""" + if j < (self.k0 + 1): + return 0 + if (self.b[j] != self.b[j-1]): + return 0 + return self.cons(j) + + def cvc(self, i): + """cvc(i) is TRUE <=> i-2,i-1,i has the form consonant - vowel - consonant + and also if the second c is not w,x or y. this is used when trying to + restore an e at the end of a short e.g. + + cav(e), lov(e), hop(e), crim(e), but + snow, box, tray. + """ + if i < (self.k0 + 2) or not self.cons(i) or self.cons(i-1) or not self.cons(i-2): + return 0 + ch = self.b[i] + if ch == 'w' or ch == 'x' or ch == 'y': + return 0 + return 1 + + def ends(self, s): + """ends(s) is TRUE <=> k0,...k ends with the string s.""" + length = len(s) + if s[length - 1] != self.b[self.k]: # tiny speed-up + return 0 + if length > (self.k - self.k0 + 1): + return 0 + if self.b[self.k-length+1:self.k+1] != s: + return 0 + self.j = self.k - length + return 1 + + def setto(self, s): + """setto(s) sets (j+1),...k to the characters in the string s, readjusting k.""" + length = len(s) + self.b = self.b[:self.j+1] + s + self.b[self.j+length+1:] + self.k = self.j + length + + def r(self, s): + """r(s) is used further down.""" + if self.m() > 0: + self.setto(s) + + def step1ab(self): + """step1ab() gets rid of plurals and -ed or -ing. e.g. 
+ + caresses -> caress + ponies -> poni + ties -> ti + caress -> caress + cats -> cat + + feed -> feed + agreed -> agree + disabled -> disable + + matting -> mat + mating -> mate + meeting -> meet + milling -> mill + messing -> mess + + meetings -> meet + """ + if self.b[self.k] == 's': + if self.ends("sses"): + self.k = self.k - 2 + elif self.ends("ies"): + self.setto("i") + elif self.b[self.k - 1] != 's': + self.k = self.k - 1 + if self.ends("eed"): + if self.m() > 0: + self.k = self.k - 1 + elif (self.ends("ed") or self.ends("ing")) and self.vowelinstem(): + self.k = self.j + if self.ends("at"): self.setto("ate") + elif self.ends("bl"): self.setto("ble") + elif self.ends("iz"): self.setto("ize") + elif self.doublec(self.k): + self.k = self.k - 1 + ch = self.b[self.k] + if ch == 'l' or ch == 's' or ch == 'z': + self.k = self.k + 1 + elif (self.m() == 1 and self.cvc(self.k)): + self.setto("e") + + def step1c(self): + """step1c() turns terminal y to i when there is another vowel in the stem.""" + if (self.ends("y") and self.vowelinstem()): + self.b = self.b[:self.k] + 'i' + self.b[self.k+1:] + + def step2(self): + """step2() maps double suffices to single ones. + so -ization ( = -ize plus -ation) maps to -ize etc. note that the + string before the suffix must give m() > 0. 
+ """ + if self.b[self.k - 1] == 'a': + if self.ends("ational"): self.r("ate") + elif self.ends("tional"): self.r("tion") + elif self.b[self.k - 1] == 'c': + if self.ends("enci"): self.r("ence") + elif self.ends("anci"): self.r("ance") + elif self.b[self.k - 1] == 'e': + if self.ends("izer"): self.r("ize") + elif self.b[self.k - 1] == 'l': + if self.ends("bli"): self.r("ble") # --DEPARTURE-- + # To match the published algorithm, replace this phrase with + # if self.ends("abli"): self.r("able") + elif self.ends("alli"): self.r("al") + elif self.ends("entli"): self.r("ent") + elif self.ends("eli"): self.r("e") + elif self.ends("ousli"): self.r("ous") + elif self.b[self.k - 1] == 'o': + if self.ends("ization"): self.r("ize") + elif self.ends("ation"): self.r("ate") + elif self.ends("ator"): self.r("ate") + elif self.b[self.k - 1] == 's': + if self.ends("alism"): self.r("al") + elif self.ends("iveness"): self.r("ive") + elif self.ends("fulness"): self.r("ful") + elif self.ends("ousness"): self.r("ous") + elif self.b[self.k - 1] == 't': + if self.ends("aliti"): self.r("al") + elif self.ends("iviti"): self.r("ive") + elif self.ends("biliti"): self.r("ble") + elif self.b[self.k - 1] == 'g': # --DEPARTURE-- + if self.ends("logi"): self.r("log") + # To match the published algorithm, delete this phrase + + def step3(self): + """step3() dels with -ic-, -full, -ness etc. 
similar strategy to step2.""" + if self.b[self.k] == 'e': + if self.ends("icate"): self.r("ic") + elif self.ends("ative"): self.r("") + elif self.ends("alize"): self.r("al") + elif self.b[self.k] == 'i': + if self.ends("iciti"): self.r("ic") + elif self.b[self.k] == 'l': + if self.ends("ical"): self.r("ic") + elif self.ends("ful"): self.r("") + elif self.b[self.k] == 's': + if self.ends("ness"): self.r("") + + def step4(self): + """step4() takes off -ant, -ence etc., in context vcvc.""" + if self.b[self.k - 1] == 'a': + if self.ends("al"): pass + else: return + elif self.b[self.k - 1] == 'c': + if self.ends("ance"): pass + elif self.ends("ence"): pass + else: return + elif self.b[self.k - 1] == 'e': + if self.ends("er"): pass + else: return + elif self.b[self.k - 1] == 'i': + if self.ends("ic"): pass + else: return + elif self.b[self.k - 1] == 'l': + if self.ends("able"): pass + elif self.ends("ible"): pass + else: return + elif self.b[self.k - 1] == 'n': + if self.ends("ant"): pass + elif self.ends("ement"): pass + elif self.ends("ment"): pass + elif self.ends("ent"): pass + else: return + elif self.b[self.k - 1] == 'o': + if self.ends("ion") and (self.b[self.j] == 's' \ + or self.b[self.j] == 't'): pass + elif self.ends("ou"): pass + # takes care of -ous + else: return + elif self.b[self.k - 1] == 's': + if self.ends("ism"): pass + else: return + elif self.b[self.k - 1] == 't': + if self.ends("ate"): pass + elif self.ends("iti"): pass + else: return + elif self.b[self.k - 1] == 'u': + if self.ends("ous"): pass + else: return + elif self.b[self.k - 1] == 'v': + if self.ends("ive"): pass + else: return + elif self.b[self.k - 1] == 'z': + if self.ends("ize"): pass + else: return + else: + return + if self.m() > 1: + self.k = self.j + + def step5(self): + """step5() removes a final -e if m() > 1, and changes -ll to -l if + m() > 1. 
+ """ + self.j = self.k + if self.b[self.k] == 'e': + a = self.m() + if a > 1 or (a == 1 and not self.cvc(self.k-1)): + self.k = self.k - 1 + if self.b[self.k] == 'l' and self.doublec(self.k) and self.m() > 1: + self.k = self.k -1 + + def stem(self, p, i, j): + """In stem(p,i,j), p is a char pointer, and the string to be stemmed + is from p[i] to p[j] inclusive. Typically i is zero and j is the + offset to the last character of a string, (p[j+1] == '\0'). The + stemmer adjusts the characters p[i] ... p[j] and returns the new + end-point of the string, k. Stemming never increases word length, so + i <= k <= j. To turn the stemmer into a module, declare 'stem' as + extern, and delete the remainder of this file. + """ + # copy the parameters into statics + self.b = p + self.k = j + self.k0 = i + if self.k <= self.k0 + 1: + return self.b # --DEPARTURE-- + + # With this line, strings of length 1 or 2 don't go through the + # stemming process, although no mention is made of this in the + # published algorithm. Remove the line to match the published + # algorithm. 
+ + self.step1ab() + self.step1c() + self.step2() + self.step3() + self.step4() + self.step5() + return self.b[self.k0:self.k+1] Added: doctools/trunk/sphinx/style/admin.css ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/style/admin.css Mon Jul 23 11:02:25 2007 @@ -0,0 +1,162 @@ +/** + * Sphinx Admin Panel + */ + +div.admin { + margin: 0 -20px -30px -20px; + padding: 0 20px 10px 20px; + background-color: #f2f2f2; + color: black; +} + +div.admin a { + color: #333; + text-decoration: underline; +} + +div.admin a:hover { + color: black; +} + +div.admin h1, +div.admin h2 { + background-color: #555; + border-bottom: 1px solid #222; + color: white; +} + +div.admin form form { + display: inline; +} + +div.admin input, div.admin textarea { + font-family: 'Bitstream Vera Sans', 'Arial', sans-serif; + font-size: 13px; + color: #333; + padding: 2px; + background-color: #fff; + border: 1px solid #aaa; +} + +div.admin input[type="reset"], +div.admin input[type="submit"] { + cursor: pointer; + font-weight: bold; + padding: 2px; +} + +div.admin input[type="reset"]:hover, +div.admin input[type="submit"]:hover { + border: 1px solid #333; +} + +div.admin div.actions { + margin: 10px 0 0 0; + padding: 5px; + background-color: #aaa; + border: 1px solid #777; +} + +div.admin div.error { + margin: 10px 0 0 0; + padding: 5px; + border: 2px solid #222; + background-color: #ccc; + font-weight: bold; +} + +div.admin div.dialog { + background-color: #ccc; + margin: 10px 0 10px 0; +} + +div.admin div.dialog h2 { + margin: 0; + font-size: 18px; + padding: 4px 10px 4px 10px; +} + +div.admin div.dialog div.text { + padding: 10px; +} + +div.admin div.dialog div.buttons { + padding: 5px 10px 5px 10px; +} + +div.admin table.mapping { + width: 100%; + border: 1px solid #999; + border-collapse: collapse; + background-color: #aaa; +} + +div.admin table.mapping th { + background-color: #ddd; + border-bottom: 1px solid #888; + 
padding: 5px; +} + +div.admin table.mapping th.recent_comments { + background-color: #c5cba4; +} + +div.admin table.mapping, +div.admin table.mapping a { + color: black; +} + +div.admin table.mapping td { + border: 1px solid #888; + border-left: none; + border-right: none; + text-align: left; + line-height: 24px; + padding: 0 5px 0 5px; +} + +div.admin table.mapping tr:hover { + background-color: #888; +} + +div.admin table.mapping td.username { + width: 180px; +} + +div.admin table.mapping td.pub_date { + font-style: italic; + text-align: right; +} + +div.admin table.mapping td.groups input { + width: 100%; +} + +div.admin table.mapping td.actions input { + padding: 0; +} + +div.admin table.mapping .actions { + text-align: right; + width: 70px; +} + +div.admin table.mapping span.meta { + font-size: 11px; + color: #222; +} + +div.admin table.mapping span.meta a { + color: #222; +} + +div.admin div.detail_form dt { + clear: both; + float: left; + width: 110px; +} + +div.admin div.detail_form textarea { + width: 98%; + height: 160px; +} Added: doctools/trunk/sphinx/style/comment.png ============================================================================== Binary file. No diff available. 
Added: doctools/trunk/sphinx/style/default.css ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/style/default.css Mon Jul 23 11:02:25 2007 @@ -0,0 +1,764 @@ +/** + * Python Doc Design + */ + +body { + font-family: 'Bitstream Vera Sans', 'Arial', sans-serif; + font-size: 13px; + background-color: #11303d; + color: #000; + margin: 0; + padding: 0; +} + +/* :::: LAYOUT :::: */ + +div.document { + background-color: #1c4e63; +} + +div.documentwrapper { + float: left; + width: 100%; +} + +div.bodywrapper { + margin: 0 0 0 230px; +} + +div.body { + background-color: white; + padding: 0 20px 30px 20px; +} + +div.sidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sidebar { + float: left; + width: 230px; + margin-left: -100%; +} + +div.clearer { + clear: both; +} + +div.footer { + color: #fff; + width: 100%; + padding: 9px 0 9px 0; + text-align: center; +} + +div.footer a { + color: #fff; + text-decoration: underline; +} + +div.related { + background-color: #133f52; + color: #fff; + width: 100%; + height: 30px; + line-height: 30px; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +div.related a { + color: white; +} + +/* ::: TOC :::: */ +div.sidebar h3 { + font-family: 'Trebuchet MS', sans-serif; + color: white; + font-size: 24px; + font-weight: normal; + margin: 0; + padding: 0; +} + +div.sidebar h4 { + font-family: 'Trebuchet MS', sans-serif; + color: white; + font-size: 16px; + font-weight: normal; + margin: 5px 0 0 0; + padding: 0; +} + +div.sidebar p { + color: white; +} + +div.sidebar p.topless { + margin: 5px 10px 10px 10px; +} + +div.sidebar ul { + margin: 10px; + padding: 0; + list-style: none; + color: white; +} + +div.sidebar ul ul, +div.sidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + 
+div.sidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sidebar a { + color: #98dbcc; +} + +div.sidebar form { + margin-top: 10px; +} + +div.sidebar input { + border: 1px solid #98dbcc; + font-family: 'Bitstream Vera Sans', 'Arial', sans-serif; + font-size: 1em; +} + +/* :::: MODULE CLOUD :::: */ +div.modulecloud { + margin: -5px 10px 5px 10px; + padding: 10px; + font-size: 110%; + line-height: 160%; + border: 1px solid #cbe7e5; + background-color: #f2fbfd; +} + +div.modulecloud a { + padding: 0 5px 0 5px; +} + +/* :::: SEARCH :::: */ +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li div.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* :::: COMMON FORM STYLES :::: */ + +div.actions { + padding: 5px 10px 5px 10px; + border-top: 1px solid #cbe7e5; + border-bottom: 1px solid #cbe7e5; + background-color: #e0f6f4; +} + +form dl { + color: #333; +} + +form dt { + clear: both; + float: left; + min-width: 110px; + margin-right: 10px; + padding-top: 2px; +} + +input#homepage { + display: none; +} + +div.error { + margin: 5px 20px 0 0; + padding: 5px; + border: 1px solid #d00; + font-weight: bold; +} + +/* :::: INLINE COMMENTS :::: */ + +div.inlinecomments { + position: absolute; + right: 20px; +} + +div.inlinecomments a.bubble { + display: block; + float: right; + background-image: url(style/comment.png); + background-repeat: no-repeat; + width: 25px; + height: 25px; + text-align: center; + padding-top: 3px; + font-size: 12px; + line-height: 14px; + font-weight: bold; + color: black; +} + +div.inlinecomments a.bubble span { + display: none; +} + +div.inlinecomments a.emptybubble { + background-image: url(style/nocomment.png); +} + +div.inlinecomments a.bubble:hover { 
+ background-image: url(style/hovercomment.png); + text-decoration: none; + color: #3ca0a4; +} + +div.inlinecomments div.comments { + float: right; + margin: 25px 5px 0 0; + max-width: 50em; + min-width: 30em; + border: 1px solid #2eabb0; + background-color: #f2fbfd; + z-index: 150; +} + +div#comments { + border: 1px solid #2eabb0; +} + +div#comments div.nocomments { + padding: 10px; + font-weight: bold; +} + +div.inlinecomments div.comments h3, +div#comments h3 { + margin: 0; + padding: 0; + background-color: #2eabb0; + color: white; + border: none; + padding: 3px; +} + +div.inlinecomments div.comments div.actions { + padding: 4px; + margin: 0; + border-top: none; +} + +div#comments div.comment { + margin: 10px; + border: 1px solid #2eabb0; +} + +div.inlinecomments div.comment h4, +div.commentwindow div.comment h4, +div#comments div.comment h4 { + margin: 10px 0 0 0; + background-color: #2eabb0; + color: white; + border: none; + padding: 1px 4px 1px 4px; +} + +div#comments div.comment h4 { + margin: 0; +} + +div#comments div.comment h4 a { + color: #d5f4f4; +} + +div.inlinecomments div.comment div.text, +div.commentwindow div.comment div.text, +div#comments div.comment div.text { + margin: -5px 0 -5px 0; + padding: 0 10px 0 10px; +} + +div.inlinecomments div.comment div.meta, +div.commentwindow div.comment div.meta, +div#comments div.comment div.meta { + text-align: right; + padding: 2px 10px 2px 0; + font-size: 95%; + color: #538893; + border-top: 1px solid #cbe7e5; + background-color: #e0f6f4; +} + +div.commentwindow { + position: absolute; + width: 500px; + border: 1px solid #cbe7e5; + background-color: #f2fbfd; + display: none; + z-index: 130; +} + +div.commentwindow h3 { + margin: 0; + background-color: #2eabb0; + color: white; + border: none; + padding: 5px; + font-size: 22px; + cursor: pointer; +} + +div.commentwindow div.actions { + margin: 10px -10px 0 -10px; + padding: 4px 10px 4px 10px; + color: #538893; +} + +div.commentwindow div.actions input { + 
border: 1px solid #2eabb0; + background-color: white; + color: #135355; + cursor: pointer; +} + +div.commentwindow div.form { + padding: 0 10px 0 10px; +} + +div.commentwindow div.form input, +div.commentwindow div.form textarea { + border: 1px solid #3c9ea2; + background-color: white; + color: black; +} + +div.commentwindow div.error { + margin: 10px 5px 10px 5px; + background-color: #fbe5dc; + display: none; +} + +div.commentwindow div.form textarea { + width: 99%; +} + +div.commentwindow div.preview { + margin: 10px 0 10px 0; + background-color: #70d0d4; + padding: 0 1px 1px 25px; +} + +div.commentwindow div.preview h4 { + margin: 0 0 -5px -20px; + padding: 4px 0 0 4px; + color: white; + font-size: 18px; +} + +div.commentwindow div.preview div.comment { + background-color: #f2fbfd; +} + +div.commentwindow div.preview div.comment h4 { + margin: 10px 0 0 0!important; + padding: 1px 4px 1px 4px!important; + font-size: 16px; +} + +/* :::: SUGGEST CHANGES :::: */ +div#suggest-changes-box input, div#suggest-changes-box textarea { + border: 1px solid #ccc; + background-color: white; + color: black; +} + +div#suggest-changes-box textarea { + width: 99%; + height: 400px; +} + + +/* :::: PREVIEW :::: */ +div.preview { + background-image: url(style/preview.png); + padding: 0 20px 20px 20px; + margin-bottom: 30px; +} + + +/* :::: INDEX PAGE :::: */ + +table.contentstable { + width: 90%; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.5em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; +} + +/* :::: INDEX STYLES :::: */ + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable dl, table.indextable dd { + margin-top: 0; + margin-bottom: 0; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +form.pfform { + margin: 10px 0 
20px 0; +} + +/* :::: GLOBAL STYLES :::: */ + +.docwarning { + background-color: #ffe4e4; + padding: 10px; + margin: 0 -20px 0 -20px; + border-bottom: 1px solid #f66; +} + +p.subhead { + font-weight: bold; + margin-top: 20px; +} + +a { + color: #355f7c; + text-decoration: none; +} + +a:hover { + text-decoration: underline; +} + +div.body h1, +div.body h2, +div.body h3, +div.body h4, +div.body h5, +div.body h6 { + font-family: 'Trebuchet MS', sans-serif; + background-color: #f2f2f2; + font-weight: normal; + color: #20435c; + border-bottom: 1px solid #ccc; + margin: 20px -20px 10px -20px; + padding: 3px 0 3px 10px; +} + +div.body h1 { margin-top: 0; font-size: 30px; } +div.body h2 { font-size: 25px; } +div.body h3 { font-size: 21px; } +div.body h4 { font-size: 18px; } +div.body h5 { font-size: 14px; } +div.body h6 { font-size: 12px; } + +a.headerlink, +a.headerlink, +a.headerlink, +a.headerlink, +a.headerlink, +a.headerlink { + color: #c60f0f; + font-size: 0.8em; + padding: 0 4px 0 4px; + text-decoration: none; + visibility: hidden; +} + +*:hover > a.headerlink, +*:hover > a.headerlink, +*:hover > a.headerlink, +*:hover > a.headerlink, +*:hover > a.headerlink, +*:hover > a.headerlink { + visibility: visible; +} + +a.headerlink:hover, +a.headerlink:hover, +a.headerlink:hover, +a.headerlink:hover, +a.headerlink:hover, +a.headerlink:hover { + background-color: #c60f0f; + color: white; +} + +div.body p, div.body dd, div.body li { + text-align: justify; + line-height: 130%; +} + +div.body td { + text-align: left; +} + +ul.fakelist { + list-style: none; + margin: 10px 0 10px 20px; + padding: 0; +} + +/* "Footnotes" heading */ +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +/* Admonitions */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 10px 10px 0px 10px; +} + +div.admonition dt { + font-weight: bold; +} + +div.admonition dd { + margin-bottom: 10px; +} + +div.seealso { + background-color: #ffc; + border: 1px solid #ff6; +} + 
+div.warning { + background-color: #ffe4e4; + border: 1px solid #f66; +} + +div.note { + background-color: #eee; + border: 1px solid #ccc; +} + +p.admonition-title { + margin: 0px 0px 5px 0px; + font-weight: bold; + font-size: 1.1em; +} + +table.docutils { + border: 0; +} + +table.docutils td, table.docutils th { + margin: 2px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +table.field-list td, table.field-list th { + border: 0 !important; +} + +table.footnote td, table.footnote th { + border: 0 !important; +} + +dl { + margin-bottom: 15px; + clear: both; +} + +dd p { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +.refcount { + color: #060; +} + +dt:target, +.highlight { + background-color: #fbe54e; +} + +th { + text-align: left; + padding-right: 5px; +} + +pre { + font-family: 'Bitstream Vera Sans Mono', monospace; + padding: 5px; + background-color: #efc; + color: #333; + border: 1px solid #ac9; + border-left: none; + border-right: none; +} + +tt { + font-family: 'Bitstream Vera Sans Mono', monospace; + background-color: #ecf0f3; + padding: 1px; +} + +tt.descname { + background-color: transparent; + font-weight: bold; + font-size: 1.2em; +} + +tt.descclassname { + background-color: transparent; +} + +tt.xref, a tt { + background-color: transparent; + font-weight: bold; +} + +.footnote:target { background-color: #ffa } + +h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt { + background-color: transparent; +} + +.optional { + font-size: 1.3em; +} + +.versionmodified { + font-style: italic; +} + +form.comment { + margin: 0; + padding: 10px 30px 10px 30px; + background-color: #eee; +} + +form.comment h3 { + background-color: #326591; + color: white; + margin: -10px -30px 10px -30px; + padding: 5px; + font-size: 1.4em; +} + +form.comment input, +form.comment textarea { + border: 1px solid #ccc; + padding: 2px; + font-family: 
'Bitstream Vera Sans', 'Verdana', sans-serif; + font-size: 13px; +} + +form.comment input[type="text"] { + width: 240px; +} + +form.comment textarea { + width: 100%; + height: 200px; + margin-bottom: 10px; +} + +/* :::: PRINT :::: */ + at media print { + div.documentwrapper { + width: 100%; + } + + div.body { + margin: 0; + } + + div.sidebar, + div.related, + div.footer, + div#comments div.new-comment-box, + #top-link { + display: none; + } +} Added: doctools/trunk/sphinx/style/doctools.js ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/style/doctools.js Mon Jul 23 11:02:25 2007 @@ -0,0 +1,349 @@ +/// XXX: make it cross browser + +/** + * make the code below compatible with browsers without + * an installed firebug like debugger + */ +if (!window.console || !console.firebug) { + var names = ["log", "debug", "info", "warn", "error", "assert", "dir", "dirxml", + "group", "groupEnd", "time", "timeEnd", "count", "trace", "profile", "profileEnd"]; + window.console = {}; + for (var i = 0; i < names.length; ++i) + window.console[names[i]] = function() {} +} + +/** + * small helper function to urldecode strings + */ +jQuery.urldecode = function(x) { + return decodeURIComponent(x).replace(/\+/g, ' '); +} + +/** + * small helper function to urlencode strings + */ +jQuery.urlencode = encodeURIComponent; + +/** + * This function returns the parsed url parameters of the + * current request. Multiple values per key are supported, + * it will always return arrays of strings for the value parts. 
+ */ +jQuery.getQueryParameters = function(s) { + if (typeof s == 'undefined') + s = document.location.search; + var parts = s.substr(s.indexOf('?') + 1).split('&'); + var result = {}; + for (var i = 0; i < parts.length; i++) { + var tmp = parts[i].split('=', 2); + var key = jQuery.urldecode(tmp[0]); + var value = jQuery.urldecode(tmp[1]); + if (key in result) + result[key].push(value); + else + result[key] = [value]; + } + return result; +} + +/** + * small function to check if an array contains + * a given item. + */ +jQuery.contains = function(arr, item) { + for (var i = 0; i < arr.length; i++) { + if (arr[i] == item) + return true; + } + return false; +} + +/** + * highlight a given string on a jquery object by wrapping it in + * span elements with the given class name. + */ +jQuery.fn.highlightText = function(text, className) { + function highlight(node) { + if (node.nodeType == 3) { + var val = node.nodeValue; + var pos = val.toLowerCase().indexOf(text); + if (pos >= 0 && !jQuery.className.has(node.parentNode, className)) { + var span = document.createElement("span"); + span.className = className; + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + node.parentNode.insertBefore(span, node.parentNode.insertBefore( + document.createTextNode(val.substr(pos + text.length)), + node.nextSibling)); + node.nodeValue = val.substr(0, pos); + } + } + else if (!jQuery(node).is("button, select, textarea")) { + jQuery.each(node.childNodes, function() { + highlight(this) + }); + } + } + return this.each(function() { + highlight(this); + }); +} + +/** + * Small JavaScript module for the documentation. 
+ */ +var Documentation = { + + init : function() { + this.addContextElements(); + this.fixFirefoxAnchorBug(); + this.highlightSearchWords(); + this.initModIndex(); + this.initComments(); + }, + + /** + * add context elements like header anchor links + */ + addContextElements : function() { + for (var i = 1; i <= 6; i++) { + $('h' + i + '[@id]').each(function() { + $('\u00B6'). + attr('href', '#' + this.id). + attr('title', 'Permalink to this headline'). + appendTo(this); + }); + } + $('dt[@id]').each(function() { + $('\u00B6'). + attr('href', '#' + this.id). + attr('title', 'Permalink to this definition'). + appendTo(this); + }); + }, + + /** + * workaround a firefox stupidity + */ + fixFirefoxAnchorBug : function() { + if (document.location.hash && $.browser.mozilla) + window.setTimeout(function() { + document.location.href += ''; + }, 10); + }, + + /** + * highlight the search words provided in the url in the text + */ + highlightSearchWords : function() { + var params = $.getQueryParameters(); + var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : []; + if (terms.length) { + var body = $('div.body'); + window.setTimeout(function() { + $.each(terms, function() { + body.highlightText(this.toLowerCase(), 'highlight'); + }); + }, 10); + $('
  • ') + .appendTo($('.sidebar .this-page-menu')); + } + }, + + /** + * init the modindex toggle buttons + */ + initModIndex : function() { + $('img.toggler').click(function() { + var src = $(this).attr('src'); + var idnum = $(this).attr('id').substr(7); + console.log($('tr.cg-' + idnum).toggle()); + if (src.substr(-9) == 'minus.png') + $(this).attr('src', src.substr(0, src.length-9) + 'plus.png'); + else + $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); + }).css('display', '').click(); + }, + + /** + * init the inline comments + */ + initComments : function() { + $('.inlinecomments div.actions').each(function() { + this.innerHTML += ' | '; + $(this).append($('hide comments').click(function() { + $(this).parent().parent().toggle(); + return false; + })); + }); + $('.inlinecomments .comments').hide(); + $('.inlinecomments a.bubble').each(function() { + $(this).click($(this).is('.emptybubble') ? function() { + var params = $.getQueryParameters(this.href); + Documentation.newComment(params.target[0]); + return false; + } : function() { + $('.comments', $(this).parent().parent()[0]).toggle(); + return false; + }); + }); + $('#comments div.actions a.newcomment').click(function() { + Documentation.newComment(); + return false; + }); + if (document.location.hash.match(/^#comment-/)) + $('.inlinecomments .comments ' + document.location.hash) + .parent().toggle(); + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords : function() { + $('.sidebar .this-page-menu li.highlight-link').fadeOut(300); + $('span.highlight').removeClass('highlight'); + }, + + /** + * show the comment window for a certain id or the whole page. 
+ */ + newComment : function(id) { + Documentation.CommentWindow.openFor(id || ''); + }, + + /** + * write a new comment from within a comment view box + */ + newCommentFromBox : function(link) { + var params = $.getQueryParameters(link.href); + $(link).parent().parent().fadeOut('slow'); + this.newComment(params.target); + }, + + /** + * make the url absolute + */ + makeURL : function(relativeURL) { + return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; + }, + + /** + * get the current relative url + */ + getCurrentURL : function() { + var path = document.location.pathname; + var parts = path.split(/\//); + $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { + if (this == '..') + parts.pop(); + }); + var url = parts.join('/'); + return path.substring(url.lastIndexOf('/') + 1, path.length - 1); + }, + + /** + * class that represents the comment window + */ + CommentWindow : (function() { + var openWindows = {}; + + var Window = function(sectionID) { + this.url = Documentation.makeURL('@comments/' + Documentation.getCurrentURL() + + '/?target=' + $.urlencode(sectionID) + '&mode=ajax'); + this.sectionID = sectionID; + + this.root = $('
    '); + this.root.appendTo($('body')); + this.title = $('

    New Comment

    ').appendTo(this.root); + this.body = $('
    please wait...
    ').appendTo(this.root); + this.resizeHandle = $('
    ').appendTo(this.root); + + this.root.Draggable({ + handle: this.title[0], + }); + + this.root.css({ + left: window.innerWidth / 2 - $(this.root).width() / 2, + top: window.scrollY + (window.innerHeight / 2 - 150) + }); + this.root.fadeIn('slow'); + this.updateView(); + }; + + Window.prototype.updateView = function(data) { + var self = this; + function update(data) { + if (data.posted) { + document.location.hash = '#comment-' + data.commentID; + document.location.reload(); + } + else { + self.body.html(data.body); + $('div.actions', self.body).append($('') + .attr('type', 'button') + .attr('value', 'Close') + .click(function() { self.close(); }) + ); + $('div.actions input[@name="preview"]') + .attr('type', 'button') + .click(function() { self.submitForm($('form', self.body)[0], true); }); + $('form', self.body).bind("submit", function() { + self.submitForm(this); + return false; + }); + + if (data.error) { + self.root.Highlight(1000, '#aadee1'); + $('div.error', self.root).slideDown(500); + } + } + } + + if (typeof data == 'undefined') + $.getJSON(this.url, function(json) { update(json); }); + else + $.ajax({ + url: this.url, + type: 'POST', + dataType: 'json', + data: data, + success: function(json) { update(json); } + }); + } + + Window.prototype.getFormValue = function(name) { + return $('*[@name="' + name + '"]', this.body)[0].value; + } + + Window.prototype.submitForm = function(form, previewMode) { + this.updateView({ + author: form.author.value, + author_mail: form.author_mail.value, + title: form.title.value, + comment_body: form.comment_body.value, + preview: previewMode ? 
'yes' : '' + }); + } + + Window.prototype.close = function() { + var self = this; + delete openWindows[this.sectionID]; + this.root.fadeOut('slow', function() { + self.root.remove(); + }); + } + + Window.openFor = function(sectionID) { + if (sectionID in openWindows) + return openWindows[sectionID]; + return new Window(sectionID); + } + + return Window; + })() +}; + + +$(document).ready(function() { + Documentation.init(); +}); Added: doctools/trunk/sphinx/style/file.png ============================================================================== Binary file. No diff available. Added: doctools/trunk/sphinx/style/hovercomment.png ============================================================================== Binary file. No diff available. Added: doctools/trunk/sphinx/style/interface.js ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/style/interface.js Mon Jul 23 11:02:25 2007 @@ -0,0 +1,8 @@ +/* + * Interface elements for jQuery - http://interface.eyecon.ro + * + * Copyright (c) 2006 Stefan Petre + * Dual licensed under the MIT (MIT-LICENSE.txt) + * and GPL (GPL-LICENSE.txt) licenses. 
+ */ + eval(function(p,a,c,k,e,d){e=function(c){return(c35?String.fromCharCode(c+29):c.toString(36))};if(!''.replace(/^/,String)){while(c--){d[e(c)]=k[c]||e(c)}k=[function(e){return d[e]}];e=function(){return'\\w+'};c=1};while(c--){if(k[c]){p=p.replace(new RegExp('\\b'+e(c)+'\\b','g'),k[c])}}return p}('k.1a={2R:u(e){D x=0;D y=0;D 5H=I;D es=e.18;if(k(e).B(\'19\')==\'1n\'){62=es.3j;9C=es.Y;es.3j=\'2O\';es.19=\'2E\';es.Y=\'1O\';5H=1b}D el=e;7o(el){x+=el.8n+(el.4Y&&!k.3h.7N?T(el.4Y.5a)||0:0);y+=el.8t+(el.4Y&&!k.3h.7N?T(el.4Y.4Z)||0:0);el=el.dr}el=e;7o(el&&el.4S&&el.4S.5Z()!=\'2e\'){x-=el.3g||0;y-=el.2V||0;el=el.3e}if(5H){es.19=\'1n\';es.Y=9C;es.3j=62}E{x:x,y:y}},bN:u(el){D x=0,y=0;7o(el){x+=el.8n||0;y+=el.8t||0;el=el.dr}E{x:x,y:y}},2p:u(e){D w=k.B(e,\'Z\');D h=k.B(e,\'V\');D 1D=0;D hb=0;D es=e.18;if(k(e).B(\'19\')!=\'1n\'){1D=e.4b;hb=e.63}P{62=es.3j;9C=es.Y;es.3j=\'2O\';es.19=\'2E\';es.Y=\'1O\';1D=e.4b;hb=e.63;es.19=\'1n\';es.Y=9C;es.3j=62}E{w:w,h:h,1D:1D,hb:hb}},82:u(el){E{1D:el.4b||0,hb:el.63||0}},bq:u(e){D h,w,de;if(e){w=e.8k;h=e.8z}P{de=1j.4J;w=1V.d0||9B.d0||(de&&de.8k)||1j.2e.8k;h=1V.d1||9B.d1||(de&&de.8z)||1j.2e.8z}E{w:w,h:h}},6W:u(e){D t,l,w,h,iw,ih;if(e&&e.9A.5Z()!=\'2e\'){t=e.2V;l=e.3g;w=e.cY;h=e.cW;iw=0;ih=0}P{if(1j.4J&&1j.4J.2V){t=1j.4J.2V;l=1j.4J.3g;w=1j.4J.cY;h=1j.4J.cW}P if(1j.2e){t=1j.2e.2V;l=1j.2e.3g;w=1j.2e.cY;h=1j.2e.cW}iw=9B.d0||1j.4J.8k||1j.2e.8k||0;ih=9B.d1||1j.4J.8z||1j.2e.8z||0}E{t:t,l:l,w:w,h:h,iw:iw,ih:ih}},c8:u(e,7C){D el=k(e);D t=el.B(\'5o\')||\'\';D r=el.B(\'5p\')||\'\';D b=el.B(\'5m\')||\'\';D l=el.B(\'5k\')||\'\';if(7C)E{t:T(t)||0,r:T(r)||0,b:T(b)||0,l:T(l)};P E{t:t,r:r,b:b,l:l}},aj:u(e,7C){D el=k(e);D t=el.B(\'66\')||\'\';D r=el.B(\'6j\')||\'\';D b=el.B(\'5M\')||\'\';D l=el.B(\'4X\')||\'\';if(7C)E{t:T(t)||0,r:T(r)||0,b:T(b)||0,l:T(l)};P E{t:t,r:r,b:b,l:l}},6h:u(e,7C){D el=k(e);D t=el.B(\'4Z\')||\'\';D r=el.B(\'6k\')||\'\';D b=el.B(\'6g\')||\'\';D l=el.B(\'5a\')||\'\';if(7C)E{t:T(t)||0,r:T(r)||0,b:T(b)||0,l:T(l)||0};P 
E{t:t,r:r,b:b,l:l}},44:u(2l){D x=2l.hI||(2l.hK+(1j.4J.3g||1j.2e.3g))||0;D y=2l.hL||(2l.hM+(1j.4J.2V||1j.2e.2V))||0;E{x:x,y:y}},cS:u(54,cT){cT(54);54=54.77;7o(54){k.1a.cS(54,cT);54=54.hU}},i1:u(54){k.1a.cS(54,u(el){1Y(D 1p in el){if(2h el[1p]===\'u\'){el[1p]=U}}})},i3:u(el,1N){D 5C=$.1a.6W();D d3=$.1a.2p(el);if(!1N||1N==\'4i\')$(el).B({Q:5C.t+((14.3v(5C.h,5C.ih)-5C.t-d3.hb)/2)+\'S\'});if(!1N||1N==\'4a\')$(el).B({O:5C.l+((14.3v(5C.w,5C.iw)-5C.l-d3.1D)/2)+\'S\'})},i0:u(el,dP){D 1Q=$(\'1U[@2M*="95"]\',el||1j),95;1Q.1B(u(){95=q.2M;q.2M=dP;q.18.69="aw:ax.ay.hZ(2M=\'"+95+"\')"})}};[].3F||(7b.hV.3F=u(v,n){n=(n==U)?0:n;D m=q.1h;1Y(D i=n;i-cd?r:(2m(k.B(2i,2z))||0);49=49==\'3Y\'?(7M==\'1n\'?\'22\':\'2G\'):49;M[49]=1b;2k[2z]=49==\'22\'?[0,2i.6u[2z]]:[2i.6u[2z],0];if(2z!=\'1J\')y[2z]=2k[2z][0]+(2z!=\'3B\'&&2z!=\'8h\'?\'S\':\'\');P k.1p(y,"1J",2k[2z][0])}P{2k[2z]=[2m(k.3M(2i,2z)),2m(49)||0]}}P if(k.fx.d9[2z])2k[2z]=[k.fx.6H(k.3M(2i,2z)),k.fx.6H(49)];P if(/^6X$|92$|2B$|9I$|cD$/i.43(2z)){D m=49.4v(/\\s+/g,\' \').4v(/7K\\s*\\(\\s*/g,\'7K(\').4v(/\\s*,\\s*/g,\',\').4v(/\\s*\\)/g,\')\').bU(/([^\\s]+)/g);3m(2z){1e\'6X\':1e\'92\':1e\'cD\':1e\'9I\':m[3]=m[3]||m[1]||m[0];m[2]=m[2]||m[0];m[1]=m[1]||m[0];1Y(D i=0;iM.1m+z.9x){6c(z.2H);z.2H=U;1Y(p in 2k){if(p=="1J")k.1p(y,"1J",2k[p][1]);P if(2h 2k[p][1]==\'8i\')y[p]=\'7K(\'+2k[p][1].r+\',\'+2k[p][1].g+\',\'+2k[p][1].b+\')\';P y[p]=2k[p][1]+(p!=\'3B\'&&p!=\'8h\'?\'S\':\'\')}if(M.2G||M.22)1Y(D p in 2i.6u)if(p=="1J")k.1p(y,p,2i.6u[p]);P y[p]="";y.19=M.2G?\'1n\':(7M!=\'1n\'?7M:\'2E\');y.2Y=eH;2i.5R=U;if(k.eI(M.23))M.23.1F(2i)}P{D n=t-q.9x;D 8x=n/M.1m;1Y(p in 2k){if(2h 2k[p][1]==\'8i\'){y[p]=\'7K(\'+T(k.G[M.G](8x,n,2k[p][0].r,(2k[p][1].r-2k[p][0].r),M.1m))+\',\'+T(k.G[M.G](8x,n,2k[p][0].g,(2k[p][1].g-2k[p][0].g),M.1m))+\',\'+T(k.G[M.G](8x,n,2k[p][0].b,(2k[p][1].b-2k[p][0].b),M.1m))+\')\'}P{D cG=k.G[M.G](8x,n,2k[p][0],(2k[p][1]-2k[p][0]),M.1m);if(p=="1J")k.1p(y,"1J",cG);P 
y[p]=cG+(p!=\'3B\'&&p!=\'8h\'?\'S\':\'\')}}}};z.2H=6I(u(){z.2D()},13);2i.5R=z},cv:u(2i,2D){if(2D)2i.5R.9x-=kM;P{1V.6c(2i.5R.2H);2i.5R=U;k.2L(2i,"fx")}}});k.cu=u(5S){D 5u={};if(2h 5S==\'5g\'){5S=5S.5Z().7h(\';\');1Y(D i=0;i<5S.1h;i++){7H=5S[i].7h(\':\');if(7H.1h==2){5u[k.eP(7H[0].4v(/\\-(\\w)/g,u(m,c){E c.kn()}))]=k.eP(7H[1])}}}E 5u};k.12={1c:U,F:U,58:u(){E q.1B(u(){if(q.9q){q.A.5e.3p(\'5b\',k.12.cU);q.A=U;q.9q=I;if(k.3h.4I){q.d4="fQ"}P{q.18.kk=\'\';q.18.ej=\'\';q.18.e6=\'\'}}})},cU:u(e){if(k.12.F!=U){k.12.9w(e);E I}D C=q.3Z;k(1j).1H(\'3H\',k.12.d6).1H(\'61\',k.12.9w);C.A.1s=k.1a.44(e);C.A.4t=C.A.1s;C.A.7W=I;C.A.ki=q!=q.3Z;k.12.F=C;if(C.A.5i&&q!=q.3Z){ce=k.1a.2R(C.3e);cf=k.1a.2p(C);cg={x:T(k.B(C,\'O\'))||0,y:T(k.B(C,\'Q\'))||0};dx=C.A.4t.x-ce.x-cf.1D/2-cg.x;dy=C.A.4t.y-ce.y-cf.hb/2-cg.y;k.3d.59(C,[dx,dy])}E k.7Z||I},dT:u(e){D C=k.12.F;C.A.7W=1b;D 9p=C.18;C.A.7i=k.B(C,\'19\');C.A.4m=k.B(C,\'Y\');if(!C.A.c4)C.A.c4=C.A.4m;C.A.2c={x:T(k.B(C,\'O\'))||0,y:T(k.B(C,\'Q\'))||0};C.A.9l=0;C.A.9m=0;if(k.3h.4I){D cl=k.1a.6h(C,1b);C.A.9l=cl.l||0;C.A.9m=cl.t||0}C.A.1C=k.21(k.1a.2R(C),k.1a.2p(C));if(C.A.4m!=\'2y\'&&C.A.4m!=\'1O\'){9p.Y=\'2y\'}k.12.1c.5t();D 5s=C.dn(1b);k(5s).B({19:\'2E\',O:\'3c\',Q:\'3c\'});5s.18.5o=\'0\';5s.18.5p=\'0\';5s.18.5m=\'0\';5s.18.5k=\'0\';k.12.1c.1R(5s);D 
3X=k.12.1c.K(0).18;if(C.A.cO){3X.Z=\'ao\';3X.V=\'ao\'}P{3X.V=C.A.1C.hb+\'S\';3X.Z=C.A.1C.1D+\'S\'}3X.19=\'2E\';3X.5o=\'3c\';3X.5p=\'3c\';3X.5m=\'3c\';3X.5k=\'3c\';k.21(C.A.1C,k.1a.2p(5s));if(C.A.2S){if(C.A.2S.O){C.A.2c.x+=C.A.1s.x-C.A.1C.x-C.A.2S.O;C.A.1C.x=C.A.1s.x-C.A.2S.O}if(C.A.2S.Q){C.A.2c.y+=C.A.1s.y-C.A.1C.y-C.A.2S.Q;C.A.1C.y=C.A.1s.y-C.A.2S.Q}if(C.A.2S.2N){C.A.2c.x+=C.A.1s.x-C.A.1C.x-C.A.1C.hb+C.A.2S.2N;C.A.1C.x=C.A.1s.x-C.A.1C.1D+C.A.2S.2N}if(C.A.2S.4l){C.A.2c.y+=C.A.1s.y-C.A.1C.y-C.A.1C.hb+C.A.2S.4l;C.A.1C.y=C.A.1s.y-C.A.1C.hb+C.A.2S.4l}}C.A.2x=C.A.2c.x;C.A.2r=C.A.2c.y;if(C.A.8g||C.A.2o==\'96\'){89=k.1a.6h(C.3e,1b);C.A.1C.x=C.8n+(k.3h.4I?0:k.3h.7N?-89.l:89.l);C.A.1C.y=C.8t+(k.3h.4I?0:k.3h.7N?-89.t:89.t);k(C.3e).1R(k.12.1c.K(0))}if(C.A.2o){k.12.bP(C);C.A.5J.2o=k.12.bH}if(C.A.5i){k.3d.bO(C)}3X.O=C.A.1C.x-C.A.9l+\'S\';3X.Q=C.A.1C.y-C.A.9m+\'S\';3X.Z=C.A.1C.1D+\'S\';3X.V=C.A.1C.hb+\'S\';k.12.F.A.9n=I;if(C.A.gx){C.A.5J.67=k.12.bI}if(C.A.3B!=I){k.12.1c.B(\'3B\',C.A.3B)}if(C.A.1J){k.12.1c.B(\'1J\',C.A.1J);if(1V.7a){k.12.1c.B(\'69\',\'9V(1J=\'+C.A.1J*2b+\')\')}}if(C.A.7w){k.12.1c.2Z(C.A.7w);k.12.1c.K(0).77.18.19=\'1n\'}if(C.A.4A)C.A.4A.1F(C,[5s,C.A.2c.x,C.A.2c.y]);if(k.1x&&k.1x.8W>0){k.1x.ea(C)}if(C.A.4j==I){9p.19=\'1n\'}E I},bP:u(C){if(C.A.2o.1K==b5){if(C.A.2o==\'96\'){C.A.24=k.21({x:0,y:0},k.1a.2p(C.3e));D 84=k.1a.6h(C.3e,1b);C.A.24.w=C.A.24.1D-84.l-84.r;C.A.24.h=C.A.24.hb-84.t-84.b}P if(C.A.2o==\'1j\'){D cM=k.1a.bq();C.A.24={x:0,y:0,w:cM.w,h:cM.h}}}P if(C.A.2o.1K==7b){C.A.24={x:T(C.A.2o[0])||0,y:T(C.A.2o[1])||0,w:T(C.A.2o[2])||0,h:T(C.A.2o[3])||0}}C.A.24.dx=C.A.24.x-C.A.1C.x;C.A.24.dy=C.A.24.y-C.A.1C.y},9o:u(F){if(F.A.8g||F.A.2o==\'96\'){k(\'2e\',1j).1R(k.12.1c.K(0))}k.12.1c.5t().2G().B(\'1J\',1);if(1V.7a){k.12.1c.B(\'69\',\'9V(1J=2b)\')}},9w:u(e){k(1j).3p(\'3H\',k.12.d6).3p(\'61\',k.12.9w);if(k.12.F==U){E}D F=k.12.F;k.12.F=U;if(F.A.7W==I){E I}if(F.A.48==1b){k(F).B(\'Y\',F.A.4m)}D 
9p=F.18;if(F.5i){k.12.1c.B(\'94\',\'8C\')}if(F.A.7w){k.12.1c.4p(F.A.7w)}if(F.A.6o==I){if(F.A.fx>0){if(!F.A.1N||F.A.1N==\'4a\'){D x=11 k.fx(F,{1m:F.A.fx},\'O\');x.1L(F.A.2c.x,F.A.8c)}if(!F.A.1N||F.A.1N==\'4i\'){D y=11 k.fx(F,{1m:F.A.fx},\'Q\');y.1L(F.A.2c.y,F.A.8j)}}P{if(!F.A.1N||F.A.1N==\'4a\')F.18.O=F.A.8c+\'S\';if(!F.A.1N||F.A.1N==\'4i\')F.18.Q=F.A.8j+\'S\'}k.12.9o(F);if(F.A.4j==I){k(F).B(\'19\',F.A.7i)}}P if(F.A.fx>0){F.A.9n=1b;D dh=I;if(k.1x&&k.1t&&F.A.48){dh=k.1a.2R(k.1t.1c.K(0))}k.12.1c.5K({O:dh?dh.x:F.A.1C.x,Q:dh?dh.y:F.A.1C.y},F.A.fx,u(){F.A.9n=I;if(F.A.4j==I){F.18.19=F.A.7i}k.12.9o(F)})}P{k.12.9o(F);if(F.A.4j==I){k(F).B(\'19\',F.A.7i)}}if(k.1x&&k.1x.8W>0){k.1x.ed(F)}if(k.1t&&F.A.48){k.1t.dp(F)}if(F.A.2T&&(F.A.8c!=F.A.2c.x||F.A.8j!=F.A.2c.y)){F.A.2T.1F(F,F.A.bQ||[0,0,F.A.8c,F.A.8j])}if(F.A.3S)F.A.3S.1F(F);E I},bI:u(x,y,dx,dy){if(dx!=0)dx=T((dx+(q.A.gx*dx/14.3R(dx))/2)/q.A.gx)*q.A.gx;if(dy!=0)dy=T((dy+(q.A.gy*dy/14.3R(dy))/2)/q.A.gy)*q.A.gy;E{dx:dx,dy:dy,x:0,y:0}},bH:u(x,y,dx,dy){dx=14.3D(14.3v(dx,q.A.24.dx),q.A.24.w+q.A.24.dx-q.A.1C.1D);dy=14.3D(14.3v(dy,q.A.24.dy),q.A.24.h+q.A.24.dy-q.A.1C.hb);E{dx:dx,dy:dy,x:0,y:0}},d6:u(e){if(k.12.F==U||k.12.F.A.9n==1b){E}D F=k.12.F;F.A.4t=k.1a.44(e);if(F.A.7W==I){46=14.dm(14.5Y(F.A.1s.x-F.A.4t.x,2)+14.5Y(F.A.1s.y-F.A.4t.y,2));if(460){k.1x.a3(F)}E I},2s:u(o){if(!k.12.1c){k(\'2e\',1j).1R(\'<26 id="dW">\');k.12.1c=k(\'#dW\');D el=k.12.1c.K(0);D 4P=el.18;4P.Y=\'1O\';4P.19=\'1n\';4P.94=\'8C\';4P.dV=\'1n\';4P.2Y=\'2O\';if(1V.7a){el.d4="en"}P{4P.kh=\'1n\';4P.e6=\'1n\';4P.ej=\'1n\'}}if(!o){o={}}E q.1B(u(){if(q.9q||!k.1a)E;if(1V.7a){q.kf=u(){E I};q.kj=u(){E I}}D el=q;D 
5e=o.3y?k(q).kp(o.3y):k(q);if(k.3h.4I){5e.1B(u(){q.d4="en"})}P{5e.B(\'-kE-7l-8Z\',\'1n\');5e.B(\'7l-8Z\',\'1n\');5e.B(\'-ko-7l-8Z\',\'1n\')}q.A={5e:5e,6o:o.6o?1b:I,4j:o.4j?1b:I,48:o.48?o.48:I,5i:o.5i?o.5i:I,8g:o.8g?o.8g:I,3B:o.3B?T(o.3B)||0:I,1J:o.1J?2m(o.1J):I,fx:T(o.fx)||U,6p:o.6p?o.6p:I,5J:{},1s:{},4A:o.4A&&o.4A.1K==2C?o.4A:I,3S:o.3S&&o.3S.1K==2C?o.3S:I,2T:o.2T&&o.2T.1K==2C?o.2T:I,1N:/4i|4a/.43(o.1N)?o.1N:I,6m:o.6m?T(o.6m)||0:0,2S:o.2S?o.2S:I,cO:o.cO?1b:I,7w:o.7w||I};if(o.5J&&o.5J.1K==2C)q.A.5J.7l=o.5J;if(o.4x&&o.4x.1K==2C)q.A.4x=o.4x;if(o.2o&&((o.2o.1K==b5&&(o.2o==\'96\'||o.2o==\'1j\'))||(o.2o.1K==7b&&o.2o.1h==4))){q.A.2o=o.2o}if(o.2K){q.A.2K=o.2K}if(o.67){if(2h o.67==\'kl\'){q.A.gx=T(o.67)||1;q.A.gy=T(o.67)||1}P if(o.67.1h==2){q.A.gx=T(o.67[0])||1;q.A.gy=T(o.67[1])||1}}if(o.3z&&o.3z.1K==2C){q.A.3z=o.3z}q.9q=1b;5e.1B(u(){q.3Z=el});5e.1H(\'5b\',k.12.cU)})}};k.fn.21({a4:k.12.58,6Y:k.12.2s});k.1x={ee:u(5r,5y,7j,7g){E 5r<=k.12.F.A.2x&&(5r+7j)>=(k.12.F.A.2x+k.12.F.A.1C.w)&&5y<=k.12.F.A.2r&&(5y+7g)>=(k.12.F.A.2r+k.12.F.A.1C.h)?1b:I},by:u(5r,5y,7j,7g){E!(5r>(k.12.F.A.2x+k.12.F.A.1C.w)||(5r+7j)(k.12.F.A.2r+k.12.F.A.1C.h)||(5y+7g)k.12.F.A.4t.x&&5yk.12.F.A.4t.y?1b:I},5l:I,3W:{},8W:0,3J:{},ea:u(C){if(k.12.F==U){E}D i;k.1x.3W={};D cZ=I;1Y(i in k.1x.3J){if(k.1x.3J[i]!=U){D 1k=k.1x.3J[i].K(0);if(k(k.12.F).is(\'.\'+1k.1i.a)){if(1k.1i.m==I){1k.1i.p=k.21(k.1a.2R(1k),k.1a.82(1k));1k.1i.m=1b}if(1k.1i.ac){k.1x.3J[i].2Z(1k.1i.ac)}k.1x.3W[i]=k.1x.3J[i];if(k.1t&&1k.1i.s&&k.12.F.A.48){1k.1i.el=k(\'.\'+1k.1i.a,1k);C.18.19=\'1n\';k.1t.c5(1k);1k.1i.9Z=k.1t.8o(k.1p(1k,\'id\')).7U;C.18.19=C.A.7i;cZ=1b}if(1k.1i.9v){1k.1i.9v.1F(k.1x.3J[i].K(0),[k.12.F])}}}}if(cZ){k.1t.28()}},ek:u(){k.1x.3W={};1Y(i in k.1x.3J){if(k.1x.3J[i]!=U){D 
1k=k.1x.3J[i].K(0);if(k(k.12.F).is(\'.\'+1k.1i.a)){1k.1i.p=k.21(k.1a.2R(1k),k.1a.82(1k));if(1k.1i.ac){k.1x.3J[i].2Z(1k.1i.ac)}k.1x.3W[i]=k.1x.3J[i];if(k.1t&&1k.1i.s&&k.12.F.A.48){1k.1i.el=k(\'.\'+1k.1i.a,1k);C.18.19=\'1n\';k.1t.c5(1k);C.18.19=C.A.7i}}}}},a3:u(e){if(k.12.F==U){E}k.1x.5l=I;D i;D cb=I;D ec=0;1Y(i in k.1x.3W){D 1k=k.1x.3W[i].K(0);if(k.1x.5l==I&&k.1x[1k.1i.t](1k.1i.p.x,1k.1i.p.y,1k.1i.p.1D,1k.1i.p.hb)){if(1k.1i.hc&&1k.1i.h==I){k.1x.3W[i].2Z(1k.1i.hc)}if(1k.1i.h==I&&1k.1i.7T){cb=1b}1k.1i.h=1b;k.1x.5l=1k;if(k.1t&&1k.1i.s&&k.12.F.A.48){k.1t.1c.K(0).3b=1k.1i.eb;k.1t.a3(1k)}ec++}P if(1k.1i.h==1b){if(1k.1i.7Q){1k.1i.7Q.1F(1k,[e,k.12.1c.K(0).77,1k.1i.fx])}if(1k.1i.hc){k.1x.3W[i].4p(1k.1i.hc)}1k.1i.h=I}}if(k.1t&&!k.1x.5l&&k.12.F.48){k.1t.1c.K(0).18.19=\'1n\'}if(cb){k.1x.5l.1i.7T.1F(k.1x.5l,[e,k.12.1c.K(0).77])}},ed:u(e){D i;1Y(i in k.1x.3W){D 1k=k.1x.3W[i].K(0);if(1k.1i.ac){k.1x.3W[i].4p(1k.1i.ac)}if(1k.1i.hc){k.1x.3W[i].4p(1k.1i.hc)}if(1k.1i.s){k.1t.7V[k.1t.7V.1h]=i}if(1k.1i.9r&&1k.1i.h==1b){1k.1i.h=I;1k.1i.9r.1F(1k,[e,1k.1i.fx])}1k.1i.m=I;1k.1i.h=I}k.1x.3W={}},58:u(){E q.1B(u(){if(q.9u){if(q.1i.s){id=k.1p(q,\'id\');k.1t.5j[id]=U;k(\'.\'+q.1i.a,q).a4()}k.1x.3J[\'d\'+q.bn]=U;q.9u=I;q.f=U}})},2s:u(o){E q.1B(u(){if(q.9u==1b||!o.3P||!k.1a||!k.12){E}q.1i={a:o.3P,ac:o.a8||I,hc:o.a7||I,eb:o.4V||I,9r:o.kO||o.9r||I,7T:o.7T||o.dN||I,7Q:o.7Q||o.dz||I,9v:o.9v||I,t:o.6n&&(o.6n==\'ee\'||o.6n==\'by\')?o.6n:\'1s\',fx:o.fx?o.fx:I,m:I,h:I};if(o.bD==1b&&k.1t){id=k.1p(q,\'id\');k.1t.5j[id]=q.1i.a;q.1i.s=1b;if(o.2T){q.1i.2T=o.2T;q.1i.9Z=k.1t.8o(id).7U}}q.9u=1b;q.bn=T(14.6w()*cd);k.1x.3J[\'d\'+q.bn]=k(q);k.1x.8W++})}};k.fn.21({df:k.1x.58,dO:k.1x.2s});k.kH=k.1x.ek;k.R={1A:U,3Q:U,F:U,1s:U,1q:U,Y:U,7r:u(e){k.R.F=(q.a2)?q.a2:q;k.R.1s=k.1a.44(e);k.R.1q={Z:T(k(k.R.F).B(\'Z\'))||0,V:T(k(k.R.F).B(\'V\'))||0};k.R.Y={Q:T(k(k.R.F).B(\'Q\'))||0,O:T(k(k.R.F).B(\'O\'))||0};k(1j).1H(\'3H\',k.R.bj).1H(\'61\',k.R.bs);if(2h k.R.F.1g.ei===\'u\'){k.R.F.1g.ei.1F(k.R.F)}E 
I},bs:u(e){k(1j).3p(\'3H\',k.R.bj).3p(\'61\',k.R.bs);if(2h k.R.F.1g.e7===\'u\'){k.R.F.1g.e7.1F(k.R.F)}k.R.F=U},bj:u(e){if(!k.R.F){E}1s=k.1a.44(e);7u=k.R.Y.Q-k.R.1s.y+1s.y;7v=k.R.Y.O-k.R.1s.x+1s.x;7u=14.3v(14.3D(7u,k.R.F.1g.8U-k.R.1q.V),k.R.F.1g.7s);7v=14.3v(14.3D(7v,k.R.F.1g.8T-k.R.1q.Z),k.R.F.1g.7p);if(2h k.R.F.1g.4x===\'u\'){D 8J=k.R.F.1g.4x.1F(k.R.F,[7v,7u]);if(2h 8J==\'kc\'&&8J.1h==2){7v=8J[0];7u=8J[1]}}k.R.F.18.Q=7u+\'S\';k.R.F.18.O=7v+\'S\';E I},28:u(e){k(1j).1H(\'3H\',k.R.8C).1H(\'61\',k.R.8v);k.R.1A=q.1A;k.R.3Q=q.3Q;k.R.1s=k.1a.44(e);if(k.R.1A.1g.4A){k.R.1A.1g.4A.1F(k.R.1A,[q])}k.R.1q={Z:T(k(q.1A).B(\'Z\'))||0,V:T(k(q.1A).B(\'V\'))||0};k.R.Y={Q:T(k(q.1A).B(\'Q\'))||0,O:T(k(q.1A).B(\'O\'))||0};E I},8v:u(){k(1j).3p(\'3H\',k.R.8C).3p(\'61\',k.R.8v);if(k.R.1A.1g.3S){k.R.1A.1g.3S.1F(k.R.1A,[k.R.3Q])}k.R.1A=U;k.R.3Q=U},6V:u(dx,9t){E 14.3D(14.3v(k.R.1q.Z+dx*9t,k.R.1A.1g.9s),k.R.1A.1g.6q)},6Q:u(dy,9t){E 14.3D(14.3v(k.R.1q.V+dy*9t,k.R.1A.1g.8L),k.R.1A.1g.8M)},dX:u(V){E 14.3D(14.3v(V,k.R.1A.1g.8L),k.R.1A.1g.8M)},8C:u(e){if(k.R.1A==U){E}1s=k.1a.44(e);dx=1s.x-k.R.1s.x;dy=1s.y-k.R.1s.y;1E={Z:k.R.1q.Z,V:k.R.1q.V};2n={Q:k.R.Y.Q,O:k.R.Y.O};3m(k.R.3Q){1e\'e\':1E.Z=k.R.6V(dx,1);1r;1e\'eO\':1E.Z=k.R.6V(dx,1);1E.V=k.R.6Q(dy,1);1r;1e\'w\':1E.Z=k.R.6V(dx,-1);2n.O=k.R.Y.O-1E.Z+k.R.1q.Z;1r;1e\'5O\':1E.Z=k.R.6V(dx,-1);2n.O=k.R.Y.O-1E.Z+k.R.1q.Z;1E.V=k.R.6Q(dy,1);1r;1e\'7q\':1E.V=k.R.6Q(dy,-1);2n.Q=k.R.Y.Q-1E.V+k.R.1q.V;1E.Z=k.R.6V(dx,-1);2n.O=k.R.Y.O-1E.Z+k.R.1q.Z;1r;1e\'n\':1E.V=k.R.6Q(dy,-1);2n.Q=k.R.Y.Q-1E.V+k.R.1q.V;1r;1e\'9J\':1E.V=k.R.6Q(dy,-1);2n.Q=k.R.Y.Q-1E.V+k.R.1q.V;1E.Z=k.R.6V(dx,1);1r;1e\'s\':1E.V=k.R.6Q(dy,1);1r}if(k.R.1A.1g.4D){if(k.R.3Q==\'n\'||k.R.3Q==\'s\')4B=1E.V*k.R.1A.1g.4D;P 
4B=1E.Z;5c=k.R.dX(4B*k.R.1A.1g.4D);4B=5c/k.R.1A.1g.4D;3m(k.R.3Q){1e\'n\':1e\'7q\':1e\'9J\':2n.Q+=1E.V-5c;1r}3m(k.R.3Q){1e\'7q\':1e\'w\':1e\'5O\':2n.O+=1E.Z-4B;1r}1E.V=5c;1E.Z=4B}if(2n.Qk.R.1A.1g.8U){1E.V=k.R.1A.1g.8U-2n.Q;if(k.R.1A.1g.4D){1E.Z=1E.V/k.R.1A.1g.4D}}if(2n.O+1E.Z>k.R.1A.1g.8T){1E.Z=k.R.1A.1g.8T-2n.O;if(k.R.1A.1g.4D){1E.V=1E.Z*k.R.1A.1g.4D}}D 6O=I;5L=k.R.1A.18;5L.O=2n.O+\'S\';5L.Q=2n.Q+\'S\';5L.Z=1E.Z+\'S\';5L.V=1E.V+\'S\';if(k.R.1A.1g.dY){6O=k.R.1A.1g.dY.1F(k.R.1A,[1E,2n]);if(6O){if(6O.1q){k.21(1E,6O.1q)}if(6O.Y){k.21(2n,6O.Y)}}}5L.O=2n.O+\'S\';5L.Q=2n.Q+\'S\';5L.Z=1E.Z+\'S\';5L.V=1E.V+\'S\';E I},2s:u(M){if(!M||!M.3U||M.3U.1K!=7n){E}E q.1B(u(){D el=q;el.1g=M;el.1g.9s=M.9s||10;el.1g.8L=M.8L||10;el.1g.6q=M.6q||6x;el.1g.8M=M.8M||6x;el.1g.7s=M.7s||-aF;el.1g.7p=M.7p||-aF;el.1g.8T=M.8T||6x;el.1g.8U=M.8U||6x;b3=k(el).B(\'Y\');if(!(b3==\'2y\'||b3==\'1O\')){el.18.Y=\'2y\'}eM=/n|9J|e|eO|s|5O|w|7q/g;1Y(i in el.1g.3U){if(i.5Z().bU(eM)!=U){if(el.1g.3U[i].1K==b5){3y=k(el.1g.3U[i]);if(3y.1P()>0){el.1g.3U[i]=3y.K(0)}}if(el.1g.3U[i].4S){el.1g.3U[i].1A=el;el.1g.3U[i].3Q=i;k(el.1g.3U[i]).1H(\'5b\',k.R.28)}}}if(el.1g.4N){if(2h el.1g.4N===\'5g\'){9K=k(el.1g.4N);if(9K.1P()>0){9K.1B(u(){q.a2=el});9K.1H(\'5b\',k.R.7r)}}P if(el.1g.4N.4S){el.1g.4N.a2=el;k(el.1g.4N).1H(\'5b\',k.R.7r)}P if(el.1g.4N==1b){k(q).1H(\'5b\',k.R.7r)}}})},58:u(){E q.1B(u(){D el=q;1Y(i in el.1g.3U){el.1g.3U[i].1A=U;el.1g.3U[i].3Q=U;k(el.1g.3U[i]).3p(\'5b\',k.R.28)}if(el.1g.4N){if(2h el.1g.4N===\'5g\'){3y=k(el.1g.4N);if(3y.1P()>0){3y.3p(\'5b\',k.R.7r)}}P if(el.1g.4N==1b){k(q).3p(\'5b\',k.R.7r)}}el.1g=U})}};k.fn.21({j5:k.R.2s,j4:k.R.58});k.2u=U;k.7Z=I;k.3n=U;k.81=[];k.a0=u(e){D 
3O=e.7F||e.7A||-1;if(3O==17||3O==16){k.7Z=1b}};k.9Y=u(e){k.7Z=I};k.eW=u(e){q.f.1s=k.1a.44(e);q.f.1M=k.21(k.1a.2R(q),k.1a.2p(q));q.f.3a=k.1a.6W(q);q.f.1s.x-=q.f.1M.x;q.f.1s.y-=q.f.1M.y;if(q.f.hc)k.2u.2Z(q.f.hc);k.2u.B({19:\'2E\',Z:\'83\',V:\'83\'});if(q.f.o){k.2u.B(\'1J\',q.f.o)}k.3n=q;k.8K=I;k.81=[];q.f.el.1B(u(){q.1M={x:q.8n+(q.4Y&&!k.3h.7N?T(q.4Y.5a)||0:0)+(k.3n.3g||0),y:q.8t+(q.4Y&&!k.3h.7N?T(q.4Y.4Z)||0:0)+(k.3n.2V||0),1D:q.4b,hb:q.63};if(q.s==1b){if(k.7Z==I){q.s=I;k(q).4p(k.3n.f.7X)}P{k.8K=1b;k.81[k.81.1h]=k.1p(q,\'id\')}}});k(q).1R(k.2u.K(0));q.f.93=k.1a.6h(k.2u[0],1b);k.a1.1F(q,[e]);k(1j).1H(\'3H\',k.a1).1H(\'61\',k.bT);E I};k.a1=u(e){if(!k.3n)E;k.eU.1F(k.3n,[e])};k.eU=u(e){if(!k.3n)E;D 1s=k.1a.44(e);D 3a=k.1a.6W(k.3n);1s.x+=3a.l-q.f.3a.l-q.f.1M.x;1s.y+=3a.t-q.f.3a.t-q.f.1M.y;D 8D=14.3D(1s.x,q.f.1s.x);D 5O=14.3D(14.3R(1s.x-q.f.1s.x),14.3R(q.f.3a.w-8D));D 9f=14.3D(1s.y,q.f.1s.y);D 8R=14.3D(14.3R(1s.y-q.f.1s.y),14.3R(q.f.3a.h-9f));if(q.2V>0&&1s.y-20q.2V+q.f.1M.h){D 3T=14.3D(q.f.3a.h-q.2V,10);q.2V+=3T;if(q.2V!=3a.t)8R+=3T}if(q.3g>0&&1s.x-20q.3g+q.f.1M.w){D 3T=14.3D(q.f.3a.w-q.3g,10);q.3g+=3T;if(q.3g!=3a.l)5O+=3T}k.2u.B({O:8D+\'S\',Q:9f+\'S\',Z:5O-(q.f.93.l+q.f.93.r)+\'S\',V:8R-(q.f.93.t+q.f.93.b)+\'S\'});k.2u.l=8D+q.f.3a.l;k.2u.t=9f+q.f.3a.t;k.2u.r=k.2u.l+5O;k.2u.b=k.2u.t+8R;k.8K=I;q.f.el.1B(u(){9k=k.81.3F(k.1p(q,\'id\'));if(!(q.1M.x>k.2u.r||(q.1M.x+q.1M.1D)k.2u.b||(q.1M.y+q.1M.hb)0){h+=\'&\'}h+=s+\'[]=\'+k.1p(q,\'id\');o[o.1h]=k.1p(q,\'id\')}})}E{7U:h,o:o}};k.fn.jZ=u(o){if(!k.2u){k(\'2e\',1j).1R(\'<26 id="2u">\').1H(\'7E\',k.a0).1H(\'6S\',k.9Y);k.2u=k(\'#2u\');k.2u.B({Y:\'1O\',19:\'1n\'});if(1V.2l){k(\'2e\',1j).1H(\'7E\',k.a0).1H(\'6S\',k.9Y)}P{k(1j).1H(\'7E\',k.a0).1H(\'6S\',k.9Y)}}if(!o){o={}}E q.1B(u(){if(q.eX)E;q.eX=1b;q.f={a:o.3P,o:o.1J?2m(o.1J):I,7X:o.eE?o.eE:I,hc:o.4V?o.4V:I,8Y:o.8Y?o.8Y:I,8X:o.8X?o.8X:I};q.f.el=k(\'.\'+o.3P);k(q).1H(\'5b\',k.eW)})};k.1t={7V:[],5j:{},1c:I,7Y:U,28:u(){if(k.12.F==U){E}D 
4M,3A,c,cs;k.1t.1c.K(0).3b=k.12.F.A.6p;4M=k.1t.1c.K(0).18;4M.19=\'2E\';k.1t.1c.1C=k.21(k.1a.2R(k.1t.1c.K(0)),k.1a.2p(k.1t.1c.K(0)));4M.Z=k.12.F.A.1C.1D+\'S\';4M.V=k.12.F.A.1C.hb+\'S\';3A=k.1a.c8(k.12.F);4M.5o=3A.t;4M.5p=3A.r;4M.5m=3A.b;4M.5k=3A.l;if(k.12.F.A.4j==1b){c=k.12.F.dn(1b);cs=c.18;cs.5o=\'3c\';cs.5p=\'3c\';cs.5m=\'3c\';cs.5k=\'3c\';cs.19=\'2E\';k.1t.1c.5t().1R(c)}k(k.12.F).dj(k.1t.1c.K(0));k.12.F.18.19=\'1n\'},dp:u(e){if(!e.A.48&&k.1x.5l.bD){if(e.A.3S)e.A.3S.1F(F);k(e).B(\'Y\',e.A.c4||e.A.4m);k(e).a4();k(k.1x.5l).dd(e)}k.1t.1c.4p(e.A.6p).3w(\'&7J;\');k.1t.7Y=U;D 4M=k.1t.1c.K(0).18;4M.19=\'1n\';k.1t.1c.dj(e);if(e.A.fx>0){k(e).7m(e.A.fx)}k(\'2e\').1R(k.1t.1c.K(0));D 86=[];D 8d=I;1Y(D i=0;i0){8d(86)}},a3:u(e,o){if(!k.12.F)E;D 6i=I;D i=0;if(e.1i.el.1P()>0){1Y(i=e.1i.el.1P();i>0;i--){if(e.1i.el.K(i-1)!=k.12.F){if(!e.5V.bM){if((e.1i.el.K(i-1).1M.y+e.1i.el.K(i-1).1M.hb/2)>k.12.F.A.2r){6i=e.1i.el.K(i-1)}P{1r}}P{if((e.1i.el.K(i-1).1M.x+e.1i.el.K(i-1).1M.1D/2)>k.12.F.A.2x&&(e.1i.el.K(i-1).1M.y+e.1i.el.K(i-1).1M.hb/2)>k.12.F.A.2r){6i=e.1i.el.K(i-1)}}}}}if(6i&&k.1t.7Y!=6i){k.1t.7Y=6i;k(6i).k6(k.1t.1c.K(0))}P if(!6i&&(k.1t.7Y!=U||k.1t.1c.K(0).3e!=e)){k.1t.7Y=U;k(e).1R(k.1t.1c.K(0))}k.1t.1c.K(0).18.19=\'2E\'},c5:u(e){if(k.12.F==U){E}e.1i.el.1B(u(){q.1M=k.21(k.1a.82(q),k.1a.2R(q))})},8o:u(s){D i;D h=\'\';D o={};if(s){if(k.1t.5j[s]){o[s]=[];k(\'#\'+s+\' .\'+k.1t.5j[s]).1B(u(){if(h.1h>0){h+=\'&\'}h+=s+\'[]=\'+k.1p(q,\'id\');o[s][o[s].1h]=k.1p(q,\'id\')})}P{1Y(a in s){if(k.1t.5j[s[a]]){o[s[a]]=[];k(\'#\'+s[a]+\' .\'+k.1t.5j[s[a]]).1B(u(){if(h.1h>0){h+=\'&\'}h+=s[a]+\'[]=\'+k.1p(q,\'id\');o[s[a]][o[s[a]].1h]=k.1p(q,\'id\')})}}}}P{1Y(i in k.1t.5j){o[i]=[];k(\'#\'+i+\' .\'+k.1t.5j[i]).1B(u(){if(h.1h>0){h+=\'&\'}h+=i+\'[]=\'+k.1p(q,\'id\');o[i][o[i].1h]=k.1p(q,\'id\')})}}E{7U:h,o:o}},dc:u(e){if(!e.jJ){E}E q.1B(u(){if(!q.5V||!k(e).is(\'.\'+q.5V.3P))k(e).2Z(q.5V.3P);k(e).6Y(q.5V.A)})},58:u(){E 
q.1B(u(){k(\'.\'+q.5V.3P).a4();k(q).df();q.5V=U;q.dD=U})},2s:u(o){if(o.3P&&k.1a&&k.12&&k.1x){if(!k.1t.1c){k(\'2e\',1j).1R(\'<26 id="dt">&7J;\');k.1t.1c=k(\'#dt\');k.1t.1c.K(0).18.19=\'1n\'}q.dO({3P:o.3P,a8:o.a8?o.a8:I,a7:o.a7?o.a7:I,4V:o.4V?o.4V:I,7T:o.7T||o.dN,7Q:o.7Q||o.dz,bD:1b,2T:o.2T||o.jL,fx:o.fx?o.fx:I,4j:o.4j?1b:I,6n:o.6n?o.6n:\'by\'});E q.1B(u(){D A={6o:o.6o?1b:I,dF:6x,1J:o.1J?2m(o.1J):I,6p:o.4V?o.4V:I,fx:o.fx?o.fx:I,48:1b,4j:o.4j?1b:I,3y:o.3y?o.3y:U,2o:o.2o?o.2o:U,4A:o.4A&&o.4A.1K==2C?o.4A:I,4x:o.4x&&o.4x.1K==2C?o.4x:I,3S:o.3S&&o.3S.1K==2C?o.3S:I,1N:/4i|4a/.43(o.1N)?o.1N:I,6m:o.6m?T(o.6m)||0:I,2S:o.2S?o.2S:I};k(\'.\'+o.3P,q).6Y(A);q.dD=1b;q.5V={3P:o.3P,6o:o.6o?1b:I,dF:6x,1J:o.1J?2m(o.1J):I,6p:o.4V?o.4V:I,fx:o.fx?o.fx:I,48:1b,4j:o.4j?1b:I,3y:o.3y?o.3y:U,2o:o.2o?o.2o:U,bM:o.bM?1b:I,A:A}})}}};k.fn.21({jR:k.1t.2s,dd:k.1t.dc,jQ:k.1t.58});k.jN=k.1t.8o;k.3d={bG:1,f0:u(3u){D 3u=3u;E q.1B(u(){q.4r.6T.1B(u(a6){k.3d.59(q,3u[a6])})})},K:u(){D 3u=[];q.1B(u(bJ){if(q.bF){3u[bJ]=[];D C=q;D 1q=k.1a.2p(q);q.4r.6T.1B(u(a6){D x=q.8n;D y=q.8t;99=T(x*2b/(1q.w-q.4b));8a=T(y*2b/(1q.h-q.63));3u[bJ][a6]=[99||0,8a||0,x||0,y||0]})}});E 3u},bO:u(C){C.A.fK=C.A.24.w-C.A.1C.1D;C.A.fN=C.A.24.h-C.A.1C.hb;if(C.9P.4r.bE){a5=C.9P.4r.6T.K(C.bR+1);if(a5){C.A.24.w=(T(k(a5).B(\'O\'))||0)+C.A.1C.1D;C.A.24.h=(T(k(a5).B(\'Q\'))||0)+C.A.1C.hb}9X=C.9P.4r.6T.K(C.bR-1);if(9X){D bL=T(k(9X).B(\'O\'))||0;D bK=T(k(9X).B(\'O\'))||0;C.A.24.x+=bL;C.A.24.y+=bK;C.A.24.w-=bL;C.A.24.h-=bK}}C.A.fW=C.A.24.w-C.A.1C.1D;C.A.fV=C.A.24.h-C.A.1C.hb;if(C.A.2K){C.A.gx=((C.A.24.w-C.A.1C.1D)/C.A.2K)||1;C.A.gy=((C.A.24.h-C.A.1C.hb)/C.A.2K)||1;C.A.fY=C.A.fW/C.A.2K;C.A.fS=C.A.fV/C.A.2K}C.A.24.dx=C.A.24.x-C.A.2c.x;C.A.24.dy=C.A.24.y-C.A.2c.y;k.12.1c.B(\'94\',\'aG\')},3z:u(C,x,y){if(C.A.2K){fZ=T(x/C.A.fY);99=fZ*2b/C.A.2K;fL=T(y/C.A.fS);8a=fL*2b/C.A.2K}P{99=T(x*2b/C.A.fK);8a=T(y*2b/C.A.fN)}C.A.bQ=[99||0,8a||0,x||0,y||0];if(C.A.3z)C.A.3z.1F(C,C.A.bQ)},g4:u(2l){3O=2l.7F||2l.7A||-1;3m(3O){1e 35:k.3d.59(q.3Z,[9W,9W]);1r;1e 
36:k.3d.59(q.3Z,[-9W,-9W]);1r;1e 37:k.3d.59(q.3Z,[-q.3Z.A.gx||-1,0]);1r;1e 38:k.3d.59(q.3Z,[0,-q.3Z.A.gy||-1]);1r;1e 39:k.3d.59(q.3Z,[q.3Z.A.gx||1,0]);1r;1e 40:k.12.59(q.3Z,[0,q.3Z.A.gy||1]);1r}},59:u(C,Y){if(!C.A){E}C.A.1C=k.21(k.1a.2R(C),k.1a.2p(C));C.A.2c={x:T(k.B(C,\'O\'))||0,y:T(k.B(C,\'Q\'))||0};C.A.4m=k.B(C,\'Y\');if(C.A.4m!=\'2y\'&&C.A.4m!=\'1O\'){C.18.Y=\'2y\'}k.12.bP(C);k.3d.bO(C);dx=T(Y[0])||0;dy=T(Y[1])||0;2x=C.A.2c.x+dx;2r=C.A.2c.y+dy;if(C.A.2K){3q=k.12.bI.1F(C,[2x,2r,dx,dy]);if(3q.1K==7n){dx=3q.dx;dy=3q.dy}2x=C.A.2c.x+dx;2r=C.A.2c.y+dy}3q=k.12.bH.1F(C,[2x,2r,dx,dy]);if(3q&&3q.1K==7n){dx=3q.dx;dy=3q.dy}2x=C.A.2c.x+dx;2r=C.A.2c.y+dy;if(C.A.5i&&(C.A.3z||C.A.2T)){k.3d.3z(C,2x,2r)}2x=!C.A.1N||C.A.1N==\'4a\'?2x:C.A.2c.x||0;2r=!C.A.1N||C.A.1N==\'4i\'?2r:C.A.2c.y||0;C.18.O=2x+\'S\';C.18.Q=2r+\'S\'},2s:u(o){E q.1B(u(){if(q.bF==1b||!o.3P||!k.1a||!k.12||!k.1x){E}5N=k(o.3P,q);if(5N.1P()==0){E}D 4K={2o:\'96\',5i:1b,3z:o.3z&&o.3z.1K==2C?o.3z:U,2T:o.2T&&o.2T.1K==2C?o.2T:U,3y:q,1J:o.1J||I};if(o.2K&&T(o.2K)){4K.2K=T(o.2K)||1;4K.2K=4K.2K>0?4K.2K:1}if(5N.1P()==1)5N.6Y(4K);P{k(5N.K(0)).6Y(4K);4K.3y=U;5N.6Y(4K)}5N.7E(k.3d.g4);5N.1p(\'bG\',k.3d.bG++);q.bF=1b;q.4r={};q.4r.g6=4K.g6;q.4r.2K=4K.2K;q.4r.6T=5N;q.4r.bE=o.bE?1b:I;bS=q;bS.4r.6T.1B(u(2I){q.bR=2I;q.9P=bS});if(o.3u&&o.3u.1K==7b){1Y(i=o.3u.1h-1;i>=0;i--){if(o.3u[i].1K==7b&&o.3u[i].1h==2){el=q.4r.6T.K(i);if(el.4S){k.3d.59(el,o.3u[i])}}}}})}};k.fn.21({jV:k.3d.2s,k9:k.3d.f0,kb:k.3d.K});k.2t={6J:U,7c:I,9O:U,6D:u(e){k.2t.7c=1b;k.2t.22(e,q,1b)},bx:u(e){if(k.2t.6J!=q)E;k.2t.7c=I;k.2t.2G(e,q)},22:u(e,el,7c){if(k.2t.6J!=U)E;if(!el){el=q}k.2t.6J=el;1M=k.21(k.1a.2R(el),k.1a.2p(el));8G=k(el);45=8G.1p(\'45\');3f=8G.1p(\'3f\');if(45){k.2t.9O=45;8G.1p(\'45\',\'\');k(\'#fF\').3w(45);if(3f)k(\'#c9\').3w(3f.4v(\'k4://\',\'\'));P 
k(\'#c9\').3w(\'\');1c=k(\'#8V\');if(el.4T.3b){1c.K(0).3b=el.4T.3b}P{1c.K(0).3b=\'\'}c7=k.1a.2p(1c.K(0));fj=7c&&el.4T.Y==\'c3\'?\'4l\':el.4T.Y;3m(fj){1e\'Q\':2r=1M.y-c7.hb;2x=1M.x;1r;1e\'O\':2r=1M.y;2x=1M.x-c7.1D;1r;1e\'2N\':2r=1M.y;2x=1M.x+1M.1D;1r;1e\'c3\':k(\'2e\').1H(\'3H\',k.2t.3H);1s=k.1a.44(e);2r=1s.y+15;2x=1s.x+15;1r;aG:2r=1M.y+1M.hb;2x=1M.x;1r}1c.B({Q:2r+\'S\',O:2x+\'S\'});if(el.4T.53==I){1c.22()}P{1c.7m(el.4T.53)}if(el.4T.2U)el.4T.2U.1F(el);8G.1H(\'8q\',k.2t.2G).1H(\'5I\',k.2t.bx)}},3H:u(e){if(k.2t.6J==U){k(\'2e\').3p(\'3H\',k.2t.3H);E}1s=k.1a.44(e);k(\'#8V\').B({Q:1s.y+15+\'S\',O:1s.x+15+\'S\'})},2G:u(e,el){if(!el){el=q}if(k.2t.7c!=1b&&k.2t.6J==el){k.2t.6J=U;k(\'#8V\').7k(1);k(el).1p(\'45\',k.2t.9O).3p(\'8q\',k.2t.2G).3p(\'5I\',k.2t.bx);if(el.4T.3i)el.4T.3i.1F(el);k.2t.9O=U}},2s:u(M){if(!k.2t.1c){k(\'2e\').1R(\'<26 id="8V"><26 id="fF"><26 id="c9">\');k(\'#8V\').B({Y:\'1O\',3B:6x,19:\'1n\'});k.2t.1c=1b}E q.1B(u(){if(k.1p(q,\'45\')){q.4T={Y:/Q|4l|O|2N|c3/.43(M.Y)?M.Y:\'4l\',3b:M.3b?M.3b:I,53:M.53?M.53:I,2U:M.2U&&M.2U.1K==2C?M.2U:I,3i:M.3i&&M.3i.1K==2C?M.3i:I};D el=k(q);el.1H(\'aV\',k.2t.22);el.1H(\'6D\',k.2t.6D)}})}};k.fn.k0=k.2t.2s;k.21({G:{bV:u(p,n,1W,1I,1m){E((-14.5v(p*14.2Q)/2)+0.5)*1I+1W},k2:u(p,n,1W,1I,1m){E 1I*(n/=1m)*n*n+1W},fG:u(p,n,1W,1I,1m){E-1I*((n=n/1m-1)*n*n*n-1)+1W},k1:u(p,n,1W,1I,1m){if((n/=1m/2)<1)E 1I/2*n*n*n*n+1W;E-1I/2*((n-=2)*n*n*n-2)+1W},9c:u(p,n,1W,1I,1m){if((n/=1m)<(1/2.75)){E 1I*(7.9N*n*n)+1W}P if(n<(2/2.75)){E 1I*(7.9N*(n-=(1.5/2.75))*n+.75)+1W}P if(n<(2.5/2.75)){E 1I*(7.9N*(n-=(2.25/2.75))*n+.jC)+1W}P{E 1I*(7.9N*(n-=(2.jB/2.75))*n+.jd)+1W}},bY:u(p,n,1W,1I,1m){if(k.G.9c)E 1I-k.G.9c(p,1m-n,0,1I,1m)+1W;E 1W+1I},jc:u(p,n,1W,1I,1m){if(k.G.bY&&k.G.9c)if(n<1m/2)E k.G.bY(p,n*2,0,1I,1m)*.5+1W;E k.G.9c(p,n*2-1m,0,1I,1m)*.5+1I*.5+1W;E 1W+1I},jb:u(p,n,1W,1I,1m){D a,s;if(n==0)E 1W;if((n/=1m)==1)E 
1W+1I;a=1I*0.3;p=1m*.3;if(a<14.3R(1I)){a=1I;s=p/4}P{s=p/(2*14.2Q)*14.c0(1I/a)}E-(a*14.5Y(2,10*(n-=1))*14.98((n*1m-s)*(2*14.2Q)/p))+1W},je:u(p,n,1W,1I,1m){D a,s;if(n==0)E 1W;if((n/=1m/2)==2)E 1W+1I;a=1I*0.3;p=1m*.3;if(a<14.3R(1I)){a=1I;s=p/4}P{s=p/(2*14.2Q)*14.c0(1I/a)}E a*14.5Y(2,-10*n)*14.98((n*1m-s)*(2*14.2Q)/p)+1I+1W},jf:u(p,n,1W,1I,1m){D a,s;if(n==0)E 1W;if((n/=1m/2)==2)E 1W+1I;a=1I*0.3;p=1m*.3;if(a<14.3R(1I)){a=1I;s=p/4}P{s=p/(2*14.2Q)*14.c0(1I/a)}if(n<1){E-.5*(a*14.5Y(2,10*(n-=1))*14.98((n*1m-s)*(2*14.2Q)/p))+1W}E a*14.5Y(2,-10*(n-=1))*14.98((n*1m-s)*(2*14.2Q)/p)*.5+1I+1W}}});k.fn.21({fz:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.5W(q,H,J,\'4U\',G)})},fP:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.5W(q,H,J,\'4y\',G)})},j9:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.5W(q,H,J,\'f8\',G)})},j3:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.5W(q,H,J,\'O\',G)})},j2:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.5W(q,H,J,\'2N\',G)})},j1:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.5W(q,H,J,\'fh\',G)})}});k.fx.5W=u(e,H,J,2P,G){if(!k.4O(e)){k.2L(e,\'1o\');E I}D z=q;z.el=k(e);z.1P=k.1a.2p(e);z.G=2h J==\'5g\'?J:G||U;if(!e.4s)e.4s=z.el.B(\'19\');if(2P==\'f8\'){2P=z.el.B(\'19\')==\'1n\'?\'4y\':\'4U\'}P if(2P==\'fh\'){2P=z.el.B(\'19\')==\'1n\'?\'2N\':\'O\'}z.el.22();z.H=H;z.J=2h J==\'u\'?J:U;z.fx=k.fx.9h(e);z.2P=2P;z.23=u(){if(z.J&&z.J.1K==2C){z.J.1F(z.el.K(0))}if(z.2P==\'4y\'||z.2P==\'2N\'){z.el.B(\'19\',z.el.K(0).4s==\'1n\'?\'2E\':z.el.K(0).4s)}P{z.el.2G()}k.fx.9g(z.fx.3o.K(0),z.fx.W);k.2L(z.el.K(0),\'1o\')};3m(z.2P){1e\'4U\':6d=11 k.fx(z.fx.3o.K(0),k.H(z.H,z.G,z.23),\'V\');6d.1L(z.fx.W.1q.hb,0);1r;1e\'4y\':z.fx.3o.B(\'V\',\'83\');z.el.22();6d=11 k.fx(z.fx.3o.K(0),k.H(z.H,z.G,z.23),\'V\');6d.1L(0,z.fx.W.1q.hb);1r;1e\'O\':6d=11 k.fx(z.fx.3o.K(0),k.H(z.H,z.G,z.23),\'Z\');6d.1L(z.fx.W.1q.1D,0);1r;1e\'2N\':z.fx.3o.B(\'Z\',\'83\');z.el.22();6d=11 k.fx(z.fx.3o.K(0),k.H(z.H,z.G,z.23),\'Z\');6d.1L(0,z.fx.W.1q.1D);1r}};k.fn.kd=u(5w,J){E q.1w(\'1o\',u(){if(!k.4O(q)){k.2L(q,\'1o\');E I}D e=11 k.fx.fa(q,5w,J);e.bc()})};k.fx.fa=u(e,5w,J){D 
z=q;z.el=k(e);z.el.22();z.J=J;z.5w=T(5w)||40;z.W={};z.W.Y=z.el.B(\'Y\');z.W.Q=T(z.el.B(\'Q\'))||0;z.W.O=T(z.el.B(\'O\'))||0;if(z.W.Y!=\'2y\'&&z.W.Y!=\'1O\'){z.el.B(\'Y\',\'2y\')}z.41=5;z.5D=1;z.bc=u(){z.5D++;z.e=11 k.fx(z.el.K(0),{1m:j6,23:u(){z.e=11 k.fx(z.el.K(0),{1m:80,23:u(){z.5w=T(z.5w/2);if(z.5D<=z.41)z.bc();P{z.el.B(\'Y\',z.W.Y).B(\'Q\',z.W.Q+\'S\').B(\'O\',z.W.O+\'S\');k.2L(z.el.K(0),\'1o\');if(z.J&&z.J.1K==2C){z.J.1F(z.el.K(0))}}}},\'Q\');z.e.1L(z.W.Q-z.5w,z.W.Q)}},\'Q\');z.e.1L(z.W.Q,z.W.Q-z.5w)}};k.fn.21({ji:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.4k(q,H,J,\'4y\',\'4d\',G)})},jj:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.4k(q,H,J,\'4y\',\'in\',G)})},jw:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.4k(q,H,J,\'4y\',\'3Y\',G)})},jv:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.4k(q,H,J,\'4U\',\'4d\',G)})},ju:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.4k(q,H,J,\'4U\',\'in\',G)})},jx:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.4k(q,H,J,\'4U\',\'3Y\',G)})},jy:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.4k(q,H,J,\'O\',\'4d\',G)})},jz:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.4k(q,H,J,\'O\',\'in\',G)})},jt:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.4k(q,H,J,\'O\',\'3Y\',G)})},js:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.4k(q,H,J,\'2N\',\'4d\',G)})},jm:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.4k(q,H,J,\'2N\',\'in\',G)})},jl:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.4k(q,H,J,\'2N\',\'3Y\',G)})}});k.fx.4k=u(e,H,J,2P,1u,G){if(!k.4O(e)){k.2L(e,\'1o\');E I}D z=q;z.el=k(e);z.G=2h J==\'5g\'?J:G||U;z.W={};z.W.Y=z.el.B(\'Y\');z.W.Q=z.el.B(\'Q\');z.W.O=z.el.B(\'O\');if(!e.4s)e.4s=z.el.B(\'19\');if(1u==\'3Y\'){1u=z.el.B(\'19\')==\'1n\'?\'in\':\'4d\'}z.el.22();if(z.W.Y!=\'2y\'&&z.W.Y!=\'1O\'){z.el.B(\'Y\',\'2y\')}z.1u=1u;J=2h J==\'u\'?J:U;8y=1;3m(2P){1e\'4U\':z.e=11 k.fx(z.el.K(0),k.H(H-15,z.G,J),\'Q\');z.68=2m(z.W.Q)||0;z.9L=z.fM;8y=-1;1r;1e\'4y\':z.e=11 k.fx(z.el.K(0),k.H(H-15,z.G,J),\'Q\');z.68=2m(z.W.Q)||0;z.9L=z.fM;1r;1e\'2N\':z.e=11 k.fx(z.el.K(0),k.H(H-15,z.G,J),\'O\');z.68=2m(z.W.O)||0;z.9L=z.f4;1r;1e\'O\':z.e=11 
k.fx(z.el.K(0),k.H(H-15,z.G,J),\'O\');z.68=2m(z.W.O)||0;z.9L=z.f4;8y=-1;1r}z.e2=11 k.fx(z.el.K(0),k.H(H,z.G,u(){z.el.B(z.W);if(z.1u==\'4d\'){z.el.B(\'19\',\'1n\')}P z.el.B(\'19\',z.el.K(0).4s==\'1n\'?\'2E\':z.el.K(0).4s);k.2L(z.el.K(0),\'1o\')}),\'1J\');if(1u==\'in\'){z.e.1L(z.68+2b*8y,z.68);z.e2.1L(0,1)}P{z.e.1L(z.68,z.68+2b*8y);z.e2.1L(1,0)}};k.fn.21({jn:u(H,V,J,G){E q.1w(\'1o\',u(){11 k.fx.9M(q,H,V,J,\'g7\',G)})},jo:u(H,V,J,G){E q.1w(\'1o\',u(){11 k.fx.9M(q,H,V,J,\'9Q\',G)})},jr:u(H,V,J,G){E q.1w(\'1o\',u(){11 k.fx.9M(q,H,V,J,\'3Y\',G)})}});k.fx.9M=u(e,H,V,J,1u,G){if(!k.4O(e)){k.2L(e,\'1o\');E I}D z=q;z.el=k(e);z.G=2h J==\'5g\'?J:G||U;z.J=2h J==\'u\'?J:U;if(1u==\'3Y\'){1u=z.el.B(\'19\')==\'1n\'?\'9Q\':\'g7\'}z.H=H;z.V=V&&V.1K==cR?V:20;z.fx=k.fx.9h(e);z.1u=1u;z.23=u(){if(z.J&&z.J.1K==2C){z.J.1F(z.el.K(0))}if(z.1u==\'9Q\'){z.el.22()}P{z.el.2G()}k.fx.9g(z.fx.3o.K(0),z.fx.W);k.2L(z.el.K(0),\'1o\')};if(z.1u==\'9Q\'){z.el.22();z.fx.3o.B(\'V\',z.V+\'S\').B(\'Z\',\'83\');z.ef=11 k.fx(z.fx.3o.K(0),k.H(z.H,z.G,u(){z.ef=11 k.fx(z.fx.3o.K(0),k.H(z.H,z.G,z.23),\'V\');z.ef.1L(z.V,z.fx.W.1q.hb)}),\'Z\');z.ef.1L(0,z.fx.W.1q.1D)}P{z.ef=11 k.fx(z.fx.3o.K(0),k.H(z.H,z.G,u(){z.ef=11 k.fx(z.fx.3o.K(0),k.H(z.H,z.G,z.23),\'Z\');z.ef.1L(z.fx.W.1q.1D,0)}),\'V\');z.ef.1L(z.fx.W.1q.hb,z.V)}};k.fn.21({jq:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.6z(q,H,1,2b,1b,J,\'f1\',G)})},jp:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.6z(q,H,2b,1,1b,J,\'d2\',G)})},kt:u(H,J,G){E q.1w(\'1o\',u(){D G=G||\'fG\';11 k.fx.6z(q,H,2b,fd,1b,J,\'6l\',G)})},6z:u(H,5d,4L,6E,J,G){E q.1w(\'1o\',u(){11 k.fx.6z(q,H,5d,4L,6E,J,\'6z\',G)})}});k.fx.6z=u(e,H,5d,4L,6E,J,1u,G){if(!k.4O(e)){k.2L(e,\'1o\');E I}D z=q;z.el=k(e);z.5d=T(5d)||2b;z.4L=T(4L)||2b;z.G=2h J==\'5g\'?J:G||U;z.J=2h 
J==\'u\'?J:U;z.1m=k.H(H).1m;z.6E=6E||U;z.2f=k.1a.2p(e);z.W={Z:z.el.B(\'Z\'),V:z.el.B(\'V\'),4w:z.el.B(\'4w\')||\'2b%\',Y:z.el.B(\'Y\'),19:z.el.B(\'19\'),Q:z.el.B(\'Q\'),O:z.el.B(\'O\'),2Y:z.el.B(\'2Y\'),4Z:z.el.B(\'4Z\'),6k:z.el.B(\'6k\'),6g:z.el.B(\'6g\'),5a:z.el.B(\'5a\'),66:z.el.B(\'66\'),6j:z.el.B(\'6j\'),5M:z.el.B(\'5M\'),4X:z.el.B(\'4X\')};z.Z=T(z.W.Z)||e.4b||0;z.V=T(z.W.V)||e.63||0;z.Q=T(z.W.Q)||0;z.O=T(z.W.O)||0;1q=[\'em\',\'S\',\'kJ\',\'%\'];1Y(i in 1q){if(z.W.4w.3F(1q[i])>0){z.fi=1q[i];z.4w=2m(z.W.4w)}if(z.W.4Z.3F(1q[i])>0){z.fw=1q[i];z.bt=2m(z.W.4Z)||0}if(z.W.6k.3F(1q[i])>0){z.fB=1q[i];z.bg=2m(z.W.6k)||0}if(z.W.6g.3F(1q[i])>0){z.fE=1q[i];z.bf=2m(z.W.6g)||0}if(z.W.5a.3F(1q[i])>0){z.fv=1q[i];z.be=2m(z.W.5a)||0}if(z.W.66.3F(1q[i])>0){z.fk=1q[i];z.bb=2m(z.W.66)||0}if(z.W.6j.3F(1q[i])>0){z.fs=1q[i];z.ba=2m(z.W.6j)||0}if(z.W.5M.3F(1q[i])>0){z.fb=1q[i];z.cJ=2m(z.W.5M)||0}if(z.W.4X.3F(1q[i])>0){z.fq=1q[i];z.cX=2m(z.W.4X)||0}}if(z.W.Y!=\'2y\'&&z.W.Y!=\'1O\'){z.el.B(\'Y\',\'2y\')}z.el.B(\'2Y\',\'2O\');z.1u=1u;3m(z.1u){1e\'f1\':z.4f=z.Q+z.2f.h/2;z.57=z.Q;z.4c=z.O+z.2f.w/2;z.4W=z.O;1r;1e\'d2\':z.57=z.Q+z.2f.h/2;z.4f=z.Q;z.4W=z.O+z.2f.w/2;z.4c=z.O;1r;1e\'6l\':z.57=z.Q-z.2f.h/4;z.4f=z.Q;z.4W=z.O-z.2f.w/4;z.4c=z.O;1r}z.bo=I;z.t=(11 72).71();z.4u=u(){6c(z.2H);z.2H=U};z.2D=u(){if(z.bo==I){z.el.22();z.bo=1b}D t=(11 72).71();D n=t-z.t;D 
p=n/z.1m;if(t>=z.1m+z.t){b1(u(){o=1;if(z.1u){t=z.57;l=z.4W;if(z.1u==\'6l\')o=0}z.bv(z.4L,l,t,1b,o)},13);z.4u()}P{o=1;if(!k.G||!k.G[z.G]){s=((-14.5v(p*14.2Q)/2)+0.5)*(z.4L-z.5d)+z.5d}P{s=k.G[z.G](p,n,z.5d,(z.4L-z.5d),z.1m)}if(z.1u){if(!k.G||!k.G[z.G]){t=((-14.5v(p*14.2Q)/2)+0.5)*(z.57-z.4f)+z.4f;l=((-14.5v(p*14.2Q)/2)+0.5)*(z.4W-z.4c)+z.4c;if(z.1u==\'6l\')o=((-14.5v(p*14.2Q)/2)+0.5)*(-0.9R)+0.9R}P{t=k.G[z.G](p,n,z.4f,(z.57-z.4f),z.1m);l=k.G[z.G](p,n,z.4c,(z.4W-z.4c),z.1m);if(z.1u==\'6l\')o=k.G[z.G](p,n,0.9R,-0.9R,z.1m)}}z.bv(s,l,t,I,o)}};z.2H=6I(u(){z.2D()},13);z.bv=u(4z,O,Q,fp,1J){z.el.B(\'V\',z.V*4z/2b+\'S\').B(\'Z\',z.Z*4z/2b+\'S\').B(\'O\',O+\'S\').B(\'Q\',Q+\'S\').B(\'4w\',z.4w*4z/2b+z.fi);if(z.bt)z.el.B(\'4Z\',z.bt*4z/2b+z.fw);if(z.bg)z.el.B(\'6k\',z.bg*4z/2b+z.fB);if(z.bf)z.el.B(\'6g\',z.bf*4z/2b+z.fE);if(z.be)z.el.B(\'5a\',z.be*4z/2b+z.fv);if(z.bb)z.el.B(\'66\',z.bb*4z/2b+z.fk);if(z.ba)z.el.B(\'6j\',z.ba*4z/2b+z.fs);if(z.cJ)z.el.B(\'5M\',z.cJ*4z/2b+z.fb);if(z.cX)z.el.B(\'4X\',z.cX*4z/2b+z.fq);if(z.1u==\'6l\'){if(1V.7a)z.el.K(0).18.69="9V(1J="+1J*2b+")";z.el.K(0).18.1J=1J}if(fp){if(z.6E){z.el.B(z.W)}if(z.1u==\'d2\'||z.1u==\'6l\'){z.el.B(\'19\',\'1n\');if(z.1u==\'6l\'){if(1V.7a)z.el.K(0).18.69="9V(1J="+2b+")";z.el.K(0).18.1J=1}}P z.el.B(\'19\',\'2E\');if(z.J)z.J.1F(z.el.K(0));k.2L(z.el.K(0),\'1o\')}}};k.fn.kL=u(H,4C,J,G){E q.1w(\'f6\',u(){q.73=k(q).1p("18")||\'\';G=2h J==\'5g\'?J:G||U;J=2h J==\'u\'?J:U;D 9U=k(q).B(\'7f\');D 87=q.3e;7o(9U==\'b7\'&&87){9U=k(87).B(\'7f\');87=87.3e}k(q).B(\'7f\',4C);if(2h q.73==\'8i\')q.73=q.73["9T"];k(q).5K({\'7f\':9U},H,G,u(){k.2L(q,\'f6\');if(2h k(q).1p("18")==\'8i\'){k(q).1p("18")["9T"]="";k(q).1p("18")["9T"]=q.73}P{k(q).1p("18",q.73)}if(J)J.1F(q)})})};k.fn.21({kg:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.5A(q,H,J,\'4i\',\'5P\',G)})},kq:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.5A(q,H,J,\'4a\',\'5P\',G)})},kr:u(H,J,G){E q.1w(\'1o\',u(){if(k.B(q,\'19\')==\'1n\'){11 k.fx.5A(q,H,J,\'4a\',\'7e\',G)}P{11 
k.fx.5A(q,H,J,\'4a\',\'5P\',G)}})},kz:u(H,J,G){E q.1w(\'1o\',u(){if(k.B(q,\'19\')==\'1n\'){11 k.fx.5A(q,H,J,\'4i\',\'7e\',G)}P{11 k.fx.5A(q,H,J,\'4i\',\'5P\',G)}})},ky:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.5A(q,H,J,\'4i\',\'7e\',G)})},kx:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.5A(q,H,J,\'4a\',\'7e\',G)})}});k.fx.5A=u(e,H,J,2P,1u,G){if(!k.4O(e)){k.2L(e,\'1o\');E I}D z=q;D 5H=I;z.el=k(e);z.G=2h J==\'5g\'?J:G||U;z.J=2h J==\'u\'?J:U;z.1u=1u;z.H=H;z.2f=k.1a.2p(e);z.W={};z.W.Y=z.el.B(\'Y\');z.W.19=z.el.B(\'19\');if(z.W.19==\'1n\'){62=z.el.B(\'3j\');z.el.22();5H=1b}z.W.Q=z.el.B(\'Q\');z.W.O=z.el.B(\'O\');if(5H){z.el.2G();z.el.B(\'3j\',62)}z.W.Z=z.2f.w+\'S\';z.W.V=z.2f.h+\'S\';z.W.2Y=z.el.B(\'2Y\');z.2f.Q=T(z.W.Q)||0;z.2f.O=T(z.W.O)||0;if(z.W.Y!=\'2y\'&&z.W.Y!=\'1O\'){z.el.B(\'Y\',\'2y\')}z.el.B(\'2Y\',\'2O\').B(\'V\',1u==\'7e\'&&2P==\'4i\'?1:z.2f.h+\'S\').B(\'Z\',1u==\'7e\'&&2P==\'4a\'?1:z.2f.w+\'S\');z.23=u(){z.el.B(z.W);if(z.1u==\'5P\')z.el.2G();P z.el.22();k.2L(z.el.K(0),\'1o\')};3m(2P){1e\'4i\':z.eh=11 k.fx(z.el.K(0),k.H(H-15,z.G,J),\'V\');z.et=11 k.fx(z.el.K(0),k.H(z.H,z.G,z.23),\'Q\');if(z.1u==\'5P\'){z.eh.1L(z.2f.h,0);z.et.1L(z.2f.Q,z.2f.Q+z.2f.h/2)}P{z.eh.1L(0,z.2f.h);z.et.1L(z.2f.Q+z.2f.h/2,z.2f.Q)}1r;1e\'4a\':z.eh=11 k.fx(z.el.K(0),k.H(H-15,z.G,J),\'Z\');z.et=11 k.fx(z.el.K(0),k.H(z.H,z.G,z.23),\'O\');if(z.1u==\'5P\'){z.eh.1L(z.2f.w,0);z.et.1L(z.2f.O,z.2f.O+z.2f.w/2)}P{z.eh.1L(0,z.2f.w);z.et.1L(z.2f.O+z.2f.w/2,z.2f.O)}1r}};k.fn.cr=u(H,41,J){E q.1w(\'1o\',u(){if(!k.4O(q)){k.2L(q,\'1o\');E I}D fx=11 k.fx.cr(q,H,41,J);fx.cm()})};k.fx.cr=u(el,H,41,J){D z=q;z.41=41;z.5D=1;z.el=el;z.H=H;z.J=J;k(z.el).22();z.cm=u(){z.5D++;z.e=11 k.fx(z.el,k.H(z.H,u(){z.ef=11 k.fx(z.el,k.H(z.H,u(){if(z.5D<=z.41)z.cm();P{k.2L(z.el,\'1o\');if(z.J&&z.J.1K==2C){z.J.1F(z.el)}}}),\'1J\');z.ef.1L(0,1)}),\'1J\');z.e.1L(1,0)}};k.fn.21({9S:u(H,1N,G){o=k.H(H);E q.1w(\'1o\',u(){11 k.fx.9S(q,o,1N,G)})},ks:u(H,1N,G){E q.1B(u(){k(\'a[@3f*="#"]\',q).5G(u(e){g8=q.3f.7h(\'#\');k(\'#\'+g8[1]).9S(H,1N,G);E 
I})})}});k.fx.9S=u(e,o,1N,G){D z=q;z.o=o;z.e=e;z.1N=/g3|g0/.43(1N)?1N:I;z.G=G;p=k.1a.2R(e);s=k.1a.6W();z.4u=u(){6c(z.2H);z.2H=U;k.2L(z.e,\'1o\')};z.t=(11 72).71();s.h=s.h>s.ih?(s.h-s.ih):s.h;s.w=s.w>s.iw?(s.w-s.iw):s.w;z.57=p.y>s.h?s.h:p.y;z.4W=p.x>s.w?s.w:p.x;z.4f=s.t;z.4c=s.l;z.2D=u(){D t=(11 72).71();D n=t-z.t;D p=n/z.o.1m;if(t>=z.o.1m+z.t){z.4u();b1(u(){z.cE(z.57,z.4W)},13)}P{if(!z.1N||z.1N==\'g3\'){if(!k.G||!k.G[z.G]){aa=((-14.5v(p*14.2Q)/2)+0.5)*(z.57-z.4f)+z.4f}P{aa=k.G[z.G](p,n,z.4f,(z.57-z.4f),z.o.1m)}}P{aa=z.4f}if(!z.1N||z.1N==\'g0\'){if(!k.G||!k.G[z.G]){a9=((-14.5v(p*14.2Q)/2)+0.5)*(z.4W-z.4c)+z.4c}P{a9=k.G[z.G](p,n,z.4c,(z.4W-z.4c),z.o.1m)}}P{a9=z.4c}z.cE(aa,a9)}};z.cE=u(t,l){1V.gN(l,t)};z.2H=6I(u(){z.2D()},13)};k.fn.cy=u(41,J){E q.1w(\'1o\',u(){if(!k.4O(q)){k.2L(q,\'1o\');E I}D e=11 k.fx.cy(q,41,J);e.cx()})};k.fx.cy=u(e,41,J){D z=q;z.el=k(e);z.el.22();z.41=T(41)||3;z.J=J;z.5D=1;z.W={};z.W.Y=z.el.B(\'Y\');z.W.Q=T(z.el.B(\'Q\'))||0;z.W.O=T(z.el.B(\'O\'))||0;if(z.W.Y!=\'2y\'&&z.W.Y!=\'1O\'){z.el.B(\'Y\',\'2y\')}z.cx=u(){z.5D++;z.e=11 k.fx(z.el.K(0),{1m:60,23:u(){z.e=11 k.fx(z.el.K(0),{1m:60,23:u(){z.e=11 k.fx(e,{1m:60,23:u(){if(z.5D<=z.41)z.cx();P{z.el.B(\'Y\',z.W.Y).B(\'Q\',z.W.Q+\'S\').B(\'O\',z.W.O+\'S\');k.2L(z.el.K(0),\'1o\');if(z.J&&z.J.1K==2C){z.J.1F(z.el.K(0))}}}},\'O\');z.e.1L(z.W.O-20,z.W.O)}},\'O\');z.e.1L(z.W.O+20,z.W.O-20)}},\'O\');z.e.1L(z.W.O,z.W.O+20)}};k.fn.21({g9:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.1z(q,H,J,\'4U\',\'in\',G)})},f3:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.1z(q,H,J,\'4U\',\'4d\',G)})},gM:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.1z(q,H,J,\'4U\',\'3Y\',G)})},gL:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.1z(q,H,J,\'4y\',\'in\',G)})},gK:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.1z(q,H,J,\'4y\',\'4d\',G)})},gS:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.1z(q,H,J,\'4y\',\'3Y\',G)})},gR:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.1z(q,H,J,\'O\',\'in\',G)})},gJ:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.1z(q,H,J,\'O\',\'4d\',G)})},gI:u(H,J,G){E q.1w(\'1o\',u(){11 
k.fx.1z(q,H,J,\'O\',\'3Y\',G)})},gC:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.1z(q,H,J,\'2N\',\'in\',G)})},gB:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.1z(q,H,J,\'2N\',\'4d\',G)})},gU:u(H,J,G){E q.1w(\'1o\',u(){11 k.fx.1z(q,H,J,\'2N\',\'3Y\',G)})}});k.fx.1z=u(e,H,J,2P,1u,G){if(!k.4O(e)){k.2L(e,\'1o\');E I}D z=q;z.el=k(e);z.G=2h J==\'5g\'?J:G||U;z.J=2h J==\'u\'?J:U;if(1u==\'3Y\'){1u=z.el.B(\'19\')==\'1n\'?\'in\':\'4d\'}if(!e.4s)e.4s=z.el.B(\'19\');z.el.22();z.H=H;z.fx=k.fx.9h(e);z.1u=1u;z.2P=2P;z.23=u(){if(z.1u==\'4d\')z.el.B(\'3j\',\'2O\');k.fx.9g(z.fx.3o.K(0),z.fx.W);if(z.1u==\'in\'){z.el.B(\'19\',z.el.K(0).4s==\'1n\'?\'2E\':z.el.K(0).4s)}P{z.el.B(\'19\',\'1n\');z.el.B(\'3j\',\'dR\')}if(z.J&&z.J.1K==2C){z.J.1F(z.el.K(0))}k.2L(z.el.K(0),\'1o\')};3m(z.2P){1e\'4U\':z.ef=11 k.fx(z.el.K(0),k.H(z.H,z.G,z.23),\'Q\');z.7S=11 k.fx(z.fx.3o.K(0),k.H(z.H,z.G),\'V\');if(z.1u==\'in\'){z.ef.1L(-z.fx.W.1q.hb,0);z.7S.1L(0,z.fx.W.1q.hb)}P{z.ef.1L(0,-z.fx.W.1q.hb);z.7S.1L(z.fx.W.1q.hb,0)}1r;1e\'4y\':z.ef=11 k.fx(z.el.K(0),k.H(z.H,z.G,z.23),\'Q\');if(z.1u==\'in\'){z.ef.1L(z.fx.W.1q.hb,0)}P{z.ef.1L(0,z.fx.W.1q.hb)}1r;1e\'O\':z.ef=11 k.fx(z.el.K(0),k.H(z.H,z.G,z.23),\'O\');z.7S=11 k.fx(z.fx.3o.K(0),k.H(z.H,z.G),\'Z\');if(z.1u==\'in\'){z.ef.1L(-z.fx.W.1q.1D,0);z.7S.1L(0,z.fx.W.1q.1D)}P{z.ef.1L(0,-z.fx.W.1q.1D);z.7S.1L(z.fx.W.1q.1D,0)}1r;1e\'2N\':z.ef=11 k.fx(z.el.K(0),k.H(z.H,z.G,z.23),\'O\');if(z.1u==\'in\'){z.ef.1L(z.fx.W.1q.1D,0)}P{z.ef.1L(0,z.fx.W.1q.1D)}1r}};k.h2=U;k.fn.h1=u(o){E q.1B(u(){if(!o||!o.4L){E}D el=q;k(o.4L).1B(u(){11 k.fx.fu(el,q,o)})})};k.fx.fu=u(e,8s,o){D 
z=q;z.el=k(e);z.8s=8s;z.4e=1j.3t(\'26\');k(z.4e).B({Y:\'1O\'}).2Z(o.3b);if(!o.1m){o.1m=er}z.1m=o.1m;z.23=o.23;z.9i=0;z.9j=0;if(k.f5){z.9i=(T(k.3M(z.4e,\'5a\'))||0)+(T(k.3M(z.4e,\'6k\'))||0)+(T(k.3M(z.4e,\'4X\'))||0)+(T(k.3M(z.4e,\'6j\'))||0);z.9j=(T(k.3M(z.4e,\'4Z\'))||0)+(T(k.3M(z.4e,\'6g\'))||0)+(T(k.3M(z.4e,\'66\'))||0)+(T(k.3M(z.4e,\'5M\'))||0)}z.28=k.21(k.1a.2R(z.el.K(0)),k.1a.2p(z.el.K(0)));z.2X=k.21(k.1a.2R(z.8s),k.1a.2p(z.8s));z.28.1D-=z.9i;z.28.hb-=z.9j;z.2X.1D-=z.9i;z.2X.hb-=z.9j;z.J=o.23;k(\'2e\').1R(z.4e);k(z.4e).B(\'Z\',z.28.1D+\'S\').B(\'V\',z.28.hb+\'S\').B(\'Q\',z.28.y+\'S\').B(\'O\',z.28.x+\'S\').5K({Q:z.2X.y,O:z.2X.x,Z:z.2X.1D,V:z.2X.hb},z.1m,u(){k(z.4e).aB();if(z.23&&z.23.1K==2C){z.23.1F(z.el.K(0),[z.4L])}})};k.ak={2s:u(M){E q.1B(u(){D el=q;D 7x=2*14.2Q/eY;D aZ=2*14.2Q;if(k(el).B(\'Y\')!=\'2y\'&&k(el).B(\'Y\')!=\'1O\'){k(el).B(\'Y\',\'2y\')}el.1l={1S:k(M.1S,q),2F:M.2F,6M:M.6M,an:M.an,aZ:aZ,1P:k.1a.2p(q),Y:k.1a.2R(q),28:14.2Q/2,ct:M.ct,91:M.6R,6R:[],aY:I,7x:2*14.2Q/eY};el.1l.eZ=(el.1l.1P.w-el.1l.2F)/2;el.1l.7O=(el.1l.1P.h-el.1l.6M-el.1l.6M*el.1l.91)/2;el.1l.2D=2*14.2Q/el.1l.1S.1P();el.1l.cI=el.1l.1P.w/2;el.1l.cF=el.1l.1P.h/2-el.1l.6M*el.1l.91;D aS=1j.3t(\'26\');k(aS).B({Y:\'1O\',3B:1,Q:0,O:0});k(el).1R(aS);el.1l.1S.1B(u(2I){ab=k(\'1U\',q).K(0);V=T(el.1l.6M*el.1l.91);if(k.3h.4I){3N=1j.3t(\'1U\');k(3N).B(\'Y\',\'1O\');3N.2M=ab.2M;3N.18.69=\'iW aw:ax.ay.c1(1J=60, 18=1, iJ=0, i6=0, hz=0, hx=0)\'}P{3N=1j.3t(\'3N\');if(3N.ga){4H=3N.ga("2d");3N.18.Y=\'1O\';3N.18.V=V+\'S\';3N.18.Z=el.1l.2F+\'S\';3N.V=V;3N.Z=el.1l.2F;4H.i4();4H.i2(0,V);4H.hT(1,-1);4H.hJ(ab,0,0,el.1l.2F,V);4H.6E();4H.hN="hO-4d";D b0=4H.hQ(0,0,0,V);b0.g1(1,"fU(1X, 1X, 1X, 1)");b0.g1(0,"fU(1X, 1X, 1X, 0.6)");4H.hR=b0;if(iR.iv.3F(\'ix\')!=-1){4H.it()}P{4H.ir(0,0,el.1l.2F,V)}}}el.1l.6R[2I]=3N;k(aS).1R(3N)}).1H(\'aV\',u(e){el.1l.aY=1b;el.1l.H=el.1l.7x*0.1*el.1l.H/14.3R(el.1l.H);E I}).1H(\'8q\',u(e){el.1l.aY=I;E 
I});k.ak.7P(el);el.1l.H=el.1l.7x*0.2;el.1l.gm=1V.6I(u(){el.1l.28+=el.1l.H;if(el.1l.28>aZ)el.1l.28=0;k.ak.7P(el)},20);k(el).1H(\'8q\',u(){el.1l.H=el.1l.7x*0.2*el.1l.H/14.3R(el.1l.H)}).1H(\'3H\',u(e){if(el.1l.aY==I){1s=k.1a.44(e);fe=el.1l.1P.w-1s.x+el.1l.Y.x;el.1l.H=el.1l.ct*el.1l.7x*(el.1l.1P.w/2-fe)/(el.1l.1P.w/2)}})})},7P:u(el){el.1l.1S.1B(u(2I){ch=el.1l.28+2I*el.1l.2D;x=el.1l.eZ*14.5v(ch);y=el.1l.7O*14.98(ch);fm=T(2b*(el.1l.7O+y)/(2*el.1l.7O));fl=(el.1l.7O+y)/(2*el.1l.7O);Z=T((el.1l.2F-el.1l.an)*fl+el.1l.an);V=T(Z*el.1l.6M/el.1l.2F);q.18.Q=el.1l.cF+y-V/2+"S";q.18.O=el.1l.cI+x-Z/2+"S";q.18.Z=Z+"S";q.18.V=V+"S";q.18.3B=fm;el.1l.6R[2I].18.Q=T(el.1l.cF+y+V-1-V/2)+"S";el.1l.6R[2I].18.O=T(el.1l.cI+x-Z/2)+"S";el.1l.6R[2I].18.Z=Z+"S";el.1l.6R[2I].18.V=T(V*el.1l.91)+"S"})}};k.fn.h9=k.ak.2s;k.ff={2s:u(M){E q.1B(u(){if(!M.ae||!M.ad)E;D el=q;el.2j={ag:M.ag||bw,ae:M.ae,ad:M.ad,8r:M.8r||\'f7\',af:M.af||\'f7\',2U:M.2U&&2h M.2U==\'u\'?M.2U:I,3i:M.2U&&2h M.3i==\'u\'?M.3i:I,74:M.74&&2h M.74==\'u\'?M.74:I,ai:k(M.ae,q),8f:k(M.ad,q),H:M.H||8w,6e:M.6e||0};el.2j.8f.2G().B(\'V\',\'83\').eq(0).B({V:el.2j.ag+\'S\',19:\'2E\'}).2X();el.2j.ai.1B(u(2I){q.7d=2I}).h6(u(){k(q).2Z(el.2j.af)},u(){k(q).4p(el.2j.af)}).1H(\'5G\',u(e){if(el.2j.6e==q.7d)E;el.2j.ai.eq(el.2j.6e).4p(el.2j.8r).2X().eq(q.7d).2Z(el.2j.8r).2X();el.2j.8f.eq(el.2j.6e).5K({V:0},el.2j.H,u(){q.18.19=\'1n\';if(el.2j.3i){el.2j.3i.1F(el,[q])}}).2X().eq(q.7d).22().5K({V:el.2j.ag},el.2j.H,u(){q.18.19=\'2E\';if(el.2j.2U){el.2j.2U.1F(el,[q])}}).2X();if(el.2j.74){el.2j.74.1F(el,[q,el.2j.8f.K(q.7d),el.2j.ai.K(el.2j.6e),el.2j.8f.K(el.2j.6e)])}el.2j.6e=q.7d}).eq(0).2Z(el.2j.8r).2X();k(q).B(\'V\',k(q).B(\'V\')).B(\'2Y\',\'2O\')})}};k.fn.h7=k.ff.2s;k.3L={1c:U,8u:u(){31=q.2v;if(!31)E;18={fg:k(q).B(\'fg\')||\'\',4w:k(q).B(\'4w\')||\'\',8h:k(q).B(\'8h\')||\'\',fI:k(q).B(\'fI\')||\'\',fJ:k(q).B(\'fJ\')||\'\',fT:k(q).B(\'fT\')||\'\',cH:k(q).B(\'cH\')||\'\',fc:k(q).B(\'fc\')||\'\'};k.3L.1c.B(18);3w=k.3L.g2(31);3w=3w.4v(11 cp("\\\\n","g"),"
    ");k.3L.1c.3w(\'km\');ck=k.3L.1c.K(0).4b;k.3L.1c.3w(3w);Z=k.3L.1c.K(0).4b+ck;if(q.6t.2J&&Z>q.6t.2J[0]){Z=q.6t.2J[0]}q.18.Z=Z+\'S\';if(q.4S==\'cQ\'){V=k.3L.1c.K(0).63+ck;if(q.6t.2J&&V>q.6t.2J[1]){V=q.6t.2J[1]}q.18.V=V+\'S\'}},g2:u(31){co={\'&\':\'&j0;\',\'<\':\'&kB;\',\'>\':\'>\',\'"\':\'&kw;\'};1Y(i in co){31=31.4v(11 cp(i,\'g\'),co[i])}E 31},2s:u(2J){if(k.3L.1c==U){k(\'2e\',1j).1R(\'<26 id="fH" 18="Y: 1O; Q: 0; O: 0; 3j: 2O;">\');k.3L.1c=k(\'#fH\')}E q.1B(u(){if(/cQ|bz/.43(q.4S)){if(q.4S==\'bz\'){f9=q.5n(\'1u\');if(!/31|kv/.43(f9)){E}}if(2J&&(2J.1K==cR||(2J.1K==7b&&2J.1h==2))){if(2J.1K==cR)2J=[2J,2J];P{2J[0]=T(2J[0])||8w;2J[1]=T(2J[1])||8w}q.6t={2J:2J}}k(q).5I(k.3L.8u).6S(k.3L.8u).fX(k.3L.8u);k.3L.8u.1F(q)}})}};k.fn.ke=k.3L.2s;k.N={1c:U,8S:U,3E:U,2H:U,4o:U,bp:U,1d:U,2g:U,1S:U,5t:u(){k.N.8S.5t();if(k.N.3E){k.N.3E.2G()}},4u:u(){k.N.1S=U;k.N.2g=U;k.N.4o=k.N.1d.2v;if(k.N.1c.B(\'19\')==\'2E\'){if(k.N.1d.1f.fx){3m(k.N.1d.1f.fx.1u){1e\'bB\':k.N.1c.7k(k.N.1d.1f.fx.1m,k.N.5t);1r;1e\'1z\':k.N.1c.f3(k.N.1d.1f.fx.1m,k.N.5t);1r;1e\'aT\':k.N.1c.fz(k.N.1d.1f.fx.1m,k.N.5t);1r}}P{k.N.1c.2G()}if(k.N.1d.1f.3i)k.N.1d.1f.3i.1F(k.N.1d,[k.N.1c,k.N.3E])}P{k.N.5t()}1V.c6(k.N.2H)},fy:u(){D 1d=k.N.1d;D 4g=k.N.ap(1d);if(1d&&4g.3k!=k.N.4o&&4g.3k.1h>=1d.1f.aL){k.N.4o=4g.3k;k.N.bp=4g.3k;79={2q:k(1d).1p(\'kP\')||\'2q\',2v:4g.3k};k.kN({1u:\'kG\',79:k.kI(79),kF:u(ft){1d.1f.4h=k(\'3k\',ft);1P=1d.1f.4h.1P();if(1P>0){D 5x=\'\';1d.1f.4h.1B(u(2I){5x+=\'<90 4G="\'+k(\'2v\',q).31()+\'" 8O="\'+2I+\'" 18="94: aG;">\'+k(\'31\',q).31()+\'\'});if(1d.1f.aR){D 3G=k(\'2v\',1d.1f.4h.K(0)).31();1d.2v=4g.3l+3G+1d.1f.3K+4g.5Q;k.N.6G(1d,4g.3k.1h!=3G.1h?(4g.3l.1h+4g.3k.1h):3G.1h,4g.3k.1h!=3G.1h?(4g.3l.1h+3G.1h):3G.1h)}if(1P>0){k.N.b4(1d,5x)}P{k.N.4u()}}P{k.N.4u()}},6b:1d.1f.aM})}},b4:u(1d,5x){k.N.8S.3w(5x);k.N.1S=k(\'90\',k.N.8S.K(0));k.N.1S.aV(k.N.f2).1H(\'5G\',k.N.fO);D Y=k.1a.2R(1d);D 
1P=k.1a.2p(1d);k.N.1c.B(\'Q\',Y.y+1P.hb+\'S\').B(\'O\',Y.x+\'S\').2Z(1d.1f.aK);if(k.N.3E){k.N.3E.B(\'19\',\'2E\').B(\'Q\',Y.y+1P.hb+\'S\').B(\'O\',Y.x+\'S\').B(\'Z\',k.N.1c.B(\'Z\')).B(\'V\',k.N.1c.B(\'V\'))}k.N.2g=0;k.N.1S.K(0).3b=1d.1f.70;k.N.8P(1d,1d.1f.4h.K(0),\'6Z\');if(k.N.1c.B(\'19\')==\'1n\'){if(1d.1f.bA){D bm=k.1a.aj(1d,1b);D bl=k.1a.6h(1d,1b);k.N.1c.B(\'Z\',1d.4b-(k.f5?(bm.l+bm.r+bl.l+bl.r):0)+\'S\')}if(1d.1f.fx){3m(1d.1f.fx.1u){1e\'bB\':k.N.1c.7m(1d.1f.fx.1m);1r;1e\'1z\':k.N.1c.g9(1d.1f.fx.1m);1r;1e\'aT\':k.N.1c.fP(1d.1f.fx.1m);1r}}P{k.N.1c.22()}if(k.N.1d.1f.2U)k.N.1d.1f.2U.1F(k.N.1d,[k.N.1c,k.N.3E])}},fC:u(){D 1d=q;if(1d.1f.4h){k.N.4o=1d.2v;k.N.bp=1d.2v;D 5x=\'\';1d.1f.4h.1B(u(2I){2v=k(\'2v\',q).31().5Z();fR=1d.2v.5Z();if(2v.3F(fR)==0){5x+=\'<90 4G="\'+k(\'2v\',q).31()+\'" 8O="\'+2I+\'" 18="94: aG;">\'+k(\'31\',q).31()+\'\'}});if(5x!=\'\'){k.N.b4(1d,5x);q.1f.aW=1b;E}}1d.1f.4h=U;q.1f.aW=I},6G:u(2q,28,2X){if(2q.aI){D 6K=2q.aI();6K.j8(1b);6K.fr("bW",28);6K.ja("bW",-2X+28);6K.8Z()}P if(2q.aU){2q.aU(28,2X)}P{if(2q.5B){2q.5B=28;2q.dq=2X}}2q.6D()},fD:u(2q){if(2q.5B)E 2q.5B;P if(2q.aI){D 6K=1j.6G.du();D fo=6K.jg();E 0-fo.fr(\'bW\',-jX)}},ap:u(2q){D 4F={2v:2q.2v,3l:\'\',5Q:\'\',3k:\'\'};if(2q.1f.aO){D 97=I;D 5B=k.N.fD(2q)||0;D 56=4F.2v.7h(2q.1f.3K);1Y(D i=0;i<56.1h;i++){if((4F.3l.1h+56[i].1h>=5B||5B==0)&&!97){if(4F.3l.1h<=5B)4F.3k=56[i];P 4F.5Q+=56[i]+(56[i]!=\'\'?2q.1f.3K:\'\');97=1b}P if(97){4F.5Q+=56[i]+(56[i]!=\'\'?2q.1f.3K:\'\')}if(!97){4F.3l+=56[i]+(56.1h>1?2q.1f.3K:\'\')}}}P{4F.3k=4F.2v}E 4F},bu:u(e){1V.c6(k.N.2H);D 1d=k.N.ap(q);D 3O=e.7F||e.7A||-1;if(/^13$|27$|35$|36$|38$|40$|^9$/.43(3O)&&k.N.1S){if(1V.2l){1V.2l.cj=1b;1V.2l.ci=I}P{e.al();e.am()}if(k.N.2g!=U)k.N.1S.K(k.N.2g||0).3b=\'\';P k.N.2g=-1;3m(3O){1e 9:1e 13:if(k.N.2g==-1)k.N.2g=0;D 2g=k.N.1S.K(k.N.2g||0);D 
3G=2g.5n(\'4G\');q.2v=1d.3l+3G+q.1f.3K+1d.5Q;k.N.4o=1d.3k;k.N.6G(q,1d.3l.1h+3G.1h+q.1f.3K.1h,1d.3l.1h+3G.1h+q.1f.3K.1h);k.N.4u();if(q.1f.6a){4n=T(2g.5n(\'8O\'))||0;k.N.8P(q,q.1f.4h.K(4n),\'6a\')}if(q.76)q.76(I);E 3O!=13;1r;1e 27:q.2v=1d.3l+k.N.4o+q.1f.3K+1d.5Q;q.1f.4h=U;k.N.4u();if(q.76)q.76(I);E I;1r;1e 35:k.N.2g=k.N.1S.1P()-1;1r;1e 36:k.N.2g=0;1r;1e 38:k.N.2g--;if(k.N.2g<0)k.N.2g=k.N.1S.1P()-1;1r;1e 40:k.N.2g++;if(k.N.2g==k.N.1S.1P())k.N.2g=0;1r}k.N.8P(q,q.1f.4h.K(k.N.2g||0),\'6Z\');k.N.1S.K(k.N.2g||0).3b=q.1f.70;if(k.N.1S.K(k.N.2g||0).76)k.N.1S.K(k.N.2g||0).76(I);if(q.1f.aR){D aA=k.N.1S.K(k.N.2g||0).5n(\'4G\');q.2v=1d.3l+aA+q.1f.3K+1d.5Q;if(k.N.4o.1h!=aA.1h)k.N.6G(q,1d.3l.1h+k.N.4o.1h,1d.3l.1h+aA.1h)}E I}k.N.fC.1F(q);if(q.1f.aW==I){if(1d.3k!=k.N.4o&&1d.3k.1h>=q.1f.aL)k.N.2H=1V.b1(k.N.fy,q.1f.53);if(k.N.1S){k.N.4u()}}E 1b},8P:u(2q,3k,1u){if(2q.1f[1u]){D 79={};aE=3k.dU(\'*\');1Y(i=0;i\');k.N.3E=k(\'#g5\')}k(\'2e\',1j).1R(\'<26 id="gc" 18="Y: 1O; Q: 0; O: 0; z-b2: jE; 19: 1n;">&7J;\');k.N.1c=k(\'#gc\');k.N.8S=k(\'aX\',k.N.1c)}E q.1B(u(){if(q.4S!=\'bz\'&&q.5n(\'1u\')!=\'31\')E;q.1f={};q.1f.aM=M.aM;q.1f.aL=14.3R(T(M.aL)||1);q.1f.aK=M.aK?M.aK:\'\';q.1f.70=M.70?M.70:\'\';q.1f.6a=M.6a&&M.6a.1K==2C?M.6a:U;q.1f.2U=M.2U&&M.2U.1K==2C?M.2U:U;q.1f.3i=M.3i&&M.3i.1K==2C?M.3i:U;q.1f.6Z=M.6Z&&M.6Z.1K==2C?M.6Z:U;q.1f.bA=M.bA||I;q.1f.aO=M.aO||I;q.1f.3K=q.1f.aO?(M.3K||\', \'):\'\';q.1f.aR=M.aR?1b:I;q.1f.53=14.3R(T(M.53)||aF);if(M.fx&&M.fx.1K==7n){if(!M.fx.1u||!/bB|1z|aT/.43(M.fx.1u)){M.fx.1u=\'1z\'}if(M.fx.1u==\'1z\'&&!k.fx.1z)E;if(M.fx.1u==\'aT\'&&!k.fx.5W)E;M.fx.1m=14.3R(T(M.fx.1m)||8w);if(M.fx.1m>q.1f.53){M.fx.1m=q.1f.53-2b}q.1f.fx=M.fx}q.1f.4h=U;q.1f.aW=I;k(q).1p(\'bu\',\'fQ\').6D(u(){k.N.1d=q;k.N.4o=q.2v}).fX(k.N.gb).6S(k.N.bu).5I(u(){k.N.2H=1V.b1(k.N.4u,jP)})})}};k.fn.jO=k.N.2s;k.1y={2H:U,4E:U,29:U,2D:10,28:u(el,4P,2D,di){k.1y.4E=el;k.1y.29=4P;k.1y.2D=T(2D)||10;k.1y.2H=1V.6I(k.1y.db,T(di)||40)},db:u(){1Y(i=0;i0&&k.1y.29[i].30.y+k.1y.29[i].30.t>6f.y){k.1y.29[i].2V-=k.1y.2D}P 
if(k.1y.29[i].30.t<=k.1y.29[i].30.h&&k.1y.29[i].30.t+k.1y.29[i].30.hb<6f.y+6f.hb){k.1y.29[i].2V+=k.1y.2D}if(k.1y.29[i].30.l>0&&k.1y.29[i].30.x+k.1y.29[i].30.l>6f.x){k.1y.29[i].3g-=k.1y.2D}P if(k.1y.29[i].30.l<=k.1y.29[i].30.jT&&k.1y.29[i].30.l+k.1y.29[i].30.1D<6f.x+6f.1D){k.1y.29[i].3g+=k.1y.2D}}},8v:u(){1V.6c(k.1y.2H);k.1y.4E=U;k.1y.29=U;1Y(i in k.1y.29){k.1y.29[i].30=U}}};k.6y={2s:u(M){E q.1B(u(){D el=q;el.1G={1S:k(M.1S,q),1Z:k(M.1Z,q),1M:k.1a.2R(q),2F:M.2F,aN:M.aN,7R:M.7R,dw:M.dw,51:M.51,6q:M.6q};k.6y.aJ(el,0);k(1V).1H(\'jS\',u(){el.1G.1M=k.1a.2R(el);k.6y.aJ(el,0);k.6y.7P(el)});k.6y.7P(el);el.1G.1S.1H(\'aV\',u(){k(el.1G.aN,q).K(0).18.19=\'2E\'}).1H(\'8q\',u(){k(el.1G.aN,q).K(0).18.19=\'1n\'});k(1j).1H(\'3H\',u(e){D 1s=k.1a.44(e);D 5q=0;if(el.1G.51&&el.1G.51==\'b8\')D aQ=1s.x-el.1G.1M.x-(el.4b-el.1G.2F*el.1G.1S.1P())/2-el.1G.2F/2;P if(el.1G.51&&el.1G.51==\'2N\')D aQ=1s.x-el.1G.1M.x-el.4b+el.1G.2F*el.1G.1S.1P();P D aQ=1s.x-el.1G.1M.x;D dB=14.5Y(1s.y-el.1G.1M.y-el.63/2,2);el.1G.1S.1B(u(2I){46=14.dm(14.5Y(aQ-2I*el.1G.2F,2)+dB);46-=el.1G.2F/2;46=46<0?0:46;46=46>el.1G.7R?el.1G.7R:46;46=el.1G.7R-46;bC=el.1G.6q*46/el.1G.7R;q.18.Z=el.1G.2F+bC+\'S\';q.18.O=el.1G.2F*2I+5q+\'S\';5q+=bC});k.6y.aJ(el,5q)})})},aJ:u(el,5q){if(el.1G.51)if(el.1G.51==\'b8\')el.1G.1Z.K(0).18.O=(el.4b-el.1G.2F*el.1G.1S.1P())/2-5q/2+\'S\';P if(el.1G.51==\'O\')el.1G.1Z.K(0).18.O=-5q/el.1G.1S.1P()+\'S\';P if(el.1G.51==\'2N\')el.1G.1Z.K(0).18.O=(el.4b-el.1G.2F*el.1G.1S.1P())-5q/2+\'S\';el.1G.1Z.K(0).18.Z=el.1G.2F*el.1G.1S.1P()+5q+\'S\'},7P:u(el){el.1G.1S.1B(u(2I){q.18.Z=el.1G.2F+\'S\';q.18.O=el.1G.2F*2I+\'S\'})}};k.fn.jD=k.6y.2s;k.1v={M:{2B:10,eV:\'1Q/jG.eF\',eT:\'<1U 2M="1Q/5P.eC" />\',eN:0.8,e3:\'jK ab\',e5:\'5d\',3V:8w},jI:I,jU:I,6r:U,9d:I,9e:I,ca:u(2l){if(!k.1v.9e||k.1v.9d)E;D 3O=2l.7F||2l.7A||-1;3m(3O){1e 35:if(k.1v.6r)k.1v.28(U,k(\'a[@4G=\'+k.1v.6r+\']:k7\').K(0));1r;1e 36:if(k.1v.6r)k.1v.28(U,k(\'a[@4G=\'+k.1v.6r+\']:k5\').K(0));1r;1e 37:1e 8:1e 33:1e 80:1e k8:D 
ar=k(\'#9a\');if(ar.K(0).52!=U){ar.K(0).52.1F(ar.K(0))}1r;1e 38:1r;1e 39:1e 34:1e 32:1e ka:1e 78:D aD=k(\'#9b\');if(aD.K(0).52!=U){aD.K(0).52.1F(aD.K(0))}1r;1e 40:1r;1e 27:k.1v.ah();1r}},7W:u(M){if(M)k.21(k.1v.M,M);if(1V.2l){k(\'2e\',1j).1H(\'6S\',k.1v.ca)}P{k(1j).1H(\'6S\',k.1v.ca)}k(\'a\').1B(u(){el=k(q);dQ=el.1p(\'4G\')||\'\';eA=el.1p(\'3f\')||\'\';eu=/\\.eC|\\.jY|\\.95|\\.eF|\\.jW/g;if(eA.5Z().bU(eu)!=U&&dQ.5Z().3F(\'eJ\')==0){el.1H(\'5G\',k.1v.28)}});if(k.3h.4I){3E=1j.3t(\'3E\');k(3E).1p({id:\'b6\',2M:\'ew:I;\',ez:\'bX\',ey:\'bX\'}).B({19:\'1n\',Y:\'1O\',Q:\'0\',O:\'0\',69:\'aw:ax.ay.c1(1J=0)\'});k(\'2e\').1R(3E)}8Q=1j.3t(\'26\');k(8Q).1p(\'id\',\'bk\').B({Y:\'1O\',19:\'1n\',Q:\'0\',O:\'0\',1J:0}).1R(1j.8F(\' \')).1H(\'5G\',k.1v.ah);6L=1j.3t(\'26\');k(6L).1p(\'id\',\'dZ\').B({4X:k.1v.M.2B+\'S\'}).1R(1j.8F(\' \'));bZ=1j.3t(\'26\');k(bZ).1p(\'id\',\'e1\').B({4X:k.1v.M.2B+\'S\',5M:k.1v.M.2B+\'S\'}).1R(1j.8F(\' \'));cc=1j.3t(\'a\');k(cc).1p({id:\'jh\',3f:\'#\'}).B({Y:\'1O\',2N:k.1v.M.2B+\'S\',Q:\'0\'}).1R(k.1v.M.eT).1H(\'5G\',k.1v.ah);7t=1j.3t(\'26\');k(7t).1p(\'id\',\'bh\').B({Y:\'2y\',b9:\'O\',6X:\'0 ao\',3B:1}).1R(6L).1R(bZ).1R(cc);2a=1j.3t(\'1U\');2a.2M=k.1v.M.eV;k(2a).1p(\'id\',\'ep\').B({Y:\'1O\'});4R=1j.3t(\'a\');k(4R).1p({id:\'9a\',3f:\'#\'}).B({Y:\'1O\',19:\'1n\',2Y:\'2O\',eQ:\'1n\'}).1R(1j.8F(\' \'));4Q=1j.3t(\'a\');k(4Q).1p({id:\'9b\',3f:\'#\'}).B({Y:\'1O\',2Y:\'2O\',eQ:\'1n\'}).1R(1j.8F(\' \'));1Z=1j.3t(\'26\');k(1Z).1p(\'id\',\'e0\').B({19:\'1n\',Y:\'2y\',2Y:\'2O\',b9:\'O\',6X:\'0 ao\',Q:\'0\',O:\'0\',3B:2}).1R([2a,4R,4Q]);6N=1j.3t(\'26\');k(6N).1p(\'id\',\'aq\').B({19:\'1n\',Y:\'1O\',2Y:\'2O\',Q:\'0\',O:\'0\',b9:\'b8\',7f:\'b7\',j7:\'0\'}).1R([1Z,7t]);k(\'2e\').1R(8Q).1R(6N)},28:u(e,C){el=C?k(C):k(q);at=el.1p(\'4G\');D 
6P,4n,4R,4Q;if(at!=\'eJ\'){k.1v.6r=at;8N=k(\'a[@4G=\'+at+\']\');6P=8N.1P();4n=8N.b2(C?C:q);4R=8N.K(4n-1);4Q=8N.K(4n+1)}8H=el.1p(\'3f\');6L=el.1p(\'45\');3I=k.1a.6W();8Q=k(\'#bk\');if(!k.1v.9e){k.1v.9e=1b;if(k.3h.4I){k(\'#b6\').B(\'V\',14.3v(3I.ih,3I.h)+\'S\').B(\'Z\',14.3v(3I.iw,3I.w)+\'S\').22()}8Q.B(\'V\',14.3v(3I.ih,3I.h)+\'S\').B(\'Z\',14.3v(3I.iw,3I.w)+\'S\').22().eo(bw,k.1v.M.eN,u(){k.1v.bd(8H,6L,3I,6P,4n,4R,4Q)});k(\'#aq\').B(\'Z\',14.3v(3I.iw,3I.w)+\'S\')}P{k(\'#9a\').K(0).52=U;k(\'#9b\').K(0).52=U;k.1v.bd(8H,6L,3I,6P,4n,4R,4Q)}E I},bd:u(8H,jA,3I,6P,4n,4R,4Q){k(\'#bi\').aB();aC=k(\'#9a\');aC.2G();as=k(\'#9b\');as.2G();2a=k(\'#ep\');1Z=k(\'#e0\');6N=k(\'#aq\');7t=k(\'#bh\').B(\'3j\',\'2O\');k(\'#dZ\').3w(6L);k.1v.9d=1b;if(6P)k(\'#e1\').3w(k.1v.M.e3+\' \'+(4n+1)+\' \'+k.1v.M.e5+\' \'+6P);if(4R){aC.K(0).52=u(){q.5I();k.1v.28(U,4R);E I}}if(4Q){as.K(0).52=u(){q.5I();k.1v.28(U,4Q);E I}}2a.22();8E=k.1a.2p(1Z.K(0));5f=14.3v(8E.1D,2a.K(0).Z+k.1v.M.2B*2);5T=14.3v(8E.hb,2a.K(0).V+k.1v.M.2B*2);2a.B({O:(5f-2a.K(0).Z)/2+\'S\',Q:(5T-2a.K(0).V)/2+\'S\'});1Z.B({Z:5f+\'S\',V:5T+\'S\'}).22();e4=k.1a.bq();6N.B(\'Q\',3I.t+(e4.h/15)+\'S\');if(6N.B(\'19\')==\'1n\'){6N.22().7m(k.1v.M.3V)}6U=11 aH;k(6U).1p(\'id\',\'bi\').1H(\'jk\',u(){5f=6U.Z+k.1v.M.2B*2;5T=6U.V+k.1v.M.2B*2;2a.2G();1Z.5K({V:5T},8E.hb!=5T?k.1v.M.3V:1,u(){1Z.5K({Z:5f},8E.1D!=5f?k.1v.M.3V:1,u(){1Z.cA(6U);k(6U).B({Y:\'1O\',O:k.1v.M.2B+\'S\',Q:k.1v.M.2B+\'S\'}).7m(k.1v.M.3V,u(){dS=k.1a.2p(7t.K(0));if(4R){aC.B({O:k.1v.M.2B+\'S\',Q:k.1v.M.2B+\'S\',Z:5f/2-k.1v.M.2B*3+\'S\',V:5T-k.1v.M.2B*2+\'S\'}).22()}if(4Q){as.B({O:5f/2+k.1v.M.2B*2+\'S\',Q:k.1v.M.2B+\'S\',Z:5f/2-k.1v.M.2B*3+\'S\',V:5T-k.1v.M.2B*2+\'S\'}).22()}7t.B({Z:5f+\'S\',Q:-dS.hb+\'S\',3j:\'dR\'}).5K({Q:-1},k.1v.M.3V,u(){k.1v.9d=I})})})})});6U.2M=8H},ah:u(){k(\'#bi\').aB();k(\'#aq\').2G();k(\'#bh\').B(\'3j\',\'2O\');k(\'#bk\').eo(bw,0,u(){k(q).2G();if(k.3h.4I){k(\'#b6\').2G()}});k(\'#9a\').K(0).52=U;k(\'#9b\').K(0).52=U;k.1v.6r=U;k.1v.9e=I;k.1v.9d=I;E 
I}};k.2A={5E:[],eS:u(){q.5I();X=q.3e;id=k.1p(X,\'id\');if(k.2A.5E[id]!=U){1V.6c(k.2A.5E[id])}1z=X.L.3x+1;if(X.L.1Q.1h<1z){1z=1}1Q=k(\'1U\',X.L.5F);X.L.3x=1z;if(1Q.1P()>0){1Q.7k(X.L.3V,k.2A.8B)}},eG:u(){q.5I();X=q.3e;id=k.1p(X,\'id\');if(k.2A.5E[id]!=U){1V.6c(k.2A.5E[id])}1z=X.L.3x-1;1Q=k(\'1U\',X.L.5F);if(1z<1){1z=X.L.1Q.1h}X.L.3x=1z;if(1Q.1P()>0){1Q.7k(X.L.3V,k.2A.8B)}},2H:u(c){X=1j.cP(c);if(X.L.6w){1z=X.L.3x;7o(1z==X.L.3x){1z=1+T(14.6w()*X.L.1Q.1h)}}P{1z=X.L.3x+1;if(X.L.1Q.1h<1z){1z=1}}1Q=k(\'1U\',X.L.5F);X.L.3x=1z;if(1Q.1P()>0){1Q.7k(X.L.3V,k.2A.8B)}},go:u(o){D X;if(o&&o.1K==7n){if(o.2a){X=1j.cP(o.2a.X);6b=1V.kK.3f.7h("#");o.2a.6B=U;if(6b.1h==2){1z=T(6b[1]);22=6b[1].4v(1z,\'\');if(k.1p(X,\'id\')!=22){1z=1}}P{1z=1}}if(o.8A){o.8A.5I();X=o.8A.3e.3e;id=k.1p(X,\'id\');if(k.2A.5E[id]!=U){1V.6c(k.2A.5E[id])}6b=o.8A.3f.7h("#");1z=T(6b[1]);22=6b[1].4v(1z,\'\');if(k.1p(X,\'id\')!=22){1z=1}}if(X.L.1Q.1h<1z||1z<1){1z=1}X.L.3x=1z;5h=k.1a.2p(X);e8=k.1a.aj(X);e9=k.1a.6h(X);if(X.L.3s){X.L.3s.o.B(\'19\',\'1n\')}if(X.L.3r){X.L.3r.o.B(\'19\',\'1n\')}if(X.L.2a){y=T(e8.t)+T(e9.t);if(X.L.1T){if(X.L.1T.5z==\'Q\'){y+=X.L.1T.4q.hb}P{5h.h-=X.L.1T.4q.hb}}if(X.L.2w){if(X.L.2w&&X.L.2w.6s==\'Q\'){y+=X.L.2w.4q.hb}P{5h.h-=X.L.2w.4q.hb}}if(!X.L.cV){X.L.eg=o.2a?o.2a.V:(T(X.L.2a.B(\'V\'))||0);X.L.cV=o.2a?o.2a.Z:(T(X.L.2a.B(\'Z\'))||0)}X.L.2a.B(\'Q\',y+(5h.h-X.L.eg)/2+\'S\');X.L.2a.B(\'O\',(5h.1D-X.L.cV)/2+\'S\');X.L.2a.B(\'19\',\'2E\')}1Q=k(\'1U\',X.L.5F);if(1Q.1P()>0){1Q.7k(X.L.3V,k.2A.8B)}P{aP=k(\'a\',X.L.1T.o).K(1z-1);k(aP).2Z(X.L.1T.64);D 1U=11 aH();1U.X=k.1p(X,\'id\');1U.1z=1z-1;1U.2M=X.L.1Q[X.L.3x-1].2M;if(1U.23){1U.6B=U;k.2A.19.1F(1U)}P{1U.6B=k.2A.19}if(X.L.2w){X.L.2w.o.3w(X.L.1Q[1z-1].6v)}}}},8B:u(){X=q.3e.3e;X.L.5F.B(\'19\',\'1n\');if(X.L.1T.64){aP=k(\'a\',X.L.1T.o).4p(X.L.1T.64).K(X.L.3x-1);k(aP).2Z(X.L.1T.64)}D 1U=11 
aH();1U.X=k.1p(X,\'id\');1U.1z=X.L.3x-1;1U.2M=X.L.1Q[X.L.3x-1].2M;if(1U.23){1U.6B=U;k.2A.19.1F(1U)}P{1U.6B=k.2A.19}if(X.L.2w){X.L.2w.o.3w(X.L.1Q[X.L.3x-1].6v)}},19:u(){X=1j.cP(q.X);if(X.L.3s){X.L.3s.o.B(\'19\',\'1n\')}if(X.L.3r){X.L.3r.o.B(\'19\',\'1n\')}5h=k.1a.2p(X);y=0;if(X.L.1T){if(X.L.1T.5z==\'Q\'){y+=X.L.1T.4q.hb}P{5h.h-=X.L.1T.4q.hb}}if(X.L.2w){if(X.L.2w&&X.L.2w.6s==\'Q\'){y+=X.L.2w.4q.hb}P{5h.h-=X.L.2w.4q.hb}}kD=k(\'.cz\',X);y=y+(5h.h-q.V)/2;x=(5h.1D-q.Z)/2;X.L.5F.B(\'Q\',y+\'S\').B(\'O\',x+\'S\').3w(\'<1U 2M="\'+q.2M+\'" />\');X.L.5F.7m(X.L.3V);3r=X.L.3x+1;if(3r>X.L.1Q.1h){3r=1}3s=X.L.3x-1;if(3s<1){3s=X.L.1Q.1h}X.L.3r.o.B(\'19\',\'2E\').B(\'Q\',y+\'S\').B(\'O\',x+2*q.Z/3+\'S\').B(\'Z\',q.Z/3+\'S\').B(\'V\',q.V+\'S\').1p(\'45\',X.L.1Q[3r-1].6v);X.L.3r.o.K(0).3f=\'#\'+3r+k.1p(X,\'id\');X.L.3s.o.B(\'19\',\'2E\').B(\'Q\',y+\'S\').B(\'O\',x+\'S\').B(\'Z\',q.Z/3+\'S\').B(\'V\',q.V+\'S\').1p(\'45\',X.L.1Q[3s-1].6v);X.L.3s.o.K(0).3f=\'#\'+3s+k.1p(X,\'id\')},2s:u(o){if(!o||!o.1Z||k.2A.5E[o.1Z])E;D 1Z=k(\'#\'+o.1Z);D el=1Z.K(0);if(el.18.Y!=\'1O\'&&el.18.Y!=\'2y\'){el.18.Y=\'2y\'}el.18.2Y=\'2O\';if(1Z.1P()==0)E;el.L={};el.L.1Q=o.1Q?o.1Q:[];el.L.6w=o.6w&&o.6w==1b||I;8b=el.dU(\'kA\');1Y(i=0;i<8b.1h;i++){7I=el.L.1Q.1h;el.L.1Q[7I]={2M:8b[i].2M,6v:8b[i].45||8b[i].kC||\'\'}}if(el.L.1Q.1h==0){E}el.L.4m=k.21(k.1a.2R(el),k.1a.2p(el));el.L.d5=k.1a.aj(el);el.L.cL=k.1a.6h(el);t=T(el.L.d5.t)+T(el.L.cL.t);b=T(el.L.d5.b)+T(el.L.cL.b);k(\'1U\',el).aB();el.L.3V=o.3V?o.3V:er;if(o.5z||o.88||o.64){el.L.1T={};1Z.1R(\'<26 6A="eL">\');el.L.1T.o=k(\'.eL\',el);if(o.88){el.L.1T.88=o.88;el.L.1T.o.2Z(o.88)}if(o.64){el.L.1T.64=o.64}el.L.1T.o.B(\'Y\',\'1O\').B(\'Z\',el.L.4m.w+\'S\');if(o.5z&&o.5z==\'Q\'){el.L.1T.5z=\'Q\';el.L.1T.o.B(\'Q\',t+\'S\')}P{el.L.1T.5z=\'4l\';el.L.1T.o.B(\'4l\',b+\'S\')}el.L.1T.au=o.au?o.au:\' \';1Y(D i=0;i\'+7I+\'\'+(7I!=el.L.1Q.1h?el.L.1T.au:\'\'))}k(\'a\',el.L.1T.o).1H(\'5G\',u(){k.2A.go({8A:q})});el.L.1T.4q=k.1a.2p(el.L.1T.o.K(0))}if(o.6s||o.8l){el.L.2w={};1Z.1R(\'<26 
6A="eK">&7J;\');el.L.2w.o=k(\'.eK\',el);if(o.8l){el.L.2w.8l=o.8l;el.L.2w.o.2Z(o.8l)}el.L.2w.o.B(\'Y\',\'1O\').B(\'Z\',el.L.4m.w+\'S\');if(o.6s&&o.6s==\'Q\'){el.L.2w.6s=\'Q\';el.L.2w.o.B(\'Q\',(el.L.1T&&el.L.1T.5z==\'Q\'?el.L.1T.4q.hb+t:t)+\'S\')}P{el.L.2w.6s=\'4l\';el.L.2w.o.B(\'4l\',(el.L.1T&&el.L.1T.5z==\'4l\'?el.L.1T.4q.hb+b:b)+\'S\')}el.L.2w.4q=k.1a.2p(el.L.2w.o.K(0))}if(o.az){el.L.3r={az:o.az};1Z.1R(\'&7J;\');el.L.3r.o=k(\'.eR\',el);el.L.3r.o.B(\'Y\',\'1O\').B(\'19\',\'1n\').B(\'2Y\',\'2O\').B(\'4w\',\'eB\').2Z(el.L.3r.az);el.L.3r.o.1H(\'5G\',k.2A.eS)}if(o.av){el.L.3s={av:o.av};1Z.1R(\'&7J;\');el.L.3s.o=k(\'.ev\',el);el.L.3s.o.B(\'Y\',\'1O\').B(\'19\',\'1n\').B(\'2Y\',\'2O\').B(\'4w\',\'eB\').2Z(el.L.3s.av);el.L.3s.o.1H(\'5G\',k.2A.eG)}1Z.cA(\'<26 6A="cz">\');el.L.5F=k(\'.cz\',el);el.L.5F.B(\'Y\',\'1O\').B(\'Q\',\'3c\').B(\'O\',\'3c\').B(\'19\',\'1n\');if(o.2a){1Z.cA(\'<26 6A="eD" 18="19: 1n;"><1U 2M="\'+o.2a+\'" />\');el.L.2a=k(\'.eD\',el);el.L.2a.B(\'Y\',\'1O\');D 1U=11 aH();1U.X=o.1Z;1U.2M=o.2a;if(1U.23){1U.6B=U;k.2A.go({2a:1U})}P{1U.6B=u(){k.2A.go({2a:q})}}}P{k.2A.go({1Z:el})}if(o.cB){do=T(o.cB)*aF}k.2A.5E[o.1Z]=o.cB?1V.6I(\'k.2A.2H(\\\'\'+o.1Z+\'\\\')\',do):U}};k.X=k.2A.2s;k.8e={cN:u(e){3O=e.7F||e.7A||-1;if(3O==9){if(1V.2l){1V.2l.cj=1b;1V.2l.ci=I}P{e.al();e.am()}if(q.aI){1j.6G.du().31="\\t";q.dv=u(){q.6D();q.dv=U}}P if(q.aU){28=q.5B;2X=q.dq;q.2v=q.2v.iL(0,28)+"\\t"+q.2v.hm(2X);q.aU(28+1,28+1);q.6D()}E I}},58:u(){E q.1B(u(){if(q.7D&&q.7D==1b){k(q).3p(\'7E\',k.8e.cN);q.7D=I}})},2s:u(){E 
q.1B(u(){if(q.4S==\'cQ\'&&(!q.7D||q.7D==I)){k(q).1H(\'7E\',k.8e.cN);q.7D=1b}})}};k.fn.21({hS:k.8e.2s,hP:k.8e.58});',62,1292,'||||||||||||||||||||jQuery||||||this||||function||||||dragCfg|css|elm|var|return|dragged|easing|speed|false|callback|get|ss|options|iAuto|left|else|top|iResize|px|parseInt|null|height|oldStyle|slideshow|position|width||new|iDrag||Math||||style|display|iUtil|true|helper|subject|case|autoCFG|resizeOptions|length|dropCfg|document|iEL|carouselCfg|duration|none|interfaceFX|attr|sizes|break|pointer|iSort|type|ImageBox|queue|iDrop|iAutoscroller|slide|resizeElement|each|oC|wb|newSizes|apply|fisheyeCfg|bind|delta|opacity|constructor|custom|pos|axis|absolute|size|images|append|items|slideslinks|img|window|firstNum|255|for|container||extend|show|complete|cont||div||start|elsToScroll|loader|100|oR||body|oldP|selectedItem|typeof|elem|accordionCfg|props|event|parseFloat|newPosition|containment|getSize|field|ny|build|iTooltip|selectHelper|value|slideCaption|nx|relative|tp|islideshow|border|Function|step|block|itemWidth|hide|timer|nr|limit|fractions|dequeue|src|right|hidden|direction|PI|getPosition|cursorAt|onChange|onShow|scrollTop|result|end|overflow|addClass|parentData|text|||||||||scr|className|0px|iSlider|parentNode|href|scrollLeft|browser|onHide|visibility|item|pre|switch|selectdrug|wrapper|unbind|newCoords|nextslide|prevslide|createElement|values|max|html|currentslide|handle|onSlide|margins|zIndex|wrs|min|iframe|indexOf|valueToAdd|mousemove|pageSize|zones|multipleSeparator|iExpander|curCSS|canvas|pressedKey|accept|resizeDirection|abs|onStop|diff|handlers|fadeDuration|highlighted|dhs|toggle|dragElem||times||test|getPointer|title|distance||so|vp|horizontally|offsetWidth|startLeft|out|transferEl|startTop|subjectValue|lastSuggestion|vertically|ghosting|DropOutDirectiont|bottom|oP|iteration|lastValue|removeClass|dimm|slideCfg|ifxFirstDisplay|currentPointer|clear|replace|fontSize|onDrag|down|percent|onStart|nWidth|color|ratio|elToScroll|fieldData|rel|context
|msie|documentElement|params|to|shs|dragHandle|fxCheckTag|els|nextImage|prevImage|tagName|tooltipCFG|up|helperclass|endLeft|paddingLeft|currentStyle|borderTopWidth||halign|onclick|delay|nodeEl||chunks|endTop|destroy|dragmoveBy|borderLeftWidth|mousedown|nHeight|from|dhe|containerW|string|slidePos|si|collected|marginLeft|overzone|marginBottom|getAttribute|marginTop|marginRight|toAdd|zonex|clonedEl|empty|newStyles|cos|hight|toWrite|zoney|linksPosition|OpenClose|selectionStart|clientScroll|cnt|slideshows|holder|click|restoreStyle|blur|onDragModifier|animate|elS|paddingBottom|toDrag|sw|close|post|animationHandler|styles|containerH|prop|sortCfg|BlindDirection|nmp|pow|toLowerCase||mouseup|oldVisibility|offsetHeight|activeLinkClass|old|paddingTop|grid|point|filter|onSelect|url|clearInterval|fxh|currentPanel|elementData|borderBottomWidth|getBorder|cur|paddingRight|borderRightWidth|puff|snapDistance|tolerance|revert|hpc|maxWidth|currentRel|captionPosition|Expander|orig|caption|random|3000|iFisheye|Scale|class|onload|wr|focus|restore|128|selection|parseColor|setInterval|current|selRange|captionText|itemHeight|outerContainer|newDimensions|totalImages|getHeight|reflections|keyup|sliders|imageEl|getWidth|getScroll|margin|Draggable|onHighlight|selectClass|getTime|Date|oldStyleAttr|onClick||scrollIntoView|firstChild||data|ActiveXObject|Array|focused|accordionPos|open|backgroundColor|zoneh|split|oD|zonew|fadeOut|user|fadeIn|Object|while|minLeft|nw|startDrag|minTop|captionEl|newTop|newLeft|frameClass|increment|F0|0x|keyCode|139|toInteger|hasTabsEnabled|keydown|charCode|cssRules|rule|indic|nbsp|rgb|np|oldDisplay|opera|radiusY|positionItems|onOut|proximity|efx|onHover|hash|changed|init|sc|inFrontOf|selectKeyHelper||selectCurrent|getSizeLite|1px|contBorders||ts|parentEl|linksClass|parentBorders|yproc|imgs|nRx|fnc|iTTabs|panels|insideParent|fontWeight|object|nRy|clientWidth|captionClass|namedColors|offsetLeft|serialize|cssSides|mouseout|activeClass|targetEl|offsetTop|expand|stop|400|pr|d
irectionIncrement|clientHeight|link|showImage|move|sx|containerSize|createTextNode|jEl|imageSrc|ser|newPos|selectedone|minHeight|maxHeight|gallery|dir|applyOn|overlay|sh|content|maxRight|maxBottom|tooltipHelper|count|onselectstop|onselect|select|li|reflectionSize|padding|selectBorders|cursor|png|parent|finishedPre|sin|xproc|ImageBoxPrevImage|ImageBoxNextImage|bounceout|animationInProgress|opened|sy|destroyWrapper|buildWrapper|diffWidth|diffHeight|iIndex|diffX|diffY|prot|hidehelper|dEs|isDraggable|onDrop|minWidth|side|isDroppable|onActivate|dragstop|startTime|211|192|nodeName|self|oldPosition|exec|opt|getValues|styleSheets|sideEnd|borderColor|ne|handleEl|unit|DoFold|5625|oldTitle|SliderContainer|unfold|9999|ScrollTo|cssText|oldColor|alpha|2000|prev|selectKeyUp|os|selectKeyDown|selectcheck|dragEl|checkhover|DraggableDestroy|next|key|hoverclass|activeclass|sl|st|image||panelSelector|headerSelector|hoverClass|panelHeight|hideImage|headers|getPadding|iCarousel|preventDefault|stopPropagation|itemMinWidth|auto|getFieldValues|ImageBoxOuterContainer|prevEl|nextImageEl|linkRel|linksSeparator|prevslideClass|progid|DXImageTransform|Microsoft|nextslideClass|valToAdd|remove|prevImageEl|nextEl|childs|1000|default|Image|createTextRange|positionContainer|helperClass|minchars|source|itemsText|multiple|lnk|posx|autofill|reflexions|blind|setSelectionRange|mouseover|inCache|ul|protectRotation|maxRotation|gradient|setTimeout|index|elPosition|writeItems|String|ImageBoxIframe|transparent|center|textAlign|paddingRightSize|paddingTopSize|bounce|loadImage|borderLeftSize|borderBottomSize|borderRightSize|ImageBoxCaption|ImageBoxCurrentImage|moveDrag|ImageBoxOverlay|paddings|borders|idsa|firstStep|currentValue|getClient||stopDrag|borderTopSize|autocomplete|zoom|300|hidefocused|intersect|INPUT|inputWidth|fade|extraWidth|sortable|restricted|isSlider|tabindex|fitToContainer|snapToGrid|slider|prevTop|prevLeft|floats|getPositionLite|modifyContainer|getContainment|lastSi|SliderIteration|sliderEl|selec
tstop|match|linear|character|no|bouncein|captionImages|asin|Alpha|Selectserialize|mouse|initialPosition|measure|clearTimeout|helperSize|getMargins|tooltipURL|keyPressed|applyOnHover|closeEl|10000|parentPos|sliderSize|sliderPos|angle|returnValue|cancelBubble|spacer|oldBorder|pulse|169|entities|RegExp|Color|Pulsate||rotationSpeed|parseStyle|stopAnim|cssSidesEnd|shake|Shake|slideshowHolder|prepend|autoplay|floatVal|borderWidth|scroll|paddingY|pValue|letterSpacing|paddingX|paddingBottomSize|pause|oBor|clnt|doTab|autoSize|getElementById|TEXTAREA|Number|traverseDOM|func|draginit|loaderWidth|scrollHeight|paddingLeftSize|scrollWidth|oneIsSortable|innerWidth|innerHeight|shrink|windowSize|unselectable|oPad|dragmove|oldFloat|cssProps|colorCssProps|107|doScroll|addItem|SortableAddItem||DroppableDestroy|fxe||interval|after|insertBefore||sqrt|cloneNode|time|check|selectionEnd|offsetParent|Width|sortHelper|createRange|onblur|valign|||onout|224|posy|wid|isSortable|165|zindex|245|notColor|140|240|230|144|styleFloat|onhover|Droppable|emptyGIF|relAttr|visible|captionSize|dragstart|getElementsByTagName|listStyle|dragHelper|getHeightMinMax|onResize|ImageBoxCaptionText|ImageBoxContainer|ImageBoxCaptionImages||textImage|clientSize|textImageFrom|userSelect|onDragStop|slidePad|slideBor|highlight|shc|hlt|checkdrop|fit||loaderHeight||onDragStart|KhtmlUserSelect|remeasure|||on|fadeTo|ImageBoxLoader||500|||imageTypes|slideshowPrevslide|javascript|selectstopApply|scrolling|frameborder|hrefAttr|30px|jpg|slideshowLoader|selectedclass|gif|goprev|oldOverflow|isFunction|imagebox|slideshowCaption|slideshowLinks|directions|overlayOpacity|se|trim|textDecoration|slideshowNextSlide|gonext|closeHTML|selectcheckApply|loaderSRC|selectstart|isSelectable|360|radiusX|set|grow|hoverItem|SlideOutUp|leftUnit|boxModel|interfaceColorFX|fakeAccordionClass|togglever|elType|iBounce|paddingBottomUnit|wordSpacing|150|mousex|iAccordion|fontFamily|togglehor|fontUnit|filteredPosition|paddingTopUnit|parte|itemZIndex||selRang
e2|finish|paddingLeftUnit|moveStart|paddingRightUnit|xml|itransferTo|borderLeftUnit|borderTopUnit||update|BlindUp||borderRightUnit|checkCache|getSelectionStart|borderBottomUnit|tooltipTitle|easeout|expanderHelper|fontStyle|fontStretch|containerMaxx|yfrac|topUnit|containerMaxy|clickItem|BlindDown|off|inputValue|fracH|fontVariant|rgba|maxy|maxx|keypress|fracW|xfrac|horizontal|addColorStop|htmlEntities|vertical|dragmoveByKey|autocompleteIframe|onslide|fold|parts|SlideInUp|getContext|protect|autocompleteHelper|olive|orange|pink|white|maroon|navy|magenta|203|193|rotationTimer|lightpink||red|lightyellow|182|lime||purple|silver|Top|||inset|outset|SlideOutRight|SlideInRight|ridge|groove|dashed|solid|double|SlideToggleLeft|SlideOutLeft|SlideOutDown|SlideInDown|SlideToggleUp|scrollTo|selectorText|rules|borderStyle|SlideInLeft|SlideToggleDown|dotted|SlideToggleRight|textIndent|borderBottomColor|borderLeftColor|borderRightColor|outlineWidth|outlineOffset|TransferTo|transferHelper|lineHeight|borderTopColor|outlineColor|hover|Accordion|isNaN|Carousel|stopAll|||Right|Bottom|Left|yellow|215|option|frameset|optgroup|meta|substr|frame|script|col|colgroup||th|header|removeChild|float|ol|finishx|fxWrapper|starty|table|form|w_|input|textarea|button|tfoot|thead|pageX|drawImage|clientX|pageY|clientY|globalCompositeOperation|destination|DisableTabs|createLinearGradient|fillStyle|EnableTabs|scale|nextSibling|prototype|tr|td|tbody|AlphaImageLoader|fixPNG|purgeEvents|translate|centerEl|save|cssFloat|startx|fuchsia|148|gold|green|indigo|darkviolet||122||204||darkred|darksalmon|233|130|khaki||lightcyan|lightgreen|238|fillRect||fill|216|appVersion||WebKit|lightblue|173|153|darkorchid|black|220|blue|brown|cyan|beige|azure|finishOpacity|appendChild|substring|aqua|darkblue|darkcyan|darkmagenta|darkolivegreen|navigator|darkorange|183|189|darkgrey|flipv|darkgreen|darkkhaki|lightgrey|amp|BlindToggleHorizontally|BlindRight|BlindLeft|ResizableDestroy|Resizable|120|lineHeigt|collapse|BlindToggleVerticall
y|moveEnd|elasticin|bounceboth|984375|elasticout|elasticboth|duplicate|ImageBoxClose|DropOutDown|DropInDown|load|DropToggleRight|DropInRight|Fold|UnFold|Shrink|Grow|FoldToggle|DropOutRight|DropToggleLeft|DropInUp|DropOutUp|DropToggleDown|DropToggleUp|DropOutLeft|DropInLeft|captiontext|625|9375|Fisheye|30001|list|loading|fix|imageLoaded|childNodes|Showing|onchange|30002|SortSerialize|Autocomplete|200|SortableDestroy|Sortable|resize|wh|firstResize|Slider|bmp|100000|jpeg|Selectable|ToolTip|easeboth|easein|nodeValue|http|first|before|last|112|SliderSetValues|110|SliderGetValues|array|Bounce|Autoexpand|onselectstart|CloseVertically|mozUserSelect|fromHandler|ondragstart|MozUserSelect|number|pW|toUpperCase|khtml|find|CloseHorizontally|SwitchHorizontally|ScrollToAnchors|Puff|slideshowLink|password|quot|OpenHorizontally|OpenVertically|SwitchVertically|IMG|lt|alt|par|moz|success|POST|recallDroppables|param|pt|location|Highlight|100000000|ajax|ondrop|name'.split('|'),0,{})) Added: doctools/trunk/sphinx/style/jquery.js ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/style/jquery.js Mon Jul 23 11:02:25 2007 @@ -0,0 +1,2344 @@ +// prevent execution of jQuery if included more than once +if(typeof window.jQuery == "undefined") { +/* + * jQuery 1.1.3.1 - New Wave Javascript + * + * Copyright (c) 2007 John Resig (jquery.com) + * Dual licensed under the MIT (MIT-LICENSE.txt) + * and GPL (GPL-LICENSE.txt) licenses. 
+ * + * $Date: 2007-07-05 00:43:24 -0400 (Thu, 05 Jul 2007) $ + * $Rev: 2243 $ + */ + +// Global undefined variable +window.undefined = window.undefined; +var jQuery = function(a,c) { + // If the context is global, return a new object + if ( window == this || !this.init ) + return new jQuery(a,c); + + return this.init(a,c); +}; + +// Map over the $ in case of overwrite +if ( typeof $ != "undefined" ) + jQuery._$ = $; + +// Map the jQuery namespace to the '$' one +var $ = jQuery; + +jQuery.fn = jQuery.prototype = { + init: function(a,c) { + // Make sure that a selection was provided + a = a || document; + + // HANDLE: $(function) + // Shortcut for document ready + if ( jQuery.isFunction(a) ) + return new jQuery(document)[ jQuery.fn.ready ? "ready" : "load" ]( a ); + + // Handle HTML strings + if ( typeof a == "string" ) { + // HANDLE: $(html) -> $(array) + var m = /^[^<]*(<(.|\s)+>)[^>]*$/.exec(a); + if ( m ) + a = jQuery.clean( [ m[1] ] ); + + // HANDLE: $(expr) + else + return new jQuery( c ).find( a ); + } + + return this.setArray( + // HANDLE: $(array) + a.constructor == Array && a || + + // HANDLE: $(arraylike) + // Watch for when an array-like object is passed as the selector + (a.jquery || a.length && a != window && !a.nodeType && a[0] != undefined && a[0].nodeType) && jQuery.makeArray( a ) || + + // HANDLE: $(*) + [ a ] ); + }, + jquery: "1.1.3.1", + + size: function() { + return this.length; + }, + + length: 0, + + get: function( num ) { + return num == undefined ? 
+ + // Return a 'clean' array + jQuery.makeArray( this ) : + + // Return just the object + this[num]; + }, + pushStack: function( a ) { + var ret = jQuery(a); + ret.prevObject = this; + return ret; + }, + setArray: function( a ) { + this.length = 0; + [].push.apply( this, a ); + return this; + }, + each: function( fn, args ) { + return jQuery.each( this, fn, args ); + }, + index: function( obj ) { + var pos = -1; + this.each(function(i){ + if ( this == obj ) pos = i; + }); + return pos; + }, + + attr: function( key, value, type ) { + var obj = key; + + // Look for the case where we're accessing a style value + if ( key.constructor == String ) + if ( value == undefined ) + return this.length && jQuery[ type || "attr" ]( this[0], key ) || undefined; + else { + obj = {}; + obj[ key ] = value; + } + + // Check to see if we're setting style values + return this.each(function(index){ + // Set all the styles + for ( var prop in obj ) + jQuery.attr( + type ? this.style : this, + prop, jQuery.prop(this, obj[prop], type, index, prop) + ); + }); + }, + + css: function( key, value ) { + return this.attr( key, value, "curCSS" ); + }, + + text: function(e) { + if ( typeof e == "string" ) + return this.empty().append( document.createTextNode( e ) ); + + var t = ""; + jQuery.each( e || this, function(){ + jQuery.each( this.childNodes, function(){ + if ( this.nodeType != 8 ) + t += this.nodeType != 1 ? 
+ this.nodeValue : jQuery.fn.text([ this ]); + }); + }); + return t; + }, + + wrap: function() { + // The elements to wrap the target around + var a, args = arguments; + + // Wrap each of the matched elements individually + return this.each(function(){ + if ( !a ) + a = jQuery.clean(args, this.ownerDocument); + + // Clone the structure that we're using to wrap + var b = a[0].cloneNode(true); + + // Insert it before the element to be wrapped + this.parentNode.insertBefore( b, this ); + + // Find the deepest point in the wrap structure + while ( b.firstChild ) + b = b.firstChild; + + // Move the matched element to within the wrap structure + b.appendChild( this ); + }); + }, + append: function() { + return this.domManip(arguments, true, 1, function(a){ + this.appendChild( a ); + }); + }, + prepend: function() { + return this.domManip(arguments, true, -1, function(a){ + this.insertBefore( a, this.firstChild ); + }); + }, + before: function() { + return this.domManip(arguments, false, 1, function(a){ + this.parentNode.insertBefore( a, this ); + }); + }, + after: function() { + return this.domManip(arguments, false, -1, function(a){ + this.parentNode.insertBefore( a, this.nextSibling ); + }); + }, + end: function() { + return this.prevObject || jQuery([]); + }, + find: function(t) { + var data = jQuery.map(this, function(a){ return jQuery.find(t,a); }); + return this.pushStack( /[^+>] [^+>]/.test( t ) || t.indexOf("..") > -1 ? + jQuery.unique( data ) : data ); + }, + clone: function(deep) { + // Need to remove events on the element and its descendants + var $this = this.add(this.find("*")); + $this.each(function() { + this._$events = {}; + for (var type in this.$events) + this._$events[type] = jQuery.extend({},this.$events[type]); + }).unbind(); + + // Do the clone + var r = this.pushStack( jQuery.map( this, function(a){ + return a.cloneNode( deep != undefined ? 
deep : true ); + }) ); + + // Add the events back to the original and its descendants + $this.each(function() { + var events = this._$events; + for (var type in events) + for (var handler in events[type]) + jQuery.event.add(this, type, events[type][handler], events[type][handler].data); + this._$events = null; + }); + + // Return the cloned set + return r; + }, + + filter: function(t) { + return this.pushStack( + jQuery.isFunction( t ) && + jQuery.grep(this, function(el, index){ + return t.apply(el, [index]) + }) || + + jQuery.multiFilter(t,this) ); + }, + + not: function(t) { + return this.pushStack( + t.constructor == String && + jQuery.multiFilter(t, this, true) || + + jQuery.grep(this, function(a) { + return ( t.constructor == Array || t.jquery ) + ? jQuery.inArray( a, t ) < 0 + : a != t; + }) + ); + }, + + add: function(t) { + return this.pushStack( jQuery.merge( + this.get(), + t.constructor == String ? + jQuery(t).get() : + t.length != undefined && (!t.nodeName || t.nodeName == "FORM") ? + t : [t] ) + ); + }, + is: function(expr) { + return expr ? jQuery.multiFilter(expr,this).length > 0 : false; + }, + + val: function( val ) { + return val == undefined ? + ( this.length ? this[0].value : null ) : + this.attr( "value", val ); + }, + + html: function( val ) { + return val == undefined ? + ( this.length ? this[0].innerHTML : null ) : + this.empty().append( val ); + }, + domManip: function(args, table, dir, fn){ + var clone = this.length > 1, a; + + return this.each(function(){ + if ( !a ) { + a = jQuery.clean(args, this.ownerDocument); + if ( dir < 0 ) + a.reverse(); + } + + var obj = this; + + if ( table && jQuery.nodeName(this, "table") && jQuery.nodeName(a[0], "tr") ) + obj = this.getElementsByTagName("tbody")[0] || this.appendChild(document.createElement("tbody")); + + jQuery.each( a, function(){ + fn.apply( obj, [ clone ? 
this.cloneNode(true) : this ] ); + }); + + }); + } +}; + +jQuery.extend = jQuery.fn.extend = function() { + // copy reference to target object + var target = arguments[0], a = 1; + + // extend jQuery itself if only one argument is passed + if ( arguments.length == 1 ) { + target = this; + a = 0; + } + var prop; + while ( (prop = arguments[a++]) != null ) + // Extend the base object + for ( var i in prop ) target[i] = prop[i]; + + // Return the modified object + return target; +}; + +jQuery.extend({ + noConflict: function() { + if ( jQuery._$ ) + $ = jQuery._$; + return jQuery; + }, + + // This may seem like some crazy code, but trust me when I say that this + // is the only cross-browser way to do this. --John + isFunction: function( fn ) { + return !!fn && typeof fn != "string" && !fn.nodeName && + fn.constructor != Array && /function/i.test( fn + "" ); + }, + + // check if an element is in a XML document + isXMLDoc: function(elem) { + return elem.tagName && elem.ownerDocument && !elem.ownerDocument.body; + }, + + nodeName: function( elem, name ) { + return elem.nodeName && elem.nodeName.toUpperCase() == name.toUpperCase(); + }, + // args is for internal usage only + each: function( obj, fn, args ) { + if ( obj.length == undefined ) + for ( var i in obj ) + fn.apply( obj[i], args || [i, obj[i]] ); + else + for ( var i = 0, ol = obj.length; i < ol; i++ ) + if ( fn.apply( obj[i], args || [i, obj[i]] ) === false ) break; + return obj; + }, + + prop: function(elem, value, type, index, prop){ + // Handle executable functions + if ( jQuery.isFunction( value ) ) + value = value.call( elem, [index] ); + + // exclude the following css properties to add px + var exclude = /z-?index|font-?weight|opacity|zoom|line-?height/i; + + // Handle passing in a number to a CSS property + return value && value.constructor == Number && type == "curCSS" && !exclude.test(prop) ? 
+ value + "px" : + value; + }, + + className: { + // internal only, use addClass("class") + add: function( elem, c ){ + jQuery.each( c.split(/\s+/), function(i, cur){ + if ( !jQuery.className.has( elem.className, cur ) ) + elem.className += ( elem.className ? " " : "" ) + cur; + }); + }, + + // internal only, use removeClass("class") + remove: function( elem, c ){ + elem.className = c != undefined ? + jQuery.grep( elem.className.split(/\s+/), function(cur){ + return !jQuery.className.has( c, cur ); + }).join(" ") : ""; + }, + + // internal only, use is(".class") + has: function( t, c ) { + return jQuery.inArray( c, (t.className || t).toString().split(/\s+/) ) > -1; + } + }, + swap: function(e,o,f) { + for ( var i in o ) { + e.style["old"+i] = e.style[i]; + e.style[i] = o[i]; + } + f.apply( e, [] ); + for ( var i in o ) + e.style[i] = e.style["old"+i]; + }, + + css: function(e,p) { + if ( p == "height" || p == "width" ) { + var old = {}, oHeight, oWidth, d = ["Top","Bottom","Right","Left"]; + + jQuery.each( d, function(){ + old["padding" + this] = 0; + old["border" + this + "Width"] = 0; + }); + + jQuery.swap( e, old, function() { + if ( jQuery(e).is(':visible') ) { + oHeight = e.offsetHeight; + oWidth = e.offsetWidth; + } else { + e = jQuery(e.cloneNode(true)) + .find(":radio").removeAttr("checked").end() + .css({ + visibility: "hidden", position: "absolute", display: "block", right: "0", left: "0" + }).appendTo(e.parentNode)[0]; + + var parPos = jQuery.css(e.parentNode,"position") || "static"; + if ( parPos == "static" ) + e.parentNode.style.position = "relative"; + + oHeight = e.clientHeight; + oWidth = e.clientWidth; + + if ( parPos == "static" ) + e.parentNode.style.position = "static"; + + e.parentNode.removeChild(e); + } + }); + + return p == "height" ? 
oHeight : oWidth; + } + + return jQuery.curCSS( e, p ); + }, + + curCSS: function(elem, prop, force) { + var ret; + + if (prop == "opacity" && jQuery.browser.msie) { + ret = jQuery.attr(elem.style, "opacity"); + return ret == "" ? "1" : ret; + } + + if (prop.match(/float/i)) + prop = jQuery.styleFloat; + + if (!force && elem.style[prop]) + ret = elem.style[prop]; + + else if (document.defaultView && document.defaultView.getComputedStyle) { + + if (prop.match(/float/i)) + prop = "float"; + + prop = prop.replace(/([A-Z])/g,"-$1").toLowerCase(); + var cur = document.defaultView.getComputedStyle(elem, null); + + if ( cur ) + ret = cur.getPropertyValue(prop); + else if ( prop == "display" ) + ret = "none"; + else + jQuery.swap(elem, { display: "block" }, function() { + var c = document.defaultView.getComputedStyle(this, ""); + ret = c && c.getPropertyValue(prop) || ""; + }); + + } else if (elem.currentStyle) { + var newProp = prop.replace(/\-(\w)/g,function(m,c){return c.toUpperCase();}); + ret = elem.currentStyle[prop] || elem.currentStyle[newProp]; + } + + return ret; + }, + + clean: function(a, doc) { + var r = []; + doc = doc || document; + + jQuery.each( a, function(i,arg){ + if ( !arg ) return; + + if ( arg.constructor == Number ) + arg = arg.toString(); + + // Convert html string into DOM nodes + if ( typeof arg == "string" ) { + // Trim whitespace, otherwise indexOf won't work as expected + var s = jQuery.trim(arg).toLowerCase(), div = doc.createElement("div"), tb = []; + + var wrap = + // option or optgroup + !s.indexOf("", ""] || + + !s.indexOf("", ""] || + + (!s.indexOf("", ""] || + + !s.indexOf("", ""] || + + // matched above + (!s.indexOf("", ""] || + + !s.indexOf("", ""] || + + [0,"",""]; + + // Go to html and back, then peel off extra wrappers + div.innerHTML = wrap[1] + arg + wrap[2]; + + // Move to the right depth + while ( wrap[0]-- ) + div = div.firstChild; + + // Remove IE's autoinserted from table fragments + if ( jQuery.browser.msie ) { + + // 
String was a , *may* have spurious + if ( !s.indexOf(" or + else if ( wrap[1] == "
    " && s.indexOf("= 0 ; --n ) + if ( jQuery.nodeName(tb[n], "tbody") && !tb[n].childNodes.length ) + tb[n].parentNode.removeChild(tb[n]); + + } + + arg = jQuery.makeArray( div.childNodes ); + } + + if ( 0 === arg.length && (!jQuery.nodeName(arg, "form") && !jQuery.nodeName(arg, "select")) ) + return; + + if ( arg[0] == undefined || jQuery.nodeName(arg, "form") || arg.options ) + r.push( arg ); + else + r = jQuery.merge( r, arg ); + + }); + + return r; + }, + + attr: function(elem, name, value){ + var fix = jQuery.isXMLDoc(elem) ? {} : jQuery.props; + + // Certain attributes only work when accessed via the old DOM 0 way + if ( fix[name] ) { + if ( value != undefined ) elem[fix[name]] = value; + return elem[fix[name]]; + + } else if ( value == undefined && jQuery.browser.msie && jQuery.nodeName(elem, "form") && (name == "action" || name == "method") ) + return elem.getAttributeNode(name).nodeValue; + + // IE elem.getAttribute passes even for style + else if ( elem.tagName ) { + + + if ( value != undefined ) elem.setAttribute( name, value ); + if ( jQuery.browser.msie && /href|src/.test(name) && !jQuery.isXMLDoc(elem) ) + return elem.getAttribute( name, 2 ); + return elem.getAttribute( name ); + + // elem is actually elem.style ... set the style + } else { + // IE actually uses filters for opacity + if ( name == "opacity" && jQuery.browser.msie ) { + if ( value != undefined ) { + // IE has trouble with opacity if it does not have layout + // Force it by setting the zoom level + elem.zoom = 1; + + // Set the alpha filter to set the opacity + elem.filter = (elem.filter || "").replace(/alpha\([^)]*\)/,"") + + (parseFloat(value).toString() == "NaN" ? "" : "alpha(opacity=" + value * 100 + ")"); + } + + return elem.filter ? 
+ (parseFloat( elem.filter.match(/opacity=([^)]*)/)[1] ) / 100).toString() : ""; + } + name = name.replace(/-([a-z])/ig,function(z,b){return b.toUpperCase();}); + if ( value != undefined ) elem[name] = value; + return elem[name]; + } + }, + trim: function(t){ + return t.replace(/^\s+|\s+$/g, ""); + }, + + makeArray: function( a ) { + var r = []; + + // Need to use typeof to fight Safari childNodes crashes + if ( typeof a != "array" ) + for ( var i = 0, al = a.length; i < al; i++ ) + r.push( a[i] ); + else + r = a.slice( 0 ); + + return r; + }, + + inArray: function( b, a ) { + for ( var i = 0, al = a.length; i < al; i++ ) + if ( a[i] == b ) + return i; + return -1; + }, + merge: function(first, second) { + // We have to loop this way because IE & Opera overwrite the length + // expando of getElementsByTagName + for ( var i = 0; second[i]; i++ ) + first.push(second[i]); + return first; + }, + unique: function(first) { + var r = [], num = jQuery.mergeNum++; + + for ( var i = 0, fl = first.length; i < fl; i++ ) + if ( num != first[i].mergeNum ) { + first[i].mergeNum = num; + r.push(first[i]); + } + + return r; + }, + + mergeNum: 0, + grep: function(elems, fn, inv) { + // If a string is passed in for the function, make a function + // for it (a handy shortcut) + if ( typeof fn == "string" ) + fn = new Function("a","i","return " + fn); + + var result = []; + + // Go through the array, only saving the items + // that pass the validator function + for ( var i = 0, el = elems.length; i < el; i++ ) + if ( !inv && fn(elems[i],i) || inv && !fn(elems[i],i) ) + result.push( elems[i] ); + + return result; + }, + map: function(elems, fn) { + // If a string is passed in for the function, make a function + // for it (a handy shortcut) + if ( typeof fn == "string" ) + fn = new Function("a","return " + fn); + + var result = []; + + // Go through the array, translating each of the items to their + // new value (or values). 
+ for ( var i = 0, el = elems.length; i < el; i++ ) { + var val = fn(elems[i],i); + + if ( val !== null && val != undefined ) { + if ( val.constructor != Array ) val = [val]; + result = result.concat( val ); + } + } + + return result; + } +}); + +/* + * Whether the W3C compliant box model is being used. + * + * @property + * @name $.boxModel + * @type Boolean + * @cat JavaScript + */ +new function() { + var b = navigator.userAgent.toLowerCase(); + + // Figure out what browser is being used + jQuery.browser = { + version: (b.match(/.+(?:rv|it|ra|ie)[\/: ]([\d.]+)/) || [])[1], + safari: /webkit/.test(b), + opera: /opera/.test(b), + msie: /msie/.test(b) && !/opera/.test(b), + mozilla: /mozilla/.test(b) && !/(compatible|webkit)/.test(b) + }; + + // Check to see if the W3C box model is being used + jQuery.boxModel = !jQuery.browser.msie || document.compatMode == "CSS1Compat"; + + jQuery.styleFloat = jQuery.browser.msie ? "styleFloat" : "cssFloat", + + jQuery.props = { + "for": "htmlFor", + "class": "className", + "float": jQuery.styleFloat, + cssFloat: jQuery.styleFloat, + styleFloat: jQuery.styleFloat, + innerHTML: "innerHTML", + className: "className", + value: "value", + disabled: "disabled", + checked: "checked", + readonly: "readOnly", + selected: "selected", + maxlength: "maxLength" + }; +}; + +jQuery.each({ + parent: "a.parentNode", + parents: "jQuery.parents(a)", + next: "jQuery.nth(a,2,'nextSibling')", + prev: "jQuery.nth(a,2,'previousSibling')", + siblings: "jQuery.sibling(a.parentNode.firstChild,a)", + children: "jQuery.sibling(a.firstChild)" +}, function(i,n){ + jQuery.fn[ i ] = function(a) { + var ret = jQuery.map(this,n); + if ( a && typeof a == "string" ) + ret = jQuery.multiFilter(a,ret); + return this.pushStack( ret ); + }; +}); + +jQuery.each({ + appendTo: "append", + prependTo: "prepend", + insertBefore: "before", + insertAfter: "after" +}, function(i,n){ + jQuery.fn[ i ] = function(){ + var a = arguments; + return this.each(function(){ + for ( var j 
= 0, al = a.length; j < al; j++ ) + jQuery(a[j])[n]( this ); + }); + }; +}); + +jQuery.each( { + removeAttr: function( key ) { + jQuery.attr( this, key, "" ); + this.removeAttribute( key ); + }, + addClass: function(c){ + jQuery.className.add(this,c); + }, + removeClass: function(c){ + jQuery.className.remove(this,c); + }, + toggleClass: function( c ){ + jQuery.className[ jQuery.className.has(this,c) ? "remove" : "add" ](this, c); + }, + remove: function(a){ + if ( !a || jQuery.filter( a, [this] ).r.length ) + this.parentNode.removeChild( this ); + }, + empty: function() { + while ( this.firstChild ) + this.removeChild( this.firstChild ); + } +}, function(i,n){ + jQuery.fn[ i ] = function() { + return this.each( n, arguments ); + }; +}); + +jQuery.each( [ "eq", "lt", "gt", "contains" ], function(i,n){ + jQuery.fn[ n ] = function(num,fn) { + return this.filter( ":" + n + "(" + num + ")", fn ); + }; +}); + +jQuery.each( [ "height", "width" ], function(i,n){ + jQuery.fn[ n ] = function(h) { + return h == undefined ? + ( this.length ? jQuery.css( this[0], n ) : null ) : + this.css( n, h.constructor == String ? 
h : h + "px" ); + }; +}); +jQuery.extend({ + expr: { + "": "m[2]=='*'||jQuery.nodeName(a,m[2])", + "#": "a.getAttribute('id')==m[2]", + ":": { + // Position Checks + lt: "im[3]-0", + nth: "m[3]-0==i", + eq: "m[3]-0==i", + first: "i==0", + last: "i==r.length-1", + even: "i%2==0", + odd: "i%2", + + // Child Checks + "first-child": "a.parentNode.getElementsByTagName('*')[0]==a", + "last-child": "jQuery.nth(a.parentNode.lastChild,1,'previousSibling')==a", + "only-child": "!jQuery.nth(a.parentNode.lastChild,2,'previousSibling')", + + // Parent Checks + parent: "a.firstChild", + empty: "!a.firstChild", + + // Text Check + contains: "(a.textContent||a.innerText||'').indexOf(m[3])>=0", + + // Visibility + visible: '"hidden"!=a.type&&jQuery.css(a,"display")!="none"&&jQuery.css(a,"visibility")!="hidden"', + hidden: '"hidden"==a.type||jQuery.css(a,"display")=="none"||jQuery.css(a,"visibility")=="hidden"', + + // Form attributes + enabled: "!a.disabled", + disabled: "a.disabled", + checked: "a.checked", + selected: "a.selected||jQuery.attr(a,'selected')", + + // Form elements + text: "'text'==a.type", + radio: "'radio'==a.type", + checkbox: "'checkbox'==a.type", + file: "'file'==a.type", + password: "'password'==a.type", + submit: "'submit'==a.type", + image: "'image'==a.type", + reset: "'reset'==a.type", + button: '"button"==a.type||jQuery.nodeName(a,"button")', + input: "/input|select|textarea|button/i.test(a.nodeName)" + }, + "[": "jQuery.find(m[2],a).length" + }, + + // The regular expressions that power the parsing engine + parse: [ + // Match: [@value='test'], [@foo] + /^\[ *(@)([\w-]+) *([!*$^~=]*) *('?"?)(.*?)\4 *\]/, + + // Match: [div], [div p] + /^(\[)\s*(.*?(\[.*?\])?[^[]*?)\s*\]/, + + // Match: :contains('foo') + /^(:)([\w-]+)\("?'?(.*?(\(.*?\))?[^(]*?)"?'?\)/, + + // Match: :even, :last-chlid, #id, .class + new RegExp("^([:.#]*)(" + + ( jQuery.chars = jQuery.browser.safari && jQuery.browser.version < "3.0.0" ? 
"\\w" : "(?:[\\w\u0128-\uFFFF*_-]|\\\\.)" ) + "+)") + ], + + multiFilter: function( expr, elems, not ) { + var old, cur = []; + + while ( expr && expr != old ) { + old = expr; + var f = jQuery.filter( expr, elems, not ); + expr = f.t.replace(/^\s*,\s*/, "" ); + cur = not ? elems = f.r : jQuery.merge( cur, f.r ); + } + + return cur; + }, + find: function( t, context ) { + // Quickly handle non-string expressions + if ( typeof t != "string" ) + return [ t ]; + + // Make sure that the context is a DOM Element + if ( context && !context.nodeType ) + context = null; + + // Set the correct context (if none is provided) + context = context || document; + + // Handle the common XPath // expression + if ( !t.indexOf("//") ) { + context = context.documentElement; + t = t.substr(2,t.length); + + // And the / root expression + } else if ( !t.indexOf("/") && !context.ownerDocument ) { + context = context.documentElement; + t = t.substr(1,t.length); + if ( t.indexOf("/") >= 1 ) + t = t.substr(t.indexOf("/"),t.length); + } + + // Initialize the search + var ret = [context], done = [], last; + + // Continue while a selector expression exists, and while + // we're no longer looping upon ourselves + while ( t && last != t ) { + var r = []; + last = t; + + t = jQuery.trim(t).replace( /^\/\//, "" ); + + var foundToken = false; + + // An attempt at speeding up child selectors that + // point to a specific element tag + var re = new RegExp("^[/>]\\s*(" + jQuery.chars + "+)"); + var m = re.exec(t); + + if ( m ) { + var nodeName = m[1].toUpperCase(); + + // Perform our own iteration and filter + for ( var i = 0; ret[i]; i++ ) + for ( var c = ret[i].firstChild; c; c = c.nextSibling ) + if ( c.nodeType == 1 && (nodeName == "*" || c.nodeName.toUpperCase() == nodeName.toUpperCase()) ) + r.push( c ); + + ret = r; + t = t.replace( re, "" ); + if ( t.indexOf(" ") == 0 ) continue; + foundToken = true; + } else { + re = /^((\/?\.\.)|([>\/+~]))\s*([a-z]*)/i; + + if ( (m = re.exec(t)) != null ) { + 
r = []; + + var nodeName = m[4], mergeNum = jQuery.mergeNum++; + m = m[1]; + + for ( var j = 0, rl = ret.length; j < rl; j++ ) + if ( m.indexOf("..") < 0 ) { + var n = m == "~" || m == "+" ? ret[j].nextSibling : ret[j].firstChild; + for ( ; n; n = n.nextSibling ) + if ( n.nodeType == 1 ) { + if ( m == "~" && n.mergeNum == mergeNum ) break; + + if (!nodeName || n.nodeName.toUpperCase() == nodeName.toUpperCase() ) { + if ( m == "~" ) n.mergeNum = mergeNum; + r.push( n ); + } + + if ( m == "+" ) break; + } + } else + r.push( ret[j].parentNode ); + + ret = r; + + // And remove the token + t = jQuery.trim( t.replace( re, "" ) ); + foundToken = true; + } + } + + // See if there's still an expression, and that we haven't already + // matched a token + if ( t && !foundToken ) { + // Handle multiple expressions + if ( !t.indexOf(",") ) { + // Clean the result set + if ( context == ret[0] ) ret.shift(); + + // Merge the result sets + done = jQuery.merge( done, ret ); + + // Reset the context + r = ret = [context]; + + // Touch up the selector string + t = " " + t.substr(1,t.length); + + } else { + // Optomize for the case nodeName#idName + var re2 = new RegExp("^(" + jQuery.chars + "+)(#)(" + jQuery.chars + "+)"); + var m = re2.exec(t); + + // Re-organize the results, so that they're consistent + if ( m ) { + m = [ 0, m[2], m[3], m[1] ]; + + } else { + // Otherwise, do a traditional filter check for + // ID, class, and element selectors + re2 = new RegExp("^([#.]?)(" + jQuery.chars + "*)"); + m = re2.exec(t); + } + + m[2] = m[2].replace(/\\/g, ""); + + var elem = ret[ret.length-1]; + + // Try to do a global search by ID, where we can + if ( m[1] == "#" && elem && elem.getElementById ) { + // Optimization for HTML document case + var oid = elem.getElementById(m[2]); + + // Do a quick check for the existence of the actual ID attribute + // to avoid selecting by the name attribute in IE + // also check to insure id is a string to avoid selecting an element with the name of 'id' 
inside a form + if ( (jQuery.browser.msie||jQuery.browser.opera) && oid && typeof oid.id == "string" && oid.id != m[2] ) + oid = jQuery('[@id="'+m[2]+'"]', elem)[0]; + + // Do a quick check for node name (where applicable) so + // that div#foo searches will be really fast + ret = r = oid && (!m[3] || jQuery.nodeName(oid, m[3])) ? [oid] : []; + } else { + // We need to find all descendant elements + for ( var i = 0; ret[i]; i++ ) { + // Grab the tag name being searched for + var tag = m[1] != "" || m[0] == "" ? "*" : m[2]; + + // Handle IE7 being really dumb about s + if ( tag == "*" && ret[i].nodeName.toLowerCase() == "object" ) + tag = "param"; + + r = jQuery.merge( r, ret[i].getElementsByTagName( tag )); + } + + // It's faster to filter by class and be done with it + if ( m[1] == "." ) + r = jQuery.classFilter( r, m[2] ); + + // Same with ID filtering + if ( m[1] == "#" ) { + var tmp = []; + + // Try to find the element with the ID + for ( var i = 0; r[i]; i++ ) + if ( r[i].getAttribute("id") == m[2] ) { + tmp = [ r[i] ]; + break; + } + + r = tmp; + } + + ret = r; + } + + t = t.replace( re2, "" ); + } + + } + + // If a selector string still exists + if ( t ) { + // Attempt to filter it + var val = jQuery.filter(t,r); + ret = r = val.r; + t = jQuery.trim(val.t); + } + } + + // An error occurred with the selector; + // just return an empty set instead + if ( t ) + ret = []; + + // Remove the root context + if ( ret && context == ret[0] ) + ret.shift(); + + // And combine the results + done = jQuery.merge( done, ret ); + + return done; + }, + + classFilter: function(r,m,not){ + m = " " + m + " "; + var tmp = []; + for ( var i = 0; r[i]; i++ ) { + var pass = (" " + r[i].className + " ").indexOf( m ) >= 0; + if ( !not && pass || not && !pass ) + tmp.push( r[i] ); + } + return tmp; + }, + + filter: function(t,r,not) { + var last; + + // Look for common filter expressions + while ( t && t != last ) { + last = t; + + var p = jQuery.parse, m; + + for ( var i = 0; p[i]; 
i++ ) { + m = p[i].exec( t ); + + if ( m ) { + // Remove what we just matched + t = t.substring( m[0].length ); + + m[2] = m[2].replace(/\\/g, ""); + break; + } + } + + if ( !m ) + break; + + // :not() is a special case that can be optimized by + // keeping it out of the expression list + if ( m[1] == ":" && m[2] == "not" ) + r = jQuery.filter(m[3], r, true).r; + + // We can get a big speed boost by filtering by class here + else if ( m[1] == "." ) + r = jQuery.classFilter(r, m[2], not); + + else if ( m[1] == "@" ) { + var tmp = [], type = m[3]; + + for ( var i = 0, rl = r.length; i < rl; i++ ) { + var a = r[i], z = a[ jQuery.props[m[2]] || m[2] ]; + + if ( z == null || /href|src/.test(m[2]) ) + z = jQuery.attr(a,m[2]) || ''; + + if ( (type == "" && !!z || + type == "=" && z == m[5] || + type == "!=" && z != m[5] || + type == "^=" && z && !z.indexOf(m[5]) || + type == "$=" && z.substr(z.length - m[5].length) == m[5] || + (type == "*=" || type == "~=") && z.indexOf(m[5]) >= 0) ^ not ) + tmp.push( a ); + } + + r = tmp; + + // We can get a speed boost by handling nth-child here + } else if ( m[1] == ":" && m[2] == "nth-child" ) { + var num = jQuery.mergeNum++, tmp = [], + test = /(\d*)n\+?(\d*)/.exec( + m[3] == "even" && "2n" || m[3] == "odd" && "2n+1" || + !/\D/.test(m[3]) && "n+" + m[3] || m[3]), + first = (test[1] || 1) - 0, last = test[2] - 0; + + for ( var i = 0, rl = r.length; i < rl; i++ ) { + var node = r[i], parentNode = node.parentNode; + + if ( num != parentNode.mergeNum ) { + var c = 1; + + for ( var n = parentNode.firstChild; n; n = n.nextSibling ) + if ( n.nodeType == 1 ) + n.nodeIndex = c++; + + parentNode.mergeNum = num; + } + + var add = false; + + if ( first == 1 ) { + if ( last == 0 || node.nodeIndex == last ) + add = true; + } else if ( (node.nodeIndex + last) % first == 0 ) + add = true; + + if ( add ^ not ) + tmp.push( node ); + } + + r = tmp; + + // Otherwise, find the expression to execute + } else { + var f = jQuery.expr[m[1]]; + if ( typeof f 
!= "string" ) + f = jQuery.expr[m[1]][m[2]]; + + // Build a custom macro to enclose it + eval("f = function(a,i){return " + f + "}"); + + // Execute it against the current filter + r = jQuery.grep( r, f, not ); + } + } + + // Return an array of filtered elements (r) + // and the modified expression string (t) + return { r: r, t: t }; + }, + parents: function( elem ){ + var matched = []; + var cur = elem.parentNode; + while ( cur && cur != document ) { + matched.push( cur ); + cur = cur.parentNode; + } + return matched; + }, + nth: function(cur,result,dir,elem){ + result = result || 1; + var num = 0; + + for ( ; cur; cur = cur[dir] ) + if ( cur.nodeType == 1 && ++num == result ) + break; + + return cur; + }, + sibling: function( n, elem ) { + var r = []; + + for ( ; n; n = n.nextSibling ) { + if ( n.nodeType == 1 && (!elem || n != elem) ) + r.push( n ); + } + + return r; + } +}); +/* + * A number of helper functions used for managing events. + * Many of the ideas behind this code orignated from + * Dean Edwards' addEvent library. 
+ */ +jQuery.event = { + + // Bind an event to an element + // Original by Dean Edwards + add: function(element, type, handler, data) { + // For whatever reason, IE has trouble passing the window object + // around, causing it to be cloned in the process + if ( jQuery.browser.msie && element.setInterval != undefined ) + element = window; + + // Make sure that the function being executed has a unique ID + if ( !handler.guid ) + handler.guid = this.guid++; + + // if data is passed, bind to handler + if( data != undefined ) { + // Create temporary function pointer to original handler + var fn = handler; + + // Create unique handler function, wrapped around original handler + handler = function() { + // Pass arguments and context to original handler + return fn.apply(this, arguments); + }; + + // Store data in unique handler + handler.data = data; + + // Set the guid of unique handler to the same of original handler, so it can be removed + handler.guid = fn.guid; + } + + // Init the element's event structure + if (!element.$events) + element.$events = {}; + + if (!element.$handle) + element.$handle = function() { + // returned undefined or false + var val; + + // Handle the second event of a trigger and when + // an event is called after a page has unloaded + if ( typeof jQuery == "undefined" || jQuery.event.triggered ) + return val; + + val = jQuery.event.handle.apply(element, arguments); + + return val; + }; + + // Get the current list of functions bound to this event + var handlers = element.$events[type]; + + // Init the event handler queue + if (!handlers) { + handlers = element.$events[type] = {}; + + // And bind the global event handler to the element + if (element.addEventListener) + element.addEventListener(type, element.$handle, false); + else + element.attachEvent("on" + type, element.$handle); + } + + // Add the function to the element's handler list + handlers[handler.guid] = handler; + + // Remember the function in a global list (for triggering) + if 
(!this.global[type]) + this.global[type] = []; + // Only add the element to the global list once + if (jQuery.inArray(element, this.global[type]) == -1) + this.global[type].push( element ); + }, + + guid: 1, + global: {}, + + // Detach an event or set of events from an element + remove: function(element, type, handler) { + var events = element.$events, ret, index; + + if ( events ) { + // type is actually an event object here + if ( type && type.type ) { + handler = type.handler; + type = type.type; + } + + if ( !type ) { + for ( type in events ) + this.remove( element, type ); + + } else if ( events[type] ) { + // remove the given handler for the given type + if ( handler ) + delete events[type][handler.guid]; + + // remove all handlers for the given type + else + for ( handler in element.$events[type] ) + delete events[type][handler]; + + // remove generic event handler if no more handlers exist + for ( ret in events[type] ) break; + if ( !ret ) { + if (element.removeEventListener) + element.removeEventListener(type, element.$handle, false); + else + element.detachEvent("on" + type, element.$handle); + ret = null; + delete events[type]; + + // Remove element from the global event type cache + while ( this.global[type] && ( (index = jQuery.inArray(element, this.global[type])) >= 0 ) ) + delete this.global[type][index]; + } + } + + // Remove the expando if it's no longer used + for ( ret in events ) break; + if ( !ret ) + element.$handle = element.$events = null; + } + }, + + trigger: function(type, data, element) { + // Clone the incoming data, if any + data = jQuery.makeArray(data || []); + + // Handle a global trigger + if ( !element ) + jQuery.each( this.global[type] || [], function(){ + jQuery.event.trigger( type, data, this ); + }); + + // Handle triggering a single element + else { + var val, ret, fn = jQuery.isFunction( element[ type ] || null ); + + // Pass along a fake event + data.unshift( this.fix({ type: type, target: element }) ); + + // Trigger the 
event + if ( jQuery.isFunction(element.$handle) && (val = element.$handle.apply( element, data )) !== false ) + this.triggered = true; + + if ( fn && val !== false && !jQuery.nodeName(element, 'a') ) + element[ type ](); + + this.triggered = false; + } + }, + + handle: function(event) { + // returned undefined or false + var val; + + // Empty object is for triggered events with no data + event = jQuery.event.fix( event || window.event || {} ); + + var c = this.$events && this.$events[event.type], args = [].slice.call( arguments, 1 ); + args.unshift( event ); + + for ( var j in c ) { + // Pass in a reference to the handler function itself + // So that we can later remove it + args[0].handler = c[j]; + args[0].data = c[j].data; + + if ( c[j].apply( this, args ) === false ) { + event.preventDefault(); + event.stopPropagation(); + val = false; + } + } + + // Clean up added properties in IE to prevent memory leak + if (jQuery.browser.msie) + event.target = event.preventDefault = event.stopPropagation = + event.handler = event.data = null; + + return val; + }, + + fix: function(event) { + // store a copy of the original event object + // and clone to set read-only properties + var originalEvent = event; + event = jQuery.extend({}, originalEvent); + + // add preventDefault and stopPropagation since + // they will not work on the clone + event.preventDefault = function() { + // if preventDefault exists run it on the original event + if (originalEvent.preventDefault) + return originalEvent.preventDefault(); + // otherwise set the returnValue property of the original event to false (IE) + originalEvent.returnValue = false; + }; + event.stopPropagation = function() { + // if stopPropagation exists run it on the original event + if (originalEvent.stopPropagation) + return originalEvent.stopPropagation(); + // otherwise set the cancelBubble property of the original event to true (IE) + originalEvent.cancelBubble = true; + }; + + // Fix target property, if necessary + if ( 
!event.target && event.srcElement ) + event.target = event.srcElement; + + // check if target is a textnode (safari) + if (jQuery.browser.safari && event.target.nodeType == 3) + event.target = originalEvent.target.parentNode; + + // Add relatedTarget, if necessary + if ( !event.relatedTarget && event.fromElement ) + event.relatedTarget = event.fromElement == event.target ? event.toElement : event.fromElement; + + // Calculate pageX/Y if missing and clientX/Y available + if ( event.pageX == null && event.clientX != null ) { + var e = document.documentElement, b = document.body; + event.pageX = event.clientX + (e && e.scrollLeft || b.scrollLeft); + event.pageY = event.clientY + (e && e.scrollTop || b.scrollTop); + } + + // Add which for key events + if ( !event.which && (event.charCode || event.keyCode) ) + event.which = event.charCode || event.keyCode; + + // Add metaKey to non-Mac browsers (use ctrl for PC's and Meta for Macs) + if ( !event.metaKey && event.ctrlKey ) + event.metaKey = event.ctrlKey; + + // Add which for click: 1 == left; 2 == middle; 3 == right + // Note: button is not normalized, so don't use it + if ( !event.which && event.button ) + event.which = (event.button & 1 ? 1 : ( event.button & 2 ? 3 : ( event.button & 4 ? 2 : 0 ) )); + + return event; + } +}; + +jQuery.fn.extend({ + bind: function( type, data, fn ) { + return type == "unload" ? 
this.one(type, data, fn) : this.each(function(){ + jQuery.event.add( this, type, fn || data, fn && data ); + }); + }, + one: function( type, data, fn ) { + return this.each(function(){ + jQuery.event.add( this, type, function(event) { + jQuery(this).unbind(event); + return (fn || data).apply( this, arguments); + }, fn && data); + }); + }, + unbind: function( type, fn ) { + return this.each(function(){ + jQuery.event.remove( this, type, fn ); + }); + }, + trigger: function( type, data ) { + return this.each(function(){ + jQuery.event.trigger( type, data, this ); + }); + }, + toggle: function() { + // Save reference to arguments for access in closure + var a = arguments; + + return this.click(function(e) { + // Figure out which function to execute + this.lastToggle = 0 == this.lastToggle ? 1 : 0; + + // Make sure that clicks stop + e.preventDefault(); + + // and execute the function + return a[this.lastToggle].apply( this, [e] ) || false; + }); + }, + hover: function(f,g) { + + // A private function for handling mouse 'hovering' + function handleHover(e) { + // Check if mouse(over|out) are still within the same parent element + var p = e.relatedTarget; + + // Traverse up the tree + while ( p && p != this ) try { p = p.parentNode } catch(e) { p = this; }; + + // If we actually just moused on to a sub-element, ignore it + if ( p == this ) return false; + + // Execute the right function + return (e.type == "mouseover" ? 
f : g).apply(this, [e]); + } + + // Bind the function to the two event listeners + return this.mouseover(handleHover).mouseout(handleHover); + }, + ready: function(f) { + // If the DOM is already ready + if ( jQuery.isReady ) + // Execute the function immediately + f.apply( document, [jQuery] ); + + // Otherwise, remember the function for later + else + // Add the function to the wait list + jQuery.readyList.push( function() { return f.apply(this, [jQuery]) } ); + + return this; + } +}); + +jQuery.extend({ + /* + * All the code that makes DOM Ready work nicely. + */ + isReady: false, + readyList: [], + + // Handle when the DOM is ready + ready: function() { + // Make sure that the DOM is not already loaded + if ( !jQuery.isReady ) { + // Remember that the DOM is ready + jQuery.isReady = true; + + // If there are functions bound, to execute + if ( jQuery.readyList ) { + // Execute all of them + jQuery.each( jQuery.readyList, function(){ + this.apply( document ); + }); + + // Reset the list of functions + jQuery.readyList = null; + } + // Remove event listener to avoid memory leak + if ( jQuery.browser.mozilla || jQuery.browser.opera ) + document.removeEventListener( "DOMContentLoaded", jQuery.ready, false ); + + // Remove script element used by IE hack + if( !window.frames.length ) // don't remove if frames are present (#1187) + jQuery(window).load(function(){ jQuery("#__ie_init").remove(); }); + } + } +}); + +new function(){ + + jQuery.each( ("blur,focus,load,resize,scroll,unload,click,dblclick," + + "mousedown,mouseup,mousemove,mouseover,mouseout,change,select," + + "submit,keydown,keypress,keyup,error").split(","), function(i,o){ + + // Handle event binding + jQuery.fn[o] = function(f){ + return f ? 
this.bind(o, f) : this.trigger(o); + }; + + }); + + // If Mozilla is used + if ( jQuery.browser.mozilla || jQuery.browser.opera ) + // Use the handy event callback + document.addEventListener( "DOMContentLoaded", jQuery.ready, false ); + + // If IE is used, use the excellent hack by Matthias Miller + // http://www.outofhanwell.com/blog/index.php?title=the_window_onload_problem_revisited + else if ( jQuery.browser.msie ) { + + // Only works if you document.write() it + document.write("<\/script>"); + + // Use the defer script hack + var script = document.getElementById("__ie_init"); + + // script does not exist if jQuery is loaded dynamically + if ( script ) + script.onreadystatechange = function() { + if ( this.readyState != "complete" ) return; + jQuery.ready(); + }; + + // Clear from memory + script = null; + + // If Safari is used + } else if ( jQuery.browser.safari ) + // Continually check to see if the document.readyState is valid + jQuery.safariTimer = setInterval(function(){ + // loaded and complete are both valid states + if ( document.readyState == "loaded" || + document.readyState == "complete" ) { + + // If either one are found, remove the timer + clearInterval( jQuery.safariTimer ); + jQuery.safariTimer = null; + + // and execute any waiting functions + jQuery.ready(); + } + }, 10); + + // A fallback to window.onload, that will always work + jQuery.event.add( window, "load", jQuery.ready ); + +}; + +// Clean up after IE to avoid memory leaks +if (jQuery.browser.msie) + jQuery(window).one("unload", function() { + var global = jQuery.event.global; + for ( var type in global ) { + var els = global[type], i = els.length; + if ( i && type != 'unload' ) + do + els[i-1] && jQuery.event.remove(els[i-1], type); + while (--i); + } + }); +jQuery.fn.extend({ + loadIfModified: function( url, params, callback ) { + this.load( url, params, callback, 1 ); + }, + load: function( url, params, callback, ifModified ) { + if ( jQuery.isFunction( url ) ) + return 
this.bind("load", url); + + callback = callback || function(){}; + + // Default to a GET request + var type = "GET"; + + // If the second parameter was provided + if ( params ) + // If it's a function + if ( jQuery.isFunction( params ) ) { + // We assume that it's the callback + callback = params; + params = null; + + // Otherwise, build a param string + } else { + params = jQuery.param( params ); + type = "POST"; + } + + var self = this; + + // Request the remote document + jQuery.ajax({ + url: url, + type: type, + data: params, + ifModified: ifModified, + complete: function(res, status){ + if ( status == "success" || !ifModified && status == "notmodified" ) + // Inject the HTML into all the matched elements + self.attr("innerHTML", res.responseText) + // Execute all the scripts inside of the newly-injected HTML + .evalScripts() + // Execute callback + .each( callback, [res.responseText, status, res] ); + else + callback.apply( self, [res.responseText, status, res] ); + } + }); + return this; + }, + serialize: function() { + return jQuery.param( this ); + }, + evalScripts: function() { + return this.find("script").each(function(){ + if ( this.src ) + jQuery.getScript( this.src ); + else + jQuery.globalEval( this.text || this.textContent || this.innerHTML || "" ); + }).end(); + } + +}); + +// Attach a bunch of functions for handling common AJAX events + +jQuery.each( "ajaxStart,ajaxStop,ajaxComplete,ajaxError,ajaxSuccess,ajaxSend".split(","), function(i,o){ + jQuery.fn[o] = function(f){ + return this.bind(o, f); + }; +}); + +jQuery.extend({ + get: function( url, data, callback, type, ifModified ) { + // shift arguments if data argument was ommited + if ( jQuery.isFunction( data ) ) { + callback = data; + data = null; + } + + return jQuery.ajax({ + type: "GET", + url: url, + data: data, + success: callback, + dataType: type, + ifModified: ifModified + }); + }, + getIfModified: function( url, data, callback, type ) { + return jQuery.get(url, data, callback, type, 1); 
+ }, + getScript: function( url, callback ) { + return jQuery.get(url, null, callback, "script"); + }, + getJSON: function( url, data, callback ) { + return jQuery.get(url, data, callback, "json"); + }, + post: function( url, data, callback, type ) { + if ( jQuery.isFunction( data ) ) { + callback = data; + data = {}; + } + + return jQuery.ajax({ + type: "POST", + url: url, + data: data, + success: callback, + dataType: type + }); + }, + ajaxTimeout: function( timeout ) { + jQuery.ajaxSettings.timeout = timeout; + }, + ajaxSetup: function( settings ) { + jQuery.extend( jQuery.ajaxSettings, settings ); + }, + + ajaxSettings: { + global: true, + type: "GET", + timeout: 0, + contentType: "application/x-www-form-urlencoded", + processData: true, + async: true, + data: null + }, + + // Last-Modified header cache for next request + lastModified: {}, + ajax: function( s ) { + // TODO introduce global settings, allowing the client to modify them for all requests, not only timeout + s = jQuery.extend({}, jQuery.ajaxSettings, s); + + // if data available + if ( s.data ) { + // convert data if not already a string + if (s.processData && typeof s.data != "string") + s.data = jQuery.param(s.data); + // append data to url for get requests + if( s.type.toLowerCase() == "get" ) { + // "?" + data or "&" + data (in case there are already params) + s.url += ((s.url.indexOf("?") > -1) ? "&" : "?") + s.data; + // IE likes to send both get and post data, prevent this + s.data = null; + } + } + + // Watch for a new set of requests + if ( s.global && ! jQuery.active++ ) + jQuery.event.trigger( "ajaxStart" ); + + var requestDone = false; + + // Create the request object; Microsoft failed to properly + // implement the XMLHttpRequest in IE7, so we use the ActiveXObject when it is available + var xml = window.ActiveXObject ? 
new ActiveXObject("Microsoft.XMLHTTP") : new XMLHttpRequest(); + + // Open the socket + xml.open(s.type, s.url, s.async); + + // Set the correct header, if data is being sent + if ( s.data ) + xml.setRequestHeader("Content-Type", s.contentType); + + // Set the If-Modified-Since header, if ifModified mode. + if ( s.ifModified ) + xml.setRequestHeader("If-Modified-Since", + jQuery.lastModified[s.url] || "Thu, 01 Jan 1970 00:00:00 GMT" ); + + // Set header so the called script knows that it's an XMLHttpRequest + xml.setRequestHeader("X-Requested-With", "XMLHttpRequest"); + + // Allow custom headers/mimetypes + if( s.beforeSend ) + s.beforeSend(xml); + + if ( s.global ) + jQuery.event.trigger("ajaxSend", [xml, s]); + + // Wait for a response to come back + var onreadystatechange = function(isTimeout){ + // The transfer is complete and the data is available, or the request timed out + if ( xml && (xml.readyState == 4 || isTimeout == "timeout") ) { + requestDone = true; + + // clear poll interval + if (ival) { + clearInterval(ival); + ival = null; + } + + var status; + try { + status = jQuery.httpSuccess( xml ) && isTimeout != "timeout" ? + s.ifModified && jQuery.httpNotModified( xml, s.url ) ? "notmodified" : "success" : "error"; + // Make sure that the request was successful or notmodified + if ( status != "error" ) { + // Cache Last-Modified header, if ifModified mode. 
+ var modRes; + try { + modRes = xml.getResponseHeader("Last-Modified"); + } catch(e) {} // swallow exception thrown by FF if header is not available + + if ( s.ifModified && modRes ) + jQuery.lastModified[s.url] = modRes; + + // process the data (runs the xml through httpData regardless of callback) + var data = jQuery.httpData( xml, s.dataType ); + + // If a local callback was specified, fire it and pass it the data + if ( s.success ) + s.success( data, status ); + + // Fire the global callback + if( s.global ) + jQuery.event.trigger( "ajaxSuccess", [xml, s] ); + } else + jQuery.handleError(s, xml, status); + } catch(e) { + status = "error"; + jQuery.handleError(s, xml, status, e); + } + + // The request was completed + if( s.global ) + jQuery.event.trigger( "ajaxComplete", [xml, s] ); + + // Handle the global AJAX counter + if ( s.global && ! --jQuery.active ) + jQuery.event.trigger( "ajaxStop" ); + + // Process result + if ( s.complete ) + s.complete(xml, status); + + // Stop memory leaks + if(s.async) + xml = null; + } + }; + + // don't attach the handler to the request, just poll it instead + var ival = setInterval(onreadystatechange, 13); + + // Timeout checker + if ( s.timeout > 0 ) + setTimeout(function(){ + // Check to see if the request is still happening + if ( xml ) { + // Cancel the request + xml.abort(); + + if( !requestDone ) + onreadystatechange( "timeout" ); + } + }, s.timeout); + + // Send the data + try { + xml.send(s.data); + } catch(e) { + jQuery.handleError(s, xml, null, e); + } + + // firefox 1.5 doesn't fire statechange for sync requests + if ( !s.async ) + onreadystatechange(); + + // return XMLHttpRequest to allow aborting the request etc. 
+ return xml; + }, + + handleError: function( s, xml, status, e ) { + // If a local callback was specified, fire it + if ( s.error ) s.error( xml, status, e ); + + // Fire the global callback + if ( s.global ) + jQuery.event.trigger( "ajaxError", [xml, s, e] ); + }, + + // Counter for holding the number of active queries + active: 0, + + // Determines if an XMLHttpRequest was successful or not + httpSuccess: function( r ) { + try { + return !r.status && location.protocol == "file:" || + ( r.status >= 200 && r.status < 300 ) || r.status == 304 || + jQuery.browser.safari && r.status == undefined; + } catch(e){} + return false; + }, + + // Determines if an XMLHttpRequest returns NotModified + httpNotModified: function( xml, url ) { + try { + var xmlRes = xml.getResponseHeader("Last-Modified"); + + // Firefox always returns 200. check Last-Modified date + return xml.status == 304 || xmlRes == jQuery.lastModified[url] || + jQuery.browser.safari && xml.status == undefined; + } catch(e){} + return false; + }, + + /* Get the data out of an XMLHttpRequest. + * Return parsed XML if content-type header is "xml" and type is "xml" or omitted, + * otherwise return plain text. + * (String) data - The type of data that you're expecting back, + * (e.g. "xml", "html", "script") + */ + httpData: function( r, type ) { + var ct = r.getResponseHeader("content-type"); + var data = !type && ct && ct.indexOf("xml") >= 0; + data = type == "xml" || data ? r.responseXML : r.responseText; + + // If the type is "script", eval it in global context + if ( type == "script" ) + jQuery.globalEval( data ); + + // Get the JavaScript object, if JSON is used. + if ( type == "json" ) + data = eval("(" + data + ")"); + + // evaluate scripts within html + if ( type == "html" ) + jQuery("
    ").html(data).evalScripts(); + + return data; + }, + + // Serialize an array of form elements or a set of + // key/values into a query string + param: function( a ) { + var s = []; + + // If an array was passed in, assume that it is an array + // of form elements + if ( a.constructor == Array || a.jquery ) + // Serialize the form elements + jQuery.each( a, function(){ + s.push( encodeURIComponent(this.name) + "=" + encodeURIComponent( this.value ) ); + }); + + // Otherwise, assume that it's an object of key/value pairs + else + // Serialize the key/values + for ( var j in a ) + // If the value is an array then the key names need to be repeated + if ( a[j] && a[j].constructor == Array ) + jQuery.each( a[j], function(){ + s.push( encodeURIComponent(j) + "=" + encodeURIComponent( this ) ); + }); + else + s.push( encodeURIComponent(j) + "=" + encodeURIComponent( a[j] ) ); + + // Return the resulting serialization + return s.join("&"); + }, + + // evalulates a script in global context + // not reliable for safari + globalEval: function( data ) { + if ( window.execScript ) + window.execScript( data ); + else if ( jQuery.browser.safari ) + // safari doesn't provide a synchronous global eval + window.setTimeout( data, 0 ); + else + eval.call( window, data ); + } + +}); +jQuery.fn.extend({ + + show: function(speed,callback){ + return speed ? + this.animate({ + height: "show", width: "show", opacity: "show" + }, speed, callback) : + + this.filter(":hidden").each(function(){ + this.style.display = this.oldblock ? this.oldblock : ""; + if ( jQuery.css(this,"display") == "none" ) + this.style.display = "block"; + }).end(); + }, + + hide: function(speed,callback){ + return speed ? 
+ this.animate({ + height: "hide", width: "hide", opacity: "hide" + }, speed, callback) : + + this.filter(":visible").each(function(){ + this.oldblock = this.oldblock || jQuery.css(this,"display"); + if ( this.oldblock == "none" ) + this.oldblock = "block"; + this.style.display = "none"; + }).end(); + }, + + // Save the old toggle function + _toggle: jQuery.fn.toggle, + toggle: function( fn, fn2 ){ + return jQuery.isFunction(fn) && jQuery.isFunction(fn2) ? + this._toggle( fn, fn2 ) : + fn ? + this.animate({ + height: "toggle", width: "toggle", opacity: "toggle" + }, fn, fn2) : + this.each(function(){ + jQuery(this)[ jQuery(this).is(":hidden") ? "show" : "hide" ](); + }); + }, + slideDown: function(speed,callback){ + return this.animate({height: "show"}, speed, callback); + }, + slideUp: function(speed,callback){ + return this.animate({height: "hide"}, speed, callback); + }, + slideToggle: function(speed, callback){ + return this.animate({height: "toggle"}, speed, callback); + }, + fadeIn: function(speed, callback){ + return this.animate({opacity: "show"}, speed, callback); + }, + fadeOut: function(speed, callback){ + return this.animate({opacity: "hide"}, speed, callback); + }, + fadeTo: function(speed,to,callback){ + return this.animate({opacity: to}, speed, callback); + }, + animate: function( prop, speed, easing, callback ) { + return this.queue(function(){ + var hidden = jQuery(this).is(":hidden"), + opt = jQuery.speed(speed, easing, callback), + self = this; + + for ( var p in prop ) { + if ( prop[p] == "hide" && hidden || prop[p] == "show" && !hidden ) + return jQuery.isFunction(opt.complete) && opt.complete.apply(this); + + if ( p == "height" || p == "width" ) { + // Store display property + opt.display = jQuery.css(this, "display"); + + // Make sure that nothing sneaks out + opt.overflow = this.style.overflow; + } + } + + if ( opt.overflow != null ) + this.style.overflow = "hidden"; + + this.curAnim = jQuery.extend({}, prop); + + jQuery.each( prop, 
function(name, val){ + var e = new jQuery.fx( self, opt, name ); + if ( val.constructor == Number ) + e.custom( e.cur(), val ); + else + e[ val == "toggle" ? hidden ? "show" : "hide" : val ]( prop ); + }); + }); + }, + queue: function(type,fn){ + if ( !fn ) { + fn = type; + type = "fx"; + } + + return this.each(function(){ + if ( !this.queue ) + this.queue = {}; + + if ( !this.queue[type] ) + this.queue[type] = []; + + this.queue[type].push( fn ); + + if ( this.queue[type].length == 1 ) + fn.apply(this); + }); + } + +}); + +jQuery.extend({ + + speed: function(speed, easing, fn) { + var opt = speed && speed.constructor == Object ? speed : { + complete: fn || !fn && easing || + jQuery.isFunction( speed ) && speed, + duration: speed, + easing: fn && easing || easing && easing.constructor != Function && easing || (jQuery.easing.swing ? "swing" : "linear") + }; + + opt.duration = (opt.duration && opt.duration.constructor == Number ? + opt.duration : + { slow: 600, fast: 200 }[opt.duration]) || 400; + + // Queueing + opt.old = opt.complete; + opt.complete = function(){ + jQuery.dequeue(this, "fx"); + if ( jQuery.isFunction( opt.old ) ) + opt.old.apply( this ); + }; + + return opt; + }, + + easing: { + linear: function( p, n, firstNum, diff ) { + return firstNum + diff * p; + }, + swing: function( p, n, firstNum, diff ) { + return ((-Math.cos(p*Math.PI)/2) + 0.5) * diff + firstNum; + } + }, + + queue: {}, + + dequeue: function(elem,type){ + type = type || "fx"; + + if ( elem.queue && elem.queue[type] ) { + // Remove self + elem.queue[type].shift(); + + // Get next function + var f = elem.queue[type][0]; + + if ( f ) f.apply( elem ); + } + }, + + timers: [], + + /* + * I originally wrote fx() as a clone of moo.fx and in the process + * of making it small in size the code became illegible to sane + * people. You've been warned. 
+ */ + + fx: function( elem, options, prop ){ + + var z = this; + + // The styles + var y = elem.style; + + // Simple function for setting a style value + z.a = function(){ + if ( options.step ) + options.step.apply( elem, [ z.now ] ); + + if ( prop == "opacity" ) + jQuery.attr(y, "opacity", z.now); // Let attr handle opacity + else { + y[prop] = parseInt(z.now) + "px"; + y.display = "block"; // Set display property to block for animation + } + }; + + // Figure out the maximum number to run to + z.max = function(){ + return parseFloat( jQuery.css(elem,prop) ); + }; + + // Get the current size + z.cur = function(){ + var r = parseFloat( jQuery.curCSS(elem, prop) ); + return r && r > -10000 ? r : z.max(); + }; + + // Start an animation from one number to another + z.custom = function(from,to){ + z.startTime = (new Date()).getTime(); + z.now = from; + z.a(); + + jQuery.timers.push(function(){ + return z.step(from, to); + }); + + if ( jQuery.timers.length == 1 ) { + var timer = setInterval(function(){ + var timers = jQuery.timers; + + for ( var i = 0; i < timers.length; i++ ) + if ( !timers[i]() ) + timers.splice(i--, 1); + + if ( !timers.length ) + clearInterval( timer ); + }, 13); + } + }; + + // Simple 'show' function + z.show = function(){ + if ( !elem.orig ) elem.orig = {}; + + // Remember where we started, so that we can go back to it later + elem.orig[prop] = jQuery.attr( elem.style, prop ); + + options.show = true; + + // Begin the animation + z.custom(0, this.cur()); + + // Make sure that we start at a small width/height to avoid any + // flash of content + if ( prop != "opacity" ) + y[prop] = "1px"; + + // Start by showing the element + jQuery(elem).show(); + }; + + // Simple 'hide' function + z.hide = function(){ + if ( !elem.orig ) elem.orig = {}; + + // Remember where we started, so that we can go back to it later + elem.orig[prop] = jQuery.attr( elem.style, prop ); + + options.hide = true; + + // Begin the animation + z.custom(this.cur(), 0); + }; + + // 
Each step of an animation + z.step = function(firstNum, lastNum){ + var t = (new Date()).getTime(); + + if (t > options.duration + z.startTime) { + z.now = lastNum; + z.a(); + + if (elem.curAnim) elem.curAnim[ prop ] = true; + + var done = true; + for ( var i in elem.curAnim ) + if ( elem.curAnim[i] !== true ) + done = false; + + if ( done ) { + if ( options.display != null ) { + // Reset the overflow + y.overflow = options.overflow; + + // Reset the display + y.display = options.display; + if ( jQuery.css(elem, "display") == "none" ) + y.display = "block"; + } + + // Hide the element if the "hide" operation was done + if ( options.hide ) + y.display = "none"; + + // Reset the properties, if the item has been hidden or shown + if ( options.hide || options.show ) + for ( var p in elem.curAnim ) + jQuery.attr(y, p, elem.orig[p]); + } + + // If a callback was provided, execute it + if ( done && jQuery.isFunction( options.complete ) ) + // Execute the complete function + options.complete.apply( elem ); + + return false; + } else { + var n = t - this.startTime; + // Figure out where in the animation we are and set the number + var p = n / options.duration; + + // Perform the easing function, defaults to swing + z.now = jQuery.easing[options.easing](p, n, firstNum, (lastNum-firstNum), options.duration); + + // Perform the next step of the animation + z.a(); + } + + return true; + }; + + } +}); +} Added: doctools/trunk/sphinx/style/minus.png ============================================================================== Binary file. No diff available. Added: doctools/trunk/sphinx/style/nocomment.png ============================================================================== Binary file. No diff available. Added: doctools/trunk/sphinx/style/plus.png ============================================================================== Binary file. No diff available. 
Added: doctools/trunk/sphinx/style/preview.png ============================================================================== Binary file. No diff available. Added: doctools/trunk/sphinx/style/rightsidebar.css ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/style/rightsidebar.css Mon Jul 23 11:02:25 2007 @@ -0,0 +1,16 @@ +/** + * Python Doc Design -- Right Side Bar Overrides + */ + + +div.sidebar { + float: right; +} + +div.bodywrapper { + margin: 0 230px 0 0; +} + +div.inlinecomments { + right: 250px; +} Added: doctools/trunk/sphinx/style/searchtools.js ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/style/searchtools.js Mon Jul 23 11:02:25 2007 @@ -0,0 +1,428 @@ +/** + * helper function to return a node containing the + * search summary for a given text. keywords is a list + * of stemmed words, hlwords is the list of normal, unstemmed + * words. the first one is used to find the occurance, the + * latter for highlighting it. + */ +jQuery.makeSearchSummary = function(text, keywords, hlwords) { + var textLower = text.toLowerCase(); + var start = 0; + $.each(keywords, function() { + var i = textLower.indexOf(this.toLowerCase()); + if (i > -1) { + start = i; + } + }); + start = Math.max(start - 120, 0); + var excerpt = ((start > 0) ? '...' : '') + + $.trim(text.substr(start, 240)) + + ((start + 240 - text.length) ? '...' : ''); + var rv = $('
    ').text(excerpt); + $.each(hlwords, function() { + rv = rv.highlightText(this, 'highlight'); + }); + return rv; +} + +/** + * Porter Stemmer + */ +var PorterStemmer = function() { + + var step2list = { + ational: 'ate', + tional: 'tion', + enci: 'ence', + anci: 'ance', + izer: 'ize', + bli: 'ble', + alli: 'al', + entli: 'ent', + eli: 'e', + ousli: 'ous', + ization: 'ize', + ation: 'ate', + ator: 'ate', + alism: 'al', + iveness: 'ive', + fulness: 'ful', + ousness: 'ous', + aliti: 'al', + iviti: 'ive', + biliti: 'ble', + logi: 'log' + }; + + var step3list = { + icate: 'ic', + ative: '', + alize: 'al', + iciti: 'ic', + ical: 'ic', + ful: '', + ness: '' + }; + + var c = "[^aeiou]"; // consonant + var v = "[aeiouy]"; // vowel + var C = c + "[^aeiouy]*"; // consonant sequence + var V = v + "[aeiou]*"; // vowel sequence + + var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0 + var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 + var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 + var s_v = "^(" + C + ")?" 
+ v; // vowel in stem + + this.stemWord = function (w) { + var stem; + var suffix; + var firstch; + var origword = w; + + if (w.length < 3) { + return w; + } + + var re; + var re2; + var re3; + var re4; + + firstch = w.substr(0,1); + if (firstch == "y") { + w = firstch.toUpperCase() + w.substr(1); + } + + // Step 1a + re = /^(.+?)(ss|i)es$/; + re2 = /^(.+?)([^s])s$/; + + if (re.test(w)) { + w = w.replace(re,"$1$2"); + } + else if (re2.test(w)) { + w = w.replace(re2,"$1$2"); + } + + // Step 1b + re = /^(.+?)eed$/; + re2 = /^(.+?)(ed|ing)$/; + if (re.test(w)) { + var fp = re.exec(w); + re = new RegExp(mgr0); + if (re.test(fp[1])) { + re = /.$/; + w = w.replace(re,""); + } + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = new RegExp(s_v); + if (re2.test(stem)) { + w = stem; + re2 = /(at|bl|iz)$/; + re3 = new RegExp("([^aeiouylsz])\\1$"); + re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re2.test(w)) { + w = w + "e"; + } + else if (re3.test(w)) { + re = /.$/; w = w.replace(re,""); + } + else if (re4.test(w)) { + w = w + "e"; + } + } + } + + // Step 1c + re = /^(.+?)y$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(s_v); + if (re.test(stem)) { w = stem + "i"; } + } + + // Step 2 + re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) { + w = stem + step2list[suffix]; + } + } + + // Step 3 + re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) { + w = stem + step3list[suffix]; + } + } + + // Step 4 + re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + re2 = /^(.+?)(s|t)(ion)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = 
fp[1]; + re = new RegExp(mgr1); + if (re.test(stem)) { + w = stem; + } + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = new RegExp(mgr1); + if (re2.test(stem)) { + w = stem; + } + } + + // Step 5 + re = /^(.+?)e$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + re2 = new RegExp(meq1); + re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) { + w = stem; + } + } + re = /ll$/; + re2 = new RegExp(mgr1); + if (re.test(w) && re2.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + + // and turn initial Y back to y + if (firstch == "y") { + w = firstch.toLowerCase() + w.substr(1); + } + return w; + } +} + + + +/** + * Search Module + */ +var Search = { + + init : function() { + var params = $.getQueryParameters(); + if (params.q) { + var query = params.q[0]; + var areas = params.area || []; + + // auto default + if (areas.length == 1 && areas[0] == 'default') { + areas = ['tutorial', 'modules', 'install', 'distutils']; + } + + // update input fields + $('input[@type="checkbox"]').each(function() { + this.checked = $.contains(areas, this.value); + }); + $('input[@name="q"]')[0].value = query; + + this.performSearch(query, areas); + } + }, + + /** + * perform a search for something + */ + performSearch : function(query, areas) { + // create the required interface elements + var out = $('#search-results'); + var title = $('

    Searching

    ').appendTo(out); + var dots = $('').appendTo(title); + var status = $('

    ').appendTo(out); + var output = $('
    + + + + + + {%- for user, privileges in users|dictsort %} + + + + + + {%- endfor %} +
    UsernamePrivilegesDelete
    {{ user|e }}
    +
    + + + +
    + +{% endblock %} Added: doctools/trunk/sphinx/templates/admin/moderate_comments.html ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/templates/admin/moderate_comments.html Mon Jul 23 11:02:25 2007 @@ -0,0 +1,104 @@ +{% extends "admin/layout.html" %} +{% block admin_body %} +

    Moderate Comments

    +

    + From here you can delete and edit comments. If you want to be + informed about new comments you can use the feed provided. +

    +
    + {% if ask_confirmation %} +
    +

    Confirm

    +
    + {% trans amount=to_delete|length %} + Do you really want to delete one comment? + {% pluralize %} + Do you really want to delete {{ amount }} comments? + {% endtrans %} +
    +
    + + +
    +
    + {% endif %} + {% if edit_detail %} +
    +

    Edit Comment

    +
    + +
    +
    Name
    +
    +
    E-Mail
    +
    +
    Comment Title
    +
    +
    + +
    +
    + + + + +
    +
    + {% endif %} + {%- macro render_row(comment, include_page=false) %} + + + {{ comment.title|e }} + by {{ comment.author|e }}{% if include_page + %} on {{ comment.associated_page }} + + {{ comment.pub_date|datetimeformat }} + + edit + + + + {%- endmacro %} + + {% if pages_with_comments %} + + + + {%- for comment in recent_comments %} + {{- render_row(comment, true) }} + {%- endfor %} + {%- for page in pages_with_comments %} + + + + {%- if page.has_details %} + {%- for comment in page.comments %} + {{- render_row(comment) }} + {%- endfor %} + {%- endif %} + {% endfor %} + {%- else %} + + {%- endif %} +
    + Recent Comments + (feed) +
    + {{ page.title|e }} + (view | + feed) +
    no comments submitted so far
    +
    + + +
    +
    +{% endblock %} Added: doctools/trunk/sphinx/templates/commentform.html ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/templates/commentform.html Mon Jul 23 11:02:25 2007 @@ -0,0 +1,26 @@ +{% extends "layout.html" %} +{% block body %} +
    +

    New Comment

    + {{ form }} +
    +

    + You can format a comment using the + following syntax elements provided: +

    +

    + `code` / ``code too`` / **strong** / + *emphasized* / !!!important!!! / + [[link_target Link Title]] / + [[link_target_only]] / <code>code block with + syntax highlighting</code> / <quote>some + quoted text</quote>. +

    +

    + HTML is not supported, relative link targets are treated as + quicklinks and code blocks that start with ">>>" are + highlighted as interactive python sessions. +

    +
    +
    +{% endblock %} Added: doctools/trunk/sphinx/templates/comments.html ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/templates/comments.html Mon Jul 23 11:02:25 2007 @@ -0,0 +1,22 @@ +
    +

    Comments

    + {% for comment in comments %} +
    +

    {{ comment.title|e }} + {%- if comment.associated_name %} — on + {{- + comment.associated_name }}{% endif %}

    +
    {{ comment.parsed_comment_body }}
    +
    by {{ comment.author|e }}, written on + {{ comment.pub_date|datetimeformat }} | + #
    +
    + {% else %} +
    + There are no user contributed notes for this page. +
    + {% endfor %} + +
    Added: doctools/trunk/sphinx/templates/edit.html ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/templates/edit.html Mon Jul 23 11:02:25 2007 @@ -0,0 +1,53 @@ +{% extends "layout.html" %} +{% if rendered %}{% set title = "Suggest changes - Preview" %} +{% else %}{% set title = "Suggest changes" %}{% endif %} +{% block body %} +{% if rendered %} +

    Preview

    +
    +
    + {{ rendered }} +
    +
    + {% if warnings %} +

    Warnings

    +

    You must fix these warnings before you can submit your patch.

    +
      + {% for warning in warnings %} +
    • {{ warning }}
    • + {% endfor %} +
    + {% endif %} +{% endif %} +

    Suggest changes for this page

    +{% if not rendered %} +

    Here you can edit the source of “{{ doctitle|striptags }}” and + submit the results as a patch to the Python documentation team. If you want + to know more about reST, the markup language used, read + Documenting Python.

    +{% endif %} +
    +
    + + {# XXX: shortcuts to make the edit area larger/smaller #} + {% if form_error %} +
    {{ form_error|e }}
    + {% endif %} +
    +
    Name:
    +
    +
    E-mail Address:
    +
    +
    Summary of the change:
    +
    +
    + +
    + + + + +
    +
    +
    +{% endblock %} Added: doctools/trunk/sphinx/templates/genindex.html ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/templates/genindex.html Mon Jul 23 11:02:25 2007 @@ -0,0 +1,46 @@ +{% extends "layout.html" %} +{% set title = 'Index' %} +{% block body %} + +

    Index

    + + {% for key, dummy in genindexentries -%} + {{ key }} {% if not loop.last %}| {% endif %} + {%- endfor %} + +
    + + {% for key, entries in genindexentries %} +

    {{ key }}

    +
    +
    + {%- set breakat = genindexcounts[loop.index0] // 2 %} + {%- set numcols = 1 %} + {%- set numitems = 0 %} + {% for entryname, (links, subitems) in entries %} +
    {%- if links -%} + {{ entryname }} + {%- for link in links[1:] %}, [Link]{% endfor -%} + {%- else -%} + {{ entryname }} + {%- endif -%}
    + {%- if subitems %} +
    + {%- for subentryname, subentrylinks in subitems %} +
    {{ subentryname }} + {%- for link in subentrylinks[1:] %}, [Link]{% endfor -%} +
    + {%- endfor %} +
    + {%- endif -%} + {%- set numitems = numitems + 1 + len(subitems) -%} + {%- if numcols < 2 and numitems > breakat -%} + {%- set numcols = numcols+1 -%} +
    + {%- endif -%} + {% endfor %} +
    + + {% endfor %} + +{% endblock %} Added: doctools/trunk/sphinx/templates/index.html ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/templates/index.html Mon Jul 23 11:02:25 2007 @@ -0,0 +1,67 @@ +{% extends "layout.html" %} +{% set title = 'Overview' %} +{% set current_page_name = 'index' %} +{% set page_links = [ + (pathto('@rss/recent'), 'application/rss+xml', 'Recent Comments') +] %} +{% block body %} +

    Python Documentation

    +

    + Welcome! This is the documentation for Python + {{ release }}{% if last_updated %}, last updated {{ last_updated }}{% endif %}. +

    + +

    Parts of the documentation:

    + + +
    + + + + + + + + + + + +
    + +

    Indices and tables:

    + + +
    + + + + + +
    + +

    Meta information:

    + + +
    + + + + + +
    + +{% endblock %} Added: doctools/trunk/sphinx/templates/inlinecomments.html ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/templates/inlinecomments.html Mon Jul 23 11:02:25 2007 @@ -0,0 +1,36 @@ +{# rendered for inline comments -#} +
    +{%- if mode == 'bottom' %} + {%- if comments -%} + [Read Comments] + {%- else -%} + [Write Comments] + {%- endif %} +{%- else %} +
    + {%- if comments -%} + [{{ comments|length }} Comments] + {%- else -%} + [Write Comment] + {%- endif -%} +
    + {%- if comments %} +
    +

    Comments

    + + {%- for comment in comments %} +
    +

    {{ comment.title|e }}

    +
    {{ comment.parsed_comment_body }}
    +
    by {{ comment.author|e }}, written on + {{ comment.pub_date|datetimeformat }} | + #
    +
    + {%- endfor %} +
    + {%- endif %} +{%- endif %} +
    Added: doctools/trunk/sphinx/templates/keyword_not_found.html ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/templates/keyword_not_found.html Mon Jul 23 11:02:25 2007 @@ -0,0 +1,31 @@ +{% extends "layout.html" %} +{% set title = 'Keyword Not Found' %} +{% block body %} +

    Keyword Not Found

    +

    + The keyword {{ keyword|e }} is not directly associated with + a page. {% if close_matches %}A similarity search returned {{ + close_matches|length }} items that are possible matches. + {% if good_matches_count %}{{ good_matches_count }} of them are really + good matches and emphasized.{% endif %}{% endif %} +

    + {% if close_matches %} +
      + {% for item in close_matches %} + {{ item.title|e }} ({{ + item.type }}) {% if item.description + %} — {{ item.description|e }}{% endif %} + {% endfor %} +
    + {% endif %} +

    + If you want to search the entire Python documentation for the string + "{{ keyword|e }}", then use the search function. +

    +

    + For a quick overview over all documented modules, + click here. +

    +{% endblock %} Added: doctools/trunk/sphinx/templates/layout.html ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/templates/layout.html Mon Jul 23 11:02:25 2007 @@ -0,0 +1,89 @@ +{% if builder != 'htmlhelp' %}{% set titlesuffix = " — Python Documentation" %}{% endif -%} + + + + + {{ title|striptags }}{{ titlesuffix }} + {%- if builder == 'web' %} + + {%- for link, type, title in page_links %} + + {%- endfor %} + {%- else %} + + + {%- endif %} + + + + + + + + + + + {%- if parents %} + + {%- endif %} + {%- if next %} + + {%- endif %} + {%- if prev %} + + {%- endif %} + {% block head %}{% endblock %} + + + +
    +
    + {%- if builder != 'htmlhelp' %} +
    + {%- endif %} +
    + {% block body %}{% endblock %} +
    + {%- if builder != 'htmlhelp' %} +
    + {%- endif %} +
    + {%- if builder != 'htmlhelp' %} + {%- include "sidebar.html" %} + {%- endif %} +
    +
    + + + Added: doctools/trunk/sphinx/templates/modindex.html ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/templates/modindex.html Mon Jul 23 11:02:25 2007 @@ -0,0 +1,45 @@ +{% extends "layout.html" %} +{% set title = 'Global Module Index' %} +{% block body %} + +

    Global Module Index

    +{% if builder == 'web' and freqentries %} +

    Most popular modules:

    +
    + {%- for module in freqentries %} + {{ module.name|e }} + {%- endfor %} +
    +{% endif %} +
    + Show modules only available on these platforms:
    + {% for pl in platforms -%} + + + {% endfor %} + +
    + + + {%- for modname, collapse, cgroup, indent, fname, synops, pform in modindexentries %} + {%- if not modname -%} + + + {%- else -%} + + + + {%- endif -%} + {% endfor %} +
     
    {{ fname }}
    {% if collapse -%} + + {%- endif %}{% if indent %}   {% endif %} + {% if fname %}{% endif -%} + {{ modname|e }} + {%- if fname %}{% endif %} + {%- if pform[0] %} ({{ pform|join(', ') }}){% endif -%} + {{ synops|e }}
    + +{% endblock %} Added: doctools/trunk/sphinx/templates/not_found.html ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/templates/not_found.html Mon Jul 23 11:02:25 2007 @@ -0,0 +1,11 @@ +{% extends "layout.html" %} +{% set title = 'Page Not Found' %} +{% block body %} +

    Page Not Found

    +

    + The page {{ req.path|e }} does not exist on this server. +

    +

    + Click here to return to the index. +

    +{% endblock %} Added: doctools/trunk/sphinx/templates/page.html ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/templates/page.html Mon Jul 23 11:02:25 2007 @@ -0,0 +1,14 @@ +{% extends "layout.html" %} +{% set page_links = [ + (pathto('@rss/' + sourcename), 'application/rss+xml', 'Page Comments'), +] %} +{% block body %} + {% if oldurl %} +
    + Note: You requested an out-of-date URL from this server. + We've tried to redirect you to the new location of this page, but it may not + be the right one. +
    + {% endif %} + {{ body }} +{% endblock %} Added: doctools/trunk/sphinx/templates/search.html ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/templates/search.html Mon Jul 23 11:02:25 2007 @@ -0,0 +1,60 @@ +{% extends "layout.html" %} +{% set title = 'Search Documentation' %} +{% block header %} + +{% endblock %} +{% block body %} +

    Search Documentation

    +

    + From here you can search the Python documentation. Enter your search + words into the box below and click "search". Note that the search + function will automatically search for all of the words. Pages + containing less words won't appear in the result list. +

    +

    + In order to speed up the results you can limit your search by + excluding some of the sections listed below. +

    +
    + + +

    + Sections: +

    +
      + {% for id, name, checked in [ + ('tutorial', 'Python Tutorial', true), + ('modules', 'Library Reference', true), + ('macmodules', 'Macintosh Library Modules', false), + ('extending', 'Extending and Embedding', false), + ('c-api', 'Python/C API', false), + ('install', 'Installing Python Modules', true), + ('distutils', 'Distributing Python Modules', true), + ('documenting', 'Documenting Python', false), + ('whatsnew', 'What\'s new in Python?', false), + ('reference', 'Language Reference', false) + ] -%} +
    • +
    • + {% endfor %} +
    +
    + {% if search_performed %} +

    Search Results

    + {% if not search_results %} +

    Your search did not match any results.

    + {% endif %} + {% endif %} +
    + {% if search_results %} +
      + {% for href, caption, context in search_results %} +
    • {{ caption }} +
      {{ context|e }}
      +
    • + {% endfor %} +
    + {% endif %} +
    +{% endblock %} Added: doctools/trunk/sphinx/templates/settings.html ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/templates/settings.html Mon Jul 23 11:02:25 2007 @@ -0,0 +1,37 @@ +{% extends "layout.html" %} +{% set title = 'Settings' %} +{% set current_page_name = 'settings' %} +{% block body %} +

    Python Documentation Settings

    +

    + Here you can customize how you want to view the Python documentation. + These settings are saved using a cookie on your computer. +

    + +
    +

    Select your stylesheet:

    +

    + {%- for design, (foo, descr) in known_designs %} + +
    + {%- endfor %} +

    + +

    Select how you want to view comments:

    +

    + {%- for meth, descr in comments_methods %} + +
    + {%- endfor %} +

    + +

    +    +    +    +

    +
    + +{% endblock %} Added: doctools/trunk/sphinx/templates/show_source.html ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/templates/show_source.html Mon Jul 23 11:02:25 2007 @@ -0,0 +1,6 @@ +{% extends "layout.html" %} +{% set title = 'Page Source' %} +{% block body %} +

    Page Source

    + {{ highlighted_code }} +{% endblock %} Added: doctools/trunk/sphinx/templates/sidebar.html ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/templates/sidebar.html Mon Jul 23 11:02:25 2007 @@ -0,0 +1,48 @@ +{# this file is included by layout.html #} + Added: doctools/trunk/sphinx/templates/submitted.html ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/templates/submitted.html Mon Jul 23 11:02:25 2007 @@ -0,0 +1,12 @@ +{% extends "layout.html" %} +{% set title = "Patch submitted" %} +{% block head %} + +{% endblock %} +{% block body %} +

    Patch submitted

    +

    Your patch has been submitted to the Python documentation team and will be + processed shortly.

    +

    You will be redirected to the + original documentation page shortly.

    +{% endblock %} \ No newline at end of file Added: doctools/trunk/sphinx/util.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/util.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- +""" + sphinx.util + ~~~~~~~~~~~ + + Utility functions for Sphinx. + + :copyright: 2007 by Georg Brandl. + :license: Python license. +""" + +import os +import sys +import fnmatch +from os import path + + +def relative_uri(base, to): + """Return a relative URL from ``base`` to ``to``.""" + b2 = base.split('/') + t2 = to.split('/') + # remove common segments + for x, y in zip(b2, t2): + if x != y: + break + b2.pop(0) + t2.pop(0) + return '../' * (len(b2)-1) + '/'.join(t2) + + +def ensuredir(path): + """Ensure that a path exists.""" + try: + os.makedirs(path) + except OSError, err: + if not err.errno == 17: + raise + + +def status_iterator(iterable, colorfunc=lambda x: x, stream=sys.stdout): + """Print out each item before yielding it.""" + for item in iterable: + print >>stream, colorfunc(item), + stream.flush() + yield item + print >>stream + + +def get_matching_files(dirname, pattern, exclude=()): + """Get all files matching a pattern in a directory, recursively.""" + # dirname is a normalized absolute path. 
+ dirname = path.normpath(path.abspath(dirname)) + dirlen = len(dirname) + 1 # exclude slash + for root, dirs, files in os.walk(dirname): + dirs.sort() + files.sort() + for sfile in files: + if not fnmatch.fnmatch(sfile, pattern): + continue + qualified_name = path.join(root[dirlen:], sfile) + if qualified_name in exclude: + continue + yield qualified_name + + +def get_category(filename): + """Get the "category" part of a RST filename.""" + parts = filename.split('/', 1) + if len(parts) < 2: + return + return parts[0] + + +def shorten_result(text='', keywords=[], maxlen=240, fuzz=60): + if not text: + text = '' + text_low = text.lower() + beg = -1 + for k in keywords: + i = text_low.find(k.lower()) + if (i > -1 and i < beg) or beg == -1: + beg = i + excerpt_beg = 0 + if beg > fuzz: + for sep in ('.', ':', ';', '='): + eb = text.find(sep, beg - fuzz, beg - 1) + if eb > -1: + eb += 1 + break + else: + eb = beg - fuzz + excerpt_beg = eb + if excerpt_beg < 0: + excerpt_beg = 0 + msg = text[excerpt_beg:beg+maxlen] + if beg > fuzz: + msg = '... ' + msg + if beg < len(text)-maxlen: + msg = msg + ' ...' + return msg + + +class attrdict(dict): + def __getattr__(self, key): + return self[key] + def __setattr__(self, key, val): + self[key] = val + def __delattr__(self, key): + del self[key] Added: doctools/trunk/sphinx/web/__init__.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/web/__init__.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,10 @@ +# -*- coding: utf-8 -*- +""" + sphinx.web + ~~~~~~~~~~ + + A web application to serve the Python docs interactively. + + :copyright: 2007 by Georg Brandl. + :license: Python license. 
+""" Added: doctools/trunk/sphinx/web/admin.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/web/admin.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,258 @@ +# -*- coding: utf-8 -*- +""" + sphinx.web.admin + ~~~~~~~~~~~~~~~~ + + Admin application parts. + + :copyright: 2007 by Georg Brandl, Armin Ronacher. + :license: Python license. +""" + +from .util import render_template +from .wsgiutil import Response, RedirectResponse, NotFound +from .database import Comment + + +class AdminPanel(object): + """ + Provide the admin functionallity. + """ + + def __init__(self, app): + self.app = app + self.env = app.env + self.userdb = app.userdb + + def dispatch(self, req, page): + """ + Dispatch the requests for the current user in the admin panel. + """ + is_logged_in = req.user is not None + if is_logged_in: + privileges = self.userdb.privileges[req.user] + is_master_admin = 'master' in privileges + can_change_password = 'frozenpassword' not in privileges + else: + privileges = set() + can_change_password = is_master_admin = False + + # login and logout + if page == 'login': + return self.do_login(req) + elif not is_logged_in: + return RedirectResponse('@admin/login/') + elif page == 'logout': + return self.do_logout(req) + + # account maintance + elif page == 'change_password' and can_change_password: + return self.do_change_password(req) + elif page == 'manage_users' and is_master_admin: + return self.do_manage_users(req) + + # moderate comments + elif page.split('/')[0] == 'moderate_comments': + return self.do_moderate_comments(req, page[18:]) + + # missing page + elif page != '': + raise NotFound() + return Response(render_template(req, 'admin/index.html', { + 'is_master_admin': is_master_admin, + 'can_change_password': can_change_password + })) + + def do_login(self, req): + """ + Display login form and do the login procedure. 
+ """ + if req.user is not None: + return RedirectResponse('@admin/') + login_failed = False + if req.method == 'POST': + if req.form.get('cancel'): + return RedirectResponse('') + username = req.form.get('username') + password = req.form.get('password') + if self.userdb.check_password(username, password): + req.login(username) + return RedirectResponse('@admin/') + login_failed = True + return Response(render_template(req, 'admin/login.html', { + 'login_failed': login_failed + })) + + def do_logout(self, req): + """ + Log the user out. + """ + req.logout() + return RedirectResponse('admin/login/') + + def do_change_password(self, req): + """ + Allows the user to change his password. + """ + change_failed = change_successful = False + if req.method == 'POST': + if req.form.get('cancel'): + return RedirectResponse('@admin/') + pw = req.form.get('pw1') + if pw and pw == req.form.get('pw2'): + self.userdb.set_password(req.user, pw) + self.userdb.save() + change_successful = True + else: + change_failed = True + return Response(render_template(req, 'admin/change_password.html', { + 'change_failed': change_failed, + 'change_successful': change_successful + })) + + def do_manage_users(self, req): + """ + Manage other user accounts. Requires master privileges. 
+ """ + add_user_mode = False + user_privileges = {} + users = sorted((user, []) for user in self.userdb.users) + to_delete = set() + generated_user = generated_password = None + user_exists = False + + if req.method == 'POST': + for item in req.form.getlist('delete'): + try: + to_delete.add(item) + except ValueError: + pass + for name, item in req.form.iteritems(): + if name.startswith('privileges-'): + user_privileges[name[11:]] = [x.strip() for x + in item.split(',')] + if req.form.get('cancel'): + return RedirectResponse('@admin/') + elif req.form.get('add_user'): + username = req.form.get('username') + if username: + if username in self.userdb.users: + user_exists = username + else: + generated_password = self.userdb.add_user(username) + self.userdb.save() + generated_user = username + else: + add_user_mode = True + elif req.form.get('aborted'): + return RedirectResponse('@admin/manage_users/') + + users = {} + for user in self.userdb.users: + if user not in user_privileges: + users[user] = sorted(self.userdb.privileges[user]) + else: + users[user] = user_privileges[user] + + new_users = users.copy() + for user in to_delete: + new_users.pop(user, None) + + self_destruction = req.user not in new_users or \ + 'master' not in new_users[req.user] + + if req.method == 'POST' and (not to_delete or + (to_delete and req.form.get('confirmed'))) and \ + req.form.get('update'): + old_users = self.userdb.users.copy() + for user in old_users: + if user not in new_users: + del self.userdb.users[user] + else: + self.userdb.privileges[user].clear() + self.userdb.privileges[user].update(new_users[user]) + self.userdb.save() + return RedirectResponse('@admin/manage_users/') + + return Response(render_template(req, 'admin/manage_users.html', { + 'users': users, + 'add_user_mode': add_user_mode, + 'to_delete': to_delete, + 'ask_confirmation': req.method == 'POST' and to_delete \ + and not self_destruction, + 'generated_user': generated_user, + 'generated_password': 
generated_password, + 'self_destruction': self_destruction, + 'user_exists': user_exists + })) + + def do_moderate_comments(self, req, url): + """ + Comment moderation panel. + """ + if url == 'recent_comments': + details_for = None + recent_comments = Comment.get_recent(20) + else: + details_for = url and self.env.get_real_filename(url) or None + recent_comments = None + to_delete = set() + edit_detail = None + + if 'edit' in req.args: + try: + edit_detail = Comment.get(int(req.args['edit'])) + except ValueError: + pass + + if req.method == 'POST': + for item in req.form.getlist('delete'): + try: + to_delete.add(int(item)) + except ValueError: + pass + if req.form.get('cancel'): + return RedirectResponse('@admin/') + elif req.form.get('confirmed'): + for comment_id in to_delete: + try: + Comment.get(comment_id).delete() + except ValueError: + pass + return RedirectResponse(req.path) + elif req.form.get('aborted'): + return RedirectResponse(req.path) + elif req.form.get('edit') and not to_delete: + if 'delete_this' in req.form: + try: + to_delete.add(req.form['delete_this']) + except ValueError: + pass + else: + try: + edit_detail = c = Comment.get(int(req.args['edit'])) + except ValueError: + pass + else: + if req.form.get('view'): + return RedirectResponse(c.url) + c.author = req.form.get('author', '') + c.author_mail = req.form.get('author_mail', '') + c.title = req.form.get('title', '') + c.comment_body = req.form.get('comment_body', '') + c.save() + self.app.cache.pop(edit_detail.associated_page, None) + return RedirectResponse(req.path) + + return Response(render_template(req, 'admin/moderate_comments.html', { + 'pages_with_comments': [{ + 'page_id': page_id, + 'title': page_id, #XXX: get title somehow + 'has_details': details_for == page_id, + 'comments': comments + } for page_id, comments in Comment.get_overview(details_for)], + 'recent_comments': recent_comments, + 'to_delete': to_delete, + 'ask_confirmation': req.method == 'POST' and to_delete, + 
'edit_detail': edit_detail + })) Added: doctools/trunk/sphinx/web/antispam.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/web/antispam.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +""" + sphinx.web.antispam + ~~~~~~~~~~~~~~~~~~~ + + Small module that performs anti spam tests based on the bad content + regex list provided by moin moin. + + :copyright: 2007 by Armin Ronacher. + :license: Python license. +""" +from __future__ import with_statement +import re +import urllib +import time +from os import path + +DOWNLOAD_URL = 'http://moinmaster.wikiwikiweb.de/BadContent?action=raw' +UPDATE_INTERVAL = 60 * 60 * 24 * 7 + + +class AntiSpam(object): + """ + Class that reads a bad content database (flat file that is automatically + updated from the moin moin server) and checks strings against it. + """ + + def __init__(self, bad_content_file): + self.bad_content_file = bad_content_file + lines = None + + if not path.exists(self.bad_content_file): + last_change = 0 + else: + last_change = path.getmtime(self.bad_content_file) + + if last_change + UPDATE_INTERVAL < time.time(): + try: + f = urllib.urlopen(DOWNLOAD_URL) + data = f.read() + except: + pass + else: + lines = [l.strip() for l in data.splitlines() + if not l.startswith('#')] + f = file(bad_content_file, 'w') + f.write('\n'.join(lines)) + last_change = int(time.time()) + + if lines is None: + with file(bad_content_file) as f: + lines = [l.strip() for l in f] + self.rules = [re.compile(rule) for rule in lines if rule] + + def is_spam(self, fields): + for regex in self.rules: + for field in fields: + if regex.search(field) is not None: + return True + return False Added: doctools/trunk/sphinx/web/application.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/web/application.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,790 @@ +# -*- coding: utf-8 -*- 
+""" + sphinx.web.application + ~~~~~~~~~~~~~~~~~~~~~~ + + A simple WSGI application that serves an interactive version + of the python documentation. + + :copyright: 2007 by Georg Brandl, Armin Ronacher. + :license: Python license. +""" +from __future__ import with_statement + +import os +import re +import copy +import time +import heapq +import math +import difflib +import tempfile +import threading +import cPickle as pickle +import cStringIO as StringIO +from os import path +from itertools import groupby +from collections import defaultdict + +from .feed import Feed +from .mail import Email +from .util import render_template, render_simple_template, get_target_uri, \ + blackhole_dict, striptags +from .admin import AdminPanel +from .userdb import UserDatabase +from .oldurls import handle_html_url +from .antispam import AntiSpam +from .database import connect, set_connection, Comment +from .wsgiutil import Request, Response, RedirectResponse, \ + JSONResponse, SharedDataMiddleware, NotFound, get_base_uri + +from ..util import relative_uri, shorten_result +from ..search import SearchFrontend +from ..writer import HTMLWriter +from ..builder import LAST_BUILD_FILENAME, ENV_PICKLE_FILENAME + +from docutils.io import StringOutput +from docutils.utils import Reporter +from docutils.frontend import OptionParser + +_mail_re = re.compile(r'^([a-zA-Z0-9_\.\-])+\@' + r'(([a-zA-Z0-9\-])+\.)+([a-zA-Z0-9]{2,})+$') + +env_lock = threading.Lock() + + +PATCH_MESSAGE = '''\ +A new documentation patch has been submitted. 
+ Author: %(author)s <%(email)s> + Date: %(asctime)s + Page: %(page_id)s + Summary: %(summary)s + +''' + +known_designs = { + 'default': (['default.css', 'pygments.css'], + 'The default design, with the sidebar on the left side.'), + 'rightsidebar': (['default.css', 'rightsidebar.css', 'pygments.css'], + 'Display the sidebar on the right side.'), + 'stickysidebar': (['default.css', 'stickysidebar.css', 'pygments.css'], + '''\ + Display the sidebar on the left and don\'t scroll it + with the content. This can cause parts of the content to + become inaccessible when the table of contents is too long.'''), + 'traditional': (['traditional.css'], + '''\ + A design similar to the old documentation style.'''), +} + +comments_methods = { + 'inline': 'Show all comments inline.', + 'bottom': 'Show all comments at the page bottom.', + 'none': 'Don\'t show comments at all.', +} + + +class MockBuilder(object): + def get_relative_uri(self, from_, to): + return '' + + +NoCache = object() + +def cached(inner): + """ + Response caching system. + """ + def caching_function(self, *args, **kwds): + gen = inner(self, *args, **kwds) + cache_id = gen.next() + if cache_id is NoCache: + response = gen.next() + gen.close() + # this could also return a RedirectResponse... + if isinstance(response, Response): + return response + else: + return Response(response) + try: + text = self.cache[cache_id] + gen.close() + except KeyError: + text = gen.next() + self.cache[cache_id] = text + return Response(text) + return caching_function + + +class DocumentationApplication(object): + """ + Serves the documentation. 
+ """ + + def __init__(self, config): + self.cache = blackhole_dict() if config['debug'] else {} + self.freqmodules = defaultdict(int) + self.last_most_frequent = [] + self.generated_stylesheets = {} + self.config = config + self.data_root = config['data_root_path'] + self.buildfile = path.join(self.data_root, LAST_BUILD_FILENAME) + self.buildmtime = -1 + self.load_env(0) + self.db_con = connect(path.join(self.data_root, 'sphinx.db')) + self.antispam = AntiSpam(path.join(self.data_root, 'bad_content')) + self.userdb = UserDatabase(path.join(self.data_root, 'docusers')) + self.admin_panel = AdminPanel(self) + + + def load_env(self, new_mtime): + env_lock.acquire() + try: + if self.buildmtime == new_mtime: + # happens if another thread already reloaded the env + return + print "* Loading the environment..." + with file(path.join(self.data_root, ENV_PICKLE_FILENAME)) as f: + self.env = pickle.load(f) + with file(path.join(self.data_root, 'globalcontext.pickle')) as f: + self.globalcontext = pickle.load(f) + with file(path.join(self.data_root, 'searchindex.pickle')) as f: + self.search_frontend = SearchFrontend(pickle.load(f)) + self.buildmtime = path.getmtime(self.buildfile) + self.cache.clear() + finally: + env_lock.release() + + + def search(self, req): + """ + Search the database. Currently just a keyword based search. + """ + if not req.args.get('q'): + return RedirectResponse('') + return RedirectResponse('q/%s/' % req.args['q']) + + + def get_page_source(self, page): + """ + Get the reST source of a page. + """ + page_id = self.env.get_real_filename(page) + if page_id is None: + raise NotFound() + filename = path.join(self.data_root, 'sources', page_id)[:-3] + 'txt' + with file(filename) as f: + return page_id, f.read() + + + def show_source(self, req, page): + """ + Show the highlighted source for a given page. 
+ """ + return Response(self.get_page_source(page)[1], mimetype='text/plain') + + + def suggest_changes(self, req, page): + """ + Show a "suggest changes" form. + """ + page_id, contents = self.get_page_source(page) + + return Response(render_template(req, 'edit.html', self.globalcontext, dict( + contents=contents, + pagename=page, + doctitle=self.globalcontext['titles'].get(page_id) or 'this page', + submiturl=relative_uri('/@edit/'+page+'/', '/@submit/'+page), + ))) + + def _generate_preview(self, page_id, contents): + """ + Generate a preview for suggested changes. + """ + handle, pathname = tempfile.mkstemp() + os.write(handle, contents.encode('utf-8')) + os.close(handle) + + warning_stream = StringIO.StringIO() + env2 = copy.deepcopy(self.env) + destination = StringOutput(encoding='utf-8') + writer = HTMLWriter(env2.config) + doctree = env2.read_file(page_id, pathname, save_parsed=False) + doctree = env2.get_and_resolve_doctree(page_id, MockBuilder(), doctree) + doctree.settings = OptionParser(defaults=env2.settings, + components=(writer,)).get_default_values() + doctree.reporter = Reporter(page_id, 2, 4, stream=warning_stream) + output = writer.write(doctree, destination) + writer.assemble_parts() + return writer.parts['fragment'] + + + def submit_changes(self, req, page): + """ + Submit the suggested changes as a patch. + """ + if req.method != 'POST': + # only available via POST + raise NotFound() + if req.form.get('cancel'): + # handle cancel requests directly + return RedirectResponse(page) + # raises NotFound if page doesn't exist + page_id, orig_contents = self.get_page_source(page) + author = req.form.get('name') + email = req.form.get('email') + summary = req.form.get('summary') + contents = req.form.get('contents') + fields = (author, email, summary, contents) + + form_error = None + rendered = None + + if not all(fields): + form_error = 'You have to fill out all fields.' 
+ elif not _mail_re.search(email): + form_error = 'You have to provide a valid e-mail address.' + elif req.form.get('homepage') or self.antispam.is_spam(fields): + form_error = 'Your text contains blocked URLs or words.' + else: + if req.form.get('preview'): + rendered = self._generate_preview(page_id, contents) + + else: + asctime = time.asctime() + contents = contents.splitlines() + orig_contents = orig_contents.splitlines() + diffname = 'suggestion on %s by %s <%s>' % (asctime, author, email) + diff = difflib.unified_diff(orig_contents, contents, n=3, + fromfile=page_id, tofile=diffname, + lineterm='') + diff_text = '\n'.join(diff) + try: + mail = Email( + self.config['patch_mail_from'], 'Python Documentation Patches', + self.config['patch_mail_to'], '', + 'Patch for %s by %s' % (page_id, author), + PATCH_MESSAGE % locals(), + self.config['patch_mail_smtp'], + ) + mail.attachments.add_string('patch.diff', diff_text, 'text/x-diff') + mail.send() + except: + import traceback + traceback.print_exc() + # XXX: how to report? + pass + return Response(render_template(req, 'submitted.html', + self.globalcontext, dict( + backlink=relative_uri('/@submit/'+page+'/', page+'/') + ))) + + return Response(render_template(req, 'edit.html', self.globalcontext, dict( + contents=contents, + author=author, + email=email, + summary=summary, + pagename=page, + form_error=form_error, + rendered=rendered, + submiturl=relative_uri('/@edit/'+page+'/', '/@submit/'+page), + ))) + + + def get_settings_page(self, req): + """ + Handle the settings page. 
+ """ + referer = req.environ.get('HTTP_REFERER') or '' + if referer: + base = get_base_uri(req.environ) + if not referer.startswith(base): + referer = '' + else: + referer = referer[len(base):] + referer = referer.rpartition('?')[0] or referer + + if req.method == 'POST': + if req.form.get('cancel'): + if req.form.get('referer'): + return RedirectResponse(req.form['referer']) + return RedirectResponse('') + new_style = req.form.get('design') + if new_style and new_style in known_designs: + req.session['design'] = new_style + new_comments = req.form.get('comments') + if new_comments and new_comments in comments_methods: + req.session['comments'] = new_comments + if req.form.get('goback') and req.form.get('referer'): + return RedirectResponse(req.form['referer']) + # else display the same page again + referer = '' + + context = { + 'known_designs': sorted(known_designs.iteritems()), + 'comments_methods': comments_methods.items(), + 'curdesign': req.session.get('design') or 'default', + 'curcomments': req.session.get('comments') or 'inline', + 'referer': referer, + } + + return Response(render_template(req, 'settings.html', + self.globalcontext, context)) + + + @cached + def get_module_index(self, req): + """ + Get the module index or redirect to a module from the module index. 
+ """ + most_frequent = heapq.nlargest(30, self.freqmodules.iteritems(), + lambda x: x[1]) + most_frequent = [{ + 'name': x[0], + 'size': 100 + math.log(x[1] or 1) * 20, + 'count': x[1] + } for x in sorted(most_frequent)] + + showpf = None + newpf = req.args.get('pf') + sesspf = req.session.get('pf') + if newpf or sesspf: + yield NoCache + if newpf: + req.session['pf'] = showpf = req.args.getlist('pf') + else: + showpf = sesspf + else: + if most_frequent != self.last_most_frequent: + self.cache.pop('@modindex', None) + yield '@modindex' + + filename = path.join(self.data_root, 'modindex.fpickle') + with open(filename, 'rb') as f: + context = pickle.load(f) + if showpf: + entries = context['modindexentries'] + i = 0 + while i < len(entries): + if entries[i][6]: + for pform in entries[i][6]: + if pform in showpf: + break + else: + del entries[i] + continue + i += 1 + context['freqentries'] = most_frequent + context['showpf'] = showpf or context['platforms'] + self.last_most_frequent = most_frequent + yield render_template(req, 'modindex.html', + self.globalcontext, context) + + def show_comment_form(self, req, page): + """ + Show the "new comment" form. 
+ """ + page_id = self.env.get_real_filename(page) + ajax_mode = req.args.get('mode') == 'ajax' + target = req.args.get('target') + page_comment_mode = not target + + form_error = preview = None + title = req.form.get('title', '').strip() + if 'author' in req.form: + author = req.form['author'] + else: + author = req.session.get('author', '') + if 'author_mail' in req.form: + author_mail = req.form['author_mail'] + else: + author_mail = req.session.get('author_mail', '') + comment_body = req.form.get('comment_body', '') + fields = (title, author, author_mail, comment_body) + + if req.method == 'POST': + if req.form.get('preview'): + preview = Comment(page_id, target, title, author, author_mail, + comment_body) + # 'homepage' is a forbidden field to thwart bots + elif req.form.get('homepage') or self.antispam.is_spam(fields): + form_error = 'Your text contains blocked URLs or words.' + else: + if not all(fields): + form_error = 'You have to fill out all fields.' + elif _mail_re.search(author_mail) is None: + form_error = 'You have to provide a valid e-mail address.' + elif len(comment_body) < 20: + form_error = 'You comment is too short ' \ + '(must have at least 20 characters).' 
+ else: + # '|none' can stay since it doesn't include comments + self.cache.pop(page_id + '|inline', None) + self.cache.pop(page_id + '|bottom', None) + comment = Comment(page_id, target, + title, author, author_mail, + comment_body) + comment.save() + req.session['author'] = author + req.session['author_mail'] = author_mail + if ajax_mode: + return JSONResponse({'posted': True, 'error': False, + 'commentID': comment.comment_id}) + return RedirectResponse(comment.url) + + output = render_template(req, '_commentform.html', { + 'ajax_mode': ajax_mode, + 'preview': preview, + 'suggest_url': '@edit/%s/' % page, + 'comments_form': { + 'target': target, + 'title': title, + 'author': author, + 'author_mail': author_mail, + 'comment_body': comment_body, + 'error': form_error + } + }) + + if ajax_mode: + return JSONResponse({ + 'body': output, + 'error': bool(form_error), + 'posted': False + }) + return Response(render_template(req, 'commentform.html', { + 'form': output + })) + + def _insert_comments(self, req, url, context, mode): + """ + Insert inline comments into a page context. 
+ """ + if 'body' not in context: + return + + comment_url = '@comments/%s/' % url + page_id = self.env.get_real_filename(url) + tx = context['body'] + all_comments = Comment.get_for_page(page_id) + global_comments = [] + for name, comments in groupby(all_comments, lambda x: x.associated_name): + if not name: + global_comments.extend(comments) + continue + comments = list(comments) + if not comments: + continue + tx = re.sub('' % name, + render_template(req, 'inlinecomments.html', { + 'comments': comments, + 'id': name, + 'comment_url': comment_url, + 'mode': mode}), + tx) + if mode == 'bottom': + global_comments.extend(comments) + if mode == 'inline': + # replace all markers for items without comments + tx = re.sub('', + (lambda match: + render_template(req, 'inlinecomments.html', { + 'id': match.group(1), + 'mode': 'inline', + 'comment_url': comment_url + },)), + tx) + tx += render_template(req, 'comments.html', { + 'comments': global_comments, + 'comment_url': comment_url + }) + context['body'] = tx + + + @cached + def get_page(self, req, url): + """ + Show the requested documentation page or raise an + `NotFound` exception to display a page with close matches. + """ + page_id = self.env.get_real_filename(url) + if page_id is None: + raise NotFound(show_keyword_matches=True) + # increment view count of all modules on that page + for modname in self.env.filemodules.get(page_id, ()): + self.freqmodules[modname] += 1 + # comments enabled? + comments = self.env.metadata[page_id].get('comments_enabled', True) + + # how does the user want to view comments? + commentmode = req.session.get('comments', 'inline') if comments else '' + + # show "old URL" message? 
-> no caching possible + oldurl = req.args.get('oldurl') + if oldurl: + yield NoCache + else: + # there must be different cache entries per comment mode + yield page_id + '|' + commentmode + + # cache miss; load the page and render it + filename = path.join(self.data_root, page_id[:-3] + 'fpickle') + with open(filename, 'rb') as f: + context = pickle.load(f) + + # add comments to paqe text + if commentmode != 'none': + self._insert_comments(req, url, context, commentmode) + + yield render_template(req, 'page.html', self.globalcontext, context, + {'oldurl': oldurl}) + + + @cached + def get_special_page(self, req, name): + yield '@'+name + filename = path.join(self.data_root, name + '.fpickle') + with open(filename, 'rb') as f: + context = pickle.load(f) + yield render_template(req, name+'.html', + self.globalcontext, context) + + + def comments_feed(self, req, url): + if url == 'recent': + feed = Feed(req, 'Recent Comments', 'Recent Comments', '') + for comment in Comment.get_recent(): + feed.add_item(comment.title, comment.author, comment.url, + comment.parsed_comment_body, comment.pub_date) + else: + page_id = self.env.get_real_filename(url) + doctitle = striptags(self.globalcontext['titles'].get(page_id, url)) + feed = Feed(req, 'Comments for "%s"' % doctitle, + 'List of comments for the topic "%s"' % doctitle, url) + for comment in Comment.get_for_page(page_id): + feed.add_item(comment.title, comment.author, comment.url, + comment.parsed_comment_body, comment.pub_date) + return Response(feed.generate(), mimetype='application/rss+xml') + + + def get_error_404(self, req): + """ + Show a simple error 404 page. 
+ """ + return Response(render_template(req, 'not_found.html', self.globalcontext)) + + + pretty_type = { + 'data': 'module data', + 'cfunction': 'C function', + 'cmember': 'C member', + 'cmacro': 'C macro', + 'ctype': 'C type', + 'cvar': 'C variable', + } + + def get_keyword_matches(self, req, term=None, avoid_fuzzy=False, + is_error_page=False): + """ + Find keyword matches. If there is an exact match, just redirect: + http://docs.python.org/os.path.exists would automatically + redirect to http://docs.python.org/modules/os.path/#os.path.exists. + Else, show a page with close matches. + + Module references are processed first so that "os.path" is handled as + a module and not as member of os. + """ + if term is None: + term = req.path.strip('/') + + matches = self.env.find_keyword(term, avoid_fuzzy) + + # if avoid_fuzzy is False matches can be None + if matches is None: + return + + if isinstance(matches, tuple): + url = get_target_uri(matches[1]) + if matches[0] != 'module': + url += '#' + matches[2] + return RedirectResponse(url) + else: + # get some close matches + close_matches = [] + good_matches = 0 + for ratio, type, filename, anchorname, desc in matches: + link = get_target_uri(filename) + if type != 'module': + link += '#' + anchorname + good_match = ratio > 0.75 + good_matches += good_match + close_matches.append({ + 'href': relative_uri(req.path, link), + 'title': anchorname, + 'good_match': good_match, + 'type': self.pretty_type.get(type, type), + 'description': desc, + }) + return Response(render_template(req, 'keyword_not_found.html', { + 'close_matches': close_matches, + 'good_matches_count': good_matches, + 'keyword': term + }, self.globalcontext), status=404 if is_error_page else 404) + + + def get_user_stylesheet(self, req): + """ + Stylesheets are exchangeable. Handle them here and + cache them on the server side until server shuts down + and on the client side for 1 hour (not in debug mode). 
+ """ + style = req.session.get('design') + if style not in known_designs: + style = 'default' + + if style in self.generated_stylesheets: + stylesheet = self.generated_stylesheets[style] + else: + stylesheet = [] + for filename in known_designs[style][0]: + with file(path.join(self.data_root, 'style', filename)) as f: + stylesheet.append(f.read()) + stylesheet = '\n'.join(stylesheet) + if not self.config.get('debug'): + self.generated_stylesheets[style] = stylesheet + + if req.args.get('admin') == 'yes': + with file(path.join(self.data_root, 'style', 'admin.css')) as f: + stylesheet += '\n' + f.read() + + # XXX: add timestamp based http caching + return Response(stylesheet, mimetype='text/css') + + def __call__(self, environ, start_response): + """ + Dispatch requests. + """ + set_connection(self.db_con) + req = Request(environ) + url = req.path.strip('/') or 'index' + + # check if the environment was updated + new_mtime = path.getmtime(self.buildfile) + if self.buildmtime != new_mtime: + self.load_env(new_mtime) + + try: + if req.path == 'favicon.ico': + # TODO: change this to real favicon? + resp = self.get_error_404() + elif not req.path.endswith('/') and req.method == 'GET': + # may be an old URL + if url.endswith('.html'): + resp = handle_html_url(self, url) + else: + # else, require a trailing slash on GET requests + # this ensures nice looking urls and working relative + # links for cached resources. 
+ query = req.environ.get('QUERY_STRING', '') + resp = RedirectResponse(req.path + '/' + (query and '?'+query)) + # index page is special + elif url == 'index': + # presets for settings + if req.args.get('design') and req.args['design'] in known_designs: + req.session['design'] = req.args['design'] + if req.args.get('comments') and req.args['comments'] in comments_methods: + req.session['comments'] = req.args['comments'] + # alias for fuzzy search + if 'q' in req.args: + resp = RedirectResponse('q/%s/' % req.args['q']) + # stylesheet + elif req.args.get('do') == 'stylesheet': + resp = self.get_user_stylesheet(req) + else: + resp = self.get_special_page(req, 'index') + # go to the search page + # XXX: this is currently just a redirect to /q/ which is handled below + elif url == 'search': + resp = self.search(req) + # settings page cannot be cached + elif url == 'settings': + resp = self.get_settings_page(req) + # module index page is special + elif url == 'modindex': + resp = self.get_module_index(req) + # genindex page is special too + elif url == 'genindex': + resp = self.get_special_page(req, 'genindex') + # start the fuzzy search + elif url[:2] == 'q/': + resp = self.get_keyword_matches(req, url[2:]) + # special URLs + elif url[0] == '@': + # source view + if url[:8] == '@source/': + resp = self.show_source(req, url[8:]) + # suggest changes view + elif url[:6] == '@edit/': + resp = self.suggest_changes(req, url[6:]) + # suggest changes submit + elif url[:8] == '@submit/': + resp = self.submit_changes(req, url[8:]) + # show that comment form + elif url[:10] == '@comments/': + resp = self.show_comment_form(req, url[10:]) + # comments RSS feed + elif url[:5] == '@rss/': + resp = self.comments_feed(req, url[5:]) + # dispatch requests to the admin panel + elif url == '@admin' or url[:7] == '@admin/': + resp = self.admin_panel.dispatch(req, url[7:]) + else: + raise NotFound() + # everything else is handled as page or fuzzy search + # if a page does not exist. 
+ else: + resp = self.get_page(req, url) + # views can raise a NotFound exception to show an error page. + # Either a real not found page or a similar matches page. + except NotFound, e: + if e.show_keyword_matches: + resp = self.get_keyword_matches(req, is_error_page=True) + else: + resp = self.get_error_404(req) + return resp(environ, start_response) + + +def _check_superuser(app): + """Check if there is a superuser and create one if necessary.""" + if not app.userdb.users: + print 'Warning: you have no user database or no master "admin" account.' + create = raw_input('Do you want to create an admin account now? [y/n] ') + if not create or create.lower().startswith('y'): + import getpass + print 'Creating "admin" user.' + pw1 = getpass.getpass('Enter password: ') + pw2 = getpass.getpass('Enter password again: ') + if pw1 != pw2: + print 'Error: Passwords don\'t match.' + sys.exit(1) + app.userdb.set_password('admin', pw1) + app.userdb.privileges['admin'].add('master') + app.userdb.save() + + +def setup_app(config, check_superuser=False): + """ + Create the WSGI application based on a configuration dict. + Handled configuration values so far: + + `data_root_path` + the folder containing the documentation data as generated + by sphinx with the web builder. + """ + app = DocumentationApplication(config) + if check_superuser: + _check_superuser(app) + app = SharedDataMiddleware(app, { + '/style': path.join(config['data_root_path'], 'style') + }) + return app Added: doctools/trunk/sphinx/web/database.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/web/database.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,194 @@ +# -*- coding: utf-8 -*- +""" + sphinx.web.database + ~~~~~~~~~~~~~~~~~~~ + + The database connections are thread local. To set the connection + for a thread use the `set_connection` function provided. 
The + `connect` method automatically sets up new tables and returns a + usable connection which is also set as the connection for the + thread that called that function. + + :copyright: 2007 by Georg Brandl, Armin Ronacher. + :license: Python license. +""" +import time +import sqlite3 +from datetime import datetime +from threading import local + +from .markup import markup + + +_thread_local = local() + + +def connect(path): + """Connect and create tables if required. Also assigns + the connection for the current thread.""" + con = sqlite3.connect(path, detect_types=sqlite3.PARSE_DECLTYPES) + con.isolation_level = None + + # create tables that do not exist. + for table in tables: + try: + con.execute('select * from %s limit 1;' % table) + except sqlite3.OperationalError: + con.execute(tables[table]) + + set_connection(con) + return con + + +def get_cursor(): + """Return a new cursor.""" + return _thread_local.connection.cursor() + + +def set_connection(con): + """Call this after thread creation to make this connection + the connection for this thread.""" + _thread_local.connection = con + + +#: tables that we use +tables = { + 'comments': ''' + create table comments ( + comment_id integer primary key, + associated_page varchar(200), + associated_name varchar(200), + title varchar(120), + author varchar(200), + author_mail varchar(250), + comment_body text, + pub_date timestamp + );''' +} + + +class Comment(object): + """ + Represents one comment. 
+ """ + + def __init__(self, associated_page, associated_name, title, author, + author_mail, comment_body, pub_date=None): + self.comment_id = None + self.associated_page = associated_page + self.associated_name = associated_name + self.title = title + if pub_date is None: + pub_date = datetime.utcnow() + self.pub_date = pub_date + self.author = author + self.author_mail = author_mail + self.comment_body = comment_body + + @property + def url(self): + return '%s#comment-%s' % ( + self.associated_page[:-4], + self.comment_id + ) + + @property + def parsed_comment_body(self): + from .util import get_target_uri + from ..util import relative_uri + uri = get_target_uri(self.associated_page) + def make_rel_link(keyword): + return relative_uri(uri, 'q/%s/' % keyword) + return markup(self.comment_body, make_rel_link) + + def save(self): + """ + Save the comment and use the cursor provided. + """ + cur = get_cursor() + args = (self.associated_page, self.associated_name, self.title, + self.author, self.author_mail, self.comment_body, self.pub_date) + if self.comment_id is None: + cur.execute('''insert into comments (associated_page, associated_name, + title, + author, author_mail, + comment_body, pub_date) + values (?, ?, ?, ?, ?, ?, ?)''', args) + self.comment_id = cur.lastrowid + else: + args += (self.comment_id,) + cur.execute('''update comments set associated_page=?, + associated_name=?, + title=?, author=?, + author_mail=?, comment_body=?, + pub_date=? 
where comment_id = ?''', args) + cur.close() + + def delete(self): + cur = get_cursor() + cur.execute('delete from comments where comment_id = ?', + (self.comment_id,)) + cur.close() + + @staticmethod + def _make_comment(row): + rv = Comment(*row[1:]) + rv.comment_id = row[0] + return rv + + @staticmethod + def get(comment_id): + cur = get_cursor() + cur.execute('select * from comments where comment_id = ?', (comment_id,)) + row = cur.fetchone() + if row is None: + raise ValueError('comment not found') + try: + return Comment._make_comment(row) + finally: + cur.close() + + @staticmethod + def get_for_page(associated_page, reverse=False): + cur = get_cursor() + cur.execute('''select * from comments where associated_page = ? + order by associated_name, comment_id %s''' % + ('desc' if reverse else 'asc'), + (associated_page,)) + try: + return [Comment._make_comment(row) for row in cur] + finally: + cur.close() + + @staticmethod + def get_recent(n=10): + cur = get_cursor() + cur.execute('select * from comments order by comment_id desc limit ?', + (n,)) + try: + return [Comment._make_comment(row) for row in cur] + finally: + cur.close() + + @staticmethod + def get_overview(detail_for=None): + cur = get_cursor() + cur.execute('''select distinct associated_page from comments + order by associated_page asc''') + pages = [] + for row in cur: + page_id = row[0] + if page_id == detail_for: + pages.append((page_id, Comment.get_for_page(page_id, True))) + else: + pages.append((page_id, [])) + cur.close() + return pages + + def __repr__(self): + return '' % ( + self.author, + self.associated_page, + self.associated_name, + self.comment_id or 'not saved' + ) Added: doctools/trunk/sphinx/web/feed.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/web/feed.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,78 @@ +# -*- coding: utf-8 -*- +""" + sphinx.web.feed + ~~~~~~~~~~~~~~~ + + Nifty module that generates RSS feeds. 
+ + :copyright: 2007 by Armin Ronacher. + :license: Python license. +""" +import time +from datetime import datetime +from xml.dom.minidom import Document +from email.Utils import formatdate + + +def format_rss_date(date): + """ + Pass it a datetime object to receive the string representation + for RSS date fields. + """ + return formatdate(time.mktime(date.timetuple()) + date.microsecond / 1e6) + + +class Feed(object): + """ + Abstract feed creation class. To generate feeds use one of + the subclasses `RssFeed` or `AtomFeed`. + """ + + def __init__(self, req, title, description, link): + self.req = req + self.title = title + self.description = description + self.link = req.make_external_url(link) + self.items = [] + self._last_update = None + + def add_item(self, title, author, link, description, pub_date): + if self._last_update is None or pub_date > self._last_update: + self._last_update = pub_date + date = pub_date or datetime.utcnow() + self.items.append({ + 'title': title, + 'author': author, + 'link': self.req.make_external_url(link), + 'description': description, + 'pub_date': date + }) + + def generate(self): + return self.generate_document().toxml('utf-8') + + def generate_document(self): + doc = Document() + Element = doc.createElement + Text = doc.createTextNode + + rss = doc.appendChild(Element('rss')) + rss.setAttribute('version', '2.0') + + channel = rss.appendChild(Element('channel')) + for key in ('title', 'description', 'link'): + value = getattr(self, key) + channel.appendChild(Element(key)).appendChild(Text(value)) + date = format_rss_date(self._last_update or datetime.utcnow()) + channel.appendChild(Element('pubDate')).appendChild(Text(date)) + + for item in self.items: + d = Element('item') + for key in ('title', 'author', 'link', 'description'): + d.appendChild(Element(key)).appendChild(Text(item[key])) + pub_date = format_rss_date(item['pub_date']) + d.appendChild(Element('pubDate')).appendChild(Text(pub_date)) + 
d.appendChild(Element('guid')).appendChild(Text(item['link'])) + channel.appendChild(d) + + return doc Added: doctools/trunk/sphinx/web/mail.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/web/mail.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,278 @@ +# -*- coding: utf-8 -*- +""" + sphinx.web.mail + ~~~~~~~~~~~~~~~ + + A simple module for sending e-mails, based on simplemail.py. + + :copyright: 2004-2007 by Gerold Penz. + 2007 by Georg Brandl. + :license: Python license. +""" + +import os.path +import sys +import time +import smtplib +import mimetypes + +from email import Encoders +from email.Header import Header +from email.MIMEText import MIMEText +from email.MIMEMultipart import MIMEMultipart +from email.Utils import formataddr +from email.Utils import formatdate +from email.Message import Message +from email.MIMEAudio import MIMEAudio +from email.MIMEBase import MIMEBase +from email.MIMEImage import MIMEImage + + + +# Exceptions +#---------------------------------------------------------------------- +class SimpleMail_Exception(Exception): + def __str__(self): + return self.__doc__ + +class NoFromAddress_Exception(SimpleMail_Exception): + pass + +class NoToAddress_Exception(SimpleMail_Exception): + pass + +class NoSubject_Exception(SimpleMail_Exception): + pass + +class AttachmentNotFound_Exception(SimpleMail_Exception): + pass + + +class Attachments(object): + def __init__(self): + self._attachments = [] + + def add_filename(self, filename = ''): + self._attachments.append(('file', filename)) + + def add_string(self, filename, text, mimetype): + self._attachments.append(('string', (filename, text, mimetype))) + + def count(self): + return len(self._attachments) + + def get_list(self): + return self._attachments + + +class Recipients(object): + def __init__(self): + self._recipients = [] + + def add(self, address, caption = ''): + self._recipients.append(formataddr((caption, address))) + + 
def count(self): + return len(self._recipients) + + def __repr__(self): + return str(self._recipients) + + def get_list(self): + return self._recipients + + +class CCRecipients(Recipients): + pass + + +class BCCRecipients(Recipients): + pass + + +class Email(object): + + def __init__( + self, + from_address = "", + from_caption = "", + to_address = "", + to_caption = "", + subject = "", + message = "", + smtp_server = "localhost", + smtp_user = "", + smtp_password = "", + user_agent = "", + reply_to_address = "", + reply_to_caption = "", + use_tls = False, + ): + """ + Initialize the email object + from_address = the email address of the sender + from_caption = the caption (name) of the sender + to_address = the email address of the recipient + to_caption = the caption (name) of the recipient + subject = the subject of the email message + message = the body text of the email message + smtp_server = the ip-address or the name of the SMTP-server + smtp_user = (optional) Login name for the SMTP-Server + smtp_password = (optional) Password for the SMTP-Server + user_agent = (optional) program identification + reply_to_address = (optional) Reply-to email address + reply_to_caption = (optional) Reply-to caption (name) + use_tls = (optional) True, if the connection should use TLS + to encrypt. 
+ """ + + self.from_address = from_address + self.from_caption = from_caption + self.recipients = Recipients() + self.cc_recipients = CCRecipients() + self.bcc_recipients = BCCRecipients() + if to_address: + self.recipients.add(to_address, to_caption) + self.subject = subject + self.message = message + self.smtp_server = smtp_server + self.smtp_user = smtp_user + self.smtp_password = smtp_password + self.attachments = Attachments() + self.content_subtype = "plain" + self.content_charset = "iso-8859-1" + self.header_charset = "us-ascii" + self.statusdict = None + self.user_agent = user_agent + self.reply_to_address = reply_to_address + self.reply_to_caption = reply_to_caption + self.use_tls = use_tls + + + def send(self): + """ + Send the mail. Returns True if successfully sent to at least one + recipient. + """ + + # validation + if len(self.from_address.strip()) == 0: + raise NoFromAddress_Exception + if self.recipients.count() == 0: + if ( + (self.cc_recipients.count() == 0) and + (self.bcc_recipients.count() == 0) + ): + raise NoToAddress_Exception + if len(self.subject.strip()) == 0: + raise NoSubject_Exception + + # assemble + if self.attachments.count() == 0: + msg = MIMEText( + _text = self.message, + _subtype = self.content_subtype, + _charset = self.content_charset + ) + else: + msg = MIMEMultipart() + if self.message: + att = MIMEText( + _text = self.message, + _subtype = self.content_subtype, + _charset = self.content_charset + ) + msg.attach(att) + + # add headers + from_str = formataddr((self.from_caption, self.from_address)) + msg["From"] = from_str + if self.reply_to_address: + reply_to_str = formataddr((self.reply_to_caption, self.reply_to_address)) + msg["Reply-To"] = reply_to_str + if self.recipients.count() > 0: + msg["To"] = ", ".join(self.recipients.get_list()) + if self.cc_recipients.count() > 0: + msg["Cc"] = ", ".join(self.cc_recipients.get_list()) + msg["Date"] = formatdate(time.time()) + msg["User-Agent"] = self.user_agent + try: + 
msg["Subject"] = Header( + self.subject, self.header_charset + ) + except(UnicodeDecodeError): + msg["Subject"] = Header( + self.subject, self.content_charset + ) + msg.preamble = "You will not see this in a MIME-aware mail reader.\n" + msg.epilogue = "" + + # assemble multipart + if self.attachments.count() > 0: + for typ, info in self.attachments.get_list(): + if typ == 'file': + filename = info + if not os.path.isfile(filename): + raise AttachmentNotFound_Exception, filename + mimetype, encoding = mimetypes.guess_type(filename) + if mimetype is None or encoding is not None: + mimetype = 'application/octet-stream' + if mimetype.startswith('text/'): + fp = file(filename) + else: + fp = file(filename, 'rb') + text = fp.read() + fp.close() + else: + filename, text, mimetype = info + maintype, subtype = mimetype.split('/', 1) + if maintype == 'text': + # Note: we should handle calculating the charset + att = MIMEText(text, _subtype=subtype) + elif maintype == 'image': + att = MIMEImage(text, _subtype=subtype) + elif maintype == 'audio': + att = MIMEAudio(text, _subtype=subtype) + else: + att = MIMEBase(maintype, subtype) + att.set_payload(text) + # Encode the payload using Base64 + Encoders.encode_base64(att) + # Set the filename parameter + att.add_header( + 'Content-Disposition', + 'attachment', + filename = os.path.basename(filename).strip() + ) + msg.attach(att) + + # connect to server + smtp = smtplib.SMTP() + if self.smtp_server: + smtp.connect(self.smtp_server) + else: + smtp.connect() + + # TLS? 
+ if self.use_tls: + smtp.ehlo() + smtp.starttls() + smtp.ehlo() + + # authenticate + if self.smtp_user: + smtp.login(user = self.smtp_user, password = self.smtp_password) + + # send + self.statusdict = smtp.sendmail( + from_str, + ( + self.recipients.get_list() + + self.cc_recipients.get_list() + + self.bcc_recipients.get_list() + ), + msg.as_string() + ) + smtp.close() + + return True Added: doctools/trunk/sphinx/web/markup.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/web/markup.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,239 @@ +# -*- coding: utf-8 -*- +""" + sphinx.web.markup + ~~~~~~~~~~~~~~~~~ + + Awfully simple markup used in comments. Syntax: + + `this is some ` + like in HTML + + ``this is like ` just that i can contain backticks`` + like in HTML + + *emphasized* + translates to + + **strong** + translates to + + !!!very important message!!! + use this to mark important or dangerous things. + Translates to + + [[http://www.google.com/]] + Simple link with the link target as caption. If the + URL is relative the provided callback is called to get + the full URL. + + [[http://www.google.com/ go to google]] + Link with "go to google" as caption. + + preformatted code that could by python code + Python code (most of the time), otherwise preformatted. + + cite someone + Like
    in HTML. + + :copyright: 2007 by Armin Ronacher. + :license: Python license. +""" +import cgi +import re +from urlparse import urlparse + +from ..highlighting import highlight_block + + +inline_formatting = { + 'escaped_code': ('``', '``'), + 'code': ('`', '`'), + 'strong': ('**', '**'), + 'emphasized': ('*', '*'), + 'important': ('!!!', '!!!'), + 'link': ('[[', ']]'), + 'quote': ('', ''), + 'code_block': ('', ''), + 'paragraph': (r'\n{2,}', None), + 'newline': (r'\\$', None) +} + +simple_formattings = { + 'strong_begin': '', + 'strong_end': '', + 'emphasized_begin': '', + 'emphasized_end': '', + 'important_begin': '', + 'important_end': '', + 'quote_begin': '
    ', + 'quote_end': '
    ' +} + +raw_formatting = set(['link', 'code', 'escaped_code', 'code_block']) + +formatting_start_re = re.compile('|'.join( + '(?P<%s>%s)' % (name, end is not None and re.escape(start) or start) + for name, (start, end) + in sorted(inline_formatting.items(), key=lambda x: -len(x[1][0])) +), re.S | re.M) + +formatting_end_res = dict( + (name, re.compile(re.escape(end))) for name, (start, end) + in inline_formatting.iteritems() if end is not None +) + +without_end_tag = set(name for name, (_, end) in inline_formatting.iteritems() + if end is None) + + + +class StreamProcessor(object): + + def __init__(self, stream): + self._pushed = [] + self._stream = stream + + def __iter__(self): + return self + + def next(self): + if self._pushed: + return self._pushed.pop() + return self._stream.next() + + def push(self, token, data): + self._pushed.append((token, data)) + + def get_data(self, drop_needle=False): + result = [] + try: + while True: + token, data = self.next() + if token != 'text': + if not drop_needle: + self.push(token, data) + break + result.append(data) + except StopIteration: + pass + return ''.join(result) + + +class MarkupParser(object): + + def __init__(self, make_rel_url): + self.make_rel_url = make_rel_url + + def tokenize(self, text): + text = '\n'.join(text.splitlines()) + last_pos = 0 + pos = 0 + end = len(text) + stack = [] + text_buffer = [] + + while pos < end: + if stack: + m = formatting_end_res[stack[-1]].match(text, pos) + if m is not None: + if text_buffer: + yield 'text', ''.join(text_buffer) + del text_buffer[:] + yield stack[-1] + '_end', None + stack.pop() + pos = m.end() + continue + + m = formatting_start_re.match(text, pos) + if m is not None: + if text_buffer: + yield 'text', ''.join(text_buffer) + del text_buffer[:] + + for key, value in m.groupdict().iteritems(): + if value is not None: + if key in without_end_tag: + yield key, None + else: + if key in raw_formatting: + regex = formatting_end_res[key] + m2 = regex.search(text, 
m.end()) + if m2 is None: + yield key, text[m.end():] + else: + yield key, text[m.end():m2.start()] + m = m2 + else: + yield key + '_begin', None + stack.append(key) + break + + if m is None: + break + else: + pos = m.end() + continue + + text_buffer.append(text[pos]) + pos += 1 + + yield 'text', ''.join(text_buffer) + for token in reversed(stack): + yield token + '_end', None + + def stream_to_html(self, text): + stream = StreamProcessor(self.tokenize(text)) + paragraph = [] + result = [] + + def new_paragraph(): + result.append(paragraph[:]) + del paragraph[:] + + for token, data in stream: + if token in simple_formattings: + paragraph.append(simple_formattings[token]) + elif token in ('text', 'escaped_code', 'code'): + if data: + data = cgi.escape(data) + if token in ('escaped_code', 'code'): + data = '%s' % data + paragraph.append(data) + elif token == 'link': + if ' ' in data: + href, caption = data.split(' ', 1) + else: + href = caption = data + protocol = urlparse(href)[0] + nofollow = True + if not protocol: + href = self.make_rel_url(href) + nofollow = False + elif protocol == 'javascript': + href = href[11:] + paragraph.append('%s' % (cgi.escape(href), + ' rel="nofollow"' if nofollow else '', + cgi.escape(caption))) + elif token == 'code_block': + result.append(highlight_block(data, 'python')) + new_paragraph() + elif token == 'paragraph': + new_paragraph() + elif token == 'newline': + paragraph.append('
    ') + + if paragraph: + result.append(paragraph) + for item in result: + if isinstance(item, list): + if item: + yield '

    %s

    ' % ''.join(item) + else: + yield item + + def to_html(self, text): + return ''.join(self.stream_to_html(text)) + + +def markup(text, make_rel_url=lambda x: './' + x): + return MarkupParser(make_rel_url).to_html(text) Added: doctools/trunk/sphinx/web/oldurls.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/web/oldurls.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +""" + sphinx.web.oldurls + ~~~~~~~~~~~~~~~~~~ + + Handle old URLs gracefully. + + :copyright: 2007 by Georg Brandl. + :license: Python license. +""" + +import re + +from .wsgiutil import RedirectResponse, NotFound + + +_module_re = re.compile(r'module-(.*)\.html') +_modobj_re = re.compile(r'(.*)-objects\.html') +_modsub_re = re.compile(r'(.*?)-(.*)\.html') + + +special_module_names = { + 'main': '__main__', + 'builtin': '__builtin__', + 'future': '__future__', + 'pycompile': 'py_compile', +} + +tutorial_nodes = [ + '', '', '', + 'appetite', + 'interpreter', + 'introduction', + 'controlflow', + 'datastructures', + 'modules', + 'inputoutput', + 'errors', + 'classes', + 'stdlib', + 'stdlib2', + 'whatnow', + 'interactive', + 'floatingpoint', + '', + 'glossary', +] + + +def handle_html_url(req, url): + def inner(): + # global special pages + if url.endswith('/contents.html'): + return 'contents/' + if url.endswith('/genindex.html'): + return 'genindex/' + if url.endswith('/about.html'): + return 'about/' + if url.endswith('/reporting-bugs.html'): + return 'bugs/' + if url == 'modindex.html' or url.endswith('/modindex.html'): + return 'modindex/' + # modules, macmodules + if url[:4] in ('lib/', 'mac/'): + p = '' if url[0] == 'l' else 'mac' + m = _module_re.match(url[4:]) + if m: + mn = m.group(1) + return p + 'modules/' + special_module_names.get(mn, mn) + # module sub-pages + m = _modsub_re.match(url[4:]) + if m and not _modobj_re.match(url[4:]): + mn = m.group(1) + return p + 'modules/' + 
special_module_names.get(mn, mn) + # XXX: handle all others + # tutorial + elif url[:4] == 'tut/': + try: + node = int(url[8:].partition('.html')[0]) + except ValueError: + pass + else: + if tutorial_nodes[node]: + return 'tutorial/' + tutorial_nodes[node] + # installing: all in one (ATM) + elif url[:5] == 'inst/': + return 'install/' + # no mapping for "documenting Python..." + # nothing found + raise NotFound() + return RedirectResponse('%s?oldurl=1' % inner()) Added: doctools/trunk/sphinx/web/serve.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/web/serve.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- +""" + sphinx.web.serve + ~~~~~~~~~~~~~~~~ + + This module optionally wraps the `wsgiref` module so that it reloads code + automatically. Works with any WSGI application but it won't help in non + `wsgiref` environments. Use it only for development. + + :copyright: 2007 by Armin Ronacher, Georg Brandl. + :license: Python license. +""" +import os +import sys +import time +import thread + + +def reloader_loop(extra_files): + """When this function is run from the main thread, it will force other + threads to exit when any modules currently loaded change. + + :param extra_files: a list of additional files it should watch. 
+ """ + mtimes = {} + while True: + for filename in filter(None, [getattr(module, '__file__', None) + for module in sys.modules.values()] + + extra_files): + while not os.path.isfile(filename): + filename = os.path.dirname(filename) + if not filename: + break + if not filename: + continue + + if filename[-4:] in ('.pyc', '.pyo'): + filename = filename[:-1] + + mtime = os.stat(filename).st_mtime + if filename not in mtimes: + mtimes[filename] = mtime + continue + if mtime > mtimes[filename]: + sys.exit(3) + time.sleep(1) + + +def restart_with_reloader(): + """Spawn a new Python interpreter with the same arguments as this one, + but running the reloader thread.""" + while True: + print '* Restarting with reloader...' + args = [sys.executable] + sys.argv + if sys.platform == 'win32': + args = ['"%s"' % arg for arg in args] + new_environ = os.environ.copy() + new_environ['RUN_MAIN'] = 'true' + exit_code = os.spawnve(os.P_WAIT, sys.executable, args, new_environ) + if exit_code != 3: + return exit_code + + +def run_with_reloader(main_func, extra_watch): + """ + Run the given function in an independent python interpreter. + """ + if os.environ.get('RUN_MAIN') == 'true': + thread.start_new_thread(main_func, ()) + try: + reloader_loop(extra_watch) + except KeyboardInterrupt: + return + try: + sys.exit(restart_with_reloader()) + except KeyboardInterrupt: + pass + + +def run_simple(hostname, port, make_app, use_reloader=False, + extra_files=None): + """ + Start an application using wsgiref and with an optional reloader. + """ + from wsgiref.simple_server import make_server + def inner(): + application = make_app() + print '* Startup complete.' 
+ srv = make_server(hostname, port, application) + try: + srv.serve_forever() + except KeyboardInterrupt: + pass + if os.environ.get('RUN_MAIN') != 'true': + print '* Running on http://%s:%d/' % (hostname, port) + if use_reloader: + run_with_reloader(inner, extra_files or []) + else: + inner() Added: doctools/trunk/sphinx/web/userdb.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/web/userdb.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +""" + sphinx.web.userdb + ~~~~~~~~~~~~~~~~~ + + A module that provides pythonic access to the `docusers` file + that stores users and their passwords so that they can gain access + to the administration system. + + :copyright: 2007 by Armin Ronacher. + :license: Python license. +""" +from __future__ import with_statement +from os import path +from hashlib import sha1 +from random import choice, randrange +from collections import defaultdict + + +def gen_password(length=8, add_numbers=True, mix_case=True, + add_special_char=True): + """ + Generate a pronounceable password. 
+ """ + if length <= 0: + raise ValueError('requested password of length <= 0') + consonants = 'bcdfghjklmnprstvwz' + vowels = 'aeiou' + if mix_case: + consonants = consonants * 2 + consonants.upper() + vowels = vowels * 2 + vowels.upper() + pw = ''.join([choice(consonants) + + choice(vowels) + + choice(consonants + vowels) for _ + in xrange(length // 3 + 1)])[:length] + if add_numbers: + n = length // 3 + if n > 0: + pw = pw[:-n] + for _ in xrange(n): + pw += choice('0123456789') + if add_special_char: + tmp = randrange(0, len(pw)) + l1 = pw[:tmp] + l2 = pw[tmp:] + if max(len(l1), len(l2)) == len(l1): + l1 = l1[:-1] + else: + l2 = l2[:-1] + return l1 + choice('#$&%?!') + l2 + return pw + + +class UserDatabase(object): + + def __init__(self, filename): + self.filename = filename + self.users = {} + self.privileges = defaultdict(set) + if path.exists(filename): + with file(filename) as f: + for line in f: + line = line.strip() + if line and line[0] != '#': + parts = line.split(':') + self.users[parts[0]] = parts[1] + self.privileges[parts[0]].update(x for x in + parts[2].split(',') + if x) + + def set_password(self, user, password): + """Encode the password for a user (also adds users).""" + self.users[user] = sha1('%s|%s' % (user, password)).hexdigest() + + def add_user(self, user): + """Add a new user and return the generated password.""" + pw = gen_password(8, add_special_char=False) + self.set_password(user, pw) + self.privileges[user].clear() + return pw + + def check_password(self, user, password): + return user in self.users and \ + self.users[user] == sha1('%s|%s' % (user, password)).hexdigest() + + def save(self): + with file(self.filename, 'w') as f: + for username, password in self.users.iteritems(): + privileges = ','.join(self.privileges.get(username, ())) + f.write('%s:%s:%s\n' % (username, password, privileges)) Added: doctools/trunk/sphinx/web/util.py ============================================================================== --- (empty file) +++ 
doctools/trunk/sphinx/web/util.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,100 @@ +# -*- coding: utf-8 -*- +""" + sphinx.web.util + ~~~~~~~~~~~~~~~ + + Miscellaneous utilities. + + :copyright: 2007 by Georg Brandl. + :license: Python license. +""" +from __future__ import with_statement + +import re +from os import path + +from ..util import relative_uri +from .._jinja import Environment, FileSystemLoader + + +def get_target_uri(source_filename): + """Get the web-URI for a given reST file name.""" + if source_filename == 'index.rst': + return '' + if source_filename.endswith('/index.rst'): + return source_filename[:-9] # up to / + return source_filename[:-4] + '/' + + +# ------------------------------------------------------------------------------ +# Setup the templating environment + +templates_path = path.join(path.dirname(__file__), '..', 'templates') +jinja_env = Environment(loader=FileSystemLoader(templates_path, + use_memcache=True), + friendly_traceback=True) + +def do_datetime_format(): + def wrapped(env, ctx, value): + return value.strftime('%a, %d %b %Y %H:%M') + return wrapped + +jinja_env.filters['datetimeformat'] = do_datetime_format + + +_striptags_re = re.compile(r'(|<[^>]+>)') + +def striptags(text): + return ' '.join(_striptags_re.sub('', text).split()) + + +def render_template(req, template_name, *contexts): + context = {} + for ctx in contexts: + context.update(ctx) + tmpl = jinja_env.get_template(template_name) + + path = req.path.lstrip('/') + if not path[-1:] == '/': + path += '/' + def relative_path_to(otheruri, resource=False): + if not resource: + otheruri = get_target_uri(otheruri) + return relative_uri(path, otheruri) + context['pathto'] = relative_path_to + + # add it here a second time for templates that don't + # get the builder information from the environment (such as search) + context['builder'] = 'web' + context['req'] = req + + return tmpl.render(context) + + +def render_simple_template(template_name, context): + tmpl = 
jinja_env.get_template(template_name) + return tmpl.render(context) + + +class lazy_property(object): + """ + Descriptor implementing a "lazy property", i.e. the function + calculating the property value is called only once. + """ + + def __init__(self, func, name=None, doc=None): + self._func = func + self._name = name or func.func_name + self.__doc__ = doc or func.__doc__ + + def __get__(self, obj, objtype=None): + if obj is None: + return self + value = self._func(obj) + setattr(obj, self._name, value) + return value + + +class blackhole_dict(dict): + def __setitem__(self, key, value): + pass Added: doctools/trunk/sphinx/web/webconf.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/web/webconf.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,13 @@ +# -*- coding: utf-8 -*- +# +# Python documentation web application configuration file +# + +# Where the server listens. +listen_addr = 'localhost' +listen_port = 3000 + +# How patch mails are sent. +patch_mail_from = 'patches at localhost' +patch_mail_to = 'docs at localhost' +patch_mail_smtp = 'localhost' Added: doctools/trunk/sphinx/web/wsgiutil.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/web/wsgiutil.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,697 @@ +# -*- coding: utf-8 -*- +""" + sphinx.web.wsgiutil + ~~~~~~~~~~~~~~~~~~~ + + To avoid further dependencies this module collects some of the + classes werkzeug provides and use in other views. + + :copyright: 2007 by Armin Ronacher. + :license: Python license. 
+""" +from __future__ import with_statement + +import cgi +import urllib +import cPickle as pickle +import tempfile +from os import path +from time import gmtime, time, asctime +from random import random +from Cookie import SimpleCookie +from hashlib import sha1 +from datetime import datetime +from cStringIO import StringIO + +from .util import lazy_property +from .json import dump_json + + +HTTP_STATUS_CODES = { + 100: 'CONTINUE', + 101: 'SWITCHING PROTOCOLS', + 102: 'PROCESSING', + 200: 'OK', + 201: 'CREATED', + 202: 'ACCEPTED', + 203: 'NON-AUTHORITATIVE INFORMATION', + 204: 'NO CONTENT', + 205: 'RESET CONTENT', + 206: 'PARTIAL CONTENT', + 207: 'MULTI STATUS', + 300: 'MULTIPLE CHOICES', + 301: 'MOVED PERMANENTLY', + 302: 'FOUND', + 303: 'SEE OTHER', + 304: 'NOT MODIFIED', + 305: 'USE PROXY', + 306: 'RESERVED', + 307: 'TEMPORARY REDIRECT', + 400: 'BAD REQUEST', + 401: 'UNAUTHORIZED', + 402: 'PAYMENT REQUIRED', + 403: 'FORBIDDEN', + 404: 'NOT FOUND', + 405: 'METHOD NOT ALLOWED', + 406: 'NOT ACCEPTABLE', + 407: 'PROXY AUTHENTICATION REQUIRED', + 408: 'REQUEST TIMEOUT', + 409: 'CONFLICT', + 410: 'GONE', + 411: 'LENGTH REQUIRED', + 412: 'PRECONDITION FAILED', + 413: 'REQUEST ENTITY TOO LARGE', + 414: 'REQUEST-URI TOO LONG', + 415: 'UNSUPPORTED MEDIA TYPE', + 416: 'REQUESTED RANGE NOT SATISFIABLE', + 417: 'EXPECTATION FAILED', + 500: 'INTERNAL SERVER ERROR', + 501: 'NOT IMPLEMENTED', + 502: 'BAD GATEWAY', + 503: 'SERVICE UNAVAILABLE', + 504: 'GATEWAY TIMEOUT', + 505: 'HTTP VERSION NOT SUPPORTED', + 506: 'VARIANT ALSO VARIES', + 507: 'INSUFFICIENT STORAGE', + 510: 'NOT EXTENDED' +} + +SID_COOKIE_NAME = 'python_doc_sid' + + +# ------------------------------------------------------------------------------ +# Support for HTTP parameter parsing, requests and responses + + +class _StorageHelper(cgi.FieldStorage): + """ + Helper class used by `Request` to parse submitted file and + form data. Don't use this class directly. 
+ """ + + FieldStorageClass = cgi.FieldStorage + + def __init__(self, environ, get_stream): + cgi.FieldStorage.__init__(self, + fp=environ['wsgi.input'], + environ={ + 'REQUEST_METHOD': environ['REQUEST_METHOD'], + 'CONTENT_TYPE': environ['CONTENT_TYPE'], + 'CONTENT_LENGTH': environ['CONTENT_LENGTH'] + }, + keep_blank_values=True + ) + self.get_stream = get_stream + + def make_file(self, binary=None): + return self.get_stream() + + +class MultiDict(dict): + """ + A dict that takes a list of multiple values as only argument + in order to store multiple values per key. + """ + + def __init__(self, mapping=()): + if isinstance(mapping, MultiDict): + dict.__init__(self, mapping.lists()) + elif isinstance(mapping, dict): + tmp = {} + for key, value in mapping: + tmp[key] = [value] + dict.__init__(self, tmp) + else: + tmp = {} + for key, value in mapping: + tmp.setdefault(key, []).append(value) + dict.__init__(self, tmp) + + def __getitem__(self, key): + """ + Return the first data value for this key; + raises KeyError if not found. 
+ """ + return dict.__getitem__(self, key)[0] + + def __setitem__(self, key, value): + """Set an item as list.""" + dict.__setitem__(self, key, [value]) + + def get(self, key, default=None): + """Return the default value if the requested data doesn't exist""" + try: + return self[key] + except KeyError: + return default + + def getlist(self, key): + """Return an empty list if the requested data doesn't exist""" + try: + return dict.__getitem__(self, key) + except KeyError: + return [] + + def setlist(self, key, new_list): + """Set new values for an key.""" + dict.__setitem__(self, key, list(new_list)) + + def setdefault(self, key, default=None): + if key not in self: + self[key] = default + else: + default = self[key] + return default + + def setlistdefault(self, key, default_list=()): + if key not in self: + default_list = list(default_list) + dict.__setitem__(self, key, default_list) + else: + default_list = self.getlist(key) + return default_list + + def items(self): + """ + Return a list of (key, value) pairs, where value is the last item in + the list associated with the key. 
+ """ + return [(key, self[key]) for key in self.iterkeys()] + + lists = dict.items + + def values(self): + """Returns a list of the last value on every key list.""" + return [self[key] for key in self.iterkeys()] + + listvalues = dict.values + + def iteritems(self): + for key, values in dict.iteritems(self): + yield key, values[0] + + iterlists = dict.iteritems + + def itervalues(self): + for values in dict.itervalues(self): + yield values[0] + + iterlistvalues = dict.itervalues + + def copy(self): + """Return a shallow copy of this object.""" + return self.__class__(self) + + def update(self, other_dict): + """update() extends rather than replaces existing key lists.""" + if isinstance(other_dict, MultiDict): + for key, value_list in other_dict.iterlists(): + self.setlistdefault(key, []).extend(value_list) + elif isinstance(other_dict, dict): + for key, value in other_dict.items(): + self.setlistdefault(key, []).append(value) + else: + for key, value in other_dict: + self.setlistdefault(key, []).append(value) + + def pop(self, *args): + """Pop the first item for a list on the dict.""" + return dict.pop(self, *args)[0] + + def popitem(self): + """Pop an item from the dict.""" + item = dict.popitem(self) + return (item[0], item[1][0]) + + poplist = dict.pop + popitemlist = dict.popitem + + def __repr__(self): + tmp = [] + for key, values in self.iterlists(): + for value in values: + tmp.append((key, value)) + return '%s(%r)' % (self.__class__.__name__, tmp) + + +class Headers(object): + """ + An object that stores some headers. 
+ """ + + def __init__(self, defaults=None): + self._list = [] + if isinstance(defaults, dict): + for key, value in defaults.iteritems(): + if isinstance(value, (tuple, list)): + for v in value: + self._list.append((key, v)) + else: + self._list.append((key, value)) + elif defaults is not None: + for key, value in defaults: + self._list.append((key, value)) + + def __getitem__(self, key): + ikey = key.lower() + for k, v in self._list: + if k.lower() == ikey: + return v + raise KeyError(key) + + def get(self, key, default=None): + try: + return self[key] + except KeyError: + return default + + def getlist(self, key): + ikey = key.lower() + result = [] + for k, v in self._list: + if k.lower() == ikey: + result.append((k, v)) + return result + + def setlist(self, key, values): + del self[key] + self.addlist(key, values) + + def addlist(self, key, values): + self._list.extend(values) + + def lists(self, lowercased=False): + if not lowercased: + return self._list[:] + return [(x.lower(), y) for x, y in self._list] + + def iterlists(self, lowercased=False): + for key, value in self._list: + if lowercased: + key = key.lower() + yield key, value + + def iterkeys(self): + for key, _ in self.iterlists(): + yield key + + def itervalues(self): + for _, value in self.iterlists(): + yield value + + def keys(self): + return list(self.iterkeys()) + + def values(self): + return list(self.itervalues()) + + def __delitem__(self, key): + key = key.lower() + new = [] + for k, v in self._list: + if k != key: + new.append((k, v)) + self._list[:] = new + + remove = __delitem__ + + def __contains__(self, key): + key = key.lower() + for k, v in self._list: + if k.lower() == key: + return True + return False + + has_key = __contains__ + + def __iter__(self): + return iter(self._list) + + def add(self, key, value): + """add a new header tuple to the list""" + self._list.append((key, value)) + + def clear(self): + """clears all headers""" + del self._list[:] + + def set(self, key, value): + 
"""remove all header tuples for key and add + a new one + """ + del self[key] + self.add(key, value) + + __setitem__ = set + + def to_list(self, charset): + """Create a str only list of the headers.""" + result = [] + for k, v in self: + if isinstance(v, unicode): + v = v.encode(charset) + else: + v = str(v) + result.append((k, v)) + return result + + def copy(self): + return self.__class__(self._list) + + def __repr__(self): + return '%s(%r)' % ( + self.__class__.__name__, + self._list + ) + + +class Session(dict): + + def __init__(self, sid): + self.sid = sid + if sid is not None: + if path.exists(self.filename): + with file(self.filename, 'rb') as f: + self.update(pickle.load(f)) + self._orig = dict(self) + + @property + def filename(self): + if self.sid is not None: + return path.join(tempfile.gettempdir(), '__pydoc_sess' + self.sid) + + @property + def worth_saving(self): + return self != self._orig + + def save(self): + if self.sid is None: + self.sid = sha1('%s|%s' % (time(), random())).hexdigest() + with file(self.filename, 'wb') as f: + pickle.dump(dict(self), f, pickle.HIGHEST_PROTOCOL) + self._orig = dict(self) + + +class Request(object): + charset = 'utf-8' + + def __init__(self, environ): + self.environ = environ + self.environ['werkzeug.request'] = self + self.session = Session(self.cookies.get(SID_COOKIE_NAME)) + self.user = self.session.get('user') + + def login(self, user): + self.user = self.session['user'] = user + + def logout(self): + self.user = None + self.session.pop('user', None) + + def _get_file_stream(self): + """Called to get a stream for the file upload. 
+ + This must provide a file-like class with `read()`, `readline()` + and `seek()` methods that is both writeable and readable.""" + return tempfile.TemporaryFile('w+b') + + def _load_post_data(self): + """Method used internally to retrieve submitted data.""" + self._data = '' + post = [] + files = [] + if self.environ['REQUEST_METHOD'] in ('POST', 'PUT'): + storage = _StorageHelper(self.environ, self._get_file_stream) + for key in storage.keys(): + values = storage[key] + if not isinstance(values, list): + values = [values] + for item in values: + if getattr(item, 'filename', None) is not None: + fn = item.filename.decode(self.charset, 'ignore') + # fix stupid IE bug + if len(fn) > 1 and fn[1] == ':' and '\\' in fn: + fn = fn[fn.index('\\') + 1:] + files.append((key, FileStorage(key, fn, item.type, + item.length, item.file))) + else: + post.append((key, item.value.decode(self.charset, + 'ignore'))) + self._form = MultiDict(post) + self._files = MultiDict(files) + + def read(self, *args): + if not hasattr(self, '_buffered_stream'): + self._buffered_stream = StringIO(self.data) + return self._buffered_stream.read(*args) + + def readline(self, *args): + if not hasattr(self, '_buffered_stream'): + self._buffered_stream = StringIO(self.data) + return self._buffered_stream.readline(*args) + + def make_external_url(self, path): + url = self.environ['wsgi.url_scheme'] + '://' + if 'HTTP_HOST' in self.environ: + url += self.environ['HTTP_HOST'] + else: + url += self.environ['SERVER_NAME'] + if (self.environ['wsgi.url_scheme'], self.environ['SERVER_PORT']) not \ + in (('https', '443'), ('http', '80')): + url += ':' + self.environ['SERVER_PORT'] + + url += urllib.quote(self.environ.get('SCRIPT_INFO', '').rstrip('/')) + if not path.startswith('/'): + path = '/' + path + return url + path + + def args(self): + """URL parameters""" + items = [] + qs = self.environ.get('QUERY_STRING', '') + for key, values in cgi.parse_qs(qs, True).iteritems(): + for value in values: + value = 
value.decode(self.charset, 'ignore') + items.append((key, value)) + return MultiDict(items) + args = lazy_property(args) + + def data(self): + """raw value of input stream.""" + if not hasattr(self, '_data'): + self._load_post_data() + return self._data + data = lazy_property(data) + + def form(self): + """form parameters.""" + if not hasattr(self, '_form'): + self._load_post_data() + return self._form + form = lazy_property(form) + + def files(self): + """File uploads.""" + if not hasattr(self, '_files'): + self._load_post_data() + return self._files + files = lazy_property(files) + + def cookies(self): + """Stored Cookies.""" + cookie = SimpleCookie() + cookie.load(self.environ.get('HTTP_COOKIE', '')) + result = {} + for key, value in cookie.iteritems(): + result[key] = value.value.decode(self.charset, 'ignore') + return result + cookies = lazy_property(cookies) + + def method(self): + """Request method.""" + return self.environ['REQUEST_METHOD'] + method = property(method, doc=method.__doc__) + + def path(self): + """Requested path.""" + path = '/' + (self.environ.get('PATH_INFO') or '').lstrip('/') + path = path.decode(self.charset, self.charset) + parts = path.replace('+', ' ').split('/') + return u'/'.join(p for p in parts if p != '..') + path = lazy_property(path) + + +class Response(object): + charset = 'utf-8' + default_mimetype = 'text/html' + + def __init__(self, response=None, headers=None, status=200, mimetype=None): + if response is None: + self.response = [] + elif isinstance(response, basestring): + self.response = [response] + else: + self.response = iter(response) + if not headers: + self.headers = Headers() + elif isinstance(headers, Headers): + self.headers = headers + else: + self.headers = Headers(headers) + if mimetype is None and 'Content-Type' not in self.headers: + mimetype = self.default_mimetype + if mimetype is not None: + if 'charset=' not in mimetype and mimetype.startswith('text/'): + mimetype += '; charset=' + self.charset + 
self.headers['Content-Type'] = mimetype + self.status = status + self._cookies = None + + def write(self, value): + if not isinstance(self.response, list): + raise RuntimeError('cannot write to streaming response') + self.write = self.response.append + self.response.append(value) + + def set_cookie(self, key, value='', max_age=None, expires=None, + path='/', domain=None, secure=None): + if self._cookies is None: + self._cookies = SimpleCookie() + if isinstance(value, unicode): + value = value.encode(self.charset) + self._cookies[key] = value + if max_age is not None: + self._cookies[key]['max-age'] = max_age + if expires is not None: + if isinstance(expires, basestring): + self._cookies[key]['expires'] = expires + expires = None + elif isinstance(expires, datetime): + expires = expires.utctimetuple() + elif not isinstance(expires, (int, long)): + expires = gmtime(expires) + else: + raise ValueError('datetime or integer required') + month = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', + 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'][expires.tm_mon - 1] + day = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', + 'Friday', 'Saturday', 'Sunday'][expires.tm_wday] + date = '%02d-%s-%s' % ( + expires.tm_mday, month, str(expires.tm_year)[-2:] + ) + d = '%s, %s %02d:%02d:%02d GMT' % (day, date, expires.tm_hour, + expires.tm_min, expires.tm_sec) + self._cookies[key]['expires'] = d + if path is not None: + self._cookies[key]['path'] = path + if domain is not None: + self._cookies[key]['domain'] = domain + if secure is not None: + self._cookies[key]['secure'] = secure + + def delete_cookie(self, key): + if self._cookies is None: + self._cookies = SimpleCookie() + if key not in self._cookies: + self._cookies[key] = '' + self._cookies[key]['max-age'] = 0 + + def __call__(self, environ, start_response): + req = environ['werkzeug.request'] + if req.session.worth_saving: + req.session.save() + self.set_cookie(SID_COOKIE_NAME, req.session.sid) + + headers = self.headers.to_list(self.charset) 
+ if self._cookies is not None: + for morsel in self._cookies.values(): + headers.append(('Set-Cookie', morsel.output(header=''))) + status = '%d %s' % (self.status, HTTP_STATUS_CODES[self.status]) + + charset = self.charset or 'ascii' + start_response(status, headers) + for item in self.response: + if isinstance(item, unicode): + yield item.encode(charset) + else: + yield str(item) + +def get_base_uri(environ): + url = environ['wsgi.url_scheme'] + '://' + if 'HTTP_HOST' in environ: + url += environ['HTTP_HOST'] + else: + url += environ['SERVER_NAME'] + if (environ['wsgi.url_scheme'], environ['SERVER_PORT']) not \ + in (('https', '443'), ('http', '80')): + url += ':' + environ['SERVER_PORT'] + url += urllib.quote(environ.get('SCRIPT_INFO', '').rstrip('/')) + return url + + +class RedirectResponse(Response): + + def __init__(self, target_url, code=302): + if not target_url.startswith('/'): + target_url = '/' + target_url + self.target_url = target_url + super(RedirectResponse, self).__init__('Moved...', status=code) + + def __call__(self, environ, start_response): + url = get_base_uri(environ) + self.target_url + self.headers['Location'] = url + return super(RedirectResponse, self).__call__(environ, start_response) + + +class JSONResponse(Response): + + def __init__(self, data): + assert not isinstance(data, list), 'list unsafe for json dumping' + super(JSONResponse, self).__init__(dump_json(data), mimetype='text/javascript') + + +class SharedDataMiddleware(object): + """ + Redirects calls to an folder with static data. 
+ """ + + def __init__(self, app, exports): + self.app = app + self.exports = exports + self.cache = {} + + def serve_file(self, filename, start_response): + from mimetypes import guess_type + guessed_type = guess_type(filename) + mime_type = guessed_type[0] or 'text/plain' + expiry = time() + 3600 # one hour + expiry = asctime(gmtime(expiry)) + start_response('200 OK', [('Content-Type', mime_type), + ('Cache-Control', 'public'), + ('Expires', expiry)]) + with open(filename, 'rb') as f: + return [f.read()] + + def __call__(self, environ, start_response): + p = environ.get('PATH_INFO', '') + if p in self.cache: + return self.serve_file(self.cache[p], start_response) + for search_path, file_path in self.exports.iteritems(): + if not search_path.endswith('/'): + search_path += '/' + if p.startswith(search_path): + real_path = path.join(file_path, p[len(search_path):]) + if path.exists(real_path) and path.isfile(real_path): + self.cache[p] = real_path + return self.serve_file(real_path, start_response) + return self.app(environ, start_response) + + +class NotFound(Exception): + """ + Raise to display the 404 error page. + """ + + def __init__(self, show_keyword_matches=False): + self.show_keyword_matches = show_keyword_matches + Exception.__init__(self, show_keyword_matches) Added: doctools/trunk/sphinx/writer.py ============================================================================== --- (empty file) +++ doctools/trunk/sphinx/writer.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,229 @@ +# -*- coding: utf-8 -*- +""" + sphinx.writer + ~~~~~~~~~~~~~ + + docutils writers handling Sphinx' custom nodes. + + :copyright: 2007 by Georg Brandl. + :license: Python license. 
+""" + +from docutils import nodes +from docutils.writers.html4css1 import Writer, HTMLTranslator as BaseTranslator + +from .smartypants import sphinx_smarty_pants + + +class HTMLWriter(Writer): + def __init__(self, config): + Writer.__init__(self) + if config.get('use_smartypants', False): + self.translator_class = SmartyPantsHTMLTranslator + else: + self.translator_class = HTMLTranslator + + +version_text = { + 'deprecated': 'Deprecated in version %s', + 'versionchanged': 'Changed in version %s', + 'versionadded': 'New in version %s', +} + +class HTMLTranslator(BaseTranslator): + """ + Our custom HTML translator. + """ + + def __init__(self, *args, **kwds): + self.no_smarty = 0 + BaseTranslator.__init__(self, *args, **kwds) + self.highlightlang = 'python' + + def visit_desc(self, node): + self.body.append(self.starttag(node, 'dl', CLASS=node['desctype'])) + def depart_desc(self, node): + self.body.append('\n\n') + + def visit_desc_signature(self, node): + # the id is set automatically + self.body.append(self.starttag(node, 'dt')) + # anchor for per-desc interactive data + if node.parent['desctype'] != 'describe' and node['ids'] and node['first']: + self.body.append('' % node['ids'][0]) + if node.parent['desctype'] in ('class', 'exception'): + self.body.append('%s ' % node.parent['desctype']) + def depart_desc_signature(self, node): + self.body.append('\n') + + def visit_desc_classname(self, node): + self.body.append(self.starttag(node, 'tt', '', CLASS='descclassname')) + def depart_desc_classname(self, node): + self.body.append('
    ') + + def visit_desc_name(self, node): + self.body.append(self.starttag(node, 'tt', '', CLASS='descname')) + def depart_desc_name(self, node): + self.body.append('
    ') + + def visit_desc_parameterlist(self, node): + self.body.append('(') + self.first_param = 1 + def depart_desc_parameterlist(self, node): + self.body.append(')') + + def visit_desc_parameter(self, node): + if not self.first_param: + self.body.append(', ') + else: + self.first_param = 0 + if not node.hasattr('noemph'): + self.body.append('') + def depart_desc_parameter(self, node): + if not node.hasattr('noemph'): + self.body.append('') + + def visit_desc_optional(self, node): + self.body.append('[') + def depart_desc_optional(self, node): + self.body.append(']') + + def visit_desc_content(self, node): + self.body.append(self.starttag(node, 'dd', '')) + def depart_desc_content(self, node): + self.body.append('') + + def visit_refcount(self, node): + self.body.append(self.starttag(node, 'em', '', CLASS='refcount')) + def depart_refcount(self, node): + self.body.append('
    ') + + def visit_versionmodified(self, node): + self.body.append(self.starttag(node, 'p')) + text = version_text[node['type']] % node['version'] + if len(node): + text += ': ' + else: + text += '.' + self.body.append('%s' % text) + def depart_versionmodified(self, node): + self.body.append('

    \n') + + # overwritten -- we don't want source comments to show up in the HTML + def visit_comment(self, node): + raise nodes.SkipNode + + # overwritten + def visit_admonition(self, node, name=''): + self.body.append(self.start_tag_with_title( + node, 'div', CLASS=('admonition ' + name))) + if name and name != 'seealso': + node.insert(0, nodes.title(name, self.language.labels[name])) + self.set_first_last(node) + + def visit_seealso(self, node): + self.visit_admonition(node, 'seealso') + def depart_seealso(self, node): + self.depart_admonition(node) + + # overwritten + def visit_title(self, node, move_ids=1): + # if we have a section we do our own processing in order + # to have ids in the hN-tags and not in additional a-tags + if isinstance(node.parent, nodes.section): + h_level = self.section_level + self.initial_header_level - 1 + if node.parent.get('ids'): + attrs = {'ids': node.parent['ids']} + else: + attrs = {} + self.body.append(self.starttag(node, 'h%d' % h_level, '', **attrs)) + self.context.append('\n' % h_level) + else: + BaseTranslator.visit_title(self, node, move_ids) + + # overwritten + def visit_literal_block(self, node): + from .highlighting import highlight_block + self.body.append(highlight_block(node.rawsource, self.highlightlang)) + raise nodes.SkipNode + + def visit_productionlist(self, node): + self.body.append(self.starttag(node, 'pre')) + names = [] + for production in node: + names.append(production['tokenname']) + maxlen = max(len(name) for name in names) + for production in node: + if production['tokenname']: + self.body.append(self.starttag(production, 'strong', '')) + self.body.append(production['tokenname'].ljust(maxlen) + + '
    ::= ') + lastname = production['tokenname'] + else: + self.body.append('%s ' % (' '*len(lastname))) + production.walkabout(self) + self.body.append('\n') + self.body.append('\n') + raise nodes.SkipNode + def depart_productionlist(self, node): + pass + + def visit_production(self, node): + pass + def depart_production(self, node): + pass + + def visit_centered(self, node): + self.body.append(self.starttag(node, 'center') + '') + def depart_centered(self, node): + self.body.append('') + + def visit_compact_paragraph(self, node): + pass + def depart_compact_paragraph(self, node): + pass + + def visit_highlightlang(self, node): + self.highlightlang = node['lang'] + def depart_highlightlang(self, node): + pass + + def visit_toctree(self, node): + # this only happens when formatting a toc from env.tocs -- in this + # case we don't want to include the subtree + raise nodes.SkipNode + + def visit_index(self, node): + raise nodes.SkipNode + + +class SmartyPantsHTMLTranslator(HTMLTranslator): + """ + Handle ordinary text via smartypants, converting quotes and dashes + to the correct entities. 
+ """ + + def __init__(self, *args, **kwds): + self.no_smarty = 0 + HTMLTranslator.__init__(self, *args, **kwds) + + def visit_literal(self, node): + self.no_smarty += 1 + try: + # this raises SkipNode + HTMLTranslator.visit_literal(self, node) + finally: + self.no_smarty -= 1 + + def visit_productionlist(self, node): + self.no_smarty += 1 + try: + HTMLTranslator.visit_productionlist(self, node) + finally: + self.no_smarty -= 1 + + def encode(self, text): + text = HTMLTranslator.encode(self, text) + if self.no_smarty <= 0: + text = sphinx_smarty_pants(text) + return text Added: doctools/trunk/utils/check_sources.py ============================================================================== --- (empty file) +++ doctools/trunk/utils/check_sources.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,241 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +""" + Checker for file headers + ~~~~~~~~~~~~~~~~~~~~~~~~ + + Make sure each Python file has a correct file header + including copyright and license information. + + :copyright: 2006-2007 by Georg Brandl. + :license: GNU GPL, see LICENSE for more details. +""" + +import sys, os, re +import getopt +import cStringIO +from os.path import join, splitext, abspath + + +checkers = {} + +def checker(*suffixes, **kwds): + only_pkg = kwds.pop('only_pkg', False) + def deco(func): + for suffix in suffixes: + checkers.setdefault(suffix, []).append(func) + func.only_pkg = only_pkg + return func + return deco + + +name_mail_re = r'[\w ]+(<.*?>)?' +copyright_re = re.compile(r'^ :copyright: 200\d(-200\d)? 
by %s(, %s)*[,.]$' % + (name_mail_re, name_mail_re)) +license_re = re.compile(r" :license: (.*?).\n") +copyright_2_re = re.compile(r'^ %s(, %s)*[,.]$' % + (name_mail_re, name_mail_re)) +coding_re = re.compile(r'coding[:=]\s*([-\w.]+)') +not_ix_re = re.compile(r'\bnot\s+\S+?\s+i[sn]\s\S+') +is_const_re = re.compile(r'if.*?==\s+(None|False|True)\b') + +misspellings = ["developement", "adress", "verificate", # ALLOW-MISSPELLING + "informations"] # ALLOW-MISSPELLING + + + at checker('.py') +def check_syntax(fn, lines): + try: + compile(''.join(lines), fn, "exec") + except SyntaxError, err: + yield 0, "not compilable: %s" % err + + + at checker('.py') +def check_style_and_encoding(fn, lines): + encoding = 'ascii' + for lno, line in enumerate(lines): + if len(line) > 90: + yield lno+1, "line too long" + m = not_ix_re.search(line) + if m: + yield lno+1, '"' + m.group() + '"' + if is_const_re.search(line): + yield lno+1, 'using == None/True/False' + if lno < 2: + co = coding_re.search(line) + if co: + encoding = co.group(1) + try: + line.decode(encoding) + except UnicodeDecodeError, err: + yield lno+1, "not decodable: %s\n Line: %r" % (err, line) + except LookupError, err: + yield 0, "unknown encoding: %s" % encoding + encoding = 'latin1' + + + at checker('.py', only_pkg=True) +def check_fileheader(fn, lines): + # line number correction + c = 1 + if lines[0:1] == ['#!/usr/bin/env python\n']: + lines = lines[1:] + c = 2 + + llist = [] + docopen = False + for lno, l in enumerate(lines): + llist.append(l) + if lno == 0: + if l == '# -*- coding: rot13 -*-\n': + # special-case pony package + return + elif l != '# -*- coding: utf-8 -*-\n': + yield 1, "missing coding declaration" + elif lno == 1: + if l != '"""\n' and l != 'r"""\n': + yield 2, 'missing docstring begin (""")' + else: + docopen = True + elif docopen: + if l == '"""\n': + # end of docstring + if lno <= 4: + yield lno+c, "missing module name in docstring" + break + + if l != "\n" and l[:4] != ' ' and docopen: + yield 
lno+c, "missing correct docstring indentation" + + if lno == 2: + # if not in package, don't check the module name + modname = fn[:-3].replace('/', '.').replace('.__init__', '') + while modname: + if l.lower()[4:-1] == modname: + break + modname = '.'.join(modname.split('.')[1:]) + else: + yield 3, "wrong module name in docstring heading" + modnamelen = len(l.strip()) + elif lno == 3: + if l.strip() != modnamelen * "~": + yield 4, "wrong module name underline, should be ~~~...~" + + else: + yield 0, "missing end and/or start of docstring..." + + # check for copyright and license fields + license = llist[-2:-1] + if not license or not license_re.match(license[0]): + yield 0, "no correct license info" + + ci = -3 + copyright = llist[ci:ci+1] + while copyright and copyright_2_re.match(copyright[0]): + ci -= 1 + copyright = llist[ci:ci+1] + if not copyright or not copyright_re.match(copyright[0]): + yield 0, "no correct copyright info" + + + at checker('.py', '.html', '.js') +def check_whitespace_and_spelling(fn, lines): + for lno, line in enumerate(lines): + if "\t" in line: + yield lno+1, "OMG TABS!!!1 " + if line[:-1].rstrip(' \t') != line[:-1]: + yield lno+1, "trailing whitespace" + for word in misspellings: + if word in line and 'ALLOW-MISSPELLING' not in line: + yield lno+1, '"%s" used' % word + + +bad_tags = ('', '', '', '', '' + '
    ', '', '', '>out, "%s:%d: %s" % (fn, lno, msg) + num += 1 + if verbose: + print + if num == 0: + print "No errors found." + else: + print out.getvalue().rstrip('\n') + print "%d error%s found." % (num, num > 1 and "s" or "") + return int(num > 0) + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) Added: doctools/trunk/utils/pylintrc ============================================================================== --- (empty file) +++ doctools/trunk/utils/pylintrc Mon Jul 23 11:02:25 2007 @@ -0,0 +1,301 @@ +# lint Python modules using external checkers. +# +# This is the main checker controling the other ones and the reports +# generation. It is itself both a raw checker and an astng checker in order +# to: +# * handle message activation / deactivation at the module level +# * handle some basic but necessary stats'data (number of classes, methods...) +# +[MASTER] + +# Specify a configuration file. +#rcfile= + +# Profiled execution. +profile=no + +# Add to the black list. It should be a base name, not a +# path. You may set this option multiple times. +ignore=.svn + +# Pickle collected data for later comparisons. +persistent=yes + +# Set the cache size for astng objects. +cache-size=500 + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +load-plugins= + + +[MESSAGES CONTROL] + +# Enable only checker(s) with the given id(s). This option conflict with the +# disable-checker option +#enable-checker= + +# Enable all checker(s) except those with the given id(s). This option conflict +# with the disable-checker option +#disable-checker= + +# Enable all messages in the listed categories. +#enable-msg-cat= + +# Disable all messages in the listed categories. +#disable-msg-cat= + +# Enable the message(s) with the given id(s). +#enable-msg= + +# Disable the message(s) with the given id(s). 
+disable-msg=C0323,W0142,C0301,C0103,C0111,E0213,C0302,C0203,W0703,R0201 + + +[REPORTS] + +# set the output format. Available formats are text, parseable, colorized and +# html +output-format=colorized + +# Include message's id in output +include-ids=yes + +# Put messages in a separate file for each module / package specified on the +# command line instead of printing them on stdout. Reports (if any) will be +# written in a file name "pylint_global.[txt|html]". +files-output=no + +# Tells wether to display a full report or only the messages +reports=yes + +# Python expression which should return a note less than 10 (10 is the highest +# note).You have access to the variables errors warning, statement which +# respectivly contain the number of errors / warnings messages and the total +# number of statements analyzed. This is used by the global evaluation report +# (R0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Add a comment according to your evaluation note. This is used by the global +# evaluation report (R0004). +comment=no + +# Enable the report(s) with the given id(s). +#enable-report= + +# Disable the report(s) with the given id(s). +#disable-report= + + +# checks for +# * unused variables / imports +# * undefined variables +# * redefinition of variable from builtins or from an outer scope +# * use of variable before assigment +# +[VARIABLES] + +# Tells wether we should check for unused import in __init__ files. +init-import=no + +# A regular expression matching names used for dummy variables (i.e. not used). +dummy-variables-rgx=_|dummy + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid to define new builtins when possible. +additional-builtins= + + +# try to find bugs in the code using type inference +# +[TYPECHECK] + +# Tells wether missing members accessed in mixin class should be ignored. 
A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# When zope mode is activated, consider the acquired-members option to ignore +# access to some undefined attributes. +zope=no + +# List of members which are usually get through zope's acquisition mecanism and +# so shouldn't trigger E0201 when accessed (need zope=yes to be considered). +acquired-members=REQUEST,acl_users,aq_parent + + +# checks for : +# * doc strings +# * modules / classes / functions / methods / arguments / variables name +# * number of arguments, local variables, branchs, returns and statements in +# functions, methods +# * required module attributes +# * dangerous default values as arguments +# * redefinition of function / method / class +# * uses of the global statement +# +[BASIC] + +# Required attributes for module, separated by a comma +required-attributes= + +# Regular expression which should only match functions or classes name which do +# not require a docstring +no-docstring-rgx=__.*__ + +# Regular expression which should only match correct module names +module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Regular expression which should only match correct module level names +const-rgx=(([A-Z_][A-Z1-9_]*)|(__.*__))$ + +# Regular expression which should only match correct class names +class-rgx=[A-Z_][a-zA-Z0-9]+$ + +# Regular expression which should only match correct function names +function-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct method names +method-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct instance attribute names +attr-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct argument names +argument-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct variable names +variable-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct list comprehension / +# 
generator expression variable names +inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ + +# Good variable names which should always be accepted, separated by a comma +good-names=i,j,k,ex,Run,_ + +# Bad variable names which should always be refused, separated by a comma +bad-names=foo,bar,baz,toto,tutu,tata + +# List of builtins function names that should not be used, separated by a comma +bad-functions=apply,input + + +# checks for sign of poor/misdesign: +# * number of methods, attributes, local variables... +# * size, complexity of functions, methods +# +[DESIGN] + +# Maximum number of arguments for function / method +max-args=12 + +# Maximum number of locals for function / method body +max-locals=30 + +# Maximum number of return / yield for function / method body +max-returns=12 + +# Maximum number of branch for function / method body +max-branchs=30 + +# Maximum number of statements in function / method body +max-statements=60 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of attributes for a class (see R0902). +max-attributes=20 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=0 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + + +# checks for +# * external modules dependencies +# * relative / wildcard imports +# * cyclic imports +# * uses of deprecated modules +# +[IMPORTS] + +# Deprecated modules which should not be used, separated by a comma +deprecated-modules=regsub,string,TERMIOS,Bastion,rexec + +# Create a graph of every (i.e. 
internal and external) dependencies in the +# given file (report R0402 must not be disabled) +import-graph= + +# Create a graph of external dependencies in the given file (report R0402 must +# not be disabled) +ext-import-graph= + +# Create a graph of internal dependencies in the given file (report R0402 must +# not be disabled) +int-import-graph= + + +# checks for : +# * methods without self as first argument +# * overridden methods signature +# * access only to existant members via self +# * attributes not defined in the __init__ method +# * supported interfaces implementation +# * unreachable code +# +[CLASSES] + +# List of interface methods to ignore, separated by a comma. This is used for +# instance to not check methods defines in Zope's Interface base class. +ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__,__new__,setUp + + +# checks for similarities and duplicated code. This computation may be +# memory / CPU intensive, so you should disable it if you experiments some +# problems. +# +[SIMILARITIES] + +# Minimum lines number of a similarity. +min-similarity-lines=10 + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + + +# checks for: +# * warning notes in the code like FIXME, XXX +# * PEP 263: source code with non ascii character but no encoding declaration +# +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. 
+notes=FIXME,XXX,TODO + + +# checks for : +# * unauthorized constructions +# * strict indentation +# * line length +# * use of <> instead of != +# +[FORMAT] + +# Maximum number of characters on a single line. +max-line-length=90 + +# Maximum number of lines in a module +max-module-lines=1000 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' Added: doctools/trunk/utils/reindent.py ============================================================================== --- (empty file) +++ doctools/trunk/utils/reindent.py Mon Jul 23 11:02:25 2007 @@ -0,0 +1,291 @@ +#! /usr/bin/env python + +# Released to the public domain, by Tim Peters, 03 October 2000. +# -B option added by Georg Brandl, 2006. + +"""reindent [-d][-r][-v] [ path ... ] + +-d (--dryrun) Dry run. Analyze, but don't make any changes to files. +-r (--recurse) Recurse. Search for all .py files in subdirectories too. +-B (--no-backup) Don't write .bak backup files. +-v (--verbose) Verbose. Print informative msgs; else only names of changed files. +-h (--help) Help. Print this usage information and exit. + +Change Python (.py) files to use 4-space indents and no hard tab characters. +Also trim excess spaces and tabs from ends of lines, and remove empty lines +at the end of files. Also ensure the last line ends with a newline. + +If no paths are given on the command line, reindent operates as a filter, +reading a single source file from standard input and writing the transformed +source to standard output. In this case, the -d, -r and -v flags are +ignored. + +You can pass one or more file and/or directory paths. When a directory +path, all .py files within the directory will be examined, and, if the -r +option is given, likewise recursively for subdirectories. + +If output is not to standard output, reindent overwrites files in place, +renaming the originals with a .bak extension. If it finds nothing to +change, the file is left alone. 
If reindent does change a file, the changed +file is a fixed-point for future runs (i.e., running reindent on the +resulting .py file won't change it again). + +The hard part of reindenting is figuring out what to do with comment +lines. So long as the input files get a clean bill of health from +tabnanny.py, reindent should do a good job. +""" + +__version__ = "1" + +import tokenize +import os +import sys + +verbose = 0 +recurse = 0 +dryrun = 0 +no_backup = 0 + +def usage(msg=None): + if msg is not None: + print >> sys.stderr, msg + print >> sys.stderr, __doc__ + +def errprint(*args): + sep = "" + for arg in args: + sys.stderr.write(sep + str(arg)) + sep = " " + sys.stderr.write("\n") + +def main(): + import getopt + global verbose, recurse, dryrun, no_backup + + try: + opts, args = getopt.getopt(sys.argv[1:], "drvhB", + ["dryrun", "recurse", "verbose", "help", + "no-backup"]) + except getopt.error, msg: + usage(msg) + return + for o, a in opts: + if o in ('-d', '--dryrun'): + dryrun += 1 + elif o in ('-r', '--recurse'): + recurse += 1 + elif o in ('-v', '--verbose'): + verbose += 1 + elif o in ('-B', '--no-backup'): + no_backup += 1 + elif o in ('-h', '--help'): + usage() + return + if not args: + r = Reindenter(sys.stdin) + r.run() + r.write(sys.stdout) + return + for arg in args: + check(arg) + +def check(file): + if os.path.isdir(file) and not os.path.islink(file): + if verbose: + print "listing directory", file + names = os.listdir(file) + for name in names: + fullname = os.path.join(file, name) + if ((recurse and os.path.isdir(fullname) and + not os.path.islink(fullname)) + or name.lower().endswith(".py")): + check(fullname) + return + + if verbose: + print "checking", file, "...", + try: + f = open(file) + except IOError, msg: + errprint("%s: I/O Error: %s" % (file, str(msg))) + return + + r = Reindenter(f) + f.close() + if r.run(): + if verbose: + print "changed." + if dryrun: + print "But this is a dry run, so leaving it alone." 
+ else: + print "reindented", file, (dryrun and "(dry run => not really)" or "") + if not dryrun: + if not no_backup: + bak = file + ".bak" + if os.path.exists(bak): + os.remove(bak) + os.rename(file, bak) + if verbose: + print "renamed", file, "to", bak + f = open(file, "w") + r.write(f) + f.close() + if verbose: + print "wrote new", file + else: + if verbose: + print "unchanged." + + +class Reindenter: + + def __init__(self, f): + self.find_stmt = 1 # next token begins a fresh stmt? + self.level = 0 # current indent level + + # Raw file lines. + self.raw = f.readlines() + + # File lines, rstripped & tab-expanded. Dummy at start is so + # that we can use tokenize's 1-based line numbering easily. + # Note that a line is all-blank iff it's "\n". + self.lines = [line.rstrip('\n \t').expandtabs() + "\n" + for line in self.raw] + self.lines.insert(0, None) + self.index = 1 # index into self.lines of next line + + # List of (lineno, indentlevel) pairs, one for each stmt and + # comment line. indentlevel is -1 for comment lines, as a + # signal that tokenize doesn't know what to do about them; + # indeed, they're our headache! + self.stats = [] + + def run(self): + tokenize.tokenize(self.getline, self.tokeneater) + # Remove trailing empty lines. + lines = self.lines + while lines and lines[-1] == "\n": + lines.pop() + # Sentinel. + stats = self.stats + stats.append((len(lines), 0)) + # Map count of leading spaces to # we want. + have2want = {} + # Program after transformation. + after = self.after = [] + # Copy over initial empty lines -- there's nothing to do until + # we see a line with *something* on it. + i = stats[0][0] + after.extend(lines[1:i]) + for i in range(len(stats)-1): + thisstmt, thislevel = stats[i] + nextstmt = stats[i+1][0] + have = getlspace(lines[thisstmt]) + want = thislevel * 4 + if want < 0: + # A comment line. + if have: + # An indented comment line. If we saw the same + # indentation before, reuse what it most recently + # mapped to. 
+ want = have2want.get(have, -1) + if want < 0: + # Then it probably belongs to the next real stmt. + for j in xrange(i+1, len(stats)-1): + jline, jlevel = stats[j] + if jlevel >= 0: + if have == getlspace(lines[jline]): + want = jlevel * 4 + break + if want < 0: # Maybe it's a hanging + # comment like this one, + # in which case we should shift it like its base + # line got shifted. + for j in xrange(i-1, -1, -1): + jline, jlevel = stats[j] + if jlevel >= 0: + want = have + getlspace(after[jline-1]) - \ + getlspace(lines[jline]) + break + if want < 0: + # Still no luck -- leave it alone. + want = have + else: + want = 0 + assert want >= 0 + have2want[have] = want + diff = want - have + if diff == 0 or have == 0: + after.extend(lines[thisstmt:nextstmt]) + else: + for line in lines[thisstmt:nextstmt]: + if diff > 0: + if line == "\n": + after.append(line) + else: + after.append(" " * diff + line) + else: + remove = min(getlspace(line), -diff) + after.append(line[remove:]) + return self.raw != self.after + + def write(self, f): + f.writelines(self.after) + + # Line-getter for tokenize. + def getline(self): + if self.index >= len(self.lines): + line = "" + else: + line = self.lines[self.index] + self.index += 1 + return line + + # Line-eater for tokenize. + def tokeneater(self, type, token, (sline, scol), end, line, + INDENT=tokenize.INDENT, + DEDENT=tokenize.DEDENT, + NEWLINE=tokenize.NEWLINE, + COMMENT=tokenize.COMMENT, + NL=tokenize.NL): + + if type == NEWLINE: + # A program statement, or ENDMARKER, will eventually follow, + # after some (possibly empty) run of tokens of the form + # (NL | COMMENT)* (INDENT | DEDENT+)? 
+ self.find_stmt = 1 + + elif type == INDENT: + self.find_stmt = 1 + self.level += 1 + + elif type == DEDENT: + self.find_stmt = 1 + self.level -= 1 + + elif type == COMMENT: + if self.find_stmt: + self.stats.append((sline, -1)) + # but we're still looking for a new stmt, so leave + # find_stmt alone + + elif type == NL: + pass + + elif self.find_stmt: + # This is the first "real token" following a NEWLINE, so it + # must be the first token of the next program statement, or an + # ENDMARKER. + self.find_stmt = 0 + if line: # not endmarker + self.stats.append((sline, self.level)) + +# Count number of leading blanks. +def getlspace(line): + i, n = 0, len(line) + while i < n and line[i] == " ": + i += 1 + return i + +if __name__ == '__main__': + main() From python-checkins at python.org Mon Jul 23 15:41:45 2007 From: python-checkins at python.org (nick.coghlan) Date: Mon, 23 Jul 2007 15:41:45 +0200 (CEST) Subject: [Python-checkins] r56509 - in python/trunk/Lib: runpy.py test/test_runpy.py Message-ID: <20070723134145.526901E4007@bag.python.org> Author: nick.coghlan Date: Mon Jul 23 15:41:45 2007 New Revision: 56509 Modified: python/trunk/Lib/runpy.py python/trunk/Lib/test/test_runpy.py Log: Correctly cleanup sys.modules after executing runpy relative import tests Restore Python 2.4 ImportError when attempting to execute a package (as imports cannot be guaranteed to work properly if you try it) Modified: python/trunk/Lib/runpy.py ============================================================================== --- python/trunk/Lib/runpy.py (original) +++ python/trunk/Lib/runpy.py Mon Jul 23 15:41:45 2007 @@ -84,10 +84,13 @@ """ loader = get_loader(mod_name) if loader is None: - raise ImportError("No module named " + mod_name) + raise ImportError("No module named %s" % mod_name) + if loader.is_package(mod_name): + raise ImportError(("%s is a package and cannot " + + "be directly executed") % mod_name) code = loader.get_code(mod_name) if code is None: - raise ImportError("No 
code object available for " + mod_name) + raise ImportError("No code object available for %s" % mod_name) filename = _get_filename(loader, mod_name) if run_name is None: run_name = mod_name Modified: python/trunk/Lib/test/test_runpy.py ============================================================================== --- python/trunk/Lib/test/test_runpy.py (original) +++ python/trunk/Lib/test/test_runpy.py Mon Jul 23 15:41:45 2007 @@ -77,12 +77,16 @@ self.fail("Expected import error for " + mod_name) def test_invalid_names(self): + # Builtin module self.expect_import_error("sys") + # Non-existent modules self.expect_import_error("sys.imp.eric") self.expect_import_error("os.path.half") self.expect_import_error("a.bee") self.expect_import_error(".howard") self.expect_import_error("..eaten") + # Package + self.expect_import_error("logging") def test_library_module(self): run_module("runpy") @@ -115,13 +119,9 @@ return pkg_dir, mod_fname, mod_name def _del_pkg(self, top, depth, mod_name): - for i in range(depth+1): # Don't forget the module itself - parts = mod_name.rsplit(".", i) - entry = parts[0] - try: + for entry in list(sys.modules): + if entry.startswith("__runpy_pkg__"): del sys.modules[entry] - except KeyError, ex: - if verbose: print ex # Persist with cleaning up if verbose: print " Removed sys.modules entries" del sys.path[0] if verbose: print " Removed sys.path entry" From buildbot at python.org Mon Jul 23 16:14:51 2007 From: buildbot at python.org (buildbot at python.org) Date: Mon, 23 Jul 2007 14:14:51 +0000 Subject: [Python-checkins] buildbot warnings in x86 XP-3 trunk Message-ID: <20070723141451.B79E11E4007@bag.python.org> The Buildbot has detected a new failure of x86 XP-3 trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520XP-3%2520trunk/builds/93 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: nick.coghlan Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_urllib2net sincerely, -The Buildbot From python-checkins at python.org Mon Jul 23 19:11:07 2007 From: python-checkins at python.org (georg.brandl) Date: Mon, 23 Jul 2007 19:11:07 +0200 (CEST) Subject: [Python-checkins] r56511 - doctools/trunk/README Message-ID: <20070723171107.1123B1E400C@bag.python.org> Author: georg.brandl Date: Mon Jul 23 19:11:06 2007 New Revision: 56511 Modified: doctools/trunk/README Log: Give explicit docutils version in readme. Modified: doctools/trunk/README ============================================================================== --- doctools/trunk/README (original) +++ doctools/trunk/README Mon Jul 23 19:11:06 2007 @@ -9,7 +9,8 @@ --------------------- This project uses Python 2.5 features, so you'll need a working Python -2.5 setup. +2.5 setup. Docutils 0.4 is required, the Docutils SVN trunk does not +work at the moment, but I don't expect it to be difficult to fix that. If you want code highlighting, you need Pygments >= 0.8, easily installable from PyPI. Jinja, the template engine, is included as a From MortonDianne at acw.us Mon Jul 23 21:59:50 2007 From: MortonDianne at acw.us (Johnathan A Odell) Date: Mon, 23 Jul 2007 16:59:50 -0300 Subject: [Python-checkins] Hospital data for the US Message-ID: <0A137EE3.7684.0415C2D5@localhost> Only until July 27 - Order the Doctor Contact List and get hospitals, dentists, chiropractors and PT/OT completely free! Licensed Doctors in the USA 788,177 in total ? 
17,400 emails 34 primary and secondary specialties Fields: First name, Last name, Title, Specialty, Address (city, state, zip, county), Medical School Attended, Location of Residency Training, Phone, Fax, Email, Website, Primary Specialty, Secondary Specialty, Graduation Year, Major Activity, Hospital, Group Practice Abms Certification Special Price: $398 ----------------------------------------------------------------------- FREE OFFER: Get the 4 directories below for FREE with the purchase of the Doctor data - ----------------------------------------------------------------------- Hospitals in the USA 23,000 Admins in more than 7,000 hospitals (value: $399) Dentists in the USA 597,000 dentists and dental services (value: $299) American Chiropractor Directory 100,000 chiropractor?s offices in the USA (value: $249) Physical Therapists in the USA 125,000 listings for PT, OT RT and Speech Therapy (value: $249) send email to: themedbiz at hotmail.com call us: (206) 600-6530 by sending us an email with "exit" in the subject we will know not to contact you again From python-checkins at python.org Tue Jul 24 11:39:58 2007 From: python-checkins at python.org (georg.brandl) Date: Tue, 24 Jul 2007 11:39:58 +0200 (CEST) Subject: [Python-checkins] r56517 - doctools/trunk/README Message-ID: <20070724093958.139F91E400C@bag.python.org> Author: georg.brandl Date: Tue Jul 24 11:39:57 2007 New Revision: 56517 Modified: doctools/trunk/README Log: Update the readme a bit. Modified: doctools/trunk/README ============================================================================== --- doctools/trunk/README (original) +++ doctools/trunk/README Tue Jul 24 11:39:57 2007 @@ -1,9 +1,5 @@ -py-rest-doc -=========== - -This sandbox project is about moving the official Python documentation -to reStructuredText. 
- +doctools README +=============== What you need to know --------------------- @@ -78,3 +74,13 @@ where you can configure the server and port for the application as well as different other settings specific to the web app. + +Writing new documentation +------------------------- + +The new "Documenting Python" document, contained in a converted documentation +tree, already lists most of the markup and features, both standard reST and +additional, that can be used in the source docs. + +More reST docs are at http://docutils.sf.net/rst.html, tutorials can be found on +the web. \ No newline at end of file From python-checkins at python.org Tue Jul 24 12:25:54 2007 From: python-checkins at python.org (georg.brandl) Date: Tue, 24 Jul 2007 12:25:54 +0200 (CEST) Subject: [Python-checkins] r56518 - in doctools/trunk: HACKING TODO sphinx/__init__.py sphinx/builder.py sphinx/console.py sphinx/json.py sphinx/search.py sphinx/smartypants.py sphinx/stemmer.py sphinx/util sphinx/util.py sphinx/util/__init__.py sphinx/util/console.py sphinx/util/json.py sphinx/util/smartypants.py sphinx/util/stemmer.py sphinx/web/wsgiutil.py sphinx/writer.py Message-ID: <20070724102554.B34791E4011@bag.python.org> Author: georg.brandl Date: Tue Jul 24 12:25:53 2007 New Revision: 56518 Added: doctools/trunk/HACKING doctools/trunk/sphinx/util/ doctools/trunk/sphinx/util/__init__.py - copied unchanged from r56508, doctools/trunk/sphinx/util.py doctools/trunk/sphinx/util/console.py - copied, changed from r56508, doctools/trunk/sphinx/console.py doctools/trunk/sphinx/util/json.py - copied, changed from r56508, doctools/trunk/sphinx/json.py doctools/trunk/sphinx/util/smartypants.py - copied unchanged from r56508, doctools/trunk/sphinx/smartypants.py doctools/trunk/sphinx/util/stemmer.py - copied, changed from r56508, doctools/trunk/sphinx/stemmer.py Removed: doctools/trunk/sphinx/console.py doctools/trunk/sphinx/json.py doctools/trunk/sphinx/smartypants.py doctools/trunk/sphinx/stemmer.py 
doctools/trunk/sphinx/util.py Modified: doctools/trunk/TODO doctools/trunk/sphinx/__init__.py doctools/trunk/sphinx/builder.py doctools/trunk/sphinx/search.py doctools/trunk/sphinx/web/wsgiutil.py doctools/trunk/sphinx/writer.py Log: Move utils to separate package, add coding document. Added: doctools/trunk/HACKING ============================================================================== --- (empty file) +++ doctools/trunk/HACKING Tue Jul 24 12:25:53 2007 @@ -0,0 +1,140 @@ +.. -*- mode: rst -*- + +=============== +Coding overview +=============== + +This document tries to give you a cursory overview of the doctools code. + + +Dependencies +------------ + +The converter doesn't have any dependencies except Python 2.5. + +Sphinx needs Python 2.5, Docutils 0.4 (not SVN, because of API changes), Jinja +>= 1.1 (which is at the moment included as an SVN external) and Pygments >= 0.8 +(which is optional and can be installed from the cheese shop). + + +The converter +------------- + +There's not too much to say about the converter. It's quite as finished as +possible, and as it has to only work with the body of documentation found in the +Python core, it doesn't have to be as general as possible. + +(If other projects using the LaTeX documentation toolchain want to convert their +docs to the new format, the converter will probably have to be amended.) + +In ``restwriter.py``, there's some commentary about the inner works of the +converter concerning a single file. + +The ``filenamemap.py`` file tells the converter how to rearrange the converted +files in the reST source directories. There, for example, the tutorial is split +up in several files, and old or unusable files are flagged as not convertable. +Also, non-LaTeX files, such as code include files, are listed to be copied into +corresponding directories. 
+ +The directory ``newfiles`` contains a bunch of files that didn't exist in the +old distribution, such as the documentation of Sphinx markup, that will be +copied to the reST directory too. + + +Sphinx +------ + +Sphinx consists of two parts: + +* The builder takes the reST sources and converts them into an output format. + (Presently, HTML, HTML Help or webapp-usable pickles.) + +* The web application takes the webapp-usable pickles, which mainly contain the + HTML bodies converted from reST and some additional information, and turns them + into a WSGI application, complete with commenting, navigation etc. + (The subpackage ``web`` is responsible for this.) + +An overview of the source files: + +addnodes.py + Contains docutils node classes that are not part of standard docutils. These + node classes must be handled by every docutils writer that gets one of our + nodetrees. + + (The docutils parse a reST document into a tree of "nodes". This nodetree can + then be converted into an internal representation, XML or anything a Writer + exists for.) + +builder.py + Contains the Builder classes, which are responsible for the process of building + the output files from docutils node trees. + + The builder is called by ``sphinx-build.py``. + +directives.py + Directive functions that transform our custom directives (like ``.. function::``) + into doctree nodes. + +environment.py + The "build environment", a class that holds metadata about all doctrees, and is + responsible for building them out of reST source files. + + The environment is stored, in a pickled form, in the output directory, in + order to enable incremental builds if only a few source files change, which + usually is the case. + +highlighting.py + Glue to the Pygments highlighting library. Will use no highlighting at all if + that is not installed. Probably a stripped down version of the Pygments Python + lexer and HTML formatter could be included. + +htmlhelp.py + HTML help builder helper methods. 
+ +_jinja.py, jinja + The Jinja templating engine, used for all HTML-related builders. + +refcounting.py + Helper to keep track of reference count data for the C API reference, + which is maintained as a separate file. + +roles.py + Role functions that transform our custom roles (like ``:meth:``) into doctree + nodes. + +search.py + Helper to create a search index for the offline search. + +style + Directory for all static files for HTML-related builders. + +templates + Directory for Jinja templates, ATM only for HTML. + +util + General utilities. + +writer.py + The docutils HTML writer subclass which understands our additional nodes. + + +Code style +---------- + +PEP 8 (http://www.python.org/dev/peps/pep-0008) must be observed, with the +following exceptions: + +* Line length is limited to 90 characters. +* Relative imports are used, using with the new-in-2.5 'leading dot' syntax. + +The file encoding is UTF-8, this should be indicated in the file's first line +with :: + + # -*- coding: utf-8 -*- + + +Python 3.0 compatibility +------------------------ + +As it will be used for Python 3.0 too, the toolset should be kept in a state +where it is fully usable Python 3 code after one run of the ``2to3`` utility. 
Modified: doctools/trunk/TODO ============================================================================== --- doctools/trunk/TODO (original) +++ doctools/trunk/TODO Tue Jul 24 12:25:53 2007 @@ -2,6 +2,7 @@ =========== - discuss and debug comments system +- navigation links at the bottom too - write new Makefile, handle automatic version info and checkout - write a "printable" builder (export to latex, most probably) - discuss the default role Modified: doctools/trunk/sphinx/__init__.py ============================================================================== --- doctools/trunk/sphinx/__init__.py (original) +++ doctools/trunk/sphinx/__init__.py Tue Jul 24 12:25:53 2007 @@ -14,7 +14,7 @@ from os import path from .builder import builders -from .console import nocolor +from .util.console import nocolor __version__ = '$Revision: 5369 $' @@ -99,6 +99,10 @@ elif opt == '-N': nocolor() + if sys.platform == 'win32': + # Windows' cmd box doesn't understand ANSI sequences + nocolor() + if builder is None: print 'No builder selected, using default: html' builder = 'html' Modified: doctools/trunk/sphinx/builder.py ============================================================================== --- doctools/trunk/sphinx/builder.py (original) +++ doctools/trunk/sphinx/builder.py Tue Jul 24 12:25:53 2007 @@ -29,7 +29,7 @@ from .util import (get_matching_files, attrdict, status_iterator, ensuredir, get_category, relative_uri) from .writer import HTMLWriter -from .console import bold, purple, green +from .util.console import bold, purple, green from .htmlhelp import build_hhx from .environment import BuildEnvironment from .highlighting import pygments, get_stylesheet Deleted: /doctools/trunk/sphinx/console.py ============================================================================== --- /doctools/trunk/sphinx/console.py Tue Jul 24 12:25:53 2007 +++ (empty file) @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.console - ~~~~~~~~~~~~~~ - - Format colored console 
output. - - :copyright: 2007 by Georg Brandl. - :license: Python license. -""" - -codes = {} - -def nocolor(): - codes.clear() - -def colorize(name, text): - return codes.get(name, '') + text + codes.get('reset', '') - -def create_color_func(name): - def inner(text): - return colorize(name, text) - globals()[name] = inner - -_attrs = { - 'reset': '39;49;00m', - 'bold': '01m', - 'faint': '02m', - 'standout': '03m', - 'underline': '04m', - 'blink': '05m', -} - -for name, value in _attrs.items(): - codes[name] = '\x1b[' + value - -_colors = [ - ('black', 'darkgray'), - ('darkred', 'red'), - ('darkgreen', 'green'), - ('brown', 'yellow'), - ('darkblue', 'blue'), - ('purple', 'fuchsia'), - ('turquoise', 'teal'), - ('lightgray', 'white'), -] - -for i, (dark, light) in enumerate(_colors): - codes[dark] = '\x1b[%im' % (i+30) - codes[light] = '\x1b[%i;01m' % (i+30) - -for name in codes: - create_color_func(name) Deleted: /doctools/trunk/sphinx/json.py ============================================================================== --- /doctools/trunk/sphinx/json.py Tue Jul 24 12:25:53 2007 +++ (empty file) @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.json - ~~~~~~~~~~~ - - Minimal JSON module that generates small dumps. - - This is not fully JSON compliant but enough for the searchindex. - And the generated files are smaller than the simplejson ones. - - Uses the basestring encode function from simplejson. - - :copyright: 2007 by Armin Ronacher, Bob Ippolito. - :license: Python license. 
-""" - -import re - -ESCAPE = re.compile(r'[\x00-\x19\\"\b\f\n\r\t]') -ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])') -ESCAPE_DICT = { - '\\': '\\\\', - '"': '\\"', - '\b': '\\b', - '\f': '\\f', - '\n': '\\n', - '\r': '\\r', - '\t': '\\t', -} -for i in range(0x20): - ESCAPE_DICT.setdefault(chr(i), '\\u%04x' % (i,)) - - -def encode_basestring_ascii(s): - def replace(match): - s = match.group(0) - try: - return ESCAPE_DICT[s] - except KeyError: - n = ord(s) - if n < 0x10000: - return '\\u%04x' % (n,) - else: - # surrogate pair - n -= 0x10000 - s1 = 0xd800 | ((n >> 10) & 0x3ff) - s2 = 0xdc00 | (n & 0x3ff) - return '\\u%04x\\u%04x' % (s1, s2) - return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"' - - -def dump_json(obj, key=False): - if key: - if not isinstance(obj, basestring): - obj = str(obj) - return encode_basestring_ascii(obj) - if obj is None: - return 'null' - elif obj is True or obj is False: - return obj and 'true' or 'false' - elif isinstance(obj, (int, long, float)): - return str(obj) - elif isinstance(obj, dict): - return '{%s}' % ','.join('%s:%s' % ( - dump_json(key, True), - dump_json(value) - ) for key, value in obj.iteritems()) - elif isinstance(obj, (tuple, list, set)): - return '[%s]' % ','.join(dump_json(x) for x in obj) - elif isinstance(obj, basestring): - return encode_basestring_ascii(obj) - raise TypeError(type(obj)) Modified: doctools/trunk/sphinx/search.py ============================================================================== --- doctools/trunk/sphinx/search.py (original) +++ doctools/trunk/sphinx/search.py Tue Jul 24 12:25:53 2007 @@ -13,8 +13,8 @@ from collections import defaultdict from docutils.nodes import Text, NodeVisitor -from .stemmer import PorterStemmer -from .json import dump_json +from .util.stemmer import PorterStemmer +from .util.json import dump_json word_re = re.compile(r'\w+(?u)') Deleted: /doctools/trunk/sphinx/smartypants.py ============================================================================== --- 
/doctools/trunk/sphinx/smartypants.py Tue Jul 24 12:25:53 2007 +++ (empty file) @@ -1,263 +0,0 @@ -r""" -This is based on SmartyPants.py by `Chad Miller`_. - -Copyright and License -===================== - -SmartyPants_ license:: - - Copyright (c) 2003 John Gruber - (http://daringfireball.net/) - All rights reserved. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are - met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in - the documentation and/or other materials provided with the - distribution. - - * Neither the name "SmartyPants" nor the names of its contributors - may be used to endorse or promote products derived from this - software without specific prior written permission. - - This software is provided by the copyright holders and contributors "as - is" and any express or implied warranties, including, but not limited - to, the implied warranties of merchantability and fitness for a - particular purpose are disclaimed. In no event shall the copyright - owner or contributors be liable for any direct, indirect, incidental, - special, exemplary, or consequential damages (including, but not - limited to, procurement of substitute goods or services; loss of use, - data, or profits; or business interruption) however caused and on any - theory of liability, whether in contract, strict liability, or tort - (including negligence or otherwise) arising in any way out of the use - of this software, even if advised of the possibility of such damage. - - -smartypants.py license:: - - smartypants.py is a derivative work of SmartyPants. 
- - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are - met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in - the documentation and/or other materials provided with the - distribution. - - This software is provided by the copyright holders and contributors "as - is" and any express or implied warranties, including, but not limited - to, the implied warranties of merchantability and fitness for a - particular purpose are disclaimed. In no event shall the copyright - owner or contributors be liable for any direct, indirect, incidental, - special, exemplary, or consequential damages (including, but not - limited to, procurement of substitute goods or services; loss of use, - data, or profits; or business interruption) however caused and on any - theory of liability, whether in contract, strict liability, or tort - (including negligence or otherwise) arising in any way out of the use - of this software, even if advised of the possibility of such damage. - -.. _Chad Miller: http://web.chad.org/ -""" - -import re - - -def sphinx_smarty_pants(t): - t = t.replace('"', '"') - t = educateDashesOldSchool(t) - t = educateQuotes(t) - t = t.replace('"', '"') - return t - -# Constants for quote education. - -punct_class = r"""[!"#\$\%'()*+,-.\/:;<=>?\@\[\\\]\^_`{|}~]""" -close_class = r"""[^\ \t\r\n\[\{\(\-]""" -dec_dashes = r"""–|—""" - -# Special case if the very first character is a quote -# followed by punctuation at a non-word-break. Close the quotes by brute force: -single_quote_start_re = re.compile(r"""^'(?=%s\\B)""" % (punct_class,)) -double_quote_start_re = re.compile(r"""^"(?=%s\\B)""" % (punct_class,)) - -# Special case for double sets of quotes, e.g.: -#

    He said, "'Quoted' words in a larger quote."

    -double_quote_sets_re = re.compile(r""""'(?=\w)""") -single_quote_sets_re = re.compile(r"""'"(?=\w)""") - -# Special case for decade abbreviations (the '80s): -decade_abbr_re = re.compile(r"""\b'(?=\d{2}s)""") - -# Get most opening double quotes: -opening_double_quotes_regex = re.compile(r""" - ( - \s | # a whitespace char, or -   | # a non-breaking space entity, or - -- | # dashes, or - &[mn]dash; | # named dash entities - %s | # or decimal entities - &\#x201[34]; # or hex - ) - " # the quote - (?=\w) # followed by a word character - """ % (dec_dashes,), re.VERBOSE) - -# Double closing quotes: -closing_double_quotes_regex = re.compile(r""" - #(%s)? # character that indicates the quote should be closing - " - (?=\s) - """ % (close_class,), re.VERBOSE) - -closing_double_quotes_regex_2 = re.compile(r""" - (%s) # character that indicates the quote should be closing - " - """ % (close_class,), re.VERBOSE) - -# Get most opening single quotes: -opening_single_quotes_regex = re.compile(r""" - ( - \s | # a whitespace char, or -   | # a non-breaking space entity, or - -- | # dashes, or - &[mn]dash; | # named dash entities - %s | # or decimal entities - &\#x201[34]; # or hex - ) - ' # the quote - (?=\w) # followed by a word character - """ % (dec_dashes,), re.VERBOSE) - -closing_single_quotes_regex = re.compile(r""" - (%s) - ' - (?!\s | s\b | \d) - """ % (close_class,), re.VERBOSE) - -closing_single_quotes_regex_2 = re.compile(r""" - (%s) - ' - (\s | s\b) - """ % (close_class,), re.VERBOSE) - -def educateQuotes(str): - """ - Parameter: String. - - Returns: The string, with "educated" curly quote HTML entities. - - Example input: "Isn't this fun?" - Example output: “Isn’t this fun?” - """ - - # Special case if the very first character is a quote - # followed by punctuation at a non-word-break. Close the quotes by brute force: - str = single_quote_start_re.sub("’", str) - str = double_quote_start_re.sub("”", str) - - # Special case for double sets of quotes, e.g.: - #

    He said, "'Quoted' words in a larger quote."

    - str = double_quote_sets_re.sub("“‘", str) - str = single_quote_sets_re.sub("‘“", str) - - # Special case for decade abbreviations (the '80s): - str = decade_abbr_re.sub("’", str) - - str = opening_single_quotes_regex.sub(r"\1‘", str) - str = closing_single_quotes_regex.sub(r"\1’", str) - str = closing_single_quotes_regex_2.sub(r"\1’\2", str) - - # Any remaining single quotes should be opening ones: - str = str.replace("'", "‘") - - str = opening_double_quotes_regex.sub(r"\1“", str) - str = closing_double_quotes_regex.sub(r"”", str) - str = closing_double_quotes_regex_2.sub(r"\1”", str) - - # Any remaining quotes should be opening ones. - str = str.replace('"', "“") - - return str - - -def educateBackticks(str): - """ - Parameter: String. - Returns: The string, with ``backticks'' -style double quotes - translated into HTML curly quote entities. - Example input: ``Isn't this fun?'' - Example output: “Isn't this fun?” - """ - return str.replace("``", "“").replace("''", "”") - - -def educateSingleBackticks(str): - """ - Parameter: String. - Returns: The string, with `backticks' -style single quotes - translated into HTML curly quote entities. - - Example input: `Isn't this fun?' - Example output: ‘Isn’t this fun?’ - """ - return str.replace('`', "‘").replace("'", "’") - - -def educateDashesOldSchool(str): - """ - Parameter: String. - - Returns: The string, with each instance of "--" translated to - an en-dash HTML entity, and each "---" translated to - an em-dash HTML entity. - """ - return str.replace('---', "—").replace('--', "–") - - -def educateDashesOldSchoolInverted(str): - """ - Parameter: String. - - Returns: The string, with each instance of "--" translated to - an em-dash HTML entity, and each "---" translated to - an en-dash HTML entity. Two reasons why: First, unlike the - en- and em-dash syntax supported by - EducateDashesOldSchool(), it's compatible with existing - entries written before SmartyPants 1.1, back when "--" was - only used for em-dashes. 
Second, em-dashes are more - common than en-dashes, and so it sort of makes sense that - the shortcut should be shorter to type. (Thanks to Aaron - Swartz for the idea.) - """ - return str.replace('---', "–").replace('--', "—") - - - -def educateEllipses(str): - """ - Parameter: String. - Returns: The string, with each instance of "..." translated to - an ellipsis HTML entity. - - Example input: Huh...? - Example output: Huh…? - """ - return str.replace('...', "…").replace('. . .', "…") - - -__author__ = "Chad Miller " -__version__ = "1.5_1.5: Sat, 13 Aug 2005 15:50:24 -0400" -__url__ = "http://wiki.chad.org/SmartyPantsPy" -__description__ = \ - "Smart-quotes, smart-ellipses, and smart-dashes for weblog entries in pyblosxom" Deleted: /doctools/trunk/sphinx/stemmer.py ============================================================================== --- /doctools/trunk/sphinx/stemmer.py Tue Jul 24 12:25:53 2007 +++ (empty file) @@ -1,344 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -""" - sphinx.stemmer - ~~~~~~~~~~~~~~ - - Porter Stemming Algorithm - - This is the Porter stemming algorithm, ported to Python from the - version coded up in ANSI C by the author. It may be be regarded - as canonical, in that it follows the algorithm presented in - - Porter, 1980, An algorithm for suffix stripping, Program, Vol. 14, - no. 3, pp 130-137, - - only differing from it at the points maked --DEPARTURE-- below. - - See also http://www.tartarus.org/~martin/PorterStemmer - - The algorithm as described in the paper could be exactly replicated - by adjusting the points of DEPARTURE, but this is barely necessary, - because (a) the points of DEPARTURE are definitely improvements, and - (b) no encoding of the Porter stemmer I have seen is anything like - as exact as this version, even with the points of DEPARTURE! - - Release 1: January 2001 - - :copyright: 2001 by Vivake Gupta . - :license: Public Domain (?). 
-""" - -class PorterStemmer(object): - - def __init__(self): - """The main part of the stemming algorithm starts here. - b is a buffer holding a word to be stemmed. The letters are in b[k0], - b[k0+1] ... ending at b[k]. In fact k0 = 0 in this demo program. k is - readjusted downwards as the stemming progresses. Zero termination is - not in fact used in the algorithm. - - Note that only lower case sequences are stemmed. Forcing to lower case - should be done before stem(...) is called. - """ - - self.b = "" # buffer for word to be stemmed - self.k = 0 - self.k0 = 0 - self.j = 0 # j is a general offset into the string - - def cons(self, i): - """cons(i) is TRUE <=> b[i] is a consonant.""" - if self.b[i] == 'a' or self.b[i] == 'e' or self.b[i] == 'i' \ - or self.b[i] == 'o' or self.b[i] == 'u': - return 0 - if self.b[i] == 'y': - if i == self.k0: - return 1 - else: - return (not self.cons(i - 1)) - return 1 - - def m(self): - """m() measures the number of consonant sequences between k0 and j. - if c is a consonant sequence and v a vowel sequence, and <..> - indicates arbitrary presence, - - gives 0 - vc gives 1 - vcvc gives 2 - vcvcvc gives 3 - .... 
- """ - n = 0 - i = self.k0 - while 1: - if i > self.j: - return n - if not self.cons(i): - break - i = i + 1 - i = i + 1 - while 1: - while 1: - if i > self.j: - return n - if self.cons(i): - break - i = i + 1 - i = i + 1 - n = n + 1 - while 1: - if i > self.j: - return n - if not self.cons(i): - break - i = i + 1 - i = i + 1 - - def vowelinstem(self): - """vowelinstem() is TRUE <=> k0,...j contains a vowel""" - for i in range(self.k0, self.j + 1): - if not self.cons(i): - return 1 - return 0 - - def doublec(self, j): - """doublec(j) is TRUE <=> j,(j-1) contain a double consonant.""" - if j < (self.k0 + 1): - return 0 - if (self.b[j] != self.b[j-1]): - return 0 - return self.cons(j) - - def cvc(self, i): - """cvc(i) is TRUE <=> i-2,i-1,i has the form consonant - vowel - consonant - and also if the second c is not w,x or y. this is used when trying to - restore an e at the end of a short e.g. - - cav(e), lov(e), hop(e), crim(e), but - snow, box, tray. - """ - if i < (self.k0 + 2) or not self.cons(i) or self.cons(i-1) or not self.cons(i-2): - return 0 - ch = self.b[i] - if ch == 'w' or ch == 'x' or ch == 'y': - return 0 - return 1 - - def ends(self, s): - """ends(s) is TRUE <=> k0,...k ends with the string s.""" - length = len(s) - if s[length - 1] != self.b[self.k]: # tiny speed-up - return 0 - if length > (self.k - self.k0 + 1): - return 0 - if self.b[self.k-length+1:self.k+1] != s: - return 0 - self.j = self.k - length - return 1 - - def setto(self, s): - """setto(s) sets (j+1),...k to the characters in the string s, readjusting k.""" - length = len(s) - self.b = self.b[:self.j+1] + s + self.b[self.j+length+1:] - self.k = self.j + length - - def r(self, s): - """r(s) is used further down.""" - if self.m() > 0: - self.setto(s) - - def step1ab(self): - """step1ab() gets rid of plurals and -ed or -ing. e.g. 
- - caresses -> caress - ponies -> poni - ties -> ti - caress -> caress - cats -> cat - - feed -> feed - agreed -> agree - disabled -> disable - - matting -> mat - mating -> mate - meeting -> meet - milling -> mill - messing -> mess - - meetings -> meet - """ - if self.b[self.k] == 's': - if self.ends("sses"): - self.k = self.k - 2 - elif self.ends("ies"): - self.setto("i") - elif self.b[self.k - 1] != 's': - self.k = self.k - 1 - if self.ends("eed"): - if self.m() > 0: - self.k = self.k - 1 - elif (self.ends("ed") or self.ends("ing")) and self.vowelinstem(): - self.k = self.j - if self.ends("at"): self.setto("ate") - elif self.ends("bl"): self.setto("ble") - elif self.ends("iz"): self.setto("ize") - elif self.doublec(self.k): - self.k = self.k - 1 - ch = self.b[self.k] - if ch == 'l' or ch == 's' or ch == 'z': - self.k = self.k + 1 - elif (self.m() == 1 and self.cvc(self.k)): - self.setto("e") - - def step1c(self): - """step1c() turns terminal y to i when there is another vowel in the stem.""" - if (self.ends("y") and self.vowelinstem()): - self.b = self.b[:self.k] + 'i' + self.b[self.k+1:] - - def step2(self): - """step2() maps double suffices to single ones. - so -ization ( = -ize plus -ation) maps to -ize etc. note that the - string before the suffix must give m() > 0. 
- """ - if self.b[self.k - 1] == 'a': - if self.ends("ational"): self.r("ate") - elif self.ends("tional"): self.r("tion") - elif self.b[self.k - 1] == 'c': - if self.ends("enci"): self.r("ence") - elif self.ends("anci"): self.r("ance") - elif self.b[self.k - 1] == 'e': - if self.ends("izer"): self.r("ize") - elif self.b[self.k - 1] == 'l': - if self.ends("bli"): self.r("ble") # --DEPARTURE-- - # To match the published algorithm, replace this phrase with - # if self.ends("abli"): self.r("able") - elif self.ends("alli"): self.r("al") - elif self.ends("entli"): self.r("ent") - elif self.ends("eli"): self.r("e") - elif self.ends("ousli"): self.r("ous") - elif self.b[self.k - 1] == 'o': - if self.ends("ization"): self.r("ize") - elif self.ends("ation"): self.r("ate") - elif self.ends("ator"): self.r("ate") - elif self.b[self.k - 1] == 's': - if self.ends("alism"): self.r("al") - elif self.ends("iveness"): self.r("ive") - elif self.ends("fulness"): self.r("ful") - elif self.ends("ousness"): self.r("ous") - elif self.b[self.k - 1] == 't': - if self.ends("aliti"): self.r("al") - elif self.ends("iviti"): self.r("ive") - elif self.ends("biliti"): self.r("ble") - elif self.b[self.k - 1] == 'g': # --DEPARTURE-- - if self.ends("logi"): self.r("log") - # To match the published algorithm, delete this phrase - - def step3(self): - """step3() dels with -ic-, -full, -ness etc. 
similar strategy to step2.""" - if self.b[self.k] == 'e': - if self.ends("icate"): self.r("ic") - elif self.ends("ative"): self.r("") - elif self.ends("alize"): self.r("al") - elif self.b[self.k] == 'i': - if self.ends("iciti"): self.r("ic") - elif self.b[self.k] == 'l': - if self.ends("ical"): self.r("ic") - elif self.ends("ful"): self.r("") - elif self.b[self.k] == 's': - if self.ends("ness"): self.r("") - - def step4(self): - """step4() takes off -ant, -ence etc., in context vcvc.""" - if self.b[self.k - 1] == 'a': - if self.ends("al"): pass - else: return - elif self.b[self.k - 1] == 'c': - if self.ends("ance"): pass - elif self.ends("ence"): pass - else: return - elif self.b[self.k - 1] == 'e': - if self.ends("er"): pass - else: return - elif self.b[self.k - 1] == 'i': - if self.ends("ic"): pass - else: return - elif self.b[self.k - 1] == 'l': - if self.ends("able"): pass - elif self.ends("ible"): pass - else: return - elif self.b[self.k - 1] == 'n': - if self.ends("ant"): pass - elif self.ends("ement"): pass - elif self.ends("ment"): pass - elif self.ends("ent"): pass - else: return - elif self.b[self.k - 1] == 'o': - if self.ends("ion") and (self.b[self.j] == 's' \ - or self.b[self.j] == 't'): pass - elif self.ends("ou"): pass - # takes care of -ous - else: return - elif self.b[self.k - 1] == 's': - if self.ends("ism"): pass - else: return - elif self.b[self.k - 1] == 't': - if self.ends("ate"): pass - elif self.ends("iti"): pass - else: return - elif self.b[self.k - 1] == 'u': - if self.ends("ous"): pass - else: return - elif self.b[self.k - 1] == 'v': - if self.ends("ive"): pass - else: return - elif self.b[self.k - 1] == 'z': - if self.ends("ize"): pass - else: return - else: - return - if self.m() > 1: - self.k = self.j - - def step5(self): - """step5() removes a final -e if m() > 1, and changes -ll to -l if - m() > 1. 
- """ - self.j = self.k - if self.b[self.k] == 'e': - a = self.m() - if a > 1 or (a == 1 and not self.cvc(self.k-1)): - self.k = self.k - 1 - if self.b[self.k] == 'l' and self.doublec(self.k) and self.m() > 1: - self.k = self.k -1 - - def stem(self, p, i, j): - """In stem(p,i,j), p is a char pointer, and the string to be stemmed - is from p[i] to p[j] inclusive. Typically i is zero and j is the - offset to the last character of a string, (p[j+1] == '\0'). The - stemmer adjusts the characters p[i] ... p[j] and returns the new - end-point of the string, k. Stemming never increases word length, so - i <= k <= j. To turn the stemmer into a module, declare 'stem' as - extern, and delete the remainder of this file. - """ - # copy the parameters into statics - self.b = p - self.k = j - self.k0 = i - if self.k <= self.k0 + 1: - return self.b # --DEPARTURE-- - - # With this line, strings of length 1 or 2 don't go through the - # stemming process, although no mention is made of this in the - # published algorithm. Remove the line to match the published - # algorithm. - - self.step1ab() - self.step1c() - self.step2() - self.step3() - self.step4() - self.step5() - return self.b[self.k0:self.k+1] Deleted: /doctools/trunk/sphinx/util.py ============================================================================== --- /doctools/trunk/sphinx/util.py Tue Jul 24 12:25:53 2007 +++ (empty file) @@ -1,109 +0,0 @@ -# -*- coding: utf-8 -*- -""" - sphinx.util - ~~~~~~~~~~~ - - Utility functions for Sphinx. - - :copyright: 2007 by Georg Brandl. - :license: Python license. 
-""" - -import os -import sys -import fnmatch -from os import path - - -def relative_uri(base, to): - """Return a relative URL from ``base`` to ``to``.""" - b2 = base.split('/') - t2 = to.split('/') - # remove common segments - for x, y in zip(b2, t2): - if x != y: - break - b2.pop(0) - t2.pop(0) - return '../' * (len(b2)-1) + '/'.join(t2) - - -def ensuredir(path): - """Ensure that a path exists.""" - try: - os.makedirs(path) - except OSError, err: - if not err.errno == 17: - raise - - -def status_iterator(iterable, colorfunc=lambda x: x, stream=sys.stdout): - """Print out each item before yielding it.""" - for item in iterable: - print >>stream, colorfunc(item), - stream.flush() - yield item - print >>stream - - -def get_matching_files(dirname, pattern, exclude=()): - """Get all files matching a pattern in a directory, recursively.""" - # dirname is a normalized absolute path. - dirname = path.normpath(path.abspath(dirname)) - dirlen = len(dirname) + 1 # exclude slash - for root, dirs, files in os.walk(dirname): - dirs.sort() - files.sort() - for sfile in files: - if not fnmatch.fnmatch(sfile, pattern): - continue - qualified_name = path.join(root[dirlen:], sfile) - if qualified_name in exclude: - continue - yield qualified_name - - -def get_category(filename): - """Get the "category" part of a RST filename.""" - parts = filename.split('/', 1) - if len(parts) < 2: - return - return parts[0] - - -def shorten_result(text='', keywords=[], maxlen=240, fuzz=60): - if not text: - text = '' - text_low = text.lower() - beg = -1 - for k in keywords: - i = text_low.find(k.lower()) - if (i > -1 and i < beg) or beg == -1: - beg = i - excerpt_beg = 0 - if beg > fuzz: - for sep in ('.', ':', ';', '='): - eb = text.find(sep, beg - fuzz, beg - 1) - if eb > -1: - eb += 1 - break - else: - eb = beg - fuzz - excerpt_beg = eb - if excerpt_beg < 0: - excerpt_beg = 0 - msg = text[excerpt_beg:beg+maxlen] - if beg > fuzz: - msg = '... 
' + msg - if beg < len(text)-maxlen: - msg = msg + ' ...' - return msg - - -class attrdict(dict): - def __getattr__(self, key): - return self[key] - def __setattr__(self, key, val): - self[key] = val - def __delattr__(self, key): - del self[key] Copied: doctools/trunk/sphinx/util/console.py (from r56508, doctools/trunk/sphinx/console.py) ============================================================================== --- doctools/trunk/sphinx/console.py (original) +++ doctools/trunk/sphinx/util/console.py Tue Jul 24 12:25:53 2007 @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """ - sphinx.console - ~~~~~~~~~~~~~~ + sphinx.util.console + ~~~~~~~~~~~~~~~~~~~ Format colored console output. Copied: doctools/trunk/sphinx/util/json.py (from r56508, doctools/trunk/sphinx/json.py) ============================================================================== --- doctools/trunk/sphinx/json.py (original) +++ doctools/trunk/sphinx/util/json.py Tue Jul 24 12:25:53 2007 @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """ - sphinx.json - ~~~~~~~~~~~ + sphinx.util.json + ~~~~~~~~~~~~~~~~ Minimal JSON module that generates small dumps. 
Copied: doctools/trunk/sphinx/util/stemmer.py (from r56508, doctools/trunk/sphinx/stemmer.py) ============================================================================== --- doctools/trunk/sphinx/stemmer.py (original) +++ doctools/trunk/sphinx/util/stemmer.py Tue Jul 24 12:25:53 2007 @@ -1,8 +1,8 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- """ - sphinx.stemmer - ~~~~~~~~~~~~~~ + sphinx.util.stemmer + ~~~~~~~~~~~~~~~~~~~ Porter Stemming Algorithm Modified: doctools/trunk/sphinx/web/wsgiutil.py ============================================================================== --- doctools/trunk/sphinx/web/wsgiutil.py (original) +++ doctools/trunk/sphinx/web/wsgiutil.py Tue Jul 24 12:25:53 2007 @@ -24,7 +24,7 @@ from cStringIO import StringIO from .util import lazy_property -from .json import dump_json +from ..util.json import dump_json HTTP_STATUS_CODES = { Modified: doctools/trunk/sphinx/writer.py ============================================================================== --- doctools/trunk/sphinx/writer.py (original) +++ doctools/trunk/sphinx/writer.py Tue Jul 24 12:25:53 2007 @@ -12,7 +12,7 @@ from docutils import nodes from docutils.writers.html4css1 import Writer, HTMLTranslator as BaseTranslator -from .smartypants import sphinx_smarty_pants +from .util.smartypants import sphinx_smarty_pants class HTMLWriter(Writer): From python-checkins at python.org Tue Jul 24 15:07:38 2007 From: python-checkins at python.org (nick.coghlan) Date: Tue, 24 Jul 2007 15:07:38 +0200 (CEST) Subject: [Python-checkins] r56519 - python/trunk/Lib/test/test_runpy.py Message-ID: <20070724130738.83B241E400D@bag.python.org> Author: nick.coghlan Date: Tue Jul 24 15:07:38 2007 New Revision: 56519 Modified: python/trunk/Lib/test/test_runpy.py Log: Tweak runpy test to do a better job of confirming that sys has been manipulated correctly Modified: python/trunk/Lib/test/test_runpy.py ============================================================================== --- 
python/trunk/Lib/test/test_runpy.py (original) +++ python/trunk/Lib/test/test_runpy.py Tue Jul 24 15:07:38 2007 @@ -21,8 +21,9 @@ "# Check the sys module\n" "import sys\n" "run_argv0 = sys.argv[0]\n" - "if __name__ in sys.modules:\n" - " run_name = sys.modules[__name__].__name__\n" + "run_name_in_sys_modules = __name__ in sys.modules\n" + "if run_name_in_sys_modules:\n" + " module_in_sys_modules = globals() is sys.modules[__name__].__dict__\n" "# Check nested operation\n" "import runpy\n" "nested = runpy._run_module_code('x=1\\n', mod_name='',\n" @@ -48,7 +49,8 @@ self.failUnless(d2["result"] == self.expected_result) self.failUnless(d2["nested"]["x"] == 1) self.failUnless(d2["__name__"] is name) - self.failUnless(d2["run_name"] is name) + self.failUnless(d2["run_name_in_sys_modules"]) + self.failUnless(d2["module_in_sys_modules"]) self.failUnless(d2["__file__"] is file) self.failUnless(d2["run_argv0"] is file) self.failUnless(d2["__loader__"] is loader) From buildbot at python.org Tue Jul 24 15:50:22 2007 From: buildbot at python.org (buildbot at python.org) Date: Tue, 24 Jul 2007 13:50:22 +0000 Subject: [Python-checkins] buildbot warnings in x86 gentoo trunk Message-ID: <20070724135023.12E811E4012@bag.python.org> The Buildbot has detected a new failure of x86 gentoo trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520gentoo%2520trunk/builds/2322 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: nick.coghlan Build had warnings: warnings test Excerpt from the test logfile: Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/threading.py", line 445, in run self.__target(*self.__args, **self.__kwargs) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/test/test_smtplib.py", line 14, in server serv.bind(("", 9091)) File "", line 1, in bind error: (98, 'Address already in use') sincerely, -The Buildbot From python-checkins at python.org Tue Jul 24 15:58:28 2007 From: python-checkins at python.org (nick.coghlan) Date: Tue, 24 Jul 2007 15:58:28 +0200 (CEST) Subject: [Python-checkins] r56520 - in python/trunk: Doc/lib/librunpy.tex Lib/runpy.py Lib/test/test_runpy.py Message-ID: <20070724135828.63AB41E4006@bag.python.org> Author: nick.coghlan Date: Tue Jul 24 15:58:28 2007 New Revision: 56520 Modified: python/trunk/Doc/lib/librunpy.tex python/trunk/Lib/runpy.py python/trunk/Lib/test/test_runpy.py Log: Fix an incompatibility between the -i and -m command line switches as reported on python-dev by PJE - runpy.run_module now leaves any changes it makes to the sys module intact after the function terminates Modified: python/trunk/Doc/lib/librunpy.tex ============================================================================== --- python/trunk/Doc/lib/librunpy.tex (original) +++ python/trunk/Doc/lib/librunpy.tex Tue Jul 24 15:58:28 2007 @@ -56,9 +56,11 @@ If the argument \var{alter_sys} is supplied and evaluates to \code{True}, then \code{sys.argv[0]} is updated with the value of \code{__file__} and \code{sys.modules[__name__]} is updated with a -temporary module object for 
the module being executed. Both -\code{sys.argv[0]} and \code{sys.modules[__name__]} are restored to -their original values before the function returns. +new module object for the module being executed. Note that neither +\code{sys.argv[0]} nor \code{sys.modules[__name__]} are restored to +their original values before the function returns - if client code +needs these values preserved, it must either save them explicitly or +else avoid enabling the automatic alterations to \module{sys}. Note that this manipulation of \module{sys} is not thread-safe. Other threads may see the partially initialised module, as well as the Modified: python/trunk/Lib/runpy.py ============================================================================== --- python/trunk/Lib/runpy.py (original) +++ python/trunk/Lib/runpy.py Tue Jul 24 15:58:28 2007 @@ -33,36 +33,21 @@ return run_globals def _run_module_code(code, init_globals=None, - mod_name=None, mod_fname=None, - mod_loader=None, alter_sys=False): + mod_name=None, mod_fname=None, + mod_loader=None, alter_sys=False): """Helper for run_module""" # Set up the top level namespace dictionary if alter_sys: - # Modify sys.argv[0] and sys.module[mod_name] - temp_module = imp.new_module(mod_name) - mod_globals = temp_module.__dict__ - saved_argv0 = sys.argv[0] - restore_module = mod_name in sys.modules - if restore_module: - saved_module = sys.modules[mod_name] + # Modify sys.argv[0] and sys.modules[mod_name] sys.argv[0] = mod_fname - sys.modules[mod_name] = temp_module - try: - _run_code(code, mod_globals, init_globals, - mod_name, mod_fname, mod_loader) - finally: - sys.argv[0] = saved_argv0 - if restore_module: - sys.modules[mod_name] = saved_module - else: - del sys.modules[mod_name] - # Copy the globals of the temporary module, as they - # may be cleared when the temporary module goes away - return mod_globals.copy() + module = imp.new_module(mod_name) + sys.modules[mod_name] = module + mod_globals = module.__dict__ else: # Leave the sys 
module alone - return _run_code(code, {}, init_globals, - mod_name, mod_fname, mod_loader) + mod_globals = {} + return _run_code(code, mod_globals, init_globals, + mod_name, mod_fname, mod_loader) # This helper is needed due to a missing component in the PEP 302 Modified: python/trunk/Lib/test/test_runpy.py ============================================================================== --- python/trunk/Lib/test/test_runpy.py (original) +++ python/trunk/Lib/test/test_runpy.py Tue Jul 24 15:58:28 2007 @@ -26,8 +26,7 @@ " module_in_sys_modules = globals() is sys.modules[__name__].__dict__\n" "# Check nested operation\n" "import runpy\n" - "nested = runpy._run_module_code('x=1\\n', mod_name='',\n" - " alter_sys=True)\n" + "nested = runpy._run_module_code('x=1\\n', mod_name='')\n" ) @@ -38,35 +37,44 @@ loader = "Now you're just being silly" d1 = dict(initial=initial) saved_argv0 = sys.argv[0] - d2 = _run_module_code(self.test_source, - d1, - name, - file, - loader, - True) - self.failUnless("result" not in d1) - self.failUnless(d2["initial"] is initial) - self.failUnless(d2["result"] == self.expected_result) - self.failUnless(d2["nested"]["x"] == 1) - self.failUnless(d2["__name__"] is name) - self.failUnless(d2["run_name_in_sys_modules"]) - self.failUnless(d2["module_in_sys_modules"]) - self.failUnless(d2["__file__"] is file) - self.failUnless(d2["run_argv0"] is file) - self.failUnless(d2["__loader__"] is loader) - self.failUnless(sys.argv[0] is saved_argv0) - self.failUnless(name not in sys.modules) + try: + d2 = _run_module_code(self.test_source, + d1, + name, + file, + loader, + alter_sys=True) + self.failUnless("result" not in d1) + self.failUnless(d2["initial"] is initial) + self.failUnless(d2["result"] == self.expected_result) + self.failUnless(d2["nested"]["x"] == 1) + self.failUnless(d2["nested"]["__name__"] == "") + self.failUnless(d2["__name__"] is name) + self.failUnless(d2["__file__"] is file) + self.failUnless(d2["__loader__"] is loader) + 
self.failUnless(d2["run_argv0"] is file) + self.failUnless(d2["run_name_in_sys_modules"]) + self.failUnless(d2["module_in_sys_modules"]) + self.failUnless(sys.argv[0] is not saved_argv0) + self.failUnless(name in sys.modules) + finally: + sys.argv[0] = saved_argv0 + if name in sys.modules: + del sys.modules[name] def test_run_module_code_defaults(self): saved_argv0 = sys.argv[0] d = _run_module_code(self.test_source) self.failUnless(d["result"] == self.expected_result) + self.failUnless(d["nested"]["x"] == 1) + self.failUnless(d["nested"]["__name__"] == "") self.failUnless(d["__name__"] is None) self.failUnless(d["__file__"] is None) self.failUnless(d["__loader__"] is None) self.failUnless(d["run_argv0"] is saved_argv0) - self.failUnless("run_name" not in d) + self.failUnless(not d["run_name_in_sys_modules"]) self.failUnless(sys.argv[0] is saved_argv0) + self.failUnless(None not in sys.modules) class RunModuleTest(unittest.TestCase): From python-checkins at python.org Tue Jul 24 16:37:08 2007 From: python-checkins at python.org (erik.forsberg) Date: Tue, 24 Jul 2007 16:37:08 +0200 (CEST) Subject: [Python-checkins] r56521 - tracker/importer Message-ID: <20070724143708.C83371E4014@bag.python.org> Author: erik.forsberg Date: Tue Jul 24 16:37:08 2007 New Revision: 56521 Modified: tracker/importer/ (props changed) Log: Ignore *.pyc From python-checkins at python.org Tue Jul 24 16:38:22 2007 From: python-checkins at python.org (erik.forsberg) Date: Tue, 24 Jul 2007 16:38:22 +0200 (CEST) Subject: [Python-checkins] r56522 - tracker/importer Message-ID: <20070724143822.79AF81E4006@bag.python.org> Author: erik.forsberg Date: Tue Jul 24 16:38:22 2007 New Revision: 56522 Modified: tracker/importer/ (props changed) Log: Ignore *.pyc (and remove wrongly named property From python-checkins at python.org Tue Jul 24 16:39:23 2007 From: python-checkins at python.org (nick.coghlan) Date: Tue, 24 Jul 2007 16:39:23 +0200 (CEST) Subject: [Python-checkins] r56523 - 
python/trunk/Lib/test/test_resource.py Message-ID: <20070724143923.677CC1E4006@bag.python.org> Author: nick.coghlan Date: Tue Jul 24 16:39:23 2007 New Revision: 56523 Modified: python/trunk/Lib/test/test_resource.py Log: Try to get rid of spurious failure in test_resource on the Debian buildbots by changing the file size limit before attempting to close the file Modified: python/trunk/Lib/test/test_resource.py ============================================================================== --- python/trunk/Lib/test/test_resource.py (original) +++ python/trunk/Lib/test/test_resource.py Tue Jul 24 16:39:23 2007 @@ -49,17 +49,24 @@ except ValueError: limit_set = False f = open(test_support.TESTFN, "wb") - f.write("X" * 1024) try: - f.write("Y") - f.flush() - except IOError: - if not limit_set: - raise - f.close() - os.unlink(test_support.TESTFN) + f.write("X" * 1024) + try: + f.write("Y") + f.flush() + except IOError: + if not limit_set: + raise + if limit_set: + # Close will attempt to flush the byte we wrote + # Restore limit first to avoid getting a spurious error + resource.setrlimit(resource.RLIMIT_FSIZE, (cur, max)) + finally: + f.close() + os.unlink(test_support.TESTFN) finally: - resource.setrlimit(resource.RLIMIT_FSIZE, (cur, max)) + if limit_set: + resource.setrlimit(resource.RLIMIT_FSIZE, (cur, max)) def test_fsize_toobig(self): # Be sure that setrlimit is checking for really large values From python-checkins at python.org Tue Jul 24 16:40:11 2007 From: python-checkins at python.org (erik.forsberg) Date: Tue, 24 Jul 2007 16:40:11 +0200 (CEST) Subject: [Python-checkins] r56524 - tracker/importer/config.py tracker/importer/xmlexport2handlers.py tracker/importer/xmlexport2toroundup.py Message-ID: <20070724144011.6374F1E4006@bag.python.org> Author: erik.forsberg Date: Tue Jul 24 16:40:10 2007 New Revision: 56524 Added: tracker/importer/config.py tracker/importer/xmlexport2handlers.py tracker/importer/xmlexport2toroundup.py - copied, changed from r56508, 
tracker/importer/sfxml2roundup.py Log: Importer for the "new" format produced by xml_export2.php Added: tracker/importer/config.py ============================================================================== --- (empty file) +++ tracker/importer/config.py Tue Jul 24 16:40:10 2007 @@ -0,0 +1,26 @@ +mappings = {'category': + {"Demos and tools":"Demos and Tools", + "Distutils and setup.py":"Distutils", + "Python Interpreter Core":"Interpreter Core", + "Core (C code)":"Interpreter Core", + "Python Library":"Library (Lib)", + "Modules":"Extension Modules", + "Parser/Compiler":"Interpreter Core", + "Performance":"Interpreter Core", + "Threads":"Interpreter Core", + "Type/class unification":"Interpreter Core"}, + + 'priority': + {'1':'low', + '2':'low', + '3':'low', + '4':'low', + '5':'normal', + '6':'high', + '7':'high', + '8':'immediate', + '9':'urgent' + }, + } + + Added: tracker/importer/xmlexport2handlers.py ============================================================================== --- (empty file) +++ tracker/importer/xmlexport2handlers.py Tue Jul 24 16:40:10 2007 @@ -0,0 +1,429 @@ +import time, os, urllib, socket, mimetools, stat, re + +from config import mappings +import time + +import BeautifulSoup as BS + +# slightly silly +try: + # import xml.etree.cElementTree as ET # may crash in 2.5b2 !? 
+ import xml.etree.ElementTree as ET +except ImportError: + try: + import cElementTree as ET + except ImportError: + import elementtree.ElementTree as ET + +import htmlentitydefs + +from roundup.support import ensureParentsExist +from roundup.date import Date + +class XMLExport2Handler: + def __init__(self, db, source, target): + self.db = db + self.source = source + self.target = target + + def handle(self, item, roundupdata): + raise NotImplementedError + + +class TextValueHandler(XMLExport2Handler): + def handle(self, item, roundupdata): + roundupdata[self.target] = item.find(self.source).text.encode('utf-8') + +class StatusHandler(XMLExport2Handler): + def __init__(self, db, source, target, statuses): + XMLExport2Handler.__init__(self, db, source, target) + self.statuses = statuses + + def handle(self, item, roundupdata): + status = self.statuses[item.find(self.source).text].lower() + + if "deleted" == status: + status = "closed" + + roundupdata[self.target] = self.db.status.lookup(status) + +class ComponentHandler(XMLExport2Handler): + def __init__(self, db, source, target, categories): + XMLExport2Handler.__init__(self, db, source, target) + self.categories = categories + + def handle(self, item, roundupdata): + category = self.categories[item.find(self.source).text] + category = mappings['category'].get(category, category) + + try: + component_id = self.db.component.lookup(category) + roundupdata[self.target] = [component_id] + except KeyError: + roundupdata[self.target] = \ + [self.db.component.create(name=category)] + +class GroupHandler(XMLExport2Handler): + def __init__(self, db, source, target, groups): + XMLExport2Handler.__init__(self, db, source, target) + self.groups = groups + + def handle(self, item, roundupdata): + roundupdata[self.target] = [] + group = self.groups[item.find(self.source).text] + + if group in ["None", "Irreproducible", "AST", "Not a Bug"]: + return + elif "Feature Request" == group: + roundupdata['type'] = 
self.db.issue_type.lookup("rfe") + return + elif "Python 3000" == group: + roundupdata['keywords'].append(self.db.keyword.lookup('py3k')) + try: + # Merge as specified in http://psf.upfronthosting.co.za/roundup/meta/issue101 + if group.startswith("Python 2.1"): + group = "Python 2.1" + elif group.startswith("Python 2.2"): + group = "Python 2.2" + version = self.db.version.lookup(group) + roundupdata[self.target] = version + return + except KeyError: + pass + +class ResolutionHandler(XMLExport2Handler): + def __init__(self, db, source, target, resolutions): + XMLExport2Handler.__init__(self, db, source, target) + self.resolutions = resolutions + + def handle(self, item, roundupdata): + resolution = self.resolutions[item.find(self.source).text].lower() + if "none" == resolution: + roundupdata[self.target] = None + else: + roundupdata[self.target] = self.db.resolution.lookup(resolution) + + +class UserlinkHandler(XMLExport2Handler): + def __init__(self, db, source, target, pmembers): + XMLExport2Handler.__init__(self, db, source, target) + self.pmembers = pmembers + + def handle(self, item, roundupdata): + username = item.find(self.source).text + + if "nobody" == username and \ + "assignee" == self.target : + roundupdata[self.target] = None + return + + if "nobody" == username: + username = "anonymous" + + roundupdata[self.target] = self.getauthor(username) + + # Add user to nosy + if roundupdata[self.target] not in roundupdata['nosy'] and \ + roundupdata[self.target] != self.getauthor("anonymous"): + roundupdata['nosy'].append(roundupdata[self.target]) + + def unescape(self, string): + # work around oddities in BeautifulSoup's entity handling + def unescape_entity(m, defs=htmlentitydefs.entitydefs): + try: + return defs[m.group(1)] + except KeyError: + return m.group(0) # use as is + pattern = re.compile("&(\w+);") + return pattern.sub(unescape_entity, string) + + + def loadauthorfile(self, file): + def emit(soup): + if isinstance(soup, BS.NavigableString): + 
bob.data(self.unescape(soup)) + else: + bob.start(soup.name, dict((k, self.unescape(v)) for k, v in soup.attrs)) + for s in soup: + emit(s) + bob.end(soup.name) + # determine encoding (the document charset is not reliable) + text = open(file).read() + try: + encoding = "utf-8" + unicode(text, encoding) + except UnicodeError: + encoding = "iso-8859-1" + soup = BS.BeautifulSoup( + text, convertEntities="html", fromEncoding=encoding + ) + # build the tree + bob = ET.TreeBuilder() + for s in soup: + emit(s) + return bob.close() + + def getnonprojectmember(self, username): + address = "%s at users.sourceforge.net" % username + + authorfile = os.path.join("authordata", username) + if not os.path.exists(authorfile) or 0 == os.stat(authorfile)[stat.ST_SIZE]: + print "Fetching user information for %s" % username + u = urllib.urlopen("http://sourceforge.net/users/" + username) + open(authorfile, 'w').write(u.fp.read()) + + realname = None + authordata = open(authorfile).read() + if -1 != authordata.find("That user does not exist or is not yet active."): + return ("anonymous", None, None) + + elif -1 != authordata.find("This user account has been deleted"): + realname = "Deleted User %s" % username + return (username, realname, address) + + tree = self.loadauthorfile(authorfile) + try: + table = tree.getiterator('table')[0] + except TypeError: + table = tree.getiterator('table').next() + + alltds = table.findall('.//td') + for i in range(len(alltds)): + header = alltds[i].text or "" + if -1 != header.find("Publicly Displayed Name:"): + realname = alltds[i+1].text + break + + return (username, realname, address) + + + + def getauthor(self, username): + try: + return self.db.user.lookup(username) + except KeyError: + print "Creating new user", username + roles = ["User"] + if not self.pmembers.has_key(username): + (username, realname, address) = self.getnonprojectmember(username) + if "anonymous" == username: + return self.db.user.lookup(username) + realname = 
realname.encode('utf-8') + else: + realname = self.pmembers[username]['public_name'].encode('utf-8') + address = self.pmembers[username]['email'] + roles.append("Developer") + if self.pmembers[username]['admin']: + roles.append('Coordinator') + return self.db.user.create(username=username, + realname=realname, + address=address, + roles=",".join(roles)) + +class AssigneeHandler(UserlinkHandler): + def handle(self, item, roundupdata): + UserlinkHandler.handle(self, item, roundupdata) + if None == roundupdata[self.target]: + return + user = self.db.user.getnode(roundupdata[self.target]) + roles = user['roles'].split(',') + if not "Developer" in roles: + roles.append('Developer') + user['roles'] = ",".join(roles) + + +class DateHandler(XMLExport2Handler): + def handle(self, item, roundupdata): + roundupdata[self.target] = time.gmtime(int(item.find(self.source).text)) +class PriorityHandler(XMLExport2Handler): + def handle(self, item, roundupdata): + priority = item.find(self.source).text + roundupdata[self.target] = self.db.priority.lookup(mappings['priority'][priority]) + + +class TextstringHandler(XMLExport2Handler): + def handle(self, item, roundupdata): + roundupdata[self.target] = item.find(self.source).text.encode('utf-8') + +class MessagesHandler(UserlinkHandler): + def createmessage(self, roundupdata, author, date, content, recipients): + messageprops = ['author', 'date', 'files', 'content', 'recipients'] + messagevals = [repr(self.getauthor(author)), + repr(time.gmtime(int(date))), + repr([]), + repr(content.encode('utf-8')), + repr([])] + + if not roundupdata.has_key('activity') or \ + int(date) > time.mktime(roundupdata['activity']): + roundupdata['activity'] = time.gmtime(int(date)) + roundupdata['actor'] = self.getauthor(author) + + msg_nodeid = int(self.db.msg.import_list(messageprops, messagevals)) + + msg_filename = self.db.filename(self.db.msg.classname, + msg_nodeid, create=1) + ensureParentsExist(msg_filename) + + mo = re.search('^Logged In: (YES 
|NO )\nuser_id=[0-9]+\nOriginator: (YES|NO)\n', content, re.MULTILINE) + if mo: + content = content[mo.end():] + + open(msg_filename, 'w').write(content.encode('utf-8')) + + return msg_nodeid + + def handle(self, item, roundupdata): + # Handle 'details' + roundupdata[self.target] = [self.createmessage(roundupdata, + item.find('submitter').text, + item.find('submit_date').text, + item.find(self.source).text, + [])] + + + followups = item.find("followups") + for fu in followups.findall("followup"): + author = fu.find("submitter").text + date = fu.find("date").text + content = fu.find("details").text + roundupdata[self.target].append(self.createmessage(roundupdata, author, date, content, [])) + + authorid = self.getauthor(author) + if authorid not in roundupdata['nosy'] and \ + authorid != self.getauthor('anonymous'): + roundupdata['nosy'].append(authorid) + + +class AttachmentHandler(UserlinkHandler): + def __init__(self, db, source, target, pmembers, + project_group_id, tracker): + UserlinkHandler.__init__(self, db, source, target, pmembers) + self.project_group_id = project_group_id + self.tracker = tracker + + def downloadfile(self, url, cachefilename): + + delay = 0 + backoff = 30 + while True: + print url, "->", cachefilename + try: + f = urllib.urlopen(url) + data = f.read() + if data.find("send-email-to-ipblocked-at-sourceforge-dot-net") >= 0: + delay+=backoff + print "Blocked by Sourceforge. Sleeping %d seconds before trying again" % delay + + out = open(cachefilename + ".tmp", 'w') + out.write(str(f.headers)) + out.write("\n") + out.write(data) + out.close() + try: + os.remove(cachefilename) + except: + pass + os.rename(cachefilename + ".tmp", cachefilename) + break + + except socket.error, e: + print "Error fetching file, retrying", e + continue + except AttributeError, e: + print e, "Probably SF weirdness. Trying again after delay.." + delay+=backoff + except IOError, e: + print e, "Probably SF weirdness. Trying again after delay.." 
+ delay+=backoff + + time.sleep(delay) + + + def handle(self, item, roundupdata): + + tracker_id = self.tracker.find("tracker_id").text + aid = roundupdata["id"] + files = [] + issuefiles = [] + + attachments = item.find(self.source) + for a in attachments.findall("attachment"): + url = a.find("url").text + aid + date = a.find("date").text + author = a.find("submitter").text + filetype = a.find("filetype").text + file_id = a.find("id").text + filename = a.find("filename").text + + files.append((date, url, author, filetype, file_id, filename)) + + files.sort(lambda x, y: cmp(x[0], y[0])) + + backoff = 30 + for timestamp, url, author, filetype, file_id, filename in files: + cachefilename = os.path.join("files", "%s-%s-%s-%s.dat" % (tracker_id, + aid, + file_id, + timestamp)) + if not os.path.exists(cachefilename): + self.downloadfile(url, cachefilename) + + datafile = open(cachefilename, 'rb') + message = mimetools.Message(datafile) + + fileprops = ['creator', 'creation', 'activity', + 'name', 'type'] + + filevals = [repr(self.getauthor(author)), + repr(time.gmtime(int(timestamp))), + repr(time.gmtime(int(timestamp))), + repr(filename), + repr(filetype) + ] + + file_nodeid = int(self.db.file.import_list(fileprops, filevals)) + file_filename = self.db.filename(self.db.file.classname, file_nodeid, + create=1) + ensureParentsExist(file_filename) + open(file_filename, 'w').write(datafile.read()) + + issuefiles.append(file_nodeid) + + if not roundupdata.has_key('activity') or \ + int(timestamp) > time.mktime(roundupdata['activity']): + roundupdata['activity'] = time.gmtime(int(timestamp)) + roundupdata['actor'] = self.getauthor(author) + + roundupdata[self.target] = issuefiles + + +class SeverityHandler(XMLExport2Handler): + def handle(self, item, roundupdata): + roundupdata[self.target] = self.db.severity.lookup('normal') + +class TypeHandler(XMLExport2Handler): + def __init__(self, db, source, target, tracker): + XMLExport2Handler.__init__(self, db, source, target) + 
self.tracker = tracker + + def handle(self, item, roundupdata): + if "Feature Requests" == self.tracker.find("name").text: + roundupdata[self.target] = self.db.issue_type.lookup("rfe") + elif "Patches" == self.tracker.find("name").text: + roundupdata["keywords"].append(self.db.keyword.lookup("patch")) + + +def handle_journal(db, item, roundupdata, nodeid): + journal = [] + journal.append((nodeid, Date(roundupdata['creation']), + roundupdata['creator'], + 'create', {})) + db.setjournal("issue", nodeid, journal) + + + + + + + Copied: tracker/importer/xmlexport2toroundup.py (from r56508, tracker/importer/sfxml2roundup.py) ============================================================================== --- tracker/importer/sfxml2roundup.py (original) +++ tracker/importer/xmlexport2toroundup.py Tue Jul 24 16:40:10 2007 @@ -19,62 +19,101 @@ origin_dir = os.path.dirname(os.path.realpath(sys.argv[0])) sys.path = [os.path.join(origin_dir, "sourceforge")] + sys.path - import htmlentitydefs, re import getopt -import sfxmlhandlers +import xmlexport2handlers as x2h from roundup import instance -def handle_artifact(db, artifact): +def handle_idmapping(tracker, name, itemname): + print "Reading in '%s'" % name + mapping = tracker.find(name) + ret = {} + for g in mapping.findall(itemname): + ret[g.find('id').text] = g.find('%s_name' % itemname).text + + return ret + +def handle_namemapping(tracker, name, itemname): + resolutions = tracker.find(name) + ret = {} + for r in resolutions.findall(itemname): + ret[r.find('id').text] = r.find('name').text + return ret + + +def handle_tracker(db, project_group_id, tracker, pmembers): + print "Handling tracker \"%s\"" % tracker.find('name').text - handlers = [sfxmlhandlers.IDHandler(db, "artifact_id", 'id'), - sfxmlhandlers.CreationHandler(db, 'open_date', 'creation'), - # activity and actor is set by CreationHandler, FileHandler and MessagesHandler - sfxmlhandlers.UserlinkHandler(db, 'submitted_by', 'creator'), - 
sfxmlhandlers.TitleHandler(db, 'summary', 'title'), - sfxmlhandlers.MessagesHandler(db, None, 'messages'), - sfxmlhandlers.FilesHandler(db, None, 'files'), - sfxmlhandlers.NosyHandler(db, None, 'nosy'), - # No handler for superseder - sfxmlhandlers.ComponentHandler(db, 'category', 'components'), - sfxmlhandlers.VersionsHandler(db, None, 'versions'), - sfxmlhandlers.SeverityHandler(db, None, 'severity'), - sfxmlhandlers.PriorityHandler(db, 'priority', 'priority'), - sfxmlhandlers.DependencyHandler(db, None, 'dependencies'), - sfxmlhandlers.AssigneeHandler(db, 'assigned_to', 'assignee'), - sfxmlhandlers.StatusHandler(db, 'status', 'status'), - sfxmlhandlers.ResolutionHandler(db, 'resolution', 'resolution'), - sfxmlhandlers.TypeHandler(db, "artifact_type", "type"), - sfxmlhandlers.GroupHandler(db, "artifact_group_id", "versions"), - ] - - roundupdata = {'files':[], 'keywords':[]} - fields = {} - - for field in artifact.findall("field"): - name = field.attrib.get('name') - if None == name: - print "field has no name", field.attrib - continue - fields[name] = field - - aid = int(fields['artifact_id'].text) - - for handler in handlers: - handler.handle(fields, roundupdata) - - props = [] - values = [] - - for key, value in roundupdata.items(): - props.append(key) - values.append(repr(value)) - - nodeid = db.issue.import_list(props, values) - sfxmlhandlers.handle_journal(db, fields, roundupdata, nodeid) - return nodeid + groups = handle_idmapping(tracker, "groups", "group") + categories = handle_idmapping(tracker, "categories", "category") + categories['100100'] = 'None' + categories['100'] = 'None' + groups['100100'] = 'None' + groups['100'] = 'None' + + print groups + + resolutions = handle_namemapping(tracker, 'resolutions', 'resolution') + statuses = handle_namemapping(tracker, 'statuses', 'status') + + handlers = [x2h.TextValueHandler(db, "id", "id"), + x2h.StatusHandler(db, "status_id", "status", statuses), + x2h.ComponentHandler(db, "category_id", "components", + 
categories), + x2h.GroupHandler(db, "group_id", "versions", + groups), + x2h.ResolutionHandler(db, "resolution_id", "resolution", + resolutions), + x2h.UserlinkHandler(db, 'submitter', 'creator', + pmembers), + x2h.AssigneeHandler(db, 'assignee', 'assignee', pmembers), + # FIXME: Activity + x2h.DateHandler(db, 'submit_date', 'creation'), + x2h.PriorityHandler(db, 'priority', 'priority'), + x2h.TextstringHandler(db, 'summary', 'title'), + x2h.MessagesHandler(db, 'details', 'messages', pmembers), + x2h.AttachmentHandler(db, 'attachments', 'files', + pmembers, project_group_id, tracker), + x2h.SeverityHandler(db, None, "severity"), + x2h.TypeHandler(db, None, "type", tracker), + ] + + for item in tracker.find('tracker_items').findall('tracker_item'): + print "Handling \"%s\" item with id %s" % (tracker.find('name').text, + item.find('id').text) + roundupdata = {'keywords':[], 'files':[], + 'messages':[], 'dependencies':[], 'nosy':[]} + + for handler in handlers: + handler.handle(item, roundupdata) + + props = [] + values = [] + + for key, value in roundupdata.items(): + props.append(key) + values.append(repr(value)) + + nodeid = db.issue.import_list(props, values) + x2h.handle_journal(db, item, roundupdata, nodeid) + + db.commit() + + +def handle_projectmembers(tree): + ps = tree.find('projectsummary').find("projectmembers") + ret = {'nobody':{'public_name':'Nobody/Anonymous', 'admin':False, + 'email':''}} + for pm in ps.findall("projectmember"): + user_name = pm.find('user_name').text + ret[user_name] = {'public_name':pm.find('public_name').text, + 'email':pm.find('email').text, + 'admin':False} + if 'Yes' == pm.find('project_admin').text: + ret[user_name]['admin'] = True + return ret if "__main__" == __name__: @@ -92,26 +131,22 @@ trackerhome = optarg elif "--startat" == opt: startat = int(optarg) + + rounduptracker = instance.open(trackerhome) + db = rounduptracker.open("admin") tree = ET.parse(xmlfile) - artifacts = tree.find('artifacts') - rounduptracker = 
instance.open(trackerhome) - db = rounduptracker.open("admin") + project_group_id = tree.find("export_details").find("project_group_id").text - max_id = 0 - allartifacts = artifacts.findall('artifact') - i=startat - totalartifacts = len(allartifacts) - for artifact in allartifacts[startat:]: - i+=1 - sys.stdout.write("[%5d/%d] " % (i, totalartifacts)) - aid = handle_artifact(db, artifact) - if max_id < int(aid): - max_id = int(aid) - db.commit() + pmembers = handle_projectmembers(tree) + + trackers = tree.find("trackers") + for tracker in trackers.findall("tracker"): + handle_tracker(db, project_group_id, tracker, pmembers) db.setid('issue', str(999)) - db.commit() + db.commit() + From python-checkins at python.org Tue Jul 24 17:03:38 2007 From: python-checkins at python.org (erik.forsberg) Date: Tue, 24 Jul 2007 17:03:38 +0200 (CEST) Subject: [Python-checkins] r56525 - tracker/importer/xmlexport2handlers.py Message-ID: <20070724150338.21C8C1E400D@bag.python.org> Author: erik.forsberg Date: Tue Jul 24 17:03:37 2007 New Revision: 56525 Modified: tracker/importer/xmlexport2handlers.py Log: More feedback about newly created users Modified: tracker/importer/xmlexport2handlers.py ============================================================================== --- tracker/importer/xmlexport2handlers.py (original) +++ tracker/importer/xmlexport2handlers.py Tue Jul 24 17:03:37 2007 @@ -202,7 +202,6 @@ try: return self.db.user.lookup(username) except KeyError: - print "Creating new user", username roles = ["User"] if not self.pmembers.has_key(username): (username, realname, address) = self.getnonprojectmember(username) @@ -215,6 +214,7 @@ roles.append("Developer") if self.pmembers[username]['admin']: roles.append('Coordinator') + print "Creating new user", username, roles return self.db.user.create(username=username, realname=realname, address=address, From python-checkins at python.org Tue Jul 24 17:04:20 2007 From: python-checkins at python.org (erik.forsberg) Date: Tue, 24 
Jul 2007 17:04:20 +0200 (CEST) Subject: [Python-checkins] r56526 - tracker/importer/xmlexport2toroundup.py Message-ID: <20070724150420.641D61E4010@bag.python.org> Author: erik.forsberg Date: Tue Jul 24 17:04:19 2007 New Revision: 56526 Modified: tracker/importer/xmlexport2toroundup.py Log: Print per-tracker progress Modified: tracker/importer/xmlexport2toroundup.py ============================================================================== --- tracker/importer/xmlexport2toroundup.py (original) +++ tracker/importer/xmlexport2toroundup.py Tue Jul 24 17:04:19 2007 @@ -80,9 +80,12 @@ x2h.TypeHandler(db, None, "type", tracker), ] + numitems = len(tracker.find('tracker_items').findall('tracker_item')) + i=1 for item in tracker.find('tracker_items').findall('tracker_item'): - print "Handling \"%s\" item with id %s" % (tracker.find('name').text, - item.find('id').text) + print "Handling \"%s\" item with id %s [%d/%d]" % (tracker.find('name').text, + item.find('id').text, + i, numitems) roundupdata = {'keywords':[], 'files':[], 'messages':[], 'dependencies':[], 'nosy':[]} @@ -100,6 +103,7 @@ x2h.handle_journal(db, item, roundupdata, nodeid) db.commit() + i+=1 def handle_projectmembers(tree): From python-checkins at python.org Tue Jul 24 19:18:11 2007 From: python-checkins at python.org (erik.forsberg) Date: Tue, 24 Jul 2007 19:18:11 +0200 (CEST) Subject: [Python-checkins] r56527 - tracker/importer/xmlexport2toroundup.py Message-ID: <20070724171811.10FE91E400E@bag.python.org> Author: erik.forsberg Date: Tue Jul 24 19:18:10 2007 New Revision: 56527 Modified: tracker/importer/xmlexport2toroundup.py Log: Compensate for missing data Modified: tracker/importer/xmlexport2toroundup.py ============================================================================== --- tracker/importer/xmlexport2toroundup.py (original) +++ tracker/importer/xmlexport2toroundup.py Tue Jul 24 19:18:10 2007 @@ -49,8 +49,10 @@ categories = handle_idmapping(tracker, "categories", "category") 
categories['100100'] = 'None' + categories['300100'] = 'None' categories['100'] = 'None' groups['100100'] = 'None' + groups['300100'] = 'None' groups['100'] = 'None' print groups From python-checkins at python.org Tue Jul 24 19:18:47 2007 From: python-checkins at python.org (erik.forsberg) Date: Tue, 24 Jul 2007 19:18:47 +0200 (CEST) Subject: [Python-checkins] r56528 - tracker/importer/xmlexport2handlers.py Message-ID: <20070724171847.7B5071E4016@bag.python.org> Author: erik.forsberg Date: Tue Jul 24 19:18:47 2007 New Revision: 56528 Modified: tracker/importer/xmlexport2handlers.py Log: Use error-recovering download routine when downloading author files as well Modified: tracker/importer/xmlexport2handlers.py ============================================================================== --- tracker/importer/xmlexport2handlers.py (original) +++ tracker/importer/xmlexport2handlers.py Tue Jul 24 19:18:47 2007 @@ -109,6 +109,44 @@ XMLExport2Handler.__init__(self, db, source, target) self.pmembers = pmembers + def downloadfile(self, url, cachefilename): + + delay = 0 + backoff = 30 + while True: + print url, "->", cachefilename + try: + f = urllib.urlopen(url) + data = f.read() + if data.find("send-email-to-ipblocked-at-sourceforge-dot-net") >= 0: + delay+=backoff + print "Blocked by Sourceforge. Sleeping %d seconds before trying again" % delay + + out = open(cachefilename + ".tmp", 'w') + out.write(str(f.headers)) + out.write("\n") + out.write(data) + out.close() + try: + os.remove(cachefilename) + except: + pass + os.rename(cachefilename + ".tmp", cachefilename) + break + + except socket.error, e: + print "Error fetching file, retrying", e + continue + except AttributeError, e: + print e, "Probably SF weirdness. Trying again after delay.." + delay+=backoff + except IOError, e: + print e, "Probably SF weirdness. Trying again after delay.." 
+ delay+=backoff + + time.sleep(delay) + + def handle(self, item, roundupdata): username = item.find(self.source).text @@ -169,8 +207,8 @@ authorfile = os.path.join("authordata", username) if not os.path.exists(authorfile) or 0 == os.stat(authorfile)[stat.ST_SIZE]: print "Fetching user information for %s" % username - u = urllib.urlopen("http://sourceforge.net/users/" + username) - open(authorfile, 'w').write(u.fp.read()) + self.downloadfile("http://sourceforge.net/users/" + username, + authorfile) realname = None authordata = open(authorfile).read() @@ -275,11 +313,12 @@ def handle(self, item, roundupdata): # Handle 'details' - roundupdata[self.target] = [self.createmessage(roundupdata, - item.find('submitter').text, - item.find('submit_date').text, - item.find(self.source).text, - [])] + if None != item.find(self.source).text: + roundupdata[self.target] = [self.createmessage(roundupdata, + item.find('submitter').text, + item.find('submit_date').text, + item.find(self.source).text, + [])] followups = item.find("followups") @@ -302,43 +341,6 @@ self.project_group_id = project_group_id self.tracker = tracker - def downloadfile(self, url, cachefilename): - - delay = 0 - backoff = 30 - while True: - print url, "->", cachefilename - try: - f = urllib.urlopen(url) - data = f.read() - if data.find("send-email-to-ipblocked-at-sourceforge-dot-net") >= 0: - delay+=backoff - print "Blocked by Sourceforge. Sleeping %d seconds before trying again" % delay - - out = open(cachefilename + ".tmp", 'w') - out.write(str(f.headers)) - out.write("\n") - out.write(data) - out.close() - try: - os.remove(cachefilename) - except: - pass - os.rename(cachefilename + ".tmp", cachefilename) - break - - except socket.error, e: - print "Error fetching file, retrying", e - continue - except AttributeError, e: - print e, "Probably SF weirdness. Trying again after delay.." - delay+=backoff - except IOError, e: - print e, "Probably SF weirdness. Trying again after delay.." 
- delay+=backoff - - time.sleep(delay) - def handle(self, item, roundupdata): From python-checkins at python.org Tue Jul 24 20:58:28 2007 From: python-checkins at python.org (erik.forsberg) Date: Tue, 24 Jul 2007 20:58:28 +0200 (CEST) Subject: [Python-checkins] r56530 - tracker/importer/xmlexport2handlers.py tracker/importer/xmlexport2toroundup.py Message-ID: <20070724185828.1D8031E4013@bag.python.org> Author: erik.forsberg Date: Tue Jul 24 20:58:27 2007 New Revision: 56530 Modified: tracker/importer/xmlexport2handlers.py tracker/importer/xmlexport2toroundup.py Log: Reorganized code - moved several methods used by many handler classes into base class. Modified: tracker/importer/xmlexport2handlers.py ============================================================================== --- tracker/importer/xmlexport2handlers.py (original) +++ tracker/importer/xmlexport2handlers.py Tue Jul 24 20:58:27 2007 @@ -21,149 +21,25 @@ from roundup.date import Date class XMLExport2Handler: - def __init__(self, db, source, target): + def __init__(self, db, source, target, pmembers): self.db = db self.source = source self.target = target - - def handle(self, item, roundupdata): - raise NotImplementedError - - -class TextValueHandler(XMLExport2Handler): - def handle(self, item, roundupdata): - roundupdata[self.target] = item.find(self.source).text.encode('utf-8') - -class StatusHandler(XMLExport2Handler): - def __init__(self, db, source, target, statuses): - XMLExport2Handler.__init__(self, db, source, target) - self.statuses = statuses - - def handle(self, item, roundupdata): - status = self.statuses[item.find(self.source).text].lower() - - if "deleted" == status: - status = "closed" - - roundupdata[self.target] = self.db.status.lookup(status) - -class ComponentHandler(XMLExport2Handler): - def __init__(self, db, source, target, categories): - XMLExport2Handler.__init__(self, db, source, target) - self.categories = categories - - def handle(self, item, roundupdata): - category = 
self.categories[item.find(self.source).text] - category = mappings['category'].get(category, category) - - try: - component_id = self.db.component.lookup(category) - roundupdata[self.target] = [component_id] - except KeyError: - roundupdata[self.target] = \ - [self.db.component.create(name=category)] - -class GroupHandler(XMLExport2Handler): - def __init__(self, db, source, target, groups): - XMLExport2Handler.__init__(self, db, source, target) - self.groups = groups - - def handle(self, item, roundupdata): - roundupdata[self.target] = [] - group = self.groups[item.find(self.source).text] - - if group in ["None", "Irreproducible", "AST", "Not a Bug"]: - return - elif "Feature Request" == group: - roundupdata['type'] = self.db.issue_type.lookup("rfe") - return - elif "Python 3000" == group: - roundupdata['keywords'].append(self.db.keyword.lookup('py3k')) - try: - # Merge as specified in http://psf.upfronthosting.co.za/roundup/meta/issue101 - if group.startswith("Python 2.1"): - group = "Python 2.1" - elif group.startswith("Python 2.2"): - group = "Python 2.2" - version = self.db.version.lookup(group) - roundupdata[self.target] = version - return - except KeyError: - pass - -class ResolutionHandler(XMLExport2Handler): - def __init__(self, db, source, target, resolutions): - XMLExport2Handler.__init__(self, db, source, target) - self.resolutions = resolutions - - def handle(self, item, roundupdata): - resolution = self.resolutions[item.find(self.source).text].lower() - if "none" == resolution: - roundupdata[self.target] = None - else: - roundupdata[self.target] = self.db.resolution.lookup(resolution) - - -class UserlinkHandler(XMLExport2Handler): - def __init__(self, db, source, target, pmembers): - XMLExport2Handler.__init__(self, db, source, target) self.pmembers = pmembers - def downloadfile(self, url, cachefilename): - - delay = 0 - backoff = 30 - while True: - print url, "->", cachefilename - try: - f = urllib.urlopen(url) - data = f.read() - if 
data.find("send-email-to-ipblocked-at-sourceforge-dot-net") >= 0: - delay+=backoff - print "Blocked by Sourceforge. Sleeping %d seconds before trying again" % delay - - out = open(cachefilename + ".tmp", 'w') - out.write(str(f.headers)) - out.write("\n") - out.write(data) - out.close() - try: - os.remove(cachefilename) - except: - pass - os.rename(cachefilename + ".tmp", cachefilename) - break - - except socket.error, e: - print "Error fetching file, retrying", e - continue - except AttributeError, e: - print e, "Probably SF weirdness. Trying again after delay.." - delay+=backoff - except IOError, e: - print e, "Probably SF weirdness. Trying again after delay.." - delay+=backoff - - time.sleep(delay) - - def handle(self, item, roundupdata): - username = item.find(self.source).text - - if "nobody" == username and \ - "assignee" == self.target : - roundupdata[self.target] = None - return - - if "nobody" == username: - username = "anonymous" + raise NotImplementedError - roundupdata[self.target] = self.getauthor(username) + def update_activity(self, roundupdata, timestamp, actorid): + if not roundupdata.has_key('activity') or \ + int(timestamp) > time.mktime(roundupdata['activity']): + roundupdata['activity'] = time.gmtime(int(timestamp)) + roundupdata['actor'] = actorid + + def update_nosy(self, roundupdata, actorid): + if actorid not in roundupdata['nosy']: + roundupdata.append(actorid) - # Add user to nosy - if roundupdata[self.target] not in roundupdata['nosy'] and \ - roundupdata[self.target] != self.getauthor("anonymous"): - roundupdata['nosy'].append(roundupdata[self.target]) def unescape(self, string): # work around oddities in BeautifulSoup's entity handling @@ -233,7 +109,7 @@ break return (username, realname, address) - + def getauthor(self, username): @@ -258,6 +134,139 @@ address=address, roles=",".join(roles)) + def downloadfile(self, url, cachefilename): + + delay = 0 + backoff = 30 + while True: + print url, "->", cachefilename + try: + f = 
urllib.urlopen(url) + data = f.read() + if data.find("send-email-to-ipblocked-at-sourceforge-dot-net") >= 0: + delay+=backoff + print "Blocked by Sourceforge. Sleeping %d seconds before trying again" % delay + + out = open(cachefilename + ".tmp", 'w') + out.write(str(f.headers)) + out.write("\n") + out.write(data) + out.close() + try: + os.remove(cachefilename) + except: + pass + os.rename(cachefilename + ".tmp", cachefilename) + break + + except socket.error, e: + print "Error fetching file, retrying", e + continue + except AttributeError, e: + print e, "Probably SF weirdness. Trying again after delay.." + delay+=backoff + except IOError, e: + print e, "Probably SF weirdness. Trying again after delay.." + delay+=backoff + + time.sleep(delay) + + +class TextValueHandler(XMLExport2Handler): + def handle(self, item, roundupdata): + roundupdata[self.target] = item.find(self.source).text.encode('utf-8') + +class StatusHandler(XMLExport2Handler): + def __init__(self, db, source, target, pmembers, statuses): + XMLExport2Handler.__init__(self, db, source, target, pmembers) + self.statuses = statuses + + def handle(self, item, roundupdata): + status = self.statuses[item.find(self.source).text].lower() + + if "deleted" == status: + status = "closed" + + roundupdata[self.target] = self.db.status.lookup(status) + +class ComponentHandler(XMLExport2Handler): + def __init__(self, db, source, target, pmembers, categories): + XMLExport2Handler.__init__(self, db, source, target, pmembers) + self.categories = categories + + def handle(self, item, roundupdata): + category = self.categories[item.find(self.source).text] + category = mappings['category'].get(category, category) + + try: + component_id = self.db.component.lookup(category) + roundupdata[self.target] = [component_id] + except KeyError: + roundupdata[self.target] = \ + [self.db.component.create(name=category)] + +class GroupHandler(XMLExport2Handler): + def __init__(self, db, source, target, pmembers, groups): + 
XMLExport2Handler.__init__(self, db, source, target, pmembers) + self.groups = groups + + def handle(self, item, roundupdata): + roundupdata[self.target] = [] + group = self.groups[item.find(self.source).text] + + if group in ["None", "Irreproducible", "AST", "Not a Bug"]: + return + elif "Feature Request" == group: + roundupdata['type'] = self.db.issue_type.lookup("rfe") + return + elif "Python 3000" == group: + roundupdata['keywords'].append(self.db.keyword.lookup('py3k')) + try: + # Merge as specified in http://psf.upfronthosting.co.za/roundup/meta/issue101 + if group.startswith("Python 2.1"): + group = "Python 2.1" + elif group.startswith("Python 2.2"): + group = "Python 2.2" + version = self.db.version.lookup(group) + roundupdata[self.target] = version + return + except KeyError: + pass + +class ResolutionHandler(XMLExport2Handler): + def __init__(self, db, source, target, pmembers, resolutions): + XMLExport2Handler.__init__(self, db, source, target, pmembers) + self.resolutions = resolutions + + def handle(self, item, roundupdata): + resolution = self.resolutions[item.find(self.source).text].lower() + if "none" == resolution: + roundupdata[self.target] = None + else: + roundupdata[self.target] = self.db.resolution.lookup(resolution) + + +class UserlinkHandler(XMLExport2Handler): + + + def handle(self, item, roundupdata): + username = item.find(self.source).text + + if "nobody" == username and \ + "assignee" == self.target : + roundupdata[self.target] = None + return + + if "nobody" == username: + username = "anonymous" + + roundupdata[self.target] = self.getauthor(username) + + # Add user to nosy + if roundupdata[self.target] not in roundupdata['nosy'] and \ + roundupdata[self.target] != self.getauthor("anonymous"): + roundupdata['nosy'].append(roundupdata[self.target]) + class AssigneeHandler(UserlinkHandler): def handle(self, item, roundupdata): UserlinkHandler.handle(self, item, roundupdata) @@ -270,9 +279,12 @@ user['roles'] = ",".join(roles) -class 
DateHandler(XMLExport2Handler): +class CreationHandler(XMLExport2Handler): def handle(self, item, roundupdata): roundupdata[self.target] = time.gmtime(int(item.find(self.source).text)) + self.update_activity(roundupdata, int(item.find(self.source).text), + self.getauthor(item.find("submitter").text)) + class PriorityHandler(XMLExport2Handler): def handle(self, item, roundupdata): priority = item.find(self.source).text @@ -292,10 +304,7 @@ repr(content.encode('utf-8')), repr([])] - if not roundupdata.has_key('activity') or \ - int(date) > time.mktime(roundupdata['activity']): - roundupdata['activity'] = time.gmtime(int(date)) - roundupdata['actor'] = self.getauthor(author) + self.update_activity(roundupdata, date, self.getauthor(author)) msg_nodeid = int(self.db.msg.import_list(messageprops, messagevals)) @@ -392,11 +401,8 @@ issuefiles.append(file_nodeid) - if not roundupdata.has_key('activity') or \ - int(timestamp) > time.mktime(roundupdata['activity']): - roundupdata['activity'] = time.gmtime(int(timestamp)) - roundupdata['actor'] = self.getauthor(author) - + + self.update_activity(roundupdata, timestamp, self.getauthor(author)) roundupdata[self.target] = issuefiles @@ -405,8 +411,8 @@ roundupdata[self.target] = self.db.severity.lookup('normal') class TypeHandler(XMLExport2Handler): - def __init__(self, db, source, target, tracker): - XMLExport2Handler.__init__(self, db, source, target) + def __init__(self, db, source, target, pmembers, tracker): + XMLExport2Handler.__init__(self, db, source, target, pmembers) self.tracker = tracker def handle(self, item, roundupdata): Modified: tracker/importer/xmlexport2toroundup.py ============================================================================== --- tracker/importer/xmlexport2toroundup.py (original) +++ tracker/importer/xmlexport2toroundup.py Tue Jul 24 20:58:27 2007 @@ -60,26 +60,26 @@ resolutions = handle_namemapping(tracker, 'resolutions', 'resolution') statuses = handle_namemapping(tracker, 'statuses', 
'status') - handlers = [x2h.TextValueHandler(db, "id", "id"), - x2h.StatusHandler(db, "status_id", "status", statuses), + handlers = [x2h.TextValueHandler(db, "id", "id", pmembers), + x2h.StatusHandler(db, "status_id", "status", pmembers, statuses), x2h.ComponentHandler(db, "category_id", "components", - categories), + pmembers, categories), x2h.GroupHandler(db, "group_id", "versions", - groups), + pmembers, groups), x2h.ResolutionHandler(db, "resolution_id", "resolution", - resolutions), - x2h.UserlinkHandler(db, 'submitter', 'creator', + pmembers, resolutions), + x2h.UserlinkHandler(db, 'submitter', 'creator', pmembers), x2h.AssigneeHandler(db, 'assignee', 'assignee', pmembers), # FIXME: Activity - x2h.DateHandler(db, 'submit_date', 'creation'), - x2h.PriorityHandler(db, 'priority', 'priority'), - x2h.TextstringHandler(db, 'summary', 'title'), + x2h.CreationHandler(db, 'submit_date', 'creation', pmembers), + x2h.PriorityHandler(db, 'priority', 'priority', pmembers), + x2h.TextstringHandler(db, 'summary', 'title', pmembers), x2h.MessagesHandler(db, 'details', 'messages', pmembers), x2h.AttachmentHandler(db, 'attachments', 'files', pmembers, project_group_id, tracker), - x2h.SeverityHandler(db, None, "severity"), - x2h.TypeHandler(db, None, "type", tracker), + x2h.SeverityHandler(db, None, "severity", pmembers), + x2h.TypeHandler(db, None, "type", pmembers, tracker), ] numitems = len(tracker.find('tracker_items').findall('tracker_item')) From python-checkins at python.org Tue Jul 24 21:15:39 2007 From: python-checkins at python.org (erik.forsberg) Date: Tue, 24 Jul 2007 21:15:39 +0200 (CEST) Subject: [Python-checkins] r56531 - tracker/instances/python-dev/detectors/config.ini.template tracker/instances/python-dev/detectors/spamcheck.py Message-ID: <20070724191539.CC23C1E4007@bag.python.org> Author: erik.forsberg Date: Tue Jul 24 21:15:39 2007 New Revision: 56531 Modified: tracker/instances/python-dev/detectors/config.ini.template 
tracker/instances/python-dev/detectors/spamcheck.py Log: Moved configuration (spambayes server uri and cutoff values) into detectors/config.ini. Modified: tracker/instances/python-dev/detectors/config.ini.template ============================================================================== --- tracker/instances/python-dev/detectors/config.ini.template (original) +++ tracker/instances/python-dev/detectors/config.ini.template Tue Jul 24 21:15:39 2007 @@ -5,3 +5,10 @@ triage_email = triage at example.com busybody_email= busybody at example.com +# URI to XMLRPC server doing the actual spam check. +spambayes_uri = http://www.webfast.com:80/sbrpc +# These must match the {ham,spam}_cutoff setting in the SpamBayes server +# config. +spambayes_ham_cutoff = 0.2 +spambayes_spam_cutoff = 0.85 + Modified: tracker/instances/python-dev/detectors/spamcheck.py ============================================================================== --- tracker/instances/python-dev/detectors/spamcheck.py (original) +++ tracker/instances/python-dev/detectors/spamcheck.py Tue Jul 24 21:15:39 2007 @@ -9,31 +9,25 @@ which snuck by the screen. """ -HOST = "www.webfast.com" -PORT = "80" -PATH = "/sbrpc" - -# These must match the {ham,spam}_cutoff setting in the SpamBayes server -# config. 
-HAM_CUTOFF = 0.2 -SPAM_CUTOFF = 0.85 - import xmlrpclib import socket from roundup.exceptions import Reject -def check_spam(_database, _klass, _nodeid, newvalues): +def check_spam(_db, _klass, _nodeid, newvalues): """Auditor to score a website submission.""" - uri = "http://%s:%s%s" % (HOST, PORT, PATH) - server = xmlrpclib.ServerProxy(uri, verbose=False) + spambayes_uri = _db.config.detectors['SPAMBAYES_URI'] + spam_cutoff = float(_db.config.detectors['SPAMBAYES_SPAM_CUTOFF']) + + + server = xmlrpclib.ServerProxy(spambayes_uri, verbose=False) try: prob = server.score(newvalues, [], {}) - except (socket.error, xmlrpclib.Error): + except (socket.error, xmlrpclib.Error), e: pass else: - if prob >= SPAM_CUTOFF: + if prob >= spam_cutoff: raise Reject("Looks like spam to me - prob=%.3f" % prob) def init(database): From python-checkins at python.org Tue Jul 24 22:58:37 2007 From: python-checkins at python.org (georg.brandl) Date: Tue, 24 Jul 2007 22:58:37 +0200 (CEST) Subject: [Python-checkins] r56532 - doctools/trunk/README Message-ID: <20070724205837.3720F1E400A@bag.python.org> Author: georg.brandl Date: Tue Jul 24 22:58:36 2007 New Revision: 56532 Modified: doctools/trunk/README Log: Add hint about installing docutils. Modified: doctools/trunk/README ============================================================================== --- doctools/trunk/README (original) +++ doctools/trunk/README Tue Jul 24 22:58:36 2007 @@ -8,13 +8,17 @@ 2.5 setup. Docutils 0.4 is required, the Docutils SVN trunk does not work at the moment, but I don't expect it to be difficult to fix that. +Docutils can be installed from the PyPI (Cheese Shop) via ``easy_install`` +or from http://docutils.sourceforge.net/. + If you want code highlighting, you need Pygments >= 0.8, easily installable from PyPI. Jinja, the template engine, is included as a SVN external. For the rest of this document, let's assume that you have a Python -checkout (you need the 2.6 line, i.e. 
the trunk) in ~/devel/python and -this checkout in the current directory. +checkout (you need the 2.6 line, i.e. the trunk -- the SVN URL for readonly +access is http://svn.python.org/projects/python/trunk) in ~/devel/python +and this checkout in the current directory. To convert the LaTeX doc to reST, you first have to apply the patch in ``etc/inst.diff`` to the ``inst/inst.tex`` LaTeX file in the Python @@ -83,4 +87,4 @@ additional, that can be used in the source docs. More reST docs are at http://docutils.sf.net/rst.html, tutorials can be found on -the web. \ No newline at end of file +the web. From python-checkins at python.org Tue Jul 24 23:20:42 2007 From: python-checkins at python.org (facundo.batista) Date: Tue, 24 Jul 2007 23:20:42 +0200 (CEST) Subject: [Python-checkins] r56533 - python/trunk/Lib/test/test_smtplib.py Message-ID: <20070724212042.DC37D1E4007@bag.python.org> Author: facundo.batista Date: Tue Jul 24 23:20:42 2007 New Revision: 56533 Modified: python/trunk/Lib/test/test_smtplib.py Log: New tests for basic behavior of smtplib.SMTP and smtpd.DebuggingServer. Change to use global host & port number variables. Modified the 'server' to take a string to send back in order to vary test server responses. Added a test for the reaction of smtplib.SMTP to a non-200 HELO response. 
[GSoC - Alan McIntyre] Modified: python/trunk/Lib/test/test_smtplib.py ============================================================================== --- python/trunk/Lib/test/test_smtplib.py (original) +++ python/trunk/Lib/test/test_smtplib.py Tue Jul 24 23:20:42 2007 @@ -1,24 +1,40 @@ +import asyncore import socket import threading +import smtpd import smtplib +import StringIO +import sys import time +import select from unittest import TestCase from test import test_support +HOST = "localhost" +PORT = 54328 -def server(evt): +def server(evt, buf): serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) serv.settimeout(3) serv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - serv.bind(("", 9091)) + serv.bind(("", PORT)) serv.listen(5) try: conn, addr = serv.accept() except socket.timeout: pass else: - conn.send("220 Hola mundo\n") + n = 200 + while buf and n > 0: + r, w, e = select.select([], [conn], []) + if w: + sent = conn.send(buf) + buf = buf[sent:] + + n -= 1 + time.sleep(0.01) + conn.close() finally: serv.close() @@ -28,26 +44,42 @@ def setUp(self): self.evt = threading.Event() - threading.Thread(target=server, args=(self.evt,)).start() + servargs = (self.evt, "220 Hola mundo\n") + threading.Thread(target=server, args=servargs).start() time.sleep(.1) def tearDown(self): self.evt.wait() - def testBasic(self): + def testBasic1(self): # connects - smtp = smtplib.SMTP("localhost", 9091) + smtp = smtplib.SMTP(HOST, PORT) smtp.sock.close() + def testBasic2(self): + # connects, include port in host name + smtp = smtplib.SMTP("%s:%s" % (HOST, PORT)) + smtp.sock.close() + + def testLocalHostName(self): + # check that supplied local_hostname is used + smtp = smtplib.SMTP(HOST, PORT, local_hostname="testhost") + self.assertEqual(smtp.local_hostname, "testhost") + smtp.sock.close() + + def testNonnumericPort(self): + # check that non-numeric port raises ValueError + self.assertRaises(socket.error, smtplib.SMTP, "localhost", "bogus") + def 
testTimeoutDefault(self): # default - smtp = smtplib.SMTP("localhost", 9091) + smtp = smtplib.SMTP(HOST, PORT) self.assertTrue(smtp.sock.gettimeout() is None) smtp.sock.close() def testTimeoutValue(self): # a value - smtp = smtplib.SMTP("localhost", 9091, timeout=30) + smtp = smtplib.SMTP(HOST, PORT, timeout=30) self.assertEqual(smtp.sock.gettimeout(), 30) smtp.sock.close() @@ -56,16 +88,95 @@ previous = socket.getdefaulttimeout() socket.setdefaulttimeout(30) try: - smtp = smtplib.SMTP("localhost", 9091, timeout=None) + smtp = smtplib.SMTP(HOST, PORT, timeout=None) finally: socket.setdefaulttimeout(previous) self.assertEqual(smtp.sock.gettimeout(), 30) smtp.sock.close() +# Test server using smtpd.DebuggingServer +def debugging_server(evt): + serv = smtpd.DebuggingServer(("", PORT), ('nowhere', -1)) + + try: + asyncore.loop(timeout=.01, count=300) + except socket.timeout: + pass + finally: + # allow some time for the client to read the result + time.sleep(0.5) + asyncore.close_all() + evt.set() + +MSG_BEGIN = '---------- MESSAGE FOLLOWS ----------\n' +MSG_END = '------------ END MESSAGE ------------\n' + +# Test behavior of smtpd.DebuggingServer +class DebuggingServerTests(TestCase): + + def setUp(self): + self.old_stdout = sys.stdout + self.output = StringIO.StringIO() + sys.stdout = self.output + + self.evt = threading.Event() + threading.Thread(target=debugging_server, args=(self.evt,)).start() + time.sleep(.5) + + def tearDown(self): + self.evt.wait() + sys.stdout = self.old_stdout + + def testBasic(self): + # connect + smtp = smtplib.SMTP(HOST, PORT) + smtp.sock.close() + + def testEHLO(self): + smtp = smtplib.SMTP(HOST, PORT) + self.assertEqual(smtp.ehlo(), (502, 'Error: command "EHLO" not implemented')) + smtp.sock.close() + + def testHELP(self): + smtp = smtplib.SMTP(HOST, PORT) + self.assertEqual(smtp.help(), 'Error: command "HELP" not implemented') + smtp.sock.close() + + def testSend(self): + # connect and send mail + m = 'A test message' + smtp = 
smtplib.SMTP(HOST, PORT) + smtp.sendmail('John', 'Sally', m) + smtp.sock.close() + + self.evt.wait() + self.output.flush() + mexpect = '%s%s\n%s' % (MSG_BEGIN, m, MSG_END) + self.assertEqual(self.output.getvalue(), mexpect) + + +class BadHELOServerTests(TestCase): + + def setUp(self): + self.old_stdout = sys.stdout + self.output = StringIO.StringIO() + sys.stdout = self.output + + self.evt = threading.Event() + servargs = (self.evt, "199 no hello for you!\n") + threading.Thread(target=server, args=servargs).start() + time.sleep(.5) + + def tearDown(self): + self.evt.wait() + sys.stdout = self.old_stdout + + def testFailingHELO(self): + self.assertRaises(smtplib.SMTPConnectError, smtplib.SMTP, HOST, PORT) def test_main(verbose=None): - test_support.run_unittest(GeneralTests) + test_support.run_unittest(GeneralTests, DebuggingServerTests, BadHELOServerTests) if __name__ == '__main__': test_main() From python-checkins at python.org Tue Jul 24 23:52:25 2007 From: python-checkins at python.org (phillip.eby) Date: Tue, 24 Jul 2007 23:52:25 +0200 (CEST) Subject: [Python-checkins] r56534 - peps/trunk/pep-3124.txt Message-ID: <20070724215225.22D771E4007@bag.python.org> Author: phillip.eby Date: Tue Jul 24 23:52:24 2007 New Revision: 56534 Modified: peps/trunk/pep-3124.txt Log: Misc. fixes/clarifications, add section with rationale for why universal overloading doesn't automatially lead to chaos and anarchy. Modified: peps/trunk/pep-3124.txt ============================================================================== --- peps/trunk/pep-3124.txt (original) +++ peps/trunk/pep-3124.txt Tue Jul 24 23:52:24 2007 @@ -25,7 +25,8 @@ This PEP proposes a new standard library module, ``overloading``, to provide generic programming features including dynamic overloading (aka generic functions), interfaces, adaptation, method combining (ala -CLOS and AspectJ), and simple forms of aspect-oriented programming. +CLOS and AspectJ), and simple forms of aspect-oriented programming +(AOP). 
The proposed API is also open to extension; that is, it will be possible for library developers to implement their own specialized @@ -384,7 +385,7 @@ The ``@around`` decorator declares a method as an "around" method. "Around" methods are much like primary methods, except that the least-specific "around" method has higher precedence than the -most-specific "before" or method. +most-specific "before" method. Unlike "before" and "after" methods, however, "Around" methods *are* responsible for calling their ``__proceed__`` argument, in order to @@ -615,10 +616,32 @@ ``list.append(mylist, 42)``, thereby implementing the desired operation. -(Note: the ``@abstract`` decorator is not limited to use in interface -definitions; it can be used anywhere that you wish to create an -"empty" generic function that initially has no methods. In -particular, it need not be used inside a class.) + +Abstract and Concrete Methods +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Note, by the way, that the ``@abstract`` decorator is not limited to +use in interface definitions; it can be used anywhere that you wish to +create an "empty" generic function that initially has no methods. In +particular, it need not be used inside a class. + +Also note that interface methods need not be abstract; one could, for +example, write an interface like this:: + + class IWriteMapping(Interface): + @abstract + def __setitem__(self, key, value): + """This has to be implemented""" + + def update(self, other:IReadMapping): + for k, v in IReadMapping(other).items(): + self[k] = v + +As long as ``__setitem__`` is defined for some type, the above +interface will provide a usable ``update()`` implementation. However, +if some specific type (or pair of types) has a more efficient way of +handling ``update()`` operations, an appropriate overload can still +be registered for use in that case. 
Subclassing and Re-assembly @@ -765,10 +788,11 @@ # ILength(aList).length == list.__len__(aList) when(ILength.length.fget, (list,))(list.__len__) + Alternatively, methods such as ``_get_foo()`` and ``_set_foo()`` may be defined as part of the interface, and the property defined -in terms of those methods, but this a bit more difficult for users +in terms of those methods, but this is a bit more difficult for users to implement correctly when creating a class that directly implements the interface, as they would then need to match all the individual method names, not just the name of the property or attribute. @@ -777,9 +801,9 @@ Aspects ------- -The adaptation system provided assumes that adapters are "stateless", -which is to say that adapters have no attributes or storage apart from -those of the adapted object. This follows the "typeclass/instance" +The adaptation system described above assumes that adapters are "stateless", +which is to say that adapters have no attributes or state apart from +that of the adapted object. This follows the "typeclass/instance" model of Haskell, and the concept of "pure" (i.e., transitively composable) adapters. @@ -789,8 +813,10 @@ One possibility of course, would be to attach monkeypatched "private" attributes to the adaptee. But this is subject to name collisions, -and complicates the process of initialization. It also doesn't work -on objects that don't have a ``__dict__`` attribute. +and complicates the process of initialization (since any code using +these attributes has to check for their existence and initialize them +if necessary). It also doesn't work on objects that don't have a +``__dict__`` attribute. So the ``Aspect`` class is provided to make it easy to attach extra information to objects that either: @@ -802,7 +828,7 @@ a global but thread-safe weak-reference dictionary), or 3. 
implement or can be adapt to the ``overloading.IAspectOwner`` - interface (technically, #1 or #2 imply this) + interface (technically, #1 or #2 imply this). Subclassing ``Aspect`` creates an adapter class whose state is tied to the life of the adapted object. @@ -817,13 +843,14 @@ count = 0 @after(Target.some_method) - def count_after_call(self, *args, **kw): + def count_after_call(self:Target, *args, **kw): Count(self).count += 1 The above code will keep track of the number of times that -``Target.some_method()`` is successfully called (i.e., it will not -count errors). Other code can then access the count using -``Count(someTarget).count``. +``Target.some_method()`` is successfully called on an instance of +``Target`` (i.e., it will not count errors unless they occur in a +more-specific "after" method). Other code can then access the count +using ``Count(someTarget).count``. ``Aspect`` instances can of course have ``__init__`` methods, to initialize any data structures. They can use either ``__slots__`` @@ -832,8 +859,8 @@ While this facility is rather primitive compared to a full-featured AOP tool like AspectJ, persons who wish to build pointcut libraries or other AspectJ-like features can certainly use ``Aspect`` objects -and method-combination decorators as a base for more expressive AOP -tools. +and method-combination decorators as a base for building more +expressive AOP tools. XXX spec out full aspect API, including keys, N-to-1 aspects, manual attach/detach/delete of aspect instances, and the ``IAspectOwner`` @@ -870,6 +897,98 @@ IAspectOwner +Overloading Usage Patterns +========================== + +In discussion on the Python-3000 list, the proposed feature of allowing +arbitrary functions to be overloaded has been somewhat controversial, +with some people expressing concern that this would make programs more +difficult to understand. 
+ +The general thrust of this argument is that one cannot rely on what a +function does, if it can be changed from anywhere in the program at any +time. Even though in principle this can already happen through +monkeypatching or code substitution, it is considered poor practice to +do so. + +However, providing support for overloading any function (or so the +argument goes), is implicitly blessing such changes as being an +acceptable practice. + +This argument appears to make sense in theory, but it is almost entirely +mooted in practice for two reasons. + +First, people are generally not perverse, defining a function to do one +thing in one place, and then summarily defining it to do the opposite +somewhere else! The principal reasons to extend the behavior of a +function that has *not* been specifically made generic are to: + +* Add special cases not contemplated by the original function's author, + such as support for additional types. + +* Be notified of an action in order to cause some related operation to + be performed, either before the original operation is performed, + after it, or both. This can include general-purpose operations like + adding logging, timing, or tracing, as well as application-specific + behavior. + +None of these reasons for adding overloads imply any change to the +intended default or overall behavior of the existing function, however. +Just as a base class method may be overridden by a subclass for these +same two reasons, so too may a function be overloaded to provide for +such enhancements. + +In other words, universal overloading does not equal *arbitrary* +overloading, in the sense that we need not expect people to randomly +redefine the behavior of existing functions in illogical or +unpredictable ways. If they did so, it would be no less of a bad +practice than any other way of writing illogical or unpredictable code! 
+ +However, to distinguish bad practice from good, it is perhaps necessary +to clarify further what good practice for defining overloads *is*. And +that brings us to the second reason why generic functions do not +necessarily make programs harder to understand: overloading patterns in +actual programs tend to follow very predictable patterns. (Both in +Python and in languages that have no *non*-generic functions.) + +If a module is defining a new generic operation, it will usually also +define any required overloads for existing types in the same place. +Likewise, if a module is defining a new type, then it will usually +define overloads there for any generic functions that it knows or cares +about. + +As a result, the vast majority of overloads can be found adjacent to +either the function being overloaded, or to a newly-defined type for +which the overload is adding support. Thus, overloads are highly- +discoverable in the common case, as you are either looking at the +function or the type, or both. + +It is only in rather infrequent cases that one will have overloads in a +module that contains neither the function nor the type(s) for which the +overload is added. This would be the case if, say, a third-party +created a bridge of support between one library's types and another +library's generic function(s). In such a case, however, best practice +suggests prominently advertising this, especially by way of the module +name. + +For example, PyProtocols defines such bridge support for working with +Zope interfaces and legacy Twisted interfaces, using modules called +``protocols.twisted_support`` and ``protocols.zope_support``. (These +bridges are done with interface adapters, rather than generic functions, +but the basic principle is the same.) 
+ +In short, understanding programs in the presence of universal +overloading need not be any more difficult, given that the vast majority +of overloads will either be adjacent to a function, or the definition of +a type that is passed to that function. + +And, in the absence of incompetence or deliberate intention to be +obscure, the few overloads that are not adjacent to the relevant type(s) +or function(s), will generally not need to be understood or known about +outside the scope where those overloads are defined. (Except in the +"support modules" case, where best practice suggests naming them +accordingly.) + Implementation Notes ==================== From python-checkins at python.org Wed Jul 25 01:36:35 2007 From: python-checkins at python.org (talin) Date: Wed, 25 Jul 2007 01:36:35 +0200 (CEST) Subject: [Python-checkins] r56535 - peps/trunk/pep-3101.txt Message-ID: <20070724233635.698FB1E400A@bag.python.org> Author: talin Date: Wed Jul 25 01:36:34 2007 New Revision: 56535 Modified: peps/trunk/pep-3101.txt Log: Updated PEP 3101 to incorporate latest feedback, and simplify even further. Also added additional explanation of custom formatting classes. Modified: peps/trunk/pep-3101.txt ============================================================================== --- peps/trunk/pep-3101.txt (original) +++ peps/trunk/pep-3101.txt Wed Jul 25 01:36:34 2007 @@ -141,7 +141,7 @@ Simple and Compound Field Names - Simple field names are either names or numbers. If numbers, they + Simple field names are either names or numbers. If numbers, they must be valid base-10 integers; if names, they must be valid Python identifiers. A number is used to identify a positional argument, while a name is used to identify a keyword argument. @@ -152,44 +152,37 @@ "My name is {0.name}".format(file('out.txt')) This example shows the use of the 'getattr' or 'dot' operator - in a field expression. The dot operator allows an attribute of + in a field expression. 
The dot operator allows an attribute of an input value to be specified as the field value. - The types of expressions that can be used in a compound name - have been deliberately limited in order to prevent potential - security exploits resulting from the ability to place arbitrary - Python expressions inside of strings. Only two operators are - supported, the '.' (getattr) operator, and the '[]' (getitem) - operator. - - Another limitation that is defined to limit potential security - issues is that field names or attribute names beginning with an - underscore are disallowed. This enforces the common convention - that names beginning with an underscore are 'private'. + Unlike some other programming languages, you cannot embed arbitrary + expressions in format strings. This is by design - the types of + expressions that you can use is deliberately limited. Only two operators + are supported: the '.' (getattr) operator, and the '[]' (getitem) + operator. The reason for allowing these operators is that they dont' + normally have side effects in non-pathological code. An example of the 'getitem' syntax: "My name is {0[name]}".format(dict(name='Fred')) - It should be noted that the use of 'getitem' within a string is - much more limited than its normal use. In the above example, the - string 'name' really is the literal string 'name', not a variable - named 'name'. The rules for parsing an item key are very simple. + It should be noted that the use of 'getitem' within a format string + is much more limited than its conventional usage. In the above example, + the string 'name' really is the literal string 'name', not a variable + named 'name'. The rules for parsing an item key are very simple. If it starts with a digit, then its treated as a number, otherwise it is used as a string. It is not possible to specify arbitrary dictionary keys from within a format string. 
- Implementation note: The implementation of this proposal is + Implementation note: The implementation of this proposal is not required to enforce the rule about a name being a valid Python identifier. Instead, it will rely on the getattr function of the underlying object to throw an exception if the identifier is not legal. The format function will have a minimalist parser which only attempts to figure out when it is "done" with an - identifier (by finding a '.' or a ']', or '}', etc.) The only - exception to this laissez-faire approach is that, by default, - strings are not allowed to have leading underscores. + identifier (by finding a '.' or a ']', or '}', etc.). Conversion Specifiers @@ -215,11 +208,11 @@ Note that the doubled '}' at the end, which would normally be escaped, is not escaped in this case. The reason is because the '{{' and '}}' syntax for escapes is only applied when used - *outside* of a format field. Within a format field, the brace + *outside* of a format field. Within a format field, the brace characters always have their normal meaning. The syntax for conversion specifiers is open-ended, since a class - can override the standard conversion specifiers. In such cases, + can override the standard conversion specifiers. In such cases, the format() method merely passes all of the characters between the first colon and the matching brace to the relevant underlying formatting method. @@ -248,7 +241,7 @@ '>' - Forces the field to be right-aligned within the available space. '=' - Forces the padding to be placed after the sign (if any) - but before the digits. This is used for printing fields + but before the digits. This is used for printing fields in the form '+000000120'. '^' - Forces the field to be centered within the available space. @@ -261,7 +254,7 @@ pad the field to the minimum width. 
The alignment flag must be supplied if the character is a number other than 0 (otherwise the character would be interpreted as part of the field width - specifier). A zero fill character without an alignment flag + specifier). A zero fill character without an alignment flag implies an alignment type of '='. The 'sign' element can be one of the following: @@ -269,20 +262,20 @@ '+' - indicates that a sign should be used for both positive as well as negative numbers '-' - indicates that a sign should be used only for negative - numbers (this is the default behaviour) + numbers (this is the default behavior) ' ' - indicates that a leading space should be used on positive numbers '()' - indicates that negative numbers should be surrounded by parentheses - 'width' is a decimal integer defining the minimum field width. If + 'width' is a decimal integer defining the minimum field width. If not specified, then the field width will be determined by the content. The 'precision' is a decimal number indicating how many digits should be displayed after the decimal point in a floating point - conversion. In a string conversion the field indicates how many - characters will be used from the field content. The precision is + conversion. In a string conversion the field indicates how many + characters will be used from the field content. The precision is ignored for integer conversions. Finally, the 'type' determines how the data should be presented. @@ -292,11 +285,11 @@ The available string conversion types are: - 's' - String format. Invokes str() on the object. + 's' - String format. Invokes str() on the object. This is the default conversion specifier type. - 'r' - Repr format. Invokes repr() on the object. + 'r' - Repr format. Invokes repr() on the object. - There are several integer conversion types. All invoke int() on + There are several integer conversion types. All invoke int() on the object before attempting to format it. 
The available integer conversion types are: @@ -311,7 +304,7 @@ 'X' - Hex format. Outputs the number in base 16, using upper- case letters for the digits above 9. - There are several floating point conversion types. All invoke + There are several floating point conversion types. All invoke float() on the object before attempting to format it. The available floating point conversion types are: @@ -380,97 +373,125 @@ format engine can be obtained through the 'Formatter' class that lives in the 'string' module. This class takes additional options which are not accessible via the normal str.format method. - - An application can create their own Formatter instance which has - customized behavior, either by setting the properties of the - Formatter instance, or by subclassing the Formatter class. + + An application can subclass the Formatter class to create their + own customized formatting behavior. The PEP does not attempt to exactly specify all methods and properties defined by the Formatter class; Instead, those will be - defined and documented in the initial implementation. However, this + defined and documented in the initial implementation. However, this PEP will specify the general requirements for the Formatter class, which are listed below. - -Formatter Creation and Initialization - - The Formatter class takes a single initialization argument, 'flags': - - Formatter(flags=0) - - The 'flags' argument is used to control certain subtle behavioral - differences in formatting that would be cumbersome to change via - subclassing. The flags values are defined as static variables - in the "Formatter" class: - - Formatter.ALLOW_LEADING_UNDERSCORES - - By default, leading underscores are not allowed in identifier - lookups (getattr or getitem). Setting this flag will allow - this. 
- - Formatter.CHECK_UNUSED_POSITIONAL - - If this flag is set, the any positional arguments which are - supplied to the 'format' method but which are not used by - the format string will cause an error. - - Formatter.CHECK_UNUSED_NAME - - If this flag is set, the any named arguments which are - supplied to the 'format' method but which are not used by - the format string will cause an error. + Although string.format() does not directly use the Formatter class + to do formatting, both use the same underlying implementation. The + reason that string.format() does not use the Formatter class directly + is because "string" is a built-in type, which means that all of its + methods must be implemented in C, whereas Formatter is a Python + class. Formatter provides an extensible wrapper around the same + C functions as are used by string.format(). Formatter Methods - The methods of class Formatter are as follows: + The Formatter class takes no initialization arguments: + + fmt = Formatter() + + The public API methods of class Formatter are as follows: -- format(format_string, *args, **kwargs) -- vformat(format_string, args, kwargs) - -- get_positional(args, index) - -- get_named(kwds, name) - -- format_field(value, conversion) - - 'format' is the primary API method. It takes a format template, - and an arbitrary set of positional and keyword argument. 'format' + + 'format' is the primary API method. It takes a format template, + and an arbitrary set of positional and keyword argument. 'format' is just a wrapper that calls 'vformat'. - 'vformat' is the function that does the actual work of formatting. It + 'vformat' is the function that does the actual work of formatting. It is exposed as a separate function for cases where you want to pass in a predefined dictionary of arguments, rather than unpacking and repacking the dictionary as individual arguments using the '*args' and - '**kwds' syntax. 
'vformat' does the work of breaking up the format - template string into character data and replacement fields. It calls - the 'get_positional' and 'get_index' methods as appropriate. + '**kwds' syntax. 'vformat' does the work of breaking up the format + template string into character data and replacement fields. It calls + the 'get_positional' and 'get_index' methods as appropriate (described + below.) - Note that the checking of unused arguments, and the restriction on - leading underscores in attribute names are also done in this function. + Formatter defines the following overridable methods: + + -- get_positional(args, index) + -- get_named(kwds, name) + -- check_unused_args(used_args, args, kwargs) + -- format_field(value, conversion) 'get_positional' and 'get_named' are used to retrieve a given field - value. For compound field names, these functions are only called for + value. For compound field names, these functions are only called for the first component of the field name; Subsequent components are - handled through normal attribute and indexing operations. So for - example, the field expression '0.name' would cause 'get_positional' to - be called with the list of positional arguments and a numeric index of - 0, and then the standard 'getattr' function would be called to get the - 'name' attribute of the result. + handled through normal attribute and indexing operations. + + So for example, the field expression '0.name' would cause + 'get_positional' to be called with the parameter 'args' set to the + list of positional arguments to vformat, and 'index' set to zero; + the returned value would then be passed to the standard 'getattr' + function to get the 'name' attribute. If the index or keyword refers to an item that does not exist, then an IndexError/KeyError will be raised. + + 'check_unused_args' is used to implement checking for unused arguments + if desired. 
The arguments to this function is the set of all argument + keys that were actually referred to in the format string (integers for + positional arguments, and strings for named arguments), and a reference + to the args and kwargs that was passed to vformat. The intersection + of these two sets will be the set of unused args. 'check_unused_args' + is assumed to throw an exception if the check fails. 'format_field' actually generates the text for a replacement field. The 'value' argument corresponds to the value being formatted, which - was retrieved from the arguments using the field name. The + was retrieved from the arguments using the field name. The 'conversion' argument is the conversion spec part of the field, which will be either a string or unicode object, depending on the type of the original format string. - - Note: The final implementation of the Formatter class may define - additional overridable methods and hooks. In particular, it may be - that 'vformat' is itself a composition of several additional, - overridable methods. (Depending on whether it is convenient to the - implementor of Formatter.) + + To get a better understanding of how these functions relate to each + other, here is pseudocode that explains the general operation of + vformat: + + def vformat(format_string, args, kwargs): + + # Output buffer and set of used args + buffer = StringIO.StringIO() + used_args = set() + + # Tokens are either format fields or literal strings + for token in self.parse(format_string): + if is_format_field(token): + field_spec, conversion_spec = token.rsplit(":", 2) + + # 'first_part' is the part before the first '.' 
or '[' + first_part = get_first_part(token) + used_args.add(first_part) + if is_positional(first_part): + value = self.get_positional(args, first_part) + else: + value = self.get_named(kwargs, first_part) + + # Handle [subfield] or .subfield + for comp in components(token): + value = resolve_subfield(value, comp) + + # Write out the converted value + buffer.write(format_field(value, conversion)) + + else: + buffer.write(token) + + self.check_unused_args(used_args, args, kwargs) + return buffer.getvalue() + + Note that the actual algorithm of the Formatter class may not be the + one presented here. In particular, the final implementation of + the Formatter class may define additional overridable methods and + hooks. Also, the final implementation will be written in C. Customizing Formatters @@ -511,15 +532,15 @@ It would also be possible to create a 'smart' namespace formatter that could automatically access both locals and globals through - snooping of the calling stack. Due to the need for compatibility + snooping of the calling stack. Due to the need for compatibility the different versions of Python, such a capability will not be included in the standard library, however it is anticipated that someone will create and publish a recipe for doing this. Another type of customization is to change the way that built-in - types are formatted by overriding the 'format_field' method. (For + types are formatted by overriding the 'format_field' method. (For non-built-in types, you can simply define a __format__ special - method on that type.) So for example, you could override the + method on that type.) So for example, you could override the formatting of numbers to output scientific notation when needed. @@ -527,8 +548,7 @@ There are two classes of exceptions which can occur during formatting: exceptions generated by the formatter code itself, and exceptions - generated by user code (such as a field object's getattr function, or - the field_hook function). 
+ generated by user code (such as a field object's 'getattr' function). In general, exceptions generated by the formatter code itself are of the "ValueError" variety -- there is an error in the actual "value" @@ -605,7 +625,7 @@ this PEP used backslash rather than doubling to escape a bracket. This worked because backslashes in Python string literals that don't conform to a standard backslash sequence such as '\n' - are left unmodified. However, this caused a certain amount + are left unmodified. However, this caused a certain amount of confusion, and led to potential situations of multiple recursive escapes, i.e. '\\\\{' to place a literal backslash in front of a bracket. @@ -615,6 +635,38 @@ what .Net uses. +Alternate Feature Proposals + + Restricting attribute access: An earlier version of the PEP + restricted the ability to access attributes beginning with a + leading underscore, for example "{0}._private". However, this + is a useful ability to have when debugging, so the feature + was dropped. + + Some developers suggested that the ability to do 'getattr' and + 'getitem' access should be dropped entirely. However, this + is in conflict with the needs of another set of developers who + strongly lobbied for the ability to pass in a large dict as a + single argument (without flattening it into individual keyword + arguments using the **kwargs syntax) and then have the format + string refer to dict entries individually. + + There has also been suggestions to expand the set of expressions + that are allowed in a format string. However, this was seen + to go against the spirit of TOOWTDI, since the same effect can + be achieved in most cases by executing the same expression on + the parameter before it's passed in to the formatting function. + For cases where the format string is being use to do arbitrary + formatting in a data-rich environment, it's recommended to use + a templating engine specialized for this purpose, such as + Genshi [5] or Cheetah [6]. 
+ + Many other features were considered and rejected because they + could easily be achieved by subclassing Formatter instead of + building the feature into the base implementation. This includes + alternate syntax, comments in format strings, and many others. + + Security Considerations Historically, string formatting has been a common source of @@ -622,43 +674,21 @@ string templating system allows arbitrary expressions to be embedded in format strings. - The typical scenario is one where the string data being processed - is coming from outside the application, perhaps from HTTP headers - or fields within a web form. An attacker could substitute their - own strings designed to cause havok. - - The string formatting system outlined in this PEP is by no means - 'secure', in the sense that no Python library module can, on its - own, guarantee security, especially given the open nature of - the Python language. Building a secure application requires a - secure approach to design. - - What this PEP does attempt to do is make the job of designing a - secure application easier, by making it easier for a programmer - to reason about the possible consequences of a string formatting - operation. It does this by limiting those consequences to a smaller - and more easier understood subset. - - For example, because it is possible in Python to override the - 'getattr' operation of a type, the interpretation of a compound - replacement field such as "0.name" could potentially run - arbitrary code. - - However, it is *extremely* rare for the mere retrieval of an - attribute to have side effects. Other operations which are more - likely to have side effects - such as method calls - are disallowed. - Thus, a programmer can be reasonably assured that no string - formatting operation will cause a state change in the program. - This assurance is not only useful in securing an application, but - in debugging it as well. 
- - Similarly, the restriction on field names beginning with - underscores is intended to provide similar assurances about the - visibility of private data. - - Of course, programmers would be well-advised to avoid using - any external data as format strings, and instead use that data - as the format arguments instead. + The best way to use string formatting in a way that does not + create potential security holes is to never use format strings + that come from an untrusted source. + + Barring that, the next best approach is to insure that string + formatting has no side effects. Because of the open nature of + Python, it is impossible to guarantee that any non-trivial + operation has this property. What this PEP does is limit the + types of expressions in format strings to those in which visible + side effects are both rare and strongly discouraged by the + culture of Python developers. So for example, attribute access + is allowed because it would be considered pathological to write + code where the mere access of an attribute has visible side + effects (whether the code has *invisible* side effects - such + as creating a cache entry for faster lookup - is irrelevant.) Sample Implementation @@ -692,6 +722,12 @@ [4] Composite Formatting - [.Net Framework Developer's Guide] http://msdn.microsoft.com/library/en-us/cpguide/html/cpconcompositeformatting.asp?frame=true + + [5] Genshi templating engine. + http://genshi.edgewall.org/ + + [5] Cheetah - The Python-Powered Template Engine. 
+ http://www.cheetahtemplate.org/ Copyright From python-checkins at python.org Wed Jul 25 14:57:48 2007 From: python-checkins at python.org (nick.coghlan) Date: Wed, 25 Jul 2007 14:57:48 +0200 (CEST) Subject: [Python-checkins] r56538 - python/trunk/Lib/test/test_urllib2_localnet.py Message-ID: <20070725125748.C9A2B1E4002@bag.python.org> Author: nick.coghlan Date: Wed Jul 25 14:57:48 2007 New Revision: 56538 Modified: python/trunk/Lib/test/test_urllib2_localnet.py Log: More buildbot cleanup - let the OS assign the port for test_urllib2_localnet Modified: python/trunk/Lib/test/test_urllib2_localnet.py ============================================================================== --- python/trunk/Lib/test/test_urllib2_localnet.py (original) +++ python/trunk/Lib/test/test_urllib2_localnet.py Wed Jul 25 14:57:48 2007 @@ -40,14 +40,16 @@ class LoopbackHttpServerThread(threading.Thread): """Stoppable thread that runs a loopback http server.""" - def __init__(self, port, RequestHandlerClass): + def __init__(self, request_handler): threading.Thread.__init__(self) - self._RequestHandlerClass = RequestHandlerClass self._stop = False - self._port = port - self._server_address = ('127.0.0.1', self._port) self.ready = threading.Event() - self.error = None + request_handler.protocol_version = "HTTP/1.0" + self.httpd = LoopbackHttpServer(('127.0.0.1', 0), + request_handler) + #print "Serving HTTP on %s port %s" % (self.httpd.server_name, + # self.httpd.server_port) + self.port = self.httpd.server_port def stop(self): """Stops the webserver if it's currently running.""" @@ -58,24 +60,9 @@ self.join() def run(self): - protocol = "HTTP/1.0" - - try: - self._RequestHandlerClass.protocol_version = protocol - httpd = LoopbackHttpServer(self._server_address, - self._RequestHandlerClass) - - sa = httpd.socket.getsockname() - #print "Serving HTTP on", sa[0], "port", sa[1], "..." - except: - # Fail "gracefully" if we are unable to start. 
- self.ready.set() - self.error = sys.exc_info()[1] - raise - self.ready.set() while not self._stop: - httpd.handle_request() + self.httpd.handle_request() # Authentication infrastructure @@ -232,26 +219,21 @@ class ProxyAuthTests(unittest.TestCase): URL = "http://www.foo.com" - PORT = 8080 USER = "tester" PASSWD = "test123" REALM = "TestRealm" - PROXY_URL = "http://127.0.0.1:%d" % PORT - def setUp(self): FakeProxyHandler.digest_auth_handler.set_users({ self.USER : self.PASSWD }) FakeProxyHandler.digest_auth_handler.set_realm(self.REALM) - self.server = LoopbackHttpServerThread(self.PORT, FakeProxyHandler) + self.server = LoopbackHttpServerThread(FakeProxyHandler) self.server.start() self.server.ready.wait() - if self.server.error: - raise self.server.error - - handler = urllib2.ProxyHandler({"http" : self.PROXY_URL}) + proxy_url = "http://127.0.0.1:%d" % self.server.port + handler = urllib2.ProxyHandler({"http" : proxy_url}) self._digest_auth_handler = urllib2.ProxyDigestAuthHandler() self.opener = urllib2.build_opener(handler, self._digest_auth_handler) From python-checkins at python.org Wed Jul 25 15:18:59 2007 From: python-checkins at python.org (nick.coghlan) Date: Wed, 25 Jul 2007 15:18:59 +0200 (CEST) Subject: [Python-checkins] r56539 - python/trunk/Lib/test/test_pow.py Message-ID: <20070725131859.5B1141E400B@bag.python.org> Author: nick.coghlan Date: Wed Jul 25 15:18:58 2007 New Revision: 56539 Modified: python/trunk/Lib/test/test_pow.py Log: Add a temporary diagnostic message before a strange failure on the alpha Debian buildbot Modified: python/trunk/Lib/test/test_pow.py ============================================================================== --- python/trunk/Lib/test/test_pow.py (original) +++ python/trunk/Lib/test/test_pow.py Wed Jul 25 15:18:58 2007 @@ -106,6 +106,12 @@ # platform pow() was buggy, and Python didn't worm around it. 
eq = self.assertEquals a = -1.0 + # XXX Temporary diagnostic for failure on alpha Debian buildbot + from sys import __stdout__ + from math import floor + print >> __stdout__, "*** Number: %r" % 1.23e167 + print >> __stdout__, "*** Floor: %r" % floor(1.23e167) + # XXX End diagnostic message eq(pow(a, 1.23e167), 1.0) eq(pow(a, -1.23e167), 1.0) for b in range(-10, 11): From buildbot at python.org Wed Jul 25 15:32:35 2007 From: buildbot at python.org (buildbot at python.org) Date: Wed, 25 Jul 2007 13:32:35 +0000 Subject: [Python-checkins] buildbot warnings in x86 gentoo trunk Message-ID: <20070725133235.470D51E4002@bag.python.org> The Buildbot has detected a new failure of x86 gentoo trunk. Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520gentoo%2520trunk/builds/2326 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: nick.coghlan Build had warnings: warnings test Excerpt from the test logfile: Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/threading.py", line 445, in run self.__target(*self.__args, **self.__kwargs) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/bsddb/test/test_thread.py", line 281, in readerThread rec = dbutils.DeadlockWrap(c.next, max_retries=10) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/bsddb/dbutils.py", line 62, in DeadlockWrap return function(*_args, **_kwargs) DBLockDeadlockError: (-30996, 'DB_LOCK_DEADLOCK: Locker killed to resolve a deadlock') Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/threading.py", line 445, in run self.__target(*self.__args, **self.__kwargs) File 
"/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/bsddb/test/test_thread.py", line 245, in writerThread self.assertEqual(data, self.makeData(key)) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/unittest.py", line 343, in failUnlessEqual (msg or '%r != %r' % (first, second)) AssertionError: None != '0306-0306-0306-0306-0306' Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/threading.py", line 445, in run self.__target(*self.__args, **self.__kwargs) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/bsddb/test/test_thread.py", line 260, in writerThread self.assertEqual(data, self.makeData(key)) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/unittest.py", line 343, in failUnlessEqual (msg or '%r != %r' % (first, second)) AssertionError: None != '1006-1006-1006-1006-1006' Traceback (most recent call last): File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/threading.py", line 445, in run self.__target(*self.__args, **self.__kwargs) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/bsddb/test/test_thread.py", line 260, in writerThread self.assertEqual(data, self.makeData(key)) File "/home/buildslave/python-trunk/trunk.norwitz-x86/build/Lib/unittest.py", line 343, in failUnlessEqual (msg or '%r != %r' % (first, second)) AssertionError: None != '2003-2003-2003-2003-2003' 1 test failed: test_smtplib make: *** [buildbottest] Error 1 sincerely, -The Buildbot From buildbot at python.org Wed Jul 25 15:41:04 2007 From: buildbot at python.org (buildbot at python.org) Date: Wed, 25 Jul 2007 13:41:04 +0000 Subject: [Python-checkins] buildbot warnings in ppc Debian unstable trunk Message-ID: 
<20070725134104.B6A201E4002@bag.python.org> The Buildbot has detected a new failure of ppc Debian unstable trunk. Full details are available at: http://www.python.org/dev/buildbot/all/ppc%2520Debian%2520unstable%2520trunk/builds/70 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: nick.coghlan Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_socket ====================================================================== ERROR: testGetServBy (test.test_socket.GeneralModuleTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/pybot/buildarea/trunk.klose-debian-ppc/build/Lib/test/test_socket.py", line 346, in testGetServBy raise socket.error error make: *** [buildbottest] Error 1 sincerely, -The Buildbot From python-checkins at python.org Wed Jul 25 17:16:10 2007 From: python-checkins at python.org (erik.forsberg) Date: Wed, 25 Jul 2007 17:16:10 +0200 (CEST) Subject: [Python-checkins] r56540 - tracker/instances/python-dev-spambayes-integration Message-ID: <20070725151610.B7A591E400D@bag.python.org> Author: erik.forsberg Date: Wed Jul 25 17:16:10 2007 New Revision: 56540 Added: tracker/instances/python-dev-spambayes-integration/ - copied from r56539, tracker/instances/python-dev/ Log: Creating branch for spambayes integration work. 
From python-checkins at python.org Wed Jul 25 17:40:24 2007 From: python-checkins at python.org (erik.forsberg) Date: Wed, 25 Jul 2007 17:40:24 +0200 (CEST) Subject: [Python-checkins] r56541 - in tracker/instances/python-dev-spambayes-integration: extensions/spambayes.py html/file.item.html html/msg.item.html schema.py Message-ID: <20070725154024.B98F31E4011@bag.python.org> Author: erik.forsberg Date: Wed Jul 25 17:40:23 2007 New Revision: 56541 Added: tracker/instances/python-dev-spambayes-integration/extensions/spambayes.py Modified: tracker/instances/python-dev-spambayes-integration/html/file.item.html tracker/instances/python-dev-spambayes-integration/html/msg.item.html tracker/instances/python-dev-spambayes-integration/schema.py Log: The beginnings of advanced spambayes integration; - Schema modified to include two new attributes on the file and msg class: * spambayes_score * spambayes_misclassified - New action added by extensions/spambayes.py for training msg/file as spam or ham. - item pages for file and msg classes modified to allow training by clicking on button. 
Added: tracker/instances/python-dev-spambayes-integration/extensions/spambayes.py ============================================================================== --- (empty file) +++ tracker/instances/python-dev-spambayes-integration/extensions/spambayes.py Wed Jul 25 17:40:23 2007 @@ -0,0 +1,68 @@ +from roundup.cgi.actions import Action +from roundup.cgi.exceptions import * + +import xmlrpclib, socket + +def extract_classinfo(db, classname, nodeid): + node = db.getnode(classname, nodeid) + + authorage = node['creation'].timestamp() - \ + db.getnode('user', node['author'])['creation'].timestamp() + + tokens = ["klass:%s" % classname, + "author:%s" % node['author'], + "authorage:%d" % int(authorage)] + + klass = db.getclass(classname) + return (klass.get(nodeid, 'content'), tokens) + +def train_spambayes(db, content, tokens, is_spam): + spambayes_uri = db.config.detectors['SPAMBAYES_URI'] + spam_cutoff = float(db.config.detectors['SPAMBAYES_SPAM_CUTOFF']) + + server = xmlrpclib.ServerProxy(spambayes_uri, verbose=False) + try: + server.train({'content':content}, tokens, {}, is_spam) + return (True, None) + except (socket.error, xmlrpclib.Error), e: + return (False, str(e)) + + +class SpambayesClassify(Action): + def handle(self): + (content, tokens) = extract_classinfo(self.db, + self.classname, self.nodeid) + + if self.form.has_key("trainspam"): + is_spam = True + elif self.form.has_key("trainham"): + is_spam = False + + (status, errmsg) = train_spambayes(self.db, content, tokens, + is_spam) + + node = self.db.getnode(self.classname, self.nodeid) + props = {} + + if status: + if node.get('spambayes_misclassified', False): + props['spambayes_misclassified':True] + + props['spambayes_score'] = 1.0 + + s = " SPAM" + if not is_spam: + props['spambayes_score'] = 0.0 + s = " HAM" + self.client.ok_message.append(self._('Message classified as') + s) + else: + self.client.error_message.append(self._('Unable to classify message, got error:') + errmsg) + + klass = 
self.db.getclass(self.classname) + klass.set(self.nodeid, **props) + self.db.commit() + + +def init(instance): + instance.registerAction("spambayes_classify", SpambayesClassify) + Modified: tracker/instances/python-dev-spambayes-integration/html/file.item.html ============================================================================== --- tracker/instances/python-dev-spambayes-integration/html/file.item.html (original) +++ tracker/instances/python-dev-spambayes-integration/html/file.item.html Wed Jul 25 17:40:23 2007 @@ -29,6 +29,15 @@ Please note that for security reasons, it's not permitted to set content type to text/html. + + SpamBayes Score + + + + + Marked as misclassified + + @@ -48,6 +57,15 @@ tal:attributes="href string:file${context/id}/${context/name}" i18n:translate="">download +
    + + + + +
    + Modified: tracker/instances/python-dev-spambayes-integration/html/msg.item.html ============================================================================== --- tracker/instances/python-dev-spambayes-integration/html/msg.item.html (original) +++ tracker/instances/python-dev-spambayes-integration/html/msg.item.html Wed Jul 25 17:40:23 2007 @@ -47,10 +47,34 @@ Date + + + SpamBayes Score + + + + + Marked as misclassified + + + - + + + + + Modified: tracker/instances/python-dev-spambayes-integration/schema.py ============================================================================== --- tracker/instances/python-dev-spambayes-integration/schema.py (original) +++ tracker/instances/python-dev-spambayes-integration/schema.py Wed Jul 25 17:40:23 2007 @@ -97,10 +97,14 @@ summary=String(), files=Multilink("file"), messageid=String(), - inreplyto=String()) + inreplyto=String(), + spambayes_score=Number(), + spambayes_misclassified=Boolean(),) file = FileClass(db, "file", - name=String()) + name=String(), + spambayes_score=Number(), + spambayes_misclassified=Boolean(),) # IssueClass automatically gets these properties in addition to the Class ones: # title = String() From python-checkins at python.org Wed Jul 25 18:24:08 2007 From: python-checkins at python.org (martin.v.loewis) Date: Wed, 25 Jul 2007 18:24:08 +0200 (CEST) Subject: [Python-checkins] r56542 - in python/branches/release25-maint: Lib/distutils/command/register.py Lib/distutils/command/upload.py Misc/NEWS Message-ID: <20070725162408.8311F1E4002@bag.python.org> Author: martin.v.loewis Date: Wed Jul 25 18:24:08 2007 New Revision: 56542 Modified: python/branches/release25-maint/Lib/distutils/command/register.py python/branches/release25-maint/Lib/distutils/command/upload.py python/branches/release25-maint/Misc/NEWS Log: Change location of the package index to pypi.python.org/pypi. 
Modified: python/branches/release25-maint/Lib/distutils/command/register.py ============================================================================== --- python/branches/release25-maint/Lib/distutils/command/register.py (original) +++ python/branches/release25-maint/Lib/distutils/command/register.py Wed Jul 25 18:24:08 2007 @@ -17,7 +17,7 @@ description = ("register the distribution with the Python package index") - DEFAULT_REPOSITORY = 'http://www.python.org/pypi' + DEFAULT_REPOSITORY = 'http://pypi.python.org/pypi' user_options = [ ('repository=', 'r', Modified: python/branches/release25-maint/Lib/distutils/command/upload.py ============================================================================== --- python/branches/release25-maint/Lib/distutils/command/upload.py (original) +++ python/branches/release25-maint/Lib/distutils/command/upload.py Wed Jul 25 18:24:08 2007 @@ -20,7 +20,7 @@ description = "upload binary package to PyPI" - DEFAULT_REPOSITORY = 'http://www.python.org/pypi' + DEFAULT_REPOSITORY = 'http://pypi.python.org/pypi' user_options = [ ('repository=', 'r', Modified: python/branches/release25-maint/Misc/NEWS ============================================================================== --- python/branches/release25-maint/Misc/NEWS (original) +++ python/branches/release25-maint/Misc/NEWS Wed Jul 25 18:24:08 2007 @@ -26,6 +26,8 @@ Library ------- +- Change location of the package index to pypi.python.org/pypi + - Bug #1701409: Fix a segfault in printing ctypes.c_char_p and ctypes.c_wchar_p when they point to an invalid location. As a sideeffect the representation of these instances has changed. 
From python-checkins at python.org Wed Jul 25 18:24:24 2007 From: python-checkins at python.org (martin.v.loewis) Date: Wed, 25 Jul 2007 18:24:24 +0200 (CEST) Subject: [Python-checkins] r56543 - in python/trunk: Lib/distutils/command/register.py Lib/distutils/command/upload.py Misc/NEWS Message-ID: <20070725162424.5811E1E4017@bag.python.org> Author: martin.v.loewis Date: Wed Jul 25 18:24:23 2007 New Revision: 56543 Modified: python/trunk/Lib/distutils/command/register.py python/trunk/Lib/distutils/command/upload.py python/trunk/Misc/NEWS Log: Change location of the package index to pypi.python.org/pypi Modified: python/trunk/Lib/distutils/command/register.py ============================================================================== --- python/trunk/Lib/distutils/command/register.py (original) +++ python/trunk/Lib/distutils/command/register.py Wed Jul 25 18:24:23 2007 @@ -17,7 +17,7 @@ description = ("register the distribution with the Python package index") - DEFAULT_REPOSITORY = 'http://www.python.org/pypi' + DEFAULT_REPOSITORY = 'http://pypi.python.org/pypi' user_options = [ ('repository=', 'r', Modified: python/trunk/Lib/distutils/command/upload.py ============================================================================== --- python/trunk/Lib/distutils/command/upload.py (original) +++ python/trunk/Lib/distutils/command/upload.py Wed Jul 25 18:24:23 2007 @@ -20,7 +20,7 @@ description = "upload binary package to PyPI" - DEFAULT_REPOSITORY = 'http://www.python.org/pypi' + DEFAULT_REPOSITORY = 'http://pypi.python.org/pypi' user_options = [ ('repository=', 'r', Modified: python/trunk/Misc/NEWS ============================================================================== --- python/trunk/Misc/NEWS (original) +++ python/trunk/Misc/NEWS Wed Jul 25 18:24:23 2007 @@ -238,6 +238,8 @@ Library ------- +- Change location of the package index to pypi.python.org/pypi + - Bug #1701409: Fix a segfault in printing ctypes.c_char_p and ctypes.c_wchar_p when they point to an 
invalid location. As a sideeffect the representation of these instances has changed. From python-checkins at python.org Wed Jul 25 18:30:41 2007 From: python-checkins at python.org (erik.forsberg) Date: Wed, 25 Jul 2007 18:30:41 +0200 (CEST) Subject: [Python-checkins] r56544 - tracker/instances/python-dev-spambayes-integration/detectors/userauditor.py Message-ID: <20070725163041.6E6FD1E4012@bag.python.org> Author: erik.forsberg Date: Wed Jul 25 18:30:40 2007 New Revision: 56544 Modified: tracker/instances/python-dev-spambayes-integration/detectors/userauditor.py Log: Fix the userauditor role check. Modified: tracker/instances/python-dev-spambayes-integration/detectors/userauditor.py ============================================================================== --- tracker/instances/python-dev-spambayes-integration/detectors/userauditor.py (original) +++ tracker/instances/python-dev-spambayes-integration/detectors/userauditor.py Wed Jul 25 18:30:40 2007 @@ -36,7 +36,7 @@ raise ValueError, 'Role "%s" does not exist'%rolename if None != nodeid and "admin" in roles: - if not "admin" in [x.lower().strip() for x in cl.get(nodeid, 'roles')]: + if not "admin" in [x.lower().strip() for x in cl.get(nodeid, 'roles').split(",")]: raise ValueError, "Only Admins may assign the Admin role!" From python-checkins at python.org Wed Jul 25 18:31:09 2007 From: python-checkins at python.org (erik.forsberg) Date: Wed, 25 Jul 2007 18:31:09 +0200 (CEST) Subject: [Python-checkins] r56545 - tracker/instances/python-dev-spambayes-integration/detectors/no_texthtml.py Message-ID: <20070725163109.B43E71E4014@bag.python.org> Author: erik.forsberg Date: Wed Jul 25 18:31:09 2007 New Revision: 56545 Modified: tracker/instances/python-dev-spambayes-integration/detectors/no_texthtml.py Log: If there's no type in newvalues, don't try to use newvalues['type']... 
:-) Modified: tracker/instances/python-dev-spambayes-integration/detectors/no_texthtml.py ============================================================================== --- tracker/instances/python-dev-spambayes-integration/detectors/no_texthtml.py (original) +++ tracker/instances/python-dev-spambayes-integration/detectors/no_texthtml.py Wed Jul 25 18:31:09 2007 @@ -1,6 +1,6 @@ def audit_html_files(db, cl, nodeid, newvalues): - if newvalues['type'] == 'text/html': + if newvalues.has_key('type') and newvalues['type'] == 'text/html': newvalues['type'] = 'text/plain' From python-checkins at python.org Thu Jul 26 08:59:31 2007 From: python-checkins at python.org (neal.norwitz) Date: Thu, 26 Jul 2007 08:59:31 +0200 (CEST) Subject: [Python-checkins] r56550 - sandbox/trunk/py_type_refactor sandbox/trunk/py_type_refactor/gen_enums.py sandbox/trunk/py_type_refactor/refactor.py Message-ID: <20070726065931.66A991E400A@bag.python.org> Author: neal.norwitz Date: Thu Jul 26 08:59:30 2007 New Revision: 56550 Added: sandbox/trunk/py_type_refactor/ sandbox/trunk/py_type_refactor/gen_enums.py (contents, props changed) sandbox/trunk/py_type_refactor/refactor.py (contents, props changed) Log: Add a tool that can refactor C source files. It can convert PyTypeObject and its associated structures that in the past have been stored as static objects into function calls. This conversion will allow easier upgrades by eliminating the worst parts of binary compatability. It has been tested on all the .c files in the Objects subdirectory. The updated files were visually inspected. The new API has not been defined, so this just outputs something that is hopefully close enough until we can figure out the real API. 
Added: sandbox/trunk/py_type_refactor/gen_enums.py ============================================================================== --- (empty file) +++ sandbox/trunk/py_type_refactor/gen_enums.py Thu Jul 26 08:59:30 2007 @@ -0,0 +1,25 @@ +#!/usr/bin/env python + +"""Print to stdout the enums based on the values used in the fixer.""" + +import sys + +import fixer + + +def main(unused_argv): + indent = fixer.INITIAL_INDENT + for cls in fixer.StructParser.__subclasses__(): + print 'typedef enum {' + METHODS = set(range(len(cls.FIELD_KIND))) + if hasattr(cls, '_METHOD_INDICES'): + METHODS = set(cls._METHOD_INDICES) + values = ['%s%s%s' % (indent, cls.FIELD_KIND_PREFIX, name) + for i, name in enumerate(cls.FIELD_KIND) if i in METHODS] + print ',\n'.join(values) + print '} %sKind;' % cls.FIELD_KIND_PREFIX[:-1] + print + + +if __name__ == '__main__': + main(sys.argv) Added: sandbox/trunk/py_type_refactor/refactor.py ============================================================================== --- (empty file) +++ sandbox/trunk/py_type_refactor/refactor.py Thu Jul 26 08:59:30 2007 @@ -0,0 +1,713 @@ +#!/usr/bin/env python + +"""A tool for converting PyTypeObject structures used in Python 2.5 and earlier +to function calls used in 2.6 and above. + +usage: refactor.py src1.c [src2.c] ... +""" + +# TODO: +# * File re-writing outputs to new dir, rather than overwriting existing file. +# * Verify updated C files don't contain syntax errors. +# * Define real APIs and use them. + + +import os +import sys + + +# The amount that code within a function should be indented. +INITIAL_INDENT = ' ' + +# Index at which code should be wrapped to the next line if possible. +WRAP_INDEX = 76 + + +# TODO: use a command line option. +_WRITE_FILE = False +_DEBUG = False + +# These are the type structs we handle. 
+_INTERESTING_TYPES = ['PyMethodDef', 'PyTypeObject', + 'PyMemberDef', 'PyGetSetDef', + 'PyNumberMethods', 'PySequenceMethods', + 'PyMappingMethods', 'PyBufferProcs', + ] + +def LooksLikeDeclaration(line): + """Returns a bool whether the line looks like a declaration we might + be interested in. False when looks like a parameter or variable decl. + """ + line = line.rstrip() + # Include \\ because it looks like a macro and we aren't gonna handle it. + for c in '),;\\': + if c in line: + return True + return False + + +def GetCsvTokens(data): + """Returns tokens from data that are broken at commas. + This can be used to break up data that is stored in static struct. + + Yields: + strings that correspond to the tokens from data as separated by commas + """ + Clean = lambda s: s.strip().replace('""', '') + start = index = 0 + while 1: + comma_index = data.find(',', index) + paren_index = data.find('(', index) + quote_index = data.find('"', index) + # Determine which token we found first. + if quote_index >= 0 and quote_index < comma_index: + # It was a quote, eat everything up to the next quote. + # XXX: assume that there isn't a \" in there. + index = data.find('"', quote_index + 1) + if index >= 0: + index += 1 + elif comma_index >= 0: + # Did we get a comma or paren first? + if paren_index >= 0 and paren_index < comma_index: + # Paren, find the close paren, ie ). + index = data.find(')', paren_index + 1) + if index >= 0: + index += 1 + else: + # Got a comma, clean up this data and move on to next token. + yield Clean(data[start:comma_index]) + start = index = (comma_index + 1) + else: + # No quote, no comma, must be done. Give 'em what we got. + yield Clean(data[start:]) + break + + +class GenericDef(object): + """Base class for all definitions. Provides common utilities and API + for aggregating info about the definition and outputing code. + """ + + # Value which indicates the line contained a sentinel. 
+ SENTINEL = None + + def __init__(self, name, is_static, first_line): + self.name = name + self.is_static = is_static + self.first_line = first_line + self.last_line = -1 + self.lines = [] + + def AddLine(self, line, reader): + if line: + result = self.ParseLine(line, reader) + self.lines.append(result) + + def ParseLine(self, unused_line, unused_reader): + return self.SENTINEL + + def VerifyValid(self): + """Returns a bool whether everything seems consistent or not.""" + # Verify there is a sentinel value, then remove it. + if not self.lines: + return False + if self.lines[-1] != self.SENTINEL: + return False + del self.lines[-1] + return True + + def GetStatic(self): + if self.is_static: + return 'static ' + return '' + + def GetFunctionDeclaration(self): + return ('%svoid Init_%s(PyTypeObject *type)\n' % + (self.GetStatic(), self.name)) + + def NewCode(self): + """Returns a sequence of new lines of code to replace the old code.""" + print "Can't produce new code", self.__class__.__name__ + return [] + + def AppendParameter(self, line, parameter, indent): + """Append a parameter to a line reflowing if necessary. + The line should *not* end with a comma. + + Args: + line: string of text + parameter: string + indent: int containing how much to indent next line if necessary + + Returns: + new line of text with the proper prefix + """ + nl_index = line.rfind('\n') + if nl_index < 0: + nl_index = 0 + + last_line_len = len(line[nl_index:]) + prefix = ', ' + if (last_line_len + len(parameter)) > WRAP_INDEX: + prefix = ',\n' + (' ' * indent) + return prefix + parameter + + +class ArrayParser(GenericDef): + + # Subclasses should set these values to something appropriate. + + # These are used in ParseLine. + NUM_FIELDS = 0 + SENTINEL = (None,) * NUM_FIELDS + + # PREFIX is used in NewCode. + PREFIX = 'PyType_AddXXXArray(' + + def _GetOneDefinition(self, line, reader): + """Returns the full declaration line even if it originally spanned + multiple lines. 
If the sentinel value is found, return None. + + Raises: + NotImplementedError is raised if there is a problem parsing the line. + """ + index = line.find('{') + if index < 0: + name = self.__class__.__name__ + args = (name, reader.filename, reader.line_number, line) + raise NotImplementedError( + 'Unable to find start of %s in %s:%d\n%s' % args) + + # If we got a sentinel, just return what we got (ie, nothin). + line = line[index+1:].strip() + if line.startswith('0') or line.startswith('NULL'): + return None + + if '}' not in line: + for count, next_line in reader.GetDefinition(): + if count > 1: + msg = ('Unable to find end of %s after two lines: %s' % + (self.__class__.__name__, line)) + raise NotImplementedError(msg) + count += 1 + line += next_line + return line + + def ParseLine(self, line, reader): + """Returns the values separated by commas in the line of a struct + based on the number of possible values (NUM_FIELDS). If any + values are missing, returns None for those values. + + Returns: + sequence of length NUM_FIELDS + if a sentinel value was found, returns SENTINEL + if a preprocessor directive was found, returns the SENTINEL + with the last value changed the entire text of the line + """ + if line[0] == '#': + return self.SENTINEL[:-1] + (line,) + + line = self._GetOneDefinition(line, reader) + if line is None: + return self.SENTINEL + + # Tokenize line and pull out the parts of the struct. + parts = GetCsvTokens(line) + pieces = filter(None, [s.strip().rstrip(',}') for s in parts]) + if len(pieces) < self.NUM_FIELDS: + # Ensure we have the correct # of pieces, padded with None. 
+ pieces.extend([None] * (self.NUM_FIELDS - len(pieces))) + assert len(pieces) == self.NUM_FIELDS, 'busted line: %r' % line + return pieces + + def GetArgs(self, data): + return [arg or 'NULL' for arg in data] + + def NewCode(self): + prefix = INITIAL_INDENT + self.PREFIX + INDENT = len(prefix) + yield self.GetFunctionDeclaration() + yield '{\n' + for data in self.lines: + name = data[0] + if name is None: + yield data[-1] + '\n' + else: + line = prefix + ('type, %s' % name) + for arg in self.GetArgs(data[1:]): + line += self.AppendParameter(line, arg, INDENT) + line += ');\n' + yield line + yield '}\n' + + +class GetSetDef(ArrayParser): + """Convert PyGetSetDef instances to method calls.""" + + PREFIX = 'PyType_AddGetSet(' + NUM_FIELDS = 5 + SENTINEL = (None,) * NUM_FIELDS + + # Stores lines as 5-tuple that conforms to a PyGetSetDef. + # (name, get, set, doc, closure) + # If there is a preprocessor statement, name will contain None + # and doc will contain the full line. + + +class MemberDef(ArrayParser): + """Convert PyMemberDef instances to method calls.""" + + PREFIX = 'PyType_AddMember(' + NUM_FIELDS = 5 + SENTINEL = (None,) * NUM_FIELDS + + # Stores lines as 5-tuple that conforms to a PyMemberDef. + # (name, type, offset, flags, doc) + # If there is a preprocessor statement, name will contain None + # and doc will contain the full line. + + def GetArgs(self, data): + args = [arg or 'NULL' for arg in data] + if args[2] == 'NULL': + args[2] = '0' + return args + + +class MethodDef(ArrayParser): + """Convert PyMethodDef instances to method calls.""" + + PREFIX = 'PyType_AddMethod(' + NUM_FIELDS = 4 + SENTINEL = (None,) * NUM_FIELDS + + # Stores lines as 4-tuple that conforms to a PyMethodDef. + # (ml_name, ml_meth, ml_flags, ml_doc) + # If there is a preprocessor statement, name will contain None + # and ml_doc will contain the full line. 
+ + def GetArgs(self, data): + args = [arg or 'NULL' for arg in data] + if args[1] == 'NULL': + args[1] = 'METH_OLDARGS' + return args + + +class StructParser(GenericDef): + # Subclasses should set these values to something appropriate. + + # These are used in NewCode. + NUM_FIELDS = 0 + PREFIX = 'PyType_AddXXXMethod(' + # Sequence of enum names to be used as parameter to function called. + # Note: len(FIELD_KIND) == NUM_FIELDS + FIELD_KIND = [] + # Prefix to apply to each FIELD_KIND. + FIELD_KIND_PREFIX = '' + + def ParseLine(self, line, reader): + while ',' not in line and line.find('TPFLAGS') >= 0: + line += reader.GetField() + return line.rstrip(',') + + def HandleSlotMethods(self, prefix, lines, indices): + kinds = self.FIELD_KIND + kind_prefix = self.FIELD_KIND_PREFIX + for i in indices: + data = lines[i] + if data and data != '0': + args = data + if kinds: + args = '%s%s, %s' % (kind_prefix, kinds[i], data) + yield prefix + ('type, %s);\n' % args) + + def NewCode(self): + assert len(self.FIELD_KIND) == self.NUM_FIELDS, \ + ('%s: %d != %d' % + (self.__class__.__name__, len(self.FIELD_KIND), self.NUM_FIELDS)) + + prefix = INITIAL_INDENT + self.PREFIX + + yield self.GetFunctionDeclaration() + yield '{\n' + for line in self.HandleSlotMethods(prefix, self.lines, + range(len(self.lines))): + yield line + yield '}\n' + + +class NumberMethods(StructParser): + """Convert PyNumberMethods instances to method calls.""" + + PREFIX = 'PyType_AddNumberMethod(' + NUM_FIELDS = 39 + FIELD_KIND_PREFIX = 'PyNumberMethod_' + FIELD_KIND = ['Add', 'Subtract', 'Multiply', 'Divide', 'Remainder', + 'DivMod', 'Power', 'Negative', 'Positive', 'Absolute', + 'NonZero', 'Invert', 'LShift', 'RShift', + 'And', 'Xor', 'Or', 'Coerce', + 'Int', 'Long', 'Float', 'Oct', 'Hex', + 'InplaceAdd', 'InplaceSubtract', 'InplaceMultiply', + 'InplaceDivide', 'InplaceRemainder', + 'InplacePower', 'InplaceLShift', 'InplaceRShift', + 'InplaceAnd', 'InplaceXor', 'InplaceOr', + 'FloorDivide', 'TrueDivide', + 
'InplaceFloorDivide', 'InplaceTrueDivide', + 'Index' + ] + + +class SequenceMethods(StructParser): + """Convert PySequenceMethods instances to method calls.""" + + PREFIX = 'PyType_AddSequenceMethod(' + NUM_FIELDS = 10 + FIELD_KIND_PREFIX = 'PySequenceMethod_' + FIELD_KIND = ['Length', 'Concat', 'Repeat', 'Item', 'Slice', + 'AssignItem', 'AssignSlice', 'Contains', + 'InplaceConcat', 'InplaceRepeat'] + + +class MappingMethods(StructParser): + """Convert PyMappingMethods instances to method calls.""" + + PREFIX = 'PyType_AddMappingMethod(' + NUM_FIELDS = 3 + FIELD_KIND_PREFIX = 'PyMappingMethod_' + FIELD_KIND = ['Length', 'Subscript', 'SetItem'] + + +class BufferProcs(StructParser): + """Convert PyBufferProcs instances to method calls.""" + + PREFIX = 'PyType_AddBufferProc(' + NUM_FIELDS = 4 + FIELD_KIND_PREFIX = 'PyBufferMethod_' + FIELD_KIND = ['Read', 'Write', 'SegmentCount', 'CharBuffer'] + + +class TypeDef(StructParser): + """Convert PyTypeObject instances to method calls.""" + + PREFIX = 'PyType_AddMethod(' + NUM_FIELDS = 45 + FIELD_KIND_PREFIX = 'PyTypeMethod_' + FIELD_KIND = ['Name', 'BasicSize', 'ItemSize', + 'Dealloc', 'Print', 'Getattr', 'Setattr', 'Compare', 'Repr', + 'AsNumber', 'AsSequence', 'AsMapping', + 'Hash', 'Call', 'Str', 'Getattro', 'Setattro', + 'AsBuffer', 'Flags', 'Doc', 'Traverse', 'Clear', + 'RichCompare', 'WeaklistOffset', 'Iter', 'IterNext', + 'Methods', 'Members', 'GetSet', 'Base', 'Dict', + 'DescrGet', 'DescrSet', 'DictOffset', + 'Init', 'Alloc', 'New', 'Free', 'IsGc', + 'Bases', 'MRO', 'Cache', 'Subclasses', 'Weaklist', 'Del' + ] + _METHOD_INDICES = [3, 4, 5, 6, 7, 8, 12, 13, 14, 15, 16, 20, 21, 22, + 24, 25, 31, 32, 34, 35, 36, 37, 38, 44] + + def NewCode(self): + assert len(self.FIELD_KIND) == self.NUM_FIELDS, \ + ('%s: %d != %d' % + (self.__class__.__name__, len(self.FIELD_KIND), self.NUM_FIELDS)) + + # Find the first quoted line, that's the name which starts the type. 
+ while self.lines and self.lines[0][0] != '"': + # Who cares if this is slow. There aren't that many elements. + self.lines.pop(0) + + # Ensure we have the proper number of values. + lines = self.lines[:] + lines.extend([None] * (self.NUM_FIELDS - len(lines))) + prefix = INITIAL_INDENT + self.PREFIX + + yield '%sPyTypeObject %s;\n' % (self.GetStatic(), self.name) + yield '\n' + yield self.GetFunctionDeclaration() + yield '{\n' + # TODO: Handle: ItemSize: 2?, WeaklistOffset: 23? + name = lines[0] + basic_size = lines[1] + flags = lines[18] + doc = lines[19] + base = lines[29] + line = INITIAL_INDENT + '_PyType_Init(type' + indent = len(line) - len(INITIAL_INDENT) + for arg in (name, basic_size, flags, base, doc): + if not arg: + arg = '0' + line += self.AppendParameter(line, arg, indent) + line += ');\n' + yield line + + def FormatInitCall(name): + if name[0] == '&': + name = name[1:] + return '%sInit_%s(type);\n' % (INITIAL_INDENT, name) + + # Handle all the methods, members, getsets. + # These are special because we need to call the other initializers. + as_number = lines[9] + if as_number and as_number != '0': + yield FormatInitCall(as_number) + as_sequence = lines[10] + if as_sequence and as_sequence != '0': + yield FormatInitCall(as_sequence) + as_mapping = lines[11] + if as_mapping and as_mapping != '0': + yield FormatInitCall(as_mapping) + as_buffer = lines[17] + if as_buffer and as_buffer != '0': + yield FormatInitCall(as_buffer) + + methods = lines[26] + if methods and methods != '0': + yield FormatInitCall(methods) + members = lines[27] + if members and members != '0': + yield FormatInitCall(members) + getsets = lines[28] + if getsets and getsets != '0': + yield FormatInitCall(getsets) + + # TODO: ignore these values for now. Are they are set at runtime? + # Dict, 30, Bases: 39, MRO: 40, Cache: 41, Subclasses:42, Weaklist: 43 + + # Handle all the __special__ methods (ie, slots). 
+ methods = self.HandleSlotMethods(prefix, lines, self._METHOD_INDICES) + for line in methods: + yield line + yield '}\n' + + yield '/* Move this code to an initialization routine.\n' + yield INITIAL_INDENT + 'Init_%s(&%s);\n' % (self.name, self.name) + yield ' * End of code that must be moved. */\n' + +class Fixer(object): + def __init__(self, filename): + self.filename = filename + self.line_number = 0 + self.fp = open(filename) + + # {name: GenericDef} + self.method_defs = {} + self.number_defs = {} + self.seq_defs = {} + self.mapping_defs = {} + self.buffer_defs = {} + self.member_defs = {} + self.getset_defs = {} + self.type_defs = {} + + def Close(self): + self.fp.close() + + def Fix(self): + for line in self.fp: + start_line = self.line_number + self.line_number += 1 + line = self._StripComments(line) + if not line: + continue + # Skip preprocessor lines. + if line.lstrip().startswith('#'): + continue + + # Looks like we got code. Anything we are interested in? + for name in _INTERESTING_TYPES: + index = line.find(name) + if index >= 0 and not LooksLikeDeclaration(line): + line = line.rstrip() + ok, line = self._ShouldProcess(line) + if ok: + self._HandleLine(line, index, name, start_line) + break + + def _HandleLine(self, line, index, name, start_line): + if _DEBUG: print 'Found %s:%d %s' % (name, self.line_number, line) + str_before_type = line[:index].strip() + var_name = line[index+len(name):].strip().split()[0] + var_name = var_name.rstrip(' []') + is_static = (str_before_type and + str_before_type.split()[0] == 'static') + # Lookup the method to handle this type and call it. 
+ getattr(self, 'Handle' + name)(var_name, is_static, start_line) + + def PrintNewMethods(self): + if not _DEBUG: + return + + def Printer(container, name): + if container: + print name, 'from', self.filename + for name, method in container.items(): + print 'Lines %d-%d' % (method.first_line, method.last_line) + print ''.join(method.NewCode()) + + Printer(self.getset_defs, 'GetSet:') + Printer(self.member_defs, 'Members:') + Printer(self.method_defs, 'Methods:') + Printer(self.number_defs, 'NumberMethods:') + Printer(self.seq_defs, 'SequenceMethods:') + Printer(self.mapping_defs, 'MappingMethods:') + Printer(self.buffer_defs, 'BufferProcs:') + Printer(self.type_defs, 'Types:') + + def RewriteFile(self): + # Read the entire file into a list. + self.fp.seek(0, 0) + lines = self.fp.readlines() + + # Update lines with new data. + defs = [self.getset_defs, self.member_defs, self.method_defs, + self.number_defs, self.seq_defs, self.mapping_defs, + self.buffer_defs, self.type_defs ] + # Sort the changed lines from highest line to lowest, so replacing + # is done from the beginning to the end and we don't have to worry + # about shifting lines internally if we add/remove them. + line_numbers = [(c.first_line, c) for d in defs for c in d.values()] + for _, container in sorted(line_numbers, reverse=True): + new_lines = container.NewCode() + lines[container.first_line:container.last_line] = new_lines + + # Construct the new directory and ensure it exists. + base_filename = os.path.basename(self.filename) + path = os.path.join(os.path.dirname(self.filename), 'new') + try: + os.mkdir(path) + except OSError: + pass # TODO: Assume the directory is there and writable for now. + + # Output the new file. 
+ output_fp = open(os.path.join(path, base_filename), 'w') + try: + output_fp.writelines(lines) + finally: + output_fp.close() + + def _ShouldProcess(self, line): + if line[-1] == '{': + return True, line + + next_line = self.fp.next() + self.line_number += 1 + if LooksLikeDeclaration(next_line): + return False, '%s %s' % (line, next_line) + + result = next_line.rstrip()[-1] == '{' + if not result: + print 'Ambiguous %s %d: %s' % (self.filename, self.line_number, line) + print 'next_line', next_line + return result, '%s %s' % (line, next_line) + + def _StripComments(self, line): + index = line.find('//') + if index >= 0: + # Return the line up to the comment. + return line[:index] + + index = line.find('/*') + if index < 0: + # No comment. + return line + + comment_end = line.find('*/', index) + if comment_end >= 0: + # Recurse in case there are multiple comments on the line. + return self._StripComments(line[:index] + line[comment_end+2]) + + # The comment doesn't end on the same line, find the end. + for line in self.fp: + self.line_number += 1 + index = line.find('*/') + if index >= 0: + # Recurse in case there are multiple comments on the line. + return self._StripComments(line[index+2:]) + raise RuntimeError('End of file reached while processing comment.') + + def GetField(self): + # XXX: This is a hack. Just return the next line and let the caller + # take care of determining if this ends the field or not. + self.line_number += 1 + return self._StripComments(self.fp.next()).strip() + + def GetDefinition(self): + curly_count = 1 + count = 0 + line = '' + for line in self.fp: + self.line_number += 1 + line = self._StripComments(line).strip() + if '{' in line: + curly_count += 1 + if '}' in line: + curly_count -= 1 + + # Finished when the openning brace is closed. + if curly_count == 0: + break + + yield count, line + count += 1 + + # The last line may be partial, pass that along too. 
+ if not line.lstrip().startswith('}'): + yield count, line + + # These Handle methods process each type. They will read all the lines + # and print out the lines that should be used to replace the old code. + + def _HandleDefinition(self, def_object, container): + for count, line in self.GetDefinition(): + if line.lstrip().startswith('}'): + break + def_object.AddLine(line, self) + def_object.VerifyValid() + def_object.last_line = self.line_number + container[def_object.name] = def_object + + _HandleArrayDefinition = _HandleDefinition + + def HandlePyGetSetDef(self, name, is_static, start_line): + def_object = GetSetDef(name, is_static, start_line) + self._HandleArrayDefinition(def_object, self.getset_defs) + + def HandlePyMemberDef(self, name, is_static, start_line): + def_object = MemberDef(name, is_static, start_line) + self._HandleArrayDefinition(def_object, self.member_defs) + + def HandlePyMethodDef(self, name, is_static, start_line): + def_object = MethodDef(name, is_static, start_line) + self._HandleArrayDefinition(def_object, self.method_defs) + + def HandlePyNumberMethods(self, name, is_static, start_line): + def_object = NumberMethods(name, is_static, start_line) + self._HandleDefinition(def_object, self.number_defs) + + def HandlePySequenceMethods(self, name, is_static, start_line): + def_object = SequenceMethods(name, is_static, start_line) + self._HandleDefinition(def_object, self.seq_defs) + + def HandlePyMappingMethods(self, name, is_static, start_line): + def_object = MappingMethods(name, is_static, start_line) + self._HandleDefinition(def_object, self.mapping_defs) + + def HandlePyBufferProcs(self, name, is_static, start_line): + def_object = BufferProcs(name, is_static, start_line) + self._HandleDefinition(def_object, self.buffer_defs) + + def HandlePyTypeObject(self, name, is_static, start_line): + def_object = TypeDef(name, is_static, start_line) + self._HandleDefinition(def_object, self.type_defs) + + +def main(argv): + for filename in 
argv[1:]: + fixer = Fixer(filename) + fixer.Fix() + fixer.PrintNewMethods() + fixer.RewriteFile() + fixer.Close() + + +if __name__ == '__main__': + main(sys.argv) From python-checkins at python.org Thu Jul 26 11:36:25 2007 From: python-checkins at python.org (georg.brandl) Date: Thu, 26 Jul 2007 11:36:25 +0200 (CEST) Subject: [Python-checkins] r56551 - python/trunk/Doc/lib/libxmlrpclib.tex Message-ID: <20070726093625.607F41E400A@bag.python.org> Author: georg.brandl Date: Thu Jul 26 11:36:25 2007 New Revision: 56551 Modified: python/trunk/Doc/lib/libxmlrpclib.tex Log: tabs, newlines and crs are valid XML characters. Modified: python/trunk/Doc/lib/libxmlrpclib.tex ============================================================================== --- python/trunk/Doc/lib/libxmlrpclib.tex (original) +++ python/trunk/Doc/lib/libxmlrpclib.tex Thu Jul 26 11:36:25 2007 @@ -94,7 +94,8 @@ \samp{>}, and \samp{\&} will be automatically escaped. However, it's the caller's responsibility to ensure that the string is free of characters that aren't allowed in XML, such as the control characters -with ASCII values between 0 and 31; failing to do this will result in +with ASCII values between 0 and 31 (except, of course, tab, newline and +carriage return); failing to do this will result in an XML-RPC request that isn't well-formed XML. If you have to pass arbitrary strings via XML-RPC, use the \class{Binary} wrapper class described below. From python-checkins at python.org Thu Jul 26 11:36:28 2007 From: python-checkins at python.org (georg.brandl) Date: Thu, 26 Jul 2007 11:36:28 +0200 (CEST) Subject: [Python-checkins] r56552 - python/branches/release25-maint/Doc/lib/libxmlrpclib.tex Message-ID: <20070726093628.9ABA61E400A@bag.python.org> Author: georg.brandl Date: Thu Jul 26 11:36:28 2007 New Revision: 56552 Modified: python/branches/release25-maint/Doc/lib/libxmlrpclib.tex Log: tabs, newlines and crs are valid XML characters. (backport from rev. 
56551) Modified: python/branches/release25-maint/Doc/lib/libxmlrpclib.tex ============================================================================== --- python/branches/release25-maint/Doc/lib/libxmlrpclib.tex (original) +++ python/branches/release25-maint/Doc/lib/libxmlrpclib.tex Thu Jul 26 11:36:28 2007 @@ -91,7 +91,8 @@ \samp{>}, and \samp{\&} will be automatically escaped. However, it's the caller's responsibility to ensure that the string is free of characters that aren't allowed in XML, such as the control characters -with ASCII values between 0 and 31; failing to do this will result in +with ASCII values between 0 and 31 (except, of course, tab, newline and +carriage return); failing to do this will result in an XML-RPC request that isn't well-formed XML. If you have to pass arbitrary strings via XML-RPC, use the \class{Binary} wrapper class described below. From buildbot at python.org Thu Jul 26 12:54:24 2007 From: buildbot at python.org (buildbot at python.org) Date: Thu, 26 Jul 2007 10:54:24 +0000 Subject: [Python-checkins] buildbot warnings in sparc Ubuntu dapper 2.5 Message-ID: <20070726105424.C7ACF1E400A@bag.python.org> The Buildbot has detected a new failure of sparc Ubuntu dapper 2.5. 
Full details are available at: http://www.python.org/dev/buildbot/all/sparc%2520Ubuntu%2520dapper%25202.5/builds/0 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch branches/release25-maint] HEAD Blamelist: andrew.kuchling,barry.warsaw,georg.brandl,martin.v.loewis,raymond.hettinger,ronald.oussoren,thomas.heller Build had warnings: warnings test Excerpt from the test logfile: 3 tests failed: test_fcntl test_ioctl test_resource Traceback (most recent call last): File "./Lib/test/regrtest.py", line 549, in runtest_inner the_package = __import__(abstest, globals(), locals(), []) File "/home/pybot/buildarea/2.5.klose-ubuntu-sparc/build/Lib/test/test_fcntl.py", line 53, in rv = fcntl.fcntl(f.fileno(), fcntl.F_SETLKW, lockdata) IOError: [Errno 14] Bad address ====================================================================== ERROR: test_ioctl (test.test_ioctl.IoctlTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/pybot/buildarea/2.5.klose-ubuntu-sparc/build/Lib/test/test_ioctl.py", line 23, in test_ioctl r = fcntl.ioctl(tty, termios.TIOCGPGRP, " ") IOError: [Errno 14] Bad address ====================================================================== ERROR: test_ioctl_mutate (test.test_ioctl.IoctlTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "/home/pybot/buildarea/2.5.klose-ubuntu-sparc/build/Lib/test/test_ioctl.py", line 32, in test_ioctl_mutate r = fcntl.ioctl(tty, termios.TIOCGPGRP, buf, 1) IOError: [Errno 14] Bad address Traceback (most recent call last): File "./Lib/test/regrtest.py", line 549, in runtest_inner the_package = __import__(abstest, globals(), locals(), []) File "/home/pybot/buildarea/2.5.klose-ubuntu-sparc/build/Lib/test/test_resource.py", line 42, in f.close() IOError: [Errno 27] File too large make: *** [buildbottest] Error 1 sincerely, -The Buildbot From 
python-checkins at python.org Thu Jul 26 16:03:00 2007 From: python-checkins at python.org (nick.coghlan) Date: Thu, 26 Jul 2007 16:03:00 +0200 (CEST) Subject: [Python-checkins] r56553 - python/trunk/Lib/test/test_math.py python/trunk/Lib/test/test_pow.py Message-ID: <20070726140300.C6F821E400A@bag.python.org> Author: nick.coghlan Date: Thu Jul 26 16:03:00 2007 New Revision: 56553 Modified: python/trunk/Lib/test/test_math.py python/trunk/Lib/test/test_pow.py Log: Add explicit test for a misbehaving math.floor Modified: python/trunk/Lib/test/test_math.py ============================================================================== --- python/trunk/Lib/test/test_math.py (original) +++ python/trunk/Lib/test/test_math.py Thu Jul 26 16:03:00 2007 @@ -92,6 +92,10 @@ self.ftest('floor(-0.5)', math.floor(-0.5), -1) self.ftest('floor(-1.0)', math.floor(-1.0), -1) self.ftest('floor(-1.5)', math.floor(-1.5), -2) + # pow() relies on floor() to check for integers + # This fails on some platforms - so check it here + self.ftest('floor(1.23e167)', math.floor(1.23e167), 1.23e167) + self.ftest('floor(-1.23e167)', math.floor(-1.23e167), -1.23e167) def testFmod(self): self.assertRaises(TypeError, math.fmod) Modified: python/trunk/Lib/test/test_pow.py ============================================================================== --- python/trunk/Lib/test/test_pow.py (original) +++ python/trunk/Lib/test/test_pow.py Thu Jul 26 16:03:00 2007 @@ -106,12 +106,9 @@ # platform pow() was buggy, and Python didn't worm around it. 
eq = self.assertEquals a = -1.0 - # XXX Temporary diagnostic for failure on alpha Debian buildbot - from sys import __stdout__ - from math import floor - print >> __stdout__, "*** Number: %r" % 1.23e167 - print >> __stdout__, "*** Floor: %r" % floor(1.23e167) - # XXX End diagnostic message + # The next two tests can still fail if the platform floor() + # function doesn't treat all large inputs as integers + # test_math should also fail if that is happening eq(pow(a, 1.23e167), 1.0) eq(pow(a, -1.23e167), 1.0) for b in range(-10, 11): From kacj at snet.net Thu Jul 26 19:23:32 2007 From: kacj at snet.net (Stanislas Curry) Date: Thu, 26 Jul 2007 19:23:32 +0200 Subject: [Python-checkins] Information Message-ID: <46A8D894.6080604@powerweb.net> -------------- next part -------------- A non-text attachment was scrubbed... Name: Information.pdf Type: application/pdf Size: 5755 bytes Desc: not available Url : http://mail.python.org/pipermail/python-checkins/attachments/20070726/1ad5ba83/attachment.pdf From python-checkins at python.org Thu Jul 26 23:06:04 2007 From: python-checkins at python.org (alexandre.vassalotti) Date: Thu, 26 Jul 2007 23:06:04 +0200 (CEST) Subject: [Python-checkins] r56556 - python/branches/cpy_merge/Modules/_picklemodule.c Message-ID: <20070726210604.B005F1E400B@bag.python.org> Author: alexandre.vassalotti Date: Thu Jul 26 23:06:03 2007 New Revision: 56556 Modified: python/branches/cpy_merge/Modules/_picklemodule.c Log: Remove refcount micro-optimization in put(). This makes some pickle streams generated by _pickle unnecessary different the one by pickle.py, which doesn't support this feature. 
Modified: python/branches/cpy_merge/Modules/_picklemodule.c ============================================================================== --- python/branches/cpy_merge/Modules/_picklemodule.c (original) +++ python/branches/cpy_merge/Modules/_picklemodule.c Thu Jul 26 23:06:03 2007 @@ -221,7 +221,7 @@ goto nomemory; if ((int) (size_t) bigger != bigger) goto nomemory; - nbytes = (size_t) bigger *sizeof(PyObject *); + nbytes = (size_t) bigger * sizeof(PyObject *); if (nbytes / sizeof(PyObject *) != (size_t) bigger) goto nomemory; tmp = realloc(self->data, nbytes); @@ -702,7 +702,7 @@ static int put(PicklerObject *self, PyObject *ob) { - if (ob->ob_refcnt < 2 || self->fast) + if (self->fast) return 0; return put2(self, ob); From python-checkins at python.org Thu Jul 26 23:31:09 2007 From: python-checkins at python.org (ronald.oussoren) Date: Thu, 26 Jul 2007 23:31:09 +0200 (CEST) Subject: [Python-checkins] r56557 - python/branches/release24-maint/Mac/BuildScript/build-installer.py Message-ID: <20070726213109.D81241E4003@bag.python.org> Author: ronald.oussoren Date: Thu Jul 26 23:31:09 2007 New Revision: 56557 Modified: python/branches/release24-maint/Mac/BuildScript/build-installer.py Log: MacOS build script: - be more strict about the build environment - update some of the 3th party libraries (later patchlevels) Modified: python/branches/release24-maint/Mac/BuildScript/build-installer.py ============================================================================== --- python/branches/release24-maint/Mac/BuildScript/build-installer.py (original) +++ python/branches/release24-maint/Mac/BuildScript/build-installer.py Thu Jul 26 23:31:09 2007 @@ -10,7 +10,7 @@ Usage: see USAGE variable in the script. """ import platform, os, sys, getopt, textwrap, shutil, urllib2, stat, time, pwd -import grp +import grp, md5 INCLUDE_TIMESTAMP=1 VERBOSE=1 @@ -62,7 +62,7 @@ # The directory we'll use to store third-party sources. 
Set this to something # else if you don't want to re-fetch required libraries every time. DEPSRC=os.path.join(WORKDIR, 'third-party') -DEPSRC=os.path.expanduser('~/Universal/other-sources') +DEPSRC=os.path.expanduser('/tmp/other-sources') # Location of the preferred SDK SDKPATH="/Developer/SDKs/MacOSX10.4u.sdk" @@ -94,8 +94,9 @@ # batteries included python. LIBRARY_RECIPES=[ dict( - name="Bzip2 1.0.3", - url="http://www.bzip.org/1.0.3/bzip2-1.0.3.tar.gz", + name="Bzip2 1.0.4", + url="http://www.bzip.org/1.0.4/bzip2-1.0.4.tar.gz", + checksum="fc310b254f6ba5fbb5da018f04533688", configure=None, install='make install PREFIX=%s/usr/local/ CFLAGS="-arch %s -isysroot %s"'%( shellQuote(os.path.join(WORKDIR, 'libraries')), @@ -106,6 +107,7 @@ dict( name="ZLib 1.2.3", url="http://www.gzip.org/zlib/zlib-1.2.3.tar.gz", + checksum="debc62758716a169df9f62e6ab2bc634", configure=None, install='make install prefix=%s/usr/local/ CFLAGS="-arch %s -isysroot %s"'%( shellQuote(os.path.join(WORKDIR, 'libraries')), @@ -117,6 +119,7 @@ # Note that GNU readline is GPL'd software name="GNU Readline 5.1.4", url="http://ftp.gnu.org/pub/gnu/readline/readline-5.1.tar.gz" , + checksum="7ee5a692db88b30ca48927a13fd60e46", patchlevel='0', patches=[ # The readline maintainers don't do actual micro releases, but @@ -131,6 +134,7 @@ dict( name="NCurses 5.5", url="http://ftp.gnu.org/pub/gnu/ncurses/ncurses-5.5.tar.gz", + checksum='e73c1ac10b4bfc46db43b2ddfd6244ef', configure_pre=[ "--without-cxx", "--without-ada", @@ -159,6 +163,7 @@ dict( name="Sleepycat DB 4.4", url="http://downloads.sleepycat.com/db-4.4.20.tar.gz", + checksum='d84dff288a19186b136b0daf7067ade3', #name="Sleepycat DB 4.3.29", #url="http://downloads.sleepycat.com/db-4.3.29.tar.gz", buildDir="build_unix", @@ -308,6 +313,19 @@ fatal("Please install the latest version of Xcode and the %s SDK"%( os.path.basename(SDKPATH[:-4]))) + if os.path.exists('/sw'): + fatal("Detected Fink, please remove before building Python") + + if 
os.path.exists('/opt/local'): + fatal("Detected MacPorts, please remove before building Python") + + if not os.path.exists('/Library/Frameworks/Tcl.framework') or \ + not os.path.exists('/Library/Frameworks/Tk.framework'): + + fatal("Please install a Universal Tcl/Tk framework in /Library from\n\thttp://tcltkaqua.sourceforge.net/") + + + def parseOptions(args = None): @@ -444,6 +462,17 @@ except: pass +def verifyChecksum(path, checksum): + summer = md5.md5() + fp = open(path, 'rb') + block = fp.read(10240) + while block: + summer.update(block) + block = fp.read(10240) + + return summer.hexdigest() == checksum + + def buildRecipe(recipe, basedir, archList): """ Build software using a recipe. This function does the @@ -465,13 +494,16 @@ os.mkdir(DEPSRC) - if os.path.exists(sourceArchive): + if os.path.exists(sourceArchive) and verifyChecksum(sourceArchive, recipe['checksum']): print "Using local copy of %s"%(name,) else: print "Downloading %s"%(name,) downloadURL(url, sourceArchive) print "Archive for %s stored as %s"%(name, sourceArchive) + if not verifyChecksum(sourceArchive, recipe['checksum']): + fatal("Download for %s failed: bad checksum"%(url,)) + print "Extracting archive for %s"%(name,) buildDir=os.path.join(WORKDIR, '_bld') From python-checkins at python.org Thu Jul 26 23:39:37 2007 From: python-checkins at python.org (ronald.oussoren) Date: Thu, 26 Jul 2007 23:39:37 +0200 (CEST) Subject: [Python-checkins] r56558 - python/branches/release25-maint/Mac/BuildScript/build-installer.py Message-ID: <20070726213937.C20D51E400E@bag.python.org> Author: ronald.oussoren Date: Thu Jul 26 23:39:36 2007 New Revision: 56558 Modified: python/branches/release25-maint/Mac/BuildScript/build-installer.py Log: Mac Buildscript: - be more strict about the build environment - update some 3th party libraries Modified: python/branches/release25-maint/Mac/BuildScript/build-installer.py ============================================================================== --- 
python/branches/release25-maint/Mac/BuildScript/build-installer.py (original) +++ python/branches/release25-maint/Mac/BuildScript/build-installer.py Thu Jul 26 23:39:36 2007 @@ -10,7 +10,7 @@ Usage: see USAGE variable in the script. """ import platform, os, sys, getopt, textwrap, shutil, urllib2, stat, time, pwd -import grp +import grp, md5 INCLUDE_TIMESTAMP=1 VERBOSE=1 @@ -31,6 +31,8 @@ def writePlist(plist, path): plist.write(path) + + def shellQuote(value): """ Return the string value in a form that can safely be inserted into @@ -62,7 +64,7 @@ # The directory we'll use to store third-party sources. Set this to something # else if you don't want to re-fetch required libraries every time. DEPSRC=os.path.join(WORKDIR, 'third-party') -DEPSRC=os.path.expanduser('~/Universal/other-sources') +DEPSRC=os.path.expanduser('/tmp/other-sources') # Location of the preferred SDK SDKPATH="/Developer/SDKs/MacOSX10.4u.sdk" @@ -94,8 +96,9 @@ # batteries included python. LIBRARY_RECIPES=[ dict( - name="Bzip2 1.0.3", - url="http://www.bzip.org/1.0.3/bzip2-1.0.3.tar.gz", + name="Bzip2 1.0.4", + url="http://www.bzip.org/1.0.4/bzip2-1.0.4.tar.gz", + checksum='fc310b254f6ba5fbb5da018f04533688', configure=None, install='make install PREFIX=%s/usr/local/ CFLAGS="-arch %s -isysroot %s"'%( shellQuote(os.path.join(WORKDIR, 'libraries')), @@ -106,6 +109,7 @@ dict( name="ZLib 1.2.3", url="http://www.gzip.org/zlib/zlib-1.2.3.tar.gz", + checksum='debc62758716a169df9f62e6ab2bc634', configure=None, install='make install prefix=%s/usr/local/ CFLAGS="-arch %s -isysroot %s"'%( shellQuote(os.path.join(WORKDIR, 'libraries')), @@ -118,6 +122,7 @@ name="GNU Readline 5.1.4", url="http://ftp.gnu.org/pub/gnu/readline/readline-5.1.tar.gz" , patchlevel='0', + checksum='7ee5a692db88b30ca48927a13fd60e46', patches=[ # The readline maintainers don't do actual micro releases, but # just ship a set of patches. 
@@ -129,9 +134,9 @@ ), dict( - name="SQLite 3.3.5", - url="http://www.sqlite.org/sqlite-3.3.5.tar.gz", - checksum='93f742986e8bc2dfa34792e16df017a6feccf3a2', + name="SQLite 3.3.14", + url="http://www.sqlite.org/sqlite-3.3.14.tar.gz", + checksum='e1a4428a5cb17f28164731b72f06130a', configure_pre=[ '--enable-threadsafe', '--enable-tempstore', @@ -144,6 +149,7 @@ dict( name="NCurses 5.5", url="http://ftp.gnu.org/pub/gnu/ncurses/ncurses-5.5.tar.gz", + checksum='e73c1ac10b4bfc46db43b2ddfd6244ef', configure_pre=[ "--without-cxx", "--without-ada", @@ -172,6 +178,7 @@ dict( name="Sleepycat DB 4.4", url="http://downloads.sleepycat.com/db-4.4.20.tar.gz", + checksum='d84dff288a19186b136b0daf7067ade3', #name="Sleepycat DB 4.3.29", #url="http://downloads.sleepycat.com/db-4.3.29.tar.gz", buildDir="build_unix", @@ -321,6 +328,17 @@ fatal("Please install the latest version of Xcode and the %s SDK"%( os.path.basename(SDKPATH[:-4]))) + if os.path.exists('/sw'): + fatal("Detected Fink, please remove before building Python") + + if os.path.exists('/opt/local'): + fatal("Detected MacPorts, please remove before building Python") + + if not os.path.exists('/Library/Frameworks/Tcl.framework') or \ + not os.path.exists('/Library/Frameworks/Tk.framework'): + + fatal("Please install a Universal Tcl/Tk framework in /Library from\n\thttp://tcltkaqua.sourceforge.net/") + def parseOptions(args = None): @@ -457,6 +475,17 @@ except: pass + +def verifyChecksum(path, checksum): + summer = md5.md5() + fp = open(path, 'rb') + block = fp.read(10240) + while block: + summer.update(block) + block = fp.read(10240) + + return summer.hexdigest() == checksum + def buildRecipe(recipe, basedir, archList): """ Build software using a recipe. 
This function does the @@ -478,13 +507,15 @@ os.mkdir(DEPSRC) - if os.path.exists(sourceArchive): + if os.path.exists(sourceArchive) and verifyChecksum(sourceArchive, recipe['checksum']): print "Using local copy of %s"%(name,) else: print "Downloading %s"%(name,) downloadURL(url, sourceArchive) print "Archive for %s stored as %s"%(name, sourceArchive) + if not verifyChecksum(sourceArchive, recipe['checksum']): + fatal("Download for %s failed: bad checksum"%(url,)) print "Extracting archive for %s"%(name,) buildDir=os.path.join(WORKDIR, '_bld') @@ -663,7 +694,6 @@ for dn in dirnames: os.chmod(os.path.join(dirpath, dn), 0775) os.chown(os.path.join(dirpath, dn), -1, gid) - for fn in filenames: if os.path.islink(fn): @@ -1010,6 +1040,7 @@ shellQuote(tmpPath), )) + def main(): # First parse options and check if we can perform our work parseOptions() @@ -1062,6 +1093,5 @@ # And copy it to a DMG buildDMG() - if __name__ == "__main__": main() From buildbot at python.org Fri Jul 27 00:46:39 2007 From: buildbot at python.org (buildbot at python.org) Date: Thu, 26 Jul 2007 22:46:39 +0000 Subject: [Python-checkins] buildbot warnings in alpha Tru64 5.1 2.5 Message-ID: <20070726224640.2F43C1E400B@bag.python.org> The Buildbot has detected a new failure of alpha Tru64 5.1 2.5. 
Full details are available at: http://www.python.org/dev/buildbot/all/alpha%2520Tru64%25205.1%25202.5/builds/286 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch branches/release25-maint] HEAD Blamelist: georg.brandl,ronald.oussoren Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_signal sincerely, -The Buildbot From python-checkins at python.org Fri Jul 27 06:52:32 2007 From: python-checkins at python.org (mark.hammond) Date: Fri, 27 Jul 2007 06:52:32 +0200 (CEST) Subject: [Python-checkins] r56561 - python/trunk/PC/pyconfig.h Message-ID: <20070727045232.AF8CD1E4012@bag.python.org> Author: mark.hammond Date: Fri Jul 27 06:52:32 2007 New Revision: 56561 Modified: python/trunk/PC/pyconfig.h Log: In consultation with Kristjan Jonsson, only define WINVER and _WINNT_WIN32 if (a) we are building Python itself and (b) no one previously defined them Modified: python/trunk/PC/pyconfig.h ============================================================================== --- python/trunk/PC/pyconfig.h (original) +++ python/trunk/PC/pyconfig.h Fri Jul 27 06:52:32 2007 @@ -151,12 +151,26 @@ /* set the version macros for the windows headers */ #ifdef MS_WINX64 /* 64 bit only runs on XP or greater */ -#define _WIN32_WINNT 0x0501 -#define WINVER 0x0501 +#define Py_WINVER 0x0501 #else /* NT 4.0 or greater required otherwise */ -#define _WIN32_WINNT 0x0400 -#define WINVER 0x0400 +#define Py_WINVER 0x0400 +#endif + +/* We only set these values when building Python - we don't want to force + these values on extensions, as that will affect the prototypes and + structures exposed in the Windows headers. Even when building Python, we + allow a single source file to override this - they may need access to + structures etc so it can optionally use new Windows features if it + determines at runtime they are available. 
+*/ +#ifdef Py_BUILD_CORE +#ifndef WINVER +#define WINVER Py_WINVER +#endif +#ifndef _WINNT_WIN32 +#define _WINNT_WIN32 Py_WINVER +#endif #endif /* _W64 is not defined for VC6 or eVC4 */ From python-checkins at python.org Fri Jul 27 07:08:54 2007 From: python-checkins at python.org (mark.hammond) Date: Fri, 27 Jul 2007 07:08:54 +0200 (CEST) Subject: [Python-checkins] r56562 - python/trunk/PC/pyconfig.h Message-ID: <20070727050854.DCF601E4005@bag.python.org> Author: mark.hammond Date: Fri Jul 27 07:08:54 2007 New Revision: 56562 Modified: python/trunk/PC/pyconfig.h Log: Correctly detect AMD64 architecture on VC2003 Modified: python/trunk/PC/pyconfig.h ============================================================================== --- python/trunk/PC/pyconfig.h (original) +++ python/trunk/PC/pyconfig.h Fri Jul 27 07:08:54 2007 @@ -140,7 +140,7 @@ #if defined(_M_IA64) #define COMPILER _Py_PASTE_VERSION("64 bit (Itanium)") #define MS_WINI64 -#elif defined(_M_X64) +#elif defined(_M_X64) || defined(_M_AMD64) #define COMPILER _Py_PASTE_VERSION("64 bit (AMD64)") #define MS_WINX64 #else From buildbot at python.org Fri Jul 27 07:16:26 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 27 Jul 2007 05:16:26 +0000 Subject: [Python-checkins] buildbot warnings in x86 gentoo trunk Message-ID: <20070727051626.572501E4005@bag.python.org> The Buildbot has detected a new failure of x86 gentoo trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520gentoo%2520trunk/builds/2330 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: mark.hammond Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_socket_ssl make: *** [buildbottest] Error 1 sincerely, -The Buildbot From buildbot at python.org Fri Jul 27 07:33:39 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 27 Jul 2007 05:33:39 +0000 Subject: [Python-checkins] buildbot warnings in x86 XP trunk Message-ID: <20070727053340.182DF1E400E@bag.python.org> The Buildbot has detected a new failure of x86 XP trunk. Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520XP%2520trunk/builds/536 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: mark.hammond Build had warnings: warnings failed slave lost sincerely, -The Buildbot From buildbot at python.org Fri Jul 27 07:37:45 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 27 Jul 2007 05:37:45 +0000 Subject: [Python-checkins] buildbot warnings in PPC64 Debian trunk Message-ID: <20070727053745.CDF2D1E4005@bag.python.org> The Buildbot has detected a new failure of PPC64 Debian trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/PPC64%2520Debian%2520trunk/builds/74 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: mark.hammond Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_tarfile make: *** [buildbottest] Error 1 sincerely, -The Buildbot From python-checkins at python.org Fri Jul 27 12:04:11 2007 From: python-checkins at python.org (erik.forsberg) Date: Fri, 27 Jul 2007 12:04:11 +0200 (CEST) Subject: [Python-checkins] r56564 - tracker/instances/python-dev-spambayes-integration/extensions/spambayes.py Message-ID: <20070727100411.DA5781E400F@bag.python.org> Author: erik.forsberg Date: Fri Jul 27 12:04:11 2007 New Revision: 56564 Modified: tracker/instances/python-dev-spambayes-integration/extensions/spambayes.py Log: Compensate for different classes having different property names for the author. Added utility function for finding out if an instance is classified as spam. 
Modified: tracker/instances/python-dev-spambayes-integration/extensions/spambayes.py ============================================================================== --- tracker/instances/python-dev-spambayes-integration/extensions/spambayes.py (original) +++ tracker/instances/python-dev-spambayes-integration/extensions/spambayes.py Fri Jul 27 12:04:11 2007 @@ -7,10 +7,10 @@ node = db.getnode(classname, nodeid) authorage = node['creation'].timestamp() - \ - db.getnode('user', node['author'])['creation'].timestamp() + db.getnode('user', node.get('author', node.get('creator')))['creation'].timestamp() tokens = ["klass:%s" % classname, - "author:%s" % node['author'], + "author:%s" % node.get('author', node.get('creator')), "authorage:%d" % int(authorage)] klass = db.getclass(classname) @@ -18,7 +18,6 @@ def train_spambayes(db, content, tokens, is_spam): spambayes_uri = db.config.detectors['SPAMBAYES_URI'] - spam_cutoff = float(db.config.detectors['SPAMBAYES_SPAM_CUTOFF']) server = xmlrpclib.ServerProxy(spambayes_uri, verbose=False) try: @@ -63,6 +62,16 @@ self.db.commit() +def is_spam(obj): + cutoff_score = float(obj._db.config.detectors['SPAMBAYES_SPAM_CUTOFF']) + try: + score = obj['spambayes_score'] + except KeyError: + return False + return score >= cutoff_score + + def init(instance): instance.registerAction("spambayes_classify", SpambayesClassify) + instance.registerUtil('is_spam', is_spam) From python-checkins at python.org Fri Jul 27 12:05:31 2007 From: python-checkins at python.org (erik.forsberg) Date: Fri, 27 Jul 2007 12:05:31 +0200 (CEST) Subject: [Python-checkins] r56565 - tracker/instances/python-dev-spambayes-integration/detectors/spambayes.py tracker/instances/python-dev-spambayes-integration/detectors/spamcheck.py Message-ID: <20070727100531.105681E400E@bag.python.org> Author: erik.forsberg Date: Fri Jul 27 12:05:30 2007 New Revision: 56565 Added: tracker/instances/python-dev-spambayes-integration/detectors/spambayes.py - copied, changed from r56540, 
tracker/instances/python-dev-spambayes-integration/detectors/spamcheck.py Removed: tracker/instances/python-dev-spambayes-integration/detectors/spamcheck.py Log: Renamed spamcheck.py to spambayes.py. Modified to new antispam functionality - instead of simply rejecting, set a spam score on all msg and file instances. Copied: tracker/instances/python-dev-spambayes-integration/detectors/spambayes.py (from r56540, tracker/instances/python-dev-spambayes-integration/detectors/spamcheck.py) ============================================================================== --- tracker/instances/python-dev-spambayes-integration/detectors/spamcheck.py (original) +++ tracker/instances/python-dev-spambayes-integration/detectors/spambayes.py Fri Jul 27 12:05:30 2007 @@ -11,28 +11,70 @@ import xmlrpclib import socket +import time from roundup.exceptions import Reject -def check_spam(_db, _klass, _nodeid, newvalues): - """Auditor to score a website submission.""" +def extract_classinfo(db, klass, nodeid, newvalues): + if None == nodeid: + node = newvalues + content = newvalues['content'] + else: + node = db.getnode(klass.classname, nodeid) + content = klass.get(nodeid, 'content') + + if node.has_key('creation') or node.has_key('date'): + nodets = node.get('creation', node.get('date')).timestamp() + else: + nodets = time.time() + + if node.has_key('author') or node.has_key('creator'): + authorid = node.get('author', node.get('creator')) + else: + authorid = db.getuid() + + authorage = nodets - db.getnode('user', authorid)['creation'].timestamp() - spambayes_uri = _db.config.detectors['SPAMBAYES_URI'] - spam_cutoff = float(_db.config.detectors['SPAMBAYES_SPAM_CUTOFF']) + tokens = ["klass:%s" % klass.classname, + "author:%s" % authorid, + "authorage:%d" % int(authorage)] + return (content, tokens) + +def check_spambayes(db, content, tokens): + spambayes_uri = db.config.detectors['SPAMBAYES_URI'] server = xmlrpclib.ServerProxy(spambayes_uri, verbose=False) + try: - prob = 
server.score(newvalues, [], {}) + prob = server.score({'content':content}, tokens, {}) + return (True, prob) except (socket.error, xmlrpclib.Error), e: - pass + return (False, str(e)) + + +def check_spam(db, klass, nodeid, newvalues): + """Auditor to score a website submission.""" + + + if newvalues.has_key('spambayes_score'): + if not "coordinator" in [x.lower().strip() for x in db.user.get(db.getuid(), 'roles').split(",")]: + raise ValueError, "Only Coordinators may explicitly assign spambayes_score" + # Don't do anything if we're explicitly setting the score + return + + (content, tokens) = extract_classinfo(db, klass, nodeid, newvalues) + (success, other) = check_spambayes(db, content, tokens) + if success: + newvalues['spambayes_score'] = other + newvalues['spambayes_misclassified'] = False else: - if prob >= spam_cutoff: - raise Reject("Looks like spam to me - prob=%.3f" % prob) + newvalues['spambayes_score'] = -1 + newvalues['spambayes_misclassified'] = True def init(database): """Initialize auditor.""" - database.issue.audit('create', check_spam) - database.issue.audit('set', check_spam) + database.msg.audit('create', check_spam) + database.msg.audit('set', check_spam) database.file.audit('create', check_spam) database.file.audit('set', check_spam) Deleted: /tracker/instances/python-dev-spambayes-integration/detectors/spamcheck.py ============================================================================== --- /tracker/instances/python-dev-spambayes-integration/detectors/spamcheck.py Fri Jul 27 12:05:30 2007 +++ (empty file) @@ -1,38 +0,0 @@ -""" -spamcheck.py - Auditor that consults a SpamBayes server and scores all form -submissions. Submissions which are deemed to be spam are rejected. For the -time being only reject submissions which are assumed to be spam (score >= -SPAM_CUTOFF). Once a reasonable body of ham and spam submissions have been -built up you can consider whether to also reject unsure submissions (score > -HAM_CUTOFF). 
The current settings make it less likely that you'll reject -valid submissions at the expense of manual checks to correct spammy items -which snuck by the screen. -""" - -import xmlrpclib -import socket - -from roundup.exceptions import Reject - -def check_spam(_db, _klass, _nodeid, newvalues): - """Auditor to score a website submission.""" - - spambayes_uri = _db.config.detectors['SPAMBAYES_URI'] - spam_cutoff = float(_db.config.detectors['SPAMBAYES_SPAM_CUTOFF']) - - - server = xmlrpclib.ServerProxy(spambayes_uri, verbose=False) - try: - prob = server.score(newvalues, [], {}) - except (socket.error, xmlrpclib.Error), e: - pass - else: - if prob >= spam_cutoff: - raise Reject("Looks like spam to me - prob=%.3f" % prob) - -def init(database): - """Initialize auditor.""" - database.issue.audit('create', check_spam) - database.issue.audit('set', check_spam) - database.file.audit('create', check_spam) - database.file.audit('set', check_spam) From python-checkins at python.org Fri Jul 27 12:36:30 2007 From: python-checkins at python.org (nick.coghlan) Date: Fri, 27 Jul 2007 12:36:30 +0200 (CEST) Subject: [Python-checkins] r56566 - python/trunk/Lib/test/test_math.py Message-ID: <20070727103630.C4AF21E4018@bag.python.org> Author: nick.coghlan Date: Fri Jul 27 12:36:30 2007 New Revision: 56566 Modified: python/trunk/Lib/test/test_math.py Log: Make test_math error messages more meaningful for small discrepancies in results Modified: python/trunk/Lib/test/test_math.py ============================================================================== --- python/trunk/Lib/test/test_math.py (original) +++ python/trunk/Lib/test/test_math.py Fri Jul 27 12:36:30 2007 @@ -12,7 +12,11 @@ def ftest(self, name, value, expected): if abs(value-expected) > eps: - self.fail('%s returned %f, expected %f'%\ + # Use %r instead of %f so the error message + # displays full precision. 
Otherwise discrepancies + # in the last few bits will lead to very confusing + # error messages + self.fail('%s returned %r, expected %r' % (name, value, expected)) def testConstants(self): From buildbot at python.org Fri Jul 27 13:47:47 2007 From: buildbot at python.org (buildbot at python.org) Date: Fri, 27 Jul 2007 11:47:47 +0000 Subject: [Python-checkins] buildbot warnings in PPC64 Debian trunk Message-ID: <20070727114747.6F6481E400B@bag.python.org> The Buildbot has detected a new failure of PPC64 Debian trunk. Full details are available at: http://www.python.org/dev/buildbot/all/PPC64%2520Debian%2520trunk/builds/76 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: nick.coghlan Build had warnings: warnings test Excerpt from the test logfile: Traceback (most recent call last): File "/home/pybot/buildarea64/trunk.klose-debian-ppc64/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home/pybot/buildarea64/trunk.klose-debian-ppc64/build/Lib/test/test_socketserver.py", line 93, in run svr.serve_a_few() File "/home/pybot/buildarea64/trunk.klose-debian-ppc64/build/Lib/test/test_socketserver.py", line 35, in serve_a_few self.handle_request() File "/home/pybot/buildarea64/trunk.klose-debian-ppc64/build/Lib/SocketServer.py", line 224, in handle_request self.handle_error(request, client_address) File "/home/pybot/buildarea64/trunk.klose-debian-ppc64/build/Lib/SocketServer.py", line 222, in handle_request self.process_request(request, client_address) File "/home/pybot/buildarea64/trunk.klose-debian-ppc64/build/Lib/SocketServer.py", line 429, in process_request self.collect_children() File "/home/pybot/buildarea64/trunk.klose-debian-ppc64/build/Lib/SocketServer.py", line 425, in collect_children self.active_children.remove(pid) ValueError: list.remove(x): x not in list 1 test failed: test_socketserver make: *** [buildbottest] Error 1 sincerely, -The Buildbot From python-checkins at 
python.org Fri Jul 27 15:19:29 2007 From: python-checkins at python.org (erik.forsberg) Date: Fri, 27 Jul 2007 15:19:29 +0200 (CEST) Subject: [Python-checkins] r56567 - tracker/instances/python-dev-spambayes-integration/detectors/config.ini.template Message-ID: <20070727131929.33F481E400C@bag.python.org> Author: erik.forsberg Date: Fri Jul 27 15:19:28 2007 New Revision: 56567 Modified: tracker/instances/python-dev-spambayes-integration/detectors/config.ini.template Log: Added example values for spam permissions. Modified: tracker/instances/python-dev-spambayes-integration/detectors/config.ini.template ============================================================================== --- tracker/instances/python-dev-spambayes-integration/detectors/config.ini.template (original) +++ tracker/instances/python-dev-spambayes-integration/detectors/config.ini.template Fri Jul 27 15:19:28 2007 @@ -12,3 +12,6 @@ spambayes_ham_cutoff = 0.2 spambayes_spam_cutoff = 0.85 +spambayes_may_view_spam = User,Coordinator,Developer +spambayes_may_classify = Coordinator +spambayes_may_report_misclassified = User,Coordinator,Developer From python-checkins at python.org Fri Jul 27 15:20:24 2007 From: python-checkins at python.org (erik.forsberg) Date: Fri, 27 Jul 2007 15:20:24 +0200 (CEST) Subject: [Python-checkins] r56568 - tracker/instances/python-dev-spambayes-integration/extensions/spambayes.py Message-ID: <20070727132024.BC0CB1E400D@bag.python.org> Author: erik.forsberg Date: Fri Jul 27 15:20:23 2007 New Revision: 56568 Modified: tracker/instances/python-dev-spambayes-integration/extensions/spambayes.py Log: Make sure only users with some roles may classify spam. Added utility functions used from templates to check if an instance is classified as spam, and if it may be shown or reclassified. 
Modified: tracker/instances/python-dev-spambayes-integration/extensions/spambayes.py ============================================================================== --- tracker/instances/python-dev-spambayes-integration/extensions/spambayes.py (original) +++ tracker/instances/python-dev-spambayes-integration/extensions/spambayes.py Fri Jul 27 15:20:23 2007 @@ -60,9 +60,17 @@ klass = self.db.getclass(self.classname) klass.set(self.nodeid, **props) self.db.commit() + + def permission(self): + roles = set(self.db.user.get(self.userid, 'roles').lower().split(",")) + allowed = set(self.db.config.detectors['SPAMBAYES_MAY_CLASSIFY'].lower().split(",")) + + if not bool(roles.intersection(allowed)): + raise Unauthorised("You do not have permission to train spambayes") + Action.permission(self) -def is_spam(obj): +def sb_is_spam(obj): cutoff_score = float(obj._db.config.detectors['SPAMBAYES_SPAM_CUTOFF']) try: score = obj['spambayes_score'] @@ -70,8 +78,34 @@ return False return score >= cutoff_score +def sb_is_view_ok(obj): + if not sb_is_spam(obj): + return True + roles = set(obj._db.user.get(obj._client.userid, + 'roles').lower().split(",")) + allowed = set(obj._db.config.detectors['SPAMBAYES_MAY_VIEW_SPAM'].lower().split(",")) + + return bool(roles.intersection(allowed)) + +def sb_may_report_misclassified(obj): + roles = set(obj._db.user.get(obj._client.userid, + 'roles').lower().split(",")) + allowed = set(obj._db.config.detectors['SPAMBAYES_MAY_REPORT_MISCLASSIFIED'].lower().split(",")) + + return bool(roles.intersection(allowed)) + +def sb_may_classify(obj): + roles = set(obj._db.user.get(obj._client.userid, + 'roles').lower().split(",")) + allowed = set(obj._db.config.detectors['SPAMBAYES_MAY_CLASSIFY'].lower().split(",")) + + return bool(roles.intersection(allowed)) def init(instance): instance.registerAction("spambayes_classify", SpambayesClassify) - instance.registerUtil('is_spam', is_spam) + instance.registerUtil('sb_is_spam', sb_is_spam) + 
instance.registerUtil('sb_is_view_ok', sb_is_view_ok) + instance.registerUtil('sb_may_report_misclassified', + sb_may_report_misclassified) + instance.registerUtil('sb_may_classify', sb_may_classify) From python-checkins at python.org Fri Jul 27 15:20:57 2007 From: python-checkins at python.org (erik.forsberg) Date: Fri, 27 Jul 2007 15:20:57 +0200 (CEST) Subject: [Python-checkins] r56569 - tracker/instances/python-dev-spambayes-integration/schema.py Message-ID: <20070727132057.E99501E4013@bag.python.org> Author: erik.forsberg Date: Fri Jul 27 15:20:57 2007 New Revision: 56569 Modified: tracker/instances/python-dev-spambayes-integration/schema.py Log: Modified permissions to make sure msg/file instances classified as spam may not be viewed by anonymous users. Modified: tracker/instances/python-dev-spambayes-integration/schema.py ============================================================================== --- tracker/instances/python-dev-spambayes-integration/schema.py (original) +++ tracker/instances/python-dev-spambayes-integration/schema.py Fri Jul 27 15:20:57 2007 @@ -145,11 +145,42 @@ ########################## # User permissions ########################## -for cl in ('issue_type', 'severity', 'component', - 'version', 'priority', 'status', 'resolution', - 'issue', 'file', 'msg', 'keyword'): - db.security.addPermissionToRole('User', 'View', cl) - db.security.addPermissionToRole('Anonymous', 'View', cl) + +class may_view_spam: + def __init__(self, klassname): + self.klassname = klassname + + def __call__(self, db, userid, itemid): + klass = db.getclass(self.klassname) + roles = set(db.user.get(userid, "roles").lower().split(",")) + allowed = set(db.config.detectors['SPAMBAYES_MAY_VIEW_SPAM'].lower().split(",")) + return bool(roles.intersection(allowed)) + +for cl in ('file', 'msg'): + p = db.security.addPermission(name='View', klass=cl, + description="allowed to see metadata of file object regardless of spam status", + properties=('creation', 'activity', + 
'creator', 'actor', + 'name', 'spambayes_score', + 'spambayes_misclassified', + 'author', 'recipients', + 'date', 'files', 'messageid', + 'inreplyto', 'type', + )) + + db.security.addPermissionToRole('Anonymous', p) + db.security.addPermissionToRole('User', p) + + + spamcheck = db.security.addPermission(name='View', klass=cl, + description="allowed to see metadata of file object regardless of spam status", + properties=('content', 'summary'), + check=may_view_spam(cl)) + + db.security.addPermissionToRole('User', spamcheck) + db.security.addPermissionToRole('Anonymous', spamcheck) + + for cl in 'file', 'msg': db.security.addPermissionToRole('User', 'Create', cl) @@ -268,7 +299,7 @@ # Allow anonymous users access to view issues (and the related, linked # information) -for cl in 'issue', 'file', 'msg', 'severity', 'status', 'resolution': +for cl in 'issue', 'severity', 'status', 'resolution': db.security.addPermissionToRole('Anonymous', 'View', cl) # [OPTIONAL] From python-checkins at python.org Fri Jul 27 15:21:45 2007 From: python-checkins at python.org (erik.forsberg) Date: Fri, 27 Jul 2007 15:21:45 +0200 (CEST) Subject: [Python-checkins] r56570 - tracker/instances/python-dev-spambayes-integration/html/file.item.html tracker/instances/python-dev-spambayes-integration/html/issue.item.html tracker/instances/python-dev-spambayes-integration/html/msg.item.html Message-ID: <20070727132145.C810E1E400A@bag.python.org> Author: erik.forsberg Date: Fri Jul 27 15:21:45 2007 New Revision: 56570 Modified: tracker/instances/python-dev-spambayes-integration/html/file.item.html tracker/instances/python-dev-spambayes-integration/html/issue.item.html tracker/instances/python-dev-spambayes-integration/html/msg.item.html Log: Modified templates to notify users that msg/file instances are classified as spam, and that they may not be viewed by anonymous users. 
Modified: tracker/instances/python-dev-spambayes-integration/html/file.item.html ============================================================================== --- tracker/instances/python-dev-spambayes-integration/html/file.item.html (original) +++ tracker/instances/python-dev-spambayes-integration/html/file.item.html Fri Jul 27 15:21:45 2007 @@ -39,7 +39,7 @@ - +
    Content
    Content +
    + + + + +
    +
      @@ -53,13 +53,24 @@
    - + File has been classified as spam.

    + +
    download +

    + Files classified as spam are not available for download by + unauthorized users. If you think the file has been misclassified, + please login and click on the button for reclassification. +

    + +
    + tal:attributes="action context/designator" + tal:condition="python:utils.sb_may_classify(context)"> Modified: tracker/instances/python-dev-spambayes-integration/html/issue.item.html ============================================================================== --- tracker/instances/python-dev-spambayes-integration/html/issue.item.html (original) +++ tracker/instances/python-dev-spambayes-integration/html/issue.item.html Fri Jul 27 15:21:45 2007 @@ -243,7 +243,12 @@ -
    content
    +

    + Message has been classified as spam. +

    +
    content
    Modified: tracker/instances/python-dev-spambayes-integration/html/msg.item.html ============================================================================== --- tracker/instances/python-dev-spambayes-integration/html/msg.item.html (original) +++ tracker/instances/python-dev-spambayes-integration/html/msg.item.html Fri Jul 27 15:21:45 2007 @@ -60,10 +60,14 @@ +

    + Message has been classified as spam

    + + - + - + +
    Content + @@ -75,8 +79,18 @@
    + Message has been classified as spam and is therefore not + available to unauthorized users. If you think this is + incorrect, please login and report the message as being + misclassified. +
    From python-checkins at python.org Fri Jul 27 15:39:27 2007 From: python-checkins at python.org (erik.forsberg) Date: Fri, 27 Jul 2007 15:39:27 +0200 (CEST) Subject: [Python-checkins] r56571 - in tracker/instances: python-dev-spambayes-integration/detectors/spambayes.py python-dev-spambayes-integration/extensions/spambayes.py spambayes_integration spambayes_integration/detectors spambayes_integration/detectors/spambayes.py spambayes_integration/extensions spambayes_integration/extensions/spambayes.py Message-ID: <20070727133927.ED9C81E400A@bag.python.org> Author: erik.forsberg Date: Fri Jul 27 15:39:27 2007 New Revision: 56571 Added: tracker/instances/spambayes_integration/ tracker/instances/spambayes_integration/detectors/ tracker/instances/spambayes_integration/detectors/spambayes.py - copied unchanged from r56565, tracker/instances/python-dev-spambayes-integration/detectors/spambayes.py tracker/instances/spambayes_integration/extensions/ tracker/instances/spambayes_integration/extensions/spambayes.py - copied unchanged from r56568, tracker/instances/python-dev-spambayes-integration/extensions/spambayes.py Removed: tracker/instances/python-dev-spambayes-integration/detectors/spambayes.py tracker/instances/python-dev-spambayes-integration/extensions/spambayes.py Log: Moving spambayes integration into its own directory for easier use by multiple trackers Deleted: /tracker/instances/python-dev-spambayes-integration/detectors/spambayes.py ============================================================================== --- /tracker/instances/python-dev-spambayes-integration/detectors/spambayes.py Fri Jul 27 15:39:27 2007 +++ (empty file) @@ -1,80 +0,0 @@ -""" -spamcheck.py - Auditor that consults a SpamBayes server and scores all form -submissions. Submissions which are deemed to be spam are rejected. For the -time being only reject submissions which are assumed to be spam (score >= -SPAM_CUTOFF). 
Once a reasonable body of ham and spam submissions have been -built up you can consider whether to also reject unsure submissions (score > -HAM_CUTOFF). The current settings make it less likely that you'll reject -valid submissions at the expense of manual checks to correct spammy items -which snuck by the screen. -""" - -import xmlrpclib -import socket -import time - -from roundup.exceptions import Reject - -def extract_classinfo(db, klass, nodeid, newvalues): - if None == nodeid: - node = newvalues - content = newvalues['content'] - else: - node = db.getnode(klass.classname, nodeid) - content = klass.get(nodeid, 'content') - - if node.has_key('creation') or node.has_key('date'): - nodets = node.get('creation', node.get('date')).timestamp() - else: - nodets = time.time() - - if node.has_key('author') or node.has_key('creator'): - authorid = node.get('author', node.get('creator')) - else: - authorid = db.getuid() - - authorage = nodets - db.getnode('user', authorid)['creation'].timestamp() - - tokens = ["klass:%s" % klass.classname, - "author:%s" % authorid, - "authorage:%d" % int(authorage)] - - - return (content, tokens) - -def check_spambayes(db, content, tokens): - spambayes_uri = db.config.detectors['SPAMBAYES_URI'] - server = xmlrpclib.ServerProxy(spambayes_uri, verbose=False) - - try: - prob = server.score({'content':content}, tokens, {}) - return (True, prob) - except (socket.error, xmlrpclib.Error), e: - return (False, str(e)) - - -def check_spam(db, klass, nodeid, newvalues): - """Auditor to score a website submission.""" - - - if newvalues.has_key('spambayes_score'): - if not "coordinator" in [x.lower().strip() for x in db.user.get(db.getuid(), 'roles').split(",")]: - raise ValueError, "Only Coordinators may explicitly assign spambayes_score" - # Don't do anything if we're explicitly setting the score - return - - (content, tokens) = extract_classinfo(db, klass, nodeid, newvalues) - (success, other) = check_spambayes(db, content, tokens) - if success: - 
newvalues['spambayes_score'] = other - newvalues['spambayes_misclassified'] = False - else: - newvalues['spambayes_score'] = -1 - newvalues['spambayes_misclassified'] = True - -def init(database): - """Initialize auditor.""" - database.msg.audit('create', check_spam) - database.msg.audit('set', check_spam) - database.file.audit('create', check_spam) - database.file.audit('set', check_spam) Deleted: /tracker/instances/python-dev-spambayes-integration/extensions/spambayes.py ============================================================================== --- /tracker/instances/python-dev-spambayes-integration/extensions/spambayes.py Fri Jul 27 15:39:27 2007 +++ (empty file) @@ -1,111 +0,0 @@ -from roundup.cgi.actions import Action -from roundup.cgi.exceptions import * - -import xmlrpclib, socket - -def extract_classinfo(db, classname, nodeid): - node = db.getnode(classname, nodeid) - - authorage = node['creation'].timestamp() - \ - db.getnode('user', node.get('author', node.get('creator')))['creation'].timestamp() - - tokens = ["klass:%s" % classname, - "author:%s" % node.get('author', node.get('creator')), - "authorage:%d" % int(authorage)] - - klass = db.getclass(classname) - return (klass.get(nodeid, 'content'), tokens) - -def train_spambayes(db, content, tokens, is_spam): - spambayes_uri = db.config.detectors['SPAMBAYES_URI'] - - server = xmlrpclib.ServerProxy(spambayes_uri, verbose=False) - try: - server.train({'content':content}, tokens, {}, is_spam) - return (True, None) - except (socket.error, xmlrpclib.Error), e: - return (False, str(e)) - - -class SpambayesClassify(Action): - def handle(self): - (content, tokens) = extract_classinfo(self.db, - self.classname, self.nodeid) - - if self.form.has_key("trainspam"): - is_spam = True - elif self.form.has_key("trainham"): - is_spam = False - - (status, errmsg) = train_spambayes(self.db, content, tokens, - is_spam) - - node = self.db.getnode(self.classname, self.nodeid) - props = {} - - if status: - if 
node.get('spambayes_misclassified', False): - props['spambayes_misclassified':True] - - props['spambayes_score'] = 1.0 - - s = " SPAM" - if not is_spam: - props['spambayes_score'] = 0.0 - s = " HAM" - self.client.ok_message.append(self._('Message classified as') + s) - else: - self.client.error_message.append(self._('Unable to classify message, got error:') + errmsg) - - klass = self.db.getclass(self.classname) - klass.set(self.nodeid, **props) - self.db.commit() - - def permission(self): - roles = set(self.db.user.get(self.userid, 'roles').lower().split(",")) - allowed = set(self.db.config.detectors['SPAMBAYES_MAY_CLASSIFY'].lower().split(",")) - - if not bool(roles.intersection(allowed)): - raise Unauthorised("You do not have permission to train spambayes") - Action.permission(self) - - -def sb_is_spam(obj): - cutoff_score = float(obj._db.config.detectors['SPAMBAYES_SPAM_CUTOFF']) - try: - score = obj['spambayes_score'] - except KeyError: - return False - return score >= cutoff_score - -def sb_is_view_ok(obj): - if not sb_is_spam(obj): - return True - roles = set(obj._db.user.get(obj._client.userid, - 'roles').lower().split(",")) - allowed = set(obj._db.config.detectors['SPAMBAYES_MAY_VIEW_SPAM'].lower().split(",")) - - return bool(roles.intersection(allowed)) - -def sb_may_report_misclassified(obj): - roles = set(obj._db.user.get(obj._client.userid, - 'roles').lower().split(",")) - allowed = set(obj._db.config.detectors['SPAMBAYES_MAY_REPORT_MISCLASSIFIED'].lower().split(",")) - - return bool(roles.intersection(allowed)) - -def sb_may_classify(obj): - roles = set(obj._db.user.get(obj._client.userid, - 'roles').lower().split(",")) - allowed = set(obj._db.config.detectors['SPAMBAYES_MAY_CLASSIFY'].lower().split(",")) - - return bool(roles.intersection(allowed)) - -def init(instance): - instance.registerAction("spambayes_classify", SpambayesClassify) - instance.registerUtil('sb_is_spam', sb_is_spam) - instance.registerUtil('sb_is_view_ok', sb_is_view_ok) - 
instance.registerUtil('sb_may_report_misclassified', - sb_may_report_misclassified) - instance.registerUtil('sb_may_classify', sb_may_classify) - From python-checkins at python.org Fri Jul 27 15:44:17 2007 From: python-checkins at python.org (erik.forsberg) Date: Fri, 27 Jul 2007 15:44:17 +0200 (CEST) Subject: [Python-checkins] r56572 - tracker/instances/python-dev-spambayes-integration/schema.py Message-ID: <20070727134417.CA3321E400A@bag.python.org> Author: erik.forsberg Date: Fri Jul 27 15:44:17 2007 New Revision: 56572 Modified: tracker/instances/python-dev-spambayes-integration/schema.py Log: Re-adding some permissions code that were removed by mistake. Oops. Modified: tracker/instances/python-dev-spambayes-integration/schema.py ============================================================================== --- tracker/instances/python-dev-spambayes-integration/schema.py (original) +++ tracker/instances/python-dev-spambayes-integration/schema.py Fri Jul 27 15:44:17 2007 @@ -146,6 +146,12 @@ # User permissions ########################## +for cl in ('issue_type', 'severity', 'component', + 'version', 'priority', 'status', 'resolution', + 'issue', 'file', 'msg', 'keyword'): + db.security.addPermissionToRole('User', 'View', cl) + db.security.addPermissionToRole('Anonymous', 'View', cl) + class may_view_spam: def __init__(self, klassname): self.klassname = klassname From python-checkins at python.org Fri Jul 27 15:45:45 2007 From: python-checkins at python.org (erik.forsberg) Date: Fri, 27 Jul 2007 15:45:45 +0200 (CEST) Subject: [Python-checkins] r56573 - in tracker/instances/python-dev-spambayes-integration: detectors/spambayes.py extensions/spambayes.py Message-ID: <20070727134545.79F831E400A@bag.python.org> Author: erik.forsberg Date: Fri Jul 27 15:45:45 2007 New Revision: 56573 Added: tracker/instances/python-dev-spambayes-integration/detectors/spambayes.py (contents, props changed) tracker/instances/python-dev-spambayes-integration/extensions/spambayes.py 
(contents, props changed) Log: Creating symbolic links to detector/extensions for spambayes integration Added: tracker/instances/python-dev-spambayes-integration/detectors/spambayes.py ============================================================================== --- (empty file) +++ tracker/instances/python-dev-spambayes-integration/detectors/spambayes.py Fri Jul 27 15:45:45 2007 @@ -0,0 +1 @@ +link ../../spambayes_integration/detectors/spambayes.py \ No newline at end of file Added: tracker/instances/python-dev-spambayes-integration/extensions/spambayes.py ============================================================================== --- (empty file) +++ tracker/instances/python-dev-spambayes-integration/extensions/spambayes.py Fri Jul 27 15:45:45 2007 @@ -0,0 +1 @@ +link ../../spambayes_integration/extensions/spambayes.py \ No newline at end of file From python-checkins at python.org Fri Jul 27 15:54:29 2007 From: python-checkins at python.org (erik.forsberg) Date: Fri, 27 Jul 2007 15:54:29 +0200 (CEST) Subject: [Python-checkins] r56574 - tracker/instances/spambayes_integration/detectors/config.ini.template Message-ID: <20070727135429.3E0411E400A@bag.python.org> Author: erik.forsberg Date: Fri Jul 27 15:54:28 2007 New Revision: 56574 Added: tracker/instances/spambayes_integration/detectors/config.ini.template Log: Example configuration Added: tracker/instances/spambayes_integration/detectors/config.ini.template ============================================================================== --- (empty file) +++ tracker/instances/spambayes_integration/detectors/config.ini.template Fri Jul 27 15:54:28 2007 @@ -0,0 +1,11 @@ +[main] +# URI to XMLRPC server doing the actual spam check. +spambayes_uri = http://www.webfast.com:80/sbrpc +# These must match the {ham,spam}_cutoff setting in the SpamBayes server +# config. 
+spambayes_ham_cutoff = 0.2 +spambayes_spam_cutoff = 0.85 + +spambayes_may_view_spam = User,Coordinator,Developer +spambayes_may_classify = Coordinator +spambayes_may_report_misclassified = User,Coordinator,Developer From python-checkins at python.org Fri Jul 27 15:55:49 2007 From: python-checkins at python.org (erik.forsberg) Date: Fri, 27 Jul 2007 15:55:49 +0200 (CEST) Subject: [Python-checkins] r56575 - tracker/instances/spambayes_integration/detectors/config.ini.template Message-ID: <20070727135549.B13961E4011@bag.python.org> Author: erik.forsberg Date: Fri Jul 27 15:55:49 2007 New Revision: 56575 Modified: tracker/instances/spambayes_integration/detectors/config.ini.template Log: The spambayes server is more likely to run at localhost. Modified: tracker/instances/spambayes_integration/detectors/config.ini.template ============================================================================== --- tracker/instances/spambayes_integration/detectors/config.ini.template (original) +++ tracker/instances/spambayes_integration/detectors/config.ini.template Fri Jul 27 15:55:49 2007 @@ -1,6 +1,6 @@ [main] # URI to XMLRPC server doing the actual spam check. -spambayes_uri = http://www.webfast.com:80/sbrpc +spambayes_uri = http://localhost:8001/sbrpc # These must match the {ham,spam}_cutoff setting in the SpamBayes server # config. 
spambayes_ham_cutoff = 0.2 From python-checkins at python.org Fri Jul 27 16:20:03 2007 From: python-checkins at python.org (erik.forsberg) Date: Fri, 27 Jul 2007 16:20:03 +0200 (CEST) Subject: [Python-checkins] r56576 - tracker/instances/python-dev-spambayes-integration/detectors/busybody.py tracker/instances/python-dev-spambayes-integration/detectors/tellteam.py Message-ID: <20070727142003.150E51E401C@bag.python.org> Author: erik.forsberg Date: Fri Jul 27 16:20:02 2007 New Revision: 56576 Modified: tracker/instances/python-dev-spambayes-integration/detectors/busybody.py tracker/instances/python-dev-spambayes-integration/detectors/tellteam.py Log: Don't send messages classified as spam. Modified: tracker/instances/python-dev-spambayes-integration/detectors/busybody.py ============================================================================== --- tracker/instances/python-dev-spambayes-integration/detectors/busybody.py (original) +++ tracker/instances/python-dev-spambayes-integration/detectors/busybody.py Fri Jul 27 16:20:02 2007 @@ -22,6 +22,16 @@ from roundup import roundupdb, hyperdb +def is_spam(db, msgid): + cutoff_score = float(db.config.detectors['SPAMBAYES_SPAM_CUTOFF']) + + msg = db.getnode("msg", msgid) + if msg.has_key('spambayes_score') and \ + msg['spambayes_score'] > cutoff_score: + return False + return True + + def busyreaction(db, cl, nodeid, oldvalues): ''' busybody mail ''' @@ -36,7 +46,7 @@ else: note = cl.generateChangeNote(nodeid, oldvalues) - for msgid in msgIDS: + for msgid in filter(lambda x: is_spam(db, x), msgIDS): try: cl.send_message(nodeid, msgid, note, sendto) except roundupdb.MessageSendError, message: Modified: tracker/instances/python-dev-spambayes-integration/detectors/tellteam.py ============================================================================== --- tracker/instances/python-dev-spambayes-integration/detectors/tellteam.py (original) +++ tracker/instances/python-dev-spambayes-integration/detectors/tellteam.py Fri Jul 
27 16:20:02 2007 @@ -1,5 +1,14 @@ from roundup import roundupdb +def is_spam(db, msgid): + cutoff_score = float(db.config.detectors['SPAMBAYES_SPAM_CUTOFF']) + + msg = db.getnode("msg", msgid) + if msg.has_key('spambayes_score') and \ + msg['spambayes_score'] > cutoff_score: + return False + return True + def newissuetriage(db, cl, nodeid, oldvalues): ''' Copy a message about new issues to a triage address, set in detectors/config.ini @@ -14,9 +23,10 @@ triage_email = [] if not triage_email: return - for msgid in cl.get(nodeid, 'messages'): + for msgid in filter(lambda x: is_spam(db, x), cl.get(nodeid, 'messages')): try: # note: last arg must be a list + cl.send_message(nodeid, msgid, change_note, triage_email) except roundupdb.MessageSendError, message: raise roundupdb.DetectorError, message From python-checkins at python.org Fri Jul 27 16:27:25 2007 From: python-checkins at python.org (erik.forsberg) Date: Fri, 27 Jul 2007 16:27:25 +0200 (CEST) Subject: [Python-checkins] r56577 - tracker/instances/meta Message-ID: <20070727142725.C9DB51E400A@bag.python.org> Author: erik.forsberg Date: Fri Jul 27 16:27:25 2007 New Revision: 56577 Added: tracker/instances/meta/ Log: Creating directory for meta tracker From python-checkins at python.org Fri Jul 27 16:30:02 2007 From: python-checkins at python.org (erik.forsberg) Date: Fri, 27 Jul 2007 16:30:02 +0200 (CEST) Subject: [Python-checkins] r56578 - in tracker/instances/meta: detectors detectors/config.ini detectors/mailonmsgchanges.py detectors/messagesummary.py detectors/nosyreaction.py detectors/statusauditor.py detectors/userauditor.py extensions extensions/README.txt html html/_generic.calendar.html html/_generic.collision.html html/_generic.help-empty.html html/_generic.help-list.html html/_generic.help-search.html html/_generic.help-submit.html html/_generic.help.html html/_generic.index.html html/_generic.item.html html/file.index.html html/file.item.html html/help.html html/help_controls.js html/home.classlist.html 
html/home.html html/issue.index.html html/issue.item.html html/issue.search.html html/keyword.item.html html/msg.index.html html/msg.item.html html/page.html html/query.edit.html html/query.item.html html/style.css html/user.forgotten.html html/user.help-search.html html/user.help.html html/user.index.html html/user.item.html html/user.register.html html/user.rego_progress.html html/user_utils.js initial_data.py schema.py Message-ID: <20070727143002.A4F271E400B@bag.python.org> Author: erik.forsberg Date: Fri Jul 27 16:30:01 2007 New Revision: 56578 Added: tracker/instances/meta/detectors/ tracker/instances/meta/detectors/config.ini tracker/instances/meta/detectors/mailonmsgchanges.py tracker/instances/meta/detectors/messagesummary.py tracker/instances/meta/detectors/nosyreaction.py tracker/instances/meta/detectors/statusauditor.py tracker/instances/meta/detectors/userauditor.py tracker/instances/meta/extensions/ tracker/instances/meta/extensions/README.txt tracker/instances/meta/html/ tracker/instances/meta/html/_generic.calendar.html tracker/instances/meta/html/_generic.collision.html tracker/instances/meta/html/_generic.help-empty.html tracker/instances/meta/html/_generic.help-list.html tracker/instances/meta/html/_generic.help-search.html tracker/instances/meta/html/_generic.help-submit.html tracker/instances/meta/html/_generic.help.html tracker/instances/meta/html/_generic.index.html tracker/instances/meta/html/_generic.item.html tracker/instances/meta/html/file.index.html tracker/instances/meta/html/file.item.html tracker/instances/meta/html/help.html tracker/instances/meta/html/help_controls.js tracker/instances/meta/html/home.classlist.html tracker/instances/meta/html/home.html tracker/instances/meta/html/issue.index.html tracker/instances/meta/html/issue.item.html tracker/instances/meta/html/issue.search.html tracker/instances/meta/html/keyword.item.html tracker/instances/meta/html/msg.index.html tracker/instances/meta/html/msg.item.html 
tracker/instances/meta/html/page.html tracker/instances/meta/html/query.edit.html tracker/instances/meta/html/query.item.html tracker/instances/meta/html/style.css tracker/instances/meta/html/user.forgotten.html tracker/instances/meta/html/user.help-search.html tracker/instances/meta/html/user.help.html tracker/instances/meta/html/user.index.html tracker/instances/meta/html/user.item.html tracker/instances/meta/html/user.register.html tracker/instances/meta/html/user.rego_progress.html tracker/instances/meta/html/user_utils.js tracker/instances/meta/initial_data.py tracker/instances/meta/schema.py Modified: tracker/instances/meta/ (props changed) Log: Adding meta tracker Added: tracker/instances/meta/detectors/config.ini ============================================================================== --- (empty file) +++ tracker/instances/meta/detectors/config.ini Fri Jul 27 16:30:01 2007 @@ -0,0 +1,4 @@ +[main] + +msgchange_email = tracker-discuss at python.org + Added: tracker/instances/meta/detectors/mailonmsgchanges.py ============================================================================== --- (empty file) +++ tracker/instances/meta/detectors/mailonmsgchanges.py Fri Jul 27 16:30:01 2007 @@ -0,0 +1,70 @@ +# +# Copyright (c) 2001 Bizar Software Pty Ltd (http://www.bizarsoftware.com.au/) +# This module is free software, and you may redistribute it and/or modify +# under the same terms as Python, so long as this copyright message and +# disclaimer are retained in their original form. +# +# IN NO EVENT SHALL BIZAR SOFTWARE PTY LTD BE LIABLE TO ANY PARTY FOR +# DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING +# OUT OF THE USE OF THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# +# BIZAR SOFTWARE PTY LTD SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, +# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE. 
THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" +# BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, +# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. +# +# Modified from nosyreaction by P. Dubois to send mail to a busybody list +# that wants to know about EVERY change. + +import sets + +from roundup import roundupdb, hyperdb + +def mailonmsgchange(db, cl, nodeid, oldvalues): + ''' busybody mail + ''' + try: + sendto = db.config.detectors['MSGCHANGE_EMAIL'].split() + except KeyError: + return + + msgIDS = determineNewMessages(cl, nodeid, oldvalues) + if oldvalues is None: # a create + note = cl.generateCreateNote(nodeid) + else: + note = cl.generateChangeNote(nodeid, oldvalues) + + for msgid in msgIDS: + try: + cl.send_message(nodeid, msgid, note, sendto) + except roundupdb.MessageSendError, message: + raise roundupdb.DetectorError, message + + +def determineNewMessages(cl, nodeid, oldvalues): + ''' Figure a list of the messages that are being added to the given + node in this transaction. 
+ ''' + messages = [] + if oldvalues is None: + # the action was a create, so use all the messages in the create + messages = cl.get(nodeid, 'messages') + elif oldvalues.has_key('messages'): + # the action was a set (so adding new messages to an existing issue) + m = {} + for msgid in oldvalues['messages']: + m[msgid] = 1 + messages = [] + # figure which of the messages now on the issue weren't there before + for msgid in cl.get(nodeid, 'messages'): + if not m.has_key(msgid): + messages.append(msgid) + return messages + +def init(db): + db.issue.react('create', mailonmsgchange) + db.issue.react('set', mailonmsgchange) + +# vim: set filetype=python ts=4 sw=4 et si Added: tracker/instances/meta/detectors/messagesummary.py ============================================================================== --- (empty file) +++ tracker/instances/meta/detectors/messagesummary.py Fri Jul 27 16:30:01 2007 @@ -0,0 +1,20 @@ +#$Id$ + +from roundup.mailgw import parseContent + +def summarygenerator(db, cl, nodeid, newvalues): + ''' If the message doesn't have a summary, make one for it. + ''' + if newvalues.has_key('summary') or not newvalues.has_key('content'): + return + + summary, content = parseContent(newvalues['content'], 1, 1) + newvalues['summary'] = summary + + +def init(db): + # fire before changes are made + db.msg.audit('create', summarygenerator) + +# vim: set filetype=python ts=4 sw=4 et si +#SHA: 38d7638272923ba22aa28342f267b611f3be392d Added: tracker/instances/meta/detectors/nosyreaction.py ============================================================================== --- (empty file) +++ tracker/instances/meta/detectors/nosyreaction.py Fri Jul 27 16:30:01 2007 @@ -0,0 +1,144 @@ +# +# Copyright (c) 2001 Bizar Software Pty Ltd (http://www.bizarsoftware.com.au/) +# This module is free software, and you may redistribute it and/or modify +# under the same terms as Python, so long as this copyright message and +# disclaimer are retained in their original form. 
+# +# IN NO EVENT SHALL BIZAR SOFTWARE PTY LTD BE LIABLE TO ANY PARTY FOR +# DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING +# OUT OF THE USE OF THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# +# BIZAR SOFTWARE PTY LTD SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, +# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" +# BASIS, AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE, +# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. +# +#$Id$ + +import sets + +from roundup import roundupdb, hyperdb + +def nosyreaction(db, cl, nodeid, oldvalues): + ''' A standard detector is provided that watches for additions to the + "messages" property. + + When a new message is added, the detector sends it to all the users on + the "nosy" list for the issue that are not already on the "recipients" + list of the message. + + Those users are then appended to the "recipients" property on the + message, so multiple copies of a message are never sent to the same + user. + + The journal recorded by the hyperdatabase on the "recipients" property + then provides a log of when the message was sent to whom. + ''' + # send a copy of all new messages to the nosy list + for msgid in determineNewMessages(cl, nodeid, oldvalues): + try: + cl.nosymessage(nodeid, msgid, oldvalues) + except roundupdb.MessageSendError, message: + raise roundupdb.DetectorError, message + +def determineNewMessages(cl, nodeid, oldvalues): + ''' Figure a list of the messages that are being added to the given + node in this transaction. 
+ ''' + messages = [] + if oldvalues is None: + # the action was a create, so use all the messages in the create + messages = cl.get(nodeid, 'messages') + elif oldvalues.has_key('messages'): + # the action was a set (so adding new messages to an existing issue) + m = {} + for msgid in oldvalues['messages']: + m[msgid] = 1 + messages = [] + # figure which of the messages now on the issue weren't there before + for msgid in cl.get(nodeid, 'messages'): + if not m.has_key(msgid): + messages.append(msgid) + return messages + +def updatenosy(db, cl, nodeid, newvalues): + '''Update the nosy list for changes to the assignedto + ''' + # nodeid will be None if this is a new node + current_nosy = sets.Set() + if nodeid is None: + ok = ('new', 'yes') + else: + ok = ('yes',) + # old node, get the current values from the node if they haven't + # changed + if not newvalues.has_key('nosy'): + nosy = cl.get(nodeid, 'nosy') + for value in nosy: + current_nosy.add(value) + + # if the nosy list changed in this transaction, init from the new value + if newvalues.has_key('nosy'): + nosy = newvalues.get('nosy', []) + for value in nosy: + if not db.hasnode('user', value): + continue + current_nosy.add(value) + + new_nosy = sets.Set(current_nosy) + + # add assignedto(s) to the nosy list + if newvalues.has_key('assignedto') and newvalues['assignedto'] is not None: + propdef = cl.getprops() + if isinstance(propdef['assignedto'], hyperdb.Link): + assignedto_ids = [newvalues['assignedto']] + elif isinstance(propdef['assignedto'], hyperdb.Multilink): + assignedto_ids = newvalues['assignedto'] + for assignedto_id in assignedto_ids: + new_nosy.add(assignedto_id) + + # see if there's any new messages - if so, possibly add the author and + # recipient to the nosy + if newvalues.has_key('messages'): + if nodeid is None: + ok = ('new', 'yes') + messages = newvalues['messages'] + else: + ok = ('yes',) + # figure which of the messages now on the issue weren't + oldmessages = cl.get(nodeid, 'messages') 
+ messages = [] + for msgid in newvalues['messages']: + if msgid not in oldmessages: + messages.append(msgid) + + # configs for nosy modifications + add_author = getattr(db.config, 'ADD_AUTHOR_TO_NOSY', 'new') + add_recips = getattr(db.config, 'ADD_RECIPIENTS_TO_NOSY', 'new') + + # now for each new message: + msg = db.msg + for msgid in messages: + if add_author in ok: + authid = msg.get(msgid, 'author') + new_nosy.add(authid) + + # add on the recipients of the message + if add_recips in ok: + for recipient in msg.get(msgid, 'recipients'): + new_nosy.add(recipient) + + if current_nosy != new_nosy: + # that's it, save off the new nosy list + newvalues['nosy'] = list(new_nosy) + +def init(db): + db.issue.react('create', nosyreaction) + db.issue.react('set', nosyreaction) + db.issue.audit('create', updatenosy) + db.issue.audit('set', updatenosy) + +# vim: set filetype=python ts=4 sw=4 et si +#SHA: 6bce01e5c67b0b2250122770f4ffe2d224768ab7 Added: tracker/instances/meta/detectors/statusauditor.py ============================================================================== --- (empty file) +++ tracker/instances/meta/detectors/statusauditor.py Fri Jul 27 16:30:01 2007 @@ -0,0 +1,86 @@ +# Copyright (c) 2002 ekit.com Inc (http://www.ekit-inc.com/) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# +#$Id$ + +def chatty(db, cl, nodeid, newvalues): + ''' If the issue is currently 'unread', 'resolved', 'done-cbb' or None, + then set it to 'chatting' + ''' + # don't fire if there's no new message (ie. chat) + if not newvalues.has_key('messages'): + return + if newvalues['messages'] == cl.get(nodeid, 'messages'): + return + + # get the chatting state ID + try: + chatting_id = db.status.lookup('chatting') + except KeyError: + # no chatting state, ignore all this stuff + return + + # get the current value + current_status = cl.get(nodeid, 'status') + + # see if there's an explicit change in this transaction + if newvalues.has_key('status'): + # yep, skip + return + + # determine the id of 'unread', 'resolved' and 'chatting' + fromstates = [] + for state in 'unread resolved done-cbb'.split(): + try: + fromstates.append(db.status.lookup(state)) + except KeyError: + pass + + # ok, there's no explicit change, so check if we are in a state that + # should be changed + if current_status in fromstates + [None]: + # yep, we're now chatting + newvalues['status'] = chatting_id + + +def presetunread(db, cl, nodeid, newvalues): + ''' Make sure the status is set on new issues + ''' + if newvalues.has_key('status') and newvalues['status']: + return + + # get the unread state ID + try: + unread_id = db.status.lookup('unread') + except KeyError: + # no unread state, ignore all this stuff + return + + # ok, do it + newvalues['status'] = unread_id + + +def init(db): + # fire before changes are made + 
db.issue.audit('set', chatty) + db.issue.audit('create', presetunread) + +# vim: set filetype=python ts=4 sw=4 et si +#SHA: 2c27850eaa007e0021a9427b4c538d812849d218 Added: tracker/instances/meta/detectors/userauditor.py ============================================================================== --- (empty file) +++ tracker/instances/meta/detectors/userauditor.py Fri Jul 27 16:30:01 2007 @@ -0,0 +1,45 @@ +# Copyright (c) 2003 Richard Jones (richard at mechanicalcat.net) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# +#$Id$ + +def audit_user_fields(db, cl, nodeid, newvalues): + ''' Make sure user properties are valid. 
+ + - email address has no spaces in it + - roles specified exist + ''' + if newvalues.has_key('address') and ' ' in newvalues['address']: + raise ValueError, 'Email address must not contain spaces' + + if newvalues.has_key('roles') and newvalues['roles']: + roles = [x.lower().strip() for x in newvalues['roles'].split(',')] + for rolename in roles: + if not db.security.role.has_key(rolename): + raise ValueError, 'Role "%s" does not exist'%rolename + + +def init(db): + # fire before changes are made + db.user.audit('set', audit_user_fields) + db.user.audit('create', audit_user_fields) + +# vim: set filetype=python ts=4 sw=4 et si +#SHA: 5663145877b5c4999449e4b1f28e88c2f721872a Added: tracker/instances/meta/extensions/README.txt ============================================================================== --- (empty file) +++ tracker/instances/meta/extensions/README.txt Fri Jul 27 16:30:01 2007 @@ -0,0 +1,6 @@ +This directory is for tracker extensions: + +- CGI Actions +- Templating functions + +See the customisation doc for more information. 
Added: tracker/instances/meta/html/_generic.calendar.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/_generic.calendar.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,19 @@ + + + + + + + + + + + + Added: tracker/instances/meta/html/_generic.collision.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/_generic.collision.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,17 @@ + +<span tal:replace="python:context._classname.capitalize()" + i18n:name="class" /> Edit Collision - <span i18n:name="tracker" + tal:replace="config/TRACKER_NAME" /> + Edit Collision + + + + + Added: tracker/instances/meta/html/_generic.help-empty.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/_generic.help-empty.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,9 @@ + + + Empty page (no search performed yet) + + +

    Please specify your search parameters!

    + + + Added: tracker/instances/meta/html/_generic.help-list.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/_generic.help-list.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,84 @@ + + + Search result for user helper + + + + + + +
    +
    +  

    You are not + allowed to view this page.

    + + + + + + + + + + +
    + + + + + + + + + + + + + + + + + + + +
     x
    + + + +
    + +
    +
    + +
    +     
    +  
    +
    +
    
    Added: tracker/instances/meta/html/_generic.help-search.html
    ==============================================================================
    --- (empty file)
    +++ tracker/instances/meta/html/_generic.help-search.html	Fri Jul 27 16:30:01 2007
    @@ -0,0 +1,14 @@
    +
    +  
    +    Frame for search input fields
    +  
    +  
    +    

    Generic template + help-search + or version for class + user + is not yet implemented

    + + + + Added: tracker/instances/meta/html/_generic.help-submit.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/_generic.help-submit.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,74 @@ + + + + + + Generic submit page for framed helper windows + + + + + +
    + 
    +
    + +
    + + + + +
    + + + + Added: tracker/instances/meta/html/_generic.help.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/_generic.help.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,99 @@ + + + + + + + <tal:x i18n:name="property" + tal:content="property" i18n:translate="" /> help - <span i18n:name="tracker" + tal:replace="config/TRACKER_NAME" /> + + + + + +
    + +
    + + + + +
    + + + + + + +
    + + + + + + + + + + + + + + + + +
     x
    + + + +
     x
    + +
    + + + Added: tracker/instances/meta/html/_generic.index.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/_generic.index.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,71 @@ + + + +<span tal:replace="python:context._classname.capitalize()" + i18n:name="class" /> editing - <span i18n:name="tracker" + tal:replace="config/TRACKER_NAME" /> + editing + + + +You are not allowed to view this page. + +Please login with your username and password. + + + +

    + You may edit the contents of the + + class using this form. Commas, newlines and double quotes (") must be + handled delicately. You may include commas and newlines by enclosing the + values in double-quotes ("). Double quotes themselves must be quoted by + doubling (""). +

    + +

    + Multilink properties have their multiple values colon (":") separated + (... ,"one:two:three", ...) +

    + +

    + Remove entries by deleting their line. Add new entries by appending + them to the table - put an X in the id column. +

    +
    +
    + +
    + + +
    +
    + + + + + + + + + + +
     
     
    + + + +
    + Added: tracker/instances/meta/html/_generic.item.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/_generic.item.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,54 @@ + +<span tal:replace="python:context._classname.capitalize()" + i18n:name="class" /> editing - <span i18n:name="tracker" + tal:replace="config/TRACKER_NAME" /> + editing + + + +

    + You are not allowed to view this page.

    + +

    + Please login with your username and password.

    + +
    + +
    + + + + + + + + + + + + + + + +
      + submit button will go here +
    + +
    + + + +
    + + + +
    + Added: tracker/instances/meta/html/file.index.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/file.index.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,32 @@ + + +List of files - <span tal:replace="config/TRACKER_NAME" i18n:name="tracker" /> +List of files + + + + + + + + + + + + + + + + + +
    DownloadContent TypeUploaded ByDate
    + dld link + content typecreator's namecreation date
    + + + +
    + Added: tracker/instances/meta/html/file.item.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/file.item.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,54 @@ + +File display - <span + i18n:name="tracker" tal:replace="config/TRACKER_NAME" /> +File display + + + +

    + You are not allowed to view this page.

    + +

    + Please login with your username and password.

    + +
    + + + + + + + + + + + + + + + +
    Name
    Content Type
    +   + + + + submit button here
    +
    + +download + + + + + + + Added: tracker/instances/meta/html/help.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/help.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,39 @@ + + + + + + x + + + + + + + + + + +
    + Added: tracker/instances/meta/html/help_controls.js ============================================================================== --- (empty file) +++ tracker/instances/meta/html/help_controls.js Fri Jul 27 16:30:01 2007 @@ -0,0 +1,324 @@ +// initial values for either Nosy, Superseder, Topic and Waiting On, +// depending on which has called +original_field = form[field].value; + +// Some browsers (ok, IE) don't define the "undefined" variable. +undefined = document.geez_IE_is_really_friggin_annoying; + +function trim(value) { + var temp = value; + var obj = /^(\s*)([\W\w]*)(\b\s*$)/; + if (obj.test(temp)) { temp = temp.replace(obj, '$2'); } + var obj = / /g; + while (temp.match(obj)) { temp = temp.replace(obj, " "); } + return temp; +} + +function determineList() { + // generate a comma-separated list of the checked items + var list = new String(''); + + // either a checkbox object or an array of checkboxes + var check = document.frm_help.check; + + if ((check.length == undefined) && (check.checked != undefined)) { + // only one checkbox on page + if (check.checked) { + list = check.value; + } + } else { + // array of checkboxes + for (box=0; box < check.length; box++) { + if (check[box].checked) { + if (list.length == 0) { + separator = ''; + } + else { + separator = ','; + } + // we used to use an Array and push / join, but IE5.0 sux + list = list + separator + check[box].value; + } + } + } + return list; +} + +/** + * update the field in the opening window; + * the text_field variable must be set in the calling page + */ +function updateOpener() { + // write back to opener window + if (document.frm_help.check==undefined) { return; } + form[field].value = text_field.value; +} + +function updateList() { + // write back to opener window + if (document.frm_help.check==undefined) { return; } + form[field].value = determineList(); +} + +function updatePreview() { + // update the preview box + if (document.frm_help.check==undefined) { return; } + 
writePreview(determineList()); +} + +function clearList() { + // uncheck all checkboxes + if (document.frm_help.check==undefined) { return; } + for (box=0; box < document.frm_help.check.length; box++) { + document.frm_help.check[box].checked = false; + } +} + +function reviseList_framed(form, textfield) { + // update the checkboxes based on the preview field + // alert('reviseList_framed') + // alert(form) + if (form.check==undefined) + return; + // alert(textfield) + var to_check; + var list = textfield.value.split(","); + if (form.check.length==undefined) { + check = form.check; + to_check = false; + for (val in list) { + if (check.value==trim(list[val])) { + to_check = true; + break; + } + } + check.checked = to_check; + } else { + for (box=0; box < form.check.length; box++) { + check = form.check[box]; + to_check = false; + for (val in list) { + if (check.value==trim(list[val])) { + to_check = true; + break; + } + } + check.checked = to_check; + } + } +} + +function reviseList(vals) { + // update the checkboxes based on the preview field + if (document.frm_help.check==undefined) { return; } + var to_check; + var list = vals.split(","); + if (document.frm_help.check.length==undefined) { + check = document.frm_help.check; + to_check = false; + for (val in list) { + if (check.value==trim(list[val])) { + to_check = true; + break; + } + } + check.checked = to_check; + } else { + for (box=0; box < document.frm_help.check.length; box++) { + check = document.frm_help.check[box]; + to_check = false; + for (val in list) { + if (check.value==trim(list[val])) { + to_check = true; + break; + } + } + check.checked = to_check; + } + } +} + +function resetList() { + // reset preview and check boxes to initial values + if (document.frm_help.check==undefined) { return; } + writePreview(original_field); + reviseList(original_field); +} + +function writePreview(val) { + // writes a value to the text_preview + document.frm_help.text_preview.value = val; +} + +function 
focusField(name) { + for(i=0; i < document.forms.length; ++i) { + var obj = document.forms[i].elements[name]; + if (obj && obj.focus) {obj.focus();} + } +} + +function selectField(name) { + for(i=0; i < document.forms.length; ++i) { + var obj = document.forms[i].elements[name]; + if (obj && obj.focus){obj.focus();} + if (obj && obj.select){obj.select();} + } +} + +function checkRequiredFields(fields) +{ + var bonk=''; + var res=''; + var argv = checkRequiredFields.arguments; + var argc = argv.length; + var input = ''; + var val=''; + + for (var i=0; i < argc; i++) { + fi = argv[i]; + input = document.getElementById(fi); + if (input) { + val = input.value + if (val == '' || val == '-1' || val == -1) { + if (res == '') { + res = fi; + bonk = input; + } else { + res += ', '+fi; + } + } + } else { + alert('Field with id='+fi+' not found!') + } + } + if (res == '') { + return submit_once(); + } else { + alert('Missing value here ('+res+')!'); + if (window.event && window.event.returnvalue) { + event.returnValue = 0; // work-around for IE + } + bonk.focus(); + return false; + } +} + +/** + * seeks the given value (2nd argument) + * in the value of the given input element (1st argument), + * which is considered a list of values, separated by commas + */ +function has_value(input, val) +{ + var actval = input.value + var arr = feld.value.split(','); + var max = arr.length; + for (i=0;i remove_val() + * + * This will work nicely even for batched lists + */ +function append_val(name, val) +{ + var feld = document.itemSynopsis[name]; + var actval = feld.value; + if (actval == '') { + feld.value = val + } else { + var arr = feld.value.split(','); + var max = arr.length; + for (i=0;i append_val() + */ +function remove_val(name, val) +{ + var feld = document.itemSynopsis[name]; + var actval = feld.value; + var changed=false; + if (actval == '') { + return + } else { + var arr = feld.value.split(','); + var max = arr.length; + var neu = '' + for (i=0;i +List of classes - <span + 
i18n:name="tracker" tal:replace="config/TRACKER_NAME" /> +List of classes + + + + + + + + + + + + + +
    + classname +
    nametype
    + + +
    + Added: tracker/instances/meta/html/home.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/home.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,11 @@ + + + Added: tracker/instances/meta/html/issue.index.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/issue.index.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,166 @@ + + + + <span tal:omit-tag="true" i18n:translate="" >List of issues</span> + <span tal:condition="request/dispname" + tal:replace="python:' - %s '%request.dispname" + /> - <span tal:replace="config/TRACKER_NAME" /> + + + List of issues + + + + +

    + You are not allowed to view this page.

    + +

    + Please login with your username and password.

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    PriorityIDCreationActivityActorTopicTitleStatusCreatorAssigned To
    + + + +
           + title +    
    + + + + + + +
    +
    + +Download as CSV + +
    + + + + + + + + + + + + + + + + + + + +
    + Sort on: + + + Descending: +
    + Group on: + + + Descending: +
    + + +
    +
    + +
    + + +
    vim: sw=1 ts=8 et si + + Added: tracker/instances/meta/html/issue.item.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/issue.item.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,198 @@ + + + +<tal:block condition="context/id" i18n:translate="" + >Issue <span tal:replace="context/id" i18n:name="id" + />: <span tal:replace="context/title" i18n:name="title" + /> - <span tal:replace="config/TRACKER_NAME" i18n:name="tracker" +/></tal:block> +<tal:block condition="not:context/id" i18n:translate="" + >New Issue - <span tal:replace="config/TRACKER_NAME" i18n:name="tracker" +/></tal:block> + + + New Issue + New Issue Editing + Issue + Issue Editing + + + + +

    + You are not allowed to view this page.

    + +

    + Please login with your username and password.

    + +
    + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Titletitle
    PrioritypriorityStatusstatus
    Superseder + + + +
    View: + +
    +
    Nosy List + +
    +
    Assigned Toassignedto menuTopics + + +
    Change Note + +
    File
    +   + + + + submit button + Make a copy +
    +
    + + + + + + + + +
    Note: highlighted fields are required.
    +
    + +

    + Created on + by , + last changed + by . +

    + + + + + + + + + + + + + + + + +
    Files
    File nameUploadedTypeEditRemove
    + dld link + + creator's name, + creation date + + edit + +
    + + + +
    +
    + + + + + + + + + + + + + + +
    Messages
    msg (view)Author: Date: +
    + + + +
    +
    +
    content
    +
    + + + +
    + + + +
    + Added: tracker/instances/meta/html/issue.search.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/issue.search.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,232 @@ + +Issue searching - <span + i18n:name="tracker" tal:replace="config/TRACKER_NAME" /> +Issue searching + + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
     Filter onDisplaySort onGroup on
    All text*:   
    Title: 
    Topic:
    ID: 
    Creation Date:
    Creator: + +
    Activity: 
    Actor: + +  
    Priority: + +
    Status: + + + + +
    Assigned to: + + + + +
    No Sort or group:  
    Pagesize:
    Start With:
    Sort Descending: +
    Group Descending: +
    Query name**: + + +
    +   + +
      + + *: The "all text" field will look in message bodies and issue titles +
    + + **: If you supply a name, the query will be saved off and available as a + link in the sidebar + +
    + +
    + + +
    + Added: tracker/instances/meta/html/keyword.item.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/keyword.item.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,56 @@ + + +Keyword editing - <span + i18n:name="tracker" tal:replace="config/TRACKER_NAME" /> +Keyword editing + + + + + + + + + + +
    Existing Keywords
    + keyword here +
    + To edit an existing keyword (for spelling or typing errors), + click on its entry above. +
    + +

    + To create a new keyword, enter it below and click "Submit New Entry". +

    + +
    + + + + + + + + + + + +
    Keywordname
    +   + + + + submit button will go here +
    +
    + + +
    + Added: tracker/instances/meta/html/msg.index.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/msg.index.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,26 @@ + +List of messages - <span tal:replace="config/TRACKER_NAME" + i18n:name="tracker"/> +Message listing + + + + + + + + + + + + + + + +
    Messages
    authordate
    content
    + + +
    + Added: tracker/instances/meta/html/msg.item.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/msg.item.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,84 @@ + + + +<tal:block condition="context/id" i18n:translate="" + >Message <span tal:replace="context/id" i18n:name="id" + /> - <span tal:replace="config/TRACKER_NAME" i18n:name="tracker" +/></tal:block> +<tal:block condition="not:context/id" i18n:translate="" + >New Message - <span tal:replace="config/TRACKER_NAME" i18n:name="tracker" +/></tal:block> + + + New Message + New Message Editing + Message + Message Editing + + + +

    + You are not allowed to view this page.

    + +

    + Please login with your username and password.

    + +
    + + + + + + + + + + + + + + + + +
    Author
    Recipients
    Date
    + + + + + + +
    Content
    + + + + + + + + + + + +
    Files
    File nameUploaded
    + dld link + + creator's name, + creation date +
    + + + +
    + + +
    + Added: tracker/instances/meta/html/page.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/page.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,346 @@ + + + +title goes here + + + + + + + + + + + + + + + + + + + + + + + +
      +
    +

    body title

    +
    + +
    +

    +

    + + clear this message +

    +
    Page content goes here
    + +
    +
    + + + +
    + + + + + + + + + + + + + + + + (cal) + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
      +
    • + +
    • +
    • + + +
    • +
    + + + + + + + + + + + + + + + + + + + + + + + + + + Added: tracker/instances/meta/html/query.edit.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/query.edit.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,110 @@ + + +"Your Queries" Editing - <span tal:replace="config/TRACKER_NAME" + i18n:name="tracker" /> +"Your Queries" Editing + + + +You are not allowed to edit queries. + + + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    QueryInclude in "Your Queries"EditPrivate to you? 
    query + + + [query is retired]
    query + + edit + + + +
    query + + + edit + [not yours to edit]
    + + + +
    + +
    + +
    + Added: tracker/instances/meta/html/query.item.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/query.item.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,4 @@ + + + + Added: tracker/instances/meta/html/style.css ============================================================================== --- (empty file) +++ tracker/instances/meta/html/style.css Fri Jul 27 16:30:01 2007 @@ -0,0 +1,444 @@ +/* main page styles */ +body.body { + font-family: sans-serif, Arial, Helvetica; + background-color: white; + color: #333; + margin: 0; +} +a[href]:hover { + color:blue; + text-decoration: underline; +} +a[href], a[href]:link { + color:blue; + text-decoration: none; +} + +table.body { + border: 0; + padding: 0; + border-spacing: 0; + border-collapse: separate; +} + +td.page-header-left { + padding: 5px; + border-bottom: 1px solid #444; +} +td.sidebar { + padding: 1px 0 0 1px; + white-space: nowrap; +} + +/* don't display the sidebar when printing */ + at media print { + td.page-header-left { + display: none; + } + td.sidebar { + display: none; + } + .index-controls { + display: none; + } + #searchbox { + display: none; + } +} + +td.page-header-top { + padding: 5px; + border-bottom: 1px solid #444; +} +#searchbox { + float: right; +} + +div#body-title { + float: left; +} + + +div#searchbox { + float: right; + padding-top: 1em; +} + +div#searchbox input#search-text { + width: 10em; +} + +form { + margin: 0; +} + +textarea { + font-family: monospace; +} + +td.sidebar p.classblock { + padding: 2px 5px 2px 5px; + margin: 1px; + border: 1px solid #444; + background-color: #eee; +} + +td.sidebar p.userblock { + padding: 2px 5px 2px 5px; + margin: 1px 1px 1px 1px; + border: 1px solid #444; + background-color: #eef; +} + +.form-small { + padding: 0; + font-size: 75%; +} + + +td.content { + padding: 1px 5px 1px 5px; + vertical-align: top; + width: 100%; +} + +td.date, th.date { + white-space: nowrap; +} + 
+p.ok-message { + background-color: #22bb22; + padding: 5px; + color: white; + font-weight: bold; +} +p.error-message { + background-color: #bb2222; + padding: 5px; + color: white; + font-weight: bold; +} +p.error-message a[href] { + color: white; + text-decoration: underline; +} + + +/* style for search forms */ +ul.search-checkboxes { + display: inline; + padding: 0; + list-style: none; +} +ul.search-checkboxes > li { + display: inline; + padding-right: .5em; +} + + +/* style for forms */ +table.form { + padding: 2px; + border-spacing: 0; + border-collapse: separate; +} + +table.form th { + color: #338; + text-align: right; + vertical-align: top; + font-weight: normal; + white-space: nowrap; +} + +table.form th.header { + font-weight: bold; + background-color: #eef; + text-align: left; +} + +table.form th.required { + font-weight: bold; +} + +table.form td { + color: #333; + empty-cells: show; + vertical-align: top; +} + +table.form td.optional { + font-weight: bold; + font-style: italic; +} + +table.form td.html { + color: #777; +} + +/* style for lists */ +table.list { + border-spacing: 0; + border-collapse: separate; + width: 100%; +} + +table.list th { + padding: 0 4px 0 4px; + color: #404070; + background-color: #eef; + border: 1px solid white; + vertical-align: top; + empty-cells: show; +} +table.list th a[href]:hover { color: #404070 } +table.list th a[href]:link { color: #404070 } +table.list th a[href] { color: #404070 } +table.list th.group { + background-color: #f4f4ff; + text-align: center; +} + +table.list td { + padding: 0 4px 0 4px; + border: 1px solid white; + color: #404070; + background-color: #efefef; + vertical-align: top; + empty-cells: show; +} + +table.list tr.navigation th { + width: 33%; + border-style: hidden; + text-align: center; +} +table.list tr.navigation td { + border: none +} +table.list tr.navigation th:first-child { + text-align: left; +} +table.list tr.navigation th:last-child { + text-align: right; +} + + +/* style for message 
displays */ +table.messages { + border-spacing: 0; + border-collapse: separate; + width: 100%; +} + +table.messages th.header{ + padding-top: 10px; + border-bottom: 1px solid gray; + font-weight: bold; + background-color: white; + color: #707040; +} + +table.messages th { + font-weight: bold; + color: black; + text-align: left; + border-bottom: 1px solid #afafaf; +} + +table.messages td { + font-family: monospace; + background-color: #efefef; + border-bottom: 1px solid #afafaf; + color: black; + empty-cells: show; + border-right: 1px solid #afafaf; + vertical-align: top; + padding: 2px 5px 2px 5px; + max-width: 50em; +} + +table.messages td pre { + white-space: pre-wrap; /* css-3 */ + white-space: -moz-pre-wrap; /* Mozilla, since 1999 */ + white-space: -pre-wrap; /* Opera 4-6 */ + white-space: -o-pre-wrap; /* Opera 7 */ + word-wrap: break-word; /* Internet Explorer 5.5+ */ +} + + +table.messages td:first-child { + border-left: 1px solid #afafaf; + border-right: 1px solid #afafaf; +} + +/* style for file displays */ +table.files { + border-spacing: 0; + border-collapse: separate; + width: 100%; +} + +table.files th.header{ + padding-top: 10px; + border-bottom: 1px solid gray; + font-weight: bold; + background-color: white; + color: #707040; +} + +table.files th { + border-bottom: 1px solid #afafaf; + font-weight: bold; + text-align: left; +} + +table.files td { + font-family: monospace; + empty-cells: show; +} + +/* style for history displays */ +table.history { + border-spacing: 0; + border-collapse: separate; + width: 100%; +} + +table.history th.header{ + padding-top: 10px; + border-bottom: 1px solid gray; + font-weight: bold; + background-color: white; + color: #707040; + font-size: 100%; +} + +table.history th { + border-bottom: 1px solid #afafaf; + font-weight: bold; + text-align: left; + font-size: 90%; +} + +table.history td { + font-size: 90%; + vertical-align: top; + empty-cells: show; +} + + +/* style for class list */ +table.classlist { + border-spacing: 
0; + border-collapse: separate; + width: 100%; +} + +table.classlist th.header{ + padding-top: 10px; + border-bottom: 1px solid gray; + font-weight: bold; + background-color: white; + color: #707040; +} + +table.classlist th { + font-weight: bold; + text-align: left; +} + + +/* style for class help display */ +table.classhelp { /* the table-layout: fixed; */ + table-layout: fixed; /* compromises quality for speed */ + overflow: hidden; + font-size: .9em; + padding-bottom: 3em; +} + +table.classhelp th { + font-weight: normal; + text-align: left; + color: #444; + background-color: #efefef; + border-bottom: 1px solid #afafaf; + border-top: 1px solid #afafaf; + text-transform: uppercase; + vertical-align: middle; + line-height:1.5em; +} + +table.classhelp td { + vertical-align: middle; + padding-right: .2em; + border-bottom: 1px solid #efefef; + text-align: left; + empty-cells: show; + white-space: nowrap; + vertical-align: middle; +} + +table.classhelp tr:hover { + background-color: #eee; +} + +label.classhelp-label { + cursor: pointer; +} + +#classhelp-controls { + position: fixed; + display: block; + top: auto; + right: 0; + bottom: 0; + left: 0; + padding: .5em; + border-top: 2px solid #444; + background-color: #eee; +} + +#classhelp-controls input.apply { + width: 7em; + font-weight: bold; + margin-right: 2em; + margin-left: 2em; +} + +#classhelp-controls input.preview { + margin-right: 3em; + margin-left: 1em; +} + +/* style for "other" displays */ +table.otherinfo { + border-spacing: 0; + border-collapse: separate; + width: 100%; +} + +table.otherinfo th.header{ + padding-top: 10px; + border-bottom: 1px solid gray; + font-weight: bold; + background-color: white; + color: #707040; +} + +table.otherinfo th { + border-bottom: 1px solid #afafaf; + font-weight: bold; + text-align: left; +} +input[type="text"]:focus, +input[type="checkbox"]:focus, +input[type="radio"]:focus, +input[type="password"]:focus, +textarea:focus, select:focus { + background-color: #ffffc0; 
+} + +/* vim: sts=2 sw=2 et +*/ +/* SHA: 7243da9b4e481a0b95a5367b45baaaa45fab8998 */ Added: tracker/instances/meta/html/user.forgotten.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/user.forgotten.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,44 @@ + + +Password reset request - <span + i18n:name="tracker" tal:replace="config/TRACKER_NAME" /> +Password reset request + + +

    You have two options if you have forgotten your password. +If you know the email address you registered with, enter it below.

    + +
    + + + + + + + + + +
    Email Address:
      + + + +
    + +

    Or, if you know your username, then enter it below.

    + + + + +
    Username:
    +
    + +

    A confirmation email will be sent to you - +please follow the instructions within it to complete the reset process.

    + + +
    + Added: tracker/instances/meta/html/user.help-search.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/user.help-search.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,86 @@ + + + Search input for user helper + + + + + +
    +    
    + + + + + + + + + + + + + + + + + + + + + + + + +
    Name
    Phone
    role + +
      + + + + + + +
    + +
    +
    +
    +  
    +
    +
    
    Added: tracker/instances/meta/html/user.help.html
    ==============================================================================
    --- (empty file)
    +++ tracker/instances/meta/html/user.help.html	Fri Jul 27 16:30:01 2007
    @@ -0,0 +1,50 @@
    +
    +
    +  
    +      
    +      
    +      
    +      <tal:x i18n:translate=""><tal:x i18n:name="property"
    +       tal:content="property" i18n:translate="" /> help - <span i18n:name="tracker"
    +	       tal:replace="config/TRACKER_NAME" /></tal:x>
    +      
    +      
    +      
    +  
    +
    +  
    +  
    +  
    +  
    +  
    +
    +
    +  <body>
    +<p i18n:translate="">
    +Your browser is not capable of using frames; you should be redirected immediately,
    +or visit <a href="#" tal:attributes="href string:?${qs}&template=help-noframes"
    +i18n:name="link">this link</a>.
    +</p>
    +</body>
    +
    +
    +
    +
    
    Added: tracker/instances/meta/html/user.index.html
    ==============================================================================
    --- (empty file)
    +++ tracker/instances/meta/html/user.index.html	Fri Jul 27 16:30:01 2007
    @@ -0,0 +1,46 @@
    +
    +
    +User listing - <span
    + i18n:name="tracker" tal:replace="config/TRACKER_NAME" />
    +User listing
    +
    +
    +You are not allowed to view this page.
    +
    +Please login with your username and password.
    +
    +
    +
    + 
    + 
    + 
    + 
    + 
    + 
    +
    +
    +
    + 
    + 
    + 
    + 
    + 
    + 
    +
    +
    +
    UsernameReal nameOrganisationEmail addressPhone numberRetire
    + username +      + retire +
    + + +
    + Added: tracker/instances/meta/html/user.item.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/user.item.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,169 @@ + + + +<tal:if condition="context/id" i18n:translate="" + >User <span tal:replace="context/id" i18n:name="id" + />: <span tal:replace="context/username" i18n:name="title" + /> - <span tal:replace="config/TRACKER_NAME" i18n:name="tracker" +/></tal:if> +<tal:if condition="not:context/id" i18n:translate="" + >New User - <span tal:replace="config/TRACKER_NAME" i18n:name="tracker" +/></tal:if> + + + + + + + New User + New User Editing + User + User Editing + + + + +

    + You are not allowed to view this page.

    + +

    + Please login with your username and password.

    + +
    + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Name
    Login Name
    Login Password
    Confirm Password
    + + + + + + + (to give the user more than one role, + enter a comma,separated,list) +
    Phone
    Organisation
    Timezone + (this is a numeric hour offset, the default is + ) +
    E-mail address + calvin at the-z.org + + + +   +
    + +
    +   + + + + +
    +
    + + + + + + + + +
    Note: highlighted fields are required.
    +
    + + + +
    + + + +
    + Added: tracker/instances/meta/html/user.register.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/user.register.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,82 @@ + + +Registering with <span i18n:name="tracker" + tal:replace="db/config/TRACKER_NAME" /> +Registering with + + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Namerealname
    Login Nameusername
    Login Passwordpassword
    Confirm Passwordpassword
    Rolesroles + +
    Phonephone
    Organisationorganisation
    E-mail addressaddress
    Alternate E-mail addresses
    One address per line
    alternate_addresses
      + + + + +
    +
    + + + + + + + + +
    Note: highlighted fields are required.
    +
    + + + +
    + Added: tracker/instances/meta/html/user.rego_progress.html ============================================================================== --- (empty file) +++ tracker/instances/meta/html/user.rego_progress.html Fri Jul 27 16:30:01 2007 @@ -0,0 +1,17 @@ + + +Registration in progress - <span i18n:name="tracker" + tal:replace="config/TRACKER_NAME" /> +Registration in progress... + + +

    You will shortly receive an email +to confirm your registration. To complete the registration process, +visit the link indicated in the email. +

    + + +
    + Added: tracker/instances/meta/html/user_utils.js ============================================================================== --- (empty file) +++ tracker/instances/meta/html/user_utils.js Fri Jul 27 16:30:01 2007 @@ -0,0 +1,111 @@ +// User Editing Utilities + +/** + * for new users: + * Depending on the input field which calls it, takes the value + * and dispatches it to certain other input fields: + * + * address + * +-> username + * | `-> realname + * `-> organisation + */ +function split_name(that) { + var raw = that.value + var val = trim(raw) + if (val == '') { + return + } + var username='' + var realname='' + var address='' + switch (that.name) { + case 'address': + address=val + break + case 'username': + username=val + break + case 'realname': + realname=val + break + default: + alert('Ooops - unknown name field '+that.name+'!') + return + } + var the_form = that.form; + + function field_empty(name) { + return the_form[name].value == '' + } + + // no break statements - on purpose! 
+ switch (that.name) { + case 'address': + var split1 = address.split('@') + if (field_empty('username')) { + username = split1[0] + the_form.username.value = username + } + if (field_empty('organisation')) { + the_form.organisation.value = default_organisation(split1[1]) + } + case 'username': + if (field_empty('realname')) { + realname = Cap(username.split('.').join(' ')) + the_form.realname.value = realname + } + case 'realname': + if (field_empty('username')) { + username = Cap(realname.replace(' ', '.')) + the_form.username.value = username + } + if (the_form.firstname && the_form.lastname) { + var split2 = realname.split(' ') + var firstname='', lastname='' + firstname = split2[0] + lastname = split2.slice(1).join(' ') + if (field_empty('firstname')) { + the_form.firstname.value = firstname + } + if (field_empty('lastname')) { + the_form.lastname.value = lastname + } + } + } +} + +function SubCap(str) { + switch (str) { + case 'de': case 'do': case 'da': + case 'du': case 'von': + return str; + } + if (str.toLowerCase().slice(0,2) == 'mc') { + return 'Mc'+str.slice(2,3).toUpperCase()+str.slice(3).toLowerCase() + } + return str.slice(0,1).toUpperCase()+str.slice(1).toLowerCase() +} + +function Cap(str) { + var liz = str.split(' ') + for (var i=0; i/db/files/] +# type = String() [MIME type of the content, default 'text/plain'] +msg = FileClass(db, "msg", + author=Link("user", do_journal='no'), + recipients=Multilink("user", do_journal='no'), + date=Date(), + summary=String(), + files=Multilink("file"), + messageid=String(), + inreplyto=String()) + +file = FileClass(db, "file", + name=String()) + +# IssueClass automatically gets these properties in addition to the Class ones: +# title = String() +# messages = Multilink("msg") +# files = Multilink("file") +# nosy = Multilink("user") +# superseder = Multilink("issue") +issue = IssueClass(db, "issue", + assignedto=Link("user"), + topic=Multilink("keyword"), + priority=Link("priority"), + status=Link("status")) + +# 
+# TRACKER SECURITY SETTINGS +# +# See the configuration and customisation document for information +# about security setup. + +# +# REGULAR USERS +# +# Give the regular users access to the web and email interface +db.security.addPermissionToRole('User', 'Web Access') +db.security.addPermissionToRole('User', 'Email Access') + +# Assign the access and edit Permissions for issue, file and message +# to regular users now +for cl in 'issue', 'file', 'msg', 'keyword': + db.security.addPermissionToRole('User', 'View', cl) + db.security.addPermissionToRole('User', 'Edit', cl) + db.security.addPermissionToRole('User', 'Create', cl) +for cl in 'priority', 'status': + db.security.addPermissionToRole('User', 'View', cl) + +# May users view other user information? Comment these lines out +# if you don't want them to +db.security.addPermissionToRole('User', 'View', 'user') + +# Users should be able to edit their own details -- this permission is +# limited to only the situation where the Viewed or Edited item is their own. +def own_record(db, userid, itemid): + '''Determine whether the userid matches the item being accessed.''' + return userid == itemid +p = db.security.addPermission(name='View', klass='user', check=own_record, + description="User is allowed to view their own user details") +db.security.addPermissionToRole('User', p) +p = db.security.addPermission(name='Edit', klass='user', check=own_record, + description="User is allowed to edit their own user details") +db.security.addPermissionToRole('User', p) + +# Users should be able to edit and view their own queries. They should also +# be able to view any marked as not private. 
They should not be able to +# edit others' queries, even if they're not private +def view_query(db, userid, itemid): + private_for = db.query.get(itemid, 'private_for') + if not private_for: return True + return userid == private_for +def edit_query(db, userid, itemid): + return userid == db.query.get(itemid, 'creator') +p = db.security.addPermission(name='View', klass='query', check=view_query, + description="User is allowed to view their own and public queries") +db.security.addPermissionToRole('User', p) +p = db.security.addPermission(name='Edit', klass='query', check=edit_query, + description="User is allowed to edit their queries") +db.security.addPermissionToRole('User', p) +p = db.security.addPermission(name='Create', klass='query', + description="User is allowed to create queries") +db.security.addPermissionToRole('User', p) + + +# +# ANONYMOUS USER PERMISSIONS +# +# Let anonymous users access the web interface. Note that almost all +# trackers will need this Permission. The only situation where it's not +# required is in a tracker that uses an HTTP Basic Authenticated front-end. +db.security.addPermissionToRole('Anonymous', 'Web Access') + +# Let anonymous users access the email interface (note that this implies +# that they will be registered automatically, hence they will need the +# "Create" user Permission below) +# This is disabled by default to stop spam from auto-registering users on +# public trackers. +#db.security.addPermissionToRole('Anonymous', 'Email Access') + +# Assign the appropriate permissions to the anonymous user's Anonymous +# Role. 
Choices here are: +# - Allow anonymous users to register +db.security.addPermissionToRole('Anonymous', 'Create', 'user') + +# Allow anonymous users access to view issues (and the related, linked +# information) +for cl in 'issue', 'file', 'msg', 'keyword', 'priority', 'status': + db.security.addPermissionToRole('Anonymous', 'View', cl) + +# [OPTIONAL] +# Allow anonymous users access to create or edit "issue" items (and the +# related file and message items) +#for cl in 'issue', 'file', 'msg': +# db.security.addPermissionToRole('Anonymous', 'Create', cl) +# db.security.addPermissionToRole('Anonymous', 'Edit', cl) + + +# vim: set filetype=python sts=4 sw=4 et si : +#SHA: e3fa5650097bb3baf7e65ecbfb138b38d2d70cae From python-checkins at python.org Fri Jul 27 16:31:36 2007 From: python-checkins at python.org (erik.forsberg) Date: Fri, 27 Jul 2007 16:31:36 +0200 (CEST) Subject: [Python-checkins] r56579 - tracker/instances/meta/detectors tracker/instances/meta/detectors/config.ini tracker/instances/meta/detectors/config.ini.template Message-ID: <20070727143136.9C9FE1E400A@bag.python.org> Author: erik.forsberg Date: Fri Jul 27 16:31:36 2007 New Revision: 56579 Added: tracker/instances/meta/detectors/config.ini.template - copied, changed from r56578, tracker/instances/meta/detectors/config.ini Removed: tracker/instances/meta/detectors/config.ini Modified: tracker/instances/meta/detectors/ (props changed) Log: Don't expose the tracker-discuss mail address Deleted: /tracker/instances/meta/detectors/config.ini ============================================================================== --- /tracker/instances/meta/detectors/config.ini Fri Jul 27 16:31:36 2007 +++ (empty file) @@ -1,4 +0,0 @@ -[main] - -msgchange_email = tracker-discuss at python.org - Copied: tracker/instances/meta/detectors/config.ini.template (from r56578, tracker/instances/meta/detectors/config.ini) ============================================================================== --- 
tracker/instances/meta/detectors/config.ini (original) +++ tracker/instances/meta/detectors/config.ini.template Fri Jul 27 16:31:36 2007 @@ -1,4 +1,4 @@ [main] -msgchange_email = tracker-discuss at python.org +msgchange_email = tracker-discuss at example.com From python-checkins at python.org Fri Jul 27 17:00:56 2007 From: python-checkins at python.org (erik.forsberg) Date: Fri, 27 Jul 2007 17:00:56 +0200 (CEST) Subject: [Python-checkins] r56580 - tracker/instances/python-dev-spambayes-integration/schema.py Message-ID: <20070727150056.C39F51E4017@bag.python.org> Author: erik.forsberg Date: Fri Jul 27 17:00:56 2007 New Revision: 56580 Modified: tracker/instances/python-dev-spambayes-integration/schema.py Log: More permissions fiddling. Modified: tracker/instances/python-dev-spambayes-integration/schema.py ============================================================================== --- tracker/instances/python-dev-spambayes-integration/schema.py (original) +++ tracker/instances/python-dev-spambayes-integration/schema.py Fri Jul 27 17:00:56 2007 @@ -148,7 +148,7 @@ for cl in ('issue_type', 'severity', 'component', 'version', 'priority', 'status', 'resolution', - 'issue', 'file', 'msg', 'keyword'): + 'issue', 'keyword'): db.security.addPermissionToRole('User', 'View', cl) db.security.addPermissionToRole('Anonymous', 'View', cl) @@ -157,10 +157,20 @@ self.klassname = klassname def __call__(self, db, userid, itemid): + cutoff_score = float(db.config.detectors['SPAMBAYES_SPAM_CUTOFF']) klass = db.getclass(self.klassname) - roles = set(db.user.get(userid, "roles").lower().split(",")) - allowed = set(db.config.detectors['SPAMBAYES_MAY_VIEW_SPAM'].lower().split(",")) - return bool(roles.intersection(allowed)) + + try: + score = klass.get(itemid, 'spambayes_score') + except KeyError: + return True + + if score > cutoff_score: + roles = set(db.user.get(userid, "roles").lower().split(",")) + allowed = set(db.config.detectors['SPAMBAYES_MAY_VIEW_SPAM'].lower().split(",")) + 
return bool(roles.intersection(allowed)) + + return True for cl in ('file', 'msg'): p = db.security.addPermission(name='View', klass=cl, From python-checkins at python.org Fri Jul 27 17:30:17 2007 From: python-checkins at python.org (erik.forsberg) Date: Fri, 27 Jul 2007 17:30:17 +0200 (CEST) Subject: [Python-checkins] r56581 - in tracker/instances/meta: detectors/mailonmsgchanges.py detectors/spambayes.py extensions/spambayes.py html/file.item.html html/issue.item.html html/msg.item.html schema.py Message-ID: <20070727153017.1610A1E400F@bag.python.org> Author: erik.forsberg Date: Fri Jul 27 17:30:16 2007 New Revision: 56581 Added: tracker/instances/meta/detectors/spambayes.py (contents, props changed) tracker/instances/meta/extensions/spambayes.py (contents, props changed) Modified: tracker/instances/meta/detectors/mailonmsgchanges.py tracker/instances/meta/html/file.item.html tracker/instances/meta/html/issue.item.html tracker/instances/meta/html/msg.item.html tracker/instances/meta/schema.py Log: Integrate with spambayes. 
Modified: tracker/instances/meta/detectors/mailonmsgchanges.py ============================================================================== --- tracker/instances/meta/detectors/mailonmsgchanges.py (original) +++ tracker/instances/meta/detectors/mailonmsgchanges.py Fri Jul 27 17:30:16 2007 @@ -22,6 +22,15 @@ from roundup import roundupdb, hyperdb +def is_spam(db, msgid): + cutoff_score = float(db.config.detectors['SPAMBAYES_SPAM_CUTOFF']) + + msg = db.getnode("msg", msgid) + if msg.has_key('spambayes_score') and \ + msg['spambayes_score'] > cutoff_score: + return False + return True + def mailonmsgchange(db, cl, nodeid, oldvalues): ''' busybody mail ''' @@ -36,7 +45,7 @@ else: note = cl.generateChangeNote(nodeid, oldvalues) - for msgid in msgIDS: + for msgid in filter(lambda x: is_spam(db, x), msgIDS): try: cl.send_message(nodeid, msgid, note, sendto) except roundupdb.MessageSendError, message: Added: tracker/instances/meta/detectors/spambayes.py ============================================================================== --- (empty file) +++ tracker/instances/meta/detectors/spambayes.py Fri Jul 27 17:30:16 2007 @@ -0,0 +1 @@ +link ../../spambayes_integration/detectors/spambayes.py \ No newline at end of file Added: tracker/instances/meta/extensions/spambayes.py ============================================================================== --- (empty file) +++ tracker/instances/meta/extensions/spambayes.py Fri Jul 27 17:30:16 2007 @@ -0,0 +1 @@ +link ../../spambayes_integration/extensions/spambayes.py \ No newline at end of file Modified: tracker/instances/meta/html/file.item.html ============================================================================== --- tracker/instances/meta/html/file.item.html (original) +++ tracker/instances/meta/html/file.item.html Fri Jul 27 17:30:16 2007 @@ -29,6 +29,16 @@ + SpamBayes Score + + + + + Marked as misclassified + + + +   @@ -42,10 +52,30 @@ - + File has been classified as spam.

    + +
    download +

    + Files classified as spam are not available for download by + unathorized users. If you think the file has been misclassified, + please login and click on the button for reclassification. +

    + + +
    + + + + +
    + Modified: tracker/instances/meta/html/issue.item.html ============================================================================== --- tracker/instances/meta/html/issue.item.html (original) +++ tracker/instances/meta/html/issue.item.html Fri Jul 27 17:30:16 2007 @@ -182,7 +182,12 @@ -
    content
    +

    + Message has been classified as spam. +

    +
    content
    Modified: tracker/instances/meta/html/msg.item.html ============================================================================== --- tracker/instances/meta/html/msg.item.html (original) +++ tracker/instances/meta/html/msg.item.html Fri Jul 27 17:30:16 2007 @@ -48,12 +48,45 @@ Date + + + SpamBayes Score + + + + + Marked as misclassified + + + +

    + Message has been classified as spam

    + + - + +
    Content
    +
    + + + + +
    +
    + Message has been classified as spam and is therefore not + available to unathorized users. If you think this is + incorrect, please login and report the message as being + misclassified. +
    Modified: tracker/instances/meta/schema.py ============================================================================== --- tracker/instances/meta/schema.py (original) +++ tracker/instances/meta/schema.py Fri Jul 27 17:30:16 2007 @@ -58,10 +58,14 @@ summary=String(), files=Multilink("file"), messageid=String(), - inreplyto=String()) + inreplyto=String(), + spambayes_score=Number(), + spambayes_misclassified=Boolean(),) file = FileClass(db, "file", - name=String()) + name=String(), + spambayes_score=Number(), + spambayes_misclassified=Boolean(),) # IssueClass automatically gets these properties in addition to the Class ones: # title = String() @@ -81,6 +85,8 @@ # See the configuration and customisation document for information # about security setup. +db.security.addRole(name='Coordinator', description='A coordinator') + # # REGULAR USERS # @@ -155,9 +161,52 @@ # Allow anonymous users access to view issues (and the related, linked # information) -for cl in 'issue', 'file', 'msg', 'keyword', 'priority', 'status': +for cl in 'issue', 'keyword', 'priority', 'status': db.security.addPermissionToRole('Anonymous', 'View', cl) +class may_view_spam: + def __init__(self, klassname): + self.klassname = klassname + + def __call__(self, db, userid, itemid): + cutoff_score = float(db.config.detectors['SPAMBAYES_SPAM_CUTOFF']) + klass = db.getclass(self.klassname) + + try: + score = klass.get(itemid, 'spambayes_score') + except KeyError: + return True + + if score > cutoff_score: + roles = set(db.user.get(userid, "roles").lower().split(",")) + allowed = set(db.config.detectors['SPAMBAYES_MAY_VIEW_SPAM'].lower().split(",")) + return bool(roles.intersection(allowed)) + + return True + + +for cl in 'file', 'msg': + p = db.security.addPermission(name='View', klass=cl, + description="allowed to see metadata of file object regardless of spam status", + properties=('creation', 'activity', + 'creator', 'actor', + 'name', 'spambayes_score', + 'spambayes_misclassified', + 'author', 
'recipients', + 'date', 'files', 'messageid', + 'inreplyto', 'type', + )) + + db.security.addPermissionToRole('Anonymous', p) + + spamcheck = db.security.addPermission(name='View', klass=cl, + description="allowed to see metadata of file object regardless of spam status", + properties=('content', 'summary'), + check=may_view_spam(cl)) + + db.security.addPermissionToRole('Anonymous', spamcheck) + + # [OPTIONAL] # Allow anonymous users access to create or edit "issue" items (and the # related file and message items) From python-checkins at python.org Fri Jul 27 18:26:27 2007 From: python-checkins at python.org (erik.forsberg) Date: Fri, 27 Jul 2007 18:26:27 +0200 (CEST) Subject: [Python-checkins] r56582 - in tracker/instances/python-dev: detectors/busybody.py detectors/config.ini.template detectors/no_texthtml.py detectors/spambayes.py detectors/spamcheck.py detectors/tellteam.py detectors/userauditor.py extensions/spambayes.py html/file.item.html html/issue.item.html html/msg.item.html schema.py Message-ID: <20070727162627.AB4841E4013@bag.python.org> Author: erik.forsberg Date: Fri Jul 27 18:26:26 2007 New Revision: 56582 Added: tracker/instances/python-dev/detectors/spambayes.py - copied unchanged from r56581, tracker/instances/python-dev-spambayes-integration/detectors/spambayes.py tracker/instances/python-dev/extensions/spambayes.py - copied unchanged from r56581, tracker/instances/python-dev-spambayes-integration/extensions/spambayes.py Removed: tracker/instances/python-dev/detectors/spamcheck.py Modified: tracker/instances/python-dev/detectors/busybody.py tracker/instances/python-dev/detectors/config.ini.template tracker/instances/python-dev/detectors/no_texthtml.py tracker/instances/python-dev/detectors/tellteam.py tracker/instances/python-dev/detectors/userauditor.py tracker/instances/python-dev/html/file.item.html tracker/instances/python-dev/html/issue.item.html tracker/instances/python-dev/html/msg.item.html tracker/instances/python-dev/schema.py Log: 
Merged python-dev-spambayes-integration rev 56581 down to python-dev Modified: tracker/instances/python-dev/detectors/busybody.py ============================================================================== --- tracker/instances/python-dev/detectors/busybody.py (original) +++ tracker/instances/python-dev/detectors/busybody.py Fri Jul 27 18:26:26 2007 @@ -22,6 +22,16 @@ from roundup import roundupdb, hyperdb +def is_spam(db, msgid): + cutoff_score = float(db.config.detectors['SPAMBAYES_SPAM_CUTOFF']) + + msg = db.getnode("msg", msgid) + if msg.has_key('spambayes_score') and \ + msg['spambayes_score'] > cutoff_score: + return False + return True + + def busyreaction(db, cl, nodeid, oldvalues): ''' busybody mail ''' @@ -36,7 +46,7 @@ else: note = cl.generateChangeNote(nodeid, oldvalues) - for msgid in msgIDS: + for msgid in filter(lambda x: is_spam(db, x), msgIDS): try: cl.send_message(nodeid, msgid, note, sendto) except roundupdb.MessageSendError, message: Modified: tracker/instances/python-dev/detectors/config.ini.template ============================================================================== --- tracker/instances/python-dev/detectors/config.ini.template (original) +++ tracker/instances/python-dev/detectors/config.ini.template Fri Jul 27 18:26:26 2007 @@ -12,3 +12,6 @@ spambayes_ham_cutoff = 0.2 spambayes_spam_cutoff = 0.85 +spambayes_may_view_spam = User,Coordinator,Developer +spambayes_may_classify = Coordinator +spambayes_may_report_misclassified = User,Coordinator,Developer Modified: tracker/instances/python-dev/detectors/no_texthtml.py ============================================================================== --- tracker/instances/python-dev/detectors/no_texthtml.py (original) +++ tracker/instances/python-dev/detectors/no_texthtml.py Fri Jul 27 18:26:26 2007 @@ -1,6 +1,6 @@ def audit_html_files(db, cl, nodeid, newvalues): - if newvalues['type'] == 'text/html': + if newvalues.has_key('type') and newvalues['type'] == 'text/html': newvalues['type'] = 
'text/plain' Deleted: /tracker/instances/python-dev/detectors/spamcheck.py ============================================================================== --- /tracker/instances/python-dev/detectors/spamcheck.py Fri Jul 27 18:26:26 2007 +++ (empty file) @@ -1,38 +0,0 @@ -""" -spamcheck.py - Auditor that consults a SpamBayes server and scores all form -submissions. Submissions which are deemed to be spam are rejected. For the -time being only reject submissions which are assumed to be spam (score >= -SPAM_CUTOFF). Once a reasonable body of ham and spam submissions have been -built up you can consider whether to also reject unsure submissions (score > -HAM_CUTOFF). The current settings make it less likely that you'll reject -valid submissions at the expense of manual checks to correct spammy items -which snuck by the screen. -""" - -import xmlrpclib -import socket - -from roundup.exceptions import Reject - -def check_spam(_db, _klass, _nodeid, newvalues): - """Auditor to score a website submission.""" - - spambayes_uri = _db.config.detectors['SPAMBAYES_URI'] - spam_cutoff = float(_db.config.detectors['SPAMBAYES_SPAM_CUTOFF']) - - - server = xmlrpclib.ServerProxy(spambayes_uri, verbose=False) - try: - prob = server.score(newvalues, [], {}) - except (socket.error, xmlrpclib.Error), e: - pass - else: - if prob >= spam_cutoff: - raise Reject("Looks like spam to me - prob=%.3f" % prob) - -def init(database): - """Initialize auditor.""" - database.issue.audit('create', check_spam) - database.issue.audit('set', check_spam) - database.file.audit('create', check_spam) - database.file.audit('set', check_spam) Modified: tracker/instances/python-dev/detectors/tellteam.py ============================================================================== --- tracker/instances/python-dev/detectors/tellteam.py (original) +++ tracker/instances/python-dev/detectors/tellteam.py Fri Jul 27 18:26:26 2007 @@ -1,5 +1,14 @@ from roundup import roundupdb +def is_spam(db, msgid): + cutoff_score = 
float(db.config.detectors['SPAMBAYES_SPAM_CUTOFF']) + + msg = db.getnode("msg", msgid) + if msg.has_key('spambayes_score') and \ + msg['spambayes_score'] > cutoff_score: + return False + return True + def newissuetriage(db, cl, nodeid, oldvalues): ''' Copy a message about new issues to a triage address, set in detectors/config.ini @@ -14,9 +23,10 @@ triage_email = [] if not triage_email: return - for msgid in cl.get(nodeid, 'messages'): + for msgid in filter(lambda x: is_spam(db, x), cl.get(nodeid, 'messages')): try: # note: last arg must be a list + cl.send_message(nodeid, msgid, change_note, triage_email) except roundupdb.MessageSendError, message: raise roundupdb.DetectorError, message Modified: tracker/instances/python-dev/detectors/userauditor.py ============================================================================== --- tracker/instances/python-dev/detectors/userauditor.py (original) +++ tracker/instances/python-dev/detectors/userauditor.py Fri Jul 27 18:26:26 2007 @@ -36,7 +36,7 @@ raise ValueError, 'Role "%s" does not exist'%rolename if None != nodeid and "admin" in roles: - if not "admin" in [x.lower().strip() for x in cl.get(nodeid, 'roles')]: + if not "admin" in [x.lower().strip() for x in cl.get(nodeid, 'roles').split(",")]: raise ValueError, "Only Admins may assign the Admin role!" Modified: tracker/instances/python-dev/html/file.item.html ============================================================================== --- tracker/instances/python-dev/html/file.item.html (original) +++ tracker/instances/python-dev/html/file.item.html Fri Jul 27 18:26:26 2007 @@ -29,8 +29,17 @@ Please note that for security reasons, it's not permitted to set content type to text/html. + + SpamBayes Score + + + Marked as misclassified + + + +   @@ -44,10 +53,30 @@ - + File has been classified as spam.

    + +
    download +

    + Files classified as spam are not available for download by + unathorized users. If you think the file has been misclassified, + please login and click on the button for reclassification. +

    + + +
    + + + + +
    + Modified: tracker/instances/python-dev/html/issue.item.html ============================================================================== --- tracker/instances/python-dev/html/issue.item.html (original) +++ tracker/instances/python-dev/html/issue.item.html Fri Jul 27 18:26:26 2007 @@ -243,7 +243,12 @@ -
    content
    +

    + Message has been classified as spam. +

    +
    content
    Modified: tracker/instances/python-dev/html/msg.item.html ============================================================================== --- tracker/instances/python-dev/html/msg.item.html (original) +++ tracker/instances/python-dev/html/msg.item.html Fri Jul 27 18:26:26 2007 @@ -47,12 +47,50 @@ Date + + + SpamBayes Score + + + + + Marked as misclassified + + + +

    + Message has been classified as spam

    + - + + + + + + + - + +
    Content
    Content +
    + + + + +
    +
    + Message has been classified as spam and is therefore not + available to unathorized users. If you think this is + incorrect, please login and report the message as being + misclassified. +
    Modified: tracker/instances/python-dev/schema.py ============================================================================== --- tracker/instances/python-dev/schema.py (original) +++ tracker/instances/python-dev/schema.py Fri Jul 27 18:26:26 2007 @@ -97,10 +97,14 @@ summary=String(), files=Multilink("file"), messageid=String(), - inreplyto=String()) + inreplyto=String(), + spambayes_score=Number(), + spambayes_misclassified=Boolean(),) file = FileClass(db, "file", - name=String()) + name=String(), + spambayes_score=Number(), + spambayes_misclassified=Boolean(),) # IssueClass automatically gets these properties in addition to the Class ones: # title = String() @@ -141,12 +145,59 @@ ########################## # User permissions ########################## + for cl in ('issue_type', 'severity', 'component', 'version', 'priority', 'status', 'resolution', - 'issue', 'file', 'msg', 'keyword'): + 'issue', 'keyword'): db.security.addPermissionToRole('User', 'View', cl) db.security.addPermissionToRole('Anonymous', 'View', cl) +class may_view_spam: + def __init__(self, klassname): + self.klassname = klassname + + def __call__(self, db, userid, itemid): + cutoff_score = float(db.config.detectors['SPAMBAYES_SPAM_CUTOFF']) + klass = db.getclass(self.klassname) + + try: + score = klass.get(itemid, 'spambayes_score') + except KeyError: + return True + + if score > cutoff_score: + roles = set(db.user.get(userid, "roles").lower().split(",")) + allowed = set(db.config.detectors['SPAMBAYES_MAY_VIEW_SPAM'].lower().split(",")) + return bool(roles.intersection(allowed)) + + return True + +for cl in ('file', 'msg'): + p = db.security.addPermission(name='View', klass=cl, + description="allowed to see metadata of file object regardless of spam status", + properties=('creation', 'activity', + 'creator', 'actor', + 'name', 'spambayes_score', + 'spambayes_misclassified', + 'author', 'recipients', + 'date', 'files', 'messageid', + 'inreplyto', 'type', + )) + + 
db.security.addPermissionToRole('Anonymous', p) + db.security.addPermissionToRole('User', p) + + + spamcheck = db.security.addPermission(name='View', klass=cl, + description="allowed to see metadata of file object regardless of spam status", + properties=('content', 'summary'), + check=may_view_spam(cl)) + + db.security.addPermissionToRole('User', spamcheck) + db.security.addPermissionToRole('Anonymous', spamcheck) + + + for cl in 'file', 'msg': db.security.addPermissionToRole('User', 'Create', cl) @@ -264,7 +315,7 @@ # Allow anonymous users access to view issues (and the related, linked # information) -for cl in 'issue', 'file', 'msg', 'severity', 'status', 'resolution': +for cl in 'issue', 'severity', 'status', 'resolution': db.security.addPermissionToRole('Anonymous', 'View', cl) # [OPTIONAL] From python-checkins at python.org Fri Jul 27 18:31:11 2007 From: python-checkins at python.org (erik.forsberg) Date: Fri, 27 Jul 2007 18:31:11 +0200 (CEST) Subject: [Python-checkins] r56583 - tracker/instances/spambayes_integration/extensions/spambayes.py Message-ID: <20070727163111.16BF11E400A@bag.python.org> Author: erik.forsberg Date: Fri Jul 27 18:31:10 2007 New Revision: 56583 Modified: tracker/instances/spambayes_integration/extensions/spambayes.py Log: Silly bug. 
Modified: tracker/instances/spambayes_integration/extensions/spambayes.py ============================================================================== --- tracker/instances/spambayes_integration/extensions/spambayes.py (original) +++ tracker/instances/spambayes_integration/extensions/spambayes.py Fri Jul 27 18:31:10 2007 @@ -45,7 +45,7 @@ if status: if node.get('spambayes_misclassified', False): - props['spambayes_misclassified':True] + props['spambayes_misclassified'] = True props['spambayes_score'] = 1.0 From python-checkins at python.org Fri Jul 27 20:28:22 2007 From: python-checkins at python.org (martin.v.loewis) Date: Fri, 27 Jul 2007 20:28:22 +0200 (CEST) Subject: [Python-checkins] r56588 - in python/trunk: Lib/httplib.py Lib/socket.py Lib/test/test_socket_ssl.py Misc/NEWS Message-ID: <20070727182822.E9A191E400C@bag.python.org> Author: martin.v.loewis Date: Fri Jul 27 20:28:22 2007 New Revision: 56588 Modified: python/trunk/Lib/httplib.py python/trunk/Lib/socket.py python/trunk/Lib/test/test_socket_ssl.py python/trunk/Misc/NEWS Log: Bug #978833: Close https sockets by releasing the _ssl object. Modified: python/trunk/Lib/httplib.py ============================================================================== --- python/trunk/Lib/httplib.py (original) +++ python/trunk/Lib/httplib.py Fri Jul 27 20:28:22 2007 @@ -1117,6 +1117,9 @@ def __getattr__(self, attr): return getattr(self._sock, attr) + def close(self): + SharedSocketClient.close(self) + self._ssl = None class HTTPSConnection(HTTPConnection): "This class allows communication via SSL." Modified: python/trunk/Lib/socket.py ============================================================================== --- python/trunk/Lib/socket.py (original) +++ python/trunk/Lib/socket.py Fri Jul 27 20:28:22 2007 @@ -144,6 +144,10 @@ send = recv = recv_into = sendto = recvfrom = recvfrom_into = _dummy __getattr__ = _dummy +# Wrapper around platform socket objects. 
This implements +# a platform-independent dup() functionality. The +# implementation currently relies on reference counting +# to close the underlying socket object. class _socketobject(object): __doc__ = _realsocket.__doc__ Modified: python/trunk/Lib/test/test_socket_ssl.py ============================================================================== --- python/trunk/Lib/test/test_socket_ssl.py (original) +++ python/trunk/Lib/test/test_socket_ssl.py Fri Jul 27 20:28:22 2007 @@ -106,6 +106,25 @@ connector() t.join() + def test_978833(self): + if test_support.verbose: + print "test_978833 ..." + + import os, httplib + with test_support.transient_internet(): + s = socket.socket(socket.AF_INET) + s.connect(("www.sf.net", 443)) + fd = s._sock.fileno() + sock = httplib.FakeSocket(s, socket.ssl(s)) + s = None + sock.close() + try: + os.fstat(fd) + except OSError: + pass + else: + raise test_support.TestFailed("Failed to close socket") + class OpenSSLTests(unittest.TestCase): def testBasic(self): Modified: python/trunk/Misc/NEWS ============================================================================== --- python/trunk/Misc/NEWS (original) +++ python/trunk/Misc/NEWS Fri Jul 27 20:28:22 2007 @@ -238,6 +238,8 @@ Library ------- +- Bug #978833: Close https sockets by releasing the _ssl object. + - Change location of the package index to pypi.python.org/pypi - Bug #1701409: Fix a segfault in printing ctypes.c_char_p and From python-checkins at python.org Fri Jul 27 21:21:32 2007 From: python-checkins at python.org (guido.van.rossum) Date: Fri, 27 Jul 2007 21:21:32 +0200 (CEST) Subject: [Python-checkins] r56589 - peps/trunk/pep-3100.txt Message-ID: <20070727192132.577D71E400B@bag.python.org> Author: guido.van.rossum Date: Fri Jul 27 21:21:32 2007 New Revision: 56589 Modified: peps/trunk/pep-3100.txt Log: Various status updates. 
Modified: peps/trunk/pep-3100.txt ============================================================================== --- peps/trunk/pep-3100.txt (original) +++ peps/trunk/pep-3100.txt Fri Jul 27 21:21:32 2007 @@ -69,16 +69,15 @@ * Support only new-style classes; classic classes will be gone [1]_ [done] * Replace ``print`` by a function [14]_ [#pep3105]_ [done] * The ``softspace`` attribute of files goes away. [done] -* Use ``except E1, E2, E3 as err:`` if you want the error variable. [3]_ -* ``None`` becomes a keyword [4]_ [done; also ``True`` and ``False``] +* Use ``except E1, E2, E3 as err:`` if you want the error variable. [3]_ [done] +* ``None`` becomes a keyword [4]_; also ``True`` and ``False`` [done] * ``...`` to become a general expression element [16]_ [done] * ``as`` becomes a keyword [5]_ (starting in 2.6 already) [done] * Have list comprehensions be syntactic sugar for passing an equivalent generator expression to ``list()``; as a consequence the - loop variable will no longer be exposed [#pep289]_ + loop variable will no longer be exposed [#pep289]_ [done] * Comparisons other than ``==`` and ``!=`` between disparate types will raise an exception unless explicitly supported by the type [6]_ [done] -* Exceptions might grow an attribute to store the traceback [11]_ * floats will not be acceptable as arguments in place of ints for operations where floats are inadvertantly accepted (PyArg_ParseTuple() i & l formats) * Remove from ... import * at function scope. This means that functions @@ -86,8 +85,8 @@ * Imports [#pep328]_ + Imports will be absolute by default. [done] + Relative imports must be explicitly specified. [done] - + Indirection entires in sys.modules will not be supported. -* __init__.py might become optional in sub-packages. __init__.py will still + + Indirection entires in sys.modules will not be supported. (what's this???) +* __init__.py might become optional in sub-packages? __init__.py will still be required for top-level packages. 
* Cleanup the Py_InitModule() variants {,3,4} (also import and parser APIs) * Cleanup the APIs exported in pythonrun, etc. @@ -95,8 +94,8 @@ - List comprehensions will require parentheses around the iterables. This will make list comprehensions more similar to generator comprehensions. - [x for x in 1, 2] will need to be: [x for x in (1, 2)] - - Lambdas may have to be parenthesized [#pep308]_ + [x for x in 1, 2] will need to be: [x for x in (1, 2)] [done] + - Lambdas may have to be parenthesized [#pep308]_ [NO] * __builtins__ should get a different name *or* completely unified with __builtin__. Keeping both with confusingly similar spellings @@ -122,7 +121,7 @@ * ```x```: use ``repr(x)`` [2]_ [done] * The ``<>`` operator: use ``!=`` instead [3]_ [done] * The __mod__ and __divmod__ special methods on float. [21]_ -* Might drop unbound methods? [7]_ +* Might drop unbound methods? [7]_ [UNLIKELY] * METH_OLDARGS * WITH_CYCLE_GC [done] * __getslice__, __setslice__, __delslice__ [#sequence-types]_; @@ -147,23 +146,22 @@ literals with 'L' or 'l' suffix disappear [1]_ [done] * Make all strings be Unicode, and have a separate bytes() type [1]_ The new string type will be called 'str'. - [halfway done, in py3k-struni branch] + [largely done, in py3k-struni branch] * Return iterable views instead of lists where appropriate for atomic type methods (e.g. ``dict.keys()``, ``dict.values()``, ``dict.items()``, etc.); iter* methods will be removed. [done] * Make ``string.join()`` stringify its arguments? [18]_ * Fix open() so it returns a ValueError if the mode is bad rather than IOError. - (This probably affects lots of places, we should review the exceptions - and fix them if inappropriate.) + [done] To be removed: * ``basestring.find()`` and ``basestring.rfind()``; use ``basestring.index()`` or ``basestring.[r]partition()`` or - or ``basestring.rindex()`` in a try/except block??? [13]_ + or ``basestring.rindex()`` in a try/except block??? 
[13]_ [UNLIKELY] * ``file.xreadlines()`` method [#file-object]_ [done] -* ``dict.setdefault()``? [15]_ -* ``dict.has_key()`` method [done] +* ``dict.setdefault()``? [15]_ [UNLIKELY] +* ``dict.has_key()`` method; use ``in`` operator [done] Built-in Namespace @@ -171,8 +169,6 @@ * Make built-ins return an iterator where appropriate (e.g. ``range()``, ``zip()``, ``map()``, ``filter()``, etc.) [done] -* Relevant functions should consume iterators (e.g. ``min()``, - ``max()``) [They already do, since 2.2.] * Remove ``input()`` and rename ``raw_input()`` to ``input()``. If you need the old input(), use eval(input()). [done] * Introduce ``trunc()``, which would call the ``__trunc__()`` method on its @@ -192,8 +188,8 @@ * ``intern()``: put in ``sys`` [2]_, [22]_ [done] * ``reduce()``: write a loop instead [2]_, [9]_ [done] * ``xrange()``: use ``range()`` instead [1]_ [See range() above] [done] -* ``StandardError``: this is a relic from the original exception hierachy; subclass - ``Exception`` instead. [done] +* ``StandardError``: this is a relic from the original exception hierachy; + subclass ``Exception`` instead. 
[done] Standard library From python-checkins at python.org Fri Jul 27 22:27:34 2007 From: python-checkins at python.org (erik.forsberg) Date: Fri, 27 Jul 2007 22:27:34 +0200 (CEST) Subject: [Python-checkins] r56590 - in tracker/instances: python-dev-spambayes-integration/html/file.item.html python-dev-spambayes-integration/html/issue.item.html python-dev-spambayes-integration/html/msg.item.html python-dev-spambayes-integration/schema.py spambayes_integration/detectors/config.ini.template spambayes_integration/detectors/spambayes.py spambayes_integration/extensions/spambayes.py Message-ID: <20070727202734.B81841E400A@bag.python.org> Author: erik.forsberg Date: Fri Jul 27 22:27:34 2007 New Revision: 56590 Modified: tracker/instances/python-dev-spambayes-integration/html/file.item.html tracker/instances/python-dev-spambayes-integration/html/issue.item.html tracker/instances/python-dev-spambayes-integration/html/msg.item.html tracker/instances/python-dev-spambayes-integration/schema.py tracker/instances/spambayes_integration/detectors/config.ini.template tracker/instances/spambayes_integration/detectors/spambayes.py tracker/instances/spambayes_integration/extensions/spambayes.py Log: Remade spambayes integration, this time using the roundup's built-in permissions system, which I understand better now, 2 hours later :-). Modified: tracker/instances/python-dev-spambayes-integration/html/file.item.html ============================================================================== --- tracker/instances/python-dev-spambayes-integration/html/file.item.html (original) +++ tracker/instances/python-dev-spambayes-integration/html/file.item.html Fri Jul 27 22:27:34 2007 @@ -56,11 +56,11 @@

    File has been classified as spam.

    -download -

    +

    Files classified as spam are not available for download by unathorized users. If you think the file has been misclassified, please login and click on the button for reclassification. Modified: tracker/instances/python-dev-spambayes-integration/html/issue.item.html ============================================================================== --- tracker/instances/python-dev-spambayes-integration/html/issue.item.html (original) +++ tracker/instances/python-dev-spambayes-integration/html/issue.item.html Fri Jul 27 22:27:34 2007 @@ -247,7 +247,7 @@ tal:condition="python:utils.sb_is_spam(msg)"> Message has been classified as spam.

    -
    content
    Modified: tracker/instances/python-dev-spambayes-integration/html/msg.item.html ============================================================================== --- tracker/instances/python-dev-spambayes-integration/html/msg.item.html (original) +++ tracker/instances/python-dev-spambayes-integration/html/msg.item.html Fri Jul 27 22:27:34 2007 @@ -67,7 +67,8 @@ Content - +
    @@ -82,10 +83,11 @@
    +      
    
       
    +      tal:condition="python:not context.content.is_view_ok()">
                 Message has been classified as spam and is therefore not
           available to unathorized users. If you think this is
           incorrect, please login and report the message as being
    
    Modified: tracker/instances/python-dev-spambayes-integration/schema.py
    ==============================================================================
    --- tracker/instances/python-dev-spambayes-integration/schema.py	(original)
    +++ tracker/instances/python-dev-spambayes-integration/schema.py	Fri Jul 27 22:27:34 2007
    @@ -134,6 +134,9 @@
     db.security.addRole(name='Developer', description='A developer')
     db.security.addRole(name='Coordinator', description='A coordinator')
     
    +sb_may_classify  = db.security.addPermission(name="SB: May Classify")
    +sb_may_report_misclassified = db.security.addPermission(name="SB: May Report Misclassified")
    +
     #
     # REGULAR USERS
     #
    @@ -166,15 +169,13 @@
                 return True
     
             if score > cutoff_score:
    -            roles = set(db.user.get(userid, "roles").lower().split(","))
    -            allowed = set(db.config.detectors['SPAMBAYES_MAY_VIEW_SPAM'].lower().split(","))
    -            return bool(roles.intersection(allowed))
    +            return False
     
             return True
     
     for cl in ('file', 'msg'):
         p = db.security.addPermission(name='View', klass=cl,
    -                                  description="allowed to see metadata of file object regardless of spam status",
    +                                  description="allowed to see metadata object regardless of spam status",
                                       properties=('creation', 'activity',
                                                   'creator', 'actor',
                                                   'name', 'spambayes_score',
    @@ -185,22 +186,25 @@
                                                   ))
     
         db.security.addPermissionToRole('Anonymous', p)
    -    db.security.addPermissionToRole('User', p)    
    +    db.security.addPermissionToRole('User', p)
    +
    +    db.security.addPermissionToRole('User', 'Create', cl)
    +
    +    p = db.security.addPermission(name='View', klass=cl,
    +                                  description="Allowed to see content of object regardless of spam status",
    +                                  properties = ('content', 'summary'))
         
    +    db.security.addPermissionToRole('User', p)        
         
         spamcheck = db.security.addPermission(name='View', klass=cl,
    -                                          description="allowed to see metadata of file object regardless of spam status",
    +                                          description="allowed to see content if not spam",
                                               properties=('content', 'summary'),
                                               check=may_view_spam(cl))
    -    
    -    db.security.addPermissionToRole('User', spamcheck)    
    +
         db.security.addPermissionToRole('Anonymous', spamcheck)
     
         
     
    -for cl in 'file', 'msg':
    -    db.security.addPermissionToRole('User', 'Create', cl)
    -
     p = db.security.addPermission(name='Create', klass='issue',
                                   properties=('title', 'type',
                                               'components', 'versions',
    @@ -217,6 +221,9 @@
                                   description='User can report and discuss issues')
     db.security.addPermissionToRole('User', p)
     
    +db.security.addPermissionToRole('User', 'SB: May Report Misclassified')
    +
    +
     
     ##########################
     # Developer permissions
    @@ -240,6 +247,8 @@
         db.security.addPermissionToRole('Coordinator', 'Edit', cl)
         db.security.addPermissionToRole('Coordinator', 'Create', cl)
     
    +db.security.addPermissionToRole('Coordinator', 'SB: May Classify')
    +
     # May users view other user information? Comment these lines out
     # if you don't want them to
     db.security.addPermissionToRole('User', 'View', 'user')
    @@ -314,7 +323,10 @@
     db.security.addPermissionToRole('Anonymous', 'Create', 'user')
     
     # Allow anonymous users access to view issues (and the related, linked
    -# information)
    +# information).
    +# Note permissions settings for file and msg above (due to spambayes
    +# integration).
    +
     for cl in 'issue', 'severity', 'status', 'resolution':
         db.security.addPermissionToRole('Anonymous', 'View', cl)
     
    
    Modified: tracker/instances/spambayes_integration/detectors/config.ini.template
    ==============================================================================
    --- tracker/instances/spambayes_integration/detectors/config.ini.template	(original)
    +++ tracker/instances/spambayes_integration/detectors/config.ini.template	Fri Jul 27 22:27:34 2007
    @@ -5,7 +5,3 @@
     # config.
     spambayes_ham_cutoff = 0.2
     spambayes_spam_cutoff = 0.85
    -
    -spambayes_may_view_spam = User,Coordinator,Developer
    -spambayes_may_classify = Coordinator
    -spambayes_may_report_misclassified = User,Coordinator,Developer
    
    Modified: tracker/instances/spambayes_integration/detectors/spambayes.py
    ==============================================================================
    --- tracker/instances/spambayes_integration/detectors/spambayes.py	(original)
    +++ tracker/instances/spambayes_integration/detectors/spambayes.py	Fri Jul 27 22:27:34 2007
    @@ -1,13 +1,3 @@
    -"""
    -spamcheck.py - Auditor that consults a SpamBayes server and scores all form
    -submissions.  Submissions which are deemed to be spam are rejected.  For the
    -time being only reject submissions which are assumed to be spam (score >=
    -SPAM_CUTOFF).  Once a reasonable body of ham and spam submissions have been
    -built up you can consider whether to also reject unsure submissions (score >
    -HAM_CUTOFF).  The current settings make it less likely that you'll reject
    -valid submissions at the expense of manual checks to correct spammy items
    -which snuck by the screen.
    -"""
     
     import xmlrpclib
     import socket
    @@ -58,8 +48,8 @@
     
     
         if newvalues.has_key('spambayes_score'):
    -        if not "coordinator" in [x.lower().strip() for x in db.user.get(db.getuid(), 'roles').split(",")]:
    -            raise ValueError, "Only Coordinators may explicitly assign spambayes_score"
    +        if not db.security.hasPermission('SB: May Classify', db.getuid()):
    +            raise ValueError, "You don't have permission to spamclassify messages"
             # Don't do anything if we're explicitly setting the score        
             return
     
    
    Modified: tracker/instances/spambayes_integration/extensions/spambayes.py
    ==============================================================================
    --- tracker/instances/spambayes_integration/extensions/spambayes.py	(original)
    +++ tracker/instances/spambayes_integration/extensions/spambayes.py	Fri Jul 27 22:27:34 2007
    @@ -28,6 +28,8 @@
     
     
     class SpambayesClassify(Action):
    +    permissionType = 'SB: May Classify'
    +    
         def handle(self):
             (content, tokens) = extract_classinfo(self.db,
                                                   self.classname, self.nodeid)
    @@ -61,15 +63,6 @@
             klass.set(self.nodeid, **props)
             self.db.commit()
     
    -    def permission(self):
    -        roles = set(self.db.user.get(self.userid, 'roles').lower().split(","))
    -        allowed = set(self.db.config.detectors['SPAMBAYES_MAY_CLASSIFY'].lower().split(","))
    -
    -        if not bool(roles.intersection(allowed)):
    -            raise Unauthorised("You do not have permission to train spambayes")
    -        Action.permission(self)
    -            
    -
     def sb_is_spam(obj):
         cutoff_score = float(obj._db.config.detectors['SPAMBAYES_SPAM_CUTOFF'])
         try:
    @@ -78,34 +71,7 @@
             return False
         return score >= cutoff_score
     
    -def sb_is_view_ok(obj):
    -    if not sb_is_spam(obj):
    -        return True
    -    roles = set(obj._db.user.get(obj._client.userid,
    -                                 'roles').lower().split(","))
    -    allowed = set(obj._db.config.detectors['SPAMBAYES_MAY_VIEW_SPAM'].lower().split(","))
    -
    -    return bool(roles.intersection(allowed))
    -
    -def sb_may_report_misclassified(obj):
    -    roles = set(obj._db.user.get(obj._client.userid,
    -                                 'roles').lower().split(","))
    -    allowed = set(obj._db.config.detectors['SPAMBAYES_MAY_REPORT_MISCLASSIFIED'].lower().split(","))
    -
    -    return bool(roles.intersection(allowed))
    -
    -def sb_may_classify(obj):
    -    roles = set(obj._db.user.get(obj._client.userid,
    -                                 'roles').lower().split(","))
    -    allowed = set(obj._db.config.detectors['SPAMBAYES_MAY_CLASSIFY'].lower().split(","))
    -
    -    return bool(roles.intersection(allowed))
    -
     def init(instance):
         instance.registerAction("spambayes_classify", SpambayesClassify)
         instance.registerUtil('sb_is_spam', sb_is_spam)
    -    instance.registerUtil('sb_is_view_ok', sb_is_view_ok)
    -    instance.registerUtil('sb_may_report_misclassified',
    -                          sb_may_report_misclassified)
    -    instance.registerUtil('sb_may_classify', sb_may_classify)
         
    
    From python-checkins at python.org  Fri Jul 27 22:29:30 2007
    From: python-checkins at python.org (erik.forsberg)
    Date: Fri, 27 Jul 2007 22:29:30 +0200 (CEST)
    Subject: [Python-checkins] r56591 - in tracker/instances/python-dev:
    	html/file.item.html html/issue.item.html html/msg.item.html
    	schema.py
    Message-ID: <20070727202930.841DB1E400A@bag.python.org>
    
    Author: erik.forsberg
    Date: Fri Jul 27 22:29:30 2007
    New Revision: 56591
    
    Modified:
       tracker/instances/python-dev/html/file.item.html
       tracker/instances/python-dev/html/issue.item.html
       tracker/instances/python-dev/html/msg.item.html
       tracker/instances/python-dev/schema.py
    Log:
    Merged svn+ssh://svn.python.org/tracker/instances/python-dev-spambayes-integration r56590 down to python-dev
    
    Modified: tracker/instances/python-dev/html/file.item.html
    ==============================================================================
    --- tracker/instances/python-dev/html/file.item.html	(original)
    +++ tracker/instances/python-dev/html/file.item.html	Fri Jul 27 22:29:30 2007
    @@ -56,11 +56,11 @@
     

    File has been classified as spam.

    -download -

    +

    Files classified as spam are not available for download by unathorized users. If you think the file has been misclassified, please login and click on the button for reclassification. Modified: tracker/instances/python-dev/html/issue.item.html ============================================================================== --- tracker/instances/python-dev/html/issue.item.html (original) +++ tracker/instances/python-dev/html/issue.item.html Fri Jul 27 22:29:30 2007 @@ -247,7 +247,7 @@ tal:condition="python:utils.sb_is_spam(msg)"> Message has been classified as spam.

    -
    content
    Modified: tracker/instances/python-dev/html/msg.item.html ============================================================================== --- tracker/instances/python-dev/html/msg.item.html (original) +++ tracker/instances/python-dev/html/msg.item.html Fri Jul 27 22:29:30 2007 @@ -67,7 +67,8 @@ Content - + @@ -82,10 +83,11 @@
    +      
    
       
    +      tal:condition="python:not context.content.is_view_ok()">
                 Message has been classified as spam and is therefore not
           available to unathorized users. If you think this is
           incorrect, please login and report the message as being
    
    Modified: tracker/instances/python-dev/schema.py
    ==============================================================================
    --- tracker/instances/python-dev/schema.py	(original)
    +++ tracker/instances/python-dev/schema.py	Fri Jul 27 22:29:30 2007
    @@ -134,6 +134,9 @@
     db.security.addRole(name='Developer', description='A developer')
     db.security.addRole(name='Coordinator', description='A coordinator')
     
    +sb_may_classify  = db.security.addPermission(name="SB: May Classify")
    +sb_may_report_misclassified = db.security.addPermission(name="SB: May Report Misclassified")
    +
     #
     # REGULAR USERS
     #
    @@ -166,15 +169,13 @@
                 return True
     
             if score > cutoff_score:
    -            roles = set(db.user.get(userid, "roles").lower().split(","))
    -            allowed = set(db.config.detectors['SPAMBAYES_MAY_VIEW_SPAM'].lower().split(","))
    -            return bool(roles.intersection(allowed))
    +            return False
     
             return True
     
     for cl in ('file', 'msg'):
         p = db.security.addPermission(name='View', klass=cl,
    -                                  description="allowed to see metadata of file object regardless of spam status",
    +                                  description="allowed to see metadata object regardless of spam status",
                                       properties=('creation', 'activity',
                                                   'creator', 'actor',
                                                   'name', 'spambayes_score',
    @@ -185,22 +186,25 @@
                                                   ))
     
         db.security.addPermissionToRole('Anonymous', p)
    -    db.security.addPermissionToRole('User', p)    
    +    db.security.addPermissionToRole('User', p)
    +
    +    db.security.addPermissionToRole('User', 'Create', cl)
    +
    +    p = db.security.addPermission(name='View', klass=cl,
    +                                  description="Allowed to see content of object regardless of spam status",
    +                                  properties = ('content', 'summary'))
         
    +    db.security.addPermissionToRole('User', p)        
         
         spamcheck = db.security.addPermission(name='View', klass=cl,
    -                                          description="allowed to see metadata of file object regardless of spam status",
    +                                          description="allowed to see content if not spam",
                                               properties=('content', 'summary'),
                                               check=may_view_spam(cl))
    -    
    -    db.security.addPermissionToRole('User', spamcheck)    
    +
         db.security.addPermissionToRole('Anonymous', spamcheck)
     
         
     
    -for cl in 'file', 'msg':
    -    db.security.addPermissionToRole('User', 'Create', cl)
    -
     p = db.security.addPermission(name='Create', klass='issue',
                                   properties=('title', 'type',
                                               'components', 'versions',
    @@ -217,6 +221,9 @@
                                   description='User can report and discuss issues')
     db.security.addPermissionToRole('User', p)
     
    +db.security.addPermissionToRole('User', 'SB: May Report Misclassified')
    +
    +
     
     ##########################
     # Developer permissions
    @@ -240,6 +247,8 @@
         db.security.addPermissionToRole('Coordinator', 'Edit', cl)
         db.security.addPermissionToRole('Coordinator', 'Create', cl)
     
    +db.security.addPermissionToRole('Coordinator', 'SB: May Classify')
    +
     # May users view other user information? Comment these lines out
     # if you don't want them to
     db.security.addPermissionToRole('User', 'View', 'user')
    @@ -314,7 +323,10 @@
     db.security.addPermissionToRole('Anonymous', 'Create', 'user')
     
     # Allow anonymous users access to view issues (and the related, linked
    -# information)
    +# information).
    +# Note permissions settings for file and msg above (due to spambayes
    +# integration).
    +
     for cl in 'issue', 'severity', 'status', 'resolution':
         db.security.addPermissionToRole('Anonymous', 'View', cl)
     
    
    From python-checkins at python.org  Fri Jul 27 22:44:33 2007
    From: python-checkins at python.org (erik.forsberg)
    Date: Fri, 27 Jul 2007 22:44:33 +0200 (CEST)
    Subject: [Python-checkins] r56592 -
    	tracker/instances/python-dev-spambayes-integration/html/file.item.html
    Message-ID: <20070727204433.0D90C1E400A@bag.python.org>
    
    Author: erik.forsberg
    Date: Fri Jul 27 22:44:32 2007
    New Revision: 56592
    
    Modified:
       tracker/instances/python-dev-spambayes-integration/html/file.item.html
    Log:
    
    More use-roundup's-permission-system stuff.
    
    
    Modified: tracker/instances/python-dev-spambayes-integration/html/file.item.html
    ==============================================================================
    --- tracker/instances/python-dev-spambayes-integration/html/file.item.html	(original)
    +++ tracker/instances/python-dev-spambayes-integration/html/file.item.html	Fri Jul 27 22:44:32 2007
    @@ -70,7 +70,7 @@
          
    +       tal:condition="python:request.user.hasPermission('SB: May Classify')">
      
           
           
    
    From python-checkins at python.org  Fri Jul 27 22:44:43 2007
    From: python-checkins at python.org (erik.forsberg)
    Date: Fri, 27 Jul 2007 22:44:43 +0200 (CEST)
    Subject: [Python-checkins] r56593 -
    	tracker/instances/python-dev-spambayes-integration/schema.py
    Message-ID: <20070727204443.87CEA1E400A@bag.python.org>
    
    Author: erik.forsberg
    Date: Fri Jul 27 22:44:43 2007
    New Revision: 56593
    
    Modified:
       tracker/instances/python-dev-spambayes-integration/schema.py
    Log:
    
    Cleanup.
    
    
    Modified: tracker/instances/python-dev-spambayes-integration/schema.py
    ==============================================================================
    --- tracker/instances/python-dev-spambayes-integration/schema.py	(original)
    +++ tracker/instances/python-dev-spambayes-integration/schema.py	Fri Jul 27 22:44:43 2007
    @@ -134,8 +134,8 @@
     db.security.addRole(name='Developer', description='A developer')
     db.security.addRole(name='Coordinator', description='A coordinator')
     
    -sb_may_classify  = db.security.addPermission(name="SB: May Classify")
    -sb_may_report_misclassified = db.security.addPermission(name="SB: May Report Misclassified")
    +db.security.addPermission(name="SB: May Classify")
    +db.security.addPermission(name="SB: May Report Misclassified")
     
     #
     # REGULAR USERS
    
    From python-checkins at python.org  Fri Jul 27 22:47:35 2007
    From: python-checkins at python.org (erik.forsberg)
    Date: Fri, 27 Jul 2007 22:47:35 +0200 (CEST)
    Subject: [Python-checkins] r56594 - in tracker/instances/python-dev:
    	html/file.item.html schema.py
    Message-ID: <20070727204735.978041E400A@bag.python.org>
    
    Author: erik.forsberg
    Date: Fri Jul 27 22:47:35 2007
    New Revision: 56594
    
    Modified:
       tracker/instances/python-dev/html/file.item.html
       tracker/instances/python-dev/schema.py
    Log:
    Merged down svn+ssh://svn.python.org/tracker/instances/python-dev-spambayes-integration r56593
    
    Modified: tracker/instances/python-dev/html/file.item.html
    ==============================================================================
    --- tracker/instances/python-dev/html/file.item.html	(original)
    +++ tracker/instances/python-dev/html/file.item.html	Fri Jul 27 22:47:35 2007
    @@ -70,7 +70,7 @@
          
    +       tal:condition="python:request.user.hasPermission('SB: May Classify')">
      
           
           
    
    Modified: tracker/instances/python-dev/schema.py
    ==============================================================================
    --- tracker/instances/python-dev/schema.py	(original)
    +++ tracker/instances/python-dev/schema.py	Fri Jul 27 22:47:35 2007
    @@ -134,8 +134,8 @@
     db.security.addRole(name='Developer', description='A developer')
     db.security.addRole(name='Coordinator', description='A coordinator')
     
    -sb_may_classify  = db.security.addPermission(name="SB: May Classify")
    -sb_may_report_misclassified = db.security.addPermission(name="SB: May Report Misclassified")
    +db.security.addPermission(name="SB: May Classify")
    +db.security.addPermission(name="SB: May Report Misclassified")
     
     #
     # REGULAR USERS
    
    From python-checkins at python.org  Fri Jul 27 22:48:27 2007
    From: python-checkins at python.org (erik.forsberg)
    Date: Fri, 27 Jul 2007 22:48:27 +0200 (CEST)
    Subject: [Python-checkins] r56595 - in tracker/instances/meta:
    	html/file.item.html html/issue.item.html html/msg.item.html
    	schema.py
    Message-ID: <20070727204827.082AE1E400A@bag.python.org>
    
    Author: erik.forsberg
    Date: Fri Jul 27 22:48:26 2007
    New Revision: 56595
    
    Modified:
       tracker/instances/meta/html/file.item.html
       tracker/instances/meta/html/issue.item.html
       tracker/instances/meta/html/msg.item.html
       tracker/instances/meta/schema.py
    Log:
    
    Use roundup's security system for spambayes integration security.
    
    
    Modified: tracker/instances/meta/html/file.item.html
    ==============================================================================
    --- tracker/instances/meta/html/file.item.html	(original)
    +++ tracker/instances/meta/html/file.item.html	Fri Jul 27 22:48:26 2007
    @@ -55,11 +55,11 @@
     

    File has been classified as spam.

    -download -

    +

    Files classified as spam are not available for download by unathorized users. If you think the file has been misclassified, please login and click on the button for reclassification. @@ -69,7 +69,7 @@ + tal:condition="python:request.user.hasPermission('SB: May Classify')"> Modified: tracker/instances/meta/html/issue.item.html ============================================================================== --- tracker/instances/meta/html/issue.item.html (original) +++ tracker/instances/meta/html/issue.item.html Fri Jul 27 22:48:26 2007 @@ -186,7 +186,7 @@ tal:condition="python:utils.sb_is_spam(msg)"> Message has been classified as spam.

    -
    content
    Modified: tracker/instances/meta/html/msg.item.html ============================================================================== --- tracker/instances/meta/html/msg.item.html (original) +++ tracker/instances/meta/html/msg.item.html Fri Jul 27 22:48:26 2007 @@ -66,7 +66,7 @@ -
    Content
    + @@ -78,10 +78,10 @@
    + tal:condition="python:not context.content.is_view_ok()"> Message has been classified as spam and is therefore not available to unathorized users. If you think this is incorrect, please login and report the message as being Modified: tracker/instances/meta/schema.py ============================================================================== --- tracker/instances/meta/schema.py (original) +++ tracker/instances/meta/schema.py Fri Jul 27 22:48:26 2007 @@ -87,6 +87,11 @@ db.security.addRole(name='Coordinator', description='A coordinator') +db.security.addPermission(name="SB: May Classify") +db.security.addPermission(name="SB: May Report Misclassified") + +db.security.addPermissionToRole('Coordinator', 'SB: May Classify') + # # REGULAR USERS # @@ -178,9 +183,7 @@ return True if score > cutoff_score: - roles = set(db.user.get(userid, "roles").lower().split(",")) - allowed = set(db.config.detectors['SPAMBAYES_MAY_VIEW_SPAM'].lower().split(",")) - return bool(roles.intersection(allowed)) + return False return True @@ -198,6 +201,13 @@ )) db.security.addPermissionToRole('Anonymous', p) + db.security.addPermissionToRole('User', p) + + p = db.security.addPermission(name='View', klass=cl, + description="Allowed to see content of object regardless of spam status", + properties = ('content', 'summary')) + + db.security.addPermissionToRole('User', p) spamcheck = db.security.addPermission(name='View', klass=cl, description="allowed to see metadata of file object regardless of spam status", From python-checkins at python.org Sat Jul 28 00:05:36 2007 From: python-checkins at python.org (erik.forsberg) Date: Sat, 28 Jul 2007 00:05:36 +0200 (CEST) Subject: [Python-checkins] r56596 - tracker/instances/meta/schema.py Message-ID: <20070727220536.611BE1E4014@bag.python.org> Author: erik.forsberg Date: Sat Jul 28 00:05:35 2007 New Revision: 56596 Modified: tracker/instances/meta/schema.py Log: Simplify security settings for 'file' and 'msg'. 
Modified: tracker/instances/meta/schema.py ============================================================================== --- tracker/instances/meta/schema.py (original) +++ tracker/instances/meta/schema.py Sat Jul 28 00:05:35 2007 @@ -201,13 +201,6 @@ )) db.security.addPermissionToRole('Anonymous', p) - db.security.addPermissionToRole('User', p) - - p = db.security.addPermission(name='View', klass=cl, - description="Allowed to see content of object regardless of spam status", - properties = ('content', 'summary')) - - db.security.addPermissionToRole('User', p) spamcheck = db.security.addPermission(name='View', klass=cl, description="allowed to see metadata of file object regardless of spam status", From python-checkins at python.org Sat Jul 28 01:13:59 2007 From: python-checkins at python.org (erik.forsberg) Date: Sat, 28 Jul 2007 01:13:59 +0200 (CEST) Subject: [Python-checkins] r56597 - tracker/instances/python-dev/detectors/changes_xml_writer.py Message-ID: <20070727231359.A6D781E400A@bag.python.org> Author: erik.forsberg Date: Sat Jul 28 01:13:59 2007 New Revision: 56597 Added: tracker/instances/python-dev/detectors/changes_xml_writer.py Log: Adding recent_changes write as requested in http://psf.upfronthosting.co.za/roundup/meta/issue118 Added: tracker/instances/python-dev/detectors/changes_xml_writer.py ============================================================================== --- (empty file) +++ tracker/instances/python-dev/detectors/changes_xml_writer.py Sat Jul 28 01:13:59 2007 @@ -0,0 +1,191 @@ +# +# changes.xml writer detector. +# +# Copyright (c) 2007 Michal Kwiatkowski +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# +# * Neither the name of the author nor the names of his contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# + +"""changes.xml writer detector -- save each database change to an XML file. + +Root element is called `changes` and it has at most `ChangesXml.max_items` +children, each called a `change`. Each `change` has the following attributes: + +:date: date in RFC2822 format when the change was made +:id: unique identifier of this change (note: not an integer) +:type: type of this change (see below) + +A structure of a `change` depends on its `type`. Currently implemented +change types and their formats are listed below. + +* type = `file-added` + + Describes a new file attached to an existing issue. 
Child elements: + + :file-id: unique integer identifier of the file + :file-name: name of the uploaded file + :file-type: MIME type of the file content + :file-url: permanent URL of the file + :issue-id: unique integer identifier of an issue this file is attached to +""" + +import os +import urllib +from xml.dom import minidom +from xml.parsers.expat import ExpatError +from time import gmtime, strftime + +# Relative to tracker home directory. +FILENAME = os.path.join('%(TEMPLATES)s', 'recent-changes.xml') + + +def tracker_url(db): + return str(db.config.options[('tracker', 'web')]) + +def changes_xml_path(db): + return os.path.join(db.config.HOME, FILENAME % db.config.options) + +def rfc2822_date(): + return strftime("%a, %d %b %Y %H:%M:%S +0000", gmtime()) + +class File(object): + def __init__(self, db, id, issue_id): + self.db = db + self.id = id + self.issue_id = issue_id + + self.name = db.file.get(id, 'name') + self.type = db.file.get(id, 'type') + # Based on roundup.cgi.templating._HTMLItem.download_url(). + self.download_url = tracker_url(self.db) +\ + urllib.quote('%s%s/%s' % ('file', self.id, self.name)) + +class ChangesXml(object): + # Maximum number of changes stored in a file. 
+ max_items = 20 + + def __init__(self, filename): + self.filename = filename + self._read_document() + self.modified = False + + def save(self): + if not self.modified: + return + + self._trim_to_max_items() + + fd = open(self.filename, 'w') + self.document.writexml(fd, encoding="UTF-8") + fd.close() + + def add_file(self, file): + change = self._change("file%s-added-to-issue%s" % (file.id, file.issue_id), + "file-added") + + change.appendChild(self._element_with_text("file-id", file.id)) + change.appendChild(self._element_with_text("file-name", file.name)) + change.appendChild(self._element_with_text("file-type", file.type)) + change.appendChild(self._element_with_text("file-url", file.download_url)) + change.appendChild(self._element_with_text("issue-id", file.issue_id)) + + self.root.appendChild(change) + self.modified = True + + def add_files(self, files): + for file in files: + self.add_file(file) + + def _change(self, id, type): + """Return new 'change' element of a given type. + + """ + change = self.document.createElement("change") + change.setAttribute("id", id) + change.setAttribute("type", type) + change.setAttribute("date", rfc2822_date()) + return change + + def _element_with_text(self, name, value): + """Return new element with given name and text node as a value. + value + """ + element = self.document.createElement(name) + text = self.document.createTextNode(str(value)) + element.appendChild(text) + return element + + def _trim_to_max_items(self): + """Remove changes exceeding self.max_items. + """ + # Assumes that changes are stored sequentially from oldest to newest. + # Will do for now. + for change in self.root.getElementsByTagName("change")[0:-self.max_items]: + self.root.removeChild(change) + + def _read_document(self): + try: + self.document = minidom.parse(self.filename) + self.root = self.document.firstChild + except IOError, e: + # File not found, create a new one then. 
+ if e.errno != 2: + raise + self._create_new_document() + except ExpatError: + # File has been damaged, forget about it and create a new one. + self._create_new_document() + + def _create_new_document(self): + self.document = minidom.Document() + self.root = self.document.createElement("changes") + self.document.appendChild(self.root) + +def get_new_files_ids(issue_now, issue_then): + """Return ids of files added between `now` and `then`. + """ + files_now = set(issue_now['files']) + files_then = set(issue_then['files']) if issue_then else set() + return map(int, files_now - files_then) + +def file_added_to_issue(db, cl, issue_id, olddata): + try: + changes = ChangesXml(changes_xml_path(db)) + issue = db.issue.getnode(issue_id) + new_files = [ File(db, id, issue_id) for id in get_new_files_ids(issue, olddata) ] + + changes.add_files(new_files) + changes.save() + except: + # We can't mess up with a database commit. + pass + + +def init(db): + db.issue.react('create', file_added_to_issue) + db.issue.react('set', file_added_to_issue) From python-checkins at python.org Sat Jul 28 01:17:58 2007 From: python-checkins at python.org (erik.forsberg) Date: Sat, 28 Jul 2007 01:17:58 +0200 (CEST) Subject: [Python-checkins] r56598 - tracker/instances/python-dev/detectors/changes_xml_writer.py Message-ID: <20070727231758.3178A1E400A@bag.python.org> Author: erik.forsberg Date: Sat Jul 28 01:17:57 2007 New Revision: 56598 Removed: tracker/instances/python-dev/detectors/changes_xml_writer.py Log: Need a python 2.4 compatible variant Deleted: /tracker/instances/python-dev/detectors/changes_xml_writer.py ============================================================================== --- /tracker/instances/python-dev/detectors/changes_xml_writer.py Sat Jul 28 01:17:57 2007 +++ (empty file) @@ -1,191 +0,0 @@ -# -# changes.xml writer detector. 
-# -# Copyright (c) 2007 Michal Kwiatkowski -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# -# * Redistributions in binary form must reproduce the above copyright -# notice, this list of conditions and the following disclaimer in the -# documentation and/or other materials provided with the distribution. -# -# * Neither the name of the author nor the names of his contributors -# may be used to endorse or promote products derived from this software -# without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -# - -"""changes.xml writer detector -- save each database change to an XML file. - -Root element is called `changes` and it has at most `ChangesXml.max_items` -children, each called a `change`. Each `change` has the following attributes: - -:date: date in RFC2822 format when the change was made -:id: unique identifier of this change (note: not an integer) -:type: type of this change (see below) - -A structure of a `change` depends on its `type`. 
Currently implemented -change types and their formats are listed below. - -* type = `file-added` - - Describes a new file attached to an existing issue. Child elements: - - :file-id: unique integer identifier of the file - :file-name: name of the uploaded file - :file-type: MIME type of the file content - :file-url: permanent URL of the file - :issue-id: unique integer identifier of an issue this file is attached to -""" - -import os -import urllib -from xml.dom import minidom -from xml.parsers.expat import ExpatError -from time import gmtime, strftime - -# Relative to tracker home directory. -FILENAME = os.path.join('%(TEMPLATES)s', 'recent-changes.xml') - - -def tracker_url(db): - return str(db.config.options[('tracker', 'web')]) - -def changes_xml_path(db): - return os.path.join(db.config.HOME, FILENAME % db.config.options) - -def rfc2822_date(): - return strftime("%a, %d %b %Y %H:%M:%S +0000", gmtime()) - -class File(object): - def __init__(self, db, id, issue_id): - self.db = db - self.id = id - self.issue_id = issue_id - - self.name = db.file.get(id, 'name') - self.type = db.file.get(id, 'type') - # Based on roundup.cgi.templating._HTMLItem.download_url(). - self.download_url = tracker_url(self.db) +\ - urllib.quote('%s%s/%s' % ('file', self.id, self.name)) - -class ChangesXml(object): - # Maximum number of changes stored in a file. 
- max_items = 20 - - def __init__(self, filename): - self.filename = filename - self._read_document() - self.modified = False - - def save(self): - if not self.modified: - return - - self._trim_to_max_items() - - fd = open(self.filename, 'w') - self.document.writexml(fd, encoding="UTF-8") - fd.close() - - def add_file(self, file): - change = self._change("file%s-added-to-issue%s" % (file.id, file.issue_id), - "file-added") - - change.appendChild(self._element_with_text("file-id", file.id)) - change.appendChild(self._element_with_text("file-name", file.name)) - change.appendChild(self._element_with_text("file-type", file.type)) - change.appendChild(self._element_with_text("file-url", file.download_url)) - change.appendChild(self._element_with_text("issue-id", file.issue_id)) - - self.root.appendChild(change) - self.modified = True - - def add_files(self, files): - for file in files: - self.add_file(file) - - def _change(self, id, type): - """Return new 'change' element of a given type. - - """ - change = self.document.createElement("change") - change.setAttribute("id", id) - change.setAttribute("type", type) - change.setAttribute("date", rfc2822_date()) - return change - - def _element_with_text(self, name, value): - """Return new element with given name and text node as a value. - value - """ - element = self.document.createElement(name) - text = self.document.createTextNode(str(value)) - element.appendChild(text) - return element - - def _trim_to_max_items(self): - """Remove changes exceeding self.max_items. - """ - # Assumes that changes are stored sequentially from oldest to newest. - # Will do for now. - for change in self.root.getElementsByTagName("change")[0:-self.max_items]: - self.root.removeChild(change) - - def _read_document(self): - try: - self.document = minidom.parse(self.filename) - self.root = self.document.firstChild - except IOError, e: - # File not found, create a new one then. 
- if e.errno != 2: - raise - self._create_new_document() - except ExpatError: - # File has been damaged, forget about it and create a new one. - self._create_new_document() - - def _create_new_document(self): - self.document = minidom.Document() - self.root = self.document.createElement("changes") - self.document.appendChild(self.root) - -def get_new_files_ids(issue_now, issue_then): - """Return ids of files added between `now` and `then`. - """ - files_now = set(issue_now['files']) - files_then = set(issue_then['files']) if issue_then else set() - return map(int, files_now - files_then) - -def file_added_to_issue(db, cl, issue_id, olddata): - try: - changes = ChangesXml(changes_xml_path(db)) - issue = db.issue.getnode(issue_id) - new_files = [ File(db, id, issue_id) for id in get_new_files_ids(issue, olddata) ] - - changes.add_files(new_files) - changes.save() - except: - # We can't mess up with a database commit. - pass - - -def init(db): - db.issue.react('create', file_added_to_issue) - db.issue.react('set', file_added_to_issue) From python-checkins at python.org Sat Jul 28 01:35:35 2007 From: python-checkins at python.org (brett.cannon) Date: Sat, 28 Jul 2007 01:35:35 +0200 (CEST) Subject: [Python-checkins] r56599 - peps/trunk/pep-3100.txt Message-ID: <20070727233535.136C81E400A@bag.python.org> Author: brett.cannon Date: Sat Jul 28 01:35:34 2007 New Revision: 56599 Modified: peps/trunk/pep-3100.txt Log: Fix a misspelling and clarify what indirection entries in sys.modules are. Modified: peps/trunk/pep-3100.txt ============================================================================== --- peps/trunk/pep-3100.txt (original) +++ peps/trunk/pep-3100.txt Sat Jul 28 01:35:34 2007 @@ -85,7 +85,8 @@ * Imports [#pep328]_ + Imports will be absolute by default. [done] + Relative imports must be explicitly specified. [done] - + Indirection entires in sys.modules will not be supported. (what's this???) 
+ + Indirection entries in sys.modules (i.e., a value of None for + A.string means to use the top-level sring module) will not be supported. * __init__.py might become optional in sub-packages? __init__.py will still be required for top-level packages. * Cleanup the Py_InitModule() variants {,3,4} (also import and parser APIs) From python-checkins at python.org Sat Jul 28 09:01:43 2007 From: python-checkins at python.org (martin.v.loewis) Date: Sat, 28 Jul 2007 09:01:43 +0200 (CEST) Subject: [Python-checkins] r56600 - in python/branches/release25-maint: Lib/test/test_unicodedata.py Misc/NEWS Modules/unicodedata.c Message-ID: <20070728070143.C94CD1E400A@bag.python.org> Author: martin.v.loewis Date: Sat Jul 28 09:01:43 2007 New Revision: 56600 Modified: python/branches/release25-maint/Lib/test/test_unicodedata.py python/branches/release25-maint/Misc/NEWS python/branches/release25-maint/Modules/unicodedata.c Log: Bug #1704793: Raise KeyError if unicodedata.lookup cannot represent the result in a single character. 
Modified: python/branches/release25-maint/Lib/test/test_unicodedata.py ============================================================================== --- python/branches/release25-maint/Lib/test/test_unicodedata.py (original) +++ python/branches/release25-maint/Lib/test/test_unicodedata.py Sat Jul 28 09:01:43 2007 @@ -6,7 +6,7 @@ """#" import unittest, test.test_support -import hashlib +import hashlib, sys encoding = 'utf-8' @@ -214,6 +214,10 @@ count += 1 self.assert_(count >= 10) # should have tested at least the ASCII digits + def test_bug_1704793(self): + if sys.maxunicode == 65535: + self.assertRaises(KeyError, self.db.lookup, "GOTHIC LETTER FAIHU") + def test_main(): test.test_support.run_unittest( UnicodeMiscTest, Modified: python/branches/release25-maint/Misc/NEWS ============================================================================== --- python/branches/release25-maint/Misc/NEWS (original) +++ python/branches/release25-maint/Misc/NEWS Sat Jul 28 09:01:43 2007 @@ -26,6 +26,9 @@ Library ------- +- Bug #1704793: Raise KeyError if unicodedata.lookup cannot + represent the result in a single character. + - Change location of the package index to pypi.python.org/pypi - Bug #1701409: Fix a segfault in printing ctypes.c_char_p and Modified: python/branches/release25-maint/Modules/unicodedata.c ============================================================================== --- python/branches/release25-maint/Modules/unicodedata.c (original) +++ python/branches/release25-maint/Modules/unicodedata.c Sat Jul 28 09:01:43 2007 @@ -1102,8 +1102,18 @@ return NULL; } +#ifndef Py_UNICODE_WIDE + if (code >= 0x10000) { + /* Raise KeyError for compatibility; the possibly more + correct ValueError was not documented as a possible + exception for 2.5.x and earlier. 
*/ + PyErr_Format(PyExc_KeyError, "result %d larger than sys.maxunicode", + code); + return NULL; + } +#endif str[0] = (Py_UNICODE) code; - return PyUnicode_FromUnicode(str, 1); + return PyUnicode_FromUnicode(str, 1); } /* XXX Add doc strings. */ From python-checkins at python.org Sat Jul 28 09:03:05 2007 From: python-checkins at python.org (martin.v.loewis) Date: Sat, 28 Jul 2007 09:03:05 +0200 (CEST) Subject: [Python-checkins] r56601 - in python/trunk: Lib/test/test_unicodedata.py Misc/NEWS Modules/unicodedata.c Message-ID: <20070728070305.A7C4F1E400A@bag.python.org> Author: martin.v.loewis Date: Sat Jul 28 09:03:05 2007 New Revision: 56601 Modified: python/trunk/Lib/test/test_unicodedata.py python/trunk/Misc/NEWS python/trunk/Modules/unicodedata.c Log: Bug #1704793: Return UTF-16 pair if unicodedata.lookup cannot represent the result in a single character. Modified: python/trunk/Lib/test/test_unicodedata.py ============================================================================== --- python/trunk/Lib/test/test_unicodedata.py (original) +++ python/trunk/Lib/test/test_unicodedata.py Sat Jul 28 09:03:05 2007 @@ -214,6 +214,9 @@ count += 1 self.assert_(count >= 10) # should have tested at least the ASCII digits + def test_bug_1704793(self): + self.assertEquals(self.db.lookup("GOTHIC LETTER FAIHU"), u'\U00010346') + def test_main(): test.test_support.run_unittest( UnicodeMiscTest, Modified: python/trunk/Misc/NEWS ============================================================================== --- python/trunk/Misc/NEWS (original) +++ python/trunk/Misc/NEWS Sat Jul 28 09:03:05 2007 @@ -238,6 +238,9 @@ Library ------- +- Bug #1704793: Return UTF-16 pair if unicodedata.lookup cannot + represent the result in a single character. + - Bug #978833: Close https sockets by releasing the _ssl object. 
- Change location of the package index to pypi.python.org/pypi Modified: python/trunk/Modules/unicodedata.c ============================================================================== --- python/trunk/Modules/unicodedata.c (original) +++ python/trunk/Modules/unicodedata.c Sat Jul 28 09:03:05 2007 @@ -1077,8 +1077,7 @@ unicodedata_lookup(PyObject* self, PyObject* args) { Py_UCS4 code; - Py_UNICODE str[1]; - char errbuf[256]; + Py_UNICODE str[2]; char* name; int namelen; @@ -1086,24 +1085,20 @@ return NULL; if (!_getcode(self, name, namelen, &code)) { - /* XXX(nnorwitz): why are we allocating for the error msg? - Why not always use snprintf? */ - char fmt[] = "undefined character name '%s'"; - char *buf = PyMem_MALLOC(sizeof(fmt) + namelen); - if (buf) - sprintf(buf, fmt, name); - else { - buf = errbuf; - PyOS_snprintf(buf, sizeof(errbuf), fmt, name); - } - PyErr_SetString(PyExc_KeyError, buf); - if (buf != errbuf) - PyMem_FREE(buf); + PyErr_Format(PyExc_KeyError, "undefined character name '%s'", + name); return NULL; } +#ifndef Py_UNICODE_WIDE + if (code >= 0x10000) { + str[0] = 0xd800 + ((code - 0x10000) >> 10); + str[1] = 0xdc00 + ((code - 0x10000) & 0x3ff); + return PyUnicode_FromUnicode(str, 2); + } +#endif str[0] = (Py_UNICODE) code; - return PyUnicode_FromUnicode(str, 1); + return PyUnicode_FromUnicode(str, 1); } /* XXX Add doc strings. */ From python-checkins at python.org Sat Jul 28 12:22:29 2007 From: python-checkins at python.org (martin.v.loewis) Date: Sat, 28 Jul 2007 12:22:29 +0200 (CEST) Subject: [Python-checkins] r56602 - tracker/importer/fixsfmojibake.py Message-ID: <20070728102229.D3C1C1E400A@bag.python.org> Author: martin.v.loewis Date: Sat Jul 28 12:22:29 2007 New Revision: 56602 Added: tracker/importer/fixsfmojibake.py (contents, props changed) Log: Add SF xml_export2 encoding fixing script. 
Added: tracker/importer/fixsfmojibake.py ============================================================================== --- (empty file) +++ tracker/importer/fixsfmojibake.py Sat Jul 28 12:22:29 2007 @@ -0,0 +1,49 @@ +#!/usr/bin/python +# The data exported from SF often are incorrectly +# encoded - two subsequent Unicode character have to +# be interpreted as the two bytes of a single UTF-8 +# character; it looks like the have UTF-8 in the database +# but encode it as if it was Latin-1. +# Unfortunately, this is not consistently so: for some +# data, the intended encoding is really Latin-1. +# This scripts tries to fix it, by recoding everything +# that looks like UTF-8 into the then-proper character +# references. + +# The script assumes that the file encoding is actually +# ASCII, and that non-ASCII characters are always encoded +# as decimal character references. + +import sys, re + +expr = re.compile('(&#[0-9]+;)+') + +def recode(group): + assert group[:2] == '&#' and group[-1] == ';' + chars = group[2:-1].split(';&#') + chars = [unichr(int(c)) for c in chars] + chars = u''.join(chars) + try: + chars = chars.encode('latin-1').decode('utf-8') + except UnicodeError: + return group + chars = ['&#%d;' % ord(c) for c in chars] + return ''.join(chars) + +print >>sys.stderr, len(indata) + +# Make sure that there are only &#decimal; references, +# and that all &# occurrences are markup +assert indata.find('&#x') == -1 +assert indata.find(' Author: facundo.batista Date: Sat Jul 28 16:21:22 2007 New Revision: 56604 Modified: python/trunk/Lib/test/test_asyncore.py Log: Moved all of the capture_server socket setup code into the try block so that the event gets set if a failure occurs during server setup (otherwise the test will block forever). Changed to let the OS assign the server port number, and client side of test waits for port number assignment before proceeding. 
The test data in DispatcherWithSendTests is also sent in multiple send() calls instead of one to make sure this works properly. [GSoC - Alan McIntyre] Modified: python/trunk/Lib/test/test_asyncore.py ============================================================================== --- python/trunk/Lib/test/test_asyncore.py (original) +++ python/trunk/Lib/test/test_asyncore.py Sat Jul 28 16:21:22 2007 @@ -12,7 +12,7 @@ from StringIO import StringIO HOST = "127.0.0.1" -PORT = 54329 +PORT = None class dummysocket: def __init__(self): @@ -53,12 +53,14 @@ # used when testing senders; just collects what it gets until newline is sent def capture_server(evt, buf): - serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - serv.settimeout(3) - serv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - serv.bind(("", PORT)) - serv.listen(5) try: + serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + serv.settimeout(3) + serv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + serv.bind(("", 0)) + global PORT + PORT = serv.getsockname()[1] + serv.listen(5) conn, addr = serv.accept() except socket.timeout: pass @@ -78,6 +80,7 @@ conn.close() finally: serv.close() + PORT = None evt.set() @@ -338,12 +341,26 @@ self.evt = threading.Event() cap = StringIO() threading.Thread(target=capture_server, args=(self.evt,cap)).start() - time.sleep(1) # Give server time to initialize - data = "Suppose there isn't a 16-ton weight?"*5 + # wait until server thread has assigned a port number + n = 1000 + while PORT is None and n > 0: + time.sleep(0.01) + n -= 1 + + # wait a little longer for the server to initialize (it sometimes + # refuses connections on slow machines without this wait) + time.sleep(0.2) + + data = "Suppose there isn't a 16-ton weight?" 
d = dispatcherwithsend_noread() d.create_socket(socket.AF_INET, socket.SOCK_STREAM) d.connect((HOST, PORT)) + + # give time for socket to connect + time.sleep(0.1) + + d.send(data) d.send(data) d.send('\n') @@ -354,7 +371,7 @@ self.evt.wait() - self.assertEqual(cap.getvalue(), data) + self.assertEqual(cap.getvalue(), data*2) class DispatcherWithSendTests_UsePoll(DispatcherWithSendTests): From buildbot at python.org Sat Jul 28 16:53:28 2007 From: buildbot at python.org (buildbot at python.org) Date: Sat, 28 Jul 2007 14:53:28 +0000 Subject: [Python-checkins] buildbot warnings in x86 XP-3 trunk Message-ID: <20070728145328.920AB1E400A@bag.python.org> The Buildbot has detected a new failure of x86 XP-3 trunk. Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520XP-3%2520trunk/builds/107 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_mutants sincerely, -The Buildbot From vwwck at gsb.columbia.edu Sat Jul 28 16:57:52 2007 From: vwwck at gsb.columbia.edu (Mata X.Benny) Date: Sat, 28 Jul 2007 09:57:52 -0500 Subject: [Python-checkins] Document Message-ID: <46AB5970.1020302@edelbrock.com> -------------- next part -------------- A non-text attachment was scrubbed... Name: Document.pdf Type: application/pdf Size: 17658 bytes Desc: not available Url : http://mail.python.org/pipermail/python-checkins/attachments/20070728/a20ab968/attachment-0001.pdf From buildbot at python.org Sat Jul 28 18:27:58 2007 From: buildbot at python.org (buildbot at python.org) Date: Sat, 28 Jul 2007 16:27:58 +0000 Subject: [Python-checkins] buildbot warnings in PPC64 Debian trunk Message-ID: <20070728162758.206221E400B@bag.python.org> The Buildbot has detected a new failure of PPC64 Debian trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/PPC64%2520Debian%2520trunk/builds/79 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista Build had warnings: warnings test Excerpt from the test logfile: Traceback (most recent call last): File "/home/pybot/buildarea64/trunk.klose-debian-ppc64/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/home/pybot/buildarea64/trunk.klose-debian-ppc64/build/Lib/test/test_socketserver.py", line 93, in run svr.serve_a_few() File "/home/pybot/buildarea64/trunk.klose-debian-ppc64/build/Lib/test/test_socketserver.py", line 35, in serve_a_few self.handle_request() File "/home/pybot/buildarea64/trunk.klose-debian-ppc64/build/Lib/SocketServer.py", line 224, in handle_request self.handle_error(request, client_address) File "/home/pybot/buildarea64/trunk.klose-debian-ppc64/build/Lib/SocketServer.py", line 222, in handle_request self.process_request(request, client_address) File "/home/pybot/buildarea64/trunk.klose-debian-ppc64/build/Lib/SocketServer.py", line 429, in process_request self.collect_children() File "/home/pybot/buildarea64/trunk.klose-debian-ppc64/build/Lib/SocketServer.py", line 425, in collect_children self.active_children.remove(pid) ValueError: list.remove(x): x not in list 1 test failed: test_socketserver make: *** [buildbottest] Error 1 sincerely, -The Buildbot From zvw at slumberland.co.uk Sun Jul 29 04:33:15 2007 From: zvw at slumberland.co.uk (Collier Y.Ted) Date: Sat, 28 Jul 2007 19:33:15 -0700 Subject: [Python-checkins] readme Message-ID: <46ABFC6B.6050408@wis.com> -------------- next part -------------- A non-text attachment was scrubbed... 
Name: readme.pdf Type: application/pdf Size: 25842 bytes Desc: not available Url : http://mail.python.org/pipermail/python-checkins/attachments/20070728/4f46ebe8/attachment-0001.pdf From python-checkins at python.org Sun Jul 29 10:26:11 2007 From: python-checkins at python.org (georg.brandl) Date: Sun, 29 Jul 2007 10:26:11 +0200 (CEST) Subject: [Python-checkins] r56611 - python/trunk/Doc/whatsnew/whatsnew25.tex Message-ID: <20070729082611.D95B41E4008@bag.python.org> Author: georg.brandl Date: Sun Jul 29 10:26:10 2007 New Revision: 56611 Modified: python/trunk/Doc/whatsnew/whatsnew25.tex Log: Clarify PEP 343 description. Modified: python/trunk/Doc/whatsnew/whatsnew25.tex ============================================================================== --- python/trunk/Doc/whatsnew/whatsnew25.tex (original) +++ python/trunk/Doc/whatsnew/whatsnew25.tex Sun Jul 29 10:26:10 2007 @@ -640,15 +640,20 @@ \end{verbatim} The expression is evaluated, and it should result in an object that -supports the context management protocol. This object may return a -value that can optionally be bound to the name \var{variable}. (Note -carefully that \var{variable} is \emph{not} assigned the result of -\var{expression}.) The object can then run set-up code -before \var{with-block} is executed and some clean-up code -is executed after the block is done, even if the block raised an exception. +supports the context management protocol (that is, has \method{__enter__()} +and \method{__exit__()} methods. -To enable the statement in Python 2.5, you need -to add the following directive to your module: +The object's \method{__enter__()} is called before \var{with-block} is +executed and therefore can run set-up code. It also may return a value +that is bound to the name \var{variable}, if given. (Note carefully +that \var{variable} is \emph{not} assigned the result of \var{expression}.) 
+ +After execution of the \var{with-block} is finished, the object's +\method{__exit__()} method is called, even if the block raised an exception, +and can therefore run clean-up code. + +To enable the statement in Python 2.5, you need to add the following +directive to your module: \begin{verbatim} from __future__ import with_statement @@ -668,9 +673,13 @@ \end{verbatim} After this statement has executed, the file object in \var{f} will -have been automatically closed, even if the 'for' loop +have been automatically closed, even if the \keyword{for} loop raised an exception part-way through the block. +\note{In this case, \var{f} is the same object created by + \function{open()}, because \method{file.__enter__()} returns + \var{self}.} + The \module{threading} module's locks and condition variables also support the '\keyword{with}' statement: From python-checkins at python.org Sun Jul 29 10:26:15 2007 From: python-checkins at python.org (georg.brandl) Date: Sun, 29 Jul 2007 10:26:15 +0200 (CEST) Subject: [Python-checkins] r56612 - python/branches/release25-maint/Doc/whatsnew/whatsnew25.tex Message-ID: <20070729082615.DEF9D1E4008@bag.python.org> Author: georg.brandl Date: Sun Jul 29 10:26:15 2007 New Revision: 56612 Modified: python/branches/release25-maint/Doc/whatsnew/whatsnew25.tex Log: Clarify PEP 343 description. (backport from rev. 56611) Modified: python/branches/release25-maint/Doc/whatsnew/whatsnew25.tex ============================================================================== --- python/branches/release25-maint/Doc/whatsnew/whatsnew25.tex (original) +++ python/branches/release25-maint/Doc/whatsnew/whatsnew25.tex Sun Jul 29 10:26:15 2007 @@ -640,15 +640,20 @@ \end{verbatim} The expression is evaluated, and it should result in an object that -supports the context management protocol. This object may return a -value that can optionally be bound to the name \var{variable}. 
(Note -carefully that \var{variable} is \emph{not} assigned the result of -\var{expression}.) The object can then run set-up code -before \var{with-block} is executed and some clean-up code -is executed after the block is done, even if the block raised an exception. +supports the context management protocol (that is, has \method{__enter__()} +and \method{__exit__()} methods. -To enable the statement in Python 2.5, you need -to add the following directive to your module: +The object's \method{__enter__()} is called before \var{with-block} is +executed and therefore can run set-up code. It also may return a value +that is bound to the name \var{variable}, if given. (Note carefully +that \var{variable} is \emph{not} assigned the result of \var{expression}.) + +After execution of the \var{with-block} is finished, the object's +\method{__exit__()} method is called, even if the block raised an exception, +and can therefore run clean-up code. + +To enable the statement in Python 2.5, you need to add the following +directive to your module: \begin{verbatim} from __future__ import with_statement @@ -668,9 +673,13 @@ \end{verbatim} After this statement has executed, the file object in \var{f} will -have been automatically closed, even if the 'for' loop +have been automatically closed, even if the \keyword{for} loop raised an exception part-way through the block. 
+\note{In this case, \var{f} is the same object created by + \function{open()}, because \method{file.__enter__()} returns + \var{self}.} + The \module{threading} module's locks and condition variables also support the '\keyword{with}' statement: From python-checkins at python.org Sun Jul 29 10:27:58 2007 From: python-checkins at python.org (georg.brandl) Date: Sun, 29 Jul 2007 10:27:58 +0200 (CEST) Subject: [Python-checkins] r56613 - doctools/trunk/converter/newfiles/doc_rest.rst Message-ID: <20070729082758.23BA41E4008@bag.python.org> Author: georg.brandl Date: Sun Jul 29 10:27:57 2007 New Revision: 56613 Modified: doctools/trunk/converter/newfiles/doc_rest.rst Log: [Carl Cerecke] Reword first section. Modified: doctools/trunk/converter/newfiles/doc_rest.rst ============================================================================== --- doctools/trunk/converter/newfiles/doc_rest.rst (original) +++ doctools/trunk/converter/newfiles/doc_rest.rst Sun Jul 29 10:27:57 2007 @@ -4,22 +4,25 @@ ======================= This section is a brief introduction to reStructuredText (reST) concepts and -syntax, to provide authors enough information to autor documents productively. +syntax, intended to provide authors with enough information to author +documents productively. Since reST was designed to be a simple, unobtrusive markup language, this will not take too long. .. seealso:: - The authoritative `reStructuredText User - Documentation `_. + The authoritative `reStructuredText User + Documentation `_. Paragraphs ---------- -The most basic block a reST document is made of. Paragraphs are chunks of text -separated by one ore more blank lines. As in Python, indentation is significant -in reST, so all lines of a paragraph must be left-aligned. +The paragraph is the most basic block in a reST document. +Paragraphs are simply chunks of text +separated by one or more blank lines. 
As in Python, indentation is significant +in reST, so all lines of the same paragraph must be left-aligned +to the same level of indentation. Inline markup @@ -226,4 +229,4 @@ documents written from them will be in that encoding as well. -XXX: Gotchas \ No newline at end of file +XXX: Gotchas From python-checkins at python.org Sun Jul 29 11:11:16 2007 From: python-checkins at python.org (georg.brandl) Date: Sun, 29 Jul 2007 11:11:16 +0200 (CEST) Subject: [Python-checkins] r56614 - python/trunk/Doc/tut/tut.tex Message-ID: <20070729091116.188B01E4008@bag.python.org> Author: georg.brandl Date: Sun Jul 29 11:11:15 2007 New Revision: 56614 Modified: python/trunk/Doc/tut/tut.tex Log: try-except-finally is new in 2.5. Modified: python/trunk/Doc/tut/tut.tex ============================================================================== --- python/trunk/Doc/tut/tut.tex (original) +++ python/trunk/Doc/tut/tut.tex Sun Jul 29 11:11:15 2007 @@ -3765,7 +3765,9 @@ \keyword{finally} clause has been executed. The \keyword{finally} clause is also executed ``on the way out'' when any other clause of the \keyword{try} statement is left via a \keyword{break}, \keyword{continue} -or \keyword{return} statement. A more complicated example: +or \keyword{return} statement. A more complicated example (having +\keyword{except} and \keyword{finally} clauses in the same \keyword{try} +statement works as of Python 2.5): \begin{verbatim} >>> def divide(x, y): From python-checkins at python.org Sun Jul 29 11:11:19 2007 From: python-checkins at python.org (georg.brandl) Date: Sun, 29 Jul 2007 11:11:19 +0200 (CEST) Subject: [Python-checkins] r56615 - python/branches/release25-maint/Doc/tut/tut.tex Message-ID: <20070729091119.E52F61E400D@bag.python.org> Author: georg.brandl Date: Sun Jul 29 11:11:19 2007 New Revision: 56615 Modified: python/branches/release25-maint/Doc/tut/tut.tex Log: try-except-finally is new in 2.5. (backport from rev. 
56614) Modified: python/branches/release25-maint/Doc/tut/tut.tex ============================================================================== --- python/branches/release25-maint/Doc/tut/tut.tex (original) +++ python/branches/release25-maint/Doc/tut/tut.tex Sun Jul 29 11:11:19 2007 @@ -3765,7 +3765,9 @@ \keyword{finally} clause has been executed. The \keyword{finally} clause is also executed ``on the way out'' when any other clause of the \keyword{try} statement is left via a \keyword{break}, \keyword{continue} -or \keyword{return} statement. A more complicated example: +or \keyword{return} statement. A more complicated example (having +\keyword{except} and \keyword{finally} clauses in the same \keyword{try} +statement works as of Python 2.5): \begin{verbatim} >>> def divide(x, y): From python-checkins at python.org Sun Jul 29 13:55:54 2007 From: python-checkins at python.org (erik.forsberg) Date: Sun, 29 Jul 2007 13:55:54 +0200 (CEST) Subject: [Python-checkins] r56616 - tracker/instances/python-dev/detectors/changes_xml_writer.py Message-ID: <20070729115554.5835D1E4008@bag.python.org> Author: erik.forsberg Date: Sun Jul 29 13:55:54 2007 New Revision: 56616 Added: tracker/instances/python-dev/detectors/changes_xml_writer.py Log: Adding recent-changes detector in python 2.4 version. Resolves http://psf.upfronthosting.co.za/roundup/meta/file55/changes_xml_writer-2.4.py Added: tracker/instances/python-dev/detectors/changes_xml_writer.py ============================================================================== --- (empty file) +++ tracker/instances/python-dev/detectors/changes_xml_writer.py Sun Jul 29 13:55:54 2007 @@ -0,0 +1,194 @@ +# +# changes.xml writer detector. 
+# +# Copyright (c) 2007 Michal Kwiatkowski +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# +# * Neither the name of the author nor the names of his contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# + +"""changes.xml writer detector -- save each database change to an XML file. + +Root element is called `changes` and it has at most `ChangesXml.max_items` +children, each called a `change`. Each `change` has the following attributes: + +:date: date in RFC2822 format when the change was made +:id: unique identifier of this change (note: not an integer) +:type: type of this change (see below) + +A structure of a `change` depends on its `type`. 
Currently implemented +change types and their formats are listed below. + +* type = `file-added` + + Describes a new file attached to an existing issue. Child elements: + + :file-id: unique integer identifier of the file + :file-name: name of the uploaded file + :file-type: MIME type of the file content + :file-url: permanent URL of the file + :issue-id: unique integer identifier of an issue this file is attached to +""" + +import os +import urllib +from xml.dom import minidom +from xml.parsers.expat import ExpatError +from time import gmtime, strftime + +# Relative to tracker home directory. +FILENAME = os.path.join('%(TEMPLATES)s', 'recent-changes.xml') + + +def tracker_url(db): + return str(db.config.options[('tracker', 'web')]) + +def changes_xml_path(db): + return os.path.join(db.config.HOME, FILENAME % db.config.options) + +def rfc2822_date(): + return strftime("%a, %d %b %Y %H:%M:%S +0000", gmtime()) + +class File(object): + def __init__(self, db, id, issue_id): + self.db = db + self.id = id + self.issue_id = issue_id + + self.name = db.file.get(id, 'name') + self.type = db.file.get(id, 'type') + # Based on roundup.cgi.templating._HTMLItem.download_url(). + self.download_url = tracker_url(self.db) +\ + urllib.quote('%s%s/%s' % ('file', self.id, self.name)) + +class ChangesXml(object): + # Maximum number of changes stored in a file. 
+ max_items = 20 + + def __init__(self, filename): + self.filename = filename + self._read_document() + self.modified = False + + def save(self): + if not self.modified: + return + + self._trim_to_max_items() + + fd = open(self.filename, 'w') + self.document.writexml(fd, encoding="UTF-8") + fd.close() + + def add_file(self, file): + change = self._change("file%s-added-to-issue%s" % (file.id, file.issue_id), + "file-added") + + change.appendChild(self._element_with_text("file-id", file.id)) + change.appendChild(self._element_with_text("file-name", file.name)) + change.appendChild(self._element_with_text("file-type", file.type)) + change.appendChild(self._element_with_text("file-url", file.download_url)) + change.appendChild(self._element_with_text("issue-id", file.issue_id)) + + self.root.appendChild(change) + self.modified = True + + def add_files(self, files): + for file in files: + self.add_file(file) + + def _change(self, id, type): + """Return new 'change' element of a given type. + + """ + change = self.document.createElement("change") + change.setAttribute("id", id) + change.setAttribute("type", type) + change.setAttribute("date", rfc2822_date()) + return change + + def _element_with_text(self, name, value): + """Return new element with given name and text node as a value. + value + """ + element = self.document.createElement(name) + text = self.document.createTextNode(str(value)) + element.appendChild(text) + return element + + def _trim_to_max_items(self): + """Remove changes exceeding self.max_items. + """ + # Assumes that changes are stored sequentially from oldest to newest. + # Will do for now. + for change in self.root.getElementsByTagName("change")[0:-self.max_items]: + self.root.removeChild(change) + + def _read_document(self): + try: + self.document = minidom.parse(self.filename) + self.root = self.document.firstChild + except IOError, e: + # File not found, create a new one then. 
+ if e.errno != 2: + raise + self._create_new_document() + except ExpatError: + # File has been damaged, forget about it and create a new one. + self._create_new_document() + + def _create_new_document(self): + self.document = minidom.Document() + self.root = self.document.createElement("changes") + self.document.appendChild(self.root) + +def get_new_files_ids(issue_now, issue_then): + """Return ids of files added between `now` and `then`. + """ + files_now = set(issue_now['files']) + if issue_then: + files_then = set(issue_then['files']) + else: + files_then = set() + return map(int, files_now - files_then) + +def file_added_to_issue(db, cl, issue_id, olddata): + try: + changes = ChangesXml(changes_xml_path(db)) + issue = db.issue.getnode(issue_id) + new_files = [ File(db, id, issue_id) for id in get_new_files_ids(issue, olddata) ] + + changes.add_files(new_files) + changes.save() + except: + # We can't mess up with a database commit. + pass + + +def init(db): + db.issue.react('create', file_added_to_issue) + db.issue.react('set', file_added_to_issue) From python-checkins at python.org Sun Jul 29 16:23:09 2007 From: python-checkins at python.org (facundo.batista) Date: Sun, 29 Jul 2007 16:23:09 +0200 (CEST) Subject: [Python-checkins] r56617 - python/trunk/Lib/test/test_asynchat.py Message-ID: <20070729142309.351E31E4008@bag.python.org> Author: facundo.batista Date: Sun Jul 29 16:23:08 2007 New Revision: 56617 Modified: python/trunk/Lib/test/test_asynchat.py Log: Added tests for asynchat classes simple_producer & fifo, and the find_prefix_at_end function. Check behavior of a string given as a producer. Added tests for behavior of asynchat.async_chat when given int, long, and None terminator arguments. Added usepoll attribute to TestAsynchat to allow running the asynchat tests with poll support chosen whether it's available or not (improves coverage of asyncore code). 
[GSoC - Alan McIntyre] Modified: python/trunk/Lib/test/test_asynchat.py ============================================================================== --- python/trunk/Lib/test/test_asynchat.py (original) +++ python/trunk/Lib/test/test_asynchat.py Sun Jul 29 16:23:08 2007 @@ -7,8 +7,12 @@ HOST = "127.0.0.1" PORT = 54322 +SERVER_QUIT = 'QUIT\n' class echo_server(threading.Thread): + # parameter to determine the number of bytes passed back to the + # client each send + chunk_size = 1 def run(self): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) @@ -17,15 +21,28 @@ PORT = test_support.bind_port(sock, HOST, PORT) sock.listen(1) conn, client = sock.accept() - buffer = "" - while "\n" not in buffer: + self.buffer = "" + # collect data until quit message is seen + while SERVER_QUIT not in self.buffer: data = conn.recv(1) if not data: break - buffer = buffer + data - while buffer: - n = conn.send(buffer) - buffer = buffer[n:] + self.buffer = self.buffer + data + + # remove the SERVER_QUIT message + self.buffer = self.buffer.replace(SERVER_QUIT, '') + + # re-send entire set of collected data + try: + # this may fail on some tests, such as test_close_when_done, since + # the client closes the channel when it's done sending + while self.buffer: + n = conn.send(self.buffer[:self.chunk_size]) + time.sleep(0.001) + self.buffer = self.buffer[n:] + except: + pass + conn.close() sock.close() @@ -33,61 +50,197 @@ def __init__(self, terminator): asynchat.async_chat.__init__(self) - self.contents = None + self.contents = [] self.create_socket(socket.AF_INET, socket.SOCK_STREAM) self.connect((HOST, PORT)) self.set_terminator(terminator) - self.buffer = "" + self.buffer = '' def handle_connect(self): pass ##print "Connected" def collect_incoming_data(self, data): - self.buffer = self.buffer + data + self.buffer += data def found_terminator(self): - #print "Received:", repr(self.buffer) - self.contents = self.buffer + self.contents.append(self.buffer) self.buffer = "" - 
self.close() class TestAsynchat(unittest.TestCase): + usepoll = False + def setUp (self): pass def tearDown (self): pass - def test_line_terminator(self): + def line_terminator_check(self, term, server_chunk): s = echo_server() + s.chunk_size = server_chunk s.start() - time.sleep(1) # Give server time to initialize - c = echo_client('\n') + time.sleep(0.5) # Give server time to initialize + c = echo_client(term) c.push("hello ") - c.push("world\n") - asyncore.loop() + c.push("world%s" % term) + c.push("I'm not dead yet!%s" % term) + c.push(SERVER_QUIT) + asyncore.loop(use_poll=self.usepoll)#, count=5, timeout=5) s.join() - self.assertEqual(c.contents, 'hello world') + self.assertEqual(c.contents, ["hello world", "I'm not dead yet!"]) + + # the line terminator tests below check receiving variously-sized + # chunks back from the server in order to exercise all branches of + # async_chat.handle_read + + def test_line_terminator1(self): + # test one-character terminator + for l in (1,2,3): + self.line_terminator_check('\n', l) + + def test_line_terminator2(self): + # test two-character terminator + for l in (1,2,3): + self.line_terminator_check('\r\n', l) + + def test_line_terminator3(self): + # test three-character terminator + for l in (1,2,3): + self.line_terminator_check('qqq', l) - def test_numeric_terminator(self): + def numeric_terminator_check(self, termlen): # Try reading a fixed number of bytes s = echo_server() s.start() - time.sleep(1) # Give server time to initialize - c = echo_client(6L) - c.push("hello ") - c.push("world\n") - asyncore.loop() + time.sleep(0.5) # Give server time to initialize + c = echo_client(termlen) + data = "hello world, I'm not dead yet!\n" + c.push(data) + c.push(SERVER_QUIT) + asyncore.loop(use_poll=self.usepoll)#, count=5, timeout=5) + s.join() + + self.assertEqual(c.contents, [data[:termlen]]) + + def test_numeric_terminator1(self): + # check that ints & longs both work (since type is + # explicitly checked in 
async_chat.handle_read) + self.numeric_terminator_check(1) + self.numeric_terminator_check(1L) + + def test_numeric_terminator2(self): + self.numeric_terminator_check(6L) + + def test_none_terminator(self): + # Try reading a fixed number of bytes + s = echo_server() + s.start() + time.sleep(0.5) # Give server time to initialize + c = echo_client(None) + data = "hello world, I'm not dead yet!\n" + c.push(data) + c.push(SERVER_QUIT) + asyncore.loop(use_poll=self.usepoll)#, count=5, timeout=5) + s.join() + + self.assertEqual(c.contents, []) + self.assertEqual(c.buffer, data) + + def test_simple_producer(self): + s = echo_server() + s.start() + time.sleep(0.5) # Give server time to initialize + c = echo_client('\n') + data = "hello world\nI'm not dead yet!\n" + p = asynchat.simple_producer(data+SERVER_QUIT, buffer_size=8) + c.push_with_producer(p) + asyncore.loop(use_poll=self.usepoll)#, count=5, timeout=5) + s.join() + + self.assertEqual(c.contents, ["hello world", "I'm not dead yet!"]) + + def test_string_producer(self): + s = echo_server() + s.start() + time.sleep(0.5) # Give server time to initialize + c = echo_client('\n') + data = "hello world\nI'm not dead yet!\n" + c.push_with_producer(data+SERVER_QUIT) + asyncore.loop(use_poll=self.usepoll)#, count=5, timeout=5) + s.join() + + self.assertEqual(c.contents, ["hello world", "I'm not dead yet!"]) + + def test_empty_line(self): + # checks that empty lines are handled correctly + s = echo_server() + s.start() + time.sleep(0.5) # Give server time to initialize + c = echo_client('\n') + c.push("hello world\n\nI'm not dead yet!\n") + c.push(SERVER_QUIT) + asyncore.loop(use_poll=self.usepoll) + s.join() + + self.assertEqual(c.contents, ["hello world", "", "I'm not dead yet!"]) + + def test_close_when_done(self): + s = echo_server() + s.start() + time.sleep(0.5) # Give server time to initialize + c = echo_client('\n') + c.push("hello world\nI'm not dead yet!\n") + c.push(SERVER_QUIT) + c.close_when_done() + 
asyncore.loop(use_poll=self.usepoll)#, count=5, timeout=5) s.join() - self.assertEqual(c.contents, 'hello ') + self.assertEqual(c.contents, []) + # the server might have been able to send a byte or two back, but this + # at least checks that it received something and didn't just fail + # (which could still result in the client not having received anything) + self.assertTrue(len(s.buffer) > 0) + + +class TestAsynchat_WithPoll(TestAsynchat): + usepoll = True + +class TestHelperFunctions(unittest.TestCase): + def test_find_prefix_at_end(self): + self.assertEqual(asynchat.find_prefix_at_end("qwerty\r", "\r\n"), 1) + self.assertEqual(asynchat.find_prefix_at_end("qwertydkjf", "\r\n"), 0) + +class TestFifo(unittest.TestCase): + def test_basic(self): + f = asynchat.fifo() + f.push(7) + f.push('a') + self.assertEqual(len(f), 2) + self.assertEqual(f.first(), 7) + self.assertEqual(f.pop(), (1, 7)) + self.assertEqual(len(f), 1) + self.assertEqual(f.first(), 'a') + self.assertEqual(f.is_empty(), False) + self.assertEqual(f.pop(), (1, 'a')) + self.assertEqual(len(f), 0) + self.assertEqual(f.is_empty(), True) + self.assertEqual(f.pop(), (0, None)) + + def test_given_list(self): + f = asynchat.fifo(['x', 17, 3]) + self.assertEqual(len(f), 3) + self.assertEqual(f.pop(), (1, 'x')) + self.assertEqual(f.pop(), (1, 17)) + self.assertEqual(f.pop(), (1, 3)) + self.assertEqual(f.pop(), (0, None)) def test_main(verbose=None): - test_support.run_unittest(TestAsynchat) + test_support.run_unittest(TestAsynchat, TestAsynchat_WithPoll, + TestHelperFunctions, TestFifo) if __name__ == "__main__": test_main(verbose=True) From buildbot at python.org Sun Jul 29 17:14:34 2007 From: buildbot at python.org (buildbot at python.org) Date: Sun, 29 Jul 2007 15:14:34 +0000 Subject: [Python-checkins] buildbot warnings in g4 osx.4 trunk Message-ID: <20070729151434.2ED741E400F@bag.python.org> The Buildbot has detected a new failure of g4 osx.4 trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/g4%2520osx.4%2520trunk/builds/2164 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista,georg.brandl Build had warnings: warnings test Excerpt from the test logfile: Traceback (most recent call last): File "/Users/buildslave/bb/trunk.psf-g4/build/Lib/threading.py", line 465, in __bootstrap self.run() File "/Users/buildslave/bb/trunk.psf-g4/build/Lib/threading.py", line 445, in run self.__target(*self.__args, **self.__kwargs) File "/Users/buildslave/bb/trunk.psf-g4/build/Lib/bsddb/test/test_thread.py", line 281, in readerThread rec = dbutils.DeadlockWrap(c.next, max_retries=10) File "/Users/buildslave/bb/trunk.psf-g4/build/Lib/bsddb/dbutils.py", line 62, in DeadlockWrap return function(*_args, **_kwargs) DBLockDeadlockError: (-30995, 'DB_LOCK_DEADLOCK: Locker killed to resolve a deadlock') 1 test failed: test_asynchat make: *** [buildbottest] Error 1 sincerely, -The Buildbot From python-checkins at python.org Sun Jul 29 19:17:45 2007 From: python-checkins at python.org (martin.v.loewis) Date: Sun, 29 Jul 2007 19:17:45 +0200 (CEST) Subject: [Python-checkins] r56618 - peps/trunk/pep-0000.txt peps/trunk/pep-3123.txt Message-ID: <20070729171745.CE0D51E4014@bag.python.org> Author: martin.v.loewis Date: Sun Jul 29 19:17:45 2007 New Revision: 56618 Modified: peps/trunk/pep-0000.txt peps/trunk/pep-3123.txt Log: Mark PEP 3123 final. 
Modified: peps/trunk/pep-0000.txt ============================================================================== --- peps/trunk/pep-0000.txt (original) +++ peps/trunk/pep-0000.txt Sun Jul 29 19:17:45 2007 @@ -81,7 +81,6 @@ SA 3119 Introducing Abstract Base Classes GvR, Talin SA 3120 Using UTF-8 as the default source encoding von L?wis SA 3121 Extension Module Initialization & Finalization von L?wis - SA 3123 Making PyObject_HEAD conform to standard C von L?wis SA 3131 Supporting Non-ASCII Identifiers von L?wis Open PEPs (under consideration) @@ -174,6 +173,7 @@ SF 3112 Bytes literals in Python 3000 Orendorff SF 3113 Removal of Tuple Parameter Unpacking Cannon SF 3114 Renaming iterator.next() to .__next__() Yee + SF 3123 Making PyObject_HEAD conform to standard C von L?wis SF 3127 Integer Literal Support and Syntax Maupin SF 3129 Class Decorators Winter SF 3132 Extended Iterable Unpacking Brandl @@ -495,7 +495,7 @@ SA 3120 Using UTF-8 as the default source encoding von L?wis SA 3121 Extension Module Initialization & Finalization von L?wis SR 3122 Delineation of the main module Cannon - SA 3123 Making PyObject_HEAD conform to standard C von L?wis + SF 3123 Making PyObject_HEAD conform to standard C von L?wis SD 3124 Overloading, Generic Functions, Interfaces Eby SR 3125 Remove Backslash Continuation Jewett SR 3126 Remove Implicit String Concatenation Jewett Modified: peps/trunk/pep-3123.txt ============================================================================== --- peps/trunk/pep-3123.txt (original) +++ peps/trunk/pep-3123.txt Sun Jul 29 19:17:45 2007 @@ -3,7 +3,7 @@ Version: $Revision$ Last-Modified: $Date$ Author: Martin von L?wis -Status: Accepted +Status: Final Type: Standards Track Content-Type: text/x-rst Created: 27-Apr-2007 From python-checkins at python.org Sun Jul 29 19:37:22 2007 From: python-checkins at python.org (georg.brandl) Date: Sun, 29 Jul 2007 19:37:22 +0200 (CEST) Subject: [Python-checkins] r56619 - 
python/branches/release25-maint/Objects/stringobject.c python/branches/release25-maint/Objects/unicodeobject.c Message-ID: <20070729173722.E642C1E4008@bag.python.org> Author: georg.brandl Date: Sun Jul 29 19:37:22 2007 New Revision: 56619 Modified: python/branches/release25-maint/Objects/stringobject.c python/branches/release25-maint/Objects/unicodeobject.c Log: Bug #1763149: use proper slice syntax in docstring. (backport) Modified: python/branches/release25-maint/Objects/stringobject.c ============================================================================== --- python/branches/release25-maint/Objects/stringobject.c (original) +++ python/branches/release25-maint/Objects/stringobject.c Sun Jul 29 19:37:22 2007 @@ -1890,7 +1890,7 @@ "S.find(sub [,start [,end]]) -> int\n\ \n\ Return the lowest index in S where substring sub is found,\n\ -such that sub is contained within s[start,end]. Optional\n\ +such that sub is contained within s[start:end]. Optional\n\ arguments start and end are interpreted as in slice notation.\n\ \n\ Return -1 on failure."); @@ -1929,7 +1929,7 @@ "S.rfind(sub [,start [,end]]) -> int\n\ \n\ Return the highest index in S where substring sub is found,\n\ -such that sub is contained within s[start,end]. Optional\n\ +such that sub is contained within s[start:end]. Optional\n\ arguments start and end are interpreted as in slice notation.\n\ \n\ Return -1 on failure."); Modified: python/branches/release25-maint/Objects/unicodeobject.c ============================================================================== --- python/branches/release25-maint/Objects/unicodeobject.c (original) +++ python/branches/release25-maint/Objects/unicodeobject.c Sun Jul 29 19:37:22 2007 @@ -5757,7 +5757,7 @@ "S.find(sub [,start [,end]]) -> int\n\ \n\ Return the lowest index in S where substring sub is found,\n\ -such that sub is contained within s[start,end]. Optional\n\ +such that sub is contained within s[start:end]. 
Optional\n\ arguments start and end are interpreted as in slice notation.\n\ \n\ Return -1 on failure."); @@ -6498,7 +6498,7 @@ "S.rfind(sub [,start [,end]]) -> int\n\ \n\ Return the highest index in S where substring sub is found,\n\ -such that sub is contained within s[start,end]. Optional\n\ +such that sub is contained within s[start:end]. Optional\n\ arguments start and end are interpreted as in slice notation.\n\ \n\ Return -1 on failure."); From python-checkins at python.org Sun Jul 29 19:38:36 2007 From: python-checkins at python.org (georg.brandl) Date: Sun, 29 Jul 2007 19:38:36 +0200 (CEST) Subject: [Python-checkins] r56620 - python/trunk/Objects/stringobject.c python/trunk/Objects/unicodeobject.c Message-ID: <20070729173836.071301E4008@bag.python.org> Author: georg.brandl Date: Sun Jul 29 19:38:35 2007 New Revision: 56620 Modified: python/trunk/Objects/stringobject.c python/trunk/Objects/unicodeobject.c Log: Bug #1763149: use proper slice syntax in docstring. (backport) Modified: python/trunk/Objects/stringobject.c ============================================================================== --- python/trunk/Objects/stringobject.c (original) +++ python/trunk/Objects/stringobject.c Sun Jul 29 19:38:35 2007 @@ -1891,7 +1891,7 @@ "S.find(sub [,start [,end]]) -> int\n\ \n\ Return the lowest index in S where substring sub is found,\n\ -such that sub is contained within s[start,end]. Optional\n\ +such that sub is contained within s[start:end]. Optional\n\ arguments start and end are interpreted as in slice notation.\n\ \n\ Return -1 on failure."); @@ -1930,7 +1930,7 @@ "S.rfind(sub [,start [,end]]) -> int\n\ \n\ Return the highest index in S where substring sub is found,\n\ -such that sub is contained within s[start,end]. Optional\n\ +such that sub is contained within s[start:end]. 
Optional\n\ arguments start and end are interpreted as in slice notation.\n\ \n\ Return -1 on failure."); Modified: python/trunk/Objects/unicodeobject.c ============================================================================== --- python/trunk/Objects/unicodeobject.c (original) +++ python/trunk/Objects/unicodeobject.c Sun Jul 29 19:38:35 2007 @@ -5760,7 +5760,7 @@ "S.find(sub [,start [,end]]) -> int\n\ \n\ Return the lowest index in S where substring sub is found,\n\ -such that sub is contained within s[start,end]. Optional\n\ +such that sub is contained within s[start:end]. Optional\n\ arguments start and end are interpreted as in slice notation.\n\ \n\ Return -1 on failure."); @@ -6501,7 +6501,7 @@ "S.rfind(sub [,start [,end]]) -> int\n\ \n\ Return the highest index in S where substring sub is found,\n\ -such that sub is contained within s[start,end]. Optional\n\ +such that sub is contained within s[start:end]. Optional\n\ arguments start and end are interpreted as in slice notation.\n\ \n\ Return -1 on failure."); From buildbot at python.org Sun Jul 29 19:42:33 2007 From: buildbot at python.org (buildbot at python.org) Date: Sun, 29 Jul 2007 17:42:33 +0000 Subject: [Python-checkins] buildbot failure in sparc Ubuntu dapper trunk Message-ID: <20070729174233.697D61E4008@bag.python.org> The Buildbot has detected a new failure of sparc Ubuntu dapper trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/sparc%2520Ubuntu%2520dapper%2520trunk/builds/8 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: facundo.batista BUILD FAILED: failed failed slave lost sincerely, -The Buildbot From buildbot at python.org Sun Jul 29 20:00:36 2007 From: buildbot at python.org (buildbot at python.org) Date: Sun, 29 Jul 2007 18:00:36 +0000 Subject: [Python-checkins] buildbot warnings in amd64 gentoo 2.5 Message-ID: <20070729180037.1A0F71E4008@bag.python.org> The Buildbot has detected a new failure of amd64 gentoo 2.5. Full details are available at: http://www.python.org/dev/buildbot/all/amd64%2520gentoo%25202.5/builds/339 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch branches/release25-maint] HEAD Blamelist: georg.brandl Build had warnings: warnings failed slave lost sincerely, -The Buildbot From python-checkins at python.org Sun Jul 29 20:21:40 2007 From: python-checkins at python.org (martin.v.loewis) Date: Sun, 29 Jul 2007 20:21:40 +0200 (CEST) Subject: [Python-checkins] r56622 - peps/trunk/pep-0000.txt peps/trunk/pep-3120.txt Message-ID: <20070729182140.CCF0B1E400C@bag.python.org> Author: martin.v.loewis Date: Sun Jul 29 20:21:40 2007 New Revision: 56622 Modified: peps/trunk/pep-0000.txt peps/trunk/pep-3120.txt Log: Mark PEP 3120 as final. 
Modified: peps/trunk/pep-0000.txt ============================================================================== --- peps/trunk/pep-0000.txt (original) +++ peps/trunk/pep-0000.txt Sun Jul 29 20:21:40 2007 @@ -79,7 +79,6 @@ SA 3109 Raising Exceptions in Python 3000 Winter SA 3115 Metaclasses in Python 3000 Talin SA 3119 Introducing Abstract Base Classes GvR, Talin - SA 3120 Using UTF-8 as the default source encoding von L?wis SA 3121 Extension Module Initialization & Finalization von L?wis SA 3131 Supporting Non-ASCII Identifiers von L?wis @@ -173,6 +172,7 @@ SF 3112 Bytes literals in Python 3000 Orendorff SF 3113 Removal of Tuple Parameter Unpacking Cannon SF 3114 Renaming iterator.next() to .__next__() Yee + SF 3120 Using UTF-8 as the default source encoding von L?wis SF 3123 Making PyObject_HEAD conform to standard C von L?wis SF 3127 Integer Literal Support and Syntax Maupin SF 3129 Class Decorators Winter @@ -492,7 +492,7 @@ SR 3117 Postfix Type Declarations Brandl S 3118 Revising the buffer protocol Oliphant, Banks SA 3119 Introducing Abstract Base Classes GvR, Talin - SA 3120 Using UTF-8 as the default source encoding von L?wis + SF 3120 Using UTF-8 as the default source encoding von L?wis SA 3121 Extension Module Initialization & Finalization von L?wis SR 3122 Delineation of the main module Cannon SF 3123 Making PyObject_HEAD conform to standard C von L?wis Modified: peps/trunk/pep-3120.txt ============================================================================== --- peps/trunk/pep-3120.txt (original) +++ peps/trunk/pep-3120.txt Sun Jul 29 20:21:40 2007 @@ -3,7 +3,7 @@ Version: $Revision$ Last-Modified: $Date$ Author: Martin von L?wis -Status: Accepted +Status: Final Type: Standards Track Content-Type: text/x-rst Created: 15-Apr-2007 From python-checkins at python.org Mon Jul 30 02:45:29 2007 From: python-checkins at python.org (mark.hammond) Date: Mon, 30 Jul 2007 02:45:29 +0200 (CEST) Subject: [Python-checkins] r56624 - python/trunk/PC/pyconfig.h 
Message-ID: <20070730004529.AC9531E4006@bag.python.org> Author: mark.hammond Date: Mon Jul 30 02:45:29 2007 New Revision: 56624 Modified: python/trunk/PC/pyconfig.h Log: Correct use of Py_BUILD_CORE - now make sure it is defined before it is referenced, and also fix definition of _WIN32_WINNT. Resolves patch 1761803. Modified: python/trunk/PC/pyconfig.h ============================================================================== --- python/trunk/PC/pyconfig.h (original) +++ python/trunk/PC/pyconfig.h Mon Jul 30 02:45:29 2007 @@ -32,6 +32,11 @@ #define MS_WINCE #endif +/* Deprecated USE_DL_EXPORT macro - please use Py_BUILD_CORE */ +#ifdef USE_DL_EXPORT +# define Py_BUILD_CORE +#endif /* USE_DL_EXPORT */ + /* Visual Studio 2005 introduces deprecation warnings for "insecure" and POSIX functions. The insecure functions should be replaced by *_s versions (according to Microsoft); the @@ -168,8 +173,8 @@ #ifndef WINVER #define WINVER Py_WINVER #endif -#ifndef _WINNT_WIN32 -#define _WINNT_WIN32 Py_WINVER +#ifndef _WIN32_WINNT +#define _WIN32_WINNT Py_WINVER #endif #endif @@ -301,11 +306,6 @@ # define MS_COREDLL /* deprecated old symbol */ #endif /* !MS_NO_COREDLL && ... */ -/* Deprecated USE_DL_EXPORT macro - please use Py_BUILD_CORE */ -#ifdef USE_DL_EXPORT -# define Py_BUILD_CORE -#endif /* USE_DL_EXPORT */ - /* All windows compilers that use this header support __declspec */ #define HAVE_DECLSPEC_DLL From buildbot at python.org Mon Jul 30 03:13:20 2007 From: buildbot at python.org (buildbot at python.org) Date: Mon, 30 Jul 2007 01:13:20 +0000 Subject: [Python-checkins] buildbot warnings in ia64 Ubuntu trunk trunk Message-ID: <20070730011320.976FD1E4006@bag.python.org> The Buildbot has detected a new failure of ia64 Ubuntu trunk trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/ia64%2520Ubuntu%2520trunk%2520trunk/builds/780 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: mark.hammond Build had warnings: warnings failed slave lost sincerely, -The Buildbot From python-checkins at python.org Mon Jul 30 20:11:00 2007 From: python-checkins at python.org (guido.van.rossum) Date: Mon, 30 Jul 2007 20:11:00 +0200 (CEST) Subject: [Python-checkins] r56630 - peps/trunk/pep-3116.txt Message-ID: <20070730181100.E69D11E4011@bag.python.org> Author: guido.van.rossum Date: Mon Jul 30 20:11:00 2007 New Revision: 56630 Modified: peps/trunk/pep-3116.txt Log: Add support for newline='\r', for some Mac apps that still write those. Modified: peps/trunk/pep-3116.txt ============================================================================== --- peps/trunk/pep-3116.txt (original) +++ peps/trunk/pep-3116.txt Mon Jul 30 20:11:00 2007 @@ -346,13 +346,16 @@ an encoding to be used for translating between the byte-representation and character-representation. If it is ``None``, then the system's locale setting will be used as the - default. ``newline`` can be ``None``, ``'\n'``, or ``'\r\n'`` - (all other values are illegal); it indicates the translation - for ``'\n'`` characters written. If ``None``, a + default. ``newline`` can be ``None``, ``'\n'``, ``'\r'``, or + ``'\r\n'`` (all other values are illegal); it indicates the + translation for ``'\n'`` characters written. If ``None``, a system-specific default is chosen, i.e., ``'\r\n'`` on Windows and ``'\n'`` on Unix/Linux. Setting ``newline='\n'`` on input means that no CRLF translation is done; lines ending in - ``'\r\n'`` will be returned as ``'\r\n'``. + ``'\r\n'`` will be returned as ``'\r\n'``. 
(``'\r'`` support + is still needed for some OSX applications that produce files + using ``'\r'`` line endings; Excel (when exporting to text) + and Adobe Illustrator EPS files are the most common examples. Another implementation, ``StringIO``, creates a file-like ``TextIO`` implementation without an underlying Buffered I/O object. While @@ -419,7 +422,7 @@ assert isinstance(mode, str) assert buffering is None or isinstance(buffering, int) assert encoding is None or isinstance(encoding, str) - assert newline in (None, "\n", "\r\n") + assert newline in (None, "\n", "\r", "\r\n") modes = set(mode) if modes - set("arwb+t") or len(mode) > len(modes): raise ValueError("invalid mode: %r" % mode) From python-checkins at python.org Mon Jul 30 21:47:15 2007 From: python-checkins at python.org (georg.brandl) Date: Mon, 30 Jul 2007 21:47:15 +0200 (CEST) Subject: [Python-checkins] r56631 - doctools/trunk/sphinx/web/application.py Message-ID: <20070730194715.AC4A61E4006@bag.python.org> Author: georg.brandl Date: Mon Jul 30 21:47:15 2007 New Revision: 56631 Modified: doctools/trunk/sphinx/web/application.py Log: Make module cloud sizes relative to the smallest number that's present, not 0. 
Modified: doctools/trunk/sphinx/web/application.py ============================================================================== --- doctools/trunk/sphinx/web/application.py (original) +++ doctools/trunk/sphinx/web/application.py Mon Jul 30 21:47:15 2007 @@ -339,11 +339,13 @@ """ most_frequent = heapq.nlargest(30, self.freqmodules.iteritems(), lambda x: x[1]) - most_frequent = [{ - 'name': x[0], - 'size': 100 + math.log(x[1] or 1) * 20, - 'count': x[1] - } for x in sorted(most_frequent)] + if most_frequent: + base_count = most_frequent[0][1] + most_frequent = [{ + 'name': x[0], + 'size': 100 + math.log((x[1] - base_count) + 1) * 20, + 'count': x[1] + } for x in sorted(most_frequent)] showpf = None newpf = req.args.get('pf') From python-checkins at python.org Tue Jul 31 05:03:34 2007 From: python-checkins at python.org (facundo.batista) Date: Tue, 31 Jul 2007 05:03:34 +0200 (CEST) Subject: [Python-checkins] r56632 - python/trunk/Lib/test/test_asynchat.py Message-ID: <20070731030334.D43181E4006@bag.python.org> Author: facundo.batista Date: Tue Jul 31 05:03:34 2007 New Revision: 56632 Modified: python/trunk/Lib/test/test_asynchat.py Log: When running asynchat tests on OS X (darwin), the test client now overrides asyncore.dispatcher.handle_expt to do nothing, since select.poll gives a POLLHUP error at the completion of these tests. Added timeout & count arguments to several asyncore.loop calls to avoid the possibility of a test hanging up a build. 
[GSoC - Alan McIntyre] Modified: python/trunk/Lib/test/test_asynchat.py ============================================================================== --- python/trunk/Lib/test/test_asynchat.py (original) +++ python/trunk/Lib/test/test_asynchat.py Tue Jul 31 05:03:34 2007 @@ -3,6 +3,7 @@ import thread # If this fails, we can't test this module import asyncore, asynchat, socket, threading, time import unittest +import sys from test import test_support HOST = "127.0.0.1" @@ -58,7 +59,12 @@ def handle_connect(self): pass - ##print "Connected" + + if sys.platform == 'darwin': + # select.poll returns a select.POLLHUP at the end of the tests + # on darwin, so just ignore it + def handle_expt(self): + pass def collect_incoming_data(self, data): self.buffer += data @@ -87,7 +93,7 @@ c.push("world%s" % term) c.push("I'm not dead yet!%s" % term) c.push(SERVER_QUIT) - asyncore.loop(use_poll=self.usepoll)#, count=5, timeout=5) + asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) s.join() self.assertEqual(c.contents, ["hello world", "I'm not dead yet!"]) @@ -120,7 +126,7 @@ data = "hello world, I'm not dead yet!\n" c.push(data) c.push(SERVER_QUIT) - asyncore.loop(use_poll=self.usepoll)#, count=5, timeout=5) + asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) s.join() self.assertEqual(c.contents, [data[:termlen]]) @@ -143,7 +149,7 @@ data = "hello world, I'm not dead yet!\n" c.push(data) c.push(SERVER_QUIT) - asyncore.loop(use_poll=self.usepoll)#, count=5, timeout=5) + asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) s.join() self.assertEqual(c.contents, []) @@ -157,7 +163,7 @@ data = "hello world\nI'm not dead yet!\n" p = asynchat.simple_producer(data+SERVER_QUIT, buffer_size=8) c.push_with_producer(p) - asyncore.loop(use_poll=self.usepoll)#, count=5, timeout=5) + asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) s.join() self.assertEqual(c.contents, ["hello world", "I'm not dead yet!"]) @@ -169,7 +175,7 @@ c = echo_client('\n') data 
= "hello world\nI'm not dead yet!\n" c.push_with_producer(data+SERVER_QUIT) - asyncore.loop(use_poll=self.usepoll)#, count=5, timeout=5) + asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) s.join() self.assertEqual(c.contents, ["hello world", "I'm not dead yet!"]) @@ -182,7 +188,7 @@ c = echo_client('\n') c.push("hello world\n\nI'm not dead yet!\n") c.push(SERVER_QUIT) - asyncore.loop(use_poll=self.usepoll) + asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) s.join() self.assertEqual(c.contents, ["hello world", "", "I'm not dead yet!"]) @@ -195,7 +201,7 @@ c.push("hello world\nI'm not dead yet!\n") c.push(SERVER_QUIT) c.close_when_done() - asyncore.loop(use_poll=self.usepoll)#, count=5, timeout=5) + asyncore.loop(use_poll=self.usepoll, count=300, timeout=.01) s.join() self.assertEqual(c.contents, []) From python-checkins at python.org Tue Jul 31 15:38:01 2007 From: python-checkins at python.org (nick.coghlan) Date: Tue, 31 Jul 2007 15:38:01 +0200 (CEST) Subject: [Python-checkins] r56633 - python/trunk/Lib/threading.py Message-ID: <20070731133801.B9FF01E4007@bag.python.org> Author: nick.coghlan Date: Tue Jul 31 15:38:01 2007 New Revision: 56633 Modified: python/trunk/Lib/threading.py Log: Eliminate RLock race condition reported in SF bug #1764059 Modified: python/trunk/Lib/threading.py ============================================================================== --- python/trunk/Lib/threading.py (original) +++ python/trunk/Lib/threading.py Tue Jul 31 15:38:01 2007 @@ -85,9 +85,10 @@ self.__count = 0 def __repr__(self): + owner = self.__owner return "<%s(%s, %d)>" % ( self.__class__.__name__, - self.__owner and self.__owner.getName(), + owner and owner.getName(), self.__count) def acquire(self, blocking=1): From python-checkins at python.org Tue Jul 31 21:19:58 2007 From: python-checkins at python.org (guido.van.rossum) Date: Tue, 31 Jul 2007 21:19:58 +0200 (CEST) Subject: [Python-checkins] r56634 - peps/trunk/pep-3141.txt Message-ID: 
<20070731191958.88C6B1E4016@bag.python.org> Author: guido.van.rossum Date: Tue Jul 31 21:19:57 2007 New Revision: 56634 Modified: peps/trunk/pep-3141.txt Log: New version by Jeffrey Yasskin. Modified: peps/trunk/pep-3141.txt ============================================================================== --- peps/trunk/pep-3141.txt (original) +++ peps/trunk/pep-3141.txt Tue Jul 31 21:19:57 2007 @@ -7,7 +7,7 @@ Type: Standards Track Content-Type: text/x-rst Created: 23-Apr-2007 -Post-History: 25-Apr-2007, 16-May-2007 +Post-History: 25-Apr-2007, 16-May-2007, xx-Aug-2007 Abstract @@ -15,7 +15,7 @@ This proposal defines a hierarchy of Abstract Base Classes (ABCs) (PEP 3119) to represent number-like classes. It proposes a hierarchy of -``Number :> Complex :> Real :> Rational :> Integer`` where ``A :> B`` +``Number :> Complex :> Real :> Rational :> Integral`` where ``A :> B`` means "A is a supertype of B", and a pair of ``Exact``/``Inexact`` classes to capture the difference between ``floats`` and ``ints``. These types are significantly inspired by Scheme's numeric @@ -28,41 +28,16 @@ the properties of those numbers, and if and when overloading based on types is added to the language, should be overloadable based on the types of the arguments. For example, slicing requires its arguments to -be ``Integers``, and the functions in the ``math`` module require +be ``Integrals``, and the functions in the ``math`` module require their arguments to be ``Real``. Specification ============= -This PEP specifies a set of Abstract Base Classes with default -implementations. If the reader prefers to think in terms of Roles (PEP -3133), the default implementations for (for example) the Real ABC -would be moved to a RealDefault class, with Real keeping just the -method declarations. - -Although this PEP uses terminology from PEP 3119, the hierarchy is -intended to be meaningful for any systematic method of defining sets -of classes, including Interfaces. 
I'm also using the extra notation -from PEP 3107 (Function Annotations) to specify some types. - - -Exact vs. Inexact Classes -------------------------- - -Floating point values may not exactly obey several of the properties -you would expect. For example, it is possible for ``(X + -X) + 3 == -3``, but ``X + (-X + 3) == 0``. On the range of values that most -functions deal with this isn't a problem, but it is something to be -aware of. - -Therefore, I define ``Exact`` and ``Inexact`` ABCs to mark whether -types have this problem. Every instance of ``Integer`` and -``Rational`` should be Exact, but ``Reals`` and ``Complexes`` may or -may not be. (Do we really only need one of these, and the other is -defined as ``not`` the first?):: - - class Exact(metaclass=MetaABC): pass - class Inexact(metaclass=MetaABC): pass +This PEP specifies a set of Abstract Base Classes, and suggests a +general strategy for implementing some of the methods. It uses +terminology from PEP 3119, but the hierarchy is intended to be +meaningful for any systematic method of defining sets of classes. Numeric Classes @@ -70,23 +45,21 @@ We begin with a Number class to make it easy for people to be fuzzy about what kind of number they expect. This class only helps with -overloading; it doesn't provide any operations. **Open question:** -Should it specify ``__add__``, ``__sub__``, ``__neg__``, ``__mul__``, -and ``__abs__`` like Haskell's ``Num`` class?:: +overloading; it doesn't provide any operations. class Number(metaclass=MetaABC): pass Some types (primarily ``float``) define "Not a Number" (NaN) values that return false for any comparison, including equality with -themselves, and are maintained through operations. Because this -doesn't work well with the Reals (which are otherwise totally ordered -by ``<``), Guido suggested we might put NaN in its own type. It is -conceivable that this can still be represented by C doubles but be -included in a different ABC at runtime. 
**Open issue:** Is this a good -idea?:: +themselves, and are maintained through operations. That is, ``nan + x +-> nan`` and ``nan == nan -> False`` Because this doesn't work well +with the Reals (which are otherwise totally ordered by ``<``), Guido +suggested we might put NaN in its own type. It is conceivable that +this can still be represented by C doubles but be included in a +different ABC at runtime. **Open issue:** Is this a good idea?:: - class NotANumber(Number): + class UndefinedNumber(Number): """Implement IEEE 754 semantics.""" def __lt__(self, other): return false def __eq__(self, other): return false @@ -94,162 +67,130 @@ def __add__(self, other): return self def __radd__(self, other): return self ... + # Should we demand a conversion to float? + +Most implementations of complex numbers will be hashable, but if you +need to rely on that, you'll have to check it explicitly: mutable +numbers are supported by this hierarchy. :: + + class Complex(Number): + """Complex defines the operations that work on the builtin complex type. -Complex numbers are immutable and hashable. Implementors should be -careful that they make equal numbers equal and hash them to the same -values. This may be subtle if there are two different extensions of -the real numbers:: - - class Complex(Hashable, Number): - """A ``Complex`` should define the operations that work on the - Python ``complex`` type. If it is given heterogenous - arguments, it may fall back on this class's definition of the - operations.addition, subtraction, negation, and - multiplication. These operators should never return a - TypeError as long as both arguments are instances of Complex - (or even just implement __complex__). + In short, those are: a conversion to complex, .real, .imag, +, + -, *, /, abs(), .conjugate, ==, and !=. + + If it is given heterogenous arguments, and doesn't have + special knowledge about them, it should fall back to the + builtin complex type as described below. 
""" @abstractmethod def __complex__(self): - """This operation gives the arithmetic operations a fallback. - """ - return complex(self.real, self.imag) + """Return a builtin complex instance.""" + + @abstractmethod @property def real(self): - return complex(self).real + """Retrieve the real component of this number, which should subclass Real.""" + raise NotImplementedError + @abstractmethod @property def imag(self): - return complex(self).imag - -I define the reversed operations here so that they serve as the final -fallback for operations involving instances of Complex. **Open -issue:** Should Complex's operations check for ``isinstance(other, -Complex)``? Duck typing seems to imply that we should just try -__complex__ and succeed if it works, but stronger typing might be -justified for the operators. TODO: analyze the combinations of normal -and reversed operations with real and virtual subclasses of Complex:: + """Retrieve the real component of this number, which should subclass Real.""" + raise NotImplementedError - def __radd__(self, other): - """Should this catch any type errors and return - NotImplemented instead?""" - return complex(other) + complex(self) - def __rsub__(self, other): - return complex(other) - complex(self) + @abstractmethod + def __add__(self, other): + raise NotImplementedError + @abstractmethod + def __sub__(self, other): + raise NotImplementedError + @abstractmethod def __neg__(self): - return -complex(self) - def __rmul__(self, other): - return complex(other) * complex(self) - def __rdiv__(self, other): - return complex(other) / complex(self) + raise NotImplementedError + @abstractmethod + def __mul__(self, other): + raise NotImplementedError + @abstractmethod + def __div__(self, other): + raise NotImplementedError + @abstractmethod def __abs__(self): - return abs(complex(self)) + """Returns the Real distance from 0.""" + raise NotImplementedError + @abstractmethod def conjugate(self): - return complex(self).conjugate() + 
"""(x+y*i).conjugate() returns (x-y*i).""" + raise NotImplementedError - def __hash__(self): - """Two "equal" values of different complex types should - hash in the same way.""" - return hash(complex(self)) + @abstractmethod + def __eq__(self, other): + raise NotImplementedError + def __ne__(self, other): + return not (self == other) The ``Real`` ABC indicates that the value is on the real line, and supports the operations of the ``float`` builtin. Real numbers are -totally ordered. (NaNs were handled above.):: +totally ordered. (NaNs were handled above).:: + + class Real(Complex): + """To Complex, Real adds the operations that work on real numbers. + + In short, those are: a conversion to float, trunc(), divmod, + %, <, <=, >, and >=. - class Real(Complex, metaclass=TotallyOrderedABC): + Real also provides defaults for the derived operations. + """ @abstractmethod def __float__(self): """Any Real can be converted to a native float object.""" - raise NotImplementedError - def __complex__(self): - """Which gives us an easy way to define the conversion to - complex.""" - return complex(float(self)) - @property - def real(self): return self - @property - def imag(self): return 0 - - def __radd__(self, other): - if isinstance(other, Real): - return float(other) + float(self) - else: - return super(Real, self).__radd__(other) - def __rsub__(self, other): - if isinstance(other, Real): - return float(other) - float(self) - else: - return super(Real, self).__rsub__(other) - def __neg__(self): - return -float(self) - def __rmul__(self, other): - if isinstance(other, Real): - return float(other) * float(self) - else: - return super(Real, self).__rmul__(other) - def __rdiv__(self, other): - if isinstance(other, Real): - return float(other) / float(self) - else: - return super(Real, self).__rdiv__(other) - def __rdivmod__(self, other): - """Implementing divmod() for your type is sufficient to - get floordiv and mod too. 
- """ - if isinstance(other, Real): - return divmod(float(other), float(self)) - else: - return super(Real, self).__rdivmod__(other) - def __rfloordiv__(self, other): - return divmod(other, self)[0] - def __rmod__(self, other): - return divmod(other, self)[1] - + raise NotImplementedError + + @abstractmethod def __trunc__(self): - """Do we want properfraction, floor, ceiling, and round?""" - return trunc(float(self)) - - def __abs__(self): - return abs(float(self)) + """Returns an Integral of the same sign as self whose abs is <= self's abs.""" + raise NotImplementedError -There is no way to define only the reversed comparison operators, so -these operations take precedence over any defined in the other -type. :( :: + def __divmod__(self, other): + """The pair (self // other, self % other).""" + return (self // other, self % other) + @abstractmethod + def __floordiv__(self, other): + """The floor() of self/other.""" + raise NotImplementedError + @abstractmethod + def __mod__(self, other): + """.""" + raise NotImplementedError + @abstractmethod def __lt__(self, other): - """The comparison operators in Python seem to be more - strict about their input types than other functions. I'm - guessing here that we want types to be incompatible even - if they define a __float__ operation, unless they also - declare themselves to be Real numbers. - """ - if isinstance(other, Real): - return float(self) < float(other) - else: - return NotImplemented - - def __le__(self, other): + raise NotImplementedError + def __le__(self, other): + # Assume that if other is Real, it defines an ordering + # consistent with this class, or returns NotImplemented. if isinstance(other, Real): - return float(self) <= float(other) - else: - return NotImplemented + return not (other < self) + + # Concrete implementations of Complex abstract methods. 
+ def __complex__(self): + return complex(float(self)) + @property + def real(self): + return self + @property + def imag(self): + return 0 - def __eq__(self, other): - if isinstance(other, Real): - return float(self) == float(other) - else: - return NotImplemented - There is no built-in rational type, but it's straightforward to write, -so we provide an ABC for it:: +so we provide an ABC for it. *Open issue*: Add Demo/classes/Rat.py to +the stdlib?:: class Rational(Real, Exact): - """rational.numerator and rational.denominator should be in - lowest terms. - """ + """.numerator and .denominator should be in lowest terms.""" @abstractmethod @property def numerator(self): @@ -259,49 +200,80 @@ def denominator(self): raise NotImplementedError + # Concrete implementation of Real's conversion to float. def __float__(self): return self.numerator / self.denominator - class Integer(Rational): - @abstractmethod - def __int__(self): - raise NotImplementedError - def __float__(self): - return float(int(self)) +And finally integers:: + + class Integral(Rational): + """Integral adds a conversion to int and the bit-string operations.""" + @abstractmethod + def __int__(self): + raise NotImplementedError + + @abstractmethod + def __lshift__(self, other): + raise NotImplementedError + @abstractmethod + def __rshift__(self, other): + raise NotImplementedError + @abstractmethod + def __and__(self, other): + raise NotImplementedError + @abstractmethod + def __xor__(self, other): + raise NotImplementedError + @abstractmethod + def __or__(self, other): + raise NotImplementedError + + # Concrete implementations of Rational and Real abstract methods. 
+ def __float__(self): + return float(int(self)) @property - def numerator(self): return self + def numerator(self): + return self @property - def denominator(self): return 1 + def denominator(self): + return 1 - def __ror__(self, other): - return int(other) | int(self) - def __rxor__(self, other): - return int(other) ^ int(self) - def __rand__(self, other): - return int(other) & int(self) - def __rlshift__(self, other): - return int(other) << int(self) - def __rrshift__(self, other): - return int(other) >> int(self) - def __invert__(self): - return ~int(self) - def __radd__(self, other): - """All of the Real methods need to be overridden here too - in order to get a more exact type for their results. - """ - if isinstance(other, Integer): - return int(other) + int(self) - else: - return super(Integer, self).__radd__(other) - ... + + +Exact vs. Inexact Classes +------------------------- + +Floating point values may not exactly obey several of the properties +you would expect. For example, it is possible for ``(X + -X) + 3 == +3``, but ``X + (-X + 3) == 0``. On the range of values that most +functions deal with this isn't a problem, but it is something to be +aware of. + +Therefore, I define ``Exact`` and ``Inexact`` ABCs to mark whether +types have this problem. Every instance of ``Integral`` and +``Rational`` should be Exact, but ``Reals`` and ``Complexes`` may or +may not be. (Do we really only need one of these, and the other is +defined as ``not`` the first?):: + + class Exact(Number): pass + class Inexact(Number): pass + + +Notes for type implementors +--------------------------- + +Implementors should be careful to make equal numbers equal and +hash them to the same values. This may be subtle if there are two +different extensions of the real numbers. 
For example, a complex type +could reasonably implement hash() as follows:: def __hash__(self): - """Surprisingly, hash() needs to be overridden too, since - there are integers that float can't represent.""" - return hash(int(self)) + return hash(complex(self)) +but should be careful of any values that fall outside of the built in +complex's range or precision. Adding More Numeric ABCs ------------------------ @@ -313,7 +285,61 @@ class MyFoo(Complex): ... MyFoo.register(Real) -TODO(jyasskin): Check this. +Implementing the arithmetic operations +-------------------------------------- + +We want to implement the arithmetic operations so that mixed-mode +operations either call an implementation whose author knew about the +types of both arguments, or convert both to the nearest built in type +and do the operation there. For subtypes of Integral, this means that +__add__ and __radd__ should be defined as:: + + class MyIntegral(Integral): + def __add__(self, other): + if isinstance(other, MyIntegral): + return do_my_adding_stuff(self, other) + elif isinstance(other, OtherTypeIKnowAbout): + return do_my_other_adding_stuff(self, other) + else: + return NotImplemented + def __radd__(self, other): + if isinstance(other, MyIntegral): + return do_my_adding_stuff(other, self) + elif isinstance(other, OtherTypeIKnowAbout): + return do_my_other_adding_stuff(other, self) + elif isinstance(other, Integral): + return int(other) + int(self) + elif isinstance(other, Real): + return float(other) + float(self) + elif isinstance(other, Complex): + return complex(other) + complex(self) + else: + return NotImplemented + + +There are 5 different cases for a mixed-type operation on subclasses +of Complex. I'll refer to all of the above code that doesn't refer to +MyIntegral and OtherTypeIKnowAbout as "boilerplate". ``a`` will be an +instance of ``A``, which is a subtype of ``Complex`` (``a : A <: +Complex``), and ``b : B <: Complex``. I'll consider ``a + b``: + + 1. 
If A defines an __add__ which accepts b, all is well. + 2. If A falls back to the boilerplate code, and it were to return + a value from __add__, we'd miss the possibility that B defines + a more intelligent __radd__, so the boilerplate should return + NotImplemented from __add__. (Or A may not implement __add__ at + all.) + 3. Then B's __radd__ gets a chance. If it accepts a, all is well. + 4. If it falls back to the boilerplate, there are no more possible + methods to try, so this is where the default implementation + should live. + 5. If B <: A, Python tries B.__radd__ before A.__add__. This is + ok, because it was implemented with knowledge of A, so it can + handle those instances before delegating to Complex. + +If ``A<:Complex`` and ``B<:Real`` without sharing any other knowledge, +then the appropriate shared operation is the one involving the built +in complex, and both __radd__s land there, so ``a+b == b+a``. Rejected Alternatives @@ -324,12 +350,18 @@ MonoidUnderPlus, AdditiveGroup, Ring, and Field, and mentioned several other possible algebraic types before getting to the numbers. I had expected this to be useful to people using vectors and matrices, but -the NumPy community really wasn't interested. The numbers then had a -much more branching structure to include things like the Gaussian -Integers and Z/nZ, which could be Complex but wouldn't necessarily -support things like division. The community decided that this was too -much complication for Python, so the proposal has been scaled back to -resemble the Scheme numeric tower much more closely. +the NumPy community really wasn't interested, and we ran into the +issue that even if ``x`` is an instance of ``X <: MonoidUnderPlus`` +and ``y`` is an instance of ``Y <: MonoidUnderPlus``, ``x + y`` may +still not make sense. 
+ +Then I gave the numbers a much more branching structure to include +things like the Gaussian Integers and Z/nZ, which could be Complex but +wouldn't necessarily support things like division. The community +decided that this was too much complication for Python, so I've now +scaled back the proposal to resemble the Scheme numeric tower much +more closely. + References ========== @@ -337,10 +369,7 @@ .. [#pep3119] Introducing Abstract Base Classes (http://www.python.org/dev/peps/pep-3119/) -.. [#pep3107] Function Annotations - (http://www.python.org/dev/peps/pep-3107/) - -.. [3] Possible Python 3K Class Tree?, wiki page created by Bill Janssen +.. [#classtree] Possible Python 3K Class Tree?, wiki page created by Bill Janssen (http://wiki.python.org/moin/AbstractBaseClasses) .. [#numericprelude] NumericPrelude: An experimental alternative hierarchy of numeric type classes @@ -355,9 +384,10 @@ Thanks to Neil Norwitz for encouraging me to write this PEP in the first place, to Travis Oliphant for pointing out that the numpy people -didn't really care about the algebraic concepts, and to Guido van -Rossum, Collin Winter, and lots of other people on the mailing list -for refining the concept. +didn't really care about the algebraic concepts, to Alan Isaac for +reminding me that Scheme had already done this, and to Guido van +Rossum and lots of other people on the mailing list for refining the +concept. 
Copyright ========= From python-checkins at python.org Tue Jul 31 21:54:22 2007 From: python-checkins at python.org (martin.v.loewis) Date: Tue, 31 Jul 2007 21:54:22 +0200 (CEST) Subject: [Python-checkins] r56635 - in python/branches/release25-maint: Misc/NEWS configure configure.in pyconfig.h.in Message-ID: <20070731195422.88A0F1E400C@bag.python.org> Author: martin.v.loewis Date: Tue Jul 31 21:54:20 2007 New Revision: 56635 Modified: python/branches/release25-maint/Misc/NEWS python/branches/release25-maint/configure python/branches/release25-maint/configure.in python/branches/release25-maint/pyconfig.h.in Log: Define _BSD_SOURCE, to get access to POSIX extensions on OpenBSD 4.1+. Modified: python/branches/release25-maint/Misc/NEWS ============================================================================== --- python/branches/release25-maint/Misc/NEWS (original) +++ python/branches/release25-maint/Misc/NEWS Tue Jul 31 21:54:20 2007 @@ -101,6 +101,8 @@ Build ----- +- Define _BSD_SOURCE, to get access to POSIX extensions on OpenBSD 4.1+. + - Patch #1673122: Use an explicit path to libtool when building a framework. This avoids picking up GNU libtool from a users PATH. Modified: python/branches/release25-maint/configure ============================================================================== --- python/branches/release25-maint/configure (original) +++ python/branches/release25-maint/configure Tue Jul 31 21:54:20 2007 @@ -1,5 +1,5 @@ #! /bin/sh -# From configure.in Revision: 52844 . +# From configure.in Revision: 54284 . # Guess values for system-dependent variables and create Makefiles. # Generated by GNU Autoconf 2.59 for python 2.5. # @@ -1388,6 +1388,14 @@ _ACEOF +# OpenBSD undoes our definition of __BSD_VISIBLE if _XOPEN_SOURCE is +# also defined. This can be overridden by defining _BSD_SOURCE + +cat >>confdefs.h <<\_ACEOF +#define _BSD_SOURCE 1 +_ACEOF + + # The later defininition of _XOPEN_SOURCE and _POSIX_C_SOURCE disables # u_int on Irix 5.3. 
Defining _BSD_TYPES brings it back. Modified: python/branches/release25-maint/configure.in ============================================================================== --- python/branches/release25-maint/configure.in (original) +++ python/branches/release25-maint/configure.in Tue Jul 31 21:54:20 2007 @@ -50,6 +50,10 @@ # them. AC_DEFINE(__BSD_VISIBLE, 1, [Define on FreeBSD to activate all library features]) +# OpenBSD undoes our definition of __BSD_VISIBLE if _XOPEN_SOURCE is +# also defined. This can be overridden by defining _BSD_SOURCE +AC_DEFINE(_BSD_SOURCE, 1, [Define on OpenBSD to activate all library features]) + # The later defininition of _XOPEN_SOURCE and _POSIX_C_SOURCE disables # u_int on Irix 5.3. Defining _BSD_TYPES brings it back. AC_DEFINE(_BSD_TYPES, 1, [Define on Irix to enable u_int]) Modified: python/branches/release25-maint/pyconfig.h.in ============================================================================== --- python/branches/release25-maint/pyconfig.h.in (original) +++ python/branches/release25-maint/pyconfig.h.in Tue Jul 31 21:54:20 2007 @@ -909,6 +909,9 @@ # undef _ALL_SOURCE #endif +/* Define on OpenBSD to activate all library features */ +#undef _BSD_SOURCE + /* Define on Irix to enable u_int */ #undef _BSD_TYPES From python-checkins at python.org Tue Jul 31 21:57:57 2007 From: python-checkins at python.org (martin.v.loewis) Date: Tue, 31 Jul 2007 21:57:57 +0200 (CEST) Subject: [Python-checkins] r56636 - in python/trunk: Misc/NEWS configure configure.in pyconfig.h.in Message-ID: <20070731195757.C16A01E400D@bag.python.org> Author: martin.v.loewis Date: Tue Jul 31 21:57:56 2007 New Revision: 56636 Modified: python/trunk/Misc/NEWS python/trunk/configure python/trunk/configure.in python/trunk/pyconfig.h.in Log: Define _BSD_SOURCE, to get access to POSIX extensions on OpenBSD 4.1+. 
Modified: python/trunk/Misc/NEWS ============================================================================== --- python/trunk/Misc/NEWS (original) +++ python/trunk/Misc/NEWS Tue Jul 31 21:57:56 2007 @@ -926,6 +926,8 @@ Build ----- +- Define _BSD_SOURCE, to get access to POSIX extensions on OpenBSD 4.1+. + - Stop supporting AtheOS and cause a build error in configure for the platform. - Bug #1655392: don't add -L/usr/lib/pythonX.Y/config to the LDFLAGS Modified: python/trunk/configure ============================================================================== --- python/trunk/configure (original) +++ python/trunk/configure Tue Jul 31 21:57:56 2007 @@ -1,5 +1,5 @@ #! /bin/sh -# From configure.in Revision: 54283 . +# From configure.in Revision: 55739 . # Guess values for system-dependent variables and create Makefiles. # Generated by GNU Autoconf 2.61 for python 2.6. # @@ -1838,6 +1838,14 @@ _ACEOF +# OpenBSD undoes our definition of __BSD_VISIBLE if _XOPEN_SOURCE is +# also defined. This can be overridden by defining _BSD_SOURCE + +cat >>confdefs.h <<\_ACEOF +#define _BSD_SOURCE 1 +_ACEOF + + # The later defininition of _XOPEN_SOURCE and _POSIX_C_SOURCE disables # u_int on Irix 5.3. Defining _BSD_TYPES brings it back. Modified: python/trunk/configure.in ============================================================================== --- python/trunk/configure.in (original) +++ python/trunk/configure.in Tue Jul 31 21:57:56 2007 @@ -50,6 +50,10 @@ # them. AC_DEFINE(__BSD_VISIBLE, 1, [Define on FreeBSD to activate all library features]) +# OpenBSD undoes our definition of __BSD_VISIBLE if _XOPEN_SOURCE is +# also defined. This can be overridden by defining _BSD_SOURCE +AC_DEFINE(_BSD_SOURCE, 1, [Define on OpenBSD to activate all library features]) + # The later defininition of _XOPEN_SOURCE and _POSIX_C_SOURCE disables # u_int on Irix 5.3. Defining _BSD_TYPES brings it back. 
AC_DEFINE(_BSD_TYPES, 1, [Define on Irix to enable u_int]) Modified: python/trunk/pyconfig.h.in ============================================================================== --- python/trunk/pyconfig.h.in (original) +++ python/trunk/pyconfig.h.in Tue Jul 31 21:57:56 2007 @@ -103,6 +103,10 @@ /* Define if you have the 'resize_term' function. */ #undef HAVE_CURSES_RESIZE_TERM +/* Define to 1 if you have the declaration of `tzname', and to 0 if you don't. + */ +#undef HAVE_DECL_TZNAME + /* Define to 1 if you have the device macros. */ #undef HAVE_DEVICE_MACROS @@ -802,22 +806,22 @@ /* Define if i>>j for signed int i does not extend the sign bit when i < 0 */ #undef SIGNED_RIGHT_SHIFT_ZERO_FILLS -/* The size of a `double', as computed by sizeof. */ +/* The size of `double', as computed by sizeof. */ #undef SIZEOF_DOUBLE -/* The size of a `float', as computed by sizeof. */ +/* The size of `float', as computed by sizeof. */ #undef SIZEOF_FLOAT -/* The size of a `fpos_t', as computed by sizeof. */ +/* The size of `fpos_t', as computed by sizeof. */ #undef SIZEOF_FPOS_T -/* The size of a `int', as computed by sizeof. */ +/* The size of `int', as computed by sizeof. */ #undef SIZEOF_INT -/* The size of a `long', as computed by sizeof. */ +/* The size of `long', as computed by sizeof. */ #undef SIZEOF_LONG -/* The size of a `long long', as computed by sizeof. */ +/* The size of `long long', as computed by sizeof. */ #undef SIZEOF_LONG_LONG /* The number of bytes in an off_t. */ @@ -826,25 +830,25 @@ /* The number of bytes in a pthread_t. */ #undef SIZEOF_PTHREAD_T -/* The size of a `short', as computed by sizeof. */ +/* The size of `short', as computed by sizeof. */ #undef SIZEOF_SHORT -/* The size of a `size_t', as computed by sizeof. */ +/* The size of `size_t', as computed by sizeof. */ #undef SIZEOF_SIZE_T /* The number of bytes in a time_t. */ #undef SIZEOF_TIME_T -/* The size of a `uintptr_t', as computed by sizeof. 
*/ +/* The size of `uintptr_t', as computed by sizeof. */ #undef SIZEOF_UINTPTR_T -/* The size of a `void *', as computed by sizeof. */ +/* The size of `void *', as computed by sizeof. */ #undef SIZEOF_VOID_P -/* The size of a `wchar_t', as computed by sizeof. */ +/* The size of `wchar_t', as computed by sizeof. */ #undef SIZEOF_WCHAR_T -/* The size of a `_Bool', as computed by sizeof. */ +/* The size of `_Bool', as computed by sizeof. */ #undef SIZEOF__BOOL /* Define to 1 if you have the ANSI C header files. */ @@ -924,6 +928,9 @@ # undef _ALL_SOURCE #endif +/* Define on OpenBSD to activate all library features */ +#undef _BSD_SOURCE + /* Define on Irix to enable u_int */ #undef _BSD_TYPES @@ -980,7 +987,7 @@ /* Define to `int' if does not define. */ #undef mode_t -/* Define to `long' if does not define. */ +/* Define to `long int' if does not define. */ #undef off_t /* Define to `int' if does not define. */ @@ -989,7 +996,7 @@ /* Define to empty if the keyword does not work. */ #undef signed -/* Define to `unsigned' if does not define. */ +/* Define to `unsigned int' if does not define. */ #undef size_t /* Define to `int' if does not define. */ From buildbot at python.org Tue Jul 31 22:05:38 2007 From: buildbot at python.org (buildbot at python.org) Date: Tue, 31 Jul 2007 20:05:38 +0000 Subject: [Python-checkins] buildbot failure in amd64 XP trunk Message-ID: <20070731200539.522261E400C@bag.python.org> The Buildbot has detected a new failure of amd64 XP trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/amd64%2520XP%2520trunk/builds/80 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: martin.v.loewis BUILD FAILED: failed failed slave lost sincerely, -The Buildbot From buildbot at python.org Tue Jul 31 22:53:22 2007 From: buildbot at python.org (buildbot at python.org) Date: Tue, 31 Jul 2007 20:53:22 +0000 Subject: [Python-checkins] buildbot warnings in ia64 Ubuntu trunk 2.5 Message-ID: <20070731205322.404951E4009@bag.python.org> The Buildbot has detected a new failure of ia64 Ubuntu trunk 2.5. Full details are available at: http://www.python.org/dev/buildbot/all/ia64%2520Ubuntu%2520trunk%25202.5/builds/336 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch branches/release25-maint] HEAD Blamelist: martin.v.loewis Build had warnings: warnings test Excerpt from the test logfile: 1 test failed: test_urllib2net make: *** [buildbottest] Error 1 sincerely, -The Buildbot From buildbot at python.org Tue Jul 31 23:02:18 2007 From: buildbot at python.org (buildbot at python.org) Date: Tue, 31 Jul 2007 21:02:18 +0000 Subject: [Python-checkins] buildbot warnings in x86 mvlgcc trunk Message-ID: <20070731210218.A4BE31E400E@bag.python.org> The Buildbot has detected a new failure of x86 mvlgcc trunk. 
Full details are available at: http://www.python.org/dev/buildbot/all/x86%2520mvlgcc%2520trunk/builds/683 Buildbot URL: http://www.python.org/dev/buildbot/all/ Build Reason: Build Source Stamp: [branch trunk] HEAD Blamelist: martin.v.loewis Build had warnings: warnings test Excerpt from the test logfile: Traceback (most recent call last): File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/threading.py", line 466, in __bootstrap self.run() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_socketserver.py", line 93, in run svr.serve_a_few() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/test/test_socketserver.py", line 35, in serve_a_few self.handle_request() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 224, in handle_request self.handle_error(request, client_address) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 222, in handle_request self.process_request(request, client_address) File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 429, in process_request self.collect_children() File "/home2/buildbot/slave/trunk.loewis-linux/build/Lib/SocketServer.py", line 425, in collect_children self.active_children.remove(pid) ValueError: list.remove(x): x not in list 1 test failed: test_socketserver make: *** [buildbottest] Error 1 sincerely, -The Buildbot From python-checkins at python.org Tue Jul 31 23:14:19 2007 From: python-checkins at python.org (guido.van.rossum) Date: Tue, 31 Jul 2007 23:14:19 +0200 (CEST) Subject: [Python-checkins] r56637 - peps/trunk/pep-3141.txt Message-ID: <20070731211419.063001E4014@bag.python.org> Author: guido.van.rossum Date: Tue Jul 31 23:14:18 2007 New Revision: 56637 Modified: peps/trunk/pep-3141.txt Log: Insert some blank lines between methods (and remove a few between chapters). 
Modified: peps/trunk/pep-3141.txt ============================================================================== --- peps/trunk/pep-3141.txt (original) +++ peps/trunk/pep-3141.txt Tue Jul 31 23:14:18 2007 @@ -83,6 +83,7 @@ special knowledge about them, it should fall back to the builtin complex type as described below. """ + @abstractmethod def __complex__(self): """Return a builtin complex instance.""" @@ -92,6 +93,7 @@ def real(self): """Retrieve the real component of this number, which should subclass Real.""" raise NotImplementedError + @abstractmethod @property def imag(self): @@ -101,15 +103,19 @@ @abstractmethod def __add__(self, other): raise NotImplementedError + @abstractmethod def __sub__(self, other): raise NotImplementedError + @abstractmethod def __neg__(self): raise NotImplementedError + @abstractmethod def __mul__(self, other): raise NotImplementedError + @abstractmethod def __div__(self, other): raise NotImplementedError @@ -127,6 +133,7 @@ @abstractmethod def __eq__(self, other): raise NotImplementedError + def __ne__(self, other): return not (self == other) @@ -143,6 +150,7 @@ Real also provides defaults for the derived operations. """ + @abstractmethod def __float__(self): """Any Real can be converted to a native float object.""" @@ -156,10 +164,12 @@ def __divmod__(self, other): """The pair (self // other, self % other).""" return (self // other, self % other) + @abstractmethod def __floordiv__(self, other): """The floor() of self/other.""" raise NotImplementedError + @abstractmethod def __mod__(self, other): """.""" @@ -168,6 +178,7 @@ @abstractmethod def __lt__(self, other): raise NotImplementedError + def __le__(self, other): # Assume that if other is Real, it defines an ordering # consistent with this class, or returns NotImplemented. @@ -175,11 +186,14 @@ return not (other < self) # Concrete implementations of Complex abstract methods. 
+ def __complex__(self): return complex(float(self)) + @property def real(self): return self + @property def imag(self): return 0 @@ -191,10 +205,12 @@ class Rational(Real, Exact): """.numerator and .denominator should be in lowest terms.""" + @abstractmethod @property def numerator(self): raise NotImplementedError + @abstractmethod @property def denominator(self): @@ -209,6 +225,7 @@ class Integral(Rational): """Integral adds a conversion to int and the bit-string operations.""" + @abstractmethod def __int__(self): raise NotImplementedError @@ -216,32 +233,37 @@ @abstractmethod def __lshift__(self, other): raise NotImplementedError + @abstractmethod def __rshift__(self, other): raise NotImplementedError + @abstractmethod def __and__(self, other): raise NotImplementedError + @abstractmethod def __xor__(self, other): raise NotImplementedError + @abstractmethod def __or__(self, other): raise NotImplementedError # Concrete implementations of Rational and Real abstract methods. + def __float__(self): return float(int(self)) + @property def numerator(self): return self + @property def denominator(self): return 1 - - Exact vs. Inexact Classes ------------------------- @@ -295,6 +317,7 @@ __add__ and __radd__ should be defined as:: class MyIntegral(Integral): + def __add__(self, other): if isinstance(other, MyIntegral): return do_my_adding_stuff(self, other) @@ -302,6 +325,7 @@ return do_my_other_adding_stuff(self, other) else: return NotImplemented + def __radd__(self, other): if isinstance(other, MyIntegral): return do_my_adding_stuff(other, self)