[Python-checkins] cpython (merge default -> default): merge heads.

gregory.p.smith python-checkins at python.org
Sun Jun 5 08:06:12 CEST 2011


http://hg.python.org/cpython/rev/940537b372e3
changeset:   70641:940537b372e3
parent:      70640:20d9926f5db5
parent:      70639:247ec010e9c0
user:        Gregory P. Smith <greg at krypto.org>
date:        Sat Jun 04 23:05:19 2011 -0700
summary:
  merge heads.

files:
  .hgtags                                         |     1 +
  Doc/c-api/weakref.rst                           |     2 +-
  Doc/contents.rst                                |     2 +-
  Doc/distutils/apiref.rst                        |   585 ++--
  Doc/distutils/extending.rst                     |     4 +-
  Doc/distutils/index.rst                         |    11 +
  Doc/distutils/install.rst                       |   Bin 
  Doc/documenting/building.rst                    |     1 +
  Doc/documenting/style.rst                       |     4 +-
  Doc/glossary.rst                                |     2 +-
  Doc/howto/sockets.rst                           |     8 +-
  Doc/install/index.rst                           |   991 +---------
  Doc/install/install.rst                         |  1029 ++++++++++
  Doc/install/pysetup-config.rst                  |    44 +
  Doc/install/pysetup-servers.rst                 |    61 +
  Doc/install/pysetup.rst                         |   163 +
  Doc/library/abc.rst                             |     4 +-
  Doc/library/bz2.rst                             |    39 +-
  Doc/library/codecs.rst                          |     3 +-
  Doc/library/collections.abc.rst                 |     2 +-
  Doc/library/collections.rst                     |     2 +-
  Doc/library/crypt.rst                           |    32 +-
  Doc/library/depgraph-output.png                 |   Bin 
  Doc/library/distutils.rst                       |    20 +-
  Doc/library/functions.rst                       |    56 +-
  Doc/library/os.rst                              |    13 +
  Doc/library/packaging-misc.rst                  |    27 +
  Doc/library/packaging.command.rst               |   111 +
  Doc/library/packaging.compiler.rst              |   672 ++++++
  Doc/library/packaging.database.rst              |   324 +++
  Doc/library/packaging.depgraph.rst              |   199 +
  Doc/library/packaging.dist.rst                  |   102 +
  Doc/library/packaging.fancy_getopt.rst          |    75 +
  Doc/library/packaging.install.rst               |   112 +
  Doc/library/packaging.metadata.rst              |   122 +
  Doc/library/packaging.pypi.dist.rst             |   114 +
  Doc/library/packaging.pypi.rst                  |    53 +
  Doc/library/packaging.pypi.simple.rst           |   157 +
  Doc/library/packaging.pypi.xmlrpc.rst           |   143 +
  Doc/library/packaging.rst                       |    78 +
  Doc/library/packaging.tests.pypi_server.rst     |   105 +
  Doc/library/packaging.util.rst                  |   186 +
  Doc/library/packaging.version.rst               |   104 +
  Doc/library/pprint.rst                          |     6 +-
  Doc/library/python.rst                          |     1 +
  Doc/library/random.rst                          |     6 +
  Doc/library/re.rst                              |    72 +-
  Doc/library/signal.rst                          |     7 +-
  Doc/library/site.rst                            |     4 +
  Doc/library/socketserver.rst                    |    15 +-
  Doc/library/ssl.rst                             |    35 +-
  Doc/library/threading.rst                       |    19 +-
  Doc/packaging/builtdist.rst                     |   307 ++
  Doc/packaging/commandhooks.rst                  |    31 +
  Doc/packaging/commandref.rst                    |   349 +++
  Doc/packaging/configfile.rst                    |   125 +
  Doc/packaging/examples.rst                      |   334 +++
  Doc/packaging/extending.rst                     |    95 +
  Doc/packaging/index.rst                         |    45 +
  Doc/packaging/introduction.rst                  |   193 +
  Doc/packaging/packageindex.rst                  |   104 +
  Doc/packaging/setupcfg.rst                      |   648 ++++++
  Doc/packaging/setupscript.rst                   |   689 ++++++
  Doc/packaging/sourcedist.rst                    |   273 ++
  Doc/packaging/tutorial.rst                      |   112 +
  Doc/packaging/uploading.rst                     |    80 +
  Doc/tools/sphinxext/indexcontent.html           |     8 +-
  Doc/whatsnew/3.3.rst                            |    36 +
  Include/Python-ast.h                            |    42 +-
  Include/import.h                                |     2 +-
  Lib/_pyio.py                                    |    12 +-
  Lib/bz2.py                                      |    49 +-
  Lib/collections/__init__.py                     |     2 +
  Lib/ctypes/util.py                              |     4 +-
  Lib/decimal.py                                  |   119 +-
  Lib/distutils/tests/test_build_py.py            |    15 +-
  Lib/html/parser.py                              |     2 +-
  Lib/imaplib.py                                  |    10 +-
  Lib/logging/__init__.py                         |    11 +-
  Lib/packaging/command/bdist.py                  |     1 +
  Lib/packaging/command/check.py                  |     2 +-
  Lib/packaging/command/sdist.py                  |    36 +-
  Lib/packaging/compiler/__init__.py              |     5 +-
  Lib/packaging/compiler/ccompiler.py             |     2 +-
  Lib/packaging/database.py                       |    16 +-
  Lib/packaging/fancy_getopt.py                   |   119 +-
  Lib/packaging/install.py                        |   107 +-
  Lib/packaging/metadata.py                       |    14 +-
  Lib/packaging/pypi/simple.py                    |    14 +-
  Lib/packaging/pypi/xmlrpc.py                    |     8 +-
  Lib/packaging/resources.py                      |    25 -
  Lib/packaging/run.py                            |    85 +-
  Lib/packaging/tests/support.py                  |    37 +-
  Lib/packaging/tests/test_command_build_ext.py   |     6 +-
  Lib/packaging/tests/test_command_build_py.py    |     9 +-
  Lib/packaging/tests/test_command_check.py       |    22 +-
  Lib/packaging/tests/test_command_install_lib.py |     4 +
  Lib/packaging/tests/test_command_sdist.py       |     9 +-
  Lib/packaging/tests/test_command_test.py        |     3 +-
  Lib/packaging/tests/test_config.py              |     5 +
  Lib/packaging/tests/test_create.py              |     2 +-
  Lib/packaging/tests/test_database.py            |   163 +-
  Lib/packaging/tests/test_dist.py                |     2 +
  Lib/packaging/tests/test_install.py             |    25 +-
  Lib/packaging/tests/test_manifest.py            |    11 +-
  Lib/packaging/tests/test_pypi_dist.py           |     5 +-
  Lib/packaging/tests/test_pypi_simple.py         |    24 +-
  Lib/packaging/tests/test_resources.py           |   167 -
  Lib/packaging/tests/test_run.py                 |    25 +-
  Lib/packaging/tests/test_uninstall.py           |    31 +-
  Lib/packaging/tests/test_util.py                |    53 +-
  Lib/packaging/util.py                           |    21 +-
  Lib/pickle.py                                   |     2 -
  Lib/pkgutil.py                                  |     2 +-
  Lib/platform.py                                 |     2 +
  Lib/plistlib.py                                 |    59 +-
  Lib/pydoc.py                                    |   280 +--
  Lib/reprlib.py                                  |     2 +-
  Lib/socketserver.py                             |    21 +-
  Lib/ssl.py                                      |     2 +-
  Lib/subprocess.py                               |   191 +-
  Lib/sysconfig.py                                |    11 +-
  Lib/tarfile.py                                  |     2 -
  Lib/test/cjkencodings/hz-utf8.txt               |     2 +
  Lib/test/cjkencodings/hz.txt                    |     2 +
  Lib/test/cjkencodings/iso2022_jp-utf8.txt       |     7 +
  Lib/test/cjkencodings/iso2022_jp.txt            |     7 +
  Lib/test/cjkencodings/iso2022_kr-utf8.txt       |     7 +
  Lib/test/cjkencodings/iso2022_kr.txt            |     7 +
  Lib/test/decimaltestdata/extra.decTest          |    13 +
  Lib/test/lock_tests.py                          |     4 +-
  Lib/test/regrtest.py                            |    40 +-
  Lib/test/support.py                             |    67 +-
  Lib/test/test_abstract_numbers.py               |     2 +
  Lib/test/test_ast.py                            |     9 +-
  Lib/test/test_bool.py                           |    10 +
  Lib/test/test_builtin.py                        |     2 +
  Lib/test/test_bz2.py                            |   212 +-
  Lib/test/test_capi.py                           |     5 +-
  Lib/test/test_codecencodings_cn.py              |    29 +
  Lib/test/test_codecencodings_iso2022.py         |    46 +
  Lib/test/test_codecs.py                         |    13 +-
  Lib/test/test_collections.py                    |     2 +-
  Lib/test/test_compile.py                        |     9 +
  Lib/test/test_concurrent_futures.py             |     8 +-
  Lib/test/test_descr.py                          |     8 +-
  Lib/test/test_descrtut.py                       |     1 +
  Lib/test/test_faulthandler.py                   |    13 +-
  Lib/test/test_grammar.py                        |    91 +-
  Lib/test/test_io.py                             |    11 +-
  Lib/test/test_logging.py                        |    22 +-
  Lib/test/test_marshal.py                        |    17 +
  Lib/test/test_math.py                           |     6 +-
  Lib/test/test_multibytecodec.py                 |    35 +
  Lib/test/test_multibytecodec_support.py         |    34 +-
  Lib/test/test_parser.py                         |     8 +
  Lib/test/test_pep292.py                         |    33 +-
  Lib/test/test_plistlib.py                       |    26 +
  Lib/test/test_posix.py                          |    27 +
  Lib/test/test_signal.py                         |    39 +-
  Lib/test/test_socket.py                         |    23 +-
  Lib/test/test_ssl.py                            |    10 +
  Lib/test/test_string.py                         |    68 +-
  Lib/test/test_subprocess.py                     |    41 +-
  Lib/test/test_sys.py                            |     6 +-
  Lib/test/test_sysconfig.py                      |    53 +-
  Lib/test/test_threaded_import.py                |     2 +-
  Lib/test/test_threading.py                      |    36 +-
  Lib/threading.py                                |    21 +-
  Lib/tkinter/__init__.py                         |     2 -
  Lib/wsgiref.egg-info                            |     8 -
  Lib/xml/parsers/expat.py                        |     2 -
  Mac/Makefile.in                                 |    13 +
  Makefile.pre.in                                 |    10 +-
  Misc/ACKS                                       |    43 +-
  Misc/NEWS                                       |   149 +-
  Modules/_io/_iomodule.c                         |     3 +
  Modules/_io/_iomodule.h                         |     1 +
  Modules/_io/bufferedio.c                        |    53 +-
  Modules/_io/fileio.c                            |    39 +-
  Modules/_io/iobase.c                            |     8 +
  Modules/_io/textio.c                            |     9 +-
  Modules/_posixsubprocess.c                      |     4 +-
  Modules/_sqlite/connection.c                    |     2 +
  Modules/_ssl.c                                  |    67 +
  Modules/_threadmodule.c                         |     2 +-
  Modules/cjkcodecs/_codecs_hk.c                  |    91 +-
  Modules/cjkcodecs/_codecs_jp.c                  |    10 +-
  Modules/cjkcodecs/multibytecodec.c              |    22 +-
  Modules/faulthandler.c                          |    11 +-
  Modules/parsermodule.c                          |     5 +-
  Modules/posixmodule.c                           |    51 +-
  Modules/pyexpat.c                               |    21 -
  Modules/signalmodule.c                          |     9 +-
  Modules/socketmodule.c                          |     9 +-
  Modules/zipimport.c                             |     2 +-
  Objects/abstract.c                              |     3 +-
  Objects/exceptions.c                            |     2 +-
  Objects/moduleobject.c                          |    30 +-
  Objects/object.c                                |   198 +-
  Objects/setobject.c                             |    20 +-
  Objects/typeobject.c                            |   133 +-
  Objects/typeslots.inc                           |     2 +-
  Objects/typeslots.py                            |     2 +-
  Parser/Python.asdl                              |     7 +-
  Parser/myreadline.c                             |     1 +
  Parser/parsetok.c                               |     2 +-
  Python/Python-ast.c                             |   297 +-
  Python/ast.c                                    |    67 +-
  Python/compile.c                                |   200 +-
  Python/import.c                                 |    18 +-
  Python/marshal.c                                |    35 +-
  Python/pythonrun.c                              |     2 +-
  Python/symtable.c                               |    29 +-
  Python/thread_pthread.h                         |    12 +
  Tools/msi/msi.py                                |     2 +
  Tools/scripts/findnocoding.py                   |     4 +-
  Tools/scripts/pysource.py                       |     2 +-
  Tools/unittestgui/unittestgui.py                |     1 -
  configure                                       |   650 +++---
  configure.in                                    |    45 +-
  pyconfig.h.in                                   |     2 +-
  setup.py                                        |     4 +-
  223 files changed, 11060 insertions(+), 3747 deletions(-)


diff --git a/.hgtags b/.hgtags
--- a/.hgtags
+++ b/.hgtags
@@ -75,6 +75,7 @@
 960efa327c5d9c18df995437b0ac550cb89c9f85 v3.1.2
 d18e9d71f369d8211f6ac87252c6d3211f9bd09f v3.1.3rc1
 a4f75773c0060cee38b0bb651a7aba6f56b0e996 v3.1.3
+32fcb9e94985cb19ce37ba9543f091c0dbe9d7dd v3.1.4rc1
 b37b7834757492d009b99cf0ca4d42d2153d7fac v3.2a1
 56d4373cecb73c8b45126ba7b045b3c7b3f94b0b v3.2a2
 da012d9a2c23d144e399d2e01a55b8a83ad94573 v3.2a3
diff --git a/Doc/c-api/weakref.rst b/Doc/c-api/weakref.rst
--- a/Doc/c-api/weakref.rst
+++ b/Doc/c-api/weakref.rst
@@ -55,7 +55,7 @@
    Return the referenced object from a weak reference, *ref*.  If the referent is
    no longer live, returns :const:`Py_None`.
 
-   .. warning::
+   .. note::
 
       This function returns a **borrowed reference** to the referenced object.
       This means that you should always call :c:func:`Py_INCREF` on the object
diff --git a/Doc/contents.rst b/Doc/contents.rst
--- a/Doc/contents.rst
+++ b/Doc/contents.rst
@@ -11,7 +11,7 @@
    library/index.rst
    extending/index.rst
    c-api/index.rst
-   distutils/index.rst
+   packaging/index.rst
    install/index.rst
    documenting/index.rst
    howto/index.rst
diff --git a/Doc/distutils/apiref.rst b/Doc/distutils/apiref.rst
--- a/Doc/distutils/apiref.rst
+++ b/Doc/distutils/apiref.rst
@@ -21,7 +21,7 @@
 .. function:: setup(arguments)
 
    The basic do-everything function that does most everything you could ever ask
-   for from a Distutils method. See XXXXX
+   for from a Distutils method.
 
    The setup function takes a large number of arguments. These are laid out in the
    following table.
@@ -147,11 +147,11 @@
 In addition, the :mod:`distutils.core` module exposed a number of  classes that
 live elsewhere.
 
-* :class:`Extension` from :mod:`distutils.extension`
-
-* :class:`Command` from :mod:`distutils.cmd`
-
-* :class:`Distribution` from :mod:`distutils.dist`
+* :class:`~distutils.extension.Extension` from :mod:`distutils.extension`
+
+* :class:`~distutils.cmd.Command` from :mod:`distutils.cmd`
+
+* :class:`~distutils.dist.Distribution` from :mod:`distutils.dist`
 
 A short description of each of these follows, but see the relevant module for
 the full reference.
@@ -1678,8 +1678,8 @@
 ===================================================================
 
 .. module:: distutils.cmd
-   :synopsis: This module provides the abstract base class Command. This class is subclassed
-              by the modules in the distutils.command  subpackage.
+   :synopsis: This module provides the abstract base class Command. This class
+              is subclassed by the modules in the distutils.command subpackage.
 
 
 This module supplies the abstract base class :class:`Command`.
@@ -1689,291 +1689,38 @@
 
    Abstract base class for defining command classes, the "worker bees" of the
    Distutils.  A useful analogy for command classes is to think of them as
-   subroutines with local variables called *options*.  The options are declared in
-   :meth:`initialize_options` and defined (given their final values) in
-   :meth:`finalize_options`, both of which must be defined by every command class.
-   The distinction between the two is necessary because option values might come
-   from the outside world (command line, config file, ...), and any options
-   dependent on other options must be computed after these outside influences have
-   been processed --- hence :meth:`finalize_options`.  The body of the subroutine,
-   where it does all its work based on the values of its options, is the
-   :meth:`run` method, which must also be implemented by every command class.
-
-   The class constructor takes a single argument *dist*, a  :class:`Distribution`
+   subroutines with local variables called *options*.  The options are declared
+   in :meth:`initialize_options` and defined (given their final values) in
+   :meth:`finalize_options`, both of which must be defined by every command
+   class.  The distinction between the two is necessary because option values
+   might come from the outside world (command line, config file, ...), and any
+   options dependent on other options must be computed after these outside
+   influences have been processed --- hence :meth:`finalize_options`.  The body
+   of the subroutine, where it does all its work based on the values of its
+   options, is the :meth:`run` method, which must also be implemented by every
+   command class.
+
+   The class constructor takes a single argument *dist*, a :class:`Distribution`
    instance.
 
 
-:mod:`distutils.command` --- Individual Distutils commands
-==========================================================
-
-.. module:: distutils.command
-   :synopsis: This subpackage contains one module for each standard Distutils command.
-
-
-.. % \subsubsection{Individual Distutils commands}
-.. % todo
-
-
-:mod:`distutils.command.bdist` --- Build a binary installer
-===========================================================
-
-.. module:: distutils.command.bdist
-   :synopsis: Build a binary installer for a package
-
-
-.. % todo
-
-
-:mod:`distutils.command.bdist_packager` --- Abstract base class for packagers
-=============================================================================
-
-.. module:: distutils.command.bdist_packager
-   :synopsis: Abstract base class for packagers
-
-
-.. % todo
-
-
-:mod:`distutils.command.bdist_dumb` --- Build a "dumb" installer
-================================================================
-
-.. module:: distutils.command.bdist_dumb
-   :synopsis: Build a "dumb" installer - a simple archive of files
-
-
-.. % todo
-
-
-:mod:`distutils.command.bdist_msi` --- Build a Microsoft Installer binary package
-=================================================================================
-
-.. module:: distutils.command.bdist_msi
-   :synopsis: Build a binary distribution as a Windows MSI file
-
-.. class:: bdist_msi(Command)
-
-   Builds a `Windows Installer`_ (.msi) binary package.
-
-   .. _Windows Installer: http://msdn.microsoft.com/en-us/library/cc185688(VS.85).aspx
-
-   In most cases, the ``bdist_msi`` installer is a better choice than the
-   ``bdist_wininst`` installer, because it provides better support for
-   Win64 platforms, allows administrators to perform non-interactive
-   installations, and allows installation through group policies.
-
-
-:mod:`distutils.command.bdist_rpm` --- Build a binary distribution as a Redhat RPM and SRPM
-===========================================================================================
-
-.. module:: distutils.command.bdist_rpm
-   :synopsis: Build a binary distribution as a Redhat RPM and SRPM
-
-
-.. % todo
-
-
-:mod:`distutils.command.bdist_wininst` --- Build a Windows installer
-====================================================================
-
-.. module:: distutils.command.bdist_wininst
-   :synopsis: Build a Windows installer
-
-
-.. % todo
-
-
-:mod:`distutils.command.sdist` --- Build a source distribution
-==============================================================
-
-.. module:: distutils.command.sdist
-   :synopsis: Build a source distribution
-
-
-.. % todo
-
-
-:mod:`distutils.command.build` --- Build all files of a package
-===============================================================
-
-.. module:: distutils.command.build
-   :synopsis: Build all files of a package
-
-
-.. % todo
-
-
-:mod:`distutils.command.build_clib` --- Build any C libraries in a package
-==========================================================================
-
-.. module:: distutils.command.build_clib
-   :synopsis: Build any C libraries in a package
-
-
-.. % todo
-
-
-:mod:`distutils.command.build_ext` --- Build any extensions in a package
-========================================================================
-
-.. module:: distutils.command.build_ext
-   :synopsis: Build any extensions in a package
-
-
-.. % todo
-
-
-:mod:`distutils.command.build_py` --- Build the .py/.pyc files of a package
-===========================================================================
-
-.. module:: distutils.command.build_py
-   :synopsis: Build the .py/.pyc files of a package
-
-
-.. class:: build_py(Command)
-
-.. class:: build_py_2to3(build_py)
-
-   Alternative implementation of build_py which also runs the
-   2to3 conversion library on each .py file that is going to be
-   installed. To use this in a setup.py file for a distribution
-   that is designed to run with both Python 2.x and 3.x, add::
-
-     try:
-        from distutils.command.build_py import build_py_2to3 as build_py
-     except ImportError:
-        from distutils.command.build_py import build_py
-
-   to your setup.py, and later::
-
-      cmdclass = {'build_py': build_py}
-
-   to the invocation of setup().
-
-
-:mod:`distutils.command.build_scripts` --- Build the scripts of a package
-=========================================================================
-
-.. module:: distutils.command.build_scripts
-   :synopsis: Build the scripts of a package
-
-
-.. % todo
-
-
-:mod:`distutils.command.clean` --- Clean a package build area
-=============================================================
-
-.. module:: distutils.command.clean
-   :synopsis: Clean a package build area
-
-
-.. % todo
-
-
-:mod:`distutils.command.config` --- Perform package configuration
-=================================================================
-
-.. module:: distutils.command.config
-   :synopsis: Perform package configuration
-
-
-.. % todo
-
-
-:mod:`distutils.command.install` --- Install a package
-======================================================
-
-.. module:: distutils.command.install
-   :synopsis: Install a package
-
-
-.. % todo
-
-
-:mod:`distutils.command.install_data` --- Install data files from a package
-===========================================================================
-
-.. module:: distutils.command.install_data
-   :synopsis: Install data files from a package
-
-
-.. % todo
-
-
-:mod:`distutils.command.install_headers` --- Install C/C++ header files from a package
-======================================================================================
-
-.. module:: distutils.command.install_headers
-   :synopsis: Install C/C++ header files from a package
-
-
-.. % todo
-
-
-:mod:`distutils.command.install_lib` --- Install library files from a package
-=============================================================================
-
-.. module:: distutils.command.install_lib
-   :synopsis: Install library files from a package
-
-
-.. % todo
-
-
-:mod:`distutils.command.install_scripts` --- Install script files from a package
-================================================================================
-
-.. module:: distutils.command.install_scripts
-   :synopsis: Install script files from a package
-
-
-.. % todo
-
-
-:mod:`distutils.command.register` --- Register a module with the Python Package Index
-=====================================================================================
-
-.. module:: distutils.command.register
-   :synopsis: Register a module with the Python Package Index
-
-
-The ``register`` command registers the package with the Python Package  Index.
-This is described in more detail in :pep:`301`.
-
-.. % todo
-
-:mod:`distutils.command.check` --- Check the meta-data of a package
-===================================================================
-
-.. module:: distutils.command.check
-   :synopsis: Check the metadata of a package
-
-
-The ``check`` command performs some tests on the meta-data of a package.
-For example, it verifies that all required meta-data are provided as
-the arguments passed to the :func:`setup` function.
-
-.. % todo
-
-
 Creating a new Distutils command
 ================================
 
 This section outlines the steps to create a new Distutils command.
 
 A new command lives in a module in the :mod:`distutils.command` package. There
-is a sample template in that directory called  :file:`command_template`. Copy
+is a sample template in that directory called :file:`command_template`.  Copy
 this file to a new module with the same name as the new command you're
-implementing. This module should implement a class with the same name as the
-module (and the command). So, for instance, to create the command
+implementing.  This module should implement a class with the same name as the
+module (and the command).  So, for instance, to create the command
 ``peel_banana`` (so that users can run ``setup.py peel_banana``), you'd copy
-:file:`command_template`  to :file:`distutils/command/peel_banana.py`, then edit
+:file:`command_template` to :file:`distutils/command/peel_banana.py`, then edit
 it so that it's implementing the class :class:`peel_banana`, a subclass of
 :class:`distutils.cmd.Command`.
 
 Subclasses of :class:`Command` must define the following methods.
 
-
 .. method:: Command.initialize_options()
 
    Set default values for all the options that this command supports.  Note that
@@ -2001,16 +1748,274 @@
    :meth:`finalize_options`.  All terminal output and filesystem interaction should
    be done by :meth:`run`.
 
-*sub_commands* formalizes the notion of a "family" of commands, eg. ``install``
-as the parent with sub-commands ``install_lib``, ``install_headers``, etc.  The
-parent of a family of commands defines *sub_commands* as a class attribute; it's
-a list of 2-tuples ``(command_name, predicate)``, with *command_name* a string
-and *predicate* a function, a string or None. *predicate* is a method of
-the parent command that determines whether the corresponding command is
-applicable in the current situation.  (Eg. we ``install_headers`` is only
-applicable if we have any C header files to install.)  If *predicate* is None,
-that command is always applicable.
-
-*sub_commands* is usually defined at the \*end\* of a class, because predicates
-can be methods of the class, so they must already have been defined.  The
-canonical example is the :command:`install` command.
+
+.. attribute:: Command.sub_commands
+
+   *sub_commands* formalizes the notion of a "family" of commands,
+   e.g. ``install`` as the parent with sub-commands ``install_lib``,
+   ``install_headers``, etc.  The parent of a family of commands defines
+   *sub_commands* as a class attribute; it's a list of 2-tuples ``(command_name,
+   predicate)``, with *command_name* a string and *predicate* a function, a
+   string or ``None``.  *predicate* is a method of the parent command that
+   determines whether the corresponding command is applicable in the current
+   situation.  (E.g. ``install_headers`` is only applicable if we have any C
+   header files to install.)  If *predicate* is ``None``, that command is always
+   applicable.
+
+   *sub_commands* is usually defined at the *end* of a class, because
+   predicates can be methods of the class, so they must already have been
+   defined.  The canonical example is the :command:`install` command.
+
+
+:mod:`distutils.command` --- Individual Distutils commands
+==========================================================
+
+.. module:: distutils.command
+   :synopsis: This subpackage contains one module for each standard Distutils command.
+
+
+.. % \subsubsection{Individual Distutils commands}
+.. % todo
+
+
+:mod:`distutils.command.bdist` --- Build a binary installer
+===========================================================
+
+.. module:: distutils.command.bdist
+   :synopsis: Build a binary installer for a package
+
+
+.. % todo
+
+
+:mod:`distutils.command.bdist_packager` --- Abstract base class for packagers
+=============================================================================
+
+.. module:: distutils.command.bdist_packager
+   :synopsis: Abstract base class for packagers
+
+
+.. % todo
+
+
+:mod:`distutils.command.bdist_dumb` --- Build a "dumb" installer
+================================================================
+
+.. module:: distutils.command.bdist_dumb
+   :synopsis: Build a "dumb" installer - a simple archive of files
+
+
+.. % todo
+
+
+:mod:`distutils.command.bdist_msi` --- Build a Microsoft Installer binary package
+=================================================================================
+
+.. module:: distutils.command.bdist_msi
+   :synopsis: Build a binary distribution as a Windows MSI file
+
+.. class:: bdist_msi
+
+   Builds a `Windows Installer`_ (.msi) binary package.
+
+   .. _Windows Installer: http://msdn.microsoft.com/en-us/library/cc185688(VS.85).aspx
+
+   In most cases, the ``bdist_msi`` installer is a better choice than the
+   ``bdist_wininst`` installer, because it provides better support for
+   Win64 platforms, allows administrators to perform non-interactive
+   installations, and allows installation through group policies.
+
+
+:mod:`distutils.command.bdist_rpm` --- Build a binary distribution as a Redhat RPM and SRPM
+===========================================================================================
+
+.. module:: distutils.command.bdist_rpm
+   :synopsis: Build a binary distribution as a Redhat RPM and SRPM
+
+
+.. % todo
+
+
+:mod:`distutils.command.bdist_wininst` --- Build a Windows installer
+====================================================================
+
+.. module:: distutils.command.bdist_wininst
+   :synopsis: Build a Windows installer
+
+
+.. % todo
+
+
+:mod:`distutils.command.sdist` --- Build a source distribution
+==============================================================
+
+.. module:: distutils.command.sdist
+   :synopsis: Build a source distribution
+
+
+.. % todo
+
+
+:mod:`distutils.command.build` --- Build all files of a package
+===============================================================
+
+.. module:: distutils.command.build
+   :synopsis: Build all files of a package
+
+
+.. % todo
+
+
+:mod:`distutils.command.build_clib` --- Build any C libraries in a package
+==========================================================================
+
+.. module:: distutils.command.build_clib
+   :synopsis: Build any C libraries in a package
+
+
+.. % todo
+
+
+:mod:`distutils.command.build_ext` --- Build any extensions in a package
+========================================================================
+
+.. module:: distutils.command.build_ext
+   :synopsis: Build any extensions in a package
+
+
+.. % todo
+
+
+:mod:`distutils.command.build_py` --- Build the .py/.pyc files of a package
+===========================================================================
+
+.. module:: distutils.command.build_py
+   :synopsis: Build the .py/.pyc files of a package
+
+
+.. class:: build_py
+
+.. class:: build_py_2to3
+
+   Alternative implementation of build_py which also runs the
+   2to3 conversion library on each .py file that is going to be
+   installed. To use this in a setup.py file for a distribution
+   that is designed to run with both Python 2.x and 3.x, add::
+
+     try:
+        from distutils.command.build_py import build_py_2to3 as build_py
+     except ImportError:
+        from distutils.command.build_py import build_py
+
+   to your setup.py, and later::
+
+      cmdclass = {'build_py': build_py}
+
+   to the invocation of setup().
+
+
+:mod:`distutils.command.build_scripts` --- Build the scripts of a package
+=========================================================================
+
+.. module:: distutils.command.build_scripts
+   :synopsis: Build the scripts of a package
+
+
+.. % todo
+
+
+:mod:`distutils.command.clean` --- Clean a package build area
+=============================================================
+
+.. module:: distutils.command.clean
+   :synopsis: Clean a package build area
+
+
+.. % todo
+
+
+:mod:`distutils.command.config` --- Perform package configuration
+=================================================================
+
+.. module:: distutils.command.config
+   :synopsis: Perform package configuration
+
+
+.. % todo
+
+
+:mod:`distutils.command.install` --- Install a package
+======================================================
+
+.. module:: distutils.command.install
+   :synopsis: Install a package
+
+
+.. % todo
+
+
+:mod:`distutils.command.install_data` --- Install data files from a package
+===========================================================================
+
+.. module:: distutils.command.install_data
+   :synopsis: Install data files from a package
+
+
+.. % todo
+
+
+:mod:`distutils.command.install_headers` --- Install C/C++ header files from a package
+======================================================================================
+
+.. module:: distutils.command.install_headers
+   :synopsis: Install C/C++ header files from a package
+
+
+.. % todo
+
+
+:mod:`distutils.command.install_lib` --- Install library files from a package
+=============================================================================
+
+.. module:: distutils.command.install_lib
+   :synopsis: Install library files from a package
+
+
+.. % todo
+
+
+:mod:`distutils.command.install_scripts` --- Install script files from a package
+================================================================================
+
+.. module:: distutils.command.install_scripts
+   :synopsis: Install script files from a package
+
+
+.. % todo
+
+
+:mod:`distutils.command.register` --- Register a module with the Python Package Index
+=====================================================================================
+
+.. module:: distutils.command.register
+   :synopsis: Register a module with the Python Package Index
+
+
+The ``register`` command registers the package with the Python Package  Index.
+This is described in more detail in :pep:`301`.
+
+.. % todo
+
+
+:mod:`distutils.command.check` --- Check the meta-data of a package
+===================================================================
+
+.. module:: distutils.command.check
+   :synopsis: Check the metadata of a package
+
+
+The ``check`` command performs some tests on the meta-data of a package.
+For example, it verifies that all required meta-data are provided as
+the arguments passed to the :func:`setup` function.
+
+.. % todo
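
The apiref.rst hunks above describe the Command life cycle: options are
declared in initialize_options(), given their final values in
finalize_options(), and the actual work happens in run(), with the
``peel_banana`` command used as the worked example.  A minimal sketch of such
a command follows; the ``peel_banana`` name and its ``--speed`` option are
purely illustrative, not part of distutils itself::

   from distutils.cmd import Command

   class peel_banana(Command):
       """Toy command showing the initialize/finalize/run life cycle."""

       description = "peel a banana (illustrative only)"
       user_options = [('speed=', None, "how fast to peel")]

       def initialize_options(self):
           # Defaults only; real values may later come from the command
           # line or a Distutils configuration file.
           self.speed = None

       def finalize_options(self):
           # Compute final option values once outside influences
           # (command line, config files) have been processed.
           if self.speed is None:
               self.speed = 'slow'

       def run(self):
           # All real work, terminal output and filesystem access go here.
           self.announce('peeling at %s speed' % self.speed)

Such a command would be hooked into a setup script with
``setup(..., cmdclass={'peel_banana': peel_banana})`` so that users can run
``setup.py peel_banana``.
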
diff --git a/Doc/distutils/extending.rst b/Doc/distutils/extending.rst
--- a/Doc/distutils/extending.rst
+++ b/Doc/distutils/extending.rst
@@ -15,8 +15,8 @@
 should be copied into packages in addition to :file:`.py` files as a
 convenience.
 
-Most distutils command implementations are subclasses of the :class:`Command`
-class from :mod:`distutils.cmd`.  New commands may directly inherit from
+Most distutils command implementations are subclasses of the
+:class:`distutils.cmd.Command` class.  New commands may directly inherit from
 :class:`Command`, while replacements often derive from :class:`Command`
 indirectly, directly subclassing the command they are replacing.  Commands are
 required to derive from :class:`Command`.
diff --git a/Doc/distutils/index.rst b/Doc/distutils/index.rst
--- a/Doc/distutils/index.rst
+++ b/Doc/distutils/index.rst
@@ -14,6 +14,10 @@
 make Python modules and extensions easily available to a wider audience with
 very little overhead for build/release/install mechanics.
 
+.. deprecated:: 3.3
+   :mod:`packaging` replaces Distutils.  See :ref:`packaging-index` and
+   :ref:`packaging-install-index`.
+
 .. toctree::
    :maxdepth: 2
    :numbered:
@@ -29,3 +33,10 @@
    extending.rst
    commandref.rst
    apiref.rst
+
+Another document describes how to install modules and extensions packaged
+following the above guidelines:
+
+.. toctree::
+
+   install.rst
diff --git a/Doc/install/index.rst b/Doc/distutils/install.rst
copy from Doc/install/index.rst
copy to Doc/distutils/install.rst
diff --git a/Doc/documenting/building.rst b/Doc/documenting/building.rst
--- a/Doc/documenting/building.rst
+++ b/Doc/documenting/building.rst
@@ -14,6 +14,7 @@
 Luckily, a Makefile has been prepared so that on Unix, provided you have
 installed Python and Subversion, you can just run ::
 
+   cd Doc
    make html
 
 to check out the necessary toolset in the :file:`tools/` subdirectory and build
diff --git a/Doc/documenting/style.rst b/Doc/documenting/style.rst
--- a/Doc/documenting/style.rst
+++ b/Doc/documenting/style.rst
@@ -136,7 +136,7 @@
 Economy of Expression
 ---------------------
 
-More documentation is not necessarily better documentation.  Error on the side
+More documentation is not necessarily better documentation.  Err on the side
 of being succinct.
 
 It is an unfortunate fact that making documentation longer can be an impediment
@@ -198,7 +198,7 @@
 The tone of the tutorial (and all the docs) needs to be respectful of the
 reader's intelligence.  Don't presume that the readers are stupid.  Lay out the
 relevant information, show motivating use cases, provide glossary links, and do
-our best to connect-the-dots, but don't talk down to them or waste their time.
+your best to connect-the-dots, but don't talk down to them or waste their time.
 
 The tutorial is meant for newcomers, many of whom will be using the tutorial to
 evaluate the language as a whole.  The experience needs to be positive and not
diff --git a/Doc/glossary.rst b/Doc/glossary.rst
--- a/Doc/glossary.rst
+++ b/Doc/glossary.rst
@@ -247,7 +247,7 @@
       processing, remembering the location execution state (including local
       variables and pending try-statements).  When the generator resumes, it
       picks-up where it left-off (in contrast to functions which start fresh on
-      every invocation.
+      every invocation).
 
       .. index:: single: generator expression
 
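
The glossary entry touched above notes that a generator picks up where it
left off on each resumption; a tiny sketch of that behaviour::

   def countdown(n):
       while n > 0:
           yield n      # execution pauses here; local state is preserved
           n -= 1       # resumes here on the next call to next()

   gen = countdown(3)
   next(gen)   # 3
   next(gen)   # 2, resumed after the yield rather than restarting
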
diff --git a/Doc/howto/sockets.rst b/Doc/howto/sockets.rst
--- a/Doc/howto/sockets.rst
+++ b/Doc/howto/sockets.rst
@@ -23,8 +23,8 @@
 working. It doesn't cover the fine points (and there are a lot of them), but I
 hope it will give you enough background to begin using them decently.
 
-I'm only going to talk about INET sockets, but they account for at least 99% of
-the sockets in use. And I'll only talk about STREAM sockets - unless you really
+I'm only going to talk about INET (i.e. IPv4) sockets, but they account for at least 99% of
+the sockets in use. And I'll only talk about STREAM (i.e. TCP) sockets - unless you really
 know what you're doing (in which case this HOWTO isn't for you!), you'll get
 better behavior and performance from a STREAM socket than anything else. I will
 try to clear up the mystery of what a socket is, as well as some hints on how to
@@ -208,10 +208,10 @@
                totalsent = totalsent + sent
 
        def myreceive(self):
-           msg = ''
+           msg = b''
            while len(msg) < MSGLEN:
                chunk = self.sock.recv(MSGLEN-len(msg))
-               if chunk == '':
+               if chunk == b'':
                    raise RuntimeError("socket connection broken")
                msg = msg + chunk
            return msg
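
The sockets.rst hunk above switches myreceive() to work on bytes; for
context, the matching send side of the same fixed-length protocol also
operates on bytes.  This is a sketch assuming the HOWTO's MySocket class,
with MSGLEN and self.sock as defined there::

   def mysend(self, msg):
       # msg is a bytes object of length MSGLEN
       totalsent = 0
       while totalsent < MSGLEN:
           sent = self.sock.send(msg[totalsent:])
           if sent == 0:
               raise RuntimeError("socket connection broken")
           totalsent = totalsent + sent
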
diff --git a/Doc/install/index.rst b/Doc/install/index.rst
--- a/Doc/install/index.rst
+++ b/Doc/install/index.rst
@@ -1,12 +1,10 @@
-.. highlightlang:: none
+.. _packaging-install-index:
 
-.. _install-index:
+******************************
+  Installing Python Projects
+******************************
 
-*****************************
-  Installing Python Modules
-*****************************
-
-:Author: Greg Ward
+:Author: The Fellowship of the Packaging
 :Release: |version|
 :Date: |today|
 
@@ -16,990 +14,43 @@
    about Python and aren't about to learn the language just in order to
    install and maintain it for their users, i.e. system administrators.
    Thus, I have to be sure to explain the basics at some point:
-   sys.path and PYTHONPATH at least.  Should probably give pointers to
+   sys.path and PYTHONPATH at least. Should probably give pointers to
    other docs on "import site", PYTHONSTARTUP, PYTHONHOME, etc.
 
    Finally, it might be useful to include all the material from my "Care
-   and Feeding of a Python Installation" talk in here somewhere.  Yow!
+   and Feeding of a Python Installation" talk in here somewhere. Yow!
 
 .. topic:: Abstract
 
-   This document describes the Python Distribution Utilities ("Distutils") from the
-   end-user's point-of-view, describing how to extend the capabilities of a
-   standard Python installation by building and installing third-party Python
-   modules and extensions.
+   This document describes Packaging from the end-user's point of view: it
+   explains how to extend the functionality of a standard Python installation by
+   building and installing third-party Python modules and applications.
 
 
-.. _inst-intro:
+This guide is split into a simple overview  followed by a longer presentation of
+the :program:`pysetup` script, the Python package management tool used to
+build, distribute, search for, install, remove and list Python distributions.
 
-Introduction
-============
+.. TODO integrate install and pysetup instead of duplicating
 
-Although Python's extensive standard library covers many programming needs,
-there often comes a time when you need to add some new functionality to your
-Python installation in the form of third-party modules.  This might be necessary
-to support your own programming, or to support an application that you want to
-use and that happens to be written in Python.
+.. toctree::
+   :maxdepth: 2
+   :numbered:
 
-In the past, there has been little support for adding third-party modules to an
-existing Python installation.  With the introduction of the Python Distribution
-Utilities (Distutils for short) in Python 2.0, this changed.
-
-This document is aimed primarily at the people who need to install third-party
-Python modules: end-users and system administrators who just need to get some
-Python application running, and existing Python programmers who want to add some
-new goodies to their toolbox.  You don't need to know Python to read this
-document; there will be some brief forays into using Python's interactive mode
-to explore your installation, but that's it.  If you're looking for information
-on how to distribute your own Python modules so that others may use them, see
-the :ref:`distutils-index` manual.
-
-
-.. _inst-trivial-install:
-
-Best case: trivial installation
--------------------------------
-
-In the best case, someone will have prepared a special version of the module
-distribution you want to install that is targeted specifically at your platform
-and is installed just like any other software on your platform.  For example,
-the module developer might make an executable installer available for Windows
-users, an RPM package for users of RPM-based Linux systems (Red Hat, SuSE,
-Mandrake, and many others), a Debian package for users of Debian-based Linux
-systems, and so forth.
-
-In that case, you would download the installer appropriate to your platform and
-do the obvious thing with it: run it if it's an executable installer, ``rpm
---install`` it if it's an RPM, etc.  You don't need to run Python or a setup
-script, you don't need to compile anything---you might not even need to read any
-instructions (although it's always a good idea to do so anyways).
-
-Of course, things will not always be that easy.  You might be interested in a
-module distribution that doesn't have an easy-to-use installer for your
-platform.  In that case, you'll have to start with the source distribution
-released by the module's author/maintainer.  Installing from a source
-distribution is not too hard, as long as the modules are packaged in the
-standard way.  The bulk of this document is about building and installing
-modules from standard source distributions.
-
-
-.. _inst-new-standard:
-
-The new standard: Distutils
----------------------------
-
-If you download a module source distribution, you can tell pretty quickly if it
-was packaged and distributed in the standard way, i.e. using the Distutils.
-First, the distribution's name and version number will be featured prominently
-in the name of the downloaded archive, e.g. :file:`foo-1.0.tar.gz` or
-:file:`widget-0.9.7.zip`.  Next, the archive will unpack into a similarly-named
-directory: :file:`foo-1.0` or :file:`widget-0.9.7`.  Additionally, the
-distribution will contain a setup script :file:`setup.py`, and a file named
-:file:`README.txt` or possibly just :file:`README`, which should explain that
-building and installing the module distribution is a simple matter of running ::
-
-   python setup.py install
-
-If all these things are true, then you already know how to build and install the
-modules you've just downloaded:  Run the command above. Unless you need to
-install things in a non-standard way or customize the build process, you don't
-really need this manual.  Or rather, the above command is everything you need to
-get out of this manual.
-
-
-.. _inst-standard-install:
-
-Standard Build and Install
-==========================
-
-As described in section :ref:`inst-new-standard`, building and installing a module
-distribution using the Distutils is usually one simple command::
-
-   python setup.py install
-
-On Unix, you'd run this command from a shell prompt; on Windows, you have to
-open a command prompt window ("DOS box") and do it there; on Mac OS X, you open
-a :command:`Terminal` window to get a shell prompt.
-
-
-.. _inst-platform-variations:
-
-Platform variations
--------------------
-
-You should always run the setup command from the distribution root directory,
-i.e. the top-level subdirectory that the module source distribution unpacks
-into.  For example, if you've just downloaded a module source distribution
-:file:`foo-1.0.tar.gz` onto a Unix system, the normal thing to do is::
-
-   gunzip -c foo-1.0.tar.gz | tar xf -    # unpacks into directory foo-1.0
-   cd foo-1.0
-   python setup.py install
-
-On Windows, you'd probably download :file:`foo-1.0.zip`.  If you downloaded the
-archive file to :file:`C:\\Temp`, then it would unpack into
-:file:`C:\\Temp\\foo-1.0`; you can use either a archive manipulator with a
-graphical user interface (such as WinZip) or a command-line tool (such as
-:program:`unzip` or :program:`pkunzip`) to unpack the archive.  Then, open a
-command prompt window ("DOS box"), and run::
-
-   cd c:\Temp\foo-1.0
-   python setup.py install
-
-
-.. _inst-splitting-up:
-
-Splitting the job up
---------------------
-
-Running ``setup.py install`` builds and installs all modules in one run.  If you
-prefer to work incrementally---especially useful if you want to customize the
-build process, or if things are going wrong---you can use the setup script to do
-one thing at a time.  This is particularly helpful when the build and install
-will be done by different users---for example, you might want to build a module
-distribution and hand it off to a system administrator for installation (or do
-it yourself, with super-user privileges).
-
-For example, you can build everything in one step, and then install everything
-in a second step, by invoking the setup script twice::
-
-   python setup.py build
-   python setup.py install
-
-If you do this, you will notice that running the :command:`install` command
-first runs the :command:`build` command, which---in this case---quickly notices
-that it has nothing to do, since everything in the :file:`build` directory is
-up-to-date.
-
-You may not need this ability to break things down often if all you do is
-install modules downloaded off the 'net, but it's very handy for more advanced
-tasks.  If you get into distributing your own Python modules and extensions,
-you'll run lots of individual Distutils commands on their own.
-
-
-.. _inst-how-build-works:
-
-How building works
-------------------
-
-As implied above, the :command:`build` command is responsible for putting the
-files to install into a *build directory*.  By default, this is :file:`build`
-under the distribution root; if you're excessively concerned with speed, or want
-to keep the source tree pristine, you can change the build directory with the
-:option:`--build-base` option. For example::
-
-   python setup.py build --build-base=/tmp/pybuild/foo-1.0
-
-(Or you could do this permanently with a directive in your system or personal
-Distutils configuration file; see section :ref:`inst-config-files`.)  Normally, this
-isn't necessary.
-
-The default layout for the build tree is as follows::
-
-   --- build/ --- lib/
-   or
-   --- build/ --- lib.<plat>/
-                  temp.<plat>/
-
-where ``<plat>`` expands to a brief description of the current OS/hardware
-platform and Python version.  The first form, with just a :file:`lib` directory,
-is used for "pure module distributions"---that is, module distributions that
-include only pure Python modules.  If a module distribution contains any
-extensions (modules written in C/C++), then the second form, with two ``<plat>``
-directories, is used.  In that case, the :file:`temp.{plat}` directory holds
-temporary files generated by the compile/link process that don't actually get
-installed.  In either case, the :file:`lib` (or :file:`lib.{plat}`) directory
-contains all Python modules (pure Python and extensions) that will be installed.
-
-In the future, more directories will be added to handle Python scripts,
-documentation, binary executables, and whatever else is needed to handle the job
-of installing Python modules and applications.
-
-
-.. _inst-how-install-works:
-
-How installation works
-----------------------
-
-After the :command:`build` command runs (whether you run it explicitly, or the
-:command:`install` command does it for you), the work of the :command:`install`
-command is relatively simple: all it has to do is copy everything under
-:file:`build/lib` (or :file:`build/lib.{plat}`) to your chosen installation
-directory.
-
-If you don't choose an installation directory---i.e., if you just run ``setup.py
-install``\ ---then the :command:`install` command installs to the standard
-location for third-party Python modules.  This location varies by platform and
-by how you built/installed Python itself.  On Unix (and Mac OS X, which is also
-Unix-based), it also depends on whether the module distribution being installed
-is pure Python or contains extensions ("non-pure"):
-
-+-----------------+-----------------------------------------------------+--------------------------------------------------+-------+
-| Platform        | Standard installation location                      | Default value                                    | Notes |
-+=================+=====================================================+==================================================+=======+
-| Unix (pure)     | :file:`{prefix}/lib/python{X.Y}/site-packages`      | :file:`/usr/local/lib/python{X.Y}/site-packages` | \(1)  |
-+-----------------+-----------------------------------------------------+--------------------------------------------------+-------+
-| Unix (non-pure) | :file:`{exec-prefix}/lib/python{X.Y}/site-packages` | :file:`/usr/local/lib/python{X.Y}/site-packages` | \(1)  |
-+-----------------+-----------------------------------------------------+--------------------------------------------------+-------+
-| Windows         | :file:`{prefix}\\Lib\\site-packages`                | :file:`C:\\Python{XY}\\Lib\\site-packages`       | \(2)  |
-+-----------------+-----------------------------------------------------+--------------------------------------------------+-------+
-
-Notes:
-
-(1)
-   Most Linux distributions include Python as a standard part of the system, so
-   :file:`{prefix}` and :file:`{exec-prefix}` are usually both :file:`/usr` on
-   Linux.  If you build Python yourself on Linux (or any Unix-like system), the
-   default :file:`{prefix}` and :file:`{exec-prefix}` are :file:`/usr/local`.
-
-(2)
-   The default installation directory on Windows was :file:`C:\\Program
-   Files\\Python` under Python 1.6a1, 1.5.2, and earlier.
-
-:file:`{prefix}` and :file:`{exec-prefix}` stand for the directories that Python
-is installed to, and where it finds its libraries at run-time.  They are always
-the same under Windows, and very often the same under Unix and Mac OS X.  You
-can find out what your Python installation uses for :file:`{prefix}` and
-:file:`{exec-prefix}` by running Python in interactive mode and typing a few
-simple commands. Under Unix, just type ``python`` at the shell prompt.  Under
-Windows, choose :menuselection:`Start --> Programs --> Python X.Y -->
-Python (command line)`.   Once the interpreter is started, you type Python code
-at the prompt.  For example, on my Linux system, I type the three Python
-statements shown below, and get the output as shown, to find out my
-:file:`{prefix}` and :file:`{exec-prefix}`::
-
-   Python 2.4 (#26, Aug  7 2004, 17:19:02)
-   Type "help", "copyright", "credits" or "license" for more information.
-   >>> import sys
-   >>> sys.prefix
-   '/usr'
-   >>> sys.exec_prefix
-   '/usr'
-
-If you don't want to install modules to the standard location, or if you don't
-have permission to write there, then you need to read about alternate
-installations in section :ref:`inst-alt-install`.  If you want to customize your
-installation directories more heavily, see section :ref:`inst-custom-install` on
-custom installations.
-
-
-.. _inst-alt-install:
-
-Alternate Installation
-======================
-
-Often, it is necessary or desirable to install modules to a location other than
-the standard location for third-party Python modules.  For example, on a Unix
-system you might not have permission to write to the standard third-party module
-directory.  Or you might wish to try out a module before making it a standard
-part of your local Python installation.  This is especially true when upgrading
-a distribution already present: you want to make sure your existing base of
-scripts still works with the new version before actually upgrading.
-
-The Distutils :command:`install` command is designed to make installing module
-distributions to an alternate location simple and painless.  The basic idea is
-that you supply a base directory for the installation, and the
-:command:`install` command picks a set of directories (called an *installation
-scheme*) under this base directory in which to install files.  The details
-differ across platforms, so read whichever of the following sections applies to
-you.
-
-
-.. _inst-alt-install-prefix:
-
-Alternate installation: the home scheme
----------------------------------------
-
-The idea behind the "home scheme" is that you build and maintain a personal
-stash of Python modules.  This scheme's name is derived from the idea of a
-"home" directory on Unix, since it's not unusual for a Unix user to make their
-home directory have a layout similar to :file:`/usr/` or :file:`/usr/local/`.
-This scheme can be used by anyone, regardless of the operating system they
-are installing for.
-
-Installing a new module distribution is as simple as ::
-
-   python setup.py install --home=<dir>
-
-where you can supply any directory you like for the :option:`--home` option.  On
-Unix, lazy typists can just type a tilde (``~``); the :command:`install` command
-will expand this to your home directory::
-
-   python setup.py install --home=~
-
-The :option:`--home` option defines the installation base directory.  Files are
-installed to the following directories under the installation base as follows:
-
-+------------------------------+---------------------------+-----------------------------+
-| Type of file                 | Installation Directory    | Override option             |
-+==============================+===========================+=============================+
-| pure module distribution     | :file:`{home}/lib/python` | :option:`--install-purelib` |
-+------------------------------+---------------------------+-----------------------------+
-| non-pure module distribution | :file:`{home}/lib/python` | :option:`--install-platlib` |
-+------------------------------+---------------------------+-----------------------------+
-| scripts                      | :file:`{home}/bin`        | :option:`--install-scripts` |
-+------------------------------+---------------------------+-----------------------------+
-| data                         | :file:`{home}/share`      | :option:`--install-data`    |
-+------------------------------+---------------------------+-----------------------------+
-
-
-.. _inst-alt-install-home:
-
-Alternate installation: Unix (the prefix scheme)
-------------------------------------------------
-
-The "prefix scheme" is useful when you wish to use one Python installation to
-perform the build/install (i.e., to run the setup script), but install modules
-into the third-party module directory of a different Python installation (or
-something that looks like a different Python installation).  If this sounds a
-trifle unusual, it is---that's why the "home scheme" comes first.  However,
-there are at least two known cases where the prefix scheme will be useful.
-
-First, consider that many Linux distributions put Python in :file:`/usr`, rather
-than the more traditional :file:`/usr/local`.  This is entirely appropriate,
-since in those cases Python is part of "the system" rather than a local add-on.
-However, if you are installing Python modules from source, you probably want
-them to go in :file:`/usr/local/lib/python2.{X}` rather than
-:file:`/usr/lib/python2.{X}`.  This can be done with ::
-
-   /usr/bin/python setup.py install --prefix=/usr/local
-
-Another possibility is a network filesystem where the name used to write to a
-remote directory is different from the name used to read it: for example, the
-Python interpreter accessed as :file:`/usr/local/bin/python` might search for
-modules in :file:`/usr/local/lib/python2.{X}`, but those modules would have to
-be installed to, say, :file:`/mnt/{@server}/export/lib/python2.{X}`.  This could
-be done with ::
-
-   /usr/local/bin/python setup.py install --prefix=/mnt/@server/export
-
-In either case, the :option:`--prefix` option defines the installation base, and
-the :option:`--exec-prefix` option defines the platform-specific installation
-base, which is used for platform-specific files.  (Currently, this just means
-non-pure module distributions, but could be expanded to C libraries, binary
-executables, etc.)  If :option:`--exec-prefix` is not supplied, it defaults to
-:option:`--prefix`.  Files are installed as follows:
-
-+------------------------------+-----------------------------------------------------+-----------------------------+
-| Type of file                 | Installation Directory                              | Override option             |
-+==============================+=====================================================+=============================+
-| pure module distribution     | :file:`{prefix}/lib/python{X.Y}/site-packages`      | :option:`--install-purelib` |
-+------------------------------+-----------------------------------------------------+-----------------------------+
-| non-pure module distribution | :file:`{exec-prefix}/lib/python{X.Y}/site-packages` | :option:`--install-platlib` |
-+------------------------------+-----------------------------------------------------+-----------------------------+
-| scripts                      | :file:`{prefix}/bin`                                | :option:`--install-scripts` |
-+------------------------------+-----------------------------------------------------+-----------------------------+
-| data                         | :file:`{prefix}/share`                              | :option:`--install-data`    |
-+------------------------------+-----------------------------------------------------+-----------------------------+
-
-There is no requirement that :option:`--prefix` or :option:`--exec-prefix`
-actually point to an alternate Python installation; if the directories listed
-above do not already exist, they are created at installation time.
-
-Incidentally, the real reason the prefix scheme is important is simply that a
-standard Unix installation uses the prefix scheme, but with :option:`--prefix`
-and :option:`--exec-prefix` supplied by Python itself as ``sys.prefix`` and
-``sys.exec_prefix``.  Thus, you might think you'll never use the prefix scheme,
-but every time you run ``python setup.py install`` without any other options,
-you're using it.
-
-Note that installing extensions to an alternate Python installation has no
-effect on how those extensions are built: in particular, the Python header files
-(:file:`Python.h` and friends) installed with the Python interpreter used to run
-the setup script will be used in compiling extensions.  It is your
-responsibility to ensure that the interpreter used to run extensions installed
-in this way is compatible with the interpreter used to build them.  The best way
-to do this is to ensure that the two interpreters are the same version of Python
-(possibly different builds, or possibly copies of the same build).  (Of course,
-if your :option:`--prefix` and :option:`--exec-prefix` don't even point to an
-alternate Python installation, this is immaterial.)
-
-
-.. _inst-alt-install-windows:
-
-Alternate installation: Windows (the prefix scheme)
----------------------------------------------------
-
-Windows has no concept of a user's home directory, and since the standard Python
-installation under Windows is simpler than under Unix, the :option:`--prefix`
-option has traditionally been used to install additional packages in separate
-locations on Windows.  For example::
-
-   python setup.py install --prefix="\Temp\Python"
-
-installs modules to the :file:`\\Temp\\Python` directory on the current drive.
-
-The installation base is defined by the :option:`--prefix` option; the
-:option:`--exec-prefix` option is not supported under Windows. Files are
-installed as follows:
-
-+------------------------------+---------------------------+-----------------------------+
-| Type of file                 | Installation Directory    | Override option             |
-+==============================+===========================+=============================+
-| pure module distribution     | :file:`{prefix}`          | :option:`--install-purelib` |
-+------------------------------+---------------------------+-----------------------------+
-| non-pure module distribution | :file:`{prefix}`          | :option:`--install-platlib` |
-+------------------------------+---------------------------+-----------------------------+
-| scripts                      | :file:`{prefix}\\Scripts` | :option:`--install-scripts` |
-+------------------------------+---------------------------+-----------------------------+
-| data                         | :file:`{prefix}\\Data`    | :option:`--install-data`    |
-+------------------------------+---------------------------+-----------------------------+
-
-
-.. _inst-custom-install:
-
-Custom Installation
-===================
-
-Sometimes, the alternate installation schemes described in section
-:ref:`inst-alt-install` just don't do what you want.  You might want to tweak just
-one or two directories while keeping everything under the same base directory,
-or you might want to completely redefine the installation scheme.  In either
-case, you're creating a *custom installation scheme*.
-
-You probably noticed the column of "override options" in the tables describing
-the alternate installation schemes above.  Those options are how you define a
-custom installation scheme.  These override options can be relative, absolute,
-or explicitly defined in terms of one of the installation base directories.
-(There are two installation base directories, and they are normally the same---
-they only differ when you use the Unix "prefix scheme" and supply different
-:option:`--prefix` and :option:`--exec-prefix` options.)
-
-For example, say you're installing a module distribution to your home directory
-under Unix---but you want scripts to go in :file:`~/scripts` rather than
-:file:`~/bin`. As you might expect, you can override this directory with the
-:option:`--install-scripts` option; in this case, it makes most sense to supply
-a relative path, which will be interpreted relative to the installation base
-directory (your home directory, in this case)::
-
-   python setup.py install --home=~ --install-scripts=scripts
-
-Another Unix example: suppose your Python installation was built and installed
-with a prefix of :file:`/usr/local/python`, so under a standard  installation
-scripts will wind up in :file:`/usr/local/python/bin`.  If you want them in
-:file:`/usr/local/bin` instead, you would supply this absolute directory for the
-:option:`--install-scripts` option::
-
-   python setup.py install --install-scripts=/usr/local/bin
-
-(This performs an installation using the "prefix scheme," where the prefix is
-whatever your Python interpreter was installed with--- :file:`/usr/local/python`
-in this case.)
-
-If you maintain Python on Windows, you might want third-party modules to live in
-a subdirectory of :file:`{prefix}`, rather than right in :file:`{prefix}`
-itself.  This is almost as easy as customizing the script installation directory
----you just have to remember that there are two types of modules to worry about,
-pure modules and non-pure modules (i.e., modules from a non-pure distribution).
-For example::
-
-   python setup.py install --install-purelib=Site --install-platlib=Site
-
-The specified installation directories are relative to :file:`{prefix}`.  Of
-course, you also have to ensure that these directories are in Python's module
-search path, such as by putting a :file:`.pth` file in :file:`{prefix}`.  See
-section :ref:`inst-search-path` to find out how to modify Python's search path.
-
-If you want to define an entire installation scheme, you just have to supply all
-of the installation directory options.  The recommended way to do this is to
-supply relative paths; for example, if you want to maintain all Python
-module-related files under :file:`python` in your home directory, and you want a
-separate directory for each platform that you use your home directory from, you
-might define the following installation scheme::
-
-   python setup.py install --home=~ \
-                           --install-purelib=python/lib \
-                           --install-platlib=python/lib.$PLAT \
-                           --install-scripts=python/scripts
-                           --install-data=python/data
-
-or, equivalently, ::
-
-   python setup.py install --home=~/python \
-                           --install-purelib=lib \
-                           --install-platlib='lib.$PLAT' \
-                           --install-scripts=scripts
-                           --install-data=data
-
-``$PLAT`` is not (necessarily) an environment variable---it will be expanded by
-the Distutils as it parses your command line options, just as it does when
-parsing your configuration file(s).
-
-Obviously, specifying the entire installation scheme every time you install a
-new module distribution would be very tedious.  Thus, you can put these options
-into your Distutils config file (see section :ref:`inst-config-files`)::
-
-   [install]
-   install-base=$HOME
-   install-purelib=python/lib
-   install-platlib=python/lib.$PLAT
-   install-scripts=python/scripts
-   install-data=python/data
-
-or, equivalently, ::
-
-   [install]
-   install-base=$HOME/python
-   install-purelib=lib
-   install-platlib=lib.$PLAT
-   install-scripts=scripts
-   install-data=data
-
-Note that these two are *not* equivalent if you supply a different installation
-base directory when you run the setup script.  For example, ::
-
-   python setup.py install --install-base=/tmp
-
-would install pure modules to :file:`{/tmp/python/lib}` in the first case, and
-to :file:`{/tmp/lib}` in the second case.  (For the second case, you probably
-want to supply an installation base of :file:`/tmp/python`.)
-
-You probably noticed the use of ``$HOME`` and ``$PLAT`` in the sample
-configuration file input.  These are Distutils configuration variables, which
-bear a strong resemblance to environment variables. In fact, you can use
-environment variables in config files on platforms that have such a notion but
-the Distutils additionally define a few extra variables that may not be in your
-environment, such as ``$PLAT``.  (And of course, on systems that don't have
-environment variables, such as Mac OS 9, the configuration variables supplied by
-the Distutils are the only ones you can use.) See section :ref:`inst-config-files`
-for details.
-
-.. XXX need some Windows examples---when would custom installation schemes be
-   needed on those platforms?
-
-
-.. XXX I'm not sure where this section should go.
-
-.. _inst-search-path:
-
-Modifying Python's Search Path
-------------------------------
-
-When the Python interpreter executes an :keyword:`import` statement, it searches
-for both Python code and extension modules along a search path.  A default value
-for the path is configured into the Python binary when the interpreter is built.
-You can determine the path by importing the :mod:`sys` module and printing the
-value of ``sys.path``.   ::
-
-   $ python
-   Python 2.2 (#11, Oct  3 2002, 13:31:27)
-   [GCC 2.96 20000731 (Red Hat Linux 7.3 2.96-112)] on linux2
-   Type "help", "copyright", "credits" or "license" for more information.
-   >>> import sys
-   >>> sys.path
-   ['', '/usr/local/lib/python2.3', '/usr/local/lib/python2.3/plat-linux2',
-    '/usr/local/lib/python2.3/lib-tk', '/usr/local/lib/python2.3/lib-dynload',
-    '/usr/local/lib/python2.3/site-packages']
-   >>>
-
-The null string in ``sys.path`` represents the current working directory.
-
-The expected convention for locally installed packages is to put them in the
-:file:`{...}/site-packages/` directory, but you may want to install Python
-modules into some arbitrary directory.  For example, your site may have a
-convention of keeping all software related to the web server under :file:`/www`.
-Add-on Python modules might then belong in :file:`/www/python`, and in order to
-import them, this directory must be added to ``sys.path``.  There are several
-different ways to add the directory.
-
-The most convenient way is to add a path configuration file to a directory
-that's already on Python's path, usually to the :file:`.../site-packages/`
-directory.  Path configuration files have an extension of :file:`.pth`, and each
-line must contain a single path that will be appended to ``sys.path``.  (Because
-the new paths are appended to ``sys.path``, modules in the added directories
-will not override standard modules.  This means you can't use this mechanism for
-installing fixed versions of standard modules.)
-
-Paths can be absolute or relative, in which case they're relative to the
-directory containing the :file:`.pth` file.  See the documentation of
-the :mod:`site` module for more information.
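-
-The effect of a path configuration file can also be reproduced at run time:
-:func:`site.addsitedir` adds a directory to ``sys.path`` and processes any
-:file:`.pth` files found inside it, which is handy for experimenting before
-writing a real :file:`.pth` file (the :file:`/www/python` directory below is
-just the example used earlier)::
-
-   import site
-   import sys
-
-   site.addsitedir('/www/python')   # also reads any .pth files in /www/python
-   print('/www/python' in sys.path)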
-
-A slightly less convenient way is to edit the :file:`site.py` file in Python's
-standard library, and modify ``sys.path``.  :file:`site.py` is automatically
-imported when the Python interpreter is executed, unless the :option:`-S` switch
-is supplied to suppress this behaviour.  So you could simply edit
-:file:`site.py` and add two lines to it::
-
-   import sys
-   sys.path.append('/www/python/')
-
-However, if you reinstall the same major version of Python (perhaps when
-upgrading from 2.2 to 2.2.2, for example) :file:`site.py` will be overwritten by
-the stock version.  You'd have to remember that it was modified and save a copy
-before doing the installation.
-
-There are two environment variables that can modify ``sys.path``.
-:envvar:`PYTHONHOME` sets an alternate value for the prefix of the Python
-installation.  For example, if :envvar:`PYTHONHOME` is set to ``/www/python``,
-the search path will be set to ``['', '/www/python/lib/pythonX.Y/',
-'/www/python/lib/pythonX.Y/plat-linux2', ...]``.
-
-The :envvar:`PYTHONPATH` variable can be set to a list of paths that will be
-added to the beginning of ``sys.path``.  For example, if :envvar:`PYTHONPATH` is
-set to ``/www/python:/opt/py``, the search path will begin with
-``['/www/python', '/opt/py']``.  (Note that directories must exist in order to
-be added to ``sys.path``; the :mod:`site` module removes paths that don't
-exist.)
-
-Finally, ``sys.path`` is just a regular Python list, so any Python application
-can modify it by adding or removing entries.
-
-
-.. _inst-config-files:
-
-Distutils Configuration Files
-=============================
-
-As mentioned above, you can use Distutils configuration files to record personal
-or site preferences for any Distutils options.  That is, any option to any
-command can be stored in one of two or three (depending on your platform)
-configuration files, which will be consulted before the command-line is parsed.
-This means that configuration files will override default values, and the
-command-line will in turn override configuration files.  Furthermore, if
-multiple configuration files apply, values from "earlier" files are overridden
-by "later" files.
-
-
-.. _inst-config-filenames:
-
-Location and names of config files
-----------------------------------
-
-The names and locations of the configuration files vary slightly across
-platforms.  On Unix and Mac OS X, the three configuration files (in the order
-they are processed) are:
-
-+--------------+----------------------------------------------------------+-------+
-| Type of file | Location and filename                                    | Notes |
-+==============+==========================================================+=======+
-| system       | :file:`{prefix}/lib/python{ver}/distutils/distutils.cfg` | \(1)  |
-+--------------+----------------------------------------------------------+-------+
-| personal     | :file:`$HOME/.pydistutils.cfg`                           | \(2)  |
-+--------------+----------------------------------------------------------+-------+
-| local        | :file:`setup.cfg`                                        | \(3)  |
-+--------------+----------------------------------------------------------+-------+
-
-And on Windows, the configuration files are:
-
-+--------------+-------------------------------------------------+-------+
-| Type of file | Location and filename                           | Notes |
-+==============+=================================================+=======+
-| system       | :file:`{prefix}\\Lib\\distutils\\distutils.cfg` | \(4)  |
-+--------------+-------------------------------------------------+-------+
-| personal     | :file:`%HOME%\\pydistutils.cfg`                 | \(5)  |
-+--------------+-------------------------------------------------+-------+
-| local        | :file:`setup.cfg`                               | \(3)  |
-+--------------+-------------------------------------------------+-------+
-
-On all platforms, the "personal" file can be temporarily disabled by
-passing the ``--no-user-cfg`` option.
-
-Notes:
-
-(1)
-   Strictly speaking, the system-wide configuration file lives in the directory
-   where the Distutils are installed; under Python 1.6 and later on Unix, this is
-   as shown. For Python 1.5.2, the Distutils will normally be installed to
-   :file:`{prefix}/lib/python1.5/site-packages/distutils`, so the system
-   configuration file should be put there under Python 1.5.2.
-
-(2)
-   On Unix, if the :envvar:`HOME` environment variable is not defined, the user's
-   home directory will be determined with the :func:`getpwuid` function from the
-   standard :mod:`pwd` module. This is done by the :func:`os.path.expanduser`
-   function used by Distutils; see the short example after these notes.
-
-(3)
-   I.e., in the current directory (usually the location of the setup script).
-
-(4)
-   (See also note (1).)  Under Python 1.6 and later, Python's default "installation
-   prefix" is :file:`C:\\Python`, so the system configuration file is normally
-   :file:`C:\\Python\\Lib\\distutils\\distutils.cfg`. Under Python 1.5.2, the
-   default prefix was :file:`C:\\Program Files\\Python`, and the Distutils were not
-   part of the standard library---so the system configuration file would be
-   :file:`C:\\Program Files\\Python\\distutils\\distutils.cfg` in a standard Python
-   1.5.2 installation under Windows.
-
-(5)
-   On Windows, if the :envvar:`HOME` environment variable is not defined,
-   :envvar:`USERPROFILE` then :envvar:`HOMEDRIVE` and :envvar:`HOMEPATH` will
-   be tried. This is done by the :func:`os.path.expanduser` function used
-   by Distutils.
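-
-As notes (2) and (5) mention, the location of the personal file ultimately
-comes from :func:`os.path.expanduser`; a one-line check of what it resolves to
-on your system (shown with the Unix filename)::
-
-   import os.path
-
-   print(os.path.expanduser('~/.pydistutils.cfg'))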
-
-
-.. _inst-config-syntax:
-
-Syntax of config files
-----------------------
-
-The Distutils configuration files all have the same syntax.  The config files
-are grouped into sections.  There is one section for each Distutils command,
-plus a ``global`` section for global options that affect every command.  Each
-section consists of one option per line, specified as ``option=value``.
-
-For example, the following is a complete config file that just forces all
-commands to run quietly by default::
-
-   [global]
-   verbose=0
-
-If this is installed as the system config file, it will affect all processing of
-any Python module distribution by any user on the current system.  If it is
-installed as your personal config file (on systems that support them), it will
-affect only module distributions processed by you.  And if it is used as the
-:file:`setup.cfg` for a particular module distribution, it affects only that
-distribution.
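-
-The files use the same INI-style syntax understood by the standard
-:mod:`configparser` module, so you can quickly inspect how a file will be read
-with a few lines of Python (a sketch for checking a config file, not how the
-Distutils themselves load options)::
-
-   import configparser
-
-   cfg = configparser.ConfigParser()
-   cfg.read('setup.cfg')                # or any of the files listed above
-   for section in cfg.sections():
-       for option, value in cfg.items(section):
-           print(section, option, value)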
-
-You could override the default "build base" directory and make the
-:command:`build\*` commands always forcibly rebuild all files with the
-following::
-
-   [build]
-   build-base=blib
-   force=1
-
-which corresponds to the command-line arguments ::
-
-   python setup.py build --build-base=blib --force
-
-except that including the :command:`build` command on the command-line means
-that command will be run.  Including a particular command in config files has no
-such implication; it only means that if the command is run, the options in the
-config file will apply.  (Or if other commands that derive values from it are
-run, they will use the values in the config file.)
-
-You can find out the complete list of options for any command using the
-:option:`--help` option, e.g.::
-
-   python setup.py build --help
-
-and you can find out the complete list of global options by using
-:option:`--help` without a command::
-
-   python setup.py --help
-
-See also the "Reference" section of the "Distributing Python Modules" manual.
-
-
-.. _inst-building-ext:
-
-Building Extensions: Tips and Tricks
-====================================
-
-Whenever possible, the Distutils try to use the configuration information made
-available by the Python interpreter used to run the :file:`setup.py` script.
-For example, the same compiler and linker flags used to compile Python will also
-be used for compiling extensions.  Usually this will work well, but in
-complicated situations this might be inappropriate.  This section discusses how
-to override the usual Distutils behaviour.
-
-
-.. _inst-tweak-flags:
-
-Tweaking compiler/linker flags
-------------------------------
-
-Compiling a Python extension written in C or C++ will sometimes require
-specifying custom flags for the compiler and linker in order to use a particular
-library or produce a special kind of object code. This is especially true if the
-extension hasn't been tested on your platform, or if you're trying to
-cross-compile Python.
-
-In the most general case, the extension author might have foreseen that
-compiling the extensions would be complicated, and provided a :file:`Setup` file
-for you to edit.  This will likely only be done if the module distribution
-contains many separate extension modules, or if they often require elaborate
-sets of compiler flags in order to work.
-
-A :file:`Setup` file, if present, is parsed in order to get a list of extensions
-to build.  Each line in a :file:`Setup` describes a single module.  Lines have
-the following structure::
-
-   module ... [sourcefile ...] [cpparg ...] [library ...]
-
-
-Let's examine each of the fields in turn.
-
-* *module* is the name of the extension module to be built, and should be a
-  valid Python identifier.  You can't just change this in order to rename a module
-  (edits to the source code would also be needed), so this should be left alone.
-
-* *sourcefile* is anything that's likely to be a source code file, at least
-  judging by the filename.  Filenames ending in :file:`.c` are assumed to be
-  written in C, filenames ending in :file:`.C`, :file:`.cc`, and :file:`.c++` are
-  assumed to be C++, and filenames ending in :file:`.m` or :file:`.mm` are assumed
-  to be in Objective C.
-
-* *cpparg* is an argument for the C preprocessor,  and is anything starting with
-  :option:`-I`, :option:`-D`, :option:`-U` or :option:`-C`.
-
-* *library* is anything ending in :file:`.a` or beginning with :option:`-l` or
-  :option:`-L`.
-
-If a module requires an additional library on your platform, you can add it by
-editing the :file:`Setup` file and running ``python setup.py build``.
-For example, if the module defined by the line ::
-
-   foo foomodule.c
-
-must be linked with the math library :file:`libm.a` on your platform, simply add
-:option:`-lm` to the line::
-
-   foo foomodule.c -lm
-
-Arbitrary switches intended for the compiler or the linker can be supplied with
-the :option:`-Xcompiler` *arg* and :option:`-Xlinker` *arg* options::
-
-   foo foomodule.c -Xcompiler -o32 -Xlinker -shared -lm
-
-The next option after :option:`-Xcompiler` and :option:`-Xlinker` will be
-appended to the proper command line, so in the above example the compiler will
-be passed the :option:`-o32` option, and the linker will be passed
-:option:`-shared`.  If a compiler option requires an argument, you'll have to
-supply multiple :option:`-Xcompiler` options; for example, to pass ``-x c++``
-the :file:`Setup` file would have to contain ``-Xcompiler -x -Xcompiler c++``.
-
-Compiler flags can also be supplied through setting the :envvar:`CFLAGS`
-environment variable.  If set, the contents of :envvar:`CFLAGS` will be added to
-the compiler flags specified in the  :file:`Setup` file.
-
-
-.. _inst-non-ms-compilers:
-
-Using non-Microsoft compilers on Windows
-----------------------------------------
-
-.. sectionauthor:: Rene Liebscher <R.Liebscher at gmx.de>
-
-
-
-Borland/CodeGear C++
-^^^^^^^^^^^^^^^^^^^^
-
-This subsection describes the necessary steps to use Distutils with the Borland
-C++ compiler version 5.5.  First you have to know that Borland's object file
-format (OMF) is different from the format used by the Python version you can
-download from the Python or ActiveState Web site.  (Python is built with
-Microsoft Visual C++, which uses COFF as the object file format.) For this
-reason you have to convert Python's library :file:`python25.lib` into the
-Borland format.  You can do this as follows:
-
-.. Should we mention that users have to create cfg-files for the compiler?
-.. see also http://community.borland.com/article/0,1410,21205,00.html
-
-::
-
-   coff2omf python25.lib python25_bcpp.lib
-
-The :file:`coff2omf` program comes with the Borland compiler.  The file
-:file:`python25.lib` is in the :file:`Libs` directory of your Python
-installation.  If your extension uses other libraries (zlib, ...) you have to
-convert them too.
-
-The converted files have to reside in the same directories as the normal
-libraries.
-
-How does Distutils manage to use these libraries with their changed names?  If
-the extension needs a library (e.g. :file:`foo`), Distutils first checks for a
-library with the :file:`_bcpp` suffix (e.g. :file:`foo_bcpp.lib`) and uses it
-if found.  If no such library is found, it falls back to the default name
-(:file:`foo.lib`). [#]_
-
-To let Distutils compile your extension with Borland C++ you now have to type::
-
-   python setup.py build --compiler=bcpp
-
-If you want to use the Borland C++ compiler as the default, you could specify
-this in your personal or system-wide configuration file for Distutils (see
-section :ref:`inst-config-files`.)
+   install
+   pysetup
+   pysetup-config
+   pysetup-servers
 
 
 .. seealso::
 
-   `C++Builder Compiler <http://www.codegear.com/downloads/free/cppbuilder>`_
-      Information about the free C++ compiler from Borland, including links to the
-      download pages.
+   :ref:`packaging-index`
+      The manual for developers of Python projects who want to package and
+      distribute them. This describes how to use :mod:`packaging` to make
+      projects easily found and added to an existing Python installation.
 
-   `Creating Python Extensions Using Borland's Free Compiler <http://www.cyberus.ca/~g_will/pyExtenDL.shtml>`_
-      Document describing how to use Borland's free command-line C++ compiler to build
-      Python.
-
-
-GNU C / Cygwin / MinGW
-^^^^^^^^^^^^^^^^^^^^^^
-
-This section describes the necessary steps to use Distutils with the GNU C/C++
-compilers in their Cygwin and MinGW distributions. [#]_ For a Python interpreter
-that was built with Cygwin, everything should work without any of these
-following steps.
-
-Not all extensions can be built with MinGW or Cygwin, but many can.  Extensions
-most likely to not work are those that use C++ or depend on Microsoft Visual C
-extensions.
-
-To let Distutils compile your extension with Cygwin you have to type::
-
-   python setup.py build --compiler=cygwin
-
-and for Cygwin in no-cygwin mode [#]_ or for MinGW type::
-
-   python setup.py build --compiler=mingw32
-
-If you want to use any of these options/compilers as default, you should
-consider writing it in your personal or system-wide configuration file for
-Distutils (see section :ref:`inst-config-files`.)
-
-Older Versions of Python and MinGW
-""""""""""""""""""""""""""""""""""
-The following instructions only apply if you're using a version of Python
-older than 2.4.1 with a MinGW older than 3.0.0 (with
-binutils-2.13.90-20030111-1).
-
-These compilers require some special libraries.  This task is more complex than
-for Borland's C++, because there is no program to convert the library.  First
-you have to create a list of symbols which the Python DLL exports. (You can find
-a good program for this task at
-http://www.emmestech.com/software/pexports-0.43/download_pexports.html).
-
-.. I don't understand what the next line means. --amk
-.. (inclusive the references on data structures.)
-
-::
-
-   pexports python25.dll >python25.def
-
-The location of an installed :file:`python25.dll` will depend on the
-installation options and the version and language of Windows.  In a "just for
-me" installation, it will appear in the root of the installation directory.  In
-a shared installation, it will be located in the system directory.
-
-Then you can create an import library for gcc from this information. ::
-
-   /cygwin/bin/dlltool --dllname python25.dll --def python25.def --output-lib libpython25.a
-
-The resulting library has to be placed in the same directory as
-:file:`python25.lib`. (Should be the :file:`libs` directory under your Python
-installation directory.)
-
-If your extension uses other libraries (zlib, ...), you might have to convert
-them too.  The converted files have to reside in the same directories as the
-normal libraries do.
-
-
-.. seealso::
-
-   `Building Python modules on MS Windows platform with MinGW <http://www.zope.org/Members/als/tips/win32_mingw_modules>`_
-      Information about building the required libraries for the MinGW environment.
-
-
-.. rubric:: Footnotes
-
-.. [#] This also means you could replace all existing COFF-libraries with OMF-libraries
-   of the same name.
-
-.. [#] Check http://sources.redhat.com/cygwin/ and http://www.mingw.org/ for more
-   information
-
-.. [#] Then you have no POSIX emulation available, but you also don't need
-   :file:`cygwin1.dll`.
+   :mod:`packaging`
+      A library reference for developers of packaging tools wanting to use
+      standalone building blocks like :mod:`~packaging.version` or
+      :mod:`~packaging.metadata`, or extend Packaging itself.
diff --git a/Doc/install/install.rst b/Doc/install/install.rst
new file mode 100644
--- /dev/null
+++ b/Doc/install/install.rst
@@ -0,0 +1,1029 @@
+.. highlightlang:: none
+
+====================================
+Installing Python projects: overview
+====================================
+
+.. _packaging_packaging-intro:
+
+Introduction
+============
+
+Although Python's extensive standard library covers many programming needs,
+there often comes a time when you need to add new functionality to your Python
+installation in the form of third-party modules. This might be necessary to
+support your own programming, or to support an application that you want to use
+and that happens to be written in Python.
+
+In the past, there was little support for adding third-party modules to an
+existing Python installation.  With the introduction of the Python Distribution
+Utilities (Distutils for short) in Python 2.0, this changed.  However, not all
+problems were solved; end-users had to rely on ``easy_install`` or
+``pip`` to download third-party modules from PyPI, uninstall distributions or do
+other maintenance operations.  Packaging is a more complete replacement for
+Distutils, in the standard library, with a backport named Distutils2 available
+for older Python versions.
+
+This document is aimed primarily at people who need to install third-party
+Python modules: end-users and system administrators who just need to get some
+Python application running, and existing Python programmers who want to add
+new goodies to their toolbox. You don't need to know Python to read this
+document; there will be some brief forays into using Python's interactive mode
+to explore your installation, but that's it. If you're looking for information
+on how to distribute your own Python modules so that others may use them, see
+the :ref:`packaging-index` manual.
+
+
+.. _packaging-trivial-install:
+
+Best case: trivial installation
+-------------------------------
+
+In the best case, someone will have prepared a special version of the module
+distribution you want to install that is targeted specifically at your platform
+and can be installed just like any other software on your platform. For example,
+the module's developer might make an executable installer available for Windows
+users, an RPM package for users of RPM-based Linux systems (Red Hat, SuSE,
+Mandrake, and many others), a Debian package for users of Debian and derivative
+systems, and so forth.
+
+In that case, you would use the standard system tools to download and install
+the specific installer for your platform and its dependencies.
+
+Of course, things will not always be that easy. You might be interested in a
+module whose distribution doesn't have an easy-to-use installer for your
+platform. In that case, you'll have to start with the source distribution
+released by the module's author/maintainer. Installing from a source
+distribution is not too hard, as long as the modules are packaged in the
+standard way. The bulk of this document addresses the building and installing
+of modules from standard source distributions.
+
+
+.. _packaging-distutils:
+
+The Python standard: Distutils
+------------------------------
+
+If you download a source distribution of a module, it will be obvious whether
+it was packaged and distributed using Distutils.  First, the distribution's name
+and version number will be featured prominently in the name of the downloaded
+archive, e.g. :file:`foo-1.0.tar.gz` or :file:`widget-0.9.7.zip`.  Next, the
+archive will unpack into a similarly-named directory: :file:`foo-1.0` or
+:file:`widget-0.9.7`.  Additionally, the distribution may contain a
+:file:`setup.cfg` file and a file named :file:`README.txt` ---or possibly just
+:file:`README`--- explaining that building and installing the module
+distribution is a simple matter of issuing the following command at your shell's
+prompt::
+
+   python setup.py install
+
+Third-party projects have extended Distutils to work around its limitations or
+add functionality.  After some years of near-inactivity in Distutils, a new
+maintainer has started to standardize good ideas in PEPs and implement them in a
+new, improved version of Distutils, called Distutils2 or Packaging.
+
+
+.. _packaging-new-standard:
+
+The new standard: Packaging
+---------------------------
+
+The rules described in the first paragraph above apply to Packaging-based
+projects too: a source distribution will have a name like
+:file:`widget-0.9.7.zip`.  One of the main differences from Distutils is that
+distributions no longer have a :file:`setup.py` script, which used to cause a
+number of issues.  Now there is a single script installed with Python itself::
+
+   pysetup install widget-0.9.7.zip
+
+Running this command is enough to build and install projects (Python modules or
+packages, scripts or whole applications), without even having to unpack the
+archive.  It is also compatible with Distutils-based distributions.
+
+Unless you have to perform non-standard installations or customize the build
+process, you can stop reading this manual ---the above command is all you need
+from it.
+
+With :program:`pysetup`, you won't even have to manually download a distribution
+before installing it; see :ref:`packaging-pysetup`.
+
+
+.. _packaging-standard-install:
+
+Standard build and install
+==========================
+
+As described in section :ref:`packaging-new-standard`, building and installing
+a module distribution using Packaging usually comes down to one simple
+command::
+
+   pysetup run install_dist
+
+How you actually run this command depends on the platform and the command line
+interface it provides:
+
+* **Unix**: Use a shell prompt.
+* **Windows**: Open a command prompt ("DOS console") or use :command:`Powershell`.
+* **OS X**: Open a :command:`Terminal`.
+
+
+.. _packaging-platform-variations:
+
+Platform variations
+-------------------
+
+The setup command is meant to be run from the root directory of the source
+distribution, i.e. the top-level subdirectory that the module source
+distribution unpacks into. For example, if you've just downloaded a module
+source distribution :file:`foo-1.0.tar.gz` onto a Unix system, the normal
+steps to follow are these::
+
+   gunzip -c foo-1.0.tar.gz | tar xf -    # unpacks into directory foo-1.0
+   cd foo-1.0
+   pysetup run install_dist
+
+On Windows, you'd probably download :file:`foo-1.0.zip`. If you downloaded the
+archive file to :file:`C:\\Temp`, then it would unpack into
+:file:`C:\\Temp\\foo-1.0`. To actually unpack the archive, you can use either
+an archive manipulator with a graphical user interface (such as WinZip or 7-Zip)
+or a command-line tool (such as :program:`unzip`, :program:`pkunzip` or, again,
+:program:`7z`). Then, open a command prompt window ("DOS box" or
+Powershell), and run::
+
+   cd c:\Temp\foo-1.0
+   pysetup run install_dist
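+
+If you prefer, the archive can also be unpacked without a third-party tool,
+using the standard :mod:`zipfile` module (a small sketch; the file and
+directory names follow the example above)::
+
+   import zipfile
+
+   # Extracts into C:\Temp\foo-1.0, matching the layout described above.
+   with zipfile.ZipFile(r'C:\Temp\foo-1.0.zip') as archive:
+       archive.extractall(r'C:\Temp')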
+
+
+.. _packaging-splitting-up:
+
+Splitting the job up
+--------------------
+
+Running ``pysetup run install_dist`` builds and installs all modules in one go.
+If you prefer to work incrementally ---especially useful if you want to
+customize the build process, or if things are going wrong--- you can run the
+individual Packaging commands one at a time. This is particularly valuable when
+the build and install steps are performed by different users. For example, you
+might want to build a module distribution and hand it off to a system
+administrator for installation (or do it yourself, but with super-user or admin
+privileges).
+
+For example, to build everything in one step and then install everything
+in a second step, you can invoke two distinct Packaging commands::
+
+   pysetup run build
+   pysetup run install_dist
+
+If you do this, you will notice that invoking the :command:`install_dist` command
+first runs the :command:`build` command, which ---in this case--- quickly
+notices it can spare itself the work, since everything in the :file:`build`
+directory is up-to-date.
+
+You may often ignore this ability to split the process into steps if all you do
+is install modules downloaded from the Internet, but it's very handy for more
+advanced tasks. If you find yourself needing to distribute your own Python
+modules and extensions, though, you'll most likely run many individual
+Packaging commands.
+
+
+.. _packaging-how-build-works:
+
+How building works
+------------------
+
+As implied above, the :command:`build` command is responsible for collecting
+and placing the files to be installed into a *build directory*. By default,
+this is :file:`build`, under the distribution root. If you're excessively
+concerned with speed, or want to keep the source tree pristine, you can specify
+a different build directory with the :option:`--build-base` option. For example::
+
+   pysetup run build --build-base /tmp/pybuild/foo-1.0
+
+(Or you could do this permanently with a directive in your system or personal
+Packaging configuration file; see section :ref:`packaging-config-files`.)
+In the usual case, however, all this is unnecessary.
+
+The build tree's default layout looks like so::
+
+   --- build/ --- lib/
+   or
+   --- build/ --- lib.<plat>/
+                  temp.<plat>/
+
+where ``<plat>`` expands to a brief description of the current OS/hardware
+platform and Python version. The first form, with just a :file:`lib` directory,
+is used for pure module distributions (module distributions that
+include only pure Python modules). If a module distribution contains any
+extensions (modules written in C/C++), then the second form, with two ``<plat>``
+directories, is used. In that case, the :file:`temp.{plat}` directory holds
+temporary files generated during the compile/link process which are not intended
+to be installed. In either case, the :file:`lib` (or :file:`lib.{plat}`) directory
+contains all Python modules (pure Python and extensions) to be installed.
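+
+The exact value of ``<plat>`` is an implementation detail of the build command,
+but it is typically derived from the platform tag and Python version reported
+by the standard :mod:`sysconfig` module; a rough sketch of what the suffix
+usually looks like::
+
+   import sys
+   import sysconfig
+
+   # Prints something like 'build/lib.linux-x86_64-3.3'; the real directory
+   # name is chosen by the build command and may differ slightly.
+   suffix = '%s-%d.%d' % (sysconfig.get_platform(),
+                          sys.version_info[0], sys.version_info[1])
+   print('build/lib.' + suffix)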
+
+In the future, more directories will be added to handle Python scripts,
+documentation, binary executables, and whatever else is required to install
+Python modules and applications.
+
+
+.. _packaging-how-install-works:
+
+How installation works
+----------------------
+
+After the :command:`build` command is run (whether explicitly or by the
+:command:`install_dist` command on your behalf), the work of the :command:`install_dist`
+command is relatively simple: all it has to do is copy the contents of
+:file:`build/lib` (or :file:`build/lib.{plat}`) to the installation directory
+of your choice.
+
+If you don't choose an installation directory ---i.e., if you just run
+``pysetup run install_dist``\ --- then the :command:`install_dist` command
+installs to the standard location for third-party Python modules. This location
+varies by platform and depending on how you built/installed Python itself. On
+Unix (and Mac OS X, which is also Unix-based), it also depends on whether the
+module distribution being installed is pure Python or contains extensions
+("non-pure"):
+
++-----------------+-----------------------------------------------------+--------------------------------------------------+-------+
+| Platform        | Standard installation location                      | Default value                                    | Notes |
++=================+=====================================================+==================================================+=======+
+| Unix (pure)     | :file:`{prefix}/lib/python{X.Y}/site-packages`      | :file:`/usr/local/lib/python{X.Y}/site-packages` | \(1)  |
++-----------------+-----------------------------------------------------+--------------------------------------------------+-------+
+| Unix (non-pure) | :file:`{exec-prefix}/lib/python{X.Y}/site-packages` | :file:`/usr/local/lib/python{X.Y}/site-packages` | \(1)  |
++-----------------+-----------------------------------------------------+--------------------------------------------------+-------+
+| Windows         | :file:`{prefix}\\Lib\\site-packages`                | :file:`C:\\Python{XY}\\Lib\\site-packages`       | \(2)  |
++-----------------+-----------------------------------------------------+--------------------------------------------------+-------+
+
+Notes:
+
+(1)
+   Most Linux distributions include Python as a standard part of the system, so
+   :file:`{prefix}` and :file:`{exec-prefix}` are usually both :file:`/usr` on
+   Linux. If you build Python yourself on Linux (or any Unix-like system), the
+   default :file:`{prefix}` and :file:`{exec-prefix}` are :file:`/usr/local`.
+
+(2)
+   The default installation directory on Windows was :file:`C:\\Program
+   Files\\Python` under Python 1.6a1, 1.5.2, and earlier.
+
+:file:`{prefix}` and :file:`{exec-prefix}` stand for the directories that Python
+is installed to, and where it finds its libraries at run-time. They are always
+the same under Windows, and very often the same under Unix and Mac OS X. You
+can find out what your Python installation uses for :file:`{prefix}` and
+:file:`{exec-prefix}` by running Python in interactive mode and typing a few
+simple commands.
+
+.. TODO link to Doc/using instead of duplicating
+
+To start the interactive Python interpreter, you need to follow a slightly
+different recipe for each platform. Under Unix, just type :command:`python` at
+the shell prompt. Under Windows (assuming the Python executable is on your
+:envvar:`PATH`, which is the usual case), you can choose :menuselection:`Start --> Run`,
+type ``python`` and press ``enter``. Alternatively, you can simply execute
+:command:`python` at a command prompt ("DOS console" or Powershell).
+
+Once the interpreter is started, you type Python code at the prompt. For
+example, on my Linux system, I type the three Python statements shown below,
+and get the output as shown, to find out my :file:`{prefix}` and :file:`{exec-prefix}`::
+
+   Python 3.3 (r32:88445, Apr  2 2011, 10:43:54)
+   Type "help", "copyright", "credits" or "license" for more information.
+   >>> import sys
+   >>> sys.prefix
+   '/usr'
+   >>> sys.exec_prefix
+   '/usr'
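+
+If you prefer not to poke around interactively, the standard :mod:`sysconfig`
+module can report the concrete installation directories derived from these
+values (a quick sketch; the output depends on your platform and build)::
+
+   import sysconfig
+
+   # 'purelib' is where pure Python modules go, 'platlib' where extension
+   # modules go; on many systems the two are the same directory.
+   paths = sysconfig.get_paths()
+   print(paths['purelib'])
+   print(paths['platlib'])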
+
+If you don't want to install modules to the standard location, or if you don't
+have permission to write there, then you need to read about alternate
+installations in section :ref:`packaging-alt-install`. If you want to customize your
+installation directories more heavily, see section :ref:`packaging-custom-install`.
+
+
+.. _packaging-alt-install:
+
+Alternate installation
+======================
+
+Often, it is necessary or desirable to install modules to a location other than
+the standard location for third-party Python modules. For example, on a Unix
+system you might not have permission to write to the standard third-party module
+directory. Or you might wish to try out a module before making it a standard
+part of your local Python installation. This is especially true when upgrading
+a distribution already present: you want to make sure your existing base of
+scripts still works with the new version before actually upgrading.
+
+The Packaging :command:`install_dist` command is designed to make installing module
+distributions to an alternate location simple and painless. The basic idea is
+that you supply a base directory for the installation, and the
+:command:`install_dist` command picks a set of directories (called an *installation
+scheme*) under this base directory in which to install files. The details
+differ across platforms, so read whichever of the following sections applies to
+you.
+
+
+.. _packaging-alt-install-prefix:
+
+Alternate installation: the home scheme
+---------------------------------------
+
+The idea behind the "home scheme" is that you build and maintain a personal
+stash of Python modules. This scheme's name is derived from the concept of a
+"home" directory on Unix, since it's not unusual for a Unix user to make their
+home directory have a layout similar to :file:`/usr/` or :file:`/usr/local/`.
+In spite of its name's origin, this scheme can be used by anyone, regardless
+of the operating system.
+
+Installing a new module distribution in this way is as simple as ::
+
+   pysetup run install_dist --home <dir>
+
+where you can supply any directory you like for the :option:`--home` option. On
+Unix, lazy typists can just type a tilde (``~``); the :command:`install_dist` command
+will expand this to your home directory::
+
+   pysetup run install_dist --home ~
+
+The :option:`--home` option defines the base directory for the installation.
+Under it, files are installed to the following directories:
+
++------------------------------+---------------------------+-----------------------------+
+| Type of file                 | Installation Directory    | Override option             |
++==============================+===========================+=============================+
+| pure module distribution     | :file:`{home}/lib/python` | :option:`--install-purelib` |
++------------------------------+---------------------------+-----------------------------+
+| non-pure module distribution | :file:`{home}/lib/python` | :option:`--install-platlib` |
++------------------------------+---------------------------+-----------------------------+
+| scripts                      | :file:`{home}/bin`        | :option:`--install-scripts` |
++------------------------------+---------------------------+-----------------------------+
+| data                         | :file:`{home}/share`      | :option:`--install-data`    |
++------------------------------+---------------------------+-----------------------------+
+
+
+.. _packaging-alt-install-home:
+
+Alternate installation: Unix (the prefix scheme)
+------------------------------------------------
+
+The "prefix scheme" is useful when you wish to use one Python installation to
+run the build command, but install modules into the third-party module directory
+of a different Python installation (or something that looks like a different
+Python installation). If this sounds a trifle unusual, it is ---that's why the
+"home scheme" comes first. However, there are at least two known cases where the
+prefix scheme will be useful.
+
+First, consider that many Linux distributions put Python in :file:`/usr`, rather
+than the more traditional :file:`/usr/local`. This is entirely appropriate,
+since in those cases Python is part of "the system" rather than a local add-on.
+However, if you are installing Python modules from source, you probably want
+them to go in :file:`/usr/local/lib/python2.{X}` rather than
+:file:`/usr/lib/python2.{X}`. This can be done with ::
+
+   pysetup run install_dist --prefix /usr/local
+
+Another possibility is a network filesystem where the name used to write to a
+remote directory is different from the name used to read it: for example, the
+Python interpreter accessed as :file:`/usr/local/bin/python` might search for
+modules in :file:`/usr/local/lib/python2.{X}`, but those modules would have to
+be installed to, say, :file:`/mnt/{@server}/export/lib/python2.{X}`. This could
+be done with ::
+
+   pysetup run install_dist --prefix=/mnt/@server/export
+
+In either case, the :option:`--prefix` option defines the installation base, and
+the :option:`--exec-prefix` option defines the platform-specific installation
+base, which is used for platform-specific files. (Currently, this just means
+non-pure module distributions, but could be expanded to C libraries, binary
+executables, etc.) If :option:`--exec-prefix` is not supplied, it defaults to
+:option:`--prefix`. Files are installed as follows:
+
++------------------------------+-----------------------------------------------------+-----------------------------+
+| Type of file                 | Installation Directory                              | Override option             |
++==============================+=====================================================+=============================+
+| pure module distribution     | :file:`{prefix}/lib/python{X.Y}/site-packages`      | :option:`--install-purelib` |
++------------------------------+-----------------------------------------------------+-----------------------------+
+| non-pure module distribution | :file:`{exec-prefix}/lib/python{X.Y}/site-packages` | :option:`--install-platlib` |
++------------------------------+-----------------------------------------------------+-----------------------------+
+| scripts                      | :file:`{prefix}/bin`                                | :option:`--install-scripts` |
++------------------------------+-----------------------------------------------------+-----------------------------+
+| data                         | :file:`{prefix}/share`                              | :option:`--install-data`    |
++------------------------------+-----------------------------------------------------+-----------------------------+
+
+There is no requirement that :option:`--prefix` or :option:`--exec-prefix`
+actually point to an alternate Python installation; if the directories listed
+above do not already exist, they are created at installation time.
+
+Incidentally, the real reason the prefix scheme is important is simply that a
+standard Unix installation uses the prefix scheme, but with :option:`--prefix`
+and :option:`--exec-prefix` supplied by Python itself as ``sys.prefix`` and
+``sys.exec_prefix``. Thus, you might think you'll never use the prefix scheme,
+but every time you run ``pysetup run install_dist`` without any other
+options, you're using it.
+
+Note that installing extensions to an alternate Python installation doesn't have
+anything to do with how those extensions are built: in particular, extensions
+will be compiled using the Python header files (:file:`Python.h` and friends)
+installed with the Python interpreter used to run the build command. It is
+therefore your responsibility to ensure compatibility between the interpreter
+intended to run extensions installed in this way and the interpreter used to
+build these same extensions. To avoid problems, it is best to make sure that
+the two interpreters are the same version of Python (possibly different builds,
+or possibly copies of the same build). (Of course, if your :option:`--prefix`
+and :option:`--exec-prefix` don't even point to an alternate Python installation,
+this is immaterial.)
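+
+One quick, if incomplete, sanity check is to compare the version and ABI
+information reported by the two interpreters; if the values differ, extensions
+built by one are unlikely to load in the other.  A sketch (``SOABI`` may be
+``None`` on some platforms, notably Windows)::
+
+   import sys
+   import sysconfig
+
+   # Run this under both the building and the running interpreter and compare.
+   print(sys.version_info[:2], sysconfig.get_config_var('SOABI'))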
+
+
+.. _packaging-alt-install-windows:
+
+Alternate installation: Windows (the prefix scheme)
+---------------------------------------------------
+
+Windows has a different and vaguer notion of home directories than Unix, and
+since its standard Python installation is simpler, the :option:`--prefix` option
+has traditionally been used to install additional packages to arbitrary
+locations.  For example::
+
+   pysetup run install_dist --prefix "\Temp\Python"
+
+installs modules to the :file:`\\Temp\\Python` directory on the current drive.
+
+The installation base is defined by the :option:`--prefix` option; the
+:option:`--exec-prefix` option is unsupported under Windows. Files are
+installed as follows:
+
++------------------------------+---------------------------+-----------------------------+
+| Type of file                 | Installation Directory    | Override option             |
++==============================+===========================+=============================+
+| pure module distribution     | :file:`{prefix}`          | :option:`--install-purelib` |
++------------------------------+---------------------------+-----------------------------+
+| non-pure module distribution | :file:`{prefix}`          | :option:`--install-platlib` |
++------------------------------+---------------------------+-----------------------------+
+| scripts                      | :file:`{prefix}\\Scripts` | :option:`--install-scripts` |
++------------------------------+---------------------------+-----------------------------+
+| data                         | :file:`{prefix}\\Data`    | :option:`--install-data`    |
++------------------------------+---------------------------+-----------------------------+
+
+
+.. _packaging-custom-install:
+
+Custom installation
+===================
+
+Sometimes, the alternate installation schemes described in section
+:ref:`packaging-alt-install` just don't do what you want. You might want to tweak
+just one or two directories while keeping everything under the same base
+directory, or you might want to completely redefine the installation scheme.
+In either case, you're creating a *custom installation scheme*.
+
+You probably noticed the column of "override options" in the tables describing
+the alternate installation schemes above. Those options are how you define a
+custom installation scheme. These override options can be relative, absolute,
+or explicitly defined in terms of one of the installation base directories.
+(There are two installation base directories, and they are normally the same
+---they only differ when you use the Unix "prefix scheme" and supply different
+:option:`--prefix` and :option:`--exec-prefix` options.)
+
+For example, say you're installing a module distribution to your home directory
+under Unix, but you want scripts to go in :file:`~/scripts` rather than
+:file:`~/bin`. As you might expect, you can override this directory with the
+:option:`--install-scripts` option and, in this case, it makes most sense to supply
+a relative path, which will be interpreted relative to the installation base
+directory (in our example, your home directory)::
+
+   pysetup run install_dist --home ~ --install-scripts scripts
+
+Another Unix example: suppose your Python installation was built and installed
+with a prefix of :file:`/usr/local/python`. Thus, in a standard installation,
+scripts will wind up in :file:`/usr/local/python/bin`. If you want them in
+:file:`/usr/local/bin` instead, you would supply this absolute directory for
+the :option:`--install-scripts` option::
+
+   pysetup run install_dist --install-scripts /usr/local/bin
+
+This command performs an installation using the "prefix scheme", where the
+prefix is whatever your Python interpreter was installed with ---in this case,
+:file:`/usr/local/python`.
+
+If you maintain Python on Windows, you might want third-party modules to live in
+a subdirectory of :file:`{prefix}`, rather than right in :file:`{prefix}`
+itself. This is almost as easy as customizing the script installation directory
+---you just have to remember that there are two types of modules to worry about,
+pure modules and non-pure modules (i.e., modules from a non-pure distribution).
+For example::
+
+   pysetup run install_dist --install-purelib Site --install-platlib Site
+
+.. XXX Nothing is installed right under prefix in windows, is it??
+
+The specified installation directories are relative to :file:`{prefix}`. Of
+course, you also have to ensure that these directories are in Python's module
+search path, such as by putting a :file:`.pth` file in :file:`{prefix}`. See
+section :ref:`packaging-search-path` to find out how to modify Python's search path.
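+
+As a minimal sketch, a :file:`.pth` file dropped into :file:`{prefix}` (its
+name is arbitrary, say :file:`extra-dirs.pth`) could contain a single line
+naming the subdirectory used above::
+
+   Site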
+
+If you want to define an entire installation scheme, you just have to supply all
+of the installation directory options. Using relative paths is recommended here.
+For example, if you want to maintain all Python module-related files under
+:file:`python` in your home directory, and you want a separate directory for
+each platform that you use your home directory from, you might define the
+following installation scheme::
+
+   pysetup run install_dist --home ~ \
+       --install-purelib python/lib \
+       --install-platlib python/'lib.$PLAT' \
+       --install-scripts python/scripts \
+       --install-data python/data
+
+or, equivalently, ::
+
+   pysetup run install_dist --home ~/python \
+       --install-purelib lib \
+       --install-platlib 'lib.$PLAT' \
+       --install-scripts scripts \
+       --install-data data
+
+``$PLAT`` doesn't need to be defined as an environment variable ---it will also
+be expanded by Packaging as it parses your command line options, just as it
+does when parsing your configuration file(s). (More on that later.)
+
+Obviously, specifying the entire installation scheme every time you install a
+new module distribution would be very tedious. To spare you all that work, you
+can store it in a Packaging configuration file instead (see section
+:ref:`packaging-config-files`), like so::
+
+   [install_dist]
+   install-base = $HOME
+   install-purelib = python/lib
+   install-platlib = python/lib.$PLAT
+   install-scripts = python/scripts
+   install-data = python/data
+
+or, equivalently, ::
+
+   [install_dist]
+   install-base = $HOME/python
+   install-purelib = lib
+   install-platlib = lib.$PLAT
+   install-scripts = scripts
+   install-data = data
+
+Note that these two are *not* equivalent if you override their installation
+base directory when running the setup script. For example, ::
+
+   pysetup run install_dist --install-base /tmp
+
+would install pure modules to :file:`/tmp/python/lib` in the first case, and
+to :file:`/tmp/lib` in the second case. (For the second case, you'd probably
+want to supply an installation base of :file:`/tmp/python`.)
+
+You may have noticed the use of ``$HOME`` and ``$PLAT`` in the sample
+configuration file. These are Packaging configuration variables, which
+bear a strong resemblance to environment variables. In fact, you can use
+environment variables in configuration files on platforms that have such a notion, but
+Packaging additionally defines a few extra variables that may not be in your
+environment, such as ``$PLAT``. Of course, on systems that don't have
+environment variables, such as Mac OS 9, the configuration variables supplied by
+Packaging are the only ones you can use. See section :ref:`packaging-config-files`
+for details.
+
+.. XXX which vars win out eventually in case of clash env or Packaging?
+
+.. XXX need some Windows examples---when would custom installation schemes be
+   needed on those platforms?
+
+
+.. XXX Move this section to Doc/using
+
+.. _packaging-search-path:
+
+Modifying Python's search path
+------------------------------
+
+When the Python interpreter executes an :keyword:`import` statement, it searches
+for both Python code and extension modules along a search path. A default value
+for this path is configured into the Python binary when the interpreter is built.
+You can obtain the search path by importing the :mod:`sys` module and printing
+the value of ``sys.path``. ::
+
+   $ python
+   Python 2.2 (#11, Oct  3 2002, 13:31:27)
+   [GCC 2.96 20000731 (Red Hat Linux 7.3 2.96-112)] on linux2
+   Type "help", "copyright", "credits" or "license" for more information.
+   >>> import sys
+   >>> sys.path
+   ['', '/usr/local/lib/python2.3', '/usr/local/lib/python2.3/plat-linux2',
+    '/usr/local/lib/python2.3/lib-tk', '/usr/local/lib/python2.3/lib-dynload',
+    '/usr/local/lib/python2.3/site-packages']
+   >>>
+
+The null string in ``sys.path`` represents the current working directory.
+
+The expected convention for locally installed packages is to put them in the
+:file:`{...}/site-packages/` directory, but you may want to choose a different
+location for some reason. For example, your site might keep all web
+server-related software under :file:`/www` by convention. Add-on Python
+modules might then belong in :file:`/www/python`, and in order to import them,
+this directory would have to be added to ``sys.path``. There are several ways
+to solve this problem.
+
+The most convenient way is to add a path configuration file to a directory
+that's already on Python's path, usually to the :file:`.../site-packages/`
+directory. Path configuration files have an extension of :file:`.pth`, and each
+line must contain a single path that will be appended to ``sys.path``. (Because
+the new paths are appended to ``sys.path``, modules in the added directories
+will not override standard modules. This means you can't use this mechanism for
+installing fixed versions of standard modules.)
+
+Paths can be absolute or relative, in which case they're relative to the
+directory containing the :file:`.pth` file. See the documentation of
+the :mod:`site` module for more information.
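+
+For example, a one-line path configuration file (under any name ending in
+:file:`.pth`, say :file:`www.pth`) placed in :file:`.../site-packages/` would
+make the directory from the example above importable::
+
+   /www/python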
+
+A slightly less convenient way is to edit the :file:`site.py` file in Python's
+standard library, and modify ``sys.path``. :file:`site.py` is automatically
+imported when the Python interpreter is executed, unless the :option:`-S` switch
+is supplied to suppress this behaviour. So you could simply edit
+:file:`site.py` and add two lines to it::
+
+   import sys
+   sys.path.append('/www/python/')
+
+However, if you reinstall the same major version of Python (perhaps when
+upgrading from 3.3 to 3.3.1, for example) :file:`site.py` will be overwritten by
+the stock version. You'd have to remember that it was modified and save a copy
+before doing the installation.
+
+Alternatively, there are two environment variables that can modify ``sys.path``.
+:envvar:`PYTHONHOME` sets an alternate value for the prefix of the Python
+installation. For example, if :envvar:`PYTHONHOME` is set to ``/www/python``,
+the search path will be set to ``['', '/www/python/lib/pythonX.Y/',
+'/www/python/lib/pythonX.Y/plat-linux2', ...]``.
+
+The :envvar:`PYTHONPATH` variable can be set to a list of paths that will be
+added to the beginning of ``sys.path``. For example, if :envvar:`PYTHONPATH` is
+set to ``/www/python:/opt/py``, the search path will begin with
+``['/www/python', '/opt/py']``. (Note that directories must exist in order to
+be added to ``sys.path``; the :mod:`site` module removes non-existent paths.)
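+
+A quick way to check the effect from a shell (assuming both directories
+actually exist, since non-existent paths are removed)::
+
+   $ PYTHONPATH=/www/python:/opt/py python -c "import sys; print(sys.path)"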
+
+Finally, ``sys.path`` is just a regular Python list, so any Python application
+can modify it by adding or removing entries.
+
+
+.. _packaging-config-files:
+
+Configuration files for Packaging
+=================================
+
+As mentioned above, you can use configuration files to store personal or site
+preferences for any option supported by any Packaging command. Depending on your
+platform, you can use one of two or three possible configuration files. These
+files will be read before parsing the command-line, so they take precedence over
+default values. In turn, the command-line will override configuration files.
+Lastly, if there are multiple configuration files, values from files read
+earlier will be overridden by values from files read later.
+
+.. XXX "one of two or three possible..." seems wrong info. Below always 3 files
+   are indicated in the tables.
+
+
+.. _packaging-config-filenames:
+
+Location and names of configuration files
+-----------------------------------------
+
+The name and location of the configuration files vary slightly across
+platforms. On Unix and Mac OS X, these are the three configuration files listed
+in the order they are processed:
+
++--------------+----------------------------------------------------------+-------+
+| Type of file | Location and filename                                    | Notes |
++==============+==========================================================+=======+
+| system       | :file:`{prefix}/lib/python{ver}/packaging/packaging.cfg` | \(1)  |
++--------------+----------------------------------------------------------+-------+
+| personal     | :file:`$HOME/.pydistutils.cfg`                           | \(2)  |
++--------------+----------------------------------------------------------+-------+
+| local        | :file:`setup.cfg`                                        | \(3)  |
++--------------+----------------------------------------------------------+-------+
+
+Similarly, the configuration files on Windows ---also listed in the order they
+are processed--- are these:
+
++--------------+-------------------------------------------------+-------+
+| Type of file | Location and filename                           | Notes |
++==============+=================================================+=======+
+| system       | :file:`{prefix}\\Lib\\packaging\\packaging.cfg` | \(4)  |
++--------------+-------------------------------------------------+-------+
+| personal     | :file:`%HOME%\\pydistutils.cfg`                 | \(5)  |
++--------------+-------------------------------------------------+-------+
+| local        | :file:`setup.cfg`                               | \(3)  |
++--------------+-------------------------------------------------+-------+
+
+On all platforms, the *personal* file can be temporarily disabled by
+means of the :option:`--no-user-cfg` option.
+
+Notes:
+
+(1)
+   Strictly speaking, the system-wide configuration file lives in the directory
+   where Packaging is installed.
+
+(2)
+   On Unix, if the :envvar:`HOME` environment variable is not defined, the
+   user's home directory will be determined with the :func:`getpwuid` function
+   from the standard :mod:`pwd` module. Packaging uses the
+   :func:`os.path.expanduser` function to do this.
+
+(3)
+   I.e., in the current directory (usually the location of the setup script).
+
+(4)
+   (See also note (1).) Python's default installation prefix is
+   :file:`C:\\Python`, so the system configuration file is normally
+   :file:`C:\\Python\\Lib\\packaging\\packaging.cfg`.
+
+(5)
+   On Windows, if the :envvar:`HOME` environment variable is not defined,
+   :envvar:`USERPROFILE` then :envvar:`HOMEDRIVE` and :envvar:`HOMEPATH` will
+   be tried. Packaging uses the :func:`os.path.expanduser` function to do this.
+
+
+.. _packaging-config-syntax:
+
+Syntax of configuration files
+-----------------------------
+
+All Packaging configuration files share the same syntax. Options defined in
+them are grouped into sections, and each Packaging command gets its own section.
+Additionally, there's a ``global`` section for options that affect every command.
+Sections consist of one or more lines containing a single option specified as
+``option = value``.
+
+For example, here's a complete configuration file that forces all commands to
+run quietly by default::
+
+   [global]
+   verbose = 0
+
+If this was the system configuration file, it would affect all processing
+of any Python module distribution by any user on the current system. If it was
+installed as your personal configuration file (on systems that support them),
+it would affect only module distributions processed by you. Lastly, if it was
+used as the :file:`setup.cfg` for a particular module distribution, it would
+affect that distribution only.
+
+.. XXX "(on systems that support them)" seems wrong info
+
+If you wanted to, you could override the default "build base" directory and
+make the :command:`build\*` commands always forcibly rebuild all files with
+the following::
+
+   [build]
+   build-base = blib
+   force = 1
+
+which corresponds to the command-line arguments::
+
+   pysetup run build --build-base blib --force
+
+except that including the :command:`build` command on the command-line means
+that command will be run. Including a particular command in configuration files
+has no such implication; it only means that if the command is run, the options
+for it in the configuration file will apply. (This is also true if you run
+other commands that derive values from it.)
+
+You can find out the complete list of options for any command using the
+:option:`--help` option, e.g.::
+
+   pysetup run build --help
+
+and you can find out the complete list of global options by using
+:option:`--help` without a command::
+
+   pysetup run --help
+
+See also the "Reference" section of the "Distributing Python Modules" manual.
+
+.. XXX no links to the relevant section exist.
+
+
+.. _packaging-building-ext:
+
+Building extensions: tips and tricks
+====================================
+
+Whenever possible, Packaging tries to use the configuration information made
+available by the Python interpreter used to run `pysetup`.
+For example, the same compiler and linker flags used to compile Python will also
+be used for compiling extensions. Usually this will work well, but in
+complicated situations this might be inappropriate. This section discusses how
+to override the usual Packaging behaviour.
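+
+On Unix, for example, you can peek at some of the values that will be reused
+by querying the standard :mod:`sysconfig` module yourself (purely
+illustrative; Packaging does this for you)::
+
+   >>> import sysconfig
+   >>> sysconfig.get_config_var('CC')      # compiler used to build Python
+   >>> sysconfig.get_config_var('CFLAGS')  # flags reused for extension modules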
+
+
+.. _packaging-tweak-flags:
+
+Tweaking compiler/linker flags
+------------------------------
+
+Compiling a Python extension written in C or C++ will sometimes require
+specifying custom flags for the compiler and linker in order to use a particular
+library or produce a special kind of object code. This is especially true if the
+extension hasn't been tested on your platform, or if you're trying to
+cross-compile Python.
+
+.. TODO update to new setup.cfg
+
+In the most general case, the extension author might have foreseen that
+compiling the extensions would be complicated, and provided a :file:`Setup` file
+for you to edit. This will likely only be done if the module distribution
+contains many separate extension modules, or if they often require elaborate
+sets of compiler flags in order to work.
+
+A :file:`Setup` file, if present, is parsed in order to get a list of extensions
+to build. Each line in a :file:`Setup` describes a single module. Lines have
+the following structure::
+
+   module ... [sourcefile ...] [cpparg ...] [library ...]
+
+
+Let's examine each of the fields in turn.
+
+* *module* is the name of the extension module to be built, and should be a
+  valid Python identifier. You can't just change this in order to rename a module
+  (edits to the source code would also be needed), so this should be left alone.
+
+* *sourcefile* is anything that's likely to be a source code file, at least
+  judging by the filename. Filenames ending in :file:`.c` are assumed to be
+  written in C, filenames ending in :file:`.C`, :file:`.cc`, and :file:`.c++` are
+  assumed to be C++, and filenames ending in :file:`.m` or :file:`.mm` are assumed
+  to be in Objective C.
+
+* *cpparg* is an argument for the C preprocessor,  and is anything starting with
+  :option:`-I`, :option:`-D`, :option:`-U` or :option:`-C`.
+
+* *library* is anything ending in :file:`.a` or beginning with :option:`-l` or
+  :option:`-L`.
+
+If a particular module requires a special library on your platform, you can
+add it by editing the :file:`Setup` file and running ``pysetup run build``.
+For example, if the module defined by the line ::
+
+   foo foomodule.c
+
+must be linked with the math library :file:`libm.a` on your platform, simply add
+:option:`-lm` to the line::
+
+   foo foomodule.c -lm
+
+Arbitrary switches intended for the compiler or the linker can be supplied with
+the :option:`-Xcompiler` *arg* and :option:`-Xlinker` *arg* options::
+
+   foo foomodule.c -Xcompiler -o32 -Xlinker -shared -lm
+
+The next option after :option:`-Xcompiler` and :option:`-Xlinker` will be
+appended to the proper command line, so in the above example the compiler will
+be passed the :option:`-o32` option, and the linker will be passed
+:option:`-shared`. If a compiler option requires an argument, you'll have to
+supply multiple :option:`-Xcompiler` options; for example, to pass ``-x c++``
+the :file:`Setup` file would have to contain ``-Xcompiler -x -Xcompiler c++``.
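+
+Putting that together, a hypothetical :file:`Setup` line for a C++ source file
+(the module and file names here are made up) might read::
+
+   bar barmodule.cc -Xcompiler -x -Xcompiler c++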
+
+Compiler flags can also be supplied through setting the :envvar:`CFLAGS`
+environment variable. If set, the contents of :envvar:`CFLAGS` will be added to
+the compiler flags specified in the  :file:`Setup` file.
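+
+For example, to add extra flags for a single build without editing any files
+(the flags shown are only placeholders)::
+
+   CFLAGS="-g -O0" pysetup run build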
+
+
+.. _packaging-non-ms-compilers:
+
+Using non-Microsoft compilers on Windows
+----------------------------------------
+
+.. sectionauthor:: Rene Liebscher <R.Liebscher@gmx.de>
+
+
+
+Borland/CodeGear C++
+^^^^^^^^^^^^^^^^^^^^
+
+This subsection describes the necessary steps to use Packaging with the Borland
+C++ compiler version 5.5. First you have to know that Borland's object file
+format (OMF) is different from the format used by the Python version you can
+download from the Python or ActiveState Web site. (Python is built with
+Microsoft Visual C++, which uses COFF as the object file format.) For this
+reason, you have to convert Python's library :file:`python25.lib` into the
+Borland format. You can do this as follows:
+
+.. Should we mention that users have to create cfg-files for the compiler?
+.. see also http://community.borland.com/article/0,1410,21205,00.html
+
+::
+
+   coff2omf python25.lib python25_bcpp.lib
+
+The :file:`coff2omf` program comes with the Borland compiler. The file
+:file:`python25.lib` is in the :file:`Libs` directory of your Python
+installation. If your extension uses other libraries (zlib, ...) you have to
+convert them too.
+
+The converted files have to reside in the same directories as the normal
+libraries.
+
+How does Packaging manage to use these libraries with their changed names? If
+the extension needs a library (e.g. :file:`foo`), Packaging first checks for a
+library with the :file:`_bcpp` suffix (e.g. :file:`foo_bcpp.lib`) and uses it
+if found. If no such library is found, it falls back to the default name
+(:file:`foo.lib`). [#]_
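+
+For example, a hypothetical extra dependency :file:`foo.lib` would be
+converted the same way as the Python library above::
+
+   coff2omf foo.lib foo_bcpp.lib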
+
+To let Packaging compile your extension with Borland C++, you now have to
+type::
+
+   pysetup run build --compiler bcpp
+
+If you want to use the Borland C++ compiler as the default, you could specify
+this in your personal or system-wide configuration file for Packaging (see
+section :ref:`packaging-config-files`.)
+
+
+.. seealso::
+
+   `C++Builder Compiler <http://www.codegear.com/downloads/free/cppbuilder>`_
+      Information about the free C++ compiler from Borland, including links to the
+      download pages.
+
+   `Creating Python Extensions Using Borland's Free Compiler <http://www.cyberus.ca/~g_will/pyExtenDL.shtml>`_
+      Document describing how to use Borland's free command-line C++ compiler
+      to build Python extensions.
+
+
+GNU C / Cygwin / MinGW
+^^^^^^^^^^^^^^^^^^^^^^
+
+This section describes the necessary steps to use Packaging with the GNU C/C++
+compilers in their Cygwin and MinGW distributions. [#]_ For a Python interpreter
+that was built with Cygwin, everything should work without any of the
+following steps.
+
+Not all extensions can be built with MinGW or Cygwin, but many can. Extensions
+most likely to not work are those that use C++ or depend on Microsoft Visual C
+extensions.
+
+To let Packaging compile your extension with Cygwin, you have to type::
+
+   pysetup run build --compiler=cygwin
+
+and for Cygwin in no-cygwin mode [#]_ or for MinGW, type::
+
+   pysetup run build --compiler=mingw32
+
+If you want to use any of these options/compilers as default, you should
+consider writing it in your personal or system-wide configuration file for
+Packaging (see section :ref:`packaging-config-files`.)
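+
+A minimal sketch of such a configuration file entry, assuming the option name
+mirrors the :option:`--compiler` command-line option::
+
+   [build]
+   compiler = mingw32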
+
+Older Versions of Python and MinGW
+""""""""""""""""""""""""""""""""""
+The following instructions only apply if you're using a version of Python
+older than 2.4.1 with a MinGW version older than 3.0.0 (with
+:file:`binutils-2.13.90-20030111-1`).
+
+These compilers require some special libraries. This task is more complex than
+for Borland's C++, because there is no program to convert the library. First
+you have to create a list of symbols which the Python DLL exports. (You can find
+a good program for this task at
+http://www.emmestech.com/software/pexports-0.43/download_pexports.html).
+
+.. I don't understand what the next line means. --amk
+   (inclusive the references on data structures.)
+
+::
+
+   pexports python25.dll > python25.def
+
+The location of an installed :file:`python25.dll` will depend on the
+installation options and the version and language of Windows. In a "just for
+me" installation, it will appear in the root of the installation directory. In
+a shared installation, it will be located in the system directory.
+
+Then you can create an import library for gcc from this information. ::
+
+   /cygwin/bin/dlltool --dllname python25.dll --def python25.def --output-lib libpython25.a
+
+The resulting library has to be placed in the same directory as
+:file:`python25.lib`. (This should be the :file:`libs` directory under your
+Python installation directory.)
+
+If your extension uses other libraries (zlib, ...), you might have to convert
+them too. The converted files have to reside in the same directories as the
+normal libraries do.
+
+
+.. seealso::
+
+   `Building Python modules on MS Windows platform with MinGW <http://www.zope.org/Members/als/tips/win32_mingw_modules>`_
+      Information about building the required libraries for the MinGW
+      environment.
+
+
+.. rubric:: Footnotes
+
+.. [#] This also means you could replace all existing COFF-libraries with
+   OMF-libraries of the same name.
+
+.. [#] Check http://sources.redhat.com/cygwin/ and http://www.mingw.org/ for
+   more information.
+
+.. [#] Then you have no POSIX emulation available, but you also don't need
+   :file:`cygwin1.dll`.
diff --git a/Doc/install/pysetup-config.rst b/Doc/install/pysetup-config.rst
new file mode 100644
--- /dev/null
+++ b/Doc/install/pysetup-config.rst
@@ -0,0 +1,44 @@
+.. _packaging-pysetup-config:
+
+=====================
+Pysetup Configuration
+=====================
+
+Pysetup supports two configuration files: :file:`.pypirc` and :file:`packaging.cfg`.
+
+.. FIXME integrate with configfile instead of duplicating
+
+Configuring indexes
+-------------------
+
+You can configure additional indexes in :file:`.pypirc` to be used for index-related
+operations. By default, all configured index-servers and package-servers will be used
+in an additive fashion. To limit operations to specific indexes, use the
+:option:`--index` and :option:`--package-server` options::
+
+  $ pysetup install --index pypi --package-server django some.project
+
+Adding indexes to :file:`.pypirc`::
+
+  [packaging]
+  index-servers =
+      pypi
+      other
+
+  package-servers =
+      django
+
+  [pypi]
+      repository: <repository-url>
+      username: <username>
+      password: <password>
+
+  [other]
+      repository: <repository-url>
+      username: <username>
+      password: <password>
+
+  [django]
+      repository: <repository-url>
+      username: <username>
+      password: <password>
diff --git a/Doc/install/pysetup-servers.rst b/Doc/install/pysetup-servers.rst
new file mode 100644
--- /dev/null
+++ b/Doc/install/pysetup-servers.rst
@@ -0,0 +1,61 @@
+.. _packaging-pysetup-servers:
+
+===============
+Package Servers
+===============
+
+Pysetup supports installing Python packages from *Package Servers* in addition
+to PyPI indexes and mirrors.
+
+Package Servers are simple directory listings of Python distributions. Directories
+can be served via HTTP or a local file system. This is useful when you want to
+dump source distributions in a directory and not worry about the full index structure.
+
+Serving distributions from Apache
+---------------------------------
+::
+
+   $ mkdir -p /var/www/html/python/distributions
+   $ cp *.tar.gz /var/www/html/python/distributions/
+
+   <VirtualHost python.example.org:80>
+       ServerAdmin webmaster@domain.com
+       DocumentRoot "/var/www/html/python"
+       ServerName python.example.org
+       ErrorLog logs/python.example.org-error.log
+       CustomLog logs/python.example.org-access.log common
+       Options Indexes FollowSymLinks MultiViews
+       DirectoryIndex index.html index.htm
+
+       <Directory "/var/www/html/python/distributions">
+           Options Indexes FollowSymLinks MultiViews
+           Order allow,deny
+           Allow from all
+       </Directory>
+   </VirtualHost>
+
+Add the Apache based distribution server to :file:`.pypirc`::
+
+   [packaging]
+   package-servers =
+       apache
+
+   [apache]
+   repository: http://python.example.org/distributions/
+
+
+Serving distributions from a file system
+----------------------------------------
+::
+
+   $ mkdir -p /data/python/distributions
+   $ cp *.tar.gz /data/python/distributions/
+
+Add the directory to :file:`.pypirc`::
+
+   [packaging]
+   package-servers =
+       local
+
+   [local]
+   repository: file:///data/python/distributions/
diff --git a/Doc/install/pysetup.rst b/Doc/install/pysetup.rst
new file mode 100644
--- /dev/null
+++ b/Doc/install/pysetup.rst
@@ -0,0 +1,163 @@
+.. _packaging-pysetup:
+
+================
+Pysetup Tutorial
+================
+
+Getting started
+---------------
+
+Pysetup is a simple script that supports the following features:
+
+- install, remove, list, and verify Python packages;
+- search for available packages on PyPI or any *Simple Index*;
+- verify installed packages (md5sum, installed files, version).
+
+
+Finding out what's installed
+----------------------------
+
+Pysetup makes it easy to find out what Python packages are installed::
+
+   $ pysetup search virtualenv
+   virtualenv 1.6 at /opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info
+
+   $ pysetup search --all
+   pyverify 0.8.1 at /opt/python3.3/lib/python3.3/site-packages/pyverify-0.8.1.dist-info
+   virtualenv 1.6 at /opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info
+   wsgiref 0.1.2 at /opt/python3.3/lib/python3.3/wsgiref.egg-info
+   ...
+
+
+Installing a distribution
+-------------------------
+
+Pysetup can install a Python project from the following sources:
+
+- PyPI and Simple Indexes;
+- source directories containing a valid :file:`setup.py` or :file:`setup.cfg`;
+- distribution source archives (:file:`project-1.0.tar.gz`, :file:`project-1.0.zip`);
+- HTTP (http://host/packages/project-1.0.tar.gz).
+
+
+Installing from PyPI and Simple Indexes::
+
+   $ pysetup install project
+   $ pysetup install project==1.0
+
+Installing from a distribution source archive::
+
+   $ pysetup install project-1.0.tar.gz
+
+Installing from a source directory containing a valid :file:`setup.py` or
+:file:`setup.cfg`::
+
+   $ cd path/to/source/directory
+   $ pysetup install
+
+   $ pysetup install path/to/source/directory
+
+Installing from HTTP::
+
+   $ pysetup install http://host/packages/project-1.0.tar.gz
+
+
+Retrieving metadata
+-------------------
+
+You can gather metadata from two sources: a project's source directory or an
+installed distribution. The `pysetup metadata` command can retrieve one or
+more metadata fields using the `-f` option with a metadata field name as the
+argument. ::
+
+   $ pysetup metadata virtualenv -f version -f name
+   Version:
+       1.6
+   Name:
+       virtualenv
+
+   $ pysetup metadata virtualenv --all
+   Metadata-Version:
+       1.0
+   Name:
+       virtualenv
+   Version:
+       1.6
+   Platform:
+       UNKNOWN
+   Summary:
+       Virtual Python Environment builder
+   ...
+
+.. seealso::
+
+   There are three metadata versions: 1.0 (:PEP:`241`), 1.1 (:PEP:`314`), and
+   1.2 (:PEP:`345`). These PEPs describe the field names and their semantics
+   and usage.
+
+
+Removing a distribution
+-----------------------
+
+You can remove one or more installed distributions using the `pysetup remove`
+command::
+
+   $ pysetup remove virtualenv
+   removing 'virtualenv':
+     /opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info/dependency_links.txt
+     /opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info/entry_points.txt
+     /opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info/not-zip-safe
+     /opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info/PKG-INFO
+     /opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info/SOURCES.txt
+     /opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info/top_level.txt
+   Proceed (y/n)? y
+   success: removed 6 files and 1 dirs
+
+The optional `-y` argument auto-confirms, skipping the confirmation prompt::
+
+  $ pysetup remove virtualenv -y
+
+
+Getting help
+------------
+
+All pysetup actions accept the `-h` and `--help` options, which print the
+command's help string to stdout. ::
+
+   $ pysetup remove -h
+   Usage: pysetup remove dist [-y]
+      or: pysetup remove --help
+
+   Uninstall a Python package.
+
+   positional arguments:
+      dist  installed distribution name
+
+   optional arguments:
+      -y  auto confirm package removal
+
+Getting a list of all pysetup actions and global options::
+
+   $ pysetup --help
+   Usage: pysetup [options] action [action_options]
+
+   Actions:
+       run: Run one or several commands
+       metadata: Display the metadata of a project
+       install: Install a project
+       remove: Remove a project
+       search: Search for a project
+       graph: Display a graph
+       create: Create a Project
+
+   To get more help on an action, use:
+
+       pysetup action --help
+
+   Global options:
+       --verbose (-v)  run verbosely (default)
+       --quiet (-q)    run quietly (turns verbosity off)
+       --dry-run (-n)  don't actually do anything
+       --help (-h)     show detailed help message
+       --no-user-cfg   ignore pydistutils.cfg in your home directory
+       --version       Display the version
diff --git a/Doc/library/abc.rst b/Doc/library/abc.rst
--- a/Doc/library/abc.rst
+++ b/Doc/library/abc.rst
@@ -1,3 +1,5 @@
+.. _abstract-base-classes:
+
 :mod:`abc` --- Abstract Base Classes
 ====================================
 
@@ -12,7 +14,7 @@
 --------------
 
 This module provides the infrastructure for defining an :term:`abstract base
-class` (ABCs) in Python, as outlined in :pep:`3119`; see the PEP for why this
+class` (ABC) in Python, as outlined in :pep:`3119`; see the PEP for why this
 was added to Python. (See also :pep:`3141` and the :mod:`numbers` module
 regarding a type hierarchy for numbers based on ABCs.)
 
diff --git a/Doc/library/bz2.rst b/Doc/library/bz2.rst
--- a/Doc/library/bz2.rst
+++ b/Doc/library/bz2.rst
@@ -37,14 +37,18 @@
    *fileobj*), or operate directly on a named file (named by *filename*).
    Exactly one of these two parameters should be provided.
 
-   The *mode* argument can be either ``'r'`` for reading (default), or ``'w'``
-   for writing.
+   The *mode* argument can be either ``'r'`` for reading (default), ``'w'`` for
+   overwriting, or ``'a'`` for appending. If *fileobj* is provided, a mode of
+   ``'w'`` does not truncate the file, and is instead equivalent to ``'a'``.
 
    The *buffering* argument is ignored. Its use is deprecated.
 
-   If *mode* is ``'w'``, *compresslevel* can be a number between ``1`` and
-   ``9`` specifying the level of compression: ``1`` produces the least
-   compression, and ``9`` (default) produces the most compression.
+   If *mode* is ``'w'`` or ``'a'``, *compresslevel* can be a number between
+   ``1`` and ``9`` specifying the level of compression: ``1`` produces the
+   least compression, and ``9`` (default) produces the most compression.
+
+   If *mode* is ``'r'``, the input file may be the concatenation of multiple
+   compressed streams.
 
    :class:`BZ2File` provides all of the members specified by the
    :class:`io.BufferedIOBase`, except for :meth:`detach` and :meth:`truncate`.
@@ -70,6 +74,10 @@
    .. versionchanged:: 3.3
       The *fileobj* argument to the constructor was added.
 
+   .. versionchanged:: 3.3
+      The ``'a'`` (append) mode was added, along with support for reading
+      multi-stream files.
+
 
 Incremental (de)compression
 ---------------------------
@@ -106,14 +114,20 @@
    incrementally. For one-shot compression, use the :func:`decompress` function
    instead.
 
+   .. note::
+      This class does not transparently handle inputs containing multiple
+      compressed streams, unlike :func:`decompress` and :class:`BZ2File`. If
+      you need to decompress a multi-stream input with :class:`BZ2Decompressor`,
+      you must use a new decompressor for each stream.
+
    .. method:: decompress(data)
 
       Provide data to the decompressor object. Returns a chunk of decompressed
       data if possible, or an empty byte string otherwise.
 
-      Attempting to decompress data after the end of stream is reached raises
-      an :exc:`EOFError`. If any data is found after the end of the stream, it
-      is ignored and saved in the :attr:`unused_data` attribute.
+      Attempting to decompress data after the end of the current stream is
+      reached raises an :exc:`EOFError`. If any data is found after the end of
+      the stream, it is ignored and saved in the :attr:`unused_data` attribute.
 
 
    .. attribute:: eof
@@ -127,6 +141,9 @@
 
       Data found after the end of the compressed stream.
 
+      If this attribute is accessed before the end of the stream has been
+      reached, its value will be ``b''``.
+
 
 One-shot (de)compression
 ------------------------
@@ -145,5 +162,11 @@
 
    Decompress *data*.
 
+   If *data* is the concatenation of multiple compressed streams, decompress
+   all of the streams.
+
    For incremental decompression, use a :class:`BZ2Decompressor` instead.
 
+   .. versionchanged:: 3.3
+      Support for multi-stream inputs was added.
+
diff --git a/Doc/library/codecs.rst b/Doc/library/codecs.rst
--- a/Doc/library/codecs.rst
+++ b/Doc/library/codecs.rst
@@ -458,7 +458,8 @@
 
    .. method:: reset()
 
-      Reset the encoder to the initial state.
+      Reset the encoder to the initial state. The output is discarded: call
+      ``.encode('', final=True)`` to reset the encoder and to get the output.
 
 
 .. method:: IncrementalEncoder.getstate()
diff --git a/Doc/library/collections.abc.rst b/Doc/library/collections.abc.rst
--- a/Doc/library/collections.abc.rst
+++ b/Doc/library/collections.abc.rst
@@ -23,7 +23,7 @@
 .. versionchanged:: 3.3
    Formerly, this module was part of the :mod:`collections` module.
 
-.. _abstract-base-classes:
+.. _collections-abstract-base-classes:
 
 Collections Abstract Base Classes
 ---------------------------------
diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst
--- a/Doc/library/collections.rst
+++ b/Doc/library/collections.rst
@@ -34,7 +34,7 @@
 =====================   ====================================================================
 
 .. versionchanged:: 3.3
-   Moved :ref:`abstract-base-classes` to the :mod:`collections.abc` module.
+   Moved :ref:`collections-abstract-base-classes` to the :mod:`collections.abc` module.
    For backwards compatibility, they continue to be visible in this module
    as well.
 
diff --git a/Doc/library/crypt.rst b/Doc/library/crypt.rst
--- a/Doc/library/crypt.rst
+++ b/Doc/library/crypt.rst
@@ -29,6 +29,8 @@
 Hashing Methods
 ---------------
 
+.. versionadded:: 3.3
+
 The :mod:`crypt` module defines the list of hashing methods (not all methods
 are available on all platforms):
 
@@ -37,33 +39,26 @@
    A Modular Crypt Format method with 16 character salt and 86 character
    hash.  This is the strongest method.
 
-   .. versionadded:: 3.3
-
 .. data:: METHOD_SHA256
 
    Another Modular Crypt Format method with 16 character salt and 43
    character hash.
 
-   .. versionadded:: 3.3
-
 .. data:: METHOD_MD5
 
    Another Modular Crypt Format method with 8 character salt and 22
    character hash.
 
-   .. versionadded:: 3.3
-
 .. data:: METHOD_CRYPT
 
    The traditional method with a 2 character salt and 13 characters of
    hash.  This is the weakest method.
 
-   .. versionadded:: 3.3
-
 
 Module Attributes
 -----------------
 
+.. versionadded:: 3.3
 
 .. attribute:: methods
 
@@ -71,8 +66,6 @@
    ``crypt.METHOD_*`` objects.  This list is sorted from strongest to
    weakest, and is guaranteed to have at least ``crypt.METHOD_CRYPT``.
 
-   .. versionadded:: 3.3
-
 
 Module Functions
 ----------------
@@ -108,9 +101,8 @@
    different sizes in the *salt*, it is recommended to use  the full crypted
    password as salt when checking for a password.
 
-.. versionchanged:: 3.3
-   Before version 3.3, *salt*  must be specified as a string and cannot
-   accept ``crypt.METHOD_*`` values (which don't exist anyway).
+   .. versionchanged:: 3.3
+      Accept ``crypt.METHOD_*`` values in addition to strings for *salt*.
 
 
 .. function:: mksalt(method=None)
@@ -124,25 +116,27 @@
    16 random characters from the set ``[./a-zA-Z0-9]``, suitable for
    passing as the *salt* argument to :func:`crypt`.
 
-.. versionadded:: 3.3
+   .. versionadded:: 3.3
 
 Examples
 --------
 
 A simple example illustrating typical use::
 
-   import crypt, getpass, pwd
+   import pwd
+   import crypt
+   import getpass
 
    def login():
-       username = input('Python login:')
+       username = input('Python login: ')
        cryptedpasswd = pwd.getpwnam(username)[1]
        if cryptedpasswd:
            if cryptedpasswd == 'x' or cryptedpasswd == '*':
-               raise "Sorry, currently no support for shadow passwords"
+               raise ValueError('no support for shadow passwords')
            cleartext = getpass.getpass()
            return crypt.crypt(cleartext, cryptedpasswd) == cryptedpasswd
        else:
-           return 1
+           return True
 
 To generate a hash of a password using the strongest available method and
 check it against the original::
@@ -151,4 +145,4 @@
 
    hashed = crypt.crypt(plaintext)
    if hashed != crypt.crypt(plaintext, hashed):
-      raise "Hashed version doesn't validate against original"
+      raise ValueError("hashed version doesn't validate against original")
diff --git a/Doc/library/depgraph-output.png b/Doc/library/depgraph-output.png
new file mode 100644
index 0000000000000000000000000000000000000000..960bb1b5639e7ff615ddbd6cbf8a542be31d5fe7
GIT binary patch
literal 24719
zu{pDZP_#?uPyiH%HigBP_Q)2h{b~Mt&T0BR7OhtMjx+ziuglBJV>Txwm-d*i4i|py
z?rzI3ZbClH8w8z$SnHAwaH3#WEBK5YHU&0^StyXsHoVdox4+bn0itbUVq)*$KqN at r
z8Sl6@%0(^cjO%@IoCA#-=Ad2X5&LNFsQ2Qtx>=>;vNA4>013!FDNIIu2i&|);pHwX
z-*B8xlP3kx^gj+C?p+xO_jY7Vg1Q~SXi#8*9FT;f_%<imT?QFOLcKu?BzVEhhJXCX
zjC9ozDnWrpIcAF20Y#>=u)lE<on6*#P at T^m at 5npw-*UZ;xuX+IE99<!ur>yBr=Gth
zm2(+{36z8cf^!dMY6foK0zhv72?E1YiSyo3li>RGKv1XBHuFpfN^ETGrT&a7naZp;
ziLg;K;B{gJoY)c*6M?$+Xbm+>^wA)|nJjVrjm^xAwtshA7jQJsa2g{6Y3*!q+a(|)
z!>rZDzk2m5sPAm_bKS=V25Bve$gt3PZna=^G+B>vLoNU9N3M(Vk>3`k(lVPYLd|^C
zdxc;LQXY(q-sa}ASWi?H4)u~-)vwsM$2&nnqKnMW&&v&?iF9g51I9J<ElzqpI$bMw
z at 7^qA^nddPy{x1p71AM?OiX3mNQk$(hlZMZ2`JXR_rHgf`@K4#(2H`%g*2V*JZ<E!
zd)L#T<8?4*CTB|wAz=QD%Fz0GXP;M9-zl<;fm#A*V^<bd`gaU2hJ{+6tp8Y{Sy=LG
zYAP9}2o~FdhqC{7{RqP3V4+pDwAeLq<*;#aGp{{53+KLfZ_`5vkCG=ahEq=-peH>(
zo)QQn<NWk6s=4_QT53%Vf8(O+^XF0E&l^ux@()Q5wd at +7064)fnK_70z4)qn>U4B!
z9sIx>W20_sRs=%s=Ya`Fc2ZDZfBi}XrPs2S3|aqny{?z+1O=+R4CTVM?APeuwYrN9
zdHzC;JN^)QQL;=KT~W+b9CZBlH*V`T#Ny!Kgiyyd9q1~U^(4}x08dOxJX?LSZ+`4X
z9tM(I2TzS`&TuvzrT6b_d~nnpFVf6NP4yw;)RhI0wf^!o5b;eY#3s9&Af5=dcX7Ho
z?Q%*}sY7uULf9`&EZ{f96K3~@aKO}0UXsb_p`oFpiRlYWW at hHt`1qCR`$J7dYg6uZ
zRs=t8+jaf-9QADCHiVPnvuC${D&-Fy?F}jr($eCCEXT&hwSYuzZ*NP<%iC6+|M$Q&
zW_NFrU;uJ}4J=GxSXk$$XZf`8f+B$D|2!s#8}p^JGrg{k60C)_O8r&xJH81CR2K);
z%@iEZSx91qJorE?koW_yO$4kQx`OO)gBkJxC4>vi_!Nw*{Lfi{xjA3tN!-5|=kDuy
zLzZWYkN!S(b>*LPcwo#;0eDT-QK+0(V%$aK>+dhh6!n-Xik^puq&w!e82poQ;5d;D
zV$te%6?rrRpc*+F0>#Z{NWk2p-crWR?SWdMrr*+%xvPiApCp&5mfDw#<3A;yf at wmb
z?wNk?V;{UN=y|l|RXwdV5#1k{v|slIF}7!CXSgwGq$7%s-U`UR%Fe>LU0(5m;oYB2
zYWZ2f)O^PcK!Tq>ePUop2UeawtV~}X%ppbz*w6U1?9d20_m%ewKLsl!>S1bX%Jbs4
zxQwi<_4d!M^Mdy=<CdJr)8A;Rl5}1_^^akCu~0t}ACwQ-Bnx}RL)AEkiE=lQvoS+`
zZEJh`?c2A#m8nzUk_wv%zO-bStaKPNo_gVEnOjdJCGTr6-?<Z}$`-9V4Rr@|mHF1K
z${Jb-vkf&Zt*1djLCD=9i-=Dmx{UYM%6pNg;NrDE9+vAsX?}}Rl$H+Ctq0~~M*RdI
zLiWd2MiSB&iasfFjvQU2qoV_}TWefn`J2#x_hnWVso1POz0!&))@_VOg;!VKhY$*7
zV`rDnQdW|Xzzn_4^#+9;;tmS*_1)@XPJ!ECL!!ewGLi&ckTsxq&q;BYqBxZK_FkN5
zTI&Al$e_`CAqgfXCeiPFe0(3m!$rWN->TG{oU>ZBmPXbZk*8e2qa#aTB|QM0*eJZ~
zcT<Pf!)S%#{jbp#x!^=E{r$4Jy86_@!UB1U at JkpCBQOEF$nL at q;*~I}`(aSm#gI)J
zwftoOL}a-OU4)R3P_%Kr`{UKkww0m$WvJK4XC>d$lFHj4N$w*^UR`ZHCW_<t2JEcO
zRD(uMPYvi)BY$IK<1+X)WFZ%>wg>Y at jG}ztVEk at 1Eo)t-P;C&fkCmSO^_)X(Urt^g
ztI)xyQN^oscV!k5xvIZ6CZg|@P9~W<sj`uxxfHGbdLK4k6atFM&cnkK4!B>aQ$MWl
zW);=5F<C!xb`~Zh07!AkXJBApZ-tzcln}LZc*x8^<gAJ8N+28G$g=vkt}d=K!_BnQ
zfRWI6&5VkQ56I(&^b(VQ9*5y;Wi&?0gC(l1t0O|WA5OX?6g-D2xz?v;)SE1XY}+GG
zhpHF7^9C$;@5K;-GD{xw|JY?R7Xos}*%uWR#S$sM*Z#k59&P;{uRpDc-4Vi{Nr|&?
zkGKBRojx8|?}jxtD&r89GfJY?l|5SUrG(sNbFy)jl+%{A;MT{$Csd+=JBs4sawbA+
zHpFE0IP+<d$=PvTn-{Y^+8L7h%d&q?Up4luRtheToHdyx#%&&-_Z%i-+!Yk87kH7K
zm6ccVY|Z7z at x%D>Vgcv1+%h?Qe0&ZDhBx4|a=Q*+X|2cztc(|HQr${2MUH;#?(Y}6
z=)8=QH!+!~`rB9+8J3ntkMa2uIvO$7rI6mmd%`Kbt`C#T8WmZLL%3q%<G+D+tWr<b
z*4N)ZzO}G>jO!!AtD^#>(9UQ3>nk-4&4l(q*VnK5;HGg=3A|Qnjf_Noz;u{C+s9{2
z)6egGEN}8U<+iii#GGLH?d>H47DV}0+}cF7!u)h?DWgjVOLeKz>!v1Q48_f$)twy&
z%-rNXwLSW%F3Mm2**ukcjmeI1n=z>FTDO*SjojsQPD#mGmU#xNY$jq$OSz(?ZaNL~
z-d}E5c8`<IuFBk9dGKN3><G+*^Tw|SRqp$i3xEE!g at lGupcdxmhi06)4*oI at Jb&u3
zs-mmgV`yj?TVG%QEuuOif-nJaMbK?0vs0lM=9Mk#cx|kl>fAtoZNAojfi}W+wB$A5
zXDJBc{n6}NX-O{LO8Z$+=iLRl`R35yEDe>B@<tPVAF>bF<77MKlOJ{gYHFIf|CpRi
z>S%AzwQ;ur-;SFr_}e(7bZ*WVlgAE6>2cD3y7TAD`#6ys`cS!?hFQZXsRULmSKehF
zDL1@&JaW9dI27ZEiBJ6%RRTPP`N#76WW5W;){e2Y5y>NakemG?1t}^0rlzKEoTOV0
zzVoxQEr^a#)3BM#42;#j%;DDPNw{C!MTgm`8?%Nk*V{-=LQ>+qK3+OAGb1s<`rFRZ
zU5 at K~_vG3E)cF-pOlN0jT+~FlnZnz1w(OA=c2r<!sENq=pjD`im<mLj?lTRIF1STp
zK!9YDC_2<eGR~XkU6B?}8_%`VGIuedFzMPo2{dH!Ug}G8Uwn~p!oVv3>-6ZT1dKgX
zgp|6B49 at C!W&U_%4^-ysTV*x$0s`^#i;MSQ at Bkp|!5F>^4mJb?58>+6*49>+mBnRe
zVbOP}O^p8Vp=7i9Z&MGUwHL`V3A5hhLYGDcKHB$R{BbE-85kMI26GgsycC>{Go;P-
zSBAbd>(tmAl>aTNeI}u+tNXpPGkZ$uAfJS552OItg)2|>8)!vbWhF1hg`QMj)>Cp!
zpUYRSl>h#1sP^nx2N4m`hsCS9=eSL$8-kcV6jW5 at Aa*6lt12pO?S?O41n%9t=M1Q+
zrliyYu4fdY{sTGp$aAT_Q9homyz@`=3Ms-SP{*vlhSRgDgxp>pdM73(D!S6hzt at u_
zDA)A9?vTAmalj2jtJ>B?LPBB<CY`MqKGfKsk&^Aa2GsQP^bnZb@~J=wSCxLt8&jGV
z-l|u_1sb8ROaWnVQCjuRHjS$`Ha25GFZ}iVdvcNzq30evRSzMkdpua#+731*C-gl7
zuF=_6DCaftLsC7@`TP87_S?5_S3~<pMyh)bK*{)k4J01X1&O!9uv1$9Sy}m{TslT1
zodex?05$mw;6+vp;bWuf7L96Kouq at pM!gq7kv&u_?CeGi5rH;iWl!e!VOB~3Jh<7}
z*`sIcMPD{a2|BOogPk=NO$!I4E}NOruS_0Pl9HA-h+>j61TQAbLc)!Kfiaw;K*wh{
z_0Ys5L7l at CfJJ|@&dEqXKtT2;QR!=pD`g;>BOb}~&tU4l_w|J%yH?|2?AlDovlUE4
zL6i at xx6Y1Vni?3iWGkdxNK7cJsda)kj|&RICJqw+-rfClzKR{<=(b){qK3Nq-899o
z1%-tg_V#!4^YZdsP60-=%1!ZBM~dkNM at Gz`OCTJdWS>gkBm%I_wl>n&{|v(SrM0bX
z`h at 89iTTN^j?r!mOfxexxP7eDO~am^p0%ynw*4xbALqeV9~`F?_4RL*S643zgz%Yc
z{Hld!D;n8#w_NTJDtq(h4W;!x?+ef2G_fl^*;KqV>(8Bd&6<Bx-b%XvAt~txs^sTS
z)gm#Adn7Ix7u#RxYVn^KogMvoFRP$X+(JAHYH#$A_I*cZXO)Ao%@yRsm3kKFv_>JX
z8wl^;zrVDy!obQJw5m8BV%L052J_;l&aogs5<0WA6iCP at F@ya50ZC5R=!B8K=m>ep
zTwGG}C4tVw<J0I*od at i4kEIu{0Wf#|=b50#Z!urc;P*3BR#skr;Bb>x*z=!-5Qtgr
z0CA~z>6yD%9taAGDJWcnTg?lQq(1 at 4ECJ*Q>@dW=e%EK!9?Rmzp84BH=Ii5?{4N{R
zkpG=dEaFeK8(awy$ORO>MV(%0p1Dir>FH^|zw&HuZcao`FJ;wUipu%#g%>|R|L3VG
zT0j+>;qq`Si+3!`4SrXy(ITsVM?2k|rEs&NHhLRX4GjtiVHAq|j<yeI#TP=tbYY&x
z12RxGLL#C!z*3ByobQp#g8#be?d^@Mpn>Fnun!#0i0IM!R9NVjk-><Ii;J8jjwhqH
zGoEU2L;L~Y at 9fOXXD5E&T$)hFsYsC<BH%%}xw#1_D7u{ZGx5e6-0oFbkG%m5m*JmF
zdMurD8Cj=>{<Zz?c!!+1Mp}rh(v#iM4uo)!jpqqMhK84?aai=a(a~I#0{Hg;_UA>P
z+uMCt!vp<gz)b*hB2Vj9T8%i+;L{fdN6KGiyK at J57}DCtCL%UgYSBfC225EC@(?4y
zIU_fB<N at JMPug-NnX##<KiI}ZVK2>$O)v2ps77f29+o!(Q2b#3RWL|Gz_P at wm-f=7
zOZuk=>qlq1y|wrO<~~Rxz&L at q41D+Qt;mfTft#ZIX>CrZ at vptT*Dhba9Q*IXEi=o@
zT~1QIVj3D6$iZN0YHH-X2GV~CMjBq^*k*cq`VwV`%P3SgF56%HHFb5rfg~O4Z|?MD
zWM)W&a2=hV{(%|b_d5B%TDa~&s at pGqjqH_5GO}JGD-mzVxOmNLMzRuRMcJz)T&~TP
z8!da3n^hDcn~acKq(n#=$qdQ(o#*$ye^%e_z0dhR=QEz?oTKn#VWE~St||}p-=iBN
zrK^0v*MbL`;BPtl%!DF%mV;G)e5j<6k&!_&vIZ#>^nBXpW-uzq1~x=<mqhunyM(ml
zd$W)RGBRwa2D+|J#oWRIo_#a|a5k~9kaYf;%SBkQKN}(SoavzZJ}|K#>Vjm&j50BR
zDWSaqyg*q?tNrRqHEL2)EC9osf5!KclX#WP&npO(=N3>5KtutbqEn#YkdTYRW%iH_
zySK_;Z7~6tip$7Yh1knvpeixca)^cwz&lSC{!9CrS(m@$<m&GaF9Rf%1Lp9cMEQb#
zjyA_1lALj?NqXJe+t#?Za}9F~Y(J_vKbk^_k7wH4+!U0SW+kxkM`mOk?j&;N)jkSM
z5<ypJpjza>{QMZLy=~xr>VdX!L}{rstdqXhj!i8)rJ~}*=I`Ilt*tcC(b4F(E=Mz}
zhK<CL^p8G4>wDkBR*3#vK;X9a_FRi%V^{?bvZC7B+8Qz$k{3WIZc?&$X)P=)P(3~%
z^VA6}_9LBvG9N1FQ9X0U?ELvSV5_E)k-PJ$udhH(e3_h#JS^+*s9uJHA*Z;Qum_HJ
zW~@O)_wD|{=cYnbouTVL;!s29m!=4AMNe*|Ed%tv9s<L^w7N>(sJHd<k_K!1tv{Px
zIZ&h^M10l_<LvB=?xj#d<>@4#$wwt6$IDuUF}uI}!zXul{yZuz1zBJu)(VQI!sX39
ztK`Jg=W#{D3cZjQa+kXg`p*ndf!AukMMYD?rjNeh-x&ju at ImQ!?i?QmvNAgDd*b9t
zbB8ywoN<DjagI(-)UfhcZQ<&U;N?jj7JlWbk^D?F0cK5H2$z%Q&MNQwRnM+2kac%=
z*W1?@wFE$dTQ9B|>*(x*-Cq&B?8GMQm1!r_369Wven=V}6rjE(O--tm)z#%M%{1uL
zzP^6D^v_!gB?|nLvi}SQX!OQXGktd<{NkC<bsGZ4Mqub5JT%$;*SyndFxXs<lXqD#
zOB54zvq7N(#&mxLbe9jNAGKXH?krqG7+z*yH&E%UOA1uqUZS}pbv_b6R7grH@%DUJ
zcTW$xZi4q9bnF;Cx(;!0IO^fyfx(~|CTg2sXi$qtb2Ak%^@&rbEM>s6a>Qw$P*kML
z-t&FF(XcUQF{b<tGP_$~2bBETbLS*=_2Q?yZXZL-Gd5;J_mlwB8e+F2I;V24442JN
zirLYn)b;Dv(Iw5`;M1;!t#9|QEdh5bGZH$dGzB?e*X0K-j-jjIjue5En>$<8UpozA
zhw^4mu^!_}SRQfB&VBik$IQ&E+jX@*c$s==Xs9M=@#y9aHJ82zak%@6k9X8A=;-MD
z9WZwgAVP>M!DnYH;N8$r9)z- at x>{#(tO1SpL!%18fj_VdUz==a{`&p9h?3Ib#_bhm
zUq3&RAr8k1tT6$U*&IcW=KIZjdQsjzo~<5!v^9p+!k8?fMu9!v1o0Ug8{<~LsS1+9
zHtZ9F`@HyU6%|Mo#7z77`IFo!Md)G_lt$)>i;>;zQw93BZMM0#fKT;IOmxl7In&b8
z(B%#6GKflvl#7dt^ic6jbV1Zjl6>9W9ceA~PiUy}QPp4!Gc$8dT^$--=I!GXQCoZZ
zRz^M#KR-VUuObVuvIK<ef at _sNLx_C3|Ho83p0 at YF=>v3hbSD%P+DdI-oGl6m+(N?z
zwFqnt?-`=xV%FCDe~dQ@`1tq)ug&b=T3=ue3JOBs0m60Q{d=!C+~g?gqImEiv7q3u
zg2F;gvD at er4MMjGuMYK^)NhPkw<GAs4&@Y3I#h3Na|42RfEJvyFVD}9g7{5+sC0H4
zF8j>e5OndP5IDJ*g-<W?b8;{Mm1sgC(Eb2W+zWPx)<eL^b&IdGU at yW}&Vm1rMj~ix
zYNFQQTDBC9QVsoL)|K`3j<@-`Buk6(15DBIYGjSG)mSYoC}QI#Qd}YsZiJMON-n*b
z5%2;gTG`b-X9qqd4i1{5eq`I{p3Nl5b0^#e1beg91%Nti<hw=i%n-E9?d==Zwzf((
z9nAGcs^GhHAt$0X{0&V^I#;H9h-qomy1Kfw`}U!py3o+jIfjNe=A-t?z%}+S{hMEm
z-%Nlnd`r<_{7o~HOzYJPs+&~Uo<!`rRO9!Z4gB2FpT$P<lb)`w=9#zo=zd?X(7d3i
z2rI?qJooN#y7wH~b at 0V#c!_sVkglmIho`3}V%9P(k%Bk5L}OlaGqdH6 at 9uc4kP!~&
z1I8Cp)u}!HkoAkJ{Ayb>B2R3pnSi}L;;pX32eAX6qXLPEZ{K1dKN=_&M0`D?fwI@@
z^2XhGcp$%`f&;9F4!oQ!)etlcfn?h$&Jc|~(UJaQYH_qyzJyFhV-`RLDP3vP=DBnC
zZro|VZ>Z%E%`ZZu&q4(S01ylEMIGvcBZ^J3OQawNgIB&Xq6+z*4B33Y?|tZe6r{`W
zga>P_T{0>)nH?-dl_7cg`JdWO8REU8(~`MBlsBhKSa|>VAcrRqrpm*2B>CgW`YtYF
zVkTKEKxwcKKYaM$&Ce;s6PK2Jr=+B$1<!Z at r4V8WaFBL{f?x3o2 at _LOQKO@FAb?)>
zEd($KrTbT<oju+Tn~`+*^!nWUg!$C5BZzd?*JG at uI@%Ks&ptc$^%Oj23NfFYn(~=w
z*%z9465Vl#;qKw at D_s*-QQ__A=m3_C?SREz27$8Fe)xjmsZ)od=-I6d1>#q+Rr3l@
z^75#Ylan`Bv-c7~o`Cezb~b(UgiAtFvNcWAu;S`x<!VvBK~)gZu_-B?cjaAEQ&W+;
zbYtO}bt8svPYLMZa5`32cWlV-+zKc+a&vQI<Ki>{RQ>&D?#AIw2j6l+d}oGAc~SqD
zS^htdt7Q$8-y9?AIbOaT4O*i&Q!Wbh<pdwD+&vC{TVB|ob#6H)ZSt at 44i3ppO&Z+1
zy!rNZfdER_hkxqrL5u^1p&nOLas1u8NB&9TK|Ry&L)76_vGOW90$Bq!M&{-U+1cBJ
z^nBzgLNY)n?J6oN+&nxj0|Ok8&eqRda1ZOQt_G_v1*^WE_NAq#Zyg=w1A&F6j0XN3
zeSD|?YrxjlR;GeGC#WOJ at tys7%iM37!hiQ%u!gvk*Y at 3_a?Hxg>fW=G0{iu(rl!XD
z)9_(%`^kK#^E{7{3`QQA<@0#<b31d!;aP!^n-u|)oy{Lt8IQvfemz(B{-Qy?;Q)5z
zGIhi)K$L2OaJ<_|h- at 7BT=`^vikioJH9tQ;RFrb>9uMf`JaFMP;d^S}@JL=AiAT^3
zs7cuDsXf!;Qc`V2xcg1qT6%i1Pwlm}|KFtv(hrqM1#hiM06e4;i4n_FoirHKO;%N<
zkeQh&sHn&_Jv}WhE898NaQ*G8u<MZGPCHw_0w(LdC#Vrz#c4;jv|vGwOaAd>Ma`9<
zdQe%d)ocn!Qb)j$$hGn(PZFsdkC|k7l6IFGiCACXr`_}L$4eONa)*ZW^1_0G3ZJQt
zO87-`KmsIU3?SK4d$4s!FwcSQ=>9B#r=@8EAUWsyIqS at IZj6ho>$zin at Eb|bHx|cT
zG1ed<$AUlYiHnGctOH6`*+8g{AOCBB8vdD3SSZ@&B*(|k|9&hxFEyv6gbBE$dzf5O
z0^%sAygX%Y&I7TBMUjzv)fbJ*KG+11P_SwOk!aT7#md4WNYpbmGovEv6+AUw{y-0Y
z|E#6u<2H%gU2|uXs{`fmAX?B+I8nnpRp>Pxzvn=q=2r=%P3dfHC9s3}1zeMC;%;q;
zX7*YAh69-qB}e!8?8g!xZ~KyMpl-xC0KF3mY*+;<QGV$hGC2VvG69;f^y6YQ^Wn3*
z>Y$x51Esb~7}@MS8qCDR#O6{fdv19-I|dx7-NOy at QBrLsv&12G0~OHH4+FqtTZ9PU
z%&n~%?i$g77%S5f!utU-pnIUc2d60|VRZZUp&}g+VPJn4n3$+BiM&c|m;rG0n5Gg-
zGE$Zhp1;JzoIX~$2aNTMj|*T<TpLpbrG7wER5aC~5Q%+2E(`|U)M6Zej at F_E(94%a
zk4s8cqQS5Cl}rkg#Ps#`Bf!p{>5S#L#l%>JM%7X%6lc&F9*fL-Dw^t7M{EB1_f$vP
za{bpYONnTgAD6q<KvmtBx*O=@<6e<OCQG8jjsvdFp9XkTRaG+^8^g(KI<ZAy!$kk$
zWSsw4Eos|$v*F_(^KHir5~&gp!o|s{MO%3P>dJ}}C^F0Ew|k~Nf3HmUwzNbtMq^#P
zyxv4cMV-gta1H8P=8w}$&p(R+rI0c^KQB+zYXg)xkHtoI2gXE4OMK$y=B at xOSGlz0
zO~S8wJV^42dinWP*nM=unwZE^y`iILtMUGd%_Z2p5X=@}CZ+=i4ghlATDgaAKa)j-
zwB-TzgcIfdR#8E0@^|o(;F){(oaN-?kRM%HSxIj{gs-1}l&X=i^siGy6Vlw=#D^#9
zDX3-!%@_VP_dea5A!J_?pgaT;qz7sprKO|u<?mDZ4ul3ENsGn=0XF)8=jPF=xB-i{
zqt~>tf~0f2bP2zYnX9R{mqj3aTW(mWK^`=A%tR}pb!dpIE@&~vRQxO0Wd)`)mIj8>
z4)xrSF{rK#<USUQ{k!qxg- at blV#0!g7_hO(UT0+;9TwW3=QHylnd`nvkc^YFb7jb?
zOHFO<1V|^KGSXMy)2OEt`OKBCU8pG+OfHXxzHSdLOZ5n9U2}Jr`0?Wh2R}c~WZP2{
z%xKiJXYy#mb7m&D=g8COqIV4Lw4x7#H2Sm7fFMS02v&?Lv_e8c><I6REYF at rbGA@*
zAczXom<`4j+?Do}*<gq+_mLw=!oogB<x1$($nRUPa!Q6+61l9b1I_h2dL|T&yCD*Z
zHGpoYR3g9)Uj?RQOb)!Ewv`nZcp>j^y+?q!(LUc?crjkXhRe6KvJ!?*J`=ju^xp_&
zUZtyF5 at cWHBzQB1(D*;>Q!x^|{a;fy*osad at BIf4;(_oY`irObV^Benu!u<Im0_9J
zmrnNHJH?D at NyftW!jM?P<rQd|+S(?xwP|~f)gO_NkU- at _;H+<ig`HaYtuSY0V-t at W
zM!*vw!rl1!S<bbO4`MTTW`QBZFx@{k9$4S$&-wytp9a5n(bbh8VNujXp;(anZB;{7
zXdydM*82LMgvfy(hDB(IsAFw7MET8SJ5Yrrl5TCCom22SE2^s6K=n=h{F$7qMK~@n
zIK*Xn?i`w`2fqy&w&|V>fTa{zA9~kIYioEQ+~JFyc>&_HsV-ezU7G(IhSaRAWZ*9U
z#W8*mJm5Sb6=_0$%@q>W-+{A0@%5e^6h)PU^704HmDxoYiD`|$3Paya6wz&IZ`b*)
z=Y@?8H60YX_5^@^WMl-{k-$JjKzvYv)F8BAXScwvZn(bj->_LncXviZL&Iu^!hUWo
zP>Vl5*AXHksYh%5=utJ{%^hXPLO^_Q{q)G>oj%<>GIE5CjcsPIFyW(<F#es6KA|cW
z(quB@!p&ynEAHP<0F5^KGJqNUEyh7D5S>EIfaL7$?RE6<D6#ukeT6<OEDQhyg8?-1
z4+x;DSAEQ0&sVA%zRSlMM+;agE-j5JD8bJ!Z7w?>Ja|y!IEfnL at 9&SqkXqQ5cm|L#
zB<P)=TpA3W;<Fspp}^dzE;Tzl1&A^i1aNzMdp919nnGUI(h|)m!I(NYu+`eJVg7G=
z`{`3UwO}XY78e(%RTvo=A!m(Ddd1VHktEWgm;STV5gJX?E(cI at 5;(AW<XmM_EQ1hj
zV>=fHgAqiv0HrZ$Y(n@{E3JPYcuTb;Kdi}tWe8E)K3tKWdQ$O$+9*7W(bqB7F2g!T
F{tpk90)YSk

diff --git a/Doc/library/distutils.rst b/Doc/library/distutils.rst
--- a/Doc/library/distutils.rst
+++ b/Doc/library/distutils.rst
@@ -12,18 +12,26 @@
 100%-pure Python, or may be extension modules written in C, or may be
 collections of Python packages which include modules coded in both Python and C.
 
-This package is discussed in two separate chapters:
+.. deprecated:: 3.3
+   :mod:`packaging` replaces Distutils.  See :ref:`packaging-index` and
+   :ref:`packaging-install-index`.
 
 
+User documentation and API reference are provided in another document:
+
 .. seealso::
 
    :ref:`distutils-index`
       The manual for developers and packagers of Python modules.  This describes
       how to prepare :mod:`distutils`\ -based packages so that they may be
-      easily installed into an existing Python installation.
+      easily installed into an existing Python installation.  It also contains
+      instructions for end users who want to install a distutils-based package;
+      see :ref:`install-index`.
 
-   :ref:`install-index`
-      An "administrators" manual which includes information on installing
-      modules into an existing Python installation.  You do not need to be a
-      Python programmer to read this manual.
 
+.. trick to silence a Sphinx warning
+
+.. toctree::
+   :hidden:
+
+   ../distutils/index
diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst
--- a/Doc/library/functions.rst
+++ b/Doc/library/functions.rst
@@ -290,19 +290,18 @@
    The resulting list is sorted alphabetically.  For example:
 
       >>> import struct
-      >>> dir()   # doctest: +SKIP
+      >>> dir()   # show the names in the module namespace
       ['__builtins__', '__doc__', '__name__', 'struct']
-      >>> dir(struct)   # doctest: +NORMALIZE_WHITESPACE
+      >>> dir(struct)   # show the names in the struct module
       ['Struct', '__builtins__', '__doc__', '__file__', '__name__',
        '__package__', '_clearcache', 'calcsize', 'error', 'pack', 'pack_into',
        'unpack', 'unpack_from']
-      >>> class Foo:
-      ...     def __dir__(self):
-      ...         return ["kan", "ga", "roo"]
-      ...
-      >>> f = Foo()
-      >>> dir(f)
-      ['ga', 'kan', 'roo']
+      >>> class Shape(object):
+      ...     def __dir__(self):
+      ...         return ['area', 'perimeter', 'location']
+      ...
+      >>> s = Shape()
+      >>> dir(s)
+      ['area', 'location', 'perimeter']
 
    .. note::
 
@@ -333,15 +332,21 @@
    :meth:`__next__` method of the iterator returned by :func:`enumerate` returns a
    tuple containing a count (from *start* which defaults to 0) and the
    corresponding value obtained from iterating over *iterable*.
-   :func:`enumerate` is useful for obtaining an indexed series: ``(0, seq[0])``,
-   ``(1, seq[1])``, ``(2, seq[2])``, .... For example:
 
-      >>> for i, season in enumerate(['Spring', 'Summer', 'Fall', 'Winter']):
-      ...     print(i, season)
-      0 Spring
-      1 Summer
-      2 Fall
-      3 Winter
+      >>> for i, season in enumerate('Spring Summer Fall Winter'.split(), start=1):
+      ...     print(i, season)
+      1 Spring
+      2 Summer
+      3 Fall
+      4 Winter
+
+   Equivalent to::
+
+      def enumerate(sequence, start=0):
+          n = start
+          for elem in sequence:
+              yield n, elem
+              n += 1
 
 
 .. function:: eval(expression, globals=None, locals=None)
@@ -580,7 +585,7 @@
    Two objects with non-overlapping lifetimes may have the same :func:`id`
    value.
 
-   .. impl-detail:: This is the address of the object.
+   .. impl-detail:: This is the address of the object in memory.
 
 
 .. function:: input([prompt])
@@ -652,10 +657,10 @@
 
    One useful application of the second form of :func:`iter` is to read lines of
    a file until a certain line is reached.  The following example reads a file
-   until ``"STOP"`` is reached: ::
+   until the :meth:`readline` method returns an empty string::
 
-      with open("mydata.txt") as fp:
-          for line in iter(fp.readline, "STOP"):
+      with open('mydata.txt') as fp:
+          for line in iter(fp.readline, ''):
               process_line(line)
 
 
@@ -1169,8 +1174,9 @@
    It can be called either on the class (such as ``C.f()``) or on an instance (such
    as ``C().f()``).  The instance is ignored except for its class.
 
-   Static methods in Python are similar to those found in Java or C++. For a more
-   advanced concept, see :func:`classmethod` in this section.
+   Static methods in Python are similar to those found in Java or C++. Also see
+   :func:`classmethod` for a variant that is useful for creating alternate class
+   constructors.
 
    For more information on static methods, consult the documentation on the
    standard type hierarchy in :ref:`types`.
@@ -1270,6 +1276,10 @@
    references.  The zero argument form automatically searches the stack frame
    for the class (``__class__``) and the first argument.
 
+   For practical suggestions on how to design cooperative classes using
+   :func:`super`, see `guide to using super()
+   <http://rhettinger.wordpress.com/2011/05/26/super-considered-super/>`_.
+
 
 .. function:: tuple([iterable])
 
diff --git a/Doc/library/os.rst b/Doc/library/os.rst
--- a/Doc/library/os.rst
+++ b/Doc/library/os.rst
@@ -1019,6 +1019,19 @@
    Availability: Unix, Windows.
 
 
+.. function:: pipe2(flags=0)
+
+   Create a pipe with *flags* set atomically.
+   *flags* is optional and can be constructed by ORing together zero or more of
+   these values: :data:`O_NONBLOCK`, :data:`O_CLOEXEC`.
+   Return a pair of file descriptors ``(r, w)`` usable for reading and writing,
+   respectively.
+
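+   For illustration, a minimal sketch of creating a non-blocking pipe
+   (assuming a platform where both flags are available)::
+
+      import os
+
+      # both ends are created with O_NONBLOCK and O_CLOEXEC set atomically
+      r, w = os.pipe2(os.O_NONBLOCK | os.O_CLOEXEC)
+      os.write(w, b'ping')
+      print(os.read(r, 4))   # b'ping'
+      os.close(r)
+      os.close(w)
+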
+   Availability: some flavors of Unix.
+
+   .. versionadded:: 3.3
+
+
 .. function:: posix_fallocate(fd, offset, len)
 
    Ensures that enough disk space is allocated for the file specified by *fd*
diff --git a/Doc/library/packaging-misc.rst b/Doc/library/packaging-misc.rst
new file mode 100644
--- /dev/null
+++ b/Doc/library/packaging-misc.rst
@@ -0,0 +1,27 @@
+.. temporary file for modules that don't need a dedicated file yet
+
+:mod:`packaging.errors` --- Packaging exceptions
+================================================
+
+.. module:: packaging.errors
+   :synopsis: Packaging exceptions.
+
+
+Provides exceptions used by the Packaging modules.  Note that Packaging modules
+may raise standard exceptions; in particular, SystemExit is usually raised for
+errors that are obviously the end-user's fault (e.g. bad command-line arguments).
+
+This module is safe to use in ``from ... import *`` mode; it only exports
+symbols whose names start with ``Packaging`` and end with ``Error``.
+
+
+:mod:`packaging.manifest` --- The Manifest class
+================================================
+
+.. module:: packaging.manifest
+   :synopsis: The Manifest class, used for poking about the file system and
+              building lists of files.
+
+
+This module provides the :class:`Manifest` class, used for poking about the
+filesystem and building lists of files.
diff --git a/Doc/library/packaging.command.rst b/Doc/library/packaging.command.rst
new file mode 100644
--- /dev/null
+++ b/Doc/library/packaging.command.rst
@@ -0,0 +1,111 @@
+:mod:`packaging.command` --- Standard Packaging commands
+========================================================
+
+.. module:: packaging.command
+   :synopsis: Standard packaging commands.
+
+
+This subpackage contains one module for each standard Packaging command, such as
+:command:`build`  or :command:`upload`.  Each command is implemented as a
+separate module, with the command name as the name of the module and of the
+class defined therein.
+
+
+
+:mod:`packaging.command.cmd` --- Abstract base class for Packaging commands
+===========================================================================
+
+.. module:: packaging.command.cmd
+   :synopsis: Abstract base class for commands.
+
+
+This module supplies the abstract base class :class:`Command`.  This class is
+subclassed by the modules in the packaging.command subpackage.
+
+
+.. class:: Command(dist)
+
+   Abstract base class for defining command classes, the "worker bees" of the
+   Packaging.  A useful analogy for command classes is to think of them as
+   subroutines with local variables called *options*.  The options are declared
+   in :meth:`initialize_options` and defined (given their final values) in
+   :meth:`finalize_options`, both of which must be defined by every command
+   class.  The distinction between the two is necessary because option values
+   might come from the outside world (command line, config file, ...), and any
+   options dependent on other options must be computed after these outside
+   influences have been processed --- hence :meth:`finalize_options`.  The body
+   of the subroutine, where it does all its work based on the values of its
+   options, is the :meth:`run` method, which must also be implemented by every
+   command class.
+
+   The class constructor takes a single argument *dist*, a
+   :class:`~packaging.dist.Distribution` instance.
+
+
+Creating a new Packaging command
+--------------------------------
+
+This section outlines the steps to create a new Packaging command.
+
+.. XXX the following paragraph is focused on the stdlib; expand it to document
+   how to write and register a command in third-party projects
+
+A new command lives in a module in the :mod:`packaging.command` package. There
+is a sample template in that directory called :file:`command_template`.  Copy
+this file to a new module with the same name as the new command you're
+implementing.  This module should implement a class with the same name as the
+module (and the command).  So, for instance, to create the command
+``peel_banana`` (so that users can run ``setup.py peel_banana``), you'd copy
+:file:`command_template` to :file:`packaging/command/peel_banana.py`, then edit
+it so that it's implementing the class :class:`peel_banana`, a subclass of
+:class:`Command`.  It must define the following methods:
+
+.. method:: Command.initialize_options()
+
+   Set default values for all the options that this command supports.  Note that
+   these defaults may be overridden by other commands, by the setup script, by
+   config files, or by the command line.  Thus, this is not the place to code
+   dependencies between options; generally, :meth:`initialize_options`
+   implementations are just a bunch of ``self.foo = None`` assignments.
+
+
+.. method:: Command.finalize_options()
+
+   Set final values for all the options that this command supports. This is
+   always called as late as possible, i.e. after any option assignments from the
+   command line or from other commands have been done.  Thus, this is the place
+   to code option dependencies: if *foo* depends on *bar*, then it is safe to
+   set *foo* from *bar* as long as *foo* still has the same value it was
+   assigned in :meth:`initialize_options`.
+
+
+.. method:: Command.run()
+
+   A command's raison d'etre: carry out the action it exists to perform,
+   controlled by the options initialized in :meth:`initialize_options`,
+   customized by other commands, the setup script, the command line, and config
+   files, and finalized in :meth:`finalize_options`.  All terminal output and
+   filesystem interaction should be done by :meth:`run`.
+
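+Putting these pieces together, a minimal sketch of the hypothetical
+``peel_banana`` command might look like this (for illustration only; a real
+command would also declare its command-line options)::
+
+   from packaging.command.cmd import Command
+
+   class peel_banana(Command):
+
+       def initialize_options(self):
+           # defaults only; no dependencies between options here
+           self.banana = None
+
+       def finalize_options(self):
+           # compute values that depend on other options or commands
+           if self.banana is None:
+               self.banana = 'yellow'
+
+       def run(self):
+           # all real work, output and filesystem access happen here
+           print('peeling a %s banana' % self.banana)
+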
+
+Command classes may define this attribute:
+
+
+.. attribute:: Command.sub_commands
+
+   *sub_commands* formalizes the notion of a "family" of commands,
+   e.g. ``install_dist`` as the parent with sub-commands ``install_lib``,
+   ``install_headers``, etc.  The parent of a family of commands defines
+   *sub_commands* as a class attribute; it's a list of 2-tuples ``(command_name,
+   predicate)``, with *command_name* a string and *predicate* a function, a
+   string or ``None``.  *predicate* is a method of the parent command that
+   determines whether the corresponding command is applicable in the current
+   situation.  (E.g. ``install_headers`` is only applicable if we have any C
+   header files to install.)  If *predicate* is ``None``, that command is always
+   applicable.
+
+   *sub_commands* is usually defined at the *end* of a class, because
+   predicates can be methods of the class, so they must already have been
+   defined.  The canonical example is the :command:`install_dist` command.
+
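+   For illustration, a simplified sketch modeled on such a parent command (not
+   the actual implementation; ``has_headers`` is an assumed predicate)::
+
+      from packaging.command.cmd import Command
+
+      class install_dist(Command):
+
+          def has_headers(self):
+              # hypothetical check: are there C header files to install?
+              return bool(getattr(self, 'headers', None))
+
+          # defined last, so the predicate above already exists
+          sub_commands = [('install_lib', None),          # always applicable
+                          ('install_headers', has_headers)]
+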
+.. XXX document how to add a custom command to another one's subcommands
diff --git a/Doc/library/packaging.compiler.rst b/Doc/library/packaging.compiler.rst
new file mode 100644
--- /dev/null
+++ b/Doc/library/packaging.compiler.rst
@@ -0,0 +1,672 @@
+:mod:`packaging.compiler` --- Compiler classes
+==============================================
+
+.. module:: packaging.compiler
+   :synopsis: Compiler classes to build C/C++ extensions or libraries.
+
+
+This subpackage contains an abstract base class representing a compiler and
+concrete implementations for common compilers.  The compiler classes should not
+be instantiated directly, but created using the :func:`new_compiler` factory
+function.  Compiler types provided by Packaging are listed in
+:ref:`packaging-standard-compilers`.
+
+
+Public functions
+----------------
+
+.. function:: new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0)
+
+   Factory function to generate an instance of some
+   :class:`~.ccompiler.CCompiler` subclass for the requested platform or
+   compiler type.
+
+   If no argument is given for *plat* and *compiler*, the default compiler type
+   for the platform (:attr:`os.name`) will be used: ``'unix'`` for Unix and
+   Mac OS X, ``'msvc'`` for Windows.
+
+   If *plat* is given, it must be one of ``'posix'``, ``'darwin'`` or ``'nt'``.
+   An invalid value will not raise an exception but use the default compiler
+   type for the current platform.
+
+   .. XXX errors should never pass silently; this behavior is particularly
+      harmful when a compiler type is given as first argument
+
+   If *compiler* is given, *plat* will be ignored, allowing you to get for
+   example a ``'unix'`` compiler object under Windows or an ``'msvc'`` compiler
+   under Unix.  However, not all compiler types can be instantiated on every
+   platform.
+
+
+.. function:: customize_compiler(compiler)
+
+   Do any platform-specific customization of a CCompiler instance.  Mainly
+   needed on Unix to plug in the information that varies across Unices and is
+   stored in CPython's Makefile.
+
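+For illustration, a minimal build sketch combining the two functions above
+(``hello.c`` is a hypothetical source file and error handling is omitted)::
+
+   from packaging.compiler import new_compiler, customize_compiler
+
+   cc = new_compiler()        # default compiler type for this platform
+   customize_compiler(cc)     # apply platform-specific settings
+   objects = cc.compile(['hello.c'], output_dir='build')
+   cc.link_shared_lib(objects, 'hello', output_dir='build')
+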
+
+.. function:: gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries)
+
+   Generate linker options for searching library directories and linking with
+   specific libraries.  *libraries* and *library_dirs* are, respectively, lists
+   of library names (not filenames!) and search directories.  Returns a list of
+   command-line options suitable for use with some compiler (depending on the
+   two format strings passed in).
+
+
+.. function:: gen_preprocess_options(macros, include_dirs)
+
+   Generate C preprocessor options (:option:`-D`, :option:`-U`, :option:`-I`) as
+   used by at least two types of compilers: the typical Unix compiler and Visual
+   C++. *macros* is the usual thing, a list of 1- or 2-tuples, where ``(name,)``
+   means undefine (:option:`-U`) macro *name*, and ``(name, value)`` means
+   define (:option:`-D`) macro *name* to *value*.  *include_dirs* is just a list
+   of directory names to be added to the header file search path (:option:`-I`).
+   Returns a list of command-line options suitable for either Unix compilers or
+   Visual C++.
+
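+For illustration, a possible call (the exact strings depend on the
+implementation, but would resemble this)::
+
+   >>> from packaging.compiler import gen_preprocess_options
+   >>> gen_preprocess_options([('NDEBUG', None), ('FOO',)], ['include'])
+   ['-DNDEBUG', '-UFOO', '-Iinclude']
+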
+
+.. function:: get_default_compiler(osname, platform)
+
+   Determine the default compiler to use for the given platform.
+
+   *osname* should be one of the standard Python OS names (i.e. the ones
+   returned by ``os.name``) and *platform* the common value returned by
+   ``sys.platform`` for the platform in question.
+
+   The default values are ``os.name`` and ``sys.platform``.
+
+
+.. function:: set_compiler(location)
+
+   Add or change a compiler.
+
+
+.. function:: show_compilers()
+
+   Print list of available compilers (used by the :option:`--help-compiler`
+   options to :command:`build`, :command:`build_ext`, :command:`build_clib`).
+
+
+.. _packaging-standard-compilers:
+
+Standard compilers
+------------------
+
+Concrete subclasses of :class:`~.ccompiler.CCompiler` are provided in submodules
+of the :mod:`packaging.compiler` package.  You do not need to import them;
+:func:`new_compiler` is the public API to use.  This table documents the
+standard compilers; be aware that they can be replaced by other classes on your
+platform.
+
+=============== ======================================================== =======
+name            description                                              notes
+=============== ======================================================== =======
+``'unix'``      typical Unix-style command-line C compiler               [#]_
+``'msvc'``      Microsoft compiler                                       [#]_
+``'bcpp'``      Borland C++ compiler
+``'cygwin'``    Cygwin compiler (Windows port of GCC)
+``'mingw32'``   Mingw32 port of GCC (same as Cygwin in no-Cygwin mode)
+=============== ======================================================== =======
+
+
+.. [#] The Unix compiler class assumes this behavior:
+
+       * macros defined with :option:`-Dname[=value]`
+
+       * macros undefined with :option:`-Uname`
+
+       * include search directories specified with :option:`-Idir`
+
+       * libraries specified with :option:`-llib`
+
+       * library search directories specified with :option:`-Ldir`
+
+       * compile handled by :program:`cc` (or similar) executable with
+         :option:`-c` option: compiles :file:`.c` to :file:`.o`
+
+       * link static library handled by :program:`ar` command (possibly with
+         :program:`ranlib`)
+
+       * link shared library handled by :program:`cc` :option:`-shared`
+
+
+.. [#] On Windows, extension modules typically need to be compiled with the same
+       compiler that was used to compile CPython (for example Microsoft Visual
+       Studio .NET 2003 for CPython 2.4 and 2.5).  The AMD64 and Itanium
+       binaries are created using the Platform SDK.
+
+       Under the hood, there are actually two different subclasses of
+       :class:`~.ccompiler.CCompiler` defined: one is compatible with MSVC 2005
+       and 2008, the other works with older versions.  This should not be a
+       concern for regular use of the functions in this module.
+
+       Packaging will normally choose the right compiler, linker etc. on its
+       own.  To override this choice, the environment variables
+       *DISTUTILS_USE_SDK* and *MSSdk* must both be set.  *MSSdk* indicates that
+       the current environment has been set up by the SDK's ``SetEnv.Cmd``
+       script, or that the environment variables were registered when the
+       SDK was installed; *DISTUTILS_USE_SDK* indicates that the user has made
+       an explicit choice to override the compiler selection done by Packaging.
+
+       .. TODO document the envvars in Doc/using and the man page
+
+
+:mod:`packaging.compiler.ccompiler` --- CCompiler base class
+============================================================
+
+.. module:: packaging.compiler.ccompiler
+   :synopsis: Abstract CCompiler class.
+
+
+This module provides the abstract base class for the :class:`CCompiler`
+classes.  A :class:`CCompiler` instance can be used for all the compile and
+link steps needed to build a single project. Methods are provided to set
+options for the compiler --- macro definitions, include directories, link path,
+libraries and the like.
+
+.. class:: CCompiler([verbose=0, dry_run=0, force=0])
+
+   The abstract base class :class:`CCompiler` defines the interface that must be
+   implemented by real compiler classes.  The class also has some utility
+   methods used by several compiler classes.
+
+   The basic idea behind a compiler abstraction class is that each instance can
+   be used for all the compile/link steps in building a single project.  Thus,
+   attributes common to all of those compile and link steps --- include
+   directories, macros to define, libraries to link against, etc. --- are
+   attributes of the compiler instance.  To allow for variability in how
+   individual files are treated, most of those attributes may be varied on a
+   per-compilation or per-link basis.
+
+   The constructor for each subclass creates an instance of the Compiler object.
+   Flags are *verbose* (show verbose output), *dry_run* (don't actually execute
+   the steps) and *force* (rebuild everything, regardless of dependencies).  All
+   of these flags default to ``0`` (off).  Note that you probably don't want to
+   instantiate :class:`CCompiler` or one of its subclasses directly; use the
+   :func:`packaging.compiler.new_compiler` factory function instead.
+
+   The following methods allow you to manually alter compiler options for the
+   instance of the Compiler class.
+
+
+   .. method:: CCompiler.add_include_dir(dir)
+
+      Add *dir* to the list of directories that will be searched for header
+      files.  The compiler is instructed to search directories in the order in
+      which they are supplied by successive calls to :meth:`add_include_dir`.
+
+
+   .. method:: CCompiler.set_include_dirs(dirs)
+
+      Set the list of directories that will be searched to *dirs* (a list of
+      strings). Overrides any preceding calls to :meth:`add_include_dir`;
+      subsequent calls to :meth:`add_include_dir` add to the list passed to
+      :meth:`set_include_dirs`. This does not affect any list of standard
+      include directories that the compiler may search by default.
+
+
+   .. method:: CCompiler.add_library(libname)
+
+      Add *libname* to the list of libraries that will be included in all links
+      driven by this compiler object.  Note that *libname* should *not* be the
+      name of a file containing a library, but the name of the library itself:
+      the actual filename will be inferred by the linker, the compiler, or the
+      compiler class (depending on the platform).
+
+      The linker will be instructed to link against libraries in the order they
+      were supplied to :meth:`add_library` and/or :meth:`set_libraries`.  It is
+      perfectly valid to duplicate library names; the linker will be instructed
+      to link against libraries as many times as they are mentioned.
+
+
+   .. method:: CCompiler.set_libraries(libnames)
+
+      Set the list of libraries to be included in all links driven by this
+      compiler object to *libnames* (a list of strings).  This does not affect
+      any standard system libraries that the linker may include by default.
+
+
+   .. method:: CCompiler.add_library_dir(dir)
+
+      Add *dir* to the list of directories that will be searched for libraries
+      specified to :meth:`add_library` and :meth:`set_libraries`.  The linker
+      will be instructed to search for libraries in the order they are supplied
+      to :meth:`add_library_dir` and/or :meth:`set_library_dirs`.
+
+
+   .. method:: CCompiler.set_library_dirs(dirs)
+
+      Set the list of library search directories to *dirs* (a list of strings).
+      This does not affect any standard library search path that the linker may
+      search by default.
+
+
+   .. method:: CCompiler.add_runtime_library_dir(dir)
+
+      Add *dir* to the list of directories that will be searched for shared
+      libraries at runtime.
+
+
+   .. method:: CCompiler.set_runtime_library_dirs(dirs)
+
+      Set the list of directories to search for shared libraries at runtime to
+      *dirs* (a list of strings).  This does not affect any standard search path
+      that the runtime linker may search by default.
+
+
+   .. method:: CCompiler.define_macro(name[, value=None])
+
+      Define a preprocessor macro for all compilations driven by this compiler
+      object. The optional parameter *value* should be a string; if it is not
+      supplied, then the macro will be defined without an explicit value and the
+      exact outcome depends on the compiler used (XXX true? does ANSI say
+      anything about this?)
+
+
+   .. method:: CCompiler.undefine_macro(name)
+
+      Undefine a preprocessor macro for all compilations driven by this compiler
+      object.  If the same macro is defined by :meth:`define_macro` and
+      undefined by :meth:`undefine_macro` the last call takes precedence
+      (including multiple redefinitions or undefinitions).  If the macro is
+      redefined/undefined on a per-compilation basis (i.e. in the call to
+      :meth:`compile`), then that takes precedence.
+
+
+   .. method:: CCompiler.add_link_object(object)
+
+      Add *object* to the list of object files (or analogues, such as explicitly
+      named library files or the output of "resource compilers") to be included
+      in every link driven by this compiler object.
+
+
+   .. method:: CCompiler.set_link_objects(objects)
+
+      Set the list of object files (or analogues) to be included in every link
+      to *objects*.  This does not affect any standard object files that the
+      linker may include by default (such as system libraries).
+
+   The following methods allow autodetection of compiler options, providing
+   some functionality similar to GNU :program:`autoconf`.
+
+
+   .. method:: CCompiler.detect_language(sources)
+
+      Detect the language of a given file, or list of files. Uses the instance
+      attributes :attr:`language_map` (a dictionary), and :attr:`language_order`
+      (a list) to do the job.
+
+
+   .. method:: CCompiler.find_library_file(dirs, lib[, debug=0])
+
+      Search the specified list of directories for a static or shared library file
+      *lib* and return the full path to that file.  If *debug* is true, look for a
+      debugging version (if that makes sense on the current platform).  Return
+      ``None`` if *lib* wasn't found in any of the specified directories.
+
+
+   .. method:: CCompiler.has_function(funcname [, includes=None, include_dirs=None, libraries=None, library_dirs=None])
+
+      Return a boolean indicating whether *funcname* is supported on the current
+      platform.  The optional arguments can be used to augment the compilation
+      environment by providing additional include files and paths and libraries and
+      paths.
+
+
+   .. method:: CCompiler.library_dir_option(dir)
+
+      Return the compiler option to add *dir* to the list of directories searched for
+      libraries.
+
+
+   .. method:: CCompiler.library_option(lib)
+
+      Return the compiler option to add *lib* to the list of libraries linked into the
+      shared library or executable.
+
+
+   .. method:: CCompiler.runtime_library_dir_option(dir)
+
+      Return the compiler option to add *dir* to the list of directories searched for
+      runtime libraries.
+
+
+   .. method:: CCompiler.set_executables(**args)
+
+      Define the executables (and options for them) that will be run to perform the
+      various stages of compilation.  The exact set of executables that may be
+      specified here depends on the compiler class (via the 'executables' class
+      attribute), but most will have:
+
+      +--------------+------------------------------------------+
+      | attribute    | description                              |
+      +==============+==========================================+
+      | *compiler*   | the C/C++ compiler                       |
+      +--------------+------------------------------------------+
+      | *linker_so*  | linker used to create shared objects and |
+      |              | libraries                                |
+      +--------------+------------------------------------------+
+      | *linker_exe* | linker used to create binary executables |
+      +--------------+------------------------------------------+
+      | *archiver*   | static library creator                   |
+      +--------------+------------------------------------------+
+
+      On platforms with a command line (Unix, DOS/Windows), each of these is a string
+      that will be split into executable name and (optional) list of arguments.
+      (Splitting the string is done similarly to how Unix shells operate: words are
+      delimited by spaces, but quotes and backslashes can override this.  See
+      :func:`packaging.util.split_quoted`.)
+
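+      For illustration, a hypothetical override for a Unix-style compiler
+      (the flags shown are arbitrary examples)::
+
+         from packaging.compiler import new_compiler
+
+         cc = new_compiler(compiler='unix')
+         cc.set_executables(compiler='gcc -O2 -Wall',
+                            linker_so='gcc -shared')
+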
+   The following methods invoke stages in the build process.
+
+
+   .. method:: CCompiler.compile(sources[, output_dir=None, macros=None, include_dirs=None, debug=0, extra_preargs=None, extra_postargs=None, depends=None])
+
+      Compile one or more source files. Generates object files (e.g. transforms a
+      :file:`.c` file to a :file:`.o` file.)
+
+      *sources* must be a list of filenames, most likely C/C++ files, but in reality
+      anything that can be handled by a particular compiler and compiler class (e.g.
+      an ``'msvc'`` compiler can handle resource files in *sources*).  Return a list of
+      object filenames, one per source filename in *sources*.  Depending on the
+      implementation, not all source files will necessarily be compiled, but all
+      corresponding object filenames will be returned.
+
+      If *output_dir* is given, object files will be put under it, while retaining
+      their original path component.  That is, :file:`foo/bar.c` normally compiles to
+      :file:`foo/bar.o` (for a Unix implementation); if *output_dir* is *build*, then
+      it would compile to :file:`build/foo/bar.o`.
+
+      *macros*, if given, must be a list of macro definitions.  A macro definition is
+      either a ``(name, value)`` 2-tuple or a ``(name,)`` 1-tuple. The former defines
+      a macro; if the value is ``None``, the macro is defined without an explicit
+      value.  The 1-tuple case undefines a macro.  Later
+      definitions/redefinitions/undefinitions take precedence.
+
+      *include_dirs*, if given, must be a list of strings, the directories to add to
+      the default include file search path for this compilation only.
+
+      *debug* is a boolean; if true, the compiler will be instructed to output debug
+      symbols in (or alongside) the object file(s).
+
+      *extra_preargs* and *extra_postargs* are implementation-dependent. On platforms
+      that have the notion of a command line (e.g. Unix, DOS/Windows), they are most
+      likely lists of strings: extra command-line arguments to prepend/append to the
+      compiler command line.  On other platforms, consult the implementation class
+      documentation.  In any event, they are intended as an escape hatch for those
+      occasions when the abstract compiler framework doesn't cut the mustard.
+
+      *depends*, if given, is a list of filenames that all targets depend on.  If a
+      source file is older than any file in depends, then the source file will be
+      recompiled.  This supports dependency tracking, but only at a coarse
+      granularity.
+
+      Raises :exc:`CompileError` on failure.
+
+
+   .. method:: CCompiler.create_static_lib(objects, output_libname[, output_dir=None, debug=0, target_lang=None])
+
+      Link a bunch of stuff together to create a static library file. The "bunch of
+      stuff" consists of the list of object files supplied as *objects*, the extra
+      object files supplied to :meth:`add_link_object` and/or
+      :meth:`set_link_objects`, the libraries supplied to :meth:`add_library` and/or
+      :meth:`set_libraries`, and the libraries supplied as *libraries* (if any).
+
+      *output_libname* should be a library name, not a filename; the filename will be
+      inferred from the library name.  *output_dir* is the directory where the library
+      file will be put. XXX defaults to what?
+
+      *debug* is a boolean; if true, debugging information will be included in the
+      library (note that on most platforms, it is the compile step where this matters:
+      the *debug* flag is included here just for consistency).
+
+      *target_lang* is the target language for which the given objects are being
+      compiled. This allows specific linkage time treatment of certain languages.
+
+      Raises :exc:`LibError` on failure.
+
+
+   .. method:: CCompiler.link(target_desc, objects, output_filename[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None])
+
+      Link a bunch of stuff together to create an executable or shared library file.
+
+      The "bunch of stuff" consists of the list of object files supplied as *objects*.
+      *output_filename* should be a filename.  If *output_dir* is supplied,
+      *output_filename* is relative to it (i.e. *output_filename* can provide
+      directory components if needed).
+
+      *libraries* is a list of libraries to link against.  These are library names,
+      not filenames, since they're translated into filenames in a platform-specific
+      way (e.g. *foo* becomes :file:`libfoo.a` on Unix and :file:`foo.lib` on
+      DOS/Windows).  However, they can include a directory component, which means the
+      linker will look in that specific directory rather than searching all the normal
+      locations.
+
+      *library_dirs*, if supplied, should be a list of directories to search for
+      libraries that were specified as bare library names (i.e. no directory
+      component).  These are on top of the system default and those supplied to
+      :meth:`add_library_dir` and/or :meth:`set_library_dirs`.  *runtime_library_dirs*
+      is a list of directories that will be embedded into the shared library and used
+      to search for other shared libraries that \*it\* depends on at run-time.  (This
+      may only be relevant on Unix.)
+
+      *export_symbols* is a list of symbols that the shared library will export.
+      (This appears to be relevant only on Windows.)
+
+      *debug* is as for :meth:`compile` and :meth:`create_static_lib`, with the
+      slight distinction that it actually matters on most platforms (as opposed to
+      :meth:`create_static_lib`, which includes a *debug* flag mostly for form's
+      sake).
+
+      *extra_preargs* and *extra_postargs* are as for :meth:`compile` (except of
+      course that they supply command-line arguments for the particular linker being
+      used).
+
+      *target_lang* is the target language for which the given objects are being
+      compiled. This allows specific linkage time treatment of certain languages.
+
+      Raises :exc:`LinkError` on failure.
+
+
+   .. method:: CCompiler.link_executable(objects, output_progname[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, debug=0, extra_preargs=None, extra_postargs=None, target_lang=None])
+
+      Link an executable.  *output_progname* is the name of the file executable, while
+      *objects* are a list of object filenames to link in. Other arguments are as for
+      the :meth:`link` method.
+
+
+   .. method:: CCompiler.link_shared_lib(objects, output_libname[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None])
+
+      Link a shared library. *output_libname* is the name of the output library,
+      while *objects* is a list of object filenames to link in. Other arguments are
+      as for the :meth:`link` method.
+
+
+   .. method:: CCompiler.link_shared_object(objects, output_filename[, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None])
+
+      Link a shared object. *output_filename* is the name of the shared object that
+      will be created, while *objects* is a list of object filenames to link in.
+      Other arguments are as for the :meth:`link` method.
+
+
+   .. method:: CCompiler.preprocess(source[, output_file=None, macros=None, include_dirs=None, extra_preargs=None, extra_postargs=None])
+
+      Preprocess a single C/C++ source file, named in *source*. Output will be written
+      to a file named *output_file*, or to *stdout* if *output_file* is not supplied.
+      *macros* is a list of macro definitions as for :meth:`compile`, which will
+      augment the macros set with :meth:`define_macro` and :meth:`undefine_macro`.
+      *include_dirs* is a list of directory names that will be added to the default
+      list, in the same way as :meth:`add_include_dir`.
+
+      Raises :exc:`PreprocessError` on failure.
+
+   The following utility methods are defined by the :class:`CCompiler` class, for
+   use by the various concrete subclasses.
+
+
+   .. method:: CCompiler.executable_filename(basename[, strip_dir=0, output_dir=''])
+
+      Returns the filename of the executable for the given *basename*.  Typically for
+      non-Windows platforms this is the same as the basename, while Windows will get
+      a :file:`.exe` added.
+
+
+   .. method:: CCompiler.library_filename(libname[, lib_type='static', strip_dir=0, output_dir=''])
+
+      Returns the filename for the given library name on the current platform. On Unix
+      a library with *lib_type* of ``'static'`` will typically be of the form
+      :file:`liblibname.a`, while a *lib_type* of ``'dynamic'`` will be of the form
+      :file:`liblibname.so`.
+
+
+   .. method:: CCompiler.object_filenames(source_filenames[, strip_dir=0, output_dir=''])
+
+      Returns the name of the object files for the given source files.
+      *source_filenames* should be a list of filenames.
+
+
+   .. method:: CCompiler.shared_object_filename(basename[, strip_dir=0, output_dir=''])
+
+      Returns the name of a shared object file for the given file name *basename*.
+
+
+   .. method:: CCompiler.execute(func, args[, msg=None, level=1])
+
+      Invokes :func:`packaging.util.execute`.  This method invokes a Python function
+      *func* with the given arguments *args*, after logging and taking into account
+      the *dry_run* flag. XXX see also.
+
+
+   .. method:: CCompiler.spawn(cmd)
+
+      Invokes :func:`packaging.util.spawn`. This invokes an external process to run
+      the given command. XXX see also.
+
+
+   .. method:: CCompiler.mkpath(name[, mode=511])
+
+      Invokes :func:`packaging.dir_util.mkpath`. This creates a directory and any
+      missing ancestor directories. XXX see also.
+
+
+   .. method:: CCompiler.move_file(src, dst)
+
+      Invokes :meth:`packaging.file_util.move_file`. Renames *src* to *dst*.  XXX see
+      also.
+
+
+:mod:`packaging.compiler.extension` --- The Extension class
+===========================================================
+
+.. module:: packaging.compiler.extension
+   :synopsis: Class used to represent C/C++ extension modules.
+
+
+This module provides the :class:`Extension` class, used to represent C/C++
+extension modules.
+
+.. class:: Extension
+
+   The Extension class describes a single C or C++ extension module.  It accepts
+   the following keyword arguments in its constructor:
+
+   +------------------------+--------------------------------+---------------------------+
+   | argument name          | value                          | type                      |
+   +========================+================================+===========================+
+   | *name*                 | the full name of the           | string                    |
+   |                        | extension, including any       |                           |
+   |                        | packages --- i.e. *not* a      |                           |
+   |                        | filename or pathname, but      |                           |
+   |                        | Python dotted name             |                           |
+   +------------------------+--------------------------------+---------------------------+
+   | *sources*              | list of source filenames,      | string                    |
+   |                        | relative to the distribution   |                           |
+   |                        | root (where the setup script   |                           |
+   |                        | lives), in Unix form (slash-   |                           |
+   |                        | separated) for portability.    |                           |
+   |                        | Source files may be C, C++,    |                           |
+   |                        | SWIG (.i), platform-specific   |                           |
+   |                        | resource files, or whatever    |                           |
+   |                        | else is recognized by the      |                           |
+   |                        | :command:`build_ext` command   |                           |
+   |                        | as source for a Python         |                           |
+   |                        | extension.                     |                           |
+   +------------------------+--------------------------------+---------------------------+
+   | *include_dirs*         | list of directories to search  | string                    |
+   |                        | for C/C++ header files (in     |                           |
+   |                        | Unix form for portability)     |                           |
+   +------------------------+--------------------------------+---------------------------+
+   | *define_macros*        | list of macros to define; each | (string, string) tuple or |
+   |                        | macro is defined using a       | (name, ``None``)          |
+   |                        | 2-tuple ``(name, value)``,     |                           |
+   |                        | where *value* is               |                           |
+   |                        | either the string to define it |                           |
+   |                        | to or ``None`` to define it    |                           |
+   |                        | without a particular value     |                           |
+   |                        | (equivalent of ``#define FOO`` |                           |
+   |                        | in source or :option:`-DFOO`   |                           |
+   |                        | on Unix C compiler command     |                           |
+   |                        | line)                          |                           |
+   +------------------------+--------------------------------+---------------------------+
+   | *undef_macros*         | list of macros to undefine     | string                    |
+   |                        | explicitly                     |                           |
+   +------------------------+--------------------------------+---------------------------+
+   | *library_dirs*         | list of directories to search  | string                    |
+   |                        | for C/C++ libraries at link    |                           |
+   |                        | time                           |                           |
+   +------------------------+--------------------------------+---------------------------+
+   | *libraries*            | list of library names (not     | string                    |
+   |                        | filenames or paths) to link    |                           |
+   |                        | against                        |                           |
+   +------------------------+--------------------------------+---------------------------+
+   | *runtime_library_dirs* | list of directories to search  | string                    |
+   |                        | for C/C++ libraries at run     |                           |
+   |                        | time (for shared extensions,   |                           |
+   |                        | this is when the extension is  |                           |
+   |                        | loaded)                        |                           |
+   +------------------------+--------------------------------+---------------------------+
+   | *extra_objects*        | list of extra files to link    | string                    |
+   |                        | with (e.g. object files not    |                           |
+   |                        | implied by 'sources', static   |                           |
+   |                        | library that must be           |                           |
+   |                        | explicitly specified, binary   |                           |
+   |                        | resource files, etc.)          |                           |
+   +------------------------+--------------------------------+---------------------------+
+   | *extra_compile_args*   | any extra platform- and        | string                    |
+   |                        | compiler-specific information  |                           |
+   |                        | to use when compiling the      |                           |
+   |                        | source files in 'sources'. For |                           |
+   |                        | platforms and compilers where  |                           |
+   |                        | a command line makes sense,    |                           |
+   |                        | this is typically a list of    |                           |
+   |                        | command-line arguments, but    |                           |
+   |                        | for other platforms it could   |                           |
+   |                        | be anything.                   |                           |
+   +------------------------+--------------------------------+---------------------------+
+   | *extra_link_args*      | any extra platform- and        | string                    |
+   |                        | compiler-specific information  |                           |
+   |                        | to use when linking object     |                           |
+   |                        | files together to create the   |                           |
+   |                        | extension (or to create a new  |                           |
+   |                        | static Python interpreter).    |                           |
+   |                        | Similar interpretation as for  |                           |
+   |                        | 'extra_compile_args'.          |                           |
+   +------------------------+--------------------------------+---------------------------+
+   | *export_symbols*       | list of symbols to be exported | string                    |
+   |                        | from a shared extension. Not   |                           |
+   |                        | used on all platforms, and not |                           |
+   |                        | generally necessary for Python |                           |
+   |                        | extensions, which typically    |                           |
+   |                        | export exactly one symbol:     |                           |
+   |                        | ``init`` + extension_name.     |                           |
+   +------------------------+--------------------------------+---------------------------+
+   | *depends*              | list of files that the         | string                    |
+   |                        | extension depends on           |                           |
+   +------------------------+--------------------------------+---------------------------+
+   | *language*             | extension language (i.e.       | string                    |
+   |                        | ``'c'``, ``'c++'``,            |                           |
+   |                        | ``'objc'``). Will be detected  |                           |
+   |                        | from the source extensions if  |                           |
+   |                        | not provided.                  |                           |
+   +------------------------+--------------------------------+---------------------------+
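+
+   As a brief illustration, here is a hypothetical instance combining a few of
+   these keyword arguments (the module name, file names and macro are made up
+   for the example)::
+
+      from packaging.compiler.extension import Extension
+
+      ext = Extension(name='demo._speedups',
+                      sources=['src/speedups.c'],
+                      include_dirs=['include'],
+                      define_macros=[('WITH_FAST_PATH', None)],
+                      libraries=['m'])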
diff --git a/Doc/library/packaging.database.rst b/Doc/library/packaging.database.rst
new file mode 100644
--- /dev/null
+++ b/Doc/library/packaging.database.rst
@@ -0,0 +1,324 @@
+:mod:`packaging.database` --- Database of installed distributions
+=================================================================
+
+.. module:: packaging.database
+   :synopsis: Functions to query and manipulate installed distributions.
+
+
+This module provides an implementation of :PEP:`376`.  It was originally
+intended to land in :mod:`pkgutil`, but with the inclusion of Packaging in the
+standard library, it was thought best to include it in a submodule of
+:mod:`packaging`, leaving :mod:`pkgutil` to deal with imports.
+
+Installed Python distributions are represented by instances of
+:class:`Distribution`, or :class:`EggInfoDistribution` for legacy egg formats.
+Most functions also provide an extra argument ``use_egg_info`` to take legacy
+distributions into account.
+
+
+Classes representing installed distributions
+--------------------------------------------
+
+.. class:: Distribution(path)
+
+   Class representing an installed distribution.  It is different from
+   :class:`packaging.dist.Distribution` which holds the list of files, the
+   metadata and options during the run of a Packaging command.
+
+   Instantiate with the *path* to a ``.dist-info`` directory.  Instances can be
+   compared and sorted.  Other available methods are:
+
+   .. XXX describe how comparison works
+
+   .. method:: get_distinfo_file(path, binary=False)
+
+      Return a read-only file object for a file located at
+      :file:`{project-version}.dist-info/{path}`.  *path* should be a
+      ``'/'``-separated path relative to the ``.dist-info`` directory or an
+      absolute path; if it is an absolute path and doesn't start with the path
+      to the :file:`.dist-info` directory, a :class:`PackagingError` is raised.
+
+      If *binary* is ``True``, the file is opened in binary mode.
+
+   .. method:: get_resource_path(relative_path)
+
+      .. TODO
+
+   .. method:: list_distinfo_files(local=False)
+
+      Return an iterator over all files located in the :file:`.dist-info`
+      directory.  If *local* is ``True``, each returned path is transformed into
+      a local absolute path, otherwise the raw value found in the :file:`RECORD`
+      file is returned.
+
+   .. method:: list_installed_files(local=False)
+
+      Iterate over the files installed with the distribution and registered in
+      the :file:`RECORD` file and yield a tuple ``(path, md5, size)`` for each
+      line.  If *local* is ``True``, the returned path is transformed into a
+      local absolute path, otherwise the raw value is returned.
+
+      A local absolute path is an absolute path in which occurrences of ``'/'``
+      have been replaced by :data:`os.sep`.
+
+   .. method:: uses(path)
+
+      Check whether *path* was installed by this distribution (i.e. if the path
+      is present in the :file:`RECORD` file).  *path* can be a local absolute
+      path or a relative ``'/'``-separated path.  Returns a boolean.
+
+   Available attributes:
+
+   .. attribute:: metadata
+
+      Instance of :class:`packaging.metadata.Metadata` filled with the contents
+      of the :file:`{project-version}.dist-info/METADATA` file.
+
+   .. attribute:: name
+
+      Shortcut for ``metadata['Name']``.
+
+   .. attribute:: version
+
+      Shortcut for ``metadata['Version']``.
+
+   .. attribute:: requested
+
+      Boolean indicating whether this distribution was requested by the user
+      or automatically installed as a dependency.
+
+
+.. class:: EggInfoDistribution(path)
+
+   Class representing a legacy distribution.  It is compatible with distutils'
+   and setuptools' :file:`.egg-info` and :file:`.egg` files and directories.
+
+   .. FIXME should be named EggDistribution
+
+   Instantiate with the *path* to an egg file or directory.  Instances can be
+   compared and sorted.  Other available methods are:
+
+   .. method:: list_installed_files(local=False)
+
+   .. method:: uses(path)
+
+   Available attributes:
+
+   .. attribute:: metadata
+
+      Instance of :class:`packaging.metadata.Metadata` filled with the contents
+      of the :file:`{project-version}.egg-info/PKG-INFO` or
+      :file:`{project-version}.egg` file.
+
+   .. attribute:: name
+
+      Shortcut for ``metadata['Name']``.
+
+   .. attribute:: version
+
+      Shortcut for ``metadata['Version']``.
+
+
+Functions to work with the database
+-----------------------------------
+
+.. function:: get_distribution(name, use_egg_info=False, paths=None)
+
+   Return an instance of :class:`Distribution` or :class:`EggInfoDistribution`
+   for the first installed distribution matching *name*.  Egg distributions are
+   considered only if *use_egg_info* is true; if both a dist-info and an egg
+   file are found, the dist-info prevails.  The directories to be searched are
+   given in *paths*, which defaults to :data:`sys.path`.  Return ``None`` if no
+   matching distribution is found.
+
+   .. FIXME param should be named use_egg
+
+
+.. function:: get_distributions(use_egg_info=False, paths=None)
+
+   Return an iterator of :class:`Distribution` instances for all installed
+   distributions found in *paths* (defaults to :data:`sys.path`).  If
+   *use_egg_info* is true, also return instances of :class:`EggInfoDistribution`
+   for legacy distributions found.
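+
+   For instance, a small sketch listing the name and version of everything
+   installed on the system (the output obviously depends on the machine) could
+   look like this::
+
+      from packaging.database import get_distributions
+
+      for dist in get_distributions(use_egg_info=True):
+          print('%s %s' % (dist.name, dist.version))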
+
+
+.. function:: get_file_users(path)
+
+   Return an iterator over all distributions using *path*, a local absolute path
+   or a relative ``'/'``-separated path.
+
+   .. XXX does this work with prefixes or full file path only?
+
+
+.. function:: obsoletes_distribution(name, version=None, use_egg_info=False)
+
+   Return an iterator over all distributions that declare they obsolete *name*.
+   *version* is an optional argument to match only specific releases (see
+   :mod:`packaging.version`).  If *use_egg_info* is true, legacy egg
+   distributions will be considered as well.
+
+
+.. function:: provides_distribution(name, version=None, use_egg_info=False)
+
+   Return an iterator over all distributions that declare they provide *name*.
+   *version* is an optional argument to match only specific releases (see
+   :mod:`packaging.version`).  If *use_egg_info* is true, legacy egg
+   distributions will be considered as well.
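+
+   For example, assuming a distribution declaring ``Provides-Dist: truffles
+   (1.0)`` is installed (such as the *choxie* example shown later in this
+   document), a sketch like the following would report it::
+
+      from packaging.database import provides_distribution
+
+      for dist in provides_distribution('truffles', '1.0'):
+          print('truffles (1.0) is provided by %s %s' % (dist.name, dist.version))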
+
+
+Utility functions
+-----------------
+
+.. function:: distinfo_dirname(name, version)
+
+   Escape *name* and *version* into a filename-safe form and return the
+   directory name built from them, for example
+   :file:`{safename}-{safeversion}.dist-info`.  In *name*, runs of
+   non-alphanumeric characters are replaced with one ``'_'``; in *version*,
+   spaces become dots, and runs of other non-alphanumeric characters (except
+   dots) are replaced by one ``'-'``.
+
+   .. XXX wth spaces in version numbers?
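+
+   For illustration, applying these rules to a made-up name and version gives
+   the following result (derived from the description above)::
+
+      >>> from packaging.database import distinfo_dirname
+      >>> distinfo_dirname('choxie-extra', '2.0 beta')
+      'choxie_extra-2.0.beta.dist-info'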
+
+For performance reasons, the list of distributions is cached internally.
+Caching is enabled by default, but you can control it with these functions:
+
+.. function:: clear_cache()
+
+   Clear the cache.
+
+.. function:: disable_cache()
+
+   Disable the cache, without clearing it.
+
+.. function:: enable_cache()
+
+   Enable the internal cache, without clearing it.
+
+
+Examples
+--------
+
+Print all information about a distribution
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Given a path to a ``.dist-info`` distribution, we shall print out all
+information that can be obtained using functions provided in this module::
+
+   import sys
+   import packaging.database
+
+   path = input()
+   # first create the Distribution instance
+   try:
+       dist = packaging.database.Distribution(path)
+   except IOError:
+       sys.exit('No such distribution')
+
+   print('Information about %r' % dist.name)
+   print()
+
+   print('Files')
+   print('=====')
+   for path, md5, size in dist.list_installed_files():
+       print('* Path: %s' % path)
+       print('  Hash %s, Size: %s bytes' % (md5, size))
+   print()
+
+   print('Metadata')
+   print('========')
+   for key, value in dist.metadata.items():
+       print('%20s: %s' % (key, value))
+   print()
+
+   print('Extra')
+   print('=====')
+   if dist.requested:
+       print('* It was installed by user request')
+   else:
+       print('* It was installed as a dependency')
+
+If we save the script above as ``print_info.py``, we can use it to extract
+information from a :file:`.dist-info` directory.  By typing in the console:
+
+.. code-block:: sh
+
+   $ echo /tmp/choxie/choxie-2.0.0.9.dist-info | python3 print_info.py
+
+we get the following output:
+
+.. code-block:: none
+
+   Information about 'choxie'
+
+   Files
+   =====
+   * Path: ../tmp/distutils2/tests/fake_dists/choxie-2.0.0.9/truffles.py
+     Hash 5e052db6a478d06bad9ae033e6bc08af, Size: 111 bytes
+   * Path: ../tmp/distutils2/tests/fake_dists/choxie-2.0.0.9/choxie/chocolate.py
+     Hash ac56bf496d8d1d26f866235b95f31030, Size: 214 bytes
+   * Path: ../tmp/distutils2/tests/fake_dists/choxie-2.0.0.9/choxie/__init__.py
+     Hash 416aab08dfa846f473129e89a7625bbc, Size: 25 bytes
+   * Path: ../tmp/distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/INSTALLER
+     Hash d41d8cd98f00b204e9800998ecf8427e, Size: 0 bytes
+   * Path: ../tmp/distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA
+     Hash 696a209967fef3c8b8f5a7bb10386385, Size: 225 bytes
+   * Path: ../tmp/distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/REQUESTED
+     Hash d41d8cd98f00b204e9800998ecf8427e, Size: 0 bytes
+   * Path: ../tmp/distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/RECORD
+     Hash None, Size: None bytes
+
+   Metadata
+   ========
+       Metadata-Version: 1.2
+                   Name: choxie
+                Version: 2.0.0.9
+               Platform: []
+     Supported-Platform: UNKNOWN
+                Summary: Chocolate with a kick!
+            Description: UNKNOWN
+               Keywords: []
+              Home-page: UNKNOWN
+                 Author: UNKNOWN
+           Author-email: UNKNOWN
+             Maintainer: UNKNOWN
+       Maintainer-email: UNKNOWN
+                License: UNKNOWN
+             Classifier: []
+           Download-URL: UNKNOWN
+         Obsoletes-Dist: ['truffles (<=0.8,>=0.5)', 'truffles (<=0.9,>=0.6)']
+            Project-URL: []
+          Provides-Dist: ['truffles (1.0)']
+          Requires-Dist: ['towel-stuff (0.1)']
+        Requires-Python: UNKNOWN
+      Requires-External: []
+
+   Extra
+   =====
+   * It was installed as a dependency
+
+
+Find out obsoleted distributions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Now we tackle a different problem: we are interested in finding out which
+distributions have been obsoleted.  This can easily be done as follows::
+
+  import packaging.database
+
+  # iterate over all distributions in the system
+  for dist in packaging.database.get_distributions():
+      name, version = dist.name, dist.version
+      # find out which distributions obsolete this name/version combination
+      replacements = list(
+          packaging.database.obsoletes_distribution(name, version))
+      if replacements:
+          print('%r %s is obsoleted by' % (name, version),
+                ', '.join(repr(r.name) for r in replacements))
+
+This is how the output might look:
+
+.. code-block:: none
+
+  'strawberry' 0.6 is obsoleted by 'choxie'
+  'grammar' 1.0a4 is obsoleted by 'towel-stuff'
diff --git a/Doc/library/packaging.depgraph.rst b/Doc/library/packaging.depgraph.rst
new file mode 100644
--- /dev/null
+++ b/Doc/library/packaging.depgraph.rst
@@ -0,0 +1,199 @@
+:mod:`packaging.depgraph` --- Dependency graph builder
+======================================================
+
+.. module:: packaging.depgraph
+   :synopsis: Graph builder for dependencies between releases.
+
+
+This module provides the means to analyse the dependencies between various
+distributions and to create a graph representing these dependency relationships.
+In this document, "distribution" refers to an instance of
+:class:`packaging.database.Distribution` or
+:class:`packaging.database.EggInfoDistribution`.
+
+.. XXX terminology problem with dist vs. release: dists are installed, but deps
+   use releases
+
+.. XXX explain how to use it with dists not installed: Distribution can only be
+   instantiated with a path, but this module is useful for remote dist too
+
+.. XXX functions should accept and return iterators, not lists
+
+
+The :class:`DependencyGraph` class
+----------------------------------
+
+.. class:: DependencyGraph
+
+   Represent a dependency graph between releases.  The nodes are distribution
+   instances; the edges model dependencies.  An edge from ``a`` to ``b`` means
+   that ``a`` depends on ``b``.
+
+   .. method:: add_distribution(distribution)
+
+      Add *distribution* to the graph.
+
+   .. method:: add_edge(x, y, label=None)
+
+      Add an edge from distribution *x* to distribution *y* with the given
+      *label* (string).
+
+   .. method:: add_missing(distribution, requirement)
+
+      Add a missing *requirement* (string) for the given *distribution*.
+
+   .. method:: repr_node(dist, level=1)
+
+      Print a subgraph starting from *dist*.  *level* gives the depth of the
+      subgraph.
+
+   Direct access to the graph nodes and edges is provided through these
+   attributes:
+
+   .. attribute:: adjacency_list
+
+      Dictionary mapping distributions to a list of ``(other, label)`` tuples
+      where ``other`` is a distribution and the edge is labeled with ``label``
+      (i.e. the version specifier, if one was provided).
+
+   .. attribute:: reverse_list
+
+      Dictionary mapping distributions to a list of predecessors.  This allows
+      efficient traversal.
+
+   .. attribute:: missing
+
+      Dictionary mapping distributions to a list of requirements that were not
+      provided by any distribution.
+
+
+Auxiliary functions
+-------------------
+
+.. function:: dependent_dists(dists, dist)
+
+   Recursively generate a list of distributions from *dists* that are dependent
+   on *dist*.
+
+   .. XXX what does member mean here: "dist is a member of *dists* for which we
+      are interested"
+
+.. function:: generate_graph(dists)
+
+   Generate a :class:`DependencyGraph` from the given list of distributions.
+
+   .. XXX make this alternate constructor a DepGraph classmethod or rename;
+      'generate' can suggest it creates a file or an image, use 'make'
+
+.. function:: graph_to_dot(graph, f, skip_disconnected=True)
+
+   Write a DOT output for the graph to the file-like object *f*.
+
+   If *skip_disconnected* is true, all distributions that are not dependent on
+   any other distribution are skipped.
+
+   .. XXX why is this not a DepGraph method?
+
+
+Example Usage
+-------------
+
+Depict all dependencies in the system
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+First, we shall generate a graph of all the distributions on the system
+and then create an image out of it using the tools provided by
+`Graphviz <http://www.graphviz.org/>`_::
+
+   from packaging.database import get_distributions
+   from packaging.depgraph import generate_graph
+
+   dists = list(get_distributions())
+   graph = generate_graph(dists)
+
+It would be interesting to print out the missing requirements.  This can be done
+as follows::
+
+   for dist, reqs in graph.missing.items():
+       if reqs:
+           reqs = ', '.join(repr(req) for req in reqs)
+           print('Missing dependencies for %r: %s' % (dist.name, reqs))
+
+Example output is:
+
+.. code-block:: none
+
+   Missing dependencies for 'TurboCheetah': 'Cheetah'
+   Missing dependencies for 'TurboGears': 'ConfigObj', 'DecoratorTools', 'RuleDispatch'
+   Missing dependencies for 'jockey': 'PyKDE4.kdecore', 'PyKDE4.kdeui', 'PyQt4.QtCore', 'PyQt4.QtGui'
+   Missing dependencies for 'TurboKid': 'kid'
+   Missing dependencies for 'TurboJson': 'DecoratorTools', 'RuleDispatch'
+
+Now, we proceed with generating a graphical representation of the graph. First
+we write it to a file, and then we generate a PNG image using the
+:program:`dot` command-line tool::
+
+   from packaging.depgraph import graph_to_dot
+   with open('output.dot', 'w') as f:
+       # only show the interesting distributions, skipping the disconnected ones
+       graph_to_dot(graph, f, skip_disconnected=True)
+
+We can create the final picture using:
+
+.. code-block:: sh
+
+   $ dot -Tpng output.dot > output.png
+
+An example result is:
+
+.. figure:: depgraph-output.png
+   :alt: Example PNG output from packaging.depgraph and dot
+
+If you want to include egg distributions as well, then the code requires only
+one change, namely the line::
+
+   dists = list(packaging.database.get_distributions())
+
+has to be replaced with::
+
+   dists = list(packaging.database.get_distributions(use_egg_info=True))
+
+On many platforms, a richer graph is obtained because at the moment most
+distributions are provided in the egg format rather than the new standard
+``.dist-info`` format.
+
+.. XXX missing image
+
+   An example of a more involved graph for illustrative reasons can be seen
+   here:
+
+   .. image:: depgraph_big.png
+
+
+List all dependent distributions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+We will list all distributions that are dependent on some given distribution.
+This time, egg distributions will be considered as well::
+
+   import sys
+   from packaging.database import get_distribution, get_distributions
+   from packaging.depgraph import dependent_dists
+
+   dists = list(get_distributions(use_egg_info=True))
+   dist = get_distribution('bacon', use_egg_info=True)
+   if dist is None:
+       sys.exit('No such distribution in the system')
+
+   deps = dependent_dists(dists, dist)
+   deps = ', '.join(repr(x.name) for x in deps)
+   print('Distributions depending on %r: %s' % (dist.name, deps))
+
+This is an example of the output:
+
+.. with the dependency relationships as in the previous section
+   (depgraph_big)
+
+.. code-block:: none
+
+   Distributions depending on 'bacon': 'towel-stuff', 'choxie', 'grammar'
diff --git a/Doc/library/packaging.dist.rst b/Doc/library/packaging.dist.rst
new file mode 100644
--- /dev/null
+++ b/Doc/library/packaging.dist.rst
@@ -0,0 +1,102 @@
+:mod:`packaging.dist` --- The Distribution class
+================================================
+
+.. module:: packaging.dist
+   :synopsis: Core Distribution class.
+
+
+This module provides the :class:`Distribution` class, which represents the
+module distribution being built/packaged/distributed/installed.
+
+.. class:: Distribution(arguments)
+
+   A :class:`Distribution` describes how to build, package, distribute and
+   install a Python project.
+
+   The arguments accepted by the constructor are laid out in the following
+   table.  Some of them will end up in a metadata object; the rest will become
+   data attributes of the :class:`Distribution` instance.
+
+   .. TODO improve constructor to take a Metadata object + named params?
+      (i.e. Distribution(metadata, cmdclass, py_modules, etc)
+   .. TODO also remove obsolete(?) script_name, etc. parameters?  see what
+      py2exe and other tools need
+
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | argument name      | value                          | type                                                        |
+   +====================+================================+=============================================================+
+   | *name*             | The name of the project        | string                                                      |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *version*          | The version number of the      | See :mod:`packaging.version`                                |
+   |                    | release                        |                                                             |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *summary*          | A single line describing the   | a string                                                    |
+   |                    | project                        |                                                             |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *description*      | Longer description of the      | a string                                                    |
+   |                    | project                        |                                                             |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *author*           | The name of the project author | a string                                                    |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *author_email*     | The email address of the       | a string                                                    |
+   |                    | project author                 |                                                             |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *maintainer*       | The name of the current        | a string                                                    |
+   |                    | maintainer, if different from  |                                                             |
+   |                    | the author                     |                                                             |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *maintainer_email* | The email address of the       |                                                             |
+   |                    | current maintainer, if         |                                                             |
+   |                    | different from the author      |                                                             |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *home_page*        | A URL for the project          | a URL                                                       |
+   |                    | (homepage)                     |                                                             |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *download_url*     | A URL to download the project  | a URL                                                       |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *packages*         | A list of Python packages that | a list of strings                                           |
+   |                    | packaging will manipulate      |                                                             |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *py_modules*       | A list of Python modules that  | a list of strings                                           |
+   |                    | packaging will manipulate      |                                                             |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *scripts*          | A list of standalone scripts   | a list of strings                                           |
+   |                    | to be built and installed      |                                                             |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *ext_modules*      | A list of Python extensions to | A list of instances of                                      |
+   |                    | be built                       | :class:`packaging.compiler.extension.Extension`             |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *classifiers*      | A list of categories for the   | The list of available                                       |
+   |                    | distribution                   | categorizations is at                                       |
+   |                    |                                | http://pypi.python.org/pypi?:action=list_classifiers.       |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *distclass*        | the :class:`Distribution`      | A subclass of                                               |
+   |                    | class to use                   | :class:`packaging.dist.Distribution`                        |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *script_name*      | The name of the setup.py       | a string                                                    |
+   |                    | script - defaults to           |                                                             |
+   |                    | ``sys.argv[0]``                |                                                             |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *script_args*      | Arguments to supply to the     | a list of strings                                           |
+   |                    | setup script                   |                                                             |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *options*          | default options for the setup  | a string                                                    |
+   |                    | script                         |                                                             |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *license*          | The license for the            | a string; should be used when there is no suitable License  |
+   |                    | distribution                   | classifier, or to specify a classifier                      |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *keywords*         | Descriptive keywords           | a list of strings; used by catalogs                         |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *platforms*        | Platforms compatible with this | a list of strings; should be used when there is no          |
+   |                    | distribution                   | suitable Platform classifier                                |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *cmdclass*         | A mapping of command names to  | a dictionary                                                |
+   |                    | :class:`Command` subclasses    |                                                             |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *data_files*       | A list of data files to        | a list                                                      |
+   |                    | install                        |                                                             |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *package_dir*      | A mapping of Python packages   | a dictionary                                                |
+   |                    | to directory names             |                                                             |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
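+
+   As a rough sketch, and assuming the constructor accepts a dictionary of the
+   attributes listed above (as the legacy Distutils ``Distribution`` class
+   does), an instance for a hypothetical project could be created like this::
+
+      from packaging.dist import Distribution
+
+      dist = Distribution({'name': 'FooBar',
+                           'version': '0.1',
+                           'summary': 'A demonstration project',
+                           'py_modules': ['foobar']})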
diff --git a/Doc/library/packaging.fancy_getopt.rst b/Doc/library/packaging.fancy_getopt.rst
new file mode 100644
--- /dev/null
+++ b/Doc/library/packaging.fancy_getopt.rst
@@ -0,0 +1,75 @@
+:mod:`packaging.fancy_getopt` --- Wrapper around the getopt module
+==================================================================
+
+.. module:: packaging.fancy_getopt
+   :synopsis: Additional getopt functionality.
+
+
+.. warning::
+   This module is deprecated and will be replaced with :mod:`optparse`.
+
+This module provides a wrapper around the standard :mod:`getopt` module that
+provides the following additional features:
+
+* short and long options are tied together
+
+* options have help strings, so :func:`fancy_getopt` could potentially create a
+  complete usage summary
+
+* options set attributes of a passed-in object
+
+* boolean options can have "negative aliases" --- e.g. if :option:`--quiet` is
+  the "negative alias" of :option:`--verbose`, then :option:`--quiet` on the
+  command line sets *verbose* to false.
+
+.. function:: fancy_getopt(options, negative_opt, object, args)
+
+   Wrapper function. *options* is a list of ``(long_option, short_option,
+   help_string)`` 3-tuples as described in the constructor for
+   :class:`FancyGetopt`.  *negative_opt* should be a dictionary mapping option
+   names to option names; both the key and value should be in the *options*
+   list.  *object* is an object which will be used to store values (see the
+   :meth:`getopt` method of the :class:`FancyGetopt` class).  *args* is the
+   argument list; ``sys.argv[1:]`` is used if you pass ``None`` as *args*.
+
+
+.. class:: FancyGetopt([option_table=None])
+
+   The *option_table* is a list of 3-tuples: ``(long_option, short_option,
+   help_string)``.
+
+   If an option takes an argument, its *long_option* should have ``'='`` appended;
+   *short_option* should just be a single character, no ``':'`` in any case.
+   *short_option* should be ``None`` if a *long_option* doesn't have a
+   corresponding *short_option*. All option tuples must have long options.
+
+The :class:`FancyGetopt` class provides the following methods:
+
+
+.. method:: FancyGetopt.getopt([args=None, object=None])
+
+   Parse command-line options in args. Store as attributes on *object*.
+
+   If *args* is ``None`` or not supplied, uses ``sys.argv[1:]``.  If *object* is
+   ``None`` or not supplied, creates a new :class:`OptionDummy` instance, stores
+   option values there, and returns a tuple ``(args, object)``.  If *object* is
+   supplied, it is modified in place and :func:`getopt` just returns *args*; in
+   both cases, the returned *args* is a modified copy of the passed-in *args* list,
+   which is left untouched.
+
+   .. TODO and args returned are?
+
+
+.. method:: FancyGetopt.get_option_order()
+
+   Returns the list of ``(option, value)`` tuples processed by the previous run
+   of :meth:`getopt`.  Raises :exc:`RuntimeError` if :meth:`getopt` hasn't been
+   called yet.
+
+
+.. method:: FancyGetopt.generate_help([header=None])
+
+   Generate help text (a list of strings, one per suggested line of output) from
+   the option table for this :class:`FancyGetopt` object.
+
+   If supplied, prints the supplied *header* at the top of the help.
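+
+Putting these pieces together, here is a minimal sketch (the option names are
+invented, and the behavior noted in the comments assumes the semantics
+inherited from the old ``distutils.fancy_getopt`` module)::
+
+   from packaging.fancy_getopt import FancyGetopt
+
+   parser = FancyGetopt([('verbose', 'v', 'run verbosely'),
+                         ('name=', 'n', 'name to greet')])
+   # no object passed in: getopt() returns (remaining args, OptionDummy)
+   args, opts = parser.getopt(['-v', '--name', 'world', 'extra-arg'])
+   print(args)        # leftover positional arguments
+   print(opts.name)   # value parsed for the --name option
+   for line in parser.generate_help('Options for the greeter:'):
+       print(line)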
diff --git a/Doc/library/packaging.install.rst b/Doc/library/packaging.install.rst
new file mode 100644
--- /dev/null
+++ b/Doc/library/packaging.install.rst
@@ -0,0 +1,112 @@
+:mod:`packaging.install` --- Installation tools
+===============================================
+
+.. module:: packaging.install
+   :synopsis: Download and installation building blocks
+
+
+Packaging provides a set of tools to deal with downloads and installation of
+distributions.  Their role is to download the distribution from indexes, resolve
+the dependencies, and provide a safe way to install distributions.  An operation
+that fails will cleanly roll back, not leave half-installed distributions on the
+system.  Here's the basic process followed:
+
+#. Move all distributions that will be removed to a temporary location.
+
+#. Install all the distributions that will be installed in a temporary location.
+
+#. If the installation fails, move the saved distributions back to their
+   location and delete the installed distributions.
+
+#. Otherwise, move the installed distributions to the right location and delete
+   the temporary locations.
+
+This is a higher-level module built on :mod:`packaging.database` and
+:mod:`packaging.pypi`.
+
+
+Public functions
+----------------
+
+.. function:: get_infos(requirements, index=None, installed=None, \
+                        prefer_final=True)
+
+   Return information about what's going to be installed and upgraded.
+   *requirements* is a string containing the requirements for this
+   project, for example ``'FooBar 1.1'`` or ``'BarBaz (<1.2)'``.
+
+   .. XXX are requirements comma-separated?
+
+   If you want to use an index other than the main PyPI, give its URI as the
+   *index* argument.
+
+   *installed* is a list of already installed distributions used to find
+   satisfied dependencies, obsoleted distributions and possible conflicts.
+
+   By default, alpha, beta and candidate versions are not picked up.  Set
+   *prefer_final* to false to accept them too.
+
+   The results are returned in a dictionary containing all the information
+   needed to perform installation of the requirements with the
+   :func:`install_from_infos` function:
+
+   >>> get_infos("FooBar (<=1.2)")
+   {'install': [<FooBar 1.1>], 'remove': [], 'conflict': []}
+
+   .. TODO should return tuple or named tuple, not dict
+   .. TODO use "predicate" or "requirement" consistently in version and here
+   .. FIXME "info" cannot be plural in English, s/infos/info/
+
+
+.. function:: install(project)
+
+
+.. function:: install_dists(dists, path, paths=None)
+
+   Safely install all distributions provided in *dists* into *path*.  *paths* is
+   a list of paths where already-installed distributions will be looked for to
+   find satisfied dependencies and conflicts (default: :data:`sys.path`).
+   Returns a list of installed dists.
+
+   .. FIXME dists are instances of what?
+
+
+.. function:: install_from_infos(install_path=None, install=[], remove=[], \
+                                 conflicts=[], paths=None)
+
+   Safely install and remove given distributions.  This function is designed to
+   work with the return value of :func:`get_infos`: *install*, *remove* and
+   *conflicts* should be lists of distributions returned by :func:`get_infos`.
+   If *install* is not empty, *install_path* must be given to specify the path
+   where the distributions should be installed.  *paths* is a list of paths
+   where already-installed distributions will be looked for (default:
+   :data:`sys.path`).
+
+   This function is a very basic installer; if *conflicts* is not empty, the
+   system will be in a conflicting state after the function completes.  It is a
+   building block for more sophisticated installers with conflict resolution
+   systems.
+
+   .. TODO document typical value for install_path
+   .. TODO document integration with default schemes, esp. user site-packages
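+
+   As a hedged sketch of how this fits together with :func:`get_infos` (the
+   requirement string and the temporary install path are made up)::
+
+      from tempfile import mkdtemp
+
+      from packaging.install import get_infos, install_from_infos
+
+      infos = get_infos('FooBar (<=1.2)')
+      if not infos['conflict']:
+          install_from_infos(install_path=mkdtemp(),
+                             install=infos['install'],
+                             remove=infos['remove'])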
+
+
+.. function:: install_local_project(path)
+
+   Install a distribution from a source directory, which must contain either a
+   Packaging-compliant :file:`setup.cfg` file or a legacy Distutils
+   :file:`setup.py` script (in which case Distutils will be used under the hood
+   to perform the installation).
+
+
+.. function::  remove(project_name, paths=None, auto_confirm=True)
+
+   Remove one distribution from the system.
+
+   .. FIXME this is the only function using "project" instead of dist/release
+
+..
+   Example usage
+   --------------
+
+   Get the scheme of what's gonna be installed if we install "foobar":
diff --git a/Doc/library/packaging.metadata.rst b/Doc/library/packaging.metadata.rst
new file mode 100644
--- /dev/null
+++ b/Doc/library/packaging.metadata.rst
@@ -0,0 +1,122 @@
+:mod:`packaging.metadata` --- Metadata handling
+===============================================
+
+.. module:: packaging.metadata
+   :synopsis: Class holding the metadata of a release.
+
+
+.. TODO use sphinx-autogen to generate basic doc from the docstrings
+
+.. class:: Metadata
+
+   This class can read and write metadata files complying with any of the
+   defined versions: 1.0 (:PEP:`241`), 1.1 (:PEP:`314`) and 1.2 (:PEP:`345`).  It
+   implements methods to parse Metadata files and write them, and a mapping
+   interface to its contents.
+
+   The :PEP:`345` implementation supports the micro-language for the environment
+   markers, and displays warnings when versions that are supposed to be
+   :PEP:`386`-compliant violate the specification.
+
+
+Reading metadata
+----------------
+
+The :class:`Metadata` class can be instantiated
+with the path of the metadata file, and provides a dict-like interface to the
+values::
+
+   >>> from packaging.metadata import Metadata
+   >>> metadata = Metadata('PKG-INFO')
+   >>> metadata.keys()[:5]
+   ('Metadata-Version', 'Name', 'Version', 'Platform', 'Supported-Platform')
+   >>> metadata['Name']
+   'CLVault'
+   >>> metadata['Version']
+   '0.5'
+   >>> metadata['Requires-Dist']
+   ["pywin32; sys.platform == 'win32'", "Sphinx"]
+
+
+The fields that support environment markers can be automatically ignored if
+the object is instantiated using the ``platform_dependent`` option.  In that
+case, :class:`Metadata` will interpret the markers and automatically remove
+the fields that do not apply to the running environment.  Here's an example
+under Mac OS X; the win32 dependency we saw earlier is ignored::
+
+   >>> from packaging.metadata import Metadata
+   >>> metadata = Metadata('PKG-INFO', platform_dependent=True)
+   >>> metadata['Requires-Dist']
+   ['Sphinx']
+
+
+If you want to provide your own execution context, for example to test the
+metadata under an environment that is not the current one, you can pass your
+own values with the ``execution_context`` option, a dict containing one or
+more of the keys that the marker micro-language expects.
+
+Here's an example, simulating a win32 environment::
+
+   >>> from packaging.metadata import Metadata
+   >>> context = {'sys.platform': 'win32'}
+   >>> metadata = Metadata('PKG-INFO', platform_dependent=True,
+   ...                     execution_context=context)
+   ...
+   >>> metadata['Requires-Dist'] = ["pywin32; sys.platform == 'win32'",
+   ...                              "Sphinx"]
+   ...
+   >>> metadata['Requires-Dist']
+   ['pywin32', 'Sphinx']
+
+
+Writing metadata
+----------------
+
+Writing metadata can be done using the ``write`` method::
+
+   >>> metadata.write('/to/my/PKG-INFO')
+
+The class will pick the best version for the metadata, depending on the values
+provided. If all the values provided exist in all versions, the class will
+use :attr:`PKG_INFO_PREFERRED_VERSION`.  It is set by default to 1.0, the most
+widespread version.
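+
+For example, a minimal metadata file could be written from scratch like this
+(the project name, version and output path are made up)::
+
+   >>> from packaging.metadata import Metadata
+   >>> metadata = Metadata()
+   >>> metadata['Name'] = 'FooBar'
+   >>> metadata['Version'] = '0.5'
+   >>> metadata.write('PKG-INFO')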
+
+
+Conflict checking and best version
+----------------------------------
+
+Some fields in :PEP:`345` have to comply with the version number specification
+defined in :PEP:`386`.  When they don't comply, a warning is emitted::
+
+   >>> from packaging.metadata import Metadata
+   >>> metadata = Metadata()
+   >>> metadata['Requires-Dist'] = ['Funky (Groovie)']
+   "Funky (Groovie)" is not a valid predicate
+   >>> metadata['Requires-Dist'] = ['Funky (1.2)']
+
+See also :mod:`packaging.version`.
+
+
+.. TODO talk about check()
+
+
+:mod:`packaging.markers` --- Environment markers
+================================================
+
+.. module:: packaging.markers
+   :synopsis: Micro-language for environment markers
+
+
+This is an implementation of environment markers `as defined in PEP 345
+<http://www.python.org/dev/peps/pep-0345/#environment-markers>`_.  It is used
+for some metadata fields.
+
+.. function:: interpret(marker, execution_context=None)
+
+   Interpret a marker and return a boolean result depending on the environment.
+   Example:
+
+      >>> interpret("python_version > '1.0'")
+      True
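+
+   A hypothetical execution context can also be given, to test a marker
+   against an environment other than the current one; the keys are the same as
+   those used in the :mod:`packaging.metadata` examples above::
+
+      >>> interpret("sys.platform == 'win32'", {'sys.platform': 'linux'})
+      False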
diff --git a/Doc/library/packaging.pypi.dist.rst b/Doc/library/packaging.pypi.dist.rst
new file mode 100644
--- /dev/null
+++ b/Doc/library/packaging.pypi.dist.rst
@@ -0,0 +1,114 @@
+:mod:`packaging.pypi.dist` --- Classes representing query results
+=================================================================
+
+.. module:: packaging.pypi.dist
+   :synopsis: Classes representing the results of queries to indexes.
+
+
+Information coming from the indexes is held in instances of the classes defined
+in this module.
+
+Keep in mind that each project (e.g. FooBar) can have several releases
+(e.g. 1.1, 1.2, 1.3), and each of these releases can be provided in multiple
+distributions (e.g. a source distribution, a binary one, etc.).
+
+
+ReleaseInfo
+-----------
+
+Each release has a project name, version, metadata, and related distributions.
+
+This information is stored in :class:`ReleaseInfo`
+objects.
+
+.. class:: ReleaseInfo
+
+
+DistInfo
+---------
+
+:class:`DistInfo` is a simple class that contains
+information related to distributions; mainly the URLs where distributions
+can be found.
+
+.. class:: DistInfo
+
+
+ReleasesList
+------------
+
+The :mod:`~packaging.pypi.dist` module provides a class which works with lists
+of :class:`ReleaseInfo` objects; it is used to filter and order results.
+
+.. class:: ReleasesList
+
+
+Example usage
+-------------
+
+Build a list of releases and order them
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Assuming we have a list of releases::
+
+   >>> from packaging.pypi.dist import ReleasesList, ReleaseInfo
+   >>> fb10 = ReleaseInfo("FooBar", "1.0")
+   >>> fb11 = ReleaseInfo("FooBar", "1.1")
+   >>> fb11a = ReleaseInfo("FooBar", "1.1a1")
+   >>> releases = ReleasesList("FooBar", [fb11, fb11a, fb10])
+   >>> releases.sort_releases()
+   >>> releases.get_versions()
+   ['1.1', '1.1a1', '1.0']
+   >>> releases.add_release("1.2a1")
+   >>> releases.get_versions()
+   ['1.1', '1.1a1', '1.0', '1.2a1']
+   >>> releases.sort_releases()
+   >>> releases.get_versions()
+   ['1.2a1', '1.1', '1.1a1', '1.0']
+   >>> releases.sort_releases(prefer_final=True)
+   >>> releases.get_versions()
+   ['1.1', '1.0', '1.2a1', '1.1a1']
+
+
+Add distribution related information to releases
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+It's easy to add distribution information to releases::
+
+   >>> from packaging.pypi.dist import ReleasesList, ReleaseInfo
+   >>> r = ReleaseInfo("FooBar", "1.0")
+   >>> r.add_distribution("sdist", url="http://example.org/foobar-1.0.tar.gz")
+   >>> r.dists
+   {'sdist': FooBar 1.0 sdist}
+   >>> r['sdist'].url
+   {'url': 'http://example.org/foobar-1.0.tar.gz', 'hashname': None, 'hashval':
+   None, 'is_external': True}
+
+
+Getting attributes from the dist objects
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+To abstract querying information returned from the indexes, attributes and
+release information can be retrieved directly from dist objects.
+
+For instance, if you have a release instance whose metadata attribute has not
+been filled in yet (it is initially set to ``None``), it can be fetched using
+the ``fetch_metadata`` method::
+
+   >>> r = ReleaseInfo("FooBar", "1.1")
+   >>> print(r.metadata)
+   None
+   >>> r.fetch_metadata()
+   <Metadata for FooBar 1.1>
+
+.. XXX add proper roles to these constructs
+
+
+It's possible to retrieve a project's releases (`fetch_releases`),
+metadata (`fetch_metadata`) and distributions (`fetch_distributions`) using
+a similar workflow.
+
+.. XXX what is possible?
+
+Internally, this is possible because while retrieving information about
+projects, releases or distributions, a reference to the client used is
+stored and can be accessed using the objects' ``_index`` attribute.
diff --git a/Doc/library/packaging.pypi.rst b/Doc/library/packaging.pypi.rst
new file mode 100644
--- /dev/null
+++ b/Doc/library/packaging.pypi.rst
@@ -0,0 +1,53 @@
+:mod:`packaging.pypi` --- Interface to projects indexes
+=======================================================
+
+.. module:: packaging.pypi
+   :synopsis: Low-level and high-level APIs to query projects indexes.
+
+
+Packaging queries PyPI to get information about projects or download them.  The
+low-level facilities used internally are also part of the public API designed to
+be used by other tools.
+
+The :mod:`packaging.pypi` package provides those facilities, which can be
+used to access information about Python projects registered at indexes, the
+main one being PyPI, located at http://pypi.python.org/.
+
+There are two ways to retrieve data from these indexes: a screen-scraping
+interface called the "simple API", and XML-RPC.  The first one uses HTML pages
+located under http://pypi.python.org/simple/, the second one makes XML-RPC
+requests to http://pypi.python.org/pypi/.  All functions and classes also work
+with other indexes such as mirrors, which typically implement only the simple
+interface.
+
+Packaging provides a class that wraps both APIs to provide full query and
+download functionality: :class:`packaging.pypi.client.ClientWrapper`.  If you
+want more control, you can use the underlying classes
+:class:`packaging.pypi.simple.Crawler` and :class:`packaging.pypi.xmlrpc.Client`
+to connect to one specific interface.
+
+
+:mod:`packaging.pypi.client` --- High-level query API
+=====================================================
+
+.. module:: packaging.pypi.client
+   :synopsis: Wrapper around :mod:`packaging.pypi.xmlrpc` and
+              :mod:`packaging.pypi.simple` to query indexes.
+
+
+This module provides a high-level API to query indexes and search
+for releases and distributions. The aim of this module is to choose the best
+way to query the API automatically, either using XML-RPC or the simple index,
+with a preference toward the latter.
+
+.. class:: ClientWrapper
+
+   Instances of this class will use the simple interface or XML-RPC requests to
+   query indexes and return :class:`packaging.pypi.dist.ReleaseInfo` and
+   :class:`packaging.pypi.dist.ReleasesList` objects.
+
+   .. method:: find_projects
+
+   .. method:: get_release
+
+   .. method:: get_releases
diff --git a/Doc/library/packaging.pypi.simple.rst b/Doc/library/packaging.pypi.simple.rst
new file mode 100644
--- /dev/null
+++ b/Doc/library/packaging.pypi.simple.rst
@@ -0,0 +1,157 @@
+:mod:`packaging.pypi.simple` --- Crawler using the PyPI "simple" interface
+==========================================================================
+
+.. module:: packaging.pypi.simple
+   :synopsis: Crawler using the screen-scraping "simple" interface to fetch info
+              and distributions.
+
+
+:mod:`packaging.pypi.simple` can process Python Package Indexes and provides
+useful information about distributions.  It can also crawl local indexes, for
+instance.
+
+You should use :mod:`packaging.pypi.simple` for:
+
+    * Searching distributions by name and version.
+    * Processing index external pages.
+    * Downloading distributions by name and version.
+
+It should not be used for:
+
+    * Tasks that require processing too much of the index (like "finding all
+      distributions with a specific version, no matter the name").
+
+
+API
+---
+
+.. class:: Crawler
+
+
+Usage Examples
+---------------
+
+To help you understand how to use the :class:`Crawler` class, here are some
+basic usage examples.
+
+Request the simple index to get a specific distribution
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Suppose you want to scan an index to get a list of releases for the "foobar"
+project.  You can use the ``get_releases`` method for that.  It will browse
+the project page and return :class:`ReleaseInfo` objects for each download
+link found. ::
+
+   >>> from packaging.pypi.simple import Crawler
+   >>> crawler = Crawler()
+   >>> crawler.get_releases("FooBar")
+   [<ReleaseInfo "Foobar 1.1">, <ReleaseInfo "Foobar 1.2">]
+
+
+Note that you can also ask the client for specific versions, using version
+specifiers (described in `PEP 345
+<http://www.python.org/dev/peps/pep-0345/#version-specifiers>`_)::
+
+   >>> client.get_releases("FooBar < 1.2")
+   [<ReleaseInfo "FooBar 1.1">, ]
+
+
+``get_releases`` returns a list of :class:`ReleaseInfo` objects, but you can
+also get the best release that fulfills your requirements, using
+``get_release``::
+
+   >>> client.get_release("FooBar < 1.2")
+   <ReleaseInfo "FooBar 1.1">
+
+
+Download distributions
+^^^^^^^^^^^^^^^^^^^^^^
+
+Since it can get the URLs of distributions provided by PyPI, the `Crawler`
+client can also download the distributions and put them in a temporary
+destination for you::
+
+   >>> client.download("foobar")
+   /tmp/temp_dir/foobar-1.2.tar.gz
+
+
+You can also specify the directory you want to download to::
+
+   >>> client.download("foobar", "/path/to/my/dir")
+   /path/to/my/dir/foobar-1.2.tar.gz
+
+
+While downloading, the MD5 hash of the archive is checked; if it does not
+match, the download is retried once, and if that fails too,
+`MD5HashDoesNotMatchError` is raised.
+
+Internally, it is not the Crawler that downloads the distributions, but the
+`DistributionInfo` class.  Please refer to its documentation for more details.
+
+
+Following PyPI external links
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The default behavior for packaging is to *not* follow the links provided
+by HTML pages in the "simple index" when looking for distribution downloads.
+
+It's possible to tell the crawler to follow external links by setting the
+`follow_externals` attribute, at instantiation time or afterwards::
+
+   >>> client = Crawler(follow_externals=True)
+
+or ::
+
+   >>> client = Crawler()
+   >>> client.follow_externals = True
+
+
+Working with external indexes, and mirrors
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The default `Crawler` behavior is to rely on the Python Package Index stored
+on PyPI (http://pypi.python.org/simple).
+
+If you need to work with a local index, or with private indexes, you can
+specify it using the `index_url` parameter::
+
+   >>> client = Crawler(index_url="file://filesystem/path/")
+
+or ::
+
+   >>> client = Crawler(index_url="http://some.specific.url/")
+
+
+You can also specify mirrors to fall back on in case the first index URL you
+provided does not respond, or does not respond correctly.  The default
+behavior of `Crawler` is to use the list provided by the Python.org DNS
+records, as described in :PEP:`381` about the mirroring infrastructure.
+
+If you don't want to rely on these, you can give the list of mirrors you want
+to try through the `mirrors` attribute.  It's a simple iterable::
+
+   >>> mirrors = ["http://first.mirror", "http://second.mirror"]
+   >>> client = Crawler(mirrors=mirrors)
+
+
+Searching in the simple index
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+It's possible to search the package index for projects with specific names.
+For instance, to find all projects containing the "distutils" keyword::
+
+   >>> c.search_projects("distutils")
+   [<Project "collective.recipe.distutils">, <Project "Distutils">, <Project
+   "Packaging">, <Project "distutilscross">, <Project "lpdistutils">, <Project
+   "taras.recipe.distutils">, <Project "zerokspot.recipe.distutils">]
+
+
+You can also search for projects whose names start or end with a specific
+text, using a wildcard::
+
+   >>> c.search_projects("distutils*")
+   [<Project "Distutils">, <Project "Packaging">, <Project "distutilscross">]
+
+   >>> c.search_projects("*distutils")
+   [<Project "collective.recipe.distutils">, <Project "Distutils">, <Project
+   "lpdistutils">, <Project "taras.recipe.distutils">, <Project
+   "zerokspot.recipe.distutils">]
diff --git a/Doc/library/packaging.pypi.xmlrpc.rst b/Doc/library/packaging.pypi.xmlrpc.rst
new file mode 100644
--- /dev/null
+++ b/Doc/library/packaging.pypi.xmlrpc.rst
@@ -0,0 +1,143 @@
+:mod:`packaging.pypi.xmlrpc` --- Crawler using the PyPI XML-RPC interface
+=========================================================================
+
+.. module:: packaging.pypi.xmlrpc
+   :synopsis: Client using XML-RPC requests to fetch info and distributions.
+
+
+Indexes can be queried using XML-RPC calls, and Packaging provides a simple
+way to interface with XML-RPC.
+
+You should **use** XML-RPC when:
+
+* Searching the index for projects **on fields other than the project
+  name**.  For instance, you can search for projects based on the
+  author_email field.
+* Searching all the versions that have ever existed for a project.
+* Retrieving metadata information from releases or distributions.
+
+
+You should **avoid using** XML-RPC method calls when:
+
+* Retrieving the latest version of a project.
+* Getting the projects with a specific name and version.
+* The simple index can match your needs.
+
+
+When dealing with indexes, keep in mind that index queries will always return
+:class:`packaging.pypi.dist.ReleaseInfo` and
+:class:`packaging.pypi.dist.ReleasesList` objects.
+
+Some methods here share a common API with the ones you can find in
+:class:`packaging.pypi.simple`; internally, :class:`Client` is derived from
+:class:`packaging.pypi.client`.
+
+
+API
+---
+
+.. class:: Client
+
+
+Usage examples
+--------------
+
+The use cases described here are those that are not common to the other
+clients.  If you want to see all the methods, please refer to the API or to
+the usage examples described in :class:`packaging.pypi.client.Client`.
+
+
+Finding releases
+^^^^^^^^^^^^^^^^
+
+It's a common use case to search for "things" within the index.  We can
+basically search for projects by their name, which is what users do most often
+(e.g. "give me the latest version of the FooBar project").
+
+This can be accomplished using the following syntax::
+
+   >>> client = xmlrpc.Client()
+   >>> client.get_release("FooBar (<= 1.3)")
+   <FooBar 1.2.1>
+   >>> client.get_releases("FooBar (<= 1.3)")
+   [<FooBar 1.1>, <FooBar 1.1.1>, <FooBar 1.2>, <FooBar 1.2.1>]
+
+
+We can also search on specific fields::
+
+   >>> client.search_projects(field=value)
+
+
+You can specify the operator to use; the default is "or"::
+
+   >>> client.search_projects(field=value, operator="and")
+
+
+The specific fields you can search on are the following (an example follows
+the list):
+
+* name
+* version
+* author
+* author_email
+* maintainer
+* maintainer_email
+* home_page
+* license
+* summary
+* description
+* keywords
+* platform
+* download_url
+
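+For instance, a hypothetical search combining the name and author_email fields
+(the values are purely illustrative)::
+
+   >>> client.search_projects(name="foo", author_email="jane@example.org",
+   ...                        operator="and")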
+
+Getting metadata information
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+XML-RPC is the preferred way to retrieve metadata information from indexes.
+It's really simple to do so::
+
+   >>> client = xmlrpc.Client()
+   >>> client.get_metadata("FooBar", "1.1")
+   <ReleaseInfo FooBar 1.1>
+
+
+Assuming we already have a :class:`packaging.pypi.ReleaseInfo` object defined,
+it's possible to pass it to the xmlrpc client to retrieve and complete its
+metadata::
+
+   >>> foobar11 = ReleaseInfo("FooBar", "1.1")
+   >>> client = xmlrpc.Client()
+   >>> returned_release = client.get_metadata(release=foobar11)
+   >>> returned_release
+   <ReleaseInfo FooBar 1.1>
+
+
+Get all the releases of a project
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+To retrieve all the releases for a project, you can build them using
+`get_releases`::
+
+   >>> client = xmlrpc.Client()
+   >>> client.get_releases("FooBar")
+   [<ReleaseInfo FooBar 0.9>, <ReleaseInfo FooBar 1.0>, <ReleaseInfo FooBar 1.1>]
+
+
+Get information about distributions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Indexes have information about projects, releases **and** distributions.
+If you're not familiar with those, please refer to the documentation of
+:mod:`packaging.pypi.dist`.
+
+It's possible to retrieve information about distributions, e.g. "what are the
+existing distributions for this release? How do I retrieve them?"::
+
+   >>> client = xmlrpc.Client()
+   >>> release = client.get_distributions("FooBar", "1.1")
+   >>> release.dists
+   {'sdist': <FooBar 1.1 sdist>, 'bdist': <FooBar 1.1 bdist>}
+
+As you see, this does not return a list of distributions, but a release,
+because a release can be used like a list of distributions.
diff --git a/Doc/library/packaging.rst b/Doc/library/packaging.rst
new file mode 100644
--- /dev/null
+++ b/Doc/library/packaging.rst
@@ -0,0 +1,78 @@
+:mod:`packaging` --- Packaging support
+======================================
+
+.. module:: packaging
+   :synopsis: Packaging system and building blocks for other packaging systems.
+.. sectionauthor:: Fred L. Drake, Jr. <fdrake at acm.org>, distutils and packaging
+                   contributors
+
+
+The :mod:`packaging` package provides support for building, packaging,
+distributing and installing additional projects into a Python installation.
+Projects may include Python modules, extension modules, packages and scripts.
+:mod:`packaging` also provides building blocks for other packaging systems
+that are not tied to the command system.
+
+This manual is the reference documentation for those standalone building
+blocks and for extending Packaging. If you're looking for the user-centric
+guides to install a project or package your own code, head to `See also`__.
+
+
+Building blocks
+---------------
+
+.. toctree::
+   :maxdepth: 2
+   :numbered:
+
+   packaging-misc
+   packaging.version
+   packaging.metadata
+   packaging.database
+   packaging.depgraph
+   packaging.pypi
+   packaging.pypi.dist
+   packaging.pypi.simple
+   packaging.pypi.xmlrpc
+   packaging.install
+
+
+The command machinery
+---------------------
+
+.. toctree::
+   :maxdepth: 2
+   :numbered:
+
+   packaging.dist
+   packaging.command
+   packaging.compiler
+   packaging.fancy_getopt
+
+
+Other utilities
+----------------
+
+.. toctree::
+   :maxdepth: 2
+   :numbered:
+
+   packaging.util
+   packaging.tests.pypi_server
+
+.. XXX missing: compat config create (dir_util) run pypi.{base,mirrors}
+
+
+.. __:
+
+.. seealso::
+
+   :ref:`packaging-index`
+      The manual for developers of Python projects who want to package and
+      distribute them. This describes how to use :mod:`packaging` to make
+      projects easily found and added to an existing Python installation.
+
+   :ref:`packaging-install-index`
+      A user-centered manual which includes information on adding projects
+      into an existing Python installation.  You do not need to be a Python
+      programmer to read this manual.
diff --git a/Doc/library/packaging.tests.pypi_server.rst b/Doc/library/packaging.tests.pypi_server.rst
new file mode 100644
--- /dev/null
+++ b/Doc/library/packaging.tests.pypi_server.rst
@@ -0,0 +1,105 @@
+:mod:`packaging.tests.pypi_server` --- PyPI mock server
+=======================================================
+
+.. module:: packaging.tests.pypi_server
+   :synopsis: Mock server used to test PyPI-related modules and commands.
+
+
+When you are testing code that works with Packaging, you might find these tools
+useful.
+
+
+The mock server
+---------------
+
+.. class:: PyPIServer
+
+   PyPIServer is a class that implements an HTTP server running in a separate
+   thread.  All it does is record the requests for further inspection.  The
+   recorded data is available under the ``requests`` attribute.  The default
+   HTTP response can be overridden with the ``default_response_status``,
+   ``default_response_headers`` and ``default_response_data`` attributes.
+
+   By default, when it is accessed with URLs beginning with `/simple/`, the
+   server still records your requests, but looks for files under the
+   `/tests/pypiserver/simple/` path.
+
+   You can tell the server to serve static files for other paths.  This can be
+   accomplished by using the `static_uri_paths` parameter, as below::
+
+      server = PyPIServer(static_uri_paths=["first_path", "second_path"])
+
+
+   You need to create the content that will be served under the
+   `/tests/pypiserver/default` path.  If you want to serve content from another
+   place, you can also specify another filesystem path (which needs to be under
+   `tests/pypiserver/`).  This will replace the default behavior of the server,
+   and it will not serve content from the `default` dir::
+
+      server = PyPIServer(static_filesystem_paths=["path/to/your/dir"])
+
+
+   If you just need to add some paths to the existing ones, you can do as shown
+   below, keeping in mind that the server will always try to load paths in
+   reverse order (e.g. here, "another/super/path" is tried before the default
+   one)::
+
+      server = PyPIServer(test_static_path="another/super/path")
+      server = PyPIServer("another/super/path")
+      # or
+      server.static_filesystem_paths.append("another/super/path")
+
+
+   As a consequence, when your tests need to use the PyPIServer, the best
+   practice for isolating test cases is to place the common files in the
+   `default` folder and to create a directory for each specific test case::
+
+      server = PyPIServer(static_filesystem_paths=["default", "test_pypi_server"],
+                          static_uri_paths=["simple", "external"])
+
+
+Base class and decorator for tests
+----------------------------------
+
+.. class:: PyPIServerTestCase
+
+   ``PyPIServerTestCase`` is a test case class with setUp and tearDown methods that
+   take care of a single PyPIServer instance attached as a ``pypi`` attribute on
+   the test class. Use it as one of the base classes in your test case::
+
+
+      class UploadTestCase(PyPIServerTestCase):
+
+          def test_something(self):
+              cmd = self.prepare_command()
+              cmd.ensure_finalized()
+              cmd.repository = self.pypi.full_address
+              cmd.run()
+
+              environ, request_data = self.pypi.requests[-1]
+              self.assertEqual(request_data, EXPECTED_REQUEST_DATA)
+
+
+.. decorator:: use_pypi_server
+
+   You can also use a decorator for your tests, if you do not need the same
+   server instance across the whole test case.  That way, you can specify
+   different initialization parameters for the server for each test method.
+
+   For this, you need to add a `server` parameter to your method, like this::
+
+      class SampleTestCase(TestCase):
+
+          @use_pypi_server()
+          def test_something(self, server):
+              ...
+
+
+   The decorator will instantiate the server for you, starting it just before
+   your method is called and stopping it just after.  You can also pass
+   initialization arguments for the server, like this::
+
+      class SampleTestCase(TestCase):
+
+          @use_pypi_server("test_case_name")
+          def test_something(self, server):
+              ...
diff --git a/Doc/library/packaging.util.rst b/Doc/library/packaging.util.rst
new file mode 100644
--- /dev/null
+++ b/Doc/library/packaging.util.rst
@@ -0,0 +1,186 @@
+:mod:`packaging.util` --- Miscellaneous utility functions
+=========================================================
+
+.. module:: packaging.util
+   :synopsis: Miscellaneous utility functions.
+
+
+This module contains various helpers for the other modules.
+
+.. XXX a number of functions are missing, but the module may be split first
+   (it's ginormous right now, some things could go to compat for example)
+
+.. function:: get_platform()
+
+   Return a string that identifies the current platform.  This is used mainly to
+   distinguish platform-specific build directories and platform-specific built
+   distributions.  Typically includes the OS name and version and the
+   architecture (as supplied by ``os.uname()``), although the exact information
+   included depends on the OS; e.g. for IRIX the architecture isn't particularly
+   important (IRIX only runs on SGI hardware), but for Linux the kernel version
+   isn't particularly important.
+
+   Examples of returned values:
+
+   * ``linux-i586``
+   * ``linux-alpha``
+   * ``solaris-2.6-sun4u``
+   * ``irix-5.3``
+   * ``irix64-6.2``
+
+   For non-POSIX platforms, currently just returns ``sys.platform``.
+
+   For Mac OS X systems the OS version reflects the minimal version on which
+   binaries will run (that is, the value of ``MACOSX_DEPLOYMENT_TARGET``
+   during the build of Python), not the OS version of the current system.
+
+   For universal binary builds on Mac OS X the architecture value reflects
+   the universal binary status instead of the architecture of the current
+   processor.  For 32-bit universal binaries the architecture is ``fat``,
+   for 64-bit universal binaries the architecture is ``fat64``, and
+   for 4-way universal binaries the architecture is ``universal``.  Starting
+   from Python 2.7 and Python 3.2 the architecture ``fat3`` is used for
+   a 3-way universal build (ppc, i386, x86_64) and ``intel`` is used for
+   a universal build with the i386 and x86_64 architectures.
+
+   Examples of returned values on Mac OS X:
+
+   * ``macosx-10.3-ppc``
+
+   * ``macosx-10.3-fat``
+
+   * ``macosx-10.5-universal``
+
+   * ``macosx-10.6-intel``
+
+   .. XXX reinvention of platform module?
+
+
+.. function:: convert_path(pathname)
+
+   Return *pathname* as a name that will work on the native filesystem, i.e.
+   split it on ``'/'`` and put it back together again using the current
+   directory separator.  Needed because filenames in the setup script are
+   always supplied in Unix style, and have to be converted to the local
+   convention before we can actually use them in the filesystem.  Raises
+   :exc:`ValueError` on non-Unix-ish systems if *pathname* either starts or
+   ends with a slash.
+
+
+.. function:: change_root(new_root, pathname)
+
+   Return *pathname* with *new_root* prepended.  If *pathname* is relative, this
+   is equivalent to ``os.path.join(new_root, pathname)``.  Otherwise, it requires
+   making *pathname* relative and then joining the two, which is tricky on
+   DOS/Windows.
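+
+   For instance, a minimal illustration (assuming a POSIX system and an
+   arbitrary *new_root*)::
+
+      >>> change_root('/tmp/stage', '/usr/lib/python3.3')
+      '/tmp/stage/usr/lib/python3.3'
+      >>> change_root('/tmp/stage', 'lib/python3.3')
+      '/tmp/stage/lib/python3.3'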
+
+
+.. function:: check_environ()
+
+   Ensure that ``os.environ`` has all the environment variables we guarantee that
+   users can use in config files, command-line options, etc.  Currently this
+   includes:
+
+   * :envvar:`HOME` - user's home directory (Unix only)
+   * :envvar:`PLAT` - description of the current platform, including hardware
+     and OS (see :func:`get_platform`)
+
+
+.. function:: find_executable(executable, path=None)
+
+   Search the path for a given executable name.
+
+
+.. function:: subst_vars(s, local_vars)
+
+   Perform shell/Perl-style variable substitution on *s*.  Every occurrence of
+   ``$`` followed by a name is considered a variable, and the variable is
+   substituted by the value found in the *local_vars* dictionary, or in
+   ``os.environ`` if it's not in *local_vars*.  ``os.environ`` is first
+   checked/augmented to guarantee that it contains certain values: see
+   :func:`check_environ`.  Raise :exc:`ValueError` for any variables not found
+   in either *local_vars* or ``os.environ``.
+
+   Note that this is not a fully-fledged string interpolation function. A valid
+   ``$variable`` can consist only of upper and lower case letters, numbers and
+   an underscore. No { } or ( ) style quoting is available.
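+
+   A minimal illustration (assuming the function has been imported)::
+
+      >>> subst_vars('lib/python$py_version_short/site-packages',
+      ...            {'py_version_short': '3.3'})
+      'lib/python3.3/site-packages'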
+
+
+.. function:: split_quoted(s)
+
+   Split a string up according to Unix shell-like rules for quotes and
+   backslashes. In short: words are delimited by spaces, as long as those spaces
+   are not escaped by a backslash, or inside a quoted string. Single and double
+   quotes are equivalent, and the quote characters can be backslash-escaped.
+   The backslash is stripped from any two-character escape sequence, leaving
+   only the escaped character.  The quote characters are stripped from any
+   quoted string.  Returns a list of words.
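+
+   A minimal illustration (assuming the function has been imported)::
+
+      >>> split_quoted('say "hello world" twice')
+      ['say', 'hello world', 'twice']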
+
+   .. TODO Should probably be moved into the standard library.
+
+
+.. function:: execute(func, args[, msg=None, verbose=0, dry_run=0])
+
+   Perform some action that affects the outside world (for instance, writing to
+   the filesystem).  Such actions are special because they are disabled by the
+   *dry_run* flag.  This method takes care of all that bureaucracy for you;
+   all you have to do is supply the function to call and an argument tuple for
+   it (to embody the "external action" being performed), and an optional message
+   to print.
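+
+   A hypothetical call (the file name is purely illustrative); with *dry_run*
+   true, the message is logged but the function is not actually called::
+
+      import os
+      from packaging.util import execute
+
+      execute(os.remove, ('build/stamp.txt',),
+              msg="removing build/stamp.txt", dry_run=1)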
+
+
+.. function:: newer(source, target)
+
+   Return true if *source* exists and is more recently modified than *target*,
+   or if *source* exists and *target* doesn't. Return false if both exist and
+   *target* is the same age or newer than *source*. Raise
+   :exc:`PackagingFileError` if *source* does not exist.
+
+
+.. function:: strtobool(val)
+
+   Convert a string representation of truth to true (1) or false (0).
+
+   True values are ``y``, ``yes``, ``t``, ``true``, ``on`` and ``1``; false
+   values are ``n``, ``no``, ``f``, ``false``, ``off`` and ``0``.  Raises
+   :exc:`ValueError` if *val* is anything else.
+
+.. TODO Add :term: markup to bytecode when merging into the stdlib
+
+.. function:: byte_compile(py_files[, optimize=0, force=0, prefix=None, base_dir=None, verbose=1, dry_run=0, direct=None])
+
+   Byte-compile a collection of Python source files to either :file:`.pyc` or
+   :file:`.pyo` files in the same directory.  *py_files* is a list of files to
+   compile; any files that don't end in :file:`.py` are silently skipped.
+   *optimize* must be one of the following:
+
+   * ``0`` - don't optimize (generate :file:`.pyc`)
+   * ``1`` - normal optimization (like ``python -O``)
+   * ``2`` - extra optimization (like ``python -OO``)
+
+   If *force* is true, all files are recompiled regardless of timestamps.
+
+   The source filename encoded in each bytecode file defaults to the filenames
+   listed in *py_files*; you can modify these with *prefix* and *base_dir*.
+   *prefix* is a string that will be stripped off of each source filename, and
+   *base_dir* is a directory name that will be prepended (after *prefix* is
+   stripped).  You can supply either or both (or neither) of *prefix* and
+   *base_dir*, as you wish.
+
+   If *dry_run* is true, doesn't actually do anything that would affect the
+   filesystem.
+
+   Byte-compilation is either done directly in this interpreter process with the
+   standard :mod:`py_compile` module, or indirectly by writing a temporary
+   script and executing it.  Normally, you should let :func:`byte_compile`
+   figure out whether to use direct compilation or not (see the source for details).
+   The *direct* flag is used by the script generated in indirect mode; unless
+   you know what you're doing, leave it set to ``None``.
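+
+   A hypothetical call (the file names are purely illustrative)::
+
+      from packaging.util import byte_compile
+
+      byte_compile(['build/lib/spam.py', 'build/lib/eggs.py'],
+                   optimize=0, force=1, verbose=1)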
+
+
+.. function:: rfc822_escape(header)
+
+   Return a version of *header* escaped for inclusion in an :rfc:`822` header, by
+   ensuring there are 8 spaces after each newline.  Note that it does no other
+   modification of the string.
+
+   .. TODO this _can_ be replaced
diff --git a/Doc/library/packaging.version.rst b/Doc/library/packaging.version.rst
new file mode 100644
--- /dev/null
+++ b/Doc/library/packaging.version.rst
@@ -0,0 +1,104 @@
+:mod:`packaging.version` --- Version number classes
+===================================================
+
+.. module:: packaging.version
+   :synopsis: Classes that represent project version numbers.
+
+
+This module contains classes and functions useful to deal with version numbers.
+It's an implementation of version specifiers `as defined in PEP 345
+<http://www.python.org/dev/peps/pep-0345/#version-specifiers>`_.
+
+
+Version numbers
+---------------
+
+.. class:: NormalizedVersion(s, error_on_huge_major_num=True)
+
+   A specific version of a distribution, as described in PEP 345.  *s* is a
+   string object containing the version number (for example ``'1.2b1'``), and
+   *error_on_huge_major_num* is a boolean specifying whether an apparent use of
+   a year or full date as the major version number should be considered an
+   error.
+
+   The rationale for the second argument is that there were projects using
+   years or full dates as version numbers, which could cause sorting problems
+   in some packaging systems.
+
+   Instances of this class can be compared and sorted::
+
+      >>> NormalizedVersion('1.2b1') < NormalizedVersion('1.2')
+      True
+
+   :class:`NormalizedVersion` is used internally by :class:`VersionPredicate` to
+   do its work.
+
+
+.. class:: IrrationalVersionError
+
+   Exception raised when an invalid string is given to
+   :class:`NormalizedVersion`.
+
+      >>> NormalizedVersion("irrational_version_number")
+      Traceback (most recent call last):
+      ...
+      IrrationalVersionError: irrational_version_number
+
+
+.. function:: suggest_normalized_version(s)
+
+   Before standardization in PEP 386, various schemes were in use.  Packaging
+   provides a function to try to convert any string to a valid, normalized
+   version::
+
+      >>> suggest_normalized_version('2.1-rc1')
+      '2.1c1'
+
+
+   If :func:`suggest_normalized_version` can't make sense of the given string,
+   it will return ``None``::
+
+      >>> print(suggest_normalized_version('not a version'))
+      None
+
+
+Version predicates
+------------------
+
+.. class:: VersionPredicate(predicate)
+
+   This class deals with the parsing of field values like
+   ``ProjectName (>=version)``.
+
+   .. method:: match(version)
+
+      Test if a version number matches the predicate:
+
+         >>> version = VersionPredicate("ProjectName (<1.2, >1.0)")
+         >>> version.match("1.2.1")
+         False
+         >>> version.match("1.1.1")
+         True
+
+
+Validation helpers
+------------------
+
+If you want to use :term:`LBYL`-style checks instead of instantiating the
+classes and catching :class:`IrrationalVersionError` and :class:`ValueError`,
+you can use these functions:
+
+.. function:: is_valid_version(predicate)
+
+   Check whether the given string is a valid version number.  Example of valid
+   strings: ``'1.2'``,  ``'4.2.0.dev4'``, ``'2.5.4.post2'``.
+
+
+.. function:: is_valid_versions(predicate)
+
+   Check whether the given string is a valid value for specifying multiple
+   versions, such as in the Requires-Python field.  Example: ``'2.7, >=3.2'``.
+
+
+.. function:: is_valid_predicate(predicate)
+
+   Check whether the given string is a valid version predicate.  Examples:
+   ``'some.project == 4.5, <= 4.7'``, ``'speciallib (> 1.0, != 1.4.2, < 2.0)'``.
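+
+For instance, assuming the helpers have been imported from
+:mod:`packaging.version`::
+
+   >>> is_valid_version('1.2')
+   True
+   >>> is_valid_version('not a version')
+   False
+   >>> is_valid_predicate('speciallib (> 1.0, != 1.4.2, < 2.0)')
+   True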
diff --git a/Doc/library/pprint.rst b/Doc/library/pprint.rst
--- a/Doc/library/pprint.rst
+++ b/Doc/library/pprint.rst
@@ -193,7 +193,7 @@
 -------
 
 To demonstrate several uses of the :func:`pprint` function and its parameters,
-let's fetch information about a package from PyPI::
+let's fetch information about a project from PyPI::
 
    >>> import json
    >>> import pprint
@@ -201,8 +201,8 @@
    >>> with urlopen('http://pypi.python.org/pypi/configparser/json') as url:
    ...     http_info = url.info()
    ...     raw_data = url.read().decode(http_info.get_content_charset())
-   >>> package_data = json.loads(raw_data)
-   >>> result = {'headers': http_info.items(), 'body': package_data}
+   >>> project_info = json.loads(raw_data)
+   >>> result = {'headers': http_info.items(), 'body': project_info}
 
 In its basic form, :func:`pprint` shows the whole object::
 
diff --git a/Doc/library/python.rst b/Doc/library/python.rst
--- a/Doc/library/python.rst
+++ b/Doc/library/python.rst
@@ -25,4 +25,5 @@
    inspect.rst
    site.rst
    fpectl.rst
+   packaging.rst
    distutils.rst
diff --git a/Doc/library/random.rst b/Doc/library/random.rst
--- a/Doc/library/random.rst
+++ b/Doc/library/random.rst
@@ -43,6 +43,12 @@
 uses the system function :func:`os.urandom` to generate random numbers
 from sources provided by the operating system.
 
+.. warning::
+
+   The generators of the :mod:`random` module should not be used for security
+   purposes. Use :func:`ssl.RAND_bytes` if you require a cryptographically
+   secure pseudorandom number generator.
+
 
 Bookkeeping functions:
 
diff --git a/Doc/library/re.rst b/Doc/library/re.rst
--- a/Doc/library/re.rst
+++ b/Doc/library/re.rst
@@ -1301,24 +1301,27 @@
 to combine those into a single master regular expression and to loop over
 successive matches::
 
-    Token = collections.namedtuple('Token', 'typ value line column')
+    import collections
+    import re
+
+    Token = collections.namedtuple('Token', ['typ', 'value', 'line', 'column'])
 
     def tokenize(s):
-        keywords = {'IF', 'THEN', 'FOR', 'NEXT', 'GOSUB', 'RETURN'}
-        tok_spec = [
-            ('NUMBER', r'\d+(\.\d*)?'), # Integer or decimal number
-            ('ASSIGN', r':='),          # Assignment operator
-            ('END', ';'),               # Statement terminator
-            ('ID', r'[A-Za-z]+'),       # Identifiers
-            ('OP', r'[+*\/\-]'),        # Arithmetic operators
-            ('NEWLINE', r'\n'),         # Line endings
-            ('SKIP', r'[ \t]'),         # Skip over spaces and tabs
+        keywords = {'IF', 'THEN', 'ENDIF', 'FOR', 'NEXT', 'GOSUB', 'RETURN'}
+        token_specification = [
+            ('NUMBER',  r'\d+(\.\d*)?'), # Integer or decimal number
+            ('ASSIGN',  r':='),          # Assignment operator
+            ('END',     r';'),           # Statement terminator
+            ('ID',      r'[A-Za-z]+'),   # Identifiers
+            ('OP',      r'[+*\/\-]'),    # Arithmetic operators
+            ('NEWLINE', r'\n'),          # Line endings
+            ('SKIP',    r'[ \t]'),       # Skip over spaces and tabs
         ]
-        tok_re = '|'.join('(?P<%s>%s)' % pair for pair in tok_spec)
-        gettok = re.compile(tok_re).match
+        tok_regex = '|'.join('(?P<%s>%s)' % pair for pair in token_specification)
+        get_token = re.compile(tok_regex).match
         line = 1
         pos = line_start = 0
-        mo = gettok(s)
+        mo = get_token(s)
         while mo is not None:
             typ = mo.lastgroup
             if typ == 'NEWLINE':
@@ -1330,13 +1333,15 @@
                     typ = val
                 yield Token(typ, val, line, mo.start()-line_start)
             pos = mo.end()
-            mo = gettok(s, pos)
+            mo = get_token(s, pos)
         if pos != len(s):
             raise RuntimeError('Unexpected character %r on line %d' %(s[pos], line))
 
-    statements = '''\
-        total := total + price * quantity;
-        tax := price * 0.05;
+    statements = '''
+        IF quantity THEN
+            total := total + price * quantity;
+            tax := price * 0.05;
+        ENDIF;
     '''
 
     for token in tokenize(statements):
@@ -1344,17 +1349,22 @@
 
 The tokenizer produces the following output::
 
-    Token(typ='ID', value='total', line=1, column=8)
-    Token(typ='ASSIGN', value=':=', line=1, column=14)
-    Token(typ='ID', value='total', line=1, column=17)
-    Token(typ='OP', value='+', line=1, column=23)
-    Token(typ='ID', value='price', line=1, column=25)
-    Token(typ='OP', value='*', line=1, column=31)
-    Token(typ='ID', value='quantity', line=1, column=33)
-    Token(typ='END', value=';', line=1, column=41)
-    Token(typ='ID', value='tax', line=2, column=9)
-    Token(typ='ASSIGN', value=':=', line=2, column=13)
-    Token(typ='ID', value='price', line=2, column=16)
-    Token(typ='OP', value='*', line=2, column=22)
-    Token(typ='NUMBER', value='0.05', line=2, column=24)
-    Token(typ='END', value=';', line=2, column=28)
+    Token(typ='IF', value='IF', line=2, column=5)
+    Token(typ='ID', value='quantity', line=2, column=8)
+    Token(typ='THEN', value='THEN', line=2, column=17)
+    Token(typ='ID', value='total', line=3, column=9)
+    Token(typ='ASSIGN', value=':=', line=3, column=15)
+    Token(typ='ID', value='total', line=3, column=18)
+    Token(typ='OP', value='+', line=3, column=24)
+    Token(typ='ID', value='price', line=3, column=26)
+    Token(typ='OP', value='*', line=3, column=32)
+    Token(typ='ID', value='quantity', line=3, column=34)
+    Token(typ='END', value=';', line=3, column=42)
+    Token(typ='ID', value='tax', line=4, column=9)
+    Token(typ='ASSIGN', value=':=', line=4, column=13)
+    Token(typ='ID', value='price', line=4, column=16)
+    Token(typ='OP', value='*', line=4, column=22)
+    Token(typ='NUMBER', value='0.05', line=4, column=24)
+    Token(typ='END', value=';', line=4, column=28)
+    Token(typ='ENDIF', value='ENDIF', line=5, column=5)
+    Token(typ='END', value=';', line=5, column=10)
diff --git a/Doc/library/signal.rst b/Doc/library/signal.rst
--- a/Doc/library/signal.rst
+++ b/Doc/library/signal.rst
@@ -187,10 +187,9 @@
    Send the signal *signum* to the thread *thread_id*, another thread in the same
    process as the caller.  The signal is asynchronously directed to thread.
 
-   *thread_id* can be read from the :attr:`~threading.Thread.ident` attribute
-   of :attr:`threading.Thread`.  For example,
-   ``threading.current_thread().ident`` gives the identifier of the current
-   thread.
+   Use :func:`threading.get_ident` or the :attr:`~threading.Thread.ident`
+   attribute of :class:`threading.Thread` to get a suitable 'thread identifier'
+   to pass as *thread_id*.
 
    If *signum* is 0, then no signal is sent, but error checking is still
    performed; this can be used to check if a thread is still running.
diff --git a/Doc/library/site.rst b/Doc/library/site.rst
--- a/Doc/library/site.rst
+++ b/Doc/library/site.rst
@@ -129,6 +129,10 @@
    unless the :program:`python` interpreter was started with the :option:`-S`
    flag.
 
+   .. versionchanged:: 3.3
+      This function used to be called unconditionally.
+
+
 .. function:: addsitedir(sitedir, known_paths=None)
 
    Adds a directory to sys.path and processes its pth files.
diff --git a/Doc/library/socketserver.rst b/Doc/library/socketserver.rst
--- a/Doc/library/socketserver.rst
+++ b/Doc/library/socketserver.rst
@@ -153,8 +153,21 @@
 .. method:: BaseServer.serve_forever(poll_interval=0.5)
 
    Handle requests until an explicit :meth:`shutdown` request.  Polls for
-   shutdown every *poll_interval* seconds.
+   shutdown every *poll_interval* seconds.  It also calls
+   :meth:`service_actions`, which may be used by a subclass or mixin to provide
+   various cleanup actions.  For example, the :class:`ForkingMixIn` class uses
+   :meth:`service_actions` to clean up zombie child processes.
 
+   .. versionchanged:: 3.3
+      Added the :meth:`service_actions` call to the :meth:`serve_forever` method.
+
+
+.. method:: BaseServer.service_actions()
+
+   This is called by the :meth:`serve_forever` loop.  This method can be
+   overridden by mixins to add cleanup or service-specific actions.
+
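+   A minimal sketch of a hypothetical subclass using this hook; the lock-file
+   cleanup is purely illustrative::
+
+      import os
+      import socketserver
+
+      class CleaningServer(socketserver.TCPServer):
+          def service_actions(self):
+              # Called once per serve_forever() loop iteration.
+              if os.path.exists("stale.lock"):
+                  os.remove("stale.lock")
+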
+   .. versionadded:: 3.3
 
 .. method:: BaseServer.shutdown()
 
diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst
--- a/Doc/library/ssl.rst
+++ b/Doc/library/ssl.rst
@@ -162,6 +162,35 @@
 Random generation
 ^^^^^^^^^^^^^^^^^
 
+.. function:: RAND_bytes(num)
+
+   Returns *num* cryptographically strong pseudo-random bytes. Raises an
+   :class:`SSLError` if the PRNG has not been seeded with enough data or if the
+   operation is not supported by the current RAND method. :func:`RAND_status`
+   can be used to check the status of the PRNG and :func:`RAND_add` can be used
+   to seed the PRNG.
+
+   Read the Wikipedia article, `Cryptographically secure pseudorandom number
+   generator (CSPRNG)
+   <http://en.wikipedia.org/wiki/Cryptographically_secure_pseudorandom_number_generator>`_,
+   to get the requirements of a cryptographically secure generator.
+
+   .. versionadded:: 3.3
+
+.. function:: RAND_pseudo_bytes(num)
+
+   Returns a pair ``(bytes, is_cryptographic)``: *bytes* are *num* pseudo-random
+   bytes, and *is_cryptographic* is ``True`` if the bytes generated are
+   cryptographically strong.  Raises an :class:`SSLError` if the operation is
+   not supported by the current RAND method.
+
+   Generated pseudo-random byte sequences will be unique if they are of
+   sufficient length, but are not necessarily unpredictable. They can be used
+   for non-cryptographic purposes and for certain purposes in cryptographic
+   protocols, but usually not for key generation etc.
+
+   .. versionadded:: 3.3
+
 .. function:: RAND_status()
 
    Returns True if the SSL pseudo-random number generator has been seeded with
@@ -171,7 +200,7 @@
 
 .. function:: RAND_egd(path)
 
-   If you are running an entropy-gathering daemon (EGD) somewhere, and ``path``
+   If you are running an entropy-gathering daemon (EGD) somewhere, and *path*
    is the pathname of a socket connection open to it, this will read 256 bytes
    of randomness from the socket, and add it to the SSL pseudo-random number
    generator to increase the security of generated secret keys.  This is
@@ -182,8 +211,8 @@
 
 .. function:: RAND_add(bytes, entropy)
 
-   Mixes the given ``bytes`` into the SSL pseudo-random number generator.  The
-   parameter ``entropy`` (a float) is a lower bound on the entropy contained in
+   Mixes the given *bytes* into the SSL pseudo-random number generator.  The
+   parameter *entropy* (a float) is a lower bound on the entropy contained in
    string (so you can always use :const:`0.0`).  See :rfc:`1750` for more
    information on sources of entropy.
 
diff --git a/Doc/library/threading.rst b/Doc/library/threading.rst
--- a/Doc/library/threading.rst
+++ b/Doc/library/threading.rst
@@ -48,6 +48,17 @@
    returned.
 
 
+.. function:: get_ident()
+
+   Return the 'thread identifier' of the current thread.  This is a nonzero
+   integer.  Its value has no direct meaning; it is intended as a magic cookie
+   to be used e.g. to index a dictionary of thread-specific data.  Thread
+   identifiers may be recycled when a thread exits and another thread is
+   created.
+
+   .. versionadded:: 3.3
+
+
 .. function:: enumerate()
 
    Return a list of all :class:`Thread` objects currently alive.  The list
@@ -332,10 +343,10 @@
    .. attribute:: ident
 
       The 'thread identifier' of this thread or ``None`` if the thread has not
-      been started.  This is a nonzero integer.  See the
-      :func:`thread.get_ident()` function.  Thread identifiers may be recycled
-      when a thread exits and another thread is created.  The identifier is
-      available even after the thread has exited.
+      been started.  This is a nonzero integer.  See the :func:`get_ident()`
+      function.  Thread identifiers may be recycled when a thread exits and
+      another thread is created.  The identifier is available even after the
+      thread has exited.
 
    .. method:: is_alive()
 
diff --git a/Doc/packaging/builtdist.rst b/Doc/packaging/builtdist.rst
new file mode 100644
--- /dev/null
+++ b/Doc/packaging/builtdist.rst
@@ -0,0 +1,307 @@
+.. _packaging-built-dist:
+
+****************************
+Creating Built Distributions
+****************************
+
+A "built distribution" is what you're probably used to thinking of either as a
+"binary package" or an "installer" (depending on your background).  It's not
+necessarily binary, though, because it might contain only Python source code
+and/or byte-code; and we don't call it a package, because that word is already
+spoken for in Python.  (And "installer" is a term specific to the world of
+mainstream desktop systems.)
+
+A built distribution is how you make life as easy as possible for installers of
+your module distribution: for users of RPM-based Linux systems, it's a binary
+RPM; for Windows users, it's an executable installer; for Debian-based Linux
+users, it's a Debian package; and so forth.  Obviously, no one person will be
+able to create built distributions for every platform under the sun, so the
+Distutils are designed to enable module developers to concentrate on their
+specialty---writing code and creating source distributions---while an
+intermediary species called *packagers* springs up to turn source distributions
+into built distributions for as many platforms as there are packagers.
+
+Of course, the module developer could be his own packager; or the packager could
+be a volunteer "out there" somewhere who has access to a platform which the
+original developer does not; or it could be software periodically grabbing new
+source distributions and turning them into built distributions for as many
+platforms as the software has access to.  Regardless of who they are, a packager
+uses the setup script and the :command:`bdist` command family to generate built
+distributions.
+
+As a simple example, if I run the following command in the Distutils source
+tree::
+
+   python setup.py bdist
+
+then the Distutils builds my module distribution (the Distutils itself in this
+case), does a "fake" installation (also in the :file:`build` directory), and
+creates the default type of built distribution for my platform.  The default
+format for built distributions is a "dumb" tar file on Unix, and a simple
+executable installer on Windows.  (That tar file is considered "dumb" because it
+has to be unpacked in a specific location to work.)
+
+Thus, the above command on a Unix system creates
+:file:`Distutils-1.0.{plat}.tar.gz`; unpacking this tarball from the right place
+installs the Distutils just as though you had downloaded the source distribution
+and run ``python setup.py install``.  (The "right place" is either the root of
+the filesystem or  Python's :file:`{prefix}` directory, depending on the options
+given to the :command:`bdist_dumb` command; the default is to make dumb
+distributions relative to :file:`{prefix}`.)
+
+Obviously, for pure Python distributions, this isn't any simpler than just
+running ``python setup.py install``\ ---but for non-pure distributions, which
+include extensions that would need to be compiled, it can mean the difference
+between someone being able to use your extensions or not.  And creating "smart"
+built distributions, such as an executable installer for
+Windows, is far more convenient for users even if your distribution doesn't
+include any extensions.
+
+The :command:`bdist` command has a :option:`--formats` option, similar to the
+:command:`sdist` command, which you can use to select the types of built
+distribution to generate: for example, ::
+
+   python setup.py bdist --format=zip
+
+would, when run on a Unix system, create :file:`Distutils-1.0.{plat}.zip`\
+---again, this archive would be unpacked from the root directory to install the
+Distutils.
+
+The available formats for built distributions are:
+
++-------------+------------------------------+---------+
+| Format      | Description                  | Notes   |
++=============+==============================+=========+
+| ``gztar``   | gzipped tar file             | (1),(3) |
+|             | (:file:`.tar.gz`)            |         |
++-------------+------------------------------+---------+
+| ``ztar``    | compressed tar file          | \(3)    |
+|             | (:file:`.tar.Z`)             |         |
++-------------+------------------------------+---------+
+| ``tar``     | tar file (:file:`.tar`)      | \(3)    |
++-------------+------------------------------+---------+
+| ``zip``     | zip file (:file:`.zip`)      | (2),(4) |
++-------------+------------------------------+---------+
+| ``wininst`` | self-extracting ZIP file for | \(4)    |
+|             | Windows                      |         |
++-------------+------------------------------+---------+
+| ``msi``     | Microsoft Installer.         |         |
++-------------+------------------------------+---------+
+
+
+Notes:
+
+(1)
+   default on Unix
+
+(2)
+   default on Windows
+
+(3)
+   requires external utilities: :program:`tar` and possibly one of :program:`gzip`,
+   :program:`bzip2`, or :program:`compress`
+
+(4)
+   requires either external :program:`zip` utility or :mod:`zipfile` module (part
+   of the standard Python library since Python 1.6)
+
+You don't have to use the :command:`bdist` command with the :option:`--formats`
+option; you can also use the command that directly implements the format you're
+interested in.  Some of these :command:`bdist` "sub-commands" actually generate
+several similar formats; for instance, the :command:`bdist_dumb` command
+generates all the "dumb" archive formats (``tar``, ``ztar``, ``gztar``, and
+``zip``).  The :command:`bdist` sub-commands, and the formats generated by
+each, are:
+
++--------------------------+-----------------------+
+| Command                  | Formats               |
++==========================+=======================+
+| :command:`bdist_dumb`    | tar, ztar, gztar, zip |
++--------------------------+-----------------------+
+| :command:`bdist_wininst` | wininst               |
++--------------------------+-----------------------+
+| :command:`bdist_msi`     | msi                   |
++--------------------------+-----------------------+
+
+The following sections give details on the individual :command:`bdist_\*`
+commands.
+
+
+.. _packaging-creating-dumb:
+
+Creating dumb built distributions
+=================================
+
+.. XXX Need to document absolute vs. prefix-relative packages here, but first
+       I have to implement it!
+
+
+.. _packaging-creating-wininst:
+
+Creating Windows Installers
+===========================
+
+Executable installers are the natural format for binary distributions on
+Windows.  They display a nice graphical user interface, display some information
+about the module distribution to be installed taken from the metadata in the
+setup script, let the user select a few options, and start or cancel the
+installation.
+
+Since the metadata is taken from the setup script, creating Windows installers
+is usually as easy as running::
+
+   python setup.py bdist_wininst
+
+or the :command:`bdist` command with the :option:`--formats` option::
+
+   python setup.py bdist --formats=wininst
+
+If you have a pure module distribution (only containing pure Python modules and
+packages), the resulting installer will be version independent and have a name
+like :file:`foo-1.0.win32.exe`.  These installers can even be created on Unix
+platforms or Mac OS X.
+
+If you have a non-pure distribution, the extensions can only be created on a
+Windows platform, and will be Python version dependent. The installer filename
+will reflect this and now has the form :file:`foo-1.0.win32-py2.0.exe`.  You
+have to create a separate installer for every Python version you want to
+support.
+
+.. TODO Add :term: markup to bytecode when merging into the stdlib
+
+The installer will try to compile pure modules into bytecode after installation
+on the target system in normal and optimizing mode.  If you don't want this to
+happen for some reason, you can run the :command:`bdist_wininst` command with
+the :option:`--no-target-compile` and/or the :option:`--no-target-optimize`
+option.
+
+By default the installer will display the cool "Python Powered" logo when it is
+run, but you can also supply your own 152x261 bitmap which must be a Windows
+:file:`.bmp` file with the :option:`--bitmap` option.
+
+The installer will also display a large title on the desktop background window
+when it is run, which is constructed from the name of your distribution and the
+version number.  This can be changed to another text by using the
+:option:`--title` option.
+
+The installer file will be written to the "distribution directory" --- normally
+:file:`dist/`, but customizable with the :option:`--dist-dir` option.
+
+.. _packaging-cross-compile-windows:
+
+Cross-compiling on Windows
+==========================
+
+Starting with Python 2.6, packaging is capable of cross-compiling between
+Windows platforms.  In practice, this means that with the correct tools
+installed, you can use a 32bit version of Windows to create 64bit extensions
+and vice-versa.
+
+To build for an alternate platform, specify the :option:`--plat-name` option
+to the build command.  Valid values are currently 'win32', 'win-amd64' and
+'win-ia64'.  For example, on a 32bit version of Windows, you could execute::
+
+   python setup.py build --plat-name=win-amd64
+
+to build a 64bit version of your extension.  The Windows Installers also
+support this option, so the command::
+
+   python setup.py build --plat-name=win-amd64 bdist_wininst
+
+would create a 64bit installation executable on your 32bit version of Windows.
+
+To cross-compile, you must download the Python source code and cross-compile
+Python itself for the platform you are targeting - it is not possible from a
+binary installation of Python (as the .lib and other files for other platforms
+are not included).  In practice, this means the user of a 32 bit operating
+system will need to use Visual Studio 2008 to open the
+:file:`PCBuild/PCbuild.sln` solution in the Python source tree and build the
+"x64" configuration of the 'pythoncore' project before cross-compiling
+extensions is possible.
+
+Note that by default, Visual Studio 2008 does not install 64bit compilers or
+tools.  You may need to reexecute the Visual Studio setup process and select
+these tools (using Control Panel->[Add/Remove] Programs is a convenient way to
+check or modify your existing install.)
+
+.. _packaging-postinstallation-script:
+
+The Postinstallation script
+---------------------------
+
+Starting with Python 2.3, a postinstallation script can be specified with the
+:option:`--install-script` option.  The basename of the script must be
+specified, and the script filename must also be listed in the scripts argument
+to the setup function.
+
+This script will be run at installation time on the target system after all the
+files have been copied, with ``argv[1]`` set to :option:`-install`, and again at
+uninstallation time before the files are removed with ``argv[1]`` set to
+:option:`-remove`.
+
+The installation script runs embedded in the Windows installer; all output
+(``sys.stdout``, ``sys.stderr``) is redirected into a buffer and will be
+displayed in the GUI after the script has finished.
+
+Some functions especially useful in this context are available as additional
+built-in functions in the installation script.
+
+.. currentmodule:: bdist_wininst-postinst-script
+
+.. function:: directory_created(path)
+              file_created(path)
+
+   These functions should be called when a directory or file is created by the
+   postinstall script at installation time.  They will register *path* with the
+   uninstaller, so that it will be removed when the distribution is uninstalled.
+   To be safe, directories are only removed if they are empty.
+
+
+.. function:: get_special_folder_path(csidl_string)
+
+   This function can be used to retrieve special folder locations on Windows like
+   the Start Menu or the Desktop.  It returns the full path to the folder.
+   *csidl_string* must be one of the following strings::
+
+      "CSIDL_APPDATA"
+
+      "CSIDL_COMMON_STARTMENU"
+      "CSIDL_STARTMENU"
+
+      "CSIDL_COMMON_DESKTOPDIRECTORY"
+      "CSIDL_DESKTOPDIRECTORY"
+
+      "CSIDL_COMMON_STARTUP"
+      "CSIDL_STARTUP"
+
+      "CSIDL_COMMON_PROGRAMS"
+      "CSIDL_PROGRAMS"
+
+      "CSIDL_FONTS"
+
+   If the folder cannot be retrieved, :exc:`OSError` is raised.
+
+   Which folders are available depends on the exact Windows version, and probably
+   also the configuration.  For details refer to Microsoft's documentation of the
+   :c:func:`SHGetSpecialFolderPath` function.
+
+
+.. function:: create_shortcut(target, description, filename[, arguments[, workdir[, iconpath[, iconindex]]]])
+
+   This function creates a shortcut. *target* is the path to the program to be
+   started by the shortcut. *description* is the description of the shortcut.
+   *filename* is the title of the shortcut that the user will see. *arguments*
+   specifies the command-line arguments, if any. *workdir* is the working directory
+   for the program. *iconpath* is the file containing the icon for the shortcut,
+   and *iconindex* is the index of the icon in the file *iconpath*.  Again, for
+   details consult the Microsoft documentation for the :class:`IShellLink`
+   interface.
+
+
+Vista User Access Control (UAC)
+===============================
+
+Starting with Python 2.6, bdist_wininst supports a :option:`--user-access-control`
+option.  The default is 'none' (meaning no UAC handling is done), and other
+valid values are 'auto' (meaning prompt for UAC elevation if Python was
+installed for all users) and 'force' (meaning always prompt for elevation).
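+
+For instance, to always prompt for elevation when the resulting installer is
+run, you could build it with::
+
+   python setup.py bdist_wininst --user-access-control=force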
diff --git a/Doc/packaging/commandhooks.rst b/Doc/packaging/commandhooks.rst
new file mode 100644
--- /dev/null
+++ b/Doc/packaging/commandhooks.rst
@@ -0,0 +1,31 @@
+=============
+Command hooks
+=============
+
+Packaging provides a way of extending its commands through pre- and
+post-command hooks.  The hooks are simple Python functions (or any callable
+objects) and are specified in the config file using their fully qualified
+names.  The pre-hooks are run after the command is finalized (its options are
+processed), but before it is run.  The post-hooks are run after the command
+itself.  Both types of hooks receive an instance of the command object.
+
+Sample usage of hooks
+=====================
+
+First, you need to make sure your hook is present on the path.  This is usually
+done by dropping it into the same folder where the `setup.py` file lives::
+
+  # file: myhooks.py
+  def my_install_hook(install_cmd):
+      print("Oh la la! Someone is installing my project!")
+
+Then, you need to point to it in your `setup.cfg` file, under the appropriate
+command section ::
+
+  [install_dist]
+  pre-hook.project = myhooks.my_install_hook
+
+The hooks defined in different config files (system-wide, user-wide and
+package-wide) do not override each other as long as they are specified with
+different aliases (additional names after the dot). The alias in the example
+above is ``project``.
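+
+For instance, two pre-hooks registered under different aliases (the
+``otherhooks.log_install`` callable is hypothetical) would both be run::
+
+  [install_dist]
+  pre-hook.project = myhooks.my_install_hook
+  pre-hook.logging = otherhooks.log_install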
diff --git a/Doc/packaging/commandref.rst b/Doc/packaging/commandref.rst
new file mode 100644
--- /dev/null
+++ b/Doc/packaging/commandref.rst
@@ -0,0 +1,349 @@
+.. _packaging-command-reference:
+
+*****************
+Command Reference
+*****************
+
+This reference briefly documents all standard Packaging commands and some of
+their options.
+
+.. FIXME does not work: Use pysetup run --help-commands to list all
+   standard and extra commands available on your system, with their
+   description.  Use pysetup run <command> --help to get help about the options
+   of one command.
+
+
+Preparing distributions
+=======================
+
+:command:`check`
+----------------
+
+Perform some tests on the metadata of a distribution.
+
+For example, it verifies that all required metadata fields are provided in the
+:file:`setup.cfg` file.
+
+.. TODO document reST checks
+
+
+:command:`test`
+---------------
+
+Run a test suite.
+
+When doing test-driven development, or running automated builds that need
+testing before they are installed for downloading or use, it's often useful to
+be able to run a project's unit tests without actually installing the project
+anywhere.  The :command:`test` command runs project's unit tests without
+actually installing it, by temporarily putting the project's source on
+:data:`sys.path`, after first running :command:`build_ext -i` to ensure that any
+C extensions are built.
+
+You can use this command in one of two ways: either by specifying a
+unittest-compatible test suite for your project (or any callable that returns
+it) or by passing a test runner function that will run your tests and display
+results in the console.  Both options take a Python dotted name in the form
+``package.module.callable`` to specify the object to use.
+
+If none of these options are specified, Packaging will try to perform test
+discovery using either unittest (for Python 3.2 and higher) or unittest2 (for
+older versions, if installed).
+
+.. this is a pseudo-command name used to disambiguate the options in indexes and
+   links
+.. program:: packaging test
+
+.. cmdoption:: --suite=NAME, -s NAME
+
+   Specify the test suite (or module, class, or method) to be run.  The default
+   for this option can be set in the project's :file:`setup.cfg` file:
+
+   .. code-block:: cfg
+
+      [test]
+      suite = mypackage.tests.get_all_tests
+
+.. cmdoption:: --runner=NAME, -r NAME
+
+   Specify the test runner to be called.
+
+
+:command:`config`
+-----------------
+
+Perform distribution configuration.
+
+
+The build step
+==============
+
+This step is mainly useful to compile C/C++ libraries or extension modules.  The
+build commands can be run manually to check for syntax errors or packaging
+issues (for example if the addition of a new source file was forgotten in the
+:file:`setup.cfg` file), and are also run automatically by commands which need
+them.  Packaging checks the mtime of source and built files to avoid re-building
+if it's not necessary.
+
+
+:command:`build`
+----------------
+
+Build all files of a distribution, delegating to the other :command:`build_*`
+commands to do the work.
+
+
+:command:`build_clib`
+---------------------
+
+Build C libraries.
+
+
+:command:`build_ext`
+--------------------
+
+Build C/C++ extension modules.
+
+
+:command:`build_py`
+-------------------
+
+Build the Python modules (just copy them to the build directory) and
+byte-compile them to .pyc files.
+
+
+:command:`build_scripts`
+------------------------
+
+Build the scripts (just copy them to the build directory and adjust their
+shebang if they're Python scripts).
+
+
+:command:`clean`
+----------------
+
+Clean the build tree of the release.
+
+.. program:: packaging clean
+
+.. cmdoption:: --all, -a
+
+   Remove build directories for modules, scripts, etc., not only temporary build
+   by-products.
+
+
+Creating source and built distributions
+=======================================
+
+:command:`sdist`
+----------------
+
+Build a source distribution for a release.
+
+It is recommended that you always build and upload a source distribution.  Users
+of OSes with easy access to compilers and users of advanced packaging tools will
+prefer to compile from source rather than using pre-built distributions.  For
+Windows users, providing a binary installer is also recommended practice.
+
+
+:command:`bdist`
+----------------
+
+Build a binary distribution for a release.
+
+This command will call other :command:`bdist_*` commands to create one or more
+distributions depending on the options given.  The default is to create a
+.tar.gz archive on Unix and a zip archive on Windows or OS/2.
+
+.. program:: packaging bdist
+
+.. cmdoption:: --formats
+
+   Binary formats to build (comma-separated list).
+
+.. cmdoption:: --show-formats
+
+   Dump list of available formats.
+
+
+:command:`bdist_dumb`
+---------------------
+
+Build a "dumb" installer, a simple archive of files that could be unpacked under
+``$prefix`` or ``$exec_prefix``.
+
+
+:command:`bdist_wininst`
+------------------------
+
+Build a Windows installer.
+
+
+:command:`bdist_msi`
+--------------------
+
+Build a `Microsoft Installer`_ (.msi) file.
+
+.. _Microsoft Installer: http://msdn.microsoft.com/en-us/library/cc185688(VS.85).aspx
+
+In most cases, the :command:`bdist_msi` installer is a better choice than the
+:command:`bdist_wininst` installer, because it provides better support for Win64
+platforms, allows administrators to perform non-interactive installations, and
+allows installation through group policies.
+
+
+Publishing distributions
+========================
+
+:command:`register`
+-------------------
+
+This command registers the current release with the Python Package Index.  This
+is described in more detail in :PEP:`301`.
+
+.. TODO explain user and project registration with the web UI
+
+
+:command:`upload`
+-----------------
+
+Upload source and/or binary distributions to PyPI.
+
+The distributions have to be built on the same command line as the
+:command:`upload` command; see :ref:`packaging-package-upload` for more info.
+
+.. program:: packaging upload
+
+.. cmdoption:: --sign, -s
+
+   Sign each uploaded file using GPG (GNU Privacy Guard).  The ``gpg`` program
+   must be available for execution on the system ``PATH``.
+
+.. cmdoption:: --identity=NAME, -i NAME
+
+   Specify the identity or key name for GPG to use when signing.  The value of
+   this option will be passed through the ``--local-user`` option of the
+   ``gpg`` program.
+
+.. cmdoption:: --show-response
+
+   Display the full response text from server; this is useful for debugging
+   PyPI problems.
+
+.. cmdoption:: --repository=URL, -r URL
+
+   The URL of the repository to upload to.  Defaults to
+   http://pypi.python.org/pypi (i.e., the main PyPI installation).
+
+.. cmdoption:: --upload-docs
+
+   Also run :command:`upload_docs`.  Mainly useful as a default value in
+   :file:`setup.cfg` (on the command line, it's shorter to just type both
+   commands).
+
+
+:command:`upload_docs`
+----------------------
+
+Upload HTML documentation to PyPI.
+
+PyPI now supports publishing project documentation at a URI of the form
+``http://packages.python.org/<project>``.  :command:`upload_docs` will create
+the necessary zip file out of a documentation directory and post it to the
+repository.
+
+Note that to upload the documentation of a project, the corresponding version
+must already be registered with PyPI, using the :command:`register` command ---
+just like with :command:`upload`.
+
+Assuming there is an ``Example`` project with documentation in the subdirectory
+:file:`docs`, for example::
+
+   Example/
+      example.py
+      setup.cfg
+      docs/
+         build/
+            html/
+               index.html
+               tips_tricks.html
+         conf.py
+         index.txt
+         tips_tricks.txt
+
+You can simply specify the directory with the HTML files in your
+:file:`setup.cfg` file:
+
+.. code-block:: cfg
+
+   [upload_docs]
+   upload-dir = docs/build/html
+
+
+.. program:: packaging upload_docs
+
+.. cmdoption:: --upload-dir
+
+   The directory to be uploaded to the repository.  By default, documentation
+   is searched for in the ``docs`` (or ``doc``) directory in the project root.
+
+.. cmdoption:: --show-response
+
+   Display the full response text from server; this is useful for debugging
+   PyPI problems.
+
+.. cmdoption:: --repository=URL, -r URL
+
+   The URL of the repository to upload to.  Defaults to
+   http://pypi.python.org/pypi (i.e., the main PyPI installation).
+
+
+The install step
+================
+
+These commands are used by end-users of a project using :program:`pysetup` or
+another compatible installer.  Each command will run the corresponding
+:command:`build_*` command and then move the built files to their destination on
+the target system.
+
+
+:command:`install_dist`
+-----------------------
+
+Install a distribution, delegating to the other :command:`install_*` commands to
+do the work.
+
+.. program:: packaging install_dist
+
+.. cmdoption:: --user
+
+   Install in user site-packages directory (see :PEP:`370`).
+
+
+:command:`install_data`
+-----------------------
+
+Install data files.
+
+
+:command:`install_distinfo`
+---------------------------
+
+Install files recording details of the installation as specified in :PEP:`376`.
+
+
+:command:`install_headers`
+--------------------------
+
+Install C/C++ header files.
+
+
+:command:`install_lib`
+----------------------
+
+Install C library files.
+
+
+:command:`install_scripts`
+--------------------------
+
+Install scripts.
diff --git a/Doc/packaging/configfile.rst b/Doc/packaging/configfile.rst
new file mode 100644
--- /dev/null
+++ b/Doc/packaging/configfile.rst
@@ -0,0 +1,125 @@
+.. _packaging-setup-config:
+
+************************************
+Writing the Setup Configuration File
+************************************
+
+Often, it's not possible to write down everything needed to build a distribution
+*a priori*: you may need to get some information from the user, or from the
+user's system, in order to proceed.  As long as that information is fairly
+simple---a list of directories to search for C header files or libraries, for
+example---then providing a configuration file, :file:`setup.cfg`, for users to
+edit is a cheap and easy way to solicit it.  Configuration files also let you
+provide default values for any command option, which the installer can then
+override either on the command line or by editing the config file.
+
+The setup configuration file is a useful middle-ground between the setup script
+---which, ideally, would be opaque to installers [#]_---and the command line to
+the setup script, which is outside of your control and entirely up to the
+installer.  In fact, :file:`setup.cfg` (and any other Distutils configuration
+files present on the target system) are processed after the contents of the
+setup script, but before the command line.  This has several useful
+consequences:
+
+.. If you have more advanced needs, such as determining which extensions to
+   build based on what capabilities are present on the target system, then you
+   need the Distutils auto-configuration facility.  This started to appear in
+   Distutils 0.9 but, as of this writing, isn't mature or stable enough yet
+   for real-world use.
+
+* installers can override some of what you put in :file:`setup.py` by editing
+  :file:`setup.cfg`
+
+* you can provide non-standard defaults for options that are not easily set in
+  :file:`setup.py`
+
+* installers can override anything in :file:`setup.cfg` using the command-line
+  options to :file:`setup.py`
+
+The basic syntax of the configuration file is simple::
+
+   [command]
+   option = value
+   ...
+
+where *command* is one of the Distutils commands (e.g. :command:`build_py`,
+:command:`install_dist`), and *option* is one of the options that command supports.
+Any number of options can be supplied for each command, and any number of
+command sections can be included in the file.  Blank lines are ignored, as are
+comments, which run from a ``'#'`` character until the end of the line.  Long
+option values can be split across multiple lines simply by indenting the
+continuation lines.
+
+You can find out the list of options supported by a particular command with the
+universal :option:`--help` option, e.g. ::
+
+   > python setup.py --help build_ext
+   [...]
+   Options for 'build_ext' command:
+     --build-lib (-b)     directory for compiled extension modules
+     --build-temp (-t)    directory for temporary files (build by-products)
+     --inplace (-i)       ignore build-lib and put compiled extensions into the
+                          source directory alongside your pure Python modules
+     --include-dirs (-I)  list of directories to search for header files
+     --define (-D)        C preprocessor macros to define
+     --undef (-U)         C preprocessor macros to undefine
+     --swig-opts          list of SWIG command-line options
+   [...]
+
+.. XXX do we want to support ``setup.py --help metadata``?
+
+Note that an option spelled :option:`--foo-bar` on the command line  is spelled
+:option:`foo_bar` in configuration files.
+
+For example, say you want your extensions to be built "in-place"---that is, you
+have an extension :mod:`pkg.ext`, and you want the compiled extension file
+(:file:`ext.so` on Unix, say) to be put in the same source directory as your
+pure Python modules :mod:`pkg.mod1` and :mod:`pkg.mod2`.  You can always use the
+:option:`--inplace` option on the command line to ensure this::
+
+   python setup.py build_ext --inplace
+
+But this requires that you always specify the :command:`build_ext` command
+explicitly, and remember to provide :option:`--inplace`. An easier way is to
+"set and forget" this option, by encoding it in :file:`setup.cfg`, the
+configuration file for this distribution::
+
+   [build_ext]
+   inplace = 1
+
+This will affect all builds of this module distribution, whether or not you
+explicitly specify :command:`build_ext`.  If you include :file:`setup.cfg` in
+your source distribution, it will also affect end-user builds---which is
+probably a bad idea for this option, since always building extensions in-place
+would break installation of the module distribution.  In certain peculiar cases,
+though, modules are built right in their installation directory, so this is
+conceivably a useful ability.  (Distributing extensions that expect to be built
+in their installation directory is almost always a bad idea, though.)
+
+Another example: certain commands take options that vary from project to
+project but do not depend on the installation system.  For example,
+:command:`test` needs to know where your test suite is located and what test
+runner to use; likewise, :command:`upload_docs` can find HTML documentation in
+a :file:`doc` or :file:`docs` directory, but needs an option to find files in
+:file:`docs/build/html`.  Instead of having to type out these options each
+time you want to run the command, you can put them in the project's
+:file:`setup.cfg`::
+
+   [test]
+   suite = packaging.tests
+
+   [upload_docs]
+   upload-dir = docs/build/html
+
+
+.. seealso::
+
+   :ref:`packaging-config-syntax` in "Installing Python Projects"
+      More information on the configuration files is available in the manual for
+      system administrators.
+
+
+.. rubric:: Footnotes
+
+.. [#] This ideal probably won't be achieved until auto-configuration is fully
+   supported by the Distutils.
diff --git a/Doc/packaging/examples.rst b/Doc/packaging/examples.rst
new file mode 100644
--- /dev/null
+++ b/Doc/packaging/examples.rst
@@ -0,0 +1,334 @@
+.. _packaging-examples:
+
+********
+Examples
+********
+
+This chapter provides a number of basic examples to help get started with
+Packaging.
+
+
+.. _packaging-pure-mod:
+
+Pure Python distribution (by module)
+====================================
+
+If you're just distributing a couple of modules, especially if they don't live
+in a particular package, you can specify them individually using the
+:option:`py_modules` option in the setup script.
+
+In the simplest case, you'll have two files to worry about: a setup script and
+the single module you're distributing, :file:`foo.py` in this example::
+
+   <root>/
+          setup.py
+          foo.py
+
+(In all diagrams in this section, *<root>* will refer to the distribution root
+directory.)  A minimal setup script to describe this situation would be::
+
+   from packaging.core import setup
+   setup(name='foo',
+         version='1.0',
+         py_modules=['foo'])
+
+Note that the name of the distribution is specified independently with the
+:option:`name` option, and there's no rule that says it has to be the same as
+the name of the sole module in the distribution (although that's probably a good
+convention to follow).  However, the distribution name is used to generate
+filenames, so you should stick to letters, digits, underscores, and hyphens.
+
+Since :option:`py_modules` is a list, you can of course specify multiple
+modules, e.g. if you're distributing modules :mod:`foo` and :mod:`bar`, your
+setup might look like this::
+
+   <root>/
+          setup.py
+          foo.py
+          bar.py
+
+and the setup script might be  ::
+
+   from packaging.core import setup
+   setup(name='foobar',
+         version='1.0',
+         py_modules=['foo', 'bar'])
+
+You can put module source files into another directory, but if you have enough
+modules to do that, it's probably easier to specify modules by package rather
+than listing them individually.
+
+
+.. _packaging-pure-pkg:
+
+Pure Python distribution (by package)
+=====================================
+
+If you have more than a couple of modules to distribute, especially if they are
+in multiple packages, it's probably easier to specify whole packages rather than
+individual modules.  This works even if your modules are not in a package; you
+can just tell the Distutils to process modules from the root package, and that
+works the same as any other package (except that you don't have to have an
+:file:`__init__.py` file).
+
+The setup script from the last example could also be written as  ::
+
+   from packaging.core import setup
+   setup(name='foobar',
+         version='1.0',
+         packages=[''])
+
+(The empty string stands for the root package.)
+
+If those two files are moved into a subdirectory, but remain in the root
+package, e.g.::
+
+   <root>/
+          setup.py
+          src/
+              foo.py
+              bar.py
+
+then you would still specify the root package, but you have to tell the
+Distutils where source files in the root package live::
+
+   from packaging.core import setup
+   setup(name='foobar',
+         version='1.0',
+         package_dir={'': 'src'},
+         packages=[''])
+
+More typically, though, you will want to distribute multiple modules in the same
+package (or in sub-packages).  For example, if the :mod:`foo`  and :mod:`bar`
+modules belong in package :mod:`foobar`, one way to lay out your source tree is
+
+::
+
+   <root>/
+          setup.py
+          foobar/
+                 __init__.py
+                 foo.py
+                 bar.py
+
+This is in fact the default layout expected by the Distutils, and the one that
+requires the least work to describe in your setup script::
+
+   from packaging.core import setup
+   setup(name='foobar',
+         version='1.0',
+         packages=['foobar'])
+
+If you want to put modules in directories not named for their package, then you
+need to use the :option:`package_dir` option again.  For example, if the
+:file:`src` directory holds modules in the :mod:`foobar` package::
+
+   <root>/
+          setup.py
+          src/
+              __init__.py
+              foo.py
+              bar.py
+
+an appropriate setup script would be  ::
+
+   from packaging.core import setup
+   setup(name='foobar',
+         version='1.0',
+         package_dir={'foobar': 'src'},
+         packages=['foobar'])
+
+Or, you might put modules from your main package right in the distribution
+root::
+
+   <root>/
+          setup.py
+          __init__.py
+          foo.py
+          bar.py
+
+in which case your setup script would be  ::
+
+   from packaging.core import setup
+   setup(name='foobar',
+         version='1.0',
+         package_dir={'foobar': ''},
+         packages=['foobar'])
+
+(The empty string also stands for the current directory.)
+
+If you have sub-packages, they must be explicitly listed in :option:`packages`,
+but any entries in :option:`package_dir` automatically extend to sub-packages.
+(In other words, the Distutils does *not* scan your source tree, trying to
+figure out which directories correspond to Python packages by looking for
+:file:`__init__.py` files.)  Thus, if the default layout grows a sub-package::
+
+   <root>/
+          setup.py
+          foobar/
+                 __init__.py
+                 foo.py
+                 bar.py
+                 subfoo/
+                        __init__.py
+                        blah.py
+
+then the corresponding setup script would be  ::
+
+   from packaging.core import setup
+   setup(name='foobar',
+         version='1.0',
+         packages=['foobar', 'foobar.subfoo'])
+
+(Again, the empty string in :option:`package_dir` stands for the current
+directory.)
+
+
+.. _packaging-single-ext:
+
+Single extension module
+=======================
+
+Extension modules are specified using the :option:`ext_modules` option.
+:option:`package_dir` has no effect on where extension source files are found;
+it only affects the source for pure Python modules.  The simplest  case, a
+single extension module in a single C source file, is::
+
+   <root>/
+          setup.py
+          foo.c
+
+If the :mod:`foo` extension belongs in the root package, the setup script for
+this could be  ::
+
+   from packaging.core import setup, Extension
+   setup(name='foobar',
+         version='1.0',
+         ext_modules=[Extension('foo', ['foo.c'])])
+
+If the extension actually belongs in a package, say :mod:`foopkg`, then with
+exactly the same source tree layout, this extension can be put in the
+:mod:`foopkg` package simply by changing the name of the extension::
+
+   from packaging.core import setup, Extension
+   setup(name='foobar',
+         version='1.0',
+         packages=['foopkg'],
+         ext_modules=[Extension('foopkg.foo', ['foo.c'])])
+
+
+Checking metadata
+=================
+
+The ``check`` command allows you to verify whether your project's metadata
+meets the minimum requirements to build a distribution.
+
+To run it, just call it using your :file:`setup.py` script. If something is
+missing, ``check`` will display a warning.
+
+Let's take an example with a simple script::
+
+    from packaging.core import setup
+
+    setup(name='foobar')
+
+.. TODO configure logging StreamHandler to match this output
+
+Running the ``check`` command will display some warnings::
+
+    $ python setup.py check
+    running check
+    warning: check: missing required metadata: version, home_page
+    warning: check: missing metadata: either (author and author_email) or
+             (maintainer and maintainer_email) must be supplied
+
+
+If you use the reStructuredText syntax in the ``long_description`` field and
+`Docutils <http://docutils.sourceforge.net/>`_ is installed you can check if
+the syntax is fine with the ``check`` command, using the ``restructuredtext``
+option.
+
+For example, if the :file:`setup.py` script is changed like this::
+
+    from packaging.core import setup
+
+    desc = """\
+    Welcome to foobar!
+    ===============
+
+    This is the description of the ``foobar`` project.
+    """
+
+    setup(name='foobar',
+          version='1.0',
+          author=u'Tarek Ziadé',
+          author_email='tarek@ziade.org',
+          summary='Foobar utilities',
+          description=desc,
+          home_page='http://example.com')
+
+Because the long description is broken, ``check`` will be able to detect it
+by using the :mod:`docutils` parser::
+
+    $ python setup.py check --restructuredtext
+    running check
+    warning: check: Title underline too short. (line 2)
+    warning: check: Could not finish the parsing.
+
+
+.. _packaging-reading-metadata:
+
+Reading the metadata
+====================
+
+The :func:`packaging.core.setup` function provides a command-line interface
+that allows you to query the metadata fields of a project through the
+:file:`setup.py` script of a given project::
+
+    $ python setup.py --name
+    foobar
+
+This call reads the ``name`` metadata by running the
+:func:`packaging.core.setup`  function. When a source or binary
+distribution is created with Distutils, the metadata fields are written
+in a static file called :file:`PKG-INFO`. When a Distutils-based project is
+installed in Python, the :file:`PKG-INFO` file is copied alongside the modules
+and packages of the distribution under :file:`NAME-VERSION-pyX.X.egg-info`,
+where ``NAME`` is the name of the project, ``VERSION`` its version as defined
+in the Metadata, and ``pyX.X`` the major and minor version of Python like
+``2.7`` or ``3.2``.
+
+You can read back this static file by using the
+:class:`packaging.metadata.Metadata` class and its
+:meth:`read_pkg_file` method::
+
+    >>> from packaging.metadata import Metadata
+    >>> metadata = Metadata()
+    >>> metadata.read_pkg_file(open('distribute-0.6.8-py2.7.egg-info'))
+    >>> metadata.name
+    'distribute'
+    >>> metadata.version
+    '0.6.8'
+    >>> metadata.description
+    'Easily download, build, install, upgrade, and uninstall Python packages'
+
+Notice that the class can also be instantiated with a metadata file path to
+load its values::
+
+    >>> pkg_info_path = 'distribute-0.6.8-py2.7.egg-info'
+    >>> Metadata(pkg_info_path).name
+    'distribute'
+
+
+.. XXX These comments have been here for at least ten years. Write the
+       sections or delete the comments (we can maybe ask Greg Ward about
+       the planned contents). (Unindent to make them section titles)
+
+    .. multiple-ext::
+
+       Multiple extension modules
+       ==========================
+
+       Putting it all together
+       =======================
diff --git a/Doc/packaging/extending.rst b/Doc/packaging/extending.rst
new file mode 100644
--- /dev/null
+++ b/Doc/packaging/extending.rst
@@ -0,0 +1,95 @@
+.. _extending-packaging:
+
+*******************
+Extending Distutils
+*******************
+
+Distutils can be extended in various ways.  Most extensions take the form of new
+commands or replacements for existing commands.  New commands may be written to
+support new types of platform-specific packaging, for example, while
+replacements for existing commands may be made to modify details of how the
+command operates on a package.
+
+Most extensions of Packaging are made within :file:`setup.py` scripts that
+want to modify existing commands; many simply add a few file extensions that
+should be copied into packages in addition to :file:`.py` files as a
+convenience.
+
+Most packaging command implementations are subclasses of the
+:class:`packaging.cmd.Command` class.  New commands may directly inherit from
+:class:`Command`, while replacements often derive from :class:`Command`
+indirectly, directly subclassing the command they are replacing.  Commands are
+required to derive from :class:`Command`.
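+
+As a hedged sketch only (it assumes the distutils-style command protocol of
+``user_options``, :meth:`initialize_options`, :meth:`finalize_options` and
+:meth:`run`, and the base class name used above; the ``report`` command itself
+is hypothetical), a minimal new command could look like this::
+
+   from packaging.cmd import Command
+
+   class report(Command):
+       """Hypothetical command that prints a short status message."""
+
+       description = 'print a short report about the distribution'
+       user_options = []              # this sketch takes no options
+
+       def initialize_options(self):
+           pass                       # nothing to initialize
+
+       def finalize_options(self):
+           pass                       # nothing to validate
+
+       def run(self):
+           # A real command would do its work here.
+           print('report: nothing to do')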
+
+.. .. _extend-existing:
+      Extending existing commands
+      ===========================
+
+
+.. .. _new-commands:
+      Writing new commands
+      ====================
+
+
+Integrating new commands
+========================
+
+There are different ways to integrate new command implementations into
+packaging.  The most difficult is to lobby for the inclusion of the new features
+in packaging itself, and wait for (and require) a version of Python that
+provides that support.  This is really hard for many reasons.
+
+The most common, and possibly the most reasonable for most needs, is to include
+the new implementations with your :file:`setup.py` script, and cause the
+:func:`packaging.core.setup` function to use them::
+
+   from packaging.core import setup
+   from packaging.command.build_py import build_py as _build_py
+
+   class build_py(_build_py):
+       """Specialized Python source builder."""
+
+       # implement whatever needs to be different...
+
+   setup(..., cmdclass={'build_py': build_py})
+
+This approach is most valuable if the new implementations must be used in order
+to use a particular package, as everyone interested in the package will need to
+have the new command implementation.
+
+Beginning with Python 2.4, a third option is available, intended to allow new
+commands to be added which can support existing :file:`setup.py` scripts without
+requiring modifications to the Python installation.  This is expected to allow
+third-party extensions to provide support for additional packaging systems, but
+the commands can be used for anything packaging commands can be used for.  A new
+configuration option, :option:`command_packages` (command-line option
+:option:`--command-packages`), can be used to specify additional packages to be
+searched for modules implementing commands.  Like all packaging options, this
+can be specified on the command line or in a configuration file.  This option
+can only be set in the ``[global]`` section of a configuration file, or before
+any commands on the command line.  If set in a configuration file, it can be
+overridden from the command line; setting it to an empty string on the command
+line causes the default to be used.  This should never be set in a configuration
+file provided with a package.
+
+This new option can be used to add any number of packages to the list of
+packages searched for command implementations; multiple package names should be
+separated by commas.  When not specified, the search is only performed in the
+:mod:`packaging.command` package.  When :file:`setup.py` is run with the option
+:option:`--command-packages` :option:`distcmds,buildcmds`, however, the packages
+:mod:`packaging.command`, :mod:`distcmds`, and :mod:`buildcmds` will be searched
+in that order.  New commands are expected to be implemented in modules of the
+same name as the command by classes sharing the same name.  Given the example
+command-line option above, the command :command:`bdist_openpkg` could be
+implemented by the class :class:`distcmds.bdist_openpkg.bdist_openpkg` or
+:class:`buildcmds.bdist_openpkg.bdist_openpkg`.
+
+
+Adding new distribution types
+=============================
+
+Commands that create distributions (files in the :file:`dist/` directory) need
+to add ``(command, filename)`` pairs to ``self.distribution.dist_files`` so that
+:command:`upload` can upload it to PyPI.  The *filename* in the pair contains no
+path information, only the name of the file itself.  In dry-run mode, pairs
+should still be added to represent what would have been created.
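+
+As a hedged sketch (the command name ``bdist_example`` and the archive name are
+hypothetical; only the ``dist_files`` bookkeeping described above is the point
+here), the end of such a command's :meth:`run` method could look like this::
+
+   def run(self):
+       # ... create dist/foo-1.0.example.tar.gz here, unless in dry-run mode ...
+       filename = 'foo-1.0.example.tar.gz'   # base name only, no path
+       # Record the (command, filename) pair, even in dry-run mode, so that
+       # the upload command knows which file was (or would have been) created.
+       self.distribution.dist_files.append(('bdist_example', filename))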
diff --git a/Doc/packaging/index.rst b/Doc/packaging/index.rst
new file mode 100644
--- /dev/null
+++ b/Doc/packaging/index.rst
@@ -0,0 +1,45 @@
+.. _packaging-index:
+
+##############################
+ Distributing Python Projects
+##############################
+
+:Authors: The Fellowship of the Packaging
+:Email: distutils-sig@python.org
+:Release: |version|
+:Date: |today|
+
+This document describes Packaging for Python authors: how to use the module to
+make Python applications, packages or modules easily available to a wider
+audience with very little overhead for build/release/install mechanics.
+
+.. toctree::
+   :maxdepth: 2
+   :numbered:
+
+   tutorial
+   setupcfg
+   introduction
+   setupscript
+   configfile
+   sourcedist
+   builtdist
+   packageindex
+   uploading
+   examples
+   extending
+   commandhooks
+   commandref
+
+
+.. seealso::
+
+   :ref:`packaging-install-index`
+      A user-centered manual which includes information on adding projects
+      into an existing Python installation.  You do not need to be a Python
+      programmer to read this manual.
+
+   :mod:`packaging`
+      A library reference for developers of packaging tools wanting to use
+      standalone building blocks like :mod:`~packaging.version` or
+      :mod:`~packaging.metadata`, or extend Packaging itself.
diff --git a/Doc/packaging/introduction.rst b/Doc/packaging/introduction.rst
new file mode 100644
--- /dev/null
+++ b/Doc/packaging/introduction.rst
@@ -0,0 +1,193 @@
+.. _packaging-intro:
+
+*****************************
+An Introduction to Packaging
+*****************************
+
+This document covers using Packaging to distribute your Python modules,
+concentrating on the role of developer/distributor.  If you're looking for
+information on installing Python modules you should refer to the
+:ref:`packaging-install-index` chapter.
+
+Throughout this documentation, the terms "Distutils", "the Distutils" and
+"Packaging" will be used interchangeably.
+
+.. _packaging-concepts:
+
+Concepts & Terminology
+======================
+
+Using Distutils is quite simple both for module developers and for
+users/administrators installing third-party modules.  As a developer, your
+responsibilities (apart from writing solid, well-documented and well-tested
+code, of course!) are:
+
+* writing a setup script (:file:`setup.py` by convention)
+
+* (optional) writing a setup configuration file
+
+* creating a source distribution
+
+* (optional) creating one or more "built" (binary) distributions of your
+  project
+
+All of these tasks are covered in this document.
+
+Not all module developers have access to multiple platforms, so one cannot
+expect them to create built distributions for every platform.  To remedy
+this, it is hoped that intermediaries called *packagers* will arise to address
+this need.  Packagers take source distributions released by module developers,
+build them on one or more platforms and release the resulting built
+distributions.  Thus, users on a greater range of platforms will be able to
+install the most popular Python modules in the most natural way for their
+platform without having to run a setup script or compile a single line of code.
+
+
+.. _packaging-simple-example:
+
+A Simple Example
+================
+
+A setup script is usually quite simple.  Since it's written in Python, there
+are no arbitrary limits to what you can do with it, though you should be
+careful about putting expensive operations in your setup script: unlike, say,
+Autoconf-style configure scripts, the setup script may be run multiple times
+in the course of building and installing a module distribution.
+
+If all you want to do is distribute a module called :mod:`foo`, contained in a
+file :file:`foo.py`, then your setup script can be as simple as::
+
+   from packaging.core import setup
+   setup(name='foo',
+         version='1.0',
+         py_modules=['foo'])
+
+Some observations:
+
+* most information that you supply to the Distutils is supplied as keyword
+  arguments to the :func:`setup` function
+
+* those keyword arguments fall into two categories: package metadata (name,
+  version number, etc.) and information about what's in the package (a list
+  of pure Python modules in this case)
+
+* modules are specified by module name, not filename (the same will hold true
+  for packages and extensions)
+
+* it's recommended that you supply a little more metadata than we have in the
+  example.  In particular your name, email address and a URL for the
+  project if appropriate (see section :ref:`packaging-setup-script` for an example)
+
+To create a source distribution for this module you would create a setup
+script, :file:`setup.py`, containing the above code and run::
+
+   python setup.py sdist
+
+which will create an archive file (e.g., tarball on Unix, ZIP file on Windows)
+containing your setup script :file:`setup.py`, and your module :file:`foo.py`.
+The archive file will be named :file:`foo-1.0.tar.gz` (or :file:`.zip`), and
+will unpack into a directory :file:`foo-1.0`.
+
+If an end-user wishes to install your :mod:`foo` module all he has to do is
+download :file:`foo-1.0.tar.gz` (or :file:`.zip`), unpack it, and from the
+:file:`foo-1.0` directory run ::
+
+   python setup.py install
+
+which will copy :file:`foo.py` to the appropriate directory for
+third-party modules in their Python installation.
+
+This simple example demonstrates some fundamental concepts of Distutils.
+First, both developers and installers have the same basic user interface, i.e.
+the setup script.  The difference is which Distutils *commands* they use: the
+:command:`sdist` command is almost exclusively for module developers, while
+:command:`install` is more often used by installers (although some developers
+will want to install their own code occasionally).
+
+If you want to make things really easy for your users, you can create more
+than one built distribution for them.  For instance, if you are running on a
+Windows machine and want to make things easy for other Windows users, you can
+create an executable installer (the most appropriate type of built distribution
+for this platform) with the :command:`bdist_wininst` command.  For example::
+
+   python setup.py bdist_wininst
+
+will create an executable installer, :file:`foo-1.0.win32.exe`, in the current
+directory.  You can find out what distribution formats are available at any time
+by running ::
+
+   python setup.py bdist --help-formats
+
+
+.. _packaging-python-terms:
+
+General Python terminology
+==========================
+
+If you're reading this document, you probably have a good idea of what Python
+modules, extensions and so forth are.  Nevertheless, just to be sure that
+everyone is on the same page, here's a quick overview of Python terms:
+
+module
+   The basic unit of code reusability in Python: a block of code imported by
+   some other code.  Three types of modules are important to us here: pure
+   Python modules, extension modules and packages.
+
+pure Python module
+   A module written in Python and contained in a single :file:`.py` file (and
+   possibly associated :file:`.pyc` and/or :file:`.pyo` files).  Sometimes
+   referred to as a "pure module."
+
+extension module
+   A module written in the low-level language of the Python implementation: C/C++
+   for Python, Java for Jython.  Typically contained in a single dynamically
+   loaded pre-compiled file, e.g. a shared object (:file:`.so`) file for Python
+   extensions on Unix, a DLL (given the :file:`.pyd` extension) for Python
+   extensions on Windows, or a Java class file for Jython extensions.  Note that
+   currently Distutils only handles C/C++ extensions for Python.
+
+package
+   A module that contains other modules, typically contained in a directory of
+   the filesystem and distinguished from other directories by the presence of a
+   file :file:`__init__.py`.
+
+root package
+   The root of the hierarchy of packages.  (This isn't really a package,
+   since it doesn't have an :file:`__init__.py` file.  But... we have to
+   call it something, right?)  The vast majority of the standard library is
+   in the root package, as are many small standalone third-party modules that
+   don't belong to a larger module collection.  Unlike regular packages,
+   modules in the root package can be found in many directories: in fact,
+   every directory listed in ``sys.path`` contributes modules to the root
+   package.
+
+
+.. _packaging-term:
+
+Distutils-specific terminology
+==============================
+
+The following terms apply more specifically to the domain of distributing Python
+modules using Distutils:
+
+module distribution
+   A collection of Python modules distributed together as a single downloadable
+   resource and meant to be installed all as one.  Examples of some well-known
+   module distributions are NumPy, SciPy, PIL (the Python Imaging
+   Library) or mxBase.  (Module distributions would be called a *package*,
+   except that term is already taken in the Python context: a single module
+   distribution may contain zero, one, or many Python packages.)
+
+pure module distribution
+   A module distribution that contains only pure Python modules and packages.
+   Sometimes referred to as a "pure distribution."
+
+non-pure module distribution
+   A module distribution that contains at least one extension module.  Sometimes
+   referred to as a "non-pure distribution."
+
+distribution root
+   The top-level directory of your source tree (or  source distribution).  The
+   directory where :file:`setup.py` exists.  Generally  :file:`setup.py` will
+   be run from this directory.
diff --git a/Doc/packaging/packageindex.rst b/Doc/packaging/packageindex.rst
new file mode 100644
--- /dev/null
+++ b/Doc/packaging/packageindex.rst
@@ -0,0 +1,104 @@
+.. _packaging-package-index:
+
+**********************************
+Registering with the Package Index
+**********************************
+
+The Python Package Index (PyPI) holds metadata describing distributions
+packaged with packaging. The packaging command :command:`register` is used to
+submit your distribution's metadata to the index. It is invoked as follows::
+
+    python setup.py register
+
+Distutils will respond with the following prompt::
+
+    running register
+    We need to know who you are, so please choose either:
+        1. use your existing login,
+        2. register as a new user,
+        3. have the server generate a new password for you (and email it to you), or
+        4. quit
+    Your selection [default 1]:
+
+Note: if your username and password are saved locally, you will not see this
+menu.
+
+If you have not registered with PyPI, then you will need to do so now. You
+should choose option 2, and enter your details as required. Soon after
+submitting your details, you will receive an email which will be used to confirm
+your registration.
+
+Once you are registered, you may choose option 1 from the menu. You will be
+prompted for your PyPI username and password, and :command:`register` will then
+submit your metadata to the index.
+
+You may submit any number of versions of your distribution to the index. If you
+alter the metadata for a particular version, you may submit it again and the
+index will be updated.
+
+PyPI holds a record for each (name, version) combination submitted. The first
+user to submit information for a given name is designated the Owner of that
+name. They may submit changes through the :command:`register` command or through
+the web interface. They may also designate other users as Owners or Maintainers.
+Maintainers may edit the package information, but not designate other Owners or
+Maintainers.
+
+By default PyPI will list all versions of a given package. To hide certain
+versions, the Hidden property should be set to yes. This must be edited through
+the web interface.
+
+
+.. _packaging-pypirc:
+
+The .pypirc file
+================
+
+The format of the :file:`.pypirc` file is as follows::
+
+    [packaging]
+    index-servers =
+        pypi
+
+    [pypi]
+    repository: <repository-url>
+    username: <username>
+    password: <password>
+
+The *packaging* section defines an *index-servers* variable that lists the
+names of all sections describing a repository.
+
+Each section describing a repository defines three variables:
+
+- *repository*, which defines the URL of the PyPI server.  Defaults to
+  ``http://pypi.python.org/pypi``.
+- *username*, which is the registered username on the PyPI server.
+- *password*, which will be used to authenticate.  If omitted, the user
+  will be prompted to type it when needed.
+
+If you want to define another server a new section can be created and
+listed in the *index-servers* variable::
+
+    [packaging]
+    index-servers =
+        pypi
+        other
+
+    [pypi]
+    repository: <repository-url>
+    username: <username>
+    password: <password>
+
+    [other]
+    repository: http://example.com/pypi
+    username: <username>
+    password: <password>
+
+:command:`register` can then be called with the ``-r`` option to select the
+repository to work with::
+
+    python setup.py register -r http://example.com/pypi
+
+For convenience, the name of the section that describes the repository
+may also be used::
+
+    python setup.py register -r other
diff --git a/Doc/packaging/setupcfg.rst b/Doc/packaging/setupcfg.rst
new file mode 100644
--- /dev/null
+++ b/Doc/packaging/setupcfg.rst
@@ -0,0 +1,648 @@
+.. highlightlang:: cfg
+
+*******************************************
+Specification of the :file:`setup.cfg` file
+*******************************************
+
+.. :version: 1.0
+
+This document describes :file:`setup.cfg`, an ini-style configuration file
+(compatible with :class:`configparser.RawConfigParser`) used by Packaging to
+replace the :file:`setup.py` file.
+
+Each section contains a description of its options.
+
+- Options that are marked *multi* can have multiple values, one value per
+  line.
+- Options that are marked *optional* can be omitted.
+- Options that are marked *environ* can use environment markers, as described
+  in :PEP:`345`.
+
+
+The sections are:
+
+global
+   Global options not related to one command.
+
+metadata
+   Name, version and other information defined by :PEP:`345`.
+
+files
+   Modules, scripts, data, documentation and other files to include in the
+   distribution.
+
+command sections
+   Options given for specific commands, identical to those that can be given
+   on the command line.
+
+
+Global options
+==============
+
+Contains global options for Packaging. This section is shared with Distutils.
+
+
+commands
+   Defined Packaging commands.  Each command is given by its fully
+   qualified name.  *optional*, *multi*
+
+   Examples::
+
+      [global]
+      commands =
+          package.setup.CustomSdistCommand
+          package.setup.BdistDeb
+
+compilers
+   Defined Packaging compilers.  Each compiler is given by its fully
+   qualified name.  *optional*, *multi*
+
+   Example::
+
+      [global]
+      compilers =
+          hotcompiler.SmartCCompiler
+
+setup_hook
+   Defines a callable that will be called right after the
+   :file:`setup.cfg` file is read.  The callable receives the configuration
+   in the form of a mapping and can make changes to it.  *optional*
+
+   Example::
+
+      [global]
+      setup_hook = package.setup.customize_dist
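+
+   As an illustrative sketch only (the exact shape of the mapping is assumed
+   here to be section names mapped to dictionaries of options), the hook
+   referenced above could look like this::
+
+      def customize_dist(config):
+          # config mirrors setup.cfg: section name -> dict of option values.
+          # Changes made in place are seen by Packaging afterwards.
+          metadata = config.setdefault('metadata', {})
+          metadata.setdefault('summary', 'Customized by setup_hook')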
+
+
+Metadata
+========
+
+The metadata section contains the metadata for the project as described in
+:PEP:`345`.  Field names are case-insensitive.
+
+Fields:
+
+name
+   Name of the project.
+
+version
+   Version of the project. Must comply with :PEP:`386`.
+
+platform
+   Platform specification describing an operating system
+   supported by the distribution which is not listed in the "Operating System"
+   Trove classifiers (:PEP:`301`).  *optional*, *multi*
+
+supported-platform
+   Binary distributions containing a PKG-INFO file will
+   use the Supported-Platform field in their metadata to specify the OS and
+   CPU for which the binary distribution was compiled.  The semantics of
+   the Supported-Platform field are free form. *optional*, *multi*
+
+summary
+   A one-line summary of what the distribution does.
+   (Used to be called *description* in Distutils1.)
+
+description
+   A longer description. (Used to be called *long_description*
+   in Distutils1.) A file can be provided in the *description-file* field.
+   *optional*
+
+description-file
+   path to a text file that will be used for the
+   **description** field. *optional*
+
+keywords
+   A list of additional keywords to be used to assist searching
+   for the distribution in a larger catalog. Comma or space-separated.
+   *optional*
+
+home-page
+   The URL for the distribution's home page.
+
+download-url
+   The URL from which this version of the distribution
+   can be downloaded. *optional*
+
+author
+   Author's name. *optional*
+
+author-email
+   Author's e-mail. *optional*
+
+maintainer
+   Maintainer's name. *optional*
+
+maintainer-email
+   Maintainer's e-mail. *optional*
+
+license
+   Text indicating the terms of use, to be used when no Trove classifier
+   matches.  *optional*.
+
+classifiers
+   Classification for the distribution, as described in PEP 301.
+   *optional*, *multi*, *environ*
+
+requires-dist
+   name of another packaging project required as a dependency.
+   The format is *name (version)* where version is an optional
+   version declaration, as described in PEP 345. *optional*, *multi*, *environ*
+
+provides-dist
+   name of another packaging project contained within this
+   distribution.  Same format as *requires-dist*.  *optional*, *multi*,
+   *environ*
+
+obsoletes-dist
+   name of another packaging project this version obsoletes.
+   Same format as *requires-dist*.  *optional*, *multi*, *environ*
+
+requires-python
+   Specifies the Python version the distribution requires.
+   The value is a version number, as described in PEP 345.
+   *optional*, *multi*, *environ*
+
+requires-externals
+   a dependency on the host system.  This field is free-form,
+   and is just a hint for downstream maintainers.  *optional*, *multi*,
+   *environ*
+
+project-url
+   A label, followed by a browsable URL for the project, in the form
+   "label, url".  The label is limited to 32 characters.  *optional*, *multi*
+
+
+Example::
+
+   [metadata]
+   name = pypi2rpm
+   version = 0.1
+   author = Tarek Ziadé
+   author-email = tarek@ziade.org
+   summary = Script that transforms an sdist archive into a RPM package
+   description-file = README
+   home-page = http://bitbucket.org/tarek/pypi2rpm/wiki/Home
+   project-url:
+       Repository, http://bitbucket.org/tarek/pypi2rpm/
+       RSS feed, https://bitbucket.org/tarek/pypi2rpm/rss
+   classifier =
+       Development Status :: 3 - Alpha
+       License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)
+
+You should not give any explicit value for metadata-version: it will be guessed
+from the fields present in the file.
+
+
+Files
+=====
+
+This section describes the files included in the project.
+
+packages_root
+   the root directory containing all packages and modules
+   (default: current directory).  *optional*
+
+packages
+   a list of packages the project includes.  *optional*, *multi*
+
+modules
+   a list of modules the project includes.  *optional*, *multi*
+
+scripts
+   a list of scripts the project includes.  *optional*, *multi*
+
+extra_files
+   a list of patterns matching extra files to include.  *optional*,
+   *multi*
+
+Example::
+
+   [files]
+   packages_root = src
+   packages =
+       pypi2rpm
+       pypi2rpm.command
+
+   scripts =
+       pypi2rpm/pypi2rpm.py
+
+   extra_files =
+       setup.py
+       README
+
+
+.. Note::
+   The :file:`setup.cfg` configuration file is included by default.  Contrary to
+   Distutils, :file:`README` (or :file:`README.txt`) and :file:`setup.py` are
+   not included by default.
+
+
+Resources
+---------
+
+This section describes the files used by the project which must not be
+installed in the same place as Python modules or libraries; these files are
+called **resources**.  Examples are documentation files, script files,
+databases, etc.
+
+For declaring resources, you must use this notation::
+
+   source = destination
+
+Data-files are declared in the **resources** field in the **file** section, for
+example::
+
+   [files]
+   resources =
+       source1 = destination1
+       source2 = destination2
+
+The **source** part of the declaration is a relative path to a resource file
+(using the Unix path separator **/**).  For example, if you have this source
+tree::
+
+   foo/
+      doc/
+         doc.man
+      scripts/
+         foo.sh
+
+Your :file:`setup.cfg` will look like this::
+
+   [files]
+   resources =
+       doc/doc.man = destination_doc
+       scripts/foo.sh = destination_scripts
+
+The final path where a file will be placed is **destination** + **source**.
+In the previous example, **doc/doc.man** will be placed in
+**destination_doc/doc/doc.man** and **scripts/foo.sh** will be placed in
+**destination_scripts/scripts/foo.sh**.  (If you want more control over the
+final path, take a look at base_prefix_.)
+
+The **destination** part of a resource declaration is a path built from
+categories.  Giving an absolute path is generally a bad idea, as it will not be
+portable across systems, so you must use resource categories in your
+**destination** declarations.  Categories will be replaced by their real paths
+at installation time.  Using categories is all benefit: your declarations will
+be simpler and cross-platform, and packagers can place resource files where
+they want without breaking your code.
+
+Categories can be specified by using this syntax::
+
+   {category}
+
+Default categories are:
+
+* config
+* appdata
+* appdata.arch
+* appdata.persistent
+* appdata.disposable
+* help
+* icon
+* scripts
+* doc
+* info
+* man
+
+A special category, **{distribution.name}**, will be replaced by the name of
+the distribution; since most of the default categories already use it, it's
+usually not necessary to add **{distribution.name}** to your destination.
+
+If you use categories in your declarations (and you are encouraged to do so),
+the final path will be::
+
+   source + destination_expanded
+
+.. _example_final_path:
+
+For example, if you have this setup.cfg::
+
+   [metadata]
+   name = foo
+
+   [files]
+   resources =
+       doc/doc.man = {doc}
+
+And if **{doc}** is replaced by **{datadir}/doc/{distribution.name}**, the
+final path will be::
+
+   {datadir}/doc/foo/doc/doc.man
+
+where the {datadir} category is platform-dependent.
+
+
+More control on source part
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Glob syntax
+"""""""""""
+
+When you declare a source file, you can use a glob-like syntax to match
+multiple files, for example::
+
+   scripts/* = {scripts}
+
+This will match all the files in the scripts directory and place them in the
+{scripts} category.
+
+Glob tokens are:
+
+* ``*``: match all files.
+* ``?``: match any character.
+* ``**``: match any level of tree recursion (even 0).
+* ``{}``: match any of the comma-separated parts it contains (example:
+  ``{sh,bat}``).
+
+.. TODO Add examples
+
+Order of declaration
+""""""""""""""""""""
+
+The order of declaration is important if one file matches multiple rules.  The
+last rule that matches the file is used; this is useful if you have this source
+tree::
+
+   foo/
+      doc/
+         index.rst
+         setup.rst
+         documentation.txt
+         doc.tex
+         README
+
+and you want all the files in the doc directory to be placed in the {doc}
+category, but README placed in the {help} category: instead of listing all the
+files one by one, you can declare them in this way::
+
+   [files]
+   resources =
+       doc/* = {doc}
+       doc/README = {help}
+
+Exclude
+"""""""
+
+You can exclude files from a resources declaration by giving no destination;
+this can be useful if you have a non-resource file in the same directory as
+resource files::
+
+   foo/
+      doc/
+         RELEASES
+         doc.tex
+         documentation.txt
+         docu.rst
+
+Your **files** section will be::
+
+   [files]
+   resources =
+       doc/* = {doc}
+       doc/RELEASES =
+
+More control on destination part
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. _base_prefix:
+
+Defining a base prefix
+""""""""""""""""""""""
+
+When you define your resources, you can have more control over how the final
+path is computed.
+
+By default, the final path is::
+
+   destination + source
+
+This can generate long paths, for example (example_final_path_)::
+
+   {datadir}/doc/foo/doc/doc.man
+
+When you declare your source, you can use whitespace to split the source into a
+**prefix** and a **suffix**.  So, for example, if you have this source::
+
+   docs/ doc.man
+
+the **prefix** is "docs/" and the **suffix** is "doc.man".
+
+.. note::
+
+   Separator can be placed after a path separator or replace it. So these two
+   sources are equivalent::
+
+      docs/ doc.man
+      docs doc.man
+
+.. note::
+
+   Glob syntax works the same way with standard and split sources.
+   So these rules::
+
+      docs/*
+      docs/ *
+      docs *
+
+   will match all the files in the docs directory.
+
+When you use a split source, the final path is computed in this way::
+
+   destination + prefix
+
+So for example, if you have this setup.cfg::
+
+   [metadata]
+   name = foo
+
+   [files]
+   resources =
+       doc/ doc.man = {doc}
+
+And if **{doc}** is replaced by **{datadir}/doc/{distribution.name}**, the
+final path will be::
+
+   {datadir}/doc/foo/doc.man
+
+
+Overwriting paths for categories
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+This part is intended for system administrators or downstream OS packagers.
+
+The real paths of categories are registered in the :file:`sysconfig.cfg` file
+installed in your Python installation.  This file uses an ini format too.
+The content of the file is organized into several sections:
+
+* globals: standard category paths.
+* posix_prefix: standard paths for categories and installation paths for POSIX
+  systems.
+* other ones XXX
+
+Standard category paths are platform-independent; they generally refer to
+other categories, which are platform-dependent.  :mod:`sysconfig` will choose
+these categories from the section matching ``os.name``.  For example::
+
+   doc = {datadir}/doc/{distribution.name}
+
+This refers to the datadir category, which can differ between platforms.  On a
+POSIX system, it may be::
+
+   datadir = /usr/share
+
+So the final path will be::
+
+   doc = /usr/share/doc/{distribution.name}
+
+The platform-dependent categories are:
+
+* confdir
+* datadir
+* libdir
+* base
+
+
+Defining extra categories
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. TODO
+
+
+Examples
+^^^^^^^^
+
+These examples build on each other, but each one also works on its own.
+
+Resources in root dir
+"""""""""""""""""""""
+
+Source tree::
+
+   babar-1.0/
+      README
+      babar.sh
+      launch.sh
+      babar.py
+
+:file:`setup.cfg`::
+
+   [files]
+   resources =
+       README = {doc}
+       *.sh = {scripts}
+
+So babar.sh and launch.sh will be placed in the {scripts} directory.
+
+Now let's move all the scripts into a scripts directory.
+
+Resources in sub-directory
+""""""""""""""""""""""""""
+
+Source tree::
+
+   babar-1.1/
+      README
+      scripts/
+         babar.sh
+         launch.sh
+         LAUNCH
+      babar.py
+
+:file:`setup.cfg`::
+
+   [files]
+   resources =
+       README = {doc}
+       scripts/ LAUNCH = {doc}
+       scripts/ *.sh = {scripts}
+
+It's important to use the separator after scripts/ to install all the shell
+scripts into {scripts} instead of {scripts}/scripts.
+
+Now let's add some docs.
+
+Resources in multiple sub-directories
+"""""""""""""""""""""""""""""""""""""
+
+Source tree::
+
+   babar-1.2/
+      README
+      scripts/
+         babar.sh
+         launch.sh
+         LAUNCH
+      doc/
+         api
+         man
+      babar.py
+
+:file:`setup.cfg`::
+
+   [files]
+   resources =
+        README = {doc}
+        scripts/ LAUNCH = {doc}
+        scripts/ *.sh = {scripts}
+        doc/ * = {doc}
+        doc/ man = {man}
+
+We want to place all the files in the doc directory into the {doc} category,
+except man, which must be placed into the {man} category.  We use the order of
+declaration of the globs to choose the destination: the last glob that matches
+the file is used.
+
+Now let's add some scripts for windows users.
+
+Complete example
+""""""""""""""""
+
+Source tree::
+
+   babar-1.3/
+      README
+      doc/
+         api
+         man
+      scripts/
+         babar.sh
+         launch.sh
+         babar.bat
+         launch.bat
+         LAUNCH
+
+:file:`setup.cfg`::
+
+    [files]
+    resources =
+        README = {doc}
+        scripts/ LAUNCH = {doc}
+        scripts/ *.{sh,bat} = {scripts}
+        doc/ * = {doc}
+        doc/ man = {man}
+
+We use the brace expansion syntax to place all the shell and batch scripts into
+the {scripts} category.
+
+
+Command sections
+================
+
+To pass options to commands without having to type them on the command line
+for each invocation, you can write them in the :file:`setup.cfg` file, in a
+section named after the command.  Example::
+
+   [sdist]
+   # special function to add custom files
+   manifest-builders = package.setup.list_extra_files
+
+   [build]
+   use-2to3 = True
+
+   [build_ext]
+   inplace = on
+
+   [check]
+   strict = on
+   all = on
+
+Option values given in the configuration file can be overridden on the command
+line.  See :ref:`packaging-setup-config` for more information.
diff --git a/Doc/packaging/setupscript.rst b/Doc/packaging/setupscript.rst
new file mode 100644
--- /dev/null
+++ b/Doc/packaging/setupscript.rst
@@ -0,0 +1,689 @@
+.. _packaging-setup-script:
+
+************************
+Writing the Setup Script
+************************
+
+The setup script is the center of all activity in building, distributing, and
+installing modules using Distutils.  The main purpose of the setup script is
+to describe your module distribution to Distutils, so that the various
+commands that operate on your modules do the right thing.  As we saw in section
+:ref:`packaging-simple-example`, the setup script consists mainly of a
+call to :func:`setup`, with most of the information supplied as
+keyword arguments.
+
+Here's a slightly more involved example, which we'll follow for the next couple
+of sections: a setup script that could be used for Packaging itself::
+
+    #!/usr/bin/env python
+
+    from packaging.core import setup, find_packages
+
+    setup(name='Packaging',
+          version='1.0',
+          summary='Python Distribution Utilities',
+          keywords=['packaging'],
+          author=u'Tarek Ziadé',
+          author_email='tarek at ziade.org',
+          home_page='http://bitbucket.org/tarek/packaging/wiki/Home',
+          license='PSF',
+          packages=find_packages())
+
+
+There are only two differences between this and the trivial one-file
+distribution presented in section :ref:`packaging-simple-example`: more
+metadata and the specification of pure Python modules by package rather than
+by module.  This is important since Packaging consists of a couple of dozen
+modules split into (so far) two packages; an explicit list of every module
+would be tedious to generate and difficult to maintain.  For more information
+on the additional metadata, see section :ref:`packaging-metadata`.
+
+Note that any pathnames (files or directories) supplied in the setup script
+should be written using the Unix convention, i.e. slash-separated.  The
+Distutils will take care of converting this platform-neutral representation into
+whatever is appropriate on your current platform before actually using the
+pathname.  This makes your setup script portable across operating systems, which
+of course is one of the major goals of the Distutils.  In this spirit, all
+pathnames in this document are slash-separated.
+
+This, of course, only applies to pathnames given to Distutils functions.  If
+you, for example, use standard Python functions such as :func:`glob.glob` or
+:func:`os.listdir` to specify files, you should be careful to write portable
+code instead of hardcoding path separators::
+
+    glob.glob(os.path.join('mydir', 'subdir', '*.html'))
+    os.listdir(os.path.join('mydir', 'subdir'))
+
+
+.. _packaging-listing-packages:
+
+Listing whole packages
+======================
+
+The :option:`packages` option tells the Distutils to process (build, distribute,
+install, etc.) all pure Python modules found in each package mentioned in the
+:option:`packages` list.  In order to do this, of course, there has to be a
+correspondence between package names and directories in the filesystem.  The
+default correspondence is the most obvious one, i.e. package :mod:`packaging` is
+found in the directory :file:`packaging` relative to the distribution root.
+Thus, when you say ``packages = ['foo']`` in your setup script, you are
+promising that the Distutils will find a file :file:`foo/__init__.py` (which
+might be spelled differently on your system, but you get the idea) relative to
+the directory where your setup script lives.  If you break this promise, the
+Distutils will issue a warning but still process the broken package anyway.
+
+If you use a different convention to lay out your source directory, that's no
+problem: you just have to supply the :option:`package_dir` option to tell the
+Distutils about your convention.  For example, say you keep all Python source
+under :file:`lib`, so that modules in the "root package" (i.e., not in any
+package at all) are in :file:`lib`, modules in the :mod:`foo` package are in
+:file:`lib/foo`, and so forth.  Then you would put ::
+
+    package_dir = {'': 'lib'}
+
+in your setup script.  The keys to this dictionary are package names, and an
+empty package name stands for the root package.  The values are directory names
+relative to your distribution root.  In this case, when you say ``packages =
+['foo']``, you are promising that the file :file:`lib/foo/__init__.py` exists.
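+
+As an illustration only (the project name below is made up), a minimal setup
+script using this layout might read::
+
+    from packaging.core import setup
+
+    setup(name='foobar',
+          version='1.0',
+          package_dir={'': 'lib'},
+          packages=['foo'])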
+
+Another possible convention is to put the :mod:`foo` package right in
+:file:`lib`, the :mod:`foo.bar` package in :file:`lib/bar`, etc.  This would be
+written in the setup script as ::
+
+    package_dir = {'foo': 'lib'}
+
+A ``package: dir`` entry in the :option:`package_dir` dictionary implicitly
+applies to all packages below *package*, so the :mod:`foo.bar` case is
+automatically handled here.  In this example, having ``packages = ['foo',
+'foo.bar']`` tells the Distutils to look for :file:`lib/__init__.py` and
+:file:`lib/bar/__init__.py`.  (Keep in mind that although :option:`package_dir`
+applies recursively, you must explicitly list all packages in
+:option:`packages`: the Distutils will *not* recursively scan your source tree
+looking for any directory with an :file:`__init__.py` file.)
+
+
+.. _packaging-listing-modules:
+
+Listing individual modules
+==========================
+
+For a small module distribution, you might prefer to list all modules rather
+than listing packages---especially the case of a single module that goes in the
+"root package" (i.e., no package at all).  This simplest case was shown in
+section :ref:`packaging-simple-example`; here is a slightly more involved
+example::
+
+    py_modules = ['mod1', 'pkg.mod2']
+
+This describes two modules, one of them in the "root" package, the other in the
+:mod:`pkg` package.  Again, the default package/directory layout implies that
+these two modules can be found in :file:`mod1.py` and :file:`pkg/mod2.py`, and
+that :file:`pkg/__init__.py` exists as well. And again, you can override the
+package/directory correspondence using the :option:`package_dir` option.
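+
+Spelled out as a complete, though hypothetical, setup script, this example
+could look like::
+
+    from packaging.core import setup
+
+    setup(name='example',
+          version='1.0',
+          py_modules=['mod1', 'pkg.mod2'])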
+
+
+.. _packaging-describing-extensions:
+
+Describing extension modules
+============================
+
+Just as writing Python extension modules is a bit more complicated than writing
+pure Python modules, describing them to the Distutils is a bit more complicated.
+Unlike pure modules, it's not enough just to list modules or packages and expect
+the Distutils to go out and find the right files; you have to specify the
+extension name, source file(s), and any compile/link requirements (include
+directories, libraries to link with, etc.).
+
+.. XXX read over this section
+
+All of this is done through another keyword argument to :func:`setup`, the
+:option:`ext_modules` option.  :option:`ext_modules` is just a list of
+:class:`Extension` instances, each of which describes a single extension module.
+Suppose your distribution includes a single extension, called :mod:`foo` and
+implemented by :file:`foo.c`.  If no additional instructions to the
+compiler/linker are needed, describing this extension is quite simple::
+
+    Extension('foo', ['foo.c'])
+
+The :class:`Extension` class can be imported from :mod:`packaging.core` along
+with :func:`setup`.  Thus, the setup script for a module distribution that
+contains only this one extension and nothing else might be::
+
+    from packaging.core import setup, Extension
+    setup(name='foo',
+          version='1.0',
+          ext_modules=[Extension('foo', ['foo.c'])])
+
+The :class:`Extension` class (actually, the underlying extension-building
+machinery implemented by the :command:`build_ext` command) supports a great deal
+of flexibility in describing Python extensions, which is explained in the
+following sections.
+
+
+Extension names and packages
+----------------------------
+
+The first argument to the :class:`Extension` constructor is always the name of
+the extension, including any package names.  For example, ::
+
+    Extension('foo', ['src/foo1.c', 'src/foo2.c'])
+
+describes an extension that lives in the root package, while ::
+
+    Extension('pkg.foo', ['src/foo1.c', 'src/foo2.c'])
+
+describes the same extension in the :mod:`pkg` package.  The source files and
+resulting object code are identical in both cases; the only difference is where
+in the filesystem (and therefore where in Python's namespace hierarchy) the
+resulting extension lives.
+
+If you have a number of extensions all in the same package (or all under the
+same base package), use the :option:`ext_package` keyword argument to
+:func:`setup`.  For example, ::
+
+    setup(...,
+          ext_package='pkg',
+          ext_modules=[Extension('foo', ['foo.c']),
+                       Extension('subpkg.bar', ['bar.c'])])
+
+will compile :file:`foo.c` to the extension :mod:`pkg.foo`, and :file:`bar.c` to
+:mod:`pkg.subpkg.bar`.
+
+
+Extension source files
+----------------------
+
+The second argument to the :class:`Extension` constructor is a list of source
+files.  Since the Distutils currently only support C, C++, and Objective-C
+extensions, these are normally C/C++/Objective-C source files.  (Be sure to use
+appropriate extensions to distinguish C++\ source files: :file:`.cc` and
+:file:`.cpp` seem to be recognized by both Unix and Windows compilers.)
+
+However, you can also include SWIG interface (:file:`.i`) files in the list; the
+:command:`build_ext` command knows how to deal with SWIG extensions: it will run
+SWIG on the interface file and compile the resulting C/C++ file into your
+extension.
+
+.. XXX SWIG support is rough around the edges and largely untested!
+
+This warning notwithstanding, options to SWIG can currently be passed like
+this::
+
+    setup(...,
+          ext_modules=[Extension('_foo', ['foo.i'],
+                                 swig_opts=['-modern', '-I../include'])],
+          py_modules=['foo'])
+
+Or on the command line like this::
+
+    > python setup.py build_ext --swig-opts="-modern -I../include"
+
+On some platforms, you can include non-source files that are processed by the
+compiler and included in your extension.  Currently, this just means Windows
+message text (:file:`.mc`) files and resource definition (:file:`.rc`) files for
+Visual C++. These will be compiled to binary resource (:file:`.res`) files and
+linked into the executable.
+
+
+Preprocessor options
+--------------------
+
+Three optional arguments to :class:`Extension` will help if you need to specify
+include directories to search or preprocessor macros to define/undefine:
+``include_dirs``, ``define_macros``, and ``undef_macros``.
+
+For example, if your extension requires header files in the :file:`include`
+directory under your distribution root, use the ``include_dirs`` option::
+
+    Extension('foo', ['foo.c'], include_dirs=['include'])
+
+You can specify absolute directories there; if you know that your extension will
+only be built on Unix systems with X11R6 installed to :file:`/usr`, you can get
+away with ::
+
+    Extension('foo', ['foo.c'], include_dirs=['/usr/include/X11'])
+
+You should avoid this sort of non-portable usage if you plan to distribute your
+code: it's probably better to write C code like  ::
+
+    #include <X11/Xlib.h>
+
+If you need to include header files from some other Python extension, you can
+take advantage of the fact that header files are installed in a consistent way
+by the Distutils :command:`install_headers` command.  For example, the Numerical
+Python header files are installed (on a standard Unix installation) to
+:file:`/usr/local/include/python1.5/Numerical`. (The exact location will differ
+according to your platform and Python installation.)  Since the Python include
+directory---\ :file:`/usr/local/include/python1.5` in this case---is always
+included in the search path when building Python extensions, the best approach
+is to write C code like  ::
+
+    #include <Numerical/arrayobject.h>
+
+.. TODO check if it's d2.sysconfig or the new sysconfig module now
+
+If you must put the :file:`Numerical` include directory right into your header
+search path, though, you can find that directory using the Distutils
+:mod:`packaging.sysconfig` module::
+
+    import os
+    from packaging.sysconfig import get_python_inc
+    incdir = os.path.join(get_python_inc(plat_specific=1), 'Numerical')
+    setup(...,
+          Extension(..., include_dirs=[incdir]))
+
+Even though this is quite portable---it will work on any Python installation,
+regardless of platform---it's probably easier to just write your C code in the
+sensible way.
+
+You can define and undefine preprocessor macros with the ``define_macros`` and
+``undef_macros`` options. ``define_macros`` takes a list of ``(name, value)``
+tuples, where ``name`` is the name of the macro to define (a string) and
+``value`` is its value: either a string or ``None``.  (Defining a macro ``FOO``
+to ``None`` is the equivalent of a bare ``#define FOO`` in your C source: with
+most compilers, this sets ``FOO`` to the string ``1``.)  ``undef_macros`` is
+just a list of macros to undefine.
+
+For example::
+
+    Extension(...,
+              define_macros=[('NDEBUG', '1'),
+                             ('HAVE_STRFTIME', None)],
+              undef_macros=['HAVE_FOO', 'HAVE_BAR'])
+
+is the equivalent of having this at the top of every C source file::
+
+    #define NDEBUG 1
+    #define HAVE_STRFTIME
+    #undef HAVE_FOO
+    #undef HAVE_BAR
+
+
+Library options
+---------------
+
+You can also specify the libraries to link against when building your extension,
+and the directories to search for those libraries.  The ``libraries`` option is
+a list of libraries to link against, ``library_dirs`` is a list of directories
+to search for libraries at  link-time, and ``runtime_library_dirs`` is a list of
+directories to  search for shared (dynamically loaded) libraries at run-time.
+
+For example, if you need to link against libraries known to be in the standard
+library search path on target systems ::
+
+    Extension(...,
+              libraries=['gdbm', 'readline'])
+
+If you need to link with libraries in a non-standard location, you'll have to
+include the location in ``library_dirs``::
+
+    Extension(...,
+              library_dirs=['/usr/X11R6/lib'],
+              libraries=['X11', 'Xt'])
+
+(Again, this sort of non-portable construct should be avoided if you intend to
+distribute your code.)
+
+.. XXX Should mention clib libraries here or somewhere else!
+
+
+Other options
+-------------
+
+There are still some other options which can be used to handle special cases.
+
+The :option:`optional` option is a boolean; if it is true,
+a build failure in the extension will not abort the build process, but
+instead simply not install the failing extension.
+
+The :option:`extra_objects` option is a list of object files to be passed to the
+linker. These files must not have extensions, as the default extension for the
+compiler is used.
+
+:option:`extra_compile_args` and :option:`extra_link_args` can be used to
+specify additional command-line options for the respective compiler and linker
+command lines.
+
+:option:`export_symbols` is only useful on Windows.  It can contain a list of
+symbols (functions or variables) to be exported. This option is not needed when
+building compiled extensions: Distutils  will automatically add ``initmodule``
+to the list of exported symbols.
+
+The :option:`depends` option is a list of files that the extension depends on
+(for example header files).  The build command will call the compiler on the
+sources to rebuild the extension if any of these files has been modified since
+the previous build.
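+
+To give an idea of how several of these options fit together, here is a sketch
+in which the file names and the compiler and linker flags are purely
+illustrative and assume a Unix compiler::
+
+    Extension('foo', ['foo.c'],
+              optional=True,
+              extra_compile_args=['-O0'],
+              extra_link_args=['-Wl,--as-needed'],
+              depends=['foo.h'])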
+
+Relationships between Distributions and Packages
+================================================
+
+.. FIXME rewrite to update to PEP 345 (but without dist/release confusion)
+
+A distribution may relate to packages in three specific ways:
+
+#. It can require packages or modules.
+
+#. It can provide packages or modules.
+
+#. It can obsolete packages or modules.
+
+These relationships can be specified using keyword arguments to the
+:func:`packaging.core.setup` function.
+
+Dependencies on other Python modules and packages can be specified by supplying
+the *requires* keyword argument to :func:`setup`. The value must be a list of
+strings.  Each string specifies a package that is required, and optionally what
+versions are sufficient.
+
+To specify that any version of a module or package is required, the string
+should consist entirely of the module or package name. Examples include
+``'mymodule'`` and ``'xml.parsers.expat'``.
+
+If specific versions are required, a sequence of qualifiers can be supplied in
+parentheses.  Each qualifier may consist of a comparison operator and a version
+number.  The accepted comparison operators are::
+
+    <    >    ==
+    <=   >=   !=
+
+These can be combined by using multiple qualifiers separated by commas (and
+optional whitespace).  In this case, all of the qualifiers must be matched; a
+logical AND is used to combine the evaluations.
+
+Let's look at a bunch of examples:
+
++-------------------------+----------------------------------------------+
+| Requires Expression     | Explanation                                  |
++=========================+==============================================+
+| ``==1.0``               | Only version ``1.0`` is compatible           |
++-------------------------+----------------------------------------------+
+| ``>1.0, !=1.5.1, <2.0`` | Any version after ``1.0`` and before ``2.0`` |
+|                         | is compatible, except ``1.5.1``              |
++-------------------------+----------------------------------------------+
+
+Now that we can specify dependencies, we also need to be able to specify what we
+provide that other distributions can require.  This is done using the *provides*
+keyword argument to :func:`setup`. The value for this keyword is a list of
+strings, each of which names a Python module or package, and optionally
+identifies the version.  If the version is not specified, it is assumed to match
+that of the distribution.
+
+Some examples:
+
++---------------------+----------------------------------------------+
+| Provides Expression | Explanation                                  |
++=====================+==============================================+
+| ``mypkg``           | Provide ``mypkg``, using the distribution    |
+|                     | version                                      |
++---------------------+----------------------------------------------+
+| ``mypkg (1.1)``     | Provide ``mypkg`` version 1.1, regardless of |
+|                     | the distribution version                     |
++---------------------+----------------------------------------------+
+
+A package can declare that it obsoletes other packages using the *obsoletes*
+keyword argument.  The value for this is similar to that of the *requires*
+keyword: a list of strings giving module or package specifiers.  Each specifier
+consists of a module or package name optionally followed by one or more version
+qualifiers.  Version qualifiers are given in parentheses after the module or
+package name.
+
+The versions identified by the qualifiers are those that are obsoleted by the
+distribution being described.  If no qualifiers are given, all versions of the
+named module or package are understood to be obsoleted.
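+
+Putting the three kinds of relationships together, a sketch (all project names
+and versions below are invented) might be::
+
+    setup(...,
+          requires=['xml.parsers.expat', 'somepkg (>=1.0, !=1.5.1, <2.0)'],
+          provides=['mypkg (1.1)'],
+          obsoletes=['oldpkg (<=0.9)'])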
+
+.. _packaging-installing-scripts:
+
+Installing Scripts
+==================
+
+So far we have been dealing with pure and non-pure Python modules, which are
+usually not run by themselves but imported by scripts.
+
+Scripts are files containing Python source code, intended to be started from the
+command line.  Scripts don't require Distutils to do anything very complicated.
+The only clever feature is that if the first line of the script starts with
+``#!`` and contains the word "python", the Distutils will adjust the first line
+to refer to the current interpreter location.  The :option:`--executable` (or
+:option:`-e`) option allows the interpreter path to be explicitly overridden.
+
+The :option:`scripts` option simply is a list of files to be handled in this
+way.  From the PyXML setup script::
+
+    setup(...,
+          scripts=['scripts/xmlproc_parse', 'scripts/xmlproc_val'])
+
+All the scripts will also be added to the ``MANIFEST`` file if no template is
+provided. See :ref:`packaging-manifest`.
+
+.. _packaging-installing-package-data:
+
+Installing Package Data
+=======================
+
+Often, additional files need to be installed into a package.  These files are
+often data that's closely related to the package's implementation, or text files
+containing documentation that might be of interest to programmers using the
+package.  These files are called :dfn:`package data`.
+
+Package data can be added to packages using the ``package_data`` keyword
+argument to the :func:`setup` function.  The value must be a mapping from
+package name to a list of relative path names that should be copied into the
+package.  The paths are interpreted as relative to the directory containing the
+package (information from the ``package_dir`` mapping is used if appropriate);
+that is, the files are expected to be part of the package in the source
+directories. They may contain glob patterns as well.
+
+The path names may contain directory portions; any necessary directories will be
+created in the installation.
+
+For example, if a package should contain a subdirectory with several data files,
+the files can be arranged like this in the source tree::
+
+    setup.py
+    src/
+        mypkg/
+              __init__.py
+              module.py
+              data/
+                   tables.dat
+                   spoons.dat
+                   forks.dat
+
+The corresponding call to :func:`setup` might be::
+
+    setup(...,
+          packages=['mypkg'],
+          package_dir={'mypkg': 'src/mypkg'},
+          package_data={'mypkg': ['data/*.dat']})
+
+
+All the files that match ``package_data`` will be added to the ``MANIFEST``
+file if no template is provided. See :ref:`packaging-manifest`.
+
+
+.. _packaging-additional-files:
+
+Installing Additional Files
+===========================
+
+The :option:`data_files` option can be used to specify additional files needed
+by the module distribution: configuration files, message catalogs, data files,
+anything which doesn't fit in the previous categories.
+
+:option:`data_files` specifies a sequence of (*directory*, *files*) pairs in the
+following way::
+
+    setup(...,
+          data_files=[('bitmaps', ['bm/b1.gif', 'bm/b2.gif']),
+                      ('config', ['cfg/data.cfg']),
+                      ('/etc/init.d', ['init-script'])])
+
+Note that you can specify the directory names where the data files will be
+installed, but you cannot rename the data files themselves.
+
+Each (*directory*, *files*) pair in the sequence specifies the installation
+directory and the files to install there.  If *directory* is a relative path, it
+is interpreted relative to the installation prefix (Python's ``sys.prefix`` for
+pure-Python packages, ``sys.exec_prefix`` for packages that contain extension
+modules).  Each file name in *files* is interpreted relative to the
+:file:`setup.py` script at the top of the package source distribution.  No
+directory information from *files* is used to determine the final location of
+the installed file; only the name of the file is used.
+
+You can specify the :option:`data_files` option as a simple sequence of files
+without specifying a target directory, but this is not recommended, and the
+:command:`install_dist` command will print a warning in this case. To install data
+files directly in the target directory, an empty string should be given as the
+directory.
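+
+For example, a sketch that installs a single configuration file directly into
+the target directory (the file name is made up)::
+
+    setup(...,
+          data_files=[('', ['runtime.cfg'])])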
+
+All the files that match ``data_files`` will be added to the ``MANIFEST`` file
+if no template is provided. See :ref:`packaging-manifest`.
+
+
+
+.. _packaging-metadata:
+
+Metadata reference
+==================
+
+The setup script may include additional metadata beyond the name and version.
+This table describes required and additional information:
+
+.. TODO synchronize with setupcfg; link to it (but don't remove it, it's a
+   useful summary)
+
++----------------------+---------------------------+-----------------+--------+
+| Meta-Data            | Description               | Value           | Notes  |
++======================+===========================+=================+========+
+| ``name``             | name of the project       | short string    | \(1)   |
++----------------------+---------------------------+-----------------+--------+
+| ``version``          | version of this release   | short string    | (1)(2) |
++----------------------+---------------------------+-----------------+--------+
+| ``author``           | project author's name     | short string    | \(3)   |
++----------------------+---------------------------+-----------------+--------+
+| ``author_email``     | email address of the      | email address   | \(3)   |
+|                      | project author            |                 |        |
++----------------------+---------------------------+-----------------+--------+
+| ``maintainer``       | project maintainer's name | short string    | \(3)   |
++----------------------+---------------------------+-----------------+--------+
+| ``maintainer_email`` | email address of the      | email address   | \(3)   |
+|                      | project maintainer        |                 |        |
++----------------------+---------------------------+-----------------+--------+
+| ``home_page``        | home page for the project | URL             | \(1)   |
++----------------------+---------------------------+-----------------+--------+
+| ``summary``          | short description of the  | short string    |        |
+|                      | project                   |                 |        |
++----------------------+---------------------------+-----------------+--------+
+| ``description``      | longer description of the | long string     | \(5)   |
+|                      | project                   |                 |        |
++----------------------+---------------------------+-----------------+--------+
+| ``download_url``     | location where the        | URL             |        |
+|                      | project may be downloaded |                 |        |
++----------------------+---------------------------+-----------------+--------+
+| ``classifiers``      | a list of classifiers     | list of strings | \(4)   |
++----------------------+---------------------------+-----------------+--------+
+| ``platforms``        | a list of platforms       | list of strings |        |
++----------------------+---------------------------+-----------------+--------+
+| ``license``          | license for the release   | short string    | \(6)   |
++----------------------+---------------------------+-----------------+--------+
+
+Notes:
+
+(1)
+    These fields are required.
+
+(2)
+    It is recommended that versions take the form *major.minor[.patch[.sub]]*.
+
+(3)
+    Either the author or the maintainer must be identified.
+
+(4)
+    The list of classifiers is available from the `PyPI website
+    <http://pypi.python.org/pypi>`_. See also :mod:`packaging.create`.
+
+(5)
+    The ``description`` field is used by PyPI when you are registering a
+    release, to build its PyPI page.
+
+(6)
+    The ``license`` field is a string indicating the license covering the
+    distribution where the license is not a selection from the "License" Trove
+    classifiers. See the ``Classifier`` field. Notice that
+    there's a ``licence`` distribution option which is deprecated but still
+    acts as an alias for ``license``.
+
+'short string'
+    A single line of text, not more than 200 characters.
+
+'long string'
+    Multiple lines of plain text in reStructuredText format (see
+    http://docutils.sf.net/).
+
+'list of strings'
+    See below.
+
+In Python 2.x, "string value" means a unicode object. If a byte string (str or
+bytes) is given, it has to be valid ASCII.
+
+.. TODO move this section to the version document, keep a summary, add a link
+
+Encoding the version information is an art in itself. Python projects generally
+adhere to the version format *major.minor[.patch][sub]*. The major number is 0
+for initial, experimental releases of software. It is incremented for releases
+that represent major milestones in a project. The minor number is incremented
+when important new features are added to the project. The patch number
+increments when bug-fix releases are made. Additional trailing version
+information is sometimes used to indicate sub-releases.  These are
+"a1,a2,...,aN" (for alpha releases, where functionality and API may change),
+"b1,b2,...,bN" (for beta releases, which only fix bugs) and "pr1,pr2,...,prN"
+(for final pre-release release testing). Some examples:
+
+0.1.0
+    the first, experimental release of a project
+
+1.0.1a2
+    the second alpha release of the first patch version of 1.0
+
+:option:`classifiers` are specified in a Python list::
+
+    setup(...,
+          classifiers=[
+              'Development Status :: 4 - Beta',
+              'Environment :: Console',
+              'Environment :: Web Environment',
+              'Intended Audience :: End Users/Desktop',
+              'Intended Audience :: Developers',
+              'Intended Audience :: System Administrators',
+              'License :: OSI Approved :: Python Software Foundation License',
+              'Operating System :: MacOS :: MacOS X',
+              'Operating System :: Microsoft :: Windows',
+              'Operating System :: POSIX',
+              'Programming Language :: Python',
+              'Topic :: Communications :: Email',
+              'Topic :: Office/Business',
+              'Topic :: Software Development :: Bug Tracking',
+              ])
+
+
+Debugging the setup script
+==========================
+
+Sometimes things go wrong, and the setup script doesn't do what the developer
+wants.
+
+Distutils catches any exceptions when running the setup script, and prints a
+simple error message before the script is terminated.  The motivation for this
+behaviour is to not confuse administrators who don't know much about Python and
+are trying to install a project.  If they get a big long traceback from deep
+inside the guts of Distutils, they may think the project or the Python
+installation is broken because they don't read all the way down to the bottom
+and see that it's a permission problem.
+
+.. FIXME DISTUTILS_DEBUG is dead, document logging/warnings here
+
+On the other hand, this doesn't help the developer find the cause of the
+failure.  For this purpose, the :envvar:`DISTUTILS_DEBUG` environment variable
+can be set to anything except an empty string, and Packaging will then print
+detailed information about what it is doing and a full traceback when an
+exception occurs.
diff --git a/Doc/packaging/sourcedist.rst b/Doc/packaging/sourcedist.rst
new file mode 100644
--- /dev/null
+++ b/Doc/packaging/sourcedist.rst
@@ -0,0 +1,273 @@
+.. _packaging-source-dist:
+
+******************************
+Creating a Source Distribution
+******************************
+
+As shown in section :ref:`packaging-simple-example`, you use the :command:`sdist` command
+to create a source distribution.  In the simplest case, ::
+
+   python setup.py sdist
+
+(assuming you haven't specified any :command:`sdist` options in the setup script
+or config file), :command:`sdist` creates the archive of the default format for
+the current platform.  The default format is a gzip'ed tar file
+(:file:`.tar.gz`) on Unix, and a ZIP file on Windows.
+
+You can specify as many formats as you like using the :option:`--formats`
+option, for example::
+
+   python setup.py sdist --formats=gztar,zip
+
+to create a gzipped tarball and a zip file.  The available formats are:
+
++-----------+-------------------------+---------+
+| Format    | Description             | Notes   |
++===========+=========================+=========+
+| ``zip``   | zip file (:file:`.zip`) | (1),(3) |
++-----------+-------------------------+---------+
+| ``gztar`` | gzip'ed tar file        | \(2)    |
+|           | (:file:`.tar.gz`)       |         |
++-----------+-------------------------+---------+
+| ``bztar`` | bzip2'ed tar file       |         |
+|           | (:file:`.tar.bz2`)      |         |
++-----------+-------------------------+---------+
+| ``ztar``  | compressed tar file     | \(4)    |
+|           | (:file:`.tar.Z`)        |         |
++-----------+-------------------------+---------+
+| ``tar``   | tar file (:file:`.tar`) |         |
++-----------+-------------------------+---------+
+
+Notes:
+
+(1)
+   default on Windows
+
+(2)
+   default on Unix
+
+(3)
+   requires either the external :program:`zip` utility or the :mod:`zipfile`
+   module (part of the standard Python library since Python 1.6)
+
+(4)
+   requires the :program:`compress` program.  Notice that this format is
+   pending deprecation and will be removed in future versions of Python.
+
+When using any ``tar`` format (``gztar``, ``bztar``, ``ztar`` or
+``tar``) under Unix, you can specify the ``owner`` and ``group`` names
+that will be set for each member of the archive.
+
+For example, if you want all files of the archive to be owned by root::
+
+    python setup.py sdist --owner=root --group=root
+
+
+.. _packaging-manifest:
+
+Specifying the files to distribute
+==================================
+
+If you don't supply an explicit list of files (or instructions on how to
+generate one), the :command:`sdist` command puts a minimal default set into the
+source distribution:
+
+* all Python source files implied by the :option:`py_modules` and
+  :option:`packages` options
+
+* all C source files mentioned in the :option:`ext_modules` or
+  :option:`libraries` options
+
+* scripts identified by the :option:`scripts` option.
+  See :ref:`packaging-installing-scripts`.
+
+* anything that looks like a test script: :file:`test/test\*.py` (currently,
+  Packaging doesn't do anything with test scripts except include them in source
+  distributions, but in the future there will be a standard for testing Python
+  module distributions)
+
+* the configuration file :file:`setup.cfg`
+
+* all files that match the ``package_data`` metadata.
+  See :ref:`packaging-installing-package-data`.
+
+* all files that match the ``data_files`` metadata.
+  See :ref:`packaging-additional-files`.
+
+Unlike Distutils, Packaging does not include :file:`README` (or
+:file:`README.txt`) and :file:`setup.py` by default.
+
+Sometimes this is enough, but usually you will want to specify additional files
+to distribute.  The typical way to do this is to write a *manifest template*,
+called :file:`MANIFEST.in` by default.  The manifest template is just a list of
+instructions for how to generate your manifest file, :file:`MANIFEST`, which is
+the exact list of files to include in your source distribution.  The
+:command:`sdist` command processes this template and generates a manifest based
+on its instructions and what it finds in the filesystem.
+
+If you prefer to roll your own manifest file, the format is simple: one filename
+per line, regular files (or symlinks to them) only.  If you do supply your own
+:file:`MANIFEST`, you must specify everything: the default set of files
+described above does not apply in this case.
+
+:file:`MANIFEST` files start with a comment indicating they are generated.
+Files without this comment are not overwritten or removed.
+
+See :ref:`packaging-manifest-template` section for a syntax reference.
+
+
+.. _packaging-manifest-options:
+
+Manifest-related options
+========================
+
+The normal course of operations for the :command:`sdist` command is as follows:
+
+* if the manifest file :file:`MANIFEST` doesn't exist, read :file:`MANIFEST.in`
+  and create the manifest
+
+* if neither :file:`MANIFEST` nor :file:`MANIFEST.in` exist, create a manifest
+  with just the default file set
+
+* if either :file:`MANIFEST.in` or the setup script (:file:`setup.py`) is more
+  recent than :file:`MANIFEST`, recreate :file:`MANIFEST` by reading
+  :file:`MANIFEST.in`
+
+* use the list of files now in :file:`MANIFEST` (either just generated or read
+  in) to create the source distribution archive(s)
+
+There are a couple of options that modify this behaviour.  First, use the
+:option:`--no-defaults` and :option:`--no-prune` options to disable the
+standard "include" and "exclude" sets.
+
+Second, you might just want to (re)generate the manifest, but not create a
+source distribution::
+
+   python setup.py sdist --manifest-only
+
+:option:`-o` is a shortcut for :option:`--manifest-only`.
+
+
+.. _packaging-manifest-template:
+
+The MANIFEST.in template
+========================
+
+A :file:`MANIFEST.in` file can be added in a project to define the list of
+files to include in the distribution built by the :command:`sdist` command.
+
+When :command:`sdist` is run, it will look for the :file:`MANIFEST.in` file
+and interpret it to generate the :file:`MANIFEST` file that contains the
+list of files that will be included in the package.
+
+This mechanism can be used when the default list of files is not enough.
+(See :ref:`packaging-manifest`).
+
+Principle
+---------
+
+The manifest template has one command per line, where each command specifies a
+set of files to include or exclude from the source distribution.  For an
+example, let's look at Packaging's own manifest template::
+
+   include *.txt
+   recursive-include examples *.txt *.py
+   prune examples/sample?/build
+
+The meanings should be fairly clear: include all files in the distribution root
+matching :file:`\*.txt`, all files anywhere under the :file:`examples` directory
+matching :file:`\*.txt` or :file:`\*.py`, and exclude all directories matching
+:file:`examples/sample?/build`.  All of this is done *after* the standard
+include set, so you can exclude files from the standard set with explicit
+instructions in the manifest template.  (Or, you can use the
+:option:`--no-defaults` option to disable the standard set entirely.)
+
+The order of commands in the manifest template matters: initially, we have the
+list of default files as described above, and each command in the template adds
+to or removes from that list of files.  Once we have fully processed the
+manifest template, we remove files that should not be included in the source
+distribution:
+
+* all files in the Packaging "build" tree (default :file:`build/`)
+
+* all files in directories named :file:`RCS`, :file:`CVS`, :file:`.svn`,
+  :file:`.hg`, :file:`.git`, :file:`.bzr` or :file:`_darcs`
+
+Now we have our complete list of files, which is written to the manifest for
+future reference, and then used to build the source distribution archive(s).
+
+You can disable the default set of included files with the
+:option:`--no-defaults` option, and you can disable the standard exclude set
+with :option:`--no-prune`.
+
+Following Packaging's own manifest template, let's trace how the
+:command:`sdist` command builds the list of files to include in the Packaging
+source distribution:
+
+#. include all Python source files in the :file:`packaging` and
+   :file:`packaging/command` subdirectories (because packages corresponding to
+   those two directories were mentioned in the :option:`packages` option in the
+   setup script---see section :ref:`packaging-setup-script`)
+
+#. include :file:`README.txt`, :file:`setup.py`, and :file:`setup.cfg` (standard
+   files)
+
+#. include :file:`test/test\*.py` (standard files)
+
+#. include :file:`\*.txt` in the distribution root (this will find
+   :file:`README.txt` a second time, but such redundancies are weeded out later)
+
+#. include anything matching :file:`\*.txt` or :file:`\*.py` in the sub-tree
+   under :file:`examples`,
+
+#. exclude all files in the sub-trees starting at directories matching
+   :file:`examples/sample?/build`\ ---this may exclude files included by the
+   previous two steps, so it's important that the ``prune`` command in the manifest
+   template comes after the ``recursive-include`` command
+
+#. exclude the entire :file:`build` tree, and any :file:`RCS`, :file:`CVS`,
+   :file:`.svn`, :file:`.hg`, :file:`.git`, :file:`.bzr` and :file:`_darcs`
+   directories
+
+Just like in the setup script, file and directory names in the manifest template
+should always be slash-separated; Packaging will take care of converting
+them to the standard representation on your platform. That way, the manifest
+template is portable across operating systems.
+
+Commands
+--------
+
+The manifest template commands are:
+
++-------------------------------------------+-----------------------------------------------+
+| Command                                   | Description                                   |
++===========================================+===============================================+
+| :command:`include pat1 pat2 ...`          | include all files matching any of the listed  |
+|                                           | patterns                                      |
++-------------------------------------------+-----------------------------------------------+
+| :command:`exclude pat1 pat2 ...`          | exclude all files matching any of the listed  |
+|                                           | patterns                                      |
++-------------------------------------------+-----------------------------------------------+
+| :command:`recursive-include dir pat1 pat2 | include all files under *dir* matching any of |
+| ...`                                      | the listed patterns                           |
++-------------------------------------------+-----------------------------------------------+
+| :command:`recursive-exclude dir pat1 pat2 | exclude all files under *dir* matching any of |
+| ...`                                      | the listed patterns                           |
++-------------------------------------------+-----------------------------------------------+
+| :command:`global-include pat1 pat2 ...`   | include all files anywhere in the source tree |
+|                                           | matching any of the listed patterns           |
++-------------------------------------------+-----------------------------------------------+
+| :command:`global-exclude pat1 pat2 ...`   | exclude all files anywhere in the source tree |
+|                                           | matching any of the listed patterns           |
++-------------------------------------------+-----------------------------------------------+
+| :command:`prune dir`                      | exclude all files under *dir*                 |
++-------------------------------------------+-----------------------------------------------+
+| :command:`graft dir`                      | include all files under *dir*                 |
++-------------------------------------------+-----------------------------------------------+
+
+The patterns here are Unix-style "glob" patterns: ``*`` matches any sequence of
+regular filename characters, ``?`` matches any single regular filename
+character, and ``[range]`` matches any of the characters in *range* (e.g.,
+``a-z``, ``a-zA-Z``, ``a-f0-9_.``).  The definition of "regular filename
+character" is platform-specific: on Unix it is anything except slash; on Windows
+anything except backslash or colon.
diff --git a/Doc/packaging/tutorial.rst b/Doc/packaging/tutorial.rst
new file mode 100644
--- /dev/null
+++ b/Doc/packaging/tutorial.rst
@@ -0,0 +1,112 @@
+==================
+Packaging tutorial
+==================
+
+Welcome to the Packaging tutorial! We will learn how to use Packaging
+to package your project.
+
+.. TODO merge with introduction.rst
+
+
+Getting started
+---------------
+
+Packaging works with the *setup.cfg* file. It contains all the metadata for
+your project, as defined in PEP 345, but also declares what your project
+contains.
+
+Let's say you have a project called *CLVault* containing one package called
+*clvault*, and a few scripts inside. You can use the *pysetup* script to create
+a *setup.cfg* file for the project. The script will ask you a few questions::
+
+   $ mkdir CLVault
+   $ cd CLVault
+   $ pysetup create
+   Project name [CLVault]:
+   Current version number: 0.1
+   Package description:
+   >Command-line utility to store and retrieve passwords
+   Author name: Tarek Ziade
+   Author e-mail address: tarek at ziade.org
+   Project Home Page: http://bitbucket.org/tarek/clvault
+   Do you want to add a package ? (y/n): y
+   Package name: clvault
+   Do you want to add a package ? (y/n): n
+   Do you want to set Trove classifiers? (y/n): y
+   Please select the project status:
+
+   1 - Planning
+   2 - Pre-Alpha
+   3 - Alpha
+   4 - Beta
+   5 - Production/Stable
+   6 - Mature
+   7 - Inactive
+
+   Status: 3
+   What license do you use: GPL
+   Matching licenses:
+
+   1) License :: OSI Approved :: GNU General Public License (GPL)
+   2) License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)
+
+   Type the number of the license you wish to use or ? to try again:: 1
+   Do you want to set other trove identifiers (y/n) [n]: n
+   Wrote "setup.cfg".
+
+
+A setup.cfg file is created, containing the metadata of your project and the
+list of the packages it contains::
+
+   $ cat setup.cfg
+   [metadata]
+   name = CLVault
+   version = 0.1
+   author = Tarek Ziade
+   author_email = tarek at ziade.org
+   description = Command-line utility to store and retrieve passwords
+   home_page = http://bitbucket.org/tarek/clvault
+
+   classifier = Development Status :: 3 - Alpha
+       License :: OSI Approved :: GNU General Public License (GPL)
+
+   [files]
+   packages = clvault
+
+
+Our project will depend on the *keyring* project. Let's add it in the
+[metadata] section::
+
+   [metadata]
+   ...
+   requires_dist =
+       keyring
+
+
+Running commands
+----------------
+
+You can run useful commands on your project once the setup.cfg file is ready:
+
+- sdist: creates a source distribution
+- register: registers your project with PyPI
+- upload: uploads the distribution to PyPI
+- install_dist: installs it
+
+All commands are run using the *pysetup run* command::
+
+   $ pysetup run install_dist
+   $ pysetup run sdist
+   $ pysetup run upload
+
+If you want to push a source distribution of your project to PyPI, do::
+
+   $ pysetup run sdist register upload
+
+
+Installing the project
+----------------------
+
+The project can be installed by manually running the packaging install command::
+
+   $ pysetup run install_dist
diff --git a/Doc/packaging/uploading.rst b/Doc/packaging/uploading.rst
new file mode 100644
--- /dev/null
+++ b/Doc/packaging/uploading.rst
@@ -0,0 +1,80 @@
+.. _packaging-package-upload:
+
+***************************************
+Uploading Packages to the Package Index
+***************************************
+
+The Python Package Index (PyPI) not only stores the package info, but also the
+package data if the author of the package wishes to. The packaging command
+:command:`upload` pushes the distribution files to PyPI.
+
+The command is invoked immediately after building one or more distribution
+files.  For example, the command ::
+
+    python setup.py sdist bdist_wininst upload
+
+will cause the source distribution and the Windows installer to be uploaded to
+PyPI.  Note that these will be uploaded even if they are built using an earlier
+invocation of :file:`setup.py`, but that only distributions named on the command
+line for the invocation including the :command:`upload` command are uploaded.
+
+The :command:`upload` command uses the username, password, and repository URL
+from the :file:`$HOME/.pypirc` file (see section :ref:`packaging-pypirc` for more on this
+file). If a :command:`register` command was previously called in the same
+command, and if the password was entered in the prompt, :command:`upload` will
+reuse the entered password. This is useful if you do not want to store a clear
+text password in the :file:`$HOME/.pypirc` file.
+
+You can specify another PyPI server with the :option:`--repository=*url*`
+option::
+
+    python setup.py sdist bdist_wininst upload -r http://example.com/pypi
+
+See section :ref:`packaging-pypirc` for more on defining several servers.
+
+You can use the :option:`--sign` option to tell :command:`upload` to sign each
+uploaded file using GPG (GNU Privacy Guard).  The :program:`gpg` program must
+be available for execution on the system :envvar:`PATH`.  You can also specify
+which key to use for signing using the :option:`--identity=*name*` option.
+
+Other :command:`upload` options include :option:`--repository=<url>` or
+:option:`--repository=<section>` where *url* is the url of the server and
+*section* the name of the section in :file:`$HOME/.pypirc`, and
+:option:`--show-response` (which displays the full response text from the PyPI
+server for help in debugging upload problems).
+
+PyPI package display
+====================
+
+The ``description`` field plays a special role at PyPI. It is used by
+the server to display a home page for the registered package.
+
+If you use the `reStructuredText <http://docutils.sourceforge.net/rst.html>`_
+syntax for this field, PyPI will parse it and display an HTML output for
+the package home page.
+
+The ``description`` field can be filled from a text file located in the
+project::
+
+    from packaging.core import setup
+
+    fp = open('README.txt')
+    try:
+        description = fp.read()
+    finally:
+        fp.close()
+
+    setup(name='Packaging',
+          description=description)
+
+In that case, :file:`README.txt` is a regular reStructuredText text file located
+in the root of the package, alongside :file:`setup.py`.
+
+To prevent registering broken reStructuredText content, you can use the
+:program:`rst2html` program that is provided by the :mod:`docutils` package
+and check the ``description`` from the command line::
+
+    $ python setup.py --description | rst2html.py > output.html
+
+:mod:`docutils` will display a warning if there's something wrong with your
+syntax.
diff --git a/Doc/tools/sphinxext/indexcontent.html b/Doc/tools/sphinxext/indexcontent.html
--- a/Doc/tools/sphinxext/indexcontent.html
+++ b/Doc/tools/sphinxext/indexcontent.html
@@ -20,10 +20,10 @@
          <span class="linkdescr">tutorial for C/C++ programmers</span></p>
       <p class="biglink"><a class="biglink" href="{{ pathto("c-api/index") }}">Python/C API</a><br/>
          <span class="linkdescr">reference for C/C++ programmers</span></p>
-      <p class="biglink"><a class="biglink" href="{{ pathto("install/index") }}">Installing Python Modules</a><br/>
-         <span class="linkdescr">information for installers &amp; sys-admins</span></p>
-      <p class="biglink"><a class="biglink" href="{{ pathto("distutils/index") }}">Distributing Python Modules</a><br/>
-         <span class="linkdescr">sharing modules with others</span></p>
+      <p class="biglink"><a class="biglink" href="{{ pathto("install/index") }}">Installing Python Projects</a><br/>
+         <span class="linkdescr">finding and installing modules and applications</span></p>
+      <p class="biglink"><a class="biglink" href="{{ pathto("packaging/index") }}">Distributing Python Projects</a><br/>
+         <span class="linkdescr">packaging and distributing modules and applications</span></p>
       <p class="biglink"><a class="biglink" href="{{ pathto("documenting/index") }}">Documenting Python</a><br/>
          <span class="linkdescr">guide for documentation authors</span></p>
       <p class="biglink"><a class="biglink" href="{{ pathto("faq/index") }}">FAQs</a><br/>
diff --git a/Doc/whatsnew/3.3.rst b/Doc/whatsnew/3.3.rst
--- a/Doc/whatsnew/3.3.rst
+++ b/Doc/whatsnew/3.3.rst
@@ -106,6 +106,11 @@
 os
 --
 
+* The :mod:`os` module has a new :func:`~os.pipe2` function that makes it
+  possible to create a pipe with :data:`~os.O_CLOEXEC` or
+  :data:`~os.O_NONBLOCK` flags set atomically. This is especially useful to
+  avoid race conditions in multi-threaded programs.
+
 * The :mod:`os` module has a new :func:`~os.sendfile` function which provides
   an efficent "zero-copy" way for copying data from one file (or socket)
   descriptor to another. The phrase "zero-copy" refers to the fact that all of
@@ -124,6 +129,27 @@
   (Patch submitted by Giampaolo Rodolà in :issue:`10784`.)
 
 
+packaging
+---------
+
+:mod:`distutils` has undergone additions and refactoring under a new name,
+:mod:`packaging`, to allow developers to break backward compatibility.
+:mod:`distutils` is still provided in the standard library, but users are
+encouraged to transition to :mod:`packaging`.  For older versions of Python, a
+backport compatible with 2.4+ and 3.1+ will be made available on PyPI under the
+name :mod:`distutils2`.
+
+.. TODO add examples and howto to the packaging docs and link to them
+
+
+pydoc
+-----
+
+The Tk GUI and the :func:`~pydoc.serve` function have been removed from the
+:mod:`pydoc` module: ``pydoc -g`` and :func:`~pydoc.serve` have been deprecated
+in Python 3.2.
+
+
 sys
 ---
 
@@ -152,6 +178,16 @@
   instead of a RuntimeError: OSError has an errno attribute.
 
 
+ssl
+---
+
+The :mod:`ssl` module has new functions:
+
+  * :func:`~ssl.RAND_bytes`: generate cryptographically strong
+    pseudo-random bytes.
+  * :func:`~ssl.RAND_pseudo_bytes`: generate pseudo-random bytes.
+
+
 Optimizations
 =============
 
diff --git a/Include/Python-ast.h b/Include/Python-ast.h
--- a/Include/Python-ast.h
+++ b/Include/Python-ast.h
@@ -36,6 +36,8 @@
 
 typedef struct _alias *alias_ty;
 
+typedef struct _withitem *withitem_ty;
+
 
 enum _mod_kind {Module_kind=1, Interactive_kind=2, Expression_kind=3,
                  Suite_kind=4};
@@ -64,10 +66,9 @@
 enum _stmt_kind {FunctionDef_kind=1, ClassDef_kind=2, Return_kind=3,
                   Delete_kind=4, Assign_kind=5, AugAssign_kind=6, For_kind=7,
                   While_kind=8, If_kind=9, With_kind=10, Raise_kind=11,
-                  TryExcept_kind=12, TryFinally_kind=13, Assert_kind=14,
-                  Import_kind=15, ImportFrom_kind=16, Global_kind=17,
-                  Nonlocal_kind=18, Expr_kind=19, Pass_kind=20, Break_kind=21,
-                  Continue_kind=22};
+                  Try_kind=12, Assert_kind=13, Import_kind=14,
+                  ImportFrom_kind=15, Global_kind=16, Nonlocal_kind=17,
+                  Expr_kind=18, Pass_kind=19, Break_kind=20, Continue_kind=21};
 struct _stmt {
         enum _stmt_kind kind;
         union {
@@ -128,8 +129,7 @@
                 } If;
                 
                 struct {
-                        expr_ty context_expr;
-                        expr_ty optional_vars;
+                        asdl_seq *items;
                         asdl_seq *body;
                 } With;
                 
@@ -142,12 +142,8 @@
                         asdl_seq *body;
                         asdl_seq *handlers;
                         asdl_seq *orelse;
-                } TryExcept;
-                
-                struct {
-                        asdl_seq *body;
                         asdl_seq *finalbody;
-                } TryFinally;
+                } Try;
                 
                 struct {
                         expr_ty test;
@@ -383,6 +379,11 @@
         identifier asname;
 };
 
+struct _withitem {
+        expr_ty context_expr;
+        expr_ty optional_vars;
+};
+
 
 #define Module(a0, a1) _Py_Module(a0, a1)
 mod_ty _Py_Module(asdl_seq * body, PyArena *arena);
@@ -421,18 +422,16 @@
 #define If(a0, a1, a2, a3, a4, a5) _Py_If(a0, a1, a2, a3, a4, a5)
 stmt_ty _Py_If(expr_ty test, asdl_seq * body, asdl_seq * orelse, int lineno,
                int col_offset, PyArena *arena);
-#define With(a0, a1, a2, a3, a4, a5) _Py_With(a0, a1, a2, a3, a4, a5)
-stmt_ty _Py_With(expr_ty context_expr, expr_ty optional_vars, asdl_seq * body,
-                 int lineno, int col_offset, PyArena *arena);
+#define With(a0, a1, a2, a3, a4) _Py_With(a0, a1, a2, a3, a4)
+stmt_ty _Py_With(asdl_seq * items, asdl_seq * body, int lineno, int col_offset,
+                 PyArena *arena);
 #define Raise(a0, a1, a2, a3, a4) _Py_Raise(a0, a1, a2, a3, a4)
 stmt_ty _Py_Raise(expr_ty exc, expr_ty cause, int lineno, int col_offset,
                   PyArena *arena);
-#define TryExcept(a0, a1, a2, a3, a4, a5) _Py_TryExcept(a0, a1, a2, a3, a4, a5)
-stmt_ty _Py_TryExcept(asdl_seq * body, asdl_seq * handlers, asdl_seq * orelse,
-                      int lineno, int col_offset, PyArena *arena);
-#define TryFinally(a0, a1, a2, a3, a4) _Py_TryFinally(a0, a1, a2, a3, a4)
-stmt_ty _Py_TryFinally(asdl_seq * body, asdl_seq * finalbody, int lineno, int
-                       col_offset, PyArena *arena);
+#define Try(a0, a1, a2, a3, a4, a5, a6) _Py_Try(a0, a1, a2, a3, a4, a5, a6)
+stmt_ty _Py_Try(asdl_seq * body, asdl_seq * handlers, asdl_seq * orelse,
+                asdl_seq * finalbody, int lineno, int col_offset, PyArena
+                *arena);
 #define Assert(a0, a1, a2, a3, a4) _Py_Assert(a0, a1, a2, a3, a4)
 stmt_ty _Py_Assert(expr_ty test, expr_ty msg, int lineno, int col_offset,
                    PyArena *arena);
@@ -547,6 +546,9 @@
 keyword_ty _Py_keyword(identifier arg, expr_ty value, PyArena *arena);
 #define alias(a0, a1, a2) _Py_alias(a0, a1, a2)
 alias_ty _Py_alias(identifier name, identifier asname, PyArena *arena);
+#define withitem(a0, a1, a2) _Py_withitem(a0, a1, a2)
+withitem_ty _Py_withitem(expr_ty context_expr, expr_ty optional_vars, PyArena
+                         *arena);
 
 PyObject* PyAST_mod2obj(mod_ty t);
 mod_ty PyAST_obj2mod(PyObject* ast, PyArena* arena, int mode);
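
For illustration, a minimal sketch of the new AST shape as seen from the ast
module (assumes an interpreter built with this header, i.e. the 3.3
development line): With now carries a list of withitem nodes, and
TryExcept/TryFinally merge into a single Try node.

    import ast

    tree = ast.parse(
        "with open('a') as f, open('b') as g:\n"
        "    pass\n"
        "try:\n"
        "    pass\n"
        "except ValueError:\n"
        "    pass\n"
        "finally:\n"
        "    pass\n")
    with_node, try_node = tree.body
    # With.items is a list of withitem(context_expr, optional_vars) nodes
    print([type(item).__name__ for item in with_node.items])
    # -> ['withitem', 'withitem']
    # a single Try node now holds body, handlers, orelse and finalbody
    print(type(try_node).__name__, len(try_node.handlers), len(try_node.finalbody))
    # -> Try 1 1
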
diff --git a/Include/import.h b/Include/import.h
--- a/Include/import.h
+++ b/Include/import.h
@@ -44,7 +44,7 @@
     const char *name            /* UTF-8 encoded string */
     );
 PyAPI_FUNC(PyObject *) PyImport_ImportModuleLevel(
-    char *name,                 /* UTF-8 encoded string */
+    const char *name,           /* UTF-8 encoded string */
     PyObject *globals,
     PyObject *locals,
     PyObject *fromlist,
diff --git a/Lib/_pyio.py b/Lib/_pyio.py
--- a/Lib/_pyio.py
+++ b/Lib/_pyio.py
@@ -558,7 +558,11 @@
             if not data:
                 break
             res += data
-        return bytes(res)
+        if res:
+            return bytes(res)
+        else:
+            # b'' or None
+            return data
 
     def readinto(self, b):
         """Read up to len(b) bytes into bytearray b.
@@ -940,6 +944,12 @@
         # Special case for when the number of bytes to read is unspecified.
         if n is None or n == -1:
             self._reset_read_buf()
+            if hasattr(self.raw, 'readall'):
+                chunk = self.raw.readall()
+                if chunk is None:
+                    return buf[pos:] or None
+                else:
+                    return buf[pos:] + chunk
             chunks = [buf[pos:]]  # Strip the consumed bytes.
             current_size = 0
             while True:
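
A hedged sketch of what the two _pyio hunks above aim for, using a made-up
raw stream class: when the raw object exposes readall() and signals "no data
right now" by returning None, the buffered read() propagates that None
instead of returning an empty bytes object.  Assumes the patched _pyio is in
use.

    import io
    import _pyio

    class NonBlockingRaw(io.RawIOBase):
        """Simulated non-blocking raw stream with no data currently available."""
        def readable(self):
            return True
        def readinto(self, b):
            return None      # non-blocking "no data" answer
        def readall(self):
            return None      # same answer on the readall() fast path

    buffered = _pyio.BufferedReader(NonBlockingRaw())
    print(buffered.read())   # None (no data), not b''
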
diff --git a/Lib/bz2.py b/Lib/bz2.py
--- a/Lib/bz2.py
+++ b/Lib/bz2.py
@@ -76,6 +76,10 @@
             mode = "wb"
             mode_code = _MODE_WRITE
             self._compressor = BZ2Compressor()
+        elif mode in ("a", "ab"):
+            mode = "ab"
+            mode_code = _MODE_WRITE
+            self._compressor = BZ2Compressor()
         else:
             raise ValueError("Invalid mode: {!r}".format(mode))
 
@@ -155,20 +159,31 @@
         if not self.seekable():
             self._check_not_closed()
             raise io.UnsupportedOperation("Seeking is only supported "
-                                          "on files opening for reading")
+                                          "on files open for reading")
 
     # Fill the readahead buffer if it is empty. Returns False on EOF.
     def _fill_buffer(self):
         if self._buffer:
             return True
+
+        if self._decompressor.unused_data:
+            rawblock = self._decompressor.unused_data
+        else:
+            rawblock = self._fp.read(_BUFFER_SIZE)
+
+        if not rawblock:
+            if self._decompressor.eof:
+                self._mode = _MODE_READ_EOF
+                self._size = self._pos
+                return False
+            else:
+                raise EOFError("Compressed file ended before the "
+                               "end-of-stream marker was reached")
+
+        # Continue to next stream.
         if self._decompressor.eof:
-            self._mode = _MODE_READ_EOF
-            self._size = self._pos
-            return False
-        rawblock = self._fp.read(_BUFFER_SIZE)
-        if not rawblock:
-            raise EOFError("Compressed file ended before the "
-                           "end-of-stream marker was reached")
+            self._decompressor = BZ2Decompressor()
+
         self._buffer = self._decompressor.decompress(rawblock)
         return True
 
@@ -384,9 +399,15 @@
     """
     if len(data) == 0:
         return b""
-    decomp = BZ2Decompressor()
-    result = decomp.decompress(data)
-    if not decomp.eof:
-        raise ValueError("Compressed data ended before the "
-                         "end-of-stream marker was reached")
-    return result
+
+    results = []
+    while True:
+        decomp = BZ2Decompressor()
+        results.append(decomp.decompress(data))
+        if not decomp.eof:
+            raise ValueError("Compressed data ended before the "
+                             "end-of-stream marker was reached")
+        if not decomp.unused_data:
+            return b"".join(results)
+        # There is unused data left over. Proceed to next stream.
+        data = decomp.unused_data
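
A short usage sketch of the two behaviours added above (the file name is
illustrative): BZ2File accepts append mode, and both file reads and the
module-level decompress() now walk every stream in the input instead of
stopping after the first.

    import bz2

    data = bz2.compress(b'first stream, ') + bz2.compress(b'second stream')
    print(bz2.decompress(data))      # b'first stream, second stream'

    with bz2.BZ2File('example.bz2', 'w') as f:
        f.write(b'spam ')
    with bz2.BZ2File('example.bz2', 'a') as f:   # append adds a second stream
        f.write(b'eggs')
    with bz2.BZ2File('example.bz2') as f:
        print(f.read())              # b'spam eggs'
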
diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py
--- a/Lib/collections/__init__.py
+++ b/Lib/collections/__init__.py
@@ -269,6 +269,8 @@
         'Return a new OrderedDict which maps field names to their values'
         return OrderedDict(zip(self._fields, self))
 
+    __dict__ = property(_asdict)
+
     def _replace(_self, **kwds):
         'Return a new {typename} object replacing specified fields with new values'
         result = _self._make(map(kwds.pop, {field_names!r}, _self))
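
With the new __dict__ property, vars() works on named tuple instances and
agrees with _asdict(); a quick sketch:

    from collections import namedtuple

    Point = namedtuple('Point', 'x y')
    p = Point(11, 22)
    print(vars(p))                   # OrderedDict([('x', 11), ('y', 22)])
    print(vars(p) == p._asdict())    # True
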
diff --git a/Lib/ctypes/util.py b/Lib/ctypes/util.py
--- a/Lib/ctypes/util.py
+++ b/Lib/ctypes/util.py
@@ -137,9 +137,7 @@
             rv = f.close()
             if rv == 10:
                 raise OSError('objdump command not found')
-            with contextlib.closing(os.popen(cmd)) as f:
-                data = f.read()
-            res = re.search(r'\sSONAME\s+([^\s]+)', data)
+            res = re.search(r'\sSONAME\s+([^\s]+)', dump)
             if not res:
                 return None
             return res.group(1)
diff --git a/Lib/decimal.py b/Lib/decimal.py
--- a/Lib/decimal.py
+++ b/Lib/decimal.py
@@ -2001,9 +2001,9 @@
         nonzero.  For efficiency, other._exp should not be too large,
         so that 10**abs(other._exp) is a feasible calculation."""
 
-        # In the comments below, we write x for the value of self and
-        # y for the value of other.  Write x = xc*10**xe and y =
-        # yc*10**ye.
+        # In the comments below, we write x for the value of self and y for the
+        # value of other.  Write x = xc*10**xe and abs(y) = yc*10**ye, with xc
+        # and yc positive integers not divisible by 10.
 
         # The main purpose of this method is to identify the *failure*
         # of x**y to be exactly representable with as little effort as
@@ -2011,13 +2011,12 @@
         # eliminate the possibility of x**y being exact.  Only if all
         # these tests are passed do we go on to actually compute x**y.
 
-        # Here's the main idea.  First normalize both x and y.  We
-        # express y as a rational m/n, with m and n relatively prime
-        # and n>0.  Then for x**y to be exactly representable (at
-        # *any* precision), xc must be the nth power of a positive
-        # integer and xe must be divisible by n.  If m is negative
-        # then additionally xc must be a power of either 2 or 5, hence
-        # a power of 2**n or 5**n.
+        # Here's the main idea.  Express y as a rational number m/n, with m and
+        # n relatively prime and n>0.  Then for x**y to be exactly
+        # representable (at *any* precision), xc must be the nth power of a
+        # positive integer and xe must be divisible by n.  If y is negative
+        # then additionally xc must be a power of either 2 or 5, hence a power
+        # of 2**n or 5**n.
         #
         # There's a limit to how small |y| can be: if y=m/n as above
         # then:
@@ -2089,21 +2088,43 @@
                     return None
                 # now xc is a power of 2; e is its exponent
                 e = _nbits(xc)-1
-                # find e*y and xe*y; both must be integers
-                if ye >= 0:
-                    y_as_int = yc*10**ye
-                    e = e*y_as_int
-                    xe = xe*y_as_int
-                else:
-                    ten_pow = 10**-ye
-                    e, remainder = divmod(e*yc, ten_pow)
-                    if remainder:
-                        return None
-                    xe, remainder = divmod(xe*yc, ten_pow)
-                    if remainder:
-                        return None
-
-                if e*65 >= p*93: # 93/65 > log(10)/log(5)
+
+                # We now have:
+                #
+                #   x = 2**e * 10**xe, e > 0, and y < 0.
+                #
+                # The exact result is:
+                #
+                #   x**y = 5**(-e*y) * 10**(e*y + xe*y)
+                #
+                # provided that both e*y and xe*y are integers.  Note that if
+                # 5**(-e*y) >= 10**p, then the result can't be expressed
+                # exactly with p digits of precision.
+                #
+                # Using the above, we can guard against large values of ye.
+                # 93/65 is an upper bound for log(10)/log(5), so if
+                #
+                #   ye >= len(str(93*p//65))
+                #
+                # then
+                #
+                #   -e*y >= -y >= 10**ye > 93*p/65 > p*log(10)/log(5),
+                #
+                # so 5**(-e*y) >= 10**p, and the coefficient of the result
+                # can't be expressed in p digits.
+
+                # emax >= largest e such that 5**e < 10**p.
+                emax = p*93//65
+                if ye >= len(str(emax)):
+                    return None
+
+                # Find -e*y and -xe*y; both must be integers
+                e = _decimal_lshift_exact(e * yc, ye)
+                xe = _decimal_lshift_exact(xe * yc, ye)
+                if e is None or xe is None:
+                    return None
+
+                if e > emax:
                     return None
                 xc = 5**e
 
@@ -2117,19 +2138,20 @@
                 while xc % 5 == 0:
                     xc //= 5
                     e -= 1
-                if ye >= 0:
-                    y_as_integer = yc*10**ye
-                    e = e*y_as_integer
-                    xe = xe*y_as_integer
-                else:
-                    ten_pow = 10**-ye
-                    e, remainder = divmod(e*yc, ten_pow)
-                    if remainder:
-                        return None
-                    xe, remainder = divmod(xe*yc, ten_pow)
-                    if remainder:
-                        return None
-                if e*3 >= p*10: # 10/3 > log(10)/log(2)
+
+                # Guard against large values of ye, using the same logic as in
+                # the 'xc is a power of 2' branch.  10/3 is an upper bound for
+                # log(10)/log(2).
+                emax = p*10//3
+                if ye >= len(str(emax)):
+                    return None
+
+                e = _decimal_lshift_exact(e * yc, ye)
+                xe = _decimal_lshift_exact(xe * yc, ye)
+                if e is None or xe is None:
+                    return None
+
+                if e > emax:
                     return None
                 xc = 2**e
             else:
@@ -5529,6 +5551,27 @@
 
 _nbits = int.bit_length
 
+def _decimal_lshift_exact(n, e):
+    """ Given integers n and e, return n * 10**e if it's an integer, else None.
+
+    The computation is designed to avoid computing large powers of 10
+    unnecessarily.
+
+    >>> _decimal_lshift_exact(3, 4)
+    30000
+    >>> _decimal_lshift_exact(300, -999999999)  # returns None
+
+    """
+    if n == 0:
+        return 0
+    elif e >= 0:
+        return n * 10**e
+    else:
+        # val_n = largest power of 10 dividing n.
+        str_n = str(abs(n))
+        val_n = len(str_n) - len(str_n.rstrip('0'))
+        return None if val_n < -e else n // 10**-e
+
 def _sqrt_nearest(n, a):
     """Closest integer to the square root of the positive integer n.  a is
     an initial approximation to the square root.  Any positive integer
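
For illustration, direct calls to the new private helper (assuming the
patched decimal.py is importable); it decides whether n * 10**e is an integer
by counting trailing zeros of n rather than building a huge power of ten:

    from decimal import _decimal_lshift_exact

    print(_decimal_lshift_exact(12300, -2))   # 123   (12300 * 10**-2 is integral)
    print(_decimal_lshift_exact(12300, -3))   # None  (12.3 is not an integer)
    print(_decimal_lshift_exact(7, 5))        # 700000
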
diff --git a/Lib/distutils/tests/test_build_py.py b/Lib/distutils/tests/test_build_py.py
--- a/Lib/distutils/tests/test_build_py.py
+++ b/Lib/distutils/tests/test_build_py.py
@@ -57,12 +57,15 @@
         self.assertEqual(len(cmd.get_outputs()), 3)
         pkgdest = os.path.join(destination, "pkg")
         files = os.listdir(pkgdest)
-        self.assertTrue("__init__.py" in files)
-        if not sys.dont_write_bytecode:
-            self.assertTrue("__init__.pyc" in files)
-        self.assertTrue("README.txt" in files)
+        self.assertIn("__init__.py", files)
+        self.assertIn("README.txt", files)
+        # XXX even with -O, distutils writes pyc, not pyo; bug?
+        if sys.dont_write_bytecode:
+            self.assertNotIn("__init__.pyc", files)
+        else:
+            self.assertIn("__init__.pyc", files)
 
-    def test_empty_package_dir (self):
+    def test_empty_package_dir(self):
         # See SF 1668596/1720897.
         cwd = os.getcwd()
 
@@ -110,7 +113,7 @@
         finally:
             sys.dont_write_bytecode = old_dont_write_bytecode
 
-        self.assertTrue('byte-compiling is disabled' in self.logs[0][1])
+        self.assertIn('byte-compiling is disabled', self.logs[0][1])
 
 def test_suite():
     return unittest.makeSuite(BuildPyTestCase)
diff --git a/Lib/html/parser.py b/Lib/html/parser.py
--- a/Lib/html/parser.py
+++ b/Lib/html/parser.py
@@ -124,7 +124,7 @@
         _markupbase.ParserBase.reset(self)
 
     def feed(self, data):
-        """Feed data to the parser.
+        r"""Feed data to the parser.
 
         Call this as often as you want, with as little or as much text
         as you want (may include '\n').
diff --git a/Lib/imaplib.py b/Lib/imaplib.py
--- a/Lib/imaplib.py
+++ b/Lib/imaplib.py
@@ -249,15 +249,7 @@
 
     def read(self, size):
         """Read 'size' bytes from remote."""
-        chunks = []
-        read = 0
-        while read < size:
-            data = self.file.read(min(size-read, 4096))
-            if not data:
-                break
-            read += len(data)
-            chunks.append(data)
-        return b''.join(chunks)
+        return self.file.read(size)
 
 
     def readline(self):
diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py
--- a/Lib/logging/__init__.py
+++ b/Lib/logging/__init__.py
@@ -41,10 +41,9 @@
     codecs = None
 
 try:
-    import _thread as thread
     import threading
 except ImportError: #pragma: no cover
-    thread = None
+    threading = None
 
 __author__  = "Vinay Sajip <vinay_sajip at red-dove.com>"
 __status__  = "production"
@@ -199,7 +198,7 @@
 #the lock would already have been acquired - so we need an RLock.
 #The same argument applies to Loggers and Manager.loggerDict.
 #
-if thread:
+if threading:
     _lock = threading.RLock()
 else: #pragma: no cover
     _lock = None
@@ -278,8 +277,8 @@
         self.created = ct
         self.msecs = (ct - int(ct)) * 1000
         self.relativeCreated = (self.created - _startTime) * 1000
-        if logThreads and thread:
-            self.thread = thread.get_ident()
+        if logThreads and threading:
+            self.thread = threading.get_ident()
             self.threadName = threading.current_thread().name
         else: # pragma: no cover
             self.thread = None
@@ -773,7 +772,7 @@
         """
         Acquire a thread lock for serializing access to the underlying I/O.
         """
-        if thread:
+        if threading:
             self.lock = threading.RLock()
         else: #pragma: no cover
             self.lock = None
diff --git a/Lib/packaging/command/bdist.py b/Lib/packaging/command/bdist.py
--- a/Lib/packaging/command/bdist.py
+++ b/Lib/packaging/command/bdist.py
@@ -128,6 +128,7 @@
         for i in range(len(self.formats)):
             cmd_name = commands[i]
             sub_cmd = self.get_reinitialized_command(cmd_name)
+            sub_cmd.format = self.formats[i]
 
             # passing the owner and group names for tar archiving
             if cmd_name == 'bdist_dumb':
diff --git a/Lib/packaging/command/check.py b/Lib/packaging/command/check.py
--- a/Lib/packaging/command/check.py
+++ b/Lib/packaging/command/check.py
@@ -32,7 +32,7 @@
         # XXX we could use a special handler for this, but would need to test
         # if it works even if the logger has a too high level
         self._warnings.append((msg, args))
-        return logger.warning(self.get_command_name() + msg, *args)
+        return logger.warning('%s: %s' % (self.get_command_name(), msg), *args)
 
     def run(self):
         """Runs the command."""
diff --git a/Lib/packaging/command/sdist.py b/Lib/packaging/command/sdist.py
--- a/Lib/packaging/command/sdist.py
+++ b/Lib/packaging/command/sdist.py
@@ -1,10 +1,9 @@
 """Create a source distribution."""
 
 import os
+import re
 import sys
-import re
 from io import StringIO
-from glob import glob
 from shutil import get_archive_formats, rmtree
 
 from packaging import logger
@@ -203,45 +202,14 @@
 
     def add_defaults(self):
         """Add all the default files to self.filelist:
-          - README or README.txt
-          - test/test*.py
           - all pure Python modules mentioned in setup script
           - all files pointed by package_data (build_py)
           - all files defined in data_files.
           - all files defined as scripts.
           - all C sources listed as part of extensions or C libraries
             in the setup script (doesn't catch C headers!)
-        Warns if (README or README.txt) or setup.py are missing; everything
-        else is optional.
+        Everything is optional.
         """
-        standards = [('README', 'README.txt')]
-        for fn in standards:
-            if isinstance(fn, tuple):
-                alts = fn
-                got_it = False
-                for fn in alts:
-                    if os.path.exists(fn):
-                        got_it = True
-                        self.filelist.append(fn)
-                        break
-
-                if not got_it:
-                    logger.warning(
-                        '%s: standard file not found: should have one of %s',
-                        self.get_command_name(), ', '.join(alts))
-            else:
-                if os.path.exists(fn):
-                    self.filelist.append(fn)
-                else:
-                    logger.warning('%s: standard file %r not found',
-                                   self.get_command_name(), fn)
-
-        optional = ['test/test*.py', 'setup.cfg']
-        for pattern in optional:
-            files = [f for f in glob(pattern) if os.path.isfile(f)]
-            if files:
-                self.filelist.extend(files)
-
         for cmd_name in get_command_names():
             try:
                 cmd_obj = self.get_finalized_command(cmd_name)
diff --git a/Lib/packaging/compiler/__init__.py b/Lib/packaging/compiler/__init__.py
--- a/Lib/packaging/compiler/__init__.py
+++ b/Lib/packaging/compiler/__init__.py
@@ -83,19 +83,16 @@
 # patterns. Order is important; platform mappings are preferred over
 # OS names.
 _default_compilers = (
-
     # Platform string mappings
 
     # on a cygwin built python we can use gcc like an ordinary UNIXish
     # compiler
     ('cygwin.*', 'unix'),
-    ('os2emx', 'emx'),
 
     # OS name mappings
     ('posix', 'unix'),
     ('nt', 'msvc'),
-
-    )
+)
 
 def get_default_compiler(osname=None, platform=None):
     """ Determine the default compiler to use for the given platform.
diff --git a/Lib/packaging/compiler/ccompiler.py b/Lib/packaging/compiler/ccompiler.py
--- a/Lib/packaging/compiler/ccompiler.py
+++ b/Lib/packaging/compiler/ccompiler.py
@@ -352,7 +352,7 @@
         return macros, objects, extra, pp_opts, build
 
     def _get_cc_args(self, pp_opts, debug, before):
-        # works for unixccompiler, emxccompiler, cygwinccompiler
+        # works for unixccompiler and cygwinccompiler
         cc_args = pp_opts + ['-c']
         if debug:
             cc_args[:0] = ['-g']
diff --git a/Lib/packaging/database.py b/Lib/packaging/database.py
--- a/Lib/packaging/database.py
+++ b/Lib/packaging/database.py
@@ -18,7 +18,7 @@
     'get_distributions', 'get_distribution', 'get_file_users',
     'provides_distribution', 'obsoletes_distribution',
     'enable_cache', 'disable_cache', 'clear_cache',
-]
+    'get_file_path', 'get_file']
 
 
 # TODO update docs
@@ -627,3 +627,17 @@
     for dist in get_distributions():
         if dist.uses(path):
             yield dist
+
+
+def get_file_path(distribution_name, relative_path):
+    """Return the path to a resource file."""
+    dist = get_distribution(distribution_name)
+    if dist != None:
+        return dist.get_resource_path(relative_path)
+    raise LookupError('no distribution named %r found' % distribution_name)
+
+
+def get_file(distribution_name, relative_path, *args, **kwargs):
+    """Open and return a resource file."""
+    return open(get_file_path(distribution_name, relative_path),
+                *args, **kwargs)
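
A hypothetical usage sketch of the two resource helpers now living in
packaging.database; 'spam' and the relative path stand in for a real
installed distribution that declared resource files:

    from packaging.database import get_file_path, get_file

    print(get_file_path('spam', 'templates/index.html'))
    with get_file('spam', 'templates/index.html', encoding='utf-8') as fp:
        print(fp.read())
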
diff --git a/Lib/packaging/fancy_getopt.py b/Lib/packaging/fancy_getopt.py
--- a/Lib/packaging/fancy_getopt.py
+++ b/Lib/packaging/fancy_getopt.py
@@ -13,7 +13,6 @@
 import getopt
 import re
 import sys
-import string
 import textwrap
 
 from packaging.errors import PackagingGetoptError, PackagingArgError
@@ -142,20 +141,20 @@
 
         for option in self.option_table:
             if len(option) == 3:
-                integer, short, help = option
+                longopt, short, help = option
                 repeat = 0
             elif len(option) == 4:
-                integer, short, help, repeat = option
+                longopt, short, help, repeat = option
             else:
                 # the option table is part of the code, so simply
                 # assert that it is correct
                 raise ValueError("invalid option tuple: %r" % option)
 
             # Type- and value-check the option names
-            if not isinstance(integer, str) or len(integer) < 2:
+            if not isinstance(longopt, str) or len(longopt) < 2:
                 raise PackagingGetoptError(
                       ("invalid long option '%s': "
-                       "must be a string of length >= 2") % integer)
+                       "must be a string of length >= 2") % longopt)
 
             if (not ((short is None) or
                      (isinstance(short, str) and len(short) == 1))):
@@ -163,55 +162,55 @@
                       ("invalid short option '%s': "
                        "must be a single character or None") % short)
 
-            self.repeat[integer] = repeat
-            self.long_opts.append(integer)
+            self.repeat[longopt] = repeat
+            self.long_opts.append(longopt)
 
-            if integer[-1] == '=':             # option takes an argument?
+            if longopt[-1] == '=':             # option takes an argument?
                 if short:
                     short = short + ':'
-                integer = integer[0:-1]
-                self.takes_arg[integer] = 1
+                longopt = longopt[0:-1]
+                self.takes_arg[longopt] = 1
             else:
 
                 # Is option is a "negative alias" for some other option (eg.
                 # "quiet" == "!verbose")?
-                alias_to = self.negative_alias.get(integer)
+                alias_to = self.negative_alias.get(longopt)
                 if alias_to is not None:
                     if self.takes_arg[alias_to]:
                         raise PackagingGetoptError(
                               ("invalid negative alias '%s': "
                                "aliased option '%s' takes a value") % \
-                               (integer, alias_to))
+                               (longopt, alias_to))
 
-                    self.long_opts[-1] = integer   # XXX redundant?!
-                    self.takes_arg[integer] = 0
+                    self.long_opts[-1] = longopt   # XXX redundant?!
+                    self.takes_arg[longopt] = 0
 
                 else:
-                    self.takes_arg[integer] = 0
+                    self.takes_arg[longopt] = 0
 
             # If this is an alias option, make sure its "takes arg" flag is
             # the same as the option it's aliased to.
-            alias_to = self.alias.get(integer)
+            alias_to = self.alias.get(longopt)
             if alias_to is not None:
-                if self.takes_arg[integer] != self.takes_arg[alias_to]:
+                if self.takes_arg[longopt] != self.takes_arg[alias_to]:
                     raise PackagingGetoptError(
                           ("invalid alias '%s': inconsistent with "
                            "aliased option '%s' (one of them takes a value, "
-                           "the other doesn't") % (integer, alias_to))
+                           "the other doesn't") % (longopt, alias_to))
 
             # Now enforce some bondage on the long option name, so we can
             # later translate it to an attribute name on some object.  Have
             # to do this a bit late to make sure we've removed any trailing
             # '='.
-            if not longopt_re.match(integer):
+            if not longopt_re.match(longopt):
                 raise PackagingGetoptError(
                       ("invalid long option name '%s' " +
-                       "(must be letters, numbers, hyphens only") % integer)
+                       "(must be letters, numbers, hyphens only") % longopt)
 
-            self.attr_name[integer] = integer.replace('-', '_')
+            self.attr_name[longopt] = longopt.replace('-', '_')
             if short:
                 self.short_opts.append(short)
-                self.short2long[short[0]] = integer
+                self.short2long[short[0]] = longopt
 
     def getopt(self, args=None, object=None):
         """Parse command-line options in args. Store as attributes on object.
@@ -297,10 +296,10 @@
         # First pass: determine maximum length of long option names
         max_opt = 0
         for option in self.option_table:
-            integer = option[0]
+            longopt = option[0]
             short = option[1]
-            l = len(integer)
-            if integer[-1] == '=':
+            l = len(longopt)
+            if longopt[-1] == '=':
                 l = l - 1
             if short is not None:
                 l = l + 5                   # " (-x)" where short == 'x'
@@ -340,20 +339,20 @@
             lines = ['Option summary:']
 
         for option in self.option_table:
-            integer, short, help = option[:3]
+            longopt, short, help = option[:3]
             text = textwrap.wrap(help, text_width)
 
             # Case 1: no short option at all (makes life easy)
             if short is None:
                 if text:
-                    lines.append("  --%-*s  %s" % (max_opt, integer, text[0]))
+                    lines.append("  --%-*s  %s" % (max_opt, longopt, text[0]))
                 else:
-                    lines.append("  --%-*s  " % (max_opt, integer))
+                    lines.append("  --%-*s  " % (max_opt, longopt))
 
             # Case 2: we have a short option, so we have to include it
             # just after the long option
             else:
-                opt_names = "%s (-%s)" % (integer, short)
+                opt_names = "%s (-%s)" % (longopt, short)
                 if text:
                     lines.append("  --%-*s  %s" %
                                  (max_opt, opt_names, text[0]))
@@ -378,68 +377,6 @@
     return parser.getopt(args, object)
 
 
-WS_TRANS = str.maketrans(string.whitespace, ' ' * len(string.whitespace))
-
-
-def wrap_text(text, width):
-    """Split *text* into lines of no more than *width* characters each.
-
-    *text* is a str and *width* an int.  Returns a list of str.
-    """
-
-    if text is None:
-        return []
-    if len(text) <= width:
-        return [text]
-
-    text = text.expandtabs()
-    text = text.translate(WS_TRANS)
-
-    chunks = re.split(r'( +|-+)', text)
-    chunks = [_f for _f in chunks if _f]      # ' - ' results in empty strings
-    lines = []
-
-    while chunks:
-
-        cur_line = []                   # list of chunks (to-be-joined)
-        cur_len = 0                     # length of current line
-
-        while chunks:
-            l = len(chunks[0])
-            if cur_len + l <= width:    # can squeeze (at least) this chunk in
-                cur_line.append(chunks[0])
-                del chunks[0]
-                cur_len = cur_len + l
-            else:                       # this line is full
-                # drop last chunk if all space
-                if cur_line and cur_line[-1][0] == ' ':
-                    del cur_line[-1]
-                break
-
-        if chunks:                      # any chunks left to process?
-
-            # if the current line is still empty, then we had a single
-            # chunk that's too big too fit on a line -- so we break
-            # down and break it up at the line width
-            if cur_len == 0:
-                cur_line.append(chunks[0][0:width])
-                chunks[0] = chunks[0][width:]
-
-            # all-whitespace chunks at the end of a line can be discarded
-            # (and we know from the re.split above that if a chunk has
-            # *any* whitespace, it is *all* whitespace)
-            if chunks[0][0] == ' ':
-                del chunks[0]
-
-        # and store this line in the list-of-all-lines -- as a single
-        # string, of course!
-        lines.append(''.join(cur_line))
-
-    # while chunks
-
-    return lines
-
-
 class OptionDummy:
     """Dummy class just used as a place to hold command-line option
     values as instance attributes."""
diff --git a/Lib/packaging/install.py b/Lib/packaging/install.py
--- a/Lib/packaging/install.py
+++ b/Lib/packaging/install.py
@@ -6,7 +6,6 @@
 This is a higher-level module built on packaging.database and
 packaging.pypi.
 """
-
 import os
 import sys
 import stat
@@ -14,7 +13,7 @@
 import shutil
 import logging
 import tempfile
-from sysconfig import get_config_var
+from sysconfig import get_config_var, get_path
 
 from packaging import logger
 from packaging.dist import Distribution
@@ -28,6 +27,8 @@
 from packaging.errors import (PackagingError, InstallationException,
                               InstallationConflict, CCompilerError)
 from packaging.pypi.errors import ProjectNotFound, ReleaseNotFound
+from packaging import database
+
 
 __all__ = ['install_dists', 'install_from_infos', 'get_infos', 'remove',
            'install', 'install_local_project']
@@ -75,6 +76,7 @@
 def _run_setuptools_install(path):
     cmd = '%s setup.py install --record=%s --single-version-externally-managed'
     record_file = os.path.join(path, 'RECORD')
+
     os.system(cmd % (sys.executable, record_file))
     if not os.path.exists(record_file):
         raise ValueError('failed to install')
@@ -88,8 +90,10 @@
     dist.parse_config_files()
     try:
         dist.run_command('install_dist')
+        name = dist.metadata['name']
+        return database.get_distribution(name) is not None
     except (IOError, os.error, PackagingError, CCompilerError) as msg:
-        raise SystemExit("error: " + str(msg))
+        raise ValueError("Failed to install, " + str(msg))
 
 
 def _install_dist(dist, path):
@@ -115,18 +119,20 @@
     If the source directory contains a setup.py install using distutils1.
     If a setup.cfg is found, install using the install_dist command.
 
+    Returns True on success, False on failure.
     """
     path = os.path.abspath(path)
     if os.path.isdir(path):
-        logger.info('installing from source directory: %s', path)
-        _run_install_from_dir(path)
+        logger.info('Installing from source directory: %s', path)
+        return _run_install_from_dir(path)
     elif _is_archive_file(path):
-        logger.info('installing from archive: %s', path)
+        logger.info('Installing from archive: %s', path)
         _unpacked_dir = tempfile.mkdtemp()
         shutil.unpack_archive(path, _unpacked_dir)
-        _run_install_from_archive(_unpacked_dir)
+        return _run_install_from_archive(_unpacked_dir)
     else:
-        logger.warning('no projects to install')
+        logger.warning('No projects to install.')
+        return False
 
 
 def _run_install_from_archive(source_dir):
@@ -152,7 +158,13 @@
     func = install_methods[install_method]
     try:
         func = install_methods[install_method]
-        return func(source_dir)
+        try:
+            func(source_dir)
+            return True
+        except ValueError as err:
+            # failed to install
+            logger.info(str(err))
+            return False
     finally:
         os.chdir(old_dir)
 
@@ -174,16 +186,16 @@
 
     installed_dists = []
     for dist in dists:
-        logger.info('installing %s %s', dist.name, dist.version)
+        logger.info('Installing %r %s...', dist.name, dist.version)
         try:
             _install_dist(dist, path)
             installed_dists.append(dist)
         except Exception as e:
-            logger.info('failed: %s', e)
+            logger.info('Failed: %s', e)
 
             # reverting
             for installed_dist in installed_dists:
-                logger.info('reverting %s', installed_dist)
+                logger.info('Reverting %s', installed_dist)
                 _remove_dist(installed_dist, paths)
             raise e
     return installed_dists
@@ -292,7 +304,7 @@
     #    or remove
 
     if not installed:
-        logger.info('reading installed distributions')
+        logger.debug('Reading installed distributions')
         installed = list(get_distributions(use_egg_info=True))
 
     infos = {'install': [], 'remove': [], 'conflict': []}
@@ -306,7 +318,7 @@
         if predicate.name.lower() != installed_project.name.lower():
             continue
         found = True
-        logger.info('found %s %s', installed_project.name,
+        logger.info('Found %s %s', installed_project.name,
                     installed_project.metadata['version'])
 
         # if we already have something installed, check it matches the
@@ -316,7 +328,7 @@
         break
 
     if not found:
-        logger.info('project not installed')
+        logger.debug('Project not installed')
 
     if not index:
         index = wrapper.ClientWrapper()
@@ -331,7 +343,7 @@
         raise InstallationException('Release not found: "%s"' % requirements)
 
     if release is None:
-        logger.info('could not find a matching project')
+        logger.info('Could not find a matching project')
         return infos
 
     metadata = release.fetch_metadata()
@@ -348,7 +360,7 @@
     # Get what the missing deps are
     dists = depgraph.missing[release]
     if dists:
-        logger.info("missing dependencies found, retrieving metadata")
+        logger.info("Missing dependencies found, retrieving metadata")
         # we have missing deps
         for dist in dists:
             _update_infos(infos, get_infos(dist, index, installed))
@@ -376,7 +388,10 @@
 
 
 def remove(project_name, paths=sys.path, auto_confirm=True):
-    """Removes a single project from the installation"""
+    """Removes a single project from the installation.
+
+    Returns True on success
+    """
     dist = get_distribution(project_name, use_egg_info=True, paths=paths)
     if dist is None:
         raise PackagingError('Distribution "%s" not found' % project_name)
@@ -384,13 +399,26 @@
     rmdirs = []
     rmfiles = []
     tmp = tempfile.mkdtemp(prefix=project_name + '-uninstall')
+
+    def _move_file(source, target):
+        try:
+            os.rename(source, target)
+        except OSError as err:
+            return err
+        return None
+
+    success = True
+    error = None
     try:
         for file_, md5, size in files:
             if os.path.isfile(file_):
                 dirname, filename = os.path.split(file_)
                 tmpfile = os.path.join(tmp, filename)
                 try:
-                    os.rename(file_, tmpfile)
+                    error = _move_file(file_, tmpfile)
+                    if error is not None:
+                        success = False
+                        break
                 finally:
                     if not os.path.isfile(file_):
                         os.rename(tmpfile, file_)
@@ -401,7 +429,12 @@
     finally:
         shutil.rmtree(tmp)
 
-    logger.info('removing %r: ', project_name)
+    if not success:
+        logger.info('%r cannot be removed.', project_name)
+        logger.info('Error: %s' % str(error))
+        return False
+
+    logger.info('Removing %r: ', project_name)
 
     for file_ in rmfiles:
         logger.info('  %s', file_)
@@ -444,21 +477,41 @@
         if os.path.exists(dist.path):
             shutil.rmtree(dist.path)
 
-        logger.info('success: removed %d files and %d dirs',
+        logger.info('Success: removed %d files and %d dirs',
                     file_count, dir_count)
 
+    return True
+
 
 def install(project):
-    logger.info('getting information about %r', project)
+    """Installs a project.
+
+    Returns True on success, False on failure
+    """
+    logger.info('Checking the installation location...')
+    purelib_path = get_path('purelib')
+    # trying to write a file there
+    try:
+        with tempfile.NamedTemporaryFile(suffix=project,
+                                         dir=purelib_path) as testfile:
+            testfile.write(b'test')
+    except OSError:
+        # was unable to write a file
+        logger.info('Unable to write in "%s". Do you have the permissions?'
+                    % purelib_path)
+        return False
+
+
+    logger.info('Getting information about %r...', project)
     try:
         info = get_infos(project)
     except InstallationException:
-        logger.info('cound not find %r', project)
-        return
+        logger.info('Could not find %r', project)
+        return False
 
     if info['install'] == []:
-        logger.info('nothing to install')
-        return
+        logger.info('Nothing to install')
+        return False
 
     install_path = get_config_var('base')
     try:
@@ -470,6 +523,8 @@
             projects = ['%s %s' % (p.name, p.version) for p in e.args[0]]
             logger.info('%r conflicts with %s', project, ','.join(projects))
 
+    return True
+
 
 def _main(**attrs):
     if 'script_args' not in attrs:
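
A hedged sketch of how the new boolean return values are meant to be consumed
by callers (the archive path and project name are made up):

    from packaging.install import install_local_project, remove

    if not install_local_project('/tmp/spam-1.0.tar.gz'):
        print('installation failed')
    if remove('spam'):
        print('spam removed')
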
diff --git a/Lib/packaging/metadata.py b/Lib/packaging/metadata.py
--- a/Lib/packaging/metadata.py
+++ b/Lib/packaging/metadata.py
@@ -396,22 +396,24 @@
                 value = []
 
         if logger.isEnabledFor(logging.WARNING):
+            project_name = self['Name']
+
             if name in _PREDICATE_FIELDS and value is not None:
                 for v in value:
                     # check that the values are valid predicates
                     if not is_valid_predicate(v.split(';')[0]):
                         logger.warning(
-                            '%r is not a valid predicate (field %r)',
-                            v, name)
+                            '%r: %r is not a valid predicate (field %r)',
+                            project_name, v, name)
             # FIXME this rejects UNKNOWN, is that right?
             elif name in _VERSIONS_FIELDS and value is not None:
                 if not is_valid_versions(value):
-                    logger.warning('%r is not a valid version (field %r)',
-                                   value, name)
+                    logger.warning('%r: %r is not a valid version (field %r)',
+                                   project_name, value, name)
             elif name in _VERSION_FIELDS and value is not None:
                 if not is_valid_version(value):
-                    logger.warning('%r is not a valid version (field %r)',
-                                   value, name)
+                    logger.warning('%r: %r is not a valid version (field %r)',
+                                   project_name, value, name)
 
         if name in _UNICODEFIELDS:
             if name == 'Description':
diff --git a/Lib/packaging/pypi/simple.py b/Lib/packaging/pypi/simple.py
--- a/Lib/packaging/pypi/simple.py
+++ b/Lib/packaging/pypi/simple.py
@@ -1,6 +1,6 @@
 """Spider using the screen-scraping "simple" PyPI API.
 
-This module contains the class SimpleIndexCrawler, a simple spider that
+This module contains the class Crawler, a simple spider that
 can be used to find and retrieve distributions from a project index
 (like the Python Package Index), using its so-called simple API (see
 reference implementation available at http://pypi.python.org/simple/).
@@ -118,9 +118,10 @@
     def __init__(self, index_url=DEFAULT_SIMPLE_INDEX_URL, prefer_final=False,
                  prefer_source=True, hosts=DEFAULT_HOSTS,
                  follow_externals=False, mirrors_url=None, mirrors=None,
-                 timeout=SOCKET_TIMEOUT, mirrors_max_tries=0):
+                 timeout=SOCKET_TIMEOUT, mirrors_max_tries=0, verbose=False):
         super(Crawler, self).__init__(prefer_final, prefer_source)
         self.follow_externals = follow_externals
+        self.verbose = verbose
 
         # mirroring attributes.
         parsed = urllib.parse.urlparse(index_url)
@@ -177,14 +178,14 @@
 
     def get_releases(self, requirements, prefer_final=None,
                      force_update=False):
-        """Search for releases and return a ReleaseList object containing
+        """Search for releases and return a ReleasesList object containing
         the results.
         """
         predicate = get_version_predicate(requirements)
         if predicate.name.lower() in self._projects and not force_update:
             return self._projects.get(predicate.name.lower())
         prefer_final = self._get_prefer_final(prefer_final)
-        logger.info('reading info on PyPI about %s', predicate.name)
+        logger.debug('Reading info on PyPI about %s', predicate.name)
         self._process_index_page(predicate.name)
 
         if predicate.name.lower() not in self._projects:
@@ -321,8 +322,9 @@
                                 infos = get_infos_from_url(link, project_name,
                                             is_external=not self.index_url in url)
                             except CantParseArchiveName as e:
-                                logger.warning(
-                                    "version has not been parsed: %s", e)
+                                if self.verbose:
+                                    logger.warning(
+                                        "version has not been parsed: %s", e)
                             else:
                                 self._register_release(release_info=infos)
                         else:
diff --git a/Lib/packaging/pypi/xmlrpc.py b/Lib/packaging/pypi/xmlrpc.py
--- a/Lib/packaging/pypi/xmlrpc.py
+++ b/Lib/packaging/pypi/xmlrpc.py
@@ -31,11 +31,11 @@
     If no server_url is specified, use the default PyPI XML-RPC URL,
     defined in the DEFAULT_XMLRPC_INDEX_URL constant::
 
-        >>> client = XMLRPCClient()
+        >>> client = Client()
         >>> client.server_url == DEFAULT_XMLRPC_INDEX_URL
         True
 
-        >>> client = XMLRPCClient("http://someurl/")
+        >>> client = Client("http://someurl/")
         >>> client.server_url
         'http://someurl/'
     """
@@ -69,7 +69,7 @@
         informations (eg. make a new XML-RPC call).
         ::
 
-            >>> client = XMLRPCClient()
+            >>> client = Client()
             >>> client.get_releases('Foo')
             ['1.1', '1.2', '1.3']
 
@@ -189,7 +189,7 @@
 
         If no server proxy is defined yet, creates a new one::
 
-            >>> client = XmlRpcClient()
+            >>> client = Client()
             >>> client.proxy()
             <ServerProxy for python.org/pypi>
 
diff --git a/Lib/packaging/resources.py b/Lib/packaging/resources.py
deleted file mode 100644
--- a/Lib/packaging/resources.py
+++ /dev/null
@@ -1,25 +0,0 @@
-"""Data file path abstraction.
-
-Functions in this module use sysconfig to find the paths to the resource
-files registered in project's setup.cfg file.  See the documentation for
-more information.
-"""
-# TODO write that documentation
-
-from packaging.database import get_distribution
-
-__all__ = ['get_file_path', 'get_file']
-
-
-def get_file_path(distribution_name, relative_path):
-    """Return the path to a resource file."""
-    dist = get_distribution(distribution_name)
-    if dist != None:
-        return dist.get_resource_path(relative_path)
-    raise LookupError('no distribution named %r found' % distribution_name)
-
-
-def get_file(distribution_name, relative_path, *args, **kwargs):
-    """Open and return a resource file."""
-    return open(get_file_path(distribution_name, relative_path),
-                *args, **kwargs)
diff --git a/Lib/packaging/run.py b/Lib/packaging/run.py
--- a/Lib/packaging/run.py
+++ b/Lib/packaging/run.py
@@ -5,10 +5,11 @@
 import sys
 import getopt
 import logging
+from copy import copy
 
 from packaging import logger
 from packaging.dist import Distribution
-from packaging.util import _is_archive_file
+from packaging.util import _is_archive_file, generate_setup_py
 from packaging.command import get_command_class, STANDARD_COMMANDS
 from packaging.install import install, install_local_project, remove
 from packaging.database import get_distribution, get_distributions
@@ -37,6 +38,14 @@
 Create a new Python package.
 """
 
+generate_usage = """\
+Usage: pysetup generate-setup
+   or: pysetup generate-setup --help
+
+Generates a setup.py script for backward-compatibility purposes.
+"""
+
+
 graph_usage = """\
 Usage: pysetup graph dist
    or: pysetup graph --help
@@ -203,6 +212,13 @@
     return main()
 
 
+@action_help(generate_usage)
+def _generate(dispatcher, args, **kw):
+    generate_setup_py()
+    print('The setup.py was generated')
+
+
+
 @action_help(graph_usage)
 def _graph(dispatcher, args, **kw):
     name = args[1]
@@ -224,15 +240,22 @@
         if 'setup.py' in listing or 'setup.cfg' in listing:
             args.insert(1, os.getcwd())
         else:
-            logger.warning('no project to install')
-            return
+            logger.warning('No project to install.')
+            return 1
 
+    target = args[1]
     # installing from a source dir or archive file?
-    if os.path.isdir(args[1]) or _is_archive_file(args[1]):
-        install_local_project(args[1])
+    if os.path.isdir(target) or _is_archive_file(target):
+        if install_local_project(target):
+            return 0
+        else:
+            return 1
     else:
         # download from PyPI
-        install(args[1])
+        if install(target):
+            return 0
+        else:
+            return 1
 
 
 @action_help(metadata_usage)
@@ -335,13 +358,21 @@
 def _list(dispatcher, args, **kw):
     opts = _parse_args(args[1:], '', ['all'])
     dists = get_distributions(use_egg_info=True)
-    if 'all' in opts:
+    if 'all' in opts or opts['args'] == []:
         results = dists
     else:
         results = [d for d in dists if d.name.lower() in opts['args']]
 
+    number = 0
     for dist in results:
         print('%s %s at %s' % (dist.name, dist.metadata['version'], dist.path))
+        number += 1
+
+    print('')
+    if number == 0:
+        print('Nothing seems to be installed.')
+    else:
+        print('Found %d projects installed.' % number)
 
 
 @action_help(search_usage)
@@ -351,8 +382,9 @@
     It is able to search for a specific index (specified with --index), using
     the simple or xmlrpc index types (with --type xmlrpc / --type simple)
     """
-    opts = _parse_args(args[1:], '', ['simple', 'xmlrpc'])
+    #opts = _parse_args(args[1:], '', ['simple', 'xmlrpc'])
     # 1. what kind of index is requested ? (xmlrpc / simple)
+    raise NotImplementedError()
 
 
 actions = [
@@ -364,6 +396,7 @@
     ('list', 'Search for local projects', _list),
     ('graph', 'Display a graph', _graph),
     ('create', 'Create a Project', _create),
+    ('generate-setup', 'Generates a backward-compatible setup.py', _generate)
 ]
 
 
@@ -399,6 +432,14 @@
             msg = 'Unrecognized action "%s"' % self.action
             raise PackagingArgError(msg)
 
+        self._set_logger()
+        self.args = args
+
+        # for display options we return immediately
+        if self.help or self.action is None:
+            self._show_help(self.parser, display_options_=False)
+
+    def _set_logger(self):
         # setting up the logging level from the command-line options
         # -q gets warning, error and critical
         if self.verbose == 0:
@@ -416,13 +457,11 @@
         else:  # -vv and more for debug
             level = logging.DEBUG
 
-        # for display options we return immediately
-        option_order = self.parser.get_option_order()
-
-        self.args = args
-
-        if self.help or self.action is None:
-            self._show_help(self.parser, display_options_=False)
+        # setting up the stream handler
+        handler = logging.StreamHandler(sys.stderr)
+        handler.setLevel(level)
+        logger.addHandler(handler)
+        logger.setLevel(level)
 
     def _parse_command_opts(self, parser, args):
         # Pull the current command from the head of the command line
@@ -567,8 +606,6 @@
         if isinstance(command, str):
             command = get_command_class(command)
 
-        name = command.get_command_name()
-
         desc = getattr(command, 'description', '(no description available)')
         print('Description: %s' % desc)
         print('')
@@ -635,11 +672,17 @@
 
 
 def main(args=None):
-    dispatcher = Dispatcher(args)
-    if dispatcher.action is None:
-        return
+    old_level = logger.level
+    old_handlers = copy(logger.handlers)
+    try:
+        dispatcher = Dispatcher(args)
+        if dispatcher.action is None:
+            return
+        return dispatcher()
+    finally:
+        logger.setLevel(old_level)
+        logger.handlers[:] = old_handlers
 
-    return dispatcher()
 
 if __name__ == '__main__':
     sys.exit(main())
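
Since actions now return exit codes and main() restores the logger
configuration afterwards, the dispatcher can be driven programmatically; a
minimal sketch with an illustrative argument list:

    import sys
    from packaging.run import main

    # 'list' prints the installed projects plus a summary line and returns
    # a value suitable for use as a process exit status.
    sys.exit(main(['list']))
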
diff --git a/Lib/packaging/tests/support.py b/Lib/packaging/tests/support.py
--- a/Lib/packaging/tests/support.py
+++ b/Lib/packaging/tests/support.py
@@ -65,14 +65,17 @@
     configured to record all messages logged to the 'packaging' logger.
 
     Use get_logs to retrieve messages and self.loghandler.flush to discard
-    them.
+    them.  get_logs automatically flushes the logs; if you test code that
+    generates logging messages but don't use get_logs, you have to flush
+    manually before doing other checks on logging messages, otherwise you
+    will get irrelevant results.  See example in test_command_check.
     """
 
     def setUp(self):
         super(LoggingCatcher, self).setUp()
         self.loghandler = handler = _TestHandler()
+        self.old_level = logger.level
         logger.addHandler(handler)
-        self.addCleanup(logger.setLevel, logger.level)
         logger.setLevel(logging.DEBUG)  # we want all messages
 
     def tearDown(self):
@@ -84,22 +87,29 @@
         for ref in weakref.getweakrefs(handler):
             logging._removeHandlerRef(ref)
         del self.loghandler
+        logger.setLevel(self.old_level)
         super(LoggingCatcher, self).tearDown()
 
     def get_logs(self, *levels):
         """Return all log messages with level in *levels*.
 
-        Without explicit levels given, returns all messages.
-        *levels* defaults to all levels.  For log calls with arguments (i.e.
-        logger.info('bla bla %s', arg)), the messages
-        Returns a list.
+        Without explicit levels given, returns all messages.  *levels* defaults
+        to all levels.  For log calls with arguments (i.e.
+        logger.info('bla bla %r', arg)), the messages will be formatted before
+        being returned (e.g. "bla bla 'thing'").
+
+        Returns a list.  Automatically flushes the loghandler after being
+        called.
 
         Example: self.get_logs(logging.WARN, logging.DEBUG).
         """
         if not levels:
-            return [log.getMessage() for log in self.loghandler.buffer]
-        return [log.getMessage() for log in self.loghandler.buffer
-                if log.levelno in levels]
+            messages = [log.getMessage() for log in self.loghandler.buffer]
+        else:
+            messages = [log.getMessage() for log in self.loghandler.buffer
+                        if log.levelno in levels]
+        self.loghandler.flush()
+        return messages
 
 
 class TempdirManager:
@@ -252,6 +262,15 @@
     return d
 
 
+def fake_dec(*args, **kw):
+    """Fake decorator"""
+    def _wrap(func):
+        def __wrap(*args, **kw):
+            return func(*args, **kw)
+        return __wrap
+    return _wrap
+
+
 try:
     from test.support import skip_unless_symlink
 except ImportError:
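
A small sketch of the intended get_logs() pattern (test names are
illustrative): messages come back formatted, and the handler is flushed, so a
second call only sees records logged after the first one.

    import logging
    import unittest

    from packaging import logger
    from packaging.tests import support

    class ExampleLoggingTest(support.LoggingCatcher, unittest.TestCase):

        def test_get_logs_flushes(self):
            logger.warning('bla bla %r', 'thing')
            self.assertEqual(self.get_logs(logging.WARNING), ["bla bla 'thing'"])
            # flushed by the previous call, so nothing is left now
            self.assertEqual(self.get_logs(), [])
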
diff --git a/Lib/packaging/tests/test_command_build_ext.py b/Lib/packaging/tests/test_command_build_ext.py
--- a/Lib/packaging/tests/test_command_build_ext.py
+++ b/Lib/packaging/tests/test_command_build_ext.py
@@ -265,7 +265,7 @@
     def test_get_outputs(self):
         tmp_dir = self.mkdtemp()
         c_file = os.path.join(tmp_dir, 'foo.c')
-        self.write_file(c_file, 'void PyInit_foo(void) {};\n')
+        self.write_file(c_file, 'void PyInit_foo(void) {}\n')
         ext = Extension('foo', [c_file], optional=False)
         dist = Distribution({'name': 'xx',
                              'ext_modules': [ext]})
@@ -370,8 +370,8 @@
     src = _get_source_filename()
     if not os.path.exists(src):
         if verbose:
-            print ('test_build_ext: Cannot find source code (test'
-                   ' must run in python build dir)')
+            print('test_command_build_ext: Cannot find source code (test'
+                  ' must run in python build dir)')
         return unittest.TestSuite()
     else:
         return unittest.makeSuite(BuildExtTestCase)
diff --git a/Lib/packaging/tests/test_command_build_py.py b/Lib/packaging/tests/test_command_build_py.py
--- a/Lib/packaging/tests/test_command_build_py.py
+++ b/Lib/packaging/tests/test_command_build_py.py
@@ -61,9 +61,12 @@
         pkgdest = os.path.join(destination, "pkg")
         files = os.listdir(pkgdest)
         self.assertIn("__init__.py", files)
-        if not sys.dont_write_bytecode:
+        self.assertIn("README.txt", files)
+        # XXX even with -O, distutils writes pyc, not pyo; bug?
+        if sys.dont_write_bytecode:
+            self.assertNotIn("__init__.pyc", files)
+        else:
             self.assertIn("__init__.pyc", files)
-        self.assertIn("README.txt", files)
 
     def test_empty_package_dir(self):
         # See SF 1668596/1720897.
@@ -93,7 +96,7 @@
 
             try:
                 dist.run_commands()
-            except PackagingFileError as e:
+            except PackagingFileError:
                 self.fail("failed package_data test when package_dir is ''")
         finally:
             # Restore state.
diff --git a/Lib/packaging/tests/test_command_check.py b/Lib/packaging/tests/test_command_check.py
--- a/Lib/packaging/tests/test_command_check.py
+++ b/Lib/packaging/tests/test_command_check.py
@@ -36,7 +36,6 @@
         # now let's add the required fields
         # and run it again, to make sure we don't get
         # any warning anymore
-        self.loghandler.flush()
         metadata = {'home_page': 'xxx', 'author': 'xxx',
                     'author_email': 'xxx',
                     'name': 'xxx', 'version': '4.2',
@@ -50,8 +49,10 @@
         self.assertRaises(PackagingSetupError, self._run,
             {'name': 'xxx', 'version': 'xxx'}, **{'strict': 1})
 
+        # clear warnings from the previous calls
+        self.loghandler.flush()
+
         # and of course, no error when all metadata fields are present
-        self.loghandler.flush()
         cmd = self._run(metadata, strict=True)
         self.assertEqual([], self.get_logs(logging.WARNING))
 
@@ -70,7 +71,6 @@
                     'name': 'xxx', 'version': '4.2',
                     'requires_python': '2.4',
                     }
-        self.loghandler.flush()
         cmd = self._run(metadata)
         self.assertEqual([], self.get_logs(logging.WARNING))
 
@@ -85,9 +85,11 @@
         self.assertRaises(PackagingSetupError, self._run, metadata,
             **{'strict': 1})
 
+        # clear warnings from the previous calls
+        self.loghandler.flush()
+
         # now with correct version format again
         metadata['version'] = '4.2'
-        self.loghandler.flush()
         cmd = self._run(metadata, strict=True)
         self.assertEqual([], self.get_logs(logging.WARNING))
 
@@ -100,7 +102,6 @@
         cmd.check_restructuredtext()
         self.assertEqual(len(self.get_logs(logging.WARNING)), 1)
 
-        self.loghandler.flush()
         pkg_info, dist = self.create_dist(description='title\n=====\n\ntest')
         cmd = check(dist)
         cmd.check_restructuredtext()
@@ -123,6 +124,17 @@
         cmd.check_hooks_resolvable()
         self.assertEqual(len(self.get_logs(logging.WARNING)), 1)
 
+    def test_warn(self):
+        _, dist = self.create_dist()
+        cmd = check(dist)
+        self.assertEqual([], self.get_logs())
+        cmd.warn('hello')
+        self.assertEqual(['check: hello'], self.get_logs())
+        cmd.warn('hello %s', 'world')
+        self.assertEqual(['check: hello world'], self.get_logs())
+        cmd.warn('hello %s %s', 'beautiful', 'world')
+        self.assertEqual(['check: hello beautiful world'], self.get_logs())
+
 
 def test_suite():
     return unittest.makeSuite(CheckTestCase)
diff --git a/Lib/packaging/tests/test_command_install_lib.py b/Lib/packaging/tests/test_command_install_lib.py
--- a/Lib/packaging/tests/test_command_install_lib.py
+++ b/Lib/packaging/tests/test_command_install_lib.py
@@ -67,6 +67,10 @@
         cmd.distribution.packages = [pkg_dir]
         cmd.distribution.script_name = 'setup.py'
 
+        # make sure the build_lib is set to the temp dir
+        build_dir = os.path.split(pkg_dir)[0]
+        cmd.get_finalized_command('build_py').build_lib = build_dir
+
         # get_output should return 4 elements
         self.assertEqual(len(cmd.get_outputs()), 4)
 
diff --git a/Lib/packaging/tests/test_command_sdist.py b/Lib/packaging/tests/test_command_sdist.py
--- a/Lib/packaging/tests/test_command_sdist.py
+++ b/Lib/packaging/tests/test_command_sdist.py
@@ -33,7 +33,6 @@
 
 MANIFEST = """\
 # file GENERATED by packaging, do NOT edit
-README
 inroot.txt
 data%(sep)sdata.dt
 scripts%(sep)sscript.py
@@ -129,7 +128,7 @@
             content = zip_file.namelist()
 
         # making sure everything has been pruned correctly
-        self.assertEqual(len(content), 3)
+        self.assertEqual(len(content), 2)
 
     @requires_zlib
     @unittest.skipIf(find_executable('tar') is None or
@@ -214,7 +213,7 @@
 
         # Making sure everything was added. This includes 9 code and data
         # files in addition to PKG-INFO.
-        self.assertEqual(len(content), 10)
+        self.assertEqual(len(content), 9)
 
         # Checking the MANIFEST
         with open(join(self.tmp_dir, 'MANIFEST')) as fp:
@@ -331,7 +330,7 @@
         with open(cmd.manifest) as f:
             manifest = [line.strip() for line in f.read().split('\n')
                         if line.strip() != '']
-        self.assertEqual(len(manifest), 4)
+        self.assertEqual(len(manifest), 3)
 
         # Adding a file
         self.write_file((self.tmp_dir, 'somecode', 'doc2.txt'), '#')
@@ -348,7 +347,7 @@
                          if line.strip() != '']
 
         # Do we have the new file in MANIFEST?
-        self.assertEqual(len(manifest2), 5)
+        self.assertEqual(len(manifest2), 4)
         self.assertIn('doc2.txt', manifest2[-1])
 
     @requires_zlib
diff --git a/Lib/packaging/tests/test_command_test.py b/Lib/packaging/tests/test_command_test.py
--- a/Lib/packaging/tests/test_command_test.py
+++ b/Lib/packaging/tests/test_command_test.py
@@ -150,8 +150,7 @@
         cmd.tests_require = [phony_project]
         cmd.ensure_finalized()
         logs = self.get_logs(logging.WARNING)
-        self.assertEqual(1, len(logs))
-        self.assertIn(phony_project, logs[0])
+        self.assertIn(phony_project, logs[-1])
 
     def prepare_a_module(self):
         tmp_dir = self.mkdtemp()
diff --git a/Lib/packaging/tests/test_config.py b/Lib/packaging/tests/test_config.py
--- a/Lib/packaging/tests/test_config.py
+++ b/Lib/packaging/tests/test_config.py
@@ -176,9 +176,14 @@
 
         self.addCleanup(os.chdir, os.getcwd())
         tempdir = self.mkdtemp()
+        self.working_dir = os.getcwd()
         os.chdir(tempdir)
         self.tempdir = tempdir
 
+    def tearDown(self):
+        os.chdir(self.working_dir)
+        super(ConfigTestCase, self).tearDown()
+
     def write_setup(self, kwargs=None):
         opts = {'description-file': 'README', 'extra-files': '',
                 'setup-hook': 'packaging.tests.test_config.hook'}
diff --git a/Lib/packaging/tests/test_create.py b/Lib/packaging/tests/test_create.py
--- a/Lib/packaging/tests/test_create.py
+++ b/Lib/packaging/tests/test_create.py
@@ -31,11 +31,11 @@
             'doc': sys.prefix + '/share/doc/pyxfoil', }
 
     def tearDown(self):
-        super(CreateTestCase, self).tearDown()
         sys.stdin = self._stdin
         sys.stdout = self._stdout
         os.chdir(self._cwd)
         sysconfig.get_paths = self._old_get_paths
+        super(CreateTestCase, self).tearDown()
 
     def test_ask_yn(self):
         sys.stdin.write('y\n')
diff --git a/Lib/packaging/tests/test_database.py b/Lib/packaging/tests/test_database.py
--- a/Lib/packaging/tests/test_database.py
+++ b/Lib/packaging/tests/test_database.py
@@ -1,23 +1,25 @@
 import os
 import io
 import csv
-import imp
 import sys
 import shutil
-import zipfile
 import tempfile
 from os.path import relpath  # separate import for backport concerns
 from hashlib import md5
+from textwrap import dedent
 
+from packaging.tests.test_util import GlobTestCaseBase
+from packaging.tests.support import requires_zlib
+
+from packaging.config import get_resources_dests
 from packaging.errors import PackagingError
 from packaging.metadata import Metadata
-from packaging.tests import unittest, run_unittest, support, TESTFN
-from packaging.tests.support import requires_zlib
-
+from packaging.tests import unittest, support
 from packaging.database import (
     Distribution, EggInfoDistribution, get_distribution, get_distributions,
     provides_distribution, obsoletes_distribution, get_file_users,
-    enable_cache, disable_cache, distinfo_dirname, _yield_distributions)
+    enable_cache, disable_cache, distinfo_dirname, _yield_distributions,
+    get_file, get_file_path)
 
 # TODO Add a test for getting a distribution provided by another distribution
 # TODO Add a test for absolute pathed RECORD items (e.g. /etc/myapp/config.ini)
@@ -504,12 +506,161 @@
         checkLists(dists + eggs, found)
 
 
+class DataFilesTestCase(GlobTestCaseBase):
+
+    def assertRulesMatch(self, rules, spec):
+        tempdir = self.build_files_tree(spec)
+        expected = self.clean_tree(spec)
+        result = get_resources_dests(tempdir, rules)
+        self.assertEqual(expected, result)
+
+    def clean_tree(self, spec):
+        files = {}
+        for path, value in spec.items():
+            if value is not None:
+                files[path] = value
+        return files
+
+    def test_simple_glob(self):
+        rules = [('', '*.tpl', '{data}')]
+        spec = {'coucou.tpl': '{data}/coucou.tpl',
+                'Donotwant': None}
+        self.assertRulesMatch(rules, spec)
+
+    def test_multiple_match(self):
+        rules = [('scripts', '*.bin', '{appdata}'),
+                 ('scripts', '*', '{appscript}')]
+        spec = {'scripts/script.bin': '{appscript}/script.bin',
+                'Babarlikestrawberry': None}
+        self.assertRulesMatch(rules, spec)
+
+    def test_set_match(self):
+        rules = [('scripts', '*.{bin,sh}', '{appscript}')]
+        spec = {'scripts/script.bin': '{appscript}/script.bin',
+                'scripts/babar.sh':  '{appscript}/babar.sh',
+                'Babarlikestrawberry': None}
+        self.assertRulesMatch(rules, spec)
+
+    def test_set_match_multiple(self):
+        rules = [('scripts', 'script{s,}.{bin,sh}', '{appscript}')]
+        spec = {'scripts/scripts.bin': '{appscript}/scripts.bin',
+                'scripts/script.sh':  '{appscript}/script.sh',
+                'Babarlikestrawberry': None}
+        self.assertRulesMatch(rules, spec)
+
+    def test_set_match_exclude(self):
+        rules = [('scripts', '*', '{appscript}'),
+                 ('', os.path.join('**', '*.sh'), None)]
+        spec = {'scripts/scripts.bin': '{appscript}/scripts.bin',
+                'scripts/script.sh':  None,
+                'Babarlikestrawberry': None}
+        self.assertRulesMatch(rules, spec)
+
+    def test_glob_in_base(self):
+        rules = [('scrip*', '*.bin', '{appscript}')]
+        spec = {'scripts/scripts.bin': '{appscript}/scripts.bin',
+                'scripouille/babar.bin': '{appscript}/babar.bin',
+                'scriptortu/lotus.bin': '{appscript}/lotus.bin',
+                'Babarlikestrawberry': None}
+        self.assertRulesMatch(rules, spec)
+
+    def test_recursive_glob(self):
+        rules = [('', os.path.join('**', '*.bin'), '{binary}')]
+        spec = {'binary0.bin': '{binary}/binary0.bin',
+                'scripts/binary1.bin': '{binary}/scripts/binary1.bin',
+                'scripts/bin/binary2.bin': '{binary}/scripts/bin/binary2.bin',
+                'you/kill/pandabear.guy': None}
+        self.assertRulesMatch(rules, spec)
+
+    def test_final_exemple_glob(self):
+        rules = [
+            ('mailman/database/schemas/', '*', '{appdata}/schemas'),
+            ('', os.path.join('**', '*.tpl'), '{appdata}/templates'),
+            ('', os.path.join('developer-docs', '**', '*.txt'), '{doc}'),
+            ('', 'README', '{doc}'),
+            ('mailman/etc/', '*', '{config}'),
+            ('mailman/foo/', os.path.join('**', 'bar', '*.cfg'),
+             '{config}/baz'),
+            ('mailman/foo/', os.path.join('**', '*.cfg'), '{config}/hmm'),
+            ('', 'some-new-semantic.sns', '{funky-crazy-category}'),
+        ]
+        spec = {
+            'README': '{doc}/README',
+            'some.tpl': '{appdata}/templates/some.tpl',
+            'some-new-semantic.sns':
+                '{funky-crazy-category}/some-new-semantic.sns',
+            'mailman/database/mailman.db': None,
+            'mailman/database/schemas/blah.schema':
+                '{appdata}/schemas/blah.schema',
+            'mailman/etc/my.cnf': '{config}/my.cnf',
+            'mailman/foo/some/path/bar/my.cfg':
+                '{config}/hmm/some/path/bar/my.cfg',
+            'mailman/foo/some/path/other.cfg':
+                '{config}/hmm/some/path/other.cfg',
+            'developer-docs/index.txt': '{doc}/developer-docs/index.txt',
+            'developer-docs/api/toc.txt': '{doc}/developer-docs/api/toc.txt',
+        }
+        self.maxDiff = None
+        self.assertRulesMatch(rules, spec)
+
+    def test_get_file(self):
+        # Create a fake dist
+        temp_site_packages = tempfile.mkdtemp()
+        self.addCleanup(shutil.rmtree, temp_site_packages)
+
+        dist_name = 'test'
+        dist_info = os.path.join(temp_site_packages, 'test-0.1.dist-info')
+        os.mkdir(dist_info)
+
+        metadata_path = os.path.join(dist_info, 'METADATA')
+        resources_path = os.path.join(dist_info, 'RESOURCES')
+
+        with open(metadata_path, 'w') as fp:
+            fp.write(dedent("""\
+                Metadata-Version: 1.2
+                Name: test
+                Version: 0.1
+                Summary: test
+                Author: me
+                """))
+
+        test_path = 'test.cfg'
+
+        fd, test_resource_path = tempfile.mkstemp()
+        os.close(fd)
+        self.addCleanup(os.remove, test_resource_path)
+
+        with open(test_resource_path, 'w') as fp:
+            fp.write('Config')
+
+        with open(resources_path, 'w') as fp:
+            fp.write('%s,%s' % (test_path, test_resource_path))
+
+        # Add fake site-packages to sys.path to retrieve fake dist
+        self.addCleanup(sys.path.remove, temp_site_packages)
+        sys.path.insert(0, temp_site_packages)
+
+        # Force packaging.database to rescan the sys.path
+        self.addCleanup(enable_cache)
+        disable_cache()
+
+        # Try to retrieve resources paths and files
+        self.assertEqual(get_file_path(dist_name, test_path),
+                         test_resource_path)
+        self.assertRaises(KeyError, get_file_path, dist_name, 'i-dont-exist')
+
+        with get_file(dist_name, test_path) as fp:
+            self.assertEqual(fp.read(), 'Config')
+        self.assertRaises(KeyError, get_file, dist_name, 'i-dont-exist')
+
+
 def test_suite():
     suite = unittest.TestSuite()
     load = unittest.defaultTestLoader.loadTestsFromTestCase
     suite.addTest(load(TestDistribution))
     suite.addTest(load(TestEggInfoDistribution))
     suite.addTest(load(TestDatabase))
+    suite.addTest(load(DataFilesTestCase))
     return suite
 
 
diff --git a/Lib/packaging/tests/test_dist.py b/Lib/packaging/tests/test_dist.py
--- a/Lib/packaging/tests/test_dist.py
+++ b/Lib/packaging/tests/test_dist.py
@@ -13,6 +13,7 @@
 from packaging.tests import TESTFN, captured_stdout
 from packaging.tests import support, unittest
 from packaging.tests.support import create_distribution
+from test.support import unload
 
 
 class test_dist(Command):
@@ -224,6 +225,7 @@
         # prepare the call recorders
         sys.path.append(temp_home)
         self.addCleanup(sys.path.remove, temp_home)
+        self.addCleanup(unload, module_name)
         record = __import__(module_name).record
 
         old_run = cmd.run
diff --git a/Lib/packaging/tests/test_install.py b/Lib/packaging/tests/test_install.py
--- a/Lib/packaging/tests/test_install.py
+++ b/Lib/packaging/tests/test_install.py
@@ -1,18 +1,18 @@
 """Tests for the packaging.install module."""
-
 import os
 from tempfile import mkstemp
+
 from packaging import install
 from packaging.pypi.xmlrpc import Client
 from packaging.metadata import Metadata
-
-from packaging.tests.support import LoggingCatcher, TempdirManager, unittest
+from packaging.tests.support import (LoggingCatcher, TempdirManager, unittest,
+                                     fake_dec)
 try:
     import threading
     from packaging.tests.pypi_server import use_xmlrpc_server
 except ImportError:
     threading = None
-    use_xmlrpc_server = None
+    use_xmlrpc_server = fake_dec
 
 
 class InstalledDist:
@@ -339,7 +339,7 @@
                     self.assertTrue(os.path.exists(f))
                 dist._unlink_installed_files()
         finally:
-            install.install_dist = old_install_dist
+            install._install_dist = old_install_dist
             install.uninstall = old_uninstall
 
     def test_install_from_infos_install_succes(self):
@@ -356,6 +356,21 @@
         finally:
             install._install_dist = old_install_dist
 
+    def test_install_permission_denied(self):
+        # if we don't have access to the installation
+        # path, we should abort immediately
+        project = os.path.join(os.path.dirname(__file__), 'package.tgz')
+        install_path = self.mkdtemp()
+        old_get_path = install.get_path
+        install.get_path = lambda path: install_path
+        old_mod = os.stat(install_path).st_mode
+        os.chmod(install_path, 0)
+        try:
+            self.assertFalse(install.install(project))
+        finally:
+            os.chmod(install_path, old_mod)
+            install.get_path = old_get_path
+
 
 def test_suite():
     suite = unittest.TestSuite()
diff --git a/Lib/packaging/tests/test_manifest.py b/Lib/packaging/tests/test_manifest.py
--- a/Lib/packaging/tests/test_manifest.py
+++ b/Lib/packaging/tests/test_manifest.py
@@ -26,6 +26,14 @@
                        support.LoggingCatcher,
                        unittest.TestCase):
 
+    def setUp(self):
+        super(ManifestTestCase, self).setUp()
+        self.cwd = os.getcwd()
+
+    def tearDown(self):
+        os.chdir(self.cwd)
+        super(ManifestTestCase, self).tearDown()
+
     def test_manifest_reader(self):
         tmpdir = self.mkdtemp()
         MANIFEST = os.path.join(tmpdir, 'MANIFEST.in')
@@ -42,9 +50,6 @@
         for warning in warnings:
             self.assertIn('no files found matching', warning)
 
-        # reset logs for the next assert
-        self.loghandler.flush()
-
         # manifest also accepts file-like objects
         with open(MANIFEST) as f:
             manifest.read_template(f)
diff --git a/Lib/packaging/tests/test_pypi_dist.py b/Lib/packaging/tests/test_pypi_dist.py
--- a/Lib/packaging/tests/test_pypi_dist.py
+++ b/Lib/packaging/tests/test_pypi_dist.py
@@ -7,12 +7,13 @@
 from packaging.pypi.errors import HashDoesNotMatch, UnsupportedHashName
 
 from packaging.tests import unittest
-from packaging.tests.support import TempdirManager, requires_zlib
+from packaging.tests.support import TempdirManager, requires_zlib, fake_dec
 try:
     import threading
     from packaging.tests.pypi_server import use_pypi_server
 except ImportError:
-    threading = use_pypi_server = None
+    threading = None
+    use_pypi_server = fake_dec
 
 
 def Dist(*args, **kwargs):
diff --git a/Lib/packaging/tests/test_pypi_simple.py b/Lib/packaging/tests/test_pypi_simple.py
--- a/Lib/packaging/tests/test_pypi_simple.py
+++ b/Lib/packaging/tests/test_pypi_simple.py
@@ -10,9 +10,19 @@
 from packaging.pypi.simple import Crawler
 
 from packaging.tests import unittest
-from packaging.tests.support import TempdirManager, LoggingCatcher
-from packaging.tests.pypi_server import (use_pypi_server, PyPIServer,
-                                         PYPI_DEFAULT_STATIC_PATH)
+from packaging.tests.support import (TempdirManager, LoggingCatcher,
+                                     fake_dec)
+
+try:
+    import _thread
+    from packaging.tests.pypi_server import (use_pypi_server, PyPIServer,
+                                             PYPI_DEFAULT_STATIC_PATH)
+except ImportError:
+    _thread = None
+    use_pypi_server = fake_dec
+    PYPI_DEFAULT_STATIC_PATH = os.path.join(
+        os.path.dirname(os.path.abspath(__file__)), 'pypiserver')
+
 
 
 class SimpleCrawlerTestCase(TempdirManager,
@@ -28,6 +38,7 @@
         return Crawler(server.full_address + base_url, *args,
                        **kwargs)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server()
     def test_bad_urls(self, server):
         crawler = Crawler()
@@ -84,6 +95,7 @@
                 'http://www.famfamfam.com/">')
         crawler._process_url(url, page)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server("test_found_links")
     def test_found_links(self, server):
         # Browse the index, asking for a specified release version
@@ -139,6 +151,7 @@
         self.assertTrue(
             crawler._is_browsable("http://pypi.example.org/a/path"))
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server("with_externals")
     def test_follow_externals(self, server):
         # Include external pages
@@ -149,6 +162,7 @@
         self.assertIn(server.full_address + "/external/external.html",
             crawler._processed_urls)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server("with_real_externals")
     def test_restrict_hosts(self, server):
         # Only using a list of allowed hosts is possible
@@ -159,6 +173,7 @@
         self.assertNotIn(server.full_address + "/external/external.html",
             crawler._processed_urls)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server(static_filesystem_paths=["with_externals"],
         static_uri_paths=["simple", "external"])
     def test_links_priority(self, server):
@@ -192,6 +207,7 @@
                          releases[0].dists['sdist'].url['hashval'])
         self.assertEqual('md5', releases[0].dists['sdist'].url['hashname'])
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server(static_filesystem_paths=["with_norel_links"],
         static_uri_paths=["simple", "external"])
     def test_not_scan_all_links(self, server):
@@ -217,6 +233,7 @@
         self.assertIn("%s/foobar-2.0.tar.gz" % server.full_address,
             crawler._processed_urls)  # linked from external homepage (rel)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     def test_uses_mirrors(self):
         # When the main repository seems down, try using the given mirrors
         server = PyPIServer("foo_bar_baz")
@@ -314,6 +331,7 @@
         self.assertIn('http://example.org/some/simpleurl', found_links)
         self.assertIn('http://example.org/some/download', found_links)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server("project_list")
     def test_search_projects(self, server):
         # we can search the index for some projects, on their names
diff --git a/Lib/packaging/tests/test_resources.py b/Lib/packaging/tests/test_resources.py
deleted file mode 100644
--- a/Lib/packaging/tests/test_resources.py
+++ /dev/null
@@ -1,167 +0,0 @@
-"""Tests for packaging.resources."""
-
-import os
-import sys
-import shutil
-import tempfile
-from textwrap import dedent
-from packaging.config import get_resources_dests
-from packaging.database import disable_cache, enable_cache
-from packaging.resources import get_file, get_file_path
-
-from packaging.tests import unittest
-from packaging.tests.test_util import GlobTestCaseBase
-
-
-class DataFilesTestCase(GlobTestCaseBase):
-
-    def assertRulesMatch(self, rules, spec):
-        tempdir = self.build_files_tree(spec)
-        expected = self.clean_tree(spec)
-        result = get_resources_dests(tempdir, rules)
-        self.assertEqual(expected, result)
-
-    def clean_tree(self, spec):
-        files = {}
-        for path, value in spec.items():
-            if value is not None:
-                files[path] = value
-        return files
-
-    def test_simple_glob(self):
-        rules = [('', '*.tpl', '{data}')]
-        spec = {'coucou.tpl': '{data}/coucou.tpl',
-                'Donotwant': None}
-        self.assertRulesMatch(rules, spec)
-
-    def test_multiple_match(self):
-        rules = [('scripts', '*.bin', '{appdata}'),
-                 ('scripts', '*', '{appscript}')]
-        spec = {'scripts/script.bin': '{appscript}/script.bin',
-                'Babarlikestrawberry': None}
-        self.assertRulesMatch(rules, spec)
-
-    def test_set_match(self):
-        rules = [('scripts', '*.{bin,sh}', '{appscript}')]
-        spec = {'scripts/script.bin': '{appscript}/script.bin',
-                'scripts/babar.sh':  '{appscript}/babar.sh',
-                'Babarlikestrawberry': None}
-        self.assertRulesMatch(rules, spec)
-
-    def test_set_match_multiple(self):
-        rules = [('scripts', 'script{s,}.{bin,sh}', '{appscript}')]
-        spec = {'scripts/scripts.bin': '{appscript}/scripts.bin',
-                'scripts/script.sh':  '{appscript}/script.sh',
-                'Babarlikestrawberry': None}
-        self.assertRulesMatch(rules, spec)
-
-    def test_set_match_exclude(self):
-        rules = [('scripts', '*', '{appscript}'),
-                 ('', os.path.join('**', '*.sh'), None)]
-        spec = {'scripts/scripts.bin': '{appscript}/scripts.bin',
-                'scripts/script.sh':  None,
-                'Babarlikestrawberry': None}
-        self.assertRulesMatch(rules, spec)
-
-    def test_glob_in_base(self):
-        rules = [('scrip*', '*.bin', '{appscript}')]
-        spec = {'scripts/scripts.bin': '{appscript}/scripts.bin',
-                'scripouille/babar.bin': '{appscript}/babar.bin',
-                'scriptortu/lotus.bin': '{appscript}/lotus.bin',
-                'Babarlikestrawberry': None}
-        self.assertRulesMatch(rules, spec)
-
-    def test_recursive_glob(self):
-        rules = [('', os.path.join('**', '*.bin'), '{binary}')]
-        spec = {'binary0.bin': '{binary}/binary0.bin',
-                'scripts/binary1.bin': '{binary}/scripts/binary1.bin',
-                'scripts/bin/binary2.bin': '{binary}/scripts/bin/binary2.bin',
-                'you/kill/pandabear.guy': None}
-        self.assertRulesMatch(rules, spec)
-
-    def test_final_exemple_glob(self):
-        rules = [
-            ('mailman/database/schemas/', '*', '{appdata}/schemas'),
-            ('', os.path.join('**', '*.tpl'), '{appdata}/templates'),
-            ('', os.path.join('developer-docs', '**', '*.txt'), '{doc}'),
-            ('', 'README', '{doc}'),
-            ('mailman/etc/', '*', '{config}'),
-            ('mailman/foo/', os.path.join('**', 'bar', '*.cfg'), '{config}/baz'),
-            ('mailman/foo/', os.path.join('**', '*.cfg'), '{config}/hmm'),
-            ('', 'some-new-semantic.sns', '{funky-crazy-category}'),
-        ]
-        spec = {
-            'README': '{doc}/README',
-            'some.tpl': '{appdata}/templates/some.tpl',
-            'some-new-semantic.sns':
-                '{funky-crazy-category}/some-new-semantic.sns',
-            'mailman/database/mailman.db': None,
-            'mailman/database/schemas/blah.schema':
-                '{appdata}/schemas/blah.schema',
-            'mailman/etc/my.cnf': '{config}/my.cnf',
-            'mailman/foo/some/path/bar/my.cfg':
-                '{config}/hmm/some/path/bar/my.cfg',
-            'mailman/foo/some/path/other.cfg':
-                '{config}/hmm/some/path/other.cfg',
-            'developer-docs/index.txt': '{doc}/developer-docs/index.txt',
-            'developer-docs/api/toc.txt': '{doc}/developer-docs/api/toc.txt',
-        }
-        self.maxDiff = None
-        self.assertRulesMatch(rules, spec)
-
-    def test_get_file(self):
-        # Create a fake dist
-        temp_site_packages = tempfile.mkdtemp()
-        self.addCleanup(shutil.rmtree, temp_site_packages)
-
-        dist_name = 'test'
-        dist_info = os.path.join(temp_site_packages, 'test-0.1.dist-info')
-        os.mkdir(dist_info)
-
-        metadata_path = os.path.join(dist_info, 'METADATA')
-        resources_path = os.path.join(dist_info, 'RESOURCES')
-
-        with open(metadata_path, 'w') as fp:
-            fp.write(dedent("""\
-                Metadata-Version: 1.2
-                Name: test
-                Version: 0.1
-                Summary: test
-                Author: me
-                """))
-
-        test_path = 'test.cfg'
-
-        fd, test_resource_path = tempfile.mkstemp()
-        os.close(fd)
-        self.addCleanup(os.remove, test_resource_path)
-
-        with open(test_resource_path, 'w') as fp:
-            fp.write('Config')
-
-        with open(resources_path, 'w') as fp:
-            fp.write('%s,%s' % (test_path, test_resource_path))
-
-        # Add fake site-packages to sys.path to retrieve fake dist
-        self.addCleanup(sys.path.remove, temp_site_packages)
-        sys.path.insert(0, temp_site_packages)
-
-        # Force packaging.database to rescan the sys.path
-        self.addCleanup(enable_cache)
-        disable_cache()
-
-        # Try to retrieve resources paths and files
-        self.assertEqual(get_file_path(dist_name, test_path),
-                         test_resource_path)
-        self.assertRaises(KeyError, get_file_path, dist_name, 'i-dont-exist')
-
-        with get_file(dist_name, test_path) as fp:
-            self.assertEqual(fp.read(), 'Config')
-        self.assertRaises(KeyError, get_file, dist_name, 'i-dont-exist')
-
-
-def test_suite():
-    return unittest.makeSuite(DataFilesTestCase)
-
-if __name__ == '__main__':
-    unittest.main(defaultTest='test_suite')
diff --git a/Lib/packaging/tests/test_run.py b/Lib/packaging/tests/test_run.py
--- a/Lib/packaging/tests/test_run.py
+++ b/Lib/packaging/tests/test_run.py
@@ -3,8 +3,12 @@
 import os
 import sys
 import shutil
+from tempfile import mkstemp
+from io import StringIO
 
+from packaging import install
 from packaging.tests import unittest, support, TESTFN
+from packaging.run import main
 
 # setup script that uses __file__
 setup_using___file__ = """\
@@ -25,7 +29,8 @@
 """
 
 
-class CoreTestCase(unittest.TestCase):
+class CoreTestCase(support.TempdirManager, support.LoggingCatcher,
+                   unittest.TestCase):
 
     def setUp(self):
         super(CoreTestCase, self).setUp()
@@ -54,6 +59,24 @@
 
     # TODO restore the tests removed six months ago and port them to pysetup
 
+    def test_install(self):
+        # making sure install returns exit code 0 or 1
+        project = os.path.join(os.path.dirname(__file__), 'package.tgz')
+        install_path = self.mkdtemp()
+        old_get_path = install.get_path
+        install.get_path = lambda path: install_path
+        old_mod = os.stat(install_path).st_mode
+        os.chmod(install_path, 0)
+        old_stderr = sys.stderr
+        sys.stderr = StringIO()
+        try:
+            self.assertFalse(install.install(project))
+            self.assertEqual(main(['install', 'blabla']), 1)
+        finally:
+            sys.stderr = old_stderr
+            os.chmod(install_path, old_mod)
+            install.get_path = old_get_path
+
 
 def test_suite():
     return unittest.makeSuite(CoreTestCase)
diff --git a/Lib/packaging/tests/test_uninstall.py b/Lib/packaging/tests/test_uninstall.py
--- a/Lib/packaging/tests/test_uninstall.py
+++ b/Lib/packaging/tests/test_uninstall.py
@@ -1,6 +1,8 @@
 """Tests for the uninstall command."""
 import os
 import sys
+from io import StringIO
+import stat
 
 from packaging.database import disable_cache, enable_cache
 from packaging.run import main
@@ -36,8 +38,13 @@
         self.addCleanup(os.chdir, os.getcwd())
         self.addCleanup(enable_cache)
         self.root_dir = self.mkdtemp()
+        self.cwd = os.getcwd()
         disable_cache()
 
+    def tearDown(self):
+        os.chdir(self.cwd)
+        super(UninstallTestCase, self).tearDown()
+
     def run_setup(self, *args):
         # run setup with args
         args = ['run'] + list(args)
@@ -74,6 +81,8 @@
         if not dirname:
             dirname = self.make_dist(name, **kw)
         os.chdir(dirname)
+        self.addCleanup(setattr, sys, 'stderr', sys.stderr)
+        sys.stderr = StringIO()
         dist = self.run_setup('install_dist', '--prefix=' + self.root_dir)
         install_lib = self.get_path(dist, 'purelib')
         return dist, install_lib
@@ -88,10 +97,30 @@
         self.assertIsFile(install_lib, 'foo', '__init__.py')
         self.assertIsFile(install_lib, 'foo', 'sub', '__init__.py')
         self.assertIsFile(install_lib, 'Foo-0.1.dist-info', 'RECORD')
-        remove('Foo', paths=[install_lib])
+        self.assertTrue(remove('Foo', paths=[install_lib]))
         self.assertIsNotFile(install_lib, 'foo', 'sub', '__init__.py')
         self.assertIsNotFile(install_lib, 'Foo-0.1.dist-info', 'RECORD')
 
+    @unittest.skipIf(sys.platform == 'win32', 'deactivated for now')
+    def test_remove_issue(self):
+        # makes sure that if there are OSErrors (like permission denied),
+        # remove() stops and displays a clean error
+        dist, install_lib = self.install_dist('Meh')
+
+        # breaking os.rename
+        old = os.rename
+
+        def _rename(source, target):
+            raise OSError()
+
+        os.rename = _rename
+        try:
+            self.assertFalse(remove('Meh', paths=[install_lib]))
+        finally:
+            os.rename = old
+
+        self.assertTrue(remove('Meh', paths=[install_lib]))
+
 
 def test_suite():
     return unittest.makeSuite(UninstallTestCase)
diff --git a/Lib/packaging/tests/test_util.py b/Lib/packaging/tests/test_util.py
--- a/Lib/packaging/tests/test_util.py
+++ b/Lib/packaging/tests/test_util.py
@@ -526,11 +526,18 @@
 
 class GlobTestCase(GlobTestCaseBase):
 
+    def setUp(self):
+        super(GlobTestCase, self).setUp()
+        self.cwd = os.getcwd()
+
+    def tearDown(self):
+        os.chdir(self.cwd)
+        super(GlobTestCase, self).tearDown()
+
     def assertGlobMatch(self, glob, spec):
         """"""
         tempdir = self.build_files_tree(spec)
         expected = self.clean_tree(spec)
-        self.addCleanup(os.chdir, os.getcwd())
         os.chdir(tempdir)
         result = list(iglob(glob))
         self.assertCountEqual(expected, result)
@@ -811,51 +818,51 @@
 
     def test_is_setuptools_logs_setup_py_text_found(self):
         is_setuptools(self._setuptools_setup_py_pkg())
-        expected = ['setup.py file found', 'found setuptools text in setup.py']
-        self.assertEqual(expected, self.get_logs(logging.INFO))
+        expected = ['setup.py file found.',
+                    'No egg-info directory found.',
+                    'Found setuptools text in setup.py.']
+        self.assertEqual(expected, self.get_logs(logging.DEBUG))
 
     def test_is_setuptools_logs_setup_py_text_not_found(self):
         directory = self._random_setup_py_pkg()
         is_setuptools(directory)
-        info_expected = ['setup.py file found']
-        warn_expected = ['no egg-info directory found',
-                         'no setuptools text found in setup.py']
-        self.assertEqual(info_expected, self.get_logs(logging.INFO))
-        self.assertEqual(warn_expected, self.get_logs(logging.WARN))
+        expected = ['setup.py file found.', 'No egg-info directory found.',
+                    'No setuptools text found in setup.py.']
+        self.assertEqual(expected, self.get_logs(logging.DEBUG))
 
     def test_is_setuptools_logs_egg_info_dir_found(self):
         is_setuptools(self._setuptools_egg_info_pkg())
-        expected = ['setup.py file found', 'found egg-info directory']
-        self.assertEqual(expected, self.get_logs(logging.INFO))
+        expected = ['setup.py file found.', 'Found egg-info directory.']
+        self.assertEqual(expected, self.get_logs(logging.DEBUG))
 
     def test_is_distutils_logs_setup_py_text_found(self):
         is_distutils(self._distutils_setup_py_pkg())
-        expected = ['setup.py file found', 'found distutils text in setup.py']
-        self.assertEqual(expected, self.get_logs(logging.INFO))
+        expected = ['setup.py file found.',
+                    'No PKG-INFO file found.',
+                    'Found distutils text in setup.py.']
+        self.assertEqual(expected, self.get_logs(logging.DEBUG))
 
     def test_is_distutils_logs_setup_py_text_not_found(self):
         directory = self._random_setup_py_pkg()
         is_distutils(directory)
-        info_expected = ['setup.py file found']
-        warn_expected = ['no PKG-INFO file found',
-                         'no distutils text found in setup.py']
-        self.assertEqual(info_expected, self.get_logs(logging.INFO))
-        self.assertEqual(warn_expected, self.get_logs(logging.WARN))
+        expected = ['setup.py file found.', 'No PKG-INFO file found.',
+                    'No distutils text found in setup.py.']
+        self.assertEqual(expected, self.get_logs(logging.DEBUG))
 
     def test_is_distutils_logs_pkg_info_file_found(self):
         is_distutils(self._distutils_pkg_info())
-        expected = ['setup.py file found', 'PKG-INFO file found']
-        self.assertEqual(expected, self.get_logs(logging.INFO))
+        expected = ['setup.py file found.', 'PKG-INFO file found.']
+        self.assertEqual(expected, self.get_logs(logging.DEBUG))
 
     def test_is_packaging_logs_setup_cfg_found(self):
         is_packaging(self._valid_setup_cfg_pkg())
-        expected = ['setup.cfg file found']
-        self.assertEqual(expected, self.get_logs(logging.INFO))
+        expected = ['setup.cfg file found.']
+        self.assertEqual(expected, self.get_logs(logging.DEBUG))
 
     def test_is_packaging_logs_setup_cfg_not_found(self):
         is_packaging(self._empty_dir)
-        expected = ['no setup.cfg file found']
-        self.assertEqual(expected, self.get_logs(logging.WARN))
+        expected = ['No setup.cfg file found.']
+        self.assertEqual(expected, self.get_logs(logging.DEBUG))
 
     def _write_setuptools_setup_py(self, directory):
         self.write_file((directory, 'setup.py'),
diff --git a/Lib/packaging/util.py b/Lib/packaging/util.py
--- a/Lib/packaging/util.py
+++ b/Lib/packaging/util.py
@@ -1087,7 +1087,7 @@
     Raises a PackagingFileError when a setup.py already exists.
     """
     if os.path.exists("setup.py"):
-        raise PackagingFileError("a setup.py file alreadyexists")
+        raise PackagingFileError("a setup.py file already exists")
 
     with open("setup.py", "w", encoding='utf-8') as fp:
         fp.write(_SETUP_TMPL % {'func': getsource(cfg_to_args)})
@@ -1224,9 +1224,9 @@
         for item in os.listdir(srcdir):
             full_path = os.path.join(srcdir, item)
             if item.endswith('.egg-info') and os.path.isdir(full_path):
-                logger.info("found egg-info directory")
+                logger.debug("Found egg-info directory.")
                 return True
-    logger.warning("no egg-info directory found")
+    logger.debug("No egg-info directory found.")
     return False
 
 
@@ -1243,9 +1243,9 @@
     with open(setup_py, 'r', encoding='utf-8') as setup:
         for line in setup:
             if re.search(installer_pattern, line):
-                logger.info("found %s text in setup.py", installer)
+                logger.debug("Found %s text in setup.py.", installer)
                 return True
-    logger.warning("no %s text found in setup.py", installer)
+    logger.debug("No %s text found in setup.py.", installer)
     return False
 
 
@@ -1261,15 +1261,16 @@
     pkg_info = os.path.join(srcdir, 'PKG-INFO')
     has_pkg_info = os.path.isfile(pkg_info)
     if has_pkg_info:
-        logger.info("PKG-INFO file found")
-    logger.warning("no PKG-INFO file found")
+        logger.debug("PKG-INFO file found.")
+    else:
+        logger.debug("No PKG-INFO file found.")
     return has_pkg_info
 
 
 def _has_setup_py(srcdir):
     setup_py = os.path.join(srcdir, 'setup.py')
     if os.path.isfile(setup_py):
-        logger.info('setup.py file found')
+        logger.debug('setup.py file found.')
         return True
     return False
 
@@ -1277,9 +1278,9 @@
 def _has_setup_cfg(srcdir):
     setup_cfg = os.path.join(srcdir, 'setup.cfg')
     if os.path.isfile(setup_cfg):
-        logger.info('setup.cfg file found')
+        logger.debug('setup.cfg file found.')
         return True
-    logger.warning("no setup.cfg file found")
+    logger.debug("No setup.cfg file found.")
     return False
 
 
diff --git a/Lib/pickle.py b/Lib/pickle.py
--- a/Lib/pickle.py
+++ b/Lib/pickle.py
@@ -23,8 +23,6 @@
 
 """
 
-__version__ = "$Revision$"       # Code version
-
 from types import FunctionType, BuiltinFunctionType
 from copyreg import dispatch_table
 from copyreg import _extension_registry, _inverted_registry, _extension_cache
diff --git a/Lib/pkgutil.py b/Lib/pkgutil.py
--- a/Lib/pkgutil.py
+++ b/Lib/pkgutil.py
@@ -8,7 +8,7 @@
 
 __all__ = [
     'get_importer', 'iter_importers', 'get_loader', 'find_loader',
-    'walk_packages', 'iter_modules',
+    'walk_packages', 'iter_modules', 'get_data',
     'ImpImporter', 'ImpLoader', 'read_code', 'extend_path',
 ]
 
diff --git a/Lib/platform.py b/Lib/platform.py
--- a/Lib/platform.py
+++ b/Lib/platform.py
@@ -361,6 +361,8 @@
 
     """ Portable popen() interface.
     """
+    import warnings
+    warnings.warn('use os.popen instead', DeprecationWarning, stacklevel=2)
     return os.popen(cmd, mode, bufsize)
 
 def _norm_version(version, build=''):
diff --git a/Lib/plistlib.py b/Lib/plistlib.py
--- a/Lib/plistlib.py
+++ b/Lib/plistlib.py
@@ -68,13 +68,15 @@
     usually is a dictionary).
     """
     didOpen = False
-    if isinstance(pathOrFile, str):
-        pathOrFile = open(pathOrFile, 'rb')
-        didOpen = True
-    p = PlistParser()
-    rootObject = p.parse(pathOrFile)
-    if didOpen:
-        pathOrFile.close()
+    try:
+        if isinstance(pathOrFile, str):
+            pathOrFile = open(pathOrFile, 'rb')
+            didOpen = True
+        p = PlistParser()
+        rootObject = p.parse(pathOrFile)
+    finally:
+        if didOpen:
+            pathOrFile.close()
     return rootObject
 
 
@@ -83,15 +85,17 @@
     file name or a (writable) file object.
     """
     didOpen = False
-    if isinstance(pathOrFile, str):
-        pathOrFile = open(pathOrFile, 'wb')
-        didOpen = True
-    writer = PlistWriter(pathOrFile)
-    writer.writeln("<plist version=\"1.0\">")
-    writer.writeValue(rootObject)
-    writer.writeln("</plist>")
-    if didOpen:
-        pathOrFile.close()
+    try:
+        if isinstance(pathOrFile, str):
+            pathOrFile = open(pathOrFile, 'wb')
+            didOpen = True
+        writer = PlistWriter(pathOrFile)
+        writer.writeln("<plist version=\"1.0\">")
+        writer.writeValue(rootObject)
+        writer.writeln("</plist>")
+    finally:
+        if didOpen:
+            pathOrFile.close()
 
 
 def readPlistFromBytes(data):
@@ -352,7 +356,6 @@
     def __repr__(self):
         return "%s(%s)" % (self.__class__.__name__, repr(self.data))
 
-
 class PlistParser:
 
     def __init__(self):
@@ -362,11 +365,11 @@
 
     def parse(self, fileobj):
         from xml.parsers.expat import ParserCreate
-        parser = ParserCreate()
-        parser.StartElementHandler = self.handleBeginElement
-        parser.EndElementHandler = self.handleEndElement
-        parser.CharacterDataHandler = self.handleData
-        parser.ParseFile(fileobj)
+        self.parser = ParserCreate()
+        self.parser.StartElementHandler = self.handleBeginElement
+        self.parser.EndElementHandler = self.handleEndElement
+        self.parser.CharacterDataHandler = self.handleData
+        self.parser.ParseFile(fileobj)
         return self.root
 
     def handleBeginElement(self, element, attrs):
@@ -385,12 +388,18 @@
 
     def addObject(self, value):
         if self.currentKey is not None:
+            if not isinstance(self.stack[-1], type({})):
+                raise ValueError("unexpected element at line %d" %
+                                 self.parser.CurrentLineNumber)
             self.stack[-1][self.currentKey] = value
             self.currentKey = None
         elif not self.stack:
             # this is the root object
             self.root = value
         else:
+            if not isinstance(self.stack[-1], type([])):
+                raise ValueError("unexpected element at line %d" %
+                                 self.parser.CurrentLineNumber)
             self.stack[-1].append(value)
 
     def getData(self):
@@ -405,9 +414,15 @@
         self.addObject(d)
         self.stack.append(d)
     def end_dict(self):
+        if self.currentKey:
+            raise ValueError("missing value for key '%s' at line %d" %
+                             (self.currentKey,self.parser.CurrentLineNumber))
         self.stack.pop()
 
     def end_key(self):
+        if self.currentKey or not isinstance(self.stack[-1], type({})):
+            raise ValueError("unexpected key at line %d" %
+                             self.parser.CurrentLineNumber)
         self.currentKey = self.getData()
 
     def begin_array(self, attrs):
diff --git a/Lib/pydoc.py b/Lib/pydoc.py
--- a/Lib/pydoc.py
+++ b/Lib/pydoc.py
@@ -22,11 +22,6 @@
 open a Web browser to interactively browse documentation.  The -p option
 can be used with the -b option to explicitly specify the server port.
 
-For platforms without a command line, "pydoc -g" starts the HTTP server
-and also pops up a little window for controlling it.  This option is
-deprecated, since the server can now be controlled directly from HTTP
-clients.
-
 Run "pydoc -w <name>" to write out the HTML documentation for a module
 to a file named "<name>.html".
 
@@ -42,7 +37,6 @@
 __author__ = "Ka-Ping Yee <ping at lfw.org>"
 __date__ = "26 February 2001"
 
-__version__ = "$Revision$"
 __credits__ = """Guido van Rossum, for an excellent programming language.
 Tommy Burnette, the original creator of manpy.
 Paul Prescod, for all his work on onlinehelp.
@@ -2056,272 +2050,6 @@
         warnings.filterwarnings('ignore') # ignore problems during import
         ModuleScanner().run(callback, key, onerror=onerror)
 
-# --------------------------------------------------- Web browser interface
-
-def serve(port, callback=None, completer=None):
-    import http.server, email.message, select
-
-    msg = 'the pydoc.serve() function is deprecated'
-    warnings.warn(msg, DeprecationWarning, stacklevel=2)
-
-    class DocHandler(http.server.BaseHTTPRequestHandler):
-        def send_document(self, title, contents):
-            try:
-                self.send_response(200)
-                self.send_header('Content-Type', 'text/html; charset=UTF-8')
-                self.end_headers()
-                self.wfile.write(html.page(title, contents).encode('utf-8'))
-            except IOError: pass
-
-        def do_GET(self):
-            path = self.path
-            if path[-5:] == '.html': path = path[:-5]
-            if path[:1] == '/': path = path[1:]
-            if path and path != '.':
-                try:
-                    obj = locate(path, forceload=1)
-                except ErrorDuringImport as value:
-                    self.send_document(path, html.escape(str(value)))
-                    return
-                if obj:
-                    self.send_document(describe(obj), html.document(obj, path))
-                else:
-                    self.send_document(path,
-'no Python documentation found for %s' % repr(path))
-            else:
-                heading = html.heading(
-'<big><big><strong>Python: Index of Modules</strong></big></big>',
-'#ffffff', '#7799ee')
-                def bltinlink(name):
-                    return '<a href="%s.html">%s</a>' % (name, name)
-                names = [x for x in sys.builtin_module_names if x != '__main__']
-                contents = html.multicolumn(names, bltinlink)
-                indices = ['<p>' + html.bigsection(
-                    'Built-in Modules', '#ffffff', '#ee77aa', contents)]
-
-                seen = {}
-                for dir in sys.path:
-                    indices.append(html.index(dir, seen))
-                contents = heading + ' '.join(indices) + '''<p align=right>
-<font color="#909090" face="helvetica, arial"><strong>
-pydoc</strong> by Ka-Ping Yee &lt;ping at lfw.org&gt;</font>'''
-                self.send_document('Index of Modules', contents)
-
-        def log_message(self, *args): pass
-
-    class DocServer(http.server.HTTPServer):
-        def __init__(self, port, callback):
-            host = 'localhost'
-            self.address = (host, port)
-            self.url = 'http://%s:%d/' % (host, port)
-            self.callback = callback
-            self.base.__init__(self, self.address, self.handler)
-
-        def serve_until_quit(self):
-            import select
-            self.quit = False
-            while not self.quit:
-                rd, wr, ex = select.select([self.socket.fileno()], [], [], 1)
-                if rd: self.handle_request()
-            self.server_close()
-
-        def server_activate(self):
-            self.base.server_activate(self)
-            if self.callback: self.callback(self)
-
-    DocServer.base = http.server.HTTPServer
-    DocServer.handler = DocHandler
-    DocHandler.MessageClass = email.message.Message
-    try:
-        try:
-            DocServer(port, callback).serve_until_quit()
-        except (KeyboardInterrupt, select.error):
-            pass
-    finally:
-        if completer: completer()
-
-# ----------------------------------------------------- graphical interface
-
-def gui():
-    """Graphical interface (starts Web server and pops up a control window)."""
-
-    msg = ('the pydoc.gui() function and "pydoc -g" option are deprecated\n',
-           'use "pydoc.browse() function and "pydoc -b" option instead.')
-    warnings.warn(msg, DeprecationWarning, stacklevel=2)
-
-    class GUI:
-        def __init__(self, window, port=7464):
-            self.window = window
-            self.server = None
-            self.scanner = None
-
-            import tkinter
-            self.server_frm = tkinter.Frame(window)
-            self.title_lbl = tkinter.Label(self.server_frm,
-                text='Starting server...\n ')
-            self.open_btn = tkinter.Button(self.server_frm,
-                text='open browser', command=self.open, state='disabled')
-            self.quit_btn = tkinter.Button(self.server_frm,
-                text='quit serving', command=self.quit, state='disabled')
-
-            self.search_frm = tkinter.Frame(window)
-            self.search_lbl = tkinter.Label(self.search_frm, text='Search for')
-            self.search_ent = tkinter.Entry(self.search_frm)
-            self.search_ent.bind('<Return>', self.search)
-            self.stop_btn = tkinter.Button(self.search_frm,
-                text='stop', pady=0, command=self.stop, state='disabled')
-            if sys.platform == 'win32':
-                # Trying to hide and show this button crashes under Windows.
-                self.stop_btn.pack(side='right')
-
-            self.window.title('pydoc')
-            self.window.protocol('WM_DELETE_WINDOW', self.quit)
-            self.title_lbl.pack(side='top', fill='x')
-            self.open_btn.pack(side='left', fill='x', expand=1)
-            self.quit_btn.pack(side='right', fill='x', expand=1)
-            self.server_frm.pack(side='top', fill='x')
-
-            self.search_lbl.pack(side='left')
-            self.search_ent.pack(side='right', fill='x', expand=1)
-            self.search_frm.pack(side='top', fill='x')
-            self.search_ent.focus_set()
-
-            font = ('helvetica', sys.platform == 'win32' and 8 or 10)
-            self.result_lst = tkinter.Listbox(window, font=font, height=6)
-            self.result_lst.bind('<Button-1>', self.select)
-            self.result_lst.bind('<Double-Button-1>', self.goto)
-            self.result_scr = tkinter.Scrollbar(window,
-                orient='vertical', command=self.result_lst.yview)
-            self.result_lst.config(yscrollcommand=self.result_scr.set)
-
-            self.result_frm = tkinter.Frame(window)
-            self.goto_btn = tkinter.Button(self.result_frm,
-                text='go to selected', command=self.goto)
-            self.hide_btn = tkinter.Button(self.result_frm,
-                text='hide results', command=self.hide)
-            self.goto_btn.pack(side='left', fill='x', expand=1)
-            self.hide_btn.pack(side='right', fill='x', expand=1)
-
-            self.window.update()
-            self.minwidth = self.window.winfo_width()
-            self.minheight = self.window.winfo_height()
-            self.bigminheight = (self.server_frm.winfo_reqheight() +
-                                 self.search_frm.winfo_reqheight() +
-                                 self.result_lst.winfo_reqheight() +
-                                 self.result_frm.winfo_reqheight())
-            self.bigwidth, self.bigheight = self.minwidth, self.bigminheight
-            self.expanded = 0
-            self.window.wm_geometry('%dx%d' % (self.minwidth, self.minheight))
-            self.window.wm_minsize(self.minwidth, self.minheight)
-            self.window.tk.willdispatch()
-
-            import threading
-            threading.Thread(
-                target=serve, args=(port, self.ready, self.quit)).start()
-
-        def ready(self, server):
-            self.server = server
-            self.title_lbl.config(
-                text='Python documentation server at\n' + server.url)
-            self.open_btn.config(state='normal')
-            self.quit_btn.config(state='normal')
-
-        def open(self, event=None, url=None):
-            url = url or self.server.url
-            import webbrowser
-            webbrowser.open(url)
-
-        def quit(self, event=None):
-            if self.server:
-                self.server.quit = 1
-            self.window.quit()
-
-        def search(self, event=None):
-            key = self.search_ent.get()
-            self.stop_btn.pack(side='right')
-            self.stop_btn.config(state='normal')
-            self.search_lbl.config(text='Searching for "%s"...' % key)
-            self.search_ent.forget()
-            self.search_lbl.pack(side='left')
-            self.result_lst.delete(0, 'end')
-            self.goto_btn.config(state='disabled')
-            self.expand()
-
-            import threading
-            if self.scanner:
-                self.scanner.quit = 1
-            self.scanner = ModuleScanner()
-            threading.Thread(target=self.scanner.run,
-                             args=(self.update, key, self.done)).start()
-
-        def update(self, path, modname, desc):
-            if modname[-9:] == '.__init__':
-                modname = modname[:-9] + ' (package)'
-            self.result_lst.insert('end',
-                modname + ' - ' + (desc or '(no description)'))
-
-        def stop(self, event=None):
-            if self.scanner:
-                self.scanner.quit = 1
-                self.scanner = None
-
-        def done(self):
-            self.scanner = None
-            self.search_lbl.config(text='Search for')
-            self.search_lbl.pack(side='left')
-            self.search_ent.pack(side='right', fill='x', expand=1)
-            if sys.platform != 'win32': self.stop_btn.forget()
-            self.stop_btn.config(state='disabled')
-
-        def select(self, event=None):
-            self.goto_btn.config(state='normal')
-
-        def goto(self, event=None):
-            selection = self.result_lst.curselection()
-            if selection:
-                modname = self.result_lst.get(selection[0]).split()[0]
-                self.open(url=self.server.url + modname + '.html')
-
-        def collapse(self):
-            if not self.expanded: return
-            self.result_frm.forget()
-            self.result_scr.forget()
-            self.result_lst.forget()
-            self.bigwidth = self.window.winfo_width()
-            self.bigheight = self.window.winfo_height()
-            self.window.wm_geometry('%dx%d' % (self.minwidth, self.minheight))
-            self.window.wm_minsize(self.minwidth, self.minheight)
-            self.expanded = 0
-
-        def expand(self):
-            if self.expanded: return
-            self.result_frm.pack(side='bottom', fill='x')
-            self.result_scr.pack(side='right', fill='y')
-            self.result_lst.pack(side='top', fill='both', expand=1)
-            self.window.wm_geometry('%dx%d' % (self.bigwidth, self.bigheight))
-            self.window.wm_minsize(self.minwidth, self.bigminheight)
-            self.expanded = 1
-
-        def hide(self, event=None):
-            self.stop()
-            self.collapse()
-
-    import tkinter
-    try:
-        root = tkinter.Tk()
-        # Tk will crash if pythonw.exe has an XP .manifest
-        # file and the root has is not destroyed explicitly.
-        # If the problem is ever fixed in Tk, the explicit
-        # destroy can go.
-        try:
-            gui = GUI(root)
-            root.mainloop()
-        finally:
-            root.destroy()
-    except KeyboardInterrupt:
-        pass
-
-
 # --------------------------------------- enhanced Web browser interface
 
 def _start_server(urlhandler, port):
@@ -2778,15 +2506,12 @@
         sys.path.insert(0, '.')
 
     try:
-        opts, args = getopt.getopt(sys.argv[1:], 'bgk:p:w')
+        opts, args = getopt.getopt(sys.argv[1:], 'bk:p:w')
         writing = False
         start_server = False
         open_browser = False
         port = None
         for opt, val in opts:
-            if opt == '-g':
-                gui()
-                return
             if opt == '-b':
                 start_server = True
                 open_browser = True
@@ -2847,9 +2572,6 @@
     to interactively browse documentation.  The -p option can be used with
     the -b option to explicitly specify the server port.
 
-{cmd} -g
-    Deprecated.
-
 {cmd} -w <name> ...
     Write out the HTML documentation for a module to a file in the current
     directory.  If <name> contains a '{sep}', it is treated as a filename; if
diff --git a/Lib/reprlib.py b/Lib/reprlib.py
--- a/Lib/reprlib.py
+++ b/Lib/reprlib.py
@@ -5,7 +5,7 @@
 import builtins
 from itertools import islice
 try:
-    from _thread import get_ident
+    from threading import get_ident
 except ImportError:
     from _dummy_thread import get_ident
 
diff --git a/Lib/socketserver.py b/Lib/socketserver.py
--- a/Lib/socketserver.py
+++ b/Lib/socketserver.py
@@ -82,7 +82,7 @@
 data is stored externally (e.g. in the file system), a synchronous
 class will essentially render the service "deaf" while one request is
 being handled -- which may be for a very long time if a client is slow
-to reqd all the data it has requested.  Here a threading or forking
+to recv all the data it has requested.  Here a threading or forking
 server is appropriate.
 
 In some cases, it may be appropriate to process part of a request
@@ -170,6 +170,7 @@
     - process_request(request, client_address)
     - shutdown_request(request)
     - close_request(request)
+    - service_actions()
     - handle_error()
 
     Methods for derived classes:
@@ -225,6 +226,8 @@
                 r, w, e = select.select([self], [], [], poll_interval)
                 if self in r:
                     self._handle_request_noblock()
+
+                self.service_actions()
         finally:
             self.__shutdown_request = False
             self.__is_shut_down.set()
@@ -239,6 +242,14 @@
         self.__shutdown_request = True
         self.__is_shut_down.wait()
 
+    def service_actions(self):
+        """Called by the serve_forever() loop.
+
+        May be overridden by a subclass / Mixin to implement any code that
+        needs to be run during the loop.
+        """
+        pass
+
     # The distinction between handling, getting, processing and
     # finishing a request is fairly arbitrary.  Remember:
     #
@@ -539,9 +550,15 @@
         """
         self.collect_children()
 
+    def service_actions(self):
+        """Collect the zombie child processes regularly in the ForkingMixin.
+
+        service_actions is called in the BaseServer's serve_forver loop.
+        """
+        self.collect_children()
+
     def process_request(self, request, client_address):
         """Fork a new subprocess to process the request."""
-        self.collect_children()
         pid = os.fork()
         if pid:
             # Parent process
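
The new service_actions() hook gives BaseServer subclasses a place to run
periodic work from inside serve_forever(); that is how ForkingMixIn now reaps
its children. A hedged sketch of a user-level subclass using the hook (the
housekeeping logic is purely illustrative):

    import socketserver
    import time

    class EchoHandler(socketserver.StreamRequestHandler):
        def handle(self):
            self.wfile.write(self.rfile.readline())

    class HousekeepingServer(socketserver.TCPServer):
        allow_reuse_address = True

        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self._last_sweep = time.monotonic()

        def service_actions(self):
            # Called once per serve_forever() iteration (roughly every
            # poll_interval seconds); keep this short and non-blocking.
            now = time.monotonic()
            if now - self._last_sweep > 60:
                self._last_sweep = now
                self.sweep()

        def sweep(self):
            # A real server might expire idle sessions, rotate logs, etc.
            pass

    if __name__ == '__main__':
        srv = HousekeepingServer(('localhost', 0), EchoHandler)
        print('listening on', srv.server_address)
        # srv.serve_forever(poll_interval=0.5)  # runs service_actions() each pass
        srv.server_close()
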
diff --git a/Lib/ssl.py b/Lib/ssl.py
--- a/Lib/ssl.py
+++ b/Lib/ssl.py
@@ -63,7 +63,7 @@
 from _ssl import _SSLContext, SSLError
 from _ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
 from _ssl import OP_ALL, OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_TLSv1
-from _ssl import RAND_status, RAND_egd, RAND_add
+from _ssl import RAND_status, RAND_egd, RAND_add, RAND_bytes, RAND_pseudo_bytes
 from _ssl import (
     SSL_ERROR_ZERO_RETURN,
     SSL_ERROR_WANT_READ,
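
With RAND_bytes and RAND_pseudo_bytes re-exported from _ssl, they become
reachable through the ssl module. A brief, hedged sketch; note that
RAND_pseudo_bytes was deprecated and removed in much later releases, so
os.urandom() remains the usual choice for applications:

    import ssl

    if ssl.RAND_status():              # is the OpenSSL PRNG seeded?
        token = ssl.RAND_bytes(16)     # 16 cryptographically strong bytes
        print(token.hex())
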
diff --git a/Lib/subprocess.py b/Lib/subprocess.py
--- a/Lib/subprocess.py
+++ b/Lib/subprocess.py
@@ -397,39 +397,14 @@
 else:
     import select
     _has_poll = hasattr(select, 'poll')
-    import fcntl
-    import pickle
-
-    try:
-        import _posixsubprocess
-    except ImportError:
-        _posixsubprocess = None
-        warnings.warn("The _posixsubprocess module is not being used. "
-                      "Child process reliability may suffer if your "
-                      "program uses threads.", RuntimeWarning)
+    import _posixsubprocess
+    _create_pipe = _posixsubprocess.cloexec_pipe
 
     # When select or poll has indicated that the file is writable,
     # we can write up to _PIPE_BUF bytes without risk of blocking.
     # POSIX defines PIPE_BUF as >= 512.
     _PIPE_BUF = getattr(select, 'PIPE_BUF', 512)
 
-    _FD_CLOEXEC = getattr(fcntl, 'FD_CLOEXEC', 1)
-
-    def _set_cloexec(fd, cloexec):
-        old = fcntl.fcntl(fd, fcntl.F_GETFD)
-        if cloexec:
-            fcntl.fcntl(fd, fcntl.F_SETFD, old | _FD_CLOEXEC)
-        else:
-            fcntl.fcntl(fd, fcntl.F_SETFD, old & ~_FD_CLOEXEC)
-
-    if _posixsubprocess:
-        _create_pipe = _posixsubprocess.cloexec_pipe
-    else:
-        def _create_pipe():
-            fds = os.pipe()
-            _set_cloexec(fds[0], True)
-            _set_cloexec(fds[1], True)
-            return fds
 
 __all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "getstatusoutput",
            "getoutput", "check_output", "CalledProcessError", "DEVNULL"]
@@ -800,7 +775,10 @@
         self.wait()
 
     def __del__(self, _maxsize=sys.maxsize, _active=_active):
-        if not self._child_created:
+        # If __init__ hasn't had a chance to execute (e.g. if it
+        # was passed an undeclared keyword argument), we don't
+        # have a _child_created attribute at all.
+        if not getattr(self, '_child_created', False):
             # We didn't get to successfully create a child process.
             return
         # In case the child hasn't been waited on, check if it's done.
@@ -1267,140 +1245,33 @@
             errpipe_read, errpipe_write = _create_pipe()
             try:
                 try:
+                    # We must avoid complex work that could involve
+                    # malloc or free in the child process to avoid
+                    # potential deadlocks, thus we do all this here
+                    # and pass it to fork_exec().
 
-                    if _posixsubprocess:
-                        # We must avoid complex work that could involve
-                        # malloc or free in the child process to avoid
-                        # potential deadlocks, thus we do all this here.
-                        # and pass it to fork_exec()
-
-                        if env:
-                            env_list = [os.fsencode(k) + b'=' + os.fsencode(v)
-                                        for k, v in env.items()]
-                        else:
-                            env_list = None  # Use execv instead of execve.
-                        executable = os.fsencode(executable)
-                        if os.path.dirname(executable):
-                            executable_list = (executable,)
-                        else:
-                            # This matches the behavior of os._execvpe().
-                            executable_list = tuple(
-                                os.path.join(os.fsencode(dir), executable)
-                                for dir in os.get_exec_path(env))
-                        fds_to_keep = set(pass_fds)
-                        fds_to_keep.add(errpipe_write)
-                        self.pid = _posixsubprocess.fork_exec(
-                                args, executable_list,
-                                close_fds, sorted(fds_to_keep), cwd, env_list,
-                                p2cread, p2cwrite, c2pread, c2pwrite,
-                                errread, errwrite,
-                                errpipe_read, errpipe_write,
-                                restore_signals, start_new_session, preexec_fn)
+                    if env:
+                        env_list = [os.fsencode(k) + b'=' + os.fsencode(v)
+                                    for k, v in env.items()]
                     else:
-                        # Pure Python implementation: It is not thread safe.
-                        # This implementation may deadlock in the child if your
-                        # parent process has any other threads running.
-
-                        gc_was_enabled = gc.isenabled()
-                        # Disable gc to avoid bug where gc -> file_dealloc ->
-                        # write to stderr -> hang.  See issue1336
-                        gc.disable()
-                        try:
-                            self.pid = os.fork()
-                        except:
-                            if gc_was_enabled:
-                                gc.enable()
-                            raise
-                        self._child_created = True
-                        if self.pid == 0:
-                            # Child
-                            try:
-                                # Close parent's pipe ends
-                                if p2cwrite != -1:
-                                    os.close(p2cwrite)
-                                if c2pread != -1:
-                                    os.close(c2pread)
-                                if errread != -1:
-                                    os.close(errread)
-                                os.close(errpipe_read)
-
-                                # Dup fds for child
-                                def _dup2(a, b):
-                                    # dup2() removes the CLOEXEC flag but
-                                    # we must do it ourselves if dup2()
-                                    # would be a no-op (issue #10806).
-                                    if a == b:
-                                        _set_cloexec(a, False)
-                                    elif a != -1:
-                                        os.dup2(a, b)
-                                _dup2(p2cread, 0)
-                                _dup2(c2pwrite, 1)
-                                _dup2(errwrite, 2)
-
-                                # Close pipe fds.  Make sure we don't close the
-                                # same fd more than once, or standard fds.
-                                closed = set()
-                                for fd in [p2cread, c2pwrite, errwrite]:
-                                    if fd > 2 and fd not in closed:
-                                        os.close(fd)
-                                        closed.add(fd)
-
-                                # Close all other fds, if asked for
-                                if close_fds:
-                                    fds_to_keep = set(pass_fds)
-                                    fds_to_keep.add(errpipe_write)
-                                    self._close_fds(fds_to_keep)
-
-
-                                if cwd is not None:
-                                    os.chdir(cwd)
-
-                                # This is a copy of Python/pythonrun.c
-                                # _Py_RestoreSignals().  If that were exposed
-                                # as a sys._py_restoresignals func it would be
-                                # better.. but this pure python implementation
-                                # isn't likely to be used much anymore.
-                                if restore_signals:
-                                    signals = ('SIGPIPE', 'SIGXFZ', 'SIGXFSZ')
-                                    for sig in signals:
-                                        if hasattr(signal, sig):
-                                            signal.signal(getattr(signal, sig),
-                                                          signal.SIG_DFL)
-
-                                if start_new_session and hasattr(os, 'setsid'):
-                                    os.setsid()
-
-                                if preexec_fn:
-                                    preexec_fn()
-
-                                if env is None:
-                                    os.execvp(executable, args)
-                                else:
-                                    os.execvpe(executable, args, env)
-
-                            except:
-                                try:
-                                    exc_type, exc_value = sys.exc_info()[:2]
-                                    if isinstance(exc_value, OSError):
-                                        errno_num = exc_value.errno
-                                    else:
-                                        errno_num = 0
-                                    message = '%s:%x:%s' % (exc_type.__name__,
-                                                            errno_num, exc_value)
-                                    message = message.encode(errors="surrogatepass")
-                                    os.write(errpipe_write, message)
-                                except Exception:
-                                    # We MUST not allow anything odd happening
-                                    # above to prevent us from exiting below.
-                                    pass
-
-                            # This exitcode won't be reported to applications
-                            # so it really doesn't matter what we return.
-                            os._exit(255)
-
-                        # Parent
-                        if gc_was_enabled:
-                            gc.enable()
+                        env_list = None  # Use execv instead of execve.
+                    executable = os.fsencode(executable)
+                    if os.path.dirname(executable):
+                        executable_list = (executable,)
+                    else:
+                        # This matches the behavior of os._execvpe().
+                        executable_list = tuple(
+                            os.path.join(os.fsencode(dir), executable)
+                            for dir in os.get_exec_path(env))
+                    fds_to_keep = set(pass_fds)
+                    fds_to_keep.add(errpipe_write)
+                    self.pid = _posixsubprocess.fork_exec(
+                            args, executable_list,
+                            close_fds, sorted(fds_to_keep), cwd, env_list,
+                            p2cread, p2cwrite, c2pread, c2pwrite,
+                            errread, errwrite,
+                            errpipe_read, errpipe_write,
+                            restore_signals, start_new_session, preexec_fn)
                 finally:
                     # be sure the FD is closed no matter what
                     os.close(errpipe_write)
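
The __del__ hunk above swaps a direct attribute read for getattr(..., False)
so that a Popen whose __init__ never completed can still be collected without
raising. A small generic sketch of that defensive pattern, using a made-up
Resource class:

    class Resource:
        def __init__(self, fail=False):
            if fail:
                raise ValueError('boom')   # _acquired is never set
            self._acquired = True

        def __del__(self):
            # __init__ may not have run to completion, so don't assume
            # the attribute exists.
            if not getattr(self, '_acquired', False):
                return
            # ... release the underlying resource here ...

    try:
        Resource(fail=True)
    except ValueError:
        pass    # the half-built object is collected without an AttributeError
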
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
--- a/Lib/sysconfig.py
+++ b/Lib/sysconfig.py
@@ -287,14 +287,16 @@
                         variables.remove(name)
 
                         if name.startswith('PY_') \
-                                and name[3:] in renamed_variables:
+                        and name[3:] in renamed_variables:
 
                             name = name[3:]
                             if name not in done:
                                 done[name] = value
 
             else:
-                # bogus variable reference; just drop it since we can't deal
+                # bogus variable reference (e.g. "prefix=$/opt/python");
+                # just drop it since we can't deal
+                done[name] = value
                 variables.remove(name)
 
     # strip spurious spaces
@@ -656,11 +658,10 @@
         # to. This makes the compatibility story a bit more sane because the
         # machine is going to compile and link as if it were
         # MACOSX_DEPLOYMENT_TARGET.
-        #
         cfgvars = get_config_vars()
         macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')
 
-        if 1:
+        if True:
             # Always calculate the release of the running machine,
             # needed to determine if we can build fat binaries or not.
 
@@ -755,7 +756,7 @@
     print('Platform: "%s"' % get_platform())
     print('Python version: "%s"' % get_python_version())
     print('Current installation scheme: "%s"' % _get_default_scheme())
-    print('')
+    print()
     _print_dict('Paths', get_paths())
     print()
     _print_dict('Variables', get_config_vars())
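
The _main() tweaks above only touch output formatting; the information itself
comes from sysconfig's public helpers. A hedged sketch printing similar data
with those functions (exact keys and values are platform-dependent):

    import sysconfig

    print('Platform:', sysconfig.get_platform())
    print('Python version:', sysconfig.get_python_version())
    for name, path in sorted(sysconfig.get_paths().items()):
        print('  %s = %s' % (name, path))
    print('MACOSX_DEPLOYMENT_TARGET =',
          sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET'))
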
diff --git a/Lib/tarfile.py b/Lib/tarfile.py
--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -29,8 +29,6 @@
 """Read from and write to tar format archives.
 """
 
-__version__ = "$Revision$"
-
 version     = "0.9.0"
 __author__  = "Lars Gust\u00e4bel (lars at gustaebel.de)"
 __date__    = "$Date$"
diff --git a/Lib/test/cjkencodings/hz-utf8.txt b/Lib/test/cjkencodings/hz-utf8.txt
new file mode 100644
--- /dev/null
+++ b/Lib/test/cjkencodings/hz-utf8.txt
@@ -0,0 +1,2 @@
+This sentence is in ASCII.
+The next sentence is in GB.己所不欲,勿施於人。Bye.
diff --git a/Lib/test/cjkencodings/hz.txt b/Lib/test/cjkencodings/hz.txt
new file mode 100644
--- /dev/null
+++ b/Lib/test/cjkencodings/hz.txt
@@ -0,0 +1,2 @@
+This sentence is in ASCII.
+The next sentence is in GB.~{<:Ky2;S{#,NpJ)l6HK!#~}Bye.
diff --git a/Lib/test/cjkencodings/iso2022_jp-utf8.txt b/Lib/test/cjkencodings/iso2022_jp-utf8.txt
new file mode 100644
--- /dev/null
+++ b/Lib/test/cjkencodings/iso2022_jp-utf8.txt
@@ -0,0 +1,7 @@
+Python の開発は、1990 年ごろから開始されています。
+開発者の Guido van Rossum は教育用のプログラミング言語「ABC」の開発に参加していましたが、ABC は実用上の目的にはあまり適していませんでした。
+このため、Guido はより実用的なプログラミング言語の開発を開始し、英国 BBS 放送のコメディ番組「モンティ パイソン」のファンである Guido はこの言語を「Python」と名づけました。
+このような背景から生まれた Python の言語設計は、「シンプル」で「習得が容易」という目標に重点が置かれています。
+多くのスクリプト系言語ではユーザの目先の利便性を優先して色々な機能を言語要素として取り入れる場合が多いのですが、Python ではそういった小細工が追加されることはあまりありません。
+言語自体の機能は最小限に押さえ、必要な機能は拡張モジュールとして追加する、というのが Python のポリシーです。
+
diff --git a/Lib/test/cjkencodings/iso2022_jp.txt b/Lib/test/cjkencodings/iso2022_jp.txt
new file mode 100644
--- /dev/null
+++ b/Lib/test/cjkencodings/iso2022_jp.txt
@@ -0,0 +1,7 @@
+Python $B$N3+H/$O!"(B1990 $BG/$4$m$+$i3+;O$5$l$F$$$^$9!#(B
+$B3+H/<T$N(B Guido van Rossum $B$O650iMQ$N%W%m%0%i%_%s%08 at 8l!V(BABC$B!W$N3+H/$K;22C$7$F$$$^$7$?$,!"(BABC $B$O<BMQ>e$NL\E*$K$O$"$^$jE,$7$F$$$^$;$s$G$7$?!#(B
+$B$3$N$?$a!"(BGuido $B$O$h$j<BMQE*$J%W%m%0%i%_%s%08 at 8l$N3+H/$r3+;O$7!"1Q9q(B BBS $BJ|Aw$N%3%a%G%#HVAH!V%b%s%F%#(B $B%Q%$%=%s!W$N%U%!%s$G$"$k(B Guido $B$O$3$N8 at 8l$r!V(BPython$B!W$HL>$E$1$^$7$?!#(B
+$B$3$N$h$&$JGX7J$+$i at 8$^$l$?(B Python $B$N8 at 8l@_7W$O!"!V%7%s%W%k!W$G!V=,F@$,MF0W!W$H$$$&L\I8$K=EE@$,CV$+$l$F$$$^$9!#(B
+$BB?$/$N%9%/%j%W%H7O8 at 8l$G$O%f!<%6$NL\@h$NMxJX at -$rM%@h$7$F?'!9$J5!G=$r8 at 8lMWAG$H$7$F<h$jF~$l$k>l9g$,B?$$$N$G$9$,!"(BPython $B$G$O$=$&$$$C$?>.:Y9)$,DI2C$5$l$k$3$H$O$"$^$j$"$j$^$;$s!#(B
+$B8 at 8l<+BN$N5!G=$O:G>.8B$K2!$5$(!"I,MW$J5!G=$O3HD%%b%8%e!<%k$H$7$FDI2C$9$k!"$H$$$&$N$,(B Python $B$N%]%j%7!<$G$9!#(B
+
diff --git a/Lib/test/cjkencodings/iso2022_kr-utf8.txt b/Lib/test/cjkencodings/iso2022_kr-utf8.txt
new file mode 100644
--- /dev/null
+++ b/Lib/test/cjkencodings/iso2022_kr-utf8.txt
@@ -0,0 +1,7 @@
+◎ 파이썬(Python)은 배우기 쉽고, 강력한 프로그래밍 언어입니다. 파이썬은
+효율적인 고수준 데이터 구조와 간단하지만 효율적인 객체지향프로그래밍을
+지원합니다. 파이썬의 우아(優雅)한 문법과 동적 타이핑, 그리고 인터프리팅
+환경은 파이썬을 스크립팅과 여러 분야에서와 대부분의 플랫폼에서의 빠른
+애플리케이션 개발을 할 수 있는 이상적인 언어로 만들어줍니다.
+
+☆첫가끝: 날아라 쓩~ 큼! 금없이 전니다. 그런거 다.
diff --git a/Lib/test/cjkencodings/iso2022_kr.txt b/Lib/test/cjkencodings/iso2022_kr.txt
new file mode 100644
--- /dev/null
+++ b/Lib/test/cjkencodings/iso2022_kr.txt
@@ -0,0 +1,7 @@
+$)C!] FD@L=c(Python)@: 9h?l1b =10m, 0-7BGQ GA7N1W7!9V >p>n@T4O4Y. FD@L=c@:
+H?@2@{@N 0m<vAX 5%@LEM 18A6?M 0#4\GOAv88 H?@2@{@N 04C<AvGbGA7N1W7!9V@;
+Av?xGU4O4Y. FD@L=c@G ?l>F(iPd:)GQ 9.9}0z 5?@{ E8@LGN, 1W8.0m @NEMGA8.FC
+H/0f@: FD@L=c@; =:E)83FC0z ?)7/ :P>_?!<-?M 4k:N:P@G GC7'F{?!<-@G :|8%
+>VGC8.DI@L<G 039_@; GR <v @V4B @L;s@{@N >p>n7N 885i>nA]4O4Y.
+
+!YC90!3!: 3/>F6s >1~ E-! 1]>x@L @|4O4Y. 1W710E 4Y.
diff --git a/Lib/test/decimaltestdata/extra.decTest b/Lib/test/decimaltestdata/extra.decTest
--- a/Lib/test/decimaltestdata/extra.decTest
+++ b/Lib/test/decimaltestdata/extra.decTest
@@ -222,12 +222,25 @@
 extr1701 power 100.0 -557.71e-742888888 -> 1.000000000000000 Inexact Rounded
 extr1702 power 10 1e-100 -> 1.000000000000000 Inexact Rounded
 
+-- Another one (see issue #12080).  Thanks again to Stefan Krah.
+extr1703 power 4 -1.2e-999999999 -> 1.000000000000000 Inexact Rounded
+
 -- A couple of interesting exact cases for power.  Note that the specification
 -- requires these to be reported as Inexact.
 extr1710 power 1e375 56e-3 -> 1.000000000000000E+21 Inexact Rounded
 extr1711 power 10000 0.75 -> 1000.000000000000 Inexact Rounded
 extr1712 power 1e-24 0.875 -> 1.000000000000000E-21 Inexact Rounded
 
+-- Some more exact cases, exercising power with negative second argument.
+extr1720 power 400 -0.5 -> 0.05000000000000000 Inexact Rounded
+extr1721 power 4096 -0.75 -> 0.001953125000000000 Inexact Rounded
+extr1722 power 625e4 -0.25 -> 0.02000000000000000 Inexact Rounded
+
+-- Nonexact cases, to exercise some of the early exit conditions from
+-- _power_exact.
+extr1730 power 2048 -0.75 -> 0.003284751622084822 Inexact Rounded
+
+
 -- Tests for the is_* boolean operations
 precision: 9
 maxExponent: 999
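
The added cases exercise Decimal power with negative fractional exponents. A
short sketch reproducing extr1720 with the decimal module, assuming the
16-digit precision used by the surrounding cases:

    from decimal import Decimal, Context, Inexact, Rounded

    ctx = Context(prec=16)
    result = ctx.power(Decimal(400), Decimal('-0.5'))
    print(result)                                    # 0.05000000000000000
    print(bool(ctx.flags[Inexact]), bool(ctx.flags[Rounded]))   # True True
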
diff --git a/Lib/test/lock_tests.py b/Lib/test/lock_tests.py
--- a/Lib/test/lock_tests.py
+++ b/Lib/test/lock_tests.py
@@ -4,7 +4,7 @@
 
 import sys
 import time
-from _thread import start_new_thread, get_ident, TIMEOUT_MAX
+from _thread import start_new_thread, TIMEOUT_MAX
 import threading
 import unittest
 
@@ -31,7 +31,7 @@
         self.finished = []
         self._can_exit = not wait_before_exit
         def task():
-            tid = get_ident()
+            tid = threading.get_ident()
             self.started.append(tid)
             try:
                 f()
diff --git a/Lib/test/regrtest.py b/Lib/test/regrtest.py
--- a/Lib/test/regrtest.py
+++ b/Lib/test/regrtest.py
@@ -22,8 +22,7 @@
 -h/--help       -- print this text and exit
 --timeout TIMEOUT
                 -- dump the traceback and exit if a test takes more
-                   than TIMEOUT seconds (default: 30 minutes); disable
-                   the timeout if TIMEOUT is zero
+                   than TIMEOUT seconds
 --wait          -- wait for user input, e.g., allow a debugger to be attached
 
 Verbosity
@@ -269,11 +268,6 @@
     # Display the Python traceback fatal errors (e.g. segfault)
     faulthandler.enable(all_threads=True)
 
-    if hasattr(faulthandler, 'dump_tracebacks_later'):
-        timeout = 60*60
-    else:
-        timeout = None
-
     replace_stdout()
 
     support.record_original_stdout(sys.stdout)
@@ -295,6 +289,7 @@
         use_resources = []
     debug = False
     start = None
+    timeout = None
     for o, a in opts:
         if o in ('-h', '--help'):
             print(__doc__)
@@ -420,10 +415,13 @@
             testdir = os.path.join(support.SAVEDCWD, a)
         elif o == '--timeout':
             if not hasattr(faulthandler, 'dump_tracebacks_later'):
-                print("--timeout option requires "
+                print("The timeout option requires "
                       "faulthandler.dump_tracebacks_later", file=sys.stderr)
                 sys.exit(1)
             timeout = float(a)
+            if timeout <= 0:
+                print("The timeout must be greater than 0", file=sys.stderr)
+                sys.exit(1)
         elif o == '--wait':
             input("Press any key to continue...")
         else:
@@ -630,9 +628,12 @@
                 if test is None:
                     finished += 1
                     continue
+                accumulate_result(test, result)
                 if not quiet:
-                    print("[{1:{0}}{2}] {3}".format(
-                        test_count_width, test_index, test_count, test))
+                    fmt = "[{1:{0}}{2}/{3}] {4}" if bad else "[{1:{0}}{2}] {4}"
+                    print(fmt.format(
+                        test_count_width, test_index, test_count,
+                        len(bad), test))
                 if stdout:
                     print(stdout)
                 if stderr:
@@ -642,7 +643,6 @@
                     raise KeyboardInterrupt   # What else?
                 if result[0] == CHILD_ERROR:
                     raise Exception("Child error on {}: {}".format(test, result[1]))
-                accumulate_result(test, result)
                 test_index += 1
         except KeyboardInterrupt:
             interrupted = True
@@ -652,8 +652,9 @@
     else:
         for test_index, test in enumerate(tests, 1):
             if not quiet:
-                print("[{1:{0}}{2}] {3}".format(
-                    test_count_width, test_index, test_count, test))
+                fmt = "[{1:{0}}{2}/{3}] {4}" if bad else "[{1:{0}}{2}] {4}"
+                print(fmt.format(
+                    test_count_width, test_index, test_count, len(bad), test))
                 sys.stdout.flush()
             if trace:
                 # If we're tracing code coverage, then we don't exit with status
@@ -791,17 +792,14 @@
 def replace_stdout():
     """Set stdout encoder error handler to backslashreplace (as stderr error
     handler) to avoid UnicodeEncodeError when printing a traceback"""
-    if os.name == "nt":
-        # Replace sys.stdout breaks the stdout newlines on Windows: issue #8533
-        return
-
     import atexit
 
     stdout = sys.stdout
     sys.stdout = open(stdout.fileno(), 'w',
         encoding=stdout.encoding,
         errors="backslashreplace",
-        closefd=False)
+        closefd=False,
+        newline='\n')
 
     def restore_stdout():
         sys.stdout.close()
@@ -835,7 +833,7 @@
     support.verbose = verbose  # Tell tests to be moderately quiet
     if use_resources is not None:
         support.use_resources = use_resources
-    use_timeout = (timeout is not None and timeout > 0)
+    use_timeout = (timeout is not None)
     if use_timeout:
         faulthandler.dump_tracebacks_later(timeout, exit=True)
     try:
@@ -1023,10 +1021,6 @@
 
 def runtest_inner(test, verbose, quiet, huntrleaks=False, debug=False):
     support.unload(test)
-    if verbose:
-        capture_stdout = None
-    else:
-        capture_stdout = io.StringIO()
 
     test_time = 0.0
     refleak = False  # True if the test leaked references.
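
replace_stdout() above now rewraps sys.stdout unconditionally, using
backslashreplace and newline='\n'. A hedged standalone sketch of the same
rewrapping pattern outside regrtest:

    import sys

    original = sys.stdout
    sys.stdout = open(original.fileno(), 'w',
                      encoding=original.encoding,
                      errors='backslashreplace',
                      closefd=False,
                      newline='\n')
    try:
        print('snowman: \u2603')   # never raises UnicodeEncodeError
    finally:
        sys.stdout.close()
        sys.stdout = original
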
diff --git a/Lib/test/support.py b/Lib/test/support.py
--- a/Lib/test/support.py
+++ b/Lib/test/support.py
@@ -37,7 +37,8 @@
     "Error", "TestFailed", "ResourceDenied", "import_module",
     "verbose", "use_resources", "max_memuse", "record_original_stdout",
     "get_original_stdout", "unload", "unlink", "rmtree", "forget",
-    "is_resource_enabled", "requires", "find_unused_port", "bind_port",
+    "is_resource_enabled", "requires", "requires_linux_version",
+    "requires_mac_ver", "find_unused_port", "bind_port",
     "IPV6_ENABLED", "is_jython", "TESTFN", "HOST", "SAVEDCWD", "temp_cwd",
     "findfile", "sortdict", "check_syntax_error", "open_urlresource",
     "check_warnings", "CleanImport", "EnvironmentVarGuard", "TransientResource",
@@ -48,7 +49,7 @@
     "threading_cleanup", "reap_children", "cpython_only", "check_impl_detail",
     "get_attribute", "swap_item", "swap_attr", "requires_IEEE_754",
     "TestHandler", "Matcher", "can_symlink", "skip_unless_symlink",
-    "import_fresh_module", "requires_zlib"
+    "import_fresh_module", "requires_zlib", "PIPE_MAX_SIZE"
     ]
 
 class Error(Exception):
@@ -291,6 +292,61 @@
             msg = "Use of the `%s' resource not enabled" % resource
         raise ResourceDenied(msg)
 
+def requires_linux_version(*min_version):
+    """Decorator raising SkipTest if the OS is Linux and the kernel version is
+    less than min_version.
+
+    For example, @requires_linux_version(2, 6, 35) raises SkipTest if the Linux
+    kernel version is less than 2.6.35.
+    """
+    def decorator(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kw):
+            if sys.platform.startswith('linux'):
+                version_txt = platform.release().split('-', 1)[0]
+                try:
+                    version = tuple(map(int, version_txt.split('.')))
+                except ValueError:
+                    pass
+                else:
+                    if version < min_version:
+                        min_version_txt = '.'.join(map(str, min_version))
+                        raise unittest.SkipTest(
+                            "Linux kernel %s or higher required, not %s"
+                            % (min_version_txt, version_txt))
+            return func(*args, **kw)
+        wrapper.min_version = min_version
+        return wrapper
+    return decorator
+
+def requires_mac_ver(*min_version):
+    """Decorator raising SkipTest if the OS is Mac OS X and the OS X
+    version is less than min_version.
+
+    For example, @requires_mac_ver(10, 5) raises SkipTest if the OS X version
+    is less than 10.5.
+    """
+    def decorator(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kw):
+            if sys.platform == 'darwin':
+                version_txt = platform.mac_ver()[0]
+                try:
+                    version = tuple(map(int, version_txt.split('.')))
+                except ValueError:
+                    pass
+                else:
+                    if version < min_version:
+                        min_version_txt = '.'.join(map(str, min_version))
+                        raise unittest.SkipTest(
+                            "Mac OS X %s or higher required, not %s"
+                            % (min_version_txt, version_txt))
+            return func(*args, **kw)
+        wrapper.min_version = min_version
+        return wrapper
+    return decorator
+
+
 HOST = 'localhost'
 
 def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM):
@@ -401,6 +457,13 @@
 
 IPV6_ENABLED = _is_ipv6_enabled()
 
+
+# A constant likely larger than the underlying OS pipe buffer size.
+# Windows limit seems to be around 512B, and most Unix kernels have a 64K pipe
+# buffer size: take 1M to be sure.
+PIPE_MAX_SIZE = 1024 * 1024
+
+
 # decorator for skipping tests on non-IEEE 754 platforms
 requires_IEEE_754 = unittest.skipUnless(
     float.__getformat__("double").startswith("IEEE"),
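
The new requires_linux_version(), requires_mac_ver() and PIPE_MAX_SIZE helpers
are meant to be used from individual test modules. A hedged sketch of such
usage; it assumes a CPython checkout of this era, where these names live
directly in test.support:

    import unittest
    from test import support

    class PlatformSpecificTests(unittest.TestCase):

        @support.requires_linux_version(2, 6, 35)
        def test_new_kernel_feature(self):
            self.assertTrue(True)      # skipped on older Linux kernels

        @support.requires_mac_ver(10, 5)
        def test_new_osx_feature(self):
            self.assertTrue(True)      # skipped on older OS X releases

        def test_pipe_sized_payload(self):
            # PIPE_MAX_SIZE is sized to overflow an OS pipe buffer.
            payload = b'x' * support.PIPE_MAX_SIZE
            self.assertGreater(len(payload), 64 * 1024)

    if __name__ == '__main__':
        unittest.main()
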
diff --git a/Lib/test/test_abstract_numbers.py b/Lib/test/test_abstract_numbers.py
--- a/Lib/test/test_abstract_numbers.py
+++ b/Lib/test/test_abstract_numbers.py
@@ -14,6 +14,7 @@
         self.assertEqual(7, int(7).real)
         self.assertEqual(0, int(7).imag)
         self.assertEqual(7, int(7).conjugate())
+        self.assertEqual(-7, int(-7).conjugate())
         self.assertEqual(7, int(7).numerator)
         self.assertEqual(1, int(7).denominator)
 
@@ -24,6 +25,7 @@
         self.assertEqual(7.3, float(7.3).real)
         self.assertEqual(0, float(7.3).imag)
         self.assertEqual(7.3, float(7.3).conjugate())
+        self.assertEqual(-7.3, float(-7.3).conjugate())
 
     def test_complex(self):
         self.assertFalse(issubclass(complex, Real))
diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py
--- a/Lib/test/test_ast.py
+++ b/Lib/test/test_ast.py
@@ -38,6 +38,9 @@
     "while v:pass",
     # If
     "if v:pass",
+    # With
+    "with x as y: pass",
+    "with x as y, z as q: pass",
     # Raise
     "raise Exception('string')",
     # TryExcept
@@ -341,9 +344,11 @@
 ('Module', [('For', (1, 0), ('Name', (1, 4), 'v', ('Store',)), ('Name', (1, 9), 'v', ('Load',)), [('Pass', (1, 11))], [])]),
 ('Module', [('While', (1, 0), ('Name', (1, 6), 'v', ('Load',)), [('Pass', (1, 8))], [])]),
 ('Module', [('If', (1, 0), ('Name', (1, 3), 'v', ('Load',)), [('Pass', (1, 5))], [])]),
+('Module', [('With', (1, 0), [('withitem', ('Name', (1, 5), 'x', ('Load',)), ('Name', (1, 10), 'y', ('Store',)))], [('Pass', (1, 13))])]),
+('Module', [('With', (1, 0), [('withitem', ('Name', (1, 5), 'x', ('Load',)), ('Name', (1, 10), 'y', ('Store',))), ('withitem', ('Name', (1, 13), 'z', ('Load',)), ('Name', (1, 18), 'q', ('Store',)))], [('Pass', (1, 21))])]),
 ('Module', [('Raise', (1, 0), ('Call', (1, 6), ('Name', (1, 6), 'Exception', ('Load',)), [('Str', (1, 16), 'string')], [], None, None), None)]),
-('Module', [('TryExcept', (1, 0), [('Pass', (2, 2))], [('ExceptHandler', (3, 0), ('Name', (3, 7), 'Exception', ('Load',)), None, [('Pass', (4, 2))])], [])]),
-('Module', [('TryFinally', (1, 0), [('Pass', (2, 2))], [('Pass', (4, 2))])]),
+('Module', [('Try', (1, 0), [('Pass', (2, 2))], [('ExceptHandler', (3, 0), ('Name', (3, 7), 'Exception', ('Load',)), None, [('Pass', (4, 2))])], [], [])]),
+('Module', [('Try', (1, 0), [('Pass', (2, 2))], [], [], [('Pass', (4, 2))])]),
 ('Module', [('Assert', (1, 0), ('Name', (1, 7), 'v', ('Load',)), None)]),
 ('Module', [('Import', (1, 0), [('alias', 'sys', None)])]),
 ('Module', [('ImportFrom', (1, 0), 'sys', [('alias', 'v', None)], 0)]),
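
The new expected outputs record the reworked AST for 'with' statements: a
single With node carrying a list of withitem entries. A small sketch
inspecting that layout with the public ast module (requires an interpreter
that includes this change):

    import ast

    tree = ast.parse('with x as y, z as q: pass')
    node = tree.body[0]
    print(type(node).__name__)                            # With
    print([type(item).__name__ for item in node.items])   # ['withitem', 'withitem']
    print(ast.dump(node))
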
diff --git a/Lib/test/test_bool.py b/Lib/test/test_bool.py
--- a/Lib/test/test_bool.py
+++ b/Lib/test/test_bool.py
@@ -330,6 +330,16 @@
                 except (Exception) as e_len:
                     self.assertEqual(str(e_bool), str(e_len))
 
+    def test_real_and_imag(self):
+        self.assertEqual(True.real, 1)
+        self.assertEqual(True.imag, 0)
+        self.assertIs(type(True.real), int)
+        self.assertIs(type(True.imag), int)
+        self.assertEqual(False.real, 0)
+        self.assertEqual(False.imag, 0)
+        self.assertIs(type(False.real), int)
+        self.assertIs(type(False.imag), int)
+
 def test_main():
     support.run_unittest(BoolTest)
 
diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py
--- a/Lib/test/test_builtin.py
+++ b/Lib/test/test_builtin.py
@@ -385,6 +385,8 @@
         except:
             self.assertEqual(len(dir(sys.exc_info()[2])), 4)
 
+        # test that object has a __dir__()
+        self.assertEqual(sorted([].__dir__()), dir([]))
 
     def test_divmod(self):
         self.assertEqual(divmod(12, 7), (1, 5))
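
The added assertion documents that dir() on an instance is backed by
object.__dir__(); dir() merely sorts the result. The same check, as a one-line
sketch (requires an interpreter where object.__dir__() exists):

    print(sorted([].__dir__()) == dir([]))   # True
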
diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py
--- a/Lib/test/test_bz2.py
+++ b/Lib/test/test_bz2.py
@@ -47,7 +47,6 @@
         ]
     TEXT = b''.join(TEXT_LINES)
     DATA = b'BZh91AY&SY.\xc8N\x18\x00\x01>_\x80\x00\x10@\x02\xff\xf0\x01\x07n\x00?\xe7\xff\xe00\x01\x99\xaa\x00\xc0\x03F\x86\x8c#&\x83F\x9a\x03\x06\xa6\xd0\xa6\x93M\x0fQ\xa7\xa8\x06\x804hh\x12$\x11\xa4i4\xf14S\xd2<Q\xb5\x0fH\xd3\xd4\xdd\xd5\x87\xbb\xf8\x94\r\x8f\xafI\x12\xe1\xc9\xf8/E\x00pu\x89\x12]\xc9\xbbDL\nQ\x0e\t1\x12\xdf\xa0\xc0\x97\xac2O9\x89\x13\x94\x0e\x1c7\x0ed\x95I\x0c\xaaJ\xa4\x18L\x10\x05#\x9c\xaf\xba\xbc/\x97\x8a#C\xc8\xe1\x8cW\xf9\xe2\xd0\xd6M\xa7\x8bXa<e\x84t\xcbL\xb3\xa7\xd9\xcd\xd1\xcb\x84.\xaf\xb3\xab\xab\xad`n}\xa0lh\tE,\x8eZ\x15\x17VH>\x88\xe5\xcd9gd6\x0b\n\xe9\x9b\xd5\x8a\x99\xf7\x08.K\x8ev\xfb\xf7xw\xbb\xdf\xa1\x92\xf1\xdd|/";\xa2\xba\x9f\xd5\xb1#A\xb6\xf6\xb3o\xc9\xc5y\\\xebO\xe7\x85\x9a\xbc\xb6f8\x952\xd5\xd7"%\x89>V,\xf7\xa6z\xe2\x9f\xa3\xdf\x11\x11"\xd6E)I\xa9\x13^\xca\xf3r\xd0\x03U\x922\xf26\xec\xb6\xed\x8b\xc3U\x13\x9d\xc5\x170\xa4\xfa^\x92\xacDF\x8a\x97\xd6\x19\xfe\xdd\xb8\xbd\x1a\x9a\x19\xa3\x80ankR\x8b\xe5\xd83]\xa9\xc6\x08\x82f\xf6\xb9"6l$\xb8j@\xc0\x8a\xb0l1..\xbak\x83ls\x15\xbc\xf4\xc1\x13\xbe\xf8E\xb8\x9d\r\xa8\x9dk\x84\xd3n\xfa\xacQ\x07\xb1%y\xaav\xb4\x08\xe0z\x1b\x16\xf5\x04\xe9\xcc\xb9\x08z\x1en7.G\xfc]\xc9\x14\xe1B@\xbb!8`'
-    DATA_CRLF = b'BZh91AY&SY\xaez\xbbN\x00\x01H\xdf\x80\x00\x12@\x02\xff\xf0\x01\x07n\x00?\xe7\xff\xe0@\x01\xbc\xc6`\x86*\x8d=M\xa9\x9a\x86\xd0L@\x0fI\xa6!\xa1\x13\xc8\x88jdi\x8d@\x03@\x1a\x1a\x0c\x0c\x83 \x00\xc4h2\x19\x01\x82D\x84e\t\xe8\x99\x89\x19\x1ah\x00\r\x1a\x11\xaf\x9b\x0fG\xf5(\x1b\x1f?\t\x12\xcf\xb5\xfc\x95E\x00ps\x89\x12^\xa4\xdd\xa2&\x05(\x87\x04\x98\x89u\xe40%\xb6\x19\'\x8c\xc4\x89\xca\x07\x0e\x1b!\x91UIFU%C\x994!DI\xd2\xfa\xf0\xf1N8W\xde\x13A\xf5\x9cr%?\x9f3;I45A\xd1\x8bT\xb1<l\xba\xcb_\xc00xY\x17r\x17\x88\x08\x08@\xa0\ry@\x10\x04$)`\xf2\xce\x89z\xb0s\xec\x9b.iW\x9d\x81\xb5-+t\x9f\x1a\'\x97dB\xf5x\xb5\xbe.[.\xd7\x0e\x81\xe7\x08\x1cN`\x88\x10\xca\x87\xc3!"\x80\x92R\xa1/\xd1\xc0\xe6mf\xac\xbd\x99\xcca\xb3\x8780>\xa4\xc7\x8d\x1a\\"\xad\xa1\xabyBg\x15\xb9l\x88\x88\x91k"\x94\xa4\xd4\x89\xae*\xa6\x0b\x10\x0c\xd6\xd4m\xe86\xec\xb5j\x8a\x86j\';\xca.\x01I\xf2\xaaJ\xe8\x88\x8cU+t3\xfb\x0c\n\xa33\x13r2\r\x16\xe0\xb3(\xbf\x1d\x83r\xe7M\xf0D\x1365\xd8\x88\xd3\xa4\x92\xcb2\x06\x04\\\xc1\xb0\xea//\xbek&\xd8\xe6+t\xe5\xa1\x13\xada\x16\xder5"w]\xa2i\xb7[\x97R \xe2IT\xcd;Z\x04dk4\xad\x8a\t\xd3\x81z\x10\xf1:^`\xab\x1f\xc5\xdc\x91N\x14$+\x9e\xae\xd3\x80'
 
     if has_cmdline_bunzip2:
         def decompress(self, data):
@@ -78,50 +77,69 @@
         if os.path.isfile(self.filename):
             os.unlink(self.filename)
 
-    def getData(self, crlf=False):
-        if crlf:
-            return self.DATA_CRLF
-        else:
-            return self.DATA
-
-    def createTempFile(self, crlf=False):
+    def createTempFile(self, streams=1):
         with open(self.filename, "wb") as f:
-            f.write(self.getData(crlf))
+            f.write(self.DATA * streams)
 
     def testRead(self):
-        # "Test BZ2File.read()"
         self.createTempFile()
         with BZ2File(self.filename) as bz2f:
             self.assertRaises(TypeError, bz2f.read, None)
             self.assertEqual(bz2f.read(), self.TEXT)
 
+    def testReadMultiStream(self):
+        self.createTempFile(streams=5)
+        with BZ2File(self.filename) as bz2f:
+            self.assertRaises(TypeError, bz2f.read, None)
+            self.assertEqual(bz2f.read(), self.TEXT * 5)
+
+    def testReadMonkeyMultiStream(self):
+        # Test BZ2File.read() on a multi-stream archive where a stream
+        # boundary coincides with the end of the raw read buffer.
+        buffer_size = bz2._BUFFER_SIZE
+        bz2._BUFFER_SIZE = len(self.DATA)
+        try:
+            self.createTempFile(streams=5)
+            with BZ2File(self.filename) as bz2f:
+                self.assertRaises(TypeError, bz2f.read, None)
+                self.assertEqual(bz2f.read(), self.TEXT * 5)
+        finally:
+            bz2._BUFFER_SIZE = buffer_size
+
     def testRead0(self):
-        # "Test BBZ2File.read(0)"
         self.createTempFile()
         with BZ2File(self.filename) as bz2f:
             self.assertRaises(TypeError, bz2f.read, None)
             self.assertEqual(bz2f.read(0), b"")
 
     def testReadChunk10(self):
-        # "Test BZ2File.read() in chunks of 10 bytes"
         self.createTempFile()
         with BZ2File(self.filename) as bz2f:
             text = b''
-            while 1:
+            while True:
                 str = bz2f.read(10)
                 if not str:
                     break
                 text += str
             self.assertEqual(text, self.TEXT)
 
+    def testReadChunk10MultiStream(self):
+        self.createTempFile(streams=5)
+        with BZ2File(self.filename) as bz2f:
+            text = b''
+            while True:
+                str = bz2f.read(10)
+                if not str:
+                    break
+                text += str
+            self.assertEqual(text, self.TEXT * 5)
+
     def testRead100(self):
-        # "Test BZ2File.read(100)"
         self.createTempFile()
         with BZ2File(self.filename) as bz2f:
             self.assertEqual(bz2f.read(100), self.TEXT[:100])
 
     def testPeek(self):
-        # "Test BZ2File.peek()"
         self.createTempFile()
         with BZ2File(self.filename) as bz2f:
             pdata = bz2f.peek()
@@ -130,7 +148,6 @@
             self.assertEqual(bz2f.read(), self.TEXT)
 
     def testReadInto(self):
-        # "Test BZ2File.readinto()"
         self.createTempFile()
         with BZ2File(self.filename) as bz2f:
             n = 128
@@ -143,32 +160,43 @@
             self.assertEqual(b[:n], self.TEXT[-n:])
 
     def testReadLine(self):
-        # "Test BZ2File.readline()"
         self.createTempFile()
         with BZ2File(self.filename) as bz2f:
             self.assertRaises(TypeError, bz2f.readline, None)
-            sio = BytesIO(self.TEXT)
-            for line in sio.readlines():
+            for line in self.TEXT_LINES:
+                self.assertEqual(bz2f.readline(), line)
+
+    def testReadLineMultiStream(self):
+        self.createTempFile(streams=5)
+        with BZ2File(self.filename) as bz2f:
+            self.assertRaises(TypeError, bz2f.readline, None)
+            for line in self.TEXT_LINES * 5:
                 self.assertEqual(bz2f.readline(), line)
 
     def testReadLines(self):
-        # "Test BZ2File.readlines()"
         self.createTempFile()
         with BZ2File(self.filename) as bz2f:
             self.assertRaises(TypeError, bz2f.readlines, None)
-            sio = BytesIO(self.TEXT)
-            self.assertEqual(bz2f.readlines(), sio.readlines())
+            self.assertEqual(bz2f.readlines(), self.TEXT_LINES)
+
+    def testReadLinesMultiStream(self):
+        self.createTempFile(streams=5)
+        with BZ2File(self.filename) as bz2f:
+            self.assertRaises(TypeError, bz2f.readlines, None)
+            self.assertEqual(bz2f.readlines(), self.TEXT_LINES * 5)
 
     def testIterator(self):
-        # "Test iter(BZ2File)"
         self.createTempFile()
         with BZ2File(self.filename) as bz2f:
-            sio = BytesIO(self.TEXT)
-            self.assertEqual(list(iter(bz2f)), sio.readlines())
+            self.assertEqual(list(iter(bz2f)), self.TEXT_LINES)
+
+    def testIteratorMultiStream(self):
+        self.createTempFile(streams=5)
+        with BZ2File(self.filename) as bz2f:
+            self.assertEqual(list(iter(bz2f)), self.TEXT_LINES * 5)
 
     def testClosedIteratorDeadlock(self):
-        # "Test that iteration on a closed bz2file releases the lock."
-        # http://bugs.python.org/issue3309
+        # Issue #3309: Iteration on a closed BZ2File should release the lock.
         self.createTempFile()
         bz2f = BZ2File(self.filename)
         bz2f.close()
@@ -178,7 +206,6 @@
         self.assertRaises(ValueError, bz2f.readlines)
 
     def testWrite(self):
-        # "Test BZ2File.write()"
         with BZ2File(self.filename, "w") as bz2f:
             self.assertRaises(TypeError, bz2f.write)
             bz2f.write(self.TEXT)
@@ -186,10 +213,9 @@
             self.assertEqual(self.decompress(f.read()), self.TEXT)
 
     def testWriteChunks10(self):
-        # "Test BZ2File.write() with chunks of 10 bytes"
         with BZ2File(self.filename, "w") as bz2f:
             n = 0
-            while 1:
+            while True:
                 str = self.TEXT[n*10:(n+1)*10]
                 if not str:
                     break
@@ -199,12 +225,11 @@
             self.assertEqual(self.decompress(f.read()), self.TEXT)
 
     def testWriteLines(self):
-        # "Test BZ2File.writelines()"
         with BZ2File(self.filename, "w") as bz2f:
             self.assertRaises(TypeError, bz2f.writelines)
-            sio = BytesIO(self.TEXT)
-            bz2f.writelines(sio.readlines())
-        # patch #1535500
+            bz2f.writelines(self.TEXT_LINES)
+        # Issue #1535500: Calling writelines() on a closed BZ2File
+        # should raise an exception.
         self.assertRaises(ValueError, bz2f.writelines, ["a"])
         with open(self.filename, 'rb') as f:
             self.assertEqual(self.decompress(f.read()), self.TEXT)
@@ -217,39 +242,73 @@
             self.assertRaises(IOError, bz2f.write, b"a")
             self.assertRaises(IOError, bz2f.writelines, [b"a"])
 
+    def testAppend(self):
+        with BZ2File(self.filename, "w") as bz2f:
+            self.assertRaises(TypeError, bz2f.write)
+            bz2f.write(self.TEXT)
+        with BZ2File(self.filename, "a") as bz2f:
+            self.assertRaises(TypeError, bz2f.write)
+            bz2f.write(self.TEXT)
+        with open(self.filename, 'rb') as f:
+            self.assertEqual(self.decompress(f.read()), self.TEXT * 2)
+
     def testSeekForward(self):
-        # "Test BZ2File.seek(150, 0)"
         self.createTempFile()
         with BZ2File(self.filename) as bz2f:
             self.assertRaises(TypeError, bz2f.seek)
             bz2f.seek(150)
             self.assertEqual(bz2f.read(), self.TEXT[150:])
 
+    def testSeekForwardAcrossStreams(self):
+        self.createTempFile(streams=2)
+        with BZ2File(self.filename) as bz2f:
+            self.assertRaises(TypeError, bz2f.seek)
+            bz2f.seek(len(self.TEXT) + 150)
+            self.assertEqual(bz2f.read(), self.TEXT[150:])
+
     def testSeekBackwards(self):
-        # "Test BZ2File.seek(-150, 1)"
         self.createTempFile()
         with BZ2File(self.filename) as bz2f:
             bz2f.read(500)
             bz2f.seek(-150, 1)
             self.assertEqual(bz2f.read(), self.TEXT[500-150:])
 
+    def testSeekBackwardsAcrossStreams(self):
+        self.createTempFile(streams=2)
+        with BZ2File(self.filename) as bz2f:
+            readto = len(self.TEXT) + 100
+            while readto > 0:
+                readto -= len(bz2f.read(readto))
+            bz2f.seek(-150, 1)
+            self.assertEqual(bz2f.read(), self.TEXT[100-150:] + self.TEXT)
+
     def testSeekBackwardsFromEnd(self):
-        # "Test BZ2File.seek(-150, 2)"
         self.createTempFile()
         with BZ2File(self.filename) as bz2f:
             bz2f.seek(-150, 2)
             self.assertEqual(bz2f.read(), self.TEXT[len(self.TEXT)-150:])
 
+    def testSeekBackwardsFromEndAcrossStreams(self):
+        self.createTempFile(streams=2)
+        with BZ2File(self.filename) as bz2f:
+            bz2f.seek(-1000, 2)
+            self.assertEqual(bz2f.read(), (self.TEXT * 2)[-1000:])
+
     def testSeekPostEnd(self):
-        # "Test BZ2File.seek(150000)"
         self.createTempFile()
         with BZ2File(self.filename) as bz2f:
             bz2f.seek(150000)
             self.assertEqual(bz2f.tell(), len(self.TEXT))
             self.assertEqual(bz2f.read(), b"")
 
+    def testSeekPostEndMultiStream(self):
+        self.createTempFile(streams=5)
+        with BZ2File(self.filename) as bz2f:
+            bz2f.seek(150000)
+            self.assertEqual(bz2f.tell(), len(self.TEXT) * 5)
+            self.assertEqual(bz2f.read(), b"")
+
     def testSeekPostEndTwice(self):
-        # "Test BZ2File.seek(150000) twice"
         self.createTempFile()
         with BZ2File(self.filename) as bz2f:
             bz2f.seek(150000)
@@ -257,34 +316,45 @@
             self.assertEqual(bz2f.tell(), len(self.TEXT))
             self.assertEqual(bz2f.read(), b"")
 
+    def testSeekPostEndTwiceMultiStream(self):
+        self.createTempFile(streams=5)
+        with BZ2File(self.filename) as bz2f:
+            bz2f.seek(150000)
+            bz2f.seek(150000)
+            self.assertEqual(bz2f.tell(), len(self.TEXT) * 5)
+            self.assertEqual(bz2f.read(), b"")
+
     def testSeekPreStart(self):
-        # "Test BZ2File.seek(-150, 0)"
         self.createTempFile()
         with BZ2File(self.filename) as bz2f:
             bz2f.seek(-150)
             self.assertEqual(bz2f.tell(), 0)
             self.assertEqual(bz2f.read(), self.TEXT)
 
+    def testSeekPreStartMultiStream(self):
+        self.createTempFile(streams=2)
+        with BZ2File(self.filename) as bz2f:
+            bz2f.seek(-150)
+            self.assertEqual(bz2f.tell(), 0)
+            self.assertEqual(bz2f.read(), self.TEXT * 2)
+
     def testFileno(self):
-        # "Test BZ2File.fileno()"
         self.createTempFile()
         with open(self.filename) as rawf:
             with BZ2File(fileobj=rawf) as bz2f:
                 self.assertEqual(bz2f.fileno(), rawf.fileno())
 
     def testOpenDel(self):
-        # "Test opening and deleting a file many times"
         self.createTempFile()
         for i in range(10000):
             o = BZ2File(self.filename)
             del o
 
     def testOpenNonexistent(self):
-        # "Test opening a nonexistent file"
         self.assertRaises(IOError, BZ2File, "/non/existent")
 
-    def testBug1191043(self):
-        # readlines() for files containing no newline
+    def testReadlinesNoNewline(self):
+        # Issue #1191043: readlines() fails on a file containing no newline.
         data = b'BZh91AY&SY\xd9b\x89]\x00\x00\x00\x03\x80\x04\x00\x02\x00\x0c\x00 \x00!\x9ah3M\x13<]\xc9\x14\xe1BCe\x8a%t'
         with open(self.filename, "wb") as f:
             f.write(data)
@@ -296,7 +366,6 @@
         self.assertEqual(xlines, [b'Test'])
 
     def testContextProtocol(self):
-        # BZ2File supports the context management protocol
         f = None
         with BZ2File(self.filename, "wb") as f:
             f.write(b"xxx")
@@ -319,7 +388,7 @@
 
     @unittest.skipUnless(threading, 'Threading required for this test.')
     def testThreading(self):
-        # Using a BZ2File from several threads doesn't deadlock (issue #7205).
+        # Issue #7205: Using a BZ2File from several threads shouldn't deadlock.
         data = b"1" * 2**20
         nthreads = 10
         with bz2.BZ2File(self.filename, 'wb') as f:
@@ -332,8 +401,7 @@
             for t in threads:
                 t.join()
 
-    def testMixedIterationReads(self):
-        # "Test mixed iteration and reads."
+    def testMixedIterationAndReads(self):
         self.createTempFile()
         linelen = len(self.TEXT_LINES[0])
         halflen = linelen // 2
@@ -351,17 +419,28 @@
                 next(bz2f)
             self.assertEqual(bz2f.readlines(), [])
 
+    def testMultiStreamOrdering(self):
+        # Test the ordering of streams when reading a multi-stream archive.
+        data1 = b"foo" * 1000
+        data2 = b"bar" * 1000
+        with BZ2File(self.filename, "w") as bz2f:
+            bz2f.write(data1)
+        with BZ2File(self.filename, "a") as bz2f:
+            bz2f.write(data2)
+        with BZ2File(self.filename) as bz2f:
+            self.assertEqual(bz2f.read(), data1 + data2)
+
+    # Tests for a BZ2File wrapping another file object:
+
     def testReadBytesIO(self):
-        # "Test BZ2File.read() with BytesIO source"
-        with BytesIO(self.getData()) as bio:
+        with BytesIO(self.DATA) as bio:
             with BZ2File(fileobj=bio) as bz2f:
                 self.assertRaises(TypeError, bz2f.read, None)
                 self.assertEqual(bz2f.read(), self.TEXT)
             self.assertFalse(bio.closed)
 
     def testPeekBytesIO(self):
-        # "Test BZ2File.peek() with BytesIO source"
-        with BytesIO(self.getData()) as bio:
+        with BytesIO(self.DATA) as bio:
             with BZ2File(fileobj=bio) as bz2f:
                 pdata = bz2f.peek()
                 self.assertNotEqual(len(pdata), 0)
@@ -369,7 +448,6 @@
                 self.assertEqual(bz2f.read(), self.TEXT)
 
     def testWriteBytesIO(self):
-        # "Test BZ2File.write() with BytesIO destination"
         with BytesIO() as bio:
             with BZ2File(fileobj=bio, mode="w") as bz2f:
                 self.assertRaises(TypeError, bz2f.write)
@@ -378,16 +456,14 @@
             self.assertFalse(bio.closed)
 
     def testSeekForwardBytesIO(self):
-        # "Test BZ2File.seek(150, 0) with BytesIO source"
-        with BytesIO(self.getData()) as bio:
+        with BytesIO(self.DATA) as bio:
             with BZ2File(fileobj=bio) as bz2f:
                 self.assertRaises(TypeError, bz2f.seek)
                 bz2f.seek(150)
                 self.assertEqual(bz2f.read(), self.TEXT[150:])
 
     def testSeekBackwardsBytesIO(self):
-        # "Test BZ2File.seek(-150, 1) with BytesIO source"
-        with BytesIO(self.getData()) as bio:
+        with BytesIO(self.DATA) as bio:
             with BZ2File(fileobj=bio) as bz2f:
                 bz2f.read(500)
                 bz2f.seek(-150, 1)
@@ -395,7 +471,6 @@
 
 class BZ2CompressorTest(BaseTest):
     def testCompress(self):
-        # "Test BZ2Compressor.compress()/flush()"
         bz2c = BZ2Compressor()
         self.assertRaises(TypeError, bz2c.compress)
         data = bz2c.compress(self.TEXT)
@@ -403,11 +478,10 @@
         self.assertEqual(self.decompress(data), self.TEXT)
 
     def testCompressChunks10(self):
-        # "Test BZ2Compressor.compress()/flush() with chunks of 10 bytes"
         bz2c = BZ2Compressor()
         n = 0
         data = b''
-        while 1:
+        while True:
             str = self.TEXT[n*10:(n+1)*10]
             if not str:
                 break
@@ -438,18 +512,16 @@
         self.assertRaises(TypeError, BZ2Decompressor, 42)
 
     def testDecompress(self):
-        # "Test BZ2Decompressor.decompress()"
         bz2d = BZ2Decompressor()
         self.assertRaises(TypeError, bz2d.decompress)
         text = bz2d.decompress(self.DATA)
         self.assertEqual(text, self.TEXT)
 
     def testDecompressChunks10(self):
-        # "Test BZ2Decompressor.decompress() with chunks of 10 bytes"
         bz2d = BZ2Decompressor()
         text = b''
         n = 0
-        while 1:
+        while True:
             str = self.DATA[n*10:(n+1)*10]
             if not str:
                 break
@@ -458,7 +530,6 @@
         self.assertEqual(text, self.TEXT)
 
     def testDecompressUnusedData(self):
-        # "Test BZ2Decompressor.decompress() with unused data"
         bz2d = BZ2Decompressor()
         unused_data = b"this is unused data"
         text = bz2d.decompress(self.DATA+unused_data)
@@ -466,7 +537,6 @@
         self.assertEqual(bz2d.unused_data, unused_data)
 
     def testEOFError(self):
-        # "Calling BZ2Decompressor.decompress() after EOS must raise EOFError"
         bz2d = BZ2Decompressor()
         text = bz2d.decompress(self.DATA)
         self.assertRaises(EOFError, bz2d.decompress, b"anything")
@@ -492,24 +562,24 @@
     "Test module functions"
 
     def testCompress(self):
-        # "Test compress() function"
         data = bz2.compress(self.TEXT)
         self.assertEqual(self.decompress(data), self.TEXT)
 
     def testDecompress(self):
-        # "Test decompress() function"
         text = bz2.decompress(self.DATA)
         self.assertEqual(text, self.TEXT)
 
     def testDecompressEmpty(self):
-        # "Test decompress() function with empty string"
         text = bz2.decompress(b"")
         self.assertEqual(text, b"")
 
     def testDecompressIncomplete(self):
-        # "Test decompress() function with incomplete data"
         self.assertRaises(ValueError, bz2.decompress, self.DATA[:-10])
 
+    def testDecompressMultiStream(self):
+        text = bz2.decompress(self.DATA * 5)
+        self.assertEqual(text, self.TEXT * 5)
+
 def test_main():
     support.run_unittest(
         BZ2FileTest,
@@ -521,5 +591,3 @@
 
 if __name__ == '__main__':
     test_main()
-
-# vim:ts=4:sw=4
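
The new BZ2File tests above cover append mode and multi-stream archives. A
hedged sketch of the behaviour they exercise, written against a bz2 module
that includes this change:

    import bz2
    import os
    import tempfile

    data1, data2 = b'foo' * 1000, b'bar' * 1000
    fd, path = tempfile.mkstemp(suffix='.bz2')
    os.close(fd)
    try:
        with bz2.BZ2File(path, 'w') as f:
            f.write(data1)
        with bz2.BZ2File(path, 'a') as f:       # appends a second stream
            f.write(data2)
        with bz2.BZ2File(path) as f:
            assert f.read() == data1 + data2    # streams read back to back
    finally:
        os.unlink(path)
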
diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py
--- a/Lib/test/test_capi.py
+++ b/Lib/test/test_capi.py
@@ -190,18 +190,17 @@
         idents = []
 
         def callback():
-            idents.append(_thread.get_ident())
+            idents.append(threading.get_ident())
 
         _testcapi._test_thread_state(callback)
         a = b = callback
         time.sleep(1)
         # Check our main thread is in the list exactly 3 times.
-        if idents.count(_thread.get_ident()) != 3:
+        if idents.count(threading.get_ident()) != 3:
             raise support.TestFailed(
                         "Couldn't find main thread correctly in the list")
 
     if threading:
-        import _thread
         import time
         TestThreadState()
         t = threading.Thread(target=TestThreadState)
diff --git a/Lib/test/test_codecencodings_cn.py b/Lib/test/test_codecencodings_cn.py
--- a/Lib/test/test_codecencodings_cn.py
+++ b/Lib/test/test_codecencodings_cn.py
@@ -50,6 +50,35 @@
     )
     has_iso10646 = True
 
+class Test_HZ(test_multibytecodec_support.TestBase, unittest.TestCase):
+    encoding = 'hz'
+    tstring = test_multibytecodec_support.load_teststring('hz')
+    codectests = (
+        # test '~\n' (3 lines)
+        (b'This sentence is in ASCII.\n'
+         b'The next sentence is in GB.~{<:Ky2;S{#,~}~\n'
+         b'~{NpJ)l6HK!#~}Bye.\n',
+         'strict',
+         'This sentence is in ASCII.\n'
+         'The next sentence is in GB.'
+         '\u5df1\u6240\u4e0d\u6b32\uff0c\u52ff\u65bd\u65bc\u4eba\u3002'
+         'Bye.\n'),
+        # test '~\n' (4 lines)
+        (b'This sentence is in ASCII.\n'
+         b'The next sentence is in GB.~\n'
+         b'~{<:Ky2;S{#,NpJ)l6HK!#~}~\n'
+         b'Bye.\n',
+         'strict',
+         'This sentence is in ASCII.\n'
+         'The next sentence is in GB.'
+         '\u5df1\u6240\u4e0d\u6b32\uff0c\u52ff\u65bd\u65bc\u4eba\u3002'
+         'Bye.\n'),
+        # invalid bytes
+        (b'ab~cd', 'replace', 'ab\uFFFDd'),
+        (b'ab\xffcd', 'replace', 'ab\uFFFDcd'),
+        (b'ab~{\x81\x81\x41\x44~}cd', 'replace', 'ab\uFFFD\uFFFD\u804Acd'),
+    )
+
 def test_main():
     support.run_unittest(__name__)
 
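
The new Test_HZ cases and the hz.txt fixture pin down decoding of HZ-encoded
GB text. A tiny sketch decoding the fixture's second line with the stock 'hz'
codec:

    raw = b'The next sentence is in GB.~{<:Ky2;S{#,NpJ)l6HK!#~}Bye.'
    print(raw.decode('hz'))
    # The next sentence is in GB.己所不欲，勿施於人。Bye.
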
diff --git a/Lib/test/test_codecencodings_iso2022.py b/Lib/test/test_codecencodings_iso2022.py
new file mode 100644
--- /dev/null
+++ b/Lib/test/test_codecencodings_iso2022.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+#
+# Codec encoding tests for ISO 2022 encodings.
+
+from test import support
+from test import test_multibytecodec_support
+import unittest
+
+COMMON_CODEC_TESTS = (
+        # invalid bytes
+        (b'ab\xFFcd', 'replace', 'ab\uFFFDcd'),
+        (b'ab\x1Bdef', 'replace', 'ab\x1Bdef'),
+        (b'ab\x1B$def', 'replace', 'ab\uFFFD'),
+    )
+
+class Test_ISO2022_JP(test_multibytecodec_support.TestBase, unittest.TestCase):
+    encoding = 'iso2022_jp'
+    tstring = test_multibytecodec_support.load_teststring('iso2022_jp')
+    codectests = COMMON_CODEC_TESTS + (
+        (b'ab\x1BNdef', 'replace', 'ab\x1BNdef'),
+    )
+
+class Test_ISO2022_JP2(test_multibytecodec_support.TestBase, unittest.TestCase):
+    encoding = 'iso2022_jp_2'
+    tstring = test_multibytecodec_support.load_teststring('iso2022_jp')
+    codectests = COMMON_CODEC_TESTS + (
+        (b'ab\x1BNdef', 'replace', 'abdef'),
+    )
+
+class Test_ISO2022_KR(test_multibytecodec_support.TestBase, unittest.TestCase):
+    encoding = 'iso2022_kr'
+    tstring = test_multibytecodec_support.load_teststring('iso2022_kr')
+    codectests = COMMON_CODEC_TESTS + (
+        (b'ab\x1BNdef', 'replace', 'ab\x1BNdef'),
+    )
+
+    # iso2022_kr.txt cannot be used to test "chunk coding": the escape
+    # sequence is only written on the first line
+    def test_chunkcoding(self):
+        pass
+
+def test_main():
+    support.run_unittest(__name__)
+
+if __name__ == "__main__":
+    test_main()
diff --git a/Lib/test/test_codecs.py b/Lib/test/test_codecs.py
--- a/Lib/test/test_codecs.py
+++ b/Lib/test/test_codecs.py
@@ -514,13 +514,11 @@
         s1 = 'Hello\r\nworld\r\n'
 
         s = s1.encode(self.encoding)
-        try:
-            with open(support.TESTFN, 'wb') as fp:
-                fp.write(s)
-            with codecs.open(support.TESTFN, 'U', encoding=self.encoding) as reader:
-                self.assertEqual(reader.read(), s1)
-        finally:
-            support.unlink(support.TESTFN)
+        self.addCleanup(support.unlink, support.TESTFN)
+        with open(support.TESTFN, 'wb') as fp:
+            fp.write(s)
+        with codecs.open(support.TESTFN, 'U', encoding=self.encoding) as reader:
+            self.assertEqual(reader.read(), s1)
 
 class UTF16LETest(ReadTest):
     encoding = "utf-16-le"
@@ -1624,6 +1622,7 @@
                  "utf-32",
                  "utf-32-le",
                  "utf-32-be")
+        self.addCleanup(support.unlink, support.TESTFN)
         for encoding in tests:
             # Check if the BOM is written only once
             with codecs.open(support.TESTFN, 'w+', encoding=encoding) as f:
diff --git a/Lib/test/test_collections.py b/Lib/test/test_collections.py
--- a/Lib/test/test_collections.py
+++ b/Lib/test/test_collections.py
@@ -181,12 +181,12 @@
         self.assertRaises(TypeError, eval, 'Point(XXX=1, y=2)', locals())   # wrong keyword argument
         self.assertRaises(TypeError, eval, 'Point(x=1)', locals())          # missing keyword argument
         self.assertEqual(repr(p), 'Point(x=11, y=22)')
-        self.assertNotIn('__dict__', dir(p))                              # verify instance has no dict
         self.assertNotIn('__weakref__', dir(p))
         self.assertEqual(p, Point._make([11, 22]))                          # test _make classmethod
         self.assertEqual(p._fields, ('x', 'y'))                             # test _fields attribute
         self.assertEqual(p._replace(x=1), (1, 22))                          # test _replace method
         self.assertEqual(p._asdict(), dict(x=11, y=22))                     # test _asdict method
+        self.assertEqual(vars(p), p._asdict())                              # verify that vars() works
 
         try:
             p._replace(x=1, error=2)
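
The new vars() assertion above reflects that named tuple instances now support
vars(); a tiny sketch of the behaviour being tested (illustrative only, not part
of the patch):

    from collections import namedtuple

    # Sketch: vars() on a namedtuple instance mirrors _asdict().
    Point = namedtuple('Point', 'x y')
    p = Point(x=11, y=22)
    assert vars(p) == p._asdict() == {'x': 11, 'y': 22}
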
diff --git a/Lib/test/test_compile.py b/Lib/test/test_compile.py
--- a/Lib/test/test_compile.py
+++ b/Lib/test/test_compile.py
@@ -1,6 +1,7 @@
 import unittest
 import sys
 import _ast
+import types
 from test import support
 
 class TestSpecifics(unittest.TestCase):
@@ -433,6 +434,14 @@
         ast.body = [_ast.BoolOp()]
         self.assertRaises(TypeError, compile, ast, '<ast>', 'exec')
 
+    @support.cpython_only
+    def test_same_filename_used(self):
+        s = """def f(): pass\ndef g(): pass"""
+        c = compile(s, "myfile", "exec")
+        for obj in c.co_consts:
+            if isinstance(obj, types.CodeType):
+                self.assertIs(obj.co_filename, c.co_filename)
+
 
 def test_main():
     support.run_unittest(TestSpecifics)
diff --git a/Lib/test/test_concurrent_futures.py b/Lib/test/test_concurrent_futures.py
--- a/Lib/test/test_concurrent_futures.py
+++ b/Lib/test/test_concurrent_futures.py
@@ -260,14 +260,14 @@
 
     def test_timeout(self):
         future1 = self.executor.submit(mul, 6, 7)
-        future2 = self.executor.submit(time.sleep, 3)
+        future2 = self.executor.submit(time.sleep, 6)
 
         finished, pending = futures.wait(
                 [CANCELLED_AND_NOTIFIED_FUTURE,
                  EXCEPTION_FUTURE,
                  SUCCESSFUL_FUTURE,
                  future1, future2],
-                timeout=1.5,
+                timeout=5,
                 return_when=futures.ALL_COMPLETED)
 
         self.assertEqual(set([CANCELLED_AND_NOTIFIED_FUTURE,
@@ -357,8 +357,8 @@
         results = []
         try:
             for i in self.executor.map(time.sleep,
-                                       [0, 0, 3],
-                                       timeout=1.5):
+                                       [0, 0, 6],
+                                       timeout=5):
                 results.append(i)
         except futures.TimeoutError:
             pass
diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py
--- a/Lib/test/test_descr.py
+++ b/Lib/test/test_descr.py
@@ -1587,6 +1587,7 @@
             ("__floor__", math.floor, zero, set(), {}),
             ("__trunc__", math.trunc, zero, set(), {}),
             ("__ceil__", math.ceil, zero, set(), {}),
+            ("__dir__", dir, empty_seq, set(), {}),
             ]
 
         class Checker(object):
@@ -1630,12 +1631,7 @@
             for attr, obj in env.items():
                 setattr(X, attr, obj)
             setattr(X, name, ErrDescr())
-            try:
-                runner(X())
-            except MyException:
-                pass
-            else:
-                self.fail("{0!r} didn't raise".format(name))
+            self.assertRaises(MyException, runner, X())
 
     def test_specials(self):
         # Testing special operators...
diff --git a/Lib/test/test_descrtut.py b/Lib/test/test_descrtut.py
--- a/Lib/test/test_descrtut.py
+++ b/Lib/test/test_descrtut.py
@@ -170,6 +170,7 @@
      '__contains__',
      '__delattr__',
      '__delitem__',
+     '__dir__',
      '__doc__',
      '__eq__',
      '__format__',
diff --git a/Lib/test/test_faulthandler.py b/Lib/test/test_faulthandler.py
--- a/Lib/test/test_faulthandler.py
+++ b/Lib/test/test_faulthandler.py
@@ -29,13 +29,14 @@
         except (ValueError, resource_error):
             pass
 
-def expected_traceback(lineno1, lineno2, header, count=1):
+def expected_traceback(lineno1, lineno2, header, min_count=1):
     regex = header
     regex += '  File "<string>", line %s in func\n' % lineno1
     regex += '  File "<string>", line %s in <module>' % lineno2
-    if count != 1:
-        regex = (regex + '\n') * (count - 1) + regex
-    return '^' + regex + '$'
+    if 1 < min_count:
+        return '^' + (regex + '\n') * (min_count - 1) + regex
+    else:
+        return '^' + regex + '$'
 
 @contextmanager
 def temporary_filename():
@@ -371,7 +372,7 @@
         faulthandler.dump_tracebacks_later(timeout, repeat=repeat, file=file)
         if cancel:
             faulthandler.cancel_dump_tracebacks_later()
-        time.sleep(timeout * 2.5)
+        time.sleep(timeout * 5)
         faulthandler.cancel_dump_tracebacks_later()
 
 timeout = {timeout}
@@ -402,7 +403,7 @@
             if repeat:
                 count *= 2
             header = r'Timeout \(%s\)!\nThread 0x[0-9a-f]+:\n' % timeout_str
-            regex = expected_traceback(9, 20, header, count=count)
+            regex = expected_traceback(9, 20, header, min_count=count)
             self.assertRegex(trace, regex)
         else:
             self.assertEqual(trace, '')
diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py
--- a/Lib/test/test_grammar.py
+++ b/Lib/test/test_grammar.py
@@ -10,7 +10,7 @@
 
 class TokenTests(unittest.TestCase):
 
-    def testBackslash(self):
+    def test_backslash(self):
         # Backslash means line continuation:
         x = 1 \
         + 1
@@ -20,7 +20,7 @@
         x = 0
         self.assertEqual(x, 0, 'backslash ending comment')
 
-    def testPlainIntegers(self):
+    def test_plain_integers(self):
         self.assertEqual(type(000), type(0))
         self.assertEqual(0xff, 255)
         self.assertEqual(0o377, 255)
@@ -56,7 +56,7 @@
         else:
             self.fail('Weird maxsize value %r' % maxsize)
 
-    def testLongIntegers(self):
+    def test_long_integers(self):
         x = 0
         x = 0xffffffffffffffff
         x = 0Xffffffffffffffff
@@ -66,7 +66,7 @@
         x = 0b100000000000000000000000000000000000000000000000000000000000000000000
         x = 0B111111111111111111111111111111111111111111111111111111111111111111111
 
-    def testFloats(self):
+    def test_floats(self):
         x = 3.14
         x = 314.
         x = 0.314
@@ -80,7 +80,7 @@
         x = .3e14
         x = 3.1e4
 
-    def testStringLiterals(self):
+    def test_string_literals(self):
         x = ''; y = ""; self.assertTrue(len(x) == 0 and x == y)
         x = '\''; y = "'"; self.assertTrue(len(x) == 1 and x == y and ord(x) == 39)
         x = '"'; y = "\""; self.assertTrue(len(x) == 1 and x == y and ord(x) == 34)
@@ -120,11 +120,18 @@
 '
         self.assertEqual(x, y)
 
-    def testEllipsis(self):
+    def test_ellipsis(self):
         x = ...
         self.assertTrue(x is Ellipsis)
         self.assertRaises(SyntaxError, eval, ".. .")
 
+    def test_eof_error(self):
+        samples = ("def foo(", "\ndef foo(", "def foo(\n")
+        for s in samples:
+            with self.assertRaises(SyntaxError) as cm:
+                compile(s, "<test>", "exec")
+            self.assertIn("unexpected EOF", str(cm.exception))
+
 class GrammarTests(unittest.TestCase):
 
     # single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE
@@ -136,11 +143,11 @@
     # expr_input: testlist NEWLINE
     # XXX Hard to test -- used only in calls to input()
 
-    def testEvalInput(self):
+    def test_eval_input(self):
         # testlist ENDMARKER
         x = eval('1, 0 or 1')
 
-    def testFuncdef(self):
+    def test_funcdef(self):
         ### [decorators] 'def' NAME parameters ['->' test] ':' suite
         ### decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
         ### decorators: decorator+
@@ -324,7 +331,7 @@
         check_syntax_error(self, "f(*g(1=2))")
         check_syntax_error(self, "f(**g(1=2))")
 
-    def testLambdef(self):
+    def test_lambdef(self):
         ### lambdef: 'lambda' [varargslist] ':' test
         l1 = lambda : 0
         self.assertEqual(l1(), 0)
@@ -346,7 +353,7 @@
     ### stmt: simple_stmt | compound_stmt
     # Tested below
 
-    def testSimpleStmt(self):
+    def test_simple_stmt(self):
         ### simple_stmt: small_stmt (';' small_stmt)* [';']
         x = 1; pass; del x
         def foo():
@@ -357,7 +364,7 @@
     ### small_stmt: expr_stmt | pass_stmt | del_stmt | flow_stmt | import_stmt | global_stmt | access_stmt
     # Tested below
 
-    def testExprStmt(self):
+    def test_expr_stmt(self):
         # (exprlist '=')* exprlist
         1
         1, 2, 3
@@ -370,7 +377,7 @@
         check_syntax_error(self, "x + 1 = 1")
         check_syntax_error(self, "a + 1 = b + 2")
 
-    def testDelStmt(self):
+    def test_del_stmt(self):
         # 'del' exprlist
         abc = [1,2,3]
         x, y, z = abc
@@ -379,18 +386,18 @@
         del abc
         del x, y, (z, xyz)
 
-    def testPassStmt(self):
+    def test_pass_stmt(self):
         # 'pass'
         pass
 
     # flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt
     # Tested below
 
-    def testBreakStmt(self):
+    def test_break_stmt(self):
         # 'break'
         while 1: break
 
-    def testContinueStmt(self):
+    def test_continue_stmt(self):
         # 'continue'
         i = 1
         while i: i = 0; continue
@@ -442,7 +449,7 @@
                 self.fail("continue then break in try/except in loop broken!")
         test_inner()
 
-    def testReturn(self):
+    def test_return(self):
         # 'return' [testlist]
         def g1(): return
         def g2(): return 1
@@ -450,17 +457,17 @@
         x = g2()
         check_syntax_error(self, "class foo:return 1")
 
-    def testYield(self):
+    def test_yield(self):
         check_syntax_error(self, "class foo:yield 1")
 
-    def testRaise(self):
+    def test_raise(self):
         # 'raise' test [',' test]
         try: raise RuntimeError('just testing')
         except RuntimeError: pass
         try: raise KeyboardInterrupt
         except KeyboardInterrupt: pass
 
-    def testImport(self):
+    def test_import(self):
         # 'import' dotted_as_names
         import sys
         import time, sys
@@ -473,13 +480,13 @@
         from sys import (path, argv)
         from sys import (path, argv,)
 
-    def testGlobal(self):
+    def test_global(self):
         # 'global' NAME (',' NAME)*
         global a
         global a, b
         global one, two, three, four, five, six, seven, eight, nine, ten
 
-    def testNonlocal(self):
+    def test_nonlocal(self):
         # 'nonlocal' NAME (',' NAME)*
         x = 0
         y = 0
@@ -487,7 +494,7 @@
             nonlocal x
             nonlocal x, y
 
-    def testAssert(self):
+    def test_assert(self):
         # assertTruestmt: 'assert' test [',' test]
         assert 1
         assert 1, 1
@@ -504,7 +511,7 @@
     ### compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | funcdef | classdef
     # Tested below
 
-    def testIf(self):
+    def test_if(self):
         # 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
         if 1: pass
         if 1: pass
@@ -517,7 +524,7 @@
         elif 0: pass
         else: pass
 
-    def testWhile(self):
+    def test_while(self):
         # 'while' test ':' suite ['else' ':' suite]
         while 0: pass
         while 0: pass
@@ -532,7 +539,7 @@
             x = 2
         self.assertEqual(x, 2)
 
-    def testFor(self):
+    def test_for(self):
         # 'for' exprlist 'in' exprlist ':' suite ['else' ':' suite]
         for i in 1, 2, 3: pass
         for i, j, k in (): pass
@@ -559,7 +566,7 @@
             result.append(x)
         self.assertEqual(result, [1, 2, 3])
 
-    def testTry(self):
+    def test_try(self):
         ### try_stmt: 'try' ':' suite (except_clause ':' suite)+ ['else' ':' suite]
         ###         | 'try' ':' suite 'finally' ':' suite
         ### except_clause: 'except' [expr ['as' expr]]
@@ -582,7 +589,7 @@
         try: pass
         finally: pass
 
-    def testSuite(self):
+    def test_suite(self):
         # simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT
         if 1: pass
         if 1:
@@ -597,7 +604,7 @@
             pass
             #
 
-    def testTest(self):
+    def test_test(self):
         ### and_test ('or' and_test)*
         ### and_test: not_test ('and' not_test)*
         ### not_test: 'not' not_test | comparison
@@ -608,7 +615,7 @@
         if not 1 and 1 and 1: pass
         if 1 and 1 or 1 and 1 and 1 or not 1 and 1: pass
 
-    def testComparison(self):
+    def test_comparison(self):
         ### comparison: expr (comp_op expr)*
         ### comp_op: '<'|'>'|'=='|'>='|'<='|'!='|'in'|'not' 'in'|'is'|'is' 'not'
         if 1: pass
@@ -625,36 +632,36 @@
         if 1 not in (): pass
         if 1 < 1 > 1 == 1 >= 1 <= 1 != 1 in 1 not in 1 is 1 is not 1: pass
 
-    def testBinaryMaskOps(self):
+    def test_binary_mask_ops(self):
         x = 1 & 1
         x = 1 ^ 1
         x = 1 | 1
 
-    def testShiftOps(self):
+    def test_shift_ops(self):
         x = 1 << 1
         x = 1 >> 1
         x = 1 << 1 >> 1
 
-    def testAdditiveOps(self):
+    def test_additive_ops(self):
         x = 1
         x = 1 + 1
         x = 1 - 1 - 1
         x = 1 - 1 + 1 - 1 + 1
 
-    def testMultiplicativeOps(self):
+    def test_multiplicative_ops(self):
         x = 1 * 1
         x = 1 / 1
         x = 1 % 1
         x = 1 / 1 * 1 % 1
 
-    def testUnaryOps(self):
+    def test_unary_ops(self):
         x = +1
         x = -1
         x = ~1
         x = ~1 ^ 1 & 1 | 1 & 1 ^ -1
         x = -1*1/1 + 1*1 - ---1*1
 
-    def testSelectors(self):
+    def test_selectors(self):
         ### trailer: '(' [testlist] ')' | '[' subscript ']' | '.' NAME
         ### subscript: expr | [expr] ':' [expr]
 
@@ -684,7 +691,7 @@
         L.sort(key=lambda x: x if isinstance(x, tuple) else ())
         self.assertEqual(str(L), '[1, (1,), (1, 2), (1, 2, 3)]')
 
-    def testAtoms(self):
+    def test_atoms(self):
         ### atom: '(' [testlist] ')' | '[' [testlist] ']' | '{' [dictsetmaker] '}' | NAME | NUMBER | STRING
         ### dictsetmaker: (test ':' test (',' test ':' test)* [',']) | (test (',' test)* [','])
 
@@ -719,7 +726,7 @@
     ### testlist: test (',' test)* [',']
     # These have been exercised enough above
 
-    def testClassdef(self):
+    def test_classdef(self):
         # 'class' NAME ['(' [testlist] ')'] ':' suite
         class B: pass
         class B2(): pass
@@ -738,14 +745,14 @@
         @class_decorator
         class G: pass
 
-    def testDictcomps(self):
+    def test_dictcomps(self):
         # dictorsetmaker: ( (test ':' test (comp_for |
         #                                   (',' test ':' test)* [','])) |
         #                   (test (comp_for | (',' test)* [','])) )
         nums = [1, 2, 3]
         self.assertEqual({i:i+1 for i in nums}, {1: 2, 2: 3, 3: 4})
 
-    def testListcomps(self):
+    def test_listcomps(self):
         # list comprehension tests
         nums = [1, 2, 3, 4, 5]
         strs = ["Apple", "Banana", "Coconut"]
@@ -808,7 +815,7 @@
         self.assertEqual(x, [('Boeing', 'Airliner'), ('Boeing', 'Engine'), ('Ford', 'Engine'),
                              ('Macdonalds', 'Cheeseburger')])
 
-    def testGenexps(self):
+    def test_genexps(self):
         # generator expression tests
         g = ([x for x in range(10)] for x in range(1))
         self.assertEqual(next(g), [x for x in range(10)])
@@ -843,7 +850,7 @@
         check_syntax_error(self, "foo(x for x in range(10), 100)")
         check_syntax_error(self, "foo(100, x for x in range(10))")
 
-    def testComprehensionSpecials(self):
+    def test_comprehension_specials(self):
         # test for outmost iterable precomputation
         x = 10; g = (i for i in range(x)); x = 5
         self.assertEqual(len(list(g)), 10)
@@ -882,7 +889,7 @@
         with manager() as x, manager():
             pass
 
-    def testIfElseExpr(self):
+    def test_if_else_expr(self):
         # Test ifelse expressions in various cases
         def _checkeval(msg, ret):
             "helper to check that evaluation of expressions is done correctly"
diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py
--- a/Lib/test/test_io.py
+++ b/Lib/test/test_io.py
@@ -833,14 +833,17 @@
         # Inject some None's in there to simulate EWOULDBLOCK
         rawio = self.MockRawIO((b"abc", b"d", None, b"efg", None, None, None))
         bufio = self.tp(rawio)
-
         self.assertEqual(b"abcd", bufio.read(6))
         self.assertEqual(b"e", bufio.read(1))
         self.assertEqual(b"fg", bufio.read())
         self.assertEqual(b"", bufio.peek(1))
-        self.assertTrue(None is bufio.read())
+        self.assertIsNone(bufio.read())
         self.assertEqual(b"", bufio.read())
 
+        rawio = self.MockRawIO((b"a", None, None))
+        self.assertEqual(b"a", rawio.readall())
+        self.assertIsNone(rawio.readall())
+
     def test_read_past_eof(self):
         rawio = self.MockRawIO((b"abc", b"d", b"efg"))
         bufio = self.tp(rawio)
@@ -2512,6 +2515,8 @@
             self.assertRaises(ValueError, f.read)
             if hasattr(f, "read1"):
                 self.assertRaises(ValueError, f.read1, 1024)
+            if hasattr(f, "readall"):
+                self.assertRaises(ValueError, f.readall)
             if hasattr(f, "readinto"):
                 self.assertRaises(ValueError, f.readinto, bytearray(1024))
             self.assertRaises(ValueError, f.readline)
@@ -2678,7 +2683,7 @@
             # The buffered IO layer must check for pending signal
             # handlers, which in this case will invoke alarm_interrupt().
             self.assertRaises(ZeroDivisionError,
-                              wio.write, item * (1024 * 1024))
+                        wio.write, item * (support.PIPE_MAX_SIZE // len(item)))
             t.join()
             # We got one byte, get another one and check that it isn't a
             # repeat of the first one.
diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py
--- a/Lib/test/test_logging.py
+++ b/Lib/test/test_logging.py
@@ -888,19 +888,35 @@
                             before calling :meth:`start`, so that the server will
                             set up the socket and listen on it.
         """
-        def __init__(self, addr, handler, poll_interval=0.5, bind_and_activate=True):
+        def __init__(self, addr, handler, poll_interval=0.5,
+                     bind_and_activate=True):
             class DelegatingUDPRequestHandler(DatagramRequestHandler):
 
                 def handle(self):
                     self.server._handler(self)
-            ThreadingUDPServer.__init__(self, addr, DelegatingUDPRequestHandler,
+
+                def finish(self):
+                    data = self.wfile.getvalue()
+                    if data:
+                        try:
+                            super(DelegatingUDPRequestHandler, self).finish()
+                        except socket.error:
+                            if not self.server._closed:
+                                raise
+
+            ThreadingUDPServer.__init__(self, addr,
+                                        DelegatingUDPRequestHandler,
                                         bind_and_activate)
             ControlMixin.__init__(self, handler, poll_interval)
+            self._closed = False
 
         def server_bind(self):
             super(TestUDPServer, self).server_bind()
             self.port = self.socket.getsockname()[1]
 
+        def server_close(self):
+            super(TestUDPServer, self).server_close()
+            self._closed = True
 
 # - end of server_helper section
 
@@ -3570,7 +3586,7 @@
         r = logging.makeLogRecord({'msg': 'testing'})
         fh.emit(r)
         self.assertLogFile(self.fn)
-        time.sleep(1.0)
+        time.sleep(1.01)    # just a little over a second ...
         fh.emit(r)
         fh.close()
         # At this point, we should have a recent rotated file which we
diff --git a/Lib/test/test_marshal.py b/Lib/test/test_marshal.py
--- a/Lib/test/test_marshal.py
+++ b/Lib/test/test_marshal.py
@@ -5,6 +5,7 @@
 import sys
 import unittest
 import os
+import types
 
 class HelperMixin:
     def helper(self, sample, *extra):
@@ -113,6 +114,22 @@
         codes = (ExceptionTestCase.test_exceptions.__code__,) * count
         marshal.loads(marshal.dumps(codes))
 
+    def test_different_filenames(self):
+        co1 = compile("x", "f1", "exec")
+        co2 = compile("y", "f2", "exec")
+        co1, co2 = marshal.loads(marshal.dumps((co1, co2)))
+        self.assertEqual(co1.co_filename, "f1")
+        self.assertEqual(co2.co_filename, "f2")
+
+    @support.cpython_only
+    def test_same_filename_used(self):
+        s = """def f(): pass\ndef g(): pass"""
+        co = compile(s, "myfile", "exec")
+        co = marshal.loads(marshal.dumps(co))
+        for obj in co.co_consts:
+            if isinstance(obj, types.CodeType):
+                self.assertIs(co.co_filename, obj.co_filename)
+
 class ContainerTestCase(unittest.TestCase, HelperMixin):
     d = {'astring': 'foo@bar.baz.spam',
          'afloat': 7283.43,
diff --git a/Lib/test/test_math.py b/Lib/test/test_math.py
--- a/Lib/test/test_math.py
+++ b/Lib/test/test_math.py
@@ -2,6 +2,7 @@
 # XXXX Should not do tests around zero only
 
 from test.support import run_unittest, verbose, requires_IEEE_754
+from test import support
 import unittest
 import math
 import os
@@ -669,9 +670,8 @@
         self.assertTrue(math.isnan(math.log2(NAN)))
 
     @requires_IEEE_754
-    @unittest.skipIf(sys.platform == 'darwin'
-                     and platform.mac_ver()[0].startswith('10.4.'),
-                     'Mac OS X Tiger log2() is not accurate enough')
+    # log2() is not accurate enough on Mac OS X Tiger (10.4)
+    @support.requires_mac_ver(10, 5)
     def testLog2Exact(self):
         # Check that we get exact equality for log2 of powers of 2.
         actual = [math.log2(math.ldexp(1.0, n)) for n in range(-1074, 1024)]
diff --git a/Lib/test/test_multibytecodec.py b/Lib/test/test_multibytecodec.py
--- a/Lib/test/test_multibytecodec.py
+++ b/Lib/test/test_multibytecodec.py
@@ -256,6 +256,41 @@
             # Any ISO 2022 codec will cause the segfault
             myunichr(x).encode('iso_2022_jp', 'ignore')
 
+class TestStateful(unittest.TestCase):
+    text = '\u4E16\u4E16'
+    encoding = 'iso-2022-jp'
+    expected = b'\x1b$B@$@$'
+    reset = b'\x1b(B'
+    expected_reset = expected + reset
+
+    def test_encode(self):
+        self.assertEqual(self.text.encode(self.encoding), self.expected_reset)
+
+    def test_incrementalencoder(self):
+        encoder = codecs.getincrementalencoder(self.encoding)()
+        output = b''.join(
+            encoder.encode(char)
+            for char in self.text)
+        self.assertEqual(output, self.expected)
+        self.assertEqual(encoder.encode('', final=True), self.reset)
+        self.assertEqual(encoder.encode('', final=True), b'')
+
+    def test_incrementalencoder_final(self):
+        encoder = codecs.getincrementalencoder(self.encoding)()
+        last_index = len(self.text) - 1
+        output = b''.join(
+            encoder.encode(char, index == last_index)
+            for index, char in enumerate(self.text))
+        self.assertEqual(output, self.expected_reset)
+        self.assertEqual(encoder.encode('', final=True), b'')
+
+class TestHZStateful(TestStateful):
+    text = '\u804a\u804a'
+    encoding = 'hz'
+    expected = b'~{ADAD'
+    reset = b'~}'
+    expected_reset = expected + reset
+
 def test_main():
     support.run_unittest(__name__)
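
For context, the TestStateful cases added above depend on the incremental encoder
protocol: a stateful codec such as iso-2022-jp must withhold its reset sequence
until encode() is called with final=True. A minimal sketch, illustrative only:

    import codecs

    # Sketch: the per-character output omits the ASCII reset sequence
    # b'\x1b(B' until the encoder is finalized, matching TestStateful.
    enc = codecs.getincrementalencoder('iso-2022-jp')()
    chunks = [enc.encode(ch) for ch in '\u4e16\u4e16']
    chunks.append(enc.encode('', final=True))   # emits the reset sequence
    assert b''.join(chunks) == '\u4e16\u4e16'.encode('iso-2022-jp')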
 
diff --git a/Lib/test/test_multibytecodec_support.py b/Lib/test/test_multibytecodec_support.py
--- a/Lib/test/test_multibytecodec_support.py
+++ b/Lib/test/test_multibytecodec_support.py
@@ -58,9 +58,16 @@
                 result = func(source, scheme)[0]
                 if func is self.decode:
                     self.assertTrue(type(result) is str, type(result))
+                    self.assertEqual(result, expected,
+                                     '%a.decode(%r, %r)=%a != %a'
+                                     % (source, self.encoding, scheme, result,
+                                        expected))
                 else:
                     self.assertTrue(type(result) is bytes, type(result))
-                self.assertEqual(result, expected)
+                    self.assertEqual(result, expected,
+                                     '%a.encode(%r, %r)=%a != %a'
+                                     % (source, self.encoding, scheme, result,
+                                        expected))
             else:
                 self.assertRaises(UnicodeError, func, source, scheme)
 
@@ -277,6 +284,7 @@
     pass_enctest = []
     pass_dectest = []
     supmaps = []
+    codectests = []
 
     def __init__(self, *args, **kw):
         unittest.TestCase.__init__(self, *args, **kw)
@@ -346,6 +354,30 @@
         if (csetch, unich) not in self.pass_dectest:
             self.assertEqual(str(csetch, self.encoding), unich)
 
+    def test_errorhandle(self):
+        for source, scheme, expected in self.codectests:
+            if isinstance(source, bytes):
+                func = source.decode
+            else:
+                func = source.encode
+            if expected:
+                if isinstance(source, bytes):
+                    result = func(self.encoding, scheme)
+                    self.assertTrue(type(result) is str, type(result))
+                    self.assertEqual(result, expected,
+                                     '%a.decode(%r, %r)=%a != %a'
+                                     % (source, self.encoding, scheme, result,
+                                        expected))
+                else:
+                    result = func(self.encoding, scheme)
+                    self.assertTrue(type(result) is bytes, type(result))
+                    self.assertEqual(result, expected,
+                                     '%a.encode(%r, %r)=%a != %a'
+                                     % (source, self.encoding, scheme, result,
+                                        expected))
+            else:
+                self.assertRaises(UnicodeError, func, self.encoding, scheme)
+
 def load_teststring(name):
     dir = os.path.join(os.path.dirname(__file__), 'cjkencodings')
     with open(os.path.join(dir, name + '.txt'), 'rb') as f:
diff --git a/Lib/test/test_parser.py b/Lib/test/test_parser.py
--- a/Lib/test/test_parser.py
+++ b/Lib/test/test_parser.py
@@ -614,6 +614,13 @@
 
     # XXX tests for pickling and unpickling of ST objects should go here
 
+class OtherParserCase(unittest.TestCase):
+
+    def test_two_args_to_expr(self):
+        # See bug #12264
+        with self.assertRaises(TypeError):
+            parser.expr("a", "b")
+
 
 def test_main():
     support.run_unittest(
@@ -622,6 +629,7 @@
         CompileTestCase,
         ParserStackLimitTestCase,
         STObjectTestCase,
+        OtherParserCase,
     )
 
 
diff --git a/Lib/test/test_pep292.py b/Lib/test/test_pep292.py
--- a/Lib/test/test_pep292.py
+++ b/Lib/test/test_pep292.py
@@ -42,19 +42,6 @@
         s = Template('$who likes $$')
         eq(s.substitute(dict(who='tim', what='ham')), 'tim likes $')
 
-    def test_invalid(self):
-        class MyPattern(Template):
-            pattern = r"""
-            (?:
-            (?P<invalid>)            |
-            (?P<escaped>%(delim)s)   |
-            @(?P<named>%(id)s)       |
-            @{(?P<braced>%(id)s)}
-            )
-            """
-        s = MyPattern('$')
-        self.assertRaises(ValueError, s.substitute, dict())
-
     def test_percents(self):
         eq = self.assertEqual
         s = Template('%(foo)s $foo ${foo}')
@@ -172,6 +159,26 @@
         val = t.safe_substitute({'location': 'Cleveland'})
         self.assertEqual(val, 'PyCon in Cleveland')
 
+    def test_invalid_with_no_lines(self):
+        # The error formatting for invalid templates
+        # has a special case for no data that the default
+        # pattern can't trigger (always has at least '$')
+        # So we craft a pattern that is always invalid
+        # with no leading data.
+        class MyTemplate(Template):
+            pattern = r"""
+              (?P<invalid>) |
+              unreachable(
+                (?P<named>)   |
+                (?P<braced>)  |
+                (?P<escaped>)
+              )
+            """
+        s = MyTemplate('')
+        with self.assertRaises(ValueError) as err:
+            s.substitute({})
+        self.assertIn('line 1, col 1', str(err.exception))
+
     def test_unicode_values(self):
         s = Template('$who likes $what')
         d = dict(who='t\xffm', what='f\xfe\fed')
diff --git a/Lib/test/test_plistlib.py b/Lib/test/test_plistlib.py
--- a/Lib/test/test_plistlib.py
+++ b/Lib/test/test_plistlib.py
@@ -175,6 +175,32 @@
         self.assertEqual(test1, result1)
         self.assertEqual(test2, result2)
 
+    def test_invalidarray(self):
+        for i in ["<key>key inside an array</key>",
+                  "<key>key inside an array2</key><real>3</real>",
+                  "<true/><key>key inside an array3</key>"]:
+            self.assertRaises(ValueError, plistlib.readPlistFromBytes,
+                              ("<plist><array>%s</array></plist>"%i).encode())
+
+    def test_invaliddict(self):
+        for i in ["<key><true/>k</key><string>compound key</string>",
+                  "<key>single key</key>",
+                  "<string>missing key</string>",
+                  "<key>k1</key><string>v1</string><real>5.3</real>"
+                  "<key>k1</key><key>k2</key><string>double key</string>"]:
+            self.assertRaises(ValueError, plistlib.readPlistFromBytes,
+                              ("<plist><dict>%s</dict></plist>"%i).encode())
+            self.assertRaises(ValueError, plistlib.readPlistFromBytes,
+                              ("<plist><array><dict>%s</dict></array></plist>"%i).encode())
+
+    def test_invalidinteger(self):
+        self.assertRaises(ValueError, plistlib.readPlistFromBytes,
+                          b"<plist><integer>not integer</integer></plist>")
+
+    def test_invalidreal(self):
+        self.assertRaises(ValueError, plistlib.readPlistFromBytes,
+                          b"<plist><real>not real</real></plist>")
+
 
 def test_main():
     support.run_unittest(TestPlistlib)
diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py
--- a/Lib/test/test_posix.py
+++ b/Lib/test/test_posix.py
@@ -309,6 +309,7 @@
                 fp2.close()
 
     @unittest.skipUnless(hasattr(os, 'O_CLOEXEC'), "needs os.O_CLOEXEC")
+    @support.requires_linux_version(2, 6, 23)
     def test_oscloexec(self):
         fd = os.open(support.TESTFN, os.O_RDONLY|os.O_CLOEXEC)
         self.addCleanup(os.close, fd)
@@ -474,6 +475,32 @@
             os.close(reader)
             os.close(writer)
 
+    @unittest.skipUnless(hasattr(os, 'pipe2'), "test needs os.pipe2()")
+    @support.requires_linux_version(2, 6, 27)
+    def test_pipe2(self):
+        self.assertRaises(TypeError, os.pipe2, 'DEADBEEF')
+        self.assertRaises(TypeError, os.pipe2, 0, 0)
+
+        # try calling without flag, like os.pipe()
+        r, w = os.pipe2()
+        os.close(r)
+        os.close(w)
+
+        # test flags
+        r, w = os.pipe2(os.O_CLOEXEC|os.O_NONBLOCK)
+        self.addCleanup(os.close, r)
+        self.addCleanup(os.close, w)
+        self.assertTrue(fcntl.fcntl(r, fcntl.F_GETFD) & fcntl.FD_CLOEXEC)
+        self.assertTrue(fcntl.fcntl(w, fcntl.F_GETFD) & fcntl.FD_CLOEXEC)
+        # try reading from an empty pipe: this should fail, not block
+        self.assertRaises(OSError, os.read, r, 1)
+        # try a write big enough to fill-up the pipe: this should either
+        # fail or perform a partial write, not block
+        try:
+            os.write(w, b'x' * support.PIPE_MAX_SIZE)
+        except OSError:
+            pass
+
     def test_utime(self):
         if hasattr(posix, 'utime'):
             now = time.time()
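
The new test_pipe2 above covers os.pipe2(), which creates a pipe with the given
flags applied atomically (Linux 2.6.27 or later). A small illustrative sketch
under that assumption:

    import os

    # Sketch: a close-on-exec, non-blocking pipe, the same flags test_pipe2
    # verifies via fcntl.
    if hasattr(os, 'pipe2'):
        r, w = os.pipe2(os.O_CLOEXEC | os.O_NONBLOCK)
        try:
            os.write(w, b'ping')
            assert os.read(r, 4) == b'ping'
        finally:
            os.close(r)
            os.close(w)
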
diff --git a/Lib/test/test_signal.py b/Lib/test/test_signal.py
--- a/Lib/test/test_signal.py
+++ b/Lib/test/test_signal.py
@@ -226,10 +226,17 @@
     TIMEOUT_FULL = 10
     TIMEOUT_HALF = 5
 
+    def handler(self, signum, frame):
+        pass
+
     def check_signum(self, *signals):
         data = os.read(self.read, len(signals)+1)
         raised = struct.unpack('%uB' % len(data), data)
-        self.assertSequenceEqual(raised, signals)
+        # We don't care about the signal delivery order (it's not
+        # portable or reliable)
+        raised = set(raised)
+        signals = set(signals)
+        self.assertEqual(raised, signals)
 
     def test_wakeup_fd_early(self):
         import select
@@ -259,16 +266,38 @@
         self.check_signum(signal.SIGALRM)
 
     def test_signum(self):
-        old_handler = signal.signal(signal.SIGUSR1, lambda x,y:None)
+        old_handler = signal.signal(signal.SIGUSR1, self.handler)
         self.addCleanup(signal.signal, signal.SIGUSR1, old_handler)
         os.kill(os.getpid(), signal.SIGUSR1)
         os.kill(os.getpid(), signal.SIGALRM)
         self.check_signum(signal.SIGUSR1, signal.SIGALRM)
 
+    @unittest.skipUnless(hasattr(signal, 'pthread_sigmask'),
+                         'need signal.pthread_sigmask()')
+    @unittest.skipUnless(hasattr(signal, 'pthread_kill'),
+                         'need signal.pthread_kill()')
+    def test_pending(self):
+        signum1 = signal.SIGUSR1
+        signum2 = signal.SIGUSR2
+        tid = threading.current_thread().ident
+
+        old_handler = signal.signal(signum1, self.handler)
+        self.addCleanup(signal.signal, signum1, old_handler)
+        old_handler = signal.signal(signum2, self.handler)
+        self.addCleanup(signal.signal, signum2, old_handler)
+
+        signal.pthread_sigmask(signal.SIG_BLOCK, (signum1, signum2))
+        signal.pthread_kill(tid, signum1)
+        signal.pthread_kill(tid, signum2)
+        # Unblocking the 2 signals calls the C signal handler twice
+        signal.pthread_sigmask(signal.SIG_UNBLOCK, (signum1, signum2))
+
+        self.check_signum(signum1, signum2)
+
     def setUp(self):
         import fcntl
 
-        self.alrm = signal.signal(signal.SIGALRM, lambda x,y:None)
+        self.alrm = signal.signal(signal.SIGALRM, self.handler)
         self.read, self.write = os.pipe()
         flags = fcntl.fcntl(self.write, fcntl.F_GETFL, 0)
         flags = flags | os.O_NONBLOCK
@@ -529,7 +558,7 @@
 
     def kill(self, signum):
         if self.has_pthread_kill:
-            tid = threading.current_thread().ident
+            tid = threading.get_ident()
             signal.pthread_kill(tid, signum)
         else:
             pid = os.getpid()
@@ -561,7 +590,7 @@
                          'need signal.pthread_kill()')
     def test_pthread_kill(self):
         signum = signal.SIGUSR1
-        current = threading.current_thread().ident
+        current = threading.get_ident()
 
         old_handler = signal.signal(signum, self.handler)
         self.addCleanup(signal.signal, signum, old_handler)
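
test_pending above relies on blocked signals staying pending and being delivered
once the mask is cleared. A minimal sketch of that pattern (POSIX only, assuming
signal.pthread_sigmask is available; illustrative, not part of the patch):

    import os
    import signal

    # Sketch: block SIGUSR1, raise it, then unblock so the Python-level
    # handler runs once the mask is cleared.
    hits = []
    old = signal.signal(signal.SIGUSR1,
                        lambda signum, frame: hits.append(signum))
    try:
        signal.pthread_sigmask(signal.SIG_BLOCK, [signal.SIGUSR1])
        os.kill(os.getpid(), signal.SIGUSR1)   # stays pending while blocked
        signal.pthread_sigmask(signal.SIG_UNBLOCK, [signal.SIGUSR1])
        assert hits == [signal.SIGUSR1]
    finally:
        signal.signal(signal.SIGUSR1, old)
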
diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py
--- a/Lib/test/test_socket.py
+++ b/Lib/test/test_socket.py
@@ -24,14 +24,6 @@
 except ImportError:
     fcntl = False
 
-def linux_version():
-    try:
-        # platform.release() is something like '2.6.33.7-desktop-2mnb'
-        version_string = platform.release().split('-')[0]
-        return tuple(map(int, version_string.split('.')))
-    except ValueError:
-        return 0, 0, 0
-
 HOST = support.HOST
 MSG = 'Michael Gilfix was here\u1234\r\n'.encode('utf-8') ## test unicode string and carriage return
 
@@ -1031,11 +1023,8 @@
         pass
 
     if hasattr(socket, "SOCK_NONBLOCK"):
+        @support.requires_linux_version(2, 6, 28)
         def testInitNonBlocking(self):
-            v = linux_version()
-            if v < (2, 6, 28):
-                self.skipTest("Linux kernel 2.6.28 or higher required, not %s"
-                              % ".".join(map(str, v)))
             # reinit server socket
             self.serv.close()
             self.serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM |
@@ -2009,11 +1998,8 @@
                      "SOCK_CLOEXEC not defined")
 @unittest.skipUnless(fcntl, "module fcntl not available")
 class CloexecConstantTest(unittest.TestCase):
+    @support.requires_linux_version(2, 6, 28)
     def test_SOCK_CLOEXEC(self):
-        v = linux_version()
-        if v < (2, 6, 28):
-            self.skipTest("Linux kernel 2.6.28 or higher required, not %s"
-                          % ".".join(map(str, v)))
         with socket.socket(socket.AF_INET,
                            socket.SOCK_STREAM | socket.SOCK_CLOEXEC) as s:
             self.assertTrue(s.type & socket.SOCK_CLOEXEC)
@@ -2031,11 +2017,8 @@
             self.assertFalse(s.type & socket.SOCK_NONBLOCK)
             self.assertEqual(s.gettimeout(), None)
 
+    @support.requires_linux_version(2, 6, 28)
     def test_SOCK_NONBLOCK(self):
-        v = linux_version()
-        if v < (2, 6, 28):
-            self.skipTest("Linux kernel 2.6.28 or higher required, not %s"
-                          % ".".join(map(str, v)))
         # a lot of it seems silly and redundant, but I wanted to test that
         # changing back and forth worked ok
         with socket.socket(socket.AF_INET,
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -102,6 +102,16 @@
             sys.stdout.write("\n RAND_status is %d (%s)\n"
                              % (v, (v and "sufficient randomness") or
                                 "insufficient randomness"))
+
+        data, is_cryptographic = ssl.RAND_pseudo_bytes(16)
+        self.assertEqual(len(data), 16)
+        self.assertEqual(is_cryptographic, v == 1)
+        if v:
+            data = ssl.RAND_bytes(16)
+            self.assertEqual(len(data), 16)
+        else:
+            self.assertRaises(ssl.SSLError, ssl.RAND_bytes, 16)
+
         try:
             ssl.RAND_egd(1)
         except TypeError:
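
The hunk above exercises the new ssl.RAND_bytes() and ssl.RAND_pseudo_bytes()
wrappers. A brief illustrative sketch of what they return:

    import ssl

    # Sketch: RAND_pseudo_bytes() also reports whether the bytes are
    # cryptographically strong; RAND_bytes() raises SSLError when the
    # OpenSSL PRNG has not been seeded.
    data, is_cryptographic = ssl.RAND_pseudo_bytes(16)
    assert len(data) == 16
    if ssl.RAND_status():
        assert len(ssl.RAND_bytes(16)) == 16
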
diff --git a/Lib/test/test_string.py b/Lib/test/test_string.py
--- a/Lib/test/test_string.py
+++ b/Lib/test/test_string.py
@@ -26,15 +26,38 @@
         self.assertEqual(string.capwords('\taBc\tDeF\t'), 'Abc Def')
         self.assertEqual(string.capwords('\taBc\tDeF\t', '\t'), '\tAbc\tDef\t')
 
-    def test_formatter(self):
+    def test_basic_formatter(self):
         fmt = string.Formatter()
         self.assertEqual(fmt.format("foo"), "foo")
-
         self.assertEqual(fmt.format("foo{0}", "bar"), "foobar")
         self.assertEqual(fmt.format("foo{1}{0}-{1}", "bar", 6), "foo6bar-6")
+
+    def test_conversion_specifiers(self):
+        fmt = string.Formatter()
         self.assertEqual(fmt.format("-{arg!r}-", arg='test'), "-'test'-")
+        self.assertEqual(fmt.format("{0!s}", 'test'), 'test')
+        self.assertRaises(ValueError, fmt.format, "{0!h}", 'test')
 
-        # override get_value ############################################
+    def test_name_lookup(self):
+        fmt = string.Formatter()
+        class AnyAttr:
+            def __getattr__(self, attr):
+                return attr
+        x = AnyAttr()
+        self.assertEqual(fmt.format("{0.lumber}{0.jack}", x), 'lumberjack')
+        with self.assertRaises(AttributeError):
+            fmt.format("{0.lumber}{0.jack}", '')
+
+    def test_index_lookup(self):
+        fmt = string.Formatter()
+        lookup = ["eggs", "and", "spam"]
+        self.assertEqual(fmt.format("{0[2]}{0[0]}", lookup), 'spameggs')
+        with self.assertRaises(IndexError):
+            fmt.format("{0[2]}{0[0]}", [])
+        with self.assertRaises(KeyError):
+            fmt.format("{0[2]}{0[0]}", {})
+
+    def test_override_get_value(self):
         class NamespaceFormatter(string.Formatter):
             def __init__(self, namespace={}):
                 string.Formatter.__init__(self)
@@ -54,7 +77,7 @@
         self.assertEqual(fmt.format("{greeting}, world!"), 'hello, world!')
 
 
-        # override format_field #########################################
+    def test_override_format_field(self):
         class CallFormatter(string.Formatter):
             def format_field(self, value, format_spec):
                 return format(value(), format_spec)
@@ -63,18 +86,18 @@
         self.assertEqual(fmt.format('*{0}*', lambda : 'result'), '*result*')
 
 
-        # override convert_field ########################################
+    def test_override_convert_field(self):
         class XFormatter(string.Formatter):
             def convert_field(self, value, conversion):
                 if conversion == 'x':
                     return None
-                return super(XFormatter, self).convert_field(value, conversion)
+                return super().convert_field(value, conversion)
 
         fmt = XFormatter()
         self.assertEqual(fmt.format("{0!r}:{0!x}", 'foo', 'foo'), "'foo':None")
 
 
-        # override parse ################################################
+    def test_override_parse(self):
         class BarFormatter(string.Formatter):
             # returns an iterable that contains tuples of the form:
             # (literal_text, field_name, format_spec, conversion)
@@ -90,7 +113,7 @@
         fmt = BarFormatter()
         self.assertEqual(fmt.format('*|+0:^10s|*', 'foo'), '*   foo    *')
 
-        # test all parameters used
+    def test_check_unused_args(self):
         class CheckAllUsedFormatter(string.Formatter):
             def check_unused_args(self, used_args, args, kwargs):
                 # Track which arguments actually got used
@@ -112,28 +135,13 @@
         self.assertRaises(ValueError, fmt.format, "{0}", 10, 20, i=100)
         self.assertRaises(ValueError, fmt.format, "{i}", 10, 20, i=100)
 
-    def test_vformat_assert(self):
-        cls = string.Formatter()
-        kwargs = {
-            "i": 100
-        }
-        self.assertRaises(ValueError, cls._vformat,
-                cls.format, "{0}", kwargs, set(), -2)
-
-    def test_convert_field(self):
-        cls = string.Formatter()
-        self.assertEqual(cls.format("{0!s}", 'foo'), 'foo')
-        self.assertRaises(ValueError, cls.format, "{0!h}", 'foo')
-
-    def test_get_field(self):
-        cls = string.Formatter()
-        class MyClass:
-            name = 'lumberjack'
-        x = MyClass()
-        self.assertEqual(cls.format("{0.name}", x), 'lumberjack')
-
-        lookup = ["eggs", "and", "spam"]
-        self.assertEqual(cls.format("{0[2]}", lookup), 'spam')
+    def test_vformat_recursion_limit(self):
+        fmt = string.Formatter()
+        args = ()
+        kwargs = dict(i=100)
+        with self.assertRaises(ValueError) as err:
+            fmt._vformat("{i}", args, kwargs, set(), -1)
+        self.assertIn("recursion", str(err.exception))
 
 
 def test_main():
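
The reorganized tests above split the Formatter checks by feature and exercise its
override hooks (get_value, format_field, convert_field, parse, check_unused_args).
A minimal sketch of one such subclass, mirroring test_override_get_value
(illustrative only, not part of the patch):

    import string

    # Sketch: resolve {names} from a supplied namespace by overriding
    # get_value(), the same hook the test above exercises.
    class NamespaceFormatter(string.Formatter):
        def __init__(self, namespace):
            super().__init__()
            self.namespace = namespace

        def get_value(self, key, args, kwargs):
            if isinstance(key, str) and key in self.namespace:
                return self.namespace[key]
            return super().get_value(key, args, kwargs)

    fmt = NamespaceFormatter({'greeting': 'hello'})
    assert fmt.format('{greeting}, world!') == 'hello, world!'
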
diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py
--- a/Lib/test/test_subprocess.py
+++ b/Lib/test/test_subprocess.py
@@ -146,6 +146,16 @@
                              env=newenv)
         self.assertEqual(rc, 1)
 
+    def test_invalid_args(self):
+        # Popen() called with invalid arguments should raise TypeError
+        # but Popen.__del__ should not complain (issue #12085)
+        with support.captured_stderr() as s:
+            self.assertRaises(TypeError, subprocess.Popen, invalid_arg_name=1)
+            argcount = subprocess.Popen.__init__.__code__.co_argcount
+            too_many_args = [0] * (argcount + 1)
+            self.assertRaises(TypeError, subprocess.Popen, *too_many_args)
+        self.assertEqual(s.getvalue(), '')
+
     def test_stdin_none(self):
         # .stdin is None when not redirected
         p = subprocess.Popen([sys.executable, "-c", 'print("banana")'],
@@ -489,24 +499,21 @@
         # This test will probably deadlock rather than fail, if
         # communicate() does not work properly.
         x, y = os.pipe()
-        if mswindows:
-            pipe_buf = 512
-        else:
-            pipe_buf = os.fpathconf(x, "PC_PIPE_BUF")
         os.close(x)
         os.close(y)
         p = subprocess.Popen([sys.executable, "-c",
                               'import sys,os;'
                               'sys.stdout.write(sys.stdin.read(47));'
-                              'sys.stderr.write("xyz"*%d);'
-                              'sys.stdout.write(sys.stdin.read())' % pipe_buf],
+                              'sys.stderr.write("x" * %d);'
+                              'sys.stdout.write(sys.stdin.read())' %
+                              support.PIPE_MAX_SIZE],
                              stdin=subprocess.PIPE,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
         self.addCleanup(p.stdout.close)
         self.addCleanup(p.stderr.close)
         self.addCleanup(p.stdin.close)
-        string_to_write = b"abc"*pipe_buf
+        string_to_write = b"a" * support.PIPE_MAX_SIZE
         (stdout, stderr) = p.communicate(string_to_write)
         self.assertEqual(stdout, string_to_write)
 
@@ -1274,6 +1281,11 @@
                          "Some fds were left open")
         self.assertIn(1, remaining_fds, "Subprocess failed")
 
+    # Mac OS X Tiger (10.4) has a kernel bug: sometimes, the file
+    # descriptor of a pipe closed in the parent process is valid in the
+    # child process according to fstat(), but the mode of the file
+    # descriptor is invalid, and read or write raise an error.
+    @support.requires_mac_ver(10, 5)
     def test_pass_fds(self):
         fd_status = support.findfile("fd_status.py", subdir="subprocessdata")
 
@@ -1495,20 +1507,6 @@
         ProcessTestCase.tearDown(self)
 
 
-@unittest.skipUnless(getattr(subprocess, '_posixsubprocess', False),
-                     "_posixsubprocess extension module not found.")
-class ProcessTestCasePOSIXPurePython(ProcessTestCase, POSIXProcessTestCase):
-    def setUp(self):
-        subprocess._posixsubprocess = None
-        ProcessTestCase.setUp(self)
-        POSIXProcessTestCase.setUp(self)
-
-    def tearDown(self):
-        subprocess._posixsubprocess = sys.modules['_posixsubprocess']
-        POSIXProcessTestCase.tearDown(self)
-        ProcessTestCase.tearDown(self)
-
-
 class HelperFunctionTests(unittest.TestCase):
     @unittest.skipIf(mswindows, "errno and EINTR make no sense on windows")
     def test_eintr_retry_call(self):
@@ -1617,7 +1615,6 @@
     unit_tests = (ProcessTestCase,
                   POSIXProcessTestCase,
                   Win32ProcessTestCase,
-                  ProcessTestCasePOSIXPurePython,
                   CommandTests,
                   ProcessTestCaseNoPoll,
                   HelperFunctionTests,
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py
--- a/Lib/test/test_sys.py
+++ b/Lib/test/test_sys.py
@@ -343,7 +343,7 @@
     # Test sys._current_frames() in a WITH_THREADS build.
     @test.support.reap_threads
     def current_frames_with_threads(self):
-        import threading, _thread
+        import threading
         import traceback
 
         # Spawn a thread that blocks at a known place.  Then the main
@@ -357,7 +357,7 @@
             g456()
 
         def g456():
-            thread_info.append(_thread.get_ident())
+            thread_info.append(threading.get_ident())
             entered_g.set()
             leave_g.wait()
 
@@ -373,7 +373,7 @@
 
         d = sys._current_frames()
 
-        main_id = _thread.get_ident()
+        main_id = threading.get_ident()
         self.assertIn(main_id, d)
         self.assertIn(thread_id, d)
 
diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py
--- a/Lib/test/test_sysconfig.py
+++ b/Lib/test/test_sysconfig.py
@@ -1,9 +1,3 @@
-"""Tests for 'site'.
-
-Tests assume the initial paths in sys.path once the interpreter has begun
-executing have not been removed.
-
-"""
 import unittest
 import sys
 import os
@@ -20,13 +14,12 @@
                        _get_default_scheme, _expand_vars,
                        get_scheme_names, get_config_var, _main)
 
+
 class TestSysConfig(unittest.TestCase):
 
     def setUp(self):
-        """Make a copy of sys.path"""
         super(TestSysConfig, self).setUp()
         self.sys_path = sys.path[:]
-        self.makefile = None
         # patching os.uname
         if hasattr(os, 'uname'):
             self.uname = os.uname
@@ -53,10 +46,7 @@
                 self._added_envvars.append(var)
 
     def tearDown(self):
-        """Restore sys.path"""
         sys.path[:] = self.sys_path
-        if self.makefile is not None:
-            os.unlink(self.makefile)
         self._cleanup_testfn()
         if self.uname is not None:
             os.uname = self.uname
@@ -145,8 +135,6 @@
                    ('Darwin Kernel Version 8.11.1: '
                     'Wed Oct 10 18:23:28 PDT 2007; '
                     'root:xnu-792.25.20~1/RELEASE_I386'), 'PowerPC'))
-
-
         get_config_vars()['MACOSX_DEPLOYMENT_TARGET'] = '10.3'
 
         get_config_vars()['CFLAGS'] = ('-fno-strict-aliasing -DNDEBUG -g '
@@ -161,7 +149,6 @@
         finally:
             sys.maxsize = maxint
 
-
         self._set_uname(('Darwin', 'macziade', '8.11.1',
                    ('Darwin Kernel Version 8.11.1: '
                     'Wed Oct 10 18:23:28 PDT 2007; '
@@ -219,9 +206,9 @@
             get_config_vars()['CFLAGS'] = ('-arch %s -isysroot '
                                            '/Developer/SDKs/MacOSX10.4u.sdk  '
                                            '-fno-strict-aliasing -fno-common '
-                                           '-dynamic -DNDEBUG -g -O3'%(arch,))
+                                           '-dynamic -DNDEBUG -g -O3' % arch)
 
-            self.assertEqual(get_platform(), 'macosx-10.4-%s'%(arch,))
+            self.assertEqual(get_platform(), 'macosx-10.4-%s' % arch)
 
         # linux debian sarge
         os.name = 'posix'
@@ -239,12 +226,6 @@
         config_h = sysconfig.get_config_h_filename()
         self.assertTrue(os.path.isfile(config_h), config_h)
 
-    @unittest.skipIf(sys.platform.startswith('win'),
-                     'Test is not Windows compatible')
-    def test_get_makefile_filename(self):
-        makefile = sysconfig.get_makefile_filename()
-        self.assertTrue(os.path.isfile(makefile), makefile)
-
     def test_get_scheme_names(self):
         wanted = ('nt', 'nt_user', 'os2', 'os2_home', 'osx_framework_user',
                   'posix_home', 'posix_prefix', 'posix_user')
@@ -295,7 +276,6 @@
 
         self.assertIn(ldflags, ldshared)
 
-
     @unittest.skipUnless(sys.platform == "darwin", "test only relevant on MacOSX")
     def test_platform_in_subprocess(self):
         my_platform = sysconfig.get_platform()
@@ -321,7 +301,6 @@
         self.assertEqual(status, 0)
         self.assertEqual(my_platform, test_platform)
 
-
         # Test with MACOSX_DEPLOYMENT_TARGET in the environment, and
         # using a value that is unlikely to be the default one.
         env = os.environ.copy()
@@ -342,10 +321,34 @@
         self.assertEqual(my_platform, test_platform)
 
 
+class MakefileTests(unittest.TestCase):
+
+    @unittest.skipIf(sys.platform.startswith('win'),
+                     'Test is not Windows compatible')
+    def test_get_makefile_filename(self):
+        makefile = sysconfig.get_makefile_filename()
+        self.assertTrue(os.path.isfile(makefile), makefile)
+
+    def test_parse_makefile(self):
+        self.addCleanup(unlink, TESTFN)
+        with open(TESTFN, "w") as makefile:
+            print("var1=a$(VAR2)", file=makefile)
+            print("VAR2=b$(var3)", file=makefile)
+            print("var3=42", file=makefile)
+            print("var4=$/invalid", file=makefile)
+            print("var5=dollar$$5", file=makefile)
+        vars = sysconfig._parse_makefile(TESTFN)
+        self.assertEqual(vars, {
+            'var1': 'ab42',
+            'VAR2': 'b42',
+            'var3': 42,
+            'var4': '$/invalid',
+            'var5': 'dollar$5',
+        })
 
 
 def test_main():
-    run_unittest(TestSysConfig)
+    run_unittest(TestSysConfig, MakefileTests)
 
 if __name__ == "__main__":
     test_main()
diff --git a/Lib/test/test_threaded_import.py b/Lib/test/test_threaded_import.py
--- a/Lib/test/test_threaded_import.py
+++ b/Lib/test/test_threaded_import.py
@@ -30,7 +30,7 @@
     except Exception as e:
         errors.append(e.with_traceback(None))
     finally:
-        done_tasks.append(thread.get_ident())
+        done_tasks.append(threading.get_ident())
         finished = len(done_tasks) == N
         if finished:
             done.set()
diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py
--- a/Lib/test/test_threading.py
+++ b/Lib/test/test_threading.py
@@ -12,6 +12,7 @@
 import weakref
 import os
 from test.script_helper import assert_python_ok, assert_python_failure
+import subprocess
 
 from test import lock_tests
 
@@ -172,7 +173,7 @@
         exception = ctypes.py_object(AsyncExc)
 
         # First check it works when setting the exception from the same thread.
-        tid = _thread.get_ident()
+        tid = threading.get_ident()
 
         try:
             result = set_async_exc(ctypes.c_long(tid), exception)
@@ -201,7 +202,7 @@
 
         class Worker(threading.Thread):
             def run(self):
-                self.id = _thread.get_ident()
+                self.id = threading.get_ident()
                 self.finished = False
 
                 try:
@@ -703,6 +704,37 @@
         lock = threading.Lock()
         self.assertRaises(RuntimeError, lock.release)
 
+    @unittest.skipUnless(sys.platform == 'darwin', 'test macosx problem')
+    def test_recursion_limit(self):
+        # Issue 9670
+        # test that excessive recursion within a non-main thread causes
+        # an exception rather than crashing the interpreter on platforms
+        # like Mac OS X or FreeBSD which have small default stack sizes
+        # for threads
+        script = """if True:
+            import threading
+
+            def recurse():
+                return recurse()
+
+            def outer():
+                try:
+                    recurse()
+                except RuntimeError:
+                    pass
+
+            w = threading.Thread(target=outer)
+            w.start()
+            w.join()
+            print('end of main thread')
+            """
+        expected_output = "end of main thread\n"
+        p = subprocess.Popen([sys.executable, "-c", script],
+                             stdout=subprocess.PIPE)
+        stdout, stderr = p.communicate()
+        data = stdout.decode().replace('\r', '')
+        self.assertEqual(p.returncode, 0, "Unexpected error")
+        self.assertEqual(data, expected_output)
 
 class LockTests(lock_tests.LockTests):
     locktype = staticmethod(threading.Lock)
diff --git a/Lib/threading.py b/Lib/threading.py
--- a/Lib/threading.py
+++ b/Lib/threading.py
@@ -24,7 +24,7 @@
 # Rename some stuff so "from threading import *" is safe
 _start_new_thread = _thread.start_new_thread
 _allocate_lock = _thread.allocate_lock
-_get_ident = _thread.get_ident
+get_ident = _thread.get_ident
 ThreadError = _thread.error
 try:
     _CRLock = _thread.RLock
@@ -52,7 +52,7 @@
                 format = format % args
                 # Issue #4188: calling current_thread() can incur an infinite
                 # recursion if it has to create a DummyThread on the fly.
-                ident = _get_ident()
+                ident = get_ident()
                 try:
                     name = _active[ident].name
                 except KeyError:
@@ -110,7 +110,7 @@
                 self.__class__.__name__, owner, self._count)
 
     def acquire(self, blocking=True, timeout=-1):
-        me = _get_ident()
+        me = get_ident()
         if self._owner == me:
             self._count = self._count + 1
             if __debug__:
@@ -130,7 +130,7 @@
     __enter__ = acquire
 
     def release(self):
-        if self._owner != _get_ident():
+        if self._owner != get_ident():
             raise RuntimeError("cannot release un-acquired lock")
         self._count = count = self._count - 1
         if not count:
@@ -166,7 +166,7 @@
         return (count, owner)
 
     def _is_owned(self):
-        return self._owner == _get_ident()
+        return self._owner == get_ident()
 
 _PyRLock = _RLock
 
@@ -714,7 +714,7 @@
             raise
 
     def _set_ident(self):
-        self._ident = _get_ident()
+        self._ident = get_ident()
 
     def _bootstrap_inner(self):
         try:
@@ -787,7 +787,7 @@
                 try:
                     # We don't call self._delete() because it also
                     # grabs _active_limbo_lock.
-                    del _active[_get_ident()]
+                    del _active[get_ident()]
                 except:
                     pass
 
@@ -823,7 +823,7 @@
 
         try:
             with _active_limbo_lock:
-                del _active[_get_ident()]
+                del _active[get_ident()]
                 # There must not be any python code between the previous line
                 # and after the lock is released.  Otherwise a tracing function
                 # could try to acquire the lock again in the same thread, (in
@@ -1006,9 +1006,8 @@
 
 def current_thread():
     try:
-        return _active[_get_ident()]
+        return _active[get_ident()]
     except KeyError:
-        ##print "current_thread(): no current thread for", _get_ident()
         return _DummyThread()
 
 currentThread = current_thread
@@ -1062,7 +1061,7 @@
             if thread is current:
                 # There is only one active thread. We reset the ident to
                 # its new value since it can have changed.
-                ident = _get_ident()
+                ident = get_ident()
                 thread._ident = ident
                 # Any condition variables hanging off of the active thread may
                 # be in an invalid state, so we reinitialize them.
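
With the rename above, the thread identifier helper becomes public API: threading.get_ident() is simply _thread.get_ident() re-exported, and the old private _get_ident spelling disappears from threading.py. A minimal usage sketch (the results registry is illustrative only):

    import threading

    results = {}   # illustrative per-thread result registry keyed by thread id

    def worker():
        # get_ident() returns a non-zero integer unique to the calling thread
        results[threading.get_ident()] = "done"

    t = threading.Thread(target=worker)
    t.start()
    t.join()
    print(results)   # e.g. {140254379529984: 'done'} -- the key varies per run
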
diff --git a/Lib/tkinter/__init__.py b/Lib/tkinter/__init__.py
--- a/Lib/tkinter/__init__.py
+++ b/Lib/tkinter/__init__.py
@@ -30,8 +30,6 @@
 tk.mainloop()
 """
 
-__version__ = "$Revision$"
-
 import sys
 if sys.platform == "win32":
     # Attempt to configure Tcl/Tk without requiring PATH
diff --git a/Lib/wsgiref.egg-info b/Lib/wsgiref.egg-info
deleted file mode 100644
--- a/Lib/wsgiref.egg-info
+++ /dev/null
@@ -1,8 +0,0 @@
-Metadata-Version: 1.0
-Name: wsgiref
-Version: 0.1.2
-Summary: WSGI (PEP 333) Reference Library
-Author: Phillip J. Eby
-Author-email: web-sig at python.org
-License: PSF or ZPL
-Platform: UNKNOWN
diff --git a/Lib/xml/parsers/expat.py b/Lib/xml/parsers/expat.py
--- a/Lib/xml/parsers/expat.py
+++ b/Lib/xml/parsers/expat.py
@@ -1,6 +1,4 @@
 """Interface to the Expat non-validating XML parser."""
-__version__ = '$Revision$'
-
 import sys
 
 from pyexpat import *
diff --git a/Mac/Makefile.in b/Mac/Makefile.in
--- a/Mac/Makefile.in
+++ b/Mac/Makefile.in
@@ -76,6 +76,13 @@
 	do \
 		ln -fs "$(prefix)/bin/$${fn}" "$(DESTDIR)$(FRAMEWORKUNIXTOOLSPREFIX)/bin/$${fn}" ;\
 	done
+ifneq ($(LIPO_32BIT_FLAGS),)
+	for fn in python3-32 pythonw3-32 \
+		  python$(VERSION)-32 pythonw$(VERSION)-32 ;\
+	do \
+		ln -fs "$(prefix)/bin/$${fn}" "$(DESTDIR)$(FRAMEWORKUNIXTOOLSPREFIX)/bin/$${fn}" ;\
+	done
+endif
 
 
 #
@@ -90,6 +97,12 @@
 	do \
 		ln -fs "$(prefix)/bin/$${fn}" "$(DESTDIR)$(FRAMEWORKUNIXTOOLSPREFIX)/bin/$${fn}" ;\
 	done
+ifneq ($(LIPO_32BIT_FLAGS),)
+	for fn in python$(VERSION)-32 pythonw$(VERSION)-32 ;\
+	do \
+		ln -fs "$(prefix)/bin/$${fn}" "$(DESTDIR)$(FRAMEWORKUNIXTOOLSPREFIX)/bin/$${fn}" ;\
+	done
+endif
 
 pythonw: $(srcdir)/Tools/pythonw.c Makefile
 	$(CC) $(LDFLAGS) -DPYTHONFRAMEWORK='"$(PYTHONFRAMEWORK)"' -o $@ $(srcdir)/Tools/pythonw.c -I.. -I$(srcdir)/../Include ../$(PYTHONFRAMEWORK).framework/Versions/$(VERSION)/$(PYTHONFRAMEWORK)
diff --git a/Makefile.pre.in b/Makefile.pre.in
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -274,7 +274,7 @@
 
 ASDLGEN_FILES=	$(srcdir)/Parser/asdl.py $(srcdir)/Parser/asdl_c.py
 # XXX Note that a build now requires Python exist before the build starts
-ASDLGEN=	$(srcdir)/Parser/asdl_c.py
+ASDLGEN=	@DISABLE_ASDLGEN@ $(srcdir)/Parser/asdl_c.py
 
 ##########################################################################
 # Python
@@ -755,7 +755,7 @@
 # generated bytecode.  This is sometimes a very shy bug needing a lot of
 # sample data.
 
-TESTOPTS=	-l $(EXTRATESTOPTS)
+TESTOPTS=	-l --timeout=3600 $(EXTRATESTOPTS)
 TESTPROG=	$(srcdir)/Lib/test/regrtest.py
 TESTPYTHON=	$(RUNSHARED) ./$(BUILDPYTHON) -Wd -E -bb $(TESTPYTHONOPTS)
 test:		all platform
@@ -888,6 +888,8 @@
 	(cd $(DESTDIR)$(BINDIR); $(LN) -s pydoc$(VERSION) pydoc3)
 	-rm -f $(DESTDIR)$(BINDIR)/2to3
 	(cd $(DESTDIR)$(BINDIR); $(LN) -s 2to3-$(VERSION) 2to3)
+	-rm -f $(DESTDIR)$(BINDIR)/pysetup3
+	(cd $(DESTDIR)$(BINDIR); $(LN) -s pysetup$(VERSION) pysetup3)
 
 # Install the manual page
 maninstall:
@@ -908,8 +910,8 @@
 MACHDEPS=	$(PLATDIR) $(EXTRAPLATDIR)
 XMLLIBSUBDIRS=  xml xml/dom xml/etree xml/parsers xml/sax
 LIBSUBDIRS=	tkinter tkinter/test tkinter/test/test_tkinter \
-                tkinter/test/test_ttk site-packages test \
-		test/decimaltestdata test/xmltestdata test/subprocessdata \
+		tkinter/test/test_ttk site-packages test \
+		test/cjkencodings test/decimaltestdata test/xmltestdata test/subprocessdata \
 		test/tracedmodules test/encoded_modules \
 		collections concurrent concurrent/futures encodings \
 		email email/mime email/test email/test/data \
diff --git a/Misc/ACKS b/Misc/ACKS
--- a/Misc/ACKS
+++ b/Misc/ACKS
@@ -12,7 +12,9 @@
 and the list is in rough alphabetical order by last names.
 
 David Abrahams
+Rajiv Abraham
 Ron Adam
+Ali Afshar
 Jim Ahlstrom
 Farhan Ahmad
 Matthew Ahrens
@@ -58,6 +60,7 @@
 Cesar Eduardo Barros
 Des Barry
 Ulf Bartelt
+Pior Bastida
 Nick Bastin
 Jeff Bauer
 Mike Bayer
@@ -122,6 +125,7 @@
 Daniel Brotsky
 Jean Brouwers
 Gary S. Brown
+Titus Brown
 Oleg Broytmann
 Dave Brueck
 Stan Bubrouski
@@ -135,6 +139,7 @@
 Tarn Weisner Burton
 Lee Busby
 Ralph Butler
+Nicolas Cadou
 Jp Calderone
 Daniel Calvelo
 Tony Campbell
@@ -153,6 +158,7 @@
 Mitch Chapman
 Greg Chapman
 Brad Chapman
+Godefroid Chapelle
 David Chaum
 Nicolas Chauvat
 Jerry Chen
@@ -176,6 +182,7 @@
 Jeffery Collins
 Robert Collins
 Paul Colomiets
+Christophe Combelles
 Denver Coneybeare
 Geremy Condra
 Juan José Conti
@@ -206,6 +213,7 @@
 Lars Damerow
 Evan Dandrea
 Eric Daniel
+Pierre-Yves David
 Scott David Daniels
 Ben Darnell
 Jonathan Dasteel
@@ -213,6 +221,7 @@
 Ned Deily
 Vincent Delft
 Arnaud Delobelle
+Konrad Delong
 Erik Demaine
 Roger Dev
 Raghuram Devarakonda
@@ -226,6 +235,7 @@
 Humberto Diogenes
 Yves Dionne
 Daniel Dittmar
+Josip Djolonga
 Jaromir Dolecek
 Ismail Donmez
 Marcos Donolo
@@ -245,7 +255,6 @@
 Andy Dustman
 Gary Duzan
 Eugene Dvurechenski
-Josip Dzolonga
 Maxim Dzumanenko
 Walter Dörwald
 Hans Eckardt
@@ -264,6 +273,7 @@
 Michael Ernst
 Ben Escoto
 Andy Eskilsson
+André Espaze
 Stefan Esser
 Stephen D Evans
 Carey Evans
@@ -277,8 +287,10 @@
 Clovis Fabricio
 Andreas Faerber
 Bill Fancher
+Andrew Francis
 Troy J. Farrell
 Mark Favas
+Boris Feld
 Niels Ferguson
 Sebastian Fernandez
 Florian Festi
@@ -328,6 +340,7 @@
 Jonathan Giddy
 Johannes Gijsbers
 Michael Gilfix
+Yannick Gingras
 Christoph Gohlke
 Tim Golden
 Chris Gonnerman
@@ -351,6 +364,7 @@
 Bob Halley
 Jesse Hallio
 Jun Hamano
+Alexandre Hamelin
 Mark Hammond
 Manus Hand
 Milton L. Hankins
@@ -382,6 +396,7 @@
 Magnus L. Hetland
 Raymond Hettinger
 Kevan Heydon
+Kelsey Hightower
 Jason Hildebrand
 Richie Hindle
 Konrad Hinsen
@@ -409,6 +424,7 @@
 Ken Howard
 Brad Howes
 Chih-Hao Huang
+Christian Hudon
 Lawrence Hudson
 Michael Hudson
 Jim Hugunin
@@ -417,6 +433,7 @@
 Jeremy Hylton
 Gerhard Häring
 Fredrik Håård
+Catalin Iacob
 Mihai Ibanescu
 Lars Immisch
 Bobby Impollonia
@@ -435,6 +452,7 @@
 Geert Jansen
 Jack Jansen
 Bill Janssen
+Julien Jehannet
 Drew Jenkins
 Flemming Kjær Jensen
 MunSic Jeong
@@ -483,6 +501,7 @@
 Bastian Kleineidam
 Bob Kline
 Matthias Klose
+Jeremy Kloth
 Kim Knapp
 Lenny Kneler
 Pat Knight
@@ -511,11 +530,13 @@
 Andrew Langmead
 Detlef Lannert
 Soren Larsen
+Amos Latteier
 Piers Lauder
 Ben Laurie
 Simon Law
 Chris Lawrence
 Brian Leair
+Mathieu Leduc-Hamel
 James Lee
 John J. Lee
 Inyeol Lee
@@ -531,6 +552,7 @@
 Marc-Andre Lemburg
 John Lenton
 Christopher Tur Lesniewski-Laas
+Alain Leufroy
 Mark Levinson
 William Lewis
 Xuanji Li
@@ -575,6 +597,7 @@
 Sébastien Martini
 Roger Masse
 Nick Mathewson
+Simon Mathieu
 Graham Matthews
 Dieter Maurer
 Arnaud Mazin
@@ -594,7 +617,9 @@
 Ezio Melotti
 Brian Merrell
 Luke Mewburn
+Carl Meyer
 Mike Meyer
+Alexis Métaireau
 Steven Miale
 Trent Mick
 Stan Mihai
@@ -604,20 +629,26 @@
 Jason V. Miller
 Jay T. Miller
 Roman Milner
+Julien Miotte
 Andrii V. Mishkovskyi
 Dustin J. Mitchell
 Dom Mitchell
+Zubin Mithra
 Doug Moen
 The Dragon De Monsyne
 Skip Montanaro
 Paul Moore
 Derek Morr
 James A Morrison
+Derek McTavish Mounce
 Pablo Mouzo
+Mher Movsisyan
 Sjoerd Mullender
 Sape Mullender
 Michael Muller
 Neil Muller
+Michael Mulich
+Louis Munro
 R. David Murray
 Piotr Meyer
 John Nagle
@@ -656,6 +687,7 @@
 Jason Orendorff
 Douglas Orr
 Michele Orrù
+Oleg Oshmyan
 Denis S. Otkidach
 Michael Otteneder
 R. M. Oudkerk
@@ -670,11 +702,14 @@
 Alexandre Parenteau
 Dan Parisien
 Harri Pasanen
+Gaël Pasgrimaud
 Randy Pausch
 Samuele Pedroni
 Marcel van der Peijl
 Steven Pemberton
 Santiago Peresón
+George Peristerakis
+Mathieu Perreault
 Mark Perrego
 Trevor Perrin
 Gabriel de Perthuis
@@ -683,6 +718,7 @@
 Joe Peterson
 Chris Petrilli
 Bjorn Pettersen
+Ronny Pfannschmidt
 Geoff Philbrick
 Gavrie Philipson
 Adrian Phillips
@@ -710,6 +746,7 @@
 Pierre Quentel
 Brian Quinlan
 Anders Qvist
+Jérôme Radix
 Burton Radons
 Brodie Rao
 Antti Rasinen
@@ -729,6 +766,7 @@
 Bernhard Reiter
 Steven Reiz
 Roeland Rengelink
+Antoine Reversat
 Tim Rice
 Francesco Ricciardi
 Jan Pieter Riegel
@@ -743,11 +781,14 @@
 Mark Roddy
 Kevin Rodgers
 Giampaolo Rodola
+Luis Rojas
 Mike Romberg
 Armin Ronacher
 Case Roole
 Timothy Roscoe
+Erik Rose
 Jim Roskind
+Brian Rosner
 Just van Rossum
 Hugo van Rossum
 Saskia van Rossum
diff --git a/Misc/NEWS b/Misc/NEWS
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -10,6 +10,37 @@
 Core and Builtins
 -----------------
 
+- Issue #12225: Still allow Python to build if Python is not in its hg repo or
+  Mercurial is not installed.
+
+- Issue #1195: my_fgets() now always clears errors before calling fgets(). Fix
+  the following case: sys.stdin.read() stopped with CTRL+d (end of file),
+  raw_input() interrupted by CTRL+c.
+
+- Issue #12216: Allow unexpected EOF errors to happen on any line of the file.
+
+- Issue #12199: The TryExcept and TryFinally AST nodes have been unified into
+  a Try node.
+
+- Issue #9670: Increase the default stack size for secondary threads on
+  Mac OS X and FreeBSD to reduce the chances of a crash instead of a
+  "maximum recursion depth" RuntimeError exception.
+  (patch by Ronald Oussoren)
+
+- Issue #12106: The use of the multiple-with shorthand syntax is now reflected
+  in the AST.
+
+- Issue #12190: Try to use the same filename object when compiling or
+  unmarshalling code objects in the same file.
+
+- Issue #12166: Move implementations of dir() specialized for various types into
+  the __dir__() methods of those types.
+
+- Issue #5715: In socketserver, close the server socket in the child process.
+
+- Correct the lookup of __dir__ on objects. Among other things, this causes
+  errors other than AttributeError raised during the lookup to be propagated.
+
 - Issue #12060: Use sig_atomic_t type and volatile keyword in the signal
   module. Patch written by Charles-François Natali.
 
@@ -88,7 +119,7 @@
 - Issue #11320: fix bogus memory management in Modules/getpath.c, leading to
   a possible crash when calling Py_SetPath().
 
-- _ast.__version__ is now a Mercurial integer and hex revision.
+- _ast.__version__ is now a Mercurial hex revision.
 
 - Issue #11432: A bug was introduced in subprocess.Popen on posix systems with
   3.2.0 where the stdout or stderr file descriptor being the same as the stdin
@@ -153,6 +184,74 @@
 Library
 -------
 
+- Issue #12080: Fix a Decimal.power() case that took an unreasonably long time
+  to compute.
+
+- Issue #12221: Remove __version__ attributes from pyexpat, pickle, tarfile,
+  pydoc, tkinter, and xml.parsers.expat. These were useless version constants
+  left over from the Mercurial transition.
+
+- Named tuples now work correctly with vars().
+
+- Issue #12085: Fix an attribute error in subprocess.Popen destructor if the
+  constructor has failed, e.g. because of an undeclared keyword argument. Patch
+  written by Oleg Oshmyan.
+
+- Issue #12028: Make threading._get_ident() public, rename it to
+  threading.get_ident() and document it. This function was already available
+  as _thread.get_ident().
+
+- Issue #12171: IncrementalEncoder.reset() of CJK codecs (multibytecodec) calls
+  encreset() instead of decreset().
+
+- Issue #12218: Removed wsgiref.egg-info.
+
+- Issue #12196: Add pipe2() to the os module.
+
+- Issue #985064: Make plistlib more resilient to faulty input plists.
+  Patch by Mher Movsisyan.
+
+- Issue #1625: BZ2File and bz2.decompress() now support multi-stream files.
+  Initial patch by Nir Aides.
+
+- Issue #12175: BufferedReader.read(-1) now calls raw.readall() if available.
+
+- Issue #12175: FileIO.readall() now only reads the file position and size
+  once.
+
+- Issue #12175: RawIOBase.readall() now returns None if read() returns None.
+
+- Issue #12175: FileIO.readall() now raises a ValueError instead of an IOError
+  if the file is closed.
+
+- Issue #11109: New service_action method for BaseServer, used by ForkingMixin
+  class for cleanup. Initial Patch by Justin Wark.
+
+- Issue #12045: Avoid duplicate execution of command in
+  ctypes.util._get_soname().  Patch by Sijin Joseph.
+
+- Issue #10818: Remove the Tk GUI and the serve() function of the pydoc module;
+  pydoc -g was deprecated in Python 3.2 in favor of the new enhanced web
+  server.
+
+- Issue #1441530: In imaplib, read the data in one chunk to speed up large
+  reads and simplify code.
+
+- Issue #12070: Fix the Makefile parser of the sysconfig module to correctly
+  handle references to "bogus variables" (e.g. "prefix=$/opt/python").
+
+- Issue #12100: Don't reset incremental encoders of CJK codecs at each call to
+  their encode() method anymore, but continue to call the reset() method if the
+  final argument is True.
+
+- Issue #12049: Add RAND_bytes() and RAND_pseudo_bytes() functions to the ssl
+  module.
+
+- Issue #12125: fixed the failures under Solaris due to improper test cleanup.
+
+- Issue #6501: os.device_encoding() returns None on Windows if the application
+  has no console.
+
 - Issue #12132: Skip test_build_ext in case the xxmodule is not found.
 
 - Issue #12105: Add O_CLOEXEC to the os module.
@@ -164,8 +263,8 @@
 - Issue #12124: zipimport doesn't keep a reference to zlib.decompress() anymore
   to be able to unload the module.
 
-- Issue #12120, #12119: skip a test in packaging and distutils
-  if sys.dont_write_bytecode is set to True.
+- Add the packaging module, an improved fork of distutils (also known as
+  distutils2).
 
 - Issue #12065: connect_ex() on an SSL socket now returns the original errno
   when the socket's timeout expires (it used to return None).
@@ -379,11 +478,12 @@
 
 - Issue #7311: fix html.parser to accept non-ASCII attribute values.
 
-- Issue #11605: email.parser.BytesFeedParser was incorrectly converting multipart
-  subpararts with an 8bit CTE into unicode instead of preserving the bytes.
-
-- Issue #1690608: email.util.formataddr is now RFC2047 aware:  it now has a
-  charset parameter that defaults utf-8 which is used as the charset for RFC
+- Issue #11605: email.parser.BytesFeedParser was incorrectly converting
+  multipart subparts with an 8-bit CTE into unicode instead of preserving the
+  bytes.
+
+- Issue #1690608: email.util.formataddr is now RFC 2047 aware:  it now has a
+  charset parameter that defaults to utf-8 and is used as the charset for RFC
   2047 encoding when the realname contains non-ASCII characters.
 
 - Issue #10963: Ensure that subprocess.communicate() never raises EPIPE.
@@ -537,7 +637,7 @@
   the "n" as the flag argument and the file exists. The behavior matches
   the documentation and general logic.
 
-- Issue #1162477: Postel Principal adjustment to email date parsing: handle the
+- Issue #1162477: Postel Principle adjustment to email date parsing: handle the
   fact that some non-compliant MUAs use '.' instead of ':' in time specs.
 
 - Issue #11131: Fix sign of zero in decimal.Decimal plus and minus
@@ -661,6 +761,10 @@
 Build
 -----
 
+- Issue #11217: For 64-bit/32-bit Mac OS X universal framework builds,
+  ensure "make install" creates symlinks in --prefix bin for the "-32"
+  files in the framework bin directory like the installer does.
+
 - Issue #11347: Use --no-as-needed when linking libpython3.so.
 
 - Issue #11411: Fix 'make DESTDIR=' with a relative destination.
@@ -688,6 +792,8 @@
 Extension Modules
 -----------------
 
+- Issue #12221: Replace pyexpat.__version__ with the Python version.
+
 - Issue #12051: Fix segfault in json.dumps() while encoding highly-nested
   objects using the C accelerations.
 
@@ -700,6 +806,15 @@
 Tests
 -----
 
+- Issue #12057: Add tests for ISO 2022 codecs (iso2022_jp, iso2022_jp_2,
+  iso2022_kr).
+
+- Issue #12180: Fixed a few remaining errors in test_packaging when threading
+  is not available.
+
+- Issue #12120, #12119: skip a test in packaging and distutils
+  if sys.dont_write_bytecode is set to True.
+
 - Issue #12096: Fix a race condition in test_threading.test_waitfor(). Patch
   written by Charles-François Natali.
 
@@ -754,12 +869,12 @@
 - New test_crashers added to exercise the scripts in the Lib/test/crashers
   directory and confirm they fail as expected
 
-- Issue #11578: added test for the timeit module.  Patch Michael Henry.
+- Issue #11578: added test for the timeit module.  Patch by Michael Henry.
 
 - Issue #11503: improve test coverage of posixpath.py. Patch by Evan Dandrea.
 
-- Issue #11505: improves test coverage of string.py. Patch by Alicia
-  Arlen.
+- Issue #11505: improves test coverage of string.py, increases granularity of
+  string.Formatter tests. Initial patch by Alicia Arlen.
 
 - Issue #11548: Improve test coverage of the shutil module. Patch by
   Evan Dandrea.
@@ -793,7 +908,11 @@
 C-API
 -----
 
-- PY_PATCHLEVEL_REVISION has been removed, since it's meaningless with Mercurial.
+- PY_PATCHLEVEL_REVISION has been removed, since it's meaningless with
+  Mercurial.
+
+- Issue #12173: The first argument of PyImport_ImportModuleLevel is now `const
+  char *` instead of `char *`.
 
 Documentation
 -------------
@@ -1064,10 +1183,10 @@
   comparisons that could lead to infinite recursion.
 
 - Issue #10686: the email package now :rfc:`2047`\ -encodes headers with
-  non-ASCII bytes (parsed by a Bytes Parser) when doing conversion to 7bit-clean
+  non-ASCII bytes (parsed by a BytesParser) when doing conversion to 7bit-clean
   presentation, instead of replacing them with ?s.
 
-- email.header.Header was incorrectly encoding folding white space when
+- email.header.Header was incorrectly encoding folding whitespace when
   rfc2047-encoding header values with embedded newlines, leaving them without
   folding whitespace.  It now uses the continuation_ws, as it does for
   continuation lines that it creates itself.
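
Several of the library entries above describe new user-visible behaviour; as one example, the Issue #1625 entry means data made of several concatenated bz2 streams can now be decompressed in one call. A hedged sketch of that behaviour as described by the entry:

    import bz2

    # Two independently compressed streams, concatenated back to back.
    data = bz2.compress(b"first stream ") + bz2.compress(b"second stream")

    # With multi-stream support both streams are decoded; previously only the
    # first stream would have been returned.
    assert bz2.decompress(data) == b"first stream second stream"
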
diff --git a/Modules/_io/_iomodule.c b/Modules/_io/_iomodule.c
--- a/Modules/_io/_iomodule.c
+++ b/Modules/_io/_iomodule.c
@@ -36,6 +36,7 @@
 PyObject *_PyIO_str_read;
 PyObject *_PyIO_str_read1;
 PyObject *_PyIO_str_readable;
+PyObject *_PyIO_str_readall;
 PyObject *_PyIO_str_readinto;
 PyObject *_PyIO_str_readline;
 PyObject *_PyIO_str_reset;
@@ -767,6 +768,8 @@
         goto fail;
     if (!(_PyIO_str_readable = PyUnicode_InternFromString("readable")))
         goto fail;
+    if (!(_PyIO_str_readall = PyUnicode_InternFromString("readall")))
+        goto fail;
     if (!(_PyIO_str_readinto = PyUnicode_InternFromString("readinto")))
         goto fail;
     if (!(_PyIO_str_readline = PyUnicode_InternFromString("readline")))
diff --git a/Modules/_io/_iomodule.h b/Modules/_io/_iomodule.h
--- a/Modules/_io/_iomodule.h
+++ b/Modules/_io/_iomodule.h
@@ -155,6 +155,7 @@
 extern PyObject *_PyIO_str_read;
 extern PyObject *_PyIO_str_read1;
 extern PyObject *_PyIO_str_readable;
+extern PyObject *_PyIO_str_readall;
 extern PyObject *_PyIO_str_readinto;
 extern PyObject *_PyIO_str_readline;
 extern PyObject *_PyIO_str_reset;
diff --git a/Modules/_io/bufferedio.c b/Modules/_io/bufferedio.c
--- a/Modules/_io/bufferedio.c
+++ b/Modules/_io/bufferedio.c
@@ -589,7 +589,7 @@
 static void
 _bufferedwriter_reset_buf(buffered *self);
 static PyObject *
-_bufferedreader_peek_unlocked(buffered *self, Py_ssize_t);
+_bufferedreader_peek_unlocked(buffered *self);
 static PyObject *
 _bufferedreader_read_all(buffered *self);
 static PyObject *
@@ -797,7 +797,7 @@
             goto end;
         Py_CLEAR(res);
     }
-    res = _bufferedreader_peek_unlocked(self, n);
+    res = _bufferedreader_peek_unlocked(self);
 
 end:
     LEAVE_BUFFERED(self)
@@ -1407,32 +1407,57 @@
 _bufferedreader_read_all(buffered *self)
 {
     Py_ssize_t current_size;
-    PyObject *res, *data = NULL;
-    PyObject *chunks = PyList_New(0);
-
-    if (chunks == NULL)
-        return NULL;
+    PyObject *res, *data = NULL, *chunk, *chunks;
 
     /* First copy what we have in the current buffer. */
     current_size = Py_SAFE_DOWNCAST(READAHEAD(self), Py_off_t, Py_ssize_t);
     if (current_size) {
         data = PyBytes_FromStringAndSize(
             self->buffer + self->pos, current_size);
-        if (data == NULL) {
-            Py_DECREF(chunks);
+        if (data == NULL)
             return NULL;
-        }
     }
     _bufferedreader_reset_buf(self);
     /* We're going past the buffer's bounds, flush it */
     if (self->writable) {
         res = _bufferedwriter_flush_unlocked(self, 1);
-        if (res == NULL) {
-            Py_DECREF(chunks);
+        if (res == NULL)
+            return NULL;
+        Py_CLEAR(res);
+    }
+
+    if (PyObject_HasAttr(self->raw, _PyIO_str_readall)) {
+        chunk = PyObject_CallMethodObjArgs(self->raw, _PyIO_str_readall, NULL);
+        if (chunk == NULL)
+            return NULL;
+        if (chunk != Py_None && !PyBytes_Check(chunk)) {
+            Py_XDECREF(data);
+            Py_DECREF(chunk);
+            PyErr_SetString(PyExc_TypeError, "readall() should return bytes");
             return NULL;
         }
-        Py_CLEAR(res);
+        if (chunk == Py_None) {
+            if (current_size == 0)
+                return chunk;
+            else {
+                Py_DECREF(chunk);
+                return data;
+            }
+        }
+        else if (current_size) {
+            PyBytes_Concat(&data, chunk);
+            Py_DECREF(chunk);
+            if (data == NULL)
+                return NULL;
+            return data;
+        } else
+            return chunk;
     }
+
+    chunks = PyList_New(0);
+    if (chunks == NULL)
+        return NULL;
+
     while (1) {
         if (data) {
             if (PyList_Append(chunks, data) < 0) {
@@ -1586,7 +1611,7 @@
 }
 
 static PyObject *
-_bufferedreader_peek_unlocked(buffered *self, Py_ssize_t n)
+_bufferedreader_peek_unlocked(buffered *self)
 {
     Py_ssize_t have, r;
 
diff --git a/Modules/_io/fileio.c b/Modules/_io/fileio.c
--- a/Modules/_io/fileio.c
+++ b/Modules/_io/fileio.c
@@ -547,14 +547,14 @@
 }
 
 static size_t
-new_buffersize(fileio *self, size_t currentsize)
+new_buffersize(fileio *self, size_t currentsize
+#ifdef HAVE_FSTAT
+               , off_t pos, off_t end
+#endif
+               )
 {
 #ifdef HAVE_FSTAT
-    off_t pos, end;
-    struct stat st;
-    if (fstat(self->fd, &st) == 0) {
-        end = st.st_size;
-        pos = lseek(self->fd, 0L, SEEK_CUR);
+    if (end != (off_t)-1) {
         /* Files claiming a size smaller than SMALLCHUNK may
            actually be streaming pseudo-files. In this case, we
            apply the more aggressive algorithm below.
@@ -579,10 +579,17 @@
 static PyObject *
 fileio_readall(fileio *self)
 {
+#ifdef HAVE_FSTAT
+    struct stat st;
+    off_t pos, end;
+#endif
     PyObject *result;
     Py_ssize_t total = 0;
     int n;
+    size_t newsize;
 
+    if (self->fd < 0)
+        return err_closed();
     if (!_PyVerify_fd(self->fd))
         return PyErr_SetFromErrno(PyExc_IOError);
 
@@ -590,8 +597,23 @@
     if (result == NULL)
         return NULL;
 
+#ifdef HAVE_FSTAT
+#if defined(MS_WIN64) || defined(MS_WINDOWS)
+    pos = _lseeki64(self->fd, 0L, SEEK_CUR);
+#else
+    pos = lseek(self->fd, 0L, SEEK_CUR);
+#endif
+    if (fstat(self->fd, &st) == 0)
+        end = st.st_size;
+    else
+        end = (off_t)-1;
+#endif
     while (1) {
-        size_t newsize = new_buffersize(self, total);
+#ifdef HAVE_FSTAT
+        newsize = new_buffersize(self, total, pos, end);
+#else
+        newsize = new_buffersize(self, total);
+#endif
         if (newsize > PY_SSIZE_T_MAX || newsize <= 0) {
             PyErr_SetString(PyExc_OverflowError,
                 "unbounded read returned more bytes "
@@ -630,6 +652,9 @@
             return NULL;
         }
         total += n;
+#ifdef HAVE_FSTAT
+        pos += n;
+#endif
     }
 
     if (PyBytes_GET_SIZE(result) > total) {
diff --git a/Modules/_io/iobase.c b/Modules/_io/iobase.c
--- a/Modules/_io/iobase.c
+++ b/Modules/_io/iobase.c
@@ -815,6 +815,14 @@
             Py_DECREF(chunks);
             return NULL;
         }
+        if (data == Py_None) {
+            if (PyList_GET_SIZE(chunks) == 0) {
+                Py_DECREF(chunks);
+                return data;
+            }
+            Py_DECREF(data);
+            break;
+        }
         if (!PyBytes_Check(data)) {
             Py_DECREF(chunks);
             Py_DECREF(data);
diff --git a/Modules/_io/textio.c b/Modules/_io/textio.c
--- a/Modules/_io/textio.c
+++ b/Modules/_io/textio.c
@@ -1513,8 +1513,13 @@
         PyObject *decoded;
         if (bytes == NULL)
             goto fail;
-        decoded = PyObject_CallMethodObjArgs(self->decoder, _PyIO_str_decode,
-                                             bytes, Py_True, NULL);
+
+        if (Py_TYPE(self->decoder) == &PyIncrementalNewlineDecoder_Type)
+            decoded = _PyIncrementalNewlineDecoder_decode(self->decoder,
+                                                          bytes, 1);
+        else
+            decoded = PyObject_CallMethodObjArgs(
+                self->decoder, _PyIO_str_decode, bytes, Py_True, NULL);
         Py_DECREF(bytes);
         if (decoded == NULL)
             goto fail;
diff --git a/Modules/_posixsubprocess.c b/Modules/_posixsubprocess.c
--- a/Modules/_posixsubprocess.c
+++ b/Modules/_posixsubprocess.c
@@ -1,7 +1,7 @@
 /* Authors: Gregory P. Smith & Jeffrey Yasskin */
 #include "Python.h"
-#ifdef HAVE_PIPE2
-#define _GNU_SOURCE
+#if defined(HAVE_PIPE2) && !defined(_GNU_SOURCE)
+# define _GNU_SOURCE
 #endif
 #include <unistd.h>
 #include <fcntl.h>
diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c
--- a/Modules/_sqlite/connection.c
+++ b/Modules/_sqlite/connection.c
@@ -200,11 +200,13 @@
         weakref = PyList_GetItem(self->statements, i);
         statement = PyWeakref_GetObject(weakref);
         if (statement != Py_None) {
+            Py_INCREF(statement);
             if (action == ACTION_RESET) {
                 (void)pysqlite_statement_reset((pysqlite_Statement*)statement);
             } else {
                 (void)pysqlite_statement_finalize((pysqlite_Statement*)statement);
             }
+            Py_DECREF(statement);
         }
     }
 
diff --git a/Modules/_ssl.c b/Modules/_ssl.c
--- a/Modules/_ssl.c
+++ b/Modules/_ssl.c
@@ -1887,6 +1887,69 @@
 bound on the entropy contained in string.  See RFC 1750.");
 
 static PyObject *
+PySSL_RAND(int len, int pseudo)
+{
+    int ok;
+    PyObject *bytes;
+    unsigned long err;
+    const char *errstr;
+    PyObject *v;
+
+    bytes = PyBytes_FromStringAndSize(NULL, len);
+    if (bytes == NULL)
+        return NULL;
+    if (pseudo) {
+        ok = RAND_pseudo_bytes((unsigned char*)PyBytes_AS_STRING(bytes), len);
+        if (ok == 0 || ok == 1)
+            return Py_BuildValue("NO", bytes, ok == 1 ? Py_True : Py_False);
+    }
+    else {
+        ok = RAND_bytes((unsigned char*)PyBytes_AS_STRING(bytes), len);
+        if (ok == 1)
+            return bytes;
+    }
+    Py_DECREF(bytes);
+
+    err = ERR_get_error();
+    errstr = ERR_reason_error_string(err);
+    v = Py_BuildValue("(ks)", err, errstr);
+    if (v != NULL) {
+        PyErr_SetObject(PySSLErrorObject, v);
+        Py_DECREF(v);
+    }
+    return NULL;
+}
+
+static PyObject *
+PySSL_RAND_bytes(PyObject *self, PyObject *args)
+{
+    int len;
+    if (!PyArg_ParseTuple(args, "i:RAND_bytes", &len))
+        return NULL;
+    return PySSL_RAND(len, 0);
+}
+
+PyDoc_STRVAR(PySSL_RAND_bytes_doc,
+"RAND_bytes(n) -> bytes\n\
+\n\
+Generate n cryptographically strong pseudo-random bytes.");
+
+static PyObject *
+PySSL_RAND_pseudo_bytes(PyObject *self, PyObject *args)
+{
+    int len;
+    if (!PyArg_ParseTuple(args, "i:RAND_pseudo_bytes", &len))
+        return NULL;
+    return PySSL_RAND(len, 1);
+}
+
+PyDoc_STRVAR(PySSL_RAND_pseudo_bytes_doc,
+"RAND_pseudo_bytes(n) -> (bytes, is_cryptographic)\n\
+\n\
+Generate n pseudo-random bytes. is_cryptographic is True if the bytes\n\
+generated are cryptographically strong.");
+
+static PyObject *
 PySSL_RAND_status(PyObject *self)
 {
     return PyLong_FromLong(RAND_status());
@@ -1939,6 +2002,10 @@
 #ifdef HAVE_OPENSSL_RAND
     {"RAND_add",            PySSL_RAND_add, METH_VARARGS,
      PySSL_RAND_add_doc},
+    {"RAND_bytes",          PySSL_RAND_bytes, METH_VARARGS,
+     PySSL_RAND_bytes_doc},
+    {"RAND_pseudo_bytes",   PySSL_RAND_pseudo_bytes, METH_VARARGS,
+     PySSL_RAND_pseudo_bytes_doc},
     {"RAND_egd",            PySSL_RAND_egd, METH_VARARGS,
      PySSL_RAND_egd_doc},
     {"RAND_status",         (PyCFunction)PySSL_RAND_status, METH_NOARGS,
diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c
--- a/Modules/_threadmodule.c
+++ b/Modules/_threadmodule.c
@@ -1098,7 +1098,7 @@
 
 PyDoc_STRVAR(exit_doc,
 "exit()\n\
-(PyThread_exit_thread() is an obsolete synonym)\n\
+(exit_thread() is an obsolete synonym)\n\
 \n\
 This is synonymous to ``raise SystemExit''.  It will cause the current\n\
 thread to exit silently unless the exception is caught.");
diff --git a/Modules/cjkcodecs/_codecs_hk.c b/Modules/cjkcodecs/_codecs_hk.c
--- a/Modules/cjkcodecs/_codecs_hk.c
+++ b/Modules/cjkcodecs/_codecs_hk.c
@@ -115,55 +115,56 @@
 
         REQUIRE_INBUF(2)
 
-        if (0xc6 <= c && c <= 0xc8 && (c >= 0xc7 || IN2 >= 0xa1))
-            goto hkscsdec;
+        if (0xc6 > c || c > 0xc8 || (c < 0xc7 && IN2 < 0xa1)) {
+            TRYMAP_DEC(big5, **outbuf, c, IN2) {
+                NEXT(2, 1)
+                continue;
+            }
+        }
 
-        TRYMAP_DEC(big5, **outbuf, c, IN2) {
-            NEXT(2, 1)
+        TRYMAP_DEC(big5hkscs, decoded, c, IN2)
+        {
+            int s = BH2S(c, IN2);
+            const unsigned char *hintbase;
+
+            assert(0x87 <= c && c <= 0xfe);
+            assert(0x40 <= IN2 && IN2 <= 0xfe);
+
+            if (BH2S(0x87, 0x40) <= s && s <= BH2S(0xa0, 0xfe)) {
+                    hintbase = big5hkscs_phint_0;
+                    s -= BH2S(0x87, 0x40);
+            }
+            else if (BH2S(0xc6,0xa1) <= s && s <= BH2S(0xc8,0xfe)){
+                    hintbase = big5hkscs_phint_12130;
+                    s -= BH2S(0xc6, 0xa1);
+            }
+            else if (BH2S(0xf9,0xd6) <= s && s <= BH2S(0xfe,0xfe)){
+                    hintbase = big5hkscs_phint_21924;
+                    s -= BH2S(0xf9, 0xd6);
+            }
+            else
+                    return MBERR_INTERNAL;
+
+            if (hintbase[s >> 3] & (1 << (s & 7))) {
+                    WRITEUCS4(decoded | 0x20000)
+                    NEXT_IN(2)
+            }
+            else {
+                    OUT1(decoded)
+                    NEXT(2, 1)
+            }
+            continue;
         }
-        else
-hkscsdec:       TRYMAP_DEC(big5hkscs, decoded, c, IN2) {
-                        int s = BH2S(c, IN2);
-                        const unsigned char *hintbase;
 
-                        assert(0x87 <= c && c <= 0xfe);
-                        assert(0x40 <= IN2 && IN2 <= 0xfe);
+        switch ((c << 8) | IN2) {
+        case 0x8862: WRITE2(0x00ca, 0x0304); break;
+        case 0x8864: WRITE2(0x00ca, 0x030c); break;
+        case 0x88a3: WRITE2(0x00ea, 0x0304); break;
+        case 0x88a5: WRITE2(0x00ea, 0x030c); break;
+        default: return 2;
+        }
 
-                        if (BH2S(0x87, 0x40) <= s && s <= BH2S(0xa0, 0xfe)) {
-                                hintbase = big5hkscs_phint_0;
-                                s -= BH2S(0x87, 0x40);
-                        }
-                        else if (BH2S(0xc6,0xa1) <= s && s <= BH2S(0xc8,0xfe)){
-                                hintbase = big5hkscs_phint_12130;
-                                s -= BH2S(0xc6, 0xa1);
-                        }
-                        else if (BH2S(0xf9,0xd6) <= s && s <= BH2S(0xfe,0xfe)){
-                                hintbase = big5hkscs_phint_21924;
-                                s -= BH2S(0xf9, 0xd6);
-                        }
-                        else
-                                return MBERR_INTERNAL;
-
-                        if (hintbase[s >> 3] & (1 << (s & 7))) {
-                                WRITEUCS4(decoded | 0x20000)
-                                NEXT_IN(2)
-                        }
-                        else {
-                                OUT1(decoded)
-                                NEXT(2, 1)
-                        }
-                }
-                else {
-                        switch ((c << 8) | IN2) {
-                        case 0x8862: WRITE2(0x00ca, 0x0304); break;
-                        case 0x8864: WRITE2(0x00ca, 0x030c); break;
-                        case 0x88a3: WRITE2(0x00ea, 0x0304); break;
-                        case 0x88a5: WRITE2(0x00ea, 0x030c); break;
-                        default: return 2;
-                        }
-
-                        NEXT(2, 2) /* all decoded codepoints are pairs, above. */
-        }
+        NEXT(2, 2) /* all decoded codepoints are pairs, above. */
     }
 
     return 0;
diff --git a/Modules/cjkcodecs/_codecs_jp.c b/Modules/cjkcodecs/_codecs_jp.c
--- a/Modules/cjkcodecs/_codecs_jp.c
+++ b/Modules/cjkcodecs/_codecs_jp.c
@@ -371,11 +371,11 @@
 
         REQUIRE_OUTBUF(1)
 
-            if (c < 0x80) {
-                OUT1(c)
-                NEXT(1, 1)
-                continue;
-            }
+        if (c < 0x80) {
+            OUT1(c)
+            NEXT(1, 1)
+            continue;
+        }
 
         if (c == 0x8e) {
             /* JIS X 0201 half-width katakana */
diff --git a/Modules/cjkcodecs/multibytecodec.c b/Modules/cjkcodecs/multibytecodec.c
--- a/Modules/cjkcodecs/multibytecodec.c
+++ b/Modules/cjkcodecs/multibytecodec.c
@@ -479,7 +479,7 @@
     MultibyteEncodeBuffer buf;
     Py_ssize_t finalsize, r = 0;
 
-    if (datalen == 0)
+    if (datalen == 0 && !(flags & MBENC_RESET))
         return PyBytes_FromStringAndSize(NULL, 0);
 
     buf.excobj = NULL;
@@ -515,7 +515,7 @@
             break;
     }
 
-    if (codec->encreset != NULL)
+    if (codec->encreset != NULL && (flags & MBENC_RESET))
         for (;;) {
             Py_ssize_t outleft;
 
@@ -785,8 +785,8 @@
     inbuf_end = inbuf + datalen;
 
     r = multibytecodec_encode(ctx->codec, &ctx->state,
-                    (const Py_UNICODE **)&inbuf,
-                    datalen, ctx->errors, final ? MBENC_FLUSH : 0);
+                    (const Py_UNICODE **)&inbuf, datalen,
+                    ctx->errors, final ? MBENC_FLUSH | MBENC_RESET : 0);
     if (r == NULL) {
         /* recover the original pending buffer */
         if (origpending > 0)
@@ -901,11 +901,17 @@
 static PyObject *
 mbiencoder_reset(MultibyteIncrementalEncoderObject *self)
 {
-    if (self->codec->decreset != NULL &&
-        self->codec->decreset(&self->state, self->codec->config) != 0)
-        return NULL;
+    /* Longest output: 4 bytes (b'\x0F\x1F(B') with ISO 2022 */
+    unsigned char buffer[4], *outbuf;
+    Py_ssize_t r;
+    if (self->codec->encreset != NULL) {
+        outbuf = buffer;
+        r = self->codec->encreset(&self->state, self->codec->config,
+                                  &outbuf, sizeof(buffer));
+        if (r != 0)
+            return NULL;
+    }
     self->pendingsize = 0;
-
     Py_RETURN_NONE;
 }
 
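
The multibytecodec changes above stop resetting CJK incremental encoders on every encode() call and only emit the shift-back sequence when final is true (or when reset() is called, which now goes through encreset()). A hedged sketch with the iso2022_jp codec, whose escape sequences make the behaviour visible (exact intermediate bytes may differ):

    import codecs

    enc = codecs.getincrementalencoder("iso2022_jp")()

    # Intermediate chunks stay in the JIS X 0208 mode instead of shifting back
    # to ASCII after every encode() call.
    part1 = enc.encode("\u3053")               # switches into the 2-byte charset
    part2 = enc.encode("\u3093", final=True)   # final=True appends the reset
    print(part1, part2)
    assert part2.endswith(b"\x1b(B")           # shift back to ASCII only at the end
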
diff --git a/Modules/faulthandler.c b/Modules/faulthandler.c
--- a/Modules/faulthandler.c
+++ b/Modules/faulthandler.c
@@ -854,7 +854,7 @@
 }
 
 #if defined(HAVE_SIGALTSTACK) && defined(HAVE_SIGACTION)
-void*
+static void*
 stack_overflow(void *min_sp, void *max_sp, size_t *depth)
 {
     /* allocate 4096 bytes on the stack at each call */
@@ -1005,9 +1005,10 @@
 faulthandler_env_options(void)
 {
     PyObject *xoptions, *key, *module, *res;
-    int enable;
 
     if (!Py_GETENV("PYTHONFAULTHANDLER")) {
+        int has_key;
+
         xoptions = PySys_GetXOptions();
         if (xoptions == NULL)
             return -1;
@@ -1016,13 +1017,11 @@
         if (key == NULL)
             return -1;
 
-        enable = PyDict_Contains(xoptions, key);
+        has_key = PyDict_Contains(xoptions, key);
         Py_DECREF(key);
-        if (!enable)
+        if (!has_key)
             return 0;
     }
-    else
-        enable = 1;
 
     module = PyImport_ImportModule("faulthandler");
     if (module == NULL) {
diff --git a/Modules/parsermodule.c b/Modules/parsermodule.c
--- a/Modules/parsermodule.c
+++ b/Modules/parsermodule.c
@@ -581,10 +581,11 @@
             if (res)
                 ((PyST_Object *)res)->st_flags.cf_flags = flags & PyCF_MASK;
         }
-        else
+        else {
             PyParser_SetError(&err);
+            PyParser_ClearError(&err);
+        }
     }
-    PyParser_ClearError(&err);
     return (res);
 }
 
diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c
--- a/Modules/posixmodule.c
+++ b/Modules/posixmodule.c
@@ -6547,6 +6547,31 @@
 }
 #endif  /* HAVE_PIPE */
 
+#ifdef HAVE_PIPE2
+PyDoc_STRVAR(posix_pipe2__doc__,
+"pipe2(flags=0) -> (read_end, write_end)\n\n\
+Create a pipe with flags set atomically.\n\
+flags is optional and can be constructed by ORing together zero or more\n\
+of these values: O_NONBLOCK, O_CLOEXEC.\n\
+");
+
+static PyObject *
+posix_pipe2(PyObject *self, PyObject *args)
+{
+    int flags = 0;
+    int fds[2];
+    int res;
+
+    if (!PyArg_ParseTuple(args, "|i:pipe2", &flags))
+        return NULL;
+
+    res = pipe2(fds, flags);
+    if (res != 0)
+        return posix_error();
+    return Py_BuildValue("(ii)", fds[0], fds[1]);
+}
+#endif /* HAVE_PIPE2 */
+
 #ifdef HAVE_WRITEV
 PyDoc_STRVAR(posix_writev__doc__,
 "writev(fd, buffers) -> byteswritten\n\n\
@@ -8495,6 +8520,9 @@
 device_encoding(PyObject *self, PyObject *args)
 {
     int fd;
+#if defined(MS_WINDOWS) || defined(MS_WIN64)
+    UINT cp;
+#endif
     if (!PyArg_ParseTuple(args, "i:device_encoding", &fd))
         return NULL;
     if (!_PyVerify_fd(fd) || !isatty(fd)) {
@@ -8502,16 +8530,16 @@
         return Py_None;
     }
 #if defined(MS_WINDOWS) || defined(MS_WIN64)
-    if (fd == 0) {
-        char buf[100];
-        sprintf(buf, "cp%d", GetConsoleCP());
-        return PyUnicode_FromString(buf);
-    }
-    if (fd == 1 || fd == 2) {
-        char buf[100];
-        sprintf(buf, "cp%d", GetConsoleOutputCP());
-        return PyUnicode_FromString(buf);
-    }
+    if (fd == 0)
+        cp = GetConsoleCP();
+    else if (fd == 1 || fd == 2)
+        cp = GetConsoleOutputCP();
+    else
+        cp = 0;
+    /* GetConsoleCP() and GetConsoleOutputCP() return 0 if the application
+       has no console */
+    if (cp != 0)
+        return PyUnicode_FromFormat("cp%u", (unsigned int)cp);
 #elif defined(CODESET)
     {
         char *codeset = nl_langinfo(CODESET);
@@ -9438,6 +9466,9 @@
 #ifdef HAVE_PIPE
     {"pipe",            posix_pipe, METH_NOARGS, posix_pipe__doc__},
 #endif
+#ifdef HAVE_PIPE2
+    {"pipe2",           posix_pipe2, METH_VARARGS, posix_pipe2__doc__},
+#endif
 #ifdef HAVE_MKFIFO
     {"mkfifo",          posix_mkfifo, METH_VARARGS, posix_mkfifo__doc__},
 #endif
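
posix_pipe2() above surfaces pipe2(2) as os.pipe2(flags), returning a (read_end, write_end) pair with the flags applied atomically. A minimal sketch, assuming a platform where HAVE_PIPE2 is defined (e.g. Linux):

    import os

    # Both descriptors are created close-on-exec and non-blocking in a single
    # atomic call, avoiding the usual pipe()+fcntl() race in threaded programs.
    r, w = os.pipe2(os.O_CLOEXEC | os.O_NONBLOCK)

    os.write(w, b"ping")
    print(os.read(r, 4))    # b'ping'
    os.close(r)
    os.close(w)
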
diff --git a/Modules/pyexpat.c b/Modules/pyexpat.c
--- a/Modules/pyexpat.c
+++ b/Modules/pyexpat.c
@@ -1622,26 +1622,6 @@
 PyDoc_STRVAR(pyexpat_module_documentation,
 "Python wrapper for Expat parser.");
 
-/* Return a Python string that represents the version number without the
- * extra cruft added by revision control, even if the right options were
- * given to the "cvs export" command to make it not include the extra
- * cruft.
- */
-static PyObject *
-get_version_string(void)
-{
-    static char *rcsid = "$Revision$";
-    char *rev = rcsid;
-    int i = 0;
-
-    while (!isdigit(Py_CHARMASK(*rev)))
-        ++rev;
-    while (rev[i] != ' ' && rev[i] != '\0')
-        ++i;
-
-    return PyUnicode_FromStringAndSize(rev, i);
-}
-
 /* Initialization function for the module */
 
 #ifndef MODULE_NAME
@@ -1718,7 +1698,6 @@
     Py_INCREF(&Xmlparsetype);
     PyModule_AddObject(m, "XMLParserType", (PyObject *) &Xmlparsetype);
 
-    PyModule_AddObject(m, "__version__", get_version_string());
     PyModule_AddStringConstant(m, "EXPAT_VERSION",
                                (char *) XML_ExpatVersion());
     {
diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c
--- a/Modules/signalmodule.c
+++ b/Modules/signalmodule.c
@@ -177,17 +177,18 @@
 trip_signal(int sig_num)
 {
     unsigned char byte;
+
     Handlers[sig_num].tripped = 1;
+    if (wakeup_fd != -1) {
+        byte = (unsigned char)sig_num;
+        write(wakeup_fd, &byte, 1);
+    }
     if (is_tripped)
         return;
     /* Set is_tripped after setting .tripped, as it gets
        cleared in PyErr_CheckSignals() before .tripped. */
     is_tripped = 1;
     Py_AddPendingCall(checksignals_witharg, NULL);
-    if (wakeup_fd != -1) {
-        byte = (unsigned char)sig_num;
-        write(wakeup_fd, &byte, 1);
-    }
 }
 
 static void
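
The trip_signal() reordering above writes the signal number to the wakeup file descriptor before scheduling the pending call, so a poll/select loop watching that descriptor is woken promptly. A POSIX-only sketch of the wakeup-fd pattern this serves (event-loop details elided):

    import os
    import select
    import signal

    r, w = os.pipe()
    signal.set_wakeup_fd(w)      # trip_signal() writes the signal number here

    signal.signal(signal.SIGUSR1, lambda signum, frame: None)
    os.kill(os.getpid(), signal.SIGUSR1)

    ready, _, _ = select.select([r], [], [], 1.0)
    if ready:
        print(os.read(r, 1))     # one byte whose value is the signal number
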
diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c
--- a/Modules/socketmodule.c
+++ b/Modules/socketmodule.c
@@ -2782,6 +2782,7 @@
             PyErr_Format(PyExc_TypeError,
                          "sendto() takes 2 or 3 arguments (%d given)",
                          arglen);
+            return NULL;
     }
     if (PyErr_Occurred())
         return NULL;
@@ -3144,7 +3145,7 @@
         }
         return PyErr_SetExcFromWindowsErr(PyExc_WindowsError, GetLastError());
     }
-    return PyUnicode_FromUnicode(buf, size);            
+    return PyUnicode_FromUnicode(buf, size);
 #else
     char buf[1024];
     int res;
@@ -4038,7 +4039,7 @@
 static PyObject *
 socket_getaddrinfo(PyObject *self, PyObject *args, PyObject* kwargs)
 {
-    static char* kwnames[] = {"host", "port", "family", "type", "proto", 
+    static char* kwnames[] = {"host", "port", "family", "type", "proto",
                               "flags", 0};
     struct addrinfo hints, *res;
     struct addrinfo *res0 = NULL;
@@ -4053,7 +4054,7 @@
 
     family = socktype = protocol = flags = 0;
     family = AF_UNSPEC;
-    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "OO|iiii:getaddrinfo", 
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "OO|iiii:getaddrinfo",
                           kwnames, &hobj, &pobj, &family, &socktype,
                           &protocol, &flags)) {
         return NULL;
@@ -4289,7 +4290,7 @@
     PyObject *list;
     int i;
     struct if_nameindex *ni;
-  
+
     ni = if_nameindex();
     if (ni == NULL) {
         PyErr_SetFromErrno(socket_error);
diff --git a/Modules/zipimport.c b/Modules/zipimport.c
--- a/Modules/zipimport.c
+++ b/Modules/zipimport.c
@@ -1196,7 +1196,7 @@
                 int *p_ispackage, PyObject **p_modpath)
 {
     PyObject *code = NULL, *toc_entry, *subname;
-    PyObject *path, *fullpath;
+    PyObject *path, *fullpath = NULL;
     struct st_zip_searchorder *zso;
 
     subname = get_subname(fullname);
diff --git a/Objects/abstract.c b/Objects/abstract.c
--- a/Objects/abstract.c
+++ b/Objects/abstract.c
@@ -237,7 +237,8 @@
     pb = obj->ob_type->tp_as_buffer;
     if (pb == NULL || pb->bf_getbuffer == NULL) {
         PyErr_SetString(PyExc_TypeError,
-                        "expected an object with the buffer interface");
+                        "expected bytes, bytearray "
+                        "or buffer compatible object");
         return -1;
     }
     if ((*pb->bf_getbuffer)(obj, &view, PyBUF_SIMPLE)) return -1;
diff --git a/Objects/exceptions.c b/Objects/exceptions.c
--- a/Objects/exceptions.c
+++ b/Objects/exceptions.c
@@ -499,7 +499,7 @@
     Py_CLEAR(self->code);
     if (size == 1)
         self->code = PyTuple_GET_ITEM(args, 0);
-    else if (size > 1)
+    else /* size > 1 */
         self->code = args;
     Py_INCREF(self->code);
     return 0;
diff --git a/Objects/moduleobject.c b/Objects/moduleobject.c
--- a/Objects/moduleobject.c
+++ b/Objects/moduleobject.c
@@ -413,6 +413,34 @@
     return 0;
 }
 
+static PyObject *
+module_dir(PyObject *self, PyObject *args)
+{
+    PyObject *result = NULL;
+    PyObject *dict = PyObject_GetAttrString(self, "__dict__");
+
+    if (dict != NULL) {
+        if (PyDict_Check(dict))
+            result = PyDict_Keys(dict);
+        else {
+            const char *name = PyModule_GetName(self);
+            if (name)
+                PyErr_Format(PyExc_TypeError,
+                             "%.200s.__dict__ is not a dictionary",
+                             name);
+        }
+    }
+
+    Py_XDECREF(dict);
+    return result;
+}
+
+static PyMethodDef module_methods[] = {
+    {"__dir__", module_dir, METH_NOARGS,
+     PyDoc_STR("__dir__() -> list\nspecialized dir() implementation")},
+    {0}
+};
+
 
 PyDoc_STRVAR(module_doc,
 "module(name[, doc])\n\
@@ -449,7 +477,7 @@
     0,                                          /* tp_weaklistoffset */
     0,                                          /* tp_iter */
     0,                                          /* tp_iternext */
-    0,                                          /* tp_methods */
+    module_methods,                             /* tp_methods */
     module_members,                             /* tp_members */
     0,                                          /* tp_getset */
     0,                                          /* tp_base */
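
module_dir() above is the module type's new __dir__ slot: it simply returns the keys of the module's __dict__, which the generic dir() machinery now dispatches to (see the Objects/object.c hunk below). A quick sketch of the observable behaviour:

    import math

    # For a module, __dir__() is just the keys of its __dict__ (unsorted);
    # the dir() builtin sorts whatever __dir__() returns.
    assert sorted(math.__dir__()) == sorted(vars(math).keys())
    assert dir(math) == sorted(vars(math).keys())
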
diff --git a/Objects/object.c b/Objects/object.c
--- a/Objects/object.c
+++ b/Objects/object.c
@@ -1182,66 +1182,6 @@
     return x->ob_type->tp_call != NULL;
 }
 
-/* ------------------------- PyObject_Dir() helpers ------------------------- */
-
-/* Helper for PyObject_Dir.
-   Merge the __dict__ of aclass into dict, and recursively also all
-   the __dict__s of aclass's base classes.  The order of merging isn't
-   defined, as it's expected that only the final set of dict keys is
-   interesting.
-   Return 0 on success, -1 on error.
-*/
-
-static int
-merge_class_dict(PyObject* dict, PyObject* aclass)
-{
-    PyObject *classdict;
-    PyObject *bases;
-
-    assert(PyDict_Check(dict));
-    assert(aclass);
-
-    /* Merge in the type's dict (if any). */
-    classdict = PyObject_GetAttrString(aclass, "__dict__");
-    if (classdict == NULL)
-        PyErr_Clear();
-    else {
-        int status = PyDict_Update(dict, classdict);
-        Py_DECREF(classdict);
-        if (status < 0)
-            return -1;
-    }
-
-    /* Recursively merge in the base types' (if any) dicts. */
-    bases = PyObject_GetAttrString(aclass, "__bases__");
-    if (bases == NULL)
-        PyErr_Clear();
-    else {
-        /* We have no guarantee that bases is a real tuple */
-        Py_ssize_t i, n;
-        n = PySequence_Size(bases); /* This better be right */
-        if (n < 0)
-            PyErr_Clear();
-        else {
-            for (i = 0; i < n; i++) {
-                int status;
-                PyObject *base = PySequence_GetItem(bases, i);
-                if (base == NULL) {
-                    Py_DECREF(bases);
-                    return -1;
-                }
-                status = merge_class_dict(dict, base);
-                Py_DECREF(base);
-                if (status < 0) {
-                    Py_DECREF(bases);
-                    return -1;
-                }
-            }
-        }
-        Py_DECREF(bases);
-    }
-    return 0;
-}
 
 /* Helper for PyObject_Dir without arguments: returns the local scope. */
 static PyObject *
@@ -1269,132 +1209,34 @@
     return names;
 }
 
-/* Helper for PyObject_Dir of type objects: returns __dict__ and __bases__.
-   We deliberately don't suck up its __class__, as methods belonging to the
-   metaclass would probably be more confusing than helpful.
-*/
-static PyObject *
-_specialized_dir_type(PyObject *obj)
-{
-    PyObject *result = NULL;
-    PyObject *dict = PyDict_New();
-
-    if (dict != NULL && merge_class_dict(dict, obj) == 0)
-        result = PyDict_Keys(dict);
-
-    Py_XDECREF(dict);
-    return result;
-}
-
-/* Helper for PyObject_Dir of module objects: returns the module's __dict__. */
-static PyObject *
-_specialized_dir_module(PyObject *obj)
-{
-    PyObject *result = NULL;
-    PyObject *dict = PyObject_GetAttrString(obj, "__dict__");
-
-    if (dict != NULL) {
-        if (PyDict_Check(dict))
-            result = PyDict_Keys(dict);
-        else {
-            const char *name = PyModule_GetName(obj);
-            if (name)
-                PyErr_Format(PyExc_TypeError,
-                             "%.200s.__dict__ is not a dictionary",
-                             name);
-        }
-    }
-
-    Py_XDECREF(dict);
-    return result;
-}
-
-/* Helper for PyObject_Dir of generic objects: returns __dict__, __class__,
-   and recursively up the __class__.__bases__ chain.
-*/
-static PyObject *
-_generic_dir(PyObject *obj)
-{
-    PyObject *result = NULL;
-    PyObject *dict = NULL;
-    PyObject *itsclass = NULL;
-
-    /* Get __dict__ (which may or may not be a real dict...) */
-    dict = PyObject_GetAttrString(obj, "__dict__");
-    if (dict == NULL) {
-        PyErr_Clear();
-        dict = PyDict_New();
-    }
-    else if (!PyDict_Check(dict)) {
-        Py_DECREF(dict);
-        dict = PyDict_New();
-    }
-    else {
-        /* Copy __dict__ to avoid mutating it. */
-        PyObject *temp = PyDict_Copy(dict);
-        Py_DECREF(dict);
-        dict = temp;
-    }
-
-    if (dict == NULL)
-        goto error;
-
-    /* Merge in attrs reachable from its class. */
-    itsclass = PyObject_GetAttrString(obj, "__class__");
-    if (itsclass == NULL)
-        /* XXX(tomer): Perhaps fall back to obj->ob_type if no
-                       __class__ exists? */
-        PyErr_Clear();
-    else {
-        if (merge_class_dict(dict, itsclass) != 0)
-            goto error;
-    }
-
-    result = PyDict_Keys(dict);
-    /* fall through */
-error:
-    Py_XDECREF(itsclass);
-    Py_XDECREF(dict);
-    return result;
-}
-
-/* Helper for PyObject_Dir: object introspection.
-   This calls one of the above specialized versions if no __dir__ method
-   exists. */
+/* Helper for PyObject_Dir: object introspection. */
 static PyObject *
 _dir_object(PyObject *obj)
 {
-    PyObject * result = NULL;
-    PyObject * dirfunc = PyObject_GetAttrString((PyObject*)obj->ob_type,
-                                                "__dir__");
+    PyObject *result;
+    static PyObject *dir_str = NULL;
+    PyObject *dirfunc = _PyObject_LookupSpecial(obj, "__dir__", &dir_str);
 
     assert(obj);
     if (dirfunc == NULL) {
-        /* use default implementation */
-        PyErr_Clear();
-        if (PyModule_Check(obj))
-            result = _specialized_dir_module(obj);
-        else if (PyType_Check(obj))
-            result = _specialized_dir_type(obj);
-        else
-            result = _generic_dir(obj);
+        if (!PyErr_Occurred())
+            PyErr_SetString(PyExc_TypeError, "object does not provide __dir__");
+        return NULL;
     }
-    else {
-        /* use __dir__ */
-        result = PyObject_CallFunctionObjArgs(dirfunc, obj, NULL);
-        Py_DECREF(dirfunc);
-        if (result == NULL)
-            return NULL;
+    /* use __dir__ */
+    result = PyObject_CallFunctionObjArgs(dirfunc, NULL);
+    Py_DECREF(dirfunc);
+    if (result == NULL)
+        return NULL;
 
-        /* result must be a list */
-        /* XXX(gbrandl): could also check if all items are strings */
-        if (!PyList_Check(result)) {
-            PyErr_Format(PyExc_TypeError,
-                         "__dir__() must return a list, not %.200s",
-                         Py_TYPE(result)->tp_name);
-            Py_DECREF(result);
-            result = NULL;
-        }
+    /* result must be a list */
+    /* XXX(gbrandl): could also check if all items are strings */
+    if (!PyList_Check(result)) {
+        PyErr_Format(PyExc_TypeError,
+                     "__dir__() must return a list, not %.200s",
+                     Py_TYPE(result)->tp_name);
+        Py_DECREF(result);
+        result = NULL;
     }
 
     return result;
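
_dir_object() now always dispatches through the type's __dir__ special method, with the old specialised helpers moved onto the module and type objects themselves. A hedged sketch of what that dispatch looks like from Python (Plugin is an illustrative class):

    class Plugin:
        internal_state = "not advertised"

        def __dir__(self):
            # dir() now always goes through __dir__; exceptions raised here
            # (other than a missing __dir__) propagate to the caller.
            return ["load", "unload"]

        def load(self): ...
        def unload(self): ...

    print(dir(Plugin()))         # ['load', 'unload']
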
diff --git a/Objects/setobject.c b/Objects/setobject.c
--- a/Objects/setobject.c
+++ b/Objects/setobject.c
@@ -607,16 +607,18 @@
         goto done;
     newsize = PyUnicode_GET_SIZE(listrepr);
     result = PyUnicode_FromUnicode(NULL, newsize);
-    if (result) {
-        u = PyUnicode_AS_UNICODE(result);
-        *u++ = '{';
-        /* Omit the brackets from the listrepr */
-        Py_UNICODE_COPY(u, PyUnicode_AS_UNICODE(listrepr)+1,
-                           PyUnicode_GET_SIZE(listrepr)-2);
-        u += newsize-2;
-        *u++ = '}';
-    }
+    if (result == NULL)
+        goto done;
+
+    u = PyUnicode_AS_UNICODE(result);
+    *u++ = '{';
+    /* Omit the brackets from the listrepr */
+    Py_UNICODE_COPY(u, PyUnicode_AS_UNICODE(listrepr)+1,
+                       newsize-2);
+    u += newsize-2;
+    *u++ = '}';
     Py_DECREF(listrepr);
+
     if (Py_TYPE(so) != &PySet_Type) {
         PyObject *tmp = PyUnicode_FromFormat("%s(%U)",
                                              Py_TYPE(so)->tp_name,
diff --git a/Objects/typeobject.c b/Objects/typeobject.c
--- a/Objects/typeobject.c
+++ b/Objects/typeobject.c
@@ -2572,6 +2572,82 @@
     return PyDict_New();
 }
 
+/* 
+   Merge the __dict__ of aclass into dict, and recursively also all
+   the __dict__s of aclass's base classes.  The order of merging isn't
+   defined, as it's expected that only the final set of dict keys is
+   interesting.
+   Return 0 on success, -1 on error.
+*/
+
+static int
+merge_class_dict(PyObject *dict, PyObject *aclass)
+{
+    PyObject *classdict;
+    PyObject *bases;
+
+    assert(PyDict_Check(dict));
+    assert(aclass);
+
+    /* Merge in the type's dict (if any). */
+    classdict = PyObject_GetAttrString(aclass, "__dict__");
+    if (classdict == NULL)
+        PyErr_Clear();
+    else {
+        int status = PyDict_Update(dict, classdict);
+        Py_DECREF(classdict);
+        if (status < 0)
+            return -1;
+    }
+
+    /* Recursively merge in the base types' (if any) dicts. */
+    bases = PyObject_GetAttrString(aclass, "__bases__");
+    if (bases == NULL)
+        PyErr_Clear();
+    else {
+        /* We have no guarantee that bases is a real tuple */
+        Py_ssize_t i, n;
+        n = PySequence_Size(bases); /* This better be right */
+        if (n < 0)
+            PyErr_Clear();
+        else {
+            for (i = 0; i < n; i++) {
+                int status;
+                PyObject *base = PySequence_GetItem(bases, i);
+                if (base == NULL) {
+                    Py_DECREF(bases);
+                    return -1;
+                }
+                status = merge_class_dict(dict, base);
+                Py_DECREF(base);
+                if (status < 0) {
+                    Py_DECREF(bases);
+                    return -1;
+                }
+            }
+        }
+        Py_DECREF(bases);
+    }
+    return 0;
+}
+
+/* __dir__ for type objects: returns __dict__ and __bases__.
+   We deliberately don't suck up its __class__, as methods belonging to the
+   metaclass would probably be more confusing than helpful.
+*/
+static PyObject *
+type_dir(PyObject *self, PyObject *args)
+{
+    PyObject *result = NULL;
+    PyObject *dict = PyDict_New();
+
+    if (dict != NULL && merge_class_dict(dict, self) == 0)
+        result = PyDict_Keys(dict);
+
+    Py_XDECREF(dict);
+    return result;
+}
+
 static PyMethodDef type_methods[] = {
     {"mro", (PyCFunction)mro_external, METH_NOARGS,
      PyDoc_STR("mro() -> list\nreturn a type's method resolution order")},
@@ -2582,9 +2658,11 @@
      PyDoc_STR("__prepare__() -> dict\n"
                "used to create the namespace for the class statement")},
     {"__instancecheck__", type___instancecheck__, METH_O,
-     PyDoc_STR("__instancecheck__() -> check if an object is an instance")},
+     PyDoc_STR("__instancecheck__() -> bool\ncheck if an object is an instance")},
     {"__subclasscheck__", type___subclasscheck__, METH_O,
-     PyDoc_STR("__subclasscheck__() -> check if a class is a subclass")},
+     PyDoc_STR("__subclasscheck__() -> bool\ncheck if a class is a subclass")},
+    {"__dir__", type_dir, METH_NOARGS,
+     PyDoc_STR("__dir__() -> list\nspecialized __dir__ implementation for types")},
     {0}
 };
 
@@ -3438,6 +3516,53 @@
     return PyLong_FromSsize_t(res);
 }
 
+/* __dir__ for generic objects: returns __dict__, __class__,
+   and recursively up the __class__.__bases__ chain.
+*/
+static PyObject *
+object_dir(PyObject *self, PyObject *args)
+{
+    PyObject *result = NULL;
+    PyObject *dict = NULL;
+    PyObject *itsclass = NULL;
+
+    /* Get __dict__ (which may or may not be a real dict...) */
+    dict = PyObject_GetAttrString(self, "__dict__");
+    if (dict == NULL) {
+        PyErr_Clear();
+        dict = PyDict_New();
+    }
+    else if (!PyDict_Check(dict)) {
+        Py_DECREF(dict);
+        dict = PyDict_New();
+    }
+    else {
+        /* Copy __dict__ to avoid mutating it. */
+        PyObject *temp = PyDict_Copy(dict);
+        Py_DECREF(dict);
+        dict = temp;
+    }
+
+    if (dict == NULL)
+        goto error;
+
+    /* Merge in attrs reachable from its class. */
+    itsclass = PyObject_GetAttrString(self, "__class__");
+    if (itsclass == NULL)
+        /* XXX(tomer): Perhaps fall back to obj->ob_type if no
+                       __class__ exists? */
+        PyErr_Clear();
+    else if (merge_class_dict(dict, itsclass) != 0)
+        goto error;
+
+    result = PyDict_Keys(dict);
+    /* fall through */
+error:
+    Py_XDECREF(itsclass);
+    Py_XDECREF(dict);
+    return result;
+}
+
 static PyMethodDef object_methods[] = {
     {"__reduce_ex__", object_reduce_ex, METH_VARARGS,
      PyDoc_STR("helper for pickle")},
@@ -3448,7 +3573,9 @@
     {"__format__", object_format, METH_VARARGS,
      PyDoc_STR("default object formatter")},
     {"__sizeof__", object_sizeof, METH_NOARGS,
-     PyDoc_STR("__sizeof__() -> size of object in memory, in bytes")},
+     PyDoc_STR("__sizeof__() -> int\nsize of object in memory, in bytes")},
+    {"__dir__", object_dir, METH_NOARGS,
+     PyDoc_STR("__dir__() -> list\ndefault dir() implementation")},
     {0}
 };
 
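
Since the default logic now lives in real methods (object.__dir__() and
type.__dir__() above), a class can extend the default listing instead of
reimplementing it.  A small sketch under the same assumption; the cmd_*
names are made up for the example:

    class Plugin:
        def __getattr__(self, name):
            if name.startswith("cmd_"):
                return lambda: name
            raise AttributeError(name)

        def __dir__(self):
            # start from the default implementation, then advertise the
            # dynamically generated names so introspection can see them
            return object.__dir__(self) + ["cmd_start", "cmd_stop"]

    p = Plugin()
    assert "cmd_start" in dir(p)
    assert "__class__" in dir(p)    # inherited defaults are still included
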
diff --git a/Objects/typeslots.inc b/Objects/typeslots.inc
--- a/Objects/typeslots.inc
+++ b/Objects/typeslots.inc
@@ -1,4 +1,4 @@
-/* Generated by typeslots.py $Revision$ */
+/* Generated by typeslots.py */
 0,
 0,
 offsetof(PyHeapTypeObject, as_mapping.mp_ass_subscript),
diff --git a/Objects/typeslots.py b/Objects/typeslots.py
--- a/Objects/typeslots.py
+++ b/Objects/typeslots.py
@@ -3,7 +3,7 @@
 
 import sys, re
 
-print("/* Generated by typeslots.py $Revision$ */")
+print("/* Generated by typeslots.py */")
 res = {}
 for line in sys.stdin:
     m = re.match("#define Py_([a-z_]+) ([0-9]+)", line)
diff --git a/Parser/Python.asdl b/Parser/Python.asdl
--- a/Parser/Python.asdl
+++ b/Parser/Python.asdl
@@ -28,11 +28,10 @@
 	      | For(expr target, expr iter, stmt* body, stmt* orelse)
 	      | While(expr test, stmt* body, stmt* orelse)
 	      | If(expr test, stmt* body, stmt* orelse)
-	      | With(expr context_expr, expr? optional_vars, stmt* body)
+	      | With(withitem* items, stmt* body)
 
 	      | Raise(expr? exc, expr? cause)
-	      | TryExcept(stmt* body, excepthandler* handlers, stmt* orelse)
-	      | TryFinally(stmt* body, stmt* finalbody)
+	      | Try(stmt* body, excepthandler* handlers, stmt* orelse, stmt* finalbody)
 	      | Assert(expr test, expr? msg)
 
 	      | Import(alias* names)
@@ -115,5 +114,7 @@
 
         -- import name with optional 'as' alias.
         alias = (identifier name, identifier? asname)
+
+        withitem = (expr context_expr, expr? optional_vars)
 }
 
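
The ASDL change collapses TryExcept/TryFinally into a single Try node and
makes With carry a list of withitem entries (the With side is sketched
after the Python/ast.c hunk further below).  With this grammar in effect
the ast module is expected to expose shapes like the following (a sketch,
not verbatim output):

    import ast

    src = ("try:\n    work()\n"
           "except ValueError:\n    pass\n"
           "finally:\n    cleanup()\n")
    stmt = ast.parse(src).body[0]
    print(type(stmt).__name__)                      # Try  (one node, no nesting)
    print(len(stmt.handlers), len(stmt.finalbody))  # 1 1
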
diff --git a/Parser/myreadline.c b/Parser/myreadline.c
--- a/Parser/myreadline.c
+++ b/Parser/myreadline.c
@@ -40,6 +40,7 @@
         if (PyOS_InputHook != NULL)
             (void)(PyOS_InputHook)();
         errno = 0;
+        clearerr(fp);
         p = fgets(buf, len, fp);
         if (p != NULL)
             return 0; /* No error */
diff --git a/Parser/parsetok.c b/Parser/parsetok.c
--- a/Parser/parsetok.c
+++ b/Parser/parsetok.c
@@ -232,7 +232,7 @@
     PyParser_Delete(ps);
 
     if (n == NULL) {
-        if (tok->lineno <= 1 && tok->done == E_EOF)
+        if (tok->done == E_EOF)
             err_ret->error = E_EOF;
         err_ret->lineno = tok->lineno;
         if (tok->buf != NULL) {
diff --git a/Python/Python-ast.c b/Python/Python-ast.c
--- a/Python/Python-ast.c
+++ b/Python/Python-ast.c
@@ -2,7 +2,7 @@
 
 
 /*
-   __version__ 0daa6ba25d9b.
+   __version__ e0e663132363.
 
    This module must be committed separately after each AST grammar change;
    The __version__ number is set to the revision number of the commit
@@ -95,8 +95,7 @@
 };
 static PyTypeObject *With_type;
 static char *With_fields[]={
-        "context_expr",
-        "optional_vars",
+        "items",
         "body",
 };
 static PyTypeObject *Raise_type;
@@ -104,15 +103,11 @@
         "exc",
         "cause",
 };
-static PyTypeObject *TryExcept_type;
-static char *TryExcept_fields[]={
+static PyTypeObject *Try_type;
+static char *Try_fields[]={
         "body",
         "handlers",
         "orelse",
-};
-static PyTypeObject *TryFinally_type;
-static char *TryFinally_fields[]={
-        "body",
         "finalbody",
 };
 static PyTypeObject *Assert_type;
@@ -392,6 +387,12 @@
         "name",
         "asname",
 };
+static PyTypeObject *withitem_type;
+static PyObject* ast2obj_withitem(void*);
+static char *withitem_fields[]={
+        "context_expr",
+        "optional_vars",
+};
 
 
 static int
@@ -680,15 +681,12 @@
         if (!While_type) return 0;
         If_type = make_type("If", stmt_type, If_fields, 3);
         if (!If_type) return 0;
-        With_type = make_type("With", stmt_type, With_fields, 3);
+        With_type = make_type("With", stmt_type, With_fields, 2);
         if (!With_type) return 0;
         Raise_type = make_type("Raise", stmt_type, Raise_fields, 2);
         if (!Raise_type) return 0;
-        TryExcept_type = make_type("TryExcept", stmt_type, TryExcept_fields, 3);
-        if (!TryExcept_type) return 0;
-        TryFinally_type = make_type("TryFinally", stmt_type, TryFinally_fields,
-                                    2);
-        if (!TryFinally_type) return 0;
+        Try_type = make_type("Try", stmt_type, Try_fields, 4);
+        if (!Try_type) return 0;
         Assert_type = make_type("Assert", stmt_type, Assert_fields, 2);
         if (!Assert_type) return 0;
         Import_type = make_type("Import", stmt_type, Import_fields, 1);
@@ -938,6 +936,8 @@
         if (!keyword_type) return 0;
         alias_type = make_type("alias", &AST_type, alias_fields, 2);
         if (!alias_type) return 0;
+        withitem_type = make_type("withitem", &AST_type, withitem_fields, 2);
+        if (!withitem_type) return 0;
         initialized = 1;
         return 1;
 }
@@ -960,6 +960,7 @@
 static int obj2ast_arg(PyObject* obj, arg_ty* out, PyArena* arena);
 static int obj2ast_keyword(PyObject* obj, keyword_ty* out, PyArena* arena);
 static int obj2ast_alias(PyObject* obj, alias_ty* out, PyArena* arena);
+static int obj2ast_withitem(PyObject* obj, withitem_ty* out, PyArena* arena);
 
 mod_ty
 Module(asdl_seq * body, PyArena *arena)
@@ -1225,21 +1226,15 @@
 }
 
 stmt_ty
-With(expr_ty context_expr, expr_ty optional_vars, asdl_seq * body, int lineno,
-     int col_offset, PyArena *arena)
+With(asdl_seq * items, asdl_seq * body, int lineno, int col_offset, PyArena
+     *arena)
 {
         stmt_ty p;
-        if (!context_expr) {
-                PyErr_SetString(PyExc_ValueError,
-                                "field context_expr is required for With");
-                return NULL;
-        }
         p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p));
         if (!p)
                 return NULL;
         p->kind = With_kind;
-        p->v.With.context_expr = context_expr;
-        p->v.With.optional_vars = optional_vars;
+        p->v.With.items = items;
         p->v.With.body = body;
         p->lineno = lineno;
         p->col_offset = col_offset;
@@ -1262,33 +1257,18 @@
 }
 
 stmt_ty
-TryExcept(asdl_seq * body, asdl_seq * handlers, asdl_seq * orelse, int lineno,
-          int col_offset, PyArena *arena)
+Try(asdl_seq * body, asdl_seq * handlers, asdl_seq * orelse, asdl_seq *
+    finalbody, int lineno, int col_offset, PyArena *arena)
 {
         stmt_ty p;
         p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p));
         if (!p)
                 return NULL;
-        p->kind = TryExcept_kind;
-        p->v.TryExcept.body = body;
-        p->v.TryExcept.handlers = handlers;
-        p->v.TryExcept.orelse = orelse;
-        p->lineno = lineno;
-        p->col_offset = col_offset;
-        return p;
-}
-
-stmt_ty
-TryFinally(asdl_seq * body, asdl_seq * finalbody, int lineno, int col_offset,
-           PyArena *arena)
-{
-        stmt_ty p;
-        p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p));
-        if (!p)
-                return NULL;
-        p->kind = TryFinally_kind;
-        p->v.TryFinally.body = body;
-        p->v.TryFinally.finalbody = finalbody;
+        p->kind = Try_kind;
+        p->v.Try.body = body;
+        p->v.Try.handlers = handlers;
+        p->v.Try.orelse = orelse;
+        p->v.Try.finalbody = finalbody;
         p->lineno = lineno;
         p->col_offset = col_offset;
         return p;
@@ -2135,6 +2115,23 @@
         return p;
 }
 
+withitem_ty
+withitem(expr_ty context_expr, expr_ty optional_vars, PyArena *arena)
+{
+        withitem_ty p;
+        if (!context_expr) {
+                PyErr_SetString(PyExc_ValueError,
+                                "field context_expr is required for withitem");
+                return NULL;
+        }
+        p = (withitem_ty)PyArena_Malloc(arena, sizeof(*p));
+        if (!p)
+                return NULL;
+        p->context_expr = context_expr;
+        p->optional_vars = optional_vars;
+        return p;
+}
+
 
 PyObject*
 ast2obj_mod(void* _o)
@@ -2390,15 +2387,9 @@
         case With_kind:
                 result = PyType_GenericNew(With_type, NULL, NULL);
                 if (!result) goto failed;
-                value = ast2obj_expr(o->v.With.context_expr);
+                value = ast2obj_list(o->v.With.items, ast2obj_withitem);
                 if (!value) goto failed;
-                if (PyObject_SetAttrString(result, "context_expr", value) == -1)
-                        goto failed;
-                Py_DECREF(value);
-                value = ast2obj_expr(o->v.With.optional_vars);
-                if (!value) goto failed;
-                if (PyObject_SetAttrString(result, "optional_vars", value) ==
-                    -1)
+                if (PyObject_SetAttrString(result, "items", value) == -1)
                         goto failed;
                 Py_DECREF(value);
                 value = ast2obj_list(o->v.With.body, ast2obj_stmt);
@@ -2421,35 +2412,25 @@
                         goto failed;
                 Py_DECREF(value);
                 break;
-        case TryExcept_kind:
-                result = PyType_GenericNew(TryExcept_type, NULL, NULL);
+        case Try_kind:
+                result = PyType_GenericNew(Try_type, NULL, NULL);
                 if (!result) goto failed;
-                value = ast2obj_list(o->v.TryExcept.body, ast2obj_stmt);
+                value = ast2obj_list(o->v.Try.body, ast2obj_stmt);
                 if (!value) goto failed;
                 if (PyObject_SetAttrString(result, "body", value) == -1)
                         goto failed;
                 Py_DECREF(value);
-                value = ast2obj_list(o->v.TryExcept.handlers,
-                                     ast2obj_excepthandler);
+                value = ast2obj_list(o->v.Try.handlers, ast2obj_excepthandler);
                 if (!value) goto failed;
                 if (PyObject_SetAttrString(result, "handlers", value) == -1)
                         goto failed;
                 Py_DECREF(value);
-                value = ast2obj_list(o->v.TryExcept.orelse, ast2obj_stmt);
+                value = ast2obj_list(o->v.Try.orelse, ast2obj_stmt);
                 if (!value) goto failed;
                 if (PyObject_SetAttrString(result, "orelse", value) == -1)
                         goto failed;
                 Py_DECREF(value);
-                break;
-        case TryFinally_kind:
-                result = PyType_GenericNew(TryFinally_type, NULL, NULL);
-                if (!result) goto failed;
-                value = ast2obj_list(o->v.TryFinally.body, ast2obj_stmt);
-                if (!value) goto failed;
-                if (PyObject_SetAttrString(result, "body", value) == -1)
-                        goto failed;
-                Py_DECREF(value);
-                value = ast2obj_list(o->v.TryFinally.finalbody, ast2obj_stmt);
+                value = ast2obj_list(o->v.Try.finalbody, ast2obj_stmt);
                 if (!value) goto failed;
                 if (PyObject_SetAttrString(result, "finalbody", value) == -1)
                         goto failed;
@@ -3370,6 +3351,35 @@
         return NULL;
 }
 
+PyObject*
+ast2obj_withitem(void* _o)
+{
+        withitem_ty o = (withitem_ty)_o;
+        PyObject *result = NULL, *value = NULL;
+        if (!o) {
+                Py_INCREF(Py_None);
+                return Py_None;
+        }
+
+        result = PyType_GenericNew(withitem_type, NULL, NULL);
+        if (!result) return NULL;
+        value = ast2obj_expr(o->context_expr);
+        if (!value) goto failed;
+        if (PyObject_SetAttrString(result, "context_expr", value) == -1)
+                goto failed;
+        Py_DECREF(value);
+        value = ast2obj_expr(o->optional_vars);
+        if (!value) goto failed;
+        if (PyObject_SetAttrString(result, "optional_vars", value) == -1)
+                goto failed;
+        Py_DECREF(value);
+        return result;
+failed:
+        Py_XDECREF(value);
+        Py_XDECREF(result);
+        return NULL;
+}
+
 
 int
 obj2ast_mod(PyObject* obj, mod_ty* out, PyArena* arena)
@@ -4210,33 +4220,34 @@
                 return 1;
         }
         if (isinstance) {
-                expr_ty context_expr;
-                expr_ty optional_vars;
+                asdl_seq* items;
                 asdl_seq* body;
 
-                if (PyObject_HasAttrString(obj, "context_expr")) {
+                if (PyObject_HasAttrString(obj, "items")) {
                         int res;
-                        tmp = PyObject_GetAttrString(obj, "context_expr");
+                        Py_ssize_t len;
+                        Py_ssize_t i;
+                        tmp = PyObject_GetAttrString(obj, "items");
                         if (tmp == NULL) goto failed;
-                        res = obj2ast_expr(tmp, &context_expr, arena);
-                        if (res != 0) goto failed;
+                        if (!PyList_Check(tmp)) {
+                                PyErr_Format(PyExc_TypeError, "With field \"items\" must be a list, not a %.200s", tmp->ob_type->tp_name);
+                                goto failed;
+                        }
+                        len = PyList_GET_SIZE(tmp);
+                        items = asdl_seq_new(len, arena);
+                        if (items == NULL) goto failed;
+                        for (i = 0; i < len; i++) {
+                                withitem_ty value;
+                                res = obj2ast_withitem(PyList_GET_ITEM(tmp, i), &value, arena);
+                                if (res != 0) goto failed;
+                                asdl_seq_SET(items, i, value);
+                        }
                         Py_XDECREF(tmp);
                         tmp = NULL;
                 } else {
-                        PyErr_SetString(PyExc_TypeError, "required field \"context_expr\" missing from With");
+                        PyErr_SetString(PyExc_TypeError, "required field \"items\" missing from With");
                         return 1;
                 }
-                if (PyObject_HasAttrString(obj, "optional_vars")) {
-                        int res;
-                        tmp = PyObject_GetAttrString(obj, "optional_vars");
-                        if (tmp == NULL) goto failed;
-                        res = obj2ast_expr(tmp, &optional_vars, arena);
-                        if (res != 0) goto failed;
-                        Py_XDECREF(tmp);
-                        tmp = NULL;
-                } else {
-                        optional_vars = NULL;
-                }
                 if (PyObject_HasAttrString(obj, "body")) {
                         int res;
                         Py_ssize_t len;
@@ -4262,8 +4273,7 @@
                         PyErr_SetString(PyExc_TypeError, "required field \"body\" missing from With");
                         return 1;
                 }
-                *out = With(context_expr, optional_vars, body, lineno,
-                            col_offset, arena);
+                *out = With(items, body, lineno, col_offset, arena);
                 if (*out == NULL) goto failed;
                 return 0;
         }
@@ -4301,7 +4311,7 @@
                 if (*out == NULL) goto failed;
                 return 0;
         }
-        isinstance = PyObject_IsInstance(obj, (PyObject*)TryExcept_type);
+        isinstance = PyObject_IsInstance(obj, (PyObject*)Try_type);
         if (isinstance == -1) {
                 return 1;
         }
@@ -4309,6 +4319,7 @@
                 asdl_seq* body;
                 asdl_seq* handlers;
                 asdl_seq* orelse;
+                asdl_seq* finalbody;
 
                 if (PyObject_HasAttrString(obj, "body")) {
                         int res;
@@ -4317,7 +4328,7 @@
                         tmp = PyObject_GetAttrString(obj, "body");
                         if (tmp == NULL) goto failed;
                         if (!PyList_Check(tmp)) {
-                                PyErr_Format(PyExc_TypeError, "TryExcept field \"body\" must be a list, not a %.200s", tmp->ob_type->tp_name);
+                                PyErr_Format(PyExc_TypeError, "Try field \"body\" must be a list, not a %.200s", tmp->ob_type->tp_name);
                                 goto failed;
                         }
                         len = PyList_GET_SIZE(tmp);
@@ -4332,7 +4343,7 @@
                         Py_XDECREF(tmp);
                         tmp = NULL;
                 } else {
-                        PyErr_SetString(PyExc_TypeError, "required field \"body\" missing from TryExcept");
+                        PyErr_SetString(PyExc_TypeError, "required field \"body\" missing from Try");
                         return 1;
                 }
                 if (PyObject_HasAttrString(obj, "handlers")) {
@@ -4342,7 +4353,7 @@
                         tmp = PyObject_GetAttrString(obj, "handlers");
                         if (tmp == NULL) goto failed;
                         if (!PyList_Check(tmp)) {
-                                PyErr_Format(PyExc_TypeError, "TryExcept field \"handlers\" must be a list, not a %.200s", tmp->ob_type->tp_name);
+                                PyErr_Format(PyExc_TypeError, "Try field \"handlers\" must be a list, not a %.200s", tmp->ob_type->tp_name);
                                 goto failed;
                         }
                         len = PyList_GET_SIZE(tmp);
@@ -4357,7 +4368,7 @@
                         Py_XDECREF(tmp);
                         tmp = NULL;
                 } else {
-                        PyErr_SetString(PyExc_TypeError, "required field \"handlers\" missing from TryExcept");
+                        PyErr_SetString(PyExc_TypeError, "required field \"handlers\" missing from Try");
                         return 1;
                 }
                 if (PyObject_HasAttrString(obj, "orelse")) {
@@ -4367,7 +4378,7 @@
                         tmp = PyObject_GetAttrString(obj, "orelse");
                         if (tmp == NULL) goto failed;
                         if (!PyList_Check(tmp)) {
-                                PyErr_Format(PyExc_TypeError, "TryExcept field \"orelse\" must be a list, not a %.200s", tmp->ob_type->tp_name);
+                                PyErr_Format(PyExc_TypeError, "Try field \"orelse\" must be a list, not a %.200s", tmp->ob_type->tp_name);
                                 goto failed;
                         }
                         len = PyList_GET_SIZE(tmp);
@@ -4382,45 +4393,7 @@
                         Py_XDECREF(tmp);
                         tmp = NULL;
                 } else {
-                        PyErr_SetString(PyExc_TypeError, "required field \"orelse\" missing from TryExcept");
-                        return 1;
-                }
-                *out = TryExcept(body, handlers, orelse, lineno, col_offset,
-                                 arena);
-                if (*out == NULL) goto failed;
-                return 0;
-        }
-        isinstance = PyObject_IsInstance(obj, (PyObject*)TryFinally_type);
-        if (isinstance == -1) {
-                return 1;
-        }
-        if (isinstance) {
-                asdl_seq* body;
-                asdl_seq* finalbody;
-
-                if (PyObject_HasAttrString(obj, "body")) {
-                        int res;
-                        Py_ssize_t len;
-                        Py_ssize_t i;
-                        tmp = PyObject_GetAttrString(obj, "body");
-                        if (tmp == NULL) goto failed;
-                        if (!PyList_Check(tmp)) {
-                                PyErr_Format(PyExc_TypeError, "TryFinally field \"body\" must be a list, not a %.200s", tmp->ob_type->tp_name);
-                                goto failed;
-                        }
-                        len = PyList_GET_SIZE(tmp);
-                        body = asdl_seq_new(len, arena);
-                        if (body == NULL) goto failed;
-                        for (i = 0; i < len; i++) {
-                                stmt_ty value;
-                                res = obj2ast_stmt(PyList_GET_ITEM(tmp, i), &value, arena);
-                                if (res != 0) goto failed;
-                                asdl_seq_SET(body, i, value);
-                        }
-                        Py_XDECREF(tmp);
-                        tmp = NULL;
-                } else {
-                        PyErr_SetString(PyExc_TypeError, "required field \"body\" missing from TryFinally");
+                        PyErr_SetString(PyExc_TypeError, "required field \"orelse\" missing from Try");
                         return 1;
                 }
                 if (PyObject_HasAttrString(obj, "finalbody")) {
@@ -4430,7 +4403,7 @@
                         tmp = PyObject_GetAttrString(obj, "finalbody");
                         if (tmp == NULL) goto failed;
                         if (!PyList_Check(tmp)) {
-                                PyErr_Format(PyExc_TypeError, "TryFinally field \"finalbody\" must be a list, not a %.200s", tmp->ob_type->tp_name);
+                                PyErr_Format(PyExc_TypeError, "Try field \"finalbody\" must be a list, not a %.200s", tmp->ob_type->tp_name);
                                 goto failed;
                         }
                         len = PyList_GET_SIZE(tmp);
@@ -4445,10 +4418,11 @@
                         Py_XDECREF(tmp);
                         tmp = NULL;
                 } else {
-                        PyErr_SetString(PyExc_TypeError, "required field \"finalbody\" missing from TryFinally");
+                        PyErr_SetString(PyExc_TypeError, "required field \"finalbody\" missing from Try");
                         return 1;
                 }
-                *out = TryFinally(body, finalbody, lineno, col_offset, arena);
+                *out = Try(body, handlers, orelse, finalbody, lineno,
+                           col_offset, arena);
                 if (*out == NULL) goto failed;
                 return 0;
         }
@@ -6723,6 +6697,43 @@
         return 1;
 }
 
+int
+obj2ast_withitem(PyObject* obj, withitem_ty* out, PyArena* arena)
+{
+        PyObject* tmp = NULL;
+        expr_ty context_expr;
+        expr_ty optional_vars;
+
+        if (PyObject_HasAttrString(obj, "context_expr")) {
+                int res;
+                tmp = PyObject_GetAttrString(obj, "context_expr");
+                if (tmp == NULL) goto failed;
+                res = obj2ast_expr(tmp, &context_expr, arena);
+                if (res != 0) goto failed;
+                Py_XDECREF(tmp);
+                tmp = NULL;
+        } else {
+                PyErr_SetString(PyExc_TypeError, "required field \"context_expr\" missing from withitem");
+                return 1;
+        }
+        if (PyObject_HasAttrString(obj, "optional_vars")) {
+                int res;
+                tmp = PyObject_GetAttrString(obj, "optional_vars");
+                if (tmp == NULL) goto failed;
+                res = obj2ast_expr(tmp, &optional_vars, arena);
+                if (res != 0) goto failed;
+                Py_XDECREF(tmp);
+                tmp = NULL;
+        } else {
+                optional_vars = NULL;
+        }
+        *out = withitem(context_expr, optional_vars, arena);
+        return 0;
+failed:
+        Py_XDECREF(tmp);
+        return 1;
+}
+
 
 static struct PyModuleDef _astmodule = {
   PyModuleDef_HEAD_INIT, "_ast"
@@ -6739,7 +6750,7 @@
             NULL;
         if (PyModule_AddIntConstant(m, "PyCF_ONLY_AST", PyCF_ONLY_AST) < 0)
                 return NULL;
-        if (PyModule_AddStringConstant(m, "__version__", "0daa6ba25d9b") < 0)
+        if (PyModule_AddStringConstant(m, "__version__", "e0e663132363") < 0)
                 return NULL;
         if (PyDict_SetItemString(d, "mod", (PyObject*)mod_type) < 0) return
             NULL;
@@ -6774,10 +6785,8 @@
             NULL;
         if (PyDict_SetItemString(d, "Raise", (PyObject*)Raise_type) < 0) return
             NULL;
-        if (PyDict_SetItemString(d, "TryExcept", (PyObject*)TryExcept_type) <
-            0) return NULL;
-        if (PyDict_SetItemString(d, "TryFinally", (PyObject*)TryFinally_type) <
-            0) return NULL;
+        if (PyDict_SetItemString(d, "Try", (PyObject*)Try_type) < 0) return
+            NULL;
         if (PyDict_SetItemString(d, "Assert", (PyObject*)Assert_type) < 0)
             return NULL;
         if (PyDict_SetItemString(d, "Import", (PyObject*)Import_type) < 0)
@@ -6940,6 +6949,8 @@
             return NULL;
         if (PyDict_SetItemString(d, "alias", (PyObject*)alias_type) < 0) return
             NULL;
+        if (PyDict_SetItemString(d, "withitem", (PyObject*)withitem_type) < 0)
+            return NULL;
         return m;
 }
 
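
The generated conversion functions above also define how hand-built AST
objects are validated on the way back into C: withitem.context_expr is
required, while optional_vars may be left as None.  A hedged round-trip
sketch against this changeset's ast module (CM is a throwaway context
manager written for the example):

    import ast

    class CM:
        def __enter__(self): return self
        def __exit__(self, *exc): return False

    item = ast.withitem(context_expr=ast.Name(id="cm", ctx=ast.Load()),
                        optional_vars=None)          # optional field
    module = ast.Module(body=[ast.With(items=[item], body=[ast.Pass()])])
    ast.fix_missing_locations(module)

    code = compile(module, "<synth>", "exec")        # exercises obj2ast_withitem
    exec(code, {"cm": CM()})
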
diff --git a/Python/ast.c b/Python/ast.c
--- a/Python/ast.c
+++ b/Python/ast.c
@@ -2893,7 +2893,7 @@
 {
     const int nch = NCH(n);
     int n_except = (nch - 3)/3;
-    asdl_seq *body, *orelse = NULL, *finally = NULL;
+    asdl_seq *body, *handlers = NULL, *orelse = NULL, *finally = NULL;
 
     REQ(n, try_stmt);
 
@@ -2934,9 +2934,8 @@
 
     if (n_except > 0) {
         int i;
-        stmt_ty except_st;
         /* process except statements to create a try ... except */
-        asdl_seq *handlers = asdl_seq_new(n_except, c->c_arena);
+        handlers = asdl_seq_new(n_except, c->c_arena);
         if (handlers == NULL)
             return NULL;
 
@@ -2947,28 +2946,15 @@
                 return NULL;
             asdl_seq_SET(handlers, i, e);
         }
-
-        except_st = TryExcept(body, handlers, orelse, LINENO(n),
-                              n->n_col_offset, c->c_arena);
-        if (!finally)
-            return except_st;
-
-        /* if a 'finally' is present too, we nest the TryExcept within a
-           TryFinally to emulate try ... except ... finally */
-        body = asdl_seq_new(1, c->c_arena);
-        if (body == NULL)
-            return NULL;
-        asdl_seq_SET(body, 0, except_st);
     }
 
-    /* must be a try ... finally (except clauses are in body, if any exist) */
-    assert(finally != NULL);
-    return TryFinally(body, finally, LINENO(n), n->n_col_offset, c->c_arena);
+    assert(finally != NULL || asdl_seq_LEN(handlers));
+    return Try(body, handlers, orelse, finally, LINENO(n), n->n_col_offset, c->c_arena);
 }
 
 /* with_item: test ['as' expr] */
-static stmt_ty
-ast_for_with_item(struct compiling *c, const node *n, asdl_seq *content)
+static withitem_ty
+ast_for_with_item(struct compiling *c, const node *n)
 {
     expr_ty context_expr, optional_vars = NULL;
 
@@ -2987,43 +2973,32 @@
         }
     }
 
-    return With(context_expr, optional_vars, content, LINENO(n),
-                n->n_col_offset, c->c_arena);
+    return withitem(context_expr, optional_vars, c->c_arena);
 }
 
 /* with_stmt: 'with' with_item (',' with_item)* ':' suite */
 static stmt_ty
 ast_for_with_stmt(struct compiling *c, const node *n)
 {
-    int i;
-    stmt_ty ret;
-    asdl_seq *inner;
+    int i, n_items;
+    asdl_seq *items, *body;
 
     REQ(n, with_stmt);
 
-    /* process the with items inside-out */
-    i = NCH(n) - 1;
-    /* the suite of the innermost with item is the suite of the with stmt */
-    inner = ast_for_suite(c, CHILD(n, i));
-    if (!inner)
+    n_items = (NCH(n) - 2) / 2;
+    items = asdl_seq_new(n_items, c->c_arena);
+    for (i = 1; i < NCH(n) - 2; i += 2) {
+        withitem_ty item = ast_for_with_item(c, CHILD(n, i));
+        if (!item)
+            return NULL;
+        asdl_seq_SET(items, (i - 1) / 2, item);
+    }
+
+    body = ast_for_suite(c, CHILD(n, NCH(n) - 1));
+    if (!body)
         return NULL;
 
-    for (;;) {
-        i -= 2;
-        ret = ast_for_with_item(c, CHILD(n, i), inner);
-        if (!ret)
-            return NULL;
-        /* was this the last item? */
-        if (i == 1)
-            break;
-        /* if not, wrap the result so far in a new sequence */
-        inner = asdl_seq_new(1, c->c_arena);
-        if (!inner)
-            return NULL;
-        asdl_seq_SET(inner, 0, ret);
-    }
-
-    return ret;
+    return With(items, body, LINENO(n), n->n_col_offset, c->c_arena);
 }
 
 static stmt_ty
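
ast_for_with_stmt() no longer desugars "with a, b:" into nested With nodes
at parse time; every item is recorded in one node and the nesting is left
to the compiler.  The difference is easy to observe (the "before" shape is
read off the removed code above, the "after" shape assumes this changeset):

    import ast

    node = ast.parse("with a, b:\n    pass").body[0]

    # before: nested With nodes, one per item
    # after:  one With with items=[withitem(Name('a')), withitem(Name('b'))]
    print(len(node.items))                       # 2
    print(isinstance(node.body[0], ast.Pass))    # True
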
diff --git a/Python/compile.c b/Python/compile.c
--- a/Python/compile.c
+++ b/Python/compile.c
@@ -135,6 +135,7 @@
 
 struct compiler {
     const char *c_filename;
+    PyObject *c_filename_obj;
     struct symtable *c_st;
     PyFutureFeatures *c_future; /* pointer to module's __future__ */
     PyCompilerFlags *c_flags;
@@ -178,12 +179,13 @@
 static int inplace_binop(struct compiler *, operator_ty);
 static int expr_constant(struct compiler *, expr_ty);
 
-static int compiler_with(struct compiler *, stmt_ty);
+static int compiler_with(struct compiler *, stmt_ty, int);
 static int compiler_call_helper(struct compiler *c, int n,
                                 asdl_seq *args,
                                 asdl_seq *keywords,
                                 expr_ty starargs,
                                 expr_ty kwargs);
+static int compiler_try_except(struct compiler *, stmt_ty);
 
 static PyCodeObject *assemble(struct compiler *, int addNone);
 static PyObject *__doc__;
@@ -272,6 +274,9 @@
     if (!compiler_init(&c))
         return NULL;
     c.c_filename = filename;
+    c.c_filename_obj = PyUnicode_DecodeFSDefault(filename);
+    if (!c.c_filename_obj)
+        goto finally;
     c.c_arena = arena;
     c.c_future = PyFuture_FromAST(mod, filename);
     if (c.c_future == NULL)
@@ -324,6 +329,8 @@
         PySymtable_Free(c->c_st);
     if (c->c_future)
         PyObject_Free(c->c_future);
+    if (c->c_filename_obj)
+        Py_DECREF(c->c_filename_obj);
     Py_DECREF(c->c_stack);
 }
 
@@ -1892,7 +1899,13 @@
     compiler_use_next_block(c, body);
     if (!compiler_push_fblock(c, FINALLY_TRY, body))
         return 0;
-    VISIT_SEQ(c, stmt, s->v.TryFinally.body);
+    if (s->v.Try.handlers && asdl_seq_LEN(s->v.Try.handlers)) {
+        if (!compiler_try_except(c, s))
+            return 0;
+    }
+    else {
+        VISIT_SEQ(c, stmt, s->v.Try.body);
+    }
     ADDOP(c, POP_BLOCK);
     compiler_pop_fblock(c, FINALLY_TRY, body);
 
@@ -1900,7 +1913,7 @@
     compiler_use_next_block(c, end);
     if (!compiler_push_fblock(c, FINALLY_END, end))
         return 0;
-    VISIT_SEQ(c, stmt, s->v.TryFinally.finalbody);
+    VISIT_SEQ(c, stmt, s->v.Try.finalbody);
     ADDOP(c, END_FINALLY);
     compiler_pop_fblock(c, FINALLY_END, end);
 
@@ -1954,15 +1967,15 @@
     compiler_use_next_block(c, body);
     if (!compiler_push_fblock(c, EXCEPT, body))
         return 0;
-    VISIT_SEQ(c, stmt, s->v.TryExcept.body);
+    VISIT_SEQ(c, stmt, s->v.Try.body);
     ADDOP(c, POP_BLOCK);
     compiler_pop_fblock(c, EXCEPT, body);
     ADDOP_JREL(c, JUMP_FORWARD, orelse);
-    n = asdl_seq_LEN(s->v.TryExcept.handlers);
+    n = asdl_seq_LEN(s->v.Try.handlers);
     compiler_use_next_block(c, except);
     for (i = 0; i < n; i++) {
         excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET(
-                                        s->v.TryExcept.handlers, i);
+            s->v.Try.handlers, i);
         if (!handler->v.ExceptHandler.type && i < n-1)
             return compiler_error(c, "default 'except:' must be last");
         c->u->u_lineno_set = 0;
@@ -1979,82 +1992,91 @@
         }
         ADDOP(c, POP_TOP);
         if (handler->v.ExceptHandler.name) {
-        basicblock *cleanup_end, *cleanup_body;
-
-        cleanup_end = compiler_new_block(c);
-        cleanup_body = compiler_new_block(c);
-        if(!(cleanup_end || cleanup_body))
-        return 0;
-
-        compiler_nameop(c, handler->v.ExceptHandler.name, Store);
-        ADDOP(c, POP_TOP);
-
-        /*
-        try:
-            # body
-        except type as name:
-            try:
-            # body
-            finally:
-            name = None
-            del name
-        */
-
-        /* second try: */
-        ADDOP_JREL(c, SETUP_FINALLY, cleanup_end);
-        compiler_use_next_block(c, cleanup_body);
-        if (!compiler_push_fblock(c, FINALLY_TRY, cleanup_body))
-            return 0;
-
-        /* second # body */
-        VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body);
-        ADDOP(c, POP_BLOCK);
-        ADDOP(c, POP_EXCEPT);
-        compiler_pop_fblock(c, FINALLY_TRY, cleanup_body);
-
-        /* finally: */
-        ADDOP_O(c, LOAD_CONST, Py_None, consts);
-        compiler_use_next_block(c, cleanup_end);
-        if (!compiler_push_fblock(c, FINALLY_END, cleanup_end))
-            return 0;
-
-        /* name = None */
-        ADDOP_O(c, LOAD_CONST, Py_None, consts);
-        compiler_nameop(c, handler->v.ExceptHandler.name, Store);
-
-        /* del name */
-        compiler_nameop(c, handler->v.ExceptHandler.name, Del);
-
-        ADDOP(c, END_FINALLY);
-        compiler_pop_fblock(c, FINALLY_END, cleanup_end);
+            basicblock *cleanup_end, *cleanup_body;
+
+            cleanup_end = compiler_new_block(c);
+            cleanup_body = compiler_new_block(c);
+            if (!(cleanup_end || cleanup_body))
+                return 0;
+
+            compiler_nameop(c, handler->v.ExceptHandler.name, Store);
+            ADDOP(c, POP_TOP);
+
+            /*
+              try:
+              # body
+              except type as name:
+              try:
+              # body
+              finally:
+              name = None
+              del name
+            */
+
+            /* second try: */
+            ADDOP_JREL(c, SETUP_FINALLY, cleanup_end);
+            compiler_use_next_block(c, cleanup_body);
+            if (!compiler_push_fblock(c, FINALLY_TRY, cleanup_body))
+                return 0;
+
+            /* second # body */
+            VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body);
+            ADDOP(c, POP_BLOCK);
+            ADDOP(c, POP_EXCEPT);
+            compiler_pop_fblock(c, FINALLY_TRY, cleanup_body);
+
+            /* finally: */
+            ADDOP_O(c, LOAD_CONST, Py_None, consts);
+            compiler_use_next_block(c, cleanup_end);
+            if (!compiler_push_fblock(c, FINALLY_END, cleanup_end))
+                return 0;
+
+            /* name = None */
+            ADDOP_O(c, LOAD_CONST, Py_None, consts);
+            compiler_nameop(c, handler->v.ExceptHandler.name, Store);
+
+            /* del name */
+            compiler_nameop(c, handler->v.ExceptHandler.name, Del);
+
+            ADDOP(c, END_FINALLY);
+            compiler_pop_fblock(c, FINALLY_END, cleanup_end);
         }
         else {
-        basicblock *cleanup_body;
-
-        cleanup_body = compiler_new_block(c);
-        if(!cleanup_body)
-        return 0;
+            basicblock *cleanup_body;
+
+            cleanup_body = compiler_new_block(c);
+            if (!cleanup_body)
+                return 0;
 
             ADDOP(c, POP_TOP);
-        ADDOP(c, POP_TOP);
-        compiler_use_next_block(c, cleanup_body);
-        if (!compiler_push_fblock(c, FINALLY_TRY, cleanup_body))
-            return 0;
+            ADDOP(c, POP_TOP);
+            compiler_use_next_block(c, cleanup_body);
+            if (!compiler_push_fblock(c, FINALLY_TRY, cleanup_body))
+                return 0;
             VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body);
-        ADDOP(c, POP_EXCEPT);
-        compiler_pop_fblock(c, FINALLY_TRY, cleanup_body);
+            ADDOP(c, POP_EXCEPT);
+            compiler_pop_fblock(c, FINALLY_TRY, cleanup_body);
         }
         ADDOP_JREL(c, JUMP_FORWARD, end);
         compiler_use_next_block(c, except);
     }
     ADDOP(c, END_FINALLY);
     compiler_use_next_block(c, orelse);
-    VISIT_SEQ(c, stmt, s->v.TryExcept.orelse);
+    VISIT_SEQ(c, stmt, s->v.Try.orelse);
     compiler_use_next_block(c, end);
     return 1;
 }
 
 static int
+compiler_try(struct compiler *c, stmt_ty s) {
+    if (s->v.Try.finalbody && asdl_seq_LEN(s->v.Try.finalbody))
+        return compiler_try_finally(c, s);
+    else
+        return compiler_try_except(c, s);
+}
+
+
+static int
 compiler_import_as(struct compiler *c, identifier name, identifier asname)
 {
     /* The IMPORT_NAME opcode was already generated.  This function
@@ -2301,10 +2323,8 @@
         }
         ADDOP_I(c, RAISE_VARARGS, n);
         break;
-    case TryExcept_kind:
-        return compiler_try_except(c, s);
-    case TryFinally_kind:
-        return compiler_try_finally(c, s);
+    case Try_kind:
+        return compiler_try(c, s);
     case Assert_kind:
         return compiler_assert(c, s);
     case Import_kind:
@@ -2335,7 +2355,7 @@
     case Continue_kind:
         return compiler_continue(c);
     case With_kind:
-        return compiler_with(c, s);
+        return compiler_with(c, s, 0);
     }
     return 1;
 }
@@ -3062,9 +3082,10 @@
        exit(*exc)
  */
 static int
-compiler_with(struct compiler *c, stmt_ty s)
+compiler_with(struct compiler *c, stmt_ty s, int pos)
 {
     basicblock *block, *finally;
+    withitem_ty item = asdl_seq_GET(s->v.With.items, pos);
 
     assert(s->kind == With_kind);
 
@@ -3074,7 +3095,7 @@
         return 0;
 
     /* Evaluate EXPR */
-    VISIT(c, expr, s->v.With.context_expr);
+    VISIT(c, expr, item->context_expr);
     ADDOP_JREL(c, SETUP_WITH, finally);
 
     /* SETUP_WITH pushes a finally block. */
@@ -3083,16 +3104,20 @@
         return 0;
     }
 
-    if (s->v.With.optional_vars) {
-        VISIT(c, expr, s->v.With.optional_vars);
+    if (item->optional_vars) {
+        VISIT(c, expr, item->optional_vars);
     }
     else {
     /* Discard result from context.__enter__() */
         ADDOP(c, POP_TOP);
     }
 
-    /* BLOCK code */
-    VISIT_SEQ(c, stmt, s->v.With.body);
+    pos++;
+    if (pos == asdl_seq_LEN(s->v.With.items))
+        /* BLOCK code */
+        VISIT_SEQ(c, stmt, s->v.With.body)
+    else if (!compiler_with(c, s, pos))
+            return 0;
 
     /* End of try block; start the finally block */
     ADDOP(c, POP_BLOCK);
@@ -3361,7 +3386,7 @@
 static int
 compiler_error(struct compiler *c, const char *errstr)
 {
-    PyObject *loc, *filename;
+    PyObject *loc;
     PyObject *u = NULL, *v = NULL;
 
     loc = PyErr_ProgramText(c->c_filename, c->u->u_lineno);
@@ -3369,16 +3394,7 @@
         Py_INCREF(Py_None);
         loc = Py_None;
     }
-    if (c->c_filename != NULL) {
-        filename = PyUnicode_DecodeFSDefault(c->c_filename);
-        if (!filename)
-            goto exit;
-    }
-    else {
-        Py_INCREF(Py_None);
-        filename = Py_None;
-    }
-    u = Py_BuildValue("(NiiO)", filename, c->u->u_lineno,
+    u = Py_BuildValue("(OiiO)", c->c_filename_obj, c->u->u_lineno,
                       c->u->u_col_offset, loc);
     if (!u)
         goto exit;
@@ -3927,7 +3943,6 @@
     PyObject *consts = NULL;
     PyObject *names = NULL;
     PyObject *varnames = NULL;
-    PyObject *filename = NULL;
     PyObject *name = NULL;
     PyObject *freevars = NULL;
     PyObject *cellvars = NULL;
@@ -3951,10 +3966,6 @@
     freevars = dict_keys_inorder(c->u->u_freevars, PyTuple_Size(cellvars));
     if (!freevars)
         goto error;
-    filename = PyUnicode_DecodeFSDefault(c->c_filename);
-    if (!filename)
-        goto error;
-
     nlocals = PyDict_Size(c->u->u_varnames);
     flags = compute_code_flags(c);
     if (flags < 0)
@@ -3974,14 +3985,13 @@
                     nlocals, stackdepth(c), flags,
                     bytecode, consts, names, varnames,
                     freevars, cellvars,
-                    filename, c->u->u_name,
+                    c->c_filename_obj, c->u->u_name,
                     c->u->u_firstlineno,
                     a->a_lnotab);
  error:
     Py_XDECREF(consts);
     Py_XDECREF(names);
     Py_XDECREF(varnames);
-    Py_XDECREF(filename);
     Py_XDECREF(name);
     Py_XDECREF(freevars);
     Py_XDECREF(cellvars);
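
At the bytecode level the With change is behaviour-preserving:
compiler_with() now calls itself once per item, so "with a, b:" still sets
up two nested with-blocks, and compiler_try() dispatches to the finally- or
except-flavoured path depending on which fields of the Try node are
populated.  A rough opcode-counting sketch; it assumes the one-or-three-byte
instruction layout this codebase uses:

    import dis

    def count_opcode(source, opname):
        code = compile(source, "<demo>", "exec")
        target = dis.opmap[opname]
        raw, i, n = code.co_code, 0, 0
        while i < len(raw):
            op = raw[i]
            n += (op == target)
            # opcodes at or above HAVE_ARGUMENT carry a two-byte argument
            i += 3 if op >= dis.HAVE_ARGUMENT else 1
        return n

    print(count_opcode("with a, b:\n    pass", "SETUP_WITH"))   # expected: 2
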
diff --git a/Python/import.c b/Python/import.c
--- a/Python/import.c
+++ b/Python/import.c
@@ -110,8 +110,12 @@
    TAG and PYC_TAG_UNICODE must change for each major Python release. The magic
    number will take care of any bytecode changes that occur during development.
 */
+#define QUOTE(arg) #arg
+#define STRIFY(name) QUOTE(name)
+#define MAJOR STRIFY(PY_MAJOR_VERSION)
+#define MINOR STRIFY(PY_MINOR_VERSION)
 #define MAGIC (3180 | ((long)'\r'<<16) | ((long)'\n'<<24))
-#define TAG "cpython-32"
+#define TAG "cpython-" MAJOR MINOR;
 #define CACHEDIR "__pycache__"
 static const Py_UNICODE CACHEDIR_UNICODE[] = {
     '_', '_', 'p', 'y', 'c', 'a', 'c', 'h', 'e', '_', '_', '\0'};
@@ -119,7 +123,11 @@
 static long pyc_magic = MAGIC;
 static const char *pyc_tag = TAG;
 static const Py_UNICODE PYC_TAG_UNICODE[] = {
-    'c', 'p', 'y', 't', 'h', 'o', 'n', '-', '3', '2', '\0'};
+    'c', 'p', 'y', 't', 'h', 'o', 'n', '-', PY_MAJOR_VERSION + 48, PY_MINOR_VERSION + 48, '\0'};
+#undef QUOTE
+#undef STRIFY
+#undef MAJOR
+#undef MINOR
 
 /* See _PyImport_FixupExtensionObject() below */
 static PyObject *extensions = NULL;
@@ -1733,7 +1741,6 @@
     Py_UNICODE buf[MAXPATHLEN+1];
     Py_ssize_t buflen = MAXPATHLEN+1;
     PyObject *path_unicode, *filename;
-    const Py_UNICODE *base;
     Py_ssize_t len;
     struct stat statbuf;
     static struct filedescr fd_package = {"", "", PKG_DIRECTORY};
@@ -1751,7 +1758,6 @@
     else
         return 0;
 
-    base = PyUnicode_AS_UNICODE(path_unicode);
     len = PyUnicode_GET_SIZE(path_unicode);
     if (len + 2 + PyUnicode_GET_SIZE(name) + MAXSUFFIXSIZE >= buflen) {
         Py_DECREF(path_unicode);
@@ -2275,12 +2281,10 @@
 static int
 find_init_module(PyObject *directory)
 {
-    size_t len;
     struct stat statbuf;
     PyObject *filename;
     int match;
 
-    len = PyUnicode_GET_SIZE(directory);
     filename = PyUnicode_FromFormat("%U%c__init__.py", directory, SEP);
     if (filename == NULL)
         return -1;
@@ -2818,7 +2822,7 @@
 }
 
 PyObject *
-PyImport_ImportModuleLevel(char *name, PyObject *globals, PyObject *locals,
+PyImport_ImportModuleLevel(const char *name, PyObject *globals, PyObject *locals,
                            PyObject *fromlist, int level)
 {
     PyObject *nameobj, *mod;
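
The bytecode cache tag is now derived from PY_MAJOR_VERSION and
PY_MINOR_VERSION through the stringizing macros instead of being hard-coded
as "cpython-32".  From Python the effect shows up in imp.get_tag(); the
sketch below merely checks that the tag tracks the running version:

    import imp, sys

    tag = imp.get_tag()
    expected = "cpython-%d%d" % sys.version_info[:2]
    print(tag, expected, tag == expected)    # e.g. cpython-33 cpython-33 True
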
diff --git a/Python/marshal.c b/Python/marshal.c
--- a/Python/marshal.c
+++ b/Python/marshal.c
@@ -58,9 +58,9 @@
     int depth;
     /* If fp == NULL, the following are valid: */
     PyObject *str;
+    PyObject *current_filename;
     char *ptr;
     char *end;
-    PyObject *strings; /* dict on marshal, list on unmarshal */
     int version;
 } WFILE;
 
@@ -444,7 +444,6 @@
     wf.fp = fp;
     wf.error = WFERR_OK;
     wf.depth = 0;
-    wf.strings = NULL;
     wf.version = version;
     w_long(x, &wf);
 }
@@ -456,10 +455,8 @@
     wf.fp = fp;
     wf.error = WFERR_OK;
     wf.depth = 0;
-    wf.strings = (version > 0) ? PyDict_New() : NULL;
     wf.version = version;
     w_object(x, &wf);
-    Py_XDECREF(wf.strings);
 }
 
 typedef WFILE RFILE; /* Same struct with different invariants */
@@ -980,6 +977,18 @@
             filename = r_object(p);
             if (filename == NULL)
                 goto code_error;
+            if (PyUnicode_CheckExact(filename)) {
+                if (p->current_filename != NULL) {
+                    if (!PyUnicode_Compare(filename, p->current_filename)) {
+                        Py_DECREF(filename);
+                        Py_INCREF(p->current_filename);
+                        filename = p->current_filename;
+                    }
+                }
+                else {
+                    p->current_filename = filename;
+                }
+            }
             name = r_object(p);
             if (name == NULL)
                 goto code_error;
@@ -1041,7 +1050,7 @@
     RFILE rf;
     assert(fp);
     rf.fp = fp;
-    rf.strings = NULL;
+    rf.current_filename = NULL;
     rf.end = rf.ptr = NULL;
     return r_short(&rf);
 }
@@ -1051,7 +1060,7 @@
 {
     RFILE rf;
     rf.fp = fp;
-    rf.strings = NULL;
+    rf.current_filename = NULL;
     rf.ptr = rf.end = NULL;
     return r_long(&rf);
 }
@@ -1112,11 +1121,10 @@
     RFILE rf;
     PyObject *result;
     rf.fp = fp;
-    rf.strings = PyList_New(0);
+    rf.current_filename = NULL;
     rf.depth = 0;
     rf.ptr = rf.end = NULL;
     result = r_object(&rf);
-    Py_DECREF(rf.strings);
     return result;
 }
 
@@ -1126,12 +1134,11 @@
     RFILE rf;
     PyObject *result;
     rf.fp = NULL;
+    rf.current_filename = NULL;
     rf.ptr = str;
     rf.end = str + len;
-    rf.strings = PyList_New(0);
     rf.depth = 0;
     result = r_object(&rf);
-    Py_DECREF(rf.strings);
     return result;
 }
 
@@ -1150,9 +1157,7 @@
     wf.error = WFERR_OK;
     wf.depth = 0;
     wf.version = version;
-    wf.strings = (version > 0) ? PyDict_New() : NULL;
     w_object(x, &wf);
-    Py_XDECREF(wf.strings);
     if (wf.str != NULL) {
         char *base = PyBytes_AS_STRING((PyBytesObject *)wf.str);
         if (wf.ptr - base > PY_SSIZE_T_MAX) {
@@ -1226,6 +1231,7 @@
     if (data == NULL)
         return NULL;
     rf.fp = NULL;
+    rf.current_filename = NULL;
     if (PyBytes_Check(data)) {
         rf.ptr = PyBytes_AS_STRING(data);
         rf.end = rf.ptr + PyBytes_GET_SIZE(data);
@@ -1242,10 +1248,8 @@
         Py_DECREF(data);
         return NULL;
     }
-    rf.strings = PyList_New(0);
     rf.depth = 0;
     result = read_object(&rf);
-    Py_DECREF(rf.strings);
     Py_DECREF(data);
     return result;
 }
@@ -1296,12 +1300,11 @@
     s = p.buf;
     n = p.len;
     rf.fp = NULL;
+    rf.current_filename = NULL;
     rf.ptr = s;
     rf.end = s + n;
-    rf.strings = PyList_New(0);
     rf.depth = 0;
     result = read_object(&rf);
-    Py_DECREF(rf.strings);
     PyBuffer_Release(&p);
     return result;
 }
diff --git a/Python/pythonrun.c b/Python/pythonrun.c
--- a/Python/pythonrun.c
+++ b/Python/pythonrun.c
@@ -1593,7 +1593,7 @@
         moduleName = PyObject_GetAttrString(type, "__module__");
         if (moduleName == NULL || !PyUnicode_Check(moduleName))
         {
-            Py_DECREF(moduleName);
+            Py_XDECREF(moduleName);
             err = PyFile_WriteString("<unknown>", f);
         }
         else {
diff --git a/Python/symtable.c b/Python/symtable.c
--- a/Python/symtable.c
+++ b/Python/symtable.c
@@ -185,6 +185,7 @@
 static int symtable_visit_argannotations(struct symtable *st, asdl_seq *args);
 static int symtable_implicit_arg(struct symtable *st, int pos);
 static int symtable_visit_annotations(struct symtable *st, stmt_ty s);
+static int symtable_visit_withitem(struct symtable *st, withitem_ty item);
 
 
 static identifier top = NULL, lambda = NULL, genexpr = NULL,
@@ -1210,14 +1211,11 @@
             }
         }
         break;
-    case TryExcept_kind:
-        VISIT_SEQ(st, stmt, s->v.TryExcept.body);
-        VISIT_SEQ(st, stmt, s->v.TryExcept.orelse);
-        VISIT_SEQ(st, excepthandler, s->v.TryExcept.handlers);
-        break;
-    case TryFinally_kind:
-        VISIT_SEQ(st, stmt, s->v.TryFinally.body);
-        VISIT_SEQ(st, stmt, s->v.TryFinally.finalbody);
+    case Try_kind:
+        VISIT_SEQ(st, stmt, s->v.Try.body);
+        VISIT_SEQ(st, stmt, s->v.Try.orelse);
+        VISIT_SEQ(st, excepthandler, s->v.Try.handlers);
+        VISIT_SEQ(st, stmt, s->v.Try.finalbody);
         break;
     case Assert_kind:
         VISIT(st, expr, s->v.Assert.test);
@@ -1305,10 +1303,7 @@
         /* nothing to do here */
         break;
     case With_kind:
-        VISIT(st, expr, s->v.With.context_expr);
-        if (s->v.With.optional_vars) {
-            VISIT(st, expr, s->v.With.optional_vars);
-        }
+        VISIT_SEQ(st, withitem, s->v.With.items);
         VISIT_SEQ(st, stmt, s->v.With.body);
         break;
     }
@@ -1540,6 +1535,16 @@
     return 1;
 }
 
+static int
+symtable_visit_withitem(struct symtable *st, withitem_ty item)
+{
+    VISIT(st, expr, item->context_expr);
+    if (item->optional_vars) {
+        VISIT(st, expr, item->optional_vars);
+    }
+    return 1;
+}
+
 
 static int
 symtable_visit_alias(struct symtable *st, alias_ty a)
diff --git a/Python/thread_pthread.h b/Python/thread_pthread.h
--- a/Python/thread_pthread.h
+++ b/Python/thread_pthread.h
@@ -18,6 +18,18 @@
 #ifndef THREAD_STACK_SIZE
 #define THREAD_STACK_SIZE       0       /* use default stack size */
 #endif
+
+#if (defined(__APPLE__) || defined(__FreeBSD__)) && defined(THREAD_STACK_SIZE) && THREAD_STACK_SIZE == 0
+   /* The default stack size for new threads on OSX is small enough that
+    * we'll get hard crashes instead of 'maximum recursion depth exceeded'
+    * exceptions.
+    *
+    * The default stack size below is the minimal stack size where a
+    * simple recursive function doesn't cause a hard crash.
+    */
+#undef  THREAD_STACK_SIZE
+#define THREAD_STACK_SIZE       0x400000
+#endif
 /* for safety, ensure a viable minimum stacksize */
 #define THREAD_STACK_MIN        0x8000  /* 32kB */
 #else  /* !_POSIX_THREAD_ATTR_STACKSIZE */
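
Per the comment above, the point of the larger default is that deep Python
recursion inside a secondary thread could overflow the small platform
default stack on OS X and FreeBSD and crash the process before the
recursion limit was ever reached.  A cautious reproduction sketch: with the
4 MiB default it is expected to finish quietly, while on those platforms
without the patch it may instead hard-crash (900 deliberately stays below
the default recursion limit of 1000):

    import threading

    def deep(n):
        if n:
            deep(n - 1)

    t = threading.Thread(target=deep, args=(900,))
    t.start()
    t.join()
    print("recursed in a thread without crashing")
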
diff --git a/Tools/msi/msi.py b/Tools/msi/msi.py
--- a/Tools/msi/msi.py
+++ b/Tools/msi/msi.py
@@ -1031,6 +1031,8 @@
             lib.glob("*.0")
         if dir=='tests' and parent.physical=='distutils':
             lib.add_file("Setup.sample")
+        if dir=='cjkencodings':
+            lib.glob("*.txt")
         if dir=='decimaltestdata':
             lib.glob("*.decTest")
         if dir=='xmltestdata':
diff --git a/Tools/scripts/findnocoding.py b/Tools/scripts/findnocoding.py
--- a/Tools/scripts/findnocoding.py
+++ b/Tools/scripts/findnocoding.py
@@ -2,7 +2,7 @@
 
 """List all those Python files that require a coding directive
 
-Usage: nocoding.py dir1 [dir2...]
+Usage: findnocoding.py dir1 [dir2...]
 """
 
 __author__ = "Oleg Broytmann, Georg Brandl"
@@ -50,7 +50,7 @@
 
 def needs_declaration(fullpath):
     try:
-        infile = open(fullpath, 'rU')
+        infile = open(fullpath)
     except IOError: # Oops, the file was removed - ignore it
         return None
 
diff --git a/Tools/scripts/pysource.py b/Tools/scripts/pysource.py
--- a/Tools/scripts/pysource.py
+++ b/Tools/scripts/pysource.py
@@ -42,7 +42,7 @@
         return None
 
     try:
-        return open(fullpath, 'rU')
+        return open(fullpath)
     except IOError as err: # Access denied, or a special file - ignore it
         print_debug("%s: access denied: %s" % (fullpath, err))
         return None
diff --git a/Tools/unittestgui/unittestgui.py b/Tools/unittestgui/unittestgui.py
--- a/Tools/unittestgui/unittestgui.py
+++ b/Tools/unittestgui/unittestgui.py
@@ -28,7 +28,6 @@
 """
 
 __author__ = "Steve Purcell (stephen_purcell at yahoo.com)"
-__version__ = "$Revision: 1.7 $"[11:-2]
 
 import sys
 import traceback
diff --git a/configure b/configure
--- a/configure
+++ b/configure
@@ -1,14 +1,13 @@
 #! /bin/sh
-# From configure.in Revision.
 # Guess values for system-dependent variables and create Makefiles.
-# Generated by GNU Autoconf 2.67 for python 3.3.
+# Generated by GNU Autoconf 2.65 for python 3.3.
 #
 # Report bugs to <http://bugs.python.org/>.
 #
 #
 # Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001,
-# 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software
-# Foundation, Inc.
+# 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation,
+# Inc.
 #
 #
 # This configure script is free software; the Free Software Foundation
@@ -320,7 +319,7 @@
       test -d "$as_dir" && break
     done
     test -z "$as_dirs" || eval "mkdir $as_dirs"
-  } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir"
+  } || test -d "$as_dir" || as_fn_error "cannot create directory $as_dir"
 
 
 } # as_fn_mkdir_p
@@ -360,19 +359,19 @@
 fi # as_fn_arith
 
 
-# as_fn_error STATUS ERROR [LINENO LOG_FD]
-# ----------------------------------------
+# as_fn_error ERROR [LINENO LOG_FD]
+# ---------------------------------
 # Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are
 # provided, also output the error to LOG_FD, referencing LINENO. Then exit the
-# script with STATUS, using 1 if that was 0.
+# script with status $?, using 1 if that was 0.
 as_fn_error ()
 {
-  as_status=$1; test $as_status -eq 0 && as_status=1
-  if test "$4"; then
-    as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-    $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
+  as_status=$?; test $as_status -eq 0 && as_status=1
+  if test "$3"; then
+    as_lineno=${as_lineno-"$2"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+    $as_echo "$as_me:${as_lineno-$LINENO}: error: $1" >&$3
   fi
-  $as_echo "$as_me: error: $2" >&2
+  $as_echo "$as_me: error: $1" >&2
   as_fn_exit $as_status
 } # as_fn_error
 
@@ -534,7 +533,7 @@
 exec 6>&1
 
 # Name of the host.
-# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status,
+# hostname on some systems (SVR3.2, Linux) returns a bogus exit status,
 # so uname gets run too.
 ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q`
 
@@ -636,10 +635,8 @@
 INSTALL_DATA
 INSTALL_SCRIPT
 INSTALL_PROGRAM
-HAS_HG
-HGBRANCH
-HGTAG
-HGVERSION
+HAS_PYTHON
+DISABLE_ASDLGEN
 ARFLAGS
 AR
 RANLIB
@@ -690,6 +687,10 @@
 CONFIG_ARGS
 SOVERSION
 VERSION
+HAS_HG
+HGBRANCH
+HGTAG
+HGVERSION
 target_alias
 host_alias
 build_alias
@@ -832,9 +833,8 @@
   fi
 
   case $ac_option in
-  *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;;
-  *=)   ac_optarg= ;;
-  *)    ac_optarg=yes ;;
+  *=*)	ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;;
+  *)	ac_optarg=yes ;;
   esac
 
   # Accept the important Cygnus configure options, so we can diagnose typos.
@@ -879,7 +879,7 @@
     ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'`
     # Reject names that are not valid shell variable names.
     expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
-      as_fn_error $? "invalid feature name: $ac_useropt"
+      as_fn_error "invalid feature name: $ac_useropt"
     ac_useropt_orig=$ac_useropt
     ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
     case $ac_user_opts in
@@ -905,7 +905,7 @@
     ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'`
     # Reject names that are not valid shell variable names.
     expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
-      as_fn_error $? "invalid feature name: $ac_useropt"
+      as_fn_error "invalid feature name: $ac_useropt"
     ac_useropt_orig=$ac_useropt
     ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
     case $ac_user_opts in
@@ -1109,7 +1109,7 @@
     ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'`
     # Reject names that are not valid shell variable names.
     expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
-      as_fn_error $? "invalid package name: $ac_useropt"
+      as_fn_error "invalid package name: $ac_useropt"
     ac_useropt_orig=$ac_useropt
     ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
     case $ac_user_opts in
@@ -1125,7 +1125,7 @@
     ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'`
     # Reject names that are not valid shell variable names.
     expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
-      as_fn_error $? "invalid package name: $ac_useropt"
+      as_fn_error "invalid package name: $ac_useropt"
     ac_useropt_orig=$ac_useropt
     ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
     case $ac_user_opts in
@@ -1155,8 +1155,8 @@
   | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*)
     x_libraries=$ac_optarg ;;
 
-  -*) as_fn_error $? "unrecognized option: \`$ac_option'
-Try \`$0 --help' for more information"
+  -*) as_fn_error "unrecognized option: \`$ac_option'
+Try \`$0 --help' for more information."
     ;;
 
   *=*)
@@ -1164,7 +1164,7 @@
     # Reject names that are not valid shell variable names.
     case $ac_envvar in #(
       '' | [0-9]* | *[!_$as_cr_alnum]* )
-      as_fn_error $? "invalid variable name: \`$ac_envvar'" ;;
+      as_fn_error "invalid variable name: \`$ac_envvar'" ;;
     esac
     eval $ac_envvar=\$ac_optarg
     export $ac_envvar ;;
@@ -1182,13 +1182,13 @@
 
 if test -n "$ac_prev"; then
   ac_option=--`echo $ac_prev | sed 's/_/-/g'`
-  as_fn_error $? "missing argument to $ac_option"
+  as_fn_error "missing argument to $ac_option"
 fi
 
 if test -n "$ac_unrecognized_opts"; then
   case $enable_option_checking in
     no) ;;
-    fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;;
+    fatal) as_fn_error "unrecognized options: $ac_unrecognized_opts" ;;
     *)     $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;;
   esac
 fi
@@ -1211,7 +1211,7 @@
     [\\/$]* | ?:[\\/]* )  continue;;
     NONE | '' ) case $ac_var in *prefix ) continue;; esac;;
   esac
-  as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val"
+  as_fn_error "expected an absolute directory name for --$ac_var: $ac_val"
 done
 
 # There might be people who depend on the old broken behavior: `$host'
@@ -1225,8 +1225,8 @@
 if test "x$host_alias" != x; then
   if test "x$build_alias" = x; then
     cross_compiling=maybe
-    $as_echo "$as_me: WARNING: if you wanted to set the --build type, don't use --host.
-    If a cross compiler is detected then cross compile mode will be used" >&2
+    $as_echo "$as_me: WARNING: If you wanted to set the --build type, don't use --host.
+    If a cross compiler is detected then cross compile mode will be used." >&2
   elif test "x$build_alias" != "x$host_alias"; then
     cross_compiling=yes
   fi
@@ -1241,9 +1241,9 @@
 ac_pwd=`pwd` && test -n "$ac_pwd" &&
 ac_ls_di=`ls -di .` &&
 ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` ||
-  as_fn_error $? "working directory cannot be determined"
+  as_fn_error "working directory cannot be determined"
 test "X$ac_ls_di" = "X$ac_pwd_ls_di" ||
-  as_fn_error $? "pwd does not report name of working directory"
+  as_fn_error "pwd does not report name of working directory"
 
 
 # Find the source files, if location was not specified.
@@ -1282,11 +1282,11 @@
 fi
 if test ! -r "$srcdir/$ac_unique_file"; then
   test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .."
-  as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir"
+  as_fn_error "cannot find sources ($ac_unique_file) in $srcdir"
 fi
 ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work"
 ac_abs_confdir=`(
-	cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg"
+	cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error "$ac_msg"
 	pwd)`
 # When building in place, set srcdir=.
 if test "$ac_abs_confdir" = "$ac_pwd"; then
@@ -1326,7 +1326,7 @@
       --help=short        display options specific to this package
       --help=recursive    display the short help of all the included packages
   -V, --version           display version information and exit
-  -q, --quiet, --silent   do not print \`checking ...' messages
+  -q, --quiet, --silent   do not print \`checking...' messages
       --cache-file=FILE   cache test results in FILE [disabled]
   -C, --config-cache      alias for \`--cache-file=config.cache'
   -n, --no-create         do not create output files
@@ -1511,9 +1511,9 @@
 if $ac_init_version; then
   cat <<\_ACEOF
 python configure 3.3
-generated by GNU Autoconf 2.67
-
-Copyright (C) 2010 Free Software Foundation, Inc.
+generated by GNU Autoconf 2.65
+
+Copyright (C) 2009 Free Software Foundation, Inc.
 This configure script is free software; the Free Software Foundation
 gives unlimited permission to copy, distribute and modify it.
 _ACEOF
@@ -1629,7 +1629,7 @@
     mv -f conftest.er1 conftest.err
   fi
   $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } > conftest.i && {
+  test $ac_status = 0; } >/dev/null && {
 	 test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" ||
 	 test ! -s conftest.err
        }; then :
@@ -1653,10 +1653,10 @@
 ac_fn_c_check_header_mongrel ()
 {
   as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  if eval "test \"\${$3+set}\"" = set; then :
+  if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then :
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
 $as_echo_n "checking for $2... " >&6; }
-if eval "test \"\${$3+set}\"" = set; then :
+if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then :
   $as_echo_n "(cached) " >&6
 fi
 eval ac_res=\$$3
@@ -1692,7 +1692,7 @@
 else
   ac_header_preproc=no
 fi
-rm -f conftest.err conftest.i conftest.$ac_ext
+rm -f conftest.err conftest.$ac_ext
 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5
 $as_echo "$ac_header_preproc" >&6; }
 
@@ -1715,15 +1715,17 @@
 $as_echo "$as_me: WARNING: $2:     section \"Present But Cannot Be Compiled\"" >&2;}
     { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5
 $as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;}
-( $as_echo "## -------------------------------------- ##
+( cat <<\_ASBOX
+## -------------------------------------- ##
 ## Report this to http://bugs.python.org/ ##
-## -------------------------------------- ##"
+## -------------------------------------- ##
+_ASBOX
      ) | sed "s/^/$as_me: WARNING:     /" >&2
     ;;
 esac
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
 $as_echo_n "checking for $2... " >&6; }
-if eval "test \"\${$3+set}\"" = set; then :
+if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then :
   $as_echo_n "(cached) " >&6
 else
   eval "$3=\$ac_header_compiler"
@@ -1787,7 +1789,7 @@
   as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
 $as_echo_n "checking for $2... " >&6; }
-if eval "test \"\${$3+set}\"" = set; then :
+if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then :
   $as_echo_n "(cached) " >&6
 else
   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
@@ -1818,7 +1820,7 @@
   as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
 $as_echo_n "checking for $2... " >&6; }
-if eval "test \"\${$3+set}\"" = set; then :
+if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then :
   $as_echo_n "(cached) " >&6
 else
   eval "$3=no"
@@ -1872,7 +1874,7 @@
   as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for uint$2_t" >&5
 $as_echo_n "checking for uint$2_t... " >&6; }
-if eval "test \"\${$3+set}\"" = set; then :
+if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then :
   $as_echo_n "(cached) " >&6
 else
   eval "$3=no"
@@ -1902,7 +1904,8 @@
 esac
 fi
 rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-       if eval test \"x\$"$3"\" = x"no"; then :
+       eval as_val=\$$3
+   if test "x$as_val" = x""no; then :
 
 else
   break
@@ -1925,7 +1928,7 @@
   as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for int$2_t" >&5
 $as_echo_n "checking for int$2_t... " >&6; }
-if eval "test \"\${$3+set}\"" = set; then :
+if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then :
   $as_echo_n "(cached) " >&6
 else
   eval "$3=no"
@@ -1976,7 +1979,8 @@
 rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
 fi
 rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-       if eval test \"x\$"$3"\" = x"no"; then :
+       eval as_val=\$$3
+   if test "x$as_val" = x""no; then :
 
 else
   break
@@ -2176,7 +2180,7 @@
   as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
 $as_echo_n "checking for $2... " >&6; }
-if eval "test \"\${$3+set}\"" = set; then :
+if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then :
   $as_echo_n "(cached) " >&6
 else
   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
@@ -2244,7 +2248,7 @@
   as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
   { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2.$3" >&5
 $as_echo_n "checking for $2.$3... " >&6; }
-if eval "test \"\${$4+set}\"" = set; then :
+if { as_var=$4; eval "test \"\${$as_var+set}\" = set"; }; then :
   $as_echo_n "(cached) " >&6
 else
   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
@@ -2292,18 +2296,15 @@
 
 } # ac_fn_c_check_member
 
-# ac_fn_c_check_decl LINENO SYMBOL VAR INCLUDES
-# ---------------------------------------------
-# Tests whether SYMBOL is declared in INCLUDES, setting cache variable VAR
-# accordingly.
+# ac_fn_c_check_decl LINENO SYMBOL VAR
+# ------------------------------------
+# Tests whether SYMBOL is declared, setting cache variable VAR accordingly.
 ac_fn_c_check_decl ()
 {
   as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  as_decl_name=`echo $2|sed 's/ *(.*//'`
-  as_decl_use=`echo $2|sed -e 's/(/((/' -e 's/)/) 0&/' -e 's/,/) 0& (/g'`
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $as_decl_name is declared" >&5
-$as_echo_n "checking whether $as_decl_name is declared... " >&6; }
-if eval "test \"\${$3+set}\"" = set; then :
+  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $2 is declared" >&5
+$as_echo_n "checking whether $2 is declared... " >&6; }
+if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then :
   $as_echo_n "(cached) " >&6
 else
   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
@@ -2312,12 +2313,8 @@
 int
 main ()
 {
-#ifndef $as_decl_name
-#ifdef __cplusplus
-  (void) $as_decl_use;
-#else
-  (void) $as_decl_name;
-#endif
+#ifndef $2
+  (void) $2;
 #endif
 
   ;
@@ -2342,7 +2339,7 @@
 running configure, to aid debugging if configure makes a mistake.
 
 It was created by python $as_me 3.3, which was
-generated by GNU Autoconf 2.67.  Invocation command line was
+generated by GNU Autoconf 2.65.  Invocation command line was
 
   $ $0 $@
 
@@ -2452,9 +2449,11 @@
   {
     echo
 
-    $as_echo "## ---------------- ##
+    cat <<\_ASBOX
+## ---------------- ##
 ## Cache variables. ##
-## ---------------- ##"
+## ---------------- ##
+_ASBOX
     echo
     # The following way of writing the cache mishandles newlines in values,
 (
@@ -2488,9 +2487,11 @@
 )
     echo
 
-    $as_echo "## ----------------- ##
+    cat <<\_ASBOX
+## ----------------- ##
 ## Output variables. ##
-## ----------------- ##"
+## ----------------- ##
+_ASBOX
     echo
     for ac_var in $ac_subst_vars
     do
@@ -2503,9 +2504,11 @@
     echo
 
     if test -n "$ac_subst_files"; then
-      $as_echo "## ------------------- ##
+      cat <<\_ASBOX
+## ------------------- ##
 ## File substitutions. ##
-## ------------------- ##"
+## ------------------- ##
+_ASBOX
       echo
       for ac_var in $ac_subst_files
       do
@@ -2519,9 +2522,11 @@
     fi
 
     if test -s confdefs.h; then
-      $as_echo "## ----------- ##
+      cat <<\_ASBOX
+## ----------- ##
 ## confdefs.h. ##
-## ----------- ##"
+## ----------- ##
+_ASBOX
       echo
       cat confdefs.h
       echo
@@ -2576,12 +2581,7 @@
 ac_site_file1=NONE
 ac_site_file2=NONE
 if test -n "$CONFIG_SITE"; then
-  # We do not want a PATH search for config.site.
-  case $CONFIG_SITE in #((
-    -*)  ac_site_file1=./$CONFIG_SITE;;
-    */*) ac_site_file1=$CONFIG_SITE;;
-    *)   ac_site_file1=./$CONFIG_SITE;;
-  esac
+  ac_site_file1=$CONFIG_SITE
 elif test "x$prefix" != xNONE; then
   ac_site_file1=$prefix/share/config.site
   ac_site_file2=$prefix/etc/config.site
@@ -2596,11 +2596,7 @@
     { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5
 $as_echo "$as_me: loading site script $ac_site_file" >&6;}
     sed 's/^/| /' "$ac_site_file" >&5
-    . "$ac_site_file" \
-      || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "failed to load site script $ac_site_file
-See \`config.log' for more details" "$LINENO" 5 ; }
+    . "$ac_site_file"
   fi
 done
 
@@ -2676,7 +2672,7 @@
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
   { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5
 $as_echo "$as_me: error: changes in the environment can compromise the build" >&2;}
-  as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5
+  as_fn_error "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5
 fi
 ## -------------------- ##
 ## Main body of script. ##
@@ -2690,6 +2686,65 @@
 
 
 
+
+
+
+
+if test -e $srcdir/.hg/00changelog.i
+then
+# Extract the first word of "hg", so it can be a program name with args.
+set dummy hg; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if test "${ac_cv_prog_HAS_HG+set}" = set; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$HAS_HG"; then
+  ac_cv_prog_HAS_HG="$HAS_HG" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+    for ac_exec_ext in '' $ac_executable_extensions; do
+  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
+    ac_cv_prog_HAS_HG="found"
+    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+  done
+IFS=$as_save_IFS
+
+  test -z "$ac_cv_prog_HAS_HG" && ac_cv_prog_HAS_HG="not-found"
+fi
+fi
+HAS_HG=$ac_cv_prog_HAS_HG
+if test -n "$HAS_HG"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $HAS_HG" >&5
+$as_echo "$HAS_HG" >&6; }
+else
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+else
+HAS_HG=no-repository
+fi
+if test $HAS_HG = found
+then
+    HGVERSION="hg id -i \$(srcdir)"
+    HGTAG="hg id -t \$(srcdir)"
+    HGBRANCH="hg id -b \$(srcdir)"
+else
+    HGVERSION=""
+    HGTAG=""
+    HGBRANCH=""
+fi
+
+
 ac_config_headers="$ac_config_headers pyconfig.h"
 
 
@@ -2777,7 +2832,7 @@
 		UNIVERSALSDK=$enableval
 		if test ! -d "${UNIVERSALSDK}"
 		then
-			as_fn_error $? "--enable-universalsdk specifies non-existing SDK: ${UNIVERSALSDK}" "$LINENO" 5
+			as_fn_error "--enable-universalsdk specifies non-existing SDK: ${UNIVERSALSDK}" "$LINENO" 5
 		fi
 		;;
 	esac
@@ -3169,7 +3224,7 @@
 # If the user switches compilers, we can't believe the cache
 if test ! -z "$ac_cv_prog_CC" -a ! -z "$CC" -a "$CC" != "$ac_cv_prog_CC"
 then
-  as_fn_error $? "cached CC is different -- throw away $cache_file
+  as_fn_error "cached CC is different -- throw away $cache_file
 (it is also a good idea to do 'make clean' before compiling)" "$LINENO" 5
 fi
 
@@ -3479,8 +3534,8 @@
 
 test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "no acceptable C compiler found in \$PATH
-See \`config.log' for more details" "$LINENO" 5 ; }
+as_fn_error "no acceptable C compiler found in \$PATH
+See \`config.log' for more details." "$LINENO" 5; }
 
 # Provide some information about the compiler.
 $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5
@@ -3594,8 +3649,9 @@
 
 { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error 77 "C compiler cannot create executables
-See \`config.log' for more details" "$LINENO" 5 ; }
+{ as_fn_set_status 77
+as_fn_error "C compiler cannot create executables
+See \`config.log' for more details." "$LINENO" 5; }; }
 else
   { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
 $as_echo "yes" >&6; }
@@ -3637,8 +3693,8 @@
 else
   { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "cannot compute suffix of executables: cannot compile and link
-See \`config.log' for more details" "$LINENO" 5 ; }
+as_fn_error "cannot compute suffix of executables: cannot compile and link
+See \`config.log' for more details." "$LINENO" 5; }
 fi
 rm -f conftest conftest$ac_cv_exeext
 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5
@@ -3695,9 +3751,9 @@
     else
 	{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "cannot run C compiled programs.
+as_fn_error "cannot run C compiled programs.
 If you meant to cross compile, use \`--host'.
-See \`config.log' for more details" "$LINENO" 5 ; }
+See \`config.log' for more details." "$LINENO" 5; }
     fi
   fi
 fi
@@ -3748,8 +3804,8 @@
 
 { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "cannot compute suffix of object files: cannot compile
-See \`config.log' for more details" "$LINENO" 5 ; }
+as_fn_error "cannot compute suffix of object files: cannot compile
+See \`config.log' for more details." "$LINENO" 5; }
 fi
 rm -f conftest.$ac_cv_objext conftest.$ac_ext
 fi
@@ -4232,7 +4288,7 @@
   # Broken: fails on valid input.
 continue
 fi
-rm -f conftest.err conftest.i conftest.$ac_ext
+rm -f conftest.err conftest.$ac_ext
 
   # OK, works on sane cases.  Now check whether nonexistent headers
   # can be detected and how.
@@ -4248,11 +4304,11 @@
 ac_preproc_ok=:
 break
 fi
-rm -f conftest.err conftest.i conftest.$ac_ext
+rm -f conftest.err conftest.$ac_ext
 
 done
 # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
-rm -f conftest.i conftest.err conftest.$ac_ext
+rm -f conftest.err conftest.$ac_ext
 if $ac_preproc_ok; then :
   break
 fi
@@ -4291,7 +4347,7 @@
   # Broken: fails on valid input.
 continue
 fi
-rm -f conftest.err conftest.i conftest.$ac_ext
+rm -f conftest.err conftest.$ac_ext
 
   # OK, works on sane cases.  Now check whether nonexistent headers
   # can be detected and how.
@@ -4307,18 +4363,18 @@
 ac_preproc_ok=:
 break
 fi
-rm -f conftest.err conftest.i conftest.$ac_ext
+rm -f conftest.err conftest.$ac_ext
 
 done
 # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
-rm -f conftest.i conftest.err conftest.$ac_ext
+rm -f conftest.err conftest.$ac_ext
 if $ac_preproc_ok; then :
 
 else
   { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "C preprocessor \"$CPP\" fails sanity check
-See \`config.log' for more details" "$LINENO" 5 ; }
+as_fn_error "C preprocessor \"$CPP\" fails sanity check
+See \`config.log' for more details." "$LINENO" 5; }
 fi
 
 ac_ext=c
@@ -4379,7 +4435,7 @@
   done
 IFS=$as_save_IFS
   if test -z "$ac_cv_path_GREP"; then
-    as_fn_error $? "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
+    as_fn_error "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
   fi
 else
   ac_cv_path_GREP=$GREP
@@ -4445,7 +4501,7 @@
   done
 IFS=$as_save_IFS
   if test -z "$ac_cv_path_EGREP"; then
-    as_fn_error $? "no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
+    as_fn_error "no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
   fi
 else
   ac_cv_path_EGREP=$EGREP
@@ -4577,7 +4633,8 @@
   as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh`
 ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default
 "
-if eval test \"x\$"$as_ac_Header"\" = x"yes"; then :
+eval as_val=\$$as_ac_Header
+   if test "x$as_val" = x""yes; then :
   cat >>confdefs.h <<_ACEOF
 #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1
 _ACEOF
@@ -5140,17 +5197,16 @@
 fi
 
 
-
-
-# Extract the first word of "hg", so it can be a program name with args.
-set dummy hg; ac_word=$2
+DISABLE_ASDLGEN=""
+# Extract the first word of "python", so it can be a program name with args.
+set dummy python; ac_word=$2
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
 $as_echo_n "checking for $ac_word... " >&6; }
-if test "${ac_cv_prog_HAS_HG+set}" = set; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$HAS_HG"; then
-  ac_cv_prog_HAS_HG="$HAS_HG" # Let the user override the test.
+if test "${ac_cv_prog_HAS_PYTHON+set}" = set; then :
+  $as_echo_n "(cached) " >&6
+else
+  if test -n "$HAS_PYTHON"; then
+  ac_cv_prog_HAS_PYTHON="$HAS_PYTHON" # Let the user override the test.
 else
 as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
 for as_dir in $PATH
@@ -5159,7 +5215,7 @@
   test -z "$as_dir" && as_dir=.
     for ac_exec_ext in '' $ac_executable_extensions; do
   if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_HAS_HG="found"
+    ac_cv_prog_HAS_PYTHON="found"
     $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
     break 2
   fi
@@ -5167,29 +5223,24 @@
   done
 IFS=$as_save_IFS
 
-  test -z "$ac_cv_prog_HAS_HG" && ac_cv_prog_HAS_HG="not-found"
-fi
-fi
-HAS_HG=$ac_cv_prog_HAS_HG
-if test -n "$HAS_HG"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $HAS_HG" >&5
-$as_echo "$HAS_HG" >&6; }
+  test -z "$ac_cv_prog_HAS_PYTHON" && ac_cv_prog_HAS_PYTHON="not-found"
+fi
+fi
+HAS_PYTHON=$ac_cv_prog_HAS_PYTHON
+if test -n "$HAS_PYTHON"; then
+  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $HAS_PYTHON" >&5
+$as_echo "$HAS_PYTHON" >&6; }
 else
   { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
 $as_echo "no" >&6; }
 fi
 
 
-if test $HAS_HG = found
-then
-    HGVERSION="hg id -i \$(srcdir)"
-    HGTAG="hg id -t \$(srcdir)"
-    HGBRANCH="hg id -b \$(srcdir)"
-else
-    HGVERSION=""
-    HGTAG=""
-    HGBRANCH=""
-fi
+if test $HAS_HG != found -o $HAS_PYTHON != found
+then
+    DISABLE_ASDLGEN="@echo hg: $HAS_HG, python: $HAS_PYTHON! cannot run \$(srcdir)/Parser/asdl_c.py #"
+fi
+
 
 case $MACHDEP in
 bsdos*|hp*|HP*)
@@ -5201,22 +5252,16 @@
 esac
 ac_aux_dir=
 for ac_dir in "$srcdir" "$srcdir/.." "$srcdir/../.."; do
-  if test -f "$ac_dir/install-sh"; then
-    ac_aux_dir=$ac_dir
-    ac_install_sh="$ac_aux_dir/install-sh -c"
-    break
-  elif test -f "$ac_dir/install.sh"; then
-    ac_aux_dir=$ac_dir
-    ac_install_sh="$ac_aux_dir/install.sh -c"
-    break
-  elif test -f "$ac_dir/shtool"; then
-    ac_aux_dir=$ac_dir
-    ac_install_sh="$ac_aux_dir/shtool install -c"
-    break
-  fi
+  for ac_t in install-sh install.sh shtool; do
+    if test -f "$ac_dir/$ac_t"; then
+      ac_aux_dir=$ac_dir
+      ac_install_sh="$ac_aux_dir/$ac_t -c"
+      break 2
+    fi
+  done
 done
 if test -z "$ac_aux_dir"; then
-  as_fn_error $? "cannot find install-sh, install.sh, or shtool in \"$srcdir\" \"$srcdir/..\" \"$srcdir/../..\"" "$LINENO" 5
+  as_fn_error "cannot find install-sh, install.sh, or shtool in \"$srcdir\" \"$srcdir/..\" \"$srcdir/../..\"" "$LINENO" 5
 fi
 
 # These three variables are undocumented and unsupported,
@@ -5554,7 +5599,7 @@
 		   ARCH_RUN_32BIT="/usr/bin/arch -i386 -ppc"
 
 		 else
-	           as_fn_error $? "proper usage is --with-universal-arch=32-bit|64-bit|all|intel|3-way" "$LINENO" 5
+	           as_fn_error "proper usage is --with-universal-arch=32-bit|64-bit|all|intel|3-way" "$LINENO" 5
 
 		 fi
 
@@ -6042,7 +6087,8 @@
 do :
   as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh`
 ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default"
-if eval test \"x\$"$as_ac_Header"\" = x"yes"; then :
+eval as_val=\$$as_ac_Header
+   if test "x$as_val" = x""yes; then :
   cat >>confdefs.h <<_ACEOF
 #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1
 _ACEOF
@@ -6056,7 +6102,7 @@
   as_ac_Header=`$as_echo "ac_cv_header_dirent_$ac_hdr" | $as_tr_sh`
 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_hdr that defines DIR" >&5
 $as_echo_n "checking for $ac_hdr that defines DIR... " >&6; }
-if eval "test \"\${$as_ac_Header+set}\"" = set; then :
+if { as_var=$as_ac_Header; eval "test \"\${$as_var+set}\" = set"; }; then :
   $as_echo_n "(cached) " >&6
 else
   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
@@ -6083,7 +6129,8 @@
 eval ac_res=\$$as_ac_Header
 	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
 $as_echo "$ac_res" >&6; }
-if eval test \"x\$"$as_ac_Header"\" = x"yes"; then :
+eval as_val=\$$as_ac_Header
+   if test "x$as_val" = x""yes; then :
   cat >>confdefs.h <<_ACEOF
 #define `$as_echo "HAVE_$ac_hdr" | $as_tr_cpp` 1
 _ACEOF
@@ -6608,8 +6655,9 @@
   if test "$ac_cv_type_int" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error 77 "cannot compute sizeof (int)
-See \`config.log' for more details" "$LINENO" 5 ; }
+{ as_fn_set_status 77
+as_fn_error "cannot compute sizeof (int)
+See \`config.log' for more details." "$LINENO" 5; }; }
    else
      ac_cv_sizeof_int=0
    fi
@@ -6641,8 +6689,9 @@
   if test "$ac_cv_type_long" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error 77 "cannot compute sizeof (long)
-See \`config.log' for more details" "$LINENO" 5 ; }
+{ as_fn_set_status 77
+as_fn_error "cannot compute sizeof (long)
+See \`config.log' for more details." "$LINENO" 5; }; }
    else
      ac_cv_sizeof_long=0
    fi
@@ -6674,8 +6723,9 @@
   if test "$ac_cv_type_void_p" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error 77 "cannot compute sizeof (void *)
-See \`config.log' for more details" "$LINENO" 5 ; }
+{ as_fn_set_status 77
+as_fn_error "cannot compute sizeof (void *)
+See \`config.log' for more details." "$LINENO" 5; }; }
    else
      ac_cv_sizeof_void_p=0
    fi
@@ -6707,8 +6757,9 @@
   if test "$ac_cv_type_short" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error 77 "cannot compute sizeof (short)
-See \`config.log' for more details" "$LINENO" 5 ; }
+{ as_fn_set_status 77
+as_fn_error "cannot compute sizeof (short)
+See \`config.log' for more details." "$LINENO" 5; }; }
    else
      ac_cv_sizeof_short=0
    fi
@@ -6740,8 +6791,9 @@
   if test "$ac_cv_type_float" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error 77 "cannot compute sizeof (float)
-See \`config.log' for more details" "$LINENO" 5 ; }
+{ as_fn_set_status 77
+as_fn_error "cannot compute sizeof (float)
+See \`config.log' for more details." "$LINENO" 5; }; }
    else
      ac_cv_sizeof_float=0
    fi
@@ -6773,8 +6825,9 @@
   if test "$ac_cv_type_double" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error 77 "cannot compute sizeof (double)
-See \`config.log' for more details" "$LINENO" 5 ; }
+{ as_fn_set_status 77
+as_fn_error "cannot compute sizeof (double)
+See \`config.log' for more details." "$LINENO" 5; }; }
    else
      ac_cv_sizeof_double=0
    fi
@@ -6806,8 +6859,9 @@
   if test "$ac_cv_type_fpos_t" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error 77 "cannot compute sizeof (fpos_t)
-See \`config.log' for more details" "$LINENO" 5 ; }
+{ as_fn_set_status 77
+as_fn_error "cannot compute sizeof (fpos_t)
+See \`config.log' for more details." "$LINENO" 5; }; }
    else
      ac_cv_sizeof_fpos_t=0
    fi
@@ -6839,8 +6893,9 @@
   if test "$ac_cv_type_size_t" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error 77 "cannot compute sizeof (size_t)
-See \`config.log' for more details" "$LINENO" 5 ; }
+{ as_fn_set_status 77
+as_fn_error "cannot compute sizeof (size_t)
+See \`config.log' for more details." "$LINENO" 5; }; }
    else
      ac_cv_sizeof_size_t=0
    fi
@@ -6872,8 +6927,9 @@
   if test "$ac_cv_type_pid_t" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error 77 "cannot compute sizeof (pid_t)
-See \`config.log' for more details" "$LINENO" 5 ; }
+{ as_fn_set_status 77
+as_fn_error "cannot compute sizeof (pid_t)
+See \`config.log' for more details." "$LINENO" 5; }; }
    else
      ac_cv_sizeof_pid_t=0
    fi
@@ -6932,8 +6988,9 @@
   if test "$ac_cv_type_long_long" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error 77 "cannot compute sizeof (long long)
-See \`config.log' for more details" "$LINENO" 5 ; }
+{ as_fn_set_status 77
+as_fn_error "cannot compute sizeof (long long)
+See \`config.log' for more details." "$LINENO" 5; }; }
    else
      ac_cv_sizeof_long_long=0
    fi
@@ -6993,8 +7050,9 @@
   if test "$ac_cv_type_long_double" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error 77 "cannot compute sizeof (long double)
-See \`config.log' for more details" "$LINENO" 5 ; }
+{ as_fn_set_status 77
+as_fn_error "cannot compute sizeof (long double)
+See \`config.log' for more details." "$LINENO" 5; }; }
    else
      ac_cv_sizeof_long_double=0
    fi
@@ -7055,8 +7113,9 @@
   if test "$ac_cv_type__Bool" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error 77 "cannot compute sizeof (_Bool)
-See \`config.log' for more details" "$LINENO" 5 ; }
+{ as_fn_set_status 77
+as_fn_error "cannot compute sizeof (_Bool)
+See \`config.log' for more details." "$LINENO" 5; }; }
    else
      ac_cv_sizeof__Bool=0
    fi
@@ -7103,8 +7162,9 @@
   if test "$ac_cv_type_uintptr_t" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error 77 "cannot compute sizeof (uintptr_t)
-See \`config.log' for more details" "$LINENO" 5 ; }
+{ as_fn_set_status 77
+as_fn_error "cannot compute sizeof (uintptr_t)
+See \`config.log' for more details." "$LINENO" 5; }; }
    else
      ac_cv_sizeof_uintptr_t=0
    fi
@@ -7144,8 +7204,9 @@
   if test "$ac_cv_type_off_t" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error 77 "cannot compute sizeof (off_t)
-See \`config.log' for more details" "$LINENO" 5 ; }
+{ as_fn_set_status 77
+as_fn_error "cannot compute sizeof (off_t)
+See \`config.log' for more details." "$LINENO" 5; }; }
    else
      ac_cv_sizeof_off_t=0
    fi
@@ -7206,8 +7267,9 @@
   if test "$ac_cv_type_time_t" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error 77 "cannot compute sizeof (time_t)
-See \`config.log' for more details" "$LINENO" 5 ; }
+{ as_fn_set_status 77
+as_fn_error "cannot compute sizeof (time_t)
+See \`config.log' for more details." "$LINENO" 5; }; }
    else
      ac_cv_sizeof_time_t=0
    fi
@@ -7278,8 +7340,9 @@
   if test "$ac_cv_type_pthread_t" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error 77 "cannot compute sizeof (pthread_t)
-See \`config.log' for more details" "$LINENO" 5 ; }
+{ as_fn_set_status 77
+as_fn_error "cannot compute sizeof (pthread_t)
+See \`config.log' for more details." "$LINENO" 5; }; }
    else
      ac_cv_sizeof_pthread_t=0
    fi
@@ -7366,7 +7429,7 @@
     		MACOSX_DEFAULT_ARCH="ppc"
     		;;
     	*)
-    		as_fn_error $? "Unexpected output of 'arch' on OSX" "$LINENO" 5
+    		as_fn_error "Unexpected output of 'arch' on OSX" "$LINENO" 5
     		;;
     	esac
     else
@@ -7378,7 +7441,7 @@
     		MACOSX_DEFAULT_ARCH="ppc64"
     		;;
     	*)
-    		as_fn_error $? "Unexpected output of 'arch' on OSX" "$LINENO" 5
+    		as_fn_error "Unexpected output of 'arch' on OSX" "$LINENO" 5
     		;;
     	esac
 
@@ -7404,7 +7467,7 @@
 $as_echo "yes" >&6; }
 	if test $enable_shared = "yes"
 	then
-		as_fn_error $? "Specifying both --enable-shared and --enable-framework is not supported, use only --enable-framework instead" "$LINENO" 5
+		as_fn_error "Specifying both --enable-shared and --enable-framework is not supported, use only --enable-framework instead" "$LINENO" 5
 	fi
 else
 	{ $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
@@ -8244,12 +8307,12 @@
   withval=$with_dbmliborder;
 if test x$with_dbmliborder = xyes
 then
-as_fn_error $? "proper usage is --with-dbmliborder=db1:db2:..." "$LINENO" 5
+as_fn_error "proper usage is --with-dbmliborder=db1:db2:..." "$LINENO" 5
 else
   for db in `echo $with_dbmliborder | sed 's/:/ /g'`; do
     if test x$db != xndbm && test x$db != xgdbm && test x$db != xbdb
     then
-      as_fn_error $? "proper usage is --with-dbmliborder=db1:db2:..." "$LINENO" 5
+      as_fn_error "proper usage is --with-dbmliborder=db1:db2:..." "$LINENO" 5
     fi
   done
 fi
@@ -9222,7 +9285,7 @@
 $as_echo "#define WITH_VALGRIND 1" >>confdefs.h
 
 else
-  as_fn_error $? "Valgrind support requested but headers not available" "$LINENO" 5
+  as_fn_error "Valgrind support requested but headers not available" "$LINENO" 5
 
 fi
 
@@ -9307,7 +9370,7 @@
  getpriority getresuid getresgid getpwent getspnam getspent getsid getwd \
  if_nameindex \
  initgroups kill killpg lchmod lchown lockf linkat lstat lutimes mbrtowc mkdirat mkfifo \
- mkfifoat mknod mknodat mktime mremap nice openat pathconf pause plock poll \
+ mkfifoat mknod mknodat mktime mremap nice openat pathconf pause pipe2 plock poll \
  posix_fallocate posix_fadvise pread \
  pthread_init pthread_kill putenv pwrite readlink readlinkat readv realpath renameat \
  select sem_open sem_timedwait sem_getvalue sem_unlink sendfile setegid seteuid \
@@ -9321,7 +9384,8 @@
 do :
   as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh`
 ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var"
-if eval test \"x\$"$as_ac_var"\" = x"yes"; then :
+eval as_val=\$$as_ac_var
+   if test "x$as_val" = x""yes; then :
   cat >>confdefs.h <<_ACEOF
 #define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1
 _ACEOF
@@ -10344,7 +10408,8 @@
 do :
   as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh`
 ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var"
-if eval test \"x\$"$as_ac_var"\" = x"yes"; then :
+eval as_val=\$$as_ac_var
+   if test "x$as_val" = x""yes; then :
   cat >>confdefs.h <<_ACEOF
 #define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1
 _ACEOF
@@ -10353,44 +10418,25 @@
 done
 
 
-ac_fn_c_check_func "$LINENO" "dup2" "ac_cv_func_dup2"
-if test "x$ac_cv_func_dup2" = x""yes; then :
-  $as_echo "#define HAVE_DUP2 1" >>confdefs.h
+for ac_func in dup2 getcwd strdup
+do :
+  as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh`
+ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var"
+eval as_val=\$$as_ac_var
+   if test "x$as_val" = x""yes; then :
+  cat >>confdefs.h <<_ACEOF
+#define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1
+_ACEOF
 
 else
   case " $LIBOBJS " in
-  *" dup2.$ac_objext "* ) ;;
-  *) LIBOBJS="$LIBOBJS dup2.$ac_objext"
+  *" $ac_func.$ac_objext "* ) ;;
+  *) LIBOBJS="$LIBOBJS $ac_func.$ac_objext"
  ;;
 esac
 
 fi
-
-ac_fn_c_check_func "$LINENO" "getcwd" "ac_cv_func_getcwd"
-if test "x$ac_cv_func_getcwd" = x""yes; then :
-  $as_echo "#define HAVE_GETCWD 1" >>confdefs.h
-
-else
-  case " $LIBOBJS " in
-  *" getcwd.$ac_objext "* ) ;;
-  *) LIBOBJS="$LIBOBJS getcwd.$ac_objext"
- ;;
-esac
-
-fi
-
-ac_fn_c_check_func "$LINENO" "strdup" "ac_cv_func_strdup"
-if test "x$ac_cv_func_strdup" = x""yes; then :
-  $as_echo "#define HAVE_STRDUP 1" >>confdefs.h
-
-else
-  case " $LIBOBJS " in
-  *" strdup.$ac_objext "* ) ;;
-  *) LIBOBJS="$LIBOBJS strdup.$ac_objext"
- ;;
-esac
-
-fi
+done
 
 
 for ac_func in getpgrp
@@ -11603,7 +11649,7 @@
 then LIBM=$withval
      { $as_echo "$as_me:${as_lineno-$LINENO}: result: set LIBM=\"$withval\"" >&5
 $as_echo "set LIBM=\"$withval\"" >&6; }
-else as_fn_error $? "proper usage is --with-libm=STRING" "$LINENO" 5
+else as_fn_error "proper usage is --with-libm=STRING" "$LINENO" 5
 fi
 else
   { $as_echo "$as_me:${as_lineno-$LINENO}: result: default LIBM=\"$LIBM\"" >&5
@@ -11627,7 +11673,7 @@
 then LIBC=$withval
      { $as_echo "$as_me:${as_lineno-$LINENO}: result: set LIBC=\"$withval\"" >&5
 $as_echo "set LIBC=\"$withval\"" >&6; }
-else as_fn_error $? "proper usage is --with-libc=STRING" "$LINENO" 5
+else as_fn_error "proper usage is --with-libc=STRING" "$LINENO" 5
 fi
 else
   { $as_echo "$as_me:${as_lineno-$LINENO}: result: default LIBC=\"$LIBC\"" >&5
@@ -11877,7 +11923,8 @@
 do :
   as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh`
 ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var"
-if eval test \"x\$"$as_ac_var"\" = x"yes"; then :
+eval as_val=\$$as_ac_var
+   if test "x$as_val" = x""yes; then :
   cat >>confdefs.h <<_ACEOF
 #define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1
 _ACEOF
@@ -11889,7 +11936,8 @@
 do :
   as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh`
 ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var"
-if eval test \"x\$"$as_ac_var"\" = x"yes"; then :
+eval as_val=\$$as_ac_var
+   if test "x$as_val" = x""yes; then :
   cat >>confdefs.h <<_ACEOF
 #define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1
 _ACEOF
@@ -12152,7 +12200,7 @@
 15|30)
   ;;
 *)
-  as_fn_error $? "bad value $enable_big_digits for --enable-big-digits; value should be 15 or 30" "$LINENO" 5  ;;
+  as_fn_error "bad value $enable_big_digits for --enable-big-digits; value should be 15 or 30" "$LINENO" 5 ;;
 esac
 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_big_digits" >&5
 $as_echo "$enable_big_digits" >&6; }
@@ -12203,8 +12251,9 @@
   if test "$ac_cv_type_wchar_t" = yes; then
      { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error 77 "cannot compute sizeof (wchar_t)
-See \`config.log' for more details" "$LINENO" 5 ; }
+{ as_fn_set_status 77
+as_fn_error "cannot compute sizeof (wchar_t)
+See \`config.log' for more details." "$LINENO" 5; }; }
    else
      ac_cv_sizeof_wchar_t=0
    fi
@@ -12573,8 +12622,8 @@
 
      ;; #(
    *)
-     as_fn_error $? "unknown endianness
- presetting ac_cv_c_bigendian=no (or yes) will help" "$LINENO" 5  ;;
+     as_fn_error "unknown endianness
+ presetting ac_cv_c_bigendian=no (or yes) will help" "$LINENO" 5 ;;
  esac
 
 
@@ -12835,7 +12884,7 @@
   have_readline=no
 
 fi
-rm -f conftest.err conftest.i conftest.$ac_ext
+rm -f conftest.err conftest.$ac_ext
 if test $have_readline = yes
 then
   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
@@ -13009,7 +13058,7 @@
   have_readline=no
 
 fi
-rm -f conftest.err conftest.i conftest.$ac_ext
+rm -f conftest.err conftest.$ac_ext
 if test $have_readline = yes
 then
   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
@@ -13824,14 +13873,6 @@
 esac
 
 
-ac_fn_c_check_func "$LINENO" "pipe2" "ac_cv_func_pipe2"
-if test "x$ac_cv_func_pipe2" = x""yes; then :
-
-$as_echo "#define HAVE_PIPE2 1" >>confdefs.h
-
-fi
-
-
 
 
 for h in `(cd $srcdir;echo Python/thread_*.h)`
@@ -13939,7 +13980,6 @@
 
 ac_libobjs=
 ac_ltlibobjs=
-U=
 for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue
   # 1. Remove the extension, and $U if already installed.
   ac_script='s/\$U\././;s/\.o$//;s/\.obj$//'
@@ -14102,19 +14142,19 @@
 (unset CDPATH) >/dev/null 2>&1 && unset CDPATH
 
 
-# as_fn_error STATUS ERROR [LINENO LOG_FD]
-# ----------------------------------------
+# as_fn_error ERROR [LINENO LOG_FD]
+# ---------------------------------
 # Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are
 # provided, also output the error to LOG_FD, referencing LINENO. Then exit the
-# script with STATUS, using 1 if that was 0.
+# script with status $?, using 1 if that was 0.
 as_fn_error ()
 {
-  as_status=$1; test $as_status -eq 0 && as_status=1
-  if test "$4"; then
-    as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-    $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
+  as_status=$?; test $as_status -eq 0 && as_status=1
+  if test "$3"; then
+    as_lineno=${as_lineno-"$2"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
+    $as_echo "$as_me:${as_lineno-$LINENO}: error: $1" >&$3
   fi
-  $as_echo "$as_me: error: $2" >&2
+  $as_echo "$as_me: error: $1" >&2
   as_fn_exit $as_status
 } # as_fn_error
 
@@ -14310,7 +14350,7 @@
       test -d "$as_dir" && break
     done
     test -z "$as_dirs" || eval "mkdir $as_dirs"
-  } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir"
+  } || test -d "$as_dir" || as_fn_error "cannot create directory $as_dir"
 
 
 } # as_fn_mkdir_p
@@ -14364,7 +14404,7 @@
 # values after options handling.
 ac_log="
 This file was extended by python $as_me 3.3, which was
-generated by GNU Autoconf 2.67.  Invocation command line was
+generated by GNU Autoconf 2.65.  Invocation command line was
 
   CONFIG_FILES    = $CONFIG_FILES
   CONFIG_HEADERS  = $CONFIG_HEADERS
@@ -14426,10 +14466,10 @@
 ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
 ac_cs_version="\\
 python config.status 3.3
-configured by $0, generated by GNU Autoconf 2.67,
+configured by $0, generated by GNU Autoconf 2.65,
   with options \\"\$ac_cs_config\\"
 
-Copyright (C) 2010 Free Software Foundation, Inc.
+Copyright (C) 2009 Free Software Foundation, Inc.
 This config.status script is free software; the Free Software Foundation
 gives unlimited permission to copy, distribute and modify it."
 
@@ -14445,16 +14485,11 @@
 while test $# != 0
 do
   case $1 in
-  --*=?*)
+  --*=*)
     ac_option=`expr "X$1" : 'X\([^=]*\)='`
     ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'`
     ac_shift=:
     ;;
-  --*=)
-    ac_option=`expr "X$1" : 'X\([^=]*\)='`
-    ac_optarg=
-    ac_shift=:
-    ;;
   *)
     ac_option=$1
     ac_optarg=$2
@@ -14476,7 +14511,6 @@
     $ac_shift
     case $ac_optarg in
     *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;;
-    '') as_fn_error $? "missing file argument" ;;
     esac
     as_fn_append CONFIG_FILES " '$ac_optarg'"
     ac_need_defaults=false;;
@@ -14489,7 +14523,7 @@
     ac_need_defaults=false;;
   --he | --h)
     # Conflict between --help and --header
-    as_fn_error $? "ambiguous option: \`$1'
+    as_fn_error "ambiguous option: \`$1'
 Try \`$0 --help' for more information.";;
   --help | --hel | -h )
     $as_echo "$ac_cs_usage"; exit ;;
@@ -14498,7 +14532,7 @@
     ac_cs_silent=: ;;
 
   # This is an error.
-  -*) as_fn_error $? "unrecognized option: \`$1'
+  -*) as_fn_error "unrecognized option: \`$1'
 Try \`$0 --help' for more information." ;;
 
   *) as_fn_append ac_config_targets " $1"
@@ -14557,7 +14591,7 @@
     "Misc/python.pc") CONFIG_FILES="$CONFIG_FILES Misc/python.pc" ;;
     "Modules/ld_so_aix") CONFIG_FILES="$CONFIG_FILES Modules/ld_so_aix" ;;
 
-  *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5 ;;
+  *) as_fn_error "invalid argument: \`$ac_config_target'" "$LINENO" 5;;
   esac
 done
 
@@ -14594,7 +14628,7 @@
 {
   tmp=./conf$$-$RANDOM
   (umask 077 && mkdir "$tmp")
-} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5
+} || as_fn_error "cannot create a temporary directory in ." "$LINENO" 5
 
 # Set up the scripts for CONFIG_FILES section.
 # No need to generate them if there are no CONFIG_FILES.
@@ -14611,7 +14645,7 @@
 fi
 ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' </dev/null 2>/dev/null`
 if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then
-  ac_cs_awk_cr='\\r'
+  ac_cs_awk_cr='\r'
 else
   ac_cs_awk_cr=$ac_cr
 fi
@@ -14625,18 +14659,18 @@
   echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' &&
   echo "_ACEOF"
 } >conf$$subs.sh ||
-  as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
-ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'`
+  as_fn_error "could not make $CONFIG_STATUS" "$LINENO" 5
+ac_delim_num=`echo "$ac_subst_vars" | grep -c '$'`
 ac_delim='%!_!# '
 for ac_last_try in false false false false false :; do
   . ./conf$$subs.sh ||
-    as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
+    as_fn_error "could not make $CONFIG_STATUS" "$LINENO" 5
 
   ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X`
   if test $ac_delim_n = $ac_delim_num; then
     break
   elif $ac_last_try; then
-    as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
+    as_fn_error "could not make $CONFIG_STATUS" "$LINENO" 5
   else
     ac_delim="$ac_delim!$ac_delim _$ac_delim!! "
   fi
@@ -14725,28 +14759,20 @@
 else
   cat
 fi < "$tmp/subs1.awk" > "$tmp/subs.awk" \
-  || as_fn_error $? "could not setup config files machinery" "$LINENO" 5
-_ACEOF
-
-# VPATH may cause trouble with some makes, so we remove sole $(srcdir),
-# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and
+  || as_fn_error "could not setup config files machinery" "$LINENO" 5
+_ACEOF
+
+# VPATH may cause trouble with some makes, so we remove $(srcdir),
+# ${srcdir} and @srcdir@ from VPATH if srcdir is ".", strip leading and
 # trailing colons and then remove the whole line if VPATH becomes empty
 # (actually we leave an empty line to preserve line numbers).
 if test "x$srcdir" = x.; then
-  ac_vpsub='/^[	 ]*VPATH[	 ]*=[	 ]*/{
-h
-s///
-s/^/:/
-s/[	 ]*$/:/
-s/:\$(srcdir):/:/g
-s/:\${srcdir}:/:/g
-s/:@srcdir@:/:/g
-s/^:*//
+  ac_vpsub='/^[	 ]*VPATH[	 ]*=/{
+s/:*\$(srcdir):*/:/
+s/:*\${srcdir}:*/:/
+s/:*@srcdir@:*/:/
+s/^\([^=]*=[	 ]*\):*/\1/
 s/:*$//
-x
-s/\(=[	 ]*\).*/\1/
-G
-s/\n//
 s/^[^=]*=[	 ]*$//
 }'
 fi
@@ -14774,7 +14800,7 @@
   if test -z "$ac_t"; then
     break
   elif $ac_last_try; then
-    as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5
+    as_fn_error "could not make $CONFIG_HEADERS" "$LINENO" 5
   else
     ac_delim="$ac_delim!$ac_delim _$ac_delim!! "
   fi
@@ -14859,7 +14885,7 @@
 _ACAWK
 _ACEOF
 cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-  as_fn_error $? "could not setup config headers machinery" "$LINENO" 5
+  as_fn_error "could not setup config headers machinery" "$LINENO" 5
 fi # test -n "$CONFIG_HEADERS"
 
 
@@ -14872,7 +14898,7 @@
   esac
   case $ac_mode$ac_tag in
   :[FHL]*:*);;
-  :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5 ;;
+  :L* | :C*:*) as_fn_error "invalid tag \`$ac_tag'" "$LINENO" 5;;
   :[FH]-) ac_tag=-:-;;
   :[FH]*) ac_tag=$ac_tag:$ac_tag.in;;
   esac
@@ -14900,7 +14926,7 @@
 	   [\\/$]*) false;;
 	   *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";;
 	   esac ||
-	   as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5 ;;
+	   as_fn_error "cannot find input file: \`$ac_f'" "$LINENO" 5;;
       esac
       case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac
       as_fn_append ac_file_inputs " '$ac_f'"
@@ -14927,7 +14953,7 @@
 
     case $ac_tag in
     *:-:* | *:-) cat >"$tmp/stdin" \
-      || as_fn_error $? "could not create $ac_file" "$LINENO" 5  ;;
+      || as_fn_error "could not create $ac_file" "$LINENO" 5 ;;
     esac
     ;;
   esac
@@ -15058,22 +15084,22 @@
 $ac_datarootdir_hack
 "
 eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$tmp/subs.awk" >$tmp/out \
-  || as_fn_error $? "could not create $ac_file" "$LINENO" 5
+  || as_fn_error "could not create $ac_file" "$LINENO" 5
 
 test -z "$ac_datarootdir_hack$ac_datarootdir_seen" &&
   { ac_out=`sed -n '/\${datarootdir}/p' "$tmp/out"`; test -n "$ac_out"; } &&
   { ac_out=`sed -n '/^[	 ]*datarootdir[	 ]*:*=/p' "$tmp/out"`; test -z "$ac_out"; } &&
   { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir'
-which seems to be undefined.  Please make sure it is defined" >&5
+which seems to be undefined.  Please make sure it is defined." >&5
 $as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir'
-which seems to be undefined.  Please make sure it is defined" >&2;}
+which seems to be undefined.  Please make sure it is defined." >&2;}
 
   rm -f "$tmp/stdin"
   case $ac_file in
   -) cat "$tmp/out" && rm -f "$tmp/out";;
   *) rm -f "$ac_file" && mv "$tmp/out" "$ac_file";;
   esac \
-  || as_fn_error $? "could not create $ac_file" "$LINENO" 5
+  || as_fn_error "could not create $ac_file" "$LINENO" 5
  ;;
   :H)
   #
@@ -15084,19 +15110,19 @@
       $as_echo "/* $configure_input  */" \
       && eval '$AWK -f "$tmp/defines.awk"' "$ac_file_inputs"
     } >"$tmp/config.h" \
-      || as_fn_error $? "could not create $ac_file" "$LINENO" 5
+      || as_fn_error "could not create $ac_file" "$LINENO" 5
     if diff "$ac_file" "$tmp/config.h" >/dev/null 2>&1; then
       { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5
 $as_echo "$as_me: $ac_file is unchanged" >&6;}
     else
       rm -f "$ac_file"
       mv "$tmp/config.h" "$ac_file" \
-	|| as_fn_error $? "could not create $ac_file" "$LINENO" 5
+	|| as_fn_error "could not create $ac_file" "$LINENO" 5
     fi
   else
     $as_echo "/* $configure_input  */" \
       && eval '$AWK -f "$tmp/defines.awk"' "$ac_file_inputs" \
-      || as_fn_error $? "could not create -" "$LINENO" 5
+      || as_fn_error "could not create -" "$LINENO" 5
   fi
  ;;
 
@@ -15116,7 +15142,7 @@
 ac_clean_files=$ac_clean_files_save
 
 test $ac_write_fail = 0 ||
-  as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5
+  as_fn_error "write failure creating $CONFIG_STATUS" "$LINENO" 5
 
 
 # configure is writing to config.log, and then calls config.status.
@@ -15137,7 +15163,7 @@
   exec 5>>config.log
   # Use ||, not &&, to avoid exiting from the if with $? = 1, which
   # would make configure fail if this is the last instruction.
-  $ac_cs_success || as_fn_exit 1
+  $ac_cs_success || as_fn_exit $?
 fi
 if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then
   { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5
diff --git a/configure.in b/configure.in
--- a/configure.in
+++ b/configure.in
@@ -7,8 +7,29 @@
 
 AC_PREREQ(2.65)
 
-AC_REVISION($Revision$)
 AC_INIT(python, PYTHON_VERSION, http://bugs.python.org/)
+
+AC_SUBST(HGVERSION)
+AC_SUBST(HGTAG)
+AC_SUBST(HGBRANCH)
+
+if test -e $srcdir/.hg/00changelog.i
+then
+AC_CHECK_PROG(HAS_HG, hg, found, not-found)
+else
+HAS_HG=no-repository
+fi
+if test $HAS_HG = found
+then
+    HGVERSION="hg id -i \$(srcdir)"
+    HGTAG="hg id -t \$(srcdir)"
+    HGBRANCH="hg id -b \$(srcdir)"
+else
+    HGVERSION=""
+    HGTAG=""
+    HGBRANCH=""
+fi
+
 AC_CONFIG_SRCDIR([Include/object.h])
 AC_CONFIG_HEADER(pyconfig.h)
 
@@ -808,21 +829,15 @@
         ARFLAGS="rc"
 fi
 
-AC_SUBST(HGVERSION)
-AC_SUBST(HGTAG)
-AC_SUBST(HGBRANCH)
-AC_CHECK_PROG(HAS_HG, hg, found, not-found)
-if test $HAS_HG = found
+AC_SUBST(DISABLE_ASDLGEN)
+DISABLE_ASDLGEN=""
+AC_CHECK_PROG(HAS_PYTHON, python, found, not-found)
+if test $HAS_HG != found -o $HAS_PYTHON != found
 then
-    HGVERSION="hg id -i \$(srcdir)"
-    HGTAG="hg id -t \$(srcdir)"
-    HGBRANCH="hg id -b \$(srcdir)"
-else
-    HGVERSION=""
-    HGTAG=""
-    HGBRANCH=""
+    DISABLE_ASDLGEN="@echo hg: $HAS_HG, python: $HAS_PYTHON! cannot run \$(srcdir)/Parser/asdl_c.py #"
 fi
 
+
 case $MACHDEP in
 bsdos*|hp*|HP*)
 	# install -d does not work on BSDI or HP-UX
@@ -2531,7 +2546,7 @@
  getpriority getresuid getresgid getpwent getspnam getspent getsid getwd \
  if_nameindex \
  initgroups kill killpg lchmod lchown lockf linkat lstat lutimes mbrtowc mkdirat mkfifo \
- mkfifoat mknod mknodat mktime mremap nice openat pathconf pause plock poll \
+ mkfifoat mknod mknodat mktime mremap nice openat pathconf pause pipe2 plock poll \
  posix_fallocate posix_fadvise pread \
  pthread_init pthread_kill putenv pwrite readlink readlinkat readv realpath renameat \
  select sem_open sem_timedwait sem_getvalue sem_unlink sendfile setegid seteuid \
@@ -4244,8 +4259,6 @@
 esac
 
 
-AC_CHECK_FUNC(pipe2, AC_DEFINE(HAVE_PIPE2, 1, [Define if the OS supports pipe2()]), )
-
 AC_SUBST(THREADHEADERS)
 
 for h in `(cd $srcdir;echo Python/thread_*.h)`
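
The configure.in hunks above move the Mercurial probe ahead of AC_CONFIG_SRCDIR and add a python probe, so the build can skip regenerating the ASDL-derived sources when either tool is missing (DISABLE_ASDLGEN). The HGVERSION/HGTAG/HGBRANCH values are simply "hg id" invocations run against $(srcdir); a rough, hypothetical Python equivalent of what those three commands report (the helper name and dict keys are illustrative, not part of the build):

    import subprocess

    def hg_id(srcdir="."):
        """Sketch of the hg queries behind HGVERSION/HGTAG/HGBRANCH."""
        def run(flag):
            try:
                out = subprocess.check_output(["hg", "id", flag, srcdir])
                return out.decode("ascii", "replace").strip()
            except (OSError, subprocess.CalledProcessError):
                return ""          # mirrors the empty fallbacks in configure.in
        return {"revision": run("-i"), "tag": run("-t"), "branch": run("-b")}
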
diff --git a/pyconfig.h.in b/pyconfig.h.in
--- a/pyconfig.h.in
+++ b/pyconfig.h.in
@@ -560,7 +560,7 @@
 /* Define to 1 if you have the `pause' function. */
 #undef HAVE_PAUSE
 
-/* Define if the OS supports pipe2() */
+/* Define to 1 if you have the `pipe2' function. */
 #undef HAVE_PIPE2
 
 /* Define to 1 if you have the `plock' function. */
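
The pyconfig.h.in hunk reflects that pipe2 is now probed through the regular AC_CHECK_FUNCS list, so the template carries the stock autoheader comment. When HAVE_PIPE2 is defined, os.pipe2() becomes available at the Python level; a hedged sketch of using it with a portable fallback (assuming a POSIX platform; O_CLOEXEC may be absent, hence the getattr):

    import os

    # Sketch: os.pipe2() (present when the platform pipe2() was detected)
    # creates both pipe ends with flags such as O_CLOEXEC set atomically.
    flags = getattr(os, "O_CLOEXEC", 0)      # assumption: flag may be missing
    if hasattr(os, "pipe2"):
        r, w = os.pipe2(flags)
    else:
        r, w = os.pipe()                     # fallback: no atomic close-on-exec

    os.write(w, b"ping")
    print(os.read(r, 4))                     # b'ping'
    os.close(r)
    os.close(w)
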
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -1,8 +1,6 @@
 # Autodetecting setup.py script for building the Python extensions
 #
 
-__version__ = "$Revision$"
-
 import sys, os, imp, re, optparse
 from glob import glob
 import sysconfig
@@ -1889,7 +1887,7 @@
           # check the PyBuildScripts command above, and change the links
           # created by the bininstall target in Makefile.pre.in
           scripts = ["Tools/scripts/pydoc3", "Tools/scripts/idle3",
-                     "Tools/scripts/2to3"]
+                     "Tools/scripts/2to3", "Tools/scripts/pysetup3"]
         )
 
 # --install-platlib

-- 
Repository URL: http://hg.python.org/cpython

