[pypy-commit] pypy py3.3: Undo all changes in lib-python/3

amauryfa noreply at buildbot.pypy.org
Sat Apr 26 12:28:51 CEST 2014


Author: Amaury Forgeot d'Arc <amauryfa at gmail.com>
Branch: py3.3
Changeset: r71002:8cfe11f35dae
Date: 2014-04-26 12:19 +0200
http://bitbucket.org/pypy/pypy/changeset/8cfe11f35dae/

Log:	Undo all changes in lib-python/3

diff too long, truncating to 2000 out of 61393 lines

diff --git a/lib-python/3/argparse.py b/lib-python/3/argparse.py
--- a/lib-python/3/argparse.py
+++ b/lib-python/3/argparse.py
@@ -71,6 +71,7 @@
     'ArgumentDefaultsHelpFormatter',
     'RawDescriptionHelpFormatter',
     'RawTextHelpFormatter',
+    'MetavarTypeHelpFormatter',
     'Namespace',
     'Action',
     'ONE_OR_MORE',
@@ -164,6 +165,8 @@
         self._prog = prog
         self._indent_increment = indent_increment
         self._max_help_position = max_help_position
+        self._max_help_position = min(max_help_position,
+                                      max(width - 20, indent_increment * 2))
         self._width = width
 
         self._current_indent = 0
@@ -335,7 +338,7 @@
                     else:
                         line_len = len(indent) - 1
                     for part in parts:
-                        if line_len + 1 + len(part) > text_width:
+                        if line_len + 1 + len(part) > text_width and line:
                             lines.append(indent + ' '.join(line))
                             line = []
                             line_len = len(indent) - 1
@@ -419,7 +422,8 @@
 
             # produce all arg strings
             elif not action.option_strings:
-                part = self._format_args(action, action.dest)
+                default = self._get_default_metavar_for_positional(action)
+                part = self._format_args(action, default)
 
                 # if it's in a group, strip the outer []
                 if action in group_actions:
@@ -441,7 +445,7 @@
                 # if the Optional takes a value, format is:
                 #    -s ARGS or --long ARGS
                 else:
-                    default = action.dest.upper()
+                    default = self._get_default_metavar_for_optional(action)
                     args_string = self._format_args(action, default)
                     part = '%s %s' % (option_string, args_string)
 
@@ -474,7 +478,7 @@
     def _format_text(self, text):
         if '%(prog)' in text:
             text = text % dict(prog=self._prog)
-        text_width = self._width - self._current_indent
+        text_width = max(self._width - self._current_indent, 11)
         indent = ' ' * self._current_indent
         return self._fill_text(text, text_width, indent) + '\n\n'
 
@@ -482,7 +486,7 @@
         # determine the required width and the entry label
         help_position = min(self._action_max_length + 2,
                             self._max_help_position)
-        help_width = self._width - help_position
+        help_width = max(self._width - help_position, 11)
         action_width = help_position - self._current_indent - 2
         action_header = self._format_action_invocation(action)
 
@@ -527,7 +531,8 @@
 
     def _format_action_invocation(self, action):
         if not action.option_strings:
-            metavar, = self._metavar_formatter(action, action.dest)(1)
+            default = self._get_default_metavar_for_positional(action)
+            metavar, = self._metavar_formatter(action, default)(1)
             return metavar
 
         else:
@@ -541,7 +546,7 @@
             # if the Optional takes a value, format is:
             #    -s ARGS, --long ARGS
             else:
-                default = action.dest.upper()
+                default = self._get_default_metavar_for_optional(action)
                 args_string = self._format_args(action, default)
                 for option_string in action.option_strings:
                     parts.append('%s %s' % (option_string, args_string))
@@ -619,6 +624,12 @@
     def _get_help_string(self, action):
         return action.help
 
+    def _get_default_metavar_for_optional(self, action):
+        return action.dest.upper()
+
+    def _get_default_metavar_for_positional(self, action):
+        return action.dest
+
 
 class RawDescriptionHelpFormatter(HelpFormatter):
     """Help message formatter which retains any formatting in descriptions.
@@ -628,7 +639,7 @@
     """
 
     def _fill_text(self, text, width, indent):
-        return ''.join([indent + line for line in text.splitlines(True)])
+        return ''.join(indent + line for line in text.splitlines(keepends=True))
 
 
 class RawTextHelpFormatter(RawDescriptionHelpFormatter):
@@ -659,6 +670,22 @@
         return help
 
 
+class MetavarTypeHelpFormatter(HelpFormatter):
+    """Help message formatter which uses the argument 'type' as the default
+    metavar value (instead of the argument 'dest')
+
+    Only the name of this class is considered a public API. All the methods
+    provided by the class are considered an implementation detail.
+    """
+
+    def _get_default_metavar_for_optional(self, action):
+        return action.type.__name__
+
+    def _get_default_metavar_for_positional(self, action):
+        return action.type.__name__
+
+
+
 # =====================
 # Options and Arguments
 # =====================
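
As an illustration of the MetavarTypeHelpFormatter restored by the hunk above, here is a minimal usage sketch against standard argparse (the program and argument names are invented, not part of this changeset):

    import argparse

    parser = argparse.ArgumentParser(
        prog='demo', formatter_class=argparse.MetavarTypeHelpFormatter)
    parser.add_argument('--count', type=int)   # help text shows "--count int"
    parser.add_argument('path', type=str)      # positional is displayed as "str"
    parser.print_help()
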
@@ -1554,7 +1581,6 @@
                  usage=None,
                  description=None,
                  epilog=None,
-                 version=None,
                  parents=[],
                  formatter_class=HelpFormatter,
                  prefix_chars='-',
@@ -1563,14 +1589,6 @@
                  conflict_handler='error',
                  add_help=True):
 
-        if version is not None:
-            import warnings
-            warnings.warn(
-                """The "version" argument to ArgumentParser is deprecated. """
-                """Please use """
-                """"add_argument(..., action='version', version="N", ...)" """
-                """instead""", DeprecationWarning)
-
         superinit = super(ArgumentParser, self).__init__
         superinit(description=description,
                   prefix_chars=prefix_chars,
@@ -1584,7 +1602,6 @@
         self.prog = prog
         self.usage = usage
         self.epilog = epilog
-        self.version = version
         self.formatter_class = formatter_class
         self.fromfile_prefix_chars = fromfile_prefix_chars
         self.add_help = add_help
@@ -1599,7 +1616,7 @@
             return string
         self.register('type', None, identity)
 
-        # add help and version arguments if necessary
+        # add help argument if necessary
         # (using explicit default to override global argument_default)
         default_prefix = '-' if '-' in prefix_chars else prefix_chars[0]
         if self.add_help:
@@ -1607,12 +1624,6 @@
                 default_prefix+'h', default_prefix*2+'help',
                 action='help', default=SUPPRESS,
                 help=_('show this help message and exit'))
-        if self.version:
-            self.add_argument(
-                default_prefix+'v', default_prefix*2+'version',
-                action='version', default=SUPPRESS,
-                version=self.version,
-                help=_("show program's version number and exit"))
 
         # add parent arguments and defaults
         for parent in parents:
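
The hunks above drop the deprecated version= argument to the ArgumentParser constructor; the replacement spelling that the removed warning pointed at can be sketched as follows (illustrative program name and version string):

    import argparse

    parser = argparse.ArgumentParser(prog='demo')
    parser.add_argument('--version', action='version', version='%(prog)s 1.0')
    # "demo --version" prints "demo 1.0" and exits
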
@@ -1632,7 +1643,6 @@
             'prog',
             'usage',
             'description',
-            'version',
             'formatter_class',
             'conflict_handler',
             'add_help',
@@ -1952,29 +1962,29 @@
         # if we didn't consume all the argument strings, there were extras
         extras.extend(arg_strings[stop_index:])
 
-        # if we didn't use all the Positional objects, there were too few
-        # arg strings supplied.
-        if positionals:
-            self.error(_('too few arguments'))
-
-        # make sure all required actions were present, and convert defaults.
+        # make sure all required actions were present and also convert
+        # action defaults which were not given as arguments
+        required_actions = []
         for action in self._actions:
             if action not in seen_actions:
                 if action.required:
-                    name = _get_action_name(action)
-                    self.error(_('argument %s is required') % name)
+                    required_actions.append(_get_action_name(action))
                 else:
                     # Convert action default now instead of doing it before
                     # parsing arguments to avoid calling convert functions
                     # twice (which may fail) if the argument was given, but
                     # only if it was defined already in the namespace
                     if (action.default is not None and
-                            isinstance(action.default, str) and
-                            hasattr(namespace, action.dest) and
-                            action.default is getattr(namespace, action.dest)):
+                        isinstance(action.default, str) and
+                        hasattr(namespace, action.dest) and
+                        action.default is getattr(namespace, action.dest)):
                         setattr(namespace, action.dest,
                                 self._get_value(action, action.default))
 
+        if required_actions:
+            self.error(_('the following arguments are required: %s') %
+                       ', '.join(required_actions))
+
         # make sure all required groups had one option present
         for group in self._mutually_exclusive_groups:
             if group.required:
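
With the rewritten block above, missing required arguments are collected and reported together rather than one at a time; roughly, under the restored 3.3 behaviour (argument names are hypothetical):

    import argparse

    parser = argparse.ArgumentParser(prog='demo')
    parser.add_argument('src')
    parser.add_argument('dst')
    parser.parse_args([])
    # demo: error: the following arguments are required: src, dst
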
@@ -2326,16 +2336,6 @@
         # determine help from format above
         return formatter.format_help()
 
-    def format_version(self):
-        import warnings
-        warnings.warn(
-            'The format_version method is deprecated -- the "version" '
-            'argument to ArgumentParser is no longer supported.',
-            DeprecationWarning)
-        formatter = self._get_formatter()
-        formatter.add_text(self.version)
-        return formatter.format_help()
-
     def _get_formatter(self):
         return self.formatter_class(prog=self.prog)
 
@@ -2352,14 +2352,6 @@
             file = _sys.stdout
         self._print_message(self.format_help(), file)
 
-    def print_version(self, file=None):
-        import warnings
-        warnings.warn(
-            'The print_version method is deprecated -- the "version" '
-            'argument to ArgumentParser is no longer supported.',
-            DeprecationWarning)
-        self._print_message(self.format_version(), file)
-
     def _print_message(self, message, file=None):
         if message:
             if file is None:
diff --git a/lib-python/3/configparser.py b/lib-python/3/configparser.py
--- a/lib-python/3/configparser.py
+++ b/lib-python/3/configparser.py
@@ -118,7 +118,8 @@
         between keys and values are surrounded by spaces.
 """
 
-from collections import MutableMapping, OrderedDict as _default_dict, _ChainMap
+from collections.abc import MutableMapping
+from collections import OrderedDict as _default_dict, ChainMap as _ChainMap
 import functools
 import io
 import itertools
@@ -143,23 +144,6 @@
 class Error(Exception):
     """Base class for ConfigParser exceptions."""
 
-    def _get_message(self):
-        """Getter for 'message'; needed only to override deprecation in
-        BaseException.
-        """
-        return self.__message
-
-    def _set_message(self, value):
-        """Setter for 'message'; needed only to override deprecation in
-        BaseException.
-        """
-        self.__message = value
-
-    # BaseException.message has been deprecated since Python 2.6.  To prevent
-    # DeprecationWarning from popping up over this pre-existing attribute, use
-    # a new property that takes lookup precedence.
-    message = property(_get_message, _set_message)
-
     def __init__(self, msg=''):
         self.message = msg
         Exception.__init__(self, msg)
@@ -190,7 +174,7 @@
     def __init__(self, section, source=None, lineno=None):
         msg = [repr(section), " already exists"]
         if source is not None:
-            message = ["While reading from ", source]
+            message = ["While reading from ", repr(source)]
             if lineno is not None:
                 message.append(" [line {0:2d}]".format(lineno))
             message.append(": section ")
@@ -216,7 +200,7 @@
         msg = [repr(option), " in section ", repr(section),
                " already exists"]
         if source is not None:
-            message = ["While reading from ", source]
+            message = ["While reading from ", repr(source)]
             if lineno is not None:
                 message.append(" [line {0:2d}]".format(lineno))
             message.append(": option ")
@@ -302,7 +286,7 @@
             raise ValueError("Required argument `source' not given.")
         elif filename:
             source = filename
-        Error.__init__(self, 'Source contains parsing errors: %s' % source)
+        Error.__init__(self, 'Source contains parsing errors: %r' % source)
         self.source = source
         self.errors = []
         self.args = (source, )
@@ -338,7 +322,7 @@
     def __init__(self, filename, lineno, line):
         Error.__init__(
             self,
-            'File contains no section headers.\nfile: %s, line: %d\n%r' %
+            'File contains no section headers.\nfile: %r, line: %d\n%r' %
             (filename, lineno, line))
         self.source = filename
         self.lineno = lineno
@@ -455,7 +439,7 @@
         tmp_value = self._KEYCRE.sub('', tmp_value) # valid syntax
         if '$' in tmp_value:
             raise ValueError("invalid interpolation syntax in %r at "
-                             "position %d" % (value, tmp_value.find('%')))
+                             "position %d" % (value, tmp_value.find('$')))
         return value
 
     def _interpolate_some(self, parser, option, accum, rest, section, map,
@@ -959,7 +943,9 @@
 
         # XXX this is not atomic if read_dict fails at any point. Then again,
         # no update method in configparser is atomic in this implementation.
-        if key in self._sections:
+        if key == self.default_section:
+            self._defaults.clear()
+        elif key in self._sections:
             self._sections[key].clear()
         self.read_dict({key: value})
 
@@ -1005,18 +991,26 @@
         indent_level = 0
         e = None                              # None, or an exception
         for lineno, line in enumerate(fp, start=1):
-            comment_start = None
+            comment_start = sys.maxsize
             # strip inline comments
-            for prefix in self._inline_comment_prefixes:
-                index = line.find(prefix)
-                if index == 0 or (index > 0 and line[index-1].isspace()):
-                    comment_start = index
-                    break
+            inline_prefixes = {p: -1 for p in self._inline_comment_prefixes}
+            while comment_start == sys.maxsize and inline_prefixes:
+                next_prefixes = {}
+                for prefix, index in inline_prefixes.items():
+                    index = line.find(prefix, index+1)
+                    if index == -1:
+                        continue
+                    next_prefixes[prefix] = index
+                    if index == 0 or (index > 0 and line[index-1].isspace()):
+                        comment_start = min(comment_start, index)
+                inline_prefixes = next_prefixes
             # strip full line comments
             for prefix in self._comment_prefixes:
                 if line.strip().startswith(prefix):
                     comment_start = 0
                     break
+            if comment_start == sys.maxsize:
+                comment_start = None
             value = line[:comment_start].strip()
             if not value:
                 if self._empty_lines_in_values:
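
The reworked loop above only treats an inline comment prefix as starting a comment when it begins the line or follows whitespace; a small sketch of the resulting behaviour (section and key names invented):

    import configparser

    cp = configparser.ConfigParser(inline_comment_prefixes=(';',))
    cp.read_string("[paths]\nhome = /opt/data ; per-host override\n")
    print(cp['paths']['home'])   # "/opt/data" - the trailing comment is stripped
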
diff --git a/lib-python/3/distutils/__init__.py b/lib-python/3/distutils/__init__.py
--- a/lib-python/3/distutils/__init__.py
+++ b/lib-python/3/distutils/__init__.py
@@ -13,5 +13,5 @@
 # Updated automatically by the Python release process.
 #
 #--start constants--
-__version__ = "3.2.5"
+__version__ = "3.3.5"
 #--end constants--
diff --git a/lib-python/3/distutils/command/build_ext.py b/lib-python/3/distutils/command/build_ext.py
--- a/lib-python/3/distutils/command/build_ext.py
+++ b/lib-python/3/distutils/command/build_ext.py
@@ -4,10 +4,11 @@
 modules (currently limited to C extensions, should accommodate C++
 extensions ASAP)."""
 
-import sys, os, re, imp
+import sys, os, re
 from distutils.core import Command
 from distutils.errors import *
 from distutils.sysconfig import customize_compiler, get_python_version
+from distutils.sysconfig import get_config_h_filename
 from distutils.dep_util import newer_group
 from distutils.extension import Extension
 from distutils.util import get_platform
@@ -35,11 +36,6 @@
     from distutils.ccompiler import show_compilers
     show_compilers()
 
-def _get_c_extension_suffix():
-    for ext, mod, typ in imp.get_suffixes():
-        if typ == imp.C_EXTENSION:
-            return ext
-
 
 class build_ext(Command):
 
@@ -164,6 +160,11 @@
         if isinstance(self.include_dirs, str):
             self.include_dirs = self.include_dirs.split(os.pathsep)
 
+        # If in a virtualenv, add its include directory
+        # Issue 16116
+        if sys.exec_prefix != sys.base_exec_prefix:
+            self.include_dirs.append(os.path.join(sys.exec_prefix, 'include'))
+
         # Put the Python "system" include dir at the end, so that
         # any local include dirs take precedence.
         self.include_dirs.append(py_include)
@@ -193,7 +194,9 @@
             # the 'libs' directory is for binary installs - we assume that
             # must be the *native* platform.  But we don't really support
             # cross-compiling via a binary install anyway, so we let it go.
-            self.library_dirs.append(os.path.join(sys.exec_prefix, 'include'))
+            self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs'))
+            if sys.base_exec_prefix != sys.prefix:  # Issue 16116
+                self.library_dirs.append(os.path.join(sys.base_exec_prefix, 'libs'))
             if self.debug:
                 self.build_temp = os.path.join(self.build_temp, "Debug")
             else:
@@ -201,13 +204,11 @@
 
             # Append the source distribution include and library directories,
             # this allows distutils on windows to work in the source tree
-            if 0:
-                # pypy has no PC directory
-                self.include_dirs.append(os.path.join(sys.exec_prefix, 'PC'))
-            if 1:
-                # pypy has no PCBuild directory
-                pass
-            elif MSVC_VERSION == 9:
+            self.include_dirs.append(os.path.dirname(get_config_h_filename()))
+            _sys_home = getattr(sys, '_home', None)
+            if _sys_home:
+                self.library_dirs.append(_sys_home)
+            if MSVC_VERSION >= 9:
                 # Use the .lib files for the correct architecture
                 if self.plat_name == 'win32':
                     suffix = ''
@@ -246,12 +247,10 @@
                 # building python standard extensions
                 self.library_dirs.append('.')
 
-        # for extensions under Linux or Solaris with a shared Python library,
+        # For building extensions with a shared Python library,
         # Python's library directory must be appended to library_dirs
-        sysconfig.get_config_var('Py_ENABLE_SHARED')
-        if ((sys.platform.startswith('linux') or sys.platform.startswith('gnu')
-             or sys.platform.startswith('sunos'))
-            and sysconfig.get_config_var('Py_ENABLE_SHARED')):
+        # See Issues: #1600860, #4366
+        if (sysconfig.get_config_var('Py_ENABLE_SHARED')):
             if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")):
                 # building third party extensions
                 self.library_dirs.append(sysconfig.get_config_var('LIBDIR'))
@@ -676,18 +675,10 @@
         # OS/2 has an 8 character module (extension) limit :-(
         if os.name == "os2":
             ext_path[len(ext_path) - 1] = ext_path[len(ext_path) - 1][:8]
-        # PyPy tweak: first try to get the C extension suffix from
-        # 'imp'.  If it fails we fall back to the 'SO' config var, like
-        # the previous version of this code did.  This should work for
-        # CPython too.  The point is that on PyPy with cpyext, the
-        # config var 'SO' is just ".so" but we want to return
-        # ".pypy-VERSION.so" instead.
-        ext_suffix = _get_c_extension_suffix()
-        if ext_suffix is None:
-            ext_suffix = get_config_var('EXT_SUFFIX')     # fall-back
         # extensions in debug_mode are named 'module_d.pyd' under windows
+        ext_suffix = get_config_var('EXT_SUFFIX')
         if os.name == 'nt' and self.debug:
-            ext_suffix = '_d.pyd'
+            return os.path.join(*ext_path) + '_d' + ext_suffix
         return os.path.join(*ext_path) + ext_suffix
 
     def get_export_symbols(self, ext):
@@ -706,17 +697,24 @@
         shared extension.  On most platforms, this is just 'ext.libraries';
         on Windows and OS/2, we add the Python library (eg. python20.dll).
         """
-        # For PyPy, we must not add any such Python library, on any platform
-        if "__pypy__" in sys.builtin_module_names:
-            return ext.libraries
-        # The python library is always needed on Windows.
+        # The python library is always needed on Windows.  For MSVC, this
+        # is redundant, since the library is mentioned in a pragma in
+        # pyconfig.h that MSVC groks.  The other Windows compilers all seem
+        # to need it mentioned explicitly, though, so that's what we do.
+        # Append '_d' to the python import library on debug builds.
         if sys.platform == "win32":
-            template = "python%d%d"
-            pythonlib = (template %
-                   (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
-            # don't extend ext.libraries, it may be shared with other
-            # extensions, it is a reference to the original list
-            return ext.libraries + [pythonlib]
+            from distutils.msvccompiler import MSVCCompiler
+            if not isinstance(self.compiler, MSVCCompiler):
+                template = "python%d%d"
+                if self.debug:
+                    template = template + '_d'
+                pythonlib = (template %
+                       (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+                # don't extend ext.libraries, it may be shared with other
+                # extensions, it is a reference to the original list
+                return ext.libraries + [pythonlib]
+            else:
+                return ext.libraries
         elif sys.platform == "os2emx":
             # EMX/GCC requires the python library explicitly, and I
             # believe VACPP does as well (though not confirmed) - AIM Apr01
diff --git a/lib-python/3/distutils/sysconfig.py b/lib-python/3/distutils/sysconfig.py
--- a/lib-python/3/distutils/sysconfig.py
+++ b/lib-python/3/distutils/sysconfig.py
@@ -9,17 +9,589 @@
 Email:        <fdrake at acm.org>
 """
 
+import os
+import re
 import sys
 
+from .errors import DistutilsPlatformError
 
-# The content of this file is redirected from
-# sysconfig_cpython or sysconfig_pypy.
+# These are needed in a couple of spots, so just compute them once.
+PREFIX = os.path.normpath(sys.prefix)
+EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
+BASE_PREFIX = os.path.normpath(sys.base_prefix)
+BASE_EXEC_PREFIX = os.path.normpath(sys.base_exec_prefix)
 
-if '__pypy__' in sys.builtin_module_names:
-    from distutils.sysconfig_pypy import *
-    from distutils.sysconfig_pypy import _config_vars # needed by setuptools
-    from distutils.sysconfig_pypy import _variable_rx # read_setup_file()
+# Path to the base directory of the project. On Windows the binary may
+# live in project/PCBuild9.  If we're dealing with an x64 Windows build,
+# it'll live in project/PCbuild/amd64.
+# set for cross builds
+if "_PYTHON_PROJECT_BASE" in os.environ:
+    project_base = os.path.abspath(os.environ["_PYTHON_PROJECT_BASE"])
 else:
-    from distutils.sysconfig_cpython import *
-    from distutils.sysconfig_cpython import _config_vars # needed by setuptools
-    from distutils.sysconfig_cpython import _variable_rx # read_setup_file()
+    project_base = os.path.dirname(os.path.abspath(sys.executable))
+if os.name == "nt" and "pcbuild" in project_base[-8:].lower():
+    project_base = os.path.abspath(os.path.join(project_base, os.path.pardir))
+# PC/VS7.1
+if os.name == "nt" and "\\pc\\v" in project_base[-10:].lower():
+    project_base = os.path.abspath(os.path.join(project_base, os.path.pardir,
+                                                os.path.pardir))
+# PC/AMD64
+if os.name == "nt" and "\\pcbuild\\amd64" in project_base[-14:].lower():
+    project_base = os.path.abspath(os.path.join(project_base, os.path.pardir,
+                                                os.path.pardir))
+
+# python_build: (Boolean) if true, we're either building Python or
+# building an extension with an un-installed Python, so we use
+# different (hard-wired) directories.
+# Setup.local is available for Makefile builds including VPATH builds,
+# Setup.dist is available on Windows
+def _is_python_source_dir(d):
+    for fn in ("Setup.dist", "Setup.local"):
+        if os.path.isfile(os.path.join(d, "Modules", fn)):
+            return True
+    return False
+_sys_home = getattr(sys, '_home', None)
+if _sys_home and os.name == 'nt' and \
+    _sys_home.lower().endswith(('pcbuild', 'pcbuild\\amd64')):
+    _sys_home = os.path.dirname(_sys_home)
+    if _sys_home.endswith('pcbuild'):   # must be amd64
+        _sys_home = os.path.dirname(_sys_home)
+def _python_build():
+    if _sys_home:
+        return _is_python_source_dir(_sys_home)
+    return _is_python_source_dir(project_base)
+python_build = _python_build()
+
+# Calculate the build qualifier flags if they are defined.  Adding the flags
+# to the include and lib directories only makes sense for an installation, not
+# an in-source build.
+build_flags = ''
+try:
+    if not python_build:
+        build_flags = sys.abiflags
+except AttributeError:
+    # It's not a configure-based build, so the sys module doesn't have
+    # this attribute, which is fine.
+    pass
+
+def get_python_version():
+    """Return a string containing the major and minor Python version,
+    leaving off the patchlevel.  Sample return values could be '1.5'
+    or '2.2'.
+    """
+    return sys.version[:3]
+
+
+def get_python_inc(plat_specific=0, prefix=None):
+    """Return the directory containing installed Python header files.
+
+    If 'plat_specific' is false (the default), this is the path to the
+    non-platform-specific header files, i.e. Python.h and so on;
+    otherwise, this is the path to platform-specific header files
+    (namely pyconfig.h).
+
+    If 'prefix' is supplied, use it instead of sys.base_prefix or
+    sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
+    """
+    if prefix is None:
+        prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX
+    if os.name == "posix":
+        if python_build:
+            # Assume the executable is in the build directory.  The
+            # pyconfig.h file should be in the same directory.  Since
+            # the build directory may not be the source directory, we
+            # must use "srcdir" from the makefile to find the "Include"
+            # directory.
+            base = _sys_home or project_base
+            if plat_specific:
+                return base
+            if _sys_home:
+                incdir = os.path.join(_sys_home, get_config_var('AST_H_DIR'))
+            else:
+                incdir = os.path.join(get_config_var('srcdir'), 'Include')
+            return os.path.normpath(incdir)
+        python_dir = 'python' + get_python_version() + build_flags
+        return os.path.join(prefix, "include", python_dir)
+    elif os.name == "nt":
+        return os.path.join(prefix, "include")
+    elif os.name == "os2":
+        return os.path.join(prefix, "Include")
+    else:
+        raise DistutilsPlatformError(
+            "I don't know where Python installs its C header files "
+            "on platform '%s'" % os.name)
+
+
+def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
+    """Return the directory containing the Python library (standard or
+    site additions).
+
+    If 'plat_specific' is true, return the directory containing
+    platform-specific modules, i.e. any module from a non-pure-Python
+    module distribution; otherwise, return the platform-shared library
+    directory.  If 'standard_lib' is true, return the directory
+    containing standard Python library modules; otherwise, return the
+    directory for site-specific modules.
+
+    If 'prefix' is supplied, use it instead of sys.base_prefix or
+    sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
+    """
+    if prefix is None:
+        if standard_lib:
+            prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX
+        else:
+            prefix = plat_specific and EXEC_PREFIX or PREFIX
+
+    if os.name == "posix":
+        libpython = os.path.join(prefix,
+                                 "lib", "python" + get_python_version())
+        if standard_lib:
+            return libpython
+        else:
+            return os.path.join(libpython, "site-packages")
+    elif os.name == "nt":
+        if standard_lib:
+            return os.path.join(prefix, "Lib")
+        else:
+            if get_python_version() < "2.2":
+                return prefix
+            else:
+                return os.path.join(prefix, "Lib", "site-packages")
+    elif os.name == "os2":
+        if standard_lib:
+            return os.path.join(prefix, "Lib")
+        else:
+            return os.path.join(prefix, "Lib", "site-packages")
+    else:
+        raise DistutilsPlatformError(
+            "I don't know where Python installs its library "
+            "on platform '%s'" % os.name)
+
+
+
+def customize_compiler(compiler):
+    """Do any platform-specific customization of a CCompiler instance.
+
+    Mainly needed on Unix, so we can plug in the information that
+    varies across Unices and is stored in Python's Makefile.
+    """
+    if compiler.compiler_type == "unix":
+        if sys.platform == "darwin":
+            # Perform first-time customization of compiler-related
+            # config vars on OS X now that we know we need a compiler.
+            # This is primarily to support Pythons from binary
+            # installers.  The kind and paths to build tools on
+            # the user system may vary significantly from the system
+            # that Python itself was built on.  Also the user OS
+            # version and build tools may not support the same set
+            # of CPU architectures for universal builds.
+            global _config_vars
+            if not _config_vars.get('CUSTOMIZED_OSX_COMPILER', ''):
+                import _osx_support
+                _osx_support.customize_compiler(_config_vars)
+                _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True'
+
+        (cc, cxx, opt, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \
+            get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS',
+                            'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS')
+
+        if 'CC' in os.environ:
+            newcc = os.environ['CC']
+            if (sys.platform == 'darwin'
+                    and 'LDSHARED' not in os.environ
+                    and ldshared.startswith(cc)):
+                # On OS X, if CC is overridden, use that as the default
+                #       command for LDSHARED as well
+                ldshared = newcc + ldshared[len(cc):]
+            cc = newcc
+        if 'CXX' in os.environ:
+            cxx = os.environ['CXX']
+        if 'LDSHARED' in os.environ:
+            ldshared = os.environ['LDSHARED']
+        if 'CPP' in os.environ:
+            cpp = os.environ['CPP']
+        else:
+            cpp = cc + " -E"           # not always
+        if 'LDFLAGS' in os.environ:
+            ldshared = ldshared + ' ' + os.environ['LDFLAGS']
+        if 'CFLAGS' in os.environ:
+            cflags = opt + ' ' + os.environ['CFLAGS']
+            ldshared = ldshared + ' ' + os.environ['CFLAGS']
+        if 'CPPFLAGS' in os.environ:
+            cpp = cpp + ' ' + os.environ['CPPFLAGS']
+            cflags = cflags + ' ' + os.environ['CPPFLAGS']
+            ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
+        if 'AR' in os.environ:
+            ar = os.environ['AR']
+        if 'ARFLAGS' in os.environ:
+            archiver = ar + ' ' + os.environ['ARFLAGS']
+        else:
+            archiver = ar + ' ' + ar_flags
+
+        cc_cmd = cc + ' ' + cflags
+        compiler.set_executables(
+            preprocessor=cpp,
+            compiler=cc_cmd,
+            compiler_so=cc_cmd + ' ' + ccshared,
+            compiler_cxx=cxx,
+            linker_so=ldshared,
+            linker_exe=cc,
+            archiver=archiver)
+
+        compiler.shared_lib_extension = shlib_suffix
+
+
+def get_config_h_filename():
+    """Return full pathname of installed pyconfig.h file."""
+    if python_build:
+        if os.name == "nt":
+            inc_dir = os.path.join(_sys_home or project_base, "PC")
+        else:
+            inc_dir = _sys_home or project_base
+    else:
+        inc_dir = get_python_inc(plat_specific=1)
+    if get_python_version() < '2.2':
+        config_h = 'config.h'
+    else:
+        # The name of the config.h file changed in 2.2
+        config_h = 'pyconfig.h'
+    return os.path.join(inc_dir, config_h)
+
+
+def get_makefile_filename():
+    """Return full pathname of installed Makefile from the Python build."""
+    if python_build:
+        return os.path.join(_sys_home or project_base, "Makefile")
+    lib_dir = get_python_lib(plat_specific=0, standard_lib=1)
+    config_file = 'config-{}{}'.format(get_python_version(), build_flags)
+    return os.path.join(lib_dir, config_file, 'Makefile')
+
+
+def parse_config_h(fp, g=None):
+    """Parse a config.h-style file.
+
+    A dictionary containing name/value pairs is returned.  If an
+    optional dictionary is passed in as the second argument, it is
+    used instead of a new dictionary.
+    """
+    if g is None:
+        g = {}
+    define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
+    undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n")
+    #
+    while True:
+        line = fp.readline()
+        if not line:
+            break
+        m = define_rx.match(line)
+        if m:
+            n, v = m.group(1, 2)
+            try: v = int(v)
+            except ValueError: pass
+            g[n] = v
+        else:
+            m = undef_rx.match(line)
+            if m:
+                g[m.group(1)] = 0
+    return g
+
+
+# Regexes needed for parsing Makefile (and similar syntaxes,
+# like old-style Setup files).
+_variable_rx = re.compile("([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
+_findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
+_findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")
+
+def parse_makefile(fn, g=None):
+    """Parse a Makefile-style file.
+
+    A dictionary containing name/value pairs is returned.  If an
+    optional dictionary is passed in as the second argument, it is
+    used instead of a new dictionary.
+    """
+    from distutils.text_file import TextFile
+    fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1, errors="surrogateescape")
+
+    if g is None:
+        g = {}
+    done = {}
+    notdone = {}
+
+    while True:
+        line = fp.readline()
+        if line is None: # eof
+            break
+        m = _variable_rx.match(line)
+        if m:
+            n, v = m.group(1, 2)
+            v = v.strip()
+            # `$$' is a literal `$' in make
+            tmpv = v.replace('$$', '')
+
+            if "$" in tmpv:
+                notdone[n] = v
+            else:
+                try:
+                    v = int(v)
+                except ValueError:
+                    # insert literal `$'
+                    done[n] = v.replace('$$', '$')
+                else:
+                    done[n] = v
+
+    # Variables with a 'PY_' prefix in the makefile. These need to
+    # be made available without that prefix through sysconfig.
+    # Special care is needed to ensure that variable expansion works, even
+    # if the expansion uses the name without a prefix.
+    renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')
+
+    # do variable interpolation here
+    while notdone:
+        for name in list(notdone):
+            value = notdone[name]
+            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
+            if m:
+                n = m.group(1)
+                found = True
+                if n in done:
+                    item = str(done[n])
+                elif n in notdone:
+                    # get it on a subsequent round
+                    found = False
+                elif n in os.environ:
+                    # do it like make: fall back to environment
+                    item = os.environ[n]
+
+                elif n in renamed_variables:
+                    if name.startswith('PY_') and name[3:] in renamed_variables:
+                        item = ""
+
+                    elif 'PY_' + n in notdone:
+                        found = False
+
+                    else:
+                        item = str(done['PY_' + n])
+                else:
+                    done[n] = item = ""
+                if found:
+                    after = value[m.end():]
+                    value = value[:m.start()] + item + after
+                    if "$" in after:
+                        notdone[name] = value
+                    else:
+                        try: value = int(value)
+                        except ValueError:
+                            done[name] = value.strip()
+                        else:
+                            done[name] = value
+                        del notdone[name]
+
+                        if name.startswith('PY_') \
+                            and name[3:] in renamed_variables:
+
+                            name = name[3:]
+                            if name not in done:
+                                done[name] = value
+            else:
+                # bogus variable reference; just drop it since we can't deal
+                del notdone[name]
+
+    fp.close()
+
+    # strip spurious spaces
+    for k, v in done.items():
+        if isinstance(v, str):
+            done[k] = v.strip()
+
+    # save the results in the global dictionary
+    g.update(done)
+    return g
+
+
+def expand_makefile_vars(s, vars):
+    """Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in
+    'string' according to 'vars' (a dictionary mapping variable names to
+    values).  Variables not present in 'vars' are silently expanded to the
+    empty string.  The variable values in 'vars' should not contain further
+    variable expansions; if 'vars' is the output of 'parse_makefile()',
+    you're fine.  Returns a variable-expanded version of 's'.
+    """
+
+    # This algorithm does multiple expansion, so if vars['foo'] contains
+    # "${bar}", it will expand ${foo} to ${bar}, and then expand
+    # ${bar}... and so forth.  This is fine as long as 'vars' comes from
+    # 'parse_makefile()', which takes care of such expansions eagerly,
+    # according to make's variable expansion semantics.
+
+    while True:
+        m = _findvar1_rx.search(s) or _findvar2_rx.search(s)
+        if m:
+            (beg, end) = m.span()
+            s = s[0:beg] + vars.get(m.group(1)) + s[end:]
+        else:
+            break
+    return s
+
+
+_config_vars = None
+
+def _init_posix():
+    """Initialize the module as appropriate for POSIX systems."""
+    g = {}
+    # load the installed Makefile:
+    try:
+        filename = get_makefile_filename()
+        parse_makefile(filename, g)
+    except IOError as msg:
+        my_msg = "invalid Python installation: unable to open %s" % filename
+        if hasattr(msg, "strerror"):
+            my_msg = my_msg + " (%s)" % msg.strerror
+
+        raise DistutilsPlatformError(my_msg)
+
+    # load the installed pyconfig.h:
+    try:
+        filename = get_config_h_filename()
+        with open(filename) as file:
+            parse_config_h(file, g)
+    except IOError as msg:
+        my_msg = "invalid Python installation: unable to open %s" % filename
+        if hasattr(msg, "strerror"):
+            my_msg = my_msg + " (%s)" % msg.strerror
+
+        raise DistutilsPlatformError(my_msg)
+
+    # On AIX, there are wrong paths to the linker scripts in the Makefile
+    # -- these paths are relative to the Python source, but when installed
+    # the scripts are in another directory.
+    if python_build:
+        g['LDSHARED'] = g['BLDSHARED']
+
+    elif get_python_version() < '2.1':
+        # The following two branches are for 1.5.2 compatibility.
+        if sys.platform == 'aix4':          # what about AIX 3.x ?
+            # Linker script is in the config directory, not in Modules as the
+            # Makefile says.
+            python_lib = get_python_lib(standard_lib=1)
+            ld_so_aix = os.path.join(python_lib, 'config', 'ld_so_aix')
+            python_exp = os.path.join(python_lib, 'config', 'python.exp')
+
+            g['LDSHARED'] = "%s %s -bI:%s" % (ld_so_aix, g['CC'], python_exp)
+
+    global _config_vars
+    _config_vars = g
+
+
+def _init_nt():
+    """Initialize the module as appropriate for NT"""
+    g = {}
+    # set basic install directories
+    g['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1)
+    g['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1)
+
+    # XXX hmmm.. a normal install puts include files here
+    g['INCLUDEPY'] = get_python_inc(plat_specific=0)
+
+    g['SO'] = '.pyd'
+    g['EXT_SUFFIX'] = '.pyd'
+    g['EXE'] = ".exe"
+    g['VERSION'] = get_python_version().replace(".", "")
+    g['BINDIR'] = os.path.dirname(os.path.abspath(sys.executable))
+
+    global _config_vars
+    _config_vars = g
+
+
+def _init_os2():
+    """Initialize the module as appropriate for OS/2"""
+    g = {}
+    # set basic install directories
+    g['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1)
+    g['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1)
+
+    # XXX hmmm.. a normal install puts include files here
+    g['INCLUDEPY'] = get_python_inc(plat_specific=0)
+
+    g['SO'] = '.pyd'
+    g['EXT_SUFFIX'] = '.pyd'
+    g['EXE'] = ".exe"
+
+    global _config_vars
+    _config_vars = g
+
+
+def get_config_vars(*args):
+    """With no arguments, return a dictionary of all configuration
+    variables relevant for the current platform.  Generally this includes
+    everything needed to build extensions and install both pure modules and
+    extensions.  On Unix, this means every variable defined in Python's
+    installed Makefile; on Windows it's a much smaller set.
+
+    With arguments, return a list of values that result from looking up
+    each argument in the configuration variable dictionary.
+    """
+    global _config_vars
+    if _config_vars is None:
+        func = globals().get("_init_" + os.name)
+        if func:
+            func()
+        else:
+            _config_vars = {}
+
+        # Normalized versions of prefix and exec_prefix are handy to have;
+        # in fact, these are the standard versions used most places in the
+        # Distutils.
+        _config_vars['prefix'] = PREFIX
+        _config_vars['exec_prefix'] = EXEC_PREFIX
+
+        # Always convert srcdir to an absolute path
+        srcdir = _config_vars.get('srcdir', project_base)
+        if os.name == 'posix':
+            if python_build:
+                # If srcdir is a relative path (typically '.' or '..')
+                # then it should be interpreted relative to the directory
+                # containing Makefile.
+                base = os.path.dirname(get_makefile_filename())
+                srcdir = os.path.join(base, srcdir)
+            else:
+                # srcdir is not meaningful since the installation is
+                # spread about the filesystem.  We choose the
+                # directory containing the Makefile since we know it
+                # exists.
+                srcdir = os.path.dirname(get_makefile_filename())
+        _config_vars['srcdir'] = os.path.abspath(os.path.normpath(srcdir))
+
+        # Convert srcdir into an absolute path if it appears necessary.
+        # Normally it is relative to the build directory.  However, during
+        # testing, for example, we might be running a non-installed python
+        # from a different directory.
+        if python_build and os.name == "posix":
+            base = project_base
+            if (not os.path.isabs(_config_vars['srcdir']) and
+                base != os.getcwd()):
+                # srcdir is relative and we are not in the same directory
+                # as the executable. Assume executable is in the build
+                # directory and make srcdir absolute.
+                srcdir = os.path.join(base, _config_vars['srcdir'])
+                _config_vars['srcdir'] = os.path.normpath(srcdir)
+
+        # OS X platforms require special customization to handle
+        # multi-architecture, multi-os-version installers
+        if sys.platform == 'darwin':
+            import _osx_support
+            _osx_support.customize_config_vars(_config_vars)
+
+    if args:
+        vals = []
+        for name in args:
+            vals.append(_config_vars.get(name))
+        return vals
+    else:
+        return _config_vars
+
+def get_config_var(name):
+    """Return the value of a single variable using the dictionary
+    returned by 'get_config_vars()'.  Equivalent to
+    get_config_vars().get(name)
+    """
+    return get_config_vars().get(name)
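
The restored distutils.sysconfig above is usually consumed through a handful of helpers; a quick sketch of typical calls (the printed values depend on the interpreter and platform):

    from distutils import sysconfig

    print(sysconfig.get_python_inc())              # directory containing Python.h
    print(sysconfig.get_config_var('EXT_SUFFIX'))  # e.g. ".cpython-33m.so" on a Linux CPython 3.3
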
diff --git a/lib-python/3/distutils/sysconfig_cpython.py b/lib-python/3/distutils/sysconfig_cpython.py
--- a/lib-python/3/distutils/sysconfig_cpython.py
+++ b/lib-python/3/distutils/sysconfig_cpython.py
@@ -146,7 +146,7 @@
             "I don't know where Python installs its library "
             "on platform '%s'" % os.name)
 
-
+_USE_CLANG = None
 
 def customize_compiler(compiler):
     """Do any platform-specific customization of a CCompiler instance.
@@ -155,28 +155,42 @@
     varies across Unices and is stored in Python's Makefile.
     """
     if compiler.compiler_type == "unix":
-        if sys.platform == "darwin":
-            # Perform first-time customization of compiler-related
-            # config vars on OS X now that we know we need a compiler.
-            # This is primarily to support Pythons from binary
-            # installers.  The kind and paths to build tools on
-            # the user system may vary significantly from the system
-            # that Python itself was built on.  Also the user OS
-            # version and build tools may not support the same set
-            # of CPU architectures for universal builds.
-            global _config_vars
-            if not _config_vars.get('CUSTOMIZED_OSX_COMPILER', ''):
-                import _osx_support
-                _osx_support.customize_compiler(_config_vars)
-                _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True'
-
-        (cc, cxx, opt, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \
+        (cc, cxx, opt, cflags, ccshared, ldshared, so_ext, ar, ar_flags) = \
             get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS',
-                            'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS')
+                            'CCSHARED', 'LDSHARED', 'SO', 'AR', 'ARFLAGS')
 
         newcc = None
         if 'CC' in os.environ:
-            cc = os.environ['CC']
+            newcc = os.environ['CC']
+        elif sys.platform == 'darwin' and cc == 'gcc-4.2':
+            # Issue #13590:
+            #       Since Apple removed gcc-4.2 in Xcode 4.2, we can no
+            #       longer assume it is available for extension module builds.
+            #       If Python was built with gcc-4.2, check first to see if
+            #       it is available on this system; if not, try to use clang
+            #       instead unless the caller explicitly set CC.
+            global _USE_CLANG
+            if _USE_CLANG is None:
+                from distutils import log
+                from subprocess import Popen, PIPE
+                p = Popen("! type gcc-4.2 && type clang && exit 2",
+                                shell=True, stdout=PIPE, stderr=PIPE)
+                p.wait()
+                if p.returncode == 2:
+                    _USE_CLANG = True
+                    log.warn("gcc-4.2 not found, using clang instead")
+                else:
+                    _USE_CLANG = False
+            if _USE_CLANG:
+                newcc = 'clang'
+        if newcc:
+            # On OS X, if CC is overridden, use that as the default
+            #       command for LDSHARED as well
+            if (sys.platform == 'darwin'
+                    and 'LDSHARED' not in os.environ
+                    and ldshared.startswith(cc)):
+                ldshared = newcc + ldshared[len(cc):]
+            cc = newcc
         if 'CXX' in os.environ:
             cxx = os.environ['CXX']
         if 'LDSHARED' in os.environ:
@@ -211,7 +225,7 @@
             linker_exe=cc,
             archiver=archiver)
 
-        compiler.shared_lib_extension = shlib_suffix
+        compiler.shared_lib_extension = so_ext
 
 
 def get_config_h_filename():
@@ -466,7 +480,6 @@
     g['INCLUDEPY'] = get_python_inc(plat_specific=0)
 
     g['SO'] = '.pyd'
-    g['EXT_SUFFIX'] = '.pyd'
     g['EXE'] = ".exe"
     g['VERSION'] = get_python_version().replace(".", "")
     g['BINDIR'] = os.path.dirname(os.path.abspath(sys.executable))
@@ -486,7 +499,6 @@
     g['INCLUDEPY'] = get_python_inc(plat_specific=0)
 
     g['SO'] = '.pyd'
-    g['EXT_SUFFIX'] = '.pyd'
     g['EXE'] = ".exe"
 
     global _config_vars
@@ -531,11 +543,43 @@
                 srcdir = os.path.join(base, _config_vars['srcdir'])
                 _config_vars['srcdir'] = os.path.normpath(srcdir)
 
-        # OS X platforms require special customization to handle
-        # multi-architecture, multi-os-version installers
         if sys.platform == 'darwin':
-            import _osx_support
-            _osx_support.customize_config_vars(_config_vars)
+            kernel_version = os.uname()[2] # Kernel version (8.4.3)
+            major_version = int(kernel_version.split('.')[0])
+
+            if major_version < 8:
+                # On Mac OS X before 10.4, check if -arch and -isysroot
+                # are in CFLAGS or LDFLAGS and remove them if they are.
+                # This is needed when building extensions on a 10.3 system
+                # using a universal build of python.
+                for key in ('LDFLAGS', 'BASECFLAGS',
+                        # a number of derived variables. These need to be
+                        # patched up as well.
+                        'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
+                    flags = _config_vars[key]
+                    flags = re.sub('-arch\s+\w+\s', ' ', flags, re.ASCII)
+                    flags = re.sub('-isysroot [^ \t]*', ' ', flags)
+                    _config_vars[key] = flags
+
+            else:
+
+                # Allow the user to override the architecture flags using
+                # an environment variable.
+                # NOTE: This name was introduced by Apple in OSX 10.5 and
+                # is used by several scripting languages distributed with
+                # that OS release.
+
+                if 'ARCHFLAGS' in os.environ:
+                    arch = os.environ['ARCHFLAGS']
+                    for key in ('LDFLAGS', 'BASECFLAGS',
+                        # a number of derived variables. These need to be
+                        # patched up as well.
+                        'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
+
+                        flags = _config_vars[key]
+                        flags = re.sub('-arch\s+\w+\s', ' ', flags)
+                        flags = flags + ' ' + arch
+                        _config_vars[key] = flags
 
     if args:
         vals = []
diff --git a/lib-python/3/distutils/tests/test_build_ext.py b/lib-python/3/distutils/tests/test_build_ext.py
--- a/lib-python/3/distutils/tests/test_build_ext.py
+++ b/lib-python/3/distutils/tests/test_build_ext.py
@@ -61,9 +61,9 @@
             sys.stdout = old_stdout
 
         if ALREADY_TESTED:
-            return
+            self.skipTest('Already tested in %s' % ALREADY_TESTED)
         else:
-            ALREADY_TESTED = True
+            ALREADY_TESTED = type(self).__name__
 
         import xx
 
@@ -76,8 +76,8 @@
         if support.HAVE_DOCSTRINGS:
             doc = 'This is a template module just for instruction.'
             self.assertEqual(xx.__doc__, doc)
-        self.assertTrue(isinstance(xx.Null(), xx.Null))
-        self.assertTrue(isinstance(xx.Str(), xx.Str))
+        self.assertIsInstance(xx.Null(), xx.Null)
+        self.assertIsInstance(xx.Str(), xx.Str)
 
     def tearDown(self):
         # Get everything back to normal
@@ -110,13 +110,9 @@
                 _config_vars['Py_ENABLE_SHARED'] = old_var
 
         # make sure we get some library dirs under solaris
-        self.assertTrue(len(cmd.library_dirs) > 0)
+        self.assertGreater(len(cmd.library_dirs), 0)
 
     def test_user_site(self):
-        # site.USER_SITE was introduced in 2.6
-        if sys.version < '2.6':
-            return
-
         import site
         dist = Distribution({'name': 'xx'})
         cmd = build_ext(dist)
@@ -124,7 +120,7 @@
         # making sure the user option is there
         options = [name for name, short, lable in
                    cmd.user_options]
-        self.assertTrue('user' in options)
+        self.assertIn('user', options)
 
         # setting a value
         cmd.user = 1
@@ -171,10 +167,10 @@
 
         from distutils import sysconfig
         py_include = sysconfig.get_python_inc()
-        self.assertTrue(py_include in cmd.include_dirs)
+        self.assertIn(py_include, cmd.include_dirs)
 
         plat_py_include = sysconfig.get_python_inc(plat_specific=1)
-        self.assertTrue(plat_py_include in cmd.include_dirs)
+        self.assertIn(plat_py_include, cmd.include_dirs)
 
         # make sure cmd.libraries is turned into a list
         # if it's a string
@@ -255,13 +251,13 @@
                              'some': 'bar'})]
         cmd.check_extensions_list(exts)
         ext = exts[0]
-        self.assertTrue(isinstance(ext, Extension))
+        self.assertIsInstance(ext, Extension)
 
         # check_extensions_list adds in ext the values passed
         # when they are in ('include_dirs', 'library_dirs', 'libraries'
         # 'extra_objects', 'extra_compile_args', 'extra_link_args')
         self.assertEqual(ext.libraries, 'foo')
-        self.assertTrue(not hasattr(ext, 'some'))
+        self.assertFalse(hasattr(ext, 'some'))
 
         # 'macros' element of build info dict must be 1- or 2-tuple
         exts = [('foo.bar', {'sources': [''], 'libraries': 'foo',
diff --git a/lib-python/3/distutils/tests/test_sysconfig.py b/lib-python/3/distutils/tests/test_sysconfig.py
--- a/lib-python/3/distutils/tests/test_sysconfig.py
+++ b/lib-python/3/distutils/tests/test_sysconfig.py
@@ -50,15 +50,41 @@
 
     def test_get_config_vars(self):
         cvars = sysconfig.get_config_vars()
-        self.assertTrue(isinstance(cvars, dict))
+        self.assertIsInstance(cvars, dict)
         self.assertTrue(cvars)
 
+    def test_srcdir(self):
+        # See Issues #15322, #15364.
+        srcdir = sysconfig.get_config_var('srcdir')
+
+        self.assertTrue(os.path.isabs(srcdir), srcdir)
+        self.assertTrue(os.path.isdir(srcdir), srcdir)
+
+        if sysconfig.python_build:
+            # The python executable has not been installed so srcdir
+            # should be a full source checkout.
+            Python_h = os.path.join(srcdir, 'Include', 'Python.h')
+            self.assertTrue(os.path.exists(Python_h), Python_h)
+            self.assertTrue(sysconfig._is_python_source_dir(srcdir))
+        elif os.name == 'posix':
+            self.assertEqual(os.path.dirname(sysconfig.get_makefile_filename()),
+                                 srcdir)
+
+    def test_srcdir_independent_of_cwd(self):
+        # srcdir should be independent of the current working directory
+        # See Issues #15322, #15364.
+        srcdir = sysconfig.get_config_var('srcdir')
+        cwd = os.getcwd()
+        try:
+            os.chdir('..')
+            srcdir2 = sysconfig.get_config_var('srcdir')
+        finally:
+            os.chdir(cwd)
+        self.assertEqual(srcdir, srcdir2)
+
+    @unittest.skipUnless(get_default_compiler() == 'unix',
+                         'not testing if default compiler is not unix')
     def test_customize_compiler(self):
-
-        # not testing if default compiler is not unix
-        if get_default_compiler() != 'unix':
-            return
-
         os.environ['AR'] = 'my_ar'
         os.environ['ARFLAGS'] = '-arflags'
 
@@ -121,7 +147,7 @@
 
         import sysconfig as global_sysconfig
         if sysconfig.get_config_var('CUSTOMIZED_OSX_COMPILER'):
-            return
+            self.skipTest('compiler flags customized')
         self.assertEqual(global_sysconfig.get_config_var('LDSHARED'), sysconfig.get_config_var('LDSHARED'))
         self.assertEqual(global_sysconfig.get_config_var('CC'), sysconfig.get_config_var('CC'))
 
diff --git a/lib-python/3/email/charset.py b/lib-python/3/email/charset.py
--- a/lib-python/3/email/charset.py
+++ b/lib-python/3/email/charset.py
@@ -194,7 +194,7 @@
                    header encoding.  Charset.SHORTEST is not allowed for
                    body_encoding.
 
-    output_charset: Some character sets must be converted before the can be
+    output_charset: Some character sets must be converted before they can be
                     used in email headers or bodies.  If the input_charset is
                     one of them, this attribute will contain the name of the
                     charset output will be converted to.  Otherwise, it will
@@ -386,7 +386,8 @@
         string using the ascii codec produces the correct string version
         of the content.
         """
-        # 7bit/8bit encodings return the string unchanged (module conversions)
+        if not string:
+            return string
         if self.body_encoding is BASE64:
             if isinstance(string, str):
                 string = string.encode(self.output_charset)
@@ -398,13 +399,9 @@
             # character set, then, we must turn it into pseudo bytes via the
             # latin1 charset, which will encode any byte as a single code point
             # between 0 and 255, which is what body_encode is expecting.
-            #
-            # Note that this clause doesn't handle the case of a _payload that
-            # is already bytes.  It never did, and the semantics of _payload
-            # being bytes has never been nailed down, so fixing that is a
-            # longer term TODO.
             if isinstance(string, str):
-                string = string.encode(self.output_charset).decode('latin1')
+                string = string.encode(self.output_charset)
+            string = string.decode('latin1')
             return email.quoprimime.body_encode(string)
         else:
             if isinstance(string, str):
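
For context on the charset.py hunk: body_encode() chooses its transfer encoding from the
charset's body_encoding attribute (BASE64, QP, or None), and in this version it accepts either
str or bytes input. A small usage sketch with two stock charsets; the output noted in the
comments is approximate:

    from email.charset import Charset, BASE64, QP

    utf8 = Charset('utf-8')
    assert utf8.body_encoding is BASE64
    print(utf8.body_encode('héllo'))     # base64 of the UTF-8 bytes

    latin1 = Charset('iso-8859-1')
    assert latin1.body_encoding is QP
    print(latin1.body_encode('héllo'))   # quoted-printable, e.g. h=E9llo
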
diff --git a/lib-python/3/email/encoders.py b/lib-python/3/email/encoders.py
--- a/lib-python/3/email/encoders.py
+++ b/lib-python/3/email/encoders.py
@@ -71,16 +71,8 @@
             msg['Content-Transfer-Encoding'] = '8bit'
     else:
         msg['Content-Transfer-Encoding'] = '7bit'
-    if not isinstance(orig, str):
-        msg.set_payload(orig.decode('ascii', 'surrogateescape'))
 
 
 

 def encode_noop(msg):
     """Do nothing."""
-    # Well, not quite *nothing*: in Python3 we have to turn bytes into a string
-    # in our internal surrogateescaped form in order to keep the model
-    # consistent.
-    orig = msg.get_payload()
-    if not isinstance(orig, str):
-        msg.set_payload(orig.decode('ascii', 'surrogateescape'))
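
For orientation, the helpers in email.encoders are normally handed to the MIME class
constructors or called once on a finished message to set its Content-Transfer-Encoding;
encode_noop, as its docstring says, now really does nothing. A minimal sketch using documented
stdlib names only (not taken from this changeset):

    from email.mime.text import MIMEText
    from email import encoders

    msg = MIMEText('plain ascii body', 'plain', 'us-ascii')
    del msg['Content-Transfer-Encoding']     # drop the default 7bit header
    encoders.encode_base64(msg)              # re-encodes payload, sets the CTE header
    print(msg['Content-Transfer-Encoding'])  # base64
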
diff --git a/lib-python/3/email/generator.py b/lib-python/3/email/generator.py
--- a/lib-python/3/email/generator.py
+++ b/lib-python/3/email/generator.py
@@ -12,9 +12,12 @@
 import random
 import warnings
 
+from copy import deepcopy
 from io import StringIO, BytesIO
+from email._policybase import compat32
 from email.header import Header
-from email.message import _has_surrogates
+from email.utils import _has_surrogates
+import email.charset as _charset
 
 UNDERSCORE = '_'
 NL = '\n'  # XXX: no longer used by the code below.
@@ -33,7 +36,8 @@
     # Public interface
     #
 
-    def __init__(self, outfp, mangle_from_=True, maxheaderlen=78):
+    def __init__(self, outfp, mangle_from_=True, maxheaderlen=None, *,
+                 policy=None):
         """Create the generator for message flattening.
 
         outfp is the output file-like object for writing the message to.  It
@@ -49,16 +53,22 @@
         defined in the Header class.  Set maxheaderlen to zero to disable
         header wrapping.  The default is 78, as recommended (but not required)
         by RFC 2822.
+
+        The policy keyword specifies a policy object that controls a number of
+        aspects of the generator's operation.  The default policy maintains
+        backward compatibility.
+
         """
         self._fp = outfp
         self._mangle_from_ = mangle_from_
-        self._maxheaderlen = maxheaderlen
+        self.maxheaderlen = maxheaderlen
+        self.policy = policy
 
     def write(self, s):
         # Just delegate to the file object
         self._fp.write(s)
 
-    def flatten(self, msg, unixfrom=False, linesep='\n'):
+    def flatten(self, msg, unixfrom=False, linesep=None):
         r"""Print the message object tree rooted at msg to the output file
         specified when the Generator instance was created.
 
@@ -70,29 +80,47 @@
         Note that for subobjects, no From_ line is printed.
 
         linesep specifies the characters used to indicate a new line in
-        the output.  The default value is the most useful for typical
-        Python applications, but it can be set to \r\n to produce RFC-compliant
-        line separators when needed.
+        the output.  The default value is determined by the policy.
 
         """
         # We use the _XXX constants for operating on data that comes directly
         # from the msg, and _encoded_XXX constants for operating on data that
         # has already been converted (to bytes in the BytesGenerator) and
         # inserted into a temporary buffer.
-        self._NL = linesep
-        self._encoded_NL = self._encode(linesep)
+        policy = msg.policy if self.policy is None else self.policy
+        if linesep is not None:
+            policy = policy.clone(linesep=linesep)
+        if self.maxheaderlen is not None:
+            policy = policy.clone(max_line_length=self.maxheaderlen)
+        self._NL = policy.linesep
+        self._encoded_NL = self._encode(self._NL)
         self._EMPTY = ''
         self._encoded_EMTPY = self._encode('')
-        if unixfrom:
-            ufrom = msg.get_unixfrom()
-            if not ufrom:
-                ufrom = 'From nobody ' + time.ctime(time.time())
-            self.write(ufrom + self._NL)
-        self._write(msg)
+        # Because we use clone (below) when we recursively process message
+        # subparts, and because clone uses the computed policy (not None),
+        # submessages will automatically get set to the computed policy when
+        # they are processed by this code.
+        old_gen_policy = self.policy
+        old_msg_policy = msg.policy
+        try:
+            self.policy = policy
+            msg.policy = policy
+            if unixfrom:
+                ufrom = msg.get_unixfrom()
+                if not ufrom:
+                    ufrom = 'From nobody ' + time.ctime(time.time())
+                self.write(ufrom + self._NL)
+            self._write(msg)
+        finally:
+            self.policy = old_gen_policy
+            msg.policy = old_msg_policy
 
     def clone(self, fp):
         """Clone this generator with the exact same options."""
-        return self.__class__(fp, self._mangle_from_, self._maxheaderlen)
+        return self.__class__(fp,
+                              self._mangle_from_,
+                              None, # Use policy setting, which we've adjusted
+                              policy=self.policy)
 
     #
     # Protected interface - undocumented ;/
@@ -146,10 +174,18 @@
         # necessary.
         oldfp = self._fp
         try:
+            self._munge_cte = None
             self._fp = sfp = self._new_buffer()
             self._dispatch(msg)
         finally:
             self._fp = oldfp
+            munge_cte = self._munge_cte
+            del self._munge_cte
+        # If we munged the cte, copy the message again and re-fix the CTE.
+        if munge_cte:
+            msg = deepcopy(msg)
+            msg.replace_header('content-transfer-encoding', munge_cte[0])
+            msg.replace_header('content-type', munge_cte[1])
         # Write the headers.  First we see if the message object wants to
         # handle that itself.  If not, we'll do it generically.
         meth = getattr(msg, '_write_headers', None)
@@ -180,16 +216,8 @@
     #
 
     def _write_headers(self, msg):
-        for h, v in msg.items():
-            self.write('%s: ' % h)
-            if isinstance(v, Header):
-                self.write(v.encode(
-                    maxlinelen=self._maxheaderlen, linesep=self._NL)+self._NL)
-            else:
-                # Header's got lots of smarts, so use it.
-                header = Header(v, maxlinelen=self._maxheaderlen,
-                                header_name=h)
-                self.write(header.encode(linesep=self._NL)+self._NL)
+        for h, v in msg.raw_items():
+            self.write(self.policy.fold(h, v))
         # A blank line always separates headers from body
         self.write(self._NL)
 
@@ -206,9 +234,14 @@
         if _has_surrogates(msg._payload):
             charset = msg.get_param('charset')
             if charset is not None:
+                # XXX: This copy stuff is an ugly hack to avoid modifying the
+                # existing message.
+                msg = deepcopy(msg)
                 del msg['content-transfer-encoding']
                 msg.set_payload(payload, charset)
                 payload = msg.get_payload()
+                self._munge_cte = (msg['content-transfer-encoding'],
+                                   msg['content-type'])
         if self._mangle_from_:
             payload = fcre.sub('>From ', payload)
         self._write_lines(payload)
@@ -266,9 +299,8 @@
             # body-part
             self._fp.write(body_part)
         # close-delimiter transport-padding
-        self.write(self._NL + '--' + boundary + '--')
+        self.write(self._NL + '--' + boundary + '--' + self._NL)
         if msg.epilogue is not None:
-            self.write(self._NL)
             if self._mangle_from_:
                 epilogue = fcre.sub('>From ', msg.epilogue)
             else:
@@ -279,12 +311,12 @@
         # The contents of signed parts has to stay unmodified in order to keep
         # the signature intact per RFC1847 2.1, so we disable header wrapping.
         # RDM: This isn't enough to completely preserve the part, but it helps.
-        old_maxheaderlen = self._maxheaderlen
+        p = self.policy
+        self.policy = p.clone(max_line_length=0)
         try:
-            self._maxheaderlen = 0
             self._handle_multipart(msg)
         finally:
-            self._maxheaderlen = old_maxheaderlen
+            self.policy = p
 
     def _handle_message_delivery_status(self, msg):
         # We can't just write the headers directly to self's file object
@@ -319,16 +351,18 @@
         # message/rfc822.  Such messages are generated by, for example,
         # Groupwise when forwarding unadorned messages.  (Issue 7970.)  So
         # in that case we just emit the string body.
-        payload = msg.get_payload()
+        payload = msg._payload
         if isinstance(payload, list):
             g.flatten(msg.get_payload(0), unixfrom=False, linesep=self._NL)
             payload = s.getvalue()
+        else:
+            payload = self._encode(payload)
         self._fp.write(payload)
 
     # This used to be a module level function; we use a classmethod for this
     # and _compile_re so we can continue to provide the module level function
     # for backward compatibility by doing
-    #   _make_boudary = Generator._make_boundary
+    #   _make_boundary = Generator._make_boundary
     # at the end of the module.  It *is* internal, so we could drop that...
     @classmethod
     def _make_boundary(cls, text=None):
@@ -358,7 +392,10 @@
 
     Functionally identical to the base Generator except that the output is
     bytes and not string.  When surrogates were used in the input to encode
-    bytes, these are decoded back to bytes for output.
+    bytes, these are decoded back to bytes for output.  If the policy has
+    cte_type set to 7bit, then the message is transformed such that the
+    non-ASCII bytes are properly content transfer encoded, using the charset
+    unknown-8bit.
 
     The outfp object must accept bytes in its write method.
     """
@@ -379,23 +416,8 @@
     def _write_headers(self, msg):
         # This is almost the same as the string version, except for handling
         # strings with 8bit bytes.
-        for h, v in msg._headers:
-            self.write('%s: ' % h)
-            if isinstance(v, Header):
-                self.write(v.encode(maxlinelen=self._maxheaderlen)+self._NL)
-            elif _has_surrogates(v):
-                # If we have raw 8bit data in a byte string, we have no idea
-                # what the encoding is.  There is no safe way to split this
-                # string.  If it's ascii-subset, then we could do a normal
-                # ascii split, but if it's multibyte then we could break the
-                # string.  There's no way to know so the least harm seems to
-                # be to not split the string and risk it being too long.
-                self.write(v+NL)
-            else:
-                # Header's got lots of smarts and this string is safe...
-                header = Header(v, maxlinelen=self._maxheaderlen,
-                                header_name=h)
-                self.write(header.encode(linesep=self._NL)+self._NL)
+        for h, v in msg.raw_items():
+            self._fp.write(self.policy.fold_binary(h, v))
         # A blank line always separates headers from body
         self.write(self._NL)
 
@@ -404,7 +426,7 @@
         # just write it back out.
         if msg._payload is None:
             return
-        if _has_surrogates(msg._payload):
+        if _has_surrogates(msg._payload) and not self.policy.cte_type=='7bit':
             if self._mangle_from_:
                 msg._payload = fcre.sub(">From ", msg._payload)
             self._write_lines(msg._payload)
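
The generator.py changes route line endings and header folding through a policy object instead
of the old _maxheaderlen/linesep attributes. A minimal sketch of how calling code exercises
that (stdlib names only; the message content is invented):

    from io import StringIO
    from email.message import Message
    from email.generator import Generator
    from email import policy

    msg = Message()
    msg['Subject'] = 'policy demo'
    msg.set_payload('hello world')

    out = StringIO()
    # policy.SMTP is the default policy with linesep='\r\n'; passing
    # linesep='\r\n' to flatten() would achieve the same via policy.clone().
    Generator(out, mangle_from_=False, policy=policy.SMTP).flatten(msg)
    print(repr(out.getvalue()))
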
diff --git a/lib-python/3/email/message.py b/lib-python/3/email/message.py
--- a/lib-python/3/email/message.py
+++ b/lib-python/3/email/message.py
@@ -10,14 +10,14 @@
 import uu
 import base64
 import binascii
-import warnings
 from io import BytesIO, StringIO
 
 # Intrapackage imports
 from email import utils
 from email import errors
-from email import header
+from email._policybase import compat32
 from email import charset as _charset
+from email._encoded_words import decode_b
 Charset = _charset.Charset
 
 SEMISPACE = '; '
@@ -26,24 +26,6 @@
 # existence of which force quoting of the parameter value.
 tspecials = re.compile(r'[ \(\)<>@,;:\\"/\[\]\?=]')
 
-# How to figure out if we are processing strings that come from a byte
-# source with undecodable characters.
-_has_surrogates = re.compile(
-    '([^\ud800-\udbff]|\A)[\udc00-\udfff]([^\udc00-\udfff]|\Z)').search
-
-
-# Helper functions
-def _sanitize_header(name, value):
-    # If the header value contains surrogates, return a Header using
-    # the unknown-8bit charset to encode the bytes as encoded words.
-    if not isinstance(value, str):
-        # Assume it is already a header object
-        return value
-    if _has_surrogates(value):
-        return header.Header(value, charset=_charset.UNKNOWN8BIT,
-                             header_name=name)
-    else:
-        return value
 
 def _splitparam(param):
     # Split header parameters.  BAW: this may be too simple.  It isn't
@@ -136,7 +118,8 @@
     you must use the explicit API to set or get all the headers.  Not all of
     the mapping methods are implemented.
     """
-    def __init__(self):
+    def __init__(self, policy=compat32):
+        self.policy = policy
         self._headers = []
         self._unixfrom = None
         self._payload = None
@@ -246,7 +229,7 @@
         cte = str(self.get('content-transfer-encoding', '')).lower()
         # payload may be bytes here.
         if isinstance(payload, str):
-            if _has_surrogates(payload):
+            if utils._has_surrogates(payload):
                 bpayload = payload.encode('ascii', 'surrogateescape')
                 if not decode:
                     try:
@@ -267,11 +250,12 @@
         if cte == 'quoted-printable':
             return utils._qdecode(bpayload)
         elif cte == 'base64':
-            try:
-                return base64.b64decode(bpayload)
-            except binascii.Error:
-                # Incorrect padding
-                return bpayload
+            # XXX: this is a bit of a hack; decode_b should probably be factored
+            # out somewhere, but I haven't figured out where yet.
+            value, defects = decode_b(b''.join(bpayload.splitlines()))
+            for defect in defects:
+                self.policy.handle_defect(self, defect)
+            return value
         elif cte in ('x-uuencode', 'uuencode', 'uue', 'x-uue'):
             in_file = BytesIO(bpayload)
             out_file = BytesIO()
@@ -291,7 +275,17 @@
         Optional charset sets the message's default character set.  See
         set_charset() for details.
         """
-        self._payload = payload
+        if hasattr(payload, 'encode'):
+            if charset is None:
+                self._payload = payload
+                return
+            if not isinstance(charset, Charset):
+                charset = Charset(charset)
+            payload = payload.encode(charset.output_charset)
+        if hasattr(payload, 'decode'):
+            self._payload = payload.decode('ascii', 'surrogateescape')
+        else:
+            self._payload = payload
         if charset is not None:
             self.set_charset(charset)
 
@@ -330,7 +324,16 @@
             try:
                 cte(self)
             except TypeError:
-                self._payload = charset.body_encode(self._payload)
+                # This 'if' is for backward compatibility, it allows unicode
+                # through even though that won't work correctly if the
+                # message is serialized.
+                payload = self._payload
+                if payload:
+                    try:
+                        payload = payload.encode('ascii', 'surrogateescape')
+                    except UnicodeError:
+                        payload = payload.encode(charset.output_charset)
+                self._payload = charset.body_encode(payload)
                 self.add_header('Content-Transfer-Encoding', cte)
 
     def get_charset(self):
@@ -362,7 +365,17 @@
         Note: this does not overwrite an existing header with the same field
         name.  Use __delitem__() first to delete any existing headers.
         """
-        self._headers.append((name, val))
+        max_count = self.policy.header_max_count(name)
+        if max_count:
+            lname = name.lower()
+            found = 0
+            for k, v in self._headers:
+                if k.lower() == lname:
+                    found += 1
+                    if found >= max_count:
+                        raise ValueError("There may be at most {} {} headers "
+                                         "in a message".format(max_count, name))
+        self._headers.append(self.policy.header_store_parse(name, val))
 
     def __delitem__(self, name):
         """Delete all occurrences of a header, if present.
@@ -401,7 +414,8 @@
         Any fields deleted and re-inserted are always appended to the header
         list.
         """
-        return [_sanitize_header(k, v) for k, v in self._headers]
+        return [self.policy.header_fetch_parse(k, v)
+                for k, v in self._headers]
 
     def items(self):
         """Get all the message's header fields and values.
@@ -411,7 +425,8 @@
         Any fields deleted and re-inserted are always appended to the header
         list.
         """
-        return [(k, _sanitize_header(k, v)) for k, v in self._headers]
+        return [(k, self.policy.header_fetch_parse(k, v))
+                for k, v in self._headers]
 
     def get(self, name, failobj=None):
         """Get a header value.
@@ -422,10 +437,29 @@
         name = name.lower()
         for k, v in self._headers:
             if k.lower() == name:
-                return _sanitize_header(k, v)
+                return self.policy.header_fetch_parse(k, v)
         return failobj
 
     #
+    # "Internal" methods (public API, but only intended for use by a parser
+    # or generator, not normal application code.
+    #
+
+    def set_raw(self, name, value):
+        """Store name and value in the model without modification.
+
+        This is an "internal" API, intended only for use by a parser.
+        """
+        self._headers.append((name, value))
+
+    def raw_items(self):
+        """Return the (name, value) header pairs without modification.
+
+        This is an "internal" API, intended only for use by a generator.
+        """
+        return iter(self._headers.copy())
+
+    #
     # Additional useful stuff
     #
 
@@ -442,7 +476,7 @@
         name = name.lower()
         for k, v in self._headers:
             if k.lower() == name:
-                values.append(_sanitize_header(k, v))
+                values.append(self.policy.header_fetch_parse(k, v))
         if not values:
             return failobj
         return values
@@ -475,7 +509,7 @@
                 parts.append(_formatparam(k.replace('_', '-'), v))
         if _value is not None:
             parts.insert(0, _value)
-        self._headers.append((_name, SEMISPACE.join(parts)))
+        self[_name] = SEMISPACE.join(parts)
 
     def replace_header(self, _name, _value):
         """Replace a header.
@@ -487,7 +521,7 @@
         _name = _name.lower()
         for i, (k, v) in zip(range(len(self._headers)), self._headers):
             if k.lower() == _name:
-                self._headers[i] = (k, _value)
+                self._headers[i] = self.policy.header_store_parse(k, _value)
                 break
         else:
             raise KeyError(_name)
@@ -619,7 +653,7 @@
         If your application doesn't care whether the parameter was RFC 2231
         encoded, it can turn the return value into a string as follows:
 
-            param = msg.get_param('foo')
+            rawparam = msg.get_param('foo')
             param = email.utils.collapse_rfc2231_value(rawparam)
 
         """
@@ -803,7 +837,8 @@
                         parts.append(k)
                     else:
                         parts.append('%s=%s' % (k, v))
-                newheaders.append((h, SEMISPACE.join(parts)))
+                val = SEMISPACE.join(parts)
+                newheaders.append(self.policy.header_store_parse(h, val))
 
             else:
                 newheaders.append((h, v))
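
To make the message.py payload changes concrete: set_payload() now encodes a str body with the
given charset and records a matching Content-Transfer-Encoding, and get_payload(decode=True)
reverses it. A small round-trip sketch (illustrative, not from this changeset):

    from email.message import Message

    msg = Message()
    msg.set_payload('héllo wörld', charset='utf-8')
    print(msg['Content-Type'])                # text/plain; charset="utf-8"
    print(msg['Content-Transfer-Encoding'])   # base64 (what utf-8 maps to)
    print(msg.get_payload(decode=True).decode('utf-8'))   # héllo wörld
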
diff --git a/lib-python/3/email/parser.py b/lib-python/3/email/parser.py
--- a/lib-python/3/email/parser.py
+++ b/lib-python/3/email/parser.py
@@ -4,18 +4,19 @@
 
 """A parser of RFC 2822 and MIME email messages."""
 
-__all__ = ['Parser', 'HeaderParser', 'BytesParser']
+__all__ = ['Parser', 'HeaderParser', 'BytesParser', 'BytesHeaderParser']
 
 import warnings
 from io import StringIO, TextIOWrapper
 
 from email.feedparser import FeedParser, BytesFeedParser
 from email.message import Message
+from email._policybase import compat32
 

