[Python-checkins] distutils2 (merge default -> default): Branch merge to trunk

tarek.ziade python-checkins at python.org
Sun Mar 13 19:45:15 CET 2011


http://hg.python.org/distutils2/rev/be240095c410
changeset:   1109:be240095c410
parent:      1103:f19a72623297
parent:      1108:677266d859c4
user:        Arc Riley <arcriley at gmail.com>
date:        Sun Mar 13 00:12:23 2011 -0500
summary:
  Branch merge to trunk

files:
  distutils2/_backport/tests/test_pkgutil.py
  distutils2/_backport/tests/test_sysconfig.py
  distutils2/compiler/unixccompiler.py
  distutils2/fancy_getopt.py
  distutils2/tests/__init__.py
  runtests.py
  setup.py

diff --git a/.hgignore b/.hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -15,3 +15,4 @@
 include
 bin
 nosetests.xml
+Distutils2.egg-info
diff --git a/CHANGES.txt b/CHANGES.txt
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -6,17 +6,20 @@
 ---------
 
 - The setup runner supports more options:
-- XXX fill changes done in commands + compilers
-- Issue 10409: Fixed the Licence selector in mkcfg
+- XXX fill changes done in commands + compilers [tarek]
+- Issue #10409: Fixed the Licence selector in mkcfg [tarek]
+- Issue #9558: Fix build_ext with VS 8.0 [éric]
+- Issue #6007: Add disclaimer about MinGW compatibility in docs [éric]
+- Renamed DistributionMetadata to Metadata [ccomb]
 
 1.0a3 - 2010-10-08
 ------------------
 
-- Provided a Tox configuration for cross-python testing [holger]
+- Provided a Tox configuration for cross-Python testing [holger]
 - Fixed the installation when using easy_install and Pip by switching
   setup.py to distutils1 [holger/tarek]
 - Added missing c/h files in the MANIFEST so they are always present
-  no matter which python version was used to build it. [holger/tarek]
+  no matter which Python version was used to build it. [holger/tarek]
 - Added the new setup runner that uses only setup.cfg
 - Renamed mkpkg to mkcfg [tarek]
 - Renamed install_tools to install [alexis]
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
--- a/CONTRIBUTORS.txt
+++ b/CONTRIBUTORS.txt
@@ -18,15 +18,22 @@
 - Nicolas Cadou
 - Konrad Delong
 - Josip Djolonga
+- Andrew Francis
 - Yannick Gingras
+- Alexandre Hamelin
+- Kelsey Hightower
+- Christian Hudon
 - Jeremy Kloth
 - Amos Latteier
+- Mathieu Leduc-Hamel
 - Martin von Löwis
+- Simon Mathieu
 - Carl Meyer
 - Alexis Métaireau
 - Zubin Mithra
 - Derek McTavish Mounce
 - Michael Mulich
+- Louis Munro
 - George Peristerakis
 - Mathieu Perreault
 - Sean Reifschneider
diff --git a/DEVNOTES.txt b/DEVNOTES.txt
--- a/DEVNOTES.txt
+++ b/DEVNOTES.txt
@@ -6,4 +6,5 @@
   one of these Python versions.
 
 - Always run tests.sh before you push a change. This implies
-  that you have all Python versions installed from 2.4 to 2.7.
+  that you have all Python versions installed from 2.4 to 2.7. Be sure to have 
+  docutils installed on all python versions to avoid skipping tests as well.
diff --git a/README.txt b/README.txt
--- a/README.txt
+++ b/README.txt
@@ -10,6 +10,9 @@
 
 See the documentation at http://packages.python.org/Distutils2 for more info.
 
+If you want to contribute, please have a look at 
+http://distutils2.notmyidea.org/contributing.html
+
 **Beware that Distutils2 is in its early stage and should not be used in
 production. Its API is subject to changes**
 
diff --git a/distutils2/__init__.py b/distutils2/__init__.py
--- a/distutils2/__init__.py
+++ b/distutils2/__init__.py
@@ -13,8 +13,3 @@
 
 __version__ = "1.0a3"
 logger = getLogger('distutils2')
-
-# when set to True, converts doctests by default too
-run_2to3_on_doctests = True
-# Standard package names for fixer packages
-lib2to3_fixer_packages = ['lib2to3.fixes']
diff --git a/distutils2/_backport/__init__.py b/distutils2/_backport/__init__.py
--- a/distutils2/_backport/__init__.py
+++ b/distutils2/_backport/__init__.py
@@ -1,5 +1,5 @@
 """Things that will land in the Python 3.3 std lib but which we must drag along
-with us for now to support 2.x."""
+ us for now to support 2.x."""
 
 def any(seq):
     for elem in seq:
diff --git a/distutils2/_backport/pkgutil.py b/distutils2/_backport/pkgutil.py
--- a/distutils2/_backport/pkgutil.py
+++ b/distutils2/_backport/pkgutil.py
@@ -1,24 +1,27 @@
 """Utilities to support packages."""
 
-# NOTE: This module must remain compatible with Python 2.3, as it is shared
-# by setuptools for distribution with Python 2.3 and up.
+import imp
+import sys
 
+from csv import reader as csv_reader
 import os
-import sys
-import imp
-import os.path
-from csv import reader as csv_reader
+import re
+from stat import ST_SIZE
 from types import ModuleType
+import warnings
+
+try:
+    from hashlib import md5
+except ImportError:
+    from md5 import md5
+
 from distutils2.errors import DistutilsError
-from distutils2.metadata import DistributionMetadata
+from distutils2.metadata import Metadata
 from distutils2.version import suggest_normalized_version, VersionPredicate
-import zipimport
 try:
     import cStringIO as StringIO
 except ImportError:
     import StringIO
-import re
-import warnings
 
 
 __all__ = [
@@ -28,10 +31,14 @@
     'Distribution', 'EggInfoDistribution', 'distinfo_dirname',
     'get_distributions', 'get_distribution', 'get_file_users',
     'provides_distribution', 'obsoletes_distribution',
-    'enable_cache', 'disable_cache', 'clear_cache'
+    'enable_cache', 'disable_cache', 'clear_cache',
 ]
 
 
+##########################
+# PEP 302 Implementation #
+##########################
+
 def read_code(stream):
     # This helper is needed in order for the :pep:`302` emulation to
     # correctly handle compiled files
@@ -41,7 +48,7 @@
     if magic != imp.get_magic():
         return None
 
-    stream.read(4) # Skip timestamp
+    stream.read(4)  # Skip timestamp
     return marshal.load(stream)
 
 
@@ -49,7 +56,7 @@
     """Make a trivial single-dispatch generic function"""
     registry = {}
 
-    def wrapper(*args, **kw):
+    def wrapper(*args, ** kw):
         ob = args[0]
         try:
             cls = ob.__class__
@@ -64,12 +71,12 @@
                     pass
                 mro = cls.__mro__[1:]
             except TypeError:
-                mro = object,   # must be an ExtensionClass or some such  :(
+                mro = object, # must be an ExtensionClass or some such  :(
         for t in mro:
             if t in registry:
-                return registry[t](*args, **kw)
+                return registry[t](*args, ** kw)
         else:
-            return func(*args, **kw)
+            return func(*args, ** kw)
     try:
         wrapper.__name__ = func.__name__
     except (TypeError, AttributeError):
@@ -173,7 +180,6 @@
 
 #@simplegeneric
 def iter_importer_modules(importer, prefix=''):
-    ""
     if not hasattr(importer, 'iter_modules'):
         return []
     return importer.iter_modules(prefix)
@@ -331,9 +337,9 @@
     def get_filename(self, fullname=None):
         fullname = self._fix_name(fullname)
         mod_type = self.etc[2]
-        if self.etc[2] == imp.PKG_DIRECTORY:
+        if mod_type == imp.PKG_DIRECTORY:
             return self._get_delegate().get_filename()
-        elif self.etc[2] in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION):
+        elif mod_type in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION):
             return self.filename
         return None
 
@@ -343,8 +349,7 @@
     from zipimport import zipimporter
 
     def iter_zipimport_modules(importer, prefix=''):
-        dirlist = zipimport._zip_directory_cache[importer.archive].keys()
-        dirlist.sort()
+        dirlist = sorted(zipimport._zip_directory_cache[importer.archive])
         _prefix = importer.prefix
         plen = len(_prefix)
         yielded = {}
@@ -433,7 +438,8 @@
     import mechanism will find the latter.
 
     Items of the following types can be affected by this discrepancy:
-        ``imp.C_EXTENSION, imp.PY_SOURCE, imp.PY_COMPILED, imp.PKG_DIRECTORY``
+    :data:`imp.C_EXTENSION`, :data:`imp.PY_SOURCE`, :data:`imp.PY_COMPILED`,
+    :data:`imp.PKG_DIRECTORY`
     """
     if fullname.startswith('.'):
         raise ImportError("Relative module names not supported")
@@ -535,13 +541,13 @@
         # frozen package.  Return the path unchanged in that case.
         return path
 
-    pname = os.path.join(*name.split('.')) # Reconstitute as relative path
+    pname = os.path.join(*name.split('.'))  # Reconstitute as relative path
     # Just in case os.extsep != '.'
     sname = os.extsep.join(name.split('.'))
     sname_pkg = sname + os.extsep + "pkg"
     init_py = "__init__" + os.extsep + "py"
 
-    path = path[:] # Start with a copy of the existing path
+    path = path[:]  # Start with a copy of the existing path
 
     for dir in sys.path:
         if not isinstance(dir, basestring) or not os.path.isdir(dir):
@@ -566,7 +572,7 @@
                     line = line.rstrip('\n')
                     if not line or line.startswith('#'):
                         continue
-                    path.append(line) # Don't check for existence!
+                    path.append(line)  # Don't check for existence!
                 f.close()
 
     return path
@@ -610,19 +616,20 @@
     resource_name = os.path.join(*parts)
     return loader.get_data(resource_name)
 
+
 ##########################
 # PEP 376 Implementation #
 ##########################
 
-DIST_FILES = ('INSTALLER', 'METADATA', 'RECORD', 'REQUESTED',)
+DIST_FILES = ('INSTALLER', 'METADATA', 'RECORD', 'REQUESTED', 'RESOURCES')
 
 # Cache
-_cache_name = {} # maps names to Distribution instances
-_cache_name_egg = {} # maps names to EggInfoDistribution instances
-_cache_path = {} # maps paths to Distribution instances
-_cache_path_egg = {} # maps paths to EggInfoDistribution instances
-_cache_generated = False # indicates if .dist-info distributions are cached
-_cache_generated_egg = False # indicates if .dist-info and .egg are cached
+_cache_name = {}  # maps names to Distribution instances
+_cache_name_egg = {}  # maps names to EggInfoDistribution instances
+_cache_path = {}  # maps paths to Distribution instances
+_cache_path_egg = {}  # maps paths to EggInfoDistribution instances
+_cache_generated = False  # indicates if .dist-info distributions are cached
+_cache_generated_egg = False  # indicates if .dist-info and .egg are cached
 _cache_enabled = True
 
 
@@ -637,6 +644,7 @@
 
     _cache_enabled = True
 
+
 def disable_cache():
     """
     Disables the internal cache.
@@ -648,10 +656,11 @@
 
     _cache_enabled = False
 
+
 def clear_cache():
     """ Clears the internal cache. """
-    global _cache_name, _cache_name_egg, cache_path, _cache_path_egg, \
-           _cache_generated, _cache_generated_egg
+    global _cache_name, _cache_name_egg, _cache_path, _cache_path_egg, \
+        _cache_generated, _cache_generated_egg
 
     _cache_name = {}
     _cache_name_egg = {}
@@ -661,14 +670,14 @@
     _cache_generated_egg = False
 
 
-def _yield_distributions(include_dist, include_egg):
+def _yield_distributions(include_dist, include_egg, paths=sys.path):
     """
     Yield .dist-info and .egg(-info) distributions, based on the arguments
 
     :parameter include_dist: yield .dist-info distributions
     :parameter include_egg: yield .egg(-info) distributions
     """
-    for path in sys.path:
+    for path in paths:
         realpath = os.path.realpath(path)
         if not os.path.isdir(realpath):
             continue
@@ -680,8 +689,7 @@
                                   dir.endswith('.egg')):
                 yield EggInfoDistribution(dist_path)
 
-
-def _generate_cache(use_egg_info=False):
+def _generate_cache(use_egg_info=False, paths=sys.path):
     global _cache_generated, _cache_generated_egg
 
     if _cache_generated_egg or (_cache_generated and not use_egg_info):
@@ -690,7 +698,7 @@
         gen_dist = not _cache_generated
         gen_egg = use_egg_info
 
-        for dist in _yield_distributions(gen_dist, gen_egg):
+        for dist in _yield_distributions(gen_dist, gen_egg, paths):
             if isinstance(dist, Distribution):
                 _cache_path[dist.path] = dist
                 if not dist.name in _cache_name:
@@ -718,7 +726,7 @@
     name = ''
     """The name of the distribution."""
     metadata = None
-    """A :class:`distutils2.metadata.DistributionMetadata` instance loaded with
+    """A :class:`distutils2.metadata.Metadata` instance loaded with
     the distribution's ``METADATA`` file."""
     requested = False
     """A boolean that indicates whether the ``REQUESTED`` metadata file is
@@ -730,7 +738,7 @@
             self.metadata = _cache_path[path].metadata
         else:
             metadata_path = os.path.join(path, 'METADATA')
-            self.metadata = DistributionMetadata(path=metadata_path)
+            self.metadata = Metadata(path=metadata_path)
 
         self.path = path
         self.name = self.metadata['name']
@@ -738,9 +746,12 @@
         if _cache_enabled and not path in _cache_path:
             _cache_path[path] = self
 
+    def __repr__(self):
+        return '%s-%s at %s' % (self.name, self.metadata.version, self.path)
+
     def _get_records(self, local=False):
-        RECORD = os.path.join(self.path, 'RECORD')
-        record_reader = csv_reader(open(RECORD, 'rb'), delimiter=',')
+        RECORD = self.get_distinfo_file('RECORD')
+        record_reader = csv_reader(RECORD, delimiter=',')
         for row in record_reader:
             path, md5, size = row[:] + [None for i in xrange(len(row), 3)]
             if local:
@@ -748,6 +759,15 @@
                 path = os.path.join(sys.prefix, path)
             yield path, md5, size
 
+    def get_resource_path(self, relative_path):
+        resources_file = self.get_distinfo_file('RESOURCES')
+        resources_reader = csv_reader(resources_file, delimiter=',')
+        for relative, destination in resources_reader:
+            if relative == relative_path:
+                return destination
+        raise KeyError('No resource file with relative path %s were installed' %
+                       relative_path)
+
     def get_installed_files(self, local=False):
         """
         Iterates over the ``RECORD`` entries and returns a tuple
@@ -805,13 +825,13 @@
             distinfo_dirname, path = path.split(os.sep)[-2:]
             if distinfo_dirname != self.path.split(os.sep)[-1]:
                 raise DistutilsError("Requested dist-info file does not "
-                    "belong to the %s distribution. '%s' was requested." \
-                    % (self.name, os.sep.join([distinfo_dirname, path])))
+                                     "belong to the %s distribution. '%s' was requested." \
+                                     % (self.name, os.sep.join([distinfo_dirname, path])))
 
         # The file must be relative
         if path not in DIST_FILES:
             raise DistutilsError("Requested an invalid dist-info file: "
-                "%s" % path)
+                                 "%s" % path)
 
         # Convert the relative path back to absolute
         path = os.path.join(self.path, path)
@@ -848,17 +868,17 @@
     name = ''
     """The name of the distribution."""
     metadata = None
-    """A :class:`distutils2.metadata.DistributionMetadata` instance loaded with
+    """A :class:`distutils2.metadata.Metadata` instance loaded with
     the distribution's ``METADATA`` file."""
-    _REQUIREMENT = re.compile( \
-        r'(?P<name>[-A-Za-z0-9_.]+)\s*' \
-        r'(?P<first>(?:<|<=|!=|==|>=|>)[-A-Za-z0-9_.]+)?\s*' \
-        r'(?P<rest>(?:\s*,\s*(?:<|<=|!=|==|>=|>)[-A-Za-z0-9_.]+)*)\s*' \
-        r'(?P<extras>\[.*\])?')
+    _REQUIREMENT = re.compile(\
+                              r'(?P<name>[-A-Za-z0-9_.]+)\s*' \
+                              r'(?P<first>(?:<|<=|!=|==|>=|>)[-A-Za-z0-9_.]+)?\s*' \
+                              r'(?P<rest>(?:\s*,\s*(?:<|<=|!=|==|>=|>)[-A-Za-z0-9_.]+)*)\s*' \
+                              r'(?P<extras>\[.*\])?')
 
-    def __init__(self, path):
+    def __init__(self, path, display_warnings=False):
         self.path = path
-
+        self.display_warnings = display_warnings
         if _cache_enabled and path in _cache_path_egg:
             self.metadata = _cache_path_egg[path].metadata
             self.name = self.metadata['Name']
@@ -871,7 +891,8 @@
             if isinstance(strs, basestring):
                 for s in strs.splitlines():
                     s = s.strip()
-                    if s and not s.startswith('#'): # skip blank lines/comments
+                    # skip blank lines/comments
+                    if s and not s.startswith('#'):
                         yield s
             else:
                 for ss in strs:
@@ -882,16 +903,17 @@
         if path.endswith('.egg'):
             if os.path.isdir(path):
                 meta_path = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
-                self.metadata = DistributionMetadata(path=meta_path)
+                self.metadata = Metadata(path=meta_path)
                 try:
                     req_path = os.path.join(path, 'EGG-INFO', 'requires.txt')
                     requires = open(req_path, 'r').read()
                 except IOError:
                     requires = None
             else:
+                # FIXME handle the case where zipfile is not available
                 zipf = zipimport.zipimporter(path)
                 fileobj = StringIO.StringIO(zipf.get_data('EGG-INFO/PKG-INFO'))
-                self.metadata = DistributionMetadata(fileobj=fileobj)
+                self.metadata = Metadata(fileobj=fileobj)
                 try:
                     requires = zipf.get_data('EGG-INFO/requires.txt')
                 except IOError:
@@ -905,35 +927,41 @@
                     requires = req_f.read()
                 except IOError:
                     requires = None
-            self.metadata = DistributionMetadata(path=path)
+            self.metadata = Metadata(path=path)
             self.name = self.metadata['name']
         else:
             raise ValueError('The path must end with .egg-info or .egg')
 
-        provides = "%s (%s)" % (self.metadata['name'],
-                                self.metadata['version'])
-        if self.metadata['Metadata-Version'] == '1.2':
-            self.metadata['Provides-Dist'] += (provides,)
-        else:
-            self.metadata['Provides'] += (provides,)
+
+        if requires is not None:
+            if self.metadata['Metadata-Version'] == '1.1':
+                # we can't have 1.1 metadata *and* Setuptools requires
+                for field in ('Obsoletes', 'Requires', 'Provides'):
+                    del self.metadata[field]
+
         reqs = []
+
         if requires is not None:
             for line in yield_lines(requires):
-                if line[0] == '[':
+                if line[0] == '[' and self.display_warnings:
                     warnings.warn('distutils2 does not support extensions '
                                   'in requires.txt')
                     break
                 else:
                     match = self._REQUIREMENT.match(line.strip())
                     if not match:
-                        raise ValueError('Distribution %s has ill formed '
-                                         'requires.txt file (%s)' %
-                                         (self.name, line))
+                        # this happens when we encounter extras
+                        # since they are written at the end of the file
+                        # we just exit
+                        break
+                        #raise ValueError('Distribution %s has ill formed '
+                        #                 'requires.txt file (%s)' %
+                        #                 (self.name, line))
                     else:
                         if match.group('extras'):
                             s = (('Distribution %s uses extra requirements '
-                                  'which are not supported in distutils') \
-                                         % (self.name))
+                                 'which are not supported in distutils') \
+                                 % (self.name))
                             warnings.warn(s)
                         name = match.group('name')
                         version = None
@@ -941,20 +969,50 @@
                             version = match.group('first')
                             if match.group('rest'):
                                 version += match.group('rest')
-                            version = version.replace(' ', '') # trim spaces
+                            version = version.replace(' ', '')  # trim spaces
                         if version is None:
                             reqs.append(name)
                         else:
                             reqs.append('%s (%s)' % (name, version))
-            if self.metadata['Metadata-Version'] == '1.2':
+
+            if len(reqs) > 0:
                 self.metadata['Requires-Dist'] += reqs
-            else:
-                self.metadata['Requires'] += reqs
+
 
         if _cache_enabled:
             _cache_path_egg[self.path] = self
 
+    def __repr__(self):
+        return '%s-%s at %s' % (self.name, self.metadata.version, self.path)
+
     def get_installed_files(self, local=False):
+
+        def _md5(path):
+            f = open(path)
+            try:
+                content = f.read()
+            finally:
+                f.close()
+            return md5(content).hexdigest()
+
+        def _size(path):
+            return os.stat(path)[ST_SIZE]
+
+        path = self.path
+        if local:
+            path = path.replace('/', os.sep)
+
+        # XXX What about scripts and data files ?
+        if os.path.isfile(path):
+            return [(path, _md5(path), _size(path))]
+        else:
+            files = []
+            for root, dir, files_ in os.walk(path):
+                for item in files_:
+                    item = os.path.join(root, item)
+                    files.append((item, _md5(item), _size(item)))
+            return files
+
         return []
 
     def uses(self, path):
@@ -962,18 +1020,12 @@
 
     def __eq__(self, other):
         return isinstance(other, EggInfoDistribution) and \
-               self.path == other.path
+            self.path == other.path
 
     # See http://docs.python.org/reference/datamodel#object.__hash__
     __hash__ = object.__hash__
 
 
-def _normalize_dist_name(name):
-    """Returns a normalized name from the given *name*.
-    :rtype: string"""
-    return name.replace('-', '_')
-
-
 def distinfo_dirname(name, version):
     """
     The *name* and *version* parameters are converted into their
@@ -993,7 +1045,7 @@
     :returns: directory name
     :rtype: string"""
     file_extension = '.dist-info'
-    name = _normalize_dist_name(name)
+    name = name.replace('-', '_')
     normalized_version = suggest_normalized_version(version)
     # Because this is a lookup procedure, something will be returned even if
     #   it is a version that cannot be normalized
@@ -1003,7 +1055,7 @@
     return '-'.join([name, normalized_version]) + file_extension
 
 
-def get_distributions(use_egg_info=False):
+def get_distributions(use_egg_info=False, paths=sys.path):
     """
     Provides an iterator that looks for ``.dist-info`` directories in
     ``sys.path`` and returns :class:`Distribution` instances for each one of
@@ -1014,10 +1066,10 @@
             instances
     """
     if not _cache_enabled:
-        for dist in _yield_distributions(True, use_egg_info):
+        for dist in _yield_distributions(True, use_egg_info, paths):
             yield dist
     else:
-        _generate_cache(use_egg_info)
+        _generate_cache(use_egg_info, paths)
 
         for dist in _cache_path.itervalues():
             yield dist
@@ -1027,7 +1079,7 @@
                 yield dist
 
 
-def get_distribution(name, use_egg_info=False):
+def get_distribution(name, use_egg_info=False, paths=None):
     """
     Scans all elements in ``sys.path`` and looks for all directories
     ending with ``.dist-info``. Returns a :class:`Distribution`
@@ -1044,12 +1096,15 @@
 
     :rtype: :class:`Distribution` or :class:`EggInfoDistribution` or None
     """
+    if paths == None:
+        paths = sys.path
+
     if not _cache_enabled:
-        for dist in _yield_distributions(True, use_egg_info):
+        for dist in _yield_distributions(True, use_egg_info, paths):
             if dist.name == name:
                 return dist
     else:
-        _generate_cache(use_egg_info)
+        _generate_cache(use_egg_info, paths)
 
         if name in _cache_name:
             return _cache_name[name][0]
@@ -1086,7 +1141,7 @@
                     predicate = VersionPredicate(obs)
                 except ValueError:
                     raise DistutilsError(('Distribution %s has ill formed' +
-                                          ' obsoletes field') % (dist.name,))
+                                         ' obsoletes field') % (dist.name,))
                 if name == o_components[0] and predicate.match(version):
                     yield dist
                     break
@@ -1132,9 +1187,9 @@
                 p_name, p_ver = p_components
                 if len(p_ver) < 2 or p_ver[0] != '(' or p_ver[-1] != ')':
                     raise DistutilsError(('Distribution %s has invalid ' +
-                                          'provides field: %s') \
-                                           % (dist.name, p))
-                p_ver = p_ver[1:-1] # trim off the parenthesis
+                                         'provides field: %s') \
+                                         % (dist.name, p))
+                p_ver = p_ver[1:-1]  # trim off the parenthesis
                 if p_name == name and predicate.match(p_ver):
                     yield dist
                     break
@@ -1153,3 +1208,15 @@
     for dist in get_distributions():
         if dist.uses(path):
             yield dist
+
+def resource_path(distribution_name, relative_path):
+    dist = get_distribution(distribution_name)
+    if dist != None:
+        return dist.get_resource_path(relative_path)
+    raise LookupError('No distribution named %s is installed.' %
+                      distribution_name)
+
+def resource_open(distribution_name, relative_path, * args, ** kwargs):
+    file = open(resource_path(distribution_name, relative_path), * args,
+                ** kwargs)
+    return file
\ No newline at end of file
diff --git a/distutils2/_backport/shutil.py b/distutils2/_backport/shutil.py
--- a/distutils2/_backport/shutil.py
+++ b/distutils2/_backport/shutil.py
@@ -1,4 +1,4 @@
-"""Utility functions for copying files and directory trees.
+"""Utility functions for copying and archiving files and directory trees.
 
 XXX The functions here don't copy the resource fork or other metadata on Mac.
 
@@ -9,7 +9,13 @@
 import stat
 from os.path import abspath
 import fnmatch
-from warnings import warn
+import errno
+
+try:
+    import bz2
+    _BZ2_SUPPORTED = True
+except ImportError:
+    _BZ2_SUPPORTED = False
 
 try:
     from pwd import getpwnam
@@ -21,9 +27,12 @@
 except ImportError:
     getgrnam = None
 
-__all__ = ["copyfileobj","copyfile","copymode","copystat","copy","copy2",
-           "copytree","move","rmtree","Error", "SpecialFileError",
-           "ExecError","make_archive"]
+__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
+           "copytree", "move", "rmtree", "Error", "SpecialFileError",
+           "ExecError", "make_archive", "get_archive_formats",
+           "register_archive_format", "unregister_archive_format",
+           "get_unpack_formats", "register_unpack_format",
+           "unregister_unpack_format", "unpack_archive"]
 
 class Error(EnvironmentError):
     pass
@@ -35,6 +44,14 @@
 class ExecError(EnvironmentError):
     """Raised when a command could not be executed"""
 
+class ReadError(EnvironmentError):
+    """Raised when an archive cannot be read"""
+
+class RegistryError(Exception):
+    """Raised when a registery operation with the archiving
+    and unpacking registeries fails"""
+
+
 try:
     WindowsError
 except NameError:
@@ -50,7 +67,7 @@
 
 def _samefile(src, dst):
     # Macintosh, Unix.
-    if hasattr(os.path,'samefile'):
+    if hasattr(os.path, 'samefile'):
         try:
             return os.path.samefile(src, dst)
         except OSError:
@@ -63,10 +80,8 @@
 def copyfile(src, dst):
     """Copy data from src to dst"""
     if _samefile(src, dst):
-        raise Error, "`%s` and `%s` are the same file" % (src, dst)
+        raise Error("`%s` and `%s` are the same file" % (src, dst))
 
-    fsrc = None
-    fdst = None
     for fn in [src, dst]:
         try:
             st = os.stat(fn)
@@ -77,15 +92,16 @@
             # XXX What about other special files? (sockets, devices...)
             if stat.S_ISFIFO(st.st_mode):
                 raise SpecialFileError("`%s` is a named pipe" % fn)
+
+    fsrc = open(src, 'rb')
     try:
-        fsrc = open(src, 'rb')
         fdst = open(dst, 'wb')
-        copyfileobj(fsrc, fdst)
+        try:
+            copyfileobj(fsrc, fdst)
+        finally:
+            fdst.close()
     finally:
-        if fdst:
-            fdst.close()
-        if fsrc:
-            fsrc.close()
+        fsrc.close()
 
 def copymode(src, dst):
     """Copy mode bits from src to dst"""
@@ -103,8 +119,12 @@
     if hasattr(os, 'chmod'):
         os.chmod(dst, mode)
     if hasattr(os, 'chflags') and hasattr(st, 'st_flags'):
-        os.chflags(dst, st.st_flags)
-
+        try:
+            os.chflags(dst, st.st_flags)
+        except OSError, why:
+            if (not hasattr(errno, 'EOPNOTSUPP') or
+                why.errno != errno.EOPNOTSUPP):
+                raise
 
 def copy(src, dst):
     """Copy data and mode bits ("cp src dst").
@@ -140,8 +160,9 @@
         return set(ignored_names)
     return _ignore_patterns
 
-def copytree(src, dst, symlinks=False, ignore=None):
-    """Recursively copy a directory tree using copy2().
+def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2,
+             ignore_dangling_symlinks=False):
+    """Recursively copy a directory tree.
 
     The destination directory must not already exist.
     If exception(s) occur, an Error is raised with a list of reasons.
@@ -149,7 +170,13 @@
     If the optional symlinks flag is true, symbolic links in the
     source tree result in symbolic links in the destination tree; if
     it is false, the contents of the files pointed to by symbolic
-    links are copied.
+    links are copied. If the file pointed by the symlink doesn't
+    exist, an exception will be added in the list of errors raised in
+    an Error exception at the end of the copy process.
+
+    You can set the optional ignore_dangling_symlinks flag to true if you
+    want to silence this exception. Notice that this has no effect on
+    platforms that don't support os.symlink.
 
     The optional ignore argument is a callable. If given, it
     is called with the `src` parameter, which is the directory
@@ -163,7 +190,10 @@
     list of names relative to the `src` directory that should
     not be copied.
 
-    XXX Consider this example code rather than the ultimate tool.
+    The optional copy_function argument is a callable that will be used
+    to copy each file. It will be called with the source path and the
+    destination path as arguments. By default, copy2() is used, but any
+    function that supports the same signature (like copy()) can be used.
 
     """
     names = os.listdir(src)
@@ -182,14 +212,21 @@
         srcname = os.path.join(src, name)
         dstname = os.path.join(dst, name)
         try:
-            if symlinks and os.path.islink(srcname):
+            if os.path.islink(srcname):
                 linkto = os.readlink(srcname)
-                os.symlink(linkto, dstname)
+                if symlinks:
+                    os.symlink(linkto, dstname)
+                else:
+                    # ignore dangling symlink if the flag is on
+                    if not os.path.exists(linkto) and ignore_dangling_symlinks:
+                        continue
+                    # otherwise let the copy occur. copy2 will raise an error
+                    copy_function(srcname, dstname)
             elif os.path.isdir(srcname):
-                copytree(srcname, dstname, symlinks, ignore)
+                copytree(srcname, dstname, symlinks, ignore, copy_function)
             else:
                 # Will raise a SpecialFileError for unsupported file types
-                copy2(srcname, dstname)
+                copy_function(srcname, dstname)
         # catch the Error from the recursive copytree so that we can
         # continue with other files
         except Error, err:
@@ -205,7 +242,7 @@
         else:
             errors.extend((src, dst, str(why)))
     if errors:
-        raise Error, errors
+        raise Error(errors)
 
 def rmtree(path, ignore_errors=False, onerror=None):
     """Recursively delete a directory tree.
@@ -235,7 +272,7 @@
     names = []
     try:
         names = os.listdir(path)
-    except os.error, err:
+    except os.error:
         onerror(os.listdir, path, sys.exc_info())
     for name in names:
         fullname = os.path.join(path, name)
@@ -248,7 +285,7 @@
         else:
             try:
                 os.remove(fullname)
-            except os.error, err:
+            except os.error:
                 onerror(os.remove, fullname, sys.exc_info())
     try:
         os.rmdir(path)
@@ -282,13 +319,13 @@
     if os.path.isdir(dst):
         real_dst = os.path.join(dst, _basename(src))
         if os.path.exists(real_dst):
-            raise Error, "Destination path '%s' already exists" % real_dst
+            raise Error("Destination path '%s' already exists" % real_dst)
     try:
         os.rename(src, real_dst)
     except OSError:
         if os.path.isdir(src):
             if _destinsrc(src, dst):
-                raise Error, "Cannot move a directory '%s' into itself '%s'." % (src, dst)
+                raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst))
             copytree(src, real_dst, symlinks=True)
             rmtree(src)
         else:
@@ -333,44 +370,45 @@
     """Create a (possibly compressed) tar file from all the files under
     'base_dir'.
 
-    'compress' must be "gzip" (the default), "compress", "bzip2", or None.
-    (compress will be deprecated in Python 3.2)
+    'compress' must be "gzip" (the default), "bzip2", or None.
 
     'owner' and 'group' can be used to define an owner and a group for the
     archive that is being built. If not provided, the current owner and group
     will be used.
 
-    The output tar file will be named 'base_dir' +  ".tar", possibly plus
-    the appropriate compression extension (".gz", ".bz2" or ".Z").
+    The output tar file will be named 'base_name' +  ".tar", possibly plus
+    the appropriate compression extension (".gz", or ".bz2").
 
     Returns the output filename.
     """
-    tar_compression = {'gzip': 'gz', 'bzip2': 'bz2', None: '', 'compress': ''}
-    compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'compress': '.Z'}
+    tar_compression = {'gzip': 'gz', None: ''}
+    compress_ext = {'gzip': '.gz'}
+
+    if _BZ2_SUPPORTED:
+        tar_compression['bzip2'] = 'bz2'
+        compress_ext['bzip2'] = '.bz2'
 
     # flags for compression program, each element of list will be an argument
-    if compress is not None and compress not in compress_ext.keys():
-        raise ValueError, \
-              ("bad value for 'compress': must be None, 'gzip', 'bzip2' "
-               "or 'compress'")
+    if compress is not None and compress not in compress_ext:
+        raise ValueError("bad value for 'compress', or compression format not "
+                         "supported: %s" % compress)
 
-    archive_name = base_name + '.tar'
-    if compress != 'compress':
-        archive_name += compress_ext.get(compress, '')
+    archive_name = base_name + '.tar' + compress_ext.get(compress, '')
+    archive_dir = os.path.dirname(archive_name)
 
-    archive_dir = os.path.dirname(archive_name)
     if not os.path.exists(archive_dir):
         if logger is not None:
-            logger.info("creating %s" % archive_dir)
+            logger.info("creating %s", archive_dir)
         if not dry_run:
             os.makedirs(archive_dir)
 
-
     # creating the tarball
+    # XXX late import because of circular dependency between shutil and
+    # tarfile :(
     from distutils2._backport import tarfile
 
     if logger is not None:
-        logger.info('Creating tar archive')
+        logger.info('creating tar archive')
 
     uid = _get_uid(owner)
     gid = _get_gid(group)
@@ -391,23 +429,9 @@
         finally:
             tar.close()
 
-    # compression using `compress`
-    # XXX this block will be removed in Python 3.2
-    if compress == 'compress':
-        warn("'compress' will be deprecated.", PendingDeprecationWarning)
-        # the option varies depending on the platform
-        compressed_name = archive_name + compress_ext[compress]
-        if sys.platform == 'win32':
-            cmd = [compress, archive_name, compressed_name]
-        else:
-            cmd = [compress, '-f', archive_name]
-        from distutils2.spawn import spawn
-        spawn(cmd, dry_run=dry_run)
-        return compressed_name
-
     return archive_name
 
-def _call_external_zip(directory, verbose=False):
+def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False):
     # XXX see if we want to keep an external call here
     if verbose:
         zipoptions = "-r"
@@ -420,15 +444,14 @@
     except DistutilsExecError:
         # XXX really should distinguish between "couldn't find
         # external 'zip' command" and "zip failed".
-        raise ExecError, \
-            ("unable to create zip file '%s': "
+        raise ExecError("unable to create zip file '%s': "
             "could neither import the 'zipfile' module nor "
             "find a standalone zip utility") % zip_filename
 
 def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None):
     """Create a zip file from all the files under 'base_dir'.
 
-    The output zip file will be named 'base_dir' + ".zip".  Uses either the
+    The output zip file will be named 'base_name' + ".zip".  Uses either the
     "zipfile" Python module (if available) or the InfoZIP "zip" utility
     (if installed and found on the default search path).  If neither tool is
     available, raises ExecError.  Returns the name of the output zip
@@ -451,7 +474,7 @@
         zipfile = None
 
     if zipfile is None:
-        _call_external_zip(base_dir, verbose)
+        _call_external_zip(base_dir, zip_filename, verbose, dry_run)
     else:
         if logger is not None:
             logger.info("creating '%s' and adding '%s' to it",
@@ -475,19 +498,21 @@
 _ARCHIVE_FORMATS = {
     'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
     'bztar': (_make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"),
-    'ztar':  (_make_tarball, [('compress', 'compress')],
-                "compressed tar file"),
     'tar':   (_make_tarball, [('compress', None)], "uncompressed tar file"),
-    'zip':   (_make_zipfile, [],"ZIP file")
+    'zip':   (_make_zipfile, [], "ZIP file"),
     }
 
+if _BZ2_SUPPORTED:
+    _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')],
+                                "bzip2'ed tar-file")
+
 def get_archive_formats():
     """Returns a list of supported formats for archiving and unarchiving.
 
     Each element of the returned sequence is a tuple (name, description)
     """
     formats = [(name, registry[2]) for name, registry in
-               _ARCHIVE_FORMATS.items()]
+               _ARCHIVE_FORMATS.iteritems()]
     formats.sort()
     return formats
 
@@ -507,7 +532,7 @@
     if not isinstance(extra_args, (tuple, list)):
         raise TypeError('extra_args needs to be a sequence')
     for element in extra_args:
-        if not isinstance(element, (tuple, list)) or len(element) !=2 :
+        if not isinstance(element, (tuple, list)) or len(element) !=2:
             raise TypeError('extra_args elements are : (arg_name, value)')
 
     _ARCHIVE_FORMATS[name] = (function, extra_args, description)
@@ -520,7 +545,7 @@
     """Create an archive file (eg. zip or tar).
 
     'base_name' is the name of the file to create, minus any format-specific
-    extension; 'format' is the archive format: one of "zip", "tar", "ztar",
+    extension; 'format' is the archive format: one of "zip", "tar", "bztar"
     or "gztar".
 
     'root_dir' is a directory that will be the root directory of the
@@ -549,7 +574,7 @@
     try:
         format_info = _ARCHIVE_FORMATS[format]
     except KeyError:
-        raise ValueError, "unknown archive format '%s'" % format
+        raise ValueError("unknown archive format '%s'" % format)
 
     func = format_info[0]
     for arg, val in format_info[1]:
@@ -568,3 +593,176 @@
             os.chdir(save_cwd)
 
     return filename
+
+
+def get_unpack_formats():
+    """Returns a list of supported formats for unpacking.
+
+    Each element of the returned sequence is a tuple
+    (name, extensions, description)
+    """
+    formats = [(name, info[0], info[3]) for name, info in
+               _UNPACK_FORMATS.iteritems()]
+    formats.sort()
+    return formats
+
+def _check_unpack_options(extensions, function, extra_args):
+    """Checks what gets registered as an unpacker."""
+    # first make sure no other unpacker is registered for this extension
+    existing_extensions = {}
+    for name, info in _UNPACK_FORMATS.iteritems():
+        for ext in info[0]:
+            existing_extensions[ext] = name
+
+    for extension in extensions:
+        if extension in existing_extensions:
+            msg = '%s is already registered for "%s"'
+            raise RegistryError(msg % (extension,
+                                       existing_extensions[extension]))
+
+    if not callable(function):
+        raise TypeError('The registered function must be a callable')
+
+
+def register_unpack_format(name, extensions, function, extra_args=None,
+                           description=''):
+    """Registers an unpack format.
+
+    `name` is the name of the format. `extensions` is a list of extensions
+    corresponding to the format.
+
+    `function` is the callable that will be
+    used to unpack archives. The callable will receive archives to unpack.
+    If it's unable to handle an archive, it needs to raise a ReadError
+    exception.
+
+    If provided, `extra_args` is a sequence of
+    (name, value) tuples that will be passed as arguments to the callable.
+    description can be provided to describe the format, and will be returned
+    by the get_unpack_formats() function.
+    """
+    if extra_args is None:
+        extra_args = []
+    _check_unpack_options(extensions, function, extra_args)
+    _UNPACK_FORMATS[name] = extensions, function, extra_args, description
+
+def unregister_unpack_format(name):
+    """Removes the unpack format from the registry."""
+    del _UNPACK_FORMATS[name]
+
+def _ensure_directory(path):
+    """Ensure that the parent directory of `path` exists"""
+    dirname = os.path.dirname(path)
+    if not os.path.isdir(dirname):
+        os.makedirs(dirname)
+
+def _unpack_zipfile(filename, extract_dir):
+    """Unpack zip `filename` to `extract_dir`
+    """
+    try:
+        import zipfile
+    except ImportError:
+        raise ReadError('zlib not supported, cannot unpack this archive.')
+
+    if not zipfile.is_zipfile(filename):
+        raise ReadError("%s is not a zip file" % filename)
+
+    zip = zipfile.ZipFile(filename)
+    try:
+        for info in zip.infolist():
+            name = info.filename
+
+            # don't extract absolute paths or ones with .. in them
+            if name.startswith('/') or '..' in name:
+                continue
+
+            target = os.path.join(extract_dir, *name.split('/'))
+            if not target:
+                continue
+
+            _ensure_directory(target)
+            if not name.endswith('/'):
+                # file
+                data = zip.read(info.filename)
+                f = open(target, 'wb')
+                try:
+                    f.write(data)
+                finally:
+                    f.close()
+                    del data
+    finally:
+        zip.close()
+
+def _unpack_tarfile(filename, extract_dir):
+    """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
+    """
+    from distutils2._backport import tarfile
+    try:
+        tarobj = tarfile.open(filename)
+    except tarfile.TarError:
+        raise ReadError(
+            "%s is not a compressed or uncompressed tar file" % filename)
+    try:
+        tarobj.extractall(extract_dir)
+    finally:
+        tarobj.close()
+
+_UNPACK_FORMATS = {
+    'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"),
+    'tar':   (['.tar'], _unpack_tarfile, [], "uncompressed tar file"),
+    'zip':   (['.zip'], _unpack_zipfile, [], "ZIP file")
+    }
+
+if _BZ2_SUPPORTED:
+    _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [],
+                                "bzip2'ed tar-file")
+
+def _find_unpack_format(filename):
+    for name, info in _UNPACK_FORMATS.iteritems():
+        for extension in info[0]:
+            if filename.endswith(extension):
+                return name
+    return None
+
+def unpack_archive(filename, extract_dir=None, format=None):
+    """Unpack an archive.
+
+    `filename` is the name of the archive.
+
+    `extract_dir` is the name of the target directory, where the archive
+    is unpacked. If not provided, the current working directory is used.
+
+    `format` is the archive format: one of "zip", "tar", or "gztar". Or any
+    other registered format. If not provided, unpack_archive will use the
+    filename extension and see if an unpacker was registered for that
+    extension.
+
+    In case none is found, a ValueError is raised.
+    """
+    if extract_dir is None:
+        extract_dir = os.getcwd()
+
+    func = None
+
+    if format is not None:
+        try:
+            format_info = _UNPACK_FORMATS[format]
+        except KeyError:
+            raise ValueError("Unknown unpack format '{0}'".format(format))
+
+        func = format_info[0]
+        func(filename, extract_dir, **dict(format_info[1]))
+    else:
+        # we need to look at the registered unpackers supported extensions
+        format = _find_unpack_format(filename)
+        if format is None:
+            raise ReadError("Unknown archive format '{0}'".format(filename))
+
+        func = _UNPACK_FORMATS[format][1]
+        kwargs = dict(_UNPACK_FORMATS[format][2])
+        func(filename, extract_dir, **kwargs)
+
+    if func is None:
+        raise ValueError('Unknown archive format: %s' % filename)
+
+    return extract_dir
diff --git a/distutils2/_backport/sysconfig.py b/distutils2/_backport/sysconfig.py
--- a/distutils2/_backport/sysconfig.py
+++ b/distutils2/_backport/sysconfig.py
@@ -1,8 +1,6 @@
-"""Provide access to Python's configuration information.
-
-"""
+"""Provide access to Python's configuration information."""
+import os
 import sys
-import os
 import re
 from os.path import pardir, realpath
 from ConfigParser import RawConfigParser
@@ -18,6 +16,7 @@
 _SCHEMES.read(_CONFIG_FILE)
 _VAR_REPL = re.compile(r'\{([^{]*?)\}')
 
+
 def _expand_globals(config):
     if config.has_section('globals'):
         globals = config.items('globals')
@@ -38,11 +37,13 @@
     #
     for section in config.sections():
         variables = dict(config.items(section))
+
         def _replacer(matchobj):
             name = matchobj.group(1)
             if name in variables:
                 return variables[name]
             return matchobj.group(0)
+
         for option, value in config.items(section):
             config.set(section, option, _VAR_REPL.sub(_replacer, value))
 
@@ -69,6 +70,7 @@
 if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower():
     _PROJECT_BASE = realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
 
+
 def is_python_build():
     for fn in ("Setup.dist", "Setup.local"):
         if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)):
@@ -84,10 +86,10 @@
 
 
 def _subst_vars(path, local_vars):
-    """In the string `path`, replace tokens like {some.thing} with the corresponding value from the map `local_vars`.
+    """In the string `path`, replace tokens like {some.thing} with the
+    corresponding value from the map `local_vars`.
 
     If there is no corresponding value, leave the token unchanged.
-
     """
     def _replacer(matchobj):
         name = matchobj.group(1)
@@ -98,13 +100,14 @@
         return matchobj.group(0)
     return _VAR_REPL.sub(_replacer, path)
 
+
 def _extend_dict(target_dict, other_dict):
-    target_keys = target_dict.keys()
-    for key, value in other_dict.items():
-        if key in target_keys:
+    for key, value in other_dict.iteritems():
+        if key in target_dict:
             continue
         target_dict[key] = value
 
+
 def _expand_vars(scheme, vars):
     res = {}
     if vars is None:
@@ -117,14 +120,25 @@
         res[key] = os.path.normpath(_subst_vars(value, vars))
     return res
 
+def format_value(value, vars):
+    def _replacer(matchobj):
+         name = matchobj.group(1)
+         if name in vars:
+             return vars[name]
+         return matchobj.group(0)
+    return _VAR_REPL.sub(_replacer, value)
+ 
+
 def _get_default_scheme():
     if os.name == 'posix':
         # the default scheme for posix is posix_prefix
         return 'posix_prefix'
     return os.name
 
+
 def _getuserbase():
     env_base = os.environ.get("PYTHONUSERBASE", None)
+
     def joinuser(*args):
         return os.path.expanduser(os.path.join(*args))
 
@@ -158,7 +172,6 @@
     optional dictionary is passed in as the second argument, it is
     used instead of a new dictionary.
     """
-    import re
     # Regexes needed for parsing Makefile (and similar syntaxes,
     # like old-style Setup files).
     _variable_rx = re.compile("([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
@@ -256,7 +269,6 @@
                             if name not in done:
                                 done[name] = value
 
-
             else:
                 # bogus variable reference; just drop it since we can't deal
                 variables.remove(name)
@@ -267,6 +279,7 @@
 
 
 def get_makefile_filename():
+    """Return the path of the Makefile."""
     if _PYTHON_BUILD:
         return os.path.join(_PROJECT_BASE, "Makefile")
     return os.path.join(get_path('stdlib'), "config", "Makefile")
@@ -315,6 +328,7 @@
     if _PYTHON_BUILD:
         vars['LDSHARED'] = vars['BLDSHARED']
 
+
 def _init_non_posix(vars):
     """Initialize the module as appropriate for NT"""
     # set basic install directories
@@ -338,7 +352,6 @@
     optional dictionary is passed in as the second argument, it is
     used instead of a new dictionary.
     """
-    import re
     if vars is None:
         vars = {}
     define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
@@ -351,8 +364,10 @@
         m = define_rx.match(line)
         if m:
             n, v = m.group(1, 2)
-            try: v = int(v)
-            except ValueError: pass
+            try:
+                v = int(v)
+            except ValueError:
+                pass
             vars[n] = v
         else:
             m = undef_rx.match(line)
@@ -360,8 +375,9 @@
                 vars[m.group(1)] = 0
     return vars
 
+
 def get_config_h_filename():
-    """Returns the path of pyconfig.h."""
+    """Return the path of pyconfig.h."""
     if _PYTHON_BUILD:
         if os.name == "nt":
             inc_dir = os.path.join(_PROJECT_BASE, "PC")
@@ -371,17 +387,20 @@
         inc_dir = get_path('platinclude')
     return os.path.join(inc_dir, 'pyconfig.h')
 
+
 def get_scheme_names():
-    """Returns a tuple containing the schemes names."""
+    """Return a tuple containing the schemes names."""
     return tuple(sorted(_SCHEMES.sections()))
 
+
 def get_path_names():
-    """Returns a tuple containing the paths names."""
+    """Return a tuple containing the paths names."""
     # xxx see if we want a static list
     return _SCHEMES.options('posix_prefix')
 
+
 def get_paths(scheme=_get_default_scheme(), vars=None, expand=True):
-    """Returns a mapping containing an install scheme.
+    """Return a mapping containing an install scheme.
 
     ``scheme`` is the install scheme name. If not provided, it will
     return the default scheme for the current platform.
@@ -391,13 +410,15 @@
     else:
         return dict(_SCHEMES.items(scheme))
 
+
 def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True):
-    """Returns a path corresponding to the scheme.
+    """Return a path corresponding to the scheme.
 
     ``scheme`` is the install scheme name.
     """
     return get_paths(scheme, vars, expand)[name]
 
+
 def get_config_vars(*args):
     """With no arguments, return a dictionary of all configuration
     variables relevant for the current platform.
@@ -408,7 +429,6 @@
     With arguments, return a list of values that result from looking up
     each argument in the configuration variable dictionary.
     """
-    import re
     global _CONFIG_VARS
     if _CONFIG_VARS is None:
         _CONFIG_VARS = {}
@@ -440,7 +460,6 @@
         else:
             _CONFIG_VARS['srcdir'] = realpath(_CONFIG_VARS['srcdir'])
 
-
         # Convert srcdir into an absolute path if it appears necessary.
         # Normally it is relative to the build directory.  However, during
         # testing, for example, we might be running a non-installed python
@@ -456,7 +475,7 @@
                 _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir)
 
         if sys.platform == 'darwin':
-            kernel_version = os.uname()[2] # Kernel version (8.4.3)
+            kernel_version = os.uname()[2]  # Kernel version (8.4.3)
             major_version = int(kernel_version.split('.')[0])
 
             if major_version < 8:
@@ -522,6 +541,7 @@
     else:
         return _CONFIG_VARS
 
+
 def get_config_var(name):
     """Return the value of a single variable using the dictionary returned by
     'get_config_vars()'.
@@ -530,6 +550,7 @@
     """
     return get_config_vars().get(name)
 
+
 def get_platform():
     """Return a string that identifies the current platform.
 
@@ -555,7 +576,6 @@
 
     For other non-POSIX platforms, currently just returns 'sys.platform'.
     """
-    import re
     if os.name == 'nt':
         # sniff sys.version for architecture.
         prefix = " bit ("
@@ -563,7 +583,7 @@
         if i == -1:
             return sys.platform
         j = sys.version.find(")", i)
-        look = sys.version[i+len(prefix):j].lower()
+        look = sys.version[i + len(prefix):j].lower()
         if look == 'amd64':
             return 'win-amd64'
         if look == 'itanium':
@@ -600,7 +620,7 @@
         return "%s-%s.%s" % (osname, version, release)
     elif osname[:6] == "cygwin":
         osname = "cygwin"
-        rel_re = re.compile (r'[\d.]+')
+        rel_re = re.compile(r'[\d.]+')
         m = rel_re.match(release)
         if m:
             release = m.group()
@@ -675,19 +695,19 @@
                     machine = 'universal'
                 else:
                     raise ValueError(
-                       "Don't know machine value for archs=%r"%(archs,))
+                       "Don't know machine value for archs=%r" % (archs,))
 
             elif machine == 'i386':
                 # On OSX the machine type returned by uname is always the
                 # 32-bit variant, even if the executable architecture is
                 # the 64-bit variant
-                if sys.maxint >= 2**32:
+                if sys.maxint >= (2 ** 32):
                     machine = 'x86_64'
 
             elif machine in ('PowerPC', 'Power_Macintosh'):
                 # Pick a sane name for the PPC architecture.
                 # See 'i386' case
-                if sys.maxint >= 2**32:
+                if sys.maxint >= (2 ** 32):
                     machine = 'ppc64'
                 else:
                     machine = 'ppc'
@@ -698,12 +718,14 @@
 def get_python_version():
     return _PY_VERSION_SHORT
 
+
 def _print_dict(title, data):
-    for index, (key, value) in enumerate(sorted(data.items())):
+    for index, (key, value) in enumerate(sorted(data.iteritems())):
         if index == 0:
             print '%s: ' % (title)
         print '\t%s = "%s"' % (key, value)
 
+
 def _main():
     """Display all information sysconfig contains."""
     print 'Platform: "%s"' % get_platform()
@@ -714,5 +736,6 @@
     print
     _print_dict('Variables', get_config_vars())
 
+
 if __name__ == '__main__':
     _main()
diff --git a/distutils2/_backport/tests/fake_dists/babar-0.1.dist-info/INSTALLER b/distutils2/_backport/tests/fake_dists/babar-0.1.dist-info/INSTALLER
new file mode 100644
diff --git a/distutils2/_backport/tests/fake_dists/babar-0.1.dist-info/METADATA b/distutils2/_backport/tests/fake_dists/babar-0.1.dist-info/METADATA
new file mode 100644
--- /dev/null
+++ b/distutils2/_backport/tests/fake_dists/babar-0.1.dist-info/METADATA
@@ -0,0 +1,4 @@
+Metadata-version: 1.2
+Name: babar
+Version: 0.1
+Author: FELD Boris
\ No newline at end of file
diff --git a/distutils2/_backport/tests/fake_dists/babar-0.1.dist-info/RECORD b/distutils2/_backport/tests/fake_dists/babar-0.1.dist-info/RECORD
new file mode 100644
diff --git a/distutils2/_backport/tests/fake_dists/babar-0.1.dist-info/REQUESTED b/distutils2/_backport/tests/fake_dists/babar-0.1.dist-info/REQUESTED
new file mode 100644
diff --git a/distutils2/_backport/tests/fake_dists/babar-0.1.dist-info/RESOURCES b/distutils2/_backport/tests/fake_dists/babar-0.1.dist-info/RESOURCES
new file mode 100644
--- /dev/null
+++ b/distutils2/_backport/tests/fake_dists/babar-0.1.dist-info/RESOURCES
@@ -0,0 +1,2 @@
+babar.png,babar.png
+babar.cfg,babar.cfg
\ No newline at end of file
diff --git a/distutils2/_backport/tests/fake_dists/babar.cfg b/distutils2/_backport/tests/fake_dists/babar.cfg
new file mode 100644
--- /dev/null
+++ b/distutils2/_backport/tests/fake_dists/babar.cfg
@@ -0,0 +1,1 @@
+Config
\ No newline at end of file
diff --git a/distutils2/_backport/tests/fake_dists/babar.png b/distutils2/_backport/tests/fake_dists/babar.png
new file mode 100644
diff --git a/distutils2/_backport/tests/fake_dists/coconuts-aster-10.3.egg-info/PKG-INFO b/distutils2/_backport/tests/fake_dists/coconuts-aster-10.3.egg-info/PKG-INFO
new file mode 100644
--- /dev/null
+++ b/distutils2/_backport/tests/fake_dists/coconuts-aster-10.3.egg-info/PKG-INFO
@@ -0,0 +1,5 @@
+Metadata-Version: 1.2
+Name: coconuts-aster
+Version: 10.3
+Provides-Dist: strawberry (0.6)
+Provides-Dist: banana (0.4)
diff --git a/distutils2/_backport/tests/test_pkgutil.py b/distutils2/_backport/tests/test_pkgutil.py
--- a/distutils2/_backport/tests/test_pkgutil.py
+++ b/distutils2/_backport/tests/test_pkgutil.py
@@ -1,19 +1,27 @@
 # -*- coding: utf-8 -*-
 """Tests for PEP 376 pkgutil functionality"""
+import imp
 import sys
+
+import csv
 import os
-import csv
-import imp
+import shutil
 import tempfile
-import shutil
 import zipfile
 try:
     from hashlib import md5
 except ImportError:
     from distutils2._backport.hashlib import md5
 
+from distutils2.errors import DistutilsError
+from distutils2.metadata import Metadata
 from distutils2.tests import unittest, run_unittest, support, TESTFN
+
 from distutils2._backport import pkgutil
+from distutils2._backport.pkgutil import (
+                                          Distribution, EggInfoDistribution, get_distribution, get_distributions,
+                                          provides_distribution, obsoletes_distribution, get_file_users,
+                                          distinfo_dirname, _yield_distributions)
 
 try:
     from os.path import relpath
@@ -106,11 +114,16 @@
         self.assertEqual(res1, RESOURCE_DATA)
         res2 = pkgutil.get_data(pkg, 'sub/res.txt')
         self.assertEqual(res2, RESOURCE_DATA)
+
+        names = []
+        for loader, name, ispkg in pkgutil.iter_modules([zip_file]):
+            names.append(name)
+        self.assertEqual(names, ['test_getdata_zipfile'])
+
         del sys.path[0]
 
         del sys.modules[pkg]
 
-
 # Adapted from Python 2.7's trunk
 
 
@@ -169,7 +182,7 @@
     def setUp(self):
         super(TestPkgUtilDistribution, self).setUp()
         self.fake_dists_path = os.path.abspath(
-            os.path.join(os.path.dirname(__file__), 'fake_dists'))
+                                               os.path.join(os.path.dirname(__file__), 'fake_dists'))
         pkgutil.disable_cache()
 
         self.distinfo_dirs = [os.path.join(self.fake_dists_path, dir)
@@ -192,7 +205,7 @@
             # Setup the RECORD file for this dist
             record_file = os.path.join(distinfo_dir, 'RECORD')
             record_writer = csv.writer(open(record_file, 'w'), delimiter=',',
-                quoting=csv.QUOTE_NONE)
+                                       quoting=csv.QUOTE_NONE)
             dist_location = distinfo_dir.replace('.dist-info', '')
 
             for path, dirs, files in os.walk(dist_location):
@@ -201,15 +214,15 @@
                                            os.path.join(path, f)))
             for file in ['INSTALLER', 'METADATA', 'REQUESTED']:
                 record_writer.writerow(record_pieces(
-                    os.path.join(distinfo_dir, file)))
+                                       os.path.join(distinfo_dir, file)))
             record_writer.writerow([relpath(record_file, sys.prefix)])
-            del record_writer # causes the RECORD file to close
+            del record_writer  # causes the RECORD file to close
             record_reader = csv.reader(open(record_file, 'rb'))
             record_data = []
             for row in record_reader:
                 path, md5_, size = row[:] + \
-                                   [None for i in xrange(len(row), 3)]
-                record_data.append([path, (md5_, size,)])
+                    [None for i in xrange(len(row), 3)]
+                record_data.append([path, (md5_, size, )])
             self.records[distinfo_dir] = dict(record_data)
 
     def tearDown(self):
@@ -223,31 +236,26 @@
     def test_instantiation(self):
         # Test the Distribution class's instantiation provides us with usable
         # attributes.
-        # Import the Distribution class
-        from distutils2._backport.pkgutil import distinfo_dirname, Distribution
-
         here = os.path.abspath(os.path.dirname(__file__))
         name = 'choxie'
         version = '2.0.0.9'
         dist_path = os.path.join(here, 'fake_dists',
-            distinfo_dirname(name, version))
+                                 distinfo_dirname(name, version))
         dist = Distribution(dist_path)
 
         self.assertEqual(dist.name, name)
-        from distutils2.metadata import DistributionMetadata
-        self.assertTrue(isinstance(dist.metadata, DistributionMetadata))
+        self.assertTrue(isinstance(dist.metadata, Metadata))
         self.assertEqual(dist.metadata['version'], version)
         self.assertTrue(isinstance(dist.requested, type(bool())))
 
     def test_installed_files(self):
         # Test the iteration of installed files.
         # Test the distribution's installed files
-        from distutils2._backport.pkgutil import Distribution
         for distinfo_dir in self.distinfo_dirs:
             dist = Distribution(distinfo_dir)
             for path, md5_, size in dist.get_installed_files():
                 record_data = self.records[dist.path]
-                self.assertTrue(path in record_data.keys())
+                self.assertIn(path, record_data)
                 self.assertEqual(md5_, record_data[path][0])
                 self.assertEqual(size, record_data[path][1])
 
@@ -256,27 +264,25 @@
         # Criteria to test against
         distinfo_name = 'grammar-1.0a4'
         distinfo_dir = os.path.join(self.fake_dists_path,
-            distinfo_name + '.dist-info')
+                                    distinfo_name + '.dist-info')
         true_path = [self.fake_dists_path, distinfo_name, \
-                     'grammar', 'utils.py']
+            'grammar', 'utils.py']
         true_path = relpath(os.path.join(*true_path), sys.prefix)
         false_path = [self.fake_dists_path, 'towel_stuff-0.1', 'towel_stuff',
             '__init__.py']
         false_path = relpath(os.path.join(*false_path), sys.prefix)
 
         # Test if the distribution uses the file in question
-        from distutils2._backport.pkgutil import Distribution
         dist = Distribution(distinfo_dir)
         self.assertTrue(dist.uses(true_path))
         self.assertFalse(dist.uses(false_path))
 
     def test_get_distinfo_file(self):
         # Test the retrieval of dist-info file objects.
-        from distutils2._backport.pkgutil import Distribution
         distinfo_name = 'choxie-2.0.0.9'
         other_distinfo_name = 'grammar-1.0a4'
         distinfo_dir = os.path.join(self.fake_dists_path,
-            distinfo_name + '.dist-info')
+                                    distinfo_name + '.dist-info')
         dist = Distribution(distinfo_dir)
         # Test for known good file matches
         distinfo_files = [
@@ -293,22 +299,20 @@
             # Is it the correct file?
             self.assertEqual(value.name, os.path.join(distinfo_dir, distfile))
 
-        from distutils2.errors import DistutilsError
         # Test an absolute path that is part of another distributions dist-info
         other_distinfo_file = os.path.join(self.fake_dists_path,
-            other_distinfo_name + '.dist-info', 'REQUESTED')
+                                           other_distinfo_name + '.dist-info', 'REQUESTED')
         self.assertRaises(DistutilsError, dist.get_distinfo_file,
-            other_distinfo_file)
+                          other_distinfo_file)
         # Test for a file that does not exist and should not exist
         self.assertRaises(DistutilsError, dist.get_distinfo_file, \
                           'ENTRYPOINTS')
 
     def test_get_distinfo_files(self):
         # Test for the iteration of RECORD path entries.
-        from distutils2._backport.pkgutil import Distribution
         distinfo_name = 'towel_stuff-0.1'
         distinfo_dir = os.path.join(self.fake_dists_path,
-            distinfo_name + '.dist-info')
+                                    distinfo_name + '.dist-info')
         dist = Distribution(distinfo_dir)
         # Test for the iteration of the raw path
         distinfo_record_paths = self.records[distinfo_dir].keys()
@@ -316,10 +320,20 @@
         self.assertEqual(sorted(found), sorted(distinfo_record_paths))
         # Test for the iteration of local absolute paths
         distinfo_record_paths = [os.path.join(sys.prefix, path)
-            for path in self.records[distinfo_dir].keys()]
+            for path in self.records[distinfo_dir]]
         found = [path for path in dist.get_distinfo_files(local=True)]
         self.assertEqual(sorted(found), sorted(distinfo_record_paths))
 
    def test_get_resources_path(self):
        # A distribution's RESOURCES mapping should resolve a declared
        # resource name to its path, and raise KeyError for unknown names.
        distinfo_name = 'babar-0.1'
        distinfo_dir = os.path.join(self.fake_dists_path,
                                    distinfo_name + '.dist-info')
        dist = Distribution(distinfo_dir)
        resource_path = dist.get_resource_path('babar.png')
        # The babar-0.1 fixture maps 'babar.png' onto itself.
        self.assertEqual(resource_path, 'babar.png')
        self.assertRaises(KeyError, dist.get_resource_path, 'notexist')
+
+
 
 class TestPkgUtilPEP376(support.LoggingCatcher, support.WarningsCatcher,
                         unittest.TestCase):
@@ -354,9 +368,6 @@
             ('python-ldap', '2.5 a---5', 'python_ldap-2.5 a---5.dist-info'),
             ]
 
-        # Import the function in question
-        from distutils2._backport.pkgutil import distinfo_dirname
-
         # Loop through the items to validate the results
         for name, version, standard_dirname in items:
             dirname = distinfo_dirname(name, version)
@@ -366,23 +377,18 @@
         # Lookup all distributions found in the ``sys.path``.
         # This test could potentially pick up other installed distributions
         fake_dists = [('grammar', '1.0a4'), ('choxie', '2.0.0.9'),
-            ('towel-stuff', '0.1')]
+                      ('towel-stuff', '0.1'), ('babar', '0.1')]
         found_dists = []
 
-        # Import the function in question
-        from distutils2._backport.pkgutil import get_distributions, \
-                                                 Distribution, \
-                                                 EggInfoDistribution
-
         # Verify the fake dists have been found.
         dists = [dist for dist in get_distributions()]
         for dist in dists:
             if not isinstance(dist, Distribution):
                 self.fail("item received was not a Distribution instance: "
-                    "%s" % type(dist))
-            if dist.name in dict(fake_dists).keys() and \
-               dist.path.startswith(self.fake_dists_path):
-                found_dists.append((dist.name, dist.metadata['version'],))
+                          "%s" % type(dist))
+            if dist.name in dict(fake_dists) and \
+                dist.path.startswith(self.fake_dists_path):
+                    found_dists.append((dist.name, dist.metadata['version'], ))
             else:
                 # check that it doesn't find anything more than this
                 self.assertFalse(dist.path.startswith(self.fake_dists_path))
@@ -393,6 +399,7 @@
 
         # Now, test if the egg-info distributions are found correctly as well
         fake_dists += [('bacon', '0.1'), ('cheese', '2.0.2'),
+                       ('coconuts-aster', '10.3'),
                        ('banana', '0.4'), ('strawberry', '0.6'),
                        ('truffles', '5.0'), ('nut', 'funkyversion')]
         found_dists = []
@@ -403,9 +410,9 @@
                     isinstance(dist, EggInfoDistribution)):
                 self.fail("item received was not a Distribution or "
                           "EggInfoDistribution instance: %s" % type(dist))
-            if dist.name in dict(fake_dists).keys() and \
-               dist.path.startswith(self.fake_dists_path):
-                found_dists.append((dist.name, dist.metadata['version']))
+            if dist.name in dict(fake_dists) and \
+                dist.path.startswith(self.fake_dists_path):
+                    found_dists.append((dist.name, dist.metadata['version']))
             else:
                 self.assertFalse(dist.path.startswith(self.fake_dists_path))
 
@@ -414,12 +421,7 @@
     def test_get_distribution(self):
         # Test for looking up a distribution by name.
         # Test the lookup of the towel-stuff distribution
-        name = 'towel-stuff' # Note: This is different from the directory name
-
-        # Import the function in question
-        from distutils2._backport.pkgutil import get_distribution, \
-                                                 Distribution, \
-                                                 EggInfoDistribution
+        name = 'towel-stuff'  # Note: This is different from the directory name
 
         # Lookup the distribution
         dist = get_distribution(name)
@@ -459,19 +461,15 @@
 
     def test_get_file_users(self):
         # Test the iteration of distributions that use a file.
-        from distutils2._backport.pkgutil import get_file_users, Distribution
         name = 'towel_stuff-0.1'
         path = os.path.join(self.fake_dists_path, name,
-            'towel_stuff', '__init__.py')
+                            'towel_stuff', '__init__.py')
         for dist in get_file_users(path):
             self.assertTrue(isinstance(dist, Distribution))
             self.assertEqual(dist.name, name)
 
     def test_provides(self):
         # Test for looking up distributions by what they provide
-        from distutils2._backport.pkgutil import provides_distribution
-        from distutils2.errors import DistutilsError
-
         checkLists = lambda x, y: self.assertListEqual(sorted(x), sorted(y))
 
         l = [dist.name for dist in provides_distribution('truffles')]
@@ -507,33 +505,30 @@
 
         l = [dist.name for dist in provides_distribution('truffles', '>1.5',
                                                          use_egg_info=True)]
-        checkLists(l, ['bacon', 'truffles'])
+        checkLists(l, ['bacon'])
 
         l = [dist.name for dist in provides_distribution('truffles', '>=1.0')]
         checkLists(l, ['choxie', 'towel-stuff'])
 
         l = [dist.name for dist in provides_distribution('strawberry', '0.6',
                                                          use_egg_info=True)]
-        checkLists(l, ['strawberry'])
+        checkLists(l, ['coconuts-aster'])
 
         l = [dist.name for dist in provides_distribution('strawberry', '>=0.5',
                                                          use_egg_info=True)]
-        checkLists(l, ['strawberry'])
-
+        checkLists(l, ['coconuts-aster'])
 
         l = [dist.name for dist in provides_distribution('strawberry', '>0.6',
                                                          use_egg_info=True)]
         checkLists(l, [])
 
-
         l = [dist.name for dist in provides_distribution('banana', '0.4',
                                                          use_egg_info=True)]
-        checkLists(l, ['banana'])
+        checkLists(l, ['coconuts-aster'])
 
         l = [dist.name for dist in provides_distribution('banana', '>=0.3',
                                                          use_egg_info=True)]
-        checkLists(l, ['banana'])
-
+        checkLists(l, ['coconuts-aster'])
 
         l = [dist.name for dist in provides_distribution('banana', '!=0.4',
                                                          use_egg_info=True)]
@@ -541,9 +536,6 @@
 
     def test_obsoletes(self):
         # Test looking for distributions based on what they obsolete
-        from distutils2._backport.pkgutil import obsoletes_distribution
-        from distutils2.errors import DistutilsError
-
         checkLists = lambda x, y: self.assertListEqual(sorted(x), sorted(y))
 
         l = [dist.name for dist in obsoletes_distribution('truffles', '1.0')]
@@ -553,7 +545,6 @@
                                                           use_egg_info=True)]
         checkLists(l, ['cheese', 'bacon'])
 
-
         l = [dist.name for dist in obsoletes_distribution('truffles', '0.8')]
         checkLists(l, ['choxie'])
 
@@ -573,14 +564,13 @@
 
     def test_yield_distribution(self):
         # tests the internal function _yield_distributions
-        from distutils2._backport.pkgutil import _yield_distributions
         checkLists = lambda x, y: self.assertListEqual(sorted(x), sorted(y))
 
         eggs = [('bacon', '0.1'), ('banana', '0.4'), ('strawberry', '0.6'),
                 ('truffles', '5.0'), ('cheese', '2.0.2'),
-                ('nut', 'funkyversion')]
+                ('coconuts-aster', '10.3'), ('nut', 'funkyversion')]
         dists = [('choxie', '2.0.0.9'), ('grammar', '1.0a4'),
-                 ('towel-stuff', '0.1')]
+                 ('towel-stuff', '0.1'), ('babar', '0.1')]
 
         checkLists([], _yield_distributions(False, False))
 
diff --git a/distutils2/_backport/tests/test_shutil.py b/distutils2/_backport/tests/test_shutil.py
new file mode 100644
--- /dev/null
+++ b/distutils2/_backport/tests/test_shutil.py
@@ -0,0 +1,945 @@
+import os
+import sys
+import tempfile
+import stat
+import tarfile
+from os.path import splitdrive
+from StringIO import StringIO
+
+from distutils.spawn import find_executable, spawn
+from distutils2._backport import shutil
+from distutils2._backport.shutil import (
+    _make_tarball, _make_zipfile, make_archive, unpack_archive,
+    register_archive_format, unregister_archive_format, get_archive_formats,
+    register_unpack_format, unregister_unpack_format, get_unpack_formats,
+    Error, RegistryError)
+
+from distutils2.tests import unittest, support, TESTFN
+
+try:
+    import bz2
+    BZ2_SUPPORTED = True
+except ImportError:
+    BZ2_SUPPORTED = False
+
+TESTFN2 = TESTFN + "2"
+
+try:
+    import grp
+    import pwd
+    UID_GID_SUPPORT = True
+except ImportError:
+    UID_GID_SUPPORT = False
+
+try:
+    import zlib
+except ImportError:
+    zlib = None
+
+try:
+    import zipfile
+    ZIP_SUPPORT = True
+except ImportError:
+    ZIP_SUPPORT = find_executable('zip')
+
+class TestShutil(unittest.TestCase):
+
    def setUp(self):
        # Track every directory created via self.mkdtemp() so tearDown()
        # can remove them.
        super(TestShutil, self).setUp()
        self.tempdirs = []
+
+    def tearDown(self):
+        super(TestShutil, self).tearDown()
+        while self.tempdirs:
+            d = self.tempdirs.pop()
+            shutil.rmtree(d, os.name in ('nt', 'cygwin'))
+
+    def write_file(self, path, content='xxx'):
+        """Writes a file in the given path.
+
+
+        path can be a string or a sequence.
+        """
+        if isinstance(path, (list, tuple)):
+            path = os.path.join(*path)
+        f = open(path, 'w')
+        try:
+            f.write(content)
+        finally:
+            f.close()
+
+    def mkdtemp(self):
+        """Create a temporary directory that will be cleaned up.
+
+        Returns the path of the directory.
+        """
+        d = tempfile.mkdtemp()
+        self.tempdirs.append(d)
+        return d
+
+    def test_rmtree_errors(self):
+        # filename is guaranteed not to exist
+        filename = tempfile.mktemp()
+        self.assertRaises(OSError, shutil.rmtree, filename)
+
    # See bug #1071513 for why we don't run this on cygwin
    # and bug #1076467 for why we don't run this as root.
    if (hasattr(os, 'chmod') and sys.platform[:6] != 'cygwin'
        and not (hasattr(os, 'geteuid') and os.geteuid() == 0)):
        def test_on_error(self):
            # Verify that rmtree() reports each failure through the onerror
            # callback; check_args_to_onerror() below validates every call
            # and advances self.errorState.
            self.errorState = 0
            os.mkdir(TESTFN)
            self.childpath = os.path.join(TESTFN, 'a')
            f = open(self.childpath, 'w')
            f.close()
            old_dir_mode = os.stat(TESTFN).st_mode
            old_child_mode = os.stat(self.childpath).st_mode
            # Make unwritable.
            os.chmod(self.childpath, stat.S_IREAD)
            os.chmod(TESTFN, stat.S_IREAD)

            shutil.rmtree(TESTFN, onerror=self.check_args_to_onerror)
            # Test whether onerror has actually been called.
            self.assertEqual(self.errorState, 2,
                             "Expected call to onerror function did not happen.")

            # Make writable again.
            os.chmod(TESTFN, old_dir_mode)
            os.chmod(self.childpath, old_child_mode)

            # Clean up.
            shutil.rmtree(TESTFN)
+
    def check_args_to_onerror(self, func, arg, exc):
        # Callback passed as rmtree(onerror=...) by test_on_error above.
        # self.errorState acts as a tiny state machine:
        #   0 = no failure seen yet, 1 = first (file-level) failure seen,
        #   2 = final rmdir failure seen (asserted by test_on_error).
        # test_rmtree_errors deliberately runs rmtree
        # on a directory that is chmod 400, which will fail.
        # This function is run when shutil.rmtree fails.
        # 99.9% of the time it initially fails to remove
        # a file in the directory, so the first time through
        # func is os.remove.
        # However, some Linux machines running ZFS on
        # FUSE experienced a failure earlier in the process
        # at os.listdir.  The first failure may legally
        # be either.
        if self.errorState == 0:
            if func is os.remove:
                self.assertEqual(arg, self.childpath)
            else:
                self.assertIs(func, os.listdir,
                              "func must be either os.remove or os.listdir")
                self.assertEqual(arg, TESTFN)
            self.assertTrue(issubclass(exc[0], OSError))
            self.errorState = 1
        else:
            # Second reported failure must be the rmdir of the tree root.
            self.assertEqual(func, os.rmdir)
            self.assertEqual(arg, TESTFN)
            self.assertTrue(issubclass(exc[0], OSError))
            self.errorState = 2
+
+    def test_rmtree_dont_delete_file(self):
+        # When called on a file instead of a directory, don't delete it.
+        handle, path = tempfile.mkstemp()
+        os.fdopen(handle).close()
+        self.assertRaises(OSError, shutil.rmtree, path)
+        os.remove(path)
+
+    def _write_data(self, path, data):
+        f = open(path, "w")
+        f.write(data)
+        f.close()
+
    def test_copytree_simple(self):
        # copytree() must reproduce files and subdirectories with their
        # contents intact.

        def read_data(path):
            # Helper: return the full contents of the file at *path*.
            f = open(path)
            data = f.read()
            f.close()
            return data

        src_dir = tempfile.mkdtemp()
        dst_dir = os.path.join(tempfile.mkdtemp(), 'destination')
        self._write_data(os.path.join(src_dir, 'test.txt'), '123')
        os.mkdir(os.path.join(src_dir, 'test_dir'))
        self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')

        try:
            shutil.copytree(src_dir, dst_dir)
            self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test.txt')))
            self.assertTrue(os.path.isdir(os.path.join(dst_dir, 'test_dir')))
            self.assertTrue(os.path.isfile(os.path.join(dst_dir, 'test_dir',
                                                        'test.txt')))
            actual = read_data(os.path.join(dst_dir, 'test.txt'))
            self.assertEqual(actual, '123')
            actual = read_data(os.path.join(dst_dir, 'test_dir', 'test.txt'))
            self.assertEqual(actual, '456')
        finally:
            # These dirs were made with tempfile.mkdtemp() directly (not
            # self.mkdtemp()), so tearDown() will not remove them; clean up
            # files first, then the trees.
            for path in (
                    os.path.join(src_dir, 'test.txt'),
                    os.path.join(dst_dir, 'test.txt'),
                    os.path.join(src_dir, 'test_dir', 'test.txt'),
                    os.path.join(dst_dir, 'test_dir', 'test.txt'),
                ):
                if os.path.exists(path):
                    os.remove(path)
            for path in (src_dir,
                    os.path.dirname(dst_dir)
                ):
                if os.path.exists(path):
                    shutil.rmtree(path)
+
+    def test_copytree_with_exclude(self):
+
+        def read_data(path):
+            f = open(path)
+            data = f.read()
+            f.close()
+            return data
+
+        # creating data
+        join = os.path.join
+        exists = os.path.exists
+        src_dir = tempfile.mkdtemp()
+        try:
+            dst_dir = join(tempfile.mkdtemp(), 'destination')
+            self._write_data(join(src_dir, 'test.txt'), '123')
+            self._write_data(join(src_dir, 'test.tmp'), '123')
+            os.mkdir(join(src_dir, 'test_dir'))
+            self._write_data(join(src_dir, 'test_dir', 'test.txt'), '456')
+            os.mkdir(join(src_dir, 'test_dir2'))
+            self._write_data(join(src_dir, 'test_dir2', 'test.txt'), '456')
+            os.mkdir(join(src_dir, 'test_dir2', 'subdir'))
+            os.mkdir(join(src_dir, 'test_dir2', 'subdir2'))
+            self._write_data(join(src_dir, 'test_dir2', 'subdir', 'test.txt'),
+                             '456')
+            self._write_data(join(src_dir, 'test_dir2', 'subdir2', 'test.py'),
+                             '456')
+
+
+            # testing glob-like patterns
+            try:
+                patterns = shutil.ignore_patterns('*.tmp', 'test_dir2')
+                shutil.copytree(src_dir, dst_dir, ignore=patterns)
+                # checking the result: some elements should not be copied
+                self.assertTrue(exists(join(dst_dir, 'test.txt')))
+                self.assertTrue(not exists(join(dst_dir, 'test.tmp')))
+                self.assertTrue(not exists(join(dst_dir, 'test_dir2')))
+            finally:
+                if os.path.exists(dst_dir):
+                    shutil.rmtree(dst_dir)
+            try:
+                patterns = shutil.ignore_patterns('*.tmp', 'subdir*')
+                shutil.copytree(src_dir, dst_dir, ignore=patterns)
+                # checking the result: some elements should not be copied
+                self.assertTrue(not exists(join(dst_dir, 'test.tmp')))
+                self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir2')))
+                self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir')))
+            finally:
+                if os.path.exists(dst_dir):
+                    shutil.rmtree(dst_dir)
+
+            # testing callable-style
+            try:
+                def _filter(src, names):
+                    res = []
+                    for name in names:
+                        path = os.path.join(src, name)
+
+                        if (os.path.isdir(path) and
+                            path.split()[-1] == 'subdir'):
+                            res.append(name)
+                        elif os.path.splitext(path)[-1] in ('.py'):
+                            res.append(name)
+                    return res
+
+                shutil.copytree(src_dir, dst_dir, ignore=_filter)
+
+                # checking the result: some elements should not be copied
+                self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir2',
+                                        'test.py')))
+                self.assertTrue(not exists(join(dst_dir, 'test_dir2', 'subdir')))
+
+            finally:
+                if os.path.exists(dst_dir):
+                    shutil.rmtree(dst_dir)
+        finally:
+            shutil.rmtree(src_dir)
+            shutil.rmtree(os.path.dirname(dst_dir))
+
    @support.skip_unless_symlink
    def test_dont_copy_file_onto_link_to_itself(self):
        # bug 851123: copying a file onto a hard link or symlink that points
        # back at the source must raise without clobbering the data.
        os.mkdir(TESTFN)
        src = os.path.join(TESTFN, 'cheese')
        dst = os.path.join(TESTFN, 'shop')
        try:
            f = open(src, 'w')
            f.write('cheddar')
            f.close()

            if hasattr(os, "link"):
                # Hard-link case: copyfile must fail and the source content
                # must be untouched.
                os.link(src, dst)
                self.assertRaises(shutil.Error, shutil.copyfile, src, dst)
                f = open(src, 'r')
                try:
                    self.assertEqual(f.read(), 'cheddar')
                finally:
                    f.close()
                os.remove(dst)

            # Using `src` here would mean we end up with a symlink pointing
            # to TESTFN/TESTFN/cheese, while it should point at
            # TESTFN/cheese.
            os.symlink('cheese', dst)
            self.assertRaises(shutil.Error, shutil.copyfile, src, dst)
            f = open(src, 'r')
            try:
                self.assertEqual(f.read(), 'cheddar')
            finally:
                f.close()
            os.remove(dst)
        finally:
            try:
                shutil.rmtree(TESTFN)
            except OSError:
                pass
+
+    @support.skip_unless_symlink
+    def test_rmtree_on_symlink(self):
+        # bug 1669.
+        os.mkdir(TESTFN)
+        try:
+            src = os.path.join(TESTFN, 'cheese')
+            dst = os.path.join(TESTFN, 'shop')
+            os.mkdir(src)
+            os.symlink(src, dst)
+            self.assertRaises(OSError, shutil.rmtree, dst)
+        finally:
+            shutil.rmtree(TESTFN, ignore_errors=True)
+
    if hasattr(os, "mkfifo"):
        # Issue #3002: copyfile and copytree block indefinitely on named pipes
        def test_copyfile_named_pipe(self):
            # copyfile() must raise SpecialFileError when either the source
            # or the destination is a fifo, instead of blocking on it.
            os.mkfifo(TESTFN)
            try:
                self.assertRaises(shutil.SpecialFileError,
                                  shutil.copyfile, TESTFN, TESTFN2)
                self.assertRaises(shutil.SpecialFileError,
                                  shutil.copyfile, __file__, TESTFN)
            finally:
                os.remove(TESTFN)
+
    @unittest.skipUnless(hasattr(os, 'mkfifo'), 'requires os.mkfifo')
    def test_copytree_named_pipe(self):
        # copytree() must report a named pipe via shutil.Error rather than
        # blocking on it (issue #3002).
        os.mkdir(TESTFN)
        try:
            subdir = os.path.join(TESTFN, "subdir")
            os.mkdir(subdir)
            pipe = os.path.join(subdir, "mypipe")
            os.mkfifo(pipe)
            try:
                shutil.copytree(TESTFN, TESTFN2)
            except shutil.Error, e:
                # copytree aggregates failures; exactly one is expected here.
                errors = e.args[0]
                self.assertEqual(len(errors), 1)
                src, dst, error_msg = errors[0]
                self.assertEqual("`%s` is a named pipe" % pipe, error_msg)
            else:
                self.fail("shutil.Error should have been raised")
        finally:
            shutil.rmtree(TESTFN, ignore_errors=True)
            shutil.rmtree(TESTFN2, ignore_errors=True)
+
+    def test_copytree_special_func(self):
+
+        src_dir = self.mkdtemp()
+        dst_dir = os.path.join(self.mkdtemp(), 'destination')
+        self._write_data(os.path.join(src_dir, 'test.txt'), '123')
+        os.mkdir(os.path.join(src_dir, 'test_dir'))
+        self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')
+
+        copied = []
+        def _copy(src, dst):
+            copied.append((src, dst))
+
+        shutil.copytree(src_dir, dst_dir, copy_function=_copy)
+        self.assertEquals(len(copied), 2)
+
    @support.skip_unless_symlink
    def test_copytree_dangling_symlinks(self):
        # copytree() behaviour for a symlink whose target does not exist,
        # under the three supported configurations.

        # a dangling symlink raises an error at the end
        src_dir = self.mkdtemp()
        dst_dir = os.path.join(self.mkdtemp(), 'destination')
        os.symlink('IDONTEXIST', os.path.join(src_dir, 'test.txt'))
        os.mkdir(os.path.join(src_dir, 'test_dir'))
        self._write_data(os.path.join(src_dir, 'test_dir', 'test.txt'), '456')
        self.assertRaises(Error, shutil.copytree, src_dir, dst_dir)

        # a dangling symlink is ignored with the proper flag
        dst_dir = os.path.join(self.mkdtemp(), 'destination2')
        shutil.copytree(src_dir, dst_dir, ignore_dangling_symlinks=True)
        self.assertNotIn('test.txt', os.listdir(dst_dir))

        # a dangling symlink is copied if symlinks=True
        dst_dir = os.path.join(self.mkdtemp(), 'destination3')
        shutil.copytree(src_dir, dst_dir, symlinks=True)
        self.assertIn('test.txt', os.listdir(dst_dir))
+
+    @unittest.skipUnless(zlib, "requires zlib")
+    def test_make_tarball(self):
+        # creating something to tar
+        tmpdir = self.mkdtemp()
+        self.write_file([tmpdir, 'file1'], 'xxx')
+        self.write_file([tmpdir, 'file2'], 'xxx')
+        os.mkdir(os.path.join(tmpdir, 'sub'))
+        self.write_file([tmpdir, 'sub', 'file3'], 'xxx')
+
+        tmpdir2 = self.mkdtemp()
+        unittest.skipUnless(splitdrive(tmpdir)[0] == splitdrive(tmpdir2)[0],
+                            "source and target should be on same drive")
+
+        base_name = os.path.join(tmpdir2, 'archive')
+
+        # working with relative paths to avoid tar warnings
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        try:
+            _make_tarball(splitdrive(base_name)[1], '.')
+        finally:
+            os.chdir(old_dir)
+
+        # check if the compressed tarball was created
+        tarball = base_name + '.tar.gz'
+        self.assertTrue(os.path.exists(tarball))
+
+        # trying an uncompressed one
+        base_name = os.path.join(tmpdir2, 'archive')
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        try:
+            _make_tarball(splitdrive(base_name)[1], '.', compress=None)
+        finally:
+            os.chdir(old_dir)
+        tarball = base_name + '.tar'
+        self.assertTrue(os.path.exists(tarball))
+
+    def _tarinfo(self, path):
+        tar = tarfile.open(path)
+        try:
+            names = tar.getnames()
+            names.sort()
+            return tuple(names)
+        finally:
+            tar.close()
+
+    def _create_files(self):
+        # creating something to tar
+        tmpdir = self.mkdtemp()
+        dist = os.path.join(tmpdir, 'dist')
+        os.mkdir(dist)
+        self.write_file([dist, 'file1'], 'xxx')
+        self.write_file([dist, 'file2'], 'xxx')
+        os.mkdir(os.path.join(dist, 'sub'))
+        self.write_file([dist, 'sub', 'file3'], 'xxx')
+        os.mkdir(os.path.join(dist, 'sub2'))
+        tmpdir2 = self.mkdtemp()
+        base_name = os.path.join(tmpdir2, 'archive')
+        return tmpdir, tmpdir2, base_name
+
+    @unittest.skipUnless(zlib, "Requires zlib")
+    @unittest.skipUnless(find_executable('tar') and find_executable('gzip'),
+                         'Need the tar command to run')
+    def test_tarfile_vs_tar(self):
+        tmpdir, tmpdir2, base_name =  self._create_files()
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        try:
+            _make_tarball(base_name, 'dist')
+        finally:
+            os.chdir(old_dir)
+
+        # check if the compressed tarball was created
+        tarball = base_name + '.tar.gz'
+        self.assertTrue(os.path.exists(tarball))
+
+        # now create another tarball using `tar`
+        tarball2 = os.path.join(tmpdir, 'archive2.tar.gz')
+        tar_cmd = ['tar', '-cf', 'archive2.tar', 'dist']
+        gzip_cmd = ['gzip', '-f9', 'archive2.tar']
+        old_dir = os.getcwd()
+        old_stdout = sys.stdout
+        os.chdir(tmpdir)
+        sys.stdout = StringIO()
+
+        try:
+            spawn(tar_cmd)
+            spawn(gzip_cmd)
+        finally:
+            os.chdir(old_dir)
+            sys.stdout = old_stdout
+
+        self.assertTrue(os.path.exists(tarball2))
+        # let's compare both tarballs
+        self.assertEquals(self._tarinfo(tarball), self._tarinfo(tarball2))
+
+        # trying an uncompressed one
+        base_name = os.path.join(tmpdir2, 'archive')
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        try:
+            _make_tarball(base_name, 'dist', compress=None)
+        finally:
+            os.chdir(old_dir)
+        tarball = base_name + '.tar'
+        self.assertTrue(os.path.exists(tarball))
+
+        # now for a dry_run
+        base_name = os.path.join(tmpdir2, 'archive')
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        try:
+            _make_tarball(base_name, 'dist', compress=None, dry_run=True)
+        finally:
+            os.chdir(old_dir)
+        tarball = base_name + '.tar'
+        self.assertTrue(os.path.exists(tarball))
+
    @unittest.skipUnless(zlib, "Requires zlib")
    @unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run')
    def test_make_zipfile(self):
        # creating something to zip up
        tmpdir = self.mkdtemp()
        self.write_file([tmpdir, 'file1'], 'xxx')
        self.write_file([tmpdir, 'file2'], 'xxx')

        tmpdir2 = self.mkdtemp()
        base_name = os.path.join(tmpdir2, 'archive')
        _make_zipfile(base_name, tmpdir)

        # check if the zip archive was created (the '.zip' suffix is
        # appended by _make_zipfile)
        tarball = base_name + '.zip'
        self.assertTrue(os.path.exists(tarball))
+
+
+    def test_make_archive(self):
+        tmpdir = self.mkdtemp()
+        base_name = os.path.join(tmpdir, 'archive')
+        self.assertRaises(ValueError, make_archive, base_name, 'xxx')
+
    @unittest.skipUnless(zlib, "Requires zlib")
    def test_make_archive_owner_group(self):
        # testing make_archive with owner and group, with various combinations
        # this works even if there's no gid/uid support on the platform
        if UID_GID_SUPPORT:
            group = grp.getgrgid(0)[0]
            owner = pwd.getpwuid(0)[0]
        else:
            group = owner = 'root'

        base_dir, root_dir, base_name =  self._create_files()
        base_name = os.path.join(self.mkdtemp() , 'archive')
        # zip: owner/group are accepted but ignored by the zip writer
        res = make_archive(base_name, 'zip', root_dir, base_dir, owner=owner,
                           group=group)
        self.assertTrue(os.path.exists(res))

        res = make_archive(base_name, 'zip', root_dir, base_dir)
        self.assertTrue(os.path.exists(res))

        res = make_archive(base_name, 'tar', root_dir, base_dir,
                           owner=owner, group=group)
        self.assertTrue(os.path.exists(res))

        # unknown owner/group names must not make archive creation fail
        res = make_archive(base_name, 'tar', root_dir, base_dir,
                           owner='kjhkjhkjg', group='oihohoh')
        self.assertTrue(os.path.exists(res))
+
+
+    @unittest.skipUnless(zlib, "Requires zlib")
+    @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support")
+    def test_tarfile_root_owner(self):
+        tmpdir, tmpdir2, base_name =  self._create_files()
+        old_dir = os.getcwd()
+        os.chdir(tmpdir)
+        group = grp.getgrgid(0)[0]
+        owner = pwd.getpwuid(0)[0]
+        try:
+            archive_name = _make_tarball(base_name, 'dist', compress=None,
+                                         owner=owner, group=group)
+        finally:
+            os.chdir(old_dir)
+
+        # check if the uncompressed tarball was created
+        self.assertTrue(os.path.exists(archive_name))
+
+        # now checks the rights
+        archive = tarfile.open(archive_name)
+        try:
+            for member in archive.getmembers():
+                self.assertEquals(member.uid, 0)
+                self.assertEquals(member.gid, 0)
+        finally:
+            archive.close()
+
+    def test_make_archive_cwd(self):
+        current_dir = os.getcwd()
+        def _breaks(*args, **kw):
+            raise RuntimeError()
+
+        register_archive_format('xxx', _breaks, [], 'xxx file')
+        try:
+            try:
+                make_archive('xxx', 'xxx', root_dir=self.mkdtemp())
+            except Exception:
+                pass
+            self.assertEquals(os.getcwd(), current_dir)
+        finally:
+            unregister_archive_format('xxx')
+
+    def test_register_archive_format(self):
+
+        self.assertRaises(TypeError, register_archive_format, 'xxx', 1)
+        self.assertRaises(TypeError, register_archive_format, 'xxx', lambda: x,
+                          1)
+        self.assertRaises(TypeError, register_archive_format, 'xxx', lambda: x,
+                          [(1, 2), (1, 2, 3)])
+
+        register_archive_format('xxx', lambda: x, [(1, 2)], 'xxx file')
+        formats = [name for name, params in get_archive_formats()]
+        self.assertIn('xxx', formats)
+
+        unregister_archive_format('xxx')
+        formats = [name for name, params in get_archive_formats()]
+        self.assertNotIn('xxx', formats)
+
+    def _compare_dirs(self, dir1, dir2):
+        # check that dir1 and dir2 are equivalent,
+        # return the diff
+        diff = []
+        for root, dirs, files in os.walk(dir1):
+            for file_ in files:
+                path = os.path.join(root, file_)
+                target_path = os.path.join(dir2, os.path.split(path)[-1])
+                if not os.path.exists(target_path):
+                    diff.append(file_)
+        return diff
+
+    @unittest.skipUnless(zlib, "Requires zlib")
+    def test_unpack_archive(self):
+        formats = ['tar', 'gztar', 'zip']
+        if BZ2_SUPPORTED:
+            formats.append('bztar')
+
+        for format in formats:
+            tmpdir = self.mkdtemp()
+            base_dir, root_dir, base_name =  self._create_files()
+            tmpdir2 = self.mkdtemp()
+            filename = make_archive(base_name, format, root_dir, base_dir)
+
+            # let's try to unpack it now
+            unpack_archive(filename, tmpdir2)
+            diff = self._compare_dirs(tmpdir, tmpdir2)
+            self.assertEquals(diff, [])
+
+    def test_unpack_registery(self):
+
+        formats = get_unpack_formats()
+
+        def _boo(filename, extract_dir, extra):
+            self.assertEquals(extra, 1)
+            self.assertEquals(filename, 'stuff.boo')
+            self.assertEquals(extract_dir, 'xx')
+
+        register_unpack_format('Boo', ['.boo', '.b2'], _boo, [('extra', 1)])
+        unpack_archive('stuff.boo', 'xx')
+
+        # trying to register a .boo unpacker again
+        self.assertRaises(RegistryError, register_unpack_format, 'Boo2',
+                          ['.boo'], _boo)
+
+        # should work now
+        unregister_unpack_format('Boo')
+        register_unpack_format('Boo2', ['.boo'], _boo)
+        self.assertIn(('Boo2', ['.boo'], ''), get_unpack_formats())
+        self.assertNotIn(('Boo', ['.boo'], ''), get_unpack_formats())
+
+        # let's leave a clean state
+        unregister_unpack_format('Boo2')
+        self.assertEquals(get_unpack_formats(), formats)
+
+
+class TestMove(unittest.TestCase):
+
+    def setUp(self):
+        filename = "foo"
+        self.src_dir = tempfile.mkdtemp()
+        self.dst_dir = tempfile.mkdtemp()
+        self.src_file = os.path.join(self.src_dir, filename)
+        self.dst_file = os.path.join(self.dst_dir, filename)
+        # Try to create a dir in the current directory, hoping that it is
+        # not located on the same filesystem as the system tmp dir.
+        try:
+            self.dir_other_fs = tempfile.mkdtemp(
+                dir=os.path.dirname(__file__))
+            self.file_other_fs = os.path.join(self.dir_other_fs,
+                filename)
+        except OSError:
+            self.dir_other_fs = None
+        f = open(self.src_file, "wb")
+        try:
+            f.write("spam")
+        finally:
+            f.close()
+
+    def tearDown(self):
+        for d in (self.src_dir, self.dst_dir, self.dir_other_fs):
+            try:
+                if d:
+                    shutil.rmtree(d)
+            except:
+                pass
+
+    def _check_move_file(self, src, dst, real_dst):
+        f = open(src, "rb")
+        try:
+            contents = f.read()
+        finally:
+            f.close()
+
+        shutil.move(src, dst)
+        f = open(real_dst, "rb")
+        try:
+            self.assertEqual(contents, f.read())
+        finally:
+            f.close()
+
+        self.assertFalse(os.path.exists(src))
+
+    def _check_move_dir(self, src, dst, real_dst):
+        contents = sorted(os.listdir(src))
+        shutil.move(src, dst)
+        self.assertEqual(contents, sorted(os.listdir(real_dst)))
+        self.assertFalse(os.path.exists(src))
+
+    def test_move_file(self):
+        # Move a file to another location on the same filesystem.
+        self._check_move_file(self.src_file, self.dst_file, self.dst_file)
+
+    def test_move_file_to_dir(self):
+        # Move a file inside an existing dir on the same filesystem.
+        self._check_move_file(self.src_file, self.dst_dir, self.dst_file)
+
+    def test_move_file_other_fs(self):
+        # Move a file to an existing dir on another filesystem.
+        if not self.dir_other_fs:
+            # skip
+            return
+        self._check_move_file(self.src_file, self.file_other_fs,
+            self.file_other_fs)
+
+    def test_move_file_to_dir_other_fs(self):
+        # Move a file to another location on another filesystem.
+        if not self.dir_other_fs:
+            # skip
+            return
+        self._check_move_file(self.src_file, self.dir_other_fs,
+            self.file_other_fs)
+
+    def test_move_dir(self):
+        # Move a dir to another location on the same filesystem.
+        dst_dir = tempfile.mktemp()
+        try:
+            self._check_move_dir(self.src_dir, dst_dir, dst_dir)
+        finally:
+            try:
+                shutil.rmtree(dst_dir)
+            except:
+                pass
+
+    def test_move_dir_other_fs(self):
+        # Move a dir to another location on another filesystem.
+        if not self.dir_other_fs:
+            # skip
+            return
+        dst_dir = tempfile.mktemp(dir=self.dir_other_fs)
+        try:
+            self._check_move_dir(self.src_dir, dst_dir, dst_dir)
+        finally:
+            try:
+                shutil.rmtree(dst_dir)
+            except:
+                pass
+
+    def test_move_dir_to_dir(self):
+        # Move a dir inside an existing dir on the same filesystem.
+        self._check_move_dir(self.src_dir, self.dst_dir,
+            os.path.join(self.dst_dir, os.path.basename(self.src_dir)))
+
+    def test_move_dir_to_dir_other_fs(self):
+        # Move a dir inside an existing dir on another filesystem.
+        if not self.dir_other_fs:
+            # skip
+            return
+        self._check_move_dir(self.src_dir, self.dir_other_fs,
+            os.path.join(self.dir_other_fs, os.path.basename(self.src_dir)))
+
+    def test_existing_file_inside_dest_dir(self):
+        # A file with the same name inside the destination dir already exists.
+        f = open(self.dst_file, "wb")
+        try:
+            pass
+        finally:
+            f.close()
+        self.assertRaises(shutil.Error, shutil.move, self.src_file, self.dst_dir)
+
+    def test_dont_move_dir_in_itself(self):
+        # Moving a dir inside itself raises an Error.
+        dst = os.path.join(self.src_dir, "bar")
+        self.assertRaises(shutil.Error, shutil.move, self.src_dir, dst)
+
+    def test_destinsrc_false_negative(self):
+        os.mkdir(TESTFN)
+        try:
+            for src, dst in [('srcdir', 'srcdir/dest')]:
+                src = os.path.join(TESTFN, src)
+                dst = os.path.join(TESTFN, dst)
+                self.assertTrue(shutil._destinsrc(src, dst),
+                             msg='_destinsrc() wrongly concluded that '
+                             'dst (%s) is not in src (%s)' % (dst, src))
+        finally:
+            shutil.rmtree(TESTFN, ignore_errors=True)
+
+    def test_destinsrc_false_positive(self):
+        os.mkdir(TESTFN)
+        try:
+            for src, dst in [('srcdir', 'src/dest'), ('srcdir', 'srcdir.new')]:
+                src = os.path.join(TESTFN, src)
+                dst = os.path.join(TESTFN, dst)
+                self.assertFalse(shutil._destinsrc(src, dst),
+                            msg='_destinsrc() wrongly concluded that '
+                            'dst (%s) is in src (%s)' % (dst, src))
+        finally:
+            shutil.rmtree(TESTFN, ignore_errors=True)
+
+
+class TestCopyFile(unittest.TestCase):
+
+    _delete = False
+
+    class Faux(object):
+        _entered = False
+        _exited_with = None
+        _raised = False
+
+        def __init__(self, raise_in_exit=False, suppress_at_exit=True):
+            self._raise_in_exit = raise_in_exit
+            self._suppress_at_exit = suppress_at_exit
+
+        def read(self, *args):
+            return ''
+
+        def __enter__(self):
+            self._entered = True
+
+        def __exit__(self, exc_type, exc_val, exc_tb):
+            self._exited_with = exc_type, exc_val, exc_tb
+            if self._raise_in_exit:
+                self._raised = True
+                raise IOError("Cannot close")
+            return self._suppress_at_exit
+
+    def tearDown(self):
+        if self._delete:
+            del shutil.open
+
+    def _set_shutil_open(self, func):
+        shutil.open = func
+        self._delete = True
+
+    def test_w_source_open_fails(self):
+        def _open(filename, mode='r'):
+            if filename == 'srcfile':
+                raise IOError('Cannot open "srcfile"')
+            assert 0  # shouldn't reach here.
+
+        self._set_shutil_open(_open)
+
+        self.assertRaises(IOError, shutil.copyfile, 'srcfile', 'destfile')
+
+    @unittest.skip("can't use the with statement and support 2.4")
+    def test_w_dest_open_fails(self):
+
+        srcfile = self.Faux()
+
+        def _open(filename, mode='r'):
+            if filename == 'srcfile':
+                return srcfile
+            if filename == 'destfile':
+                raise IOError('Cannot open "destfile"')
+            assert 0  # shouldn't reach here.
+
+        self._set_shutil_open(_open)
+
+        shutil.copyfile('srcfile', 'destfile')
+        self.assertTrue(srcfile._entered)
+        self.assertTrue(srcfile._exited_with[0] is IOError)
+        self.assertEqual(srcfile._exited_with[1].args,
+                         ('Cannot open "destfile"',))
+
+    @unittest.skip("can't use the with statement and support 2.4")
+    def test_w_dest_close_fails(self):
+
+        srcfile = self.Faux()
+        destfile = self.Faux(True)
+
+        def _open(filename, mode='r'):
+            if filename == 'srcfile':
+                return srcfile
+            if filename == 'destfile':
+                return destfile
+            assert 0  # shouldn't reach here.
+
+        self._set_shutil_open(_open)
+
+        shutil.copyfile('srcfile', 'destfile')
+        self.assertTrue(srcfile._entered)
+        self.assertTrue(destfile._entered)
+        self.assertTrue(destfile._raised)
+        self.assertTrue(srcfile._exited_with[0] is IOError)
+        self.assertEqual(srcfile._exited_with[1].args,
+                         ('Cannot close',))
+
+    @unittest.skip("can't use the with statement and support 2.4")
+    def test_w_source_close_fails(self):
+
+        srcfile = self.Faux(True)
+        destfile = self.Faux()
+
+        def _open(filename, mode='r'):
+            if filename == 'srcfile':
+                return srcfile
+            if filename == 'destfile':
+                return destfile
+            assert 0  # shouldn't reach here.
+
+        self._set_shutil_open(_open)
+
+        self.assertRaises(IOError,
+                          shutil.copyfile, 'srcfile', 'destfile')
+        self.assertTrue(srcfile._entered)
+        self.assertTrue(destfile._entered)
+        self.assertFalse(destfile._raised)
+        self.assertTrue(srcfile._exited_with[0] is None)
+        self.assertTrue(srcfile._raised)
+
+
+def test_suite():
+    suite = unittest.TestSuite()
+    load = unittest.defaultTestLoader.loadTestsFromTestCase
+    suite.addTest(load(TestCopyFile))
+    suite.addTest(load(TestMove))
+    suite.addTest(load(TestShutil))
+    return suite
+
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/distutils2/_backport/tests/test_sysconfig.py b/distutils2/_backport/tests/test_sysconfig.py
--- a/distutils2/_backport/tests/test_sysconfig.py
+++ b/distutils2/_backport/tests/test_sysconfig.py
@@ -4,7 +4,7 @@
 import sys
 import subprocess
 import shutil
-from copy import copy, deepcopy
+from copy import copy
 from ConfigParser import RawConfigParser
 from StringIO import StringIO
 
@@ -16,6 +16,7 @@
 
 from distutils2.tests import unittest, TESTFN, unlink
 from distutils2.tests.support import EnvironGuard
+from test.test_support import TESTFN, unlink
 
 try:
     from test.test_support import skip_unless_symlink
diff --git a/distutils2/command/__init__.py b/distutils2/command/__init__.py
--- a/distutils2/command/__init__.py
+++ b/distutils2/command/__init__.py
@@ -5,6 +5,9 @@
 from distutils2.errors import DistutilsModuleError
 from distutils2.util import resolve_name
 
+__all__ = ['get_command_names', 'set_command', 'get_command_class',
+           'STANDARD_COMMANDS']
+
 _COMMANDS = {
     'check': 'distutils2.command.check.check',
     'test': 'distutils2.command.test.test',
@@ -29,10 +32,12 @@
     'upload': 'distutils2.command.upload.upload',
     'upload_docs': 'distutils2.command.upload_docs.upload_docs'}
 
+STANDARD_COMMANDS = set(_COMMANDS)
+
 
 def get_command_names():
-    return sorted(_COMMANDS.keys())
     """Return registered commands"""
+    return sorted(_COMMANDS)
 
 
 def set_command(location):
diff --git a/distutils2/command/bdist.py b/distutils2/command/bdist.py
--- a/distutils2/command/bdist.py
+++ b/distutils2/command/bdist.py
@@ -4,7 +4,7 @@
 distribution)."""
 import os
 
-from distutils2.util import get_platform
+from distutils2 import util
 from distutils2.command.cmd import Command
 from distutils2.errors import DistutilsPlatformError, DistutilsOptionError
 
@@ -29,7 +29,7 @@
                      "temporary directory for creating built distributions"),
                     ('plat-name=', 'p',
                      "platform name to embed in generated filenames "
-                     "(default: %s)" % get_platform()),
+                     "(default: %s)" % util.get_platform()),
                     ('formats=', None,
                      "formats for distribution (comma-separated list)"),
                     ('dist-dir=', 'd',
@@ -87,7 +87,7 @@
         # have to finalize 'plat_name' before 'bdist_base'
         if self.plat_name is None:
             if self.skip_build:
-                self.plat_name = get_platform()
+                self.plat_name = util.get_platform()
             else:
                 self.plat_name = self.get_finalized_command('build').plat_name
 
diff --git a/distutils2/command/bdist_dumb.py b/distutils2/command/bdist_dumb.py
--- a/distutils2/command/bdist_dumb.py
+++ b/distutils2/command/bdist_dumb.py
@@ -87,7 +87,7 @@
         install.skip_build = self.skip_build
         install.warn_dir = 0
 
-        logger.info("installing to %s" % self.bdist_dir)
+        logger.info("installing to %s", self.bdist_dir)
         self.run_command('install_dist')
 
         # And make an archive relative to the root of the
@@ -106,11 +106,10 @@
         else:
             if (self.distribution.has_ext_modules() and
                 (install.install_base != install.install_platbase)):
-                raise DistutilsPlatformError, \
-                      ("can't make a dumb built distribution where "
-                       "base and platbase are different (%s, %s)"
-                       % (repr(install.install_base),
-                          repr(install.install_platbase)))
+                raise DistutilsPlatformError(
+                    "can't make a dumb built distribution where base and "
+                    "platbase are different (%r, %r)" %
+                    (install.install_base, install.install_platbase))
             else:
                 archive_root = os.path.join(
                     self.bdist_dir,
@@ -129,7 +128,7 @@
 
         if not self.keep_temp:
             if self.dry_run:
-                logger.info('Removing %s' % self.bdist_dir)
+                logger.info('removing %s', self.bdist_dir)
             else:
                 rmtree(self.bdist_dir)
 
diff --git a/distutils2/command/bdist_wininst.py b/distutils2/command/bdist_wininst.py
--- a/distutils2/command/bdist_wininst.py
+++ b/distutils2/command/bdist_wininst.py
@@ -192,7 +192,7 @@
 
         if not self.keep_temp:
             if self.dry_run:
-                logger.info('Removing %s' % self.bdist_dir)
+                logger.info('removing %s', self.bdist_dir)
             else:
                 rmtree(self.bdist_dir)
 
diff --git a/distutils2/command/build_ext.py b/distutils2/command/build_ext.py
--- a/distutils2/command/build_ext.py
+++ b/distutils2/command/build_ext.py
@@ -257,7 +257,7 @@
 
             elif MSVC_VERSION == 8:
                 self.library_dirs.append(os.path.join(sys.exec_prefix,
-                                         'PC', 'VS8.0', 'win32release'))
+                                         'PC', 'VS8.0'))
             elif MSVC_VERSION == 7:
                 self.library_dirs.append(os.path.join(sys.exec_prefix,
                                          'PC', 'VS7.1'))
diff --git a/distutils2/command/build_py.py b/distutils2/command/build_py.py
--- a/distutils2/command/build_py.py
+++ b/distutils2/command/build_py.py
@@ -8,7 +8,6 @@
 import logging
 from glob import glob
 
-import distutils2
 from distutils2.command.cmd import Command
 from distutils2.errors import DistutilsOptionError, DistutilsFileError
 from distutils2.util import convert_path
@@ -66,10 +65,9 @@
         self.packages = self.distribution.packages
         self.py_modules = self.distribution.py_modules
         self.package_data = self.distribution.package_data
-        self.package_dir = {}
-        if self.distribution.package_dir:
-            for name, path in self.distribution.package_dir.items():
-                self.package_dir[name] = convert_path(path)
+        self.package_dir = None
+        if self.distribution.package_dir is not None:
+            self.package_dir = convert_path(self.distribution.package_dir)
         self.data_files = self.get_data_files()
 
         # Ick, copied straight from install_lib.py (fancy_getopt needs a
@@ -164,11 +162,13 @@
 
         Helper function for `run()`.
         """
+        # FIXME add tests for this method
         for package, src_dir, build_dir, filenames in self.data_files:
             for filename in filenames:
                 target = os.path.join(build_dir, filename)
+                srcfile = os.path.join(src_dir, filename)
                 self.mkpath(os.path.dirname(target))
-                outf, copied = self.copy_file(os.path.join(src_dir, filename),
+                outf, copied = self.copy_file(srcfile,
                                target, preserve_mode=False)
                 if copied and srcfile in self.distribution.convert_2to3.doctests:
                     self._doctests_2to3.append(outf)
@@ -179,41 +179,14 @@
         """Return the directory, relative to the top of the source
            distribution, where package 'package' should be found
            (at least according to the 'package_dir' option, if any)."""
+        path = package.split('.')
+        if self.package_dir is not None:
+            path.insert(0, self.package_dir)
 
-        path = package.split('.')
+        if len(path) > 0:
+            return os.path.join(*path)
 
-        if not self.package_dir:
-            if path:
-                return os.path.join(*path)
-            else:
-                return ''
-        else:
-            tail = []
-            while path:
-                try:
-                    pdir = self.package_dir['.'.join(path)]
-                except KeyError:
-                    tail.insert(0, path[-1])
-                    del path[-1]
-                else:
-                    tail.insert(0, pdir)
-                    return os.path.join(*tail)
-            else:
-                # Oops, got all the way through 'path' without finding a
-                # match in package_dir.  If package_dir defines a directory
-                # for the root (nameless) package, then fallback on it;
-                # otherwise, we might as well have not consulted
-                # package_dir at all, as we just use the directory implied
-                # by 'tail' (which should be the same as the original value
-                # of 'path' at this point).
-                pdir = self.package_dir.get('')
-                if pdir is not None:
-                    tail.insert(0, pdir)
-
-                if tail:
-                    return os.path.join(*tail)
-                else:
-                    return ''
+        return ''
 
     def check_package(self, package, package_dir):
         """Helper function for `find_package_modules()` and `find_modules()'.
diff --git a/distutils2/command/check.py b/distutils2/command/check.py
--- a/distutils2/command/check.py
+++ b/distutils2/command/check.py
@@ -52,18 +52,19 @@
     def check_metadata(self):
         """Ensures that all required elements of metadata are supplied.
 
-        name, version, URL, (author and author_email) or
-        (maintainer and maintainer_email)).
+        name, version, URL, author
 
         Warns if any are missing.
         """
-        missing, __ = self.distribution.metadata.check()
+        missing, warnings = self.distribution.metadata.check(strict=True)
         if missing != []:
             self.warn("missing required metadata: %s"  % ', '.join(missing))
+        for warning in warnings:
+            self.warn(warning)
 
     def check_restructuredtext(self):
         """Checks if the long string fields are reST-compliant."""
-        missing, warnings = self.distribution.metadata.check()
+        missing, warnings = self.distribution.metadata.check(restructuredtext=True)
         if self.distribution.metadata.docutils_support:
             for warning in warnings:
                 line = warning[-1].get('line')
@@ -76,11 +77,11 @@
             raise DistutilsSetupError('The docutils package is needed.')
 
     def check_hooks_resolvable(self):
-        for options in self.distribution.command_options.values():
+        for options in self.distribution.command_options.itervalues():
             for hook_kind in ("pre_hook", "post_hook"):
                 if hook_kind not in options:
                     break
-                for hook_name in options[hook_kind][1].values():
+                for hook_name in options[hook_kind][1].itervalues():
                     try:
                         resolve_name(hook_name)
                     except ImportError:
diff --git a/distutils2/command/clean.py b/distutils2/command/clean.py
--- a/distutils2/command/clean.py
+++ b/distutils2/command/clean.py
@@ -48,7 +48,7 @@
         # gone)
         if os.path.exists(self.build_temp):
             if self.dry_run:
-                logger.info('Removing %s' % self.build_temp)
+                logger.info('removing %s', self.build_temp)
             else:
                 rmtree(self.build_temp)
         else:
@@ -62,7 +62,7 @@
                               self.build_scripts):
                 if os.path.exists(directory):
                     if self.dry_run:
-                        logger.info('Removing %s' % directory)
+                        logger.info('removing %s', directory)
                     else:
                         rmtree(directory)
                 else:
diff --git a/distutils2/command/cmd.py b/distutils2/command/cmd.py
--- a/distutils2/command/cmd.py
+++ b/distutils2/command/cmd.py
@@ -10,14 +10,7 @@
 from distutils2.errors import DistutilsOptionError
 from distutils2 import util
 from distutils2 import logger
-
-# XXX see if we want to backport this
-from distutils2._backport.shutil import copytree, copyfile, move
-
-try:
-    from shutil import make_archive
-except ImportError:
-    from distutils2._backport.shutil import make_archive
+from distutils2._backport.shutil import copytree, copyfile, move, make_archive
 
 
 class Command(object):
@@ -165,7 +158,10 @@
             header = "command options for '%s':" % self.get_command_name()
         self.announce(indent + header, level=logging.INFO)
         indent = indent + "  "
+        negative_opt = getattr(self, 'negative_opt', ())
         for (option, _, _) in self.user_options:
+            if option in negative_opt:
+                continue
             option = option.replace('-', '_')
             if option[-1] == "=":
                 option = option[:-1]
@@ -186,6 +182,7 @@
         raise RuntimeError(
             "abstract method -- subclass %s must override" % self.__class__)
 
+    # TODO remove this method, just use logging.info
     def announce(self, msg, level=logging.INFO):
         """If the current verbosity level is of greater than or equal to
         'level' print 'msg' to stdout.
@@ -367,8 +364,9 @@
 
     # -- External world manipulation -----------------------------------
 
+    # TODO remove this method, just use logging.warn
     def warn(self, msg):
-        logger.warning("warning: %s: %s\n" % (self.get_command_name(), msg))
+        logger.warning("warning: %s: %s\n", self.get_command_name(), msg)
 
     def execute(self, func, args, msg=None, level=1):
         util.execute(func, args, msg, dry_run=self.dry_run)
diff --git a/distutils2/command/config.py b/distutils2/command/config.py
--- a/distutils2/command/config.py
+++ b/distutils2/command/config.py
@@ -345,7 +345,7 @@
     If head is not None, will be dumped before the file content.
     """
     if head is None:
-        logger.info('%s' % filename)
+        logger.info(filename)
     else:
         logger.info(head)
     file = open(filename)
diff --git a/distutils2/command/install_data.py b/distutils2/command/install_data.py
--- a/distutils2/command/install_data.py
+++ b/distutils2/command/install_data.py
@@ -9,6 +9,8 @@
 import os
 from distutils2.command.cmd import Command
 from distutils2.util import change_root, convert_path
+from distutils2._backport.sysconfig import get_paths, format_value
+from distutils2._backport.shutil import Error
 
 class install_data(Command):
 
@@ -28,6 +30,7 @@
     def initialize_options(self):
         self.install_dir = None
         self.outfiles = []
+        self.data_files_out = []
         self.root = None
         self.force = 0
         self.data_files = self.distribution.data_files
@@ -40,54 +43,38 @@
 
     def run(self):
         self.mkpath(self.install_dir)
-        for f in self.data_files:
-            if isinstance(f, str):
-                # it's a simple file, so copy it
-                f = convert_path(f)
-                if self.warn_dir:
-                    self.warn("setup script did not provide a directory for "
-                              "'%s' -- installing right in '%s'" %
-                              (f, self.install_dir))
-                (out, _) = self.copy_file(f, self.install_dir)
-                self.outfiles.append(out)
-            else:
-                # it's a tuple with path to install to and a list of files
-                dir = convert_path(f[0])
-                if not os.path.isabs(dir):
-                    dir = os.path.join(self.install_dir, dir)
-                elif self.root:
-                    dir = change_root(self.root, dir)
-                self.mkpath(dir)
+        for file in self.data_files.items():
+            destination = convert_path(self.expand_categories(file[1]))
+            dir_dest = os.path.abspath(os.path.dirname(destination))
+            
+            self.mkpath(dir_dest)
+            try:
+                (out, _) = self.copy_file(file[0], dir_dest)
+            except Error, e:
+                self.warn(e)
+                out = destination
 
-                if f[1] == []:
-                    # If there are no files listed, the user must be
-                    # trying to create an empty directory, so add the
-                    # directory to the list of output files.
-                    self.outfiles.append(dir)
-                else:
-                    # Copy files, adding them to the list of output files.
-                    for data in f[1]:
-                        data = convert_path(data)
-                        (out, _) = self.copy_file(data, dir)
-                        self.outfiles.append(out)
+            self.outfiles.append(out)
+            self.data_files_out.append((file[0], destination))
+
+    def expand_categories(self, path_with_categories):
+        local_vars = get_paths()
+        local_vars['distribution.name'] = self.distribution.metadata['Name']
+        expanded_path = format_value(path_with_categories, local_vars)
+        expanded_path = format_value(expanded_path, local_vars)
+        if '{' in expanded_path and '}' in expanded_path:
+            self.warn("Unable to expand %s, some categories may missing." %
+                path_with_categories)
+        return expanded_path
 
     def get_source_files(self):
-        sources = []
-        for item in self.data_files:
-            if isinstance(item, str): # plain file
-                item = convert_path(item)
-                if os.path.isfile(item):
-                    sources.append(item)
-            else:    # a (dirname, filenames) tuple
-                dirname, filenames = item
-                for f in filenames:
-                    f = convert_path(f)
-                    if os.path.isfile(f):
-                        sources.append(f)
-        return sources
+        return self.data_files.keys()
 
     def get_inputs(self):
-        return self.data_files or []
+        return self.data_files.keys()
 
     def get_outputs(self):
         return self.outfiles
+
+    def get_resources_out(self):
+        return self.data_files_out
diff --git a/distutils2/command/install_dist.py b/distutils2/command/install_dist.py
--- a/distutils2/command/install_dist.py
+++ b/distutils2/command/install_dist.py
@@ -87,6 +87,8 @@
         ('record=', None,
          "filename in which to record a list of installed files "
          "(not PEP 376-compliant)"),
+        ('resources=', None,
+         "data files mapping"),
 
         # .dist-info related arguments, read by install_dist_info
         ('no-distinfo', None,
@@ -184,12 +186,14 @@
         #self.install_info = None
 
         self.record = None
+        self.resources = None
 
         # .dist-info related options
         self.no_distinfo = None
         self.installer = None
         self.requested = None
         self.no_record = None
+        self.no_resources = None
 
     # -- Option finalizing methods -------------------------------------
     # (This is rather more involved than for most commands,
@@ -418,13 +422,13 @@
             else:
                 opt_name = opt_name.replace('-', '_')
                 val = getattr(self, opt_name)
-            logger.debug("  %s: %s" % (opt_name, val))
+            logger.debug("  %s: %s", opt_name, val)
 
     def select_scheme(self, name):
         """Set the install directories by applying the install schemes."""
         # it's the caller's problem if they supply a bad name!
         scheme = get_paths(name, expand=False)
-        for key, value in scheme.items():
+        for key, value in scheme.iteritems():
             if key == 'platinclude':
                 key = 'headers'
                 value = os.path.join(value, self.distribution.metadata['Name'])
diff --git a/distutils2/command/install_distinfo.py b/distutils2/command/install_distinfo.py
--- a/distutils2/command/install_distinfo.py
+++ b/distutils2/command/install_distinfo.py
@@ -12,12 +12,12 @@
 
 # This file was created from the code for the former command install_egg_info
 
-import os
 import csv
-import re
-from distutils2.command.cmd import Command
 from distutils2 import logger
 from distutils2._backport.shutil import rmtree
+from distutils2.command.cmd import Command
+import os
+import re
 try:
     import hashlib
 except ImportError:
@@ -39,9 +39,11 @@
          "do not generate a REQUESTED file"),
         ('no-record', None,
          "do not generate a RECORD file"),
+        ('no-resources', None,
+         "do not generate a RESOURCES list installed file")
     ]
 
-    boolean_options = ['requested', 'no-record']
+    boolean_options = ['requested', 'no-record', 'no-resources']
 
     negative_opt = {'no-requested': 'requested'}
 
@@ -50,6 +52,7 @@
         self.installer = None
         self.requested = None
         self.no_record = None
+        self.no_resources = None
 
     def finalize_options(self):
         self.set_undefined_options('install_dist',
@@ -66,13 +69,16 @@
             self.requested = True
         if self.no_record is None:
             self.no_record = False
+        if self.no_resources is None:
+            self.no_resources = False
+
 
         metadata = self.distribution.metadata
 
         basename = "%s-%s.dist-info" % (
-            to_filename(safe_name(metadata['Name'])),
-            to_filename(safe_version(metadata['Version'])),
-        )
+                                        to_filename(safe_name(metadata['Name'])),
+                                        to_filename(safe_version(metadata['Version'])),
+                                        )
 
         self.distinfo_dir = os.path.join(self.distinfo_dir, basename)
         self.outputs = []
@@ -113,6 +119,25 @@
                 f.close()
                 self.outputs.append(requested_path)
 
+
+            if not self.no_resources:
+                install_data = self.get_finalized_command('install_data')
+                if install_data.get_resources_out() != []:
+                    resources_path = os.path.join(self.distinfo_dir,
+                                                  'RESOURCES')
+                    logger.info('creating %s', resources_path)
+                    f = open(resources_path, 'wb')
+                    try:
+                        writer = csv.writer(f, delimiter=',',
+                                            lineterminator=os.linesep,
+                                            quotechar='"')
+                        for tuple in install_data.get_resources_out():
+                            writer.writerow(tuple)
+
+                        self.outputs.append(resources_path)
+                    finally:
+                        f.close()
+
             if not self.no_record:
                 record_path = os.path.join(self.distinfo_dir, 'RECORD')
                 logger.info('creating %s', record_path)
@@ -142,6 +167,7 @@
                 finally:
                     f.close()
 
+
     def get_outputs(self):
         return self.outputs
 
diff --git a/distutils2/command/register.py b/distutils2/command/register.py
--- a/distutils2/command/register.py
+++ b/distutils2/command/register.py
@@ -11,13 +11,12 @@
 import urlparse
 import StringIO
 import logging
-from warnings import warn
 
 from distutils2.command.cmd import Command
 from distutils2 import logger
-from distutils2.util import (metadata_to_dict, read_pypirc, generate_pypirc,
-                             DEFAULT_REPOSITORY, DEFAULT_REALM,
-                             get_pypirc_path)
+from distutils2.metadata import metadata_to_dict
+from distutils2.util import (read_pypirc, generate_pypirc, DEFAULT_REPOSITORY,
+                             DEFAULT_REALM, get_pypirc_path)
 
 class register(Command):
 
@@ -33,8 +32,7 @@
          "stop the registration if the metadata is not fully compliant")
         ]
 
-    boolean_options = ['show-response', 'verify', 'list-classifiers',
-                       'strict']
+    boolean_options = ['show-response', 'list-classifiers', 'strict']
 
     def initialize_options(self):
         self.repository = None
@@ -48,16 +46,15 @@
             self.repository = DEFAULT_REPOSITORY
         if self.realm is None:
             self.realm = DEFAULT_REALM
-        # setting options for the `check` subcommand
-        check_options = {'strict': ('register', self.strict),
-                         'all': ('register', 1)}
-        self.distribution.command_options['check'] = check_options
 
     def run(self):
         self.finalize_options()
         self._set_config()
 
         # Check the package metadata
+        check = self.distribution.get_command_obj('check')
+        check.strict = self.strict
+        check.all = 1
         self.run_command('check')
 
         if self.dry_run:
@@ -67,16 +64,6 @@
         else:
             self.send_metadata()
 
-    def check_metadata(self):
-        """Deprecated API."""
-        warn("distutils.command.register.check_metadata is deprecated, \
-              use the check command instead", PendingDeprecationWarning)
-        check = self.distribution.get_command_obj('check')
-        check.ensure_finalized()
-        check.strict = self.strict
-        check.all = 1
-        check.run()
-
     def _set_config(self):
         ''' Reads the configuration file and set attributes.
         '''
@@ -105,7 +92,7 @@
         '''
         # send the info to the server and report the result
         code, result = self.post_to_server(self.build_post_data('verify'))
-        logger.info('Server response (%s): %s' % (code, result))
+        logger.info('server response (%s): %s', code, result)
 
 
     def send_metadata(self):
@@ -219,18 +206,17 @@
                 data['email'] = raw_input('   EMail: ')
             code, result = self.post_to_server(data)
             if code != 200:
-                logger.info('Server response (%s): %s' % (code, result))
+                logger.info('server response (%s): %s', code, result)
             else:
-                logger.info('You will receive an email shortly.')
-                logger.info(('Follow the instructions in it to '
-                             'complete registration.'))
+                logger.info('you will receive an email shortly; follow the '
+                            'instructions in it to complete registration.')
         elif choice == '3':
             data = {':action': 'password_reset'}
             data['email'] = ''
             while not data['email']:
                 data['email'] = raw_input('Your email address: ')
             code, result = self.post_to_server(data)
-            logger.info('Server response (%s): %s' % (code, result))
+            logger.info('server response (%s): %s', code, result)
 
     def build_post_data(self, action):
         # figure the data to send - the metadata plus some additional
@@ -252,7 +238,7 @@
         sep_boundary = '\n--' + boundary
         end_boundary = sep_boundary + '--'
         body = StringIO.StringIO()
-        for key, value in data.items():
+        for key, value in data.iteritems():
             # handle multiple entries for the same name
             if not isinstance(value, (tuple, list)):
                 value = [value]
diff --git a/distutils2/command/sdist.py b/distutils2/command/sdist.py
--- a/distutils2/command/sdist.py
+++ b/distutils2/command/sdist.py
@@ -2,10 +2,7 @@
 
 Implements the Distutils 'sdist' command (create a source distribution)."""
 import os
-import string
 import sys
-from glob import glob
-from warnings import warn
 from shutil import rmtree
 import re
 from StringIO import StringIO
@@ -18,10 +15,10 @@
 from distutils2.command import get_command_names
 from distutils2.command.cmd import Command
 from distutils2.errors import (DistutilsPlatformError, DistutilsOptionError,
-                               DistutilsTemplateError, DistutilsModuleError)
+                               DistutilsModuleError, DistutilsFileError)
 from distutils2.manifest import Manifest
 from distutils2 import logger
-from distutils2.util import convert_path, resolve_name
+from distutils2.util import resolve_name
 
 def show_formats():
     """Print all possible values for the 'formats' option (used by
@@ -214,8 +211,6 @@
 
     def add_defaults(self):
         """Add all the default files to self.filelist:
-          - README or README.txt
-          - test/test*.py
           - all pure Python modules mentioned in setup script
           - all files pointed by package_data (build_py)
           - all files defined in data_files.
@@ -225,32 +220,6 @@
         Warns if (README or README.txt) or setup.py are missing; everything
         else is optional.
         """
-        standards = [('README', 'README.txt')]
-        for fn in standards:
-            if isinstance(fn, tuple):
-                alts = fn
-                got_it = 0
-                for fn in alts:
-                    if os.path.exists(fn):
-                        got_it = 1
-                        self.filelist.append(fn)
-                        break
-
-                if not got_it:
-                    self.warn("standard file not found: should have one of " +
-                              string.join(alts, ', '))
-            else:
-                if os.path.exists(fn):
-                    self.filelist.append(fn)
-                else:
-                    self.warn("standard file '%s' not found" % fn)
-
-        optional = ['test/test*.py', 'setup.cfg']
-        for pattern in optional:
-            files = filter(os.path.isfile, glob(pattern))
-            if files:
-                self.filelist.extend(files)
-
         for cmd_name in get_command_names():
             try:
                 cmd_obj = self.get_finalized_command(cmd_name)
@@ -319,9 +288,15 @@
             logger.warn("no files to distribute -- empty manifest?")
         else:
             logger.info(msg)
+
+        for file in self.distribution.metadata.requires_files:
+            if file not in files:
+                msg = "'%s' must be included explicitly in 'extra_files'" % file
+                raise DistutilsFileError(msg)
+
         for file in files:
             if not os.path.isfile(file):
-                logger.warn("'%s' not a regular file -- skipping" % file)
+                logger.warn("'%s' not a regular file -- skipping", file)
             else:
                 dest = os.path.join(base_dir, file)
                 self.copy_file(file, dest, link=link)
@@ -357,7 +332,7 @@
 
         if not self.keep_temp:
             if self.dry_run:
-                logger.info('Removing %s' % base_dir)
+                logger.info('removing %s', base_dir)
             else:
                 rmtree(base_dir)
 
@@ -371,10 +346,8 @@
         need_dir = {}
         for file in files:
             need_dir[os.path.join(base_dir, os.path.dirname(file))] = 1
-        need_dirs = need_dir.keys()
-        need_dirs.sort()
+        need_dirs = sorted(need_dir)
 
         # Now create them
         for dir in need_dirs:
             self.mkpath(dir, mode, verbose=verbose, dry_run=dry_run)
-
diff --git a/distutils2/command/upload.py b/distutils2/command/upload.py
--- a/distutils2/command/upload.py
+++ b/distutils2/command/upload.py
@@ -20,8 +20,8 @@
 from distutils2.errors import DistutilsOptionError
 from distutils2.util import spawn
 from distutils2.command.cmd import Command
-from distutils2.util import (metadata_to_dict, read_pypirc,
-                             DEFAULT_REPOSITORY, DEFAULT_REALM)
+from distutils2.metadata import metadata_to_dict
+from distutils2.util import read_pypirc, DEFAULT_REPOSITORY, DEFAULT_REALM
 
 
 class upload(Command):
@@ -140,7 +140,7 @@
         body = StringIO()
         file_fields = ('content', 'gpg_signature')
 
-        for key, values in data.items():
+        for key, values in data.iteritems():
             # handle multiple entries for the same name
             if not isinstance(values, (tuple, list)):
                 values = [values]
diff --git a/distutils2/compat.py b/distutils2/compat.py
--- a/distutils2/compat.py
+++ b/distutils2/compat.py
@@ -7,10 +7,13 @@
 import logging
 
 
+# XXX Having two classes with the same name is not a good thing.
+# XXX 2to3-related code should move from util to this module
+
+# TODO Move common code here: PY3 (bool indicating if we're on 3.x), any, etc.
+
 try:
     from distutils2.util import Mixin2to3 as _Mixin2to3
-    from distutils2 import run_2to3_on_doctests
-    from lib2to3.refactor import get_fixers_from_package
     _CONVERT = True
     _KLASS = _Mixin2to3
 except ImportError:
@@ -20,6 +23,7 @@
 # marking public APIs
 __all__ = ['Mixin2to3']
 
+
 class Mixin2to3(_KLASS):
     """ The base class which can be used for refactoring. When run under
     Python 3.0, the run_2to3 method provided by Mixin2to3 is overridden.
@@ -46,19 +50,10 @@
             logging.info("Converting doctests with '.py' files")
             _KLASS.run_2to3(self, files, doctests_only=True)
 
-            # If the following conditions are met, then convert:-
-            # 1. User has specified the 'convert_2to3_doctests' option. So, we
-            #    can expect that the list 'doctests' is not empty.
-            # 2. The default is allow distutils2 to allow conversion of text files
-            #    containing doctests. It is set as
-            #    distutils2.run_2to3_on_doctests
-
-            if doctests != [] and run_2to3_on_doctests:
+            if doctests != []:
                 logging.info("Converting text files which contain doctests")
                 _KLASS.run_2to3(self, doctests, doctests_only=True)
     else:
         # If run on Python 2.x, there is nothing to do.
         def _run_2to3(self, files, doctests=[], fixers=[]):
             pass
-
-
diff --git a/distutils2/compiler/__init__.py b/distutils2/compiler/__init__.py
--- a/distutils2/compiler/__init__.py
+++ b/distutils2/compiler/__init__.py
@@ -127,7 +127,7 @@
     from distutils2.fancy_getopt import FancyGetopt
     compilers = []
 
-    for name, cls in _COMPILERS.items():
+    for name, cls in _COMPILERS.iteritems():
         if isinstance(cls, str):
             cls = resolve_name(cls)
             _COMPILERS[name] = cls
diff --git a/distutils2/compiler/bcppcompiler.py b/distutils2/compiler/bcppcompiler.py
--- a/distutils2/compiler/bcppcompiler.py
+++ b/distutils2/compiler/bcppcompiler.py
@@ -191,9 +191,8 @@
             self._fix_lib_args (libraries, library_dirs, runtime_library_dirs)
 
         if runtime_library_dirs:
-            logger.warning(("I don't know what to do with "
-                            "'runtime_library_dirs': %s"),
-                     str(runtime_library_dirs))
+            logger.warning("don't know what to do with "
+                           "'runtime_library_dirs': %r", runtime_library_dirs)
 
         if output_dir is not None:
             output_filename = os.path.join (output_dir, output_filename)
diff --git a/distutils2/compiler/ccompiler.py b/distutils2/compiler/ccompiler.py
--- a/distutils2/compiler/ccompiler.py
+++ b/distutils2/compiler/ccompiler.py
@@ -116,8 +116,8 @@
         # named library files) to include on any link
         self.objects = []
 
-        for key in self.executables.keys():
-            self.set_executable(key, self.executables[key])
+        for key, value in self.executables.iteritems():
+            self.set_executable(key, value)
 
     def set_executables(self, **args):
         """Define the executables (and options for them) that will be run
@@ -145,12 +145,12 @@
         # discovered at run-time, since there are many different ways to do
         # basically the same things with Unix C compilers.
 
-        for key in args.keys():
+        for key, value in args.iteritems():
             if key not in self.executables:
                 raise ValueError, \
                       "unknown executable '%s' for class %s" % \
                       (key, self.__class__.__name__)
-            self.set_executable(key, args[key])
+            self.set_executable(key, value)
 
     def set_executable(self, key, value):
         if isinstance(value, str):
@@ -850,6 +850,7 @@
 
     # -- Utility methods -----------------------------------------------
 
+    # TODO use logging.info
     def announce(self, msg, level=None):
         logger.debug(msg)
 
@@ -858,6 +859,7 @@
         if DEBUG:
             print msg
 
+    # TODO use logging.warn
     def warn(self, msg):
         sys.stderr.write("warning: %s\n" % msg)
 
diff --git a/distutils2/compiler/cygwinccompiler.py b/distutils2/compiler/cygwinccompiler.py
--- a/distutils2/compiler/cygwinccompiler.py
+++ b/distutils2/compiler/cygwinccompiler.py
@@ -358,27 +358,3 @@
     except IOError, exc:
         return (CONFIG_H_UNCERTAIN,
                 "couldn't read '%s': %s" % (fn, exc.strerror))
-
-class _Deprecated_SRE_Pattern(object):
-    def __init__(self, pattern):
-        self.pattern = pattern
-
-    def __getattr__(self, name):
-        if name in ('findall', 'finditer', 'match', 'scanner', 'search',
-                    'split', 'sub', 'subn'):
-            warn("'distutils.cygwinccompiler.RE_VERSION' is deprecated "
-                 "and will be removed in the next version", DeprecationWarning)
-        return getattr(self.pattern, name)
-
-RE_VERSION = _Deprecated_SRE_Pattern(re.compile('(\d+\.\d+(\.\d+)*)'))
-
-def get_versions():
-    """ Try to find out the versions of gcc, ld and dllwrap.
-
-    If not possible it returns None for it.
-    """
-    warn("'distutils.cygwinccompiler.get_versions' is deprecated "
-         "use 'distutils.util.get_compiler_versions' instead",
-         DeprecationWarning)
-
-    return get_compiler_versions()
diff --git a/distutils2/compiler/msvc9compiler.py b/distutils2/compiler/msvc9compiler.py
--- a/distutils2/compiler/msvc9compiler.py
+++ b/distutils2/compiler/msvc9compiler.py
@@ -153,7 +153,7 @@
                 self.macros["$(FrameworkVersion)"] = d["version"]
 
     def sub(self, s):
-        for k, v in self.macros.items():
+        for k, v in self.macros.iteritems():
             s = s.replace(k, v)
         return s
 
@@ -226,17 +226,17 @@
             productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC")
             productdir = os.path.abspath(productdir)
             if not os.path.isdir(productdir):
-                logger.debug("%s is not a valid directory" % productdir)
+                logger.debug("%s is not a valid directory", productdir)
                 return None
         else:
-            logger.debug("Env var %s is not set or invalid" % toolskey)
+            logger.debug("env var %s is not set or invalid", toolskey)
     if not productdir:
-        logger.debug("No productdir found")
+        logger.debug("no productdir found")
         return None
     vcvarsall = os.path.join(productdir, "vcvarsall.bat")
     if os.path.isfile(vcvarsall):
         return vcvarsall
-    logger.debug("Unable to find vcvarsall.bat")
+    logger.debug("unable to find vcvarsall.bat")
     return None
 
 def query_vcvarsall(version, arch="x86"):
@@ -248,7 +248,7 @@
 
     if vcvarsall is None:
         raise DistutilsPlatformError("Unable to find vcvarsall.bat")
-    logger.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version)
+    logger.debug("calling 'vcvarsall.bat %s' (version=%s)", arch, version)
     popen = subprocess.Popen('"%s" %s & set' % (vcvarsall, arch),
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
@@ -271,7 +271,7 @@
             result[key] = removeDuplicates(value)
 
     if len(result) != len(interesting):
-        raise ValueError(str(list(result.keys())))
+        raise ValueError(str(list(result)))
 
     return result
 
diff --git a/distutils2/compiler/msvccompiler.py b/distutils2/compiler/msvccompiler.py
--- a/distutils2/compiler/msvccompiler.py
+++ b/distutils2/compiler/msvccompiler.py
@@ -44,11 +44,10 @@
         RegError = win32api.error
 
     except ImportError:
-        logger.info("Warning: Can't read registry to find the "
-                 "necessary compiler setting\n"
-                 "Make sure that Python modules _winreg, "
-                 "win32api or win32con are installed.")
-        pass
+        logger.warning(
+            "can't read registry to find the necessary compiler setting;\n"
+            "make sure that Python modules _winreg, win32api or win32con "
+            "are installed.")
 
 if _can_read_reg:
     HKEYS = (hkey_mod.HKEY_USERS,
@@ -146,7 +145,7 @@
             self.macros["$(FrameworkVersion)"] = d["version"]
 
     def sub(self, s):
-        for k, v in self.macros.items():
+        for k, v in self.macros.iteritems():
             s = string.replace(s, k, v)
         return s
 
@@ -653,7 +652,7 @@
 
 
 if get_build_version() >= 8.0:
-    logger.debug("Importing new compiler from distutils.msvc9compiler")
+    logger.debug("importing new compiler from distutils.msvc9compiler")
     OldMSVCCompiler = MSVCCompiler
     from distutils2.compiler.msvc9compiler import MSVCCompiler
     # get_build_architecture not really relevant now we support cross-compile
diff --git a/distutils2/compiler/unixccompiler.py b/distutils2/compiler/unixccompiler.py
--- a/distutils2/compiler/unixccompiler.py
+++ b/distutils2/compiler/unixccompiler.py
@@ -97,9 +97,9 @@
         sysroot = compiler_so[idx+1]
 
     if sysroot and not os.path.isdir(sysroot):
-        logger.warning("Compiling with an SDK that doesn't seem to exist: %s",
-                sysroot)
-        logger.warning("Please check your Xcode installation")
+        logger.warning(
+            "compiling with an SDK that doesn't seem to exist: %r;\n"
+            "please check your Xcode installation", sysroot)
 
     return compiler_so
 
diff --git a/distutils2/config.py b/distutils2/config.py
--- a/distutils2/config.py
+++ b/distutils2/config.py
@@ -2,14 +2,39 @@
 
     Know how to read all config files Distutils2 uses.
 """
+import os.path
 import os
 import sys
+import logging
 from ConfigParser import RawConfigParser
+from shlex import split
 
 from distutils2 import logger
-from distutils2.util import check_environ, resolve_name
+from distutils2.errors import DistutilsOptionError
+from distutils2.compiler.extension import Extension
+from distutils2.util import check_environ, resolve_name, strtobool
 from distutils2.compiler import set_compiler
 from distutils2.command import set_command
+from distutils2.resources import resources_dests
+from distutils2.markers import interpret
+
+
+def _pop_values(values_dct, key):
+    """Remove values from the dictionary and convert them as a list"""
+    vals_str = values_dct.pop(key, '')
+    if not vals_str:
+        return
+    fields = []
+    for field in vals_str.split(os.linesep):
+        tmp_vals = field.split('--')
+        if (len(tmp_vals) == 2) and (not interpret(tmp_vals[1])):
+            continue
+        fields.append(tmp_vals[0])
+    # Get bash options like `gcc -print-file-name=libgcc.a`
+    vals = split(' '.join(fields))
+    if vals:
+        return vals
+
 
 class Config(object):
     """Reads configuration files and work with the Distribution instance
@@ -67,7 +92,8 @@
         if os.path.isfile(local_file):
             files.append(local_file)
 
-        logger.debug("using config files: %s" % ', '.join(files))
+        if logger.isEnabledFor(logging.DEBUG):
+            logger.debug("using config files: %s", ', '.join(files))
         return files
 
     def _convert_metadata(self, name, value):
@@ -76,13 +102,13 @@
         return value
 
     def _multiline(self, value):
-        if '\n' in value:
-            value = [v for v in
-                        [v.strip() for v in value.split('\n')]
-                        if v != '']
+        value = [v for v in
+                [v.strip() for v in value.split('\n')]
+                if v != '']
         return value
 
-    def _read_setup_cfg(self, parser):
+    def _read_setup_cfg(self, parser, cfg_filename):
+        cfg_directory = os.path.dirname(os.path.abspath(cfg_filename))
         content = {}
         for section in parser.sections():
             content[section] = dict(parser.items(section))
@@ -98,9 +124,11 @@
 
         # setting the metadata values
         if 'metadata' in content:
-            for key, value in content['metadata'].items():
+            for key, value in content['metadata'].iteritems():
                 key = key.replace('_', '-')
-                value = self._multiline(value)
+                if metadata.is_multi_field(key):
+                    value = self._multiline(value)
+
                 if key == 'project-url':
                     value = [(label.strip(), url.strip())
                              for label, url in
@@ -112,30 +140,46 @@
                                "mutually exclusive")
                         raise DistutilsOptionError(msg)
 
-                    f = open(value)    # will raise if file not found
-                    try:
-                        value = f.read()
-                    finally:
-                        f.close()
+                    if isinstance(value, list):
+                        filenames = value
+                    else:
+                        filenames = value.split()
+
+                    # concatenate all the files
+                    value = ''
+                    for filename in filenames:
+                        # will raise if file not found
+                        description_file = open(filename)
+                        try:
+                            value += description_file.read().strip() + '\n'
+                        finally:
+                            description_file.close()
+                        # add filename as a required file
+                        if filename not in metadata.requires_files:
+                            metadata.requires_files.append(filename)
+                    value = value.strip()
                     key = 'description'
 
                 if metadata.is_metadata_field(key):
                     metadata[key] = self._convert_metadata(key, value)
 
+
         if 'files' in content:
-            files = dict([(key, self._multiline(value))
-                          for key, value in content['files'].items()])
+            def _convert(key, value):
+                if key not in ('packages_root',):
+                    value = self._multiline(value)
+                return value
+
+            files = dict([(key, _convert(key, value))
+                          for key, value in content['files'].iteritems()])
             self.dist.packages = []
-            self.dist.package_dir = {}
+            self.dist.package_dir = files.get('packages_root')
 
             packages = files.get('packages', [])
             if isinstance(packages, str):
                 packages = [packages]
 
             for package in packages:
-                if ':' in package:
-                    dir_, package = package.split(':')
-                    self.dist.package_dir[package] = dir_
                 self.dist.packages.append(package)
 
             self.dist.py_modules = files.get('modules', [])
@@ -149,22 +193,55 @@
             for data in files.get('package_data', []):
                 data = data.split('=')
                 if len(data) != 2:
-                    continue
+                    continue # XXX error should never pass silently
                 key, value = data
                 self.dist.package_data[key.strip()] = value.strip()
 
-            self.dist.data_files = []
-            for data in files.get('data_files', []):
-                data = data.split('=')
-                if len(data) != 2:
-                    continue
-                key, value = data
-                values = [v.strip() for v in value.split(',')]
-                self.dist.data_files.append((key, values))
-
             # manifest template
             self.dist.extra_files = files.get('extra_files', [])
 
+            resources = []
+            for rule in files.get('resources', []):
+                glob , destination  = rule.split('=', 1)
+                rich_glob = glob.strip().split(' ', 1)
+                if len(rich_glob) == 2:
+                    prefix, suffix = rich_glob
+                else:
+                    assert len(rich_glob) == 1
+                    prefix = ''
+                    suffix = glob
+                if destination == '<exclude>':
+                    destination = None
+                resources.append((prefix.strip(), suffix.strip(), destination.strip()))
+                self.dist.data_files = resources_dests(cfg_directory, resources)
+
+        ext_modules = self.dist.ext_modules
+        for section_key in content:
+            labels = section_key.split('=')
+            if (len(labels) == 2) and (labels[0] == 'extension'):
+                # labels[1] not used from now but should be implemented
+                # for extension build dependency
+                values_dct = content[section_key]
+                ext_modules.append(Extension(
+                    values_dct.pop('name'),
+                    _pop_values(values_dct, 'sources'),
+                    _pop_values(values_dct, 'include_dirs'),
+                    _pop_values(values_dct, 'define_macros'),
+                    _pop_values(values_dct, 'undef_macros'),
+                    _pop_values(values_dct, 'library_dirs'),
+                    _pop_values(values_dct, 'libraries'),
+                    _pop_values(values_dct, 'runtime_library_dirs'),
+                    _pop_values(values_dct, 'extra_objects'),
+                    _pop_values(values_dct, 'extra_compile_args'),
+                    _pop_values(values_dct, 'extra_link_args'),
+                    _pop_values(values_dct, 'export_symbols'),
+                    _pop_values(values_dct, 'swig_opts'),
+                    _pop_values(values_dct, 'depends'),
+                    values_dct.pop('language', None),
+                    values_dct.pop('optional', None),
+                    **values_dct
+                ))
+
     def parse_config_files(self, filenames=None):
         if filenames is None:
             filenames = self.find_config_files()
@@ -174,11 +251,11 @@
         parser = RawConfigParser()
 
         for filename in filenames:
-            logger.debug("  reading %s" % filename)
+            logger.debug("  reading %s", filename)
             parser.read(filename)
 
             if os.path.split(filename)[-1] == 'setup.cfg':
-                self._read_setup_cfg(parser)
+                self._read_setup_cfg(parser, filename)
 
             for section in parser.sections():
                 if section == 'global':
@@ -223,7 +300,7 @@
         # If there was a "global" section in the config file, use it
         # to set Distribution options.
         if 'global' in self.dist.command_options:
-            for (opt, (src, val)) in self.dist.command_options['global'].items():
+            for (opt, (src, val)) in self.dist.command_options['global'].iteritems():
                 alias = self.dist.negative_opt.get(opt)
                 try:
                     if alias:
diff --git a/distutils2/depgraph.py b/distutils2/depgraph.py
--- a/distutils2/depgraph.py
+++ b/distutils2/depgraph.py
@@ -66,18 +66,28 @@
         """
         self.missing[distribution].append(requirement)
 
+    def _repr_dist(self, dist):
+        return '%s %s' % (dist.name, dist.metadata['Version'])
+
+    def repr_node(self, dist, level=1):
+        """Return a string representation of a subgraph"""
+        output = []
+        output.append(self._repr_dist(dist))
+        for other, label in self.adjacency_list[dist]:
+            dist = self._repr_dist(other)
+            if label is not None:
+                dist = '%s [%s]' % (dist, label)
+            output.append('    ' * level + '%s' % dist)
+            suboutput = self.repr_node(other, level + 1)
+            subs = suboutput.split('\n')
+            output.extend(subs[1:])
+        return '\n'.join(output)
+
     def __repr__(self):
         """Representation of the graph"""
-        def _repr_dist(dist):
-            return '%s %s' % (dist.name, dist.metadata['Version'])
         output = []
         for dist, adjs in self.adjacency_list.iteritems():
-            output.append(_repr_dist(dist))
-            for other, label in adjs:
-                dist = _repr_dist(other)
-                if label is not None:
-                    dist = '%s [%s]' % (dist, label)
-                output.append('    %s' % dist)
+            output.append(self.repr_node(dist))
         return '\n'.join(output)
 
 
@@ -123,13 +133,15 @@
     :rtype: an :class:`DependencyGraph` instance
     """
     graph = DependencyGraph()
-    provided = {} # maps names to lists of (version, dist) tuples
-    dists = list(dists) # maybe use generator_tools in future
+    provided = {}  # maps names to lists of (version, dist) tuples
+    dists = list(dists)  # maybe use generator_tools in future
 
     # first, build the graph and find out the provides
     for dist in dists:
         graph.add_distribution(dist)
-        provides = dist.metadata['Provides-Dist'] + dist.metadata['Provides']
+        provides = (dist.metadata['Provides-Dist'] +
+                    dist.metadata['Provides'] +
+                    ['%s (%s)' % (dist.name, dist.metadata['Version'])])
 
         for p in provides:
             comps = p.strip().rsplit(" ", 1)
@@ -140,7 +152,7 @@
                 if len(version) < 3 or version[0] != '(' or version[-1] != ')':
                     raise DistutilsError('Distribution %s has ill formed' \
                                          'provides field: %s' % (dist.name, p))
-                version = version[1:-1] # trim off parenthesis
+                version = version[1:-1]  # trim off parenthesis
             if not name in provided:
                 provided[name] = []
             provided[name].append((version, dist))
@@ -149,7 +161,13 @@
     for dist in dists:
         requires = dist.metadata['Requires-Dist'] + dist.metadata['Requires']
         for req in requires:
-            predicate = VersionPredicate(req)
+            try:
+                predicate = VersionPredicate(req)
+            except IrrationalVersionError:
+                # XXX compat-mode if cannot read the version
+                name = req.split()[0]
+                predicate = VersionPredicate(name)
+
             name = predicate.name
 
             if not name in provided:
@@ -161,7 +179,7 @@
                         match = predicate.match(version)
                     except IrrationalVersionError:
                         # XXX small compat-mode
-                        if version.split(' ' ) == 1:
+                        if len(version.split(' ')) == 1:
                             match = True
                         else:
                             match = False
@@ -172,7 +190,6 @@
                         break
                 if not matched:
                     graph.add_missing(dist, req)
-
     return graph
 
 
@@ -187,8 +204,8 @@
         raise ValueError('The given distribution is not a member of the list')
     graph = generate_graph(dists)
 
-    dep = [dist] # dependent distributions
-    fringe = graph.reverse_list[dist] # list of nodes we should inspect
+    dep = [dist]  # dependent distributions
+    fringe = graph.reverse_list[dist]  # list of nodes we should inspect
 
     while not len(fringe) == 0:
         node = fringe.pop()
@@ -197,9 +214,10 @@
             if not prev in dep:
                 fringe.append(prev)
 
-    dep.pop(0) # remove dist from dep, was there to prevent infinite loops
+    dep.pop(0)  # remove dist from dep, was there to prevent infinite loops
     return dep
 
+
 def main():
     from distutils2._backport.pkgutil import get_distributions
     tempout = StringIO()
diff --git a/distutils2/dist.py b/distutils2/dist.py
--- a/distutils2/dist.py
+++ b/distutils2/dist.py
@@ -5,7 +5,6 @@
 """
 
 
-import sys
 import os
 import re
 import warnings
@@ -16,9 +15,9 @@
 from distutils2.fancy_getopt import FancyGetopt
 from distutils2.util import strtobool, resolve_name
 from distutils2 import logger
-from distutils2.metadata import DistributionMetadata
+from distutils2.metadata import Metadata
 from distutils2.config import Config
-from distutils2.command import get_command_class
+from distutils2.command import get_command_class, STANDARD_COMMANDS
 
 # Regex to define acceptable Distutils command names.  This is not *quite*
 # the same as a Python NAME -- I don't allow leading underscores.  The fact
@@ -146,7 +145,7 @@
         # forth) in a separate object -- we're getting to have enough
         # information here (and enough command-line options) that it's
         # worth it.
-        self.metadata = DistributionMetadata()
+        self.metadata = Metadata()
 
         # 'cmdclass' maps command names to class objects, so we
         # can 1) quickly figure out which class to instantiate when
@@ -192,7 +191,7 @@
         self.include_dirs = []
         self.extra_path = None
         self.scripts = []
-        self.data_files = []
+        self.data_files = {}
         self.password = ''
         self.use_2to3 = False
         self.convert_2to3_doctests = []
@@ -228,14 +227,14 @@
             options = attrs.get('options')
             if options is not None:
                 del attrs['options']
-                for (command, cmd_options) in options.items():
+                for (command, cmd_options) in options.iteritems():
                     opt_dict = self.get_option_dict(command)
-                    for (opt, val) in cmd_options.items():
+                    for (opt, val) in cmd_options.iteritems():
                         opt_dict[opt] = ("setup script", val)
 
             # Now work on the rest of the attributes.  Any attribute that's
             # not already defined is invalid!
-            for key, val in attrs.items():
+            for key, val in attrs.iteritems():
                 if self.metadata.is_metadata_field(key):
                     self.metadata[key] = val
                 elif hasattr(self, key):
@@ -280,8 +279,7 @@
         from pprint import pformat
 
         if commands is None:             # dump all command option dicts
-            commands = self.command_options.keys()
-            commands.sort()
+            commands = sorted(self.command_options)
 
         if header is not None:
             self.announce(indent + header)
@@ -295,10 +293,10 @@
             opt_dict = self.command_options.get(cmd_name)
             if opt_dict is None:
                 self.announce(indent +
-                              "no option dict for '%s' command" % cmd_name)
+                              "no option dict for %r command" % cmd_name)
             else:
                 self.announce(indent +
-                              "option dict for '%s' command:" % cmd_name)
+                              "option dict for %r command:" % cmd_name)
                 out = pformat(opt_dict)
                 for line in out.split('\n'):
                     self.announce(indent + "  " + line)
@@ -403,7 +401,7 @@
         # Pull the current command from the head of the command line
         command = args[0]
         if not command_re.match(command):
-            raise SystemExit("invalid command name '%s'" % command)
+            raise SystemExit("invalid command name %r" % command)
         self.commands.append(command)
 
         # Dig up the command class that implements this command, so we
@@ -422,15 +420,15 @@
             if hasattr(cmd_class, meth):
                 continue
             raise DistutilsClassError(
-                  'command "%s" must implement "%s"' % (cmd_class, meth))
+                'command %r must implement %r' % (cmd_class, meth))
 
         # Also make sure that the command object provides a list of its
         # known options.
         if not (hasattr(cmd_class, 'user_options') and
                 isinstance(cmd_class.user_options, list)):
             raise DistutilsClassError(
-                  ("command class %s must provide "
-                   "'user_options' attribute (a list of tuples)") % cmd_class)
+                "command class %s must provide "
+                "'user_options' attribute (a list of tuples)" % cmd_class)
 
         # If the command class has a list of negative alias options,
         # merge it in with the global negative aliases.
@@ -468,7 +466,7 @@
                         func()
                     else:
                         raise DistutilsClassError(
-                            "invalid help function %r for help option '%s': "
+                            "invalid help function %r for help option %r: "
                             "must be a callable object (function, etc.)"
                             % (func, help_option))
 
@@ -478,7 +476,7 @@
         # Put the options from the command line into their official
         # holding pen, the 'command_options' dictionary.
         opt_dict = self.get_option_dict(command)
-        for (name, value) in vars(opts).items():
+        for (name, value) in vars(opts).iteritems():
             opt_dict[name] = ("command line", value)
 
         return args
@@ -539,7 +537,7 @@
                                         fix_help_options(cls.help_options))
             else:
                 parser.set_option_table(cls.user_options)
-            parser.print_help("Options for '%s' command:" % cls.__name__)
+            parser.print_help("Options for %r command:" % cls.__name__)
             print('')
 
         print(gen_usage(self.script_name))
@@ -591,31 +589,26 @@
         print(header + ":")
 
         for cmd in commands:
-            cls = self.cmdclass.get(cmd)
-            if not cls:
-                cls = get_command_class(cmd)
-            try:
-                description = cls.description
-            except AttributeError:
-                description = "(no description available)"
+            cls = self.cmdclass.get(cmd) or get_command_class(cmd)
+            description = getattr(cls, 'description',
+                                  '(no description available)')
 
             print("  %-*s  %s" % (max_length, cmd, description))
 
     def _get_command_groups(self):
         """Helper function to retrieve all the command class names divided
-        into standard commands (listed in distutils2.command.__all__)
-        and extra commands (given in self.cmdclass and not standard
-        commands).
+        into standard commands (listed in
+        distutils2.command.STANDARD_COMMANDS) and extra commands (given in
+        self.cmdclass and not standard commands).
         """
-        from distutils2.command import __all__ as std_commands
         extra_commands = [cmd for cmd in self.cmdclass
-                          if cmd not in std_commands]
-        return std_commands, extra_commands
+                          if cmd not in STANDARD_COMMANDS]
+        return STANDARD_COMMANDS, extra_commands
 
     def print_commands(self):
         """Print out a help message listing all available commands with a
         description of each.  The list is divided into standard commands
-        (listed in distutils2.command.__all__) and extra commands
+        (listed in distutils2.command.STANDARD_COMMANDS) and extra commands
         (given in self.cmdclass and not standard commands).  The
         descriptions come from the command class attribute
         'description'.
@@ -635,10 +628,8 @@
                                     "Extra commands",
                                     max_length)
 
-
     # -- Command class/object methods ----------------------------------
 
-
     def get_command_obj(self, command, create=1):
         """Return the command object for 'command'.  Normally this object
         is cached on a previous call to 'get_command_obj()'; if no command
@@ -648,7 +639,7 @@
         cmd_obj = self.command_obj.get(command)
         if not cmd_obj and create:
             logger.debug("Distribution.get_command_obj(): " \
-                         "creating '%s' command object" % command)
+                         "creating %r command object", command)
 
             cls = get_command_class(command)
             cmd_obj = self.command_obj[command] = cls(self)
@@ -678,10 +669,10 @@
         if option_dict is None:
             option_dict = self.get_option_dict(command_name)
 
-        logger.debug("  setting options for '%s' command:" % command_name)
+        logger.debug("  setting options for %r command:", command_name)
 
-        for (option, (source, value)) in option_dict.items():
-            logger.debug("    %s = %s (from %s)" % (option, value, source))
+        for (option, (source, value)) in option_dict.iteritems():
+            logger.debug("    %s = %s (from %s)", option, value, source)
             try:
                 bool_opts = [x.replace('-', '_')
                              for x in command_obj.boolean_options]
@@ -702,7 +693,7 @@
                     setattr(command_obj, option, value)
                 else:
                     raise DistutilsOptionError(
-                        "error in %s: command '%s' has no such option '%s'" %
+                        "error in %s: command %r has no such option %r" %
                         (source, command_name, option))
             except ValueError, msg:
                 raise DistutilsOptionError(msg)
diff --git a/distutils2/errors.py b/distutils2/errors.py
--- a/distutils2/errors.py
+++ b/distutils2/errors.py
@@ -9,7 +9,6 @@
 symbols whose names start with "Distutils" and end with "Error"."""
 
 
-
 class DistutilsError(Exception):
     """The root of all Distutils evil."""
 
@@ -110,6 +109,10 @@
     """Attempt to process an unknown file type."""
 
 
+class MetadataMissingError(DistutilsError):
+    """A required metadata is missing"""
+
+
 class MetadataConflictError(DistutilsError):
     """Attempt to read or write metadata fields that are conflictual."""
 
@@ -131,3 +134,11 @@
     This guard can be disabled by setting that option False.
     """
     pass
+
+
+class InstallationException(Exception):
+    """Base exception for installation scripts"""
+
+
+class InstallationConflict(InstallationException):
+    """Raised when a conflict is detected"""
diff --git a/distutils2/fancy_getopt.py b/distutils2/fancy_getopt.py
--- a/distutils2/fancy_getopt.py
+++ b/distutils2/fancy_getopt.py
@@ -26,6 +26,7 @@
 # For recognizing "negative alias" options, eg. "quiet=!verbose"
 neg_alias_re = re.compile("^(%s)=!(%s)$" % (longopt_pat, longopt_pat))
 
+
 class FancyGetopt(object):
     """Wrapper around the standard 'getopt()' module that provides some
     handy extra functionality:
@@ -37,7 +38,6 @@
         --quiet is the "negative alias" of --verbose, then "--quiet"
         on the command line sets 'verbose' to false
     """
-
     def __init__(self, option_table=None):
 
         # The option table is (currently) a list of tuples.  The
@@ -79,51 +79,47 @@
         # but expands short options, converts aliases, etc.
         self.option_order = []
 
-    # __init__ ()
-
-
-    def _build_index (self):
+    def _build_index(self):
         self.option_index.clear()
         for option in self.option_table:
             self.option_index[option[0]] = option
 
-    def set_option_table (self, option_table):
+    def set_option_table(self, option_table):
         self.option_table = option_table
         self._build_index()
 
-    def add_option (self, long_option, short_option=None, help_string=None):
+    def add_option(self, long_option, short_option=None, help_string=None):
         if long_option in self.option_index:
-            raise DistutilsGetoptError, \
-                  "option conflict: already an option '%s'" % long_option
+            raise DistutilsGetoptError(
+                  "option conflict: already an option '%s'" % long_option)
         else:
             option = (long_option, short_option, help_string)
             self.option_table.append(option)
             self.option_index[long_option] = option
 
-
-    def has_option (self, long_option):
+    def has_option(self, long_option):
         """Return true if the option table for this parser has an
         option with long name 'long_option'."""
         return long_option in self.option_index
 
-    def _check_alias_dict (self, aliases, what):
+    def _check_alias_dict(self, aliases, what):
         assert isinstance(aliases, dict)
-        for (alias, opt) in aliases.items():
+        for (alias, opt) in aliases.iteritems():
             if alias not in self.option_index:
-                raise DistutilsGetoptError, \
+                raise DistutilsGetoptError(
                       ("invalid %s '%s': "
-                       "option '%s' not defined") % (what, alias, alias)
+                       "option '%s' not defined") % (what, alias, alias))
             if opt not in self.option_index:
-                raise DistutilsGetoptError, \
+                raise DistutilsGetoptError(
                       ("invalid %s '%s': "
-                       "aliased option '%s' not defined") % (what, alias, opt)
+                       "aliased option '%s' not defined") % (what, alias, opt))
 
-    def set_aliases (self, alias):
+    def set_aliases(self, alias):
         """Set the aliases for this option parser."""
         self._check_alias_dict(alias, "alias")
         self.alias = alias
 
-    def set_negative_aliases (self, negative_alias):
+    def set_negative_aliases(self, negative_alias):
         """Set the negative aliases for this option parser.
         'negative_alias' should be a dictionary mapping option names to
         option names, both the key and value must already be defined
@@ -131,8 +127,7 @@
         self._check_alias_dict(negative_alias, "negative alias")
         self.negative_alias = negative_alias
 
-
-    def _grok_option_table (self):
+    def _grok_option_table(self):
         """Populate the various data structures that keep tabs on the
         option table.  Called by 'getopt()' before it can do anything
         worthwhile.
@@ -151,19 +146,19 @@
             else:
                 # the option table is part of the code, so simply
                 # assert that it is correct
-                raise ValueError, "invalid option tuple: %r" % (option,)
+                raise ValueError("invalid option tuple: %r" % option)
 
             # Type- and value-check the option names
             if not isinstance(long, str) or len(long) < 2:
-                raise DistutilsGetoptError, \
+                raise DistutilsGetoptError(
                       ("invalid long option '%s': "
-                       "must be a string of length >= 2") % long
+                       "must be a string of length >= 2") % long)
 
             if (not ((short is None) or
                      (isinstance(short, str) and len(short) == 1))):
-                raise DistutilsGetoptError, \
+                raise DistutilsGetoptError(
                       ("invalid short option '%s': "
-                       "must a single character or None") % short
+                       "must a single character or None") % short)
 
             self.repeat[long] = repeat
             self.long_opts.append(long)
@@ -180,12 +175,12 @@
                 alias_to = self.negative_alias.get(long)
                 if alias_to is not None:
                     if self.takes_arg[alias_to]:
-                        raise DistutilsGetoptError, \
+                        raise DistutilsGetoptError(
                               ("invalid negative alias '%s': "
                                "aliased option '%s' takes a value") % \
-                               (long, alias_to)
+                               (long, alias_to))
 
-                    self.long_opts[-1] = long # XXX redundant?!
+                    self.long_opts[-1] = long   # XXX redundant?!
                     self.takes_arg[long] = 0
 
                 else:
@@ -196,32 +191,26 @@
             alias_to = self.alias.get(long)
             if alias_to is not None:
                 if self.takes_arg[long] != self.takes_arg[alias_to]:
-                    raise DistutilsGetoptError, \
+                    raise DistutilsGetoptError(
                           ("invalid alias '%s': inconsistent with "
                            "aliased option '%s' (one of them takes a value, "
-                           "the other doesn't") % (long, alias_to)
-
+                           "the other doesn't") % (long, alias_to))
 
             # Now enforce some bondage on the long option name, so we can
             # later translate it to an attribute name on some object.  Have
             # to do this a bit late to make sure we've removed any trailing
             # '='.
             if not longopt_re.match(long):
-                raise DistutilsGetoptError, \
+                raise DistutilsGetoptError(
                       ("invalid long option name '%s' " +
-                       "(must be letters, numbers, hyphens only") % long
+                       "(must be letters, numbers, hyphens only") % long)
 
             self.attr_name[long] = long.replace('-', '_')
             if short:
                 self.short_opts.append(short)
                 self.short2long[short[0]] = long
 
-        # for option_table
-
-    # _grok_option_table()
-
-
-    def getopt (self, args=None, object=None):
+    def getopt(self, args=None, object=None):
         """Parse command-line options in args. Store as attributes on object.
 
         If 'args' is None or not supplied, uses 'sys.argv[1:]'.  If
@@ -246,10 +235,10 @@
         try:
             opts, args = getopt.getopt(args, short_opts, self.long_opts)
         except getopt.error, msg:
-            raise DistutilsArgError, msg
+            raise DistutilsArgError(msg)
 
         for opt, val in opts:
-            if len(opt) == 2 and opt[0] == '-': # it's a short option
+            if len(opt) == 2 and opt[0] == '-':   # it's a short option
                 opt = self.short2long[opt[1]]
             else:
                 assert len(opt) > 2 and opt[:2] == '--'
@@ -282,21 +271,17 @@
         else:
             return args
 
-    # getopt()
-
-
-    def get_option_order (self):
+    def get_option_order(self):
         """Returns the list of (option, value) tuples processed by the
         previous run of 'getopt()'.  Raises RuntimeError if
         'getopt()' hasn't been called yet.
         """
         if self.option_order is None:
-            raise RuntimeError, "'getopt()' hasn't been called yet"
-        else:
-            return self.option_order
+            raise RuntimeError("'getopt()' hasn't been called yet")
 
+        return self.option_order
 
-    def generate_help (self, header=None):
+    def generate_help(self, header=None):
         """Generate help text (a list of strings, one per suggested line of
         output) from the option table for this FancyGetopt object.
         """
@@ -374,32 +359,87 @@
             for l in text[1:]:
                 lines.append(big_indent + l)
 
-        # for self.option_table
-
         return lines
 
-    # generate_help ()
-
-    def print_help (self, header=None, file=None):
+    def print_help(self, header=None, file=None):
         if file is None:
             file = sys.stdout
         for line in self.generate_help(header):
             file.write(line + "\n")
 
-# class FancyGetopt
 
-
-def fancy_getopt (options, negative_opt, object, args):
+def fancy_getopt(options, negative_opt, object, args):
     parser = FancyGetopt(options)
     parser.set_negative_aliases(negative_opt)
     return parser.getopt(args, object)
 
 
+
+WS_TRANS = string.maketrans(string.whitespace, ' ' * len(string.whitespace))
+
+
+def wrap_text(text, width):
+    """wrap_text(text : string, width : int) -> [string]
+
+    Split 'text' into multiple lines of no more than 'width' characters
+    each, and return the list of strings that results.
+    """
+
+    if text is None:
+        return []
+    if len(text) <= width:
+        return [text]
+
+    text = string.expandtabs(text)
+    text = string.translate(text, WS_TRANS)
+    chunks = re.split(r'( +|-+)', text)
+    chunks = filter(None, chunks)      # ' - ' results in empty strings
+    lines = []
+
+    while chunks:
+
+        cur_line = []                   # list of chunks (to-be-joined)
+        cur_len = 0                     # length of current line
+
+        while chunks:
+            l = len(chunks[0])
+            if cur_len + l <= width:    # can squeeze (at least) this chunk in
+                cur_line.append(chunks[0])
+                del chunks[0]
+                cur_len = cur_len + l
+            else:                       # this line is full
+                # drop last chunk if all space
+                if cur_line and cur_line[-1][0] == ' ':
+                    del cur_line[-1]
+                break
+
+        if chunks:                      # any chunks left to process?
+
+            # if the current line is still empty, then we had a single
+            # chunk that's too big too fit on a line -- so we break
+            # down and break it up at the line width
+            if cur_len == 0:
+                cur_line.append(chunks[0][0:width])
+                chunks[0] = chunks[0][width:]
+
+            # all-whitespace chunks at the end of a line can be discarded
+            # (and we know from the re.split above that if a chunk has
+            # *any* whitespace, it is *all* whitespace)
+            if chunks[0][0] == ' ':
+                del chunks[0]
+
+        # and store this line in the list-of-all-lines -- as a single
+        # string, of course!
+        lines.append(string.join(cur_line, ''))
+
+    return lines
+
+
 class OptionDummy(object):
     """Dummy class just used as a place to hold command-line option
     values as instance attributes."""
 
-    def __init__ (self, options=[]):
+    def __init__(self, options=[]):
         """Create a new OptionDummy instance.  The attributes listed in
         'options' will be initialized to None."""
         for opt in options:
diff --git a/distutils2/index/__init__.py b/distutils2/index/__init__.py
--- a/distutils2/index/__init__.py
+++ b/distutils2/index/__init__.py
@@ -6,6 +6,6 @@
            'xmlrpc',
            'dist',
            'errors',
-           'mirrors',]
+           'mirrors']
 
 from dist import ReleaseInfo, ReleasesList, DistInfo
diff --git a/distutils2/index/dist.py b/distutils2/index/dist.py
--- a/distutils2/index/dist.py
+++ b/distutils2/index/dist.py
@@ -17,19 +17,19 @@
 import urllib
 import urlparse
 import zipfile
-
 try:
     import hashlib
 except ImportError:
     from distutils2._backport import hashlib
 
+from distutils2._backport.shutil import unpack_archive
 from distutils2.errors import IrrationalVersionError
 from distutils2.index.errors import (HashDoesNotMatch, UnsupportedHashName,
                                      CantParseArchiveName)
 from distutils2.version import (suggest_normalized_version, NormalizedVersion,
                                 get_version_predicate)
-from distutils2.metadata import DistributionMetadata
-from distutils2.util import untar_file, unzip_file, splitext
+from distutils2.metadata import Metadata
+from distutils2.util import splitext
 
 __all__ = ['ReleaseInfo', 'DistInfo', 'ReleasesList', 'get_infos_from_url']
 
@@ -66,7 +66,7 @@
         self._version = None
         self.version = version
         if metadata:
-            self.metadata = DistributionMetadata(mapping=metadata)
+            self.metadata = Metadata(mapping=metadata)
         else:
             self.metadata = None
         self.dists = {}
@@ -101,7 +101,7 @@
     def is_final(self):
         """proxy to version.is_final"""
         return self.version.is_final
-    
+
     def fetch_distributions(self):
         if self.dists is None:
             self._index.get_distributions(self.name, '%s' % self.version)
@@ -109,7 +109,8 @@
                 self.dists = {}
         return self.dists
 
-    def add_distribution(self, dist_type='sdist', python_version=None, **params):
+    def add_distribution(self, dist_type='sdist', python_version=None,
+                         **params):
         """Add distribution informations to this release.
         If distribution information is already set for this distribution type,
         add the given url paths to the distribution. This can be useful while
@@ -127,7 +128,7 @@
             self.dists[dist_type] = DistInfo(self, dist_type,
                                              index=self._index, **params)
         if python_version:
-            self.dists[dist_type].python_version = python_version 
+            self.dists[dist_type].python_version = python_version
 
     def get_distribution(self, dist_type=None, prefer_source=True):
         """Return a distribution.
@@ -149,6 +150,16 @@
                 dist = self.dists.values()[0]
             return dist
 
+    def unpack(self, path=None, prefer_source=True):
+        """Unpack the distribution to the given path.
+
+        If no destination is given, creates a temporary location.
+
+        Returns the location of the extracted files (root).
+        """
+        return self.get_distribution(prefer_source=prefer_source)\
+                   .unpack(path=path)
+
     def download(self, temp_path=None, prefer_source=True):
         """Download the distribution, using the requirements.
 
@@ -164,7 +175,7 @@
 
     def set_metadata(self, metadata):
         if not self.metadata:
-            self.metadata = DistributionMetadata()
+            self.metadata = Metadata()
         self.metadata.update(metadata)
 
     def __getitem__(self, item):
@@ -302,7 +313,7 @@
 
     def unpack(self, path=None):
         """Unpack the distribution to the given path.
-        
+
         If not destination is given, creates a temporary location.
 
         Returns the location of the extracted files (root).
@@ -310,20 +321,11 @@
         if not self._unpacked_dir:
             if path is None:
                 path = tempfile.mkdtemp()
-            
-            filename = self.download()
+
+            filename = self.download(path)
             content_type = mimetypes.guess_type(filename)[0]
-     
-            if (content_type == 'application/zip'
-                or filename.endswith('.zip')
-                or filename.endswith('.pybundle')
-                or zipfile.is_zipfile(filename)):
-                unzip_file(filename, path, flatten=not filename.endswith('.pybundle'))
-            elif (content_type == 'application/x-gzip'
-                  or tarfile.is_tarfile(filename)
-                  or splitext(filename)[1].lower() in ('.tar', '.tar.gz', '.tar.bz2', '.tgz', '.tbz')):
-                untar_file(filename, path)
-            self._unpacked_dir = path
+            self._unpacked_dir = unpack_archive(filename, path)
+
         return self._unpacked_dir
 
     def _check_md5(self, filename):
@@ -340,6 +342,9 @@
                     % (hashval.hexdigest(), expected_hashval))
 
     def __repr__(self):
+        if self.release is None:
+            return "<? ? %s>" % self.dist_type
+
         return "<%s %s %s>" % (
             self.release.name, self.release.version, self.dist_type or "")
 
@@ -351,7 +356,7 @@
     """
     def __init__(self, name, releases=None, contains_hidden=False, index=None):
         self.set_index(index)
-        self.releases = [] 
+        self.releases = []
         self.name = name
         self.contains_hidden = contains_hidden
         if releases:
@@ -376,6 +381,8 @@
         """
         predicate = get_version_predicate(requirements)
         releases = self.filter(predicate)
+        if len(releases) == 0:
+            return None
         releases.sort_releases(prefer_final, reverse=True)
         return releases[0]
 
@@ -404,11 +411,11 @@
                 raise ValueError("%s is not the same project than %s" %
                                  (release.name, self.name))
             version = '%s' % release.version
-                
+
             if not version in self.get_versions():
                 # append only if not already exists
                 self.releases.append(release)
-            for dist in release.dists.values():
+            for dist in release.dists.itervalues():
                 for url in dist.urls:
                     self.add_release(version, dist.dist_type, **url)
         else:
@@ -445,8 +452,7 @@
             reverse=reverse, *args, **kwargs)
 
     def get_release(self, version):
-        """Return a release from it's version.
-        """
+        """Return a release from its version."""
         matches = [r for r in self.releases if "%s" % r.version == version]
         if len(matches) != 1:
             raise KeyError(version)
diff --git a/distutils2/index/mirrors.py b/distutils2/index/mirrors.py
--- a/distutils2/index/mirrors.py
+++ b/distutils2/index/mirrors.py
@@ -1,4 +1,4 @@
-"""Utilities related to the mirror infrastructure defined in PEP 381. 
+"""Utilities related to the mirror infrastructure defined in PEP 381.
 See http://www.python.org/dev/peps/pep-0381/
 """
 
@@ -7,6 +7,7 @@
 
 DEFAULT_MIRROR_URL = "last.pypi.python.org"
 
+
 def get_mirrors(hostname=None):
     """Return the list of mirrors from the last record found on the DNS
     entry::
@@ -19,7 +20,7 @@
     """
     if hostname is None:
         hostname = DEFAULT_MIRROR_URL
-    
+
     # return the last mirror registered on PyPI.
     try:
         hostname = socket.gethostbyname_ex(hostname)[0]
@@ -30,23 +31,24 @@
     # determine the list from the last one.
     return ["%s.%s" % (s, end_letter[1]) for s in string_range(end_letter[0])]
 
+
 def string_range(last):
     """Compute the range of string between "a" and last.
-    
+
     This works for simple "a to z" lists, but also for "a to zz" lists.
     """
     for k in range(len(last)):
-        for x in product(ascii_lowercase, repeat=k+1):
+        for x in product(ascii_lowercase, repeat=(k + 1)):
             result = ''.join(x)
             yield result
             if result == last:
                 return
 
+
 def product(*args, **kwds):
     pools = map(tuple, args) * kwds.get('repeat', 1)
     result = [[]]
     for pool in pools:
-        result = [x+[y] for x in result for y in pool]
+        result = [x + [y] for x in result for y in pool]
     for prod in result:
         yield tuple(prod)
-
diff --git a/distutils2/index/simple.py b/distutils2/index/simple.py
--- a/distutils2/index/simple.py
+++ b/distutils2/index/simple.py
@@ -1,7 +1,7 @@
 """index.simple
 
 Contains the class "SimpleIndexCrawler", a simple spider to find and retrieve
-distributions on the Python Package Index, using it's "simple" API,
+distributions on the Python Package Index, using its "simple" API,
 avalaible at http://pypi.python.org/simple/
 """
 from fnmatch import translate
@@ -11,9 +11,9 @@
 import sys
 import urllib2
 import urlparse
-import logging
 import os
 
+from distutils2 import logger
 from distutils2.index.base import BaseClient
 from distutils2.index.dist import (ReleasesList, EXTENSIONS,
                                    get_infos_from_url, MD5_HASH)
@@ -21,7 +21,7 @@
                                      UnableToDownload, CantParseArchiveName,
                                      ReleaseNotFound, ProjectNotFound)
 from distutils2.index.mirrors import get_mirrors
-from distutils2.metadata import DistributionMetadata
+from distutils2.metadata import Metadata
 from distutils2.version import get_version_predicate
 from distutils2 import __version__ as __distutils2_version__
 
@@ -167,6 +167,7 @@
         if predicate.name.lower() in self._projects and not force_update:
             return self._projects.get(predicate.name.lower())
         prefer_final = self._get_prefer_final(prefer_final)
+        logger.info('reading info on PyPI about %s', predicate.name)
         self._process_index_page(predicate.name)
 
         if predicate.name.lower() not in self._projects:
@@ -201,7 +202,7 @@
         if not release._metadata:
             location = release.get_distribution().unpack()
             pkg_info = os.path.join(location, 'PKG-INFO')
-            release._metadata = DistributionMetadata(pkg_info)
+            release._metadata = Metadata(pkg_info)
         return release
 
     def _switch_to_next_mirror(self):
@@ -304,8 +305,8 @@
                             infos = get_infos_from_url(link, project_name,
                                         is_external=not self.index_url in url)
                         except CantParseArchiveName, e:
-                            logging.warning("version has not been parsed: %s"
-                                            % e)
+                            logger.warning(
+                                "version has not been parsed: %s", e)
                         else:
                             self._register_release(release_info=infos)
                     else:
diff --git a/distutils2/index/wrapper.py b/distutils2/index/wrapper.py
--- a/distutils2/index/wrapper.py
+++ b/distutils2/index/wrapper.py
@@ -1,5 +1,4 @@
-import xmlrpc
-import simple
+from distutils2.index import simple, xmlrpc
 
 _WRAPPER_MAPPINGS = {'get_release': 'simple',
                      'get_releases': 'simple',
@@ -10,6 +9,7 @@
 _WRAPPER_INDEXES = {'xmlrpc': xmlrpc.Client,
                     'simple': simple.Crawler}
 
+
 def switch_index_if_fails(func, wrapper):
     """Decorator that switch of index (for instance from xmlrpc to simple)
     if the first mirror return an empty list or raises an exception.
@@ -58,7 +58,7 @@
 
         # instantiate the classes and set their _project attribute to the one
         # of the wrapper.
-        for name, cls in index_classes.items():
+        for name, cls in index_classes.iteritems():
             obj = self._indexes.setdefault(name, cls())
             obj._projects = self._projects
             obj._index = self
@@ -83,11 +83,11 @@
                 other_indexes = [i for i in self._indexes
                                  if i != self._default_index]
                 for index in other_indexes:
-                    real_method = getattr(self._indexes[index], method_name, None)
+                    real_method = getattr(self._indexes[index], method_name,
+                                          None)
                     if real_method:
                         break
         if real_method:
             return switch_index_if_fails(real_method, self)
         else:
             raise AttributeError("No index have attribute '%s'" % method_name)
-
diff --git a/distutils2/index/xmlrpc.py b/distutils2/index/xmlrpc.py
--- a/distutils2/index/xmlrpc.py
+++ b/distutils2/index/xmlrpc.py
@@ -103,7 +103,6 @@
         project.sort_releases(prefer_final)
         return project
 
-
     def get_distributions(self, project_name, version):
         """Grab informations about distributions from XML-RPC.
 
@@ -127,10 +126,17 @@
         return release
 
     def get_metadata(self, project_name, version):
-        """Retreive project metadatas.
+        """Retrieve project metadata.
 
         Return a ReleaseInfo object, with metadata informations filled in.
         """
+        # to be case-insensitive, get the information from the XMLRPC API
+        projects = [d['name'] for d in
+                    self.proxy.search({'name': project_name})
+                    if d['name'].lower() == project_name]
+        if len(projects) > 0:
+            project_name = projects[0]
+
         metadata = self.proxy.release_data(project_name, version)
         project = self._get_project(project_name)
         if version not in project.get_versions():
@@ -158,9 +164,16 @@
                     p['version'], metadata={'summary': p['summary']},
                     index=self._index))
             except IrrationalVersionError, e:
-                logging.warn("Irrational version error found: %s" % e)
+                logging.warn("Irrational version error found: %s", e)
+        return [self._projects[p['name'].lower()] for p in projects]
 
-        return [self._projects[p['name'].lower()] for p in projects]
+    def get_all_projects(self):
+        """Return the list of all projects registered in the package index"""
+        projects = self.proxy.list_packages()
+        for name in projects:
+            self.get_releases(name, show_hidden=True)
+
+        return [self._projects[name.lower()] for name in set(projects)]
 
     @property
     def proxy(self):
diff --git a/distutils2/install.py b/distutils2/install.py
--- a/distutils2/install.py
+++ b/distutils2/install.py
@@ -1,14 +1,3 @@
-from tempfile import mkdtemp
-import logging
-import shutil
-import os
-import errno
-
-from distutils2._backport.pkgutil import get_distributions
-from distutils2.depgraph import generate_graph
-from distutils2.index import wrapper
-from distutils2.index.errors import ProjectNotFound, ReleaseNotFound
-
 """Provides installations scripts.
 
 The goal of this script is to install a release from the indexes (eg.
@@ -17,26 +6,32 @@
 It uses the work made in pkgutil and by the index crawlers to browse the
 installed distributions, and rely on the installation commands to install.
 """
+import shutil
+import os
+import sys
+import stat
+import errno
+import itertools
+import logging
+import tempfile
 
+from distutils2 import logger
+from distutils2._backport.pkgutil import get_distributions
+from distutils2._backport.pkgutil import get_distribution
+from distutils2._backport.sysconfig import get_config_var
+from distutils2.depgraph import generate_graph
+from distutils2.index import wrapper
+from distutils2.index.errors import ProjectNotFound, ReleaseNotFound
+from distutils2.errors import (DistutilsError, InstallationException,
+                               InstallationConflict)
+from distutils2.version import get_version_predicate
 
-class InstallationException(Exception):
-    """Base exception for installation scripts"""
 
+__all__ = ['install_dists', 'install_from_infos', 'get_infos', 'remove',
+           'install']
 
-class InstallationConflict(InstallationException):
-    """Raised when a conflict is detected"""
 
-
-def _update_infos(infos, new_infos):
-    """extends the lists contained in the `info` dict with those contained
-    in the `new_info` one
-    """
-    for key, value in infos.items():
-        if key in new_infos:
-            infos[key].extend(new_infos[key])
-
-
-def move_files(files, destination=None):
+def _move_files(files, destination):
     """Move the list of files in the destination folder, keeping the same
     structure.
 
@@ -44,13 +39,11 @@
 
     :param files: a list of files to move.
     :param destination: the destination directory to put on the files.
-                        if not defined, create a new one, using mkdtemp
     """
-    if not destination:
-        destination = mkdtemp()
-
     for old in files:
-        new = '%s%s' % (destination, old)
+        # not using os.path.join() because basename() might not be
+        # unique in destination
+        new = "%s%s" % (destination, old)
 
         # try to make the paths.
         try:
@@ -61,10 +54,66 @@
             else:
                 raise e
         os.rename(old, new)
-        yield(old, new)
+        yield old, new
 
 
-def install_dists(dists, path=None):
+def _run_d1_install(archive_dir, path):
+    # backward compat: using setuptools or plain-distutils
+    cmd = '%s setup.py install --root=%s --record=%s'
+    setup_py = os.path.join(archive_dir, 'setup.py')
+    if 'setuptools' in open(setup_py).read():
+        cmd += ' --single-version-externally-managed'
+
+    # how to place this file in the egg-info dir
+    # for non-distutils2 projects ?
+    record_file = os.path.join(archive_dir, 'RECORD')
+    os.system(cmd % (sys.executable, path, record_file))
+    if not os.path.exists(record_file):
+        raise ValueError('failed to install')
+    return open(record_file).read().split('\n')
+
+
+def _run_d2_install(archive_dir, path):
+    # using our own install command
+    raise NotImplementedError()
+
+
+def _install_dist(dist, path):
+    """Install a distribution into a path.
+
+    This:
+
+    * unpack the distribution
+    * copy the files in "path"
+    * determine if the distribution is distutils2 or distutils1.
+    """
+    where = dist.unpack(path)
+
+    # get into the dir
+    archive_dir = None
+    for item in os.listdir(where):
+        fullpath = os.path.join(where, item)
+        if os.path.isdir(fullpath):
+            archive_dir = fullpath
+            break
+
+    if archive_dir is None:
+        raise ValueError('Cannot locate the unpacked archive')
+
+    # install
+    old_dir = os.getcwd()
+    os.chdir(archive_dir)
+    try:
+        # distutils2 or distutils1 ?
+        if 'setup.py' in os.listdir(archive_dir):
+            return _run_d1_install(archive_dir, path)
+        else:
+            return _run_d2_install(archive_dir, path)
+    finally:
+        os.chdir(old_dir)
+
+
+def install_dists(dists, path, paths=sys.path):
     """Install all distributions provided in dists, with the given prefix.
 
     If an error occurs while installing one of the distributions, uninstall all
@@ -73,24 +122,29 @@
     Return a list of installed files.
 
     :param dists: distributions to install
-    :param path: base path to install distribution on
+    :param path: base path to install distribution in
+    :param paths: list of paths (defaults to sys.path) to look for info
     """
-    if not path:
-        path = mkdtemp()
 
     installed_dists, installed_files = [], []
-    for d in dists:
+    for dist in dists:
+        logger.info('installing %s %s', dist.name, dist.version)
         try:
-            installed_files.extend(d.install(path))
-            installed_dists.append(d)
-        except Exception, e :
-            for d in installed_dists:
-                d.uninstall()
+            installed_files.extend(_install_dist(dist, path))
+            installed_dists.append(dist)
+        except Exception, e:
+            logger.info('failed: %s', e)
+
+            # reverting
+            for installed_dist in installed_dists:
+                _remove_dist(installed_dist, paths)
             raise e
+
     return installed_files
 
 
-def install_from_infos(install=[], remove=[], conflicts=[], install_path=None):
+def install_from_infos(install_path=None, install=[], remove=[], conflicts=[],
+                       paths=sys.path):
     """Install and remove the given distributions.
 
     The function signature is made to be compatible with the one of get_infos.
@@ -109,37 +163,57 @@
         4. Else, move the distributions to the right locations, and remove for
            real the distributions that need to be removed.
 
-    :param install: list of distributions that will be installed.
+    :param install_path: the installation path where we want to install the
+                         distributions.
+    :param install: list of distributions that will be installed; install_path
+                    must be provided if this list is not empty.
     :param remove: list of distributions that will be removed.
     :param conflicts: list of conflicting distributions, eg. that will be in
                       conflict once the install and remove distribution will be
                       processed.
-    :param install_path: the installation path where we want to install the
-                         distributions.
+    :param paths: list of paths (defaults to sys.path) to look for info
     """
     # first of all, if we have conflicts, stop here.
     if conflicts:
         raise InstallationConflict(conflicts)
 
+    if install and not install_path:
+        raise ValueError("Distributions are to be installed but `install_path`"
+                         " is not provided.")
+
     # before removing the files, we will start by moving them away
     # then, if any error occurs, we could replace them in the good place.
     temp_files = {}  # contains lists of {dist: (old, new)} paths
+    temp_dir = None
     if remove:
+        temp_dir = tempfile.mkdtemp()
         for dist in remove:
             files = dist.get_installed_files()
-            temp_files[dist] = move_files(files)
+            temp_files[dist] = _move_files(files, temp_dir)
     try:
         if install:
-            installed_files = install_dists(install, install_path)  # install to tmp first
-        for files in temp_files.values():
-            for old, new in files:
-                os.remove(new)
-
-    except Exception,e:
-        # if an error occurs, put back the files in the good place.
+            install_dists(install, install_path, paths)
+    except:
+        # if an error occurs, put back the files in the right place.
         for files in temp_files.values():
             for old, new in files:
                 shutil.move(new, old)
+        if temp_dir:
+            shutil.rmtree(temp_dir)
+        # now re-raising
+        raise
+
+    # we can remove them for good
+    for files in temp_files.values():
+        for old, new in files:
+            os.remove(new)
+    if temp_dir:
+        shutil.rmtree(temp_dir)
+
+
+def _get_setuptools_deps(release):
+    # NotImplementedError
+    pass
 
 
 def get_infos(requirements, index=None, installed=None, prefer_final=True):
@@ -162,47 +236,188 @@
     Conflict contains all the conflicting distributions, if there is a
     conflict.
     """
+    if not installed:
+        logger.info('reading installed distributions')
+        installed = get_distributions(use_egg_info=True)
+
+    infos = {'install': [], 'remove': [], 'conflict': []}
+    # Is a compatible version of the project already installed?
+    predicate = get_version_predicate(requirements)
+    found = False
+    installed = list(installed)
+
+    # check that the project isn't already installed
+    for installed_project in installed:
+        # is it a compatible project ?
+        if predicate.name.lower() != installed_project.name.lower():
+            continue
+        found = True
+        logger.info('found %s %s', installed_project.name,
+                    installed_project.version)
+
+        # if we already have something installed, check it matches the
+        # requirements
+        if predicate.match(installed_project.version):
+            return infos
+        break
+
+    if not found:
+        logger.info('project not installed')
 
     if not index:
         index = wrapper.ClientWrapper()
 
-    if not installed:
-        installed = get_distributions()
-
     # Get all the releases that match the requirements
     try:
         releases = index.get_releases(requirements)
-    except (ReleaseNotFound, ProjectNotFound), e:
+    except (ReleaseNotFound, ProjectNotFound):
         raise InstallationException('Release not found: "%s"' % requirements)
 
     # Pick up a release, and try to get the dependency tree
     release = releases.get_last(requirements, prefer_final=prefer_final)
 
-    # Iter since we found something without conflicts
+    if release is None:
+        logger.info('could not find a matching project')
+        return infos
+
+    # this works for Metadata 1.2
     metadata = release.fetch_metadata()
 
-    # Get the distributions already_installed on the system
-    # and add the one we want to install
+    # for earlier, we need to build setuptools deps if any
+    if 'requires_dist' not in metadata:
+        deps = _get_setuptools_deps(release)
+    else:
+        deps = metadata['requires_dist']
 
-    distributions = installed + [release]
+    # XXX deps not used
+
+    distributions = itertools.chain(installed, [release])
     depgraph = generate_graph(distributions)
 
     # Store all the already_installed packages in a list, in case of rollback.
-    infos = {'install': [], 'remove': [], 'conflict': []}
-
     # Get what the missing deps are
-    for dists in depgraph.missing.values():
-        if dists:
-            logging.info("missing dependencies found, installing them")
-            # we have missing deps
-            for dist in dists:
-                _update_infos(infos,
-                             get_infos(dist, index, installed))
+    dists = depgraph.missing[release]
+    if dists:
+        logger.info("missing dependencies found, retrieving metadata")
+        # we have missing deps
+        for dist in dists:
+            _update_infos(infos, get_infos(dist, index, installed))
 
     # Fill in the infos
     existing = [d for d in installed if d.name == release.name]
+
     if existing:
         infos['remove'].append(existing[0])
         infos['conflict'].extend(depgraph.reverse_list[existing[0]])
     infos['install'].append(release)
     return infos
+
+
+def _update_infos(infos, new_infos):
+    """extends the lists contained in the `info` dict with those contained
+    in the `new_info` one
+    """
+    for key, value in infos.items():
+        if key in new_infos:
+            infos[key].extend(new_infos[key])
+
+
+def _remove_dist(dist, paths=sys.path):
+    remove(dist.name, paths)
+
+
+def remove(project_name, paths=sys.path):
+    """Removes a single project from the installation"""
+    dist = get_distribution(project_name, use_egg_info=True, paths=paths)
+    if dist is None:
+        raise DistutilsError('Distribution "%s" not found' % project_name)
+    files = dist.get_installed_files(local=True)
+    rmdirs = []
+    rmfiles = []
+    tmp = tempfile.mkdtemp(prefix=project_name + '-uninstall')
+    try:
+        for file_, md5, size in files:
+            if os.path.isfile(file_):
+                dirname, filename = os.path.split(file_)
+                tmpfile = os.path.join(tmp, filename)
+                try:
+                    os.rename(file_, tmpfile)
+                finally:
+                    if not os.path.isfile(file_):
+                        os.rename(tmpfile, file_)
+                if file_ not in rmfiles:
+                    rmfiles.append(file_)
+                if dirname not in rmdirs:
+                    rmdirs.append(dirname)
+    finally:
+        shutil.rmtree(tmp)
+
+    logger.info('removing %r...', project_name)
+
+    file_count = 0
+    for file_ in rmfiles:
+        os.remove(file_)
+        file_count += 1
+
+    dir_count = 0
+    for dirname in rmdirs:
+        if not os.path.exists(dirname):
+            # could have been removed already — TODO confirm intent of this comment
+            continue
+
+        files_count = 0
+        for root, dir, files in os.walk(dirname):
+            files_count += len(files)
+
+        if files_count > 0:
+            # XXX Warning
+            continue
+
+        # empty dirs with only empty dirs
+        if bool(os.stat(dirname).st_mode & stat.S_IWUSR):
+            # XXX Add a callable in shutil.rmtree to count
+            # the number of deleted elements
+            shutil.rmtree(dirname)
+            dir_count += 1
+
+    # removing the top path
+    # XXX count it ?
+    if os.path.exists(dist.path):
+        shutil.rmtree(dist.path)
+
+    logger.info('success: removed %d files and %d dirs',
+                file_count, dir_count)
+
+
+def install(project):
+    logger.info('getting information about %r', project)
+    try:
+        info = get_infos(project)
+    except InstallationException:
+        logger.info('cound not find %r', project)
+        return
+
+    if info['install'] == []:
+        logger.info('nothing to install')
+        return
+
+    install_path = get_config_var('base')
+    try:
+        install_from_infos(install_path,
+                           info['install'], info['remove'], info['conflict'])
+
+    except InstallationConflict, e:
+        if logger.isEnabledFor(logging.INFO):
+            projects = ['%s %s' % (p.name, p.version) for p in e.args[0]]
+            logger.info('%r conflicts with %s', project, ','.join(projects))
+
+
+def _main(**attrs):
+    if 'script_args' not in attrs:
+        import sys
+        attrs['requirements'] = sys.argv[1]
+    get_infos(**attrs)
+
+
+if __name__ == '__main__':
+    _main()
diff --git a/distutils2/manifest.py b/distutils2/manifest.py
--- a/distutils2/manifest.py
+++ b/distutils2/manifest.py
@@ -95,7 +95,7 @@
             try:
                 self._process_template_line(line)
             except DistutilsTemplateError, msg:
-                logging.warning("%s, %s" % (path_or_file, msg))
+                logging.warning("%s, %s", path_or_file, msg)
 
     def write(self, path):
         """Write the file list in 'self.filelist' (presumably as filled in
@@ -111,14 +111,14 @@
 
             if first_line != '# file GENERATED by distutils, do NOT edit\n':
                 logging.info("not writing to manually maintained "
-                             "manifest file '%s'", path)
+                             "manifest file %r", path)
                 return
 
         self.sort()
         self.remove_duplicates()
         content = self.files[:]
         content.insert(0, '# file GENERATED by distutils, do NOT edit')
-        logging.info("writing manifest file '%s'", path)
+        logging.info("writing manifest file %r", path)
         write_file(path, content)
 
     def read(self, path):
@@ -126,7 +126,7 @@
         fill in 'self.filelist', the list of files to include in the source
         distribution.
         """
-        logging.info("reading manifest file '%s'" % path)
+        logging.info("reading manifest file %r", path)
         manifest = open(path)
         try:
             for line in manifest.readlines():
@@ -168,14 +168,14 @@
                       'global-include', 'global-exclude'):
             if len(words) < 2:
                 raise DistutilsTemplateError(
-                      "'%s' expects <pattern1> <pattern2> ..." % action)
+                      "%r expects <pattern1> <pattern2> ..." % action)
 
             patterns = map(convert_path, words[1:])
 
         elif action in ('recursive-include', 'recursive-exclude'):
             if len(words) < 3:
                 raise DistutilsTemplateError(
-                      "'%s' expects <dir> <pattern1> <pattern2> ..." % action)
+                      "%r expects <dir> <pattern1> <pattern2> ..." % action)
 
             dir = convert_path(words[1])
             patterns = map(convert_path, words[2:])
@@ -183,12 +183,12 @@
         elif action in ('graft', 'prune'):
             if len(words) != 2:
                 raise DistutilsTemplateError(
-                     "'%s' expects a single <dir_pattern>" % action)
+                     "%r expects a single <dir_pattern>" % action)
 
             dir_pattern = convert_path(words[1])
 
         else:
-            raise DistutilsTemplateError("unknown action '%s'" % action)
+            raise DistutilsTemplateError("unknown action %r" % action)
 
         return action, patterns, dir, dir_pattern
 
@@ -206,53 +206,52 @@
         if action == 'include':
             for pattern in patterns:
                 if not self._include_pattern(pattern, anchor=1):
-                    logging.warning("warning: no files found matching '%s'" %
-                             pattern)
+                    logging.warning("no files found matching %r", pattern)
 
         elif action == 'exclude':
             for pattern in patterns:
                 if not self.exclude_pattern(pattern, anchor=1):
-                    logging.warning(("warning: no previously-included files "
-                              "found matching '%s'") % pattern)
+                    logging.warning("no previously-included files "
+                                    "found matching %r", pattern)
 
         elif action == 'global-include':
             for pattern in patterns:
                 if not self._include_pattern(pattern, anchor=0):
-                    logging.warning(("warning: no files found matching '%s' " +
-                              "anywhere in distribution") % pattern)
+                    logging.warning("no files found matching %r "
+                                    "anywhere in distribution", pattern)
 
         elif action == 'global-exclude':
             for pattern in patterns:
                 if not self.exclude_pattern(pattern, anchor=0):
-                    logging.warning(("warning: no previously-included files "
-                              "matching '%s' found anywhere in distribution") %
-                             pattern)
+                    logging.warning("no previously-included files "
+                                    "matching %r found anywhere in "
+                                    "distribution", pattern)
 
         elif action == 'recursive-include':
             for pattern in patterns:
                 if not self._include_pattern(pattern, prefix=dir):
-                    logging.warning(("warning: no files found matching '%s' "
-                                "under directory '%s'" % (pattern, dir)))
+                    logging.warning("no files found matching %r "
+                                    "under directory %r", pattern, dir)
 
         elif action == 'recursive-exclude':
             for pattern in patterns:
                 if not self.exclude_pattern(pattern, prefix=dir):
-                    logging.warning(("warning: no previously-included files "
-                              "matching '%s' found under directory '%s'") %
-                             (pattern, dir))
+                    logging.warning("no previously-included files "
+                                    "matching %r found under directory %r",
+                                    pattern, dir)
 
         elif action == 'graft':
             if not self._include_pattern(None, prefix=dir_pattern):
-                logging.warning("warning: no directories found matching '%s'" %
-                         dir_pattern)
+                logging.warning("no directories found matching %r",
+                                dir_pattern)
 
         elif action == 'prune':
             if not self.exclude_pattern(None, prefix=dir_pattern):
-                logging.warning(("no previously-included directories found " +
-                          "matching '%s'") % dir_pattern)
+                logging.warning("no previously-included directories found "
+                                "matching %r", dir_pattern)
         else:
             raise DistutilsInternalError(
-                  "this cannot happen: invalid action '%s'" % action)
+                  "this cannot happen: invalid action %r" % action)
 
     def _include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0):
         """Select strings (presumably filenames) from 'self.files' that
diff --git a/distutils2/markers.py b/distutils2/markers.py
new file mode 100644
--- /dev/null
+++ b/distutils2/markers.py
@@ -0,0 +1,194 @@
+""" Micro-language for PEP 345 environment markers
+"""
+import sys
+import platform
+import os
+from tokenize import tokenize, NAME, OP, STRING, ENDMARKER
+from StringIO import StringIO
+
+__all__ = ['interpret']
+
+
+# allowed operators
+_OPERATORS = {'==': lambda x, y: x == y,
+              '!=': lambda x, y: x != y,
+              '>': lambda x, y: x > y,
+              '>=': lambda x, y: x >= y,
+              '<': lambda x, y: x < y,
+              '<=': lambda x, y: x <= y,
+              'in': lambda x, y: x in y,
+              'not in': lambda x, y: x not in y}
+
+
+def _operate(operation, x, y):
+    return _OPERATORS[operation](x, y)
+
+
+# restricted set of variables
+_VARS = {'sys.platform': sys.platform,
+         'python_version': sys.version[:3],
+         'python_full_version': sys.version.split(' ', 1)[0],
+         'os.name': os.name,
+         'platform.version': platform.version(),
+         'platform.machine': platform.machine()}
+
+
+class _Operation(object):
+
+    def __init__(self, execution_context=None):
+        self.left = None
+        self.op = None
+        self.right = None
+        if execution_context is None:
+            execution_context = {}
+        self.execution_context = execution_context
+
+    def _get_var(self, name):
+        if name in self.execution_context:
+            return self.execution_context[name]
+        return _VARS[name]
+
+    def __repr__(self):
+        return '%s %s %s' % (self.left, self.op, self.right)
+
+    def _is_string(self, value):
+        if value is None or len(value) < 2:
+            return False
+        for delimiter in '"\'':
+            if value[0] == value[-1] == delimiter:
+                return True
+        return False
+
+    def _is_name(self, value):
+        return value in _VARS
+
+    def _convert(self, value):
+        if value in _VARS:
+            return self._get_var(value)
+        return value.strip('"\'')
+
+    def _check_name(self, value):
+        if value not in _VARS:
+            raise NameError(value)
+
+    def _nonsense_op(self):
+        msg = 'This operation is not supported : "%s"' % self
+        raise SyntaxError(msg)
+
+    def __call__(self):
+        # make sure we do something useful
+        if self._is_string(self.left):
+            if self._is_string(self.right):
+                self._nonsense_op()
+            self._check_name(self.right)
+        else:
+            if not self._is_string(self.right):
+                self._nonsense_op()
+            self._check_name(self.left)
+
+        if self.op not in _OPERATORS:
+            raise TypeError('Operator not supported "%s"' % self.op)
+
+        left = self._convert(self.left)
+        right = self._convert(self.right)
+        return _operate(self.op, left, right)
+
+
+class _OR(object):
+    def __init__(self, left, right=None):
+        self.left = left
+        self.right = right
+
+    def filled(self):
+        return self.right is not None
+
+    def __repr__(self):
+        return 'OR(%r, %r)' % (self.left, self.right)
+
+    def __call__(self):
+        return self.left() or self.right()
+
+
+class _AND(object):
+    def __init__(self, left, right=None):
+        self.left = left
+        self.right = right
+
+    def filled(self):
+        return self.right is not None
+
+    def __repr__(self):
+        return 'AND(%r, %r)' % (self.left, self.right)
+
+    def __call__(self):
+        return self.left() and self.right()
+
+
+class _CHAIN(object):
+
+    def __init__(self, execution_context=None):
+        self.ops = []
+        self.op_starting = True
+        self.execution_context = execution_context
+
+    def eat(self, toktype, tokval, rowcol, line, logical_line):
+        if toktype not in (NAME, OP, STRING, ENDMARKER):
+            raise SyntaxError('Type not supported "%s"' % tokval)
+
+        if self.op_starting:
+            op = _Operation(self.execution_context)
+            if len(self.ops) > 0:
+                last = self.ops[-1]
+                if isinstance(last, (_OR, _AND)) and not last.filled():
+                    last.right = op
+                else:
+                    self.ops.append(op)
+            else:
+                self.ops.append(op)
+            self.op_starting = False
+        else:
+            op = self.ops[-1]
+
+        if (toktype == ENDMARKER or
+            (toktype == NAME and tokval in ('and', 'or'))):
+            if toktype == NAME and tokval == 'and':
+                self.ops.append(_AND(self.ops.pop()))
+            elif toktype == NAME and tokval == 'or':
+                self.ops.append(_OR(self.ops.pop()))
+            self.op_starting = True
+            return
+
+        if isinstance(op, (_OR, _AND)) and op.right is not None:
+            op = op.right
+
+        if ((toktype in (NAME, STRING) and tokval not in ('in', 'not'))
+            or (toktype == OP and tokval == '.')):
+            if op.op is None:
+                if op.left is None:
+                    op.left = tokval
+                else:
+                    op.left += tokval
+            else:
+                if op.right is None:
+                    op.right = tokval
+                else:
+                    op.right += tokval
+        elif toktype == OP or tokval in ('in', 'not'):
+            if tokval == 'in' and op.op == 'not':
+                op.op = 'not in'
+            else:
+                op.op = tokval
+
+    def result(self):
+        for op in self.ops:
+            if not op():
+                return False
+        return True
+
+
+def interpret(marker, execution_context=None):
+    """Interpret a marker and return a result depending on environment."""
+    marker = marker.strip()
+    operations = _CHAIN(execution_context)
+    tokenize(StringIO(marker).readline, operations.eat)
+    return operations.result()
diff --git a/distutils2/metadata.py b/distutils2/metadata.py
--- a/distutils2/metadata.py
+++ b/distutils2/metadata.py
@@ -3,18 +3,16 @@
 Supports all metadata formats (1.0, 1.1, 1.2).
 """
 
-import os
-import sys
-import platform
 import re
 from StringIO import StringIO
 from email import message_from_file
-from tokenize import tokenize, NAME, OP, STRING, ENDMARKER
 
 from distutils2 import logger
+from distutils2.markers import interpret
 from distutils2.version import (is_valid_predicate, is_valid_version,
                                 is_valid_versions)
-from distutils2.errors import (MetadataConflictError,
+from distutils2.errors import (MetadataMissingError,
+                               MetadataConflictError,
                                MetadataUnrecognizedVersionError)
 
 try:
@@ -41,8 +39,8 @@
     _HAS_DOCUTILS = False
 
 # public API of this module
-__all__ = ('DistributionMetadata', 'PKG_INFO_ENCODING',
-           'PKG_INFO_PREFERRED_VERSION')
+__all__ = ['Metadata', 'get_metadata_version', 'metadata_to_dict',
+           'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION']
 
 # Encoding used for the PKG-INFO files
 PKG_INFO_ENCODING = 'utf-8'
@@ -139,49 +137,104 @@
     # default marker when 1.0 is disqualified
     return '1.2'
 
-_ATTR2FIELD = {'metadata_version': 'Metadata-Version',
-        'name': 'Name',
-        'version': 'Version',
-        'platform': 'Platform',
-        'supported_platform': 'Supported-Platform',
-        'summary': 'Summary',
-        'description': 'Description',
-        'keywords': 'Keywords',
-        'home_page': 'Home-page',
-        'author': 'Author',
-        'author_email': 'Author-email',
-        'maintainer': 'Maintainer',
-        'maintainer_email': 'Maintainer-email',
-        'license': 'License',
-        'classifier': 'Classifier',
-        'download_url': 'Download-URL',
-        'obsoletes_dist': 'Obsoletes-Dist',
-        'provides_dist': 'Provides-Dist',
-        'requires_dist': 'Requires-Dist',
-        'requires_python': 'Requires-Python',
-        'requires_external': 'Requires-External',
-        'requires': 'Requires',
-        'provides': 'Provides',
-        'obsoletes': 'Obsoletes',
-        'project_url': 'Project-URL',
-        }
+
+def get_metadata_version(metadata):
+    """Return the Metadata-Version attribute
+
+    - *metadata* gives a METADATA object
+    """
+    return metadata['Metadata-Version']
+
+
+def metadata_to_dict(metadata):
+    """Convert a metadata object to a dict
+
+    - *metadata* gives a METADATA object
+    """
+    data = {
+        'metadata_version': metadata['Metadata-Version'],
+        'name': metadata['Name'],
+        'version': metadata['Version'],
+        'summary': metadata['Summary'],
+        'home_page': metadata['Home-page'],
+        'author': metadata['Author'],
+        'author_email': metadata['Author-email'],
+        'license': metadata['License'],
+        'description': metadata['Description'],
+        'keywords': metadata['Keywords'],
+        'platform': metadata['Platform'],
+        'classifier': metadata['Classifier'],
+        'download_url': metadata['Download-URL'],
+    }
+
+    if metadata['Metadata-Version'] == '1.2':
+        data['requires_dist'] = metadata['Requires-Dist']
+        data['requires_python'] = metadata['Requires-Python']
+        data['requires_external'] = metadata['Requires-External']
+        data['provides_dist'] = metadata['Provides-Dist']
+        data['obsoletes_dist'] = metadata['Obsoletes-Dist']
+        data['project_url'] = [','.join(url) for url in
+                               metadata['Project-URL']]
+
+    elif metadata['Metadata-Version'] == '1.1':
+        data['provides'] = metadata['Provides']
+        data['requires'] = metadata['Requires']
+        data['obsoletes'] = metadata['Obsoletes']
+
+    return data
+
+
+_ATTR2FIELD = {
+    'metadata_version': 'Metadata-Version',
+    'name': 'Name',
+    'version': 'Version',
+    'platform': 'Platform',
+    'supported_platform': 'Supported-Platform',
+    'summary': 'Summary',
+    'description': 'Description',
+    'keywords': 'Keywords',
+    'home_page': 'Home-page',
+    'author': 'Author',
+    'author_email': 'Author-email',
+    'maintainer': 'Maintainer',
+    'maintainer_email': 'Maintainer-email',
+    'license': 'License',
+    'classifier': 'Classifier',
+    'download_url': 'Download-URL',
+    'obsoletes_dist': 'Obsoletes-Dist',
+    'provides_dist': 'Provides-Dist',
+    'requires_dist': 'Requires-Dist',
+    'requires_python': 'Requires-Python',
+    'requires_external': 'Requires-External',
+    'requires': 'Requires',
+    'provides': 'Provides',
+    'obsoletes': 'Obsoletes',
+    'project_url': 'Project-URL',
+}
 
 _PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist')
 _VERSIONS_FIELDS = ('Requires-Python',)
 _VERSION_FIELDS = ('Version',)
 _LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes',
-        'Requires', 'Provides', 'Obsoletes-Dist',
-        'Provides-Dist', 'Requires-Dist', 'Requires-External',
-        'Project-URL')
+               'Requires', 'Provides', 'Obsoletes-Dist',
+               'Provides-Dist', 'Requires-Dist', 'Requires-External',
+               'Project-URL', 'Supported-Platform')
 _LISTTUPLEFIELDS = ('Project-URL',)
 
 _ELEMENTSFIELD = ('Keywords',)
 
 _UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description')
 
-_MISSING = object()
 
-class DistributionMetadata(object):
+class NoDefault(object):
+    """Marker object used for clean representation"""
+    def __repr__(self):
+        return '<NoDefault>'
+
+_MISSING = NoDefault()
+
+
+class Metadata(object):
     """The metadata of a release.
 
     Supports versions 1.0, 1.1 and 1.2 (auto-detected). You can
@@ -195,9 +248,11 @@
     # also document the mapping API and UNKNOWN default key
 
     def __init__(self, path=None, platform_dependent=False,
-                 execution_context=None, fileobj=None, mapping=None):
+                 execution_context=None, fileobj=None, mapping=None,
+                 display_warnings=False):
         self._fields = {}
-        self.version = None
+        self.display_warnings = display_warnings
+        self.requires_files = []
         self.docutils_support = _HAS_DOCUTILS
         self.platform_dependent = platform_dependent
         self.execution_context = execution_context
@@ -211,8 +266,7 @@
             self.update(mapping)
 
     def _set_best_version(self):
-        self.version = _best_version(self._fields)
-        self._fields['Metadata-Version'] = self.version
+        self._fields['Metadata-Version'] = _best_version(self._fields)
 
     def _write_field(self, file, name, value):
         file.write('%s: %s\n' % (name, value))
@@ -281,7 +335,7 @@
         if not self.platform_dependent or ';' not in value:
             return True, value
         value, marker = value.split(';')
-        return _interpret(marker, self.execution_context), value
+        return interpret(marker, self.execution_context), value
 
     def _remove_line_prefix(self, value):
         return _LINE_PREFIX.sub('\n', value)
@@ -290,21 +344,28 @@
     # Public API
     #
     def get_fullname(self):
+        """Return the distribution name with version"""
         return '%s-%s' % (self['Name'], self['Version'])
 
     def is_metadata_field(self, name):
+        """return True if name is a valid metadata key"""
         name = self._convert_name(name)
         return name in _ALL_FIELDS
 
+    def is_multi_field(self, name):
+        name = self._convert_name(name)
+        return name in _LISTFIELDS
+
     def read(self, filepath):
+        """Read the metadata values from a file path."""
         self.read_file(open(filepath))
 
     def read_file(self, fileob):
         """Read the metadata values from a file object."""
         msg = message_from_file(fileob)
-        self.version = msg['metadata-version']
+        self._fields['Metadata-Version'] = msg['metadata-version']
 
-        for field in _version2fieldlist(self.version):
+        for field in _version2fieldlist(self['Metadata-Version']):
             if field in _LISTFIELDS:
                 # we can have multiple lines
                 values = msg.get_all(field)
@@ -328,7 +389,7 @@
     def write_file(self, fileobject):
         """Write the PKG-INFO format data to a file object."""
         self._set_best_version()
-        for field in _version2fieldlist(self.version):
+        for field in _version2fieldlist(self['Metadata-Version']):
             values = self.get(field)
             if field in _ELEMENTSFIELD:
                 self._write_field(fileobject, field, ','.join(values))
@@ -387,21 +448,22 @@
             else:
                 value = []
 
-        if name in _PREDICATE_FIELDS and value is not None:
-            for v in value:
-                # check that the values are valid predicates
-                if not is_valid_predicate(v.split(';')[0]):
-                    logger.warn('"%s" is not a valid predicate (field "%s")' %
-                         (v, name))
-        # FIXME this rejects UNKNOWN, is that right?
-        elif name in _VERSIONS_FIELDS and value is not None:
-            if not is_valid_versions(value):
-                logger.warn('"%s" is not a valid version (field "%s")' %
-                     (value, name))
-        elif name in _VERSION_FIELDS and value is not None:
-            if not is_valid_version(value):
-                logger.warn('"%s" is not a valid version (field "%s")' %
-                     (value, name))
+        if self.display_warnings:
+            if name in _PREDICATE_FIELDS and value is not None:
+                for v in value:
+                    # check that the values are valid predicates
+                    if not is_valid_predicate(v.split(';')[0]):
+                        logger.warn('"%s" is not a valid predicate (field "%s")' %
+                            (v, name))
+            # FIXME this rejects UNKNOWN, is that right?
+            elif name in _VERSIONS_FIELDS and value is not None:
+                if not is_valid_versions(value):
+                    logger.warn('"%s" is not a valid version (field "%s")' %
+                        (value, name))
+            elif name in _VERSION_FIELDS and value is not None:
+                if not is_valid_version(value):
+                    logger.warn('"%s" is not a valid version (field "%s")' %
+                        (value, name))
 
         if name in _UNICODEFIELDS:
             value = self._encode_field(value)
@@ -448,15 +510,25 @@
             return None
         return value
 
-    def check(self):
-        """Check if the metadata is compliant."""
+    def check(self, strict=False, restructuredtext=False):
+        """Check if the metadata is compliant. If strict is False then raise if
+        no Name or Version are provided"""
         # XXX should check the versions (if the file was loaded)
         missing, warnings = [], []
-        for attr in ('Name', 'Version', 'Home-page'):
+
+        for attr in ('Name', 'Version'):  # required by PEP 345
             if attr not in self:
                 missing.append(attr)
 
-        if _HAS_DOCUTILS:
+        if strict and missing != []:
+            msg = 'missing required metadata: %s' % ', '.join(missing)
+            raise MetadataMissingError(msg)
+
+        for attr in ('Home-page', 'Author'):
+            if attr not in self:
+                missing.append(attr)
+
+        if _HAS_DOCUTILS and restructuredtext:
             warnings.extend(self._check_rst_data(self['Description']))
 
         # checking metadata 1.2 (XXX needs to check 1.1, 1.0)
@@ -479,199 +551,13 @@
 
         return missing, warnings
 
+    # Mapping API
+
     def keys(self):
-        return _version2fieldlist(self.version)
+        return _version2fieldlist(self['Metadata-Version'])
 
     def values(self):
         return [self[key] for key in self.keys()]
 
     def items(self):
         return [(key, self[key]) for key in self.keys()]
-
-
-#
-# micro-language for PEP 345 environment markers
-#
-
-# allowed operators
-_OPERATORS = {'==': lambda x, y: x == y,
-              '!=': lambda x, y: x != y,
-              '>': lambda x, y: x > y,
-              '>=': lambda x, y: x >= y,
-              '<': lambda x, y: x < y,
-              '<=': lambda x, y: x <= y,
-              'in': lambda x, y: x in y,
-              'not in': lambda x, y: x not in y}
-
-
-def _operate(operation, x, y):
-    return _OPERATORS[operation](x, y)
-
-# restricted set of variables
-_VARS = {'sys.platform': sys.platform,
-         'python_version': sys.version[:3],
-         'python_full_version': sys.version.split(' ', 1)[0],
-         'os.name': os.name,
-         'platform.version': platform.version(),
-         'platform.machine': platform.machine()}
-
-
-class _Operation(object):
-
-    def __init__(self, execution_context=None):
-        self.left = None
-        self.op = None
-        self.right = None
-        if execution_context is None:
-            execution_context = {}
-        self.execution_context = execution_context
-
-    def _get_var(self, name):
-        if name in self.execution_context:
-            return self.execution_context[name]
-        return _VARS[name]
-
-    def __repr__(self):
-        return '%s %s %s' % (self.left, self.op, self.right)
-
-    def _is_string(self, value):
-        if value is None or len(value) < 2:
-            return False
-        for delimiter in '"\'':
-            if value[0] == value[-1] == delimiter:
-                return True
-        return False
-
-    def _is_name(self, value):
-        return value in _VARS
-
-    def _convert(self, value):
-        if value in _VARS:
-            return self._get_var(value)
-        return value.strip('"\'')
-
-    def _check_name(self, value):
-        if value not in _VARS:
-            raise NameError(value)
-
-    def _nonsense_op(self):
-        msg = 'This operation is not supported : "%s"' % self
-        raise SyntaxError(msg)
-
-    def __call__(self):
-        # make sure we do something useful
-        if self._is_string(self.left):
-            if self._is_string(self.right):
-                self._nonsense_op()
-            self._check_name(self.right)
-        else:
-            if not self._is_string(self.right):
-                self._nonsense_op()
-            self._check_name(self.left)
-
-        if self.op not in _OPERATORS:
-            raise TypeError('Operator not supported "%s"' % self.op)
-
-        left = self._convert(self.left)
-        right = self._convert(self.right)
-        return _operate(self.op, left, right)
-
-
-class _OR(object):
-    def __init__(self, left, right=None):
-        self.left = left
-        self.right = right
-
-    def filled(self):
-        return self.right is not None
-
-    def __repr__(self):
-        return 'OR(%r, %r)' % (self.left, self.right)
-
-    def __call__(self):
-        return self.left() or self.right()
-
-
-class _AND(object):
-    def __init__(self, left, right=None):
-        self.left = left
-        self.right = right
-
-    def filled(self):
-        return self.right is not None
-
-    def __repr__(self):
-        return 'AND(%r, %r)' % (self.left, self.right)
-
-    def __call__(self):
-        return self.left() and self.right()
-
-
-class _CHAIN(object):
-
-    def __init__(self, execution_context=None):
-        self.ops = []
-        self.op_starting = True
-        self.execution_context = execution_context
-
-    def eat(self, toktype, tokval, rowcol, line, logical_line):
-        if toktype not in (NAME, OP, STRING, ENDMARKER):
-            raise SyntaxError('Type not supported "%s"' % tokval)
-
-        if self.op_starting:
-            op = _Operation(self.execution_context)
-            if len(self.ops) > 0:
-                last = self.ops[-1]
-                if isinstance(last, (_OR, _AND)) and not last.filled():
-                    last.right = op
-                else:
-                    self.ops.append(op)
-            else:
-                self.ops.append(op)
-            self.op_starting = False
-        else:
-            op = self.ops[-1]
-
-        if (toktype == ENDMARKER or
-            (toktype == NAME and tokval in ('and', 'or'))):
-            if toktype == NAME and tokval == 'and':
-                self.ops.append(_AND(self.ops.pop()))
-            elif toktype == NAME and tokval == 'or':
-                self.ops.append(_OR(self.ops.pop()))
-            self.op_starting = True
-            return
-
-        if isinstance(op, (_OR, _AND)) and op.right is not None:
-            op = op.right
-
-        if ((toktype in (NAME, STRING) and tokval not in ('in', 'not'))
-            or (toktype == OP and tokval == '.')):
-            if op.op is None:
-                if op.left is None:
-                    op.left = tokval
-                else:
-                    op.left += tokval
-            else:
-                if op.right is None:
-                    op.right = tokval
-                else:
-                    op.right += tokval
-        elif toktype == OP or tokval in ('in', 'not'):
-            if tokval == 'in' and op.op == 'not':
-                op.op = 'not in'
-            else:
-                op.op = tokval
-
-    def result(self):
-        for op in self.ops:
-            if not op():
-                return False
-        return True
-
-
-def _interpret(marker, execution_context=None):
-    """Interpret a marker and return a result depending on environment."""
-    marker = marker.strip()
-    operations = _CHAIN(execution_context)
-    tokenize(StringIO(marker).readline, operations.eat)
-    return operations.result()
diff --git a/distutils2/mkcfg.py b/distutils2/mkcfg.py
--- a/distutils2/mkcfg.py
+++ b/distutils2/mkcfg.py
@@ -20,17 +20,24 @@
 #  Ask for the dependencies.
 #  Ask for the Requires-Dist
 #  Ask for the Provides-Dist
+#  Ask for a description
 #  Detect scripts (not sure how.  #! outside of package?)
 
 import os
 import sys
+import glob
 import re
 import shutil
 from ConfigParser import RawConfigParser
 from textwrap import dedent
+try:
+    from hashlib import md5
+except ImportError:
+    from distutils2._backport.hashlib  import md5
 # importing this with an underscore as it should be replaced by the
 # dict form or another structures for all purposes
 from distutils2._trove import all_classifiers as _CLASSIFIERS_LIST
+from distutils2._backport import sysconfig
 
 _FILENAME = 'setup.cfg'
 
@@ -82,6 +89,10 @@
 Optionally, you can set other trove identifiers for things such as the
 human language, programming language, user interface, etc...
 ''',
+    'setup.py found': '''
+The setup.py script will be executed to retrieve the metadata.
+A wizard will be run if you answer "n",
+''',
 }
 
 # XXX everything needs docstrings and tests (both low-level tests of various
@@ -106,7 +117,7 @@
         if default and len(question) + len(default) > 70:
             prompt = '%s\n    [%s]: ' % (question, default)
     if lengthy or multiline:
-        prompt += '\n   >'
+        prompt += '\n   > '
 
     if not helptext:
         helptext = 'No additional help available.'
@@ -148,6 +159,7 @@
 
 CLASSIFIERS = _build_classifiers_dict(_CLASSIFIERS_LIST)
 
+
 def _build_licences(classifiers):
     res = []
     for index, item in enumerate(classifiers):
@@ -162,12 +174,15 @@
 class MainProgram(object):
     def __init__(self):
         self.configparser = None
-        self.classifiers = {}
+        self.classifiers = set([])
         self.data = {}
         self.data['classifier'] = self.classifiers
         self.data['packages'] = []
         self.data['modules'] = []
+        self.data['platform'] = []
+        self.data['resources'] = []
         self.data['extra_files'] = []
+        self.data['scripts'] = []
         self.load_config_file()
 
     def lookup_option(self, key):
@@ -178,6 +193,7 @@
     def load_config_file(self):
         self.configparser = RawConfigParser()
         # TODO replace with section in distutils config file
+        #XXX freedesktop
         self.configparser.read(os.path.expanduser('~/.mkcfg'))
         self.data['author'] = self.lookup_option('author')
         self.data['author_email'] = self.lookup_option('author_email')
@@ -194,6 +210,7 @@
         if not valuesDifferent:
             return
 
+        #XXX freedesktop
         fp = open(os.path.expanduser('~/.mkcfgpy'), 'w')
         try:
             self.configparser.write(fp)
@@ -201,30 +218,139 @@
             fp.close()
 
     def load_existing_setup_script(self):
-        raise NotImplementedError
-        # Ideas:
-        # - define a mock module to assign to sys.modules['distutils'] before
-        # importing the setup script as a module (or executing it); it would
-        # provide setup (a function that just returns its args as a dict),
-        # Extension (ditto), find_packages (the real function)
-        # - we could even mock Distribution and commands to handle more setup
-        # scripts
-        # - we could use a sandbox (http://bugs.python.org/issue8680)
-        # - the cleanest way is to parse the file, not import it, but there is
-        # no way to do that across versions (the compiler package is
-        # deprecated or removed in recent Pythons, the ast module is not
-        # present before 2.6)
+        """ Generate a setup.cfg from an existing setup.py.
+
+        It only exports the distutils metadata (setuptools specific metadata
+        is not actually supported).
+        """
+        setuppath = 'setup.py'
+        if not os.path.exists(setuppath):
+            return
+        else:
+            ans = ask_yn(('A legacy setup.py has been found.\n'
+                          'Would you like to convert it to a setup.cfg ?'),
+                         'y',
+                         _helptext['setup.py found'])
+            if ans != 'y':
+                return
+
+        data = self.data
+
+        def setup(**attrs):
+            """Mock the setup(**attrs) in order to retrive metadata."""
+            # use the distutils v1 processing to correctly parse metadata.
+            #XXX we could also use the setuptools distribution ???
+            from distutils.dist import Distribution
+            dist = Distribution(attrs)
+            dist.parse_config_files()
+            # 1. retrieve metadata fields that are quite similar between PEP 314 and PEP 345
+            labels = (('name',) * 2,
+                      ('version',) * 2,
+                      ('author',) * 2,
+                      ('author_email',) * 2,
+                      ('maintainer',) * 2,
+                      ('maintainer_email',) * 2,
+                      ('description', 'summary'),
+                      ('long_description', 'description'),
+                      ('url', 'home_page'),
+                      ('platforms', 'platform'))
+
+            if sys.version[:3] >= '2.5':
+                labels += (('provides', 'provides-dist'),
+                           ('obsoletes', 'obsoletes-dist'),
+                           ('requires', 'requires-dist'),)
+            get = lambda lab: getattr(dist.metadata, lab.replace('-', '_'))
+            data.update((new, get(old)) for (old, new) in labels if get(old))
+            # 2. retrieve data that requires special processing.
+            data['classifier'].update(dist.get_classifiers() or [])
+            data['scripts'].extend(dist.scripts or [])
+            data['packages'].extend(dist.packages or [])
+            data['modules'].extend(dist.py_modules or [])
+            # 2.1 data_files -> resources.
+            if dist.data_files:
+                if len(dist.data_files) < 2 or \
+                   isinstance(dist.data_files[1], str):
+                    dist.data_files = [('', dist.data_files)]
+                # add tokens in the destination paths
+                vars = {'distribution.name': data['name']}
+                path_tokens = sysconfig.get_paths(vars=vars).items()
+                # sort tokens to use the longest one first
+                # TODO chain two sorted with key arguments, remove cmp
+                path_tokens.sort(cmp=lambda x, y: cmp(len(y), len(x)),
+                                 key=lambda x: x[1])
+                for dest, srcs in (dist.data_files or []):
+                    dest = os.path.join(sys.prefix, dest)
+                    for tok, path in path_tokens:
+                        if dest.startswith(path):
+                            dest = ('{%s}' % tok) + dest[len(path):]
+                            files = [('/ '.join(src.rsplit('/', 1)), dest)
+                                     for src in srcs]
+                            data['resources'].extend(files)
+                            continue
+            # 2.2 package_data -> extra_files
+            package_dirs = dist.package_dir or {}
+            for package, extras in dist.package_data.iteritems() or []:
+                package_dir = package_dirs.get(package, package)
+                files = [os.path.join(package_dir, f) for f in extras]
+                data['extra_files'].extend(files)
+
+            # Use a README file if its content is the description
+            if "description" in data:
+                ref = md5(re.sub('\s', '', self.data['description']).lower())
+                ref = ref.digest()
+                for readme in glob.glob('README*'):
+                    fp = open(readme)
+                    try:
+                        contents = fp.read()
+                    finally:
+                        fp.close()
+                    val = md5(re.sub('\s', '', contents.lower())).digest()
+                    if val == ref:
+                        del data['description']
+                        data['description-file'] = readme
+                        break
+
+        # apply monkey patch to distutils (v1) and setuptools (if needed)
+        # (abort the feature if distutils v1 has been killed)
+        try:
+            import distutils.core as DC
+            DC.setup  # ensure distutils v1
+        except (ImportError, AttributeError):
+            return
+        saved_setups = [(DC, DC.setup)]
+        DC.setup = setup
+        try:
+            import setuptools
+            saved_setups.append((setuptools, setuptools.setup))
+            setuptools.setup = setup
+        except (ImportError, AttributeError):
+            pass
+        # get metadata by executing the setup.py with the patched setup(...)
+        success = False  # for python < 2.4
+        try:
+            pyenv = globals().copy()
+            execfile(setuppath, pyenv)
+            success = True
+        finally:  # revert monkey patches
+            for patched_module, original_setup in saved_setups:
+                patched_module.setup = original_setup
+        if not self.data:
+            raise ValueError('Unable to load metadata from setup.py')
+        return success
 
     def inspect_file(self, path):
         fp = open(path, 'r')
         try:
-            for line in [fp.readline() for _ in range(10)]:
+            for _ in xrange(10):
+                line = fp.readline()
                 m = re.match(r'^#!.*python((?P<major>\d)(\.\d+)?)?$', line)
                 if m:
                     if m.group('major') == '3':
-                        self.classifiers['Programming Language :: Python :: 3'] = 1
+                        self.classifiers.add(
+                            'Programming Language :: Python :: 3')
                     else:
-                        self.classifiers['Programming Language :: Python :: 2'] = 1
+                        self.classifiers.add(
+                        'Programming Language :: Python :: 2')
         finally:
             fp.close()
 
@@ -270,7 +396,6 @@
                         helptext=_helptext['extra_files']) == 'y':
                 self._set_multi('Extra file/dir name', 'extra_files')
 
-
         if ask_yn('Do you want to set Trove classifiers?',
                   helptext=_helptext['do_classifier']) == 'y':
             self.set_classifier()
@@ -290,7 +415,6 @@
         _pref = ['lib', 'include', 'dist', 'build', '.', '~']
         _suf = ['.pyc']
 
-
         def to_skip(path):
             path = relative(path)
 
@@ -317,7 +441,7 @@
         for root, dirs, files in os.walk(curdir):
             if to_skip(root):
                 continue
-            for dir_ in dirs:
+            for dir_ in sorted(dirs):
                 if to_skip(dir_):
                     continue
                 fullpath = os.path.join(root, dir_)
@@ -334,7 +458,7 @@
             if True in [root.startswith(path) for path in scanned]:
                 continue
 
-            for file in files:
+            for file in sorted(files):
                 fullpath = os.path.join(root, file)
                 if to_skip(fullpath):
                     continue
@@ -347,8 +471,6 @@
     def _set_multi(self, question, name):
         existing_values = self.data[name]
         value = ask(question, helptext=_helptext[name]).strip()
-        if value == '':
-            return
         if value not in existing_values:
             existing_values.append(value)
 
@@ -369,10 +491,10 @@
         if not trove:
             return
 
-        for key in sorted(trove.keys()):
+        for key in sorted(trove):
             if len(trove[key]) == 0:
                 if ask_yn('Add "%s"' % desc[4:] + ' :: ' + key, 'n') == 'y':
-                    classifiers[desc[4:] + ' :: ' + key] = 1
+                    classifiers.add(desc[4:] + ' :: ' + key)
                 continue
 
             if ask_yn('Do you want to set items under\n   "%s" (%d sub-items)'
@@ -423,7 +545,7 @@
                 print ("ERROR: Invalid selection, type a number from the list "
                        "above.")
 
-            classifiers[_CLASSIFIERS_LIST[index]] = 1
+            classifiers.add(_CLASSIFIERS_LIST[index])
             return
 
     def set_devel_status(self, classifiers):
@@ -450,14 +572,14 @@
                            'Development Status :: 5 - Production/Stable',
                            'Development Status :: 6 - Mature',
                            'Development Status :: 7 - Inactive'][choice]
-                    classifiers[key] = 1
+                    classifiers.add(key)
                     return
                 except (IndexError, ValueError):
                     print ("ERROR: Invalid selection, type a single digit "
                            "number.")
 
     def _dotted_packages(self, data):
-        packages = sorted(data.keys())
+        packages = sorted(data)
         modified_pkgs = []
         for pkg in packages:
             pkg = pkg.lstrip('./')
@@ -477,32 +599,45 @@
         fp = open(_FILENAME, 'w')
         try:
             fp.write('[metadata]\n')
-            fp.write('name = %s\n' % self.data['name'])
-            fp.write('version = %s\n' % self.data['version'])
-            fp.write('author = %s\n' % self.data['author'])
-            fp.write('author_email = %s\n' % self.data['author_email'])
-            fp.write('summary = %s\n' % self.data['summary'])
-            fp.write('home_page = %s\n' % self.data['home_page'])
+            # simple string entries
+            for name in ('name', 'version', 'summary', 'download_url'):
+                fp.write('%s = %s\n' % (name, self.data.get(name, 'UNKNOWN')))
+            # optional string entries
+            if 'keywords' in self.data and self.data['keywords']:
+                fp.write('keywords = %s\n' % ' '.join(self.data['keywords']))
+            for name in ('home_page', 'author', 'author_email',
+                         'maintainer', 'maintainer_email', 'description-file'):
+                if name in self.data and self.data[name]:
+                    fp.write('%s = %s\n' % (name, self.data[name]))
+            if 'description' in self.data:
+                fp.write(
+                    'description = %s\n'
+                    % '\n       |'.join(self.data['description'].split('\n')))
+            # multiple use string entries
+            for name in ('platform', 'supported-platform', 'classifier',
+                         'requires-dist', 'provides-dist', 'obsoletes-dist',
+                         'requires-external'):
+                if not(name in self.data and self.data[name]):
+                    continue
+                fp.write('%s = ' % name)
+                fp.write(''.join('    %s\n' % val
+                                 for val in self.data[name]).lstrip())
+            fp.write('\n[files]\n')
+            for name in ('packages', 'modules', 'scripts',
+                         'package_data', 'extra_files'):
+                if not(name in self.data and self.data[name]):
+                    continue
+                fp.write('%s = %s\n'
+                         % (name, '\n    '.join(self.data[name]).strip()))
+            fp.write('\nresources =\n')
+            for src, dest in self.data['resources']:
+                fp.write('    %s = %s\n' % (src, dest))
             fp.write('\n')
-            if len(self.data['classifier']) > 0:
-                classifiers = '\n'.join(['    %s' % clas for clas in
-                                         self.data['classifier']])
-                fp.write('classifier = %s\n' % classifiers.strip())
-                fp.write('\n')
 
-            fp.write('[files]\n')
-            for element in ('packages', 'modules', 'extra_files'):
-                if len(self.data[element]) == 0:
-                    continue
-                items = '\n'.join(['    %s' % item for item in
-                                  self.data[element]])
-                fp.write('%s = %s\n' % (element, items.strip()))
-
-            fp.write('\n')
         finally:
             fp.close()
 
-        os.chmod(_FILENAME, 0755)
+        os.chmod(_FILENAME, 0644)
         print 'Wrote "%s".' % _FILENAME
 
 
@@ -510,11 +645,12 @@
     """Main entry point."""
     program = MainProgram()
     # uncomment when implemented
-    #program.load_existing_setup_script()
-    program.inspect_directory()
-    program.query_user()
-    program.update_config_file()
+    if not program.load_existing_setup_script():
+        program.inspect_directory()
+        program.query_user()
+        program.update_config_file()
     program.write_setup_script()
+    # distutils2.util.cfg_to_args()
 
 
 if __name__ == '__main__':
diff --git a/distutils2/resources.py b/distutils2/resources.py
new file mode 100644
--- /dev/null
+++ b/distutils2/resources.py
@@ -0,0 +1,25 @@
+import os
+
+from distutils2.util import iglob
+
+
+def _rel_path(base, path):
+    assert path.startswith(base)
+    return path[len(base):].lstrip('/')
+
+
+def resources_dests(resources_root, rules):
+    """find destination of resources files"""
+    destinations = {}
+    for (base, suffix, dest) in rules:
+        prefix = os.path.join(resources_root, base)
+        for abs_base in iglob(prefix):
+            abs_glob = os.path.join(abs_base, suffix)
+            for abs_path in iglob(abs_glob):
+                resource_file = _rel_path(resources_root, abs_path)
+                if dest is None:  # remove the entry if it was here
+                    destinations.pop(resource_file, None)
+                else:
+                    rel_path = _rel_path(abs_base, abs_path)
+                    destinations[resource_file] = os.path.join(dest, rel_path)
+    return destinations
diff --git a/distutils2/run.py b/distutils2/run.py
--- a/distutils2/run.py
+++ b/distutils2/run.py
@@ -1,12 +1,16 @@
 import os
 import sys
 from optparse import OptionParser
+import logging
 
-from distutils2.util import grok_environment_error
+from distutils2 import logger
 from distutils2.errors import (DistutilsSetupError, DistutilsArgError,
                                DistutilsError, CCompilerError)
 from distutils2.dist import Distribution
 from distutils2 import __version__
+from distutils2._backport.pkgutil import get_distributions, get_distribution
+from distutils2.depgraph import generate_graph
+from distutils2.install import install, remove
 
 # This is a barebones help message generated displayed when the user
 # runs the setup script with no arguments at all.  More useful help
@@ -78,10 +82,10 @@
         dist = distclass(attrs)
     except DistutilsSetupError, msg:
         if 'name' in attrs:
-            raise SystemExit, "error in %s setup command: %s" % \
-                  (attrs['name'], msg)
+            raise SystemExit("error in %s setup command: %s" % \
+                  (attrs['name'], msg))
         else:
-            raise SystemExit, "error in setup command: %s" % msg
+            raise SystemExit("error in setup command: %s" % msg)
 
     # Find and parse the config file(s): they will override options from
     # the setup script, but be overridden by the command line.
@@ -93,43 +97,139 @@
     try:
         res = dist.parse_command_line()
     except DistutilsArgError, msg:
-        raise SystemExit, gen_usage(dist.script_name) + "\nerror: %s" % msg
+        raise SystemExit(gen_usage(dist.script_name) + "\nerror: %s" % msg)
 
     # And finally, run all the commands found on the command line.
     if res:
         try:
             dist.run_commands()
         except KeyboardInterrupt:
-            raise SystemExit, "interrupted"
-        except (IOError, os.error), exc:
-            error = grok_environment_error(exc)
-            raise SystemExit, error
-
-        except (DistutilsError,
-                CCompilerError), msg:
-            raise SystemExit, "error: " + str(msg)
+            raise SystemExit("interrupted")
+        except (IOError, os.error, DistutilsError, CCompilerError), msg:
+            raise SystemExit("error: " + str(msg))
 
     return dist
 
 
+def _set_logger():
+    logger.setLevel(logging.INFO)
+    sth = logging.StreamHandler(sys.stderr)
+    sth.setLevel(logging.INFO)
+    logger.addHandler(sth)
+    logger.propagate = 0
+
+
 def main():
-    """Main entry point for Distutils2"""
+    """Main entry point for Distutils2
+
+    Execute an action or delegate to the commands system.
+    """
+    _set_logger()
     parser = OptionParser()
     parser.disable_interspersed_args()
+    parser.usage = '%prog [options] cmd1 cmd2 ..'
+
     parser.add_option("-v", "--version",
                   action="store_true", dest="version", default=False,
                   help="Prints out the version of Distutils2 and exits.")
 
+    parser.add_option("-m", "--metadata",
+                  action="append", dest="metadata", default=[],
+                  help="List METADATA metadata or 'all' for all metadatas.")
+
+    parser.add_option("-s", "--search",
+                  action="store", dest="search", default=None,
+                  help="Search for installed distributions.")
+
+    parser.add_option("-g", "--graph",
+                  action="store", dest="graph", default=None,
+                  help="Display the graph for a given installed distribution.")
+
+    parser.add_option("-f", "--full-graph",
+                  action="store_true", dest="fgraph", default=False,
+                  help="Display the full graph for installed distributions.")
+
+    parser.add_option("-i", "--install",
+                  action="store", dest="install",
+                  help="Install a project.")
+
+    parser.add_option("-r", "--remove",
+                  action="store", dest="remove",
+                  help="Remove a project.")
+
     options, args = parser.parse_args()
     if options.version:
         print('Distutils2 %s' % __version__)
-        sys.exit(0)
+        return 0
+
+    if len(options.metadata):
+        from distutils2.dist import Distribution
+        dist = Distribution()
+        dist.parse_config_files()
+        metadata = dist.metadata
+
+        if 'all' in options.metadata:
+            keys = metadata.keys()
+        else:
+            keys = options.metadata
+            if len(keys) == 1:
+                print metadata[keys[0]]
+                return
+
+        for key in keys:
+            if key in metadata:
+                print(metadata._convert_name(key) + ':')
+                value = metadata[key]
+                if isinstance(value, list):
+                    for v in value:
+                        print('    ' + v)
+                else:
+                    print('    ' + value.replace('\n', '\n    '))
+        return 0
+
+    if options.search is not None:
+        search = options.search.lower()
+        for dist in get_distributions(use_egg_info=True):
+            name = dist.name.lower()
+            if search in name:
+                print('%s %s at %s' % (dist.name, dist.metadata['version'],
+                                     dist.path))
+
+        return 0
+
+    if options.graph is not None:
+        name = options.graph
+        dist = get_distribution(name, use_egg_info=True)
+        if dist is None:
+            print('Distribution not found.')
+        else:
+            dists = get_distributions(use_egg_info=True)
+            graph = generate_graph(dists)
+            print(graph.repr_node(dist))
+
+        return 0
+
+    if options.fgraph:
+        dists = get_distributions(use_egg_info=True)
+        graph = generate_graph(dists)
+        print(graph)
+        return 0
+
+    if options.install is not None:
+        install(options.install)
+        return 0
+
+    if options.remove is not None:
+        remove(options.remove)
+        return 0
 
     if len(args) == 0:
         parser.print_help()
+        return 0
 
     commands_main()
-    sys.exit(0)
+    return 0
+
 
 if __name__ == '__main__':
-    main()
+    sys.exit(main())
diff --git a/distutils2/tests/pypi_server.py b/distutils2/tests/pypi_server.py
--- a/distutils2/tests/pypi_server.py
+++ b/distutils2/tests/pypi_server.py
@@ -375,6 +375,7 @@
 
     def __init__(self, dists=[]):
         self._dists = dists
+        self._search_result = []
 
     def add_distributions(self, dists):
         for dist in dists:
@@ -400,7 +401,7 @@
                 self._dists.append(dist)
         return [r.search_result() for r in results]
 
-    def list_package(self):
+    def list_packages(self):
         return [d.name for d in self._dists]
 
     def package_releases(self, package_name, show_hidden=False):
diff --git a/distutils2/tests/pypiserver/downloads_with_md5/simple/foobar/foobar-0.1.tar.gz b/distutils2/tests/pypiserver/downloads_with_md5/simple/foobar/foobar-0.1.tar.gz
index 0000000000000000000000000000000000000000..333961eb18a6e7db80fefd41c339ab218d5180c4
GIT binary patch
literal 110
zc$|~(=3uy!>FUeC{PvtR-ysJc)&sVu<PP%fK3N&$;N1M<j{R(=POen}XGQA#H*w#;
z=4~0pQ=DD>?9yZ7`(A1Di)P(6s!I71JWZ;--fWND`LA)=lAmk-7Jbj=XMlnFEsQ#U
Kd|Vkc7#IK&xGYxy

diff --git a/distutils2/tests/support.py b/distutils2/tests/support.py
--- a/distutils2/tests/support.py
+++ b/distutils2/tests/support.py
@@ -17,10 +17,11 @@
             super(SomeTestCase, self).setUp()
             ... # other setup code
 
-Read each class' docstring to see its purpose and usage.
+Also provided is a DummyCommand class, useful to mock commands in the
+tests of another command that needs them, a create_distribution function
+and a skip_unless_symlink decorator.
 
-Also provided is a DummyCommand class, useful to mock commands in the
-tests of another command that needs them (see docstring).
+Each class or function has a docstring to explain its purpose and usage.
 """
 
 import os
@@ -35,7 +36,8 @@
 from distutils2.tests import unittest
 
 __all__ = ['LoggingCatcher', 'WarningsCatcher', 'TempdirManager',
-           'EnvironGuard', 'DummyCommand', 'unittest']
+           'EnvironGuard', 'DummyCommand', 'unittest', 'create_distribution',
+           'skip_unless_symlink']
 
 
 class LoggingCatcher(object):
@@ -49,6 +51,9 @@
 
     def setUp(self):
         super(LoggingCatcher, self).setUp()
+        # TODO read the new logging docs and/or the python-dev posts about
+        # logging and tests to properly use a handler instead of
+        # monkey-patching
         self.old_log = logger._log
         logger._log = self._log
         logger.setLevel(logging.INFO)
@@ -135,7 +140,7 @@
         finally:
             f.close()
 
-    def create_dist(self, pkg_name='foo', **kw):
+    def create_dist(self, **kw):
         """Create a stub distribution object and files.
 
         This function creates a Distribution instance (use keyword arguments
@@ -143,18 +148,35 @@
         (currently an empty directory).
 
         It returns the path to the directory and the Distribution instance.
-        You can use TempdirManager.write_file to write any file in that
+        You can use self.write_file to write any file in that
         directory, e.g. setup scripts or Python modules.
         """
         # Late import so that third parties can import support without
         # loading a ton of distutils2 modules in memory.
         from distutils2.dist import Distribution
+        if 'name' not in kw:
+            kw['name'] = 'foo'
         tmp_dir = self.mkdtemp()
-        pkg_dir = os.path.join(tmp_dir, pkg_name)
-        os.mkdir(pkg_dir)
+        project_dir = os.path.join(tmp_dir, kw['name'])
+        os.mkdir(project_dir)
         dist = Distribution(attrs=kw)
-        return pkg_dir, dist
+        return project_dir, dist
 
+    def assertIsFile(self, *args):
+        path = os.path.join(*args)
+        dirname = os.path.dirname(path)
+        file = os.path.basename(path)
+        if os.path.isdir(dirname):
+            files = os.listdir(dirname)
+            msg = "%s not found in %s: %s" % (file, dirname, files)
+            assert os.path.isfile(path), msg
+        else:
+            raise AssertionError(
+                    '%s not found. %s does not exist' % (file, dirname))
+
+    def assertIsNotFile(self, *args):
+        path = os.path.join(*args)
+        assert not os.path.isfile(path), "%s exist" % path
 
 class EnvironGuard(object):
     """TestCase-compatible mixin to save and restore the environment."""
@@ -211,3 +233,9 @@
     d.parse_command_line()
     return d
 
+
+try:
+    from test.test_support import skip_unless_symlink
+except ImportError:
+    skip_unless_symlink = unittest.skip(
+        'requires test.test_support.skip_unless_symlink')
diff --git a/distutils2/tests/test_command_bdist.py b/distutils2/tests/test_command_bdist.py
--- a/distutils2/tests/test_command_bdist.py
+++ b/distutils2/tests/test_command_bdist.py
@@ -1,13 +1,31 @@
 """Tests for distutils.command.bdist."""
 
+from distutils2 import util
 from distutils2.tests import run_unittest
 
-from distutils2.command.bdist import bdist
-from distutils2.tests import unittest, support
+from distutils2.command.bdist import bdist, show_formats
+from distutils2.tests import unittest, support, captured_stdout
+
 
 class BuildTestCase(support.TempdirManager,
                     unittest.TestCase):
 
+    def _mock_get_platform(self):
+        self._get_platform_called = True
+        return self._get_platform()
+
+    def setUp(self):
+        super(BuildTestCase, self).setUp()
+
+        # mock util.get_platform
+        self._get_platform_called = False
+        self._get_platform = util.get_platform
+        util.get_platform = self._mock_get_platform
+
+    def tearDown(self):
+        super(BuildTestCase, self).tearDown()
+        util.get_platform = self._get_platform
+
     def test_formats(self):
 
         # let's create a command and make sure
@@ -22,12 +40,36 @@
         # XXX an explicit list in bdist is
         # not the best way to  bdist_* commands
         # we should add a registry
-        formats = ['zip', 'gztar', 'bztar', 'ztar', 'tar', 'wininst', 'msi']
-        formats.sort()
-        found = cmd.format_command.keys()
-        found.sort()
+        formats = sorted(('zip', 'gztar', 'bztar', 'ztar',
+                          'tar', 'wininst', 'msi'))
+        found = sorted(cmd.format_command)
         self.assertEqual(found, formats)
 
+    def test_skip_build(self):
+        pkg_pth, dist = self.create_dist()
+        cmd = bdist(dist)
+        cmd.skip_build = False
+        cmd.formats = ['ztar']
+        cmd.ensure_finalized()
+        self.assertFalse(self._get_platform_called)
+
+        pkg_pth, dist = self.create_dist()
+        cmd = bdist(dist)
+        cmd.skip_build = True
+        cmd.formats = ['ztar']
+        cmd.ensure_finalized()
+        self.assertTrue(self._get_platform_called)
+
+    def test_show_formats(self):
+        __, stdout = captured_stdout(show_formats)
+
+        # the output should be a header line + one line per format
+        num_formats = len(bdist.format_commands)
+        output = [line for line in stdout.split('\n')
+                  if line.strip().startswith('--formats=')]
+        self.assertEqual(len(output), num_formats)
+
+
 def test_suite():
     return unittest.makeSuite(BuildTestCase)
 
diff --git a/distutils2/tests/test_command_build_ext.py b/distutils2/tests/test_command_build_ext.py
--- a/distutils2/tests/test_command_build_ext.py
+++ b/distutils2/tests/test_command_build_ext.py
@@ -289,7 +289,7 @@
 
         # inplace = 0, cmd.package = 'bar'
         build_py = cmd.get_finalized_command('build_py')
-        build_py.package_dir = {'': 'bar'}
+        build_py.package_dir = 'bar'
         path = cmd.get_ext_fullpath('foo')
         # checking that the last directory is the build_dir
         path = os.path.split(path)[0]
@@ -318,7 +318,7 @@
         dist = Distribution()
         cmd = build_ext(dist)
         cmd.inplace = 1
-        cmd.distribution.package_dir = {'': 'src'}
+        cmd.distribution.package_dir = 'src'
         cmd.distribution.packages = ['lxml', 'lxml.html']
         curdir = os.getcwd()
         wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext)
@@ -334,7 +334,7 @@
 
         # building twisted.runner.portmap not inplace
         build_py = cmd.get_finalized_command('build_py')
-        build_py.package_dir = {}
+        build_py.package_dir = None
         cmd.distribution.packages = ['twisted', 'twisted.runner.portmap']
         path = cmd.get_ext_fullpath('twisted.runner.portmap')
         wanted = os.path.join(curdir, 'tmpdir', 'twisted', 'runner',
diff --git a/distutils2/tests/test_command_build_py.py b/distutils2/tests/test_command_build_py.py
--- a/distutils2/tests/test_command_build_py.py
+++ b/distutils2/tests/test_command_build_py.py
@@ -17,12 +17,14 @@
 
     def test_package_data(self):
         sources = self.mkdtemp()
-        f = open(os.path.join(sources, "__init__.py"), "w")
+        pkg_dir = os.path.join(sources, 'pkg')
+        os.mkdir(pkg_dir)
+        f = open(os.path.join(pkg_dir, "__init__.py"), "w")
         try:
             f.write("# Pretend this is a package.")
         finally:
             f.close()
-        f = open(os.path.join(sources, "README.txt"), "w")
+        f = open(os.path.join(pkg_dir, "README.txt"), "w")
         try:
             f.write("Info about this package")
         finally:
@@ -31,8 +33,9 @@
         destination = self.mkdtemp()
 
         dist = Distribution({"packages": ["pkg"],
-                             "package_dir": {"pkg": sources}})
+                             "package_dir": sources})
         # script_name need not exist, it just need to be initialized
+
         dist.script_name = os.path.join(sources, "setup.py")
         dist.command_obj["build"] = support.DummyCommand(
             force=0,
@@ -42,7 +45,7 @@
             use_2to3=False)
         dist.packages = ["pkg"]
         dist.package_data = {"pkg": ["README.txt"]}
-        dist.package_dir = {"pkg": sources}
+        dist.package_dir = sources
 
         cmd = build_py(dist)
         cmd.compile = 1
@@ -68,19 +71,20 @@
 
         # create the distribution files.
         sources = self.mkdtemp()
-        open(os.path.join(sources, "__init__.py"), "w").close()
-
-        testdir = os.path.join(sources, "doc")
+        pkg = os.path.join(sources, 'pkg')
+        os.mkdir(pkg)
+        open(os.path.join(pkg, "__init__.py"), "w").close()
+        testdir = os.path.join(pkg, "doc")
         os.mkdir(testdir)
         open(os.path.join(testdir, "testfile"), "w").close()
 
         os.chdir(sources)
         old_stdout = sys.stdout
-        sys.stdout = StringIO.StringIO()
+        #sys.stdout = StringIO.StringIO()
 
         try:
             dist = Distribution({"packages": ["pkg"],
-                                 "package_dir": {"pkg": ""},
+                                 "package_dir": sources,
                                  "package_data": {"pkg": ["doc/*"]}})
             # script_name need not exist, it just need to be initialized
             dist.script_name = os.path.join(sources, "setup.py")
@@ -89,7 +93,7 @@
 
             try:
                 dist.run_commands()
-            except DistutilsFileError:
+            except DistutilsFileError, e:
                 self.fail("failed package_data test when package_dir is ''")
         finally:
             # Restore state.
@@ -112,7 +116,7 @@
         finally:
             sys.dont_write_bytecode = old_dont_write_bytecode
 
-        self.assertTrue('byte-compiling is disabled' in self.logs[0][1])
+        self.assertIn('byte-compiling is disabled', self.logs[0][2][1])
 
 def test_suite():
     return unittest.makeSuite(BuildPyTestCase)
diff --git a/distutils2/tests/test_command_check.py b/distutils2/tests/test_command_check.py
--- a/distutils2/tests/test_command_check.py
+++ b/distutils2/tests/test_command_check.py
@@ -4,6 +4,8 @@
 from distutils2.metadata import _HAS_DOCUTILS
 from distutils2.tests import unittest, support
 from distutils2.errors import DistutilsSetupError
+from distutils2.errors import MetadataMissingError
+
 
 class CheckTestCase(support.LoggingCatcher,
                     support.TempdirManager,
@@ -11,7 +13,7 @@
 
     def _run(self, metadata=None, **options):
         if metadata is None:
-            metadata = {}
+            metadata = {'name': 'xxx', 'version': 'xxx'}
         pkg_info, dist = self.create_dist(**metadata)
         cmd = check(dist)
         cmd.initialize_options()
@@ -33,19 +35,60 @@
         # any warning anymore
         metadata = {'home_page': 'xxx', 'author': 'xxx',
                     'author_email': 'xxx',
-                    'name': 'xxx', 'version': 'xxx'
+                    'name': 'xxx', 'version': 'xxx',
                     }
         cmd = self._run(metadata)
         self.assertEqual(len(cmd._warnings), 0)
 
         # now with the strict mode, we should
         # get an error if there are missing metadata
-        self.assertRaises(DistutilsSetupError, self._run, {}, **{'strict': 1})
+        self.assertRaises(MetadataMissingError, self._run, {}, **{'strict': 1})
+        self.assertRaises(DistutilsSetupError, self._run,
+            {'name': 'xxx', 'version': 'xxx'}, **{'strict': 1})
 
         # and of course, no error when all metadata fields are present
         cmd = self._run(metadata, strict=1)
         self.assertEqual(len(cmd._warnings), 0)
 
+    def test_check_metadata_1_2(self):
+        # let's run the command with no metadata at all
+        # by default, check is checking the metadata
+        # should have some warnings
+        cmd = self._run()
+        self.assertTrue(len(cmd._warnings) > 0)
+
+        # now let's add the required fields
+        # and run it again, to make sure we don't get
+        # any warning anymore
+        # let's use requires_python as a marker to enforce
+        # Metadata-Version 1.2
+        metadata = {'home_page': 'xxx', 'author': 'xxx',
+                    'author_email': 'xxx',
+                    'name': 'xxx', 'version': 'xxx',
+                    'requires_python': '2.4',
+                    }
+        cmd = self._run(metadata)
+        self.assertEqual(len(cmd._warnings), 1)
+
+        # now with the strict mode, we should
+        # get an error if there are missing metadata
+        self.assertRaises(MetadataMissingError, self._run, {}, **{'strict': 1})
+        self.assertRaises(DistutilsSetupError, self._run,
+            {'name': 'xxx', 'version': 'xxx'}, **{'strict': 1})
+
+        # complain about version format
+        self.assertRaises(DistutilsSetupError, self._run, metadata,
+            **{'strict': 1})
+
+        # now with correct version format
+        metadata = {'home_page': 'xxx', 'author': 'xxx',
+                    'author_email': 'xxx',
+                    'name': 'xxx', 'version': '1.2',
+                    'requires_python': '2.4',
+                    }
+        cmd = self._run(metadata, strict=1)
+        self.assertEqual(len(cmd._warnings), 0)
+
     @unittest.skipUnless(_HAS_DOCUTILS, "requires docutils")
     def test_check_restructuredtext(self):
         # let's see if it detects broken rest in long_description
@@ -62,8 +105,10 @@
 
     def test_check_all(self):
 
-        metadata = {'home_page': 'xxx', 'author': 'xxx'}
         self.assertRaises(DistutilsSetupError, self._run,
+                          {'name': 'xxx', 'version': 'xxx'}, **{'strict': 1,
+                                 'all': 1})
+        self.assertRaises(MetadataMissingError, self._run,
                           {}, **{'strict': 1,
                                  'all': 1})
 
@@ -75,7 +120,7 @@
         cmd = check(dist)
         cmd.check_hooks_resolvable()
         self.assertEqual(len(cmd._warnings), 1)
-        
+
 
 def test_suite():
     return unittest.makeSuite(CheckTestCase)
diff --git a/distutils2/tests/test_command_cmd.py b/distutils2/tests/test_command_cmd.py
--- a/distutils2/tests/test_command_cmd.py
+++ b/distutils2/tests/test_command_cmd.py
@@ -17,22 +17,6 @@
         dist = Distribution()
         self.cmd = MyCmd(dist)
 
-    def test_ensure_string_list(self):
-
-        cmd = self.cmd
-        cmd.not_string_list = ['one', 2, 'three']
-        cmd.yes_string_list = ['one', 'two', 'three']
-        cmd.not_string_list2 = object()
-        cmd.yes_string_list2 = 'ok'
-        cmd.ensure_string_list('yes_string_list')
-        cmd.ensure_string_list('yes_string_list2')
-
-        self.assertRaises(DistutilsOptionError,
-                          cmd.ensure_string_list, 'not_string_list')
-
-        self.assertRaises(DistutilsOptionError,
-                          cmd.ensure_string_list, 'not_string_list2')
-
     def test_make_file(self):
 
         cmd = self.cmd
@@ -82,12 +66,20 @@
         cmd.ensure_string_list('option1')
         self.assertEqual(cmd.option1, ['ok', 'dok'])
 
-        cmd.option2 = ['xxx', 'www']
-        cmd.ensure_string_list('option2')
+        cmd.yes_string_list = ['one', 'two', 'three']
+        cmd.yes_string_list2 = 'ok'
+        cmd.ensure_string_list('yes_string_list')
+        cmd.ensure_string_list('yes_string_list2')
+        self.assertEqual(cmd.yes_string_list, ['one', 'two', 'three'])
+        self.assertEqual(cmd.yes_string_list2, ['ok'])
 
-        cmd.option3 = ['ok', 2]
-        self.assertRaises(DistutilsOptionError, cmd.ensure_string_list,
-                          'option3')
+        cmd.not_string_list = ['one', 2, 'three']
+        cmd.not_string_list2 = object()
+        self.assertRaises(DistutilsOptionError,
+                          cmd.ensure_string_list, 'not_string_list')
+
+        self.assertRaises(DistutilsOptionError,
+                          cmd.ensure_string_list, 'not_string_list2')
 
     def test_ensure_filename(self):
         cmd = self.cmd
diff --git a/distutils2/tests/test_command_install_data.py b/distutils2/tests/test_command_install_data.py
--- a/distutils2/tests/test_command_install_data.py
+++ b/distutils2/tests/test_command_install_data.py
@@ -1,7 +1,6 @@
 """Tests for distutils.command.install_data."""
-import sys
+import cmd
 import os
-import getpass
 
 from distutils2.command.install_data import install_data
 from distutils2.tests import unittest, support
@@ -12,21 +11,29 @@
                           unittest.TestCase):
 
     def test_simple_run(self):
+        from distutils2._backport.sysconfig import _SCHEMES as sysconfig_SCHEMES
+        from distutils2._backport.sysconfig import _get_default_scheme
+            #dirty but hit marmoute
+
+        old_scheme = sysconfig_SCHEMES
+
         pkg_dir, dist = self.create_dist()
         cmd = install_data(dist)
         cmd.install_dir = inst = os.path.join(pkg_dir, 'inst')
 
-        # data_files can contain
-        #  - simple files
-        #  - a tuple with a path, and a list of file
+        sysconfig_SCHEMES.set(_get_default_scheme(), 'inst',
+            os.path.join(pkg_dir, 'inst'))
+        sysconfig_SCHEMES.set(_get_default_scheme(), 'inst2',
+            os.path.join(pkg_dir, 'inst2'))
+
         one = os.path.join(pkg_dir, 'one')
         self.write_file(one, 'xxx')
         inst2 = os.path.join(pkg_dir, 'inst2')
         two = os.path.join(pkg_dir, 'two')
         self.write_file(two, 'xxx')
 
-        cmd.data_files = [one, (inst2, [two])]
-        self.assertEqual(cmd.get_inputs(), [one, (inst2, [two])])
+        cmd.data_files = {one : '{inst}/one', two : '{inst2}/two'}
+        self.assertItemsEqual(cmd.get_inputs(), [one, two])
 
         # let's run the command
         cmd.ensure_finalized()
@@ -53,21 +60,25 @@
 
         # now using root and empty dir
         cmd.root = os.path.join(pkg_dir, 'root')
-        inst3 = os.path.join(cmd.install_dir, 'inst3')
         inst4 = os.path.join(pkg_dir, 'inst4')
         three = os.path.join(cmd.install_dir, 'three')
         self.write_file(three, 'xx')
-        cmd.data_files = [one, (inst2, [two]),
-                          ('inst3', [three]),
-                          (inst4, [])]
+
+        sysconfig_SCHEMES.set(_get_default_scheme(), 'inst3', cmd.install_dir)
+
+        cmd.data_files = {one : '{inst}/one',
+                          two : '{inst2}/two',
+                          three : '{inst3}/three'}
         cmd.ensure_finalized()
         cmd.run()
 
         # let's check the result
-        self.assertEqual(len(cmd.get_outputs()), 4)
+        self.assertEqual(len(cmd.get_outputs()), 3)
         self.assertTrue(os.path.exists(os.path.join(inst2, rtwo)))
         self.assertTrue(os.path.exists(os.path.join(inst, rone)))
 
+        sysconfig_SCHEMES = old_scheme
+
 def test_suite():
     return unittest.makeSuite(InstallDataTestCase)
 
diff --git a/distutils2/tests/test_command_install_dist.py b/distutils2/tests/test_command_install_dist.py
--- a/distutils2/tests/test_command_install_dist.py
+++ b/distutils2/tests/test_command_install_dist.py
@@ -180,8 +180,8 @@
             cmd.user = 'user'
             self.assertRaises(DistutilsOptionError, cmd.finalize_options)
 
-    def test_record(self):
-
+    def test_old_record(self):
+        # test pre-PEP 376 --record option (outside dist-info dir)
         install_dir = self.mkdtemp()
         pkgdir, dist = self.create_dist()
 
@@ -189,11 +189,11 @@
         cmd = install_dist(dist)
         dist.command_obj['install_dist'] = cmd
         cmd.root = install_dir
-        cmd.record = os.path.join(pkgdir, 'RECORD')
+        cmd.record = os.path.join(pkgdir, 'filelist')
         cmd.ensure_finalized()
         cmd.run()
 
-        # let's check the RECORD file was created with four
+        # let's check the record file was created with four
         # lines, one for each .dist-info entry: METADATA,
         # INSTALLER, REQUSTED, RECORD
         f = open(cmd.record)
diff --git a/distutils2/tests/test_command_install_distinfo.py b/distutils2/tests/test_command_install_distinfo.py
--- a/distutils2/tests/test_command_install_distinfo.py
+++ b/distutils2/tests/test_command_install_distinfo.py
@@ -1,12 +1,11 @@
 """Tests for ``distutils2.command.install_distinfo``. """
 
 import os
-import sys
 import csv
 
 from distutils2.command.install_distinfo import install_distinfo
 from distutils2.command.cmd import Command
-from distutils2.metadata import DistributionMetadata
+from distutils2.metadata import Metadata
 from distutils2.tests import unittest, support
 
 try:
@@ -65,7 +64,7 @@
         self.assertEqual(open(os.path.join(dist_info, 'REQUESTED')).read(),
                          '')
         meta_path = os.path.join(dist_info, 'METADATA')
-        self.assertTrue(DistributionMetadata(path=meta_path).check())
+        self.assertTrue(Metadata(path=meta_path).check())
 
     def test_installer(self):
         pkg_dir, dist = self.create_dist(name='foo',
diff --git a/distutils2/tests/test_command_install_headers.py b/distutils2/tests/test_command_install_headers.py
--- a/distutils2/tests/test_command_install_headers.py
+++ b/distutils2/tests/test_command_install_headers.py
@@ -1,7 +1,5 @@
 """Tests for distutils.command.install_headers."""
-import sys
 import os
-import getpass
 
 from distutils2.command.install_headers import install_headers
 from distutils2.tests import unittest, support
diff --git a/distutils2/tests/test_command_install_lib.py b/distutils2/tests/test_command_install_lib.py
--- a/distutils2/tests/test_command_install_lib.py
+++ b/distutils2/tests/test_command_install_lib.py
@@ -97,7 +97,7 @@
         finally:
             sys.dont_write_bytecode = old_dont_write_bytecode
 
-        self.assertTrue('byte-compiling is disabled' in self.logs[0][1])
+        self.assertIn('byte-compiling is disabled', self.logs[0][2][1])
 
 def test_suite():
     return unittest.makeSuite(InstallLibTestCase)
diff --git a/distutils2/tests/test_command_register.py b/distutils2/tests/test_command_register.py
--- a/distutils2/tests/test_command_register.py
+++ b/distutils2/tests/test_command_register.py
@@ -1,6 +1,5 @@
 # -*- encoding: utf-8 -*-
 """Tests for distutils.command.register."""
-import sys
 import os
 import getpass
 import urllib2
@@ -87,6 +86,8 @@
     def tearDown(self):
         getpass.getpass = self._old_getpass
         urllib2.build_opener = self.old_opener
+        if hasattr(register_module, 'raw_input'):
+            del register_module.raw_input
         super(RegisterTestCase, self).tearDown()
 
     def _get_cmd(self, metadata=None):
@@ -109,7 +110,6 @@
 
         # patching raw_input and getpass.getpass
         # so register gets happy
-        #
         # Here's what we are faking :
         # use your existing login (choice 1.)
         # Username : 'tarek'
@@ -117,11 +117,7 @@
         # Save your login (y/N)? : 'y'
         inputs = RawInputs('1', 'tarek', 'y')
         register_module.raw_input = inputs.__call__
-        # let's run the command
-        try:
-            cmd.run()
-        finally:
-            del register_module.raw_input
+        cmd.run()
 
         # we should have a brand new .pypirc file
         self.assertTrue(os.path.exists(self.rc))
@@ -135,8 +131,8 @@
         # if we run the command again
         def _no_way(prompt=''):
             raise AssertionError(prompt)
+
         register_module.raw_input = _no_way
-
         cmd.show_response = 1
         cmd.run()
 
@@ -165,13 +161,10 @@
         cmd = self._get_cmd()
         inputs = RawInputs('2', 'tarek', 'tarek at ziade.org')
         register_module.raw_input = inputs.__call__
-        try:
-            # let's run the command
-            # FIXME does this send a real request? use a mock server
-            # also, silence self.announce (with LoggingCatcher)
-            cmd.run()
-        finally:
-            del register_module.raw_input
+        # let's run the command
+        # FIXME does this send a real request? use a mock server
+        # also, silence self.announce (with LoggingCatcher)
+        cmd.run()
 
         # we should have send a request
         self.assertTrue(self.conn.reqs, 1)
@@ -185,11 +178,7 @@
         cmd = self._get_cmd()
         inputs = RawInputs('3', 'tarek at ziade.org')
         register_module.raw_input = inputs.__call__
-        try:
-            # let's run the command
-            cmd.run()
-        finally:
-            del register_module.raw_input
+        cmd.run()
 
         # we should have send a request
         self.assertTrue(self.conn.reqs, 1)
@@ -206,9 +195,11 @@
         # long_description is not reSt compliant
 
         # empty metadata
-        cmd = self._get_cmd({})
+        cmd = self._get_cmd({'name': 'xxx', 'version': 'xxx'})
         cmd.ensure_finalized()
         cmd.strict = 1
+        inputs = RawInputs('1', 'tarek', 'y')
+        register_module.raw_input = inputs.__call__
         self.assertRaises(DistutilsSetupError, cmd.run)
 
         # metadata is OK but long_description is broken
@@ -230,22 +221,14 @@
         cmd.strict = 1
         inputs = RawInputs('1', 'tarek', 'y')
         register_module.raw_input = inputs.__call__
-        # let's run the command
-        try:
-            cmd.run()
-        finally:
-            del register_module.raw_input
+        cmd.run()
 
         # strict is not by default
         cmd = self._get_cmd()
         cmd.ensure_finalized()
         inputs = RawInputs('1', 'tarek', 'y')
         register_module.raw_input = inputs.__call__
-        # let's run the command
-        try:
-            cmd.run()
-        finally:
-            del register_module.raw_input
+        cmd.run()
 
     def test_register_pep345(self):
         cmd = self._get_cmd({})
diff --git a/distutils2/tests/test_command_sdist.py b/distutils2/tests/test_command_sdist.py
--- a/distutils2/tests/test_command_sdist.py
+++ b/distutils2/tests/test_command_sdist.py
@@ -45,7 +45,6 @@
 
 MANIFEST = """\
 # file GENERATED by distutils, do NOT edit
-README
 inroot.txt
 data%(sep)sdata.dt
 scripts%(sep)sscript.py
@@ -96,9 +95,6 @@
         dist.include_package_data = True
         cmd = sdist(dist)
         cmd.dist_dir = 'dist'
-        def _warn(*args):
-            pass
-        cmd.warn = _warn
         return dist, cmd
 
     @unittest.skipUnless(zlib, "requires zlib")
@@ -141,7 +137,7 @@
             zip_file.close()
 
         # making sure everything has been pruned correctly
-        self.assertEqual(len(content), 3)
+        self.assertEqual(len(content), 2)
 
     @unittest.skipUnless(zlib, "requires zlib")
     def test_make_distribution(self):
@@ -206,11 +202,10 @@
         self.write_file((some_dir, 'file.txt'), '#')
         self.write_file((some_dir, 'other_file.txt'), '#')
 
-        dist.data_files = [('data', ['data/data.dt',
-                                     'inroot.txt',
-                                     'notexisting']),
-                           'some/file.txt',
-                           'some/other_file.txt']
+        dist.data_files = {'data/data.dt' : '{appdata}/data.dt',
+                           'inroot.txt' : '{appdata}/inroot.txt',
+                           'some/file.txt' : '{appdata}/file.txt',
+                           'some/other_file.txt' : '{appdata}/other_file.txt'}
 
         # adding a script
         script_dir = join(self.tmp_dir, 'scripts')
@@ -236,7 +231,7 @@
             zip_file.close()
 
         # making sure everything was added
-        self.assertEqual(len(content), 10)
+        self.assertEqual(len(content), 9)
 
         # checking the MANIFEST
         manifest = open(join(self.tmp_dir, 'MANIFEST')).read()
@@ -245,14 +240,14 @@
     @unittest.skipUnless(zlib, "requires zlib")
     def test_metadata_check_option(self):
         # testing the `check-metadata` option
-        dist, cmd = self.get_cmd(metadata={})
+        dist, cmd = self.get_cmd(metadata={'name':'xxx', 'version':'xxx'})
 
         # this should raise some warnings !
         # with the `check` subcommand
         cmd.ensure_finalized()
         cmd.run()
         warnings = self.get_logs(logging.WARN)
-        self.assertEqual(len(warnings), 1)
+        self.assertEqual(len(warnings), 2)
 
         # trying with a complete set of metadata
         self.clear_logs()
@@ -264,7 +259,8 @@
         # removing manifest generated warnings
         warnings = [warn for warn in warnings if
                     not warn.endswith('-- skipping')]
-        self.assertEqual(len(warnings), 0)
+        # the remaining warning is about the use of the default file list
+        self.assertEqual(len(warnings), 1)
 
 
     def test_show_formats(self):
@@ -362,8 +358,7 @@
                         if line.strip() != '']
         finally:
             f.close()
-
-        self.assertEquals(len(manifest), 4)
+        self.assertEqual(len(manifest), 3)
 
         # adding a file
         self.write_file((self.tmp_dir, 'somecode', 'doc2.txt'), '#')
@@ -383,7 +378,7 @@
             f.close()
 
         # do we have the new file in MANIFEST ?
-        self.assertEquals(len(manifest2), 5)
+        self.assertEqual(len(manifest2), 4)
         self.assertIn('doc2.txt', manifest2[-1])
 
     def test_manifest_marker(self):
diff --git a/distutils2/tests/test_command_test.py b/distutils2/tests/test_command_test.py
--- a/distutils2/tests/test_command_test.py
+++ b/distutils2/tests/test_command_test.py
@@ -17,10 +17,6 @@
 from distutils2.dist import Distribution
 from distutils2._backport import pkgutil
 
-try:
-    any
-except NameError:
-    from distutils2._backport import any
 
 EXPECTED_OUTPUT_RE = r'''FAIL: test_blah \(myowntestmodule.SomeTest\)
 ----------------------------------------------------------------------
diff --git a/distutils2/tests/test_command_upload.py b/distutils2/tests/test_command_upload.py
--- a/distutils2/tests/test_command_upload.py
+++ b/distutils2/tests/test_command_upload.py
@@ -5,6 +5,7 @@
 
 from distutils2.command.upload import upload
 from distutils2.dist import Distribution
+from distutils2.errors import DistutilsOptionError
 
 from distutils2.tests import unittest, support
 from distutils2.tests.pypi_server import PyPIServer, PyPIServerTestCase
@@ -59,6 +60,14 @@
                                ('repository', 'http://pypi.python.org/pypi')):
             self.assertEqual(getattr(cmd, attr), expected)
 
+    def test_finalize_options_unsigned_identity_yields_exception(self):
+        self.write_file(self.rc, PYPIRC)
+        dist = Distribution()
+        cmd = upload(dist)
+        cmd.identity = True
+        cmd.sign = False
+        self.assertRaises(DistutilsOptionError, cmd.finalize_options) 
+
     def test_saved_password(self):
         # file with no password
         self.write_file(self.rc, PYPIRC_NOPASSWORD)
@@ -76,6 +85,11 @@
         cmd.finalize_options()
         self.assertEqual(cmd.password, 'xxx')
 
+    def test_upload_without_files_yields_exception(self):
+        dist = Distribution()
+        cmd = upload(dist)
+        self.assertRaises(DistutilsOptionError, cmd.run)
+
     def test_upload(self):
         path = os.path.join(self.tmp_dir, 'xxx')
         self.write_file(path)
diff --git a/distutils2/tests/test_config.py b/distutils2/tests/test_config.py
--- a/distutils2/tests/test_config.py
+++ b/distutils2/tests/test_config.py
@@ -1,10 +1,12 @@
-# -*- encoding: utf8 -*-
+# -*- encoding: utf-8 -*-
 """Tests for distutils.config."""
 import os
 import sys
 from StringIO import StringIO
 
 from distutils2.tests import unittest, support, run_unittest
+from distutils2.command.sdist import sdist
+from distutils2.errors import DistutilsFileError
 
 
 SETUP_CFG = """
@@ -16,7 +18,7 @@
 maintainer = Éric Araujo
 maintainer_email = merwok at netwok.org
 summary = A sample project demonstrating distutils2 packaging
-description-file = README
+description-file = %(description-file)s
 keywords = distutils2, packaging, sample project
 
 classifier =
@@ -47,9 +49,11 @@
   Fork in progress, http://bitbucket.org/Merwok/sample-distutils2-project
 
 [files]
+packages_root = src
+
 packages = one
-           src:two
-           src2:three
+           two
+           three
 
 modules = haven
 
@@ -61,10 +65,7 @@
 package_data =
   cheese = data/templates/*
 
-data_files =
-  bitmaps = bm/b1.gif, bm/b2.gif
-  config = cfg/data.cfg
-  /etc/init.d = init-script
+extra_files = %(extra-files)s
 
 # Replaces MANIFEST.in
 sdist_extra =
@@ -72,6 +73,11 @@
   recursive-include examples *.txt *.py
   prune examples/sample?/build
 
+resources=
+  bm/ {b1,b2}.gif = {icon}
+  Cf*/ *.CFG = {config}/baBar/
+  init_script = {script}/JunGle/
+
 [global]
 commands =
     distutils2.tests.test_config.FooBarBazTest
@@ -87,6 +93,34 @@
 sub_commands = foo
 """
 
+# Can not be merged with SETUP_CFG else install_dist
+# command will fail when trying to compile C sources
+EXT_SETUP_CFG = """
+[files]
+packages = one
+           two
+
+[extension=speed_coconuts]
+name = one.speed_coconuts
+sources = c_src/speed_coconuts.c
+extra_link_args = "`gcc -print-file-name=libgcc.a`" -shared
+define_macros = HAVE_CAIRO HAVE_GTK2
+libraries = gecodeint gecodekernel -- sys.platform != 'win32'
+    GecodeInt GecodeKernel -- sys.platform == 'win32'
+
+[extension=fast_taunt]
+name = three.fast_taunt
+sources = cxx_src/utils_taunt.cxx
+          cxx_src/python_module.cxx
+include_dirs = /usr/include/gecode
+    /usr/include/blitz
+extra_compile_args = -fPIC -O2
+    -DGECODE_VERSION=$(./gecode_version) -- sys.platform != 'win32'
+    /DGECODE_VERSION='win32' -- sys.platform == 'win32'
+language = cxx
+
+"""
+
 
 class DCompiler(object):
     name = 'd'
@@ -128,23 +162,46 @@
         super(ConfigTestCase, self).setUp()
         self.addCleanup(setattr, sys, 'stdout', sys.stdout)
         self.addCleanup(setattr, sys, 'stderr', sys.stderr)
+        sys.stdout = sys.stderr = StringIO()
+
         self.addCleanup(os.chdir, os.getcwd())
+        tempdir = self.mkdtemp()
+        os.chdir(tempdir)
+        self.tempdir = tempdir
+
+        self.addCleanup(setattr, sys, 'argv', sys.argv)
+
+    def write_setup(self, kwargs=None):
+        opts = {'description-file': 'README', 'extra-files':''}
+        if kwargs:
+            opts.update(kwargs)
+        self.write_file('setup.cfg', SETUP_CFG % opts)
+
+
+    def run_setup(self, *args):
+        # run setup with args
+        sys.stdout = StringIO()
+        sys.argv[:] = [''] + list(args)
+        old_sys = sys.argv[:]
+        try:
+            from distutils2.run import commands_main
+            dist = commands_main()
+        finally:
+            sys.argv[:] = old_sys
+        return dist
 
     def test_config(self):
-        tempdir = self.mkdtemp()
-        os.chdir(tempdir)
-        self.write_file('setup.cfg', SETUP_CFG)
+        self.write_setup()
         self.write_file('README', 'yeah')
+        os.mkdir('bm')
+        self.write_file(os.path.join('bm', 'b1.gif'), '')
+        self.write_file(os.path.join('bm', 'b2.gif'), '')
+        os.mkdir('Cfg')
+        self.write_file(os.path.join('Cfg', 'data.CFG'), '')
+        self.write_file('init_script', '')
 
         # try to load the metadata now
-        sys.stdout = StringIO()
-        sys.argv[:] = ['setup.py', '--version']
-        old_sys = sys.argv[:]
-        try:
-            from distutils2.run import main
-            dist = main()
-        finally:
-            sys.argv[:] = old_sys
+        dist = self.run_setup('--version')
 
         # sanity check
         self.assertEqual(sys.stdout.getvalue(), '0.6.4.dev1' + os.linesep)
@@ -183,15 +240,17 @@
                  'http://bitbucket.org/Merwok/sample-distutils2-project')]
         self.assertEqual(dist.metadata['Project-Url'], urls)
 
-
         self.assertEqual(dist.packages, ['one', 'two', 'three'])
         self.assertEqual(dist.py_modules, ['haven'])
         self.assertEqual(dist.package_data, {'cheese': 'data/templates/*'})
-        self.assertEqual(dist.data_files,
-            [('bitmaps ', ['bm/b1.gif', 'bm/b2.gif']),
-             ('config ', ['cfg/data.cfg']),
-             ('/etc/init.d ', ['init-script'])])
-        self.assertEqual(dist.package_dir['two'], 'src')
+        self.assertEqual(
+            {'bm/b1.gif' : '{icon}/b1.gif',
+             'bm/b2.gif' : '{icon}/b2.gif',
+             'Cfg/data.CFG' : '{config}/baBar/data.CFG',
+             'init_script' : '{script}/JunGle/init_script'},
+             dist.data_files)
+
+        self.assertEqual(dist.package_dir, 'src')
 
         # Make sure we get the foo command loaded.  We use a string comparison
         # instead of assertIsInstance because the class is not the same when
@@ -204,7 +263,7 @@
                          'FooBarBazTest')
 
         # did the README got loaded ?
-        self.assertEquals(dist.metadata['description'], 'yeah')
+        self.assertEqual(dist.metadata['description'], 'yeah')
 
         # do we have the D Compiler enabled ?
         from distutils2.compiler import new_compiler, _COMPILERS
@@ -212,10 +271,112 @@
         d = new_compiler(compiler='d')
         self.assertEqual(d.description, 'D Compiler')
 
+
+    def test_multiple_description_file(self):
+        self.write_setup({'description-file': 'README  CHANGES'})
+        self.write_file('README', 'yeah')
+        self.write_file('CHANGES', 'changelog2')
+        dist = self.run_setup('--version')
+        self.assertEqual(dist.metadata.requires_files, ['README', 'CHANGES'])
+
+    def test_multiline_description_file(self):
+        self.write_setup({'description-file': 'README\n  CHANGES'})
+        self.write_file('README', 'yeah')
+        self.write_file('CHANGES', 'changelog')
+        dist = self.run_setup('--version')
+        self.assertEqual(dist.metadata['description'], 'yeah\nchangelog')
+        self.assertEqual(dist.metadata.requires_files, ['README', 'CHANGES'])
+
+    def test_parse_extensions_in_config(self):
+        self.write_file('setup.cfg', EXT_SETUP_CFG)
+        dist = self.run_setup('--version')
+
+        ext_modules = dict((mod.name, mod) for mod in dist.ext_modules)
+        self.assertEqual(len(ext_modules), 2)
+        ext = ext_modules.get('one.speed_coconuts')
+        self.assertEqual(ext.sources, ['c_src/speed_coconuts.c'])
+        self.assertEqual(ext.define_macros, ['HAVE_CAIRO', 'HAVE_GTK2'])
+        libs = ['gecodeint', 'gecodekernel']
+        if sys.platform == 'win32':
+            libs = ['GecodeInt', 'GecodeKernel']
+        self.assertEqual(ext.libraries, libs)
+        self.assertEqual(ext.extra_link_args,
+            ['`gcc -print-file-name=libgcc.a`', '-shared'])
+
+        ext = ext_modules.get('three.fast_taunt')
+        self.assertEqual(ext.sources,
+            ['cxx_src/utils_taunt.cxx', 'cxx_src/python_module.cxx'])
+        self.assertEqual(ext.include_dirs,
+            ['/usr/include/gecode', '/usr/include/blitz'])
+        cargs = ['-fPIC', '-O2']
+        if sys.platform == 'win32':
+            cargs.append("/DGECODE_VERSION='win32'")
+        else:
+            cargs.append('-DGECODE_VERSION=$(./gecode_version)')
+        self.assertEqual(ext.extra_compile_args, cargs)
+        self.assertEqual(ext.language, 'cxx')
+
+
+    def test_metadata_requires_description_files_missing(self):
+        self.write_setup({'description-file': 'README\n  README2'})
+        self.write_file('README', 'yeah')
+        self.write_file('README2', 'yeah')
+        self.write_file('haven.py', '#')
+        self.write_file('script1.py', '#')
+        os.mkdir('scripts')
+        self.write_file(os.path.join('scripts', 'find-coconuts'), '#')
+        os.mkdir('bin')
+        self.write_file(os.path.join('bin', 'taunt'), '#')
+
+        os.mkdir('src')
+        for pkg in ('one', 'two', 'three'):
+            pkg = os.path.join('src', pkg)
+            os.mkdir(pkg)
+            self.write_file(os.path.join(pkg, '__init__.py'), '#')
+
+        dist = self.run_setup('--version')
+        cmd = sdist(dist)
+        cmd.finalize_options()
+        cmd.get_file_list()
+        self.assertRaises(DistutilsFileError, cmd.make_distribution)
+
+    def test_metadata_requires_description_files(self):
+        self.write_setup({'description-file': 'README\n  README2',
+                          'extra-files':'\n  README2'})
+        self.write_file('README', 'yeah')
+        self.write_file('README2', 'yeah')
+        self.write_file('haven.py', '#')
+        self.write_file('script1.py', '#')
+        os.mkdir('scripts')
+        self.write_file(os.path.join('scripts', 'find-coconuts'), '#')
+        os.mkdir('bin')
+        self.write_file(os.path.join('bin', 'taunt'), '#')
+
+        os.mkdir('src')
+        for pkg in ('one', 'two', 'three'):
+            pkg = os.path.join('src', pkg)
+            os.mkdir(pkg)
+            self.write_file(os.path.join(pkg, '__init__.py'), '#')
+
+        dist = self.run_setup('--description')
+        self.assertIn('yeah\nyeah\n', sys.stdout.getvalue())
+
+        cmd = sdist(dist)
+        cmd.finalize_options()
+        cmd.get_file_list()
+        self.assertRaises(DistutilsFileError, cmd.make_distribution)
+
+        self.write_setup({'description-file': 'README\n  README2',
+                          'extra-files': '\n  README2\n    README'})
+        dist = self.run_setup('--description')
+        cmd = sdist(dist)
+        cmd.finalize_options()
+        cmd.get_file_list()
+        cmd.make_distribution()
+        self.assertIn('README\nREADME2\n', open('MANIFEST').read())
+
     def test_sub_commands(self):
-        tempdir = self.mkdtemp()
-        os.chdir(tempdir)
-        self.write_file('setup.cfg', SETUP_CFG)
+        self.write_setup()
         self.write_file('README', 'yeah')
         self.write_file('haven.py', '#')
         self.write_file('script1.py', '#')
@@ -223,22 +384,17 @@
         self.write_file(os.path.join('scripts', 'find-coconuts'), '#')
         os.mkdir('bin')
         self.write_file(os.path.join('bin', 'taunt'), '#')
+        os.mkdir('src')
 
-        for pkg in ('one', 'src', 'src2'):
+        for pkg in ('one', 'two', 'three'):
+            pkg = os.path.join('src', pkg)
             os.mkdir(pkg)
             self.write_file(os.path.join(pkg, '__init__.py'), '#')
 
         # try to run the install command to see if foo is called
-        sys.stdout = sys.stderr = StringIO()
-        sys.argv[:] = ['', 'install_dist']
-        old_sys = sys.argv[:]
-        try:
-            from distutils2.run import main
-            dist = main()
-        finally:
-            sys.argv[:] = old_sys
+        dist = self.run_setup('install_dist')
 
-        self.assertEquals(dist.foo_was_here, 1)
+        self.assertEqual(dist.foo_was_here, 1)
 
 
 def test_suite():
diff --git a/distutils2/tests/test_cygwinccompiler.py b/distutils2/tests/test_cygwinccompiler.py
--- a/distutils2/tests/test_cygwinccompiler.py
+++ b/distutils2/tests/test_cygwinccompiler.py
@@ -8,10 +8,9 @@
 from distutils2.tests import captured_stdout
 
 from distutils2.compiler import cygwinccompiler
-from distutils2.compiler.cygwinccompiler import (CygwinCCompiler, check_config_h,
-                                       CONFIG_H_OK, CONFIG_H_NOTOK,
-                                       CONFIG_H_UNCERTAIN, get_versions,
-                                       get_msvcr, RE_VERSION)
+from distutils2.compiler.cygwinccompiler import (
+    CygwinCCompiler, check_config_h, get_msvcr,
+    CONFIG_H_OK, CONFIG_H_NOTOK, CONFIG_H_UNCERTAIN)
 from distutils2.util import get_compiler_versions
 from distutils2.tests import unittest, support
 
diff --git a/distutils2/tests/test_depgraph.py b/distutils2/tests/test_depgraph.py
--- a/distutils2/tests/test_depgraph.py
+++ b/distutils2/tests/test_depgraph.py
@@ -184,6 +184,67 @@
 
         self.checkLists(matches, expected)
 
+    def test_graph_disconnected_to_dot(self):
+        dependencies_expected = (
+            ('towel-stuff', 'bacon', 'bacon (<=0.2)'),
+            ('grammar', 'bacon', 'truffles (>=1.2)'),
+            ('choxie', 'towel-stuff', 'towel-stuff (0.1)'),
+            ('banana', 'strawberry', 'strawberry (>=0.5)')
+        )
+        disconnected_expected = ('cheese', 'bacon', 'strawberry')
+
+        dists = []
+        for name in self.DISTROS_DIST + self.DISTROS_EGG:
+            dist = pkgutil.get_distribution(name, use_egg_info=True)
+            self.assertNotEqual(dist, None)
+            dists.append(dist)
+
+        graph = depgraph.generate_graph(dists)
+        buf = StringIO.StringIO()
+        depgraph.graph_to_dot(graph, buf, skip_disconnected=False)
+        buf.seek(0)
+        lines = buf.readlines()
+
+        dependencies_lines = []
+        disconnected_lines = []
+
+        # First sort output lines into dependencies and disconnected lines.
+        # We also skip the attribute lines, and don't include the "{" and "}"
+        # lines.
+        disconnected_active = False
+        for line in lines[1:-1]: # Skip first and last line
+            if line.startswith('subgraph disconnected'):
+                disconnected_active = True
+                continue
+            if line.startswith('}') and disconnected_active:
+                disconnected_active = False
+                continue
+
+            if disconnected_active:
+                # Skip the 'label = "Disconnected"', etc. attribute lines.
+                if ' = ' not in line:
+                    disconnected_lines.append(line)
+            else:
+                dependencies_lines.append(line)
+
+        dependencies_matches = []
+        for line in dependencies_lines:
+            if line[-1] == '\n':
+                line = line[:-1]
+            match = self.EDGE.match(line.strip())
+            self.assertTrue(match is not None)
+            dependencies_matches.append(match.groups())
+
+        disconnected_matches = []
+        for line in disconnected_lines:
+            if line[-1] == '\n':
+                line = line[:-1]
+            line = line.strip('"')
+            disconnected_matches.append(line)
+
+        self.checkLists(dependencies_matches, dependencies_expected)
+        self.checkLists(disconnected_matches, disconnected_expected)
+
     def test_graph_bad_version_to_dot(self):
         expected = (
             ('towel-stuff', 'bacon', 'bacon (<=0.2)'),
@@ -213,6 +274,16 @@
 
         self.checkLists(matches, expected)
 
+    def test_repr(self):
+        dists = []
+        for name in self.DISTROS_DIST + self.DISTROS_EGG + self.BAD_EGGS:
+            dist = pkgutil.get_distribution(name, use_egg_info=True)
+            self.assertNotEqual(dist, None)
+            dists.append(dist)
+
+        graph = depgraph.generate_graph(dists)
+        assert repr(graph)
+
     def test_main(self):
         tempout = StringIO.StringIO()
         old = sys.stdout
diff --git a/distutils2/tests/test_dist.py b/distutils2/tests/test_dist.py
--- a/distutils2/tests/test_dist.py
+++ b/distutils2/tests/test_dist.py
@@ -68,7 +68,7 @@
             distutils2.dist.DEBUG = False
 
     def test_write_pkg_file(self):
-        # Check DistributionMetadata handling of Unicode fields
+        # Check Metadata handling of Unicode fields
         tmp_dir = self.mkdtemp()
         my_file = os.path.join(tmp_dir, 'f')
         cls = Distribution
diff --git a/distutils2/tests/test_index_dist.py b/distutils2/tests/test_index_dist.py
--- a/distutils2/tests/test_index_dist.py
+++ b/distutils2/tests/test_index_dist.py
@@ -127,7 +127,7 @@
         url = "%s/simple/foobar/foobar-0.1.tar.gz" % server.full_address
         # check md5 if given
         dist = Dist(url=url, hashname="md5",
-                    hashval="d41d8cd98f00b204e9800998ecf8427e")
+                    hashval="fe18804c5b722ff024cabdf514924fc4")
         dist.download(self.mkdtemp())
 
         # a wrong md5 fails
@@ -157,6 +157,35 @@
                           hashname="invalid_hashname",
                           hashval="value")
 
+    @use_pypi_server('downloads_with_md5')
+    def test_unpack(self, server):
+        url = "%s/simple/foobar/foobar-0.1.tar.gz" % server.full_address
+        dist1 = Dist(url=url)
+        # doing an unpack
+        dist1_here = self.mkdtemp()
+        dist1_there = dist1.unpack(path=dist1_here)
+        # assert we unpack to the path provided
+        self.assertEqual(dist1_here, dist1_there)
+        dist1_result = os.listdir(dist1_there)
+        self.assertIn('paf', dist1_result)
+        os.remove(os.path.join(dist1_there, 'paf'))
+
+        # Test unpack works without a path argument
+        dist2 = Dist(url=url)
+        # doing an unpack
+        dist2_there = dist2.unpack()
+        dist2_result = os.listdir(dist2_there)
+        self.assertIn('paf', dist2_result)
+        os.remove(os.path.join(dist2_there, 'paf'))
+
+    def test_hashname(self):
+        # An invalid hashname raises an exception on assignment
+        Dist(hashname="md5", hashval="value")
+
+        self.assertRaises(UnsupportedHashName, Dist,
+                          hashname="invalid_hashname",
+                          hashval="value")
+
 
 class TestReleasesList(unittest.TestCase):
 
@@ -237,6 +266,10 @@
 #        dists.sort_distributions(prefer_source=True)
 #        self.assertEqual(fb2_binary, dists[0])
 
+    def test_get_last(self):
+        dists = ReleasesList('Foo')
+        self.assertEqual(dists.get_last('Foo 1.0'), None)
+
 
 def test_suite():
     suite = unittest.TestSuite()
diff --git a/distutils2/tests/test_index_simple.py b/distutils2/tests/test_index_simple.py
--- a/distutils2/tests/test_index_simple.py
+++ b/distutils2/tests/test_index_simple.py
@@ -6,12 +6,15 @@
 import urllib2
 
 from distutils2.index.simple import Crawler
-from distutils2.tests import unittest, support
+from distutils2.tests import unittest
+from distutils2.tests.support import TempdirManager, LoggingCatcher
 from distutils2.tests.pypi_server import (use_pypi_server, PyPIServer,
                                           PYPI_DEFAULT_STATIC_PATH)
 
 
-class SimpleCrawlerTestCase(support.TempdirManager, unittest.TestCase):
+class SimpleCrawlerTestCase(TempdirManager,
+                            LoggingCatcher,
+                            unittest.TestCase):
 
     def _get_simple_crawler(self, server, base_url="/simple/", hosts=None,
                           *args, **kwargs):
@@ -293,8 +296,8 @@
 <a href="../download" rel="download">link2</a>
 <a href="../simpleurl">link2</a>
         """
-        found_links = dict(crawler._default_link_matcher(content,
-                                                         base_url)).keys()
+        found_links = set(dict(crawler._default_link_matcher(content,
+                                                             base_url)))
         self.assertIn('http://example.org/some/homepage', found_links)
         self.assertIn('http://example.org/some/simpleurl', found_links)
         self.assertIn('http://example.org/some/download', found_links)
diff --git a/distutils2/tests/test_index_xmlrpc.py b/distutils2/tests/test_index_xmlrpc.py
--- a/distutils2/tests/test_index_xmlrpc.py
+++ b/distutils2/tests/test_index_xmlrpc.py
@@ -25,6 +25,24 @@
                           invalid="test")
 
     @use_xmlrpc_server()
+    def test_get_all_projects(self, server):
+        client = self._get_client(server)
+        server.xmlrpc.set_distributions([
+            {'name': 'FooBar', 'version': '1.1'},
+            {'name': 'FooBar', 'version': '1.2'},
+            {'name': 'Foo', 'version': '1.1'},
+        ])
+        results = client.get_all_projects()
+        self.assertEqual(2, len(results))
+
+        # check we do have two releases for Foobar's project
+        self.assertEqual(2, len(results[0].releases))
+
+        names = [r.name for r in results]
+        self.assertIn('FooBar', names)
+        self.assertIn('Foo', names)
+
+    @use_xmlrpc_server()
     def test_get_releases(self, server):
         client = self._get_client(server)
         server.xmlrpc.set_distributions([
diff --git a/distutils2/tests/test_install.py b/distutils2/tests/test_install.py
--- a/distutils2/tests/test_install.py
+++ b/distutils2/tests/test_install.py
@@ -1,15 +1,14 @@
 """Tests for the distutils2.install module."""
 
 import os
+
 from tempfile import mkstemp
-
 from distutils2 import install
 from distutils2.index.xmlrpc import Client
-from distutils2.metadata import DistributionMetadata
+from distutils2.metadata import Metadata
 from distutils2.tests import run_unittest
-from distutils2.tests.support import TempdirManager
+from distutils2.tests.support import LoggingCatcher, TempdirManager, unittest
 from distutils2.tests.pypi_server import use_xmlrpc_server
-from distutils2.tests.support import unittest
 
 
 class InstalledDist(object):
@@ -18,7 +17,7 @@
     def __init__(self, name, version, deps):
         self.name = name
         self.version = version
-        self.metadata = DistributionMetadata()
+        self.metadata = Metadata()
         self.metadata['Requires-Dist'] = deps
         self.metadata['Provides-Dist'] = ['%s (%s)' % (name, version)]
 
@@ -29,26 +28,20 @@
 class ToInstallDist(object):
     """Distribution that will be installed"""
 
-    def __init__(self, raise_error=False, files=False):
-        self._raise_error = raise_error
+    def __init__(self, files=False):
         self._files = files
-        self.install_called = False
-        self.install_called_with = {}
         self.uninstall_called = False
         self._real_files = []
+        self.name = "fake"
+        self.version = "fake"
         if files:
             for f in range(0,3):
                self._real_files.append(mkstemp())
 
-    def install(self, *args):
-        self.install_called = True
-        self.install_called_with = args
-        if self._raise_error:
-            raise Exception('Oops !')
-        return ['/path/to/foo', '/path/to/bar']
-
-    def uninstall(self, **args):
-        self.uninstall_called = True
+    def _unlink_installed_files(self):
+        if self._files:
+            for f in self._real_files:
+                os.unlink(f[1])
 
     def get_installed_files(self, **args):
         if self._files:
@@ -58,14 +51,39 @@
         return self.get_installed_files()
 
 
+class MagicMock(object):
+    def __init__(self, return_value=None, raise_exception=False):
+        self.called = False
+        self._times_called = 0
+        self._called_with = []
+        self._return_value = return_value
+        self._raise = raise_exception
+
+    def __call__(self, *args, **kwargs):
+        self.called = True
+        self._times_called = self._times_called + 1
+        self._called_with.append((args, kwargs))
+        iterable = hasattr(self._raise, '__iter__')
+        if self._raise:
+            if ((not iterable and self._raise)
+                    or self._raise[self._times_called - 1]):
+                raise Exception
+        return self._return_value
+
+    def called_with(self, *args, **kwargs):
+        return (args, kwargs) in self._called_with
+
+
 def get_installed_dists(dists):
+    """Return a list of fake installed dists.
+    Each dist is built from a (name, version, deps) tuple."""
     objects = []
     for (name, version, deps) in dists:
         objects.append(InstalledDist(name, version, deps))
     return objects
 
 
-class TestInstall(TempdirManager, unittest.TestCase):
+class TestInstall(LoggingCatcher, TempdirManager, unittest.TestCase):
     def _get_client(self, server, *args, **kwargs):
         return Client(server.full_address, *args, **kwargs)
 
@@ -150,6 +168,8 @@
         # Tests that conflicts are detected
         client = self._get_client(server)
         archive_path = '%s/distribution.tar.gz' % server.full_address
+
+        # choxie depends on towel-stuff, which depends on bacon.
         server.xmlrpc.set_distributions([
             {'name':'choxie',
              'version': '2.0.0.9',
@@ -164,6 +184,8 @@
              'requires_dist': [],
              'url': archive_path},
             ])
+
+        # name, version, deps.
         already_installed = [('bacon', '0.1', []),
                              ('chicken', '1.1', ['bacon (0.1)'])]
         output = install.get_infos("choxie", index=client, installed=
@@ -195,7 +217,7 @@
         files = [os.path.join(path, '%s' % x) for x in range(1, 20)]
         for f in files:
             file(f, 'a+')
-        output = [o for o in install.move_files(files, newpath)]
+        output = [o for o in install._move_files(files, newpath)]
 
         # check that output return the list of old/new places
         for f in files:
@@ -214,30 +236,46 @@
 
         for dict1, dict2, expect in tests:
             install._update_infos(dict1, dict2)
-            for key in expect.keys():
+            for key in expect:
                 self.assertEqual(expect[key], dict1[key])
 
     def test_install_dists_rollback(self):
         # if one of the distribution installation fails, call uninstall on all
         # installed distributions.
 
-        d1 = ToInstallDist()
-        d2 = ToInstallDist(raise_error=True)
-        self.assertRaises(Exception, install.install_dists, [d1, d2])
-        for dist in (d1, d2):
-            self.assertTrue(dist.install_called)
-        self.assertTrue(d1.uninstall_called)
-        self.assertFalse(d2.uninstall_called)
+        old_install_dist = install._install_dist
+        old_remove = getattr(install, 'remove', None)
+
+        install._install_dist = MagicMock(return_value=[],
+                raise_exception=(False, True))
+        install.remove = MagicMock()
+        try:
+            d1 = ToInstallDist()
+            d2 = ToInstallDist()
+            path = self.mkdtemp()
+            self.assertRaises(Exception, install.install_dists, [d1, d2], path)
+            self.assertTrue(install._install_dist.called_with(d1, path))
+            self.assertTrue(install.remove.called)
+        finally:
+            install._install_dist = old_install_dist
+            install.remove = old_remove
+
 
     def test_install_dists_success(self):
-        # test that the install method is called on each of the distributions.
-        d1 = ToInstallDist()
-        d2 = ToInstallDist()
-        install.install_dists([d1, d2])
-        for dist in (d1, d2):
-            self.assertTrue(dist.install_called)
-        self.assertFalse(d1.uninstall_called)
-        self.assertFalse(d2.uninstall_called)
+        old_install_dist = install._install_dist
+        install._install_dist = MagicMock(return_value=[])
+        try:
+            # test that the install method is called on each of the distributions.
+            d1 = ToInstallDist()
+            d2 = ToInstallDist()
+
+            # should call install
+            path = self.mkdtemp()
+            install.install_dists([d1, d2], path)
+            for dist in (d1, d2):
+                self.assertTrue(install._install_dist.called_with(dist, path))
+        finally:
+            install._install_dist = old_install_dist
 
     def test_install_from_infos_conflict(self):
         # assert conflicts raise an exception
@@ -262,29 +300,48 @@
             install.install_dists = old_install_dists
 
     def test_install_from_infos_remove_rollback(self):
-        # assert that if an error occurs, the removed files are restored.
-        remove = []
-        for i in range(0,2):
-            remove.append(ToInstallDist(files=True, raise_error=True))
-        to_install = [ToInstallDist(raise_error=True),
-                   ToInstallDist()]
+        old_install_dist = install._install_dist
+        old_uninstall = getattr(install, 'uninstall', None)
 
-        install.install_from_infos(remove=remove, install=to_install)
-        # assert that the files are in the same place
-        # assert that the files have been removed
-        for dist in remove:
-            for f in dist.get_installed_files():
-                self.assertTrue(os.path.exists(f))
+        install._install_dist = MagicMock(return_value=[],
+                raise_exception=(False, True))
+        install.uninstall = MagicMock()
+        try:
+            # assert that if an error occurs, the removed files are restored.
+            remove = []
+            for i in range(0,2):
+                remove.append(ToInstallDist(files=True))
+            to_install = [ToInstallDist(), ToInstallDist()]
+            temp_dir = self.mkdtemp()
+
+            self.assertRaises(Exception, install.install_from_infos,
+                              install_path=temp_dir, install=to_install,
+                              remove=remove)
+            # assert that the files are in the same place
+            # assert that the files have been removed
+            for dist in remove:
+                for f in dist.get_installed_files():
+                    self.assertTrue(os.path.exists(f))
+                dist._unlink_installed_files()
+        finally:
+            install._install_dist = old_install_dist
+            install.uninstall = old_uninstall
+
 
     def test_install_from_infos_install_succes(self):
-        # assert that the distribution can be installed
-        install_path = "my_install_path"
-        to_install = [ToInstallDist(), ToInstallDist()]
+        old_install_dist = install._install_dist
+        install._install_dist = MagicMock([])
+        try:
+            # assert that the distribution can be installed
+            install_path = "my_install_path"
+            to_install = [ToInstallDist(), ToInstallDist()]
 
-        install.install_from_infos(install=to_install,
-                                         install_path=install_path)
-        for dist in to_install:
-            self.assertEquals(dist.install_called_with, (install_path,))
+            install.install_from_infos(install_path, install=to_install)
+            for dist in to_install:
+                install._install_dist.called_with(install_path)
+        finally:
+            install._install_dist = old_install_dist
+
 
 def test_suite():
     suite = unittest.TestSuite()
diff --git a/distutils2/tests/test_manifest.py b/distutils2/tests/test_manifest.py
--- a/distutils2/tests/test_manifest.py
+++ b/distutils2/tests/test_manifest.py
@@ -1,6 +1,5 @@
 """Tests for distutils.manifest."""
 import os
-import sys
 import logging
 from StringIO import StringIO
 
@@ -25,10 +24,11 @@
 
 
 class ManifestTestCase(support.TempdirManager,
+                       # enable this after LoggingCatcher is fixed
+                       #support.LoggingCatcher,
                        unittest.TestCase):
 
     def test_manifest_reader(self):
-
         tmpdir = self.mkdtemp()
         MANIFEST = os.path.join(tmpdir, 'MANIFEST.in')
         f = open(MANIFEST, 'w')
@@ -38,9 +38,10 @@
             f.close()
         manifest = Manifest()
 
+        # remove this when LoggingCatcher is fixed
         warns = []
-        def _warn(msg):
-            warns.append(msg)
+        def _warn(*args):
+            warns.append(args[0])
 
         old_warn = logging.warning
         logging.warning = _warn
@@ -53,7 +54,7 @@
         # and 3 warnings issued (we ddidn't provided the files)
         self.assertEqual(len(warns), 3)
         for warn in warns:
-            self.assertIn('warning: no files found matching', warn)
+            self.assertIn('no files found matching', warn)
 
         # manifest also accepts file-like objects
         old_warn = logging.warning
diff --git a/distutils2/tests/test_markers.py b/distutils2/tests/test_markers.py
new file mode 100644
--- /dev/null
+++ b/distutils2/tests/test_markers.py
@@ -0,0 +1,69 @@
+"""Tests for distutils2.markers."""
+import os
+import sys
+import platform
+from StringIO import StringIO
+
+from distutils2.markers import interpret
+from distutils2.tests import run_unittest, unittest
+from distutils2.tests.support import LoggingCatcher, WarningsCatcher
+
+
+class MarkersTestCase(LoggingCatcher, WarningsCatcher,
+                      unittest.TestCase):
+
+    def test_interpret(self):
+        sys_platform = sys.platform
+        version = sys.version.split()[0]
+        os_name = os.name
+        platform_version = platform.version()
+        platform_machine = platform.machine()
+
+        self.assertTrue(interpret("sys.platform == '%s'" % sys_platform))
+        self.assertTrue(interpret(
+            "sys.platform == '%s' or python_version == '2.4'" % sys_platform))
+        self.assertTrue(interpret(
+            "sys.platform == '%s' and python_full_version == '%s'" %
+            (sys_platform, version)))
+        self.assertTrue(interpret("'%s' == sys.platform" % sys_platform))
+        self.assertTrue(interpret('os.name == "%s"' % os_name))
+        self.assertTrue(interpret(
+            'platform.version == "%s" and platform.machine == "%s"' %
+            (platform_version, platform_machine)))
+
+        # expressions that should raise a syntax error
+        ops = ('os.name == os.name', 'os.name == 2', "'2' == '2'",
+               'okpjonon', '', 'os.name ==', 'python_version == 2.4')
+        for op in ops:
+            self.assertRaises(SyntaxError, interpret, op)
+
+        # combined operations
+        OP = 'os.name == "%s"' % os_name
+        AND = ' and '
+        OR = ' or '
+        self.assertTrue(interpret(OP + AND + OP))
+        self.assertTrue(interpret(OP + AND + OP + AND + OP))
+        self.assertTrue(interpret(OP + OR + OP))
+        self.assertTrue(interpret(OP + OR + OP + OR + OP))
+
+        # other operators
+        self.assertTrue(interpret("os.name != 'buuuu'"))
+        self.assertTrue(interpret("python_version > '1.0'"))
+        self.assertTrue(interpret("python_version < '5.0'"))
+        self.assertTrue(interpret("python_version <= '5.0'"))
+        self.assertTrue(interpret("python_version >= '1.0'"))
+        self.assertTrue(interpret("'%s' in os.name" % os_name))
+        self.assertTrue(interpret("'buuuu' not in os.name"))
+        self.assertTrue(interpret(
+            "'buuuu' not in os.name and '%s' in os.name" % os_name))
+
+        # execution context
+        self.assertTrue(interpret('python_version == "0.1"',
+                                  {'python_version': '0.1'}))
+
+
+def test_suite():
+    return unittest.makeSuite(MarkersTestCase)
+
+if __name__ == '__main__':
+    run_unittest(test_suite())
diff --git a/distutils2/tests/test_metadata.py b/distutils2/tests/test_metadata.py
--- a/distutils2/tests/test_metadata.py
+++ b/distutils2/tests/test_metadata.py
@@ -1,10 +1,10 @@
-"""Tests for distutils.command.bdist."""
+"""Tests for distutils2.metadata."""
 import os
 import sys
 import platform
 from StringIO import StringIO
 
-from distutils2.metadata import (DistributionMetadata, _interpret,
+from distutils2.metadata import (Metadata, get_metadata_version,
                                  PKG_INFO_PREFERRED_VERSION)
 from distutils2.tests import run_unittest, unittest
 from distutils2.tests.support import LoggingCatcher, WarningsCatcher
@@ -12,7 +12,7 @@
                                MetadataUnrecognizedVersionError)
 
 
-class DistributionMetadataTestCase(LoggingCatcher, WarningsCatcher,
+class MetadataTestCase(LoggingCatcher, WarningsCatcher,
                                    unittest.TestCase):
 
     def test_instantiation(self):
@@ -24,84 +24,35 @@
             fp.close()
         fp = StringIO(contents)
 
-        m = DistributionMetadata()
+        m = Metadata()
         self.assertRaises(MetadataUnrecognizedVersionError, m.items)
 
-        m = DistributionMetadata(PKG_INFO)
+        m = Metadata(PKG_INFO)
         self.assertEqual(len(m.items()), 22)
 
-        m = DistributionMetadata(fileobj=fp)
+        m = Metadata(fileobj=fp)
         self.assertEqual(len(m.items()), 22)
 
-        m = DistributionMetadata(mapping=dict(name='Test', version='1.0'))
+        m = Metadata(mapping=dict(name='Test', version='1.0'))
         self.assertEqual(len(m.items()), 11)
 
         d = dict(m.items())
-        self.assertRaises(TypeError, DistributionMetadata,
+        self.assertRaises(TypeError, Metadata,
                           PKG_INFO, fileobj=fp)
-        self.assertRaises(TypeError, DistributionMetadata,
+        self.assertRaises(TypeError, Metadata,
                           PKG_INFO, mapping=d)
-        self.assertRaises(TypeError, DistributionMetadata,
+        self.assertRaises(TypeError, Metadata,
                           fileobj=fp, mapping=d)
-        self.assertRaises(TypeError, DistributionMetadata,
+        self.assertRaises(TypeError, Metadata,
                           PKG_INFO, mapping=m, fileobj=fp)
 
-    def test_interpret(self):
-        sys_platform = sys.platform
-        version = sys.version.split()[0]
-        os_name = os.name
-        platform_version = platform.version()
-        platform_machine = platform.machine()
-
-        self.assertTrue(_interpret("sys.platform == '%s'" % sys_platform))
-        self.assertTrue(_interpret(
-            "sys.platform == '%s' or python_version == '2.4'" % sys_platform))
-        self.assertTrue(_interpret(
-            "sys.platform == '%s' and python_full_version == '%s'" %
-            (sys_platform, version)))
-        self.assertTrue(_interpret("'%s' == sys.platform" % sys_platform))
-        self.assertTrue(_interpret('os.name == "%s"' % os_name))
-        self.assertTrue(_interpret(
-            'platform.version == "%s" and platform.machine == "%s"' %
-            (platform_version, platform_machine)))
-
-        # stuff that need to raise a syntax error
-        ops = ('os.name == os.name', 'os.name == 2', "'2' == '2'",
-               'okpjonon', '', 'os.name ==', 'python_version == 2.4')
-        for op in ops:
-            self.assertRaises(SyntaxError, _interpret, op)
-
-        # combined operations
-        OP = 'os.name == "%s"' % os_name
-        AND = ' and '
-        OR = ' or '
-        self.assertTrue(_interpret(OP + AND + OP))
-        self.assertTrue(_interpret(OP + AND + OP + AND + OP))
-        self.assertTrue(_interpret(OP + OR + OP))
-        self.assertTrue(_interpret(OP + OR + OP + OR + OP))
-
-        # other operators
-        self.assertTrue(_interpret("os.name != 'buuuu'"))
-        self.assertTrue(_interpret("python_version > '1.0'"))
-        self.assertTrue(_interpret("python_version < '5.0'"))
-        self.assertTrue(_interpret("python_version <= '5.0'"))
-        self.assertTrue(_interpret("python_version >= '1.0'"))
-        self.assertTrue(_interpret("'%s' in os.name" % os_name))
-        self.assertTrue(_interpret("'buuuu' not in os.name"))
-        self.assertTrue(_interpret(
-            "'buuuu' not in os.name and '%s' in os.name" % os_name))
-
-        # execution context
-        self.assertTrue(_interpret('python_version == "0.1"',
-                                   {'python_version': '0.1'}))
-
     def test_metadata_read_write(self):
         PKG_INFO = os.path.join(os.path.dirname(__file__), 'PKG-INFO')
-        metadata = DistributionMetadata(PKG_INFO)
+        metadata = Metadata(PKG_INFO)
         out = StringIO()
         metadata.write_file(out)
         out.seek(0)
-        res = DistributionMetadata()
+        res = Metadata()
         res.read_file(out)
         for k in metadata.keys():
             self.assertTrue(metadata[k] == res[k])
@@ -111,17 +62,17 @@
         PKG_INFO = os.path.join(os.path.dirname(__file__), 'PKG-INFO')
         content = open(PKG_INFO).read()
         content = content % sys.platform
-        metadata = DistributionMetadata(platform_dependent=True)
+        metadata = Metadata(platform_dependent=True)
         metadata.read_file(StringIO(content))
         self.assertEqual(metadata['Requires-Dist'], ['bar'])
         metadata['Name'] = "baz; sys.platform == 'blah'"
         # FIXME is None or 'UNKNOWN' correct here?
         # where is that documented?
-        self.assertEquals(metadata['Name'], None)
+        self.assertEqual(metadata['Name'], None)
 
         # test with context
         context = {'sys.platform': 'okook'}
-        metadata = DistributionMetadata(platform_dependent=True,
+        metadata = Metadata(platform_dependent=True,
                                         execution_context=context)
         metadata.read_file(StringIO(content))
         self.assertEqual(metadata['Requires-Dist'], ['foo'])
@@ -130,7 +81,7 @@
         PKG_INFO = os.path.join(os.path.dirname(__file__), 'PKG-INFO')
         content = open(PKG_INFO).read()
         content = content % sys.platform
-        metadata = DistributionMetadata()
+        metadata = Metadata()
         metadata.read_file(StringIO(content))
 
         # see if we can read the description now
@@ -149,7 +100,7 @@
         PKG_INFO = os.path.join(os.path.dirname(__file__), 'PKG-INFO')
         content = open(PKG_INFO).read()
         content = content % sys.platform
-        metadata = DistributionMetadata(fileobj=StringIO(content))
+        metadata = Metadata(fileobj=StringIO(content))
         self.assertIn('Version', metadata.keys())
         self.assertIn('0.5', metadata.values())
         self.assertIn(('Version', '0.5'), metadata.items())
@@ -160,7 +111,7 @@
         self.assertEqual(metadata['Version'], '0.7')
 
     def test_versions(self):
-        metadata = DistributionMetadata()
+        metadata = Metadata()
         metadata['Obsoletes'] = 'ok'
         self.assertEqual(metadata['Metadata-Version'], '1.1')
 
@@ -175,22 +126,28 @@
         del metadata['Obsoletes-Dist']
         metadata['Version'] = '1'
         self.assertEqual(metadata['Metadata-Version'], '1.0')
+        self.assertEqual(get_metadata_version(metadata), '1.0')
 
         PKG_INFO = os.path.join(os.path.dirname(__file__),
                                 'SETUPTOOLS-PKG-INFO')
         metadata.read_file(StringIO(open(PKG_INFO).read()))
         self.assertEqual(metadata['Metadata-Version'], '1.0')
+        self.assertEqual(get_metadata_version(metadata), '1.0')
 
         PKG_INFO = os.path.join(os.path.dirname(__file__),
                                 'SETUPTOOLS-PKG-INFO2')
         metadata.read_file(StringIO(open(PKG_INFO).read()))
         self.assertEqual(metadata['Metadata-Version'], '1.1')
+        self.assertEqual(get_metadata_version(metadata), '1.1')
 
-        metadata.version = '1.618'
+        # Update the _fields dict directly to prevent 'Metadata-Version'
+        # from being updated by the _set_best_version() method.
+        metadata._fields['Metadata-Version'] = '1.618'
         self.assertRaises(MetadataUnrecognizedVersionError, metadata.keys)
 
-    def test_warnings(self):
-        metadata = DistributionMetadata()
+    # XXX Spurious Warnings were disabled
+    def XXXtest_warnings(self):
+        metadata = Metadata()
 
         # these should raise a warning
         values = (('Requires-Dist', 'Funky (Groovie)'),
@@ -203,7 +160,7 @@
         self.assertEqual(len(self.logs), 2)
 
     def test_multiple_predicates(self):
-        metadata = DistributionMetadata()
+        metadata = Metadata()
 
         # see for "3" instead of "3.0"  ???
         # its seems like the MINOR VERSION can be omitted
@@ -213,35 +170,97 @@
         self.assertEqual(len(self.warnings), 0)
 
     def test_project_url(self):
-        metadata = DistributionMetadata()
+        metadata = Metadata()
         metadata['Project-URL'] = [('one', 'http://ok')]
         self.assertEqual(metadata['Project-URL'],
                           [('one', 'http://ok')])
-        self.assertEqual(metadata.version, '1.2')
+        self.assertEqual(metadata['Metadata-Version'], '1.2')
 
-    def test_check(self):
-        metadata = DistributionMetadata()
+    def test_check_version(self):
+        metadata = Metadata()
+        metadata['Name'] = 'vimpdb'
+        metadata['Home-page'] = 'http://pypi.python.org'
+        metadata['Author'] = 'Monty Python'
+        metadata.docutils_support = False
+        missing, warnings = metadata.check()
+        self.assertEqual(missing, ['Version'])
+
+    def test_check_version_strict(self):
+        metadata = Metadata()
+        metadata['Name'] = 'vimpdb'
+        metadata['Home-page'] = 'http://pypi.python.org'
+        metadata['Author'] = 'Monty Python'
+        metadata.docutils_support = False
+        from distutils2.errors import MetadataMissingError
+        self.assertRaises(MetadataMissingError, metadata.check, strict=True)
+
+    def test_check_name(self):
+        metadata = Metadata()
+        metadata['Version'] = '1.0'
+        metadata['Home-page'] = 'http://pypi.python.org'
+        metadata['Author'] = 'Monty Python'
+        metadata.docutils_support = False
+        missing, warnings = metadata.check()
+        self.assertEqual(missing, ['Name'])
+
+    def test_check_name_strict(self):
+        metadata = Metadata()
+        metadata['Version'] = '1.0'
+        metadata['Home-page'] = 'http://pypi.python.org'
+        metadata['Author'] = 'Monty Python'
+        metadata.docutils_support = False
+        from distutils2.errors import MetadataMissingError
+        self.assertRaises(MetadataMissingError, metadata.check, strict=True)
+
+    def test_check_author(self):
+        metadata = Metadata()
+        metadata['Version'] = '1.0'
+        metadata['Name'] = 'vimpdb'
+        metadata['Home-page'] = 'http://pypi.python.org'
+        metadata.docutils_support = False
+        missing, warnings = metadata.check()
+        self.assertEqual(missing, ['Author'])
+
+    def test_check_homepage(self):
+        metadata = Metadata()
+        metadata['Version'] = '1.0'
+        metadata['Name'] = 'vimpdb'
+        metadata['Author'] = 'Monty Python'
+        metadata.docutils_support = False
+        missing, warnings = metadata.check()
+        self.assertEqual(missing, ['Home-page'])
+
+    def test_check_predicates(self):
+        metadata = Metadata()
         metadata['Version'] = 'rr'
+        metadata['Name'] = 'vimpdb'
+        metadata['Home-page'] = 'http://pypi.python.org'
+        metadata['Author'] = 'Monty Python'
         metadata['Requires-dist'] = ['Foo (a)']
+        metadata['Obsoletes-dist'] = ['Foo (a)']
+        metadata['Provides-dist'] = ['Foo (a)']
         if metadata.docutils_support:
             missing, warnings = metadata.check()
-            self.assertEqual(len(warnings), 2)
+            self.assertEqual(len(warnings), 4)
             metadata.docutils_support = False
         missing, warnings = metadata.check()
-        self.assertEqual(missing, ['Name', 'Home-page'])
-        self.assertEqual(len(warnings), 2)
+        self.assertEqual(len(warnings), 4)
 
     def test_best_choice(self):
-        metadata = DistributionMetadata()
+        metadata = Metadata()
         metadata['Version'] = '1.0'
-        self.assertEqual(metadata.version, PKG_INFO_PREFERRED_VERSION)
+        self.assertEqual(metadata['Metadata-Version'],
+                         PKG_INFO_PREFERRED_VERSION)
+        self.assertEqual(get_metadata_version(metadata),
+                         PKG_INFO_PREFERRED_VERSION)
         metadata['Classifier'] = ['ok']
-        self.assertEqual(metadata.version, '1.2')
+        self.assertEqual(metadata['Metadata-Version'], '1.2')
+        self.assertEqual(get_metadata_version(metadata), '1.2')
 
     def test_project_urls(self):
         # project-url is a bit specific, make sure we write it
         # properly in PKG-INFO
-        metadata = DistributionMetadata()
+        metadata = Metadata()
         metadata['Version'] = '1.0'
         metadata['Project-Url'] = [('one', 'http://ok')]
         self.assertEqual(metadata['Project-Url'], [('one', 'http://ok')])
@@ -252,13 +271,13 @@
         self.assertIn('Project-URL: one,http://ok', res)
 
         file_.seek(0)
-        metadata = DistributionMetadata()
+        metadata = Metadata()
         metadata.read_file(file_)
         self.assertEqual(metadata['Project-Url'], [('one', 'http://ok')])
 
 
 def test_suite():
-    return unittest.makeSuite(DistributionMetadataTestCase)
+    return unittest.makeSuite(MetadataTestCase)
 
 if __name__ == '__main__':
     run_unittest(test_suite())
diff --git a/distutils2/tests/test_mixin2to3.py b/distutils2/tests/test_mixin2to3.py
--- a/distutils2/tests/test_mixin2to3.py
+++ b/distutils2/tests/test_mixin2to3.py
@@ -25,7 +25,7 @@
         converted_code_content = "print('test')\n"
         new_code_content = "".join(open(code_name).readlines())
 
-        self.assertEquals(new_code_content, converted_code_content)
+        self.assertEqual(new_code_content, converted_code_content)
 
     @unittest.skipIf(sys.version < '2.6', 'requires Python 2.6 or higher')
     def test_doctests_only(self):
@@ -45,7 +45,7 @@
         converted_doctest_content = '\n'.join(converted_doctest_content)
         new_doctest_content = "".join(open(doctest_name).readlines())
 
-        self.assertEquals(new_doctest_content, converted_doctest_content)
+        self.assertEqual(new_doctest_content, converted_doctest_content)
 
     @unittest.skipIf(sys.version < '2.6', 'requires Python 2.6 or higher')
     def test_additional_fixers(self):
@@ -60,11 +60,11 @@
 
         mixin2to3 = Mixin2to3()
 
-        mixin2to3._run_2to3(files=[code_name],
+        mixin2to3._run_2to3(files=[code_name], doctests=[code_name],
                             fixers=['distutils2.tests.fixer'])
         converted_code_content = "isinstance(x, T)"
         new_code_content = "".join(open(code_name).readlines())
-        self.assertEquals(new_code_content, converted_code_content)
+        self.assertEqual(new_code_content, converted_code_content)
 
 def test_suite():
     return unittest.makeSuite(Mixin2to3TestCase)
diff --git a/distutils2/tests/test_mkcfg.py b/distutils2/tests/test_mkcfg.py
--- a/distutils2/tests/test_mkcfg.py
+++ b/distutils2/tests/test_mkcfg.py
@@ -1,18 +1,67 @@
+# -*- coding: utf-8 -*-
 """Tests for distutils.mkcfg."""
 import os
+import sys
+import StringIO
+from textwrap import dedent
+
 from distutils2.tests import run_unittest, support, unittest
 from distutils2.mkcfg import MainProgram
+from distutils2.mkcfg import ask_yn, ask, main
+from distutils2._backport import sysconfig
 
 
 class MkcfgTestCase(support.TempdirManager,
                     unittest.TestCase):
 
+    def setUp(self):
+        super(MkcfgTestCase, self).setUp()
+        self._stdin = sys.stdin
+        self._stdout = sys.stdout
+        sys.stdin = StringIO.StringIO()
+        sys.stdout = StringIO.StringIO()
+        self._cwd = os.getcwd()
+        self.wdir = self.mkdtemp()
+        os.chdir(self.wdir)
+        # patch sysconfig
+        self._old_get_paths = sysconfig.get_paths
+        sysconfig.get_paths = lambda *args, **kwargs: {
+                'man': sys.prefix + '/share/man',
+                'doc': sys.prefix + '/share/doc/pyxfoil',}
+
+    def tearDown(self):
+        super(MkcfgTestCase, self).tearDown()
+        sys.stdin = self._stdin
+        sys.stdout = self._stdout
+        os.chdir(self._cwd)
+        sysconfig.get_paths = self._old_get_paths
+
+    def test_ask_yn(self):
+        sys.stdin.write('y\n')
+        sys.stdin.seek(0)
+        self.assertEqual('y', ask_yn('is this a test'))
+
+    def test_ask(self):
+        sys.stdin.write('a\n')
+        sys.stdin.write('b\n')
+        sys.stdin.seek(0)
+        self.assertEqual('a', ask('is this a test'))
+        self.assertEqual('b', ask(str(range(0,70)), default='c', lengthy=True))
+
+    def test_set_multi(self):
+        main = MainProgram()
+        sys.stdin.write('aaaaa\n')
+        sys.stdin.seek(0)
+        main.data['author'] = []
+        main._set_multi('_set_multi test', 'author')
+        self.assertEqual(['aaaaa'], main.data['author'])
+
     def test_find_files(self):
         # making sure we scan a project dir correctly
         main = MainProgram()
 
         # building the structure
-        tempdir = self.mkdtemp()
+        tempdir = self.wdir
         dirs = ['pkg1', 'data', 'pkg2', 'pkg2/sub']
         files = ['README', 'setup.cfg', 'foo.py',
                  'pkg1/__init__.py', 'pkg1/bar.py',
@@ -26,18 +75,143 @@
             path = os.path.join(tempdir, file_)
             self.write_file(path, 'xxx')
 
-        old_dir = os.getcwd()
-        os.chdir(tempdir)
-        try:
-            main._find_files()
-        finally:
-            os.chdir(old_dir)
+        main._find_files()
 
         # do we have what we want ?
         self.assertEqual(main.data['packages'], ['pkg1', 'pkg2', 'pkg2.sub'])
         self.assertEqual(main.data['modules'], ['foo'])
-        self.assertEqual(main.data['extra_files'],
-                         ['setup.cfg', 'README', 'data/data1'])
+        self.assertEqual(set(main.data['extra_files']),
+                         set(['setup.cfg', 'README', 'data/data1']))
+
+    def test_convert_setup_py_to_cfg(self):
+        self.write_file((self.wdir, 'setup.py'),
+                        dedent("""
+        # -*- coding: utf-8 -*-
+        from distutils.core import setup
+        lg_dsc = '''My super Death-scription
+        barbar is now on the public domain,
+        ho, baby !'''
+        setup(name='pyxfoil',
+              version='0.2',
+              description='Python bindings for the Xfoil engine',
+              long_description = lg_dsc,
+              maintainer='André Espaze',
+              maintainer_email='andre.espaze at logilab.fr',
+              url='http://www.python-science.org/project/pyxfoil',
+              license='GPLv2',
+              packages=['pyxfoil', 'babar', 'me'],
+              data_files=[('share/doc/pyxfoil', ['README.rst']),
+                          ('share/man', ['pyxfoil.1']),
+                         ],
+              py_modules = ['my_lib', 'mymodule'],
+              package_dir = {'babar' : '',
+                             'me' : 'Martinique/Lamentin',
+                            },
+              package_data = {'babar': ['Pom', 'Flora', 'Alexander'],
+                              'me': ['dady', 'mumy', 'sys', 'bro'],
+                              '':  ['setup.py', 'README'],
+                              'pyxfoil' : ['fengine.so'],
+                             },
+              scripts = ['my_script', 'bin/run'],
+              )
+        """))
+        sys.stdin.write('y\n')
+        sys.stdin.seek(0)
+        main()
+        fp = open(os.path.join(self.wdir, 'setup.cfg'))
+        try:
+            lines = set([line.rstrip() for line in fp])
+        finally:
+            fp.close()
+        self.assertEqual(lines, set(['',
+            '[metadata]',
+            'version = 0.2',
+            'name = pyxfoil',
+            'maintainer = André Espaze',
+            'description = My super Death-scription',
+            '       |barbar is now on the public domain,',
+            '       |ho, baby !',
+            'maintainer_email = andre.espaze at logilab.fr',
+            'home_page = http://www.python-science.org/project/pyxfoil',
+            'download_url = UNKNOWN',
+            'summary = Python bindings for the Xfoil engine',
+            '[files]',
+            'modules = my_lib',
+            '    mymodule',
+            'packages = pyxfoil',
+            '    babar',
+            '    me',
+            'extra_files = Martinique/Lamentin/dady',
+            '    Martinique/Lamentin/mumy',
+            '    Martinique/Lamentin/sys',
+            '    Martinique/Lamentin/bro',
+            '    Pom',
+            '    Flora',
+            '    Alexander',
+            '    setup.py',
+            '    README',
+            '    pyxfoil/fengine.so',
+            'scripts = my_script',
+            '    bin/run',
+            'resources =',
+            '    README.rst = {doc}',
+            '    pyxfoil.1 = {man}',
+        ]))
+
+    def test_convert_setup_py_to_cfg_with_description_in_readme(self):
+        self.write_file((self.wdir, 'setup.py'),
+                        dedent("""
+        # -*- coding: utf-8 -*-
+        from distutils.core import setup
+        lg_dsc = open('README.txt').read()
+        setup(name='pyxfoil',
+              version='0.2',
+              description='Python bindings for the Xfoil engine',
+              long_description=lg_dsc,
+              maintainer='André Espaze',
+              maintainer_email='andre.espaze at logilab.fr',
+              url='http://www.python-science.org/project/pyxfoil',
+              license='GPLv2',
+              packages=['pyxfoil'],
+              package_data={'pyxfoil' : ['fengine.so', 'babar.so']},
+              data_files=[
+                ('share/doc/pyxfoil', ['README.rst']),
+                ('share/man', ['pyxfoil.1']),
+              ],
+        )
+        """))
+        self.write_file((self.wdir, 'README.txt'),
+                        dedent('''
+My super Death-scription
+barbar is now on the public domain,
+ho, baby !
+                        '''))
+        sys.stdin.write('y\n')
+        sys.stdin.seek(0)
+        main()
+        fp = open(os.path.join(self.wdir, 'setup.cfg'))
+        try:
+            lines = set([line.rstrip() for line in fp])
+        finally:
+            fp.close()
+        self.assertEqual(lines, set(['',
+            '[metadata]',
+            'version = 0.2',
+            'name = pyxfoil',
+            'maintainer = André Espaze',
+            'maintainer_email = andre.espaze at logilab.fr',
+            'home_page = http://www.python-science.org/project/pyxfoil',
+            'download_url = UNKNOWN',
+            'summary = Python bindings for the Xfoil engine',
+            'description-file = README.txt',
+            '[files]',
+            'packages = pyxfoil',
+            'extra_files = pyxfoil/fengine.so',
+            '    pyxfoil/babar.so',
+            'resources =',
+            '    README.rst = {doc}',
+            '    pyxfoil.1 = {man}',
+        ]))
 
 
 def test_suite():
diff --git a/distutils2/tests/test_resources.py b/distutils2/tests/test_resources.py
new file mode 100644
--- /dev/null
+++ b/distutils2/tests/test_resources.py
@@ -0,0 +1,174 @@
+# -*- encoding: utf-8 -*-
+"""Tests for distutils.data."""
+import pkgutil
+import sys
+
+from distutils2._backport.pkgutil import resource_open
+from distutils2._backport.pkgutil import resource_path
+from distutils2._backport.pkgutil import disable_cache
+from distutils2._backport.pkgutil import enable_cache
+from distutils2.command.install_dist import install_dist
+from distutils2.resources import resources_dests
+from distutils2.tests import run_unittest
+from distutils2.tests import unittest
+from distutils2.tests.test_util import GlobTestCaseBase
+import os
+import tempfile
+
+
+class DataFilesTestCase(GlobTestCaseBase):
+
+    def assertRulesMatch(self, rules, spec):
+        tempdir = self.build_files_tree(spec)
+        expected = self.clean_tree(spec)
+        result = resources_dests(tempdir, rules)
+        self.assertEqual(expected, result)
+
+    def clean_tree(self, spec):
+        files = {}
+        for path, value in spec.items():
+            if value is not None:
+                path = self.os_dependant_path(path)
+                files[path] = value
+        return files
+
+    def test_simple_glob(self):
+        rules = [('', '*.tpl', '{data}')]
+        spec  = {'coucou.tpl': '{data}/coucou.tpl',
+            'Donotwant': None}
+        self.assertRulesMatch(rules, spec)
+
+    def test_multiple_match(self):
+        rules = [('scripts', '*.bin', '{appdata}'),
+            ('scripts', '*', '{appscript}')]
+        spec  = {'scripts/script.bin': '{appscript}/script.bin',
+            'Babarlikestrawberry': None}
+        self.assertRulesMatch(rules, spec)
+
+    def test_set_match(self):
+        rules = [('scripts', '*.{bin,sh}', '{appscript}')]
+        spec  = {'scripts/script.bin': '{appscript}/script.bin',
+            'scripts/babar.sh':  '{appscript}/babar.sh',
+            'Babarlikestrawberry': None}
+        self.assertRulesMatch(rules, spec)
+
+    def test_set_match_multiple(self):
+        rules = [('scripts', 'script{s,}.{bin,sh}', '{appscript}')]
+        spec  = {'scripts/scripts.bin': '{appscript}/scripts.bin',
+            'scripts/script.sh':  '{appscript}/script.sh',
+            'Babarlikestrawberry': None}
+        self.assertRulesMatch(rules, spec)
+
+    def test_set_match_exclude(self):
+        rules = [('scripts', '*', '{appscript}'),
+            ('', '**/*.sh', None)]
+        spec  = {'scripts/scripts.bin': '{appscript}/scripts.bin',
+            'scripts/script.sh':  None,
+            'Babarlikestrawberry': None}
+        self.assertRulesMatch(rules, spec)
+
+    def test_glob_in_base(self):
+        rules = [('scrip*', '*.bin', '{appscript}')]
+        spec  = {'scripts/scripts.bin': '{appscript}/scripts.bin',
+                 'scripouille/babar.bin': '{appscript}/babar.bin',
+                 'scriptortu/lotus.bin': '{appscript}/lotus.bin',
+                 'Babarlikestrawberry': None}
+        self.assertRulesMatch(rules, spec)
+
+    def test_recursive_glob(self):
+        rules = [('', '**/*.bin', '{binary}')]
+        spec  = {'binary0.bin': '{binary}/binary0.bin',
+            'scripts/binary1.bin': '{binary}/scripts/binary1.bin',
+            'scripts/bin/binary2.bin': '{binary}/scripts/bin/binary2.bin',
+            'you/kill/pandabear.guy': None}
+        self.assertRulesMatch(rules, spec)
+
+    def test_final_exemple_glob(self):
+        rules = [
+            ('mailman/database/schemas/', '*', '{appdata}/schemas'),
+            ('', '**/*.tpl', '{appdata}/templates'),
+            ('', 'developer-docs/**/*.txt', '{doc}'),
+            ('', 'README', '{doc}'),
+            ('mailman/etc/', '*', '{config}'),
+            ('mailman/foo/', '**/bar/*.cfg', '{config}/baz'),
+            ('mailman/foo/', '**/*.cfg', '{config}/hmm'),
+            ('', 'some-new-semantic.sns', '{funky-crazy-category}')
+        ]
+        spec = {
+            'README': '{doc}/README',
+            'some.tpl': '{appdata}/templates/some.tpl',
+            'some-new-semantic.sns': '{funky-crazy-category}/some-new-semantic.sns',
+            'mailman/database/mailman.db': None,
+            'mailman/database/schemas/blah.schema': '{appdata}/schemas/blah.schema',
+            'mailman/etc/my.cnf': '{config}/my.cnf',
+            'mailman/foo/some/path/bar/my.cfg': '{config}/hmm/some/path/bar/my.cfg',
+            'mailman/foo/some/path/other.cfg': '{config}/hmm/some/path/other.cfg',
+            'developer-docs/index.txt': '{doc}/developer-docs/index.txt',
+            'developer-docs/api/toc.txt': '{doc}/developer-docs/api/toc.txt',
+        }
+        self.maxDiff = None
+        self.assertRulesMatch(rules, spec)
+
+    def test_resource_open(self):
+
+
+        #Create a fake-dist
+        temp_site_packages = tempfile.mkdtemp()
+
+        dist_name = 'test'
+        dist_info = os.path.join(temp_site_packages, 'test-0.1.dist-info')
+        os.mkdir(dist_info)
+
+        metadata_path = os.path.join(dist_info, 'METADATA')
+        resources_path = os.path.join(dist_info, 'RESOURCES')
+
+        metadata_file = open(metadata_path, 'w')
+
+        metadata_file.write(
+"""Metadata-Version: 1.2
+Name: test
+Version: 0.1
+Summary: test
+Author: me
+        """)
+
+        metadata_file.close()
+
+        test_path = 'test.cfg'
+
+        _, test_resource_path = tempfile.mkstemp()
+
+        test_resource_file = open(test_resource_path, 'w')
+
+        content = 'Config'
+        test_resource_file.write(content)
+        test_resource_file.close()
+
+        resources_file = open(resources_path, 'w')
+
+        resources_file.write("""%s,%s""" % (test_path, test_resource_path))
+        resources_file.close()
+
+        #Add fake site-packages to sys.path to retrieve fake dist
+        old_sys_path = sys.path
+        sys.path.insert(0, temp_site_packages)
+
+        #Force pkgutil to rescan the sys.path
+        disable_cache()
+
+        #Try to retrieve resources paths and files
+        self.assertEqual(resource_path(dist_name, test_path), test_resource_path)
+        self.assertRaises(KeyError, resource_path, dist_name, 'notexis')
+
+        self.assertEqual(resource_open(dist_name, test_path).read(), content)
+        self.assertRaises(KeyError, resource_open, dist_name, 'notexis')
+
+        sys.path = old_sys_path
+
+        enable_cache()
+
+def test_suite():
+    return unittest.makeSuite(DataFilesTestCase)
+
+if __name__ == '__main__':
+    run_unittest(test_suite())
diff --git a/distutils2/tests/test_uninstall.py b/distutils2/tests/test_uninstall.py
new file mode 100644
--- /dev/null
+++ b/distutils2/tests/test_uninstall.py
@@ -0,0 +1,93 @@
+"""Tests for the uninstall command."""
+import os
+import sys
+from StringIO import StringIO
+from distutils2._backport.pkgutil import disable_cache, enable_cache
+from distutils2.tests import unittest, support, run_unittest
+from distutils2.errors import DistutilsError
+from distutils2.install import remove
+
+SETUP_CFG = """
+[metadata]
+name = %(name)s
+version = %(version)s
+
+[files]
+packages =
+    %(name)s
+    %(name)s.sub
+"""
+
+class UninstallTestCase(support.TempdirManager,
+                     support.LoggingCatcher,
+                     unittest.TestCase):
+
+    def setUp(self):
+        super(UninstallTestCase, self).setUp()
+        self.addCleanup(setattr, sys, 'stdout', sys.stdout)
+        self.addCleanup(setattr, sys, 'stderr', sys.stderr)
+        self.addCleanup(os.chdir, os.getcwd())
+        self.addCleanup(enable_cache)
+        self.root_dir = self.mkdtemp()
+        disable_cache()
+
+    def run_setup(self, *args):
+        # run setup with args
+        #sys.stdout = StringIO()
+        old_sys = sys.argv[:]
+        sys.argv[:] = [''] + list(args)
+        try:
+            from distutils2.run import commands_main
+            dist = commands_main()
+        finally:
+            sys.argv[:] = old_sys
+        return dist
+
+    def get_path(self, dist, name):
+        from distutils2.command.install_dist import install_dist
+        cmd = install_dist(dist)
+        cmd.prefix = self.root_dir
+        cmd.finalize_options()
+        return getattr(cmd, 'install_'+name)
+
+    def make_dist(self, pkg_name='foo', **kw):
+        dirname = self.mkdtemp()
+        kw['name'] = pkg_name
+        if 'version' not in kw:
+            kw['version'] = '0.1'
+        self.write_file((dirname, 'setup.cfg'), SETUP_CFG % kw)
+        os.mkdir(os.path.join(dirname, pkg_name))
+        self.write_file((dirname, pkg_name, '__init__.py'), '#')
+        self.write_file((dirname, pkg_name, pkg_name+'_utils.py'), '#')
+        os.mkdir(os.path.join(dirname, pkg_name, 'sub'))
+        self.write_file((dirname, pkg_name, 'sub', '__init__.py'), '#')
+        self.write_file((dirname, pkg_name, 'sub', pkg_name+'_utils.py'), '#')
+        return dirname
+
+    def install_dist(self, pkg_name='foo', dirname=None, **kw):
+        if not dirname:
+            dirname = self.make_dist(pkg_name, **kw)
+        os.chdir(dirname)
+        dist = self.run_setup('install_dist', '--prefix='+self.root_dir)
+        install_lib = self.get_path(dist, 'purelib')
+        return dist, install_lib
+
+    def test_uninstall_unknown_distribution(self):
+        self.assertRaises(DistutilsError, remove, 'foo', paths=[self.root_dir])
+
+    def test_uninstall(self):
+        dist, install_lib = self.install_dist()
+        self.assertIsFile(install_lib, 'foo', 'sub', '__init__.py')
+        self.assertIsFile(install_lib, 'foo-0.1.dist-info', 'RECORD')
+        remove('foo', paths=[install_lib])
+        self.assertIsNotFile(install_lib, 'foo', 'sub', '__init__.py')
+        self.assertIsNotFile(install_lib, 'foo-0.1.dist-info', 'RECORD')
+
+
+
+
+def test_suite():
+    return unittest.makeSuite(UninstallTestCase)
+
+if __name__ == '__main__':
+    run_unittest(test_suite())
diff --git a/distutils2/tests/test_util.py b/distutils2/tests/test_util.py
--- a/distutils2/tests/test_util.py
+++ b/distutils2/tests/test_util.py
@@ -18,7 +18,7 @@
                              _find_exe_version, _MAC_OS_X_LD_VERSION,
                              byte_compile, find_packages, spawn, find_executable,
                              _nt_quote_args, get_pypirc_path, generate_pypirc,
-                             read_pypirc, resolve_name)
+                             read_pypirc, resolve_name, iglob, RICH_GLOB)
 
 from distutils2 import util
 from distutils2.tests import unittest, support
@@ -144,7 +144,7 @@
         os.path.join = _join
 
         self.assertEqual(convert_path('/home/to/my/stuff'),
-                          '/home/to/my/stuff')
+                         '/home/to/my/stuff')
 
         # win
         os.sep = '\\'
@@ -156,9 +156,9 @@
         self.assertRaises(ValueError, convert_path, 'home/to/my/stuff/')
 
         self.assertEqual(convert_path('home/to/my/stuff'),
-                          'home\\to\\my\\stuff')
+                         'home\\to\\my\\stuff')
         self.assertEqual(convert_path('.'),
-                          os.curdir)
+                         os.curdir)
 
     def test_change_root(self):
         # linux/mac
@@ -171,9 +171,9 @@
         os.path.join = _join
 
         self.assertEqual(change_root('/root', '/old/its/here'),
-                          '/root/old/its/here')
+                         '/root/old/its/here')
         self.assertEqual(change_root('/root', 'its/here'),
-                          '/root/its/here')
+                         '/root/its/here')
 
         # windows
         os.name = 'nt'
@@ -190,9 +190,9 @@
         os.path.join = _join
 
         self.assertEqual(change_root('c:\\root', 'c:\\old\\its\\here'),
-                          'c:\\root\\old\\its\\here')
+                         'c:\\root\\old\\its\\here')
         self.assertEqual(change_root('c:\\root', 'its\\here'),
-                          'c:\\root\\its\\here')
+                         'c:\\root\\its\\here')
 
         # BugsBunny os (it's a great os)
         os.name = 'BugsBunny'
@@ -203,7 +203,7 @@
 
     def test_split_quoted(self):
         self.assertEqual(split_quoted('""one"" "two" \'three\' \\four'),
-                          ['one', 'two', 'three', 'four'])
+                         ['one', 'two', 'three', 'four'])
 
     def test_strtobool(self):
         yes = ('y', 'Y', 'yes', 'True', 't', 'true', 'True', 'On', 'on', '1')
@@ -383,7 +383,7 @@
         run_2to3([file_name])
         new_content = "".join(file_handle.read())
         file_handle.close()
-        self.assertEquals(new_content, converted_content)
+        self.assertEqual(new_content, converted_content)
 
     @unittest.skipIf(sys.version < '2.6', 'requires Python 2.6 or higher')
     def test_run_2to3_on_doctests(self):
@@ -399,7 +399,7 @@
         run_2to3([file_name], doctests_only=True)
         new_content = "".join(file_handle.readlines())
         file_handle.close()
-        self.assertEquals(new_content, converted_content)
+        self.assertEqual(new_content, converted_content)
 
     def test_nt_quote_args(self):
 
@@ -414,6 +414,10 @@
     @unittest.skipUnless(os.name in ('nt', 'posix'),
                          'runs only under posix or nt')
     def test_spawn(self):
+        # Do not patch subprocess on unix because
+        # distutils2.util._spawn_posix uses it
+        if os.name == 'posix':
+            subprocess.Popen = self.old_popen
         tmpdir = self.mkdtemp()
 
         # creating something executable
@@ -475,9 +479,186 @@
         content = open(rc).read()
         self.assertEqual(content, WANTED)
 
+class GlobTestCaseBase(support.TempdirManager,
+                       support.LoggingCatcher,
+                       unittest.TestCase):
+
+    def build_files_tree(self, files):
+        tempdir = self.mkdtemp()
+        for filepath in files:
+            is_dir = filepath.endswith('/')
+            filepath = os.path.join(tempdir, *filepath.split('/'))
+            if is_dir:
+                dirname = filepath
+            else:
+                dirname = os.path.dirname(filepath)
+            if dirname and not os.path.exists(dirname):
+                os.makedirs(dirname)
+            if not is_dir:
+                self.write_file(filepath, 'babar')
+        return tempdir
+
+    @staticmethod
+    def os_dependant_path(path):
+        path = path.rstrip('/').split('/')
+        return os.path.join(*path)
+
+    def clean_tree(self, spec):
+        files = []
+        for path, includes in list(spec.items()):
+            if includes:
+                files.append(self.os_dependant_path(path))
+        return files
+
+class GlobTestCase(GlobTestCaseBase):
+
+
+    def assertGlobMatch(self, glob, spec):
+        """Check that iglob(glob) yields exactly the spec paths marked True."""
+        tempdir  = self.build_files_tree(spec)
+        expected = self.clean_tree(spec)
+        self.addCleanup(os.chdir, os.getcwd())
+        os.chdir(tempdir)
+        result = list(iglob(glob))
+        self.assertItemsEqual(expected, result)
+
+    def test_regex_rich_glob(self):
+        matches = RICH_GLOB.findall(r"babar aime les {fraises} est les {huitres}")
+        self.assertEqual(["fraises", "huitres"], matches)
+
+    def test_simple_glob(self):
+        glob = '*.tp?'
+        spec  = {'coucou.tpl': True,
+                 'coucou.tpj': True,
+                 'Donotwant': False}
+        self.assertGlobMatch(glob, spec)
+
+    def test_simple_glob_in_dir(self):
+        glob = 'babar/*.tp?'
+        spec  = {'babar/coucou.tpl': True,
+                 'babar/coucou.tpj': True,
+                 'babar/toto.bin': False,
+                 'Donotwant': False}
+        self.assertGlobMatch(glob, spec)
+
+    def test_recursive_glob_head(self):
+        glob = '**/tip/*.t?l'
+        spec  = {'babar/zaza/zuzu/tip/coucou.tpl': True,
+                 'babar/z/tip/coucou.tpl': True,
+                 'babar/tip/coucou.tpl': True,
+                 'babar/zeop/tip/babar/babar.tpl': False,
+                 'babar/z/tip/coucou.bin': False,
+                 'babar/toto.bin': False,
+                 'zozo/zuzu/tip/babar.tpl': True,
+                 'zozo/tip/babar.tpl': True,
+                 'Donotwant': False}
+        self.assertGlobMatch(glob, spec)
+
+    def test_recursive_glob_tail(self):
+        glob = 'babar/**'
+        spec = {'babar/zaza/': True,
+                'babar/zaza/zuzu/': True,
+                'babar/zaza/zuzu/babar.xml': True,
+                'babar/zaza/zuzu/toto.xml': True,
+                'babar/zaza/zuzu/toto.csv': True,
+                'babar/zaza/coucou.tpl': True,
+                'babar/bubu.tpl': True,
+                'zozo/zuzu/tip/babar.tpl': False,
+                'zozo/tip/babar.tpl': False,
+                'Donotwant': False}
+        self.assertGlobMatch(glob, spec)
+
+    def test_recursive_glob_middle(self):
+        glob = 'babar/**/tip/*.t?l'
+        spec  = {'babar/zaza/zuzu/tip/coucou.tpl': True,
+                 'babar/z/tip/coucou.tpl': True,
+                 'babar/tip/coucou.tpl': True,
+                 'babar/zeop/tip/babar/babar.tpl': False,
+                 'babar/z/tip/coucou.bin': False,
+                 'babar/toto.bin': False,
+                 'zozo/zuzu/tip/babar.tpl': False,
+                 'zozo/tip/babar.tpl': False,
+                 'Donotwant': False}
+        self.assertGlobMatch(glob, spec)
+
+    def test_glob_set_tail(self):
+        glob = 'bin/*.{bin,sh,exe}'
+        spec  = {'bin/babar.bin': True,
+                 'bin/zephir.sh': True,
+                 'bin/celestine.exe': True,
+                 'bin/cornelius.bat': False,
+                 'bin/cornelius.xml': False,
+                 'toto/yurg': False,
+                 'Donotwant': False}
+        self.assertGlobMatch(glob, spec)
+
+    def test_glob_set_middle(self):
+        glob = 'xml/{babar,toto}.xml'
+        spec  = {'xml/babar.xml': True,
+                 'xml/toto.xml': True,
+                 'xml/babar.xslt': False,
+                 'xml/cornelius.sgml': False,
+                 'xml/zephir.xml': False,
+                 'toto/yurg.xml': False,
+                 'Donotwant': False}
+        self.assertGlobMatch(glob, spec)
+
+    def test_glob_set_head(self):
+        glob = '{xml,xslt}/babar.*'
+        spec  = {'xml/babar.xml': True,
+                 'xml/toto.xml': False,
+                 'xslt/babar.xslt': True,
+                 'xslt/toto.xslt': False,
+                 'toto/yurg.xml': False,
+                 'Donotwant': False}
+        self.assertGlobMatch(glob, spec)
+
+    def test_glob_all(self):
+        glob = '{xml/*,xslt/**}/babar.xml'
+        spec  = {'xml/a/babar.xml': True,
+                 'xml/b/babar.xml': True,
+                 'xml/a/c/babar.xml': False,
+                 'xslt/a/babar.xml': True,
+                 'xslt/b/babar.xml': True,
+                 'xslt/a/c/babar.xml': True,
+                 'toto/yurg.xml': False,
+                 'Donotwant': False}
+        self.assertGlobMatch(glob, spec)
+
+    def test_invalid_glob_pattern(self):
+        invalids = [
+            'ppooa**',
+            'azzaeaz4**/',
+            '/**ddsfs',
+            '**##1e"&e',
+            'DSFb**c009',
+            '{',
+            '{aaQSDFa',
+            '}',
+            'aQSDFSaa}',
+            '{**a,',
+            ',**a}',
+            '{a**,',
+            ',b**}',
+            '{a**a,babar}',
+            '{bob,b**z}',
+            ]
+        msg = "%r is not supposed to be a valid pattern"
+        for pattern in invalids:
+            try:
+                iglob(pattern)
+            except ValueError:
+                continue
+            else:
+                self.fail(msg % pattern)
+
+
 
 def test_suite():
-    return unittest.makeSuite(UtilTestCase)
+    suite = unittest.makeSuite(UtilTestCase)
+    suite.addTest(unittest.makeSuite(GlobTestCase))
+    return suite
+
 
 if __name__ == "__main__":
     unittest.main(defaultTest="test_suite")
diff --git a/distutils2/tests/test_version.py b/distutils2/tests/test_version.py
--- a/distutils2/tests/test_version.py
+++ b/distutils2/tests/test_version.py
@@ -61,9 +61,9 @@
 
     def test_huge_version(self):
 
-        self.assertEquals(str(V('1980.0')), '1980.0')
+        self.assertEqual(str(V('1980.0')), '1980.0')
         self.assertRaises(HugeMajorVersionNumError, V, '1981.0')
-        self.assertEquals(str(V('1981.0', error_on_huge_major_num=False)), '1981.0')
+        self.assertEqual(str(V('1981.0', error_on_huge_major_num=False)), '1981.0')
 
     def test_comparison(self):
         r"""
@@ -196,9 +196,20 @@
 
         self.assertRaises(ValueError, VersionPredicate, '')
 
+        self.assertTrue(VersionPredicate('Hey 2.5').match('2.5.1'))
+
         # XXX need to silent the micro version in this case
         #assert not VersionPredicate('Ho (<3.0,!=2.6)').match('2.6.3')
 
+
+        # Make sure a predicate that ends with a number works
+        self.assertTrue(VersionPredicate('virtualenv5 (1.0)').match('1.0'))
+        self.assertTrue(VersionPredicate('virtualenv5').match('1.0'))
+        self.assertTrue(VersionPredicate('vi5two').match('1.0'))
+        self.assertTrue(VersionPredicate('5two').match('1.0'))
+        self.assertTrue(VersionPredicate('vi5two 1.0').match('1.0'))
+        self.assertTrue(VersionPredicate('5two 1.0').match('1.0'))
+
         # test repr
         for predicate in predicates:
             self.assertEqual(str(VersionPredicate(predicate)), predicate)
@@ -220,12 +231,13 @@
         for version in other_versions:
             self.assertFalse(V(version).is_final)
 
+
 class VersionWhiteBoxTestCase(unittest.TestCase):
 
     def test_parse_numdots(self):
         # For code coverage completeness, as pad_zeros_length can't be set or
         # influenced from the public interface
-        self.assertEquals(V('1.0')._parse_numdots('1.0', '1.0',
+        self.assertEqual(V('1.0')._parse_numdots('1.0', '1.0',
                                                   pad_zeros_length=3),
                           [1, 0, 0])
 
diff --git a/distutils2/util.py b/distutils2/util.py
--- a/distutils2/util.py
+++ b/distutils2/util.py
@@ -9,12 +9,15 @@
 import re
 import string
 import sys
-import shutil
-import tarfile
-import zipfile
+from subprocess import call as sub_call
 from copy import copy
 from fnmatch import fnmatchcase
+try:
+    from glob import iglob as std_iglob
+except ImportError:
+    from glob import glob as std_iglob  # for python < 2.5
 from ConfigParser import RawConfigParser
+from inspect import getsource
 
 from distutils2.errors import (DistutilsPlatformError, DistutilsFileError,
                                DistutilsByteCompileError, DistutilsExecError)
@@ -175,29 +178,6 @@
         raise ValueError("invalid variable '$%s'" % var)
 
 
-def grok_environment_error(exc, prefix="error: "):
-    """Generate a useful error message from an EnvironmentError.
-
-    This will generate an IOError or an OSError exception object.
-    Handles Python 1.5.1 and 1.5.2 styles, and
-    does what it can to deal with exception objects that don't have a
-    filename (which happens when the error is due to a two-file operation,
-    such as 'rename()' or 'link()'.  Returns the error message as a string
-    prefixed with 'prefix'.
-    """
-    # check for Python 1.5.2-style {IO,OS}Error exception objects
-    if hasattr(exc, 'filename') and hasattr(exc, 'strerror'):
-        if exc.filename:
-            error = prefix + "%s: %s" % (exc.filename, exc.strerror)
-        else:
-            # two-argument functions in posix module don't
-            # include the filename in the exception object!
-            error = prefix + "%s" % exc.strerror
-    else:
-        error = prefix + str(exc[-1])
-
-    return error
-
 # Needed by 'split_quoted()'
 _wordchars_re = _squote_re = _dquote_re = None
 
@@ -238,20 +218,20 @@
             words.append(s[:end])
             break
 
-        if s[end] in string.whitespace: # unescaped, unquoted whitespace: now
-            words.append(s[:end])       # we definitely have a word delimiter
+        if s[end] in string.whitespace:  # unescaped, unquoted whitespace: now
+            words.append(s[:end])        # we definitely have a word delimiter
             s = s[end:].lstrip()
             pos = 0
 
-        elif s[end] == '\\':            # preserve whatever is being escaped;
-                                        # will become part of the current word
+        elif s[end] == '\\':             # preserve whatever is being escaped;
+                                         # will become part of the current word
             s = s[:end] + s[end + 1:]
             pos = end + 1
 
         else:
-            if s[end] == "'":           # slurp singly-quoted string
+            if s[end] == "'":            # slurp singly-quoted string
                 m = _squote_re.match(s, end)
-            elif s[end] == '"':         # slurp doubly-quoted string
+            elif s[end] == '"':          # slurp doubly-quoted string
                 m = _dquote_re.match(s, end)
             else:
                 raise RuntimeError("this can't happen "
@@ -542,9 +522,9 @@
             if missing == 'error':      # blow up when we stat() the file
                 pass
             elif missing == 'ignore':   # missing source dropped from
-                continue                #  target's dependency list
+                continue                # target's dependency list
             elif missing == 'newer':    # missing source means target is
-                return True             #  out-of-date
+                return True             # out-of-date
 
         if os.stat(source).st_mtime > target_mtime:
             return True
@@ -664,6 +644,7 @@
 
     return ret
 
+
 def splitext(path):
     """Like os.path.splitext, but take off .tar too"""
     base, ext = posixpath.splitext(path)
@@ -673,83 +654,6 @@
     return base, ext
 
 
-def unzip_file(filename, location, flatten=True):
-    """Unzip the file (zip file located at filename) to the destination
-    location"""
-    if not os.path.exists(location):
-        os.makedirs(location)
-    zipfp = open(filename, 'rb')
-    try:
-        zip = zipfile.ZipFile(zipfp)
-        leading = has_leading_dir(zip.namelist()) and flatten
-        for name in zip.namelist():
-            data = zip.read(name)
-            fn = name
-            if leading:
-                fn = split_leading_dir(name)[1]
-            fn = os.path.join(location, fn)
-            dir = os.path.dirname(fn)
-            if not os.path.exists(dir):
-                os.makedirs(dir)
-            if fn.endswith('/') or fn.endswith('\\'):
-                # A directory
-                if not os.path.exists(fn):
-                    os.makedirs(fn)
-            else:
-                fp = open(fn, 'wb')
-                try:
-                    fp.write(data)
-                finally:
-                    fp.close()
-    finally:
-        zipfp.close()
-
-
-def untar_file(filename, location):
-    """Untar the file (tar file located at filename) to the destination
-    location
-    """
-    if not os.path.exists(location):
-        os.makedirs(location)
-    if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
-        mode = 'r:gz'
-    elif (filename.lower().endswith('.bz2')
-          or filename.lower().endswith('.tbz')):
-        mode = 'r:bz2'
-    elif filename.lower().endswith('.tar'):
-        mode = 'r'
-    else:
-        mode = 'r:*'
-    tar = tarfile.open(filename, mode)
-    try:
-        leading = has_leading_dir([member.name for member in tar.getmembers()])
-        for member in tar.getmembers():
-            fn = member.name
-            if leading:
-                fn = split_leading_dir(fn)[1]
-            path = os.path.join(location, fn)
-            if member.isdir():
-                if not os.path.exists(path):
-                    os.makedirs(path)
-            else:
-                try:
-                    fp = tar.extractfile(member)
-                except (KeyError, AttributeError):
-                    # Some corrupt tar files seem to produce this
-                    # (specifically bad symlinks)
-                    continue
-                if not os.path.exists(os.path.dirname(path)):
-                    os.makedirs(os.path.dirname(path))
-                destfp = open(path, 'wb')
-                try:
-                    shutil.copyfileobj(fp, destfp)
-                finally:
-                    destfp.close()
-                fp.close()
-    finally:
-        tar.close()
-
-
 def has_leading_dir(paths):
     """Returns true if all the paths have the same leading path name
     (i.e., everything is in one subdirectory in an archive)"""
@@ -869,69 +773,22 @@
                   "command '%s' failed: %s" % (cmd[0], exc[-1]))
         if rc != 0:
             # and this reflects the command running but failing
-            logger.debug("command '%s' failed with exit status %d" % (cmd[0], rc))
+            logger.debug("command '%s' failed with exit status %d",
+                         (cmd[0], rc))
             raise DistutilsExecError(
                   "command '%s' failed with exit status %d" % (cmd[0], rc))
 
 
-def _spawn_posix(cmd, search_path=1, verbose=0, dry_run=0, env=None):
-    logger.info(' '.join(cmd))
+def _spawn_posix(cmd, search_path=1, verbose=1, dry_run=0, env=None):
+    cmd = ' '.join(cmd)
+    if verbose:
+        logger.info(cmd)
     if dry_run:
         return
-
-    if env is None:
-        exec_fn = search_path and os.execvp or os.execv
-    else:
-        exec_fn = search_path and os.execvpe or os.execve
-
-    pid = os.fork()
-
-    if pid == 0:  # in the child
-        try:
-            if env is None:
-                exec_fn(cmd[0], cmd)
-            else:
-                exec_fn(cmd[0], cmd, env)
-        except OSError, e:
-            sys.stderr.write("unable to execute %s: %s\n" %
-                             (cmd[0], e.strerror))
-            os._exit(1)
-
-        sys.stderr.write("unable to execute %s for unknown reasons" % cmd[0])
-        os._exit(1)
-    else:   # in the parent
-        # Loop until the child either exits or is terminated by a signal
-        # (ie. keep waiting if it's merely stopped)
-        while 1:
-            try:
-                pid, status = os.waitpid(pid, 0)
-            except OSError, exc:
-                import errno
-                if exc.errno == errno.EINTR:
-                    continue
-                raise DistutilsExecError(
-                      "command '%s' failed: %s" % (cmd[0], exc[-1]))
-            if os.WIFSIGNALED(status):
-                raise DistutilsExecError(
-                      "command '%s' terminated by signal %d" % \
-                      (cmd[0], os.WTERMSIG(status)))
-
-            elif os.WIFEXITED(status):
-                exit_status = os.WEXITSTATUS(status)
-                if exit_status == 0:
-                    return   # hey, it succeeded!
-                else:
-                    raise DistutilsExecError(
-                          "command '%s' failed with exit status %d" % \
-                          (cmd[0], exit_status))
-
-            elif os.WIFSTOPPED(status):
-                continue
-
-            else:
-                raise DistutilsExecError(
-                      "unknown error executing '%s': termination status %d" % \
-                      (cmd[0], status))
+    exit_status = sub_call(cmd, shell=True, env=env)
+    if exit_status != 0:
+        msg = "command '%s' failed with exit status %d"
+        raise DistutilsExecError(msg % (cmd, exit_status))
 
 
 def find_executable(executable, path=None):
@@ -971,6 +828,7 @@
 password:%s
 """
 
+
 def get_pypirc_path():
     """Returns rc file path."""
     return os.path.join(os.path.expanduser('~'), '.pypirc')
@@ -1045,44 +903,10 @@
     return {}
 
 
-def metadata_to_dict(meta):
-    """XXX might want to move it to the Metadata class."""
-    data = {
-        'metadata_version' : meta.version,
-        'name': meta['Name'],
-        'version': meta['Version'],
-        'summary': meta['Summary'],
-        'home_page': meta['Home-page'],
-        'author': meta['Author'],
-        'author_email': meta['Author-email'],
-        'license': meta['License'],
-        'description': meta['Description'],
-        'keywords': meta['Keywords'],
-        'platform': meta['Platform'],
-        'classifier': meta['Classifier'],
-        'download_url': meta['Download-URL'],
-    }
-
-    if meta.version == '1.2':
-        data['requires_dist'] = meta['Requires-Dist']
-        data['requires_python'] = meta['Requires-Python']
-        data['requires_external'] = meta['Requires-External']
-        data['provides_dist'] = meta['Provides-Dist']
-        data['obsoletes_dist'] = meta['Obsoletes-Dist']
-        data['project_url'] = [','.join(url) for url in
-                                meta['Project-URL']]
-
-    elif meta.version == '1.1':
-        data['provides'] = meta['Provides']
-        data['requires'] = meta['Requires']
-        data['obsoletes'] = meta['Obsoletes']
-
-    return data
-
 # utility functions for 2to3 support
 
-def run_2to3(files, doctests_only=False, fixer_names=None, options=None,
-                                                                explicit=None):
+def run_2to3(files, doctests_only=False, fixer_names=None,
+             options=None, explicit=None):
     """ Wrapper function around the refactor() class which
     performs the conversions on a list of python files.
     Invoke 2to3 on a list of Python files. The files should all come
@@ -1096,15 +920,13 @@
     fixers = []
     fixers = get_fixers_from_package('lib2to3.fixes')
 
-
     if fixer_names:
         for fixername in fixer_names:
-            fixers.extend([fixer for fixer in get_fixers_from_package(fixername)])
+            fixers.extend([fixer for fixer in
+                           get_fixers_from_package(fixername)])
     r = RefactoringTool(fixers, options=options)
-    if doctests_only:
-        r.refactor(files, doctests_only=True, write=True)
-    else:
-        r.refactor(files, write=True)
+    r.refactor(files, write=True, doctests_only=doctests_only)
+
 
 class Mixin2to3:
     """ Wrapper class for commands that run 2to3.
@@ -1126,3 +948,164 @@
         """ Issues a call to util.run_2to3. """
         return run_2to3(files, doctests_only, self.fixer_names,
                         self.options, self.explicit)
+
+RICH_GLOB = re.compile(r'\{([^}]*)\}')
+_CHECK_RECURSIVE_GLOB = re.compile(r'[^/,{]\*\*|\*\*[^/,}]')
+_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')
+
+
+def iglob(path_glob):
+    """Richer glob than the std glob module: supports ** and {opt1,opt2,opt3}."""
+    if _CHECK_RECURSIVE_GLOB.search(path_glob):
+        msg = """Invalid glob %r: Recursive glob "**" must be used alone"""
+        raise ValueError(msg % path_glob)
+    if _CHECK_MISMATCH_SET.search(path_glob):
+        msg = """Invalid glob %r: Mismatching set marker '{' or '}'"""
+        raise ValueError(msg % path_glob)
+    return _iglob(path_glob)
+
+
+def _iglob(path_glob):
+    """Actual logic of the iglob function"""
+    rich_path_glob = RICH_GLOB.split(path_glob, 1)
+    if len(rich_path_glob) > 1:
+        assert len(rich_path_glob) == 3, rich_path_glob
+        prefix, set, suffix = rich_path_glob
+        for item in set.split(','):
+            for path in _iglob(''.join((prefix, item, suffix))):
+                yield path
+    else:
+        if '**' not in path_glob:
+            for item in std_iglob(path_glob):
+                yield item
+        else:
+            prefix, radical = path_glob.split('**', 1)
+            if prefix == '':
+                prefix = '.'
+            if radical == '':
+                radical = '*'
+            else:
+                radical = radical.lstrip('/')
+            for (path, dir, files) in os.walk(prefix):
+                path = os.path.normpath(path)
+                for file in _iglob(os.path.join(path, radical)):
+                    yield file
+
+
+def cfg_to_args(path='setup.cfg'):
+    """ Distutils2 to distutils1 compatibility util.
+
+        This method uses an existing setup.cfg to generate a dictionary of
+        keywords that can be used by distutils.core.setup(**kwargs).
+
+        :param path:
+            The setup.cfg path.
+        :raises DistutilsFileError:
+            When the setup.cfg file is not found.
+
+    """
+    # We need to declare the following constants here so that it's easier to
+    # generate the setup.py afterwards, using inspect.getsource.
+
+    # XXX ** == needs testing
+    D1_D2_SETUP_ARGS = {"name": ("metadata",),
+                        "version": ("metadata",),
+                        "author": ("metadata",),
+                        "author_email": ("metadata",),
+                        "maintainer": ("metadata",),
+                        "maintainer_email": ("metadata",),
+                        "url": ("metadata", "home_page"),
+                        "description": ("metadata", "summary"),
+                        "long_description": ("metadata", "description"),
+                        "download-url": ("metadata",),
+                        "classifiers": ("metadata", "classifier"),
+                        "platforms": ("metadata", "platform"),  # **
+                        "license": ("metadata",),
+                        "requires": ("metadata", "requires_dist"),
+                        "provides": ("metadata", "provides_dist"),  # **
+                        "obsoletes": ("metadata", "obsoletes_dist"),  # **
+                        "packages": ("files",),
+                        "scripts": ("files",),
+                        "py_modules": ("files", "modules"),  # **
+                        }
+
+    MULTI_FIELDS = ("classifiers",
+                    "requires",
+                    "platforms",
+                    "packages",
+                    "scripts")
+
+    def has_get_option(config, section, option):
+        if config.has_option(section, option):
+            return config.get(section, option)
+        elif config.has_option(section, option.replace('_', '-')):
+            return config.get(section, option.replace('_', '-'))
+        else:
+            return False
+
+    # The method source code really starts here.
+    config = RawConfigParser()
+    if not os.path.exists(file):
+        raise DistutilsFileError("file '%s' does not exist" %
+                                 os.path.abspath(file))
+    config.read(path)
+
+    kwargs = {}
+    for arg in D1_D2_SETUP_ARGS:
+        if len(D1_D2_SETUP_ARGS[arg]) == 2:
+            # The distutils field name is different than distutils2's.
+            section, option = D1_D2_SETUP_ARGS[arg]
+
+        elif len(D1_D2_SETUP_ARGS[arg]) == 1:
+            # The distutils field name is the same as distutils2's.
+            section = D1_D2_SETUP_ARGS[arg][0]
+            option = arg
+
+        in_cfg_value = has_get_option(config, section, option)
+        if not in_cfg_value:
+            # There is no such option in the setup.cfg
+            if arg == "long_description":
+                filename = has_get_option(config, section, "description_file")
+                if filename:
+                    in_cfg_value = open(filename).read()
+            else:
+                continue
+
+        if arg in MULTI_FIELDS:
+            # Special behaviour when we have a multi line option
+            if "\n" in in_cfg_value:
+                in_cfg_value = in_cfg_value.strip().split('\n')
+            else:
+                in_cfg_value = list((in_cfg_value,))
+
+        kwargs[arg] = in_cfg_value
+
+    return kwargs
+
+
+_SETUP_TMPL = """\
+# This script was automatically generated by Distutils2
+import os
+from distutils.core import setup
+from ConfigParser import RawConfigParser
+
+%(func)s
+
+setup(**cfg_to_args())
+"""
+
+
+def generate_setup_py():
+    """Generates a distutils compatible setup.py using an existing setup.cfg.
+
+        :raises DistutilsFileError:
+            When a setup.py already exists.
+    """
+    if os.path.exists("setup.py"):
+        raise DistutilsFileError("A pre existing setup.py file exists")
+
+    handle = open("setup.py", "w")
+    try:
+        handle.write(_SETUP_TMPL % {'func': getsource(cfg_to_args)})
+    finally:
+        handle.close()
diff --git a/distutils2/version.py b/distutils2/version.py
--- a/distutils2/version.py
+++ b/distutils2/version.py
@@ -322,8 +322,9 @@
     return None
 
 
-_PREDICATE = re.compile(r"(?i)^\s*([a-z_][\sa-zA-Z_-]*(?:\.[a-z_]\w*)*)(.*)")
-_VERSIONS = re.compile(r"^\s*\((.*)\)\s*$")
+# A predicate is: "ProjectName (VERSION1, VERSION2, ...)"
+_PREDICATE = re.compile(r"(?i)^\s*(\w[\s\w-]*(?:\.\w*)*)(.*)")
+_VERSIONS = re.compile(r"^\s*\((?P<versions>.*)\)\s*$|^\s*(?P<versions2>.*)\s*$")
 _PLAIN_VERSIONS = re.compile(r"^\s*(.*)\s*$")
 _SPLIT_CMP = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$")
 
@@ -358,14 +359,25 @@
 
         name, predicates = match.groups()
         self.name = name.strip()
-        predicates = predicates.strip()
-        predicates = _VERSIONS.match(predicates)
-        if predicates is not None:
-            predicates = predicates.groups()[0]
-            self.predicates = [_split_predicate(pred.strip())
-                               for pred in predicates.split(',')]
+        self.predicates = []
+        if predicates is None:
+            return
+
+        predicates = _VERSIONS.match(predicates.strip())
+        if predicates is None:
+            return
+
+        predicates = predicates.groupdict()
+        if predicates['versions'] is not None:
+            versions = predicates['versions']
         else:
-            self.predicates = []
+            versions = predicates.get('versions2')
+
+        if versions is not None:
+            for version in versions.split(','):
+                if version.strip() == '':
+                    continue
+                self.predicates.append(_split_predicate(version))
 
     def match(self, version):
         """Check if the provided version matches the predicates."""
diff --git a/docs/design/configfile.rst b/docs/design/configfile.rst
new file mode 100644
--- /dev/null
+++ b/docs/design/configfile.rst
@@ -0,0 +1,132 @@
+.. _setup-config:
+
+************************************
+Writing the Setup Configuration File
+************************************
+
+Often, it's not possible to write down everything needed to build a distribution
+*a priori*: you may need to get some information from the user, or from the
+user's system, in order to proceed.  As long as that information is fairly
+simple---a list of directories to search for C header files or libraries, for
+example---then providing a configuration file, :file:`setup.cfg`, for users to
+edit is a cheap and easy way to solicit it.  Configuration files also let you
+provide default values for any command option, which the installer can then
+override either on the command line or by editing the config file.
+
+The setup configuration file is a useful middle-ground between the setup script
+---which, ideally, would be opaque to installers [#]_---and the command line to
+the setup script, which is outside of your control and entirely up to the
+installer.  In fact, :file:`setup.cfg` (and any other Distutils configuration
+files present on the target system) are processed after the contents of the
+setup script, but before the command line.  This has  several useful
+consequences:
+
+.. If you have more advanced needs, such as determining which extensions to
+   build based on what capabilities are present on the target system, then you
+   need the Distutils auto-configuration facility.  This started to appear in
+   Distutils 0.9 but, as of this writing, isn't mature or stable enough yet
+   for real-world use.
+
+* installers can override some of what you put in :file:`setup.py` by editing
+  :file:`setup.cfg`
+
+* you can provide non-standard defaults for options that are not easily set in
+  :file:`setup.py`
+
+* installers can override anything in :file:`setup.cfg` using the command-line
+  options to :file:`setup.py`
+
+The basic syntax of the configuration file is simple::
+
+   [command]
+   option=value
+   ...
+
+where *command* is one of the Distutils commands (e.g. :command:`build_py`,
+:command:`install`), and *option* is one of the options that command supports.
+Any number of options can be supplied for each command, and any number of
+command sections can be included in the file.  Blank lines are ignored, as are
+comments, which run from a ``'#'`` character until the end of the line.  Long
+option values can be split across multiple lines simply by indenting the
+continuation lines.
+
+You can find out the list of options supported by a particular command with the
+universal :option:`--help` option, e.g. ::
+
+   > python setup.py --help build_ext
+   [...]
+   Options for 'build_ext' command:
+     --build-lib (-b)     directory for compiled extension modules
+     --build-temp (-t)    directory for temporary files (build by-products)
+     --inplace (-i)       ignore build-lib and put compiled extensions into the
+                          source directory alongside your pure Python modules
+     --include-dirs (-I)  list of directories to search for header files
+     --define (-D)        C preprocessor macros to define
+     --undef (-U)         C preprocessor macros to undefine
+     --swig-opts          list of SWIG command-line options
+   [...]
+
+.. XXX do we want to support ``setup.py --help metadata``?
+
+Note that an option spelled :option:`--foo-bar` on the command line  is spelled
+:option:`foo_bar` in configuration files.
+
+For example, say you want your extensions to be built "in-place"---that is, you
+have an extension :mod:`pkg.ext`, and you want the compiled extension file
+(:file:`ext.so` on Unix, say) to be put in the same source directory as your
+pure Python modules :mod:`pkg.mod1` and :mod:`pkg.mod2`.  You can always use the
+:option:`--inplace` option on the command line to ensure this::
+
+   python setup.py build_ext --inplace
+
+But this requires that you always specify the :command:`build_ext` command
+explicitly, and remember to provide :option:`--inplace`. An easier way is to
+"set and forget" this option, by encoding it in :file:`setup.cfg`, the
+configuration file for this distribution::
+
+   [build_ext]
+   inplace=1
+
+This will affect all builds of this module distribution, whether or not you
+explicitly specify :command:`build_ext`.  If you include :file:`setup.cfg` in
+your source distribution, it will also affect end-user builds---which is
+probably a bad idea for this option, since always building extensions in-place
+would break installation of the module distribution.  In certain peculiar cases,
+though, modules are built right in their installation directory, so this is
+conceivably a useful ability.  (Distributing extensions that expect to be built
+in their installation directory is almost always a bad idea, though.)
+
+Another example: certain commands take a lot of options that don't change from
+run to run; for example, :command:`bdist_rpm` needs to know everything required
+to generate a "spec" file for creating an RPM distribution.  Some of this
+information comes from the setup script, and some is automatically generated by
+the Distutils (such as the list of files installed).  But some of it has to be
+supplied as options to :command:`bdist_rpm`, which would be very tedious to do
+on the command line for every run.  Hence, here is a snippet from the Distutils'
+own :file:`setup.cfg`::
+
+   [bdist_rpm]
+   release = 1
+   packager = Greg Ward <gward at python.net>
+   doc_files = CHANGES.txt
+               README.txt
+               USAGE.txt
+               doc/
+               examples/
+
+Note that the :option:`doc_files` option is simply a whitespace-separated string
+split across multiple lines for readability.
+
+
+.. seealso::
+
+   :ref:`inst-config-syntax` in "Installing Python Modules"
+      More information on the configuration files is available in the manual for
+      system administrators.
+
+
+.. rubric:: Footnotes
+
+.. [#] This ideal probably won't be achieved until auto-configuration is fully
+   supported by the Distutils.
+
diff --git a/docs/design/pep-0376.txt b/docs/design/pep-0376.txt
--- a/docs/design/pep-0376.txt
+++ b/docs/design/pep-0376.txt
@@ -425,7 +425,7 @@
 
 - ``name``: The name of the distribution.
 
-- ``metadata``: A ``DistributionMetadata`` instance loaded with the
+- ``metadata``: A ``Metadata`` instance loaded with the
   distribution's PKG-INFO file.
 
 - ``requested``: A boolean that indicates whether the REQUESTED
diff --git a/docs/design/wiki.rst b/docs/design/wiki.rst
--- a/docs/design/wiki.rst
+++ b/docs/design/wiki.rst
@@ -250,8 +250,8 @@
 ==  ====================================  ===================================================================================================
 1   mailman/database/schemas/blah.schema  /var/mailman/schemas/blah.schema
 2   some.tpl                              /var/mailman/templates/some.tpl
-3   path/to/some.tpl                      /var/mailman/templates/path/to/some.tpl
-4   mailman/database/mailman.db           /var/mailman/database/mailman.db
+3   path/to/some.tpl                      /var/mailman/templates/path/to/some.tpl !
+4   mailman/database/mailman.db           /var/mailman/database/mailman.db !
 5   developer-docs/index.txt              /usr/share/doc/mailman/developer-docs/index.txt
 6   developer-docs/api/toc.txt            /usr/share/doc/mailman/developer-docs/api/toc.txt
 7   README                                /usr/share/doc/mailman/README
@@ -259,7 +259,7 @@
 9   mailman/foo/some/path/bar/my.cfg      /etc/mailman/baz/some/path/bar/my.cfg AND
                                           /etc/mailman/hmm/some/path/bar/my.cfg + 
                                           emit a warning
-10  mailman/foo/some/path/other.cfg       /etc/mailman/some/path/other.cfg
+10  mailman/foo/some/path/other.cfg       /etc/mailman/some/path/other.cfg !
 11  some-new-semantic.sns                 /var/funky/mailman/some-new-semantic.sns
 ==  ====================================  ===================================================================================================
 
diff --git a/docs/source/contributing.rst b/docs/source/contributing.rst
new file mode 100644
--- /dev/null
+++ b/docs/source/contributing.rst
@@ -0,0 +1,27 @@
+==========================
+Contributing to Distutils2
+==========================
+
+----------------
+Reporting Issues
+----------------
+
+When using, testing or developing distutils2, you may encounter issues. Please refer to the following sections to learn how these issues should be reported.
+
+Please keep in mind that this guide is intended to ease the triage and fixing processes by giving the maximum information to the developers. It should not be viewed as mandatory, only advisory ;).
+
+
+- Go to http://bugs.python.org/ (you'll need a Python Bugs account), then "Issues" > "Create ticket".
+- **Title**: write in a short summary of the issue. You may prefix the issue title with “component:”, where component can be something like installer, sdist, setup.cfg, etc., or add it at the end of your title if the normal flow of the sentence allows it. This will ease up later searches.
+- **Components**: choose "Distutils2"
+- **Version**: choose "3rd party"
+- **Body**: explain how to reproduce the bug: What you want to do, what code you write, what happens, what should happen, how to fix it (if you have an idea).
+   * You should always test with the tip of the main repository, not releases.
+   * Mention the versions of Python you tested with.  d2 supports 2.4 to 2.7.
+   * If relevant, mention the version of your operating system (for example with issues related to C extensions).
+   * When referencing commits, be careful to use the universal changeset identifiers (12 characters, for instance c3cf81fc64db), not the local sequential numbers (for example 925) that are not shared among clones.
+   * Try to be as concise as possible, but not too much.
+   * If useful, paste tracebacks.
+   * If useful, attach setup.cfg or other files (binary files like archives are not very convenient, better to stick to text).
+
+Issues related to PyPI are reported via email to the **catalog-sig at python.org** mailing list, not within bugs.python.org.
diff --git a/docs/source/distutils/apiref.rst b/docs/source/distutils/apiref.rst
--- a/docs/source/distutils/apiref.rst
+++ b/docs/source/distutils/apiref.rst
@@ -104,6 +104,26 @@
    | *package_dir*      | A mapping of package to        | a dictionary                                                |
    |                    | directory names                |                                                             |
    +--------------------+--------------------------------+-------------------------------------------------------------+
+   | *extra_path*       | Information about an           | a string, 1-tuple or 2-tuple                                |
+   |                    | intervening directory between  |                                                             |
+   |                    | the install directory and the  |                                                             |
+   |                    | actual installation directory. |                                                             |
+   |                    |                                |                                                             |
+   |                    | If the value is a string it is |                                                             |
+   |                    | treated as a comma-separated   |                                                             |
+   |                    | tuple.                         |                                                             |
+   |                    |                                |                                                             |
+   |                    | If the value is a 2-tuple,     |                                                             |
+   |                    | the first element is the       |                                                             |
+   |                    | ``.pth`` file and the second   |                                                             |
+   |                    | is the name of the intervening |                                                             |
+   |                    | directory.                     |                                                             |
+   |                    |                                |                                                             |
+   |                    | If the value is a 1-tuple that |                                                             |
+   |                    | element is both the name of    |                                                             |
+   |                    | the ``.pth`` file and the      |                                                             |
+   |                    | intervening directory.         |                                                             |
+   +--------------------+--------------------------------+-------------------------------------------------------------+
 
 
 
@@ -888,7 +908,7 @@
 .. function:: make_zipfile(base_name, base_dir[, verbose=0, dry_run=0])
 
    Create a zip file from all files in and under *base_dir*.  The output zip file
-   will be named *base_dir* + :file:`.zip`.  Uses either the :mod:`zipfile` Python
+   will be named *base_name* + :file:`.zip`.  Uses either the :mod:`zipfile` Python
    module (if available) or the InfoZIP :file:`zip` utility (if installed and
    found on the default search path).  If neither tool is available, raises
    :exc:`DistutilsExecError`.  Returns the name of the output zip file.
@@ -1055,6 +1075,15 @@
    Create a file called *filename* and write *contents* (a sequence of strings
    without line terminators) to it.
 
+:mod:`distutils2.metadata` --- Metadata handling
+================================================================
+
+.. module:: distutils2.metadata
+
+.. FIXME CPython/stdlib docs don't use autoclass, write doc manually here
+
+.. autoclass:: distutils2.metadata.Metadata
+   :members:
 
 :mod:`distutils2.util` --- Miscellaneous other utility functions
 ================================================================
@@ -1157,15 +1186,6 @@
    an underscore. No { } or ( ) style quoting is available.
 
 
-.. function:: grok_environment_error(exc[, prefix='error: '])
-
-   Generate a useful error message from an :exc:`EnvironmentError`
-   (:exc:`IOError` or :exc:`OSError`) exception object. Does what it can to deal
-   with exception objects that don't have a filename (which happens when the
-   error is due to a two-file operation, such as :func:`rename` or
-   :func:`link`).  Returns the error message as a string prefixed with *prefix*.
-
-
 .. function:: split_quoted(s)
 
    Split a string up according to Unix shell-like rules for quotes and
diff --git a/docs/source/distutils/examples.rst b/docs/source/distutils/examples.rst
--- a/docs/source/distutils/examples.rst
+++ b/docs/source/distutils/examples.rst
@@ -298,11 +298,11 @@
 ``2.7`` or ``3.2``.
 
 You can read back this static file, by using the
-:class:`distutils2.dist.DistributionMetadata` class and its
+:class:`distutils2.dist.Metadata` class and its
 :func:`read_pkg_file` method::
 
-    >>> from distutils2.dist import DistributionMetadata
-    >>> metadata = DistributionMetadata()
+    >>> from distutils2.metadata import Metadata
+    >>> metadata = Metadata()
     >>> metadata.read_pkg_file(open('distribute-0.6.8-py2.7.egg-info'))
     >>> metadata.name
     'distribute'
@@ -315,7 +315,7 @@
 loads its values::
 
     >>> pkg_info_path = 'distribute-0.6.8-py2.7.egg-info'
-    >>> DistributionMetadata(pkg_info_path).name
+    >>> Metadata(pkg_info_path).name
     'distribute'
 
 
diff --git a/docs/source/distutils/sourcedist.rst b/docs/source/distutils/sourcedist.rst
--- a/docs/source/distutils/sourcedist.rst
+++ b/docs/source/distutils/sourcedist.rst
@@ -86,8 +86,7 @@
   distributions, but in the future there will be a standard for testing Python
   module distributions)
 
-* :file:`README.txt` (or :file:`README`), :file:`setup.py` (or whatever  you
-  called your setup script), and :file:`setup.cfg`
+* The configuration file :file:`setup.cfg`
 
 * all files that matches the ``package_data`` metadata.
   See :ref:`distutils-installing-package-data`.
@@ -95,6 +94,10 @@
 * all files that matches the ``data_files`` metadata.
   See :ref:`distutils-additional-files`.
 
+.. Warning::
+    In Distutils2, setup.py and README (or README.txt) files are no longer
+    included in source distributions by default.
+
 Sometimes this is enough, but usually you will want to specify additional files
 to distribute.  The typical way to do this is to write a *manifest template*,
 called :file:`MANIFEST.in` by default.  The manifest template is just a list of
diff --git a/docs/source/index.rst b/docs/source/index.rst
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -29,7 +29,7 @@
 .. __: http://bitbucket.org/tarek/distutils2/wiki/GSoC_2010_teams
 
 If you’re looking for information on how to contribute, head to
-:doc:`devresources`.
+:doc:`devresources`, and be sure to have a look at :doc:`contributing`.
 
 
 Documentation
@@ -76,6 +76,7 @@
    distutils/index
    library/distutils2
    library/pkgutil
+   contributing
 
 
 Indices and tables
diff --git a/docs/source/install/index.rst b/docs/source/install/index.rst
--- a/docs/source/install/index.rst
+++ b/docs/source/install/index.rst
@@ -927,15 +927,34 @@
 GNU C / Cygwin / MinGW
 ^^^^^^^^^^^^^^^^^^^^^^
 
-These instructions only apply if you're using a version of Python prior  to
-2.4.1 with a MinGW prior to 3.0.0 (with binutils-2.13.90-20030111-1).
-
 This section describes the necessary steps to use Distutils with the GNU C/C++
 compilers in their Cygwin and MinGW distributions. [#]_ For a Python interpreter
 that was built with Cygwin, everything should work without any of these
 following steps.
 
-These compilers require some special libraries. This task is more complex than
+Not all extensions can be built with MinGW or Cygwin, but many can.  Extensions
+most likely to not work are those that use C++ or depend on Microsoft Visual C
+extensions.
+
+To let Distutils compile your extension with Cygwin you have to type::
+
+   python setup.py build --compiler=cygwin
+
+and for Cygwin in no-cygwin mode [#]_ or for MinGW type::
+
+   python setup.py build --compiler=mingw32
+
+If you want to use any of these options/compilers as default, you should
+consider writing it in your personal or system-wide configuration file for
+Distutils (see section :ref:`inst-config-files`.)
+
+Older Versions of Python and MinGW
+""""""""""""""""""""""""""""""""""
+The following instructions only apply if you're using a version of Python
+inferior to 2.4.1 with a MinGW inferior to 3.0.0 (with
+binutils-2.13.90-20030111-1).
+
+These compilers require some special libraries.  This task is more complex than
 for Borland's C++, because there is no program to convert the library.  First
 you have to create a list of symbols which the Python DLL exports. (You can find
 a good program for this task at
@@ -965,18 +984,6 @@
 them too. The converted files have to reside in the same directories as the
 normal libraries do.
 
-To let Distutils compile your extension with Cygwin you now have to type ::
-
-   python setup.py build --compiler cygwin
-
-and for Cygwin in no-cygwin mode [#]_ or for MinGW type::
-
-   python setup.py build --compiler mingw32
-
-If you want to use any of these options/compilers as default, you should
-consider to write it in your personal or system-wide configuration file for
-Distutils (see section :ref:`inst-config-files`.)
-
 
 .. seealso::
 
diff --git a/docs/source/library/distutils2.index.xmlrpc.rst b/docs/source/library/distutils2.index.xmlrpc.rst
--- a/docs/source/library/distutils2.index.xmlrpc.rst
+++ b/docs/source/library/distutils2.index.xmlrpc.rst
@@ -90,7 +90,7 @@
     <ReleaseInfo FooBar 1.1>
 
 Assuming we already have a :class:`distutils2.index.ReleaseInfo` object defined,
-it's possible to pass it ot the xmlrpc client to retrieve and complete it's
+it's possible to pass it to the xmlrpc client to retrieve and complete its
 metadata::
 
     >>> foobar11 = ReleaseInfo("FooBar", "1.1")
diff --git a/docs/source/library/distutils2.metadata.rst b/docs/source/library/distutils2.metadata.rst
--- a/docs/source/library/distutils2.metadata.rst
+++ b/docs/source/library/distutils2.metadata.rst
@@ -2,7 +2,9 @@
 Metadata
 ========
 
-Distutils2 provides a :class:`DistributionMetadata` class that can read and
+.. module:: distutils2.metadata
+
+Distutils2 provides a :class:`~distutils2.metadata.Metadata` class that can read and
 write metadata files. This class is compatible with all metadata versions:
 
 * 1.0: :PEP:`241`
@@ -17,11 +19,11 @@
 Reading metadata
 ================
 
-The :class:`DistributionMetadata` class can be instantiated with the path of
+The :class:`~distutils2.metadata.Metadata` class can be instantiated with the path of
 the metadata file, and provides a dict-like interface to the values::
 
-    >>> from distutils2.metadata import DistributionMetadata
-    >>> metadata = DistributionMetadata('PKG-INFO')
+    >>> from distutils2.metadata import Metadata
+    >>> metadata = Metadata('PKG-INFO')
     >>> metadata.keys()[:5]
     ('Metadata-Version', 'Name', 'Version', 'Platform', 'Supported-Platform')
     >>> metadata['Name']
@@ -33,13 +35,13 @@
 
 The fields that supports environment markers can be automatically ignored if
 the object is instantiated using the ``platform_dependent`` option.
-:class:`DistributionMetadata` will interpret in the case the markers and will
+:class:`~distutils2.metadata.Metadata` will in that case interpret the markers and will
 automatically remove the fields that are not compliant with the running
 environment. Here's an example under Mac OS X. The win32 dependency
 we saw earlier is ignored::
 
-    >>> from distutils2.metadata import DistributionMetadata
-    >>> metadata = DistributionMetadata('PKG-INFO', platform_dependent=True)
+    >>> from distutils2.metadata import Metadata
+    >>> metadata = Metadata('PKG-INFO', platform_dependent=True)
     >>> metadata['Requires-Dist']
     ['bar']
 
@@ -51,9 +53,9 @@
 
 Here's an example, simulating a win32 environment::
 
-    >>> from distutils2.metadata import DistributionMetadata
+    >>> from distutils2.metadata import Metadata
     >>> context = {'sys.platform': 'win32'}
-    >>> metadata = DistributionMetadata('PKG-INFO', platform_dependent=True,
+    >>> metadata = Metadata('PKG-INFO', platform_dependent=True,
     ...                                 execution_context=context)
     ...
     >>> metadata['Requires-Dist'] = ["pywin32; sys.platform == 'win32'",
@@ -81,8 +83,8 @@
 Some fields in :PEP:`345` have to follow a version scheme in their versions
 predicate. When the scheme is violated, a warning is emitted::
 
-    >>> from distutils2.metadata import DistributionMetadata
-    >>> metadata = DistributionMetadata()
+    >>> from distutils2.metadata import Metadata
+    >>> metadata = Metadata()
     >>> metadata['Requires-Dist'] = ['Funky (Groovie)']
     "Funky (Groovie)" is not a valid predicate
     >>> metadata['Requires-Dist'] = ['Funky (1.2)']
diff --git a/docs/source/library/distutils2.rst b/docs/source/library/distutils2.rst
--- a/docs/source/library/distutils2.rst
+++ b/docs/source/library/distutils2.rst
@@ -24,6 +24,7 @@
     distutils2.version
     distutils2.metadata
     distutils2.depgraph
+    distutils2.install
     distutils2.index
     distutils2.tests.pypi_server
 
diff --git a/docs/source/library/distutils2.tests.pypi_server.rst b/docs/source/library/distutils2.tests.pypi_server.rst
--- a/docs/source/library/distutils2.tests.pypi_server.rst
+++ b/docs/source/library/distutils2.tests.pypi_server.rst
@@ -77,6 +77,7 @@
         @use_pypi_server()
         def test_somthing(self, server):
             # your tests goes here
+            ...
 
 The decorator will instantiate the server for you, and run and stop it just
 before and after your method call. You also can pass the server initializer,
@@ -85,4 +86,4 @@
     class SampleTestCase(TestCase):
         @use_pypi_server("test_case_name")
         def test_something(self, server):
-            # something
+            ...
diff --git a/docs/source/library/pkgutil.rst b/docs/source/library/pkgutil.rst
--- a/docs/source/library/pkgutil.rst
+++ b/docs/source/library/pkgutil.rst
@@ -4,77 +4,204 @@
 .. module:: pkgutil
    :synopsis: Utilities to support packages.
 
-.. TODO Follow the reST conventions used in the stdlib
+This module provides utilities to manipulate packages: support for the
+Importer protocol defined in :PEP:`302` and implementation of the API
+described in :PEP:`376` to work with the database of installed Python
+distributions.
 
-This module provides functions to manipulate packages, as well as
-the necessary functions to provide support for the "Importer Protocol" as
-described in :PEP:`302` and for working with the database of installed Python
-distributions which is specified in :PEP:`376`. In addition to the functions
-required in :PEP:`376`, back support for older ``.egg`` and ``.egg-info``
-distributions is provided as well. These distributions are represented by the
-class :class:`~distutils2._backport.pkgutil.EggInfoDistribution` and most
-functions provide an extra argument ``use_egg_info`` which indicates if
-they should consider these old styled distributions. This document details
-first the functions and classes available and then presents several use cases.
-
+Import system utilities
+-----------------------
 
 .. function:: extend_path(path, name)
 
-   Extend the search path for the modules which comprise a package. Intended use is
-   to place the following code in a package's :file:`__init__.py`::
+   Extend the search path for the modules which comprise a package.  Intended
+   use is to place the following code in a package's :file:`__init__.py`::
 
       from pkgutil import extend_path
       __path__ = extend_path(__path__, __name__)
 
-   This will add to the package's ``__path__`` all subdirectories of directories on
-   ``sys.path`` named after the package.  This is useful if one wants to distribute
-   different parts of a single logical package as multiple directories.
+   This will add to the package's ``__path__`` all subdirectories of directories
+   on :data:`sys.path` named after the package.  This is useful if one wants to
+   distribute different parts of a single logical package as multiple
+   directories.
 
-   It also looks for :file:`\*.pkg` files beginning where ``*`` matches the *name*
-   argument.  This feature is similar to :file:`\*.pth` files (see the :mod:`site`
-   module for more information), except that it doesn't special-case lines starting
-   with ``import``.  A :file:`\*.pkg` file is trusted at face value: apart from
-   checking for duplicates, all entries found in a :file:`\*.pkg` file are added to
-   the path, regardless of whether they exist on the filesystem.  (This is a
-   feature.)
+   It also looks for :file:`\*.pkg` files beginning where ``*`` matches the
+   *name* argument.  This feature is similar to :file:`\*.pth` files (see the
+   :mod:`site` module for more information), except that it doesn't special-case
+   lines starting with ``import``.  A :file:`\*.pkg` file is trusted at face
+   value: apart from checking for duplicates, all entries found in a
+   :file:`\*.pkg` file are added to the path, regardless of whether they exist
+   on the filesystem.  (This is a feature.)
 
    If the input path is not a list (as is the case for frozen packages) it is
    returned unchanged.  The input path is not modified; an extended copy is
    returned.  Items are only appended to the copy at the end.
 
-   It is assumed that ``sys.path`` is a sequence.  Items of ``sys.path`` that are
-   not strings referring to existing directories are ignored. Unicode items on
-   ``sys.path`` that cause errors when used as filenames may cause this function
-   to raise an exception (in line with :func:`os.path.isdir` behavior).
+   It is assumed that :data:`sys.path` is a sequence.  Items of :data:`sys.path`
+   that are not strings referring to existing directories are ignored. Unicode
+   items on :data:`sys.path` that cause errors when used as filenames may cause
+   this function to raise an exception (in line with :func:`os.path.isdir`
+   behavior).
+
+
+.. class:: ImpImporter(dirname=None)
+
+   :pep:`302` Importer that wraps Python's "classic" import algorithm.
+
+   If *dirname* is a string, a :pep:`302` importer is created that searches that
+   directory.  If *dirname* is ``None``, a :pep:`302` importer is created that
+   searches the current :data:`sys.path`, plus any modules that are frozen or
+   built-in.
+
+   Note that :class:`ImpImporter` does not currently support being used by
+   placement on :data:`sys.meta_path`.
+
+
+.. class:: ImpLoader(fullname, file, filename, etc)
+
+   :pep:`302` Loader that wraps Python's "classic" import algorithm.
+
+
+.. function:: find_loader(fullname)
+
+   Find a :pep:`302` "loader" object for *fullname*.
+
+   If *fullname* contains dots, path must be the containing package's
+   ``__path__``.  Returns ``None`` if the module cannot be found or imported.
+   This function uses :func:`iter_importers`, and is thus subject to the same
+   limitations regarding platform-specific special import locations such as the
+   Windows registry.
+
+
+.. function:: get_importer(path_item)
+
+   Retrieve a :pep:`302` importer for the given *path_item*.
+
+   The returned importer is cached in :data:`sys.path_importer_cache` if it was
+   newly created by a path hook.
+
+   If there is no importer, a wrapper around the basic import machinery is
+   returned.  This wrapper is never inserted into the importer cache (None is
+   inserted instead).
+
+   The cache (or part of it) can be cleared manually if a rescan of
+   :data:`sys.path_hooks` is necessary.
+
+
+.. function:: get_loader(module_or_name)
+
+   Get a :pep:`302` "loader" object for *module_or_name*.
+
+   If the module or package is accessible via the normal import mechanism, a
+   wrapper around the relevant part of that machinery is returned.  Returns
+   ``None`` if the module cannot be found or imported.  If the named module is
+   not already imported, its containing package (if any) is imported, in order
+   to establish the package ``__path__``.
+
+   This function uses :func:`iter_importers`, and is thus subject to the same
+   limitations regarding platform-specific special import locations such as the
+   Windows registry.
+
+
+.. function:: iter_importers(fullname='')
+
+   Yield :pep:`302` importers for the given module name.
+
+   If fullname contains a '.', the importers will be for the package containing
+   fullname, otherwise they will be importers for :data:`sys.meta_path`,
+   :data:`sys.path`, and Python's "classic" import machinery, in that order.  If
+   the named module is in a package, that package is imported as a side effect
+   of invoking this function.
+
+   Non-:pep:`302` mechanisms (e.g. the Windows registry) used by the standard
+   import machinery to find files in alternative locations are partially
+   supported, but are searched *after* :data:`sys.path`.  Normally, these
+   locations are searched *before* :data:`sys.path`, preventing :data:`sys.path`
+   entries from shadowing them.
+
+   For this to cause a visible difference in behaviour, there must be a module
+   or package name that is accessible via both :data:`sys.path` and one of the
+   non-:pep:`302` file system mechanisms.  In this case, the emulation will find
+   the former version, while the builtin import mechanism will find the latter.
+
+   Items of the following types can be affected by this discrepancy:
+   ``imp.C_EXTENSION``, ``imp.PY_SOURCE``, ``imp.PY_COMPILED``,
+   ``imp.PKG_DIRECTORY``.
+
+
+.. function:: iter_modules(path=None, prefix='')
+
+   Yields ``(module_loader, name, ispkg)`` for all submodules on *path*, or, if
+   path is ``None``, all top-level modules on :data:`sys.path`.
+
+   *path* should be either ``None`` or a list of paths to look for modules in.
+
+   *prefix* is a string to output on the front of every module name on output.
+
+
+.. function:: walk_packages(path=None, prefix='', onerror=None)
+
+   Yields ``(module_loader, name, ispkg)`` for all modules recursively on
+   *path*, or, if path is ``None``, all accessible modules.
+
+   *path* should be either ``None`` or a list of paths to look for modules in.
+
+   *prefix* is a string to output on the front of every module name on output.
+
+   Note that this function must import all *packages* (*not* all modules!) on
+   the given *path*, in order to access the ``__path__`` attribute to find
+   submodules.
+
+   *onerror* is a function which gets called with one argument (the name of the
+   package which was being imported) if any exception occurs while trying to
+   import a package.  If no *onerror* function is supplied, :exc:`ImportError`\s
+   are caught and ignored, while all other exceptions are propagated,
+   terminating the search.
+
+   Examples::
+
+      # list all modules python can access
+      walk_packages()
+
+      # list all submodules of ctypes
+      walk_packages(ctypes.__path__, ctypes.__name__ + '.')
+
 
 .. function:: get_data(package, resource)
 
    Get a resource from a package.
 
-   This is a wrapper for the :pep:`302` loader :func:`get_data` API. The package
-   argument should be the name of a package, in standard module format
-   (foo.bar). The resource argument should be in the form of a relative
-   filename, using ``/`` as the path separator. The parent directory name
+   This is a wrapper for the :pep:`302` loader :func:`get_data` API.  The
+   *package* argument should be the name of a package, in standard module format
+   (``foo.bar``).  The *resource* argument should be in the form of a relative
+   filename, using ``/`` as the path separator.  The parent directory name
    ``..`` is not allowed, and nor is a rooted name (starting with a ``/``).
 
-   The function returns a binary string that is the contents of the
-   specified resource.
+   The function returns a binary string that is the contents of the specified
+   resource.
 
    For packages located in the filesystem, which have already been imported,
    this is the rough equivalent of::
 
-       d = os.path.dirname(sys.modules[package].__file__)
-       data = open(os.path.join(d, resource), 'rb').read()
+      d = os.path.dirname(sys.modules[package].__file__)
+      data = open(os.path.join(d, resource), 'rb').read()
 
    If the package cannot be located or loaded, or it uses a :pep:`302` loader
-   which does not support :func:`get_data`, then None is returned.
+   which does not support :func:`get_data`, then ``None`` is returned.
 
 
-API Reference
-=============
+Installed distributions database
+--------------------------------
 
-.. automodule:: distutils2._backport.pkgutil
-   :members:
+Installed Python distributions are represented by instances of
+:class:`~distutils2._backport.pkgutil.Distribution`, or its subclass
+:class:`~distutils2._backport.pkgutil.EggInfoDistribution` (for legacy ``.egg``
+and ``.egg-info`` formats).  Most functions also provide an extra argument
+``use_egg_info`` to take legacy distributions into account.
+
+.. TODO write docs here, don't rely on automodule
+   classes: Distribution and descendents
+   functions: provides, obsoletes, replaces, etc.
 
 Caching
 +++++++
@@ -86,11 +213,10 @@
 :func:`~distutils2._backport.pkgutil.clear_cache`.
 
 
+Examples
+--------
 
-Example Usage
-=============
-
-Print All Information About a Distribution
+Print all information about a distribution
 ++++++++++++++++++++++++++++++++++++++++++
 
 Given a path to a ``.dist-info`` distribution, we shall print out all
@@ -182,7 +308,7 @@
   =====
   * It was installed as a dependency
 
-Find Out Obsoleted Distributions
+Find out obsoleted distributions
 ++++++++++++++++++++++++++++++++
 
 Now, we take tackle a different problem, we are interested in finding out
diff --git a/docs/source/setupcfg.rst b/docs/source/setupcfg.rst
--- a/docs/source/setupcfg.rst
+++ b/docs/source/setupcfg.rst
@@ -7,24 +7,35 @@
 
 Each section contains a description of its options.
 
-- Options that are marked *\*multi* can have multiple values, one value
-  per line.
+- Options that are marked *\*multi* can have multiple values, one value per
+  line.
 - Options that are marked *\*optional* can be omited.
-- Options that are marked *\*environ* can use environement markes, as described
-  in PEP 345.
+- Options that are marked *\*environ* can use environment markers, as described
+  in :PEP:`345`.
+
 
 The sections are:
 
-- global
-- metadata
-- files
-- command sections
+global
+    Global options for Distutils2.
+
+metadata
+    The metadata section contains the metadata for the project as described in
+    :PEP:`345`.
+
+files
+    Declaration of package files included in the project.
+
+`command` sections
+    Redefinition of user options for Distutils2 commands.
 
 
 global
 ======
 
-Contains global options for Distutils2. This section is shared with Distutils1.
+Contains global options for Distutils2. This section is shared with Distutils1
+(the legacy version distributed in the Python 2.x standard library).
+
 
 - **commands**: Defined Distutils2 command. A command is defined by its fully
   qualified name.
@@ -38,13 +49,13 @@
   *\*optional* *\*multi*
 
 - **compilers**: Defined Distutils2 compiler. A compiler is defined by its fully
-  qualified name. 
+  qualified name.
 
   Example::
 
     [global]
     compiler =
-        package.compilers.CustomCCompiler
+        package.compiler.CustomCCompiler
 
   *\*optional* *\*multi*
 
@@ -52,21 +63,29 @@
   :file:`setup.cfg` file is read. The callable receives the configuration
   in form of a mapping and can make some changes to it. *\*optional*
 
+  Example::
+
+    [global]
+    setup_hook =
+        distutils2.tests.test_config.hook
+
 
 metadata
 ========
 
 The metadata section contains the metadata for the project as described in
-PEP 345.
+:PEP:`345`.
 
+.. Note::
+    Field names are case-insensitive.
 
 Fields:
 
 - **name**: Name of the project.
-- **version**: Version of the project. Must comply with PEP 386.
+- **version**: Version of the project. Must comply with :PEP:`386`.
 - **platform**: Platform specification describing an operating system supported
   by the distribution which is not listed in the "Operating System" Trove
-  classifiers. *\*multi* *\*optional*
+  classifiers (:PEP:`301`). *\*multi* *\*optional*
 - **supported-platform**: Binary distributions containing a PKG-INFO file will
   use the Supported-Platform field in their metadata to specify the OS and
   CPU for which the binary distribution was compiled.  The semantics of
@@ -113,14 +132,18 @@
     name = pypi2rpm
     version = 0.1
     author = Tarek Ziade
-    author_email = tarek at ziade.org
+    author-email = tarek at ziade.org
     summary = Script that transforms a sdist archive into a rpm archive
     description-file = README
-    home_page = http://bitbucket.org/tarek/pypi2rpm
+    home-page = http://bitbucket.org/tarek/pypi2rpm
+    project-url: RSS feed, https://bitbucket.org/tarek/pypi2rpm/rss
 
     classifier = Development Status :: 3 - Alpha
         License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)
 
+.. Note::
+    Some metadata fields seen in :PEP:`345` are automatically generated
+    (for instance Metadata-Version value).
 
 
 files
@@ -128,6 +151,8 @@
 
 This section describes the files included in the project.
 
+- **packages_root**: the root directory containing all packages. If not provided,
+  Distutils2 will use the current directory.  *\*optional*
 - **packages**: a list of packages the project includes *\*optional* *\*multi*
- **modules**: a list of modules the project includes *\*optional* *\*multi*
 - **scripts**: a list of scripts the project includes *\*optional* *\*multi*
@@ -136,6 +161,7 @@
 Example::
 
     [files]
+    packages_root = src
     packages =
             pypi2rpm
             pypi2rpm.command
@@ -145,17 +171,379 @@
 
     extra_files =
             setup.py
+            README
 
+.. Note::
+    In Distutils2, setup.cfg will be implicitly included.
 
-command sections
-================
+Resources
+=========
 
-Each command can have its options described in :file:`setup.cfg`
+This section describes the files used by the project which must not be installed in the same place as Python modules or libraries; they are called **resources**. They are, for example, documentation files, script files, databases, etc.
 
+For declaring resources, you must use this notation ::
+
+    source = destination
+
+Data-files are declared in the **resources** field in the **file** section, for example::
+
+    [files]
+    resources =
+        source1 = destination1
+        source2 = destination2
+
+The **source** part of the declaration is a relative path to a resource file (using the Unix path separator **/**). For example, if you have this source tree::
+
+    foo/
+        doc/
+            doc.man
+        scripts/
+            foo.sh
+            
+Your setup.cfg will look like::
+
+    [files]
+    resources =
+        doc/doc.man = destination_doc
+        scripts/foo.sh = destination_scripts
+        
+The final paths where files will be placed are composed of **destination** + **source**. In the previous example, **doc/doc.man** will be placed in **destination_doc/doc/doc.man** and **scripts/foo.sh** will be placed in **destination_scripts/scripts/foo.sh**. (If you want more control over the final path, take a look at base_prefix_.)
+
+The **destination** part of a resource declaration is a path using categories. Indeed, it's generally a bad idea to give an absolute path, as it would not be portable across platforms. So, you must use resource categories in your **destination** declaration. Categories will be replaced by their real path at installation time. Using categories is all benefit: your declaration will be simpler and cross-platform, and it will allow packagers to place resource files where they want without breaking your code.
+
+Categories can be specified by using this syntax::
+
+    {category}
+    
+The default categories are:
+
+* config
+* appdata
+* appdata.arch
+* appdata.persistent
+* appdata.disposable
+* help
+* icon
+* scripts
+* doc
+* info
+* man
+
+A special category, **{distribution.name}**, also exists; it will be replaced by the name of the distribution. But as most of the default categories already use it, it's usually not necessary to add **{distribution.name}** to your destination.
+
+If you use categories in your declarations, and you are encouraged to do so, the final path will be::
+
+    destination_expanded + source
+
+.. _example_final_path:
+
+For example, if you have this setup.cfg::
+
+    [metadata]
+    name = foo
+
+    [files]
+    resources =
+        doc/doc.man = {doc}
+
+And if **{doc}** is replaced by **{datadir}/doc/{distribution.name}**, final path will be::
+
+    {datadir}/doc/foo/doc/doc.man
+    
+The {datadir} category will be platform-dependent.
+
+    
+More control on source part
+---------------------------
+
+Glob syntax
+___________
+
+When you declare a source file, you can use a glob-like syntax to match multiple files, for example::
+
+    scripts/* = {script}
+    
+This will match all the files in the scripts directory and place them in the script category.
+
+Glob tokens are:
+
+ * * : match all files.
+ * ? : match any character.
+ * ** : match any level of tree recursion (even 0).
+ * {} : will match any part separated by comma (example : {sh,bat}).
+ 
+TODO ::
+
+    Add an example
+    
+Order of declaration
+____________________
+
+The order of declaration is important if one file matches multiple rules. The last rule that matches the file is used; this is useful if you have this source tree::
+
+    foo/
+        doc/
+            index.rst
+            setup.rst
+            documentation.txt
+            doc.tex
+            README
+            
+If you want all the files in the doc directory to be placed in the {doc} category, but README to be placed in the {help} category, then instead of listing all the files one by one, you can declare them in this way::
+
+    [files]
+    resources =
+        doc/* = {doc}
+        doc/README = {help}
+        
+Exclude
+_______
+
+You can exclude some files from the resources declaration by giving them no destination. This can be useful if you have a non-resource file in the same directory as resource files::
+
+    foo/
+        doc/
+           RELEASES
+           doc.tex
+           documentation.txt
+           docu.rst
+           
+Your **file** section will be::
+
+    [files]
+    resources =
+        doc/* = {doc}
+        doc/RELEASES =
+        
+More control on destination part
+--------------------------------  
+
+.. _base_prefix:
+
+Define a base-prefix
+____________________
+
+When you define your resources, you can have more control over how the final path is computed.
+
+By default, the final path is::
+
+    destination + source
+    
+This can generate long paths, for example (example_final_path_)::
+
+    {datadir}/doc/foo/doc/doc.man
+    
+When you declare your source, you can use a separator to split the source into a **prefix** and a **suffix**. The supported separators are:
+
+ * Whitespace
+ 
+So, for example, if you have this source::
+
+    docs/ doc.man
+    
+The **prefix** is "docs/" and the **suffix** is "doc.man".
+
+.. note::
+
+    The separator can be placed after a path separator or replace it, so these two sources are equivalent::
+    
+        docs/ doc.man
+        docs doc.man
+
+.. note::
+
+    The glob syntax works the same way with a standard source and a split source, so these rules::
+    
+        docs/*
+        docs/ *
+        docs *
+        
+    Will match all the files in the docs directory.
+    
+When you use a split source, the final path is computed in this way::
+
+    destination + prefix
+    
+So for example, if you have this setup.cfg::
+
+    [metadata]
+    name = foo
+
+    [files]
+    resources =
+        doc/ doc.man = {doc}
+
+And if **{doc}** is replaced by **{datadir}/doc/{distribution.name}**, final path will be::
+
+    {datadir}/doc/foo/doc.man
+    
+    
+Overwrite paths for categories
+------------------------------
+
+.. warning::
+
+    This part is intended for system administrator or packager.
+    
+The real paths of categories are registered in the *sysconfig.cfg* file installed in your Python installation. The format of this file is INI-like. The content of the file is organized into several sections:
+
+ * globals: standard paths for categories.
+ * posix_prefix: standard paths for categories and installation paths for POSIX systems.
+ * and so on...
+ 
+Standard category paths are platform-independent; they generally refer to other categories, which are platform-dependent. The sysconfig module will choose these categories from the section matching os.name. For example::
+
+    doc = {datadir}/doc/{distribution.name}
+
+It refers to the datadir category, which can differ between platforms. On a POSIX system, it may be::
+
+    datadir = /usr/share
+    
+So the final path will be::
+
+    doc = /usr/share/doc/{distribution.name}
+    
+The platform-dependent categories are:
+ 
+ * confdir
+ * datadir
+ * libdir
+ * base
+
+Define extra-categories
+-----------------------
+
+Examples
+--------
+
+.. note::
+
+    These examples are incremental, but each also works on its own.
+
+Resources in root dir
+_____________________
+
+Source tree::
+
+  babar-1.0/
+    README
+    babar.sh
+    launch.sh
+    babar.py
+    
+Setup.cfg::
+
+    [files]
+    resources =
+        README = {doc}
+        *.sh = {scripts}
+  
+So babar.sh and launch.sh will be placed in the {scripts} directory.
+
+Now let's move all the scripts into a scripts directory.
+
+Resources in sub-directory
+__________________________
+
+Source tree::
+
+  babar-1.1/
+    README
+    scripts/
+      babar.sh
+      launch.sh
+      LAUNCH
+    babar.py
+    
+Setup.cfg::
+
+    [files]
+    resources =
+        README = {doc}
+        scripts/ LAUNCH = {doc}
+        scripts/ *.sh = {scripts}
+  
+It's important to use the separator after scripts/ to install all the bash scripts into {scripts} instead of {scripts}/scripts.
+
+Now let's add some docs.
+
+Resources in multiple sub-directories
+_____________________________________
+
+Source tree::
+
+  babar-1.2/
+    README
+    scripts/
+      babar.sh
+      launch.sh
+      LAUNCH
+    docs/
+      api
+      man
+    babar.py
+
+Setup.cfg::
+
+   [files]
+   resources =
+        README = {doc}
+        scripts/ LAUNCH = {doc}
+        scripts/ *.sh = {scripts}
+        docs/ * = {doc}
+        docs/ man = {man}
+  
+We want to place all the files in the docs directory into the {doc} category, except man, which must be placed into the {man} category. We use the order of declaration of globs to choose the destination: the last glob that matches a file is used.
+
+Now let's add some scripts for windows users.
+  
+Complete example
+________________
+
+Source tree::
+
+  babar-1.3/
+    README
+    doc/
+      api
+      man
+    scripts/  
+      babar.sh
+      launch.sh
+      babar.bat
+      launch.bat
+      LAUNCH
+
+Setup.cfg::
+
+    [files]
+    resources = 
+        README = {doc}
+        scripts/ LAUNCH = {doc}
+        scripts/ *.{sh,bat} = {scripts}
+        doc/ * = {doc}
+        doc/ man = {man}
+
+We use brace expansion syntax to place all the bash and batch scripts into the {scripts} category.
+
+.. Warning::
+    In Distutils2, the setup.py and README (or README.txt) files are no
+    longer included in the source distribution by default.
+
+`command` sections
+==================
+
+Each Distutils2 command can have its own user options defined in :file:`setup.cfg`
 
 Example::
 
     [sdist]
-    manifest_makers = package.module.Maker
+    manifest-builders = package.module.Maker
 
 
+To override the build class in order to generate Python 3 code from your Python 2 base::
+
+    [build_py]
+    use-2to3 = True
+
+
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -1,12 +1,11 @@
 #!/usr/bin/env python
-# -*- encoding: utf8 -*-
+# -*- encoding: utf-8 -*-
 import sys
 import os
 import re
 
 from distutils2 import __version__ as VERSION
 from distutils import log
-from distutils.core import setup, Extension
 from distutils.ccompiler import new_compiler
 from distutils.command.sdist import sdist
 from distutils.command.install import install
@@ -15,8 +14,13 @@
 try:
     from distutils.command.build_py import build_py_2to3 as build_py
 except ImportError:
-    # 2.x
-    from distutils.command.build_py import build_py
+    # 2.x, try to use setuptools if available
+    try :
+        from setuptools import setup, Extension
+        from setuptools.command.build_py import build_py
+    except ImportError:
+        from distutils.core import setup, Extension
+        from distutils.command.build_py import build_py
 
 
 f = open('README.txt')
@@ -196,9 +200,10 @@
 
     return exts
 
-setup_kwargs = {}
+setup_kwargs = {'scripts': ['distutils2/pysetup']}
+
 if sys.version < '2.6':
-    setup_kwargs['scripts'] = ['distutils2/mkcfg.py']
+    setup_kwargs['scripts'].append('distutils2/mkcfg.py')
 
 if sys.version < '2.5':
     setup_kwargs['ext_modules'] = prepare_hashlib_extensions()

-- 
Repository URL: http://hg.python.org/distutils2


More information about the Python-checkins mailing list