[Python-checkins] cpython: Issue #12180: Fixed a few remaining errors in test_packaging when threading is not available.

tarek.ziade python-checkins at python.org
Wed May 25 23:46:43 CEST 2011


http://hg.python.org/cpython/rev/8aa1ae77cfe1
changeset:   70389:8aa1ae77cfe1
parent:      70387:325453be64ec
user:        Tarek Ziade <tarek at ziade.org>
date:        Wed May 25 23:46:09 2011 +0200
summary:
  Issue #12180: Fixed a few remaining errors in test_packaging when threading is not available.

files:
  Lib/packaging/tests/support.py          |   9 ++++
  Lib/packaging/tests/test_install.py     |   5 +-
  Lib/packaging/tests/test_pypi_dist.py   |   5 +-
  Lib/packaging/tests/test_pypi_simple.py |  24 +++++++++++-
  Misc/NEWS                               |   3 +
  5 files changed, 39 insertions(+), 7 deletions(-)
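
For context, every affected test module now follows the same pattern: the
threaded test-server decorators are imported inside a try/except ImportError,
fall back to the no-op fake_dec helper added to support.py, and the tests that
actually need a server are guarded with unittest.skipIf. Below is a minimal,
standalone sketch of that pattern; only fake_dec, the import fallback, and the
skipIf guard come from this patch, while the test case and method names are
illustrative.

    import unittest

    def fake_dec(*args, **kw):
        """No-op stand-in for a decorator that needs the threaded server."""
        def _wrap(func):
            def __wrap(*args, **kw):
                return func(*args, **kw)
            return __wrap
        return _wrap

    try:
        import threading
        # The real modules import use_pypi_server / use_xmlrpc_server here;
        # those decorators start a threaded server and pass it to the test.
        from packaging.tests.pypi_server import use_pypi_server
    except ImportError:
        threading = None
        use_pypi_server = fake_dec  # still applies as a decorator, does nothing

    class ExampleTestCase(unittest.TestCase):

        @unittest.skipIf(threading is None, 'needs threads')
        @use_pypi_server()
        def test_example(self, server=None):
            # 'server' is only supplied by the real decorator; the default
            # keeps the signature valid when fake_dec is used instead.
            self.assertTrue(True)

    if __name__ == '__main__':
        unittest.main()

test_pypi_simple.py applies the same guard with _thread instead of threading,
which is why the skipIf lines in the diff below test "_thread is None".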


diff --git a/Lib/packaging/tests/support.py b/Lib/packaging/tests/support.py
--- a/Lib/packaging/tests/support.py
+++ b/Lib/packaging/tests/support.py
@@ -253,6 +253,15 @@
     return d
 
 
+def fake_dec(*args, **kw):
+    """Fake decorator"""
+    def _wrap(func):
+        def __wrap(*args, **kw):
+            return func(*args, **kw)
+        return __wrap
+    return _wrap
+
+
 try:
     from test.support import skip_unless_symlink
 except ImportError:
diff --git a/Lib/packaging/tests/test_install.py b/Lib/packaging/tests/test_install.py
--- a/Lib/packaging/tests/test_install.py
+++ b/Lib/packaging/tests/test_install.py
@@ -6,13 +6,14 @@
 from packaging.pypi.xmlrpc import Client
 from packaging.metadata import Metadata
 
-from packaging.tests.support import LoggingCatcher, TempdirManager, unittest
+from packaging.tests.support import (LoggingCatcher, TempdirManager, unittest,
+                                     fake_dec)
 try:
     import threading
     from packaging.tests.pypi_server import use_xmlrpc_server
 except ImportError:
     threading = None
-    use_xmlrpc_server = None
+    use_xmlrpc_server = fake_dec
 
 
 class InstalledDist:
diff --git a/Lib/packaging/tests/test_pypi_dist.py b/Lib/packaging/tests/test_pypi_dist.py
--- a/Lib/packaging/tests/test_pypi_dist.py
+++ b/Lib/packaging/tests/test_pypi_dist.py
@@ -7,12 +7,13 @@
 from packaging.pypi.errors import HashDoesNotMatch, UnsupportedHashName
 
 from packaging.tests import unittest
-from packaging.tests.support import TempdirManager, requires_zlib
+from packaging.tests.support import TempdirManager, requires_zlib, fake_dec
 try:
     import threading
     from packaging.tests.pypi_server import use_pypi_server
 except ImportError:
-    threading = use_pypi_server = None
+    threading = None
+    use_pypi_server = fake_dec
 
 
 def Dist(*args, **kwargs):
diff --git a/Lib/packaging/tests/test_pypi_simple.py b/Lib/packaging/tests/test_pypi_simple.py
--- a/Lib/packaging/tests/test_pypi_simple.py
+++ b/Lib/packaging/tests/test_pypi_simple.py
@@ -10,9 +10,19 @@
 from packaging.pypi.simple import Crawler
 
 from packaging.tests import unittest
-from packaging.tests.support import TempdirManager, LoggingCatcher
-from packaging.tests.pypi_server import (use_pypi_server, PyPIServer,
-                                         PYPI_DEFAULT_STATIC_PATH)
+from packaging.tests.support import (TempdirManager, LoggingCatcher,
+                                     fake_dec)
+
+try:
+    import _thread
+    from packaging.tests.pypi_server import (use_pypi_server, PyPIServer,
+                                             PYPI_DEFAULT_STATIC_PATH)
+except ImportError:
+    _thread = None
+    use_pypi_server = fake_dec
+    PYPI_DEFAULT_STATIC_PATH = os.path.join(
+        os.path.dirname(os.path.abspath(__file__)), 'pypiserver')
+
 
 
 class SimpleCrawlerTestCase(TempdirManager,
@@ -28,6 +38,7 @@
         return Crawler(server.full_address + base_url, *args,
                        **kwargs)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server()
     def test_bad_urls(self, server):
         crawler = Crawler()
@@ -84,6 +95,7 @@
                 'http://www.famfamfam.com/">')
         crawler._process_url(url, page)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server("test_found_links")
     def test_found_links(self, server):
         # Browse the index, asking for a specified release version
@@ -139,6 +151,7 @@
         self.assertTrue(
             crawler._is_browsable("http://pypi.example.org/a/path"))
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server("with_externals")
     def test_follow_externals(self, server):
         # Include external pages
@@ -149,6 +162,7 @@
         self.assertIn(server.full_address + "/external/external.html",
             crawler._processed_urls)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server("with_real_externals")
     def test_restrict_hosts(self, server):
         # Restricting the crawler to a list of allowed hosts is possible
@@ -159,6 +173,7 @@
         self.assertNotIn(server.full_address + "/external/external.html",
             crawler._processed_urls)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server(static_filesystem_paths=["with_externals"],
         static_uri_paths=["simple", "external"])
     def test_links_priority(self, server):
@@ -192,6 +207,7 @@
                          releases[0].dists['sdist'].url['hashval'])
         self.assertEqual('md5', releases[0].dists['sdist'].url['hashname'])
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server(static_filesystem_paths=["with_norel_links"],
         static_uri_paths=["simple", "external"])
     def test_not_scan_all_links(self, server):
@@ -217,6 +233,7 @@
         self.assertIn("%s/foobar-2.0.tar.gz" % server.full_address,
             crawler._processed_urls)  # linked from external homepage (rel)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     def test_uses_mirrors(self):
         # When the main repository seems down, try using the given mirrors
         server = PyPIServer("foo_bar_baz")
@@ -314,6 +331,7 @@
         self.assertIn('http://example.org/some/simpleurl', found_links)
         self.assertIn('http://example.org/some/download', found_links)
 
+    @unittest.skipIf(_thread is None, 'needs threads')
     @use_pypi_server("project_list")
     def test_search_projects(self, server):
         # we can search the index for some projects, on their names
diff --git a/Misc/NEWS b/Misc/NEWS
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -161,6 +161,9 @@
 Library
 -------
 
+- Issue #12180: Fixed a few remaining errors in test_packaging when threading
+  is not available.
+
 - Issue #12175: RawIOBase.readall() now returns None if read() returns None.
 
 - Issue #12175: FileIO.readall() now raises a ValueError instead of an IOError

-- 
Repository URL: http://hg.python.org/cpython

