[Python-checkins] cpython (merge default -> default): Merge.

larry.hastings python-checkins at python.org
Mon May 25 01:43:59 CEST 2015


https://hg.python.org/cpython/rev/0d0989359bbb
changeset:   96278:0d0989359bbb
parent:      96277:8c70691e51cf
parent:      96272:cb30db9cc029
user:        Larry Hastings <larry at hastings.org>
date:        Sun May 24 16:41:42 2015 -0700
summary:
  Merge.

files:
  Lib/functools.py                        |  203 ++--
  Lib/test/test_functools.py              |  109 ++-
  Misc/NEWS                               |    5 +
  Modules/_functoolsmodule.c              |  547 +++++++++++-
  Objects/descrobject.c                   |   25 +-
  PCbuild/_testmultiphase.vcxproj         |   80 +
  PCbuild/_testmultiphase.vcxproj.filters |   22 +
  PCbuild/pcbuild.proj                    |    2 +-
  PCbuild/pcbuild.sln                     |   16 +
  Tools/msi/make_zip.proj                 |    1 +
  Tools/msi/make_zip.py                   |    1 +
  Tools/msi/test/test_files.wxs           |   12 +
  12 files changed, 903 insertions(+), 120 deletions(-)


diff --git a/Lib/functools.py b/Lib/functools.py
--- a/Lib/functools.py
+++ b/Lib/functools.py
@@ -419,120 +419,129 @@
     if maxsize is not None and not isinstance(maxsize, int):
         raise TypeError('Expected maxsize to be an integer or None')
 
+    def decorating_function(user_function):
+        wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
+        return update_wrapper(wrapper, user_function)
+
+    return decorating_function
+
+def _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo):
     # Constants shared by all lru cache instances:
     sentinel = object()          # unique object used to signal cache misses
     make_key = _make_key         # build a key from the function arguments
     PREV, NEXT, KEY, RESULT = 0, 1, 2, 3   # names for the link fields
 
-    def decorating_function(user_function):
-        cache = {}
-        hits = misses = 0
-        full = False
-        cache_get = cache.get    # bound method to lookup a key or return None
-        lock = RLock()           # because linkedlist updates aren't threadsafe
-        root = []                # root of the circular doubly linked list
-        root[:] = [root, root, None, None]     # initialize by pointing to self
+    cache = {}
+    hits = misses = 0
+    full = False
+    cache_get = cache.get    # bound method to lookup a key or return None
+    lock = RLock()           # because linkedlist updates aren't threadsafe
+    root = []                # root of the circular doubly linked list
+    root[:] = [root, root, None, None]     # initialize by pointing to self
 
-        if maxsize == 0:
+    if maxsize == 0:
 
-            def wrapper(*args, **kwds):
-                # No caching -- just a statistics update after a successful call
-                nonlocal misses
-                result = user_function(*args, **kwds)
-                misses += 1
+        def wrapper(*args, **kwds):
+            # No caching -- just a statistics update after a successful call
+            nonlocal misses
+            result = user_function(*args, **kwds)
+            misses += 1
+            return result
+
+    elif maxsize is None:
+
+        def wrapper(*args, **kwds):
+            # Simple caching without ordering or size limit
+            nonlocal hits, misses
+            key = make_key(args, kwds, typed)
+            result = cache_get(key, sentinel)
+            if result is not sentinel:
+                hits += 1
                 return result
+            result = user_function(*args, **kwds)
+            cache[key] = result
+            misses += 1
+            return result
 
-        elif maxsize is None:
+    else:
 
-            def wrapper(*args, **kwds):
-                # Simple caching without ordering or size limit
-                nonlocal hits, misses
-                key = make_key(args, kwds, typed)
-                result = cache_get(key, sentinel)
-                if result is not sentinel:
+        def wrapper(*args, **kwds):
+            # Size limited caching that tracks accesses by recency
+            nonlocal root, hits, misses, full
+            key = make_key(args, kwds, typed)
+            with lock:
+                link = cache_get(key)
+                if link is not None:
+                    # Move the link to the front of the circular queue
+                    link_prev, link_next, _key, result = link
+                    link_prev[NEXT] = link_next
+                    link_next[PREV] = link_prev
+                    last = root[PREV]
+                    last[NEXT] = root[PREV] = link
+                    link[PREV] = last
+                    link[NEXT] = root
                     hits += 1
                     return result
-                result = user_function(*args, **kwds)
-                cache[key] = result
+            result = user_function(*args, **kwds)
+            with lock:
+                if key in cache:
+                    # Getting here means that this same key was added to the
+                    # cache while the lock was released.  Since the link
+                    # update is already done, we need only return the
+                    # computed result and update the count of misses.
+                    pass
+                elif full:
+                    # Use the old root to store the new key and result.
+                    oldroot = root
+                    oldroot[KEY] = key
+                    oldroot[RESULT] = result
+                    # Empty the oldest link and make it the new root.
+                    # Keep a reference to the old key and old result to
+                    # prevent their ref counts from going to zero during the
+                    # update. That will prevent potentially arbitrary object
+                    # clean-up code (i.e. __del__) from running while we're
+                    # still adjusting the links.
+                    root = oldroot[NEXT]
+                    oldkey = root[KEY]
+                    oldresult = root[RESULT]
+                    root[KEY] = root[RESULT] = None
+                    # Now update the cache dictionary.
+                    del cache[oldkey]
+                    # Save the potentially reentrant cache[key] assignment
+                    # for last, after the root and links have been put in
+                    # a consistent state.
+                    cache[key] = oldroot
+                else:
+                    # Put result in a new link at the front of the queue.
+                    last = root[PREV]
+                    link = [last, root, key, result]
+                    last[NEXT] = root[PREV] = cache[key] = link
+                    full = (len(cache) >= maxsize)
                 misses += 1
-                return result
+            return result
 
-        else:
+    def cache_info():
+        """Report cache statistics"""
+        with lock:
+            return _CacheInfo(hits, misses, maxsize, len(cache))
 
-            def wrapper(*args, **kwds):
-                # Size limited caching that tracks accesses by recency
-                nonlocal root, hits, misses, full
-                key = make_key(args, kwds, typed)
-                with lock:
-                    link = cache_get(key)
-                    if link is not None:
-                        # Move the link to the front of the circular queue
-                        link_prev, link_next, _key, result = link
-                        link_prev[NEXT] = link_next
-                        link_next[PREV] = link_prev
-                        last = root[PREV]
-                        last[NEXT] = root[PREV] = link
-                        link[PREV] = last
-                        link[NEXT] = root
-                        hits += 1
-                        return result
-                result = user_function(*args, **kwds)
-                with lock:
-                    if key in cache:
-                        # Getting here means that this same key was added to the
-                        # cache while the lock was released.  Since the link
-                        # update is already done, we need only return the
-                        # computed result and update the count of misses.
-                        pass
-                    elif full:
-                        # Use the old root to store the new key and result.
-                        oldroot = root
-                        oldroot[KEY] = key
-                        oldroot[RESULT] = result
-                        # Empty the oldest link and make it the new root.
-                        # Keep a reference to the old key and old result to
-                        # prevent their ref counts from going to zero during the
-                        # update. That will prevent potentially arbitrary object
-                        # clean-up code (i.e. __del__) from running while we're
-                        # still adjusting the links.
-                        root = oldroot[NEXT]
-                        oldkey = root[KEY]
-                        oldresult = root[RESULT]
-                        root[KEY] = root[RESULT] = None
-                        # Now update the cache dictionary.
-                        del cache[oldkey]
-                        # Save the potentially reentrant cache[key] assignment
-                        # for last, after the root and links have been put in
-                        # a consistent state.
-                        cache[key] = oldroot
-                    else:
-                        # Put result in a new link at the front of the queue.
-                        last = root[PREV]
-                        link = [last, root, key, result]
-                        last[NEXT] = root[PREV] = cache[key] = link
-                        full = (len(cache) >= maxsize)
-                    misses += 1
-                return result
+    def cache_clear():
+        """Clear the cache and cache statistics"""
+        nonlocal hits, misses, full
+        with lock:
+            cache.clear()
+            root[:] = [root, root, None, None]
+            hits = misses = 0
+            full = False
 
-        def cache_info():
-            """Report cache statistics"""
-            with lock:
-                return _CacheInfo(hits, misses, maxsize, len(cache))
+    wrapper.cache_info = cache_info
+    wrapper.cache_clear = cache_clear
+    return update_wrapper(wrapper, user_function)
 
-        def cache_clear():
-            """Clear the cache and cache statistics"""
-            nonlocal hits, misses, full
-            with lock:
-                cache.clear()
-                root[:] = [root, root, None, None]
-                hits = misses = 0
-                full = False
-
-        wrapper.cache_info = cache_info
-        wrapper.cache_clear = cache_clear
-        return update_wrapper(wrapper, user_function)
-
-    return decorating_function
+try:
+    from _functools import _lru_cache_wrapper
+except ImportError:
+    pass
 
 
 ################################################################################
diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py
--- a/Lib/test/test_functools.py
+++ b/Lib/test/test_functools.py
@@ -7,6 +7,10 @@
 from test import support
 import unittest
 from weakref import proxy
+try:
+    import threading
+except ImportError:
+    threading = None
 
 import functools
 
@@ -912,12 +916,12 @@
         return self.value == other.value
 
 
-class TestLRU(unittest.TestCase):
+class TestLRU:
 
     def test_lru(self):
         def orig(x, y):
             return 3 * x + y
-        f = functools.lru_cache(maxsize=20)(orig)
+        f = self.module.lru_cache(maxsize=20)(orig)
         hits, misses, maxsize, currsize = f.cache_info()
         self.assertEqual(maxsize, 20)
         self.assertEqual(currsize, 0)
@@ -955,7 +959,7 @@
         self.assertEqual(currsize, 1)
 
         # test size zero (which means "never-cache")
-        @functools.lru_cache(0)
+        @self.module.lru_cache(0)
         def f():
             nonlocal f_cnt
             f_cnt += 1
@@ -971,7 +975,7 @@
         self.assertEqual(currsize, 0)
 
         # test size one
-        @functools.lru_cache(1)
+        @self.module.lru_cache(1)
         def f():
             nonlocal f_cnt
             f_cnt += 1
@@ -987,7 +991,7 @@
         self.assertEqual(currsize, 1)
 
         # test size two
-        @functools.lru_cache(2)
+        @self.module.lru_cache(2)
         def f(x):
             nonlocal f_cnt
             f_cnt += 1
@@ -1004,7 +1008,7 @@
         self.assertEqual(currsize, 2)
 
     def test_lru_with_maxsize_none(self):
-        @functools.lru_cache(maxsize=None)
+        @self.module.lru_cache(maxsize=None)
         def fib(n):
             if n < 2:
                 return n
@@ -1012,17 +1016,26 @@
         self.assertEqual([fib(n) for n in range(16)],
             [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610])
         self.assertEqual(fib.cache_info(),
-            functools._CacheInfo(hits=28, misses=16, maxsize=None, currsize=16))
+            self.module._CacheInfo(hits=28, misses=16, maxsize=None, currsize=16))
         fib.cache_clear()
         self.assertEqual(fib.cache_info(),
-            functools._CacheInfo(hits=0, misses=0, maxsize=None, currsize=0))
+            self.module._CacheInfo(hits=0, misses=0, maxsize=None, currsize=0))
+
+    def test_lru_with_maxsize_negative(self):
+        @self.module.lru_cache(maxsize=-10)
+        def eq(n):
+            return n
+        for i in (0, 1):
+            self.assertEqual([eq(n) for n in range(150)], list(range(150)))
+        self.assertEqual(eq.cache_info(),
+            self.module._CacheInfo(hits=0, misses=300, maxsize=-10, currsize=1))
 
     def test_lru_with_exceptions(self):
         # Verify that user_function exceptions get passed through without
         # creating a hard-to-read chained exception.
         # http://bugs.python.org/issue13177
         for maxsize in (None, 128):
-            @functools.lru_cache(maxsize)
+            @self.module.lru_cache(maxsize)
             def func(i):
                 return 'abc'[i]
             self.assertEqual(func(0), 'a')
@@ -1035,7 +1048,7 @@
 
     def test_lru_with_types(self):
         for maxsize in (None, 128):
-            @functools.lru_cache(maxsize=maxsize, typed=True)
+            @self.module.lru_cache(maxsize=maxsize, typed=True)
             def square(x):
                 return x * x
             self.assertEqual(square(3), 9)
@@ -1050,7 +1063,7 @@
             self.assertEqual(square.cache_info().misses, 4)
 
     def test_lru_with_keyword_args(self):
-        @functools.lru_cache()
+        @self.module.lru_cache()
         def fib(n):
             if n < 2:
                 return n
@@ -1060,13 +1073,13 @@
             [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610]
         )
         self.assertEqual(fib.cache_info(),
-            functools._CacheInfo(hits=28, misses=16, maxsize=128, currsize=16))
+            self.module._CacheInfo(hits=28, misses=16, maxsize=128, currsize=16))
         fib.cache_clear()
         self.assertEqual(fib.cache_info(),
-            functools._CacheInfo(hits=0, misses=0, maxsize=128, currsize=0))
+            self.module._CacheInfo(hits=0, misses=0, maxsize=128, currsize=0))
 
     def test_lru_with_keyword_args_maxsize_none(self):
-        @functools.lru_cache(maxsize=None)
+        @self.module.lru_cache(maxsize=None)
         def fib(n):
             if n < 2:
                 return n
@@ -1074,15 +1087,71 @@
         self.assertEqual([fib(n=number) for number in range(16)],
             [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610])
         self.assertEqual(fib.cache_info(),
-            functools._CacheInfo(hits=28, misses=16, maxsize=None, currsize=16))
+            self.module._CacheInfo(hits=28, misses=16, maxsize=None, currsize=16))
         fib.cache_clear()
         self.assertEqual(fib.cache_info(),
-            functools._CacheInfo(hits=0, misses=0, maxsize=None, currsize=0))
+            self.module._CacheInfo(hits=0, misses=0, maxsize=None, currsize=0))
+
+    def test_lru_cache_decoration(self):
+        def f(zomg: 'zomg_annotation'):
+            """f doc string"""
+            return 42
+        g = self.module.lru_cache()(f)
+        for attr in self.module.WRAPPER_ASSIGNMENTS:
+            self.assertEqual(getattr(g, attr), getattr(f, attr))
+
+    @unittest.skipUnless(threading, 'This test requires threading.')
+    def test_lru_cache_threaded(self):
+        def orig(x, y):
+            return 3 * x + y
+        f = self.module.lru_cache(maxsize=20)(orig)
+        hits, misses, maxsize, currsize = f.cache_info()
+        self.assertEqual(currsize, 0)
+
+        def full(f, *args):
+            for _ in range(10):
+                f(*args)
+
+        def clear(f):
+            for _ in range(10):
+                f.cache_clear()
+
+        orig_si = sys.getswitchinterval()
+        sys.setswitchinterval(1e-6)
+        try:
+            # create 5 threads in order to fill cache
+            threads = []
+            for k in range(5):
+                t = threading.Thread(target=full, args=[f, k, k])
+                t.start()
+                threads.append(t)
+
+            for t in threads:
+                t.join()
+
+            hits, misses, maxsize, currsize = f.cache_info()
+            self.assertEqual(hits, 45)
+            self.assertEqual(misses, 5)
+            self.assertEqual(currsize, 5)
+
+            # create 5 threads in order to fill cache and 1 to clear it
+            cleaner = threading.Thread(target=clear, args=[f])
+            cleaner.start()
+            threads = [cleaner]
+            for k in range(5):
+                t = threading.Thread(target=full, args=[f, k, k])
+                t.start()
+                threads.append(t)
+
+            for t in threads:
+                t.join()
+        finally:
+            sys.setswitchinterval(orig_si)
 
     def test_need_for_rlock(self):
         # This will deadlock on an LRU cache that uses a regular lock
 
-        @functools.lru_cache(maxsize=10)
+        @self.module.lru_cache(maxsize=10)
         def test_func(x):
             'Used to demonstrate a reentrant lru_cache call within a single thread'
             return x
@@ -1110,6 +1179,12 @@
             def f():
                 pass
 
+class TestLRUC(TestLRU, unittest.TestCase):
+    module = c_functools
+
+class TestLRUPy(TestLRU, unittest.TestCase):
+    module = py_functools
+
 
 class TestSingleDispatch(unittest.TestCase):
     def test_simple_overloads(self):
diff --git a/Misc/NEWS b/Misc/NEWS
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -22,6 +22,8 @@
 Core and Builtins
 -----------------
 
+- Issue #24276: Fixed optimization of property descriptor getter.
+
 - Issue #24268: PEP 489: Multi-phase extension module initialization
 
 - Issue #23955: Add pyvenv.cfg option to suppress registry/environment
@@ -75,6 +77,9 @@
 Library
 -------
 
+- Issue #14373: Added C implementation of functools.lru_cache().  Based on
+  patches by Matt Joiner and Alexey Kachayev.
+
 - Issue 24230: The tempfile module now accepts bytes for prefix, suffix and dir
   parameters and returns bytes in such situations (matching the os module APIs).
 
diff --git a/Modules/_functoolsmodule.c b/Modules/_functoolsmodule.c
--- a/Modules/_functoolsmodule.c
+++ b/Modules/_functoolsmodule.c
@@ -590,6 +590,539 @@
 of the sequence in the calculation, and serves as a default when the\n\
 sequence is empty.");
 
+/* lru_cache object **********************************************************/
+
+/* this object is used to delimit args and keywords in the cache keys */
+static PyObject *kwd_mark = NULL;
+
+struct lru_list_elem;
+struct lru_cache_object;
+
+typedef struct lru_list_elem {
+    PyObject_HEAD
+    struct lru_list_elem *prev, *next;  /* borrowed links */
+    PyObject *key, *result;
+} lru_list_elem;
+
+static void
+lru_list_elem_dealloc(lru_list_elem *link)
+{
+    _PyObject_GC_UNTRACK(link);
+    Py_XDECREF(link->key);
+    Py_XDECREF(link->result);
+    PyObject_GC_Del(link);
+}
+
+static int
+lru_list_elem_traverse(lru_list_elem *link, visitproc visit, void *arg)
+{
+    Py_VISIT(link->key);
+    Py_VISIT(link->result);
+    return 0;
+}
+
+static int
+lru_list_elem_clear(lru_list_elem *link)
+{
+    Py_CLEAR(link->key);
+    Py_CLEAR(link->result);
+    return 0;
+}
+
+static PyTypeObject lru_list_elem_type = {
+    PyVarObject_HEAD_INIT(&PyType_Type, 0)
+    "functools._lru_list_elem",         /* tp_name */
+    sizeof(lru_list_elem),              /* tp_basicsize */
+    0,                                  /* tp_itemsize */
+    /* methods */
+    (destructor)lru_list_elem_dealloc,  /* tp_dealloc */
+    0,                                  /* tp_print */
+    0,                                  /* tp_getattr */
+    0,                                  /* tp_setattr */
+    0,                                  /* tp_reserved */
+    0,                                  /* tp_repr */
+    0,                                  /* tp_as_number */
+    0,                                  /* tp_as_sequence */
+    0,                                  /* tp_as_mapping */
+    0,                                  /* tp_hash */
+    0,                                  /* tp_call */
+    0,                                  /* tp_str */
+    0,                                  /* tp_getattro */
+    0,                                  /* tp_setattro */
+    0,                                  /* tp_as_buffer */
+    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,  /* tp_flags */
+    0,                                  /* tp_doc */
+    (traverseproc)lru_list_elem_traverse,  /* tp_traverse */
+    (inquiry)lru_list_elem_clear,       /* tp_clear */
+};
+
+
+typedef PyObject *(*lru_cache_ternaryfunc)(struct lru_cache_object *, PyObject *, PyObject *);
+
+typedef struct lru_cache_object {
+    lru_list_elem root;  /* includes PyObject_HEAD */
+    Py_ssize_t maxsize;
+    PyObject *maxsize_O;
+    PyObject *func;
+    lru_cache_ternaryfunc wrapper;
+    PyObject *cache;
+    PyObject *cache_info_type;
+    Py_ssize_t misses, hits;
+    int typed;
+    PyObject *dict;
+    int full;
+} lru_cache_object;
+
+static PyTypeObject lru_cache_type;
+
+static PyObject *
+lru_cache_make_key(PyObject *args, PyObject *kwds, int typed)
+{
+    PyObject *key, *sorted_items;
+    Py_ssize_t key_size, pos, key_pos;
+
+    /* short path, key will match args anyway, which is a tuple */
+    if (!typed && !kwds) {
+        Py_INCREF(args);
+        return args;
+    }
+
+    if (kwds && PyDict_Size(kwds) > 0) {
+        sorted_items = PyDict_Items(kwds);
+        if (!sorted_items)
+            return NULL;
+        if (PyList_Sort(sorted_items) < 0) {
+            Py_DECREF(sorted_items);
+            return NULL;
+        }
+    } else
+        sorted_items = NULL;
+
+    key_size = PyTuple_GET_SIZE(args);
+    if (sorted_items)
+        key_size += PyList_GET_SIZE(sorted_items);
+    if (typed)
+        key_size *= 2;
+    if (sorted_items)
+        key_size++;
+
+    key = PyTuple_New(key_size);
+    if (key == NULL)
+        goto done;
+
+    key_pos = 0;
+    for (pos = 0; pos < PyTuple_GET_SIZE(args); ++pos) {
+        PyObject *item = PyTuple_GET_ITEM(args, pos);
+        Py_INCREF(item);
+        PyTuple_SET_ITEM(key, key_pos++, item);
+    }
+    if (sorted_items) {
+        Py_INCREF(kwd_mark);
+        PyTuple_SET_ITEM(key, key_pos++, kwd_mark);
+        for (pos = 0; pos < PyList_GET_SIZE(sorted_items); ++pos) {
+            PyObject *item = PyList_GET_ITEM(sorted_items, pos);
+            Py_INCREF(item);
+            PyTuple_SET_ITEM(key, key_pos++, item);
+        }
+    }
+    if (typed) {
+        for (pos = 0; pos < PyTuple_GET_SIZE(args); ++pos) {
+            PyObject *item = (PyObject *)Py_TYPE(PyTuple_GET_ITEM(args, pos));
+            Py_INCREF(item);
+            PyTuple_SET_ITEM(key, key_pos++, item);
+        }
+        if (sorted_items) {
+            for (pos = 0; pos < PyList_GET_SIZE(sorted_items); ++pos) {
+                PyObject *tp_items = PyList_GET_ITEM(sorted_items, pos);
+                PyObject *item = (PyObject *)Py_TYPE(PyTuple_GET_ITEM(tp_items, 1));
+                Py_INCREF(item);
+                PyTuple_SET_ITEM(key, key_pos++, item);
+            }
+        }
+    }
+    assert(key_pos == key_size);
+
+done:
+    if (sorted_items)
+        Py_DECREF(sorted_items);
+    return key;
+}
+
+static PyObject *
+uncached_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds)
+{
+    PyObject *result = PyObject_Call(self->func, args, kwds);
+    if (!result)
+        return NULL;
+    self->misses++;
+    return result;
+}
+
+static PyObject *
+infinite_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds)
+{
+    PyObject *result;
+    PyObject *key = lru_cache_make_key(args, kwds, self->typed);
+    if (!key)
+        return NULL;
+    result = PyDict_GetItemWithError(self->cache, key);
+    if (result) {
+        Py_INCREF(result);
+        self->hits++;
+        Py_DECREF(key);
+        return result;
+    }
+    if (PyErr_Occurred()) {
+        Py_DECREF(key);
+        return NULL;
+    }
+    result = PyObject_Call(self->func, args, kwds);
+    if (!result) {
+        Py_DECREF(key);
+        return NULL;
+    }
+    if (PyDict_SetItem(self->cache, key, result) < 0) {
+        Py_DECREF(result);
+        Py_DECREF(key);
+        return NULL;
+    }
+    Py_DECREF(key);
+    self->misses++;
+    return result;
+}
+
+static void
+lru_cache_extricate_link(lru_list_elem *link)
+{
+    link->prev->next = link->next;
+    link->next->prev = link->prev;
+}
+
+static void
+lru_cache_append_link(lru_cache_object *self, lru_list_elem *link)
+{
+    lru_list_elem *root = &self->root;
+    lru_list_elem *last = root->prev;
+    last->next = root->prev = link;
+    link->prev = last;
+    link->next = root;
+}
+
+static PyObject *
+bounded_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds)
+{
+    lru_list_elem *link;
+    PyObject *key, *result;
+
+    key = lru_cache_make_key(args, kwds, self->typed);
+    if (!key)
+        return NULL;
+    link  = (lru_list_elem *)PyDict_GetItemWithError(self->cache, key);
+    if (link) {
+        lru_cache_extricate_link(link);
+        lru_cache_append_link(self, link);
+        self->hits++;
+        result = link->result;
+        Py_INCREF(result);
+        Py_DECREF(key);
+        return result;
+    }
+    if (PyErr_Occurred()) {
+        Py_DECREF(key);
+        return NULL;
+    }
+    result = PyObject_Call(self->func, args, kwds);
+    if (!result) {
+        Py_DECREF(key);
+        return NULL;
+    }
+    if (self->full && self->root.next != &self->root) {
+        /* Use the oldest item to store the new key and result. */
+        PyObject *oldkey, *oldresult;
+        /* Extricate the oldest item. */
+        link = self->root.next;
+        lru_cache_extricate_link(link);
+        /* Remove it from the cache.
+           The cache dict holds one reference to the link,
+           and the linked list holds yet one reference to it. */
+        if (PyDict_DelItem(self->cache, link->key) < 0) {
+            lru_cache_append_link(self, link);
+            Py_DECREF(key);
+            Py_DECREF(result);
+            return NULL;
+        }
+        /* Keep a reference to the old key and old result to
+           prevent their ref counts from going to zero during the
+           update. That will prevent potentially arbitrary object
+           clean-up code (i.e. __del__) from running while we're
+           still adjusting the links. */
+        oldkey = link->key;
+        oldresult = link->result;
+
+        link->key = key;
+        link->result = result;
+        if (PyDict_SetItem(self->cache, key, (PyObject *)link) < 0) {
+            Py_DECREF(link);
+            Py_DECREF(oldkey);
+            Py_DECREF(oldresult);
+            return NULL;
+        }
+        lru_cache_append_link(self, link);
+        Py_INCREF(result); /* for return */
+        Py_DECREF(oldkey);
+        Py_DECREF(oldresult);
+    } else {
+        /* Put result in a new link at the front of the queue. */
+        link = (lru_list_elem *)PyObject_GC_New(lru_list_elem,
+                                                &lru_list_elem_type);
+        if (link == NULL) {
+            Py_DECREF(key);
+            Py_DECREF(result);
+            return NULL;
+        }
+
+        link->key = key;
+        link->result = result;
+        _PyObject_GC_TRACK(link);
+        if (PyDict_SetItem(self->cache, key, (PyObject *)link) < 0) {
+            Py_DECREF(link);
+            return NULL;
+        }
+        lru_cache_append_link(self, link);
+        Py_INCREF(result); /* for return */
+        self->full = (PyDict_Size(self->cache) >= self->maxsize);
+    }
+    self->misses++;
+    return result;
+}
+
+static PyObject *
+lru_cache_new(PyTypeObject *type, PyObject *args, PyObject *kw)
+{
+    PyObject *func, *maxsize_O, *cache_info_type;
+    int typed;
+    lru_cache_object *obj;
+    Py_ssize_t maxsize;
+    PyObject *(*wrapper)(lru_cache_object *, PyObject *, PyObject *);
+    static char *keywords[] = {"user_function", "maxsize", "typed",
+                               "cache_info_type", NULL};
+
+    if (!PyArg_ParseTupleAndKeywords(args, kw, "OOpO:lru_cache", keywords,
+                                     &func, &maxsize_O, &typed,
+                                     &cache_info_type)) {
+        return NULL;
+    }
+
+    if (!PyCallable_Check(func)) {
+        PyErr_SetString(PyExc_TypeError,
+                        "the first argument must be callable");
+        return NULL;
+    }
+
+    /* select the caching function, and make/inc maxsize_O */
+    if (maxsize_O == Py_None) {
+        wrapper = infinite_lru_cache_wrapper;
+        /* use this only to initialize lru_cache_object attribute maxsize */
+        maxsize = -1;
+    } else if (PyIndex_Check(maxsize_O)) {
+        maxsize = PyNumber_AsSsize_t(maxsize_O, PyExc_OverflowError);
+        if (maxsize == -1 && PyErr_Occurred())
+            return NULL;
+        if (maxsize == 0)
+            wrapper = uncached_lru_cache_wrapper;
+        else
+            wrapper = bounded_lru_cache_wrapper;
+    } else {
+        PyErr_SetString(PyExc_TypeError, "maxsize should be integer or None");
+        return NULL;
+    }
+
+    obj = (lru_cache_object *)type->tp_alloc(type, 0);
+    if (obj == NULL)
+        return NULL;
+
+    if (!(obj->cache = PyDict_New())) {
+        Py_DECREF(obj);
+        return NULL;
+    }
+
+    obj->root.prev = &obj->root;
+    obj->root.next = &obj->root;
+    obj->maxsize = maxsize;
+    Py_INCREF(maxsize_O);
+    obj->maxsize_O = maxsize_O;
+    Py_INCREF(func);
+    obj->func = func;
+    obj->wrapper = wrapper;
+    obj->misses = obj->hits = 0;
+    obj->typed = typed;
+    Py_INCREF(cache_info_type);
+    obj->cache_info_type = cache_info_type;
+
+    return (PyObject *)obj;
+}
+
+static lru_list_elem *
+lru_cache_unlink_list(lru_cache_object *self)
+{
+    lru_list_elem *root = &self->root;
+    lru_list_elem *link = root->next;
+    if (link == root)
+        return NULL;
+    root->prev->next = NULL;
+    root->next = root->prev = root;
+    return link;
+}
+
+static void
+lru_cache_clear_list(lru_list_elem *link)
+{
+    while (link != NULL) {
+        lru_list_elem *next = link->next;
+        Py_DECREF(link);
+        link = next;
+    }
+}
+
+static void
+lru_cache_dealloc(lru_cache_object *obj)
+{
+    lru_list_elem *list = lru_cache_unlink_list(obj);
+    Py_XDECREF(obj->maxsize_O);
+    Py_XDECREF(obj->func);
+    Py_XDECREF(obj->cache);
+    Py_XDECREF(obj->dict);
+    Py_XDECREF(obj->cache_info_type);
+    lru_cache_clear_list(list);
+    Py_TYPE(obj)->tp_free(obj);
+}
+
+static PyObject *
+lru_cache_call(lru_cache_object *self, PyObject *args, PyObject *kwds)
+{
+    return self->wrapper(self, args, kwds);
+}
+
+static PyObject *
+lru_cache_cache_info(lru_cache_object *self, PyObject *unused)
+{
+    return PyObject_CallFunction(self->cache_info_type, "nnOn",
+                                 self->hits, self->misses, self->maxsize_O,
+                                 PyDict_Size(self->cache));
+}
+
+static PyObject *
+lru_cache_cache_clear(lru_cache_object *self, PyObject *unused)
+{
+    lru_list_elem *list = lru_cache_unlink_list(self);
+    self->hits = self->misses = 0;
+    self->full = 0;
+    PyDict_Clear(self->cache);
+    lru_cache_clear_list(list);
+    Py_RETURN_NONE;
+}
+
+static int
+lru_cache_tp_traverse(lru_cache_object *self, visitproc visit, void *arg)
+{
+    lru_list_elem *link = self->root.next;
+    while (link != &self->root) {
+        lru_list_elem *next = link->next;
+        Py_VISIT(link);
+        link = next;
+    }
+    Py_VISIT(self->maxsize_O);
+    Py_VISIT(self->func);
+    Py_VISIT(self->cache);
+    Py_VISIT(self->cache_info_type);
+    Py_VISIT(self->dict);
+    return 0;
+}
+
+static int
+lru_cache_tp_clear(lru_cache_object *self)
+{
+    lru_list_elem *list = lru_cache_unlink_list(self);
+    Py_CLEAR(self->maxsize_O);
+    Py_CLEAR(self->func);
+    Py_CLEAR(self->cache);
+    Py_CLEAR(self->cache_info_type);
+    Py_CLEAR(self->dict);
+    lru_cache_clear_list(list);
+    return 0;
+}
+
+
+PyDoc_STRVAR(lru_cache_doc,
+"Create a cached callable that wraps another function.\n\
+\n\
+user_function:      the function being cached\n\
+\n\
+maxsize:  0         for no caching\n\
+          None      for unlimited cache size\n\
+          n         for a bounded cache\n\
+\n\
+typed:    False     cache f(3) and f(3.0) as identical calls\n\
+          True      cache f(3) and f(3.0) as distinct calls\n\
+\n\
+cache_info_type:    namedtuple class with the fields:\n\
+                        hits misses currsize maxsize\n"
+);
+
+static PyMethodDef lru_cache_methods[] = {
+    {"cache_info", (PyCFunction)lru_cache_cache_info, METH_NOARGS},
+    {"cache_clear", (PyCFunction)lru_cache_cache_clear, METH_NOARGS},
+    {NULL}
+};
+
+static PyGetSetDef lru_cache_getsetlist[] = {
+    {"__dict__", PyObject_GenericGetDict, PyObject_GenericSetDict},
+    {NULL}
+};
+
+static PyTypeObject lru_cache_type = {
+    PyVarObject_HEAD_INIT(NULL, 0)
+    "functools._lru_cache_wrapper",     /* tp_name */
+    sizeof(lru_cache_object),           /* tp_basicsize */
+    0,                                  /* tp_itemsize */
+    /* methods */
+    (destructor)lru_cache_dealloc,      /* tp_dealloc */
+    0,                                  /* tp_print */
+    0,                                  /* tp_getattr */
+    0,                                  /* tp_setattr */
+    0,                                  /* tp_reserved */
+    0,                                  /* tp_repr */
+    0,                                  /* tp_as_number */
+    0,                                  /* tp_as_sequence */
+    0,                                  /* tp_as_mapping */
+    0,                                  /* tp_hash */
+    (ternaryfunc)lru_cache_call,        /* tp_call */
+    0,                                  /* tp_str */
+    0,                                  /* tp_getattro */
+    0,                                  /* tp_setattro */
+    0,                                  /* tp_as_buffer */
+    Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC,
+                                        /* tp_flags */
+    lru_cache_doc,                      /* tp_doc */
+    (traverseproc)lru_cache_tp_traverse,/* tp_traverse */
+    (inquiry)lru_cache_tp_clear,        /* tp_clear */
+    0,                                  /* tp_richcompare */
+    0,                                  /* tp_weaklistoffset */
+    0,                                  /* tp_iter */
+    0,                                  /* tp_iternext */
+    lru_cache_methods,                  /* tp_methods */
+    0,                                  /* tp_members */
+    lru_cache_getsetlist,               /* tp_getset */
+    0,                                  /* tp_base */
+    0,                                  /* tp_dict */
+    0,                                  /* tp_descr_get */
+    0,                                  /* tp_descr_set */
+    offsetof(lru_cache_object, dict),   /* tp_dictoffset */
+    0,                                  /* tp_init */
+    0,                                  /* tp_alloc */
+    lru_cache_new,                      /* tp_new */
+};
+
 /* module level code ********************************************************/
 
 PyDoc_STRVAR(module_doc,
@@ -602,6 +1135,11 @@
     {NULL,              NULL}           /* sentinel */
 };
 
+static void
+module_free(void *m)
+{
+    Py_CLEAR(kwd_mark);
+}
 
 static struct PyModuleDef _functoolsmodule = {
     PyModuleDef_HEAD_INIT,
@@ -612,7 +1150,7 @@
     NULL,
     NULL,
     NULL,
-    NULL
+    module_free,
 };
 
 PyMODINIT_FUNC
@@ -623,6 +1161,7 @@
     char *name;
     PyTypeObject *typelist[] = {
         &partial_type,
+        &lru_cache_type,
         NULL
     };
 
@@ -630,6 +1169,12 @@
     if (m == NULL)
         return NULL;
 
+    kwd_mark = PyObject_CallObject((PyObject *)&PyBaseObject_Type, NULL);
+    if (!kwd_mark) {
+        Py_DECREF(m);
+        return NULL;
+    }
+
     for (i=0 ; typelist[i] != NULL ; i++) {
         if (PyType_Ready(typelist[i]) < 0) {
             Py_DECREF(m);
diff --git a/Objects/descrobject.c b/Objects/descrobject.c
--- a/Objects/descrobject.c
+++ b/Objects/descrobject.c
@@ -1372,7 +1372,8 @@
 static PyObject *
 property_descr_get(PyObject *self, PyObject *obj, PyObject *type)
 {
-    static PyObject *args = NULL;
+    static PyObject * volatile cached_args = NULL;
+    PyObject *args;
     PyObject *ret;
     propertyobject *gs = (propertyobject *)self;
 
@@ -1384,12 +1385,28 @@
         PyErr_SetString(PyExc_AttributeError, "unreadable attribute");
         return NULL;
     }
-    if (!args && !(args = PyTuple_New(1))) {
-        return NULL;
+    args = cached_args;
+    if (!args || Py_REFCNT(args) != 1) {
+        Py_CLEAR(cached_args);
+        if (!(cached_args = args = PyTuple_New(1)))
+            return NULL;
     }
+    Py_INCREF(args);
+    assert (Py_REFCNT(args) == 2);
+    Py_INCREF(obj);
     PyTuple_SET_ITEM(args, 0, obj);
     ret = PyObject_Call(gs->prop_get, args, NULL);
-    PyTuple_SET_ITEM(args, 0, NULL);
+    if (args == cached_args) {
+        if (Py_REFCNT(args) == 2) {
+            obj = PyTuple_GET_ITEM(args, 0);
+            PyTuple_SET_ITEM(args, 0, NULL);
+            Py_XDECREF(obj);
+        }
+        else {
+            Py_CLEAR(cached_args);
+        }
+    }
+    Py_DECREF(args);
     return ret;
 }
 
diff --git a/PCbuild/_testmultiphase.vcxproj b/PCbuild/_testmultiphase.vcxproj
new file mode 100644
--- /dev/null
+++ b/PCbuild/_testmultiphase.vcxproj
@@ -0,0 +1,80 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <ItemGroup Label="ProjectConfigurations">
+    <ProjectConfiguration Include="Debug|Win32">
+      <Configuration>Debug</Configuration>
+      <Platform>Win32</Platform>
+    </ProjectConfiguration>
+    <ProjectConfiguration Include="Debug|x64">
+      <Configuration>Debug</Configuration>
+      <Platform>x64</Platform>
+    </ProjectConfiguration>
+    <ProjectConfiguration Include="PGInstrument|Win32">
+      <Configuration>PGInstrument</Configuration>
+      <Platform>Win32</Platform>
+    </ProjectConfiguration>
+    <ProjectConfiguration Include="PGInstrument|x64">
+      <Configuration>PGInstrument</Configuration>
+      <Platform>x64</Platform>
+    </ProjectConfiguration>
+    <ProjectConfiguration Include="PGUpdate|Win32">
+      <Configuration>PGUpdate</Configuration>
+      <Platform>Win32</Platform>
+    </ProjectConfiguration>
+    <ProjectConfiguration Include="PGUpdate|x64">
+      <Configuration>PGUpdate</Configuration>
+      <Platform>x64</Platform>
+    </ProjectConfiguration>
+    <ProjectConfiguration Include="Release|Win32">
+      <Configuration>Release</Configuration>
+      <Platform>Win32</Platform>
+    </ProjectConfiguration>
+    <ProjectConfiguration Include="Release|x64">
+      <Configuration>Release</Configuration>
+      <Platform>x64</Platform>
+    </ProjectConfiguration>
+  </ItemGroup>
+  <PropertyGroup Label="Globals">
+    <ProjectGuid>{16BFE6F0-22EF-40B5-B831-7E937119EF10}</ProjectGuid>
+    <Keyword>Win32Proj</Keyword>
+    <RootNamespace>_testmultiphase</RootNamespace>
+    <SupportPGO>false</SupportPGO>
+  </PropertyGroup>
+  <Import Project="python.props" />
+  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
+  <PropertyGroup Label="Configuration">
+    <ConfigurationType>DynamicLibrary</ConfigurationType>
+    <CharacterSet>NotSet</CharacterSet>
+  </PropertyGroup>
+  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
+  <PropertyGroup>
+    <TargetExt>.pyd</TargetExt>
+  </PropertyGroup>
+  <ImportGroup Label="ExtensionSettings">
+  </ImportGroup>
+  <ImportGroup Label="PropertySheets">
+    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+    <Import Project="pyproject.props" />
+  </ImportGroup>
+  <PropertyGroup Label="UserMacros" />
+  <ItemDefinitionGroup>
+    <ClCompile>
+      <PreprocessorDefinitions>_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+    </ClCompile>
+    <Link>
+      <SubSystem>Console</SubSystem>
+    </Link>
+  </ItemDefinitionGroup>
+  <ItemGroup>
+    <ClCompile Include="..\Modules\_testmultiphase.c" />
+  </ItemGroup>
+  <ItemGroup>
+    <ProjectReference Include="pythoncore.vcxproj">
+      <Project>{cf7ac3d1-e2df-41d2-bea6-1e2556cdea26}</Project>
+      <ReferenceOutputAssembly>false</ReferenceOutputAssembly>
+    </ProjectReference>
+  </ItemGroup>
+  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
+  <ImportGroup Label="ExtensionTargets">
+  </ImportGroup>
+</Project>
\ No newline at end of file
diff --git a/PCbuild/_testmultiphase.vcxproj.filters b/PCbuild/_testmultiphase.vcxproj.filters
new file mode 100644
--- /dev/null
+++ b/PCbuild/_testmultiphase.vcxproj.filters
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <ItemGroup>
+    <Filter Include="Source Files">
+      <UniqueIdentifier>{4FC737F1-C7A5-4376-A066-2A32D752A2FF}</UniqueIdentifier>
+      <Extensions>cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx</Extensions>
+    </Filter>
+    <Filter Include="Header Files">
+      <UniqueIdentifier>{93995380-89BD-4b04-88EB-625FBE52EBFB}</UniqueIdentifier>
+      <Extensions>h;hpp;hxx;hm;inl;inc;xsd</Extensions>
+    </Filter>
+    <Filter Include="Resource Files">
+      <UniqueIdentifier>{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}</UniqueIdentifier>
+      <Extensions>rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms</Extensions>
+    </Filter>
+  </ItemGroup>
+  <ItemGroup>
+    <ClCompile Include="..\Modules\_testmultiphase.c">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+  </ItemGroup>
+</Project>
\ No newline at end of file
diff --git a/PCbuild/pcbuild.proj b/PCbuild/pcbuild.proj
--- a/PCbuild/pcbuild.proj
+++ b/PCbuild/pcbuild.proj
@@ -46,7 +46,7 @@
     <ExtensionModules Include="_ssl;_hashlib" Condition="$(IncludeSSL)" />
     <Projects Include="@(ExtensionModules->'%(Identity).vcxproj')" Condition="$(IncludeExtensions)" />
     <!-- Test modules -->
-    <TestModules Include="_ctypes_test;_testbuffer;_testcapi;_testembed;_testimportmultiple" />
+    <TestModules Include="_ctypes_test;_testbuffer;_testcapi;_testembed;_testimportmultiple;_testmultiphase" />
     <TestModules Include="xxlimited" Condition="'$(Configuration)' == 'Release'" />
     <Projects Include="@(TestModules->'%(Identity).vcxproj')" Condition="$(IncludeTests)">
       <!-- Disable parallel build for test modules -->
diff --git a/PCbuild/pcbuild.sln b/PCbuild/pcbuild.sln
--- a/PCbuild/pcbuild.sln
+++ b/PCbuild/pcbuild.sln
@@ -72,6 +72,8 @@
 EndProject
 Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_testembed", "_testembed.vcxproj", "{6DAC66D9-E703-4624-BE03-49112AB5AA62}"
 EndProject
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_testmultiphase", "_testmultiphase.vcxproj", "{16BFE6F0-22EF-40B5-B831-7E937119EF10}"
+EndProject
 Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "tcl", "tcl.vcxproj", "{B5FD6F1D-129E-4BFF-9340-03606FAC7283}"
 EndProject
 Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "tix", "tix.vcxproj", "{C5A3E7FB-9695-4B2E-960B-1D9F43F1E555}"
@@ -588,6 +590,20 @@
 		{6DAC66D9-E703-4624-BE03-49112AB5AA62}.Release|Win32.Build.0 = Release|Win32
 		{6DAC66D9-E703-4624-BE03-49112AB5AA62}.Release|x64.ActiveCfg = Release|x64
 		{6DAC66D9-E703-4624-BE03-49112AB5AA62}.Release|x64.Build.0 = Release|x64
+		{16BFE6F0-22EF-40B5-B831-7E937119EF10}.Debug|Win32.ActiveCfg = Debug|Win32
+		{16BFE6F0-22EF-40B5-B831-7E937119EF10}.Debug|Win32.Build.0 = Debug|Win32
+		{16BFE6F0-22EF-40B5-B831-7E937119EF10}.Debug|x64.ActiveCfg = Debug|x64
+		{16BFE6F0-22EF-40B5-B831-7E937119EF10}.Debug|x64.Build.0 = Debug|x64
+		{16BFE6F0-22EF-40B5-B831-7E937119EF10}.PGInstrument|Win32.ActiveCfg = Release|Win32
+		{16BFE6F0-22EF-40B5-B831-7E937119EF10}.PGInstrument|x64.ActiveCfg = Release|x64
+		{16BFE6F0-22EF-40B5-B831-7E937119EF10}.PGUpdate|Win32.ActiveCfg = Release|Win32
+		{16BFE6F0-22EF-40B5-B831-7E937119EF10}.PGUpdate|Win32.Build.0 = Release|Win32
+		{16BFE6F0-22EF-40B5-B831-7E937119EF10}.PGUpdate|x64.ActiveCfg = Release|x64
+		{16BFE6F0-22EF-40B5-B831-7E937119EF10}.PGUpdate|x64.Build.0 = Release|x64
+		{16BFE6F0-22EF-40B5-B831-7E937119EF10}.Release|Win32.ActiveCfg = Release|Win32
+		{16BFE6F0-22EF-40B5-B831-7E937119EF10}.Release|Win32.Build.0 = Release|Win32
+		{16BFE6F0-22EF-40B5-B831-7E937119EF10}.Release|x64.ActiveCfg = Release|x64
+		{16BFE6F0-22EF-40B5-B831-7E937119EF10}.Release|x64.Build.0 = Release|x64
 		{B5FD6F1D-129E-4BFF-9340-03606FAC7283}.Debug|Win32.ActiveCfg = Debug|Win32
 		{B5FD6F1D-129E-4BFF-9340-03606FAC7283}.Debug|Win32.Build.0 = Debug|Win32
 		{B5FD6F1D-129E-4BFF-9340-03606FAC7283}.Debug|x64.ActiveCfg = Debug|x64
diff --git a/Tools/msi/make_zip.proj b/Tools/msi/make_zip.proj
--- a/Tools/msi/make_zip.proj
+++ b/Tools/msi/make_zip.proj
@@ -4,6 +4,7 @@
         <ProjectGuid>{10487945-15D1-4092-A214-338395C4116B}</ProjectGuid>
         <OutputName>python</OutputName>
         <OutputSuffix></OutputSuffix>
+        <SupportSigning>false</SupportSigning>
     </PropertyGroup>
 
     <Import Project="msi.props" />
diff --git a/Tools/msi/make_zip.py b/Tools/msi/make_zip.py
--- a/Tools/msi/make_zip.py
+++ b/Tools/msi/make_zip.py
@@ -25,6 +25,7 @@
         '_testbuffer.pyd',
         '_testcapi.pyd',
         '_testimportmultiple.pyd',
+        '_testmultiphase.pyd',
         'xxlimited.pyd',
     }
 
diff --git a/Tools/msi/test/test_files.wxs b/Tools/msi/test/test_files.wxs
--- a/Tools/msi/test/test_files.wxs
+++ b/Tools/msi/test/test_files.wxs
@@ -14,6 +14,9 @@
             <Component Id="_testimportmultiple.pyd" Directory="DLLs" Guid="*">
                 <File Id="_testimportmultiple.pyd" Name="_testimportmultiple.pyd" KeyPath="yes" />
             </Component>
+            <Component Id="_testmultiphase.pyd" Directory="DLLs" Guid="*">
+                <File Id="_testmultiphase.pyd" Name="_testmultiphase.pyd" KeyPath="yes" />
+            </Component>
         </ComponentGroup>
     </Fragment>
     
@@ -31,6 +34,9 @@
             <Component Id="_testimportmultiple.pdb" Directory="DLLs" Guid="*">
                 <File Id="_testimportmultiple.pdb" Name="_testimportmultiple.pdb" />
             </Component>
+            <Component Id="_testmultiphase.pdb" Directory="DLLs" Guid="*">
+                <File Id="_testmultiphase.pdb" Name="_testmultiphase.pdb" />
+            </Component>
         </ComponentGroup>
     </Fragment>
     
@@ -48,6 +54,9 @@
             <Component Id="_testimportmultiple_d.pyd" Directory="DLLs" Guid="*">
                 <File Id="_testimportmultiple_d.pyd" Name="_testimportmultiple_d.pyd" />
             </Component>
+            <Component Id="_testmultiphase_d.pyd" Directory="DLLs" Guid="*">
+                <File Id="_testmultiphase_d.pyd" Name="_testmultiphase_d.pyd" />
+            </Component>
             <Component Id="_testcapi_d.pdb" Directory="DLLs" Guid="*">
                 <File Id="_testcapi_d.pdb" Name="_testcapi_d.pdb" />
             </Component>
@@ -60,6 +69,9 @@
             <Component Id="_testimportmultiple_d.pdb" Directory="DLLs" Guid="*">
                 <File Id="_testimportmultiple_d.pdb" Name="_testimportmultiple_d.pdb" />
             </Component>
+            <Component Id="_testmultiphase_d.pdb" Directory="DLLs" Guid="*">
+                <File Id="_testmultiphase_d.pdb" Name="_testmultiphase_d.pdb" />
+            </Component>
         </ComponentGroup>
     </Fragment>
 </Wix>

-- 
Repository URL: https://hg.python.org/cpython


More information about the Python-checkins mailing list