From python-checkins at python.org Fri Jun 1 00:01:31 2012 From: python-checkins at python.org (r.david.murray) Date: Fri, 01 Jun 2012 00:01:31 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Don=27t_use_metaclasses_whe?= =?utf8?q?n_class_decorators_can_do_the_job=2E?= Message-ID: http://hg.python.org/cpython/rev/01b72be1ce0c changeset: 77273:01b72be1ce0c user: R David Murray date: Thu May 31 18:00:45 2012 -0400 summary: Don't use metaclasses when class decorators can do the job. Thanks to Nick Coghlan for pointing out that I'd forgotten about class decorators. files: Lib/email/_policybase.py | 39 ++--- Lib/email/policy.py | 3 +- Lib/test/test_email/__init__.py | 66 ++++----- Lib/test/test_email/test_generator.py | 5 +- Lib/test/test_email/test_headerregistry.py | 5 +- Lib/test/test_email/test_pickleable.py | 9 +- 6 files changed, 63 insertions(+), 64 deletions(-) diff --git a/Lib/email/_policybase.py b/Lib/email/_policybase.py --- a/Lib/email/_policybase.py +++ b/Lib/email/_policybase.py @@ -91,31 +91,25 @@ return self.clone(**other.__dict__) -# Conceptually this isn't a subclass of ABCMeta, but since we want Policy to -# use ABCMeta as a metaclass *and* we want it to use this one as well, we have -# to make this one a subclas of ABCMeta. -class _DocstringExtenderMetaclass(abc.ABCMeta): +def _append_doc(doc, added_doc): + doc = doc.rsplit('\n', 1)[0] + added_doc = added_doc.split('\n', 1)[1] + return doc + '\n' + added_doc - def __new__(meta, classname, bases, classdict): - if classdict.get('__doc__') and classdict['__doc__'].startswith('+'): - classdict['__doc__'] = meta._append_doc(bases[0].__doc__, - classdict['__doc__']) - for name, attr in classdict.items(): - if attr.__doc__ and attr.__doc__.startswith('+'): - for cls in (cls for base in bases for cls in base.mro()): - doc = getattr(getattr(cls, name), '__doc__') - if doc: - attr.__doc__ = meta._append_doc(doc, attr.__doc__) - break - return super().__new__(meta, classname, bases, classdict) +def _extend_docstrings(cls): + if cls.__doc__ and cls.__doc__.startswith('+'): + cls.__doc__ = _append_doc(cls.__bases__[0].__doc__, cls.__doc__) + for name, attr in cls.__dict__.items(): + if attr.__doc__ and attr.__doc__.startswith('+'): + for c in (c for base in cls.__bases__ for c in base.mro()): + doc = getattr(getattr(c, name), '__doc__') + if doc: + attr.__doc__ = _append_doc(doc, attr.__doc__) + break + return cls - @staticmethod - def _append_doc(doc, added_doc): - added_doc = added_doc.split('\n', 1)[1] - return doc + '\n' + added_doc - -class Policy(_PolicyBase, metaclass=_DocstringExtenderMetaclass): +class Policy(_PolicyBase, metaclass=abc.ABCMeta): r"""Controls for how messages are interpreted and formatted. @@ -264,6 +258,7 @@ raise NotImplementedError + at _extend_docstrings class Compat32(Policy): """+ diff --git a/Lib/email/policy.py b/Lib/email/policy.py --- a/Lib/email/policy.py +++ b/Lib/email/policy.py @@ -2,7 +2,7 @@ code that adds all the email6 features. 
""" -from email._policybase import Policy, Compat32, compat32 +from email._policybase import Policy, Compat32, compat32, _extend_docstrings from email.utils import _has_surrogates from email.headerregistry import HeaderRegistry as HeaderRegistry @@ -17,6 +17,7 @@ 'HTTP', ] + at _extend_docstrings class EmailPolicy(Policy): """+ diff --git a/Lib/test/test_email/__init__.py b/Lib/test/test_email/__init__.py --- a/Lib/test/test_email/__init__.py +++ b/Lib/test/test_email/__init__.py @@ -73,10 +73,8 @@ 'item {}'.format(i)) -# Metaclass to allow for parameterized tests -class Parameterized(type): - - """Provide a test method parameterization facility. +def parameterize(cls): + """A test method parameterization class decorator. Parameters are specified as the value of a class attribute that ends with the string '_params'. Call the portion before '_params' the prefix. Then @@ -92,9 +90,10 @@ In a _params dictioanry, the keys become part of the name of the generated tests. In a _params list, the values in the list are converted into a string by joining the string values of the elements of the tuple by '_' and - converting any blanks into '_'s, and this become part of the name. The - full name of a generated test is the portion of the _params name before the - '_params' portion, plus an '_', plus the name derived as explained above. + converting any blanks into '_'s, and this become part of the name. + The full name of a generated test is a 'test_' prefix, the portion of the + test function name after the '_as_' separator, plus an '_', plus the name + derived as explained above. For example, if we have: @@ -123,30 +122,29 @@ be used to select the test individually from the unittest command line. """ - - def __new__(meta, classname, bases, classdict): - paramdicts = {} - for name, attr in classdict.items(): - if name.endswith('_params'): - if not hasattr(attr, 'keys'): - d = {} - for x in attr: - if not hasattr(x, '__iter__'): - x = (x,) - n = '_'.join(str(v) for v in x).replace(' ', '_') - d[n] = x - attr = d - paramdicts[name[:-7] + '_as_'] = attr - testfuncs = {} - for name, attr in classdict.items(): - for paramsname, paramsdict in paramdicts.items(): - if name.startswith(paramsname): - testnameroot = 'test_' + name[len(paramsname):] - for paramname, params in paramsdict.items(): - test = (lambda self, name=name, params=params: - getattr(self, name)(*params)) - testname = testnameroot + '_' + paramname - test.__name__ = testname - testfuncs[testname] = test - classdict.update(testfuncs) - return super().__new__(meta, classname, bases, classdict) + paramdicts = {} + for name, attr in cls.__dict__.items(): + if name.endswith('_params'): + if not hasattr(attr, 'keys'): + d = {} + for x in attr: + if not hasattr(x, '__iter__'): + x = (x,) + n = '_'.join(str(v) for v in x).replace(' ', '_') + d[n] = x + attr = d + paramdicts[name[:-7] + '_as_'] = attr + testfuncs = {} + for name, attr in cls.__dict__.items(): + for paramsname, paramsdict in paramdicts.items(): + if name.startswith(paramsname): + testnameroot = 'test_' + name[len(paramsname):] + for paramname, params in paramsdict.items(): + test = (lambda self, name=name, params=params: + getattr(self, name)(*params)) + testname = testnameroot + '_' + paramname + test.__name__ = testname + testfuncs[testname] = test + for key, value in testfuncs.items(): + setattr(cls, key, value) + return cls diff --git a/Lib/test/test_email/test_generator.py b/Lib/test/test_email/test_generator.py --- a/Lib/test/test_email/test_generator.py +++ 
b/Lib/test/test_email/test_generator.py @@ -4,10 +4,11 @@ from email import message_from_string, message_from_bytes from email.generator import Generator, BytesGenerator from email import policy -from test.test_email import TestEmailBase, Parameterized +from test.test_email import TestEmailBase, parameterize -class TestGeneratorBase(metaclass=Parameterized): + at parameterize +class TestGeneratorBase: policy = policy.default diff --git a/Lib/test/test_email/test_headerregistry.py b/Lib/test/test_email/test_headerregistry.py --- a/Lib/test/test_email/test_headerregistry.py +++ b/Lib/test/test_email/test_headerregistry.py @@ -4,7 +4,7 @@ from email import errors from email import policy from email.message import Message -from test.test_email import TestEmailBase, Parameterized +from test.test_email import TestEmailBase, parameterize from email import headerregistry from email.headerregistry import Address, Group @@ -175,7 +175,8 @@ self.assertEqual(m['Date'].datetime, self.dt) -class TestAddressHeader(TestHeaderBase, metaclass=Parameterized): + at parameterize +class TestAddressHeader(TestHeaderBase): example_params = { diff --git a/Lib/test/test_email/test_pickleable.py b/Lib/test/test_email/test_pickleable.py --- a/Lib/test/test_email/test_pickleable.py +++ b/Lib/test/test_email/test_pickleable.py @@ -6,9 +6,11 @@ import email.message from email import policy from email.headerregistry import HeaderRegistry -from test.test_email import TestEmailBase, Parameterized +from test.test_email import TestEmailBase, parameterize -class TestPickleCopyHeader(TestEmailBase, metaclass=Parameterized): + + at parameterize +class TestPickleCopyHeader(TestEmailBase): header_factory = HeaderRegistry() @@ -33,7 +35,8 @@ self.assertEqual(str(h), str(header)) -class TestPickleCopyMessage(TestEmailBase, metaclass=Parameterized): + at parameterize +class TestPickleCopyMessage(TestEmailBase): # Message objects are a sequence, so we have to make them a one-tuple in # msg_params so they get passed to the parameterized test method as a -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 1 00:21:41 2012 From: python-checkins at python.org (hynek.schlawack) Date: Fri, 01 Jun 2012 00:21:41 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_=2314814=3A_Remove_stale_?= =?utf8?q?=5F=5Fhex=5F=5F_method_from_ipaddress?= Message-ID: http://hg.python.org/cpython/rev/bd2c2def77a7 changeset: 77274:bd2c2def77a7 user: Hynek Schlawack date: Fri Jun 01 00:20:13 2012 +0200 summary: #14814: Remove stale __hex__ method from ipaddress Obsolete 2.x method. 
files: Lib/ipaddress.py | 3 --- 1 files changed, 0 insertions(+), 3 deletions(-) diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -482,9 +482,6 @@ def __int__(self): return self._ip - def __hex__(self): - return hex(self._ip) - def __eq__(self, other): try: return (self._ip == other._ip -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Fri Jun 1 05:51:32 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Fri, 01 Jun 2012 05:51:32 +0200 Subject: [Python-checkins] Daily reference leaks (bd2c2def77a7): sum=465 Message-ID: results for bd2c2def77a7 on branch "default" -------------------------------------------- test_smtplib leaked [154, 154, 154] references, sum=462 test_super leaked [1, 1, 1] references, sum=3 Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogSNO9GS', '-x'] From python-checkins at python.org Fri Jun 1 06:16:49 2012 From: python-checkins at python.org (eli.bendersky) Date: Fri, 01 Jun 2012 06:16:49 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314007=3A_make_XMLP?= =?utf8?q?arser_a_real_subclassable_type_exported_from?= Message-ID: http://hg.python.org/cpython/rev/a29ae1c2b8b2 changeset: 77275:a29ae1c2b8b2 user: Eli Bendersky date: Fri Jun 01 07:13:08 2012 +0300 summary: Issue #14007: make XMLParser a real subclassable type exported from _elementtree. +cleanups files: Doc/library/xml.etree.elementtree.rst | 10 +- Lib/test/test_xml_etree.py | 29 + Modules/_elementtree.c | 262 +++++++------ 3 files changed, 180 insertions(+), 121 deletions(-) diff --git a/Doc/library/xml.etree.elementtree.rst b/Doc/library/xml.etree.elementtree.rst --- a/Doc/library/xml.etree.elementtree.rst +++ b/Doc/library/xml.etree.elementtree.rst @@ -646,8 +646,8 @@ Loads an external XML section into this element tree. *source* is a file name or :term:`file object`. *parser* is an optional parser instance. - If not given, the standard XMLParser parser is used. Returns the section - root element. + If not given, the standard :class:`XMLParser` parser is used. Returns the + section root element. .. method:: write(file, encoding="us-ascii", xml_declaration=None, method="xml") @@ -767,9 +767,9 @@ :class:`Element` structure builder for XML source data, based on the expat parser. *html* are predefined HTML entities. This flag is not supported by the current implementation. *target* is the target object. If omitted, the - builder uses an instance of the standard TreeBuilder class. *encoding* [1]_ - is optional. If given, the value overrides the encoding specified in the - XML file. + builder uses an instance of the standard :class:`TreeBuilder` class. + *encoding* [1]_ is optional. If given, the value overrides the encoding + specified in the XML file. .. method:: close() diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py --- a/Lib/test/test_xml_etree.py +++ b/Lib/test/test_xml_etree.py @@ -2028,6 +2028,34 @@ 'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd')) +class XMLParserTest(unittest.TestCase): + sample1 = '22' + + def _check_sample_element(self, e): + self.assertEqual(e.tag, 'file') + self.assertEqual(e[0].tag, 'line') + self.assertEqual(e[0].text, '22') + + def test_constructor_args(self): + # Positional args. The first (html) is not supported, but should be + # nevertheless correctly accepted. 
+ parser = ET.XMLParser(None, ET.TreeBuilder(), 'utf-8') + parser.feed(self.sample1) + self._check_sample_element(parser.close()) + + # Now as keyword args. + parser2 = ET.XMLParser(encoding='utf-8', html=[{}], target=ET.TreeBuilder()) + parser2.feed(self.sample1) + self._check_sample_element(parser2.close()) + + def test_subclass(self): + class MyParser(ET.XMLParser): + pass + parser = MyParser() + parser.feed(self.sample1) + self._check_sample_element(parser.close()) + + class NoAcceleratorTest(unittest.TestCase): # Test that the C accelerator was not imported for pyET def test_correct_import_pyET(self): @@ -2245,6 +2273,7 @@ ElementTreeTest, NamespaceParseTest, TreeBuilderTest, + XMLParserTest, KeywordArgsTest] if module is pyET: # Run the tests specific to the Python implementation diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c --- a/Modules/_elementtree.c +++ b/Modules/_elementtree.c @@ -2257,6 +2257,9 @@ #define EXPAT(func) (XML_##func) #endif +static XML_Memory_Handling_Suite ExpatMemoryHandler = { + PyObject_Malloc, PyObject_Realloc, PyObject_Free}; + typedef struct { PyObject_HEAD @@ -2671,121 +2674,125 @@ } /* -------------------------------------------------------------------- */ -/* constructor and destructor */ - -static PyObject* -xmlparser(PyObject* self_, PyObject* args, PyObject* kw) + +static PyObject * +xmlparser_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { - XMLParserObject* self; - /* FIXME: does this need to be static? */ - static XML_Memory_Handling_Suite memory_handler; - - PyObject* target = NULL; - char* encoding = NULL; - static char* kwlist[] = { "target", "encoding", NULL }; - if (!PyArg_ParseTupleAndKeywords(args, kw, "|Oz:XMLParser", kwlist, - &target, &encoding)) - return NULL; - -#if defined(USE_PYEXPAT_CAPI) - if (!expat_capi) { - PyErr_SetString( - PyExc_RuntimeError, "cannot load dispatch table from pyexpat" - ); - return NULL; + XMLParserObject *self = (XMLParserObject *)type->tp_alloc(type, 0); + if (self) { + self->parser = NULL; + self->target = self->entity = self->names = NULL; + self->handle_start = self->handle_data = self->handle_end = NULL; + self->handle_comment = self->handle_pi = self->handle_close = NULL; } -#endif - - self = PyObject_New(XMLParserObject, &XMLParser_Type); - if (self == NULL) - return NULL; - - self->entity = PyDict_New(); - if (!self->entity) { - PyObject_Del(self); - return NULL; + return (PyObject *)self; +} + +static int +xmlparser_init(PyObject *self, PyObject *args, PyObject *kwds) +{ + XMLParserObject *self_xp = (XMLParserObject *)self; + PyObject *target = NULL, *html = NULL; + char *encoding = NULL; + static char *kwlist[] = {"html", "target", "encoding"}; + + if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOz:XMLParser", kwlist, + &html, &target, &encoding)) { + return -1; } - self->names = PyDict_New(); - if (!self->names) { - PyObject_Del(self->entity); - PyObject_Del(self); - return NULL; + self_xp->entity = PyDict_New(); + if (!self_xp->entity) + return -1; + + self_xp->names = PyDict_New(); + if (!self_xp->names) { + Py_XDECREF(self_xp->entity); + return -1; } - memory_handler.malloc_fcn = PyObject_Malloc; - memory_handler.realloc_fcn = PyObject_Realloc; - memory_handler.free_fcn = PyObject_Free; - - self->parser = EXPAT(ParserCreate_MM)(encoding, &memory_handler, "}"); - if (!self->parser) { - PyObject_Del(self->names); - PyObject_Del(self->entity); - PyObject_Del(self); + self_xp->parser = EXPAT(ParserCreate_MM)(encoding, &ExpatMemoryHandler, "}"); + if (!self_xp->parser) { + 
Py_XDECREF(self_xp->entity); + Py_XDECREF(self_xp->names); PyErr_NoMemory(); - return NULL; + return -1; } - /* setup target handlers */ - if (!target) { + if (target) { + Py_INCREF(target); + } else { target = treebuilder_new(&TreeBuilder_Type, NULL, NULL); if (!target) { - EXPAT(ParserFree)(self->parser); - PyObject_Del(self->names); - PyObject_Del(self->entity); - PyObject_Del(self); - return NULL; + Py_XDECREF(self_xp->entity); + Py_XDECREF(self_xp->names); + EXPAT(ParserFree)(self_xp->parser); + return -1; } - } else - Py_INCREF(target); - self->target = target; - - self->handle_start = PyObject_GetAttrString(target, "start"); - self->handle_data = PyObject_GetAttrString(target, "data"); - self->handle_end = PyObject_GetAttrString(target, "end"); - self->handle_comment = PyObject_GetAttrString(target, "comment"); - self->handle_pi = PyObject_GetAttrString(target, "pi"); - self->handle_close = PyObject_GetAttrString(target, "close"); + } + self_xp->target = target; + + self_xp->handle_start = PyObject_GetAttrString(target, "start"); + self_xp->handle_data = PyObject_GetAttrString(target, "data"); + self_xp->handle_end = PyObject_GetAttrString(target, "end"); + self_xp->handle_comment = PyObject_GetAttrString(target, "comment"); + self_xp->handle_pi = PyObject_GetAttrString(target, "pi"); + self_xp->handle_close = PyObject_GetAttrString(target, "close"); PyErr_Clear(); - + /* configure parser */ - EXPAT(SetUserData)(self->parser, self); + EXPAT(SetUserData)(self_xp->parser, self_xp); EXPAT(SetElementHandler)( - self->parser, + self_xp->parser, (XML_StartElementHandler) expat_start_handler, (XML_EndElementHandler) expat_end_handler ); EXPAT(SetDefaultHandlerExpand)( - self->parser, + self_xp->parser, (XML_DefaultHandler) expat_default_handler ); EXPAT(SetCharacterDataHandler)( - self->parser, + self_xp->parser, (XML_CharacterDataHandler) expat_data_handler ); - if (self->handle_comment) + if (self_xp->handle_comment) EXPAT(SetCommentHandler)( - self->parser, + self_xp->parser, (XML_CommentHandler) expat_comment_handler ); - if (self->handle_pi) + if (self_xp->handle_pi) EXPAT(SetProcessingInstructionHandler)( - self->parser, + self_xp->parser, (XML_ProcessingInstructionHandler) expat_pi_handler ); EXPAT(SetUnknownEncodingHandler)( - self->parser, + self_xp->parser, (XML_UnknownEncodingHandler) expat_unknown_encoding_handler, NULL ); - ALLOC(sizeof(XMLParserObject), "create expatparser"); - - return (PyObject*) self; + return 0; } -static void -xmlparser_dealloc(XMLParserObject* self) +static int +xmlparser_gc_traverse(XMLParserObject *self, visitproc visit, void *arg) +{ + Py_VISIT(self->handle_close); + Py_VISIT(self->handle_pi); + Py_VISIT(self->handle_comment); + Py_VISIT(self->handle_end); + Py_VISIT(self->handle_data); + Py_VISIT(self->handle_start); + + Py_VISIT(self->target); + Py_VISIT(self->entity); + Py_VISIT(self->names); + + return 0; +} + +static int +xmlparser_gc_clear(XMLParserObject *self) { EXPAT(ParserFree)(self->parser); @@ -2796,17 +2803,20 @@ Py_XDECREF(self->handle_data); Py_XDECREF(self->handle_start); - Py_DECREF(self->target); - Py_DECREF(self->entity); - Py_DECREF(self->names); - - RELEASE(sizeof(XMLParserObject), "destroy expatparser"); - - PyObject_Del(self); + Py_XDECREF(self->target); + Py_XDECREF(self->entity); + Py_XDECREF(self->names); + + return 0; } -/* -------------------------------------------------------------------- */ -/* methods (in alphabetical order) */ +static void +xmlparser_dealloc(XMLParserObject* self) +{ + PyObject_GC_UnTrack(self); + 
xmlparser_gc_clear(self); + Py_TYPE(self)->tp_free((PyObject *)self); +} LOCAL(PyObject*) expat_parse(XMLParserObject* self, char* data, int data_len, int final) @@ -3083,31 +3093,42 @@ PyVarObject_HEAD_INIT(NULL, 0) "XMLParser", sizeof(XMLParserObject), 0, /* methods */ - (destructor)xmlparser_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_reserved */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - (getattrofunc)xmlparser_getattro, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT, /* tp_flags */ - 0, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - xmlparser_methods, /* tp_methods */ - 0, /* tp_members */ + (destructor)xmlparser_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_reserved */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + (getattrofunc)xmlparser_getattro, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, + /* tp_flags */ + 0, /* tp_doc */ + (traverseproc)xmlparser_gc_traverse, /* tp_traverse */ + (inquiry)xmlparser_gc_clear, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + xmlparser_methods, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + (initproc)xmlparser_init, /* tp_init */ + PyType_GenericAlloc, /* tp_alloc */ + xmlparser_new, /* tp_new */ + 0, /* tp_free */ }; #endif @@ -3117,9 +3138,6 @@ static PyMethodDef _functions[] = { {"SubElement", (PyCFunction) subelement, METH_VARARGS|METH_KEYWORDS}, -#if defined(USE_EXPAT) - {"XMLParser", (PyCFunction) xmlparser, METH_VARARGS|METH_KEYWORDS}, -#endif {NULL, NULL} }; @@ -3214,8 +3232,15 @@ expat_capi->size < sizeof(struct PyExpat_CAPI) || expat_capi->MAJOR_VERSION != XML_MAJOR_VERSION || expat_capi->MINOR_VERSION != XML_MINOR_VERSION || - expat_capi->MICRO_VERSION != XML_MICRO_VERSION) + expat_capi->MICRO_VERSION != XML_MICRO_VERSION) { expat_capi = NULL; + } + } + if (!expat_capi) { + PyErr_SetString( + PyExc_RuntimeError, "cannot load dispatch table from pyexpat" + ); + return NULL; } #endif @@ -3231,5 +3256,10 @@ Py_INCREF((PyObject *)&TreeBuilder_Type); PyModule_AddObject(m, "TreeBuilder", (PyObject *)&TreeBuilder_Type); +#if defined(USE_EXPAT) + Py_INCREF((PyObject *)&XMLParser_Type); + PyModule_AddObject(m, "XMLParser", (PyObject *)&XMLParser_Type); +#endif + return m; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 1 06:16:50 2012 From: python-checkins at python.org (eli.bendersky) Date: Fri, 01 Jun 2012 06:16:50 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_fix_trailing_whitespace?= Message-ID: http://hg.python.org/cpython/rev/925383843063 changeset: 77276:925383843063 user: Eli Bendersky date: Fri Jun 01 07:15:00 2012 +0300 summary: fix trailing whitespace files: Doc/library/xml.etree.elementtree.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/xml.etree.elementtree.rst b/Doc/library/xml.etree.elementtree.rst --- 
a/Doc/library/xml.etree.elementtree.rst +++ b/Doc/library/xml.etree.elementtree.rst @@ -767,7 +767,7 @@ :class:`Element` structure builder for XML source data, based on the expat parser. *html* are predefined HTML entities. This flag is not supported by the current implementation. *target* is the target object. If omitted, the - builder uses an instance of the standard :class:`TreeBuilder` class. + builder uses an instance of the standard :class:`TreeBuilder` class. *encoding* [1]_ is optional. If given, the value overrides the encoding specified in the XML file. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 1 07:07:49 2012 From: python-checkins at python.org (brian.curtin) Date: Fri, 01 Jun 2012 07:07:49 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Reformat_two_issue_numbers?= Message-ID: http://hg.python.org/cpython/rev/8ddf40f68def changeset: 77277:8ddf40f68def user: Brian Curtin date: Fri Jun 01 00:07:28 2012 -0500 summary: Reformat two issue numbers files: Misc/NEWS | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -92,13 +92,13 @@ - Issue #14443: Tell rpmbuild to use the correct version of Python in bdist_rpm. Initial patch by Ross Lagerwall. -- Issue14929: Stop Idle 3.x from closing on Unicode decode errors when grepping. +- Issue #14929: Stop Idle 3.x from closing on Unicode decode errors when grepping. Patch by Roger Serwy. - Issue #12515: email now registers a defect if it gets to EOF while parsing a MIME part without seeing the closing MIME boundary. -- Issue12510: Attempting to get invalid tooltip no longer closes Idle. +- Issue #12510: Attempting to get invalid tooltip no longer closes Idle. Original patch by Roger Serwy. - Issue #1672568: email now always decodes base64 payloads, adding padding and -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 1 08:50:27 2012 From: python-checkins at python.org (eli.bendersky) Date: Fri, 01 Jun 2012 08:50:27 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_We=27re_always_building_=5F?= =?utf8?q?elementtree_with_USE=5FPYEXPAT=5FCAPI=2C_so_the_=23ifdefs_in?= Message-ID: http://hg.python.org/cpython/rev/cf9c379a9859 changeset: 77278:cf9c379a9859 user: Eli Bendersky date: Fri Jun 01 09:48:37 2012 +0300 summary: We're always building _elementtree with USE_PYEXPAT_CAPI, so the #ifdefs in the code are unnecessary. files: Modules/_elementtree.c | 14 ++------------ 1 files changed, 2 insertions(+), 12 deletions(-) diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c --- a/Modules/_elementtree.c +++ b/Modules/_elementtree.c @@ -58,9 +58,6 @@ /* Leave defined to include the expat-based XMLParser type */ #define USE_EXPAT -/* Define to do all expat calls via pyexpat's embedded expat library */ -/* #define USE_PYEXPAT_CAPI */ - /* An element can hold this many children without extra memory allocations. 
*/ #define STATIC_CHILDREN 4 @@ -2248,14 +2245,9 @@ #if defined(USE_EXPAT) #include "expat.h" - -#if defined(USE_PYEXPAT_CAPI) #include "pyexpat.h" -static struct PyExpat_CAPI* expat_capi; +static struct PyExpat_CAPI *expat_capi; #define EXPAT(func) (expat_capi->func) -#else -#define EXPAT(func) (XML_##func) -#endif static XML_Memory_Handling_Suite ExpatMemoryHandler = { PyObject_Malloc, PyObject_Realloc, PyObject_Free}; @@ -3223,8 +3215,7 @@ elementtree_iter_obj = PyDict_GetItemString(g, "iter"); elementtree_itertext_obj = PyDict_GetItemString(g, "itertext"); -#if defined(USE_PYEXPAT_CAPI) - /* link against pyexpat, if possible */ + /* link against pyexpat */ expat_capi = PyCapsule_Import(PyExpat_CAPSULE_NAME, 0); if (expat_capi) { /* check that it's usable */ @@ -3242,7 +3233,6 @@ ); return NULL; } -#endif elementtree_parseerror_obj = PyErr_NewException( "xml.etree.ElementTree.ParseError", PyExc_SyntaxError, NULL -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 1 10:34:30 2012 From: python-checkins at python.org (eli.bendersky) Date: Fri, 01 Jun 2012 10:34:30 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314007=3A_implement?= =?utf8?q?_doctype=28=29_method_calling_in_XMLParser_of_=5Felementtree=2E?= Message-ID: http://hg.python.org/cpython/rev/6f9bfcc1896f changeset: 77279:6f9bfcc1896f user: Eli Bendersky date: Fri Jun 01 11:32:34 2012 +0300 summary: Issue #14007: implement doctype() method calling in XMLParser of _elementtree. Includes exposing a doctype handler from expat through pyexpat. files: Include/pyexpat.h | 2 + Lib/test/test_xml_etree.py | 19 +++- Modules/_elementtree.c | 117 ++++++++++++++++++++++-- Modules/pyexpat.c | 1 + 4 files changed, 124 insertions(+), 15 deletions(-) diff --git a/Include/pyexpat.h b/Include/pyexpat.h --- a/Include/pyexpat.h +++ b/Include/pyexpat.h @@ -43,6 +43,8 @@ XML_Parser parser, XML_UnknownEncodingHandler handler, void *encodingHandlerData); void (*SetUserData)(XML_Parser parser, void *userData); + void (*SetStartDoctypeDeclHandler)(XML_Parser parser, + XML_StartDoctypeDeclHandler start); /* always add new stuff to the end! 
*/ }; diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py --- a/Lib/test/test_xml_etree.py +++ b/Lib/test/test_xml_etree.py @@ -2009,7 +2009,6 @@ self.assertEqual(lst, ['toplevel']) - @unittest.expectedFailure # XXX issue 14007 with C ElementTree def test_doctype(self): class DoctypeParser: _doctype = None @@ -2030,6 +2029,10 @@ class XMLParserTest(unittest.TestCase): sample1 = '22' + sample2 = ('' + 'text') def _check_sample_element(self, e): self.assertEqual(e.tag, 'file') @@ -2055,6 +2058,20 @@ parser.feed(self.sample1) self._check_sample_element(parser.close()) + def test_subclass_doctype(self): + _doctype = None + class MyParserWithDoctype(ET.XMLParser): + def doctype(self, name, pubid, system): + nonlocal _doctype + _doctype = (name, pubid, system) + + parser = MyParserWithDoctype() + parser.feed(self.sample2) + parser.close() + self.assertEqual(_doctype, + ('html', '-//W3C//DTD XHTML 1.0 Transitional//EN', + 'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd')) + class NoAcceleratorTest(unittest.TestCase): # Test that the C accelerator was not imported for pyET diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c --- a/Modules/_elementtree.c +++ b/Modules/_elementtree.c @@ -2257,24 +2257,27 @@ XML_Parser parser; - PyObject* target; - PyObject* entity; - - PyObject* names; - - PyObject* handle_start; - PyObject* handle_data; - PyObject* handle_end; - - PyObject* handle_comment; - PyObject* handle_pi; - - PyObject* handle_close; + PyObject *target; + PyObject *entity; + + PyObject *names; + + PyObject *handle_start; + PyObject *handle_data; + PyObject *handle_end; + + PyObject *handle_comment; + PyObject *handle_pi; + PyObject *handle_doctype; + + PyObject *handle_close; } XMLParserObject; static PyTypeObject XMLParser_Type; +#define XMLParser_CheckExact(op) (Py_TYPE(op) == &XMLParser_Type) + /* helpers */ LOCAL(PyObject*) @@ -2601,6 +2604,78 @@ } } +static void +expat_start_doctype_handler(XMLParserObject *self, + const XML_Char *doctype_name, + const XML_Char *sysid, + const XML_Char *pubid, + int has_internal_subset) +{ + PyObject *self_pyobj = (PyObject *)self; + PyObject *doctype_name_obj, *sysid_obj, *pubid_obj; + PyObject *parser_doctype = NULL; + PyObject *res = NULL; + + doctype_name_obj = makeuniversal(self, doctype_name); + if (!doctype_name_obj) + return; + + if (sysid) { + sysid_obj = makeuniversal(self, sysid); + if (!sysid_obj) { + Py_DECREF(doctype_name_obj); + return; + } + } else { + Py_INCREF(Py_None); + sysid_obj = Py_None; + } + + if (pubid) { + pubid_obj = makeuniversal(self, pubid); + if (!pubid_obj) { + Py_DECREF(doctype_name_obj); + Py_DECREF(sysid_obj); + return; + } + } else { + Py_INCREF(Py_None); + pubid_obj = Py_None; + } + + /* If the target has a handler for doctype, call it. */ + if (self->handle_doctype) { + res = PyObject_CallFunction(self->handle_doctype, "OOO", + doctype_name_obj, pubid_obj, sysid_obj); + Py_CLEAR(res); + } + + /* Now see if the parser itself has a doctype method. If yes and it's + * a subclass, call it but warn about deprecation. If it's not a subclass + * (i.e. vanilla XMLParser), do nothing. + */ + parser_doctype = PyObject_GetAttrString(self_pyobj, "doctype"); + if (parser_doctype) { + if (!XMLParser_CheckExact(self_pyobj)) { + if (PyErr_WarnEx(PyExc_DeprecationWarning, + "This method of XMLParser is deprecated. 
Define" + " doctype() method on the TreeBuilder target.", + 1) < 0) { + goto clear; + } + res = PyObject_CallFunction(parser_doctype, "OOO", + doctype_name_obj, pubid_obj, sysid_obj); + Py_CLEAR(res); + } + } + +clear: + Py_XDECREF(parser_doctype); + Py_DECREF(doctype_name_obj); + Py_DECREF(pubid_obj); + Py_DECREF(sysid_obj); +} + static void expat_pi_handler(XMLParserObject* self, const XML_Char* target_in, const XML_Char* data_in) @@ -2676,6 +2751,7 @@ self->target = self->entity = self->names = NULL; self->handle_start = self->handle_data = self->handle_end = NULL; self->handle_comment = self->handle_pi = self->handle_close = NULL; + self->handle_doctype = NULL; } return (PyObject *)self; } @@ -2730,6 +2806,7 @@ self_xp->handle_comment = PyObject_GetAttrString(target, "comment"); self_xp->handle_pi = PyObject_GetAttrString(target, "pi"); self_xp->handle_close = PyObject_GetAttrString(target, "close"); + self_xp->handle_doctype = PyObject_GetAttrString(target, "doctype"); PyErr_Clear(); @@ -2758,6 +2835,10 @@ self_xp->parser, (XML_ProcessingInstructionHandler) expat_pi_handler ); + EXPAT(SetStartDoctypeDeclHandler)( + self_xp->parser, + (XML_StartDoctypeDeclHandler) expat_start_doctype_handler + ); EXPAT(SetUnknownEncodingHandler)( self_xp->parser, (XML_UnknownEncodingHandler) expat_unknown_encoding_handler, NULL @@ -2794,6 +2875,7 @@ Py_XDECREF(self->handle_end); Py_XDECREF(self->handle_data); Py_XDECREF(self->handle_start); + Py_XDECREF(self->handle_doctype); Py_XDECREF(self->target); Py_XDECREF(self->entity); @@ -2950,7 +3032,13 @@ } static PyObject* -xmlparser_setevents(XMLParserObject* self, PyObject* args) +xmlparser_doctype(XMLParserObject *self, PyObject *args) +{ + Py_RETURN_NONE; +} + +static PyObject* +xmlparser_setevents(XMLParserObject *self, PyObject* args) { /* activate element event reporting */ @@ -3054,6 +3142,7 @@ {"close", (PyCFunction) xmlparser_close, METH_VARARGS}, {"_parse", (PyCFunction) xmlparser_parse, METH_VARARGS}, {"_setevents", (PyCFunction) xmlparser_setevents, METH_VARARGS}, + {"doctype", (PyCFunction) xmlparser_doctype, METH_VARARGS}, {NULL, NULL} }; diff --git a/Modules/pyexpat.c b/Modules/pyexpat.c --- a/Modules/pyexpat.c +++ b/Modules/pyexpat.c @@ -1904,6 +1904,7 @@ capi.SetProcessingInstructionHandler = XML_SetProcessingInstructionHandler; capi.SetUnknownEncodingHandler = XML_SetUnknownEncodingHandler; capi.SetUserData = XML_SetUserData; + capi.SetStartDoctypeDeclHandler = XML_SetStartDoctypeDeclHandler; /* export using capsule */ capi_object = PyCapsule_New(&capi, PyExpat_CAPSULE_NAME, NULL); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 1 10:59:38 2012 From: python-checkins at python.org (stefan.krah) Date: Fri, 01 Jun 2012 10:59:38 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_word=2Edigits_are_always_in?= =?utf8?q?itialized_before_use_in_the_Taylor_series_loop=2C?= Message-ID: http://hg.python.org/cpython/rev/a118294b68db changeset: 77280:a118294b68db user: Stefan Krah date: Fri Jun 01 10:58:16 2012 +0200 summary: word.digits are always initialized before use in the Taylor series loop, but this is more readable. 
files: Modules/_decimal/libmpdec/mpdecimal.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/_decimal/libmpdec/mpdecimal.c b/Modules/_decimal/libmpdec/mpdecimal.c --- a/Modules/_decimal/libmpdec/mpdecimal.c +++ b/Modules/_decimal/libmpdec/mpdecimal.c @@ -3989,7 +3989,7 @@ mpd_context_t workctx; MPD_NEW_STATIC(tmp,0,0,0,0); MPD_NEW_STATIC(sum,0,0,0,0); - MPD_NEW_CONST(word,0,0,0,1,1,1); + MPD_NEW_CONST(word,0,0,1,1,1,1); mpd_ssize_t j, n, t; assert(!mpd_isspecial(a)); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 1 11:54:12 2012 From: python-checkins at python.org (hynek.schlawack) Date: Fri, 01 Jun 2012 11:54:12 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_=2314814=3A_Remove_2=2Ex=27?= =?utf8?q?s_new-style_classes_syntax_from_ipaddress?= Message-ID: http://hg.python.org/cpython/rev/0eb63de72e96 changeset: 77281:0eb63de72e96 user: Hynek Schlawack date: Fri Jun 01 11:48:32 2012 +0200 summary: #14814: Remove 2.x's new-style classes syntax from ipaddress files: Lib/ipaddress.py | 6 +++--- 1 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -400,7 +400,7 @@ return NotImplemented -class _IPAddressBase(object): +class _IPAddressBase: """The mother class.""" @@ -975,7 +975,7 @@ return t.__class__('%s/%d' % (str(t.network_address), t.prefixlen)) -class _BaseV4(object): +class _BaseV4: """Base IPv4 object. @@ -1511,7 +1511,7 @@ return '%s/%s' % (str(self.network_address), str(self.hostmask)) -class _BaseV6(object): +class _BaseV6: """Base IPv6 object. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 1 14:48:48 2012 From: python-checkins at python.org (nick.coghlan) Date: Fri, 01 Jun 2012 14:48:48 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Close_=2314969=3A_Improve_t?= =?utf8?q?he_handling_of_exception_chaining_in_contextlib=2EExitStack?= Message-ID: http://hg.python.org/cpython/rev/c108bc96aec6 changeset: 77282:c108bc96aec6 user: Nick Coghlan date: Fri Jun 01 22:48:32 2012 +1000 summary: Close #14969: Improve the handling of exception chaining in contextlib.ExitStack files: Lib/contextlib.py | 16 +++++++++++-- Lib/test/test_contextlib.py | 29 +++++++++++++++--------- Misc/NEWS | 2 + 3 files changed, 33 insertions(+), 14 deletions(-) diff --git a/Lib/contextlib.py b/Lib/contextlib.py --- a/Lib/contextlib.py +++ b/Lib/contextlib.py @@ -225,6 +225,17 @@ return self def __exit__(self, *exc_details): + # We manipulate the exception state so it behaves as though + # we were actually nesting multiple with statements + frame_exc = sys.exc_info()[1] + def _fix_exception_context(new_exc, old_exc): + while 1: + exc_context = new_exc.__context__ + if exc_context in (None, frame_exc): + break + new_exc = exc_context + new_exc.__context__ = old_exc + # Callbacks are invoked in LIFO order to match the behaviour of # nested context managers suppressed_exc = False @@ -236,9 +247,8 @@ exc_details = (None, None, None) except: new_exc_details = sys.exc_info() - if exc_details != (None, None, None): - # simulate the stack of exceptions by setting the context - new_exc_details[1].__context__ = exc_details[1] + # simulate the stack of exceptions by setting the context + _fix_exception_context(new_exc_details[1], exc_details[1]) if not self._exit_callbacks: raise exc_details = new_exc_details diff --git a/Lib/test/test_contextlib.py b/Lib/test/test_contextlib.py --- a/Lib/test/test_contextlib.py 
+++ b/Lib/test/test_contextlib.py @@ -505,6 +505,18 @@ def __exit__(self, *exc_details): raise self.exc + class RaiseExcWithContext: + def __init__(self, outer, inner): + self.outer = outer + self.inner = inner + def __enter__(self): + return self + def __exit__(self, *exc_details): + try: + raise self.inner + except: + raise self.outer + class SuppressExc: def __enter__(self): return self @@ -514,11 +526,10 @@ try: with RaiseExc(IndexError): - with RaiseExc(KeyError): - with RaiseExc(AttributeError): - with SuppressExc(): - with RaiseExc(ValueError): - 1 / 0 + with RaiseExcWithContext(KeyError, AttributeError): + with SuppressExc(): + with RaiseExc(ValueError): + 1 / 0 except IndexError as exc: self.assertIsInstance(exc.__context__, KeyError) self.assertIsInstance(exc.__context__.__context__, AttributeError) @@ -553,12 +564,8 @@ except IndexError as exc: self.assertIsInstance(exc.__context__, KeyError) self.assertIsInstance(exc.__context__.__context__, AttributeError) - # Inner exceptions were suppressed, but the with statement - # cleanup code adds the one from the body back in as the - # context of the exception raised by the outer callbacks - # See http://bugs.python.org/issue14969 - suite_exc = exc.__context__.__context__.__context__ - self.assertIsInstance(suite_exc, ZeroDivisionError) + # Inner exceptions were suppressed + self.assertIsNone(exc.__context__.__context__.__context__) else: self.fail("Expected IndexError, but no exception was raised") # Check the inner exceptions diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,8 @@ Library ------- +- Issue #14969: Better handling of exception chaining in contextlib.ExitStack + - Issue #14962: Update text coloring in IDLE shell window after changing options. Patch by Roger Serwy. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 1 15:09:18 2012 From: python-checkins at python.org (martin.v.loewis) Date: Fri, 01 Jun 2012 15:09:18 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Convert_to_rst=2E_Add_section_?= =?utf8?q?on_Windows_lifecycle=2E?= Message-ID: http://hg.python.org/peps/rev/727662ab7f50 changeset: 4444:727662ab7f50 user: Martin v. Löwis date: Fri Jun 01 15:09:13 2012 +0200 summary: Convert to rst. Add section on Windows lifecycle. files: pep-0011.txt | 288 +++++++++++++++++++++----------------- 1 files changed, 162 insertions(+), 126 deletions(-) diff --git a/pep-0011.txt b/pep-0011.txt --- a/pep-0011.txt +++ b/pep-0011.txt @@ -5,189 +5,225 @@ Author: martin at v.loewis.de (Martin von Löwis) Status: Active Type: Process +Content-Type: text/x-rst Created: 07-Jul-2002 Post-History: 18-Aug-2007 Abstract +-------- - This PEP documents operating systems (platforms) which are not - supported in Python anymore. For some of these systems, - supporting code might be still part of Python, but will be removed - in a future release - unless somebody steps forward as a volunteer - to maintain this code. +This PEP documents operating systems (platforms) which are not +supported in Python anymore. For some of these systems, +supporting code might be still part of Python, but will be removed +in a future release - unless somebody steps forward as a volunteer +to maintain this code. Rationale +--------- - Over time, the Python source code has collected various pieces of - platform-specific code, which, at some point in time, was - considered necessary to use Python on a specific platform. 
- Without access to this platform, it is not possible to determine - whether this code is still needed. As a result, this code may - either break during the Python evolution, or it may become - unnecessary as the platforms evolve as well. +Over time, the Python source code has collected various pieces of +platform-specific code, which, at some point in time, was +considered necessary to use Python on a specific platform. +Without access to this platform, it is not possible to determine +whether this code is still needed. As a result, this code may +either break during the Python evolution, or it may become +unnecessary as the platforms evolve as well. - The growing amount of these fragments poses the risk of - unmaintainability: without having experts for a large number of - platforms, it is not possible to determine whether a certain - change to the Python source code will work on all supported - platforms. +The growing amount of these fragments poses the risk of +unmaintainability: without having experts for a large number of +platforms, it is not possible to determine whether a certain +change to the Python source code will work on all supported +platforms. - To reduce this risk, this PEP proposes a procedure to remove code - for platforms with no Python users. +To reduce this risk, this PEP proposes a procedure to remove code +for platforms with no Python users. Unsupporting platforms +---------------------- - If a certain platform that currently has special code in it is - deemed to be without Python users, a note must be posted in this - PEP that this platform is no longer actively supported. This - note must include: +If a certain platform that currently has special code in it is +deemed to be without Python users, a note must be posted in this +PEP that this platform is no longer actively supported. This +note must include: - - the name of the system - - the first release number that does not support this platform - anymore, and - - the first release where the historical support code is actively - removed +- the name of the system +- the first release number that does not support this platform + anymore, and +- the first release where the historical support code is actively + removed - In some cases, it is not possible to identify the specific list of - systems for which some code is used (e.g. when autoconf tests for - absence of some feature which is considered present on all - supported systems). In this case, the name will give the precise - condition (usually a preprocessor symbol) that will become - unsupported. +In some cases, it is not possible to identify the specific list of +systems for which some code is used (e.g. when autoconf tests for +absence of some feature which is considered present on all +supported systems). In this case, the name will give the precise +condition (usually a preprocessor symbol) that will become +unsupported. - At the same time, the Python source code must be changed to - produce a build-time error if somebody tries to install Python on - this platform. On platforms using autoconf, configure must fail. - This gives potential users of the platform a chance to step - forward and offer maintenance. +At the same time, the Python source code must be changed to +produce a build-time error if somebody tries to install Python on +this platform. On platforms using autoconf, configure must fail. +This gives potential users of the platform a chance to step +forward and offer maintenance. 
Resupporting platforms +---------------------- - If a user of a platform wants to see this platform supported - again, he may volunteer to maintain the platform support. Such an - offer must be recorded in the PEP, and the user can submit patches - to remove the build-time errors, and perform any other maintenance - work for the platform. +If a user of a platform wants to see this platform supported +again, he may volunteer to maintain the platform support. Such an +offer must be recorded in the PEP, and the user can submit patches +to remove the build-time errors, and perform any other maintenance +work for the platform. + +Microsoft Windows +----------------- + +Microsoft has established a policy called product support lifecycle +[1]_. Each product's lifecycle has a mainstream support phase, where +the product is generally commercially available, and an extended +support phase, where paid support is still available, and certain bug +fixes are released (in particular security fixes). + +Python's Windows support now follows this lifecycle. A new feature +release X.Y.0 will support all Windows releases whose extended support +phase is not yet expired. Subsequent bug fix releases will support +the same Windows releases as the original feature release (even if +the extended support phase has ended). + +Because of this policy, no further Windows releases need to be listed +in this PEP. + +Each feature release is built by a specific version of Microsoft +Visual Studio. That version should have mainstream support when the +release is made. Developers of extension modules will generally need +to use the same Visual Studio release; they are concerned both with +the availability of the versions they need to use, and with keeping +the zoo of versions small. The Python source tree will keep +unmaintained build files for older Visual Studio releases, for which +patches will be accepted. Such build files will be removed from the +source tree 3 years after the extended support for the compiler has +ended (but continue to remain available in revision control). 
No-longer-supported platforms +----------------------------- - Name: MS-DOS, MS-Windows 3.x - Unsupported in: Python 2.0 - Code removed in: Python 2.1 +* | Name: MS-DOS, MS-Windows 3.x + | Unsupported in: Python 2.0 + | Code removed in: Python 2.1 - Name: SunOS 4 - Unsupported in: Python 2.3 - Code removed in: Python 2.4 +* | Name: SunOS 4 + | Unsupported in: Python 2.3 + | Code removed in: Python 2.4 - Name: DYNIX - Unsupported in: Python 2.3 - Code removed in: Python 2.4 +* | Name: DYNIX + | Unsupported in: Python 2.3 + | Code removed in: Python 2.4 - Name: dgux - Unsupported in: Python 2.3 - Code removed in: Python 2.4 +* | Name: dgux + | Unsupported in: Python 2.3 + | Code removed in: Python 2.4 - Name: Minix - Unsupported in: Python 2.3 - Code removed in: Python 2.4 +* | Name: Minix + | Unsupported in: Python 2.3 + | Code removed in: Python 2.4 - Name: Irix 4 and --with-sgi-dl - Unsupported in: Python 2.3 - Code removed in: Python 2.4 +* | Name: Irix 4 and --with-sgi-dl + | Unsupported in: Python 2.3 + | Code removed in: Python 2.4 - Name: Linux 1 - Unsupported in: Python 2.3 - Code removed in: Python 2.4 +* | Name: Linux 1 + | Unsupported in: Python 2.3 + | Code removed in: Python 2.4 - Name: Systems defining __d6_pthread_create (configure.in) - Unsupported in: Python 2.3 - Code removed in: Python 2.4 +* | Name: Systems defining __d6_pthread_create (configure.in) + | Unsupported in: Python 2.3 + | Code removed in: Python 2.4 - Name: Systems defining PY_PTHREAD_D4, PY_PTHREAD_D6, +* | Name: Systems defining PY_PTHREAD_D4, PY_PTHREAD_D6, or PY_PTHREAD_D7 in thread_pthread.h - Unsupported in: Python 2.3 - Code removed in: Python 2.4 + | Unsupported in: Python 2.3 + | Code removed in: Python 2.4 - Name: Systems using --with-dl-dld - Unsupported in: Python 2.3 - Code removed in: Python 2.4 +* | Name: Systems using --with-dl-dld + | Unsupported in: Python 2.3 + | Code removed in: Python 2.4 - Name: Systems using --without-universal-newlines, - Unsupported in: Python 2.3 - Code removed in: Python 2.4 +* | Name: Systems using --without-universal-newlines, + | Unsupported in: Python 2.3 + | Code removed in: Python 2.4 - Name: MacOS 9 - Unsupported in: Python 2.4 - Code removed in: Python 2.4 +* | Name: MacOS 9 + | Unsupported in: Python 2.4 + | Code removed in: Python 2.4 - Name: Systems using --with-wctype-functions - Unsupported in: Python 2.6 - Code removed in: Python 2.6 +* | Name: Systems using --with-wctype-functions + | Unsupported in: Python 2.6 + | Code removed in: Python 2.6 - Name: Win9x, WinME, NT4 - Unsupported in: Python 2.6 (warning in 2.5 installer) - Code removed in: Python 2.6 +* | Name: Win9x, WinME, NT4 + | Unsupported in: Python 2.6 (warning in 2.5 installer) + | Code removed in: Python 2.6 - Name: AtheOS - Unsupported in: Python 2.6 (with "AtheOS" changed to "Syllable") - Build broken in: Python 2.7 (edit configure to reenable) - Code removed in: Python 3.0 - Details: http://www.syllable.org/discussion.php?id=2320 +* | Name: AtheOS + | Unsupported in: Python 2.6 (with "AtheOS" changed to "Syllable") + | Build broken in: Python 2.7 (edit configure to reenable) + | Code removed in: Python 3.0 + | Details: http://www.syllable.org/discussion.php?id=2320 - Name: BeOS - Unsupported in: Python 2.6 (warning in configure) - Build broken in: Python 2.7 (edit configure to reenable) - Code removed in: Python 3.0 +* | Name: BeOS + | Unsupported in: Python 2.6 (warning in configure) + | Build broken in: Python 2.7 (edit configure to reenable) + | Code removed in: Python 3.0 - Name: 
Systems using Mach C Threads - Unsupported in: Python 3.2 - Code removed in: Python 3.3 +* | Name: Systems using Mach C Threads + | Unsupported in: Python 3.2 + | Code removed in: Python 3.3 - Name: SunOS lightweight processes (LWP) - Unsupported in: Python 3.2 - Code removed in: Python 3.3 +* | Name: SunOS lightweight processes (LWP) + | Unsupported in: Python 3.2 + | Code removed in: Python 3.3 - Name: Systems using --with-pth (GNU pth threads) - Unsupported in: Python 3.2 - Code removed in: Python 3.3 +* | Name: Systems using --with-pth (GNU pth threads) + | Unsupported in: Python 3.2 + | Code removed in: Python 3.3 - Name: Systems using Irix threads - Unsupported in: Python 3.2 - Code removed in: Python 3.3 +* | Name: Systems using Irix threads + | Unsupported in: Python 3.2 + | Code removed in: Python 3.3 - Name: OSF* systems (issue 8606) - Unsupported in: Python 3.2 - Code removed in: Python 3.3 +* | Name: OSF* systems (issue 8606) + | Unsupported in: Python 3.2 + | Code removed in: Python 3.3 - Name: OS/2 - Unsupported in: Python 3.3 - Code removed in: Python 3.4 +* | Name: OS/2 + | Unsupported in: Python 3.3 + | Code removed in: Python 3.4 - Name: VMS - Unsupported in: Python 3.3 - Code removed in: Python 3.4 +* | Name: VMS + | Unsupported in: Python 3.3 + | Code removed in: Python 3.4 - Name: Windows 2000 - Unsupported in: Python 3.3 - Code removed in: Python 3.4 +* | Name: Windows 2000 + | Unsupported in: Python 3.3 + | Code removed in: Python 3.4 - Name: Windows systems where COMSPEC points to command.com - Unsupported in: Python 3.3 - Code removed in: Python 3.4 +* | Name: Windows systems where COMSPEC points to command.com + | Unsupported in: Python 3.3 + | Code removed in: Python 3.4 -Platform Maintainers +References +---------- - Cygwin Jason Tishler (jason at tishler.net) - More TBD. +.. [1] http://support.microsoft.com/lifecycle/ Copyright +--------- - This document has been placed in the public domain. +This document has been placed in the public domain. 
-- Repository URL: http://hg.python.org/peps From python-checkins at python.org Fri Jun 1 19:57:45 2012 From: python-checkins at python.org (hynek.schlawack) Date: Fri, 01 Jun 2012 19:57:45 +0200 Subject: [Python-checkins] =?utf8?q?devguide=3A_Add_interests?= Message-ID: http://hg.python.org/devguide/rev/365da1a16d73 changeset: 521:365da1a16d73 user: Hynek Schlawack date: Fri Jun 01 19:57:29 2012 +0200 summary: Add interests files: experts.rst | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/experts.rst b/experts.rst --- a/experts.rst +++ b/experts.rst @@ -280,7 +280,7 @@ FreeBSD HP-UX Linux -Mac OS X ronaldoussoren, ned.deily +Mac OS X ronaldoussoren, ned.deily, hynek NetBSD1 OS2/EMX aimacintyre Solaris/OpenIndiana jcea @@ -308,7 +308,7 @@ GUI i18n lemburg, eric.araujo import machinery brett.cannon, ncoghlan -io pitrou, benjamin.peterson, stutzbach +io pitrou, benjamin.peterson, stutzbach, hynek locale lemburg, loewis mathematics mark.dickinson, eric.smith, lemburg, stutzbach memory management tim_one, lemburg -- Repository URL: http://hg.python.org/devguide From python-checkins at python.org Fri Jun 1 20:18:29 2012 From: python-checkins at python.org (benjamin.peterson) Date: Fri, 01 Jun 2012 20:18:29 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_check_return_for_error?= Message-ID: http://hg.python.org/cpython/rev/2285a82504dc changeset: 77283:2285a82504dc user: Benjamin Peterson date: Fri Jun 01 11:18:22 2012 -0700 summary: check return for error files: Python/ceval.c | 2 ++ 1 files changed, 2 insertions(+), 0 deletions(-) diff --git a/Python/ceval.c b/Python/ceval.c --- a/Python/ceval.c +++ b/Python/ceval.c @@ -3107,6 +3107,8 @@ tail = PyUnicode_FromFormat(", %U, and %U", PyList_GET_ITEM(names, len - 2), PyList_GET_ITEM(names, len - 1)); + if (tail == NULL) + return; /* Chop off the last two objects in the list. This shouldn't actually fail, but we can't be too careful. 
*/ err = PyList_SetSlice(names, len - 2, len, NULL); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 1 20:21:12 2012 From: python-checkins at python.org (hynek.schlawack) Date: Fri, 01 Jun 2012 20:21:12 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_=2314814=3A_Fix_errror_mess?= =?utf8?q?age_creation_in_ipaddress=2Ecollapse=5Faddresses?= Message-ID: http://hg.python.org/cpython/rev/000cc4e0e1cd changeset: 77284:000cc4e0e1cd user: Hynek Schlawack date: Fri Jun 01 20:12:17 2012 +0200 summary: #14814: Fix errror message creation in ipaddress.collapse_addresses files: Lib/ipaddress.py | 2 +- Lib/test/test_ipaddress.py | 15 +++++++++++---- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -359,7 +359,7 @@ else: if nets and nets[-1]._version != ip._version: raise TypeError("%s and %s are not of the same version" % ( - str(ip), str(ips[-1]))) + str(ip), str(nets[-1]))) nets.append(ip) # sort and dedup diff --git a/Lib/test/test_ipaddress.py b/Lib/test/test_ipaddress.py --- a/Lib/test/test_ipaddress.py +++ b/Lib/test/test_ipaddress.py @@ -596,10 +596,17 @@ self.assertEqual(list(collapsed), [ip3]) # the toejam test - ip1 = ipaddress.ip_address('1.1.1.1') - ip2 = ipaddress.ip_address('::1') - self.assertRaises(TypeError, ipaddress.collapse_addresses, - [ip1, ip2]) + addr_tuples = [ + (ipaddress.ip_address('1.1.1.1'), + ipaddress.ip_address('::1')), + (ipaddress.IPv4Network('1.1.0.0/24'), + ipaddress.IPv6Network('2001::/120')), + (ipaddress.IPv4Network('1.1.0.0/32'), + ipaddress.IPv6Network('2001::/128')), + ] + for ip1, ip2 in addr_tuples: + self.assertRaises(TypeError, ipaddress.collapse_addresses, + [ip1, ip2]) def testSummarizing(self): #ip = ipaddress.ip_address -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 1 20:26:30 2012 From: python-checkins at python.org (sandro.tosi) Date: Fri, 01 Jun 2012 20:26:30 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogSXNzdWUgIzE0OTY4?= =?utf8?q?=3A_set_=27Inplace_Operators=27_as_subsection=3B_patch_by_Lars_B?= =?utf8?q?uitinck?= Message-ID: http://hg.python.org/cpython/rev/bf6305bce3af changeset: 77285:bf6305bce3af branch: 3.2 parent: 77266:9d0c3a835bfe user: Sandro Tosi date: Fri Jun 01 20:23:20 2012 +0200 summary: Issue #14968: set 'Inplace Operators' as subsection; patch by Lars Buitinck files: Doc/library/operator.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/operator.rst b/Doc/library/operator.rst --- a/Doc/library/operator.rst +++ b/Doc/library/operator.rst @@ -404,7 +404,7 @@ +-----------------------+-------------------------+---------------------------------------+ Inplace Operators -================= +----------------- Many operations have an "in-place" version. 
Listed below are functions providing a more primitive access to in-place operators than the usual syntax -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 1 20:26:31 2012 From: python-checkins at python.org (sandro.tosi) Date: Fri, 01 Jun 2012 20:26:31 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Issue_=2314968=3A_merge_with_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/7c9702b08bfb changeset: 77286:7c9702b08bfb parent: 77283:2285a82504dc parent: 77285:bf6305bce3af user: Sandro Tosi date: Fri Jun 01 20:23:46 2012 +0200 summary: Issue #14968: merge with 3.2 files: Doc/library/operator.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/operator.rst b/Doc/library/operator.rst --- a/Doc/library/operator.rst +++ b/Doc/library/operator.rst @@ -404,7 +404,7 @@ +-----------------------+-------------------------+---------------------------------------+ Inplace Operators -================= +----------------- Many operations have an "in-place" version. Listed below are functions providing a more primitive access to in-place operators than the usual syntax -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 1 20:26:32 2012 From: python-checkins at python.org (sandro.tosi) Date: Fri, 01 Jun 2012 20:26:32 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_merge_heads?= Message-ID: http://hg.python.org/cpython/rev/d7c4089e9637 changeset: 77287:d7c4089e9637 parent: 77286:7c9702b08bfb parent: 77284:000cc4e0e1cd user: Sandro Tosi date: Fri Jun 01 20:25:36 2012 +0200 summary: merge heads files: Lib/ipaddress.py | 2 +- Lib/test/test_ipaddress.py | 15 +++++++++++---- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -359,7 +359,7 @@ else: if nets and nets[-1]._version != ip._version: raise TypeError("%s and %s are not of the same version" % ( - str(ip), str(ips[-1]))) + str(ip), str(nets[-1]))) nets.append(ip) # sort and dedup diff --git a/Lib/test/test_ipaddress.py b/Lib/test/test_ipaddress.py --- a/Lib/test/test_ipaddress.py +++ b/Lib/test/test_ipaddress.py @@ -596,10 +596,17 @@ self.assertEqual(list(collapsed), [ip3]) # the toejam test - ip1 = ipaddress.ip_address('1.1.1.1') - ip2 = ipaddress.ip_address('::1') - self.assertRaises(TypeError, ipaddress.collapse_addresses, - [ip1, ip2]) + addr_tuples = [ + (ipaddress.ip_address('1.1.1.1'), + ipaddress.ip_address('::1')), + (ipaddress.IPv4Network('1.1.0.0/24'), + ipaddress.IPv6Network('2001::/120')), + (ipaddress.IPv4Network('1.1.0.0/32'), + ipaddress.IPv6Network('2001::/128')), + ] + for ip1, ip2 in addr_tuples: + self.assertRaises(TypeError, ipaddress.collapse_addresses, + [ip1, ip2]) def testSummarizing(self): #ip = ipaddress.ip_address -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 1 22:21:22 2012 From: python-checkins at python.org (r.david.murray) Date: Fri, 01 Jun 2012 22:21:22 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogIzE0OTU3OiBjbGFy?= =?utf8?q?ify_splitlines_docs=2E?= Message-ID: http://hg.python.org/cpython/rev/24572015e24f changeset: 77288:24572015e24f branch: 3.2 parent: 77285:bf6305bce3af user: R David Murray date: Fri Jun 01 16:19:36 2012 -0400 summary: #14957: clarify splitlines docs. Initial patch by Michael Driscoll, I added the example. 
files: Doc/library/stdtypes.rst | 8 +++++++- 1 files changed, 7 insertions(+), 1 deletions(-) diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -1329,7 +1329,13 @@ Return a list of the lines in the string, breaking at line boundaries. Line breaks are not included in the resulting list unless *keepends* is given and - true. + true. This method uses the universal newlines approach to splitting lines. + Unlike :meth:`~str.split`, if the string ends with line boundary characters + the returned list does ``not`` have an empty last element. + + For example, ``'ab c\n\nde fg\rkl\r\n'.splitlines()`` returns + ``['ab c', '', 'de fg', 'kl']``, while the same call with ``splinelines(True)`` + returns ``['ab c\n', '\n, 'de fg\r', 'kl\r\n']``. .. method:: str.startswith(prefix[, start[, end]]) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 1 22:21:23 2012 From: python-checkins at python.org (r.david.murray) Date: Fri, 01 Jun 2012 22:21:23 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_=2314957=3A_clarify_splitlines_docs=2E?= Message-ID: http://hg.python.org/cpython/rev/2a43088318ed changeset: 77289:2a43088318ed parent: 77287:d7c4089e9637 parent: 77288:24572015e24f user: R David Murray date: Fri Jun 01 16:20:26 2012 -0400 summary: #14957: clarify splitlines docs. Initial patch by Michael Driscoll, I added the example. files: Doc/library/stdtypes.rst | 8 +++++++- 1 files changed, 7 insertions(+), 1 deletions(-) diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -1353,7 +1353,13 @@ Return a list of the lines in the string, breaking at line boundaries. Line breaks are not included in the resulting list unless *keepends* is given and - true. + true. This method uses the universal newlines approach to splitting lines. + Unlike :meth:`~str.split`, if the string ends with line boundary characters + the returned list does ``not`` have an empty last element. + + For example, ``'ab c\n\nde fg\rkl\r\n'.splitlines()`` returns + ``['ab c', '', 'de fg', 'kl']``, while the same call with ``splinelines(True)`` + returns ``['ab c\n', '\n, 'de fg\r', 'kl\r\n']``. .. method:: str.startswith(prefix[, start[, end]]) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 1 22:21:24 2012 From: python-checkins at python.org (r.david.murray) Date: Fri, 01 Jun 2012 22:21:24 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogIzE0OTU3OiBjbGFy?= =?utf8?q?ify_splitlines_docs=2E?= Message-ID: http://hg.python.org/cpython/rev/0df7594e4ebd changeset: 77290:0df7594e4ebd branch: 2.7 parent: 77265:1a4e99460438 user: R David Murray date: Fri Jun 01 16:21:06 2012 -0400 summary: #14957: clarify splitlines docs. Initial patch by Michael Driscoll, I added the example. files: Doc/library/stdtypes.rst | 8 +++++++- 1 files changed, 7 insertions(+), 1 deletions(-) diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -1185,7 +1185,13 @@ Return a list of the lines in the string, breaking at line boundaries. Line breaks are not included in the resulting list unless *keepends* is given and - true. + true. This method uses the universal newlines approach to splitting lines. 
+ Unlike :meth:`~str.split`, if the string ends with line boundary characters + the returned list does ``not`` have an empty last element. + + For example, ``'ab c\n\nde fg\rkl\r\n'.splitlines()`` returns + ``['ab c', '', 'de fg', 'kl']``, while the same call with ``splinelines(True)`` + returns ``['ab c\n', '\n, 'de fg\r', 'kl\r\n']``. .. method:: str.startswith(prefix[, start[, end]]) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 1 22:47:26 2012 From: python-checkins at python.org (victor.stinner) Date: Fri, 01 Jun 2012 22:47:26 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_sporadic_failure_of_tes?= =?utf8?q?t=5Ftime=2Etest=5Fprocess=5Ftime=28=29_on_Windows?= Message-ID: http://hg.python.org/cpython/rev/bdc7ad1f05ef changeset: 77291:bdc7ad1f05ef parent: 77289:2a43088318ed user: Victor Stinner date: Fri Jun 01 22:45:23 2012 +0200 summary: Fix sporadic failure of test_time.test_process_time() on Windows Use a threshold of 20 ms instead of 10 ms. files: Lib/test/test_time.py | 7 +++++-- 1 files changed, 5 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_time.py b/Lib/test/test_time.py --- a/Lib/test/test_time.py +++ b/Lib/test/test_time.py @@ -380,10 +380,13 @@ time.perf_counter() def test_process_time(self): + # process_time() should not include time spend during a sleep start = time.process_time() - time.sleep(0.1) + time.sleep(0.100) stop = time.process_time() - self.assertLess(stop - start, 0.01) + # use 20 ms because process_time() has usually a resolution of 15 ms + # on Windows + self.assertLess(stop - start, 0.020) info = time.get_clock_info('process_time') self.assertTrue(info.monotonic) -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Sat Jun 2 05:53:13 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Sat, 02 Jun 2012 05:53:13 +0200 Subject: [Python-checkins] Daily reference leaks (bdc7ad1f05ef): sum=465 Message-ID: results for bdc7ad1f05ef on branch "default" -------------------------------------------- test_smtplib leaked [154, 154, 154] references, sum=462 test_super leaked [1, 1, 1] references, sum=3 Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogHoFJ_w', '-x'] From python-checkins at python.org Sat Jun 2 07:43:16 2012 From: python-checkins at python.org (raymond.hettinger) Date: Sat, 02 Jun 2012 07:43:16 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Improve_tooltip?= =?utf8?q?s_for_splitlines=28=29_by_showing_that_the_default_for_keepends_?= =?utf8?q?is?= Message-ID: http://hg.python.org/cpython/rev/2fbee0b741f7 changeset: 77292:2fbee0b741f7 branch: 2.7 parent: 77290:0df7594e4ebd user: Raymond Hettinger date: Sat Jun 02 01:42:58 2012 -0400 summary: Improve tooltips for splitlines() by showing that the default for keepends is False. 
files: Objects/bytearrayobject.c | 2 +- Objects/stringobject.c | 2 +- Objects/unicodeobject.c | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Objects/bytearrayobject.c b/Objects/bytearrayobject.c --- a/Objects/bytearrayobject.c +++ b/Objects/bytearrayobject.c @@ -2649,7 +2649,7 @@ } PyDoc_STRVAR(splitlines__doc__, -"B.splitlines([keepends]) -> list of lines\n\ +"B.splitlines(keepends=False) -> list of lines\n\ \n\ Return a list of the lines in B, breaking at line boundaries.\n\ Line breaks are not included in the resulting list unless keepends\n\ diff --git a/Objects/stringobject.c b/Objects/stringobject.c --- a/Objects/stringobject.c +++ b/Objects/stringobject.c @@ -3545,7 +3545,7 @@ PyDoc_STRVAR(splitlines__doc__, -"S.splitlines([keepends]) -> list of strings\n\ +"S.splitlines(keepends=False) -> list of strings\n\ \n\ Return a list of the lines in S, breaking at line boundaries.\n\ Line breaks are not included in the resulting list unless keepends\n\ diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -7521,7 +7521,7 @@ } PyDoc_STRVAR(splitlines__doc__, - "S.splitlines([keepends]) -> list of strings\n\ + "S.splitlines(keepends=False) -> list of strings\n\ \n\ Return a list of the lines in S, breaking at line boundaries.\n\ Line breaks are not included in the resulting list unless keepends\n\ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 2 08:58:08 2012 From: python-checkins at python.org (benjamin.peterson) Date: Sat, 02 Jun 2012 08:58:08 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_don=27t_leak_if?= =?utf8?q?_the_=5F=5Fclass=5F=5F_closure_is_set?= Message-ID: http://hg.python.org/cpython/rev/ba01cf9a8578 changeset: 77293:ba01cf9a8578 branch: 3.2 parent: 77288:24572015e24f user: Benjamin Peterson date: Fri Jun 01 23:57:36 2012 -0700 summary: don't leak if the __class__ closure is set files: Python/bltinmodule.c | 6 ++---- 1 files changed, 2 insertions(+), 4 deletions(-) diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -158,10 +158,8 @@ cls = PyEval_CallObjectWithKeywords(meta, margs, mkw); Py_DECREF(margs); } - if (cls != NULL && PyCell_Check(cell)) { - Py_INCREF(cls); - PyCell_SET(cell, cls); - } + if (cls != NULL && PyCell_Check(cell)) + PyCell_Set(cell, cls); Py_DECREF(cell); } Py_DECREF(ns); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 2 08:58:09 2012 From: python-checkins at python.org (benjamin.peterson) Date: Sat, 02 Jun 2012 08:58:09 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_merge_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/56650edb95a2 changeset: 77294:56650edb95a2 parent: 77291:bdc7ad1f05ef parent: 77293:ba01cf9a8578 user: Benjamin Peterson date: Fri Jun 01 23:57:50 2012 -0700 summary: merge 3.2 files: Python/bltinmodule.c | 6 ++---- 1 files changed, 2 insertions(+), 4 deletions(-) diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -163,10 +163,8 @@ cls = PyEval_CallObjectWithKeywords(meta, margs, mkw); Py_DECREF(margs); } - if (cls != NULL && PyCell_Check(cell)) { - Py_INCREF(cls); - PyCell_SET(cell, cls); - } + if (cls != NULL && PyCell_Check(cell)) + PyCell_Set(cell, cls); Py_DECREF(cell); } Py_DECREF(ns); -- Repository URL: http://hg.python.org/cpython From python-checkins at 
python.org Sat Jun 2 17:17:47 2012 From: python-checkins at python.org (sandro.tosi) Date: Sat, 02 Jun 2012 17:17:47 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314814=3A_minor_spe?= =?utf8?q?lling_fixes?= Message-ID: http://hg.python.org/cpython/rev/facdca62aa68 changeset: 77295:facdca62aa68 user: Sandro Tosi date: Sat Jun 02 17:14:22 2012 +0200 summary: Issue #14814: minor spelling fixes files: Lib/ipaddress.py | 8 ++++---- 1 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -143,7 +143,7 @@ """Represent an address as 16 packed bytes in network (big-endian) order. Args: - address: An integer representation of an IPv4 IP address. + address: An integer representation of an IPv6 IP address. Returns: The integer address packed as 16 bytes in network (big-endian) order. @@ -1181,7 +1181,7 @@ IPv4Address('192.0.2.1') Raises: - AddressValueError: If ipaddressisn't a valid IPv4 address. + AddressValueError: If ipaddress isn't a valid IPv4 address. """ _BaseAddress.__init__(self, address) @@ -1366,10 +1366,10 @@ IPv4Interface('192.0.2.1') Raises: - AddressValueError: If ipaddressisn't a valid IPv4 address. + AddressValueError: If ipaddress isn't a valid IPv4 address. NetmaskValueError: If the netmask isn't valid for an IPv4 address. - ValueError: If strict was True and a network address was not + ValueError: If strict is True and a network address is not supplied. """ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 2 17:17:48 2012 From: python-checkins at python.org (sandro.tosi) Date: Sat, 02 Jun 2012 17:17:48 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314814=3A_use_print?= =?utf8?q?=28=29_function?= Message-ID: http://hg.python.org/cpython/rev/4b4044292d09 changeset: 77296:4b4044292d09 user: Sandro Tosi date: Sat Jun 02 17:16:33 2012 +0200 summary: Issue #14814: use print() function files: Doc/howto/ipaddress.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/howto/ipaddress.rst b/Doc/howto/ipaddress.rst --- a/Doc/howto/ipaddress.rst +++ b/Doc/howto/ipaddress.rst @@ -288,4 +288,4 @@ try: ipaddress.IPv4Address(address) except ValueError: - print 'address/netmask is invalid: %s' % address + print('address/netmask is invalid:', address) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 2 17:21:48 2012 From: python-checkins at python.org (r.david.murray) Date: Sat, 02 Jun 2012 17:21:48 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogIzE0OTU3OiBmaXgg?= =?utf8?q?doc_typo=2E?= Message-ID: http://hg.python.org/cpython/rev/4d9b3a58e208 changeset: 77297:4d9b3a58e208 branch: 3.2 parent: 77293:ba01cf9a8578 user: R David Murray date: Sat Jun 02 11:20:29 2012 -0400 summary: #14957: fix doc typo. files: Doc/library/stdtypes.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -1334,7 +1334,7 @@ the returned list does ``not`` have an empty last element. For example, ``'ab c\n\nde fg\rkl\r\n'.splitlines()`` returns - ``['ab c', '', 'de fg', 'kl']``, while the same call with ``splinelines(True)`` + ``['ab c', '', 'de fg', 'kl']``, while the same call with ``splitlines(True)`` returns ``['ab c\n', '\n, 'de fg\r', 'kl\r\n']``. 
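A quick illustrative interpreter session (not part of any patch) showing the splitlines() behaviour that the #14957 doc changes above describe; the semantics are the same for str on 2.7 and 3.x:

>>> s = 'ab c\n\nde fg\rkl\r\n'
>>> s.splitlines()                 # boundaries removed, no empty last element
['ab c', '', 'de fg', 'kl']
>>> s.splitlines(True)             # keepends=True preserves the boundary characters
['ab c\n', '\n', 'de fg\r', 'kl\r\n']
>>> 'one\ntwo\n'.split('\n')       # str.split, by contrast, yields a trailing ''
['one', 'two', '']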
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 2 17:21:48 2012 From: python-checkins at python.org (r.david.murray) Date: Sat, 02 Jun 2012 17:21:48 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_=2314957=3A_fix_doc_typo=2E?= Message-ID: http://hg.python.org/cpython/rev/3bb35ad5d9da changeset: 77298:3bb35ad5d9da parent: 77296:4b4044292d09 parent: 77297:4d9b3a58e208 user: R David Murray date: Sat Jun 02 11:20:53 2012 -0400 summary: #14957: fix doc typo. files: Doc/library/stdtypes.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -1358,7 +1358,7 @@ the returned list does ``not`` have an empty last element. For example, ``'ab c\n\nde fg\rkl\r\n'.splitlines()`` returns - ``['ab c', '', 'de fg', 'kl']``, while the same call with ``splinelines(True)`` + ``['ab c', '', 'de fg', 'kl']``, while the same call with ``splitlines(True)`` returns ``['ab c\n', '\n, 'de fg\r', 'kl\r\n']``. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 2 17:21:49 2012 From: python-checkins at python.org (r.david.murray) Date: Sat, 02 Jun 2012 17:21:49 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogIzE0OTU3OiBmaXgg?= =?utf8?q?doc_typo=2E?= Message-ID: http://hg.python.org/cpython/rev/48564362b687 changeset: 77299:48564362b687 branch: 2.7 parent: 77292:2fbee0b741f7 user: R David Murray date: Sat Jun 02 11:21:31 2012 -0400 summary: #14957: fix doc typo. files: Doc/library/stdtypes.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -1190,7 +1190,7 @@ the returned list does ``not`` have an empty last element. For example, ``'ab c\n\nde fg\rkl\r\n'.splitlines()`` returns - ``['ab c', '', 'de fg', 'kl']``, while the same call with ``splinelines(True)`` + ``['ab c', '', 'de fg', 'kl']``, while the same call with ``splitlines(True)`` returns ``['ab c\n', '\n, 'de fg\r', 'kl\r\n']``. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 2 18:23:28 2012 From: python-checkins at python.org (sandro.tosi) Date: Sat, 02 Jun 2012 18:23:28 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_refer_to_time?= =?utf8?q?=2Estrftime?= Message-ID: http://hg.python.org/cpython/rev/01ab23aadca3 changeset: 77300:01ab23aadca3 branch: 2.7 user: Sandro Tosi date: Sat Jun 02 18:21:06 2012 +0200 summary: refer to time.strftime files: Doc/library/locale.rst | 16 ++++++++-------- 1 files changed, 8 insertions(+), 8 deletions(-) diff --git a/Doc/library/locale.rst b/Doc/library/locale.rst --- a/Doc/library/locale.rst +++ b/Doc/library/locale.rst @@ -164,22 +164,22 @@ .. data:: D_T_FMT - Get a string that can be used as a format string for :func:`strftime` to + Get a string that can be used as a format string for :func:`time.strftime` to represent date and time in a locale-specific way. .. data:: D_FMT - Get a string that can be used as a format string for :func:`strftime` to + Get a string that can be used as a format string for :func:`time.strftime` to represent a date in a locale-specific way. .. 
data:: T_FMT - Get a string that can be used as a format string for :func:`strftime` to + Get a string that can be used as a format string for :func:`time.strftime` to represent a time in a locale-specific way. .. data:: T_FMT_AMPM - Get a format string for :func:`strftime` to represent time in the am/pm + Get a format string for :func:`time.strftime` to represent time in the am/pm format. .. data:: DAY_1 ... DAY_7 @@ -243,24 +243,24 @@ then-emperor's reign. Normally it should not be necessary to use this value directly. Specifying - the ``E`` modifier in their format strings causes the :func:`strftime` + the ``E`` modifier in their format strings causes the :func:`time.strftime` function to use this information. The format of the returned string is not specified, and therefore you should not assume knowledge of it on different systems. .. data:: ERA_D_T_FMT - Get a format string for :func:`strftime` to represent date and time in a + Get a format string for :func:`time.strftime` to represent date and time in a locale-specific era-based way. .. data:: ERA_D_FMT - Get a format string for :func:`strftime` to represent a date in a + Get a format string for :func:`time.strftime` to represent a date in a locale-specific era-based way. .. data:: ERA_T_FMT - Get a format string for :func:`strftime` to represent a time in a + Get a format string for :func:`time.strftime` to represent a time in a locale-specific era-based way. .. data:: ALT_DIGITS -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 2 18:23:29 2012 From: python-checkins at python.org (sandro.tosi) Date: Sat, 02 Jun 2012 18:23:29 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_refer_to_time?= =?utf8?q?=2Estrftime?= Message-ID: http://hg.python.org/cpython/rev/c41779327bac changeset: 77301:c41779327bac branch: 3.2 parent: 77297:4d9b3a58e208 user: Sandro Tosi date: Sat Jun 02 18:22:02 2012 +0200 summary: refer to time.strftime files: Doc/library/locale.rst | 16 ++++++++-------- 1 files changed, 8 insertions(+), 8 deletions(-) diff --git a/Doc/library/locale.rst b/Doc/library/locale.rst --- a/Doc/library/locale.rst +++ b/Doc/library/locale.rst @@ -160,22 +160,22 @@ .. data:: D_T_FMT - Get a string that can be used as a format string for :func:`strftime` to + Get a string that can be used as a format string for :func:`time.strftime` to represent date and time in a locale-specific way. .. data:: D_FMT - Get a string that can be used as a format string for :func:`strftime` to + Get a string that can be used as a format string for :func:`time.strftime` to represent a date in a locale-specific way. .. data:: T_FMT - Get a string that can be used as a format string for :func:`strftime` to + Get a string that can be used as a format string for :func:`time.strftime` to represent a time in a locale-specific way. .. data:: T_FMT_AMPM - Get a format string for :func:`strftime` to represent time in the am/pm + Get a format string for :func:`time.strftime` to represent time in the am/pm format. .. data:: DAY_1 ... DAY_7 @@ -239,24 +239,24 @@ then-emperor's reign. Normally it should not be necessary to use this value directly. Specifying - the ``E`` modifier in their format strings causes the :func:`strftime` + the ``E`` modifier in their format strings causes the :func:`time.strftime` function to use this information. The format of the returned string is not specified, and therefore you should not assume knowledge of it on different systems. .. 
data:: ERA_D_T_FMT - Get a format string for :func:`strftime` to represent date and time in a + Get a format string for :func:`time.strftime` to represent date and time in a locale-specific era-based way. .. data:: ERA_D_FMT - Get a format string for :func:`strftime` to represent a date in a + Get a format string for :func:`time.strftime` to represent a date in a locale-specific era-based way. .. data:: ERA_T_FMT - Get a format string for :func:`strftime` to represent a time in a + Get a format string for :func:`time.strftime` to represent a time in a locale-specific era-based way. .. data:: ALT_DIGITS -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 2 18:23:30 2012 From: python-checkins at python.org (sandro.tosi) Date: Sat, 02 Jun 2012 18:23:30 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_merge_with_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/dac347701b4f changeset: 77302:dac347701b4f parent: 77298:3bb35ad5d9da parent: 77301:c41779327bac user: Sandro Tosi date: Sat Jun 02 18:22:31 2012 +0200 summary: merge with 3.2 files: Doc/library/locale.rst | 16 ++++++++-------- 1 files changed, 8 insertions(+), 8 deletions(-) diff --git a/Doc/library/locale.rst b/Doc/library/locale.rst --- a/Doc/library/locale.rst +++ b/Doc/library/locale.rst @@ -160,22 +160,22 @@ .. data:: D_T_FMT - Get a string that can be used as a format string for :func:`strftime` to + Get a string that can be used as a format string for :func:`time.strftime` to represent date and time in a locale-specific way. .. data:: D_FMT - Get a string that can be used as a format string for :func:`strftime` to + Get a string that can be used as a format string for :func:`time.strftime` to represent a date in a locale-specific way. .. data:: T_FMT - Get a string that can be used as a format string for :func:`strftime` to + Get a string that can be used as a format string for :func:`time.strftime` to represent a time in a locale-specific way. .. data:: T_FMT_AMPM - Get a format string for :func:`strftime` to represent time in the am/pm + Get a format string for :func:`time.strftime` to represent time in the am/pm format. .. data:: DAY_1 ... DAY_7 @@ -239,24 +239,24 @@ then-emperor's reign. Normally it should not be necessary to use this value directly. Specifying - the ``E`` modifier in their format strings causes the :func:`strftime` + the ``E`` modifier in their format strings causes the :func:`time.strftime` function to use this information. The format of the returned string is not specified, and therefore you should not assume knowledge of it on different systems. .. data:: ERA_D_T_FMT - Get a format string for :func:`strftime` to represent date and time in a + Get a format string for :func:`time.strftime` to represent date and time in a locale-specific era-based way. .. data:: ERA_D_FMT - Get a format string for :func:`strftime` to represent a date in a + Get a format string for :func:`time.strftime` to represent a date in a locale-specific era-based way. .. data:: ERA_T_FMT - Get a format string for :func:`strftime` to represent a time in a + Get a format string for :func:`time.strftime` to represent a time in a locale-specific era-based way. .. 
data:: ALT_DIGITS -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 2 19:40:42 2012 From: python-checkins at python.org (sandro.tosi) Date: Sat, 02 Jun 2012 19:40:42 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogSXNzdWUgIzE0OTI2?= =?utf8?q?=3A_fix_docstring_highlight?= Message-ID: http://hg.python.org/cpython/rev/e2739145657d changeset: 77303:e2739145657d branch: 3.2 parent: 77301:c41779327bac user: Sandro Tosi date: Sat Jun 02 19:40:02 2012 +0200 summary: Issue #14926: fix docstring highlight files: Lib/random.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/random.py b/Lib/random.py --- a/Lib/random.py +++ b/Lib/random.py @@ -96,7 +96,7 @@ None or no argument seeds from current time or from an operating system specific randomness source if available. - For version 2 (the default), all of the bits are used if *a *is a str, + For version 2 (the default), all of the bits are used if *a* is a str, bytes, or bytearray. For version 1, the hash() of *a* is used instead. If *a* is an int, all bits are used. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 2 19:40:43 2012 From: python-checkins at python.org (sandro.tosi) Date: Sat, 02 Jun 2012 19:40:43 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Issue_=2314926=3A_merge_with_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/29148c027986 changeset: 77304:29148c027986 parent: 77302:dac347701b4f parent: 77303:e2739145657d user: Sandro Tosi date: Sat Jun 02 19:40:20 2012 +0200 summary: Issue #14926: merge with 3.2 files: Lib/random.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/random.py b/Lib/random.py --- a/Lib/random.py +++ b/Lib/random.py @@ -96,7 +96,7 @@ None or no argument seeds from current time or from an operating system specific randomness source if available. - For version 2 (the default), all of the bits are used if *a *is a str, + For version 2 (the default), all of the bits are used if *a* is a str, bytes, or bytearray. For version 1, the hash() of *a* is used instead. If *a* is an int, all bits are used. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 2 23:43:34 2012 From: python-checkins at python.org (sandro.tosi) Date: Sat, 02 Jun 2012 23:43:34 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_backport_c4bd68?= =?utf8?q?be5fc6_to_2=2E7?= Message-ID: http://hg.python.org/cpython/rev/50d6592791bd changeset: 77305:50d6592791bd branch: 2.7 parent: 77300:01ab23aadca3 user: Sandro Tosi date: Sat Jun 02 23:40:59 2012 +0200 summary: backport c4bd68be5fc6 to 2.7 files: Doc/glossary.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/glossary.rst b/Doc/glossary.rst --- a/Doc/glossary.rst +++ b/Doc/glossary.rst @@ -200,7 +200,7 @@ An object exposing a file-oriented API (with methods such as :meth:`read()` or :meth:`write()`) to an underlying resource. Depending on the way it was created, a file object can mediate access to a real - on-disk file or to another other type of storage or communication device + on-disk file or to another type of storage or communication device (for example standard input/output, in-memory buffers, sockets, pipes, etc.). File objects are also called :dfn:`file-like objects` or :dfn:`streams`. 
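The glossary wording just above mentions in-memory buffers among the things a file object can wrap; a tiny illustration (not from the patch; Python 3 session) using io.StringIO, which exposes the same read/write/seek API as an on-disk file:

>>> import io
>>> buf = io.StringIO()            # in-memory text stream with the file-object API
>>> buf.write('spam\n')            # returns the number of characters written
5
>>> buf.seek(0)
0
>>> buf.read()
'spam\n'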
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 2 23:43:35 2012 From: python-checkins at python.org (sandro.tosi) Date: Sat, 02 Jun 2012 23:43:35 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_backport_c4bd68?= =?utf8?q?be5fc6_to_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/b17747289da1 changeset: 77306:b17747289da1 branch: 3.2 parent: 77303:e2739145657d user: Sandro Tosi date: Sat Jun 02 23:41:19 2012 +0200 summary: backport c4bd68be5fc6 to 3.2 files: Doc/glossary.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/glossary.rst b/Doc/glossary.rst --- a/Doc/glossary.rst +++ b/Doc/glossary.rst @@ -194,7 +194,7 @@ An object exposing a file-oriented API (with methods such as :meth:`read()` or :meth:`write()`) to an underlying resource. Depending on the way it was created, a file object can mediate access to a real - on-disk file or to another other type of storage or communication device + on-disk file or to another type of storage or communication device (for example standard input/output, in-memory buffers, sockets, pipes, etc.). File objects are also called :dfn:`file-like objects` or :dfn:`streams`. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 2 23:43:36 2012 From: python-checkins at python.org (sandro.tosi) Date: Sat, 02 Jun 2012 23:43:36 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_null_merge?= Message-ID: http://hg.python.org/cpython/rev/f9baf7ffff96 changeset: 77307:f9baf7ffff96 parent: 77304:29148c027986 parent: 77306:b17747289da1 user: Sandro Tosi date: Sat Jun 02 23:42:08 2012 +0200 summary: null merge files: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 2 23:57:10 2012 From: python-checkins at python.org (r.david.murray) Date: Sat, 02 Jun 2012 23:57:10 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_=231079=3A_Fix_parsing_of_e?= =?utf8?q?ncoded_words=2E?= Message-ID: http://hg.python.org/cpython/rev/8c03fe231877 changeset: 77308:8c03fe231877 user: R David Murray date: Sat Jun 02 17:56:49 2012 -0400 summary: #1079: Fix parsing of encoded words. This is a behavior change: before this leading and trailing spaces were stripped from ASCII parts, now they are preserved. Without this fix we didn't parse the examples in the RFC correctly, so I think breaking backward compatibility here is justified. Patch by Ralf Schlatterbeck. files: Lib/email/header.py | 45 ++++++- Lib/nntplib.py | 2 +- Lib/test/test_email/test_asian_codecs.py | 2 +- Lib/test/test_email/test_email.py | 81 +++++++++-- Misc/NEWS | 4 + 5 files changed, 114 insertions(+), 20 deletions(-) diff --git a/Lib/email/header.py b/Lib/email/header.py --- a/Lib/email/header.py +++ b/Lib/email/header.py @@ -40,7 +40,6 @@ \? # literal ? (?P.*?) 
# non-greedy up to the next ?= is the encoded string \?= # literal ?= - (?=[ \t]|$) # whitespace or the end of the string ''', re.VERBOSE | re.IGNORECASE | re.MULTILINE) # Field name regexp, including trailing colon, but not separating whitespace, @@ -86,8 +85,12 @@ words = [] for line in header.splitlines(): parts = ecre.split(line) + first = True while parts: - unencoded = parts.pop(0).strip() + unencoded = parts.pop(0) + if first: + unencoded = unencoded.lstrip() + first = False if unencoded: words.append((unencoded, None, None)) if parts: @@ -95,6 +98,16 @@ encoding = parts.pop(0).lower() encoded = parts.pop(0) words.append((encoded, encoding, charset)) + # Now loop over words and remove words that consist of whitespace + # between two encoded strings. + import sys + droplist = [] + for n, w in enumerate(words): + if n>1 and w[1] and words[n-2][1] and words[n-1][0].isspace(): + droplist.append(n-1) + for d in reversed(droplist): + del words[d] + # The next step is to decode each encoded word by applying the reverse # base64 or quopri transformation. decoded_words is now a list of the # form (decoded_word, charset). @@ -217,22 +230,27 @@ self._normalize() uchunks = [] lastcs = None + lastspace = None for string, charset in self._chunks: # We must preserve spaces between encoded and non-encoded word # boundaries, which means for us we need to add a space when we go # from a charset to None/us-ascii, or from None/us-ascii to a # charset. Only do this for the second and subsequent chunks. + # Don't add a space if the None/us-ascii string already has + # a space (trailing or leading depending on transition) nextcs = charset if nextcs == _charset.UNKNOWN8BIT: original_bytes = string.encode('ascii', 'surrogateescape') string = original_bytes.decode('ascii', 'replace') if uchunks: + hasspace = string and self._nonctext(string[0]) if lastcs not in (None, 'us-ascii'): - if nextcs in (None, 'us-ascii'): + if nextcs in (None, 'us-ascii') and not hasspace: uchunks.append(SPACE) nextcs = None - elif nextcs not in (None, 'us-ascii'): + elif nextcs not in (None, 'us-ascii') and not lastspace: uchunks.append(SPACE) + lastspace = string and self._nonctext(string[-1]) lastcs = nextcs uchunks.append(string) return EMPTYSTRING.join(uchunks) @@ -291,6 +309,11 @@ charset = UTF8 self._chunks.append((s, charset)) + def _nonctext(self, s): + """True if string s is not a ctext character of RFC822. + """ + return s.isspace() or s in ('(', ')', '\\') + def encode(self, splitchars=';, \t', maxlinelen=None, linesep='\n'): r"""Encode a message header into an RFC-compliant format. 
@@ -334,7 +357,20 @@ maxlinelen = 1000000 formatter = _ValueFormatter(self._headerlen, maxlinelen, self._continuation_ws, splitchars) + lastcs = None + hasspace = lastspace = None for string, charset in self._chunks: + if hasspace is not None: + hasspace = string and self._nonctext(string[0]) + import sys + if lastcs not in (None, 'us-ascii'): + if not hasspace or charset not in (None, 'us-ascii'): + formatter.add_transition() + elif charset not in (None, 'us-ascii') and not lastspace: + formatter.add_transition() + lastspace = string and self._nonctext(string[-1]) + lastcs = charset + hasspace = False lines = string.splitlines() if lines: formatter.feed('', lines[0], charset) @@ -351,6 +387,7 @@ formatter.feed(fws, sline, charset) if len(lines) > 1: formatter.newline() + if self._chunks: formatter.add_transition() value = formatter._str(linesep) if _embeded_header.search(value): diff --git a/Lib/nntplib.py b/Lib/nntplib.py --- a/Lib/nntplib.py +++ b/Lib/nntplib.py @@ -166,7 +166,7 @@ parts.append(v.decode(enc or 'ascii')) else: parts.append(v) - return ' '.join(parts) + return ''.join(parts) def _parse_overview_fmt(lines): """Parse a list of string representing the response to LIST OVERVIEW.FMT diff --git a/Lib/test/test_email/test_asian_codecs.py b/Lib/test/test_email/test_asian_codecs.py --- a/Lib/test/test_email/test_asian_codecs.py +++ b/Lib/test/test_email/test_asian_codecs.py @@ -41,7 +41,7 @@ Hello World! =?iso-2022-jp?b?GyRCJU8lbSE8JW8hPCVrJUkhKhsoQg==?= =?iso-8859-1?q?Gr=FC=DF_Gott!?=""") eq(decode_header(h.encode()), - [(b'Hello World!', None), + [(b'Hello World! ', None), (b'\x1b$B%O%m!<%o!<%k%I!*\x1b(B', 'iso-2022-jp'), (b'Gr\xfc\xdf Gott!', gcode)]) subject_bytes = (b'test-ja \xa4\xd8\xc5\xea\xb9\xc6\xa4\xb5' diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py --- a/Lib/test/test_email/test_email.py +++ b/Lib/test/test_email/test_email.py @@ -1994,9 +1994,9 @@ foo bar =?mac-iceland?q?r=8Aksm=9Arg=8Cs?=""" dh = decode_header(s) eq(dh, [ - (b'Re:', None), + (b'Re: ', None), (b'r\x8aksm\x9arg\x8cs', 'mac-iceland'), - (b'baz foo bar', None), + (b' baz foo bar ', None), (b'r\x8aksm\x9arg\x8cs', 'mac-iceland')]) header = make_header(dh) eq(str(header), @@ -2005,36 +2005,38 @@ Re: =?mac-iceland?q?r=8Aksm=9Arg=8Cs?= baz foo bar =?mac-iceland?q?r=8Aksm?= =?mac-iceland?q?=9Arg=8Cs?=""") - def test_whitespace_eater_unicode(self): + def test_whitespace_keeper_unicode(self): eq = self.assertEqual s = '=?ISO-8859-1?Q?Andr=E9?= Pirard ' dh = decode_header(s) eq(dh, [(b'Andr\xe9', 'iso-8859-1'), - (b'Pirard ', None)]) + (b' Pirard ', None)]) header = str(make_header(dh)) eq(header, 'Andr\xe9 Pirard ') - def test_whitespace_eater_unicode_2(self): + def test_whitespace_keeper_unicode_2(self): eq = self.assertEqual s = 'The =?iso-8859-1?b?cXVpY2sgYnJvd24gZm94?= jumped over the =?iso-8859-1?b?bGF6eSBkb2c=?=' dh = decode_header(s) - eq(dh, [(b'The', None), (b'quick brown fox', 'iso-8859-1'), - (b'jumped over the', None), (b'lazy dog', 'iso-8859-1')]) + eq(dh, [(b'The ', None), (b'quick brown fox', 'iso-8859-1'), + (b' jumped over the ', None), (b'lazy dog', 'iso-8859-1')]) hu = str(make_header(dh)) eq(hu, 'The quick brown fox jumped over the lazy dog') def test_rfc2047_missing_whitespace(self): s = 'Sm=?ISO-8859-1?B?9g==?=rg=?ISO-8859-1?B?5Q==?=sbord' dh = decode_header(s) - self.assertEqual(dh, [(s, None)]) - - def test_rfc2047_with_whitespace(self): - s = 'Sm =?ISO-8859-1?B?9g==?= rg =?ISO-8859-1?B?5Q==?= sbord' - dh = decode_header(s) self.assertEqual(dh, 
[(b'Sm', None), (b'\xf6', 'iso-8859-1'), (b'rg', None), (b'\xe5', 'iso-8859-1'), (b'sbord', None)]) + def test_rfc2047_with_whitespace(self): + s = 'Sm =?ISO-8859-1?B?9g==?= rg =?ISO-8859-1?B?5Q==?= sbord' + dh = decode_header(s) + self.assertEqual(dh, [(b'Sm ', None), (b'\xf6', 'iso-8859-1'), + (b' rg ', None), (b'\xe5', 'iso-8859-1'), + (b' sbord', None)]) + def test_rfc2047_B_bad_padding(self): s = '=?iso-8859-1?B?%s?=' data = [ # only test complete bytes @@ -2051,6 +2053,57 @@ self.assertEqual(decode_header(s), [(b'andr\xe9=zz', 'iso-8659-1')]) + def test_rfc2047_rfc2047_1(self): + # 1st testcase at end of rfc2047 + s = '(=?ISO-8859-1?Q?a?=)' + self.assertEqual(decode_header(s), + [(b'(', None), (b'a', 'iso-8859-1'), (b')', None)]) + + def test_rfc2047_rfc2047_2(self): + # 2nd testcase at end of rfc2047 + s = '(=?ISO-8859-1?Q?a?= b)' + self.assertEqual(decode_header(s), + [(b'(', None), (b'a', 'iso-8859-1'), (b' b)', None)]) + + def test_rfc2047_rfc2047_3(self): + # 3rd testcase at end of rfc2047 + s = '(=?ISO-8859-1?Q?a?= =?ISO-8859-1?Q?b?=)' + self.assertEqual(decode_header(s), + [(b'(', None), (b'ab', 'iso-8859-1'), (b')', None)]) + + def test_rfc2047_rfc2047_4(self): + # 4th testcase at end of rfc2047 + s = '(=?ISO-8859-1?Q?a?= =?ISO-8859-1?Q?b?=)' + self.assertEqual(decode_header(s), + [(b'(', None), (b'ab', 'iso-8859-1'), (b')', None)]) + + def test_rfc2047_rfc2047_5a(self): + # 5th testcase at end of rfc2047 newline is \r\n + s = '(=?ISO-8859-1?Q?a?=\r\n =?ISO-8859-1?Q?b?=)' + self.assertEqual(decode_header(s), + [(b'(', None), (b'ab', 'iso-8859-1'), (b')', None)]) + + def test_rfc2047_rfc2047_5b(self): + # 5th testcase at end of rfc2047 newline is \n + s = '(=?ISO-8859-1?Q?a?=\n =?ISO-8859-1?Q?b?=)' + self.assertEqual(decode_header(s), + [(b'(', None), (b'ab', 'iso-8859-1'), (b')', None)]) + + def test_rfc2047_rfc2047_6(self): + # 6th testcase at end of rfc2047 + s = '(=?ISO-8859-1?Q?a_b?=)' + self.assertEqual(decode_header(s), + [(b'(', None), (b'a b', 'iso-8859-1'), (b')', None)]) + + def test_rfc2047_rfc2047_7(self): + # 7th testcase at end of rfc2047 + s = '(=?ISO-8859-1?Q?a?= =?ISO-8859-2?Q?_b?=)' + self.assertEqual(decode_header(s), + [(b'(', None), (b'a', 'iso-8859-1'), (b' b', 'iso-8859-2'), + (b')', None)]) + self.assertEqual(make_header(decode_header(s)).encode(), s.lower()) + self.assertEqual(str(make_header(decode_header(s))), '(a b)') + # Test the MIMEMessage class class TestMIMEMessage(TestEmailBase): @@ -4388,11 +4441,11 @@ h = make_header(decode_header(s)) eq(h.encode(), s) - def test_whitespace_eater(self): + def test_whitespace_keeper(self): eq = self.assertEqual s = 'Subject: =?koi8-r?b?8NLP18XSy8EgzsEgxsnOwczYztk=?= =?koi8-r?q?=CA?= zz.' parts = decode_header(s) - eq(parts, [(b'Subject:', None), (b'\xf0\xd2\xcf\xd7\xc5\xd2\xcb\xc1 \xce\xc1 \xc6\xc9\xce\xc1\xcc\xd8\xce\xd9\xca', 'koi8-r'), (b'zz.', None)]) + eq(parts, [(b'Subject: ', None), (b'\xf0\xd2\xcf\xd7\xc5\xd2\xcb\xc1 \xce\xc1 \xc6\xc9\xce\xc1\xcc\xd8\xce\xd9\xca', 'koi8-r'), (b' zz.', None)]) hdr = make_header(parts) eq(hdr.encode(), 'Subject: =?koi8-r?b?8NLP18XSy8EgzsEgxsnOwczYztnK?= zz.') diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,10 @@ Library ------- +- Issue #1079: email.header.decode_header now correctly parses all the examples + in RFC2047. There is a necessary visible behavior change: the leading and/or + trailing whitespace on ASCII parts is now preserved. 
+ - Issue #14969: Better handling of exception chaining in contextlib.ExitStack - Issue #14962: Update text coloring in IDLE shell window after changing -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 02:25:06 2012 From: python-checkins at python.org (terry.reedy) Date: Sun, 03 Jun 2012 02:25:06 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Issue_10365=3A_?= =?utf8?q?Add_and_replace_comments=3B_condense_defaulted_attribute_access?= =?utf8?q?=2E?= Message-ID: http://hg.python.org/cpython/rev/5b267381eea0 changeset: 77309:5b267381eea0 branch: 2.7 parent: 77305:50d6592791bd user: Terry Jan Reedy date: Sat Jun 02 20:22:35 2012 -0400 summary: Issue 10365: Add and replace comments; condense defaulted attribute access. Code patch by Roger Serwy. files: Lib/idlelib/IOBinding.py | 29 ++++++++++++--------------- 1 files changed, 13 insertions(+), 16 deletions(-) diff --git a/Lib/idlelib/IOBinding.py b/Lib/idlelib/IOBinding.py --- a/Lib/idlelib/IOBinding.py +++ b/Lib/idlelib/IOBinding.py @@ -197,35 +197,32 @@ def open(self, event=None, editFile=None): flist = self.editwin.flist + # Save in case parent window is closed (ie, during askopenfile()). if flist: if not editFile: filename = self.askopenfile() else: filename=editFile if filename: - # If the current window has no filename and hasn't been - # modified, we replace its contents (no loss). Otherwise - # we open a new window. But we won't replace the - # shell window (which has an interp(reter) attribute), which - # gets set to "not modified" at every new prompt. - # Also, make sure the current window has not been closed, - # since it can be closed during the Open File dialog. - try: - interp = self.editwin.interp - except AttributeError: - interp = None - - if self.editwin and not self.filename and \ - self.get_saved() and not interp: + # If editFile is valid and already open, flist.open will + # shift focus to its existing window. + # If the current window exists and is a fresh unnamed, + # unmodified editor window (not an interpreter shell), + # pass self.loadfile to flist.open so it will load the file + # in the current window (if the file is not already open) + # instead of a new window. + if (self.editwin and + not getattr(self.editwin, 'interp', None) and + not self.filename and + self.get_saved()): flist.open(filename, self.loadfile) else: flist.open(filename) else: if self.text: self.text.focus_set() + return "break" - return "break" - # # Code for use outside IDLE: if self.get_saved(): reply = self.maybesave() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 02:25:10 2012 From: python-checkins at python.org (terry.reedy) Date: Sun, 03 Jun 2012 02:25:10 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Issue_10365=3A_?= =?utf8?q?Add_and_replace_comments=3B_condense_defaulted_attribute_access?= =?utf8?q?=2E?= Message-ID: http://hg.python.org/cpython/rev/4f3d4ce8ac9f changeset: 77310:4f3d4ce8ac9f branch: 3.2 parent: 77306:b17747289da1 user: Terry Jan Reedy date: Sat Jun 02 20:22:58 2012 -0400 summary: Issue 10365: Add and replace comments; condense defaulted attribute access. Code patch by Roger Serwy. 
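The "condense defaulted attribute access" part of this change replaces a try/except AttributeError block with a getattr() call that supplies a default; schematically (illustrative sketch only, attribute names shortened from the patch):

    # before
    try:
        interp = editwin.interp
    except AttributeError:
        interp = None

    # after: one expression, same result
    interp = getattr(editwin, 'interp', None)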
files: Lib/idlelib/IOBinding.py | 29 ++++++++++++--------------- 1 files changed, 13 insertions(+), 16 deletions(-) diff --git a/Lib/idlelib/IOBinding.py b/Lib/idlelib/IOBinding.py --- a/Lib/idlelib/IOBinding.py +++ b/Lib/idlelib/IOBinding.py @@ -157,35 +157,32 @@ def open(self, event=None, editFile=None): flist = self.editwin.flist + # Save in case parent window is closed (ie, during askopenfile()). if flist: if not editFile: filename = self.askopenfile() else: filename=editFile if filename: - # If the current window has no filename and hasn't been - # modified, we replace its contents (no loss). Otherwise - # we open a new window. But we won't replace the - # shell window (which has an interp(reter) attribute), which - # gets set to "not modified" at every new prompt. - # Also, make sure the current window has not been closed, - # since it can be closed during the Open File dialog. - try: - interp = self.editwin.interp - except AttributeError: - interp = None - - if self.editwin and not self.filename and \ - self.get_saved() and not interp: + # If editFile is valid and already open, flist.open will + # shift focus to its existing window. + # If the current window exists and is a fresh unnamed, + # unmodified editor window (not an interpreter shell), + # pass self.loadfile to flist.open so it will load the file + # in the current window (if the file is not already open) + # instead of a new window. + if (self.editwin and + not getattr(self.editwin, 'interp', None) and + not self.filename and + self.get_saved()): flist.open(filename, self.loadfile) else: flist.open(filename) else: if self.text: self.text.focus_set() + return "break" - return "break" - # # Code for use outside IDLE: if self.get_saved(): reply = self.maybesave() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 02:25:11 2012 From: python-checkins at python.org (terry.reedy) Date: Sun, 03 Jun 2012 02:25:11 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_with_3=2E2_=2310365?= Message-ID: http://hg.python.org/cpython/rev/d9b7399d9e45 changeset: 77311:d9b7399d9e45 parent: 77308:8c03fe231877 parent: 77310:4f3d4ce8ac9f user: Terry Jan Reedy date: Sat Jun 02 20:24:21 2012 -0400 summary: Merge with 3.2 #10365 files: Lib/idlelib/IOBinding.py | 29 ++++++++++++--------------- 1 files changed, 13 insertions(+), 16 deletions(-) diff --git a/Lib/idlelib/IOBinding.py b/Lib/idlelib/IOBinding.py --- a/Lib/idlelib/IOBinding.py +++ b/Lib/idlelib/IOBinding.py @@ -157,35 +157,32 @@ def open(self, event=None, editFile=None): flist = self.editwin.flist + # Save in case parent window is closed (ie, during askopenfile()). if flist: if not editFile: filename = self.askopenfile() else: filename=editFile if filename: - # If the current window has no filename and hasn't been - # modified, we replace its contents (no loss). Otherwise - # we open a new window. But we won't replace the - # shell window (which has an interp(reter) attribute), which - # gets set to "not modified" at every new prompt. - # Also, make sure the current window has not been closed, - # since it can be closed during the Open File dialog. - try: - interp = self.editwin.interp - except AttributeError: - interp = None - - if self.editwin and not self.filename and \ - self.get_saved() and not interp: + # If editFile is valid and already open, flist.open will + # shift focus to its existing window. 
+ # If the current window exists and is a fresh unnamed, + # unmodified editor window (not an interpreter shell), + # pass self.loadfile to flist.open so it will load the file + # in the current window (if the file is not already open) + # instead of a new window. + if (self.editwin and + not getattr(self.editwin, 'interp', None) and + not self.filename and + self.get_saved()): flist.open(filename, self.loadfile) else: flist.open(filename) else: if self.text: self.text.focus_set() + return "break" - return "break" - # # Code for use outside IDLE: if self.get_saved(): reply = self.maybesave() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 04:28:49 2012 From: python-checkins at python.org (brett.cannon) Date: Sun, 03 Jun 2012 04:28:49 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314987=3A_Add_a_mis?= =?utf8?q?sing_import_statement?= Message-ID: http://hg.python.org/cpython/rev/3de5b053d924 changeset: 77312:3de5b053d924 user: Brett Cannon date: Sat Jun 02 22:28:42 2012 -0400 summary: Issue #14987: Add a missing import statement files: Lib/inspect.py | 1 + Misc/NEWS | 2 ++ 2 files changed, 3 insertions(+), 0 deletions(-) diff --git a/Lib/inspect.py b/Lib/inspect.py --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -38,6 +38,7 @@ import sys import tokenize import types +import warnings from operator import attrgetter from collections import namedtuple diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,8 @@ Library ------- +- Issue #14987: Add a missing import statement to inspect. + - Issue #1079: email.header.decode_header now correctly parses all the examples in RFC2047. There is a necessary visible behavior change: the leading and/or trailing whitespace on ASCII parts is now preserved. 
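The #1079 entry quoted just above changes how email.header.decode_header splits RFC 2047 encoded words; a short sketch of the new behaviour (Python 3 session; the first and last results mirror test cases added by that patch, the middle call is an extra illustration):

>>> from email.header import decode_header, make_header
>>> s = '(=?ISO-8859-1?Q?a?= =?ISO-8859-2?Q?_b?=)'
>>> decode_header(s)
[(b'(', None), (b'a', 'iso-8859-1'), (b' b', 'iso-8859-2'), (b')', None)]
>>> decode_header('=?ISO-8859-1?Q?Andr=E9?= Pirard')   # leading space on the ASCII part is now kept
[(b'Andr\xe9', 'iso-8859-1'), (b' Pirard', None)]
>>> str(make_header(decode_header(s)))
'(a b)'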
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 05:09:47 2012 From: python-checkins at python.org (eli.bendersky) Date: Sun, 03 Jun 2012 05:09:47 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_unterminated_keyword_ar?= =?utf8?q?ray_passed_to_PyArg=5FParseTupleAndKeywords?= Message-ID: http://hg.python.org/cpython/rev/eb1d633fe307 changeset: 77313:eb1d633fe307 user: Eli Bendersky date: Sun Jun 03 06:09:42 2012 +0300 summary: Fix unterminated keyword array passed to PyArg_ParseTupleAndKeywords files: Modules/_elementtree.c | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c --- a/Modules/_elementtree.c +++ b/Modules/_elementtree.c @@ -1855,7 +1855,7 @@ static int treebuilder_init(PyObject *self, PyObject *args, PyObject *kwds) { - static char *kwlist[] = {"element_factory", NULL}; + static char *kwlist[] = {"element_factory", 0}; PyObject *element_factory = NULL; TreeBuilderObject *self_tb = (TreeBuilderObject *)self; @@ -2762,7 +2762,7 @@ XMLParserObject *self_xp = (XMLParserObject *)self; PyObject *target = NULL, *html = NULL; char *encoding = NULL; - static char *kwlist[] = {"html", "target", "encoding"}; + static char *kwlist[] = {"html", "target", "encoding", 0}; if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOz:XMLParser", kwlist, &html, &target, &encoding)) { -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 05:48:00 2012 From: python-checkins at python.org (eli.bendersky) Date: Sun, 03 Jun 2012 05:48:00 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314424=3A_Document_?= =?utf8?q?PyType=5FGenericAlloc=2C_and_fix_the_documentation_of?= Message-ID: http://hg.python.org/cpython/rev/3c43be281196 changeset: 77314:3c43be281196 user: Eli Bendersky date: Sun Jun 03 06:47:53 2012 +0300 summary: Issue #14424: Document PyType_GenericAlloc, and fix the documentation of PyType_GenericNew files: Doc/c-api/type.rst | 9 +++++---- 1 files changed, 5 insertions(+), 4 deletions(-) diff --git a/Doc/c-api/type.rst b/Doc/c-api/type.rst --- a/Doc/c-api/type.rst +++ b/Doc/c-api/type.rst @@ -70,13 +70,14 @@ .. c:function:: PyObject* PyType_GenericAlloc(PyTypeObject *type, Py_ssize_t nitems) - XXX: Document. - + Generic handler for the :attr:`tp_alloc` slot of a type object. Use + Python's default memory allocation mechanism to allocate a new instance and + initialize all its contents to *NULL*. .. c:function:: PyObject* PyType_GenericNew(PyTypeObject *type, PyObject *args, PyObject *kwds) - Generic handler for the :attr:`tp_new` slot of a type object. Initialize - all instance variables to *NULL*. + Generic handler for the :attr:`tp_new` slot of a type object. Create a + new instance using the type's :attr:`tp_alloc` slot. .. 
c:function:: int PyType_Ready(PyTypeObject *type) -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Sun Jun 3 05:49:52 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Sun, 03 Jun 2012 05:49:52 +0200 Subject: [Python-checkins] Daily reference leaks (d9b7399d9e45): sum=462 Message-ID: results for d9b7399d9e45 on branch "default" -------------------------------------------- test_smtplib leaked [154, 154, 154] references, sum=462 Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogDoXFb_', '-x'] From benjamin at python.org Sun Jun 3 06:01:25 2012 From: benjamin at python.org (Benjamin Peterson) Date: Sat, 2 Jun 2012 21:01:25 -0700 Subject: [Python-checkins] Daily reference leaks (d9b7399d9e45): sum=462 In-Reply-To: References: Message-ID: 2012/6/2 : > results for d9b7399d9e45 on branch "default" > -------------------------------------------- > > test_smtplib leaked [154, 154, 154] references, sum=462 Can other people reproduce this one? I can't. -- Regards, Benjamin From eliben at gmail.com Sun Jun 3 06:28:10 2012 From: eliben at gmail.com (Eli Bendersky) Date: Sun, 3 Jun 2012 06:28:10 +0200 Subject: [Python-checkins] Daily reference leaks (d9b7399d9e45): sum=462 In-Reply-To: References: Message-ID: On Sun, Jun 3, 2012 at 6:01 AM, Benjamin Peterson wrote: > 2012/6/2 ?: >> results for d9b7399d9e45 on branch "default" >> -------------------------------------------- >> >> test_smtplib leaked [154, 154, 154] references, sum=462 > > Can other people reproduce this one? I can't. > I can't either: $ ./python -m test.regrtest -R : test_smtplib [1/1] test_smtplib beginning 9 repetitions 123456789 ......... 1 test OK. [172101 refs] (Ubuntu 10.04, x64 2.6.32-41-generic) From python-checkins at python.org Sun Jun 3 07:07:31 2012 From: python-checkins at python.org (terry.reedy) Date: Sun, 03 Jun 2012 07:07:31 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Issue_12510=3A_?= =?utf8?q?Expand_2_bare_excepts=2E_Improve_comments=2E_Change_deceptive_na?= =?utf8?q?me?= Message-ID: http://hg.python.org/cpython/rev/477508efe4ab changeset: 77315:477508efe4ab branch: 2.7 parent: 77309:5b267381eea0 user: Terry Jan Reedy date: Sun Jun 03 00:58:36 2012 -0400 summary: Issue 12510: Expand 2 bare excepts. Improve comments. Change deceptive name 'name' to 'expression' as the latter is what the string actually represents. The bug in this issue was only catching NameError and AttributeError when evaluating an expression that was not necessarily a name. files: Lib/idlelib/CallTips.py | 30 +++++++++++++++------------- 1 files changed, 16 insertions(+), 14 deletions(-) diff --git a/Lib/idlelib/CallTips.py b/Lib/idlelib/CallTips.py --- a/Lib/idlelib/CallTips.py +++ b/Lib/idlelib/CallTips.py @@ -71,16 +71,16 @@ if not sur_paren: return hp.set_index(sur_paren[0]) - name = hp.get_expression() - if not name or (not evalfuncs and name.find('(') != -1): + expression = hp.get_expression() + if not expression or (not evalfuncs and expression.find('(') != -1): return - arg_text = self.fetch_tip(name) + arg_text = self.fetch_tip(expression) if not arg_text: return self.calltip = self._make_calltip_window() self.calltip.showtip(arg_text, sur_paren[0], sur_paren[1]) - def fetch_tip(self, name): + def fetch_tip(self, expression): """Return the argument list and docstring of a function or class If there is a Python subprocess, get the calltip there. 
Otherwise, @@ -96,25 +96,27 @@ """ try: rpcclt = self.editwin.flist.pyshell.interp.rpcclt - except: + except AttributeError: rpcclt = None if rpcclt: return rpcclt.remotecall("exec", "get_the_calltip", - (name,), {}) + (expression,), {}) else: - entity = self.get_entity(name) + entity = self.get_entity(expression) return get_arg_text(entity) - def get_entity(self, name): - "Lookup name in a namespace spanning sys.modules and __main.dict__" - if name: + def get_entity(self, expression): + """Return the object corresponding to expression evaluated + in a namespace spanning sys.modules and __main.dict__. + """ + if expression: namespace = sys.modules.copy() namespace.update(__main__.__dict__) try: - return eval(name, namespace) - # any exception is possible if evalfuncs True in open_calltip - # at least Syntax, Name, Attribute, Index, and Key E. if not - except: + return eval(expression, namespace) + except BaseException: + # An uncaught exception closes idle, and eval can raise any + # exception, especially if user classes are involved. return None def _find_constructor(class_ob): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 07:07:33 2012 From: python-checkins at python.org (terry.reedy) Date: Sun, 03 Jun 2012 07:07:33 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Issue_12510=3A_?= =?utf8?q?Expand_2_bare_excepts=2E_Improve_comments=2E_Change_deceptive_na?= =?utf8?q?me?= Message-ID: http://hg.python.org/cpython/rev/f927a5c6e4be changeset: 77316:f927a5c6e4be branch: 3.2 parent: 77310:4f3d4ce8ac9f user: Terry Jan Reedy date: Sun Jun 03 00:27:54 2012 -0400 summary: Issue 12510: Expand 2 bare excepts. Improve comments. Change deceptive name 'name' to 'expression' as the latter is what the string actually represents. The bug in this issue was only catching NameError and AttributeError when evaluating an expression that was not necessarily a name. files: Lib/idlelib/CallTips.py | 32 +++++++++++++++------------- 1 files changed, 17 insertions(+), 15 deletions(-) diff --git a/Lib/idlelib/CallTips.py b/Lib/idlelib/CallTips.py --- a/Lib/idlelib/CallTips.py +++ b/Lib/idlelib/CallTips.py @@ -67,18 +67,18 @@ if not sur_paren: return hp.set_index(sur_paren[0]) - name = hp.get_expression() - if not name: + expression = hp.get_expression() + if not expression: return - if not evalfuncs and (name.find('(') != -1): + if not evalfuncs and (expression.find('(') != -1): return - argspec = self.fetch_tip(name) + argspec = self.fetch_tip(expression) if not argspec: return self.active_calltip = self._calltip_window() self.active_calltip.showtip(argspec, sur_paren[0], sur_paren[1]) - def fetch_tip(self, name): + def fetch_tip(self, expression): """Return the argument list and docstring of a function or class. If there is a Python subprocess, get the calltip there. Otherwise, @@ -94,25 +94,27 @@ """ try: rpcclt = self.editwin.flist.pyshell.interp.rpcclt - except: + except AttributeError: rpcclt = None if rpcclt: return rpcclt.remotecall("exec", "get_the_calltip", - (name,), {}) + (expression,), {}) else: - entity = self.get_entity(name) + entity = self.get_entity(expression) return get_argspec(entity) - def get_entity(self, name): - "Lookup name in a namespace spanning sys.modules and __main.dict__." - if name: + def get_entity(self, expression): + """Return the object corresponding to expression evaluated + in a namespace spanning sys.modules and __main.dict__. 
+ """ + if expression: namespace = sys.modules.copy() namespace.update(__main__.__dict__) try: - return eval(name, namespace) - # any exception is possible if evalfuncs True in open_calltip - # at least Syntax, Name, Attribute, Index, and Key E. if not - except: + return eval(expression, namespace) + except BaseException: + # An uncaught exception closes idle, and eval can raise any + # exception, especially if user classes are involved. return None def _find_constructor(class_ob): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 07:07:33 2012 From: python-checkins at python.org (terry.reedy) Date: Sun, 03 Jun 2012 07:07:33 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_with_3=2E2_=2312510?= Message-ID: http://hg.python.org/cpython/rev/a7501ddf74ac changeset: 77317:a7501ddf74ac parent: 77314:3c43be281196 parent: 77316:f927a5c6e4be user: Terry Jan Reedy date: Sun Jun 03 01:06:38 2012 -0400 summary: Merge with 3.2 #12510 files: Lib/idlelib/CallTips.py | 32 +++++++++++++++------------- 1 files changed, 17 insertions(+), 15 deletions(-) diff --git a/Lib/idlelib/CallTips.py b/Lib/idlelib/CallTips.py --- a/Lib/idlelib/CallTips.py +++ b/Lib/idlelib/CallTips.py @@ -67,18 +67,18 @@ if not sur_paren: return hp.set_index(sur_paren[0]) - name = hp.get_expression() - if not name: + expression = hp.get_expression() + if not expression: return - if not evalfuncs and (name.find('(') != -1): + if not evalfuncs and (expression.find('(') != -1): return - argspec = self.fetch_tip(name) + argspec = self.fetch_tip(expression) if not argspec: return self.active_calltip = self._calltip_window() self.active_calltip.showtip(argspec, sur_paren[0], sur_paren[1]) - def fetch_tip(self, name): + def fetch_tip(self, expression): """Return the argument list and docstring of a function or class. If there is a Python subprocess, get the calltip there. Otherwise, @@ -94,25 +94,27 @@ """ try: rpcclt = self.editwin.flist.pyshell.interp.rpcclt - except: + except AttributeError: rpcclt = None if rpcclt: return rpcclt.remotecall("exec", "get_the_calltip", - (name,), {}) + (expression,), {}) else: - entity = self.get_entity(name) + entity = self.get_entity(expression) return get_argspec(entity) - def get_entity(self, name): - "Lookup name in a namespace spanning sys.modules and __main.dict__." - if name: + def get_entity(self, expression): + """Return the object corresponding to expression evaluated + in a namespace spanning sys.modules and __main.dict__. + """ + if expression: namespace = sys.modules.copy() namespace.update(__main__.__dict__) try: - return eval(name, namespace) - # any exception is possible if evalfuncs True in open_calltip - # at least Syntax, Name, Attribute, Index, and Key E. if not - except: + return eval(expression, namespace) + except BaseException: + # An uncaught exception closes idle, and eval can raise any + # exception, especially if user classes are involved. 
return None def _find_constructor(class_ob): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 07:09:48 2012 From: python-checkins at python.org (eli.bendersky) Date: Sun, 03 Jun 2012 07:09:48 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314090=3A_fix_some_?= =?utf8?q?minor_C_API_problems_in_default_branch_=283=2E3=29?= Message-ID: http://hg.python.org/cpython/rev/90f0dd118aa4 changeset: 77318:90f0dd118aa4 parent: 77314:3c43be281196 user: Eli Bendersky date: Sun Jun 03 08:07:47 2012 +0300 summary: Issue #14090: fix some minor C API problems in default branch (3.3) files: Doc/c-api/code.rst | 6 +++--- Doc/c-api/conversion.rst | 4 ++-- Doc/c-api/init.rst | 2 +- Doc/c-api/type.rst | 4 ++-- Doc/c-api/unicode.rst | 2 +- Doc/c-api/veryhigh.rst | 6 ------ Include/pythonrun.h | 7 +++++-- Misc/ACKS | 1 + 8 files changed, 15 insertions(+), 17 deletions(-) diff --git a/Doc/c-api/code.rst b/Doc/c-api/code.rst --- a/Doc/c-api/code.rst +++ b/Doc/c-api/code.rst @@ -31,11 +31,11 @@ Return true if *co* is a :class:`code` object -.. c:function:: int PyCode_GetNumFree(PyObject *co) +.. c:function:: int PyCode_GetNumFree(PyCodeObject *co) Return the number of free variables in *co*. -.. c:function:: PyCodeObject *PyCode_New(int argcount, int kwonlyargcount, int nlocals, int stacksize, int flags, PyObject *code, PyObject *consts, PyObject *names, PyObject *varnames, PyObject *freevars, PyObject *cellvars, PyObject *filename, PyObject *name, int firstlineno, PyObject *lnotab) +.. c:function:: PyCodeObject* PyCode_New(int argcount, int kwonlyargcount, int nlocals, int stacksize, int flags, PyObject *code, PyObject *consts, PyObject *names, PyObject *varnames, PyObject *freevars, PyObject *cellvars, PyObject *filename, PyObject *name, int firstlineno, PyObject *lnotab) Return a new code object. If you need a dummy code object to create a frame, use :c:func:`PyCode_NewEmpty` instead. Calling @@ -43,7 +43,7 @@ version since the definition of the bytecode changes often. -.. c:function:: int PyCode_NewEmpty(const char *filename, const char *funcname, int firstlineno) +.. c:function:: PyCodeObject* PyCode_NewEmpty(const char *filename, const char *funcname, int firstlineno) Return a new empty code object with the specified filename, function name, and first line number. It is illegal to diff --git a/Doc/c-api/conversion.rst b/Doc/c-api/conversion.rst --- a/Doc/c-api/conversion.rst +++ b/Doc/c-api/conversion.rst @@ -119,13 +119,13 @@ .. versionadded:: 3.1 -.. c:function:: char* PyOS_stricmp(char *s1, char *s2) +.. c:function:: int PyOS_stricmp(char *s1, char *s2) Case insensitive comparison of strings. The function works almost identically to :c:func:`strcmp` except that it ignores the case. -.. c:function:: char* PyOS_strnicmp(char *s1, char *s2, Py_ssize_t size) +.. c:function:: int PyOS_strnicmp(char *s1, char *s2, Py_ssize_t size) Case insensitive comparison of strings. The function works almost identically to :c:func:`strncmp` except that it ignores the case. diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst --- a/Doc/c-api/init.rst +++ b/Doc/c-api/init.rst @@ -646,7 +646,7 @@ :c:func:`PyGILState_Release` on the same thread. -.. c:function:: PyThreadState PyGILState_GetThisThreadState() +.. c:function:: PyThreadState* PyGILState_GetThisThreadState() Get the current thread state for this thread. May return ``NULL`` if no GILState API has been used on the current thread. 
Note that the main thread diff --git a/Doc/c-api/type.rst b/Doc/c-api/type.rst --- a/Doc/c-api/type.rst +++ b/Doc/c-api/type.rst @@ -51,13 +51,13 @@ modification of the attributes or base classes of the type. -.. c:function:: int PyType_HasFeature(PyObject *o, int feature) +.. c:function:: int PyType_HasFeature(PyTypeObject *o, int feature) Return true if the type object *o* sets the feature *feature*. Type features are denoted by single bit flags. -.. c:function:: int PyType_IS_GC(PyObject *o) +.. c:function:: int PyType_IS_GC(PyTypeObject *o) Return true if the type object includes support for the cycle detector; this tests the type flag :const:`Py_TPFLAGS_HAVE_GC`. diff --git a/Doc/c-api/unicode.rst b/Doc/c-api/unicode.rst --- a/Doc/c-api/unicode.rst +++ b/Doc/c-api/unicode.rst @@ -1615,7 +1615,7 @@ ISO-8859-1 if it contains non-ASCII characters". -.. c:function:: int PyUnicode_RichCompare(PyObject *left, PyObject *right, int op) +.. c:function:: PyObject* PyUnicode_RichCompare(PyObject *left, PyObject *right, int op) Rich compare two unicode strings and return one of the following: diff --git a/Doc/c-api/veryhigh.rst b/Doc/c-api/veryhigh.rst --- a/Doc/c-api/veryhigh.rst +++ b/Doc/c-api/veryhigh.rst @@ -95,12 +95,6 @@ leaving *closeit* set to ``0`` and *flags* set to *NULL*. -.. c:function:: int PyRun_SimpleFileFlags(FILE *fp, const char *filename, PyCompilerFlags *flags) - - This is a simplified interface to :c:func:`PyRun_SimpleFileExFlags` below, - leaving *closeit* set to ``0``. - - .. c:function:: int PyRun_SimpleFileEx(FILE *fp, const char *filename, int closeit) This is a simplified interface to :c:func:`PyRun_SimpleFileExFlags` below, diff --git a/Include/pythonrun.h b/Include/pythonrun.h --- a/Include/pythonrun.h +++ b/Include/pythonrun.h @@ -82,9 +82,12 @@ PyParser_SimpleParseFileFlags(FP, S, B, 0) #endif PyAPI_FUNC(struct _node *) PyParser_SimpleParseStringFlags(const char *, int, - int); + int); +PyAPI_FUNC(struct _node *) PyParser_SimpleParseStringFlagsFilename(const char *, + const char *, + int, int); PyAPI_FUNC(struct _node *) PyParser_SimpleParseFileFlags(FILE *, const char *, - int, int); + int, int); #ifndef Py_LIMITED_API PyAPI_FUNC(PyObject *) PyRun_StringFlags(const char *, int, PyObject *, diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -989,6 +989,7 @@ Joel Stanley Oliver Steele Greg Stein +Baruch Sterin Chris Stern Alex Stewart Victor Stinner -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 07:09:49 2012 From: python-checkins at python.org (eli.bendersky) Date: Sun, 03 Jun 2012 07:09:49 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_merge?= Message-ID: http://hg.python.org/cpython/rev/5e6676be2224 changeset: 77319:5e6676be2224 parent: 77318:90f0dd118aa4 parent: 77317:a7501ddf74ac user: Eli Bendersky date: Sun Jun 03 08:09:33 2012 +0300 summary: merge files: Lib/idlelib/CallTips.py | 32 +++++++++++++++------------- 1 files changed, 17 insertions(+), 15 deletions(-) diff --git a/Lib/idlelib/CallTips.py b/Lib/idlelib/CallTips.py --- a/Lib/idlelib/CallTips.py +++ b/Lib/idlelib/CallTips.py @@ -67,18 +67,18 @@ if not sur_paren: return hp.set_index(sur_paren[0]) - name = hp.get_expression() - if not name: + expression = hp.get_expression() + if not expression: return - if not evalfuncs and (name.find('(') != -1): + if not evalfuncs and (expression.find('(') != -1): return - argspec = self.fetch_tip(name) + argspec = self.fetch_tip(expression) if not 
argspec: return self.active_calltip = self._calltip_window() self.active_calltip.showtip(argspec, sur_paren[0], sur_paren[1]) - def fetch_tip(self, name): + def fetch_tip(self, expression): """Return the argument list and docstring of a function or class. If there is a Python subprocess, get the calltip there. Otherwise, @@ -94,25 +94,27 @@ """ try: rpcclt = self.editwin.flist.pyshell.interp.rpcclt - except: + except AttributeError: rpcclt = None if rpcclt: return rpcclt.remotecall("exec", "get_the_calltip", - (name,), {}) + (expression,), {}) else: - entity = self.get_entity(name) + entity = self.get_entity(expression) return get_argspec(entity) - def get_entity(self, name): - "Lookup name in a namespace spanning sys.modules and __main.dict__." - if name: + def get_entity(self, expression): + """Return the object corresponding to expression evaluated + in a namespace spanning sys.modules and __main.dict__. + """ + if expression: namespace = sys.modules.copy() namespace.update(__main__.__dict__) try: - return eval(name, namespace) - # any exception is possible if evalfuncs True in open_calltip - # at least Syntax, Name, Attribute, Index, and Key E. if not - except: + return eval(expression, namespace) + except BaseException: + # An uncaught exception closes idle, and eval can raise any + # exception, especially if user classes are involved. return None def _find_constructor(class_ob): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 10:16:49 2012 From: python-checkins at python.org (senthil.kumaran) Date: Sun, 03 Jun 2012 10:16:49 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_14989=3A_http=2Eserve?= =?utf8?q?r_--cgi_option_can_enable_the_CGI_http_server=2E?= Message-ID: http://hg.python.org/cpython/rev/935a656359ae changeset: 77320:935a656359ae parent: 77313:eb1d633fe307 user: Senthil Kumaran date: Sun Jun 03 16:15:54 2012 +0800 summary: Issue 14989: http.server --cgi option can enable the CGI http server. files: Doc/library/http.server.rst | 6 ++++++ Lib/http/server.py | 22 +++++++++++++++------- Misc/NEWS | 3 +++ 3 files changed, 24 insertions(+), 7 deletions(-) diff --git a/Doc/library/http.server.rst b/Doc/library/http.server.rst --- a/Doc/library/http.server.rst +++ b/Doc/library/http.server.rst @@ -400,3 +400,9 @@ Note that CGI scripts will be run with UID of user nobody, for security reasons. Problems with the CGI script will be translated to error 403. + +:class:`CGIHTTPRequestHandler` can be enabled in the command line by passing +the ``--cgi`` option.:: + + python -m http.server --cgi 8000 + diff --git a/Lib/http/server.py b/Lib/http/server.py --- a/Lib/http/server.py +++ b/Lib/http/server.py @@ -100,6 +100,8 @@ import time import urllib.parse import copy +import argparse + # Default error message template DEFAULT_ERROR_MESSAGE = """\ @@ -1173,18 +1175,13 @@ def test(HandlerClass = BaseHTTPRequestHandler, - ServerClass = HTTPServer, protocol="HTTP/1.0"): + ServerClass = HTTPServer, protocol="HTTP/1.0", port=8000): """Test the HTTP request handler class. This runs an HTTP server on port 8000 (or the first command line argument). 
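The new command-line option only decides which handler class gets passed to test(). A rough standalone equivalent of that dispatch is sketched below; serve() and its defaults are invented for the example and are not part of the patch.

    from http.server import (HTTPServer, SimpleHTTPRequestHandler,
                             CGIHTTPRequestHandler)

    def serve(port=8000, cgi=False):
        # The same choice the new --cgi flag makes before handing off to test().
        handler_class = CGIHTTPRequestHandler if cgi else SimpleHTTPRequestHandler
        httpd = HTTPServer(('', port), handler_class)
        try:
            httpd.serve_forever()
        except KeyboardInterrupt:
            httpd.server_close()

    # serve(8000, cgi=True) behaves roughly like: python -m http.server --cgi 8000
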
""" - - if sys.argv[1:]: - port = int(sys.argv[1]) - else: - port = 8000 server_address = ('', port) HandlerClass.protocol_version = protocol @@ -1200,4 +1197,15 @@ sys.exit(0) if __name__ == '__main__': - test(HandlerClass=SimpleHTTPRequestHandler) + parser = argparse.ArgumentParser() + parser.add_argument('--cgi', action='store_true', + help='Run as CGI Server') + parser.add_argument('port', action='store', + default=8000, type=int, + nargs='?', + help='Specify alternate port [default: 8000]') + args = parser.parse_args() + if args.cgi: + test(HandlerClass=CGIHTTPRequestHandler, port=args.port) + else: + test(HandlerClass=SimpleHTTPRequestHandler, port=args.port) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,9 @@ Library ------- +- Issue #14989: Make the CGI enable option to http.server available via command + line. + - Issue #14987: Add a missing import statement to inspect. - Issue #1079: email.header.decode_header now correctly parses all the examples -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 10:16:50 2012 From: python-checkins at python.org (senthil.kumaran) Date: Sun, 03 Jun 2012 10:16:50 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_merge_heads?= Message-ID: http://hg.python.org/cpython/rev/1bbb3f481bae changeset: 77321:1bbb3f481bae parent: 77320:935a656359ae parent: 77319:5e6676be2224 user: Senthil Kumaran date: Sun Jun 03 16:16:39 2012 +0800 summary: merge heads files: Doc/c-api/code.rst | 6 ++-- Doc/c-api/conversion.rst | 4 +- Doc/c-api/init.rst | 2 +- Doc/c-api/type.rst | 13 ++++++----- Doc/c-api/unicode.rst | 2 +- Doc/c-api/veryhigh.rst | 6 ----- Include/pythonrun.h | 7 ++++- Lib/idlelib/CallTips.py | 32 ++++++++++++++------------- Misc/ACKS | 1 + 9 files changed, 37 insertions(+), 36 deletions(-) diff --git a/Doc/c-api/code.rst b/Doc/c-api/code.rst --- a/Doc/c-api/code.rst +++ b/Doc/c-api/code.rst @@ -31,11 +31,11 @@ Return true if *co* is a :class:`code` object -.. c:function:: int PyCode_GetNumFree(PyObject *co) +.. c:function:: int PyCode_GetNumFree(PyCodeObject *co) Return the number of free variables in *co*. -.. c:function:: PyCodeObject *PyCode_New(int argcount, int kwonlyargcount, int nlocals, int stacksize, int flags, PyObject *code, PyObject *consts, PyObject *names, PyObject *varnames, PyObject *freevars, PyObject *cellvars, PyObject *filename, PyObject *name, int firstlineno, PyObject *lnotab) +.. c:function:: PyCodeObject* PyCode_New(int argcount, int kwonlyargcount, int nlocals, int stacksize, int flags, PyObject *code, PyObject *consts, PyObject *names, PyObject *varnames, PyObject *freevars, PyObject *cellvars, PyObject *filename, PyObject *name, int firstlineno, PyObject *lnotab) Return a new code object. If you need a dummy code object to create a frame, use :c:func:`PyCode_NewEmpty` instead. Calling @@ -43,7 +43,7 @@ version since the definition of the bytecode changes often. -.. c:function:: int PyCode_NewEmpty(const char *filename, const char *funcname, int firstlineno) +.. c:function:: PyCodeObject* PyCode_NewEmpty(const char *filename, const char *funcname, int firstlineno) Return a new empty code object with the specified filename, function name, and first line number. It is illegal to diff --git a/Doc/c-api/conversion.rst b/Doc/c-api/conversion.rst --- a/Doc/c-api/conversion.rst +++ b/Doc/c-api/conversion.rst @@ -119,13 +119,13 @@ .. versionadded:: 3.1 -.. c:function:: char* PyOS_stricmp(char *s1, char *s2) +.. 
c:function:: int PyOS_stricmp(char *s1, char *s2) Case insensitive comparison of strings. The function works almost identically to :c:func:`strcmp` except that it ignores the case. -.. c:function:: char* PyOS_strnicmp(char *s1, char *s2, Py_ssize_t size) +.. c:function:: int PyOS_strnicmp(char *s1, char *s2, Py_ssize_t size) Case insensitive comparison of strings. The function works almost identically to :c:func:`strncmp` except that it ignores the case. diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst --- a/Doc/c-api/init.rst +++ b/Doc/c-api/init.rst @@ -646,7 +646,7 @@ :c:func:`PyGILState_Release` on the same thread. -.. c:function:: PyThreadState PyGILState_GetThisThreadState() +.. c:function:: PyThreadState* PyGILState_GetThisThreadState() Get the current thread state for this thread. May return ``NULL`` if no GILState API has been used on the current thread. Note that the main thread diff --git a/Doc/c-api/type.rst b/Doc/c-api/type.rst --- a/Doc/c-api/type.rst +++ b/Doc/c-api/type.rst @@ -51,13 +51,13 @@ modification of the attributes or base classes of the type. -.. c:function:: int PyType_HasFeature(PyObject *o, int feature) +.. c:function:: int PyType_HasFeature(PyTypeObject *o, int feature) Return true if the type object *o* sets the feature *feature*. Type features are denoted by single bit flags. -.. c:function:: int PyType_IS_GC(PyObject *o) +.. c:function:: int PyType_IS_GC(PyTypeObject *o) Return true if the type object includes support for the cycle detector; this tests the type flag :const:`Py_TPFLAGS_HAVE_GC`. @@ -70,13 +70,14 @@ .. c:function:: PyObject* PyType_GenericAlloc(PyTypeObject *type, Py_ssize_t nitems) - XXX: Document. - + Generic handler for the :attr:`tp_alloc` slot of a type object. Use + Python's default memory allocation mechanism to allocate a new instance and + initialize all its contents to *NULL*. .. c:function:: PyObject* PyType_GenericNew(PyTypeObject *type, PyObject *args, PyObject *kwds) - Generic handler for the :attr:`tp_new` slot of a type object. Initialize - all instance variables to *NULL*. + Generic handler for the :attr:`tp_new` slot of a type object. Create a + new instance using the type's :attr:`tp_alloc` slot. .. c:function:: int PyType_Ready(PyTypeObject *type) diff --git a/Doc/c-api/unicode.rst b/Doc/c-api/unicode.rst --- a/Doc/c-api/unicode.rst +++ b/Doc/c-api/unicode.rst @@ -1615,7 +1615,7 @@ ISO-8859-1 if it contains non-ASCII characters". -.. c:function:: int PyUnicode_RichCompare(PyObject *left, PyObject *right, int op) +.. c:function:: PyObject* PyUnicode_RichCompare(PyObject *left, PyObject *right, int op) Rich compare two unicode strings and return one of the following: diff --git a/Doc/c-api/veryhigh.rst b/Doc/c-api/veryhigh.rst --- a/Doc/c-api/veryhigh.rst +++ b/Doc/c-api/veryhigh.rst @@ -95,12 +95,6 @@ leaving *closeit* set to ``0`` and *flags* set to *NULL*. -.. c:function:: int PyRun_SimpleFileFlags(FILE *fp, const char *filename, PyCompilerFlags *flags) - - This is a simplified interface to :c:func:`PyRun_SimpleFileExFlags` below, - leaving *closeit* set to ``0``. - - .. 
c:function:: int PyRun_SimpleFileEx(FILE *fp, const char *filename, int closeit) This is a simplified interface to :c:func:`PyRun_SimpleFileExFlags` below, diff --git a/Include/pythonrun.h b/Include/pythonrun.h --- a/Include/pythonrun.h +++ b/Include/pythonrun.h @@ -82,9 +82,12 @@ PyParser_SimpleParseFileFlags(FP, S, B, 0) #endif PyAPI_FUNC(struct _node *) PyParser_SimpleParseStringFlags(const char *, int, - int); + int); +PyAPI_FUNC(struct _node *) PyParser_SimpleParseStringFlagsFilename(const char *, + const char *, + int, int); PyAPI_FUNC(struct _node *) PyParser_SimpleParseFileFlags(FILE *, const char *, - int, int); + int, int); #ifndef Py_LIMITED_API PyAPI_FUNC(PyObject *) PyRun_StringFlags(const char *, int, PyObject *, diff --git a/Lib/idlelib/CallTips.py b/Lib/idlelib/CallTips.py --- a/Lib/idlelib/CallTips.py +++ b/Lib/idlelib/CallTips.py @@ -67,18 +67,18 @@ if not sur_paren: return hp.set_index(sur_paren[0]) - name = hp.get_expression() - if not name: + expression = hp.get_expression() + if not expression: return - if not evalfuncs and (name.find('(') != -1): + if not evalfuncs and (expression.find('(') != -1): return - argspec = self.fetch_tip(name) + argspec = self.fetch_tip(expression) if not argspec: return self.active_calltip = self._calltip_window() self.active_calltip.showtip(argspec, sur_paren[0], sur_paren[1]) - def fetch_tip(self, name): + def fetch_tip(self, expression): """Return the argument list and docstring of a function or class. If there is a Python subprocess, get the calltip there. Otherwise, @@ -94,25 +94,27 @@ """ try: rpcclt = self.editwin.flist.pyshell.interp.rpcclt - except: + except AttributeError: rpcclt = None if rpcclt: return rpcclt.remotecall("exec", "get_the_calltip", - (name,), {}) + (expression,), {}) else: - entity = self.get_entity(name) + entity = self.get_entity(expression) return get_argspec(entity) - def get_entity(self, name): - "Lookup name in a namespace spanning sys.modules and __main.dict__." - if name: + def get_entity(self, expression): + """Return the object corresponding to expression evaluated + in a namespace spanning sys.modules and __main.dict__. + """ + if expression: namespace = sys.modules.copy() namespace.update(__main__.__dict__) try: - return eval(name, namespace) - # any exception is possible if evalfuncs True in open_calltip - # at least Syntax, Name, Attribute, Index, and Key E. if not - except: + return eval(expression, namespace) + except BaseException: + # An uncaught exception closes idle, and eval can raise any + # exception, especially if user classes are involved. return None def _find_constructor(class_ob): diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -989,6 +989,7 @@ Joel Stanley Oliver Steele Greg Stein +Baruch Sterin Chris Stern Alex Stewart Victor Stinner -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 12:00:56 2012 From: python-checkins at python.org (martin.v.loewis) Date: Sun, 03 Jun 2012 12:00:56 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogSXNzdWUgIzE0OTM3?= =?utf8?q?=3A_Perform_auto-completion_of_filenames_in_strings_even_for?= Message-ID: http://hg.python.org/cpython/rev/41e85ac2ccef changeset: 77322:41e85ac2ccef branch: 3.2 parent: 77316:f927a5c6e4be user: Martin v. L?wis date: Sun Jun 03 11:55:32 2012 +0200 summary: Issue #14937: Perform auto-completion of filenames in strings even for non-ASCII filenames. 
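The diff below drops the ASCII-only FILENAME_CHARS whitelist and instead scans backwards to the last path separator and then to the opening quote, so any character a filename may contain is kept. A standalone sketch of that scan follows; split_filename_completion and the sample line are illustrative only, not IDLE code.

    import os

    SEPS = os.sep + (os.altsep or '')

    def split_filename_completion(curline):
        # Mimic the patched loops: comp_start is the fragment after the last
        # separator (or the opening quote), comp_what is the directory part
        # between the quote and that fragment.
        i = j = len(curline)
        while i and curline[i-1] not in "'\"" + SEPS:
            i -= 1
        comp_start = curline[i:j]
        j = i
        while i and curline[i-1] not in "'\"":
            i -= 1
        comp_what = curline[i:j]
        return comp_what, comp_start

    print(split_filename_completion('open("/tmp/müll'))   # ('/tmp/', 'müll')
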
files: Lib/idlelib/AutoComplete.py | 11 +++++++++-- Lib/idlelib/AutoCompleteWindow.py | 9 +++++++++ Lib/idlelib/NEWS.txt | 6 ++++++ 3 files changed, 24 insertions(+), 2 deletions(-) diff --git a/Lib/idlelib/AutoComplete.py b/Lib/idlelib/AutoComplete.py --- a/Lib/idlelib/AutoComplete.py +++ b/Lib/idlelib/AutoComplete.py @@ -124,13 +124,20 @@ curline = self.text.get("insert linestart", "insert") i = j = len(curline) if hp.is_in_string() and (not mode or mode==COMPLETE_FILES): + # Find the beginning of the string + # fetch_completions will look at the file system to determine whether the + # string value constitutes an actual file name + # XXX could consider raw strings here and unescape the string value if it's + # not raw. self._remove_autocomplete_window() mode = COMPLETE_FILES - while i and curline[i-1] in FILENAME_CHARS: + # Find last separator or string start + while i and curline[i-1] not in "'\"" + SEPS: i -= 1 comp_start = curline[i:j] j = i - while i and curline[i-1] in FILENAME_CHARS + SEPS: + # Find string start + while i and curline[i-1] not in "'\"": i -= 1 comp_what = curline[i:j] elif hp.is_in_code() and (not mode or mode==COMPLETE_ATTRIBUTES): diff --git a/Lib/idlelib/AutoCompleteWindow.py b/Lib/idlelib/AutoCompleteWindow.py --- a/Lib/idlelib/AutoCompleteWindow.py +++ b/Lib/idlelib/AutoCompleteWindow.py @@ -354,6 +354,15 @@ # A modifier key, so ignore return + elif event.char: + # Regular character with a non-length-1 keycode + self._change_start(self.start + event.char) + self.lasttypedstart = self.start + self.listbox.select_clear(0, int(self.listbox.curselection()[0])) + self.listbox.select_set(self._binary_search(self.start)) + self._selection_changed() + return "break" + else: # Unknown event, close the window and let it through. self.hide_window() diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -1,3 +1,9 @@ +What's New in IDLE 3.2.4? +========================= + +- Issue #14937: Perform auto-completion of filenames in strings even for + non-ASCII filenames. + What's New in IDLE 3.2.3? ========================= -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 12:00:57 2012 From: python-checkins at python.org (martin.v.loewis) Date: Sun, 03 Jun 2012 12:00:57 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_3=2E2=3A_issue_=2314937=2E?= Message-ID: http://hg.python.org/cpython/rev/9aa8af0761ef changeset: 77323:9aa8af0761ef parent: 77321:1bbb3f481bae parent: 77322:41e85ac2ccef user: Martin v. L?wis date: Sun Jun 03 12:00:48 2012 +0200 summary: Merge 3.2: issue #14937. files: Lib/idlelib/AutoComplete.py | 11 +++++++++-- Lib/idlelib/AutoCompleteWindow.py | 9 +++++++++ Lib/idlelib/NEWS.txt | 5 ++++- 3 files changed, 22 insertions(+), 3 deletions(-) diff --git a/Lib/idlelib/AutoComplete.py b/Lib/idlelib/AutoComplete.py --- a/Lib/idlelib/AutoComplete.py +++ b/Lib/idlelib/AutoComplete.py @@ -124,13 +124,20 @@ curline = self.text.get("insert linestart", "insert") i = j = len(curline) if hp.is_in_string() and (not mode or mode==COMPLETE_FILES): + # Find the beginning of the string + # fetch_completions will look at the file system to determine whether the + # string value constitutes an actual file name + # XXX could consider raw strings here and unescape the string value if it's + # not raw. 
self._remove_autocomplete_window() mode = COMPLETE_FILES - while i and curline[i-1] in FILENAME_CHARS: + # Find last separator or string start + while i and curline[i-1] not in "'\"" + SEPS: i -= 1 comp_start = curline[i:j] j = i - while i and curline[i-1] in FILENAME_CHARS + SEPS: + # Find string start + while i and curline[i-1] not in "'\"": i -= 1 comp_what = curline[i:j] elif hp.is_in_code() and (not mode or mode==COMPLETE_ATTRIBUTES): diff --git a/Lib/idlelib/AutoCompleteWindow.py b/Lib/idlelib/AutoCompleteWindow.py --- a/Lib/idlelib/AutoCompleteWindow.py +++ b/Lib/idlelib/AutoCompleteWindow.py @@ -354,6 +354,15 @@ # A modifier key, so ignore return + elif event.char: + # Regular character with a non-length-1 keycode + self._change_start(self.start + event.char) + self.lasttypedstart = self.start + self.listbox.select_clear(0, int(self.listbox.curselection()[0])) + self.listbox.select_set(self._binary_search(self.start)) + self._selection_changed() + return "break" + else: # Unknown event, close the window and let it through. self.hide_window() diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -1,6 +1,9 @@ -What's New in IDLE 3.3? +What's New in IDLE 3.3.0? ========================= +- Issue #14937: Perform auto-completion of filenames in strings even for + non-ASCII filenames. + - Issue #8515: Set __file__ when run file in IDLE. Initial patch by Bruce Frederiksen. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 12:03:35 2012 From: python-checkins at python.org (martin.v.loewis) Date: Sun, 03 Jun 2012 12:03:35 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Drop_unused_constant=2E?= Message-ID: http://hg.python.org/cpython/rev/8c07769d717e changeset: 77324:8c07769d717e user: Martin v. L?wis date: Sun Jun 03 12:03:29 2012 +0200 summary: Drop unused constant. files: Lib/idlelib/AutoComplete.py | 3 --- 1 files changed, 0 insertions(+), 3 deletions(-) diff --git a/Lib/idlelib/AutoComplete.py b/Lib/idlelib/AutoComplete.py --- a/Lib/idlelib/AutoComplete.py +++ b/Lib/idlelib/AutoComplete.py @@ -9,9 +9,6 @@ from idlelib.configHandler import idleConf -# This string includes all chars that may be in a file name (without a path -# separator) -FILENAME_CHARS = string.ascii_letters + string.digits + os.curdir + "._~#$:-" # This string includes all chars that may be in an identifier ID_CHARS = string.ascii_letters + string.digits + "_" -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 12:33:38 2012 From: python-checkins at python.org (martin.v.loewis) Date: Sun, 03 Jun 2012 12:33:38 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Do_not_try_to_i?= =?utf8?q?nsert_control_characters=2E?= Message-ID: http://hg.python.org/cpython/rev/ec5bc858df25 changeset: 77325:ec5bc858df25 branch: 3.2 parent: 77322:41e85ac2ccef user: Martin v. L?wis date: Sun Jun 03 12:26:09 2012 +0200 summary: Do not try to insert control characters. 
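The one-line fix below only extends the completion prefix for printable input; control characters, whose event.char sorts below the space character, fall through to the branch that closes the window. A toy illustration of the same test, using a stand-in event object rather than a real Tk event:

    class FakeEvent:
        def __init__(self, char):
            self.char = char

    def extends_completion(event):
        # Mirror of the new guard: a non-empty char that is not a control
        # character should be appended to the completion start.
        return bool(event.char) and event.char >= ' '

    print(extends_completion(FakeEvent('é')))     # True: regular character
    print(extends_completion(FakeEvent('\x1b')))  # False: Escape is a control char
    print(extends_completion(FakeEvent('')))      # False: modifier or dead key
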
files: Lib/idlelib/AutoCompleteWindow.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/idlelib/AutoCompleteWindow.py b/Lib/idlelib/AutoCompleteWindow.py --- a/Lib/idlelib/AutoCompleteWindow.py +++ b/Lib/idlelib/AutoCompleteWindow.py @@ -354,7 +354,7 @@ # A modifier key, so ignore return - elif event.char: + elif event.char and event.char >= ' ': # Regular character with a non-length-1 keycode self._change_start(self.start + event.char) self.lasttypedstart = self.start -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 12:33:39 2012 From: python-checkins at python.org (martin.v.loewis) Date: Sun, 03 Jun 2012 12:33:39 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_PEP_3131=3A_sup?= =?utf8?q?port_non-ASCII_characters_in_auto-completion_of_identifiers=2E?= Message-ID: http://hg.python.org/cpython/rev/21a475aee5e0 changeset: 77326:21a475aee5e0 branch: 3.2 user: Martin v. L?wis date: Sun Jun 03 12:32:42 2012 +0200 summary: PEP 3131: support non-ASCII characters in auto-completion of identifiers. files: Lib/idlelib/AutoComplete.py | 2 +- Lib/idlelib/NEWS.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Lib/idlelib/AutoComplete.py b/Lib/idlelib/AutoComplete.py --- a/Lib/idlelib/AutoComplete.py +++ b/Lib/idlelib/AutoComplete.py @@ -143,7 +143,7 @@ elif hp.is_in_code() and (not mode or mode==COMPLETE_ATTRIBUTES): self._remove_autocomplete_window() mode = COMPLETE_ATTRIBUTES - while i and curline[i-1] in ID_CHARS: + while i and curline[i-1] in ID_CHARS or ord(curline[i-1]) > 127: i -= 1 comp_start = curline[i:j] if i and curline[i-1] == '.': diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -2,7 +2,7 @@ ========================= - Issue #14937: Perform auto-completion of filenames in strings even for - non-ASCII filenames. + non-ASCII filenames. Likewise for identifiers. What's New in IDLE 3.2.3? ========================= -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 12:33:40 2012 From: python-checkins at python.org (martin.v.loewis) Date: Sun, 03 Jun 2012 12:33:40 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_merge_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/0c68d3412ee5 changeset: 77327:0c68d3412ee5 parent: 77324:8c07769d717e parent: 77326:21a475aee5e0 user: Martin v. 
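Under PEP 3131 the ASCII ID_CHARS whitelist is too narrow, so the loop changed below also treats any character above 127 as part of the identifier being completed. A standalone sketch of the widened scan; identifier_prefix and the sample lines are illustrative, not the IDLE code itself.

    import string

    ID_CHARS = string.ascii_letters + string.digits + "_"

    def identifier_prefix(curline):
        # Walk backwards over identifier characters, accepting non-ASCII
        # characters the way the patched loop does.
        i = j = len(curline)
        while i and (curline[i-1] in ID_CHARS or ord(curline[i-1]) > 127):
            i -= 1
        return curline[i:j]

    print(identifier_prefix("x = grö"))    # 'grö'
    print(identifier_prefix("x = size"))   # 'size'
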
L?wis date: Sun Jun 03 12:33:23 2012 +0200 summary: merge 3.2 files: Lib/idlelib/AutoComplete.py | 2 +- Lib/idlelib/AutoCompleteWindow.py | 2 +- Lib/idlelib/NEWS.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Lib/idlelib/AutoComplete.py b/Lib/idlelib/AutoComplete.py --- a/Lib/idlelib/AutoComplete.py +++ b/Lib/idlelib/AutoComplete.py @@ -140,7 +140,7 @@ elif hp.is_in_code() and (not mode or mode==COMPLETE_ATTRIBUTES): self._remove_autocomplete_window() mode = COMPLETE_ATTRIBUTES - while i and curline[i-1] in ID_CHARS: + while i and curline[i-1] in ID_CHARS or ord(curline[i-1]) > 127: i -= 1 comp_start = curline[i:j] if i and curline[i-1] == '.': diff --git a/Lib/idlelib/AutoCompleteWindow.py b/Lib/idlelib/AutoCompleteWindow.py --- a/Lib/idlelib/AutoCompleteWindow.py +++ b/Lib/idlelib/AutoCompleteWindow.py @@ -354,7 +354,7 @@ # A modifier key, so ignore return - elif event.char: + elif event.char and event.char >= ' ': # Regular character with a non-length-1 keycode self._change_start(self.start + event.char) self.lasttypedstart = self.start diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -2,7 +2,7 @@ ========================= - Issue #14937: Perform auto-completion of filenames in strings even for - non-ASCII filenames. + non-ASCII filenames. Likewise for identifiers. - Issue #8515: Set __file__ when run file in IDLE. Initial patch by Bruce Frederiksen. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 18:27:20 2012 From: python-checkins at python.org (r.david.murray) Date: Sun, 03 Jun 2012 18:27:20 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_=232658=3A_Add_test_for_iss?= =?utf8?q?ue_fixed_by_fix_for_=231079=2E?= Message-ID: http://hg.python.org/cpython/rev/0808cb8c60fd changeset: 77328:0808cb8c60fd user: R David Murray date: Sun Jun 03 12:27:07 2012 -0400 summary: #2658: Add test for issue fixed by fix for #1079. files: Lib/test/test_email/test_email.py | 10 ++++++++++ 1 files changed, 10 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py --- a/Lib/test/test_email/test_email.py +++ b/Lib/test/test_email/test_email.py @@ -2104,6 +2104,16 @@ self.assertEqual(make_header(decode_header(s)).encode(), s.lower()) self.assertEqual(str(make_header(decode_header(s))), '(a b)') + def test_multiline_header(self): + s = '=?windows-1252?q?=22M=FCller_T=22?=\r\n ' + self.assertEqual(decode_header(s), + [(b'"M\xfcller T"', 'windows-1252'), + (b'', None)]) + self.assertEqual(make_header(decode_header(s)).encode(), + ''.join(s.splitlines())) + self.assertEqual(str(make_header(decode_header(s))), + '"M?ller T" ') + # Test the MIMEMessage class class TestMIMEMessage(TestEmailBase): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 23:36:52 2012 From: python-checkins at python.org (gregory.p.smith) Date: Sun, 03 Jun 2012 23:36:52 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Fixes_Issue_=23?= =?utf8?q?14992=3A_os=2Emakedirs=28path=2C_exist=5Fok=3DTrue=29_would_rais?= =?utf8?q?e_an_OSError?= Message-ID: http://hg.python.org/cpython/rev/fef529f3de5b changeset: 77329:fef529f3de5b branch: 3.2 parent: 77326:21a475aee5e0 user: Gregory P. 
Smith date: Sun Jun 03 14:30:44 2012 -0700 summary: Fixes Issue #14992: os.makedirs(path, exist_ok=True) would raise an OSError when the path existed and had the S_ISGID mode bit set when it was not explicitly asked for. This is no longer an exception as mkdir cannot control if the OS sets that bit for it or not. files: Lib/os.py | 16 +++++++++++- Lib/test/test_os.py | 40 ++++++++++++++++++++++++++++---- Misc/NEWS | 5 ++++ 3 files changed, 53 insertions(+), 8 deletions(-) diff --git a/Lib/os.py b/Lib/os.py --- a/Lib/os.py +++ b/Lib/os.py @@ -152,8 +152,20 @@ mkdir(name, mode) except OSError as e: import stat as st - if not (e.errno == errno.EEXIST and exist_ok and path.isdir(name) and - st.S_IMODE(lstat(name).st_mode) == _get_masked_mode(mode)): + dir_exists = path.isdir(name) + expected_mode = _get_masked_mode(mode) + if dir_exists: + # S_ISGID is automatically copied by the OS from parent to child + # directories on mkdir. Don't consider it being set to be a mode + # mismatch as mkdir does not unset it when not specified in mode. + actual_mode = st.S_IMODE(lstat(name).st_mode) & ~st.S_ISGID + else: + actual_mode = -1 + if not (e.errno == errno.EEXIST and exist_ok and dir_exists and + actual_mode == expected_mode): + if dir_exists and actual_mode != expected_mode: + e.strerror += ' (mode %o != expected mode %o)' % ( + actual_mode, expected_mode) raise def removedirs(name): diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -15,6 +15,7 @@ import contextlib import mmap import uuid +import stat from test.script_helper import assert_python_ok # Detect whether we're on a Linux system that uses the (now outdated @@ -574,12 +575,39 @@ path = os.path.join(support.TESTFN, 'dir1') mode = 0o777 old_mask = os.umask(0o022) - os.makedirs(path, mode) - self.assertRaises(OSError, os.makedirs, path, mode) - self.assertRaises(OSError, os.makedirs, path, mode, exist_ok=False) - self.assertRaises(OSError, os.makedirs, path, 0o776, exist_ok=True) - os.makedirs(path, mode=mode, exist_ok=True) - os.umask(old_mask) + try: + os.makedirs(path, mode) + self.assertRaises(OSError, os.makedirs, path, mode) + self.assertRaises(OSError, os.makedirs, path, mode, exist_ok=False) + self.assertRaises(OSError, os.makedirs, path, 0o776, exist_ok=True) + os.makedirs(path, mode=mode, exist_ok=True) + finally: + os.umask(old_mask) + + def test_exist_ok_s_isgid_directory(self): + path = os.path.join(support.TESTFN, 'dir1') + S_ISGID = stat.S_ISGID + mode = 0o777 + old_mask = os.umask(0o022) + try: + existing_testfn_mode = stat.S_IMODE( + os.lstat(support.TESTFN).st_mode) + os.chmod(support.TESTFN, existing_testfn_mode | S_ISGID) + if (os.lstat(support.TESTFN).st_mode & S_ISGID != S_ISGID): + raise unittest.SkipTest('No support for S_ISGID dir mode.') + # The os should apply S_ISGID from the parent dir for us, but + # this test need not depend on that behavior. Be explicit. + os.makedirs(path, mode | S_ISGID) + # http://bugs.python.org/issue14992 + # Should not fail when the bit is already set. + os.makedirs(path, mode, exist_ok=True) + # remove the bit. + os.chmod(path, stat.S_IMODE(os.lstat(path).st_mode) & ~S_ISGID) + with self.assertRaises(OSError): + # Should fail when the bit is not already set when demanded. 
+ os.makedirs(path, mode | S_ISGID, exist_ok=True) + finally: + os.umask(old_mask) def test_exist_ok_existing_regular_file(self): base = support.TESTFN diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,11 @@ Core and Builtins ----------------- +- Issue #14992: os.makedirs(path, exist_ok=True) would raise an OSError + when the path existed and had the S_ISGID mode bit set when it was + not explicitly asked for. This is no longer an exception as mkdir + cannot control if the OS sets that bit for it or not. + - Issue #14775: Fix a potential quadratic dict build-up due to the garbage collector repeatedly trying to untrack dicts. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 23:36:53 2012 From: python-checkins at python.org (gregory.p.smith) Date: Sun, 03 Jun 2012 23:36:53 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Fixes_Issue_=2314992=3A_os=2Emakedirs=28path=2C_exist=5Fok?= =?utf8?q?=3DTrue=29_would_raise_an_OSError?= Message-ID: http://hg.python.org/cpython/rev/eed26e508b7e changeset: 77330:eed26e508b7e parent: 77328:0808cb8c60fd parent: 77329:fef529f3de5b user: Gregory P. Smith date: Sun Jun 03 14:35:09 2012 -0700 summary: Fixes Issue #14992: os.makedirs(path, exist_ok=True) would raise an OSError when the path existed and had the S_ISGID mode bit set when it was not explicitly asked for. This is no longer an exception as mkdir cannot control if the OS sets that bit for it or not. files: Lib/os.py | 16 ++++++++++++++-- Lib/test/test_os.py | 25 +++++++++++++++++++++++++ Misc/NEWS | 5 +++++ 3 files changed, 44 insertions(+), 2 deletions(-) diff --git a/Lib/os.py b/Lib/os.py --- a/Lib/os.py +++ b/Lib/os.py @@ -160,8 +160,20 @@ try: mkdir(name, mode) except OSError as e: - if not (e.errno == errno.EEXIST and exist_ok and path.isdir(name) and - st.S_IMODE(lstat(name).st_mode) == _get_masked_mode(mode)): + dir_exists = path.isdir(name) + expected_mode = _get_masked_mode(mode) + if dir_exists: + # S_ISGID is automatically copied by the OS from parent to child + # directories on mkdir. Don't consider it being set to be a mode + # mismatch as mkdir does not unset it when not specified in mode. + actual_mode = st.S_IMODE(lstat(name).st_mode) & ~st.S_ISGID + else: + actual_mode = -1 + if not (e.errno == errno.EEXIST and exist_ok and dir_exists and + actual_mode == expected_mode): + if dir_exists and actual_mode != expected_mode: + e.strerror += ' (mode %o != expected mode %o)' % ( + actual_mode, expected_mode) raise def removedirs(name): diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -838,6 +838,31 @@ os.makedirs(path, mode=mode, exist_ok=True) os.umask(old_mask) + def test_exist_ok_s_isgid_directory(self): + path = os.path.join(support.TESTFN, 'dir1') + S_ISGID = stat.S_ISGID + mode = 0o777 + old_mask = os.umask(0o022) + try: + existing_testfn_mode = stat.S_IMODE( + os.lstat(support.TESTFN).st_mode) + os.chmod(support.TESTFN, existing_testfn_mode | S_ISGID) + if (os.lstat(support.TESTFN).st_mode & S_ISGID != S_ISGID): + raise unittest.SkipTest('No support for S_ISGID dir mode.') + # The os should apply S_ISGID from the parent dir for us, but + # this test need not depend on that behavior. Be explicit. + os.makedirs(path, mode | S_ISGID) + # http://bugs.python.org/issue14992 + # Should not fail when the bit is already set. + os.makedirs(path, mode, exist_ok=True) + # remove the bit. 
+ os.chmod(path, stat.S_IMODE(os.lstat(path).st_mode) & ~S_ISGID) + with self.assertRaises(OSError): + # Should fail when the bit is not already set when demanded. + os.makedirs(path, mode | S_ISGID, exist_ok=True) + finally: + os.umask(old_mask) + def test_exist_ok_existing_regular_file(self): base = support.TESTFN path = os.path.join(support.TESTFN, 'dir1') diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,11 @@ Library ------- +- Issue #14992: os.makedirs(path, exist_ok=True) would raise an OSError + when the path existed and had the S_ISGID mode bit set when it was + not explicitly asked for. This is no longer an exception as mkdir + cannot control if the OS sets that bit for it or not. + - Issue #14989: Make the CGI enable option to http.server available via command line. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 23:36:54 2012 From: python-checkins at python.org (gregory.p.smith) Date: Sun, 03 Jun 2012 23:36:54 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Move_the_14992_?= =?utf8?q?note_to_the_correct_section=2E?= Message-ID: http://hg.python.org/cpython/rev/4c07e4806e69 changeset: 77331:4c07e4806e69 branch: 3.2 parent: 77329:fef529f3de5b user: Gregory P. Smith date: Sun Jun 03 14:36:01 2012 -0700 summary: Move the 14992 note to the correct section. files: Misc/NEWS | 119 +++++++++++++++++++++-------------------- 1 files changed, 60 insertions(+), 59 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,71 +10,71 @@ Core and Builtins ----------------- +- Issue #14775: Fix a potential quadratic dict build-up due to the garbage + collector repeatedly trying to untrack dicts. + +- Issue #14494: Fix __future__.py and its documentation to note that + absolute imports are the default behavior in 3.0 instead of 2.7. + Patch by Sven Marnach. + +- Issue #14761: Fix potential leak on an error case in the import machinery. + +- Issue #14699: Fix calling the classmethod descriptor directly. + +- Issue #14433: Prevent msvcrt crash in interactive prompt when stdin + is closed. + +- Issue #11603 (again): Setting __repr__ to __str__ now raises a RuntimeError + when repr() or str() is called on such an object. + +- Issue #14658: Fix binding a special method to a builtin implementation of a + special method with a different name. + +- Issue #14630: Fix a memory access bug for instances of a subclass of int + with value 0. + +- Issue #14612: Fix jumping around with blocks by setting f_lineno. + +- Issue #14607: Fix keyword-only arguments which started with ``__``. + +- Issue #13889: Check and (if necessary) set FPU control word before calling + any of the dtoa.c string <-> float conversion functions, on MSVC builds of + Python. This fixes issues when embedding Python in a Delphi app. + +- Issue #14474: Save and restore exception state in thread.start_new_thread() + while writing error message if the thread leaves a unhandled exception. + +- Issue #13019: Fix potential reference leaks in bytearray.extend(). Patch + by Suman Saha. + +- Issue #14378: Fix compiling ast.ImportFrom nodes with a "__future__" string as + the module name that was not interned. + +- Issue #14331: Use significantly less stack space when importing modules by + allocating path buffers on the heap instead of the stack. + +- Issue #14334: Prevent in a segfault in type.__getattribute__ when it was not + passed strings. + +- Issue #1469629: Allow cycles through an object's __dict__ slot to be + collected. 
(For example if ``x.__dict__ is x``). + +- Issue #14172: Fix reference leak when marshalling a buffer-like object + (other than a bytes object). + +- Issue #13521: dict.setdefault() now does only one lookup for the given key, + making it "atomic" for many purposes. Patch by Filip Gruszczy?ski. + +- Issue #14471: Fix a possible buffer overrun in the winreg module. + +Library +------- + - Issue #14992: os.makedirs(path, exist_ok=True) would raise an OSError when the path existed and had the S_ISGID mode bit set when it was not explicitly asked for. This is no longer an exception as mkdir cannot control if the OS sets that bit for it or not. -- Issue #14775: Fix a potential quadratic dict build-up due to the garbage - collector repeatedly trying to untrack dicts. - -- Issue #14494: Fix __future__.py and its documentation to note that - absolute imports are the default behavior in 3.0 instead of 2.7. - Patch by Sven Marnach. - -- Issue #14761: Fix potential leak on an error case in the import machinery. - -- Issue #14699: Fix calling the classmethod descriptor directly. - -- Issue #14433: Prevent msvcrt crash in interactive prompt when stdin - is closed. - -- Issue #11603 (again): Setting __repr__ to __str__ now raises a RuntimeError - when repr() or str() is called on such an object. - -- Issue #14658: Fix binding a special method to a builtin implementation of a - special method with a different name. - -- Issue #14630: Fix a memory access bug for instances of a subclass of int - with value 0. - -- Issue #14612: Fix jumping around with blocks by setting f_lineno. - -- Issue #14607: Fix keyword-only arguments which started with ``__``. - -- Issue #13889: Check and (if necessary) set FPU control word before calling - any of the dtoa.c string <-> float conversion functions, on MSVC builds of - Python. This fixes issues when embedding Python in a Delphi app. - -- Issue #14474: Save and restore exception state in thread.start_new_thread() - while writing error message if the thread leaves a unhandled exception. - -- Issue #13019: Fix potential reference leaks in bytearray.extend(). Patch - by Suman Saha. - -- Issue #14378: Fix compiling ast.ImportFrom nodes with a "__future__" string as - the module name that was not interned. - -- Issue #14331: Use significantly less stack space when importing modules by - allocating path buffers on the heap instead of the stack. - -- Issue #14334: Prevent in a segfault in type.__getattribute__ when it was not - passed strings. - -- Issue #1469629: Allow cycles through an object's __dict__ slot to be - collected. (For example if ``x.__dict__ is x``). - -- Issue #14172: Fix reference leak when marshalling a buffer-like object - (other than a bytes object). - -- Issue #13521: dict.setdefault() now does only one lookup for the given key, - making it "atomic" for many purposes. Patch by Filip Gruszczy?ski. - -- Issue #14471: Fix a possible buffer overrun in the winreg module. - -Library -------- - - Issue #14962: Update text coloring in IDLE shell window after changing options. Patch by Roger Serwy. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 23:36:55 2012 From: python-checkins at python.org (gregory.p.smith) Date: Sun, 03 Jun 2012 23:36:55 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_null_merge_from_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/55cb07c01717 changeset: 77332:55cb07c01717 parent: 77330:eed26e508b7e parent: 77331:4c07e4806e69 user: Gregory P. 
Smith date: Sun Jun 03 14:36:38 2012 -0700 summary: null merge from 3.2 files: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 23:40:41 2012 From: python-checkins at python.org (gregory.p.smith) Date: Sun, 03 Jun 2012 23:40:41 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Revert_the_modi?= =?utf8?q?fication_of_e=2Estrerror_in_3=2E2_as_that_kind_of_change_could?= Message-ID: http://hg.python.org/cpython/rev/f3ce3e874a58 changeset: 77333:f3ce3e874a58 branch: 3.2 parent: 77331:4c07e4806e69 user: Gregory P. Smith date: Sun Jun 03 14:39:26 2012 -0700 summary: Revert the modification of e.strerror in 3.2 as that kind of change could break someone's over specified test that depends on the exact error message. files: Lib/os.py | 3 --- 1 files changed, 0 insertions(+), 3 deletions(-) diff --git a/Lib/os.py b/Lib/os.py --- a/Lib/os.py +++ b/Lib/os.py @@ -163,9 +163,6 @@ actual_mode = -1 if not (e.errno == errno.EEXIST and exist_ok and dir_exists and actual_mode == expected_mode): - if dir_exists and actual_mode != expected_mode: - e.strerror += ' (mode %o != expected mode %o)' % ( - actual_mode, expected_mode) raise def removedirs(name): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 3 23:40:42 2012 From: python-checkins at python.org (gregory.p.smith) Date: Sun, 03 Jun 2012 23:40:42 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_null_merge_=28not_removing_the_new_feature_in_3=2E3=29?= Message-ID: http://hg.python.org/cpython/rev/5c0ee973a39a changeset: 77334:5c0ee973a39a parent: 77332:55cb07c01717 parent: 77333:f3ce3e874a58 user: Gregory P. Smith date: Sun Jun 03 14:40:32 2012 -0700 summary: null merge (not removing the new feature in 3.3) files: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 4 03:18:26 2012 From: python-checkins at python.org (benjamin.peterson) Date: Mon, 04 Jun 2012 03:18:26 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_=5F=5FGNUC=5F=5F_does_not_i?= =?utf8?q?mply_gcc_version_is_present=2C_so_just_check_for_version?= Message-ID: http://hg.python.org/cpython/rev/696d3631a4a1 changeset: 77335:696d3631a4a1 user: Benjamin Peterson date: Sun Jun 03 18:15:15 2012 -0700 summary: __GNUC__ does not imply gcc version is present, so just check for version (closes #14994) files: Include/pyerrors.h | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Include/pyerrors.h b/Include/pyerrors.h --- a/Include/pyerrors.h +++ b/Include/pyerrors.h @@ -87,7 +87,7 @@ PyAPI_FUNC(void) PyErr_SetExcInfo(PyObject *, PyObject *, PyObject *); #if defined(__clang__) || \ - (defined(__GNUC__) && \ + (defined(__GNUC_MAJOR__) && \ ((__GNUC_MAJOR__ >= 3) || \ (__GNUC_MAJOR__ == 2) && (__GNUC_MINOR__ >= 5))) #define _Py_NO_RETURN __attribute__((__noreturn__)) -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Mon Jun 4 05:52:57 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Mon, 04 Jun 2012 05:52:57 +0200 Subject: [Python-checkins] Daily reference leaks (696d3631a4a1): sum=464 Message-ID: results for 696d3631a4a1 on branch "default" -------------------------------------------- test_dbm leaked [0, 2, 0] references, sum=2 test_smtplib leaked [154, 154, 154] references, sum=462 Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogESyyWk', '-x'] From python-checkins at python.org Mon Jun 4 09:21:39 
2012 From: python-checkins at python.org (raymond.hettinger) Date: Mon, 04 Jun 2012 09:21:39 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Separate_key_creation_logic?= =?utf8?q?_from_the_sequence_class_that_memoizes_its_hash?= Message-ID: http://hg.python.org/cpython/rev/2d5ca0ea2aab changeset: 77336:2d5ca0ea2aab user: Raymond Hettinger date: Mon Jun 04 00:21:14 2012 -0700 summary: Separate key creation logic from the sequence class that memoizes its hash value. files: Lib/functools.py | 43 ++++++++++++++++++++--------------- 1 files changed, 24 insertions(+), 19 deletions(-) diff --git a/Lib/functools.py b/Lib/functools.py --- a/Lib/functools.py +++ b/Lib/functools.py @@ -142,30 +142,35 @@ _CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"]) -class _CacheKey(list): - 'Make a cache key from optionally typed positional and keyword arguments' - +class _HashedSeq(list): __slots__ = 'hashvalue' - def __init__(self, args, kwds, typed, - kwd_mark = (object(),), - sorted=sorted, tuple=tuple, type=type, hash=hash): - key = args - if kwds: - sorted_items = sorted(kwds.items()) - key += kwd_mark - for item in sorted_items: - key += item - if typed: - key += tuple(type(v) for v in args) - if kwds: - key += tuple(type(v) for k, v in sorted_items) - self[:] = key - self.hashvalue = hash(key) # so we only have to hash just once + def __init__(self, tup, hash=hash): + self[:] = tup + self.hashvalue = hash(tup) def __hash__(self): return self.hashvalue +def _make_key(args, kwds, typed, + kwd_mark = (object(),), + fasttypes = {int, str, frozenset, type(None)}, + sorted=sorted, tuple=tuple, type=type, len=len): + 'Make a cache key from optionally typed positional and keyword arguments' + key = args + if kwds: + sorted_items = sorted(kwds.items()) + key += kwd_mark + for item in sorted_items: + key += item + if typed: + key += tuple(type(v) for v in args) + if kwds: + key += tuple(type(v) for k, v in sorted_items) + elif len(key) == 1 and type(key[0]) in fasttypes: + return key[0] + return _HashedSeq(key) + def lru_cache(maxsize=128, typed=False): """Least-recently-used cache decorator. @@ -193,7 +198,7 @@ # Constants shared by all lru cache instances: sentinel = object() # unique object used to signal cache misses - make_key = _CacheKey # build a key from the function arguments + make_key = _make_key # build a key from the function arguments PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields def decorating_function(user_function): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 4 09:32:35 2012 From: python-checkins at python.org (raymond.hettinger) Date: Mon, 04 Jun 2012 09:32:35 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Add_usage_note=2E?= Message-ID: http://hg.python.org/cpython/rev/44a382d49e86 changeset: 77337:44a382d49e86 user: Raymond Hettinger date: Mon Jun 04 00:32:15 2012 -0700 summary: Add usage note. files: Doc/library/functools.rst | 5 +++-- 1 files changed, 3 insertions(+), 2 deletions(-) diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst --- a/Doc/library/functools.rst +++ b/Doc/library/functools.rst @@ -49,8 +49,9 @@ Since a dictionary is used to cache results, the positional and keyword arguments to the function must be hashable. - If *maxsize* is set to None, the LRU feature is disabled and the cache - can grow without bound. + If *maxsize* is set to None, the LRU feature is disabled and the cache can + grow without bound. 
The LRU feature performs best when *maxsize* is a + power-of-two. If *typed* is set to True, function arguments of different types will be cached separately. For example, ``f(3)`` and ``f(3.0)`` will be treated -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 4 14:20:17 2012 From: python-checkins at python.org (hynek.schlawack) Date: Mon, 04 Jun 2012 14:20:17 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_=2314814=3A_Use_correct_com?= =?utf8?q?parison_for_IP_addresses?= Message-ID: http://hg.python.org/cpython/rev/6808a72fc9ec changeset: 77338:6808a72fc9ec user: Hynek Schlawack date: Mon Jun 04 14:19:39 2012 +0200 summary: #14814: Use correct comparison for IP addresses ipaddress._BaseV4.is_unspecified() compared IP addresses using "in" which fails. files: Lib/ipaddress.py | 2 +- Lib/test/test_ipaddress.py | 1 + 2 files changed, 2 insertions(+), 1 deletions(-) diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -1130,7 +1130,7 @@ """ unspecified_address = IPv4Address('0.0.0.0') if isinstance(self, _BaseAddress): - return self in unspecified_address + return self == unspecified_address return (self.network_address == self.broadcast_address == unspecified_address) diff --git a/Lib/test/test_ipaddress.py b/Lib/test/test_ipaddress.py --- a/Lib/test/test_ipaddress.py +++ b/Lib/test/test_ipaddress.py @@ -837,6 +837,7 @@ self.assertEqual(False, ipaddress.ip_network('128.0.0.0').is_loopback) # test addresses + self.assertEqual(True, ipaddress.ip_address('0.0.0.0').is_unspecified) self.assertEqual(True, ipaddress.ip_address('224.1.1.1').is_multicast) self.assertEqual(False, ipaddress.ip_address('240.0.0.0').is_multicast) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 4 15:52:00 2012 From: python-checkins at python.org (barry.warsaw) Date: Mon, 04 Jun 2012 15:52:00 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Eric_Snow=27s_implementatio?= =?utf8?q?n_of_PEP_421=2E?= Message-ID: http://hg.python.org/cpython/rev/9c445f4695c1 changeset: 77339:9c445f4695c1 parent: 77328:0808cb8c60fd user: Barry Warsaw date: Sun Jun 03 16:18:47 2012 -0400 summary: Eric Snow's implementation of PEP 421. Issue 14673: Add sys.implementation files: Doc/library/sys.rst | 38 ++++ Doc/library/types.rst | 24 ++ Include/Python.h | 1 + Include/namespaceobject.h | 17 + Lib/test/test_sys.py | 18 ++ Lib/test/test_types.py | 143 ++++++++++++++++- Lib/types.py | 1 + Makefile.pre.in | 2 + Objects/namespaceobject.c | 225 ++++++++++++++++++++++++++ Objects/object.c | 3 + Python/sysmodule.c | 72 ++++++++- 11 files changed, 541 insertions(+), 3 deletions(-) diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -616,6 +616,44 @@ Thus ``2.1.0a3`` is hexversion ``0x020100a3``. + +.. data:: implementation + + An object containing the information about the implementation of the + currently running Python interpreter. Its attributes are the those + that all Python implementations must implement. They are described + below. + + *name* is the implementation's identifier, like ``'cpython'``. + + *version* is a named tuple, in the same format as + :data:`sys.version_info`. It represents the version of the Python + *implementation*. This has a distinct meaning from the specific + version of the Python *language* to which the currently running + interpreter conforms, which ``sys.version_info`` represents. 
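An illustrative aside, not part of the archived checkin: the attributes described above can be inspected directly. A minimal sketch, with values a CPython 3.3 development build might report (the literals are assumptions, not output captured from this changeset)::

    import sys

    sys.implementation.name         # 'cpython'
    sys.implementation.cache_tag    # 'cpython-33', used in cached-module filenames
    sys.implementation.hexversion   # encoded the same way as sys.hexversion
    sys.implementation.version[:2]  # named tuple with the same layout as sys.version_info

For CPython, ``sys.implementation.version`` and ``sys.version_info`` carry the same value, as the documentation goes on to note.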
For + example, for PyPy 1.8 ``sys.implementation.version`` might be + ``sys.version_info(1, 8, 0, 'final', 0)``, whereas ``sys.version_info`` + would be ``sys.version_info(1, 8, 0, 'final', 0)``. For CPython they + are the same value, since it is the reference implementation. + + *hexversion* is the implementation version in hexadecimal format, like + :data:`sys.hexversion`. + + *cache_tag* is the tag used by the import machinery in the filenames of + cached modules. By convention, it would be a composite of the + implementation's name and version, like ``'cpython-33'``. However, a + Python implementation may use some other value if appropriate. If + ``cache_tag`` is set to ``None``, it indicates that module caching should + be disabled. + + Regardless of its contents, :data:`sys.implementation` will not + change during a run of the interpreter, nor between implementation + versions. (It may change between Python language versions, + however.) See `PEP 421` for more information. + + .. versionadded:: 3.3 + + .. data:: int_info A :term:`struct sequence` that holds information about Python's internal diff --git a/Doc/library/types.rst b/Doc/library/types.rst --- a/Doc/library/types.rst +++ b/Doc/library/types.rst @@ -194,3 +194,27 @@ Return a new view of the underlying mapping's values. +.. class:: SimpleNamespace + + A simple :class:`object` subclass that provides attribute access to its + namespace, as well as a meaningful repr. + + Unlike :class:`object`, with ``SimpleNamespace`` you can add and remove + attributes. If a ``SimpleNamespace`` object is initialized with keyword + arguments, those are directly added to the underlying namespace. + + The type is roughly equivalent to the following code:: + + class SimpleNamespace: + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + def __repr__(self): + keys = sorted(self.__dict__) + items = ("{}={!r}".format(k, self.__dict__[k]) for k in keys) + return "{}({})".format(type(self).__name__, ", ".join(items)) + + ``SimpleNamespace`` may be useful as a replacement for ``class NS: pass``. + However, for a structured record type use :func:`~collections.namedtuple` + instead. + + .. versionadded:: 3.3 diff --git a/Include/Python.h b/Include/Python.h --- a/Include/Python.h +++ b/Include/Python.h @@ -101,6 +101,7 @@ #include "warnings.h" #include "weakrefobject.h" #include "structseq.h" +#include "namespaceobject.h" #include "codecs.h" #include "pyerrors.h" diff --git a/Include/namespaceobject.h b/Include/namespaceobject.h new file mode 100644 --- /dev/null +++ b/Include/namespaceobject.h @@ -0,0 +1,17 @@ + +/* simple namespace object interface */ + +#ifndef NAMESPACEOBJECT_H +#define NAMESPACEOBJECT_H +#ifdef __cplusplus +extern "C" { +#endif + +PyAPI_DATA(PyTypeObject) _PyNamespace_Type; + +PyAPI_FUNC(PyObject *) _PyNamespace_New(PyObject *kwds); + +#ifdef __cplusplus +} +#endif +#endif /* !NAMESPACEOBJECT_H */ diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -581,6 +581,24 @@ expected = None self.check_fsencoding(fs_encoding, expected) + def test_implementation(self): + # This test applies to all implementations equally. 
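An illustrative aside, not part of the archived checkin: the ``types.SimpleNamespace`` class documented earlier in this changeset, and exercised by the SimpleNamespaceTests added below, behaves roughly as sketched here (assuming the types module from this changeset)::

    import types

    ns = types.SimpleNamespace(x=1, y=2)
    ns.z = 3        # attributes can be added...
    del ns.x        # ...and removed, unlike with a plain object()
    vars(ns)        # {'y': 2, 'z': 3}
    repr(ns)        # "namespace(y=2, z=3)" -- attribute names are sorted in the repr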
+ + levels = {'alpha': 0xA, 'beta': 0xB, 'candidate': 0xC, 'release': 0xF} + + self.assertTrue(hasattr(sys.implementation, 'name')) + self.assertTrue(hasattr(sys.implementation, 'version')) + self.assertTrue(hasattr(sys.implementation, 'hexversion')) + self.assertTrue(hasattr(sys.implementation, 'cache_tag')) + + version = sys.implementation.version + self.assertEqual(version[:2], (version.major, version.minor)) + + hexversion = (version.major << 24 | version.minor << 16 | + version.micro << 8 | levels[version.releaselevel] << 4 | + version.serial << 0) + self.assertEqual(sys.implementation.hexversion, hexversion) + class SizeofTest(unittest.TestCase): diff --git a/Lib/test/test_types.py b/Lib/test/test_types.py --- a/Lib/test/test_types.py +++ b/Lib/test/test_types.py @@ -996,8 +996,149 @@ X = types.new_class("X", (int(), C)) +class SimpleNamespaceTests(unittest.TestCase): + + def test_constructor(self): + ns1 = types.SimpleNamespace() + ns2 = types.SimpleNamespace(x=1, y=2) + ns3 = types.SimpleNamespace(**dict(x=1, y=2)) + + with self.assertRaises(TypeError): + types.SimpleNamespace(1, 2, 3) + + self.assertEqual(len(ns1.__dict__), 0) + self.assertEqual(vars(ns1), {}) + self.assertEqual(len(ns2.__dict__), 2) + self.assertEqual(vars(ns2), {'y': 2, 'x': 1}) + self.assertEqual(len(ns3.__dict__), 2) + self.assertEqual(vars(ns3), {'y': 2, 'x': 1}) + + def test_unbound(self): + ns1 = vars(types.SimpleNamespace()) + ns2 = vars(types.SimpleNamespace(x=1, y=2)) + + self.assertEqual(ns1, {}) + self.assertEqual(ns2, {'y': 2, 'x': 1}) + + def test_underlying_dict(self): + ns1 = types.SimpleNamespace() + ns2 = types.SimpleNamespace(x=1, y=2) + ns3 = types.SimpleNamespace(a=True, b=False) + mapping = ns3.__dict__ + del ns3 + + self.assertEqual(ns1.__dict__, {}) + self.assertEqual(ns2.__dict__, {'y': 2, 'x': 1}) + self.assertEqual(mapping, dict(a=True, b=False)) + + def test_attrget(self): + ns = types.SimpleNamespace(x=1, y=2, w=3) + + self.assertEqual(ns.x, 1) + self.assertEqual(ns.y, 2) + self.assertEqual(ns.w, 3) + with self.assertRaises(AttributeError): + ns.z + + def test_attrset(self): + ns1 = types.SimpleNamespace() + ns2 = types.SimpleNamespace(x=1, y=2, w=3) + ns1.a = 'spam' + ns1.b = 'ham' + ns2.z = 4 + ns2.theta = None + + self.assertEqual(ns1.__dict__, dict(a='spam', b='ham')) + self.assertEqual(ns2.__dict__, dict(x=1, y=2, w=3, z=4, theta=None)) + + def test_attrdel(self): + ns1 = types.SimpleNamespace() + ns2 = types.SimpleNamespace(x=1, y=2, w=3) + + with self.assertRaises(AttributeError): + del ns1.spam + with self.assertRaises(AttributeError): + del ns2.spam + + del ns2.y + self.assertEqual(vars(ns2), dict(w=3, x=1)) + ns2.y = 'spam' + self.assertEqual(vars(ns2), dict(w=3, x=1, y='spam')) + del ns2.y + self.assertEqual(vars(ns2), dict(w=3, x=1)) + + ns1.spam = 5 + self.assertEqual(vars(ns1), dict(spam=5)) + del ns1.spam + self.assertEqual(vars(ns1), {}) + + def test_repr(self): + ns1 = types.SimpleNamespace(x=1, y=2, w=3) + ns2 = types.SimpleNamespace() + ns2.x = "spam" + ns2._y = 5 + + self.assertEqual(repr(ns1), "namespace(w=3, x=1, y=2)") + self.assertEqual(repr(ns2), "namespace(_y=5, x='spam')") + + def test_nested(self): + ns1 = types.SimpleNamespace(a=1, b=2) + ns2 = types.SimpleNamespace() + ns3 = types.SimpleNamespace(x=ns1) + ns2.spam = ns1 + ns2.ham = '?' 
+ ns2.spam = ns3 + + self.assertEqual(vars(ns1), dict(a=1, b=2)) + self.assertEqual(vars(ns2), dict(spam=ns3, ham='?')) + self.assertEqual(ns2.spam, ns3) + self.assertEqual(vars(ns3), dict(x=ns1)) + self.assertEqual(ns3.x.a, 1) + + def test_recursive(self): + ns1 = types.SimpleNamespace(c='cookie') + ns2 = types.SimpleNamespace() + ns3 = types.SimpleNamespace(x=1) + ns1.spam = ns1 + ns2.spam = ns3 + ns3.spam = ns2 + + self.assertEqual(ns1.spam, ns1) + self.assertEqual(ns1.spam.spam, ns1) + self.assertEqual(ns1.spam.spam, ns1.spam) + self.assertEqual(ns2.spam, ns3) + self.assertEqual(ns3.spam, ns2) + self.assertEqual(ns2.spam.spam, ns2) + + def test_recursive_repr(self): + ns1 = types.SimpleNamespace(c='cookie') + ns2 = types.SimpleNamespace() + ns3 = types.SimpleNamespace(x=1) + ns1.spam = ns1 + ns2.spam = ns3 + ns3.spam = ns2 + + self.assertEqual(repr(ns1), + "namespace(c='cookie', spam=namespace(...))") + self.assertEqual(repr(ns2), + "namespace(spam=namespace(spam=namespace(...), x=1))") + + def test_as_dict(self): + ns = types.SimpleNamespace(spam='spamspamspam') + + with self.assertRaises(TypeError): + len(ns) + with self.assertRaises(TypeError): + iter(ns) + with self.assertRaises(TypeError): + 'spam' in ns + with self.assertRaises(TypeError): + ns['spam'] + + def test_main(): - run_unittest(TypesTests, MappingProxyTests, ClassCreationTests) + run_unittest(TypesTests, MappingProxyTests, ClassCreationTests, + SimpleNamespaceTests) if __name__ == '__main__': test_main() diff --git a/Lib/types.py b/Lib/types.py --- a/Lib/types.py +++ b/Lib/types.py @@ -13,6 +13,7 @@ LambdaType = type(lambda: None) # Same as FunctionType CodeType = type(_f.__code__) MappingProxyType = type(type.__dict__) +SimpleNamespace = type(sys.implementation) def _g(): yield 1 diff --git a/Makefile.pre.in b/Makefile.pre.in --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -392,6 +392,7 @@ Objects/memoryobject.o \ Objects/methodobject.o \ Objects/moduleobject.o \ + Objects/namespaceobject.o \ Objects/object.o \ Objects/obmalloc.o \ Objects/capsule.o \ @@ -766,6 +767,7 @@ $(srcdir)/Include/methodobject.h \ $(srcdir)/Include/modsupport.h \ $(srcdir)/Include/moduleobject.h \ + $(srcdir)/Include/namespaceobject.h \ $(srcdir)/Include/node.h \ $(srcdir)/Include/object.h \ $(srcdir)/Include/objimpl.h \ diff --git a/Objects/namespaceobject.c b/Objects/namespaceobject.c new file mode 100644 --- /dev/null +++ b/Objects/namespaceobject.c @@ -0,0 +1,225 @@ +/* namespace object implementation */ + +#include "Python.h" +#include "structmember.h" + + +typedef struct { + PyObject_HEAD + PyObject *ns_dict; +} _PyNamespaceObject; + + +static PyMemberDef namespace_members[] = { + {"__dict__", T_OBJECT, offsetof(_PyNamespaceObject, ns_dict), READONLY}, + {NULL} +}; + + +/* Methods */ + +static PyObject * +namespace_new(PyTypeObject *type, PyObject *args, PyObject *kwds) +{ + _PyNamespaceObject *ns; + ns = PyObject_GC_New(_PyNamespaceObject, &_PyNamespace_Type); + if (ns == NULL) + return NULL; + + ns->ns_dict = PyDict_New(); + if (ns->ns_dict == NULL) { + Py_DECREF(ns); + return NULL; + } + + PyObject_GC_Track(ns); + return (PyObject *)ns; +} + + +static int +namespace_init(_PyNamespaceObject *ns, PyObject *args, PyObject *kwds) +{ + /* ignore args if it's NULL or empty */ + if (args != NULL) { + Py_ssize_t argcount = PyObject_Size(args); + if (argcount < 0) + return argcount; + else if (argcount > 0) { + PyErr_Format(PyExc_TypeError, "no positional arguments expected"); + return -1; + } + } + if (kwds == NULL) + return 0; + return 
PyDict_Update(ns->ns_dict, kwds); +} + + +static void +namespace_dealloc(_PyNamespaceObject *ns) +{ + PyObject_GC_UnTrack(ns); + Py_CLEAR(ns->ns_dict); + Py_TYPE(ns)->tp_free((PyObject *)ns); +} + + +static PyObject * +namespace_repr(_PyNamespaceObject *ns) +{ + int i, loop_error = 0; + PyObject *pairs = NULL, *d = NULL, *keys = NULL, *keys_iter = NULL; + PyObject *key; + PyObject *separator, *pairsrepr, *repr = NULL; + + i = Py_ReprEnter((PyObject *)ns); + if (i != 0) { + return i > 0 ? PyUnicode_FromString("namespace(...)") : NULL; + } + + pairs = PyList_New(0); + if (pairs == NULL) + goto error; + + d = ((_PyNamespaceObject *)ns)->ns_dict; + assert(d != NULL); + Py_INCREF(d); + + keys = PyDict_Keys(d); + if (keys == NULL) + goto error; + if (PyList_Sort(keys) != 0) + goto error; + + keys_iter = PyObject_GetIter(keys); + if (keys_iter == NULL) + goto error; + + while ((key = PyIter_Next(keys_iter)) != NULL) { + if (PyUnicode_Check(key) && PyUnicode_GET_SIZE(key) > 0) { + PyObject *value, *item; + + value = PyDict_GetItem(d, key); + assert(value != NULL); + + item = PyUnicode_FromFormat("%S=%R", key, value); + if (item == NULL) { + loop_error = 1; + } + else { + loop_error = PyList_Append(pairs, item); + Py_DECREF(item); + } + } + + Py_DECREF(key); + if (loop_error) + goto error; + } + + separator = PyUnicode_FromString(", "); + if (separator == NULL) + goto error; + + pairsrepr = PyUnicode_Join(separator, pairs); + Py_DECREF(separator); + if (pairsrepr == NULL) + goto error; + + repr = PyUnicode_FromFormat("%s(%S)", + ((PyObject *)ns)->ob_type->tp_name, pairsrepr); + Py_DECREF(pairsrepr); + +error: + Py_XDECREF(pairs); + Py_XDECREF(d); + Py_XDECREF(keys); + Py_XDECREF(keys_iter); + Py_ReprLeave((PyObject *)ns); + + return repr; +} + + +static int +namespace_traverse(_PyNamespaceObject *ns, visitproc visit, void *arg) +{ + Py_VISIT(ns->ns_dict); + return 0; +} + + +static int +namespace_clear(_PyNamespaceObject *ns) +{ + Py_CLEAR(ns->ns_dict); + return 0; +} + + +PyDoc_STRVAR(namespace_doc, +"A simple attribute-based namespace.\n\ +\n\ +namespace(**kwargs)"); + +PyTypeObject _PyNamespace_Type = { + PyVarObject_HEAD_INIT(&PyType_Type, 0) + "namespace", /* tp_name */ + sizeof(_PyNamespaceObject), /* tp_size */ + 0, /* tp_itemsize */ + (destructor)namespace_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_reserved */ + (reprfunc)namespace_repr, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + PyObject_GenericGetAttr, /* tp_getattro */ + PyObject_GenericSetAttr, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_BASETYPE, /* tp_flags */ + namespace_doc, /* tp_doc */ + (traverseproc)namespace_traverse, /* tp_traverse */ + (inquiry)namespace_clear, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + 0, /* tp_methods */ + namespace_members, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + offsetof(_PyNamespaceObject, ns_dict), /* tp_dictoffset */ + (initproc)namespace_init, /* tp_init */ + PyType_GenericAlloc, /* tp_alloc */ + (newfunc)namespace_new, /* tp_new */ + PyObject_GC_Del, /* tp_free */ +}; + + +PyObject * +_PyNamespace_New(PyObject *kwds) +{ + PyObject *ns = namespace_new(&_PyNamespace_Type, NULL, NULL); + if (ns == NULL) + return NULL; + + if (kwds == 
NULL) + return ns; + if (PyDict_Update(((_PyNamespaceObject *)ns)->ns_dict, kwds) != 0) { + Py_DECREF(ns); + return NULL; + } + + return (PyObject *)ns; +} diff --git a/Objects/object.c b/Objects/object.c --- a/Objects/object.c +++ b/Objects/object.c @@ -1707,6 +1707,9 @@ if (PyType_Ready(&PyZip_Type) < 0) Py_FatalError("Can't initialize zip type"); + + if (PyType_Ready(&_PyNamespace_Type) < 0) + Py_FatalError("Can't initialize namespace type"); } diff --git a/Python/sysmodule.c b/Python/sysmodule.c --- a/Python/sysmodule.c +++ b/Python/sysmodule.c @@ -1261,6 +1261,7 @@ float_info -- a struct sequence with information about the float implementation.\n\ float_repr_style -- string indicating the style of repr() output for floats\n\ hexversion -- version information encoded as a single integer\n\ +implementation -- Python implementation information.\n\ int_info -- a struct sequence with information about the int implementation.\n\ maxsize -- the largest supported length of containers.\n\ maxunicode -- the value of the largest Unicode codepoint\n\ @@ -1454,6 +1455,69 @@ return version_info; } +static PyObject * +make_impl_info(PyObject *version_info) +{ + int res; + PyObject *impl_info, *value, *ns; + + impl_info = PyDict_New(); + if (impl_info == NULL) + return NULL; + + /* populate the dict */ + +#define NAME "cpython" +#define QUOTE(arg) #arg +#define STRIFY(name) QUOTE(name) +#define MAJOR STRIFY(PY_MAJOR_VERSION) +#define MINOR STRIFY(PY_MINOR_VERSION) +#define TAG NAME "-" MAJOR MINOR + value = PyUnicode_FromString(NAME); + if (value == NULL) + goto error; + res = PyDict_SetItemString(impl_info, "name", value); + Py_DECREF(value); + if (res < 0) + goto error; + + value = PyUnicode_FromString(TAG); + if (value == NULL) + goto error; + res = PyDict_SetItemString(impl_info, "cache_tag", value); + Py_DECREF(value); + if (res < 0) + goto error; +#undef NAME +#undef QUOTE +#undef STRIFY +#undef MAJOR +#undef MINOR +#undef TAG + + res = PyDict_SetItemString(impl_info, "version", version_info); + if (res < 0) + goto error; + + value = PyLong_FromLong(PY_VERSION_HEX); + if (value == NULL) + goto error; + res = PyDict_SetItemString(impl_info, "hexversion", value); + Py_DECREF(value); + if (res < 0) + goto error; + + /* dict ready */ + + ns = _PyNamespace_New(impl_info); + Py_DECREF(impl_info); + return ns; + +error: + Py_CLEAR(impl_info); + return NULL; +} + static struct PyModuleDef sysmodule = { PyModuleDef_HEAD_INIT, "sys", @@ -1469,7 +1533,7 @@ PyObject * _PySys_Init(void) { - PyObject *m, *v, *sysdict; + PyObject *m, *v, *sysdict, *version_info; char *s; m = PyModule_Create(&sysmodule); @@ -1589,11 +1653,15 @@ /* version_info */ if (VersionInfoType.tp_name == 0) PyStructSequence_InitType(&VersionInfoType, &version_info_desc); - SET_SYS_FROM_STRING("version_info", make_version_info()); + version_info = make_version_info(); + SET_SYS_FROM_STRING("version_info", version_info); /* prevent user from creating new instances */ VersionInfoType.tp_init = NULL; VersionInfoType.tp_new = NULL; + /* implementation */ + SET_SYS_FROM_STRING("implementation", make_impl_info(version_info)); + /* flags */ if (FlagsType.tp_name == 0) PyStructSequence_InitType(&FlagsType, &flags_desc); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 4 15:52:01 2012 From: python-checkins at python.org (barry.warsaw) Date: Mon, 04 Jun 2012 15:52:01 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?b?KTogVHJ1bmsgbWVyZ2Uu?= Message-ID: 
http://hg.python.org/cpython/rev/4b83cb7d1e81 changeset: 77340:4b83cb7d1e81 parent: 77339:9c445f4695c1 parent: 77338:6808a72fc9ec user: Barry Warsaw date: Mon Jun 04 09:41:48 2012 -0400 summary: Trunk merge. files: Doc/library/functools.rst | 5 +- Include/pyerrors.h | 2 +- Lib/functools.py | 43 ++++++++++++++----------- Lib/ipaddress.py | 2 +- Lib/os.py | 16 ++++++++- Lib/test/test_ipaddress.py | 1 + Lib/test/test_os.py | 25 +++++++++++++++ Misc/NEWS | 5 +++ 8 files changed, 74 insertions(+), 25 deletions(-) diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst --- a/Doc/library/functools.rst +++ b/Doc/library/functools.rst @@ -49,8 +49,9 @@ Since a dictionary is used to cache results, the positional and keyword arguments to the function must be hashable. - If *maxsize* is set to None, the LRU feature is disabled and the cache - can grow without bound. + If *maxsize* is set to None, the LRU feature is disabled and the cache can + grow without bound. The LRU feature performs best when *maxsize* is a + power-of-two. If *typed* is set to True, function arguments of different types will be cached separately. For example, ``f(3)`` and ``f(3.0)`` will be treated diff --git a/Include/pyerrors.h b/Include/pyerrors.h --- a/Include/pyerrors.h +++ b/Include/pyerrors.h @@ -87,7 +87,7 @@ PyAPI_FUNC(void) PyErr_SetExcInfo(PyObject *, PyObject *, PyObject *); #if defined(__clang__) || \ - (defined(__GNUC__) && \ + (defined(__GNUC_MAJOR__) && \ ((__GNUC_MAJOR__ >= 3) || \ (__GNUC_MAJOR__ == 2) && (__GNUC_MINOR__ >= 5))) #define _Py_NO_RETURN __attribute__((__noreturn__)) diff --git a/Lib/functools.py b/Lib/functools.py --- a/Lib/functools.py +++ b/Lib/functools.py @@ -142,30 +142,35 @@ _CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"]) -class _CacheKey(list): - 'Make a cache key from optionally typed positional and keyword arguments' - +class _HashedSeq(list): __slots__ = 'hashvalue' - def __init__(self, args, kwds, typed, - kwd_mark = (object(),), - sorted=sorted, tuple=tuple, type=type, hash=hash): - key = args - if kwds: - sorted_items = sorted(kwds.items()) - key += kwd_mark - for item in sorted_items: - key += item - if typed: - key += tuple(type(v) for v in args) - if kwds: - key += tuple(type(v) for k, v in sorted_items) - self[:] = key - self.hashvalue = hash(key) # so we only have to hash just once + def __init__(self, tup, hash=hash): + self[:] = tup + self.hashvalue = hash(tup) def __hash__(self): return self.hashvalue +def _make_key(args, kwds, typed, + kwd_mark = (object(),), + fasttypes = {int, str, frozenset, type(None)}, + sorted=sorted, tuple=tuple, type=type, len=len): + 'Make a cache key from optionally typed positional and keyword arguments' + key = args + if kwds: + sorted_items = sorted(kwds.items()) + key += kwd_mark + for item in sorted_items: + key += item + if typed: + key += tuple(type(v) for v in args) + if kwds: + key += tuple(type(v) for k, v in sorted_items) + elif len(key) == 1 and type(key[0]) in fasttypes: + return key[0] + return _HashedSeq(key) + def lru_cache(maxsize=128, typed=False): """Least-recently-used cache decorator. 
@@ -193,7 +198,7 @@ # Constants shared by all lru cache instances: sentinel = object() # unique object used to signal cache misses - make_key = _CacheKey # build a key from the function arguments + make_key = _make_key # build a key from the function arguments PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields def decorating_function(user_function): diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -1130,7 +1130,7 @@ """ unspecified_address = IPv4Address('0.0.0.0') if isinstance(self, _BaseAddress): - return self in unspecified_address + return self == unspecified_address return (self.network_address == self.broadcast_address == unspecified_address) diff --git a/Lib/os.py b/Lib/os.py --- a/Lib/os.py +++ b/Lib/os.py @@ -160,8 +160,20 @@ try: mkdir(name, mode) except OSError as e: - if not (e.errno == errno.EEXIST and exist_ok and path.isdir(name) and - st.S_IMODE(lstat(name).st_mode) == _get_masked_mode(mode)): + dir_exists = path.isdir(name) + expected_mode = _get_masked_mode(mode) + if dir_exists: + # S_ISGID is automatically copied by the OS from parent to child + # directories on mkdir. Don't consider it being set to be a mode + # mismatch as mkdir does not unset it when not specified in mode. + actual_mode = st.S_IMODE(lstat(name).st_mode) & ~st.S_ISGID + else: + actual_mode = -1 + if not (e.errno == errno.EEXIST and exist_ok and dir_exists and + actual_mode == expected_mode): + if dir_exists and actual_mode != expected_mode: + e.strerror += ' (mode %o != expected mode %o)' % ( + actual_mode, expected_mode) raise def removedirs(name): diff --git a/Lib/test/test_ipaddress.py b/Lib/test/test_ipaddress.py --- a/Lib/test/test_ipaddress.py +++ b/Lib/test/test_ipaddress.py @@ -837,6 +837,7 @@ self.assertEqual(False, ipaddress.ip_network('128.0.0.0').is_loopback) # test addresses + self.assertEqual(True, ipaddress.ip_address('0.0.0.0').is_unspecified) self.assertEqual(True, ipaddress.ip_address('224.1.1.1').is_multicast) self.assertEqual(False, ipaddress.ip_address('240.0.0.0').is_multicast) diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -838,6 +838,31 @@ os.makedirs(path, mode=mode, exist_ok=True) os.umask(old_mask) + def test_exist_ok_s_isgid_directory(self): + path = os.path.join(support.TESTFN, 'dir1') + S_ISGID = stat.S_ISGID + mode = 0o777 + old_mask = os.umask(0o022) + try: + existing_testfn_mode = stat.S_IMODE( + os.lstat(support.TESTFN).st_mode) + os.chmod(support.TESTFN, existing_testfn_mode | S_ISGID) + if (os.lstat(support.TESTFN).st_mode & S_ISGID != S_ISGID): + raise unittest.SkipTest('No support for S_ISGID dir mode.') + # The os should apply S_ISGID from the parent dir for us, but + # this test need not depend on that behavior. Be explicit. + os.makedirs(path, mode | S_ISGID) + # http://bugs.python.org/issue14992 + # Should not fail when the bit is already set. + os.makedirs(path, mode, exist_ok=True) + # remove the bit. + os.chmod(path, stat.S_IMODE(os.lstat(path).st_mode) & ~S_ISGID) + with self.assertRaises(OSError): + # Should fail when the bit is not already set when demanded. 
+ os.makedirs(path, mode | S_ISGID, exist_ok=True) + finally: + os.umask(old_mask) + def test_exist_ok_existing_regular_file(self): base = support.TESTFN path = os.path.join(support.TESTFN, 'dir1') diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,11 @@ Library ------- +- Issue #14992: os.makedirs(path, exist_ok=True) would raise an OSError + when the path existed and had the S_ISGID mode bit set when it was + not explicitly asked for. This is no longer an exception as mkdir + cannot control if the OS sets that bit for it or not. + - Issue #14989: Make the CGI enable option to http.server available via command line. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 4 15:52:02 2012 From: python-checkins at python.org (barry.warsaw) Date: Mon, 04 Jun 2012 15:52:02 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Add_NEWS_entry=2E?= Message-ID: http://hg.python.org/cpython/rev/abb5ee3159a6 changeset: 77341:abb5ee3159a6 user: Barry Warsaw date: Mon Jun 04 09:51:53 2012 -0400 summary: Add NEWS entry. files: Misc/NEWS | 5 +++++ 1 files changed, 5 insertions(+), 0 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -7,6 +7,11 @@ *Release date: XX-XXX-2012* +Core and Builtins +----------------- + +- Issue #14673: Add Eric Snow's sys.implementation implementation. + Library ------- -- Repository URL: http://hg.python.org/cpython From brett at python.org Mon Jun 4 16:16:24 2012 From: brett at python.org (Brett Cannon) Date: Mon, 4 Jun 2012 10:16:24 -0400 Subject: [Python-checkins] cpython: Eric Snow's implementation of PEP 421. In-Reply-To: References: Message-ID: On Mon, Jun 4, 2012 at 9:52 AM, barry.warsaw wrote: > http://hg.python.org/cpython/rev/9c445f4695c1 > changeset: 77339:9c445f4695c1 > parent: 77328:0808cb8c60fd > user: Barry Warsaw > date: Sun Jun 03 16:18:47 2012 -0400 > summary: > Eric Snow's implementation of PEP 421. > > Issue 14673: Add sys.implementation > > files: > Doc/library/sys.rst | 38 ++++ > Doc/library/types.rst | 24 ++ > Include/Python.h | 1 + > Include/namespaceobject.h | 17 + > Lib/test/test_sys.py | 18 ++ > Lib/test/test_types.py | 143 ++++++++++++++++- > Lib/types.py | 1 + > Makefile.pre.in | 2 + > Objects/namespaceobject.c | 225 ++++++++++++++++++++++++++ > Objects/object.c | 3 + > Python/sysmodule.c | 72 ++++++++- > 11 files changed, 541 insertions(+), 3 deletions(-) > > > diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst > --- a/Doc/library/sys.rst > +++ b/Doc/library/sys.rst > @@ -616,6 +616,44 @@ > > Thus ``2.1.0a3`` is hexversion ``0x020100a3``. > > + > +.. data:: implementation > + > + An object containing the information about the implementation of the > + currently running Python interpreter. Its attributes are the those > "the those" -> "those" > + that all Python implementations must implement. Should you mention that VMs are allowed to add their own attributes that are not listed? > They are described > + below. > + > + *name* is the implementation's identifier, like ``'cpython'``. > Is this guaranteed to be lowercase, or does it simply happen to be lowercase in this instance? > + > + *version* is a named tuple, in the same format as > + :data:`sys.version_info`. It represents the version of the Python > + *implementation*. This has a distinct meaning from the specific > + version of the Python *language* to which the currently running > + interpreter conforms, which ``sys.version_info`` represents. 
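An illustrative aside, not part of the quoted review: the distinction being drawn in the passage above is between the implementation's own release number and the version of the Python language it implements. A sketch, using the hypothetical PyPy case discussed in the documentation under review::

    import sys

    impl = sys.implementation.version   # the implementation's own release
    lang = sys.version_info             # the language version it conforms to
    # Hypothetically, PyPy 1.8 implementing Python 2.7 would give
    # impl[:2] == (1, 8) while lang[:2] == (2, 7).
    # On CPython, the reference implementation, the two are the same value.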
For > + example, for PyPy 1.8 ``sys.implementation.version`` might be > + ``sys.version_info(1, 8, 0, 'final', 0)``, whereas ``sys.version_info`` > + would be ``sys.version_info(1, 8, 0, 'final', 0)``. I think you meant to say ``sys.version_info(2, 7, 2, 'final', 0)``. > For CPython they > + are the same value, since it is the reference implementation. > + > + *hexversion* is the implementation version in hexadecimal format, like > + :data:`sys.hexversion`. > + > + *cache_tag* is the tag used by the import machinery in the filenames of > + cached modules. By convention, it would be a composite of the > + implementation's name and version, like ``'cpython-33'``. However, a > + Python implementation may use some other value if appropriate. If > + ``cache_tag`` is set to ``None``, it indicates that module caching > should > + be disabled. > + > + Regardless of its contents, :data:`sys.implementation` will not > + change during a run of the interpreter, nor between implementation > + versions. (It may change between Python language versions, > + however.) See `PEP 421` for more information. > + > + .. versionadded:: 3.3 > + > + > .. data:: int_info > > A :term:`struct sequence` that holds information about Python's internal > diff --git a/Doc/library/types.rst b/Doc/library/types.rst > --- a/Doc/library/types.rst > +++ b/Doc/library/types.rst > @@ -194,3 +194,27 @@ > Return a new view of the underlying mapping's values. > > > +.. class:: SimpleNamespace > + > + A simple :class:`object` subclass that provides attribute access to its > + namespace, as well as a meaningful repr. > + > + Unlike :class:`object`, with ``SimpleNamespace`` you can add and remove > + attributes. If a ``SimpleNamespace`` object is initialized with > keyword > + arguments, those are directly added to the underlying namespace. > + > + The type is roughly equivalent to the following code:: > + > + class SimpleNamespace: > + def __init__(self, **kwargs): > + self.__dict__.update(kwargs) > + def __repr__(self): > + keys = sorted(self.__dict__) > + items = ("{}={!r}".format(k, self.__dict__[k]) for k in > keys) > + return "{}({})".format(type(self).__name__, ", > ".join(items)) > + > + ``SimpleNamespace`` may be useful as a replacement for ``class NS: > pass``. > + However, for a structured record type use > :func:`~collections.namedtuple` > What's with the ~? -Brett > + instead. > + > + .. versionadded:: 3.3 > diff --git a/Include/Python.h b/Include/Python.h > --- a/Include/Python.h > +++ b/Include/Python.h > @@ -101,6 +101,7 @@ > #include "warnings.h" > #include "weakrefobject.h" > #include "structseq.h" > +#include "namespaceobject.h" > > #include "codecs.h" > #include "pyerrors.h" > diff --git a/Include/namespaceobject.h b/Include/namespaceobject.h > new file mode 100644 > --- /dev/null > +++ b/Include/namespaceobject.h > @@ -0,0 +1,17 @@ > + > +/* simple namespace object interface */ > + > +#ifndef NAMESPACEOBJECT_H > +#define NAMESPACEOBJECT_H > +#ifdef __cplusplus > +extern "C" { > +#endif > + > +PyAPI_DATA(PyTypeObject) _PyNamespace_Type; > + > +PyAPI_FUNC(PyObject *) _PyNamespace_New(PyObject *kwds); > + > +#ifdef __cplusplus > +} > +#endif > +#endif /* !NAMESPACEOBJECT_H */ > diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py > --- a/Lib/test/test_sys.py > +++ b/Lib/test/test_sys.py > @@ -581,6 +581,24 @@ > expected = None > self.check_fsencoding(fs_encoding, expected) > > + def test_implementation(self): > + # This test applies to all implementations equally. 
> + > + levels = {'alpha': 0xA, 'beta': 0xB, 'candidate': 0xC, 'release': > 0xF} > + > + self.assertTrue(hasattr(sys.implementation, 'name')) > + self.assertTrue(hasattr(sys.implementation, 'version')) > + self.assertTrue(hasattr(sys.implementation, 'hexversion')) > + self.assertTrue(hasattr(sys.implementation, 'cache_tag')) > + > + version = sys.implementation.version > + self.assertEqual(version[:2], (version.major, version.minor)) > + > + hexversion = (version.major << 24 | version.minor << 16 | > + version.micro << 8 | levels[version.releaselevel] > << 4 | > + version.serial << 0) > + self.assertEqual(sys.implementation.hexversion, hexversion) > + > > class SizeofTest(unittest.TestCase): > > diff --git a/Lib/test/test_types.py b/Lib/test/test_types.py > --- a/Lib/test/test_types.py > +++ b/Lib/test/test_types.py > @@ -996,8 +996,149 @@ > X = types.new_class("X", (int(), C)) > > > +class SimpleNamespaceTests(unittest.TestCase): > + > + def test_constructor(self): > + ns1 = types.SimpleNamespace() > + ns2 = types.SimpleNamespace(x=1, y=2) > + ns3 = types.SimpleNamespace(**dict(x=1, y=2)) > + > + with self.assertRaises(TypeError): > + types.SimpleNamespace(1, 2, 3) > + > + self.assertEqual(len(ns1.__dict__), 0) > + self.assertEqual(vars(ns1), {}) > + self.assertEqual(len(ns2.__dict__), 2) > + self.assertEqual(vars(ns2), {'y': 2, 'x': 1}) > + self.assertEqual(len(ns3.__dict__), 2) > + self.assertEqual(vars(ns3), {'y': 2, 'x': 1}) > + > + def test_unbound(self): > + ns1 = vars(types.SimpleNamespace()) > + ns2 = vars(types.SimpleNamespace(x=1, y=2)) > + > + self.assertEqual(ns1, {}) > + self.assertEqual(ns2, {'y': 2, 'x': 1}) > + > + def test_underlying_dict(self): > + ns1 = types.SimpleNamespace() > + ns2 = types.SimpleNamespace(x=1, y=2) > + ns3 = types.SimpleNamespace(a=True, b=False) > + mapping = ns3.__dict__ > + del ns3 > + > + self.assertEqual(ns1.__dict__, {}) > + self.assertEqual(ns2.__dict__, {'y': 2, 'x': 1}) > + self.assertEqual(mapping, dict(a=True, b=False)) > + > + def test_attrget(self): > + ns = types.SimpleNamespace(x=1, y=2, w=3) > + > + self.assertEqual(ns.x, 1) > + self.assertEqual(ns.y, 2) > + self.assertEqual(ns.w, 3) > + with self.assertRaises(AttributeError): > + ns.z > + > + def test_attrset(self): > + ns1 = types.SimpleNamespace() > + ns2 = types.SimpleNamespace(x=1, y=2, w=3) > + ns1.a = 'spam' > + ns1.b = 'ham' > + ns2.z = 4 > + ns2.theta = None > + > + self.assertEqual(ns1.__dict__, dict(a='spam', b='ham')) > + self.assertEqual(ns2.__dict__, dict(x=1, y=2, w=3, z=4, > theta=None)) > + > + def test_attrdel(self): > + ns1 = types.SimpleNamespace() > + ns2 = types.SimpleNamespace(x=1, y=2, w=3) > + > + with self.assertRaises(AttributeError): > + del ns1.spam > + with self.assertRaises(AttributeError): > + del ns2.spam > + > + del ns2.y > + self.assertEqual(vars(ns2), dict(w=3, x=1)) > + ns2.y = 'spam' > + self.assertEqual(vars(ns2), dict(w=3, x=1, y='spam')) > + del ns2.y > + self.assertEqual(vars(ns2), dict(w=3, x=1)) > + > + ns1.spam = 5 > + self.assertEqual(vars(ns1), dict(spam=5)) > + del ns1.spam > + self.assertEqual(vars(ns1), {}) > + > + def test_repr(self): > + ns1 = types.SimpleNamespace(x=1, y=2, w=3) > + ns2 = types.SimpleNamespace() > + ns2.x = "spam" > + ns2._y = 5 > + > + self.assertEqual(repr(ns1), "namespace(w=3, x=1, y=2)") > + self.assertEqual(repr(ns2), "namespace(_y=5, x='spam')") > + > + def test_nested(self): > + ns1 = types.SimpleNamespace(a=1, b=2) > + ns2 = types.SimpleNamespace() > + ns3 = types.SimpleNamespace(x=ns1) > + ns2.spam = ns1 > 
+ ns2.ham = '?' > + ns2.spam = ns3 > + > + self.assertEqual(vars(ns1), dict(a=1, b=2)) > + self.assertEqual(vars(ns2), dict(spam=ns3, ham='?')) > + self.assertEqual(ns2.spam, ns3) > + self.assertEqual(vars(ns3), dict(x=ns1)) > + self.assertEqual(ns3.x.a, 1) > + > + def test_recursive(self): > + ns1 = types.SimpleNamespace(c='cookie') > + ns2 = types.SimpleNamespace() > + ns3 = types.SimpleNamespace(x=1) > + ns1.spam = ns1 > + ns2.spam = ns3 > + ns3.spam = ns2 > + > + self.assertEqual(ns1.spam, ns1) > + self.assertEqual(ns1.spam.spam, ns1) > + self.assertEqual(ns1.spam.spam, ns1.spam) > + self.assertEqual(ns2.spam, ns3) > + self.assertEqual(ns3.spam, ns2) > + self.assertEqual(ns2.spam.spam, ns2) > + > + def test_recursive_repr(self): > + ns1 = types.SimpleNamespace(c='cookie') > + ns2 = types.SimpleNamespace() > + ns3 = types.SimpleNamespace(x=1) > + ns1.spam = ns1 > + ns2.spam = ns3 > + ns3.spam = ns2 > + > + self.assertEqual(repr(ns1), > + "namespace(c='cookie', spam=namespace(...))") > + self.assertEqual(repr(ns2), > + "namespace(spam=namespace(spam=namespace(...), > x=1))") > + > + def test_as_dict(self): > + ns = types.SimpleNamespace(spam='spamspamspam') > + > + with self.assertRaises(TypeError): > + len(ns) > + with self.assertRaises(TypeError): > + iter(ns) > + with self.assertRaises(TypeError): > + 'spam' in ns > + with self.assertRaises(TypeError): > + ns['spam'] > + > + > def test_main(): > - run_unittest(TypesTests, MappingProxyTests, ClassCreationTests) > + run_unittest(TypesTests, MappingProxyTests, ClassCreationTests, > + SimpleNamespaceTests) > > if __name__ == '__main__': > test_main() > diff --git a/Lib/types.py b/Lib/types.py > --- a/Lib/types.py > +++ b/Lib/types.py > @@ -13,6 +13,7 @@ > LambdaType = type(lambda: None) # Same as FunctionType > CodeType = type(_f.__code__) > MappingProxyType = type(type.__dict__) > +SimpleNamespace = type(sys.implementation) > > def _g(): > yield 1 > diff --git a/Makefile.pre.in b/Makefile.pre.in > --- a/Makefile.pre.in > +++ b/Makefile.pre.in > @@ -392,6 +392,7 @@ > Objects/memoryobject.o \ > Objects/methodobject.o \ > Objects/moduleobject.o \ > + Objects/namespaceobject.o \ > Objects/object.o \ > Objects/obmalloc.o \ > Objects/capsule.o \ > @@ -766,6 +767,7 @@ > $(srcdir)/Include/methodobject.h \ > $(srcdir)/Include/modsupport.h \ > $(srcdir)/Include/moduleobject.h \ > + $(srcdir)/Include/namespaceobject.h \ > $(srcdir)/Include/node.h \ > $(srcdir)/Include/object.h \ > $(srcdir)/Include/objimpl.h \ > diff --git a/Objects/namespaceobject.c b/Objects/namespaceobject.c > new file mode 100644 > --- /dev/null > +++ b/Objects/namespaceobject.c > @@ -0,0 +1,225 @@ > +/* namespace object implementation */ > + > +#include "Python.h" > +#include "structmember.h" > + > + > +typedef struct { > + PyObject_HEAD > + PyObject *ns_dict; > +} _PyNamespaceObject; > + > + > +static PyMemberDef namespace_members[] = { > + {"__dict__", T_OBJECT, offsetof(_PyNamespaceObject, ns_dict), > READONLY}, > + {NULL} > +}; > + > + > +/* Methods */ > + > +static PyObject * > +namespace_new(PyTypeObject *type, PyObject *args, PyObject *kwds) > +{ > + _PyNamespaceObject *ns; > + ns = PyObject_GC_New(_PyNamespaceObject, &_PyNamespace_Type); > + if (ns == NULL) > + return NULL; > + > + ns->ns_dict = PyDict_New(); > + if (ns->ns_dict == NULL) { > + Py_DECREF(ns); > + return NULL; > + } > + > + PyObject_GC_Track(ns); > + return (PyObject *)ns; > +} > + > + > +static int > +namespace_init(_PyNamespaceObject *ns, PyObject *args, PyObject *kwds) > +{ > + /* ignore args if it's 
NULL or empty */ > + if (args != NULL) { > + Py_ssize_t argcount = PyObject_Size(args); > + if (argcount < 0) > + return argcount; > + else if (argcount > 0) { > + PyErr_Format(PyExc_TypeError, "no positional arguments > expected"); > + return -1; > + } > + } > + if (kwds == NULL) > + return 0; > + return PyDict_Update(ns->ns_dict, kwds); > +} > + > + > +static void > +namespace_dealloc(_PyNamespaceObject *ns) > +{ > + PyObject_GC_UnTrack(ns); > + Py_CLEAR(ns->ns_dict); > + Py_TYPE(ns)->tp_free((PyObject *)ns); > +} > + > + > +static PyObject * > +namespace_repr(_PyNamespaceObject *ns) > +{ > + int i, loop_error = 0; > + PyObject *pairs = NULL, *d = NULL, *keys = NULL, *keys_iter = NULL; > + PyObject *key; > + PyObject *separator, *pairsrepr, *repr = NULL; > + > + i = Py_ReprEnter((PyObject *)ns); > + if (i != 0) { > + return i > 0 ? PyUnicode_FromString("namespace(...)") : NULL; > + } > + > + pairs = PyList_New(0); > + if (pairs == NULL) > + goto error; > + > + d = ((_PyNamespaceObject *)ns)->ns_dict; > + assert(d != NULL); > + Py_INCREF(d); > + > + keys = PyDict_Keys(d); > + if (keys == NULL) > + goto error; > + if (PyList_Sort(keys) != 0) > + goto error; > + > + keys_iter = PyObject_GetIter(keys); > + if (keys_iter == NULL) > + goto error; > + > + while ((key = PyIter_Next(keys_iter)) != NULL) { > + if (PyUnicode_Check(key) && PyUnicode_GET_SIZE(key) > 0) { > + PyObject *value, *item; > + > + value = PyDict_GetItem(d, key); > + assert(value != NULL); > + > + item = PyUnicode_FromFormat("%S=%R", key, value); > + if (item == NULL) { > + loop_error = 1; > + } > + else { > + loop_error = PyList_Append(pairs, item); > + Py_DECREF(item); > + } > + } > + > + Py_DECREF(key); > + if (loop_error) > + goto error; > + } > + > + separator = PyUnicode_FromString(", "); > + if (separator == NULL) > + goto error; > + > + pairsrepr = PyUnicode_Join(separator, pairs); > + Py_DECREF(separator); > + if (pairsrepr == NULL) > + goto error; > + > + repr = PyUnicode_FromFormat("%s(%S)", > + ((PyObject *)ns)->ob_type->tp_name, > pairsrepr); > + Py_DECREF(pairsrepr); > + > +error: > + Py_XDECREF(pairs); > + Py_XDECREF(d); > + Py_XDECREF(keys); > + Py_XDECREF(keys_iter); > + Py_ReprLeave((PyObject *)ns); > + > + return repr; > +} > + > + > +static int > +namespace_traverse(_PyNamespaceObject *ns, visitproc visit, void *arg) > +{ > + Py_VISIT(ns->ns_dict); > + return 0; > +} > + > + > +static int > +namespace_clear(_PyNamespaceObject *ns) > +{ > + Py_CLEAR(ns->ns_dict); > + return 0; > +} > + > + > +PyDoc_STRVAR(namespace_doc, > +"A simple attribute-based namespace.\n\ > +\n\ > +namespace(**kwargs)"); > + > +PyTypeObject _PyNamespace_Type = { > + PyVarObject_HEAD_INIT(&PyType_Type, 0) > + "namespace", /* tp_name */ > + sizeof(_PyNamespaceObject), /* tp_size */ > + 0, /* tp_itemsize */ > + (destructor)namespace_dealloc, /* tp_dealloc */ > + 0, /* tp_print */ > + 0, /* tp_getattr */ > + 0, /* tp_setattr */ > + 0, /* tp_reserved */ > + (reprfunc)namespace_repr, /* tp_repr */ > + 0, /* tp_as_number */ > + 0, /* tp_as_sequence */ > + 0, /* tp_as_mapping */ > + 0, /* tp_hash */ > + 0, /* tp_call */ > + 0, /* tp_str */ > + PyObject_GenericGetAttr, /* tp_getattro */ > + PyObject_GenericSetAttr, /* tp_setattro */ > + 0, /* tp_as_buffer */ > + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | > + Py_TPFLAGS_BASETYPE, /* tp_flags */ > + namespace_doc, /* tp_doc */ > + (traverseproc)namespace_traverse, /* tp_traverse */ > + (inquiry)namespace_clear, /* tp_clear */ > + 0, /* tp_richcompare */ > + 0, /* tp_weaklistoffset */ > + 0, /* 
tp_iter */ > + 0, /* tp_iternext */ > + 0, /* tp_methods */ > + namespace_members, /* tp_members */ > + 0, /* tp_getset */ > + 0, /* tp_base */ > + 0, /* tp_dict */ > + 0, /* tp_descr_get */ > + 0, /* tp_descr_set */ > + offsetof(_PyNamespaceObject, ns_dict), /* tp_dictoffset */ > + (initproc)namespace_init, /* tp_init */ > + PyType_GenericAlloc, /* tp_alloc */ > + (newfunc)namespace_new, /* tp_new */ > + PyObject_GC_Del, /* tp_free */ > +}; > + > + > +PyObject * > +_PyNamespace_New(PyObject *kwds) > +{ > + PyObject *ns = namespace_new(&_PyNamespace_Type, NULL, NULL); > + if (ns == NULL) > + return NULL; > + > + if (kwds == NULL) > + return ns; > + if (PyDict_Update(((_PyNamespaceObject *)ns)->ns_dict, kwds) != 0) { > + Py_DECREF(ns); > + return NULL; > + } > + > + return (PyObject *)ns; > +} > diff --git a/Objects/object.c b/Objects/object.c > --- a/Objects/object.c > +++ b/Objects/object.c > @@ -1707,6 +1707,9 @@ > > if (PyType_Ready(&PyZip_Type) < 0) > Py_FatalError("Can't initialize zip type"); > + > + if (PyType_Ready(&_PyNamespace_Type) < 0) > + Py_FatalError("Can't initialize namespace type"); > } > > > diff --git a/Python/sysmodule.c b/Python/sysmodule.c > --- a/Python/sysmodule.c > +++ b/Python/sysmodule.c > @@ -1261,6 +1261,7 @@ > float_info -- a struct sequence with information about the float > implementation.\n\ > float_repr_style -- string indicating the style of repr() output for > floats\n\ > hexversion -- version information encoded as a single integer\n\ > +implementation -- Python implementation information.\n\ > int_info -- a struct sequence with information about the int > implementation.\n\ > maxsize -- the largest supported length of containers.\n\ > maxunicode -- the value of the largest Unicode codepoint\n\ > @@ -1454,6 +1455,69 @@ > return version_info; > } > > +static PyObject * > +make_impl_info(PyObject *version_info) > +{ > + int res; > + PyObject *impl_info, *value, *ns; > + > + impl_info = PyDict_New(); > + if (impl_info == NULL) > + return NULL; > + > + /* populate the dict */ > + > +#define NAME "cpython" > +#define QUOTE(arg) #arg > +#define STRIFY(name) QUOTE(name) > +#define MAJOR STRIFY(PY_MAJOR_VERSION) > +#define MINOR STRIFY(PY_MINOR_VERSION) > +#define TAG NAME "-" MAJOR MINOR > + value = PyUnicode_FromString(NAME); > + if (value == NULL) > + goto error; > + res = PyDict_SetItemString(impl_info, "name", value); > + Py_DECREF(value); > + if (res < 0) > + goto error; > + > + value = PyUnicode_FromString(TAG); > + if (value == NULL) > + goto error; > + res = PyDict_SetItemString(impl_info, "cache_tag", value); > + Py_DECREF(value); > + if (res < 0) > + goto error; > +#undef NAME > +#undef QUOTE > +#undef STRIFY > +#undef MAJOR > +#undef MINOR > +#undef TAG > + > + res = PyDict_SetItemString(impl_info, "version", version_info); > + if (res < 0) > + goto error; > + > + value = PyLong_FromLong(PY_VERSION_HEX); > + if (value == NULL) > + goto error; > + res = PyDict_SetItemString(impl_info, "hexversion", value); > + Py_DECREF(value); > + if (res < 0) > + goto error; > + > + /* dict ready */ > + > + ns = _PyNamespace_New(impl_info); > + Py_DECREF(impl_info); > + return ns; > + > +error: > + Py_CLEAR(impl_info); > + return NULL; > +} > + > static struct PyModuleDef sysmodule = { > PyModuleDef_HEAD_INIT, > "sys", > @@ -1469,7 +1533,7 @@ > PyObject * > _PySys_Init(void) > { > - PyObject *m, *v, *sysdict; > + PyObject *m, *v, *sysdict, *version_info; > char *s; > > m = PyModule_Create(&sysmodule); > @@ -1589,11 +1653,15 @@ > /* version_info */ > if 
(VersionInfoType.tp_name == 0) > PyStructSequence_InitType(&VersionInfoType, &version_info_desc); > - SET_SYS_FROM_STRING("version_info", make_version_info()); > + version_info = make_version_info(); > + SET_SYS_FROM_STRING("version_info", version_info); > /* prevent user from creating new instances */ > VersionInfoType.tp_init = NULL; > VersionInfoType.tp_new = NULL; > > + /* implementation */ > + SET_SYS_FROM_STRING("implementation", make_impl_info(version_info)); > + > /* flags */ > if (FlagsType.tp_name == 0) > PyStructSequence_InitType(&FlagsType, &flags_desc); > > -- > Repository URL: http://hg.python.org/cpython > > _______________________________________________ > Python-checkins mailing list > Python-checkins at python.org > http://mail.python.org/mailman/listinfo/python-checkins > > -------------- next part -------------- An HTML attachment was scrubbed... URL: From brett at python.org Mon Jun 4 16:25:11 2012 From: brett at python.org (Brett Cannon) Date: Mon, 4 Jun 2012 10:25:11 -0400 Subject: [Python-checkins] cpython: Eric Snow's implementation of PEP 421. In-Reply-To: References: Message-ID: [Let's try this again since my last reply was rejected for being too large] On Mon, Jun 4, 2012 at 9:52 AM, barry.warsaw wrote: > http://hg.python.org/cpython/rev/9c445f4695c1 > changeset: 77339:9c445f4695c1 > parent: 77328:0808cb8c60fd > user: Barry Warsaw > date: Sun Jun 03 16:18:47 2012 -0400 > summary: > Eric Snow's implementation of PEP 421. > > Issue 14673: Add sys.implementation > > files: > Doc/library/sys.rst | 38 ++++ > Doc/library/types.rst | 24 ++ > Include/Python.h | 1 + > Include/namespaceobject.h | 17 + > Lib/test/test_sys.py | 18 ++ > Lib/test/test_types.py | 143 ++++++++++++++++- > Lib/types.py | 1 + > Makefile.pre.in | 2 + > Objects/namespaceobject.c | 225 ++++++++++++++++++++++++++ > Objects/object.c | 3 + > Python/sysmodule.c | 72 ++++++++- > 11 files changed, 541 insertions(+), 3 deletions(-) > > > diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst > --- a/Doc/library/sys.rst > +++ b/Doc/library/sys.rst > @@ -616,6 +616,44 @@ > > Thus ``2.1.0a3`` is hexversion ``0x020100a3``. > > + > +.. data:: implementation > + > + An object containing the information about the implementation of the > + currently running Python interpreter. Its attributes are the those > "the those" -> "those" > + that all Python implementations must implement. Should you mention that VMs are allowed to add their own attributes that are not listed? > They are described > + below. > + > + *name* is the implementation's identifier, like ``'cpython'``. > Is this guaranteed to be lowercase, or does it simply happen to be lowercase in this instance? > + > + *version* is a named tuple, in the same format as > + :data:`sys.version_info`. It represents the version of the Python > + *implementation*. This has a distinct meaning from the specific > + version of the Python *language* to which the currently running > + interpreter conforms, which ``sys.version_info`` represents. For > + example, for PyPy 1.8 ``sys.implementation.version`` might be > + ``sys.version_info(1, 8, 0, 'final', 0)``, whereas ``sys.version_info`` > + would be ``sys.version_info(1, 8, 0, 'final', 0)``. I think you meant to say ``sys.version_info(2, 7, 2, 'final', 0)``. What's with the ~? -Brett -------------- next part -------------- An HTML attachment was scrubbed... 
URL: From python-checkins at python.org Mon Jun 4 17:06:51 2012 From: python-checkins at python.org (barry.warsaw) Date: Mon, 04 Jun 2012 17:06:51 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_A_few_documentation_improve?= =?utf8?q?ments=2C_spurred_on_by_Brett=27s_review=2E?= Message-ID: http://hg.python.org/cpython/rev/20fd0568b3a1 changeset: 77342:20fd0568b3a1 user: Barry Warsaw date: Mon Jun 04 11:06:45 2012 -0400 summary: A few documentation improvements, spurred on by Brett's review. files: Doc/library/sys.rst | 23 +++++++++++++---------- 1 files changed, 13 insertions(+), 10 deletions(-) diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -619,12 +619,13 @@ .. data:: implementation - An object containing the information about the implementation of the - currently running Python interpreter. Its attributes are the those - that all Python implementations must implement. They are described - below. + An object containing information about the implementation of the + currently running Python interpreter. The following attributes are + required to exist in all Python implementations. - *name* is the implementation's identifier, like ``'cpython'``. + *name* is the implementation's identifier, e.g. ``'cpython'``. The actual + string is defined by the Python implementation, but it is guaranteed to be + lower case. *version* is a named tuple, in the same format as :data:`sys.version_info`. It represents the version of the Python @@ -633,7 +634,7 @@ interpreter conforms, which ``sys.version_info`` represents. For example, for PyPy 1.8 ``sys.implementation.version`` might be ``sys.version_info(1, 8, 0, 'final', 0)``, whereas ``sys.version_info`` - would be ``sys.version_info(1, 8, 0, 'final', 0)``. For CPython they + would be ``sys.version_info(2, 7, 2, 'final', 0)``. For CPython they are the same value, since it is the reference implementation. *hexversion* is the implementation version in hexadecimal format, like @@ -646,10 +647,12 @@ ``cache_tag`` is set to ``None``, it indicates that module caching should be disabled. - Regardless of its contents, :data:`sys.implementation` will not - change during a run of the interpreter, nor between implementation - versions. (It may change between Python language versions, - however.) See `PEP 421` for more information. + :data:`sys.implementation` may contain additional attributes specific to + the Python implementation. These non-standard attributes must start with + an underscore, and are not described here. Regardless of its contents, + :data:`sys.implementation` will not change during a run of the interpreter, + nor between implementation versions. (It may change between Python + language versions, however.) See `PEP 421` for more information. .. versionadded:: 3.3 -- Repository URL: http://hg.python.org/cpython From barry at python.org Mon Jun 4 17:10:02 2012 From: barry at python.org (Barry Warsaw) Date: Mon, 4 Jun 2012 11:10:02 -0400 Subject: [Python-checkins] cpython: Eric Snow's implementation of PEP 421. In-Reply-To: References: Message-ID: <20120604111002.221904c2@resist.wooz.org> Thanks for the second set of eyes, Brett. On Jun 04, 2012, at 10:16 AM, Brett Cannon wrote: >> +.. data:: implementation >> + >> + An object containing the information about the implementation of the >> + currently running Python interpreter. 
Its attributes are the those >> > >"the those" -> "those" I actually rewrote this section a bit: An object containing information about the implementation of the currently running Python interpreter. The following attributes are required to exist in all Python implementations. >> + that all Python implementations must implement. > >Should you mention that VMs are allowed to add their own attributes that >are not listed? Here's how I rewrote it: :data:`sys.implementation` may contain additional attributes specific to the Python implementation. These non-standard attributes must start with an underscore, and are not described here. Regardless of its contents, :data:`sys.implementation` will not change during a run of the interpreter, nor between implementation versions. (It may change between Python language versions, however.) See `PEP 421` for more information. >> They are described >> + below. >> + >> + *name* is the implementation's identifier, like ``'cpython'``. > >Is this guaranteed to be lowercase, or does it simply happen to be >lowercase in this instance? Yes, PEP 421 guarantees them to be lower cased. *name* is the implementation's identifier, e.g. ``'cpython'``. The actual string is defined by the Python implementation, but it is guaranteed to be lower case. >I think you meant to say ``sys.version_info(2, 7, 2, 'final', 0)``. Fixed. >> + However, for a structured record type use >> :func:`~collections.namedtuple` >> > >What's with the ~? I'm not sure, but it seems to result in a cross-reference, and I see tildes used elsewhere, so I guess it's some reST/docutils magic. I left this one in there. Cheers, -Barry From brett at python.org Mon Jun 4 17:39:40 2012 From: brett at python.org (Brett Cannon) Date: Mon, 4 Jun 2012 11:39:40 -0400 Subject: [Python-checkins] cpython: Eric Snow's implementation of PEP 421. In-Reply-To: <20120604111002.221904c2@resist.wooz.org> References: <20120604111002.221904c2@resist.wooz.org> Message-ID: On Mon, Jun 4, 2012 at 11:10 AM, Barry Warsaw wrote: > Thanks for the second set of eyes, Brett. > > On Jun 04, 2012, at 10:16 AM, Brett Cannon wrote: > > >> +.. data:: implementation > >> + > >> + An object containing the information about the implementation of the > >> + currently running Python interpreter. Its attributes are the those > >> > > > >"the those" -> "those" > > I actually rewrote this section a bit: > > An object containing information about the implementation of the > currently running Python interpreter. The following attributes are > required to exist in all Python implementations. > > >> + that all Python implementations must implement. > > > >Should you mention that VMs are allowed to add their own attributes that > >are not listed? > > Here's how I rewrote it: > > :data:`sys.implementation` may contain additional attributes specific to > the Python implementation. These non-standard attributes must start with > an underscore, and are not described here. Regardless of its contents, > :data:`sys.implementation` will not change during a run of the > interpreter, > nor between implementation versions. (It may change between Python > language versions, however.) See `PEP 421` for more information. > > >> They are described > >> + below. > >> + > >> + *name* is the implementation's identifier, like ``'cpython'``. > > > >Is this guaranteed to be lowercase, or does it simply happen to be > >lowercase in this instance? > > Yes, PEP 421 guarantees them to be lower cased. > *name* is the implementation's identifier, e.g. ``'cpython'``. 
The > actual > string is defined by the Python implementation, but it is guaranteed to > be > lower case. > > OK, then I would add a test to make sure this happens, like ``self.assertEqual(sys.implementation.name, sys.implement.name.lower())`` if you don't want to bother documenting it to make sure other VMs conform. -Brett >I think you meant to say ``sys.version_info(2, 7, 2, 'final', 0)``. > > Fixed. > > >> + However, for a structured record type use > >> :func:`~collections.namedtuple` > >> > > > >What's with the ~? > > I'm not sure, but it seems to result in a cross-reference, and I see tildes > used elsewhere, so I guess it's some reST/docutils magic. I left this one > in > there. > > Cheers, > -Barry > -------------- next part -------------- An HTML attachment was scrubbed... URL: From python-checkins at python.org Mon Jun 4 18:02:46 2012 From: python-checkins at python.org (barry.warsaw) Date: Mon, 04 Jun 2012 18:02:46 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_PEP_421_requires_that_=2Ena?= =?utf8?q?me_be_lower_case=2E?= Message-ID: http://hg.python.org/cpython/rev/a061ae246395 changeset: 77343:a061ae246395 user: Barry Warsaw date: Mon Jun 04 12:01:56 2012 -0400 summary: PEP 421 requires that .name be lower case. files: Lib/test/test_sys.py | 4 ++++ 1 files changed, 4 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -599,6 +599,10 @@ version.serial << 0) self.assertEqual(sys.implementation.hexversion, hexversion) + # PEP 421 requires that .name be lower case. + self.assertEqual(sys.implementation.name, + sys.implementation.name.lower()) + class SizeofTest(unittest.TestCase): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 4 18:02:48 2012 From: python-checkins at python.org (barry.warsaw) Date: Mon, 04 Jun 2012 18:02:48 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Whitespace_normalization?= Message-ID: http://hg.python.org/cpython/rev/59ae1e96db1c changeset: 77344:59ae1e96db1c user: Barry Warsaw date: Mon Jun 04 12:02:42 2012 -0400 summary: Whitespace normalization files: Lib/test/test_sys.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -600,7 +600,7 @@ self.assertEqual(sys.implementation.hexversion, hexversion) # PEP 421 requires that .name be lower case. - self.assertEqual(sys.implementation.name, + self.assertEqual(sys.implementation.name, sys.implementation.name.lower()) -- Repository URL: http://hg.python.org/cpython From barry at python.org Mon Jun 4 18:03:08 2012 From: barry at python.org (Barry Warsaw) Date: Mon, 4 Jun 2012 12:03:08 -0400 Subject: [Python-checkins] cpython: Eric Snow's implementation of PEP 421. In-Reply-To: References: <20120604111002.221904c2@resist.wooz.org> Message-ID: <20120604120308.73c41732@resist.wooz.org> On Jun 04, 2012, at 11:39 AM, Brett Cannon wrote: >OK, then I would add a test to make sure this happens, like >``self.assertEqual(sys.implementation.name, sys.implement.name.lower())`` >if you don't want to bother documenting it to make sure other VMs conform. Good idea. Done. -Barry -------------- next part -------------- A non-text attachment was scrubbed... 
Name: signature.asc Type: application/pgp-signature Size: 836 bytes Desc: not available URL: From python-checkins at python.org Mon Jun 4 18:34:56 2012 From: python-checkins at python.org (hynek.schlawack) Date: Mon, 04 Jun 2012 18:34:56 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_=2314814=3A_Remove_dead_cod?= =?utf8?q?e_from_ipaddress?= Message-ID: http://hg.python.org/cpython/rev/df6d1a4d83fa changeset: 77345:df6d1a4d83fa user: Hynek Schlawack date: Mon Jun 04 18:14:02 2012 +0200 summary: #14814: Remove dead code from ipaddress _BaseNetwork contained (faulty) methods for creating string representations. I've fixed them and put them to use by eliminating identical overrides. files: Lib/ipaddress.py | 45 ++++--------------------- Lib/test/test_ipaddress.py | 16 +++++++++ 2 files changed, 23 insertions(+), 38 deletions(-) diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -578,6 +578,10 @@ def __repr__(self): return '%s(%r)' % (self.__class__.__name__, str(self)) + def __str__(self): + return '%s/%d' % (str(self.network_address), + self.prefixlen) + def hosts(self): """Generate Iterator over usable hosts in a network. @@ -663,9 +667,6 @@ return NotImplemented return not eq - def __str__(self): - return '%s/%s' % (self.ip, self._prefixlen) - def __hash__(self): return hash(int(self.network_address) ^ int(self.netmask)) @@ -708,15 +709,15 @@ @property def with_prefixlen(self): - return '%s/%d' % (str(self.ip), self._prefixlen) + return '%s/%d' % (str(self.network_address), self._prefixlen) @property def with_netmask(self): - return '%s/%s' % (str(self.ip), str(self.netmask)) + return '%s/%s' % (str(self.network_address), str(self.netmask)) @property def with_hostmask(self): - return '%s/%s' % (str(self.ip), str(self.hostmask)) + return '%s/%s' % (str(self.network_address), str(self.hostmask)) @property def num_addresses(self): @@ -1447,10 +1448,6 @@ """The binary representation of this address.""" return v4_int_to_packed(self.network_address) - def __str__(self): - return '%s/%d' % (str(self.network_address), - self.prefixlen) - def _is_valid_netmask(self, netmask): """Verify that the netmask is valid. @@ -1498,18 +1495,6 @@ return True return False - @property - def with_prefixlen(self): - return '%s/%d' % (str(self.network_address), self._prefixlen) - - @property - def with_netmask(self): - return '%s/%s' % (str(self.network_address), str(self.netmask)) - - @property - def with_hostmask(self): - return '%s/%s' % (str(self.network_address), str(self.hostmask)) - class _BaseV6: @@ -2108,10 +2093,6 @@ if self._prefixlen == (self._max_prefixlen - 1): self.hosts = self.__iter__ - def __str__(self): - return '%s/%d' % (str(self.network_address), - self.prefixlen) - def _is_valid_netmask(self, prefixlen): """Verify that the netmask/prefixlen is valid. 
@@ -2128,15 +2109,3 @@ except ValueError: return False return 0 <= prefixlen <= self._max_prefixlen - - @property - def with_prefixlen(self): - return '%s/%d' % (str(self.network_address), self._prefixlen) - - @property - def with_netmask(self): - return '%s/%s' % (str(self.network_address), str(self.netmask)) - - @property - def with_hostmask(self): - return '%s/%s' % (str(self.network_address), str(self.hostmask)) diff --git a/Lib/test/test_ipaddress.py b/Lib/test/test_ipaddress.py --- a/Lib/test/test_ipaddress.py +++ b/Lib/test/test_ipaddress.py @@ -972,6 +972,22 @@ self.assertTrue(self.ipv4_address in dummy) self.assertTrue(ip2 in dummy) + def testIPv6NetworkHelpers(self): + net = self.ipv6_network + self.assertEqual('2001:658:22a:cafe::/64', net.with_prefixlen) + self.assertEqual('2001:658:22a:cafe::/ffff:ffff:ffff:ffff::', + net.with_netmask) + self.assertEqual('2001:658:22a:cafe::/::ffff:ffff:ffff:ffff', + net.with_hostmask) + self.assertEqual('2001:658:22a:cafe::/64', str(net)) + + def testIPv4NetworkHelpers(self): + net = self.ipv4_network + self.assertEqual('1.2.3.0/24', net.with_prefixlen) + self.assertEqual('1.2.3.0/255.255.255.0', net.with_netmask) + self.assertEqual('1.2.3.0/0.0.0.255', net.with_hostmask) + self.assertEqual('1.2.3.0/24', str(net)) + def testCopyConstructor(self): addr1 = ipaddress.ip_network('10.1.1.0/24') addr2 = ipaddress.ip_network(addr1) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 4 19:28:16 2012 From: python-checkins at python.org (richard.oudkerk) Date: Mon, 04 Jun 2012 19:28:16 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Add_namespaceobject=2Eh_and?= =?utf8?q?_namespaceobject=2Ec_to_pythoncore=2Evcxproj?= Message-ID: http://hg.python.org/cpython/rev/ee7cd7d51ed6 changeset: 77346:ee7cd7d51ed6 user: Richard Oudkerk date: Mon Jun 04 18:24:44 2012 +0100 summary: Add namespaceobject.h and namespaceobject.c to pythoncore.vcxproj files: PCbuild/pythoncore.vcxproj | 4 +++- 1 files changed, 3 insertions(+), 1 deletions(-) diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -1,4 +1,4 @@ -? 
+ @@ -402,6 +402,7 @@ + @@ -579,6 +580,7 @@ + -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 4 20:01:05 2012 From: python-checkins at python.org (richard.oudkerk) Date: Mon, 04 Jun 2012 20:01:05 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Make_Finalize_reserve_a_ref?= =?utf8?q?erence_to_os=2Egetpid_in_case_called_at_shutdown?= Message-ID: http://hg.python.org/cpython/rev/9257ea91df3d changeset: 77347:9257ea91df3d user: Richard Oudkerk date: Mon Jun 04 18:58:59 2012 +0100 summary: Make Finalize reserve a reference to os.getpid in case called at shutdown files: Lib/multiprocessing/util.py | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Lib/multiprocessing/util.py b/Lib/multiprocessing/util.py --- a/Lib/multiprocessing/util.py +++ b/Lib/multiprocessing/util.py @@ -170,7 +170,7 @@ # Need to bind these locally because the globals can have # been cleared at shutdown _finalizer_registry=_finalizer_registry, - sub_debug=sub_debug): + sub_debug=sub_debug, getpid=os.getpid): ''' Run the callback unless it has already been called or cancelled ''' @@ -179,7 +179,7 @@ except KeyError: sub_debug('finalizer no longer registered') else: - if self._pid != os.getpid(): + if self._pid != getpid(): sub_debug('finalizer ignored because different process') res = None else: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 4 20:01:07 2012 From: python-checkins at python.org (richard.oudkerk) Date: Mon, 04 Jun 2012 20:01:07 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_potential_NameError_in_?= =?utf8?q?multiprocessing=2ECondition=2Ewait=28=29?= Message-ID: http://hg.python.org/cpython/rev/3baeb5e13dd2 changeset: 77348:3baeb5e13dd2 user: Richard Oudkerk date: Mon Jun 04 18:59:07 2012 +0100 summary: Fix potential NameError in multiprocessing.Condition.wait() files: Lib/multiprocessing/synchronize.py | 3 +-- 1 files changed, 1 insertions(+), 2 deletions(-) diff --git a/Lib/multiprocessing/synchronize.py b/Lib/multiprocessing/synchronize.py --- a/Lib/multiprocessing/synchronize.py +++ b/Lib/multiprocessing/synchronize.py @@ -216,7 +216,7 @@ try: # wait for notification or timeout - ret = self._wait_semaphore.acquire(True, timeout) + return self._wait_semaphore.acquire(True, timeout) finally: # indicate that this thread has woken self._woken_count.release() @@ -224,7 +224,6 @@ # reacquire lock for i in range(count): self._lock.acquire() - return ret def notify(self): assert self._lock._semlock._is_mine(), 'lock is not owned' -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 4 20:01:08 2012 From: python-checkins at python.org (richard.oudkerk) Date: Mon, 04 Jun 2012 20:01:08 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Prevent_handle_leak_if_Crea?= =?utf8?q?teProcess=28=29_fails_in_multiprocessing?= Message-ID: http://hg.python.org/cpython/rev/2959ef933b8b changeset: 77349:2959ef933b8b user: Richard Oudkerk date: Mon Jun 04 18:59:10 2012 +0100 summary: Prevent handle leak if CreateProcess() fails in multiprocessing files: Lib/multiprocessing/forking.py | 48 +++++++++++---------- 1 files changed, 25 insertions(+), 23 deletions(-) diff --git a/Lib/multiprocessing/forking.py b/Lib/multiprocessing/forking.py --- a/Lib/multiprocessing/forking.py +++ b/Lib/multiprocessing/forking.py @@ -209,6 +209,9 @@ _tls = _thread._local() def __init__(self, process_obj): + cmd = ' '.join('"%s"' % x for x in get_command_line()) + prep_data = 
get_preparation_data(process_obj._name) + # create pipe for communication with child rfd, wfd = os.pipe() @@ -216,31 +219,30 @@ rhandle = duplicate(msvcrt.get_osfhandle(rfd), inheritable=True) os.close(rfd) - # start process - cmd = get_command_line() + [rhandle] - cmd = ' '.join('"%s"' % x for x in cmd) - hp, ht, pid, tid = _winapi.CreateProcess( - _python_exe, cmd, None, None, 1, 0, None, None, None - ) - _winapi.CloseHandle(ht) - close(rhandle) + with open(wfd, 'wb', closefd=True) as to_child: + # start process + try: + hp, ht, pid, tid = _winapi.CreateProcess( + _python_exe, cmd + (' %s' % rhandle), + None, None, 1, 0, None, None, None + ) + _winapi.CloseHandle(ht) + finally: + close(rhandle) - # set attributes of self - self.pid = pid - self.returncode = None - self._handle = hp - self.sentinel = int(hp) + # set attributes of self + self.pid = pid + self.returncode = None + self._handle = hp + self.sentinel = int(hp) - # send information to child - prep_data = get_preparation_data(process_obj._name) - to_child = os.fdopen(wfd, 'wb') - Popen._tls.process_handle = int(hp) - try: - dump(prep_data, to_child, HIGHEST_PROTOCOL) - dump(process_obj, to_child, HIGHEST_PROTOCOL) - finally: - del Popen._tls.process_handle - to_child.close() + # send information to child + Popen._tls.process_handle = int(hp) + try: + dump(prep_data, to_child, HIGHEST_PROTOCOL) + dump(process_obj, to_child, HIGHEST_PROTOCOL) + finally: + del Popen._tls.process_handle @staticmethod def thread_is_spawning(): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 4 21:56:28 2012 From: python-checkins at python.org (r.david.murray) Date: Mon, 04 Jun 2012 21:56:28 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_=238739=3A_fix_omission_of_?= =?utf8?q?DEBUGSTREAM_reset_in_new_test_in_test=5Fsmtpd=2E?= Message-ID: http://hg.python.org/cpython/rev/079c1942eedf changeset: 77350:079c1942eedf user: R David Murray date: Mon Jun 04 15:55:51 2012 -0400 summary: #8739: fix omission of DEBUGSTREAM reset in new test in test_smtpd. This clears up an error in detected by refleak mode that showed up when test_smtplib was run after test_smtpd in the same refleak run. 
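The general pattern behind the fix is to save a module-level global in setUp and restore it in tearDown, so later tests (and repeated refleak runs) see the module in its original state. A minimal sketch of that pattern, simplified and not the actual test_smtpd test case, looks like this:

    import io
    import unittest
    import smtpd

    class DebugStreamTest(unittest.TestCase):
        def setUp(self):
            # Save the original module-level stream before replacing it.
            self.old_debugstream = smtpd.DEBUGSTREAM
            self.debug = smtpd.DEBUGSTREAM = io.StringIO()

        def tearDown(self):
            # Restore the saved stream so the module is left as it was found.
            smtpd.DEBUGSTREAM = self.old_debugstream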
files: Lib/test/test_smtpd.py | 2 ++ 1 files changed, 2 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_smtpd.py b/Lib/test/test_smtpd.py --- a/Lib/test/test_smtpd.py +++ b/Lib/test/test_smtpd.py @@ -507,6 +507,7 @@ def setUp(self): smtpd.socket = asyncore.socket = mock_socket + self.old_debugstream = smtpd.DEBUGSTREAM self.debug = smtpd.DEBUGSTREAM = io.StringIO() self.server = DummyServer('a', 'b') conn, addr = self.server.accept() @@ -516,6 +517,7 @@ def tearDown(self): asyncore.close_all() asyncore.socket = smtpd.socket = socket + smtpd.DEBUGSTREAM = self.old_debugstream def write_line(self, line): self.channel.socket.queue_recv(line) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 4 22:54:11 2012 From: python-checkins at python.org (victor.stinner) Date: Mon, 04 Jun 2012 22:54:11 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314993=3A_Use_stand?= =?utf8?q?ard_=22unsigned_char=22_instead_of_a_unsigned_char_bitfield?= Message-ID: http://hg.python.org/cpython/rev/09736ae1c314 changeset: 77351:09736ae1c314 user: Victor Stinner date: Mon Jun 04 22:52:12 2012 +0200 summary: Issue #14993: Use standard "unsigned char" instead of a unsigned char bitfield files: Include/unicodeobject.h | 10 +++----- Objects/stringlib/unicode_format.h | 2 +- Objects/unicodeobject.c | 20 +++++++++--------- 3 files changed, 15 insertions(+), 17 deletions(-) diff --git a/Include/unicodeobject.h b/Include/unicodeobject.h --- a/Include/unicodeobject.h +++ b/Include/unicodeobject.h @@ -901,12 +901,10 @@ /* minimum length of the buffer when overallocation is enabled, see _PyUnicodeWriter_Init() */ Py_ssize_t min_length; - struct { - unsigned char overallocate:1; - /* If readonly is 1, buffer is a shared string (cannot be modified) - and size is set to 0. */ - unsigned char readonly:1; - } flags; + unsigned char overallocate; + /* If readonly is 1, buffer is a shared string (cannot be modified) + and size is set to 0. */ + unsigned char readonly; } _PyUnicodeWriter ; /* Initialize a Unicode writer. 
diff --git a/Objects/stringlib/unicode_format.h b/Objects/stringlib/unicode_format.h --- a/Objects/stringlib/unicode_format.h +++ b/Objects/stringlib/unicode_format.h @@ -898,7 +898,7 @@ if (field_present) { if (iter.str.start == iter.str.end) - writer->flags.overallocate = 0; + writer->overallocate = 0; if (!output_markup(&field_name, &format_spec, format_spec_needs_expanding, conversion, writer, args, kwargs, recursion_depth, auto_number)) diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -12808,7 +12808,7 @@ writer->kind = 5; /* invalid kind */ #endif writer->min_length = Py_MAX(min_length, 100); - writer->flags.overallocate = (min_length > 0); + writer->overallocate = (min_length > 0); } int @@ -12827,7 +12827,7 @@ newlen = writer->pos + length; if (writer->buffer == NULL) { - if (writer->flags.overallocate) { + if (writer->overallocate) { /* overallocate 25% to limit the number of resize */ if (newlen <= (PY_SSIZE_T_MAX - newlen / 4)) newlen += newlen / 4; @@ -12842,7 +12842,7 @@ } if (newlen > writer->size) { - if (writer->flags.overallocate) { + if (writer->overallocate) { /* overallocate 25% to limit the number of resize */ if (newlen <= (PY_SSIZE_T_MAX - newlen / 4)) newlen += newlen / 4; @@ -12850,7 +12850,7 @@ newlen = writer->min_length; } - if (maxchar > writer->maxchar || writer->flags.readonly) { + if (maxchar > writer->maxchar || writer->readonly) { /* resize + widen */ newbuffer = PyUnicode_New(newlen, maxchar); if (newbuffer == NULL) @@ -12858,7 +12858,7 @@ _PyUnicode_FastCopyCharacters(newbuffer, 0, writer->buffer, 0, writer->pos); Py_DECREF(writer->buffer); - writer->flags.readonly = 0; + writer->readonly = 0; } else { newbuffer = resize_compact(writer->buffer, newlen); @@ -12869,7 +12869,7 @@ _PyUnicodeWriter_Update(writer); } else if (maxchar > writer->maxchar) { - assert(!writer->flags.readonly); + assert(!writer->readonly); newbuffer = PyUnicode_New(writer->size, maxchar); if (newbuffer == NULL) return -1; @@ -12895,11 +12895,11 @@ return 0; maxchar = PyUnicode_MAX_CHAR_VALUE(str); if (maxchar > writer->maxchar || len > writer->size - writer->pos) { - if (writer->buffer == NULL && !writer->flags.overallocate) { + if (writer->buffer == NULL && !writer->overallocate) { Py_INCREF(str); writer->buffer = str; _PyUnicodeWriter_Update(writer); - writer->flags.readonly = 1; + writer->readonly = 1; writer->size = 0; writer->pos += len; return 0; @@ -12921,7 +12921,7 @@ Py_INCREF(unicode_empty); return unicode_empty; } - if (writer->flags.readonly) { + if (writer->readonly) { assert(PyUnicode_GET_LENGTH(writer->buffer) == writer->pos); return writer->buffer; } @@ -13638,7 +13638,7 @@ goto onError; } if (fmtcnt == 0) - writer.flags.overallocate = 0; + writer.overallocate = 0; if (c == '%') { if (_PyUnicodeWriter_Prepare(&writer, 1, '%') == -1) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 4 23:49:46 2012 From: python-checkins at python.org (barry.warsaw) Date: Mon, 04 Jun 2012 23:49:46 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Mark_PEP_421_as_final?= Message-ID: http://hg.python.org/peps/rev/c60be355412e changeset: 4445:c60be355412e user: Barry Warsaw date: Mon Jun 04 17:49:40 2012 -0400 summary: Mark PEP 421 as final files: pep-0421.txt | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/pep-0421.txt b/pep-0421.txt --- a/pep-0421.txt +++ b/pep-0421.txt @@ -4,7 +4,7 @@ Last-Modified: $Date$ Author: Eric Snow BDFL-Delegate: Barry 
Warsaw -Status: Accepted +Status: Final Type: Standards Track Content-Type: text/x-rst Created: 26-April-2012 -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Mon Jun 4 23:55:52 2012 From: python-checkins at python.org (nadeem.vawda) Date: Mon, 04 Jun 2012 23:55:52 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Add_fileobj_support_to_gzip?= =?utf8?b?Lm9wZW4oKS4=?= Message-ID: http://hg.python.org/cpython/rev/d87ec233d514 changeset: 77352:d87ec233d514 user: Nadeem Vawda date: Mon Jun 04 23:21:38 2012 +0200 summary: Add fileobj support to gzip.open(). files: Doc/library/gzip.rst | 20 +++++++++++--------- Lib/gzip.py | 13 ++++++++++++- Lib/test/test_gzip.py | 13 +++++++++++++ Misc/NEWS | 2 ++ 4 files changed, 38 insertions(+), 10 deletions(-) diff --git a/Doc/library/gzip.rst b/Doc/library/gzip.rst --- a/Doc/library/gzip.rst +++ b/Doc/library/gzip.rst @@ -14,10 +14,10 @@ The data compression is provided by the :mod:`zlib` module. The :mod:`gzip` module provides the :class:`GzipFile` class, as well as the -:func:`gzip.open`, :func:`compress` and :func:`decompress` convenience -functions. The :class:`GzipFile` class reads and writes :program:`gzip`\ -format -files, automatically compressing or decompressing the data so that it looks like -an ordinary :term:`file object`. +:func:`.open`, :func:`compress` and :func:`decompress` convenience functions. +The :class:`GzipFile` class reads and writes :program:`gzip`\ -format files, +automatically compressing or decompressing the data so that it looks like an +ordinary :term:`file object`. Note that additional file formats which can be decompressed by the :program:`gzip` and :program:`gunzip` programs, such as those produced by @@ -28,9 +28,11 @@ .. function:: open(filename, mode='rb', compresslevel=9, encoding=None, errors=None, newline=None) - Open *filename* as a gzip-compressed file in binary or text mode. + Open a gzip-compressed file in binary or text mode, returning a :term:`file + object`. - Returns a :term:`file object`. + The *filename* argument can be an actual filename (a :class:`str` or + :class:`bytes` object), or an existing file object to read from or write to. The *mode* argument can be any of ``'r'``, ``'rb'``, ``'a'``, ``'ab'``, ``'w'``, or ``'wb'`` for binary mode, or ``'rt'``, ``'at'``, or ``'wt'`` for @@ -48,8 +50,8 @@ handling behavior, and line ending(s). .. versionchanged:: 3.3 - Support for text mode was added, along with the *encoding*, *errors* and - *newline* arguments. + Added support for *filename* being a file object, support for text mode, + and the *encoding*, *errors* and *newline* arguments. .. class:: GzipFile(filename=None, mode=None, compresslevel=9, fileobj=None, mtime=None) @@ -75,7 +77,7 @@ is the mode of *fileobj* if discernible; otherwise, the default is ``'rb'``. Note that the file is always opened in binary mode. To open a compressed file - in text mode, use :func:`gzip.open` (or wrap your :class:`GzipFile` with an + in text mode, use :func:`.open` (or wrap your :class:`GzipFile` with an :class:`io.TextIOWrapper`). The *compresslevel* argument is an integer from ``1`` to ``9`` controlling the diff --git a/Lib/gzip.py b/Lib/gzip.py --- a/Lib/gzip.py +++ b/Lib/gzip.py @@ -20,6 +20,9 @@ encoding=None, errors=None, newline=None): """Open a gzip-compressed file in binary or text mode. + The filename argument can be an actual filename (a str or bytes object), or + an existing file object to read from or write to. 
+ The mode argument can be "r", "rb", "w", "wb", "a" or "ab" for binary mode, or "rt", "wt" or "at" for text mode. The default mode is "rb", and the default compresslevel is 9. @@ -43,7 +46,15 @@ raise ValueError("Argument 'errors' not supported in binary mode") if newline is not None: raise ValueError("Argument 'newline' not supported in binary mode") - binary_file = GzipFile(filename, mode.replace("t", ""), compresslevel) + + gz_mode = mode.replace("t", "") + if isinstance(filename, (str, bytes)): + binary_file = GzipFile(filename, gz_mode, compresslevel) + elif hasattr(filename, "read") or hasattr(filename, "write"): + binary_file = GzipFile(None, gz_mode, compresslevel, filename) + else: + raise TypeError("filename must be a str or bytes object, or a file") + if "t" in mode: return io.TextIOWrapper(binary_file, encoding, errors, newline) else: diff --git a/Lib/test/test_gzip.py b/Lib/test/test_gzip.py --- a/Lib/test/test_gzip.py +++ b/Lib/test/test_gzip.py @@ -424,8 +424,21 @@ file_data = gzip.decompress(f.read()).decode("ascii") self.assertEqual(file_data, uncompressed_raw * 2) + def test_fileobj(self): + uncompressed_bytes = data1 * 50 + uncompressed_str = uncompressed_bytes.decode("ascii") + compressed = gzip.compress(uncompressed_bytes) + with gzip.open(io.BytesIO(compressed), "r") as f: + self.assertEqual(f.read(), uncompressed_bytes) + with gzip.open(io.BytesIO(compressed), "rb") as f: + self.assertEqual(f.read(), uncompressed_bytes) + with gzip.open(io.BytesIO(compressed), "rt") as f: + self.assertEqual(f.read(), uncompressed_str) + def test_bad_params(self): # Test invalid parameter combinations. + with self.assertRaises(TypeError): + gzip.open(123.456) with self.assertRaises(ValueError): gzip.open(self.filename, "wbt") with self.assertRaises(ValueError): diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -15,6 +15,8 @@ Library ------- +- gzip.open() now accepts file objects as well as filenames. + - Issue #14992: os.makedirs(path, exist_ok=True) would raise an OSError when the path existed and had the S_ISGID mode bit set when it was not explicitly asked for. This is no longer an exception as mkdir -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 4 23:55:52 2012 From: python-checkins at python.org (nadeem.vawda) Date: Mon, 04 Jun 2012 23:55:52 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Make_BZ2File=27s_fileobj_su?= =?utf8?q?pport_easier_to_use=2E?= Message-ID: http://hg.python.org/cpython/rev/544e64e18a8a changeset: 77353:544e64e18a8a user: Nadeem Vawda date: Mon Jun 04 23:31:20 2012 +0200 summary: Make BZ2File's fileobj support easier to use. The fileobj argument was added during the 3.3 development cycle, so this change does not break backward compatibility with 3.2. files: Doc/library/bz2.rst | 16 +++++++----- Lib/bz2.py | 17 ++++++------- Lib/tarfile.py | 4 +- Lib/test/test_bz2.py | 40 +++++++++++++++++++++---------- Misc/NEWS | 3 ++ 5 files changed, 49 insertions(+), 31 deletions(-) diff --git a/Doc/library/bz2.rst b/Doc/library/bz2.rst --- a/Doc/library/bz2.rst +++ b/Doc/library/bz2.rst @@ -26,17 +26,18 @@ (De)compression of files ------------------------ -.. class:: BZ2File(filename=None, mode='r', buffering=None, compresslevel=9, \*, fileobj=None) +.. class:: BZ2File(filename, mode='r', buffering=None, compresslevel=9) Open a bzip2-compressed file. - The :class:`BZ2File` can wrap an existing :term:`file object` (given by - *fileobj*), or operate directly on a named file (named by *filename*). 
- Exactly one of these two parameters should be provided. + If *filename* is a :class:`str` or :class:`bytes` object, open the named file + directly. Otherwise, *filename* should be a :term:`file object`, which will + be used to read or write the compressed data. The *mode* argument can be either ``'r'`` for reading (default), ``'w'`` for - overwriting, or ``'a'`` for appending. If *fileobj* is provided, a mode of - ``'w'`` does not truncate the file, and is instead equivalent to ``'a'``. + overwriting, or ``'a'`` for appending. If *filename* is a file object (rather + than an actual file name), a mode of ``'w'`` does not truncate the file, and + is instead equivalent to ``'a'``. The *buffering* argument is ignored. Its use is deprecated. @@ -69,7 +70,8 @@ :meth:`read1` and :meth:`readinto` methods were added. .. versionchanged:: 3.3 - The *fileobj* argument to the constructor was added. + Support was added for *filename* being a :term:`file object` instead of an + actual filename. .. versionchanged:: 3.3 The ``'a'`` (append) mode was added, along with support for reading diff --git a/Lib/bz2.py b/Lib/bz2.py --- a/Lib/bz2.py +++ b/Lib/bz2.py @@ -39,13 +39,12 @@ returned as bytes, and data to be written should be given as bytes. """ - def __init__(self, filename=None, mode="r", buffering=None, - compresslevel=9, *, fileobj=None): + def __init__(self, filename, mode="r", buffering=None, compresslevel=9): """Open a bzip2-compressed file. - If filename is given, open the named file. Otherwise, operate on - the file object given by fileobj. Exactly one of these two - parameters should be provided. + If filename is a str or bytes object, is gives the name of the file to + be opened. Otherwise, it should be a file object, which will be used to + read or write the compressed data. mode can be 'r' for reading (default), 'w' for (over)writing, or 'a' for appending. @@ -91,15 +90,15 @@ else: raise ValueError("Invalid mode: {!r}".format(mode)) - if filename is not None and fileobj is None: + if isinstance(filename, (str, bytes)): self._fp = open(filename, mode) self._closefp = True self._mode = mode_code - elif fileobj is not None and filename is None: - self._fp = fileobj + elif hasattr(filename, "read") or hasattr(filename, "write"): + self._fp = filename self._mode = mode_code else: - raise ValueError("Must give exactly one of filename and fileobj") + raise TypeError("filename must be a str or bytes object, or a file") def close(self): """Flush and close the file. 
diff --git a/Lib/tarfile.py b/Lib/tarfile.py --- a/Lib/tarfile.py +++ b/Lib/tarfile.py @@ -1657,8 +1657,8 @@ except ImportError: raise CompressionError("bz2 module is not available") - fileobj = bz2.BZ2File(filename=name if fileobj is None else None, - mode=mode, fileobj=fileobj, compresslevel=compresslevel) + fileobj = bz2.BZ2File(fileobj or name, mode, + compresslevel=compresslevel) try: t = cls.taropen(name, mode, fileobj, **kwargs) diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py --- a/Lib/test/test_bz2.py +++ b/Lib/test/test_bz2.py @@ -81,6 +81,20 @@ with open(self.filename, "wb") as f: f.write(self.DATA * streams) + def testBadArgs(self): + with self.assertRaises(TypeError): + BZ2File(123.456) + with self.assertRaises(ValueError): + BZ2File("/dev/null", "z") + with self.assertRaises(ValueError): + BZ2File("/dev/null", "rx") + with self.assertRaises(ValueError): + BZ2File("/dev/null", "rbt") + with self.assertRaises(ValueError): + BZ2File("/dev/null", compresslevel=0) + with self.assertRaises(ValueError): + BZ2File("/dev/null", compresslevel=10) + def testRead(self): self.createTempFile() with BZ2File(self.filename) as bz2f: @@ -348,7 +362,7 @@ def testFileno(self): self.createTempFile() with open(self.filename, 'rb') as rawf: - bz2f = BZ2File(fileobj=rawf) + bz2f = BZ2File(rawf) try: self.assertEqual(bz2f.fileno(), rawf.fileno()) finally: @@ -356,7 +370,7 @@ self.assertRaises(ValueError, bz2f.fileno) def testSeekable(self): - bz2f = BZ2File(fileobj=BytesIO(self.DATA)) + bz2f = BZ2File(BytesIO(self.DATA)) try: self.assertTrue(bz2f.seekable()) bz2f.read() @@ -365,7 +379,7 @@ bz2f.close() self.assertRaises(ValueError, bz2f.seekable) - bz2f = BZ2File(fileobj=BytesIO(), mode="w") + bz2f = BZ2File(BytesIO(), mode="w") try: self.assertFalse(bz2f.seekable()) finally: @@ -374,7 +388,7 @@ src = BytesIO(self.DATA) src.seekable = lambda: False - bz2f = BZ2File(fileobj=src) + bz2f = BZ2File(src) try: self.assertFalse(bz2f.seekable()) finally: @@ -382,7 +396,7 @@ self.assertRaises(ValueError, bz2f.seekable) def testReadable(self): - bz2f = BZ2File(fileobj=BytesIO(self.DATA)) + bz2f = BZ2File(BytesIO(self.DATA)) try: self.assertTrue(bz2f.readable()) bz2f.read() @@ -391,7 +405,7 @@ bz2f.close() self.assertRaises(ValueError, bz2f.readable) - bz2f = BZ2File(fileobj=BytesIO(), mode="w") + bz2f = BZ2File(BytesIO(), mode="w") try: self.assertFalse(bz2f.readable()) finally: @@ -399,7 +413,7 @@ self.assertRaises(ValueError, bz2f.readable) def testWritable(self): - bz2f = BZ2File(fileobj=BytesIO(self.DATA)) + bz2f = BZ2File(BytesIO(self.DATA)) try: self.assertFalse(bz2f.writable()) bz2f.read() @@ -408,7 +422,7 @@ bz2f.close() self.assertRaises(ValueError, bz2f.writable) - bz2f = BZ2File(fileobj=BytesIO(), mode="w") + bz2f = BZ2File(BytesIO(), mode="w") try: self.assertTrue(bz2f.writable()) finally: @@ -512,14 +526,14 @@ def testReadBytesIO(self): with BytesIO(self.DATA) as bio: - with BZ2File(fileobj=bio) as bz2f: + with BZ2File(bio) as bz2f: self.assertRaises(TypeError, bz2f.read, None) self.assertEqual(bz2f.read(), self.TEXT) self.assertFalse(bio.closed) def testPeekBytesIO(self): with BytesIO(self.DATA) as bio: - with BZ2File(fileobj=bio) as bz2f: + with BZ2File(bio) as bz2f: pdata = bz2f.peek() self.assertNotEqual(len(pdata), 0) self.assertTrue(self.TEXT.startswith(pdata)) @@ -527,7 +541,7 @@ def testWriteBytesIO(self): with BytesIO() as bio: - with BZ2File(fileobj=bio, mode="w") as bz2f: + with BZ2File(bio, "w") as bz2f: self.assertRaises(TypeError, bz2f.write) bz2f.write(self.TEXT) 
self.assertEqual(self.decompress(bio.getvalue()), self.TEXT) @@ -535,14 +549,14 @@ def testSeekForwardBytesIO(self): with BytesIO(self.DATA) as bio: - with BZ2File(fileobj=bio) as bz2f: + with BZ2File(bio) as bz2f: self.assertRaises(TypeError, bz2f.seek) bz2f.seek(150) self.assertEqual(bz2f.read(), self.TEXT[150:]) def testSeekBackwardsBytesIO(self): with BytesIO(self.DATA) as bio: - with BZ2File(fileobj=bio) as bz2f: + with BZ2File(bio) as bz2f: bz2f.read(500) bz2f.seek(-150, 1) self.assertEqual(bz2f.read(), self.TEXT[500-150:]) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -15,6 +15,9 @@ Library ------- +- BZ2File.__init__() now accepts a file object as its first argument, rather + than requiring a separate "fileobj" argument. + - gzip.open() now accepts file objects as well as filenames. - Issue #14992: os.makedirs(path, exist_ok=True) would raise an OSError -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 4 23:55:53 2012 From: python-checkins at python.org (nadeem.vawda) Date: Mon, 04 Jun 2012 23:55:53 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Clarify_acceptable_values_f?= =?utf8?b?b3IgQloyRmlsZS5fX2luaXRfXydzIG1vZGUgYXJndW1lbnQu?= Message-ID: http://hg.python.org/cpython/rev/3235748e6e81 changeset: 77354:3235748e6e81 user: Nadeem Vawda date: Mon Jun 04 23:31:22 2012 +0200 summary: Clarify acceptable values for BZ2File.__init__'s mode argument. files: Doc/library/bz2.rst | 8 +++++--- Lib/bz2.py | 4 ++-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/Doc/library/bz2.rst b/Doc/library/bz2.rst --- a/Doc/library/bz2.rst +++ b/Doc/library/bz2.rst @@ -35,9 +35,11 @@ be used to read or write the compressed data. The *mode* argument can be either ``'r'`` for reading (default), ``'w'`` for - overwriting, or ``'a'`` for appending. If *filename* is a file object (rather - than an actual file name), a mode of ``'w'`` does not truncate the file, and - is instead equivalent to ``'a'``. + overwriting, or ``'a'`` for appending. These can equivalently be given as + ``'rb'``, ``'wb'``, and ``'ab'`` respectively. + + If *filename* is a file object (rather than an actual file name), a mode of + ``'w'`` does not truncate the file, and is instead equivalent to ``'a'``. The *buffering* argument is ignored. Its use is deprecated. diff --git a/Lib/bz2.py b/Lib/bz2.py --- a/Lib/bz2.py +++ b/Lib/bz2.py @@ -46,8 +46,8 @@ be opened. Otherwise, it should be a file object, which will be used to read or write the compressed data. - mode can be 'r' for reading (default), 'w' for (over)writing, or - 'a' for appending. + mode can be 'r' for reading (default), 'w' for (over)writing, or 'a' for + appending. These can equivalently be given as 'rb', 'wb', and 'ab'. buffering is ignored. Its use is deprecated. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 4 23:55:55 2012 From: python-checkins at python.org (nadeem.vawda) Date: Mon, 04 Jun 2012 23:55:55 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Add_a_function_bz2=2Eopen?= =?utf8?b?KCksIHRvIG1hdGNoIGd6aXAub3BlbigpLg==?= Message-ID: http://hg.python.org/cpython/rev/2143386a17a4 changeset: 77355:2143386a17a4 user: Nadeem Vawda date: Mon Jun 04 23:32:38 2012 +0200 summary: Add a function bz2.open(), to match gzip.open(). 
files: Doc/library/bz2.rst | 33 ++++++++- Lib/bz2.py | 47 +++++++++++- Lib/test/test_bz2.py | 117 ++++++++++++++++++++++++++++-- Misc/NEWS | 3 + 4 files changed, 184 insertions(+), 16 deletions(-) diff --git a/Doc/library/bz2.rst b/Doc/library/bz2.rst --- a/Doc/library/bz2.rst +++ b/Doc/library/bz2.rst @@ -14,7 +14,8 @@ The :mod:`bz2` module contains: -* The :class:`BZ2File` class for reading and writing compressed files. +* The :func:`.open` function and :class:`BZ2File` class for reading and + writing compressed files. * The :class:`BZ2Compressor` and :class:`BZ2Decompressor` classes for incremental (de)compression. * The :func:`compress` and :func:`decompress` functions for one-shot @@ -26,9 +27,37 @@ (De)compression of files ------------------------ +.. function:: open(filename, mode='r', compresslevel=9, encoding=None, errors=None, newline=None) + + Open a bzip2-compressed file in binary or text mode, returning a :term:`file + object`. + + As with the constructor for :class:`BZ2File`, the *filename* argument can be + an actual filename (a :class:`str` or :class:`bytes` object), or an existing + file object to read from or write to. + + The *mode* argument can be any of ``'r'``, ``'rb'``, ``'w'``, ``'wb'``, + ``'a'``, or ``'ab'`` for binary mode, or ``'rt'``, ``'wt'``, or ``'at'`` for + text mode. The default is ``'rb'``. + + The *compresslevel* argument is an integer from 1 to 9, as for the + :class:`BZ2File` constructor. + + For binary mode, this function is equivalent to the :class:`BZ2File` + constructor: ``BZ2File(filename, mode, compresslevel=compresslevel)``. In + this case, the *encoding*, *errors* and *newline* arguments must not be + provided. + + For text mode, a :class:`BZ2File` object is created, and wrapped in an + :class:`io.TextIOWrapper` instance with the specified encoding, error + handling behavior, and line ending(s). + + .. versionadded:: 3.3 + + .. class:: BZ2File(filename, mode='r', buffering=None, compresslevel=9) - Open a bzip2-compressed file. + Open a bzip2-compressed file in binary mode. If *filename* is a :class:`str` or :class:`bytes` object, open the named file directly. Otherwise, *filename* should be a :term:`file object`, which will diff --git a/Lib/bz2.py b/Lib/bz2.py --- a/Lib/bz2.py +++ b/Lib/bz2.py @@ -4,11 +4,12 @@ (de)compression, and functions for one-shot (de)compression. """ -__all__ = ["BZ2File", "BZ2Compressor", "BZ2Decompressor", "compress", - "decompress"] +__all__ = ["BZ2File", "BZ2Compressor", "BZ2Decompressor", + "open", "compress", "decompress"] __author__ = "Nadeem Vawda " +import builtins import io import warnings @@ -91,7 +92,7 @@ raise ValueError("Invalid mode: {!r}".format(mode)) if isinstance(filename, (str, bytes)): - self._fp = open(filename, mode) + self._fp = builtins.open(filename, mode) self._closefp = True self._mode = mode_code elif hasattr(filename, "read") or hasattr(filename, "write"): @@ -391,6 +392,46 @@ return self._pos +def open(filename, mode="rb", compresslevel=9, + encoding=None, errors=None, newline=None): + """Open a bzip2-compressed file in binary or text mode. + + The filename argument can be an actual filename (a str or bytes object), or + an existing file object to read from or write to. + + The mode argument can be "r", "rb", "w", "wb", "a" or "ab" for binary mode, + or "rt", "wt" or "at" for text mode. The default mode is "rb", and the + default compresslevel is 9. + + For binary mode, this function is equivalent to the BZ2File constructor: + BZ2File(filename, mode, compresslevel). 
In this case, the encoding, errors + and newline arguments must not be provided. + + For text mode, a BZ2File object is created, and wrapped in an + io.TextIOWrapper instance with the specified encoding, error handling + behavior, and line ending(s). + + """ + if "t" in mode: + if "b" in mode: + raise ValueError("Invalid mode: %r" % (mode,)) + else: + if encoding is not None: + raise ValueError("Argument 'encoding' not supported in binary mode") + if errors is not None: + raise ValueError("Argument 'errors' not supported in binary mode") + if newline is not None: + raise ValueError("Argument 'newline' not supported in binary mode") + + bz_mode = mode.replace("t", "") + binary_file = BZ2File(filename, bz_mode, compresslevel=compresslevel) + + if "t" in mode: + return io.TextIOWrapper(binary_file, encoding, errors, newline) + else: + return binary_file + + def compress(data, compresslevel=9): """Compress a block of data. diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py --- a/Lib/test/test_bz2.py +++ b/Lib/test/test_bz2.py @@ -48,6 +48,13 @@ TEXT = b''.join(TEXT_LINES) DATA = b'BZh91AY&SY.\xc8N\x18\x00\x01>_\x80\x00\x10@\x02\xff\xf0\x01\x07n\x00?\xe7\xff\xe00\x01\x99\xaa\x00\xc0\x03F\x86\x8c#&\x83F\x9a\x03\x06\xa6\xd0\xa6\x93M\x0fQ\xa7\xa8\x06\x804hh\x12$\x11\xa4i4\xf14S\xd2\x88\xe5\xcd9gd6\x0b\n\xe9\x9b\xd5\x8a\x99\xf7\x08.K\x8ev\xfb\xf7xw\xbb\xdf\xa1\x92\xf1\xdd|/";\xa2\xba\x9f\xd5\xb1#A\xb6\xf6\xb3o\xc9\xc5y\\\xebO\xe7\x85\x9a\xbc\xb6f8\x952\xd5\xd7"%\x89>V,\xf7\xa6z\xe2\x9f\xa3\xdf\x11\x11"\xd6E)I\xa9\x13^\xca\xf3r\xd0\x03U\x922\xf26\xec\xb6\xed\x8b\xc3U\x13\x9d\xc5\x170\xa4\xfa^\x92\xacDF\x8a\x97\xd6\x19\xfe\xdd\xb8\xbd\x1a\x9a\x19\xa3\x80ankR\x8b\xe5\xd83]\xa9\xc6\x08\x82f\xf6\xb9"6l$\xb8j@\xc0\x8a\xb0l1..\xbak\x83ls\x15\xbc\xf4\xc1\x13\xbe\xf8E\xb8\x9d\r\xa8\x9dk\x84\xd3n\xfa\xacQ\x07\xb1%y\xaav\xb4\x08\xe0z\x1b\x16\xf5\x04\xe9\xcc\xb9\x08z\x1en7.G\xfc]\xc9\x14\xe1B@\xbb!8`' + def setUp(self): + self.filename = TESTFN + + def tearDown(self): + if os.path.isfile(self.filename): + os.unlink(self.filename) + if has_cmdline_bunzip2: def decompress(self, data): pop = subprocess.Popen("bunzip2", shell=True, @@ -70,13 +77,6 @@ class BZ2FileTest(BaseTest): "Test BZ2File type miscellaneous methods." - def setUp(self): - self.filename = TESTFN - - def tearDown(self): - if os.path.isfile(self.filename): - os.unlink(self.filename) - def createTempFile(self, streams=1): with open(self.filename, "wb") as f: f.write(self.DATA * streams) @@ -650,9 +650,7 @@ decompressed = None -class FuncTest(BaseTest): - "Test module functions" - +class CompressDecompressTest(BaseTest): def testCompress(self): data = bz2.compress(self.TEXT) self.assertEqual(self.decompress(data), self.TEXT) @@ -672,12 +670,109 @@ text = bz2.decompress(self.DATA * 5) self.assertEqual(text, self.TEXT * 5) + +class OpenTest(BaseTest): + def test_binary_modes(self): + with bz2.open(self.filename, "wb") as f: + f.write(self.TEXT) + with open(self.filename, "rb") as f: + file_data = bz2.decompress(f.read()) + self.assertEqual(file_data, self.TEXT) + with bz2.open(self.filename, "rb") as f: + self.assertEqual(f.read(), self.TEXT) + with bz2.open(self.filename, "ab") as f: + f.write(self.TEXT) + with open(self.filename, "rb") as f: + file_data = bz2.decompress(f.read()) + self.assertEqual(file_data, self.TEXT * 2) + + def test_implicit_binary_modes(self): + # Test implicit binary modes (no "b" or "t" in mode string). 
+ with bz2.open(self.filename, "w") as f: + f.write(self.TEXT) + with open(self.filename, "rb") as f: + file_data = bz2.decompress(f.read()) + self.assertEqual(file_data, self.TEXT) + with bz2.open(self.filename, "r") as f: + self.assertEqual(f.read(), self.TEXT) + with bz2.open(self.filename, "a") as f: + f.write(self.TEXT) + with open(self.filename, "rb") as f: + file_data = bz2.decompress(f.read()) + self.assertEqual(file_data, self.TEXT * 2) + + def test_text_modes(self): + text = self.TEXT.decode("ascii") + text_native_eol = text.replace("\n", os.linesep) + with bz2.open(self.filename, "wt") as f: + f.write(text) + with open(self.filename, "rb") as f: + file_data = bz2.decompress(f.read()).decode("ascii") + self.assertEqual(file_data, text_native_eol) + with bz2.open(self.filename, "rt") as f: + self.assertEqual(f.read(), text) + with bz2.open(self.filename, "at") as f: + f.write(text) + with open(self.filename, "rb") as f: + file_data = bz2.decompress(f.read()).decode("ascii") + self.assertEqual(file_data, text_native_eol * 2) + + def test_fileobj(self): + with bz2.open(BytesIO(self.DATA), "r") as f: + self.assertEqual(f.read(), self.TEXT) + with bz2.open(BytesIO(self.DATA), "rb") as f: + self.assertEqual(f.read(), self.TEXT) + text = self.TEXT.decode("ascii") + with bz2.open(BytesIO(self.DATA), "rt") as f: + self.assertEqual(f.read(), text) + + def test_bad_params(self): + # Test invalid parameter combinations. + with self.assertRaises(ValueError): + bz2.open(self.filename, "wbt") + with self.assertRaises(ValueError): + bz2.open(self.filename, "rb", encoding="utf-8") + with self.assertRaises(ValueError): + bz2.open(self.filename, "rb", errors="ignore") + with self.assertRaises(ValueError): + bz2.open(self.filename, "rb", newline="\n") + + def test_encoding(self): + # Test non-default encoding. + text = self.TEXT.decode("ascii") + text_native_eol = text.replace("\n", os.linesep) + with bz2.open(self.filename, "wt", encoding="utf-16-le") as f: + f.write(text) + with open(self.filename, "rb") as f: + file_data = bz2.decompress(f.read()).decode("utf-16-le") + self.assertEqual(file_data, text_native_eol) + with bz2.open(self.filename, "rt", encoding="utf-16-le") as f: + self.assertEqual(f.read(), text) + + def test_encoding_error_handler(self): + # Test with non-default encoding error handler. + with bz2.open(self.filename, "wb") as f: + f.write(b"foo\xffbar") + with bz2.open(self.filename, "rt", encoding="ascii", errors="ignore") \ + as f: + self.assertEqual(f.read(), "foobar") + + def test_newline(self): + # Test with explicit newline (universal newline mode disabled). + text = self.TEXT.decode("ascii") + with bz2.open(self.filename, "wt", newline="\n") as f: + f.write(text) + with bz2.open(self.filename, "rt", newline="\r") as f: + self.assertEqual(f.readlines(), [text]) + + def test_main(): support.run_unittest( BZ2FileTest, BZ2CompressorTest, BZ2DecompressorTest, - FuncTest + CompressDecompressTest, + OpenTest, ) support.reap_children() diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -15,6 +15,9 @@ Library ------- +- The bz2 module now contains an open() function, allowing compressed files to + conveniently be opened in text mode as well as binary mode. + - BZ2File.__init__() now accepts a file object as its first argument, rather than requiring a separate "fileobj" argument. 
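In practice the new function mirrors gzip.open(). A short usage sketch, illustrative only and using a made-up file name, based on the modes documented above:

    import bz2

    path = "example.bz2"  # illustrative file name

    # Binary mode (the default): bytes in, bytes out.
    with bz2.open(path, "wb") as f:
        f.write(b"hello, bzip2\n")
    with bz2.open(path, "rb") as f:
        assert f.read() == b"hello, bzip2\n"

    # Text mode: the BZ2File is wrapped in an io.TextIOWrapper.
    with bz2.open(path, "wt", encoding="utf-8") as f:
        f.write("hello, text mode\n")
    with bz2.open(path, "rt", encoding="utf-8") as f:
        assert f.read() == "hello, text mode\n"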
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 4 23:55:55 2012 From: python-checkins at python.org (nadeem.vawda) Date: Mon, 04 Jun 2012 23:55:55 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Simplify_usage_of_LZMAFile?= =?utf8?q?=27s_fileobj_support=2C_like_with_BZ2File=2E?= Message-ID: http://hg.python.org/cpython/rev/eae12226a66d changeset: 77356:eae12226a66d user: Nadeem Vawda date: Mon Jun 04 23:34:07 2012 +0200 summary: Simplify usage of LZMAFile's fileobj support, like with BZ2File. files: Doc/library/lzma.rst | 20 +- Lib/lzma.py | 19 +- Lib/tarfile.py | 3 +- Lib/test/test_lzma.py | 230 ++++++++++++++--------------- Misc/NEWS | 4 +- 5 files changed, 133 insertions(+), 143 deletions(-) diff --git a/Doc/library/lzma.rst b/Doc/library/lzma.rst --- a/Doc/library/lzma.rst +++ b/Doc/library/lzma.rst @@ -29,18 +29,20 @@ Reading and writing compressed files ------------------------------------ -.. class:: LZMAFile(filename=None, mode="r", \*, fileobj=None, format=None, check=-1, preset=None, filters=None) +.. class:: LZMAFile(filename=None, mode="r", \*, format=None, check=-1, preset=None, filters=None) - Open an LZMA-compressed file. + Open an LZMA-compressed file in binary mode. - An :class:`LZMAFile` can wrap an existing :term:`file object` (given by - *fileobj*), or operate directly on a named file (named by *filename*). - Exactly one of these two parameters should be provided. If *fileobj* is - provided, it is not closed when the :class:`LZMAFile` is closed. + An :class:`LZMAFile` can wrap an already-open :term:`file object`, or operate + directly on a named file. The *filename* argument specifies either the file + object to wrap, or the name of the file to open (as a :class:`str` or + :class:`bytes` object). When wrapping an existing file object, the wrapped + file will not be closed when the :class:`LZMAFile` is closed. The *mode* argument can be either ``"r"`` for reading (default), ``"w"`` for - overwriting, or ``"a"`` for appending. If *fileobj* is provided, a mode of - ``"w"`` does not truncate the file, and is instead equivalent to ``"a"``. + overwriting, or ``"a"`` for appending. If *filename* is an existing file + object, a mode of ``"w"`` does not truncate the file, and is instead + equivalent to ``"a"``. When opening a file for reading, the input file may be the concatenation of multiple separate compressed streams. These are transparently decoded as a @@ -360,7 +362,7 @@ import lzma with open("file.xz", "wb") as f: f.write(b"This data will not be compressed\n") - with lzma.LZMAFile(fileobj=f, mode="w") as lzf: + with lzma.LZMAFile(f, "w") as lzf: lzf.write(b"This *will* be compressed\n") f.write(b"Not compressed\n") diff --git a/Lib/lzma.py b/Lib/lzma.py --- a/Lib/lzma.py +++ b/Lib/lzma.py @@ -46,13 +46,12 @@ """ def __init__(self, filename=None, mode="r", *, - fileobj=None, format=None, check=-1, - preset=None, filters=None): - """Open an LZMA-compressed file. + format=None, check=-1, preset=None, filters=None): + """Open an LZMA-compressed file in binary mode. - If filename is given, open the named file. Otherwise, operate on - the file object given by fileobj. Exactly one of these two - parameters should be provided. + filename can be either an actual file name (given as a str or + bytes object), in which case the named file is opened, or it can + be an existing file object to read from or write to. mode can be "r" for reading (default), "w" for (over)writing, or "a" for appending. 
@@ -119,16 +118,16 @@ else: raise ValueError("Invalid mode: {!r}".format(mode)) - if filename is not None and fileobj is None: + if isinstance(filename, (str, bytes)): mode += "b" self._fp = open(filename, mode) self._closefp = True self._mode = mode_code - elif fileobj is not None and filename is None: - self._fp = fileobj + elif hasattr(filename, "read") or hasattr(filename, "write"): + self._fp = filename self._mode = mode_code else: - raise ValueError("Must give exactly one of filename and fileobj") + raise TypeError("filename must be a str or bytes object, or a file") def close(self): """Flush and close the file. diff --git a/Lib/tarfile.py b/Lib/tarfile.py --- a/Lib/tarfile.py +++ b/Lib/tarfile.py @@ -1681,8 +1681,7 @@ except ImportError: raise CompressionError("lzma module is not available") - fileobj = lzma.LZMAFile(filename=name if fileobj is None else None, - mode=mode, fileobj=fileobj, preset=preset) + fileobj = lzma.LZMAFile(fileobj or name, mode, preset=preset) try: t = cls.taropen(name, mode, fileobj, **kwargs) diff --git a/Lib/test/test_lzma.py b/Lib/test/test_lzma.py --- a/Lib/test/test_lzma.py +++ b/Lib/test/test_lzma.py @@ -358,11 +358,11 @@ class FileTestCase(unittest.TestCase): def test_init(self): - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: pass - with LZMAFile(fileobj=BytesIO(), mode="w") as f: + with LZMAFile(BytesIO(), "w") as f: pass - with LZMAFile(fileobj=BytesIO(), mode="a") as f: + with LZMAFile(BytesIO(), "a") as f: pass def test_init_with_filename(self): @@ -376,88 +376,84 @@ def test_init_bad_mode(self): with self.assertRaises(ValueError): - LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), mode=(3, "x")) + LZMAFile(BytesIO(COMPRESSED_XZ), (3, "x")) with self.assertRaises(ValueError): - LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), mode="") + LZMAFile(BytesIO(COMPRESSED_XZ), "") with self.assertRaises(ValueError): - LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), mode="x") + LZMAFile(BytesIO(COMPRESSED_XZ), "x") with self.assertRaises(ValueError): - LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), mode="rb") + LZMAFile(BytesIO(COMPRESSED_XZ), "rb") with self.assertRaises(ValueError): - LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), mode="r+") + LZMAFile(BytesIO(COMPRESSED_XZ), "r+") with self.assertRaises(ValueError): - LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), mode="wb") + LZMAFile(BytesIO(COMPRESSED_XZ), "wb") with self.assertRaises(ValueError): - LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), mode="w+") + LZMAFile(BytesIO(COMPRESSED_XZ), "w+") with self.assertRaises(ValueError): - LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), mode="rw") + LZMAFile(BytesIO(COMPRESSED_XZ), "rw") def test_init_bad_check(self): with self.assertRaises(TypeError): - LZMAFile(fileobj=BytesIO(), mode="w", check=b"asd") + LZMAFile(BytesIO(), "w", check=b"asd") # CHECK_UNKNOWN and anything above CHECK_ID_MAX should be invalid. with self.assertRaises(LZMAError): - LZMAFile(fileobj=BytesIO(), mode="w", check=lzma.CHECK_UNKNOWN) + LZMAFile(BytesIO(), "w", check=lzma.CHECK_UNKNOWN) with self.assertRaises(LZMAError): - LZMAFile(fileobj=BytesIO(), mode="w", check=lzma.CHECK_ID_MAX + 3) + LZMAFile(BytesIO(), "w", check=lzma.CHECK_ID_MAX + 3) # Cannot specify a check with mode="r". 
with self.assertRaises(ValueError): - LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), check=lzma.CHECK_NONE) + LZMAFile(BytesIO(COMPRESSED_XZ), check=lzma.CHECK_NONE) with self.assertRaises(ValueError): - LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), check=lzma.CHECK_CRC32) + LZMAFile(BytesIO(COMPRESSED_XZ), check=lzma.CHECK_CRC32) with self.assertRaises(ValueError): - LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), check=lzma.CHECK_CRC64) + LZMAFile(BytesIO(COMPRESSED_XZ), check=lzma.CHECK_CRC64) with self.assertRaises(ValueError): - LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), check=lzma.CHECK_SHA256) + LZMAFile(BytesIO(COMPRESSED_XZ), check=lzma.CHECK_SHA256) with self.assertRaises(ValueError): - LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), check=lzma.CHECK_UNKNOWN) + LZMAFile(BytesIO(COMPRESSED_XZ), check=lzma.CHECK_UNKNOWN) def test_init_bad_preset(self): with self.assertRaises(TypeError): - LZMAFile(fileobj=BytesIO(), mode="w", preset=4.39) + LZMAFile(BytesIO(), "w", preset=4.39) with self.assertRaises(LZMAError): - LZMAFile(fileobj=BytesIO(), mode="w", preset=10) + LZMAFile(BytesIO(), "w", preset=10) with self.assertRaises(LZMAError): - LZMAFile(fileobj=BytesIO(), mode="w", preset=23) + LZMAFile(BytesIO(), "w", preset=23) with self.assertRaises(OverflowError): - LZMAFile(fileobj=BytesIO(), mode="w", preset=-1) + LZMAFile(BytesIO(), "w", preset=-1) with self.assertRaises(OverflowError): - LZMAFile(fileobj=BytesIO(), mode="w", preset=-7) + LZMAFile(BytesIO(), "w", preset=-7) with self.assertRaises(TypeError): - LZMAFile(fileobj=BytesIO(), mode="w", preset="foo") + LZMAFile(BytesIO(), "w", preset="foo") # Cannot specify a preset with mode="r". with self.assertRaises(ValueError): - LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), preset=3) + LZMAFile(BytesIO(COMPRESSED_XZ), preset=3) def test_init_bad_filter_spec(self): with self.assertRaises(TypeError): - LZMAFile(fileobj=BytesIO(), mode="w", filters=[b"wobsite"]) + LZMAFile(BytesIO(), "w", filters=[b"wobsite"]) with self.assertRaises(ValueError): - LZMAFile(fileobj=BytesIO(), mode="w", filters=[{"xyzzy": 3}]) + LZMAFile(BytesIO(), "w", filters=[{"xyzzy": 3}]) with self.assertRaises(ValueError): - LZMAFile(fileobj=BytesIO(), mode="w", filters=[{"id": 98765}]) + LZMAFile(BytesIO(), "w", filters=[{"id": 98765}]) with self.assertRaises(ValueError): - LZMAFile(fileobj=BytesIO(), mode="w", + LZMAFile(BytesIO(), "w", filters=[{"id": lzma.FILTER_LZMA2, "foo": 0}]) with self.assertRaises(ValueError): - LZMAFile(fileobj=BytesIO(), mode="w", + LZMAFile(BytesIO(), "w", filters=[{"id": lzma.FILTER_DELTA, "foo": 0}]) with self.assertRaises(ValueError): - LZMAFile(fileobj=BytesIO(), mode="w", + LZMAFile(BytesIO(), "w", filters=[{"id": lzma.FILTER_X86, "foo": 0}]) def test_init_with_preset_and_filters(self): with self.assertRaises(ValueError): - LZMAFile(fileobj=BytesIO(), mode="w", format=lzma.FORMAT_RAW, - preset=6, filters=FILTERS_RAW_1) - - def test_init_with_filename_and_fileobj(self): - with self.assertRaises(ValueError): - LZMAFile("/dev/null", fileobj=BytesIO()) + LZMAFile(BytesIO(), "w", format=lzma.FORMAT_RAW, + preset=6, filters=FILTERS_RAW_1) def test_close(self): with BytesIO(COMPRESSED_XZ) as src: - f = LZMAFile(fileobj=src) + f = LZMAFile(src) f.close() # LZMAFile.close() should not close the underlying file object. 
self.assertFalse(src.closed) @@ -476,7 +472,7 @@ f.close() def test_closed(self): - f = LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) + f = LZMAFile(BytesIO(COMPRESSED_XZ)) try: self.assertFalse(f.closed) f.read() @@ -485,7 +481,7 @@ f.close() self.assertTrue(f.closed) - f = LZMAFile(fileobj=BytesIO(), mode="w") + f = LZMAFile(BytesIO(), "w") try: self.assertFalse(f.closed) finally: @@ -493,7 +489,7 @@ self.assertTrue(f.closed) def test_fileno(self): - f = LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) + f = LZMAFile(BytesIO(COMPRESSED_XZ)) try: self.assertRaises(UnsupportedOperation, f.fileno) finally: @@ -509,7 +505,7 @@ self.assertRaises(ValueError, f.fileno) def test_seekable(self): - f = LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) + f = LZMAFile(BytesIO(COMPRESSED_XZ)) try: self.assertTrue(f.seekable()) f.read() @@ -518,7 +514,7 @@ f.close() self.assertRaises(ValueError, f.seekable) - f = LZMAFile(fileobj=BytesIO(), mode="w") + f = LZMAFile(BytesIO(), "w") try: self.assertFalse(f.seekable()) finally: @@ -527,7 +523,7 @@ src = BytesIO(COMPRESSED_XZ) src.seekable = lambda: False - f = LZMAFile(fileobj=src) + f = LZMAFile(src) try: self.assertFalse(f.seekable()) finally: @@ -535,7 +531,7 @@ self.assertRaises(ValueError, f.seekable) def test_readable(self): - f = LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) + f = LZMAFile(BytesIO(COMPRESSED_XZ)) try: self.assertTrue(f.readable()) f.read() @@ -544,7 +540,7 @@ f.close() self.assertRaises(ValueError, f.readable) - f = LZMAFile(fileobj=BytesIO(), mode="w") + f = LZMAFile(BytesIO(), "w") try: self.assertFalse(f.readable()) finally: @@ -552,7 +548,7 @@ self.assertRaises(ValueError, f.readable) def test_writable(self): - f = LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) + f = LZMAFile(BytesIO(COMPRESSED_XZ)) try: self.assertFalse(f.writable()) f.read() @@ -561,7 +557,7 @@ f.close() self.assertRaises(ValueError, f.writable) - f = LZMAFile(fileobj=BytesIO(), mode="w") + f = LZMAFile(BytesIO(), "w") try: self.assertTrue(f.writable()) finally: @@ -569,50 +565,46 @@ self.assertRaises(ValueError, f.writable) def test_read(self): - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: self.assertEqual(f.read(), INPUT) self.assertEqual(f.read(), b"") - with LZMAFile(fileobj=BytesIO(COMPRESSED_ALONE)) as f: + with LZMAFile(BytesIO(COMPRESSED_ALONE)) as f: self.assertEqual(f.read(), INPUT) - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), - format=lzma.FORMAT_XZ) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ), format=lzma.FORMAT_XZ) as f: self.assertEqual(f.read(), INPUT) self.assertEqual(f.read(), b"") - with LZMAFile(fileobj=BytesIO(COMPRESSED_ALONE), - format=lzma.FORMAT_ALONE) as f: + with LZMAFile(BytesIO(COMPRESSED_ALONE), format=lzma.FORMAT_ALONE) as f: self.assertEqual(f.read(), INPUT) self.assertEqual(f.read(), b"") - with LZMAFile(fileobj=BytesIO(COMPRESSED_RAW_1), + with LZMAFile(BytesIO(COMPRESSED_RAW_1), format=lzma.FORMAT_RAW, filters=FILTERS_RAW_1) as f: self.assertEqual(f.read(), INPUT) self.assertEqual(f.read(), b"") - with LZMAFile(fileobj=BytesIO(COMPRESSED_RAW_2), + with LZMAFile(BytesIO(COMPRESSED_RAW_2), format=lzma.FORMAT_RAW, filters=FILTERS_RAW_2) as f: self.assertEqual(f.read(), INPUT) self.assertEqual(f.read(), b"") - with LZMAFile(fileobj=BytesIO(COMPRESSED_RAW_3), + with LZMAFile(BytesIO(COMPRESSED_RAW_3), format=lzma.FORMAT_RAW, filters=FILTERS_RAW_3) as f: self.assertEqual(f.read(), INPUT) self.assertEqual(f.read(), b"") - with LZMAFile(fileobj=BytesIO(COMPRESSED_RAW_4), + with 
LZMAFile(BytesIO(COMPRESSED_RAW_4), format=lzma.FORMAT_RAW, filters=FILTERS_RAW_4) as f: self.assertEqual(f.read(), INPUT) self.assertEqual(f.read(), b"") def test_read_0(self): - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: self.assertEqual(f.read(0), b"") - with LZMAFile(fileobj=BytesIO(COMPRESSED_ALONE)) as f: + with LZMAFile(BytesIO(COMPRESSED_ALONE)) as f: self.assertEqual(f.read(0), b"") - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), - format=lzma.FORMAT_XZ) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ), format=lzma.FORMAT_XZ) as f: self.assertEqual(f.read(0), b"") - with LZMAFile(fileobj=BytesIO(COMPRESSED_ALONE), - format=lzma.FORMAT_ALONE) as f: + with LZMAFile(BytesIO(COMPRESSED_ALONE), format=lzma.FORMAT_ALONE) as f: self.assertEqual(f.read(0), b"") def test_read_10(self): - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: chunks = [] while True: result = f.read(10) @@ -623,11 +615,11 @@ self.assertEqual(b"".join(chunks), INPUT) def test_read_multistream(self): - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ * 5)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ * 5)) as f: self.assertEqual(f.read(), INPUT * 5) - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ + COMPRESSED_ALONE)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ + COMPRESSED_ALONE)) as f: self.assertEqual(f.read(), INPUT * 2) - with LZMAFile(fileobj=BytesIO(COMPRESSED_RAW_3 * 4), + with LZMAFile(BytesIO(COMPRESSED_RAW_3 * 4), format=lzma.FORMAT_RAW, filters=FILTERS_RAW_3) as f: self.assertEqual(f.read(), INPUT * 4) @@ -637,7 +629,7 @@ saved_buffer_size = lzma._BUFFER_SIZE lzma._BUFFER_SIZE = len(COMPRESSED_XZ) try: - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ * 5)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ * 5)) as f: self.assertEqual(f.read(), INPUT * 5) finally: lzma._BUFFER_SIZE = saved_buffer_size @@ -649,20 +641,20 @@ self.assertEqual(f.read(), b"") def test_read_incomplete(self): - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ[:128])) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ[:128])) as f: self.assertRaises(EOFError, f.read) def test_read_bad_args(self): - f = LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) + f = LZMAFile(BytesIO(COMPRESSED_XZ)) f.close() self.assertRaises(ValueError, f.read) - with LZMAFile(fileobj=BytesIO(), mode="w") as f: + with LZMAFile(BytesIO(), "w") as f: self.assertRaises(ValueError, f.read) - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: self.assertRaises(TypeError, f.read, None) def test_read1(self): - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: blocks = [] while True: result = f.read1() @@ -673,11 +665,11 @@ self.assertEqual(f.read1(), b"") def test_read1_0(self): - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: self.assertEqual(f.read1(0), b"") def test_read1_10(self): - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: blocks = [] while True: result = f.read1(10) @@ -688,7 +680,7 @@ self.assertEqual(f.read1(), b"") def test_read1_multistream(self): - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ * 5)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ * 5)) as f: blocks = [] while True: result = f.read1() @@ -699,78 +691,76 @@ self.assertEqual(f.read1(), b"") def test_read1_bad_args(self): - f = LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) + f = LZMAFile(BytesIO(COMPRESSED_XZ)) f.close() 
self.assertRaises(ValueError, f.read1) - with LZMAFile(fileobj=BytesIO(), mode="w") as f: + with LZMAFile(BytesIO(), "w") as f: self.assertRaises(ValueError, f.read1) - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: self.assertRaises(TypeError, f.read1, None) def test_peek(self): - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: result = f.peek() self.assertGreater(len(result), 0) self.assertTrue(INPUT.startswith(result)) self.assertEqual(f.read(), INPUT) - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: result = f.peek(10) self.assertGreater(len(result), 0) self.assertTrue(INPUT.startswith(result)) self.assertEqual(f.read(), INPUT) def test_peek_bad_args(self): - with LZMAFile(fileobj=BytesIO(), mode="w") as f: + with LZMAFile(BytesIO(), "w") as f: self.assertRaises(ValueError, f.peek) def test_iterator(self): with BytesIO(INPUT) as f: lines = f.readlines() - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: self.assertListEqual(list(iter(f)), lines) - with LZMAFile(fileobj=BytesIO(COMPRESSED_ALONE)) as f: + with LZMAFile(BytesIO(COMPRESSED_ALONE)) as f: self.assertListEqual(list(iter(f)), lines) - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), - format=lzma.FORMAT_XZ) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ), format=lzma.FORMAT_XZ) as f: self.assertListEqual(list(iter(f)), lines) - with LZMAFile(fileobj=BytesIO(COMPRESSED_ALONE), - format=lzma.FORMAT_ALONE) as f: + with LZMAFile(BytesIO(COMPRESSED_ALONE), format=lzma.FORMAT_ALONE) as f: self.assertListEqual(list(iter(f)), lines) - with LZMAFile(fileobj=BytesIO(COMPRESSED_RAW_2), + with LZMAFile(BytesIO(COMPRESSED_RAW_2), format=lzma.FORMAT_RAW, filters=FILTERS_RAW_2) as f: self.assertListEqual(list(iter(f)), lines) def test_readline(self): with BytesIO(INPUT) as f: lines = f.readlines() - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: for line in lines: self.assertEqual(f.readline(), line) def test_readlines(self): with BytesIO(INPUT) as f: lines = f.readlines() - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: self.assertListEqual(f.readlines(), lines) def test_write(self): with BytesIO() as dst: - with LZMAFile(fileobj=dst, mode="w") as f: + with LZMAFile(dst, "w") as f: f.write(INPUT) expected = lzma.compress(INPUT) self.assertEqual(dst.getvalue(), expected) with BytesIO() as dst: - with LZMAFile(fileobj=dst, mode="w", format=lzma.FORMAT_XZ) as f: + with LZMAFile(dst, "w", format=lzma.FORMAT_XZ) as f: f.write(INPUT) expected = lzma.compress(INPUT, format=lzma.FORMAT_XZ) self.assertEqual(dst.getvalue(), expected) with BytesIO() as dst: - with LZMAFile(fileobj=dst, mode="w", format=lzma.FORMAT_ALONE) as f: + with LZMAFile(dst, "w", format=lzma.FORMAT_ALONE) as f: f.write(INPUT) expected = lzma.compress(INPUT, format=lzma.FORMAT_ALONE) self.assertEqual(dst.getvalue(), expected) with BytesIO() as dst: - with LZMAFile(fileobj=dst, mode="w", format=lzma.FORMAT_RAW, + with LZMAFile(dst, "w", format=lzma.FORMAT_RAW, filters=FILTERS_RAW_2) as f: f.write(INPUT) expected = lzma.compress(INPUT, format=lzma.FORMAT_RAW, @@ -779,7 +769,7 @@ def test_write_10(self): with BytesIO() as dst: - with LZMAFile(fileobj=dst, mode="w") as f: + with LZMAFile(dst, "w") as f: for start in range(0, len(INPUT), 10): f.write(INPUT[start:start+10]) expected = 
lzma.compress(INPUT) @@ -791,11 +781,11 @@ part3 = INPUT[1536:] expected = b"".join(lzma.compress(x) for x in (part1, part2, part3)) with BytesIO() as dst: - with LZMAFile(fileobj=dst, mode="w") as f: + with LZMAFile(dst, "w") as f: f.write(part1) - with LZMAFile(fileobj=dst, mode="a") as f: + with LZMAFile(dst, "a") as f: f.write(part2) - with LZMAFile(fileobj=dst, mode="a") as f: + with LZMAFile(dst, "a") as f: f.write(part3) self.assertEqual(dst.getvalue(), expected) @@ -827,12 +817,12 @@ unlink(TESTFN) def test_write_bad_args(self): - f = LZMAFile(fileobj=BytesIO(), mode="w") + f = LZMAFile(BytesIO(), "w") f.close() self.assertRaises(ValueError, f.write, b"foo") - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ), mode="r") as f: + with LZMAFile(BytesIO(COMPRESSED_XZ), "r") as f: self.assertRaises(ValueError, f.write, b"bar") - with LZMAFile(fileobj=BytesIO(), mode="w") as f: + with LZMAFile(BytesIO(), "w") as f: self.assertRaises(TypeError, f.write, None) self.assertRaises(TypeError, f.write, "text") self.assertRaises(TypeError, f.write, 789) @@ -841,75 +831,75 @@ with BytesIO(INPUT) as f: lines = f.readlines() with BytesIO() as dst: - with LZMAFile(fileobj=dst, mode="w") as f: + with LZMAFile(dst, "w") as f: f.writelines(lines) expected = lzma.compress(INPUT) self.assertEqual(dst.getvalue(), expected) def test_seek_forward(self): - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: f.seek(555) self.assertEqual(f.read(), INPUT[555:]) def test_seek_forward_across_streams(self): - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ * 2)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ * 2)) as f: f.seek(len(INPUT) + 123) self.assertEqual(f.read(), INPUT[123:]) def test_seek_forward_relative_to_current(self): - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: f.read(100) f.seek(1236, 1) self.assertEqual(f.read(), INPUT[1336:]) def test_seek_forward_relative_to_end(self): - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: f.seek(-555, 2) self.assertEqual(f.read(), INPUT[-555:]) def test_seek_backward(self): - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: f.read(1001) f.seek(211) self.assertEqual(f.read(), INPUT[211:]) def test_seek_backward_across_streams(self): - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ * 2)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ * 2)) as f: f.read(len(INPUT) + 333) f.seek(737) self.assertEqual(f.read(), INPUT[737:] + INPUT) def test_seek_backward_relative_to_end(self): - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: f.seek(-150, 2) self.assertEqual(f.read(), INPUT[-150:]) def test_seek_past_end(self): - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: f.seek(len(INPUT) + 9001) self.assertEqual(f.tell(), len(INPUT)) self.assertEqual(f.read(), b"") def test_seek_past_start(self): - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: f.seek(-88) self.assertEqual(f.tell(), 0) self.assertEqual(f.read(), INPUT) def test_seek_bad_args(self): - f = LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) + f = LZMAFile(BytesIO(COMPRESSED_XZ)) f.close() self.assertRaises(ValueError, f.seek, 0) - with LZMAFile(fileobj=BytesIO(), mode="w") as f: + with LZMAFile(BytesIO(), "w") as f: self.assertRaises(ValueError, f.seek, 0) - with 
LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: self.assertRaises(ValueError, f.seek, 0, 3) self.assertRaises(ValueError, f.seek, 9, ()) self.assertRaises(TypeError, f.seek, None) self.assertRaises(TypeError, f.seek, b"derp") def test_tell(self): - with LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) as f: + with LZMAFile(BytesIO(COMPRESSED_XZ)) as f: pos = 0 while True: self.assertEqual(f.tell(), pos) @@ -918,14 +908,14 @@ break pos += len(result) self.assertEqual(f.tell(), len(INPUT)) - with LZMAFile(fileobj=BytesIO(), mode="w") as f: + with LZMAFile(BytesIO(), "w") as f: for pos in range(0, len(INPUT), 144): self.assertEqual(f.tell(), pos) f.write(INPUT[pos:pos+144]) self.assertEqual(f.tell(), len(INPUT)) def test_tell_bad_args(self): - f = LZMAFile(fileobj=BytesIO(COMPRESSED_XZ)) + f = LZMAFile(BytesIO(COMPRESSED_XZ)) f.close() self.assertRaises(ValueError, f.tell) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -18,8 +18,8 @@ - The bz2 module now contains an open() function, allowing compressed files to conveniently be opened in text mode as well as binary mode. -- BZ2File.__init__() now accepts a file object as its first argument, rather - than requiring a separate "fileobj" argument. +- BZ2File.__init__() and LZMAFile.__init__() now accept a file object as their + first argument, rather than requiring a separate "fileobj" argument. - gzip.open() now accepts file objects as well as filenames. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 4 23:55:56 2012 From: python-checkins at python.org (nadeem.vawda) Date: Mon, 04 Jun 2012 23:55:56 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Allow_LZMAFile_to_accept_mo?= =?utf8?q?des_with_a_=22b=22_suffix=2E?= Message-ID: http://hg.python.org/cpython/rev/8cf02a2a44f0 changeset: 77357:8cf02a2a44f0 user: Nadeem Vawda date: Mon Jun 04 23:36:24 2012 +0200 summary: Allow LZMAFile to accept modes with a "b" suffix. files: Doc/library/lzma.rst | 8 +++++--- Lib/lzma.py | 10 ++++++---- Lib/test/test_lzma.py | 19 +++++++++++++++++-- Misc/NEWS | 2 ++ 4 files changed, 30 insertions(+), 9 deletions(-) diff --git a/Doc/library/lzma.rst b/Doc/library/lzma.rst --- a/Doc/library/lzma.rst +++ b/Doc/library/lzma.rst @@ -40,9 +40,11 @@ file will not be closed when the :class:`LZMAFile` is closed. The *mode* argument can be either ``"r"`` for reading (default), ``"w"`` for - overwriting, or ``"a"`` for appending. If *filename* is an existing file - object, a mode of ``"w"`` does not truncate the file, and is instead - equivalent to ``"a"``. + overwriting, or ``"a"`` for appending. These can equivalently be given as + ``"rb"``, ``"wb"``, and ``"ab"`` respectively. + + If *filename* is a file object (rather than an actual file name), a mode of + ``"w"`` does not truncate the file, and is instead equivalent to ``"a"``. When opening a file for reading, the input file may be the concatenation of multiple separate compressed streams. These are transparently decoded as a diff --git a/Lib/lzma.py b/Lib/lzma.py --- a/Lib/lzma.py +++ b/Lib/lzma.py @@ -54,7 +54,8 @@ be an existing file object to read from or write to. mode can be "r" for reading (default), "w" for (over)writing, or - "a" for appending. + "a" for appending. These can equivalently be given as "rb", "wb", + and "ab" respectively. format specifies the container format to use for the file. If mode is "r", this defaults to FORMAT_AUTO. 
Otherwise, the @@ -93,7 +94,7 @@ self._pos = 0 self._size = -1 - if mode == "r": + if mode in ("r", "rb"): if check != -1: raise ValueError("Cannot specify an integrity check " "when opening a file for reading") @@ -109,7 +110,7 @@ self._init_args = {"format":format, "filters":filters} self._decompressor = LZMADecompressor(**self._init_args) self._buffer = None - elif mode in ("w", "a"): + elif mode in ("w", "wb", "a", "ab"): if format is None: format = FORMAT_XZ mode_code = _MODE_WRITE @@ -119,7 +120,8 @@ raise ValueError("Invalid mode: {!r}".format(mode)) if isinstance(filename, (str, bytes)): - mode += "b" + if "b" not in mode: + mode += "b" self._fp = open(filename, mode) self._closefp = True self._mode = mode_code diff --git a/Lib/test/test_lzma.py b/Lib/test/test_lzma.py --- a/Lib/test/test_lzma.py +++ b/Lib/test/test_lzma.py @@ -374,6 +374,21 @@ with LZMAFile(TESTFN, "a") as f: pass + def test_init_mode(self): + with TempFile(TESTFN): + with LZMAFile(TESTFN, "r"): + pass + with LZMAFile(TESTFN, "rb"): + pass + with LZMAFile(TESTFN, "w"): + pass + with LZMAFile(TESTFN, "wb"): + pass + with LZMAFile(TESTFN, "a"): + pass + with LZMAFile(TESTFN, "ab"): + pass + def test_init_bad_mode(self): with self.assertRaises(ValueError): LZMAFile(BytesIO(COMPRESSED_XZ), (3, "x")) @@ -382,11 +397,11 @@ with self.assertRaises(ValueError): LZMAFile(BytesIO(COMPRESSED_XZ), "x") with self.assertRaises(ValueError): - LZMAFile(BytesIO(COMPRESSED_XZ), "rb") + LZMAFile(BytesIO(COMPRESSED_XZ), "rt") with self.assertRaises(ValueError): LZMAFile(BytesIO(COMPRESSED_XZ), "r+") with self.assertRaises(ValueError): - LZMAFile(BytesIO(COMPRESSED_XZ), "wb") + LZMAFile(BytesIO(COMPRESSED_XZ), "wt") with self.assertRaises(ValueError): LZMAFile(BytesIO(COMPRESSED_XZ), "w+") with self.assertRaises(ValueError): diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -15,6 +15,8 @@ Library ------- +- LZMAFile now accepts the modes "rb"/"wb"/"ab" as synonyms of "r"/"w"/"a". + - The bz2 module now contains an open() function, allowing compressed files to conveniently be opened in text mode as well as binary mode. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 4 23:55:57 2012 From: python-checkins at python.org (nadeem.vawda) Date: Mon, 04 Jun 2012 23:55:57 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Add_a_function_lzma=2Eopen?= =?utf8?b?KCksIHRvIG1hdGNoIGd6aXAub3BlbigpIGFuZCBiejIub3BlbigpLg==?= Message-ID: http://hg.python.org/cpython/rev/3d82ced09043 changeset: 77358:3d82ced09043 user: Nadeem Vawda date: Mon Jun 04 23:38:12 2012 +0200 summary: Add a function lzma.open(), to match gzip.open() and bz2.open(). files: Doc/library/lzma.rst | 29 ++++++++ Lib/lzma.py | 50 ++++++++++++++- Lib/test/test_lzma.py | 101 ++++++++++++++++++++++++++++++ Misc/NEWS | 4 +- 4 files changed, 180 insertions(+), 4 deletions(-) diff --git a/Doc/library/lzma.rst b/Doc/library/lzma.rst --- a/Doc/library/lzma.rst +++ b/Doc/library/lzma.rst @@ -29,6 +29,35 @@ Reading and writing compressed files ------------------------------------ +.. function:: open(filename, mode="rb", \*, format=None, check=-1, preset=None, filters=None, encoding=None, errors=None, newline=None) + + Open an LZMA-compressed file in binary or text mode, returning a :term:`file + object`. + + The *filename* argument can be either an actual file name (given as a + :class:`str` or :class:`bytes` object), in which case the named file is + opened, or it can be an existing file object to read from or write to. 
+ + The *mode* argument can be any of ``"r"``, ``"rb"``, ``"w"``, ``"wb"``, + ``"a"`` or ``"ab"`` for binary mode, or ``"rt"``, ``"wt"``, or ``"at"`` for + text mode. The default is ``"rb"``. + + When opening a file for reading, the *format* and *filters* arguments have + the same meanings as for :class:`LZMADecompressor`. In this case, the *check* + and *preset* arguments should not be used. + + When opening a file for writing, the *format*, *check*, *preset* and + *filters* arguments have the same meanings as for :class:`LZMACompressor`. + + For binary mode, this function is equivalent to the :class:`LZMAFile` + constructor: ``LZMAFile(filename, mode, ...)``. In this case, the *encoding*, + *errors* and *newline* arguments must not be provided. + + For text mode, a :class:`LZMAFile` object is created, and wrapped in an + :class:`io.TextIOWrapper` instance with the specified encoding, error + handling behavior, and line ending(s). + + .. class:: LZMAFile(filename=None, mode="r", \*, format=None, check=-1, preset=None, filters=None) Open an LZMA-compressed file in binary mode. diff --git a/Lib/lzma.py b/Lib/lzma.py --- a/Lib/lzma.py +++ b/Lib/lzma.py @@ -18,10 +18,11 @@ "MODE_FAST", "MODE_NORMAL", "PRESET_DEFAULT", "PRESET_EXTREME", "LZMACompressor", "LZMADecompressor", "LZMAFile", "LZMAError", - "compress", "decompress", "is_check_supported", + "open", "compress", "decompress", "is_check_supported", "encode_filter_properties", "decode_filter_properties", ] +import builtins import io from _lzma import * @@ -122,7 +123,7 @@ if isinstance(filename, (str, bytes)): if "b" not in mode: mode += "b" - self._fp = open(filename, mode) + self._fp = builtins.open(filename, mode) self._closefp = True self._mode = mode_code elif hasattr(filename, "read") or hasattr(filename, "write"): @@ -370,6 +371,51 @@ return self._pos +def open(filename, mode="rb", *, + format=None, check=-1, preset=None, filters=None, + encoding=None, errors=None, newline=None): + """Open an LZMA-compressed file in binary or text mode. + + filename can be either an actual file name (given as a str or bytes object), + in which case the named file is opened, or it can be an existing file object + to read from or write to. + + The mode argument can be "r", "rb" (default), "w", "wb", "a", or "ab" for + binary mode, or "rt", "wt" or "at" for text mode. + + The format, check, preset and filters arguments specify the compression + settings, as for LZMACompressor, LZMADecompressor and LZMAFile. + + For binary mode, this function is equivalent to the LZMAFile constructor: + LZMAFile(filename, mode, ...). In this case, the encoding, errors and + newline arguments must not be provided. + + For text mode, a LZMAFile object is created, and wrapped in an + io.TextIOWrapper instance with the specified encoding, error handling + behavior, and line ending(s). 
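A minimal usage sketch of the lzma.open() API described above (illustrative only, not part of the patch; the file name, payload and text are made up):

    import io
    import lzma

    # Binary mode returns an LZMAFile; "wb" is accepted as a synonym of "w".
    with lzma.open("example.xz", "wb") as f:
        f.write(b"payload")

    # Text mode wraps the LZMAFile in an io.TextIOWrapper.
    with lzma.open("example.xz", "rt", encoding="utf-8") as f:
        data = f.read()          # "payload"

    # An existing file object may be passed instead of a file name.
    buf = io.BytesIO()
    with lzma.open(buf, "wt") as f:
        f.write("hello")         # buf now holds the compressed data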
+ + """ + if "t" in mode: + if "b" in mode: + raise ValueError("Invalid mode: %r" % (mode,)) + else: + if encoding is not None: + raise ValueError("Argument 'encoding' not supported in binary mode") + if errors is not None: + raise ValueError("Argument 'errors' not supported in binary mode") + if newline is not None: + raise ValueError("Argument 'newline' not supported in binary mode") + + lz_mode = mode.replace("t", "") + binary_file = LZMAFile(filename, lz_mode, format=format, check=check, + preset=preset, filters=filters) + + if "t" in mode: + return io.TextIOWrapper(binary_file, encoding, errors, newline) + else: + return binary_file + + def compress(data, format=FORMAT_XZ, check=-1, preset=None, filters=None): """Compress a block of data. diff --git a/Lib/test/test_lzma.py b/Lib/test/test_lzma.py --- a/Lib/test/test_lzma.py +++ b/Lib/test/test_lzma.py @@ -935,6 +935,106 @@ self.assertRaises(ValueError, f.tell) +class OpenTestCase(unittest.TestCase): + + def test_binary_modes(self): + with lzma.open(BytesIO(COMPRESSED_XZ), "rb") as f: + self.assertEqual(f.read(), INPUT) + with BytesIO() as bio: + with lzma.open(bio, "wb") as f: + f.write(INPUT) + file_data = lzma.decompress(bio.getvalue()) + self.assertEqual(file_data, INPUT) + with lzma.open(bio, "ab") as f: + f.write(INPUT) + file_data = lzma.decompress(bio.getvalue()) + self.assertEqual(file_data, INPUT * 2) + + def test_text_modes(self): + uncompressed = INPUT.decode("ascii") + uncompressed_raw = uncompressed.replace("\n", os.linesep) + with lzma.open(BytesIO(COMPRESSED_XZ), "rt") as f: + self.assertEqual(f.read(), uncompressed) + with BytesIO() as bio: + with lzma.open(bio, "wt") as f: + f.write(uncompressed) + file_data = lzma.decompress(bio.getvalue()).decode("ascii") + self.assertEqual(file_data, uncompressed_raw) + with lzma.open(bio, "at") as f: + f.write(uncompressed) + file_data = lzma.decompress(bio.getvalue()).decode("ascii") + self.assertEqual(file_data, uncompressed_raw * 2) + + def test_filename(self): + with TempFile(TESTFN): + with lzma.open(TESTFN, "wb") as f: + f.write(INPUT) + with open(TESTFN, "rb") as f: + file_data = lzma.decompress(f.read()) + self.assertEqual(file_data, INPUT) + with lzma.open(TESTFN, "rb") as f: + self.assertEqual(f.read(), INPUT) + with lzma.open(TESTFN, "ab") as f: + f.write(INPUT) + with lzma.open(TESTFN, "rb") as f: + self.assertEqual(f.read(), INPUT * 2) + + def test_bad_params(self): + # Test invalid parameter combinations. + with self.assertRaises(ValueError): + lzma.open(TESTFN, "") + with self.assertRaises(ValueError): + lzma.open(TESTFN, "x") + with self.assertRaises(ValueError): + lzma.open(TESTFN, "rbt") + with self.assertRaises(ValueError): + lzma.open(TESTFN, "rb", encoding="utf-8") + with self.assertRaises(ValueError): + lzma.open(TESTFN, "rb", errors="ignore") + with self.assertRaises(ValueError): + lzma.open(TESTFN, "rb", newline="\n") + + def test_format_and_filters(self): + # Test non-default format and filter chain. + options = {"format": lzma.FORMAT_RAW, "filters": FILTERS_RAW_1} + with lzma.open(BytesIO(COMPRESSED_RAW_1), "rb", **options) as f: + self.assertEqual(f.read(), INPUT) + with BytesIO() as bio: + with lzma.open(bio, "wb", **options) as f: + f.write(INPUT) + file_data = lzma.decompress(bio.getvalue(), **options) + self.assertEqual(file_data, INPUT) + + def test_encoding(self): + # Test non-default encoding. 
+ uncompressed = INPUT.decode("ascii") + uncompressed_raw = uncompressed.replace("\n", os.linesep) + with BytesIO() as bio: + with lzma.open(bio, "wt", encoding="utf-16-le") as f: + f.write(uncompressed) + file_data = lzma.decompress(bio.getvalue()).decode("utf-16-le") + self.assertEqual(file_data, uncompressed_raw) + bio.seek(0) + with lzma.open(bio, "rt", encoding="utf-16-le") as f: + self.assertEqual(f.read(), uncompressed) + + def test_encoding_error_handler(self): + # Test wih non-default encoding error handler. + with BytesIO(lzma.compress(b"foo\xffbar")) as bio: + with lzma.open(bio, "rt", encoding="ascii", errors="ignore") as f: + self.assertEqual(f.read(), "foobar") + + def test_newline(self): + # Test with explicit newline (universal newline mode disabled). + text = INPUT.decode("ascii") + with BytesIO() as bio: + with lzma.open(bio, "wt", newline="\n") as f: + f.write(text) + bio.seek(0) + with lzma.open(bio, "rt", newline="\r") as f: + self.assertEqual(f.readlines(), [text]) + + class MiscellaneousTestCase(unittest.TestCase): def test_is_check_supported(self): @@ -1385,6 +1485,7 @@ CompressorDecompressorTestCase, CompressDecompressFunctionTestCase, FileTestCase, + OpenTestCase, MiscellaneousTestCase, ) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -17,8 +17,8 @@ - LZMAFile now accepts the modes "rb"/"wb"/"ab" as synonyms of "r"/"w"/"a". -- The bz2 module now contains an open() function, allowing compressed files to - conveniently be opened in text mode as well as binary mode. +- The bz2 and lzma modules now each contain an open() function, allowing + compressed files to readily be opened in text mode as well as binary mode. - BZ2File.__init__() and LZMAFile.__init__() now accept a file object as their first argument, rather than requiring a separate "fileobj" argument. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Jun 5 01:32:08 2012 From: python-checkins at python.org (victor.stinner) Date: Tue, 05 Jun 2012 01:32:08 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314711=3A_os=2Estat?= =?utf8?q?=5Ffloat=5Ftimes=28=29_has_been_deprecated=2E?= Message-ID: http://hg.python.org/cpython/rev/7cb15b47c70e changeset: 77359:7cb15b47c70e user: Victor Stinner date: Tue Jun 05 01:22:15 2012 +0200 summary: Issue #14711: os.stat_float_times() has been deprecated. files: Doc/library/os.rst | 2 ++ Lib/test/test_os.py | 8 ++++++-- Misc/NEWS | 2 ++ Modules/posixmodule.c | 8 ++++++-- 4 files changed, 16 insertions(+), 4 deletions(-) diff --git a/Doc/library/os.rst b/Doc/library/os.rst --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -2128,6 +2128,8 @@ are processed, this application should turn the feature off until the library has been corrected. + .. deprecated:: 3.3 + .. 
function:: statvfs(path) diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -30,7 +30,9 @@ threading = None from test.script_helper import assert_python_ok -os.stat_float_times(True) +with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + os.stat_float_times(True) st = os.stat(__file__) stat_supports_subsecond = ( # check if float and int timestamps are different @@ -388,7 +390,9 @@ filename = self.fname os.utime(filename, (0, 0)) set_time_func(filename, atime, mtime) - os.stat_float_times(True) + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + os.stat_float_times(True) st = os.stat(filename) self.assertAlmostEqual(st.st_atime, atime, places=3) self.assertAlmostEqual(st.st_mtime, mtime, places=3) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -15,6 +15,8 @@ Library ------- +- Issue #14711: os.stat_float_times() has been deprecated. + - LZMAFile now accepts the modes "rb"/"wb"/"ab" as synonyms of "r"/"w"/"a". - The bz2 and lzma modules now each contain an open() function, allowing diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -1721,6 +1721,10 @@ int newval = -1; if (!PyArg_ParseTuple(args, "|i:stat_float_times", &newval)) return NULL; + if (PyErr_WarnEx(PyExc_DeprecationWarning, + "stat_float_times() is deprecated", + 1)) + return NULL; if (newval == -1) /* Return old value */ return PyBool_FromLong(_stat_float_times); @@ -3605,7 +3609,7 @@ PyObject *args; PyObject *kwargs; - /* input/output */ + /* input/output */ PyObject **path; /* output only */ @@ -3655,7 +3659,7 @@ timet[1] = ua.mtime_s -/* +/* * utime_read_time_arguments() processes arguments for the utime * family of functions. */ -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Tue Jun 5 05:53:07 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Tue, 05 Jun 2012 05:53:07 +0200 Subject: [Python-checkins] Daily reference leaks (7cb15b47c70e): sum=2 Message-ID: results for 7cb15b47c70e on branch "default" -------------------------------------------- test_dbm leaked [2, 0, 0] references, sum=2 Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogRYoO0h', '-x'] From python-checkins at python.org Tue Jun 5 11:25:31 2012 From: python-checkins at python.org (vinay.sajip) Date: Tue, 05 Jun 2012 11:25:31 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Move_PEP_405_to_Accepted_state?= =?utf8?q?=2E?= Message-ID: http://hg.python.org/peps/rev/9fae41fdc069 changeset: 4446:9fae41fdc069 parent: 4433:3b7e6e0316be user: Carl Meyer date: Fri May 25 09:13:13 2012 -0600 summary: Move PEP 405 to Accepted state. 
files: pep-0405.txt | 18 +++++++++--------- 1 files changed, 9 insertions(+), 9 deletions(-) diff --git a/pep-0405.txt b/pep-0405.txt --- a/pep-0405.txt +++ b/pep-0405.txt @@ -4,7 +4,7 @@ Last-Modified: $Date$ Author: Carl Meyer BDFL-Delegate: Nick Coghlan -Status: Draft +Status: Accepted Type: Standards Track Content-Type: text/x-rst Created: 13-Jun-2011 @@ -284,15 +284,15 @@ Current virtualenv handles include files in this way: -On POSIX systems where the installed Python's include files are found -in ``${base_prefix}/include/pythonX.X``, virtualenv creates -``${venv}/include/`` and symlink ``${base_prefix}/include/pythonX.X`` +On POSIX systems where the installed Python's include files are found in +``${base_prefix}/include/pythonX.X``, virtualenv creates +``${venv}/include/`` and symlinks ``${base_prefix}/include/pythonX.X`` to ``${venv}/include/pythonX.X``. On Windows, where Python's include files are found in ``{{ sys.prefix }}/Include`` and symlinks are not reliably available, virtualenv copies ``{{ sys.prefix }}/Include`` to ``${venv}/Include``. This ensures that extension modules built and -installed within the virtualenv will always find the Python header -files they need in the expected location relative to ``sys.prefix``. +installed within the virtualenv will always find the Python header files +they need in the expected location relative to ``sys.prefix``. This solution is not ideal when an extension module installs its own header files, as the default installation location for those header @@ -466,10 +466,10 @@ site-packages directories. The most notable case is probably `setuptools`_ and its fork -`distribute`_, which mostly use ``distutils``and ``sysconfig`` APIs, +`distribute`_, which mostly use ``distutils`` and ``sysconfig`` APIs, but do use ``sys.prefix`` directly to build up a list of site -directories for pre-flight checking where ``pth`` files can usefully -be placed. +directories for pre-flight checking where ``pth`` files can usefully be +placed. Otherwise, a `Google Code Search`_ turns up what appears to be a roughly even mix of usage between packages using ``sys.prefix`` to -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Tue Jun 5 11:25:34 2012 From: python-checkins at python.org (vinay.sajip) Date: Tue, 05 Jun 2012 11:25:34 +0200 Subject: [Python-checkins] =?utf8?q?peps_=28merge_default_-=3E_default=29?= =?utf8?q?=3A_Merge_from_upstream=2E?= Message-ID: http://hg.python.org/peps/rev/6cc8a5492429 changeset: 4447:6cc8a5492429 parent: 4446:9fae41fdc069 parent: 4436:5a26cc296e83 user: Carl Meyer date: Fri May 25 09:13:31 2012 -0600 summary: Merge from upstream. files: pep-0405.txt | 3 ++- pep-0420.txt | 26 ++++++++++++++++---------- 2 files changed, 18 insertions(+), 11 deletions(-) diff --git a/pep-0405.txt b/pep-0405.txt --- a/pep-0405.txt +++ b/pep-0405.txt @@ -9,7 +9,8 @@ Content-Type: text/x-rst Created: 13-Jun-2011 Python-Version: 3.3 -Post-History: 24-Oct-2011, 28-Oct-2011, 06-Mar-2012 +Post-History: 24-Oct-2011, 28-Oct-2011, 06-Mar-2012, 24-May-2012 +Resolution: http://mail.python.org/pipermail/python-dev/2012-May/119668.html Abstract diff --git a/pep-0420.txt b/pep-0420.txt --- a/pep-0420.txt +++ b/pep-0420.txt @@ -4,12 +4,12 @@ Last-Modified: $Date$ Author: Eric V. 
Smith Status: Accepted -Resolution: http://mail.python.org/pipermail/python-dev/2012-May/119651.html Type: Standards Track Content-Type: text/x-rst Created: 19-Apr-2012 Python-Version: 3.3 Post-History: +Resolution: http://mail.python.org/pipermail/python-dev/2012-May/119651.html Abstract ======== @@ -372,9 +372,11 @@ child three.py -We add the first two parent paths to ``sys.path``. The third -``parent`` portion is added dynamically to ``parent.__path__``, and -the third portion is then found when it is imported:: +We add ``project1`` and ``project2`` to ``sys.path``, then import +``parent.child.one`` and ``parent.child.two``. Then we add the +``project3`` to ``sys.path`` and when ``parent.child.three`` is +imported, ``project3/parent`` is automatically added to +``parent.__path__``:: # add the first two parent paths to sys.path >>> import sys @@ -400,17 +402,21 @@ File "", line 1250, in _find_and_load_unlocked ImportError: No module named 'parent.child.three' - # now add the third parent portion to parent.__path__: - >>> parent.__path__.append('Lib/test/namespace_pkgs/project3/parent') - >>> parent.__path__ - _NamespacePath(['Lib/test/namespace_pkgs/project1/parent', 'Lib/test/namespace_pkgs/project2/parent', 'Lib/test/namespace_pkgs/project3/parent']) + # now add project3 to sys.path: + >>> sys.path.append('Lib/test/namespace_pkgs/project3') # and now parent.child.three can be imported: >>> import parent.child.three - # and project3/parent/child has dynamically been added to parent.child.__path__ + # project3/parent has been added to parent.__path__: + >>> parent.__path__ + _NamespacePath(['Lib/test/namespace_pkgs/project1/parent', 'Lib/test/namespace_pkgs/project2/parent', 'Lib/test/namespace_pkgs/project3/parent']) + + # and project3/parent/child has been added to parent.child.__path__ >>> parent.child.__path__ _NamespacePath(['Lib/test/namespace_pkgs/project1/parent/child', 'Lib/test/namespace_pkgs/project2/parent/child', 'Lib/test/namespace_pkgs/project3/parent/child']) + >>> + Discussion @@ -446,7 +452,7 @@ 4. Implicit package directories will permanently entrench current newbie-hostile behavior in ``__main__``. -Nick later gave a detailed response to his own objections[5]_, which +Nick later gave a detailed response to his own objections [5]_, which is summarized here: 1. The practicality of this PEP wins over other proposals and the -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Tue Jun 5 11:25:41 2012 From: python-checkins at python.org (vinay.sajip) Date: Tue, 05 Jun 2012 11:25:41 +0200 Subject: [Python-checkins] =?utf8?q?peps_=28merge_default_-=3E_default=29?= =?utf8?q?=3A_Incorporated_PEP_405_tidy-ups=2C_and_marked_as_Final=2E?= Message-ID: http://hg.python.org/peps/rev/3a983cb78f6d changeset: 4448:3a983cb78f6d parent: 4445:c60be355412e parent: 4447:6cc8a5492429 user: Vinay Sajip date: Tue Jun 05 10:25:18 2012 +0100 summary: Incorporated PEP 405 tidy-ups, and marked as Final. 
files: pep-0405.txt | 18 +++++++++--------- 1 files changed, 9 insertions(+), 9 deletions(-) diff --git a/pep-0405.txt b/pep-0405.txt --- a/pep-0405.txt +++ b/pep-0405.txt @@ -4,7 +4,7 @@ Last-Modified: $Date$ Author: Carl Meyer BDFL-Delegate: Nick Coghlan -Status: Accepted +Status: Final Type: Standards Track Content-Type: text/x-rst Created: 13-Jun-2011 @@ -285,15 +285,15 @@ Current virtualenv handles include files in this way: -On POSIX systems where the installed Python's include files are found -in ``${base_prefix}/include/pythonX.X``, virtualenv creates -``${venv}/include/`` and symlink ``${base_prefix}/include/pythonX.X`` +On POSIX systems where the installed Python's include files are found in +``${base_prefix}/include/pythonX.X``, virtualenv creates +``${venv}/include/`` and symlinks ``${base_prefix}/include/pythonX.X`` to ``${venv}/include/pythonX.X``. On Windows, where Python's include files are found in ``{{ sys.prefix }}/Include`` and symlinks are not reliably available, virtualenv copies ``{{ sys.prefix }}/Include`` to ``${venv}/Include``. This ensures that extension modules built and -installed within the virtualenv will always find the Python header -files they need in the expected location relative to ``sys.prefix``. +installed within the virtualenv will always find the Python header files +they need in the expected location relative to ``sys.prefix``. This solution is not ideal when an extension module installs its own header files, as the default installation location for those header @@ -467,10 +467,10 @@ site-packages directories. The most notable case is probably `setuptools`_ and its fork -`distribute`_, which mostly use ``distutils``and ``sysconfig`` APIs, +`distribute`_, which mostly use ``distutils`` and ``sysconfig`` APIs, but do use ``sys.prefix`` directly to build up a list of site -directories for pre-flight checking where ``pth`` files can usefully -be placed. +directories for pre-flight checking where ``pth`` files can usefully be +placed. Otherwise, a `Google Code Search`_ turns up what appears to be a roughly even mix of usage between packages using ``sys.prefix`` to -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Tue Jun 5 11:57:20 2012 From: python-checkins at python.org (hynek.schlawack) Date: Tue, 05 Jun 2012 11:57:20 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_=2314814=3A_ipaddress=3A_re?= =?utf8?q?factor_dup_code=2C_minor_janitoring=2C_bump_coverage?= Message-ID: http://hg.python.org/cpython/rev/cca2a1cc9598 changeset: 77360:cca2a1cc9598 user: Hynek Schlawack date: Tue Jun 05 11:55:58 2012 +0200 summary: #14814: ipaddress: refactor dup code, minor janitoring, bump coverage - remove duplicate netmask/hostmask code - make two ifs more pythonic - remove packed property for networks - some minor pep8 stuff - Test coverage is now at 97%, the rest are mostly unreachable safeguards. files: Lib/ipaddress.py | 171 ++++++++--------------- Lib/test/test_ipaddress.py | 176 +++++++++++++++++++++--- 2 files changed, 207 insertions(+), 140 deletions(-) diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -10,8 +10,10 @@ __version__ = '1.0' + import struct + IPV4LENGTH = 32 IPV6LENGTH = 128 @@ -424,7 +426,7 @@ An integer. 
""" - if not prefixlen and prefixlen != 0: + if prefixlen is None: prefixlen = self._prefixlen return self._ALL_ONES ^ (self._ALL_ONES >> prefixlen) @@ -989,6 +991,9 @@ _ALL_ONES = (2**IPV4LENGTH) - 1 _DECIMAL_DIGITS = frozenset('0123456789') + # the valid octets for host and netmasks. only useful for IPv4. + _valid_mask_octets = set((255, 254, 252, 248, 240, 224, 192, 128, 0)) + def __init__(self, address): self._version = 4 self._max_prefixlen = IPV4LENGTH @@ -1060,6 +1065,53 @@ ip_int >>= 8 return '.'.join(octets) + def _is_valid_netmask(self, netmask): + """Verify that the netmask is valid. + + Args: + netmask: A string, either a prefix or dotted decimal + netmask. + + Returns: + A boolean, True if the prefix represents a valid IPv4 + netmask. + + """ + mask = netmask.split('.') + if len(mask) == 4: + if [x for x in mask if int(x) not in self._valid_mask_octets]: + return False + if [y for idx, y in enumerate(mask) if idx > 0 and + y > mask[idx - 1]]: + return False + return True + try: + netmask = int(netmask) + except ValueError: + return False + return 0 <= netmask <= self._max_prefixlen + + def _is_hostmask(self, ip_str): + """Test if the IP string is a hostmask (rather than a netmask). + + Args: + ip_str: A string, the potential hostmask. + + Returns: + A boolean, True if the IP string is a hostmask. + + """ + bits = ip_str.split('.') + try: + parts = [int(x) for x in bits if int(x) in self._valid_mask_octets] + except ValueError: + return False + if len(parts) != len(bits): + return False + if parts[0] < parts[-1]: + return True + return False + @property def max_prefixlen(self): return self._max_prefixlen @@ -1213,9 +1265,6 @@ class IPv4Interface(IPv4Address): - # the valid octets for host and netmasks. only useful for IPv4. - _valid_mask_octets = set((255, 254, 252, 248, 240, 224, 192, 128, 0)) - def __init__(self, address): if isinstance(address, (bytes, int)): IPv4Address.__init__(self, address) @@ -1248,53 +1297,6 @@ def __hash__(self): return self._ip ^ self._prefixlen ^ int(self.network.network_address) - def _is_valid_netmask(self, netmask): - """Verify that the netmask is valid. - - Args: - netmask: A string, either a prefix or dotted decimal - netmask. - - Returns: - A boolean, True if the prefix represents a valid IPv4 - netmask. - - """ - mask = netmask.split('.') - if len(mask) == 4: - if [x for x in mask if int(x) not in self._valid_mask_octets]: - return False - if [y for idx, y in enumerate(mask) if idx > 0 and - y > mask[idx - 1]]: - return False - return True - try: - netmask = int(netmask) - except ValueError: - return False - return 0 <= netmask <= self._max_prefixlen - - def _is_hostmask(self, ip_str): - """Test if the IP string is a hostmask (rather than a netmask). - - Args: - ip_str: A string, the potential hostmask. - - Returns: - A boolean, True if the IP string is a hostmask. - - """ - bits = ip_str.split('.') - try: - parts = [int(x) for x in bits if int(x) in self._valid_mask_octets] - except ValueError: - return False - if len(parts) != len(bits): - return False - if parts[0] < parts[-1]: - return True - return False - @property def prefixlen(self): return self._prefixlen @@ -1334,9 +1336,6 @@ # TODO (ncoghlan): Investigate using IPv4Interface instead _address_class = IPv4Address - # the valid octets for host and netmasks. only useful for IPv4. - _valid_mask_octets = set((255, 254, 252, 248, 240, 224, 192, 128, 0)) - def __init__(self, address, strict=True): """Instantiate a new IPv4 network object. 
@@ -1443,58 +1442,6 @@ if self._prefixlen == (self._max_prefixlen - 1): self.hosts = self.__iter__ - @property - def packed(self): - """The binary representation of this address.""" - return v4_int_to_packed(self.network_address) - - def _is_valid_netmask(self, netmask): - """Verify that the netmask is valid. - - Args: - netmask: A string, either a prefix or dotted decimal - netmask. - - Returns: - A boolean, True if the prefix represents a valid IPv4 - netmask. - - """ - mask = netmask.split('.') - if len(mask) == 4: - if [x for x in mask if int(x) not in self._valid_mask_octets]: - return False - if [y for idx, y in enumerate(mask) if idx > 0 and - y > mask[idx - 1]]: - return False - return True - try: - netmask = int(netmask) - except ValueError: - return False - return 0 <= netmask <= self._max_prefixlen - - def _is_hostmask(self, ip_str): - """Test if the IP string is a hostmask (rather than a netmask). - - Args: - ip_str: A string, the potential hostmask. - - Returns: - A boolean, True if the IP string is a hostmask. - - """ - bits = ip_str.split('.') - try: - parts = [int(x) for x in bits if int(x) in self._valid_mask_octets] - except ValueError: - return False - if len(parts) != len(bits): - return False - if parts[0] < parts[-1]: - return True - return False - class _BaseV6: @@ -1675,7 +1622,7 @@ ValueError: The address is bigger than 128 bits of all ones. """ - if not ip_int and ip_int != 0: + if ip_int is None: ip_int = int(self._ip) if ip_int > self._ALL_ONES: @@ -1721,11 +1668,6 @@ return self._max_prefixlen @property - def packed(self): - """The binary representation of this address.""" - return v6_int_to_packed(self._ip) - - @property def version(self): return self._version @@ -1931,6 +1873,11 @@ self._ip = self._ip_int_from_string(addr_str) + @property + def packed(self): + """The binary representation of this address.""" + return v6_int_to_packed(self._ip) + class IPv6Interface(IPv6Address): diff --git a/Lib/test/test_ipaddress.py b/Lib/test/test_ipaddress.py --- a/Lib/test/test_ipaddress.py +++ b/Lib/test/test_ipaddress.py @@ -5,12 +5,13 @@ import unittest -import time import ipaddress + # Compatibility function to cast str to bytes objects _cb = lambda bytestr: bytes(bytestr, 'charmap') + class IpaddrUnitTest(unittest.TestCase): def setUp(self): @@ -133,6 +134,31 @@ '1.a.2.3') self.assertEqual(False, ipaddress.IPv4Interface(1)._is_hostmask( '1.a.2.3')) + self.assertRaises(ValueError, ipaddress.ip_interface, 'bogus') + self.assertRaises(ValueError, ipaddress.IPv4Address, '127.0.0.1/32') + self.assertRaises(ValueError, ipaddress.v4_int_to_packed, -1) + self.assertRaises(ValueError, ipaddress.v4_int_to_packed, + 2 ** ipaddress.IPV4LENGTH) + self.assertRaises(ValueError, ipaddress.v6_int_to_packed, -1) + self.assertRaises(ValueError, ipaddress.v6_int_to_packed, + 2 ** ipaddress.IPV6LENGTH) + + def testInternals(self): + first, last = ipaddress._find_address_range([ + ipaddress.IPv4Address('10.10.10.10'), + ipaddress.IPv4Address('10.10.10.12')]) + self.assertEqual(first, last) + self.assertEqual(0, ipaddress._get_prefix_length(2**32, 0, 32)) + self.assertEqual(128, ipaddress._count_righthand_zero_bits(0, 128)) + base_ip = ipaddress._BaseAddress('127.0.0.1') + try: + base_ip.version + self.fail('_BaseAddress.version didn\'t raise NotImplementedError') + except NotImplementedError: + pass + self.assertEqual("IPv4Network('1.2.3.0/24')", repr(self.ipv4_network)) + self.assertEqual('0x1020318', hex(self.ipv4_network)) + self.assertRaises(TypeError, self.ipv4_network.__eq__, 
object()) def testGetNetwork(self): self.assertEqual(int(self.ipv4_network.network_address), 16909056) @@ -188,6 +214,7 @@ self.assertEqual([v6addr, v6net], sorted([v6net, v6addr], key=ipaddress.get_mixed_type_key)) + self.assertEqual(NotImplemented, ipaddress.get_mixed_type_key(object)) def testIpFromInt(self): self.assertEqual(self.ipv4_interface._ip, @@ -209,9 +236,15 @@ ipaddress.IPv6Interface, 2**128) self.assertRaises(ipaddress.AddressValueError, ipaddress.IPv6Interface, -1) + self.assertRaises(ipaddress.AddressValueError, + ipaddress.IPv6Network, 2**128) + self.assertRaises(ipaddress.AddressValueError, + ipaddress.IPv6Network, -1) - self.assertEqual(ipaddress.ip_network(self.ipv4_address._ip).version, 4) - self.assertEqual(ipaddress.ip_network(self.ipv6_address._ip).version, 6) + self.assertEqual(ipaddress.ip_network(self.ipv4_address._ip).version, + 4) + self.assertEqual(ipaddress.ip_network(self.ipv6_address._ip).version, + 6) def testIpFromPacked(self): ip = ipaddress.ip_network @@ -255,12 +288,31 @@ self.assertEqual(int(ipv4_zero_netmask.network.netmask), 0) self.assertTrue(ipv4_zero_netmask.network._is_valid_netmask( str(0))) + self.assertTrue(ipv4_zero_netmask._is_valid_netmask('0')) + self.assertTrue(ipv4_zero_netmask._is_valid_netmask('0.0.0.0')) + self.assertFalse(ipv4_zero_netmask._is_valid_netmask('invalid')) ipv6_zero_netmask = ipaddress.IPv6Interface('::1/0') self.assertEqual(int(ipv6_zero_netmask.network.netmask), 0) self.assertTrue(ipv6_zero_netmask.network._is_valid_netmask( str(0))) + def testIPv4NetAndHostmasks(self): + net = self.ipv4_network + self.assertFalse(net._is_valid_netmask('invalid')) + self.assertTrue(net._is_valid_netmask('128.128.128.128')) + self.assertFalse(net._is_valid_netmask('128.128.128.127')) + self.assertFalse(net._is_valid_netmask('128.128.128.255')) + self.assertTrue(net._is_valid_netmask('255.128.128.128')) + + self.assertFalse(net._is_hostmask('invalid')) + self.assertTrue(net._is_hostmask('128.255.255.255')) + self.assertFalse(net._is_hostmask('255.255.255.255')) + self.assertFalse(net._is_hostmask('1.2.3.4')) + + net = ipaddress.IPv4Network('127.0.0.0/0.0.0.255') + self.assertEqual(24, net.prefixlen) + def testGetBroadcast(self): self.assertEqual(int(self.ipv4_network.broadcast_address), 16909311) self.assertEqual(str(self.ipv4_network.broadcast_address), '1.2.3.255') @@ -300,17 +352,25 @@ def testGetSupernet4(self): self.assertRaises(ValueError, self.ipv4_network.supernet, prefixlen_diff=2, new_prefix=1) - self.assertRaises(ValueError, self.ipv4_network.supernet, new_prefix=25) + self.assertRaises(ValueError, self.ipv4_network.supernet, + new_prefix=25) self.assertEqual(self.ipv4_network.supernet(prefixlen_diff=2), self.ipv4_network.supernet(new_prefix=22)) self.assertRaises(ValueError, self.ipv6_network.supernet, prefixlen_diff=2, new_prefix=1) - self.assertRaises(ValueError, self.ipv6_network.supernet, new_prefix=65) + self.assertRaises(ValueError, self.ipv6_network.supernet, + new_prefix=65) self.assertEqual(self.ipv6_network.supernet(prefixlen_diff=2), self.ipv6_network.supernet(new_prefix=62)) def testHosts(self): + hosts = list(self.ipv4_network.hosts()) + self.assertEqual(254, len(hosts)) + self.assertEqual(ipaddress.IPv4Address('1.2.3.1'), hosts[0]) + self.assertEqual(ipaddress.IPv4Address('1.2.3.254'), hosts[-1]) + + # special case where only 1 bit is left for address self.assertEqual([ipaddress.IPv4Address('2.0.0.0'), ipaddress.IPv4Address('2.0.0.1')], list(ipaddress.ip_network('2.0.0.0/31').hosts())) @@ -398,7 +458,8 @@ def 
testGetNum_Addresses(self): self.assertEqual(self.ipv4_network.num_addresses, 256) - self.assertEqual(list(self.ipv4_network.subnets())[0].num_addresses, 128) + self.assertEqual(list(self.ipv4_network.subnets())[0].num_addresses, + 128) self.assertEqual(self.ipv4_network.supernet().num_addresses, 512) self.assertEqual(self.ipv6_network.num_addresses, 18446744073709551616) @@ -431,6 +492,8 @@ self.assertRaises(ipaddress.AddressValueError, ipaddress.IPv4Interface, '1.2.3.4/32/24') self.assertRaises(ipaddress.AddressValueError, + ipaddress.IPv4Network, '1.2.3.4/32/24') + self.assertRaises(ipaddress.AddressValueError, ipaddress.IPv4Interface, '10/8') self.assertRaises(ipaddress.AddressValueError, ipaddress.IPv6Interface, '10/8') @@ -545,8 +608,9 @@ # check that addreses are subsumed properly. collapsed = ipaddress.collapse_addresses( [ip1, ip2, ip3, ip4, ip5, ip6]) - self.assertEqual(list(collapsed), [ipaddress.IPv4Network('1.1.1.0/30'), - ipaddress.IPv4Network('1.1.1.4/32')]) + self.assertEqual(list(collapsed), + [ipaddress.IPv4Network('1.1.1.0/30'), + ipaddress.IPv4Network('1.1.1.4/32')]) # test a mix of IP addresses and networks including some duplicates ip1 = ipaddress.IPv4Address('1.1.1.0') @@ -557,7 +621,8 @@ #ip6 = ipaddress.IPv4Interface('1.1.1.4/30') # check that addreses are subsumed properly. collapsed = ipaddress.collapse_addresses([ip1, ip2, ip3, ip4]) - self.assertEqual(list(collapsed), [ipaddress.IPv4Network('1.1.1.0/30')]) + self.assertEqual(list(collapsed), + [ipaddress.IPv4Network('1.1.1.0/30')]) # test only IP networks ip1 = ipaddress.IPv4Network('1.1.0.0/24') @@ -565,17 +630,20 @@ ip3 = ipaddress.IPv4Network('1.1.2.0/24') ip4 = ipaddress.IPv4Network('1.1.3.0/24') ip5 = ipaddress.IPv4Network('1.1.4.0/24') - # stored in no particular order b/c we want CollapseAddr to call [].sort + # stored in no particular order b/c we want CollapseAddr to call + # [].sort ip6 = ipaddress.IPv4Network('1.1.0.0/22') # check that addreses are subsumed properly. collapsed = ipaddress.collapse_addresses([ip1, ip2, ip3, ip4, ip5, ip6]) - self.assertEqual(list(collapsed), [ipaddress.IPv4Network('1.1.0.0/22'), - ipaddress.IPv4Network('1.1.4.0/24')]) + self.assertEqual(list(collapsed), + [ipaddress.IPv4Network('1.1.0.0/22'), + ipaddress.IPv4Network('1.1.4.0/24')]) # test that two addresses are supernet'ed properly collapsed = ipaddress.collapse_addresses([ip1, ip2]) - self.assertEqual(list(collapsed), [ipaddress.IPv4Network('1.1.0.0/23')]) + self.assertEqual(list(collapsed), + [ipaddress.IPv4Network('1.1.0.0/23')]) # test same IP networks ip_same1 = ip_same2 = ipaddress.IPv4Network('1.1.1.1/32') @@ -614,7 +682,20 @@ summarize = ipaddress.summarize_address_range ip1 = ipaddress.ip_address('1.1.1.0') ip2 = ipaddress.ip_address('1.1.1.255') - # test a /24 is sumamrized properly + + # summarize works only for IPv4 & IPv6 + class IPv7Address(ipaddress.IPv6Address): + @property + def version(self): + return 7 + ip_invalid1 = IPv7Address('::1') + ip_invalid2 = IPv7Address('::1') + self.assertRaises(ValueError, list, + summarize(ip_invalid1, ip_invalid2)) + # test that a summary over ip4 & ip6 fails + self.assertRaises(TypeError, list, + summarize(ip1, ipaddress.IPv6Address('::1'))) + # test a /24 is summarized properly self.assertEqual(list(summarize(ip1, ip2))[0], ipaddress.ip_network('1.1.1.0/24')) # test an IPv4 range that isn't on a network byte boundary @@ -622,6 +703,11 @@ self.assertEqual(list(summarize(ip1, ip2)), [ipaddress.ip_network('1.1.1.0/29'), ipaddress.ip_network('1.1.1.8')]) + # all! 
+ ip1 = ipaddress.IPv4Address(0) + ip2 = ipaddress.IPv4Address(ipaddress.IPv4Address._ALL_ONES) + self.assertEqual([ipaddress.IPv4Network('0.0.0.0/0')], + list(summarize(ip1, ip2))) ip1 = ipaddress.ip_address('1::') ip2 = ipaddress.ip_address('1:ffff:ffff:ffff:ffff:ffff:ffff:ffff') @@ -663,15 +749,20 @@ def testNetworkComparison(self): # ip1 and ip2 have the same network address ip1 = ipaddress.IPv4Network('1.1.1.0/24') - ip2 = ipaddress.IPv4Network('1.1.1.1/32') + ip2 = ipaddress.IPv4Network('1.1.1.0/32') ip3 = ipaddress.IPv4Network('1.1.2.0/24') self.assertTrue(ip1 < ip3) self.assertTrue(ip3 > ip2) - #self.assertEqual(ip1.compare_networks(ip2), 0) - #self.assertTrue(ip1._get_networks_key() == ip2._get_networks_key()) + self.assertEqual(ip1.compare_networks(ip1), 0) + + # if addresses are the same, sort by netmask + self.assertEqual(ip1.compare_networks(ip2), -1) + self.assertEqual(ip2.compare_networks(ip1), 1) + self.assertEqual(ip1.compare_networks(ip3), -1) + self.assertEqual(ip3.compare_networks(ip1), 1) self.assertTrue(ip1._get_networks_key() < ip3._get_networks_key()) ip1 = ipaddress.IPv6Network('2001:2000::/96') @@ -685,6 +776,9 @@ # Test comparing different protocols. # Should always raise a TypeError. + self.assertRaises(TypeError, + self.ipv4_network.compare_networks, + self.ipv6_network) ipv6 = ipaddress.IPv6Interface('::/0') ipv4 = ipaddress.IPv4Interface('0.0.0.0/0') self.assertRaises(TypeError, ipv4.__lt__, ipv6) @@ -811,6 +905,7 @@ self.assertEqual(True, ipaddress.ip_interface( '224.1.1.1/31').is_multicast) self.assertEqual(False, ipaddress.ip_network('240.0.0.0').is_multicast) + self.assertEqual(True, ipaddress.ip_network('240.0.0.0').is_reserved) self.assertEqual(True, ipaddress.ip_interface( '192.168.1.1/17').is_private) @@ -818,9 +913,12 @@ self.assertEqual(True, ipaddress.ip_network( '10.255.255.255').is_private) self.assertEqual(False, ipaddress.ip_network('11.0.0.0').is_private) + self.assertEqual(False, ipaddress.ip_network('11.0.0.0').is_reserved) self.assertEqual(True, ipaddress.ip_network( '172.31.255.255').is_private) self.assertEqual(False, ipaddress.ip_network('172.32.0.0').is_private) + self.assertEqual(True, + ipaddress.ip_network('169.254.1.0/24').is_link_local) self.assertEqual(True, ipaddress.ip_interface( @@ -840,6 +938,9 @@ self.assertEqual(True, ipaddress.ip_address('0.0.0.0').is_unspecified) self.assertEqual(True, ipaddress.ip_address('224.1.1.1').is_multicast) self.assertEqual(False, ipaddress.ip_address('240.0.0.0').is_multicast) + self.assertEqual(True, ipaddress.ip_address('240.0.0.1').is_reserved) + self.assertEqual(False, + ipaddress.ip_address('239.255.255.255').is_reserved) self.assertEqual(True, ipaddress.ip_address('192.168.1.1').is_private) self.assertEqual(False, ipaddress.ip_address('192.169.0.0').is_private) @@ -851,9 +952,9 @@ self.assertEqual(False, ipaddress.ip_address('172.32.0.0').is_private) self.assertEqual(True, - ipaddress.ip_address('169.254.100.200').is_link_local) + ipaddress.ip_address('169.254.100.200').is_link_local) self.assertEqual(False, - ipaddress.ip_address('169.255.100.200').is_link_local) + ipaddress.ip_address('169.255.100.200').is_link_local) self.assertEqual(True, ipaddress.ip_address('127.100.200.254').is_loopback) @@ -864,7 +965,7 @@ def testReservedIpv6(self): self.assertEqual(True, ipaddress.ip_network('ffff::').is_multicast) - self.assertEqual(True, ipaddress.ip_network(2**128-1).is_multicast) + self.assertEqual(True, ipaddress.ip_network(2**128 - 1).is_multicast) self.assertEqual(True, 
ipaddress.ip_network('ff00::').is_multicast) self.assertEqual(False, ipaddress.ip_network('fdff::').is_multicast) @@ -899,7 +1000,7 @@ # test addresses self.assertEqual(True, ipaddress.ip_address('ffff::').is_multicast) - self.assertEqual(True, ipaddress.ip_address(2**128-1).is_multicast) + self.assertEqual(True, ipaddress.ip_address(2**128 - 1).is_multicast) self.assertEqual(True, ipaddress.ip_address('ff00::').is_multicast) self.assertEqual(False, ipaddress.ip_address('fdff::').is_multicast) @@ -935,8 +1036,9 @@ self.assertEqual(True, ipaddress.ip_network('4000::1/128').is_reserved) def testIpv4Mapped(self): - self.assertEqual(ipaddress.ip_address('::ffff:192.168.1.1').ipv4_mapped, - ipaddress.ip_address('192.168.1.1')) + self.assertEqual( + ipaddress.ip_address('::ffff:192.168.1.1').ipv4_mapped, + ipaddress.ip_address('192.168.1.1')) self.assertEqual(ipaddress.ip_address('::c0a8:101').ipv4_mapped, None) self.assertEqual(ipaddress.ip_address('::ffff:c0a8:101').ipv4_mapped, ipaddress.ip_address('192.168.1.1')) @@ -946,21 +1048,25 @@ addr2 = ipaddress.ip_network('10.1.1.0/26') addr3 = ipaddress.ip_network('10.2.1.0/24') addr4 = ipaddress.ip_address('10.1.1.0') + addr5 = ipaddress.ip_network('2001:db8::0/32') self.assertEqual(sorted(list(addr1.address_exclude(addr2))), [ipaddress.ip_network('10.1.1.64/26'), ipaddress.ip_network('10.1.1.128/25')]) self.assertRaises(ValueError, list, addr1.address_exclude(addr3)) self.assertRaises(TypeError, list, addr1.address_exclude(addr4)) + self.assertRaises(TypeError, list, addr1.address_exclude(addr5)) self.assertEqual(list(addr1.address_exclude(addr1)), []) def testHash(self): + self.assertEqual(hash(ipaddress.ip_interface('10.1.1.0/24')), + hash(ipaddress.ip_interface('10.1.1.0/24'))) self.assertEqual(hash(ipaddress.ip_network('10.1.1.0/24')), - hash(ipaddress.ip_network('10.1.1.0/24'))) + hash(ipaddress.ip_network('10.1.1.0/24'))) self.assertEqual(hash(ipaddress.ip_address('10.1.1.0')), - hash(ipaddress.ip_address('10.1.1.0'))) + hash(ipaddress.ip_address('10.1.1.0'))) # i70 self.assertEqual(hash(ipaddress.ip_address('1.2.3.4')), - hash(ipaddress.ip_address( + hash(ipaddress.ip_address( int(ipaddress.ip_address('1.2.3.4')._ip)))) ip1 = ipaddress.ip_address('10.1.1.0') ip2 = ipaddress.ip_address('1::') @@ -972,6 +1078,18 @@ self.assertTrue(self.ipv4_address in dummy) self.assertTrue(ip2 in dummy) + def testIPBases(self): + net = self.ipv4_network + self.assertEqual('1.2.3.0/24', net.compressed) + self.assertEqual( + net._ip_int_from_prefix(24), + net._ip_int_from_prefix(None)) + net = self.ipv6_network + self.assertRaises(ValueError, net._string_from_ip_int, 2**128 + 1) + self.assertEqual( + self.ipv6_address._string_from_ip_int(self.ipv6_address._ip), + self.ipv6_address._string_from_ip_int(None)) + def testIPv6NetworkHelpers(self): net = self.ipv6_network self.assertEqual('2001:658:22a:cafe::/64', net.with_prefixlen) @@ -1032,6 +1150,7 @@ addr1 = ipaddress.IPv6Interface('2001::1') addr2 = ipaddress.IPv6Address('2001:0:5ef5:79fd:0:59d:a0e5:ba1') addr3 = ipaddress.IPv6Network('2001::/96') + addr4 = ipaddress.IPv4Address('192.168.178.1') self.assertEqual('2001:0000:0000:0000:0000:0000:0000:0001/128', addr1.exploded) self.assertEqual('0000:0000:0000:0000:0000:0000:0000:0001/128', @@ -1041,6 +1160,7 @@ addr2.exploded) self.assertEqual('2001:0000:0000:0000:0000:0000:0000:0000/96', addr3.exploded) + self.assertEqual('192.168.178.1', addr4.exploded) def testIntRepresentation(self): self.assertEqual(16909060, int(self.ipv4_address)) @@ -1118,7 +1238,8 @@ # 
V6 - check we're cached self.assertTrue('broadcast_address' in self.ipv6_network._cache) self.assertTrue('hostmask' in self.ipv6_network._cache) - self.assertTrue('broadcast_address' in self.ipv6_interface.network._cache) + self.assertTrue( + 'broadcast_address' in self.ipv6_interface.network._cache) self.assertTrue('hostmask' in self.ipv6_interface.network._cache) def testTeredo(self): @@ -1139,7 +1260,6 @@ ipaddress.IPv4Address('95.26.244.94')), teredo_addr.teredo) - def testsixtofour(self): sixtofouraddr = ipaddress.ip_address('2002:ac1d:2d64::1') bad_addr = ipaddress.ip_address('2000:ac1d:2d64::1') -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Jun 5 13:48:16 2012 From: python-checkins at python.org (victor.stinner) Date: Tue, 05 Jun 2012 13:48:16 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Close_=2311022=3A_TextIOWra?= =?utf8?q?pper_doesn=27t_call_locale=2Esetlocale=28=29_anymore?= Message-ID: http://hg.python.org/cpython/rev/2587328c7c9c changeset: 77361:2587328c7c9c user: Victor Stinner date: Tue Jun 05 13:43:22 2012 +0200 summary: Close #11022: TextIOWrapper doesn't call locale.setlocale() anymore open() and io.TextIOWrapper are now calling locale.getpreferredencoding(False) instead of locale.getpreferredencoding() in text mode if the encoding is not specified. Don't change temporary the locale encoding using locale.setlocale(), use the current locale encoding instead of the user preferred encoding. Explain also in open() documentation that locale.getpreferredencoding(False) is called if the encoding is not specified. files: Doc/library/functions.rst | 7 ++- Doc/library/io.rst | 8 ++++- Lib/_pyio.py | 4 +- Lib/test/test_builtin.py | 42 ++++++++++++++++++++------ Lib/test/test_io.py | 35 +++++++++++++++++----- Misc/NEWS | 6 +++ Modules/_io/_iomodule.c | 5 +- Modules/_io/textio.c | 4 +- 8 files changed, 83 insertions(+), 28 deletions(-) diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst --- a/Doc/library/functions.rst +++ b/Doc/library/functions.rst @@ -800,9 +800,10 @@ already exists), ``'x'`` for exclusive creation and ``'a'`` for appending (which on *some* Unix systems, means that *all* writes append to the end of the file regardless of the current seek position). In text mode, if - *encoding* is not specified the encoding used is platform dependent. (For - reading and writing raw bytes use binary mode and leave *encoding* - unspecified.) The available modes are: + *encoding* is not specified the encoding used is platform dependent: + ``locale.getpreferredencoding(False)`` is called to get the current locale + encoding. (For reading and writing raw bytes use binary mode and leave + *encoding* unspecified.) The available modes are: ========= =============================================================== Character Meaning diff --git a/Doc/library/io.rst b/Doc/library/io.rst --- a/Doc/library/io.rst +++ b/Doc/library/io.rst @@ -752,7 +752,7 @@ It inherits :class:`TextIOBase`. *encoding* gives the name of the encoding that the stream will be decoded or - encoded with. It defaults to :func:`locale.getpreferredencoding`. + encoded with. It defaults to ``locale.getpreferredencoding(False)``. *errors* is an optional string that specifies how encoding and decoding errors are to be handled. Pass ``'strict'`` to raise a :exc:`ValueError` @@ -784,6 +784,12 @@ .. versionchanged:: 3.3 The *write_through* argument has been added. + .. 
versionchanged:: 3.3 + The default *encoding* is now ``locale.getpreferredencoding(False)`` + instead of ``locale.getpreferredencoding()``. Don't change temporary the + locale encoding using :func:`locale.setlocale`, use the current locale + encoding instead of the user preferred encoding. + :class:`TextIOWrapper` provides one attribute in addition to those of :class:`TextIOBase` and its parents: diff --git a/Lib/_pyio.py b/Lib/_pyio.py --- a/Lib/_pyio.py +++ b/Lib/_pyio.py @@ -1448,7 +1448,7 @@ r"""Character and line based layer over a BufferedIOBase object, buffer. encoding gives the name of the encoding that the stream will be - decoded or encoded with. It defaults to locale.getpreferredencoding. + decoded or encoded with. It defaults to locale.getpreferredencoding(False). errors determines the strictness of encoding and decoding (see the codecs.register) and defaults to "strict". @@ -1487,7 +1487,7 @@ # Importing locale may fail if Python is being built encoding = "ascii" else: - encoding = locale.getpreferredencoding() + encoding = locale.getpreferredencoding(False) if not isinstance(encoding, str): raise ValueError("invalid encoding: %r" % encoding) diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py --- a/Lib/test/test_builtin.py +++ b/Lib/test/test_builtin.py @@ -1,20 +1,21 @@ # Python test set -- built-in functions -import platform -import unittest -import sys -import warnings +import ast +import builtins import collections import io +import locale import os -import ast +import pickle +import platform +import random +import sys +import traceback import types -import builtins -import random -import traceback +import unittest +import warnings +from operator import neg from test.support import TESTFN, unlink, run_unittest, check_warnings -from operator import neg -import pickle try: import pty, signal except ImportError: @@ -961,6 +962,27 @@ fp.close() unlink(TESTFN) + def test_open_default_encoding(self): + old_environ = dict(os.environ) + try: + # try to get a user preferred encoding different than the current + # locale encoding to check that open() uses the current locale + # encoding and not the user preferred encoding + for key in ('LC_ALL', 'LANG', 'LC_CTYPE'): + if key in os.environ: + del os.environ[key] + + self.write_testfile() + current_locale_encoding = locale.getpreferredencoding(False) + fp = open(TESTFN, 'w') + try: + self.assertEqual(fp.encoding, current_locale_encoding) + finally: + fp.close() + finally: + os.environ.clear() + os.environ.update(old_environ) + def test_ord(self): self.assertEqual(ord(' '), 32) self.assertEqual(ord('A'), 65) diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py --- a/Lib/test/test_io.py +++ b/Lib/test/test_io.py @@ -19,20 +19,21 @@ # test both implementations. This file has lots of examples. 
################################################################################ +import abc +import array +import errno +import locale import os +import pickle +import random +import signal import sys import time -import array -import random import unittest +import warnings import weakref -import abc -import signal -import errno -import warnings -import pickle +from collections import deque from itertools import cycle, count -from collections import deque from test import support import codecs @@ -1881,6 +1882,24 @@ t.write("A\rB") self.assertEqual(r.getvalue(), b"XY\nZA\rB") + def test_default_encoding(self): + old_environ = dict(os.environ) + try: + # try to get a user preferred encoding different than the current + # locale encoding to check that TextIOWrapper() uses the current + # locale encoding and not the user preferred encoding + for key in ('LC_ALL', 'LANG', 'LC_CTYPE'): + if key in os.environ: + del os.environ[key] + + current_locale_encoding = locale.getpreferredencoding(False) + b = self.BytesIO() + t = self.TextIOWrapper(b) + self.assertEqual(t.encoding, current_locale_encoding) + finally: + os.environ.clear() + os.environ.update(old_environ) + def test_encoding(self): # Check the encoding attribute is always set, and valid b = self.BytesIO() diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,12 @@ Core and Builtins ----------------- +- Issue #11022: open() and io.TextIOWrapper are now calling + locale.getpreferredencoding(False) instead of locale.getpreferredencoding() + in text mode if the encoding is not specified. Don't change temporary the + locale encoding using locale.setlocale(), use the current locale encoding + instead of the user preferred encoding. + - Issue #14673: Add Eric Snow's sys.implementation implementation. Library diff --git a/Modules/_io/_iomodule.c b/Modules/_io/_iomodule.c --- a/Modules/_io/_iomodule.c +++ b/Modules/_io/_iomodule.c @@ -112,8 +112,9 @@ "'a' for appending (which on some Unix systems, means that all writes\n" "append to the end of the file regardless of the current seek position).\n" "In text mode, if encoding is not specified the encoding used is platform\n" -"dependent. (For reading and writing raw bytes use binary mode and leave\n" -"encoding unspecified.) The available modes are:\n" +"dependent: locale.getpreferredencoding(False) is called to get the\n" +"current locale encoding. (For reading and writing raw bytes use binary\n" +"mode and leave encoding unspecified.) The available modes are:\n" "\n" "========= ===============================================================\n" "Character Meaning\n" diff --git a/Modules/_io/textio.c b/Modules/_io/textio.c --- a/Modules/_io/textio.c +++ b/Modules/_io/textio.c @@ -630,7 +630,7 @@ "Character and line based layer over a BufferedIOBase object, buffer.\n" "\n" "encoding gives the name of the encoding that the stream will be\n" - "decoded or encoded with. It defaults to locale.getpreferredencoding.\n" + "decoded or encoded with. 
It defaults to locale.getpreferredencoding(False).\n" "\n" "errors determines the strictness of encoding and decoding (see the\n" "codecs.register) and defaults to \"strict\".\n" @@ -898,7 +898,7 @@ else { use_locale: self->encoding = _PyObject_CallMethodId( - state->locale_module, &PyId_getpreferredencoding, NULL); + state->locale_module, &PyId_getpreferredencoding, "O", Py_False); if (self->encoding == NULL) { catch_ImportError: /* -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Jun 5 14:09:40 2012 From: python-checkins at python.org (nick.coghlan) Date: Tue, 05 Jun 2012 14:09:40 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Add_PEP_422=3A_Dynamic_Class_D?= =?utf8?q?ecorators?= Message-ID: http://hg.python.org/peps/rev/0e3606500a26 changeset: 4449:0e3606500a26 user: Nick Coghlan date: Tue Jun 05 22:09:20 2012 +1000 summary: Add PEP 422: Dynamic Class Decorators files: pep-0422.txt | 168 +++++++++++++++++++++++++++++++++++++++ 1 files changed, 168 insertions(+), 0 deletions(-) diff --git a/pep-0422.txt b/pep-0422.txt new file mode 100644 --- /dev/null +++ b/pep-0422.txt @@ -0,0 +1,168 @@ +PEP: 422 +Title: Dynamic class decorators +Version: $Revision$ +Last-Modified: $Date$ +Author: Nick Coghlan +Status: Draft +Type: Standards Track +Content-Type: text/x-rst +Created: 5-Jun-2012 +Post-History: 5-Jun-2012 + + +Abstract +======== + +Classes currently support two mechanisms for modification of the class at +definition time: metaclasses and lexical decorators. + +Metaclasses can be awkward and challenging to use correctly in conjunction +with multiple inheritance and lexical decorators don't interact with class +inheritance at all. + +This PEP proposes a new mechanism for dynamic class decoration that +interacts more cleanly with class inheritance mechanisms. + + +Specification +============= + +This PEP proposes that a new step be added to the class creation process, +after the metaclass invocation to construct the class instance and before +the application of lexical decorators. + +This step will walk the class MRO in reverse order, looking for +``__decorators__`` entries in each class dictionary. These entries are +expected to be iterables that are also walked in reverse order to retrieve +class decorators that are automatically applied to the class being defined:: + + for entry in reversed(cls.mro()): + decorators = entry.__dict__.get("__decorators__", ()) + for deco in reversed(decorators): + cls = deco(cls) + +This step in the class creation process will be an implicit part of the +class statement and also part of the behaviour of ``types.new_class()``. + + +Rationale +========= + +When decorator support was added to classes, the lexical decoration syntax +was copied directly from function decorators:: + + @decorator + class Example: + # Subclasses will not be decorated automatically + pass + +This mechanism works well, so long as it is considered acceptable that the +decorator is *not* applied automatically to any subclasses. 
If it is +desired that the behaviour be inherited, it is currently necessary to +make the step up to defining a `custom metaclass`_:: + + class DynamicDecorators(type): + """Metaclass for dynamic decorator support + + Creates the class normally, then runs through the MRO looking for + __decorators__ attributes and applying the contained decorators to + the newly created class + """ + def __new__(meta, name, bases, ns): + cls = super(DynamicDecorators, meta).__new__(meta, name, bases, ns) + for entry in reversed(cls.mro()): + decorators = entry.__dict__.get("__decorators__", ()) + for deco in reversed(decorators): + cls = deco(cls) + return cls + + class Example(metaclass=DynamicDecorators): + # Subclasses *will* be decorated automatically + __decorators__ = [decorator] + +The main potential problem with this approach, is that it can place +significant constraints on the type heirarchy, as it requires that all +metaclasses used be well behaved with respect to multiple inheritance. + +By making dynamic decorators an inherent part of the class creation process, +many current use cases of metaclasses may be replaced with dynamic decorators +instead, greatly reducing the likelihood of metaclass conflicts, as well +as being substantially easier to write correctly in the first place. + + +Design Discussion +================= + + +Allowing metaclasses to override the dynamic decoration process +--------------------------------------------------------------- + +This PEP does not provide a mechanism that allows metaclasses to override the +dynamic decoration process. If this feature is deemed desirable in the +future, then it can be added by moving the functionality described in +this PEP into a new method on the metaclass (for example, ``__decorate__``), +with ``type`` providing a suitable default implementation that matches +the behaviour described here. + +This PEP chose the simplicity of the current approach, as lexical decorators +are currently outside the scope of metaclass control, so it seems reasonable +to pursue the simpler strategy in the absence of a solid use case for +making this behaviour configurable. + + +Iterating over decorator entries in reverse order +------------------------------------------------- + +This order was chosen to match the layout of lexical decorators when +converted to ordinary function calls. Just as the following are equivalent:: + + @deco2 + @deco1 + class C: + pass + + class C: + pass + C = deco2(deco1(C)) + +So too will the following be roughly equivalent (aside from inheritance):: + + class C: + __decorators__ = [deco2, deco1] + + class C: + pass + C = deco2(deco1(C)) + + +Iterating over the MRO in reverse order +--------------------------------------- + +The order of iteration over the MRO for decorator application was chosen to +match the order of actual call *evaluation* when using ``super`` to invoke +parent class implementations: the first method to run to completion is that +closest to the base of the class hierarchy. + + +References +========== + +.. _custom metaclass: + https://bitbucket.org/ncoghlan/misc/src/default/pep422.py + + +Copyright +========= + +This document has been placed in the public domain. + + +.. 
+ Local Variables: + mode: indented-text + indent-tabs-mode: nil + sentence-end-double-space: t + fill-column: 70 + coding: utf-8 + End: + -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Tue Jun 5 14:17:06 2012 From: python-checkins at python.org (nick.coghlan) Date: Tue, 05 Jun 2012 14:17:06 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_PEP_420_is_done?= Message-ID: http://hg.python.org/peps/rev/60d94f1eee0b changeset: 4450:60d94f1eee0b user: Nick Coghlan date: Tue Jun 05 22:16:52 2012 +1000 summary: PEP 420 is done files: pep-0420.txt | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/pep-0420.txt b/pep-0420.txt --- a/pep-0420.txt +++ b/pep-0420.txt @@ -3,7 +3,7 @@ Version: $Revision$ Last-Modified: $Date$ Author: Eric V. Smith -Status: Accepted +Status: Final Type: Standards Track Content-Type: text/x-rst Created: 19-Apr-2012 -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Tue Jun 5 14:23:38 2012 From: python-checkins at python.org (richard.oudkerk) Date: Tue, 05 Jun 2012 14:23:38 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Add_test_for_multiprocessin?= =?utf8?q?g=2ECondition=2Ewait=28=29_and_changeset_3baeb5e13dd2?= Message-ID: http://hg.python.org/cpython/rev/b2f86880517f changeset: 77362:b2f86880517f user: Richard Oudkerk date: Tue Jun 05 13:15:29 2012 +0100 summary: Add test for multiprocessing.Condition.wait() and changeset 3baeb5e13dd2 files: Lib/test/test_multiprocessing.py | 28 ++++++++++++++++++++ 1 files changed, 28 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py --- a/Lib/test/test_multiprocessing.py +++ b/Lib/test/test_multiprocessing.py @@ -956,6 +956,34 @@ p.join(5) self.assertTrue(success.value) + @classmethod + def _test_wait_result(cls, c, pid): + with c: + c.notify() + time.sleep(1) + if pid is not None: + os.kill(pid, signal.SIGINT) + + def test_wait_result(self): + if isinstance(self, ProcessesMixin) and sys.platform != 'win32': + pid = os.getpid() + else: + pid = None + + c = self.Condition() + with c: + self.assertFalse(c.wait(0)) + self.assertFalse(c.wait(0.1)) + + p = self.Process(target=self._test_wait_result, args=(c, pid)) + p.start() + + self.assertTrue(c.wait(10)) + if pid is not None: + self.assertRaises(KeyboardInterrupt, c.wait, 10) + + p.join() + class _TestEvent(BaseTestCase): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Jun 5 22:30:55 2012 From: python-checkins at python.org (gregory.p.smith) Date: Tue, 05 Jun 2012 22:30:55 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Fixes_issue_=23?= =?utf8?q?15000=3A_support_the_odd_x32_abi_on_posixsubprocess=27s_system_c?= =?utf8?q?all=2E?= Message-ID: http://hg.python.org/cpython/rev/aa9cfeea07ad changeset: 77363:aa9cfeea07ad branch: 3.2 parent: 77333:f3ce3e874a58 user: Gregory P. Smith date: Tue Jun 05 13:26:39 2012 -0700 summary: Fixes issue #15000: support the odd x32 abi on posixsubprocess's system call. files: Misc/NEWS | 2 ++ Modules/_posixsubprocess.c | 7 +++++++ 2 files changed, 9 insertions(+), 0 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -275,6 +275,8 @@ Extension Modules ----------------- +- Issue #15000: Support the "unique" x32 architecture in _posixsubprocess.c. + - Issue #9041: An issue in ctypes.c_longdouble, ctypes.c_double, and ctypes.c_float that caused an incorrect exception to be returned in the case of overflow has been fixed.
diff --git a/Modules/_posixsubprocess.c b/Modules/_posixsubprocess.c --- a/Modules/_posixsubprocess.c +++ b/Modules/_posixsubprocess.c @@ -175,8 +175,15 @@ * chooses to break compatibility with all existing binaries. Highly Unlikely. */ struct linux_dirent { +#if defined(__x86_64__) && defined(__ILP32__) + /* Support the wacky x32 ABI (fake 32-bit userspace speaking to x86_64 + * kernel interfaces) - https://sites.google.com/site/x32abi/ */ + unsigned long long d_ino; + unsigned long long d_off; +#else unsigned long d_ino; /* Inode number */ unsigned long d_off; /* Offset to next linux_dirent */ +#endif unsigned short d_reclen; /* Length of this linux_dirent */ char d_name[256]; /* Filename (null-terminated) */ }; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Jun 5 22:30:56 2012 From: python-checkins at python.org (gregory.p.smith) Date: Tue, 05 Jun 2012 22:30:56 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Fixes_issue_=2315000=3A_support_the_odd_x32_abi_on_posixsubp?= =?utf8?q?rocess=27s_system_call=2E?= Message-ID: http://hg.python.org/cpython/rev/5a4d5e714d08 changeset: 77364:5a4d5e714d08 parent: 77362:b2f86880517f parent: 77363:aa9cfeea07ad user: Gregory P. Smith date: Tue Jun 05 13:30:24 2012 -0700 summary: Fixes issue #15000: support the odd x32 abi on posixsubprocess's system call. files: Misc/NEWS | 5 +++++ Modules/_posixsubprocess.c | 7 +++++++ 2 files changed, 12 insertions(+), 0 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -55,6 +55,11 @@ - Issue #14963: Convert contextlib.ExitStack.__exit__ to use an iterative algorithm (Patch by Alon Horev) +Extension Modules +----------------- + +- Issue #15000: Support the "unique" x32 architecture in _posixsubprocess.c. + Tests ----- diff --git a/Modules/_posixsubprocess.c b/Modules/_posixsubprocess.c --- a/Modules/_posixsubprocess.c +++ b/Modules/_posixsubprocess.c @@ -177,8 +177,15 @@ * chooses to break compatibility with all existing binaries. Highly Unlikely. */ struct linux_dirent { +#if defined(__x86_64__) && defined(__ILP32__) + /* Support the wacky x32 ABI (fake 32-bit userspace speaking to x86_64 + * kernel interfaces) - https://sites.google.com/site/x32abi/ */ + unsigned long long d_ino; + unsigned long long d_off; +#else unsigned long d_ino; /* Inode number */ unsigned long d_off; /* Offset to next linux_dirent */ +#endif unsigned short d_reclen; /* Length of this linux_dirent */ char d_name[256]; /* Filename (null-terminated) */ }; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Jun 6 00:18:42 2012 From: python-checkins at python.org (kristjan.jonsson) Date: Wed, 06 Jun 2012 00:18:42 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Signal_condition_variables_?= =?utf8?q?with_the_mutex_held=2E__Destroy_condition_variables?= Message-ID: http://hg.python.org/cpython/rev/6d146e2ae9a9 changeset: 77365:6d146e2ae9a9 user: Kristj?n Valur J?nsson date: Tue Jun 05 22:17:42 2012 +0000 summary: Signal condition variables with the mutex held. Destroy condition variables before their mutexes. 
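A minimal sketch of the signalling discipline this change enforces, using Python's threading.Condition as an analogy (illustrative only, and not part of the changeset; the patch itself modifies the C-level GIL and pthread lock code, and threading.Condition likewise requires the lock to be held around notify()):

    import threading

    cond = threading.Condition()
    ready = False

    def producer():
        global ready
        with cond:            # acquire the lock first...
            ready = True
            cond.notify()     # ...and signal while still holding it
        # the lock is released only after the notification

    def consumer():
        with cond:
            while not ready:  # re-check the predicate; wait() can return spuriously
                cond.wait()

    t = threading.Thread(target=consumer)
    t.start()
    producer()
    t.join()
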
files: Python/ceval_gil.h | 9 +++++---- Python/thread_pthread.h | 15 +++++++++------ 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/Python/ceval_gil.h b/Python/ceval_gil.h --- a/Python/ceval_gil.h +++ b/Python/ceval_gil.h @@ -313,14 +313,15 @@ static void destroy_gil(void) { + /* some pthread-like implementations tie the mutex to the cond + * and must have the cond destroyed first. + */ + COND_FINI(gil_cond); MUTEX_FINI(gil_mutex); #ifdef FORCE_SWITCHING + COND_FINI(switch_cond); MUTEX_FINI(switch_mutex); #endif - COND_FINI(gil_cond); -#ifdef FORCE_SWITCHING - COND_FINI(switch_cond); -#endif _Py_atomic_store_explicit(&gil_locked, -1, _Py_memory_order_release); _Py_ANNOTATE_RWLOCK_DESTROY(&gil_locked); } diff --git a/Python/thread_pthread.h b/Python/thread_pthread.h --- a/Python/thread_pthread.h +++ b/Python/thread_pthread.h @@ -443,12 +443,15 @@ dprintf(("PyThread_free_lock(%p) called\n", lock)); + /* some pthread-like implementations tie the mutex to the cond + * and must have the cond destroyed first. + */ + status = pthread_cond_destroy( &thelock->lock_released ); + CHECK_STATUS("pthread_cond_destroy"); + status = pthread_mutex_destroy( &thelock->mut ); CHECK_STATUS("pthread_mutex_destroy"); - status = pthread_cond_destroy( &thelock->lock_released ); - CHECK_STATUS("pthread_cond_destroy"); - free((void *)thelock); } @@ -531,12 +534,12 @@ thelock->locked = 0; - status = pthread_mutex_unlock( &thelock->mut ); - CHECK_STATUS("pthread_mutex_unlock[3]"); - /* wake up someone (anyone, if any) waiting on the lock */ status = pthread_cond_signal( &thelock->lock_released ); CHECK_STATUS("pthread_cond_signal"); + + status = pthread_mutex_unlock( &thelock->mut ); + CHECK_STATUS("pthread_mutex_unlock[3]"); } #endif /* USE_SEMAPHORES */ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Jun 6 01:39:38 2012 From: python-checkins at python.org (victor.stinner) Date: Wed, 06 Jun 2012 01:39:38 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Close_=236203=3A_Document_t?= =?utf8?q?hat_Python_3_sets_LC=5FCTYPE_at_startup_to_the_user=27s?= Message-ID: http://hg.python.org/cpython/rev/113cdce4663c changeset: 77366:113cdce4663c user: Victor Stinner date: Wed Jun 06 01:37:37 2012 +0200 summary: Close #6203: Document that Python 3 sets LC_CTYPE at startup to the user's preferred locale encoding files: Doc/library/locale.rst | 7 +++++-- 1 files changed, 5 insertions(+), 2 deletions(-) diff --git a/Doc/library/locale.rst b/Doc/library/locale.rst --- a/Doc/library/locale.rst +++ b/Doc/library/locale.rst @@ -475,8 +475,11 @@ locale somewhat painful to use correctly. Initially, when a program is started, the locale is the ``C`` locale, no matter -what the user's preferred locale is. The program must explicitly say that it -wants the user's preferred locale settings by calling ``setlocale(LC_ALL, '')``. +what the user's preferred locale is. There is one exception: the +:data:`LC_CTYPE` category is changed at startup to set the current locale +encoding to the user's preferred locale encoding. The program must explicitly +say that it wants the user's preferred locale settings for other categories by +calling ``setlocale(LC_ALL, '')``. It is generally a bad idea to call :func:`setlocale` in some library routine, since as a side effect it affects the entire program. 
Saving and restoring it -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Jun 6 02:20:57 2012 From: python-checkins at python.org (brett.cannon) Date: Wed, 06 Jun 2012 02:20:57 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Update_for_PEP_362_by_Yury_Sel?= =?utf8?q?ivanov_=28mostly=29_and_Larry_Hastings=2E?= Message-ID: http://hg.python.org/peps/rev/31ddc7f118d6 changeset: 4451:31ddc7f118d6 user: Brett Cannon date: Tue Jun 05 20:20:53 2012 -0400 summary: Update for PEP 362 by Yury Selivanov (mostly) and Larry Hastings. files: pep-0362.txt | 506 ++++++++++++++++++++------------------ 1 files changed, 261 insertions(+), 245 deletions(-) diff --git a/pep-0362.txt b/pep-0362.txt --- a/pep-0362.txt +++ b/pep-0362.txt @@ -2,269 +2,313 @@ Title: Function Signature Object Version: $Revision$ Last-Modified: $Date$ -Author: Brett Cannon , Jiwon Seo +Author: Brett Cannon , Jiwon Seo , + Yury Selivanov , Larry Hastings Status: Draft Type: Standards Track Content-Type: text/x-rst Created: 21-Aug-2006 -Python-Version: 2.6 -Post-History: 05-Sep-2007 +Python-Version: 3.3 +Post-History: 04-Jun-2012 Abstract ======== Python has always supported powerful introspection capabilities, -including that for functions and methods (for the rest of this PEP the -word "function" refers to both functions and methods). Taking a -function object, you can fully reconstruct the function's signature. -Unfortunately it is a little unruly having to look at all the -different attributes to pull together complete information for a -function's signature. +including introspecting functions and methods. (For the rest of +this PEP, "function" refers to both functions and methods). By +examining a function object you can fully reconstruct the function's +signature. Unfortunately this information is stored in an inconvenient +manner, and is spread across a half-dozen deeply nested attributes. -This PEP proposes an object representation for function signatures. -This should help facilitate introspection on functions for various -uses. The introspection information contains all possible information -about the parameters in a signature (including Python 3.0 features). +This PEP proposes a new representation for function signatures. +The new representation contains all necessary information about a function +and its parameters, and makes introspection easy and straightforward. -This object, though, is not meant to replace existing ways of -introspection on a function's signature. The current solutions are -there to make Python's execution work in an efficient manner. The -proposed object representation is only meant to help make application -code have an easier time to query a function on its signature. - - -Purpose -======= - -An object representation of a function's call signature should provide -an easy way to introspect what a function expects as arguments. It -does not need to be a "live" representation, though; the signature can -be inferred once and stored without changes to the signature object -representation affecting the function it represents (but this is an -`Open Issues`_). - -Indirection of signature introspection can also occur. If a -decorator took a decorated function's signature object and set it on -the decorating function then introspection could be redirected to what -is actually expected instead of the typical ``*args, **kwargs`` -signature of decorating functions. 
+However, this object does not replace the existing function +metadata, which is used by Python itself to execute those +functions. The new metadata object is intended solely to make +function introspection easier for Python programmers. Signature Object ================ -The overall signature of an object is represented by the Signature -object. This object is to store a `Parameter object`_ for each -parameter in the signature. It is also to store any information -about the function itself that is pertinent to the signature. +A Signature object represents the overall signature of a function. +It stores a `Parameter object`_ for each parameter accepted by the +function, as well as information specific to the function itself. -A Signature object has the following structure attributes: +A Signature object has the following public attributes and methods: * name : str - Name of the function. This is not fully qualified because - function objects for methods do not know the class they are - contained within. This makes functions and methods - indistinguishable from one another when passed to decorators, - preventing proper creation of a fully qualified name. -* var_args : str - Name of the variable positional parameter (i.e., ``*args``), if - present, or the empty string. -* var_kw_args : str - Name of the variable keyword parameter (i.e., ``**kwargs``), if - present, or the empty string. -* var_annotations: dict(str, object) - Dict that contains the annotations for the variable parameters. - The keys are of the variable parameter with values of the - annotation. If an annotation does not exist for a variable - parameter then the key does not exist in the dict. + Name of the function. +* qualname : str + Fully qualified name of the function. * return_annotation : object - If present, the attribute is set to the annotation for the return - type of the function. -* parameters : list(Parameter) - List of the parameters of the function as represented by - Parameter objects in the order of its definition (keyword-only - arguments are in the order listed by ``code.co_varnames``). -* bind(\*args, \*\*kwargs) -> dict(str, object) - Create a mapping from arguments to parameters. The keys are the - names of the parameter that an argument maps to with the value - being the value the parameter would have if this function was - called with the given arguments. + The annotation for the return type of the function if specified. + If the function has no annotation for its return type, this + attribute is not set. +* parameters : OrderedDict + An ordered mapping of parameters' names to the corresponding + Parameter objects (keyword-only arguments are in the same order + as listed in ``code.co_varnames``). +* bind(\*args, \*\*kwargs) -> BoundArguments + Creates a mapping from positional and keyword arguments to + parameters. -Signature objects also have the following methods: +Once a Signature object is created for a particular function, +it's cached in the ``__signature__`` attribute of that function. -* __getitem__(self, key : str) -> Parameter - Returns the Parameter object for the named parameter. -* __iter__(self) - Returns an iterator that returns Parameter objects in their - sequential order based on their 'position' attribute. - -The Signature object is stored in the ``__signature__`` attribute of -a function. When it is to be created is discussed in -`Open Issues`_. +Changes to the Signature object, or to any of its data members, +do not affect the function itself. 
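A minimal usage sketch, assuming only the attributes and methods listed above (the names follow this proposal, so details may differ in any eventual implementation)::

    from inspect import signature   # the function this PEP adds to the inspect module

    def scale(x, factor=2, *, offset=0):
        return x * factor + offset

    sig = signature(scale)
    list(sig.parameters)            # ['x', 'factor', 'offset']
    ba = sig.bind(10, factor=3)     # a BoundArguments object; defaults are not filled in
    ba.arguments                    # OrderedDict([('x', 10), ('factor', 3)])
    scale(*ba.args, **ba.kwargs)    # 30
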
Parameter Object ================ -A function's signature is made up of several parameters. Python's -different kinds of parameters is quite large and rich and continues to -grow. Parameter objects represent any possible parameter. - -Originally the plan was to represent parameters using a list of -parameter names on the Signature object along with various dicts keyed -on parameter names to disseminate the various pieces of information -one can know about a parameter. But the decision was made to -incorporate all information about a parameter in a single object so -as to make extending the information easier. This was originally put -forth by Talin and the preferred form of Guido (as discussed at the -2006 Google Sprint). +Python's expressive syntax means functions can accept many different +kinds of parameters with many subtle semantic differences. We +propose a rich Parameter object designed to represent any possible +function parameter. The structure of the Parameter object is: -* name : (str | tuple(str)) - The name of the parameter as a string if it is not a tuple. If - the argument is a tuple then a tuple of strings is used. -* position : int - The position of the parameter within the signature of the - function (zero-indexed). For keyword-only parameters the position - value is arbitrary while not conflicting with positional - parameters. The suggestion of setting the attribute to None or -1 - to represent keyword-only parameters was rejected to prevent - variable type usage and as a possible point of errors, - respectively. -* default_value : object - The default value for the parameter, if present, else the - attribute does not exist. -* keyword_only : bool +* name : str + The name of the parameter as a string. +* default : object + The default value for the parameter if specified. If the + parameter has no default value, this attribute is not set. +* annotation : object + The annotation for the parameter if specified. If the + parameter has no annotation, this attribute is not set. +* is_keyword_only : bool True if the parameter is keyword-only, else False. -* annotation - Set to the annotation for the parameter. If ``has_annotation`` is - False then the attribute does not exist to prevent accidental use. +* is_args : bool + True if the parameter accepts variable number of arguments + (``\*args``-like), else False. +* is_kwargs : bool + True if the parameter accepts variable number of keyword + arguments (``\*\*kwargs``-like), else False. +* is_implemented : bool + True if the parameter is implemented for use. Some platforms + implement functions but can't support specific parameters + (e.g. "mode" for os.mkdir). Passing in an unimplemented + parameter may result in the parameter being ignored, + or in NotImplementedError being raised. It is intended that + all conditions where ``is_implemented`` may be False be + thoroughly documented. + + +BoundArguments Object +===================== + +Result of a ``Signature.bind`` call. Holds the mapping of arguments +to the function's parameters. + +Has the following public attributes: + +* arguments : OrderedDict + An ordered mutable mapping of parameters' names to arguments' values. + Does not contain arguments' default values. +* args : tuple + Tuple of positional arguments values. Dynamically computed from + the 'arguments' attribute. +* kwargs : dict + Dict of keyword arguments values. Dynamically computed from + the 'arguments' attribute. 
+ +The ``arguments`` attribute should be used in conjunction with +``Signature.parameters`` for any arguments processing purposes. + +``args`` and ``kwargs`` properties should be used to invoke functions: +:: + + def test(a, *, b): + ... + + sig = signature(test) + ba = sig.bind(10, b=20) + test(*ba.args, **ba.kwargs) Implementation ============== -An implementation can be found in Python's sandbox [#impl]_. -There is a function named ``signature()`` which -returns the value stored on the ``__signature__`` attribute if it -exists, else it creates the Signature object for the -function and sets ``__signature__``. For methods this is stored -directly on the im_func function object since that is what decorators -work with. +An implementation for Python 3.3 can be found here: [#impl]_. +A python issue was also created: [#issue]_. + +The implementation adds a new function ``signature()`` to the +``inspect`` module. ``signature()`` returns the value stored +on the ``__signature__`` attribute if it exists, otherwise it +creates the Signature object for the function and caches it in +the function's ``__signature__``. (For methods this is stored +directly in the ``__func__`` function object, since that is what +decorators work with.) Examples ======== +Function Signature Renderer +--------------------------- +:: + + def render_signature(signature): + '''Renders function definition by its signature. + + Example: + + >>> def test(a:'foo', *, b:'bar', c=True, **kwargs:None) -> 'spam': + ... pass + + >>> render_signature(inspect.signature(test)) + test(a:'foo', *, b:'bar', c=True, **kwargs:None) -> 'spam' + ''' + + result = [] + render_kw_only_separator = True + for param in signature.parameters.values(): + formatted = param.name + + # Add annotation and default value + if hasattr(param, 'annotation'): + formatted = '{}:{!r}'.format(formatted, param.annotation) + if hasattr(param, 'default'): + formatted = '{}={!r}'.format(formatted, param.default) + + # Handle *args and **kwargs -like parameters + if param.is_args: + formatted = '*' + formatted + elif param.is_kwargs: + formatted = '**' + formatted + + if param.is_args: + # OK, we have an '*args'-like parameter, so we won't need + # a '*' to separate keyword-only arguments + render_kw_only_separator = False + elif param.is_keyword_only and render_kw_only_separator: + # We have a keyword-only parameter to render and we haven't + # rendered an '*args'-like parameter before, so add a '*' + # separator to the parameters list ("foo(arg1, *, arg2)" case) + result.append('*') + # This condition should be only triggered once, so + # reset the flag + render_kw_only_separator = False + + result.append(formatted) + + rendered = '{}({})'.format(signature.name, ', '.join(result)) + + if hasattr(signature, 'return_annotation'): + rendered += ' -> {!r}'.format(signature.return_annotation) + + return rendered + + Annotation Checker ------------------ :: - def quack_check(fxn): - """Decorator to verify arguments and return value quack as they should. + import inspect + import functools - Positional arguments. - >>> @quack_check - ... def one_arg(x:int): pass - ... - >>> one_arg(42) - >>> one_arg('a') - Traceback (most recent call last): - ... - TypeError: 'a' does not quack like a + def checktypes(func): + '''Decorator to verify arguments and return types + Example: - *args - >>> @quack_check - ... def var_args(*args:int): pass - ... - >>> var_args(*[1,2,3]) - >>> var_args(*[1,'b',3]) - Traceback (most recent call last): - ... 
- TypeError: *args contains a a value that does not quack like a + >>> @checktypes + ... def test(a:int, b:str) -> int: + ... return int(a * b) - **kwargs - >>> @quack_check - ... def var_kw_args(**kwargs:int): pass - ... - >>> var_kw_args(**{'a': 1}) - >>> var_kw_args(**{'a': 'A'}) - Traceback (most recent call last): - ... - TypeError: **kwargs contains a value that does not quack like a + >>> test(10, '1') + 1111111111 - Return annotations. - >>> @quack_check - ... def returned(x) -> int: return x - ... - >>> returned(42) - 42 - >>> returned('a') - Traceback (most recent call last): - ... - TypeError: the return value 'a' does not quack like a + >>> test(10, 1) + Traceback (most recent call last): + ... + ValueError: foo: wrong type of 'b' argument, 'str' expected, got 'int' + ''' - """ - # Get the signature; only needs to be calculated once. - sig = Signature(fxn) - def check(*args, **kwargs): - # Find out the variable -> value bindings. - bindings = sig.bind(*args, **kwargs) - # Check *args for the proper quack. + sig = inspect.signature(func) + + types = {} + for param in sig.parameters.values(): + # Iterate through function's parameters and build the list of + # arguments types try: - duck = sig.var_annotations[sig.var_args] - except KeyError: + type_ = param.annotation + except AttributeError: + continue + else: + if not inspect.isclass(type_): + # Not a type, skip it + continue + + types[param.name] = type_ + + # If the argument has a type specified, let's check that its + # default value (if present) conforms with the type. + try: + default = param.default + except AttributeError: + continue + else: + if not isinstance(default, type_): + raise ValueError("{func}: wrong type of a default value for {arg!r}". \ + format(func=sig.qualname, arg=param.name)) + + def check_type(sig, arg_name, arg_type, arg_value): + # Internal function that incapsulates arguments type checking + if not isinstance(arg_value, arg_type): + raise ValueError("{func}: wrong type of {arg!r} argument, " \ + "{exp!r} expected, got {got!r}". \ + format(func=sig.qualname, arg=arg_name, + exp=arg_type.__name__, got=type(arg_value).__name__)) + + @functools.wraps(func) + def wrapper(*args, **kwargs): + # Let's bind the arguments + ba = sig.bind(*args, **kwargs) + for arg_name, arg in ba.arguments.items(): + # And iterate through the bound arguments + try: + type_ = types[arg_name] + except KeyError: + continue + else: + # OK, we have a type for the argument, lets get the corresponding + # parameter description from the signature object + param = sig.parameters[arg_name] + if param.is_args: + # If this parameter is a variable-argument parameter, + # then we need to check each of its values + for value in arg: + check_type(sig, arg_name, type_, value) + elif param.is_kwargs: + # If this parameter is a variable-keyword-argument parameter: + for subname, value in arg.items(): + check_type(sig, arg_name + ':' + subname, type_, value) + else: + # And, finally, if this parameter a regular one: + check_type(sig, arg_name, type_, arg) + + result = func(*ba.args, **ba.kwargs) + # The last bit - let's check that the result is correct + try: + return_type = sig.return_annotation + except AttributeError: + # Looks like we don't have any restriction on the return type pass else: - # Check every value in *args. 
- for value in bindings[sig.var_args]: - if not isinstance(value, duck): - raise TypeError("*%s contains a a value that does not " - "quack like a %r" % - (sig.var_args, duck)) - # Remove it from the bindings so as to not check it again. - del bindings[sig.var_args] - # **kwargs. - try: - duck = sig.var_annotations[sig.var_kw_args] - except (KeyError, AttributeError): - pass - else: - # Check every value in **kwargs. - for value in bindings[sig.var_kw_args].values(): - if not isinstance(value, duck): - raise TypeError("**%s contains a value that does not " - "quack like a %r" % - (sig.var_kw_args, duck)) - # Remove from bindings so as to not check again. - del bindings[sig.var_kw_args] - # For each remaining variable ... - for var, value in bindings.items(): - # See if an annotation was set. - try: - duck = sig[var].annotation - except AttributeError: - continue - # Check that the value quacks like it should. - if not isinstance(value, duck): - raise TypeError('%r does not quack like a %s' % (value, duck)) - else: - # All the ducks quack fine; let the call proceed. - returned = fxn(*args, **kwargs) - # Check the return value. - try: - if not isinstance(returned, sig.return_annotation): - raise TypeError('the return value %r does not quack like ' - 'a %r' % (returned, - sig.return_annotation)) - except AttributeError: - pass - return returned - # Full-featured version would set function metadata. - return check + if isinstance(return_type, type) and not isinstance(result, return_type): + raise ValueError('{func}: wrong return type, {exp} expected, got {got}'. \ + format(func=sig.qualname, exp=return_type.__name__, + got=type(result).__name__)) + return result + + return wrapper Open Issues @@ -280,54 +324,23 @@ Signature object and store it to ``__signature__`` if needed, and then return the value of ``__signature__``. +In the current implementation, signatures are created only on demand +("lazy"). -Should ``Signature.bind`` return Parameter objects as keys? ------------------------------------------------------------ -Instead of returning a dict with keys consisting of the name of the -parameters, would it be more useful to instead use Parameter -objects? The name of the argument can easily be retrieved from the -key (and the name would be used as the hash for a Parameter object). +Deprecate ``inspect.getfullargspec()`` and ``inspect.getcallargs()``? +--------------------------------------------------------------------- - -Have ``var_args`` and ``_var_kw_args`` default to ``None``? ------------------------------------------------------------- - -It has been suggested by Fred Drake that these two attributes have a -value of ``None`` instead of empty strings when they do not exist. -The answer to this question will influence what the defaults are for -other attributes as well. - - -Deprecate ``inspect.getargspec()`` and ``.formatargspec()``? -------------------------------------------------------------- - -Since the Signature object replicates the use of ``getargspec()`` -from the ``inspect`` module it might make sense to deprecate it in -2.6. ``formatargspec()`` could also go if Signature objects gained a -__str__ representation. - -Issue with that is types such as ``int``, when used as annotations, -do not lend themselves for output (e.g., ``""`` is the -string represenation for ``int``). The repr representation of types -would need to change in order to make this reasonable. - - -Have the objects be "live"? 
---------------------------- - -Jim Jewett pointed out that Signature and Parameter objects could be -"live". That would mean requesting information would be done on the -fly instead of caching it on the objects. It would also allow for -mutating the function if the Signature or Parameter objects were -mutated. +Since the Signature object replicates the use of ``getfullargspec()`` +and ``getcallargs()`` from the ``inspect`` module it might make sense +to begin deprecating them in 3.3. References ========== -.. [#impl] pep362 directory in Python's sandbox - (http://svn.python.org/view/sandbox/trunk/pep362/) +.. [#impl] pep362 branch (https://bitbucket.org/1st1/cpython/overview) +.. [#issue] issue 15008 (http://bugs.python.org/issue15008) Copyright @@ -335,7 +348,6 @@ This document has been placed in the public domain. - .. Local Variables: -- Repository URL: http://hg.python.org/peps From solipsis at pitrou.net Wed Jun 6 05:47:53 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Wed, 06 Jun 2012 05:47:53 +0200 Subject: [Python-checkins] Daily reference leaks (113cdce4663c): sum=0 Message-ID: results for 113cdce4663c on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflog1ItS5Y', '-x'] From python-checkins at python.org Wed Jun 6 13:40:17 2012 From: python-checkins at python.org (nick.coghlan) Date: Wed, 06 Jun 2012 13:40:17 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_PEP_422_rewrite_to_present_an_?= =?utf8?q?idea_that_a=29_isn=27t_crazy_and_b=29_it_turns_out?= Message-ID: http://hg.python.org/peps/rev/8651e3c70755 changeset: 4452:8651e3c70755 user: Nick Coghlan date: Wed Jun 06 21:40:04 2012 +1000 summary: PEP 422 rewrite to present an idea that a) isn't crazy and b) it turns out Thomas Heller proposed back in 2001 files: pep-0422.txt | 334 +++++++++++++++++++++++++++----------- 1 files changed, 238 insertions(+), 96 deletions(-) diff --git a/pep-0422.txt b/pep-0422.txt --- a/pep-0422.txt +++ b/pep-0422.txt @@ -1,5 +1,5 @@ PEP: 422 -Title: Dynamic class decorators +Title: Simple class initialisation hook Version: $Revision$ Last-Modified: $Date$ Author: Nick Coghlan @@ -7,148 +7,287 @@ Type: Standards Track Content-Type: text/x-rst Created: 5-Jun-2012 +Python-Version: 3.4 Post-History: 5-Jun-2012 Abstract ======== -Classes currently support two mechanisms for modification of the class at -definition time: metaclasses and lexical decorators. +In Python 2, the body of a class definition could modify the way a class +was created (or simply arrange to run other code after the class was created) +by setting the ``__metaclass__`` attribute in the class body. While doing +this implicitly from called code required the use of an implementation detail +(specifically, ``sys._getframes()``), it could also be done explicitly in a +fully supported fashion (for example, by passing ``locals()`` to an +function that calculated a suitable ``__metaclass__`` value) -Metaclasses can be awkward and challenging to use correctly in conjunction -with multiple inheritance and lexical decorators don't interact with class -inheritance at all. +There is currently no corresponding mechanism in Python 3 that allows the +code executed in the class body to directly influence how the class object +is created. Instead, the class creation process is fully defined by the +class header, before the class body even begins executing. 
-This PEP proposes a new mechanism for dynamic class decoration that -interacts more cleanly with class inheritance mechanisms. +This PEP proposes a mechanism that will once again allow the body of a +class definition to more directly influence the way a class is created +(albeit in a more constrained fashion), as well as replacing some current +uses of metaclasses with a simpler, easier to understand alternative. -Specification -============= +Background +========== -This PEP proposes that a new step be added to the class creation process, -after the metaclass invocation to construct the class instance and before -the application of lexical decorators. +For an already created class ``cls``, the term "metaclass" has a clear +meaning: it is the value of ``type(cls)``. -This step will walk the class MRO in reverse order, looking for -``__decorators__`` entries in each class dictionary. These entries are -expected to be iterables that are also walked in reverse order to retrieve -class decorators that are automatically applied to the class being defined:: +*During* class creation, it has another meaning: it is also used to refer to +the metaclass hint that may be provided as part of the class definition. +While in many cases these two meanings end up referring to one and the same +object, there are two situations where that is not the case: - for entry in reversed(cls.mro()): - decorators = entry.__dict__.get("__decorators__", ()) - for deco in reversed(decorators): - cls = deco(cls) +* If the metaclass hint refers to an instance of ``type``, then it is + considered as a candidate metaclass along with the metaclasses of all of + the parents of the class being defined. If a more appropriate metaclass is + found amongst the candidates, then it will be used instead of the one + given in the metaclass hint. +* Otherwise, an explicit metaclass hint is assumed to be a factory function + and is called directly to create the class object. In this case, the final + metaclass will be determined by the factory function definition. In the + typical case (where the factory functions just calls ``type``, or, in + Python 3.3 or later, ``types.new_class``) the actual metaclass is then + determined based on the parent classes. -This step in the class creation process will be an implicit part of the -class statement and also part of the behaviour of ``types.new_class()``. +It is notable that only the actual metaclass is inherited - a factory +function used as a metaclass hook sees only the class currently being +defined, and is not invoked for any subclasses. +In Python 3, the metaclass hint is provided using the ``metaclass=Meta`` +keyword syntax in the class header. This allows the ``__prepare__`` method +on the metaclass to be used to create the ``locals()`` namespace used during +execution of the class body (for example, specifying the use of +``collections.OrderedDict`` instead of a regular ``dict``). -Rationale -========= +In Python 2, there was no ``__prepare__`` method (that API was added for +Python 3 by PEP 3115). Instead, a class body could set the ``__metaclass__`` +attribute, and the class creation process would extract that value from the +class namespace to use as the metaclass hint. There is `published code`_ that +makes use of this feature. 
-When decorator support was added to classes, the lexical decoration syntax -was copied directly from function decorators:: - @decorator - class Example: - # Subclasses will not be decorated automatically - pass +Proposal +======== -This mechanism works well, so long as it is considered acceptable that the -decorator is *not* applied automatically to any subclasses. If it is -desired that the behaviour be inherited, it is currently necessary to -make the step up to defining a `custom metaclass`_:: +This PEP proposes that a mechanism be added to Python 3 that meets the +following criteria: - class DynamicDecorators(type): - """Metaclass for dynamic decorator support +# Restores the ability for class namespaces to have some influence on the + class creation process (above and beyond populating the namespace itself), + but potentially without the full flexibility of the Python 2 style + ``__metaclass__`` hook +# Integrates nicely with class inheritance structures (including mixins and + multiple inheritance) +# Integrates nicely with the implicit ``__class__`` reference and + zero-argument ``super()`` syntax introduced by PEP 3135 +# Can be added to an existing base class without a significant risk of + introducing backwards compatibility problems - Creates the class normally, then runs through the MRO looking for - __decorators__ attributes and applying the contained decorators to - the newly created class - """ - def __new__(meta, name, bases, ns): - cls = super(DynamicDecorators, meta).__new__(meta, name, bases, ns) - for entry in reversed(cls.mro()): - decorators = entry.__dict__.get("__decorators__", ()) - for deco in reversed(decorators): - cls = deco(cls) +One mechanism that would achieve this goal is to add a new class +initialisation hook, modelled directly on the existing instance +initialisation hook. However, the signature would be constrained to ensure +that correctly supporting multiple inheritance is kept as simple as possible. + +Specifically, it is proposed that class definitions be able to provide a +class initialisation hook as follows:: + + class Example: + @classmethod + def __init_class__(cls): + # This is invoked after the class is created, but before any + # explicit decorators are called + # The usual super() mechanisms are used to correctly support + # multiple inheritance. The simple, decorator style invocation + # ensures that this is as simple as possible. + +If present on the created object, this new hook will be called by the class +creation machinery *after* the ``__class__`` reference has been initialised. +For ``types.new_class()``, it will be called as the last step before +returning the created class object. Calling the hook automatically from +``type.__init__`` unfortunately doesn't work, as it would mean the +``__init_class__`` method would be unable to call any methods that relied +on the ``__class__`` reference (or used the zero-argument form of +``super()``). + +If a metaclass wishes to block class initialisation for some reason, it +must arrange for ``cls.__init_class__`` to trigger ``AttributeError``. + +This general proposal is not a new idea (it was first suggested `more than +10 years ago`_), but I believe the situation has changed sufficiently in +that time that the idea is worth reconsidering. 
+ + +Key Benefits +============ + + +Replaces dynamic setting of ``__metaclass__`` +--------------------------------------------- + +For use cases that didn't involve completely replacing the defined class, +Python 2 code that dynamically set ``__metaclass__`` can now dynamically +set ``__init_class__`` instead. For more advanced use cases, introduction of +an explicit metaclass will still be necessary in order to support Python 3. + + +Easier inheritance of definition time behaviour +----------------------------------------------- + +Understanding Python's metaclass system requires a deep understanding of +the type system and the class construction process. This is legitimately +seen as confusing, due to the need to keep multiple moving parts (the code, +the metaclass hint, the actual metaclass, the class object, instances of the +class object) clearly distinct in your mind. + +Understanding the proposed class initialisation hook requires understanding +decorators and ordinary method inheritance, which is a much simpler prospect. + + +Reduced chance of metaclass conflicts +------------------------------------- + +One of the big issues that makes library authors reluctant to use metaclasses +(even when it would be appropriate) is the risk of metaclass conflicts. +These occur whenever two unrelated metaclasses are used by the desired +parents of a class definition. This risk also makes it very difficult to +*add* a metaclass to a class that has previously been published without one. + +By contrast, adding an ``__init_class__`` method to an existing type poses +a similar level of risk to adding an ``__init__`` method: technically, there +is a risk of breaking poorly implemented subclasses, but when that occurs, +it is recognised as a bug in the subclass rather than the library author +breaching backwards compatibility guarantees. In fact, due to the constrained +signature, the risk in this case is actually even lower than in the case of +``__init__``. + + +Integrates cleanly with PEP 3135 +-------------------------------- + +Unlike code that runs as part of the metaclass, code that runs as part of +the new hook will be able to freely invoke class methods that rely on the +implicit ``__class__`` reference introduced by PEP 3135, including methods +that use the zero argument form of ``super()``. + + +Alternatives +============ + + +The Python 3 Status Quo +----------------------- + +The Python 3 status quo already offers a great deal of flexibility. For +changes which only affect a single class definition and which can be +specified at the time the code is written, then class decorators can be +used to modify a class explicitly. Class decorators largely ignore class +inheritance and can make full use of methods that rely on the ``__class__`` +reference being populated. + +Using a custom metaclass provides the same level of power as it did in +Python 2. However, it's notable that, unlike class decorators, a metaclass +cannot call any methods that rely on the ``__class__`` reference, as that +reference is not populated until after the metaclass constructor returns +control to the class creation code. + +One major use case for metaclasses actually closely resembles the use of +class decorators. 
It occurs whenever a metaclass has an implementation that +uses the following pattern:: + + class Metaclass(type): + def __new__(meta, *args, **kwds): + cls = super(Metaclass, meta).__new__(meta, *args, **kwds) + # Do something with cls return cls - class Example(metaclass=DynamicDecorators): - # Subclasses *will* be decorated automatically - __decorators__ = [decorator] +The key difference between this pattern and a class decorator is that it +is automatically inherited by subclasses. However, it also comes with a +major disadvantage: Python does not allow you to inherit from classes with +unrelated metaclasses. -The main potential problem with this approach, is that it can place -significant constraints on the type heirarchy, as it requires that all -metaclasses used be well behaved with respect to multiple inheritance. +Thus, the status quo requires that developers choose between the following +two alternatives: -By making dynamic decorators an inherent part of the class creation process, -many current use cases of metaclasses may be replaced with dynamic decorators -instead, greatly reducing the likelihood of metaclass conflicts, as well -as being substantially easier to write correctly in the first place. +* Use a class decorator, meaning that behaviour is not inherited and must be + requested explicitly on every subclass +* Use a metaclass, meaning that behaviour is inherited, but metaclass + conflicts may make integration with other libraries and frameworks more + difficult than it otherwise would be +If this PEP is ultimately rejected, then this is the existing design that +will remain in place by default. -Design Discussion -================= +Restoring the Python 2 metaclass hook +------------------------------------- -Allowing metaclasses to override the dynamic decoration process ---------------------------------------------------------------- +One simple alternative would be to restore support for a Python 2 style +``metaclass`` hook in the class body. This would be checked after the class +body was executed, potentially overwriting the metaclass hint provided in the +class header. -This PEP does not provide a mechanism that allows metaclasses to override the -dynamic decoration process. If this feature is deemed desirable in the -future, then it can be added by moving the functionality described in -this PEP into a new method on the metaclass (for example, ``__decorate__``), -with ``type`` providing a suitable default implementation that matches -the behaviour described here. +The main attraction of such an approach is that it would simplify porting +Python 2 applications that make use of this hook (especially those that do +so dynamically). -This PEP chose the simplicity of the current approach, as lexical decorators -are currently outside the scope of metaclass control, so it seems reasonable -to pursue the simpler strategy in the absence of a solid use case for -making this behaviour configurable. +However, this approach does nothing to simplify the process of adding +*inherited* class definition time behaviour, nor does it interoperate +cleanly with the PEP 3135 ``__class__`` and ``super()`` semantics (as with +any metaclass based solution, the ``__metaclass__`` hook would have to run +before the ``__class__`` reference has been populated. 
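Before moving on to the remaining alternatives, the decorator-versus-metaclass
trade-off from "The Python 3 Status Quo" above can be made concrete with a small
runnable sketch (the names ``register`` and ``RegisteringMeta`` are purely
illustrative): the decorator must be repeated on every class that wants the
behaviour, while the metaclass version is inherited for free, at the price of the
conflict risk discussed above::

    registry = []

    def register(cls):
        # Explicit class decorator: applies only where it is written.
        registry.append(cls.__name__)
        return cls

    class RegisteringMeta(type):
        # Metaclass: applied implicitly to every subclass as well.
        def __new__(meta, name, bases, ns):
            cls = super().__new__(meta, name, bases, ns)
            registry.append(cls.__name__)
            return cls

    @register
    class A:
        pass

    class B(A):                  # not registered; decorators are not inherited
        pass

    class C(metaclass=RegisteringMeta):
        pass

    class D(C):                  # registered automatically via the metaclass
        pass

    print(registry)              # ['A', 'C', 'D']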
-Iterating over decorator entries in reverse order -------------------------------------------------- +Dynamic class decorators +------------------------ -This order was chosen to match the layout of lexical decorators when -converted to ordinary function calls. Just as the following are equivalent:: +The original version of this PEP was called "Dynamic class decorators" and +focused solely on a significantly more complicated proposal than that +presented in the current version. - @deco2 - @deco1 - class C: - pass +As with the current version, it proposed that a new step be added to the +class creation process, after the metaclass invocation to construct the +class instance and before the application of lexical decorators. However, +instead of a simple process of calling a single class method that relies +on normal inheritance mechanisms, it proposed a far more complicated +procedure that walked the class MRO looking for decorators stored in +iterable ``__decorators__`` attributes. - class C: - pass - C = deco2(deco1(C)) +Using the current version of the PEP, the scheme originally proposed could +be implemented as:: -So too will the following be roughly equivalent (aside from inheritance):: + class DynamicDecorators: + @classmethod + def __init_class__(cls): + super(DynamicDecorators, cls).__init_class__() + for entry in reversed(cls.mro()): + decorators = entry.__dict__.get("__decorators__", ()) + for deco in reversed(decorators): + cls = deco(cls) - class C: - __decorators__ = [deco2, deco1] +Any subclasses of this type would automatically have the contents of any +``__decorators__`` attributes processed and invoked. - class C: - pass - C = deco2(deco1(C)) +The mechanism in the current PEP is considered superior, as many issues +to do with ordering and the same decorator being invoked multiple times +simple go away, as that kind of thing is taken care of through the use of an +ordinary class method invocation. -Iterating over the MRO in reverse order ---------------------------------------- - -The order of iteration over the MRO for decorator application was chosen to -match the order of actual call *evaluation* when using ``super`` to invoke -parent class implementations: the first method to run to completion is that -closest to the base of the class hierarchy. - - References ========== -.. _custom metaclass: - https://bitbucket.org/ncoghlan/misc/src/default/pep422.py +.. _published code: + http://mail.python.org/pipermail/python-dev/2012-June/119878.html + +.. 
_more than 10 years ago: + http://mail.python.org/pipermail/python-dev/2001-November/018651.html Copyright -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Wed Jun 6 13:44:13 2012 From: python-checkins at python.org (nick.coghlan) Date: Wed, 06 Jun 2012 13:44:13 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Fix_numbered_bullet_points?= Message-ID: http://hg.python.org/peps/rev/c1bf52fbb073 changeset: 4453:c1bf52fbb073 user: Nick Coghlan date: Wed Jun 06 21:44:01 2012 +1000 summary: Fix numbered bullet points files: pep-0422.txt | 20 ++++++++++---------- 1 files changed, 10 insertions(+), 10 deletions(-) diff --git a/pep-0422.txt b/pep-0422.txt --- a/pep-0422.txt +++ b/pep-0422.txt @@ -79,16 +79,16 @@ This PEP proposes that a mechanism be added to Python 3 that meets the following criteria: -# Restores the ability for class namespaces to have some influence on the - class creation process (above and beyond populating the namespace itself), - but potentially without the full flexibility of the Python 2 style - ``__metaclass__`` hook -# Integrates nicely with class inheritance structures (including mixins and - multiple inheritance) -# Integrates nicely with the implicit ``__class__`` reference and - zero-argument ``super()`` syntax introduced by PEP 3135 -# Can be added to an existing base class without a significant risk of - introducing backwards compatibility problems +1. Restores the ability for class namespaces to have some influence on the + class creation process (above and beyond populating the namespace itself), + but potentially without the full flexibility of the Python 2 style + ``__metaclass__`` hook +2. Integrates nicely with class inheritance structures (including mixins and + multiple inheritance) +3. Integrates nicely with the implicit ``__class__`` reference and + zero-argument ``super()`` syntax introduced by PEP 3135 +4. Can be added to an existing base class without a significant risk of + introducing backwards compatibility problems One mechanism that would achieve this goal is to add a new class initialisation hook, modelled directly on the existing instance -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Wed Jun 6 13:45:35 2012 From: python-checkins at python.org (nick.coghlan) Date: Wed, 06 Jun 2012 13:45:35 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Reword_a_confusing_sentence?= Message-ID: http://hg.python.org/peps/rev/6858011a1c28 changeset: 4454:6858011a1c28 user: Nick Coghlan date: Wed Jun 06 21:45:26 2012 +1000 summary: Reword a confusing sentence files: pep-0422.txt | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pep-0422.txt b/pep-0422.txt --- a/pep-0422.txt +++ b/pep-0422.txt @@ -104,8 +104,8 @@ # This is invoked after the class is created, but before any # explicit decorators are called # The usual super() mechanisms are used to correctly support - # multiple inheritance. The simple, decorator style invocation - # ensures that this is as simple as possible. + # multiple inheritance. The decorator style invocation helps + # ensure that invoking the parent class is as simple as possible. If present on the created object, this new hook will be called by the class creation machinery *after* the ``__class__`` reference has been initialised. 
-- Repository URL: http://hg.python.org/peps From python-checkins at python.org Wed Jun 6 13:50:01 2012 From: python-checkins at python.org (nick.coghlan) Date: Wed, 06 Jun 2012 13:50:01 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Eliminate_a_typo=2E_Also_wonde?= =?utf8?q?r_how_many_times_I_can_use_the_word_=27simple=27_or_a?= Message-ID: http://hg.python.org/peps/rev/2d3a63f8cd3f changeset: 4455:2d3a63f8cd3f user: Nick Coghlan date: Wed Jun 06 21:49:50 2012 +1000 summary: Eliminate a typo. Also wonder how many times I can use the word 'simple' or a derivative in one PEP. files: pep-0422.txt | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/pep-0422.txt b/pep-0422.txt --- a/pep-0422.txt +++ b/pep-0422.txt @@ -276,7 +276,7 @@ The mechanism in the current PEP is considered superior, as many issues to do with ordering and the same decorator being invoked multiple times -simple go away, as that kind of thing is taken care of through the use of an +just go away, as that kind of thing is taken care of through the use of an ordinary class method invocation. -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Wed Jun 6 15:58:58 2012 From: python-checkins at python.org (stefan.krah) Date: Wed, 06 Jun 2012 15:58:58 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_1=29_Add_error_analysis_com?= =?utf8?b?bWVudHMgdG8gbXBkX3FsbjEwKCkgYW5kIF9tcGRfcWxuKCku?= Message-ID: http://hg.python.org/cpython/rev/36cd1cf5a160 changeset: 77367:36cd1cf5a160 user: Stefan Krah date: Wed Jun 06 15:57:18 2012 +0200 summary: 1) Add error analysis comments to mpd_qln10() and _mpd_qln(). 2) Simplify the precision adjustment code for values in [0.900, 1.15]. files: Modules/_decimal/libmpdec/mpdecimal.c | 131 ++++++++++--- 1 files changed, 98 insertions(+), 33 deletions(-) diff --git a/Modules/_decimal/libmpdec/mpdecimal.c b/Modules/_decimal/libmpdec/mpdecimal.c --- a/Modules/_decimal/libmpdec/mpdecimal.c +++ b/Modules/_decimal/libmpdec/mpdecimal.c @@ -4212,6 +4212,18 @@ *status |= workstatus; } +/* + * Schedule the optimal precision increase for the Newton iteration. + * v := input operand + * z_0 := initial approximation + * initprec := natural number such that abs(log(v) - z_0) < 10**-initprec + * maxprec := target precision + * + * For convenience the output klist contains the elements in reverse order: + * klist := [k_n-1, ..., k_0], where + * 1) k_0 <= initprec and + * 2) abs(log(v) - result) < 10**(-2*k_n-1 + 1) <= 10**-maxprec. + */ static inline int ln_schedule_prec(mpd_ssize_t klist[MPD_MAX_PREC_LOG2], mpd_ssize_t maxprec, mpd_ssize_t initprec) @@ -4231,6 +4243,7 @@ return i-1; } +/* The constants have been verified with both decimal.py and mpfr. */ #ifdef CONFIG_64 #if MPD_RDIGITS != 19 #error "mpdecimal.c: MPD_RDIGITS must be 19." @@ -4285,7 +4298,7 @@ (mpd_uint_t *)mpd_ln10_data }; -/* Set 'result' to ln(10), with 'prec' digits, using ROUND_HALF_EVEN. */ +/* Set 'result' to ln(10). ulp error: abs(result - log(10)) < ulp(log(10)) */ void mpd_qln10(mpd_t *result, mpd_ssize_t prec, uint32_t *status) { @@ -4320,7 +4333,7 @@ mpd_maxcontext(&varcontext); varcontext.round = MPD_ROUND_TRUNC; - i = ln_schedule_prec(klist, prec+2, result->digits); + i = ln_schedule_prec(klist, prec+2, -result->exp); for (; i >= 0; i--) { varcontext.prec = 2*klist[i]+3; result->flags ^= MPD_NEG; @@ -4339,7 +4352,18 @@ mpd_qfinalize(result, &maxcontext, status); } -/* Initial approximations for the ln() iteration */ +/* + * Initial approximations for the ln() iteration. 
The values have the + * following properties (established with both decimal.py and mpfr): + * + * Index 0 - 400, logarithms of x in [1.00, 5.00]: + * abs(lnapprox[i] * 10**-3 - log((i+100)/100)) < 10**-2 + * abs(lnapprox[i] * 10**-3 - log((i+1+100)/100)) < 10**-2 + * + * Index 401 - 899, logarithms of x in (0.500, 0.999]: + * abs(-lnapprox[i] * 10**-3 - log((i+100)/1000)) < 10**-2 + * abs(-lnapprox[i] * 10**-3 - log((i+1+100)/1000)) < 10**-2 + */ static const uint16_t lnapprox[900] = { /* index 0 - 400: log((i+100)/100) * 1000 */ 0, 10, 20, 30, 39, 49, 58, 68, 77, 86, 95, 104, 113, 122, 131, 140, 148, 157, @@ -4406,7 +4430,10 @@ 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1 }; -/* Internal ln() function that does not check for specials, zero or one. */ +/* + * Internal ln() function that does not check for specials, zero or one. + * Relative error: abs(result - log(a)) < 0.1 * 10**-prec * abs(log(a)) + */ static void _mpd_qln(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx, uint32_t *status) @@ -4451,10 +4478,16 @@ mpd_setdigits(z); if (x <= 400) { + /* Reduce the input operand to 1.00 <= v <= 5.00. Let y = x + 100, + * so 100 <= y <= 500. Since y contains the most significant digits + * of v, y/100 <= v < (y+1)/100 and abs(z - log(v)) < 10**-2. */ v.exp = -(a_digits - 1); t = a_exp + a_digits - 1; } else { + /* Reduce the input operand to 0.500 < v <= 0.999. Let y = x + 100, + * so 500 < y <= 999. Since y contains the most significant digits + * of v, y/1000 <= v < (y+1)/1000 and abs(z - log(v)) < 10**-2. */ v.exp = -a_digits; t = a_exp + a_digits; mpd_set_negative(z); @@ -4465,37 +4498,46 @@ varcontext.round = MPD_ROUND_TRUNC; maxprec = ctx->prec + 2; - if (x <= 10 || x >= 805) { - /* v is close to 1: Estimate the magnitude of the logarithm. - * If v = 1 or ln(v) will underflow, skip the loop. Otherwise, - * adjust the precision upwards in order to obtain a sufficient - * number of significant digits. + if (t == 0 && (x <= 15 || x >= 800)) { + /* 0.900 <= v <= 1.15: Estimate the magnitude of the logarithm. + * If ln(v) will underflow, skip the loop. Otherwise, adjust the + * precision upwards in order to obtain a sufficient number of + * significant digits. 
* - * 1) x/(1+x) < ln(1+x) < x, for x > -1, x != 0 - * - * 2) (v-1)/v < ln(v) < v-1 + * Case v > 1: + * abs((v-1)/10) < abs((v-1)/v) < abs(ln(v)) < abs(v-1) + * Case v < 1: + * abs(v-1) < abs(ln(v)) < abs((v-1)/v) < abs((v-1)*10) */ - mpd_t *lower = &tmp; - mpd_t *upper = &vtmp; int cmp = _mpd_cmp(&v, &one); - varcontext.round = MPD_ROUND_CEILING; - varcontext.prec = maxprec; - mpd_qsub(upper, &v, &one, &varcontext, &varcontext.status); - varcontext.round = MPD_ROUND_FLOOR; - mpd_qdiv(lower, upper, &v, &varcontext, &varcontext.status); - varcontext.round = MPD_ROUND_TRUNC; + /* Upper bound (assume v > 1): abs(v-1), unrounded */ + _mpd_qsub(&tmp, &v, &one, &maxcontext, &maxcontext.status); + if (maxcontext.status & MPD_Errors) { + mpd_seterror(result, MPD_Malloc_error, status); + goto finish; + } if (cmp < 0) { - _mpd_ptrswap(&upper, &lower); - } - if (mpd_adjexp(upper) < mpd_etiny(ctx)) { - _settriple(z, (cmp<0), 1, mpd_etiny(ctx)-1); - goto postloop; - } - /* XXX optimization: t == 0 && mpd_adjexp(lower) < 0 */ - if (mpd_adjexp(lower) < 0) { - maxprec = maxprec - mpd_adjexp(lower); + /* v < 1: abs((v-1)*10) */ + tmp.exp += 1; + } + if (mpd_adjexp(&tmp) < mpd_etiny(ctx)) { + /* The upper bound is less than etiny: Underflow to zero */ + _settriple(result, (cmp<0), 1, mpd_etiny(ctx)-1); + goto finish; + } + /* Lower bound: abs((v-1)/10) or abs(v-1) */ + tmp.exp -= 1; + if (mpd_adjexp(&tmp) < 0) { + /* Absolute error of the loop: abs(z - log(v)) < 10**-p. If + * p = ctx->prec+2-adjexp(lower), then the relative error of + * the result is (using 10**adjexp(x) <= abs(x)): + * + * abs(z - log(v)) / abs(log(v)) < 10**-p / abs(log(v)) + * <= 10**(-ctx->prec-2) + */ + maxprec = maxprec - mpd_adjexp(&tmp); } } @@ -4523,14 +4565,37 @@ } } -postloop: - mpd_qln10(&v, maxprec+2, status); + /* + * Case t == 0: + * t * log(10) == 0, the result does not change and the analysis + * above applies. If v < 0.900 or v > 1.15, the relative error is + * less than 10**(-ctx.prec-1). 
+ * Case t != 0: + * z := approx(log(v)) + * y := approx(log(10)) + * p := maxprec = ctx->prec + 2 + * Absolute errors: + * 1) abs(z - log(v)) < 10**-p + * 2) abs(y - log(10)) < 10**-p + * The multiplication is exact, so: + * 3) abs(t*y - t*log(10)) < t*10**-p + * The sum is exact, so: + * 4) abs((z + t*y) - (log(v) + t*log(10))) < (abs(t) + 1) * 10**-p + * Bounds for log(v) and log(10): + * 5) -7/10 < log(v) < 17/10 + * 6) 23/10 < log(10) < 24/10 + * Using 4), 5), 6) and t != 0, the relative error is: + * + * 7) relerr < ((abs(t) + 1)*10**-p) / abs(log(v) + t*log(10)) + * < 0.5 * 10**(-p + 1) = 0.5 * 10**(-ctx->prec-1) + */ + mpd_qln10(&v, maxprec+1, status); mpd_qmul_ssize(&tmp, &v, t, &maxcontext, status); - varcontext.prec = maxprec+2; - mpd_qadd(result, &tmp, z, &varcontext, status); + mpd_qadd(result, &tmp, z, &maxcontext, status); finish: + *status |= (MPD_Inexact|MPD_Rounded); mpd_del(&v); mpd_del(&vtmp); mpd_del(&tmp); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Jun 6 23:59:43 2012 From: python-checkins at python.org (kristjan.jonsson) Date: Wed, 06 Jun 2012 23:59:43 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Rearrange_code_to_beat_an_o?= =?utf8?q?ptimizer_bug_affecting_Release_x64_on_windows?= Message-ID: http://hg.python.org/cpython/rev/e1b950cb6b74 changeset: 77368:e1b950cb6b74 user: Kristj?n Valur J?nsson date: Wed Jun 06 21:58:08 2012 +0000 summary: Rearrange code to beat an optimizer bug affecting Release x64 on windows with VS2010sp1 files: Objects/unicodeobject.c | 22 ++++++++++------------ 1 files changed, 10 insertions(+), 12 deletions(-) diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -12038,16 +12038,23 @@ (categories Z* and C* except ASCII space) */ if (!Py_UNICODE_ISPRINTABLE(ch)) { + PyUnicode_WRITE(okind, odata, o++, '\\'); /* Map 8-bit characters to '\xhh' */ if (ch <= 0xff) { - PyUnicode_WRITE(okind, odata, o++, '\\'); PyUnicode_WRITE(okind, odata, o++, 'x'); PyUnicode_WRITE(okind, odata, o++, Py_hexdigits[(ch >> 4) & 0x000F]); PyUnicode_WRITE(okind, odata, o++, Py_hexdigits[ch & 0x000F]); } + /* Map 16-bit characters to '\uxxxx' */ + else if (ch <= 0xffff) { + PyUnicode_WRITE(okind, odata, o++, 'u'); + PyUnicode_WRITE(okind, odata, o++, Py_hexdigits[(ch >> 12) & 0xF]); + PyUnicode_WRITE(okind, odata, o++, Py_hexdigits[(ch >> 8) & 0xF]); + PyUnicode_WRITE(okind, odata, o++, Py_hexdigits[(ch >> 4) & 0xF]); + PyUnicode_WRITE(okind, odata, o++, Py_hexdigits[ch & 0xF]); + } /* Map 21-bit characters to '\U00xxxxxx' */ - else if (ch >= 0x10000) { - PyUnicode_WRITE(okind, odata, o++, '\\'); + else { PyUnicode_WRITE(okind, odata, o++, 'U'); PyUnicode_WRITE(okind, odata, o++, Py_hexdigits[(ch >> 28) & 0xF]); PyUnicode_WRITE(okind, odata, o++, Py_hexdigits[(ch >> 24) & 0xF]); @@ -12058,15 +12065,6 @@ PyUnicode_WRITE(okind, odata, o++, Py_hexdigits[(ch >> 4) & 0xF]); PyUnicode_WRITE(okind, odata, o++, Py_hexdigits[ch & 0xF]); } - /* Map 16-bit characters to '\uxxxx' */ - else { - PyUnicode_WRITE(okind, odata, o++, '\\'); - PyUnicode_WRITE(okind, odata, o++, 'u'); - PyUnicode_WRITE(okind, odata, o++, Py_hexdigits[(ch >> 12) & 0xF]); - PyUnicode_WRITE(okind, odata, o++, Py_hexdigits[(ch >> 8) & 0xF]); - PyUnicode_WRITE(okind, odata, o++, Py_hexdigits[(ch >> 4) & 0xF]); - PyUnicode_WRITE(okind, odata, o++, Py_hexdigits[ch & 0xF]); - } } /* Copy characters as-is */ else { -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net 
Thu Jun 7 05:48:12 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Thu, 07 Jun 2012 05:48:12 +0200 Subject: [Python-checkins] Daily reference leaks (e1b950cb6b74): sum=-1 Message-ID: results for e1b950cb6b74 on branch "default" -------------------------------------------- test_exceptions leaked [0, -1, 0] references, sum=-1 Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogGc3eIB', '-x'] From python-checkins at python.org Thu Jun 7 14:08:54 2012 From: python-checkins at python.org (nick.coghlan) Date: Thu, 07 Jun 2012 14:08:54 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Update_422_based_on_python-dev?= =?utf8?q?_feedback?= Message-ID: http://hg.python.org/peps/rev/d65ae0ea8b46 changeset: 4456:d65ae0ea8b46 user: Nick Coghlan date: Thu Jun 07 22:08:41 2012 +1000 summary: Update 422 based on python-dev feedback files: pep-0422.txt | 97 ++++++++++++++++++++++++++++++--------- 1 files changed, 73 insertions(+), 24 deletions(-) diff --git a/pep-0422.txt b/pep-0422.txt --- a/pep-0422.txt +++ b/pep-0422.txt @@ -44,7 +44,7 @@ While in many cases these two meanings end up referring to one and the same object, there are two situations where that is not the case: -* If the metaclass hint refers to an instance of ``type``, then it is +* If the metaclass hint refers to a subclass of ``type``, then it is considered as a candidate metaclass along with the metaclasses of all of the parents of the class being defined. If a more appropriate metaclass is found amongst the candidates, then it will be used instead of the one @@ -72,6 +72,16 @@ class namespace to use as the metaclass hint. There is `published code`_ that makes use of this feature. +Another new feature in Python 3 is the zero-argument form of the ``super()`` +builtin, introduced by PEP 3135. This feature uses an implicit ``__class__`` +reference to the class being defined to replace the "by name" references +required in Python 2. Just as code invoked during execution of a Python 2 +metaclass could not call methods that referenced the class by name (as the +name had not yet been bound in the containing scope), similarly, Python 3 +metaclasses cannot call methods that rely on the implicit ``__class__`` +reference (as it is not populated until after the metaclass has returned +control to the class creation machiner). + Proposal ======== @@ -90,10 +100,10 @@ 4. Can be added to an existing base class without a significant risk of introducing backwards compatibility problems -One mechanism that would achieve this goal is to add a new class +One mechanism that can achieve this goal is to add a new class initialisation hook, modelled directly on the existing instance -initialisation hook. However, the signature would be constrained to ensure -that correctly supporting multiple inheritance is kept as simple as possible. +initialisation hook, but with the signature constrained to match that +of an ordinary class decorator. Specifically, it is proposed that class definitions be able to provide a class initialisation hook as follows:: @@ -110,51 +120,57 @@ If present on the created object, this new hook will be called by the class creation machinery *after* the ``__class__`` reference has been initialised. For ``types.new_class()``, it will be called as the last step before -returning the created class object. 
Calling the hook automatically from -``type.__init__`` unfortunately doesn't work, as it would mean the -``__init_class__`` method would be unable to call any methods that relied -on the ``__class__`` reference (or used the zero-argument form of -``super()``). +returning the created class object. If a metaclass wishes to block class initialisation for some reason, it must arrange for ``cls.__init_class__`` to trigger ``AttributeError``. -This general proposal is not a new idea (it was first suggested `more than -10 years ago`_), but I believe the situation has changed sufficiently in -that time that the idea is worth reconsidering. +This general proposal is not a new idea (it was first suggested for +inclusion in the language definition `more than 10 years ago`_, and a +similar mechanism has long been supported by `Zope's ExtensionClass`_), +but I believe the situation has changed sufficiently in recent years that +the idea is worth reconsidering. Key Benefits ============ -Replaces dynamic setting of ``__metaclass__`` ---------------------------------------------- +Replaces many use cases for dynamic setting of ``__metaclass__`` +----------------------------------------------------------------- -For use cases that didn't involve completely replacing the defined class, +For use cases that don't involve completely replacing the defined class, Python 2 code that dynamically set ``__metaclass__`` can now dynamically set ``__init_class__`` instead. For more advanced use cases, introduction of -an explicit metaclass will still be necessary in order to support Python 3. +an explicit metaclass (possibly made available as a required base class) will +still be necessary in order to support Python 3. Easier inheritance of definition time behaviour ----------------------------------------------- -Understanding Python's metaclass system requires a deep understanding of +Understanding Python's metaclasses requires a deep understanding of the type system and the class construction process. This is legitimately -seen as confusing, due to the need to keep multiple moving parts (the code, +seen as challenging, due to the need to keep multiple moving parts (the code, the metaclass hint, the actual metaclass, the class object, instances of the -class object) clearly distinct in your mind. +class object) clearly distinct in your mind. Even when you know the rules, +it's still easy to make a mistake if you're not being extremely careful. +An earlier version of this PEP actually included such a mistake: it +stated "instance of type" for a constraint that is actually "subclass of +type". -Understanding the proposed class initialisation hook requires understanding -decorators and ordinary method inheritance, which is a much simpler prospect. +Understanding the proposed class initialisation hook only requires +understanding decorators and ordinary method inheritance, which isn't +quite as daunting a task. The new hook provides a more gradual path +towards understanding all of the phases involved in the class definition +process. Reduced chance of metaclass conflicts ------------------------------------- One of the big issues that makes library authors reluctant to use metaclasses -(even when it would be appropriate) is the risk of metaclass conflicts. +(even when they would be appropriate) is the risk of metaclass conflicts. These occur whenever two unrelated metaclasses are used by the desired parents of a class definition. 
This risk also makes it very difficult to *add* a metaclass to a class that has previously been published without one. @@ -164,12 +180,12 @@ is a risk of breaking poorly implemented subclasses, but when that occurs, it is recognised as a bug in the subclass rather than the library author breaching backwards compatibility guarantees. In fact, due to the constrained -signature, the risk in this case is actually even lower than in the case of -``__init__``. +signature of ``__init_class__``, the risk in this case is actually even +lower than in the case of ``__init__``. -Integrates cleanly with PEP 3135 --------------------------------- +Integrates cleanly with \PEP 3135 +--------------------------------- Unlike code that runs as part of the metaclass, code that runs as part of the new hook will be able to freely invoke class methods that rely on the @@ -280,6 +296,35 @@ ordinary class method invocation. +Automatic metaclass derivation +------------------------------ + +When no appropriate metaclass is found, it's theoretically possible to +automatically derive a metaclass for a new type based on the metaclass hint +and the metaclasses of the bases. + +While adding such a mechanism would reduce the risk of spurious metaclass +conflicts, it would do nothing to improve integration with PEP 3135, would +not help with porting Python 2 code that set ``__metaclass__`` dynamically +and would not provide a more straightforward inherited mechanism for invoking +additional operations after the class invocation is complete. + +In addition, there would still be a risk of metaclass conflicts in cases +where the base metaclasses were not written with multiple inheritance in +mind. In such situations, there's a chance of introducing latent defects +if one or more metaclasses are not invoked correctly. + + +Calling the new hook from ``type.__init__`` +------------------------------------------- + +Calling the new hook automatically from ``type.__init__``, would achieve most +of the goals of this PEP. However, using that approach would mean that +``__init_class__`` implementations would be unable to call any methods that +relied on the ``__class__`` reference (or used the zero-argument form of +``super()``), and could not make use of those features themselves. + + References ========== @@ -289,6 +334,8 @@ .. _more than 10 years ago: http://mail.python.org/pipermail/python-dev/2001-November/018651.html +.. _Zope's ExtensionClass: + http://docs.zope.org/zope_secrets/extensionclass.html Copyright ========= -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Thu Jun 7 14:42:47 2012 From: python-checkins at python.org (nick.coghlan) Date: Thu, 07 Jun 2012 14:42:47 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Nudge_readers_t?= =?utf8?q?owards_a_more_accurate_mental_model_for_loop_else_clauses?= Message-ID: http://hg.python.org/cpython/rev/6e4ec47fba6a changeset: 77369:6e4ec47fba6a branch: 3.2 parent: 77363:aa9cfeea07ad user: Nick Coghlan date: Thu Jun 07 22:41:34 2012 +1000 summary: Nudge readers towards a more accurate mental model for loop else clauses files: Doc/tutorial/controlflow.rst | 7 +++++++ 1 files changed, 7 insertions(+), 0 deletions(-) diff --git a/Doc/tutorial/controlflow.rst b/Doc/tutorial/controlflow.rst --- a/Doc/tutorial/controlflow.rst +++ b/Doc/tutorial/controlflow.rst @@ -187,6 +187,13 @@ (Yes, this is the correct code. Look closely: the ``else`` clause belongs to the :keyword:`for` loop, **not** the :keyword:`if` statement.) 
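The example the parenthetical above refers to is the tutorial's prime-finding
loop; a minimal reconstruction (not part of this patch) shows the behaviour the
added paragraph below goes on to describe::

    for n in range(2, 10):
        for x in range(2, n):
            if n % x == 0:
                print(n, 'equals', x, '*', n // x)
                break
        else:
            # The inner loop finished without hitting break: no factor found.
            print(n, 'is a prime number')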
+When used with a loop, the ``else`` clause has more in common with the +``else`` clause of a :keyword:`try` statement than it does that of +:keyword:`if` statements: a :keyword:`try` statement's ``else`` clause runs +when no exception occurs, and a loop's ``else`` clause runs when no ``break`` +occurs. For more on the :keyword:`try` statement and exceptions, see +:ref:`tut-handling`. + .. _tut-pass: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 7 14:42:47 2012 From: python-checkins at python.org (nick.coghlan) Date: Thu, 07 Jun 2012 14:42:47 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_from_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/cedc68440a67 changeset: 77370:cedc68440a67 parent: 77368:e1b950cb6b74 parent: 77369:6e4ec47fba6a user: Nick Coghlan date: Thu Jun 07 22:42:29 2012 +1000 summary: Merge from 3.2 files: Doc/tutorial/controlflow.rst | 7 +++++++ 1 files changed, 7 insertions(+), 0 deletions(-) diff --git a/Doc/tutorial/controlflow.rst b/Doc/tutorial/controlflow.rst --- a/Doc/tutorial/controlflow.rst +++ b/Doc/tutorial/controlflow.rst @@ -187,6 +187,13 @@ (Yes, this is the correct code. Look closely: the ``else`` clause belongs to the :keyword:`for` loop, **not** the :keyword:`if` statement.) +When used with a loop, the ``else`` clause has more in common with the +``else`` clause of a :keyword:`try` statement than it does that of +:keyword:`if` statements: a :keyword:`try` statement's ``else`` clause runs +when no exception occurs, and a loop's ``else`` clause runs when no ``break`` +occurs. For more on the :keyword:`try` statement and exceptions, see +:ref:`tut-handling`. + .. _tut-pass: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 7 14:58:13 2012 From: python-checkins at python.org (nick.coghlan) Date: Thu, 07 Jun 2012 14:58:13 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Nudge_readers_t?= =?utf8?q?owards_a_more_accurate_mental_model_for_loop_else_clauses?= Message-ID: http://hg.python.org/cpython/rev/d79c837b6bf3 changeset: 77371:d79c837b6bf3 branch: 2.7 parent: 77315:477508efe4ab user: Nick Coghlan date: Thu Jun 07 22:57:35 2012 +1000 summary: Nudge readers towards a more accurate mental model for loop else clauses (Backport from 3.x) files: Doc/tutorial/controlflow.rst | 7 +++++++ 1 files changed, 7 insertions(+), 0 deletions(-) diff --git a/Doc/tutorial/controlflow.rst b/Doc/tutorial/controlflow.rst --- a/Doc/tutorial/controlflow.rst +++ b/Doc/tutorial/controlflow.rst @@ -159,6 +159,13 @@ (Yes, this is the correct code. Look closely: the ``else`` clause belongs to the :keyword:`for` loop, **not** the :keyword:`if` statement.) +When used with a loop, the ``else`` clause has more in common with the +``else`` clause of a :keyword:`try` statement than it does that of +:keyword:`if` statements: a :keyword:`try` statement's ``else`` clause runs +when no exception occurs, and a loop's ``else`` clause runs when no ``break`` +occurs. For more on the :keyword:`try` statement and exceptions, see +:ref:`tut-handling`. + .. 
_tut-pass: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 7 16:18:14 2012 From: python-checkins at python.org (brett.cannon) Date: Thu, 07 Jun 2012 16:18:14 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Update_from_Yury_for_PEP_362?= =?utf8?q?=2E?= Message-ID: http://hg.python.org/peps/rev/f723e9fb778b changeset: 4457:f723e9fb778b user: Brett Cannon date: Thu Jun 07 10:18:08 2012 -0400 summary: Update from Yury for PEP 362. files: pep-0362.txt | 97 ++++++++++++++++++++++++--------------- 1 files changed, 60 insertions(+), 37 deletions(-) diff --git a/pep-0362.txt b/pep-0362.txt --- a/pep-0362.txt +++ b/pep-0362.txt @@ -55,10 +55,8 @@ as listed in ``code.co_varnames``). * bind(\*args, \*\*kwargs) -> BoundArguments Creates a mapping from positional and keyword arguments to - parameters. - -Once a Signature object is created for a particular function, -it's cached in the ``__signature__`` attribute of that function. + parameters. Raises a ``BindError`` if the passed arguments + do not match the signature. Changes to the Signature object, or to any of its data members, do not affect the function itself. @@ -86,19 +84,25 @@ True if the parameter is keyword-only, else False. * is_args : bool True if the parameter accepts variable number of arguments - (``\*args``-like), else False. + (``*args``-like), else False. * is_kwargs : bool True if the parameter accepts variable number of keyword - arguments (``\*\*kwargs``-like), else False. + arguments (``**kwargs``-like), else False. * is_implemented : bool True if the parameter is implemented for use. Some platforms implement functions but can't support specific parameters - (e.g. "mode" for os.mkdir). Passing in an unimplemented + (e.g. "mode" for ``os.mkdir``). Passing in an unimplemented parameter may result in the parameter being ignored, or in NotImplementedError being raised. It is intended that all conditions where ``is_implemented`` may be False be thoroughly documented. +Parameter objects support testing for equality. Two Parameter +objects are equal, when all their properties are equal. Those +who need to test if one signature has the same parameters as +another, can do a direct comparison of ``Signature.parameters`` +collections: ``signature(foo).parameters == signature(bar).parameters``. + BoundArguments Object ===================== @@ -135,16 +139,58 @@ Implementation ============== +The implementation adds a new function ``signature()`` to the ``inspect`` +module. The function is the preferred way of getting a ``Signature`` for +a callable object. + +The function implements the following algorithm: + + - If the object is not callable - raise a TypeError + + - If the object has a ``__signature__`` attribute and if it + is not ``None`` - return it + + - If it is ``None`` and the object is an instance of + ``BuiltinFunction``, raise a ``ValueError`` + + - If the object is a an instance of ``FunctionType``: + + - If it has a ``__wrapped__`` attribute, return + ``signature(object.__wrapped__)`` + + - Or else construct a new ``Signature`` object and return it + + - if the object is a method, construct and return a new ``Signature`` + object, with its first parameter (usually ``self``) removed + + - If the object is a class return ``signature(object.__init__)`` + + - Return ``signature(object.__call__)`` + +Note, that the ``Signature`` object is created in a lazy manner, and +is not automatically cached. + An implementation for Python 3.3 can be found here: [#impl]_. 
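(The draft names quoted above, such as ``is_args``, ``is_implemented`` and
``BindError``, were still in flux at this point and differ in places from what
finally shipped.) As a rough illustration of the concepts using the
``inspect.signature()`` API that did land, assuming Python 3.3 or later and a
purely hypothetical example function::

    import inspect

    def frobnicate(spam, eggs=0, *args, ham, **kwargs):
        "Hypothetical function, used only for illustration."

    sig = inspect.signature(frobnicate)
    print(sig)                       # (spam, eggs=0, *args, ham, **kwargs)
    print(list(sig.parameters))      # ['spam', 'eggs', 'args', 'ham', 'kwargs']

    bound = sig.bind(1, 2, ham='x')  # map call arguments onto parameters
    print(dict(bound.arguments))     # {'spam': 1, 'eggs': 2, 'ham': 'x'}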
A python issue was also created: [#issue]_. -The implementation adds a new function ``signature()`` to the -``inspect`` module. ``signature()`` returns the value stored -on the ``__signature__`` attribute if it exists, otherwise it -creates the Signature object for the function and caches it in -the function's ``__signature__``. (For methods this is stored -directly in the ``__func__`` function object, since that is what -decorators work with.) + +Design Considerations +===================== + +No Implicit Caching of Signature Objects +---------------------------------------- + +The first PEP design had a provision for implicit caching of ``Signature`` +objects in the ``inspect.signature()`` function. However, this has the +following downsides: + + * If the ``Signature`` object is cached then any changes to the function + it describes will not be reflected in it. However, If the caching is + needed, it can be always done manually and explicitly + + * It is better to reserve the ``__signature__`` attribute for the cases + when there is a need to explicitly set to a ``Signature`` object that + is different from the actual one Examples @@ -311,31 +357,6 @@ return wrapper -Open Issues -=========== - -When to construct the Signature object? ---------------------------------------- - -The Signature object can either be created in an eager or lazy -fashion. In the eager situation, the object can be created during -creation of the function object. In the lazy situation, one would -pass a function object to a function and that would generate the -Signature object and store it to ``__signature__`` if -needed, and then return the value of ``__signature__``. - -In the current implementation, signatures are created only on demand -("lazy"). - - -Deprecate ``inspect.getfullargspec()`` and ``inspect.getcallargs()``? ---------------------------------------------------------------------- - -Since the Signature object replicates the use of ``getfullargspec()`` -and ``getcallargs()`` from the ``inspect`` module it might make sense -to begin deprecating them in 3.3. - - References ========== -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Thu Jun 7 16:25:43 2012 From: python-checkins at python.org (brett.cannon) Date: Thu, 07 Jun 2012 16:25:43 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Another_update_from_Yury_for_P?= =?utf8?q?EP_362=2E?= Message-ID: http://hg.python.org/peps/rev/13f360c76e50 changeset: 4458:13f360c76e50 user: Brett Cannon date: Thu Jun 07 10:25:42 2012 -0400 summary: Another update from Yury for PEP 362. 
files: pep-0362.txt | 9 +++++++-- 1 files changed, 7 insertions(+), 2 deletions(-) diff --git a/pep-0362.txt b/pep-0362.txt --- a/pep-0362.txt +++ b/pep-0362.txt @@ -160,11 +160,16 @@ - Or else construct a new ``Signature`` object and return it - - if the object is a method, construct and return a new ``Signature`` - object, with its first parameter (usually ``self``) removed + - if the object is a method or a classmethod, construct and return + a new ``Signature`` object, with its first parameter (usually + ``self`` or ``cls``) removed - If the object is a class return ``signature(object.__init__)`` + - If the object is an instance of ``functools.partial``, construct + a new ``Signature`` from its ``partial.func`` attribute, and + account for already bound ``partial.args`` and ``partial.kwargs`` + - Return ``signature(object.__call__)`` Note, that the ``Signature`` object is created in a lazy manner, and -- Repository URL: http://hg.python.org/peps From urban.dani+py at gmail.com Thu Jun 7 17:45:29 2012 From: urban.dani+py at gmail.com (Daniel Urban) Date: Thu, 7 Jun 2012 17:45:29 +0200 Subject: [Python-checkins] peps: Update 422 based on python-dev feedback In-Reply-To: References: Message-ID: On Thu, Jun 7, 2012 at 2:08 PM, nick.coghlan wrote: > -* If the metaclass hint refers to an instance of ``type``, then it is > +* If the metaclass hint refers to a subclass of ``type``, then it is > ? considered as a candidate metaclass along with the metaclasses of all of > ? the parents of the class being defined. If a more appropriate metaclass is > ? found amongst the candidates, then it will be used instead of the one I think here "instance" was correct (see http://hg.python.org/cpython/file/default/Lib/types.py#l76 and http://hg.python.org/cpython/file/cedc68440a67/Python/bltinmodule.c#l90). Daniel From python-checkins at python.org Thu Jun 7 17:50:40 2012 From: python-checkins at python.org (stefan.krah) Date: Thu, 07 Jun 2012 17:50:40 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_1=29_The_overflow_detection?= =?utf8?q?_in_mpd=5Fqln=28=29_has_a_surprising_number_of_case_splits=2E?= Message-ID: http://hg.python.org/cpython/rev/5588fe6874fa changeset: 77372:5588fe6874fa parent: 77370:cedc68440a67 user: Stefan Krah date: Thu Jun 07 17:48:47 2012 +0200 summary: 1) The overflow detection in mpd_qln() has a surprising number of case splits. List all of them in the comment. 2) Use the recently stated relative error of _mpd_qln() to generate the interval for the exact value of ln(x). See also the comment in mpd_qexp(). files: Modules/_decimal/libmpdec/mpdecimal.c | 26 ++++++++++---- 1 files changed, 19 insertions(+), 7 deletions(-) diff --git a/Modules/_decimal/libmpdec/mpdecimal.c b/Modules/_decimal/libmpdec/mpdecimal.c --- a/Modules/_decimal/libmpdec/mpdecimal.c +++ b/Modules/_decimal/libmpdec/mpdecimal.c @@ -4632,14 +4632,26 @@ _settriple(result, MPD_POS, 0, 0); return; } - /* Check if the result will overflow. 
+ /* + * Check if the result will overflow (0 < x, x != 1): + * 1) log10(x) < 0 iff adjexp(x) < 0 + * 2) 0 < x /\ x <= y ==> adjexp(x) <= adjexp(y) + * 3) 0 < x /\ x != 1 ==> 2 * abs(log10(x)) < abs(log(x)) + * 4) adjexp(x) <= log10(x) < adjexp(x) + 1 * - * 1) adjexp(a) + 1 > log10(a) >= adjexp(a) + * Case adjexp(x) >= 0: + * 5) 2 * adjexp(x) < abs(log(x)) + * Case adjexp(x) > 0: + * 6) adjexp(2 * adjexp(x)) <= adjexp(abs(log(x))) + * Case adjexp(x) == 0: + * mpd_exp_digits(t)-1 == 0 <= emax (the shortcut is not triggered) * - * 2) |log10(a)| >= adjexp(a), if adjexp(a) >= 0 - * |log10(a)| > -adjexp(a)-1, if adjexp(a) < 0 - * - * 3) |log(a)| > 2*|log10(a)| + * Case adjexp(x) < 0: + * 7) 2 * (-adjexp(x) - 1) < abs(log(x)) + * Case adjexp(x) < -1: + * 8) adjexp(2 * (-adjexp(x) - 1)) <= adjexp(abs(log(x))) + * Case adjexp(x) == -1: + * mpd_exp_digits(t)-1 == 0 <= emax (the shortcut is not triggered) */ adjexp = mpd_adjexp(a); t = (adjexp < 0) ? -adjexp-1 : adjexp; @@ -4674,7 +4686,7 @@ workctx.prec = prec; _mpd_qln(result, a, &workctx, status); _ssettriple(&ulp, MPD_POS, 1, - result->exp + result->digits-workctx.prec-1); + result->exp + result->digits-workctx.prec); workctx.prec = ctx->prec; mpd_qadd(&t1, result, &ulp, &workctx, &workctx.status); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 7 20:29:29 2012 From: python-checkins at python.org (alexander.belopolsky) Date: Thu, 07 Jun 2012 20:29:29 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2311823=3A_disassemb?= =?utf8?q?ly_now_shows_argument_counts_on_calls_with_keyword_args?= Message-ID: http://hg.python.org/cpython/rev/22dc0a433b0e changeset: 77373:22dc0a433b0e user: Alexander Belopolsky date: Thu Jun 07 14:28:14 2012 -0400 summary: Issue #11823: disassembly now shows argument counts on calls with keyword args files: Lib/dis.py | 6 ++++++ Lib/opcode.py | 7 ++++++- Lib/test/test_dis.py | 6 +++--- Misc/NEWS | 2 ++ 4 files changed, 17 insertions(+), 4 deletions(-) diff --git a/Lib/dis.py b/Lib/dis.py --- a/Lib/dis.py +++ b/Lib/dis.py @@ -190,6 +190,9 @@ if free is None: free = co.co_cellvars + co.co_freevars print('(' + free[oparg] + ')', end=' ') + elif op in hasnargs: + print('(%d positional, %d keyword pair)' + % (code[i-2], code[i-1]), end=' ') print() def _disassemble_bytes(code, lasti=-1, varnames=None, names=None, @@ -229,6 +232,9 @@ print('(%d)' % oparg, end=' ') elif op in hascompare: print('(' + cmp_op[oparg] + ')', end=' ') + elif op in hasnargs: + print('(%d positional, %d keyword pair)' + % (code[i-2], code[i-1]), end=' ') print() def _disassemble_str(source): diff --git a/Lib/opcode.py b/Lib/opcode.py --- a/Lib/opcode.py +++ b/Lib/opcode.py @@ -6,7 +6,7 @@ __all__ = ["cmp_op", "hasconst", "hasname", "hasjrel", "hasjabs", "haslocal", "hascompare", "hasfree", "opname", "opmap", - "HAVE_ARGUMENT", "EXTENDED_ARG"] + "HAVE_ARGUMENT", "EXTENDED_ARG", "hasnargs"] cmp_op = ('<', '<=', '==', '!=', '>', '>=', 'in', 'not in', 'is', 'is not', 'exception match', 'BAD') @@ -18,6 +18,7 @@ haslocal = [] hascompare = [] hasfree = [] +hasnargs = [] opmap = {} opname = [''] * 256 @@ -152,6 +153,7 @@ def_op('RAISE_VARARGS', 130) # Number of raise arguments (1, 2, or 3) def_op('CALL_FUNCTION', 131) # #args + (#kwargs << 8) +hasnargs.append(131) def_op('MAKE_FUNCTION', 132) # Number of args with default values def_op('BUILD_SLICE', 133) # Number of items def_op('MAKE_CLOSURE', 134) @@ -165,8 +167,11 @@ hasfree.append(138) def_op('CALL_FUNCTION_VAR', 140) # #args + (#kwargs << 8) 
+hasnargs.append(140) def_op('CALL_FUNCTION_KW', 141) # #args + (#kwargs << 8) +hasnargs.append(141) def_op('CALL_FUNCTION_VAR_KW', 142) # #args + (#kwargs << 8) +hasnargs.append(142) jrel_op('SETUP_WITH', 143) diff --git a/Lib/test/test_dis.py b/Lib/test/test_dis.py --- a/Lib/test/test_dis.py +++ b/Lib/test/test_dis.py @@ -38,7 +38,7 @@ dis_f = """\ %-4d 0 LOAD_GLOBAL 0 (print) 3 LOAD_FAST 0 (a) - 6 CALL_FUNCTION 1 + 6 CALL_FUNCTION 1 (1 positional, 0 keyword pair) 9 POP_TOP %-4d 10 LOAD_CONST 1 (1) @@ -50,7 +50,7 @@ dis_f_co_code = """\ 0 LOAD_GLOBAL 0 (0) 3 LOAD_FAST 0 (0) - 6 CALL_FUNCTION 1 + 6 CALL_FUNCTION 1 (1 positional, 0 keyword pair) 9 POP_TOP 10 LOAD_CONST 1 (1) 13 RETURN_VALUE @@ -68,7 +68,7 @@ 6 LOAD_CONST 1 (1) %-4d 9 LOAD_CONST 2 (10) - 12 CALL_FUNCTION 2 + 12 CALL_FUNCTION 2 (2 positional, 0 keyword pair) 15 GET_ITER >> 16 FOR_ITER 6 (to 25) 19 STORE_FAST 0 (res) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -21,6 +21,8 @@ Library ------- +- Issue #11823: disassembly now shows argument counts on calls with keyword args. + - Issue #14711: os.stat_float_times() has been deprecated. - LZMAFile now accepts the modes "rb"/"wb"/"ab" as synonyms of "r"/"w"/"a". -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 7 21:42:02 2012 From: python-checkins at python.org (richard.oudkerk) Date: Thu, 07 Jun 2012 21:42:02 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogSXNzdWUgIzEyMTU3?= =?utf8?q?=3A_pool=2Emap=28=29_does_not_handle_empty_iterable_correctly?= Message-ID: http://hg.python.org/cpython/rev/1b3d4ffcb4d1 changeset: 77374:1b3d4ffcb4d1 branch: 3.2 parent: 77369:6e4ec47fba6a user: Richard Oudkerk date: Wed Jun 06 19:04:57 2012 +0100 summary: Issue #12157: pool.map() does not handle empty iterable correctly Initial patch by mouad files: Lib/multiprocessing/pool.py | 1 + Lib/test/test_multiprocessing.py | 18 +++++++++++++++--- Misc/NEWS | 3 +++ 3 files changed, 19 insertions(+), 3 deletions(-) diff --git a/Lib/multiprocessing/pool.py b/Lib/multiprocessing/pool.py --- a/Lib/multiprocessing/pool.py +++ b/Lib/multiprocessing/pool.py @@ -584,6 +584,7 @@ if chunksize <= 0: self._number_left = 0 self._ready = True + del cache[self._job] else: self._number_left = length//chunksize + bool(length % chunksize) diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py --- a/Lib/test/test_multiprocessing.py +++ b/Lib/test/test_multiprocessing.py @@ -1178,6 +1178,18 @@ join() self.assertLess(join.elapsed, 0.5) + def test_empty_iterable(self): + # See Issue 12157 + p = self.Pool(1) + + self.assertEqual(p.map(sqr, []), []) + self.assertEqual(list(p.imap(sqr, [])), []) + self.assertEqual(list(p.imap_unordered(sqr, [])), []) + self.assertEqual(p.map_async(sqr, []).get(), []) + + p.close() + p.join() + def raising(): raise KeyError("key") @@ -2176,7 +2188,7 @@ 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition', 'Event', 'Value', 'Array', 'RawValue', 'RawArray', 'current_process', 'active_children', 'Pipe', - 'connection', 'JoinableQueue' + 'connection', 'JoinableQueue', 'Pool' ))) testcases_processes = create_test_cases(ProcessesMixin, type='processes') @@ -2190,7 +2202,7 @@ locals().update(get_attributes(manager, ( 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition', 'Event', 'Value', 'Array', 'list', 'dict', - 'Namespace', 'JoinableQueue' + 'Namespace', 'JoinableQueue', 'Pool' ))) testcases_manager = create_test_cases(ManagerMixin, type='manager') @@ -2204,7 
+2216,7 @@ 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition', 'Event', 'Value', 'Array', 'current_process', 'active_children', 'Pipe', 'connection', 'dict', 'list', - 'Namespace', 'JoinableQueue' + 'Namespace', 'JoinableQueue', 'Pool' ))) testcases_threads = create_test_cases(ThreadsMixin, type='threads') diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -70,6 +70,9 @@ Library ------- +- Issue #12157: Make pool.map() empty iterables correctly. Initial + patch by mouad. + - Issue #14992: os.makedirs(path, exist_ok=True) would raise an OSError when the path existed and had the S_ISGID mode bit set when it was not explicitly asked for. This is no longer an exception as mkdir -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 7 21:42:03 2012 From: python-checkins at python.org (richard.oudkerk) Date: Thu, 07 Jun 2012 21:42:03 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogSXNzdWUgIzEzODU0?= =?utf8?q?=3A_Properly_handle_non-integer=2C_non-string_arg_to_SystemExit?= Message-ID: http://hg.python.org/cpython/rev/4346cba353b4 changeset: 77375:4346cba353b4 branch: 3.2 user: Richard Oudkerk date: Wed Jun 06 19:04:57 2012 +0100 summary: Issue #13854: Properly handle non-integer, non-string arg to SystemExit Previously multiprocessing only expected int or str. It also wrongly used an exit code of 1 when the argument was a string instead of zero. files: Lib/multiprocessing/process.py | 6 ++-- Lib/test/test_multiprocessing.py | 30 ++++++++++++++++++++ Misc/NEWS | 3 ++ 3 files changed, 36 insertions(+), 3 deletions(-) diff --git a/Lib/multiprocessing/process.py b/Lib/multiprocessing/process.py --- a/Lib/multiprocessing/process.py +++ b/Lib/multiprocessing/process.py @@ -271,11 +271,11 @@ except SystemExit as e: if not e.args: exitcode = 1 - elif type(e.args[0]) is int: + elif isinstance(e.args[0], int): exitcode = e.args[0] else: - sys.stderr.write(e.args[0] + '\n') - exitcode = 1 + sys.stderr.write(str(e.args[0]) + '\n') + exitcode = 0 if isinstance(e.args[0], str) else 1 except: exitcode = 1 import traceback diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py --- a/Lib/test/test_multiprocessing.py +++ b/Lib/test/test_multiprocessing.py @@ -390,6 +390,36 @@ 1/0 # MARKER + @classmethod + def _test_sys_exit(cls, reason, testfn): + sys.stderr = open(testfn, 'w') + sys.exit(reason) + + def test_sys_exit(self): + # See Issue 13854 + if self.TYPE == 'threads': + return + + testfn = test.support.TESTFN + self.addCleanup(test.support.unlink, testfn) + + for reason, code in (([1, 2, 3], 1), ('ignore this', 0)): + p = self.Process(target=self._test_sys_exit, args=(reason, testfn)) + p.daemon = True + p.start() + p.join(5) + self.assertEqual(p.exitcode, code) + + with open(testfn, 'r') as f: + self.assertEqual(f.read().rstrip(), str(reason)) + + for reason in (True, False, 8): + p = self.Process(target=sys.exit, args=(reason,)) + p.daemon = True + p.start() + p.join(5) + self.assertEqual(p.exitcode, reason) + # # # diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -70,6 +70,9 @@ Library ------- +- Issue #13854: Make multiprocessing properly handle non-integer + non-string argument to SystemExit. + - Issue #12157: Make pool.map() empty iterables correctly. Initial patch by mouad. 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 7 21:42:05 2012 From: python-checkins at python.org (richard.oudkerk) Date: Thu, 07 Jun 2012 21:42:05 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_fixes_for_=2313854_and_=2312157=2E?= Message-ID: http://hg.python.org/cpython/rev/3585cb1388f2 changeset: 77376:3585cb1388f2 parent: 77373:22dc0a433b0e parent: 77375:4346cba353b4 user: Richard Oudkerk date: Thu Jun 07 20:38:11 2012 +0100 summary: Merge fixes for #13854 and #12157. files: Lib/multiprocessing/pool.py | 1 + Lib/multiprocessing/process.py | 6 +- Lib/test/test_multiprocessing.py | 48 ++++++++++++++++++- Misc/NEWS | 6 ++ 4 files changed, 55 insertions(+), 6 deletions(-) diff --git a/Lib/multiprocessing/pool.py b/Lib/multiprocessing/pool.py --- a/Lib/multiprocessing/pool.py +++ b/Lib/multiprocessing/pool.py @@ -576,6 +576,7 @@ if chunksize <= 0: self._number_left = 0 self._event.set() + del cache[self._job] else: self._number_left = length//chunksize + bool(length % chunksize) diff --git a/Lib/multiprocessing/process.py b/Lib/multiprocessing/process.py --- a/Lib/multiprocessing/process.py +++ b/Lib/multiprocessing/process.py @@ -262,11 +262,11 @@ except SystemExit as e: if not e.args: exitcode = 1 - elif type(e.args[0]) is int: + elif isinstance(e.args[0], int): exitcode = e.args[0] else: - sys.stderr.write(e.args[0] + '\n') - exitcode = 1 + sys.stderr.write(str(e.args[0]) + '\n') + exitcode = 0 if isinstance(e.args[0], str) else 1 except: exitcode = 1 import traceback diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py --- a/Lib/test/test_multiprocessing.py +++ b/Lib/test/test_multiprocessing.py @@ -439,6 +439,36 @@ 1/0 # MARKER + @classmethod + def _test_sys_exit(cls, reason, testfn): + sys.stderr = open(testfn, 'w') + sys.exit(reason) + + def test_sys_exit(self): + # See Issue 13854 + if self.TYPE == 'threads': + return + + testfn = test.support.TESTFN + self.addCleanup(test.support.unlink, testfn) + + for reason, code in (([1, 2, 3], 1), ('ignore this', 0)): + p = self.Process(target=self._test_sys_exit, args=(reason, testfn)) + p.daemon = True + p.start() + p.join(5) + self.assertEqual(p.exitcode, code) + + with open(testfn, 'r') as f: + self.assertEqual(f.read().rstrip(), str(reason)) + + for reason in (True, False, 8): + p = self.Process(target=sys.exit, args=(reason,)) + p.daemon = True + p.start() + p.join(5) + self.assertEqual(p.exitcode, reason) + # # # @@ -1342,6 +1372,18 @@ join() self.assertLess(join.elapsed, 0.5) + def test_empty_iterable(self): + # See Issue 12157 + p = self.Pool(1) + + self.assertEqual(p.map(sqr, []), []) + self.assertEqual(list(p.imap(sqr, [])), []) + self.assertEqual(list(p.imap_unordered(sqr, [])), []) + self.assertEqual(p.map_async(sqr, []).get(), []) + + p.close() + p.join() + def raising(): raise KeyError("key") @@ -2487,7 +2529,7 @@ 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition', 'Event', 'Value', 'Array', 'RawValue', 'RawArray', 'current_process', 'active_children', 'Pipe', - 'connection', 'JoinableQueue' + 'connection', 'JoinableQueue', 'Pool' ))) testcases_processes = create_test_cases(ProcessesMixin, type='processes') @@ -2501,7 +2543,7 @@ locals().update(get_attributes(manager, ( 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition', 'Event', 'Value', 'Array', 'list', 'dict', - 'Namespace', 'JoinableQueue' + 'Namespace', 'JoinableQueue', 'Pool' ))) testcases_manager = 
create_test_cases(ManagerMixin, type='manager') @@ -2515,7 +2557,7 @@ 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition', 'Event', 'Value', 'Array', 'current_process', 'active_children', 'Pipe', 'connection', 'dict', 'list', - 'Namespace', 'JoinableQueue' + 'Namespace', 'JoinableQueue', 'Pool' ))) testcases_threads = create_test_cases(ThreadsMixin, type='threads') diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -21,6 +21,12 @@ Library ------- +- Issue #13854: Make multiprocessing properly handle non-integer + non-string argument to SystemExit. + +- Issue #12157: Make pool.map() empty iterables correctly. Initial + patch by mouad. + - Issue #11823: disassembly now shows argument counts on calls with keyword args. - Issue #14711: os.stat_float_times() has been deprecated. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 7 21:42:06 2012 From: python-checkins at python.org (richard.oudkerk) Date: Thu, 07 Jun 2012 21:42:06 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzEyMTU3?= =?utf8?q?=3A_pool=2Emap=28=29_does_not_handle_empty_iterable_correctly?= Message-ID: http://hg.python.org/cpython/rev/7ab7836894c4 changeset: 77377:7ab7836894c4 branch: 2.7 parent: 77371:d79c837b6bf3 user: Richard Oudkerk date: Wed Jun 06 17:52:18 2012 +0100 summary: Issue #12157: pool.map() does not handle empty iterable correctly Initial patch by mouad files: Lib/multiprocessing/pool.py | 1 + Lib/test/test_multiprocessing.py | 18 +++++++++++++++--- Misc/NEWS | 3 +++ 3 files changed, 19 insertions(+), 3 deletions(-) diff --git a/Lib/multiprocessing/pool.py b/Lib/multiprocessing/pool.py --- a/Lib/multiprocessing/pool.py +++ b/Lib/multiprocessing/pool.py @@ -576,6 +576,7 @@ if chunksize <= 0: self._number_left = 0 self._ready = True + del cache[self._job] else: self._number_left = length//chunksize + bool(length % chunksize) diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py --- a/Lib/test/test_multiprocessing.py +++ b/Lib/test/test_multiprocessing.py @@ -1152,6 +1152,18 @@ join() self.assertTrue(join.elapsed < 0.2) + def test_empty_iterable(self): + # See Issue 12157 + p = self.Pool(1) + + self.assertEqual(p.map(sqr, []), []) + self.assertEqual(list(p.imap(sqr, [])), []) + self.assertEqual(list(p.imap_unordered(sqr, [])), []) + self.assertEqual(p.map_async(sqr, []).get(), []) + + p.close() + p.join() + def unpickleable_result(): return lambda: 42 @@ -2113,7 +2125,7 @@ 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition', 'Event', 'Value', 'Array', 'RawValue', 'RawArray', 'current_process', 'active_children', 'Pipe', - 'connection', 'JoinableQueue' + 'connection', 'JoinableQueue', 'Pool' ))) testcases_processes = create_test_cases(ProcessesMixin, type='processes') @@ -2127,7 +2139,7 @@ locals().update(get_attributes(manager, ( 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition', 'Event', 'Value', 'Array', 'list', 'dict', - 'Namespace', 'JoinableQueue' + 'Namespace', 'JoinableQueue', 'Pool' ))) testcases_manager = create_test_cases(ManagerMixin, type='manager') @@ -2141,7 +2153,7 @@ 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition', 'Event', 'Value', 'Array', 'current_process', 'active_children', 'Pipe', 'connection', 'dict', 'list', - 'Namespace', 'JoinableQueue' + 'Namespace', 'JoinableQueue', 'Pool' ))) testcases_threads = create_test_cases(ThreadsMixin, type='threads') diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ 
b/Misc/NEWS @@ -67,6 +67,9 @@ Library ------- +- Issue #12157: Make pool.map() empty iterables correctly. Initial + patch by mouad. + - Issue #14962: Update text coloring in IDLE shell window after changing options. Patch by Roger Serwy. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 7 21:42:07 2012 From: python-checkins at python.org (richard.oudkerk) Date: Thu, 07 Jun 2012 21:42:07 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzEzODU0?= =?utf8?q?=3A_Properly_handle_non-integer=2C_non-string_arg_to_SystemExit?= Message-ID: http://hg.python.org/cpython/rev/da5b370f41a1 changeset: 77378:da5b370f41a1 branch: 2.7 user: Richard Oudkerk date: Wed Jun 06 19:01:14 2012 +0100 summary: Issue #13854: Properly handle non-integer, non-string arg to SystemExit Previously multiprocessing only expected int or str. It also wrongly used an exit code of 1 when the argument was a string instead of zero. files: Lib/multiprocessing/process.py | 6 ++-- Lib/test/test_multiprocessing.py | 30 ++++++++++++++++++++ Misc/NEWS | 3 ++ 3 files changed, 36 insertions(+), 3 deletions(-) diff --git a/Lib/multiprocessing/process.py b/Lib/multiprocessing/process.py --- a/Lib/multiprocessing/process.py +++ b/Lib/multiprocessing/process.py @@ -262,12 +262,12 @@ except SystemExit, e: if not e.args: exitcode = 1 - elif type(e.args[0]) is int: + elif isinstance(e.args[0], int): exitcode = e.args[0] else: - sys.stderr.write(e.args[0] + '\n') + sys.stderr.write(str(e.args[0]) + '\n') sys.stderr.flush() - exitcode = 1 + exitcode = 0 if isinstance(e.args[0], str) else 1 except: exitcode = 1 import traceback diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py --- a/Lib/test/test_multiprocessing.py +++ b/Lib/test/test_multiprocessing.py @@ -325,6 +325,36 @@ ] self.assertEqual(result, expected) + @classmethod + def _test_sys_exit(cls, reason, testfn): + sys.stderr = open(testfn, 'w') + sys.exit(reason) + + def test_sys_exit(self): + # See Issue 13854 + if self.TYPE == 'threads': + return + + testfn = test_support.TESTFN + self.addCleanup(test_support.unlink, testfn) + + for reason, code in (([1, 2, 3], 1), ('ignore this', 0)): + p = self.Process(target=self._test_sys_exit, args=(reason, testfn)) + p.daemon = True + p.start() + p.join(5) + self.assertEqual(p.exitcode, code) + + with open(testfn, 'r') as f: + self.assertEqual(f.read().rstrip(), str(reason)) + + for reason in (True, False, 8): + p = self.Process(target=sys.exit, args=(reason,)) + p.daemon = True + p.start() + p.join(5) + self.assertEqual(p.exitcode, reason) + # # # diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -67,6 +67,9 @@ Library ------- +- Issue #13854: Make multiprocessing properly handle non-integer + non-string argument to SystemExit. + - Issue #12157: Make pool.map() empty iterables correctly. Initial patch by mouad. 
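The #12157 fix applied in the same series can be checked with a short script; this sketch mirrors the new test_empty_iterable test and assumes a Pool that includes the fix, where map(), imap(), imap_unordered() and map_async() all return empty results for an empty iterable instead of hanging.

    from multiprocessing import Pool

    def sqr(x):
        return x * x

    if __name__ == '__main__':
        p = Pool(1)
        print(p.map(sqr, []))                    # []
        print(list(p.imap(sqr, [])))             # []
        print(list(p.imap_unordered(sqr, [])))   # []
        print(p.map_async(sqr, []).get())        # []
        p.close()
        p.join()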
-- Repository URL: http://hg.python.org/cpython From tjreedy at udel.edu Thu Jun 7 21:47:01 2012 From: tjreedy at udel.edu (Terry Reedy) Date: Thu, 07 Jun 2012 15:47:01 -0400 Subject: [Python-checkins] peps: Update 422 based on python-dev feedback In-Reply-To: References: Message-ID: On 6/7/2012 11:45 AM, Daniel Urban wrote: > On Thu, Jun 7, 2012 at 2:08 PM, nick.coghlan wrote: >> -* If the metaclass hint refers to an instance of ``type``, then it is >> +* If the metaclass hint refers to a subclass of ``type``, then it is >> considered as a candidate metaclass along with the metaclasses of all of >> the parents of the class being defined. If a more appropriate metaclass is >> found amongst the candidates, then it will be used instead of the one > > I think here "instance" was correct (see > http://hg.python.org/cpython/file/default/Lib/types.py#l76 and > http://hg.python.org/cpython/file/cedc68440a67/Python/bltinmodule.c#l90). If so, then the behavior of the standard case of a type subclass is not obviously (to me) covered. -- Terry Jan Reedy From urban.dani+py at gmail.com Thu Jun 7 22:17:15 2012 From: urban.dani+py at gmail.com (Daniel Urban) Date: Thu, 7 Jun 2012 22:17:15 +0200 Subject: [Python-checkins] peps: Update 422 based on python-dev feedback In-Reply-To: References: Message-ID: On Thu, Jun 7, 2012 at 9:47 PM, Terry Reedy wrote: > On 6/7/2012 11:45 AM, Daniel Urban wrote: >> >> On Thu, Jun 7, 2012 at 2:08 PM, nick.coghlan >> ?wrote: >>> >>> -* If the metaclass hint refers to an instance of ``type``, then it is >>> +* If the metaclass hint refers to a subclass of ``type``, then it is >>> ? considered as a candidate metaclass along with the metaclasses of all >>> of >>> ? the parents of the class being defined. If a more appropriate metaclass >>> is >>> ? found amongst the candidates, then it will be used instead of the one >> >> >> I think here "instance" was correct (see >> http://hg.python.org/cpython/file/default/Lib/types.py#l76 and >> http://hg.python.org/cpython/file/cedc68440a67/Python/bltinmodule.c#l90). > > > If so, then the behavior of the standard case of a type subclass is not > obviously (to me) covered. A subclass of type is also necessarily an instance of type, so that is also covered by this case. Daniel From python-checkins at python.org Fri Jun 8 02:05:12 2012 From: python-checkins at python.org (terry.reedy) Date: Fri, 08 Jun 2012 02:05:12 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_NEWS_fixes?= Message-ID: http://hg.python.org/cpython/rev/063f125dc0f7 changeset: 77379:063f125dc0f7 branch: 2.7 user: Terry Jan Reedy date: Thu Jun 07 19:50:30 2012 -0400 summary: NEWS fixes files: Lib/idlelib/NEWS.txt | 15 +++++++++++++-- Misc/NEWS | 2 +- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -1,5 +1,16 @@ +What's New in IDLE 2.7.4? +========================= + +- Issue # 12510: Attempt to get certain tool tips no longer crashes IDLE. + +- Issue10365: File open dialog now works instead of crashing even when + parent window is closed while dialog is open. + +- Issue 14876: use user-selected font for highlight configuration. + + What's New in IDLE 2.7.3? -======================= +========================= - Issue #14409: IDLE now properly executes commands in the Shell window when it cannot read the normal config files on startup and @@ -11,7 +22,7 @@ What's New in IDLE 2.7.2? 
-======================= +========================= *Release date: 29-May-2011* diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -78,7 +78,7 @@ - Issue #10997: Prevent a duplicate entry in IDLE's "Recent Files" menu. -- Issue12510: Attempting to get invalid tooltip no longer closes Idle. +- Issue #12510: Attempting to get invalid tooltip no longer closes Idle. Original patch by Roger Serwy. - Issue #10365: File open dialog now works instead of crashing -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 8 02:05:13 2012 From: python-checkins at python.org (terry.reedy) Date: Fri, 08 Jun 2012 02:05:13 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogSXNzdWUgIzEyNTEw?= =?utf8?q?=3A_Revise_and_triple_=23_of_calltip_tests=2C_with_an_eye_to_uni?= =?utf8?q?ttest?= Message-ID: http://hg.python.org/cpython/rev/02b4c62ce393 changeset: 77380:02b4c62ce393 branch: 3.2 parent: 77375:4346cba353b4 user: Terry Jan Reedy date: Thu Jun 07 19:41:04 2012 -0400 summary: Issue #12510: Revise and triple # of calltip tests, with an eye to unittest use. Make the get_entity 'method' a module function as it did not use 'self'. Delete buggy _find_constructor function that is not needed, at least in 3.x. Revise get_argspec so all tests pass. Add and fix NEWS entries. files: Lib/idlelib/CallTips.py | 191 +++++++++++++++++---------- Lib/idlelib/NEWS.txt | 8 + Misc/NEWS | 17 +- 3 files changed, 139 insertions(+), 77 deletions(-) diff --git a/Lib/idlelib/CallTips.py b/Lib/idlelib/CallTips.py --- a/Lib/idlelib/CallTips.py +++ b/Lib/idlelib/CallTips.py @@ -100,52 +100,53 @@ return rpcclt.remotecall("exec", "get_the_calltip", (expression,), {}) else: - entity = self.get_entity(expression) - return get_argspec(entity) + return get_argspec(get_entity(expression)) - def get_entity(self, expression): - """Return the object corresponding to expression evaluated - in a namespace spanning sys.modules and __main.dict__. - """ - if expression: - namespace = sys.modules.copy() - namespace.update(__main__.__dict__) - try: - return eval(expression, namespace) - except BaseException: - # An uncaught exception closes idle, and eval can raise any - # exception, especially if user classes are involved. - return None +def get_entity(expression): + """Return the object corresponding to expression evaluated + in a namespace spanning sys.modules and __main.dict__. + """ + if expression: + namespace = sys.modules.copy() + namespace.update(__main__.__dict__) + try: + return eval(expression, namespace) + except BaseException: + # An uncaught exception closes idle, and eval can raise any + # exception, especially if user classes are involved. + return None -def _find_constructor(class_ob): - "Find the nearest __init__() in the class tree." - try: - return class_ob.__init__.__func__ - except AttributeError: - for base in class_ob.__bases__: - init = _find_constructor(base) - if init: - return init - return None +# The following are used in both get_argspec and tests +_self_pat = re.compile('self\,?\s*') +_default_callable_argspec = "No docstring, see docs." def get_argspec(ob): - """Get a string describing the arguments for the given object, - only if it is callable.""" + '''Return a string describing the arguments and return of a callable object. + + For Python-coded functions and methods, the first line is introspected. + Delete 'self' parameter for classes (.__init__) and bound methods. + The last line is the first line of the doc string. 
For builtins, this typically + includes the arguments in addition to the return value. + + ''' argspec = "" - if ob is not None and hasattr(ob, '__call__'): + if hasattr(ob, '__call__'): if isinstance(ob, type): - fob = _find_constructor(ob) - if fob is None: - fob = lambda: None - elif isinstance(ob, types.MethodType): - fob = ob.__func__ + fob = getattr(ob, '__init__', None) + elif isinstance(ob.__call__, types.MethodType): + fob = ob.__call__ else: fob = ob - if isinstance(fob, (types.FunctionType, types.LambdaType)): + if isinstance(fob, (types.FunctionType, types.MethodType)): argspec = inspect.formatargspec(*inspect.getfullargspec(fob)) - pat = re.compile('self\,?\s*') - argspec = pat.sub("", argspec) - doc = getattr(ob, "__doc__", "") + if (isinstance(ob, (type, types.MethodType)) or + isinstance(ob.__call__, types.MethodType)): + argspec = _self_pat.sub("", argspec) + + if isinstance(ob.__call__, types.MethodType): + doc = ob.__call__.__doc__ + else: + doc = getattr(ob, "__doc__", "") if doc: doc = doc.lstrip() pos = doc.find("\n") @@ -154,13 +155,16 @@ if argspec: argspec += "\n" argspec += doc[:pos] + if not argspec: + argspec = _default_callable_argspec return argspec ################################################# # -# Test code -# +# Test code tests CallTips.fetch_tip, get_entity, and get_argspec + def main(): + # Putting expected in docstrings results in doubled tips for test def t1(): "()" def t2(a, b=None): "(a, b=None)" def t3(a, *args): "(a, *args)" @@ -170,39 +174,88 @@ class TC(object): "(ai=None, *b)" - def __init__(self, ai=None, *b): "(ai=None, *b)" - def t1(self): "()" - def t2(self, ai, b=None): "(ai, b=None)" - def t3(self, ai, *args): "(ai, *args)" - def t4(self, *args): "(*args)" - def t5(self, ai, *args): "(ai, *args)" - def t6(self, ai, b=None, *args, **kw): "(ai, b=None, *args, **kw)" - - __main__.__dict__.update(locals()) - - def test(tests): - ct = CallTips() - failed=[] - for t in tests: - expected = t.__doc__ + "\n" + t.__doc__ - name = t.__name__ - # exercise fetch_tip(), not just get_argspec() - try: - qualified_name = "%s.%s" % (t.__self__.__class__.__name__, name) - except AttributeError: - qualified_name = name - argspec = ct.fetch_tip(qualified_name) - if argspec != expected: - failed.append(t) - fmt = "%s - expected %s, but got %s" - print(fmt % (t.__name__, expected, get_argspec(t))) - print("%d of %d tests failed" % (len(failed), len(tests))) + def __init__(self, ai=None, *b): "(self, ai=None, *b)" + def t1(self): "(self)" + def t2(self, ai, b=None): "(self, ai, b=None)" + def t3(self, ai, *args): "(self, ai, *args)" + def t4(self, *args): "(self, *args)" + def t5(self, ai, *args): "(self, ai, *args)" + def t6(self, ai, b=None, *args, **kw): "(self, ai, b=None, *args, **kw)" + @classmethod + def cm(cls, a): "(cls, a)" + @staticmethod + def sm(b): "(b)" + def __call__(self, ci): "(ci)" tc = TC() - tests = (t1, t2, t3, t4, t5, t6, - TC, tc.t1, tc.t2, tc.t3, tc.t4, tc.t5, tc.t6) - test(tests) + # Python classes that inherit builtin methods + class Int(int): "Int(x[, base]) -> integer" + class List(list): "List() -> new empty list" + # Simulate builtin with no docstring for default argspec test + class SB: __call__ = None + + __main__.__dict__.update(locals()) # required for get_entity eval() + + num_tests = num_fail = 0 + tip = CallTips().fetch_tip + + def test(expression, expected): + nonlocal num_tests, num_fail + num_tests += 1 + argspec = tip(expression) + if argspec != expected: + num_fail += 1 + fmt = "%s - expected\n%r\n - but got\n%r" + 
print(fmt % (expression, expected, argspec)) + + def test_builtins(): + # if first line of a possibly multiline compiled docstring changes, + # must change corresponding test string + test('int', "int(x[, base]) -> integer") + test('Int', Int.__doc__) + test('types.MethodType', "method(function, instance)") + test('list', "list() -> new empty list") + test('List', List.__doc__) + test('list.__new__', + 'T.__new__(S, ...) -> a new object with type S, a subtype of T') + test('list.__init__', + 'x.__init__(...) initializes x; see help(type(x)) for signature') + append_doc = "L.append(object) -> None -- append object to end" + test('list.append', append_doc) + test('[].append', append_doc) + test('List.append', append_doc) + test('SB()', _default_callable_argspec) + + def test_funcs(): + for func in (t1, t2, t3, t4, t5, t6, TC,): + fdoc = func.__doc__ + test(func.__name__, fdoc + "\n" + fdoc) + for func in (TC.t1, TC.t2, TC.t3, TC.t4, TC.t5, TC.t6, TC.cm, TC.sm): + fdoc = func.__doc__ + test('TC.'+func.__name__, fdoc + "\n" + fdoc) + + def test_methods(): + for func in (tc.t1, tc.t2, tc.t3, tc.t4, tc.t5, tc.t6): + fdoc = func.__doc__ + test('tc.'+func.__name__, _self_pat.sub("", fdoc) + "\n" + fdoc) + fdoc = tc.__call__.__doc__ + test('tc', fdoc + "\n" + fdoc) + + def test_non_callables(): + # expression evaluates, but not to a callable + for expr in ('0', '0.0' 'num_tests', b'num_tests', '[]', '{}'): + test(expr, '') + # expression does not evaluate, but raises an exception + for expr in ('1a', 'xyx', 'num_tests.xyz', '[int][1]', '{0:int}[1]'): + test(expr, '') + + test_builtins() + test_funcs() + test_non_callables() + test_methods() + + print("%d of %d tests failed" % (num_fail, num_tests)) if __name__ == '__main__': main() diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -1,6 +1,14 @@ What's New in IDLE 3.2.4? ========================= +- Issue # 12510: Attempt to get certain tool tips no longer crashes IDLE. + Erroneous tool tips have been corrected. Default added for callables. + +- Issue10365: File open dialog now works instead of crashing even when + parent window is closed while dialog is open. + +- Issue 14876: use user-selected font for highlight configuration. + - Issue #14937: Perform auto-completion of filenames in strings even for non-ASCII filenames. Likewise for identifiers. diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -89,14 +89,15 @@ - Issue #14443: Tell rpmbuild to use the correct version of Python in bdist_rpm. Initial patch by Ross Lagerwall. -- Issue14929: Stop Idle 3.x from closing on Unicode decode errors when grepping. - Patch by Roger Serwy. - -- Issue12510: Attempting to get invalid tooltip no longer closes Idle. - Original patch by Roger Serwy. - -- Issue #10365: File open dialog now works instead of crashing - even when parent window is closed. Patch by Roger Serwy. +- Issue #14929: Stop Idle 3.x from closing on Unicode decode errors when + grepping. Patch by Roger Serwy. + +- Issue #12510: Attempting to get invalid tooltip no longer closes Idle. + Other tooltipss have been corrected or improved and the number of tests + has been tripled. Original patch by Roger Serwy. + +- Issue #10365: File open dialog now works instead of crashing even when + the parent window is closed before the dialog. Patch by Roger Serwy. - Issue #14876: Use user-selected font for highlight configuration. 
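The core of the revised get_argspec() is plain introspection. The following is a standalone sketch of the same idea, not the IDLE code itself; the helper name first_line_argspec is made up, and it relies on inspect.getfullargspec()/formatargspec(), which exist in the Python 3 versions targeted here.

    import inspect
    import re
    import types

    _self_pat = re.compile(r'self,?\s*')

    def first_line_argspec(ob):
        # Introspect the signature of a Python-coded callable; for classes
        # use __init__ and drop the 'self' parameter, as the patch does.
        fob = ob.__init__ if isinstance(ob, type) else ob
        spec = inspect.formatargspec(*inspect.getfullargspec(fob))
        if isinstance(ob, (type, types.MethodType)):
            spec = _self_pat.sub("", spec)
        return spec

    class C:
        def __init__(self, a, b=None, *rest): pass
        def meth(self, x, y=0): pass

    print(first_line_argspec(C))          # (a, b=None, *rest)
    print(first_line_argspec(C(1).meth))  # (x, y=0)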
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 8 02:05:14 2012 From: python-checkins at python.org (terry.reedy) Date: Fri, 08 Jun 2012 02:05:14 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_from_3=2E2=2C_=2312510?= Message-ID: http://hg.python.org/cpython/rev/03b5f75ddac7 changeset: 77381:03b5f75ddac7 parent: 77376:3585cb1388f2 parent: 77380:02b4c62ce393 user: Terry Jan Reedy date: Thu Jun 07 20:04:17 2012 -0400 summary: Merge from 3.2, #12510 files: Lib/idlelib/CallTips.py | 191 +++++++++++++++++---------- Lib/idlelib/NEWS.txt | 8 + Misc/NEWS | 11 +- 3 files changed, 136 insertions(+), 74 deletions(-) diff --git a/Lib/idlelib/CallTips.py b/Lib/idlelib/CallTips.py --- a/Lib/idlelib/CallTips.py +++ b/Lib/idlelib/CallTips.py @@ -100,52 +100,53 @@ return rpcclt.remotecall("exec", "get_the_calltip", (expression,), {}) else: - entity = self.get_entity(expression) - return get_argspec(entity) + return get_argspec(get_entity(expression)) - def get_entity(self, expression): - """Return the object corresponding to expression evaluated - in a namespace spanning sys.modules and __main.dict__. - """ - if expression: - namespace = sys.modules.copy() - namespace.update(__main__.__dict__) - try: - return eval(expression, namespace) - except BaseException: - # An uncaught exception closes idle, and eval can raise any - # exception, especially if user classes are involved. - return None +def get_entity(expression): + """Return the object corresponding to expression evaluated + in a namespace spanning sys.modules and __main.dict__. + """ + if expression: + namespace = sys.modules.copy() + namespace.update(__main__.__dict__) + try: + return eval(expression, namespace) + except BaseException: + # An uncaught exception closes idle, and eval can raise any + # exception, especially if user classes are involved. + return None -def _find_constructor(class_ob): - "Find the nearest __init__() in the class tree." - try: - return class_ob.__init__.__func__ - except AttributeError: - for base in class_ob.__bases__: - init = _find_constructor(base) - if init: - return init - return None +# The following are used in both get_argspec and tests +_self_pat = re.compile('self\,?\s*') +_default_callable_argspec = "No docstring, see docs." def get_argspec(ob): - """Get a string describing the arguments for the given object, - only if it is callable.""" + '''Return a string describing the arguments and return of a callable object. + + For Python-coded functions and methods, the first line is introspected. + Delete 'self' parameter for classes (.__init__) and bound methods. + The last line is the first line of the doc string. For builtins, this typically + includes the arguments in addition to the return value. 
+ + ''' argspec = "" - if ob is not None and hasattr(ob, '__call__'): + if hasattr(ob, '__call__'): if isinstance(ob, type): - fob = _find_constructor(ob) - if fob is None: - fob = lambda: None - elif isinstance(ob, types.MethodType): - fob = ob.__func__ + fob = getattr(ob, '__init__', None) + elif isinstance(ob.__call__, types.MethodType): + fob = ob.__call__ else: fob = ob - if isinstance(fob, (types.FunctionType, types.LambdaType)): + if isinstance(fob, (types.FunctionType, types.MethodType)): argspec = inspect.formatargspec(*inspect.getfullargspec(fob)) - pat = re.compile('self\,?\s*') - argspec = pat.sub("", argspec) - doc = getattr(ob, "__doc__", "") + if (isinstance(ob, (type, types.MethodType)) or + isinstance(ob.__call__, types.MethodType)): + argspec = _self_pat.sub("", argspec) + + if isinstance(ob.__call__, types.MethodType): + doc = ob.__call__.__doc__ + else: + doc = getattr(ob, "__doc__", "") if doc: doc = doc.lstrip() pos = doc.find("\n") @@ -154,13 +155,16 @@ if argspec: argspec += "\n" argspec += doc[:pos] + if not argspec: + argspec = _default_callable_argspec return argspec ################################################# # -# Test code -# +# Test code tests CallTips.fetch_tip, get_entity, and get_argspec + def main(): + # Putting expected in docstrings results in doubled tips for test def t1(): "()" def t2(a, b=None): "(a, b=None)" def t3(a, *args): "(a, *args)" @@ -170,39 +174,88 @@ class TC(object): "(ai=None, *b)" - def __init__(self, ai=None, *b): "(ai=None, *b)" - def t1(self): "()" - def t2(self, ai, b=None): "(ai, b=None)" - def t3(self, ai, *args): "(ai, *args)" - def t4(self, *args): "(*args)" - def t5(self, ai, *args): "(ai, *args)" - def t6(self, ai, b=None, *args, **kw): "(ai, b=None, *args, **kw)" - - __main__.__dict__.update(locals()) - - def test(tests): - ct = CallTips() - failed=[] - for t in tests: - expected = t.__doc__ + "\n" + t.__doc__ - name = t.__name__ - # exercise fetch_tip(), not just get_argspec() - try: - qualified_name = "%s.%s" % (t.__self__.__class__.__name__, name) - except AttributeError: - qualified_name = name - argspec = ct.fetch_tip(qualified_name) - if argspec != expected: - failed.append(t) - fmt = "%s - expected %s, but got %s" - print(fmt % (t.__name__, expected, get_argspec(t))) - print("%d of %d tests failed" % (len(failed), len(tests))) + def __init__(self, ai=None, *b): "(self, ai=None, *b)" + def t1(self): "(self)" + def t2(self, ai, b=None): "(self, ai, b=None)" + def t3(self, ai, *args): "(self, ai, *args)" + def t4(self, *args): "(self, *args)" + def t5(self, ai, *args): "(self, ai, *args)" + def t6(self, ai, b=None, *args, **kw): "(self, ai, b=None, *args, **kw)" + @classmethod + def cm(cls, a): "(cls, a)" + @staticmethod + def sm(b): "(b)" + def __call__(self, ci): "(ci)" tc = TC() - tests = (t1, t2, t3, t4, t5, t6, - TC, tc.t1, tc.t2, tc.t3, tc.t4, tc.t5, tc.t6) - test(tests) + # Python classes that inherit builtin methods + class Int(int): "Int(x[, base]) -> integer" + class List(list): "List() -> new empty list" + # Simulate builtin with no docstring for default argspec test + class SB: __call__ = None + + __main__.__dict__.update(locals()) # required for get_entity eval() + + num_tests = num_fail = 0 + tip = CallTips().fetch_tip + + def test(expression, expected): + nonlocal num_tests, num_fail + num_tests += 1 + argspec = tip(expression) + if argspec != expected: + num_fail += 1 + fmt = "%s - expected\n%r\n - but got\n%r" + print(fmt % (expression, expected, argspec)) + + def test_builtins(): + # if first line 
of a possibly multiline compiled docstring changes, + # must change corresponding test string + test('int', "int(x[, base]) -> integer") + test('Int', Int.__doc__) + test('types.MethodType', "method(function, instance)") + test('list', "list() -> new empty list") + test('List', List.__doc__) + test('list.__new__', + 'T.__new__(S, ...) -> a new object with type S, a subtype of T') + test('list.__init__', + 'x.__init__(...) initializes x; see help(type(x)) for signature') + append_doc = "L.append(object) -> None -- append object to end" + test('list.append', append_doc) + test('[].append', append_doc) + test('List.append', append_doc) + test('SB()', _default_callable_argspec) + + def test_funcs(): + for func in (t1, t2, t3, t4, t5, t6, TC,): + fdoc = func.__doc__ + test(func.__name__, fdoc + "\n" + fdoc) + for func in (TC.t1, TC.t2, TC.t3, TC.t4, TC.t5, TC.t6, TC.cm, TC.sm): + fdoc = func.__doc__ + test('TC.'+func.__name__, fdoc + "\n" + fdoc) + + def test_methods(): + for func in (tc.t1, tc.t2, tc.t3, tc.t4, tc.t5, tc.t6): + fdoc = func.__doc__ + test('tc.'+func.__name__, _self_pat.sub("", fdoc) + "\n" + fdoc) + fdoc = tc.__call__.__doc__ + test('tc', fdoc + "\n" + fdoc) + + def test_non_callables(): + # expression evaluates, but not to a callable + for expr in ('0', '0.0' 'num_tests', b'num_tests', '[]', '{}'): + test(expr, '') + # expression does not evaluate, but raises an exception + for expr in ('1a', 'xyx', 'num_tests.xyz', '[int][1]', '{0:int}[1]'): + test(expr, '') + + test_builtins() + test_funcs() + test_non_callables() + test_methods() + + print("%d of %d tests failed" % (num_fail, num_tests)) if __name__ == '__main__': main() diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -1,6 +1,14 @@ What's New in IDLE 3.3.0? ========================= +- Issue # 12510: Attempt to get certain tool tips no longer crashes IDLE. + Erroneous tool tips have been corrected. Default added for callables. + +- Issue10365: File open dialog now works instead of crashing even when + parent window is closed while dialog is open. + +- Issue 14876: use user-selected font for highlight configuration. + - Issue #14937: Perform auto-completion of filenames in strings even for non-ASCII filenames. Likewise for identifiers. diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -144,14 +144,15 @@ - Issue #14443: Tell rpmbuild to use the correct version of Python in bdist_rpm. Initial patch by Ross Lagerwall. -- Issue #14929: Stop Idle 3.x from closing on Unicode decode errors when grepping. - Patch by Roger Serwy. +- Issue #14929: Stop Idle 3.x from closing on Unicode decode errors when + grepping. Patch by Roger Serwy. - Issue #12515: email now registers a defect if it gets to EOF while parsing a MIME part without seeing the closing MIME boundary. - Issue #12510: Attempting to get invalid tooltip no longer closes Idle. - Original patch by Roger Serwy. + Other tooltipss have been corrected or improved and the number of tests + has been tripled. Original patch by Roger Serwy. - Issue #1672568: email now always decodes base64 payloads, adding padding and ignoring non-base64-alphabet characters if needed, and registering defects @@ -161,8 +162,8 @@ is a missing header/body separator line. MalformedHeaderDefect, which the existing code would never actually generate, is deprecated. -- Issue #10365: File open dialog now works instead of crashing - even when parent window is closed. Patch by Roger Serwy. 
+- Issue #10365: File open dialog now works instead of crashing even when + the parent window is closed before the dialog. Patch by Roger Serwy. - Issue #8739: Updated smtpd to support RFC 5321, and added support for the RFC 1870 SIZE extension. -- Repository URL: http://hg.python.org/cpython From ncoghlan at gmail.com Fri Jun 8 03:10:26 2012 From: ncoghlan at gmail.com (Nick Coghlan) Date: Fri, 8 Jun 2012 11:10:26 +1000 Subject: [Python-checkins] peps: Update 422 based on python-dev feedback In-Reply-To: References: Message-ID: On Fri, Jun 8, 2012 at 1:45 AM, Daniel Urban wrote: > On Thu, Jun 7, 2012 at 2:08 PM, nick.coghlan wrote: >> -* If the metaclass hint refers to an instance of ``type``, then it is >> +* If the metaclass hint refers to a subclass of ``type``, then it is >> ? considered as a candidate metaclass along with the metaclasses of all of >> ? the parents of the class being defined. If a more appropriate metaclass is >> ? found amongst the candidates, then it will be used instead of the one > > I think here "instance" was correct (see > http://hg.python.org/cpython/file/default/Lib/types.py#l76 and > http://hg.python.org/cpython/file/cedc68440a67/Python/bltinmodule.c#l90). Hmm, thinking back on it, the REPL experiments that persuaded me Terry was right were flawed (I tried with object directly, but the signature of __new__/__init__ would have been wrong regardless in that case). Still, I'm kinda proving my point that I find it difficult to keep *all* the details of metaclass invocation straight in my head, even though I've been hacking on the type system for years. I've never had anything even close to that kind of problem with class methods :) Cheers, Nick. -- Nick Coghlan?? |?? ncoghlan at gmail.com?? |?? Brisbane, Australia From python-checkins at python.org Fri Jun 8 03:47:08 2012 From: python-checkins at python.org (r.david.murray) Date: Fri, 08 Jun 2012 03:47:08 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogIzg2NTI6IHVwZGF0?= =?utf8?q?e_errors_tutorial=2E?= Message-ID: http://hg.python.org/cpython/rev/b873afe640e2 changeset: 77382:b873afe640e2 branch: 2.7 parent: 77379:063f125dc0f7 user: R David Murray date: Thu Jun 07 21:46:44 2012 -0400 summary: #8652: update errors tutorial. The tutorial had some outdated examples. The patch also adds a caution about the meaning of parens in the except statement. Patch by Marien Zwart. files: Doc/tutorial/errors.rst | 14 +++++++++++--- 1 files changed, 11 insertions(+), 3 deletions(-) diff --git a/Doc/tutorial/errors.rst b/Doc/tutorial/errors.rst --- a/Doc/tutorial/errors.rst +++ b/Doc/tutorial/errors.rst @@ -120,6 +120,14 @@ ... except (RuntimeError, TypeError, NameError): ... pass +Note that the parentheses around this tuple are required, because +``except ValueError, e:`` was the syntax used for what is normally +written as ``except ValueError as e:`` in modern Python (described +below). The old syntax is still supported for backwards compatibility. +This means ``except RuntimeError, TypeError`` is not equivalent to +``except (RuntimeError, TypeError):`` but to ``except RuntimeError as +TypeError:`` which is not what you want. + The last except clause may omit the exception name(s), to serve as a wildcard. Use this with extreme caution, since it is easy to mask a real programming error in this way! 
It can also be used to print an error message and then re-raise @@ -131,8 +139,8 @@ f = open('myfile.txt') s = f.readline() i = int(s.strip()) - except IOError as (errno, strerror): - print "I/O error({0}): {1}".format(errno, strerror) + except IOError as e: + print "I/O error({0}): {1}".format(e.errno, e.strerror) except ValueError: print "Could not convert data to an integer." except: @@ -177,7 +185,7 @@ ... print type(inst) # the exception instance ... print inst.args # arguments stored in .args ... print inst # __str__ allows args to printed directly - ... x, y = inst # __getitem__ allows args to be unpacked directly + ... x, y = inst.args ... print 'x =', x ... print 'y =', y ... -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Fri Jun 8 05:51:17 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Fri, 08 Jun 2012 05:51:17 +0200 Subject: [Python-checkins] Daily reference leaks (03b5f75ddac7): sum=1 Message-ID: results for 03b5f75ddac7 on branch "default" -------------------------------------------- test_support leaked [1, 0, 0] references, sum=1 Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflog4sufr0', '-x'] From python-checkins at python.org Fri Jun 8 15:22:02 2012 From: python-checkins at python.org (hynek.schlawack) Date: Fri, 08 Jun 2012 15:22:02 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_=2314814=3A_Remove_redundan?= =?utf8?q?t_code_from_ipaddress=2EIPv6Network?= Message-ID: http://hg.python.org/cpython/rev/4aeb5b9b62d7 changeset: 77383:4aeb5b9b62d7 parent: 77381:03b5f75ddac7 user: Hynek Schlawack date: Fri Jun 08 15:21:21 2012 +0200 summary: #14814: Remove redundant code from ipaddress.IPv6Network The strict checks and netmask computing don't make sense if constructed with a ALL_ONES mask based on addresses. Also fix a bug due to mis-indentation of a return statement in the same code block. files: Lib/ipaddress.py | 14 +------------- 1 files changed, 1 insertions(+), 13 deletions(-) diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -1990,12 +1990,6 @@ self.network_address = IPv6Address(address) self._prefixlen = self._max_prefixlen self.netmask = IPv6Address(self._ALL_ONES) - if strict: - if (IPv6Address(int(self.network_address) & - int(self.netmask)) != self.network_address): - raise ValueError('%s has host bits set' % str(self)) - self.network_address = IPv6Address(int(self.network_address) & - int(self.netmask)) return # Constructing from a packed address @@ -2004,13 +1998,7 @@ self.network_address = IPv6Address((tmp[0] << 64) | tmp[1]) self._prefixlen = self._max_prefixlen self.netmask = IPv6Address(self._ALL_ONES) - if strict: - if (IPv6Address(int(self.network_address) & - int(self.netmask)) != self.network_address): - raise ValueError('%s has host bits set' % str(self)) - self.network_address = IPv6Address(int(self.network_address) & - int(self.netmask)) - return + return # Assume input argument to be string or any object representation # which converts into a formatted IP prefix string. 
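A quick illustration of why the removed branch was dead code, assuming the ipaddress module from this changeset or later: a network built from an integer (or a packed 16-byte address) always gets the /128 all-ones netmask, so the strict host-bits check can never fail.

    import ipaddress

    net = ipaddress.IPv6Network(1)          # same idea for packed 16-byte input
    print(net)                              # ::1/128
    print(net.prefixlen)                    # 128
    print(int(net.netmask) == 2**128 - 1)   # True: no host bits can be set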
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 8 18:33:23 2012 From: python-checkins at python.org (alexander.belopolsky) Date: Fri, 08 Jun 2012 18:33:23 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=232736=3A_Added_date?= =?utf8?q?time=2Etimestamp=28=29_method=2E?= Message-ID: http://hg.python.org/cpython/rev/6671c5039e15 changeset: 77384:6671c5039e15 user: Alexander Belopolsky date: Fri Jun 08 12:33:09 2012 -0400 summary: Issue #2736: Added datetime.timestamp() method. files: Doc/library/datetime.rst | 44 +++++++++++++++++----- Lib/datetime.py | 11 +++++- Lib/test/datetimetester.py | 36 +++++++++++++++++++ Misc/NEWS | 2 + Modules/_datetimemodule.c | 49 ++++++++++++++++++++++++++ 5 files changed, 130 insertions(+), 12 deletions(-) diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -752,17 +752,6 @@ datetime(1970, 1, 1) + timedelta(seconds=timestamp) - There is no method to obtain the timestamp from a :class:`datetime` - instance, but POSIX timestamp corresponding to a :class:`datetime` - instance ``dt`` can be easily calculated as follows. For a naive - ``dt``:: - - timestamp = (dt - datetime(1970, 1, 1)) / timedelta(seconds=1) - - And for an aware ``dt``:: - - timestamp = (dt - datetime(1970, 1, 1, tzinfo=timezone.utc)) / timedelta(seconds=1) - .. versionchanged:: 3.3 Raise :exc:`OverflowError` instead of :exc:`ValueError` if the timestamp is out of the range of values supported by the platform C @@ -1054,6 +1043,39 @@ Return the proleptic Gregorian ordinal of the date. The same as ``self.date().toordinal()``. +.. method:: datetime.timestamp() + + Return POSIX timestamp corresponding to the :class:`datetime` + instance. The return value is a :class:`float` similar to that + returned by :func:`time.time`. + + Naive :class:`datetime` instances are assumed to represent local + time and this method relies on the platform C :c:func:`mktime` + function to perform the conversion. Since :class:`datetime` + supports wider range of values than :c:func:`mktime` on many + platforms, this method may raise :exc:`OverflowError` for times far + in the past or far in the future. + + For aware :class:`datetime` instances, the return value is computed + as:: + + (dt - datetime(1970, 1, 1, tzinfo=timezone.utc)).total_seconds() + + .. versionadded:: 3.3 + + .. note:: + + There is no method to obtain the POSIX timestamp directly from a + naive :class:`datetime` instance representing UTC time. If your + application uses this convention and your system timezone is not + set to UTC, you can obtain the POSIX timestamp by supplying + ``tzinfo=timezone.utc``:: + + timestamp = dt.replace(tzinfo=timezone.utc).timestamp() + + or by calculating the timestamp directly:: + + timestamp = (dt - datetime(1970, 1, 1)) / timedelta(seconds=1) .. method:: datetime.weekday() diff --git a/Lib/datetime.py b/Lib/datetime.py --- a/Lib/datetime.py +++ b/Lib/datetime.py @@ -1434,6 +1434,15 @@ self.hour, self.minute, self.second, dst) + def timestamp(self): + "Return POSIX timestamp as float" + if self._tzinfo is None: + return _time.mktime((self.year, self.month, self.day, + self.hour, self.minute, self.second, + -1, -1, -1)) + self.microsecond / 1e6 + else: + return (self - _EPOCH).total_seconds() + def utctimetuple(self): "Return UTC time tuple compatible with time.gmtime()." 
offset = self.utcoffset() @@ -1889,7 +1898,7 @@ timezone.utc = timezone._create(timedelta(0)) timezone.min = timezone._create(timezone._minoffset) timezone.max = timezone._create(timezone._maxoffset) - +_EPOCH = datetime(1970, 1, 1, tzinfo=timezone.utc) """ Some time zone algebra. For a datetime x, let x.n = x stripped of its timezone -- its naive time. diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -1735,6 +1735,42 @@ got = self.theclass.utcfromtimestamp(ts) self.verify_field_equality(expected, got) + # Run with US-style DST rules: DST begins 2 a.m. on second Sunday in + # March (M3.2.0) and ends 2 a.m. on first Sunday in November (M11.1.0). + @support.run_with_tz('EST+05EDT,M3.2.0,M11.1.0') + def test_timestamp_naive(self): + t = self.theclass(1970, 1, 1) + self.assertEqual(t.timestamp(), 18000.0) + t = self.theclass(1970, 1, 1, 1, 2, 3, 4) + self.assertEqual(t.timestamp(), + 18000.0 + 3600 + 2*60 + 3 + 4*1e-6) + # Missing hour defaults to standard time + t = self.theclass(2012, 3, 11, 2, 30) + self.assertEqual(self.theclass.fromtimestamp(t.timestamp()), + t + timedelta(hours=1)) + # Ambiguous hour defaults to DST + t = self.theclass(2012, 11, 4, 1, 30) + self.assertEqual(self.theclass.fromtimestamp(t.timestamp()), t) + + # Timestamp may raise an overflow error on some platforms + for t in [self.theclass(1,1,1), self.theclass(9999,12,12)]: + try: + s = t.timestamp() + except OverflowError: + pass + else: + self.assertEqual(self.theclass.fromtimestamp(s), t) + + def test_timestamp_aware(self): + t = self.theclass(1970, 1, 1, tzinfo=timezone.utc) + self.assertEqual(t.timestamp(), 0.0) + t = self.theclass(1970, 1, 1, 1, 2, 3, 4, tzinfo=timezone.utc) + self.assertEqual(t.timestamp(), + 3600 + 2*60 + 3 + 4*1e-6) + t = self.theclass(1970, 1, 1, 1, 2, 3, 4, + tzinfo=timezone(timedelta(hours=-5), 'EST')) + self.assertEqual(t.timestamp(), + 18000 + 3600 + 2*60 + 3 + 4*1e-6) def test_microsecond_rounding(self): for fts in [self.theclass.fromtimestamp, self.theclass.utcfromtimestamp]: diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -21,6 +21,8 @@ Library ------- +- Issue #2736: Added datetime.timestamp() method. + - Issue #13854: Make multiprocessing properly handle non-integer non-string argument to SystemExit. diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -766,6 +766,8 @@ /* The interned UTC timezone instance */ static PyObject *PyDateTime_TimeZone_UTC; +/* The interned Epoch datetime instance */ +static PyObject *PyDateTime_Epoch; /* Create new timezone instance checking offset range. This function does not check the name argument. 
Caller must assure @@ -4748,6 +4750,44 @@ } static PyObject * +datetime_timestamp(PyDateTime_DateTime *self) +{ + PyObject *result; + + if (HASTZINFO(self) && self->tzinfo != Py_None) { + PyObject *delta; + delta = datetime_subtract((PyObject *)self, PyDateTime_Epoch); + if (delta == NULL) + return NULL; + result = delta_total_seconds(delta); + Py_DECREF(delta); + } + else { + struct tm time; + time_t timestamp; + memset((void *) &time, '\0', sizeof(struct tm)); + time.tm_year = GET_YEAR(self) - 1900; + time.tm_mon = GET_MONTH(self) - 1; + time.tm_mday = GET_DAY(self); + time.tm_hour = DATE_GET_HOUR(self); + time.tm_min = DATE_GET_MINUTE(self); + time.tm_sec = DATE_GET_SECOND(self); + time.tm_wday = -1; + time.tm_isdst = -1; + timestamp = mktime(&time); + /* Return value of -1 does not necessarily mean an error, but tm_wday + * cannot remain set to -1 if mktime succeeded. */ + if (timestamp == (time_t)(-1) && time.tm_wday == -1) { + PyErr_SetString(PyExc_OverflowError, + "timestamp out of range"); + return NULL; + } + result = PyFloat_FromDouble(timestamp + DATE_GET_MICROSECOND(self) / 1e6); + } + return result; +} + +static PyObject * datetime_getdate(PyDateTime_DateTime *self) { return new_date(GET_YEAR(self), @@ -4894,6 +4934,9 @@ {"timetuple", (PyCFunction)datetime_timetuple, METH_NOARGS, PyDoc_STR("Return time tuple, compatible with time.localtime().")}, + {"timestamp", (PyCFunction)datetime_timestamp, METH_NOARGS, + PyDoc_STR("Return POSIX timestamp as float.")}, + {"utctimetuple", (PyCFunction)datetime_utctimetuple, METH_NOARGS, PyDoc_STR("Return UTC time tuple, compatible with time.localtime().")}, @@ -5151,6 +5194,12 @@ return NULL; Py_DECREF(x); + /* Epoch */ + PyDateTime_Epoch = new_datetime(1970, 1, 1, 0, 0, 0, 0, + PyDateTime_TimeZone_UTC); + if (PyDateTime_Epoch == NULL) + return NULL; + /* module initialization */ PyModule_AddIntConstant(m, "MINYEAR", MINYEAR); PyModule_AddIntConstant(m, "MAXYEAR", MAXYEAR); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 8 18:56:35 2012 From: python-checkins at python.org (stefan.krah) Date: Fri, 08 Jun 2012 18:56:35 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_1=29_List_relative_error_fo?= =?utf8?b?ciBfbXBkX3FsbjEwKCku?= Message-ID: http://hg.python.org/cpython/rev/87a8a209c6e1 changeset: 77385:87a8a209c6e1 parent: 77383:4aeb5b9b62d7 user: Stefan Krah date: Fri Jun 08 18:41:33 2012 +0200 summary: 1) List relative error for _mpd_qln10(). 2) Add rigorous error analysis to _mpd_qlog10 (ACL2 proofs exist). 3) Use the relative error as a basis for the interval generation in the correction loop (same as in _mpd_qln()). files: Modules/_decimal/libmpdec/mpdecimal.c | 38 +++++++++++--- 1 files changed, 29 insertions(+), 9 deletions(-) diff --git a/Modules/_decimal/libmpdec/mpdecimal.c b/Modules/_decimal/libmpdec/mpdecimal.c --- a/Modules/_decimal/libmpdec/mpdecimal.c +++ b/Modules/_decimal/libmpdec/mpdecimal.c @@ -4298,7 +4298,14 @@ (mpd_uint_t *)mpd_ln10_data }; -/* Set 'result' to ln(10). ulp error: abs(result - log(10)) < ulp(log(10)) */ +/* + * Set 'result' to log(10). + * Ulp error: abs(result - log(10)) < ulp(log(10)) + * Relative error : abs(result - log(10)) < 5 * 10**-prec * log(10) + * + * NOTE: The relative error is not derived from the ulp error, but + * calculated separately using the fact that 23/10 < log(10) < 24/10. 
+ */ void mpd_qln10(mpd_t *result, mpd_ssize_t prec, uint32_t *status) { @@ -4712,21 +4719,34 @@ } } -/* Internal log10() function that does not check for specials, zero, ... */ +/* + * Internal log10() function that does not check for specials, zero or one. + * Case SKIP_FINALIZE: + * Relative error: abs(result - log10(a)) < 0.1 * 10**-prec * abs(log10(a)) + * Case DO_FINALIZE: + * Ulp error: abs(result - log10(a)) < ulp(log10(a)) + */ +enum {SKIP_FINALIZE, DO_FINALIZE}; static void -_mpd_qlog10(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx, - uint32_t *status) +_mpd_qlog10(int action, mpd_t *result, const mpd_t *a, + const mpd_context_t *ctx, uint32_t *status) { mpd_context_t workctx; MPD_NEW_STATIC(ln10,0,0,0,0); mpd_maxcontext(&workctx); workctx.prec = ctx->prec + 3; + /* relative error: 0.1 * 10**(-p-3). The specific underflow shortcut + * in _mpd_qln() does not change the final result. */ _mpd_qln(result, a, &workctx, status); + /* relative error: 5 * 10**(-p-3) */ mpd_qln10(&ln10, workctx.prec, status); - workctx = *ctx; - workctx.round = MPD_ROUND_HALF_EVEN; + if (action == DO_FINALIZE) { + workctx = *ctx; + workctx.round = MPD_ROUND_HALF_EVEN; + } + /* SKIP_FINALIZE: relative error: 5 * 10**(-p-3) */ _mpd_qdiv(NO_IDEAL_EXP, result, result, &ln10, &workctx, status); mpd_del(&ln10); @@ -4807,9 +4827,9 @@ prec = ctx->prec + 3; while (1) { workctx.prec = prec; - _mpd_qlog10(result, a, &workctx, status); + _mpd_qlog10(SKIP_FINALIZE, result, a, &workctx, status); _ssettriple(&ulp, MPD_POS, 1, - result->exp + result->digits-workctx.prec-1); + result->exp + result->digits-workctx.prec); workctx.prec = ctx->prec; mpd_qadd(&t1, result, &ulp, &workctx, &workctx.status); @@ -4829,7 +4849,7 @@ mpd_del(&aa); } else { - _mpd_qlog10(result, a, &workctx, status); + _mpd_qlog10(DO_FINALIZE, result, a, &workctx, status); mpd_check_underflow(result, &workctx, status); } } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 8 18:56:36 2012 From: python-checkins at python.org (stefan.krah) Date: Fri, 08 Jun 2012 18:56:36 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?b?KTogTWVyZ2Uu?= Message-ID: http://hg.python.org/cpython/rev/162a5508017a changeset: 77386:162a5508017a parent: 77385:87a8a209c6e1 parent: 77384:6671c5039e15 user: Stefan Krah date: Fri Jun 08 18:55:22 2012 +0200 summary: Merge. files: Doc/library/datetime.rst | 44 +++++++++++++++++----- Lib/datetime.py | 11 +++++- Lib/test/datetimetester.py | 36 +++++++++++++++++++ Misc/NEWS | 2 + Modules/_datetimemodule.c | 49 ++++++++++++++++++++++++++ 5 files changed, 130 insertions(+), 12 deletions(-) diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -752,17 +752,6 @@ datetime(1970, 1, 1) + timedelta(seconds=timestamp) - There is no method to obtain the timestamp from a :class:`datetime` - instance, but POSIX timestamp corresponding to a :class:`datetime` - instance ``dt`` can be easily calculated as follows. For a naive - ``dt``:: - - timestamp = (dt - datetime(1970, 1, 1)) / timedelta(seconds=1) - - And for an aware ``dt``:: - - timestamp = (dt - datetime(1970, 1, 1, tzinfo=timezone.utc)) / timedelta(seconds=1) - .. versionchanged:: 3.3 Raise :exc:`OverflowError` instead of :exc:`ValueError` if the timestamp is out of the range of values supported by the platform C @@ -1054,6 +1043,39 @@ Return the proleptic Gregorian ordinal of the date. The same as ``self.date().toordinal()``. 
+.. method:: datetime.timestamp() + + Return POSIX timestamp corresponding to the :class:`datetime` + instance. The return value is a :class:`float` similar to that + returned by :func:`time.time`. + + Naive :class:`datetime` instances are assumed to represent local + time and this method relies on the platform C :c:func:`mktime` + function to perform the conversion. Since :class:`datetime` + supports wider range of values than :c:func:`mktime` on many + platforms, this method may raise :exc:`OverflowError` for times far + in the past or far in the future. + + For aware :class:`datetime` instances, the return value is computed + as:: + + (dt - datetime(1970, 1, 1, tzinfo=timezone.utc)).total_seconds() + + .. versionadded:: 3.3 + + .. note:: + + There is no method to obtain the POSIX timestamp directly from a + naive :class:`datetime` instance representing UTC time. If your + application uses this convention and your system timezone is not + set to UTC, you can obtain the POSIX timestamp by supplying + ``tzinfo=timezone.utc``:: + + timestamp = dt.replace(tzinfo=timezone.utc).timestamp() + + or by calculating the timestamp directly:: + + timestamp = (dt - datetime(1970, 1, 1)) / timedelta(seconds=1) .. method:: datetime.weekday() diff --git a/Lib/datetime.py b/Lib/datetime.py --- a/Lib/datetime.py +++ b/Lib/datetime.py @@ -1434,6 +1434,15 @@ self.hour, self.minute, self.second, dst) + def timestamp(self): + "Return POSIX timestamp as float" + if self._tzinfo is None: + return _time.mktime((self.year, self.month, self.day, + self.hour, self.minute, self.second, + -1, -1, -1)) + self.microsecond / 1e6 + else: + return (self - _EPOCH).total_seconds() + def utctimetuple(self): "Return UTC time tuple compatible with time.gmtime()." offset = self.utcoffset() @@ -1889,7 +1898,7 @@ timezone.utc = timezone._create(timedelta(0)) timezone.min = timezone._create(timezone._minoffset) timezone.max = timezone._create(timezone._maxoffset) - +_EPOCH = datetime(1970, 1, 1, tzinfo=timezone.utc) """ Some time zone algebra. For a datetime x, let x.n = x stripped of its timezone -- its naive time. diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -1735,6 +1735,42 @@ got = self.theclass.utcfromtimestamp(ts) self.verify_field_equality(expected, got) + # Run with US-style DST rules: DST begins 2 a.m. on second Sunday in + # March (M3.2.0) and ends 2 a.m. on first Sunday in November (M11.1.0). 
+ @support.run_with_tz('EST+05EDT,M3.2.0,M11.1.0') + def test_timestamp_naive(self): + t = self.theclass(1970, 1, 1) + self.assertEqual(t.timestamp(), 18000.0) + t = self.theclass(1970, 1, 1, 1, 2, 3, 4) + self.assertEqual(t.timestamp(), + 18000.0 + 3600 + 2*60 + 3 + 4*1e-6) + # Missing hour defaults to standard time + t = self.theclass(2012, 3, 11, 2, 30) + self.assertEqual(self.theclass.fromtimestamp(t.timestamp()), + t + timedelta(hours=1)) + # Ambiguous hour defaults to DST + t = self.theclass(2012, 11, 4, 1, 30) + self.assertEqual(self.theclass.fromtimestamp(t.timestamp()), t) + + # Timestamp may raise an overflow error on some platforms + for t in [self.theclass(1,1,1), self.theclass(9999,12,12)]: + try: + s = t.timestamp() + except OverflowError: + pass + else: + self.assertEqual(self.theclass.fromtimestamp(s), t) + + def test_timestamp_aware(self): + t = self.theclass(1970, 1, 1, tzinfo=timezone.utc) + self.assertEqual(t.timestamp(), 0.0) + t = self.theclass(1970, 1, 1, 1, 2, 3, 4, tzinfo=timezone.utc) + self.assertEqual(t.timestamp(), + 3600 + 2*60 + 3 + 4*1e-6) + t = self.theclass(1970, 1, 1, 1, 2, 3, 4, + tzinfo=timezone(timedelta(hours=-5), 'EST')) + self.assertEqual(t.timestamp(), + 18000 + 3600 + 2*60 + 3 + 4*1e-6) def test_microsecond_rounding(self): for fts in [self.theclass.fromtimestamp, self.theclass.utcfromtimestamp]: diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -21,6 +21,8 @@ Library ------- +- Issue #2736: Added datetime.timestamp() method. + - Issue #13854: Make multiprocessing properly handle non-integer non-string argument to SystemExit. diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -766,6 +766,8 @@ /* The interned UTC timezone instance */ static PyObject *PyDateTime_TimeZone_UTC; +/* The interned Epoch datetime instance */ +static PyObject *PyDateTime_Epoch; /* Create new timezone instance checking offset range. This function does not check the name argument. Caller must assure @@ -4748,6 +4750,44 @@ } static PyObject * +datetime_timestamp(PyDateTime_DateTime *self) +{ + PyObject *result; + + if (HASTZINFO(self) && self->tzinfo != Py_None) { + PyObject *delta; + delta = datetime_subtract((PyObject *)self, PyDateTime_Epoch); + if (delta == NULL) + return NULL; + result = delta_total_seconds(delta); + Py_DECREF(delta); + } + else { + struct tm time; + time_t timestamp; + memset((void *) &time, '\0', sizeof(struct tm)); + time.tm_year = GET_YEAR(self) - 1900; + time.tm_mon = GET_MONTH(self) - 1; + time.tm_mday = GET_DAY(self); + time.tm_hour = DATE_GET_HOUR(self); + time.tm_min = DATE_GET_MINUTE(self); + time.tm_sec = DATE_GET_SECOND(self); + time.tm_wday = -1; + time.tm_isdst = -1; + timestamp = mktime(&time); + /* Return value of -1 does not necessarily mean an error, but tm_wday + * cannot remain set to -1 if mktime succeeded. 
*/ + if (timestamp == (time_t)(-1) && time.tm_wday == -1) { + PyErr_SetString(PyExc_OverflowError, + "timestamp out of range"); + return NULL; + } + result = PyFloat_FromDouble(timestamp + DATE_GET_MICROSECOND(self) / 1e6); + } + return result; +} + +static PyObject * datetime_getdate(PyDateTime_DateTime *self) { return new_date(GET_YEAR(self), @@ -4894,6 +4934,9 @@ {"timetuple", (PyCFunction)datetime_timetuple, METH_NOARGS, PyDoc_STR("Return time tuple, compatible with time.localtime().")}, + {"timestamp", (PyCFunction)datetime_timestamp, METH_NOARGS, + PyDoc_STR("Return POSIX timestamp as float.")}, + {"utctimetuple", (PyCFunction)datetime_utctimetuple, METH_NOARGS, PyDoc_STR("Return UTC time tuple, compatible with time.localtime().")}, @@ -5151,6 +5194,12 @@ return NULL; Py_DECREF(x); + /* Epoch */ + PyDateTime_Epoch = new_datetime(1970, 1, 1, 0, 0, 0, 0, + PyDateTime_TimeZone_UTC); + if (PyDateTime_Epoch == NULL) + return NULL; + /* module initialization */ PyModule_AddIntConstant(m, "MINYEAR", MINYEAR); PyModule_AddIntConstant(m, "MAXYEAR", MAXYEAR); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 8 19:00:55 2012 From: python-checkins at python.org (alexander.belopolsky) Date: Fri, 08 Jun 2012 19:00:55 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Relax_datetime=2Etimestamp?= =?utf8?q?=28=29_test_around_DST_change?= Message-ID: http://hg.python.org/cpython/rev/239ebc022479 changeset: 77387:239ebc022479 user: Alexander Belopolsky date: Fri Jun 08 12:58:31 2012 -0400 summary: Relax datetime.timestamp() test around DST change files: Lib/test/datetimetester.py | 6 +++--- 1 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -1744,10 +1744,10 @@ t = self.theclass(1970, 1, 1, 1, 2, 3, 4) self.assertEqual(t.timestamp(), 18000.0 + 3600 + 2*60 + 3 + 4*1e-6) - # Missing hour defaults to standard time + # Missing hour may produce platform-dependent result t = self.theclass(2012, 3, 11, 2, 30) - self.assertEqual(self.theclass.fromtimestamp(t.timestamp()), - t + timedelta(hours=1)) + self.assertIn(self.theclass.fromtimestamp(t.timestamp()), + [t, t + timedelta(hours=1)]) # Ambiguous hour defaults to DST t = self.theclass(2012, 11, 4, 1, 30) self.assertEqual(self.theclass.fromtimestamp(t.timestamp()), t) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 8 19:00:56 2012 From: python-checkins at python.org (alexander.belopolsky) Date: Fri, 08 Jun 2012 19:00:56 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Relax_datetime=2Etimestamp?= =?utf8?q?=28=29_test_around_DST_change?= Message-ID: http://hg.python.org/cpython/rev/e6b8202443b6 changeset: 77388:e6b8202443b6 user: Alexander Belopolsky date: Fri Jun 08 13:00:27 2012 -0400 summary: Relax datetime.timestamp() test around DST change files: Lib/test/datetimetester.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -1747,7 +1747,7 @@ # Missing hour may produce platform-dependent result t = self.theclass(2012, 3, 11, 2, 30) self.assertIn(self.theclass.fromtimestamp(t.timestamp()), - [t, t + timedelta(hours=1)]) + [t - timedelta(hours=1), t + timedelta(hours=1)]) # Ambiguous hour defaults to DST t = self.theclass(2012, 11, 4, 1, 30) 
self.assertEqual(self.theclass.fromtimestamp(t.timestamp()), t) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 8 22:24:46 2012 From: python-checkins at python.org (raymond.hettinger) Date: Fri, 08 Jun 2012 22:24:46 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Code_cleanups?= Message-ID: http://hg.python.org/cpython/rev/aca1a271c4fc changeset: 77389:aca1a271c4fc branch: 2.7 parent: 77382:b873afe640e2 user: Raymond Hettinger date: Fri Jun 08 13:24:12 2012 -0700 summary: Code cleanups files: Doc/library/collections.rst | 64 ++++++------ Lib/collections.py | 117 ++++++++++++++--------- 2 files changed, 105 insertions(+), 76 deletions(-) diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst --- a/Doc/library/collections.rst +++ b/Doc/library/collections.rst @@ -601,47 +601,49 @@ >>> Point = namedtuple('Point', ['x', 'y'], verbose=True) class Point(tuple): - 'Point(x, y)' + 'Point(x, y)' - __slots__ = () + __slots__ = () - _fields = ('x', 'y') + _fields = ('x', 'y') - def __new__(_cls, x, y): - 'Create a new instance of Point(x, y)' - return _tuple.__new__(_cls, (x, y)) + def __new__(_cls, x, y): + 'Create a new instance of Point(x, y)' + return _tuple.__new__(_cls, (x, y)) - @classmethod - def _make(cls, iterable, new=tuple.__new__, len=len): - 'Make a new Point object from a sequence or iterable' - result = new(cls, iterable) - if len(result) != 2: - raise TypeError('Expected 2 arguments, got %d' % len(result)) - return result + @classmethod + def _make(cls, iterable, new=tuple.__new__, len=len): + 'Make a new Point object from a sequence or iterable' + result = new(cls, iterable) + if len(result) != 2: + raise TypeError('Expected 2 arguments, got %d' % len(result)) + return result - def __repr__(self): - 'Return a nicely formatted representation string' - return 'Point(x=%r, y=%r)' % self + def __repr__(self): + 'Return a nicely formatted representation string' + return 'Point(x=%r, y=%r)' % self - def _asdict(self): - 'Return a new OrderedDict which maps field names to their values' - return OrderedDict(zip(self._fields, self)) + def _asdict(self): + 'Return a new OrderedDict which maps field names to their values' + return OrderedDict(zip(self._fields, self)) - __dict__ = property(_asdict) + __dict__ = property(_asdict) - def _replace(_self, **kwds): - 'Return a new Point object replacing specified fields with new values' - result = _self._make(map(kwds.pop, ('x', 'y'), _self)) - if kwds: - raise ValueError('Got unexpected field names: %r' % kwds.keys()) - return result + def _replace(_self, **kwds): + 'Return a new Point object replacing specified fields with new values' + result = _self._make(map(kwds.pop, ('x', 'y'), _self)) + if kwds: + raise ValueError('Got unexpected field names: %r' % kwds.keys()) + return result - def __getnewargs__(self): - 'Return self as a plain tuple. Used by copy and pickle.' - return tuple(self) + def __getnewargs__(self): + 'Return self as a plain tuple. Used by copy and pickle.' 
+ return tuple(self) - x = _property(_itemgetter(0), doc='Alias for field number 0') - y = _property(_itemgetter(1), doc='Alias for field number 1') + x = _property(_itemgetter(0), doc='Alias for field number 0') + + y = _property(_itemgetter(1), doc='Alias for field number 1') + >>> p = Point(11, y=22) # instantiate with positional or keyword arguments >>> p[0] + p[1] # indexable like the plain tuple (11, 22) diff --git a/Lib/collections.py b/Lib/collections.py --- a/Lib/collections.py +++ b/Lib/collections.py @@ -234,10 +234,60 @@ ### namedtuple ################################################################################ +_class_template = '''\ +class {typename}(tuple): + '{typename}({arg_list})' + + __slots__ = () + + _fields = {field_names!r} + + def __new__(_cls, {arg_list}): + 'Create new instance of {typename}({arg_list})' + return _tuple.__new__(_cls, ({arg_list})) + + @classmethod + def _make(cls, iterable, new=tuple.__new__, len=len): + 'Make a new {typename} object from a sequence or iterable' + result = new(cls, iterable) + if len(result) != {num_fields:d}: + raise TypeError('Expected {num_fields:d} arguments, got %d' % len(result)) + return result + + def __repr__(self): + 'Return a nicely formatted representation string' + return '{typename}({repr_fmt})' % self + + def _asdict(self): + 'Return a new OrderedDict which maps field names to their values' + return OrderedDict(zip(self._fields, self)) + + __dict__ = property(_asdict) + + def _replace(_self, **kwds): + 'Return a new {typename} object replacing specified fields with new values' + result = _self._make(map(kwds.pop, {field_names!r}, _self)) + if kwds: + raise ValueError('Got unexpected field names: %r' % kwds.keys()) + return result + + def __getnewargs__(self): + 'Return self as a plain tuple. Used by copy and pickle.' + return tuple(self) + +{field_defs} +''' + +_repr_template = '{name}=%r' + +_field_template = '''\ + {name} = _property(_itemgetter({index:d}), doc='Alias for field number {index:d}') +''' + def namedtuple(typename, field_names, verbose=False, rename=False): """Returns a new subclass of tuple with named fields. 
- >>> Point = namedtuple('Point', 'x y') + >>> Point = namedtuple('Point', ['x', 'y']) >>> Point.__doc__ # docstring for the new class 'Point(x, y)' >>> p = Point(11, y=22) # instantiate with positional args or keywords @@ -267,8 +317,11 @@ names = list(field_names) seen = set() for i, name in enumerate(names): - if (not all(c.isalnum() or c=='_' for c in name) or _iskeyword(name) - or not name or name[0].isdigit() or name.startswith('_') + if (not all(c.isalnum() or c=='_' for c in name) + or _iskeyword(name) + or not name + or name[0].isdigit() + or name.startswith('_') or name in seen): names[i] = '_%d' % i seen.add(name) @@ -280,60 +333,34 @@ raise ValueError('Type names and field names cannot be a keyword: %r' % name) if name[0].isdigit(): raise ValueError('Type names and field names cannot start with a number: %r' % name) - seen_names = set() + seen = set() for name in field_names: if name.startswith('_') and not rename: raise ValueError('Field names cannot start with an underscore: %r' % name) - if name in seen_names: + if name in seen: raise ValueError('Encountered duplicate field name: %r' % name) - seen_names.add(name) + seen.add(name) - # Create and fill-in the class template - numfields = len(field_names) - argtxt = repr(field_names).replace("'", "")[1:-1] # tuple repr without parens or quotes - reprtxt = ', '.join('%s=%%r' % name for name in field_names) - template = '''class %(typename)s(tuple): - '%(typename)s(%(argtxt)s)' \n - __slots__ = () \n - _fields = %(field_names)r \n - def __new__(_cls, %(argtxt)s): - 'Create new instance of %(typename)s(%(argtxt)s)' - return _tuple.__new__(_cls, (%(argtxt)s)) \n - @classmethod - def _make(cls, iterable, new=tuple.__new__, len=len): - 'Make a new %(typename)s object from a sequence or iterable' - result = new(cls, iterable) - if len(result) != %(numfields)d: - raise TypeError('Expected %(numfields)d arguments, got %%d' %% len(result)) - return result \n - def __repr__(self): - 'Return a nicely formatted representation string' - return '%(typename)s(%(reprtxt)s)' %% self \n - def _asdict(self): - 'Return a new OrderedDict which maps field names to their values' - return OrderedDict(zip(self._fields, self)) \n - __dict__ = property(_asdict) \n - def _replace(_self, **kwds): - 'Return a new %(typename)s object replacing specified fields with new values' - result = _self._make(map(kwds.pop, %(field_names)r, _self)) - if kwds: - raise ValueError('Got unexpected field names: %%r' %% kwds.keys()) - return result \n - def __getnewargs__(self): - 'Return self as a plain tuple. Used by copy and pickle.' 
- return tuple(self) \n\n''' % locals() - for i, name in enumerate(field_names): - template += " %s = _property(_itemgetter(%d), doc='Alias for field number %d')\n" % (name, i, i) + # Fill-in the class template + class_definition = _class_template.format( + typename = typename, + field_names = tuple(field_names), + num_fields = len(field_names), + arg_list = repr(tuple(field_names)).replace("'", "")[1:-1], + repr_fmt = ', '.join(_repr_template.format(name=name) for name in field_names), + field_defs = '\n'.join(_field_template.format(index=index, name=name) + for index, name in enumerate(field_names)) + ) if verbose: - print template + print class_definition # Execute the template string in a temporary namespace and # support tracing utilities by setting a value for frame.f_globals['__name__'] namespace = dict(_itemgetter=_itemgetter, __name__='namedtuple_%s' % typename, OrderedDict=OrderedDict, _property=property, _tuple=tuple) try: - exec template in namespace - except SyntaxError, e: + exec class_definition in namespace + except SyntaxError as e: raise SyntaxError(e.message + ':\n' + template) result = namespace[typename] -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 9 00:03:26 2012 From: python-checkins at python.org (stefan.krah) Date: Sat, 09 Jun 2012 00:03:26 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Enumerate_all_cases_in_the_?= =?utf8?q?overflow_detection_strategy_in_mpd=5Fqlog10=28=29=2E?= Message-ID: http://hg.python.org/cpython/rev/ace3a7eb10a8 changeset: 77390:ace3a7eb10a8 parent: 77388:e6b8202443b6 user: Stefan Krah date: Sat Jun 09 00:01:28 2012 +0200 summary: Enumerate all cases in the overflow detection strategy in mpd_qlog10(). files: Modules/_decimal/libmpdec/mpdecimal.c | 21 ++++++++++++-- 1 files changed, 17 insertions(+), 4 deletions(-) diff --git a/Modules/_decimal/libmpdec/mpdecimal.c b/Modules/_decimal/libmpdec/mpdecimal.c --- a/Modules/_decimal/libmpdec/mpdecimal.c +++ b/Modules/_decimal/libmpdec/mpdecimal.c @@ -4793,12 +4793,25 @@ mpd_qfinalize(result, &workctx, status); return; } - /* Check if the result will overflow. + /* + * Check if the result will overflow (0 < x, x != 1): + * 1) log10(x) < 0 iff adjexp(x) < 0 + * 2) 0 < x /\ x <= y ==> adjexp(x) <= adjexp(y) + * 3) adjexp(x) <= log10(x) < adjexp(x) + 1 * - * 1) adjexp(a) + 1 > log10(a) >= adjexp(a) + * Case adjexp(x) >= 0: + * 4) adjexp(x) <= abs(log10(x)) + * Case adjexp(x) > 0: + * 5) adjexp(adjexp(x)) <= adjexp(abs(log10(x))) + * Case adjexp(x) == 0: + * mpd_exp_digits(t)-1 == 0 <= emax (the shortcut is not triggered) * - * 2) |log10(a)| >= adjexp(a), if adjexp(a) >= 0 - * |log10(a)| > -adjexp(a)-1, if adjexp(a) < 0 + * Case adjexp(x) < 0: + * 6) -adjexp(x) - 1 < abs(log10(x)) + * Case adjexp(x) < -1: + * 7) adjexp(-adjexp(x) - 1) <= adjexp(abs(log(x))) + * Case adjexp(x) == -1: + * mpd_exp_digits(t)-1 == 0 <= emax (the shortcut is not triggered) */ adjexp = mpd_adjexp(a); t = (adjexp < 0) ? -adjexp-1 : adjexp; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 9 04:46:33 2012 From: python-checkins at python.org (r.david.murray) Date: Sat, 09 Jun 2012 04:46:33 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Now_that_Defects_are_Except?= =?utf8?q?ion_subclasses=2C_call_super=2E?= Message-ID: http://hg.python.org/cpython/rev/10a8ad665749 changeset: 77391:10a8ad665749 user: R David Murray date: Fri Jun 08 22:45:46 2012 -0400 summary: Now that Defects are Exception subclasses, call super. 
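In practical terms, forwarding the constructor argument to the exception machinery gives a defect normal exception behavior (populated args, a useful str()) while the legacy attribute is still set. A minimal sketch of the pattern, not the actual Lib/email/errors.py source (the real module defines many more defect classes and docstrings):

    class MessageDefect(ValueError):
        """Base class for a message defect."""

        def __init__(self, line=None):
            if line is not None:
                # Forward to Exception.__init__ so args/str/repr work normally.
                super().__init__(line)
            # Keep the legacy attribute for backward compatibility.
            self.line = line

    class NoBoundaryInMultipartDefect(MessageDefect):
        """A message claimed to be multipart but had no boundary."""

    d = NoBoundaryInMultipartDefect('Content-Type: multipart/mixed')
    print(d.args)    # ('Content-Type: multipart/mixed',)
    print(str(d))    # Content-Type: multipart/mixed
    print(d.line)    # Content-Type: multipart/mixed
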
The behavior of MessageDefect is legacy behavior. The chances anyone is actually using the undocumented 'line' attribute is low, but it costs little to retain backward compatibility. Although one of the costs is having to restore normal exception behavior in HeaderDefect. On the other hand, I'll probably add some specialized behavior there later. files: Lib/email/errors.py | 5 +++++ 1 files changed, 5 insertions(+), 0 deletions(-) diff --git a/Lib/email/errors.py b/Lib/email/errors.py --- a/Lib/email/errors.py +++ b/Lib/email/errors.py @@ -34,6 +34,8 @@ """Base class for a message defect.""" def __init__(self, line=None): + if line is not None: + super().__init__(line) self.line = line class NoBoundaryInMultipartDefect(MessageDefect): @@ -76,6 +78,9 @@ class HeaderDefect(MessageDefect): """Base class for a header defect.""" + def __init__(self, *args, **kw): + super().__init__(*args, **kw) + class InvalidHeaderDefect(HeaderDefect): """Header is not valid, message gives details.""" -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Sat Jun 9 05:47:45 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Sat, 09 Jun 2012 05:47:45 +0200 Subject: [Python-checkins] Daily reference leaks (ace3a7eb10a8): sum=0 Message-ID: results for ace3a7eb10a8 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogH0a8vO', '-x'] From python-checkins at python.org Sat Jun 9 15:30:31 2012 From: python-checkins at python.org (stefan.krah) Date: Sat, 09 Jun 2012 15:30:31 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Add_one_extra_comparison_to?= =?utf8?q?_the_=5Fmpd=5Fshortmul=28=29_case_to_avoid_repetitive_code=2E?= Message-ID: http://hg.python.org/cpython/rev/08c474d33f82 changeset: 77392:08c474d33f82 user: Stefan Krah date: Sat Jun 09 15:28:36 2012 +0200 summary: Add one extra comparison to the _mpd_shortmul() case to avoid repetitive code. 
files: Modules/_decimal/libmpdec/mpdecimal.c | 26 +++++--------- 1 files changed, 9 insertions(+), 17 deletions(-) diff --git a/Modules/_decimal/libmpdec/mpdecimal.c b/Modules/_decimal/libmpdec/mpdecimal.c --- a/Modules/_decimal/libmpdec/mpdecimal.c +++ b/Modules/_decimal/libmpdec/mpdecimal.c @@ -5543,32 +5543,24 @@ if (small->len == 1) { - if ((rdata = mpd_calloc(rsize, sizeof *rdata)) == NULL) { - mpd_seterror(result, MPD_Malloc_error, status); - return; - } - _mpd_shortmul(rdata, big->data, big->len, small->data[0]); + rdata = mpd_calloc(rsize, sizeof *rdata); + if (rdata != NULL) { + _mpd_shortmul(rdata, big->data, big->len, small->data[0]); + } } else if (rsize <= 1024) { rdata = _mpd_kmul(big->data, small->data, big->len, small->len, &rsize); - if (rdata == NULL) { - mpd_seterror(result, MPD_Malloc_error, status); - return; - } } else if (rsize <= 3*MPD_MAXTRANSFORM_2N) { rdata = _mpd_fntmul(big->data, small->data, big->len, small->len, &rsize); - if (rdata == NULL) { - mpd_seterror(result, MPD_Malloc_error, status); - return; - } } else { rdata = _mpd_kmul_fnt(big->data, small->data, big->len, small->len, &rsize); - if (rdata == NULL) { - mpd_seterror(result, MPD_Malloc_error, status); /* GCOV_UNLIKELY */ - return; /* GCOV_UNLIKELY */ - } + } + + if (rdata == NULL) { + mpd_seterror(result, MPD_Malloc_error, status); + return; } if (mpd_isdynamic_data(result)) { -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 9 18:32:07 2012 From: python-checkins at python.org (michael.foord) Date: Sat, 09 Jun 2012 18:32:07 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_exception_when_calling_?= =?utf8?q?reset=5Fmock_on_a_mock_created_with_autospec?= Message-ID: http://hg.python.org/cpython/rev/2059910e7d76 changeset: 77393:2059910e7d76 user: Michael Foord date: Sat Jun 09 17:31:59 2012 +0100 summary: Fix exception when calling reset_mock on a mock created with autospec files: Lib/unittest/mock.py | 3 +++ Lib/unittest/test/testmock/testhelpers.py | 7 +++++++ Lib/unittest/test/testmock/testmagicmethods.py | 8 ++++++++ 3 files changed, 18 insertions(+), 0 deletions(-) diff --git a/Lib/unittest/mock.py b/Lib/unittest/mock.py --- a/Lib/unittest/mock.py +++ b/Lib/unittest/mock.py @@ -510,6 +510,8 @@ self.method_calls = _CallList() for child in self._mock_children.values(): + if isinstance(child, _SpecState): + continue child.reset_mock() ret = self._mock_return_value @@ -664,6 +666,7 @@ # but not method calls _check_and_set_parent(self, value, None, name) setattr(type(self), name, value) + self._mock_children[name] = value elif name == '__class__': self._spec_class = value return diff --git a/Lib/unittest/test/testmock/testhelpers.py b/Lib/unittest/test/testmock/testhelpers.py --- a/Lib/unittest/test/testmock/testhelpers.py +++ b/Lib/unittest/test/testmock/testhelpers.py @@ -355,6 +355,13 @@ self.assertEqual(mock(), 'foo') + def test_autospec_reset_mock(self): + m = create_autospec(int) + int(m) + m.reset_mock() + self.assertEqual(m.__int__.call_count, 0) + + def test_mocking_unbound_methods(self): class Foo(object): def foo(self, foo): diff --git a/Lib/unittest/test/testmock/testmagicmethods.py b/Lib/unittest/test/testmock/testmagicmethods.py --- a/Lib/unittest/test/testmock/testmagicmethods.py +++ b/Lib/unittest/test/testmock/testmagicmethods.py @@ -345,6 +345,14 @@ self.assertEqual(mock[1][2][3], 3) + def test_magic_method_reset_mock(self): + mock = MagicMock() + str(mock) + self.assertTrue(mock.__str__.called) + mock.reset_mock() + 
self.assertFalse(mock.__str__.called) + + def test_dir(self): # overriding the default implementation for mock in Mock(), MagicMock(): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 9 22:04:47 2012 From: python-checkins at python.org (raymond.hettinger) Date: Sat, 09 Jun 2012 22:04:47 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Wrap_fat_lines_?= =?utf8?q?and_improve_some_variable_names=2E?= Message-ID: http://hg.python.org/cpython/rev/b15d5b2c9051 changeset: 77394:b15d5b2c9051 branch: 2.7 parent: 77389:aca1a271c4fc user: Raymond Hettinger date: Sat Jun 09 13:04:29 2012 -0700 summary: Wrap fat lines and improve some variable names. files: Lib/collections.py | 37 ++++++++++++++++++--------------- 1 files changed, 20 insertions(+), 17 deletions(-) diff --git a/Lib/collections.py b/Lib/collections.py --- a/Lib/collections.py +++ b/Lib/collections.py @@ -308,35 +308,37 @@ """ - # Parse and validate the field names. Validation serves two purposes, - # generating informative error messages and preventing template injection attacks. + # Validate the field names. At the user's option, either generate an error + # message or automatically replace the field name with a valid name. if isinstance(field_names, basestring): - field_names = field_names.replace(',', ' ').split() # names separated by whitespace and/or commas - field_names = tuple(map(str, field_names)) + field_names = field_names.replace(',', ' ').split() + field_names = map(str, field_names) if rename: - names = list(field_names) seen = set() - for i, name in enumerate(names): + for index, name in enumerate(field_names): if (not all(c.isalnum() or c=='_' for c in name) or _iskeyword(name) or not name or name[0].isdigit() or name.startswith('_') or name in seen): - names[i] = '_%d' % i + field_names[index] = '_%d' % index seen.add(name) - field_names = tuple(names) - for name in (typename,) + field_names: + for name in [typename] + field_names: if not all(c.isalnum() or c=='_' for c in name): - raise ValueError('Type names and field names can only contain alphanumeric characters and underscores: %r' % name) + raise ValueError('Type names and field names can only contain ' + 'alphanumeric characters and underscores: %r' % name) if _iskeyword(name): - raise ValueError('Type names and field names cannot be a keyword: %r' % name) + raise ValueError('Type names and field names cannot be a ' + 'keyword: %r' % name) if name[0].isdigit(): - raise ValueError('Type names and field names cannot start with a number: %r' % name) + raise ValueError('Type names and field names cannot start with ' + 'a number: %r' % name) seen = set() for name in field_names: if name.startswith('_') and not rename: - raise ValueError('Field names cannot start with an underscore: %r' % name) + raise ValueError('Field names cannot start with an underscore:' + '%r' % name) if name in seen: raise ValueError('Encountered duplicate field name: %r' % name) seen.add(name) @@ -347,21 +349,22 @@ field_names = tuple(field_names), num_fields = len(field_names), arg_list = repr(tuple(field_names)).replace("'", "")[1:-1], - repr_fmt = ', '.join(_repr_template.format(name=name) for name in field_names), + repr_fmt = ', '.join(_repr_template.format(name=name) + for name in field_names), field_defs = '\n'.join(_field_template.format(index=index, name=name) for index, name in enumerate(field_names)) ) if verbose: print class_definition - # Execute the template string in a temporary namespace and - # support tracing utilities 
by setting a value for frame.f_globals['__name__'] + # Execute the template string in a temporary namespace and support + # tracing utilities by setting a value for frame.f_globals['__name__'] namespace = dict(_itemgetter=_itemgetter, __name__='namedtuple_%s' % typename, OrderedDict=OrderedDict, _property=property, _tuple=tuple) try: exec class_definition in namespace except SyntaxError as e: - raise SyntaxError(e.message + ':\n' + template) + raise SyntaxError(e.message + ':\n' + class_definition) result = namespace[typename] # For pickling to work, the __module__ variable needs to be set to the frame -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 10 02:27:33 2012 From: python-checkins at python.org (raymond.hettinger) Date: Sun, 10 Jun 2012 02:27:33 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Minor_formattin?= =?utf8?q?g_fix=3Dup?= Message-ID: http://hg.python.org/cpython/rev/272e7dcffd30 changeset: 77395:272e7dcffd30 branch: 2.7 user: Raymond Hettinger date: Sat Jun 09 17:27:23 2012 -0700 summary: Minor formatting fix=up files: Lib/collections.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/collections.py b/Lib/collections.py --- a/Lib/collections.py +++ b/Lib/collections.py @@ -337,7 +337,7 @@ seen = set() for name in field_names: if name.startswith('_') and not rename: - raise ValueError('Field names cannot start with an underscore:' + raise ValueError('Field names cannot start with an underscore: ' '%r' % name) if name in seen: raise ValueError('Encountered duplicate field name: %r' % name) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 10 03:46:56 2012 From: python-checkins at python.org (raymond.hettinger) Date: Sun, 10 Jun 2012 03:46:56 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Minor_reformatting_=28wrap_?= =?utf8?q?fat_lines=2C_etc=2E=29_and_create_an_=5F=5Fmain=5F=5F_file?= Message-ID: http://hg.python.org/cpython/rev/626e9d6210a0 changeset: 77396:626e9d6210a0 parent: 77393:2059910e7d76 user: Raymond Hettinger date: Sat Jun 09 18:46:45 2012 -0700 summary: Minor reformatting (wrap fat lines, etc.) and create an __main__ file files: Lib/collections/__init__.py | 66 +++++------------------- Lib/collections/__main__.py | 38 ++++++++++++++ 2 files changed, 53 insertions(+), 51 deletions(-) diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py --- a/Lib/collections/__init__.py +++ b/Lib/collections/__init__.py @@ -315,10 +315,10 @@ """ - # Parse and validate the field names. Validation serves two purposes, - # generating informative error messages and preventing template injection attacks. + # Validate the field names. At the user's option, either generate an error + # message or automatically replace the field name with a valid name. 
if isinstance(field_names, str): - field_names = field_names.replace(',', ' ').split() # names separated by whitespace and/or commas + field_names = field_names.replace(',', ' ').split() field_names = list(map(str, field_names)) if rename: seen = set() @@ -333,15 +333,19 @@ seen.add(name) for name in [typename] + field_names: if not all(c.isalnum() or c=='_' for c in name): - raise ValueError('Type names and field names can only contain alphanumeric characters and underscores: %r' % name) + raise ValueError('Type names and field names can only contain ' + 'alphanumeric characters and underscores: %r' % name) if _iskeyword(name): - raise ValueError('Type names and field names cannot be a keyword: %r' % name) + raise ValueError('Type names and field names cannot be a ' + 'keyword: %r' % name) if name[0].isdigit(): - raise ValueError('Type names and field names cannot start with a number: %r' % name) + raise ValueError('Type names and field names cannot start with ' + 'a number: %r' % name) seen = set() for name in field_names: if name.startswith('_') and not rename: - raise ValueError('Field names cannot start with an underscore: %r' % name) + raise ValueError('Field names cannot start with an underscore: ' + '%r' % name) if name in seen: raise ValueError('Encountered duplicate field name: %r' % name) seen.add(name) @@ -352,13 +356,14 @@ field_names = tuple(field_names), num_fields = len(field_names), arg_list = repr(tuple(field_names)).replace("'", "")[1:-1], - repr_fmt = ', '.join(_repr_template.format(name=name) for name in field_names), + repr_fmt = ', '.join(_repr_template.format(name=name) + for name in field_names), field_defs = '\n'.join(_field_template.format(index=index, name=name) for index, name in enumerate(field_names)) ) - # Execute the template string in a temporary namespace and - # support tracing utilities by setting a value for frame.f_globals['__name__'] + # Execute the template string in a temporary namespace and support + # tracing utilities by setting a value for frame.f_globals['__name__'] namespace = dict(__name__='namedtuple_%s' % typename) try: exec(class_definition, namespace) @@ -1122,44 +1127,3 @@ return self.__class__(self.data.translate(*args)) def upper(self): return self.__class__(self.data.upper()) def zfill(self, width): return self.__class__(self.data.zfill(width)) - - - -################################################################################ -### Simple tests -################################################################################ - -if __name__ == '__main__': - # verify that instances can be pickled - from pickle import loads, dumps - Point = namedtuple('Point', 'x, y', True) - p = Point(x=10, y=20) - assert p == loads(dumps(p)) - - # test and demonstrate ability to override methods - class Point(namedtuple('Point', 'x y')): - __slots__ = () - @property - def hypot(self): - return (self.x ** 2 + self.y ** 2) ** 0.5 - def __str__(self): - return 'Point: x=%6.3f y=%6.3f hypot=%6.3f' % (self.x, self.y, self.hypot) - - for p in Point(3, 4), Point(14, 5/7.): - print (p) - - class Point(namedtuple('Point', 'x y')): - 'Point class with optimized _make() and _replace() without error-checking' - __slots__ = () - _make = classmethod(tuple.__new__) - def _replace(self, _map=map, **kwds): - return self._make(_map(kwds.get, ('x', 'y'), self)) - - print(Point(11, 22)._replace(x=100)) - - Point3D = namedtuple('Point3D', Point._fields + ('z',)) - print(Point3D.__doc__) - - import doctest - TestResults = namedtuple('TestResults', 'failed attempted') - 
print(TestResults(*doctest.testmod())) diff --git a/Lib/collections/__main__.py b/Lib/collections/__main__.py new file mode 100644 --- /dev/null +++ b/Lib/collections/__main__.py @@ -0,0 +1,38 @@ +################################################################################ +### Simple tests +################################################################################ + +# verify that instances can be pickled +from collections import namedtuple +from pickle import loads, dumps +Point = namedtuple('Point', 'x, y', True) +p = Point(x=10, y=20) +assert p == loads(dumps(p)) + +# test and demonstrate ability to override methods +class Point(namedtuple('Point', 'x y')): + __slots__ = () + @property + def hypot(self): + return (self.x ** 2 + self.y ** 2) ** 0.5 + def __str__(self): + return 'Point: x=%6.3f y=%6.3f hypot=%6.3f' % (self.x, self.y, self.hypot) + +for p in Point(3, 4), Point(14, 5/7.): + print (p) + +class Point(namedtuple('Point', 'x y')): + 'Point class with optimized _make() and _replace() without error-checking' + __slots__ = () + _make = classmethod(tuple.__new__) + def _replace(self, _map=map, **kwds): + return self._make(_map(kwds.get, ('x', 'y'), self)) + +print(Point(11, 22)._replace(x=100)) + +Point3D = namedtuple('Point3D', Point._fields + ('z',)) +print(Point3D.__doc__) + +import doctest, collections +TestResults = namedtuple('TestResults', 'failed attempted') +print(TestResults(*doctest.testmod(collections))) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 10 04:15:34 2012 From: python-checkins at python.org (raymond.hettinger) Date: Sun, 10 Jun 2012 04:15:34 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Note_that_the_=5Fasdict=28?= =?utf8?q?=29_method_is_outdated?= Message-ID: http://hg.python.org/cpython/rev/fecbcd5c3978 changeset: 77397:fecbcd5c3978 user: Raymond Hettinger date: Sat Jun 09 19:15:26 2012 -0700 summary: Note that the _asdict() method is outdated files: Doc/library/collections.rst | 937 ++++++++++++----------- 1 files changed, 469 insertions(+), 468 deletions(-) diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst --- a/Doc/library/collections.rst +++ b/Doc/library/collections.rst @@ -2,15 +2,15 @@ ========================================== .. module:: collections - :synopsis: Container datatypes + :synopsis: Container datatypes .. moduleauthor:: Raymond Hettinger .. sectionauthor:: Raymond Hettinger .. testsetup:: * - from collections import * - import itertools - __name__ = '' + from collections import * + import itertools + __name__ = '' **Source code:** :source:`Lib/collections/__init__.py` @@ -33,9 +33,9 @@ ===================== ==================================================================== .. versionchanged:: 3.3 - Moved :ref:`collections-abstract-base-classes` to the :mod:`collections.abc` module. - For backwards compatibility, they continue to be visible in this module - as well. + Moved :ref:`collections-abstract-base-classes` to the :mod:`collections.abc` module. + For backwards compatibility, they continue to be visible in this module + as well. :class:`ChainMap` objects @@ -51,105 +51,105 @@ .. class:: ChainMap(*maps) - A :class:`ChainMap` groups multiple dicts or other mappings together to - create a single, updateable view. If no *maps* are specified, a single empty - dictionary is provided so that a new chain always has at least one mapping. + A :class:`ChainMap` groups multiple dicts or other mappings together to + create a single, updateable view. 
If no *maps* are specified, a single empty + dictionary is provided so that a new chain always has at least one mapping. - The underlying mappings are stored in a list. That list is public and can - accessed or updated using the *maps* attribute. There is no other state. + The underlying mappings are stored in a list. That list is public and can + accessed or updated using the *maps* attribute. There is no other state. - Lookups search the underlying mappings successively until a key is found. In - contrast, writes, updates, and deletions only operate on the first mapping. + Lookups search the underlying mappings successively until a key is found. In + contrast, writes, updates, and deletions only operate on the first mapping. - A :class:`ChainMap` incorporates the underlying mappings by reference. So, if - one of the underlying mappings gets updated, those changes will be reflected - in :class:`ChainMap`. + A :class:`ChainMap` incorporates the underlying mappings by reference. So, if + one of the underlying mappings gets updated, those changes will be reflected + in :class:`ChainMap`. - All of the usual dictionary methods are supported. In addition, there is a - *maps* attribute, a method for creating new subcontexts, and a property for - accessing all but the first mapping: + All of the usual dictionary methods are supported. In addition, there is a + *maps* attribute, a method for creating new subcontexts, and a property for + accessing all but the first mapping: - .. attribute:: maps + .. attribute:: maps - A user updateable list of mappings. The list is ordered from - first-searched to last-searched. It is the only stored state and can - be modified to change which mappings are searched. The list should - always contain at least one mapping. + A user updateable list of mappings. The list is ordered from + first-searched to last-searched. It is the only stored state and can + be modified to change which mappings are searched. The list should + always contain at least one mapping. - .. method:: new_child() + .. method:: new_child() - Returns a new :class:`ChainMap` containing a new :class:`dict` followed by - all of the maps in the current instance. A call to ``d.new_child()`` is - equivalent to: ``ChainMap({}, *d.maps)``. This method is used for - creating subcontexts that can be updated without altering values in any - of the parent mappings. + Returns a new :class:`ChainMap` containing a new :class:`dict` followed by + all of the maps in the current instance. A call to ``d.new_child()`` is + equivalent to: ``ChainMap({}, *d.maps)``. This method is used for + creating subcontexts that can be updated without altering values in any + of the parent mappings. - .. method:: parents() + .. method:: parents() - Returns a new :class:`ChainMap` containing all of the maps in the current - instance except the first one. This is useful for skipping the first map - in the search. The use-cases are similar to those for the - :keyword:`nonlocal` keyword used in :term:`nested scopes `. - The use-cases also parallel those for the builtin :func:`super` function. - A reference to ``d.parents`` is equivalent to: ``ChainMap(*d.maps[1:])``. + Returns a new :class:`ChainMap` containing all of the maps in the current + instance except the first one. This is useful for skipping the first map + in the search. The use-cases are similar to those for the + :keyword:`nonlocal` keyword used in :term:`nested scopes `. + The use-cases also parallel those for the builtin :func:`super` function. 
+ A reference to ``d.parents`` is equivalent to: ``ChainMap(*d.maps[1:])``. - Example of simulating Python's internal lookup chain:: + Example of simulating Python's internal lookup chain:: - import builtins - pylookup = ChainMap(locals(), globals(), vars(builtins)) + import builtins + pylookup = ChainMap(locals(), globals(), vars(builtins)) - Example of letting user specified values take precedence over environment - variables which in turn take precedence over default values:: + Example of letting user specified values take precedence over environment + variables which in turn take precedence over default values:: - import os, argparse - defaults = {'color': 'red', 'user': guest} - parser = argparse.ArgumentParser() - parser.add_argument('-u', '--user') - parser.add_argument('-c', '--color') - user_specified = vars(parser.parse_args()) - combined = ChainMap(user_specified, os.environ, defaults) + import os, argparse + defaults = {'color': 'red', 'user': guest} + parser = argparse.ArgumentParser() + parser.add_argument('-u', '--user') + parser.add_argument('-c', '--color') + user_specified = vars(parser.parse_args()) + combined = ChainMap(user_specified, os.environ, defaults) - Example patterns for using the :class:`ChainMap` class to simulate nested - contexts:: + Example patterns for using the :class:`ChainMap` class to simulate nested + contexts:: - c = ChainMap() # Create root context - d = c.new_child() # Create nested child context - e = c.new_child() # Child of c, independent from d - e.maps[0] # Current context dictionary -- like Python's locals() - e.maps[-1] # Root context -- like Python's globals() - e.parents # Enclosing context chain -- like Python's nonlocals + c = ChainMap() # Create root context + d = c.new_child() # Create nested child context + e = c.new_child() # Child of c, independent from d + e.maps[0] # Current context dictionary -- like Python's locals() + e.maps[-1] # Root context -- like Python's globals() + e.parents # Enclosing context chain -- like Python's nonlocals - d['x'] # Get first key in the chain of contexts - d['x'] = 1 # Set value in current context - del['x'] # Delete from current context - list(d) # All nested values - k in d # Check all nested values - len(d) # Number of nested values - d.items() # All nested items - dict(d) # Flatten into a regular dictionary + d['x'] # Get first key in the chain of contexts + d['x'] = 1 # Set value in current context + del['x'] # Delete from current context + list(d) # All nested values + k in d # Check all nested values + len(d) # Number of nested values + d.items() # All nested items + dict(d) # Flatten into a regular dictionary - .. seealso:: + .. seealso:: - * The `MultiContext class - `_ - in the Enthought `CodeTools package - `_ has options to support - writing to any mapping in the chain. + * The `MultiContext class + `_ + in the Enthought `CodeTools package + `_ has options to support + writing to any mapping in the chain. - * Django's `Context class - `_ - for templating is a read-only chain of mappings. It also features - pushing and popping of contexts similar to the - :meth:`~collections.ChainMap.new_child` method and the - :meth:`~collections.ChainMap.parents` property. + * Django's `Context class + `_ + for templating is a read-only chain of mappings. It also features + pushing and popping of contexts similar to the + :meth:`~collections.ChainMap.new_child` method and the + :meth:`~collections.ChainMap.parents` property. 
- * The `Nested Contexts recipe - `_ has options to control - whether writes and other mutations apply only to the first mapping or to - any mapping in the chain. + * The `Nested Contexts recipe + `_ has options to control + whether writes and other mutations apply only to the first mapping or to + any mapping in the chain. - * A `greatly simplified read-only version of Chainmap - `_. + * A `greatly simplified read-only version of Chainmap + `_. :class:`Counter` objects @@ -174,85 +174,85 @@ .. class:: Counter([iterable-or-mapping]) - A :class:`Counter` is a :class:`dict` subclass for counting hashable objects. - It is an unordered collection where elements are stored as dictionary keys - and their counts are stored as dictionary values. Counts are allowed to be - any integer value including zero or negative counts. The :class:`Counter` - class is similar to bags or multisets in other languages. + A :class:`Counter` is a :class:`dict` subclass for counting hashable objects. + It is an unordered collection where elements are stored as dictionary keys + and their counts are stored as dictionary values. Counts are allowed to be + any integer value including zero or negative counts. The :class:`Counter` + class is similar to bags or multisets in other languages. - Elements are counted from an *iterable* or initialized from another - *mapping* (or counter): + Elements are counted from an *iterable* or initialized from another + *mapping* (or counter): >>> c = Counter() # a new, empty counter >>> c = Counter('gallahad') # a new counter from an iterable >>> c = Counter({'red': 4, 'blue': 2}) # a new counter from a mapping >>> c = Counter(cats=4, dogs=8) # a new counter from keyword args - Counter objects have a dictionary interface except that they return a zero - count for missing items instead of raising a :exc:`KeyError`: + Counter objects have a dictionary interface except that they return a zero + count for missing items instead of raising a :exc:`KeyError`: >>> c = Counter(['eggs', 'ham']) >>> c['bacon'] # count of a missing element is zero 0 - Setting a count to zero does not remove an element from a counter. - Use ``del`` to remove it entirely: + Setting a count to zero does not remove an element from a counter. + Use ``del`` to remove it entirely: >>> c['sausage'] = 0 # counter entry with a zero count >>> del c['sausage'] # del actually removes the entry - .. versionadded:: 3.1 + .. versionadded:: 3.1 - Counter objects support three methods beyond those available for all - dictionaries: + Counter objects support three methods beyond those available for all + dictionaries: - .. method:: elements() + .. method:: elements() - Return an iterator over elements repeating each as many times as its - count. Elements are returned in arbitrary order. If an element's count - is less than one, :meth:`elements` will ignore it. + Return an iterator over elements repeating each as many times as its + count. Elements are returned in arbitrary order. If an element's count + is less than one, :meth:`elements` will ignore it. >>> c = Counter(a=4, b=2, c=0, d=-2) >>> list(c.elements()) ['a', 'a', 'a', 'a', 'b', 'b'] - .. method:: most_common([n]) + .. method:: most_common([n]) - Return a list of the *n* most common elements and their counts from the - most common to the least. If *n* is not specified, :func:`most_common` - returns *all* elements in the counter. 
Elements with equal counts are - ordered arbitrarily: + Return a list of the *n* most common elements and their counts from the + most common to the least. If *n* is not specified, :func:`most_common` + returns *all* elements in the counter. Elements with equal counts are + ordered arbitrarily: >>> Counter('abracadabra').most_common(3) [('a', 5), ('r', 2), ('b', 2)] - .. method:: subtract([iterable-or-mapping]) + .. method:: subtract([iterable-or-mapping]) - Elements are subtracted from an *iterable* or from another *mapping* - (or counter). Like :meth:`dict.update` but subtracts counts instead - of replacing them. Both inputs and outputs may be zero or negative. + Elements are subtracted from an *iterable* or from another *mapping* + (or counter). Like :meth:`dict.update` but subtracts counts instead + of replacing them. Both inputs and outputs may be zero or negative. >>> c = Counter(a=4, b=2, c=0, d=-2) >>> d = Counter(a=1, b=2, c=3, d=4) >>> c.subtract(d) Counter({'a': 3, 'b': 0, 'c': -3, 'd': -6}) - .. versionadded:: 3.2 + .. versionadded:: 3.2 - The usual dictionary methods are available for :class:`Counter` objects - except for two which work differently for counters. + The usual dictionary methods are available for :class:`Counter` objects + except for two which work differently for counters. - .. method:: fromkeys(iterable) + .. method:: fromkeys(iterable) - This class method is not implemented for :class:`Counter` objects. + This class method is not implemented for :class:`Counter` objects. - .. method:: update([iterable-or-mapping]) + .. method:: update([iterable-or-mapping]) - Elements are counted from an *iterable* or added-in from another - *mapping* (or counter). Like :meth:`dict.update` but adds counts - instead of replacing them. Also, the *iterable* is expected to be a - sequence of elements, not a sequence of ``(key, value)`` pairs. + Elements are counted from an *iterable* or added-in from another + *mapping* (or counter). Like :meth:`dict.update` but adds counts + instead of replacing them. Also, the *iterable* is expected to be a + sequence of elements, not a sequence of ``(key, value)`` pairs. Common patterns for working with :class:`Counter` objects:: @@ -294,57 +294,57 @@ Counter({'b': 4}) .. versionadded:: 3.3 - Added support for unary plus, unary minus, and in-place multiset operations. + Added support for unary plus, unary minus, and in-place multiset operations. .. note:: - Counters were primarily designed to work with positive integers to represent - running counts; however, care was taken to not unnecessarily preclude use - cases needing other types or negative values. To help with those use cases, - this section documents the minimum range and type restrictions. + Counters were primarily designed to work with positive integers to represent + running counts; however, care was taken to not unnecessarily preclude use + cases needing other types or negative values. To help with those use cases, + this section documents the minimum range and type restrictions. - * The :class:`Counter` class itself is a dictionary subclass with no - restrictions on its keys and values. The values are intended to be numbers - representing counts, but you *could* store anything in the value field. + * The :class:`Counter` class itself is a dictionary subclass with no + restrictions on its keys and values. The values are intended to be numbers + representing counts, but you *could* store anything in the value field. 
- * The :meth:`most_common` method requires only that the values be orderable. + * The :meth:`most_common` method requires only that the values be orderable. - * For in-place operations such as ``c[key] += 1``, the value type need only - support addition and subtraction. So fractions, floats, and decimals would - work and negative values are supported. The same is also true for - :meth:`update` and :meth:`subtract` which allow negative and zero values - for both inputs and outputs. + * For in-place operations such as ``c[key] += 1``, the value type need only + support addition and subtraction. So fractions, floats, and decimals would + work and negative values are supported. The same is also true for + :meth:`update` and :meth:`subtract` which allow negative and zero values + for both inputs and outputs. - * The multiset methods are designed only for use cases with positive values. - The inputs may be negative or zero, but only outputs with positive values - are created. There are no type restrictions, but the value type needs to - support addition, subtraction, and comparison. + * The multiset methods are designed only for use cases with positive values. + The inputs may be negative or zero, but only outputs with positive values + are created. There are no type restrictions, but the value type needs to + support addition, subtraction, and comparison. - * The :meth:`elements` method requires integer counts. It ignores zero and - negative counts. + * The :meth:`elements` method requires integer counts. It ignores zero and + negative counts. .. seealso:: * `Counter class `_ - adapted for Python 2.5 and an early `Bag recipe - `_ for Python 2.4. + adapted for Python 2.5 and an early `Bag recipe + `_ for Python 2.4. * `Bag class `_ - in Smalltalk. + in Smalltalk. * Wikipedia entry for `Multisets `_. * `C++ multisets `_ - tutorial with examples. + tutorial with examples. * For mathematical operations on multisets and their use cases, see - *Knuth, Donald. The Art of Computer Programming Volume II, - Section 4.6.3, Exercise 19*. + *Knuth, Donald. The Art of Computer Programming Volume II, + Section 4.6.3, Exercise 19*. * To enumerate all distinct multisets of a given size over a given set of - elements, see :func:`itertools.combinations_with_replacement`. + elements, see :func:`itertools.combinations_with_replacement`. - map(Counter, combinations_with_replacement('ABC', 2)) --> AA AB AC BB BC CC + map(Counter, combinations_with_replacement('ABC', 2)) --> AA AB AC BB BC CC :class:`deque` objects @@ -352,105 +352,105 @@ .. class:: deque([iterable, [maxlen]]) - Returns a new deque object initialized left-to-right (using :meth:`append`) with - data from *iterable*. If *iterable* is not specified, the new deque is empty. + Returns a new deque object initialized left-to-right (using :meth:`append`) with + data from *iterable*. If *iterable* is not specified, the new deque is empty. - Deques are a generalization of stacks and queues (the name is pronounced "deck" - and is short for "double-ended queue"). Deques support thread-safe, memory - efficient appends and pops from either side of the deque with approximately the - same O(1) performance in either direction. + Deques are a generalization of stacks and queues (the name is pronounced "deck" + and is short for "double-ended queue"). Deques support thread-safe, memory + efficient appends and pops from either side of the deque with approximately the + same O(1) performance in either direction. 
- Though :class:`list` objects support similar operations, they are optimized for - fast fixed-length operations and incur O(n) memory movement costs for - ``pop(0)`` and ``insert(0, v)`` operations which change both the size and - position of the underlying data representation. + Though :class:`list` objects support similar operations, they are optimized for + fast fixed-length operations and incur O(n) memory movement costs for + ``pop(0)`` and ``insert(0, v)`` operations which change both the size and + position of the underlying data representation. - If *maxlen* is not specified or is *None*, deques may grow to an - arbitrary length. Otherwise, the deque is bounded to the specified maximum - length. Once a bounded length deque is full, when new items are added, a - corresponding number of items are discarded from the opposite end. Bounded - length deques provide functionality similar to the ``tail`` filter in - Unix. They are also useful for tracking transactions and other pools of data - where only the most recent activity is of interest. + If *maxlen* is not specified or is *None*, deques may grow to an + arbitrary length. Otherwise, the deque is bounded to the specified maximum + length. Once a bounded length deque is full, when new items are added, a + corresponding number of items are discarded from the opposite end. Bounded + length deques provide functionality similar to the ``tail`` filter in + Unix. They are also useful for tracking transactions and other pools of data + where only the most recent activity is of interest. - Deque objects support the following methods: + Deque objects support the following methods: - .. method:: append(x) + .. method:: append(x) - Add *x* to the right side of the deque. + Add *x* to the right side of the deque. - .. method:: appendleft(x) + .. method:: appendleft(x) - Add *x* to the left side of the deque. + Add *x* to the left side of the deque. - .. method:: clear() + .. method:: clear() - Remove all elements from the deque leaving it with length 0. + Remove all elements from the deque leaving it with length 0. - .. method:: count(x) + .. method:: count(x) - Count the number of deque elements equal to *x*. + Count the number of deque elements equal to *x*. - .. versionadded:: 3.2 + .. versionadded:: 3.2 - .. method:: extend(iterable) + .. method:: extend(iterable) - Extend the right side of the deque by appending elements from the iterable - argument. + Extend the right side of the deque by appending elements from the iterable + argument. - .. method:: extendleft(iterable) + .. method:: extendleft(iterable) - Extend the left side of the deque by appending elements from *iterable*. - Note, the series of left appends results in reversing the order of - elements in the iterable argument. + Extend the left side of the deque by appending elements from *iterable*. + Note, the series of left appends results in reversing the order of + elements in the iterable argument. - .. method:: pop() + .. method:: pop() - Remove and return an element from the right side of the deque. If no - elements are present, raises an :exc:`IndexError`. + Remove and return an element from the right side of the deque. If no + elements are present, raises an :exc:`IndexError`. - .. method:: popleft() + .. method:: popleft() - Remove and return an element from the left side of the deque. If no - elements are present, raises an :exc:`IndexError`. + Remove and return an element from the left side of the deque. If no + elements are present, raises an :exc:`IndexError`. - .. 
method:: remove(value) + .. method:: remove(value) - Removed the first occurrence of *value*. If not found, raises a - :exc:`ValueError`. + Removed the first occurrence of *value*. If not found, raises a + :exc:`ValueError`. - .. method:: reverse() + .. method:: reverse() - Reverse the elements of the deque in-place and then return ``None``. + Reverse the elements of the deque in-place and then return ``None``. - .. versionadded:: 3.2 + .. versionadded:: 3.2 - .. method:: rotate(n) + .. method:: rotate(n) - Rotate the deque *n* steps to the right. If *n* is negative, rotate to - the left. Rotating one step to the right is equivalent to: - ``d.appendleft(d.pop())``. + Rotate the deque *n* steps to the right. If *n* is negative, rotate to + the left. Rotating one step to the right is equivalent to: + ``d.appendleft(d.pop())``. - Deque objects also provide one read-only attribute: + Deque objects also provide one read-only attribute: - .. attribute:: maxlen + .. attribute:: maxlen - Maximum size of a deque or *None* if unbounded. + Maximum size of a deque or *None* if unbounded. - .. versionadded:: 3.1 + .. versionadded:: 3.1 In addition to the above, deques support iteration, pickling, ``len(d)``, @@ -463,56 +463,56 @@ .. doctest:: - >>> from collections import deque - >>> d = deque('ghi') # make a new deque with three items - >>> for elem in d: # iterate over the deque's elements - ... print(elem.upper()) - G - H - I + >>> from collections import deque + >>> d = deque('ghi') # make a new deque with three items + >>> for elem in d: # iterate over the deque's elements + ... print(elem.upper()) + G + H + I - >>> d.append('j') # add a new entry to the right side - >>> d.appendleft('f') # add a new entry to the left side - >>> d # show the representation of the deque - deque(['f', 'g', 'h', 'i', 'j']) + >>> d.append('j') # add a new entry to the right side + >>> d.appendleft('f') # add a new entry to the left side + >>> d # show the representation of the deque + deque(['f', 'g', 'h', 'i', 'j']) - >>> d.pop() # return and remove the rightmost item - 'j' - >>> d.popleft() # return and remove the leftmost item - 'f' - >>> list(d) # list the contents of the deque - ['g', 'h', 'i'] - >>> d[0] # peek at leftmost item - 'g' - >>> d[-1] # peek at rightmost item - 'i' + >>> d.pop() # return and remove the rightmost item + 'j' + >>> d.popleft() # return and remove the leftmost item + 'f' + >>> list(d) # list the contents of the deque + ['g', 'h', 'i'] + >>> d[0] # peek at leftmost item + 'g' + >>> d[-1] # peek at rightmost item + 'i' - >>> list(reversed(d)) # list the contents of a deque in reverse - ['i', 'h', 'g'] - >>> 'h' in d # search the deque - True - >>> d.extend('jkl') # add multiple elements at once - >>> d - deque(['g', 'h', 'i', 'j', 'k', 'l']) - >>> d.rotate(1) # right rotation - >>> d - deque(['l', 'g', 'h', 'i', 'j', 'k']) - >>> d.rotate(-1) # left rotation - >>> d - deque(['g', 'h', 'i', 'j', 'k', 'l']) + >>> list(reversed(d)) # list the contents of a deque in reverse + ['i', 'h', 'g'] + >>> 'h' in d # search the deque + True + >>> d.extend('jkl') # add multiple elements at once + >>> d + deque(['g', 'h', 'i', 'j', 'k', 'l']) + >>> d.rotate(1) # right rotation + >>> d + deque(['l', 'g', 'h', 'i', 'j', 'k']) + >>> d.rotate(-1) # left rotation + >>> d + deque(['g', 'h', 'i', 'j', 'k', 'l']) - >>> deque(reversed(d)) # make a new deque in reverse order - deque(['l', 'k', 'j', 'i', 'h', 'g']) - >>> d.clear() # empty the deque - >>> d.pop() # cannot pop from an empty deque - Traceback (most 
recent call last): - File "", line 1, in -toplevel- - d.pop() - IndexError: pop from an empty deque + >>> deque(reversed(d)) # make a new deque in reverse order + deque(['l', 'k', 'j', 'i', 'h', 'g']) + >>> d.clear() # empty the deque + >>> d.pop() # cannot pop from an empty deque + Traceback (most recent call last): + File "", line 1, in -toplevel- + d.pop() + IndexError: pop from an empty deque - >>> d.extendleft('abc') # extendleft() reverses the input order - >>> d - deque(['c', 'b', 'a']) + >>> d.extendleft('abc') # extendleft() reverses the input order + >>> d + deque(['c', 'b', 'a']) :class:`deque` Recipes @@ -523,10 +523,10 @@ Bounded length deques provide functionality similar to the ``tail`` filter in Unix:: - def tail(filename, n=10): - 'Return the last n lines of a file' - with open(filename) as f: - return deque(f, n) + def tail(filename, n=10): + 'Return the last n lines of a file' + with open(filename) as f: + return deque(f, n) Another approach to using deques is to maintain a sequence of recently added elements by appending to the right and popping to the left:: @@ -547,10 +547,10 @@ deletion. For example, a pure Python implementation of ``del d[n]`` relies on the :meth:`rotate` method to position elements to be popped:: - def delete_nth(d, n): - d.rotate(-n) - d.popleft() - d.rotate(n) + def delete_nth(d, n): + d.rotate(-n) + d.popleft() + d.rotate(n) To implement :class:`deque` slicing, use a similar approach applying :meth:`rotate` to bring a target element to the left side of the deque. Remove @@ -566,50 +566,50 @@ .. class:: defaultdict([default_factory[, ...]]) - Returns a new dictionary-like object. :class:`defaultdict` is a subclass of the - built-in :class:`dict` class. It overrides one method and adds one writable - instance variable. The remaining functionality is the same as for the - :class:`dict` class and is not documented here. + Returns a new dictionary-like object. :class:`defaultdict` is a subclass of the + built-in :class:`dict` class. It overrides one method and adds one writable + instance variable. The remaining functionality is the same as for the + :class:`dict` class and is not documented here. - The first argument provides the initial value for the :attr:`default_factory` - attribute; it defaults to ``None``. All remaining arguments are treated the same - as if they were passed to the :class:`dict` constructor, including keyword - arguments. + The first argument provides the initial value for the :attr:`default_factory` + attribute; it defaults to ``None``. All remaining arguments are treated the same + as if they were passed to the :class:`dict` constructor, including keyword + arguments. - :class:`defaultdict` objects support the following method in addition to the - standard :class:`dict` operations: + :class:`defaultdict` objects support the following method in addition to the + standard :class:`dict` operations: - .. method:: __missing__(key) + .. method:: __missing__(key) - If the :attr:`default_factory` attribute is ``None``, this raises a - :exc:`KeyError` exception with the *key* as argument. + If the :attr:`default_factory` attribute is ``None``, this raises a + :exc:`KeyError` exception with the *key* as argument. - If :attr:`default_factory` is not ``None``, it is called without arguments - to provide a default value for the given *key*, this value is inserted in - the dictionary for the *key*, and returned. 
+ If :attr:`default_factory` is not ``None``, it is called without arguments + to provide a default value for the given *key*, this value is inserted in + the dictionary for the *key*, and returned. - If calling :attr:`default_factory` raises an exception this exception is - propagated unchanged. + If calling :attr:`default_factory` raises an exception this exception is + propagated unchanged. - This method is called by the :meth:`__getitem__` method of the - :class:`dict` class when the requested key is not found; whatever it - returns or raises is then returned or raised by :meth:`__getitem__`. + This method is called by the :meth:`__getitem__` method of the + :class:`dict` class when the requested key is not found; whatever it + returns or raises is then returned or raised by :meth:`__getitem__`. - Note that :meth:`__missing__` is *not* called for any operations besides - :meth:`__getitem__`. This means that :meth:`get` will, like normal - dictionaries, return ``None`` as a default rather than using - :attr:`default_factory`. + Note that :meth:`__missing__` is *not* called for any operations besides + :meth:`__getitem__`. This means that :meth:`get` will, like normal + dictionaries, return ``None`` as a default rather than using + :attr:`default_factory`. - :class:`defaultdict` objects support the following instance variable: + :class:`defaultdict` objects support the following instance variable: - .. attribute:: default_factory + .. attribute:: default_factory - This attribute is used by the :meth:`__missing__` method; it is - initialized from the first argument to the constructor, if present, or to - ``None``, if absent. + This attribute is used by the :meth:`__missing__` method; it is + initialized from the first argument to the constructor, if present, or to + ``None``, if absent. :class:`defaultdict` Examples @@ -618,13 +618,13 @@ Using :class:`list` as the :attr:`default_factory`, it is easy to group a sequence of key-value pairs into a dictionary of lists: - >>> s = [('yellow', 1), ('blue', 2), ('yellow', 3), ('blue', 4), ('red', 1)] - >>> d = defaultdict(list) - >>> for k, v in s: - ... d[k].append(v) - ... - >>> list(d.items()) - [('blue', [2, 4]), ('red', [1]), ('yellow', [1, 3])] + >>> s = [('yellow', 1), ('blue', 2), ('yellow', 3), ('blue', 4), ('red', 1)] + >>> d = defaultdict(list) + >>> for k, v in s: + ... d[k].append(v) + ... + >>> list(d.items()) + [('blue', [2, 4]), ('red', [1]), ('yellow', [1, 3])] When each key is encountered for the first time, it is not already in the mapping; so an entry is automatically created using the :attr:`default_factory` @@ -634,24 +634,24 @@ :meth:`list.append` operation adds another value to the list. This technique is simpler and faster than an equivalent technique using :meth:`dict.setdefault`: - >>> d = {} - >>> for k, v in s: - ... d.setdefault(k, []).append(v) - ... - >>> list(d.items()) - [('blue', [2, 4]), ('red', [1]), ('yellow', [1, 3])] + >>> d = {} + >>> for k, v in s: + ... d.setdefault(k, []).append(v) + ... + >>> list(d.items()) + [('blue', [2, 4]), ('red', [1]), ('yellow', [1, 3])] Setting the :attr:`default_factory` to :class:`int` makes the :class:`defaultdict` useful for counting (like a bag or multiset in other languages): - >>> s = 'mississippi' - >>> d = defaultdict(int) - >>> for k in s: - ... d[k] += 1 - ... - >>> list(d.items()) - [('i', 4), ('p', 2), ('s', 4), ('m', 1)] + >>> s = 'mississippi' + >>> d = defaultdict(int) + >>> for k in s: + ... d[k] += 1 + ... 
+ >>> list(d.items()) + [('i', 4), ('p', 2), ('s', 4), ('m', 1)] When a letter is first encountered, it is missing from the mapping, so the :attr:`default_factory` function calls :func:`int` to supply a default count of @@ -662,23 +662,23 @@ is to use a lambda function which can supply any constant value (not just zero): - >>> def constant_factory(value): - ... return lambda: value - >>> d = defaultdict(constant_factory('')) - >>> d.update(name='John', action='ran') - >>> '%(name)s %(action)s to %(object)s' % d - 'John ran to ' + >>> def constant_factory(value): + ... return lambda: value + >>> d = defaultdict(constant_factory('')) + >>> d.update(name='John', action='ran') + >>> '%(name)s %(action)s to %(object)s' % d + 'John ran to ' Setting the :attr:`default_factory` to :class:`set` makes the :class:`defaultdict` useful for building a dictionary of sets: - >>> s = [('red', 1), ('blue', 2), ('red', 3), ('blue', 4), ('red', 1), ('blue', 4)] - >>> d = defaultdict(set) - >>> for k, v in s: - ... d[k].add(v) - ... - >>> list(d.items()) - [('blue', {2, 4}), ('red', {1, 3})] + >>> s = [('red', 1), ('blue', 2), ('red', 3), ('blue', 4), ('red', 1), ('blue', 4)] + >>> d = defaultdict(set) + >>> for k, v in s: + ... d[k].add(v) + ... + >>> list(d.items()) + [('blue', {2, 4}), ('red', {1, 3})] :func:`namedtuple` Factory Function for Tuples with Named Fields @@ -690,69 +690,69 @@ .. function:: namedtuple(typename, field_names, verbose=False, rename=False) - Returns a new tuple subclass named *typename*. The new subclass is used to - create tuple-like objects that have fields accessible by attribute lookup as - well as being indexable and iterable. Instances of the subclass also have a - helpful docstring (with typename and field_names) and a helpful :meth:`__repr__` - method which lists the tuple contents in a ``name=value`` format. + Returns a new tuple subclass named *typename*. The new subclass is used to + create tuple-like objects that have fields accessible by attribute lookup as + well as being indexable and iterable. Instances of the subclass also have a + helpful docstring (with typename and field_names) and a helpful :meth:`__repr__` + method which lists the tuple contents in a ``name=value`` format. - The *field_names* are a single string with each fieldname separated by whitespace - and/or commas, for example ``'x y'`` or ``'x, y'``. Alternatively, *field_names* - can be a sequence of strings such as ``['x', 'y']``. + The *field_names* are a single string with each fieldname separated by whitespace + and/or commas, for example ``'x y'`` or ``'x, y'``. Alternatively, *field_names* + can be a sequence of strings such as ``['x', 'y']``. - Any valid Python identifier may be used for a fieldname except for names - starting with an underscore. Valid identifiers consist of letters, digits, - and underscores but do not start with a digit or underscore and cannot be - a :mod:`keyword` such as *class*, *for*, *return*, *global*, *pass*, - or *raise*. + Any valid Python identifier may be used for a fieldname except for names + starting with an underscore. Valid identifiers consist of letters, digits, + and underscores but do not start with a digit or underscore and cannot be + a :mod:`keyword` such as *class*, *for*, *return*, *global*, *pass*, + or *raise*. - If *rename* is true, invalid fieldnames are automatically replaced - with positional names. 
For example, ``['abc', 'def', 'ghi', 'abc']`` is - converted to ``['abc', '_1', 'ghi', '_3']``, eliminating the keyword - ``def`` and the duplicate fieldname ``abc``. + If *rename* is true, invalid fieldnames are automatically replaced + with positional names. For example, ``['abc', 'def', 'ghi', 'abc']`` is + converted to ``['abc', '_1', 'ghi', '_3']``, eliminating the keyword + ``def`` and the duplicate fieldname ``abc``. - If *verbose* is true, the class definition is printed after it is - built. This option is outdated; instead, it is simpler to print the - :attr:`_source` attribute. + If *verbose* is true, the class definition is printed after it is + built. This option is outdated; instead, it is simpler to print the + :attr:`_source` attribute. - Named tuple instances do not have per-instance dictionaries, so they are - lightweight and require no more memory than regular tuples. + Named tuple instances do not have per-instance dictionaries, so they are + lightweight and require no more memory than regular tuples. - .. versionchanged:: 3.1 - Added support for *rename*. + .. versionchanged:: 3.1 + Added support for *rename*. .. doctest:: - :options: +NORMALIZE_WHITESPACE + :options: +NORMALIZE_WHITESPACE - >>> # Basic example - >>> Point = namedtuple('Point', ['x', 'y']) - >>> p = Point(11, y=22) # instantiate with positional or keyword arguments - >>> p[0] + p[1] # indexable like the plain tuple (11, 22) - 33 - >>> x, y = p # unpack like a regular tuple - >>> x, y - (11, 22) - >>> p.x + p.y # fields also accessible by name - 33 - >>> p # readable __repr__ with a name=value style - Point(x=11, y=22) + >>> # Basic example + >>> Point = namedtuple('Point', ['x', 'y']) + >>> p = Point(11, y=22) # instantiate with positional or keyword arguments + >>> p[0] + p[1] # indexable like the plain tuple (11, 22) + 33 + >>> x, y = p # unpack like a regular tuple + >>> x, y + (11, 22) + >>> p.x + p.y # fields also accessible by name + 33 + >>> p # readable __repr__ with a name=value style + Point(x=11, y=22) Named tuples are especially useful for assigning field names to result tuples returned by the :mod:`csv` or :mod:`sqlite3` modules:: - EmployeeRecord = namedtuple('EmployeeRecord', 'name, age, title, department, paygrade') + EmployeeRecord = namedtuple('EmployeeRecord', 'name, age, title, department, paygrade') - import csv - for emp in map(EmployeeRecord._make, csv.reader(open("employees.csv", "rb"))): - print(emp.name, emp.title) + import csv + for emp in map(EmployeeRecord._make, csv.reader(open("employees.csv", "rb"))): + print(emp.name, emp.title) - import sqlite3 - conn = sqlite3.connect('/companydata') - cursor = conn.cursor() - cursor.execute('SELECT name, age, title, department, paygrade FROM employees') - for emp in map(EmployeeRecord._make, cursor.fetchall()): - print(emp.name, emp.title) + import sqlite3 + conn = sqlite3.connect('/companydata') + cursor = conn.cursor() + cursor.execute('SELECT name, age, title, department, paygrade FROM employees') + for emp in map(EmployeeRecord._make, cursor.fetchall()): + print(emp.name, emp.title) In addition to the methods inherited from tuples, named tuples support three additional methods and two attributes. To prevent conflicts with @@ -760,62 +760,63 @@ .. classmethod:: somenamedtuple._make(iterable) - Class method that makes a new instance from an existing sequence or iterable. + Class method that makes a new instance from an existing sequence or iterable. .. 
doctest:: - >>> t = [11, 22] - >>> Point._make(t) - Point(x=11, y=22) + >>> t = [11, 22] + >>> Point._make(t) + Point(x=11, y=22) .. method:: somenamedtuple._asdict() - Return a new :class:`OrderedDict` which maps field names to their corresponding - values:: + Return a new :class:`OrderedDict` which maps field names to their corresponding + values. Note, this method is no longer needed now that the same effect can + be achieved by using the built-in :func:`vars` function:: - >>> p._asdict() - OrderedDict([('x', 11), ('y', 22)]) + >>> vars(p) + OrderedDict([('x', 11), ('y', 22)]) - .. versionchanged:: 3.1 - Returns an :class:`OrderedDict` instead of a regular :class:`dict`. + .. versionchanged:: 3.1 + Returns an :class:`OrderedDict` instead of a regular :class:`dict`. .. method:: somenamedtuple._replace(kwargs) - Return a new instance of the named tuple replacing specified fields with new - values: + Return a new instance of the named tuple replacing specified fields with new + values: :: - >>> p = Point(x=11, y=22) - >>> p._replace(x=33) - Point(x=33, y=22) + >>> p = Point(x=11, y=22) + >>> p._replace(x=33) + Point(x=33, y=22) - >>> for partnum, record in inventory.items(): - ... inventory[partnum] = record._replace(price=newprices[partnum], timestamp=time.now()) + >>> for partnum, record in inventory.items(): + ... inventory[partnum] = record._replace(price=newprices[partnum], timestamp=time.now()) .. attribute:: somenamedtuple._source - A string with the pure Python source code used to create the named - tuple class. The source makes the named tuple self-documenting. - It can be printed, executed using :func:`exec`, or saved to a file - and imported. + A string with the pure Python source code used to create the named + tuple class. The source makes the named tuple self-documenting. + It can be printed, executed using :func:`exec`, or saved to a file + and imported. - .. versionadded:: 3.3 + .. versionadded:: 3.3 .. attribute:: somenamedtuple._fields - Tuple of strings listing the field names. Useful for introspection - and for creating new named tuple types from existing named tuples. + Tuple of strings listing the field names. Useful for introspection + and for creating new named tuple types from existing named tuples. .. doctest:: - >>> p._fields # view the field names - ('x', 'y') + >>> p._fields # view the field names + ('x', 'y') - >>> Color = namedtuple('Color', 'red green blue') - >>> Pixel = namedtuple('Pixel', Point._fields + Color._fields) - >>> Pixel(11, 22, 128, 255, 0) - Pixel(x=11, y=22, red=128, green=255, blue=0) + >>> Color = namedtuple('Color', 'red green blue') + >>> Pixel = namedtuple('Pixel', Point._fields + Color._fields) + >>> Pixel(11, 22, 128, 255, 0) + Pixel(x=11, y=22, red=128, green=255, blue=0) To retrieve a field whose name is stored in a string, use the :func:`getattr` function: @@ -826,24 +827,24 @@ To convert a dictionary to a named tuple, use the double-star-operator (as described in :ref:`tut-unpacking-arguments`): - >>> d = {'x': 11, 'y': 22} - >>> Point(**d) - Point(x=11, y=22) + >>> d = {'x': 11, 'y': 22} + >>> Point(**d) + Point(x=11, y=22) Since a named tuple is a regular Python class, it is easy to add or change functionality with a subclass. 
Here is how to add a calculated field and a fixed-width print format: >>> class Point(namedtuple('Point', 'x y')): - __slots__ = () - @property - def hypot(self): - return (self.x ** 2 + self.y ** 2) ** 0.5 - def __str__(self): - return 'Point: x=%6.3f y=%6.3f hypot=%6.3f' % (self.x, self.y, self.hypot) + __slots__ = () + @property + def hypot(self): + return (self.x ** 2 + self.y ** 2) ** 0.5 + def __str__(self): + return 'Point: x=%6.3f y=%6.3f hypot=%6.3f' % (self.x, self.y, self.hypot) >>> for p in Point(3, 4), Point(14, 5/7): - print(p) + print(p) Point: x= 3.000 y= 4.000 hypot= 5.000 Point: x=14.000 y= 0.714 hypot=14.018 @@ -870,19 +871,19 @@ >>> Status.open, Status.pending, Status.closed (0, 1, 2) >>> class Status: - open, pending, closed = range(3) + open, pending, closed = range(3) .. seealso:: - * `Named tuple recipe `_ - adapted for Python 2.4. + * `Named tuple recipe `_ + adapted for Python 2.4. - * `Recipe for named tuple abstract base class with a metaclass mix-in - `_ - by Jan Kaliszewski. Besides providing an :term:`abstract base class` for - named tuples, it also supports an alternate :term:`metaclass`-based - constructor that is convenient for use cases where named tuples are being - subclassed. + * `Recipe for named tuple abstract base class with a metaclass mix-in + `_ + by Jan Kaliszewski. Besides providing an :term:`abstract base class` for + named tuples, it also supports an alternate :term:`metaclass`-based + constructor that is convenient for use cases where named tuples are being + subclassed. :class:`OrderedDict` objects @@ -894,36 +895,36 @@ .. class:: OrderedDict([items]) - Return an instance of a dict subclass, supporting the usual :class:`dict` - methods. An *OrderedDict* is a dict that remembers the order that keys - were first inserted. If a new entry overwrites an existing entry, the - original insertion position is left unchanged. Deleting an entry and - reinserting it will move it to the end. + Return an instance of a dict subclass, supporting the usual :class:`dict` + methods. An *OrderedDict* is a dict that remembers the order that keys + were first inserted. If a new entry overwrites an existing entry, the + original insertion position is left unchanged. Deleting an entry and + reinserting it will move it to the end. - .. versionadded:: 3.1 + .. versionadded:: 3.1 - .. method:: popitem(last=True) + .. method:: popitem(last=True) - The :meth:`popitem` method for ordered dictionaries returns and removes a - (key, value) pair. The pairs are returned in LIFO order if *last* is true - or FIFO order if false. + The :meth:`popitem` method for ordered dictionaries returns and removes a + (key, value) pair. The pairs are returned in LIFO order if *last* is true + or FIFO order if false. - .. method:: move_to_end(key, last=True) + .. method:: move_to_end(key, last=True) - Move an existing *key* to either end of an ordered dictionary. The item - is moved to the right end if *last* is true (the default) or to the - beginning if *last* is false. Raises :exc:`KeyError` if the *key* does - not exist:: + Move an existing *key* to either end of an ordered dictionary. The item + is moved to the right end if *last* is true (the default) or to the + beginning if *last* is false. 
Raises :exc:`KeyError` if the *key* does + not exist:: - >>> d = OrderedDict.fromkeys('abcde') - >>> d.move_to_end('b') - >>> ''.join(d.keys()) - 'acdeb' - >>> d.move_to_end('b', last=False) - >>> ''.join(d.keys()) - 'bacde' + >>> d = OrderedDict.fromkeys('abcde') + >>> d.move_to_end('b') + >>> ''.join(d.keys()) + 'acdeb' + >>> d.move_to_end('b', last=False) + >>> ''.join(d.keys()) + 'bacde' - .. versionadded:: 3.2 + .. versionadded:: 3.2 In addition to the usual mapping methods, ordered dictionaries also support reverse iteration using :func:`reversed`. @@ -941,8 +942,8 @@ .. seealso:: - `Equivalent OrderedDict recipe `_ - that runs on Python 2.4 or later. + `Equivalent OrderedDict recipe `_ + that runs on Python 2.4 or later. :class:`OrderedDict` Examples and Recipes ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -985,7 +986,7 @@ An ordered dictionary can be combined with the :class:`Counter` class so that the counter remembers the order elements are first encountered:: - class OrderedCounter(Counter, OrderedDict): + class OrderedCounter(Counter, OrderedDict): 'Counter that remembers the order elements are first encountered' def __repr__(self): @@ -1006,19 +1007,19 @@ .. class:: UserDict([initialdata]) - Class that simulates a dictionary. The instance's contents are kept in a - regular dictionary, which is accessible via the :attr:`data` attribute of - :class:`UserDict` instances. If *initialdata* is provided, :attr:`data` is - initialized with its contents; note that a reference to *initialdata* will not - be kept, allowing it be used for other purposes. + Class that simulates a dictionary. The instance's contents are kept in a + regular dictionary, which is accessible via the :attr:`data` attribute of + :class:`UserDict` instances. If *initialdata* is provided, :attr:`data` is + initialized with its contents; note that a reference to *initialdata* will not + be kept, allowing it be used for other purposes. - In addition to supporting the methods and operations of mappings, - :class:`UserDict` instances provide the following attribute: + In addition to supporting the methods and operations of mappings, + :class:`UserDict` instances provide the following attribute: - .. attribute:: data + .. attribute:: data - A real dictionary used to store the contents of the :class:`UserDict` - class. + A real dictionary used to store the contents of the :class:`UserDict` + class. @@ -1036,19 +1037,19 @@ .. class:: UserList([list]) - Class that simulates a list. The instance's contents are kept in a regular - list, which is accessible via the :attr:`data` attribute of :class:`UserList` - instances. The instance's contents are initially set to a copy of *list*, - defaulting to the empty list ``[]``. *list* can be any iterable, for - example a real Python list or a :class:`UserList` object. + Class that simulates a list. The instance's contents are kept in a regular + list, which is accessible via the :attr:`data` attribute of :class:`UserList` + instances. The instance's contents are initially set to a copy of *list*, + defaulting to the empty list ``[]``. *list* can be any iterable, for + example a real Python list or a :class:`UserList` object. - In addition to supporting the methods and operations of mutable sequences, - :class:`UserList` instances provide the following attribute: + In addition to supporting the methods and operations of mutable sequences, + :class:`UserList` instances provide the following attribute: - .. attribute:: data + .. 
attribute:: data - A real :class:`list` object used to store the contents of the - :class:`UserList` class. + A real :class:`list` object used to store the contents of the + :class:`UserList` class. **Subclassing requirements:** Subclasses of :class:`UserList` are expect to offer a constructor which can be called with either no arguments or one @@ -1073,10 +1074,10 @@ .. class:: UserString([sequence]) - Class that simulates a string or a Unicode string object. The instance's - content is kept in a regular string object, which is accessible via the - :attr:`data` attribute of :class:`UserString` instances. The instance's - contents are initially set to a copy of *sequence*. The *sequence* can - be an instance of :class:`bytes`, :class:`str`, :class:`UserString` (or a - subclass) or an arbitrary sequence which can be converted into a string using - the built-in :func:`str` function. + Class that simulates a string or a Unicode string object. The instance's + content is kept in a regular string object, which is accessible via the + :attr:`data` attribute of :class:`UserString` instances. The instance's + contents are initially set to a copy of *sequence*. The *sequence* can + be an instance of :class:`bytes`, :class:`str`, :class:`UserString` (or a + subclass) or an arbitrary sequence which can be converted into a string using + the built-in :func:`str` function. -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Sun Jun 10 05:49:17 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Sun, 10 Jun 2012 05:49:17 +0200 Subject: [Python-checkins] Daily reference leaks (2059910e7d76): sum=-2 Message-ID: results for 2059910e7d76 on branch "default" -------------------------------------------- test_exceptions leaked [0, -1, 0] references, sum=-1 test_support leaked [0, 0, -1] references, sum=-1 Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogyYDqQt', '-x'] From python-checkins at python.org Sun Jun 10 07:52:08 2012 From: python-checkins at python.org (raymond.hettinger) Date: Sun, 10 Jun 2012 07:52:08 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Small_cleanup_and_optimizat?= =?utf8?q?ion?= Message-ID: http://hg.python.org/cpython/rev/743cf3319862 changeset: 77398:743cf3319862 user: Raymond Hettinger date: Sat Jun 09 22:51:39 2012 -0700 summary: Small cleanup and optimization files: Lib/collections/__init__.py | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py --- a/Lib/collections/__init__.py +++ b/Lib/collections/__init__.py @@ -8,7 +8,7 @@ __all__ += collections.abc.__all__ from _collections import deque, defaultdict -from operator import itemgetter as _itemgetter +from operator import itemgetter as _itemgetter, eq as _eq from keyword import iskeyword as _iskeyword import sys as _sys import heapq as _heapq @@ -229,7 +229,7 @@ ''' if isinstance(other, OrderedDict): return len(self)==len(other) and \ - all(p==q for p, q in zip(self.items(), other.items())) + all(map(_eq, self.items(), other.items())) return dict.__eq__(self, other) -- Repository URL: http://hg.python.org/cpython From ncoghlan at gmail.com Sun Jun 10 15:41:55 2012 From: ncoghlan at gmail.com (Nick Coghlan) Date: Sun, 10 Jun 2012 23:41:55 +1000 Subject: [Python-checkins] cpython: Note that the _asdict() method is outdated In-Reply-To: References: Message-ID: On Sun, Jun 10, 2012 at 12:15 PM, raymond.hettinger wrote: > 
http://hg.python.org/cpython/rev/fecbcd5c3978 > changeset: 77397:fecbcd5c3978 > user: Raymond Hettinger > date: Sat Jun 09 19:15:26 2012 -0700 > summary: > Note that the _asdict() method is outdated This checkin changed a lot of the indentation in the collections docs. Did you mean to do that? Cheers, Nick. -- Nick Coghlan | ncoghlan at gmail.com | Brisbane, Australia From python-checkins at python.org Sun Jun 10 16:52:11 2012 From: python-checkins at python.org (stefan.krah) Date: Sun, 10 Jun 2012 16:52:11 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_1=29_State_restrictions_for?= =?utf8?q?_the_transform_length=2E?= Message-ID: http://hg.python.org/cpython/rev/27b9ab483c59 changeset: 77399:27b9ab483c59 user: Stefan Krah date: Sun, 10 Jun 2012 16:50:55 +0200 summary: 1) State restrictions for the transform length. 2) Switch argument order to match the function signature of mpd_calloc() (cosmetic change, since the order is irrelevant). files: Modules/_decimal/libmpdec/mpdecimal.c | 15 ++++++++++----- 1 files changed, 10 insertions(+), 5 deletions(-) diff --git a/Modules/_decimal/libmpdec/mpdecimal.c b/Modules/_decimal/libmpdec/mpdecimal.c --- a/Modules/_decimal/libmpdec/mpdecimal.c +++ b/Modules/_decimal/libmpdec/mpdecimal.c @@ -5158,7 +5158,11 @@ } -/* Determine the minimum length for the number theoretic transform. */ +/* + * Determine the minimum length for the number theoretic transform. Valid + * transform lengths are 2**n or 3*2**n, where 2**n <= MPD_MAXTRANSFORM_2N. + * The function finds the shortest length m such that rsize <= m. + */ static inline mpd_size_t _mpd_get_transform_len(mpd_size_t rsize) { @@ -5169,6 +5173,7 @@ log2rsize = mpd_bsr(rsize); if (rsize <= 1024) { + /* 2**n is faster in this range. */ x = ((mpd_size_t)1)< http://hg.python.org/cpython/rev/b65c1f21369d changeset: 77400:b65c1f21369d user: Raymond Hettinger date: Sun Jun 10 11:39:44 2012 -0700 summary: Expand examples for ChainMap(). Improve markup. files: Doc/library/collections.rst | 108 ++++++++++++++++-------- 1 files changed, 72 insertions(+), 36 deletions(-) diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst --- a/Doc/library/collections.rst +++ b/Doc/library/collections.rst @@ -93,13 +93,44 @@ The use-cases also parallel those for the builtin :func:`super` function. A reference to ``d.parents`` is equivalent to: ``ChainMap(*d.maps[1:])``. - Example of simulating Python's internal lookup chain:: + +.. seealso:: + + * The `MultiContext class + `_ + in the Enthought `CodeTools package + `_ has options to support + writing to any mapping in the chain. + + * Django's `Context class + `_ + for templating is a read-only chain of mappings. It also features + pushing and popping of contexts similar to the + :meth:`~collections.ChainMap.new_child` method and the + :meth:`~collections.ChainMap.parents` property. + + * The `Nested Contexts recipe + `_ has options to control + whether writes and other mutations apply only to the first mapping or to + any mapping in the chain. + + * A `greatly simplified read-only version of Chainmap + `_. + + +:class:`ChainMap` Examples and Recipes +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +This section shows various approaches to working with chained maps. 
+ + +Example of simulating Python's internal lookup chain:: import builtins pylookup = ChainMap(locals(), globals(), vars(builtins)) - Example of letting user specified values take precedence over environment - variables which in turn take precedence over default values:: +Example of letting user specified values take precedence over environment +variables which in turn take precedence over default values:: import os, argparse defaults = {'color': 'red', 'user': guest} @@ -109,8 +140,8 @@ user_specified = vars(parser.parse_args()) combined = ChainMap(user_specified, os.environ, defaults) - Example patterns for using the :class:`ChainMap` class to simulate nested - contexts:: +Example patterns for using the :class:`ChainMap` class to simulate nested +contexts:: c = ChainMap() # Create root context d = c.new_child() # Create nested child context @@ -128,28 +159,33 @@ d.items() # All nested items dict(d) # Flatten into a regular dictionary - .. seealso:: +The :class:`ChainMap` class only makes updates (writes and deletions) to the +first mapping in the chain while lookups will search the full chain. However, +if deep writes and deletions are desired, it is easy to make a subclass that +updates keys found deeper in the chain:: - * The `MultiContext class - `_ - in the Enthought `CodeTools package - `_ has options to support - writing to any mapping in the chain. + class DeepChainMap(ChainMap): + 'Variant of ChainMap that allows direct updates to inner scopes' - * Django's `Context class - `_ - for templating is a read-only chain of mappings. It also features - pushing and popping of contexts similar to the - :meth:`~collections.ChainMap.new_child` method and the - :meth:`~collections.ChainMap.parents` property. + def __setitem__(self, key, value): + for mapping in self.maps: + if key in mapping: + mapping[key] = value + return + self.maps[0][key] = value - * The `Nested Contexts recipe - `_ has options to control - whether writes and other mutations apply only to the first mapping or to - any mapping in the chain. + def __delitem__(self, key): + for mapping in self.maps: + if key in mapping: + del mapping[key] + return + raise KeyError(key) - * A `greatly simplified read-only version of Chainmap - `_. + >>> d = DeepChainMap({'zebra': 'black'}, {'elephant' : 'blue'}, {'lion' : 'yellow'}) + >>> d['lion'] = 'orange' # update an existing key two levels down + >>> d['snake'] = 'red' # new keys get added to the topmost dict + >>> del d['elephant'] # remove an existing key one level down + DeepChainMap({'zebra': 'black', 'snake': 'red'}, {}, {'lion': 'orange'}) :class:`Counter` objects @@ -326,23 +362,23 @@ .. seealso:: * `Counter class `_ - adapted for Python 2.5 and an early `Bag recipe - `_ for Python 2.4. + adapted for Python 2.5 and an early `Bag recipe + `_ for Python 2.4. * `Bag class `_ - in Smalltalk. + in Smalltalk. * Wikipedia entry for `Multisets `_. * `C++ multisets `_ - tutorial with examples. + tutorial with examples. * For mathematical operations on multisets and their use cases, see - *Knuth, Donald. The Art of Computer Programming Volume II, - Section 4.6.3, Exercise 19*. + *Knuth, Donald. The Art of Computer Programming Volume II, + Section 4.6.3, Exercise 19*. * To enumerate all distinct multisets of a given size over a given set of - elements, see :func:`itertools.combinations_with_replacement`. + elements, see :func:`itertools.combinations_with_replacement`. map(Counter, combinations_with_replacement('ABC', 2)) --> AA AB AC BB BC CC @@ -876,14 +912,14 @@ .. 
seealso:: * `Named tuple recipe `_ - adapted for Python 2.4. + adapted for Python 2.4. * `Recipe for named tuple abstract base class with a metaclass mix-in - `_ - by Jan Kaliszewski. Besides providing an :term:`abstract base class` for - named tuples, it also supports an alternate :term:`metaclass`-based - constructor that is convenient for use cases where named tuples are being - subclassed. + `_ + by Jan Kaliszewski. Besides providing an :term:`abstract base class` for + named tuples, it also supports an alternate :term:`metaclass`-based + constructor that is convenient for use cases where named tuples are being + subclassed. :class:`OrderedDict` objects -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 10 21:36:41 2012 From: python-checkins at python.org (michael.foord) Date: Sun, 10 Jun 2012 21:36:41 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Adding_patch=2Estopall_meth?= =?utf8?q?od_to_unittest=2Emock?= Message-ID: http://hg.python.org/cpython/rev/4d28666c54f2 changeset: 77401:4d28666c54f2 user: Michael Foord date: Sun Jun 10 20:36:32 2012 +0100 summary: Adding patch.stopall method to unittest.mock files: Doc/library/unittest.mock.rst | 8 +++- Lib/unittest/mock.py | 22 +++++++++++- Lib/unittest/test/testmock/testpatch.py | 18 ++++++++++ 3 files changed, 44 insertions(+), 4 deletions(-) diff --git a/Doc/library/unittest.mock.rst b/Doc/library/unittest.mock.rst --- a/Doc/library/unittest.mock.rst +++ b/Doc/library/unittest.mock.rst @@ -1354,8 +1354,12 @@ As an added bonus you no longer need to keep a reference to the `patcher` object. -In fact `start` and `stop` are just aliases for the context manager -`__enter__` and `__exit__` methods. +It is also possible to stop all patches which have been started by using +`patch.stopall`. + +.. function:: patch.stopall + + Stop all active patches. 
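As an illustration (a minimal sketch, not taken from the diff above), the new `start`/`stopall` pair can be combined with `addCleanup` so that every patch started in `setUp` is undone after each test; the example patches real :mod:`os` functions::

    import os
    import unittest
    from unittest.mock import patch

    class ExampleTest(unittest.TestCase):

        def setUp(self):
            # start() activates the patch and returns the created mock
            self.getcwd_mock = patch('os.getcwd').start()
            self.unlink_mock = patch('os.unlink').start()
            # undo every patch that was activated with start()
            self.addCleanup(patch.stopall)

        def test_example(self):
            self.getcwd_mock.return_value = '/tmp/example'
            self.assertEqual(os.getcwd(), '/tmp/example')

Note that `stopall` only affects patches activated through `start()`; patches applied as decorators or context managers are unwound as usual when the decorated function or `with` block exits.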
TEST_PREFIX diff --git a/Lib/unittest/mock.py b/Lib/unittest/mock.py --- a/Lib/unittest/mock.py +++ b/Lib/unittest/mock.py @@ -1002,6 +1002,7 @@ class _patch(object): attribute_name = None + _active_patches = set() def __init__( self, getter, attribute, new, spec, create, @@ -1270,8 +1271,18 @@ if _is_started(patcher): patcher.__exit__(*exc_info) - start = __enter__ - stop = __exit__ + + def start(self): + """Activate a patch, returning any created mock.""" + result = self.__enter__() + self._active_patches.add(self) + return result + + + def stop(self): + """Stop an active patch.""" + self._active_patches.discard(self) + return self.__exit__() @@ -1562,9 +1573,16 @@ del in_dict[key] +def _patch_stopall(): + """Stop all active patches.""" + for patch in list(_patch._active_patches): + patch.stop() + + patch.object = _patch_object patch.dict = _patch_dict patch.multiple = _patch_multiple +patch.stopall = _patch_stopall patch.TEST_PREFIX = 'test' magic_methods = ( diff --git a/Lib/unittest/test/testmock/testpatch.py b/Lib/unittest/test/testmock/testpatch.py --- a/Lib/unittest/test/testmock/testpatch.py +++ b/Lib/unittest/test/testmock/testpatch.py @@ -1762,6 +1762,24 @@ p.stop() + def test_patch_stopall(self): + unlink = os.unlink + chdir = os.chdir + path = os.path + patch('os.unlink', something).start() + patch('os.chdir', something_else).start() + + @patch('os.path') + def patched(mock_path): + patch.stopall() + self.assertIs(os.path, mock_path) + self.assertIs(os.unlink, unlink) + self.assertIs(os.chdir, chdir) + + patched() + self.assertIs(os.path, path) + + if __name__ == '__main__': unittest.main() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 10 21:38:54 2012 From: python-checkins at python.org (michael.foord) Date: Sun, 10 Jun 2012 21:38:54 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_unittest=2Emock_minor_doc_u?= =?utf8?q?pdate?= Message-ID: http://hg.python.org/cpython/rev/5399a7d792d5 changeset: 77402:5399a7d792d5 user: Michael Foord date: Sun Jun 10 20:38:54 2012 +0100 summary: unittest.mock minor doc update files: Doc/library/unittest.mock.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/unittest.mock.rst b/Doc/library/unittest.mock.rst --- a/Doc/library/unittest.mock.rst +++ b/Doc/library/unittest.mock.rst @@ -1359,7 +1359,7 @@ .. function:: patch.stopall - Stop all active patches. + Stop all active patches. Only stops patches started with `start`. TEST_PREFIX -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 11 05:03:50 2012 From: python-checkins at python.org (brian.quinlan) Date: Mon, 11 Jun 2012 05:03:50 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_=2315015=3A_Fix_accessing_a?= =?utf8?q?n_non-existing_attribute=2E?= Message-ID: http://hg.python.org/cpython/rev/68d7c0be5890 changeset: 77403:68d7c0be5890 user: Brian Quinlan date: Mon Jun 11 12:59:07 2012 +1000 summary: #15015: Fix accessing an non-existing attribute. 
files: Lib/concurrent/futures/_base.py | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Lib/concurrent/futures/_base.py b/Lib/concurrent/futures/_base.py --- a/Lib/concurrent/futures/_base.py +++ b/Lib/concurrent/futures/_base.py @@ -470,8 +470,8 @@ return True else: LOGGER.critical('Future %s in unexpected state: %s', - id(self.future), - self.future._state) + id(self), + self._state) raise RuntimeError('Future in unexpected state') def set_result(self, result): -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Mon Jun 11 05:50:18 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Mon, 11 Jun 2012 05:50:18 +0200 Subject: [Python-checkins] Daily reference leaks (5399a7d792d5): sum=2 Message-ID: results for 5399a7d792d5 on branch "default" -------------------------------------------- test_dbm leaked [2, 0, 0] references, sum=2 Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogtL3Nub', '-x'] From python-checkins at python.org Mon Jun 11 08:58:56 2012 From: python-checkins at python.org (stefan.krah) Date: Mon, 11 Jun 2012 08:58:56 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_1=29_Replace_long-winded_ab?= =?utf8?q?ort=28=29_construct_by_assert=28=29=2E?= Message-ID: http://hg.python.org/cpython/rev/8a222aac951b changeset: 77404:8a222aac951b user: Stefan Krah date: Mon Jun 11 08:57:17 2012 +0200 summary: 1) Replace long-winded abort() construct by assert(). 2) Remove micro optimization (inline checking for NaN before calling mpd_qcheck_nans()) that probably has no benefit in this case. files: Modules/_decimal/libmpdec/mpdecimal.c | 63 +++++++------- 1 files changed, 30 insertions(+), 33 deletions(-) diff --git a/Modules/_decimal/libmpdec/mpdecimal.c b/Modules/_decimal/libmpdec/mpdecimal.c --- a/Modules/_decimal/libmpdec/mpdecimal.c +++ b/Modules/_decimal/libmpdec/mpdecimal.c @@ -5713,30 +5713,28 @@ mpd_qnext_minus(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx, uint32_t *status) { - mpd_context_t workctx; /* function context */ + mpd_context_t workctx; MPD_NEW_CONST(tiny,MPD_POS,mpd_etiny(ctx)-1,1,1,1,1); if (mpd_isspecial(a)) { if (mpd_qcheck_nan(result, a, ctx, status)) { return; } - if (mpd_isinfinite(a)) { - if (mpd_isnegative(a)) { - mpd_qcopy(result, a, status); + + assert(mpd_isinfinite(a)); + if (mpd_isnegative(a)) { + mpd_qcopy(result, a, status); + return; + } + else { + mpd_clear_flags(result); + mpd_qmaxcoeff(result, ctx, status); + if (mpd_isnan(result)) { return; } - else { - mpd_clear_flags(result); - mpd_qmaxcoeff(result, ctx, status); - if (mpd_isnan(result)) { - return; - } - result->exp = ctx->emax - ctx->prec + 1; - return; - } - } - /* debug */ - abort(); /* GCOV_NOT_REACHED */ + result->exp = mpd_etop(ctx); + return; + } } mpd_workcontext(&workctx, ctx); @@ -5769,21 +5767,21 @@ if (mpd_qcheck_nan(result, a, ctx, status)) { return; } - if (mpd_isinfinite(a)) { - if (mpd_ispositive(a)) { - mpd_qcopy(result, a, status); + + assert(mpd_isinfinite(a)); + if (mpd_ispositive(a)) { + mpd_qcopy(result, a, status); + } + else { + mpd_clear_flags(result); + mpd_qmaxcoeff(result, ctx, status); + if (mpd_isnan(result)) { + return; } - else { - mpd_clear_flags(result); - mpd_qmaxcoeff(result, ctx, status); - if (mpd_isnan(result)) { - return; - } - mpd_set_flags(result, MPD_NEG); - result->exp = mpd_etop(ctx); - } - return; - } + mpd_set_flags(result, MPD_NEG); + result->exp = mpd_etop(ctx); + } + return; } mpd_workcontext(&workctx, ctx); @@ 
-5814,9 +5812,8 @@ { int c; - if (mpd_isnan(a) || mpd_isnan(b)) { - if (mpd_qcheck_nans(result, a, b, ctx, status)) - return; + if (mpd_qcheck_nans(result, a, b, ctx, status)) { + return; } c = _mpd_cmp(a, b); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 11 09:42:27 2012 From: python-checkins at python.org (raymond.hettinger) Date: Mon, 11 Jun 2012 09:42:27 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_indentation_of_method_a?= =?utf8?q?nd_attribute_examples=2E?= Message-ID: http://hg.python.org/cpython/rev/dce47c04d3ab changeset: 77405:dce47c04d3ab parent: 77400:b65c1f21369d user: Raymond Hettinger date: Mon Jun 11 00:38:14 2012 -0700 summary: Fix indentation of method and attribute examples. files: Doc/library/collections.rst | 36 +++++++++++------------- 1 files changed, 17 insertions(+), 19 deletions(-) diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst --- a/Doc/library/collections.rst +++ b/Doc/library/collections.rst @@ -798,11 +798,11 @@ Class method that makes a new instance from an existing sequence or iterable. -.. doctest:: + .. doctest:: - >>> t = [11, 22] - >>> Point._make(t) - Point(x=11, y=22) + >>> t = [11, 22] + >>> Point._make(t) + Point(x=11, y=22) .. method:: somenamedtuple._asdict() @@ -819,16 +819,14 @@ .. method:: somenamedtuple._replace(kwargs) Return a new instance of the named tuple replacing specified fields with new - values: + values:: -:: + >>> p = Point(x=11, y=22) + >>> p._replace(x=33) + Point(x=33, y=22) - >>> p = Point(x=11, y=22) - >>> p._replace(x=33) - Point(x=33, y=22) - - >>> for partnum, record in inventory.items(): - ... inventory[partnum] = record._replace(price=newprices[partnum], timestamp=time.now()) + >>> for partnum, record in inventory.items(): + ... inventory[partnum] = record._replace(price=newprices[partnum], timestamp=time.now()) .. attribute:: somenamedtuple._source @@ -844,15 +842,15 @@ Tuple of strings listing the field names. Useful for introspection and for creating new named tuple types from existing named tuples. -.. doctest:: + .. 
doctest:: - >>> p._fields # view the field names - ('x', 'y') + >>> p._fields # view the field names + ('x', 'y') - >>> Color = namedtuple('Color', 'red green blue') - >>> Pixel = namedtuple('Pixel', Point._fields + Color._fields) - >>> Pixel(11, 22, 128, 255, 0) - Pixel(x=11, y=22, red=128, green=255, blue=0) + >>> Color = namedtuple('Color', 'red green blue') + >>> Pixel = namedtuple('Pixel', Point._fields + Color._fields) + >>> Pixel(11, 22, 128, 255, 0) + Pixel(x=11, y=22, red=128, green=255, blue=0) To retrieve a field whose name is stored in a string, use the :func:`getattr` function: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 11 09:42:28 2012 From: python-checkins at python.org (raymond.hettinger) Date: Mon, 11 Jun 2012 09:42:28 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_merge?= Message-ID: http://hg.python.org/cpython/rev/224a2d256f7c changeset: 77406:224a2d256f7c parent: 77405:dce47c04d3ab parent: 77404:8a222aac951b user: Raymond Hettinger date: Mon Jun 11 00:42:17 2012 -0700 summary: merge files: Doc/library/unittest.mock.rst | 8 +- Lib/concurrent/futures/_base.py | 4 +- Lib/unittest/mock.py | 22 ++++- Lib/unittest/test/testmock/testpatch.py | 18 +++ Modules/_decimal/libmpdec/mpdecimal.c | 63 ++++++------ 5 files changed, 76 insertions(+), 39 deletions(-) diff --git a/Doc/library/unittest.mock.rst b/Doc/library/unittest.mock.rst --- a/Doc/library/unittest.mock.rst +++ b/Doc/library/unittest.mock.rst @@ -1354,8 +1354,12 @@ As an added bonus you no longer need to keep a reference to the `patcher` object. -In fact `start` and `stop` are just aliases for the context manager -`__enter__` and `__exit__` methods. +It is also possible to stop all patches which have been started by using +`patch.stopall`. + +.. function:: patch.stopall + + Stop all active patches. Only stops patches started with `start`. 
TEST_PREFIX diff --git a/Lib/concurrent/futures/_base.py b/Lib/concurrent/futures/_base.py --- a/Lib/concurrent/futures/_base.py +++ b/Lib/concurrent/futures/_base.py @@ -470,8 +470,8 @@ return True else: LOGGER.critical('Future %s in unexpected state: %s', - id(self.future), - self.future._state) + id(self), + self._state) raise RuntimeError('Future in unexpected state') def set_result(self, result): diff --git a/Lib/unittest/mock.py b/Lib/unittest/mock.py --- a/Lib/unittest/mock.py +++ b/Lib/unittest/mock.py @@ -1002,6 +1002,7 @@ class _patch(object): attribute_name = None + _active_patches = set() def __init__( self, getter, attribute, new, spec, create, @@ -1270,8 +1271,18 @@ if _is_started(patcher): patcher.__exit__(*exc_info) - start = __enter__ - stop = __exit__ + + def start(self): + """Activate a patch, returning any created mock.""" + result = self.__enter__() + self._active_patches.add(self) + return result + + + def stop(self): + """Stop an active patch.""" + self._active_patches.discard(self) + return self.__exit__() @@ -1562,9 +1573,16 @@ del in_dict[key] +def _patch_stopall(): + """Stop all active patches.""" + for patch in list(_patch._active_patches): + patch.stop() + + patch.object = _patch_object patch.dict = _patch_dict patch.multiple = _patch_multiple +patch.stopall = _patch_stopall patch.TEST_PREFIX = 'test' magic_methods = ( diff --git a/Lib/unittest/test/testmock/testpatch.py b/Lib/unittest/test/testmock/testpatch.py --- a/Lib/unittest/test/testmock/testpatch.py +++ b/Lib/unittest/test/testmock/testpatch.py @@ -1762,6 +1762,24 @@ p.stop() + def test_patch_stopall(self): + unlink = os.unlink + chdir = os.chdir + path = os.path + patch('os.unlink', something).start() + patch('os.chdir', something_else).start() + + @patch('os.path') + def patched(mock_path): + patch.stopall() + self.assertIs(os.path, mock_path) + self.assertIs(os.unlink, unlink) + self.assertIs(os.chdir, chdir) + + patched() + self.assertIs(os.path, path) + + if __name__ == '__main__': unittest.main() diff --git a/Modules/_decimal/libmpdec/mpdecimal.c b/Modules/_decimal/libmpdec/mpdecimal.c --- a/Modules/_decimal/libmpdec/mpdecimal.c +++ b/Modules/_decimal/libmpdec/mpdecimal.c @@ -5713,30 +5713,28 @@ mpd_qnext_minus(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx, uint32_t *status) { - mpd_context_t workctx; /* function context */ + mpd_context_t workctx; MPD_NEW_CONST(tiny,MPD_POS,mpd_etiny(ctx)-1,1,1,1,1); if (mpd_isspecial(a)) { if (mpd_qcheck_nan(result, a, ctx, status)) { return; } - if (mpd_isinfinite(a)) { - if (mpd_isnegative(a)) { - mpd_qcopy(result, a, status); + + assert(mpd_isinfinite(a)); + if (mpd_isnegative(a)) { + mpd_qcopy(result, a, status); + return; + } + else { + mpd_clear_flags(result); + mpd_qmaxcoeff(result, ctx, status); + if (mpd_isnan(result)) { return; } - else { - mpd_clear_flags(result); - mpd_qmaxcoeff(result, ctx, status); - if (mpd_isnan(result)) { - return; - } - result->exp = ctx->emax - ctx->prec + 1; - return; - } - } - /* debug */ - abort(); /* GCOV_NOT_REACHED */ + result->exp = mpd_etop(ctx); + return; + } } mpd_workcontext(&workctx, ctx); @@ -5769,21 +5767,21 @@ if (mpd_qcheck_nan(result, a, ctx, status)) { return; } - if (mpd_isinfinite(a)) { - if (mpd_ispositive(a)) { - mpd_qcopy(result, a, status); + + assert(mpd_isinfinite(a)); + if (mpd_ispositive(a)) { + mpd_qcopy(result, a, status); + } + else { + mpd_clear_flags(result); + mpd_qmaxcoeff(result, ctx, status); + if (mpd_isnan(result)) { + return; } - else { - mpd_clear_flags(result); - 
mpd_qmaxcoeff(result, ctx, status); - if (mpd_isnan(result)) { - return; - } - mpd_set_flags(result, MPD_NEG); - result->exp = mpd_etop(ctx); - } - return; - } + mpd_set_flags(result, MPD_NEG); + result->exp = mpd_etop(ctx); + } + return; } mpd_workcontext(&workctx, ctx); @@ -5814,9 +5812,8 @@ { int c; - if (mpd_isnan(a) || mpd_isnan(b)) { - if (mpd_qcheck_nans(result, a, b, ctx, status)) - return; + if (mpd_qcheck_nans(result, a, b, ctx, status)) { + return; } c = _mpd_cmp(a, b); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 11 15:08:33 2012 From: python-checkins at python.org (nick.coghlan) Date: Mon, 11 Jun 2012 15:08:33 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Close_=2313857=3A_Added_tex?= =?utf8?q?twrap=2Eindent=28=29_function_=28initial_patch_by_Ezra?= Message-ID: http://hg.python.org/cpython/rev/6f7afe25d681 changeset: 77407:6f7afe25d681 user: Nick Coghlan date: Mon Jun 11 23:07:51 2012 +1000 summary: Close #13857: Added textwrap.indent() function (initial patch by Ezra Berch) files: Doc/library/textwrap.rst | 35 +++++- Doc/whatsnew/3.3.rst | 8 + Lib/test/test_textwrap.py | 140 +++++++++++++++++++++++++- Lib/textwrap.py | 21 +++- Misc/ACKS | 1 + Misc/NEWS | 3 + 6 files changed, 201 insertions(+), 7 deletions(-) diff --git a/Doc/library/textwrap.rst b/Doc/library/textwrap.rst --- a/Doc/library/textwrap.rst +++ b/Doc/library/textwrap.rst @@ -12,7 +12,7 @@ The :mod:`textwrap` module provides two convenience functions, :func:`wrap` and :func:`fill`, as well as :class:`TextWrapper`, the class that does all the work, -and a utility function :func:`dedent`. If you're just wrapping or filling one +and two utility functions, :func:`dedent` and :func:`indent`. If you're just wrapping or filling one or two text strings, the convenience functions should be good enough; otherwise, you should use an instance of :class:`TextWrapper` for efficiency. @@ -45,9 +45,10 @@ hyphenated words; only then will long words be broken if necessary, unless :attr:`TextWrapper.break_long_words` is set to false. -An additional utility function, :func:`dedent`, is provided to remove -indentation from strings that have unwanted whitespace to the left of the text. - +Two additional utility function, :func:`dedent` and :func:`indent`, are +provided to remove indentation from strings that have unwanted whitespace +to the left of the text and to add an arbitrary prefix to selected lines +in a block of text. .. function:: dedent(text) @@ -72,6 +73,32 @@ print(repr(dedent(s))) # prints 'hello\n world\n' +.. function:: indent(text, prefix, predicate=None) + + Add *prefix* to the beginning of selected lines in *text*. + + Lines are separated by calling ``text.splitlines(True)``. + + By default, *prefix* is added to all lines that do not consist + solely of whitespace (including any line endings). + + For example:: + + >>> s = 'hello\n\n \nworld' + >>> indent(s, ' ') + ' hello\n\n \n world' + + The optional *predicate* argument can be used to control which lines + are indented. For example, it is easy to add *prefix* to even empty + and whitespace-only lines:: + + >>> print(indent(s, '+ ', lambda line: True)) + + hello + + + + + + world + + .. 
class:: TextWrapper(**kwargs) The :class:`TextWrapper` constructor accepts a number of optional keyword diff --git a/Doc/whatsnew/3.3.rst b/Doc/whatsnew/3.3.rst --- a/Doc/whatsnew/3.3.rst +++ b/Doc/whatsnew/3.3.rst @@ -1406,6 +1406,14 @@ (:issue:`11223`) +textwrap +-------- + +* The :mod:`textwrap` module has a new :func:`~textwrap.indent` that makes + it straightforward to add a common prefix to selected lines in a block + of text. + + (:issue:`13857`) time ---- diff --git a/Lib/test/test_textwrap.py b/Lib/test/test_textwrap.py --- a/Lib/test/test_textwrap.py +++ b/Lib/test/test_textwrap.py @@ -11,7 +11,7 @@ import unittest from test import support -from textwrap import TextWrapper, wrap, fill, dedent +from textwrap import TextWrapper, wrap, fill, dedent, indent class BaseTestCase(unittest.TestCase): @@ -594,11 +594,147 @@ self.assertEqual(expect, dedent(text)) +# Test textwrap.indent +class IndentTestCase(unittest.TestCase): + # The examples used for tests. If any of these change, the expected + # results in the various test cases must also be updated. + # The roundtrip cases are separate, because textwrap.dedent doesn't + # handle Windows line endings + ROUNDTRIP_CASES = ( + # Basic test case + "Hi.\nThis is a test.\nTesting.", + # Include a blank line + "Hi.\nThis is a test.\n\nTesting.", + # Include leading and trailing blank lines + "\nHi.\nThis is a test.\nTesting.\n", + ) + CASES = ROUNDTRIP_CASES + ( + # Use Windows line endings + "Hi.\r\nThis is a test.\r\nTesting.\r\n", + # Pathological case + "\nHi.\r\nThis is a test.\n\r\nTesting.\r\n\n", + ) + + def test_indent_nomargin_default(self): + # indent should do nothing if 'prefix' is empty. + for text in self.CASES: + self.assertEqual(indent(text, ''), text) + + def test_indent_nomargin_explicit_default(self): + # The same as test_indent_nomargin, but explicitly requesting + # the default behaviour by passing None as the predicate + for text in self.CASES: + self.assertEqual(indent(text, '', None), text) + + def test_indent_nomargin_all_lines(self): + # The same as test_indent_nomargin, but using the optional + # predicate argument + predicate = lambda line: True + for text in self.CASES: + self.assertEqual(indent(text, '', predicate), text) + + def test_indent_no_lines(self): + # Explicitly skip indenting any lines + predicate = lambda line: False + for text in self.CASES: + self.assertEqual(indent(text, ' ', predicate), text) + + def test_roundtrip_spaces(self): + # A whitespace prefix should roundtrip with dedent + for text in self.ROUNDTRIP_CASES: + self.assertEqual(dedent(indent(text, ' ')), text) + + def test_roundtrip_tabs(self): + # A whitespace prefix should roundtrip with dedent + for text in self.ROUNDTRIP_CASES: + self.assertEqual(dedent(indent(text, '\t\t')), text) + + def test_roundtrip_mixed(self): + # A whitespace prefix should roundtrip with dedent + for text in self.ROUNDTRIP_CASES: + self.assertEqual(dedent(indent(text, ' \t \t ')), text) + + def test_indent_default(self): + # Test default indenting of lines that are not whitespace only + prefix = ' ' + expected = ( + # Basic test case + " Hi.\n This is a test.\n Testing.", + # Include a blank line + " Hi.\n This is a test.\n\n Testing.", + # Include leading and trailing blank lines + "\n Hi.\n This is a test.\n Testing.\n", + # Use Windows line endings + " Hi.\r\n This is a test.\r\n Testing.\r\n", + # Pathological case + "\n Hi.\r\n This is a test.\n\r\n Testing.\r\n\n", + ) + for text, expect in zip(self.CASES, expected): + self.assertEqual(indent(text, 
prefix), expect) + + def test_indent_explicit_default(self): + # Test default indenting of lines that are not whitespace only + prefix = ' ' + expected = ( + # Basic test case + " Hi.\n This is a test.\n Testing.", + # Include a blank line + " Hi.\n This is a test.\n\n Testing.", + # Include leading and trailing blank lines + "\n Hi.\n This is a test.\n Testing.\n", + # Use Windows line endings + " Hi.\r\n This is a test.\r\n Testing.\r\n", + # Pathological case + "\n Hi.\r\n This is a test.\n\r\n Testing.\r\n\n", + ) + for text, expect in zip(self.CASES, expected): + self.assertEqual(indent(text, prefix, None), expect) + + def test_indent_all_lines(self): + # Add 'prefix' to all lines, including whitespace-only ones. + prefix = ' ' + expected = ( + # Basic test case + " Hi.\n This is a test.\n Testing.", + # Include a blank line + " Hi.\n This is a test.\n \n Testing.", + # Include leading and trailing blank lines + " \n Hi.\n This is a test.\n Testing.\n", + # Use Windows line endings + " Hi.\r\n This is a test.\r\n Testing.\r\n", + # Pathological case + " \n Hi.\r\n This is a test.\n \r\n Testing.\r\n \n", + ) + predicate = lambda line: True + for text, expect in zip(self.CASES, expected): + self.assertEqual(indent(text, prefix, predicate), expect) + + def test_indent_empty_lines(self): + # Add 'prefix' solely to whitespace-only lines. + prefix = ' ' + expected = ( + # Basic test case + "Hi.\nThis is a test.\nTesting.", + # Include a blank line + "Hi.\nThis is a test.\n \nTesting.", + # Include leading and trailing blank lines + " \nHi.\nThis is a test.\nTesting.\n", + # Use Windows line endings + "Hi.\r\nThis is a test.\r\nTesting.\r\n", + # Pathological case + " \nHi.\r\nThis is a test.\n \r\nTesting.\r\n \n", + ) + predicate = lambda line: not line.strip() + for text, expect in zip(self.CASES, expected): + self.assertEqual(indent(text, prefix, predicate), expect) + + def test_main(): support.run_unittest(WrapTestCase, LongWordTestCase, IndentTestCases, - DedentTestCase) + DedentTestCase, + IndentTestCase) if __name__ == '__main__': test_main() diff --git a/Lib/textwrap.py b/Lib/textwrap.py --- a/Lib/textwrap.py +++ b/Lib/textwrap.py @@ -7,7 +7,7 @@ import re -__all__ = ['TextWrapper', 'wrap', 'fill', 'dedent'] +__all__ = ['TextWrapper', 'wrap', 'fill', 'dedent', 'indent'] # Hardcode the recognized whitespace characters to the US-ASCII # whitespace characters. The main reason for doing this is that in @@ -386,6 +386,25 @@ text = re.sub(r'(?m)^' + margin, '', text) return text + +def indent(text, prefix, predicate=None): + """Adds 'prefix' to the beginning of selected lines in 'text'. + + If 'predicate' is provided, 'prefix' will only be added to the lines + where 'predicate(line)' is True. If 'predicate' is not provided, + it will default to adding 'prefix' to all non-empty lines that do not + consist solely of whitespace characters. 
+ """ + if predicate is None: + def predicate(line): + return line.strip() + + def prefixed_lines(): + for line in text.splitlines(True): + yield (prefix + line if predicate(line) else line) + return ''.join(prefixed_lines()) + + if __name__ == "__main__": #print dedent("\tfoo\n\tbar") #print dedent(" \thello there\n \t how are you?") diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -82,6 +82,7 @@ Eli Bendersky Andrew Bennetts Andy Bensky +Ezra Berch Michel Van den Bergh Julian Berman Brice Berna diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -21,6 +21,9 @@ Library ------- +- Issue #13857: Added textwrap.indent() function (initial patch by Ezra + Berch) + - Issue #2736: Added datetime.timestamp() method. - Issue #13854: Make multiprocessing properly handle non-integer -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 11 16:19:53 2012 From: python-checkins at python.org (richard.oudkerk) Date: Mon, 11 Jun 2012 16:19:53 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzEwMTMz?= =?utf8?q?=3A_Make_multiprocessing_deallocate_buffer_if_socket_read_fails?= =?utf8?q?=2E?= Message-ID: http://hg.python.org/cpython/rev/60a7b704de5c changeset: 77408:60a7b704de5c branch: 2.7 parent: 77395:272e7dcffd30 user: Richard Oudkerk date: Mon Jun 11 15:11:35 2012 +0100 summary: Issue #10133: Make multiprocessing deallocate buffer if socket read fails. Patch by Hallvard B Furuseth. files: Misc/NEWS | 3 + Modules/_multiprocessing/socket_connection.c | 29 +++++---- 2 files changed, 19 insertions(+), 13 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -67,6 +67,9 @@ Library ------- +- Issue #10133: Make multiprocessing deallocate buffer if socket read + fails. Patch by Hallvard B Furuseth. + - Issue #13854: Make multiprocessing properly handle non-integer non-string argument to SystemExit. diff --git a/Modules/_multiprocessing/socket_connection.c b/Modules/_multiprocessing/socket_connection.c --- a/Modules/_multiprocessing/socket_connection.c +++ b/Modules/_multiprocessing/socket_connection.c @@ -117,7 +117,7 @@ conn_recv_string(ConnectionObject *conn, char *buffer, size_t buflength, char **newbuffer, size_t maxlength) { - int res; + Py_ssize_t res; UINT32 ulength; *newbuffer = NULL; @@ -132,20 +132,23 @@ if (ulength > maxlength) return MP_BAD_MESSAGE_LENGTH; - if (ulength <= buflength) { - Py_BEGIN_ALLOW_THREADS - res = _conn_recvall(conn->handle, buffer, (size_t)ulength); - Py_END_ALLOW_THREADS - return res < 0 ? res : ulength; - } else { - *newbuffer = PyMem_Malloc((size_t)ulength); - if (*newbuffer == NULL) + if (ulength > buflength) { + *newbuffer = buffer = PyMem_Malloc((size_t)ulength); + if (buffer == NULL) return MP_MEMORY_ERROR; - Py_BEGIN_ALLOW_THREADS - res = _conn_recvall(conn->handle, *newbuffer, (size_t)ulength); - Py_END_ALLOW_THREADS - return res < 0 ? 
(Py_ssize_t)res : (Py_ssize_t)ulength; } + + Py_BEGIN_ALLOW_THREADS + res = _conn_recvall(conn->handle, buffer, (size_t)ulength); + Py_END_ALLOW_THREADS + + if (res >= 0) { + res = (Py_ssize_t)ulength; + } else if (*newbuffer != NULL) { + PyMem_Free(*newbuffer); + *newbuffer = NULL; + } + return res; } /* -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 11 16:19:54 2012 From: python-checkins at python.org (richard.oudkerk) Date: Mon, 11 Jun 2012 16:19:54 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogSXNzdWUgIzEwMTMz?= =?utf8?q?=3A_Make_multiprocessing_deallocate_buffer_if_socket_read_fails?= =?utf8?q?=2E?= Message-ID: http://hg.python.org/cpython/rev/5643697070c0 changeset: 77409:5643697070c0 branch: 3.2 parent: 77380:02b4c62ce393 user: Richard Oudkerk date: Mon Jun 11 15:12:12 2012 +0100 summary: Issue #10133: Make multiprocessing deallocate buffer if socket read fails. Patch by Hallvard B Furuseth. files: Misc/NEWS | 3 + Modules/_multiprocessing/socket_connection.c | 29 +++++---- 2 files changed, 19 insertions(+), 13 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -70,6 +70,9 @@ Library ------- +- Issue #10133: Make multiprocessing deallocate buffer if socket read + fails. Patch by Hallvard B Furuseth. + - Issue #13854: Make multiprocessing properly handle non-integer non-string argument to SystemExit. diff --git a/Modules/_multiprocessing/socket_connection.c b/Modules/_multiprocessing/socket_connection.c --- a/Modules/_multiprocessing/socket_connection.c +++ b/Modules/_multiprocessing/socket_connection.c @@ -117,7 +117,7 @@ conn_recv_string(ConnectionObject *conn, char *buffer, size_t buflength, char **newbuffer, size_t maxlength) { - int res; + Py_ssize_t res; UINT32 ulength; *newbuffer = NULL; @@ -132,20 +132,23 @@ if (ulength > maxlength) return MP_BAD_MESSAGE_LENGTH; - if (ulength <= buflength) { - Py_BEGIN_ALLOW_THREADS - res = _conn_recvall(conn->handle, buffer, (size_t)ulength); - Py_END_ALLOW_THREADS - return res < 0 ? res : ulength; - } else { - *newbuffer = PyMem_Malloc((size_t)ulength); - if (*newbuffer == NULL) + if (ulength > buflength) { + *newbuffer = buffer = PyMem_Malloc((size_t)ulength); + if (buffer == NULL) return MP_MEMORY_ERROR; - Py_BEGIN_ALLOW_THREADS - res = _conn_recvall(conn->handle, *newbuffer, (size_t)ulength); - Py_END_ALLOW_THREADS - return res < 0 ? 
(Py_ssize_t)res : (Py_ssize_t)ulength; } + + Py_BEGIN_ALLOW_THREADS + res = _conn_recvall(conn->handle, buffer, (size_t)ulength); + Py_END_ALLOW_THREADS + + if (res >= 0) { + res = (Py_ssize_t)ulength; + } else if (*newbuffer != NULL) { + PyMem_Free(*newbuffer); + *newbuffer = NULL; + } + return res; } /* -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 11 16:19:54 2012 From: python-checkins at python.org (richard.oudkerk) Date: Mon, 11 Jun 2012 16:19:54 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Dummy_merge?= Message-ID: http://hg.python.org/cpython/rev/d9d382b7670a changeset: 77410:d9d382b7670a parent: 77407:6f7afe25d681 parent: 77409:5643697070c0 user: Richard Oudkerk date: Mon Jun 11 15:16:56 2012 +0100 summary: Dummy merge files: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 11 18:57:49 2012 From: python-checkins at python.org (richard.oudkerk) Date: Mon, 11 Jun 2012 18:57:49 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=233518=3A_Remove_ref?= =?utf8?q?erences_to_non-existent_BaseManager=2Efrom=5Faddress=28=29?= Message-ID: http://hg.python.org/cpython/rev/c2910971eb86 changeset: 77411:c2910971eb86 user: Richard Oudkerk date: Mon Jun 11 17:56:08 2012 +0100 summary: Issue #3518: Remove references to non-existent BaseManager.from_address() method files: Doc/library/multiprocessing.rst | 7 ++++--- Lib/multiprocessing/managers.py | 4 ---- Misc/NEWS | 3 +++ 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst --- a/Doc/library/multiprocessing.rst +++ b/Doc/library/multiprocessing.rst @@ -1236,9 +1236,10 @@ type of shared object. This must be a string. *callable* is a callable used for creating objects for this type - identifier. If a manager instance will be created using the - :meth:`from_address` classmethod or if the *create_method* argument is - ``False`` then this can be left as ``None``. + identifier. If a manager instance will be connected to the + server using the :meth:`connect` method, or if the + *create_method* argument is ``False`` then this can be left as + ``None``. *proxytype* is a subclass of :class:`BaseProxy` which is used to create proxies for shared objects with this *typeid*. If ``None`` then a proxy diff --git a/Lib/multiprocessing/managers.py b/Lib/multiprocessing/managers.py --- a/Lib/multiprocessing/managers.py +++ b/Lib/multiprocessing/managers.py @@ -455,10 +455,6 @@ self._serializer = serializer self._Listener, self._Client = listener_client[serializer] - def __reduce__(self): - return type(self).from_address, \ - (self._address, self._authkey, self._serializer) - def get_server(self): ''' Return server object with serve_forever() method and address attribute diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -21,6 +21,9 @@ Library ------- +- Issue #3518: Remove references to non-existent BaseManager.from_address() + method. 
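
A minimal sketch of the pattern the revised multiprocessing documentation above describes: a client-side manager registers a typeid without a callable and attaches to an already-running server with connect(). This is not part of the checkin itself; the address, authkey and 'get_queue' typeid are illustrative assumptions.

    from multiprocessing.managers import BaseManager

    class QueueManager(BaseManager):
        pass

    # Client side: no callable is needed for the typeid, because this
    # manager only connects to an existing server rather than creating
    # the shared objects itself.
    QueueManager.register('get_queue')

    m = QueueManager(address=('127.0.0.1', 50000), authkey=b'abracadabra')
    m.connect()              # attach to the running server
    queue = m.get_queue()    # proxy for the shared object
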
+ - Issue #13857: Added textwrap.indent() function (initial patch by Ezra Berch) -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Tue Jun 12 05:47:18 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Tue, 12 Jun 2012 05:47:18 +0200 Subject: [Python-checkins] Daily reference leaks (c2910971eb86): sum=0 Message-ID: results for c2910971eb86 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogydpjuP', '-x'] From python-checkins at python.org Tue Jun 12 21:06:54 2012 From: python-checkins at python.org (stefan.krah) Date: Tue, 12 Jun 2012 21:06:54 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_1=29_Fix_signature_of_=5Fmp?= =?utf8?q?d=5Fqpow=5Fuint=28=29=3A_contrary_to_the_comment_base_is_constan?= =?utf8?q?t=2E?= Message-ID: http://hg.python.org/cpython/rev/8f88718fe2c4 changeset: 77412:8f88718fe2c4 user: Stefan Krah date: Tue Jun 12 21:06:06 2012 +0200 summary: 1) Fix signature of _mpd_qpow_uint(): contrary to the comment base is constant. 2) Abort the loop for all specials, not only infinity. 3) Make the function more general and distinguish between zero clamping and folding down the exponent. The latter case is currently handled by setting context->clamp to 0 before calling the function. files: Modules/_decimal/libmpdec/mpdecimal.c | 16 ++++++++------ 1 files changed, 9 insertions(+), 7 deletions(-) diff --git a/Modules/_decimal/libmpdec/mpdecimal.c b/Modules/_decimal/libmpdec/mpdecimal.c --- a/Modules/_decimal/libmpdec/mpdecimal.c +++ b/Modules/_decimal/libmpdec/mpdecimal.c @@ -107,8 +107,9 @@ const mpd_context_t *ctx, uint32_t *status); static void _mpd_base_ndivmod(mpd_t *q, mpd_t *r, const mpd_t *a, const mpd_t *b, uint32_t *status); -static inline void _mpd_qpow_uint(mpd_t *result, mpd_t *base, mpd_uint_t exp, - uint8_t resultsign, const mpd_context_t *ctx, uint32_t *status); +static inline void _mpd_qpow_uint(mpd_t *result, const mpd_t *base, + mpd_uint_t exp, uint8_t resultsign, + const mpd_context_t *ctx, uint32_t *status); mpd_uint_t mpd_qsshiftr(mpd_t *result, const mpd_t *a, mpd_ssize_t n); @@ -5841,12 +5842,12 @@ } /* - * Internal function: Integer power with mpd_uint_t exponent, base is modified! - * Function can fail with MPD_Malloc_error. + * Internal function: Integer power with mpd_uint_t exponent. The function + * can fail with MPD_Malloc_error. 
*/ static inline void -_mpd_qpow_uint(mpd_t *result, mpd_t *base, mpd_uint_t exp, uint8_t resultsign, - const mpd_context_t *ctx, uint32_t *status) +_mpd_qpow_uint(mpd_t *result, const mpd_t *base, mpd_uint_t exp, + uint8_t resultsign, const mpd_context_t *ctx, uint32_t *status) { uint32_t workstatus = 0; mpd_uint_t n; @@ -5866,7 +5867,8 @@ if (exp & n) { mpd_qmul(result, result, base, ctx, &workstatus); } - if (workstatus & (MPD_Overflow|MPD_Clamped)) { + if (mpd_isspecial(result) || + (mpd_iszerocoeff(result) && (workstatus & MPD_Clamped))) { break; } } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Jun 12 22:14:32 2012 From: python-checkins at python.org (alexander.belopolsky) Date: Tue, 12 Jun 2012 22:14:32 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fixed_a_typo_in_time=5Floca?= =?utf8?q?ltime=28=29?= Message-ID: http://hg.python.org/cpython/rev/a6c53396c2c3 changeset: 77413:a6c53396c2c3 user: Alexander Belopolsky date: Tue Jun 12 16:14:17 2012 -0400 summary: Fixed a typo in time_localtime() files: Modules/timemodule.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/timemodule.c b/Modules/timemodule.c --- a/Modules/timemodule.c +++ b/Modules/timemodule.c @@ -401,7 +401,7 @@ if (!parse_time_t_args(args, "|O:localtime", &when)) return NULL; - if (pylocaltime(&when, &buf) == 1) + if (pylocaltime(&when, &buf) == -1) return NULL; return tmtotuple(&buf); } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Jun 12 22:18:09 2012 From: python-checkins at python.org (victor.stinner) Date: Tue, 12 Jun 2012 22:18:09 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_time=2Eget=5Fclock=5Finfo?= =?utf8?q?=28=29_uses_a_namespace_instead_of_structseq?= Message-ID: http://hg.python.org/cpython/rev/0011536f2a06 changeset: 77414:0011536f2a06 user: Victor Stinner date: Tue Jun 12 22:11:44 2012 +0200 summary: time.get_clock_info() uses a namespace instead of structseq files: Doc/library/time.rst | 36 ++++++-------------- Modules/timemodule.c | 57 +++++++++++-------------------- 2 files changed, 31 insertions(+), 62 deletions(-) diff --git a/Doc/library/time.rst b/Doc/library/time.rst --- a/Doc/library/time.rst +++ b/Doc/library/time.rst @@ -160,30 +160,6 @@ .. versionadded:: 3.3 -.. class:: clock_info - - Clock information object returned by :func:`get_clock_info`. - - .. attribute:: implementation - - The name of the underlying C function used to get the clock value. - - .. attribute:: monotonic - - ``True`` if the clock cannot go backward, ``False`` otherwise. - - .. attribute:: adjusted - - ``True`` if the clock can be adjusted (e.g. by a NTP daemon), ``False`` - otherwise. - - .. attribute:: resolution - - The resolution of the clock in seconds (:class:`float`). - - .. versionadded:: 3.3 - - .. function:: clock_settime(clk_id, time) Set the time of the specified clock *clk_id*. @@ -267,7 +243,7 @@ .. function:: get_clock_info(name) - Get information on the specified clock as a :class:`clock_info` object. + Get information on the specified clock as a namespace object. Supported clock names and the corresponding functions to read their value are: @@ -277,6 +253,16 @@ * ``'process_time'``: :func:`time.process_time` * ``'time'``: :func:`time.time` + The result has the following attributes: + + - *adjusted*: ``True`` if the clock can be adjusted (e.g. 
by a NTP daemon), + ``False`` otherwise + - *implementation*: The name of the underlying C function used to get + the clock value + - *monotonic*: ``True`` if the clock cannot go backward, + ``False`` otherwise + - *resolution*: The resolution of the clock in seconds (:class:`float`) + .. versionadded:: 3.3 diff --git a/Modules/timemodule.c b/Modules/timemodule.c --- a/Modules/timemodule.c +++ b/Modules/timemodule.c @@ -1124,35 +1124,12 @@ Process time for profiling: sum of the kernel and user-space CPU time."); -static PyTypeObject ClockInfoType; - -PyDoc_STRVAR(ClockInfo_docstring, - "Clock information"); - -static PyStructSequence_Field ClockInfo_fields[] = { - {"implementation", "name of the underlying C function " - "used to get the clock value"}, - {"monotonic", "True if the clock cannot go backward, False otherwise"}, - {"adjusted", "True if the clock can be adjusted " - "(e.g. by a NTP daemon), False otherwise"}, - {"resolution", "resolution of the clock in seconds"}, - {NULL, NULL} -}; - -static PyStructSequence_Desc ClockInfo_desc = { - "time.clock_info", - ClockInfo_docstring, - ClockInfo_fields, - 4, -}; - static PyObject * time_get_clock_info(PyObject *self, PyObject *args) { char *name; - PyObject *obj; _Py_clock_info_t info; - PyObject *result; + PyObject *obj = NULL, *dict, *ns; if (!PyArg_ParseTuple(args, "s:get_clock_info", &name)) return NULL; @@ -1191,39 +1168,50 @@ return NULL; Py_DECREF(obj); - result = PyStructSequence_New(&ClockInfoType); - if (result == NULL) + dict = PyDict_New(); + if (dict == NULL) return NULL; assert(info.implementation != NULL); obj = PyUnicode_FromString(info.implementation); if (obj == NULL) goto error; - PyStructSequence_SET_ITEM(result, 0, obj); + if (PyDict_SetItemString(dict, "implementation", obj) == -1) + goto error; + Py_CLEAR(obj); assert(info.monotonic != -1); obj = PyBool_FromLong(info.monotonic); if (obj == NULL) goto error; - PyStructSequence_SET_ITEM(result, 1, obj); + if (PyDict_SetItemString(dict, "monotonic", obj) == -1) + goto error; + Py_CLEAR(obj); assert(info.adjusted != -1); obj = PyBool_FromLong(info.adjusted); if (obj == NULL) goto error; - PyStructSequence_SET_ITEM(result, 2, obj); + if (PyDict_SetItemString(dict, "adjusted", obj) == -1) + goto error; + Py_CLEAR(obj); assert(info.resolution > 0.0); assert(info.resolution <= 1.0); obj = PyFloat_FromDouble(info.resolution); if (obj == NULL) goto error; - PyStructSequence_SET_ITEM(result, 3, obj); + if (PyDict_SetItemString(dict, "resolution", obj) == -1) + goto error; + Py_CLEAR(obj); - return result; + ns = _PyNamespace_New(dict); + Py_DECREF(dict); + return ns; error: - Py_DECREF(result); + Py_DECREF(dict); + Py_XDECREF(obj); return NULL; } @@ -1451,11 +1439,6 @@ PyStructSequence_InitType(&StructTimeType, &struct_time_type_desc); - /* initialize ClockInfoType */ - PyStructSequence_InitType(&ClockInfoType, &ClockInfo_desc); - Py_INCREF(&ClockInfoType); - PyModule_AddObject(m, "clock_info", (PyObject*)&ClockInfoType); - #ifdef MS_WINDOWS winver.dwOSVersionInfoSize = sizeof(winver); if (!GetVersionEx((OSVERSIONINFO*)&winver)) { -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Jun 12 22:50:50 2012 From: python-checkins at python.org (victor.stinner) Date: Tue, 12 Jun 2012 22:50:50 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_PEP_418=3A_Rename_adjusted_?= =?utf8?q?attribute_to_adjustable_in_time=2Eget=5Fclock=5Finfo=28=29_resul?= =?utf8?q?t?= Message-ID: http://hg.python.org/cpython/rev/0e46e0cd368f changeset: 
77415:0e46e0cd368f user: Victor Stinner date: Tue Jun 12 22:46:37 2012 +0200 summary: PEP 418: Rename adjusted attribute to adjustable in time.get_clock_info() result Fix also its value on Windows and Linux according to its documentation: "adjustable" indicates if the clock *can be* adjusted, not if it is or was adjusted. In most cases, it is not possible to indicate if a clock is or was adjusted. files: Doc/library/time.rst | 4 +- Include/pytime.h | 2 +- Lib/test/test_time.py | 12 +++++----- Misc/NEWS | 2 + Modules/timemodule.c | 36 ++++++++++++------------------ Python/pytime.c | 11 +++------ 6 files changed, 30 insertions(+), 37 deletions(-) diff --git a/Doc/library/time.rst b/Doc/library/time.rst --- a/Doc/library/time.rst +++ b/Doc/library/time.rst @@ -255,8 +255,8 @@ The result has the following attributes: - - *adjusted*: ``True`` if the clock can be adjusted (e.g. by a NTP daemon), - ``False`` otherwise + - *adjustable*: ``True`` if the clock can be changed automatically (e.g. by + a NTP daemon) or manually by the system administrator, ``False`` otherwise - *implementation*: The name of the underlying C function used to get the clock value - *monotonic*: ``True`` if the clock cannot go backward, diff --git a/Include/pytime.h b/Include/pytime.h --- a/Include/pytime.h +++ b/Include/pytime.h @@ -26,7 +26,7 @@ typedef struct { const char *implementation; int monotonic; - int adjusted; + int adjustable; double resolution; } _Py_clock_info_t; diff --git a/Lib/test/test_time.py b/Lib/test/test_time.py --- a/Lib/test/test_time.py +++ b/Lib/test/test_time.py @@ -32,14 +32,14 @@ info = time.get_clock_info('time') self.assertFalse(info.monotonic) if sys.platform != 'win32': - self.assertTrue(info.adjusted) + self.assertTrue(info.adjustable) def test_clock(self): time.clock() info = time.get_clock_info('clock') self.assertTrue(info.monotonic) - self.assertFalse(info.adjusted) + self.assertFalse(info.adjustable) @unittest.skipUnless(hasattr(time, 'clock_gettime'), 'need time.clock_gettime()') @@ -372,9 +372,9 @@ info = time.get_clock_info('monotonic') self.assertTrue(info.monotonic) if sys.platform == 'linux': - self.assertTrue(info.adjusted) + self.assertTrue(info.adjustable) else: - self.assertFalse(info.adjusted) + self.assertFalse(info.adjustable) def test_perf_counter(self): time.perf_counter() @@ -390,7 +390,7 @@ info = time.get_clock_info('process_time') self.assertTrue(info.monotonic) - self.assertFalse(info.adjusted) + self.assertFalse(info.adjustable) @unittest.skipUnless(hasattr(time, 'monotonic'), 'need time.monotonic') @@ -441,7 +441,7 @@ # 0.0 < resolution <= 1.0 self.assertGreater(info.resolution, 0.0) self.assertLessEqual(info.resolution, 1.0) - self.assertIsInstance(info.adjusted, bool) + self.assertIsInstance(info.adjustable, bool) self.assertRaises(ValueError, time.get_clock_info, 'xxx') diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -21,6 +21,8 @@ Library ------- +- Rename adjusted attribute to adjustable in time.get_clock_info() result. + - Issue #3518: Remove references to non-existent BaseManager.from_address() method. 
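
For readers following the clock_info changes in this series of commits, a short usage sketch of time.get_clock_info() as it behaves after the rename above; the printed values are platform-dependent examples, not guaranteed output.

    import time

    info = time.get_clock_info('monotonic')   # plain namespace object, not a structseq
    print(info.implementation)   # e.g. 'clock_gettime(CLOCK_MONOTONIC)'
    print(info.monotonic)        # True: this clock cannot go backwards
    print(info.adjustable)       # False: not subject to NTP or manual adjustment
    print(info.resolution)       # resolution in seconds, e.g. 1e-09
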
diff --git a/Modules/timemodule.c b/Modules/timemodule.c --- a/Modules/timemodule.c +++ b/Modules/timemodule.c @@ -96,7 +96,7 @@ info->implementation = "clock()"; info->resolution = 1.0 / (double)CLOCKS_PER_SEC; info->monotonic = 1; - info->adjusted = 0; + info->adjustable = 0; } return PyFloat_FromDouble((double)value / CLOCKS_PER_SEC); } @@ -132,7 +132,7 @@ info->implementation = "QueryPerformanceCounter()"; info->resolution = 1.0 / (double)cpu_frequency; info->monotonic = 1; - info->adjusted = 0; + info->adjustable = 0; } *result = PyFloat_FromDouble(diff / (double)cpu_frequency); return 0; @@ -882,7 +882,7 @@ return NULL; } info->resolution = timeIncrement * 1e-7; - info->adjusted = 0; + info->adjustable = 0; } return PyFloat_FromDouble(result); @@ -903,7 +903,7 @@ info->implementation = "mach_absolute_time()"; info->resolution = (double)timebase.numer / timebase.denom * 1e-9; info->monotonic = 1; - info->adjusted = 0; + info->adjustable = 0; } return PyFloat_FromDouble(secs); @@ -926,13 +926,7 @@ struct timespec res; info->monotonic = 1; info->implementation = function; -#if (defined(linux) || defined(__linux) || defined(__linux__)) \ - && !defined(CLOCK_HIGHRES) - /* CLOCK_MONOTONIC is adjusted on Linux */ - info->adjusted = 1; -#else - info->adjusted = 0; -#endif + info->adjustable = 0; if (clock_getres(clk_id, &res) == 0) info->resolution = res.tv_sec + res.tv_nsec * 1e-9; else @@ -1024,7 +1018,7 @@ info->implementation = "GetProcessTimes()"; info->resolution = 1e-7; info->monotonic = 1; - info->adjusted = 0; + info->adjustable = 0; } return PyFloat_FromDouble(total * 1e-7); #else @@ -1053,7 +1047,7 @@ struct timespec res; info->implementation = function; info->monotonic = 1; - info->adjusted = 0; + info->adjustable = 0; if (clock_getres(clk_id, &res) == 0) info->resolution = res.tv_sec + res.tv_nsec * 1e-9; else @@ -1071,7 +1065,7 @@ if (info) { info->implementation = "getrusage(RUSAGE_SELF)"; info->monotonic = 1; - info->adjusted = 0; + info->adjustable = 0; info->resolution = 1e-6; } return PyFloat_FromDouble(total); @@ -1100,7 +1094,7 @@ if (info) { info->implementation = "times()"; info->monotonic = 1; - info->adjusted = 0; + info->adjustable = 0; info->resolution = 1.0 / ticks_per_second; } return PyFloat_FromDouble(total); @@ -1137,12 +1131,12 @@ #ifdef Py_DEBUG info.implementation = NULL; info.monotonic = -1; - info.adjusted = -1; + info.adjustable = -1; info.resolution = -1.0; #else info.implementation = ""; info.monotonic = 0; - info.adjusted = 0; + info.adjustable = 0; info.resolution = 1.0; #endif @@ -1188,11 +1182,11 @@ goto error; Py_CLEAR(obj); - assert(info.adjusted != -1); - obj = PyBool_FromLong(info.adjusted); + assert(info.adjustable != -1); + obj = PyBool_FromLong(info.adjustable); if (obj == NULL) goto error; - if (PyDict_SetItemString(dict, "adjusted", obj) == -1) + if (PyDict_SetItemString(dict, "adjustable", obj) == -1) goto error; Py_CLEAR(obj); @@ -1471,7 +1465,7 @@ struct timespec res; info->implementation = "clock_gettime(CLOCK_REALTIME)"; info->monotonic = 0; - info->adjusted = 1; + info->adjustable = 1; if (clock_getres(CLOCK_REALTIME, &res) == 0) info->resolution = res.tv_sec + res.tv_nsec * 1e-9; else diff --git a/Python/pytime.c b/Python/pytime.c --- a/Python/pytime.c +++ b/Python/pytime.c @@ -44,10 +44,7 @@ (void) GetSystemTimeAdjustment(&timeAdjustment, &timeIncrement, &isTimeAdjustmentDisabled); info->resolution = timeIncrement * 1e-7; - if (isTimeAdjustmentDisabled) - info->adjusted = 0; - else - info->adjusted = 1; + info->adjustable = 1; } 
#else /* There are three ways to get the time: @@ -71,7 +68,7 @@ info->implementation = "gettimeofday()"; info->resolution = 1e-6; info->monotonic = 0; - info->adjusted = 1; + info->adjustable = 1; } return; } @@ -87,7 +84,7 @@ info->implementation = "ftime()"; info->resolution = 1e-3; info->monotonic = 0; - info->adjusted = 1; + info->adjustable = 1; } } #else /* !HAVE_FTIME */ @@ -97,7 +94,7 @@ info->implementation = "time()"; info->resolution = 1.0; info->monotonic = 0; - info->adjusted = 1; + info->adjustable = 1; } #endif /* !HAVE_FTIME */ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Jun 12 23:08:05 2012 From: python-checkins at python.org (victor.stinner) Date: Tue, 12 Jun 2012 23:08:05 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_test=5Ftime_for_adjuste?= =?utf8?q?d/adjustable_changes?= Message-ID: http://hg.python.org/cpython/rev/26e2ee402a0b changeset: 77416:26e2ee402a0b user: Victor Stinner date: Tue Jun 12 23:04:11 2012 +0200 summary: Fix test_time for adjusted/adjustable changes files: Lib/test/test_time.py | 8 ++------ 1 files changed, 2 insertions(+), 6 deletions(-) diff --git a/Lib/test/test_time.py b/Lib/test/test_time.py --- a/Lib/test/test_time.py +++ b/Lib/test/test_time.py @@ -31,8 +31,7 @@ time.time() info = time.get_clock_info('time') self.assertFalse(info.monotonic) - if sys.platform != 'win32': - self.assertTrue(info.adjustable) + self.assertTrue(info.adjustable) def test_clock(self): time.clock() @@ -371,10 +370,7 @@ info = time.get_clock_info('monotonic') self.assertTrue(info.monotonic) - if sys.platform == 'linux': - self.assertTrue(info.adjustable) - else: - self.assertFalse(info.adjustable) + self.assertFalse(info.adjustable) def test_perf_counter(self): time.perf_counter() -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Wed Jun 13 05:47:09 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Wed, 13 Jun 2012 05:47:09 +0200 Subject: [Python-checkins] Daily reference leaks (26e2ee402a0b): sum=2 Message-ID: results for 26e2ee402a0b on branch "default" -------------------------------------------- test_support leaked [0, -1, 1] references, sum=0 test_dbm leaked [2, 0, 0] references, sum=2 Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogkYVGEW', '-x'] From python-checkins at python.org Wed Jun 13 23:59:35 2012 From: python-checkins at python.org (sandro.tosi) Date: Wed, 13 Jun 2012 23:59:35 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE1MDYw?= =?utf8?q?=3A_fix_typo_in_socket_doc=3B_Patch_by_anatoly_techtonik?= Message-ID: http://hg.python.org/cpython/rev/744fb52ffdf0 changeset: 77417:744fb52ffdf0 branch: 2.7 parent: 77408:60a7b704de5c user: Sandro Tosi date: Wed Jun 13 23:58:35 2012 +0200 summary: Issue #15060: fix typo in socket doc; Patch by anatoly techtonik files: Doc/library/socket.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -38,7 +38,7 @@ :const:`AF_UNIX` address family. A pair ``(host, port)`` is used for the :const:`AF_INET` address family, where *host* is a string representing either a hostname in Internet domain notation like ``'daring.cwi.nl'`` or an IPv4 address -like ``'100.50.200.5'``, and *port* is an integral port number. For +like ``'100.50.200.5'``, and *port* is an integer port number. 
For :const:`AF_INET6` address family, a four-tuple ``(host, port, flowinfo, scopeid)`` is used, where *flowinfo* and *scopeid* represents ``sin6_flowinfo`` and ``sin6_scope_id`` member in :const:`struct sockaddr_in6` in C. For -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Jun 13 23:59:36 2012 From: python-checkins at python.org (sandro.tosi) Date: Wed, 13 Jun 2012 23:59:36 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogSXNzdWUgIzE1MDYw?= =?utf8?q?=3A_fix_typo_in_socket_doc=3B_Patch_by_anatoly_techtonik?= Message-ID: http://hg.python.org/cpython/rev/4d755a711823 changeset: 77418:4d755a711823 branch: 3.2 parent: 77409:5643697070c0 user: Sandro Tosi date: Wed Jun 13 23:58:54 2012 +0200 summary: Issue #15060: fix typo in socket doc; Patch by anatoly techtonik files: Doc/library/socket.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -47,7 +47,7 @@ - A pair ``(host, port)`` is used for the :const:`AF_INET` address family, where *host* is a string representing either a hostname in Internet domain notation like ``'daring.cwi.nl'`` or an IPv4 address like ``'100.50.200.5'``, - and *port* is an integral port number. + and *port* is an integer port number. - For :const:`AF_INET6` address family, a four-tuple ``(host, port, flowinfo, scopeid)`` is used, where *flowinfo* and *scopeid* represent the ``sin6_flowinfo`` -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Jun 13 23:59:37 2012 From: python-checkins at python.org (sandro.tosi) Date: Wed, 13 Jun 2012 23:59:37 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Issue_=2315060=3A_merge_with_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/29fb36928433 changeset: 77419:29fb36928433 parent: 77416:26e2ee402a0b parent: 77418:4d755a711823 user: Sandro Tosi date: Wed Jun 13 23:59:21 2012 +0200 summary: Issue #15060: merge with 3.2 files: Doc/library/socket.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -61,7 +61,7 @@ - A pair ``(host, port)`` is used for the :const:`AF_INET` address family, where *host* is a string representing either a hostname in Internet domain notation like ``'daring.cwi.nl'`` or an IPv4 address like ``'100.50.200.5'``, - and *port* is an integral port number. + and *port* is an integer port number. 
- For :const:`AF_INET6` address family, a four-tuple ``(host, port, flowinfo, scopeid)`` is used, where *flowinfo* and *scopeid* represent the ``sin6_flowinfo`` -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 14 00:37:37 2012 From: python-checkins at python.org (sandro.tosi) Date: Thu, 14 Jun 2012 00:37:37 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE1MDYw?= =?utf8?q?=3A_better_fix=2C_thanks_to_review_on_=23python-dev?= Message-ID: http://hg.python.org/cpython/rev/412c7daed0db changeset: 77420:412c7daed0db branch: 2.7 parent: 77417:744fb52ffdf0 user: Sandro Tosi date: Thu Jun 14 00:36:54 2012 +0200 summary: Issue #15060: better fix, thanks to review on #python-dev files: Doc/library/socket.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -38,7 +38,7 @@ :const:`AF_UNIX` address family. A pair ``(host, port)`` is used for the :const:`AF_INET` address family, where *host* is a string representing either a hostname in Internet domain notation like ``'daring.cwi.nl'`` or an IPv4 address -like ``'100.50.200.5'``, and *port* is an integer port number. For +like ``'100.50.200.5'``, and *port* is an integer. For :const:`AF_INET6` address family, a four-tuple ``(host, port, flowinfo, scopeid)`` is used, where *flowinfo* and *scopeid* represents ``sin6_flowinfo`` and ``sin6_scope_id`` member in :const:`struct sockaddr_in6` in C. For -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 14 00:37:39 2012 From: python-checkins at python.org (sandro.tosi) Date: Thu, 14 Jun 2012 00:37:39 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogSXNzdWUgIzE1MDYw?= =?utf8?q?=3A_better_fix=2C_thanks_to_review_on_=23python-dev?= Message-ID: http://hg.python.org/cpython/rev/e616985284cd changeset: 77421:e616985284cd branch: 3.2 parent: 77418:4d755a711823 user: Sandro Tosi date: Thu Jun 14 00:37:09 2012 +0200 summary: Issue #15060: better fix, thanks to review on #python-dev files: Doc/library/socket.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -47,7 +47,7 @@ - A pair ``(host, port)`` is used for the :const:`AF_INET` address family, where *host* is a string representing either a hostname in Internet domain notation like ``'daring.cwi.nl'`` or an IPv4 address like ``'100.50.200.5'``, - and *port* is an integer port number. + and *port* is an integer. 
- For :const:`AF_INET6` address family, a four-tuple ``(host, port, flowinfo, scopeid)`` is used, where *flowinfo* and *scopeid* represent the ``sin6_flowinfo`` -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 14 00:37:41 2012 From: python-checkins at python.org (sandro.tosi) Date: Thu, 14 Jun 2012 00:37:41 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Issue_=2315060=3A_merge_with_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/d16065304bbd changeset: 77422:d16065304bbd parent: 77419:29fb36928433 parent: 77421:e616985284cd user: Sandro Tosi date: Thu Jun 14 00:37:25 2012 +0200 summary: Issue #15060: merge with 3.2 files: Doc/library/socket.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -61,7 +61,7 @@ - A pair ``(host, port)`` is used for the :const:`AF_INET` address family, where *host* is a string representing either a hostname in Internet domain notation like ``'daring.cwi.nl'`` or an IPv4 address like ``'100.50.200.5'``, - and *port* is an integer port number. + and *port* is an integer. - For :const:`AF_INET6` address family, a four-tuple ``(host, port, flowinfo, scopeid)`` is used, where *flowinfo* and *scopeid* represent the ``sin6_flowinfo`` -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 14 04:15:07 2012 From: python-checkins at python.org (brett.cannon) Date: Thu, 14 Jun 2012 04:15:07 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Update_to_PEP_362_from_Yury=2E?= Message-ID: http://hg.python.org/peps/rev/882f0a5ba45e changeset: 4459:882f0a5ba45e user: Brett Cannon date: Wed Jun 13 22:15:01 2012 -0400 summary: Update to PEP 362 from Yury. files: pep-0362.txt | 140 ++++++++++++++++++++------------------ 1 files changed, 75 insertions(+), 65 deletions(-) diff --git a/pep-0362.txt b/pep-0362.txt --- a/pep-0362.txt +++ b/pep-0362.txt @@ -16,7 +16,7 @@ ======== Python has always supported powerful introspection capabilities, -including introspecting functions and methods. (For the rest of +including introspecting functions and methods (for the rest of this PEP, "function" refers to both functions and methods). By examining a function object you can fully reconstruct the function's signature. Unfortunately this information is stored in an inconvenient @@ -35,16 +35,12 @@ Signature Object ================ -A Signature object represents the overall signature of a function. -It stores a `Parameter object`_ for each parameter accepted by the -function, as well as information specific to the function itself. +A Signature object represents the call signature of a function and +its return annotation. For each parameter accepted by the function +it stores a `Parameter object`_ in its ``parameters`` collection. A Signature object has the following public attributes and methods: -* name : str - Name of the function. -* qualname : str - Fully qualified name of the function. * return_annotation : object The annotation for the return type of the function if specified. If the function has no annotation for its return type, this @@ -55,8 +51,22 @@ as listed in ``code.co_varnames``). * bind(\*args, \*\*kwargs) -> BoundArguments Creates a mapping from positional and keyword arguments to - parameters. Raises a ``BindError`` if the passed arguments - do not match the signature. + parameters. 
Raises a ``BindError`` (subclass of ``TypeError``) + if the passed arguments do not match the signature. +* bind_partial(\*args, \*\*kwargs) -> BoundArguments + Works the same way as ``bind()``, but allows the omission + of some required arguments (mimics ``functools.partial`` + behavior.) +* format(...) -> str + Formats the Signature object to a string. Optional arguments allow + for custom render functions for parameter names, + annotations and default values, along with custom separators. + +Signature implements the ``__str__`` method, which fallbacks to the +``Signature.format()`` call. + +It's possible to test Signatures for equality. Two signatures +are equal when they have equal parameters and return annotations. Changes to the Signature object, or to any of its data members, do not affect the function itself. @@ -75,7 +85,7 @@ * name : str The name of the parameter as a string. * default : object - The default value for the parameter if specified. If the + The default value for the parameter, if specified. If the parameter has no default value, this attribute is not set. * annotation : object The annotation for the parameter if specified. If the @@ -97,11 +107,7 @@ all conditions where ``is_implemented`` may be False be thoroughly documented. -Parameter objects support testing for equality. Two Parameter -objects are equal, when all their properties are equal. Those -who need to test if one signature has the same parameters as -another, can do a direct comparison of ``Signature.parameters`` -collections: ``signature(foo).parameters == signature(bar).parameters``. +Two parameters are equal when all their attributes are equal. BoundArguments Object @@ -113,7 +119,7 @@ Has the following public attributes: * arguments : OrderedDict - An ordered mutable mapping of parameters' names to arguments' values. + An ordered, mutable mapping of parameters' names to arguments' values. Does not contain arguments' default values. * args : tuple Tuple of positional arguments values. Dynamically computed from @@ -125,7 +131,7 @@ The ``arguments`` attribute should be used in conjunction with ``Signature.parameters`` for any arguments processing purposes. 
-``args`` and ``kwargs`` properties should be used to invoke functions: +``args`` and ``kwargs`` properties can be used to invoke functions: :: def test(a, *, b): @@ -148,7 +154,7 @@ - If the object is not callable - raise a TypeError - If the object has a ``__signature__`` attribute and if it - is not ``None`` - return it + is not ``None`` - return a deepcopy of it - If it is ``None`` and the object is an instance of ``BuiltinFunction``, raise a ``ValueError`` @@ -160,29 +166,43 @@ - Or else construct a new ``Signature`` object and return it - - if the object is a method or a classmethod, construct and return + - If the object is a method or a classmethod, construct and return a new ``Signature`` object, with its first parameter (usually ``self`` or ``cls``) removed - - If the object is a class return ``signature(object.__init__)`` + - If the object is a staticmethod, construct and return + a new ``Signature`` object - If the object is an instance of ``functools.partial``, construct a new ``Signature`` from its ``partial.func`` attribute, and account for already bound ``partial.args`` and ``partial.kwargs`` + - If the object is a class or metaclass: + + - If the object's type has a ``__call__`` method defined in + its MRO, return a Signature for it + + - If the object has a ``__new__`` method defined in its class, + return a Signature object for it + + - If the object has a ``__init__`` method defined in its class, + return a Signature object for it + - Return ``signature(object.__call__)`` Note, that the ``Signature`` object is created in a lazy manner, and -is not automatically cached. +is not automatically cached. If, however, the Signature object was +explicitly cached by the user, ``signature()`` returns a new deepcopy +of it on each invocation. -An implementation for Python 3.3 can be found here: [#impl]_. -A python issue was also created: [#issue]_. +An implementation for Python 3.3 can be found at [#impl]_. +The python issue tracking the patch is [#issue]_. Design Considerations ===================== -No Implicit Caching of Signature Objects +No implicit caching of Signature objects ---------------------------------------- The first PEP design had a provision for implicit caching of ``Signature`` @@ -201,60 +221,49 @@ Examples ======== -Function Signature Renderer ---------------------------- +Visualizing Callable Objects' Signature +--------------------------------------- :: - def render_signature(signature): - '''Renders function definition by its signature. + from inspect import signature + from functools import partial, wraps - Example: - >>> def test(a:'foo', *, b:'bar', c=True, **kwargs:None) -> 'spam': - ... 
pass + class FooMeta(type): + def __new__(mcls, name, bases, dct, *, bar:bool=False): + return super().__new__(mcls, name, bases, dct) - >>> render_signature(inspect.signature(test)) - test(a:'foo', *, b:'bar', c=True, **kwargs:None) -> 'spam' - ''' + def __init__(cls, name, bases, dct, **kwargs): + return super().__init__(name, bases, dct) - result = [] - render_kw_only_separator = True - for param in signature.parameters.values(): - formatted = param.name - # Add annotation and default value - if hasattr(param, 'annotation'): - formatted = '{}:{!r}'.format(formatted, param.annotation) - if hasattr(param, 'default'): - formatted = '{}={!r}'.format(formatted, param.default) + class Foo(metaclass=FooMeta): + def __init__(self, spam:int=42): + self.spam = spam - # Handle *args and **kwargs -like parameters - if param.is_args: - formatted = '*' + formatted - elif param.is_kwargs: - formatted = '**' + formatted + def __call__(self, a, b, *, c) -> tuple: + return a, b, c - if param.is_args: - # OK, we have an '*args'-like parameter, so we won't need - # a '*' to separate keyword-only arguments - render_kw_only_separator = False - elif param.is_keyword_only and render_kw_only_separator: - # We have a keyword-only parameter to render and we haven't - # rendered an '*args'-like parameter before, so add a '*' - # separator to the parameters list ("foo(arg1, *, arg2)" case) - result.append('*') - # This condition should be only triggered once, so - # reset the flag - render_kw_only_separator = False - result.append(formatted) + print('FooMeta >', str(signature(FooMeta))) + print('Foo >', str(signature(Foo))) + print('Foo.__call__ >', str(signature(Foo.__call__))) + print('Foo().__call__ >', str(signature(Foo().__call__))) + print('partial(Foo().__call__, 1, c=3) >', + str(signature(partial(Foo().__call__, 1, c=3)))) + print('partial(partial(Foo().__call__, 1, c=3), 2, c=20) >', + str(signature(partial(partial(Foo().__call__, 1, c=3), 2, c=20)))) - rendered = '{}({})'.format(signature.name, ', '.join(result)) - if hasattr(signature, 'return_annotation'): - rendered += ' -> {!r}'.format(signature.return_annotation) +The script will output: +:: - return rendered + FooMeta > (name, bases, dct, *, bar:bool=False) + Foo > (spam:int=42) + Foo.__call__ > (self, a, b, *, c) -> tuple + Foo().__call__ > (a, b, *, c) -> tuple + partial(Foo().__call__, 1, c=3) > (b, *, c=3) -> tuple + partial(partial(Foo().__call__, 1, c=3), 2, c=20) > (*, c=20) -> tuple Annotation Checker -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Thu Jun 14 04:16:08 2012 From: python-checkins at python.org (alexander.belopolsky) Date: Thu, 14 Jun 2012 04:16:08 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=231667546=3A_On_plat?= =?utf8?q?forms_supporting_tm=5Fzone_and_tm=5Fgmtoff_fields?= Message-ID: http://hg.python.org/cpython/rev/3b5545ba6432 changeset: 77423:3b5545ba6432 user: Alexander Belopolsky date: Wed Jun 13 22:15:26 2012 -0400 summary: Issue #1667546: On platforms supporting tm_zone and tm_gmtoff fields in struct tm, time.struct_time objects returned by time.gmtime(), time.localtime() and time.strptime() functions now have tm_zone and tm_gmtoff attributes. Original patch by Paul Boddie. 
files: Doc/library/time.rst | 23 ++++++++++- Lib/_strptime.py | 6 +- Lib/test/test_structseq.py | 5 +- Lib/test/test_time.py | 51 ++++++++++++++++++++++++++ Misc/NEWS | 5 ++ Modules/timemodule.c | 36 +++++++++++++++++- 6 files changed, 117 insertions(+), 9 deletions(-) diff --git a/Doc/library/time.rst b/Doc/library/time.rst --- a/Doc/library/time.rst +++ b/Doc/library/time.rst @@ -77,6 +77,12 @@ See :class:`struct_time` for a description of these objects. + .. versionchanged:: 3.3 + + The :class:`struct_time` type was extended to provide the + :attr:`tm_gmtoff` and :attr:`tm_zone` attributes when platform + supports corresponding ``struct tm`` members. + * Use the following functions to convert between time representations: +-------------------------+-------------------------+-------------------------+ @@ -336,7 +342,6 @@ .. versionadded:: 3.3 - .. function:: sleep(secs) Suspend execution for the given number of seconds. The argument may be a @@ -433,6 +438,12 @@ | ``%Y`` | Year with century as a decimal number. | | | | | | +-----------+------------------------------------------------+-------+ + | ``%z`` | Time zone offset indicating a positive or | | + | | negative time difference from UTC/GMT of the | | + | | form +HHMM or -HHMM, where H represents decimal| | + | | hour digits and M represents decimal minute | | + | | digits [-23:59, +23:59]. | | + +-----------+------------------------------------------------+-------+ | ``%Z`` | Time zone name (no characters if no time zone | | | | exists). | | +-----------+------------------------------------------------+-------+ @@ -532,6 +543,10 @@ +-------+-------------------+---------------------------------+ | 8 | :attr:`tm_isdst` | 0, 1 or -1; see below | +-------+-------------------+---------------------------------+ + | N/A | :attr:`tm_zone` | abbreviation of timezone name | + +-------+-------------------+---------------------------------+ + | N/A | :attr:`tm_gmtoff` | offset from UTC in seconds | + +-------+-------------------+---------------------------------+ Note that unlike the C structure, the month value is a range of [1, 12], not [0, 11]. A ``-1`` argument as the daylight @@ -542,6 +557,11 @@ :class:`struct_time`, or having elements of the wrong type, a :exc:`TypeError` is raised. + .. versionchanged:: 3.3 + + :attr:`tm_gmtoff` and :attr:`tm_zone` attributes are avaliable on + platforms with C library supporting the corresponding fields in + ``struct tm``. .. function:: time() @@ -552,7 +572,6 @@ lower value than a previous call if the system clock has been set back between the two calls. - .. 
data:: timezone The offset of the local (non-DST) timezone, in seconds west of UTC (negative in diff --git a/Lib/_strptime.py b/Lib/_strptime.py --- a/Lib/_strptime.py +++ b/Lib/_strptime.py @@ -486,19 +486,19 @@ return (year, month, day, hour, minute, second, - weekday, julian, tz, gmtoff, tzname), fraction + weekday, julian, tz, tzname, gmtoff), fraction def _strptime_time(data_string, format="%a %b %d %H:%M:%S %Y"): """Return a time struct based on the input string and the format string.""" tt = _strptime(data_string, format)[0] - return time.struct_time(tt[:9]) + return time.struct_time(tt[:time._STRUCT_TM_ITEMS]) def _strptime_datetime(cls, data_string, format="%a %b %d %H:%M:%S %Y"): """Return a class cls instance based on the input string and the format string.""" tt, fraction = _strptime(data_string, format) - gmtoff, tzname = tt[-2:] + tzname, gmtoff = tt[-2:] args = tt[:6] + (fraction,) if gmtoff is not None: tzdelta = datetime_timedelta(seconds=gmtoff) diff --git a/Lib/test/test_structseq.py b/Lib/test/test_structseq.py --- a/Lib/test/test_structseq.py +++ b/Lib/test/test_structseq.py @@ -78,8 +78,9 @@ def test_fields(self): t = time.gmtime() - self.assertEqual(len(t), t.n_fields) - self.assertEqual(t.n_fields, t.n_sequence_fields+t.n_unnamed_fields) + self.assertEqual(len(t), t.n_sequence_fields) + self.assertEqual(t.n_unnamed_fields, 0) + self.assertEqual(t.n_fields, time._STRUCT_TM_ITEMS) def test_constructor(self): t = time.struct_time diff --git a/Lib/test/test_time.py b/Lib/test/test_time.py --- a/Lib/test/test_time.py +++ b/Lib/test/test_time.py @@ -620,7 +620,58 @@ for invalid in self.invalid_values: self.assertRaises(OverflowError, pytime_object_to_timespec, invalid) + @unittest.skipUnless(time._STRUCT_TM_ITEMS == 11, "needs tm_zone support") + def test_localtime_timezone(self): + # Get the localtime and examine it for the offset and zone. + lt = time.localtime() + self.assertTrue(hasattr(lt, "tm_gmtoff")) + self.assertTrue(hasattr(lt, "tm_zone")) + + # See if the offset and zone are similar to the module + # attributes. + if lt.tm_gmtoff is None: + self.assertTrue(not hasattr(time, "timezone")) + else: + self.assertEqual(lt.tm_gmtoff, -[time.timezone, time.altzone][lt.tm_isdst]) + if lt.tm_zone is None: + self.assertTrue(not hasattr(time, "tzname")) + else: + self.assertEqual(lt.tm_zone, time.tzname[lt.tm_isdst]) + + # Try and make UNIX times from the localtime and a 9-tuple + # created from the localtime. Test to see that the times are + # the same. + t = time.mktime(lt); t9 = time.mktime(lt[:9]) + self.assertEqual(t, t9) + + # Make localtimes from the UNIX times and compare them to + # the original localtime, thus making a round trip. + new_lt = time.localtime(t); new_lt9 = time.localtime(t9) + self.assertEqual(new_lt, lt) + self.assertEqual(new_lt.tm_gmtoff, lt.tm_gmtoff) + self.assertEqual(new_lt.tm_zone, lt.tm_zone) + self.assertEqual(new_lt9, lt) + self.assertEqual(new_lt.tm_gmtoff, lt.tm_gmtoff) + self.assertEqual(new_lt9.tm_zone, lt.tm_zone) + + @unittest.skipUnless(time._STRUCT_TM_ITEMS == 11, "needs tm_zone support") + def test_strptime_timezone(self): + t = time.strptime("UTC", "%Z") + self.assertEqual(t.tm_zone, 'UTC') + t = time.strptime("+0500", "%z") + self.assertEqual(t.tm_gmtoff, 5 * 3600) + + @unittest.skipUnless(time._STRUCT_TM_ITEMS == 11, "needs tm_zone support") + def test_short_times(self): + + import pickle + + # Load a short time structure using pickle. 
+ st = b"ctime\nstruct_time\np0\n((I2007\nI8\nI11\nI1\nI24\nI49\nI5\nI223\nI1\ntp1\n(dp2\ntp3\nRp4\n." + lt = pickle.loads(st) + self.assertIs(lt.tm_gmtoff, None) + self.assertIs(lt.tm_zone, None) def test_main(): support.run_unittest( diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -21,6 +21,11 @@ Library ------- +- Issue #1667546: On platforms supporting tm_zone and tm_gmtoff fields + in struct tm, time.struct_time objects returned by time.gmtime(), + time.localtime() and time.strptime() functions now have tm_zone and + tm_gmtoff attributes. Original patch by Paul Boddie. + - Rename adjusted attribute to adjustable in time.get_clock_info() result. - Issue #3518: Remove references to non-existent BaseManager.from_address() diff --git a/Modules/timemodule.c b/Modules/timemodule.c --- a/Modules/timemodule.c +++ b/Modules/timemodule.c @@ -275,6 +275,10 @@ {"tm_wday", "day of week, range [0, 6], Monday is 0"}, {"tm_yday", "day of year, range [1, 366]"}, {"tm_isdst", "1 if summer time is in effect, 0 if not, and -1 if unknown"}, +#ifdef HAVE_STRUCT_TM_TM_ZONE + {"tm_zone", "abbreviation of timezone name"}, + {"tm_gmtoff", "offset from UTC in seconds"}, +#endif /* HAVE_STRUCT_TM_TM_ZONE */ {0} }; @@ -294,6 +298,7 @@ static int initialized; static PyTypeObject StructTimeType; + static PyObject * tmtotuple(struct tm *p) { @@ -312,6 +317,11 @@ SET(6, (p->tm_wday + 6) % 7); /* Want Monday == 0 */ SET(7, p->tm_yday + 1); /* Want January, 1 == 1 */ SET(8, p->tm_isdst); +#ifdef HAVE_STRUCT_TM_TM_ZONE + PyStructSequence_SET_ITEM(v, 9, + PyUnicode_DecodeLocale(p->tm_zone, "surrogateescape")); + SET(10, p->tm_gmtoff); +#endif /* HAVE_STRUCT_TM_TM_ZONE */ #undef SET if (PyErr_Occurred()) { Py_XDECREF(v); @@ -371,7 +381,10 @@ tm_sec, tm_wday, tm_yday, tm_isdst)\n\ \n\ Convert seconds since the Epoch to a time tuple expressing UTC (a.k.a.\n\ -GMT). When 'seconds' is not passed in, convert the current time instead."); +GMT). When 'seconds' is not passed in, convert the current time instead.\n\ +\n\ +If the platform supports the tm_gmtoff and tm_zone, they are available as\n\ +attributes only."); static int pylocaltime(time_t *timep, struct tm *result) @@ -438,6 +451,17 @@ p->tm_mon--; p->tm_wday = (p->tm_wday + 1) % 7; p->tm_yday--; +#ifdef HAVE_STRUCT_TM_TM_ZONE + if (Py_TYPE(args) == &StructTimeType) { + PyObject *item; + item = PyTuple_GET_ITEM(args, 9); + p->tm_zone = item == Py_None ? NULL : _PyUnicode_AsString(item); + item = PyTuple_GET_ITEM(args, 10); + p->tm_gmtoff = item == Py_None ? 
0 : PyLong_AsLong(item); + if (PyErr_Occurred()) + return 0; + } +#endif /* HAVE_STRUCT_TM_TM_ZONE */ return 1; } @@ -778,7 +802,10 @@ PyDoc_STRVAR(mktime_doc, "mktime(tuple) -> floating point number\n\ \n\ -Convert a time tuple in local time to seconds since the Epoch."); +Convert a time tuple in local time to seconds since the Epoch.\n\ +Note that mktime(gmtime(0)) will not generally return zero for most\n\ +time zones; instead the returned value will either be equal to that\n\ +of the timezone or altzone attributes on the time module."); #endif /* HAVE_MKTIME */ #ifdef HAVE_WORKING_TZSET @@ -1443,6 +1470,11 @@ #endif } Py_INCREF(&StructTimeType); +#ifdef HAVE_STRUCT_TM_TM_ZONE + PyModule_AddIntConstant(m, "_STRUCT_TM_ITEMS", 11); +#else + PyModule_AddIntConstant(m, "_STRUCT_TM_ITEMS", 9); +#endif PyModule_AddObject(m, "struct_time", (PyObject*) &StructTimeType); initialized = 1; return m; -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Thu Jun 14 05:48:16 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Thu, 14 Jun 2012 05:48:16 +0200 Subject: [Python-checkins] Daily reference leaks (d16065304bbd): sum=0 Message-ID: results for d16065304bbd on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflognI8H8y', '-x'] From python-checkins at python.org Thu Jun 14 15:49:08 2012 From: python-checkins at python.org (martin.v.loewis) Date: Thu, 14 Jun 2012 15:49:08 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogSXNzdWUgIzE0OTM3?= =?utf8?q?=3A_Fix_typo=2E_Patch_by_Roger_Serwy=2E?= Message-ID: http://hg.python.org/cpython/rev/62030ebb2b01 changeset: 77424:62030ebb2b01 branch: 3.2 parent: 77421:e616985284cd user: Martin v. L?wis date: Thu Jun 14 15:37:21 2012 +0200 summary: Issue #14937: Fix typo. Patch by Roger Serwy. files: Lib/idlelib/AutoComplete.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/idlelib/AutoComplete.py b/Lib/idlelib/AutoComplete.py --- a/Lib/idlelib/AutoComplete.py +++ b/Lib/idlelib/AutoComplete.py @@ -143,7 +143,7 @@ elif hp.is_in_code() and (not mode or mode==COMPLETE_ATTRIBUTES): self._remove_autocomplete_window() mode = COMPLETE_ATTRIBUTES - while i and curline[i-1] in ID_CHARS or ord(curline[i-1]) > 127: + while i and (curline[i-1] in ID_CHARS or ord(curline[i-1]) > 127): i -= 1 comp_start = curline[i:j] if i and curline[i-1] == '.': -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 14 15:49:11 2012 From: python-checkins at python.org (martin.v.loewis) Date: Thu, 14 Jun 2012 15:49:11 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_merge_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/82fc6ff7aa3e changeset: 77425:82fc6ff7aa3e parent: 77423:3b5545ba6432 parent: 77424:62030ebb2b01 user: Martin v. 
L?wis date: Thu Jun 14 15:37:53 2012 +0200 summary: merge 3.2 files: Lib/idlelib/AutoComplete.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/idlelib/AutoComplete.py b/Lib/idlelib/AutoComplete.py --- a/Lib/idlelib/AutoComplete.py +++ b/Lib/idlelib/AutoComplete.py @@ -140,7 +140,7 @@ elif hp.is_in_code() and (not mode or mode==COMPLETE_ATTRIBUTES): self._remove_autocomplete_window() mode = COMPLETE_ATTRIBUTES - while i and curline[i-1] in ID_CHARS or ord(curline[i-1]) > 127: + while i and (curline[i-1] in ID_CHARS or ord(curline[i-1]) > 127): i -= 1 comp_start = curline[i:j] if i and curline[i-1] == '.': -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 14 16:09:37 2012 From: python-checkins at python.org (martin.v.loewis) Date: Thu, 14 Jun 2012 16:09:37 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314936=3A_curses=5F?= =?utf8?q?panel_was_converted_to_PEP_3121_API=2E?= Message-ID: http://hg.python.org/cpython/rev/9a6b45a83dec changeset: 77426:9a6b45a83dec user: Martin v. L?wis date: Thu Jun 14 16:00:24 2012 +0200 summary: Issue #14936: curses_panel was converted to PEP 3121 API. Patch by Robin Schreiber. files: Misc/NEWS | 3 + Modules/_curses_panel.c | 54 ++++++++++++++++++++++------ 2 files changed, 45 insertions(+), 12 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -21,6 +21,9 @@ Library ------- +- Issue #14936: curses_panel was converted to PEP 3121 API. + Patch by Robin Schreiber. + - Issue #1667546: On platforms supporting tm_zone and tm_gmtoff fields in struct tm, time.struct_time objects returned by time.gmtime(), time.localtime() and time.strptime() functions now have tm_zone and diff --git a/Modules/_curses_panel.c b/Modules/_curses_panel.c --- a/Modules/_curses_panel.c +++ b/Modules/_curses_panel.c @@ -16,8 +16,38 @@ #include -static PyObject *PyCursesError; +typedef struct { + PyObject *PyCursesError; +} _curses_panelstate; +#define _curses_panelstate(o) ((_curses_panelstate *)PyModule_GetState(o)) + +/*static PyObject *PyCursesError;*/ + +static int +_curses_panel_clear(PyObject *m) +{ + Py_CLEAR(_curses_panelstate(m)->PyCursesError); + return 0; +} + +static int +_curses_panel_traverse(PyObject *m, visitproc visit, void *arg) +{ + Py_VISIT(_curses_panelstate(m)->PyCursesError); + return 0; +} + +static void +_curses_panel_free(void *m) +{ + _curses_panel_clear((PyObject *) m); +} + +static struct PyModuleDef _curses_panelmodule; + +#define _curses_panelstate_global \ +((_curses_panelstate *) PyModule_GetState(PyState_FindModule(&_curses_panelmodule))) /* Utility Functions */ @@ -34,9 +64,9 @@ return Py_None; } else { if (fname == NULL) { - PyErr_SetString(PyCursesError, catchall_ERR); + PyErr_SetString(_curses_panelstate_global->PyCursesError, catchall_ERR); } else { - PyErr_Format(PyCursesError, "%s() returned ERR", fname); + PyErr_Format(_curses_panelstate_global->PyCursesError, "%s() returned ERR", fname); } return NULL; } @@ -280,7 +310,7 @@ rtn = replace_panel(self->pan, temp->win); if (rtn == ERR) { - PyErr_SetString(PyCursesError, "replace_panel() returned ERR"); + PyErr_SetString(_curses_panelstate_global->PyCursesError, "replace_panel() returned ERR"); return NULL; } Py_DECREF(po->wo); @@ -305,7 +335,7 @@ PyCursesInitialised; obj = (PyObject *) panel_userptr(self->pan); if (obj == NULL) { - PyErr_SetString(PyCursesError, "no userptr set"); + PyErr_SetString(_curses_panelstate_global->PyCursesError, "no userptr set"); return NULL; } @@ -405,7 +435,7 
@@ return NULL; pan = new_panel(win->win); if (pan == NULL) { - PyErr_SetString(PyCursesError, catchall_NULL); + PyErr_SetString(_curses_panelstate_global->PyCursesError, catchall_NULL); return NULL; } return (PyObject *)PyCursesPanel_New(pan, win); @@ -467,12 +497,12 @@ PyModuleDef_HEAD_INIT, "_curses_panel", NULL, - -1, + sizeof(_curses_panelstate), PyCurses_methods, NULL, - NULL, - NULL, - NULL + _curses_panel_traverse, + _curses_panel_clear, + _curses_panel_free }; PyMODINIT_FUNC @@ -493,8 +523,8 @@ d = PyModule_GetDict(m); /* For exception _curses_panel.error */ - PyCursesError = PyErr_NewException("_curses_panel.error", NULL, NULL); - PyDict_SetItemString(d, "error", PyCursesError); + _curses_panelstate(m)->PyCursesError = PyErr_NewException("_curses_panel.error", NULL, NULL); + PyDict_SetItemString(d, "error", _curses_panelstate(m)->PyCursesError); /* Make the version available */ v = PyUnicode_FromString(PyCursesVersion); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 14 16:09:38 2012 From: python-checkins at python.org (martin.v.loewis) Date: Thu, 14 Jun 2012 16:09:38 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314936=3A_curses=5F?= =?utf8?q?panel_was_converted_to_PEP_3121_and_PEP_384_API=2E?= Message-ID: http://hg.python.org/cpython/rev/6eb21c1d3099 changeset: 77427:6eb21c1d3099 user: Martin v. L?wis date: Thu Jun 14 16:01:23 2012 +0200 summary: Issue #14936: curses_panel was converted to PEP 3121 and PEP 384 API. Patch by Robin Schreiber. files: Misc/NEWS | 2 +- Modules/_curses_panel.c | 72 +++++++++++----------------- 2 files changed, 30 insertions(+), 44 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -21,7 +21,7 @@ Library ------- -- Issue #14936: curses_panel was converted to PEP 3121 API. +- Issue #14936: curses_panel was converted to PEP 3121 and PEP 384 API. Patch by Robin Schreiber. - Issue #1667546: On platforms supporting tm_zone and tm_gmtoff fields diff --git a/Modules/_curses_panel.c b/Modules/_curses_panel.c --- a/Modules/_curses_panel.c +++ b/Modules/_curses_panel.c @@ -18,12 +18,11 @@ typedef struct { PyObject *PyCursesError; + PyObject *PyCursesPanel_Type; } _curses_panelstate; #define _curses_panelstate(o) ((_curses_panelstate *)PyModule_GetState(o)) -/*static PyObject *PyCursesError;*/ - static int _curses_panel_clear(PyObject *m) { @@ -84,9 +83,8 @@ PyCursesWindowObject *wo; /* for reference counts */ } PyCursesPanelObject; -PyTypeObject PyCursesPanel_Type; - -#define PyCursesPanel_Check(v) (Py_TYPE(v) == &PyCursesPanel_Type) +#define PyCursesPanel_Check(v) \ + (Py_TYPE(v) == _curses_panelstate_global->PyCursesPanel_Type) /* Some helper functions. The problem is that there's always a window associated with a panel. 
To ensure that Python's GC doesn't pull @@ -205,7 +203,8 @@ { PyCursesPanelObject *po; - po = PyObject_NEW(PyCursesPanelObject, &PyCursesPanel_Type); + po = PyObject_NEW(PyCursesPanelObject, + (PyTypeObject *)(_curses_panelstate_global)->PyCursesPanel_Type); if (po == NULL) return NULL; po->pan = pan; if (insert_lop(po) < 0) { @@ -364,36 +363,18 @@ /* -------------------------------------------------------*/ -PyTypeObject PyCursesPanel_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_curses_panel.curses panel", /*tp_name*/ - sizeof(PyCursesPanelObject), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - /* methods */ - (destructor)PyCursesPanel_Dealloc, /*tp_dealloc*/ - 0, /*tp_print*/ - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - 0, /*tp_reserved*/ - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - 0, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT, /*tp_flags*/ - 0, /*tp_doc*/ - 0, /*tp_traverse*/ - 0, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - PyCursesPanel_Methods, /*tp_methods*/ +static PyType_Slot PyCursesPanel_Type_slots[] = { + {Py_tp_dealloc, PyCursesPanel_Dealloc}, + {Py_tp_methods, PyCursesPanel_Methods}, + {0, 0}, +}; + +static PyType_Spec PyCursesPanel_Type_spec = { + "_curses_panel.curses panel", + sizeof(PyCursesPanelObject), + 0, + Py_TPFLAGS_DEFAULT, + PyCursesPanel_Type_slots }; /* Wrapper for panel_above(NULL). This function returns the bottom @@ -510,18 +491,20 @@ { PyObject *m, *d, *v; - /* Initialize object type */ - if (PyType_Ready(&PyCursesPanel_Type) < 0) - return NULL; - - import_curses(); - /* Create the module and add the functions */ m = PyModule_Create(&_curses_panelmodule); if (m == NULL) - return NULL; + goto fail; d = PyModule_GetDict(m); + /* Initialize object type */ + _curses_panelstate(m)->PyCursesPanel_Type = \ + PyType_FromSpec(&PyCursesPanel_Type_spec); + if (_curses_panelstate(m)->PyCursesPanel_Type == NULL) + goto fail; + + import_curses(); + /* For exception _curses_panel.error */ _curses_panelstate(m)->PyCursesError = PyErr_NewException("_curses_panel.error", NULL, NULL); PyDict_SetItemString(d, "error", _curses_panelstate(m)->PyCursesError); @@ -532,4 +515,7 @@ PyDict_SetItemString(d, "__version__", v); Py_DECREF(v); return m; + fail: + Py_XDECREF(m); + return NULL; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 14 16:14:46 2012 From: python-checkins at python.org (brett.cannon) Date: Thu, 14 Jun 2012 16:14:46 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Fix_a_spelling_error=2E?= Message-ID: http://hg.python.org/peps/rev/ea71020acbad changeset: 4460:ea71020acbad user: Brett Cannon date: Thu Jun 14 10:10:28 2012 -0400 summary: Fix a spelling error. files: pep-0362.txt | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/pep-0362.txt b/pep-0362.txt --- a/pep-0362.txt +++ b/pep-0362.txt @@ -320,7 +320,7 @@ format(func=sig.qualname, arg=param.name)) def check_type(sig, arg_name, arg_type, arg_value): - # Internal function that incapsulates arguments type checking + # Internal function that encapsulates arguments type checking if not isinstance(arg_value, arg_type): raise ValueError("{func}: wrong type of {arg!r} argument, " \ "{exp!r} expected, got {got!r}". 
\ -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Thu Jun 14 16:49:08 2012 From: python-checkins at python.org (richard.oudkerk) Date: Thu, 14 Jun 2012 16:49:08 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2313841=3A_Make_chil?= =?utf8?q?d_processes_exit_using_sys=2Eexit=28=29_on_Windows?= Message-ID: http://hg.python.org/cpython/rev/d31e83497c5a changeset: 77428:d31e83497c5a user: Richard Oudkerk date: Thu Jun 14 15:30:10 2012 +0100 summary: Issue #13841: Make child processes exit using sys.exit() on Windows files: Lib/multiprocessing/forking.py | 6 +- Lib/multiprocessing/managers.py | 67 ++++++++----------- Lib/multiprocessing/util.py | 27 ++++--- Lib/test/support.py | 2 +- Lib/test/test_multiprocessing.py | 5 + Misc/NEWS | 2 + 6 files changed, 54 insertions(+), 55 deletions(-) diff --git a/Lib/multiprocessing/forking.py b/Lib/multiprocessing/forking.py --- a/Lib/multiprocessing/forking.py +++ b/Lib/multiprocessing/forking.py @@ -13,7 +13,7 @@ from multiprocessing import util, process -__all__ = ['Popen', 'assert_spawning', 'exit', 'duplicate', 'close', 'ForkingPickler'] +__all__ = ['Popen', 'assert_spawning', 'duplicate', 'close', 'ForkingPickler'] # # Check that the current thread is spawning a child process @@ -75,7 +75,6 @@ # if sys.platform != 'win32': - exit = os._exit duplicate = os.dup close = os.close @@ -168,7 +167,6 @@ WINEXE = (sys.platform == 'win32' and getattr(sys, 'frozen', False)) WINSERVICE = sys.executable.lower().endswith("pythonservice.exe") - exit = _winapi.ExitProcess close = _winapi.CloseHandle # @@ -349,7 +347,7 @@ from_parent.close() exitcode = self._bootstrap() - exit(exitcode) + sys.exit(exitcode) def get_preparation_data(name): diff --git a/Lib/multiprocessing/managers.py b/Lib/multiprocessing/managers.py --- a/Lib/multiprocessing/managers.py +++ b/Lib/multiprocessing/managers.py @@ -22,7 +22,7 @@ from traceback import format_exc from multiprocessing import Process, current_process, active_children, Pool, util, connection from multiprocessing.process import AuthenticationString -from multiprocessing.forking import exit, Popen, ForkingPickler +from multiprocessing.forking import Popen, ForkingPickler from time import time as _time # @@ -140,28 +140,38 @@ self.id_to_obj = {'0': (None, ())} self.id_to_refcount = {} self.mutex = threading.RLock() - self.stop = 0 def serve_forever(self): ''' Run the server forever ''' + self.stop_event = threading.Event() current_process()._manager_server = self try: + accepter = threading.Thread(target=self.accepter) + accepter.daemon = True + accepter.start() try: - while 1: - try: - c = self.listener.accept() - except (OSError, IOError): - continue - t = threading.Thread(target=self.handle_request, args=(c,)) - t.daemon = True - t.start() + while not self.stop_event.is_set(): + self.stop_event.wait(1) except (KeyboardInterrupt, SystemExit): pass finally: - self.stop = 999 - self.listener.close() + if sys.stdout != sys.__stdout__: + util.debug('resetting stdout, stderr') + sys.stdout = sys.__stdout__ + sys.stderr = sys.__stderr__ + sys.exit(0) + + def accepter(self): + while True: + try: + c = self.listener.accept() + except (OSError, IOError): + continue + t = threading.Thread(target=self.handle_request, args=(c,)) + t.daemon = True + t.start() def handle_request(self, c): ''' @@ -208,7 +218,7 @@ send = conn.send id_to_obj = self.id_to_obj - while not self.stop: + while not self.stop_event.is_set(): try: methodname = obj = None @@ -318,32 +328,13 @@ Shutdown this process ''' try: - 
try: - util.debug('manager received shutdown message') - c.send(('#RETURN', None)) - - if sys.stdout != sys.__stdout__: - util.debug('resetting stdout, stderr') - sys.stdout = sys.__stdout__ - sys.stderr = sys.__stderr__ - - util._run_finalizers(0) - - for p in active_children(): - util.debug('terminating a child process of manager') - p.terminate() - - for p in active_children(): - util.debug('terminating a child process of manager') - p.join() - - util._run_finalizers() - util.info('manager exiting with exitcode 0') - except: - import traceback - traceback.print_exc() + util.debug('manager received shutdown message') + c.send(('#RETURN', None)) + except: + import traceback + traceback.print_exc() finally: - exit(0) + self.stop_event.set() def create(self, c, typeid, *args, **kwds): ''' diff --git a/Lib/multiprocessing/util.py b/Lib/multiprocessing/util.py --- a/Lib/multiprocessing/util.py +++ b/Lib/multiprocessing/util.py @@ -269,21 +269,24 @@ def _exit_function(): global _exiting - info('process shutting down') - debug('running all "atexit" finalizers with priority >= 0') - _run_finalizers(0) + if not _exiting: + _exiting = True - for p in active_children(): - if p._daemonic: - info('calling terminate() for daemon %s', p.name) - p._popen.terminate() + info('process shutting down') + debug('running all "atexit" finalizers with priority >= 0') + _run_finalizers(0) - for p in active_children(): - info('calling join() for process %s', p.name) - p.join() + for p in active_children(): + if p._daemonic: + info('calling terminate() for daemon %s', p.name) + p._popen.terminate() - debug('running the remaining "atexit" finalizers') - _run_finalizers() + for p in active_children(): + info('calling join() for process %s', p.name) + p.join() + + debug('running the remaining "atexit" finalizers') + _run_finalizers() atexit.register(_exit_function) diff --git a/Lib/test/support.py b/Lib/test/support.py --- a/Lib/test/support.py +++ b/Lib/test/support.py @@ -1593,7 +1593,7 @@ This will typically be run on the result of the communicate() method of a subprocess.Popen object. """ - stderr = re.sub(br"\[\d+ refs\]\r?\n?$", b"", stderr).strip() + stderr = re.sub(br"\[\d+ refs\]\r?\n?", b"", stderr).strip() return stderr def args_from_interpreter_flags(): diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py --- a/Lib/test/test_multiprocessing.py +++ b/Lib/test/test_multiprocessing.py @@ -1564,6 +1564,11 @@ manager.shutdown() + # If the manager process exited cleanly then the exitcode + # will be zero. Otherwise (after a short timeout) + # terminate() is used, resulting in an exitcode of -SIGTERM. + self.assertEqual(manager._process.exitcode, 0) + # # Test of connecting to a remote server and using xmlrpclib for serialization # diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -21,6 +21,8 @@ Library ------- +- Issue #13841: Make child processes exit using sys.exit() on Windows. + - Issue #14936: curses_panel was converted to PEP 3121 and PEP 384 API. Patch by Robin Schreiber. 
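A practical difference between the removed exit() helper (aliased to os._exit on Unix and _winapi.ExitProcess on Windows in the old forking.py code above) and the sys.exit() call that replaces it is that sys.exit() raises SystemExit inside the interpreter, so try/finally blocks and atexit handlers in the child still run before the process goes away, while the hard exit calls skip them. A minimal sketch of that difference in plain Python, independent of multiprocessing (the function and messages below are invented for illustration):

    import atexit
    import os
    import sys

    def cleanup():
        print("atexit handler ran")

    atexit.register(cleanup)

    def leave(hard=False):
        try:
            if hard:
                os._exit(1)   # terminates immediately: no finally, no atexit handlers
            sys.exit(0)       # raises SystemExit: the finally clause and atexit still run
        finally:
            print("finally clause ran")

    leave(hard=False)         # prints both messages and exits with status 0
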
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 14 21:54:18 2012 From: python-checkins at python.org (antoine.pitrou) Date: Thu, 14 Jun 2012 21:54:18 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315070=3A_fix_VS9?= =?utf8?q?=2E0_build_regression?= Message-ID: http://hg.python.org/cpython/rev/ccbf6f970943 changeset: 77429:ccbf6f970943 user: Antoine Pitrou date: Thu Jun 14 21:51:12 2012 +0200 summary: Issue #15070: fix VS9.0 build regression files: PC/VS9.0/pythoncore.vcproj | 8 ++++++++ 1 files changed, 8 insertions(+), 0 deletions(-) diff --git a/PC/VS9.0/pythoncore.vcproj b/PC/VS9.0/pythoncore.vcproj --- a/PC/VS9.0/pythoncore.vcproj +++ b/PC/VS9.0/pythoncore.vcproj @@ -803,6 +803,10 @@ > + + @@ -1563,6 +1567,10 @@ > + + -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 14 21:57:28 2012 From: python-checkins at python.org (antoine.pitrou) Date: Thu, 14 Jun 2012 21:57:28 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Update_=2Ehgignore_for_VS9?= =?utf8?q?=2E0-generated_files?= Message-ID: http://hg.python.org/cpython/rev/1c792a3e4763 changeset: 77430:1c792a3e4763 user: Antoine Pitrou date: Thu Jun 14 21:54:24 2012 +0200 summary: Update .hgignore for VS9.0-generated files files: .hgignore | 2 ++ 1 files changed, 2 insertions(+), 0 deletions(-) diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -55,6 +55,8 @@ PC/pythonnt_rc*.h PC/*.obj PC/*.exe +PC/*/*.exe +PC/*/*.pdb PC/*/*.user PC/*/*.ncb PC/*/*.suo -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Fri Jun 15 05:47:04 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Fri, 15 Jun 2012 05:47:04 +0200 Subject: [Python-checkins] Daily reference leaks (1c792a3e4763): sum=0 Message-ID: results for 1c792a3e4763 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflog19SXNK', '-x'] From python-checkins at python.org Fri Jun 15 06:44:20 2012 From: python-checkins at python.org (eli.bendersky) Date: Fri, 15 Jun 2012 06:44:20 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Replace_the_iter/itertext_m?= =?utf8?q?ethods_of_Element_in_=5Felementtree_with_true_C?= Message-ID: http://hg.python.org/cpython/rev/652d148bdc1d changeset: 77431:652d148bdc1d user: Eli Bendersky date: Fri Jun 15 07:42:50 2012 +0300 summary: Replace the iter/itertext methods of Element in _elementtree with true C implementations, instead of the bootstrapped Python code. In addition to being cleaner (removing the last remains of the bootstrapping code in _elementtree), this gives a 10x performance boost for iter() on large documents. Also reorganized the tests a bit to be more robust. files: Lib/test/test_xml_etree.py | 247 ++++++++------ Lib/test/test_xml_etree_c.py | 28 +- Lib/xml/etree/ElementTree.py | 6 +- Modules/_elementtree.c | 362 +++++++++++++++++----- 4 files changed, 414 insertions(+), 229 deletions(-) diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py --- a/Lib/test/test_xml_etree.py +++ b/Lib/test/test_xml_etree.py @@ -23,7 +23,8 @@ from test import support from test.support import findfile, import_fresh_module, gc_collect -pyET = import_fresh_module('xml.etree.ElementTree', blocked=['_elementtree']) +pyET = None +ET = None SIMPLE_XMLFILE = findfile("simple.xml", subdir="xmltestdata") try: @@ -209,10 +210,8 @@ These methods return an iterable. See bug 6472. 
- >>> check_method(element.iter("tag").__next__) >>> check_method(element.iterfind("tag").__next__) >>> check_method(element.iterfind("*").__next__) - >>> check_method(tree.iter("tag").__next__) >>> check_method(tree.iterfind("tag").__next__) >>> check_method(tree.iterfind("*").__next__) @@ -291,42 +290,6 @@ 'hello' """ -# Only with Python implementation -def simplefind(): - """ - Test find methods using the elementpath fallback. - - >>> ElementTree = pyET - - >>> CurrentElementPath = ElementTree.ElementPath - >>> ElementTree.ElementPath = ElementTree._SimpleElementPath() - >>> elem = ElementTree.XML(SAMPLE_XML) - >>> elem.find("tag").tag - 'tag' - >>> ElementTree.ElementTree(elem).find("tag").tag - 'tag' - >>> elem.findtext("tag") - 'text' - >>> elem.findtext("tog") - >>> elem.findtext("tog", "default") - 'default' - >>> ElementTree.ElementTree(elem).findtext("tag") - 'text' - >>> summarize_list(elem.findall("tag")) - ['tag', 'tag'] - >>> summarize_list(elem.findall(".//tag")) - ['tag', 'tag', 'tag'] - - Path syntax doesn't work in this case. - - >>> elem.find("section/tag") - >>> elem.findtext("section/tag") - >>> summarize_list(elem.findall("section/tag")) - [] - - >>> ElementTree.ElementPath = CurrentElementPath - """ - def find(): """ Test find methods (including xpath syntax). @@ -1002,36 +965,6 @@ '1 < 2\n' """ -def iterators(): - """ - Test iterators. - - >>> e = ET.XML("this is a paragraph...") - >>> summarize_list(e.iter()) - ['html', 'body', 'i'] - >>> summarize_list(e.find("body").iter()) - ['body', 'i'] - >>> summarize(next(e.iter())) - 'html' - >>> "".join(e.itertext()) - 'this is a paragraph...' - >>> "".join(e.find("body").itertext()) - 'this is a paragraph.' - >>> next(e.itertext()) - 'this is a ' - - Method iterparse should return an iterator. See bug 6472. - - >>> sourcefile = serialize(e, to_string=False) - >>> next(ET.iterparse(sourcefile)) # doctest: +ELLIPSIS - ('end', ) - - >>> tree = ET.ElementTree(None) - >>> tree.iter() - Traceback (most recent call last): - AttributeError: 'NoneType' object has no attribute 'iter' - """ - ENTITY_XML = """\ @@ -1339,6 +1272,7 @@ """.format(html.escape(SIMPLE_XMLFILE, True)) + def xinclude_loader(href, parse="xml", encoding=None): try: data = XINCLUDE[href] @@ -1411,22 +1345,6 @@ >>> # print(serialize(document)) # C5 """ -def xinclude_default(): - """ - >>> from xml.etree import ElementInclude - - >>> document = xinclude_loader("default.xml") - >>> ElementInclude.include(document) - >>> print(serialize(document)) # default - -
<document>
-   <p>Example.</p>
-   <root>
-   <element key="value">text</element>
-   <element>text</element>tail
-   <empty-element />
-   </root>
-   </document>
- """ # # badly formatted xi:include tags @@ -1917,9 +1835,8 @@ self.assertIsInstance(ET.QName, type) self.assertIsInstance(ET.ElementTree, type) self.assertIsInstance(ET.Element, type) - # XXX issue 14128 with C ElementTree - # self.assertIsInstance(ET.TreeBuilder, type) - # self.assertIsInstance(ET.XMLParser, type) + self.assertIsInstance(ET.TreeBuilder, type) + self.assertIsInstance(ET.XMLParser, type) def test_Element_subclass_trivial(self): class MyElement(ET.Element): @@ -1953,6 +1870,73 @@ self.assertEqual(mye.newmethod(), 'joe') +class ElementIterTest(unittest.TestCase): + def _ilist(self, elem, tag=None): + return summarize_list(elem.iter(tag)) + + def test_basic(self): + doc = ET.XML("this is a paragraph...") + self.assertEqual(self._ilist(doc), ['html', 'body', 'i']) + self.assertEqual(self._ilist(doc.find('body')), ['body', 'i']) + self.assertEqual(next(doc.iter()).tag, 'html') + self.assertEqual(''.join(doc.itertext()), 'this is a paragraph...') + self.assertEqual(''.join(doc.find('body').itertext()), + 'this is a paragraph.') + self.assertEqual(next(doc.itertext()), 'this is a ') + + # iterparse should return an iterator + sourcefile = serialize(doc, to_string=False) + self.assertEqual(next(ET.iterparse(sourcefile))[0], 'end') + + tree = ET.ElementTree(None) + self.assertRaises(AttributeError, tree.iter) + + def test_corners(self): + # single root, no subelements + a = ET.Element('a') + self.assertEqual(self._ilist(a), ['a']) + + # one child + b = ET.SubElement(a, 'b') + self.assertEqual(self._ilist(a), ['a', 'b']) + + # one child and one grandchild + c = ET.SubElement(b, 'c') + self.assertEqual(self._ilist(a), ['a', 'b', 'c']) + + # two children, only first with grandchild + d = ET.SubElement(a, 'd') + self.assertEqual(self._ilist(a), ['a', 'b', 'c', 'd']) + + # replace first child by second + a[0] = a[1] + del a[1] + self.assertEqual(self._ilist(a), ['a', 'd']) + + def test_iter_by_tag(self): + doc = ET.XML(''' + + + bedroom1 + bedroom2 + + nothing here + + + bedroom8 + + ''') + + self.assertEqual(self._ilist(doc, 'room'), ['room'] * 3) + self.assertEqual(self._ilist(doc, 'house'), ['house'] * 2) + + # make sure both tag=None and tag='*' return all tags + all_tags = ['document', 'house', 'room', 'room', + 'shed', 'house', 'room'] + self.assertEqual(self._ilist(doc), all_tags) + self.assertEqual(self._ilist(doc, '*'), all_tags) + + class TreeBuilderTest(unittest.TestCase): sample1 = (' +

Example.

+ + text + texttail + + +''') class XMLParserTest(unittest.TestCase): sample1 = '22' sample2 = ('>> cElementTree = cET - >>> e = cElementTree.Element('a') - >>> getattr(e, '\uD800') # doctest: +ELLIPSIS - Traceback (most recent call last): - ... - UnicodeEncodeError: ... - - >>> p = cElementTree.XMLParser() - >>> p.version.split()[0] - 'Expat' - >>> getattr(p, '\uD800') - Traceback (most recent call last): - ... - AttributeError: 'XMLParser' object has no attribute '\ud800' - """ - - class MiscTests(unittest.TestCase): # Issue #8651. @support.bigmemtest(size=support._2G + 100, memuse=1) @@ -46,6 +21,7 @@ finally: data = None + @unittest.skipUnless(cET, 'requires _elementtree') class TestAliasWorking(unittest.TestCase): # Test that the cET alias module is alive @@ -53,6 +29,7 @@ e = cET_alias.Element('foo') self.assertEqual(e.tag, 'foo') + @unittest.skipUnless(cET, 'requires _elementtree') class TestAcceleratorImported(unittest.TestCase): # Test that the C accelerator was imported, as expected @@ -67,7 +44,6 @@ from test import test_xml_etree, test_xml_etree_c # Run the tests specific to the C implementation - support.run_doctest(test_xml_etree_c, verbosity=True) support.run_unittest( MiscTests, TestAliasWorking, diff --git a/Lib/xml/etree/ElementTree.py b/Lib/xml/etree/ElementTree.py --- a/Lib/xml/etree/ElementTree.py +++ b/Lib/xml/etree/ElementTree.py @@ -916,11 +916,7 @@ _raise_serialization_error(qname) # populate qname and namespaces table - try: - iterate = elem.iter - except AttributeError: - iterate = elem.getiterator # cET compatibility - for elem in iterate(): + for elem in elem.iter(): tag = elem.tag if isinstance(tag, QName): if tag.text not in qnames: diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c --- a/Modules/_elementtree.c +++ b/Modules/_elementtree.c @@ -103,8 +103,6 @@ /* glue functions (see the init function for details) */ static PyObject* elementtree_parseerror_obj; static PyObject* elementtree_deepcopy_obj; -static PyObject* elementtree_iter_obj; -static PyObject* elementtree_itertext_obj; static PyObject* elementpath_obj; /* helpers */ @@ -1109,67 +1107,32 @@ return list; } -static PyObject* -element_iter(ElementObject* self, PyObject* args) + +static PyObject * +create_elementiter(ElementObject *self, PyObject *tag, int gettext); + + +static PyObject * +element_iter(ElementObject *self, PyObject *args) { - PyObject* result; - PyObject* tag = Py_None; if (!PyArg_ParseTuple(args, "|O:iter", &tag)) return NULL; - if (!elementtree_iter_obj) { - PyErr_SetString( - PyExc_RuntimeError, - "iter helper not found" - ); - return NULL; - } - - args = PyTuple_New(2); - if (!args) - return NULL; - - Py_INCREF(self); PyTuple_SET_ITEM(args, 0, (PyObject*) self); - Py_INCREF(tag); PyTuple_SET_ITEM(args, 1, (PyObject*) tag); - - result = PyObject_CallObject(elementtree_iter_obj, args); - - Py_DECREF(args); - - return result; + return create_elementiter(self, tag, 0); } static PyObject* element_itertext(ElementObject* self, PyObject* args) { - PyObject* result; - if (!PyArg_ParseTuple(args, ":itertext")) return NULL; - if (!elementtree_itertext_obj) { - PyErr_SetString( - PyExc_RuntimeError, - "itertext helper not found" - ); - return NULL; - } - - args = PyTuple_New(1); - if (!args) - return NULL; - - Py_INCREF(self); PyTuple_SET_ITEM(args, 0, (PyObject*) self); - - result = PyObject_CallObject(elementtree_itertext_obj, args); - - Py_DECREF(args); - - return result; + return create_elementiter(self, Py_None, 1); } + static PyObject* element_getitem(PyObject* self_, 
Py_ssize_t index) { @@ -1790,6 +1753,267 @@ 0, /* tp_free */ }; +/******************************* Element iterator ****************************/ + +/* ElementIterObject represents the iteration state over an XML element in + * pre-order traversal. To keep track of which sub-element should be returned + * next, a stack of parents is maintained. This is a standard stack-based + * iterative pre-order traversal of a tree. + * The stack is managed using a single-linked list starting at parent_stack. + * Each stack node contains the saved parent to which we should return after + * the current one is exhausted, and the next child to examine in that parent. + */ +typedef struct ParentLocator_t { + ElementObject *parent; + Py_ssize_t child_index; + struct ParentLocator_t *next; +} ParentLocator; + +typedef struct { + PyObject_HEAD + ParentLocator *parent_stack; + ElementObject *root_element; + PyObject *sought_tag; + int root_done; + int gettext; +} ElementIterObject; + + +static void +elementiter_dealloc(ElementIterObject *it) +{ + ParentLocator *p = it->parent_stack; + while (p) { + ParentLocator *temp = p; + Py_XDECREF(p->parent); + p = p->next; + PyObject_Free(temp); + } + + Py_XDECREF(it->sought_tag); + Py_XDECREF(it->root_element); + + PyObject_GC_UnTrack(it); + PyObject_GC_Del(it); +} + +static int +elementiter_traverse(ElementIterObject *it, visitproc visit, void *arg) +{ + ParentLocator *p = it->parent_stack; + while (p) { + Py_VISIT(p->parent); + p = p->next; + } + + Py_VISIT(it->root_element); + Py_VISIT(it->sought_tag); + return 0; +} + +/* Helper function for elementiter_next. Add a new parent to the parent stack. + */ +static ParentLocator * +parent_stack_push_new(ParentLocator *stack, ElementObject *parent) +{ + ParentLocator *new_node = PyObject_Malloc(sizeof(ParentLocator)); + if (new_node) { + new_node->parent = parent; + Py_INCREF(parent); + new_node->child_index = 0; + new_node->next = stack; + } + return new_node; +} + +static PyObject * +elementiter_next(ElementIterObject *it) +{ + /* Sub-element iterator. + * + * A short note on gettext: this function serves both the iter() and + * itertext() methods to avoid code duplication. However, there are a few + * small differences in the way these iterations work. Namely: + * - itertext() only yields text from nodes that have it, and continues + * iterating when a node doesn't have text (so it doesn't return any + * node like iter()) + * - itertext() also has to handle tail, after finishing with all the + * children of a node. + */ + + while (1) { + /* Handle the case reached in the beginning and end of iteration, where + * the parent stack is empty. The root_done flag gives us indication + * whether we've just started iterating (so root_done is 0), in which + * case the root is returned. If root_done is 1 and we're here, the + * iterator is exhausted. 
+ */ + if (!it->parent_stack->parent) { + if (it->root_done) { + PyErr_SetNone(PyExc_StopIteration); + return NULL; + } else { + it->parent_stack = parent_stack_push_new(it->parent_stack, + it->root_element); + if (!it->parent_stack) { + PyErr_NoMemory(); + return NULL; + } + + it->root_done = 1; + if (it->sought_tag == Py_None || + PyObject_RichCompareBool(it->root_element->tag, + it->sought_tag, Py_EQ) == 1) { + if (it->gettext) { + PyObject *text = JOIN_OBJ(it->root_element->text); + if (PyObject_IsTrue(text)) { + Py_INCREF(text); + return text; + } + } else { + Py_INCREF(it->root_element); + return (PyObject *)it->root_element; + } + } + } + } + + /* See if there are children left to traverse in the current parent. If + * yes, visit the next child. If not, pop the stack and try again. + */ + ElementObject *cur_parent = it->parent_stack->parent; + Py_ssize_t child_index = it->parent_stack->child_index; + if (cur_parent->extra && child_index < cur_parent->extra->length) { + ElementObject *child = (ElementObject *) + cur_parent->extra->children[child_index]; + it->parent_stack->child_index++; + it->parent_stack = parent_stack_push_new(it->parent_stack, + child); + if (!it->parent_stack) { + PyErr_NoMemory(); + return NULL; + } + + if (it->gettext) { + PyObject *text = JOIN_OBJ(child->text); + if (PyObject_IsTrue(text)) { + Py_INCREF(text); + return text; + } + } else if (it->sought_tag == Py_None || + PyObject_RichCompareBool(child->tag, + it->sought_tag, Py_EQ) == 1) { + Py_INCREF(child); + return (PyObject *)child; + } + else + continue; + } + else { + PyObject *tail = it->gettext ? JOIN_OBJ(cur_parent->tail) : Py_None; + ParentLocator *next = it->parent_stack->next; + Py_XDECREF(it->parent_stack->parent); + PyObject_Free(it->parent_stack); + it->parent_stack = next; + + /* Note that extra condition on it->parent_stack->parent here; + * this is because itertext() is supposed to only return *inner* + * text, not text following the element it began iteration with. 
+ */ + if (it->parent_stack->parent && PyObject_IsTrue(tail)) { + Py_INCREF(tail); + return tail; + } + } + } + + return NULL; +} + + +static PyTypeObject ElementIter_Type = { + PyVarObject_HEAD_INIT(NULL, 0) + "_elementtree._element_iterator", /* tp_name */ + sizeof(ElementIterObject), /* tp_basicsize */ + 0, /* tp_itemsize */ + /* methods */ + (destructor)elementiter_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_reserved */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */ + 0, /* tp_doc */ + (traverseproc)elementiter_traverse, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + PyObject_SelfIter, /* tp_iter */ + (iternextfunc)elementiter_next, /* tp_iternext */ + 0, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + 0, /* tp_init */ + 0, /* tp_alloc */ + 0, /* tp_new */ +}; + + +static PyObject * +create_elementiter(ElementObject *self, PyObject *tag, int gettext) +{ + ElementIterObject *it; + PyObject *star = NULL; + + it = PyObject_GC_New(ElementIterObject, &ElementIter_Type); + if (!it) + return NULL; + if (!(it->parent_stack = PyObject_Malloc(sizeof(ParentLocator)))) { + PyObject_GC_Del(it); + return NULL; + } + + it->parent_stack->parent = NULL; + it->parent_stack->child_index = 0; + it->parent_stack->next = NULL; + + if (PyUnicode_Check(tag)) + star = PyUnicode_FromString("*"); + else if (PyBytes_Check(tag)) + star = PyBytes_FromString("*"); + + if (star && PyObject_RichCompareBool(tag, star, Py_EQ) == 1) + tag = Py_None; + + Py_XDECREF(star); + it->sought_tag = tag; + it->root_done = 0; + it->gettext = gettext; + it->root_element = self; + + Py_INCREF(self); + Py_INCREF(tag); + + PyObject_GC_Track(it); + return (PyObject *)it; +} + + /* ==================================================================== */ /* the tree builder type */ @@ -3238,8 +3462,7 @@ PyMODINIT_FUNC PyInit__elementtree(void) { - PyObject *m, *g, *temp; - char* bootstrap; + PyObject *m, *temp; /* Initialize object types */ if (PyType_Ready(&TreeBuilder_Type) < 0) @@ -3255,44 +3478,6 @@ if (!m) return NULL; - /* The code below requires that the module gets already added - to sys.modules. 
*/ - PyDict_SetItemString(PyImport_GetModuleDict(), - _elementtreemodule.m_name, - m); - - /* python glue code */ - - g = PyDict_New(); - if (!g) - return NULL; - - PyDict_SetItemString(g, "__builtins__", PyEval_GetBuiltins()); - - bootstrap = ( - "def iter(node, tag=None):\n" /* helper */ - " if tag == '*':\n" - " tag = None\n" - " if tag is None or node.tag == tag:\n" - " yield node\n" - " for node in node:\n" - " for node in iter(node, tag):\n" - " yield node\n" - - "def itertext(node):\n" /* helper */ - " if node.text:\n" - " yield node.text\n" - " for e in node:\n" - " for s in e.itertext():\n" - " yield s\n" - " if e.tail:\n" - " yield e.tail\n" - - ); - - if (!PyRun_String(bootstrap, Py_file_input, g, NULL)) - return NULL; - if (!(temp = PyImport_ImportModule("copy"))) return NULL; elementtree_deepcopy_obj = PyObject_GetAttrString(temp, "deepcopy"); @@ -3301,9 +3486,6 @@ if (!(elementpath_obj = PyImport_ImportModule("xml.etree.ElementPath"))) return NULL; - elementtree_iter_obj = PyDict_GetItemString(g, "iter"); - elementtree_itertext_obj = PyDict_GetItemString(g, "itertext"); - /* link against pyexpat */ expat_capi = PyCapsule_Import(PyExpat_CAPSULE_NAME, 0); if (expat_capi) { -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 15 06:54:18 2012 From: python-checkins at python.org (eli.bendersky) Date: Fri, 15 Jun 2012 06:54:18 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_windows_compilation_pro?= =?utf8?q?blems_caused_by_previous_commit=2E?= Message-ID: http://hg.python.org/cpython/rev/3c089edbf02c changeset: 77432:3c089edbf02c user: Eli Bendersky date: Fri Jun 15 07:52:49 2012 +0300 summary: Fix windows compilation problems caused by previous commit. files: Modules/_elementtree.c | 6 ++++-- 1 files changed, 4 insertions(+), 2 deletions(-) diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c --- a/Modules/_elementtree.c +++ b/Modules/_elementtree.c @@ -1840,6 +1840,8 @@ * - itertext() also has to handle tail, after finishing with all the * children of a node. */ + ElementObject *cur_parent; + Py_ssize_t child_index; while (1) { /* Handle the case reached in the beginning and end of iteration, where @@ -1881,8 +1883,8 @@ /* See if there are children left to traverse in the current parent. If * yes, visit the next child. If not, pop the stack and try again. 
*/ - ElementObject *cur_parent = it->parent_stack->parent; - Py_ssize_t child_index = it->parent_stack->child_index; + cur_parent = it->parent_stack->parent; + child_index = it->parent_stack->child_index; if (cur_parent->extra && child_index < cur_parent->extra->length) { ElementObject *child = (ElementObject *) cur_parent->extra->children[child_index]; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 15 07:38:37 2012 From: python-checkins at python.org (eli.bendersky) Date: Fri, 15 Jun 2012 07:38:37 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_mark_problematic_test_as_ex?= =?utf8?q?pected_failure_-_investigating?= Message-ID: http://hg.python.org/cpython/rev/40f2fdae6d22 changeset: 77433:40f2fdae6d22 user: Eli Bendersky date: Fri Jun 15 08:37:08 2012 +0300 summary: mark problematic test as expected failure - investigating files: Lib/test/test_xml_etree.py | 8 ++++++-- 1 files changed, 6 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py --- a/Lib/test/test_xml_etree.py +++ b/Lib/test/test_xml_etree.py @@ -2010,7 +2010,9 @@ ('html', '-//W3C//DTD XHTML 1.0 Transitional//EN', 'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd')) + class XincludeTest(unittest.TestCase): + @unittest.expectedFailure def test_xinclude_default(self): from xml.etree import ElementInclude doc = xinclude_loader('default.xml') @@ -2024,6 +2026,8 @@ ''') + + class XMLParserTest(unittest.TestCase): sample1 = '22' sample2 = (' http://hg.python.org/cpython/rev/ea0dc4338987 changeset: 77434:ea0dc4338987 user: Eli Bendersky date: Fri Jun 15 09:03:19 2012 +0300 summary: Removed _SimpleElementPath and its flaky test. The test monkey-patches the module, which causes other failures and fails itself depending on the order tests are run. 
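The order dependence comes from the removed test (ElementPathFallbackTest in the diff below) rebinding the module-level ET.ElementPath object and only putting the original back at the end of the test body, so any assertion failure left the module patched for whatever test ran next. If such a test were kept, a self-restoring patch avoids that problem; a small sketch, not part of the changeset, with invented class and test names:

    import unittest
    from unittest import mock
    from xml.etree import ElementTree as ET

    class FallbackTest(unittest.TestCase):
        def test_patch_is_always_restored(self):
            sentinel = object()
            # patch.object undoes the rebinding even if an assertion fails,
            # so later tests never see the patched module.
            with mock.patch.object(ET, 'ElementPath', sentinel):
                self.assertIs(ET.ElementPath, sentinel)
            self.assertIsNot(ET.ElementPath, sentinel)
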
files: Lib/test/test_xml_etree.py | 23 --------------------- Lib/xml/etree/ElementTree.py | 26 +----------------------- 2 files changed, 1 insertions(+), 48 deletions(-) diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py --- a/Lib/test/test_xml_etree.py +++ b/Lib/test/test_xml_etree.py @@ -2234,28 +2234,6 @@ self.assertEqual(pyET.Element.__module__, 'xml.etree.ElementTree') self.assertEqual(pyET.SubElement.__module__, 'xml.etree.ElementTree') - -class ElementPathFallbackTest(unittest.TestCase): - def test_fallback(self): - current_ElementPath = ET.ElementPath - ET.ElementPath = ET._SimpleElementPath() - elem = ET.XML(SAMPLE_XML) - self.assertEqual(elem.find('tag').tag, 'tag') - self.assertEqual(ET.ElementTree(elem).find('tag').tag, 'tag') - self.assertEqual(elem.findtext('tag'), 'text') - self.assertIsNone(elem.findtext('tog')) - self.assertEqual(elem.findtext('tog', 'default'), 'default') - self.assertEqual(ET.ElementTree(elem).findtext('tag'), 'text') - self.assertEqual(summarize_list(elem.findall('tag')), ['tag', 'tag']) - self.assertEqual(summarize_list(elem.findall('.//tag')), - ['tag', 'tag', 'tag']) - - #self.assertIsNone(elem.find('section/tag')) - #self.assertIsNone(elem.findtext('section/tag')) - self.assertEqual(summarize_list(elem.findall('section/tag')), []) - - ET.ElementPath = current_ElementPath - # -------------------------------------------------------------------- @@ -2328,7 +2306,6 @@ if pyET: test_classes.extend([ NoAcceleratorTest, - ElementPathFallbackTest, ]) support.run_unittest(*test_classes) diff --git a/Lib/xml/etree/ElementTree.py b/Lib/xml/etree/ElementTree.py --- a/Lib/xml/etree/ElementTree.py +++ b/Lib/xml/etree/ElementTree.py @@ -101,32 +101,8 @@ import re import warnings -class _SimpleElementPath: - # emulate pre-1.2 find/findtext/findall behaviour - def find(self, element, tag, namespaces=None): - for elem in element: - if elem.tag == tag: - return elem - return None - def findtext(self, element, tag, default=None, namespaces=None): - elem = self.find(element, tag) - if elem is None: - return default - return elem.text or "" - def iterfind(self, element, tag, namespaces=None): - if tag[:3] == ".//": - for elem in element.iter(tag[3:]): - yield elem - for elem in element: - if elem.tag == tag: - yield elem - def findall(self, element, tag, namespaces=None): - return list(self.iterfind(element, tag, namespaces)) +from . import ElementPath -try: - from . import ElementPath -except ImportError: - ElementPath = _SimpleElementPath() ## # Parser error. This is a subclass of SyntaxError. 
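Related background: the C iterator added in changeset 652d148bdc1d above walks the tree with an explicit stack of parents instead of the removed generator-based bootstrap code, i.e. a standard iterative pre-order traversal. A rough Python sketch of that kind of traversal (illustrative only -- it is not the C implementation, and iter_elements is an invented name):

    def iter_elements(root, tag=None):
        # Iterative pre-order traversal with an explicit stack instead of
        # recursion or nested generators.
        stack = [root]
        while stack:
            node = stack.pop()
            if tag is None or node.tag == tag:
                yield node
            # Push children in reverse so the leftmost child is visited first.
            stack.extend(reversed(list(node)))

    # Example:
    #   from xml.etree import ElementTree as ET
    #   doc = ET.XML('<a><b><c/></b><d/></a>')
    #   [e.tag for e in iter_elements(doc)]        # ['a', 'b', 'c', 'd']
    #   [e.tag for e in iter_elements(doc, 'b')]   # ['b']
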
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 15 08:42:15 2012 From: python-checkins at python.org (eli.bendersky) Date: Fri, 15 Jun 2012 08:42:15 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Skip_XincludeTest_entirely_?= =?utf8?q?instead_of_just_ignoring_failures=2C_because_it_may?= Message-ID: http://hg.python.org/cpython/rev/40eeab0f7fc2 changeset: 77435:40eeab0f7fc2 user: Eli Bendersky date: Fri Jun 15 09:40:44 2012 +0300 summary: Skip XincludeTest entirely instead of just ignoring failures, because it may segfault, depending on the order of running tests files: Lib/test/test_xml_etree.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py --- a/Lib/test/test_xml_etree.py +++ b/Lib/test/test_xml_etree.py @@ -2011,8 +2011,8 @@ 'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd')) + at unittest.skip('Unstable due to module monkeypatching') class XincludeTest(unittest.TestCase): - @unittest.expectedFailure def test_xinclude_default(self): from xml.etree import ElementInclude doc = xinclude_loader('default.xml') -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 15 09:15:17 2012 From: python-checkins at python.org (martin.v.loewis) Date: Fri, 15 Jun 2012 09:15:17 +0200 (CEST) Subject: [Python-checkins] r88976 - tracker/roundup-src/roundup/cgi/client.py Message-ID: <3WDCdd5YGNzNtW@mail.python.org> Author: martin.v.loewis Date: Fri Jun 15 09:15:17 2012 New Revision: 88976 Log: Fake a list property to prevent "Error: not indexable". Modified: tracker/roundup-src/roundup/cgi/client.py Modified: tracker/roundup-src/roundup/cgi/client.py ============================================================================== --- tracker/roundup-src/roundup/cgi/client.py (original) +++ tracker/roundup-src/roundup/cgi/client.py Fri Jun 15 09:15:17 2012 @@ -305,6 +305,10 @@ # see if we need to re-parse the environment for the form (eg Zope) if form is None: self.form = cgi.FieldStorage(fp=request.rfile, environ=env) + # In some case (e.g. content-type application/xml), cgi + # will not parse anything. Fake a list property in this case + if self.form.list is None: + self.form.list = [] else: self.form = form From python-checkins at python.org Fri Jun 15 13:02:16 2012 From: python-checkins at python.org (martin.v.loewis) Date: Fri, 15 Jun 2012 13:02:16 +0200 (CEST) Subject: [Python-checkins] r88977 - tracker/instances/python-dev/detectors/no_texthtml.py Message-ID: <3WDJgX3pYMzNv9@mail.python.org> Author: martin.v.loewis Date: Fri Jun 15 13:02:16 2012 New Revision: 88977 Log: Add text/x-html and html to black list. 
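The cgi.FieldStorage workaround in r88976 above exists because FieldStorage only builds its list of fields for form-style content types; for a body such as application/xml nothing is parsed into fields, the .list attribute stays None, and indexing it is what surfaced as "Error: not indexable". A standalone illustration of that behaviour (shown here with Python 3's cgi module, outside Roundup; the XML payload is made up):

    import cgi
    import io

    body = b'<?xml version="1.0"?><issue/>'
    env = {
        'REQUEST_METHOD': 'POST',
        'CONTENT_TYPE': 'application/xml',
        'CONTENT_LENGTH': str(len(body)),
    }
    form = cgi.FieldStorage(fp=io.BytesIO(body), environ=env)
    print(form.list)         # None -- no fields were parsed from the body
    print(form.file.read())  # the raw body is still available as a single value
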
Modified: tracker/instances/python-dev/detectors/no_texthtml.py Modified: tracker/instances/python-dev/detectors/no_texthtml.py ============================================================================== --- tracker/instances/python-dev/detectors/no_texthtml.py (original) +++ tracker/instances/python-dev/detectors/no_texthtml.py Fri Jun 15 13:02:16 2012 @@ -1,8 +1,8 @@ def audit_html_files(db, cl, nodeid, newvalues): - if newvalues.has_key('type') and newvalues['type'] == 'text/html': + if newvalues.has_key('type') and newvalues['type'] in ('text/html', 'html', 'text/x-html'): newvalues['type'] = 'text/plain' - + def init(db): db.file.audit('set', audit_html_files) From python-checkins at python.org Fri Jun 15 13:02:57 2012 From: python-checkins at python.org (martin.v.loewis) Date: Fri, 15 Jun 2012 13:02:57 +0200 (CEST) Subject: [Python-checkins] r88978 - tracker/instances/jython/detectors/no_texthtml.py Message-ID: <3WDJhK74ytzNv9@mail.python.org> Author: martin.v.loewis Date: Fri Jun 15 13:02:57 2012 New Revision: 88978 Log: Add text/x-html and html to black list. Modified: tracker/instances/jython/detectors/no_texthtml.py Modified: tracker/instances/jython/detectors/no_texthtml.py ============================================================================== --- tracker/instances/jython/detectors/no_texthtml.py (original) +++ tracker/instances/jython/detectors/no_texthtml.py Fri Jun 15 13:02:57 2012 @@ -1,8 +1,8 @@ def audit_html_files(db, cl, nodeid, newvalues): - if newvalues.has_key('type') and newvalues['type'] == 'text/html': + if newvalues.has_key('type') and newvalues['type'] in ('text/html', 'html', 'text/x-html'): newvalues['type'] = 'text/plain' - + def init(db): db.file.audit('set', audit_html_files) From python-checkins at python.org Fri Jun 15 13:14:47 2012 From: python-checkins at python.org (nick.coghlan) Date: Fri, 15 Jun 2012 13:14:47 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315061=3A_Don=27t_o?= =?utf8?q?versell_the_capabilities_of_the_new_non-shortcircuiting?= Message-ID: http://hg.python.org/cpython/rev/f36af3766a20 changeset: 77436:f36af3766a20 user: Nick Coghlan date: Fri Jun 15 21:14:08 2012 +1000 summary: Issue #15061: Don't oversell the capabilities of the new non-shortcircuiting comparison function in hmac files: Doc/library/hmac.rst | 41 +++++++++++++++++----------- Lib/hmac.py | 26 +++++++++--------- Lib/test/test_hmac.py | 44 ++++++++++++++++-------------- Misc/NEWS | 5 +++ 4 files changed, 66 insertions(+), 50 deletions(-) diff --git a/Doc/library/hmac.rst b/Doc/library/hmac.rst --- a/Doc/library/hmac.rst +++ b/Doc/library/hmac.rst @@ -42,8 +42,8 @@ When comparing the output of :meth:`digest` to an externally-supplied digest during a verification routine, it is recommended to use the - :func:`hmac.secure_compare` function instead of the ``==`` operator - to avoid potential timing attacks. + :func:`compare_digest` function instead of the ``==`` operator + to reduce the vulnerability to timing attacks. .. method:: HMAC.hexdigest() @@ -54,10 +54,11 @@ .. warning:: - When comparing the output of :meth:`hexdigest` to an externally-supplied - digest during a verification routine, it is recommended to use the - :func:`hmac.secure_compare` function instead of the ``==`` operator - to avoid potential timing attacks. + The output of :meth:`hexdigest` should not be compared directly to an + externally-supplied digest during a verification routine. 
Instead, the + externally supplied digest should be converted to a :class:`bytes` + value and compared to the output of :meth:`digest` with + :func:`compare_digest`. .. method:: HMAC.copy() @@ -68,20 +69,28 @@ This module also provides the following helper function: -.. function:: secure_compare(a, b) +.. function:: compare_digest(a, b) - Returns the equivalent of ``a == b``, but using a time-independent - comparison method. Comparing the full lengths of the inputs *a* and *b*, - instead of short-circuiting the comparison upon the first unequal byte, - prevents leaking information about the inputs being compared and mitigates - potential timing attacks. The inputs must be either :class:`str` or - :class:`bytes` instances. + Returns the equivalent of ``a == b``, but avoids content based + short circuiting behaviour to reduce the vulnerability to timing + analysis. The inputs must be :class:`bytes` instances. + + Using a short circuiting comparison (that is, one that terminates as soon + as it finds any difference between the values) to check digests for + correctness can be problematic, as it introduces a potential + vulnerability when an attacker can control both the message to be checked + *and* the purported signature value. By keeping the plaintext consistent + and supplying different signature values, an attacker may be able to use + timing variations to search the signature space for the expected value in + O(n) time rather than the desired O(2**n). .. note:: - While the :func:`hmac.secure_compare` function prevents leaking the - contents of the inputs via a timing attack, it does leak the length - of the inputs. However, this generally is not a security risk. + While this function reduces the likelihood of leaking the contents of + the expected digest via a timing attack, it still uses short circuiting + behaviour based on the *length* of the inputs. It is assumed that the + expected length of the digest is not a secret, as it is typically + published as part of a file format, network protocol or API definition. .. 
versionadded:: 3.3 diff --git a/Lib/hmac.py b/Lib/hmac.py --- a/Lib/hmac.py +++ b/Lib/hmac.py @@ -13,24 +13,24 @@ digest_size = None -def secure_compare(a, b): - """Returns the equivalent of 'a == b', but using a time-independent - comparison method to prevent timing attacks.""" - if not ((isinstance(a, str) and isinstance(b, str)) or - (isinstance(a, bytes) and isinstance(b, bytes))): - raise TypeError("inputs must be strings or bytes") +def compare_digest(a, b): + """Returns the equivalent of 'a == b', but avoids content based short + circuiting to reduce the vulnerability to timing attacks.""" + # Consistent timing matters more here than data type flexibility + if not (isinstance(a, bytes) and isinstance(b, bytes)): + raise TypeError("inputs must be bytes instances") + # We assume the length of the expected digest is public knowledge, + # thus this early return isn't leaking anything an attacker wouldn't + # already know if len(a) != len(b): return False + # We assume that integers in the bytes range are all cached, + # thus timing shouldn't vary much due to integer object creation result = 0 - if isinstance(a, bytes): - for x, y in zip(a, b): - result |= x ^ y - else: - for x, y in zip(a, b): - result |= ord(x) ^ ord(y) - + for x, y in zip(a, b): + result |= x ^ y return result == 0 diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py --- a/Lib/test/test_hmac.py +++ b/Lib/test/test_hmac.py @@ -302,40 +302,42 @@ self.assertEqual(h1.hexdigest(), h2.hexdigest(), "Hexdigest of copy doesn't match original hexdigest.") -class SecureCompareTestCase(unittest.TestCase): +class CompareDigestTestCase(unittest.TestCase): def test_compare(self): # Testing input type exception handling a, b = 100, 200 - self.assertRaises(TypeError, hmac.secure_compare, a, b) - a, b = 100, "foobar" - self.assertRaises(TypeError, hmac.secure_compare, a, b) + self.assertRaises(TypeError, hmac.compare_digest, a, b) + a, b = 100, b"foobar" + self.assertRaises(TypeError, hmac.compare_digest, a, b) + a, b = b"foobar", 200 + self.assertRaises(TypeError, hmac.compare_digest, a, b) a, b = "foobar", b"foobar" - self.assertRaises(TypeError, hmac.secure_compare, a, b) + self.assertRaises(TypeError, hmac.compare_digest, a, b) + a, b = b"foobar", "foobar" + self.assertRaises(TypeError, hmac.compare_digest, a, b) + a, b = "foobar", "foobar" + self.assertRaises(TypeError, hmac.compare_digest, a, b) + a, b = bytearray(b"foobar"), bytearray(b"foobar") + self.assertRaises(TypeError, hmac.compare_digest, a, b) - # Testing str/bytes of different lengths - a, b = "foobar", "foo" - self.assertFalse(hmac.secure_compare(a, b)) + # Testing bytes of different lengths a, b = b"foobar", b"foo" - self.assertFalse(hmac.secure_compare(a, b)) + self.assertFalse(hmac.compare_digest(a, b)) a, b = b"\xde\xad\xbe\xef", b"\xde\xad" - self.assertFalse(hmac.secure_compare(a, b)) + self.assertFalse(hmac.compare_digest(a, b)) - # Testing str/bytes of same lengths, different values - a, b = "foobar", "foobaz" - self.assertFalse(hmac.secure_compare(a, b)) + # Testing bytes of same lengths, different values a, b = b"foobar", b"foobaz" - self.assertFalse(hmac.secure_compare(a, b)) + self.assertFalse(hmac.compare_digest(a, b)) a, b = b"\xde\xad\xbe\xef", b"\xab\xad\x1d\xea" - self.assertFalse(hmac.secure_compare(a, b)) + self.assertFalse(hmac.compare_digest(a, b)) - # Testing str/bytes of same lengths, same values - a, b = "foobar", "foobar" - self.assertTrue(hmac.secure_compare(a, b)) + # Testing bytes of same lengths, same values a, b = b"foobar", 
b"foobar" - self.assertTrue(hmac.secure_compare(a, b)) + self.assertTrue(hmac.compare_digest(a, b)) a, b = b"\xde\xad\xbe\xef", b"\xde\xad\xbe\xef" - self.assertTrue(hmac.secure_compare(a, b)) + self.assertTrue(hmac.compare_digest(a, b)) def test_main(): support.run_unittest( @@ -343,7 +345,7 @@ ConstructorTestCase, SanityTestCase, CopyTestCase, - SecureCompareTestCase + CompareDigestTestCase ) if __name__ == "__main__": diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -21,6 +21,11 @@ Library ------- +- Issue #15061: The inappropriately named hmac.secure_compare has been + renamed to hash.compare_digest, restricted to operating on bytes inputs + only and had its documentation updated to more acurrately reflect both its + intent and its limitations + - Issue #13841: Make child processes exit using sys.exit() on Windows. - Issue #14936: curses_panel was converted to PEP 3121 and PEP 384 API. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 15 13:16:37 2012 From: python-checkins at python.org (nick.coghlan) Date: Fri, 15 Jun 2012 13:16:37 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_typos_in_NEWS_entry?= Message-ID: http://hg.python.org/cpython/rev/593418586945 changeset: 77437:593418586945 user: Nick Coghlan date: Fri Jun 15 21:16:25 2012 +1000 summary: Fix typos in NEWS entry files: Misc/NEWS | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -22,8 +22,8 @@ ------- - Issue #15061: The inappropriately named hmac.secure_compare has been - renamed to hash.compare_digest, restricted to operating on bytes inputs - only and had its documentation updated to more acurrately reflect both its + renamed to hmac.compare_digest, restricted to operating on bytes inputs + only and had its documentation updated to more accurately reflect both its intent and its limitations - Issue #13841: Make child processes exit using sys.exit() on Windows. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 15 15:22:33 2012 From: python-checkins at python.org (martin.v.loewis) Date: Fri, 15 Jun 2012 15:22:33 +0200 (CEST) Subject: [Python-checkins] r88979 - tracker/instances/python-dev/extensions/openid_login.py Message-ID: <3WDMnP0pg3zNyM@mail.python.org> Author: martin.v.loewis Date: Fri Jun 15 15:22:32 2012 New Revision: 88979 Log: Catch discovery errors. Modified: tracker/instances/python-dev/extensions/openid_login.py Modified: tracker/instances/python-dev/extensions/openid_login.py ============================================================================== --- tracker/instances/python-dev/extensions/openid_login.py (original) +++ tracker/instances/python-dev/extensions/openid_login.py Fri Jun 15 15:22:32 2012 @@ -160,7 +160,10 @@ # results. However, the risk of login breaking if a provider does change # its service URL outweighs the cost of another HTTP request to perform # the discovery during login. 
- result = openid2rp.discover(provider_id) + try: + result = openid2rp.discover(provider_id) + except Exception: + result = None if result is None: self.client.error_message.append('Provider %s appears to be down' % providers[provider][0]) return From python-checkins at python.org Fri Jun 15 18:36:55 2012 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 15 Jun 2012 18:36:55 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Skip_test=5Fbig?= =?utf8?q?mem=2Etest=5Funicode=5Frepr=5Foflw=2C_since_it_crashes_=28issue_?= =?utf8?b?IzE0OTA0KS4=?= Message-ID: http://hg.python.org/cpython/rev/1c9635109079 changeset: 77438:1c9635109079 branch: 2.7 parent: 77420:412c7daed0db user: Antoine Pitrou date: Fri Jun 15 18:33:48 2012 +0200 summary: Skip test_bigmem.test_unicode_repr_oflw, since it crashes (issue #14904). files: Lib/test/test_bigmem.py | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_bigmem.py b/Lib/test/test_bigmem.py --- a/Lib/test/test_bigmem.py +++ b/Lib/test/test_bigmem.py @@ -124,6 +124,7 @@ @precisionbigmemtest(size=_4G // 5, memuse=6+2) def test_unicode_repr_oflw(self, size): + self.skipTest("test crashes - see issue #14904") try: s = u"\uAAAA"*size r = repr(s) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 15 19:15:57 2012 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 15 Jun 2012 19:15:57 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogSXNzdWUgIzE0OTMz?= =?utf8?q?=3A_fix_misleading_doc_about_weakref_support_in_extension_types?= =?utf8?q?=2E?= Message-ID: http://hg.python.org/cpython/rev/69177ff1a643 changeset: 77439:69177ff1a643 branch: 3.2 parent: 77424:62030ebb2b01 user: Antoine Pitrou date: Fri Jun 15 19:11:31 2012 +0200 summary: Issue #14933: fix misleading doc about weakref support in extension types. files: Doc/extending/newtypes.rst | 5 ++--- 1 files changed, 2 insertions(+), 3 deletions(-) diff --git a/Doc/extending/newtypes.rst b/Doc/extending/newtypes.rst --- a/Doc/extending/newtypes.rst +++ b/Doc/extending/newtypes.rst @@ -1459,9 +1459,8 @@ } The only further addition is that the destructor needs to call the weak -reference manager to clear any weak references. This should be done before any -other parts of the destruction have occurred, but is only required if the weak -reference list is non-*NULL*:: +reference manager to clear any weak references. This is only required if the +weak reference list is non-*NULL*:: static void instance_dealloc(PyInstanceObject *inst) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 15 19:15:58 2012 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 15 Jun 2012 19:15:58 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Issue_=2314933=3A_fix_misleading_doc_about_weakref_support_i?= =?utf8?q?n_extension_types=2E?= Message-ID: http://hg.python.org/cpython/rev/b17c8005e08a changeset: 77440:b17c8005e08a parent: 77437:593418586945 parent: 77439:69177ff1a643 user: Antoine Pitrou date: Fri Jun 15 19:12:04 2012 +0200 summary: Issue #14933: fix misleading doc about weakref support in extension types. 
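The behaviour the newtypes.rst passage above describes -- the destructor asking the weak reference manager (PyObject_ClearWeakRefs() in CPython) to clear any outstanding weak references -- is what makes weak references go dead as soon as their referent is deallocated. Seen from Python code, a trivial sketch not tied to any particular extension type (Node is an invented class; the immediate collection relies on CPython's reference counting):

    import weakref

    class Node:
        pass

    obj = Node()
    ref = weakref.ref(obj)
    print(ref() is obj)   # True while the object is alive
    del obj               # last reference dropped -> deallocation clears the weakref
    print(ref())          # None
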
files: Doc/extending/newtypes.rst | 5 ++--- 1 files changed, 2 insertions(+), 3 deletions(-) diff --git a/Doc/extending/newtypes.rst b/Doc/extending/newtypes.rst --- a/Doc/extending/newtypes.rst +++ b/Doc/extending/newtypes.rst @@ -1437,9 +1437,8 @@ } The only further addition is that the destructor needs to call the weak -reference manager to clear any weak references. This should be done before any -other parts of the destruction have occurred, but is only required if the weak -reference list is non-*NULL*:: +reference manager to clear any weak references. This is only required if the +weak reference list is non-*NULL*:: static void instance_dealloc(PyInstanceObject *inst) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 15 19:15:59 2012 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 15 Jun 2012 19:15:59 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE0OTMz?= =?utf8?q?=3A_fix_misleading_doc_about_weakref_support_in_extension_types?= =?utf8?q?=2E?= Message-ID: http://hg.python.org/cpython/rev/0ac1f90954dc changeset: 77441:0ac1f90954dc branch: 2.7 parent: 77438:1c9635109079 user: Antoine Pitrou date: Fri Jun 15 19:11:31 2012 +0200 summary: Issue #14933: fix misleading doc about weakref support in extension types. files: Doc/extending/newtypes.rst | 5 ++--- 1 files changed, 2 insertions(+), 3 deletions(-) diff --git a/Doc/extending/newtypes.rst b/Doc/extending/newtypes.rst --- a/Doc/extending/newtypes.rst +++ b/Doc/extending/newtypes.rst @@ -1521,9 +1521,8 @@ } The only further addition is that the destructor needs to call the weak -reference manager to clear any weak references. This should be done before any -other parts of the destruction have occurred, but is only required if the weak -reference list is non-*NULL*:: +reference manager to clear any weak references. This is only required if the +weak reference list is non-*NULL*:: static void instance_dealloc(PyInstanceObject *inst) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 15 19:33:01 2012 From: python-checkins at python.org (richard.oudkerk) Date: Fri, 15 Jun 2012 19:33:01 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314059=3A_Implement?= =?utf8?q?_multiprocessing=2EBarrier?= Message-ID: http://hg.python.org/cpython/rev/2d2f206d040e changeset: 77442:2d2f206d040e parent: 77440:b17c8005e08a user: Richard Oudkerk date: Fri Jun 15 18:26:07 2012 +0100 summary: Issue #14059: Implement multiprocessing.Barrier files: Doc/library/multiprocessing.rst | 23 +- Lib/multiprocessing/__init__.py | 11 +- Lib/multiprocessing/dummy/__init__.py | 4 +- Lib/multiprocessing/managers.py | 21 + Lib/multiprocessing/synchronize.py | 40 + Lib/test/test_multiprocessing.py | 337 +++++++++++++- Misc/NEWS | 2 + 7 files changed, 426 insertions(+), 12 deletions(-) diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst --- a/Doc/library/multiprocessing.rst +++ b/Doc/library/multiprocessing.rst @@ -226,11 +226,11 @@ holds Python objects and allows other processes to manipulate them using proxies. - A manager returned by :func:`Manager` will support types :class:`list`, - :class:`dict`, :class:`Namespace`, :class:`Lock`, :class:`RLock`, - :class:`Semaphore`, :class:`BoundedSemaphore`, :class:`Condition`, - :class:`Event`, :class:`Queue`, :class:`Value` and :class:`Array`. 
For - example, :: + A manager returned by :func:`Manager` will support types + :class:`list`, :class:`dict`, :class:`Namespace`, :class:`Lock`, + :class:`RLock`, :class:`Semaphore`, :class:`BoundedSemaphore`, + :class:`Condition`, :class:`Event`, :class:`Barrier`, + :class:`Queue`, :class:`Value` and :class:`Array`. For example, :: from multiprocessing import Process, Manager @@ -885,6 +885,12 @@ Note that one can also create synchronization primitives by using a manager object -- see :ref:`multiprocessing-managers`. +.. class:: Barrier(parties[, action[, timeout]]) + + A barrier object: a clone of :class:`threading.Barrier`. + + .. versionadded:: 3.3 + .. class:: BoundedSemaphore([value]) A bounded semaphore object: a clone of :class:`threading.BoundedSemaphore`. @@ -1280,6 +1286,13 @@ It also supports creation of shared lists and dictionaries. + .. method:: Barrier(parties[, action[, timeout]]) + + Create a shared :class:`threading.Barrier` object and return a + proxy for it. + + .. versionadded:: 3.3 + .. method:: BoundedSemaphore([value]) Create a shared :class:`threading.BoundedSemaphore` object and return a diff --git a/Lib/multiprocessing/__init__.py b/Lib/multiprocessing/__init__.py --- a/Lib/multiprocessing/__init__.py +++ b/Lib/multiprocessing/__init__.py @@ -23,8 +23,8 @@ 'Manager', 'Pipe', 'cpu_count', 'log_to_stderr', 'get_logger', 'allow_connection_pickling', 'BufferTooShort', 'TimeoutError', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition', - 'Event', 'Queue', 'SimpleQueue', 'JoinableQueue', 'Pool', 'Value', 'Array', - 'RawValue', 'RawArray', 'SUBDEBUG', 'SUBWARNING', + 'Event', 'Barrier', 'Queue', 'SimpleQueue', 'JoinableQueue', 'Pool', + 'Value', 'Array', 'RawValue', 'RawArray', 'SUBDEBUG', 'SUBWARNING', ] __author__ = 'R. 
Oudkerk (r.m.oudkerk at gmail.com)' @@ -186,6 +186,13 @@ from multiprocessing.synchronize import Event return Event() +def Barrier(parties, action=None, timeout=None): + ''' + Returns a barrier object + ''' + from multiprocessing.synchronize import Barrier + return Barrier(parties, action, timeout) + def Queue(maxsize=0): ''' Returns a queue object diff --git a/Lib/multiprocessing/dummy/__init__.py b/Lib/multiprocessing/dummy/__init__.py --- a/Lib/multiprocessing/dummy/__init__.py +++ b/Lib/multiprocessing/dummy/__init__.py @@ -35,7 +35,7 @@ __all__ = [ 'Process', 'current_process', 'active_children', 'freeze_support', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition', - 'Event', 'Queue', 'Manager', 'Pipe', 'Pool', 'JoinableQueue' + 'Event', 'Barrier', 'Queue', 'Manager', 'Pipe', 'Pool', 'JoinableQueue' ] # @@ -49,7 +49,7 @@ from multiprocessing.dummy.connection import Pipe from threading import Lock, RLock, Semaphore, BoundedSemaphore -from threading import Event, Condition +from threading import Event, Condition, Barrier from queue import Queue # diff --git a/Lib/multiprocessing/managers.py b/Lib/multiprocessing/managers.py --- a/Lib/multiprocessing/managers.py +++ b/Lib/multiprocessing/managers.py @@ -993,6 +993,26 @@ def wait(self, timeout=None): return self._callmethod('wait', (timeout,)) + +class BarrierProxy(BaseProxy): + _exposed_ = ('__getattribute__', 'wait', 'abort', 'reset') + def wait(self, timeout=None): + return self._callmethod('wait', (timeout,)) + def abort(self): + return self._callmethod('abort') + def reset(self): + return self._callmethod('reset') + @property + def parties(self): + return self._callmethod('__getattribute__', ('parties',)) + @property + def n_waiting(self): + return self._callmethod('__getattribute__', ('n_waiting',)) + @property + def broken(self): + return self._callmethod('__getattribute__', ('broken',)) + + class NamespaceProxy(BaseProxy): _exposed_ = ('__getattribute__', '__setattr__', '__delattr__') def __getattr__(self, key): @@ -1084,6 +1104,7 @@ SyncManager.register('BoundedSemaphore', threading.BoundedSemaphore, AcquirerProxy) SyncManager.register('Condition', threading.Condition, ConditionProxy) +SyncManager.register('Barrier', threading.Barrier, BarrierProxy) SyncManager.register('Pool', Pool, PoolProxy) SyncManager.register('list', list, ListProxy) SyncManager.register('dict', dict, DictProxy) diff --git a/Lib/multiprocessing/synchronize.py b/Lib/multiprocessing/synchronize.py --- a/Lib/multiprocessing/synchronize.py +++ b/Lib/multiprocessing/synchronize.py @@ -333,3 +333,43 @@ return False finally: self._cond.release() + +# +# Barrier +# + +class Barrier(threading.Barrier): + + def __init__(self, parties, action=None, timeout=None): + import struct + from multiprocessing.heap import BufferWrapper + wrapper = BufferWrapper(struct.calcsize('i') * 2) + cond = Condition() + self.__setstate__((parties, action, timeout, cond, wrapper)) + self._state = 0 + self._count = 0 + + def __setstate__(self, state): + (self._parties, self._action, self._timeout, + self._cond, self._wrapper) = state + self._array = self._wrapper.create_memoryview().cast('i') + + def __getstate__(self): + return (self._parties, self._action, self._timeout, + self._cond, self._wrapper) + + @property + def _state(self): + return self._array[0] + + @_state.setter + def _state(self, value): + self._array[0] = value + + @property + def _count(self): + return self._array[1] + + @_count.setter + def _count(self, value): + self._array[1] = value diff --git 
a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py --- a/Lib/test/test_multiprocessing.py +++ b/Lib/test/test_multiprocessing.py @@ -18,6 +18,7 @@ import socket import random import logging +import struct import test.support @@ -1057,6 +1058,336 @@ self.assertEqual(wait(), True) # +# Tests for Barrier - adapted from tests in test/lock_tests.py +# + +# Many of the tests for threading.Barrier use a list as an atomic +# counter: a value is appended to increment the counter, and the +# length of the list gives the value. We use the class DummyList +# for the same purpose. + +class _DummyList(object): + + def __init__(self): + wrapper = multiprocessing.heap.BufferWrapper(struct.calcsize('i')) + lock = multiprocessing.Lock() + self.__setstate__((wrapper, lock)) + self._lengthbuf[0] = 0 + + def __setstate__(self, state): + (self._wrapper, self._lock) = state + self._lengthbuf = self._wrapper.create_memoryview().cast('i') + + def __getstate__(self): + return (self._wrapper, self._lock) + + def append(self, _): + with self._lock: + self._lengthbuf[0] += 1 + + def __len__(self): + with self._lock: + return self._lengthbuf[0] + +def _wait(): + # A crude wait/yield function not relying on synchronization primitives. + time.sleep(0.01) + + +class Bunch(object): + """ + A bunch of threads. + """ + def __init__(self, namespace, f, args, n, wait_before_exit=False): + """ + Construct a bunch of `n` threads running the same function `f`. + If `wait_before_exit` is True, the threads won't terminate until + do_finish() is called. + """ + self.f = f + self.args = args + self.n = n + self.started = namespace.DummyList() + self.finished = namespace.DummyList() + self._can_exit = namespace.Value('i', not wait_before_exit) + for i in range(n): + namespace.Process(target=self.task).start() + + def task(self): + pid = os.getpid() + self.started.append(pid) + try: + self.f(*self.args) + finally: + self.finished.append(pid) + while not self._can_exit.value: + _wait() + + def wait_for_started(self): + while len(self.started) < self.n: + _wait() + + def wait_for_finished(self): + while len(self.finished) < self.n: + _wait() + + def do_finish(self): + self._can_exit.value = True + + +class AppendTrue(object): + def __init__(self, obj): + self.obj = obj + def __call__(self): + self.obj.append(True) + + +class _TestBarrier(BaseTestCase): + """ + Tests for Barrier objects. 
+ """ + N = 5 + defaultTimeout = 10.0 # XXX Slow Windows buildbots need generous timeout + + def setUp(self): + self.barrier = self.Barrier(self.N, timeout=self.defaultTimeout) + + def tearDown(self): + self.barrier.abort() + self.barrier = None + + def DummyList(self): + if self.TYPE == 'threads': + return [] + elif self.TYPE == 'manager': + return self.manager.list() + else: + return _DummyList() + + def run_threads(self, f, args): + b = Bunch(self, f, args, self.N-1) + f(*args) + b.wait_for_finished() + + @classmethod + def multipass(cls, barrier, results, n): + m = barrier.parties + assert m == cls.N + for i in range(n): + results[0].append(True) + assert len(results[1]) == i * m + barrier.wait() + results[1].append(True) + assert len(results[0]) == (i + 1) * m + barrier.wait() + try: + assert barrier.n_waiting == 0 + except NotImplementedError: + pass + assert not barrier.broken + + def test_barrier(self, passes=1): + """ + Test that a barrier is passed in lockstep + """ + results = [self.DummyList(), self.DummyList()] + self.run_threads(self.multipass, (self.barrier, results, passes)) + + def test_barrier_10(self): + """ + Test that a barrier works for 10 consecutive runs + """ + return self.test_barrier(10) + + @classmethod + def _test_wait_return_f(cls, barrier, queue): + res = barrier.wait() + queue.put(res) + + def test_wait_return(self): + """ + test the return value from barrier.wait + """ + queue = self.Queue() + self.run_threads(self._test_wait_return_f, (self.barrier, queue)) + results = [queue.get() for i in range(self.N)] + self.assertEqual(results.count(0), 1) + + @classmethod + def _test_action_f(cls, barrier, results): + barrier.wait() + if len(results) != 1: + raise RuntimeError + + def test_action(self): + """ + Test the 'action' callback + """ + results = self.DummyList() + barrier = self.Barrier(self.N, action=AppendTrue(results)) + self.run_threads(self._test_action_f, (barrier, results)) + self.assertEqual(len(results), 1) + + @classmethod + def _test_abort_f(cls, barrier, results1, results2): + try: + i = barrier.wait() + if i == cls.N//2: + raise RuntimeError + barrier.wait() + results1.append(True) + except threading.BrokenBarrierError: + results2.append(True) + except RuntimeError: + barrier.abort() + + def test_abort(self): + """ + Test that an abort will put the barrier in a broken state + """ + results1 = self.DummyList() + results2 = self.DummyList() + self.run_threads(self._test_abort_f, + (self.barrier, results1, results2)) + self.assertEqual(len(results1), 0) + self.assertEqual(len(results2), self.N-1) + self.assertTrue(self.barrier.broken) + + @classmethod + def _test_reset_f(cls, barrier, results1, results2, results3): + i = barrier.wait() + if i == cls.N//2: + # Wait until the other threads are all in the barrier. 
+ while barrier.n_waiting < cls.N-1: + time.sleep(0.001) + barrier.reset() + else: + try: + barrier.wait() + results1.append(True) + except threading.BrokenBarrierError: + results2.append(True) + # Now, pass the barrier again + barrier.wait() + results3.append(True) + + def test_reset(self): + """ + Test that a 'reset' on a barrier frees the waiting threads + """ + results1 = self.DummyList() + results2 = self.DummyList() + results3 = self.DummyList() + self.run_threads(self._test_reset_f, + (self.barrier, results1, results2, results3)) + self.assertEqual(len(results1), 0) + self.assertEqual(len(results2), self.N-1) + self.assertEqual(len(results3), self.N) + + @classmethod + def _test_abort_and_reset_f(cls, barrier, barrier2, + results1, results2, results3): + try: + i = barrier.wait() + if i == cls.N//2: + raise RuntimeError + barrier.wait() + results1.append(True) + except threading.BrokenBarrierError: + results2.append(True) + except RuntimeError: + barrier.abort() + # Synchronize and reset the barrier. Must synchronize first so + # that everyone has left it when we reset, and after so that no + # one enters it before the reset. + if barrier2.wait() == cls.N//2: + barrier.reset() + barrier2.wait() + barrier.wait() + results3.append(True) + + def test_abort_and_reset(self): + """ + Test that a barrier can be reset after being broken. + """ + results1 = self.DummyList() + results2 = self.DummyList() + results3 = self.DummyList() + barrier2 = self.Barrier(self.N) + + self.run_threads(self._test_abort_and_reset_f, + (self.barrier, barrier2, results1, results2, results3)) + self.assertEqual(len(results1), 0) + self.assertEqual(len(results2), self.N-1) + self.assertEqual(len(results3), self.N) + + @classmethod + def _test_timeout_f(cls, barrier, results): + i = barrier.wait(20) + if i == cls.N//2: + # One thread is late! 
+ time.sleep(4.0) + try: + barrier.wait(0.5) + except threading.BrokenBarrierError: + results.append(True) + + def test_timeout(self): + """ + Test wait(timeout) + """ + results = self.DummyList() + self.run_threads(self._test_timeout_f, (self.barrier, results)) + self.assertEqual(len(results), self.barrier.parties) + + @classmethod + def _test_default_timeout_f(cls, barrier, results): + i = barrier.wait(20) + if i == cls.N//2: + # One thread is later than the default timeout + time.sleep(4.0) + try: + barrier.wait() + except threading.BrokenBarrierError: + results.append(True) + + def test_default_timeout(self): + """ + Test the barrier's default timeout + """ + barrier = self.Barrier(self.N, timeout=1.0) + results = self.DummyList() + self.run_threads(self._test_default_timeout_f, (barrier, results)) + self.assertEqual(len(results), barrier.parties) + + def test_single_thread(self): + b = self.Barrier(1) + b.wait() + b.wait() + + @classmethod + def _test_thousand_f(cls, barrier, passes, conn, lock): + for i in range(passes): + barrier.wait() + with lock: + conn.send(i) + + def test_thousand(self): + if self.TYPE == 'manager': + return + passes = 1000 + lock = self.Lock() + conn, child_conn = self.Pipe(False) + for j in range(self.N): + p = self.Process(target=self._test_thousand_f, + args=(self.barrier, passes, child_conn, lock)) + p.start() + + for i in range(passes): + for j in range(self.N): + self.assertEqual(conn.recv(), i) + +# # # @@ -2532,7 +2863,7 @@ Process = multiprocessing.Process locals().update(get_attributes(multiprocessing, ( 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', - 'Condition', 'Event', 'Value', 'Array', 'RawValue', + 'Condition', 'Event', 'Barrier', 'Value', 'Array', 'RawValue', 'RawArray', 'current_process', 'active_children', 'Pipe', 'connection', 'JoinableQueue', 'Pool' ))) @@ -2547,7 +2878,7 @@ manager = object.__new__(multiprocessing.managers.SyncManager) locals().update(get_attributes(manager, ( 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', - 'Condition', 'Event', 'Value', 'Array', 'list', 'dict', + 'Condition', 'Event', 'Barrier', 'Value', 'Array', 'list', 'dict', 'Namespace', 'JoinableQueue', 'Pool' ))) @@ -2560,7 +2891,7 @@ Process = multiprocessing.dummy.Process locals().update(get_attributes(multiprocessing.dummy, ( 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', - 'Condition', 'Event', 'Value', 'Array', 'current_process', + 'Condition', 'Event', 'Barrier', 'Value', 'Array', 'current_process', 'active_children', 'Pipe', 'connection', 'dict', 'list', 'Namespace', 'JoinableQueue', 'Pool' ))) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -21,6 +21,8 @@ Library ------- +- Issue #14059: Implement multiprocessing.Barrier. + - Issue #15061: The inappropriately named hmac.secure_compare has been renamed to hmac.compare_digest, restricted to operating on bytes inputs only and had its documentation updated to more accurately reflect both its -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 15 19:56:27 2012 From: python-checkins at python.org (brett.cannon) Date: Fri, 15 Jun 2012 19:56:27 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Update_from_Yury=2E?= Message-ID: http://hg.python.org/peps/rev/c1f693b39292 changeset: 4461:c1f693b39292 user: Brett Cannon date: Fri Jun 15 13:56:20 2012 -0400 summary: Update from Yury. 
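To illustrate the multiprocessing.Barrier feature added by #14059 above, a minimal usage sketch; the process count and function names are illustrative, and the barrier is passed to the child processes as an argument, just as the new tests do::

    from multiprocessing import Barrier, Process

    def worker(barrier, i):
        # per-process setup would go here
        barrier.wait()          # block until all parties have arrived
        print('process', i, 'released together')

    if __name__ == '__main__':
        barrier = Barrier(3)    # a clone of threading.Barrier, shareable between processes
        workers = [Process(target=worker, args=(barrier, i)) for i in range(3)]
        for p in workers:
            p.start()
        for p in workers:
            p.join()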
files: pep-0362.txt | 164 +++++++++++++++++++++++++++++--------- 1 files changed, 126 insertions(+), 38 deletions(-) diff --git a/pep-0362.txt b/pep-0362.txt --- a/pep-0362.txt +++ b/pep-0362.txt @@ -51,12 +51,13 @@ as listed in ``code.co_varnames``). * bind(\*args, \*\*kwargs) -> BoundArguments Creates a mapping from positional and keyword arguments to - parameters. Raises a ``BindError`` (subclass of ``TypeError``) - if the passed arguments do not match the signature. + parameters. Raises a ``TypeError`` if the passed arguments do + not match the signature. * bind_partial(\*args, \*\*kwargs) -> BoundArguments Works the same way as ``bind()``, but allows the omission of some required arguments (mimics ``functools.partial`` - behavior.) + behavior.) Raises a ``TypeError`` if the passed arguments do + not match the signature. * format(...) -> str Formats the Signature object to a string. Optional arguments allow for custom render functions for parameter names, @@ -84,27 +85,53 @@ * name : str The name of the parameter as a string. + * default : object The default value for the parameter, if specified. If the parameter has no default value, this attribute is not set. + * annotation : object The annotation for the parameter if specified. If the parameter has no annotation, this attribute is not set. -* is_keyword_only : bool - True if the parameter is keyword-only, else False. -* is_args : bool - True if the parameter accepts variable number of arguments - (``*args``-like), else False. -* is_kwargs : bool - True if the parameter accepts variable number of keyword - arguments (``**kwargs``-like), else False. -* is_implemented : bool + +* kind : str + Describes how argument values are bound to the parameter. + Possible values: + + * ``Parameter.POSITIONAL_ONLY`` - value must be supplied + as a positional argument. + + Python has no explicit syntax for defining positional-only + parameters, but many builtin and extension module functions + (especially those that accept only one or two parameters) + accept them. + + * ``Parameter.POSITIONAL_OR_KEYWORD`` - value may be + supplied as either a keyword or positional argument + (this is the standard binding behaviour for functions + implemented in Python.) + + * ``Parameter.KEYWORD_ONLY`` - value must be supplied + as a keyword argument. Keyword only parameters are those + which appear after a "*" or "\*args" entry in a Python + function definition. + + * ``Parameter.VAR_POSITIONAL`` - a tuple of positional + arguments that aren't bound to any other parameter. + This corresponds to a "\*args" parameter in a Python + function definition. + + * ``Parameter.VAR_KEYWORD`` - a dict of keyword arguments + that aren't bound to any other parameter. This corresponds + to a "\*\*kwds" parameter in a Python function definition. + +* implemented : bool True if the parameter is implemented for use. Some platforms implement functions but can't support specific parameters (e.g. "mode" for ``os.mkdir``). Passing in an unimplemented parameter may result in the parameter being ignored, or in NotImplementedError being raised. It is intended that - all conditions where ``is_implemented`` may be False be + all conditions where ``implemented`` may be False be thoroughly documented. Two parameters are equal when all their attributes are equal. 
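As a rough illustration of the bind() behaviour described in the PEP text above, assuming the reference implementation the PEP describes; the function and argument values are made up for the example::

    from inspect import signature

    def connect(host, port=80, *, timeout=None):
        pass

    sig = signature(connect)

    bound = sig.bind('example.com', timeout=5.0)
    # bound.arguments maps parameter names to the supplied values,
    # roughly {'host': 'example.com', 'timeout': 5.0}

    try:
        sig.bind(port=8080)     # 'host' is missing
    except TypeError:
        print('bind() rejects argument sets that do not match the signature')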
@@ -159,12 +186,11 @@ - If it is ``None`` and the object is an instance of ``BuiltinFunction``, raise a ``ValueError`` - - If the object is a an instance of ``FunctionType``: + - If it has a ``__wrapped__`` attribute, return + ``signature(object.__wrapped__)`` - - If it has a ``__wrapped__`` attribute, return - ``signature(object.__wrapped__)`` - - - Or else construct a new ``Signature`` object and return it + - If the object is a an instance of ``FunctionType`` construct + and return a new ``Signature`` for it - If the object is a method or a classmethod, construct and return a new ``Signature`` object, with its first parameter (usually @@ -223,6 +249,9 @@ Visualizing Callable Objects' Signature --------------------------------------- + +Let's define some classes and functions: + :: from inspect import signature @@ -245,25 +274,62 @@ return a, b, c - print('FooMeta >', str(signature(FooMeta))) - print('Foo >', str(signature(Foo))) - print('Foo.__call__ >', str(signature(Foo.__call__))) - print('Foo().__call__ >', str(signature(Foo().__call__))) - print('partial(Foo().__call__, 1, c=3) >', - str(signature(partial(Foo().__call__, 1, c=3)))) - print('partial(partial(Foo().__call__, 1, c=3), 2, c=20) >', - str(signature(partial(partial(Foo().__call__, 1, c=3), 2, c=20)))) + def shared_vars(*shared_args): + """Decorator factory that defines shared variables that are + passed to every invocation of the function""" + def decorator(f): + @wraps(f) + def wrapper(*args, **kwds): + full_args = shared_args + args + return f(*full_args, **kwds) + # Override signature + sig = wrapper.__signature__ = signature(f) + for __ in shared_args: + sig.parameters.popitem(last=False) + return wrapper + return decorator -The script will output: + + @shared_vars({}) + def example(_state, a, b, c): + return _state, a, b, c + + + def format_signature(obj): + return str(signature(obj)) + + +Now, in the python REPL: + :: - FooMeta > (name, bases, dct, *, bar:bool=False) - Foo > (spam:int=42) - Foo.__call__ > (self, a, b, *, c) -> tuple - Foo().__call__ > (a, b, *, c) -> tuple - partial(Foo().__call__, 1, c=3) > (b, *, c=3) -> tuple - partial(partial(Foo().__call__, 1, c=3), 2, c=20) > (*, c=20) -> tuple + >>> format_signature(FooMeta) + '(name, bases, dct, *, bar:bool=False)' + + >>> format_signature(Foo) + '(spam:int=42)' + + >>> format_signature(Foo.__call__) + '(self, a, b, *, c) -> tuple' + + >>> format_signature(Foo().__call__) + '(a, b, *, c) -> tuple' + + >>> format_signature(partial(Foo().__call__, 1, c=3)) + '(b, *, c=3) -> tuple' + + >>> format_signature(partial(partial(Foo().__call__, 1, c=3), 2, c=20)) + '(*, c=20) -> tuple' + + >>> format_signature(example) + '(a, b, c)' + + >>> format_signature(partial(example, 1, 2)) + '(c)' + + >>> format_signature(partial(partial(example, 1, b=2), c=3)) + '(b=2, c=3)' Annotation Checker @@ -317,14 +383,14 @@ else: if not isinstance(default, type_): raise ValueError("{func}: wrong type of a default value for {arg!r}". \ - format(func=sig.qualname, arg=param.name)) + format(func=func.__qualname__, arg=param.name)) def check_type(sig, arg_name, arg_type, arg_value): # Internal function that encapsulates arguments type checking if not isinstance(arg_value, arg_type): raise ValueError("{func}: wrong type of {arg!r} argument, " \ "{exp!r} expected, got {got!r}". 
\ - format(func=sig.qualname, arg=arg_name, + format(func=func.__qualname__, arg=arg_name, exp=arg_type.__name__, got=type(arg_value).__name__)) @functools.wraps(func) @@ -341,12 +407,12 @@ # OK, we have a type for the argument, lets get the corresponding # parameter description from the signature object param = sig.parameters[arg_name] - if param.is_args: + if param.kind == param.VAR_POSITIONAL: # If this parameter is a variable-argument parameter, # then we need to check each of its values for value in arg: check_type(sig, arg_name, type_, value) - elif param.is_kwargs: + elif param.kind == param.VAR_KEYWORD: # If this parameter is a variable-keyword-argument parameter: for subname, value in arg.items(): check_type(sig, arg_name + ':' + subname, type_, value) @@ -364,13 +430,35 @@ else: if isinstance(return_type, type) and not isinstance(result, return_type): raise ValueError('{func}: wrong return type, {exp} expected, got {got}'. \ - format(func=sig.qualname, exp=return_type.__name__, + format(func=func.__qualname__, exp=return_type.__name__, got=type(result).__name__)) return result return wrapper +Render Function Signature to HTML +--------------------------------- + +:: + + import inspect + + def format_to_html(func): + sig = inspect.signature(func) + + html = sig.format(token_params_separator=',', + token_colon=':', + token_eq='=', + token_return_annotation='->', + token_left_paren='(', + token_right_paren=')', + token_kwonly_separator='*', + format_name=lambda name: ''+name+'') + + return '{}'.format(html) + + References ========== -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Fri Jun 15 20:04:00 2012 From: python-checkins at python.org (petri.lehtinen) Date: Fri, 15 Jun 2012 20:04:00 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogIzE1MDM2OiBNYWtl?= =?utf8?q?_a_repeated_changes_and_flushes_work_with_single-file_mailboxes?= Message-ID: http://hg.python.org/cpython/rev/0add70dd3c43 changeset: 77443:0add70dd3c43 branch: 2.7 parent: 77441:0ac1f90954dc user: Petri Lehtinen date: Fri Jun 15 20:50:51 2012 +0300 summary: #15036: Make a repeated changes and flushes work with single-file mailboxes files: Lib/mailbox.py | 1 + Lib/test/test_mailbox.py | 11 +++++++++++ Misc/NEWS | 4 ++++ 3 files changed, 16 insertions(+), 0 deletions(-) diff --git a/Lib/mailbox.py b/Lib/mailbox.py --- a/Lib/mailbox.py +++ b/Lib/mailbox.py @@ -649,6 +649,7 @@ new_file.write(buffer) new_toc[key] = (new_start, new_file.tell()) self._post_message_hook(new_file) + self._file_length = new_file.tell() except: new_file.close() os.remove(new_file.name) diff --git a/Lib/test/test_mailbox.py b/Lib/test/test_mailbox.py --- a/Lib/test/test_mailbox.py +++ b/Lib/test/test_mailbox.py @@ -386,6 +386,17 @@ # Write changes to disk self._test_flush_or_close(self._box.flush, True) + def test_popitem_and_flush_twice(self): + # See #15036. + self._box.add(self._template % 0) + self._box.add(self._template % 1) + self._box.flush() + + self._box.popitem() + self._box.flush() + self._box.popitem() + self._box.flush() + def test_lock_unlock(self): # Lock and unlock the mailbox self.assertFalse(os.path.exists(self._get_lock_path())) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -67,6 +67,10 @@ Library ------- +- Issue #15036: Allow removing or changing multiple items in + single-file mailboxes (mbox, MMDF, Babyl) flushing the mailbox + between the changes. + - Issue #10133: Make multiprocessing deallocate buffer if socket read fails. Patch by Hallvard B Furuseth. 
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 15 20:04:01 2012 From: python-checkins at python.org (petri.lehtinen) Date: Fri, 15 Jun 2012 20:04:01 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogIzE1MDM2OiBNYWtl?= =?utf8?q?_a_repeated_changes_and_flushes_work_with_single-file_mailboxes?= Message-ID: http://hg.python.org/cpython/rev/714b8f91f3d4 changeset: 77444:714b8f91f3d4 branch: 3.2 parent: 77439:69177ff1a643 user: Petri Lehtinen date: Fri Jun 15 20:50:51 2012 +0300 summary: #15036: Make a repeated changes and flushes work with single-file mailboxes files: Lib/mailbox.py | 1 + Lib/test/test_mailbox.py | 11 +++++++++++ Misc/NEWS | 4 ++++ 3 files changed, 16 insertions(+), 0 deletions(-) diff --git a/Lib/mailbox.py b/Lib/mailbox.py --- a/Lib/mailbox.py +++ b/Lib/mailbox.py @@ -675,6 +675,7 @@ new_file.write(buffer) new_toc[key] = (new_start, new_file.tell()) self._post_message_hook(new_file) + self._file_length = new_file.tell() except: new_file.close() os.remove(new_file.name) diff --git a/Lib/test/test_mailbox.py b/Lib/test/test_mailbox.py --- a/Lib/test/test_mailbox.py +++ b/Lib/test/test_mailbox.py @@ -500,6 +500,17 @@ # Write changes to disk self._test_flush_or_close(self._box.flush, True) + def test_popitem_and_flush_twice(self): + # See #15036. + self._box.add(self._template % 0) + self._box.add(self._template % 1) + self._box.flush() + + self._box.popitem() + self._box.flush() + self._box.popitem() + self._box.flush() + def test_lock_unlock(self): # Lock and unlock the mailbox self.assertFalse(os.path.exists(self._get_lock_path())) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -70,6 +70,10 @@ Library ------- +- Issue #15036: Allow removing or changing multiple items in + single-file mailboxes (mbox, MMDF, Babyl) flushing the mailbox + between the changes. + - Issue #10133: Make multiprocessing deallocate buffer if socket read fails. Patch by Hallvard B Furuseth. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 15 20:04:02 2012 From: python-checkins at python.org (petri.lehtinen) Date: Fri, 15 Jun 2012 20:04:02 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_=2315036=3A_Make_a_repeated_changes_and_flushes_work_with_si?= =?utf8?q?ngle-file_mailboxes?= Message-ID: http://hg.python.org/cpython/rev/87d119117560 changeset: 77445:87d119117560 parent: 77442:2d2f206d040e parent: 77444:714b8f91f3d4 user: Petri Lehtinen date: Fri Jun 15 21:01:39 2012 +0300 summary: #15036: Make a repeated changes and flushes work with single-file mailboxes files: Lib/mailbox.py | 1 + Lib/test/test_mailbox.py | 11 +++++++++++ Misc/NEWS | 4 ++++ 3 files changed, 16 insertions(+), 0 deletions(-) diff --git a/Lib/mailbox.py b/Lib/mailbox.py --- a/Lib/mailbox.py +++ b/Lib/mailbox.py @@ -675,6 +675,7 @@ new_file.write(buffer) new_toc[key] = (new_start, new_file.tell()) self._post_message_hook(new_file) + self._file_length = new_file.tell() except: new_file.close() os.remove(new_file.name) diff --git a/Lib/test/test_mailbox.py b/Lib/test/test_mailbox.py --- a/Lib/test/test_mailbox.py +++ b/Lib/test/test_mailbox.py @@ -504,6 +504,17 @@ # Write changes to disk self._test_flush_or_close(self._box.flush, True) + def test_popitem_and_flush_twice(self): + # See #15036. 
+ self._box.add(self._template % 0) + self._box.add(self._template % 1) + self._box.flush() + + self._box.popitem() + self._box.flush() + self._box.popitem() + self._box.flush() + def test_lock_unlock(self): # Lock and unlock the mailbox self.assertFalse(os.path.exists(self._get_lock_path())) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -21,6 +21,10 @@ Library ------- +- Issue #15036: Allow removing or changing multiple items in + single-file mailboxes (mbox, MMDF, Babyl) flushing the mailbox + between the changes. + - Issue #14059: Implement multiprocessing.Barrier. - Issue #15061: The inappropriately named hmac.secure_compare has been -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 15 20:24:14 2012 From: python-checkins at python.org (richard.oudkerk) Date: Fri, 15 Jun 2012 20:24:14 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_for_2d2f206d040e_so_tha?= =?utf8?q?t_test=5Fmultiprocessing_does_not_depend_on_ctypes?= Message-ID: http://hg.python.org/cpython/rev/31b17246e959 changeset: 77446:31b17246e959 user: Richard Oudkerk date: Fri Jun 15 19:18:30 2012 +0100 summary: Fix for 2d2f206d040e so that test_multiprocessing does not depend on ctypes files: Lib/test/test_multiprocessing.py | 14 +++++++++----- 1 files changed, 9 insertions(+), 5 deletions(-) diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py --- a/Lib/test/test_multiprocessing.py +++ b/Lib/test/test_multiprocessing.py @@ -1109,9 +1109,13 @@ self.n = n self.started = namespace.DummyList() self.finished = namespace.DummyList() - self._can_exit = namespace.Value('i', not wait_before_exit) + self._can_exit = namespace.Event() + if not wait_before_exit: + self._can_exit.set() for i in range(n): - namespace.Process(target=self.task).start() + p = namespace.Process(target=self.task) + p.daemon = True + p.start() def task(self): pid = os.getpid() @@ -1120,8 +1124,8 @@ self.f(*self.args) finally: self.finished.append(pid) - while not self._can_exit.value: - _wait() + self._can_exit.wait(30) + assert self._can_exit.is_set() def wait_for_started(self): while len(self.started) < self.n: @@ -1132,7 +1136,7 @@ _wait() def do_finish(self): - self._can_exit.value = True + self._can_exit.set() class AppendTrue(object): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 15 21:13:52 2012 From: python-checkins at python.org (richard.oudkerk) Date: Fri, 15 Jun 2012 21:13:52 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Increase_timeout_used_when_?= =?utf8?q?waiting_for_manager_to_shutdown_cleanly?= Message-ID: http://hg.python.org/cpython/rev/831ae71d0bdc changeset: 77447:831ae71d0bdc user: Richard Oudkerk date: Fri Jun 15 20:08:29 2012 +0100 summary: Increase timeout used when waiting for manager to shutdown cleanly before resorting to terminate() files: Lib/multiprocessing/managers.py | 2 +- Lib/test/test_multiprocessing.py | 5 +++++ 2 files changed, 6 insertions(+), 1 deletions(-) diff --git a/Lib/multiprocessing/managers.py b/Lib/multiprocessing/managers.py --- a/Lib/multiprocessing/managers.py +++ b/Lib/multiprocessing/managers.py @@ -582,7 +582,7 @@ except Exception: pass - process.join(timeout=0.2) + process.join(timeout=1.0) if process.is_alive(): util.info('manager still alive') if hasattr(process, 'terminate'): diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py --- a/Lib/test/test_multiprocessing.py +++ b/Lib/test/test_multiprocessing.py @@ -1820,6 +1820,11 
@@ # run after all the other tests for the manager. It tests that # there have been no "reference leaks" for the manager's shared # objects. Note the comment in _TestPool.test_terminate(). + + # If some other test using ManagerMixin.manager fails, then the + # raised exception may keep alive a frame which holds a reference + # to a managed object. This will cause test_number_of_objects to + # also fail. ALLOWED_TYPES = ('manager',) def test_number_of_objects(self): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 15 22:18:31 2012 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 15 Jun 2012 22:18:31 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315026=3A_utf-16_en?= =?utf8?q?coding_is_now_significantly_faster_=28up_to_10x=29=2E?= Message-ID: http://hg.python.org/cpython/rev/acca141fda80 changeset: 77448:acca141fda80 user: Antoine Pitrou date: Fri Jun 15 22:15:23 2012 +0200 summary: Issue #15026: utf-16 encoding is now significantly faster (up to 10x). Patch by Serhiy Storchaka. files: Include/unicodeobject.h | 4 +- Misc/NEWS | 3 + Objects/stringlib/codecs.h | 64 +++++++++++++++++++ Objects/unicodeobject.c | 86 ++++++++++--------------- 4 files changed, 105 insertions(+), 52 deletions(-) diff --git a/Include/unicodeobject.h b/Include/unicodeobject.h --- a/Include/unicodeobject.h +++ b/Include/unicodeobject.h @@ -188,9 +188,9 @@ (((((Py_UCS4)(high) & 0x03FF) << 10) | \ ((Py_UCS4)(low) & 0x03FF)) + 0x10000) /* high surrogate = top 10 bits added to D800 */ -#define Py_UNICODE_HIGH_SURROGATE(ch) (0xD800 | (((ch) - 0x10000) >> 10)) +#define Py_UNICODE_HIGH_SURROGATE(ch) (0xD800 - (0x10000 >> 10) + ((ch) >> 10)) /* low surrogate = bottom 10 bits added to DC00 */ -#define Py_UNICODE_LOW_SURROGATE(ch) (0xDC00 | (((ch) - 0x10000) & 0x3FF)) +#define Py_UNICODE_LOW_SURROGATE(ch) (0xDC00 + ((ch) & 0x3FF)) /* Check if substring matches at given offset. The offset must be valid, and the substring must not be empty. */ diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,9 @@ Core and Builtins ----------------- +- Issue #15026: utf-16 encoding is now significantly faster (up to 10x). + Patch by Serhiy Storchaka. + - Issue #11022: open() and io.TextIOWrapper are now calling locale.getpreferredencoding(False) instead of locale.getpreferredencoding() in text mode if the encoding is not specified. 
Don't change temporary the diff --git a/Objects/stringlib/codecs.h b/Objects/stringlib/codecs.h --- a/Objects/stringlib/codecs.h +++ b/Objects/stringlib/codecs.h @@ -562,4 +562,68 @@ #undef STRIPPED_MASK #undef SWAB #undef LONG_PTR_MASK + + +Py_LOCAL_INLINE(void) +STRINGLIB(utf16_encode)(unsigned short *out, + const STRINGLIB_CHAR *in, + Py_ssize_t len, + int native_ordering) +{ + const STRINGLIB_CHAR *end = in + len; +#if STRINGLIB_SIZEOF_CHAR == 1 +# define SWAB2(CH) ((CH) << 8) +#else +# define SWAB2(CH) (((CH) << 8) | ((CH) >> 8)) +#endif +#if STRINGLIB_MAX_CHAR < 0x10000 + if (native_ordering) { +# if STRINGLIB_SIZEOF_CHAR == 2 + Py_MEMCPY(out, in, 2 * len); +# else + _PyUnicode_CONVERT_BYTES(STRINGLIB_CHAR, unsigned short, in, end, out); +# endif + } else { + const STRINGLIB_CHAR *unrolled_end = in + (len & ~ (Py_ssize_t) 3); + while (in < unrolled_end) { + out[0] = SWAB2(in[0]); + out[1] = SWAB2(in[1]); + out[2] = SWAB2(in[2]); + out[3] = SWAB2(in[3]); + in += 4; out += 4; + } + while (in < end) { + *out++ = SWAB2(*in); + ++in; + } + } +#else + if (native_ordering) { + while (in < end) { + Py_UCS4 ch = *in++; + if (ch < 0x10000) + *out++ = ch; + else { + out[0] = Py_UNICODE_HIGH_SURROGATE(ch); + out[1] = Py_UNICODE_LOW_SURROGATE(ch); + out += 2; + } + } + } else { + while (in < end) { + Py_UCS4 ch = *in++; + if (ch < 0x10000) + *out++ = SWAB2((Py_UCS2)ch); + else { + Py_UCS2 ch1 = Py_UNICODE_HIGH_SURROGATE(ch); + Py_UCS2 ch2 = Py_UNICODE_LOW_SURROGATE(ch); + out[0] = SWAB2(ch1); + out[1] = SWAB2(ch2); + out += 2; + } + } + } +#endif +#undef SWAB2 +} #endif /* STRINGLIB_IS_UNICODE */ diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -5359,26 +5359,18 @@ const char *errors, int byteorder) { - int kind; - void *data; + enum PyUnicode_Kind kind; + const void *data; Py_ssize_t len; PyObject *v; - unsigned char *p; - Py_ssize_t nsize, bytesize; - Py_ssize_t i, pairs; - /* Offsets from p for storing byte pairs in the right order. 
*/ -#ifdef BYTEORDER_IS_LITTLE_ENDIAN - int ihi = 1, ilo = 0; + unsigned short *out; + Py_ssize_t bytesize; + Py_ssize_t pairs; +#ifdef WORDS_BIGENDIAN + int native_ordering = byteorder >= 0; #else - int ihi = 0, ilo = 1; -#endif - -#define STORECHAR(CH) \ - do { \ - p[ihi] = ((CH) >> 8) & 0xff; \ - p[ilo] = (CH) & 0xff; \ - p += 2; \ - } while(0) + int native_ordering = byteorder <= 0; +#endif if (!PyUnicode_Check(str)) { PyErr_BadArgument(); @@ -5391,53 +5383,47 @@ len = PyUnicode_GET_LENGTH(str); pairs = 0; - if (kind == PyUnicode_4BYTE_KIND) - for (i = 0; i < len; i++) - if (PyUnicode_READ(kind, data, i) >= 0x10000) + if (kind == PyUnicode_4BYTE_KIND) { + const Py_UCS4 *in = (const Py_UCS4 *)data; + const Py_UCS4 *end = in + len; + while (in < end) + if (*in++ >= 0x10000) pairs++; - /* 2 * (len + pairs + (byteorder == 0)) */ - if (len > PY_SSIZE_T_MAX - pairs - (byteorder == 0)) + } + if (len > PY_SSIZE_T_MAX / 2 - pairs - (byteorder == 0)) return PyErr_NoMemory(); - nsize = len + pairs + (byteorder == 0); - bytesize = nsize * 2; - if (bytesize / 2 != nsize) - return PyErr_NoMemory(); + bytesize = (len + pairs + (byteorder == 0)) * 2; v = PyBytes_FromStringAndSize(NULL, bytesize); if (v == NULL) return NULL; - p = (unsigned char *)PyBytes_AS_STRING(v); + /* output buffer is 2-bytes aligned */ + assert(((Py_uintptr_t)PyBytes_AS_STRING(v) & 1) == 0); + out = (unsigned short *)PyBytes_AS_STRING(v); if (byteorder == 0) - STORECHAR(0xFEFF); + *out++ = 0xFEFF; if (len == 0) goto done; - if (byteorder == -1) { - /* force LE */ - ihi = 1; - ilo = 0; - } - else if (byteorder == 1) { - /* force BE */ - ihi = 0; - ilo = 1; - } - - for (i = 0; i < len; i++) { - Py_UCS4 ch = PyUnicode_READ(kind, data, i); - Py_UCS4 ch2 = 0; - if (ch >= 0x10000) { - ch2 = Py_UNICODE_LOW_SURROGATE(ch); - ch = Py_UNICODE_HIGH_SURROGATE(ch); - } - STORECHAR(ch); - if (ch2) - STORECHAR(ch2); + switch (kind) { + case PyUnicode_1BYTE_KIND: { + ucs1lib_utf16_encode(out, (const Py_UCS1 *)data, len, native_ordering); + break; + } + case PyUnicode_2BYTE_KIND: { + ucs2lib_utf16_encode(out, (const Py_UCS2 *)data, len, native_ordering); + break; + } + case PyUnicode_4BYTE_KIND: { + ucs4lib_utf16_encode(out, (const Py_UCS4 *)data, len, native_ordering); + break; + } + default: + assert(0); } done: return v; -#undef STORECHAR } PyObject * -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 15 22:25:25 2012 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 15 Jun 2012 22:25:25 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Mention_the_UTF-16_encoding?= =?utf8?q?_speedup_in_the_whatsnew_=28issue_=2315026=29=2E?= Message-ID: http://hg.python.org/cpython/rev/35667fc5f785 changeset: 77449:35667fc5f785 user: Antoine Pitrou date: Fri Jun 15 22:22:18 2012 +0200 summary: Mention the UTF-16 encoding speedup in the whatsnew (issue #15026). files: Doc/whatsnew/3.3.rst | 6 ++++-- 1 files changed, 4 insertions(+), 2 deletions(-) diff --git a/Doc/whatsnew/3.3.rst b/Doc/whatsnew/3.3.rst --- a/Doc/whatsnew/3.3.rst +++ b/Doc/whatsnew/3.3.rst @@ -1484,9 +1484,11 @@ * repeating a single ASCII letter and getting a substring of a ASCII strings is 4 times faster -* UTF-8 and UTF-16 decoding is now 2x to 4x faster. +* UTF-8 and UTF-16 decoding is now 2x to 4x faster. UTF-16 encoding is now + up to 10x faster. - (contributed by Serhiy Storchaka, :issue:`14624` and :issue:`14738`.) + (contributed by Serhiy Storchaka, :issue:`14624`, :issue:`14738` and + :issue:`15026`.) 
Build and C API Changes -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 15 23:00:21 2012 From: python-checkins at python.org (richard.oudkerk) Date: Fri, 15 Jun 2012 23:00:21 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_=5FTestListener=2EALLOW?= =?utf8?q?ED=5FTYPES_and_add_sanity_check?= Message-ID: http://hg.python.org/cpython/rev/4c704dc97496 changeset: 77450:4c704dc97496 user: Richard Oudkerk date: Fri Jun 15 21:53:34 2012 +0100 summary: Fix _TestListener.ALLOWED_TYPES and add sanity check files: Lib/test/test_multiprocessing.py | 4 +++- 1 files changed, 3 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py --- a/Lib/test/test_multiprocessing.py +++ b/Lib/test/test_multiprocessing.py @@ -2268,7 +2268,7 @@ class _TestListener(BaseTestCase): - ALLOWED_TYPES = ('processes') + ALLOWED_TYPES = ('processes',) def test_multiple_bind(self): for family in self.connection.families: @@ -2850,10 +2850,12 @@ result = {} glob = globals() Type = type.capitalize() + ALL_TYPES = {'processes', 'threads', 'manager'} for name in list(glob.keys()): if name.startswith('_Test'): base = glob[name] + assert set(base.ALLOWED_TYPES) <= ALL_TYPES, set(base.ALLOWED_TYPES) if type in base.ALLOWED_TYPES: newname = 'With' + Type + name[1:] class Temp(base, unittest.TestCase, Mixin): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 16 00:15:40 2012 From: python-checkins at python.org (alexander.belopolsky) Date: Sat, 16 Jun 2012 00:15:40 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Removed_redundant_code?= Message-ID: http://hg.python.org/cpython/rev/14028bdd80bc changeset: 77451:14028bdd80bc user: Alexander Belopolsky date: Fri Jun 15 18:15:25 2012 -0400 summary: Removed redundant code files: Lib/datetime.py | 6 ++---- 1 files changed, 2 insertions(+), 4 deletions(-) diff --git a/Lib/datetime.py b/Lib/datetime.py --- a/Lib/datetime.py +++ b/Lib/datetime.py @@ -1670,10 +1670,8 @@ if mytz is ottz: base_compare = True else: - if mytz is not None: - myoff = self.utcoffset() - if ottz is not None: - otoff = other.utcoffset() + myoff = self.utcoffset() + otoff = other.utcoffset() base_compare = myoff == otoff if base_compare: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 16 01:05:32 2012 From: python-checkins at python.org (brett.cannon) Date: Sat, 16 Jun 2012 01:05:32 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Make_a_test_easier_to_read?= =?utf8?q?=2E?= Message-ID: http://hg.python.org/cpython/rev/b03dcd25ced8 changeset: 77452:b03dcd25ced8 parent: 77410:d9d382b7670a user: Brett Cannon date: Mon Jun 11 11:02:36 2012 -0400 summary: Make a test easier to read. 
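A side note on the _TestListener fix above (ALLOWED_TYPES = ('processes',)): parentheses alone do not create a tuple, which is why the trailing comma matters for the membership test used when the test classes are generated::

    >>> type(('processes'))           # just a parenthesised string
    <class 'str'>
    >>> type(('processes',))          # the trailing comma makes it a tuple
    <class 'tuple'>
    >>> 'threads' in ('processes')    # substring test against a str
    False
    >>> 'esses' in ('processes')      # ...which can match unintended values
    True
    >>> 'threads' in ('processes',)   # membership test against a tuple
    False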
files: Lib/test/test_reprlib.py | 7 +++---- 1 files changed, 3 insertions(+), 4 deletions(-) diff --git a/Lib/test/test_reprlib.py b/Lib/test/test_reprlib.py --- a/Lib/test/test_reprlib.py +++ b/Lib/test/test_reprlib.py @@ -251,13 +251,12 @@ def test_module(self): self._check_path_limitations(self.pkgname) - eq = self.assertEqual create_empty_file(os.path.join(self.subpkgname, self.pkgname + '.py')) importlib.invalidate_caches() from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import areallylongpackageandmodulenametotestreprtruncation - eq(repr(areallylongpackageandmodulenametotestreprtruncation), - "" % (areallylongpackageandmodulenametotestreprtruncation.__name__, areallylongpackageandmodulenametotestreprtruncation.__file__)) - eq(repr(sys), "") + module = areallylongpackageandmodulenametotestreprtruncation + self.assertEqual(repr(module), "" % (module.__name__, module.__file__)) + self.assertEqual(repr(sys), "") def test_type(self): self._check_path_limitations('foo') -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 16 01:05:33 2012 From: python-checkins at python.org (brett.cannon) Date: Sat, 16 Jun 2012 01:05:33 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Update_importlib=2Eh_by_tou?= =?utf8?q?ching_Lib/importlib/=5Fbootstrap=2Epy=2E?= Message-ID: http://hg.python.org/cpython/rev/0cb3a518116c changeset: 77453:0cb3a518116c user: Brett Cannon date: Mon Jun 11 11:02:53 2012 -0400 summary: Update importlib.h by touching Lib/importlib/_bootstrap.py. files: Python/importlib.h | Bin 1 files changed, 0 insertions(+), 0 deletions(-) diff --git a/Python/importlib.h b/Python/importlib.h index 224e28c029a7dc4b51e87b6f07a9f213f8d2bcf2..15d0ba4bbecf831fd80c35507509ed2721568f19 GIT binary patch [stripped] -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 16 01:05:34 2012 From: python-checkins at python.org (brett.cannon) Date: Sat, 16 Jun 2012 01:05:34 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_Merge?= Message-ID: http://hg.python.org/cpython/rev/7b8d3b7fd606 changeset: 77454:7b8d3b7fd606 parent: 77453:0cb3a518116c parent: 77451:14028bdd80bc user: Brett Cannon date: Fri Jun 15 19:04:29 2012 -0400 summary: Merge files: .hgignore | 2 + Doc/extending/newtypes.rst | 5 +- Doc/library/hmac.rst | 41 +- Doc/library/multiprocessing.rst | 30 +- Doc/library/socket.rst | 2 +- Doc/library/time.rst | 59 +- Doc/whatsnew/3.3.rst | 6 +- Include/pytime.h | 2 +- Include/unicodeobject.h | 4 +- Lib/_strptime.py | 6 +- Lib/datetime.py | 6 +- Lib/hmac.py | 26 +- Lib/idlelib/AutoComplete.py | 2 +- Lib/mailbox.py | 1 + Lib/multiprocessing/__init__.py | 11 +- Lib/multiprocessing/dummy/__init__.py | 4 +- Lib/multiprocessing/forking.py | 6 +- Lib/multiprocessing/managers.py | 94 +- Lib/multiprocessing/synchronize.py | 40 + Lib/multiprocessing/util.py | 27 +- Lib/test/support.py | 2 +- Lib/test/test_hmac.py | 44 +- Lib/test/test_mailbox.py | 11 + Lib/test/test_multiprocessing.py | 355 +++++++++++++- Lib/test/test_structseq.py | 5 +- Lib/test/test_time.py | 65 ++- Lib/test/test_xml_etree.py | 226 ++++---- Lib/test/test_xml_etree_c.py | 28 +- Lib/xml/etree/ElementTree.py | 32 +- Misc/NEWS | 29 + Modules/_curses_panel.c | 124 ++-- Modules/_decimal/libmpdec/mpdecimal.c | 16 +- Modules/_elementtree.c | 364 ++++++++++--- Modules/timemodule.c | 129 ++-- Objects/stringlib/codecs.h | 64 ++ Objects/unicodeobject.c | 86 +- 
PC/VS9.0/pythoncore.vcproj | 8 + Python/pytime.c | 11 +- 38 files changed, 1361 insertions(+), 612 deletions(-) diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -55,6 +55,8 @@ PC/pythonnt_rc*.h PC/*.obj PC/*.exe +PC/*/*.exe +PC/*/*.pdb PC/*/*.user PC/*/*.ncb PC/*/*.suo diff --git a/Doc/extending/newtypes.rst b/Doc/extending/newtypes.rst --- a/Doc/extending/newtypes.rst +++ b/Doc/extending/newtypes.rst @@ -1437,9 +1437,8 @@ } The only further addition is that the destructor needs to call the weak -reference manager to clear any weak references. This should be done before any -other parts of the destruction have occurred, but is only required if the weak -reference list is non-*NULL*:: +reference manager to clear any weak references. This is only required if the +weak reference list is non-*NULL*:: static void instance_dealloc(PyInstanceObject *inst) diff --git a/Doc/library/hmac.rst b/Doc/library/hmac.rst --- a/Doc/library/hmac.rst +++ b/Doc/library/hmac.rst @@ -42,8 +42,8 @@ When comparing the output of :meth:`digest` to an externally-supplied digest during a verification routine, it is recommended to use the - :func:`hmac.secure_compare` function instead of the ``==`` operator - to avoid potential timing attacks. + :func:`compare_digest` function instead of the ``==`` operator + to reduce the vulnerability to timing attacks. .. method:: HMAC.hexdigest() @@ -54,10 +54,11 @@ .. warning:: - When comparing the output of :meth:`hexdigest` to an externally-supplied - digest during a verification routine, it is recommended to use the - :func:`hmac.secure_compare` function instead of the ``==`` operator - to avoid potential timing attacks. + The output of :meth:`hexdigest` should not be compared directly to an + externally-supplied digest during a verification routine. Instead, the + externally supplied digest should be converted to a :class:`bytes` + value and compared to the output of :meth:`digest` with + :func:`compare_digest`. .. method:: HMAC.copy() @@ -68,20 +69,28 @@ This module also provides the following helper function: -.. function:: secure_compare(a, b) +.. function:: compare_digest(a, b) - Returns the equivalent of ``a == b``, but using a time-independent - comparison method. Comparing the full lengths of the inputs *a* and *b*, - instead of short-circuiting the comparison upon the first unequal byte, - prevents leaking information about the inputs being compared and mitigates - potential timing attacks. The inputs must be either :class:`str` or - :class:`bytes` instances. + Returns the equivalent of ``a == b``, but avoids content based + short circuiting behaviour to reduce the vulnerability to timing + analysis. The inputs must be :class:`bytes` instances. + + Using a short circuiting comparison (that is, one that terminates as soon + as it finds any difference between the values) to check digests for + correctness can be problematic, as it introduces a potential + vulnerability when an attacker can control both the message to be checked + *and* the purported signature value. By keeping the plaintext consistent + and supplying different signature values, an attacker may be able to use + timing variations to search the signature space for the expected value in + O(n) time rather than the desired O(2**n). .. note:: - While the :func:`hmac.secure_compare` function prevents leaking the - contents of the inputs via a timing attack, it does leak the length - of the inputs. However, this generally is not a security risk. 
+ While this function reduces the likelihood of leaking the contents of + the expected digest via a timing attack, it still uses short circuiting + behaviour based on the *length* of the inputs. It is assumed that the + expected length of the digest is not a secret, as it is typically + published as part of a file format, network protocol or API definition. .. versionadded:: 3.3 diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst --- a/Doc/library/multiprocessing.rst +++ b/Doc/library/multiprocessing.rst @@ -226,11 +226,11 @@ holds Python objects and allows other processes to manipulate them using proxies. - A manager returned by :func:`Manager` will support types :class:`list`, - :class:`dict`, :class:`Namespace`, :class:`Lock`, :class:`RLock`, - :class:`Semaphore`, :class:`BoundedSemaphore`, :class:`Condition`, - :class:`Event`, :class:`Queue`, :class:`Value` and :class:`Array`. For - example, :: + A manager returned by :func:`Manager` will support types + :class:`list`, :class:`dict`, :class:`Namespace`, :class:`Lock`, + :class:`RLock`, :class:`Semaphore`, :class:`BoundedSemaphore`, + :class:`Condition`, :class:`Event`, :class:`Barrier`, + :class:`Queue`, :class:`Value` and :class:`Array`. For example, :: from multiprocessing import Process, Manager @@ -885,6 +885,12 @@ Note that one can also create synchronization primitives by using a manager object -- see :ref:`multiprocessing-managers`. +.. class:: Barrier(parties[, action[, timeout]]) + + A barrier object: a clone of :class:`threading.Barrier`. + + .. versionadded:: 3.3 + .. class:: BoundedSemaphore([value]) A bounded semaphore object: a clone of :class:`threading.BoundedSemaphore`. @@ -1236,9 +1242,10 @@ type of shared object. This must be a string. *callable* is a callable used for creating objects for this type - identifier. If a manager instance will be created using the - :meth:`from_address` classmethod or if the *create_method* argument is - ``False`` then this can be left as ``None``. + identifier. If a manager instance will be connected to the + server using the :meth:`connect` method, or if the + *create_method* argument is ``False`` then this can be left as + ``None``. *proxytype* is a subclass of :class:`BaseProxy` which is used to create proxies for shared objects with this *typeid*. If ``None`` then a proxy @@ -1279,6 +1286,13 @@ It also supports creation of shared lists and dictionaries. + .. method:: Barrier(parties[, action[, timeout]]) + + Create a shared :class:`threading.Barrier` object and return a + proxy for it. + + .. versionadded:: 3.3 + .. method:: BoundedSemaphore([value]) Create a shared :class:`threading.BoundedSemaphore` object and return a diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -61,7 +61,7 @@ - A pair ``(host, port)`` is used for the :const:`AF_INET` address family, where *host* is a string representing either a hostname in Internet domain notation like ``'daring.cwi.nl'`` or an IPv4 address like ``'100.50.200.5'``, - and *port* is an integral port number. + and *port* is an integer. - For :const:`AF_INET6` address family, a four-tuple ``(host, port, flowinfo, scopeid)`` is used, where *flowinfo* and *scopeid* represent the ``sin6_flowinfo`` diff --git a/Doc/library/time.rst b/Doc/library/time.rst --- a/Doc/library/time.rst +++ b/Doc/library/time.rst @@ -77,6 +77,12 @@ See :class:`struct_time` for a description of these objects. + .. 
versionchanged:: 3.3 + + The :class:`struct_time` type was extended to provide the + :attr:`tm_gmtoff` and :attr:`tm_zone` attributes when platform + supports corresponding ``struct tm`` members. + * Use the following functions to convert between time representations: +-------------------------+-------------------------+-------------------------+ @@ -160,30 +166,6 @@ .. versionadded:: 3.3 -.. class:: clock_info - - Clock information object returned by :func:`get_clock_info`. - - .. attribute:: implementation - - The name of the underlying C function used to get the clock value. - - .. attribute:: monotonic - - ``True`` if the clock cannot go backward, ``False`` otherwise. - - .. attribute:: adjusted - - ``True`` if the clock can be adjusted (e.g. by a NTP daemon), ``False`` - otherwise. - - .. attribute:: resolution - - The resolution of the clock in seconds (:class:`float`). - - .. versionadded:: 3.3 - - .. function:: clock_settime(clk_id, time) Set the time of the specified clock *clk_id*. @@ -267,7 +249,7 @@ .. function:: get_clock_info(name) - Get information on the specified clock as a :class:`clock_info` object. + Get information on the specified clock as a namespace object. Supported clock names and the corresponding functions to read their value are: @@ -277,6 +259,16 @@ * ``'process_time'``: :func:`time.process_time` * ``'time'``: :func:`time.time` + The result has the following attributes: + + - *adjustable*: ``True`` if the clock can be changed automatically (e.g. by + a NTP daemon) or manually by the system administrator, ``False`` otherwise + - *implementation*: The name of the underlying C function used to get + the clock value + - *monotonic*: ``True`` if the clock cannot go backward, + ``False`` otherwise + - *resolution*: The resolution of the clock in seconds (:class:`float`) + .. versionadded:: 3.3 @@ -350,7 +342,6 @@ .. versionadded:: 3.3 - .. function:: sleep(secs) Suspend execution for the given number of seconds. The argument may be a @@ -447,6 +438,12 @@ | ``%Y`` | Year with century as a decimal number. | | | | | | +-----------+------------------------------------------------+-------+ + | ``%z`` | Time zone offset indicating a positive or | | + | | negative time difference from UTC/GMT of the | | + | | form +HHMM or -HHMM, where H represents decimal| | + | | hour digits and M represents decimal minute | | + | | digits [-23:59, +23:59]. | | + +-----------+------------------------------------------------+-------+ | ``%Z`` | Time zone name (no characters if no time zone | | | | exists). | | +-----------+------------------------------------------------+-------+ @@ -546,6 +543,10 @@ +-------+-------------------+---------------------------------+ | 8 | :attr:`tm_isdst` | 0, 1 or -1; see below | +-------+-------------------+---------------------------------+ + | N/A | :attr:`tm_zone` | abbreviation of timezone name | + +-------+-------------------+---------------------------------+ + | N/A | :attr:`tm_gmtoff` | offset from UTC in seconds | + +-------+-------------------+---------------------------------+ Note that unlike the C structure, the month value is a range of [1, 12], not [0, 11]. A ``-1`` argument as the daylight @@ -556,6 +557,11 @@ :class:`struct_time`, or having elements of the wrong type, a :exc:`TypeError` is raised. + .. versionchanged:: 3.3 + + :attr:`tm_gmtoff` and :attr:`tm_zone` attributes are avaliable on + platforms with C library supporting the corresponding fields in + ``struct tm``. .. 
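For illustration (not part of the changeset): a short sketch of the renamed clock metadata and the extended struct_time fields described above. The attribute names and the private time._STRUCT_TM_ITEMS constant are taken from this patch; the printed values are platform dependent.

    import time

    info = time.get_clock_info('time')
    # 'adjustable' replaces the earlier 'adjusted' attribute name.
    print(info.implementation, info.monotonic, info.adjustable, info.resolution)

    lt = time.localtime()
    if time._STRUCT_TM_ITEMS == 11:       # platform's struct tm has tm_zone/tm_gmtoff
        print(lt.tm_zone, lt.tm_gmtoff)   # e.g. 'CET' 3600

    # The new %z directive parses a numeric UTC offset into tm_gmtoff.
    print(time.strptime('+0500', '%z').tm_gmtoff)   # 18000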
function:: time() @@ -566,7 +572,6 @@ lower value than a previous call if the system clock has been set back between the two calls. - .. data:: timezone The offset of the local (non-DST) timezone, in seconds west of UTC (negative in diff --git a/Doc/whatsnew/3.3.rst b/Doc/whatsnew/3.3.rst --- a/Doc/whatsnew/3.3.rst +++ b/Doc/whatsnew/3.3.rst @@ -1484,9 +1484,11 @@ * repeating a single ASCII letter and getting a substring of a ASCII strings is 4 times faster -* UTF-8 and UTF-16 decoding is now 2x to 4x faster. +* UTF-8 and UTF-16 decoding is now 2x to 4x faster. UTF-16 encoding is now + up to 10x faster. - (contributed by Serhiy Storchaka, :issue:`14624` and :issue:`14738`.) + (contributed by Serhiy Storchaka, :issue:`14624`, :issue:`14738` and + :issue:`15026`.) Build and C API Changes diff --git a/Include/pytime.h b/Include/pytime.h --- a/Include/pytime.h +++ b/Include/pytime.h @@ -26,7 +26,7 @@ typedef struct { const char *implementation; int monotonic; - int adjusted; + int adjustable; double resolution; } _Py_clock_info_t; diff --git a/Include/unicodeobject.h b/Include/unicodeobject.h --- a/Include/unicodeobject.h +++ b/Include/unicodeobject.h @@ -188,9 +188,9 @@ (((((Py_UCS4)(high) & 0x03FF) << 10) | \ ((Py_UCS4)(low) & 0x03FF)) + 0x10000) /* high surrogate = top 10 bits added to D800 */ -#define Py_UNICODE_HIGH_SURROGATE(ch) (0xD800 | (((ch) - 0x10000) >> 10)) +#define Py_UNICODE_HIGH_SURROGATE(ch) (0xD800 - (0x10000 >> 10) + ((ch) >> 10)) /* low surrogate = bottom 10 bits added to DC00 */ -#define Py_UNICODE_LOW_SURROGATE(ch) (0xDC00 | (((ch) - 0x10000) & 0x3FF)) +#define Py_UNICODE_LOW_SURROGATE(ch) (0xDC00 + ((ch) & 0x3FF)) /* Check if substring matches at given offset. The offset must be valid, and the substring must not be empty. */ diff --git a/Lib/_strptime.py b/Lib/_strptime.py --- a/Lib/_strptime.py +++ b/Lib/_strptime.py @@ -486,19 +486,19 @@ return (year, month, day, hour, minute, second, - weekday, julian, tz, gmtoff, tzname), fraction + weekday, julian, tz, tzname, gmtoff), fraction def _strptime_time(data_string, format="%a %b %d %H:%M:%S %Y"): """Return a time struct based on the input string and the format string.""" tt = _strptime(data_string, format)[0] - return time.struct_time(tt[:9]) + return time.struct_time(tt[:time._STRUCT_TM_ITEMS]) def _strptime_datetime(cls, data_string, format="%a %b %d %H:%M:%S %Y"): """Return a class cls instance based on the input string and the format string.""" tt, fraction = _strptime(data_string, format) - gmtoff, tzname = tt[-2:] + tzname, gmtoff = tt[-2:] args = tt[:6] + (fraction,) if gmtoff is not None: tzdelta = datetime_timedelta(seconds=gmtoff) diff --git a/Lib/datetime.py b/Lib/datetime.py --- a/Lib/datetime.py +++ b/Lib/datetime.py @@ -1670,10 +1670,8 @@ if mytz is ottz: base_compare = True else: - if mytz is not None: - myoff = self.utcoffset() - if ottz is not None: - otoff = other.utcoffset() + myoff = self.utcoffset() + otoff = other.utcoffset() base_compare = myoff == otoff if base_compare: diff --git a/Lib/hmac.py b/Lib/hmac.py --- a/Lib/hmac.py +++ b/Lib/hmac.py @@ -13,24 +13,24 @@ digest_size = None -def secure_compare(a, b): - """Returns the equivalent of 'a == b', but using a time-independent - comparison method to prevent timing attacks.""" - if not ((isinstance(a, str) and isinstance(b, str)) or - (isinstance(a, bytes) and isinstance(b, bytes))): - raise TypeError("inputs must be strings or bytes") +def compare_digest(a, b): + """Returns the equivalent of 'a == b', but avoids content based short + circuiting to 
reduce the vulnerability to timing attacks.""" + # Consistent timing matters more here than data type flexibility + if not (isinstance(a, bytes) and isinstance(b, bytes)): + raise TypeError("inputs must be bytes instances") + # We assume the length of the expected digest is public knowledge, + # thus this early return isn't leaking anything an attacker wouldn't + # already know if len(a) != len(b): return False + # We assume that integers in the bytes range are all cached, + # thus timing shouldn't vary much due to integer object creation result = 0 - if isinstance(a, bytes): - for x, y in zip(a, b): - result |= x ^ y - else: - for x, y in zip(a, b): - result |= ord(x) ^ ord(y) - + for x, y in zip(a, b): + result |= x ^ y return result == 0 diff --git a/Lib/idlelib/AutoComplete.py b/Lib/idlelib/AutoComplete.py --- a/Lib/idlelib/AutoComplete.py +++ b/Lib/idlelib/AutoComplete.py @@ -140,7 +140,7 @@ elif hp.is_in_code() and (not mode or mode==COMPLETE_ATTRIBUTES): self._remove_autocomplete_window() mode = COMPLETE_ATTRIBUTES - while i and curline[i-1] in ID_CHARS or ord(curline[i-1]) > 127: + while i and (curline[i-1] in ID_CHARS or ord(curline[i-1]) > 127): i -= 1 comp_start = curline[i:j] if i and curline[i-1] == '.': diff --git a/Lib/mailbox.py b/Lib/mailbox.py --- a/Lib/mailbox.py +++ b/Lib/mailbox.py @@ -675,6 +675,7 @@ new_file.write(buffer) new_toc[key] = (new_start, new_file.tell()) self._post_message_hook(new_file) + self._file_length = new_file.tell() except: new_file.close() os.remove(new_file.name) diff --git a/Lib/multiprocessing/__init__.py b/Lib/multiprocessing/__init__.py --- a/Lib/multiprocessing/__init__.py +++ b/Lib/multiprocessing/__init__.py @@ -23,8 +23,8 @@ 'Manager', 'Pipe', 'cpu_count', 'log_to_stderr', 'get_logger', 'allow_connection_pickling', 'BufferTooShort', 'TimeoutError', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition', - 'Event', 'Queue', 'SimpleQueue', 'JoinableQueue', 'Pool', 'Value', 'Array', - 'RawValue', 'RawArray', 'SUBDEBUG', 'SUBWARNING', + 'Event', 'Barrier', 'Queue', 'SimpleQueue', 'JoinableQueue', 'Pool', + 'Value', 'Array', 'RawValue', 'RawArray', 'SUBDEBUG', 'SUBWARNING', ] __author__ = 'R. 
Oudkerk (r.m.oudkerk at gmail.com)' @@ -186,6 +186,13 @@ from multiprocessing.synchronize import Event return Event() +def Barrier(parties, action=None, timeout=None): + ''' + Returns a barrier object + ''' + from multiprocessing.synchronize import Barrier + return Barrier(parties, action, timeout) + def Queue(maxsize=0): ''' Returns a queue object diff --git a/Lib/multiprocessing/dummy/__init__.py b/Lib/multiprocessing/dummy/__init__.py --- a/Lib/multiprocessing/dummy/__init__.py +++ b/Lib/multiprocessing/dummy/__init__.py @@ -35,7 +35,7 @@ __all__ = [ 'Process', 'current_process', 'active_children', 'freeze_support', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Condition', - 'Event', 'Queue', 'Manager', 'Pipe', 'Pool', 'JoinableQueue' + 'Event', 'Barrier', 'Queue', 'Manager', 'Pipe', 'Pool', 'JoinableQueue' ] # @@ -49,7 +49,7 @@ from multiprocessing.dummy.connection import Pipe from threading import Lock, RLock, Semaphore, BoundedSemaphore -from threading import Event, Condition +from threading import Event, Condition, Barrier from queue import Queue # diff --git a/Lib/multiprocessing/forking.py b/Lib/multiprocessing/forking.py --- a/Lib/multiprocessing/forking.py +++ b/Lib/multiprocessing/forking.py @@ -13,7 +13,7 @@ from multiprocessing import util, process -__all__ = ['Popen', 'assert_spawning', 'exit', 'duplicate', 'close', 'ForkingPickler'] +__all__ = ['Popen', 'assert_spawning', 'duplicate', 'close', 'ForkingPickler'] # # Check that the current thread is spawning a child process @@ -75,7 +75,6 @@ # if sys.platform != 'win32': - exit = os._exit duplicate = os.dup close = os.close @@ -168,7 +167,6 @@ WINEXE = (sys.platform == 'win32' and getattr(sys, 'frozen', False)) WINSERVICE = sys.executable.lower().endswith("pythonservice.exe") - exit = _winapi.ExitProcess close = _winapi.CloseHandle # @@ -349,7 +347,7 @@ from_parent.close() exitcode = self._bootstrap() - exit(exitcode) + sys.exit(exitcode) def get_preparation_data(name): diff --git a/Lib/multiprocessing/managers.py b/Lib/multiprocessing/managers.py --- a/Lib/multiprocessing/managers.py +++ b/Lib/multiprocessing/managers.py @@ -22,7 +22,7 @@ from traceback import format_exc from multiprocessing import Process, current_process, active_children, Pool, util, connection from multiprocessing.process import AuthenticationString -from multiprocessing.forking import exit, Popen, ForkingPickler +from multiprocessing.forking import Popen, ForkingPickler from time import time as _time # @@ -140,28 +140,38 @@ self.id_to_obj = {'0': (None, ())} self.id_to_refcount = {} self.mutex = threading.RLock() - self.stop = 0 def serve_forever(self): ''' Run the server forever ''' + self.stop_event = threading.Event() current_process()._manager_server = self try: + accepter = threading.Thread(target=self.accepter) + accepter.daemon = True + accepter.start() try: - while 1: - try: - c = self.listener.accept() - except (OSError, IOError): - continue - t = threading.Thread(target=self.handle_request, args=(c,)) - t.daemon = True - t.start() + while not self.stop_event.is_set(): + self.stop_event.wait(1) except (KeyboardInterrupt, SystemExit): pass finally: - self.stop = 999 - self.listener.close() + if sys.stdout != sys.__stdout__: + util.debug('resetting stdout, stderr') + sys.stdout = sys.__stdout__ + sys.stderr = sys.__stderr__ + sys.exit(0) + + def accepter(self): + while True: + try: + c = self.listener.accept() + except (OSError, IOError): + continue + t = threading.Thread(target=self.handle_request, args=(c,)) + t.daemon = True + t.start() 
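For illustration (not part of the changeset): a minimal sketch of the multiprocessing.Barrier() factory added above for issue #14059. It mirrors threading.Barrier, and the same patch also registers a Barrier proxy with SyncManager, so manager.Barrier(...) works as well.

    import multiprocessing

    def worker(barrier):
        # Every party blocks in wait() until all of them have arrived;
        # exactly one caller receives the return value 0.
        if barrier.wait() == 0:
            print('barrier released')

    if __name__ == '__main__':
        barrier = multiprocessing.Barrier(3)
        procs = [multiprocessing.Process(target=worker, args=(barrier,))
                 for _ in range(2)]
        for p in procs:
            p.start()
        barrier.wait()        # the main process is the third party
        for p in procs:
            p.join()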
def handle_request(self, c): ''' @@ -208,7 +218,7 @@ send = conn.send id_to_obj = self.id_to_obj - while not self.stop: + while not self.stop_event.is_set(): try: methodname = obj = None @@ -318,32 +328,13 @@ Shutdown this process ''' try: - try: - util.debug('manager received shutdown message') - c.send(('#RETURN', None)) - - if sys.stdout != sys.__stdout__: - util.debug('resetting stdout, stderr') - sys.stdout = sys.__stdout__ - sys.stderr = sys.__stderr__ - - util._run_finalizers(0) - - for p in active_children(): - util.debug('terminating a child process of manager') - p.terminate() - - for p in active_children(): - util.debug('terminating a child process of manager') - p.join() - - util._run_finalizers() - util.info('manager exiting with exitcode 0') - except: - import traceback - traceback.print_exc() + util.debug('manager received shutdown message') + c.send(('#RETURN', None)) + except: + import traceback + traceback.print_exc() finally: - exit(0) + self.stop_event.set() def create(self, c, typeid, *args, **kwds): ''' @@ -455,10 +446,6 @@ self._serializer = serializer self._Listener, self._Client = listener_client[serializer] - def __reduce__(self): - return type(self).from_address, \ - (self._address, self._authkey, self._serializer) - def get_server(self): ''' Return server object with serve_forever() method and address attribute @@ -595,7 +582,7 @@ except Exception: pass - process.join(timeout=0.2) + process.join(timeout=1.0) if process.is_alive(): util.info('manager still alive') if hasattr(process, 'terminate'): @@ -1006,6 +993,26 @@ def wait(self, timeout=None): return self._callmethod('wait', (timeout,)) + +class BarrierProxy(BaseProxy): + _exposed_ = ('__getattribute__', 'wait', 'abort', 'reset') + def wait(self, timeout=None): + return self._callmethod('wait', (timeout,)) + def abort(self): + return self._callmethod('abort') + def reset(self): + return self._callmethod('reset') + @property + def parties(self): + return self._callmethod('__getattribute__', ('parties',)) + @property + def n_waiting(self): + return self._callmethod('__getattribute__', ('n_waiting',)) + @property + def broken(self): + return self._callmethod('__getattribute__', ('broken',)) + + class NamespaceProxy(BaseProxy): _exposed_ = ('__getattribute__', '__setattr__', '__delattr__') def __getattr__(self, key): @@ -1097,6 +1104,7 @@ SyncManager.register('BoundedSemaphore', threading.BoundedSemaphore, AcquirerProxy) SyncManager.register('Condition', threading.Condition, ConditionProxy) +SyncManager.register('Barrier', threading.Barrier, BarrierProxy) SyncManager.register('Pool', Pool, PoolProxy) SyncManager.register('list', list, ListProxy) SyncManager.register('dict', dict, DictProxy) diff --git a/Lib/multiprocessing/synchronize.py b/Lib/multiprocessing/synchronize.py --- a/Lib/multiprocessing/synchronize.py +++ b/Lib/multiprocessing/synchronize.py @@ -333,3 +333,43 @@ return False finally: self._cond.release() + +# +# Barrier +# + +class Barrier(threading.Barrier): + + def __init__(self, parties, action=None, timeout=None): + import struct + from multiprocessing.heap import BufferWrapper + wrapper = BufferWrapper(struct.calcsize('i') * 2) + cond = Condition() + self.__setstate__((parties, action, timeout, cond, wrapper)) + self._state = 0 + self._count = 0 + + def __setstate__(self, state): + (self._parties, self._action, self._timeout, + self._cond, self._wrapper) = state + self._array = self._wrapper.create_memoryview().cast('i') + + def __getstate__(self): + return (self._parties, self._action, 
self._timeout, + self._cond, self._wrapper) + + @property + def _state(self): + return self._array[0] + + @_state.setter + def _state(self, value): + self._array[0] = value + + @property + def _count(self): + return self._array[1] + + @_count.setter + def _count(self, value): + self._array[1] = value diff --git a/Lib/multiprocessing/util.py b/Lib/multiprocessing/util.py --- a/Lib/multiprocessing/util.py +++ b/Lib/multiprocessing/util.py @@ -269,21 +269,24 @@ def _exit_function(): global _exiting - info('process shutting down') - debug('running all "atexit" finalizers with priority >= 0') - _run_finalizers(0) + if not _exiting: + _exiting = True - for p in active_children(): - if p._daemonic: - info('calling terminate() for daemon %s', p.name) - p._popen.terminate() + info('process shutting down') + debug('running all "atexit" finalizers with priority >= 0') + _run_finalizers(0) - for p in active_children(): - info('calling join() for process %s', p.name) - p.join() + for p in active_children(): + if p._daemonic: + info('calling terminate() for daemon %s', p.name) + p._popen.terminate() - debug('running the remaining "atexit" finalizers') - _run_finalizers() + for p in active_children(): + info('calling join() for process %s', p.name) + p.join() + + debug('running the remaining "atexit" finalizers') + _run_finalizers() atexit.register(_exit_function) diff --git a/Lib/test/support.py b/Lib/test/support.py --- a/Lib/test/support.py +++ b/Lib/test/support.py @@ -1593,7 +1593,7 @@ This will typically be run on the result of the communicate() method of a subprocess.Popen object. """ - stderr = re.sub(br"\[\d+ refs\]\r?\n?$", b"", stderr).strip() + stderr = re.sub(br"\[\d+ refs\]\r?\n?", b"", stderr).strip() return stderr def args_from_interpreter_flags(): diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py --- a/Lib/test/test_hmac.py +++ b/Lib/test/test_hmac.py @@ -302,40 +302,42 @@ self.assertEqual(h1.hexdigest(), h2.hexdigest(), "Hexdigest of copy doesn't match original hexdigest.") -class SecureCompareTestCase(unittest.TestCase): +class CompareDigestTestCase(unittest.TestCase): def test_compare(self): # Testing input type exception handling a, b = 100, 200 - self.assertRaises(TypeError, hmac.secure_compare, a, b) - a, b = 100, "foobar" - self.assertRaises(TypeError, hmac.secure_compare, a, b) + self.assertRaises(TypeError, hmac.compare_digest, a, b) + a, b = 100, b"foobar" + self.assertRaises(TypeError, hmac.compare_digest, a, b) + a, b = b"foobar", 200 + self.assertRaises(TypeError, hmac.compare_digest, a, b) a, b = "foobar", b"foobar" - self.assertRaises(TypeError, hmac.secure_compare, a, b) + self.assertRaises(TypeError, hmac.compare_digest, a, b) + a, b = b"foobar", "foobar" + self.assertRaises(TypeError, hmac.compare_digest, a, b) + a, b = "foobar", "foobar" + self.assertRaises(TypeError, hmac.compare_digest, a, b) + a, b = bytearray(b"foobar"), bytearray(b"foobar") + self.assertRaises(TypeError, hmac.compare_digest, a, b) - # Testing str/bytes of different lengths - a, b = "foobar", "foo" - self.assertFalse(hmac.secure_compare(a, b)) + # Testing bytes of different lengths a, b = b"foobar", b"foo" - self.assertFalse(hmac.secure_compare(a, b)) + self.assertFalse(hmac.compare_digest(a, b)) a, b = b"\xde\xad\xbe\xef", b"\xde\xad" - self.assertFalse(hmac.secure_compare(a, b)) + self.assertFalse(hmac.compare_digest(a, b)) - # Testing str/bytes of same lengths, different values - a, b = "foobar", "foobaz" - self.assertFalse(hmac.secure_compare(a, b)) + # Testing bytes of same lengths, 
different values a, b = b"foobar", b"foobaz" - self.assertFalse(hmac.secure_compare(a, b)) + self.assertFalse(hmac.compare_digest(a, b)) a, b = b"\xde\xad\xbe\xef", b"\xab\xad\x1d\xea" - self.assertFalse(hmac.secure_compare(a, b)) + self.assertFalse(hmac.compare_digest(a, b)) - # Testing str/bytes of same lengths, same values - a, b = "foobar", "foobar" - self.assertTrue(hmac.secure_compare(a, b)) + # Testing bytes of same lengths, same values a, b = b"foobar", b"foobar" - self.assertTrue(hmac.secure_compare(a, b)) + self.assertTrue(hmac.compare_digest(a, b)) a, b = b"\xde\xad\xbe\xef", b"\xde\xad\xbe\xef" - self.assertTrue(hmac.secure_compare(a, b)) + self.assertTrue(hmac.compare_digest(a, b)) def test_main(): support.run_unittest( @@ -343,7 +345,7 @@ ConstructorTestCase, SanityTestCase, CopyTestCase, - SecureCompareTestCase + CompareDigestTestCase ) if __name__ == "__main__": diff --git a/Lib/test/test_mailbox.py b/Lib/test/test_mailbox.py --- a/Lib/test/test_mailbox.py +++ b/Lib/test/test_mailbox.py @@ -504,6 +504,17 @@ # Write changes to disk self._test_flush_or_close(self._box.flush, True) + def test_popitem_and_flush_twice(self): + # See #15036. + self._box.add(self._template % 0) + self._box.add(self._template % 1) + self._box.flush() + + self._box.popitem() + self._box.flush() + self._box.popitem() + self._box.flush() + def test_lock_unlock(self): # Lock and unlock the mailbox self.assertFalse(os.path.exists(self._get_lock_path())) diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py --- a/Lib/test/test_multiprocessing.py +++ b/Lib/test/test_multiprocessing.py @@ -18,6 +18,7 @@ import socket import random import logging +import struct import test.support @@ -1057,6 +1058,340 @@ self.assertEqual(wait(), True) # +# Tests for Barrier - adapted from tests in test/lock_tests.py +# + +# Many of the tests for threading.Barrier use a list as an atomic +# counter: a value is appended to increment the counter, and the +# length of the list gives the value. We use the class DummyList +# for the same purpose. + +class _DummyList(object): + + def __init__(self): + wrapper = multiprocessing.heap.BufferWrapper(struct.calcsize('i')) + lock = multiprocessing.Lock() + self.__setstate__((wrapper, lock)) + self._lengthbuf[0] = 0 + + def __setstate__(self, state): + (self._wrapper, self._lock) = state + self._lengthbuf = self._wrapper.create_memoryview().cast('i') + + def __getstate__(self): + return (self._wrapper, self._lock) + + def append(self, _): + with self._lock: + self._lengthbuf[0] += 1 + + def __len__(self): + with self._lock: + return self._lengthbuf[0] + +def _wait(): + # A crude wait/yield function not relying on synchronization primitives. + time.sleep(0.01) + + +class Bunch(object): + """ + A bunch of threads. + """ + def __init__(self, namespace, f, args, n, wait_before_exit=False): + """ + Construct a bunch of `n` threads running the same function `f`. + If `wait_before_exit` is True, the threads won't terminate until + do_finish() is called. 
+ """ + self.f = f + self.args = args + self.n = n + self.started = namespace.DummyList() + self.finished = namespace.DummyList() + self._can_exit = namespace.Event() + if not wait_before_exit: + self._can_exit.set() + for i in range(n): + p = namespace.Process(target=self.task) + p.daemon = True + p.start() + + def task(self): + pid = os.getpid() + self.started.append(pid) + try: + self.f(*self.args) + finally: + self.finished.append(pid) + self._can_exit.wait(30) + assert self._can_exit.is_set() + + def wait_for_started(self): + while len(self.started) < self.n: + _wait() + + def wait_for_finished(self): + while len(self.finished) < self.n: + _wait() + + def do_finish(self): + self._can_exit.set() + + +class AppendTrue(object): + def __init__(self, obj): + self.obj = obj + def __call__(self): + self.obj.append(True) + + +class _TestBarrier(BaseTestCase): + """ + Tests for Barrier objects. + """ + N = 5 + defaultTimeout = 10.0 # XXX Slow Windows buildbots need generous timeout + + def setUp(self): + self.barrier = self.Barrier(self.N, timeout=self.defaultTimeout) + + def tearDown(self): + self.barrier.abort() + self.barrier = None + + def DummyList(self): + if self.TYPE == 'threads': + return [] + elif self.TYPE == 'manager': + return self.manager.list() + else: + return _DummyList() + + def run_threads(self, f, args): + b = Bunch(self, f, args, self.N-1) + f(*args) + b.wait_for_finished() + + @classmethod + def multipass(cls, barrier, results, n): + m = barrier.parties + assert m == cls.N + for i in range(n): + results[0].append(True) + assert len(results[1]) == i * m + barrier.wait() + results[1].append(True) + assert len(results[0]) == (i + 1) * m + barrier.wait() + try: + assert barrier.n_waiting == 0 + except NotImplementedError: + pass + assert not barrier.broken + + def test_barrier(self, passes=1): + """ + Test that a barrier is passed in lockstep + """ + results = [self.DummyList(), self.DummyList()] + self.run_threads(self.multipass, (self.barrier, results, passes)) + + def test_barrier_10(self): + """ + Test that a barrier works for 10 consecutive runs + """ + return self.test_barrier(10) + + @classmethod + def _test_wait_return_f(cls, barrier, queue): + res = barrier.wait() + queue.put(res) + + def test_wait_return(self): + """ + test the return value from barrier.wait + """ + queue = self.Queue() + self.run_threads(self._test_wait_return_f, (self.barrier, queue)) + results = [queue.get() for i in range(self.N)] + self.assertEqual(results.count(0), 1) + + @classmethod + def _test_action_f(cls, barrier, results): + barrier.wait() + if len(results) != 1: + raise RuntimeError + + def test_action(self): + """ + Test the 'action' callback + """ + results = self.DummyList() + barrier = self.Barrier(self.N, action=AppendTrue(results)) + self.run_threads(self._test_action_f, (barrier, results)) + self.assertEqual(len(results), 1) + + @classmethod + def _test_abort_f(cls, barrier, results1, results2): + try: + i = barrier.wait() + if i == cls.N//2: + raise RuntimeError + barrier.wait() + results1.append(True) + except threading.BrokenBarrierError: + results2.append(True) + except RuntimeError: + barrier.abort() + + def test_abort(self): + """ + Test that an abort will put the barrier in a broken state + """ + results1 = self.DummyList() + results2 = self.DummyList() + self.run_threads(self._test_abort_f, + (self.barrier, results1, results2)) + self.assertEqual(len(results1), 0) + self.assertEqual(len(results2), self.N-1) + self.assertTrue(self.barrier.broken) + + @classmethod + def 
_test_reset_f(cls, barrier, results1, results2, results3): + i = barrier.wait() + if i == cls.N//2: + # Wait until the other threads are all in the barrier. + while barrier.n_waiting < cls.N-1: + time.sleep(0.001) + barrier.reset() + else: + try: + barrier.wait() + results1.append(True) + except threading.BrokenBarrierError: + results2.append(True) + # Now, pass the barrier again + barrier.wait() + results3.append(True) + + def test_reset(self): + """ + Test that a 'reset' on a barrier frees the waiting threads + """ + results1 = self.DummyList() + results2 = self.DummyList() + results3 = self.DummyList() + self.run_threads(self._test_reset_f, + (self.barrier, results1, results2, results3)) + self.assertEqual(len(results1), 0) + self.assertEqual(len(results2), self.N-1) + self.assertEqual(len(results3), self.N) + + @classmethod + def _test_abort_and_reset_f(cls, barrier, barrier2, + results1, results2, results3): + try: + i = barrier.wait() + if i == cls.N//2: + raise RuntimeError + barrier.wait() + results1.append(True) + except threading.BrokenBarrierError: + results2.append(True) + except RuntimeError: + barrier.abort() + # Synchronize and reset the barrier. Must synchronize first so + # that everyone has left it when we reset, and after so that no + # one enters it before the reset. + if barrier2.wait() == cls.N//2: + barrier.reset() + barrier2.wait() + barrier.wait() + results3.append(True) + + def test_abort_and_reset(self): + """ + Test that a barrier can be reset after being broken. + """ + results1 = self.DummyList() + results2 = self.DummyList() + results3 = self.DummyList() + barrier2 = self.Barrier(self.N) + + self.run_threads(self._test_abort_and_reset_f, + (self.barrier, barrier2, results1, results2, results3)) + self.assertEqual(len(results1), 0) + self.assertEqual(len(results2), self.N-1) + self.assertEqual(len(results3), self.N) + + @classmethod + def _test_timeout_f(cls, barrier, results): + i = barrier.wait(20) + if i == cls.N//2: + # One thread is late! + time.sleep(4.0) + try: + barrier.wait(0.5) + except threading.BrokenBarrierError: + results.append(True) + + def test_timeout(self): + """ + Test wait(timeout) + """ + results = self.DummyList() + self.run_threads(self._test_timeout_f, (self.barrier, results)) + self.assertEqual(len(results), self.barrier.parties) + + @classmethod + def _test_default_timeout_f(cls, barrier, results): + i = barrier.wait(20) + if i == cls.N//2: + # One thread is later than the default timeout + time.sleep(4.0) + try: + barrier.wait() + except threading.BrokenBarrierError: + results.append(True) + + def test_default_timeout(self): + """ + Test the barrier's default timeout + """ + barrier = self.Barrier(self.N, timeout=1.0) + results = self.DummyList() + self.run_threads(self._test_default_timeout_f, (barrier, results)) + self.assertEqual(len(results), barrier.parties) + + def test_single_thread(self): + b = self.Barrier(1) + b.wait() + b.wait() + + @classmethod + def _test_thousand_f(cls, barrier, passes, conn, lock): + for i in range(passes): + barrier.wait() + with lock: + conn.send(i) + + def test_thousand(self): + if self.TYPE == 'manager': + return + passes = 1000 + lock = self.Lock() + conn, child_conn = self.Pipe(False) + for j in range(self.N): + p = self.Process(target=self._test_thousand_f, + args=(self.barrier, passes, child_conn, lock)) + p.start() + + for i in range(passes): + for j in range(self.N): + self.assertEqual(conn.recv(), i) + +# # # @@ -1485,6 +1820,11 @@ # run after all the other tests for the manager. 
It tests that # there have been no "reference leaks" for the manager's shared # objects. Note the comment in _TestPool.test_terminate(). + + # If some other test using ManagerMixin.manager fails, then the + # raised exception may keep alive a frame which holds a reference + # to a managed object. This will cause test_number_of_objects to + # also fail. ALLOWED_TYPES = ('manager',) def test_number_of_objects(self): @@ -1564,6 +1904,11 @@ manager.shutdown() + # If the manager process exited cleanly then the exitcode + # will be zero. Otherwise (after a short timeout) + # terminate() is used, resulting in an exitcode of -SIGTERM. + self.assertEqual(manager._process.exitcode, 0) + # # Test of connecting to a remote server and using xmlrpclib for serialization # @@ -1923,7 +2268,7 @@ class _TestListener(BaseTestCase): - ALLOWED_TYPES = ('processes') + ALLOWED_TYPES = ('processes',) def test_multiple_bind(self): for family in self.connection.families: @@ -2505,10 +2850,12 @@ result = {} glob = globals() Type = type.capitalize() + ALL_TYPES = {'processes', 'threads', 'manager'} for name in list(glob.keys()): if name.startswith('_Test'): base = glob[name] + assert set(base.ALLOWED_TYPES) <= ALL_TYPES, set(base.ALLOWED_TYPES) if type in base.ALLOWED_TYPES: newname = 'With' + Type + name[1:] class Temp(base, unittest.TestCase, Mixin): @@ -2527,7 +2874,7 @@ Process = multiprocessing.Process locals().update(get_attributes(multiprocessing, ( 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', - 'Condition', 'Event', 'Value', 'Array', 'RawValue', + 'Condition', 'Event', 'Barrier', 'Value', 'Array', 'RawValue', 'RawArray', 'current_process', 'active_children', 'Pipe', 'connection', 'JoinableQueue', 'Pool' ))) @@ -2542,7 +2889,7 @@ manager = object.__new__(multiprocessing.managers.SyncManager) locals().update(get_attributes(manager, ( 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', - 'Condition', 'Event', 'Value', 'Array', 'list', 'dict', + 'Condition', 'Event', 'Barrier', 'Value', 'Array', 'list', 'dict', 'Namespace', 'JoinableQueue', 'Pool' ))) @@ -2555,7 +2902,7 @@ Process = multiprocessing.dummy.Process locals().update(get_attributes(multiprocessing.dummy, ( 'Queue', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', - 'Condition', 'Event', 'Value', 'Array', 'current_process', + 'Condition', 'Event', 'Barrier', 'Value', 'Array', 'current_process', 'active_children', 'Pipe', 'connection', 'dict', 'list', 'Namespace', 'JoinableQueue', 'Pool' ))) diff --git a/Lib/test/test_structseq.py b/Lib/test/test_structseq.py --- a/Lib/test/test_structseq.py +++ b/Lib/test/test_structseq.py @@ -78,8 +78,9 @@ def test_fields(self): t = time.gmtime() - self.assertEqual(len(t), t.n_fields) - self.assertEqual(t.n_fields, t.n_sequence_fields+t.n_unnamed_fields) + self.assertEqual(len(t), t.n_sequence_fields) + self.assertEqual(t.n_unnamed_fields, 0) + self.assertEqual(t.n_fields, time._STRUCT_TM_ITEMS) def test_constructor(self): t = time.struct_time diff --git a/Lib/test/test_time.py b/Lib/test/test_time.py --- a/Lib/test/test_time.py +++ b/Lib/test/test_time.py @@ -31,15 +31,14 @@ time.time() info = time.get_clock_info('time') self.assertFalse(info.monotonic) - if sys.platform != 'win32': - self.assertTrue(info.adjusted) + self.assertTrue(info.adjustable) def test_clock(self): time.clock() info = time.get_clock_info('clock') self.assertTrue(info.monotonic) - self.assertFalse(info.adjusted) + self.assertFalse(info.adjustable) @unittest.skipUnless(hasattr(time, 'clock_gettime'), 'need 
time.clock_gettime()') @@ -371,10 +370,7 @@ info = time.get_clock_info('monotonic') self.assertTrue(info.monotonic) - if sys.platform == 'linux': - self.assertTrue(info.adjusted) - else: - self.assertFalse(info.adjusted) + self.assertFalse(info.adjustable) def test_perf_counter(self): time.perf_counter() @@ -390,7 +386,7 @@ info = time.get_clock_info('process_time') self.assertTrue(info.monotonic) - self.assertFalse(info.adjusted) + self.assertFalse(info.adjustable) @unittest.skipUnless(hasattr(time, 'monotonic'), 'need time.monotonic') @@ -441,7 +437,7 @@ # 0.0 < resolution <= 1.0 self.assertGreater(info.resolution, 0.0) self.assertLessEqual(info.resolution, 1.0) - self.assertIsInstance(info.adjusted, bool) + self.assertIsInstance(info.adjustable, bool) self.assertRaises(ValueError, time.get_clock_info, 'xxx') @@ -624,7 +620,58 @@ for invalid in self.invalid_values: self.assertRaises(OverflowError, pytime_object_to_timespec, invalid) + @unittest.skipUnless(time._STRUCT_TM_ITEMS == 11, "needs tm_zone support") + def test_localtime_timezone(self): + # Get the localtime and examine it for the offset and zone. + lt = time.localtime() + self.assertTrue(hasattr(lt, "tm_gmtoff")) + self.assertTrue(hasattr(lt, "tm_zone")) + + # See if the offset and zone are similar to the module + # attributes. + if lt.tm_gmtoff is None: + self.assertTrue(not hasattr(time, "timezone")) + else: + self.assertEqual(lt.tm_gmtoff, -[time.timezone, time.altzone][lt.tm_isdst]) + if lt.tm_zone is None: + self.assertTrue(not hasattr(time, "tzname")) + else: + self.assertEqual(lt.tm_zone, time.tzname[lt.tm_isdst]) + + # Try and make UNIX times from the localtime and a 9-tuple + # created from the localtime. Test to see that the times are + # the same. + t = time.mktime(lt); t9 = time.mktime(lt[:9]) + self.assertEqual(t, t9) + + # Make localtimes from the UNIX times and compare them to + # the original localtime, thus making a round trip. + new_lt = time.localtime(t); new_lt9 = time.localtime(t9) + self.assertEqual(new_lt, lt) + self.assertEqual(new_lt.tm_gmtoff, lt.tm_gmtoff) + self.assertEqual(new_lt.tm_zone, lt.tm_zone) + self.assertEqual(new_lt9, lt) + self.assertEqual(new_lt.tm_gmtoff, lt.tm_gmtoff) + self.assertEqual(new_lt9.tm_zone, lt.tm_zone) + + @unittest.skipUnless(time._STRUCT_TM_ITEMS == 11, "needs tm_zone support") + def test_strptime_timezone(self): + t = time.strptime("UTC", "%Z") + self.assertEqual(t.tm_zone, 'UTC') + t = time.strptime("+0500", "%z") + self.assertEqual(t.tm_gmtoff, 5 * 3600) + + @unittest.skipUnless(time._STRUCT_TM_ITEMS == 11, "needs tm_zone support") + def test_short_times(self): + + import pickle + + # Load a short time structure using pickle. + st = b"ctime\nstruct_time\np0\n((I2007\nI8\nI11\nI1\nI24\nI49\nI5\nI223\nI1\ntp1\n(dp2\ntp3\nRp4\n." + lt = pickle.loads(st) + self.assertIs(lt.tm_gmtoff, None) + self.assertIs(lt.tm_zone, None) def test_main(): support.run_unittest( diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py --- a/Lib/test/test_xml_etree.py +++ b/Lib/test/test_xml_etree.py @@ -23,7 +23,8 @@ from test import support from test.support import findfile, import_fresh_module, gc_collect -pyET = import_fresh_module('xml.etree.ElementTree', blocked=['_elementtree']) +pyET = None +ET = None SIMPLE_XMLFILE = findfile("simple.xml", subdir="xmltestdata") try: @@ -209,10 +210,8 @@ These methods return an iterable. See bug 6472. 
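For orientation (not part of the changeset): a short sketch of the Element.iter() and itertext() behaviour these tests exercise, which the C accelerator reimplements later in this patch. The sample document is invented.

    import xml.etree.ElementTree as ET

    root = ET.XML('<a>one<b>two<c/></b><d>three</d></a>')

    # iter() yields the element and its descendants in document (pre-order)
    # order; an optional tag, or '*', filters the result.
    print([e.tag for e in root.iter()])      # ['a', 'b', 'c', 'd']
    print([e.tag for e in root.iter('d')])   # ['d']

    # itertext() yields only the inner text chunks, in document order.
    print(''.join(root.itertext()))          # 'onetwothree'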
- >>> check_method(element.iter("tag").__next__) >>> check_method(element.iterfind("tag").__next__) >>> check_method(element.iterfind("*").__next__) - >>> check_method(tree.iter("tag").__next__) >>> check_method(tree.iterfind("tag").__next__) >>> check_method(tree.iterfind("*").__next__) @@ -291,42 +290,6 @@ 'hello' """ -# Only with Python implementation -def simplefind(): - """ - Test find methods using the elementpath fallback. - - >>> ElementTree = pyET - - >>> CurrentElementPath = ElementTree.ElementPath - >>> ElementTree.ElementPath = ElementTree._SimpleElementPath() - >>> elem = ElementTree.XML(SAMPLE_XML) - >>> elem.find("tag").tag - 'tag' - >>> ElementTree.ElementTree(elem).find("tag").tag - 'tag' - >>> elem.findtext("tag") - 'text' - >>> elem.findtext("tog") - >>> elem.findtext("tog", "default") - 'default' - >>> ElementTree.ElementTree(elem).findtext("tag") - 'text' - >>> summarize_list(elem.findall("tag")) - ['tag', 'tag'] - >>> summarize_list(elem.findall(".//tag")) - ['tag', 'tag', 'tag'] - - Path syntax doesn't work in this case. - - >>> elem.find("section/tag") - >>> elem.findtext("section/tag") - >>> summarize_list(elem.findall("section/tag")) - [] - - >>> ElementTree.ElementPath = CurrentElementPath - """ - def find(): """ Test find methods (including xpath syntax). @@ -1002,36 +965,6 @@ '1 < 2\n' """ -def iterators(): - """ - Test iterators. - - >>> e = ET.XML("this is a paragraph...") - >>> summarize_list(e.iter()) - ['html', 'body', 'i'] - >>> summarize_list(e.find("body").iter()) - ['body', 'i'] - >>> summarize(next(e.iter())) - 'html' - >>> "".join(e.itertext()) - 'this is a paragraph...' - >>> "".join(e.find("body").itertext()) - 'this is a paragraph.' - >>> next(e.itertext()) - 'this is a ' - - Method iterparse should return an iterator. See bug 6472. - - >>> sourcefile = serialize(e, to_string=False) - >>> next(ET.iterparse(sourcefile)) # doctest: +ELLIPSIS - ('end', ) - - >>> tree = ET.ElementTree(None) - >>> tree.iter() - Traceback (most recent call last): - AttributeError: 'NoneType' object has no attribute 'iter' - """ - ENTITY_XML = """\ @@ -1339,6 +1272,7 @@ """.format(html.escape(SIMPLE_XMLFILE, True)) + def xinclude_loader(href, parse="xml", encoding=None): try: data = XINCLUDE[href] @@ -1411,22 +1345,6 @@ >>> # print(serialize(document)) # C5 """ -def xinclude_default(): - """ - >>> from xml.etree import ElementInclude - - >>> document = xinclude_loader("default.xml") - >>> ElementInclude.include(document) - >>> print(serialize(document)) # default - -

Example.

- - text - texttail - - -
- """ # # badly formatted xi:include tags @@ -1917,9 +1835,8 @@ self.assertIsInstance(ET.QName, type) self.assertIsInstance(ET.ElementTree, type) self.assertIsInstance(ET.Element, type) - # XXX issue 14128 with C ElementTree - # self.assertIsInstance(ET.TreeBuilder, type) - # self.assertIsInstance(ET.XMLParser, type) + self.assertIsInstance(ET.TreeBuilder, type) + self.assertIsInstance(ET.XMLParser, type) def test_Element_subclass_trivial(self): class MyElement(ET.Element): @@ -1953,6 +1870,73 @@ self.assertEqual(mye.newmethod(), 'joe') +class ElementIterTest(unittest.TestCase): + def _ilist(self, elem, tag=None): + return summarize_list(elem.iter(tag)) + + def test_basic(self): + doc = ET.XML("this is a paragraph...") + self.assertEqual(self._ilist(doc), ['html', 'body', 'i']) + self.assertEqual(self._ilist(doc.find('body')), ['body', 'i']) + self.assertEqual(next(doc.iter()).tag, 'html') + self.assertEqual(''.join(doc.itertext()), 'this is a paragraph...') + self.assertEqual(''.join(doc.find('body').itertext()), + 'this is a paragraph.') + self.assertEqual(next(doc.itertext()), 'this is a ') + + # iterparse should return an iterator + sourcefile = serialize(doc, to_string=False) + self.assertEqual(next(ET.iterparse(sourcefile))[0], 'end') + + tree = ET.ElementTree(None) + self.assertRaises(AttributeError, tree.iter) + + def test_corners(self): + # single root, no subelements + a = ET.Element('a') + self.assertEqual(self._ilist(a), ['a']) + + # one child + b = ET.SubElement(a, 'b') + self.assertEqual(self._ilist(a), ['a', 'b']) + + # one child and one grandchild + c = ET.SubElement(b, 'c') + self.assertEqual(self._ilist(a), ['a', 'b', 'c']) + + # two children, only first with grandchild + d = ET.SubElement(a, 'd') + self.assertEqual(self._ilist(a), ['a', 'b', 'c', 'd']) + + # replace first child by second + a[0] = a[1] + del a[1] + self.assertEqual(self._ilist(a), ['a', 'd']) + + def test_iter_by_tag(self): + doc = ET.XML(''' + + + bedroom1 + bedroom2 + + nothing here + + + bedroom8 + + ''') + + self.assertEqual(self._ilist(doc, 'room'), ['room'] * 3) + self.assertEqual(self._ilist(doc, 'house'), ['house'] * 2) + + # make sure both tag=None and tag='*' return all tags + all_tags = ['document', 'house', 'room', 'room', + 'shed', 'house', 'room'] + self.assertEqual(self._ilist(doc), all_tags) + self.assertEqual(self._ilist(doc, '*'), all_tags) + + class TreeBuilderTest(unittest.TestCase): sample1 = (' +

Example.

+ + text + texttail + + +''') + + class XMLParserTest(unittest.TestCase): sample1 = '22' sample2 = ('>> cElementTree = cET - >>> e = cElementTree.Element('a') - >>> getattr(e, '\uD800') # doctest: +ELLIPSIS - Traceback (most recent call last): - ... - UnicodeEncodeError: ... - - >>> p = cElementTree.XMLParser() - >>> p.version.split()[0] - 'Expat' - >>> getattr(p, '\uD800') - Traceback (most recent call last): - ... - AttributeError: 'XMLParser' object has no attribute '\ud800' - """ - - class MiscTests(unittest.TestCase): # Issue #8651. @support.bigmemtest(size=support._2G + 100, memuse=1) @@ -46,6 +21,7 @@ finally: data = None + @unittest.skipUnless(cET, 'requires _elementtree') class TestAliasWorking(unittest.TestCase): # Test that the cET alias module is alive @@ -53,6 +29,7 @@ e = cET_alias.Element('foo') self.assertEqual(e.tag, 'foo') + @unittest.skipUnless(cET, 'requires _elementtree') class TestAcceleratorImported(unittest.TestCase): # Test that the C accelerator was imported, as expected @@ -67,7 +44,6 @@ from test import test_xml_etree, test_xml_etree_c # Run the tests specific to the C implementation - support.run_doctest(test_xml_etree_c, verbosity=True) support.run_unittest( MiscTests, TestAliasWorking, diff --git a/Lib/xml/etree/ElementTree.py b/Lib/xml/etree/ElementTree.py --- a/Lib/xml/etree/ElementTree.py +++ b/Lib/xml/etree/ElementTree.py @@ -101,32 +101,8 @@ import re import warnings -class _SimpleElementPath: - # emulate pre-1.2 find/findtext/findall behaviour - def find(self, element, tag, namespaces=None): - for elem in element: - if elem.tag == tag: - return elem - return None - def findtext(self, element, tag, default=None, namespaces=None): - elem = self.find(element, tag) - if elem is None: - return default - return elem.text or "" - def iterfind(self, element, tag, namespaces=None): - if tag[:3] == ".//": - for elem in element.iter(tag[3:]): - yield elem - for elem in element: - if elem.tag == tag: - yield elem - def findall(self, element, tag, namespaces=None): - return list(self.iterfind(element, tag, namespaces)) +from . import ElementPath -try: - from . import ElementPath -except ImportError: - ElementPath = _SimpleElementPath() ## # Parser error. This is a subclass of SyntaxError. @@ -916,11 +892,7 @@ _raise_serialization_error(qname) # populate qname and namespaces table - try: - iterate = elem.iter - except AttributeError: - iterate = elem.getiterator # cET compatibility - for elem in iterate(): + for elem in elem.iter(): tag = elem.tag if isinstance(tag, QName): if tag.text not in qnames: diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,9 @@ Core and Builtins ----------------- +- Issue #15026: utf-16 encoding is now significantly faster (up to 10x). + Patch by Serhiy Storchaka. + - Issue #11022: open() and io.TextIOWrapper are now calling locale.getpreferredencoding(False) instead of locale.getpreferredencoding() in text mode if the encoding is not specified. Don't change temporary the @@ -21,6 +24,32 @@ Library ------- +- Issue #15036: Allow removing or changing multiple items in + single-file mailboxes (mbox, MMDF, Babyl) flushing the mailbox + between the changes. + +- Issue #14059: Implement multiprocessing.Barrier. 
+ +- Issue #15061: The inappropriately named hmac.secure_compare has been + renamed to hmac.compare_digest, restricted to operating on bytes inputs + only and had its documentation updated to more accurately reflect both its + intent and its limitations + +- Issue #13841: Make child processes exit using sys.exit() on Windows. + +- Issue #14936: curses_panel was converted to PEP 3121 and PEP 384 API. + Patch by Robin Schreiber. + +- Issue #1667546: On platforms supporting tm_zone and tm_gmtoff fields + in struct tm, time.struct_time objects returned by time.gmtime(), + time.localtime() and time.strptime() functions now have tm_zone and + tm_gmtoff attributes. Original patch by Paul Boddie. + +- Rename adjusted attribute to adjustable in time.get_clock_info() result. + +- Issue #3518: Remove references to non-existent BaseManager.from_address() + method. + - Issue #13857: Added textwrap.indent() function (initial patch by Ezra Berch) diff --git a/Modules/_curses_panel.c b/Modules/_curses_panel.c --- a/Modules/_curses_panel.c +++ b/Modules/_curses_panel.c @@ -16,8 +16,37 @@ #include -static PyObject *PyCursesError; +typedef struct { + PyObject *PyCursesError; + PyObject *PyCursesPanel_Type; +} _curses_panelstate; +#define _curses_panelstate(o) ((_curses_panelstate *)PyModule_GetState(o)) + +static int +_curses_panel_clear(PyObject *m) +{ + Py_CLEAR(_curses_panelstate(m)->PyCursesError); + return 0; +} + +static int +_curses_panel_traverse(PyObject *m, visitproc visit, void *arg) +{ + Py_VISIT(_curses_panelstate(m)->PyCursesError); + return 0; +} + +static void +_curses_panel_free(void *m) +{ + _curses_panel_clear((PyObject *) m); +} + +static struct PyModuleDef _curses_panelmodule; + +#define _curses_panelstate_global \ +((_curses_panelstate *) PyModule_GetState(PyState_FindModule(&_curses_panelmodule))) /* Utility Functions */ @@ -34,9 +63,9 @@ return Py_None; } else { if (fname == NULL) { - PyErr_SetString(PyCursesError, catchall_ERR); + PyErr_SetString(_curses_panelstate_global->PyCursesError, catchall_ERR); } else { - PyErr_Format(PyCursesError, "%s() returned ERR", fname); + PyErr_Format(_curses_panelstate_global->PyCursesError, "%s() returned ERR", fname); } return NULL; } @@ -54,9 +83,8 @@ PyCursesWindowObject *wo; /* for reference counts */ } PyCursesPanelObject; -PyTypeObject PyCursesPanel_Type; - -#define PyCursesPanel_Check(v) (Py_TYPE(v) == &PyCursesPanel_Type) +#define PyCursesPanel_Check(v) \ + (Py_TYPE(v) == _curses_panelstate_global->PyCursesPanel_Type) /* Some helper functions. The problem is that there's always a window associated with a panel. 
To ensure that Python's GC doesn't pull @@ -175,7 +203,8 @@ { PyCursesPanelObject *po; - po = PyObject_NEW(PyCursesPanelObject, &PyCursesPanel_Type); + po = PyObject_NEW(PyCursesPanelObject, + (PyTypeObject *)(_curses_panelstate_global)->PyCursesPanel_Type); if (po == NULL) return NULL; po->pan = pan; if (insert_lop(po) < 0) { @@ -280,7 +309,7 @@ rtn = replace_panel(self->pan, temp->win); if (rtn == ERR) { - PyErr_SetString(PyCursesError, "replace_panel() returned ERR"); + PyErr_SetString(_curses_panelstate_global->PyCursesError, "replace_panel() returned ERR"); return NULL; } Py_DECREF(po->wo); @@ -305,7 +334,7 @@ PyCursesInitialised; obj = (PyObject *) panel_userptr(self->pan); if (obj == NULL) { - PyErr_SetString(PyCursesError, "no userptr set"); + PyErr_SetString(_curses_panelstate_global->PyCursesError, "no userptr set"); return NULL; } @@ -334,36 +363,18 @@ /* -------------------------------------------------------*/ -PyTypeObject PyCursesPanel_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_curses_panel.curses panel", /*tp_name*/ - sizeof(PyCursesPanelObject), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - /* methods */ - (destructor)PyCursesPanel_Dealloc, /*tp_dealloc*/ - 0, /*tp_print*/ - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - 0, /*tp_reserved*/ - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - 0, /*tp_as_sequence*/ - 0, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT, /*tp_flags*/ - 0, /*tp_doc*/ - 0, /*tp_traverse*/ - 0, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - PyCursesPanel_Methods, /*tp_methods*/ +static PyType_Slot PyCursesPanel_Type_slots[] = { + {Py_tp_dealloc, PyCursesPanel_Dealloc}, + {Py_tp_methods, PyCursesPanel_Methods}, + {0, 0}, +}; + +static PyType_Spec PyCursesPanel_Type_spec = { + "_curses_panel.curses panel", + sizeof(PyCursesPanelObject), + 0, + Py_TPFLAGS_DEFAULT, + PyCursesPanel_Type_slots }; /* Wrapper for panel_above(NULL). 
This function returns the bottom @@ -405,7 +416,7 @@ return NULL; pan = new_panel(win->win); if (pan == NULL) { - PyErr_SetString(PyCursesError, catchall_NULL); + PyErr_SetString(_curses_panelstate_global->PyCursesError, catchall_NULL); return NULL; } return (PyObject *)PyCursesPanel_New(pan, win); @@ -467,12 +478,12 @@ PyModuleDef_HEAD_INIT, "_curses_panel", NULL, - -1, + sizeof(_curses_panelstate), PyCurses_methods, NULL, - NULL, - NULL, - NULL + _curses_panel_traverse, + _curses_panel_clear, + _curses_panel_free }; PyMODINIT_FUNC @@ -480,21 +491,23 @@ { PyObject *m, *d, *v; + /* Create the module and add the functions */ + m = PyModule_Create(&_curses_panelmodule); + if (m == NULL) + goto fail; + d = PyModule_GetDict(m); + /* Initialize object type */ - if (PyType_Ready(&PyCursesPanel_Type) < 0) - return NULL; + _curses_panelstate(m)->PyCursesPanel_Type = \ + PyType_FromSpec(&PyCursesPanel_Type_spec); + if (_curses_panelstate(m)->PyCursesPanel_Type == NULL) + goto fail; import_curses(); - /* Create the module and add the functions */ - m = PyModule_Create(&_curses_panelmodule); - if (m == NULL) - return NULL; - d = PyModule_GetDict(m); - /* For exception _curses_panel.error */ - PyCursesError = PyErr_NewException("_curses_panel.error", NULL, NULL); - PyDict_SetItemString(d, "error", PyCursesError); + _curses_panelstate(m)->PyCursesError = PyErr_NewException("_curses_panel.error", NULL, NULL); + PyDict_SetItemString(d, "error", _curses_panelstate(m)->PyCursesError); /* Make the version available */ v = PyUnicode_FromString(PyCursesVersion); @@ -502,4 +515,7 @@ PyDict_SetItemString(d, "__version__", v); Py_DECREF(v); return m; + fail: + Py_XDECREF(m); + return NULL; } diff --git a/Modules/_decimal/libmpdec/mpdecimal.c b/Modules/_decimal/libmpdec/mpdecimal.c --- a/Modules/_decimal/libmpdec/mpdecimal.c +++ b/Modules/_decimal/libmpdec/mpdecimal.c @@ -107,8 +107,9 @@ const mpd_context_t *ctx, uint32_t *status); static void _mpd_base_ndivmod(mpd_t *q, mpd_t *r, const mpd_t *a, const mpd_t *b, uint32_t *status); -static inline void _mpd_qpow_uint(mpd_t *result, mpd_t *base, mpd_uint_t exp, - uint8_t resultsign, const mpd_context_t *ctx, uint32_t *status); +static inline void _mpd_qpow_uint(mpd_t *result, const mpd_t *base, + mpd_uint_t exp, uint8_t resultsign, + const mpd_context_t *ctx, uint32_t *status); mpd_uint_t mpd_qsshiftr(mpd_t *result, const mpd_t *a, mpd_ssize_t n); @@ -5841,12 +5842,12 @@ } /* - * Internal function: Integer power with mpd_uint_t exponent, base is modified! - * Function can fail with MPD_Malloc_error. + * Internal function: Integer power with mpd_uint_t exponent. The function + * can fail with MPD_Malloc_error. 
*/ static inline void -_mpd_qpow_uint(mpd_t *result, mpd_t *base, mpd_uint_t exp, uint8_t resultsign, - const mpd_context_t *ctx, uint32_t *status) +_mpd_qpow_uint(mpd_t *result, const mpd_t *base, mpd_uint_t exp, + uint8_t resultsign, const mpd_context_t *ctx, uint32_t *status) { uint32_t workstatus = 0; mpd_uint_t n; @@ -5866,7 +5867,8 @@ if (exp & n) { mpd_qmul(result, result, base, ctx, &workstatus); } - if (workstatus & (MPD_Overflow|MPD_Clamped)) { + if (mpd_isspecial(result) || + (mpd_iszerocoeff(result) && (workstatus & MPD_Clamped))) { break; } } diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c --- a/Modules/_elementtree.c +++ b/Modules/_elementtree.c @@ -103,8 +103,6 @@ /* glue functions (see the init function for details) */ static PyObject* elementtree_parseerror_obj; static PyObject* elementtree_deepcopy_obj; -static PyObject* elementtree_iter_obj; -static PyObject* elementtree_itertext_obj; static PyObject* elementpath_obj; /* helpers */ @@ -1109,67 +1107,32 @@ return list; } -static PyObject* -element_iter(ElementObject* self, PyObject* args) + +static PyObject * +create_elementiter(ElementObject *self, PyObject *tag, int gettext); + + +static PyObject * +element_iter(ElementObject *self, PyObject *args) { - PyObject* result; - PyObject* tag = Py_None; if (!PyArg_ParseTuple(args, "|O:iter", &tag)) return NULL; - if (!elementtree_iter_obj) { - PyErr_SetString( - PyExc_RuntimeError, - "iter helper not found" - ); - return NULL; - } - - args = PyTuple_New(2); - if (!args) - return NULL; - - Py_INCREF(self); PyTuple_SET_ITEM(args, 0, (PyObject*) self); - Py_INCREF(tag); PyTuple_SET_ITEM(args, 1, (PyObject*) tag); - - result = PyObject_CallObject(elementtree_iter_obj, args); - - Py_DECREF(args); - - return result; + return create_elementiter(self, tag, 0); } static PyObject* element_itertext(ElementObject* self, PyObject* args) { - PyObject* result; - if (!PyArg_ParseTuple(args, ":itertext")) return NULL; - if (!elementtree_itertext_obj) { - PyErr_SetString( - PyExc_RuntimeError, - "itertext helper not found" - ); - return NULL; - } - - args = PyTuple_New(1); - if (!args) - return NULL; - - Py_INCREF(self); PyTuple_SET_ITEM(args, 0, (PyObject*) self); - - result = PyObject_CallObject(elementtree_itertext_obj, args); - - Py_DECREF(args); - - return result; + return create_elementiter(self, Py_None, 1); } + static PyObject* element_getitem(PyObject* self_, Py_ssize_t index) { @@ -1790,6 +1753,269 @@ 0, /* tp_free */ }; +/******************************* Element iterator ****************************/ + +/* ElementIterObject represents the iteration state over an XML element in + * pre-order traversal. To keep track of which sub-element should be returned + * next, a stack of parents is maintained. This is a standard stack-based + * iterative pre-order traversal of a tree. + * The stack is managed using a single-linked list starting at parent_stack. + * Each stack node contains the saved parent to which we should return after + * the current one is exhausted, and the next child to examine in that parent. 
+ */ +typedef struct ParentLocator_t { + ElementObject *parent; + Py_ssize_t child_index; + struct ParentLocator_t *next; +} ParentLocator; + +typedef struct { + PyObject_HEAD + ParentLocator *parent_stack; + ElementObject *root_element; + PyObject *sought_tag; + int root_done; + int gettext; +} ElementIterObject; + + +static void +elementiter_dealloc(ElementIterObject *it) +{ + ParentLocator *p = it->parent_stack; + while (p) { + ParentLocator *temp = p; + Py_XDECREF(p->parent); + p = p->next; + PyObject_Free(temp); + } + + Py_XDECREF(it->sought_tag); + Py_XDECREF(it->root_element); + + PyObject_GC_UnTrack(it); + PyObject_GC_Del(it); +} + +static int +elementiter_traverse(ElementIterObject *it, visitproc visit, void *arg) +{ + ParentLocator *p = it->parent_stack; + while (p) { + Py_VISIT(p->parent); + p = p->next; + } + + Py_VISIT(it->root_element); + Py_VISIT(it->sought_tag); + return 0; +} + +/* Helper function for elementiter_next. Add a new parent to the parent stack. + */ +static ParentLocator * +parent_stack_push_new(ParentLocator *stack, ElementObject *parent) +{ + ParentLocator *new_node = PyObject_Malloc(sizeof(ParentLocator)); + if (new_node) { + new_node->parent = parent; + Py_INCREF(parent); + new_node->child_index = 0; + new_node->next = stack; + } + return new_node; +} + +static PyObject * +elementiter_next(ElementIterObject *it) +{ + /* Sub-element iterator. + * + * A short note on gettext: this function serves both the iter() and + * itertext() methods to avoid code duplication. However, there are a few + * small differences in the way these iterations work. Namely: + * - itertext() only yields text from nodes that have it, and continues + * iterating when a node doesn't have text (so it doesn't return any + * node like iter()) + * - itertext() also has to handle tail, after finishing with all the + * children of a node. + */ + ElementObject *cur_parent; + Py_ssize_t child_index; + + while (1) { + /* Handle the case reached in the beginning and end of iteration, where + * the parent stack is empty. The root_done flag gives us indication + * whether we've just started iterating (so root_done is 0), in which + * case the root is returned. If root_done is 1 and we're here, the + * iterator is exhausted. + */ + if (!it->parent_stack->parent) { + if (it->root_done) { + PyErr_SetNone(PyExc_StopIteration); + return NULL; + } else { + it->parent_stack = parent_stack_push_new(it->parent_stack, + it->root_element); + if (!it->parent_stack) { + PyErr_NoMemory(); + return NULL; + } + + it->root_done = 1; + if (it->sought_tag == Py_None || + PyObject_RichCompareBool(it->root_element->tag, + it->sought_tag, Py_EQ) == 1) { + if (it->gettext) { + PyObject *text = JOIN_OBJ(it->root_element->text); + if (PyObject_IsTrue(text)) { + Py_INCREF(text); + return text; + } + } else { + Py_INCREF(it->root_element); + return (PyObject *)it->root_element; + } + } + } + } + + /* See if there are children left to traverse in the current parent. If + * yes, visit the next child. If not, pop the stack and try again. 
+ */ + cur_parent = it->parent_stack->parent; + child_index = it->parent_stack->child_index; + if (cur_parent->extra && child_index < cur_parent->extra->length) { + ElementObject *child = (ElementObject *) + cur_parent->extra->children[child_index]; + it->parent_stack->child_index++; + it->parent_stack = parent_stack_push_new(it->parent_stack, + child); + if (!it->parent_stack) { + PyErr_NoMemory(); + return NULL; + } + + if (it->gettext) { + PyObject *text = JOIN_OBJ(child->text); + if (PyObject_IsTrue(text)) { + Py_INCREF(text); + return text; + } + } else if (it->sought_tag == Py_None || + PyObject_RichCompareBool(child->tag, + it->sought_tag, Py_EQ) == 1) { + Py_INCREF(child); + return (PyObject *)child; + } + else + continue; + } + else { + PyObject *tail = it->gettext ? JOIN_OBJ(cur_parent->tail) : Py_None; + ParentLocator *next = it->parent_stack->next; + Py_XDECREF(it->parent_stack->parent); + PyObject_Free(it->parent_stack); + it->parent_stack = next; + + /* Note that extra condition on it->parent_stack->parent here; + * this is because itertext() is supposed to only return *inner* + * text, not text following the element it began iteration with. + */ + if (it->parent_stack->parent && PyObject_IsTrue(tail)) { + Py_INCREF(tail); + return tail; + } + } + } + + return NULL; +} + + +static PyTypeObject ElementIter_Type = { + PyVarObject_HEAD_INIT(NULL, 0) + "_elementtree._element_iterator", /* tp_name */ + sizeof(ElementIterObject), /* tp_basicsize */ + 0, /* tp_itemsize */ + /* methods */ + (destructor)elementiter_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + 0, /* tp_reserved */ + 0, /* tp_repr */ + 0, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + 0, /* tp_hash */ + 0, /* tp_call */ + 0, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */ + 0, /* tp_doc */ + (traverseproc)elementiter_traverse, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + PyObject_SelfIter, /* tp_iter */ + (iternextfunc)elementiter_next, /* tp_iternext */ + 0, /* tp_methods */ + 0, /* tp_members */ + 0, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + 0, /* tp_init */ + 0, /* tp_alloc */ + 0, /* tp_new */ +}; + + +static PyObject * +create_elementiter(ElementObject *self, PyObject *tag, int gettext) +{ + ElementIterObject *it; + PyObject *star = NULL; + + it = PyObject_GC_New(ElementIterObject, &ElementIter_Type); + if (!it) + return NULL; + if (!(it->parent_stack = PyObject_Malloc(sizeof(ParentLocator)))) { + PyObject_GC_Del(it); + return NULL; + } + + it->parent_stack->parent = NULL; + it->parent_stack->child_index = 0; + it->parent_stack->next = NULL; + + if (PyUnicode_Check(tag)) + star = PyUnicode_FromString("*"); + else if (PyBytes_Check(tag)) + star = PyBytes_FromString("*"); + + if (star && PyObject_RichCompareBool(tag, star, Py_EQ) == 1) + tag = Py_None; + + Py_XDECREF(star); + it->sought_tag = tag; + it->root_done = 0; + it->gettext = gettext; + it->root_element = self; + + Py_INCREF(self); + Py_INCREF(tag); + + PyObject_GC_Track(it); + return (PyObject *)it; +} + + /* ==================================================================== */ /* the tree builder type */ @@ -3238,8 +3464,7 @@ PyMODINIT_FUNC PyInit__elementtree(void) { - PyObject *m, *g, *temp; - char* bootstrap; + PyObject *m, *temp; /* Initialize object 
types */ if (PyType_Ready(&TreeBuilder_Type) < 0) @@ -3255,44 +3480,6 @@ if (!m) return NULL; - /* The code below requires that the module gets already added - to sys.modules. */ - PyDict_SetItemString(PyImport_GetModuleDict(), - _elementtreemodule.m_name, - m); - - /* python glue code */ - - g = PyDict_New(); - if (!g) - return NULL; - - PyDict_SetItemString(g, "__builtins__", PyEval_GetBuiltins()); - - bootstrap = ( - "def iter(node, tag=None):\n" /* helper */ - " if tag == '*':\n" - " tag = None\n" - " if tag is None or node.tag == tag:\n" - " yield node\n" - " for node in node:\n" - " for node in iter(node, tag):\n" - " yield node\n" - - "def itertext(node):\n" /* helper */ - " if node.text:\n" - " yield node.text\n" - " for e in node:\n" - " for s in e.itertext():\n" - " yield s\n" - " if e.tail:\n" - " yield e.tail\n" - - ); - - if (!PyRun_String(bootstrap, Py_file_input, g, NULL)) - return NULL; - if (!(temp = PyImport_ImportModule("copy"))) return NULL; elementtree_deepcopy_obj = PyObject_GetAttrString(temp, "deepcopy"); @@ -3301,9 +3488,6 @@ if (!(elementpath_obj = PyImport_ImportModule("xml.etree.ElementPath"))) return NULL; - elementtree_iter_obj = PyDict_GetItemString(g, "iter"); - elementtree_itertext_obj = PyDict_GetItemString(g, "itertext"); - /* link against pyexpat */ expat_capi = PyCapsule_Import(PyExpat_CAPSULE_NAME, 0); if (expat_capi) { diff --git a/Modules/timemodule.c b/Modules/timemodule.c --- a/Modules/timemodule.c +++ b/Modules/timemodule.c @@ -96,7 +96,7 @@ info->implementation = "clock()"; info->resolution = 1.0 / (double)CLOCKS_PER_SEC; info->monotonic = 1; - info->adjusted = 0; + info->adjustable = 0; } return PyFloat_FromDouble((double)value / CLOCKS_PER_SEC); } @@ -132,7 +132,7 @@ info->implementation = "QueryPerformanceCounter()"; info->resolution = 1.0 / (double)cpu_frequency; info->monotonic = 1; - info->adjusted = 0; + info->adjustable = 0; } *result = PyFloat_FromDouble(diff / (double)cpu_frequency); return 0; @@ -275,6 +275,10 @@ {"tm_wday", "day of week, range [0, 6], Monday is 0"}, {"tm_yday", "day of year, range [1, 366]"}, {"tm_isdst", "1 if summer time is in effect, 0 if not, and -1 if unknown"}, +#ifdef HAVE_STRUCT_TM_TM_ZONE + {"tm_zone", "abbreviation of timezone name"}, + {"tm_gmtoff", "offset from UTC in seconds"}, +#endif /* HAVE_STRUCT_TM_TM_ZONE */ {0} }; @@ -294,6 +298,7 @@ static int initialized; static PyTypeObject StructTimeType; + static PyObject * tmtotuple(struct tm *p) { @@ -312,6 +317,11 @@ SET(6, (p->tm_wday + 6) % 7); /* Want Monday == 0 */ SET(7, p->tm_yday + 1); /* Want January, 1 == 1 */ SET(8, p->tm_isdst); +#ifdef HAVE_STRUCT_TM_TM_ZONE + PyStructSequence_SET_ITEM(v, 9, + PyUnicode_DecodeLocale(p->tm_zone, "surrogateescape")); + SET(10, p->tm_gmtoff); +#endif /* HAVE_STRUCT_TM_TM_ZONE */ #undef SET if (PyErr_Occurred()) { Py_XDECREF(v); @@ -371,7 +381,10 @@ tm_sec, tm_wday, tm_yday, tm_isdst)\n\ \n\ Convert seconds since the Epoch to a time tuple expressing UTC (a.k.a.\n\ -GMT). When 'seconds' is not passed in, convert the current time instead."); +GMT). 
When 'seconds' is not passed in, convert the current time instead.\n\ +\n\ +If the platform supports the tm_gmtoff and tm_zone, they are available as\n\ +attributes only."); static int pylocaltime(time_t *timep, struct tm *result) @@ -401,7 +414,7 @@ if (!parse_time_t_args(args, "|O:localtime", &when)) return NULL; - if (pylocaltime(&when, &buf) == 1) + if (pylocaltime(&when, &buf) == -1) return NULL; return tmtotuple(&buf); } @@ -438,6 +451,17 @@ p->tm_mon--; p->tm_wday = (p->tm_wday + 1) % 7; p->tm_yday--; +#ifdef HAVE_STRUCT_TM_TM_ZONE + if (Py_TYPE(args) == &StructTimeType) { + PyObject *item; + item = PyTuple_GET_ITEM(args, 9); + p->tm_zone = item == Py_None ? NULL : _PyUnicode_AsString(item); + item = PyTuple_GET_ITEM(args, 10); + p->tm_gmtoff = item == Py_None ? 0 : PyLong_AsLong(item); + if (PyErr_Occurred()) + return 0; + } +#endif /* HAVE_STRUCT_TM_TM_ZONE */ return 1; } @@ -778,7 +802,10 @@ PyDoc_STRVAR(mktime_doc, "mktime(tuple) -> floating point number\n\ \n\ -Convert a time tuple in local time to seconds since the Epoch."); +Convert a time tuple in local time to seconds since the Epoch.\n\ +Note that mktime(gmtime(0)) will not generally return zero for most\n\ +time zones; instead the returned value will either be equal to that\n\ +of the timezone or altzone attributes on the time module."); #endif /* HAVE_MKTIME */ #ifdef HAVE_WORKING_TZSET @@ -882,7 +909,7 @@ return NULL; } info->resolution = timeIncrement * 1e-7; - info->adjusted = 0; + info->adjustable = 0; } return PyFloat_FromDouble(result); @@ -903,7 +930,7 @@ info->implementation = "mach_absolute_time()"; info->resolution = (double)timebase.numer / timebase.denom * 1e-9; info->monotonic = 1; - info->adjusted = 0; + info->adjustable = 0; } return PyFloat_FromDouble(secs); @@ -926,13 +953,7 @@ struct timespec res; info->monotonic = 1; info->implementation = function; -#if (defined(linux) || defined(__linux) || defined(__linux__)) \ - && !defined(CLOCK_HIGHRES) - /* CLOCK_MONOTONIC is adjusted on Linux */ - info->adjusted = 1; -#else - info->adjusted = 0; -#endif + info->adjustable = 0; if (clock_getres(clk_id, &res) == 0) info->resolution = res.tv_sec + res.tv_nsec * 1e-9; else @@ -1024,7 +1045,7 @@ info->implementation = "GetProcessTimes()"; info->resolution = 1e-7; info->monotonic = 1; - info->adjusted = 0; + info->adjustable = 0; } return PyFloat_FromDouble(total * 1e-7); #else @@ -1053,7 +1074,7 @@ struct timespec res; info->implementation = function; info->monotonic = 1; - info->adjusted = 0; + info->adjustable = 0; if (clock_getres(clk_id, &res) == 0) info->resolution = res.tv_sec + res.tv_nsec * 1e-9; else @@ -1071,7 +1092,7 @@ if (info) { info->implementation = "getrusage(RUSAGE_SELF)"; info->monotonic = 1; - info->adjusted = 0; + info->adjustable = 0; info->resolution = 1e-6; } return PyFloat_FromDouble(total); @@ -1100,7 +1121,7 @@ if (info) { info->implementation = "times()"; info->monotonic = 1; - info->adjusted = 0; + info->adjustable = 0; info->resolution = 1.0 / ticks_per_second; } return PyFloat_FromDouble(total); @@ -1124,35 +1145,12 @@ Process time for profiling: sum of the kernel and user-space CPU time."); -static PyTypeObject ClockInfoType; - -PyDoc_STRVAR(ClockInfo_docstring, - "Clock information"); - -static PyStructSequence_Field ClockInfo_fields[] = { - {"implementation", "name of the underlying C function " - "used to get the clock value"}, - {"monotonic", "True if the clock cannot go backward, False otherwise"}, - {"adjusted", "True if the clock can be adjusted " - "(e.g. 
by a NTP daemon), False otherwise"}, - {"resolution", "resolution of the clock in seconds"}, - {NULL, NULL} -}; - -static PyStructSequence_Desc ClockInfo_desc = { - "time.clock_info", - ClockInfo_docstring, - ClockInfo_fields, - 4, -}; - static PyObject * time_get_clock_info(PyObject *self, PyObject *args) { char *name; - PyObject *obj; _Py_clock_info_t info; - PyObject *result; + PyObject *obj = NULL, *dict, *ns; if (!PyArg_ParseTuple(args, "s:get_clock_info", &name)) return NULL; @@ -1160,12 +1158,12 @@ #ifdef Py_DEBUG info.implementation = NULL; info.monotonic = -1; - info.adjusted = -1; + info.adjustable = -1; info.resolution = -1.0; #else info.implementation = ""; info.monotonic = 0; - info.adjusted = 0; + info.adjustable = 0; info.resolution = 1.0; #endif @@ -1191,39 +1189,50 @@ return NULL; Py_DECREF(obj); - result = PyStructSequence_New(&ClockInfoType); - if (result == NULL) + dict = PyDict_New(); + if (dict == NULL) return NULL; assert(info.implementation != NULL); obj = PyUnicode_FromString(info.implementation); if (obj == NULL) goto error; - PyStructSequence_SET_ITEM(result, 0, obj); + if (PyDict_SetItemString(dict, "implementation", obj) == -1) + goto error; + Py_CLEAR(obj); assert(info.monotonic != -1); obj = PyBool_FromLong(info.monotonic); if (obj == NULL) goto error; - PyStructSequence_SET_ITEM(result, 1, obj); + if (PyDict_SetItemString(dict, "monotonic", obj) == -1) + goto error; + Py_CLEAR(obj); - assert(info.adjusted != -1); - obj = PyBool_FromLong(info.adjusted); + assert(info.adjustable != -1); + obj = PyBool_FromLong(info.adjustable); if (obj == NULL) goto error; - PyStructSequence_SET_ITEM(result, 2, obj); + if (PyDict_SetItemString(dict, "adjustable", obj) == -1) + goto error; + Py_CLEAR(obj); assert(info.resolution > 0.0); assert(info.resolution <= 1.0); obj = PyFloat_FromDouble(info.resolution); if (obj == NULL) goto error; - PyStructSequence_SET_ITEM(result, 3, obj); + if (PyDict_SetItemString(dict, "resolution", obj) == -1) + goto error; + Py_CLEAR(obj); - return result; + ns = _PyNamespace_New(dict); + Py_DECREF(dict); + return ns; error: - Py_DECREF(result); + Py_DECREF(dict); + Py_XDECREF(obj); return NULL; } @@ -1451,11 +1460,6 @@ PyStructSequence_InitType(&StructTimeType, &struct_time_type_desc); - /* initialize ClockInfoType */ - PyStructSequence_InitType(&ClockInfoType, &ClockInfo_desc); - Py_INCREF(&ClockInfoType); - PyModule_AddObject(m, "clock_info", (PyObject*)&ClockInfoType); - #ifdef MS_WINDOWS winver.dwOSVersionInfoSize = sizeof(winver); if (!GetVersionEx((OSVERSIONINFO*)&winver)) { @@ -1466,6 +1470,11 @@ #endif } Py_INCREF(&StructTimeType); +#ifdef HAVE_STRUCT_TM_TM_ZONE + PyModule_AddIntConstant(m, "_STRUCT_TM_ITEMS", 11); +#else + PyModule_AddIntConstant(m, "_STRUCT_TM_ITEMS", 9); +#endif PyModule_AddObject(m, "struct_time", (PyObject*) &StructTimeType); initialized = 1; return m; @@ -1488,7 +1497,7 @@ struct timespec res; info->implementation = "clock_gettime(CLOCK_REALTIME)"; info->monotonic = 0; - info->adjusted = 1; + info->adjustable = 1; if (clock_getres(CLOCK_REALTIME, &res) == 0) info->resolution = res.tv_sec + res.tv_nsec * 1e-9; else diff --git a/Objects/stringlib/codecs.h b/Objects/stringlib/codecs.h --- a/Objects/stringlib/codecs.h +++ b/Objects/stringlib/codecs.h @@ -562,4 +562,68 @@ #undef STRIPPED_MASK #undef SWAB #undef LONG_PTR_MASK + + +Py_LOCAL_INLINE(void) +STRINGLIB(utf16_encode)(unsigned short *out, + const STRINGLIB_CHAR *in, + Py_ssize_t len, + int native_ordering) +{ + const STRINGLIB_CHAR *end = in + len; +#if 
STRINGLIB_SIZEOF_CHAR == 1 +# define SWAB2(CH) ((CH) << 8) +#else +# define SWAB2(CH) (((CH) << 8) | ((CH) >> 8)) +#endif +#if STRINGLIB_MAX_CHAR < 0x10000 + if (native_ordering) { +# if STRINGLIB_SIZEOF_CHAR == 2 + Py_MEMCPY(out, in, 2 * len); +# else + _PyUnicode_CONVERT_BYTES(STRINGLIB_CHAR, unsigned short, in, end, out); +# endif + } else { + const STRINGLIB_CHAR *unrolled_end = in + (len & ~ (Py_ssize_t) 3); + while (in < unrolled_end) { + out[0] = SWAB2(in[0]); + out[1] = SWAB2(in[1]); + out[2] = SWAB2(in[2]); + out[3] = SWAB2(in[3]); + in += 4; out += 4; + } + while (in < end) { + *out++ = SWAB2(*in); + ++in; + } + } +#else + if (native_ordering) { + while (in < end) { + Py_UCS4 ch = *in++; + if (ch < 0x10000) + *out++ = ch; + else { + out[0] = Py_UNICODE_HIGH_SURROGATE(ch); + out[1] = Py_UNICODE_LOW_SURROGATE(ch); + out += 2; + } + } + } else { + while (in < end) { + Py_UCS4 ch = *in++; + if (ch < 0x10000) + *out++ = SWAB2((Py_UCS2)ch); + else { + Py_UCS2 ch1 = Py_UNICODE_HIGH_SURROGATE(ch); + Py_UCS2 ch2 = Py_UNICODE_LOW_SURROGATE(ch); + out[0] = SWAB2(ch1); + out[1] = SWAB2(ch2); + out += 2; + } + } + } +#endif +#undef SWAB2 +} #endif /* STRINGLIB_IS_UNICODE */ diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -5359,26 +5359,18 @@ const char *errors, int byteorder) { - int kind; - void *data; + enum PyUnicode_Kind kind; + const void *data; Py_ssize_t len; PyObject *v; - unsigned char *p; - Py_ssize_t nsize, bytesize; - Py_ssize_t i, pairs; - /* Offsets from p for storing byte pairs in the right order. */ -#ifdef BYTEORDER_IS_LITTLE_ENDIAN - int ihi = 1, ilo = 0; + unsigned short *out; + Py_ssize_t bytesize; + Py_ssize_t pairs; +#ifdef WORDS_BIGENDIAN + int native_ordering = byteorder >= 0; #else - int ihi = 0, ilo = 1; -#endif - -#define STORECHAR(CH) \ - do { \ - p[ihi] = ((CH) >> 8) & 0xff; \ - p[ilo] = (CH) & 0xff; \ - p += 2; \ - } while(0) + int native_ordering = byteorder <= 0; +#endif if (!PyUnicode_Check(str)) { PyErr_BadArgument(); @@ -5391,53 +5383,47 @@ len = PyUnicode_GET_LENGTH(str); pairs = 0; - if (kind == PyUnicode_4BYTE_KIND) - for (i = 0; i < len; i++) - if (PyUnicode_READ(kind, data, i) >= 0x10000) + if (kind == PyUnicode_4BYTE_KIND) { + const Py_UCS4 *in = (const Py_UCS4 *)data; + const Py_UCS4 *end = in + len; + while (in < end) + if (*in++ >= 0x10000) pairs++; - /* 2 * (len + pairs + (byteorder == 0)) */ - if (len > PY_SSIZE_T_MAX - pairs - (byteorder == 0)) + } + if (len > PY_SSIZE_T_MAX / 2 - pairs - (byteorder == 0)) return PyErr_NoMemory(); - nsize = len + pairs + (byteorder == 0); - bytesize = nsize * 2; - if (bytesize / 2 != nsize) - return PyErr_NoMemory(); + bytesize = (len + pairs + (byteorder == 0)) * 2; v = PyBytes_FromStringAndSize(NULL, bytesize); if (v == NULL) return NULL; - p = (unsigned char *)PyBytes_AS_STRING(v); + /* output buffer is 2-bytes aligned */ + assert(((Py_uintptr_t)PyBytes_AS_STRING(v) & 1) == 0); + out = (unsigned short *)PyBytes_AS_STRING(v); if (byteorder == 0) - STORECHAR(0xFEFF); + *out++ = 0xFEFF; if (len == 0) goto done; - if (byteorder == -1) { - /* force LE */ - ihi = 1; - ilo = 0; - } - else if (byteorder == 1) { - /* force BE */ - ihi = 0; - ilo = 1; - } - - for (i = 0; i < len; i++) { - Py_UCS4 ch = PyUnicode_READ(kind, data, i); - Py_UCS4 ch2 = 0; - if (ch >= 0x10000) { - ch2 = Py_UNICODE_LOW_SURROGATE(ch); - ch = Py_UNICODE_HIGH_SURROGATE(ch); - } - STORECHAR(ch); - if (ch2) - STORECHAR(ch2); + switch (kind) { + case PyUnicode_1BYTE_KIND: 
{ + ucs1lib_utf16_encode(out, (const Py_UCS1 *)data, len, native_ordering); + break; + } + case PyUnicode_2BYTE_KIND: { + ucs2lib_utf16_encode(out, (const Py_UCS2 *)data, len, native_ordering); + break; + } + case PyUnicode_4BYTE_KIND: { + ucs4lib_utf16_encode(out, (const Py_UCS4 *)data, len, native_ordering); + break; + } + default: + assert(0); } done: return v; -#undef STORECHAR } PyObject * diff --git a/PC/VS9.0/pythoncore.vcproj b/PC/VS9.0/pythoncore.vcproj --- a/PC/VS9.0/pythoncore.vcproj +++ b/PC/VS9.0/pythoncore.vcproj @@ -803,6 +803,10 @@ > + + @@ -1563,6 +1567,10 @@ > + + diff --git a/Python/pytime.c b/Python/pytime.c --- a/Python/pytime.c +++ b/Python/pytime.c @@ -44,10 +44,7 @@ (void) GetSystemTimeAdjustment(&timeAdjustment, &timeIncrement, &isTimeAdjustmentDisabled); info->resolution = timeIncrement * 1e-7; - if (isTimeAdjustmentDisabled) - info->adjusted = 0; - else - info->adjusted = 1; + info->adjustable = 1; } #else /* There are three ways to get the time: @@ -71,7 +68,7 @@ info->implementation = "gettimeofday()"; info->resolution = 1e-6; info->monotonic = 0; - info->adjusted = 1; + info->adjustable = 1; } return; } @@ -87,7 +84,7 @@ info->implementation = "ftime()"; info->resolution = 1e-3; info->monotonic = 0; - info->adjusted = 1; + info->adjustable = 1; } } #else /* !HAVE_FTIME */ @@ -97,7 +94,7 @@ info->implementation = "time()"; info->resolution = 1.0; info->monotonic = 0; - info->adjusted = 1; + info->adjustable = 1; } #endif /* !HAVE_FTIME */ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 16 01:21:13 2012 From: python-checkins at python.org (brett.cannon) Date: Sat, 16 Jun 2012 01:21:13 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Closes_issue_=2314982=3A_Do?= =?utf8?q?cument_that_pkgutil=27s_walk=5Fpackages=28=29_and?= Message-ID: http://hg.python.org/cpython/rev/b92fa1c5a96b changeset: 77455:b92fa1c5a96b user: Brett Cannon date: Fri Jun 15 19:21:07 2012 -0400 summary: Closes issue #14982: Document that pkgutil's walk_packages() and iter_modules() requires iter_modules() be defined on an importer. The importers in importlib do not define this non-standard method. files: Doc/library/pkgutil.rst | 8 ++++++++ Misc/NEWS | 4 ++++ 2 files changed, 12 insertions(+), 0 deletions(-) diff --git a/Doc/library/pkgutil.rst b/Doc/library/pkgutil.rst --- a/Doc/library/pkgutil.rst +++ b/Doc/library/pkgutil.rst @@ -138,6 +138,10 @@ *prefix* is a string to output on the front of every module name on output. + .. note:: + Only works for importers which define a ``iter_modules()`` method, which + is non-standard but implemented by classes defined in this module. + .. function:: walk_packages(path=None, prefix='', onerror=None) @@ -166,6 +170,10 @@ # list all submodules of ctypes walk_packages(ctypes.__path__, ctypes.__name__ + '.') + .. note:: + Only works for importers which define a ``iter_modules()`` method, which + is non-standard but implemented by classes defined in this module. + .. function:: get_data(package, resource) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -24,6 +24,10 @@ Library ------- +- Issue #14982: Document that pkgutil's iteration functions require the + non-standard iter_modules() method to be defined by an importer (something + the importlib importers do not define). + - Issue #15036: Allow removing or changing multiple items in single-file mailboxes (mbox, MMDF, Babyl) flushing the mailbox between the changes. 
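As a rough illustration of the behaviour that the note added above documents, here is a small sketch of the per-path-entry lookup that walk_packages() and iter_modules() depend on. It is not part of the patch: the helper name importer_modules is invented for this example, and the real pkgutil code additionally caches importers and de-duplicates module names.

    import pkgutil
    import sys

    def importer_modules(path_entry, prefix=''):
        # Sketch only: pkgutil can enumerate modules for a path entry only
        # when the importer returned by get_importer() exposes the
        # non-standard iter_modules() method; otherwise nothing is
        # reported for that entry.
        importer = pkgutil.get_importer(path_entry)
        if importer is None or not hasattr(importer, 'iter_modules'):
            return []
        return list(importer.iter_modules(prefix))

    for entry in sys.path[:3]:
        print(entry, '->', len(importer_modules(entry)), 'modules visible')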
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 16 01:39:12 2012 From: python-checkins at python.org (brett.cannon) Date: Sat, 16 Jun 2012 01:39:12 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2313959=3A_Add_to_im?= =?utf8?q?p=2Efind=5Fmodule=28=29_and_load=5Fmodule=27s_docstrings?= Message-ID: http://hg.python.org/cpython/rev/034c814eb187 changeset: 77456:034c814eb187 user: Brett Cannon date: Fri Jun 15 19:39:06 2012 -0400 summary: Issue #13959: Add to imp.find_module() and load_module's docstrings that they are deprecated (previous commit documented this fact in the module docs). files: Lib/imp.py | 10 ++++++---- 1 files changed, 6 insertions(+), 4 deletions(-) diff --git a/Lib/imp.py b/Lib/imp.py --- a/Lib/imp.py +++ b/Lib/imp.py @@ -150,9 +150,10 @@ return _bootstrap.SourceFileLoader(name, path).load_module(name) -# XXX deprecate def load_module(name, file, filename, details): - """Load a module, given information returned by find_module(). + """**DEPRECATED** + + Load a module, given information returned by find_module(). The module name must include the full package name, if any. @@ -180,9 +181,10 @@ raise ImportError(msg, name=name) -# XXX deprecate def find_module(name, path=None): - """Search for a module. + """**DEPRECATED** + + Search for a module. If path is omitted or None, search for a built-in, frozen or special module and continue search in sys.path. The module name cannot -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 16 02:01:20 2012 From: python-checkins at python.org (brett.cannon) Date: Sat, 16 Jun 2012 02:01:20 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314938=3A_importlib?= =?utf8?q?=2Eabc=2ESourceLoader=2Eis=5Fpackage=28=29_now_takes_the?= Message-ID: http://hg.python.org/cpython/rev/240b7467e65c changeset: 77457:240b7467e65c user: Brett Cannon date: Fri Jun 15 20:00:53 2012 -0400 summary: Issue #14938: importlib.abc.SourceLoader.is_package() now takes the module name into consideration when determining whether a module is a package or not. This prevents importing a module's __init__ module directly and having it considered a package, which can lead to duplicate sub-modules. Thanks to Ronan Lamy for reporting the bug. files: Doc/library/importlib.rst | 6 ++++-- Lib/importlib/_bootstrap.py | 4 +++- Lib/importlib/test/source/test_abc_loader.py | 5 +++-- Misc/NEWS | 4 ++++ 4 files changed, 14 insertions(+), 5 deletions(-) diff --git a/Doc/library/importlib.rst b/Doc/library/importlib.rst --- a/Doc/library/importlib.rst +++ b/Doc/library/importlib.rst @@ -351,8 +351,10 @@ .. method:: is_package(self, fullname) Concrete implementation of :meth:`InspectLoader.is_package`. A module - is determined to be a package if its file path is a file named - ``__init__`` when the file extension is removed. + is determined to be a package if its file path (as provided by + :meth:`ExecutionLoader.get_filename`) is a file named + ``__init__`` when the file extension is removed **and** the module name + itself does not end in ``__init__``. .. 
class:: PyLoader diff --git a/Lib/importlib/_bootstrap.py b/Lib/importlib/_bootstrap.py --- a/Lib/importlib/_bootstrap.py +++ b/Lib/importlib/_bootstrap.py @@ -578,7 +578,9 @@ """Concrete implementation of InspectLoader.is_package by checking if the path returned by get_filename has a filename of '__init__.py'.""" filename = _path_split(self.get_filename(fullname))[1] - return filename.rsplit('.', 1)[0] == '__init__' + filename_base = filename.rsplit('.', 1)[0] + tail_name = fullname.rpartition('.')[2] + return filename_base == '__init__' and tail_name != '__init__' def _bytes_from_bytecode(self, fullname, data, bytecode_path, source_stats): """Return the marshalled bytes from bytecode, verifying the magic diff --git a/Lib/importlib/test/source/test_abc_loader.py b/Lib/importlib/test/source/test_abc_loader.py --- a/Lib/importlib/test/source/test_abc_loader.py +++ b/Lib/importlib/test/source/test_abc_loader.py @@ -602,10 +602,11 @@ def test_is_package(self): # Properly detect when loading a package. + self.setUp(is_package=False) + self.assertFalse(self.loader.is_package(self.name)) self.setUp(is_package=True) self.assertTrue(self.loader.is_package(self.name)) - self.setUp(is_package=False) - self.assertFalse(self.loader.is_package(self.name)) + self.assertFalse(self.loader.is_package(self.name + '.__init__')) def test_get_code(self): # Verify the code object is created. diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -24,6 +24,10 @@ Library ------- +- Issue #14938: importlib.abc.SourceLoader.is_package() will not consider a + module whose name ends in '__init__' a package (e.g. importing pkg.__init__ + directly should be considered a module, not a package). + - Issue #14982: Document that pkgutil's iteration functions require the non-standard iter_modules() method to be defined by an importer (something the importlib importers do not define). -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 16 02:20:18 2012 From: python-checkins at python.org (alexander.belopolsky) Date: Sat, 16 Jun 2012 02:20:18 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315006=3A_Allow_equ?= =?utf8?q?ality_comparison_between_naive_and_aware_time?= Message-ID: http://hg.python.org/cpython/rev/8272699973cb changeset: 77458:8272699973cb user: Alexander Belopolsky date: Fri Jun 15 20:19:47 2012 -0400 summary: Issue #15006: Allow equality comparison between naive and aware time or datetime objects. files: Doc/library/datetime.rst | 20 ++++++++++++++++++-- Lib/datetime.py | 22 ++++++++++++++-------- Lib/test/datetimetester.py | 12 ++++++------ Misc/NEWS | 5 ++--- Modules/_datetimemodule.c | 16 ++++++++++++++++ 5 files changed, 56 insertions(+), 19 deletions(-) diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -901,13 +901,21 @@ *datetime1* is considered less than *datetime2* when *datetime1* precedes *datetime2* in time. - If one comparand is naive and the other is aware, :exc:`TypeError` is raised. + If one comparand is naive and the other is aware, :exc:`TypeError` + is raised if an order comparison is attempted. For equality + comparisons, naive instances are never equal to aware instances. + If both comparands are aware, and have the same :attr:`tzinfo` attribute, the common :attr:`tzinfo` attribute is ignored and the base datetimes are compared. 
If both comparands are aware and have different :attr:`tzinfo` attributes, the comparands are first adjusted by subtracting their UTC offsets (obtained from ``self.utcoffset()``). + .. versionchanged:: 3.3 + + Equality comparisons between naive and aware :class:`datetime` + instances don't raise :exc:`TypeError`. + .. note:: In order to stop comparison from falling back to the default scheme of comparing @@ -1316,7 +1324,10 @@ * comparison of :class:`.time` to :class:`.time`, where *a* is considered less than *b* when *a* precedes *b* in time. If one comparand is naive and the other - is aware, :exc:`TypeError` is raised. If both comparands are aware, and have + is aware, :exc:`TypeError` is raised if an order comparison is attempted. For equality + comparisons, naive instances are never equal to aware instances. + + If both comparands are aware, and have the same :attr:`tzinfo` attribute, the common :attr:`tzinfo` attribute is ignored and the base times are compared. If both comparands are aware and have different :attr:`tzinfo` attributes, the comparands are first adjusted by @@ -1326,6 +1337,11 @@ different type, :exc:`TypeError` is raised unless the comparison is ``==`` or ``!=``. The latter cases return :const:`False` or :const:`True`, respectively. + .. versionchanged:: 3.3 + + Equality comparisons between naive and aware :class:`time` instances + don't raise :exc:`TypeError`. + * hash, use as dict key * efficient pickling diff --git a/Lib/datetime.py b/Lib/datetime.py --- a/Lib/datetime.py +++ b/Lib/datetime.py @@ -1065,13 +1065,13 @@ def __eq__(self, other): if isinstance(other, time): - return self._cmp(other) == 0 + return self._cmp(other, allow_mixed=True) == 0 else: return False def __ne__(self, other): if isinstance(other, time): - return self._cmp(other) != 0 + return self._cmp(other, allow_mixed=True) != 0 else: return True @@ -1099,7 +1099,7 @@ else: _cmperror(self, other) - def _cmp(self, other): + def _cmp(self, other, allow_mixed=False): assert isinstance(other, time) mytz = self._tzinfo ottz = other._tzinfo @@ -1118,7 +1118,10 @@ (other._hour, other._minute, other._second, other._microsecond)) if myoff is None or otoff is None: - raise TypeError("cannot compare naive and aware times") + if allow_mixed: + return 2 # arbitrary non-zero value + else: + raise TypeError("cannot compare naive and aware times") myhhmm = self._hour * 60 + self._minute - myoff//timedelta(minutes=1) othhmm = other._hour * 60 + other._minute - otoff//timedelta(minutes=1) return _cmp((myhhmm, self._second, self._microsecond), @@ -1615,7 +1618,7 @@ def __eq__(self, other): if isinstance(other, datetime): - return self._cmp(other) == 0 + return self._cmp(other, allow_mixed=True) == 0 elif not isinstance(other, date): return NotImplemented else: @@ -1623,7 +1626,7 @@ def __ne__(self, other): if isinstance(other, datetime): - return self._cmp(other) != 0 + return self._cmp(other, allow_mixed=True) != 0 elif not isinstance(other, date): return NotImplemented else: @@ -1661,7 +1664,7 @@ else: _cmperror(self, other) - def _cmp(self, other): + def _cmp(self, other, allow_mixed=False): assert isinstance(other, datetime) mytz = self._tzinfo ottz = other._tzinfo @@ -1682,7 +1685,10 @@ other._hour, other._minute, other._second, other._microsecond)) if myoff is None or otoff is None: - raise TypeError("cannot compare naive and aware datetimes") + if allow_mixed: + return 2 # arbitrary non-zero value + else: + raise TypeError("cannot compare naive and aware datetimes") # XXX What follows could be done more 
efficiently... diff = self - other # this will take offsets into account if diff.days < 0: diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -2544,7 +2544,7 @@ self.assertEqual(t1, t2) self.assertEqual(t1, t3) self.assertEqual(t2, t3) - self.assertRaises(TypeError, lambda: t4 == t5) # mixed tz-aware & naive + self.assertNotEqual(t4, t5) # mixed tz-aware & naive self.assertRaises(TypeError, lambda: t4 < t5) # mixed tz-aware & naive self.assertRaises(TypeError, lambda: t5 < t4) # mixed tz-aware & naive @@ -2696,7 +2696,7 @@ t2 = t2.replace(tzinfo=FixedOffset(None, "")) self.assertEqual(t1, t2) t2 = t2.replace(tzinfo=FixedOffset(0, "")) - self.assertRaises(TypeError, lambda: t1 == t2) + self.assertNotEqual(t1, t2) # In time w/ identical tzinfo objects, utcoffset is ignored. class Varies(tzinfo): @@ -2801,16 +2801,16 @@ microsecond=1) self.assertTrue(t1 > t2) - # Make t2 naive and it should fail. + # Make t2 naive and it should differ. t2 = self.theclass.min - self.assertRaises(TypeError, lambda: t1 == t2) + self.assertNotEqual(t1, t2) self.assertEqual(t2, t2) # It's also naive if it has tzinfo but tzinfo.utcoffset() is None. class Naive(tzinfo): def utcoffset(self, dt): return None t2 = self.theclass(5, 6, 7, tzinfo=Naive()) - self.assertRaises(TypeError, lambda: t1 == t2) + self.assertNotEqual(t1, t2) self.assertEqual(t2, t2) # OTOH, it's OK to compare two of these mixing the two ways of being @@ -3327,7 +3327,7 @@ t2 = t2.replace(tzinfo=FixedOffset(None, "")) self.assertEqual(t1, t2) t2 = t2.replace(tzinfo=FixedOffset(0, "")) - self.assertRaises(TypeError, lambda: t1 == t2) + self.assertNotEqual(t1, t2) # In datetime w/ identical tzinfo objects, utcoffset is ignored. class Varies(tzinfo): diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -24,9 +24,8 @@ Library ------- -- Issue #14938: importlib.abc.SourceLoader.is_package() will not consider a - module whose name ends in '__init__' a package (e.g. importing pkg.__init__ - directly should be considered a module, not a package). +- Issue #15006: Allow equality comparison between naive and aware + time or datetime objects. 
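To make the entry above concrete, here is a short hypothetical session (not part of the patch) showing the new mixed naive/aware behaviour: equality now simply answers False or True, while order comparisons continue to raise TypeError.

    from datetime import datetime, timedelta, timezone

    naive = datetime(2012, 6, 16, 2, 20)
    aware = datetime(2012, 6, 16, 2, 20, tzinfo=timezone(timedelta(hours=2)))

    print(naive == aware)    # False (an equality test used to raise TypeError)
    print(naive != aware)    # True
    try:
        naive < aware        # ordering still refuses to mix naive and aware
    except TypeError as err:
        print(err)           # still complains about mixing naive and aware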
- Issue #14982: Document that pkgutil's iteration functions require the non-standard iter_modules() method to be defined by an importer (something diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -3707,6 +3707,14 @@ TIME_GET_MICROSECOND(other); result = diff_to_bool(diff, op); } + else if (op == Py_EQ) { + result = Py_False; + Py_INCREF(result); + } + else if (op == Py_NE) { + result = Py_True; + Py_INCREF(result); + } else { PyErr_SetString(PyExc_TypeError, "can't compare offset-naive and " @@ -4584,6 +4592,14 @@ Py_DECREF(delta); result = diff_to_bool(diff, op); } + else if (op == Py_EQ) { + result = Py_False; + Py_INCREF(result); + } + else if (op == Py_NE) { + result = Py_True; + Py_INCREF(result); + } else { PyErr_SetString(PyExc_TypeError, "can't compare offset-naive and " -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 16 02:29:09 2012 From: python-checkins at python.org (victor.stinner) Date: Sat, 16 Jun 2012 02:29:09 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_unicodeobject=2Ec=3A_Remove?= =?utf8?q?_debug_code?= Message-ID: http://hg.python.org/cpython/rev/41b1710ee333 changeset: 77459:41b1710ee333 user: Victor Stinner date: Sat Jun 16 01:53:04 2012 +0200 summary: unicodeobject.c: Remove debug code files: Objects/unicodeobject.c | 14 -------------- 1 files changed, 0 insertions(+), 14 deletions(-) diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -803,10 +803,6 @@ */ -#ifdef Py_DEBUG -static int unicode_old_new_calls = 0; -#endif - static PyUnicodeObject * _PyUnicode_New(Py_ssize_t length) { @@ -829,10 +825,6 @@ return NULL; } -#ifdef Py_DEBUG - ++unicode_old_new_calls; -#endif - unicode = PyObject_New(PyUnicodeObject, &PyUnicode_Type); if (unicode == NULL) return NULL; @@ -909,8 +901,6 @@ } #ifdef Py_DEBUG -static int unicode_new_new_calls = 0; - /* Functions wrapping macros for use in debugger */ char *_PyUnicode_utf8(void *unicode){ return PyUnicode_UTF8(unicode); @@ -979,10 +969,6 @@ return unicode_empty; } -#ifdef Py_DEBUG - ++unicode_new_new_calls; -#endif - is_ascii = 0; is_sharing = 0; struct_size = sizeof(PyCompactUnicodeObject); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 16 02:29:09 2012 From: python-checkins at python.org (victor.stinner) Date: Sat, 16 Jun 2012 02:29:09 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Optimize_=5FPyUnicode=5FFas?= =?utf8?q?tCopyCharacters=28=29_when_maxchar=28from=29_=3E_maxchar=28to=29?= Message-ID: http://hg.python.org/cpython/rev/00b4451bf94c changeset: 77460:00b4451bf94c user: Victor Stinner date: Sat Jun 16 02:22:37 2012 +0200 summary: Optimize _PyUnicode_FastCopyCharacters() when maxchar(from) > maxchar(to) files: Objects/unicodeobject.c | 132 ++++++++++++++---------- Python/formatter_unicode.c | 6 +- 2 files changed, 80 insertions(+), 58 deletions(-) diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -1128,7 +1128,6 @@ { unsigned int from_kind, to_kind; void *from_data, *to_data; - int fast; assert(0 <= how_many); assert(0 <= from_start); @@ -1137,41 +1136,40 @@ assert(PyUnicode_IS_READY(from)); assert(from_start + how_many <= PyUnicode_GET_LENGTH(from)); - if (how_many == 0) - return 0; - assert(PyUnicode_Check(to)); assert(PyUnicode_IS_READY(to)); assert(to_start + how_many <= 
PyUnicode_GET_LENGTH(to)); + if (how_many == 0) + return 0; + from_kind = PyUnicode_KIND(from); from_data = PyUnicode_DATA(from); to_kind = PyUnicode_KIND(to); to_data = PyUnicode_DATA(to); -#ifdef Py_DEBUG - if (!check_maxchar - && (from_kind > to_kind - || (!PyUnicode_IS_ASCII(from) && PyUnicode_IS_ASCII(to)))) - { - const Py_UCS4 to_maxchar = PyUnicode_MAX_CHAR_VALUE(to); - Py_UCS4 ch; - Py_ssize_t i; - for (i=0; i < how_many; i++) { - ch = PyUnicode_READ(from_kind, from_data, from_start + i); - assert(ch <= to_maxchar); - } - } -#endif - fast = (from_kind == to_kind); - if (check_maxchar - && (!PyUnicode_IS_ASCII(from) && PyUnicode_IS_ASCII(to))) - { - /* deny latin1 => ascii */ - fast = 0; - } - - if (fast) { + if (from_kind == to_kind) { + if (!PyUnicode_IS_ASCII(from) && PyUnicode_IS_ASCII(to)) { + /* Writing Latin-1 characters into an ASCII string requires to + check that all written characters are pure ASCII */ +#ifndef Py_DEBUG + if (check_maxchar) { + Py_UCS4 max_char; + max_char = ucs1lib_find_max_char(from_data, + (char*)from_data + how_many); + if (max_char >= 128) + return -1; + } +#else + const Py_UCS4 to_maxchar = PyUnicode_MAX_CHAR_VALUE(to); + Py_UCS4 ch; + Py_ssize_t i; + for (i=0; i < how_many; i++) { + ch = PyUnicode_READ(from_kind, from_data, from_start + i); + assert(ch <= to_maxchar); + } +#endif + } Py_MEMCPY((char*)to_data + to_kind * to_start, (char*)from_data + from_kind * from_start, to_kind * how_many); @@ -1207,42 +1205,62 @@ ); } else { - /* check if max_char(from substring) <= max_char(to) */ - if (from_kind > to_kind - /* latin1 => ascii */ - || (!PyUnicode_IS_ASCII(from) && PyUnicode_IS_ASCII(to))) + assert (PyUnicode_MAX_CHAR_VALUE(from) > PyUnicode_MAX_CHAR_VALUE(to)); + +#ifndef Py_DEBUG + if (!check_maxchar) { + if (from_kind == PyUnicode_2BYTE_KIND + && to_kind == PyUnicode_1BYTE_KIND) + { + _PyUnicode_CONVERT_BYTES( + Py_UCS2, Py_UCS1, + PyUnicode_2BYTE_DATA(from) + from_start, + PyUnicode_2BYTE_DATA(from) + from_start + how_many, + PyUnicode_1BYTE_DATA(to) + to_start + ); + } + else if (from_kind == PyUnicode_4BYTE_KIND + && to_kind == PyUnicode_1BYTE_KIND) + { + _PyUnicode_CONVERT_BYTES( + Py_UCS4, Py_UCS1, + PyUnicode_4BYTE_DATA(from) + from_start, + PyUnicode_4BYTE_DATA(from) + from_start + how_many, + PyUnicode_1BYTE_DATA(to) + to_start + ); + } + else if (from_kind == PyUnicode_4BYTE_KIND + && to_kind == PyUnicode_2BYTE_KIND) + { + _PyUnicode_CONVERT_BYTES( + Py_UCS4, Py_UCS2, + PyUnicode_4BYTE_DATA(from) + from_start, + PyUnicode_4BYTE_DATA(from) + from_start + how_many, + PyUnicode_2BYTE_DATA(to) + to_start + ); + } + else { + assert(0); + return -1; + } + } + else +#endif { - /* slow path to check for character overflow */ const Py_UCS4 to_maxchar = PyUnicode_MAX_CHAR_VALUE(to); Py_UCS4 ch; Py_ssize_t i; -#ifdef Py_DEBUG for (i=0; i < how_many; i++) { ch = PyUnicode_READ(from_kind, from_data, from_start + i); +#ifndef Py_DEBUG assert(ch <= to_maxchar); +#else + if (ch > to_maxchar) + return -1; +#endif PyUnicode_WRITE(to_kind, to_data, to_start + i, ch); } -#else - if (!check_maxchar) { - for (i=0; i < how_many; i++) { - ch = PyUnicode_READ(from_kind, from_data, from_start + i); - PyUnicode_WRITE(to_kind, to_data, to_start + i, ch); - } - } - else { - for (i=0; i < how_many; i++) { - ch = PyUnicode_READ(from_kind, from_data, from_start + i); - if (ch > to_maxchar) - return 1; - PyUnicode_WRITE(to_kind, to_data, to_start + i, ch); - } - } -#endif - } - else { - assert(0 && "inconsistent state"); - return 1; } } return 0; @@ -13876,9 
+13894,11 @@ } } - _PyUnicode_FastCopyCharacters(writer.buffer, writer.pos, - temp, pindex, len); - writer.pos += len; + if (len) { + _PyUnicode_FastCopyCharacters(writer.buffer, writer.pos, + temp, pindex, len); + writer.pos += len; + } if (width > len) { sublen = width - len; FILL(writer.kind, writer.data, ' ', writer.pos, sublen); diff --git a/Python/formatter_unicode.c b/Python/formatter_unicode.c --- a/Python/formatter_unicode.c +++ b/Python/formatter_unicode.c @@ -786,8 +786,10 @@ goto done; /* Then the source string. */ - _PyUnicode_FastCopyCharacters(writer->buffer, writer->pos, - value, 0, len); + if (len) { + _PyUnicode_FastCopyCharacters(writer->buffer, writer->pos, + value, 0, len); + } writer->pos += (len + rpad); result = 0; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 16 03:03:15 2012 From: python-checkins at python.org (victor.stinner) Date: Sat, 16 Jun 2012 03:03:15 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Remove_debug_code?= Message-ID: http://hg.python.org/cpython/rev/c7d3e326ff78 changeset: 77461:c7d3e326ff78 user: Victor Stinner date: Sat Jun 16 02:44:43 2012 +0200 summary: Remove debug code files: Objects/unicodeobject.c | 8 -------- 1 files changed, 0 insertions(+), 8 deletions(-) diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -1371,10 +1371,6 @@ return 0; } -#ifdef Py_DEBUG -static int unicode_ready_calls = 0; -#endif - int _PyUnicode_Ready(PyObject *unicode) { @@ -1397,10 +1393,6 @@ /* Actually, it should neither be interned nor be anything else: */ assert(_PyUnicode_STATE(unicode).interned == SSTATE_NOT_INTERNED); -#ifdef Py_DEBUG - ++unicode_ready_calls; -#endif - end = _PyUnicode_WSTR(unicode) + _PyUnicode_WSTR_LENGTH(unicode); if (find_maxchar_surrogates(_PyUnicode_WSTR(unicode), end, &maxchar, &num_surrogates) == -1) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 16 03:03:16 2012 From: python-checkins at python.org (victor.stinner) Date: Sat, 16 Jun 2012 03:03:16 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_=22=25f=22_format_of_st?= =?utf8?q?r=25args_if_the_result_is_not_an_ASCII_or_latin1_string?= Message-ID: http://hg.python.org/cpython/rev/73ff365bbb1d changeset: 77462:73ff365bbb1d user: Victor Stinner date: Sat Jun 16 02:57:41 2012 +0200 summary: Fix "%f" format of str%args if the result is not an ASCII or latin1 string files: Lib/test/test_format.py | 2 + Objects/unicodeobject.c | 36 +++++++++++++++------------- 2 files changed, 21 insertions(+), 17 deletions(-) diff --git a/Lib/test/test_format.py b/Lib/test/test_format.py --- a/Lib/test/test_format.py +++ b/Lib/test/test_format.py @@ -265,6 +265,8 @@ raise TestFailed('"%*d"%(maxsize, -127) should fail') def test_non_ascii(self): + testformat("\u20ac=%f", (1.0,), "\u20ac=1.000000") + self.assertEqual(format("abc", "\u2007<5"), "abc\u2007\u2007") self.assertEqual(format(123, "\u2007<5"), "123\u2007\u2007") self.assertEqual(format(12.3, "\u2007<6"), "12.3\u2007\u2007") diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -1660,34 +1660,34 @@ } /* Copy a ASCII or latin1 char* string into a Python Unicode string. - Return the length of the input string. WARNING: The function doesn't copy the terminating null character and doesn't check the maximum character (may write a latin1 character in an ASCII string). 
*/ -static Py_ssize_t -unicode_write_cstr(PyObject *unicode, Py_ssize_t index, const char *str) +static void +unicode_write_cstr(PyObject *unicode, Py_ssize_t index, + const char *str, Py_ssize_t len) { enum PyUnicode_Kind kind = PyUnicode_KIND(unicode); void *data = PyUnicode_DATA(unicode); + const char *end = str + len; switch (kind) { case PyUnicode_1BYTE_KIND: { - Py_ssize_t len = strlen(str); assert(index + len <= PyUnicode_GET_LENGTH(unicode)); memcpy((char *) data + index, str, len); - return len; + break; } case PyUnicode_2BYTE_KIND: { Py_UCS2 *start = (Py_UCS2 *)data + index; Py_UCS2 *ucs2 = start; assert(index <= PyUnicode_GET_LENGTH(unicode)); - for (; *str; ++ucs2, ++str) + for (; str < end; ++ucs2, ++str) *ucs2 = (Py_UCS2)*str; assert((ucs2 - start) <= PyUnicode_GET_LENGTH(unicode)); - return ucs2 - start; + break; } default: { Py_UCS4 *start = (Py_UCS4 *)data + index; @@ -1695,11 +1695,10 @@ assert(kind == PyUnicode_4BYTE_KIND); assert(index <= PyUnicode_GET_LENGTH(unicode)); - for (; *str; ++ucs4, ++str) + for (; str < end; ++ucs4, ++str) *ucs4 = (Py_UCS4)*str; assert((ucs4 - start) <= PyUnicode_GET_LENGTH(unicode)); - return ucs4 - start; } } } @@ -2730,17 +2729,18 @@ case 'x': case 'p': { - Py_ssize_t written; + Py_ssize_t len; /* unused, since we already have the result */ if (*f == 'p') (void) va_arg(vargs, void *); else (void) va_arg(vargs, int); /* extract the result from numberresults and append. */ - written = unicode_write_cstr(string, i, numberresult); + len = strlen(numberresult); + unicode_write_cstr(string, i, numberresult, len); /* skip over the separating '\0' */ - i += written; - numberresult += written; + i += len; + numberresult += len; assert(*numberresult == '\0'); numberresult++; assert(numberresult <= numberresults + numbersize); @@ -2812,10 +2812,14 @@ PyUnicode_WRITE(kind, data, i++, '%'); break; default: - i += unicode_write_cstr(string, i, p); + { + Py_ssize_t len = strlen(p); + unicode_write_cstr(string, i, p, len); + i += len; assert(i == PyUnicode_GET_LENGTH(string)); goto end; } + } } else { assert(i < PyUnicode_GET_LENGTH(string)); @@ -13211,9 +13215,7 @@ if (writer) { if (_PyUnicodeWriter_Prepare(writer, len, 127) == -1) return -1; - memcpy((char*)writer->data + writer->pos * writer->kind, - p, - len); + unicode_write_cstr(writer->buffer, writer->pos, p, len); writer->pos += len; } else -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 16 03:24:24 2012 From: python-checkins at python.org (victor.stinner) Date: Sat, 16 Jun 2012 03:24:24 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_unicode=5Fadjust=5Fmaxc?= =?utf8?q?har=28=29=3A_catch_PyUnicode=5FNew=28=29_failure?= Message-ID: http://hg.python.org/cpython/rev/068f2bb0c0ae changeset: 77463:068f2bb0c0ae user: Victor Stinner date: Sat Jun 16 03:17:34 2012 +0200 summary: Fix unicode_adjust_maxchar(): catch PyUnicode_New() failure files: Objects/unicodeobject.c | 3 ++- 1 files changed, 2 insertions(+), 1 deletions(-) diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -2084,7 +2084,8 @@ return; } copy = PyUnicode_New(len, max_char); - _PyUnicode_FastCopyCharacters(copy, 0, unicode, 0, len); + if (copy != NULL) + _PyUnicode_FastCopyCharacters(copy, 0, unicode, 0, len); Py_DECREF(unicode); *p_unicode = copy; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 16 03:24:25 2012 From: python-checkins at python.org (victor.stinner) Date: 
Sat, 16 Jun 2012 03:24:25 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_get=5Fsourcefile=28=29=3A_u?= =?utf8?q?se_PyUnicode=5FREAD=28=29_to_avoid_the_creation_of_a_temporary?= Message-ID: http://hg.python.org/cpython/rev/b1f63680f5cb changeset: 77464:b1f63680f5cb user: Victor Stinner date: Sat Jun 16 03:22:05 2012 +0200 summary: get_sourcefile(): use PyUnicode_READ() to avoid the creation of a temporary Py_UCS4 buffer files: Python/import.c | 23 +++++++++++------------ 1 files changed, 11 insertions(+), 12 deletions(-) diff --git a/Python/import.c b/Python/import.c --- a/Python/import.c +++ b/Python/import.c @@ -1008,23 +1008,25 @@ get_sourcefile(PyObject *filename) { Py_ssize_t len; - Py_UCS4 *fileuni; PyObject *py; struct stat statbuf; int err; + void *data; + unsigned int kind; len = PyUnicode_GET_LENGTH(filename); if (len == 0) Py_RETURN_NONE; /* don't match *.pyc or *.pyo? */ - fileuni = PyUnicode_AsUCS4Copy(filename); - if (!fileuni) - return NULL; + data = PyUnicode_DATA(filename); + kind = PyUnicode_KIND(filename); if (len < 5 - || fileuni[len-4] != '.' - || (fileuni[len-3] != 'p' && fileuni[len-3] != 'P') - || (fileuni[len-2] != 'y' && fileuni[len-2] != 'Y')) + || PyUnicode_READ(kind, data, len-4) != '.' + || (PyUnicode_READ(kind, data, len-3) != 'p' + && PyUnicode_READ(kind, data, len-3) != 'P') + || (PyUnicode_READ(kind, data, len-2) != 'y' + && PyUnicode_READ(kind, data, len-2) != 'Y')) goto unchanged; /* Start by trying to turn PEP 3147 path into source path. If that @@ -1034,7 +1036,7 @@ py = make_source_pathname(filename); if (py == NULL) { PyErr_Clear(); - py = PyUnicode_FromKindAndData(PyUnicode_4BYTE_KIND, fileuni, len - 1); + py = PyUnicode_Substring(filename, 0, len - 1); } if (py == NULL) goto error; @@ -1042,17 +1044,14 @@ err = _Py_stat(py, &statbuf); if (err == -2) goto error; - if (err == 0 && S_ISREG(statbuf.st_mode)) { - PyMem_Free(fileuni); + if (err == 0 && S_ISREG(statbuf.st_mode)) return py; - } Py_DECREF(py); goto unchanged; error: PyErr_Clear(); unchanged: - PyMem_Free(fileuni); Py_INCREF(filename); return filename; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 16 04:56:15 2012 From: python-checkins at python.org (victor.stinner) Date: Sat, 16 Jun 2012 04:56:15 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_a_compiler_warning_in_?= =?utf8?q?=5Fcopy=5Fcharacters=28=29_and_remove_debug_code?= Message-ID: http://hg.python.org/cpython/rev/34a6a233cfcc changeset: 77466:34a6a233cfcc user: Victor Stinner date: Sat Jun 16 04:53:25 2012 +0200 summary: Fix a compiler warning in _copy_characters() and remove debug code files: Objects/unicodeobject.c | 11 +---------- 1 files changed, 1 insertions(+), 10 deletions(-) diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -1156,7 +1156,7 @@ if (check_maxchar) { Py_UCS4 max_char; max_char = ucs1lib_find_max_char(from_data, - (char*)from_data + how_many); + (Py_UCS1*)from_data + how_many); if (max_char >= 128) return -1; } @@ -3860,11 +3860,6 @@ return PyUnicode_AsUTF8AndSize(unicode, NULL); } -#ifdef Py_DEBUG -static int unicode_as_unicode_calls = 0; -#endif - - Py_UNICODE * PyUnicode_AsUnicodeAndSize(PyObject *unicode, Py_ssize_t *size) { @@ -3888,10 +3883,6 @@ assert(_PyUnicode_KIND(unicode) != 0); assert(PyUnicode_IS_READY(unicode)); -#ifdef Py_DEBUG - ++unicode_as_unicode_calls; -#endif - if (PyUnicode_KIND(unicode) == PyUnicode_4BYTE_KIND) { #if SIZEOF_WCHAR_T == 2 four_bytes = 
PyUnicode_4BYTE_DATA(unicode); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 16 04:56:14 2012 From: python-checkins at python.org (victor.stinner) Date: Sat, 16 Jun 2012 04:56:14 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Oops=2C_fix_my_previous_cha?= =?utf8?q?nge_on_=5Fcopy=5Fcharacters=28=29?= Message-ID: http://hg.python.org/cpython/rev/780783e2ca29 changeset: 77465:780783e2ca29 user: Victor Stinner date: Sat Jun 16 04:53:00 2012 +0200 summary: Oops, fix my previous change on _copy_characters() files: Objects/unicodeobject.c | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -1254,10 +1254,10 @@ for (i=0; i < how_many; i++) { ch = PyUnicode_READ(from_kind, from_data, from_start + i); #ifndef Py_DEBUG - assert(ch <= to_maxchar); -#else if (ch > to_maxchar) return -1; +#else + assert(ch <= to_maxchar); #endif PyUnicode_WRITE(to_kind, to_data, to_start + i, ch); } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 16 04:56:16 2012 From: python-checkins at python.org (victor.stinner) Date: Sat, 16 Jun 2012 04:56:16 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_PyUnicode=5FGetSize=28?= =?utf8?q?=29=3A_Don=27t_replace_=5FPyUnicode=5FReady=28=29_exception?= Message-ID: http://hg.python.org/cpython/rev/176e61901895 changeset: 77467:176e61901895 user: Victor Stinner date: Sat Jun 16 04:53:46 2012 +0200 summary: Fix PyUnicode_GetSize(): Don't replace _PyUnicode_Ready() exception files: Objects/unicodeobject.c | 5 +++-- 1 files changed, 3 insertions(+), 2 deletions(-) diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -3995,11 +3995,12 @@ Py_ssize_t PyUnicode_GetLength(PyObject *unicode) { - if (!PyUnicode_Check(unicode) || PyUnicode_READY(unicode) == -1) { + if (!PyUnicode_Check(unicode)) { PyErr_BadArgument(); return -1; } - + if (PyUnicode_READY(unicode) == -1) + return -1; return PyUnicode_GET_LENGTH(unicode); } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 16 05:49:32 2012 From: python-checkins at python.org (eli.bendersky) Date: Sat, 16 Jun 2012 05:49:32 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Make_the_test_more_resilien?= =?utf8?q?t_to_test-run_order_=28closes_=2315075=29?= Message-ID: http://hg.python.org/cpython/rev/5782efaa8d68 changeset: 77468:5782efaa8d68 user: Eli Bendersky date: Sat Jun 16 06:47:44 2012 +0300 summary: Make the test more resilient to test-run order (closes #15075) files: Lib/test/test_xml_etree.py | 28 ++++++++++++++++++------- 1 files changed, 20 insertions(+), 8 deletions(-) diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py --- a/Lib/test/test_xml_etree.py +++ b/Lib/test/test_xml_etree.py @@ -1279,8 +1279,7 @@ except KeyError: raise OSError("resource not found") if parse == "xml": - from xml.etree.ElementTree import XML - return XML(data) + data = ET.XML(data) return data def xinclude(): @@ -2011,12 +2010,20 @@ 'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd')) - at unittest.skip('Unstable due to module monkeypatching') class XincludeTest(unittest.TestCase): + def _my_loader(self, href, parse): + # Used to avoid a test-dependency problem where the default loader + # of ElementInclude uses the pyET parser for cET tests. 
+ if parse == 'xml': + with open(href, 'rb') as f: + return ET.parse(f).getroot() + else: + return None + def test_xinclude_default(self): from xml.etree import ElementInclude doc = xinclude_loader('default.xml') - ElementInclude.include(doc) + ElementInclude.include(doc, self._my_loader) s = serialize(doc) self.assertEqual(s.strip(), '''

Example.

@@ -2308,11 +2315,16 @@ NoAcceleratorTest, ]) - support.run_unittest(*test_classes) + try: + support.run_unittest(*test_classes) - # XXX the C module should give the same warnings as the Python module - with CleanContext(quiet=(module is not pyET)): - support.run_doctest(sys.modules[__name__], verbosity=True) + # XXX the C module should give the same warnings as the Python module + with CleanContext(quiet=(module is not pyET)): + support.run_doctest(sys.modules[__name__], verbosity=True) + finally: + # don't interfere with subsequent tests + ET = pyET = None + if __name__ == '__main__': test_main() -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Sat Jun 16 05:55:38 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Sat, 16 Jun 2012 05:55:38 +0200 Subject: [Python-checkins] Daily reference leaks (b1f63680f5cb): sum=0 Message-ID: results for b1f63680f5cb on branch "default" -------------------------------------------- test_support leaked [0, -1, 1] references, sum=0 Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogY7XOJT', '-x'] From python-checkins at python.org Sat Jun 16 16:40:49 2012 From: python-checkins at python.org (victor.stinner) Date: Sat, 16 Jun 2012 16:40:49 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbjogX2NvcHlfY2hhcmFjdGVycygp?= =?utf8?q?=3A_move_debug_code_at_the_top_to_avoid_noisy_=23ifdef?= Message-ID: http://hg.python.org/cpython/rev/f6463dc5ead6 changeset: 77469:f6463dc5ead6 user: Victor Stinner date: Sat Jun 16 16:38:26 2012 +0200 summary: _copy_characters(): move debug code at the top to avoid noisy #ifdef And don't use assert() anymore if check_maxchar is set: return -1 on error instead. files: Objects/unicodeobject.c | 49 +++++++++++++--------------- 1 files changed, 23 insertions(+), 26 deletions(-) diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -1148,27 +1148,31 @@ to_kind = PyUnicode_KIND(to); to_data = PyUnicode_DATA(to); +#ifdef Py_DEBUG + if (!check_maxchar + && PyUnicode_MAX_CHAR_VALUE(from) > PyUnicode_MAX_CHAR_VALUE(to)) + { + const Py_UCS4 to_maxchar = PyUnicode_MAX_CHAR_VALUE(to); + Py_UCS4 ch; + Py_ssize_t i; + for (i=0; i < how_many; i++) { + ch = PyUnicode_READ(from_kind, from_data, from_start + i); + assert(ch <= to_maxchar); + } + } +#endif + if (from_kind == to_kind) { - if (!PyUnicode_IS_ASCII(from) && PyUnicode_IS_ASCII(to)) { + if (check_maxchar + && !PyUnicode_IS_ASCII(from) && PyUnicode_IS_ASCII(to)) + { /* Writing Latin-1 characters into an ASCII string requires to check that all written characters are pure ASCII */ -#ifndef Py_DEBUG - if (check_maxchar) { - Py_UCS4 max_char; - max_char = ucs1lib_find_max_char(from_data, - (Py_UCS1*)from_data + how_many); - if (max_char >= 128) - return -1; - } -#else - const Py_UCS4 to_maxchar = PyUnicode_MAX_CHAR_VALUE(to); - Py_UCS4 ch; - Py_ssize_t i; - for (i=0; i < how_many; i++) { - ch = PyUnicode_READ(from_kind, from_data, from_start + i); - assert(ch <= to_maxchar); - } -#endif + Py_UCS4 max_char; + max_char = ucs1lib_find_max_char(from_data, + (Py_UCS1*)from_data + how_many); + if (max_char >= 128) + return -1; } Py_MEMCPY((char*)to_data + to_kind * to_start, (char*)from_data + from_kind * from_start, @@ -1207,7 +1211,6 @@ else { assert (PyUnicode_MAX_CHAR_VALUE(from) > PyUnicode_MAX_CHAR_VALUE(to)); -#ifndef Py_DEBUG if (!check_maxchar) { if (from_kind == PyUnicode_2BYTE_KIND && to_kind == PyUnicode_1BYTE_KIND) @@ 
-1244,21 +1247,15 @@ return -1; } } - else -#endif - { + else { const Py_UCS4 to_maxchar = PyUnicode_MAX_CHAR_VALUE(to); Py_UCS4 ch; Py_ssize_t i; for (i=0; i < how_many; i++) { ch = PyUnicode_READ(from_kind, from_data, from_start + i); -#ifndef Py_DEBUG if (ch > to_maxchar) return -1; -#else - assert(ch <= to_maxchar); -#endif PyUnicode_WRITE(to_kind, to_data, to_start + i, ch); } } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 16 19:46:31 2012 From: python-checkins at python.org (stefan.krah) Date: Sat, 16 Jun 2012 19:46:31 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_1=29_State_the_relative_err?= =?utf8?q?ors_of_the_power_functions_for_integer_exponents=2E?= Message-ID: http://hg.python.org/cpython/rev/73df491612aa changeset: 77470:73df491612aa user: Stefan Krah date: Sat Jun 16 19:45:35 2012 +0200 summary: 1) State the relative errors of the power functions for integer exponents. 2) _mpd_qpow_mpd(): Abort the loop for all specials, not only infinity. 3) _mpd_qpow_mpd(): Make the function more general and distinguish between zero clamping and folding down the exponent. The latter case is currently handled by setting context->clamp to 0 before calling the function. 4) _mpd_qpow_int(): Add one to the work precision in case of a negative exponent. This is to get the same relative error (0.1 * 10**-prec) for both positive and negative exponents. The previous relative error for negative exponents was (0.2 * 10**-prec). Both errors are _before_ the final rounding to the context precision. files: Modules/_decimal/libmpdec/mpdecimal.c | 20 +++++++++++++- 1 files changed, 18 insertions(+), 2 deletions(-) diff --git a/Modules/_decimal/libmpdec/mpdecimal.c b/Modules/_decimal/libmpdec/mpdecimal.c --- a/Modules/_decimal/libmpdec/mpdecimal.c +++ b/Modules/_decimal/libmpdec/mpdecimal.c @@ -5844,6 +5844,12 @@ /* * Internal function: Integer power with mpd_uint_t exponent. The function * can fail with MPD_Malloc_error. + * + * The error is equal to the error incurred in k-1 multiplications. Assuming + * the upper bound for the relative error in each operation: + * + * abs(err) = 5 * 10**-prec + * result = x**k * (1 + err)**(k-1) */ static inline void _mpd_qpow_uint(mpd_t *result, const mpd_t *base, mpd_uint_t exp, @@ -5880,6 +5886,12 @@ /* * Internal function: Integer power with mpd_t exponent, tbase and texp * are modified!! Function can fail with MPD_Malloc_error. + * + * The error is equal to the error incurred in k multiplications. Assuming + * the upper bound for the relative error in each operation: + * + * abs(err) = 5 * 10**-prec + * result = x**k * (1 + err)**k */ static inline void _mpd_qpow_mpd(mpd_t *result, mpd_t *tbase, mpd_t *texp, uint8_t resultsign, @@ -5899,7 +5911,8 @@ if (mpd_isodd(texp)) { mpd_qmul(result, result, tbase, ctx, &workstatus); *status |= workstatus; - if (workstatus & (MPD_Overflow|MPD_Clamped)) { + if (mpd_isspecial(result) || + (mpd_iszerocoeff(result) && (workstatus & MPD_Clamped))) { break; } } @@ -5914,7 +5927,9 @@ } /* - * The power function for integer exponents. + * The power function for integer exponents. 
Relative error _before_ the + * final rounding to prec: + * abs(result - base**exp) < 0.1 * 10**-prec * abs(base**exp) */ static void _mpd_qpow_int(mpd_t *result, const mpd_t *base, const mpd_t *exp, @@ -5932,6 +5947,7 @@ workctx.round = MPD_ROUND_HALF_EVEN; workctx.clamp = 0; if (mpd_isnegative(exp)) { + workctx.prec += 1; mpd_qdiv(&tbase, &one, base, &workctx, status); if (*status&MPD_Errors) { mpd_setspecial(result, MPD_POS, MPD_NAN); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 16 22:54:05 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sat, 16 Jun 2012 22:54:05 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314874=3A_Restore_c?= =?utf8?q?harmap_decoding_speed_to_pre-PEP_393_levels=2E?= Message-ID: http://hg.python.org/cpython/rev/8f3a5308f50b changeset: 77471:8f3a5308f50b user: Antoine Pitrou date: Sat Jun 16 22:48:21 2012 +0200 summary: Issue #14874: Restore charmap decoding speed to pre-PEP 393 levels. Patch by Serhiy Storchaka. files: Lib/codecs.py | 5 +- Lib/encodings/cp037.py | 1 + Lib/encodings/cp500.py | 1 + Lib/encodings/hp_roman8.py | 371 +++++++++++++++------ Lib/encodings/iso8859_1.py | 1 + Lib/encodings/mac_latin2.py | 403 +++++++++++++++-------- Lib/encodings/palmos.py | 304 +++++++++++++++-- Lib/encodings/ptcp154.py | 393 +++++++++++++++------- Misc/NEWS | 3 + Objects/unicodeobject.c | 63 ++- Tools/unicode/gencodec.py | 9 +- 11 files changed, 1123 insertions(+), 431 deletions(-) diff --git a/Lib/codecs.py b/Lib/codecs.py --- a/Lib/codecs.py +++ b/Lib/codecs.py @@ -1042,10 +1042,7 @@ mapped to themselves. """ - res = {} - for i in rng: - res[i]=i - return res + return {i:i for i in rng} def make_encoding_map(decoding_map): diff --git a/Lib/encodings/cp037.py b/Lib/encodings/cp037.py --- a/Lib/encodings/cp037.py +++ b/Lib/encodings/cp037.py @@ -301,6 +301,7 @@ '\xd9' # 0xFD -> LATIN CAPITAL LETTER U WITH GRAVE '\xda' # 0xFE -> LATIN CAPITAL LETTER U WITH ACUTE '\x9f' # 0xFF -> CONTROL + '\ufffe' ## Widen to UCS2 for optimization ) ### Encoding table diff --git a/Lib/encodings/cp500.py b/Lib/encodings/cp500.py --- a/Lib/encodings/cp500.py +++ b/Lib/encodings/cp500.py @@ -301,6 +301,7 @@ '\xd9' # 0xFD -> LATIN CAPITAL LETTER U WITH GRAVE '\xda' # 0xFE -> LATIN CAPITAL LETTER U WITH ACUTE '\x9f' # 0xFF -> CONTROL + '\ufffe' ## Widen to UCS2 for optimization ) ### Encoding table diff --git a/Lib/encodings/hp_roman8.py b/Lib/encodings/hp_roman8.py --- a/Lib/encodings/hp_roman8.py +++ b/Lib/encodings/hp_roman8.py @@ -14,18 +14,18 @@ class Codec(codecs.Codec): def encode(self,input,errors='strict'): - return codecs.charmap_encode(input,errors,encoding_map) + return codecs.charmap_encode(input,errors,encoding_table) def decode(self,input,errors='strict'): - return codecs.charmap_decode(input,errors,decoding_map) + return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): - return codecs.charmap_encode(input,self.errors,encoding_map)[0] + return codecs.charmap_encode(input,self.errors,encoding_table)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): - return codecs.charmap_decode(input,self.errors,decoding_map)[0] + return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass @@ -46,107 +46,268 @@ streamreader=StreamReader, ) -### Decoding Map -decoding_map = codecs.make_identity_dict(range(256)) -decoding_map.update({ 
- 0x00a1: 0x00c0, # LATIN CAPITAL LETTER A WITH GRAVE - 0x00a2: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX - 0x00a3: 0x00c8, # LATIN CAPITAL LETTER E WITH GRAVE - 0x00a4: 0x00ca, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX - 0x00a5: 0x00cb, # LATIN CAPITAL LETTER E WITH DIAERESIS - 0x00a6: 0x00ce, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX - 0x00a7: 0x00cf, # LATIN CAPITAL LETTER I WITH DIAERESIS - 0x00a8: 0x00b4, # ACUTE ACCENT - 0x00a9: 0x02cb, # MODIFIER LETTER GRAVE ACCENT (Mandarin Chinese fourth tone) - 0x00aa: 0x02c6, # MODIFIER LETTER CIRCUMFLEX ACCENT - 0x00ab: 0x00a8, # DIAERESIS - 0x00ac: 0x02dc, # SMALL TILDE - 0x00ad: 0x00d9, # LATIN CAPITAL LETTER U WITH GRAVE - 0x00ae: 0x00db, # LATIN CAPITAL LETTER U WITH CIRCUMFLEX - 0x00af: 0x20a4, # LIRA SIGN - 0x00b0: 0x00af, # MACRON - 0x00b1: 0x00dd, # LATIN CAPITAL LETTER Y WITH ACUTE - 0x00b2: 0x00fd, # LATIN SMALL LETTER Y WITH ACUTE - 0x00b3: 0x00b0, # DEGREE SIGN - 0x00b4: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA - 0x00b5: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA - 0x00b6: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE - 0x00b7: 0x00f1, # LATIN SMALL LETTER N WITH TILDE - 0x00b8: 0x00a1, # INVERTED EXCLAMATION MARK - 0x00b9: 0x00bf, # INVERTED QUESTION MARK - 0x00ba: 0x00a4, # CURRENCY SIGN - 0x00bb: 0x00a3, # POUND SIGN - 0x00bc: 0x00a5, # YEN SIGN - 0x00bd: 0x00a7, # SECTION SIGN - 0x00be: 0x0192, # LATIN SMALL LETTER F WITH HOOK - 0x00bf: 0x00a2, # CENT SIGN - 0x00c0: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX - 0x00c1: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX - 0x00c2: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX - 0x00c3: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX - 0x00c4: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE - 0x00c5: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE - 0x00c6: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE - 0x00c7: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE - 0x00c8: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE - 0x00c9: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE - 0x00ca: 0x00f2, # LATIN SMALL LETTER O WITH GRAVE - 0x00cb: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE - 0x00cc: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS - 0x00cd: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS - 0x00ce: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS - 0x00cf: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS - 0x00d0: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE - 0x00d1: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX - 0x00d2: 0x00d8, # LATIN CAPITAL LETTER O WITH STROKE - 0x00d3: 0x00c6, # LATIN CAPITAL LETTER AE - 0x00d4: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE - 0x00d5: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE - 0x00d6: 0x00f8, # LATIN SMALL LETTER O WITH STROKE - 0x00d7: 0x00e6, # LATIN SMALL LETTER AE - 0x00d8: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS - 0x00d9: 0x00ec, # LATIN SMALL LETTER I WITH GRAVE - 0x00da: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS - 0x00db: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS - 0x00dc: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE - 0x00dd: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS - 0x00de: 0x00df, # LATIN SMALL LETTER SHARP S (German) - 0x00df: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX - 0x00e0: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE - 0x00e1: 0x00c3, # LATIN CAPITAL LETTER A WITH TILDE - 0x00e2: 0x00e3, # LATIN SMALL LETTER A WITH TILDE - 0x00e3: 0x00d0, # LATIN CAPITAL LETTER ETH (Icelandic) - 0x00e4: 0x00f0, # LATIN SMALL LETTER ETH (Icelandic) - 0x00e5: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE - 0x00e6: 0x00cc, # LATIN CAPITAL 
LETTER I WITH GRAVE - 0x00e7: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE - 0x00e8: 0x00d2, # LATIN CAPITAL LETTER O WITH GRAVE - 0x00e9: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE - 0x00ea: 0x00f5, # LATIN SMALL LETTER O WITH TILDE - 0x00eb: 0x0160, # LATIN CAPITAL LETTER S WITH CARON - 0x00ec: 0x0161, # LATIN SMALL LETTER S WITH CARON - 0x00ed: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE - 0x00ee: 0x0178, # LATIN CAPITAL LETTER Y WITH DIAERESIS - 0x00ef: 0x00ff, # LATIN SMALL LETTER Y WITH DIAERESIS - 0x00f0: 0x00de, # LATIN CAPITAL LETTER THORN (Icelandic) - 0x00f1: 0x00fe, # LATIN SMALL LETTER THORN (Icelandic) - 0x00f2: 0x00b7, # MIDDLE DOT - 0x00f3: 0x00b5, # MICRO SIGN - 0x00f4: 0x00b6, # PILCROW SIGN - 0x00f5: 0x00be, # VULGAR FRACTION THREE QUARTERS - 0x00f6: 0x2014, # EM DASH - 0x00f7: 0x00bc, # VULGAR FRACTION ONE QUARTER - 0x00f8: 0x00bd, # VULGAR FRACTION ONE HALF - 0x00f9: 0x00aa, # FEMININE ORDINAL INDICATOR - 0x00fa: 0x00ba, # MASCULINE ORDINAL INDICATOR - 0x00fb: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK - 0x00fc: 0x25a0, # BLACK SQUARE - 0x00fd: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK - 0x00fe: 0x00b1, # PLUS-MINUS SIGN - 0x00ff: None, -}) +### Decoding Table -### Encoding Map +decoding_table = ( + '\x00' # 0x00 -> NULL + '\x01' # 0x01 -> START OF HEADING + '\x02' # 0x02 -> START OF TEXT + '\x03' # 0x03 -> END OF TEXT + '\x04' # 0x04 -> END OF TRANSMISSION + '\x05' # 0x05 -> ENQUIRY + '\x06' # 0x06 -> ACKNOWLEDGE + '\x07' # 0x07 -> BELL + '\x08' # 0x08 -> BACKSPACE + '\t' # 0x09 -> HORIZONTAL TABULATION + '\n' # 0x0A -> LINE FEED + '\x0b' # 0x0B -> VERTICAL TABULATION + '\x0c' # 0x0C -> FORM FEED + '\r' # 0x0D -> CARRIAGE RETURN + '\x0e' # 0x0E -> SHIFT OUT + '\x0f' # 0x0F -> SHIFT IN + '\x10' # 0x10 -> DATA LINK ESCAPE + '\x11' # 0x11 -> DEVICE CONTROL ONE + '\x12' # 0x12 -> DEVICE CONTROL TWO + '\x13' # 0x13 -> DEVICE CONTROL THREE + '\x14' # 0x14 -> DEVICE CONTROL FOUR + '\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE + '\x16' # 0x16 -> SYNCHRONOUS IDLE + '\x17' # 0x17 -> END OF TRANSMISSION BLOCK + '\x18' # 0x18 -> CANCEL + '\x19' # 0x19 -> END OF MEDIUM + '\x1a' # 0x1A -> SUBSTITUTE + '\x1b' # 0x1B -> ESCAPE + '\x1c' # 0x1C -> FILE SEPARATOR + '\x1d' # 0x1D -> GROUP SEPARATOR + '\x1e' # 0x1E -> RECORD SEPARATOR + '\x1f' # 0x1F -> UNIT SEPARATOR + ' ' # 0x20 -> SPACE + '!' # 0x21 -> EXCLAMATION MARK + '"' # 0x22 -> QUOTATION MARK + '#' # 0x23 -> NUMBER SIGN + '$' # 0x24 -> DOLLAR SIGN + '%' # 0x25 -> PERCENT SIGN + '&' # 0x26 -> AMPERSAND + "'" # 0x27 -> APOSTROPHE + '(' # 0x28 -> LEFT PARENTHESIS + ')' # 0x29 -> RIGHT PARENTHESIS + '*' # 0x2A -> ASTERISK + '+' # 0x2B -> PLUS SIGN + ',' # 0x2C -> COMMA + '-' # 0x2D -> HYPHEN-MINUS + '.' # 0x2E -> FULL STOP + '/' # 0x2F -> SOLIDUS + '0' # 0x30 -> DIGIT ZERO + '1' # 0x31 -> DIGIT ONE + '2' # 0x32 -> DIGIT TWO + '3' # 0x33 -> DIGIT THREE + '4' # 0x34 -> DIGIT FOUR + '5' # 0x35 -> DIGIT FIVE + '6' # 0x36 -> DIGIT SIX + '7' # 0x37 -> DIGIT SEVEN + '8' # 0x38 -> DIGIT EIGHT + '9' # 0x39 -> DIGIT NINE + ':' # 0x3A -> COLON + ';' # 0x3B -> SEMICOLON + '<' # 0x3C -> LESS-THAN SIGN + '=' # 0x3D -> EQUALS SIGN + '>' # 0x3E -> GREATER-THAN SIGN + '?' 
# 0x3F -> QUESTION MARK + '@' # 0x40 -> COMMERCIAL AT + 'A' # 0x41 -> LATIN CAPITAL LETTER A + 'B' # 0x42 -> LATIN CAPITAL LETTER B + 'C' # 0x43 -> LATIN CAPITAL LETTER C + 'D' # 0x44 -> LATIN CAPITAL LETTER D + 'E' # 0x45 -> LATIN CAPITAL LETTER E + 'F' # 0x46 -> LATIN CAPITAL LETTER F + 'G' # 0x47 -> LATIN CAPITAL LETTER G + 'H' # 0x48 -> LATIN CAPITAL LETTER H + 'I' # 0x49 -> LATIN CAPITAL LETTER I + 'J' # 0x4A -> LATIN CAPITAL LETTER J + 'K' # 0x4B -> LATIN CAPITAL LETTER K + 'L' # 0x4C -> LATIN CAPITAL LETTER L + 'M' # 0x4D -> LATIN CAPITAL LETTER M + 'N' # 0x4E -> LATIN CAPITAL LETTER N + 'O' # 0x4F -> LATIN CAPITAL LETTER O + 'P' # 0x50 -> LATIN CAPITAL LETTER P + 'Q' # 0x51 -> LATIN CAPITAL LETTER Q + 'R' # 0x52 -> LATIN CAPITAL LETTER R + 'S' # 0x53 -> LATIN CAPITAL LETTER S + 'T' # 0x54 -> LATIN CAPITAL LETTER T + 'U' # 0x55 -> LATIN CAPITAL LETTER U + 'V' # 0x56 -> LATIN CAPITAL LETTER V + 'W' # 0x57 -> LATIN CAPITAL LETTER W + 'X' # 0x58 -> LATIN CAPITAL LETTER X + 'Y' # 0x59 -> LATIN CAPITAL LETTER Y + 'Z' # 0x5A -> LATIN CAPITAL LETTER Z + '[' # 0x5B -> LEFT SQUARE BRACKET + '\\' # 0x5C -> REVERSE SOLIDUS + ']' # 0x5D -> RIGHT SQUARE BRACKET + '^' # 0x5E -> CIRCUMFLEX ACCENT + '_' # 0x5F -> LOW LINE + '`' # 0x60 -> GRAVE ACCENT + 'a' # 0x61 -> LATIN SMALL LETTER A + 'b' # 0x62 -> LATIN SMALL LETTER B + 'c' # 0x63 -> LATIN SMALL LETTER C + 'd' # 0x64 -> LATIN SMALL LETTER D + 'e' # 0x65 -> LATIN SMALL LETTER E + 'f' # 0x66 -> LATIN SMALL LETTER F + 'g' # 0x67 -> LATIN SMALL LETTER G + 'h' # 0x68 -> LATIN SMALL LETTER H + 'i' # 0x69 -> LATIN SMALL LETTER I + 'j' # 0x6A -> LATIN SMALL LETTER J + 'k' # 0x6B -> LATIN SMALL LETTER K + 'l' # 0x6C -> LATIN SMALL LETTER L + 'm' # 0x6D -> LATIN SMALL LETTER M + 'n' # 0x6E -> LATIN SMALL LETTER N + 'o' # 0x6F -> LATIN SMALL LETTER O + 'p' # 0x70 -> LATIN SMALL LETTER P + 'q' # 0x71 -> LATIN SMALL LETTER Q + 'r' # 0x72 -> LATIN SMALL LETTER R + 's' # 0x73 -> LATIN SMALL LETTER S + 't' # 0x74 -> LATIN SMALL LETTER T + 'u' # 0x75 -> LATIN SMALL LETTER U + 'v' # 0x76 -> LATIN SMALL LETTER V + 'w' # 0x77 -> LATIN SMALL LETTER W + 'x' # 0x78 -> LATIN SMALL LETTER X + 'y' # 0x79 -> LATIN SMALL LETTER Y + 'z' # 0x7A -> LATIN SMALL LETTER Z + '{' # 0x7B -> LEFT CURLY BRACKET + '|' # 0x7C -> VERTICAL LINE + '}' # 0x7D -> RIGHT CURLY BRACKET + '~' # 0x7E -> TILDE + '\x7f' # 0x7F -> DELETE + '\x80' # 0x80 -> + '\x81' # 0x81 -> + '\x82' # 0x82 -> + '\x83' # 0x83 -> + '\x84' # 0x84 -> + '\x85' # 0x85 -> + '\x86' # 0x86 -> + '\x87' # 0x87 -> + '\x88' # 0x88 -> + '\x89' # 0x89 -> + '\x8a' # 0x8A -> + '\x8b' # 0x8B -> + '\x8c' # 0x8C -> + '\x8d' # 0x8D -> + '\x8e' # 0x8E -> + '\x8f' # 0x8F -> + '\x90' # 0x90 -> + '\x91' # 0x91 -> + '\x92' # 0x92 -> + '\x93' # 0x93 -> + '\x94' # 0x94 -> + '\x95' # 0x95 -> + '\x96' # 0x96 -> + '\x97' # 0x97 -> + '\x98' # 0x98 -> + '\x99' # 0x99 -> + '\x9a' # 0x9A -> + '\x9b' # 0x9B -> + '\x9c' # 0x9C -> + '\x9d' # 0x9D -> + '\x9e' # 0x9E -> + '\x9f' # 0x9F -> + '\xa0' # 0xA0 -> NO-BREAK SPACE + '\xc0' # 0xA1 -> LATIN CAPITAL LETTER A WITH GRAVE + '\xc2' # 0xA2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX + '\xc8' # 0xA3 -> LATIN CAPITAL LETTER E WITH GRAVE + '\xca' # 0xA4 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX + '\xcb' # 0xA5 -> LATIN CAPITAL LETTER E WITH DIAERESIS + '\xce' # 0xA6 -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX + '\xcf' # 0xA7 -> LATIN CAPITAL LETTER I WITH DIAERESIS + '\xb4' # 0xA8 -> ACUTE ACCENT + '\u02cb' # 0xA9 -> MODIFIER LETTER GRAVE ACCENT (MANDARIN CHINESE FOURTH TONE) + '\u02c6' # 0xAA -> MODIFIER 
LETTER CIRCUMFLEX ACCENT + '\xa8' # 0xAB -> DIAERESIS + '\u02dc' # 0xAC -> SMALL TILDE + '\xd9' # 0xAD -> LATIN CAPITAL LETTER U WITH GRAVE + '\xdb' # 0xAE -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX + '\u20a4' # 0xAF -> LIRA SIGN + '\xaf' # 0xB0 -> MACRON + '\xdd' # 0xB1 -> LATIN CAPITAL LETTER Y WITH ACUTE + '\xfd' # 0xB2 -> LATIN SMALL LETTER Y WITH ACUTE + '\xb0' # 0xB3 -> DEGREE SIGN + '\xc7' # 0xB4 -> LATIN CAPITAL LETTER C WITH CEDILLA + '\xe7' # 0xB5 -> LATIN SMALL LETTER C WITH CEDILLA + '\xd1' # 0xB6 -> LATIN CAPITAL LETTER N WITH TILDE + '\xf1' # 0xB7 -> LATIN SMALL LETTER N WITH TILDE + '\xa1' # 0xB8 -> INVERTED EXCLAMATION MARK + '\xbf' # 0xB9 -> INVERTED QUESTION MARK + '\xa4' # 0xBA -> CURRENCY SIGN + '\xa3' # 0xBB -> POUND SIGN + '\xa5' # 0xBC -> YEN SIGN + '\xa7' # 0xBD -> SECTION SIGN + '\u0192' # 0xBE -> LATIN SMALL LETTER F WITH HOOK + '\xa2' # 0xBF -> CENT SIGN + '\xe2' # 0xC0 -> LATIN SMALL LETTER A WITH CIRCUMFLEX + '\xea' # 0xC1 -> LATIN SMALL LETTER E WITH CIRCUMFLEX + '\xf4' # 0xC2 -> LATIN SMALL LETTER O WITH CIRCUMFLEX + '\xfb' # 0xC3 -> LATIN SMALL LETTER U WITH CIRCUMFLEX + '\xe1' # 0xC4 -> LATIN SMALL LETTER A WITH ACUTE + '\xe9' # 0xC5 -> LATIN SMALL LETTER E WITH ACUTE + '\xf3' # 0xC6 -> LATIN SMALL LETTER O WITH ACUTE + '\xfa' # 0xC7 -> LATIN SMALL LETTER U WITH ACUTE + '\xe0' # 0xC8 -> LATIN SMALL LETTER A WITH GRAVE + '\xe8' # 0xC9 -> LATIN SMALL LETTER E WITH GRAVE + '\xf2' # 0xCA -> LATIN SMALL LETTER O WITH GRAVE + '\xf9' # 0xCB -> LATIN SMALL LETTER U WITH GRAVE + '\xe4' # 0xCC -> LATIN SMALL LETTER A WITH DIAERESIS + '\xeb' # 0xCD -> LATIN SMALL LETTER E WITH DIAERESIS + '\xf6' # 0xCE -> LATIN SMALL LETTER O WITH DIAERESIS + '\xfc' # 0xCF -> LATIN SMALL LETTER U WITH DIAERESIS + '\xc5' # 0xD0 -> LATIN CAPITAL LETTER A WITH RING ABOVE + '\xee' # 0xD1 -> LATIN SMALL LETTER I WITH CIRCUMFLEX + '\xd8' # 0xD2 -> LATIN CAPITAL LETTER O WITH STROKE + '\xc6' # 0xD3 -> LATIN CAPITAL LETTER AE + '\xe5' # 0xD4 -> LATIN SMALL LETTER A WITH RING ABOVE + '\xed' # 0xD5 -> LATIN SMALL LETTER I WITH ACUTE + '\xf8' # 0xD6 -> LATIN SMALL LETTER O WITH STROKE + '\xe6' # 0xD7 -> LATIN SMALL LETTER AE + '\xc4' # 0xD8 -> LATIN CAPITAL LETTER A WITH DIAERESIS + '\xec' # 0xD9 -> LATIN SMALL LETTER I WITH GRAVE + '\xd6' # 0xDA -> LATIN CAPITAL LETTER O WITH DIAERESIS + '\xdc' # 0xDB -> LATIN CAPITAL LETTER U WITH DIAERESIS + '\xc9' # 0xDC -> LATIN CAPITAL LETTER E WITH ACUTE + '\xef' # 0xDD -> LATIN SMALL LETTER I WITH DIAERESIS + '\xdf' # 0xDE -> LATIN SMALL LETTER SHARP S (GERMAN) + '\xd4' # 0xDF -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX + '\xc1' # 0xE0 -> LATIN CAPITAL LETTER A WITH ACUTE + '\xc3' # 0xE1 -> LATIN CAPITAL LETTER A WITH TILDE + '\xe3' # 0xE2 -> LATIN SMALL LETTER A WITH TILDE + '\xd0' # 0xE3 -> LATIN CAPITAL LETTER ETH (ICELANDIC) + '\xf0' # 0xE4 -> LATIN SMALL LETTER ETH (ICELANDIC) + '\xcd' # 0xE5 -> LATIN CAPITAL LETTER I WITH ACUTE + '\xcc' # 0xE6 -> LATIN CAPITAL LETTER I WITH GRAVE + '\xd3' # 0xE7 -> LATIN CAPITAL LETTER O WITH ACUTE + '\xd2' # 0xE8 -> LATIN CAPITAL LETTER O WITH GRAVE + '\xd5' # 0xE9 -> LATIN CAPITAL LETTER O WITH TILDE + '\xf5' # 0xEA -> LATIN SMALL LETTER O WITH TILDE + '\u0160' # 0xEB -> LATIN CAPITAL LETTER S WITH CARON + '\u0161' # 0xEC -> LATIN SMALL LETTER S WITH CARON + '\xda' # 0xED -> LATIN CAPITAL LETTER U WITH ACUTE + '\u0178' # 0xEE -> LATIN CAPITAL LETTER Y WITH DIAERESIS + '\xff' # 0xEF -> LATIN SMALL LETTER Y WITH DIAERESIS + '\xde' # 0xF0 -> LATIN CAPITAL LETTER THORN (ICELANDIC) + '\xfe' # 0xF1 -> LATIN SMALL LETTER 
THORN (ICELANDIC) + '\xb7' # 0xF2 -> MIDDLE DOT + '\xb5' # 0xF3 -> MICRO SIGN + '\xb6' # 0xF4 -> PILCROW SIGN + '\xbe' # 0xF5 -> VULGAR FRACTION THREE QUARTERS + '\u2014' # 0xF6 -> EM DASH + '\xbc' # 0xF7 -> VULGAR FRACTION ONE QUARTER + '\xbd' # 0xF8 -> VULGAR FRACTION ONE HALF + '\xaa' # 0xF9 -> FEMININE ORDINAL INDICATOR + '\xba' # 0xFA -> MASCULINE ORDINAL INDICATOR + '\xab' # 0xFB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK + '\u25a0' # 0xFC -> BLACK SQUARE + '\xbb' # 0xFD -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK + '\xb1' # 0xFE -> PLUS-MINUS SIGN + '\ufffe' +) -encoding_map = codecs.make_encoding_map(decoding_map) +### Encoding table +encoding_table=codecs.charmap_build(decoding_table) + diff --git a/Lib/encodings/iso8859_1.py b/Lib/encodings/iso8859_1.py --- a/Lib/encodings/iso8859_1.py +++ b/Lib/encodings/iso8859_1.py @@ -301,6 +301,7 @@ '\xfd' # 0xFD -> LATIN SMALL LETTER Y WITH ACUTE '\xfe' # 0xFE -> LATIN SMALL LETTER THORN (Icelandic) '\xff' # 0xFF -> LATIN SMALL LETTER Y WITH DIAERESIS + '\ufffe' ## Widen to UCS2 for optimization ) ### Encoding table diff --git a/Lib/encodings/mac_latin2.py b/Lib/encodings/mac_latin2.py --- a/Lib/encodings/mac_latin2.py +++ b/Lib/encodings/mac_latin2.py @@ -1,4 +1,4 @@ -""" Python Character Mapping Codec generated from 'LATIN2.TXT' with gencodec.py. +""" Python Character Mapping Codec mac_latin2 generated from 'MAPPINGS/VENDORS/MICSFT/MAC/LATIN2.TXT' with gencodec.py. Written by Marc-Andre Lemburg (mal at lemburg.com). @@ -14,18 +14,18 @@ class Codec(codecs.Codec): def encode(self,input,errors='strict'): - return codecs.charmap_encode(input,errors,encoding_map) + return codecs.charmap_encode(input,errors,encoding_table) def decode(self,input,errors='strict'): - return codecs.charmap_decode(input,errors,decoding_map) + return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): - return codecs.charmap_encode(input,self.errors,encoding_map)[0] + return codecs.charmap_encode(input,self.errors,encoding_table)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): - return codecs.charmap_decode(input,self.errors,decoding_map)[0] + return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass @@ -46,138 +46,267 @@ streamwriter=StreamWriter, ) -### Decoding Map -decoding_map = codecs.make_identity_dict(range(256)) -decoding_map.update({ - 0x0080: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS - 0x0081: 0x0100, # LATIN CAPITAL LETTER A WITH MACRON - 0x0082: 0x0101, # LATIN SMALL LETTER A WITH MACRON - 0x0083: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE - 0x0084: 0x0104, # LATIN CAPITAL LETTER A WITH OGONEK - 0x0085: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS - 0x0086: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS - 0x0087: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE - 0x0088: 0x0105, # LATIN SMALL LETTER A WITH OGONEK - 0x0089: 0x010c, # LATIN CAPITAL LETTER C WITH CARON - 0x008a: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS - 0x008b: 0x010d, # LATIN SMALL LETTER C WITH CARON - 0x008c: 0x0106, # LATIN CAPITAL LETTER C WITH ACUTE - 0x008d: 0x0107, # LATIN SMALL LETTER C WITH ACUTE - 0x008e: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE - 0x008f: 0x0179, # LATIN CAPITAL LETTER Z WITH ACUTE - 0x0090: 0x017a, # LATIN SMALL LETTER Z WITH ACUTE - 0x0091: 0x010e, # LATIN CAPITAL LETTER D WITH CARON - 0x0092: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE - 0x0093: 0x010f, # 
LATIN SMALL LETTER D WITH CARON - 0x0094: 0x0112, # LATIN CAPITAL LETTER E WITH MACRON - 0x0095: 0x0113, # LATIN SMALL LETTER E WITH MACRON - 0x0096: 0x0116, # LATIN CAPITAL LETTER E WITH DOT ABOVE - 0x0097: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE - 0x0098: 0x0117, # LATIN SMALL LETTER E WITH DOT ABOVE - 0x0099: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX - 0x009a: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS - 0x009b: 0x00f5, # LATIN SMALL LETTER O WITH TILDE - 0x009c: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE - 0x009d: 0x011a, # LATIN CAPITAL LETTER E WITH CARON - 0x009e: 0x011b, # LATIN SMALL LETTER E WITH CARON - 0x009f: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS - 0x00a0: 0x2020, # DAGGER - 0x00a1: 0x00b0, # DEGREE SIGN - 0x00a2: 0x0118, # LATIN CAPITAL LETTER E WITH OGONEK - 0x00a4: 0x00a7, # SECTION SIGN - 0x00a5: 0x2022, # BULLET - 0x00a6: 0x00b6, # PILCROW SIGN - 0x00a7: 0x00df, # LATIN SMALL LETTER SHARP S - 0x00a8: 0x00ae, # REGISTERED SIGN - 0x00aa: 0x2122, # TRADE MARK SIGN - 0x00ab: 0x0119, # LATIN SMALL LETTER E WITH OGONEK - 0x00ac: 0x00a8, # DIAERESIS - 0x00ad: 0x2260, # NOT EQUAL TO - 0x00ae: 0x0123, # LATIN SMALL LETTER G WITH CEDILLA - 0x00af: 0x012e, # LATIN CAPITAL LETTER I WITH OGONEK - 0x00b0: 0x012f, # LATIN SMALL LETTER I WITH OGONEK - 0x00b1: 0x012a, # LATIN CAPITAL LETTER I WITH MACRON - 0x00b2: 0x2264, # LESS-THAN OR EQUAL TO - 0x00b3: 0x2265, # GREATER-THAN OR EQUAL TO - 0x00b4: 0x012b, # LATIN SMALL LETTER I WITH MACRON - 0x00b5: 0x0136, # LATIN CAPITAL LETTER K WITH CEDILLA - 0x00b6: 0x2202, # PARTIAL DIFFERENTIAL - 0x00b7: 0x2211, # N-ARY SUMMATION - 0x00b8: 0x0142, # LATIN SMALL LETTER L WITH STROKE - 0x00b9: 0x013b, # LATIN CAPITAL LETTER L WITH CEDILLA - 0x00ba: 0x013c, # LATIN SMALL LETTER L WITH CEDILLA - 0x00bb: 0x013d, # LATIN CAPITAL LETTER L WITH CARON - 0x00bc: 0x013e, # LATIN SMALL LETTER L WITH CARON - 0x00bd: 0x0139, # LATIN CAPITAL LETTER L WITH ACUTE - 0x00be: 0x013a, # LATIN SMALL LETTER L WITH ACUTE - 0x00bf: 0x0145, # LATIN CAPITAL LETTER N WITH CEDILLA - 0x00c0: 0x0146, # LATIN SMALL LETTER N WITH CEDILLA - 0x00c1: 0x0143, # LATIN CAPITAL LETTER N WITH ACUTE - 0x00c2: 0x00ac, # NOT SIGN - 0x00c3: 0x221a, # SQUARE ROOT - 0x00c4: 0x0144, # LATIN SMALL LETTER N WITH ACUTE - 0x00c5: 0x0147, # LATIN CAPITAL LETTER N WITH CARON - 0x00c6: 0x2206, # INCREMENT - 0x00c7: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK - 0x00c8: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK - 0x00c9: 0x2026, # HORIZONTAL ELLIPSIS - 0x00ca: 0x00a0, # NO-BREAK SPACE - 0x00cb: 0x0148, # LATIN SMALL LETTER N WITH CARON - 0x00cc: 0x0150, # LATIN CAPITAL LETTER O WITH DOUBLE ACUTE - 0x00cd: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE - 0x00ce: 0x0151, # LATIN SMALL LETTER O WITH DOUBLE ACUTE - 0x00cf: 0x014c, # LATIN CAPITAL LETTER O WITH MACRON - 0x00d0: 0x2013, # EN DASH - 0x00d1: 0x2014, # EM DASH - 0x00d2: 0x201c, # LEFT DOUBLE QUOTATION MARK - 0x00d3: 0x201d, # RIGHT DOUBLE QUOTATION MARK - 0x00d4: 0x2018, # LEFT SINGLE QUOTATION MARK - 0x00d5: 0x2019, # RIGHT SINGLE QUOTATION MARK - 0x00d6: 0x00f7, # DIVISION SIGN - 0x00d7: 0x25ca, # LOZENGE - 0x00d8: 0x014d, # LATIN SMALL LETTER O WITH MACRON - 0x00d9: 0x0154, # LATIN CAPITAL LETTER R WITH ACUTE - 0x00da: 0x0155, # LATIN SMALL LETTER R WITH ACUTE - 0x00db: 0x0158, # LATIN CAPITAL LETTER R WITH CARON - 0x00dc: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK - 0x00dd: 0x203a, # SINGLE RIGHT-POINTING ANGLE QUOTATION MARK - 0x00de: 0x0159, # LATIN SMALL LETTER R WITH CARON - 0x00df: 0x0156, # 
LATIN CAPITAL LETTER R WITH CEDILLA - 0x00e0: 0x0157, # LATIN SMALL LETTER R WITH CEDILLA - 0x00e1: 0x0160, # LATIN CAPITAL LETTER S WITH CARON - 0x00e2: 0x201a, # SINGLE LOW-9 QUOTATION MARK - 0x00e3: 0x201e, # DOUBLE LOW-9 QUOTATION MARK - 0x00e4: 0x0161, # LATIN SMALL LETTER S WITH CARON - 0x00e5: 0x015a, # LATIN CAPITAL LETTER S WITH ACUTE - 0x00e6: 0x015b, # LATIN SMALL LETTER S WITH ACUTE - 0x00e7: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE - 0x00e8: 0x0164, # LATIN CAPITAL LETTER T WITH CARON - 0x00e9: 0x0165, # LATIN SMALL LETTER T WITH CARON - 0x00ea: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE - 0x00eb: 0x017d, # LATIN CAPITAL LETTER Z WITH CARON - 0x00ec: 0x017e, # LATIN SMALL LETTER Z WITH CARON - 0x00ed: 0x016a, # LATIN CAPITAL LETTER U WITH MACRON - 0x00ee: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE - 0x00ef: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX - 0x00f0: 0x016b, # LATIN SMALL LETTER U WITH MACRON - 0x00f1: 0x016e, # LATIN CAPITAL LETTER U WITH RING ABOVE - 0x00f2: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE - 0x00f3: 0x016f, # LATIN SMALL LETTER U WITH RING ABOVE - 0x00f4: 0x0170, # LATIN CAPITAL LETTER U WITH DOUBLE ACUTE - 0x00f5: 0x0171, # LATIN SMALL LETTER U WITH DOUBLE ACUTE - 0x00f6: 0x0172, # LATIN CAPITAL LETTER U WITH OGONEK - 0x00f7: 0x0173, # LATIN SMALL LETTER U WITH OGONEK - 0x00f8: 0x00dd, # LATIN CAPITAL LETTER Y WITH ACUTE - 0x00f9: 0x00fd, # LATIN SMALL LETTER Y WITH ACUTE - 0x00fa: 0x0137, # LATIN SMALL LETTER K WITH CEDILLA - 0x00fb: 0x017b, # LATIN CAPITAL LETTER Z WITH DOT ABOVE - 0x00fc: 0x0141, # LATIN CAPITAL LETTER L WITH STROKE - 0x00fd: 0x017c, # LATIN SMALL LETTER Z WITH DOT ABOVE - 0x00fe: 0x0122, # LATIN CAPITAL LETTER G WITH CEDILLA - 0x00ff: 0x02c7, # CARON -}) +### Decoding Table -### Encoding Map +decoding_table = ( + '\x00' # 0x00 -> NULL + '\x01' # 0x01 -> START OF HEADING + '\x02' # 0x02 -> START OF TEXT + '\x03' # 0x03 -> END OF TEXT + '\x04' # 0x04 -> END OF TRANSMISSION + '\x05' # 0x05 -> ENQUIRY + '\x06' # 0x06 -> ACKNOWLEDGE + '\x07' # 0x07 -> BELL + '\x08' # 0x08 -> BACKSPACE + '\t' # 0x09 -> HORIZONTAL TABULATION + '\n' # 0x0A -> LINE FEED + '\x0b' # 0x0B -> VERTICAL TABULATION + '\x0c' # 0x0C -> FORM FEED + '\r' # 0x0D -> CARRIAGE RETURN + '\x0e' # 0x0E -> SHIFT OUT + '\x0f' # 0x0F -> SHIFT IN + '\x10' # 0x10 -> DATA LINK ESCAPE + '\x11' # 0x11 -> DEVICE CONTROL ONE + '\x12' # 0x12 -> DEVICE CONTROL TWO + '\x13' # 0x13 -> DEVICE CONTROL THREE + '\x14' # 0x14 -> DEVICE CONTROL FOUR + '\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE + '\x16' # 0x16 -> SYNCHRONOUS IDLE + '\x17' # 0x17 -> END OF TRANSMISSION BLOCK + '\x18' # 0x18 -> CANCEL + '\x19' # 0x19 -> END OF MEDIUM + '\x1a' # 0x1A -> SUBSTITUTE + '\x1b' # 0x1B -> ESCAPE + '\x1c' # 0x1C -> FILE SEPARATOR + '\x1d' # 0x1D -> GROUP SEPARATOR + '\x1e' # 0x1E -> RECORD SEPARATOR + '\x1f' # 0x1F -> UNIT SEPARATOR + ' ' # 0x20 -> SPACE + '!' # 0x21 -> EXCLAMATION MARK + '"' # 0x22 -> QUOTATION MARK + '#' # 0x23 -> NUMBER SIGN + '$' # 0x24 -> DOLLAR SIGN + '%' # 0x25 -> PERCENT SIGN + '&' # 0x26 -> AMPERSAND + "'" # 0x27 -> APOSTROPHE + '(' # 0x28 -> LEFT PARENTHESIS + ')' # 0x29 -> RIGHT PARENTHESIS + '*' # 0x2A -> ASTERISK + '+' # 0x2B -> PLUS SIGN + ',' # 0x2C -> COMMA + '-' # 0x2D -> HYPHEN-MINUS + '.' 
# 0x2E -> FULL STOP + '/' # 0x2F -> SOLIDUS + '0' # 0x30 -> DIGIT ZERO + '1' # 0x31 -> DIGIT ONE + '2' # 0x32 -> DIGIT TWO + '3' # 0x33 -> DIGIT THREE + '4' # 0x34 -> DIGIT FOUR + '5' # 0x35 -> DIGIT FIVE + '6' # 0x36 -> DIGIT SIX + '7' # 0x37 -> DIGIT SEVEN + '8' # 0x38 -> DIGIT EIGHT + '9' # 0x39 -> DIGIT NINE + ':' # 0x3A -> COLON + ';' # 0x3B -> SEMICOLON + '<' # 0x3C -> LESS-THAN SIGN + '=' # 0x3D -> EQUALS SIGN + '>' # 0x3E -> GREATER-THAN SIGN + '?' # 0x3F -> QUESTION MARK + '@' # 0x40 -> COMMERCIAL AT + 'A' # 0x41 -> LATIN CAPITAL LETTER A + 'B' # 0x42 -> LATIN CAPITAL LETTER B + 'C' # 0x43 -> LATIN CAPITAL LETTER C + 'D' # 0x44 -> LATIN CAPITAL LETTER D + 'E' # 0x45 -> LATIN CAPITAL LETTER E + 'F' # 0x46 -> LATIN CAPITAL LETTER F + 'G' # 0x47 -> LATIN CAPITAL LETTER G + 'H' # 0x48 -> LATIN CAPITAL LETTER H + 'I' # 0x49 -> LATIN CAPITAL LETTER I + 'J' # 0x4A -> LATIN CAPITAL LETTER J + 'K' # 0x4B -> LATIN CAPITAL LETTER K + 'L' # 0x4C -> LATIN CAPITAL LETTER L + 'M' # 0x4D -> LATIN CAPITAL LETTER M + 'N' # 0x4E -> LATIN CAPITAL LETTER N + 'O' # 0x4F -> LATIN CAPITAL LETTER O + 'P' # 0x50 -> LATIN CAPITAL LETTER P + 'Q' # 0x51 -> LATIN CAPITAL LETTER Q + 'R' # 0x52 -> LATIN CAPITAL LETTER R + 'S' # 0x53 -> LATIN CAPITAL LETTER S + 'T' # 0x54 -> LATIN CAPITAL LETTER T + 'U' # 0x55 -> LATIN CAPITAL LETTER U + 'V' # 0x56 -> LATIN CAPITAL LETTER V + 'W' # 0x57 -> LATIN CAPITAL LETTER W + 'X' # 0x58 -> LATIN CAPITAL LETTER X + 'Y' # 0x59 -> LATIN CAPITAL LETTER Y + 'Z' # 0x5A -> LATIN CAPITAL LETTER Z + '[' # 0x5B -> LEFT SQUARE BRACKET + '\\' # 0x5C -> REVERSE SOLIDUS + ']' # 0x5D -> RIGHT SQUARE BRACKET + '^' # 0x5E -> CIRCUMFLEX ACCENT + '_' # 0x5F -> LOW LINE + '`' # 0x60 -> GRAVE ACCENT + 'a' # 0x61 -> LATIN SMALL LETTER A + 'b' # 0x62 -> LATIN SMALL LETTER B + 'c' # 0x63 -> LATIN SMALL LETTER C + 'd' # 0x64 -> LATIN SMALL LETTER D + 'e' # 0x65 -> LATIN SMALL LETTER E + 'f' # 0x66 -> LATIN SMALL LETTER F + 'g' # 0x67 -> LATIN SMALL LETTER G + 'h' # 0x68 -> LATIN SMALL LETTER H + 'i' # 0x69 -> LATIN SMALL LETTER I + 'j' # 0x6A -> LATIN SMALL LETTER J + 'k' # 0x6B -> LATIN SMALL LETTER K + 'l' # 0x6C -> LATIN SMALL LETTER L + 'm' # 0x6D -> LATIN SMALL LETTER M + 'n' # 0x6E -> LATIN SMALL LETTER N + 'o' # 0x6F -> LATIN SMALL LETTER O + 'p' # 0x70 -> LATIN SMALL LETTER P + 'q' # 0x71 -> LATIN SMALL LETTER Q + 'r' # 0x72 -> LATIN SMALL LETTER R + 's' # 0x73 -> LATIN SMALL LETTER S + 't' # 0x74 -> LATIN SMALL LETTER T + 'u' # 0x75 -> LATIN SMALL LETTER U + 'v' # 0x76 -> LATIN SMALL LETTER V + 'w' # 0x77 -> LATIN SMALL LETTER W + 'x' # 0x78 -> LATIN SMALL LETTER X + 'y' # 0x79 -> LATIN SMALL LETTER Y + 'z' # 0x7A -> LATIN SMALL LETTER Z + '{' # 0x7B -> LEFT CURLY BRACKET + '|' # 0x7C -> VERTICAL LINE + '}' # 0x7D -> RIGHT CURLY BRACKET + '~' # 0x7E -> TILDE + '\x7f' # 0x7F -> DELETE + '\xc4' # 0x80 -> LATIN CAPITAL LETTER A WITH DIAERESIS + '\u0100' # 0x81 -> LATIN CAPITAL LETTER A WITH MACRON + '\u0101' # 0x82 -> LATIN SMALL LETTER A WITH MACRON + '\xc9' # 0x83 -> LATIN CAPITAL LETTER E WITH ACUTE + '\u0104' # 0x84 -> LATIN CAPITAL LETTER A WITH OGONEK + '\xd6' # 0x85 -> LATIN CAPITAL LETTER O WITH DIAERESIS + '\xdc' # 0x86 -> LATIN CAPITAL LETTER U WITH DIAERESIS + '\xe1' # 0x87 -> LATIN SMALL LETTER A WITH ACUTE + '\u0105' # 0x88 -> LATIN SMALL LETTER A WITH OGONEK + '\u010c' # 0x89 -> LATIN CAPITAL LETTER C WITH CARON + '\xe4' # 0x8A -> LATIN SMALL LETTER A WITH DIAERESIS + '\u010d' # 0x8B -> LATIN SMALL LETTER C WITH CARON + '\u0106' # 0x8C -> LATIN CAPITAL LETTER C WITH ACUTE + 
'\u0107' # 0x8D -> LATIN SMALL LETTER C WITH ACUTE + '\xe9' # 0x8E -> LATIN SMALL LETTER E WITH ACUTE + '\u0179' # 0x8F -> LATIN CAPITAL LETTER Z WITH ACUTE + '\u017a' # 0x90 -> LATIN SMALL LETTER Z WITH ACUTE + '\u010e' # 0x91 -> LATIN CAPITAL LETTER D WITH CARON + '\xed' # 0x92 -> LATIN SMALL LETTER I WITH ACUTE + '\u010f' # 0x93 -> LATIN SMALL LETTER D WITH CARON + '\u0112' # 0x94 -> LATIN CAPITAL LETTER E WITH MACRON + '\u0113' # 0x95 -> LATIN SMALL LETTER E WITH MACRON + '\u0116' # 0x96 -> LATIN CAPITAL LETTER E WITH DOT ABOVE + '\xf3' # 0x97 -> LATIN SMALL LETTER O WITH ACUTE + '\u0117' # 0x98 -> LATIN SMALL LETTER E WITH DOT ABOVE + '\xf4' # 0x99 -> LATIN SMALL LETTER O WITH CIRCUMFLEX + '\xf6' # 0x9A -> LATIN SMALL LETTER O WITH DIAERESIS + '\xf5' # 0x9B -> LATIN SMALL LETTER O WITH TILDE + '\xfa' # 0x9C -> LATIN SMALL LETTER U WITH ACUTE + '\u011a' # 0x9D -> LATIN CAPITAL LETTER E WITH CARON + '\u011b' # 0x9E -> LATIN SMALL LETTER E WITH CARON + '\xfc' # 0x9F -> LATIN SMALL LETTER U WITH DIAERESIS + '\u2020' # 0xA0 -> DAGGER + '\xb0' # 0xA1 -> DEGREE SIGN + '\u0118' # 0xA2 -> LATIN CAPITAL LETTER E WITH OGONEK + '\xa3' # 0xA3 -> POUND SIGN + '\xa7' # 0xA4 -> SECTION SIGN + '\u2022' # 0xA5 -> BULLET + '\xb6' # 0xA6 -> PILCROW SIGN + '\xdf' # 0xA7 -> LATIN SMALL LETTER SHARP S + '\xae' # 0xA8 -> REGISTERED SIGN + '\xa9' # 0xA9 -> COPYRIGHT SIGN + '\u2122' # 0xAA -> TRADE MARK SIGN + '\u0119' # 0xAB -> LATIN SMALL LETTER E WITH OGONEK + '\xa8' # 0xAC -> DIAERESIS + '\u2260' # 0xAD -> NOT EQUAL TO + '\u0123' # 0xAE -> LATIN SMALL LETTER G WITH CEDILLA + '\u012e' # 0xAF -> LATIN CAPITAL LETTER I WITH OGONEK + '\u012f' # 0xB0 -> LATIN SMALL LETTER I WITH OGONEK + '\u012a' # 0xB1 -> LATIN CAPITAL LETTER I WITH MACRON + '\u2264' # 0xB2 -> LESS-THAN OR EQUAL TO + '\u2265' # 0xB3 -> GREATER-THAN OR EQUAL TO + '\u012b' # 0xB4 -> LATIN SMALL LETTER I WITH MACRON + '\u0136' # 0xB5 -> LATIN CAPITAL LETTER K WITH CEDILLA + '\u2202' # 0xB6 -> PARTIAL DIFFERENTIAL + '\u2211' # 0xB7 -> N-ARY SUMMATION + '\u0142' # 0xB8 -> LATIN SMALL LETTER L WITH STROKE + '\u013b' # 0xB9 -> LATIN CAPITAL LETTER L WITH CEDILLA + '\u013c' # 0xBA -> LATIN SMALL LETTER L WITH CEDILLA + '\u013d' # 0xBB -> LATIN CAPITAL LETTER L WITH CARON + '\u013e' # 0xBC -> LATIN SMALL LETTER L WITH CARON + '\u0139' # 0xBD -> LATIN CAPITAL LETTER L WITH ACUTE + '\u013a' # 0xBE -> LATIN SMALL LETTER L WITH ACUTE + '\u0145' # 0xBF -> LATIN CAPITAL LETTER N WITH CEDILLA + '\u0146' # 0xC0 -> LATIN SMALL LETTER N WITH CEDILLA + '\u0143' # 0xC1 -> LATIN CAPITAL LETTER N WITH ACUTE + '\xac' # 0xC2 -> NOT SIGN + '\u221a' # 0xC3 -> SQUARE ROOT + '\u0144' # 0xC4 -> LATIN SMALL LETTER N WITH ACUTE + '\u0147' # 0xC5 -> LATIN CAPITAL LETTER N WITH CARON + '\u2206' # 0xC6 -> INCREMENT + '\xab' # 0xC7 -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK + '\xbb' # 0xC8 -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK + '\u2026' # 0xC9 -> HORIZONTAL ELLIPSIS + '\xa0' # 0xCA -> NO-BREAK SPACE + '\u0148' # 0xCB -> LATIN SMALL LETTER N WITH CARON + '\u0150' # 0xCC -> LATIN CAPITAL LETTER O WITH DOUBLE ACUTE + '\xd5' # 0xCD -> LATIN CAPITAL LETTER O WITH TILDE + '\u0151' # 0xCE -> LATIN SMALL LETTER O WITH DOUBLE ACUTE + '\u014c' # 0xCF -> LATIN CAPITAL LETTER O WITH MACRON + '\u2013' # 0xD0 -> EN DASH + '\u2014' # 0xD1 -> EM DASH + '\u201c' # 0xD2 -> LEFT DOUBLE QUOTATION MARK + '\u201d' # 0xD3 -> RIGHT DOUBLE QUOTATION MARK + '\u2018' # 0xD4 -> LEFT SINGLE QUOTATION MARK + '\u2019' # 0xD5 -> RIGHT SINGLE QUOTATION MARK + '\xf7' # 0xD6 -> DIVISION SIGN + '\u25ca' 
# 0xD7 -> LOZENGE + '\u014d' # 0xD8 -> LATIN SMALL LETTER O WITH MACRON + '\u0154' # 0xD9 -> LATIN CAPITAL LETTER R WITH ACUTE + '\u0155' # 0xDA -> LATIN SMALL LETTER R WITH ACUTE + '\u0158' # 0xDB -> LATIN CAPITAL LETTER R WITH CARON + '\u2039' # 0xDC -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK + '\u203a' # 0xDD -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK + '\u0159' # 0xDE -> LATIN SMALL LETTER R WITH CARON + '\u0156' # 0xDF -> LATIN CAPITAL LETTER R WITH CEDILLA + '\u0157' # 0xE0 -> LATIN SMALL LETTER R WITH CEDILLA + '\u0160' # 0xE1 -> LATIN CAPITAL LETTER S WITH CARON + '\u201a' # 0xE2 -> SINGLE LOW-9 QUOTATION MARK + '\u201e' # 0xE3 -> DOUBLE LOW-9 QUOTATION MARK + '\u0161' # 0xE4 -> LATIN SMALL LETTER S WITH CARON + '\u015a' # 0xE5 -> LATIN CAPITAL LETTER S WITH ACUTE + '\u015b' # 0xE6 -> LATIN SMALL LETTER S WITH ACUTE + '\xc1' # 0xE7 -> LATIN CAPITAL LETTER A WITH ACUTE + '\u0164' # 0xE8 -> LATIN CAPITAL LETTER T WITH CARON + '\u0165' # 0xE9 -> LATIN SMALL LETTER T WITH CARON + '\xcd' # 0xEA -> LATIN CAPITAL LETTER I WITH ACUTE + '\u017d' # 0xEB -> LATIN CAPITAL LETTER Z WITH CARON + '\u017e' # 0xEC -> LATIN SMALL LETTER Z WITH CARON + '\u016a' # 0xED -> LATIN CAPITAL LETTER U WITH MACRON + '\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE + '\xd4' # 0xEF -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX + '\u016b' # 0xF0 -> LATIN SMALL LETTER U WITH MACRON + '\u016e' # 0xF1 -> LATIN CAPITAL LETTER U WITH RING ABOVE + '\xda' # 0xF2 -> LATIN CAPITAL LETTER U WITH ACUTE + '\u016f' # 0xF3 -> LATIN SMALL LETTER U WITH RING ABOVE + '\u0170' # 0xF4 -> LATIN CAPITAL LETTER U WITH DOUBLE ACUTE + '\u0171' # 0xF5 -> LATIN SMALL LETTER U WITH DOUBLE ACUTE + '\u0172' # 0xF6 -> LATIN CAPITAL LETTER U WITH OGONEK + '\u0173' # 0xF7 -> LATIN SMALL LETTER U WITH OGONEK + '\xdd' # 0xF8 -> LATIN CAPITAL LETTER Y WITH ACUTE + '\xfd' # 0xF9 -> LATIN SMALL LETTER Y WITH ACUTE + '\u0137' # 0xFA -> LATIN SMALL LETTER K WITH CEDILLA + '\u017b' # 0xFB -> LATIN CAPITAL LETTER Z WITH DOT ABOVE + '\u0141' # 0xFC -> LATIN CAPITAL LETTER L WITH STROKE + '\u017c' # 0xFD -> LATIN SMALL LETTER Z WITH DOT ABOVE + '\u0122' # 0xFE -> LATIN CAPITAL LETTER G WITH CEDILLA + '\u02c7' # 0xFF -> CARON +) -encoding_map = codecs.make_encoding_map(decoding_map) +### Encoding table +encoding_table=codecs.charmap_build(decoding_table) diff --git a/Lib/encodings/palmos.py b/Lib/encodings/palmos.py --- a/Lib/encodings/palmos.py +++ b/Lib/encodings/palmos.py @@ -10,18 +10,18 @@ class Codec(codecs.Codec): def encode(self,input,errors='strict'): - return codecs.charmap_encode(input,errors,encoding_map) + return codecs.charmap_encode(input,errors,encoding_table) def decode(self,input,errors='strict'): - return codecs.charmap_decode(input,errors,decoding_map) + return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): - return codecs.charmap_encode(input,self.errors,encoding_map)[0] + return codecs.charmap_encode(input,self.errors,encoding_table)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): - return codecs.charmap_decode(input,self.errors,decoding_map)[0] + return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass @@ -42,42 +42,268 @@ streamwriter=StreamWriter, ) -### Decoding Map -decoding_map = codecs.make_identity_dict(range(256)) +### Decoding Table -# The PalmOS character set is mostly iso-8859-1 with some differences. 
-decoding_map.update({ - 0x0080: 0x20ac, # EURO SIGN - 0x0082: 0x201a, # SINGLE LOW-9 QUOTATION MARK - 0x0083: 0x0192, # LATIN SMALL LETTER F WITH HOOK - 0x0084: 0x201e, # DOUBLE LOW-9 QUOTATION MARK - 0x0085: 0x2026, # HORIZONTAL ELLIPSIS - 0x0086: 0x2020, # DAGGER - 0x0087: 0x2021, # DOUBLE DAGGER - 0x0088: 0x02c6, # MODIFIER LETTER CIRCUMFLEX ACCENT - 0x0089: 0x2030, # PER MILLE SIGN - 0x008a: 0x0160, # LATIN CAPITAL LETTER S WITH CARON - 0x008b: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK - 0x008c: 0x0152, # LATIN CAPITAL LIGATURE OE - 0x008d: 0x2666, # BLACK DIAMOND SUIT - 0x008e: 0x2663, # BLACK CLUB SUIT - 0x008f: 0x2665, # BLACK HEART SUIT - 0x0090: 0x2660, # BLACK SPADE SUIT - 0x0091: 0x2018, # LEFT SINGLE QUOTATION MARK - 0x0092: 0x2019, # RIGHT SINGLE QUOTATION MARK - 0x0093: 0x201c, # LEFT DOUBLE QUOTATION MARK - 0x0094: 0x201d, # RIGHT DOUBLE QUOTATION MARK - 0x0095: 0x2022, # BULLET - 0x0096: 0x2013, # EN DASH - 0x0097: 0x2014, # EM DASH - 0x0098: 0x02dc, # SMALL TILDE - 0x0099: 0x2122, # TRADE MARK SIGN - 0x009a: 0x0161, # LATIN SMALL LETTER S WITH CARON - 0x009c: 0x0153, # LATIN SMALL LIGATURE OE - 0x009f: 0x0178, # LATIN CAPITAL LETTER Y WITH DIAERESIS -}) +decoding_table = ( + '\x00' # 0x00 -> NULL + '\x01' # 0x01 -> START OF HEADING + '\x02' # 0x02 -> START OF TEXT + '\x03' # 0x03 -> END OF TEXT + '\x04' # 0x04 -> END OF TRANSMISSION + '\x05' # 0x05 -> ENQUIRY + '\x06' # 0x06 -> ACKNOWLEDGE + '\x07' # 0x07 -> BELL + '\x08' # 0x08 -> BACKSPACE + '\t' # 0x09 -> HORIZONTAL TABULATION + '\n' # 0x0A -> LINE FEED + '\x0b' # 0x0B -> VERTICAL TABULATION + '\x0c' # 0x0C -> FORM FEED + '\r' # 0x0D -> CARRIAGE RETURN + '\x0e' # 0x0E -> SHIFT OUT + '\x0f' # 0x0F -> SHIFT IN + '\x10' # 0x10 -> DATA LINK ESCAPE + '\x11' # 0x11 -> DEVICE CONTROL ONE + '\x12' # 0x12 -> DEVICE CONTROL TWO + '\x13' # 0x13 -> DEVICE CONTROL THREE + '\x14' # 0x14 -> DEVICE CONTROL FOUR + '\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE + '\x16' # 0x16 -> SYNCHRONOUS IDLE + '\x17' # 0x17 -> END OF TRANSMISSION BLOCK + '\x18' # 0x18 -> CANCEL + '\x19' # 0x19 -> END OF MEDIUM + '\x1a' # 0x1A -> SUBSTITUTE + '\x1b' # 0x1B -> ESCAPE + '\x1c' # 0x1C -> FILE SEPARATOR + '\x1d' # 0x1D -> GROUP SEPARATOR + '\x1e' # 0x1E -> RECORD SEPARATOR + '\x1f' # 0x1F -> UNIT SEPARATOR + ' ' # 0x20 -> SPACE + '!' # 0x21 -> EXCLAMATION MARK + '"' # 0x22 -> QUOTATION MARK + '#' # 0x23 -> NUMBER SIGN + '$' # 0x24 -> DOLLAR SIGN + '%' # 0x25 -> PERCENT SIGN + '&' # 0x26 -> AMPERSAND + "'" # 0x27 -> APOSTROPHE + '(' # 0x28 -> LEFT PARENTHESIS + ')' # 0x29 -> RIGHT PARENTHESIS + '*' # 0x2A -> ASTERISK + '+' # 0x2B -> PLUS SIGN + ',' # 0x2C -> COMMA + '-' # 0x2D -> HYPHEN-MINUS + '.' # 0x2E -> FULL STOP + '/' # 0x2F -> SOLIDUS + '0' # 0x30 -> DIGIT ZERO + '1' # 0x31 -> DIGIT ONE + '2' # 0x32 -> DIGIT TWO + '3' # 0x33 -> DIGIT THREE + '4' # 0x34 -> DIGIT FOUR + '5' # 0x35 -> DIGIT FIVE + '6' # 0x36 -> DIGIT SIX + '7' # 0x37 -> DIGIT SEVEN + '8' # 0x38 -> DIGIT EIGHT + '9' # 0x39 -> DIGIT NINE + ':' # 0x3A -> COLON + ';' # 0x3B -> SEMICOLON + '<' # 0x3C -> LESS-THAN SIGN + '=' # 0x3D -> EQUALS SIGN + '>' # 0x3E -> GREATER-THAN SIGN + '?' 
# 0x3F -> QUESTION MARK + '@' # 0x40 -> COMMERCIAL AT + 'A' # 0x41 -> LATIN CAPITAL LETTER A + 'B' # 0x42 -> LATIN CAPITAL LETTER B + 'C' # 0x43 -> LATIN CAPITAL LETTER C + 'D' # 0x44 -> LATIN CAPITAL LETTER D + 'E' # 0x45 -> LATIN CAPITAL LETTER E + 'F' # 0x46 -> LATIN CAPITAL LETTER F + 'G' # 0x47 -> LATIN CAPITAL LETTER G + 'H' # 0x48 -> LATIN CAPITAL LETTER H + 'I' # 0x49 -> LATIN CAPITAL LETTER I + 'J' # 0x4A -> LATIN CAPITAL LETTER J + 'K' # 0x4B -> LATIN CAPITAL LETTER K + 'L' # 0x4C -> LATIN CAPITAL LETTER L + 'M' # 0x4D -> LATIN CAPITAL LETTER M + 'N' # 0x4E -> LATIN CAPITAL LETTER N + 'O' # 0x4F -> LATIN CAPITAL LETTER O + 'P' # 0x50 -> LATIN CAPITAL LETTER P + 'Q' # 0x51 -> LATIN CAPITAL LETTER Q + 'R' # 0x52 -> LATIN CAPITAL LETTER R + 'S' # 0x53 -> LATIN CAPITAL LETTER S + 'T' # 0x54 -> LATIN CAPITAL LETTER T + 'U' # 0x55 -> LATIN CAPITAL LETTER U + 'V' # 0x56 -> LATIN CAPITAL LETTER V + 'W' # 0x57 -> LATIN CAPITAL LETTER W + 'X' # 0x58 -> LATIN CAPITAL LETTER X + 'Y' # 0x59 -> LATIN CAPITAL LETTER Y + 'Z' # 0x5A -> LATIN CAPITAL LETTER Z + '[' # 0x5B -> LEFT SQUARE BRACKET + '\\' # 0x5C -> REVERSE SOLIDUS + ']' # 0x5D -> RIGHT SQUARE BRACKET + '^' # 0x5E -> CIRCUMFLEX ACCENT + '_' # 0x5F -> LOW LINE + '`' # 0x60 -> GRAVE ACCENT + 'a' # 0x61 -> LATIN SMALL LETTER A + 'b' # 0x62 -> LATIN SMALL LETTER B + 'c' # 0x63 -> LATIN SMALL LETTER C + 'd' # 0x64 -> LATIN SMALL LETTER D + 'e' # 0x65 -> LATIN SMALL LETTER E + 'f' # 0x66 -> LATIN SMALL LETTER F + 'g' # 0x67 -> LATIN SMALL LETTER G + 'h' # 0x68 -> LATIN SMALL LETTER H + 'i' # 0x69 -> LATIN SMALL LETTER I + 'j' # 0x6A -> LATIN SMALL LETTER J + 'k' # 0x6B -> LATIN SMALL LETTER K + 'l' # 0x6C -> LATIN SMALL LETTER L + 'm' # 0x6D -> LATIN SMALL LETTER M + 'n' # 0x6E -> LATIN SMALL LETTER N + 'o' # 0x6F -> LATIN SMALL LETTER O + 'p' # 0x70 -> LATIN SMALL LETTER P + 'q' # 0x71 -> LATIN SMALL LETTER Q + 'r' # 0x72 -> LATIN SMALL LETTER R + 's' # 0x73 -> LATIN SMALL LETTER S + 't' # 0x74 -> LATIN SMALL LETTER T + 'u' # 0x75 -> LATIN SMALL LETTER U + 'v' # 0x76 -> LATIN SMALL LETTER V + 'w' # 0x77 -> LATIN SMALL LETTER W + 'x' # 0x78 -> LATIN SMALL LETTER X + 'y' # 0x79 -> LATIN SMALL LETTER Y + 'z' # 0x7A -> LATIN SMALL LETTER Z + '{' # 0x7B -> LEFT CURLY BRACKET + '|' # 0x7C -> VERTICAL LINE + '}' # 0x7D -> RIGHT CURLY BRACKET + '~' # 0x7E -> TILDE + '\x7f' # 0x7F -> DELETE + '\u20ac' # 0x80 -> EURO SIGN + '\x81' # 0x81 -> + '\u201a' # 0x82 -> SINGLE LOW-9 QUOTATION MARK + '\u0192' # 0x83 -> LATIN SMALL LETTER F WITH HOOK + '\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK + '\u2026' # 0x85 -> HORIZONTAL ELLIPSIS + '\u2020' # 0x86 -> DAGGER + '\u2021' # 0x87 -> DOUBLE DAGGER + '\u02c6' # 0x88 -> MODIFIER LETTER CIRCUMFLEX ACCENT + '\u2030' # 0x89 -> PER MILLE SIGN + '\u0160' # 0x8A -> LATIN CAPITAL LETTER S WITH CARON + '\u2039' # 0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK + '\u0152' # 0x8C -> LATIN CAPITAL LIGATURE OE + '\u2666' # 0x8D -> BLACK DIAMOND SUIT + '\u2663' # 0x8E -> BLACK CLUB SUIT + '\u2665' # 0x8F -> BLACK HEART SUIT + '\u2660' # 0x90 -> BLACK SPADE SUIT + '\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK + '\u2019' # 0x92 -> RIGHT SINGLE QUOTATION MARK + '\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK + '\u201d' # 0x94 -> RIGHT DOUBLE QUOTATION MARK + '\u2022' # 0x95 -> BULLET + '\u2013' # 0x96 -> EN DASH + '\u2014' # 0x97 -> EM DASH + '\u02dc' # 0x98 -> SMALL TILDE + '\u2122' # 0x99 -> TRADE MARK SIGN + '\u0161' # 0x9A -> LATIN SMALL LETTER S WITH CARON + '\x9b' # 0x9B -> + '\u0153' # 0x9C -> LATIN SMALL LIGATURE OE + 
'\x9d' # 0x9D -> + '\x9e' # 0x9E -> + '\u0178' # 0x9F -> LATIN CAPITAL LETTER Y WITH DIAERESIS + '\xa0' # 0xA0 -> NO-BREAK SPACE + '\xa1' # 0xA1 -> INVERTED EXCLAMATION MARK + '\xa2' # 0xA2 -> CENT SIGN + '\xa3' # 0xA3 -> POUND SIGN + '\xa4' # 0xA4 -> CURRENCY SIGN + '\xa5' # 0xA5 -> YEN SIGN + '\xa6' # 0xA6 -> BROKEN BAR + '\xa7' # 0xA7 -> SECTION SIGN + '\xa8' # 0xA8 -> DIAERESIS + '\xa9' # 0xA9 -> COPYRIGHT SIGN + '\xaa' # 0xAA -> FEMININE ORDINAL INDICATOR + '\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK + '\xac' # 0xAC -> NOT SIGN + '\xad' # 0xAD -> SOFT HYPHEN + '\xae' # 0xAE -> REGISTERED SIGN + '\xaf' # 0xAF -> MACRON + '\xb0' # 0xB0 -> DEGREE SIGN + '\xb1' # 0xB1 -> PLUS-MINUS SIGN + '\xb2' # 0xB2 -> SUPERSCRIPT TWO + '\xb3' # 0xB3 -> SUPERSCRIPT THREE + '\xb4' # 0xB4 -> ACUTE ACCENT + '\xb5' # 0xB5 -> MICRO SIGN + '\xb6' # 0xB6 -> PILCROW SIGN + '\xb7' # 0xB7 -> MIDDLE DOT + '\xb8' # 0xB8 -> CEDILLA + '\xb9' # 0xB9 -> SUPERSCRIPT ONE + '\xba' # 0xBA -> MASCULINE ORDINAL INDICATOR + '\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK + '\xbc' # 0xBC -> VULGAR FRACTION ONE QUARTER + '\xbd' # 0xBD -> VULGAR FRACTION ONE HALF + '\xbe' # 0xBE -> VULGAR FRACTION THREE QUARTERS + '\xbf' # 0xBF -> INVERTED QUESTION MARK + '\xc0' # 0xC0 -> LATIN CAPITAL LETTER A WITH GRAVE + '\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE + '\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX + '\xc3' # 0xC3 -> LATIN CAPITAL LETTER A WITH TILDE + '\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS + '\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE + '\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE + '\xc7' # 0xC7 -> LATIN CAPITAL LETTER C WITH CEDILLA + '\xc8' # 0xC8 -> LATIN CAPITAL LETTER E WITH GRAVE + '\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE + '\xca' # 0xCA -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX + '\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS + '\xcc' # 0xCC -> LATIN CAPITAL LETTER I WITH GRAVE + '\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE + '\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX + '\xcf' # 0xCF -> LATIN CAPITAL LETTER I WITH DIAERESIS + '\xd0' # 0xD0 -> LATIN CAPITAL LETTER ETH (Icelandic) + '\xd1' # 0xD1 -> LATIN CAPITAL LETTER N WITH TILDE + '\xd2' # 0xD2 -> LATIN CAPITAL LETTER O WITH GRAVE + '\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE + '\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX + '\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE + '\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS + '\xd7' # 0xD7 -> MULTIPLICATION SIGN + '\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE + '\xd9' # 0xD9 -> LATIN CAPITAL LETTER U WITH GRAVE + '\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE + '\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX + '\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS + '\xdd' # 0xDD -> LATIN CAPITAL LETTER Y WITH ACUTE + '\xde' # 0xDE -> LATIN CAPITAL LETTER THORN (Icelandic) + '\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S (German) + '\xe0' # 0xE0 -> LATIN SMALL LETTER A WITH GRAVE + '\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE + '\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX + '\xe3' # 0xE3 -> LATIN SMALL LETTER A WITH TILDE + '\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS + '\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE + '\xe6' # 0xE6 -> LATIN SMALL LETTER AE + '\xe7' # 0xE7 -> LATIN SMALL LETTER C WITH CEDILLA + '\xe8' # 0xE8 -> LATIN SMALL LETTER E WITH GRAVE + '\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE + '\xea' # 0xEA -> LATIN SMALL LETTER E 
WITH CIRCUMFLEX + '\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS + '\xec' # 0xEC -> LATIN SMALL LETTER I WITH GRAVE + '\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE + '\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX + '\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS + '\xf0' # 0xF0 -> LATIN SMALL LETTER ETH (Icelandic) + '\xf1' # 0xF1 -> LATIN SMALL LETTER N WITH TILDE + '\xf2' # 0xF2 -> LATIN SMALL LETTER O WITH GRAVE + '\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE + '\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX + '\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE + '\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS + '\xf7' # 0xF7 -> DIVISION SIGN + '\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE + '\xf9' # 0xF9 -> LATIN SMALL LETTER U WITH GRAVE + '\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE + '\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX + '\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS + '\xfd' # 0xFD -> LATIN SMALL LETTER Y WITH ACUTE + '\xfe' # 0xFE -> LATIN SMALL LETTER THORN (Icelandic) + '\xff' # 0xFF -> LATIN SMALL LETTER Y WITH DIAERESIS +) -### Encoding Map +### Encoding table +encoding_table=codecs.charmap_build(decoding_table) -encoding_map = codecs.make_encoding_map(decoding_map) diff --git a/Lib/encodings/ptcp154.py b/Lib/encodings/ptcp154.py --- a/Lib/encodings/ptcp154.py +++ b/Lib/encodings/ptcp154.py @@ -14,18 +14,18 @@ class Codec(codecs.Codec): def encode(self,input,errors='strict'): - return codecs.charmap_encode(input,errors,encoding_map) + return codecs.charmap_encode(input,errors,encoding_table) def decode(self,input,errors='strict'): - return codecs.charmap_decode(input,errors,decoding_map) + return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): - return codecs.charmap_encode(input,self.errors,encoding_map)[0] + return codecs.charmap_encode(input,self.errors,encoding_table)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): - return codecs.charmap_decode(input,self.errors,decoding_map)[0] + return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass @@ -46,130 +46,267 @@ streamwriter=StreamWriter, ) -### Decoding Map -decoding_map = codecs.make_identity_dict(range(256)) -decoding_map.update({ - 0x0080: 0x0496, # CYRILLIC CAPITAL LETTER ZHE WITH DESCENDER - 0x0081: 0x0492, # CYRILLIC CAPITAL LETTER GHE WITH STROKE - 0x0082: 0x04ee, # CYRILLIC CAPITAL LETTER U WITH MACRON - 0x0083: 0x0493, # CYRILLIC SMALL LETTER GHE WITH STROKE - 0x0084: 0x201e, # DOUBLE LOW-9 QUOTATION MARK - 0x0085: 0x2026, # HORIZONTAL ELLIPSIS - 0x0086: 0x04b6, # CYRILLIC CAPITAL LETTER CHE WITH DESCENDER - 0x0087: 0x04ae, # CYRILLIC CAPITAL LETTER STRAIGHT U - 0x0088: 0x04b2, # CYRILLIC CAPITAL LETTER HA WITH DESCENDER - 0x0089: 0x04af, # CYRILLIC SMALL LETTER STRAIGHT U - 0x008a: 0x04a0, # CYRILLIC CAPITAL LETTER BASHKIR KA - 0x008b: 0x04e2, # CYRILLIC CAPITAL LETTER I WITH MACRON - 0x008c: 0x04a2, # CYRILLIC CAPITAL LETTER EN WITH DESCENDER - 0x008d: 0x049a, # CYRILLIC CAPITAL LETTER KA WITH DESCENDER - 0x008e: 0x04ba, # CYRILLIC CAPITAL LETTER SHHA - 0x008f: 0x04b8, # CYRILLIC CAPITAL LETTER CHE WITH VERTICAL STROKE - 0x0090: 0x0497, # CYRILLIC SMALL LETTER ZHE WITH DESCENDER - 0x0091: 0x2018, # LEFT SINGLE QUOTATION MARK - 0x0092: 0x2019, # RIGHT SINGLE QUOTATION MARK - 0x0093: 0x201c, # LEFT DOUBLE QUOTATION MARK - 0x0094: 0x201d, # RIGHT DOUBLE 
QUOTATION MARK - 0x0095: 0x2022, # BULLET - 0x0096: 0x2013, # EN DASH - 0x0097: 0x2014, # EM DASH - 0x0098: 0x04b3, # CYRILLIC SMALL LETTER HA WITH DESCENDER - 0x0099: 0x04b7, # CYRILLIC SMALL LETTER CHE WITH DESCENDER - 0x009a: 0x04a1, # CYRILLIC SMALL LETTER BASHKIR KA - 0x009b: 0x04e3, # CYRILLIC SMALL LETTER I WITH MACRON - 0x009c: 0x04a3, # CYRILLIC SMALL LETTER EN WITH DESCENDER - 0x009d: 0x049b, # CYRILLIC SMALL LETTER KA WITH DESCENDER - 0x009e: 0x04bb, # CYRILLIC SMALL LETTER SHHA - 0x009f: 0x04b9, # CYRILLIC SMALL LETTER CHE WITH VERTICAL STROKE - 0x00a1: 0x040e, # CYRILLIC CAPITAL LETTER SHORT U (Byelorussian) - 0x00a2: 0x045e, # CYRILLIC SMALL LETTER SHORT U (Byelorussian) - 0x00a3: 0x0408, # CYRILLIC CAPITAL LETTER JE - 0x00a4: 0x04e8, # CYRILLIC CAPITAL LETTER BARRED O - 0x00a5: 0x0498, # CYRILLIC CAPITAL LETTER ZE WITH DESCENDER - 0x00a6: 0x04b0, # CYRILLIC CAPITAL LETTER STRAIGHT U WITH STROKE - 0x00a8: 0x0401, # CYRILLIC CAPITAL LETTER IO - 0x00aa: 0x04d8, # CYRILLIC CAPITAL LETTER SCHWA - 0x00ad: 0x04ef, # CYRILLIC SMALL LETTER U WITH MACRON - 0x00af: 0x049c, # CYRILLIC CAPITAL LETTER KA WITH VERTICAL STROKE - 0x00b1: 0x04b1, # CYRILLIC SMALL LETTER STRAIGHT U WITH STROKE - 0x00b2: 0x0406, # CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I - 0x00b3: 0x0456, # CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I - 0x00b4: 0x0499, # CYRILLIC SMALL LETTER ZE WITH DESCENDER - 0x00b5: 0x04e9, # CYRILLIC SMALL LETTER BARRED O - 0x00b8: 0x0451, # CYRILLIC SMALL LETTER IO - 0x00b9: 0x2116, # NUMERO SIGN - 0x00ba: 0x04d9, # CYRILLIC SMALL LETTER SCHWA - 0x00bc: 0x0458, # CYRILLIC SMALL LETTER JE - 0x00bd: 0x04aa, # CYRILLIC CAPITAL LETTER ES WITH DESCENDER - 0x00be: 0x04ab, # CYRILLIC SMALL LETTER ES WITH DESCENDER - 0x00bf: 0x049d, # CYRILLIC SMALL LETTER KA WITH VERTICAL STROKE - 0x00c0: 0x0410, # CYRILLIC CAPITAL LETTER A - 0x00c1: 0x0411, # CYRILLIC CAPITAL LETTER BE - 0x00c2: 0x0412, # CYRILLIC CAPITAL LETTER VE - 0x00c3: 0x0413, # CYRILLIC CAPITAL LETTER GHE - 0x00c4: 0x0414, # CYRILLIC CAPITAL LETTER DE - 0x00c5: 0x0415, # CYRILLIC CAPITAL LETTER IE - 0x00c6: 0x0416, # CYRILLIC CAPITAL LETTER ZHE - 0x00c7: 0x0417, # CYRILLIC CAPITAL LETTER ZE - 0x00c8: 0x0418, # CYRILLIC CAPITAL LETTER I - 0x00c9: 0x0419, # CYRILLIC CAPITAL LETTER SHORT I - 0x00ca: 0x041a, # CYRILLIC CAPITAL LETTER KA - 0x00cb: 0x041b, # CYRILLIC CAPITAL LETTER EL - 0x00cc: 0x041c, # CYRILLIC CAPITAL LETTER EM - 0x00cd: 0x041d, # CYRILLIC CAPITAL LETTER EN - 0x00ce: 0x041e, # CYRILLIC CAPITAL LETTER O - 0x00cf: 0x041f, # CYRILLIC CAPITAL LETTER PE - 0x00d0: 0x0420, # CYRILLIC CAPITAL LETTER ER - 0x00d1: 0x0421, # CYRILLIC CAPITAL LETTER ES - 0x00d2: 0x0422, # CYRILLIC CAPITAL LETTER TE - 0x00d3: 0x0423, # CYRILLIC CAPITAL LETTER U - 0x00d4: 0x0424, # CYRILLIC CAPITAL LETTER EF - 0x00d5: 0x0425, # CYRILLIC CAPITAL LETTER HA - 0x00d6: 0x0426, # CYRILLIC CAPITAL LETTER TSE - 0x00d7: 0x0427, # CYRILLIC CAPITAL LETTER CHE - 0x00d8: 0x0428, # CYRILLIC CAPITAL LETTER SHA - 0x00d9: 0x0429, # CYRILLIC CAPITAL LETTER SHCHA - 0x00da: 0x042a, # CYRILLIC CAPITAL LETTER HARD SIGN - 0x00db: 0x042b, # CYRILLIC CAPITAL LETTER YERU - 0x00dc: 0x042c, # CYRILLIC CAPITAL LETTER SOFT SIGN - 0x00dd: 0x042d, # CYRILLIC CAPITAL LETTER E - 0x00de: 0x042e, # CYRILLIC CAPITAL LETTER YU - 0x00df: 0x042f, # CYRILLIC CAPITAL LETTER YA - 0x00e0: 0x0430, # CYRILLIC SMALL LETTER A - 0x00e1: 0x0431, # CYRILLIC SMALL LETTER BE - 0x00e2: 0x0432, # CYRILLIC SMALL LETTER VE - 0x00e3: 0x0433, # CYRILLIC SMALL LETTER GHE - 0x00e4: 0x0434, # CYRILLIC 
SMALL LETTER DE - 0x00e5: 0x0435, # CYRILLIC SMALL LETTER IE - 0x00e6: 0x0436, # CYRILLIC SMALL LETTER ZHE - 0x00e7: 0x0437, # CYRILLIC SMALL LETTER ZE - 0x00e8: 0x0438, # CYRILLIC SMALL LETTER I - 0x00e9: 0x0439, # CYRILLIC SMALL LETTER SHORT I - 0x00ea: 0x043a, # CYRILLIC SMALL LETTER KA - 0x00eb: 0x043b, # CYRILLIC SMALL LETTER EL - 0x00ec: 0x043c, # CYRILLIC SMALL LETTER EM - 0x00ed: 0x043d, # CYRILLIC SMALL LETTER EN - 0x00ee: 0x043e, # CYRILLIC SMALL LETTER O - 0x00ef: 0x043f, # CYRILLIC SMALL LETTER PE - 0x00f0: 0x0440, # CYRILLIC SMALL LETTER ER - 0x00f1: 0x0441, # CYRILLIC SMALL LETTER ES - 0x00f2: 0x0442, # CYRILLIC SMALL LETTER TE - 0x00f3: 0x0443, # CYRILLIC SMALL LETTER U - 0x00f4: 0x0444, # CYRILLIC SMALL LETTER EF - 0x00f5: 0x0445, # CYRILLIC SMALL LETTER HA - 0x00f6: 0x0446, # CYRILLIC SMALL LETTER TSE - 0x00f7: 0x0447, # CYRILLIC SMALL LETTER CHE - 0x00f8: 0x0448, # CYRILLIC SMALL LETTER SHA - 0x00f9: 0x0449, # CYRILLIC SMALL LETTER SHCHA - 0x00fa: 0x044a, # CYRILLIC SMALL LETTER HARD SIGN - 0x00fb: 0x044b, # CYRILLIC SMALL LETTER YERU - 0x00fc: 0x044c, # CYRILLIC SMALL LETTER SOFT SIGN - 0x00fd: 0x044d, # CYRILLIC SMALL LETTER E - 0x00fe: 0x044e, # CYRILLIC SMALL LETTER YU - 0x00ff: 0x044f, # CYRILLIC SMALL LETTER YA -}) +### Decoding Table -### Encoding Map +decoding_table = ( + '\x00' # 0x00 -> NULL + '\x01' # 0x01 -> START OF HEADING + '\x02' # 0x02 -> START OF TEXT + '\x03' # 0x03 -> END OF TEXT + '\x04' # 0x04 -> END OF TRANSMISSION + '\x05' # 0x05 -> ENQUIRY + '\x06' # 0x06 -> ACKNOWLEDGE + '\x07' # 0x07 -> BELL + '\x08' # 0x08 -> BACKSPACE + '\t' # 0x09 -> HORIZONTAL TABULATION + '\n' # 0x0A -> LINE FEED + '\x0b' # 0x0B -> VERTICAL TABULATION + '\x0c' # 0x0C -> FORM FEED + '\r' # 0x0D -> CARRIAGE RETURN + '\x0e' # 0x0E -> SHIFT OUT + '\x0f' # 0x0F -> SHIFT IN + '\x10' # 0x10 -> DATA LINK ESCAPE + '\x11' # 0x11 -> DEVICE CONTROL ONE + '\x12' # 0x12 -> DEVICE CONTROL TWO + '\x13' # 0x13 -> DEVICE CONTROL THREE + '\x14' # 0x14 -> DEVICE CONTROL FOUR + '\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE + '\x16' # 0x16 -> SYNCHRONOUS IDLE + '\x17' # 0x17 -> END OF TRANSMISSION BLOCK + '\x18' # 0x18 -> CANCEL + '\x19' # 0x19 -> END OF MEDIUM + '\x1a' # 0x1A -> SUBSTITUTE + '\x1b' # 0x1B -> ESCAPE + '\x1c' # 0x1C -> FILE SEPARATOR + '\x1d' # 0x1D -> GROUP SEPARATOR + '\x1e' # 0x1E -> RECORD SEPARATOR + '\x1f' # 0x1F -> UNIT SEPARATOR + ' ' # 0x20 -> SPACE + '!' # 0x21 -> EXCLAMATION MARK + '"' # 0x22 -> QUOTATION MARK + '#' # 0x23 -> NUMBER SIGN + '$' # 0x24 -> DOLLAR SIGN + '%' # 0x25 -> PERCENT SIGN + '&' # 0x26 -> AMPERSAND + "'" # 0x27 -> APOSTROPHE + '(' # 0x28 -> LEFT PARENTHESIS + ')' # 0x29 -> RIGHT PARENTHESIS + '*' # 0x2A -> ASTERISK + '+' # 0x2B -> PLUS SIGN + ',' # 0x2C -> COMMA + '-' # 0x2D -> HYPHEN-MINUS + '.' # 0x2E -> FULL STOP + '/' # 0x2F -> SOLIDUS + '0' # 0x30 -> DIGIT ZERO + '1' # 0x31 -> DIGIT ONE + '2' # 0x32 -> DIGIT TWO + '3' # 0x33 -> DIGIT THREE + '4' # 0x34 -> DIGIT FOUR + '5' # 0x35 -> DIGIT FIVE + '6' # 0x36 -> DIGIT SIX + '7' # 0x37 -> DIGIT SEVEN + '8' # 0x38 -> DIGIT EIGHT + '9' # 0x39 -> DIGIT NINE + ':' # 0x3A -> COLON + ';' # 0x3B -> SEMICOLON + '<' # 0x3C -> LESS-THAN SIGN + '=' # 0x3D -> EQUALS SIGN + '>' # 0x3E -> GREATER-THAN SIGN + '?' 
# 0x3F -> QUESTION MARK + '@' # 0x40 -> COMMERCIAL AT + 'A' # 0x41 -> LATIN CAPITAL LETTER A + 'B' # 0x42 -> LATIN CAPITAL LETTER B + 'C' # 0x43 -> LATIN CAPITAL LETTER C + 'D' # 0x44 -> LATIN CAPITAL LETTER D + 'E' # 0x45 -> LATIN CAPITAL LETTER E + 'F' # 0x46 -> LATIN CAPITAL LETTER F + 'G' # 0x47 -> LATIN CAPITAL LETTER G + 'H' # 0x48 -> LATIN CAPITAL LETTER H + 'I' # 0x49 -> LATIN CAPITAL LETTER I + 'J' # 0x4A -> LATIN CAPITAL LETTER J + 'K' # 0x4B -> LATIN CAPITAL LETTER K + 'L' # 0x4C -> LATIN CAPITAL LETTER L + 'M' # 0x4D -> LATIN CAPITAL LETTER M + 'N' # 0x4E -> LATIN CAPITAL LETTER N + 'O' # 0x4F -> LATIN CAPITAL LETTER O + 'P' # 0x50 -> LATIN CAPITAL LETTER P + 'Q' # 0x51 -> LATIN CAPITAL LETTER Q + 'R' # 0x52 -> LATIN CAPITAL LETTER R + 'S' # 0x53 -> LATIN CAPITAL LETTER S + 'T' # 0x54 -> LATIN CAPITAL LETTER T + 'U' # 0x55 -> LATIN CAPITAL LETTER U + 'V' # 0x56 -> LATIN CAPITAL LETTER V + 'W' # 0x57 -> LATIN CAPITAL LETTER W + 'X' # 0x58 -> LATIN CAPITAL LETTER X + 'Y' # 0x59 -> LATIN CAPITAL LETTER Y + 'Z' # 0x5A -> LATIN CAPITAL LETTER Z + '[' # 0x5B -> LEFT SQUARE BRACKET + '\\' # 0x5C -> REVERSE SOLIDUS + ']' # 0x5D -> RIGHT SQUARE BRACKET + '^' # 0x5E -> CIRCUMFLEX ACCENT + '_' # 0x5F -> LOW LINE + '`' # 0x60 -> GRAVE ACCENT + 'a' # 0x61 -> LATIN SMALL LETTER A + 'b' # 0x62 -> LATIN SMALL LETTER B + 'c' # 0x63 -> LATIN SMALL LETTER C + 'd' # 0x64 -> LATIN SMALL LETTER D + 'e' # 0x65 -> LATIN SMALL LETTER E + 'f' # 0x66 -> LATIN SMALL LETTER F + 'g' # 0x67 -> LATIN SMALL LETTER G + 'h' # 0x68 -> LATIN SMALL LETTER H + 'i' # 0x69 -> LATIN SMALL LETTER I + 'j' # 0x6A -> LATIN SMALL LETTER J + 'k' # 0x6B -> LATIN SMALL LETTER K + 'l' # 0x6C -> LATIN SMALL LETTER L + 'm' # 0x6D -> LATIN SMALL LETTER M + 'n' # 0x6E -> LATIN SMALL LETTER N + 'o' # 0x6F -> LATIN SMALL LETTER O + 'p' # 0x70 -> LATIN SMALL LETTER P + 'q' # 0x71 -> LATIN SMALL LETTER Q + 'r' # 0x72 -> LATIN SMALL LETTER R + 's' # 0x73 -> LATIN SMALL LETTER S + 't' # 0x74 -> LATIN SMALL LETTER T + 'u' # 0x75 -> LATIN SMALL LETTER U + 'v' # 0x76 -> LATIN SMALL LETTER V + 'w' # 0x77 -> LATIN SMALL LETTER W + 'x' # 0x78 -> LATIN SMALL LETTER X + 'y' # 0x79 -> LATIN SMALL LETTER Y + 'z' # 0x7A -> LATIN SMALL LETTER Z + '{' # 0x7B -> LEFT CURLY BRACKET + '|' # 0x7C -> VERTICAL LINE + '}' # 0x7D -> RIGHT CURLY BRACKET + '~' # 0x7E -> TILDE + '\x7f' # 0x7F -> DELETE (DEL) + '\u0496' # 0x80 -> CYRILLIC CAPITAL LETTER ZHE WITH DESCENDER + '\u0492' # 0x81 -> CYRILLIC CAPITAL LETTER GHE WITH STROKE + '\u04ee' # 0x82 -> CYRILLIC CAPITAL LETTER U WITH MACRON + '\u0493' # 0x83 -> CYRILLIC SMALL LETTER GHE WITH STROKE + '\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK + '\u2026' # 0x85 -> HORIZONTAL ELLIPSIS + '\u04b6' # 0x86 -> CYRILLIC CAPITAL LETTER CHE WITH DESCENDER + '\u04ae' # 0x87 -> CYRILLIC CAPITAL LETTER STRAIGHT U + '\u04b2' # 0x88 -> CYRILLIC CAPITAL LETTER HA WITH DESCENDER + '\u04af' # 0x89 -> CYRILLIC SMALL LETTER STRAIGHT U + '\u04a0' # 0x8A -> CYRILLIC CAPITAL LETTER BASHKIR KA + '\u04e2' # 0x8B -> CYRILLIC CAPITAL LETTER I WITH MACRON + '\u04a2' # 0x8C -> CYRILLIC CAPITAL LETTER EN WITH DESCENDER + '\u049a' # 0x8D -> CYRILLIC CAPITAL LETTER KA WITH DESCENDER + '\u04ba' # 0x8E -> CYRILLIC CAPITAL LETTER SHHA + '\u04b8' # 0x8F -> CYRILLIC CAPITAL LETTER CHE WITH VERTICAL STROKE + '\u0497' # 0x90 -> CYRILLIC SMALL LETTER ZHE WITH DESCENDER + '\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK + '\u2019' # 0x92 -> RIGHT SINGLE QUOTATION MARK + '\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK + '\u201d' # 0x94 -> RIGHT DOUBLE 
QUOTATION MARK + '\u2022' # 0x95 -> BULLET + '\u2013' # 0x96 -> EN DASH + '\u2014' # 0x97 -> EM DASH + '\u04b3' # 0x98 -> CYRILLIC SMALL LETTER HA WITH DESCENDER + '\u04b7' # 0x99 -> CYRILLIC SMALL LETTER CHE WITH DESCENDER + '\u04a1' # 0x9A -> CYRILLIC SMALL LETTER BASHKIR KA + '\u04e3' # 0x9B -> CYRILLIC SMALL LETTER I WITH MACRON + '\u04a3' # 0x9C -> CYRILLIC SMALL LETTER EN WITH DESCENDER + '\u049b' # 0x9D -> CYRILLIC SMALL LETTER KA WITH DESCENDER + '\u04bb' # 0x9E -> CYRILLIC SMALL LETTER SHHA + '\u04b9' # 0x9F -> CYRILLIC SMALL LETTER CHE WITH VERTICAL STROKE + '\xa0' # 0xA0 -> NO-BREAK SPACE + '\u040e' # 0xA1 -> CYRILLIC CAPITAL LETTER SHORT U (Byelorussian) + '\u045e' # 0xA2 -> CYRILLIC SMALL LETTER SHORT U (Byelorussian) + '\u0408' # 0xA3 -> CYRILLIC CAPITAL LETTER JE + '\u04e8' # 0xA4 -> CYRILLIC CAPITAL LETTER BARRED O + '\u0498' # 0xA5 -> CYRILLIC CAPITAL LETTER ZE WITH DESCENDER + '\u04b0' # 0xA6 -> CYRILLIC CAPITAL LETTER STRAIGHT U WITH STROKE + '\xa7' # 0xA7 -> SECTION SIGN + '\u0401' # 0xA8 -> CYRILLIC CAPITAL LETTER IO + '\xa9' # 0xA9 -> COPYRIGHT SIGN + '\u04d8' # 0xAA -> CYRILLIC CAPITAL LETTER SCHWA + '\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK + '\xac' # 0xAC -> NOT SIGN + '\u04ef' # 0xAD -> CYRILLIC SMALL LETTER U WITH MACRON + '\xae' # 0xAE -> REGISTERED SIGN + '\u049c' # 0xAF -> CYRILLIC CAPITAL LETTER KA WITH VERTICAL STROKE + '\xb0' # 0xB0 -> DEGREE SIGN + '\u04b1' # 0xB1 -> CYRILLIC SMALL LETTER STRAIGHT U WITH STROKE + '\u0406' # 0xB2 -> CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I + '\u0456' # 0xB3 -> CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I + '\u0499' # 0xB4 -> CYRILLIC SMALL LETTER ZE WITH DESCENDER + '\u04e9' # 0xB5 -> CYRILLIC SMALL LETTER BARRED O + '\xb6' # 0xB6 -> PILCROW SIGN + '\xb7' # 0xB7 -> MIDDLE DOT + '\u0451' # 0xB8 -> CYRILLIC SMALL LETTER IO + '\u2116' # 0xB9 -> NUMERO SIGN + '\u04d9' # 0xBA -> CYRILLIC SMALL LETTER SCHWA + '\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK + '\u0458' # 0xBC -> CYRILLIC SMALL LETTER JE + '\u04aa' # 0xBD -> CYRILLIC CAPITAL LETTER ES WITH DESCENDER + '\u04ab' # 0xBE -> CYRILLIC SMALL LETTER ES WITH DESCENDER + '\u049d' # 0xBF -> CYRILLIC SMALL LETTER KA WITH VERTICAL STROKE + '\u0410' # 0xC0 -> CYRILLIC CAPITAL LETTER A + '\u0411' # 0xC1 -> CYRILLIC CAPITAL LETTER BE + '\u0412' # 0xC2 -> CYRILLIC CAPITAL LETTER VE + '\u0413' # 0xC3 -> CYRILLIC CAPITAL LETTER GHE + '\u0414' # 0xC4 -> CYRILLIC CAPITAL LETTER DE + '\u0415' # 0xC5 -> CYRILLIC CAPITAL LETTER IE + '\u0416' # 0xC6 -> CYRILLIC CAPITAL LETTER ZHE + '\u0417' # 0xC7 -> CYRILLIC CAPITAL LETTER ZE + '\u0418' # 0xC8 -> CYRILLIC CAPITAL LETTER I + '\u0419' # 0xC9 -> CYRILLIC CAPITAL LETTER SHORT I + '\u041a' # 0xCA -> CYRILLIC CAPITAL LETTER KA + '\u041b' # 0xCB -> CYRILLIC CAPITAL LETTER EL + '\u041c' # 0xCC -> CYRILLIC CAPITAL LETTER EM + '\u041d' # 0xCD -> CYRILLIC CAPITAL LETTER EN + '\u041e' # 0xCE -> CYRILLIC CAPITAL LETTER O + '\u041f' # 0xCF -> CYRILLIC CAPITAL LETTER PE + '\u0420' # 0xD0 -> CYRILLIC CAPITAL LETTER ER + '\u0421' # 0xD1 -> CYRILLIC CAPITAL LETTER ES + '\u0422' # 0xD2 -> CYRILLIC CAPITAL LETTER TE + '\u0423' # 0xD3 -> CYRILLIC CAPITAL LETTER U + '\u0424' # 0xD4 -> CYRILLIC CAPITAL LETTER EF + '\u0425' # 0xD5 -> CYRILLIC CAPITAL LETTER HA + '\u0426' # 0xD6 -> CYRILLIC CAPITAL LETTER TSE + '\u0427' # 0xD7 -> CYRILLIC CAPITAL LETTER CHE + '\u0428' # 0xD8 -> CYRILLIC CAPITAL LETTER SHA + '\u0429' # 0xD9 -> CYRILLIC CAPITAL LETTER SHCHA + '\u042a' # 0xDA -> CYRILLIC CAPITAL LETTER HARD SIGN + '\u042b' # 
0xDB -> CYRILLIC CAPITAL LETTER YERU + '\u042c' # 0xDC -> CYRILLIC CAPITAL LETTER SOFT SIGN + '\u042d' # 0xDD -> CYRILLIC CAPITAL LETTER E + '\u042e' # 0xDE -> CYRILLIC CAPITAL LETTER YU + '\u042f' # 0xDF -> CYRILLIC CAPITAL LETTER YA + '\u0430' # 0xE0 -> CYRILLIC SMALL LETTER A + '\u0431' # 0xE1 -> CYRILLIC SMALL LETTER BE + '\u0432' # 0xE2 -> CYRILLIC SMALL LETTER VE + '\u0433' # 0xE3 -> CYRILLIC SMALL LETTER GHE + '\u0434' # 0xE4 -> CYRILLIC SMALL LETTER DE + '\u0435' # 0xE5 -> CYRILLIC SMALL LETTER IE + '\u0436' # 0xE6 -> CYRILLIC SMALL LETTER ZHE + '\u0437' # 0xE7 -> CYRILLIC SMALL LETTER ZE + '\u0438' # 0xE8 -> CYRILLIC SMALL LETTER I + '\u0439' # 0xE9 -> CYRILLIC SMALL LETTER SHORT I + '\u043a' # 0xEA -> CYRILLIC SMALL LETTER KA + '\u043b' # 0xEB -> CYRILLIC SMALL LETTER EL + '\u043c' # 0xEC -> CYRILLIC SMALL LETTER EM + '\u043d' # 0xED -> CYRILLIC SMALL LETTER EN + '\u043e' # 0xEE -> CYRILLIC SMALL LETTER O + '\u043f' # 0xEF -> CYRILLIC SMALL LETTER PE + '\u0440' # 0xF0 -> CYRILLIC SMALL LETTER ER + '\u0441' # 0xF1 -> CYRILLIC SMALL LETTER ES + '\u0442' # 0xF2 -> CYRILLIC SMALL LETTER TE + '\u0443' # 0xF3 -> CYRILLIC SMALL LETTER U + '\u0444' # 0xF4 -> CYRILLIC SMALL LETTER EF + '\u0445' # 0xF5 -> CYRILLIC SMALL LETTER HA + '\u0446' # 0xF6 -> CYRILLIC SMALL LETTER TSE + '\u0447' # 0xF7 -> CYRILLIC SMALL LETTER CHE + '\u0448' # 0xF8 -> CYRILLIC SMALL LETTER SHA + '\u0449' # 0xF9 -> CYRILLIC SMALL LETTER SHCHA + '\u044a' # 0xFA -> CYRILLIC SMALL LETTER HARD SIGN + '\u044b' # 0xFB -> CYRILLIC SMALL LETTER YERU + '\u044c' # 0xFC -> CYRILLIC SMALL LETTER SOFT SIGN + '\u044d' # 0xFD -> CYRILLIC SMALL LETTER E + '\u044e' # 0xFE -> CYRILLIC SMALL LETTER YU + '\u044f' # 0xFF -> CYRILLIC SMALL LETTER YA +) -encoding_map = codecs.make_encoding_map(decoding_map) +### Encoding table +encoding_table=codecs.charmap_build(decoding_table) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,9 @@ Core and Builtins ----------------- +- Issue #14874: Restore charmap decoding speed to pre-PEP 393 levels. + Patch by Serhiy Storchaka. + - Issue #15026: utf-16 encoding is now significantly faster (up to 10x). Patch by Serhiy Storchaka. 
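As a minimal sketch of how these generated tables behave (a toy three-entry table rather than a real 256-entry charset, with made-up values, and it leans on the relaxed length check that the unicodeobject.c change below adds to the encoding-map builder, so treat it as illustrative only):

    import codecs

    # Toy decoding table: the index is the byte value, the entry is the
    # decoded character.  Generated codecs such as ptcp154 above carry a
    # full 256-entry table.
    decoding_table = (
        '\x00'     # 0x00 -> NULL
        'A'        # 0x01 -> LATIN CAPITAL LETTER A
        '\u0416'   # 0x02 -> CYRILLIC CAPITAL LETTER ZHE
    )
    encoding_table = codecs.charmap_build(decoding_table)

    decoded, _ = codecs.charmap_decode(b'\x01\x02', 'strict', decoding_table)
    print(decoded)    # 'AЖ'
    encoded, _ = codecs.charmap_encode('A\u0416', 'strict', encoding_table)
    print(encoded)    # b'\x01\x02'

Moving codecs such as ptcp154 from dictionary-based decoding_map objects to these string tables is what lets charmap_decode take the string-mapping path, which the batched loop added to unicodeobject.c below then speeds up.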
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -7435,24 +7435,53 @@ e = s + size; if (PyUnicode_CheckExact(mapping)) { Py_ssize_t maplen; - enum PyUnicode_Kind kind; - void *data; + enum PyUnicode_Kind mapkind; + void *mapdata; Py_UCS4 x; if (PyUnicode_READY(mapping) == -1) return NULL; maplen = PyUnicode_GET_LENGTH(mapping); - data = PyUnicode_DATA(mapping); - kind = PyUnicode_KIND(mapping); + mapdata = PyUnicode_DATA(mapping); + mapkind = PyUnicode_KIND(mapping); while (s < e) { - unsigned char ch = *s; + unsigned char ch; + if (mapkind == PyUnicode_2BYTE_KIND && maplen >= 256) { + enum PyUnicode_Kind outkind = PyUnicode_KIND(v); + if (outkind == PyUnicode_1BYTE_KIND) { + void *outdata = PyUnicode_DATA(v); + Py_UCS4 maxchar = PyUnicode_MAX_CHAR_VALUE(v); + while (s < e) { + unsigned char ch = *s; + x = PyUnicode_READ(PyUnicode_2BYTE_KIND, mapdata, ch); + if (x > maxchar) + goto Error; + PyUnicode_WRITE(PyUnicode_1BYTE_KIND, outdata, outpos++, x); + ++s; + } + break; + } + else if (outkind == PyUnicode_2BYTE_KIND) { + void *outdata = PyUnicode_DATA(v); + while (s < e) { + unsigned char ch = *s; + x = PyUnicode_READ(PyUnicode_2BYTE_KIND, mapdata, ch); + if (x == 0xFFFE) + goto Error; + PyUnicode_WRITE(PyUnicode_2BYTE_KIND, outdata, outpos++, x); + ++s; + } + break; + } + } + ch = *s; if (ch < maplen) - x = PyUnicode_READ(kind, data, ch); + x = PyUnicode_READ(mapkind, mapdata, ch); else x = 0xfffe; /* invalid value */ - +Error: if (x == 0xfffe) { /* undefined mapping */ @@ -7667,14 +7696,17 @@ int count2 = 0, count3 = 0; int kind; void *data; + Py_ssize_t length; Py_UCS4 ch; - if (!PyUnicode_Check(string) || PyUnicode_GET_LENGTH(string) != 256) { + if (!PyUnicode_Check(string) || !PyUnicode_GET_LENGTH(string)) { PyErr_BadArgument(); return NULL; } kind = PyUnicode_KIND(string); data = PyUnicode_DATA(string); + length = PyUnicode_GET_LENGTH(string); + length = Py_MIN(length, 256); memset(level1, 0xFF, sizeof level1); memset(level2, 0xFF, sizeof level2); @@ -7683,7 +7715,7 @@ a mapping dictionary. 
*/ if (PyUnicode_READ(kind, data, 0) != 0) need_dict = 1; - for (i = 1; i < 256; i++) { + for (i = 1; i < length; i++) { int l1, l2; ch = PyUnicode_READ(kind, data, i); if (ch == 0 || ch > 0xFFFF) { @@ -7709,7 +7741,7 @@ PyObject *key, *value; if (!result) return NULL; - for (i = 0; i < 256; i++) { + for (i = 0; i < length; i++) { key = PyLong_FromLong(PyUnicode_READ(kind, data, i)); value = PyLong_FromLong(i); if (!key || !value) @@ -7743,17 +7775,18 @@ memset(mlevel2, 0xFF, 16*count2); memset(mlevel3, 0, 128*count3); count3 = 0; - for (i = 1; i < 256; i++) { + for (i = 1; i < length; i++) { int o1, o2, o3, i2, i3; - if (PyUnicode_READ(kind, data, i) == 0xFFFE) + Py_UCS4 ch = PyUnicode_READ(kind, data, i); + if (ch == 0xFFFE) /* unmapped character */ continue; - o1 = PyUnicode_READ(kind, data, i)>>11; - o2 = (PyUnicode_READ(kind, data, i)>>7) & 0xF; + o1 = ch>>11; + o2 = (ch>>7) & 0xF; i2 = 16*mlevel1[o1] + o2; if (mlevel2[i2] == 0xFF) mlevel2[i2] = count3++; - o3 = PyUnicode_READ(kind, data, i) & 0x7F; + o3 = ch & 0x7F; i3 = 128*mlevel2[i2] + o3; mlevel3[i3] = i; } diff --git a/Tools/unicode/gencodec.py b/Tools/unicode/gencodec.py --- a/Tools/unicode/gencodec.py +++ b/Tools/unicode/gencodec.py @@ -102,7 +102,7 @@ comment = '' else: comment = comment[1:].strip() - if enc < 256: + if not isinstance(enc, tuple) and enc < 256: if enc in unmapped: unmapped.remove(enc) if enc == uni: @@ -202,11 +202,10 @@ # Analyze map and create table dict mappings = sorted(map.items()) table = {} - maxkey = 0 + maxkey = 255 if 'IDENTITY' in map: for key in range(256): table[key] = (key, '') - maxkey = 255 del map['IDENTITY'] for mapkey, mapvalue in mappings: mapcomment = '' @@ -224,6 +223,7 @@ return None # Create table code + maxchar = 0 for key in range(maxkey + 1): if key not in table: mapvalue = MISSING_CODE @@ -238,6 +238,7 @@ return None else: mapchar = chr(mapvalue) + maxchar = max(maxchar, ord(mapchar)) if mapcomment and comments: append(' %a \t# %s -> %s' % (mapchar, hexrepr(key, key_precision), @@ -245,6 +246,8 @@ else: append(' %a' % mapchar) + if maxchar < 256: + append(' %a \t## Widen to UCS2 for optimization' % UNI_UNDEFINED) append(')') return l -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 16 22:54:06 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sat, 16 Jun 2012 22:54:06 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Normalize_whitespace?= Message-ID: http://hg.python.org/cpython/rev/7cb974fe5e43 changeset: 77472:7cb974fe5e43 user: Antoine Pitrou date: Sat Jun 16 22:50:54 2012 +0200 summary: Normalize whitespace files: Lib/encodings/hp_roman8.py | 1 - Lib/encodings/palmos.py | 1 - 2 files changed, 0 insertions(+), 2 deletions(-) diff --git a/Lib/encodings/hp_roman8.py b/Lib/encodings/hp_roman8.py --- a/Lib/encodings/hp_roman8.py +++ b/Lib/encodings/hp_roman8.py @@ -310,4 +310,3 @@ ### Encoding table encoding_table=codecs.charmap_build(decoding_table) - diff --git a/Lib/encodings/palmos.py b/Lib/encodings/palmos.py --- a/Lib/encodings/palmos.py +++ b/Lib/encodings/palmos.py @@ -306,4 +306,3 @@ ### Encoding table encoding_table=codecs.charmap_build(decoding_table) - -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 16 23:21:55 2012 From: python-checkins at python.org (georg.brandl) Date: Sat, 16 Jun 2012 23:21:55 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_PEP_update_for_3=2E3=2E?= Message-ID: http://hg.python.org/peps/rev/634c46edc47a changeset: 4462:634c46edc47a user: Georg 
Brandl date: Sat Jun 16 23:22:21 2012 +0200 summary: PEP update for 3.3. files: pep-0398.txt | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pep-0398.txt b/pep-0398.txt --- a/pep-0398.txt +++ b/pep-0398.txt @@ -70,6 +70,7 @@ * PEP 417: Including mock in the Standard Library * PEP 418: Add monotonic time, performance counter, and process time functions * PEP 420: Implicit Namespace Packages +* PEP 421: Adding sys.implementation * PEP 3118: Revising the buffer protocol (protocol semantics finalised) * PEP 3144: IP Address manipulation library * PEP 3151: Reworking the OS and IO exception hierarchy @@ -87,8 +88,6 @@ * PEP 362: Function Signature Object * PEP 397: Python launcher for Windows -* PEP 421: Adding sys.implementation -* PEP 3143: Standard daemon process library * PEP 3154: Pickle protocol version 4 (Note that these are not accepted yet and even if they are, they might @@ -105,6 +104,7 @@ Deferred to post-3.3: * PEP 395: Qualified Names for Modules +* PEP 3143: Standard daemon process library * Breaking out standard library and docs in separate repos Copyright -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Sun Jun 17 00:30:51 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sun, 17 Jun 2012 00:30:51 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Try_to_fix_issue_=2315086?= =?utf8?q?=3A_build_failure_on_Ubuntu_shared_buildbot=2E?= Message-ID: http://hg.python.org/cpython/rev/3680b3423aa3 changeset: 77473:3680b3423aa3 user: Antoine Pitrou date: Sun Jun 17 00:27:30 2012 +0200 summary: Try to fix issue #15086: build failure on Ubuntu shared buildbot. files: Makefile.pre.in | 4 ++-- Python/importlib.h | Bin 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile.pre.in b/Makefile.pre.in --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -442,7 +442,7 @@ $(MAKE) all CFLAGS="$(CFLAGS) -fprofile-generate" LIBS="$(LIBS) -lgcov" run_profile_task: - ./$(BUILDPYTHON) $(PROFILE_TASK) + $(RUNSHARED) ./$(BUILDPYTHON) $(PROFILE_TASK) build_all_use_profile: $(MAKE) all CFLAGS="$(CFLAGS) -fprofile-use" @@ -580,7 +580,7 @@ Python/importlib.h: $(srcdir)/Lib/importlib/_bootstrap.py $(srcdir)/Python/freeze_importlib.py @if test -f ./$(BUILDPYTHON); then \ - ./$(BUILDPYTHON) $(srcdir)/Python/freeze_importlib.py \ + $(RUNSHARED) ./$(BUILDPYTHON) $(srcdir)/Python/freeze_importlib.py \ $(srcdir)/Lib/importlib/_bootstrap.py Python/importlib.h; \ else \ echo "----------------------------------------------------------"; \ diff --git a/Python/importlib.h b/Python/importlib.h index 15d0ba4bbecf831fd80c35507509ed2721568f19..c9431e49e929cf16e5376c1c6b9e4ba09e891bda GIT binary patch [stripped] -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 04:50:57 2012 From: python-checkins at python.org (meador.inge) Date: Sun, 17 Jun 2012 04:50:57 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE1MDU0?= =?utf8?q?=3A_Fix_incorrect_tokenization_of_=27b=27_and_=27br=27_string_li?= =?utf8?q?terals=2E?= Message-ID: http://hg.python.org/cpython/rev/35d3a8ed7997 changeset: 77474:35d3a8ed7997 branch: 2.7 parent: 77443:0add70dd3c43 user: Meador Inge date: Sat Jun 16 21:05:50 2012 -0500 summary: Issue #15054: Fix incorrect tokenization of 'b' and 'br' string literals. Patch by Serhiy Storchaka. 
files: Lib/test/test_tokenize.py | 25 +++++++++++++++++++++++++ Lib/tokenize.py | 10 +++++----- Misc/NEWS | 4 ++++ 3 files changed, 34 insertions(+), 5 deletions(-) diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py --- a/Lib/test/test_tokenize.py +++ b/Lib/test/test_tokenize.py @@ -278,6 +278,31 @@ OP '+' (1, 32) (1, 33) STRING 'UR"ABC"' (1, 34) (1, 41) + >>> dump_tokens("b'abc' + B'abc'") + STRING "b'abc'" (1, 0) (1, 6) + OP '+' (1, 7) (1, 8) + STRING "B'abc'" (1, 9) (1, 15) + >>> dump_tokens('b"abc" + B"abc"') + STRING 'b"abc"' (1, 0) (1, 6) + OP '+' (1, 7) (1, 8) + STRING 'B"abc"' (1, 9) (1, 15) + >>> dump_tokens("br'abc' + bR'abc' + Br'abc' + BR'abc'") + STRING "br'abc'" (1, 0) (1, 7) + OP '+' (1, 8) (1, 9) + STRING "bR'abc'" (1, 10) (1, 17) + OP '+' (1, 18) (1, 19) + STRING "Br'abc'" (1, 20) (1, 27) + OP '+' (1, 28) (1, 29) + STRING "BR'abc'" (1, 30) (1, 37) + >>> dump_tokens('br"abc" + bR"abc" + Br"abc" + BR"abc"') + STRING 'br"abc"' (1, 0) (1, 7) + OP '+' (1, 8) (1, 9) + STRING 'bR"abc"' (1, 10) (1, 17) + OP '+' (1, 18) (1, 19) + STRING 'Br"abc"' (1, 20) (1, 27) + OP '+' (1, 28) (1, 29) + STRING 'BR"abc"' (1, 30) (1, 37) + Operators >>> dump_tokens("def d22(a, b, c=2, d=2, *k): pass") diff --git a/Lib/tokenize.py b/Lib/tokenize.py --- a/Lib/tokenize.py +++ b/Lib/tokenize.py @@ -70,10 +70,10 @@ Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''" # Tail end of """ string. Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""' -Triple = group("[uU]?[rR]?'''", '[uU]?[rR]?"""') +Triple = group("[uUbB]?[rR]?'''", '[uUbB]?[rR]?"""') # Single-line ' or " string. -String = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'", - r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"') +String = group(r"[uUbB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'", + r'[uUbB]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"') # Because of leftmost-then-longest match semantics, be sure to put the # longest operators first (e.g., if = came before ==, == would get @@ -91,9 +91,9 @@ Token = Ignore + PlainToken # First (or only) line of ' or " string. -ContStr = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" + +ContStr = group(r"[uUbB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" + group("'", r'\\\r?\n'), - r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' + + r'[uUbB]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' + group('"', r'\\\r?\n')) PseudoExtras = group(r'\\\r?\n', Comment, Triple) PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -67,6 +67,10 @@ Library ------- +- Issue #15054: A bug in tokenize.tokenize that caused string literals + with 'b' and 'br' prefixes to be incorrectly tokenized has been fixed. + Patch by Serhiy Storchaka. + - Issue #15036: Allow removing or changing multiple items in single-file mailboxes (mbox, MMDF, Babyl) flushing the mailbox between the changes. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 04:50:58 2012 From: python-checkins at python.org (meador.inge) Date: Sun, 17 Jun 2012 04:50:58 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315054=3A_Fix_incor?= =?utf8?q?rect_tokenization_of_=27b=27_string_literals=2E?= Message-ID: http://hg.python.org/cpython/rev/115b0cb52c6c changeset: 77475:115b0cb52c6c parent: 77473:3680b3423aa3 user: Meador Inge date: Sat Jun 16 21:49:08 2012 -0500 summary: Issue #15054: Fix incorrect tokenization of 'b' string literals. Patch by Serhiy Storchaka. 
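In user-visible terms (an illustrative snippet with a made-up source string, not part of the patch): with the corrected StringPrefix pattern, byte-string literals come back from the tokenizer as single STRING tokens, where previously a literal such as b'abc' could be split into a name followed by a separate string token:

    import io
    import tokenize

    source = b"x = b'abc' + rb'def'\n"
    for tok in tokenize.tokenize(io.BytesIO(source).readline):
        if tok.type == tokenize.STRING:
            print(tok.string)
    # With the fixed prefix pattern this prints b'abc' and rb'def',
    # each as a single STRING token.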
files: Lib/test/test_tokenize.py | 76 +++++++++++++++++++++++++++ Lib/tokenize.py | 2 +- Misc/NEWS | 4 + 3 files changed, 81 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py --- a/Lib/test/test_tokenize.py +++ b/Lib/test/test_tokenize.py @@ -289,6 +289,82 @@ OP '+' (1, 29) (1, 30) STRING 'R"ABC"' (1, 31) (1, 37) + >>> dump_tokens("u'abc' + U'abc'") + ENCODING 'utf-8' (0, 0) (0, 0) + STRING "u'abc'" (1, 0) (1, 6) + OP '+' (1, 7) (1, 8) + STRING "U'abc'" (1, 9) (1, 15) + >>> dump_tokens('u"abc" + U"abc"') + ENCODING 'utf-8' (0, 0) (0, 0) + STRING 'u"abc"' (1, 0) (1, 6) + OP '+' (1, 7) (1, 8) + STRING 'U"abc"' (1, 9) (1, 15) + >>> dump_tokens("ur'abc' + uR'abc' + Ur'abc' + UR'abc'") + ENCODING 'utf-8' (0, 0) (0, 0) + STRING "ur'abc'" (1, 0) (1, 7) + OP '+' (1, 8) (1, 9) + STRING "uR'abc'" (1, 10) (1, 17) + OP '+' (1, 18) (1, 19) + STRING "Ur'abc'" (1, 20) (1, 27) + OP '+' (1, 28) (1, 29) + STRING "UR'abc'" (1, 30) (1, 37) + >>> dump_tokens('ur"abc" + uR"abc" + Ur"abc" + UR"abc"') + ENCODING 'utf-8' (0, 0) (0, 0) + STRING 'ur"abc"' (1, 0) (1, 7) + OP '+' (1, 8) (1, 9) + STRING 'uR"abc"' (1, 10) (1, 17) + OP '+' (1, 18) (1, 19) + STRING 'Ur"abc"' (1, 20) (1, 27) + OP '+' (1, 28) (1, 29) + STRING 'UR"abc"' (1, 30) (1, 37) + + >>> dump_tokens("b'abc' + B'abc'") + ENCODING 'utf-8' (0, 0) (0, 0) + STRING "b'abc'" (1, 0) (1, 6) + OP '+' (1, 7) (1, 8) + STRING "B'abc'" (1, 9) (1, 15) + >>> dump_tokens('b"abc" + B"abc"') + ENCODING 'utf-8' (0, 0) (0, 0) + STRING 'b"abc"' (1, 0) (1, 6) + OP '+' (1, 7) (1, 8) + STRING 'B"abc"' (1, 9) (1, 15) + >>> dump_tokens("br'abc' + bR'abc' + Br'abc' + BR'abc'") + ENCODING 'utf-8' (0, 0) (0, 0) + STRING "br'abc'" (1, 0) (1, 7) + OP '+' (1, 8) (1, 9) + STRING "bR'abc'" (1, 10) (1, 17) + OP '+' (1, 18) (1, 19) + STRING "Br'abc'" (1, 20) (1, 27) + OP '+' (1, 28) (1, 29) + STRING "BR'abc'" (1, 30) (1, 37) + >>> dump_tokens('br"abc" + bR"abc" + Br"abc" + BR"abc"') + ENCODING 'utf-8' (0, 0) (0, 0) + STRING 'br"abc"' (1, 0) (1, 7) + OP '+' (1, 8) (1, 9) + STRING 'bR"abc"' (1, 10) (1, 17) + OP '+' (1, 18) (1, 19) + STRING 'Br"abc"' (1, 20) (1, 27) + OP '+' (1, 28) (1, 29) + STRING 'BR"abc"' (1, 30) (1, 37) + >>> dump_tokens("rb'abc' + rB'abc' + Rb'abc' + RB'abc'") + ENCODING 'utf-8' (0, 0) (0, 0) + STRING "rb'abc'" (1, 0) (1, 7) + OP '+' (1, 8) (1, 9) + STRING "rB'abc'" (1, 10) (1, 17) + OP '+' (1, 18) (1, 19) + STRING "Rb'abc'" (1, 20) (1, 27) + OP '+' (1, 28) (1, 29) + STRING "RB'abc'" (1, 30) (1, 37) + >>> dump_tokens('rb"abc" + rB"abc" + Rb"abc" + RB"abc"') + ENCODING 'utf-8' (0, 0) (0, 0) + STRING 'rb"abc"' (1, 0) (1, 7) + OP '+' (1, 8) (1, 9) + STRING 'rB"abc"' (1, 10) (1, 17) + OP '+' (1, 18) (1, 19) + STRING 'Rb"abc"' (1, 20) (1, 27) + OP '+' (1, 28) (1, 29) + STRING 'RB"abc"' (1, 30) (1, 37) + Operators >>> dump_tokens("def d22(a, b, c=2, d=2, *k): pass") diff --git a/Lib/tokenize.py b/Lib/tokenize.py --- a/Lib/tokenize.py +++ b/Lib/tokenize.py @@ -127,7 +127,7 @@ Imagnumber = group(r'[0-9]+[jJ]', Floatnumber + r'[jJ]') Number = group(Imagnumber, Floatnumber, Intnumber) -StringPrefix = r'(?:[uU][rR]?|[bB][rR]|[rR][bB]|[rR]|[uU])?' +StringPrefix = r'(?:[uUbB][rR]?|[rR][bB]?)?' # Tail end of ' string. Single = r"[^'\\]*(?:\\.[^'\\]*)*'" diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -27,6 +27,10 @@ Library ------- +- Issue #15054: A bug in tokenize.tokenize that caused string literals + with 'b' prefixes to be incorrectly tokenized has been fixed. + Patch by Serhiy Storchaka. 
+ - Issue #15006: Allow equality comparison between naive and aware time or datetime objects. -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Sun Jun 17 05:48:41 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Sun, 17 Jun 2012 05:48:41 +0200 Subject: [Python-checkins] Daily reference leaks (3680b3423aa3): sum=2 Message-ID: results for 3680b3423aa3 on branch "default" -------------------------------------------- test_dbm leaked [2, 0, 0] references, sum=2 Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogxfzqvN', '-x'] From python-checkins at python.org Sun Jun 17 07:16:03 2012 From: python-checkins at python.org (nick.coghlan) Date: Sun, 17 Jun 2012 07:16:03 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2313783=3A_the_PEP_3?= =?utf8?q?80_implementation_no_longer_expands_the_public_C_API?= Message-ID: http://hg.python.org/cpython/rev/cfbf6aa5c9e3 changeset: 77476:cfbf6aa5c9e3 user: Nick Coghlan date: Sun Jun 17 15:15:49 2012 +1000 summary: Issue #13783: the PEP 380 implementation no longer expands the public C API files: Include/genobject.h | 2 +- Include/pyerrors.h | 3 --- Misc/NEWS | 6 ++++++ Objects/exceptions.c | 6 ------ Objects/genobject.c | 7 ++++--- Python/ceval.c | 2 +- 6 files changed, 12 insertions(+), 14 deletions(-) diff --git a/Include/genobject.h b/Include/genobject.h --- a/Include/genobject.h +++ b/Include/genobject.h @@ -34,7 +34,7 @@ PyAPI_FUNC(PyObject *) PyGen_New(struct _frame *); PyAPI_FUNC(int) PyGen_NeedsFinalizing(PyGenObject *); -PyAPI_FUNC(int) PyGen_FetchStopIterationValue(PyObject **); +PyAPI_FUNC(int) _PyGen_FetchStopIterationValue(PyObject **); PyObject *_PyGen_Send(PyGenObject *, PyObject *); #ifdef __cplusplus diff --git a/Include/pyerrors.h b/Include/pyerrors.h --- a/Include/pyerrors.h +++ b/Include/pyerrors.h @@ -400,9 +400,6 @@ const char *reason /* UTF-8 encoded string */ ); -/* create a StopIteration exception with the given value */ -PyAPI_FUNC(PyObject *) PyStopIteration_Create(PyObject *); - /* These APIs aren't really part of the error implementation, but often needed to format error messages; the native C lib APIs are not available on all platforms, which is why we provide emulations diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -111,6 +111,12 @@ - Issue #14963: Convert contextlib.ExitStack.__exit__ to use an iterative algorithm (Patch by Alon Horev) +C-API +----- + +- Issue #13783: Inadvertent additions to the public C API in the PEP 380 + implementation have either been removed or marked as private interfaces. 
+ Extension Modules ----------------- diff --git a/Objects/exceptions.c b/Objects/exceptions.c --- a/Objects/exceptions.c +++ b/Objects/exceptions.c @@ -516,12 +516,6 @@ return BaseException_traverse((PyBaseExceptionObject *)self, visit, arg); } -PyObject * -PyStopIteration_Create(PyObject *value) -{ - return PyObject_CallFunctionObjArgs(PyExc_StopIteration, value, NULL); -} - ComplexExtendsException( PyExc_Exception, /* base */ StopIteration, /* name */ diff --git a/Objects/genobject.c b/Objects/genobject.c --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -97,7 +97,8 @@ /* Delay exception instantiation if we can */ PyErr_SetNone(PyExc_StopIteration); } else { - PyObject *e = PyStopIteration_Create(result); + PyObject *e = PyObject_CallFunctionObjArgs( + PyExc_StopIteration, result, NULL); if (e != NULL) { PyErr_SetObject(PyExc_StopIteration, e); Py_DECREF(e); @@ -339,7 +340,7 @@ Py_DECREF(ret); /* Termination repetition of YIELD_FROM */ gen->gi_frame->f_lasti++; - if (PyGen_FetchStopIterationValue(&val) == 0) { + if (_PyGen_FetchStopIterationValue(&val) == 0) { ret = gen_send_ex(gen, val, 0); Py_DECREF(val); } else { @@ -428,7 +429,7 @@ */ int -PyGen_FetchStopIterationValue(PyObject **pvalue) { +_PyGen_FetchStopIterationValue(PyObject **pvalue) { PyObject *et, *ev, *tb; PyObject *value = NULL; diff --git a/Python/ceval.c b/Python/ceval.c --- a/Python/ceval.c +++ b/Python/ceval.c @@ -1852,7 +1852,7 @@ PyObject *val; x = POP(); /* Remove iter from stack */ Py_DECREF(x); - err = PyGen_FetchStopIterationValue(&val); + err = _PyGen_FetchStopIterationValue(&val); if (err < 0) { x = NULL; break; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 07:45:31 2012 From: python-checkins at python.org (nick.coghlan) Date: Sun, 17 Jun 2012 07:45:31 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2313783=3A_PEP_380_c?= =?utf8?q?leanup_part_2=2C_using_the_new_identifier_APIs_in_the?= Message-ID: http://hg.python.org/cpython/rev/438b861e2edb changeset: 77477:438b861e2edb user: Nick Coghlan date: Sun Jun 17 15:45:11 2012 +1000 summary: Issue #13783: PEP 380 cleanup part 2, using the new identifier APIs in the generator implementation files: Misc/NEWS | 2 ++ Objects/genobject.c | 6 ++++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,8 @@ Core and Builtins ----------------- +- Issue #13783: Generator objects now use the identifier APIs internally + - Issue #14874: Restore charmap decoding speed to pre-PEP 393 levels. Patch by Serhiy Storchaka. 
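A pure-Python illustration (function names made up, not part of either patch) of the delegation behaviour that gen_close_iter and gen_throw in the genobject.c diff below implement: the delegating generator looks up close and throw on the subiterator by name and forwards them, and a subgenerator's return value travels back as the value attached to StopIteration:

    def inner():
        yield 1
        yield 2
        return 'finished'              # surfaces as StopIteration.value

    def outer():
        result = yield from inner()    # delegation per PEP 380
        print('inner returned:', result)

    print(list(outer()))               # prints 'inner returned: finished', then [1, 2]

    def closable():
        try:
            yield 'running'
        except GeneratorExit:
            print('subgenerator saw close()')
            raise

    def wrapper():
        yield from closable()

    w = wrapper()
    next(w)
    w.close()                          # forwarded to closable() via its close() method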
diff --git a/Objects/genobject.c b/Objects/genobject.c --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -149,13 +149,14 @@ gen_close_iter(PyObject *yf) { PyObject *retval = NULL; + _Py_IDENTIFIER(close); if (PyGen_CheckExact(yf)) { retval = gen_close((PyGenObject *)yf, NULL); if (retval == NULL) return -1; } else { - PyObject *meth = PyObject_GetAttrString(yf, "close"); + PyObject *meth = _PyObject_GetAttrId(yf, &PyId_close); if (meth == NULL) { if (!PyErr_ExceptionMatches(PyExc_AttributeError)) PyErr_WriteUnraisable(yf); @@ -295,6 +296,7 @@ PyObject *tb = NULL; PyObject *val = NULL; PyObject *yf = gen_yf(gen); + _Py_IDENTIFIER(throw); if (!PyArg_UnpackTuple(args, "throw", 1, 3, &typ, &val, &tb)) return NULL; @@ -316,7 +318,7 @@ ret = gen_throw((PyGenObject *)yf, args); gen->gi_running = 0; } else { - PyObject *meth = PyObject_GetAttrString(yf, "throw"); + PyObject *meth = _PyObject_GetAttrId(yf, &PyId_throw); if (meth == NULL) { if (!PyErr_ExceptionMatches(PyExc_AttributeError)) { Py_DECREF(yf); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 08:33:14 2012 From: python-checkins at python.org (nick.coghlan) Date: Sun, 17 Jun 2012 08:33:14 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Improve_an_internal_ipaddre?= =?utf8?q?ss_test=2C_add_a_comment_explaining_why_treating?= Message-ID: http://hg.python.org/cpython/rev/739f5c725958 changeset: 77478:739f5c725958 user: Nick Coghlan date: Sun Jun 17 16:33:00 2012 +1000 summary: Improve an internal ipaddress test, add a comment explaining why treating networks as containers of interfaces rather than addresses would prove confusing files: Lib/ipaddress.py | 21 +++++++++---------- Lib/test/test_ipaddress.py | 27 ++++++++++++++++++++----- 2 files changed, 31 insertions(+), 17 deletions(-) diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -416,6 +416,11 @@ """Return the shorthand version of the IP address as a string.""" return str(self) + @property + def version(self): + msg = '%200s has no version specified' % (type(self),) + raise NotImplementedError(msg) + def _ip_int_from_prefix(self, prefixlen=None): """Turn the prefix length netmask into a int for comparison. @@ -555,10 +560,6 @@ def _get_address_key(self): return (self._version, self) - @property - def version(self): - raise NotImplementedError('BaseIP has no version') - class _BaseNetwork(_IPAddressBase): @@ -727,12 +728,12 @@ return int(self.broadcast_address) - int(self.network_address) + 1 @property - def version(self): - raise NotImplementedError('BaseNet has no version') - - @property def _address_class(self): - raise NotImplementedError('BaseNet has no associated address class') + # Returning bare address objects (rather than interfaces) allows for + # more consistent behaviour across the network address, broadcast + # address and individual host addresses. 
+ msg = '%200s has no associated address class' % (type(self),) + raise NotImplementedError(msg) @property def prefixlen(self): @@ -1333,7 +1334,6 @@ """ # Class to use when creating address objects - # TODO (ncoghlan): Investigate using IPv4Interface instead _address_class = IPv4Address def __init__(self, address, strict=True): @@ -1945,7 +1945,6 @@ """ # Class to use when creating address objects - # TODO (ncoghlan): Investigate using IPv6Interface instead _address_class = IPv6Address def __init__(self, address, strict=True): diff --git a/Lib/test/test_ipaddress.py b/Lib/test/test_ipaddress.py --- a/Lib/test/test_ipaddress.py +++ b/Lib/test/test_ipaddress.py @@ -150,16 +150,31 @@ self.assertEqual(first, last) self.assertEqual(0, ipaddress._get_prefix_length(2**32, 0, 32)) self.assertEqual(128, ipaddress._count_righthand_zero_bits(0, 128)) - base_ip = ipaddress._BaseAddress('127.0.0.1') - try: - base_ip.version - self.fail('_BaseAddress.version didn\'t raise NotImplementedError') - except NotImplementedError: - pass self.assertEqual("IPv4Network('1.2.3.0/24')", repr(self.ipv4_network)) self.assertEqual('0x1020318', hex(self.ipv4_network)) self.assertRaises(TypeError, self.ipv4_network.__eq__, object()) + def testMissingAddressVersion(self): + class Broken(ipaddress._BaseAddress): + pass + broken = Broken('127.0.0.1') + with self.assertRaisesRegex(NotImplementedError, "Broken.*version"): + broken.version + + def testMissingNetworkVersion(self): + class Broken(ipaddress._BaseNetwork): + pass + broken = Broken('127.0.0.1') + with self.assertRaisesRegex(NotImplementedError, "Broken.*version"): + broken.version + + def testMissingAddressClass(self): + class Broken(ipaddress._BaseNetwork): + pass + broken = Broken('127.0.0.1') + with self.assertRaisesRegex(NotImplementedError, "Broken.*address"): + broken._address_class + def testGetNetwork(self): self.assertEqual(int(self.ipv4_network.network_address), 16909056) self.assertEqual(str(self.ipv4_network.network_address), '1.2.3.0') -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 09:24:25 2012 From: python-checkins at python.org (nick.coghlan) Date: Sun, 17 Jun 2012 09:24:25 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314814=3A_Add_first?= =?utf8?q?_draft_of_PEP_3144_ipaddress_module_documentation?= Message-ID: http://hg.python.org/cpython/rev/cacf3b1e20da changeset: 77479:cacf3b1e20da user: Nick Coghlan date: Sun Jun 17 17:24:10 2012 +1000 summary: Issue #14814: Add first draft of PEP 3144 ipaddress module documentation (initial patch by Sandro Tosi) files: Doc/howto/ipaddress.rst | 34 ++- Doc/library/internet.rst | 1 + Doc/library/ipaddress.rst | 251 ++++++++++++++++++++++++++ Misc/NEWS | 5 + 4 files changed, 278 insertions(+), 13 deletions(-) diff --git a/Doc/howto/ipaddress.rst b/Doc/howto/ipaddress.rst --- a/Doc/howto/ipaddress.rst +++ b/Doc/howto/ipaddress.rst @@ -1,14 +1,19 @@ .. _ipaddress-howto: -*************** -Ipaddress Howto -*************** +*************************************** +An Introduction to the ipaddress module +*************************************** :author: Peter Moody +:author: Nick Coghlan -.. topic:: Abstract +.. topic:: Overview - This document is a gentle introduction to :mod:`ipaddress` module. + This document aims to provide a gentle introduction to :mod:`ipaddress` + module. 
It is aimed primarily at users that aren't already familiar with + IP networking terminology, but may also be useful to network engineers + wanting an overview of how the ipaddress module represents IP network + addressing concepts. Creating Address/Network/Interface objects @@ -40,7 +45,7 @@ Addresses, often referred to as "host addresses" are the most basic unit when working with IP addressing. The simplest way to create addresses is -to use the ``ip_address`` factory function, which automatically determines +to use the :func:`ipaddress.ip_address` factory function, which automatically determines whether to create an IPv4 or IPv6 address based on the passed in value:: >>> ipaddress.ip_address('192.0.2.1') @@ -113,7 +118,7 @@ >>> ipaddress.ip_network(3221225984) IPv4Network('192.0.2.0/32') - >>> ipaddress.ip_network(42540766411282592856903984951653826560L) + >>> ipaddress.ip_network(42540766411282592856903984951653826560) IPv6Network('2001:db8::/128') Creation of a particular kind of network can be forced by calling the @@ -275,15 +280,18 @@ Exceptions raised by :mod:`ipaddress` ===================================== -If you try to create an address/network/interface object with an invalid value -for either the address or netmask, :mod:`ipaddress` will raise an -:exc:`AddressValueError` or :exc:`NetmaskValueError` respectively. However, -this applies only when calling the class constructors directly. The factory -functions and other module level functions will just raise :exc:`ValueError`. +When creating address/network/interface objects using the version-agnostic +factory functions, any errors will be reported as :exc:`ValueError`. + +For some use cases, it desirable to know whether it is the address or the +netmask which is incorrect. To support these use cases, the class +constructors actually raise the :exc:`ValueError` subclasses +:exc:`ipaddress.AddressValueError` and :exc:`ipaddress.NetmaskValueError` +to indicate exactly which part of the definition failed to parse correctly. Both of the module specific exceptions have :exc:`ValueError` as their parent class, so if you're not concerned with the particular type of error, -you can still do the following:: +you can still write code like the following:: try: ipaddress.IPv4Address(address) diff --git a/Doc/library/internet.rst b/Doc/library/internet.rst --- a/Doc/library/internet.rst +++ b/Doc/library/internet.rst @@ -42,3 +42,4 @@ http.cookiejar.rst xmlrpc.client.rst xmlrpc.server.rst + ipaddress.rst diff --git a/Doc/library/ipaddress.rst b/Doc/library/ipaddress.rst new file mode 100644 --- /dev/null +++ b/Doc/library/ipaddress.rst @@ -0,0 +1,251 @@ +:mod:`ipaddress` --- IPv4/IPv6 manipulation library +=================================================== + +.. module:: ipaddress + :synopsis: IPv4/IPv6 manipulation library. +.. moduleauthor:: Peter Moody + +**Source code:** :source:`Lib/ipaddress.py` + +-------------- + +The :mod:`ipaddress` module provides the capabilities to create, manipulate and +operate on IPv4 and IPv6 addresses and networks. + +This is the full module API reference - for an overview and introduction, +see :ref:`ipaddress-howto`. + +The functions and classes in this module make it straightforward to handle +various tasks related to IP addresses, including checking whether or not two +hosts are on the same subnet, iterating over all hosts in a particular +subnet, as well as checking whether or not a string represents a valid +IP address or network definition. 
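For instance (an illustrative sketch only, using made-up example addresses from the 192.0.2.0 documentation range; the reference entries that follow give the precise behaviour):

    import ipaddress

    net = ipaddress.ip_network('192.0.2.0/29')

    # Is a given host on this subnet?
    print(ipaddress.ip_address('192.0.2.3') in net)      # True
    print(ipaddress.ip_address('192.0.2.130') in net)    # False

    # Iterate over the usable host addresses of the subnet.
    for host in net.hosts():
        print(host)

    # Check whether a string is a valid network definition.
    try:
        ipaddress.ip_network('192.0.2.0/99')
    except ValueError as exc:
        print('invalid:', exc)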
+ + +Defining IP Addresses and Interfaces +------------------------------------ + +The :mod:`ipaddress` module provides factory functions to define IP addresses +and networks: + +.. function:: ip_address(address) + + Return an :class:`IPv4Address` or :class:`IPv6Address` object depending on + the IP address passed as argument. *address* is a string or integer + representing the IP address. Either IPv4 or IPv6 addresses may be supplied; + integers less than 2**32 will be considered to be IPv4 by default. A + :exc:`ValueError` is raised if the *address* passed is neither an IPv4 nor + IPv6 address. + + >>> ipaddress.ip_address('192.168.0.1') + IPv4Address('192.168.0.1') + >>> ipaddress.ip_address('2001:db8::') + IPv6Address('2001:db8::') + + +.. function:: ip_network(address, strict=True) + + Return an :class:`IPv4Network` or :class:`IPv6Network` object depending on + the IP address passed as argument. *address* is a string or integer + representing the IP network. Either IPv4 or IPv6 networks may be supplied; + integers less than 2**32 will be considered to be IPv4 by default. *strict* + is passed to :class:`IPv4Network` or :class:`IPv6Network` constructor. A + :exc:`ValueError` is raised if the string passed isn't either an IPv4 or IPv6 + address, or if the network has host bits set. + + >>> ipaddress.ip_network('192.168.0.0/28') + IPv4Network('192.168.0.0/28') + + +.. function:: ip_interface(address) + + Return an :class:`IPv4Interface` or :class:`IPv6Interface` object depending + on the IP address passed as argument. *address* is a string or integer + representing the IP address. Either IPv4 or IPv6 addresses may be supplied; + integers less than 2**32 will be considered to be IPv4 by default.. A + :exc:`ValueError` is raised if the *address* passed isn't either an IPv4 or + IPv6 address. + + +Representing IP Addresses and Networks +-------------------------------------- + +The module defines the following and classes to represent IP addresses +and networks: + +.. todo: list the properties and methods + +.. class:: IPv4Address(address) + + Construct an IPv4 address. *address* is a string or integer representing the + IP address. An :exc:`AddressValueError` is raised if *address* is not a + valid IPv4 address. + + >>> ipaddress.IPv4Address('192.168.0.1') + IPv4Address('192.168.0.1') + >>> ipaddress.IPv4Address('192.0.2.1') == ipaddress.IPv4Address(3221225985) + True + + +.. class:: IPv4Interface(address) + + Construct an IPv4 interface. *address* is a string or integer representing + the IP interface. An :exc:`AddressValueError` is raised if *address* is not + a valid IPv4 address. + + The network address for the interface is determined by calling + ``IPv4Network(address, strict=False)``. + + >>> ipaddress.IPv4Interface('192.168.0.0/24') + IPv4Interface('192.168.0.0/24') + >>> ipaddress.IPv4Interface('192.168.0.0/24').network + IPv4Network('192.168.0.0/24') + + +.. class:: IPv4Network(address, strict=True) + + Construct an IPv4 network. *address* is a string or integer representing the + IP address (and optionally the network). An :exc:`AddressValueError` is + raised if *address* is not a valid IPv4 address. A :exc:`NetmaskValueError` + is raised if the netmask is not valid for an IPv4 address. + + If *strict* is ``True`` and host bits are set in the supplied address, + then :exc:`ValueError` is raised. Otherwise, the host bits are masked out + to determine the appropriate network address. 
+ + >>> ipaddress.IPv4Network('192.0.2.0/27') + IPv4Network('192.0.2.0/27') + >>> ipaddress.IPv4Network('192.0.2.0/27').netmask + IPv4Address('255.255.255.224') + >>> ipaddress.IPv4Network('192.0.2.5/27', strict=False) + IPv4Network('192.0.2.0/27') + + +.. class:: IPv6Address(address) + + Construct an IPv6 address. *address* is a string or integer representing the + IP address. An :exc:`AddressValueError` is raised if *address* is not a + valid IPv6 address. + + >>> ipaddress.IPv6Address('2001:db8::1000') + IPv6Address('2001:db8::1000') + + +.. class:: IPv6Interface(address) + + Construct an IPv6 interface. *address* is a string or integer representing + the IP interface. An :exc:`AddressValueError` is raised if *address* is not + a valid IPv6 address. + + The network address for the interface is determined by calling + ``IPv6Network(address, strict=False)``. + + >>> ipaddress.IPv6Interface('2001:db8::1000/96') + IPv6Interface('2001:db8::1000/96') + >>> ipaddress.IPv6Interface('2001:db8::1000/96').network + IPv6Network('2001:db8::/96') + + +.. class:: IPv6Network(address, strict=True) + + Construct an IPv6 network. *address* is a string or integer representing the + IP address (and optionally the network). An :exc:`AddressValueError` is + raised if *address* is not a valid IPv6 address. A :exc:`NetmaskValueError` + is raised if the netmask is not valid for an IPv6 address. + + If *strict* is ``True`` and host bits are set in the supplied address, + then :exc:`ValueError` is raised. Otherwise, the host bits are masked out + to determine the appropriate network address. + + >>> ipaddress.IPv6Network('2001:db8::/96') + IPv6Network('2001:db8::/96') + >>> ipaddress.IPv6Network('2001:db8::/96').netmask + IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff::') + >>> ipaddress.IPv6Network('2001:db8::1000/96', strict=False) + IPv6Network('2001:db8::/96') + + +Other Module Level Functions +---------------------------- + +The module also provides the following module level functions: + +.. function:: v4_int_to_packed(address) + + Represent an address as 4 packed bytes in network (big-endian) order. + *address* is an integer representation of an IPv4 IP address. A + :exc:`ValueError` is raised if the integer is negative or too large to be an + IPv4 IP address. + + >>> ipaddress.ip_address(3221225985) + IPv4Address('192.0.2.1') + >>> ipaddress.v4_int_to_packed(3221225985) + b'\xc0\x00\x02\x01' + + +.. function:: v6_int_to_packed(address) + + Represent an address as 16 packed bytes in network (big-endian) order. + *address* is an integer representation of an IPv6 IP address. A + :exc:`ValueError` is raised if the integer is negative or too large to be an + IPv6 IP address. + + +.. function:: summarize_address_range(first, last) + + Return an iterator of the summarized network range given the first and last + IP addresses. *first* is the first :class:`IPv4Address` or + :class:`IPv6Address` in the range and *last* is the last :class:`IPv4Address` + or :class:`IPv6Address` in the range. A :exc:`TypeError` is raised if + *first* or *last* are not IP addresses or are not of the same version. A + :exc:`ValueError` is raised if *last* is not greater than *first* or if + *first* address version is not 4 or 6. + + >>> [ipaddr for ipaddr in ipaddress.summarize_address_range( + ... ipaddress.IPv4Address('192.0.2.0'), + ... ipaddress.IPv4Address('192.0.2.130'))] + [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'), IPv4Network('192.0.2.130/32')] + + +.. 
function:: collapse_addresses(addresses) + + Return an iterator of the collapsed :class:`IPv4Network` or + :class:`IPv6Network` objects. *addresses* is an iterator of + :class:`IPv4Network` or :class:`IPv6Network` objects. A :exc:`TypeError` is + raised if *addresses* contains mixed version objects. + + >>> [ipaddr for ipaddr in + ... ipaddress.collapse_addresses([ipaddress.IPv4Network('192.0.2.0/25'), + ... ipaddress.IPv4Network('192.0.2.128/25')])] + [IPv4Network('192.0.2.0/24')] + + +.. function:: get_mixed_type_key(obj) + + Return a key suitable for sorting between networks and addresses. Address + and Network objects are not sortable by default; they're fundamentally + different, so the expression:: + + IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24') + + doesn't make sense. There are some times however, where you may wish to + have :mod:`ipaddress` sort these anyway. If you need to do this, you can use + this function as the ``key`` argument to :func:`sorted()`. + + *obj* is either a network or address object. + + +Custom Exceptions +----------------- + +To support more specific error reporting from class constructors, the +module defines the following exceptions: + +.. exception:: AddressValueError(ValueError) + + Any value error related to the address. + + +.. exception:: NetmaskValueError(ValueError) + + Any value error related to the netmask. diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -124,6 +124,11 @@ - Issue #15000: Support the "unique" x32 architecture in _posixsubprocess.c. +Documentation +------------- + +- Issue #14814: Added first draft of ipaddress module API reference + Tests ----- -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 09:54:28 2012 From: python-checkins at python.org (jason.coombs) Date: Sun, 17 Jun 2012 09:54:28 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Adding_test_from_issue6727_?= =?utf8?q?demonstrating_that_symlink_import_issue_does_not?= Message-ID: http://hg.python.org/cpython/rev/afe67ea94bc6 changeset: 77480:afe67ea94bc6 user: Jason R. Coombs date: Sun Jun 17 03:53:47 2012 -0400 summary: Adding test from issue6727 demonstrating that symlink import issue does not occur here in 3.3 files: Lib/test/test_import.py | 53 +++++++++++++++++++++++++++++ 1 files changed, 53 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_import.py b/Lib/test/test_import.py --- a/Lib/test/test_import.py +++ b/Lib/test/test_import.py @@ -13,7 +13,9 @@ import unittest import textwrap import errno +import shutil +import test.support from test.support import ( EnvironmentVarGuard, TESTFN, check_warnings, forget, is_jython, make_legacy_pyc, rmtree, run_unittest, swap_attr, swap_item, temp_umask, @@ -690,6 +692,56 @@ self.assertEqual(m.x, 5) +class TestSymbolicallyLinkedPackage(unittest.TestCase): + package_name = 'sample' + + def setUp(self): + if os.path.exists(self.tagged): + shutil.rmtree(self.tagged) + if os.path.exists(self.package_name): + os.remove(self.package_name) + self.orig_sys_path = sys.path[:] + + # create a sample package; imagine you have a package with a tag and + # you want to symbolically link it from its untagged name. 
+ os.mkdir(self.tagged) + init_file = os.path.join(self.tagged, '__init__.py') + open(init_file, 'w').close() + assert os.path.exists(init_file) + + # now create a symlink to the tagged package + # sample -> sample-tagged + os.symlink(self.tagged, self.package_name) + + # assert os.path.isdir(self.package_name) # currently fails + assert os.path.isfile(os.path.join(self.package_name, '__init__.py')) + + @property + def tagged(self): + return self.package_name + '-tagged' + + # regression test for issue6727 + @unittest.skipUnless( + not hasattr(sys, 'getwindowsversion') + or sys.getwindowsversion() >= (6, 0), + "Windows Vista or later required") + @test.support.skip_unless_symlink + def test_symlinked_dir_importable(self): + # make sure sample can only be imported from the current directory. + sys.path[:] = ['.'] + + # and try to import the package + __import__(self.package_name) + + def tearDown(self): + # now cleanup + if os.path.exists(self.package_name): + os.remove(self.package_name) + if os.path.exists(self.tagged): + shutil.rmtree(self.tagged) + sys.path[:] = self.orig_sys_path + + def test_main(verbose=None): flag = importlib_util.using___import__ try: @@ -697,6 +749,7 @@ run_unittest(ImportTests, PycacheTests, PycRewritingTests, PathsTests, RelativeImportTests, OverridingImportBuiltinTests, + TestSymbolicallyLinkedPackage, importlib_import_test_suite()) finally: importlib_util.using___import__ = flag -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 10:28:09 2012 From: python-checkins at python.org (nick.coghlan) Date: Sun, 17 Jun 2012 10:28:09 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogSXNzdWUgIzE1MDQ0?= =?utf8?q?=3A_Handle_Fedora_17=27s_approach_to_ndbm_compatibility?= Message-ID: http://hg.python.org/cpython/rev/e2be1f43beed changeset: 77481:e2be1f43beed branch: 3.2 parent: 77444:714b8f91f3d4 user: Nick Coghlan date: Sun Jun 17 18:27:11 2012 +1000 summary: Issue #15044: Handle Fedora 17's approach to ndbm compatibility files: setup.py | 6 +++++- 1 files changed, 5 insertions(+), 1 deletions(-) diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -1077,10 +1077,14 @@ for cand in dbm_order: if cand == "ndbm": if find_file("ndbm.h", inc_dirs, []) is not None: - # Some systems have -lndbm, others don't + # Some systems have -lndbm, others have -lgdbm_compat, + # others don't have either if self.compiler.find_library_file(lib_dirs, 'ndbm'): ndbm_libs = ['ndbm'] + elif self.compiler.find_library_file(lib_dirs, + 'gdbm_compat'): + ndbm_libs = ['gdbm_compat'] else: ndbm_libs = [] print("building dbm using ndbm") -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 10:28:10 2012 From: python-checkins at python.org (nick.coghlan) Date: Sun, 17 Jun 2012 10:28:10 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_from_3=2E2=2E_=28Issue_=2315044=3A_Handle_Fedora_17=27?= =?utf8?q?s_approach_to_ndbm?= Message-ID: http://hg.python.org/cpython/rev/1f6c23ed8218 changeset: 77482:1f6c23ed8218 parent: 77480:afe67ea94bc6 parent: 77481:e2be1f43beed user: Nick Coghlan date: Sun Jun 17 18:27:54 2012 +1000 summary: Merge from 3.2. 
(Issue #15044: Handle Fedora 17's approach to ndbm compatibility) files: setup.py | 6 +++++- 1 files changed, 5 insertions(+), 1 deletions(-) diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -1085,10 +1085,14 @@ for cand in dbm_order: if cand == "ndbm": if find_file("ndbm.h", inc_dirs, []) is not None: - # Some systems have -lndbm, others don't + # Some systems have -lndbm, others have -lgdbm_compat, + # others don't have either if self.compiler.find_library_file(lib_dirs, 'ndbm'): ndbm_libs = ['ndbm'] + elif self.compiler.find_library_file(lib_dirs, + 'gdbm_compat'): + ndbm_libs = ['gdbm_compat'] else: ndbm_libs = [] if dbm_setup_debug: print("building dbm using ndbm") -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 10:35:53 2012 From: python-checkins at python.org (nick.coghlan) Date: Sun, 17 Jun 2012 10:35:53 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE1MDQ0?= =?utf8?q?=3A_Handle_Fedora_17=27s_approach_to_ndbm_compatibility_=28backp?= =?utf8?q?ort_from?= Message-ID: http://hg.python.org/cpython/rev/4d62f788aa19 changeset: 77483:4d62f788aa19 branch: 2.7 parent: 77474:35d3a8ed7997 user: Nick Coghlan date: Sun Jun 17 18:35:39 2012 +1000 summary: Issue #15044: Handle Fedora 17's approach to ndbm compatibility (backport from 3.x) files: setup.py | 6 +++++- 1 files changed, 5 insertions(+), 1 deletions(-) diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -1158,10 +1158,14 @@ for cand in dbm_order: if cand == "ndbm": if find_file("ndbm.h", inc_dirs, []) is not None: - # Some systems have -lndbm, others don't + # Some systems have -lndbm, others have -lgdbm_compat, + # others don't have either if self.compiler.find_library_file(lib_dirs, 'ndbm'): ndbm_libs = ['ndbm'] + elif self.compiler.find_library_file(lib_dirs, + 'gdbm_compat'): + ndbm_libs = ['gdbm_compat'] else: ndbm_libs = [] print "building dbm using ndbm" -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 10:42:07 2012 From: python-checkins at python.org (martin.v.loewis) Date: Sun, 17 Jun 2012 10:42:07 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Elaborate_that_sizeof_only_?= =?utf8?q?accounts_for_the_object_itself=2E?= Message-ID: http://hg.python.org/cpython/rev/cddaf96c8149 changeset: 77484:cddaf96c8149 parent: 77482:1f6c23ed8218 user: Martin v. L?wis date: Sun Jun 17 10:40:16 2012 +0200 summary: Elaborate that sizeof only accounts for the object itself. files: Doc/library/sys.rst | 3 +++ 1 files changed, 3 insertions(+), 0 deletions(-) diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -441,6 +441,9 @@ does not have to hold true for third-party extensions as it is implementation specific. + Only the memory consumption directly attributed to the object is + accounted for, not the memory consumption of objects it refers to. + If given, *default* will be returned if the object does not provide means to retrieve the size. Otherwise a :exc:`TypeError` will be raised. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 10:42:08 2012 From: python-checkins at python.org (martin.v.loewis) Date: Sun, 17 Jun 2012 10:42:08 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314055=3A_Add_=5F?= =?utf8?q?=5Fsizeof=5F=5F_support_to_=5Felementtree=2E?= Message-ID: http://hg.python.org/cpython/rev/093dec81ea1f changeset: 77485:093dec81ea1f user: Martin v. 
L?wis date: Sun Jun 17 10:41:22 2012 +0200 summary: Issue #14055: Add __sizeof__ support to _elementtree. files: Lib/test/test_xml_etree_c.py | 39 ++++++++++++++++++++++- Misc/NEWS | 2 + Modules/_elementtree.c | 14 ++++++++ 3 files changed, 53 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_xml_etree_c.py b/Lib/test/test_xml_etree_c.py --- a/Lib/test/test_xml_etree_c.py +++ b/Lib/test/test_xml_etree_c.py @@ -1,5 +1,5 @@ # xml.etree test for cElementTree - +import sys, struct from test import support from test.support import import_fresh_module import unittest @@ -40,6 +40,40 @@ self.assertEqual(cET_alias.SubElement.__module__, '_elementtree') + at unittest.skipUnless(cET, 'requires _elementtree') +class SizeofTest(unittest.TestCase): + def setUp(self): + import _testcapi + gc_headsize = _testcapi.SIZEOF_PYGC_HEAD + # object header + header = 'PP' + if hasattr(sys, "gettotalrefcount"): + # debug header + header = 'PP' + header + # fields + element = header + '5P' + self.elementsize = gc_headsize + struct.calcsize(element) + # extra + self.extra = struct.calcsize('PiiP4P') + + def test_element(self): + e = cET.Element('a') + self.assertEqual(sys.getsizeof(e), self.elementsize) + + def test_element_with_attrib(self): + e = cET.Element('a', href='about:') + self.assertEqual(sys.getsizeof(e), + self.elementsize + self.extra) + + def test_element_with_children(self): + e = cET.Element('a') + for i in range(5): + cET.SubElement(e, 'span') + # should have space for 8 children now + self.assertEqual(sys.getsizeof(e), + self.elementsize + self.extra + + struct.calcsize('8P')) + def test_main(): from test import test_xml_etree, test_xml_etree_c @@ -47,7 +81,8 @@ support.run_unittest( MiscTests, TestAliasWorking, - TestAcceleratorImported + TestAcceleratorImported, + SizeofTest, ) # Run the same test suite as the Python module diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -29,6 +29,8 @@ Library ------- +- Issue #14055: Add __sizeof__ support to _elementtree. + - Issue #15054: A bug in tokenize.tokenize that caused string literals with 'b' prefixes to be incorrectly tokenized has been fixed. Patch by Serhiy Storchaka. diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c --- a/Modules/_elementtree.c +++ b/Modules/_elementtree.c @@ -842,6 +842,19 @@ return NULL; } +static PyObject* +element_sizeof(PyObject* _self, PyObject* args) +{ + ElementObject *self = (ElementObject*)_self; + Py_ssize_t result = sizeof(ElementObject); + if (self->extra) { + result += sizeof(ElementObjectExtra); + if (self->extra->children != self->extra->_children) + result += sizeof(PyObject*) * self->extra->allocated; + } + return PyLong_FromSsize_t(result); +} + LOCAL(int) checkpath(PyObject* tag) { @@ -1609,6 +1622,7 @@ {"__copy__", (PyCFunction) element_copy, METH_VARARGS}, {"__deepcopy__", (PyCFunction) element_deepcopy, METH_VARARGS}, + {"__sizeof__", element_sizeof, METH_NOARGS}, {NULL, NULL} }; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 10:42:08 2012 From: python-checkins at python.org (martin.v.loewis) Date: Sun, 17 Jun 2012 10:42:08 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_White_space_normalization?= =?utf8?q?=2E?= Message-ID: http://hg.python.org/cpython/rev/72becb32208c changeset: 77486:72becb32208c user: Martin v. L?wis date: Sun Jun 17 10:42:02 2012 +0200 summary: White space normalization. 
files: Lib/test/test_xml_etree_c.py | 6 +++--- 1 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Lib/test/test_xml_etree_c.py b/Lib/test/test_xml_etree_c.py --- a/Lib/test/test_xml_etree_c.py +++ b/Lib/test/test_xml_etree_c.py @@ -62,7 +62,7 @@ def test_element_with_attrib(self): e = cET.Element('a', href='about:') - self.assertEqual(sys.getsizeof(e), + self.assertEqual(sys.getsizeof(e), self.elementsize + self.extra) def test_element_with_children(self): @@ -70,8 +70,8 @@ for i in range(5): cET.SubElement(e, 'span') # should have space for 8 children now - self.assertEqual(sys.getsizeof(e), - self.elementsize + self.extra + + self.assertEqual(sys.getsizeof(e), + self.elementsize + self.extra + struct.calcsize('8P')) def test_main(): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 11:00:49 2012 From: python-checkins at python.org (nick.coghlan) Date: Sun, 17 Jun 2012 11:00:49 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogSXNzdWUgIzE1MDQz?= =?utf8?q?=3A_skip_test=5Fgdb_if_the_custom_hooks_can=27t_be_loaded?= Message-ID: http://hg.python.org/cpython/rev/2db5010a610c changeset: 77487:2db5010a610c branch: 3.2 parent: 77481:e2be1f43beed user: Nick Coghlan date: Sun Jun 17 18:57:20 2012 +1000 summary: Issue #15043: skip test_gdb if the custom hooks can't be loaded files: Lib/test/test_gdb.py | 9 +++++++++ Misc/NEWS | 3 +++ 2 files changed, 12 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_gdb.py b/Lib/test/test_gdb.py --- a/Lib/test/test_gdb.py +++ b/Lib/test/test_gdb.py @@ -32,6 +32,15 @@ if gdbpy_version == b'': raise unittest.SkipTest("gdb not built with embedded python support") +# Verify that "gdb" can load our custom hooks +p = subprocess.Popen(["gdb", "--batch", cmd, + "--args", sys.executable], + stdout=subprocess.PIPE, stderr=subprocess.PIPE) +__, gdbpy_errors = p.communicate() +if b"auto-loading has been declined" in gdbpy_errors: + msg = "gdb security settings prevent use of custom hooks: %s" + raise unittest.SkipTest(msg % gdbpy_errors) + def gdb_has_frame_select(): # Does this build of gdb have gdb.Frame.select ? 
cmd = "--eval-command=python print(dir(gdb.Frame))" diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -301,6 +301,9 @@ Tests ----- +- Issue #15043: test_gdb is now skipped entirely if gdb security settings + block loading of the gdb hooks + - Issue #14026: In test_cmd_line_script, check that sys.argv is populated correctly for the various invocation approaches (Patch by Jason Yeo) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 11:04:07 2012 From: python-checkins at python.org (nick.coghlan) Date: Sun, 17 Jun 2012 11:04:07 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE1MDQz?= =?utf8?q?=3A_skip_test=5Fgdb_if_the_custom_hooks_can=27t_be_loaded_=28bac?= =?utf8?q?kport_from?= Message-ID: http://hg.python.org/cpython/rev/ac2f5067c220 changeset: 77488:ac2f5067c220 branch: 2.7 parent: 77483:4d62f788aa19 user: Nick Coghlan date: Sun Jun 17 19:03:39 2012 +1000 summary: Issue #15043: skip test_gdb if the custom hooks can't be loaded (backport from 3.x) files: Lib/test/test_gdb.py | 9 +++++++++ Misc/NEWS | 3 +++ 2 files changed, 12 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_gdb.py b/Lib/test/test_gdb.py --- a/Lib/test/test_gdb.py +++ b/Lib/test/test_gdb.py @@ -32,6 +32,15 @@ if gdbpy_version == '': raise unittest.SkipTest("gdb not built with embedded python support") +# Verify that "gdb" can load our custom hooks +p = subprocess.Popen(["gdb", "--batch", cmd, + "--args", sys.executable], + stdout=subprocess.PIPE, stderr=subprocess.PIPE) +__, gdbpy_errors = p.communicate() +if b"auto-loading has been declined" in gdbpy_errors: + msg = "gdb security settings prevent use of custom hooks: %s" + raise unittest.SkipTest(msg % gdbpy_errors) + def python_is_optimized(): cflags = sysconfig.get_config_vars()['PY_CFLAGS'] final_opt = "" diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -227,6 +227,9 @@ Tests ----- +- Issue #15043: test_gdb is now skipped entirely if gdb security settings + block loading of the gdb hooks + - Issue #14589: Update certificate chain for sha256.tbs-internet.com, fixing a test failure in test_ssl. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 11:06:32 2012 From: python-checkins at python.org (jason.coombs) Date: Sun, 17 Jun 2012 11:06:32 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Disable_test_on_Unix=2E_Cau?= =?utf8?q?ses_buildbots_to_fail=2E_See_Issue_=2315091?= Message-ID: http://hg.python.org/cpython/rev/939a68f5d14c changeset: 77489:939a68f5d14c parent: 77486:72becb32208c user: Jason R. Coombs date: Sun Jun 17 05:06:16 2012 -0400 summary: Disable test on Unix. Causes buildbots to fail. See Issue #15091 files: Lib/test/test_import.py | 4 ++++ 1 files changed, 4 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_import.py b/Lib/test/test_import.py --- a/Lib/test/test_import.py +++ b/Lib/test/test_import.py @@ -726,6 +726,10 @@ or sys.getwindowsversion() >= (6, 0), "Windows Vista or later required") @test.support.skip_unless_symlink + @unittest.skipUnless( + sys.platform == 'win32', + "Test failing on Unix (see issue15091)" + ) def test_symlinked_dir_importable(self): # make sure sample can only be imported from the current directory. 
sys.path[:] = ['.'] -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 11:14:49 2012 From: python-checkins at python.org (nick.coghlan) Date: Sun, 17 Jun 2012 11:14:49 +0200 Subject: [Python-checkins] =?utf8?q?devguide=3A_Add_section_on_unexpected_?= =?utf8?q?skips_when_running_the_tests?= Message-ID: http://hg.python.org/devguide/rev/9fee8e6c2619 changeset: 522:9fee8e6c2619 user: Nick Coghlan date: Sun Jun 17 19:14:39 2012 +1000 summary: Add section on unexpected skips when running the tests files: runtests.rst | 18 ++++++++++++++++++ 1 files changed, 18 insertions(+), 0 deletions(-) diff --git a/runtests.rst b/runtests.rst --- a/runtests.rst +++ b/runtests.rst @@ -71,6 +71,24 @@ above. +Unexpected Skips +---------------- + +Sometimes when running the test suite, you will see "unexpected skips" +reported. These represent cases where an entire test module has been +skipped, but the test suite normally expects the tests in that module to +be executed on that platform. + +Often, the cause is that an optional module hasn't been built due to missing +build dependencies. In these cases, the missing module reported when the test +is skipped should match one of the modules reported as failing to build when +:ref:`compiling`. + +In other cases, the skip message should provide enough detail to help figure +out and resolve the cause of the problem (for example, the default security +settings on some platforms will disallow some tests) + + Writing ------- -- Repository URL: http://hg.python.org/devguide From python-checkins at python.org Sun Jun 17 11:19:59 2012 From: python-checkins at python.org (nick.coghlan) Date: Sun, 17 Jun 2012 11:19:59 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_from_3=2E2_=28Issue_=2315043=3A_skip_test=5Fgdb_if_the?= =?utf8?q?_custom_hooks_can=27t_be_loaded=29?= Message-ID: http://hg.python.org/cpython/rev/9456591d0761 changeset: 77490:9456591d0761 parent: 77489:939a68f5d14c parent: 77487:2db5010a610c user: Nick Coghlan date: Sun Jun 17 19:16:02 2012 +1000 summary: Merge from 3.2 (Issue #15043: skip test_gdb if the custom hooks can't be loaded) files: Lib/test/test_gdb.py | 9 +++++++++ Misc/NEWS | 5 ++++- 2 files changed, 13 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_gdb.py b/Lib/test/test_gdb.py --- a/Lib/test/test_gdb.py +++ b/Lib/test/test_gdb.py @@ -36,6 +36,15 @@ if gdbpy_version == b'': raise unittest.SkipTest("gdb not built with embedded python support") +# Verify that "gdb" can load our custom hooks +p = subprocess.Popen(["gdb", "--batch", cmd, + "--args", sys.executable], + stdout=subprocess.PIPE, stderr=subprocess.PIPE) +__, gdbpy_errors = p.communicate() +if b"auto-loading has been declined" in gdbpy_errors: + msg = "gdb security settings prevent use of custom hooks: %s" + raise unittest.SkipTest(msg % gdbpy_errors) + def gdb_has_frame_select(): # Does this build of gdb have gdb.Frame.select ? 
cmd = "--eval-command=python print(dir(gdb.Frame))" diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -134,7 +134,10 @@ Tests ----- -- Issue #14963 (partial): Add test cases for exception handling behaviour +- Issue #15043: test_gdb is now skipped entirely if gdb security settings + block loading of the gdb hooks + +- Issue #14963: Add test cases for exception handling behaviour in contextlib.ExitStack (Initial patch by Alon Horev) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 12:12:27 2012 From: python-checkins at python.org (nick.coghlan) Date: Sun, 17 Jun 2012 12:12:27 +0200 Subject: [Python-checkins] =?utf8?q?devguide=3A_Add_FAQ_entry_with_details?= =?utf8?q?_on_getting_test=5Fgdb_to_run_when_the_list_of?= Message-ID: http://hg.python.org/devguide/rev/1d81501be702 changeset: 523:1d81501be702 user: Nick Coghlan date: Sun Jun 17 20:09:45 2012 +1000 summary: Add FAQ entry with details on getting test_gdb to run when the list of auto-load safe directories is restricted by default files: faq.rst | 11 +++++++++++ 1 files changed, 11 insertions(+), 0 deletions(-) diff --git a/faq.rst b/faq.rst --- a/faq.rst +++ b/faq.rst @@ -885,3 +885,14 @@ If the system copy of Autoconf does not match this version, you will need to install your own copy of Autoconf. + + +How do I update my auto-load-safe-path to allow test_gdb to run? +---------------------------------------------------------------- + +If ``test_gdb`` is being skipped with an "auto-loading has been declined" +message, then it is necessary to identify any Python build directories as +auto-load safe. One way to achieve this is to add a line like the following +to ``~/.gdbinit`` (edit the specific list of paths as appropriate):: + + add-auto-load-safe-path ~/devel/py3k:~/devel/py32:~/devel/py27 -- Repository URL: http://hg.python.org/devguide From python-checkins at python.org Sun Jun 17 12:12:35 2012 From: python-checkins at python.org (nick.coghlan) Date: Sun, 17 Jun 2012 12:12:35 +0200 Subject: [Python-checkins] =?utf8?q?devguide=3A_Making_this_a_subsection_o?= =?utf8?q?f_the_Running_tests_section?= Message-ID: http://hg.python.org/devguide/rev/b0da797c84af changeset: 524:b0da797c84af user: Nick Coghlan date: Sun Jun 17 20:12:16 2012 +1000 summary: Making this a subsection of the Running tests section files: runtests.rst | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diff --git a/runtests.rst b/runtests.rst --- a/runtests.rst +++ b/runtests.rst @@ -72,7 +72,7 @@ Unexpected Skips ----------------- +^^^^^^^^^^^^^^^^ Sometimes when running the test suite, you will see "unexpected skips" reported. 
These represent cases where an entire test module has been -- Repository URL: http://hg.python.org/devguide From python-checkins at python.org Sun Jun 17 13:16:25 2012 From: python-checkins at python.org (nick.coghlan) Date: Sun, 17 Jun 2012 13:16:25 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogSXNzdWUgIzE1MDk1?= =?utf8?q?=3A_Use_better_assertions_in_test=5Fimaplib?= Message-ID: http://hg.python.org/cpython/rev/4028bb7a5ac2 changeset: 77491:4028bb7a5ac2 branch: 3.2 parent: 77487:2db5010a610c user: Nick Coghlan date: Sun Jun 17 21:10:21 2012 +1000 summary: Issue #15095: Use better assertions in test_imaplib files: Lib/test/test_imaplib.py | 10 +++++----- 1 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py --- a/Lib/test/test_imaplib.py +++ b/Lib/test/test_imaplib.py @@ -233,8 +233,8 @@ with transient_internet(self.host): for cap in self.server.capabilities: self.assertIsInstance(cap, str) - self.assertTrue('LOGINDISABLED' in self.server.capabilities) - self.assertTrue('AUTH=ANONYMOUS' in self.server.capabilities) + self.assertIn('LOGINDISABLED', self.server.capabilities) + self.assertIn('AUTH=ANONYMOUS', self.server.capabilities) rs = self.server.login(self.username, self.password) self.assertEqual(rs[0], 'OK') @@ -257,7 +257,7 @@ def test_logincapa(self): for cap in self.server.capabilities: self.assertIsInstance(cap, str) - self.assertFalse('LOGINDISABLED' in self.server.capabilities) + self.assertNotIn('LOGINDISABLED', self.server.capabilities) @unittest.skipUnless(ssl, "SSL not available") @@ -268,8 +268,8 @@ def test_logincapa(self): for cap in self.server.capabilities: self.assertIsInstance(cap, str) - self.assertFalse('LOGINDISABLED' in self.server.capabilities) - self.assertTrue('AUTH=PLAIN' in self.server.capabilities) + self.assertNotIn('LOGINDISABLED', self.server.capabilities) + self.assertIn('AUTH=PLAIN', self.server.capabilities) def test_main(): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 13:16:30 2012 From: python-checkins at python.org (nick.coghlan) Date: Sun, 17 Jun 2012 13:16:30 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_from_3=2E2_=28Issue_=2315095=3A_Use_better_assertions_?= =?utf8?q?in_test=5Fimaplib=29?= Message-ID: http://hg.python.org/cpython/rev/bbe1a2049ca1 changeset: 77492:bbe1a2049ca1 parent: 77490:9456591d0761 parent: 77491:4028bb7a5ac2 user: Nick Coghlan date: Sun Jun 17 21:15:45 2012 +1000 summary: Merge from 3.2 (Issue #15095: Use better assertions in test_imaplib) files: Lib/test/test_imaplib.py | 10 +++++----- 1 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py --- a/Lib/test/test_imaplib.py +++ b/Lib/test/test_imaplib.py @@ -233,8 +233,8 @@ with transient_internet(self.host): for cap in self.server.capabilities: self.assertIsInstance(cap, str) - self.assertTrue('LOGINDISABLED' in self.server.capabilities) - self.assertTrue('AUTH=ANONYMOUS' in self.server.capabilities) + self.assertIn('LOGINDISABLED', self.server.capabilities) + self.assertIn('AUTH=ANONYMOUS', self.server.capabilities) rs = self.server.login(self.username, self.password) self.assertEqual(rs[0], 'OK') @@ -257,7 +257,7 @@ def test_logincapa(self): for cap in self.server.capabilities: self.assertIsInstance(cap, str) - self.assertFalse('LOGINDISABLED' in self.server.capabilities) + self.assertNotIn('LOGINDISABLED', self.server.capabilities) 
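# (Aside, illustration only and not part of the committed patch: the dedicated
# assertions fail with self-describing messages - assertIn('AUTH=PLAIN', caps)
# reports "'AUTH=PLAIN' not found in ['IMAP4rev1']", whereas the older
# assertTrue('AUTH=PLAIN' in caps) can only report "False is not true".)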
@unittest.skipUnless(ssl, "SSL not available") @@ -280,8 +280,8 @@ try: for cap in server.capabilities: self.assertIsInstance(cap, str) - self.assertFalse('LOGINDISABLED' in server.capabilities) - self.assertTrue('AUTH=PLAIN' in server.capabilities) + self.assertNotIn('LOGINDISABLED', server.capabilities) + self.assertIn('AUTH=PLAIN', server.capabilities) rs = server.login(self.username, self.password) self.assertEqual(rs[0], 'OK') finally: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 13:30:52 2012 From: python-checkins at python.org (nick.coghlan) Date: Sun, 17 Jun 2012 13:30:52 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Ignore_X-Antivi?= =?utf8?q?rus_headers_in_test=5Fnntplib?= Message-ID: http://hg.python.org/cpython/rev/201484fb885e changeset: 77493:201484fb885e branch: 3.2 parent: 77491:4028bb7a5ac2 user: Nick Coghlan date: Sun Jun 17 21:27:18 2012 +1000 summary: Ignore X-Antivirus headers in test_nntplib files: Lib/test/test_nntplib.py | 5 ++++- Misc/NEWS | 3 +++ 2 files changed, 7 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_nntplib.py b/Lib/test/test_nntplib.py --- a/Lib/test/test_nntplib.py +++ b/Lib/test/test_nntplib.py @@ -176,7 +176,10 @@ resp, article = self.server.article(art_num) self.assertTrue(resp.startswith("220 "), resp) self.check_article_resp(resp, article, art_num) - self.assertEqual(article.lines, head.lines + [b''] + body.lines) + # Tolerate running the tests from behind a NNTP virus checker + filtered_lines = [line for line in article.lines + if not line.startswith(b'X-Antivirus')] + self.assertEqual(filtered_lines, head.lines + [b''] + body.lines) def test_capabilities(self): # The server under test implements NNTP version 2 and has a diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -301,6 +301,9 @@ Tests ----- +- test_nntplib now tolerates being run from behind NNTP gateways that add + "X-Antivirus" headers to articles + - Issue #15043: test_gdb is now skipped entirely if gdb security settings block loading of the gdb hooks -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 13:30:53 2012 From: python-checkins at python.org (nick.coghlan) Date: Sun, 17 Jun 2012 13:30:53 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_from_3=2E2_=28Ignore_X-Antivirus_headers_in_test=5Fnnt?= =?utf8?q?plib=29?= Message-ID: http://hg.python.org/cpython/rev/c0eab397f098 changeset: 77494:c0eab397f098 parent: 77492:bbe1a2049ca1 parent: 77493:201484fb885e user: Nick Coghlan date: Sun Jun 17 21:30:35 2012 +1000 summary: Merge from 3.2 (Ignore X-Antivirus headers in test_nntplib) files: Lib/test/test_nntplib.py | 5 ++++- Misc/NEWS | 3 +++ 2 files changed, 7 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_nntplib.py b/Lib/test/test_nntplib.py --- a/Lib/test/test_nntplib.py +++ b/Lib/test/test_nntplib.py @@ -177,7 +177,10 @@ resp, article = self.server.article(art_num) self.assertTrue(resp.startswith("220 "), resp) self.check_article_resp(resp, article, art_num) - self.assertEqual(article.lines, head.lines + [b''] + body.lines) + # Tolerate running the tests from behind a NNTP virus checker + filtered_lines = [line for line in article.lines + if not line.startswith(b'X-Antivirus')] + self.assertEqual(filtered_lines, head.lines + [b''] + body.lines) def test_capabilities(self): # The server under test implements NNTP version 2 and has a diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS 
+++ b/Misc/NEWS @@ -134,6 +134,9 @@ Tests ----- +- test_nntplib now tolerates being run from behind NNTP gateways that add + "X-Antivirus" headers to articles + - Issue #15043: test_gdb is now skipped entirely if gdb security settings block loading of the gdb hooks -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 13:37:16 2012 From: python-checkins at python.org (ezio.melotti) Date: Sun, 17 Jun 2012 13:37:16 +0200 Subject: [Python-checkins] =?utf8?q?devguide=3A_=2314919=3A_clarify_paragr?= =?utf8?q?aph_in_the_devguide=2E?= Message-ID: http://hg.python.org/devguide/rev/2171bb071dd6 changeset: 525:2171bb071dd6 user: Ezio Melotti date: Sun Jun 17 13:37:09 2012 +0200 summary: #14919: clarify paragraph in the devguide. files: triaging.rst | 9 +++++---- 1 files changed, 5 insertions(+), 4 deletions(-) diff --git a/triaging.rst b/triaging.rst --- a/triaging.rst +++ b/triaging.rst @@ -180,12 +180,13 @@ attention. Use the :ref:`experts` to know who wants to be added to the nosy list for issues targeting specific areas. -If you have JavaScript enabled and permission to edit the nosy list, you can -use the ``[+]`` button to add yourself to the nosy (remember to click on + +If you are logged in and have JavaScript enabled, you can use the ``[+]`` +button to add yourself to the nosy list (remember to click on "Submit Changes" afterwards). Note that you are added to the nosy automatically when you submit a message. -The nosy list also has an autocomplete that lets you search from the list of -developers and from the :ref:`experts`. The search is case-insensitive and +The nosy list also has an autocomplete that lets you search from the lists of +developers and :ref:`experts`. The search is case-insensitive and works for real names, modules, interest areas, etc., and only adds the username(s) to the nosy once an entry is selected. -- Repository URL: http://hg.python.org/devguide From python-checkins at python.org Sun Jun 17 14:13:54 2012 From: python-checkins at python.org (ezio.melotti) Date: Sun, 17 Jun 2012 14:13:54 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogIzE0ODQwOiBBZGQg?= =?utf8?q?a_bit_on_the_difference_between_tuples_and_lists=2E__Initial_pat?= =?utf8?q?ch_by?= Message-ID: http://hg.python.org/cpython/rev/bb63919cde6e changeset: 77495:bb63919cde6e branch: 2.7 parent: 77488:ac2f5067c220 user: Ezio Melotti date: Sun Jun 17 14:10:59 2012 +0200 summary: #14840: Add a bit on the difference between tuples and lists. Initial patch by Zachary Ware. files: Doc/tutorial/datastructures.rst | 28 +++++++++++++++------ Misc/ACKS | 1 + 2 files changed, 21 insertions(+), 8 deletions(-) diff --git a/Doc/tutorial/datastructures.rst b/Doc/tutorial/datastructures.rst --- a/Doc/tutorial/datastructures.rst +++ b/Doc/tutorial/datastructures.rst @@ -423,17 +423,31 @@ ... u = t, (1, 2, 3, 4, 5) >>> u ((12345, 54321, 'hello!'), (1, 2, 3, 4, 5)) + >>> # Tuples are immutable: + ... t[0] = 88888 + Traceback (most recent call last): + File "", line 1, in + TypeError: 'tuple' object does not support item assignment + >>> # but they can contain mutable objects: + ... v = ([1, 2, 3], [3, 2, 1]) + >>> v + ([1, 2, 3], [3, 2, 1]) + As you see, on output tuples are always enclosed in parentheses, so that nested tuples are interpreted correctly; they may be input with or without surrounding parentheses, although often parentheses are necessary anyway (if the tuple is -part of a larger expression). +part of a larger expression). 
It is not possible to assign to the individual +items of a tuple, however it is possible to create tuples which contain mutable +objects, such as lists. -Tuples have many uses. For example: (x, y) coordinate pairs, employee records -from a database, etc. Tuples, like strings, are immutable: it is not possible -to assign to the individual items of a tuple (you can simulate much of the same -effect with slicing and concatenation, though). It is also possible to create -tuples which contain mutable objects, such as lists. +Though tuples may seem similar to lists, they are often used in different +situations and for different purposes. +Tuples are :term:`immutable`, and usually contain an heterogeneous sequence of +elements that are accessed via unpacking (see later in this section) or indexing +(or even by attribute in the case of :func:`namedtuples `). +Lists are :term:`mutable`, and their elements are usually homogeneous and are +accessed by iterating over the list. A special problem is the construction of tuples containing 0 or 1 items: the syntax has some extra quirks to accommodate these. Empty tuples are constructed @@ -462,8 +476,6 @@ sequence. Note that multiple assignment is really just a combination of tuple packing and sequence unpacking. -.. XXX Add a bit on the difference between tuples and lists. - .. _tut-sets: diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -900,6 +900,7 @@ Larry Wall Kevin Walzer Greg Ward +Zachary Ware Barry Warsaw Steve Waterbury Bob Watson -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 14:13:55 2012 From: python-checkins at python.org (ezio.melotti) Date: Sun, 17 Jun 2012 14:13:55 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogIzE0ODQwOiBBZGQg?= =?utf8?q?a_bit_on_the_difference_between_tuples_and_lists=2E__Initial_pat?= =?utf8?q?ch_by?= Message-ID: http://hg.python.org/cpython/rev/3550416d83b3 changeset: 77496:3550416d83b3 branch: 3.2 parent: 77493:201484fb885e user: Ezio Melotti date: Sun Jun 17 14:10:59 2012 +0200 summary: #14840: Add a bit on the difference between tuples and lists. Initial patch by Zachary Ware. files: Doc/tutorial/datastructures.rst | 28 +++++++++++++++------ Misc/ACKS | 1 + 2 files changed, 21 insertions(+), 8 deletions(-) diff --git a/Doc/tutorial/datastructures.rst b/Doc/tutorial/datastructures.rst --- a/Doc/tutorial/datastructures.rst +++ b/Doc/tutorial/datastructures.rst @@ -349,17 +349,31 @@ ... u = t, (1, 2, 3, 4, 5) >>> u ((12345, 54321, 'hello!'), (1, 2, 3, 4, 5)) + >>> # Tuples are immutable: + ... t[0] = 88888 + Traceback (most recent call last): + File "", line 1, in + TypeError: 'tuple' object does not support item assignment + >>> # but they can contain mutable objects: + ... v = ([1, 2, 3], [3, 2, 1]) + >>> v + ([1, 2, 3], [3, 2, 1]) + As you see, on output tuples are always enclosed in parentheses, so that nested tuples are interpreted correctly; they may be input with or without surrounding parentheses, although often parentheses are necessary anyway (if the tuple is -part of a larger expression). +part of a larger expression). It is not possible to assign to the individual +items of a tuple, however it is possible to create tuples which contain mutable +objects, such as lists. -Tuples have many uses. For example: (x, y) coordinate pairs, employee records -from a database, etc. 
Tuples, like strings, are immutable: it is not possible -to assign to the individual items of a tuple (you can simulate much of the same -effect with slicing and concatenation, though). It is also possible to create -tuples which contain mutable objects, such as lists. +Though tuples may seem similar to lists, they are often used in different +situations and for different purposes. +Tuples are :term:`immutable`, and usually contain an heterogeneous sequence of +elements that are accessed via unpacking (see later in this section) or indexing +(or even by attribute in the case of :func:`namedtuples `). +Lists are :term:`mutable`, and their elements are usually homogeneous and are +accessed by iterating over the list. A special problem is the construction of tuples containing 0 or 1 items: the syntax has some extra quirks to accommodate these. Empty tuples are constructed @@ -388,8 +402,6 @@ sequence. Note that multiple assignment is really just a combination of tuple packing and sequence unpacking. -.. XXX Add a bit on the difference between tuples and lists. - .. _tut-sets: diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -986,6 +986,7 @@ Kevin Walzer Rodrigo Steinmuller Wanderley Greg Ward +Zachary Ware Barry Warsaw Steve Waterbury Bob Watson -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 14:13:57 2012 From: python-checkins at python.org (ezio.melotti) Date: Sun, 17 Jun 2012 14:13:57 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_=2314840=3A_merge_with_3=2E2=2E?= Message-ID: http://hg.python.org/cpython/rev/2fad115408e9 changeset: 77497:2fad115408e9 parent: 77494:c0eab397f098 parent: 77496:3550416d83b3 user: Ezio Melotti date: Sun Jun 17 14:12:42 2012 +0200 summary: #14840: merge with 3.2. files: Doc/tutorial/datastructures.rst | 28 +++++++++++++++------ Misc/ACKS | 1 + 2 files changed, 21 insertions(+), 8 deletions(-) diff --git a/Doc/tutorial/datastructures.rst b/Doc/tutorial/datastructures.rst --- a/Doc/tutorial/datastructures.rst +++ b/Doc/tutorial/datastructures.rst @@ -354,17 +354,31 @@ ... u = t, (1, 2, 3, 4, 5) >>> u ((12345, 54321, 'hello!'), (1, 2, 3, 4, 5)) + >>> # Tuples are immutable: + ... t[0] = 88888 + Traceback (most recent call last): + File "", line 1, in + TypeError: 'tuple' object does not support item assignment + >>> # but they can contain mutable objects: + ... v = ([1, 2, 3], [3, 2, 1]) + >>> v + ([1, 2, 3], [3, 2, 1]) + As you see, on output tuples are always enclosed in parentheses, so that nested tuples are interpreted correctly; they may be input with or without surrounding parentheses, although often parentheses are necessary anyway (if the tuple is -part of a larger expression). +part of a larger expression). It is not possible to assign to the individual +items of a tuple, however it is possible to create tuples which contain mutable +objects, such as lists. -Tuples have many uses. For example: (x, y) coordinate pairs, employee records -from a database, etc. Tuples, like strings, are immutable: it is not possible -to assign to the individual items of a tuple (you can simulate much of the same -effect with slicing and concatenation, though). It is also possible to create -tuples which contain mutable objects, such as lists. +Though tuples may seem similar to lists, they are often used in different +situations and for different purposes. 
+Tuples are :term:`immutable`, and usually contain an heterogeneous sequence of +elements that are accessed via unpacking (see later in this section) or indexing +(or even by attribute in the case of :func:`namedtuples `). +Lists are :term:`mutable`, and their elements are usually homogeneous and are +accessed by iterating over the list. A special problem is the construction of tuples containing 0 or 1 items: the syntax has some extra quirks to accommodate these. Empty tuples are constructed @@ -393,8 +407,6 @@ sequence. Note that multiple assignment is really just a combination of tuple packing and sequence unpacking. -.. XXX Add a bit on the difference between tuples and lists. - .. _tut-sets: diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1092,6 +1092,7 @@ Kevin Walzer Rodrigo Steinmuller Wanderley Greg Ward +Zachary Ware Barry Warsaw Steve Waterbury Bob Watson -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 21:27:41 2012 From: python-checkins at python.org (r.david.murray) Date: Sun, 17 Jun 2012 21:27:41 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Update_out_of_d?= =?utf8?q?ate_docstring=2E?= Message-ID: http://hg.python.org/cpython/rev/6fbbf4ec6db5 changeset: 77498:6fbbf4ec6db5 branch: 3.2 parent: 77496:3550416d83b3 user: R David Murray date: Sun Jun 17 15:26:35 2012 -0400 summary: Update out of date docstring. files: Lib/email/message.py | 12 +++++------- 1 files changed, 5 insertions(+), 7 deletions(-) diff --git a/Lib/email/message.py b/Lib/email/message.py --- a/Lib/email/message.py +++ b/Lib/email/message.py @@ -613,17 +613,15 @@ the form (CHARSET, LANGUAGE, VALUE). Note that both CHARSET and LANGUAGE can be None, in which case you should consider VALUE to be encoded in the us-ascii charset. You can usually ignore LANGUAGE. + The parameter value (either the returned string, or the VALUE item in + the 3-tuple) is always unquoted, unless unquote is set to False. - Your application should be prepared to deal with 3-tuple return - values, and can convert the parameter to a Unicode string like so: + If your application doesn't care whether the parameter was RFC 2231 + encoded, it can turn the return value into a string as follows: param = msg.get_param('foo') - if isinstance(param, tuple): - param = unicode(param[2], param[0] or 'us-ascii') + param = email.utils.collapse_rfc2231_value(rawparam) - In any case, the parameter value (either the returned string, or the - VALUE item in the 3-tuple) is always unquoted, unless unquote is set - to False. """ if header not in self: return failobj -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 21:27:41 2012 From: python-checkins at python.org (r.david.murray) Date: Sun, 17 Jun 2012 21:27:41 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge=3A_Update_out_of_date_docstring=2E?= Message-ID: http://hg.python.org/cpython/rev/6e0ee573ba1e changeset: 77499:6e0ee573ba1e parent: 77497:2fad115408e9 parent: 77498:6fbbf4ec6db5 user: R David Murray date: Sun Jun 17 15:27:21 2012 -0400 summary: Merge: Update out of date docstring. files: Lib/email/message.py | 12 +++++------- 1 files changed, 5 insertions(+), 7 deletions(-) diff --git a/Lib/email/message.py b/Lib/email/message.py --- a/Lib/email/message.py +++ b/Lib/email/message.py @@ -628,17 +628,15 @@ the form (CHARSET, LANGUAGE, VALUE). 
Note that both CHARSET and LANGUAGE can be None, in which case you should consider VALUE to be encoded in the us-ascii charset. You can usually ignore LANGUAGE. + The parameter value (either the returned string, or the VALUE item in + the 3-tuple) is always unquoted, unless unquote is set to False. - Your application should be prepared to deal with 3-tuple return - values, and can convert the parameter to a Unicode string like so: + If your application doesn't care whether the parameter was RFC 2231 + encoded, it can turn the return value into a string as follows: param = msg.get_param('foo') - if isinstance(param, tuple): - param = unicode(param[2], param[0] or 'us-ascii') + param = email.utils.collapse_rfc2231_value(rawparam) - In any case, the parameter value (either the returned string, or the - VALUE item in the 3-tuple) is always unquoted, unless unquote is set - to False. """ if header not in self: return failobj -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 22:36:55 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sun, 17 Jun 2012 22:36:55 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314657=3A_The_froze?= =?utf8?q?n_instance_of_importlib_used_for_bootstrap_is_now_also?= Message-ID: http://hg.python.org/cpython/rev/e3a984076837 changeset: 77500:e3a984076837 user: Antoine Pitrou date: Sun Jun 17 22:33:38 2012 +0200 summary: Issue #14657: The frozen instance of importlib used for bootstrap is now also the module imported as importlib._bootstrap. files: Lib/importlib/__init__.py | 25 ++++++++++++++++--------- Lib/test/test_import.py | 20 +++++++++++++++++++- Misc/NEWS | 3 +++ 3 files changed, 38 insertions(+), 10 deletions(-) diff --git a/Lib/importlib/__init__.py b/Lib/importlib/__init__.py --- a/Lib/importlib/__init__.py +++ b/Lib/importlib/__init__.py @@ -1,19 +1,26 @@ """A pure Python implementation of import.""" __all__ = ['__import__', 'import_module', 'invalidate_caches'] -from . import _bootstrap - - -# To simplify imports in test code -_w_long = _bootstrap._w_long -_r_long = _bootstrap._r_long - - # Bootstrap help ##################################################### import imp import sys -_bootstrap._setup(sys, imp) +try: + _bootstrap = sys.modules['_frozen_importlib'] +except ImportError: + from . import _bootstrap + _bootstrap._setup(sys, imp) +else: + # importlib._bootstrap is the built-in import, ensure we don't create + # a second copy of the module. + _bootstrap.__name__ = 'importlib._bootstrap' + _bootstrap.__package__ = 'importlib' + _bootstrap.__file__ = __file__.replace('__init__.py', '_bootstrap.py') + sys.modules['importlib._bootstrap'] = _bootstrap + +# To simplify imports in test code +_w_long = _bootstrap._w_long +_r_long = _bootstrap._r_long # Public API ######################################################### diff --git a/Lib/test/test_import.py b/Lib/test/test_import.py --- a/Lib/test/test_import.py +++ b/Lib/test/test_import.py @@ -19,7 +19,7 @@ from test.support import ( EnvironmentVarGuard, TESTFN, check_warnings, forget, is_jython, make_legacy_pyc, rmtree, run_unittest, swap_attr, swap_item, temp_umask, - unlink, unload, create_empty_file) + unlink, unload, create_empty_file, cpython_only) from test import script_helper @@ -746,6 +746,23 @@ sys.path[:] = self.orig_sys_path + at cpython_only +class ImportlibBootstrapTests(unittest.TestCase): + # These tests check that importlib is bootstrapped. 
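    # (Context for the assertions below, summarising the importlib/__init__.py
    # change earlier in this patch: at startup the frozen bootstrap module is
    # exposed as sys.modules['_frozen_importlib'], and importlib/__init__.py
    # now re-registers that same module object as importlib._bootstrap rather
    # than importing a second, pure-Python copy of it.)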
+ + def test_frozen_importlib(self): + mod = sys.modules['_frozen_importlib'] + self.assertTrue(mod) + + def test_frozen_importlib_is_bootstrap(self): + from importlib import _bootstrap + mod = sys.modules['_frozen_importlib'] + self.assertIs(mod, _bootstrap) + self.assertEqual(mod.__name__, 'importlib._bootstrap') + self.assertEqual(mod.__package__, 'importlib') + self.assertTrue(mod.__file__.endswith('_bootstrap.py'), mod.__file__) + + def test_main(verbose=None): flag = importlib_util.using___import__ try: @@ -753,6 +770,7 @@ run_unittest(ImportTests, PycacheTests, PycRewritingTests, PathsTests, RelativeImportTests, OverridingImportBuiltinTests, + ImportlibBootstrapTests, TestSymbolicallyLinkedPackage, importlib_import_test_suite()) finally: diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -29,6 +29,9 @@ Library ------- +- Issue #14657: The frozen instance of importlib used for bootstrap is now + also the module imported as importlib._bootstrap. + - Issue #14055: Add __sizeof__ support to _elementtree. - Issue #15054: A bug in tokenize.tokenize that caused string literals -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 17 23:21:35 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sun, 17 Jun 2012 23:21:35 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Prevent_test=5Finspect_from?= =?utf8?q?_keeping_alive_a_ton_of_frames_and_local_variables_by?= Message-ID: http://hg.python.org/cpython/rev/bc2459063bcd changeset: 77501:bc2459063bcd user: Antoine Pitrou date: Sun Jun 17 23:18:07 2012 +0200 summary: Prevent test_inspect from keeping alive a ton of frames and local variables by way of a global variable keeping a reference to a traceback. Should fix some buildbot failures. files: Lib/importlib/test/test_locks.py | 2 +- Lib/test/test_inspect.py | 27 +++++++++++-------- 2 files changed, 16 insertions(+), 13 deletions(-) diff --git a/Lib/importlib/test/test_locks.py b/Lib/importlib/test/test_locks.py --- a/Lib/importlib/test/test_locks.py +++ b/Lib/importlib/test/test_locks.py @@ -101,7 +101,7 @@ def test_all_locks(self): support.gc_collect() - self.assertEqual(0, len(_bootstrap._module_locks)) + self.assertEqual(0, len(_bootstrap._module_locks), _bootstrap._module_locks) @support.reap_threads diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py --- a/Lib/test/test_inspect.py +++ b/Lib/test/test_inspect.py @@ -41,11 +41,6 @@ import builtins -try: - 1/0 -except: - tb = sys.exc_info()[2] - git = mod.StupidGit() class IsTestBase(unittest.TestCase): @@ -79,23 +74,31 @@ def test_excluding_predicates(self): + global tb self.istest(inspect.isbuiltin, 'sys.exit') self.istest(inspect.isbuiltin, '[].append') self.istest(inspect.iscode, 'mod.spam.__code__') - self.istest(inspect.isframe, 'tb.tb_frame') + try: + 1/0 + except: + tb = sys.exc_info()[2] + self.istest(inspect.isframe, 'tb.tb_frame') + self.istest(inspect.istraceback, 'tb') + if hasattr(types, 'GetSetDescriptorType'): + self.istest(inspect.isgetsetdescriptor, + 'type(tb.tb_frame).f_locals') + else: + self.assertFalse(inspect.isgetsetdescriptor(type(tb.tb_frame).f_locals)) + finally: + # Clear traceback and all the frames and local variables hanging to it. 
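            # (A traceback references every frame on its stack, and each frame
            # references its local and global variables, so a module-level 'tb'
            # left behind would keep all of those objects alive for the rest of
            # the test run.)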
+ tb = None self.istest(inspect.isfunction, 'mod.spam') self.istest(inspect.isfunction, 'mod.StupidGit.abuse') self.istest(inspect.ismethod, 'git.argue') self.istest(inspect.ismodule, 'mod') - self.istest(inspect.istraceback, 'tb') self.istest(inspect.isdatadescriptor, 'collections.defaultdict.default_factory') self.istest(inspect.isgenerator, '(x for x in range(2))') self.istest(inspect.isgeneratorfunction, 'generator_function_example') - if hasattr(types, 'GetSetDescriptorType'): - self.istest(inspect.isgetsetdescriptor, - 'type(tb.tb_frame).f_locals') - else: - self.assertFalse(inspect.isgetsetdescriptor(type(tb.tb_frame).f_locals)) if hasattr(types, 'MemberDescriptorType'): self.istest(inspect.ismemberdescriptor, 'datetime.timedelta.days') else: -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Mon Jun 18 05:47:50 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Mon, 18 Jun 2012 05:47:50 +0200 Subject: [Python-checkins] Daily reference leaks (bc2459063bcd): sum=0 Message-ID: results for bc2459063bcd on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogG3xUC0', '-x'] From python-checkins at python.org Mon Jun 18 09:49:29 2012 From: python-checkins at python.org (petri.lehtinen) Date: Mon, 18 Jun 2012 09:49:29 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Fix_NEWS_entry_?= =?utf8?q?for_=2315036?= Message-ID: http://hg.python.org/cpython/rev/8b38a81ba3bf changeset: 77502:8b38a81ba3bf branch: 2.7 parent: 77495:bb63919cde6e user: Petri Lehtinen date: Mon Jun 18 10:42:05 2012 +0300 summary: Fix NEWS entry for #15036 files: Misc/NEWS | 6 +++--- 1 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -71,9 +71,9 @@ with 'b' and 'br' prefixes to be incorrectly tokenized has been fixed. Patch by Serhiy Storchaka. -- Issue #15036: Allow removing or changing multiple items in - single-file mailboxes (mbox, MMDF, Babyl) flushing the mailbox - between the changes. +- Issue #15036: Mailbox no longer throws an error if a flush is done + between operations when removing or changing multiple items in mbox, + MMDF, or Babyl mailboxes. - Issue #10133: Make multiprocessing deallocate buffer if socket read fails. Patch by Hallvard B Furuseth. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 18 09:49:32 2012 From: python-checkins at python.org (petri.lehtinen) Date: Mon, 18 Jun 2012 09:49:32 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Fix_NEWS_entry_?= =?utf8?q?for_=2315036?= Message-ID: http://hg.python.org/cpython/rev/38e2a87c9051 changeset: 77503:38e2a87c9051 branch: 3.2 parent: 77498:6fbbf4ec6db5 user: Petri Lehtinen date: Mon Jun 18 10:43:53 2012 +0300 summary: Fix NEWS entry for #15036 files: Misc/NEWS | 6 +++--- 1 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -70,9 +70,9 @@ Library ------- -- Issue #15036: Allow removing or changing multiple items in - single-file mailboxes (mbox, MMDF, Babyl) flushing the mailbox - between the changes. +- Issue #15036: Mailbox no longer throws an error if a flush is done + between operations when removing or changing multiple items in mbox, + MMDF, or Babyl mailboxes. - Issue #10133: Make multiprocessing deallocate buffer if socket read fails. Patch by Hallvard B Furuseth. 
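In practice the behaviour described by that #15036 entry amounts to being able
to interleave flushes with further modifications, roughly as in the following
sketch ('demo.mbox' is just an illustrative path in the current directory):

    import mailbox

    mb = mailbox.mbox('demo.mbox')
    key_a = mb.add('From: a@example.com\n\nfirst message\n')
    key_b = mb.add('From: b@example.com\n\nsecond message\n')
    mb.remove(key_a)
    mb.flush()      # flushing here used to invalidate the removal below
    mb.remove(key_b)
    mb.flush()
    mb.close()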
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 18 09:49:33 2012 From: python-checkins at python.org (petri.lehtinen) Date: Mon, 18 Jun 2012 09:49:33 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Fix_NEWS_entry_for_=2315036?= Message-ID: http://hg.python.org/cpython/rev/072b08989731 changeset: 77504:072b08989731 parent: 77501:bc2459063bcd parent: 77503:38e2a87c9051 user: Petri Lehtinen date: Mon Jun 18 10:45:56 2012 +0300 summary: Fix NEWS entry for #15036 files: Misc/NEWS | 6 +++--- 1 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -45,9 +45,9 @@ non-standard iter_modules() method to be defined by an importer (something the importlib importers do not define). -- Issue #15036: Allow removing or changing multiple items in - single-file mailboxes (mbox, MMDF, Babyl) flushing the mailbox - between the changes. +- Issue #15036: Mailbox no longer throws an error if a flush is done + between operations when removing or changing multiple items in mbox, + MMDF, or Babyl mailboxes. - Issue #14059: Implement multiprocessing.Barrier. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 18 15:12:59 2012 From: python-checkins at python.org (richard.oudkerk) Date: Mon, 18 Jun 2012 15:12:59 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fiddle_with_timeouts_in_bar?= =?utf8?q?rier_tests?= Message-ID: http://hg.python.org/cpython/rev/18de5d7e1256 changeset: 77505:18de5d7e1256 user: Richard Oudkerk date: Mon Jun 18 14:11:10 2012 +0100 summary: Fiddle with timeouts in barrier tests files: Lib/test/test_multiprocessing.py | 12 ++++++------ 1 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py --- a/Lib/test/test_multiprocessing.py +++ b/Lib/test/test_multiprocessing.py @@ -1151,7 +1151,7 @@ Tests for Barrier objects. """ N = 5 - defaultTimeout = 10.0 # XXX Slow Windows buildbots need generous timeout + defaultTimeout = 30.0 # XXX Slow Windows buildbots need generous timeout def setUp(self): self.barrier = self.Barrier(self.N, timeout=self.defaultTimeout) @@ -1327,10 +1327,10 @@ @classmethod def _test_timeout_f(cls, barrier, results): - i = barrier.wait(20) + i = barrier.wait() if i == cls.N//2: # One thread is late! 
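        # (illustration: the straggler below only has to sleep for longer than
        # the 0.5 second timeout passed to the wait() call that follows, so the
        # other threads give up and the barrier is marked broken)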
- time.sleep(4.0) + time.sleep(1.0) try: barrier.wait(0.5) except threading.BrokenBarrierError: @@ -1346,10 +1346,10 @@ @classmethod def _test_default_timeout_f(cls, barrier, results): - i = barrier.wait(20) + i = barrier.wait(cls.defaultTimeout) if i == cls.N//2: # One thread is later than the default timeout - time.sleep(4.0) + time.sleep(1.0) try: barrier.wait() except threading.BrokenBarrierError: @@ -1359,7 +1359,7 @@ """ Test the barrier's default timeout """ - barrier = self.Barrier(self.N, timeout=1.0) + barrier = self.Barrier(self.N, timeout=0.5) results = self.DummyList() self.run_threads(self._test_default_timeout_f, (barrier, results)) self.assertEqual(len(results), barrier.parties) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 18 16:28:38 2012 From: python-checkins at python.org (martin.v.loewis) Date: Mon, 18 Jun 2012 16:28:38 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Tighten_the_specification=2C_a?= =?utf8?q?nd_elaborate_missing_details=2E?= Message-ID: http://hg.python.org/peps/rev/80ce8e5dd630 changeset: 4463:80ce8e5dd630 parent: 4444:727662ab7f50 user: Martin v. L?wis date: Mon Jun 18 16:28:05 2012 +0200 summary: Tighten the specification, and elaborate missing details. files: pep-0397.txt | 171 ++++++++++++++++++++++++++------------- 1 files changed, 114 insertions(+), 57 deletions(-) diff --git a/pep-0397.txt b/pep-0397.txt --- a/pep-0397.txt +++ b/pep-0397.txt @@ -2,7 +2,8 @@ Title: Python launcher for Windows Version: $Revision$ Last-Modified: $Date$ -Author: Mark Hammond +Author: Mark Hammond , + Martin v. L?wis Status: Draft Type: Standards Track Content-Type: text/plain @@ -62,67 +63,68 @@ While this PEP offers the ability to use a shebang line which should work on both Windows and Unix, this is not the primary motivation for this PEP - the primary motivation is to allow a specific version to be - specified without inventing new syntax or conventions to describe it. + specified without inventing new syntax or conventions to describe + it. -An overview of the launcher. +Specification - This PEP outlines the general functionality and key guidelines of a - launcher for Windows. It is accompanied by an implementation [3], - written in C, which defines the detailed implementation. Over - time, changes to the implementation may be desired - if the changes - adhere to the guidelines in this PEP and have been made following - the standard Python development model this PEP need not change. - In other words, this PEP makes no attempt to describe in detail every - feature offered by the launcher but instead to offer guidelines the - launcher should adhere to. + This PEP specifies features of the launcher; a prototype + implementation is provided in [3] which will be distributed + together with the Windows installer of Python, but will also be + available separately (but released along with the Python + installer). New features may be added to the launcher as + long as the features prescribed here continue to work. - The launcher will come in 2 versions - one which is a console program and +Installation + + The launcher comes in 2 versions - one which is a console program and one which is a "windows" (ie., GUI) program. These 2 launchers correspond to the 'python.exe' and 'pythonw.exe' executables which currently ship with Python. The console launcher will be named 'py.exe' and the Windows one named 'pyw.exe'. 
The "windows" (ie., GUI) version of the launcher will attempt to locate and launch pythonw.exe even if a virtual shebang - line nominates simply "python" - infact, the trailing 'w' notation will - not be supported in the virtual shebang line at all. + line nominates simply "python" - infact, the trailing 'w' notation is + not supported in the virtual shebang line at all. - The launcher will be distributed with all future versions of Python - and if possible, should be installed directly into the Windows directory - (note that the System32 directory is not a good option as this directory - is not on the default PATH for 32bit processes on a 64bit Windows.) If - the launcher can't be installed in the Windows directory, the installer - can suggest or choose an alternative, but it will be the responsibility - of the user to ensure this directory is on the PATH. - - Once installed, the "console" version of the launcher should be + The launcher is installed into the Windows directory (see + discussion below) if installed by a privileged user. The + stand-alone installer asks for an alternative location of the + installer, and adds that location to the user's PATH. + + The launcher installation is registered in + HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\CurrentVersion\SharedDLLs + with a reference counter. + It contains a version resource matching the version number of the + pythonXY.dll with which it is distributed. Independent + installations will always only overwrite newer versions of the + launcher with older versions. + + The 32-bit distribution of Python will not install a 32-bit + version of the launcher on a 64-bit system. + + Once installed, the "console" version of the launcher is associated with .py files and the "windows" version associated with .pyw files. - The launcher will not be tied to a specific version of Python - eg., a + The launcher is not tied to a specific version of Python - eg., a launcher distributed with Python 3.3 should be capable of locating and - executing any Python 2.x and Python 3.x version. Future versions of the - launcher should remain backwards compatible with older versions, so later - versions of Python can install an updated version of the launcher without - impacting how the previously installed version of the launcher is used. + executing any Python 2.x and Python 3.x version. However, the + launcher binaries have a version resource that is the same as the + version resource in the Python binaries that they are released with. - The launcher may offer some conveniences for Python developers working - interactively - for example, starting the launcher with no command-line - arguments will launch the default Python with no command-line arguments. - Further, command-line arguments will be supported to allow a specific - Python version to be launched interactively - however, these conveniences - must not detract from the primary purpose of launching scripts and must - be easy to avoid if desired. +Python Script Launching -Guidelines for a Python launcher. + The launcher is restricted to launching Python scripts. + It is not intended as a general-purpose script launcher or + shebang processor. - The Python launcher described in this PEP will intentionally be - constrained to the use-cases described in the Rationale section - above. It will not attempt to be a general purpose script launcher - or shebang processor. 
- - The launcher should support for format of shebang lines as described + The launcher supports the syntax of shebang lines as described in [1], including all restrictions listed. - The launcher should support shebang lines commonly found on Unix. + The launcher supports shebang lines referring to Python + executables with any of the (regex) prefixes "/usr/bin/", "/usr/local/bin" + and "/usr/bin/env *", as well as binaries specified without + For example, a shebang line of '#! /usr/bin/python' should work even though there is unlikely to be an executable in the relative Windows directory "\usr\bin". This means that many scripts can use a single @@ -159,17 +161,21 @@ If the first command-line argument does not start with a dash ('-') character, an attempt will be made to open that argument as a file - and parsed for a shebang line according to the rules in [1]. Once - parsed, the command will be categorized according to the following rules: + and parsed for a shebang line according to the rules in [1]:: + + #! interpreter [optional-arg] + + Once parsed, the command will be categorized according to the following rules: * If the command starts with the definition of a customized command followed by a whitespace character (including a newline), the customized command will be used. See below for a description of customized commands. - * The launcher will define a set of strings which are considered Unix - compatible commands to launch Python, such as '/usr/bin/python' etc. - If a command matching one of these strings will be treated as a + * The launcher will define a set of prefixes which are considered Unix + compatible commands to launch Python, namely "/usr/bin/python", + "/usr/local/bin/python", "/usr/bin/env python", and "python". + If a command starts with one of these strings will be treated as a 'virtual command' and the rules described in Python Version Qualifiers (below) will be used to locate the executable to use. @@ -185,7 +191,7 @@ command processor (such as automatic appending of extensions other than '.exe', support for batch files, etc) will not be used. - The use of 'virtual' shebang lines will be encouraged as this should + The use of 'virtual' shebang lines is encouraged as this should allow for portable shebang lines to be specified which work on multiple operating systems and different installations of the same operating system. @@ -199,16 +205,32 @@ to create the specified child process will cause the launcher to display an appropriate message and terminate with a specific exit code. -Virtual commands in shebang lines: +Configuration file + + Two .ini files will be searched by the launcher - ``py.ini`` in the + current user's "application data" directory (i.e. the directory returned + by calling the Windows function SHGetFolderPath with CSIDL_LOCAL_APPDATA) + and ``py.ini`` in the same directory as the launcher. The same .ini + files are used for both the 'console' version of the launcher (i.e. + py.exe) and for the 'windows' version (i.e. pyw.exe) + + + Customization specified in the "application directory" will have + precedence over the one next to the executable, so a user, who may not + have write access to the .ini file next to the launcher, can override + commands in that global .ini file) + +Virtual commands in shebang lines Virtual Commands are shebang lines which start with strings which would be expected to work on Unix platforms - examples include '/usr/bin/python', '/usr/bin/env python' and 'python'. 
Optionally, the virtual command may be suffixed with a version qualifier (see below), such as '/usr/bin/python2' or '/usr/bin/python3.2'. The command executed - is based on the rules described in Python Version Qualifiers below. + is based on the rules described in Python Version Qualifiers + below. -Customized Commands: +Customized Commands The launcher will support the ability to define "Customized Commands" in a Windows .ini file (ie, a file which can be parsed by the Windows function @@ -251,10 +273,25 @@ installed last). As noted above, an optional "-32" suffix can be used on a version specifier to change this behaviour. - The launcher will support various customization options to allow - fine-grained control over which specific Python version is chosen given - a partial or empty version qualifier - see the launcher documentation [4] - for details. + If no version qualifiers are found in a command, the environment variable + ``PY_PYTHON`` can be set to specify the default version qualifier - the default + value is "2". Note this value could specify just a major version (e.g. "2") or + a major.minor qualifier (e.g. "2.6"), or even major.minor-32. + + If no minor version qualifiers are found, the environment variable + ``PY_PYTHON{major}`` (where ``{major}`` is the current major version qualifier + as determined above) can be set to specify the full version. If no such option + is found, the launcher will enumerate the installed Python versions and use + the latest minor release found for the major version, which is likely, + although not guaranteed, to be the most recently installed version in that + family. + + In addition to environment variables, the same settings can be configured + in the .INI file used by the launcher. The section in the INI file is + called ``[defaults]`` and the key name will be the same as the + environment variables without the leading ``PY\_`` prefix (and note that + the key names in the INI file are case insensitive.) The contents of + an environment variable will override things specified in the INI file. Command-line handling @@ -287,6 +324,26 @@ Process Launching + The launcher offers some conveniences for Python developers working + interactively - for example, starting the launcher with no command-line + arguments will launch the default Python with no command-line arguments. + Further, command-line arguments will be supported to allow a specific + Python version to be launched interactively - however, these conveniences + must not detract from the primary purpose of launching scripts and must + be easy to avoid if desired. + + The launcher creates a subprocess to start the actual + interpreter. See `Discussion? below for the rationale. + + +Discussion + + It may be surprising that the launcher is installed into the + Windows directory, and not the System32 directory. The reason is + that the System32 directory is not on the Path of a 32-bit process + running on a 64-bit system. However, the Windows directory is + always on the path. + Ideally, the launcher process would execute Python directly inside the same process, primarily so the parent of the launcher process could terminate the launcher and have the Python interpreter terminate. 
If the -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Mon Jun 18 16:28:38 2012 From: python-checkins at python.org (martin.v.loewis) Date: Mon, 18 Jun 2012 16:28:38 +0200 Subject: [Python-checkins] =?utf8?q?peps_=28merge_default_-=3E_default=29?= =?utf8?q?=3A_merged?= Message-ID: http://hg.python.org/peps/rev/0cde8b31a8a2 changeset: 4464:0cde8b31a8a2 parent: 4463:80ce8e5dd630 parent: 4462:634c46edc47a user: Martin v. L?wis date: Mon Jun 18 16:28:24 2012 +0200 summary: merged files: pep-0362.txt | 650 +++++++++++++++++++++++--------------- pep-0398.txt | 4 +- pep-0405.txt | 18 +- pep-0420.txt | 2 +- pep-0421.txt | 2 +- pep-0422.txt | 354 +++++++++++++++++++++ 6 files changed, 762 insertions(+), 268 deletions(-) diff --git a/pep-0362.txt b/pep-0362.txt --- a/pep-0362.txt +++ b/pep-0362.txt @@ -2,332 +2,468 @@ Title: Function Signature Object Version: $Revision$ Last-Modified: $Date$ -Author: Brett Cannon , Jiwon Seo +Author: Brett Cannon , Jiwon Seo , + Yury Selivanov , Larry Hastings Status: Draft Type: Standards Track Content-Type: text/x-rst Created: 21-Aug-2006 -Python-Version: 2.6 -Post-History: 05-Sep-2007 +Python-Version: 3.3 +Post-History: 04-Jun-2012 Abstract ======== Python has always supported powerful introspection capabilities, -including that for functions and methods (for the rest of this PEP the -word "function" refers to both functions and methods). Taking a -function object, you can fully reconstruct the function's signature. -Unfortunately it is a little unruly having to look at all the -different attributes to pull together complete information for a -function's signature. +including introspecting functions and methods (for the rest of +this PEP, "function" refers to both functions and methods). By +examining a function object you can fully reconstruct the function's +signature. Unfortunately this information is stored in an inconvenient +manner, and is spread across a half-dozen deeply nested attributes. -This PEP proposes an object representation for function signatures. -This should help facilitate introspection on functions for various -uses. The introspection information contains all possible information -about the parameters in a signature (including Python 3.0 features). +This PEP proposes a new representation for function signatures. +The new representation contains all necessary information about a function +and its parameters, and makes introspection easy and straightforward. -This object, though, is not meant to replace existing ways of -introspection on a function's signature. The current solutions are -there to make Python's execution work in an efficient manner. The -proposed object representation is only meant to help make application -code have an easier time to query a function on its signature. - - -Purpose -======= - -An object representation of a function's call signature should provide -an easy way to introspect what a function expects as arguments. It -does not need to be a "live" representation, though; the signature can -be inferred once and stored without changes to the signature object -representation affecting the function it represents (but this is an -`Open Issues`_). - -Indirection of signature introspection can also occur. If a -decorator took a decorated function's signature object and set it on -the decorating function then introspection could be redirected to what -is actually expected instead of the typical ``*args, **kwargs`` -signature of decorating functions. 
+However, this object does not replace the existing function +metadata, which is used by Python itself to execute those +functions. The new metadata object is intended solely to make +function introspection easier for Python programmers. Signature Object ================ -The overall signature of an object is represented by the Signature -object. This object is to store a `Parameter object`_ for each -parameter in the signature. It is also to store any information -about the function itself that is pertinent to the signature. +A Signature object represents the call signature of a function and +its return annotation. For each parameter accepted by the function +it stores a `Parameter object`_ in its ``parameters`` collection. -A Signature object has the following structure attributes: +A Signature object has the following public attributes and methods: -* name : str - Name of the function. This is not fully qualified because - function objects for methods do not know the class they are - contained within. This makes functions and methods - indistinguishable from one another when passed to decorators, - preventing proper creation of a fully qualified name. -* var_args : str - Name of the variable positional parameter (i.e., ``*args``), if - present, or the empty string. -* var_kw_args : str - Name of the variable keyword parameter (i.e., ``**kwargs``), if - present, or the empty string. -* var_annotations: dict(str, object) - Dict that contains the annotations for the variable parameters. - The keys are of the variable parameter with values of the - annotation. If an annotation does not exist for a variable - parameter then the key does not exist in the dict. * return_annotation : object - If present, the attribute is set to the annotation for the return - type of the function. -* parameters : list(Parameter) - List of the parameters of the function as represented by - Parameter objects in the order of its definition (keyword-only - arguments are in the order listed by ``code.co_varnames``). -* bind(\*args, \*\*kwargs) -> dict(str, object) - Create a mapping from arguments to parameters. The keys are the - names of the parameter that an argument maps to with the value - being the value the parameter would have if this function was - called with the given arguments. + The annotation for the return type of the function if specified. + If the function has no annotation for its return type, this + attribute is not set. +* parameters : OrderedDict + An ordered mapping of parameters' names to the corresponding + Parameter objects (keyword-only arguments are in the same order + as listed in ``code.co_varnames``). +* bind(\*args, \*\*kwargs) -> BoundArguments + Creates a mapping from positional and keyword arguments to + parameters. Raises a ``TypeError`` if the passed arguments do + not match the signature. +* bind_partial(\*args, \*\*kwargs) -> BoundArguments + Works the same way as ``bind()``, but allows the omission + of some required arguments (mimics ``functools.partial`` + behavior.) Raises a ``TypeError`` if the passed arguments do + not match the signature. +* format(...) -> str + Formats the Signature object to a string. Optional arguments allow + for custom render functions for parameter names, + annotations and default values, along with custom separators. -Signature objects also have the following methods: +Signature implements the ``__str__`` method, which fallbacks to the +``Signature.format()`` call. 
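As a small illustration of the attributes described above (an editorial example, not part of the PEP; the function is made up and the names follow the description given here, so the version that finally ships in ``inspect`` may differ in detail)::

    from inspect import signature

    def frobnicate(a, b=10, *args, flag=False) -> bool:
        return bool(a)

    sig = signature(frobnicate)
    # 'parameters' is an ordered mapping of parameter name -> Parameter object
    for name, param in sig.parameters.items():
        print(name, param.kind)
    # the annotation supplied for the return type, since one was given
    print(sig.return_annotation)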
-* __getitem__(self, key : str) -> Parameter - Returns the Parameter object for the named parameter. -* __iter__(self) - Returns an iterator that returns Parameter objects in their - sequential order based on their 'position' attribute. +It's possible to test Signatures for equality. Two signatures +are equal when they have equal parameters and return annotations. -The Signature object is stored in the ``__signature__`` attribute of -a function. When it is to be created is discussed in -`Open Issues`_. +Changes to the Signature object, or to any of its data members, +do not affect the function itself. Parameter Object ================ -A function's signature is made up of several parameters. Python's -different kinds of parameters is quite large and rich and continues to -grow. Parameter objects represent any possible parameter. - -Originally the plan was to represent parameters using a list of -parameter names on the Signature object along with various dicts keyed -on parameter names to disseminate the various pieces of information -one can know about a parameter. But the decision was made to -incorporate all information about a parameter in a single object so -as to make extending the information easier. This was originally put -forth by Talin and the preferred form of Guido (as discussed at the -2006 Google Sprint). +Python's expressive syntax means functions can accept many different +kinds of parameters with many subtle semantic differences. We +propose a rich Parameter object designed to represent any possible +function parameter. The structure of the Parameter object is: -* name : (str | tuple(str)) - The name of the parameter as a string if it is not a tuple. If - the argument is a tuple then a tuple of strings is used. -* position : int - The position of the parameter within the signature of the - function (zero-indexed). For keyword-only parameters the position - value is arbitrary while not conflicting with positional - parameters. The suggestion of setting the attribute to None or -1 - to represent keyword-only parameters was rejected to prevent - variable type usage and as a possible point of errors, - respectively. -* default_value : object - The default value for the parameter, if present, else the - attribute does not exist. -* keyword_only : bool - True if the parameter is keyword-only, else False. -* annotation - Set to the annotation for the parameter. If ``has_annotation`` is - False then the attribute does not exist to prevent accidental use. +* name : str + The name of the parameter as a string. + +* default : object + The default value for the parameter, if specified. If the + parameter has no default value, this attribute is not set. + +* annotation : object + The annotation for the parameter if specified. If the + parameter has no annotation, this attribute is not set. + +* kind : str + Describes how argument values are bound to the parameter. + Possible values: + + * ``Parameter.POSITIONAL_ONLY`` - value must be supplied + as a positional argument. + + Python has no explicit syntax for defining positional-only + parameters, but many builtin and extension module functions + (especially those that accept only one or two parameters) + accept them. + + * ``Parameter.POSITIONAL_OR_KEYWORD`` - value may be + supplied as either a keyword or positional argument + (this is the standard binding behaviour for functions + implemented in Python.) + + * ``Parameter.KEYWORD_ONLY`` - value must be supplied + as a keyword argument. 
Keyword only parameters are those + which appear after a "*" or "\*args" entry in a Python + function definition. + + * ``Parameter.VAR_POSITIONAL`` - a tuple of positional + arguments that aren't bound to any other parameter. + This corresponds to a "\*args" parameter in a Python + function definition. + + * ``Parameter.VAR_KEYWORD`` - a dict of keyword arguments + that aren't bound to any other parameter. This corresponds + to a "\*\*kwds" parameter in a Python function definition. + +* implemented : bool + True if the parameter is implemented for use. Some platforms + implement functions but can't support specific parameters + (e.g. "mode" for ``os.mkdir``). Passing in an unimplemented + parameter may result in the parameter being ignored, + or in NotImplementedError being raised. It is intended that + all conditions where ``implemented`` may be False be + thoroughly documented. + +Two parameters are equal when all their attributes are equal. + + +BoundArguments Object +===================== + +Result of a ``Signature.bind`` call. Holds the mapping of arguments +to the function's parameters. + +Has the following public attributes: + +* arguments : OrderedDict + An ordered, mutable mapping of parameters' names to arguments' values. + Does not contain arguments' default values. +* args : tuple + Tuple of positional arguments values. Dynamically computed from + the 'arguments' attribute. +* kwargs : dict + Dict of keyword arguments values. Dynamically computed from + the 'arguments' attribute. + +The ``arguments`` attribute should be used in conjunction with +``Signature.parameters`` for any arguments processing purposes. + +``args`` and ``kwargs`` properties can be used to invoke functions: +:: + + def test(a, *, b): + ... + + sig = signature(test) + ba = sig.bind(10, b=20) + test(*ba.args, **ba.kwargs) Implementation ============== -An implementation can be found in Python's sandbox [#impl]_. -There is a function named ``signature()`` which -returns the value stored on the ``__signature__`` attribute if it -exists, else it creates the Signature object for the -function and sets ``__signature__``. For methods this is stored -directly on the im_func function object since that is what decorators -work with. +The implementation adds a new function ``signature()`` to the ``inspect`` +module. The function is the preferred way of getting a ``Signature`` for +a callable object. 
+ +The function implements the following algorithm: + + - If the object is not callable - raise a TypeError + + - If the object has a ``__signature__`` attribute and if it + is not ``None`` - return a deepcopy of it + + - If it is ``None`` and the object is an instance of + ``BuiltinFunction``, raise a ``ValueError`` + + - If it has a ``__wrapped__`` attribute, return + ``signature(object.__wrapped__)`` + + - If the object is a an instance of ``FunctionType`` construct + and return a new ``Signature`` for it + + - If the object is a method or a classmethod, construct and return + a new ``Signature`` object, with its first parameter (usually + ``self`` or ``cls``) removed + + - If the object is a staticmethod, construct and return + a new ``Signature`` object + + - If the object is an instance of ``functools.partial``, construct + a new ``Signature`` from its ``partial.func`` attribute, and + account for already bound ``partial.args`` and ``partial.kwargs`` + + - If the object is a class or metaclass: + + - If the object's type has a ``__call__`` method defined in + its MRO, return a Signature for it + + - If the object has a ``__new__`` method defined in its class, + return a Signature object for it + + - If the object has a ``__init__`` method defined in its class, + return a Signature object for it + + - Return ``signature(object.__call__)`` + +Note, that the ``Signature`` object is created in a lazy manner, and +is not automatically cached. If, however, the Signature object was +explicitly cached by the user, ``signature()`` returns a new deepcopy +of it on each invocation. + +An implementation for Python 3.3 can be found at [#impl]_. +The python issue tracking the patch is [#issue]_. + + +Design Considerations +===================== + +No implicit caching of Signature objects +---------------------------------------- + +The first PEP design had a provision for implicit caching of ``Signature`` +objects in the ``inspect.signature()`` function. However, this has the +following downsides: + + * If the ``Signature`` object is cached then any changes to the function + it describes will not be reflected in it. 
However, If the caching is + needed, it can be always done manually and explicitly + + * It is better to reserve the ``__signature__`` attribute for the cases + when there is a need to explicitly set to a ``Signature`` object that + is different from the actual one Examples ======== +Visualizing Callable Objects' Signature +--------------------------------------- + +Let's define some classes and functions: + +:: + + from inspect import signature + from functools import partial, wraps + + + class FooMeta(type): + def __new__(mcls, name, bases, dct, *, bar:bool=False): + return super().__new__(mcls, name, bases, dct) + + def __init__(cls, name, bases, dct, **kwargs): + return super().__init__(name, bases, dct) + + + class Foo(metaclass=FooMeta): + def __init__(self, spam:int=42): + self.spam = spam + + def __call__(self, a, b, *, c) -> tuple: + return a, b, c + + + def shared_vars(*shared_args): + """Decorator factory that defines shared variables that are + passed to every invocation of the function""" + + def decorator(f): + @wraps(f) + def wrapper(*args, **kwds): + full_args = shared_args + args + return f(*full_args, **kwds) + # Override signature + sig = wrapper.__signature__ = signature(f) + for __ in shared_args: + sig.parameters.popitem(last=False) + return wrapper + return decorator + + + @shared_vars({}) + def example(_state, a, b, c): + return _state, a, b, c + + + def format_signature(obj): + return str(signature(obj)) + + +Now, in the python REPL: + +:: + + >>> format_signature(FooMeta) + '(name, bases, dct, *, bar:bool=False)' + + >>> format_signature(Foo) + '(spam:int=42)' + + >>> format_signature(Foo.__call__) + '(self, a, b, *, c) -> tuple' + + >>> format_signature(Foo().__call__) + '(a, b, *, c) -> tuple' + + >>> format_signature(partial(Foo().__call__, 1, c=3)) + '(b, *, c=3) -> tuple' + + >>> format_signature(partial(partial(Foo().__call__, 1, c=3), 2, c=20)) + '(*, c=20) -> tuple' + + >>> format_signature(example) + '(a, b, c)' + + >>> format_signature(partial(example, 1, 2)) + '(c)' + + >>> format_signature(partial(partial(example, 1, b=2), c=3)) + '(b=2, c=3)' + + Annotation Checker ------------------ :: - def quack_check(fxn): - """Decorator to verify arguments and return value quack as they should. + import inspect + import functools - Positional arguments. - >>> @quack_check - ... def one_arg(x:int): pass - ... - >>> one_arg(42) - >>> one_arg('a') - Traceback (most recent call last): - ... - TypeError: 'a' does not quack like a + def checktypes(func): + '''Decorator to verify arguments and return types + Example: - *args - >>> @quack_check - ... def var_args(*args:int): pass - ... - >>> var_args(*[1,2,3]) - >>> var_args(*[1,'b',3]) - Traceback (most recent call last): - ... - TypeError: *args contains a a value that does not quack like a + >>> @checktypes + ... def test(a:int, b:str) -> int: + ... return int(a * b) - **kwargs - >>> @quack_check - ... def var_kw_args(**kwargs:int): pass - ... - >>> var_kw_args(**{'a': 1}) - >>> var_kw_args(**{'a': 'A'}) - Traceback (most recent call last): - ... - TypeError: **kwargs contains a value that does not quack like a + >>> test(10, '1') + 1111111111 - Return annotations. - >>> @quack_check - ... def returned(x) -> int: return x - ... - >>> returned(42) - 42 - >>> returned('a') - Traceback (most recent call last): - ... - TypeError: the return value 'a' does not quack like a + >>> test(10, 1) + Traceback (most recent call last): + ... 
+ ValueError: foo: wrong type of 'b' argument, 'str' expected, got 'int' + ''' - """ - # Get the signature; only needs to be calculated once. - sig = Signature(fxn) - def check(*args, **kwargs): - # Find out the variable -> value bindings. - bindings = sig.bind(*args, **kwargs) - # Check *args for the proper quack. + sig = inspect.signature(func) + + types = {} + for param in sig.parameters.values(): + # Iterate through function's parameters and build the list of + # arguments types try: - duck = sig.var_annotations[sig.var_args] - except KeyError: + type_ = param.annotation + except AttributeError: + continue + else: + if not inspect.isclass(type_): + # Not a type, skip it + continue + + types[param.name] = type_ + + # If the argument has a type specified, let's check that its + # default value (if present) conforms with the type. + try: + default = param.default + except AttributeError: + continue + else: + if not isinstance(default, type_): + raise ValueError("{func}: wrong type of a default value for {arg!r}". \ + format(func=func.__qualname__, arg=param.name)) + + def check_type(sig, arg_name, arg_type, arg_value): + # Internal function that encapsulates arguments type checking + if not isinstance(arg_value, arg_type): + raise ValueError("{func}: wrong type of {arg!r} argument, " \ + "{exp!r} expected, got {got!r}". \ + format(func=func.__qualname__, arg=arg_name, + exp=arg_type.__name__, got=type(arg_value).__name__)) + + @functools.wraps(func) + def wrapper(*args, **kwargs): + # Let's bind the arguments + ba = sig.bind(*args, **kwargs) + for arg_name, arg in ba.arguments.items(): + # And iterate through the bound arguments + try: + type_ = types[arg_name] + except KeyError: + continue + else: + # OK, we have a type for the argument, lets get the corresponding + # parameter description from the signature object + param = sig.parameters[arg_name] + if param.kind == param.VAR_POSITIONAL: + # If this parameter is a variable-argument parameter, + # then we need to check each of its values + for value in arg: + check_type(sig, arg_name, type_, value) + elif param.kind == param.VAR_KEYWORD: + # If this parameter is a variable-keyword-argument parameter: + for subname, value in arg.items(): + check_type(sig, arg_name + ':' + subname, type_, value) + else: + # And, finally, if this parameter a regular one: + check_type(sig, arg_name, type_, arg) + + result = func(*ba.args, **ba.kwargs) + # The last bit - let's check that the result is correct + try: + return_type = sig.return_annotation + except AttributeError: + # Looks like we don't have any restriction on the return type pass else: - # Check every value in *args. - for value in bindings[sig.var_args]: - if not isinstance(value, duck): - raise TypeError("*%s contains a a value that does not " - "quack like a %r" % - (sig.var_args, duck)) - # Remove it from the bindings so as to not check it again. - del bindings[sig.var_args] - # **kwargs. - try: - duck = sig.var_annotations[sig.var_kw_args] - except (KeyError, AttributeError): - pass - else: - # Check every value in **kwargs. - for value in bindings[sig.var_kw_args].values(): - if not isinstance(value, duck): - raise TypeError("**%s contains a value that does not " - "quack like a %r" % - (sig.var_kw_args, duck)) - # Remove from bindings so as to not check again. - del bindings[sig.var_kw_args] - # For each remaining variable ... - for var, value in bindings.items(): - # See if an annotation was set. 
- try: - duck = sig[var].annotation - except AttributeError: - continue - # Check that the value quacks like it should. - if not isinstance(value, duck): - raise TypeError('%r does not quack like a %s' % (value, duck)) - else: - # All the ducks quack fine; let the call proceed. - returned = fxn(*args, **kwargs) - # Check the return value. - try: - if not isinstance(returned, sig.return_annotation): - raise TypeError('the return value %r does not quack like ' - 'a %r' % (returned, - sig.return_annotation)) - except AttributeError: - pass - return returned - # Full-featured version would set function metadata. - return check + if isinstance(return_type, type) and not isinstance(result, return_type): + raise ValueError('{func}: wrong return type, {exp} expected, got {got}'. \ + format(func=func.__qualname__, exp=return_type.__name__, + got=type(result).__name__)) + return result + return wrapper -Open Issues -=========== -When to construct the Signature object? ---------------------------------------- +Render Function Signature to HTML +--------------------------------- -The Signature object can either be created in an eager or lazy -fashion. In the eager situation, the object can be created during -creation of the function object. In the lazy situation, one would -pass a function object to a function and that would generate the -Signature object and store it to ``__signature__`` if -needed, and then return the value of ``__signature__``. +:: + import inspect -Should ``Signature.bind`` return Parameter objects as keys? ------------------------------------------------------------ + def format_to_html(func): + sig = inspect.signature(func) -Instead of returning a dict with keys consisting of the name of the -parameters, would it be more useful to instead use Parameter -objects? The name of the argument can easily be retrieved from the -key (and the name would be used as the hash for a Parameter object). + html = sig.format(token_params_separator=',', + token_colon=':', + token_eq='=', + token_return_annotation='->', + token_left_paren='(', + token_right_paren=')', + token_kwonly_separator='*', + format_name=lambda name: ''+name+'') - -Have ``var_args`` and ``_var_kw_args`` default to ``None``? ------------------------------------------------------------- - -It has been suggested by Fred Drake that these two attributes have a -value of ``None`` instead of empty strings when they do not exist. -The answer to this question will influence what the defaults are for -other attributes as well. - - -Deprecate ``inspect.getargspec()`` and ``.formatargspec()``? -------------------------------------------------------------- - -Since the Signature object replicates the use of ``getargspec()`` -from the ``inspect`` module it might make sense to deprecate it in -2.6. ``formatargspec()`` could also go if Signature objects gained a -__str__ representation. - -Issue with that is types such as ``int``, when used as annotations, -do not lend themselves for output (e.g., ``""`` is the -string represenation for ``int``). The repr representation of types -would need to change in order to make this reasonable. - - -Have the objects be "live"? ---------------------------- - -Jim Jewett pointed out that Signature and Parameter objects could be -"live". That would mean requesting information would be done on the -fly instead of caching it on the objects. It would also allow for -mutating the function if the Signature or Parameter objects were -mutated. + return '{}'.format(html) References ========== -.. 
[#impl] pep362 directory in Python's sandbox - (http://svn.python.org/view/sandbox/trunk/pep362/) +.. [#impl] pep362 branch (https://bitbucket.org/1st1/cpython/overview) +.. [#issue] issue 15008 (http://bugs.python.org/issue15008) Copyright @@ -335,7 +471,6 @@ This document has been placed in the public domain. - .. Local Variables: diff --git a/pep-0398.txt b/pep-0398.txt --- a/pep-0398.txt +++ b/pep-0398.txt @@ -70,6 +70,7 @@ * PEP 417: Including mock in the Standard Library * PEP 418: Add monotonic time, performance counter, and process time functions * PEP 420: Implicit Namespace Packages +* PEP 421: Adding sys.implementation * PEP 3118: Revising the buffer protocol (protocol semantics finalised) * PEP 3144: IP Address manipulation library * PEP 3151: Reworking the OS and IO exception hierarchy @@ -87,8 +88,6 @@ * PEP 362: Function Signature Object * PEP 397: Python launcher for Windows -* PEP 421: Adding sys.implementation -* PEP 3143: Standard daemon process library * PEP 3154: Pickle protocol version 4 (Note that these are not accepted yet and even if they are, they might @@ -105,6 +104,7 @@ Deferred to post-3.3: * PEP 395: Qualified Names for Modules +* PEP 3143: Standard daemon process library * Breaking out standard library and docs in separate repos Copyright diff --git a/pep-0405.txt b/pep-0405.txt --- a/pep-0405.txt +++ b/pep-0405.txt @@ -4,7 +4,7 @@ Last-Modified: $Date$ Author: Carl Meyer BDFL-Delegate: Nick Coghlan -Status: Accepted +Status: Final Type: Standards Track Content-Type: text/x-rst Created: 13-Jun-2011 @@ -285,15 +285,15 @@ Current virtualenv handles include files in this way: -On POSIX systems where the installed Python's include files are found -in ``${base_prefix}/include/pythonX.X``, virtualenv creates -``${venv}/include/`` and symlink ``${base_prefix}/include/pythonX.X`` +On POSIX systems where the installed Python's include files are found in +``${base_prefix}/include/pythonX.X``, virtualenv creates +``${venv}/include/`` and symlinks ``${base_prefix}/include/pythonX.X`` to ``${venv}/include/pythonX.X``. On Windows, where Python's include files are found in ``{{ sys.prefix }}/Include`` and symlinks are not reliably available, virtualenv copies ``{{ sys.prefix }}/Include`` to ``${venv}/Include``. This ensures that extension modules built and -installed within the virtualenv will always find the Python header -files they need in the expected location relative to ``sys.prefix``. +installed within the virtualenv will always find the Python header files +they need in the expected location relative to ``sys.prefix``. This solution is not ideal when an extension module installs its own header files, as the default installation location for those header @@ -467,10 +467,10 @@ site-packages directories. The most notable case is probably `setuptools`_ and its fork -`distribute`_, which mostly use ``distutils``and ``sysconfig`` APIs, +`distribute`_, which mostly use ``distutils`` and ``sysconfig`` APIs, but do use ``sys.prefix`` directly to build up a list of site -directories for pre-flight checking where ``pth`` files can usefully -be placed. +directories for pre-flight checking where ``pth`` files can usefully be +placed. Otherwise, a `Google Code Search`_ turns up what appears to be a roughly even mix of usage between packages using ``sys.prefix`` to diff --git a/pep-0420.txt b/pep-0420.txt --- a/pep-0420.txt +++ b/pep-0420.txt @@ -3,7 +3,7 @@ Version: $Revision$ Last-Modified: $Date$ Author: Eric V. 
Smith -Status: Accepted +Status: Final Type: Standards Track Content-Type: text/x-rst Created: 19-Apr-2012 diff --git a/pep-0421.txt b/pep-0421.txt --- a/pep-0421.txt +++ b/pep-0421.txt @@ -4,7 +4,7 @@ Last-Modified: $Date$ Author: Eric Snow BDFL-Delegate: Barry Warsaw -Status: Accepted +Status: Final Type: Standards Track Content-Type: text/x-rst Created: 26-April-2012 diff --git a/pep-0422.txt b/pep-0422.txt new file mode 100644 --- /dev/null +++ b/pep-0422.txt @@ -0,0 +1,354 @@ +PEP: 422 +Title: Simple class initialisation hook +Version: $Revision$ +Last-Modified: $Date$ +Author: Nick Coghlan +Status: Draft +Type: Standards Track +Content-Type: text/x-rst +Created: 5-Jun-2012 +Python-Version: 3.4 +Post-History: 5-Jun-2012 + + +Abstract +======== + +In Python 2, the body of a class definition could modify the way a class +was created (or simply arrange to run other code after the class was created) +by setting the ``__metaclass__`` attribute in the class body. While doing +this implicitly from called code required the use of an implementation detail +(specifically, ``sys._getframes()``), it could also be done explicitly in a +fully supported fashion (for example, by passing ``locals()`` to an +function that calculated a suitable ``__metaclass__`` value) + +There is currently no corresponding mechanism in Python 3 that allows the +code executed in the class body to directly influence how the class object +is created. Instead, the class creation process is fully defined by the +class header, before the class body even begins executing. + +This PEP proposes a mechanism that will once again allow the body of a +class definition to more directly influence the way a class is created +(albeit in a more constrained fashion), as well as replacing some current +uses of metaclasses with a simpler, easier to understand alternative. + + +Background +========== + +For an already created class ``cls``, the term "metaclass" has a clear +meaning: it is the value of ``type(cls)``. + +*During* class creation, it has another meaning: it is also used to refer to +the metaclass hint that may be provided as part of the class definition. +While in many cases these two meanings end up referring to one and the same +object, there are two situations where that is not the case: + +* If the metaclass hint refers to a subclass of ``type``, then it is + considered as a candidate metaclass along with the metaclasses of all of + the parents of the class being defined. If a more appropriate metaclass is + found amongst the candidates, then it will be used instead of the one + given in the metaclass hint. +* Otherwise, an explicit metaclass hint is assumed to be a factory function + and is called directly to create the class object. In this case, the final + metaclass will be determined by the factory function definition. In the + typical case (where the factory functions just calls ``type``, or, in + Python 3.3 or later, ``types.new_class``) the actual metaclass is then + determined based on the parent classes. + +It is notable that only the actual metaclass is inherited - a factory +function used as a metaclass hook sees only the class currently being +defined, and is not invoked for any subclasses. + +In Python 3, the metaclass hint is provided using the ``metaclass=Meta`` +keyword syntax in the class header. 
This allows the ``__prepare__`` method +on the metaclass to be used to create the ``locals()`` namespace used during +execution of the class body (for example, specifying the use of +``collections.OrderedDict`` instead of a regular ``dict``). + +In Python 2, there was no ``__prepare__`` method (that API was added for +Python 3 by PEP 3115). Instead, a class body could set the ``__metaclass__`` +attribute, and the class creation process would extract that value from the +class namespace to use as the metaclass hint. There is `published code`_ that +makes use of this feature. + +Another new feature in Python 3 is the zero-argument form of the ``super()`` +builtin, introduced by PEP 3135. This feature uses an implicit ``__class__`` +reference to the class being defined to replace the "by name" references +required in Python 2. Just as code invoked during execution of a Python 2 +metaclass could not call methods that referenced the class by name (as the +name had not yet been bound in the containing scope), similarly, Python 3 +metaclasses cannot call methods that rely on the implicit ``__class__`` +reference (as it is not populated until after the metaclass has returned +control to the class creation machiner). + + +Proposal +======== + +This PEP proposes that a mechanism be added to Python 3 that meets the +following criteria: + +1. Restores the ability for class namespaces to have some influence on the + class creation process (above and beyond populating the namespace itself), + but potentially without the full flexibility of the Python 2 style + ``__metaclass__`` hook +2. Integrates nicely with class inheritance structures (including mixins and + multiple inheritance) +3. Integrates nicely with the implicit ``__class__`` reference and + zero-argument ``super()`` syntax introduced by PEP 3135 +4. Can be added to an existing base class without a significant risk of + introducing backwards compatibility problems + +One mechanism that can achieve this goal is to add a new class +initialisation hook, modelled directly on the existing instance +initialisation hook, but with the signature constrained to match that +of an ordinary class decorator. + +Specifically, it is proposed that class definitions be able to provide a +class initialisation hook as follows:: + + class Example: + @classmethod + def __init_class__(cls): + # This is invoked after the class is created, but before any + # explicit decorators are called + # The usual super() mechanisms are used to correctly support + # multiple inheritance. The decorator style invocation helps + # ensure that invoking the parent class is as simple as possible. + +If present on the created object, this new hook will be called by the class +creation machinery *after* the ``__class__`` reference has been initialised. +For ``types.new_class()``, it will be called as the last step before +returning the created class object. + +If a metaclass wishes to block class initialisation for some reason, it +must arrange for ``cls.__init_class__`` to trigger ``AttributeError``. + +This general proposal is not a new idea (it was first suggested for +inclusion in the language definition `more than 10 years ago`_, and a +similar mechanism has long been supported by `Zope's ExtensionClass`_), +but I believe the situation has changed sufficiently in recent years that +the idea is worth reconsidering. 
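To make the proposed hook more concrete, the following sketch (an editorial illustration, not text from the PEP) emulates it with an ordinary class decorator so that it runs on current Python; under the proposal the call marked below would instead be made automatically by the class creation machinery, and would also run for subclasses without any explicit decoration::

    registry = []

    def run_init_class(cls):
        # Under the proposal the interpreter would make this call itself,
        # after __class__ is initialised and before lexical decorators run.
        hook = getattr(cls, '__init_class__', None)
        if hook is not None:
            hook()
        return cls

    @run_init_class
    class Plugin:
        @classmethod
        def __init_class__(cls):
            registry.append(cls)

    print(registry)    # [<class '__main__.Plugin'>]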
+ + +Key Benefits +============ + + +Replaces many use cases for dynamic setting of ``__metaclass__`` +----------------------------------------------------------------- + +For use cases that don't involve completely replacing the defined class, +Python 2 code that dynamically set ``__metaclass__`` can now dynamically +set ``__init_class__`` instead. For more advanced use cases, introduction of +an explicit metaclass (possibly made available as a required base class) will +still be necessary in order to support Python 3. + + +Easier inheritance of definition time behaviour +----------------------------------------------- + +Understanding Python's metaclasses requires a deep understanding of +the type system and the class construction process. This is legitimately +seen as challenging, due to the need to keep multiple moving parts (the code, +the metaclass hint, the actual metaclass, the class object, instances of the +class object) clearly distinct in your mind. Even when you know the rules, +it's still easy to make a mistake if you're not being extremely careful. +An earlier version of this PEP actually included such a mistake: it +stated "instance of type" for a constraint that is actually "subclass of +type". + +Understanding the proposed class initialisation hook only requires +understanding decorators and ordinary method inheritance, which isn't +quite as daunting a task. The new hook provides a more gradual path +towards understanding all of the phases involved in the class definition +process. + + +Reduced chance of metaclass conflicts +------------------------------------- + +One of the big issues that makes library authors reluctant to use metaclasses +(even when they would be appropriate) is the risk of metaclass conflicts. +These occur whenever two unrelated metaclasses are used by the desired +parents of a class definition. This risk also makes it very difficult to +*add* a metaclass to a class that has previously been published without one. + +By contrast, adding an ``__init_class__`` method to an existing type poses +a similar level of risk to adding an ``__init__`` method: technically, there +is a risk of breaking poorly implemented subclasses, but when that occurs, +it is recognised as a bug in the subclass rather than the library author +breaching backwards compatibility guarantees. In fact, due to the constrained +signature of ``__init_class__``, the risk in this case is actually even +lower than in the case of ``__init__``. + + +Integrates cleanly with \PEP 3135 +--------------------------------- + +Unlike code that runs as part of the metaclass, code that runs as part of +the new hook will be able to freely invoke class methods that rely on the +implicit ``__class__`` reference introduced by PEP 3135, including methods +that use the zero argument form of ``super()``. + + +Alternatives +============ + + +The Python 3 Status Quo +----------------------- + +The Python 3 status quo already offers a great deal of flexibility. For +changes which only affect a single class definition and which can be +specified at the time the code is written, then class decorators can be +used to modify a class explicitly. Class decorators largely ignore class +inheritance and can make full use of methods that rely on the ``__class__`` +reference being populated. + +Using a custom metaclass provides the same level of power as it did in +Python 2. 
However, it's notable that, unlike class decorators, a metaclass +cannot call any methods that rely on the ``__class__`` reference, as that +reference is not populated until after the metaclass constructor returns +control to the class creation code. + +One major use case for metaclasses actually closely resembles the use of +class decorators. It occurs whenever a metaclass has an implementation that +uses the following pattern:: + + class Metaclass(type): + def __new__(meta, *args, **kwds): + cls = super(Metaclass, meta).__new__(meta, *args, **kwds) + # Do something with cls + return cls + +The key difference between this pattern and a class decorator is that it +is automatically inherited by subclasses. However, it also comes with a +major disadvantage: Python does not allow you to inherit from classes with +unrelated metaclasses. + +Thus, the status quo requires that developers choose between the following +two alternatives: + +* Use a class decorator, meaning that behaviour is not inherited and must be + requested explicitly on every subclass +* Use a metaclass, meaning that behaviour is inherited, but metaclass + conflicts may make integration with other libraries and frameworks more + difficult than it otherwise would be + +If this PEP is ultimately rejected, then this is the existing design that +will remain in place by default. + + +Restoring the Python 2 metaclass hook +------------------------------------- + +One simple alternative would be to restore support for a Python 2 style +``metaclass`` hook in the class body. This would be checked after the class +body was executed, potentially overwriting the metaclass hint provided in the +class header. + +The main attraction of such an approach is that it would simplify porting +Python 2 applications that make use of this hook (especially those that do +so dynamically). + +However, this approach does nothing to simplify the process of adding +*inherited* class definition time behaviour, nor does it interoperate +cleanly with the PEP 3135 ``__class__`` and ``super()`` semantics (as with +any metaclass based solution, the ``__metaclass__`` hook would have to run +before the ``__class__`` reference has been populated. + + +Dynamic class decorators +------------------------ + +The original version of this PEP was called "Dynamic class decorators" and +focused solely on a significantly more complicated proposal than that +presented in the current version. + +As with the current version, it proposed that a new step be added to the +class creation process, after the metaclass invocation to construct the +class instance and before the application of lexical decorators. However, +instead of a simple process of calling a single class method that relies +on normal inheritance mechanisms, it proposed a far more complicated +procedure that walked the class MRO looking for decorators stored in +iterable ``__decorators__`` attributes. + +Using the current version of the PEP, the scheme originally proposed could +be implemented as:: + + class DynamicDecorators: + @classmethod + def __init_class__(cls): + super(DynamicDecorators, cls).__init_class__() + for entry in reversed(cls.mro()): + decorators = entry.__dict__.get("__decorators__", ()) + for deco in reversed(decorators): + cls = deco(cls) + +Any subclasses of this type would automatically have the contents of any +``__decorators__`` attributes processed and invoked. 
+ +The mechanism in the current PEP is considered superior, as many issues +to do with ordering and the same decorator being invoked multiple times +just go away, as that kind of thing is taken care of through the use of an +ordinary class method invocation. + + +Automatic metaclass derivation +------------------------------ + +When no appropriate metaclass is found, it's theoretically possible to +automatically derive a metaclass for a new type based on the metaclass hint +and the metaclasses of the bases. + +While adding such a mechanism would reduce the risk of spurious metaclass +conflicts, it would do nothing to improve integration with PEP 3135, would +not help with porting Python 2 code that set ``__metaclass__`` dynamically +and would not provide a more straightforward inherited mechanism for invoking +additional operations after the class invocation is complete. + +In addition, there would still be a risk of metaclass conflicts in cases +where the base metaclasses were not written with multiple inheritance in +mind. In such situations, there's a chance of introducing latent defects +if one or more metaclasses are not invoked correctly. + + +Calling the new hook from ``type.__init__`` +------------------------------------------- + +Calling the new hook automatically from ``type.__init__``, would achieve most +of the goals of this PEP. However, using that approach would mean that +``__init_class__`` implementations would be unable to call any methods that +relied on the ``__class__`` reference (or used the zero-argument form of +``super()``), and could not make use of those features themselves. + + +References +========== + +.. _published code: + http://mail.python.org/pipermail/python-dev/2012-June/119878.html + +.. _more than 10 years ago: + http://mail.python.org/pipermail/python-dev/2001-November/018651.html + +.. _Zope's ExtensionClass: + http://docs.zope.org/zope_secrets/extensionclass.html + +Copyright +========= + +This document has been placed in the public domain. + + +.. + Local Variables: + mode: indented-text + indent-tabs-mode: nil + sentence-end-double-space: t + fill-column: 70 + coding: utf-8 + End: + -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Mon Jun 18 17:08:38 2012 From: python-checkins at python.org (richard.oudkerk) Date: Mon, 18 Jun 2012 17:08:38 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE1MTAx?= =?utf8?q?=3A_Make_pool_finalizer_avoid_joining_current_thread?= Message-ID: http://hg.python.org/cpython/rev/4c07b9c49b75 changeset: 77506:4c07b9c49b75 branch: 2.7 parent: 77502:8b38a81ba3bf user: Richard Oudkerk date: Mon Jun 18 15:37:31 2012 +0100 summary: Issue #15101: Make pool finalizer avoid joining current thread files: Lib/multiprocessing/pool.py | 9 ++++++--- Misc/NEWS | 2 ++ 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/Lib/multiprocessing/pool.py b/Lib/multiprocessing/pool.py --- a/Lib/multiprocessing/pool.py +++ b/Lib/multiprocessing/pool.py @@ -489,7 +489,8 @@ # We must wait for the worker handler to exit before terminating # workers because we don't want workers to be restarted behind our back. debug('joining worker handler') - worker_handler.join() + if threading.current_thread() is not worker_handler: + worker_handler.join(1e100) # Terminate workers which haven't already finished. 
if pool and hasattr(pool[0], 'terminate'): @@ -499,10 +500,12 @@ p.terminate() debug('joining task handler') - task_handler.join(1e100) + if threading.current_thread() is not task_handler: + task_handler.join(1e100) debug('joining result handler') - result_handler.join(1e100) + if threading.current_thread() is not result_handler: + result_handler.join(1e100) if pool and hasattr(pool[0], 'terminate'): debug('joining pool workers') diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -67,6 +67,8 @@ Library ------- +- Issue #15101: Make pool finalizer avoid joining current thread. + - Issue #15054: A bug in tokenize.tokenize that caused string literals with 'b' and 'br' prefixes to be incorrectly tokenized has been fixed. Patch by Serhiy Storchaka. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 18 17:08:39 2012 From: python-checkins at python.org (richard.oudkerk) Date: Mon, 18 Jun 2012 17:08:39 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogSXNzdWUgIzE1MTAx?= =?utf8?q?=3A_Make_pool_finalizer_avoid_joining_current_thread=2E?= Message-ID: http://hg.python.org/cpython/rev/e1cd1f430ff1 changeset: 77507:e1cd1f430ff1 branch: 3.2 parent: 77503:38e2a87c9051 user: Richard Oudkerk date: Mon Jun 18 15:54:57 2012 +0100 summary: Issue #15101: Make pool finalizer avoid joining current thread. files: Lib/multiprocessing/pool.py | 9 ++++++--- Misc/NEWS | 2 ++ 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/Lib/multiprocessing/pool.py b/Lib/multiprocessing/pool.py --- a/Lib/multiprocessing/pool.py +++ b/Lib/multiprocessing/pool.py @@ -493,7 +493,8 @@ # We must wait for the worker handler to exit before terminating # workers because we don't want workers to be restarted behind our back. debug('joining worker handler') - worker_handler.join() + if threading.current_thread() is not worker_handler: + worker_handler.join() # Terminate workers which haven't already finished. if pool and hasattr(pool[0], 'terminate'): @@ -503,10 +504,12 @@ p.terminate() debug('joining task handler') - task_handler.join() + if threading.current_thread() is not task_handler: + task_handler.join() debug('joining result handler') - result_handler.join() + if threading.current_thread() is not result_handler: + result_handler.join() if pool and hasattr(pool[0], 'terminate'): debug('joining pool workers') diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -70,6 +70,8 @@ Library ------- +- Issue #15101: Make pool finalizer avoid joining current thread. + - Issue #15036: Mailbox no longer throws an error if a flush is done between operations when removing or changing multiple items in mbox, MMDF, or Babyl mailboxes. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 18 17:08:40 2012 From: python-checkins at python.org (richard.oudkerk) Date: Mon, 18 Jun 2012 17:08:40 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Issue_=2315101=3A_Make_pool_finalizer_avoid_joining_current_?= =?utf8?q?thread=2E?= Message-ID: http://hg.python.org/cpython/rev/59e0a51c5fc3 changeset: 77508:59e0a51c5fc3 parent: 77505:18de5d7e1256 parent: 77507:e1cd1f430ff1 user: Richard Oudkerk date: Mon Jun 18 16:02:49 2012 +0100 summary: Issue #15101: Make pool finalizer avoid joining current thread. 
files: Lib/multiprocessing/pool.py | 9 ++++++--- Misc/NEWS | 2 ++ 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/Lib/multiprocessing/pool.py b/Lib/multiprocessing/pool.py --- a/Lib/multiprocessing/pool.py +++ b/Lib/multiprocessing/pool.py @@ -496,7 +496,8 @@ # We must wait for the worker handler to exit before terminating # workers because we don't want workers to be restarted behind our back. debug('joining worker handler') - worker_handler.join() + if threading.current_thread() is not worker_handler: + worker_handler.join() # Terminate workers which haven't already finished. if pool and hasattr(pool[0], 'terminate'): @@ -506,10 +507,12 @@ p.terminate() debug('joining task handler') - task_handler.join() + if threading.current_thread() is not task_handler: + task_handler.join() debug('joining result handler') - result_handler.join() + if threading.current_thread() is not result_handler: + result_handler.join() if pool and hasattr(pool[0], 'terminate'): debug('joining pool workers') diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -29,6 +29,8 @@ Library ------- +- Issue #15101: Make pool finalizer avoid joining current thread. + - Issue #14657: The frozen instance of importlib used for bootstrap is now also the module imported as importlib._bootstrap. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 18 17:17:09 2012 From: python-checkins at python.org (martin.v.loewis) Date: Mon, 18 Jun 2012 17:17:09 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Define_how_stand-alone_release?= =?utf8?q?s_can_update_bundled_releases=2E?= Message-ID: http://hg.python.org/peps/rev/e44585ca023f changeset: 4465:e44585ca023f user: Martin v. L?wis date: Mon Jun 18 17:17:05 2012 +0200 summary: Define how stand-alone releases can update bundled releases. files: pep-0397.txt | 4 +++- 1 files changed, 3 insertions(+), 1 deletions(-) diff --git a/pep-0397.txt b/pep-0397.txt --- a/pep-0397.txt +++ b/pep-0397.txt @@ -97,7 +97,9 @@ It contains a version resource matching the version number of the pythonXY.dll with which it is distributed. Independent installations will always only overwrite newer versions of the - launcher with older versions. + launcher with older versions. Stand-alone releases use + a release level of 0x10 in FIELD3 of the CPython release on which + they are based. The 32-bit distribution of Python will not install a 32-bit version of the launcher on a 64-bit system. 
-- Repository URL: http://hg.python.org/peps From python-checkins at python.org Mon Jun 18 18:55:38 2012 From: python-checkins at python.org (richard.oudkerk) Date: Mon, 18 Jun 2012 18:55:38 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315064=3A_Implement?= =?utf8?q?_context_manager_protocol_for_multiprocessing_types?= Message-ID: http://hg.python.org/cpython/rev/6d2a773d8e00 changeset: 77509:6d2a773d8e00 user: Richard Oudkerk date: Mon Jun 18 17:47:52 2012 +0100 summary: Issue #15064: Implement context manager protocol for multiprocessing types files: Doc/library/multiprocessing.rst | 17 ++++++ Lib/multiprocessing/connection.py | 18 ++++++- Lib/multiprocessing/dummy/connection.py | 12 ++++ Lib/multiprocessing/pool.py | 6 ++ Lib/test/test_multiprocessing.py | 35 +++++++++++++ Misc/NEWS | 2 + 6 files changed, 89 insertions(+), 1 deletions(-) diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst --- a/Doc/library/multiprocessing.rst +++ b/Doc/library/multiprocessing.rst @@ -834,6 +834,10 @@ Connection objects themselves can now be transferred between processes using :meth:`Connection.send` and :meth:`Connection.recv`. + .. versionadded:: 3.3 + Connection objects now support the context manager protocol -- see + :ref:`typecontextmanager`. :meth:`__enter__` returns the + connection object, and :meth:`__exit__` calls :meth:`close`. For example: @@ -1277,6 +1281,9 @@ The address used by the manager. + Manager objects support the context manager protocol -- see + :ref:`typecontextmanager`. :meth:`__enter__` returns the + manager object, and :meth:`__exit__` calls :meth:`shutdown`. .. class:: SyncManager @@ -1747,6 +1754,11 @@ Wait for the worker processes to exit. One must call :meth:`close` or :meth:`terminate` before using :meth:`join`. + .. versionadded:: 3.3 + Pool objects now support the context manager protocol -- see + :ref:`typecontextmanager`. :meth:`__enter__` returns the pool + object, and :meth:`__exit__` calls :meth:`terminate`. + .. class:: AsyncResult @@ -1911,6 +1923,11 @@ The address from which the last accepted connection came. If this is unavailable then it is ``None``. + .. versionadded:: 3.3 + Listener objects now support the context manager protocol -- see + :ref:`typecontextmanager`. :meth:`__enter__` returns the + listener object, and :meth:`__exit__` calls :meth:`close`. + .. function:: wait(object_list, timeout=None) Wait till an object in *object_list* is ready. Returns the list of diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py --- a/Lib/multiprocessing/connection.py +++ b/Lib/multiprocessing/connection.py @@ -257,6 +257,12 @@ self._check_readable() return self._poll(timeout) + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, exc_tb): + self.close() + if _winapi: @@ -436,6 +442,8 @@ Returns a `Connection` object. ''' + if self._listener is None: + raise IOError('listener is closed') c = self._listener.accept() if self._authkey: deliver_challenge(c, self._authkey) @@ -446,11 +454,19 @@ ''' Close the bound socket or named pipe of `self`. 
''' - return self._listener.close() + if self._listener is not None: + self._listener.close() + self._listener = None address = property(lambda self: self._listener._address) last_accepted = property(lambda self: self._listener._last_accepted) + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, exc_tb): + self.close() + def Client(address, family=None, authkey=None): ''' diff --git a/Lib/multiprocessing/dummy/connection.py b/Lib/multiprocessing/dummy/connection.py --- a/Lib/multiprocessing/dummy/connection.py +++ b/Lib/multiprocessing/dummy/connection.py @@ -53,6 +53,12 @@ address = property(lambda self: self._backlog_queue) + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, exc_tb): + self.close() + def Client(address): _in, _out = Queue(), Queue() @@ -85,3 +91,9 @@ def close(self): pass + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, exc_tb): + self.close() diff --git a/Lib/multiprocessing/pool.py b/Lib/multiprocessing/pool.py --- a/Lib/multiprocessing/pool.py +++ b/Lib/multiprocessing/pool.py @@ -522,6 +522,12 @@ debug('cleaning up worker %d' % p.pid) p.join() + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.terminate() + # # Class whose instances are returned by `Pool.apply_async()` # diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py --- a/Lib/test/test_multiprocessing.py +++ b/Lib/test/test_multiprocessing.py @@ -1719,6 +1719,15 @@ p.close() p.join() + def test_context(self): + if self.TYPE == 'processes': + L = list(range(10)) + expected = [sqr(i) for i in L] + with multiprocessing.Pool(2) as p: + r = p.map_async(sqr, L) + self.assertEqual(r.get(), expected) + self.assertRaises(AssertionError, p.map_async, sqr, L) + def raising(): raise KeyError("key") @@ -2266,6 +2275,22 @@ self.assertRaises(RuntimeError, reduction.recv_handle, conn) p.join() + def test_context(self): + a, b = self.Pipe() + + with a, b: + a.send(1729) + self.assertEqual(b.recv(), 1729) + if self.TYPE == 'processes': + self.assertFalse(a.closed) + self.assertFalse(b.closed) + + if self.TYPE == 'processes': + self.assertTrue(a.closed) + self.assertTrue(b.closed) + self.assertRaises(IOError, a.recv) + self.assertRaises(IOError, b.recv) + class _TestListener(BaseTestCase): ALLOWED_TYPES = ('processes',) @@ -2277,6 +2302,16 @@ self.assertRaises(OSError, self.connection.Listener, l.address, family) + def test_context(self): + with self.connection.Listener() as l: + with self.connection.Client(l.address) as c: + with l.accept() as d: + c.send(1729) + self.assertEqual(d.recv(), 1729) + + if self.TYPE == 'processes': + self.assertRaises(IOError, l.accept) + class _TestListenerClient(BaseTestCase): ALLOWED_TYPES = ('processes', 'threads') diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -29,6 +29,8 @@ Library ------- +- Issue #15064: Implement context manager protocol for multiprocessing types + - Issue #15101: Make pool finalizer avoid joining current thread. 
- Issue #14657: The frozen instance of importlib used for bootstrap is now -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 18 19:58:09 2012 From: python-checkins at python.org (stefan.krah) Date: Mon, 18 Jun 2012 19:58:09 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Add_comments_to_the_power_f?= =?utf8?q?unctions=2C_in_particular_to_=5Fmpd=5Fqpow=5Freal=28=29=2E?= Message-ID: http://hg.python.org/cpython/rev/360f9d483f94 changeset: 77510:360f9d483f94 user: Stefan Krah date: Mon Jun 18 19:57:23 2012 +0200 summary: Add comments to the power functions, in particular to _mpd_qpow_real(). files: Modules/_decimal/libmpdec/mpdecimal.c | 39 +++++++++++++- 1 files changed, 34 insertions(+), 5 deletions(-) diff --git a/Modules/_decimal/libmpdec/mpdecimal.c b/Modules/_decimal/libmpdec/mpdecimal.c --- a/Modules/_decimal/libmpdec/mpdecimal.c +++ b/Modules/_decimal/libmpdec/mpdecimal.c @@ -5984,8 +5984,10 @@ mpd_qfinalize(result, ctx, status); } -/* - * This is an internal function that does not check for NaNs. +/* + * If the exponent is infinite and base equals one, the result is one + * with a coefficient of length prec. Otherwise, result is undefined. + * Return the value of the comparison against one. */ static int _qcheck_pow_one_inf(mpd_t *result, const mpd_t *base, uint8_t resultsign, @@ -6006,7 +6008,7 @@ } /* - * If base equals one, calculate the correct power of one result. + * If abs(base) equals one, calculate the correct power of one result. * Otherwise, result is undefined. Return the value of the comparison * against 1. * @@ -6060,7 +6062,7 @@ /* * Detect certain over/underflow of x**y. - * ACL2 proof: pow_bounds.lisp. + * ACL2 proof: pow-bounds.lisp. * * Symbols: * @@ -6215,7 +6217,10 @@ } */ -/* The power function for real exponents */ +/* + * The power function for real exponents. + * Relative error: abs(result - e**y) < e**y * 1/5 * 10**(-prec - 1) + */ static void _mpd_qpow_real(mpd_t *result, const mpd_t *base, const mpd_t *exp, const mpd_context_t *ctx, uint32_t *status) @@ -6234,6 +6239,30 @@ workctx.round = MPD_ROUND_HALF_EVEN; workctx.allcr = ctx->allcr; + /* + * extra := MPD_EXPDIGITS = MPD_EXP_MAX_T + * wp := prec + 4 + extra + * abs(err) < 5 * 10**-wp + * y := log(base) * exp + * Calculate: + * 1) e**(y * (1 + err)**2) * (1 + err) + * = e**y * e**(y * (2*err + err**2)) * (1 + err) + * ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + * Relative error of the underlined term: + * 2) abs(e**(y * (2*err + err**2)) - 1) + * Case abs(y) >= 10**extra: + * 3) adjexp(y)+1 > log10(abs(y)) >= extra + * This triggers the Overflow/Underflow shortcut in _mpd_qexp(), + * so no further analysis is necessary. 
+ * Case abs(y) < 10**extra: + * 4) abs(y * (2*err + err**2)) < 1/5 * 10**(-prec - 2) + * Use (see _mpd_qexp): + * 5) abs(x) <= 9/10 * 10**-p ==> abs(e**x - 1) < 10**-p + * With 2), 4) and 5): + * 6) abs(e**(y * (2*err + err**2)) - 1) < 10**(-prec - 2) + * The complete relative error of 1) is: + * 7) abs(result - e**y) < e**y * 1/5 * 10**(-prec - 1) + */ mpd_qln(result, base, &workctx, &workctx.status); mpd_qmul(result, result, &texp, &workctx, &workctx.status); mpd_qexp(result, result, &workctx, status); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 18 22:31:52 2012 From: python-checkins at python.org (richard.oudkerk) Date: Mon, 18 Jun 2012 22:31:52 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315064=3A_Make_Base?= =?utf8?q?Manager=2E=5F=5Fenter=5F=5F=28=29_start_server_if_necessary=2E?= Message-ID: http://hg.python.org/cpython/rev/198382b4bcd0 changeset: 77511:198382b4bcd0 user: Richard Oudkerk date: Mon Jun 18 21:29:30 2012 +0100 summary: Issue #15064: Make BaseManager.__enter__() start server if necessary. files: Doc/library/multiprocessing.rst | 11 +++++-- Lib/multiprocessing/managers.py | 3 ++ Lib/test/test_multiprocessing.py | 28 +++++++++++++++----- 3 files changed, 32 insertions(+), 10 deletions(-) diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst --- a/Doc/library/multiprocessing.rst +++ b/Doc/library/multiprocessing.rst @@ -1281,9 +1281,14 @@ The address used by the manager. - Manager objects support the context manager protocol -- see - :ref:`typecontextmanager`. :meth:`__enter__` returns the - manager object, and :meth:`__exit__` calls :meth:`shutdown`. + .. versionchanged:: 3.3 + Manager objects support the context manager protocol -- see + :ref:`typecontextmanager`. :meth:`__enter__` starts the server + process (if it has not already started) and then returns the + manager object. :meth:`__exit__` calls :meth:`shutdown`. + + In previous versions :meth:`__enter__` did not start the + manager's server process if it was not already started. .. class:: SyncManager diff --git a/Lib/multiprocessing/managers.py b/Lib/multiprocessing/managers.py --- a/Lib/multiprocessing/managers.py +++ b/Lib/multiprocessing/managers.py @@ -561,6 +561,9 @@ conn.close() def __enter__(self): + if self._state.value == State.INITIAL: + self.start() + assert self._state.value == State.STARTED return self def __exit__(self, exc_type, exc_val, exc_tb): diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py --- a/Lib/test/test_multiprocessing.py +++ b/Lib/test/test_multiprocessing.py @@ -1888,7 +1888,27 @@ def test_mymanager(self): manager = MyManager() manager.start() - + self.common(manager) + manager.shutdown() + + # If the manager process exited cleanly then the exitcode + # will be zero. Otherwise (after a short timeout) + # terminate() is used, resulting in an exitcode of -SIGTERM. 
+ self.assertEqual(manager._process.exitcode, 0) + + def test_mymanager_context(self): + with MyManager() as manager: + self.common(manager) + self.assertEqual(manager._process.exitcode, 0) + + def test_mymanager_context_prestarted(self): + manager = MyManager() + manager.start() + with manager: + self.common(manager) + self.assertEqual(manager._process.exitcode, 0) + + def common(self, manager): foo = manager.Foo() bar = manager.Bar() baz = manager.baz() @@ -1911,12 +1931,6 @@ self.assertEqual(list(baz), [i*i for i in range(10)]) - manager.shutdown() - - # If the manager process exited cleanly then the exitcode - # will be zero. Otherwise (after a short timeout) - # terminate() is used, resulting in an exitcode of -SIGTERM. - self.assertEqual(manager._process.exitcode, 0) # # Test of connecting to a remote server and using xmlrpclib for serialization -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 18 22:31:53 2012 From: python-checkins at python.org (richard.oudkerk) Date: Mon, 18 Jun 2012 22:31:53 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315064=3A_Use_with-?= =?utf8?q?blocks_for_some_examples_in_docs=2E?= Message-ID: http://hg.python.org/cpython/rev/836d712461b3 changeset: 77512:836d712461b3 user: Richard Oudkerk date: Mon Jun 18 21:29:36 2012 +0100 summary: Issue #15064: Use with-blocks for some examples in docs. files: Doc/library/multiprocessing.rst | 110 +++++++++---------- 1 files changed, 51 insertions(+), 59 deletions(-) diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst --- a/Doc/library/multiprocessing.rst +++ b/Doc/library/multiprocessing.rst @@ -241,17 +241,16 @@ l.reverse() if __name__ == '__main__': - manager = Manager() - - d = manager.dict() - l = manager.list(range(10)) - - p = Process(target=f, args=(d, l)) - p.start() - p.join() - - print(d) - print(l) + with Manager() as manager: + d = manager.dict() + l = manager.list(range(10)) + + p = Process(target=f, args=(d, l)) + p.start() + p.join() + + print(d) + print(l) will print :: @@ -279,10 +278,10 @@ return x*x if __name__ == '__main__': - pool = Pool(processes=4) # start 4 worker processes - result = pool.apply_async(f, [10]) # evaluate "f(10)" asynchronously - print(result.get(timeout=1)) # prints "100" unless your computer is *very* slow - print(pool.map(f, range(10))) # prints "[0, 1, 4,..., 81]" + with Pool(processes=4) as pool # start 4 worker processes + result = pool.apply_async(f, [10]) # evaluate "f(10)" asynchronously + print(result.get(timeout=1)) # prints "100" unless your computer is *very* slow + print(pool.map(f, range(10))) # prints "[0, 1, 4,..., 81]" Reference @@ -1426,11 +1425,10 @@ MyManager.register('Maths', MathsClass) if __name__ == '__main__': - manager = MyManager() - manager.start() - maths = manager.Maths() - print(maths.add(4, 3)) # prints 7 - print(maths.mul(7, 8)) # prints 56 + with MyManager() as manager: + maths = manager.Maths() + print(maths.add(4, 3)) # prints 7 + print(maths.mul(7, 8)) # prints 56 Using a remote manager @@ -1798,21 +1796,20 @@ return x*x if __name__ == '__main__': - pool = Pool(processes=4) # start 4 worker processes - - result = pool.apply_async(f, (10,)) # evaluate "f(10)" asynchronously - print(result.get(timeout=1)) # prints "100" unless your computer is *very* slow - - print(pool.map(f, range(10))) # prints "[0, 1, 4,..., 81]" - - it = pool.imap(f, range(10)) - print(next(it)) # prints "0" - print(next(it)) # prints "1" - print(it.next(timeout=1)) # prints "4" unless your 
computer is *very* slow - - import time - result = pool.apply_async(time.sleep, (10,)) - print(result.get(timeout=1)) # raises TimeoutError + with Pool(processes=4) as pool: # start 4 worker processes + result = pool.apply_async(f, (10,)) # evaluate "f(10)" asynchronously + print(result.get(timeout=1)) # prints "100" unless your computer is *very* slow + + print(pool.map(f, range(10))) # prints "[0, 1, 4,..., 81]" + + it = pool.imap(f, range(10)) + print(next(it)) # prints "0" + print(next(it)) # prints "1" + print(it.next(timeout=1)) # prints "4" unless your computer is *very* slow + + import time + result = pool.apply_async(time.sleep, (10,)) + print(result.get(timeout=1)) # raises TimeoutError .. _multiprocessing-listeners-clients: @@ -1984,19 +1981,16 @@ from array import array address = ('localhost', 6000) # family is deduced to be 'AF_INET' - listener = Listener(address, authkey=b'secret password') - - conn = listener.accept() - print('connection accepted from', listener.last_accepted) - - conn.send([2.25, None, 'junk', float]) - - conn.send_bytes(b'hello') - - conn.send_bytes(array('i', [42, 1729])) - - conn.close() - listener.close() + + with Listener(address, authkey=b'secret password') as listener: + with listener.accept() as conn: + print('connection accepted from', listener.last_accepted) + + conn.send([2.25, None, 'junk', float]) + + conn.send_bytes(b'hello') + + conn.send_bytes(array('i', [42, 1729])) The following code connects to the server and receives some data from the server:: @@ -2005,17 +1999,15 @@ from array import array address = ('localhost', 6000) - conn = Client(address, authkey=b'secret password') - - print(conn.recv()) # => [2.25, None, 'junk', float] - - print(conn.recv_bytes()) # => 'hello' - - arr = array('i', [0, 0, 0, 0, 0]) - print(conn.recv_bytes_into(arr)) # => 8 - print(arr) # => array('i', [42, 1729, 0, 0, 0]) - - conn.close() + + with Client(address, authkey=b'secret password') as conn: + print(conn.recv()) # => [2.25, None, 'junk', float] + + print(conn.recv_bytes()) # => 'hello' + + arr = array('i', [0, 0, 0, 0, 0]) + print(conn.recv_bytes_into(arr)) # => 8 + print(arr) # => array('i', [42, 1729, 0, 0, 0]) The following code uses :func:`~multiprocessing.connection.wait` to wait for messages from multiple processes at once:: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Mon Jun 18 22:33:21 2012 From: python-checkins at python.org (kristjan.jonsson) Date: Mon, 18 Jun 2012 22:33:21 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315038=3A_Optimize_?= =?utf8?q?python_Locks_on_Windows?= Message-ID: http://hg.python.org/cpython/rev/978326f98316 changeset: 77513:978326f98316 user: Kristj?n Valur J?nsson date: Mon Jun 18 20:30:44 2012 +0000 summary: Issue #15038: Optimize python Locks on Windows Extract cross-platform condition variable support into a separate file and provide user-mode non-recursive locks for Windows. files: Misc/NEWS | 2 + PCbuild/pythoncore.vcxproj | 4 +- PCbuild/pythoncore.vcxproj.filters | 8 + Python/ceval_gil.h | 226 +--------- Python/condvar.h | 353 +++++++++++++++++ Python/thread_nt.h | 104 +++++ 6 files changed, 501 insertions(+), 196 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -26,6 +26,8 @@ - Issue #14673: Add Eric Snow's sys.implementation implementation. +- Issue #15038: Optimize python Locks on Windows. 
+ Library ------- diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -1,4 +1,4 @@ - +? @@ -481,6 +481,8 @@ + + diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -402,6 +402,13 @@ Python + + + Python + + + Python +
@@ -908,6 +915,7 @@ PC + diff --git a/Python/ceval_gil.h b/Python/ceval_gil.h --- a/Python/ceval_gil.h +++ b/Python/ceval_gil.h @@ -59,213 +59,49 @@ (Note: this mechanism is enabled with FORCE_SWITCHING above) */ -#ifndef _POSIX_THREADS -/* This means pthreads are not implemented in libc headers, hence the macro - not present in unistd.h. But they still can be implemented as an external - library (e.g. gnu pth in pthread emulation) */ -# ifdef HAVE_PTHREAD_H -# include /* _POSIX_THREADS */ -# endif +#include "condvar.h" +#ifndef Py_HAVE_CONDVAR +#error You need either a POSIX-compatible or a Windows system! #endif +#define MUTEX_T PyMUTEX_T +#define MUTEX_INIT(mut) \ + if (PyMUTEX_INIT(&(mut))) { \ + Py_FatalError("PyMUTEX_INIT(" #mut ") failed"); }; +#define MUTEX_FINI(mut) \ + if (PyMUTEX_FINI(&(mut))) { \ + Py_FatalError("PyMUTEX_FINI(" #mut ") failed"); }; +#define MUTEX_LOCK(mut) \ + if (PyMUTEX_LOCK(&(mut))) { \ + Py_FatalError("PyMUTEX_LOCK(" #mut ") failed"); }; +#define MUTEX_UNLOCK(mut) \ + if (PyMUTEX_UNLOCK(&(mut))) { \ + Py_FatalError("PyMUTEX_UNLOCK(" #mut ") failed"); }; -#ifdef _POSIX_THREADS - -/* - * POSIX support - */ - -#include - -#define ADD_MICROSECONDS(tv, interval) \ -do { \ - tv.tv_usec += (long) interval; \ - tv.tv_sec += tv.tv_usec / 1000000; \ - tv.tv_usec %= 1000000; \ -} while (0) - -/* We assume all modern POSIX systems have gettimeofday() */ -#ifdef GETTIMEOFDAY_NO_TZ -#define GETTIMEOFDAY(ptv) gettimeofday(ptv) -#else -#define GETTIMEOFDAY(ptv) gettimeofday(ptv, (struct timezone *)NULL) -#endif - -#define MUTEX_T pthread_mutex_t -#define MUTEX_INIT(mut) \ - if (pthread_mutex_init(&mut, NULL)) { \ - Py_FatalError("pthread_mutex_init(" #mut ") failed"); }; -#define MUTEX_FINI(mut) \ - if (pthread_mutex_destroy(&mut)) { \ - Py_FatalError("pthread_mutex_destroy(" #mut ") failed"); }; -#define MUTEX_LOCK(mut) \ - if (pthread_mutex_lock(&mut)) { \ - Py_FatalError("pthread_mutex_lock(" #mut ") failed"); }; -#define MUTEX_UNLOCK(mut) \ - if (pthread_mutex_unlock(&mut)) { \ - Py_FatalError("pthread_mutex_unlock(" #mut ") failed"); }; - -#define COND_T pthread_cond_t +#define COND_T PyCOND_T #define COND_INIT(cond) \ - if (pthread_cond_init(&cond, NULL)) { \ - Py_FatalError("pthread_cond_init(" #cond ") failed"); }; + if (PyCOND_INIT(&(cond))) { \ + Py_FatalError("PyCOND_INIT(" #cond ") failed"); }; #define COND_FINI(cond) \ - if (pthread_cond_destroy(&cond)) { \ - Py_FatalError("pthread_cond_destroy(" #cond ") failed"); }; + if (PyCOND_FINI(&(cond))) { \ + Py_FatalError("PyCOND_FINI(" #cond ") failed"); }; #define COND_SIGNAL(cond) \ - if (pthread_cond_signal(&cond)) { \ - Py_FatalError("pthread_cond_signal(" #cond ") failed"); }; + if (PyCOND_SIGNAL(&(cond))) { \ + Py_FatalError("PyCOND_SIGNAL(" #cond ") failed"); }; #define COND_WAIT(cond, mut) \ - if (pthread_cond_wait(&cond, &mut)) { \ - Py_FatalError("pthread_cond_wait(" #cond ") failed"); }; + if (PyCOND_WAIT(&(cond), &(mut))) { \ + Py_FatalError("PyCOND_WAIT(" #cond ") failed"); }; #define COND_TIMED_WAIT(cond, mut, microseconds, timeout_result) \ { \ - int r; \ - struct timespec ts; \ - struct timeval deadline; \ - \ - GETTIMEOFDAY(&deadline); \ - ADD_MICROSECONDS(deadline, microseconds); \ - ts.tv_sec = deadline.tv_sec; \ - ts.tv_nsec = deadline.tv_usec * 1000; \ - \ - r = pthread_cond_timedwait(&cond, &mut, &ts); \ - if (r == ETIMEDOUT) \ + int r = PyCOND_TIMEDWAIT(&(cond), &(mut), (microseconds)); \ + if (r < 0) \ + Py_FatalError("PyCOND_WAIT(" #cond ") failed"); \ + if (r) /* 1 == timeout, 2 == impl. 
can't say, so assume timeout */ \ timeout_result = 1; \ - else if (r) \ - Py_FatalError("pthread_cond_timedwait(" #cond ") failed"); \ else \ timeout_result = 0; \ } \ -#elif defined(NT_THREADS) - -/* - * Windows (2000 and later, as well as (hopefully) CE) support - */ - -#include - -#define MUTEX_T CRITICAL_SECTION -#define MUTEX_INIT(mut) do { \ - if (!(InitializeCriticalSectionAndSpinCount(&(mut), 4000))) \ - Py_FatalError("CreateMutex(" #mut ") failed"); \ -} while (0) -#define MUTEX_FINI(mut) \ - DeleteCriticalSection(&(mut)) -#define MUTEX_LOCK(mut) \ - EnterCriticalSection(&(mut)) -#define MUTEX_UNLOCK(mut) \ - LeaveCriticalSection(&(mut)) - -/* We emulate condition variables with a semaphore. - We use a Semaphore rather than an auto-reset event, because although - an auto-resent event might appear to solve the lost-wakeup bug (race - condition between releasing the outer lock and waiting) because it - maintains state even though a wait hasn't happened, there is still - a lost wakeup problem if more than one thread are interrupted in the - critical place. A semaphore solves that. - Because it is ok to signal a condition variable with no one - waiting, we need to keep track of the number of - waiting threads. Otherwise, the semaphore's state could rise - without bound. - - Generic emulations of the pthread_cond_* API using - Win32 functions can be found on the Web. - The following read can be edificating (or not): - http://www.cse.wustl.edu/~schmidt/win32-cv-1.html -*/ -typedef struct COND_T -{ - HANDLE sem; /* the semaphore */ - int n_waiting; /* how many are unreleased */ -} COND_T; - -__inline static void _cond_init(COND_T *cond) -{ - /* A semaphore with a large max value, The positive value - * is only needed to catch those "lost wakeup" events and - * race conditions when a timed wait elapses. - */ - if (!(cond->sem = CreateSemaphore(NULL, 0, 1000, NULL))) - Py_FatalError("CreateSemaphore() failed"); - cond->n_waiting = 0; -} - -__inline static void _cond_fini(COND_T *cond) -{ - BOOL ok = CloseHandle(cond->sem); - if (!ok) - Py_FatalError("CloseHandle() failed"); -} - -__inline static void _cond_wait(COND_T *cond, MUTEX_T *mut) -{ - ++cond->n_waiting; - MUTEX_UNLOCK(*mut); - /* "lost wakeup bug" would occur if the caller were interrupted here, - * but we are safe because we are using a semaphore wich has an internal - * count. - */ - if (WaitForSingleObject(cond->sem, INFINITE) == WAIT_FAILED) - Py_FatalError("WaitForSingleObject() failed"); - MUTEX_LOCK(*mut); -} - -__inline static int _cond_timed_wait(COND_T *cond, MUTEX_T *mut, - int us) -{ - DWORD r; - ++cond->n_waiting; - MUTEX_UNLOCK(*mut); - r = WaitForSingleObject(cond->sem, us / 1000); - if (r == WAIT_FAILED) - Py_FatalError("WaitForSingleObject() failed"); - MUTEX_LOCK(*mut); - if (r == WAIT_TIMEOUT) - --cond->n_waiting; - /* Here we have a benign race condition with _cond_signal. If the - * wait operation has timed out, but before we can acquire the - * mutex again to decrement n_waiting, a thread holding the mutex - * still sees a positive n_waiting value and may call - * ReleaseSemaphore and decrement n_waiting. - * This will cause n_waiting to be decremented twice. - * This is benign, though, because ReleaseSemaphore will also have - * been called, leaving the semaphore state positive. 
We may - * thus end up with semaphore in state 1, and n_waiting == -1, and - * the next time someone calls _cond_wait(), that thread will - * pass right through, decrementing the semaphore state and - * incrementing n_waiting, thus correcting the extra _cond_signal. - */ - return r == WAIT_TIMEOUT; -} - -__inline static void _cond_signal(COND_T *cond) { - /* NOTE: This must be called with the mutex held */ - if (cond->n_waiting > 0) { - if (!ReleaseSemaphore(cond->sem, 1, NULL)) - Py_FatalError("ReleaseSemaphore() failed"); - --cond->n_waiting; - } -} - -#define COND_INIT(cond) \ - _cond_init(&(cond)) -#define COND_FINI(cond) \ - _cond_fini(&(cond)) -#define COND_SIGNAL(cond) \ - _cond_signal(&(cond)) -#define COND_WAIT(cond, mut) \ - _cond_wait(&(cond), &(mut)) -#define COND_TIMED_WAIT(cond, mut, us, timeout_result) do { \ - (timeout_result) = _cond_timed_wait(&(cond), &(mut), us); \ -} while (0) - -#else - -#error You need either a POSIX-compatible or a Windows system! - -#endif /* _POSIX_THREADS, NT_THREADS */ /* Whether the GIL is already taken (-1 if uninitialized). This is atomic @@ -356,13 +192,13 @@ MUTEX_LOCK(switch_mutex); /* Not switched yet => wait */ if (_Py_atomic_load_relaxed(&gil_last_holder) == tstate) { - RESET_GIL_DROP_REQUEST(); + RESET_GIL_DROP_REQUEST(); /* NOTE: if COND_WAIT does not atomically start waiting when releasing the mutex, another thread can run through, take the GIL and drop it again, and reset the condition before we even had a chance to wait for it. */ COND_WAIT(switch_cond, switch_mutex); - } + } MUTEX_UNLOCK(switch_mutex); } #endif diff --git a/Python/condvar.h b/Python/condvar.h new file mode 100644 --- /dev/null +++ b/Python/condvar.h @@ -0,0 +1,353 @@ +/* + * Portable condition variable support for windows and pthreads. + * Everything is inline, this header can be included where needed. + * + * APIs generally return 0 on success and non-zero on error, + * and the caller needs to use its platform's error mechanism to + * discover the error (errno, or GetLastError()) + * + * Note that some implementations cannot distinguish between a + * condition variable wait time-out and successful wait. Most often + * the difference is moot anyway since the wait condition must be + * re-checked. + * PyCOND_TIMEDWAIT, in addition to returning negative on error, + * thus returns 0 on regular success, 1 on timeout + * or 2 if it can't tell. + */ + +#ifndef _CONDVAR_H_ +#define _CONDVAR_H_ + +#include "Python.h" + +#ifndef _POSIX_THREADS +/* This means pthreads are not implemented in libc headers, hence the macro + not present in unistd.h. But they still can be implemented as an external + library (e.g. 
gnu pth in pthread emulation) */ +# ifdef HAVE_PTHREAD_H +# include /* _POSIX_THREADS */ +# endif +#endif + +#ifdef _POSIX_THREADS +/* + * POSIX support + */ +#define Py_HAVE_CONDVAR + +#include + +#define PyCOND_ADD_MICROSECONDS(tv, interval) \ +do { \ + tv.tv_usec += (long) interval; \ + tv.tv_sec += tv.tv_usec / 1000000; \ + tv.tv_usec %= 1000000; \ +} while (0) + +/* We assume all modern POSIX systems have gettimeofday() */ +#ifdef GETTIMEOFDAY_NO_TZ +#define PyCOND_GETTIMEOFDAY(ptv) gettimeofday(ptv) +#else +#define PyCOND_GETTIMEOFDAY(ptv) gettimeofday(ptv, (struct timezone *)NULL) +#endif + +/* The following functions return 0 on success, nonzero on error */ +#define PyMUTEX_T pthread_mutex_t +#define PyMUTEX_INIT(mut) pthread_mutex_init((mut), NULL) +#define PyMUTEX_FINI(mut) pthread_mutex_destroy(mut) +#define PyMUTEX_LOCK(mut) pthread_mutex_lock(mut) +#define PyMUTEX_UNLOCK(mut) pthread_mutex_unlock(mut) + +#define PyCOND_T pthread_cond_t +#define PyCOND_INIT(cond) pthread_cond_init((cond), NULL) +#define PyCOND_FINI(cond) pthread_cond_destroy(cond) +#define PyCOND_SIGNAL(cond) pthread_cond_signal(cond) +#define PyCOND_BROADCAST(cond) pthread_cond_broadcast(cond) +#define PyCOND_WAIT(cond, mut) pthread_cond_wait((cond), (mut)) + +/* return 0 for success, 1 on timeout, -1 on error */ +Py_LOCAL_INLINE(int) +PyCOND_TIMEDWAIT(PyCOND_T *cond, PyMUTEX_T *mut, long us) +{ + int r; + struct timespec ts; + struct timeval deadline; + + PyCOND_GETTIMEOFDAY(&deadline); + PyCOND_ADD_MICROSECONDS(deadline, us); + ts.tv_sec = deadline.tv_sec; + ts.tv_nsec = deadline.tv_usec * 1000; + + r = pthread_cond_timedwait((cond), (mut), &ts); + if (r == ETIMEDOUT) + return 1; + else if (r) + return -1; + else + return 0; +} + +#elif defined(NT_THREADS) +/* + * Windows (XP, 2003 server and later, as well as (hopefully) CE) support + * + * Emulated condition variables ones that work with XP and later, plus + * example native support on VISTA and onwards. + */ +#define Py_HAVE_CONDVAR + + +/* include windows if it hasn't been done before */ +#define WIN32_LEAN_AND_MEAN +#include + +/* options */ +/* non-emulated condition variables are provided for those that want + * to target Windows Vista. Modify this macro to enable them. + */ +#ifndef _PY_EMULATED_WIN_CV +#define _PY_EMULATED_WIN_CV 1 /* use emulated condition variables */ +#endif + +/* fall back to emulation if not targeting Vista */ +#if !defined NTDDI_VISTA || NTDDI_VERSION < NTDDI_VISTA +#undef _PY_EMULATED_WIN_CV +#define _PY_EMULATED_WIN_CV 1 +#endif + + +#if _PY_EMULATED_WIN_CV + +/* The mutex is a CriticalSection object and + The condition variables is emulated with the help of a semaphore. + Semaphores are available on Windows XP (2003 server) and later. + We use a Semaphore rather than an auto-reset event, because although + an auto-resent event might appear to solve the lost-wakeup bug (race + condition between releasing the outer lock and waiting) because it + maintains state even though a wait hasn't happened, there is still + a lost wakeup problem if more than one thread are interrupted in the + critical place. A semaphore solves that, because its state is counted, + not Boolean. + Because it is ok to signal a condition variable with no one + waiting, we need to keep track of the number of + waiting threads. Otherwise, the semaphore's state could rise + without bound. This also helps reduce the number of "spurious wakeups" + that would otherwise happen. 
+ + Generic emulations of the pthread_cond_* API using + earlier Win32 functions can be found on the Web. + The following read can be edificating (or not): + http://www.cse.wustl.edu/~schmidt/win32-cv-1.html +*/ + +typedef CRITICAL_SECTION PyMUTEX_T; + +Py_LOCAL_INLINE(int) +PyMUTEX_INIT(PyMUTEX_T *cs) +{ + InitializeCriticalSection(cs); + return 0; +} + +Py_LOCAL_INLINE(int) +PyMUTEX_FINI(PyMUTEX_T *cs) +{ + DeleteCriticalSection(cs); + return 0; +} + +Py_LOCAL_INLINE(int) +PyMUTEX_LOCK(PyMUTEX_T *cs) +{ + EnterCriticalSection(cs); + return 0; +} + +Py_LOCAL_INLINE(int) +PyMUTEX_UNLOCK(PyMUTEX_T *cs) +{ + LeaveCriticalSection(cs); + return 0; +} + +/* The ConditionVariable object. From XP onwards it is easily emulated with + * a Semaphore + */ + +typedef struct _PyCOND_T +{ + HANDLE sem; + int waiting; +} PyCOND_T; + +Py_LOCAL_INLINE(int) +PyCOND_INIT(PyCOND_T *cv) +{ + /* A semaphore with a "large" max value, The positive value + * is only needed to catch those "lost wakeup" events and + * race conditions when a timed wait elapses. + */ + cv->sem = CreateSemaphore(NULL, 0, 100000, NULL); + if (cv->sem==NULL) + return -1; + cv->waiting = 0; + return 0; +} + +Py_LOCAL_INLINE(int) +PyCOND_FINI(PyCOND_T *cv) +{ + return CloseHandle(cv->sem) ? 0 : -1; +} + +/* this implementation can detect a timeout. Returns 1 on timeout, + * 0 otherwise (and -1 on error) + */ +Py_LOCAL_INLINE(int) +_PyCOND_WAIT_MS(PyCOND_T *cv, PyMUTEX_T *cs, DWORD ms) +{ + DWORD wait; + cv->waiting++; + PyMUTEX_UNLOCK(cs); + /* "lost wakeup bug" would occur if the caller were interrupted here, + * but we are safe because we are using a semaphore wich has an internal + * count. + */ + wait = WaitForSingleObject(cv->sem, ms); + PyMUTEX_LOCK(cs); + if (wait != WAIT_OBJECT_0) + --cv->waiting; + /* Here we have a benign race condition with PyCOND_SIGNAL. + * When failure occurs or timeout, it is possible that + * PyCOND_SIGNAL also decrements this value + * and signals releases the mutex. This is benign because it + * just means an extra spurious wakeup for a waiting thread. + */ + + if (wait == WAIT_FAILED) + return -1; + /* return 0 on success, 1 on timeout */ + return wait != WAIT_OBJECT_0; +} + +Py_LOCAL_INLINE(int) +PyCOND_WAIT(PyCOND_T *cv, PyMUTEX_T *cs) +{ + int result = _PyCOND_WAIT_MS(cv, cs, INFINITE); + return result >= 0 ? 0 : result; +} + +Py_LOCAL_INLINE(int) +PyCOND_TIMEDWAIT(PyCOND_T *cv, PyMUTEX_T *cs, long us) +{ + return _PyCOND_WAIT_MS(cv, cs, us/1000); +} + +Py_LOCAL_INLINE(int) +PyCOND_SIGNAL(PyCOND_T *cv) +{ + if (cv->waiting) { + /* notifying thread decreases the cv->waiting count so that + * a delay between notify and wakeup doesn't cause a number + * of extra ReleaseSemaphore calls + */ + cv->waiting--; + return ReleaseSemaphore(cv->sem, 1, NULL) ? 0 : -1; + } + return 0; +} + +Py_LOCAL_INLINE(int) +PyCOND_BROADCAST(PyCOND_T *cv) +{ + if (cv->waiting) { + return ReleaseSemaphore(cv->sem, cv->waiting, NULL) ? 
0 : -1; + cv->waiting = 0; + } + return 0; +} + +#else + +/* Use native Win7 primitives if build target is Win7 or higher */ + +/* SRWLOCK is faster and better than CriticalSection */ +typedef SRWLOCK PyMUTEX_T; + +Py_LOCAL_INLINE(int) +PyMUTEX_INIT(PyMUTEX_T *cs) +{ + InitializeSRWLock(cs); + return 0; +} + +Py_LOCAL_INLINE(int) +PyMUTEX_FINI(PyMUTEX_T *cs) +{ + return 0; +} + +Py_LOCAL_INLINE(int) +PyMUTEX_LOCK(PyMUTEX_T *cs) +{ + AcquireSRWLockExclusive(cs); + return 0; +} + +Py_LOCAL_INLINE(int) +PyMUTEX_UNLOCK(PyMUTEX_T *cs) +{ + ReleaseSRWLockExclusive(cs); + return 0; +} + + +typedef CONDITION_VARIABLE PyCOND_T; + +Py_LOCAL_INLINE(int) +PyCOND_INIT(PyCOND_T *cv) +{ + InitializeConditionVariable(cv); + return 0; +} +Py_LOCAL_INLINE(int) +PyCOND_FINI(PyCOND_T *cv) +{ + return 0; +} + +Py_LOCAL_INLINE(int) +PyCOND_WAIT(PyCOND_T *cv, PyMUTEX_T *cs) +{ + return SleepConditionVariableSRW(cv, cs, INFINITE, 0) ? 0 : -1; +} + +/* This implementation makes no distinction about timeouts. Signal + * 2 to indicate that we don't know. + */ +Py_LOCAL_INLINE(int) +PyCOND_TIMEDWAIT(PyCOND_T *cv, PyMUTEX_T *cs, long us) +{ + return SleepConditionVariableSRW(cv, cs, us/1000, 0) ? 2 : -1; +} + +Py_LOCAL_INLINE(int) +PyCOND_SIGNAL(PyCOND_T *cv) +{ + WakeConditionVariable(cv); + return 0; +} + +Py_LOCAL_INLINE(int) +PyCOND_BROADCAST(PyCOND_T *cv) +{ + WakeAllConditionVariable(cv); + return 0; +} + + +#endif /* _PY_EMULATED_WIN_CV */ + +#endif /* _POSIX_THREADS, NT_THREADS */ + +#endif /* _CONDVAR_H_ */ diff --git a/Python/thread_nt.h b/Python/thread_nt.h --- a/Python/thread_nt.h +++ b/Python/thread_nt.h @@ -9,6 +9,109 @@ #include #endif +/* options */ +#ifndef _PY_USE_CV_LOCKS +#define _PY_USE_CV_LOCKS 1 /* use locks based on cond vars */ +#endif + +/* Now, define a non-recursive mutex using either condition variables + * and critical sections (fast) or using operating system mutexes + * (slow) + */ + +#if _PY_USE_CV_LOCKS + +#include "condvar.h" + +typedef struct _NRMUTEX +{ + PyMUTEX_T cs; + PyCOND_T cv; + int locked; +} NRMUTEX; +typedef NRMUTEX *PNRMUTEX; + +PNRMUTEX +AllocNonRecursiveMutex() +{ + PNRMUTEX m = (PNRMUTEX)malloc(sizeof(NRMUTEX)); + if (!m) + return NULL; + if (PyCOND_INIT(&m->cv)) + goto fail; + if (PyMUTEX_INIT(&m->cs)) { + PyCOND_FINI(&m->cv); + goto fail; + } + m->locked = 0; + return m; +fail: + free(m); + return NULL; +} + +VOID +FreeNonRecursiveMutex(PNRMUTEX mutex) +{ + if (mutex) { + PyCOND_FINI(&mutex->cv); + PyMUTEX_FINI(&mutex->cs); + free(mutex); + } +} + +DWORD +EnterNonRecursiveMutex(PNRMUTEX mutex, DWORD milliseconds) +{ + DWORD result = WAIT_OBJECT_0; + if (PyMUTEX_LOCK(&mutex->cs)) + return WAIT_FAILED; + if (milliseconds == INFINITE) { + while (mutex->locked) { + if (PyCOND_WAIT(&mutex->cv, &mutex->cs)) { + result = WAIT_FAILED; + break; + } + } + } else if (milliseconds != 0) { + /* wait at least until the target */ + DWORD now, target = GetTickCount() + milliseconds; + while (mutex->locked) { + if (PyCOND_TIMEDWAIT(&mutex->cv, &mutex->cs, milliseconds*1000) < 0) { + result = WAIT_FAILED; + break; + } + now = GetTickCount(); + if (target <= now) + break; + milliseconds = target-now; + } + } + if (!mutex->locked) { + mutex->locked = 1; + result = WAIT_OBJECT_0; + } else if (result == WAIT_OBJECT_0) + result = WAIT_TIMEOUT; + /* else, it is WAIT_FAILED */ + PyMUTEX_UNLOCK(&mutex->cs); /* must ignore result here */ + return result; +} + +BOOL +LeaveNonRecursiveMutex(PNRMUTEX mutex) +{ + BOOL result; + if (PyMUTEX_LOCK(&mutex->cs)) + return FALSE; + mutex->locked = 0; + 
result = PyCOND_SIGNAL(&mutex->cv); + result &= PyMUTEX_UNLOCK(&mutex->cs); + return result; +} + +#else /* if ! _PY_USE_CV_LOCKS */ + +/* NR-locks based on a kernel mutex */ #define PNRMUTEX HANDLE PNRMUTEX @@ -35,6 +138,7 @@ { return ReleaseSemaphore(mutex, 1, NULL); } +#endif /* _PY_USE_CV_LOCKS */ long PyThread_get_thread_ident(void); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Jun 19 01:41:44 2012 From: python-checkins at python.org (brian.curtin) Date: Tue, 19 Jun 2012 01:41:44 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_=2314772=3A_Return_the_?= =?utf8?q?destination_from_some_shutil_functions=2E?= Message-ID: http://hg.python.org/cpython/rev/8281233ec648 changeset: 77514:8281233ec648 user: Brian Curtin date: Mon Jun 18 18:41:07 2012 -0500 summary: Fix #14772: Return the destination from some shutil functions. files: Doc/library/shutil.rst | 14 ++++++--- Lib/shutil.py | 13 +++++++-- Lib/test/test_shutil.py | 41 +++++++++++++++++++++++++++++ Misc/NEWS | 2 + 4 files changed, 62 insertions(+), 8 deletions(-) diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -50,7 +50,7 @@ .. function:: copyfile(src, dst, symlinks=False) Copy the contents (no metadata) of the file named *src* to a file named - *dst*. *dst* must be the complete target file name; look at + *dst* and return *dst*. *dst* must be the complete target file name; look at :func:`shutil.copy` for a copy that accepts a target directory path. If *src* and *dst* are the same files, :exc:`Error` is raised. @@ -91,7 +91,8 @@ .. function:: copy(src, dst, symlinks=False)) - Copy the file *src* to the file or directory *dst*. If *dst* is a directory, a + Copy the file *src* to the file or directory *dst* and return the file's + destination. If *dst* is a directory, a file with the same basename as *src* is created (or overwritten) in the directory specified. Permission bits are copied. *src* and *dst* are path names given as strings. If *symlinks* is true, symbolic links won't be @@ -102,7 +103,8 @@ .. function:: copy2(src, dst, symlinks=False) - Similar to :func:`shutil.copy`, but metadata is copied as well. This is + Similar to :func:`shutil.copy`, including that the destination is + returned, but metadata is copied as well. This is similar to the Unix command :program:`cp -p`. If *symlinks* is true, symbolic links won't be followed but recreated instead -- this resembles GNU's :program:`cp -P`. @@ -120,7 +122,8 @@ .. function:: copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2, ignore_dangling_symlinks=False) - Recursively copy an entire directory tree rooted at *src*. The destination + Recursively copy an entire directory tree rooted at *src*, returning the + destination directory. The destination directory, named by *dst*, must not already exist; it will be created as well as missing parent directories. Permissions and times of directories are copied with :func:`copystat`, individual files are copied using @@ -189,7 +192,8 @@ .. function:: move(src, dst) - Recursively move a file or directory (*src*) to another location (*dst*). + Recursively move a file or directory (*src*) to another location (*dst*) + and return the destination. If the destination is a directory or a symlink to a directory, then *src* is moved inside that directory. 
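Taken together, the documentation changes above mean the common copy and move helpers now hand back the path they created, so calls can be chained without recomputing the destination. A small usage sketch of the 3.3 behaviour (the file names are made up for the example)::

    import os, shutil, tempfile

    src_dir = tempfile.mkdtemp()
    dst_dir = tempfile.mkdtemp()
    src = os.path.join(src_dir, 'example.txt')
    with open(src, 'w') as f:
        f.write('hello')

    copied = shutil.copy(src, dst_dir)     # returns dst_dir/example.txt
    backup = shutil.copy2(copied, os.path.join(dst_dir, 'example.bak'))
    moved = shutil.move(backup, src_dir)   # returns src_dir/example.bak
    print(copied, moved)
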
diff --git a/Lib/shutil.py b/Lib/shutil.py --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -109,6 +109,7 @@ with open(src, 'rb') as fsrc: with open(dst, 'wb') as fdst: copyfileobj(fsrc, fdst) + return dst def copymode(src, dst, symlinks=False): """Copy mode bits from src to dst. @@ -197,7 +198,7 @@ pass def copy(src, dst, symlinks=False): - """Copy data and mode bits ("cp src dst"). + """Copy data and mode bits ("cp src dst"). Return the file's destination. The destination may be a directory. @@ -209,9 +210,11 @@ dst = os.path.join(dst, os.path.basename(src)) copyfile(src, dst, symlinks=symlinks) copymode(src, dst, symlinks=symlinks) + return dst def copy2(src, dst, symlinks=False): - """Copy data and all stat info ("cp -p src dst"). + """Copy data and all stat info ("cp -p src dst"). Return the file's + destination." The destination may be a directory. @@ -224,6 +227,7 @@ copyfile(src, dst, symlinks=symlinks) copystat(src, dst, symlinks=symlinks) _copyxattr(src, dst, symlinks=symlinks) + return dst def ignore_patterns(*patterns): """Function that can be used as copytree() ignore parameter. @@ -322,6 +326,7 @@ errors.extend((src, dst, str(why))) if errors: raise Error(errors) + return dst def rmtree(path, ignore_errors=False, onerror=None): """Recursively delete a directory tree. @@ -379,7 +384,8 @@ def move(src, dst): """Recursively move a file or directory to another location. This is - similar to the Unix "mv" command. + similar to the Unix "mv" command. Return the file or directory's + destination. If the destination is a directory or a symlink to a directory, the source is moved inside the directory. The destination path must not already @@ -423,6 +429,7 @@ else: copy2(src, real_dst) os.unlink(src) + return real_dst def _destinsrc(src, dst): src = abspath(src) diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -1095,6 +1095,38 @@ shutil.chown(dirname, user, group) check_chown(dirname, uid, gid) + def test_copy_return_value(self): + # copy and copy2 both return their destination path. + for fn in (shutil.copy, shutil.copy2): + src_dir = self.mkdtemp() + dst_dir = self.mkdtemp() + src = os.path.join(src_dir, 'foo') + write_file(src, 'foo') + rv = fn(src, dst_dir) + self.assertEqual(rv, os.path.join(dst_dir, 'foo')) + rv = fn(src, os.path.join(dst_dir, 'bar')) + self.assertEqual(rv, os.path.join(dst_dir, 'bar')) + + def test_copyfile_return_value(self): + # copytree returns its destination path. + src_dir = self.mkdtemp() + dst_dir = self.mkdtemp() + dst_file = os.path.join(dst_dir, 'bar') + src_file = os.path.join(src_dir, 'foo') + write_file(src_file, 'foo') + rv = shutil.copyfile(src_file, dst_file) + self.assertTrue(os.path.exists(rv)) + self.assertEqual(read_file(src_file), read_file(dst_file)) + + def test_copytree_return_value(self): + # copytree returns its destination path. 
+ src_dir = self.mkdtemp() + dst_dir = src_dir + "dest" + src = os.path.join(src_dir, 'foo') + write_file(src, 'foo') + rv = shutil.copytree(src_dir, dst_dir) + self.assertEqual(['foo'], os.listdir(rv)) + class TestMove(unittest.TestCase): @@ -1251,6 +1283,15 @@ self.assertTrue(os.path.islink(dst_link)) self.assertTrue(os.path.samefile(src, dst_link)) + def test_move_return_value(self): + rv = shutil.move(self.src_file, self.dst_dir) + self.assertEqual(rv, + os.path.join(self.dst_dir, os.path.basename(self.src_file))) + + def test_move_as_rename_return_value(self): + rv = shutil.move(self.src_file, os.path.join(self.dst_dir, 'bar')) + self.assertEqual(rv, os.path.join(self.dst_dir, 'bar')) + class TestCopyFile(unittest.TestCase): diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -31,6 +31,8 @@ Library ------- +- Issue #14772: Return destination values from some shutil functions. + - Issue #15064: Implement context manager protocol for multiprocessing types - Issue #15101: Make pool finalizer avoid joining current thread. -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Tue Jun 19 05:53:03 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Tue, 19 Jun 2012 05:53:03 +0200 Subject: [Python-checkins] Daily reference leaks (8281233ec648): sum=0 Message-ID: results for 8281233ec648 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogmBYxgZ', '-x'] From python-checkins at python.org Tue Jun 19 11:38:28 2012 From: python-checkins at python.org (larry.hastings) Date: Tue, 19 Jun 2012 11:38:28 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Committing_the_latest_changes_?= =?utf8?q?to_PEP_362_on_behalf_of_Yury_Selivanov=2E?= Message-ID: http://hg.python.org/peps/rev/659639095ace changeset: 4466:659639095ace user: Larry Hastings date: Tue Jun 19 02:38:15 2012 -0700 summary: Committing the latest changes to PEP 362 on behalf of Yury Selivanov. files: pep-0362.txt | 66 ++++++++++++--------------------------- 1 files changed, 21 insertions(+), 45 deletions(-) diff --git a/pep-0362.txt b/pep-0362.txt --- a/pep-0362.txt +++ b/pep-0362.txt @@ -58,20 +58,19 @@ of some required arguments (mimics ``functools.partial`` behavior.) Raises a ``TypeError`` if the passed arguments do not match the signature. -* format(...) -> str - Formats the Signature object to a string. Optional arguments allow - for custom render functions for parameter names, - annotations and default values, along with custom separators. -Signature implements the ``__str__`` method, which fallbacks to the -``Signature.format()`` call. - -It's possible to test Signatures for equality. Two signatures -are equal when they have equal parameters and return annotations. +It's possible to test Signatures for equality. Two signatures are +equal when their parameters are equal, their positional and +positional-only parameters appear in the same order, and they +have equal return annotations. Changes to the Signature object, or to any of its data members, do not affect the function itself. +Signature also implements ``__str__`` and ``__copy__`` methods. +The latter creates a shallow copy of Signature, with all Parameter +objects copied as well. + Parameter Object ================ @@ -125,16 +124,8 @@ that aren't bound to any other parameter. This corresponds to a "\*\*kwds" parameter in a Python function definition. 
-* implemented : bool - True if the parameter is implemented for use. Some platforms - implement functions but can't support specific parameters - (e.g. "mode" for ``os.mkdir``). Passing in an unimplemented - parameter may result in the parameter being ignored, - or in NotImplementedError being raised. It is intended that - all conditions where ``implemented`` may be False be - thoroughly documented. - -Two parameters are equal when all their attributes are equal. +Two parameters are equal when they have equal names, kinds, defaults, +and annotations. BoundArguments Object @@ -181,10 +172,7 @@ - If the object is not callable - raise a TypeError - If the object has a ``__signature__`` attribute and if it - is not ``None`` - return a deepcopy of it - - - If it is ``None`` and the object is an instance of - ``BuiltinFunction``, raise a ``ValueError`` + is not ``None`` - return a shallow copy of it - If it has a ``__wrapped__`` attribute, return ``signature(object.__wrapped__)`` @@ -218,7 +206,7 @@ Note, that the ``Signature`` object is created in a lazy manner, and is not automatically cached. If, however, the Signature object was -explicitly cached by the user, ``signature()`` returns a new deepcopy +explicitly cached by the user, ``signature()`` returns a new shallow copy of it on each invocation. An implementation for Python 3.3 can be found at [#impl]_. @@ -244,6 +232,15 @@ is different from the actual one +Some functions may not be introspectable +---------------------------------------- + +Some functions may not be introspectable in certain implementations of +Python. For example, in CPython, builtin functions defined in C provide +no metadata about their arguments. Adding support for them is out of +scope for this PEP. + + Examples ======== @@ -437,28 +434,6 @@ return wrapper -Render Function Signature to HTML ---------------------------------- - -:: - - import inspect - - def format_to_html(func): - sig = inspect.signature(func) - - html = sig.format(token_params_separator=',', - token_colon=':', - token_eq='=', - token_return_annotation='->', - token_left_paren='(', - token_right_paren=')', - token_kwonly_separator='*', - format_name=lambda name: ''+name+'') - - return '{}'.format(html) - - References ========== -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Tue Jun 19 12:11:11 2012 From: python-checkins at python.org (kristjan.jonsson) Date: Tue, 19 Jun 2012 12:11:11 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315038=3A?= Message-ID: http://hg.python.org/cpython/rev/110b38c36a31 changeset: 77515:110b38c36a31 user: Kristjan Valur Jonsson date: Tue Jun 19 10:10:09 2012 +0000 summary: Issue #15038: Fix incorrect test of the condition variable state, spotted by Richard Oudkerk. This could cause the internal condition variable to grow without bounds. files: Python/condvar.h | 18 +++++++++++++----- 1 files changed, 13 insertions(+), 5 deletions(-) diff --git a/Python/condvar.h b/Python/condvar.h --- a/Python/condvar.h +++ b/Python/condvar.h @@ -177,7 +177,7 @@ typedef struct _PyCOND_T { HANDLE sem; - int waiting; + int waiting; /* to allow PyCOND_SIGNAL to be a no-op */ } PyCOND_T; Py_LOCAL_INLINE(int) @@ -222,6 +222,10 @@ * PyCOND_SIGNAL also decrements this value * and signals releases the mutex. This is benign because it * just means an extra spurious wakeup for a waiting thread. + * ('waiting' corresponds to the semaphore's "negative" count and + * we may end up with e.g. (waiting == -1 && sem.count == 1). 
When + * a new thread comes along, it will pass right throuhgh, having + * adjusted it to (waiting == 0 && sem.count == 0). */ if (wait == WAIT_FAILED) @@ -246,10 +250,14 @@ Py_LOCAL_INLINE(int) PyCOND_SIGNAL(PyCOND_T *cv) { - if (cv->waiting) { + /* this test allows PyCOND_SIGNAL to be a no-op unless required + * to wake someone up, thus preventing an unbounded increase of + * the semaphore's internal counter. + */ + if (cv->waiting > 0) { /* notifying thread decreases the cv->waiting count so that - * a delay between notify and wakeup doesn't cause a number - * of extra ReleaseSemaphore calls + * a delay between notify and actual wakeup of the target thread + * doesn't cause a number of extra ReleaseSemaphore calls. */ cv->waiting--; return ReleaseSemaphore(cv->sem, 1, NULL) ? 0 : -1; @@ -260,7 +268,7 @@ Py_LOCAL_INLINE(int) PyCOND_BROADCAST(PyCOND_T *cv) { - if (cv->waiting) { + if (cv->waiting > 0) { return ReleaseSemaphore(cv->sem, cv->waiting, NULL) ? 0 : -1; cv->waiting = 0; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Jun 19 16:34:15 2012 From: python-checkins at python.org (local-hg) Date: Tue, 19 Jun 2012 16:34:15 +0200 Subject: [Python-checkins] =?utf8?q?hooks=3A_Add_a_diff-blacklist_option_t?= =?utf8?q?o_omit_selected_files_from_email_notification?= Message-ID: http://hg.python.org/hooks/rev/885b48eeae7d changeset: 81:885b48eeae7d user: Antoine Pitrou date: Tue Jun 19 16:34:15 2012 +0200 summary: Add a diff-blacklist option to omit selected files from email notification diffs. files: mail.py | 17 +++++++++++++++++ 1 files changed, 17 insertions(+), 0 deletions(-) diff --git a/mail.py b/mail.py --- a/mail.py +++ b/mail.py @@ -19,6 +19,7 @@ BASE = 'http://hg.python.org/' CSET_URL = BASE + '%s/rev/%s' + def send(sub, sender, to, body): msg = MIMEMultipart() msg['Subject'] = Header(sub, 'utf8') @@ -45,6 +46,19 @@ stripped.append(chunk) return stripped +def strip_blacklisted_files(chunks, blacklisted): + stripped = [] + for chunk in chunks: + lines = chunk.splitlines(True) + for i, line in enumerate(lines[:4]): + if (line.startswith('+++ b/') and + line[6:].rstrip() in blacklisted): + lines = lines[:i+1] + ['[stripped]\n'] + chunk = ''.join(lines) + break + stripped.append(chunk) + return stripped + def _incoming(ui, repo, **kwargs): # Ensure that no fancying of output is enabled (e.g. coloring) os.environ['TERM'] = 'dumb' @@ -57,6 +71,8 @@ else: colormod._styles.clear() + blacklisted = ui.config('mail', 'diff-blacklist', '').split() + displayer = cmdutil.changeset_printer(ui, repo, False, False, True) ctx = repo[kwargs['node']] displayer.show(ctx) @@ -83,6 +99,7 @@ body.append(' ' + line) body += ['', ''] diffchunks = strip_bin_diffs(diffchunks) + diffchunks = strip_blacklisted_files(diffchunks, blacklisted) body.append(''.join(chunk for chunk in diffchunks)) body.append('-- ') -- Repository URL: http://hg.python.org/hooks From python-checkins at python.org Tue Jun 19 16:37:01 2012 From: python-checkins at python.org (antoine.pitrou) Date: Tue, 19 Jun 2012 16:37:01 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315103=3A_remove_th?= =?utf8?q?e_NUL_character_=28serving_as_a_Mercurial_binary_marker=29?= Message-ID: http://hg.python.org/cpython/rev/7d86e207598f changeset: 77516:7d86e207598f user: Antoine Pitrou date: Tue Jun 19 16:33:39 2012 +0200 summary: Issue #15103: remove the NUL character (serving as a Mercurial binary marker) from Python/importlib.h. 
Instead the email notification hook uses a configuration option to omit importlib.h diffs. files: Python/freeze_importlib.py | 1 - Python/importlib.h | Bin 2 files changed, 0 insertions(+), 1 deletions(-) diff --git a/Python/freeze_importlib.py b/Python/freeze_importlib.py --- a/Python/freeze_importlib.py +++ b/Python/freeze_importlib.py @@ -24,7 +24,6 @@ lines.append('};\n') with open(output_path, 'w', encoding='utf-8') as output_file: output_file.write('\n'.join(lines)) - output_file.write('/* Mercurial binary marker: \x00 */') # Avoid a compiler warning for lack of EOL output_file.write('\n') diff --git a/Python/importlib.h b/Python/importlib.h index c9431e49e929cf16e5376c1c6b9e4ba09e891bda..0b633ab529225b8428d0d538954c52ecd3f43cfb GIT binary patch [stripped] -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Jun 19 17:03:25 2012 From: python-checkins at python.org (brian.curtin) Date: Tue, 19 Jun 2012 17:03:25 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Add_versionchanged_tags_for?= =?utf8?q?_=2314772_changes?= Message-ID: http://hg.python.org/cpython/rev/e8ea27ab9fa6 changeset: 77517:e8ea27ab9fa6 user: Brian Curtin date: Tue Jun 19 10:03:05 2012 -0500 summary: Add versionchanged tags for #14772 changes files: Doc/library/shutil.rst | 13 +++++++++++++ 1 files changed, 13 insertions(+), 0 deletions(-) diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -66,6 +66,8 @@ :exc:`IOError` used to be raised instead of :exc:`OSError`. Added *symlinks* argument. + .. versionchanged:: 3.3 + Added return of the *dst*. .. function:: copymode(src, dst, symlinks=False) @@ -101,6 +103,9 @@ .. versionchanged:: 3.3 Added *symlinks* argument. + .. versionchanged:: 3.3 + Added return of the *dst*. + .. function:: copy2(src, dst, symlinks=False) Similar to :func:`shutil.copy`, including that the destination is @@ -113,6 +118,9 @@ Added *symlinks* argument, try to copy extended file system attributes too (currently Linux only). + .. versionchanged:: 3.3 + Added return of the *dst*. + .. function:: ignore_patterns(\*patterns) This factory function creates a function that can be used as a callable for @@ -169,6 +177,8 @@ .. versionchanged:: 3.3 Copy metadata when *symlinks* is false. + .. versionchanged:: 3.3 + Added return of the *dst*. .. function:: rmtree(path, ignore_errors=False, onerror=None) @@ -211,6 +221,9 @@ Added explicit symlink handling for foreign filesystems, thus adapting it to the behavior of GNU's :program:`mv`. + .. versionchanged:: 3.3 + Added return of the *dst*. + .. function:: disk_usage(path) Return disk usage statistics about the given path as a :term:`named tuple` -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Jun 19 17:13:56 2012 From: python-checkins at python.org (martin.v.loewis) Date: Tue, 19 Jun 2012 17:13:56 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Incorporate_Terry=27s_and_Bria?= =?utf8?q?n=27s_feedback=2E?= Message-ID: http://hg.python.org/peps/rev/a57419aee37d changeset: 4467:a57419aee37d user: Martin v. L?wis date: Tue Jun 19 17:13:49 2012 +0200 summary: Incorporate Terry's and Brian's feedback. files: pep-0397.txt | 8 +++++--- 1 files changed, 5 insertions(+), 3 deletions(-) diff --git a/pep-0397.txt b/pep-0397.txt --- a/pep-0397.txt +++ b/pep-0397.txt @@ -96,8 +96,8 @@ with a reference counter. It contains a version resource matching the version number of the pythonXY.dll with which it is distributed. 
Independent - installations will always only overwrite newer versions of the - launcher with older versions. Stand-alone releases use + installations will overwrite older version + of the launcher with newer versions. Stand-alone releases use a release level of 0x10 in FIELD3 of the CPython release on which they are based. @@ -211,7 +211,9 @@ Two .ini files will be searched by the launcher - ``py.ini`` in the current user's "application data" directory (i.e. the directory returned - by calling the Windows function SHGetFolderPath with CSIDL_LOCAL_APPDATA) + by calling the Windows function SHGetFolderPath with CSIDL_LOCAL_APPDATA, + %USERPROFILE%\AppData\Local on Vista+, + %USERPROFILE%\Local Settings\Application Data on XP) and ``py.ini`` in the same directory as the launcher. The same .ini files are used for both the 'console' version of the launcher (i.e. py.exe) and for the 'windows' version (i.e. pyw.exe) -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Tue Jun 19 18:31:14 2012 From: python-checkins at python.org (kristjan.jonsson) Date: Tue, 19 Jun 2012 18:31:14 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315038=3A_Document_?= =?utf8?q?caveats_with_the_emulated_condition_variables=2E?= Message-ID: http://hg.python.org/cpython/rev/d7a72fdcc168 changeset: 77518:d7a72fdcc168 user: Kristjan Valur Jonsson date: Tue Jun 19 16:30:28 2012 +0000 summary: Issue #15038: Document caveats with the emulated condition variables. files: Python/condvar.h | 29 +++++++++++++++++++++++++++++ 1 files changed, 29 insertions(+), 0 deletions(-) diff --git a/Python/condvar.h b/Python/condvar.h --- a/Python/condvar.h +++ b/Python/condvar.h @@ -13,6 +13,28 @@ * PyCOND_TIMEDWAIT, in addition to returning negative on error, * thus returns 0 on regular success, 1 on timeout * or 2 if it can't tell. + * + * There are at least two caveats with using these condition variables, + * due to the fact that they may be emulated with Semaphores on + * Windows: + * 1) While PyCOND_SIGNAL() will wake up at least one thread, we + * cannot currently guarantee that it will be one of the threads + * already waiting in a PyCOND_WAIT() call. It _could_ cause + * the wakeup of a subsequent thread to try a PyCOND_WAIT(), + * including the thread doing the PyCOND_SIGNAL() itself. + * The same applies to PyCOND_BROADCAST(), if N threads are waiting + * then at least N threads will be woken up, but not necessarily + * those already waiting. + * For this reason, don't make the scheduling assumption that a + * specific other thread will get the wakeup signal + * 2) The _mutex_ must be held when calling PyCOND_SIGNAL() and + * PyCOND_BROADCAST(). + * While e.g. the posix standard strongly recommends that the mutex + * associated with the condition variable is held when a + * pthread_cond_signal() call is made, this is not a hard requirement, + * although scheduling will not be "reliable" if it isn't. Here + * the mutex is used for internal synchronization of the emulated + * Condition Variable. */ #ifndef _CONDVAR_H_ @@ -134,10 +156,17 @@ without bound. This also helps reduce the number of "spurious wakeups" that would otherwise happen. + This implementation still has the problem that the threads woken + with a "signal" aren't necessarily those that are already + waiting. It corresponds to listing 2 in: + http://birrell.org/andrew/papers/ImplementingCVs.pdf + Generic emulations of the pthread_cond_* API using earlier Win32 functions can be found on the Web. 
The following read can be edificating (or not): http://www.cse.wustl.edu/~schmidt/win32-cv-1.html + + See also */ typedef CRITICAL_SECTION PyMUTEX_T; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Tue Jun 19 22:32:58 2012 From: python-checkins at python.org (antoine.pitrou) Date: Tue, 19 Jun 2012 22:32:58 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314928=3A_Fix_impor?= =?utf8?q?tlib_bootstrap_issues_by_using_a_custom_executable?= Message-ID: http://hg.python.org/cpython/rev/c3616595dada changeset: 77519:c3616595dada user: Antoine Pitrou date: Tue Jun 19 22:29:35 2012 +0200 summary: Issue #14928: Fix importlib bootstrap issues by using a custom executable (Modules/_freeze_importlib) to build Python/importlib.h. files: .hgignore | 1 + Include/pythonrun.h | 3 + Makefile.pre.in | 31 +- Misc/NEWS | 6 + Modules/_freeze_importlib.c | 131 ++ Python/freeze_importlib.py | 39 - Python/importlib.h | 1281 +++++++++++----------- Python/pythonrun.c | 11 +- 8 files changed, 806 insertions(+), 697 deletions(-) diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -79,6 +79,7 @@ PCbuild/amd64 BuildLog.htm __pycache__ +Modules/_freeze_importlib Modules/_testembed .coverage coverage/ diff --git a/Include/pythonrun.h b/Include/pythonrun.h --- a/Include/pythonrun.h +++ b/Include/pythonrun.h @@ -30,6 +30,9 @@ PyAPI_FUNC(void) Py_Initialize(void); PyAPI_FUNC(void) Py_InitializeEx(int); +#ifndef Py_LIMITED_API +PyAPI_FUNC(void) _Py_InitializeEx_Private(int, int); +#endif PyAPI_FUNC(void) Py_Finalize(void); PyAPI_FUNC(int) Py_IsInitialized(void); PyAPI_FUNC(PyThreadState *) Py_NewInterpreter(void); diff --git a/Makefile.pre.in b/Makefile.pre.in --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -324,7 +324,6 @@ Python/codecs.o \ Python/dynamic_annotations.o \ Python/errors.o \ - Python/frozen.o \ Python/frozenmain.o \ Python/future.o \ Python/getargs.o \ @@ -410,7 +409,7 @@ ########################################################################## # objects that get linked into the Python library -LIBRARY_OBJS= \ +LIBRARY_OBJS_OMIT_FROZEN= \ Modules/getbuildinfo.o \ $(PARSER_OBJS) \ $(OBJECT_OBJS) \ @@ -419,6 +418,10 @@ $(SIGNAL_OBJS) \ $(MODOBJS) +LIBRARY_OBJS= \ + $(LIBRARY_OBJS_OMIT_FROZEN) \ + Python/frozen.o + ######################################################################### # Rules @@ -478,7 +481,7 @@ $(AR) $(ARFLAGS) $@ Modules/getbuildinfo.o $(AR) $(ARFLAGS) $@ $(PARSER_OBJS) $(AR) $(ARFLAGS) $@ $(OBJECT_OBJS) - $(AR) $(ARFLAGS) $@ $(PYTHON_OBJS) + $(AR) $(ARFLAGS) $@ $(PYTHON_OBJS) Python/frozen.o $(AR) $(ARFLAGS) $@ $(MODULE_OBJS) $(SIGNAL_OBJS) $(AR) $(ARFLAGS) $@ $(MODOBJS) $(RANLIB) $@ @@ -578,18 +581,14 @@ ############################################################################ # Importlib -Python/importlib.h: $(srcdir)/Lib/importlib/_bootstrap.py $(srcdir)/Python/freeze_importlib.py - @if test -f ./$(BUILDPYTHON); then \ - $(RUNSHARED) ./$(BUILDPYTHON) $(srcdir)/Python/freeze_importlib.py \ - $(srcdir)/Lib/importlib/_bootstrap.py Python/importlib.h; \ - else \ - echo "----------------------------------------------------------"; \ - echo "Python/importlib.h needs to be rebuilt, but no interpreter"; \ - echo "is available to do so. Leaving the previous version in"; \ - echo "place. 
You may want to run ''make'' a second time after"; \ - echo "this build is complete."; \ - echo "----------------------------------------------------------"; \ - fi +Modules/_freeze_importlib: Modules/_freeze_importlib.o $(LIBRARY_OBJS_OMIT_FROZEN) + $(LINKCC) $(PY_LDFLAGS) -o $@ Modules/_freeze_importlib.o $(LIBRARY_OBJS_OMIT_FROZEN) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST) + +Python/importlib.h: $(srcdir)/Lib/importlib/_bootstrap.py Modules/_freeze_importlib.c + $(MAKE) Modules/_freeze_importlib + ./Modules/_freeze_importlib \ + $(srcdir)/Lib/importlib/_bootstrap.py Python/importlib.h + ############################################################################ # Special rules for object files @@ -1389,7 +1388,7 @@ find build -name 'fficonfig.py' -exec rm -f {} ';' || true -rm -f Lib/lib2to3/*Grammar*.pickle -rm -f $(SYSCONFIGDATA) - -rm -f Modules/_testembed + -rm -f Modules/_testembed Modules/_freeze_importlib profile-removal: find . -name '*.gc??' -exec rm -f {} ';' diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -154,6 +154,12 @@ - Issue #14963: Add test cases for exception handling behaviour in contextlib.ExitStack (Initial patch by Alon Horev) +Build +----- + +- Issue #14928: Fix importlib bootstrap issues by using a custom executable + (Modules/_freeze_importlib) to build Python/importlib.h. + What's New in Python 3.3.0 Alpha 4? =================================== diff --git a/Modules/_freeze_importlib.c b/Modules/_freeze_importlib.c new file mode 100644 --- /dev/null +++ b/Modules/_freeze_importlib.c @@ -0,0 +1,131 @@ +/* This is built as a stand-alone executable by the Makefile, and helps turn + Lib/importlib/_bootstrap.py into a frozen module in Python/importlib.h +*/ + +#include +#include + +#include +#include +#include +#include + + +/* To avoid a circular dependency on frozen.o, we create our own structure + of frozen modules instead, left deliberately blank so as to avoid + unintentional import of a stale version of _frozen_importlib. */ + +static struct _frozen _PyImport_FrozenModules[] = { + {0, 0, 0} /* sentinel */ +}; + +struct _frozen *PyImport_FrozenModules = _PyImport_FrozenModules; + + +const char header[] = "/* Auto-generated by Modules/_freeze_importlib.c */"; + +int +main(int argc, char *argv[]) +{ + char *inpath, *outpath; + FILE *infile, *outfile = NULL; + struct stat st; + size_t text_size, data_size, n; + char *text, *data; + PyObject *code, *marshalled; + + if (argc != 3) { + fprintf(stderr, "need to specify input and output paths\n"); + return 2; + } + inpath = argv[1]; + outpath = argv[2]; + infile = fopen(inpath, "rb"); + if (infile == NULL) { + fprintf(stderr, "cannot open '%s' for reading\n", inpath); + return 1; + } + if (fstat(fileno(infile), &st)) { + fclose(infile); + fprintf(stderr, "cannot fstat '%s'\n", inpath); + return 1; + } + text_size = st.st_size; + text = (char *) malloc(text_size + 1); + if (text == NULL) { + fclose(infile); + fprintf(stderr, "could not allocate %ld bytes\n", (long) text_size); + return 1; + } + n = fread(text, 1, text_size, infile); + fclose(infile); + infile = NULL; + if (n < text_size) { + fprintf(stderr, "read too short: got %ld instead of %ld bytes\n", + (long) n, (long) text_size); + return 1; + } + text[text_size] = '\0'; + + Py_NoUserSiteDirectory++; + Py_NoSiteFlag++; + Py_IgnoreEnvironmentFlag++; + + Py_SetProgramName(L"./_freeze_importlib"); + /* Don't install importlib, since it could execute outdated bytecode. 
*/ + _Py_InitializeEx_Private(1, 0); + + code = Py_CompileStringExFlags(text, "", + Py_file_input, NULL, 0); + if (code == NULL) + goto error; + marshalled = PyMarshal_WriteObjectToString(code, Py_MARSHAL_VERSION); + Py_DECREF(code); + if (marshalled == NULL) + goto error; + + assert(PyBytes_CheckExact(marshalled)); + data = PyBytes_AS_STRING(marshalled); + data_size = PyBytes_GET_SIZE(marshalled); + + outfile = fopen(outpath, "wb"); + if (outfile == NULL) { + fprintf(stderr, "cannot open '%s' for writing\n", outpath); + return 1; + } + fprintf(outfile, "%s\n", header); + fprintf(outfile, "unsigned char _Py_M__importlib[] = {\n"); + for (n = 0; n < data_size; n += 16) { + size_t i, end = Py_MIN(n + 16, data_size); + fprintf(outfile, " "); + for (i = n; i < end; i++) { + fprintf(outfile, "%d,", (int) data[i]); + } + fprintf(outfile, "\n"); + } + fprintf(outfile, "};\n"); + + Py_DECREF(marshalled); + + Py_Finalize(); + if (infile) + fclose(infile); + if (outfile) { + if (ferror(outfile)) { + fprintf(stderr, "error when writing to '%s'\n", outpath); + fclose(outfile); + return 1; + } + fclose(outfile); + } + return 0; + +error: + PyErr_Print(); + Py_Finalize(); + if (infile) + fclose(infile); + if (outfile) + fclose(outfile); + return 1; +} diff --git a/Python/freeze_importlib.py b/Python/freeze_importlib.py deleted file mode 100644 --- a/Python/freeze_importlib.py +++ /dev/null @@ -1,39 +0,0 @@ -#! /usr/bin/env python -"""Freeze importlib for use as the implementation of import.""" -import marshal - - -header = """/* Auto-generated by Python/freeze_importlib.py */""" - - -def main(input_path, output_path): - with open(input_path, 'r', encoding='utf-8') as input_file: - source = input_file.read() - - code = compile(source, '', 'exec') - - lines = [header] - lines.append('unsigned char _Py_M__importlib[] = {') - data = marshal.dumps(code) - # Code from Tools/freeze/makefreeze.py:writecode() - for i in range(0, len(data), 16): - line = [' '] - for c in data[i:i+16]: - line.append('%d,' % c) - lines.append(''.join(line)) - lines.append('};\n') - with open(output_path, 'w', encoding='utf-8') as output_file: - output_file.write('\n'.join(lines)) - # Avoid a compiler warning for lack of EOL - output_file.write('\n') - - -if __name__ == '__main__': - import sys - - args = sys.argv[1:] - if len(args) != 2: - print('Need to specify input and output file paths', file=sys.stderr) - sys.exit(1) - - main(*args) diff --git a/Python/importlib.h b/Python/importlib.h --- a/Python/importlib.h +++ b/Python/importlib.h [stripped] diff --git a/Python/pythonrun.c b/Python/pythonrun.c --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -242,7 +242,7 @@ void -Py_InitializeEx(int install_sigs) +_Py_InitializeEx_Private(int install_sigs, int install_importlib) { PyInterpreterState *interp; PyThreadState *tstate; @@ -363,6 +363,9 @@ /* Initialize _warnings. 
*/ _PyWarnings_Init(); + if (!install_importlib) + return; + import_init(interp, sysmod); _PyTime_Init(); @@ -393,6 +396,12 @@ } void +Py_InitializeEx(int install_sigs) +{ + _Py_InitializeEx_Private(install_sigs, 1); +} + +void Py_Initialize(void) { Py_InitializeEx(1); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Jun 20 01:51:45 2012 From: python-checkins at python.org (nadeem.vawda) Date: Wed, 20 Jun 2012 01:51:45 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Fix_GzipFile=27?= =?utf8?q?s_handling_of_filenames_given_as_bytes_objects=2E?= Message-ID: http://hg.python.org/cpython/rev/e044fa016c85 changeset: 77520:e044fa016c85 branch: 3.2 parent: 77507:e1cd1f430ff1 user: Nadeem Vawda date: Wed Jun 20 01:35:22 2012 +0200 summary: Fix GzipFile's handling of filenames given as bytes objects. files: Lib/gzip.py | 8 ++++---- Lib/test/test_gzip.py | 14 ++++++++++++++ Misc/NEWS | 2 ++ 3 files changed, 20 insertions(+), 4 deletions(-) diff --git a/Lib/gzip.py b/Lib/gzip.py --- a/Lib/gzip.py +++ b/Lib/gzip.py @@ -159,9 +159,8 @@ if fileobj is None: fileobj = self.myfileobj = builtins.open(filename, mode or 'rb') if filename is None: - if hasattr(fileobj, 'name') and isinstance(fileobj.name, str): - filename = fileobj.name - else: + filename = getattr(fileobj, 'name', '') + if not isinstance(filename, (str, bytes)): filename = '' if mode is None: if hasattr(fileobj, 'mode'): mode = fileobj.mode @@ -236,7 +235,8 @@ # RFC 1952 requires the FNAME field to be Latin-1. Do not # include filenames that cannot be represented that way. fname = os.path.basename(self.name) - fname = fname.encode('latin-1') + if not isinstance(fname, bytes): + fname = fname.encode('latin-1') if fname.endswith(b'.gz'): fname = fname[:-3] except UnicodeEncodeError: diff --git a/Lib/test/test_gzip.py b/Lib/test/test_gzip.py --- a/Lib/test/test_gzip.py +++ b/Lib/test/test_gzip.py @@ -331,6 +331,20 @@ with gzip.GzipFile(fileobj=f, mode="w") as g: pass + def test_bytes_filename(self): + str_filename = self.filename + try: + bytes_filename = str_filename.encode("ascii") + except UnicodeEncodeError: + self.skipTest("Temporary file name needs to be ASCII") + with gzip.GzipFile(bytes_filename, "wb") as f: + f.write(data1 * 50) + with gzip.GzipFile(bytes_filename, "rb") as f: + self.assertEqual(f.read(), data1 * 50) + # Sanity check that we are actually operating on the right file. + with gzip.GzipFile(str_filename, "rb") as f: + self.assertEqual(f.read(), data1 * 50) + # Testing compress/decompress shortcut functions def test_compress(self): diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -70,6 +70,8 @@ Library ------- +- Fix GzipFile's handling of filenames given as bytes objects. + - Issue #15101: Make pool finalizer avoid joining current thread. - Issue #15036: Mailbox no longer throws an error if a flush is done -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Jun 20 01:51:46 2012 From: python-checkins at python.org (nadeem.vawda) Date: Wed, 20 Jun 2012 01:51:46 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Fix_GzipFile=27s_handling_of_filenames_given_as_bytes_object?= =?utf8?q?s=2E?= Message-ID: http://hg.python.org/cpython/rev/d129982c063d changeset: 77521:d129982c063d parent: 77519:c3616595dada parent: 77520:e044fa016c85 user: Nadeem Vawda date: Wed Jun 20 01:48:50 2012 +0200 summary: Fix GzipFile's handling of filenames given as bytes objects. 
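For illustration only (not part of the changeset): with this fix a bytes path can be passed straight to GzipFile; the path below is hypothetical, and the changeset's own tests exercise the same pattern.

    import gzip

    # With the fix, the filename may be given as bytes as well as str.
    with gzip.GzipFile(b"/tmp/example.gz", "wb") as f:   # hypothetical path
        f.write(b"payload")
    with gzip.GzipFile(b"/tmp/example.gz", "rb") as f:
        assert f.read() == b"payload"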
Add relevant tests for GzipFile, and also for BZ2File and LZMAFile. files: Lib/gzip.py | 8 ++++---- Lib/test/test_bz2.py | 15 +++++++++++++++ Lib/test/test_gzip.py | 14 ++++++++++++++ Lib/test/test_lzma.py | 24 ++++++++++++++++++++++++ Misc/NEWS | 2 ++ 5 files changed, 59 insertions(+), 4 deletions(-) diff --git a/Lib/gzip.py b/Lib/gzip.py --- a/Lib/gzip.py +++ b/Lib/gzip.py @@ -182,9 +182,8 @@ if fileobj is None: fileobj = self.myfileobj = builtins.open(filename, mode or 'rb') if filename is None: - if hasattr(fileobj, 'name') and isinstance(fileobj.name, str): - filename = fileobj.name - else: + filename = getattr(fileobj, 'name', '') + if not isinstance(filename, (str, bytes)): filename = '' if mode is None: mode = getattr(fileobj, 'mode', 'rb') @@ -258,7 +257,8 @@ # RFC 1952 requires the FNAME field to be Latin-1. Do not # include filenames that cannot be represented that way. fname = os.path.basename(self.name) - fname = fname.encode('latin-1') + if not isinstance(fname, bytes): + fname = fname.encode('latin-1') if fname.endswith(b'.gz'): fname = fname[:-3] except UnicodeEncodeError: diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py --- a/Lib/test/test_bz2.py +++ b/Lib/test/test_bz2.py @@ -522,6 +522,21 @@ with BZ2File(self.filename) as bz2f: self.assertEqual(bz2f.read(), data1 + data2) + def testOpenBytesFilename(self): + str_filename = self.filename + try: + bytes_filename = str_filename.encode("ascii") + except UnicodeEncodeError: + self.skipTest("Temporary file name needs to be ASCII") + with BZ2File(bytes_filename, "wb") as f: + f.write(self.DATA) + with BZ2File(bytes_filename, "rb") as f: + self.assertEqual(f.read(), self.DATA) + # Sanity check that we are actually operating on the right file. + with BZ2File(str_filename, "rb") as f: + self.assertEqual(f.read(), self.DATA) + + # Tests for a BZ2File wrapping another file object: def testReadBytesIO(self): diff --git a/Lib/test/test_gzip.py b/Lib/test/test_gzip.py --- a/Lib/test/test_gzip.py +++ b/Lib/test/test_gzip.py @@ -355,6 +355,20 @@ with gzip.GzipFile(fileobj=f, mode="w") as g: pass + def test_bytes_filename(self): + str_filename = self.filename + try: + bytes_filename = str_filename.encode("ascii") + except UnicodeEncodeError: + self.skipTest("Temporary file name needs to be ASCII") + with gzip.GzipFile(bytes_filename, "wb") as f: + f.write(data1 * 50) + with gzip.GzipFile(bytes_filename, "rb") as f: + self.assertEqual(f.read(), data1 * 50) + # Sanity check that we are actually operating on the right file. 
+ with gzip.GzipFile(str_filename, "rb") as f: + self.assertEqual(f.read(), data1 * 50) + # Testing compress/decompress shortcut functions def test_compress(self): diff --git a/Lib/test/test_lzma.py b/Lib/test/test_lzma.py --- a/Lib/test/test_lzma.py +++ b/Lib/test/test_lzma.py @@ -655,6 +655,16 @@ self.assertEqual(f.read(), INPUT) self.assertEqual(f.read(), b"") + def test_read_from_file_with_bytes_filename(self): + try: + bytes_filename = TESTFN.encode("ascii") + except UnicodeEncodeError: + self.skipTest("Temporary file name needs to be ASCII") + with TempFile(TESTFN, COMPRESSED_XZ): + with LZMAFile(bytes_filename) as f: + self.assertEqual(f.read(), INPUT) + self.assertEqual(f.read(), b"") + def test_read_incomplete(self): with LZMAFile(BytesIO(COMPRESSED_XZ[:128])) as f: self.assertRaises(EOFError, f.read) @@ -814,6 +824,20 @@ finally: unlink(TESTFN) + def test_write_to_file_with_bytes_filename(self): + try: + bytes_filename = TESTFN.encode("ascii") + except UnicodeEncodeError: + self.skipTest("Temporary file name needs to be ASCII") + try: + with LZMAFile(bytes_filename, "w") as f: + f.write(INPUT) + expected = lzma.compress(INPUT) + with open(TESTFN, "rb") as f: + self.assertEqual(f.read(), expected) + finally: + unlink(TESTFN) + def test_write_append_to_file(self): part1 = INPUT[:1024] part2 = INPUT[1024:1536] diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -31,6 +31,8 @@ Library ------- +- Fix GzipFile's handling of filenames given as bytes objects. + - Issue #14772: Return destination values from some shutil functions. - Issue #15064: Implement context manager protocol for multiprocessing types -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Wed Jun 20 05:45:40 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Wed, 20 Jun 2012 05:45:40 +0200 Subject: [Python-checkins] Daily reference leaks (d129982c063d): sum=0 Message-ID: results for d129982c063d on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogzoUHP5', '-x'] From python-checkins at python.org Wed Jun 20 11:16:54 2012 From: python-checkins at python.org (martin.v.loewis) Date: Wed, 20 Jun 2012 11:16:54 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Specify_that_the_=5CWindows_co?= =?utf8?q?py_of_the_launcher_is_always_a_32-bit_executable=2E?= Message-ID: http://hg.python.org/peps/rev/153e05bb5f39 changeset: 4468:153e05bb5f39 user: Martin v. L?wis date: Wed Jun 20 11:16:44 2012 +0200 summary: Specify that the \Windows copy of the launcher is always a 32-bit executable. files: pep-0397.txt | 17 +++++++++++------ 1 files changed, 11 insertions(+), 6 deletions(-) diff --git a/pep-0397.txt b/pep-0397.txt --- a/pep-0397.txt +++ b/pep-0397.txt @@ -1,7 +1,7 @@ PEP: 397 Title: Python launcher for Windows -Version: $Revision$ -Last-Modified: $Date$ +Version: $Revision: a57419aee37d $ +Last-Modified: $Date: 2012/06/19 15:13:49 $ Author: Mark Hammond , Martin v. L?wis Status: Draft @@ -89,7 +89,11 @@ The launcher is installed into the Windows directory (see discussion below) if installed by a privileged user. The stand-alone installer asks for an alternative location of the - installer, and adds that location to the user's PATH. + installer, and adds that location to the user's PATH. 
+ + The installation in the Windows directory is a 32-bit executable + (see discussion); the standalone installer may also offer to install + 64-bit versions of the launcher. The launcher installation is registered in HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\CurrentVersion\SharedDLLs @@ -101,9 +105,6 @@ a release level of 0x10 in FIELD3 of the CPython release on which they are based. - The 32-bit distribution of Python will not install a 32-bit - version of the launcher on a 64-bit system. - Once installed, the "console" version of the launcher is associated with .py files and the "windows" version associated with .pyw files. @@ -348,6 +349,10 @@ running on a 64-bit system. However, the Windows directory is always on the path. + The launcher that is installed into the Windows directory is a 32-bit + executable so that the 32-bit CPython installer can provide the same + binary for both 32-bit and 64-bit Windows installations. + Ideally, the launcher process would execute Python directly inside the same process, primarily so the parent of the launcher process could terminate the launcher and have the Python interpreter terminate. If the -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Wed Jun 20 11:18:13 2012 From: python-checkins at python.org (christian.heimes) Date: Wed, 20 Jun 2012 11:18:13 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315096=3A_Drop_supp?= =?utf8?q?ort_for_the_ur_string_prefix?= Message-ID: http://hg.python.org/cpython/rev/8e47e9af826e changeset: 77522:8e47e9af826e user: Christian Heimes date: Wed Jun 20 11:17:58 2012 +0200 summary: Issue #15096: Drop support for the ur string prefix files: Doc/reference/lexical_analysis.rst | 14 ++++++---- Lib/test/test_strlit.py | 9 +++++++ Lib/test/test_tokenize.py | 22 +---------------- Lib/tokenize.py | 12 ++------- Misc/NEWS | 3 ++ Parser/tokenizer.c | 5 ++- 6 files changed, 28 insertions(+), 37 deletions(-) diff --git a/Doc/reference/lexical_analysis.rst b/Doc/reference/lexical_analysis.rst --- a/Doc/reference/lexical_analysis.rst +++ b/Doc/reference/lexical_analysis.rst @@ -401,7 +401,7 @@ .. productionlist:: stringliteral: [`stringprefix`](`shortstring` | `longstring`) - stringprefix: "r" | "u" | "ur" | "R" | "U" | "UR" | "Ur" | "uR" + stringprefix: "r" | "u" | "R" | "U" shortstring: "'" `shortstringitem`* "'" | '"' `shortstringitem`* '"' longstring: "'''" `longstringitem`* "'''" | '"""' `longstringitem`* '"""' shortstringitem: `shortstringchar` | `stringescapeseq` @@ -444,19 +444,21 @@ As of Python 3.3 it is possible again to prefix unicode strings with a ``u`` prefix to simplify maintenance of dual 2.x and 3.x codebases. -Both string and bytes literals may optionally be prefixed with a letter ``'r'`` +Bytes literals may optionally be prefixed with a letter ``'r'`` or ``'R'``; such strings are called :dfn:`raw strings` and treat backslashes as literal characters. As a result, in string literals, ``'\U'`` and ``'\u'`` -escapes in raw strings are not treated specially. +escapes in raw strings are not treated specially. Given that Python 2.x's raw +unicode literals behave differently than Python 3.x's the ``'ur'`` syntax +is not supported. .. versionadded:: 3.3 The ``'rb'`` prefix of raw bytes literals has been added as a synonym of ``'br'``. .. versionadded:: 3.3 - Support for the unicode legacy literal (``u'value'``) and other - versions were reintroduced to simplify the maintenance of dual - Python 2.x and 3.x codebases. See :pep:`414` for more information. 
+ Support for the unicode legacy literal (``u'value'``) was reintroduced + to simplify the maintenance of dual Python 2.x and 3.x codebases. + See :pep:`414` for more information. In triple-quoted strings, unescaped newlines and quotes are allowed (and are retained), except that three unescaped quotes in a row terminate the string. (A diff --git a/Lib/test/test_strlit.py b/Lib/test/test_strlit.py --- a/Lib/test/test_strlit.py +++ b/Lib/test/test_strlit.py @@ -123,6 +123,15 @@ self.assertRaises(SyntaxError, eval, """ rrb'' """) self.assertRaises(SyntaxError, eval, """ rbb'' """) + def test_eval_str_u(self): + self.assertEqual(eval(""" u'x' """), 'x') + self.assertEqual(eval(""" U'\u00e4' """), '?') + self.assertEqual(eval(""" u'\N{LATIN SMALL LETTER A WITH DIAERESIS}' """), '?') + self.assertRaises(SyntaxError, eval, """ ur'' """) + self.assertRaises(SyntaxError, eval, """ ru'' """) + self.assertRaises(SyntaxError, eval, """ bu'' """) + self.assertRaises(SyntaxError, eval, """ ub'' """) + def check_encoding(self, encoding, extra=""): modname = "xx_" + encoding.replace("-", "_") fn = os.path.join(self.tmpdir, modname + ".py") diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py --- a/Lib/test/test_tokenize.py +++ b/Lib/test/test_tokenize.py @@ -299,24 +299,6 @@ STRING 'u"abc"' (1, 0) (1, 6) OP '+' (1, 7) (1, 8) STRING 'U"abc"' (1, 9) (1, 15) - >>> dump_tokens("ur'abc' + uR'abc' + Ur'abc' + UR'abc'") - ENCODING 'utf-8' (0, 0) (0, 0) - STRING "ur'abc'" (1, 0) (1, 7) - OP '+' (1, 8) (1, 9) - STRING "uR'abc'" (1, 10) (1, 17) - OP '+' (1, 18) (1, 19) - STRING "Ur'abc'" (1, 20) (1, 27) - OP '+' (1, 28) (1, 29) - STRING "UR'abc'" (1, 30) (1, 37) - >>> dump_tokens('ur"abc" + uR"abc" + Ur"abc" + UR"abc"') - ENCODING 'utf-8' (0, 0) (0, 0) - STRING 'ur"abc"' (1, 0) (1, 7) - OP '+' (1, 8) (1, 9) - STRING 'uR"abc"' (1, 10) (1, 17) - OP '+' (1, 18) (1, 19) - STRING 'Ur"abc"' (1, 20) (1, 27) - OP '+' (1, 28) (1, 29) - STRING 'UR"abc"' (1, 30) (1, 37) >>> dump_tokens("b'abc' + B'abc'") ENCODING 'utf-8' (0, 0) (0, 0) @@ -642,7 +624,7 @@ Legacy unicode literals: - >>> dump_tokens("?rter = u'places'\\ngr?n = UR'green'") + >>> dump_tokens("?rter = u'places'\\ngr?n = U'green'") ENCODING 'utf-8' (0, 0) (0, 0) NAME '?rter' (1, 0) (1, 5) OP '=' (1, 6) (1, 7) @@ -650,7 +632,7 @@ NEWLINE '\\n' (1, 17) (1, 18) NAME 'gr?n' (2, 0) (2, 4) OP '=' (2, 5) (2, 6) - STRING "UR'green'" (2, 7) (2, 16) + STRING "U'green'" (2, 7) (2, 15) """ from test import support diff --git a/Lib/tokenize.py b/Lib/tokenize.py --- a/Lib/tokenize.py +++ b/Lib/tokenize.py @@ -127,7 +127,7 @@ Imagnumber = group(r'[0-9]+[jJ]', Floatnumber + r'[jJ]') Number = group(Imagnumber, Floatnumber, Intnumber) -StringPrefix = r'(?:[uUbB][rR]?|[rR][bB]?)?' +StringPrefix = r'(?:[bB][rR]?|[rR][bB]?|[uU])?' # Tail end of ' string. 
Single = r"[^'\\]*(?:\\.[^'\\]*)*'" @@ -183,12 +183,8 @@ "rB'''": Single3, 'rB"""': Double3, "RB'''": Single3, 'RB"""': Double3, "u'''": Single3, 'u"""': Double3, - "ur'''": Single3, 'ur"""': Double3, "R'''": Single3, 'R"""': Double3, "U'''": Single3, 'U"""': Double3, - "uR'''": Single3, 'uR"""': Double3, - "Ur'''": Single3, 'Ur"""': Double3, - "UR'''": Single3, 'UR"""': Double3, 'r': None, 'R': None, 'b': None, 'B': None, 'u': None, 'U': None} @@ -201,8 +197,7 @@ "rb'''", 'rb"""', "rB'''", 'rB"""', "Rb'''", 'Rb"""', "RB'''", 'RB"""', "u'''", 'u"""', "U'''", 'U"""', - "ur'''", 'ur"""', "Ur'''", 'Ur"""', - "uR'''", 'uR"""', "UR'''", 'UR"""'): + ): triple_quoted[t] = t single_quoted = {} for t in ("'", '"', @@ -213,8 +208,7 @@ "rb'", 'rb"', "rB'", 'rB"', "Rb'", 'Rb"', "RB'", 'RB"' , "u'", 'u"', "U'", 'U"', - "ur'", 'ur"', "Ur'", 'Ur"', - "uR'", 'uR"', "UR'", 'UR"' ): + ): single_quoted[t] = t tabsize = 8 diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,9 @@ Core and Builtins ----------------- +- Issue #15096: Removed support for ur'' as the raw notation isn't + compatible with Python 2.x's raw unicode strings. + - Issue #13783: Generator objects now use the identifier APIs internally - Issue #14874: Restore charmap decoding speed to pre-PEP 393 levels. diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c --- a/Parser/tokenizer.c +++ b/Parser/tokenizer.c @@ -1412,7 +1412,7 @@ /* Identifier (most frequent token!) */ nonascii = 0; if (is_potential_identifier_start(c)) { - /* Process b"", r"", u"", br"", rb"" and ur"" */ + /* Process b"", r"", u"", br"" and rb"" */ int saw_b = 0, saw_r = 0, saw_u = 0; while (1) { if (!(saw_b || saw_u) && (c == 'b' || c == 'B')) @@ -1421,7 +1421,8 @@ want to support it in arbitrary order like byte literals. */ else if (!(saw_b || saw_u || saw_r) && (c == 'u' || c == 'U')) saw_u = 1; - else if (!saw_r && (c == 'r' || c == 'R')) + /* ur"" and ru"" are not supported */ + else if (!(saw_r || saw_u) && (c == 'r' || c == 'R')) saw_r = 1; else break; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Jun 20 11:25:40 2012 From: python-checkins at python.org (georg.brandl) Date: Wed, 20 Jun 2012 11:25:40 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_small_overeager_edit_fr?= =?utf8?q?om_8e47e9af826e=2E?= Message-ID: http://hg.python.org/cpython/rev/c4b8c8064ef4 changeset: 77523:c4b8c8064ef4 user: Georg Brandl date: Wed Jun 20 11:26:03 2012 +0200 summary: Fix small overeager edit from 8e47e9af826e. files: Doc/reference/lexical_analysis.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/reference/lexical_analysis.rst b/Doc/reference/lexical_analysis.rst --- a/Doc/reference/lexical_analysis.rst +++ b/Doc/reference/lexical_analysis.rst @@ -444,7 +444,7 @@ As of Python 3.3 it is possible again to prefix unicode strings with a ``u`` prefix to simplify maintenance of dual 2.x and 3.x codebases. -Bytes literals may optionally be prefixed with a letter ``'r'`` +Both string and bytes literals may optionally be prefixed with a letter ``'r'`` or ``'R'``; such strings are called :dfn:`raw strings` and treat backslashes as literal characters. As a result, in string literals, ``'\U'`` and ``'\u'`` escapes in raw strings are not treated specially. 
Given that Python 2.x's raw -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Jun 20 13:17:30 2012 From: python-checkins at python.org (matthias.klose) Date: Wed, 20 Jun 2012 13:17:30 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Do_not_italicize_punctuatio?= =?utf8?q?n_in_python=281=29_manual_page_=28Matt_Kraai=29=2E?= Message-ID: http://hg.python.org/cpython/rev/c1c632e2560d changeset: 77524:c1c632e2560d user: doko at ubuntu.com date: Wed Jun 20 13:16:31 2012 +0200 summary: Do not italicize punctuation in python(1) manual page (Matt Kraai). files: Misc/python.man | 12 ++++++------ 1 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Misc/python.man b/Misc/python.man --- a/Misc/python.man +++ b/Misc/python.man @@ -257,9 +257,9 @@ from that file; when called with .B \-c -.I command, +.IR command , it executes the Python statement(s) given as -.I command. +.IR command . Here .I command may contain multiple statements separated by newlines. @@ -269,7 +269,7 @@ .PP If available, the script name and additional arguments thereafter are passed to the script in the Python variable -.I sys.argv , +.IR sys.argv , which is a list of strings (you must first .I import sys to be able to access it). @@ -283,14 +283,14 @@ .I '-c'. Note that options interpreted by the Python interpreter itself are not placed in -.I sys.argv. +.IR sys.argv . .PP In interactive mode, the primary prompt is `>>>'; the second prompt (which appears when a command is not complete) is `...'. The prompts can be changed by assignment to .I sys.ps1 or -.I sys.ps2. +.IR sys.ps2 . The interpreter quits when it reads an EOF at a prompt. When an unhandled exception occurs, a stack trace is printed and control returns to the primary prompt; in non-interactive mode, the @@ -346,7 +346,7 @@ inserted in the path in front of $PYTHONPATH. The search path can be manipulated from within a Python program as the variable -.I sys.path . +.IR sys.path . .IP PYTHONSTARTUP If this is the name of a readable file, the Python commands in that file are executed before the first prompt is displayed in interactive -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Jun 20 13:46:11 2012 From: python-checkins at python.org (nick.coghlan) Date: Wed, 20 Jun 2012 13:46:11 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Update_PEP_414_to_record_the_e?= =?utf8?q?xclusion_of_raw_Unicode_literals_from_the_scope?= Message-ID: http://hg.python.org/peps/rev/f565858c556a changeset: 4469:f565858c556a user: Nick Coghlan date: Wed Jun 20 21:45:58 2012 +1000 summary: Update PEP 414 to record the exclusion of raw Unicode literals from the scope files: pep-0403.txt | 2 ++ pep-0414.txt | 38 +++++++++++++++++++++++++++++++++----- 2 files changed, 35 insertions(+), 5 deletions(-) diff --git a/pep-0403.txt b/pep-0403.txt --- a/pep-0403.txt +++ b/pep-0403.txt @@ -90,6 +90,8 @@ def adder(i): return lambda x: x + i +If a list comprehension grows to the + Proposal ======== diff --git a/pep-0414.txt b/pep-0414.txt --- a/pep-0414.txt +++ b/pep-0414.txt @@ -40,7 +40,7 @@ Specifically, the Python 3 definition for string literal prefixes will be expanded to allow:: - "u" | "U" | "ur" | "UR" | "Ur" | "uR" + "u" | "U" in addition to the currently supported:: @@ -61,13 +61,40 @@ U'''text''' U"""text""" -Combination of the unicode prefix with the raw string prefix will also be -supported, just as it was in Python 2. 
- No changes are proposed to Python 3's actual Unicode handling, only to the acceptable forms for string literals. +Exclusion of "Raw" Unicode Literals +=================================== + +Python 2 supports a concept of "raw" Unicode literals that don't meet the +convential definition of a raw string: ``\uXXXX`` and ``\UXXXXXXXX`` escape +sequences are still processed by the compiler and converted to the +appropriate Unicode code points when creating the associated Unicode objects. + +Python 3 has no corresponding concept - the compiler performs *no* +preprocessing of the contents of raw string literals. This matches the +behaviour of 8-bit raw string literals in Python 2. + +Since such strings are rarely used and would be interpreted differently in +Python 3 if permitted, it was decided that leaving them out entirely was +a better choice. Code which uses them will thus still fail immediately on +Python 3 (with a Syntax Error), rather than potentially producing different +output. + +To get equivalent behaviour that will run on both Python 2 and Python 3, +either an ordinary Unicode literal can be used (with appropriate additional +escaping within the string), or else string concatenation or string +formatting can be combine the raw portions of the string with those that +require the use of Unicode escape sequences. + +Note that when using ``from __future__ import unicode_literals`` in Python 2, +the nominally "raw" Unicode string literals will process ``\uXXXX`` and +``\UXXXXXXXX`` escape sequences, just like Python 2 strings explicitly marked +with the "raw Unicode" prefix. + + Author's Note ============= @@ -318,7 +345,8 @@ how to use them properly". These responses are a case of completely missing the point of what people are -complaining about. The feedback that resulted in this PEP isn't due to people complaining that ports aren't possible. Instead, the feedback is coming from +complaining about. The feedback that resulted in this PEP isn't due to people +complaining that ports aren't possible. Instead, the feedback is coming from people that have succesfully *completed* ports and are objecting that they found the experience thoroughly *unpleasant* for the class of application that they needed to port (specifically, Unicode aware web frameworks and support -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Wed Jun 20 15:11:40 2012 From: python-checkins at python.org (nick.coghlan) Date: Wed, 20 Jun 2012 15:11:40 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Revert_accidental_edit_to_unre?= =?utf8?q?lated_PEP?= Message-ID: http://hg.python.org/peps/rev/d369b9b83b71 changeset: 4470:d369b9b83b71 user: Nick Coghlan date: Wed Jun 20 23:11:17 2012 +1000 summary: Revert accidental edit to unrelated PEP files: pep-0403.txt | 2 -- 1 files changed, 0 insertions(+), 2 deletions(-) diff --git a/pep-0403.txt b/pep-0403.txt --- a/pep-0403.txt +++ b/pep-0403.txt @@ -90,8 +90,6 @@ def adder(i): return lambda x: x + i -If a list comprehension grows to the - Proposal ======== -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Wed Jun 20 16:25:17 2012 From: python-checkins at python.org (jason.coombs) Date: Wed, 20 Jun 2012 16:25:17 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Prefer_assertEqual_to_simpl?= =?utf8?q?y_assert_per_recommendation_in_issue6727=2E?= Message-ID: http://hg.python.org/cpython/rev/24369f6c4a22 changeset: 77525:24369f6c4a22 user: Jason R. 
Coombs date: Wed Jun 20 10:24:24 2012 -0400 summary: Prefer assertEqual to simply assert per recommendation in issue6727. Clarified comment on disabled code to reference issue15093. files: Lib/test/test_import.py | 11 ++++++++--- 1 files changed, 8 insertions(+), 3 deletions(-) diff --git a/Lib/test/test_import.py b/Lib/test/test_import.py --- a/Lib/test/test_import.py +++ b/Lib/test/test_import.py @@ -707,14 +707,19 @@ os.mkdir(self.tagged) init_file = os.path.join(self.tagged, '__init__.py') open(init_file, 'w').close() - assert os.path.exists(init_file) + self.assertEqual(os.path.exists(init_file), True) # now create a symlink to the tagged package # sample -> sample-tagged os.symlink(self.tagged, self.package_name) - # assert os.path.isdir(self.package_name) # currently fails - assert os.path.isfile(os.path.join(self.package_name, '__init__.py')) + # disabled because os.isdir currently fails (see issue 15093) + # self.assertEqual(os.path.isdir(self.package_name), True) + + self.assertEqual( + os.path.isfile(os.path.join(self.package_name, '__init__.py')), + True, + ) @property def tagged(self): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Jun 20 18:54:54 2012 From: python-checkins at python.org (brian.curtin) Date: Wed, 20 Jun 2012 18:54:54 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Accepting_PEP_397?= Message-ID: http://hg.python.org/peps/rev/3174cd22edcf changeset: 4471:3174cd22edcf user: Brian Curtin date: Wed Jun 20 11:54:32 2012 -0500 summary: Accepting PEP 397 files: pep-0397.txt | 3 ++- 1 files changed, 2 insertions(+), 1 deletions(-) diff --git a/pep-0397.txt b/pep-0397.txt --- a/pep-0397.txt +++ b/pep-0397.txt @@ -4,11 +4,12 @@ Last-Modified: $Date: 2012/06/19 15:13:49 $ Author: Mark Hammond , Martin v. L?wis -Status: Draft +Status: Accepted Type: Standards Track Content-Type: text/plain Created: 15-Mar-2011 Post-History: 21-July-2011, 17-May-2011, 15-Mar-2011 +Resolution: http://mail.python.org/pipermail/python-dev/2012-June/120505.html Abstract -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Wed Jun 20 23:36:38 2012 From: python-checkins at python.org (stefan.krah) Date: Wed, 20 Jun 2012 23:36:38 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Many_cleanups_of_redundant_?= =?utf8?b?Y29kZSBpbiBtcGRfcXJlbV9uZWFyKCk6?= Message-ID: http://hg.python.org/cpython/rev/95221b37bf04 changeset: 77526:95221b37bf04 user: Stefan Krah date: Wed Jun 20 23:34:58 2012 +0200 summary: Many cleanups of redundant code in mpd_qrem_near(): 1) _mpd_qdivmod() uses the context precision only in two places, and the new code should be exactly equivalent to the previous code. 2) Remove misleading comment. 3) The quotient *is* an integer with exponent 0, so calling mpd_qtrunc() is pointless. 4) Replace two instances of identical code by a single one. 5) Use _mpd_cmp_abs() instead of mpd_cmp_total_mag(): the operands are not special. 6) Don't clear MPD_Rounded in the status (with the current code it should not be set within the function). 
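As a rough illustration of the remainder-near semantics this function implements (shown through Python's decimal module, which is backed by libmpdec; not part of the changeset): the remainder closest to zero is returned, and ties go to the even quotient, which is the isodd(quotient) check visible in the diff below.

    from decimal import Decimal

    print(Decimal(7).remainder_near(Decimal(4)))    # -1  (nearest quotient is 2)
    print(Decimal(6).remainder_near(Decimal(4)))    # -2  (tie between q=1 and q=2; even q=2 wins)
    print(Decimal(10).remainder_near(Decimal(6)))   # -2  (nearest quotient is 2)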
files: Modules/_decimal/libmpdec/mpdecimal.c | 42 ++++++-------- 1 files changed, 17 insertions(+), 25 deletions(-) diff --git a/Modules/_decimal/libmpdec/mpdecimal.c b/Modules/_decimal/libmpdec/mpdecimal.c --- a/Modules/_decimal/libmpdec/mpdecimal.c +++ b/Modules/_decimal/libmpdec/mpdecimal.c @@ -6679,7 +6679,7 @@ mpd_context_t workctx; MPD_NEW_STATIC(btmp,0,0,0,0); MPD_NEW_STATIC(q,0,0,0,0); - mpd_ssize_t expdiff, floordigits; + mpd_ssize_t expdiff, qdigits; int cmp, isodd, allnine; if (mpd_isspecial(a) || mpd_isspecial(b)) { @@ -6716,53 +6716,45 @@ b = &btmp; } - workctx = *ctx; - workctx.prec = a->digits; - workctx.prec = (workctx.prec > ctx->prec) ? workctx.prec : ctx->prec; - - _mpd_qdivmod(&q, r, a, b, &workctx, status); - if (mpd_isnan(&q) || mpd_isnan(r) || q.digits > ctx->prec) { - mpd_seterror(r, MPD_Division_impossible, status); + _mpd_qdivmod(&q, r, a, b, ctx, status); + if (mpd_isnan(&q) || mpd_isnan(r)) { goto finish; } if (mpd_iszerocoeff(r)) { goto finish; } - /* Deal with cases like rmnx078: - * remaindernear 999999999.5 1 -> NaN Division_impossible */ expdiff = mpd_adjexp(b) - mpd_adjexp(r); if (-1 <= expdiff && expdiff <= 1) { - mpd_qtrunc(&q, &q, &workctx, &workctx.status); allnine = mpd_coeff_isallnine(&q); - floordigits = q.digits; + qdigits = q.digits; isodd = mpd_isodd(&q); mpd_maxcontext(&workctx); if (mpd_sign(a) == mpd_sign(b)) { + /* sign(r) == sign(b) */ _mpd_qsub(&q, r, b, &workctx, &workctx.status); - if (workctx.status&MPD_Errors) { - mpd_seterror(r, workctx.status&MPD_Errors, status); - goto finish; - } } else { + /* sign(r) != sign(b) */ _mpd_qadd(&q, r, b, &workctx, &workctx.status); - if (workctx.status&MPD_Errors) { - mpd_seterror(r, workctx.status&MPD_Errors, status); - goto finish; - } - } - - cmp = mpd_cmp_total_mag(&q, r); + } + + if (workctx.status&MPD_Errors) { + mpd_seterror(r, workctx.status&MPD_Errors, status); + goto finish; + } + + cmp = _mpd_cmp_abs(&q, r); if (cmp < 0 || (cmp == 0 && isodd)) { - if (allnine && floordigits == ctx->prec) { + /* abs(r) > abs(b)/2 or abs(r) == abs(b)/2 and isodd(quotient) */ + if (allnine && qdigits == ctx->prec) { + /* abs(quotient) + 1 == 10**prec */ mpd_seterror(r, MPD_Division_impossible, status); goto finish; } mpd_qcopy(r, &q, status); - *status &= ~MPD_Rounded; } } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Wed Jun 20 23:40:12 2012 From: python-checkins at python.org (stefan.krah) Date: Wed, 20 Jun 2012 23:40:12 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_comment=2E?= Message-ID: http://hg.python.org/cpython/rev/717753fb5c48 changeset: 77527:717753fb5c48 user: Stefan Krah date: Wed Jun 20 23:38:51 2012 +0200 summary: Fix comment. files: Modules/_decimal/libmpdec/mpdecimal.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/_decimal/libmpdec/mpdecimal.c b/Modules/_decimal/libmpdec/mpdecimal.c --- a/Modules/_decimal/libmpdec/mpdecimal.c +++ b/Modules/_decimal/libmpdec/mpdecimal.c @@ -7080,7 +7080,7 @@ * q, r = divmod(coeff(a), coeff(b)) * * Strategy: Multiply the dividend by the reciprocal of the divisor. The - * inexact result is fixed by a small loop, using at most 2 iterations. + * inexact result is fixed by a small loop, using at most one iteration. 
* * ACL2 proofs: * ------------ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 21 02:20:46 2012 From: python-checkins at python.org (nadeem.vawda) Date: Thu, 21 Jun 2012 02:20:46 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314684=3A_Add_suppo?= =?utf8?q?rt_for_predefined_compression_dictionaries_to_the_zlib?= Message-ID: http://hg.python.org/cpython/rev/dd4f7d5c51c7 changeset: 77528:dd4f7d5c51c7 user: Nadeem Vawda date: Thu Jun 21 02:13:12 2012 +0200 summary: Issue #14684: Add support for predefined compression dictionaries to the zlib module. Original patch by Sam Rushing. files: Doc/library/zlib.rst | 31 ++++++- Lib/test/test_zlib.py | 30 +++++++ Misc/NEWS | 3 + Modules/zlibmodule.c | 116 ++++++++++++++++++++++++----- 4 files changed, 152 insertions(+), 28 deletions(-) diff --git a/Doc/library/zlib.rst b/Doc/library/zlib.rst --- a/Doc/library/zlib.rst +++ b/Doc/library/zlib.rst @@ -58,12 +58,19 @@ exception if any error occurs. -.. function:: compressobj([level]) +.. function:: compressobj([level[, method[, wbits[, memlevel[, strategy[, zdict]]]]]]) Returns a compression object, to be used for compressing data streams that won't - fit into memory at once. *level* is an integer from ``1`` to ``9`` controlling - the level of compression; ``1`` is fastest and produces the least compression, - ``9`` is slowest and produces the most. The default value is ``6``. + fit into memory at once. + + *level* is an integer from ``1`` to ``9`` controlling the level of + compression; ``1`` is fastest and produces the least compression, ``9`` is + slowest and produces the most. The default value is ``6``. + + *zdict* is a predefined compression dictionary. This is a sequence of bytes + (such as a :class:`bytes` object) containing subsequences that are expected + to occur frequently in the data that is to be compressed. Those subsequences + that are expected to be most common should come at the end of the dictionary. .. function:: crc32(data[, value]) @@ -114,11 +121,21 @@ to :c:func:`malloc`. The default size is 16384. -.. function:: decompressobj([wbits]) +.. function:: decompressobj([wbits[, zdict]]) Returns a decompression object, to be used for decompressing data streams that - won't fit into memory at once. The *wbits* parameter controls the size of the - window buffer. + won't fit into memory at once. + + The *wbits* parameter controls the size of the window buffer. + + The *zdict* parameter specifies a predefined compression dictionary. If + provided, this must be the same dictionary as was used by the compressor that + produced the data that is to be decompressed. + +.. note:: + If *zdict* is a mutable object (such as a :class:`bytearray`), you must not + modify its contents between the call to :func:`decompressobj` and the first + call to the decompressor's ``decompress()`` method. 
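A brief usage sketch of the new zdict parameter documented above (illustrative only; the dictionary and payload here are made up, and the tests added later in this changeset show the same pattern).

    import zlib

    zdict = b"expected substrings go here"            # sample predefined dictionary
    data = b"payload containing expected substrings"

    co = zlib.compressobj(zdict=zdict)
    compressed = co.compress(data) + co.flush()

    # The decompressor must be given the same dictionary.
    do = zlib.decompressobj(zdict=zdict)
    assert do.decompress(compressed) + do.flush() == data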
Compression objects support the following methods: diff --git a/Lib/test/test_zlib.py b/Lib/test/test_zlib.py --- a/Lib/test/test_zlib.py +++ b/Lib/test/test_zlib.py @@ -425,6 +425,36 @@ dco = zlib.decompressobj() self.assertEqual(dco.flush(), b"") # Returns nothing + def test_dictionary(self): + h = HAMLET_SCENE + # build a simulated dictionary out of the words in HAMLET + words = h.split() + random.shuffle(words) + zdict = b''.join(words) + # use it to compress HAMLET + co = zlib.compressobj(zdict=zdict) + cd = co.compress(h) + co.flush() + # verify that it will decompress with the dictionary + dco = zlib.decompressobj(zdict=zdict) + self.assertEqual(dco.decompress(cd) + dco.flush(), h) + # verify that it fails when not given the dictionary + dco = zlib.decompressobj() + self.assertRaises(zlib.error, dco.decompress, cd) + + def test_dictionary_streaming(self): + # this is simulating the needs of SPDY to be able to reuse the same + # stream object (with its compression state) between sets of compressed + # headers. + co = zlib.compressobj(zdict=HAMLET_SCENE) + do = zlib.decompressobj(zdict=HAMLET_SCENE) + piece = HAMLET_SCENE[1000:1500] + d0 = co.compress(piece) + co.flush(zlib.Z_SYNC_FLUSH) + d1 = co.compress(piece[100:]) + co.flush(zlib.Z_SYNC_FLUSH) + d2 = co.compress(piece[:-100]) + co.flush(zlib.Z_SYNC_FLUSH) + self.assertEqual(do.decompress(d0), piece) + self.assertEqual(do.decompress(d1), piece[100:]) + self.assertEqual(do.decompress(d2), piece[:-100]) + def test_decompress_incomplete_stream(self): # This is 'foo', deflated x = b'x\x9cK\xcb\xcf\x07\x00\x02\x82\x01E' diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -34,6 +34,9 @@ Library ------- +- Issue #14684: zlib.compressobj() and zlib.decompressobj() now support the use + of predefined compression dictionaries. Original patch by Sam Rushing. + - Fix GzipFile's handling of filenames given as bytes objects. - Issue #14772: Return destination values from some shutil functions. diff --git a/Modules/zlibmodule.c b/Modules/zlibmodule.c --- a/Modules/zlibmodule.c +++ b/Modules/zlibmodule.c @@ -45,6 +45,7 @@ PyObject *unconsumed_tail; char eof; int is_initialised; + PyObject *zdict; #ifdef WITH_THREAD PyThread_type_lock lock; #endif @@ -80,14 +81,21 @@ } PyDoc_STRVAR(compressobj__doc__, -"compressobj([level]) -- Return a compressor object.\n" +"compressobj([level[, method[, wbits[, memlevel[, strategy[, zdict]]]]]])\n" +" -- Return a compressor object.\n" "\n" -"Optional arg level is the compression level, in 1-9."); +"Optional arg level is the compression level, in 1-9.\n" +"\n" +"Optional arg zdict is the predefined compression dictionary - a sequence of\n" +"bytes containing subsequences that are likely to occur in the input data."); PyDoc_STRVAR(decompressobj__doc__, -"decompressobj([wbits]) -- Return a decompressor object.\n" +"decompressobj([wbits[, zdict]]) -- Return a decompressor object.\n" "\n" -"Optional arg wbits is the window buffer size."); +"Optional arg wbits is the window buffer size.\n" +"\n" +"Optional arg zdict is the predefined compression dictionary. 
This must be\n" +"the same dictionary as used by the compressor that produced the input data."); static compobject * newcompobject(PyTypeObject *type) @@ -98,6 +106,7 @@ return NULL; self->eof = 0; self->is_initialised = 0; + self->zdict = NULL; self->unused_data = PyBytes_FromStringAndSize("", 0); if (self->unused_data == NULL) { Py_DECREF(self); @@ -316,19 +325,24 @@ } static PyObject * -PyZlib_compressobj(PyObject *selfptr, PyObject *args) +PyZlib_compressobj(PyObject *selfptr, PyObject *args, PyObject *kwargs) { compobject *self; int level=Z_DEFAULT_COMPRESSION, method=DEFLATED; int wbits=MAX_WBITS, memLevel=DEF_MEM_LEVEL, strategy=0, err; + Py_buffer zdict; + static char *kwlist[] = {"level", "method", "wbits", + "memLevel", "strategy", "zdict", NULL}; - if (!PyArg_ParseTuple(args, "|iiiii:compressobj", &level, &method, &wbits, - &memLevel, &strategy)) + zdict.buf = NULL; /* Sentinel, so we can tell whether zdict was supplied. */ + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|iiiiiy*:compressobj", + kwlist, &level, &method, &wbits, + &memLevel, &strategy, &zdict)) return NULL; self = newcompobject(&Comptype); if (self==NULL) - return(NULL); + goto error; self->zst.zalloc = (alloc_func)NULL; self->zst.zfree = (free_func)Z_NULL; self->zst.next_in = NULL; @@ -337,30 +351,58 @@ switch(err) { case (Z_OK): self->is_initialised = 1; - return (PyObject*)self; + if (zdict.buf == NULL) { + goto success; + } else { + err = deflateSetDictionary(&self->zst, zdict.buf, zdict.len); + switch (err) { + case (Z_OK): + goto success; + case (Z_STREAM_ERROR): + PyErr_SetString(PyExc_ValueError, "Invalid dictionary"); + goto error; + default: + PyErr_SetString(PyExc_ValueError, "deflateSetDictionary()"); + goto error; + } + } case (Z_MEM_ERROR): - Py_DECREF(self); PyErr_SetString(PyExc_MemoryError, "Can't allocate memory for compression object"); - return NULL; + goto error; case(Z_STREAM_ERROR): - Py_DECREF(self); PyErr_SetString(PyExc_ValueError, "Invalid initialization option"); - return NULL; + goto error; default: zlib_error(self->zst, err, "while creating compression object"); - Py_DECREF(self); - return NULL; + goto error; } + + error: + Py_XDECREF(self); + self = NULL; + success: + if (zdict.buf != NULL) + PyBuffer_Release(&zdict); + return (PyObject*)self; } static PyObject * -PyZlib_decompressobj(PyObject *selfptr, PyObject *args) +PyZlib_decompressobj(PyObject *selfptr, PyObject *args, PyObject *kwargs) { + static char *kwlist[] = {"wbits", "zdict", NULL}; int wbits=DEF_WBITS, err; compobject *self; - if (!PyArg_ParseTuple(args, "|i:decompressobj", &wbits)) + PyObject *zdict=NULL; + + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|iO:decompressobj", + kwlist, &wbits, &zdict)) return NULL; + if (zdict != NULL && !PyObject_CheckBuffer(zdict)) { + PyErr_SetString(PyExc_TypeError, + "zdict argument must support the buffer protocol"); + return NULL; + } self = newcompobject(&Decomptype); if (self == NULL) @@ -369,6 +411,10 @@ self->zst.zfree = (free_func)Z_NULL; self->zst.next_in = NULL; self->zst.avail_in = 0; + if (zdict != NULL) { + Py_INCREF(zdict); + self->zdict = zdict; + } err = inflateInit2(&self->zst, wbits); switch(err) { case (Z_OK): @@ -398,6 +444,7 @@ #endif Py_XDECREF(self->unused_data); Py_XDECREF(self->unconsumed_tail); + Py_XDECREF(self->zdict); PyObject_Del(self); } @@ -557,6 +604,27 @@ err = inflate(&(self->zst), Z_SYNC_FLUSH); Py_END_ALLOW_THREADS + if (err == Z_NEED_DICT && self->zdict != NULL) { + Py_buffer zdict_buf; + if (PyObject_GetBuffer(self->zdict, &zdict_buf, 
PyBUF_SIMPLE) == -1) { + Py_DECREF(RetVal); + RetVal = NULL; + goto error; + } + err = inflateSetDictionary(&(self->zst), zdict_buf.buf, zdict_buf.len); + PyBuffer_Release(&zdict_buf); + if (err != Z_OK) { + zlib_error(self->zst, err, "while decompressing data"); + Py_DECREF(RetVal); + RetVal = NULL; + goto error; + } + /* repeat the call to inflate! */ + Py_BEGIN_ALLOW_THREADS + err = inflate(&(self->zst), Z_SYNC_FLUSH); + Py_END_ALLOW_THREADS + } + /* While Z_OK and the output buffer is full, there might be more output. So extend the output buffer and try again. */ @@ -770,10 +838,13 @@ } Py_INCREF(self->unused_data); Py_INCREF(self->unconsumed_tail); + Py_XINCREF(self->zdict); Py_XDECREF(retval->unused_data); Py_XDECREF(retval->unconsumed_tail); + Py_XDECREF(retval->zdict); retval->unused_data = self->unused_data; retval->unconsumed_tail = self->unconsumed_tail; + retval->zdict = self->zdict; retval->eof = self->eof; /* Mark it as being initialized */ @@ -822,10 +893,13 @@ Py_INCREF(self->unused_data); Py_INCREF(self->unconsumed_tail); + Py_XINCREF(self->zdict); Py_XDECREF(retval->unused_data); Py_XDECREF(retval->unconsumed_tail); + Py_XDECREF(retval->zdict); retval->unused_data = self->unused_data; retval->unconsumed_tail = self->unconsumed_tail; + retval->zdict = self->zdict; retval->eof = self->eof; /* Mark it as being initialized */ @@ -1032,13 +1106,13 @@ adler32__doc__}, {"compress", (PyCFunction)PyZlib_compress, METH_VARARGS, compress__doc__}, - {"compressobj", (PyCFunction)PyZlib_compressobj, METH_VARARGS, + {"compressobj", (PyCFunction)PyZlib_compressobj, METH_VARARGS|METH_KEYWORDS, compressobj__doc__}, {"crc32", (PyCFunction)PyZlib_crc32, METH_VARARGS, crc32__doc__}, {"decompress", (PyCFunction)PyZlib_decompress, METH_VARARGS, decompress__doc__}, - {"decompressobj", (PyCFunction)PyZlib_decompressobj, METH_VARARGS, + {"decompressobj", (PyCFunction)PyZlib_decompressobj, METH_VARARGS|METH_KEYWORDS, decompressobj__doc__}, {NULL, NULL} }; @@ -1112,10 +1186,10 @@ "\n" "adler32(string[, start]) -- Compute an Adler-32 checksum.\n" "compress(string[, level]) -- Compress string, with compression level in 1-9.\n" -"compressobj([level]) -- Return a compressor object.\n" +"compressobj([level[, ...]]) -- Return a compressor object.\n" "crc32(string[, start]) -- Compute a CRC-32 checksum.\n" "decompress(string,[wbits],[bufsize]) -- Decompresses a compressed string.\n" -"decompressobj([wbits]) -- Return a decompressor object.\n" +"decompressobj([wbits[, zdict]]]) -- Return a decompressor object.\n" "\n" "'wbits' is window buffer size.\n" "Compressor objects support compress() and flush() methods; decompressor\n" -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Thu Jun 21 05:44:53 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Thu, 21 Jun 2012 05:44:53 +0200 Subject: [Python-checkins] Daily reference leaks (dd4f7d5c51c7): sum=0 Message-ID: results for dd4f7d5c51c7 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflog7T7iQX', '-x'] From python-checkins at python.org Thu Jun 21 08:48:38 2012 From: python-checkins at python.org (ned.deily) Date: Thu, 21 Jun 2012 08:48:38 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314225=3A_Fix_Unico?= =?utf8?q?de_support_for_curses_=28=2312567=29_on_OS_X=3A?= Message-ID: http://hg.python.org/cpython/rev/2035c5ad4239 changeset: 77529:2035c5ad4239 user: Ned Deily date: Wed Jun 20 
23:47:14 2012 -0700 summary: Issue #14225: Fix Unicode support for curses (#12567) on OS X: 1. on OS X, there is no separate /usr/lib/libcursesw nor libpanelw 2. _XOPEN_SOURCE_EXTENDED must be enabled for _curses build files: Misc/NEWS | 2 ++ setup.py | 13 +++++++++++++ 2 files changed, 15 insertions(+), 0 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -165,6 +165,8 @@ Build ----- +- Issue #14225: Fix Unicode support for curses (#12567) on OS X + - Issue #14928: Fix importlib bootstrap issues by using a custom executable (Modules/_freeze_importlib) to build Python/importlib.h. diff --git a/setup.py b/setup.py --- a/setup.py +++ b/setup.py @@ -1185,6 +1185,18 @@ # Bug 1464056: If _curses.so links with ncursesw, # _curses_panel.so must link with panelw. panel_library = 'panelw' + if platform == 'darwin': + # On OS X, there is no separate /usr/lib/libncursesw nor + # libpanelw. If we are here, we found a locally-supplied + # version of libncursesw. There should be also be a + # libpanelw. _XOPEN_SOURCE defines are usually excluded + # for OS X but we need _XOPEN_SOURCE_EXTENDED here for + # ncurses wide char support + curses_defines.append(('_XOPEN_SOURCE_EXTENDED', '1')) + elif platform == 'darwin' and curses_library == 'ncurses': + # Building with the system-suppied combined libncurses/libpanel + curses_defines.append(('HAVE_NCURSESW', '1')) + curses_defines.append(('_XOPEN_SOURCE_EXTENDED', '1')) if curses_library.startswith('ncurses'): curses_libs = [curses_library] @@ -1213,6 +1225,7 @@ self.compiler.find_library_file(lib_dirs, panel_library)): exts.append( Extension('_curses_panel', ['_curses_panel.c'], include_dirs=curses_includes, + define_macros=curses_defines, libraries = [panel_library] + curses_libs) ) else: missing.append('_curses_panel') -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 21 10:44:33 2012 From: python-checkins at python.org (larry.hastings) Date: Thu, 21 Jun 2012 10:44:33 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_The_latest_changes_from_Yury_S?= =?utf8?q?elivanov=2E__I_can_almost_taste_the_acceptance!?= Message-ID: http://hg.python.org/peps/rev/1edf1cecae7d changeset: 4472:1edf1cecae7d user: Larry Hastings date: Thu Jun 21 01:44:15 2012 -0700 summary: The latest changes from Yury Selivanov. I can almost taste the acceptance! files: pep-0362.txt | 159 +++++++++++++++++++++++++++++++------- 1 files changed, 128 insertions(+), 31 deletions(-) diff --git a/pep-0362.txt b/pep-0362.txt --- a/pep-0362.txt +++ b/pep-0362.txt @@ -42,23 +42,58 @@ A Signature object has the following public attributes and methods: * return_annotation : object - The annotation for the return type of the function if specified. - If the function has no annotation for its return type, this - attribute is not set. + The "return" annotation for the function. If the function + has no "return" annotation, this attribute is not set. + * parameters : OrderedDict An ordered mapping of parameters' names to the corresponding - Parameter objects (keyword-only arguments are in the same order - as listed in ``code.co_varnames``). + Parameter objects. + * bind(\*args, \*\*kwargs) -> BoundArguments Creates a mapping from positional and keyword arguments to parameters. Raises a ``TypeError`` if the passed arguments do not match the signature. + * bind_partial(\*args, \*\*kwargs) -> BoundArguments Works the same way as ``bind()``, but allows the omission of some required arguments (mimics ``functools.partial`` behavior.) 
Raises a ``TypeError`` if the passed arguments do not match the signature. +* replace(parameters, \*, return_annotation) -> Signature + Creates a new Signature instance based on the instance + ``replace`` was invoked on. It is possible to pass different + ``parameters`` and/or ``return_annotation`` to override the + corresponding properties of the base signature. To remove + ``return_annotation`` from the copied ``Signature``, pass in + ``Signature.empty``. + +Signature objects are immutable. Use ``Signature.replace()`` to +make a modified copy: +:: + + >>> sig = signature(foo) + >>> new_sig = sig.replace(return_annotation="new return annotation") + >>> new_sig is not sig + True + >>> new_sig.return_annotation == sig.return_annotation + True + >>> new_sig.parameters == sig.parameters + True + +There are two ways to instantiate a Signature class: + +* Signature(parameters, *, return_annotation) + Default Signature constructor. Accepts an optional sequence + of ``Parameter`` objects, and an optional ``return_annotation``. + Parameters sequence is validated to check that there are no + parameters with duplicate names, and that the parameters + are in the right order, i.e. positional-only first, then + positional-or-keyword, etc. +* Signature.from_function(function) + Returns a Signature object reflecting the signature of the + function passed in. + It's possible to test Signatures for equality. Two signatures are equal when their parameters are equal, their positional and positional-only parameters appear in the same order, and they @@ -67,9 +102,14 @@ Changes to the Signature object, or to any of its data members, do not affect the function itself. -Signature also implements ``__str__`` and ``__copy__`` methods. -The latter creates a shallow copy of Signature, with all Parameter -objects copied as well. +Signature also implements ``__str__``: +:: + + >>> str(Signature.from_function((lambda *args: None))) + '(*args)' + + >>> str(Signature()) + '()' Parameter Object @@ -80,20 +120,22 @@ propose a rich Parameter object designed to represent any possible function parameter. -The structure of the Parameter object is: +A Parameter object has the following public attributes and methods: * name : str - The name of the parameter as a string. + The name of the parameter as a string. Must be a valid + python identifier name (with the exception of ``POSITIONAL_ONLY`` + parameters, which can have it set to ``None``.) * default : object - The default value for the parameter, if specified. If the - parameter has no default value, this attribute is not set. + The default value for the parameter. If the parameter has no + default value, this attribute is not set. * annotation : object - The annotation for the parameter if specified. If the - parameter has no annotation, this attribute is not set. + The annotation for the parameter. If the parameter has no + annotation, this attribute is not set. -* kind : str +* kind Describes how argument values are bound to the parameter. Possible values: @@ -101,7 +143,7 @@ as a positional argument. Python has no explicit syntax for defining positional-only - parameters, but many builtin and extension module functions + parameters, but many built-in and extension module functions (especially those that accept only one or two parameters) accept them. @@ -124,9 +166,30 @@ that aren't bound to any other parameter. This corresponds to a "\*\*kwds" parameter in a Python function definition. 
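To make the parameter kinds above concrete, here is a minimal sketch using the
``signature()`` API this PEP describes (the function ``greet`` is purely
illustrative; the output shown assumes the Python 3.3 reference implementation
referenced elsewhere in this PEP):
::

    >>> from inspect import signature, Parameter

    >>> def greet(name, *args, sep=' ', **kwds):
    ...     return sep.join([name] + list(args))

    >>> sig = signature(greet)
    >>> sig.parameters['name'].kind == Parameter.POSITIONAL_OR_KEYWORD
    True
    >>> sig.parameters['args'].kind == Parameter.VAR_POSITIONAL
    True
    >>> sig.parameters['sep'].kind == Parameter.KEYWORD_ONLY
    True
    >>> sig.parameters['kwds'].kind == Parameter.VAR_KEYWORD
    True

    >>> ba = sig.bind('hello', 'world', sep=', ')
    >>> ba.args, ba.kwargs
    (('hello', 'world'), {'sep': ', '})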
+* replace(\*, name, kind, default, annotation) -> Parameter + Creates a new Parameter instance based on the instance + ``replaced`` was invoked on. To override a Parameter + attribute, pass the corresponding argument. To remove + an attribute from a ``Parameter``, pass ``Parameter.empty``. + + Two parameters are equal when they have equal names, kinds, defaults, and annotations. +Parameter objects are immutable. Instead of modifying a Parameter object, +you can use ``Parameter.replace()`` to create a modified copy like so: +:: + + >>> param = Parameter('foo', Parameter.KEYWORD_ONLY, default=42) + >>> str(param) + 'foo=42' + + >>> str(param.replace()) + 'foo=42' + + >>> str(param.replace(default=Parameter.empty, annotation='spam')) + "foo:'spam'" + BoundArguments Object ===================== @@ -138,7 +201,8 @@ * arguments : OrderedDict An ordered, mutable mapping of parameters' names to arguments' values. - Does not contain arguments' default values. + Contains only explicitly bound arguments. Arguments for + which ``bind()`` relied on a default value are skipped. * args : tuple Tuple of positional arguments values. Dynamically computed from the 'arguments' attribute. @@ -159,6 +223,23 @@ ba = sig.bind(10, b=20) test(*ba.args, **ba.kwargs) +Arguments which could be passed as part of either ``*args`` or ``**kwargs`` +will be included only in the ``BoundArguments.args`` attribute. Consider the +following example: +:: + + def test(a=1, b=2, c=3): + pass + + sig = signature(test) + ba = sig.bind(a=10, c=13) + + >>> ba.args + (10,) + + >>> ba.kwargs: + {'c': 13} + Implementation ============== @@ -172,7 +253,7 @@ - If the object is not callable - raise a TypeError - If the object has a ``__signature__`` attribute and if it - is not ``None`` - return a shallow copy of it + is not ``None`` - return it - If it has a ``__wrapped__`` attribute, return ``signature(object.__wrapped__)`` @@ -180,12 +261,9 @@ - If the object is a an instance of ``FunctionType`` construct and return a new ``Signature`` for it - - If the object is a method or a classmethod, construct and return - a new ``Signature`` object, with its first parameter (usually - ``self`` or ``cls``) removed - - - If the object is a staticmethod, construct and return - a new ``Signature`` object + - If the object is a method, construct and return a new ``Signature`` + object, with its first parameter (usually ``self`` or ``cls``) + removed - If the object is an instance of ``functools.partial``, construct a new ``Signature`` from its ``partial.func`` attribute, and @@ -196,15 +274,15 @@ - If the object's type has a ``__call__`` method defined in its MRO, return a Signature for it - - If the object has a ``__new__`` method defined in its class, + - If the object has a ``__new__`` method defined in its MRO, return a Signature object for it - - If the object has a ``__init__`` method defined in its class, + - If the object has a ``__init__`` method defined in its MRO, return a Signature object for it - Return ``signature(object.__call__)`` -Note, that the ``Signature`` object is created in a lazy manner, and +Note that the ``Signature`` object is created in a lazy manner, and is not automatically cached. If, however, the Signature object was explicitly cached by the user, ``signature()`` returns a new shallow copy of it on each invocation. @@ -236,11 +314,21 @@ ---------------------------------------- Some functions may not be introspectable in certain implementations of -Python. 
For example, in CPython, builtin functions defined in C provide +Python. For example, in CPython, built-in functions defined in C provide no metadata about their arguments. Adding support for them is out of scope for this PEP. +Signature and Parameter equivalence +----------------------------------- + +We assume that parameter names have semantic significance--two +signatures are equal only when their corresponding parameters have +the exact same names. Users who want looser equivalence tests, perhaps +ignoring names of VAR_KEYWORD or VAR_POSITIONAL parameters, will +need to implement those themselves. + + Examples ======== @@ -270,6 +358,10 @@ def __call__(self, a, b, *, c) -> tuple: return a, b, c + @classmethod + def spam(cls, a): + return a + def shared_vars(*shared_args): """Decorator factory that defines shared variables that are @@ -280,10 +372,12 @@ def wrapper(*args, **kwds): full_args = shared_args + args return f(*full_args, **kwds) + # Override signature - sig = wrapper.__signature__ = signature(f) - for __ in shared_args: - sig.parameters.popitem(last=False) + sig = signature(f) + sig = sig.replace(tuple(sig.parameters.values())[1:]) + wrapper.__signature__ = sig + return wrapper return decorator @@ -313,6 +407,9 @@ >>> format_signature(Foo().__call__) '(a, b, *, c) -> tuple' + >>> format_signature(Foo.spam) + '(a)' + >>> format_signature(partial(Foo().__call__, 1, c=3)) '(b, *, c=3) -> tuple' -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Thu Jun 21 12:13:21 2012 From: python-checkins at python.org (matthias.klose) Date: Thu, 21 Jun 2012 12:13:21 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_format=5Fobj=3A_make_it_sta?= =?utf8?q?tic?= Message-ID: http://hg.python.org/cpython/rev/d8244d4dbe37 changeset: 77530:d8244d4dbe37 user: doko at ubuntu.com date: Thu Jun 21 12:12:20 2012 +0200 summary: format_obj: make it static files: Python/formatter_unicode.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Python/formatter_unicode.c b/Python/formatter_unicode.c --- a/Python/formatter_unicode.c +++ b/Python/formatter_unicode.c @@ -1347,7 +1347,7 @@ /************************************************************************/ /*********** built in formatters ****************************************/ /************************************************************************/ -int +static int format_obj(PyObject *obj, _PyUnicodeWriter *writer) { PyObject *str; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 21 12:14:41 2012 From: python-checkins at python.org (matthias.klose) Date: Thu, 21 Jun 2012 12:14:41 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Remove_references_to_remove?= =?utf8?q?d_md5=2Eh_and_md5=2Ec_files=2E?= Message-ID: http://hg.python.org/cpython/rev/009ac63759e9 changeset: 77531:009ac63759e9 user: doko at ubuntu.com date: Thu Jun 21 12:13:35 2012 +0200 summary: Remove references to removed md5.h and md5.c files. files: Modules/Setup.dist | 5 ++--- 1 files changed, 2 insertions(+), 3 deletions(-) diff --git a/Modules/Setup.dist b/Modules/Setup.dist --- a/Modules/Setup.dist +++ b/Modules/Setup.dist @@ -234,10 +234,9 @@ # system does not have the OpenSSL libs containing an optimized version. # The _md5 module implements the RSA Data Security, Inc. MD5 -# Message-Digest Algorithm, described in RFC 1321. The necessary files -# md5.c and md5.h are included here. +# Message-Digest Algorithm, described in RFC 1321. 
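(As a usage aside, unrelated to the build change below: the digest this
optional static module implements is normally reached through ``hashlib``,
which falls back to ``_md5`` when OpenSSL's implementation is unavailable.
The value used here is the RFC 1321 test vector.)

    import hashlib
    # "abc" is a standard RFC 1321 test vector
    assert hashlib.md5(b"abc").hexdigest() == "900150983cd24fb0d6963f7d28e17f72"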
-#_md5 md5module.c md5.c +#_md5 md5module.c # The _sha module implements the SHA checksum algorithms. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 21 16:02:18 2012 From: python-checkins at python.org (matthias.klose) Date: Thu, 21 Jun 2012 16:02:18 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_name_of_the_sha1_extens?= =?utf8?q?ion=2E?= Message-ID: http://hg.python.org/cpython/rev/7737ca687f6a changeset: 77532:7737ca687f6a user: doko at ubuntu.com date: Thu Jun 21 16:00:52 2012 +0200 summary: Fix name of the sha1 extension. files: Modules/Setup.dist | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/Setup.dist b/Modules/Setup.dist --- a/Modules/Setup.dist +++ b/Modules/Setup.dist @@ -241,7 +241,7 @@ # The _sha module implements the SHA checksum algorithms. # (NIST's Secure Hash Algorithms.) -#_sha shamodule.c +#_sha1 shamodule.c #_sha256 sha256module.c #_sha512 sha512module.c -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 21 16:23:16 2012 From: python-checkins at python.org (matthias.klose) Date: Thu, 21 Jun 2012 16:23:16 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_=2E=2E=2E_and_fix_the_name_?= =?utf8?q?of_the_sha1_file_name=2E?= Message-ID: http://hg.python.org/cpython/rev/98b0ae585f5e changeset: 77533:98b0ae585f5e user: doko at ubuntu.com date: Thu Jun 21 16:22:15 2012 +0200 summary: ... and fix the name of the sha1 file name. files: Modules/Setup.dist | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/Setup.dist b/Modules/Setup.dist --- a/Modules/Setup.dist +++ b/Modules/Setup.dist @@ -241,7 +241,7 @@ # The _sha module implements the SHA checksum algorithms. # (NIST's Secure Hash Algorithms.) 
-#_sha1 shamodule.c +#_sha1 sha1module.c #_sha256 sha256module.c #_sha512 sha512module.c -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 21 17:06:51 2012 From: python-checkins at python.org (matthias.klose) Date: Thu, 21 Jun 2012 17:06:51 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_sha1=5F=7Binit=2Cprocess=2C?= =?utf8?q?done=7D=3A_make_static?= Message-ID: http://hg.python.org/cpython/rev/02a4542efbb5 changeset: 77534:02a4542efbb5 user: doko at ubuntu.com date: Thu Jun 21 17:05:50 2012 +0200 summary: sha1_{init,process,done}: make static files: Modules/sha1module.c | 9 ++++++--- 1 files changed, 6 insertions(+), 3 deletions(-) diff --git a/Modules/sha1module.c b/Modules/sha1module.c --- a/Modules/sha1module.c +++ b/Modules/sha1module.c @@ -184,7 +184,8 @@ Initialize the hash state @param sha1 The hash state you wish to initialize */ -void sha1_init(struct sha1_state *sha1) +static void +sha1_init(struct sha1_state *sha1) { assert(sha1 != NULL); sha1->state[0] = 0x67452301UL; @@ -202,7 +203,8 @@ @param in The data to hash @param inlen The length of the data (octets) */ -void sha1_process(struct sha1_state *sha1, +static void +sha1_process(struct sha1_state *sha1, const unsigned char *in, Py_ssize_t inlen) { Py_ssize_t n; @@ -237,7 +239,8 @@ @param sha1 The hash state @param out [out] The destination of the hash (20 bytes) */ -void sha1_done(struct sha1_state *sha1, unsigned char *out) +static void +sha1_done(struct sha1_state *sha1, unsigned char *out) { int i; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 21 17:27:09 2012 From: python-checkins at python.org (matthias.klose) Date: Thu, 21 Jun 2012 17:27:09 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_md5=5F=7Binit=2Cprocess=2Cd?= =?utf8?q?one=7D=3A_make_static?= Message-ID: http://hg.python.org/cpython/rev/02d56e032053 changeset: 77535:02d56e032053 user: doko at ubuntu.com date: Thu Jun 21 17:26:06 2012 +0200 summary: md5_{init,process,done}: make static files: Modules/md5module.c | 10 ++++++---- 1 files changed, 6 insertions(+), 4 deletions(-) diff --git a/Modules/md5module.c b/Modules/md5module.c --- a/Modules/md5module.c +++ b/Modules/md5module.c @@ -210,7 +210,8 @@ Initialize the hash state @param sha1 The hash state you wish to initialize */ -void md5_init(struct md5_state *md5) +static void +md5_init(struct md5_state *md5) { assert(md5 != NULL); md5->state[0] = 0x67452301UL; @@ -227,8 +228,8 @@ @param in The data to hash @param inlen The length of the data (octets) */ -void md5_process(struct md5_state *md5, - const unsigned char *in, Py_ssize_t inlen) +static void +md5_process(struct md5_state *md5, const unsigned char *in, Py_ssize_t inlen) { Py_ssize_t n; @@ -262,7 +263,8 @@ @param sha1 The hash state @param out [out] The destination of the hash (16 bytes) */ -void md5_done(struct md5_state *md5, unsigned char *out) +static void +md5_done(struct md5_state *md5, unsigned char *out) { int i; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 21 17:29:55 2012 From: python-checkins at python.org (antoine.pitrou) Date: Thu, 21 Jun 2012 17:29:55 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Make_private_function_stati?= =?utf8?q?c_=28from_=60make_smelly=60=29?= Message-ID: http://hg.python.org/cpython/rev/890d5c8dc59f changeset: 77536:890d5c8dc59f user: Antoine Pitrou date: Thu Jun 21 17:26:28 2012 +0200 summary: Make private function static (from `make smelly`) files: 
Objects/stringlib/localeutil.h | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Objects/stringlib/localeutil.h b/Objects/stringlib/localeutil.h --- a/Objects/stringlib/localeutil.h +++ b/Objects/stringlib/localeutil.h @@ -99,7 +99,7 @@ * As closely as possible, this code mimics the logic in decimal.py's _insert_thousands_sep(). **/ -Py_ssize_t +static Py_ssize_t STRINGLIB(InsertThousandsGrouping)( STRINGLIB_CHAR *buffer, Py_ssize_t n_buffer, -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 21 19:06:39 2012 From: python-checkins at python.org (larry.hastings) Date: Thu, 21 Jun 2012 19:06:39 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_The_latest_round_of_changes_fr?= =?utf8?q?om_Yury_Selivanov_on_PEP_362=2E?= Message-ID: http://hg.python.org/peps/rev/9efc42e7e268 changeset: 4473:9efc42e7e268 user: Larry Hastings date: Thu Jun 21 10:06:19 2012 -0700 summary: The latest round of changes from Yury Selivanov on PEP 362. files: pep-0362.txt | 31 +++++++++++++++++++++++-------- 1 files changed, 23 insertions(+), 8 deletions(-) diff --git a/pep-0362.txt b/pep-0362.txt --- a/pep-0362.txt +++ b/pep-0362.txt @@ -72,18 +72,25 @@ make a modified copy: :: + >>> def foo() -> None: + ... pass >>> sig = signature(foo) + >>> new_sig = sig.replace(return_annotation="new return annotation") >>> new_sig is not sig True - >>> new_sig.return_annotation == sig.return_annotation + >>> new_sig.return_annotation != sig.return_annotation True >>> new_sig.parameters == sig.parameters True + >>> new_sig = new_sig.replace(return_annotation=new_sig.empty) + >>> hasattr(new_sig, "return_annotation") + False + There are two ways to instantiate a Signature class: -* Signature(parameters, *, return_annotation) +* Signature(parameters, \*, return_annotation) Default Signature constructor. Accepts an optional sequence of ``Parameter`` objects, and an optional ``return_annotation``. Parameters sequence is validated to check that there are no @@ -166,6 +173,9 @@ that aren't bound to any other parameter. This corresponds to a "\*\*kwds" parameter in a Python function definition. + Always use ``Parameter.*`` constants for setting and checking + value of the ``kind`` attribute. + * replace(\*, name, kind, default, annotation) -> Parameter Creates a new Parameter instance based on the instance ``replaced`` was invoked on. To override a Parameter @@ -173,6 +183,12 @@ an attribute from a ``Parameter``, pass ``Parameter.empty``. +Parameter constructor: + +* Parameter(name, kind, \*, annotation, default) + Instantiates a Parameter object. ``name`` and ``kind`` are required, + while ``annotation`` and ``default`` are optional. + Two parameters are equal when they have equal names, kinds, defaults, and annotations. @@ -283,9 +299,8 @@ - Return ``signature(object.__call__)`` Note that the ``Signature`` object is created in a lazy manner, and -is not automatically cached. If, however, the Signature object was -explicitly cached by the user, ``signature()`` returns a new shallow copy -of it on each invocation. +is not automatically cached. However, the user can manually cache a +Signature by storing it in the ``__signature__`` attribute. An implementation for Python 3.3 can be found at [#impl]_. The python issue tracking the patch is [#issue]_. @@ -323,9 +338,9 @@ ----------------------------------- We assume that parameter names have semantic significance--two -signatures are equal only when their corresponding parameters have -the exact same names. 
Users who want looser equivalence tests, perhaps -ignoring names of VAR_KEYWORD or VAR_POSITIONAL parameters, will +signatures are equal only when their corresponding parameters are equal +and have the exact same names. Users who want looser equivalence tests, +perhaps ignoring names of VAR_KEYWORD or VAR_POSITIONAL parameters, will need to implement those themselves. -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Thu Jun 21 21:00:22 2012 From: python-checkins at python.org (hynek.schlawack) Date: Thu, 21 Jun 2012 21:00:22 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogIzEwMDUzOiBEb24n?= =?utf8?q?t_close_FDs_when_FileIO=2E=5F=5Finit=5F=5F_fails?= Message-ID: http://hg.python.org/cpython/rev/981ad5254d07 changeset: 77537:981ad5254d07 branch: 2.7 parent: 77506:4c07b9c49b75 user: Hynek Schlawack date: Thu Jun 21 19:45:19 2012 +0200 summary: #10053: Don't close FDs when FileIO.__init__ fails Loosely based on the work by Hirokazu Yamamoto. files: Lib/test/test_fileio.py | 11 +++++++++++ Misc/NEWS | 3 +++ Modules/_io/fileio.c | 12 ++++++------ 3 files changed, 20 insertions(+), 6 deletions(-) diff --git a/Lib/test/test_fileio.py b/Lib/test/test_fileio.py --- a/Lib/test/test_fileio.py +++ b/Lib/test/test_fileio.py @@ -421,6 +421,17 @@ 'IOError: [Errno 2] No such file or directory' not in out): self.fail('Bad output: %r' % out) + def testUnclosedFDOnException(self): + class MyException(Exception): pass + class MyFileIO(_FileIO): + def __setattr__(self, name, value): + if name == "name": + raise MyException("blocked setting name") + return super(MyFileIO, self).__setattr__(name, value) + fd = os.open(__file__, os.O_RDONLY) + self.assertRaises(MyException, MyFileIO, fd) + os.close(fd) # should not raise OSError(EBADF) + def test_main(): # Historically, these tests have been sloppy about removing TESTFN. # So get rid of it no matter what. diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -9,6 +9,9 @@ Core and Builtins ----------------- +- Issue #10053: Don't close FDs when FileIO.__init__ fails. Loosely based on + the work by Hirokazu Yamamoto. + - Issue #14775: Fix a potential quadratic dict build-up due to the garbage collector repeatedly trying to untrack dicts. diff --git a/Modules/_io/fileio.c b/Modules/_io/fileio.c --- a/Modules/_io/fileio.c +++ b/Modules/_io/fileio.c @@ -195,6 +195,7 @@ int flags = 0; int fd = -1; int closefd = 1; + int fd_is_own = 0; assert(PyFileIO_Check(oself)); if (self->fd >= 0) { @@ -345,6 +346,7 @@ #endif self->fd = open(name, flags, 0666); Py_END_ALLOW_THREADS + fd_is_own = 1; if (self->fd < 0) { #ifdef MS_WINDOWS if (widename != NULL) @@ -366,19 +368,17 @@ end of file (otherwise, it might be done only on the first write()). 
*/ PyObject *pos = portable_lseek(self->fd, NULL, 2); - if (pos == NULL) { - if (closefd) { - close(self->fd); - self->fd = -1; - } + if (pos == NULL) goto error; - } Py_DECREF(pos); } goto done; error: + if (!fd_is_own) + self->fd = -1; + ret = -1; done: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 21 21:00:23 2012 From: python-checkins at python.org (hynek.schlawack) Date: Thu, 21 Jun 2012 21:00:23 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogIzEwMDUzOiBEb24n?= =?utf8?q?t_close_FDs_when_FileIO=2E=5F=5Finit=5F=5F_fails?= Message-ID: http://hg.python.org/cpython/rev/d042bd8625f3 changeset: 77538:d042bd8625f3 branch: 3.2 parent: 77520:e044fa016c85 user: Hynek Schlawack date: Thu Jun 21 20:20:25 2012 +0200 summary: #10053: Don't close FDs when FileIO.__init__ fails Loosely based on the work by Hirokazu Yamamoto. files: Lib/test/test_fileio.py | 11 +++++++++++ Misc/NEWS | 3 +++ Modules/_io/fileio.c | 11 +++++------ 3 files changed, 19 insertions(+), 6 deletions(-) diff --git a/Lib/test/test_fileio.py b/Lib/test/test_fileio.py --- a/Lib/test/test_fileio.py +++ b/Lib/test/test_fileio.py @@ -403,6 +403,17 @@ self.assertRaises(ValueError, _FileIO, "/some/invalid/name", "rt") self.assertEqual(w.warnings, []) + def testUnclosedFDOnException(self): + class MyException(Exception): pass + class MyFileIO(_FileIO): + def __setattr__(self, name, value): + if name == "name": + raise MyException("blocked setting name") + return super(MyFileIO, self).__setattr__(name, value) + fd = os.open(__file__, os.O_RDONLY) + self.assertRaises(MyException, MyFileIO, fd) + os.close(fd) # should not raise OSError(EBADF) + def test_main(): # Historically, these tests have been sloppy about removing TESTFN. diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,9 @@ Core and Builtins ----------------- +- Issue #10053: Don't close FDs when FileIO.__init__ fails. Loosely based on + the work by Hirokazu Yamamoto. + - Issue #14775: Fix a potential quadratic dict build-up due to the garbage collector repeatedly trying to untrack dicts. diff --git a/Modules/_io/fileio.c b/Modules/_io/fileio.c --- a/Modules/_io/fileio.c +++ b/Modules/_io/fileio.c @@ -224,6 +224,7 @@ int flags = 0; int fd = -1; int closefd = 1; + int fd_is_own = 0; assert(PyFileIO_Check(oself)); if (self->fd >= 0) { @@ -362,6 +363,7 @@ #endif self->fd = open(name, flags, 0666); Py_END_ALLOW_THREADS + fd_is_own = 1; if (self->fd < 0) { #ifdef MS_WINDOWS if (widename != NULL) @@ -388,13 +390,8 @@ end of file (otherwise, it might be done only on the first write()). 
*/ PyObject *pos = portable_lseek(self->fd, NULL, 2); - if (pos == NULL) { - if (closefd) { - close(self->fd); - self->fd = -1; - } + if (pos == NULL) goto error; - } Py_DECREF(pos); } @@ -402,6 +399,8 @@ error: ret = -1; + if (!fd_is_own) + self->fd = -1; if (self->fd >= 0) internal_close(self); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Thu Jun 21 21:00:25 2012 From: python-checkins at python.org (hynek.schlawack) Date: Thu, 21 Jun 2012 21:00:25 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?b?OiAjMTAwNTM6IERvbid0IGNsb3NlIEZEcyB3aGVuIEZpbGVJTy5fX2luaXRfXyBm?= =?utf8?q?ails?= Message-ID: http://hg.python.org/cpython/rev/464cf523485e changeset: 77539:464cf523485e parent: 77536:890d5c8dc59f parent: 77538:d042bd8625f3 user: Hynek Schlawack date: Thu Jun 21 20:58:31 2012 +0200 summary: #10053: Don't close FDs when FileIO.__init__ fails Loosely based on the work by Hirokazu Yamamoto. files: Lib/test/test_fileio.py | 11 +++++++++++ Misc/NEWS | 3 +++ Modules/_io/fileio.c | 12 ++++++------ 3 files changed, 20 insertions(+), 6 deletions(-) diff --git a/Lib/test/test_fileio.py b/Lib/test/test_fileio.py --- a/Lib/test/test_fileio.py +++ b/Lib/test/test_fileio.py @@ -404,6 +404,17 @@ self.assertRaises(ValueError, _FileIO, "/some/invalid/name", "rt") self.assertEqual(w.warnings, []) + def testUnclosedFDOnException(self): + class MyException(Exception): pass + class MyFileIO(_FileIO): + def __setattr__(self, name, value): + if name == "name": + raise MyException("blocked setting name") + return super(MyFileIO, self).__setattr__(name, value) + fd = os.open(__file__, os.O_RDONLY) + self.assertRaises(MyException, MyFileIO, fd) + os.close(fd) # should not raise OSError(EBADF) + def test_main(): # Historically, these tests have been sloppy about removing TESTFN. diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,9 @@ Core and Builtins ----------------- +- Issue #10053: Don't close FDs when FileIO.__init__ fails. Loosely based on + the work by Hirokazu Yamamoto. + - Issue #15096: Removed support for ur'' as the raw notation isn't compatible with Python 2.x's raw unicode strings. diff --git a/Modules/_io/fileio.c b/Modules/_io/fileio.c --- a/Modules/_io/fileio.c +++ b/Modules/_io/fileio.c @@ -227,6 +227,7 @@ int flags = 0; int fd = -1; int closefd = 1; + int fd_is_own = 0; assert(PyFileIO_Check(oself)); if (self->fd >= 0) { @@ -376,6 +377,7 @@ #endif self->fd = open(name, flags, 0666); Py_END_ALLOW_THREADS + fd_is_own = 1; } else { PyObject *fdobj = PyObject_CallFunction( opener, "Oi", nameobj, flags); @@ -393,6 +395,7 @@ if (self->fd == -1) { goto error; } + fd_is_own = 1; } if (self->fd < 0) { @@ -421,13 +424,8 @@ end of file (otherwise, it might be done only on the first write()). 
*/ PyObject *pos = portable_lseek(self->fd, NULL, 2); - if (pos == NULL) { - if (closefd) { - close(self->fd); - self->fd = -1; - } + if (pos == NULL) goto error; - } Py_DECREF(pos); } @@ -435,6 +433,8 @@ error: ret = -1; + if (!fd_is_own) + self->fd = -1; if (self->fd >= 0) internal_close(self); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 00:12:56 2012 From: python-checkins at python.org (brian.curtin) Date: Fri, 22 Jun 2012 00:12:56 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Add_launcher_source_and_res?= =?utf8?q?ources?= Message-ID: http://hg.python.org/cpython/rev/0eaf0102b0a5 changeset: 77540:0eaf0102b0a5 parent: 77525:24369f6c4a22 user: Brian Curtin date: Wed Jun 20 15:36:14 2012 -0500 summary: Add launcher source and resources files: PC/launcher.c | 1377 ++++++++++++++++++++++++++++++++++ PC/launcher.ico | Bin PC/pylauncher.rc | 3 + 3 files changed, 1380 insertions(+), 0 deletions(-) diff --git a/PC/launcher.c b/PC/launcher.c new file mode 100644 --- /dev/null +++ b/PC/launcher.c @@ -0,0 +1,1377 @@ +/* + * Copyright (C) 2011-2012 Vinay Sajip. All rights reserved. + * + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + * + * Based on the work of: + * + * Mark Hammond (original author of Python version) + * Curt Hagenlocher (job management) + */ + +#include +#include +#include +#include + +#define BUFSIZE 256 +#define MSGSIZE 1024 + +/* Build options. */ +#define SKIP_PREFIX +/* #define SEARCH_PATH */ + +/* Just for now - static definition */ + +static FILE * log_fp = NULL; + +static wchar_t * +skip_whitespace(wchar_t * p) +{ + while (*p && isspace(*p)) + ++p; + return p; +} + +/* + * This function is here to minimise Visual Studio + * warnings about security implications of getenv, and to + * treat blank values as if they are absent. + */ +static wchar_t * get_env(wchar_t * key) +{ + wchar_t * result = _wgetenv(key); + + if (result) { + result = skip_whitespace(result); + if (*result == L'\0') + result = NULL; + } + return result; +} + +static void +debug(wchar_t * format, ...) 
+{ + va_list va; + + if (log_fp != NULL) { + va_start(va, format); + vfwprintf_s(log_fp, format, va); + } +} + +static void +winerror(int rc, wchar_t * message, int size) +{ + FormatMessageW( + FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS, + NULL, rc, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), + message, size, NULL); +} + +static void +error(int rc, wchar_t * format, ... ) +{ + va_list va; + wchar_t message[MSGSIZE]; + wchar_t win_message[MSGSIZE]; + int len; + + va_start(va, format); + len = _vsnwprintf_s(message, MSGSIZE, _TRUNCATE, format, va); + + if (rc == 0) { /* a Windows error */ + winerror(GetLastError(), win_message, MSGSIZE); + if (len >= 0) { + _snwprintf_s(&message[len], MSGSIZE - len, _TRUNCATE, L": %s", + win_message); + } + } + +#if !defined(_WINDOWS) + fwprintf(stderr, L"%s\n", message); +#else + MessageBox(NULL, message, TEXT("Python Launcher is sorry to say ..."), MB_OK); +#endif + ExitProcess(rc); +} + +#if defined(_WINDOWS) + +#define PYTHON_EXECUTABLE L"pythonw.exe" + +#else + +#define PYTHON_EXECUTABLE L"python.exe" + +#endif + +#define RC_NO_STD_HANDLES 100 +#define RC_CREATE_PROCESS 101 +#define RC_BAD_VIRTUAL_PATH 102 +#define RC_NO_PYTHON 103 + +#define MAX_VERSION_SIZE 4 + +typedef struct { + wchar_t version[MAX_VERSION_SIZE]; /* m.n */ + int bits; /* 32 or 64 */ + wchar_t executable[MAX_PATH]; +} INSTALLED_PYTHON; + +/* + * To avoid messing about with heap allocations, just assume we can allocate + * statically and never have to deal with more versions than this. + */ +#define MAX_INSTALLED_PYTHONS 100 + +static INSTALLED_PYTHON installed_pythons[MAX_INSTALLED_PYTHONS]; + +static size_t num_installed_pythons = 0; + +/* to hold SOFTWARE\Python\PythonCore\X.Y\InstallPath */ +#define IP_BASE_SIZE 40 +#define IP_SIZE (IP_BASE_SIZE + MAX_VERSION_SIZE) +#define CORE_PATH L"SOFTWARE\\Python\\PythonCore" + +static wchar_t * location_checks[] = { + L"\\", + L"\\PCBuild\\", + L"\\PCBuild\\amd64\\", + NULL +}; + +static INSTALLED_PYTHON * +find_existing_python(wchar_t * path) +{ + INSTALLED_PYTHON * result = NULL; + size_t i; + INSTALLED_PYTHON * ip; + + for (i = 0, ip = installed_pythons; i < num_installed_pythons; i++, ip++) { + if (_wcsicmp(path, ip->executable) == 0) { + result = ip; + break; + } + } + return result; +} + +static void +locate_pythons_for_key(HKEY root, REGSAM flags) +{ + HKEY core_root, ip_key; + LSTATUS status = RegOpenKeyExW(root, CORE_PATH, 0, flags, &core_root); + wchar_t message[MSGSIZE]; + DWORD i; + size_t n; + BOOL ok; + DWORD type, data_size, attrs; + INSTALLED_PYTHON * ip, * pip; + wchar_t ip_path[IP_SIZE]; + wchar_t * check; + wchar_t ** checkp; + wchar_t *key_name = (root == HKEY_LOCAL_MACHINE) ? 
L"HKLM" : L"HKCU"; + + if (status != ERROR_SUCCESS) + debug(L"locate_pythons_for_key: unable to open PythonCore key in %s\n", + key_name); + else { + ip = &installed_pythons[num_installed_pythons]; + for (i = 0; num_installed_pythons < MAX_INSTALLED_PYTHONS; i++) { + status = RegEnumKeyW(core_root, i, ip->version, MAX_VERSION_SIZE); + if (status != ERROR_SUCCESS) { + if (status != ERROR_NO_MORE_ITEMS) { + /* unexpected error */ + winerror(status, message, MSGSIZE); + debug(L"Can't enumerate registry key for version %s: %s\n", + ip->version, message); + } + break; + } + else { + _snwprintf_s(ip_path, IP_SIZE, _TRUNCATE, + L"%s\\%s\\InstallPath", CORE_PATH, ip->version); + status = RegOpenKeyExW(root, ip_path, 0, flags, &ip_key); + if (status != ERROR_SUCCESS) { + winerror(status, message, MSGSIZE); + // Note: 'message' already has a trailing \n + debug(L"%s\\%s: %s", key_name, ip_path, message); + continue; + } + data_size = sizeof(ip->executable) - 1; + status = RegQueryValueEx(ip_key, NULL, NULL, &type, + (LPBYTE) ip->executable, &data_size); + RegCloseKey(ip_key); + if (status != ERROR_SUCCESS) { + winerror(status, message, MSGSIZE); + debug(L"%s\\%s: %s\n", key_name, ip_path, message); + continue; + } + if (type == REG_SZ) { + data_size = data_size / sizeof(wchar_t) - 1; /* for NUL */ + if (ip->executable[data_size - 1] == L'\\') + --data_size; /* reg value ended in a backslash */ + /* ip->executable is data_size long */ + for (checkp = location_checks; *checkp; ++checkp) { + check = *checkp; + _snwprintf_s(&ip->executable[data_size], + MAX_PATH - data_size, + MAX_PATH - data_size, + L"%s%s", check, PYTHON_EXECUTABLE); + attrs = GetFileAttributesW(ip->executable); + if (attrs == INVALID_FILE_ATTRIBUTES) { + winerror(GetLastError(), message, MSGSIZE); + debug(L"locate_pythons_for_key: %s: %s", + ip->executable, message); + } + else if (attrs & FILE_ATTRIBUTE_DIRECTORY) { + debug(L"locate_pythons_for_key: '%s' is a \ +directory\n", + ip->executable, attrs); + } + else if (find_existing_python(ip->executable)) { + debug(L"locate_pythons_for_key: %s: already \ +found: %s\n", ip->executable); + } + else { + /* check the executable type. 
*/ + ok = GetBinaryTypeW(ip->executable, &attrs); + if (!ok) { + debug(L"Failure getting binary type: %s\n", + ip->executable); + } + else { + if (attrs == SCS_64BIT_BINARY) + ip->bits = 64; + else if (attrs == SCS_32BIT_BINARY) + ip->bits = 32; + else + ip->bits = 0; + if (ip->bits == 0) { + debug(L"locate_pythons_for_key: %s: \ +invalid binary type: %X\n", + ip->executable, attrs); + } + else { + if (wcschr(ip->executable, L' ') != NULL) { + /* has spaces, so quote */ + n = wcslen(ip->executable); + memmove(&ip->executable[1], + ip->executable, n * sizeof(wchar_t)); + ip->executable[0] = L'\"'; + ip->executable[n + 1] = L'\"'; + ip->executable[n + 2] = L'\0'; + } + debug(L"locate_pythons_for_key: %s \ +is a %dbit executable\n", + ip->executable, ip->bits); + ++num_installed_pythons; + pip = ip++; + if (num_installed_pythons >= + MAX_INSTALLED_PYTHONS) + break; + /* Copy over the attributes for the next */ + *ip = *pip; + } + } + } + } + } + } + } + RegCloseKey(core_root); + } +} + +static int +compare_pythons(const void * p1, const void * p2) +{ + INSTALLED_PYTHON * ip1 = (INSTALLED_PYTHON *) p1; + INSTALLED_PYTHON * ip2 = (INSTALLED_PYTHON *) p2; + /* note reverse sorting on version */ + int result = wcscmp(ip2->version, ip1->version); + + if (result == 0) + result = ip2->bits - ip1->bits; /* 64 before 32 */ + return result; +} + +static void +locate_all_pythons() +{ +#if defined(_M_X64) + // If we are a 64bit process, first hit the 32bit keys. + debug(L"locating Pythons in 32bit registry\n"); + locate_pythons_for_key(HKEY_CURRENT_USER, KEY_READ | KEY_WOW64_32KEY); + locate_pythons_for_key(HKEY_LOCAL_MACHINE, KEY_READ | KEY_WOW64_32KEY); +#else + // If we are a 32bit process on a 64bit Windows, first hit the 64bit keys. + BOOL f64 = FALSE; + if (IsWow64Process(GetCurrentProcess(), &f64) && f64) { + debug(L"locating Pythons in 64bit registry\n"); + locate_pythons_for_key(HKEY_CURRENT_USER, KEY_READ | KEY_WOW64_64KEY); + locate_pythons_for_key(HKEY_LOCAL_MACHINE, KEY_READ | KEY_WOW64_64KEY); + } +#endif + // now hit the "native" key for this process bittedness. + debug(L"locating Pythons in native registry\n"); + locate_pythons_for_key(HKEY_CURRENT_USER, KEY_READ); + locate_pythons_for_key(HKEY_LOCAL_MACHINE, KEY_READ); + qsort(installed_pythons, num_installed_pythons, sizeof(INSTALLED_PYTHON), + compare_pythons); +} + +static INSTALLED_PYTHON * +find_python_by_version(wchar_t const * wanted_ver) +{ + INSTALLED_PYTHON * result = NULL; + INSTALLED_PYTHON * ip = installed_pythons; + size_t i, n; + size_t wlen = wcslen(wanted_ver); + int bits = 0; + + if (wcsstr(wanted_ver, L"-32")) + bits = 32; + for (i = 0; i < num_installed_pythons; i++, ip++) { + n = wcslen(ip->version); + if (n > wlen) + n = wlen; + if ((wcsncmp(ip->version, wanted_ver, n) == 0) && + /* bits == 0 => don't care */ + ((bits == 0) || (ip->bits == bits))) { + result = ip; + break; + } + } + return result; +} + + +static wchar_t appdata_ini_path[MAX_PATH]; +static wchar_t launcher_ini_path[MAX_PATH]; + +/* + * Get a value either from the environment or a configuration file. + * The key passed in will either be "python", "python2" or "python3". + */ +static wchar_t * +get_configured_value(wchar_t * key) +{ +/* + * Note: this static value is used to return a configured value + * obtained either from the environment or configuration file. + * This should be OK since there wouldn't be any concurrent calls. 
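+ * (The launcher runs single-threaded, so reusing one static buffer is safe.)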
+ */ + static wchar_t configured_value[MSGSIZE]; + wchar_t * result = NULL; + wchar_t * found_in = L"environment"; + DWORD size; + + /* First, search the environment. */ + _snwprintf_s(configured_value, MSGSIZE, _TRUNCATE, L"py_%s", key); + result = get_env(configured_value); + if (result == NULL && appdata_ini_path[0]) { + /* Not in environment: check local configuration. */ + size = GetPrivateProfileStringW(L"defaults", key, NULL, + configured_value, MSGSIZE, + appdata_ini_path); + if (size > 0) { + result = configured_value; + found_in = appdata_ini_path; + } + } + if (result == NULL && launcher_ini_path[0]) { + /* Not in environment or local: check global configuration. */ + size = GetPrivateProfileStringW(L"defaults", key, NULL, + configured_value, MSGSIZE, + launcher_ini_path); + if (size > 0) { + result = configured_value; + found_in = launcher_ini_path; + } + } + if (result) { + debug(L"found configured value '%s=%s' in %s\n", + key, result, found_in ? found_in : L"(unknown)"); + } else { + debug(L"found no configured value for '%s'\n", key); + } + return result; +} + +static INSTALLED_PYTHON * +locate_python(wchar_t * wanted_ver) +{ + static wchar_t config_key [] = { L"pythonX" }; + static wchar_t * last_char = &config_key[sizeof(config_key) / + sizeof(wchar_t) - 2]; + INSTALLED_PYTHON * result = NULL; + size_t n = wcslen(wanted_ver); + wchar_t * configured_value; + + if (num_installed_pythons == 0) + locate_all_pythons(); + + if (n == 1) { /* just major version specified */ + *last_char = *wanted_ver; + configured_value = get_configured_value(config_key); + if (configured_value != NULL) + wanted_ver = configured_value; + } + if (*wanted_ver) { + result = find_python_by_version(wanted_ver); + debug(L"search for Python version '%s' found ", wanted_ver); + if (result) { + debug(L"'%s'\n", result->executable); + } else { + debug(L"no interpreter\n"); + } + } + else { + *last_char = L'\0'; /* look for an overall default */ + configured_value = get_configured_value(config_key); + if (configured_value) + result = find_python_by_version(configured_value); + if (result == NULL) + result = find_python_by_version(L"2"); + if (result == NULL) + result = find_python_by_version(L"3"); + debug(L"search for default Python found "); + if (result) { + debug(L"version %s at '%s'\n", + result->version, result->executable); + } else { + debug(L"no interpreter\n"); + } + } + return result; +} + +/* + * Process creation code + */ + +static BOOL +safe_duplicate_handle(HANDLE in, HANDLE * pout) +{ + BOOL ok; + HANDLE process = GetCurrentProcess(); + DWORD rc; + + *pout = NULL; + ok = DuplicateHandle(process, in, process, pout, 0, TRUE, + DUPLICATE_SAME_ACCESS); + if (!ok) { + rc = GetLastError(); + if (rc == ERROR_INVALID_HANDLE) { + debug(L"DuplicateHandle returned ERROR_INVALID_HANDLE\n"); + ok = TRUE; + } + else { + debug(L"DuplicateHandle returned %d\n", rc); + } + } + return ok; +} + +static BOOL WINAPI +ctrl_c_handler(DWORD code) +{ + return TRUE; /* We just ignore all control events. 
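The child process shares this console and still receives Ctrl+C / Ctrl+Break,
so interruption is handled by Python itself while the launcher keeps
waiting for the child to exit.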
*/ +} + +static void +run_child(wchar_t * cmdline) +{ + HANDLE job; + JOBOBJECT_EXTENDED_LIMIT_INFORMATION info; + DWORD rc; + BOOL ok; + STARTUPINFOW si; + PROCESS_INFORMATION pi; + + debug(L"run_child: about to run '%s'\n", cmdline); + job = CreateJobObject(NULL, NULL); + ok = QueryInformationJobObject(job, JobObjectExtendedLimitInformation, + &info, sizeof(info), &rc); + if (!ok || (rc != sizeof(info)) || !job) + error(RC_CREATE_PROCESS, L"Job information querying failed"); + info.BasicLimitInformation.LimitFlags |= JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE | + JOB_OBJECT_LIMIT_SILENT_BREAKAWAY_OK; + ok = SetInformationJobObject(job, JobObjectExtendedLimitInformation, &info, + sizeof(info)); + if (!ok) + error(RC_CREATE_PROCESS, L"Job information setting failed"); + memset(&si, 0, sizeof(si)); + si.cb = sizeof(si); + ok = safe_duplicate_handle(GetStdHandle(STD_INPUT_HANDLE), &si.hStdInput); + if (!ok) + error(RC_NO_STD_HANDLES, L"stdin duplication failed"); + ok = safe_duplicate_handle(GetStdHandle(STD_OUTPUT_HANDLE), &si.hStdOutput); + if (!ok) + error(RC_NO_STD_HANDLES, L"stdout duplication failed"); + ok = safe_duplicate_handle(GetStdHandle(STD_ERROR_HANDLE), &si.hStdError); + if (!ok) + error(RC_NO_STD_HANDLES, L"stderr duplication failed"); + + ok = SetConsoleCtrlHandler(ctrl_c_handler, TRUE); + if (!ok) + error(RC_CREATE_PROCESS, L"control handler setting failed"); + + si.dwFlags = STARTF_USESTDHANDLES; + ok = CreateProcessW(NULL, cmdline, NULL, NULL, TRUE, + 0, NULL, NULL, &si, &pi); + if (!ok) + error(RC_CREATE_PROCESS, L"Unable to create process using '%s'", cmdline); + AssignProcessToJobObject(job, pi.hProcess); + CloseHandle(pi.hThread); + WaitForSingleObject(pi.hProcess, INFINITE); + ok = GetExitCodeProcess(pi.hProcess, &rc); + if (!ok) + error(RC_CREATE_PROCESS, L"Failed to get exit code of process"); + debug(L"child process exit code: %d\n", rc); + ExitProcess(rc); +} + +static void +invoke_child(wchar_t * executable, wchar_t * suffix, wchar_t * cmdline) +{ + wchar_t * child_command; + size_t child_command_size; + BOOL no_suffix = (suffix == NULL) || (*suffix == L'\0'); + BOOL no_cmdline = (*cmdline == L'\0'); + + if (no_suffix && no_cmdline) + run_child(executable); + else { + if (no_suffix) { + /* add 2 for space separator + terminating NUL. */ + child_command_size = wcslen(executable) + wcslen(cmdline) + 2; + } + else { + /* add 3 for 2 space separators + terminating NUL. */ + child_command_size = wcslen(executable) + wcslen(suffix) + + wcslen(cmdline) + 3; + } + child_command = calloc(child_command_size, sizeof(wchar_t)); + if (child_command == NULL) + error(RC_CREATE_PROCESS, L"unable to allocate %d bytes for child command.", + child_command_size); + if (no_suffix) + _snwprintf_s(child_command, child_command_size, + child_command_size - 1, L"%s %s", + executable, cmdline); + else + _snwprintf_s(child_command, child_command_size, + child_command_size - 1, L"%s %s %s", + executable, suffix, cmdline); + run_child(child_command); + free(child_command); + } +} + +static wchar_t * builtin_virtual_paths [] = { + L"/usr/bin/env python", + L"/usr/bin/python", + L"/usr/local/bin/python", + L"python", + NULL +}; + +/* For now, a static array of commands. 
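Each entry maps a command name (from the [commands] section of py.ini)
onto the command line used to run it; see read_config_file() below.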
*/ + +#define MAX_COMMANDS 100 + +typedef struct { + wchar_t key[MAX_PATH]; + wchar_t value[MSGSIZE]; +} COMMAND; + +static COMMAND commands[MAX_COMMANDS]; +static int num_commands = 0; + +#if defined(SKIP_PREFIX) + +static wchar_t * builtin_prefixes [] = { + /* These must be in an order that the longest matches should be found, + * i.e. if the prefix is "/usr/bin/env ", it should match that entry + * *before* matching "/usr/bin/". + */ + L"/usr/bin/env ", + L"/usr/bin/", + L"/usr/local/bin/", + NULL +}; + +static wchar_t * skip_prefix(wchar_t * name) +{ + wchar_t ** pp = builtin_prefixes; + wchar_t * result = name; + wchar_t * p; + size_t n; + + for (; p = *pp; pp++) { + n = wcslen(p); + if (_wcsnicmp(p, name, n) == 0) { + result += n; /* skip the prefix */ + if (p[n - 1] == L' ') /* No empty strings in table, so n > 1 */ + result = skip_whitespace(result); + break; + } + } + return result; +} + +#endif + +#if defined(SEARCH_PATH) + +static COMMAND path_command; + +static COMMAND * find_on_path(wchar_t * name) +{ + wchar_t * pathext; + size_t varsize; + wchar_t * context = NULL; + wchar_t * extension; + COMMAND * result = NULL; + DWORD len; + errno_t rc; + + wcscpy_s(path_command.key, MAX_PATH, name); + if (wcschr(name, L'.') != NULL) { + /* assume it has an extension. */ + len = SearchPathW(NULL, name, NULL, MSGSIZE, path_command.value, NULL); + if (len) { + result = &path_command; + } + } + else { + /* No extension - search using registered extensions. */ + rc = _wdupenv_s(&pathext, &varsize, L"PATHEXT"); + if (rc == 0) { + extension = wcstok_s(pathext, L";", &context); + while (extension) { + len = SearchPathW(NULL, name, extension, MSGSIZE, path_command.value, NULL); + if (len) { + result = &path_command; + break; + } + extension = wcstok_s(NULL, L";", &context); + } + free(pathext); + } + } + return result; +} + +#endif + +static COMMAND * find_command(wchar_t * name) +{ + COMMAND * result = NULL; + COMMAND * cp = commands; + int i; + + for (i = 0; i < num_commands; i++, cp++) { + if (_wcsicmp(cp->key, name) == 0) { + result = cp; + break; + } + } +#if defined(SEARCH_PATH) + if (result == NULL) + result = find_on_path(name); +#endif + return result; +} + +static void +update_command(COMMAND * cp, wchar_t * name, wchar_t * cmdline) +{ + wcsncpy_s(cp->key, MAX_PATH, name, _TRUNCATE); + wcsncpy_s(cp->value, MSGSIZE, cmdline, _TRUNCATE); +} + +static void +add_command(wchar_t * name, wchar_t * cmdline) +{ + if (num_commands >= MAX_COMMANDS) { + debug(L"can't add %s = '%s': no room\n", name, cmdline); + } + else { + COMMAND * cp = &commands[num_commands++]; + + update_command(cp, name, cmdline); + } +} + +static void +read_config_file(wchar_t * config_path) +{ + wchar_t keynames[MSGSIZE]; + wchar_t value[MSGSIZE]; + DWORD read; + wchar_t * key; + COMMAND * cp; + wchar_t * cmdp; + + read = GetPrivateProfileStringW(L"commands", NULL, NULL, keynames, MSGSIZE, + config_path); + if (read == MSGSIZE - 1) { + debug(L"read_commands: %s: not enough space for names\n", config_path); + } + key = keynames; + while (*key) { + read = GetPrivateProfileStringW(L"commands", key, NULL, value, MSGSIZE, + config_path); + if (read == MSGSIZE - 1) { + debug(L"read_commands: %s: not enough space for %s\n", + config_path, key); + } + cmdp = skip_whitespace(value); + if (*cmdp) { + cp = find_command(key); + if (cp == NULL) + add_command(key, value); + else + update_command(cp, key, value); + } + key += wcslen(key) + 1; + } +} + +static void read_commands() +{ + if (launcher_ini_path[0]) + 
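/* the global py.ini is read first, so entries in the per-user file read
   below can override it */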
read_config_file(launcher_ini_path); + if (appdata_ini_path[0]) + read_config_file(appdata_ini_path); +} + +static BOOL +parse_shebang(wchar_t * shebang_line, int nchars, wchar_t ** command, + wchar_t ** suffix) +{ + BOOL rc = FALSE; + wchar_t ** vpp; + size_t plen; + wchar_t * p; + wchar_t zapped; + wchar_t * endp = shebang_line + nchars - 1; + COMMAND * cp; + wchar_t * skipped; + + *command = NULL; /* failure return */ + *suffix = NULL; + + if ((*shebang_line++ == L'#') && (*shebang_line++ == L'!')) { + shebang_line = skip_whitespace(shebang_line); + if (*shebang_line) { + *command = shebang_line; + for (vpp = builtin_virtual_paths; *vpp; ++vpp) { + plen = wcslen(*vpp); + if (wcsncmp(shebang_line, *vpp, plen) == 0) { + rc = TRUE; + /* We can do this because all builtin commands contain + * "python". + */ + *command = wcsstr(shebang_line, L"python"); + break; + } + } + if (*vpp == NULL) { + /* + * Not found in builtins - look in customised commands. + * + * We can't permanently modify the shebang line in case + * it's not a customised command, but we can temporarily + * stick a NUL after the command while searching for it, + * then put back the char we zapped. + */ +#if defined(SKIP_PREFIX) + skipped = skip_prefix(shebang_line); +#else + skipped = shebang_line; +#endif + p = wcspbrk(skipped, L" \t\r\n"); + if (p != NULL) { + zapped = *p; + *p = L'\0'; + } + cp = find_command(skipped); + if (p != NULL) + *p = zapped; + if (cp != NULL) { + *command = cp->value; + if (p != NULL) + *suffix = skip_whitespace(p); + } + } + /* remove trailing whitespace */ + while ((endp > shebang_line) && isspace(*endp)) + --endp; + if (endp > shebang_line) + endp[1] = L'\0'; + } + } + return rc; +} + +/* #define CP_UTF8 65001 defined in winnls.h */ +#define CP_UTF16LE 1200 +#define CP_UTF16BE 1201 +#define CP_UTF32LE 12000 +#define CP_UTF32BE 12001 + +typedef struct { + int length; + char sequence[4]; + UINT code_page; +} BOM; + +/* + * Strictly, we don't need to handle UTF-16 anf UTF-32, since Python itself + * doesn't. Never mind, one day it might - there's no harm leaving it in. + */ +static BOM BOMs[] = { + { 3, { 0xEF, 0xBB, 0xBF }, CP_UTF8 }, /* UTF-8 - keep first */ + { 2, { 0xFF, 0xFE }, CP_UTF16LE }, /* UTF-16LE */ + { 2, { 0xFE, 0xFF }, CP_UTF16BE }, /* UTF-16BE */ + { 4, { 0xFF, 0xFE, 0x00, 0x00 }, CP_UTF32LE }, /* UTF-32LE */ + { 4, { 0x00, 0x00, 0xFE, 0xFF }, CP_UTF32BE }, /* UTF-32BE */ + { 0 } /* sentinel */ +}; + +static BOM * +find_BOM(char * buffer) +{ +/* + * Look for a BOM in the input and return a pointer to the + * corresponding structure, or NULL if not found. 
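+ * Entries in the BOMs table above are tried in order; the first
+ * byte-for-byte match (via strncmp) wins.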
+ */ + BOM * result = NULL; + BOM *bom; + + for (bom = BOMs; bom->length; bom++) { + if (strncmp(bom->sequence, buffer, bom->length) == 0) { + result = bom; + break; + } + } + return result; +} + +static char * +find_terminator(char * buffer, int len, BOM *bom) +{ + char * result = NULL; + char * end = buffer + len; + char * p; + char c; + int cp; + + for (p = buffer; p < end; p++) { + c = *p; + if (c == '\r') { + result = p; + break; + } + if (c == '\n') { + result = p; + break; + } + } + if (result != NULL) { + cp = bom->code_page; + + /* adjustments to include all bytes of the char */ + /* no adjustment needed for UTF-8 or big endian */ + if (cp == CP_UTF16LE) + ++result; + else if (cp == CP_UTF32LE) + result += 3; + ++result; /* point just past terminator */ + } + return result; +} + +static BOOL +validate_version(wchar_t * p) +{ + BOOL result = TRUE; + + if (!isdigit(*p)) /* expect major version */ + result = FALSE; + else if (*++p) { /* more to do */ + if (*p != L'.') /* major/minor separator */ + result = FALSE; + else { + ++p; + if (!isdigit(*p)) /* expect minor version */ + result = FALSE; + else { + ++p; + if (*p) { /* more to do */ + if (*p != L'-') + result = FALSE; + else { + ++p; + if ((*p != '3') && (*++p != '2') && !*++p) + result = FALSE; + } + } + } + } + } + return result; +} + +typedef struct { + unsigned short min; + unsigned short max; + wchar_t version[MAX_VERSION_SIZE]; +} PYC_MAGIC; + +static PYC_MAGIC magic_values[] = { + { 0xc687, 0xc687, L"2.0" }, + { 0xeb2a, 0xeb2a, L"2.1" }, + { 0xed2d, 0xed2d, L"2.2" }, + { 0xf23b, 0xf245, L"2.3" }, + { 0xf259, 0xf26d, L"2.4" }, + { 0xf277, 0xf2b3, L"2.5" }, + { 0xf2c7, 0xf2d1, L"2.6" }, + { 0xf2db, 0xf303, L"2.7" }, + { 0x0bb8, 0x0c3b, L"3.0" }, + { 0x0c45, 0x0c4f, L"3.1" }, + { 0x0c58, 0x0c6c, L"3.2" }, + { 0x0c76, 0x0c76, L"3.3" }, + { 0 } +}; + +static INSTALLED_PYTHON * +find_by_magic(unsigned short magic) +{ + INSTALLED_PYTHON * result = NULL; + PYC_MAGIC * mp; + + for (mp = magic_values; mp->min; mp++) { + if ((magic >= mp->min) && (magic <= mp->max)) { + result = locate_python(mp->version); + if (result != NULL) + break; + } + } + return result; +} + +static void +maybe_handle_shebang(wchar_t ** argv, wchar_t * cmdline) +{ +/* + * Look for a shebang line in the first argument. If found + * and we spawn a child process, this never returns. If it + * does return then we process the args "normally". + * + * argv[0] might be a filename with a shebang. 
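+ * Only the first BUFSIZE bytes are read, and a .pyc-style magic number is
+ * recognised as well, so byte-compiled scripts are routed to a matching
+ * interpreter version.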
+ */ + FILE * fp; + errno_t rc = _wfopen_s(&fp, *argv, L"rb"); + unsigned char buffer[BUFSIZE]; + wchar_t shebang_line[BUFSIZE + 1]; + size_t read; + char *p; + char * start; + char * shebang_alias = (char *) shebang_line; + BOM* bom; + int i, j, nchars = 0; + int header_len; + BOOL is_virt; + wchar_t * command; + wchar_t * suffix; + INSTALLED_PYTHON * ip; + + if (rc == 0) { + read = fread(buffer, sizeof(char), BUFSIZE, fp); + debug(L"maybe_handle_shebang: read %d bytes\n", read); + fclose(fp); + + if ((read >= 4) && (buffer[3] == '\n') && (buffer[2] == '\r')) { + ip = find_by_magic((buffer[1] << 8 | buffer[0]) & 0xFFFF); + if (ip != NULL) { + debug(L"script file is compiled against Python %s\n", + ip->version); + invoke_child(ip->executable, NULL, cmdline); + } + } + /* Look for BOM */ + bom = find_BOM(buffer); + if (bom == NULL) { + start = buffer; + debug(L"maybe_handle_shebang: BOM not found, using UTF-8\n"); + bom = BOMs; /* points to UTF-8 entry - the default */ + } + else { + debug(L"maybe_handle_shebang: BOM found, code page %d\n", + bom->code_page); + start = &buffer[bom->length]; + } + p = find_terminator(start, BUFSIZE, bom); + /* + * If no CR or LF was found in the heading, + * we assume it's not a shebang file. + */ + if (p == NULL) { + debug(L"maybe_handle_shebang: No line terminator found\n"); + } + else { + /* + * Found line terminator - parse the shebang. + * + * Strictly, we don't need to handle UTF-16 anf UTF-32, + * since Python itself doesn't. + * Never mind, one day it might. + */ + header_len = (int) (p - start); + switch(bom->code_page) { + case CP_UTF8: + nchars = MultiByteToWideChar(bom->code_page, + 0, + start, header_len, shebang_line, + BUFSIZE); + break; + case CP_UTF16BE: + if (header_len % 2 != 0) { + debug(L"maybe_handle_shebang: UTF-16BE, but an odd number \ +of bytes: %d\n", header_len); + /* nchars = 0; Not needed - initialised to 0. */ + } + else { + for (i = header_len; i > 0; i -= 2) { + shebang_alias[i - 1] = start[i - 2]; + shebang_alias[i - 2] = start[i - 1]; + } + nchars = header_len / sizeof(wchar_t); + } + break; + case CP_UTF16LE: + if ((header_len % 2) != 0) { + debug(L"UTF-16LE, but an odd number of bytes: %d\n", + header_len); + /* nchars = 0; Not needed - initialised to 0. */ + } + else { + /* no actual conversion needed. */ + memcpy(shebang_line, start, header_len); + nchars = header_len / sizeof(wchar_t); + } + break; + case CP_UTF32BE: + if (header_len % 4 != 0) { + debug(L"UTF-32BE, but not divisible by 4: %d\n", + header_len); + /* nchars = 0; Not needed - initialised to 0. */ + } + else { + for (i = header_len, j = header_len / 2; i > 0; i -= 4, + j -= 2) { + shebang_alias[j - 1] = start[i - 2]; + shebang_alias[j - 2] = start[i - 1]; + } + nchars = header_len / sizeof(wchar_t); + } + break; + case CP_UTF32LE: + if (header_len % 4 != 0) { + debug(L"UTF-32LE, but not divisible by 4: %d\n", + header_len); + /* nchars = 0; Not needed - initialised to 0. 
*/ + } + else { + for (i = header_len, j = header_len / 2; i > 0; i -= 4, + j -= 2) { + shebang_alias[j - 1] = start[i - 3]; + shebang_alias[j - 2] = start[i - 4]; + } + nchars = header_len / sizeof(wchar_t); + } + break; + } + if (nchars > 0) { + shebang_line[--nchars] = L'\0'; + is_virt = parse_shebang(shebang_line, nchars, &command, + &suffix); + if (command != NULL) { + debug(L"parse_shebang: found command: %s\n", command); + if (!is_virt) { + invoke_child(command, suffix, cmdline); + } + else { + suffix = wcschr(command, L' '); + if (suffix != NULL) { + *suffix++ = L'\0'; + suffix = skip_whitespace(suffix); + } + if (wcsncmp(command, L"python", 6)) + error(RC_BAD_VIRTUAL_PATH, L"Unknown virtual \ +path '%s'", command); + command += 6; /* skip past "python" */ + if (*command && !validate_version(command)) + error(RC_BAD_VIRTUAL_PATH, L"Invalid version \ +specification: '%s'.\nIn the first line of the script, 'python' needs to be \ +followed by a valid version specifier.\nPlease check the documentation.", + command); + /* TODO could call validate_version(command) */ + ip = locate_python(command); + if (ip == NULL) { + error(RC_NO_PYTHON, L"Requested Python version \ +(%s) is not installed", command); + } + else { + invoke_child(ip->executable, suffix, cmdline); + } + } + } + } + } + } +} + +static wchar_t * +skip_me(wchar_t * cmdline) +{ + BOOL quoted; + wchar_t c; + wchar_t * result = cmdline; + + quoted = cmdline[0] == L'\"'; + if (!quoted) + c = L' '; + else { + c = L'\"'; + ++result; + } + result = wcschr(result, c); + if (result == NULL) /* when, for example, just exe name on command line */ + result = L""; + else { + ++result; /* skip past space or closing quote */ + result = skip_whitespace(result); + } + return result; +} + +static DWORD version_high = 0; +static DWORD version_low = 0; + +static void +get_version_info(wchar_t * version_text, size_t size) +{ + WORD maj, min, rel, bld; + + if (!version_high && !version_low) + wcsncpy_s(version_text, size, L"0.1", _TRUNCATE); /* fallback */ + else { + maj = HIWORD(version_high); + min = LOWORD(version_high); + rel = HIWORD(version_low); + bld = LOWORD(version_low); + _snwprintf_s(version_text, size, _TRUNCATE, L"%d.%d.%d.%d", maj, + min, rel, bld); + } +} + +static int +process(int argc, wchar_t ** argv) +{ + wchar_t * wp; + wchar_t * command; + wchar_t * p; + int rc = 0; + size_t plen; + INSTALLED_PYTHON * ip; + BOOL valid; + DWORD size, attrs; + HRESULT hr; + wchar_t message[MSGSIZE]; + wchar_t version_text [MAX_PATH]; + void * version_data; + VS_FIXEDFILEINFO * file_info; + UINT block_size; + + wp = get_env(L"PYLAUNCH_DEBUG"); + if ((wp != NULL) && (*wp != L'\0')) + log_fp = stderr; + +#if defined(_M_X64) + debug(L"launcher build: 64bit\n"); +#else + debug(L"launcher build: 32bit\n"); +#endif +#if defined(_WINDOWS) + debug(L"launcher executable: Windows\n"); +#else + debug(L"launcher executable: Console\n"); +#endif + /* Get the local appdata folder (non-roaming) */ + hr = SHGetFolderPathW(NULL, CSIDL_LOCAL_APPDATA, + NULL, 0, appdata_ini_path); + if (hr != S_OK) { + debug(L"SHGetFolderPath failed: %X\n", hr); + appdata_ini_path[0] = L'\0'; + } + else { + plen = wcslen(appdata_ini_path); + p = &appdata_ini_path[plen]; + wcsncpy_s(p, MAX_PATH - plen, L"\\py.ini", _TRUNCATE); + attrs = GetFileAttributesW(appdata_ini_path); + if (attrs == INVALID_FILE_ATTRIBUTES) { + debug(L"File '%s' non-existent\n", appdata_ini_path); + appdata_ini_path[0] = L'\0'; + } else { + debug(L"Using local configuration file '%s'\n", appdata_ini_path); + 
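
The shebang handling above reduces a virtual command such as "#!/usr/bin/env python3.3-32" to whatever follows "python" (wcsncmp against L"python", then command += 6) and then runs validate_version() over it before calling locate_python(). A minimal standalone sketch of that reduction, with a simplified major[.minor][-32] check, follows; it is illustrative only, uses narrow strings instead of the launcher's wchar_t buffers, and the helper name spec_is_valid is invented here.

/* Sketch: reduce a "python" virtual shebang command to its version
 * specifier and validate it as major[.minor][-32].
 * Build and run with any C compiler, e.g.: cc sketch.c && ./a.out
 */
#include <ctype.h>
#include <stdio.h>
#include <string.h>

/* Accepts "", "3", "3.3" or "3.3-32" (simplified version check). */
static int spec_is_valid(const char *p)
{
    if (*p == '\0')
        return 1;                     /* bare "python": use the default */
    if (!isdigit((unsigned char)*p++))
        return 0;                     /* a major version digit is required */
    if (*p == '\0')
        return 1;
    if (*p++ != '.' || !isdigit((unsigned char)*p++))
        return 0;                     /* ".minor" must follow the dot */
    if (*p == '\0')
        return 1;
    return strcmp(p, "-32") == 0;     /* only a "-32" suffix may remain */
}

int main(void)
{
    const char *commands[] = { "python", "python3", "python3.3",
                               "python3.3-32", "python3x", "perl" };
    size_t i;

    for (i = 0; i < sizeof(commands) / sizeof(commands[0]); i++) {
        const char *cmd = commands[i];
        if (strncmp(cmd, "python", 6) != 0) {
            printf("%-13s -> not a python virtual command\n", cmd);
            continue;
        }
        cmd += 6;                     /* skip "python", keep the specifier */
        printf("%-13s -> spec \"%s\" (%s)\n", commands[i], cmd,
               spec_is_valid(cmd) ? "valid" : "invalid");
    }
    return 0;
}

An empty specifier is accepted because a bare "python" virtual command means "use the default interpreter", which matches the locate_python(L"") fallback later in process().
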
} + } + plen = GetModuleFileNameW(NULL, launcher_ini_path, MAX_PATH); + size = GetFileVersionInfoSizeW(launcher_ini_path, &size); + if (size == 0) { + winerror(GetLastError(), message, MSGSIZE); + debug(L"GetFileVersionInfoSize failed: %s\n", message); + } + else { + version_data = malloc(size); + if (version_data) { + valid = GetFileVersionInfoW(launcher_ini_path, 0, size, + version_data); + if (!valid) + debug(L"GetFileVersionInfo failed: %X\n", GetLastError()); + else { + valid = VerQueryValueW(version_data, L"\\", &file_info, + &block_size); + if (!valid) + debug(L"VerQueryValue failed: %X\n", GetLastError()); + else { + version_high = file_info->dwFileVersionMS; + version_low = file_info->dwFileVersionLS; + } + } + free(version_data); + } + } + p = wcsrchr(launcher_ini_path, L'\\'); + if (p == NULL) { + debug(L"GetModuleFileNameW returned value has no backslash: %s\n", + launcher_ini_path); + launcher_ini_path[0] = L'\0'; + } + else { + wcsncpy_s(p, MAX_PATH - (p - launcher_ini_path), L"\\py.ini", + _TRUNCATE); + attrs = GetFileAttributesW(launcher_ini_path); + if (attrs == INVALID_FILE_ATTRIBUTES) { + debug(L"File '%s' non-existent\n", launcher_ini_path); + launcher_ini_path[0] = L'\0'; + } else { + debug(L"Using global configuration file '%s'\n", launcher_ini_path); + } + } + + command = skip_me(GetCommandLineW()); + debug(L"Called with command line: %s", command); + if (argc <= 1) { + valid = FALSE; + p = NULL; + } + else { + p = argv[1]; + plen = wcslen(p); + if (p[0] != L'-') { + read_commands(); + maybe_handle_shebang(&argv[1], command); + } + /* No file with shebang, or an unrecognised shebang. + * Is the first arg a special version qualifier? + */ + valid = (*p == L'-') && validate_version(&p[1]); + if (valid) { + ip = locate_python(&p[1]); + if (ip == NULL) + error(RC_NO_PYTHON, L"Requested Python version (%s) not \ +installed", &p[1]); + command += wcslen(p); + command = skip_whitespace(command); + } + } + if (!valid) { + ip = locate_python(L""); + if (ip == NULL) + error(RC_NO_PYTHON, L"Can't find a default Python."); + if ((argc == 2) && (!_wcsicmp(p, L"-h") || !_wcsicmp(p, L"--help"))) { +#if defined(_M_X64) + BOOL canDo64bit = TRUE; +#else + // If we are a 32bit process on a 64bit Windows, first hit the 64bit keys. 
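
The version reporting above (get_version_info() together with the GetFileVersionInfoW / VerQueryValueW block earlier in process()) pulls the launcher's own four-part version out of the two packed DWORDs in VS_FIXEDFILEINFO. A small portable sketch of that unpacking, with a made-up 3.3.0.1 value standing in for what VerQueryValueW would return on Windows:

/* Sketch: unpack major.minor.release.build from dwFileVersionMS and
 * dwFileVersionLS; on Windows the HIWORD/LOWORD macros perform the
 * same shifts and masks.
 */
#include <stdio.h>
#include <stdint.h>

static void split_version(uint32_t ms, uint32_t ls, unsigned out[4])
{
    out[0] = (ms >> 16) & 0xFFFF;   /* HIWORD(dwFileVersionMS): major   */
    out[1] = ms & 0xFFFF;           /* LOWORD(dwFileVersionMS): minor   */
    out[2] = (ls >> 16) & 0xFFFF;   /* HIWORD(dwFileVersionLS): release */
    out[3] = ls & 0xFFFF;           /* LOWORD(dwFileVersionLS): build   */
}

int main(void)
{
    /* Hypothetical packed values for a 3.3.0.1 binary. */
    uint32_t ms = (3u << 16) | 3u;
    uint32_t ls = (0u << 16) | 1u;
    unsigned v[4];

    split_version(ms, ls, v);
    printf("%u.%u.%u.%u\n", v[0], v[1], v[2], v[3]);   /* prints 3.3.0.1 */
    return 0;
}
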
+ BOOL canDo64bit = FALSE; + IsWow64Process(GetCurrentProcess(), &canDo64bit); +#endif + + get_version_info(version_text, MAX_PATH); + fwprintf(stdout, L"\ +Python Launcher for Windows Version %s\n\n", version_text); + fwprintf(stdout, L"\ +usage: %s [ launcher-arguments ] script [ script-arguments ]\n\n", argv[0]); + fputws(L"\ +Launcher arguments:\n\n\ +-2 : Launch the latest Python 2.x version\n\ +-3 : Launch the latest Python 3.x version\n\ +-X.Y : Launch the specified Python version\n", stdout); + if (canDo64bit) { + fputws(L"\ +-X.Y-32: Launch the specified 32bit Python version", stdout); + } + fputws(L"\n\nThe following help text is from Python:\n\n", stdout); + fflush(stdout); + } + } + invoke_child(ip->executable, NULL, command); + return rc; +} + +#if defined(_WINDOWS) + +int WINAPI wWinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, + LPWSTR lpstrCmd, int nShow) +{ + return process(__argc, __wargv); +} + +#else + +int cdecl wmain(int argc, wchar_t ** argv) +{ + return process(argc, argv); +} + +#endif \ No newline at end of file diff --git a/PC/launcher.ico b/PC/launcher.ico new file mode 100644 index 0000000000000000000000000000000000000000..dad7d572ce781b7b0916ed669207f1ae3b9ad83c GIT binary patch [stripped] diff --git a/PC/pylauncher.rc b/PC/pylauncher.rc new file mode 100644 --- /dev/null +++ b/PC/pylauncher.rc @@ -0,0 +1,3 @@ +IDI_ICON1 ICON "launcher.ico" +IDI_ICON2 ICON "py.ico" +IDI_ICON3 ICON "pyc.ico" \ No newline at end of file -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 00:12:59 2012 From: python-checkins at python.org (brian.curtin) Date: Fri, 22 Jun 2012 00:12:59 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Initial_changes_to_get_the_?= =?utf8?q?py_launcher_building?= Message-ID: http://hg.python.org/cpython/rev/5aca2464262b changeset: 77541:5aca2464262b user: Brian Curtin date: Wed Jun 20 15:37:24 2012 -0500 summary: Initial changes to get the py launcher building files: PCbuild/pcbuild.sln | 14 +++ PCbuild/pylauncher.vcxproj | 83 ++++++++++++++++++ PCbuild/pylauncher.vcxproj.filters | 32 ++++++ 3 files changed, 129 insertions(+), 0 deletions(-) diff --git a/PCbuild/pcbuild.sln b/PCbuild/pcbuild.sln --- a/PCbuild/pcbuild.sln +++ b/PCbuild/pcbuild.sln @@ -68,6 +68,8 @@ EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_testbuffer", "_testbuffer.vcxproj", "{A2697BD3-28C1-4AEC-9106-8B748639FD16}" EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "pylauncher", "pylauncher.vcxproj", "{7B2727B5-5A3F-40EE-A866-43A13CD31446}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Win32 = Debug|Win32 @@ -565,6 +567,18 @@ {A2697BD3-28C1-4AEC-9106-8B748639FD16}.Release|Win32.Build.0 = Release|Win32 {A2697BD3-28C1-4AEC-9106-8B748639FD16}.Release|x64.ActiveCfg = Release|x64 {A2697BD3-28C1-4AEC-9106-8B748639FD16}.Release|x64.Build.0 = Release|x64 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Debug|Win32.ActiveCfg = Debug|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Debug|Win32.Build.0 = Debug|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Debug|x64.ActiveCfg = Debug|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.PGInstrument|Win32.ActiveCfg = Release|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.PGInstrument|Win32.Build.0 = Release|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.PGInstrument|x64.ActiveCfg = Release|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.PGUpdate|Win32.ActiveCfg = Release|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.PGUpdate|Win32.Build.0 
= Release|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.PGUpdate|x64.ActiveCfg = Release|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Release|Win32.ActiveCfg = Release|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Release|Win32.Build.0 = Release|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Release|x64.ActiveCfg = Release|Win32 EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/PCbuild/pylauncher.vcxproj b/PCbuild/pylauncher.vcxproj new file mode 100644 --- /dev/null +++ b/PCbuild/pylauncher.vcxproj @@ -0,0 +1,83 @@ +? + + + + Debug + Win32 + + + Release + Win32 + + + + {7B2727B5-5A3F-40EE-A866-43A13CD31446} + pylauncher + + + + Application + true + MultiByte + + + Application + false + true + MultiByte + + + + + + + + + + + + + + + py_d + + + + Level3 + Disabled + _CONSOLE;%(PreprocessorDefinitions) + + + true + version.lib;%(AdditionalDependencies) + false + Console + $(OutDir)py_d.exe + + + + + Level3 + MaxSpeed + true + true + + + true + true + true + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/PCbuild/pylauncher.vcxproj.filters b/PCbuild/pylauncher.vcxproj.filters new file mode 100644 --- /dev/null +++ b/PCbuild/pylauncher.vcxproj.filters @@ -0,0 +1,32 @@ +? + + + + {4FC737F1-C7A5-4376-A066-2A32D752A2FF} + cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx + + + {93995380-89BD-4b04-88EB-625FBE52EBFB} + h;hpp;hxx;hm;inl;inc;xsd + + + {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} + rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms + + + + + Source Files + + + + + Resource Files + + + + + Resource Files + + + \ No newline at end of file -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 00:13:00 2012 From: python-checkins at python.org (brian.curtin) Date: Fri, 22 Jun 2012 00:13:00 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Support_32-bit_release_buil?= =?utf8?q?ding=3A?= Message-ID: http://hg.python.org/cpython/rev/4610be3579ae changeset: 77542:4610be3579ae user: Brian Curtin date: Wed Jun 20 15:45:12 2012 -0500 summary: Support 32-bit release building: files: PCbuild/pylauncher.vcxproj | 11 ++++++++++- 1 files changed, 10 insertions(+), 1 deletions(-) diff --git a/PCbuild/pylauncher.vcxproj b/PCbuild/pylauncher.vcxproj --- a/PCbuild/pylauncher.vcxproj +++ b/PCbuild/pylauncher.vcxproj @@ -36,11 +36,16 @@ + + py_d + + py + Level3 @@ -52,7 +57,7 @@ version.lib;%(AdditionalDependencies) false Console - $(OutDir)py_d.exe + $(OutDir)$(TargetName)_d$(TargetExt) @@ -61,11 +66,15 @@ MaxSpeed true true + _CONSOLE;NDEBUG;%(PreprocessorDefinitions) true true true + false + version.lib;%(AdditionalDependencies) + Console -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 00:13:01 2012 From: python-checkins at python.org (brian.curtin) Date: Fri, 22 Jun 2012 00:13:01 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Get_64-bit_building?= Message-ID: http://hg.python.org/cpython/rev/965ca746438b changeset: 77543:965ca746438b user: Brian Curtin date: Wed Jun 20 15:55:04 2012 -0500 summary: Get 64-bit building files: PCbuild/pcbuild.sln | 6 +- PCbuild/pylauncher.vcxproj | 70 +++++++++++++++++++++++++- 2 files changed, 73 insertions(+), 3 deletions(-) diff --git a/PCbuild/pcbuild.sln b/PCbuild/pcbuild.sln --- a/PCbuild/pcbuild.sln +++ b/PCbuild/pcbuild.sln @@ -569,7 +569,8 @@ {A2697BD3-28C1-4AEC-9106-8B748639FD16}.Release|x64.Build.0 = Release|x64 
{7B2727B5-5A3F-40EE-A866-43A13CD31446}.Debug|Win32.ActiveCfg = Debug|Win32 {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Debug|Win32.Build.0 = Debug|Win32 - {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Debug|x64.ActiveCfg = Debug|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Debug|x64.ActiveCfg = Debug|x64 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Debug|x64.Build.0 = Debug|x64 {7B2727B5-5A3F-40EE-A866-43A13CD31446}.PGInstrument|Win32.ActiveCfg = Release|Win32 {7B2727B5-5A3F-40EE-A866-43A13CD31446}.PGInstrument|Win32.Build.0 = Release|Win32 {7B2727B5-5A3F-40EE-A866-43A13CD31446}.PGInstrument|x64.ActiveCfg = Release|Win32 @@ -578,7 +579,8 @@ {7B2727B5-5A3F-40EE-A866-43A13CD31446}.PGUpdate|x64.ActiveCfg = Release|Win32 {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Release|Win32.ActiveCfg = Release|Win32 {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Release|Win32.Build.0 = Release|Win32 - {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Release|x64.ActiveCfg = Release|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Release|x64.ActiveCfg = Release|x64 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Release|x64.Build.0 = Release|x64 EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/PCbuild/pylauncher.vcxproj b/PCbuild/pylauncher.vcxproj --- a/PCbuild/pylauncher.vcxproj +++ b/PCbuild/pylauncher.vcxproj @@ -5,10 +5,18 @@ Debug Win32 + + Debug + x64 + Release Win32 + + Release + x64 + {7B2727B5-5A3F-40EE-A866-43A13CD31446} @@ -20,12 +28,23 @@ true MultiByte + + Application + true + MultiByte + Application false true MultiByte + + Application + false + true + MultiByte + @@ -34,18 +53,36 @@ + + + + + + + + + + + + py_d + + py_d + py + + py + Level3 @@ -57,7 +94,21 @@ version.lib;%(AdditionalDependencies) false Console - $(OutDir)$(TargetName)_d$(TargetExt) + $(OutDir)$(TargetName)$(TargetExt) + + + + + Level3 + Disabled + _CONSOLE;%(PreprocessorDefinitions) + + + true + version.lib;%(AdditionalDependencies) + false + Console + $(OutDir)$(TargetName)$(TargetExt) @@ -77,6 +128,23 @@ Console + + + Level3 + MaxSpeed + true + true + _CONSOLE;NDEBUG;%(PreprocessorDefinitions) + + + true + true + true + false + version.lib;%(AdditionalDependencies) + Console + + -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 00:13:02 2012 From: python-checkins at python.org (brian.curtin) Date: Fri, 22 Jun 2012 00:13:02 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Add_the_pyw_launcher?= Message-ID: http://hg.python.org/cpython/rev/d184f53e261a changeset: 77544:d184f53e261a user: Brian Curtin date: Wed Jun 20 16:11:08 2012 -0500 summary: Add the pyw launcher files: PCbuild/pcbuild.sln | 16 ++++++++++++++++ 1 files changed, 16 insertions(+), 0 deletions(-) diff --git a/PCbuild/pcbuild.sln b/PCbuild/pcbuild.sln --- a/PCbuild/pcbuild.sln +++ b/PCbuild/pcbuild.sln @@ -70,6 +70,8 @@ EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "pylauncher", "pylauncher.vcxproj", "{7B2727B5-5A3F-40EE-A866-43A13CD31446}" EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "pywlauncher", "pywlauncher.vcxproj", "{1D4B18D3-7C12-4ECB-9179-8531FF876CE6}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Win32 = Debug|Win32 @@ -581,6 +583,20 @@ {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Release|Win32.Build.0 = Release|Win32 {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Release|x64.ActiveCfg = Release|x64 {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Release|x64.Build.0 = Release|x64 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Debug|Win32.ActiveCfg 
= Debug|Win32 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Debug|Win32.Build.0 = Debug|Win32 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Debug|x64.ActiveCfg = Debug|x64 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Debug|x64.Build.0 = Debug|x64 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.PGInstrument|Win32.ActiveCfg = Release|x64 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.PGInstrument|x64.ActiveCfg = Release|x64 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.PGInstrument|x64.Build.0 = Release|x64 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.PGUpdate|Win32.ActiveCfg = Release|x64 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.PGUpdate|x64.ActiveCfg = Release|x64 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.PGUpdate|x64.Build.0 = Release|x64 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Release|Win32.ActiveCfg = Release|Win32 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Release|Win32.Build.0 = Release|Win32 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Release|x64.ActiveCfg = Release|x64 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Release|x64.Build.0 = Release|x64 EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 00:13:03 2012 From: python-checkins at python.org (brian.curtin) Date: Fri, 22 Jun 2012 00:13:03 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Add_pywlauncher_project?= Message-ID: http://hg.python.org/cpython/rev/6ca5e1a9abff changeset: 77545:6ca5e1a9abff user: Brian Curtin date: Wed Jun 20 16:11:39 2012 -0500 summary: Add pywlauncher project files: PCbuild/pywlauncher.vcxproj | 160 ++++++++++++++++ PCbuild/pywlauncher.vcxproj.filters | 32 +++ 2 files changed, 192 insertions(+), 0 deletions(-) diff --git a/PCbuild/pywlauncher.vcxproj b/PCbuild/pywlauncher.vcxproj new file mode 100644 --- /dev/null +++ b/PCbuild/pywlauncher.vcxproj @@ -0,0 +1,160 @@ +? + + + + Debug + Win32 + + + Debug + x64 + + + Release + Win32 + + + Release + x64 + + + + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6} + pywlauncher + + + + Application + true + Unicode + + + Application + true + Unicode + + + Application + false + true + Unicode + + + Application + false + true + Unicode + + + + + + + + + + + + + + + + + + + + + + + + + + + + + pyw_d + + + pyw_d + + + pyw + + + pyw + + + + Level3 + Disabled + _WINDOWS;%(PreprocessorDefinitions) + + + true + version.lib;%(AdditionalDependencies) + false + Windows + $(OutDir)$(TargetName)$(TargetExt) + + + + + Level3 + Disabled + _WINDOWS;%(PreprocessorDefinitions) + + + true + version.lib;%(AdditionalDependencies) + false + Windows + $(OutDir)$(TargetName)$(TargetExt) + + + + + Level3 + MaxSpeed + true + true + _WINDOWS;NDEBUG;%(PreprocessorDefinitions) + + + true + true + true + false + version.lib;%(AdditionalDependencies) + Windows + + + + + Level3 + MaxSpeed + true + true + _WINDOWS;NDEBUG;%(PreprocessorDefinitions) + + + true + true + true + false + version.lib;%(AdditionalDependencies) + Windows + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/PCbuild/pywlauncher.vcxproj.filters b/PCbuild/pywlauncher.vcxproj.filters new file mode 100644 --- /dev/null +++ b/PCbuild/pywlauncher.vcxproj.filters @@ -0,0 +1,32 @@ +? 
+ + + + {4FC737F1-C7A5-4376-A066-2A32D752A2FF} + cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx + + + {93995380-89BD-4b04-88EB-625FBE52EBFB} + h;hpp;hxx;hm;inl;inc;xsd + + + {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} + rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms + + + + + Source Files + + + + + Resource Files + + + + + Resource Files + + + \ No newline at end of file -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 00:13:04 2012 From: python-checkins at python.org (brian.curtin) Date: Fri, 22 Jun 2012 00:13:04 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Add_associator?= Message-ID: http://hg.python.org/cpython/rev/e75a4a1f3ddf changeset: 77546:e75a4a1f3ddf user: Brian Curtin date: Wed Jun 20 22:48:54 2012 -0500 summary: Add associator files: PC/associator.c | 731 +++++++ PC/associator.h | 1480 ++++++++++++++++ PC/associator.rc | 97 + PCbuild/associator.vcxproj | 84 + PCbuild/associator.vcxproj.filters | 32 + PCbuild/pcbuild.sln | 14 + 6 files changed, 2438 insertions(+), 0 deletions(-) diff --git a/PC/associator.c b/PC/associator.c new file mode 100644 --- /dev/null +++ b/PC/associator.c @@ -0,0 +1,731 @@ +/* + * Copyright (C) 2011-2012 Vinay Sajip. All rights reserved. + * + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ +#include +#include +#include +#include "associator.h" + +#define PYTHON_EXECUTABLE L"python.exe" + +#define MSGSIZE 1024 +#define MAX_VERSION_SIZE 4 + +typedef struct { + wchar_t version[MAX_VERSION_SIZE]; /* m.n */ + int bits; /* 32 or 64 */ + wchar_t executable[MAX_PATH]; +} INSTALLED_PYTHON; + +/* + * To avoid messing about with heap allocations, just assume we can allocate + * statically and never have to deal with more versions than this. 
+ */ +#define MAX_INSTALLED_PYTHONS 100 + +static INSTALLED_PYTHON installed_pythons[MAX_INSTALLED_PYTHONS]; + +static size_t num_installed_pythons = 0; + +/* to hold SOFTWARE\Python\PythonCore\X.Y\InstallPath */ +#define IP_BASE_SIZE 40 +#define IP_SIZE (IP_BASE_SIZE + MAX_VERSION_SIZE) +#define CORE_PATH L"SOFTWARE\\Python\\PythonCore" + +static wchar_t * location_checks[] = { + L"\\", +/* + L"\\PCBuild\\", + L"\\PCBuild\\amd64\\", + */ + NULL +}; + +static wchar_t * +skip_whitespace(wchar_t * p) +{ + while (*p && isspace(*p)) + ++p; + return p; +} + +/* + * This function is here to minimise Visual Studio + * warnings about security implications of getenv, and to + * treat blank values as if they are absent. + */ +static wchar_t * get_env(wchar_t * key) +{ + wchar_t * result = _wgetenv(key); + + if (result) { + result = skip_whitespace(result); + if (*result == L'\0') + result = NULL; + } + return result; +} + +static FILE * log_fp = NULL; + +static void +debug(wchar_t * format, ...) +{ + va_list va; + + if (log_fp != NULL) { + va_start(va, format); + vfwprintf_s(log_fp, format, va); + } +} + +static void winerror(int rc, wchar_t * message, int size) +{ + FormatMessageW( + FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS, + NULL, rc, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), + message, size, NULL); +} + +static INSTALLED_PYTHON * +find_existing_python(wchar_t * path) +{ + INSTALLED_PYTHON * result = NULL; + size_t i; + INSTALLED_PYTHON * ip; + + for (i = 0, ip = installed_pythons; i < num_installed_pythons; i++, ip++) { + if (_wcsicmp(path, ip->executable) == 0) { + result = ip; + break; + } + } + return result; +} + +static void +locate_pythons_for_key(HKEY root, REGSAM flags) +{ + HKEY core_root, ip_key; + LSTATUS status = RegOpenKeyExW(root, CORE_PATH, 0, flags, &core_root); + wchar_t message[MSGSIZE]; + DWORD i; + size_t n; + BOOL ok; + DWORD type, data_size, attrs; + INSTALLED_PYTHON * ip, * pip; + wchar_t ip_path[IP_SIZE]; + wchar_t * check; + wchar_t ** checkp; + wchar_t *key_name = (root == HKEY_LOCAL_MACHINE) ? 
L"HKLM" : L"HKCU"; + + if (status != ERROR_SUCCESS) + debug(L"locate_pythons_for_key: unable to open PythonCore key in %s\n", + key_name); + else { + ip = &installed_pythons[num_installed_pythons]; + for (i = 0; num_installed_pythons < MAX_INSTALLED_PYTHONS; i++) { + status = RegEnumKeyW(core_root, i, ip->version, MAX_VERSION_SIZE); + if (status != ERROR_SUCCESS) { + if (status != ERROR_NO_MORE_ITEMS) { + /* unexpected error */ + winerror(status, message, MSGSIZE); + debug(L"Can't enumerate registry key for version %s: %s\n", + ip->version, message); + } + break; + } + else { + _snwprintf_s(ip_path, IP_SIZE, _TRUNCATE, + L"%s\\%s\\InstallPath", CORE_PATH, ip->version); + status = RegOpenKeyExW(root, ip_path, 0, flags, &ip_key); + if (status != ERROR_SUCCESS) { + winerror(status, message, MSGSIZE); + // Note: 'message' already has a trailing \n + debug(L"%s\\%s: %s", key_name, ip_path, message); + continue; + } + data_size = sizeof(ip->executable) - 1; + status = RegQueryValueEx(ip_key, NULL, NULL, &type, + (LPBYTE) ip->executable, &data_size); + RegCloseKey(ip_key); + if (status != ERROR_SUCCESS) { + winerror(status, message, MSGSIZE); + debug(L"%s\\%s: %s\n", key_name, ip_path, message); + continue; + } + if (type == REG_SZ) { + data_size = data_size / sizeof(wchar_t) - 1; /* for NUL */ + if (ip->executable[data_size - 1] == L'\\') + --data_size; /* reg value ended in a backslash */ + /* ip->executable is data_size long */ + for (checkp = location_checks; *checkp; ++checkp) { + check = *checkp; + _snwprintf_s(&ip->executable[data_size], + MAX_PATH - data_size, + MAX_PATH - data_size, + L"%s%s", check, PYTHON_EXECUTABLE); + attrs = GetFileAttributesW(ip->executable); + if (attrs == INVALID_FILE_ATTRIBUTES) { + winerror(GetLastError(), message, MSGSIZE); + debug(L"locate_pythons_for_key: %s: %s", + ip->executable, message); + } + else if (attrs & FILE_ATTRIBUTE_DIRECTORY) { + debug(L"locate_pythons_for_key: '%s' is a \ +directory\n", + ip->executable, attrs); + } + else if (find_existing_python(ip->executable)) { + debug(L"locate_pythons_for_key: %s: already \ +found: %s\n", ip->executable); + } + else { + /* check the executable type. 
*/ + ok = GetBinaryTypeW(ip->executable, &attrs); + if (!ok) { + debug(L"Failure getting binary type: %s\n", + ip->executable); + } + else { + if (attrs == SCS_64BIT_BINARY) + ip->bits = 64; + else if (attrs == SCS_32BIT_BINARY) + ip->bits = 32; + else + ip->bits = 0; + if (ip->bits == 0) { + debug(L"locate_pythons_for_key: %s: \ +invalid binary type: %X\n", + ip->executable, attrs); + } + else { + if (wcschr(ip->executable, L' ') != NULL) { + /* has spaces, so quote */ + n = wcslen(ip->executable); + memmove(&ip->executable[1], + ip->executable, n * sizeof(wchar_t)); + ip->executable[0] = L'\"'; + ip->executable[n + 1] = L'\"'; + ip->executable[n + 2] = L'\0'; + } + debug(L"locate_pythons_for_key: %s \ +is a %dbit executable\n", + ip->executable, ip->bits); + ++num_installed_pythons; + pip = ip++; + if (num_installed_pythons >= + MAX_INSTALLED_PYTHONS) + break; + /* Copy over the attributes for the next */ + *ip = *pip; + } + } + } + } + } + } + } + RegCloseKey(core_root); + } +} + +static int +compare_pythons(const void * p1, const void * p2) +{ + INSTALLED_PYTHON * ip1 = (INSTALLED_PYTHON *) p1; + INSTALLED_PYTHON * ip2 = (INSTALLED_PYTHON *) p2; + /* note reverse sorting on version */ + int result = wcscmp(ip2->version, ip1->version); + + if (result == 0) + result = ip2->bits - ip1->bits; /* 64 before 32 */ + return result; +} + +static void +locate_all_pythons() +{ +#if defined(_M_X64) + // If we are a 64bit process, first hit the 32bit keys. + debug(L"locating Pythons in 32bit registry\n"); + locate_pythons_for_key(HKEY_CURRENT_USER, KEY_READ | KEY_WOW64_32KEY); + locate_pythons_for_key(HKEY_LOCAL_MACHINE, KEY_READ | KEY_WOW64_32KEY); +#else + // If we are a 32bit process on a 64bit Windows, first hit the 64bit keys. + BOOL f64 = FALSE; + if (IsWow64Process(GetCurrentProcess(), &f64) && f64) { + debug(L"locating Pythons in 64bit registry\n"); + locate_pythons_for_key(HKEY_CURRENT_USER, KEY_READ | KEY_WOW64_64KEY); + locate_pythons_for_key(HKEY_LOCAL_MACHINE, KEY_READ | KEY_WOW64_64KEY); + } +#endif + // now hit the "native" key for this process bittedness. 
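
compare_pythons() above determines the order of installed_pythons once the registry walk has filled it: entries end up newest version first and, within a version, 64-bit before 32-bit (the source's own comments: "note reverse sorting on version", "64 before 32"). A standalone sketch of that ordering; the struct and names are simplified here, and the real code compares wide strings and sorts installed_pythons in place:

/* Sketch: qsort with a compare_pythons-style comparator. */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

typedef struct {
    char version[4];   /* "m.n", as in INSTALLED_PYTHON */
    int  bits;         /* 32 or 64 */
} PY;

static int compare_sketch(const void *p1, const void *p2)
{
    const PY *a = (const PY *) p1;
    const PY *b = (const PY *) p2;
    int result = strcmp(b->version, a->version);  /* reverse: newest first */

    if (result == 0)
        result = b->bits - a->bits;               /* 64 before 32 */
    return result;
}

int main(void)
{
    PY found[] = { { "2.7", 32 }, { "3.3", 32 }, { "3.3", 64 }, { "3.2", 64 } };
    size_t i, n = sizeof(found) / sizeof(found[0]);

    qsort(found, n, sizeof(PY), compare_sketch);
    for (i = 0; i < n; i++)
        printf("%s (%d-bit)\n", found[i].version, found[i].bits);
    /* prints: 3.3 (64-bit), 3.3 (32-bit), 3.2 (64-bit), 2.7 (32-bit) */
    return 0;
}
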
+ debug(L"locating Pythons in native registry\n"); + locate_pythons_for_key(HKEY_CURRENT_USER, KEY_READ); + locate_pythons_for_key(HKEY_LOCAL_MACHINE, KEY_READ); + qsort(installed_pythons, num_installed_pythons, sizeof(INSTALLED_PYTHON), + compare_pythons); +} + +typedef struct { + wchar_t * path; + wchar_t * key; + wchar_t * value; +} REGISTRY_ENTRY; + +static REGISTRY_ENTRY registry_entries[] = { + { L".py", NULL, L"Python.File" }, + { L".pyc", NULL, L"Python.CompiledFile" }, + { L".pyo", NULL, L"Python.CompiledFile" }, + { L".pyw", NULL, L"Python.NoConFile" }, + + { L"Python.CompiledFile", NULL, L"Compiled Python File" }, + { L"Python.CompiledFile\\DefaultIcon", NULL, L"pyc.ico" }, + { L"Python.CompiledFile\\shell\\open", NULL, L"Open" }, + { L"Python.CompiledFile\\shell\\open\\command", NULL, L"python.exe" }, + + { L"Python.File", NULL, L"Python File" }, + { L"Python.File\\DefaultIcon", NULL, L"py.ico" }, + { L"Python.File\\shell\\open", NULL, L"Open" }, + { L"Python.File\\shell\\open\\command", NULL, L"python.exe" }, + + { L"Python.NoConFile", NULL, L"Python File (no console)" }, + { L"Python.NoConFile\\DefaultIcon", NULL, L"py.ico" }, + { L"Python.NoConFile\\shell\\open", NULL, L"Open" }, + { L"Python.NoConFile\\shell\\open\\command", NULL, L"pythonw.exe" }, + + { NULL } +}; + +static BOOL +do_association(INSTALLED_PYTHON * ip) +{ + LONG rc; + BOOL result = TRUE; + REGISTRY_ENTRY * rp = registry_entries; + wchar_t value[MAX_PATH]; + wchar_t root[MAX_PATH]; + wchar_t message[MSGSIZE]; + wchar_t * pvalue; + HKEY hKey; + DWORD len; + + wcsncpy_s(root, MAX_PATH, ip->executable, _TRUNCATE); + pvalue = wcsrchr(root, '\\'); + if (pvalue) + *pvalue = L'\0'; + + for (; rp->path; ++rp) { + if (wcsstr(rp->path, L"DefaultIcon")) { + pvalue = value; + _snwprintf_s(value, MAX_PATH, _TRUNCATE, + L"%s\\DLLs\\%s", root, rp->value); + } + else if (wcsstr(rp->path, L"open\\command")) { + pvalue = value; + _snwprintf_s(value, MAX_PATH, _TRUNCATE, + L"%s\\%s \"%%1\" %%*", root, rp->value); + } + else { + pvalue = rp->value; + } + /* use rp->path, rp->key, pvalue */ + /* NOTE: size is in bytes */ + len = (DWORD) ((1 + wcslen(pvalue)) * sizeof(wchar_t)); + rc = RegOpenKeyEx(HKEY_CLASSES_ROOT, rp->path, 0, KEY_SET_VALUE, &hKey); + if (rc == ERROR_SUCCESS) { + rc = RegSetValueExW(hKey, rp->key, 0, REG_SZ, (LPBYTE) pvalue, len); + RegCloseKey(hKey); + } + if (rc != ERROR_SUCCESS) { + winerror(rc, message, MSGSIZE); + MessageBoxW(NULL, message, L"Unable to set file associations", MB_OK | MB_ICONSTOP); + result = FALSE; + break; + } + } + return result; +} + +static BOOL +associations_exist() +{ + BOOL result = FALSE; + REGISTRY_ENTRY * rp = registry_entries; + wchar_t buffer[MSGSIZE]; + LONG csize = MSGSIZE * sizeof(wchar_t); + LONG rc; + + /* Currently, if any is found, we assume they're all there. 
*/ + + for (; rp->path; ++rp) { + LONG size = csize; + rc = RegQueryValueW(HKEY_CLASSES_ROOT, rp->path, buffer, &size); + if (rc == ERROR_SUCCESS) { + result = TRUE; + break; + } + } + return result; +} + +/* --------------------------------------------------------------------*/ + +static BOOL CALLBACK +find_by_title(HWND hwnd, LPARAM lParam) +{ + wchar_t buffer[MSGSIZE]; + BOOL not_found = TRUE; + + wchar_t * p = (wchar_t *) GetWindowTextW(hwnd, buffer, MSGSIZE); + if (wcsstr(buffer, L"Python Launcher") == buffer) { + not_found = FALSE; + *((HWND *) lParam) = hwnd; + } + return not_found; +} + +static HWND +find_installer_window() +{ + HWND result = NULL; + BOOL found = EnumWindows(find_by_title, (LPARAM) &result); + + return result; +} + +static void +centre_window_in_front(HWND hwnd) +{ + HWND hwndParent; + RECT rect, rectP; + int width, height; + int screenwidth, screenheight; + int x, y; + + //make the window relative to its parent + + screenwidth = GetSystemMetrics(SM_CXSCREEN); + screenheight = GetSystemMetrics(SM_CYSCREEN); + + hwndParent = GetParent(hwnd); + + GetWindowRect(hwnd, &rect); + if (hwndParent) { + GetWindowRect(hwndParent, &rectP); + } + else { + rectP.left = rectP.top = 0; + rectP.right = screenwidth; + rectP.bottom = screenheight; + } + + width = rect.right - rect.left; + height = rect.bottom - rect.top; + + x = ((rectP.right-rectP.left) - width) / 2 + rectP.left; + y = ((rectP.bottom-rectP.top) - height) / 2 + rectP.top; + + + //make sure that the dialog box never moves outside of + //the screen + + if (x < 0) + x = 0; + + if (y < 0) + y = 0; + + if (x + width > screenwidth) + x = screenwidth - width; + if (y + height > screenheight) + y = screenheight - height; + + SetWindowPos(hwnd, HWND_TOPMOST, x, y, width, height, SWP_SHOWWINDOW); +} + +static void +init_list(HWND hList) +{ + LVCOLUMNW column; + LVITEMW item; + int colno = 0; + int width = 0; + int row; + size_t i; + INSTALLED_PYTHON * ip; + RECT r; + LPARAM style; + + GetClientRect(hList, &r); + + style = SendMessage(hList, LVM_GETEXTENDEDLISTVIEWSTYLE, 0, 0); + SendMessage(hList, LVM_SETEXTENDEDLISTVIEWSTYLE, + 0, style | LVS_EX_FULLROWSELECT); + + /* First set up the columns */ + memset(&column, 0, sizeof(column)); + column.mask = LVCF_TEXT | LVCF_WIDTH | LVCF_SUBITEM; + column.pszText = L"Version"; + column.cx = 60; + width += column.cx; + SendMessage(hList, LVM_INSERTCOLUMN, colno++,(LPARAM) &column); +#if defined(_M_X64) + column.pszText = L"Bits"; + column.cx = 40; + column.iSubItem = colno; + SendMessage(hList, LVM_INSERTCOLUMN, colno++,(LPARAM) &column); + width += column.cx; +#endif + column.pszText = L"Path"; + column.cx = r.right - r.top - width; + column.iSubItem = colno; + SendMessage(hList, LVM_INSERTCOLUMN, colno++,(LPARAM) &column); + + /* Then insert the rows */ + memset(&item, 0, sizeof(item)); + item.mask = LVIF_TEXT; + for (i = 0, ip = installed_pythons; i < num_installed_pythons; i++,ip++) { + item.iItem = (int) i; + item.iSubItem = 0; + item.pszText = ip->version; + colno = 0; + row = (int) SendMessage(hList, LVM_INSERTITEM, 0, (LPARAM) &item); +#if defined(_M_X64) + item.iSubItem = ++colno; + item.pszText = (ip->bits == 64) ? 
L"64": L"32"; + SendMessage(hList, LVM_SETITEM, row, (LPARAM) &item); +#endif + item.iSubItem = ++colno; + item.pszText = ip->executable; + SendMessage(hList, LVM_SETITEM, row, (LPARAM) &item); + } +} + +/* ----------------------------------------------------------------*/ + +typedef int (__stdcall *MSGBOXWAPI)(IN HWND hWnd, + IN LPCWSTR lpText, IN LPCWSTR lpCaption, + IN UINT uType, IN WORD wLanguageId, IN DWORD dwMilliseconds); + +int MessageBoxTimeoutW(IN HWND hWnd, IN LPCWSTR lpText, + IN LPCWSTR lpCaption, IN UINT uType, + IN WORD wLanguageId, IN DWORD dwMilliseconds); + +#define MB_TIMEDOUT 32000 + +int MessageBoxTimeoutW(HWND hWnd, LPCWSTR lpText, + LPCWSTR lpCaption, UINT uType, WORD wLanguageId, DWORD dwMilliseconds) +{ + static MSGBOXWAPI MsgBoxTOW = NULL; + + if (!MsgBoxTOW) { + HMODULE hUser32 = GetModuleHandleW(L"user32.dll"); + if (hUser32) + MsgBoxTOW = (MSGBOXWAPI)GetProcAddress(hUser32, + "MessageBoxTimeoutW"); + else { + //stuff happened, add code to handle it here + //(possibly just call MessageBox()) + return 0; + } + } + + if (MsgBoxTOW) + return MsgBoxTOW(hWnd, lpText, lpCaption, uType, wLanguageId, + dwMilliseconds); + + return 0; +} +/* ----------------------------------------------------------------*/ + +static INT_PTR CALLBACK +DialogProc(HWND hDlg, UINT message, WPARAM wParam, LPARAM lParam) +{ + HWND hList; + HWND hChild; + static int selected_index = -1; + WORD low = LOWORD(wParam); + wchar_t confirmation[MSGSIZE]; + BOOL result = FALSE; + + debug(L"DialogProc entry: 0x%02X\n", message); + switch (message) { + case WM_INITDIALOG: + hList = GetDlgItem(hDlg, IDC_LIST1); + init_list(hList); + SetFocus(hList); + result = TRUE; + break; + case WM_COMMAND: + if((low == IDOK) || (low == IDCANCEL)) { + HMODULE hUser32 = LoadLibraryW(L"user32.dll"); + + if (low == IDCANCEL) + wcsncpy_s(confirmation, MSGSIZE, L"No association was \ +performed.", _TRUNCATE); + else { + if (selected_index < 0) { + /* should never happen */ + wcsncpy_s(confirmation, MSGSIZE, L"The Python version to \ +associate with couldn't be determined.", _TRUNCATE); + } + else { + INSTALLED_PYTHON * ip = &installed_pythons[selected_index]; + + /* Do the association and set the message. 
*/ + do_association(ip); + _snwprintf_s(confirmation, MSGSIZE, _TRUNCATE, + L"Associated Python files with the Python %s \ +found at '%s'", ip->version, ip->executable); + } + } + + if (hUser32) { + MessageBoxTimeoutW(hDlg, + confirmation, + L"Association Status", + MB_OK | MB_SETFOREGROUND | + MB_ICONINFORMATION, + 0, 2000); + FreeLibrary(hUser32); + } + PostQuitMessage(0); + EndDialog(hDlg, 0); + result = TRUE; + } + break; + case WM_NOTIFY: + if (low == IDC_LIST1) { + NMLISTVIEW * p = (NMLISTVIEW *) lParam; + + if ((p->hdr.code == LVN_ITEMCHANGED) && + (p->uNewState & LVIS_SELECTED)) { + hChild = GetDlgItem(hDlg, IDOK); + selected_index = p->iItem; + EnableWindow(hChild, selected_index >= 0); + } + result = TRUE; + } + break; + case WM_DESTROY: + PostQuitMessage(0); + result = TRUE; + break; + case WM_CLOSE: + DestroyWindow(hDlg); + result = TRUE; + break; + } + debug(L"DialogProc exit: %d\n", result); + return result; +} + +int WINAPI wWinMain(HINSTANCE hInstance, + HINSTANCE hPrevInstance, + LPWSTR lpCmdLine, int nShow) +{ + MSG msg; + HWND hDialog = 0; + HICON hIcon; + HWND hParent; + int status; + DWORD dw; + INITCOMMONCONTROLSEX icx; + wchar_t * wp; + + wp = get_env(L"PYASSOC_DEBUG"); + if ((wp != NULL) && (*wp != L'\0')) { + fopen_s(&log_fp, "c:\\temp\\associator.log", "w"); + } + + if (!lpCmdLine) { + debug(L"No command line specified.\n"); + return 0; + } + if (!wcsstr(lpCmdLine, L"nocheck") && + associations_exist()) /* Could have been restored by uninstall. */ + return 0; + + locate_all_pythons(); + + if (num_installed_pythons == 0) + return 0; + + debug(L"%d pythons found.\n", num_installed_pythons); + + /* + * OK, now there's something to do. + * + * We need to find the installer window to be the parent of + * our dialog, otherwise our dialog will be behind it. + * + * First, initialize common controls. If you don't - on + * some machines it works fine, on others the dialog never + * appears! + */ + + icx.dwSize = sizeof(icx); + icx.dwICC = ICC_LISTVIEW_CLASSES; + InitCommonControlsEx(&icx); + + hParent = find_installer_window(); + debug(L"installer window: %X\n", hParent); + hDialog = CreateDialogW(hInstance, MAKEINTRESOURCE(DLG_MAIN), hParent, + DialogProc); + dw = GetLastError(); + debug(L"dialog created: %X: error: %X\n", hDialog, dw); + + if (!hDialog) + { + wchar_t buf [100]; + _snwprintf_s(buf, 100, _TRUNCATE, L"Error 0x%x", GetLastError()); + MessageBoxW(0, buf, L"CreateDialog", MB_ICONEXCLAMATION | MB_OK); + return 1; + } + + centre_window_in_front(hDialog); + hIcon = LoadIcon( GetModuleHandle(NULL), MAKEINTRESOURCE(DLG_ICON)); + if( hIcon ) + { + SendMessage(hDialog, WM_SETICON, ICON_BIG, (LPARAM) hIcon); + SendMessage(hDialog, WM_SETICON, ICON_SMALL, (LPARAM) hIcon); + DestroyIcon(hIcon); + } + + while ((status = GetMessage (& msg, 0, 0, 0)) != 0) + { + if (status == -1) + return -1; + if (!IsDialogMessage(hDialog, & msg)) + { + TranslateMessage( & msg ); + DispatchMessage( & msg ); + } + } + + return (int) msg.wParam; +} diff --git a/PC/associator.h b/PC/associator.h new file mode 100644 --- /dev/null +++ b/PC/associator.h @@ -0,0 +1,1480 @@ +//{{NO_DEPENDENCIES}} +// Microsoft Visual C++ generated include file. 
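
For a concrete picture of what the associator changes: the registry_entries table and do_association() in associator.c above map extensions to ProgIDs (.py to Python.File, and so on) and fill in each ProgID's DefaultIcon and shell\open\command values, pointing the icon under the install directory's DLLs folder and expanding the command to <install>\python.exe "%1" %*. A minimal portable sketch that prints those values instead of calling RegSetValueExW; the C:\Python33 root and the narrow-string types are illustrative only:

/* Sketch: what do_association() would write under HKEY_CLASSES_ROOT
 * for a few representative registry_entries rows.
 */
#include <stdio.h>
#include <string.h>

typedef struct {
    const char *path;    /* subkey under HKEY_CLASSES_ROOT */
    const char *value;   /* literal value, icon name or executable */
} ENTRY;

static const ENTRY entries[] = {
    { ".py",                               "Python.File" },
    { "Python.File",                       "Python File" },
    { "Python.File\\DefaultIcon",          "py.ico" },
    { "Python.File\\shell\\open\\command", "python.exe" },
    { NULL, NULL }
};

int main(void)
{
    const char *root = "C:\\Python33";     /* hypothetical install dir */
    char value[260];
    const ENTRY *rp;

    for (rp = entries; rp->path; rp++) {
        if (strstr(rp->path, "DefaultIcon"))
            snprintf(value, sizeof(value), "%s\\DLLs\\%s", root, rp->value);
        else if (strstr(rp->path, "open\\command"))
            snprintf(value, sizeof(value), "%s\\%s \"%%1\" %%*", root, rp->value);
        else
            snprintf(value, sizeof(value), "%s", rp->value);
        printf("HKCR\\%-34s = %s\n", rp->path, value);
    }
    return 0;
}
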
+// Used by main.rc +// +#define SW_HIDE 0 +#define HIDE_WINDOW 0 +#define WM_NULL 0x0000 +#define WA_INACTIVE 0 +#define HTNOWHERE 0 +#define SMTO_NORMAL 0x0000 +#define ICON_SMALL 0 +#define SIZE_RESTORED 0 +#define BN_CLICKED 0 +#define BST_UNCHECKED 0x0000 +#define HDS_HORZ 0x0000 +#define TBSTYLE_BUTTON 0x0000 +#define TBS_HORZ 0x0000 +#define TBS_BOTTOM 0x0000 +#define TBS_RIGHT 0x0000 +#define LVS_ICON 0x0000 +#define LVS_ALIGNTOP 0x0000 +#define TCS_TABS 0x0000 +#define TCS_SINGLELINE 0x0000 +#define TCS_RIGHTJUSTIFY 0x0000 +#define DTS_SHORTDATEFORMAT 0x0000 +#define PGS_VERT 0x00000000 +#define LANG_NEUTRAL 0x00 +#define SUBLANG_NEUTRAL 0x00 +#define SORT_DEFAULT 0x0 +#define SORT_JAPANESE_XJIS 0x0 +#define SORT_CHINESE_BIG5 0x0 +#define SORT_CHINESE_PRCP 0x0 +#define SORT_KOREAN_KSC 0x0 +#define SORT_HUNGARIAN_DEFAULT 0x0 +#define SORT_GEORGIAN_TRADITIONAL 0x0 +#define _USE_DECLSPECS_FOR_SAL 0 +#define SW_SHOWNORMAL 1 +#define SW_NORMAL 1 +#define SHOW_OPENWINDOW 1 +#define SW_PARENTCLOSING 1 +#define VK_LBUTTON 0x01 +#define WM_CREATE 0x0001 +#define WA_ACTIVE 1 +#define PWR_OK 1 +#define PWR_SUSPENDREQUEST 1 +#define NFR_ANSI 1 +#define UIS_SET 1 +#define UISF_HIDEFOCUS 0x1 +#define XBUTTON1 0x0001 +#define WMSZ_LEFT 1 +#define HTCLIENT 1 +#define SMTO_BLOCK 0x0001 +#define MA_ACTIVATE 1 +#define ICON_BIG 1 +#define SIZE_MINIMIZED 1 +#define MK_LBUTTON 0x0001 +#define TME_HOVER 0x00000001 +#define CS_VREDRAW 0x0001 +#define CF_TEXT 1 +#define SCF_ISSECURE 0x00000001 +#define IDOK 1 +#define BN_PAINT 1 +#define BST_CHECKED 0x0001 +#define TBSTYLE_SEP 0x0001 +#define TTS_ALWAYSTIP 0x01 +#define TBS_AUTOTICKS 0x0001 +#define UDS_WRAP 0x0001 +#define PBS_SMOOTH 0x01 +#define LWS_TRANSPARENT 0x0001 +#define LVS_REPORT 0x0001 +#define TVS_HASBUTTONS 0x0001 +#define TCS_SCROLLOPPOSITE 0x0001 +#define ACS_CENTER 0x0001 +#define MCS_DAYSTATE 0x0001 +#define DTS_UPDOWN 0x0001 +#define PGS_HORZ 0x00000001 +#define NFS_EDIT 0x0001 +#define BCSIF_GLYPH 0x0001 +#define BCSS_NOSPLIT 0x0001 +#define LANG_ARABIC 0x01 +#define SUBLANG_DEFAULT 0x01 +#define SUBLANG_AFRIKAANS_SOUTH_AFRICA 0x01 +#define SUBLANG_ALBANIAN_ALBANIA 0x01 +#define SUBLANG_ALSATIAN_FRANCE 0x01 +#define SUBLANG_AMHARIC_ETHIOPIA 0x01 +#define SUBLANG_ARABIC_SAUDI_ARABIA 0x01 +#define SUBLANG_ARMENIAN_ARMENIA 0x01 +#define SUBLANG_ASSAMESE_INDIA 0x01 +#define SUBLANG_AZERI_LATIN 0x01 +#define SUBLANG_BASHKIR_RUSSIA 0x01 +#define SUBLANG_BASQUE_BASQUE 0x01 +#define SUBLANG_BELARUSIAN_BELARUS 0x01 +#define SUBLANG_BENGALI_INDIA 0x01 +#define SUBLANG_BRETON_FRANCE 0x01 +#define SUBLANG_BULGARIAN_BULGARIA 0x01 +#define SUBLANG_CATALAN_CATALAN 0x01 +#define SUBLANG_CHINESE_TRADITIONAL 0x01 +#define SUBLANG_CORSICAN_FRANCE 0x01 +#define SUBLANG_CZECH_CZECH_REPUBLIC 0x01 +#define SUBLANG_CROATIAN_CROATIA 0x01 +#define SUBLANG_DANISH_DENMARK 0x01 +#define SUBLANG_DARI_AFGHANISTAN 0x01 +#define SUBLANG_DIVEHI_MALDIVES 0x01 +#define SUBLANG_DUTCH 0x01 +#define SUBLANG_ENGLISH_US 0x01 +#define SUBLANG_ESTONIAN_ESTONIA 0x01 +#define SUBLANG_FAEROESE_FAROE_ISLANDS 0x01 +#define SUBLANG_FILIPINO_PHILIPPINES 0x01 +#define SUBLANG_FINNISH_FINLAND 0x01 +#define SUBLANG_FRENCH 0x01 +#define SUBLANG_FRISIAN_NETHERLANDS 0x01 +#define SUBLANG_GALICIAN_GALICIAN 0x01 +#define SUBLANG_GEORGIAN_GEORGIA 0x01 +#define SUBLANG_GERMAN 0x01 +#define SUBLANG_GREEK_GREECE 0x01 +#define SUBLANG_GREENLANDIC_GREENLAND 0x01 +#define SUBLANG_GUJARATI_INDIA 0x01 +#define SUBLANG_HAUSA_NIGERIA_LATIN 0x01 +#define SUBLANG_HEBREW_ISRAEL 0x01 +#define 
SUBLANG_HINDI_INDIA 0x01 +#define SUBLANG_HUNGARIAN_HUNGARY 0x01 +#define SUBLANG_ICELANDIC_ICELAND 0x01 +#define SUBLANG_IGBO_NIGERIA 0x01 +#define SUBLANG_INDONESIAN_INDONESIA 0x01 +#define SUBLANG_INUKTITUT_CANADA 0x01 +#define SUBLANG_ITALIAN 0x01 +#define SUBLANG_JAPANESE_JAPAN 0x01 +#define SUBLANG_KANNADA_INDIA 0x01 +#define SUBLANG_KAZAK_KAZAKHSTAN 0x01 +#define SUBLANG_KHMER_CAMBODIA 0x01 +#define SUBLANG_KICHE_GUATEMALA 0x01 +#define SUBLANG_KINYARWANDA_RWANDA 0x01 +#define SUBLANG_KONKANI_INDIA 0x01 +#define SUBLANG_KOREAN 0x01 +#define SUBLANG_KYRGYZ_KYRGYZSTAN 0x01 +#define SUBLANG_LAO_LAO 0x01 +#define SUBLANG_LATVIAN_LATVIA 0x01 +#define SUBLANG_LITHUANIAN 0x01 +#define SUBLANG_LUXEMBOURGISH_LUXEMBOURG 0x01 +#define SUBLANG_MACEDONIAN_MACEDONIA 0x01 +#define SUBLANG_MALAY_MALAYSIA 0x01 +#define SUBLANG_MALAYALAM_INDIA 0x01 +#define SUBLANG_MALTESE_MALTA 0x01 +#define SUBLANG_MAORI_NEW_ZEALAND 0x01 +#define SUBLANG_MAPUDUNGUN_CHILE 0x01 +#define SUBLANG_MARATHI_INDIA 0x01 +#define SUBLANG_MOHAWK_MOHAWK 0x01 +#define SUBLANG_MONGOLIAN_CYRILLIC_MONGOLIA 0x01 +#define SUBLANG_NEPALI_NEPAL 0x01 +#define SUBLANG_NORWEGIAN_BOKMAL 0x01 +#define SUBLANG_OCCITAN_FRANCE 0x01 +#define SUBLANG_ORIYA_INDIA 0x01 +#define SUBLANG_PASHTO_AFGHANISTAN 0x01 +#define SUBLANG_PERSIAN_IRAN 0x01 +#define SUBLANG_POLISH_POLAND 0x01 +#define SUBLANG_PORTUGUESE_BRAZILIAN 0x01 +#define SUBLANG_PUNJABI_INDIA 0x01 +#define SUBLANG_QUECHUA_BOLIVIA 0x01 +#define SUBLANG_ROMANIAN_ROMANIA 0x01 +#define SUBLANG_ROMANSH_SWITZERLAND 0x01 +#define SUBLANG_RUSSIAN_RUSSIA 0x01 +#define SUBLANG_SAMI_NORTHERN_NORWAY 0x01 +#define SUBLANG_SANSKRIT_INDIA 0x01 +#define SUBLANG_SERBIAN_CROATIA 0x01 +#define SUBLANG_SINDHI_INDIA 0x01 +#define SUBLANG_SINHALESE_SRI_LANKA 0x01 +#define SUBLANG_SOTHO_NORTHERN_SOUTH_AFRICA 0x01 +#define SUBLANG_SLOVAK_SLOVAKIA 0x01 +#define SUBLANG_SLOVENIAN_SLOVENIA 0x01 +#define SUBLANG_SPANISH 0x01 +#define SUBLANG_SWAHILI_KENYA 0x01 +#define SUBLANG_SWEDISH 0x01 +#define SUBLANG_SYRIAC_SYRIA 0x01 +#define SUBLANG_TAJIK_TAJIKISTAN 0x01 +#define SUBLANG_TAMIL_INDIA 0x01 +#define SUBLANG_TATAR_RUSSIA 0x01 +#define SUBLANG_TELUGU_INDIA 0x01 +#define SUBLANG_THAI_THAILAND 0x01 +#define SUBLANG_TIBETAN_PRC 0x01 +#define SUBLANG_TSWANA_SOUTH_AFRICA 0x01 +#define SUBLANG_TURKISH_TURKEY 0x01 +#define SUBLANG_TURKMEN_TURKMENISTAN 0x01 +#define SUBLANG_UIGHUR_PRC 0x01 +#define SUBLANG_UKRAINIAN_UKRAINE 0x01 +#define SUBLANG_UPPER_SORBIAN_GERMANY 0x01 +#define SUBLANG_URDU_PAKISTAN 0x01 +#define SUBLANG_UZBEK_LATIN 0x01 +#define SUBLANG_VIETNAMESE_VIETNAM 0x01 +#define SUBLANG_WELSH_UNITED_KINGDOM 0x01 +#define SUBLANG_WOLOF_SENEGAL 0x01 +#define SUBLANG_XHOSA_SOUTH_AFRICA 0x01 +#define SUBLANG_YAKUT_RUSSIA 0x01 +#define SUBLANG_YI_PRC 0x01 +#define SUBLANG_YORUBA_NIGERIA 0x01 +#define SUBLANG_ZULU_SOUTH_AFRICA 0x01 +#define SORT_INVARIANT_MATH 0x1 +#define SORT_JAPANESE_UNICODE 0x1 +#define SORT_CHINESE_UNICODE 0x1 +#define SORT_KOREAN_UNICODE 0x1 +#define SORT_GERMAN_PHONE_BOOK 0x1 +#define SORT_HUNGARIAN_TECHNICAL 0x1 +#define SORT_GEORGIAN_MODERN 0x1 +#define VS_VERSION_INFO 1 +#define VFFF_ISSHAREDFILE 0x0001 +#define VFF_CURNEDEST 0x0001 +#define VIFF_FORCEINSTALL 0x0001 +#define SW_SHOWMINIMIZED 2 +#define SHOW_ICONWINDOW 2 +#define SW_OTHERZOOM 2 +#define VK_RBUTTON 0x02 +#define WM_DESTROY 0x0002 +#define WA_CLICKACTIVE 2 +#define PWR_SUSPENDRESUME 2 +#define NFR_UNICODE 2 +#define UIS_CLEAR 2 +#define UISF_HIDEACCEL 0x2 +#define XBUTTON2 0x0002 +#define WMSZ_RIGHT 2 +#define HTCAPTION 2 
+#define SMTO_ABORTIFHUNG 0x0002 +#define MA_ACTIVATEANDEAT 2 +#define ICON_SMALL2 2 +#define SIZE_MAXIMIZED 2 +#define MK_RBUTTON 0x0002 +#define TME_LEAVE 0x00000002 +#define CS_HREDRAW 0x0002 +#define CF_BITMAP 2 +#define IDCANCEL 2 +#define BN_HILITE 2 +#define BST_INDETERMINATE 0x0002 +#define HDS_BUTTONS 0x0002 +#define TBSTYLE_CHECK 0x0002 +#define TTS_NOPREFIX 0x02 +#define TBS_VERT 0x0002 +#define UDS_SETBUDDYINT 0x0002 +#define LWS_IGNORERETURN 0x0002 +#define LVS_SMALLICON 0x0002 +#define TVS_HASLINES 0x0002 +#define TVS_EX_MULTISELECT 0x0002 +#define TCS_BOTTOM 0x0002 +#define TCS_RIGHT 0x0002 +#define ACS_TRANSPARENT 0x0002 +#define MCS_MULTISELECT 0x0002 +#define DTS_SHOWNONE 0x0002 +#define PGS_AUTOSCROLL 0x00000002 +#define NFS_STATIC 0x0002 +#define BCSIF_IMAGE 0x0002 +#define BCSS_STRETCH 0x0002 +#define LANG_BULGARIAN 0x02 +#define SUBLANG_SYS_DEFAULT 0x02 +#define SUBLANG_ARABIC_IRAQ 0x02 +#define SUBLANG_AZERI_CYRILLIC 0x02 +#define SUBLANG_BENGALI_BANGLADESH 0x02 +#define SUBLANG_CHINESE_SIMPLIFIED 0x02 +#define SUBLANG_DUTCH_BELGIAN 0x02 +#define SUBLANG_ENGLISH_UK 0x02 +#define SUBLANG_FRENCH_BELGIAN 0x02 +#define SUBLANG_GERMAN_SWISS 0x02 +#define SUBLANG_INUKTITUT_CANADA_LATIN 0x02 +#define SUBLANG_IRISH_IRELAND 0x02 +#define SUBLANG_ITALIAN_SWISS 0x02 +#define SUBLANG_KASHMIRI_SASIA 0x02 +#define SUBLANG_KASHMIRI_INDIA 0x02 +#define SUBLANG_LOWER_SORBIAN_GERMANY 0x02 +#define SUBLANG_MALAY_BRUNEI_DARUSSALAM 0x02 +#define SUBLANG_MONGOLIAN_PRC 0x02 +#define SUBLANG_NEPALI_INDIA 0x02 +#define SUBLANG_NORWEGIAN_NYNORSK 0x02 +#define SUBLANG_PORTUGUESE 0x02 +#define SUBLANG_QUECHUA_ECUADOR 0x02 +#define SUBLANG_SAMI_NORTHERN_SWEDEN 0x02 +#define SUBLANG_SERBIAN_LATIN 0x02 +#define SUBLANG_SINDHI_PAKISTAN 0x02 +#define SUBLANG_SINDHI_AFGHANISTAN 0x02 +#define SUBLANG_SPANISH_MEXICAN 0x02 +#define SUBLANG_SWEDISH_FINLAND 0x02 +#define SUBLANG_TAMAZIGHT_ALGERIA_LATIN 0x02 +#define SUBLANG_TIGRIGNA_ERITREA 0x02 +#define SUBLANG_URDU_INDIA 0x02 +#define SUBLANG_UZBEK_CYRILLIC 0x02 +#define SORT_CHINESE_PRC 0x2 +#define VFF_FILEINUSE 0x0002 +#define VIFF_DONTDELETEOLD 0x0002 +#define SW_SHOWMAXIMIZED 3 +#define SW_MAXIMIZE 3 +#define SHOW_FULLSCREEN 3 +#define SW_PARENTOPENING 3 +#define VK_CANCEL 0x03 +#define WM_MOVE 0x0003 +#define PWR_CRITICALRESUME 3 +#define NF_QUERY 3 +#define UIS_INITIALIZE 3 +#define WMSZ_TOP 3 +#define HTSYSMENU 3 +#define MA_NOACTIVATE 3 +#define SIZE_MAXSHOW 3 +#define CF_METAFILEPICT 3 +#define IDABORT 3 +#define BN_UNHILITE 3 +#define LVS_LIST 0x0003 +#define LVS_TYPEMASK 0x0003 +#define LANG_CATALAN 0x03 +#define SUBLANG_CUSTOM_DEFAULT 0x03 +#define SUBLANG_ARABIC_EGYPT 0x03 +#define SUBLANG_CHINESE_HONGKONG 0x03 +#define SUBLANG_ENGLISH_AUS 0x03 +#define SUBLANG_FRENCH_CANADIAN 0x03 +#define SUBLANG_GERMAN_AUSTRIAN 0x03 +#define SUBLANG_QUECHUA_PERU 0x03 +#define SUBLANG_SAMI_NORTHERN_FINLAND 0x03 +#define SUBLANG_SERBIAN_CYRILLIC 0x03 +#define SUBLANG_SPANISH_MODERN 0x03 +#define SORT_CHINESE_BOPOMOFO 0x3 +#define SW_SHOWNOACTIVATE 4 +#define SHOW_OPENNOACTIVATE 4 +#define SW_OTHERUNZOOM 4 +#define VK_MBUTTON 0x04 +#define NF_REQUERY 4 +#define UISF_ACTIVE 0x4 +#define WMSZ_TOPLEFT 4 +#define HTGROWBOX 4 +#define MA_NOACTIVATEANDEAT 4 +#define SIZE_MAXHIDE 4 +#define MK_SHIFT 0x0004 +#define CF_SYLK 4 +#define IDRETRY 4 +#define BN_DISABLE 4 +#define BST_PUSHED 0x0004 +#define HDS_HOTTRACK 0x0004 +#define TBSTYLE_GROUP 0x0004 +#define TBS_TOP 0x0004 +#define TBS_LEFT 0x0004 +#define UDS_ALIGNRIGHT 0x0004 +#define PBS_VERTICAL 0x04 +#define 
LWS_NOPREFIX 0x0004 +#define LVS_SINGLESEL 0x0004 +#define TVS_LINESATROOT 0x0004 +#define TVS_EX_DOUBLEBUFFER 0x0004 +#define TCS_MULTISELECT 0x0004 +#define ACS_AUTOPLAY 0x0004 +#define MCS_WEEKNUMBERS 0x0004 +#define DTS_LONGDATEFORMAT 0x0004 +#define PGS_DRAGNDROP 0x00000004 +#define NFS_LISTCOMBO 0x0004 +#define BCSIF_STYLE 0x0004 +#define BCSS_ALIGNLEFT 0x0004 +#define LANG_CHINESE 0x04 +#define LANG_CHINESE_SIMPLIFIED 0x04 +#define SUBLANG_CUSTOM_UNSPECIFIED 0x04 +#define SUBLANG_ARABIC_LIBYA 0x04 +#define SUBLANG_CHINESE_SINGAPORE 0x04 +#define SUBLANG_CROATIAN_BOSNIA_HERZEGOVINA_LATIN 0x04 +#define SUBLANG_ENGLISH_CAN 0x04 +#define SUBLANG_FRENCH_SWISS 0x04 +#define SUBLANG_GERMAN_LUXEMBOURG 0x04 +#define SUBLANG_SAMI_LULE_NORWAY 0x04 +#define SUBLANG_SPANISH_GUATEMALA 0x04 +#define SORT_JAPANESE_RADICALSTROKE 0x4 +#define VFF_BUFFTOOSMALL 0x0004 +#define SW_SHOW 5 +#define VK_XBUTTON1 0x05 +#define WM_SIZE 0x0005 +#define WMSZ_TOPRIGHT 5 +#define HTMENU 5 +#define CF_DIF 5 +#define IDIGNORE 5 +#define BN_DOUBLECLICKED 5 +#define LANG_CZECH 0x05 +#define SUBLANG_UI_CUSTOM_DEFAULT 0x05 +#define SUBLANG_ARABIC_ALGERIA 0x05 +#define SUBLANG_BOSNIAN_BOSNIA_HERZEGOVINA_LATIN 0x05 +#define SUBLANG_CHINESE_MACAU 0x05 +#define SUBLANG_ENGLISH_NZ 0x05 +#define SUBLANG_FRENCH_LUXEMBOURG 0x05 +#define SUBLANG_GERMAN_LIECHTENSTEIN 0x05 +#define SUBLANG_SAMI_LULE_SWEDEN 0x05 +#define SUBLANG_SPANISH_COSTA_RICA 0x05 +#define SW_MINIMIZE 6 +#define VK_XBUTTON2 0x06 +#define WM_ACTIVATE 0x0006 +#define WMSZ_BOTTOM 6 +#define HTHSCROLL 6 +#define CF_TIFF 6 +#define IDYES 6 +#define BN_SETFOCUS 6 +#define LANG_DANISH 0x06 +#define SUBLANG_ARABIC_MOROCCO 0x06 +#define SUBLANG_ENGLISH_EIRE 0x06 +#define SUBLANG_FRENCH_MONACO 0x06 +#define SUBLANG_SAMI_SOUTHERN_NORWAY 0x06 +#define SUBLANG_SERBIAN_BOSNIA_HERZEGOVINA_LATIN 0x06 +#define SUBLANG_SPANISH_PANAMA 0x06 +#define SW_SHOWMINNOACTIVE 7 +#define WM_SETFOCUS 0x0007 +#define WMSZ_BOTTOMLEFT 7 +#define HTVSCROLL 7 +#define CF_OEMTEXT 7 +#define IDNO 7 +#define BN_KILLFOCUS 7 +#define LANG_GERMAN 0x07 +#define SUBLANG_ARABIC_TUNISIA 0x07 +#define SUBLANG_ENGLISH_SOUTH_AFRICA 0x07 +#define SUBLANG_SAMI_SOUTHERN_SWEDEN 0x07 +#define SUBLANG_SERBIAN_BOSNIA_HERZEGOVINA_CYRILLIC 0x07 +#define SUBLANG_SPANISH_DOMINICAN_REPUBLIC 0x07 +#define SW_SHOWNA 8 +#define VK_BACK 0x08 +#define WM_KILLFOCUS 0x0008 +#define WMSZ_BOTTOMRIGHT 8 +#define HTMINBUTTON 8 +#define SMTO_NOTIMEOUTIFNOTHUNG 0x0008 +#define MK_CONTROL 0x0008 +#define CS_DBLCLKS 0x0008 +#define CF_DIB 8 +#define IDCLOSE 8 +#define BST_FOCUS 0x0008 +#define HDS_HIDDEN 0x0008 +#define TBSTYLE_DROPDOWN 0x0008 +#define TBS_BOTH 0x0008 +#define UDS_ALIGNLEFT 0x0008 +#define PBS_MARQUEE 0x08 +#define LWS_USEVISUALSTYLE 0x0008 +#define LVS_SHOWSELALWAYS 0x0008 +#define TVS_EDITLABELS 0x0008 +#define TVS_EX_NOINDENTSTATE 0x0008 +#define TCS_FLATBUTTONS 0x0008 +#define ACS_TIMER 0x0008 +#define MCS_NOTODAYCIRCLE 0x0008 +#define NFS_BUTTON 0x0008 +#define BCSIF_SIZE 0x0008 +#define BCSS_IMAGE 0x0008 +#define LANG_GREEK 0x08 +#define SUBLANG_ARABIC_OMAN 0x08 +#define SUBLANG_BOSNIAN_BOSNIA_HERZEGOVINA_CYRILLIC 0x08 +#define SUBLANG_ENGLISH_JAMAICA 0x08 +#define SUBLANG_SAMI_SKOLT_FINLAND 0x08 +#define SUBLANG_SPANISH_VENEZUELA 0x08 +#define SW_RESTORE 9 +#define VK_TAB 0x09 +#define HTMAXBUTTON 9 +#define CF_PALETTE 9 +#define IDHELP 9 +#define DTS_TIMEFORMAT 0x0009 +#define LANG_ENGLISH 0x09 +#define SUBLANG_ARABIC_YEMEN 0x09 +#define SUBLANG_ENGLISH_CARIBBEAN 0x09 +#define SUBLANG_SAMI_INARI_FINLAND 0x09 
+#define SUBLANG_SPANISH_COLOMBIA 0x09 +#define SW_SHOWDEFAULT 10 +#define WM_ENABLE 0x000A +#define HTLEFT 10 +#define CF_PENDATA 10 +#define IDTRYAGAIN 10 +#define HELP_CONTEXTMENU 0x000a +#define LANG_SPANISH 0x0a +#define SUBLANG_ARABIC_SYRIA 0x0a +#define SUBLANG_ENGLISH_BELIZE 0x0a +#define SUBLANG_SPANISH_PERU 0x0a +#define SW_FORCEMINIMIZE 11 +#define SW_MAX 11 +#define WM_SETREDRAW 0x000B +#define HTRIGHT 11 +#define CF_RIFF 11 +#define IDCONTINUE 11 +#define HELP_FINDER 0x000b +#define LANG_FINNISH 0x0b +#define SUBLANG_ARABIC_JORDAN 0x0b +#define SUBLANG_ENGLISH_TRINIDAD 0x0b +#define SUBLANG_SPANISH_ARGENTINA 0x0b +#define VK_CLEAR 0x0C +#define WM_SETTEXT 0x000C +#define HTTOP 12 +#define CF_WAVE 12 +#define HELP_WM_HELP 0x000c +#define DTS_SHORTDATECENTURYFORMAT 0x000C +#define LANG_FRENCH 0x0c +#define SUBLANG_ARABIC_LEBANON 0x0c +#define SUBLANG_ENGLISH_ZIMBABWE 0x0c +#define SUBLANG_SPANISH_ECUADOR 0x0c +#define VK_RETURN 0x0D +#define WM_GETTEXT 0x000D +#define HTTOPLEFT 13 +#define CF_UNICODETEXT 13 +#define HELP_SETPOPUP_POS 0x000d +#define LANG_HEBREW 0x0d +#define SUBLANG_ARABIC_KUWAIT 0x0d +#define SUBLANG_ENGLISH_PHILIPPINES 0x0d +#define SUBLANG_SPANISH_CHILE 0x0d +#define WM_GETTEXTLENGTH 0x000E +#define HTTOPRIGHT 14 +#define CF_ENHMETAFILE 14 +#define LANG_HUNGARIAN 0x0e +#define SUBLANG_ARABIC_UAE 0x0e +#define SUBLANG_SPANISH_URUGUAY 0x0e +#define WM_PAINT 0x000F +#define HTBOTTOM 15 +#define CF_HDROP 15 +#define LANG_ICELANDIC 0x0f +#define SUBLANG_ARABIC_BAHRAIN 0x0f +#define SUBLANG_SPANISH_PARAGUAY 0x0f +#define VK_SHIFT 0x10 +#define WM_CLOSE 0x0010 +#define HTBOTTOMLEFT 16 +#define WVR_ALIGNTOP 0x0010 +#define MK_MBUTTON 0x0010 +#define TME_NONCLIENT 0x00000010 +#define CF_LOCALE 16 +#define HELP_TCARD_DATA 0x0010 +#define TBSTYLE_AUTOSIZE 0x0010 +#define TTS_NOANIMATE 0x10 +#define TBS_NOTICKS 0x0010 +#define UDS_AUTOBUDDY 0x0010 +#define PBS_SMOOTHREVERSE 0x10 +#define LWS_USECUSTOMTEXT 0x0010 +#define LVS_SORTASCENDING 0x0010 +#define TVS_DISABLEDRAGDROP 0x0010 +#define TVS_EX_RICHTOOLTIP 0x0010 +#define TCS_FORCEICONLEFT 0x0010 +#define MCS_NOTODAY 0x0010 +#define DTS_APPCANPARSE 0x0010 +#define NFS_ALL 0x0010 +#define LANG_ITALIAN 0x10 +#define SUBLANG_ARABIC_QATAR 0x10 +#define SUBLANG_ENGLISH_INDIA 0x10 +#define SUBLANG_SPANISH_BOLIVIA 0x10 +#define VK_CONTROL 0x11 +#define WM_QUERYENDSESSION 0x0011 +#define HTBOTTOMRIGHT 17 +#define CF_DIBV5 17 +#define HELP_TCARD_OTHER_CALLER 0x0011 +#define LANG_JAPANESE 0x11 +#define SUBLANG_ENGLISH_MALAYSIA 0x11 +#define SUBLANG_SPANISH_EL_SALVADOR 0x11 +#define VK_MENU 0x12 +#define WM_QUIT 0x0012 +#define HTBORDER 18 +#define CF_MAX 18 +#define LANG_KOREAN 0x12 +#define SUBLANG_ENGLISH_SINGAPORE 0x12 +#define SUBLANG_SPANISH_HONDURAS 0x12 +#define VK_PAUSE 0x13 +#define WM_QUERYOPEN 0x0013 +#define HTOBJECT 19 +#define LANG_DUTCH 0x13 +#define SUBLANG_SPANISH_NICARAGUA 0x13 +#define VK_CAPITAL 0x14 +#define WM_ERASEBKGND 0x0014 +#define HTCLOSE 20 +#define LANG_NORWEGIAN 0x14 +#define SUBLANG_SPANISH_PUERTO_RICO 0x14 +#define VK_KANA 0x15 +#define VK_HANGEUL 0x15 +#define VK_HANGUL 0x15 +#define WM_SYSCOLORCHANGE 0x0015 +#define HTHELP 21 +#define LANG_POLISH 0x15 +#define SUBLANG_SPANISH_US 0x15 +#define WM_ENDSESSION 0x0016 +#define LANG_PORTUGUESE 0x16 +#define VK_JUNJA 0x17 +#define LANG_ROMANSH 0x17 +#define VK_FINAL 0x18 +#define WM_SHOWWINDOW 0x0018 +#define LANG_ROMANIAN 0x18 +#define VK_HANJA 0x19 +#define VK_KANJI 0x19 +#define LANG_RUSSIAN 0x19 +#define WM_WININICHANGE 0x001A +#define 
LANG_BOSNIAN 0x1a +#define LANG_CROATIAN 0x1a +#define LANG_SERBIAN 0x1a +#define VK_ESCAPE 0x1B +#define WM_DEVMODECHANGE 0x001B +#define LANG_SLOVAK 0x1b +#define VK_CONVERT 0x1C +#define WM_ACTIVATEAPP 0x001C +#define LANG_ALBANIAN 0x1c +#define VK_NONCONVERT 0x1D +#define WM_FONTCHANGE 0x001D +#define LANG_SWEDISH 0x1d +#define VK_ACCEPT 0x1E +#define WM_TIMECHANGE 0x001E +#define LANG_THAI 0x1e +#define VK_MODECHANGE 0x1F +#define WM_CANCELMODE 0x001F +#define LANG_TURKISH 0x1f +#define VK_SPACE 0x20 +#define WM_SETCURSOR 0x0020 +#define SMTO_ERRORONEXIT 0x0020 +#define WVR_ALIGNLEFT 0x0020 +#define MK_XBUTTON1 0x0020 +#define CS_OWNDC 0x0020 +#define TBSTYLE_NOPREFIX 0x0020 +#define TTS_NOFADE 0x20 +#define TBS_ENABLESELRANGE 0x0020 +#define UDS_ARROWKEYS 0x0020 +#define LWS_RIGHT 0x0020 +#define LVS_SORTDESCENDING 0x0020 +#define TVS_SHOWSELALWAYS 0x0020 +#define TVS_EX_AUTOHSCROLL 0x0020 +#define TCS_FORCELABELLEFT 0x0020 +#define DTS_RIGHTALIGN 0x0020 +#define NFS_USEFONTASSOC 0x0020 +#define LANG_URDU 0x20 +#define VK_PRIOR 0x21 +#define WM_MOUSEACTIVATE 0x0021 +#define LANG_INDONESIAN 0x21 +#define VK_NEXT 0x22 +#define WM_CHILDACTIVATE 0x0022 +#define LANG_UKRAINIAN 0x22 +#define VK_END 0x23 +#define WM_QUEUESYNC 0x0023 +#define LANG_BELARUSIAN 0x23 +#define VK_HOME 0x24 +#define WM_GETMINMAXINFO 0x0024 +#define LANG_SLOVENIAN 0x24 +#define VK_LEFT 0x25 +#define LANG_ESTONIAN 0x25 +#define VK_UP 0x26 +#define WM_PAINTICON 0x0026 +#define LANG_LATVIAN 0x26 +#define VK_RIGHT 0x27 +#define WM_ICONERASEBKGND 0x0027 +#define LANG_LITHUANIAN 0x27 +#define VK_DOWN 0x28 +#define WM_NEXTDLGCTL 0x0028 +#define LANG_TAJIK 0x28 +#define VK_SELECT 0x29 +#define LANG_FARSI 0x29 +#define LANG_PERSIAN 0x29 +#define VK_PRINT 0x2A +#define WM_SPOOLERSTATUS 0x002A +#define LANG_VIETNAMESE 0x2a +#define VK_EXECUTE 0x2B +#define WM_DRAWITEM 0x002B +#define LANG_ARMENIAN 0x2b +#define VK_SNAPSHOT 0x2C +#define WM_MEASUREITEM 0x002C +#define LANG_AZERI 0x2c +#define VK_INSERT 0x2D +#define WM_DELETEITEM 0x002D +#define LANG_BASQUE 0x2d +#define VK_DELETE 0x2E +#define WM_VKEYTOITEM 0x002E +#define LANG_LOWER_SORBIAN 0x2e +#define LANG_UPPER_SORBIAN 0x2e +#define VK_HELP 0x2F +#define WM_CHARTOITEM 0x002F +#define LANG_MACEDONIAN 0x2f +#define WM_SETFONT 0x0030 +#define WM_GETFONT 0x0031 +#define WM_SETHOTKEY 0x0032 +#define LANG_TSWANA 0x32 +#define WM_GETHOTKEY 0x0033 +#define LANG_XHOSA 0x34 +#define LANG_ZULU 0x35 +#define LANG_AFRIKAANS 0x36 +#define WM_QUERYDRAGICON 0x0037 +#define LANG_GEORGIAN 0x37 +#define LANG_FAEROESE 0x38 +#define WM_COMPAREITEM 0x0039 +#define LANG_HINDI 0x39 +#define LANG_MALTESE 0x3a +#define LANG_SAMI 0x3b +#define LANG_IRISH 0x3c +#define WM_GETOBJECT 0x003D +#define LANG_MALAY 0x3e +#define LANG_KAZAK 0x3f +#define WVR_ALIGNBOTTOM 0x0040 +#define MK_XBUTTON2 0x0040 +#define CS_CLASSDC 0x0040 +#define HDS_DRAGDROP 0x0040 +#define BTNS_SHOWTEXT 0x0040 +#define TTS_BALLOON 0x40 +#define TBS_FIXEDLENGTH 0x0040 +#define UDS_HORZ 0x0040 +#define LVS_SHAREIMAGELISTS 0x0040 +#define TVS_RTLREADING 0x0040 +#define TVS_EX_FADEINOUTEXPANDOS 0x0040 +#define TCS_HOTTRACK 0x0040 +#define MCS_NOTRAILINGDATES 0x0040 +#define LANG_KYRGYZ 0x40 +#define WM_COMPACTING 0x0041 +#define LANG_SWAHILI 0x41 +#define LANG_TURKMEN 0x42 +#define LANG_UZBEK 0x43 +#define WM_COMMNOTIFY 0x0044 +#define LANG_TATAR 0x44 +#define LANG_BENGALI 0x45 +#define WM_WINDOWPOSCHANGING 0x0046 +#define LANG_PUNJABI 0x46 +#define WM_WINDOWPOSCHANGED 0x0047 +#define LANG_GUJARATI 0x47 +#define WM_POWER 0x0048 
+#define LANG_ORIYA 0x48 +#define LANG_TAMIL 0x49 +#define WM_COPYDATA 0x004A +#define LANG_TELUGU 0x4a +#define WM_CANCELJOURNAL 0x004B +#define LANG_KANNADA 0x4b +#define LANG_MALAYALAM 0x4c +#define LANG_ASSAMESE 0x4d +#define WM_NOTIFY 0x004E +#define LANG_MARATHI 0x4e +#define LANG_SANSKRIT 0x4f +#define WM_INPUTLANGCHANGEREQUEST 0x0050 +#define LANG_MONGOLIAN 0x50 +#define WM_INPUTLANGCHANGE 0x0051 +#define LANG_TIBETAN 0x51 +#define WM_TCARD 0x0052 +#define LANG_WELSH 0x52 +#define WM_HELP 0x0053 +#define LANG_KHMER 0x53 +#define WM_USERCHANGED 0x0054 +#define LANG_LAO 0x54 +#define WM_NOTIFYFORMAT 0x0055 +#define LANG_GALICIAN 0x56 +#define LANG_KONKANI 0x57 +#define LANG_MANIPURI 0x58 +#define LANG_SINDHI 0x59 +#define LANG_SYRIAC 0x5a +#define VK_LWIN 0x5B +#define LANG_SINHALESE 0x5b +#define VK_RWIN 0x5C +#define VK_APPS 0x5D +#define LANG_INUKTITUT 0x5d +#define LANG_AMHARIC 0x5e +#define VK_SLEEP 0x5F +#define LANG_TAMAZIGHT 0x5f +#define VK_NUMPAD0 0x60 +#define LANG_KASHMIRI 0x60 +#define VK_NUMPAD1 0x61 +#define LANG_NEPALI 0x61 +#define VK_NUMPAD2 0x62 +#define LANG_FRISIAN 0x62 +#define VK_NUMPAD3 0x63 +#define LANG_PASHTO 0x63 +#define VK_NUMPAD4 0x64 +#define LANG_FILIPINO 0x64 +#define VS_USER_DEFINED 100 +#define VK_NUMPAD5 0x65 +#define LANG_DIVEHI 0x65 +#define VK_NUMPAD6 0x66 +#define VK_NUMPAD7 0x67 +#define VK_NUMPAD8 0x68 +#define LANG_HAUSA 0x68 +#define VK_NUMPAD9 0x69 +#define VK_MULTIPLY 0x6A +#define LANG_YORUBA 0x6a +#define VK_ADD 0x6B +#define LANG_QUECHUA 0x6b +#define VK_SEPARATOR 0x6C +#define LANG_SOTHO 0x6c +#define VK_SUBTRACT 0x6D +#define LANG_BASHKIR 0x6d +#define VK_DECIMAL 0x6E +#define LANG_LUXEMBOURGISH 0x6e +#define VK_DIVIDE 0x6F +#define LANG_GREENLANDIC 0x6f +#define VK_F1 0x70 +#define LANG_IGBO 0x70 +#define VK_F2 0x71 +#define VK_F3 0x72 +#define VK_F4 0x73 +#define LANG_TIGRIGNA 0x73 +#define VK_F5 0x74 +#define VK_F6 0x75 +#define VK_F7 0x76 +#define VK_F8 0x77 +#define VK_F9 0x78 +#define WHEEL_DELTA 120 +#define LANG_YI 0x78 +#define VK_F10 0x79 +#define VK_F11 0x7A +#define LANG_MAPUDUNGUN 0x7a +#define VK_F12 0x7B +#define WM_CONTEXTMENU 0x007B +#define VK_F13 0x7C +#define WM_STYLECHANGING 0x007C +#define LANG_MOHAWK 0x7c +#define VK_F14 0x7D +#define WM_STYLECHANGED 0x007D +#define VK_F15 0x7E +#define WM_DISPLAYCHANGE 0x007E +#define LANG_BRETON 0x7e +#define VK_F16 0x7F +#define WM_GETICON 0x007F +#define LANG_INVARIANT 0x7f +#define VK_F17 0x80 +#define WM_SETICON 0x0080 +#define WVR_ALIGNRIGHT 0x0080 +#define CS_PARENTDC 0x0080 +#define CF_OWNERDISPLAY 0x0080 +#define HDS_FULLDRAG 0x0080 +#define BTNS_WHOLEDROPDOWN 0x0080 +#define TTS_CLOSE 0x80 +#define TBS_NOTHUMB 0x0080 +#define UDS_NOTHOUSANDS 0x0080 +#define LVS_NOLABELWRAP 0x0080 +#define TVS_NOTOOLTIPS 0x0080 +#define TVS_EX_PARTIALCHECKBOXES 0x0080 +#define TCS_VERTICAL 0x0080 +#define MCS_SHORTDAYSOFWEEK 0x0080 +#define LANG_UIGHUR 0x80 +#define VK_F18 0x81 +#define WM_NCCREATE 0x0081 +#define CF_DSPTEXT 0x0081 +#define LANG_MAORI 0x81 +#define VK_F19 0x82 +#define WM_NCDESTROY 0x0082 +#define CF_DSPBITMAP 0x0082 +#define LANG_OCCITAN 0x82 +#define VK_F20 0x83 +#define WM_NCCALCSIZE 0x0083 +#define CF_DSPMETAFILEPICT 0x0083 +#define LANG_CORSICAN 0x83 +#define VK_F21 0x84 +#define WM_NCHITTEST 0x0084 +#define LANG_ALSATIAN 0x84 +#define VK_F22 0x85 +#define WM_NCPAINT 0x0085 +#define LANG_YAKUT 0x85 +#define VK_F23 0x86 +#define WM_NCACTIVATE 0x0086 +#define LANG_KICHE 0x86 +#define VK_F24 0x87 +#define WM_GETDLGCODE 0x0087 +#define LANG_KINYARWANDA 0x87 +#define 
WM_SYNCPAINT 0x0088 +#define LANG_WOLOF 0x88 +#define LANG_DARI 0x8c +#define CF_DSPENHMETAFILE 0x008E +#define VK_NUMLOCK 0x90 +#define VK_SCROLL 0x91 +#define VK_OEM_NEC_EQUAL 0x92 +#define VK_OEM_FJ_JISHO 0x92 +#define VK_OEM_FJ_MASSHOU 0x93 +#define VK_OEM_FJ_TOUROKU 0x94 +#define VK_OEM_FJ_LOYA 0x95 +#define VK_OEM_FJ_ROYA 0x96 +#define VK_LSHIFT 0xA0 +#define WM_NCMOUSEMOVE 0x00A0 +#define VK_RSHIFT 0xA1 +#define WM_NCLBUTTONDOWN 0x00A1 +#define VK_LCONTROL 0xA2 +#define WM_NCLBUTTONUP 0x00A2 +#define VK_RCONTROL 0xA3 +#define WM_NCLBUTTONDBLCLK 0x00A3 +#define VK_LMENU 0xA4 +#define WM_NCRBUTTONDOWN 0x00A4 +#define VK_RMENU 0xA5 +#define WM_NCRBUTTONUP 0x00A5 +#define VK_BROWSER_BACK 0xA6 +#define WM_NCRBUTTONDBLCLK 0x00A6 +#define VK_BROWSER_FORWARD 0xA7 +#define WM_NCMBUTTONDOWN 0x00A7 +#define VK_BROWSER_REFRESH 0xA8 +#define WM_NCMBUTTONUP 0x00A8 +#define VK_BROWSER_STOP 0xA9 +#define WM_NCMBUTTONDBLCLK 0x00A9 +#define VK_BROWSER_SEARCH 0xAA +#define VK_BROWSER_FAVORITES 0xAB +#define WM_NCXBUTTONDOWN 0x00AB +#define VK_BROWSER_HOME 0xAC +#define WM_NCXBUTTONUP 0x00AC +#define VK_VOLUME_MUTE 0xAD +#define WM_NCXBUTTONDBLCLK 0x00AD +#define VK_VOLUME_DOWN 0xAE +#define VK_VOLUME_UP 0xAF +#define VK_MEDIA_NEXT_TRACK 0xB0 +#define EM_GETSEL 0x00B0 +#define VK_MEDIA_PREV_TRACK 0xB1 +#define EM_SETSEL 0x00B1 +#define VK_MEDIA_STOP 0xB2 +#define EM_GETRECT 0x00B2 +#define VK_MEDIA_PLAY_PAUSE 0xB3 +#define EM_SETRECT 0x00B3 +#define VK_LAUNCH_MAIL 0xB4 +#define EM_SETRECTNP 0x00B4 +#define VK_LAUNCH_MEDIA_SELECT 0xB5 +#define EM_SCROLL 0x00B5 +#define VK_LAUNCH_APP1 0xB6 +#define EM_LINESCROLL 0x00B6 +#define VK_LAUNCH_APP2 0xB7 +#define EM_SCROLLCARET 0x00B7 +#define EM_GETMODIFY 0x00B8 +#define EM_SETMODIFY 0x00B9 +#define VK_OEM_1 0xBA +#define EM_GETLINECOUNT 0x00BA +#define VK_OEM_PLUS 0xBB +#define EM_LINEINDEX 0x00BB +#define VK_OEM_COMMA 0xBC +#define EM_SETHANDLE 0x00BC +#define VK_OEM_MINUS 0xBD +#define EM_GETHANDLE 0x00BD +#define VK_OEM_PERIOD 0xBE +#define EM_GETTHUMB 0x00BE +#define VK_OEM_2 0xBF +#define VK_OEM_3 0xC0 +#define EM_LINELENGTH 0x00C1 +#define EM_REPLACESEL 0x00C2 +#define EM_GETLINE 0x00C4 +#define EM_LIMITTEXT 0x00C5 +#define EM_CANUNDO 0x00C6 +#define EM_UNDO 0x00C7 +#define EM_FMTLINES 0x00C8 +#define DLG_MAIN 200 +#define EM_LINEFROMCHAR 0x00C9 +#define EM_SETTABSTOPS 0x00CB +#define EM_SETPASSWORDCHAR 0x00CC +#define EM_EMPTYUNDOBUFFER 0x00CD +#define EM_GETFIRSTVISIBLELINE 0x00CE +#define EM_SETREADONLY 0x00CF +#define EM_SETWORDBREAKPROC 0x00D0 +#define EM_GETWORDBREAKPROC 0x00D1 +#define EM_GETPASSWORDCHAR 0x00D2 +#define EM_SETMARGINS 0x00D3 +#define EM_GETMARGINS 0x00D4 +#define EM_GETLIMITTEXT 0x00D5 +#define EM_POSFROMCHAR 0x00D6 +#define EM_CHARFROMPOS 0x00D7 +#define EM_SETIMESTATUS 0x00D8 +#define EM_GETIMESTATUS 0x00D9 +#define VK_OEM_4 0xDB +#define VK_OEM_5 0xDC +#define VK_OEM_6 0xDD +#define VK_OEM_7 0xDE +#define VK_OEM_8 0xDF +#define VK_OEM_AX 0xE1 +#define VK_OEM_102 0xE2 +#define VK_ICO_HELP 0xE3 +#define VK_ICO_00 0xE4 +#define VK_PROCESSKEY 0xE5 +#define VK_ICO_CLEAR 0xE6 +#define VK_PACKET 0xE7 +#define VK_OEM_RESET 0xE9 +#define VK_OEM_JUMP 0xEA +#define VK_OEM_PA1 0xEB +#define VK_OEM_PA2 0xEC +#define VK_OEM_PA3 0xED +#define VK_OEM_WSCTRL 0xEE +#define VK_OEM_CUSEL 0xEF +#define VK_OEM_ATTN 0xF0 +#define BM_GETCHECK 0x00F0 +#define VK_OEM_FINISH 0xF1 +#define BM_SETCHECK 0x00F1 +#define VK_OEM_COPY 0xF2 +#define BM_GETSTATE 0x00F2 +#define VK_OEM_AUTO 0xF3 +#define BM_SETSTATE 0x00F3 +#define VK_OEM_ENLW 0xF4 +#define 
BM_SETSTYLE 0x00F4 +#define VK_OEM_BACKTAB 0xF5 +#define BM_CLICK 0x00F5 +#define VK_ATTN 0xF6 +#define BM_GETIMAGE 0x00F6 +#define VK_CRSEL 0xF7 +#define BM_SETIMAGE 0x00F7 +#define VK_EXSEL 0xF8 +#define BM_SETDONTCLICK 0x00F8 +#define VK_EREOF 0xF9 +#define VK_PLAY 0xFA +#define VK_ZOOM 0xFB +#define VK_NONAME 0xFC +#define VK_PA1 0xFD +#define VK_OEM_CLEAR 0xFE +#define WM_INPUT_DEVICE_CHANGE 0x00FE +#define SUBVERSION_MASK 0x000000FF +#define WM_INPUT 0x00FF +#define WM_KEYFIRST 0x0100 +#define WM_KEYDOWN 0x0100 +#define WVR_HREDRAW 0x0100 +#define HDS_FILTERBAR 0x0100 +#define TBSTYLE_TOOLTIPS 0x0100 +#define RBS_TOOLTIPS 0x00000100 +#define TTS_USEVISUALSTYLE 0x100 +#define SBARS_SIZEGRIP 0x0100 +#define TBS_TOOLTIPS 0x0100 +#define UDS_HOTTRACK 0x0100 +#define LVS_AUTOARRANGE 0x0100 +#define TVS_CHECKBOXES 0x0100 +#define TVS_EX_EXCLUSIONCHECKBOXES 0x0100 +#define TCS_BUTTONS 0x0100 +#define MCS_NOSELCHANGEONNAV 0x0100 +#define WM_KEYUP 0x0101 +#define WM_CHAR 0x0102 +#define WM_DEADCHAR 0x0103 +#define WM_SYSKEYDOWN 0x0104 +#define WM_SYSKEYUP 0x0105 +#define WM_SYSCHAR 0x0106 +#define WM_SYSDEADCHAR 0x0107 +#define WM_UNICHAR 0x0109 +#define WM_IME_STARTCOMPOSITION 0x010D +#define WM_IME_ENDCOMPOSITION 0x010E +#define WM_IME_COMPOSITION 0x010F +#define WM_IME_KEYLAST 0x010F +#define WM_INITDIALOG 0x0110 +#define WM_COMMAND 0x0111 +#define WM_SYSCOMMAND 0x0112 +#define WM_TIMER 0x0113 +#define WM_HSCROLL 0x0114 +#define WM_VSCROLL 0x0115 +#define WM_INITMENU 0x0116 +#define WM_INITMENUPOPUP 0x0117 +#define WM_MENUSELECT 0x011F +#define WM_MENUCHAR 0x0120 +#define WM_ENTERIDLE 0x0121 +#define WM_MENURBUTTONUP 0x0122 +#define WM_MENUDRAG 0x0123 +#define WM_MENUGETOBJECT 0x0124 +#define WM_UNINITMENUPOPUP 0x0125 +#define WM_MENUCOMMAND 0x0126 +#define WM_CHANGEUISTATE 0x0127 +#define WM_UPDATEUISTATE 0x0128 +#define WM_QUERYUISTATE 0x0129 +#define DLG_ICON 300 +#define WM_CTLCOLORMSGBOX 0x0132 +#define WM_CTLCOLOREDIT 0x0133 +#define WM_CTLCOLORLISTBOX 0x0134 +#define WM_CTLCOLORBTN 0x0135 +#define WM_CTLCOLORDLG 0x0136 +#define WM_CTLCOLORSCROLLBAR 0x0137 +#define WM_CTLCOLORSTATIC 0x0138 +#define MN_GETHMENU 0x01E1 +#define _WIN32_IE_IE20 0x0200 +#define WM_MOUSEFIRST 0x0200 +#define WM_MOUSEMOVE 0x0200 +#define WVR_VREDRAW 0x0200 +#define CS_NOCLOSE 0x0200 +#define CF_PRIVATEFIRST 0x0200 +#define HDS_FLAT 0x0200 +#define TBSTYLE_WRAPABLE 0x0200 +#define RBS_VARHEIGHT 0x00000200 +#define TBS_REVERSED 0x0200 +#define LVS_EDITLABELS 0x0200 +#define TVS_TRACKSELECT 0x0200 +#define TVS_EX_DIMMEDCHECKBOXES 0x0200 +#define TCS_MULTILINE 0x0200 +#define WM_LBUTTONDOWN 0x0201 +#define WM_LBUTTONUP 0x0202 +#define WM_LBUTTONDBLCLK 0x0203 +#define WM_RBUTTONDOWN 0x0204 +#define WM_RBUTTONUP 0x0205 +#define WM_RBUTTONDBLCLK 0x0206 +#define WM_MBUTTONDOWN 0x0207 +#define WM_MBUTTONUP 0x0208 +#define WM_MBUTTONDBLCLK 0x0209 +#define WM_MOUSEWHEEL 0x020A +#define WM_XBUTTONDOWN 0x020B +#define WM_XBUTTONUP 0x020C +#define WM_XBUTTONDBLCLK 0x020D +#define WM_MOUSEHWHEEL 0x020E +#define WM_PARENTNOTIFY 0x0210 +#define WM_ENTERMENULOOP 0x0211 +#define WM_EXITMENULOOP 0x0212 +#define WM_NEXTMENU 0x0213 +#define WM_SIZING 0x0214 +#define WM_CAPTURECHANGED 0x0215 +#define WM_MOVING 0x0216 +#define WM_POWERBROADCAST 0x0218 +#define WM_DEVICECHANGE 0x0219 +#define WM_MDICREATE 0x0220 +#define WM_MDIDESTROY 0x0221 +#define WM_MDIACTIVATE 0x0222 +#define WM_MDIRESTORE 0x0223 +#define WM_MDINEXT 0x0224 +#define WM_MDIMAXIMIZE 0x0225 +#define WM_MDITILE 0x0226 +#define WM_MDICASCADE 0x0227 +#define 
WM_MDIICONARRANGE 0x0228 +#define WM_MDIGETACTIVE 0x0229 +#define WM_MDISETMENU 0x0230 +#define WM_ENTERSIZEMOVE 0x0231 +#define WM_EXITSIZEMOVE 0x0232 +#define WM_DROPFILES 0x0233 +#define WM_MDIREFRESHMENU 0x0234 +#define WM_IME_SETCONTEXT 0x0281 +#define WM_IME_NOTIFY 0x0282 +#define WM_IME_CONTROL 0x0283 +#define WM_IME_COMPOSITIONFULL 0x0284 +#define WM_IME_SELECT 0x0285 +#define WM_IME_CHAR 0x0286 +#define WM_IME_REQUEST 0x0288 +#define WM_IME_KEYDOWN 0x0290 +#define WM_IME_KEYUP 0x0291 +#define WM_NCMOUSEHOVER 0x02A0 +#define WM_MOUSEHOVER 0x02A1 +#define WM_NCMOUSELEAVE 0x02A2 +#define WM_MOUSELEAVE 0x02A3 +#define WM_WTSSESSION_CHANGE 0x02B1 +#define WM_TABLET_FIRST 0x02c0 +#define WM_TABLET_LAST 0x02df +#define CF_PRIVATELAST 0x02FF +#define _WIN32_IE_IE30 0x0300 +#define WM_CUT 0x0300 +#define CF_GDIOBJFIRST 0x0300 +#define WM_COPY 0x0301 +#define _WIN32_IE_IE302 0x0302 +#define WM_PASTE 0x0302 +#define WM_CLEAR 0x0303 +#define WM_UNDO 0x0304 +#define WM_RENDERFORMAT 0x0305 +#define WM_RENDERALLFORMATS 0x0306 +#define WM_DESTROYCLIPBOARD 0x0307 +#define WM_DRAWCLIPBOARD 0x0308 +#define WM_PAINTCLIPBOARD 0x0309 +#define WM_VSCROLLCLIPBOARD 0x030A +#define WM_SIZECLIPBOARD 0x030B +#define WM_ASKCBFORMATNAME 0x030C +#define WM_CHANGECBCHAIN 0x030D +#define WM_HSCROLLCLIPBOARD 0x030E +#define WM_QUERYNEWPALETTE 0x030F +#define WM_PALETTEISCHANGING 0x0310 +#define WM_PALETTECHANGED 0x0311 +#define WM_HOTKEY 0x0312 +#define WM_PRINT 0x0317 +#define WM_PRINTCLIENT 0x0318 +#define WM_APPCOMMAND 0x0319 +#define WM_THEMECHANGED 0x031A +#define WM_CLIPBOARDUPDATE 0x031D +#define WM_DWMCOMPOSITIONCHANGED 0x031E +#define WM_DWMNCRENDERINGCHANGED 0x031F +#define WM_DWMCOLORIZATIONCOLORCHANGED 0x0320 +#define WM_DWMWINDOWMAXIMIZEDCHANGE 0x0321 +#define WM_GETTITLEBARINFOEX 0x033F +#define WM_HANDHELDFIRST 0x0358 +#define WM_HANDHELDLAST 0x035F +#define WM_AFXFIRST 0x0360 +#define WM_AFXLAST 0x037F +#define WM_PENWINFIRST 0x0380 +#define WM_PENWINLAST 0x038F +#define WM_DDE_FIRST 0x03E0 +#define IDC_STATUS 1000 +#define IDC_LIST1 1000 +#define IDC_LEFT 1001 +#define IDC_RIGHT 1002 +#define IDC_TOP 1003 +#define IDC_MIDDLE 1004 +#define IDC_BOTTOM 1005 +#define IDC_EDIT 1010 +#define IDC_CLEAR 1011 +#define CF_GDIOBJLAST 0x03FF +#define _WIN32_WINNT_NT4 0x0400 +#define _WIN32_IE_IE40 0x0400 +#define WM_USER 0x0400 +#define WVR_VALIDRECTS 0x0400 +#define HDS_CHECKBOXES 0x0400 +#define TBSTYLE_ALTDRAG 0x0400 +#define RBS_BANDBORDERS 0x00000400 +#define TBS_DOWNISLEFT 0x0400 +#define LVS_OWNERDRAWFIXED 0x0400 +#define TVS_SINGLEEXPAND 0x0400 +#define TVS_EX_DRAWIMAGEASYNC 0x0400 +#define TCS_FIXEDWIDTH 0x0400 +#define ctlFirst 0x0400 +#define psh1 0x0400 +#define _WIN32_IE_IE401 0x0401 +#define psh2 0x0401 +#define psh3 0x0402 +#define psh4 0x0403 +#define psh5 0x0404 +#define psh6 0x0405 +#define psh7 0x0406 +#define psh8 0x0407 +#define psh9 0x0408 +#define psh10 0x0409 +#define psh11 0x040a +#define psh12 0x040b +#define psh13 0x040c +#define psh14 0x040d +#define psh15 0x040e +#define psh16 0x040f +#define _WIN32_WINDOWS 0x0410 +#define chx1 0x0410 +#define chx2 0x0411 +#define chx3 0x0412 +#define chx4 0x0413 +#define chx5 0x0414 +#define chx6 0x0415 +#define chx7 0x0416 +#define chx8 0x0417 +#define chx9 0x0418 +#define chx10 0x0419 +#define chx11 0x041a +#define chx12 0x041b +#define chx13 0x041c +#define chx14 0x041d +#define chx15 0x041e +#define chx16 0x041f +#define rad1 0x0420 +#define rad2 0x0421 +#define rad3 0x0422 +#define rad4 0x0423 +#define rad5 0x0424 +#define rad6 0x0425 
+#define rad7 0x0426 +#define rad8 0x0427 +#define rad9 0x0428 +#define rad10 0x0429 +#define rad11 0x042a +#define rad12 0x042b +#define rad13 0x042c +#define rad14 0x042d +#define rad15 0x042e +#define rad16 0x042f +#define grp1 0x0430 +#define grp2 0x0431 +#define grp3 0x0432 +#define grp4 0x0433 +#define frm1 0x0434 +#define frm2 0x0435 +#define frm3 0x0436 +#define frm4 0x0437 +#define rct1 0x0438 +#define rct2 0x0439 +#define rct3 0x043a +#define rct4 0x043b +#define ico1 0x043c +#define ico2 0x043d +#define ico3 0x043e +#define ico4 0x043f +#define stc1 0x0440 +#define stc2 0x0441 +#define stc3 0x0442 +#define stc4 0x0443 +#define stc5 0x0444 +#define stc6 0x0445 +#define stc7 0x0446 +#define stc8 0x0447 +#define stc9 0x0448 +#define stc10 0x0449 +#define stc11 0x044a +#define stc12 0x044b +#define stc13 0x044c +#define stc14 0x044d +#define stc15 0x044e +#define stc16 0x044f +#define stc17 0x0450 +#define stc18 0x0451 +#define stc19 0x0452 +#define stc20 0x0453 +#define stc21 0x0454 +#define stc22 0x0455 +#define stc23 0x0456 +#define stc24 0x0457 +#define stc25 0x0458 +#define stc26 0x0459 +#define stc27 0x045a +#define stc28 0x045b +#define stc29 0x045c +#define stc30 0x045d +#define stc31 0x045e +#define stc32 0x045f +#define lst1 0x0460 +#define lst2 0x0461 +#define lst3 0x0462 +#define lst4 0x0463 +#define lst5 0x0464 +#define lst6 0x0465 +#define lst7 0x0466 +#define lst8 0x0467 +#define lst9 0x0468 +#define lst10 0x0469 +#define lst11 0x046a +#define lst12 0x046b +#define lst13 0x046c +#define lst14 0x046d +#define lst15 0x046e +#define lst16 0x046f +#define cmb1 0x0470 +#define cmb2 0x0471 +#define cmb3 0x0472 +#define cmb4 0x0473 +#define cmb5 0x0474 +#define cmb6 0x0475 +#define cmb7 0x0476 +#define cmb8 0x0477 +#define cmb9 0x0478 +#define cmb10 0x0479 +#define cmb11 0x047a +#define cmb12 0x047b +#define cmb13 0x047c +#define cmb14 0x047d +#define cmb15 0x047e +#define cmb16 0x047f +#define edt1 0x0480 +#define edt2 0x0481 +#define edt3 0x0482 +#define edt4 0x0483 +#define edt5 0x0484 +#define edt6 0x0485 +#define edt7 0x0486 +#define edt8 0x0487 +#define edt9 0x0488 +#define edt10 0x0489 +#define edt11 0x048a +#define edt12 0x048b +#define edt13 0x048c +#define edt14 0x048d +#define edt15 0x048e +#define edt16 0x048f +#define scr1 0x0490 +#define scr2 0x0491 +#define scr3 0x0492 +#define scr4 0x0493 +#define scr5 0x0494 +#define scr6 0x0495 +#define scr7 0x0496 +#define scr8 0x0497 +#define ctl1 0x04A0 +#define ctlLast 0x04ff +#define _WIN32_WINNT_WIN2K 0x0500 +#define _WIN32_IE_IE50 0x0500 +#define _WIN32_WINNT_WINXP 0x0501 +#define _WIN32_IE_IE501 0x0501 +#define _WIN32_WINNT_WS03 0x0502 +#define _WIN32_IE_IE55 0x0550 +#define _WIN32_WINNT_LONGHORN 0x0600 +#define _WIN32_IE_IE60 0x0600 +#define FILEOPENORD 1536 +#define _WIN32_IE_IE60SP1 0x0601 +#define MULTIFILEOPENORD 1537 +#define _WIN32_IE_WS03 0x0602 +#define PRINTDLGORD 1538 +#define _WIN32_IE_IE60SP2 0x0603 +#define PRNSETUPDLGORD 1539 +#define FINDDLGORD 1540 +#define REPLACEDLGORD 1541 +#define FONTDLGORD 1542 +#define FORMATDLGORD31 1543 +#define FORMATDLGORD30 1544 +#define RUNDLGORD 1545 +#define PAGESETUPDLGORD 1546 +#define NEWFILEOPENORD 1547 +#define PRINTDLGEXORD 1549 +#define PAGESETUPDLGORDMOTIF 1550 +#define COLORMGMTDLGORD 1551 +#define NEWFILEOPENV2ORD 1552 +#define NEWFILEOPENV3ORD 1553 +#define _WIN32_IE_IE70 0x0700 +#define CS_SAVEBITS 0x0800 +#define HDS_NOSIZING 0x0800 +#define TBSTYLE_FLAT 0x0800 +#define RBS_FIXEDORDER 0x00000800 +#define SBARS_TOOLTIPS 0x0800 +#define SBT_TOOLTIPS 0x0800 
+#define TBS_NOTIFYBEFOREMOVE 0x0800 +#define LVS_ALIGNLEFT 0x0800 +#define TVS_INFOTIP 0x0800 +#define TCS_RAGGEDRIGHT 0x0800 +#define LVS_ALIGNMASK 0x0c00 +#define CS_BYTEALIGNCLIENT 0x1000 +#define HDS_OVERFLOW 0x1000 +#define TBSTYLE_LIST 0x1000 +#define RBS_REGISTERDROP 0x00001000 +#define TBS_TRANSPARENTBKGND 0x1000 +#define LVS_OWNERDATA 0x1000 +#define TVS_FULLROWSELECT 0x1000 +#define TCS_FOCUSONBUTTONDOWN 0x1000 +#define CS_BYTEALIGNWINDOW 0x2000 +#define TBSTYLE_CUSTOMERASE 0x2000 +#define RBS_AUTOSIZE 0x00002000 +#define LVS_NOSCROLL 0x2000 +#define TVS_NOSCROLL 0x2000 +#define TCS_OWNERDRAWFIXED 0x2000 +#define CS_GLOBALCLASS 0x4000 +#define TBSTYLE_REGISTERDROP 0x4000 +#define RBS_VERTICALGRIPPER 0x00004000 +#define LVS_NOCOLUMNHEADER 0x4000 +#define TVS_NONEVENHEIGHT 0x4000 +#define TCS_TOOLTIPS 0x4000 +#define IDH_NO_HELP 28440 +#define IDH_MISSING_CONTEXT 28441 +#define IDH_GENERIC_HELP_BUTTON 28442 +#define IDH_OK 28443 +#define IDH_CANCEL 28444 +#define IDH_HELP 28445 +#define LANG_BOSNIAN_NEUTRAL 0x781a +#define LANG_CHINESE_TRADITIONAL 0x7c04 +#define LANG_SERBIAN_NEUTRAL 0x7c1a +#define IDTIMEOUT 32000 +#define OCR_NORMAL 32512 +#define OIC_SAMPLE 32512 +#define OCR_IBEAM 32513 +#define OIC_HAND 32513 +#define OCR_WAIT 32514 +#define OIC_QUES 32514 +#define OCR_CROSS 32515 +#define OIC_BANG 32515 +#define OCR_UP 32516 +#define OIC_NOTE 32516 +#define OIC_WINLOGO 32517 +#define OIC_SHIELD 32518 +#define OCR_SIZE 32640 +#define OCR_ICON 32641 +#define OCR_SIZENWSE 32642 +#define OCR_SIZENESW 32643 +#define OCR_SIZEWE 32644 +#define OCR_SIZENS 32645 +#define OCR_SIZEALL 32646 +#define OCR_ICOCUR 32647 +#define OCR_NO 32648 +#define OCR_HAND 32649 +#define OCR_APPSTARTING 32650 +#define OBM_LFARROWI 32734 +#define OBM_RGARROWI 32735 +#define OBM_DNARROWI 32736 +#define OBM_UPARROWI 32737 +#define OBM_COMBO 32738 +#define OBM_MNARROW 32739 +#define OBM_LFARROWD 32740 +#define OBM_RGARROWD 32741 +#define OBM_DNARROWD 32742 +#define OBM_UPARROWD 32743 +#define OBM_RESTORED 32744 +#define OBM_ZOOMD 32745 +#define OBM_REDUCED 32746 +#define OBM_RESTORE 32747 +#define OBM_ZOOM 32748 +#define OBM_REDUCE 32749 +#define OBM_LFARROW 32750 +#define OBM_RGARROW 32751 +#define OBM_DNARROW 32752 +#define OBM_UPARROW 32753 +#define OBM_CLOSE 32754 +#define OBM_OLD_RESTORE 32755 +#define OBM_OLD_ZOOM 32756 +#define OBM_OLD_REDUCE 32757 +#define OBM_BTNCORNERS 32758 +#define OBM_CHECKBOXES 32759 +#define OBM_CHECK 32760 +#define OBM_BTSIZE 32761 +#define OBM_OLD_LFARROW 32762 +#define OBM_OLD_RGARROW 32763 +#define OBM_OLD_DNARROW 32764 +#define OBM_OLD_UPARROW 32765 +#define OBM_SIZE 32766 +#define OBM_OLD_CLOSE 32767 +#define WM_APP 0x8000 +#define HELP_TCARD 0x8000 +#define TBSTYLE_TRANSPARENT 0x8000 +#define RBS_DBLCLKTOGGLE 0x00008000 +#define LVS_NOSORTHEADER 0x8000 +#define TVS_NOHSCROLL 0x8000 +#define TCS_FOCUSNEVER 0x8000 +#define SC_SIZE 0xF000 +#define SC_SEPARATOR 0xF00F +#define SC_MOVE 0xF010 +#define SC_MINIMIZE 0xF020 +#define SC_MAXIMIZE 0xF030 +#define SC_NEXTWINDOW 0xF040 +#define SC_PREVWINDOW 0xF050 +#define SC_CLOSE 0xF060 +#define SC_VSCROLL 0xF070 +#define SC_HSCROLL 0xF080 +#define SC_MOUSEMENU 0xF090 +#define SC_KEYMENU 0xF100 +#define SC_ARRANGE 0xF110 +#define SC_RESTORE 0xF120 +#define SC_TASKLIST 0xF130 +#define SC_SCREENSAVE 0xF140 +#define SC_HOTKEY 0xF150 +#define SC_DEFAULT 0xF160 +#define SC_MONITORPOWER 0xF170 +#define SC_CONTEXTHELP 0xF180 +#define LVS_TYPESTYLEMASK 0xfc00 +#define SPVERSION_MASK 0x0000FF00 +#define UNICODE_NOCHAR 0xFFFF +#define 
IDC_STATIC -1 + +// Next default values for new objects +// +#ifdef APSTUDIO_INVOKED +#ifndef APSTUDIO_READONLY_SYMBOLS +#define _APS_NEXT_RESOURCE_VALUE 101 +#define _APS_NEXT_COMMAND_VALUE 40001 +#define _APS_NEXT_CONTROL_VALUE 1002 +#define _APS_NEXT_SYMED_VALUE 101 +#endif +#endif diff --git a/PC/associator.rc b/PC/associator.rc new file mode 100644 --- /dev/null +++ b/PC/associator.rc @@ -0,0 +1,97 @@ +// Microsoft Visual C++ generated resource script. +// +#include "associator.h" +#include "winuser.h" +///////////////////////////////////////////////////////////////////////////// +// English (U.K.) resources + +#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENG) +#ifdef _WIN32 +LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_UK +#pragma code_page(1252) +#endif //_WIN32 + +///////////////////////////////////////////////////////////////////////////// +// +// Icon +// + +// Icon with lowest ID value placed first to ensure application icon +// remains consistent on all systems. +DLG_ICON ICON "launcher.ico" + +///////////////////////////////////////////////////////////////////////////// +// +// Dialog +// + +DLG_MAIN DIALOGEX 20, 40, 236, 183 +// Make the dialog visible after positioning at centre +STYLE DS_SETFONT | DS_3DLOOK | WS_MINIMIZEBOX | WS_CAPTION | WS_SYSMENU +EXSTYLE WS_EX_NOPARENTNOTIFY +CAPTION "Python File Associations Have Ceased To Be!" +FONT 10, "Arial", 400, 0, 0x0 +BEGIN + LTEXT "You've uninstalled the Python Launcher, so now there are no applications associated with Python files.",IDC_STATIC,7,7,225,18 + LTEXT "You may wish to associate Python files with one of the Python versions installed on your machine, listed below:",IDC_STATIC,7,27,225,18 + CONTROL "",IDC_LIST1,"SysListView32",LVS_REPORT | LVS_SINGLESEL | LVS_ALIGNLEFT | WS_BORDER | WS_TABSTOP,6,49,224,105 + PUSHBUTTON "&Associate with selected Python",IDOK,6,164,117,14,WS_DISABLED + PUSHBUTTON "Do&n't associate Python files",IDCANCEL,128,164,102,14 +END + + +#ifdef APSTUDIO_INVOKED +///////////////////////////////////////////////////////////////////////////// +// +// TEXTINCLUDE +// + +1 TEXTINCLUDE +BEGIN + "resource.h\0" +END + +2 TEXTINCLUDE +BEGIN + "\0" +END + +3 TEXTINCLUDE +BEGIN + "\r\n" + "\0" +END + +#endif // APSTUDIO_INVOKED + + +///////////////////////////////////////////////////////////////////////////// +// +// DESIGNINFO +// + +#ifdef APSTUDIO_INVOKED +GUIDELINES DESIGNINFO +BEGIN + DLG_MAIN, DIALOG + BEGIN + RIGHTMARGIN, 238 + END +END +#endif // APSTUDIO_INVOKED + +#endif // English (U.K.) resources +///////////////////////////////////////////////////////////////////////////// + + + +#ifndef APSTUDIO_INVOKED +///////////////////////////////////////////////////////////////////////////// +// +// Generated from the TEXTINCLUDE 3 resource. +// + + +///////////////////////////////////////////////////////////////////////////// +#endif // not APSTUDIO_INVOKED + diff --git a/PCbuild/associator.vcxproj b/PCbuild/associator.vcxproj new file mode 100644 --- /dev/null +++ b/PCbuild/associator.vcxproj @@ -0,0 +1,84 @@ +? 
+ + + + Debug + Win32 + + + Release + Win32 + + + + {023B3CDA-59C8-45FD-95DC-F8973322ED34} + associator + + + + Application + true + Unicode + + + Application + false + true + Unicode + + + + + + + + + + + + + + + + + + + Level3 + Disabled + _DEBUG;_WINDOWS;%(PreprocessorDefinitions) + + + true + comctl32.lib;%(AdditionalDependencies) + false + + + + + Level3 + MaxSpeed + true + true + NDEBUG;_WINDOWS;%(PreprocessorDefinitions) + + + true + true + true + comctl32.lib;%(AdditionalDependencies) + false + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/PCbuild/associator.vcxproj.filters b/PCbuild/associator.vcxproj.filters new file mode 100644 --- /dev/null +++ b/PCbuild/associator.vcxproj.filters @@ -0,0 +1,32 @@ +? + + + + {4FC737F1-C7A5-4376-A066-2A32D752A2FF} + cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx + + + {93995380-89BD-4b04-88EB-625FBE52EBFB} + h;hpp;hxx;hm;inl;inc;xsd + + + {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} + rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms + + + + + Source Files + + + + + Resource Files + + + + + Header Files + + + \ No newline at end of file diff --git a/PCbuild/pcbuild.sln b/PCbuild/pcbuild.sln --- a/PCbuild/pcbuild.sln +++ b/PCbuild/pcbuild.sln @@ -72,6 +72,8 @@ EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "pywlauncher", "pywlauncher.vcxproj", "{1D4B18D3-7C12-4ECB-9179-8531FF876CE6}" EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "associator", "associator.vcxproj", "{023B3CDA-59C8-45FD-95DC-F8973322ED34}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Win32 = Debug|Win32 @@ -597,6 +599,18 @@ {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Release|Win32.Build.0 = Release|Win32 {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Release|x64.ActiveCfg = Release|x64 {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Release|x64.Build.0 = Release|x64 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.Debug|Win32.ActiveCfg = Debug|Win32 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.Debug|Win32.Build.0 = Debug|Win32 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.Debug|x64.ActiveCfg = Debug|Win32 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.PGInstrument|Win32.ActiveCfg = Release|Win32 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.PGInstrument|Win32.Build.0 = Release|Win32 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.PGInstrument|x64.ActiveCfg = Release|Win32 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.PGUpdate|Win32.ActiveCfg = Release|Win32 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.PGUpdate|Win32.Build.0 = Release|Win32 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.PGUpdate|x64.ActiveCfg = Release|Win32 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.Release|Win32.ActiveCfg = Release|Win32 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.Release|Win32.Build.0 = Release|Win32 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.Release|x64.ActiveCfg = Release|Win32 EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 00:13:04 2012 From: python-checkins at python.org (brian.curtin) Date: Fri, 22 Jun 2012 00:13:04 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Add_version_resource=2E?= Message-ID: http://hg.python.org/cpython/rev/62c55d6c6038 changeset: 77547:62c55d6c6038 user: Martin v. L?wis date: Thu Jun 21 16:27:58 2012 +0200 summary: Add version resource. 
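The resource script added below embeds a standard VS_VERSION_INFO block, composing FILEVERSION from the PY_MAJOR_VERSION, PY_MINOR_VERSION, FIELD3 and PYTHON_API_VERSION macros pulled in through its includes. Purely for illustration, here is a minimal C sketch of reading those fixed-version fields back out of the built launcher through the version API; the bare "py.exe" path, the error handling and the version.lib pragma are assumptions of the sketch, not part of the checked-in code (the installer build script instead reads the same data via msilib's FileVersion(), as a later changeset in this digest shows).

    /* Sketch only: read FILEVERSION back out of py.exe (path is assumed). */
    #include <windows.h>
    #include <stdio.h>
    #include <stdlib.h>
    #pragma comment(lib, "version.lib")

    int main(void)
    {
        const wchar_t *path = L"py.exe";     /* assumed to sit in the CWD */
        DWORD handle = 0;
        DWORD size = GetFileVersionInfoSizeW(path, &handle);
        if (size == 0)
            return 1;                        /* no version resource present */
        void *block = malloc(size);
        if (block == NULL || !GetFileVersionInfoW(path, 0, size, block)) {
            free(block);
            return 1;
        }
        VS_FIXEDFILEINFO *ffi = NULL;
        UINT len = 0;
        if (VerQueryValueW(block, L"\\", (void **)&ffi, &len) && len != 0) {
            /* major.minor.field3.api, as composed in pylauncher.rc below */
            wprintf(L"%u.%u.%u.%u\n",
                    HIWORD(ffi->dwFileVersionMS), LOWORD(ffi->dwFileVersionMS),
                    HIWORD(ffi->dwFileVersionLS), LOWORD(ffi->dwFileVersionLS));
        }
        free(block);
        return 0;
    }
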
files: PC/pylauncher.rc | 52 +++++++++++++++++++++++++- PCbuild/pylauncher.vcxproj | 5 ++ 2 files changed, 55 insertions(+), 2 deletions(-) diff --git a/PC/pylauncher.rc b/PC/pylauncher.rc --- a/PC/pylauncher.rc +++ b/PC/pylauncher.rc @@ -1,3 +1,51 @@ +#include + +#define MS_WINDOWS +#include "..\Include\modsupport.h" +#include "..\Include\patchlevel.h" +#ifdef _DEBUG +# include "pythonnt_rc_d.h" +#else +# include "pythonnt_rc.h" +#endif + +#define PYTHON_VERSION PY_VERSION "\0" +#define PYVERSION64 PY_MAJOR_VERSION, PY_MINOR_VERSION, FIELD3, PYTHON_API_VERSION + +VS_VERSION_INFO VERSIONINFO + FILEVERSION PYVERSION64 + PRODUCTVERSION PYVERSION64 + FILEFLAGSMASK 0x17L +#ifdef _DEBUG + FILEFLAGS 0x1L +#else + FILEFLAGS 0x0L +#endif + FILEOS 0x4L + FILETYPE 0x1L + FILESUBTYPE 0x0L +BEGIN + BLOCK "StringFileInfo" + BEGIN + BLOCK "080904b0" + BEGIN + VALUE "Comments", "Python Launcher for Windows" + VALUE "CompanyName", "Python Software Foundation" + VALUE "FileDescription", "Python Launcher for Windows (Console)" + VALUE "FileVersion", PYTHON_VERSION + VALUE "InternalName", "py" + VALUE "LegalCopyright", "Copyright (C) 2011-2012 Python Software Foundation" + VALUE "OriginalFilename", "py" + VALUE "ProductName", "Python Launcher for Windows" + VALUE "ProductVersion", PYTHON_VERSION + END + END + BLOCK "VarFileInfo" + BEGIN + VALUE "Translation", 0x809, 1200 + END +END + IDI_ICON1 ICON "launcher.ico" -IDI_ICON2 ICON "py.ico" -IDI_ICON3 ICON "pyc.ico" \ No newline at end of file + + diff --git a/PCbuild/pylauncher.vcxproj b/PCbuild/pylauncher.vcxproj --- a/PCbuild/pylauncher.vcxproj +++ b/PCbuild/pylauncher.vcxproj @@ -154,6 +154,11 @@ + + + {f0e0541e-f17d-430b-97c4-93adf0dd284e} + + -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 00:13:05 2012 From: python-checkins at python.org (brian.curtin) Date: Fri, 22 Jun 2012 00:13:05 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Use_GetEnvironmentVariableW?= =?utf8?q?_instead_of_=5Fwgetenv_to_silence_VC_warnings=2E?= Message-ID: http://hg.python.org/cpython/rev/a71bf682235e changeset: 77548:a71bf682235e user: Martin v. L?wis date: Thu Jun 21 16:33:09 2012 +0200 summary: Use GetEnvironmentVariableW instead of _wgetenv to silence VC warnings. files: PC/launcher.c | 27 ++++++++++++++++++--------- 1 files changed, 18 insertions(+), 9 deletions(-) diff --git a/PC/launcher.c b/PC/launcher.c --- a/PC/launcher.c +++ b/PC/launcher.c @@ -54,22 +54,31 @@ } /* - * This function is here to minimise Visual Studio - * warnings about security implications of getenv, and to - * treat blank values as if they are absent. + * This function is here to simplify memory management + * and to treat blank values as if they are absent. */ static wchar_t * get_env(wchar_t * key) { - wchar_t * result = _wgetenv(key); + /* This is not thread-safe, just like getenv */ + static wchar_t buf[256]; + DWORD result = GetEnvironmentVariableW(key, buf, 256); - if (result) { - result = skip_whitespace(result); - if (*result == L'\0') - result = NULL; + if (result > 256) { + /* Large environment variable. Accept some leakage */ + wchar_t *buf2 = (wchar_t*)malloc(sizeof(wchar_t) * (result+1)); + GetEnvironmentVariableW(key, buf2, result); + return buf2; } - return result; + + if (result == 0) + /* Either some error, e.g. ERROR_ENVVAR_NOT_FOUND, + or an empty environment variable. */ + return NULL; + + return buf; } + static void debug(wchar_t * format, ...) 
{ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 00:13:06 2012 From: python-checkins at python.org (brian.curtin) Date: Fri, 22 Jun 2012 00:13:06 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Package_the_launcher=2E?= Message-ID: http://hg.python.org/cpython/rev/5155d34aadf0 changeset: 77549:5155d34aadf0 user: Martin v. L?wis date: Thu Jun 21 17:36:05 2012 +0200 summary: Package the launcher. files: Tools/msi/msi.py | 36 ++++++++++++++++++++++++++++++------ 1 files changed, 30 insertions(+), 6 deletions(-) diff --git a/Tools/msi/msi.py b/Tools/msi/msi.py --- a/Tools/msi/msi.py +++ b/Tools/msi/msi.py @@ -286,7 +286,7 @@ None, migrate_features, None, "REMOVEOLDSNAPSHOT")]) props = "REMOVEOLDSNAPSHOT;REMOVEOLDVERSION" - props += ";TARGETDIR;DLLDIR" + props += ";TARGETDIR;DLLDIR;LAUNCHERDIR" # Installer collects the product codes of the earlier releases in # these properties. In order to allow modification of the properties, # they must be declared as secure. See "SecureCustomProperties Property" @@ -426,6 +426,8 @@ "[WindowsVolume]Python%s%s" % (major, minor)), ("SetDLLDirToTarget", 307, "DLLDIR", "[TARGETDIR]"), ("SetDLLDirToSystem32", 307, "DLLDIR", SystemFolderName), + ("SetLauncherDirToTarget", 307, "LAUNCHERDIR", "[TARGETDIR]"), + ("SetLauncherDirToWindows", 307, "LAUNCHERDIR", "[WindowsFolder]"), # msidbCustomActionTypeExe + msidbCustomActionTypeSourceFile # See "Custom Action Type 18" ("CompilePyc", 18, "python.exe", compileargs), @@ -442,6 +444,8 @@ # In the user interface, assume all-users installation if privileged. ("SetDLLDirToSystem32", 'DLLDIR="" and ' + sys32cond, 751), ("SetDLLDirToTarget", 'DLLDIR="" and not ' + sys32cond, 752), + ("SetLauncherDirToWindows", 'LAUNCHERDIR="" and ' + sys32cond, 753), + ("SetLauncherDirToTarget", 'LAUNCHERDIR="" and not ' + sys32cond, 754), ("SelectDirectoryDlg", "Not Installed", 1230), # XXX no support for resume installations yet #("ResumeDlg", "Installed AND (RESUME OR Preselected)", 1240), @@ -450,6 +454,7 @@ add_data(db, "AdminUISequence", [("InitialTargetDir", 'TARGETDIR=""', 750), ("SetDLLDirToTarget", 'DLLDIR=""', 751), + ("SetLauncherDirToTarget", 'LAUNCHERDIR=""', 752), ]) # Prepend TARGETDIR to the system path, and remove it on uninstall. 
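Returning to the get_env() rewrite in changeset a71bf682235e above: GetEnvironmentVariableW() reports the size it needs (in characters, including the terminating NUL) when the supplied buffer is too small, and returns 0 when the variable is not set, which lets the launcher keep a small static buffer and allocate only for oversized values. An alternative, sketched here purely for illustration and not taken from the launcher, is the two-call pattern in which the first call only asks for the required size; the helper name and the caller-frees contract are assumptions of the sketch.

    #include <windows.h>
    #include <stdlib.h>

    /* Sketch: caller must free() the result; NULL means unset or empty. */
    static wchar_t *
    dup_env(const wchar_t *key)
    {
        DWORD needed = GetEnvironmentVariableW(key, NULL, 0);
        if (needed == 0)                     /* variable not set */
            return NULL;
        wchar_t *buf = (wchar_t *)malloc(needed * sizeof(wchar_t));
        if (buf == NULL)
            return NULL;
        DWORD copied = GetEnvironmentVariableW(key, buf, needed);
        if (copied == 0 || copied >= needed) {  /* empty, or changed between calls */
            free(buf);
            return NULL;
        }
        return buf;
    }
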
@@ -461,6 +466,8 @@ [("InitialTargetDir", 'TARGETDIR=""', 750), ("SetDLLDirToSystem32", 'DLLDIR="" and ' + sys32cond, 751), ("SetDLLDirToTarget", 'DLLDIR="" and not ' + sys32cond, 752), + ("SetLauncherDirToWindows", 'LAUNCHERDIR="" and ' + sys32cond, 753), + ("SetLauncherDirToTarget", 'LAUNCHERDIR="" and not ' + sys32cond, 754), ("UpdateEditIDLE", None, 1050), ("CompilePyc", "COMPILEALL", 6800), ("CompilePyo", "COMPILEALL", 6801), @@ -469,6 +476,7 @@ add_data(db, "AdminExecuteSequence", [("InitialTargetDir", 'TARGETDIR=""', 750), ("SetDLLDirToTarget", 'DLLDIR=""', 751), + ("SetLauncherDirToTarget", 'LAUNCHERDIR=""', 752), ("CompilePyc", "COMPILEALL", 6800), ("CompilePyo", "COMPILEALL", 6801), ("CompileGrammar", "COMPILEALL", 6802), @@ -904,7 +912,7 @@ dirs = glob.glob(srcdir+"/../"+pat) if not dirs: raise ValueError, "Could not find "+srcdir+"/../"+pat - if len(dirs) > 2: + if len(dirs) > 2 and not snapshot: raise ValueError, "Multiple copies of "+pat dir = dirs[0] shutil.copyfileobj(open(os.path.join(dir, file)), out) @@ -939,6 +947,7 @@ # See "File Table", "Component Table", "Directory Table", # "FeatureComponents Table" def add_files(db): + installer = msilib.MakeInstaller() hgfiles = hgmanifest() cab = CAB("python") tmpfiles = [] @@ -958,11 +967,26 @@ # msidbComponentAttributesSharedDllRefCount = 8, see "Component Table" dlldir = PyDirectory(db, cab, root, srcdir, "DLLDIR", ".") + launcherdir = PyDirectory(db, cab, root, srcdir, "LAUNCHERDIR", ".") + + # msidbComponentAttributes64bit = 256; this disables registry redirection + # to allow setting the SharedDLLs key in the 64-bit portion even for a + # 32-bit installer. + # XXX does this still allow to install the component on a 32-bit system? + launcher = os.path.join(srcdir, PCBUILD, "py.exe") + launcherdir.start_component("launcher", flags = 8+256, keyfile="py.exe") + launcherdir.add_file("%s/py.exe" % PCBUILD, + version=installer.FileVersion(launcher, 0), + language=installer.FileVersion(launcher, 1)) + launcherw = os.path.join(srcdir, PCBUILD, "pyw.exe") + launcherdir.start_component("launcherw", flags = 8+256, keyfile="pyw.exe") + launcherdir.add_file("%s/pyw.exe" % PCBUILD, + version=installer.FileVersion(launcherw, 0), + language=installer.FileVersion(launcherw, 1)) pydll = "python%s%s.dll" % (major, minor) pydllsrc = os.path.join(srcdir, PCBUILD, pydll) dlldir.start_component("DLLDIR", flags = 8, keyfile = pydll, uuid = pythondll_uuid) - installer = msilib.MakeInstaller() pyversion = installer.FileVersion(pydllsrc, 0) if not snapshot: # For releases, the Python DLL has the same version as the @@ -1211,11 +1235,11 @@ "text/plain", "REGISTRY.def"), #Verbs ("py.open", -1, pat % (testprefix, "", "open"), "", - r'"[TARGETDIR]python.exe" "%1" %*', "REGISTRY.def"), + r'"[LAUNCHERDIR]py.exe" "%1" %*', "REGISTRY.def"), ("pyw.open", -1, pat % (testprefix, "NoCon", "open"), "", - r'"[TARGETDIR]pythonw.exe" "%1" %*', "REGISTRY.def"), + r'"[LAUNCHERDIR]pyw.exe" "%1" %*', "REGISTRY.def"), ("pyc.open", -1, pat % (testprefix, "Compiled", "open"), "", - r'"[TARGETDIR]python.exe" "%1" %*', "REGISTRY.def"), + r'"[LAUNCHERDIR]py.exe" "%1" %*', "REGISTRY.def"), ] + tcl_verbs + [ #Icons ("py.icon", -1, pat2 % (testprefix, ""), "", -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 00:13:07 2012 From: python-checkins at python.org (brian.curtin) Date: Fri, 22 Jun 2012 00:13:07 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_off-by-one_error=2E?= Message-ID: 
http://hg.python.org/cpython/rev/d4e744d79e67 changeset: 77550:d4e744d79e67 user: Martin v. L?wis date: Thu Jun 21 17:36:15 2012 +0200 summary: Fix off-by-one error. files: PC/launcher.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/PC/launcher.c b/PC/launcher.c --- a/PC/launcher.c +++ b/PC/launcher.c @@ -63,7 +63,7 @@ static wchar_t buf[256]; DWORD result = GetEnvironmentVariableW(key, buf, 256); - if (result > 256) { + if (result > 255) { /* Large environment variable. Accept some leakage */ wchar_t *buf2 = (wchar_t*)malloc(sizeof(wchar_t) * (result+1)); GetEnvironmentVariableW(key, buf2, result); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 00:13:08 2012 From: python-checkins at python.org (brian.curtin) Date: Fri, 22 Jun 2012 00:13:08 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_UNICODE_glitch=2E?= Message-ID: http://hg.python.org/cpython/rev/e8796a9631ce changeset: 77551:e8796a9631ce user: Martin v. L?wis date: Thu Jun 21 18:15:54 2012 +0200 summary: Fix UNICODE glitch. files: PC/launcher.c | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/PC/launcher.c b/PC/launcher.c --- a/PC/launcher.c +++ b/PC/launcher.c @@ -230,8 +230,8 @@ continue; } data_size = sizeof(ip->executable) - 1; - status = RegQueryValueEx(ip_key, NULL, NULL, &type, - (LPBYTE) ip->executable, &data_size); + status = RegQueryValueExW(ip_key, NULL, NULL, &type, + (LPBYTE)ip->executable, &data_size); RegCloseKey(ip_key); if (status != ERROR_SUCCESS) { winerror(status, message, MSGSIZE); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 00:13:08 2012 From: python-checkins at python.org (brian.curtin) Date: Fri, 22 Jun 2012 00:13:08 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Build_and_bundle_the_32-bit?= =?utf8?q?_launcher_in_all_configurations=2E?= Message-ID: http://hg.python.org/cpython/rev/13964ca5fb53 changeset: 77552:13964ca5fb53 user: Martin v. L?wis date: Thu Jun 21 18:24:32 2012 +0200 summary: Build and bundle the 32-bit launcher in all configurations. 
files: PCbuild/pcbuild.sln | 16 ++++++++-------- Tools/msi/msi.py | 5 +++-- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/PCbuild/pcbuild.sln b/PCbuild/pcbuild.sln --- a/PCbuild/pcbuild.sln +++ b/PCbuild/pcbuild.sln @@ -583,22 +583,22 @@ {7B2727B5-5A3F-40EE-A866-43A13CD31446}.PGUpdate|x64.ActiveCfg = Release|Win32 {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Release|Win32.ActiveCfg = Release|Win32 {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Release|Win32.Build.0 = Release|Win32 - {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Release|x64.ActiveCfg = Release|x64 - {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Release|x64.Build.0 = Release|x64 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Release|x64.ActiveCfg = Release|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Release|x64.Build.0 = Release|Win32 {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Debug|Win32.ActiveCfg = Debug|Win32 {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Debug|Win32.Build.0 = Debug|Win32 {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Debug|x64.ActiveCfg = Debug|x64 {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Debug|x64.Build.0 = Debug|x64 {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.PGInstrument|Win32.ActiveCfg = Release|x64 - {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.PGInstrument|x64.ActiveCfg = Release|x64 - {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.PGInstrument|x64.Build.0 = Release|x64 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.PGInstrument|x64.ActiveCfg = Release|Win32 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.PGInstrument|x64.Build.0 = Release|Win32 {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.PGUpdate|Win32.ActiveCfg = Release|x64 - {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.PGUpdate|x64.ActiveCfg = Release|x64 - {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.PGUpdate|x64.Build.0 = Release|x64 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.PGUpdate|x64.ActiveCfg = Release|Win32 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.PGUpdate|x64.Build.0 = Release|Win32 {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Release|Win32.ActiveCfg = Release|Win32 {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Release|Win32.Build.0 = Release|Win32 - {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Release|x64.ActiveCfg = Release|x64 - {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Release|x64.Build.0 = Release|x64 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Release|x64.ActiveCfg = Release|Win32 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Release|x64.Build.0 = Release|Win32 {023B3CDA-59C8-45FD-95DC-F8973322ED34}.Debug|Win32.ActiveCfg = Debug|Win32 {023B3CDA-59C8-45FD-95DC-F8973322ED34}.Debug|Win32.Build.0 = Debug|Win32 {023B3CDA-59C8-45FD-95DC-F8973322ED34}.Debug|x64.ActiveCfg = Debug|Win32 diff --git a/Tools/msi/msi.py b/Tools/msi/msi.py --- a/Tools/msi/msi.py +++ b/Tools/msi/msi.py @@ -973,12 +973,13 @@ # to allow setting the SharedDLLs key in the 64-bit portion even for a # 32-bit installer. # XXX does this still allow to install the component on a 32-bit system? 
- launcher = os.path.join(srcdir, PCBUILD, "py.exe") + # Pick up 32-bit binary always + launcher = os.path.join(srcdir, "PCBuild", "py.exe") launcherdir.start_component("launcher", flags = 8+256, keyfile="py.exe") launcherdir.add_file("%s/py.exe" % PCBUILD, version=installer.FileVersion(launcher, 0), language=installer.FileVersion(launcher, 1)) - launcherw = os.path.join(srcdir, PCBUILD, "pyw.exe") + launcherw = os.path.join(srcdir, "PCBuild", "pyw.exe") launcherdir.start_component("launcherw", flags = 8+256, keyfile="pyw.exe") launcherdir.add_file("%s/pyw.exe" % PCBUILD, version=installer.FileVersion(launcherw, 0), -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 00:13:09 2012 From: python-checkins at python.org (brian.curtin) Date: Fri, 22 Jun 2012 00:13:09 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Remove_associator_project_-?= =?utf8?q?_it=27s_not_needed?= Message-ID: http://hg.python.org/cpython/rev/f0eb280ba898 changeset: 77553:f0eb280ba898 user: Brian Curtin date: Thu Jun 21 16:35:12 2012 -0500 summary: Remove associator project - it's not needed files: PC/associator.c | 731 ------- PC/associator.h | 1480 ---------------- PC/associator.rc | 97 - PCbuild/associator.vcxproj | 84 - PCbuild/associator.vcxproj.filters | 32 - PCbuild/pcbuild.sln | 2 - 6 files changed, 0 insertions(+), 2426 deletions(-) diff --git a/PC/associator.c b/PC/associator.c deleted file mode 100644 --- a/PC/associator.c +++ /dev/null @@ -1,731 +0,0 @@ -/* - * Copyright (C) 2011-2012 Vinay Sajip. All rights reserved. - * - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - */ -#include -#include -#include -#include "associator.h" - -#define PYTHON_EXECUTABLE L"python.exe" - -#define MSGSIZE 1024 -#define MAX_VERSION_SIZE 4 - -typedef struct { - wchar_t version[MAX_VERSION_SIZE]; /* m.n */ - int bits; /* 32 or 64 */ - wchar_t executable[MAX_PATH]; -} INSTALLED_PYTHON; - -/* - * To avoid messing about with heap allocations, just assume we can allocate - * statically and never have to deal with more versions than this. 
- */ -#define MAX_INSTALLED_PYTHONS 100 - -static INSTALLED_PYTHON installed_pythons[MAX_INSTALLED_PYTHONS]; - -static size_t num_installed_pythons = 0; - -/* to hold SOFTWARE\Python\PythonCore\X.Y\InstallPath */ -#define IP_BASE_SIZE 40 -#define IP_SIZE (IP_BASE_SIZE + MAX_VERSION_SIZE) -#define CORE_PATH L"SOFTWARE\\Python\\PythonCore" - -static wchar_t * location_checks[] = { - L"\\", -/* - L"\\PCBuild\\", - L"\\PCBuild\\amd64\\", - */ - NULL -}; - -static wchar_t * -skip_whitespace(wchar_t * p) -{ - while (*p && isspace(*p)) - ++p; - return p; -} - -/* - * This function is here to minimise Visual Studio - * warnings about security implications of getenv, and to - * treat blank values as if they are absent. - */ -static wchar_t * get_env(wchar_t * key) -{ - wchar_t * result = _wgetenv(key); - - if (result) { - result = skip_whitespace(result); - if (*result == L'\0') - result = NULL; - } - return result; -} - -static FILE * log_fp = NULL; - -static void -debug(wchar_t * format, ...) -{ - va_list va; - - if (log_fp != NULL) { - va_start(va, format); - vfwprintf_s(log_fp, format, va); - } -} - -static void winerror(int rc, wchar_t * message, int size) -{ - FormatMessageW( - FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS, - NULL, rc, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), - message, size, NULL); -} - -static INSTALLED_PYTHON * -find_existing_python(wchar_t * path) -{ - INSTALLED_PYTHON * result = NULL; - size_t i; - INSTALLED_PYTHON * ip; - - for (i = 0, ip = installed_pythons; i < num_installed_pythons; i++, ip++) { - if (_wcsicmp(path, ip->executable) == 0) { - result = ip; - break; - } - } - return result; -} - -static void -locate_pythons_for_key(HKEY root, REGSAM flags) -{ - HKEY core_root, ip_key; - LSTATUS status = RegOpenKeyExW(root, CORE_PATH, 0, flags, &core_root); - wchar_t message[MSGSIZE]; - DWORD i; - size_t n; - BOOL ok; - DWORD type, data_size, attrs; - INSTALLED_PYTHON * ip, * pip; - wchar_t ip_path[IP_SIZE]; - wchar_t * check; - wchar_t ** checkp; - wchar_t *key_name = (root == HKEY_LOCAL_MACHINE) ? 
L"HKLM" : L"HKCU"; - - if (status != ERROR_SUCCESS) - debug(L"locate_pythons_for_key: unable to open PythonCore key in %s\n", - key_name); - else { - ip = &installed_pythons[num_installed_pythons]; - for (i = 0; num_installed_pythons < MAX_INSTALLED_PYTHONS; i++) { - status = RegEnumKeyW(core_root, i, ip->version, MAX_VERSION_SIZE); - if (status != ERROR_SUCCESS) { - if (status != ERROR_NO_MORE_ITEMS) { - /* unexpected error */ - winerror(status, message, MSGSIZE); - debug(L"Can't enumerate registry key for version %s: %s\n", - ip->version, message); - } - break; - } - else { - _snwprintf_s(ip_path, IP_SIZE, _TRUNCATE, - L"%s\\%s\\InstallPath", CORE_PATH, ip->version); - status = RegOpenKeyExW(root, ip_path, 0, flags, &ip_key); - if (status != ERROR_SUCCESS) { - winerror(status, message, MSGSIZE); - // Note: 'message' already has a trailing \n - debug(L"%s\\%s: %s", key_name, ip_path, message); - continue; - } - data_size = sizeof(ip->executable) - 1; - status = RegQueryValueEx(ip_key, NULL, NULL, &type, - (LPBYTE) ip->executable, &data_size); - RegCloseKey(ip_key); - if (status != ERROR_SUCCESS) { - winerror(status, message, MSGSIZE); - debug(L"%s\\%s: %s\n", key_name, ip_path, message); - continue; - } - if (type == REG_SZ) { - data_size = data_size / sizeof(wchar_t) - 1; /* for NUL */ - if (ip->executable[data_size - 1] == L'\\') - --data_size; /* reg value ended in a backslash */ - /* ip->executable is data_size long */ - for (checkp = location_checks; *checkp; ++checkp) { - check = *checkp; - _snwprintf_s(&ip->executable[data_size], - MAX_PATH - data_size, - MAX_PATH - data_size, - L"%s%s", check, PYTHON_EXECUTABLE); - attrs = GetFileAttributesW(ip->executable); - if (attrs == INVALID_FILE_ATTRIBUTES) { - winerror(GetLastError(), message, MSGSIZE); - debug(L"locate_pythons_for_key: %s: %s", - ip->executable, message); - } - else if (attrs & FILE_ATTRIBUTE_DIRECTORY) { - debug(L"locate_pythons_for_key: '%s' is a \ -directory\n", - ip->executable, attrs); - } - else if (find_existing_python(ip->executable)) { - debug(L"locate_pythons_for_key: %s: already \ -found: %s\n", ip->executable); - } - else { - /* check the executable type. 
*/ - ok = GetBinaryTypeW(ip->executable, &attrs); - if (!ok) { - debug(L"Failure getting binary type: %s\n", - ip->executable); - } - else { - if (attrs == SCS_64BIT_BINARY) - ip->bits = 64; - else if (attrs == SCS_32BIT_BINARY) - ip->bits = 32; - else - ip->bits = 0; - if (ip->bits == 0) { - debug(L"locate_pythons_for_key: %s: \ -invalid binary type: %X\n", - ip->executable, attrs); - } - else { - if (wcschr(ip->executable, L' ') != NULL) { - /* has spaces, so quote */ - n = wcslen(ip->executable); - memmove(&ip->executable[1], - ip->executable, n * sizeof(wchar_t)); - ip->executable[0] = L'\"'; - ip->executable[n + 1] = L'\"'; - ip->executable[n + 2] = L'\0'; - } - debug(L"locate_pythons_for_key: %s \ -is a %dbit executable\n", - ip->executable, ip->bits); - ++num_installed_pythons; - pip = ip++; - if (num_installed_pythons >= - MAX_INSTALLED_PYTHONS) - break; - /* Copy over the attributes for the next */ - *ip = *pip; - } - } - } - } - } - } - } - RegCloseKey(core_root); - } -} - -static int -compare_pythons(const void * p1, const void * p2) -{ - INSTALLED_PYTHON * ip1 = (INSTALLED_PYTHON *) p1; - INSTALLED_PYTHON * ip2 = (INSTALLED_PYTHON *) p2; - /* note reverse sorting on version */ - int result = wcscmp(ip2->version, ip1->version); - - if (result == 0) - result = ip2->bits - ip1->bits; /* 64 before 32 */ - return result; -} - -static void -locate_all_pythons() -{ -#if defined(_M_X64) - // If we are a 64bit process, first hit the 32bit keys. - debug(L"locating Pythons in 32bit registry\n"); - locate_pythons_for_key(HKEY_CURRENT_USER, KEY_READ | KEY_WOW64_32KEY); - locate_pythons_for_key(HKEY_LOCAL_MACHINE, KEY_READ | KEY_WOW64_32KEY); -#else - // If we are a 32bit process on a 64bit Windows, first hit the 64bit keys. - BOOL f64 = FALSE; - if (IsWow64Process(GetCurrentProcess(), &f64) && f64) { - debug(L"locating Pythons in 64bit registry\n"); - locate_pythons_for_key(HKEY_CURRENT_USER, KEY_READ | KEY_WOW64_64KEY); - locate_pythons_for_key(HKEY_LOCAL_MACHINE, KEY_READ | KEY_WOW64_64KEY); - } -#endif - // now hit the "native" key for this process bittedness. 
- debug(L"locating Pythons in native registry\n"); - locate_pythons_for_key(HKEY_CURRENT_USER, KEY_READ); - locate_pythons_for_key(HKEY_LOCAL_MACHINE, KEY_READ); - qsort(installed_pythons, num_installed_pythons, sizeof(INSTALLED_PYTHON), - compare_pythons); -} - -typedef struct { - wchar_t * path; - wchar_t * key; - wchar_t * value; -} REGISTRY_ENTRY; - -static REGISTRY_ENTRY registry_entries[] = { - { L".py", NULL, L"Python.File" }, - { L".pyc", NULL, L"Python.CompiledFile" }, - { L".pyo", NULL, L"Python.CompiledFile" }, - { L".pyw", NULL, L"Python.NoConFile" }, - - { L"Python.CompiledFile", NULL, L"Compiled Python File" }, - { L"Python.CompiledFile\\DefaultIcon", NULL, L"pyc.ico" }, - { L"Python.CompiledFile\\shell\\open", NULL, L"Open" }, - { L"Python.CompiledFile\\shell\\open\\command", NULL, L"python.exe" }, - - { L"Python.File", NULL, L"Python File" }, - { L"Python.File\\DefaultIcon", NULL, L"py.ico" }, - { L"Python.File\\shell\\open", NULL, L"Open" }, - { L"Python.File\\shell\\open\\command", NULL, L"python.exe" }, - - { L"Python.NoConFile", NULL, L"Python File (no console)" }, - { L"Python.NoConFile\\DefaultIcon", NULL, L"py.ico" }, - { L"Python.NoConFile\\shell\\open", NULL, L"Open" }, - { L"Python.NoConFile\\shell\\open\\command", NULL, L"pythonw.exe" }, - - { NULL } -}; - -static BOOL -do_association(INSTALLED_PYTHON * ip) -{ - LONG rc; - BOOL result = TRUE; - REGISTRY_ENTRY * rp = registry_entries; - wchar_t value[MAX_PATH]; - wchar_t root[MAX_PATH]; - wchar_t message[MSGSIZE]; - wchar_t * pvalue; - HKEY hKey; - DWORD len; - - wcsncpy_s(root, MAX_PATH, ip->executable, _TRUNCATE); - pvalue = wcsrchr(root, '\\'); - if (pvalue) - *pvalue = L'\0'; - - for (; rp->path; ++rp) { - if (wcsstr(rp->path, L"DefaultIcon")) { - pvalue = value; - _snwprintf_s(value, MAX_PATH, _TRUNCATE, - L"%s\\DLLs\\%s", root, rp->value); - } - else if (wcsstr(rp->path, L"open\\command")) { - pvalue = value; - _snwprintf_s(value, MAX_PATH, _TRUNCATE, - L"%s\\%s \"%%1\" %%*", root, rp->value); - } - else { - pvalue = rp->value; - } - /* use rp->path, rp->key, pvalue */ - /* NOTE: size is in bytes */ - len = (DWORD) ((1 + wcslen(pvalue)) * sizeof(wchar_t)); - rc = RegOpenKeyEx(HKEY_CLASSES_ROOT, rp->path, 0, KEY_SET_VALUE, &hKey); - if (rc == ERROR_SUCCESS) { - rc = RegSetValueExW(hKey, rp->key, 0, REG_SZ, (LPBYTE) pvalue, len); - RegCloseKey(hKey); - } - if (rc != ERROR_SUCCESS) { - winerror(rc, message, MSGSIZE); - MessageBoxW(NULL, message, L"Unable to set file associations", MB_OK | MB_ICONSTOP); - result = FALSE; - break; - } - } - return result; -} - -static BOOL -associations_exist() -{ - BOOL result = FALSE; - REGISTRY_ENTRY * rp = registry_entries; - wchar_t buffer[MSGSIZE]; - LONG csize = MSGSIZE * sizeof(wchar_t); - LONG rc; - - /* Currently, if any is found, we assume they're all there. 
*/ - - for (; rp->path; ++rp) { - LONG size = csize; - rc = RegQueryValueW(HKEY_CLASSES_ROOT, rp->path, buffer, &size); - if (rc == ERROR_SUCCESS) { - result = TRUE; - break; - } - } - return result; -} - -/* --------------------------------------------------------------------*/ - -static BOOL CALLBACK -find_by_title(HWND hwnd, LPARAM lParam) -{ - wchar_t buffer[MSGSIZE]; - BOOL not_found = TRUE; - - wchar_t * p = (wchar_t *) GetWindowTextW(hwnd, buffer, MSGSIZE); - if (wcsstr(buffer, L"Python Launcher") == buffer) { - not_found = FALSE; - *((HWND *) lParam) = hwnd; - } - return not_found; -} - -static HWND -find_installer_window() -{ - HWND result = NULL; - BOOL found = EnumWindows(find_by_title, (LPARAM) &result); - - return result; -} - -static void -centre_window_in_front(HWND hwnd) -{ - HWND hwndParent; - RECT rect, rectP; - int width, height; - int screenwidth, screenheight; - int x, y; - - //make the window relative to its parent - - screenwidth = GetSystemMetrics(SM_CXSCREEN); - screenheight = GetSystemMetrics(SM_CYSCREEN); - - hwndParent = GetParent(hwnd); - - GetWindowRect(hwnd, &rect); - if (hwndParent) { - GetWindowRect(hwndParent, &rectP); - } - else { - rectP.left = rectP.top = 0; - rectP.right = screenwidth; - rectP.bottom = screenheight; - } - - width = rect.right - rect.left; - height = rect.bottom - rect.top; - - x = ((rectP.right-rectP.left) - width) / 2 + rectP.left; - y = ((rectP.bottom-rectP.top) - height) / 2 + rectP.top; - - - //make sure that the dialog box never moves outside of - //the screen - - if (x < 0) - x = 0; - - if (y < 0) - y = 0; - - if (x + width > screenwidth) - x = screenwidth - width; - if (y + height > screenheight) - y = screenheight - height; - - SetWindowPos(hwnd, HWND_TOPMOST, x, y, width, height, SWP_SHOWWINDOW); -} - -static void -init_list(HWND hList) -{ - LVCOLUMNW column; - LVITEMW item; - int colno = 0; - int width = 0; - int row; - size_t i; - INSTALLED_PYTHON * ip; - RECT r; - LPARAM style; - - GetClientRect(hList, &r); - - style = SendMessage(hList, LVM_GETEXTENDEDLISTVIEWSTYLE, 0, 0); - SendMessage(hList, LVM_SETEXTENDEDLISTVIEWSTYLE, - 0, style | LVS_EX_FULLROWSELECT); - - /* First set up the columns */ - memset(&column, 0, sizeof(column)); - column.mask = LVCF_TEXT | LVCF_WIDTH | LVCF_SUBITEM; - column.pszText = L"Version"; - column.cx = 60; - width += column.cx; - SendMessage(hList, LVM_INSERTCOLUMN, colno++,(LPARAM) &column); -#if defined(_M_X64) - column.pszText = L"Bits"; - column.cx = 40; - column.iSubItem = colno; - SendMessage(hList, LVM_INSERTCOLUMN, colno++,(LPARAM) &column); - width += column.cx; -#endif - column.pszText = L"Path"; - column.cx = r.right - r.top - width; - column.iSubItem = colno; - SendMessage(hList, LVM_INSERTCOLUMN, colno++,(LPARAM) &column); - - /* Then insert the rows */ - memset(&item, 0, sizeof(item)); - item.mask = LVIF_TEXT; - for (i = 0, ip = installed_pythons; i < num_installed_pythons; i++,ip++) { - item.iItem = (int) i; - item.iSubItem = 0; - item.pszText = ip->version; - colno = 0; - row = (int) SendMessage(hList, LVM_INSERTITEM, 0, (LPARAM) &item); -#if defined(_M_X64) - item.iSubItem = ++colno; - item.pszText = (ip->bits == 64) ? 
L"64": L"32"; - SendMessage(hList, LVM_SETITEM, row, (LPARAM) &item); -#endif - item.iSubItem = ++colno; - item.pszText = ip->executable; - SendMessage(hList, LVM_SETITEM, row, (LPARAM) &item); - } -} - -/* ----------------------------------------------------------------*/ - -typedef int (__stdcall *MSGBOXWAPI)(IN HWND hWnd, - IN LPCWSTR lpText, IN LPCWSTR lpCaption, - IN UINT uType, IN WORD wLanguageId, IN DWORD dwMilliseconds); - -int MessageBoxTimeoutW(IN HWND hWnd, IN LPCWSTR lpText, - IN LPCWSTR lpCaption, IN UINT uType, - IN WORD wLanguageId, IN DWORD dwMilliseconds); - -#define MB_TIMEDOUT 32000 - -int MessageBoxTimeoutW(HWND hWnd, LPCWSTR lpText, - LPCWSTR lpCaption, UINT uType, WORD wLanguageId, DWORD dwMilliseconds) -{ - static MSGBOXWAPI MsgBoxTOW = NULL; - - if (!MsgBoxTOW) { - HMODULE hUser32 = GetModuleHandleW(L"user32.dll"); - if (hUser32) - MsgBoxTOW = (MSGBOXWAPI)GetProcAddress(hUser32, - "MessageBoxTimeoutW"); - else { - //stuff happened, add code to handle it here - //(possibly just call MessageBox()) - return 0; - } - } - - if (MsgBoxTOW) - return MsgBoxTOW(hWnd, lpText, lpCaption, uType, wLanguageId, - dwMilliseconds); - - return 0; -} -/* ----------------------------------------------------------------*/ - -static INT_PTR CALLBACK -DialogProc(HWND hDlg, UINT message, WPARAM wParam, LPARAM lParam) -{ - HWND hList; - HWND hChild; - static int selected_index = -1; - WORD low = LOWORD(wParam); - wchar_t confirmation[MSGSIZE]; - BOOL result = FALSE; - - debug(L"DialogProc entry: 0x%02X\n", message); - switch (message) { - case WM_INITDIALOG: - hList = GetDlgItem(hDlg, IDC_LIST1); - init_list(hList); - SetFocus(hList); - result = TRUE; - break; - case WM_COMMAND: - if((low == IDOK) || (low == IDCANCEL)) { - HMODULE hUser32 = LoadLibraryW(L"user32.dll"); - - if (low == IDCANCEL) - wcsncpy_s(confirmation, MSGSIZE, L"No association was \ -performed.", _TRUNCATE); - else { - if (selected_index < 0) { - /* should never happen */ - wcsncpy_s(confirmation, MSGSIZE, L"The Python version to \ -associate with couldn't be determined.", _TRUNCATE); - } - else { - INSTALLED_PYTHON * ip = &installed_pythons[selected_index]; - - /* Do the association and set the message. 
*/ - do_association(ip); - _snwprintf_s(confirmation, MSGSIZE, _TRUNCATE, - L"Associated Python files with the Python %s \ -found at '%s'", ip->version, ip->executable); - } - } - - if (hUser32) { - MessageBoxTimeoutW(hDlg, - confirmation, - L"Association Status", - MB_OK | MB_SETFOREGROUND | - MB_ICONINFORMATION, - 0, 2000); - FreeLibrary(hUser32); - } - PostQuitMessage(0); - EndDialog(hDlg, 0); - result = TRUE; - } - break; - case WM_NOTIFY: - if (low == IDC_LIST1) { - NMLISTVIEW * p = (NMLISTVIEW *) lParam; - - if ((p->hdr.code == LVN_ITEMCHANGED) && - (p->uNewState & LVIS_SELECTED)) { - hChild = GetDlgItem(hDlg, IDOK); - selected_index = p->iItem; - EnableWindow(hChild, selected_index >= 0); - } - result = TRUE; - } - break; - case WM_DESTROY: - PostQuitMessage(0); - result = TRUE; - break; - case WM_CLOSE: - DestroyWindow(hDlg); - result = TRUE; - break; - } - debug(L"DialogProc exit: %d\n", result); - return result; -} - -int WINAPI wWinMain(HINSTANCE hInstance, - HINSTANCE hPrevInstance, - LPWSTR lpCmdLine, int nShow) -{ - MSG msg; - HWND hDialog = 0; - HICON hIcon; - HWND hParent; - int status; - DWORD dw; - INITCOMMONCONTROLSEX icx; - wchar_t * wp; - - wp = get_env(L"PYASSOC_DEBUG"); - if ((wp != NULL) && (*wp != L'\0')) { - fopen_s(&log_fp, "c:\\temp\\associator.log", "w"); - } - - if (!lpCmdLine) { - debug(L"No command line specified.\n"); - return 0; - } - if (!wcsstr(lpCmdLine, L"nocheck") && - associations_exist()) /* Could have been restored by uninstall. */ - return 0; - - locate_all_pythons(); - - if (num_installed_pythons == 0) - return 0; - - debug(L"%d pythons found.\n", num_installed_pythons); - - /* - * OK, now there's something to do. - * - * We need to find the installer window to be the parent of - * our dialog, otherwise our dialog will be behind it. - * - * First, initialize common controls. If you don't - on - * some machines it works fine, on others the dialog never - * appears! - */ - - icx.dwSize = sizeof(icx); - icx.dwICC = ICC_LISTVIEW_CLASSES; - InitCommonControlsEx(&icx); - - hParent = find_installer_window(); - debug(L"installer window: %X\n", hParent); - hDialog = CreateDialogW(hInstance, MAKEINTRESOURCE(DLG_MAIN), hParent, - DialogProc); - dw = GetLastError(); - debug(L"dialog created: %X: error: %X\n", hDialog, dw); - - if (!hDialog) - { - wchar_t buf [100]; - _snwprintf_s(buf, 100, _TRUNCATE, L"Error 0x%x", GetLastError()); - MessageBoxW(0, buf, L"CreateDialog", MB_ICONEXCLAMATION | MB_OK); - return 1; - } - - centre_window_in_front(hDialog); - hIcon = LoadIcon( GetModuleHandle(NULL), MAKEINTRESOURCE(DLG_ICON)); - if( hIcon ) - { - SendMessage(hDialog, WM_SETICON, ICON_BIG, (LPARAM) hIcon); - SendMessage(hDialog, WM_SETICON, ICON_SMALL, (LPARAM) hIcon); - DestroyIcon(hIcon); - } - - while ((status = GetMessage (& msg, 0, 0, 0)) != 0) - { - if (status == -1) - return -1; - if (!IsDialogMessage(hDialog, & msg)) - { - TranslateMessage( & msg ); - DispatchMessage( & msg ); - } - } - - return (int) msg.wParam; -} diff --git a/PC/associator.h b/PC/associator.h deleted file mode 100644 --- a/PC/associator.h +++ /dev/null @@ -1,1480 +0,0 @@ -//{{NO_DEPENDENCIES}} -// Microsoft Visual C++ generated include file. 
-// Used by main.rc -// -#define SW_HIDE 0 -#define HIDE_WINDOW 0 -#define WM_NULL 0x0000 -#define WA_INACTIVE 0 -#define HTNOWHERE 0 -#define SMTO_NORMAL 0x0000 -#define ICON_SMALL 0 -#define SIZE_RESTORED 0 -#define BN_CLICKED 0 -#define BST_UNCHECKED 0x0000 -#define HDS_HORZ 0x0000 -#define TBSTYLE_BUTTON 0x0000 -#define TBS_HORZ 0x0000 -#define TBS_BOTTOM 0x0000 -#define TBS_RIGHT 0x0000 -#define LVS_ICON 0x0000 -#define LVS_ALIGNTOP 0x0000 -#define TCS_TABS 0x0000 -#define TCS_SINGLELINE 0x0000 -#define TCS_RIGHTJUSTIFY 0x0000 -#define DTS_SHORTDATEFORMAT 0x0000 -#define PGS_VERT 0x00000000 -#define LANG_NEUTRAL 0x00 -#define SUBLANG_NEUTRAL 0x00 -#define SORT_DEFAULT 0x0 -#define SORT_JAPANESE_XJIS 0x0 -#define SORT_CHINESE_BIG5 0x0 -#define SORT_CHINESE_PRCP 0x0 -#define SORT_KOREAN_KSC 0x0 -#define SORT_HUNGARIAN_DEFAULT 0x0 -#define SORT_GEORGIAN_TRADITIONAL 0x0 -#define _USE_DECLSPECS_FOR_SAL 0 -#define SW_SHOWNORMAL 1 -#define SW_NORMAL 1 -#define SHOW_OPENWINDOW 1 -#define SW_PARENTCLOSING 1 -#define VK_LBUTTON 0x01 -#define WM_CREATE 0x0001 -#define WA_ACTIVE 1 -#define PWR_OK 1 -#define PWR_SUSPENDREQUEST 1 -#define NFR_ANSI 1 -#define UIS_SET 1 -#define UISF_HIDEFOCUS 0x1 -#define XBUTTON1 0x0001 -#define WMSZ_LEFT 1 -#define HTCLIENT 1 -#define SMTO_BLOCK 0x0001 -#define MA_ACTIVATE 1 -#define ICON_BIG 1 -#define SIZE_MINIMIZED 1 -#define MK_LBUTTON 0x0001 -#define TME_HOVER 0x00000001 -#define CS_VREDRAW 0x0001 -#define CF_TEXT 1 -#define SCF_ISSECURE 0x00000001 -#define IDOK 1 -#define BN_PAINT 1 -#define BST_CHECKED 0x0001 -#define TBSTYLE_SEP 0x0001 -#define TTS_ALWAYSTIP 0x01 -#define TBS_AUTOTICKS 0x0001 -#define UDS_WRAP 0x0001 -#define PBS_SMOOTH 0x01 -#define LWS_TRANSPARENT 0x0001 -#define LVS_REPORT 0x0001 -#define TVS_HASBUTTONS 0x0001 -#define TCS_SCROLLOPPOSITE 0x0001 -#define ACS_CENTER 0x0001 -#define MCS_DAYSTATE 0x0001 -#define DTS_UPDOWN 0x0001 -#define PGS_HORZ 0x00000001 -#define NFS_EDIT 0x0001 -#define BCSIF_GLYPH 0x0001 -#define BCSS_NOSPLIT 0x0001 -#define LANG_ARABIC 0x01 -#define SUBLANG_DEFAULT 0x01 -#define SUBLANG_AFRIKAANS_SOUTH_AFRICA 0x01 -#define SUBLANG_ALBANIAN_ALBANIA 0x01 -#define SUBLANG_ALSATIAN_FRANCE 0x01 -#define SUBLANG_AMHARIC_ETHIOPIA 0x01 -#define SUBLANG_ARABIC_SAUDI_ARABIA 0x01 -#define SUBLANG_ARMENIAN_ARMENIA 0x01 -#define SUBLANG_ASSAMESE_INDIA 0x01 -#define SUBLANG_AZERI_LATIN 0x01 -#define SUBLANG_BASHKIR_RUSSIA 0x01 -#define SUBLANG_BASQUE_BASQUE 0x01 -#define SUBLANG_BELARUSIAN_BELARUS 0x01 -#define SUBLANG_BENGALI_INDIA 0x01 -#define SUBLANG_BRETON_FRANCE 0x01 -#define SUBLANG_BULGARIAN_BULGARIA 0x01 -#define SUBLANG_CATALAN_CATALAN 0x01 -#define SUBLANG_CHINESE_TRADITIONAL 0x01 -#define SUBLANG_CORSICAN_FRANCE 0x01 -#define SUBLANG_CZECH_CZECH_REPUBLIC 0x01 -#define SUBLANG_CROATIAN_CROATIA 0x01 -#define SUBLANG_DANISH_DENMARK 0x01 -#define SUBLANG_DARI_AFGHANISTAN 0x01 -#define SUBLANG_DIVEHI_MALDIVES 0x01 -#define SUBLANG_DUTCH 0x01 -#define SUBLANG_ENGLISH_US 0x01 -#define SUBLANG_ESTONIAN_ESTONIA 0x01 -#define SUBLANG_FAEROESE_FAROE_ISLANDS 0x01 -#define SUBLANG_FILIPINO_PHILIPPINES 0x01 -#define SUBLANG_FINNISH_FINLAND 0x01 -#define SUBLANG_FRENCH 0x01 -#define SUBLANG_FRISIAN_NETHERLANDS 0x01 -#define SUBLANG_GALICIAN_GALICIAN 0x01 -#define SUBLANG_GEORGIAN_GEORGIA 0x01 -#define SUBLANG_GERMAN 0x01 -#define SUBLANG_GREEK_GREECE 0x01 -#define SUBLANG_GREENLANDIC_GREENLAND 0x01 -#define SUBLANG_GUJARATI_INDIA 0x01 -#define SUBLANG_HAUSA_NIGERIA_LATIN 0x01 -#define SUBLANG_HEBREW_ISRAEL 0x01 -#define 
SUBLANG_HINDI_INDIA 0x01 -#define SUBLANG_HUNGARIAN_HUNGARY 0x01 -#define SUBLANG_ICELANDIC_ICELAND 0x01 -#define SUBLANG_IGBO_NIGERIA 0x01 -#define SUBLANG_INDONESIAN_INDONESIA 0x01 -#define SUBLANG_INUKTITUT_CANADA 0x01 -#define SUBLANG_ITALIAN 0x01 -#define SUBLANG_JAPANESE_JAPAN 0x01 -#define SUBLANG_KANNADA_INDIA 0x01 -#define SUBLANG_KAZAK_KAZAKHSTAN 0x01 -#define SUBLANG_KHMER_CAMBODIA 0x01 -#define SUBLANG_KICHE_GUATEMALA 0x01 -#define SUBLANG_KINYARWANDA_RWANDA 0x01 -#define SUBLANG_KONKANI_INDIA 0x01 -#define SUBLANG_KOREAN 0x01 -#define SUBLANG_KYRGYZ_KYRGYZSTAN 0x01 -#define SUBLANG_LAO_LAO 0x01 -#define SUBLANG_LATVIAN_LATVIA 0x01 -#define SUBLANG_LITHUANIAN 0x01 -#define SUBLANG_LUXEMBOURGISH_LUXEMBOURG 0x01 -#define SUBLANG_MACEDONIAN_MACEDONIA 0x01 -#define SUBLANG_MALAY_MALAYSIA 0x01 -#define SUBLANG_MALAYALAM_INDIA 0x01 -#define SUBLANG_MALTESE_MALTA 0x01 -#define SUBLANG_MAORI_NEW_ZEALAND 0x01 -#define SUBLANG_MAPUDUNGUN_CHILE 0x01 -#define SUBLANG_MARATHI_INDIA 0x01 -#define SUBLANG_MOHAWK_MOHAWK 0x01 -#define SUBLANG_MONGOLIAN_CYRILLIC_MONGOLIA 0x01 -#define SUBLANG_NEPALI_NEPAL 0x01 -#define SUBLANG_NORWEGIAN_BOKMAL 0x01 -#define SUBLANG_OCCITAN_FRANCE 0x01 -#define SUBLANG_ORIYA_INDIA 0x01 -#define SUBLANG_PASHTO_AFGHANISTAN 0x01 -#define SUBLANG_PERSIAN_IRAN 0x01 -#define SUBLANG_POLISH_POLAND 0x01 -#define SUBLANG_PORTUGUESE_BRAZILIAN 0x01 -#define SUBLANG_PUNJABI_INDIA 0x01 -#define SUBLANG_QUECHUA_BOLIVIA 0x01 -#define SUBLANG_ROMANIAN_ROMANIA 0x01 -#define SUBLANG_ROMANSH_SWITZERLAND 0x01 -#define SUBLANG_RUSSIAN_RUSSIA 0x01 -#define SUBLANG_SAMI_NORTHERN_NORWAY 0x01 -#define SUBLANG_SANSKRIT_INDIA 0x01 -#define SUBLANG_SERBIAN_CROATIA 0x01 -#define SUBLANG_SINDHI_INDIA 0x01 -#define SUBLANG_SINHALESE_SRI_LANKA 0x01 -#define SUBLANG_SOTHO_NORTHERN_SOUTH_AFRICA 0x01 -#define SUBLANG_SLOVAK_SLOVAKIA 0x01 -#define SUBLANG_SLOVENIAN_SLOVENIA 0x01 -#define SUBLANG_SPANISH 0x01 -#define SUBLANG_SWAHILI_KENYA 0x01 -#define SUBLANG_SWEDISH 0x01 -#define SUBLANG_SYRIAC_SYRIA 0x01 -#define SUBLANG_TAJIK_TAJIKISTAN 0x01 -#define SUBLANG_TAMIL_INDIA 0x01 -#define SUBLANG_TATAR_RUSSIA 0x01 -#define SUBLANG_TELUGU_INDIA 0x01 -#define SUBLANG_THAI_THAILAND 0x01 -#define SUBLANG_TIBETAN_PRC 0x01 -#define SUBLANG_TSWANA_SOUTH_AFRICA 0x01 -#define SUBLANG_TURKISH_TURKEY 0x01 -#define SUBLANG_TURKMEN_TURKMENISTAN 0x01 -#define SUBLANG_UIGHUR_PRC 0x01 -#define SUBLANG_UKRAINIAN_UKRAINE 0x01 -#define SUBLANG_UPPER_SORBIAN_GERMANY 0x01 -#define SUBLANG_URDU_PAKISTAN 0x01 -#define SUBLANG_UZBEK_LATIN 0x01 -#define SUBLANG_VIETNAMESE_VIETNAM 0x01 -#define SUBLANG_WELSH_UNITED_KINGDOM 0x01 -#define SUBLANG_WOLOF_SENEGAL 0x01 -#define SUBLANG_XHOSA_SOUTH_AFRICA 0x01 -#define SUBLANG_YAKUT_RUSSIA 0x01 -#define SUBLANG_YI_PRC 0x01 -#define SUBLANG_YORUBA_NIGERIA 0x01 -#define SUBLANG_ZULU_SOUTH_AFRICA 0x01 -#define SORT_INVARIANT_MATH 0x1 -#define SORT_JAPANESE_UNICODE 0x1 -#define SORT_CHINESE_UNICODE 0x1 -#define SORT_KOREAN_UNICODE 0x1 -#define SORT_GERMAN_PHONE_BOOK 0x1 -#define SORT_HUNGARIAN_TECHNICAL 0x1 -#define SORT_GEORGIAN_MODERN 0x1 -#define VS_VERSION_INFO 1 -#define VFFF_ISSHAREDFILE 0x0001 -#define VFF_CURNEDEST 0x0001 -#define VIFF_FORCEINSTALL 0x0001 -#define SW_SHOWMINIMIZED 2 -#define SHOW_ICONWINDOW 2 -#define SW_OTHERZOOM 2 -#define VK_RBUTTON 0x02 -#define WM_DESTROY 0x0002 -#define WA_CLICKACTIVE 2 -#define PWR_SUSPENDRESUME 2 -#define NFR_UNICODE 2 -#define UIS_CLEAR 2 -#define UISF_HIDEACCEL 0x2 -#define XBUTTON2 0x0002 -#define WMSZ_RIGHT 2 -#define HTCAPTION 2 
-#define SMTO_ABORTIFHUNG 0x0002 -#define MA_ACTIVATEANDEAT 2 -#define ICON_SMALL2 2 -#define SIZE_MAXIMIZED 2 -#define MK_RBUTTON 0x0002 -#define TME_LEAVE 0x00000002 -#define CS_HREDRAW 0x0002 -#define CF_BITMAP 2 -#define IDCANCEL 2 -#define BN_HILITE 2 -#define BST_INDETERMINATE 0x0002 -#define HDS_BUTTONS 0x0002 -#define TBSTYLE_CHECK 0x0002 -#define TTS_NOPREFIX 0x02 -#define TBS_VERT 0x0002 -#define UDS_SETBUDDYINT 0x0002 -#define LWS_IGNORERETURN 0x0002 -#define LVS_SMALLICON 0x0002 -#define TVS_HASLINES 0x0002 -#define TVS_EX_MULTISELECT 0x0002 -#define TCS_BOTTOM 0x0002 -#define TCS_RIGHT 0x0002 -#define ACS_TRANSPARENT 0x0002 -#define MCS_MULTISELECT 0x0002 -#define DTS_SHOWNONE 0x0002 -#define PGS_AUTOSCROLL 0x00000002 -#define NFS_STATIC 0x0002 -#define BCSIF_IMAGE 0x0002 -#define BCSS_STRETCH 0x0002 -#define LANG_BULGARIAN 0x02 -#define SUBLANG_SYS_DEFAULT 0x02 -#define SUBLANG_ARABIC_IRAQ 0x02 -#define SUBLANG_AZERI_CYRILLIC 0x02 -#define SUBLANG_BENGALI_BANGLADESH 0x02 -#define SUBLANG_CHINESE_SIMPLIFIED 0x02 -#define SUBLANG_DUTCH_BELGIAN 0x02 -#define SUBLANG_ENGLISH_UK 0x02 -#define SUBLANG_FRENCH_BELGIAN 0x02 -#define SUBLANG_GERMAN_SWISS 0x02 -#define SUBLANG_INUKTITUT_CANADA_LATIN 0x02 -#define SUBLANG_IRISH_IRELAND 0x02 -#define SUBLANG_ITALIAN_SWISS 0x02 -#define SUBLANG_KASHMIRI_SASIA 0x02 -#define SUBLANG_KASHMIRI_INDIA 0x02 -#define SUBLANG_LOWER_SORBIAN_GERMANY 0x02 -#define SUBLANG_MALAY_BRUNEI_DARUSSALAM 0x02 -#define SUBLANG_MONGOLIAN_PRC 0x02 -#define SUBLANG_NEPALI_INDIA 0x02 -#define SUBLANG_NORWEGIAN_NYNORSK 0x02 -#define SUBLANG_PORTUGUESE 0x02 -#define SUBLANG_QUECHUA_ECUADOR 0x02 -#define SUBLANG_SAMI_NORTHERN_SWEDEN 0x02 -#define SUBLANG_SERBIAN_LATIN 0x02 -#define SUBLANG_SINDHI_PAKISTAN 0x02 -#define SUBLANG_SINDHI_AFGHANISTAN 0x02 -#define SUBLANG_SPANISH_MEXICAN 0x02 -#define SUBLANG_SWEDISH_FINLAND 0x02 -#define SUBLANG_TAMAZIGHT_ALGERIA_LATIN 0x02 -#define SUBLANG_TIGRIGNA_ERITREA 0x02 -#define SUBLANG_URDU_INDIA 0x02 -#define SUBLANG_UZBEK_CYRILLIC 0x02 -#define SORT_CHINESE_PRC 0x2 -#define VFF_FILEINUSE 0x0002 -#define VIFF_DONTDELETEOLD 0x0002 -#define SW_SHOWMAXIMIZED 3 -#define SW_MAXIMIZE 3 -#define SHOW_FULLSCREEN 3 -#define SW_PARENTOPENING 3 -#define VK_CANCEL 0x03 -#define WM_MOVE 0x0003 -#define PWR_CRITICALRESUME 3 -#define NF_QUERY 3 -#define UIS_INITIALIZE 3 -#define WMSZ_TOP 3 -#define HTSYSMENU 3 -#define MA_NOACTIVATE 3 -#define SIZE_MAXSHOW 3 -#define CF_METAFILEPICT 3 -#define IDABORT 3 -#define BN_UNHILITE 3 -#define LVS_LIST 0x0003 -#define LVS_TYPEMASK 0x0003 -#define LANG_CATALAN 0x03 -#define SUBLANG_CUSTOM_DEFAULT 0x03 -#define SUBLANG_ARABIC_EGYPT 0x03 -#define SUBLANG_CHINESE_HONGKONG 0x03 -#define SUBLANG_ENGLISH_AUS 0x03 -#define SUBLANG_FRENCH_CANADIAN 0x03 -#define SUBLANG_GERMAN_AUSTRIAN 0x03 -#define SUBLANG_QUECHUA_PERU 0x03 -#define SUBLANG_SAMI_NORTHERN_FINLAND 0x03 -#define SUBLANG_SERBIAN_CYRILLIC 0x03 -#define SUBLANG_SPANISH_MODERN 0x03 -#define SORT_CHINESE_BOPOMOFO 0x3 -#define SW_SHOWNOACTIVATE 4 -#define SHOW_OPENNOACTIVATE 4 -#define SW_OTHERUNZOOM 4 -#define VK_MBUTTON 0x04 -#define NF_REQUERY 4 -#define UISF_ACTIVE 0x4 -#define WMSZ_TOPLEFT 4 -#define HTGROWBOX 4 -#define MA_NOACTIVATEANDEAT 4 -#define SIZE_MAXHIDE 4 -#define MK_SHIFT 0x0004 -#define CF_SYLK 4 -#define IDRETRY 4 -#define BN_DISABLE 4 -#define BST_PUSHED 0x0004 -#define HDS_HOTTRACK 0x0004 -#define TBSTYLE_GROUP 0x0004 -#define TBS_TOP 0x0004 -#define TBS_LEFT 0x0004 -#define UDS_ALIGNRIGHT 0x0004 -#define PBS_VERTICAL 0x04 -#define 
LWS_NOPREFIX 0x0004 -#define LVS_SINGLESEL 0x0004 -#define TVS_LINESATROOT 0x0004 -#define TVS_EX_DOUBLEBUFFER 0x0004 -#define TCS_MULTISELECT 0x0004 -#define ACS_AUTOPLAY 0x0004 -#define MCS_WEEKNUMBERS 0x0004 -#define DTS_LONGDATEFORMAT 0x0004 -#define PGS_DRAGNDROP 0x00000004 -#define NFS_LISTCOMBO 0x0004 -#define BCSIF_STYLE 0x0004 -#define BCSS_ALIGNLEFT 0x0004 -#define LANG_CHINESE 0x04 -#define LANG_CHINESE_SIMPLIFIED 0x04 -#define SUBLANG_CUSTOM_UNSPECIFIED 0x04 -#define SUBLANG_ARABIC_LIBYA 0x04 -#define SUBLANG_CHINESE_SINGAPORE 0x04 -#define SUBLANG_CROATIAN_BOSNIA_HERZEGOVINA_LATIN 0x04 -#define SUBLANG_ENGLISH_CAN 0x04 -#define SUBLANG_FRENCH_SWISS 0x04 -#define SUBLANG_GERMAN_LUXEMBOURG 0x04 -#define SUBLANG_SAMI_LULE_NORWAY 0x04 -#define SUBLANG_SPANISH_GUATEMALA 0x04 -#define SORT_JAPANESE_RADICALSTROKE 0x4 -#define VFF_BUFFTOOSMALL 0x0004 -#define SW_SHOW 5 -#define VK_XBUTTON1 0x05 -#define WM_SIZE 0x0005 -#define WMSZ_TOPRIGHT 5 -#define HTMENU 5 -#define CF_DIF 5 -#define IDIGNORE 5 -#define BN_DOUBLECLICKED 5 -#define LANG_CZECH 0x05 -#define SUBLANG_UI_CUSTOM_DEFAULT 0x05 -#define SUBLANG_ARABIC_ALGERIA 0x05 -#define SUBLANG_BOSNIAN_BOSNIA_HERZEGOVINA_LATIN 0x05 -#define SUBLANG_CHINESE_MACAU 0x05 -#define SUBLANG_ENGLISH_NZ 0x05 -#define SUBLANG_FRENCH_LUXEMBOURG 0x05 -#define SUBLANG_GERMAN_LIECHTENSTEIN 0x05 -#define SUBLANG_SAMI_LULE_SWEDEN 0x05 -#define SUBLANG_SPANISH_COSTA_RICA 0x05 -#define SW_MINIMIZE 6 -#define VK_XBUTTON2 0x06 -#define WM_ACTIVATE 0x0006 -#define WMSZ_BOTTOM 6 -#define HTHSCROLL 6 -#define CF_TIFF 6 -#define IDYES 6 -#define BN_SETFOCUS 6 -#define LANG_DANISH 0x06 -#define SUBLANG_ARABIC_MOROCCO 0x06 -#define SUBLANG_ENGLISH_EIRE 0x06 -#define SUBLANG_FRENCH_MONACO 0x06 -#define SUBLANG_SAMI_SOUTHERN_NORWAY 0x06 -#define SUBLANG_SERBIAN_BOSNIA_HERZEGOVINA_LATIN 0x06 -#define SUBLANG_SPANISH_PANAMA 0x06 -#define SW_SHOWMINNOACTIVE 7 -#define WM_SETFOCUS 0x0007 -#define WMSZ_BOTTOMLEFT 7 -#define HTVSCROLL 7 -#define CF_OEMTEXT 7 -#define IDNO 7 -#define BN_KILLFOCUS 7 -#define LANG_GERMAN 0x07 -#define SUBLANG_ARABIC_TUNISIA 0x07 -#define SUBLANG_ENGLISH_SOUTH_AFRICA 0x07 -#define SUBLANG_SAMI_SOUTHERN_SWEDEN 0x07 -#define SUBLANG_SERBIAN_BOSNIA_HERZEGOVINA_CYRILLIC 0x07 -#define SUBLANG_SPANISH_DOMINICAN_REPUBLIC 0x07 -#define SW_SHOWNA 8 -#define VK_BACK 0x08 -#define WM_KILLFOCUS 0x0008 -#define WMSZ_BOTTOMRIGHT 8 -#define HTMINBUTTON 8 -#define SMTO_NOTIMEOUTIFNOTHUNG 0x0008 -#define MK_CONTROL 0x0008 -#define CS_DBLCLKS 0x0008 -#define CF_DIB 8 -#define IDCLOSE 8 -#define BST_FOCUS 0x0008 -#define HDS_HIDDEN 0x0008 -#define TBSTYLE_DROPDOWN 0x0008 -#define TBS_BOTH 0x0008 -#define UDS_ALIGNLEFT 0x0008 -#define PBS_MARQUEE 0x08 -#define LWS_USEVISUALSTYLE 0x0008 -#define LVS_SHOWSELALWAYS 0x0008 -#define TVS_EDITLABELS 0x0008 -#define TVS_EX_NOINDENTSTATE 0x0008 -#define TCS_FLATBUTTONS 0x0008 -#define ACS_TIMER 0x0008 -#define MCS_NOTODAYCIRCLE 0x0008 -#define NFS_BUTTON 0x0008 -#define BCSIF_SIZE 0x0008 -#define BCSS_IMAGE 0x0008 -#define LANG_GREEK 0x08 -#define SUBLANG_ARABIC_OMAN 0x08 -#define SUBLANG_BOSNIAN_BOSNIA_HERZEGOVINA_CYRILLIC 0x08 -#define SUBLANG_ENGLISH_JAMAICA 0x08 -#define SUBLANG_SAMI_SKOLT_FINLAND 0x08 -#define SUBLANG_SPANISH_VENEZUELA 0x08 -#define SW_RESTORE 9 -#define VK_TAB 0x09 -#define HTMAXBUTTON 9 -#define CF_PALETTE 9 -#define IDHELP 9 -#define DTS_TIMEFORMAT 0x0009 -#define LANG_ENGLISH 0x09 -#define SUBLANG_ARABIC_YEMEN 0x09 -#define SUBLANG_ENGLISH_CARIBBEAN 0x09 -#define SUBLANG_SAMI_INARI_FINLAND 0x09 
-#define SUBLANG_SPANISH_COLOMBIA 0x09 -#define SW_SHOWDEFAULT 10 -#define WM_ENABLE 0x000A -#define HTLEFT 10 -#define CF_PENDATA 10 -#define IDTRYAGAIN 10 -#define HELP_CONTEXTMENU 0x000a -#define LANG_SPANISH 0x0a -#define SUBLANG_ARABIC_SYRIA 0x0a -#define SUBLANG_ENGLISH_BELIZE 0x0a -#define SUBLANG_SPANISH_PERU 0x0a -#define SW_FORCEMINIMIZE 11 -#define SW_MAX 11 -#define WM_SETREDRAW 0x000B -#define HTRIGHT 11 -#define CF_RIFF 11 -#define IDCONTINUE 11 -#define HELP_FINDER 0x000b -#define LANG_FINNISH 0x0b -#define SUBLANG_ARABIC_JORDAN 0x0b -#define SUBLANG_ENGLISH_TRINIDAD 0x0b -#define SUBLANG_SPANISH_ARGENTINA 0x0b -#define VK_CLEAR 0x0C -#define WM_SETTEXT 0x000C -#define HTTOP 12 -#define CF_WAVE 12 -#define HELP_WM_HELP 0x000c -#define DTS_SHORTDATECENTURYFORMAT 0x000C -#define LANG_FRENCH 0x0c -#define SUBLANG_ARABIC_LEBANON 0x0c -#define SUBLANG_ENGLISH_ZIMBABWE 0x0c -#define SUBLANG_SPANISH_ECUADOR 0x0c -#define VK_RETURN 0x0D -#define WM_GETTEXT 0x000D -#define HTTOPLEFT 13 -#define CF_UNICODETEXT 13 -#define HELP_SETPOPUP_POS 0x000d -#define LANG_HEBREW 0x0d -#define SUBLANG_ARABIC_KUWAIT 0x0d -#define SUBLANG_ENGLISH_PHILIPPINES 0x0d -#define SUBLANG_SPANISH_CHILE 0x0d -#define WM_GETTEXTLENGTH 0x000E -#define HTTOPRIGHT 14 -#define CF_ENHMETAFILE 14 -#define LANG_HUNGARIAN 0x0e -#define SUBLANG_ARABIC_UAE 0x0e -#define SUBLANG_SPANISH_URUGUAY 0x0e -#define WM_PAINT 0x000F -#define HTBOTTOM 15 -#define CF_HDROP 15 -#define LANG_ICELANDIC 0x0f -#define SUBLANG_ARABIC_BAHRAIN 0x0f -#define SUBLANG_SPANISH_PARAGUAY 0x0f -#define VK_SHIFT 0x10 -#define WM_CLOSE 0x0010 -#define HTBOTTOMLEFT 16 -#define WVR_ALIGNTOP 0x0010 -#define MK_MBUTTON 0x0010 -#define TME_NONCLIENT 0x00000010 -#define CF_LOCALE 16 -#define HELP_TCARD_DATA 0x0010 -#define TBSTYLE_AUTOSIZE 0x0010 -#define TTS_NOANIMATE 0x10 -#define TBS_NOTICKS 0x0010 -#define UDS_AUTOBUDDY 0x0010 -#define PBS_SMOOTHREVERSE 0x10 -#define LWS_USECUSTOMTEXT 0x0010 -#define LVS_SORTASCENDING 0x0010 -#define TVS_DISABLEDRAGDROP 0x0010 -#define TVS_EX_RICHTOOLTIP 0x0010 -#define TCS_FORCEICONLEFT 0x0010 -#define MCS_NOTODAY 0x0010 -#define DTS_APPCANPARSE 0x0010 -#define NFS_ALL 0x0010 -#define LANG_ITALIAN 0x10 -#define SUBLANG_ARABIC_QATAR 0x10 -#define SUBLANG_ENGLISH_INDIA 0x10 -#define SUBLANG_SPANISH_BOLIVIA 0x10 -#define VK_CONTROL 0x11 -#define WM_QUERYENDSESSION 0x0011 -#define HTBOTTOMRIGHT 17 -#define CF_DIBV5 17 -#define HELP_TCARD_OTHER_CALLER 0x0011 -#define LANG_JAPANESE 0x11 -#define SUBLANG_ENGLISH_MALAYSIA 0x11 -#define SUBLANG_SPANISH_EL_SALVADOR 0x11 -#define VK_MENU 0x12 -#define WM_QUIT 0x0012 -#define HTBORDER 18 -#define CF_MAX 18 -#define LANG_KOREAN 0x12 -#define SUBLANG_ENGLISH_SINGAPORE 0x12 -#define SUBLANG_SPANISH_HONDURAS 0x12 -#define VK_PAUSE 0x13 -#define WM_QUERYOPEN 0x0013 -#define HTOBJECT 19 -#define LANG_DUTCH 0x13 -#define SUBLANG_SPANISH_NICARAGUA 0x13 -#define VK_CAPITAL 0x14 -#define WM_ERASEBKGND 0x0014 -#define HTCLOSE 20 -#define LANG_NORWEGIAN 0x14 -#define SUBLANG_SPANISH_PUERTO_RICO 0x14 -#define VK_KANA 0x15 -#define VK_HANGEUL 0x15 -#define VK_HANGUL 0x15 -#define WM_SYSCOLORCHANGE 0x0015 -#define HTHELP 21 -#define LANG_POLISH 0x15 -#define SUBLANG_SPANISH_US 0x15 -#define WM_ENDSESSION 0x0016 -#define LANG_PORTUGUESE 0x16 -#define VK_JUNJA 0x17 -#define LANG_ROMANSH 0x17 -#define VK_FINAL 0x18 -#define WM_SHOWWINDOW 0x0018 -#define LANG_ROMANIAN 0x18 -#define VK_HANJA 0x19 -#define VK_KANJI 0x19 -#define LANG_RUSSIAN 0x19 -#define WM_WININICHANGE 0x001A -#define 
LANG_BOSNIAN 0x1a -#define LANG_CROATIAN 0x1a -#define LANG_SERBIAN 0x1a -#define VK_ESCAPE 0x1B -#define WM_DEVMODECHANGE 0x001B -#define LANG_SLOVAK 0x1b -#define VK_CONVERT 0x1C -#define WM_ACTIVATEAPP 0x001C -#define LANG_ALBANIAN 0x1c -#define VK_NONCONVERT 0x1D -#define WM_FONTCHANGE 0x001D -#define LANG_SWEDISH 0x1d -#define VK_ACCEPT 0x1E -#define WM_TIMECHANGE 0x001E -#define LANG_THAI 0x1e -#define VK_MODECHANGE 0x1F -#define WM_CANCELMODE 0x001F -#define LANG_TURKISH 0x1f -#define VK_SPACE 0x20 -#define WM_SETCURSOR 0x0020 -#define SMTO_ERRORONEXIT 0x0020 -#define WVR_ALIGNLEFT 0x0020 -#define MK_XBUTTON1 0x0020 -#define CS_OWNDC 0x0020 -#define TBSTYLE_NOPREFIX 0x0020 -#define TTS_NOFADE 0x20 -#define TBS_ENABLESELRANGE 0x0020 -#define UDS_ARROWKEYS 0x0020 -#define LWS_RIGHT 0x0020 -#define LVS_SORTDESCENDING 0x0020 -#define TVS_SHOWSELALWAYS 0x0020 -#define TVS_EX_AUTOHSCROLL 0x0020 -#define TCS_FORCELABELLEFT 0x0020 -#define DTS_RIGHTALIGN 0x0020 -#define NFS_USEFONTASSOC 0x0020 -#define LANG_URDU 0x20 -#define VK_PRIOR 0x21 -#define WM_MOUSEACTIVATE 0x0021 -#define LANG_INDONESIAN 0x21 -#define VK_NEXT 0x22 -#define WM_CHILDACTIVATE 0x0022 -#define LANG_UKRAINIAN 0x22 -#define VK_END 0x23 -#define WM_QUEUESYNC 0x0023 -#define LANG_BELARUSIAN 0x23 -#define VK_HOME 0x24 -#define WM_GETMINMAXINFO 0x0024 -#define LANG_SLOVENIAN 0x24 -#define VK_LEFT 0x25 -#define LANG_ESTONIAN 0x25 -#define VK_UP 0x26 -#define WM_PAINTICON 0x0026 -#define LANG_LATVIAN 0x26 -#define VK_RIGHT 0x27 -#define WM_ICONERASEBKGND 0x0027 -#define LANG_LITHUANIAN 0x27 -#define VK_DOWN 0x28 -#define WM_NEXTDLGCTL 0x0028 -#define LANG_TAJIK 0x28 -#define VK_SELECT 0x29 -#define LANG_FARSI 0x29 -#define LANG_PERSIAN 0x29 -#define VK_PRINT 0x2A -#define WM_SPOOLERSTATUS 0x002A -#define LANG_VIETNAMESE 0x2a -#define VK_EXECUTE 0x2B -#define WM_DRAWITEM 0x002B -#define LANG_ARMENIAN 0x2b -#define VK_SNAPSHOT 0x2C -#define WM_MEASUREITEM 0x002C -#define LANG_AZERI 0x2c -#define VK_INSERT 0x2D -#define WM_DELETEITEM 0x002D -#define LANG_BASQUE 0x2d -#define VK_DELETE 0x2E -#define WM_VKEYTOITEM 0x002E -#define LANG_LOWER_SORBIAN 0x2e -#define LANG_UPPER_SORBIAN 0x2e -#define VK_HELP 0x2F -#define WM_CHARTOITEM 0x002F -#define LANG_MACEDONIAN 0x2f -#define WM_SETFONT 0x0030 -#define WM_GETFONT 0x0031 -#define WM_SETHOTKEY 0x0032 -#define LANG_TSWANA 0x32 -#define WM_GETHOTKEY 0x0033 -#define LANG_XHOSA 0x34 -#define LANG_ZULU 0x35 -#define LANG_AFRIKAANS 0x36 -#define WM_QUERYDRAGICON 0x0037 -#define LANG_GEORGIAN 0x37 -#define LANG_FAEROESE 0x38 -#define WM_COMPAREITEM 0x0039 -#define LANG_HINDI 0x39 -#define LANG_MALTESE 0x3a -#define LANG_SAMI 0x3b -#define LANG_IRISH 0x3c -#define WM_GETOBJECT 0x003D -#define LANG_MALAY 0x3e -#define LANG_KAZAK 0x3f -#define WVR_ALIGNBOTTOM 0x0040 -#define MK_XBUTTON2 0x0040 -#define CS_CLASSDC 0x0040 -#define HDS_DRAGDROP 0x0040 -#define BTNS_SHOWTEXT 0x0040 -#define TTS_BALLOON 0x40 -#define TBS_FIXEDLENGTH 0x0040 -#define UDS_HORZ 0x0040 -#define LVS_SHAREIMAGELISTS 0x0040 -#define TVS_RTLREADING 0x0040 -#define TVS_EX_FADEINOUTEXPANDOS 0x0040 -#define TCS_HOTTRACK 0x0040 -#define MCS_NOTRAILINGDATES 0x0040 -#define LANG_KYRGYZ 0x40 -#define WM_COMPACTING 0x0041 -#define LANG_SWAHILI 0x41 -#define LANG_TURKMEN 0x42 -#define LANG_UZBEK 0x43 -#define WM_COMMNOTIFY 0x0044 -#define LANG_TATAR 0x44 -#define LANG_BENGALI 0x45 -#define WM_WINDOWPOSCHANGING 0x0046 -#define LANG_PUNJABI 0x46 -#define WM_WINDOWPOSCHANGED 0x0047 -#define LANG_GUJARATI 0x47 -#define WM_POWER 0x0048 
-#define LANG_ORIYA 0x48 -#define LANG_TAMIL 0x49 -#define WM_COPYDATA 0x004A -#define LANG_TELUGU 0x4a -#define WM_CANCELJOURNAL 0x004B -#define LANG_KANNADA 0x4b -#define LANG_MALAYALAM 0x4c -#define LANG_ASSAMESE 0x4d -#define WM_NOTIFY 0x004E -#define LANG_MARATHI 0x4e -#define LANG_SANSKRIT 0x4f -#define WM_INPUTLANGCHANGEREQUEST 0x0050 -#define LANG_MONGOLIAN 0x50 -#define WM_INPUTLANGCHANGE 0x0051 -#define LANG_TIBETAN 0x51 -#define WM_TCARD 0x0052 -#define LANG_WELSH 0x52 -#define WM_HELP 0x0053 -#define LANG_KHMER 0x53 -#define WM_USERCHANGED 0x0054 -#define LANG_LAO 0x54 -#define WM_NOTIFYFORMAT 0x0055 -#define LANG_GALICIAN 0x56 -#define LANG_KONKANI 0x57 -#define LANG_MANIPURI 0x58 -#define LANG_SINDHI 0x59 -#define LANG_SYRIAC 0x5a -#define VK_LWIN 0x5B -#define LANG_SINHALESE 0x5b -#define VK_RWIN 0x5C -#define VK_APPS 0x5D -#define LANG_INUKTITUT 0x5d -#define LANG_AMHARIC 0x5e -#define VK_SLEEP 0x5F -#define LANG_TAMAZIGHT 0x5f -#define VK_NUMPAD0 0x60 -#define LANG_KASHMIRI 0x60 -#define VK_NUMPAD1 0x61 -#define LANG_NEPALI 0x61 -#define VK_NUMPAD2 0x62 -#define LANG_FRISIAN 0x62 -#define VK_NUMPAD3 0x63 -#define LANG_PASHTO 0x63 -#define VK_NUMPAD4 0x64 -#define LANG_FILIPINO 0x64 -#define VS_USER_DEFINED 100 -#define VK_NUMPAD5 0x65 -#define LANG_DIVEHI 0x65 -#define VK_NUMPAD6 0x66 -#define VK_NUMPAD7 0x67 -#define VK_NUMPAD8 0x68 -#define LANG_HAUSA 0x68 -#define VK_NUMPAD9 0x69 -#define VK_MULTIPLY 0x6A -#define LANG_YORUBA 0x6a -#define VK_ADD 0x6B -#define LANG_QUECHUA 0x6b -#define VK_SEPARATOR 0x6C -#define LANG_SOTHO 0x6c -#define VK_SUBTRACT 0x6D -#define LANG_BASHKIR 0x6d -#define VK_DECIMAL 0x6E -#define LANG_LUXEMBOURGISH 0x6e -#define VK_DIVIDE 0x6F -#define LANG_GREENLANDIC 0x6f -#define VK_F1 0x70 -#define LANG_IGBO 0x70 -#define VK_F2 0x71 -#define VK_F3 0x72 -#define VK_F4 0x73 -#define LANG_TIGRIGNA 0x73 -#define VK_F5 0x74 -#define VK_F6 0x75 -#define VK_F7 0x76 -#define VK_F8 0x77 -#define VK_F9 0x78 -#define WHEEL_DELTA 120 -#define LANG_YI 0x78 -#define VK_F10 0x79 -#define VK_F11 0x7A -#define LANG_MAPUDUNGUN 0x7a -#define VK_F12 0x7B -#define WM_CONTEXTMENU 0x007B -#define VK_F13 0x7C -#define WM_STYLECHANGING 0x007C -#define LANG_MOHAWK 0x7c -#define VK_F14 0x7D -#define WM_STYLECHANGED 0x007D -#define VK_F15 0x7E -#define WM_DISPLAYCHANGE 0x007E -#define LANG_BRETON 0x7e -#define VK_F16 0x7F -#define WM_GETICON 0x007F -#define LANG_INVARIANT 0x7f -#define VK_F17 0x80 -#define WM_SETICON 0x0080 -#define WVR_ALIGNRIGHT 0x0080 -#define CS_PARENTDC 0x0080 -#define CF_OWNERDISPLAY 0x0080 -#define HDS_FULLDRAG 0x0080 -#define BTNS_WHOLEDROPDOWN 0x0080 -#define TTS_CLOSE 0x80 -#define TBS_NOTHUMB 0x0080 -#define UDS_NOTHOUSANDS 0x0080 -#define LVS_NOLABELWRAP 0x0080 -#define TVS_NOTOOLTIPS 0x0080 -#define TVS_EX_PARTIALCHECKBOXES 0x0080 -#define TCS_VERTICAL 0x0080 -#define MCS_SHORTDAYSOFWEEK 0x0080 -#define LANG_UIGHUR 0x80 -#define VK_F18 0x81 -#define WM_NCCREATE 0x0081 -#define CF_DSPTEXT 0x0081 -#define LANG_MAORI 0x81 -#define VK_F19 0x82 -#define WM_NCDESTROY 0x0082 -#define CF_DSPBITMAP 0x0082 -#define LANG_OCCITAN 0x82 -#define VK_F20 0x83 -#define WM_NCCALCSIZE 0x0083 -#define CF_DSPMETAFILEPICT 0x0083 -#define LANG_CORSICAN 0x83 -#define VK_F21 0x84 -#define WM_NCHITTEST 0x0084 -#define LANG_ALSATIAN 0x84 -#define VK_F22 0x85 -#define WM_NCPAINT 0x0085 -#define LANG_YAKUT 0x85 -#define VK_F23 0x86 -#define WM_NCACTIVATE 0x0086 -#define LANG_KICHE 0x86 -#define VK_F24 0x87 -#define WM_GETDLGCODE 0x0087 -#define LANG_KINYARWANDA 0x87 -#define 
WM_SYNCPAINT 0x0088 -#define LANG_WOLOF 0x88 -#define LANG_DARI 0x8c -#define CF_DSPENHMETAFILE 0x008E -#define VK_NUMLOCK 0x90 -#define VK_SCROLL 0x91 -#define VK_OEM_NEC_EQUAL 0x92 -#define VK_OEM_FJ_JISHO 0x92 -#define VK_OEM_FJ_MASSHOU 0x93 -#define VK_OEM_FJ_TOUROKU 0x94 -#define VK_OEM_FJ_LOYA 0x95 -#define VK_OEM_FJ_ROYA 0x96 -#define VK_LSHIFT 0xA0 -#define WM_NCMOUSEMOVE 0x00A0 -#define VK_RSHIFT 0xA1 -#define WM_NCLBUTTONDOWN 0x00A1 -#define VK_LCONTROL 0xA2 -#define WM_NCLBUTTONUP 0x00A2 -#define VK_RCONTROL 0xA3 -#define WM_NCLBUTTONDBLCLK 0x00A3 -#define VK_LMENU 0xA4 -#define WM_NCRBUTTONDOWN 0x00A4 -#define VK_RMENU 0xA5 -#define WM_NCRBUTTONUP 0x00A5 -#define VK_BROWSER_BACK 0xA6 -#define WM_NCRBUTTONDBLCLK 0x00A6 -#define VK_BROWSER_FORWARD 0xA7 -#define WM_NCMBUTTONDOWN 0x00A7 -#define VK_BROWSER_REFRESH 0xA8 -#define WM_NCMBUTTONUP 0x00A8 -#define VK_BROWSER_STOP 0xA9 -#define WM_NCMBUTTONDBLCLK 0x00A9 -#define VK_BROWSER_SEARCH 0xAA -#define VK_BROWSER_FAVORITES 0xAB -#define WM_NCXBUTTONDOWN 0x00AB -#define VK_BROWSER_HOME 0xAC -#define WM_NCXBUTTONUP 0x00AC -#define VK_VOLUME_MUTE 0xAD -#define WM_NCXBUTTONDBLCLK 0x00AD -#define VK_VOLUME_DOWN 0xAE -#define VK_VOLUME_UP 0xAF -#define VK_MEDIA_NEXT_TRACK 0xB0 -#define EM_GETSEL 0x00B0 -#define VK_MEDIA_PREV_TRACK 0xB1 -#define EM_SETSEL 0x00B1 -#define VK_MEDIA_STOP 0xB2 -#define EM_GETRECT 0x00B2 -#define VK_MEDIA_PLAY_PAUSE 0xB3 -#define EM_SETRECT 0x00B3 -#define VK_LAUNCH_MAIL 0xB4 -#define EM_SETRECTNP 0x00B4 -#define VK_LAUNCH_MEDIA_SELECT 0xB5 -#define EM_SCROLL 0x00B5 -#define VK_LAUNCH_APP1 0xB6 -#define EM_LINESCROLL 0x00B6 -#define VK_LAUNCH_APP2 0xB7 -#define EM_SCROLLCARET 0x00B7 -#define EM_GETMODIFY 0x00B8 -#define EM_SETMODIFY 0x00B9 -#define VK_OEM_1 0xBA -#define EM_GETLINECOUNT 0x00BA -#define VK_OEM_PLUS 0xBB -#define EM_LINEINDEX 0x00BB -#define VK_OEM_COMMA 0xBC -#define EM_SETHANDLE 0x00BC -#define VK_OEM_MINUS 0xBD -#define EM_GETHANDLE 0x00BD -#define VK_OEM_PERIOD 0xBE -#define EM_GETTHUMB 0x00BE -#define VK_OEM_2 0xBF -#define VK_OEM_3 0xC0 -#define EM_LINELENGTH 0x00C1 -#define EM_REPLACESEL 0x00C2 -#define EM_GETLINE 0x00C4 -#define EM_LIMITTEXT 0x00C5 -#define EM_CANUNDO 0x00C6 -#define EM_UNDO 0x00C7 -#define EM_FMTLINES 0x00C8 -#define DLG_MAIN 200 -#define EM_LINEFROMCHAR 0x00C9 -#define EM_SETTABSTOPS 0x00CB -#define EM_SETPASSWORDCHAR 0x00CC -#define EM_EMPTYUNDOBUFFER 0x00CD -#define EM_GETFIRSTVISIBLELINE 0x00CE -#define EM_SETREADONLY 0x00CF -#define EM_SETWORDBREAKPROC 0x00D0 -#define EM_GETWORDBREAKPROC 0x00D1 -#define EM_GETPASSWORDCHAR 0x00D2 -#define EM_SETMARGINS 0x00D3 -#define EM_GETMARGINS 0x00D4 -#define EM_GETLIMITTEXT 0x00D5 -#define EM_POSFROMCHAR 0x00D6 -#define EM_CHARFROMPOS 0x00D7 -#define EM_SETIMESTATUS 0x00D8 -#define EM_GETIMESTATUS 0x00D9 -#define VK_OEM_4 0xDB -#define VK_OEM_5 0xDC -#define VK_OEM_6 0xDD -#define VK_OEM_7 0xDE -#define VK_OEM_8 0xDF -#define VK_OEM_AX 0xE1 -#define VK_OEM_102 0xE2 -#define VK_ICO_HELP 0xE3 -#define VK_ICO_00 0xE4 -#define VK_PROCESSKEY 0xE5 -#define VK_ICO_CLEAR 0xE6 -#define VK_PACKET 0xE7 -#define VK_OEM_RESET 0xE9 -#define VK_OEM_JUMP 0xEA -#define VK_OEM_PA1 0xEB -#define VK_OEM_PA2 0xEC -#define VK_OEM_PA3 0xED -#define VK_OEM_WSCTRL 0xEE -#define VK_OEM_CUSEL 0xEF -#define VK_OEM_ATTN 0xF0 -#define BM_GETCHECK 0x00F0 -#define VK_OEM_FINISH 0xF1 -#define BM_SETCHECK 0x00F1 -#define VK_OEM_COPY 0xF2 -#define BM_GETSTATE 0x00F2 -#define VK_OEM_AUTO 0xF3 -#define BM_SETSTATE 0x00F3 -#define VK_OEM_ENLW 0xF4 -#define 
BM_SETSTYLE 0x00F4 -#define VK_OEM_BACKTAB 0xF5 -#define BM_CLICK 0x00F5 -#define VK_ATTN 0xF6 -#define BM_GETIMAGE 0x00F6 -#define VK_CRSEL 0xF7 -#define BM_SETIMAGE 0x00F7 -#define VK_EXSEL 0xF8 -#define BM_SETDONTCLICK 0x00F8 -#define VK_EREOF 0xF9 -#define VK_PLAY 0xFA -#define VK_ZOOM 0xFB -#define VK_NONAME 0xFC -#define VK_PA1 0xFD -#define VK_OEM_CLEAR 0xFE -#define WM_INPUT_DEVICE_CHANGE 0x00FE -#define SUBVERSION_MASK 0x000000FF -#define WM_INPUT 0x00FF -#define WM_KEYFIRST 0x0100 -#define WM_KEYDOWN 0x0100 -#define WVR_HREDRAW 0x0100 -#define HDS_FILTERBAR 0x0100 -#define TBSTYLE_TOOLTIPS 0x0100 -#define RBS_TOOLTIPS 0x00000100 -#define TTS_USEVISUALSTYLE 0x100 -#define SBARS_SIZEGRIP 0x0100 -#define TBS_TOOLTIPS 0x0100 -#define UDS_HOTTRACK 0x0100 -#define LVS_AUTOARRANGE 0x0100 -#define TVS_CHECKBOXES 0x0100 -#define TVS_EX_EXCLUSIONCHECKBOXES 0x0100 -#define TCS_BUTTONS 0x0100 -#define MCS_NOSELCHANGEONNAV 0x0100 -#define WM_KEYUP 0x0101 -#define WM_CHAR 0x0102 -#define WM_DEADCHAR 0x0103 -#define WM_SYSKEYDOWN 0x0104 -#define WM_SYSKEYUP 0x0105 -#define WM_SYSCHAR 0x0106 -#define WM_SYSDEADCHAR 0x0107 -#define WM_UNICHAR 0x0109 -#define WM_IME_STARTCOMPOSITION 0x010D -#define WM_IME_ENDCOMPOSITION 0x010E -#define WM_IME_COMPOSITION 0x010F -#define WM_IME_KEYLAST 0x010F -#define WM_INITDIALOG 0x0110 -#define WM_COMMAND 0x0111 -#define WM_SYSCOMMAND 0x0112 -#define WM_TIMER 0x0113 -#define WM_HSCROLL 0x0114 -#define WM_VSCROLL 0x0115 -#define WM_INITMENU 0x0116 -#define WM_INITMENUPOPUP 0x0117 -#define WM_MENUSELECT 0x011F -#define WM_MENUCHAR 0x0120 -#define WM_ENTERIDLE 0x0121 -#define WM_MENURBUTTONUP 0x0122 -#define WM_MENUDRAG 0x0123 -#define WM_MENUGETOBJECT 0x0124 -#define WM_UNINITMENUPOPUP 0x0125 -#define WM_MENUCOMMAND 0x0126 -#define WM_CHANGEUISTATE 0x0127 -#define WM_UPDATEUISTATE 0x0128 -#define WM_QUERYUISTATE 0x0129 -#define DLG_ICON 300 -#define WM_CTLCOLORMSGBOX 0x0132 -#define WM_CTLCOLOREDIT 0x0133 -#define WM_CTLCOLORLISTBOX 0x0134 -#define WM_CTLCOLORBTN 0x0135 -#define WM_CTLCOLORDLG 0x0136 -#define WM_CTLCOLORSCROLLBAR 0x0137 -#define WM_CTLCOLORSTATIC 0x0138 -#define MN_GETHMENU 0x01E1 -#define _WIN32_IE_IE20 0x0200 -#define WM_MOUSEFIRST 0x0200 -#define WM_MOUSEMOVE 0x0200 -#define WVR_VREDRAW 0x0200 -#define CS_NOCLOSE 0x0200 -#define CF_PRIVATEFIRST 0x0200 -#define HDS_FLAT 0x0200 -#define TBSTYLE_WRAPABLE 0x0200 -#define RBS_VARHEIGHT 0x00000200 -#define TBS_REVERSED 0x0200 -#define LVS_EDITLABELS 0x0200 -#define TVS_TRACKSELECT 0x0200 -#define TVS_EX_DIMMEDCHECKBOXES 0x0200 -#define TCS_MULTILINE 0x0200 -#define WM_LBUTTONDOWN 0x0201 -#define WM_LBUTTONUP 0x0202 -#define WM_LBUTTONDBLCLK 0x0203 -#define WM_RBUTTONDOWN 0x0204 -#define WM_RBUTTONUP 0x0205 -#define WM_RBUTTONDBLCLK 0x0206 -#define WM_MBUTTONDOWN 0x0207 -#define WM_MBUTTONUP 0x0208 -#define WM_MBUTTONDBLCLK 0x0209 -#define WM_MOUSEWHEEL 0x020A -#define WM_XBUTTONDOWN 0x020B -#define WM_XBUTTONUP 0x020C -#define WM_XBUTTONDBLCLK 0x020D -#define WM_MOUSEHWHEEL 0x020E -#define WM_PARENTNOTIFY 0x0210 -#define WM_ENTERMENULOOP 0x0211 -#define WM_EXITMENULOOP 0x0212 -#define WM_NEXTMENU 0x0213 -#define WM_SIZING 0x0214 -#define WM_CAPTURECHANGED 0x0215 -#define WM_MOVING 0x0216 -#define WM_POWERBROADCAST 0x0218 -#define WM_DEVICECHANGE 0x0219 -#define WM_MDICREATE 0x0220 -#define WM_MDIDESTROY 0x0221 -#define WM_MDIACTIVATE 0x0222 -#define WM_MDIRESTORE 0x0223 -#define WM_MDINEXT 0x0224 -#define WM_MDIMAXIMIZE 0x0225 -#define WM_MDITILE 0x0226 -#define WM_MDICASCADE 0x0227 -#define 
WM_MDIICONARRANGE 0x0228 -#define WM_MDIGETACTIVE 0x0229 -#define WM_MDISETMENU 0x0230 -#define WM_ENTERSIZEMOVE 0x0231 -#define WM_EXITSIZEMOVE 0x0232 -#define WM_DROPFILES 0x0233 -#define WM_MDIREFRESHMENU 0x0234 -#define WM_IME_SETCONTEXT 0x0281 -#define WM_IME_NOTIFY 0x0282 -#define WM_IME_CONTROL 0x0283 -#define WM_IME_COMPOSITIONFULL 0x0284 -#define WM_IME_SELECT 0x0285 -#define WM_IME_CHAR 0x0286 -#define WM_IME_REQUEST 0x0288 -#define WM_IME_KEYDOWN 0x0290 -#define WM_IME_KEYUP 0x0291 -#define WM_NCMOUSEHOVER 0x02A0 -#define WM_MOUSEHOVER 0x02A1 -#define WM_NCMOUSELEAVE 0x02A2 -#define WM_MOUSELEAVE 0x02A3 -#define WM_WTSSESSION_CHANGE 0x02B1 -#define WM_TABLET_FIRST 0x02c0 -#define WM_TABLET_LAST 0x02df -#define CF_PRIVATELAST 0x02FF -#define _WIN32_IE_IE30 0x0300 -#define WM_CUT 0x0300 -#define CF_GDIOBJFIRST 0x0300 -#define WM_COPY 0x0301 -#define _WIN32_IE_IE302 0x0302 -#define WM_PASTE 0x0302 -#define WM_CLEAR 0x0303 -#define WM_UNDO 0x0304 -#define WM_RENDERFORMAT 0x0305 -#define WM_RENDERALLFORMATS 0x0306 -#define WM_DESTROYCLIPBOARD 0x0307 -#define WM_DRAWCLIPBOARD 0x0308 -#define WM_PAINTCLIPBOARD 0x0309 -#define WM_VSCROLLCLIPBOARD 0x030A -#define WM_SIZECLIPBOARD 0x030B -#define WM_ASKCBFORMATNAME 0x030C -#define WM_CHANGECBCHAIN 0x030D -#define WM_HSCROLLCLIPBOARD 0x030E -#define WM_QUERYNEWPALETTE 0x030F -#define WM_PALETTEISCHANGING 0x0310 -#define WM_PALETTECHANGED 0x0311 -#define WM_HOTKEY 0x0312 -#define WM_PRINT 0x0317 -#define WM_PRINTCLIENT 0x0318 -#define WM_APPCOMMAND 0x0319 -#define WM_THEMECHANGED 0x031A -#define WM_CLIPBOARDUPDATE 0x031D -#define WM_DWMCOMPOSITIONCHANGED 0x031E -#define WM_DWMNCRENDERINGCHANGED 0x031F -#define WM_DWMCOLORIZATIONCOLORCHANGED 0x0320 -#define WM_DWMWINDOWMAXIMIZEDCHANGE 0x0321 -#define WM_GETTITLEBARINFOEX 0x033F -#define WM_HANDHELDFIRST 0x0358 -#define WM_HANDHELDLAST 0x035F -#define WM_AFXFIRST 0x0360 -#define WM_AFXLAST 0x037F -#define WM_PENWINFIRST 0x0380 -#define WM_PENWINLAST 0x038F -#define WM_DDE_FIRST 0x03E0 -#define IDC_STATUS 1000 -#define IDC_LIST1 1000 -#define IDC_LEFT 1001 -#define IDC_RIGHT 1002 -#define IDC_TOP 1003 -#define IDC_MIDDLE 1004 -#define IDC_BOTTOM 1005 -#define IDC_EDIT 1010 -#define IDC_CLEAR 1011 -#define CF_GDIOBJLAST 0x03FF -#define _WIN32_WINNT_NT4 0x0400 -#define _WIN32_IE_IE40 0x0400 -#define WM_USER 0x0400 -#define WVR_VALIDRECTS 0x0400 -#define HDS_CHECKBOXES 0x0400 -#define TBSTYLE_ALTDRAG 0x0400 -#define RBS_BANDBORDERS 0x00000400 -#define TBS_DOWNISLEFT 0x0400 -#define LVS_OWNERDRAWFIXED 0x0400 -#define TVS_SINGLEEXPAND 0x0400 -#define TVS_EX_DRAWIMAGEASYNC 0x0400 -#define TCS_FIXEDWIDTH 0x0400 -#define ctlFirst 0x0400 -#define psh1 0x0400 -#define _WIN32_IE_IE401 0x0401 -#define psh2 0x0401 -#define psh3 0x0402 -#define psh4 0x0403 -#define psh5 0x0404 -#define psh6 0x0405 -#define psh7 0x0406 -#define psh8 0x0407 -#define psh9 0x0408 -#define psh10 0x0409 -#define psh11 0x040a -#define psh12 0x040b -#define psh13 0x040c -#define psh14 0x040d -#define psh15 0x040e -#define psh16 0x040f -#define _WIN32_WINDOWS 0x0410 -#define chx1 0x0410 -#define chx2 0x0411 -#define chx3 0x0412 -#define chx4 0x0413 -#define chx5 0x0414 -#define chx6 0x0415 -#define chx7 0x0416 -#define chx8 0x0417 -#define chx9 0x0418 -#define chx10 0x0419 -#define chx11 0x041a -#define chx12 0x041b -#define chx13 0x041c -#define chx14 0x041d -#define chx15 0x041e -#define chx16 0x041f -#define rad1 0x0420 -#define rad2 0x0421 -#define rad3 0x0422 -#define rad4 0x0423 -#define rad5 0x0424 -#define rad6 0x0425 
-#define rad7 0x0426 -#define rad8 0x0427 -#define rad9 0x0428 -#define rad10 0x0429 -#define rad11 0x042a -#define rad12 0x042b -#define rad13 0x042c -#define rad14 0x042d -#define rad15 0x042e -#define rad16 0x042f -#define grp1 0x0430 -#define grp2 0x0431 -#define grp3 0x0432 -#define grp4 0x0433 -#define frm1 0x0434 -#define frm2 0x0435 -#define frm3 0x0436 -#define frm4 0x0437 -#define rct1 0x0438 -#define rct2 0x0439 -#define rct3 0x043a -#define rct4 0x043b -#define ico1 0x043c -#define ico2 0x043d -#define ico3 0x043e -#define ico4 0x043f -#define stc1 0x0440 -#define stc2 0x0441 -#define stc3 0x0442 -#define stc4 0x0443 -#define stc5 0x0444 -#define stc6 0x0445 -#define stc7 0x0446 -#define stc8 0x0447 -#define stc9 0x0448 -#define stc10 0x0449 -#define stc11 0x044a -#define stc12 0x044b -#define stc13 0x044c -#define stc14 0x044d -#define stc15 0x044e -#define stc16 0x044f -#define stc17 0x0450 -#define stc18 0x0451 -#define stc19 0x0452 -#define stc20 0x0453 -#define stc21 0x0454 -#define stc22 0x0455 -#define stc23 0x0456 -#define stc24 0x0457 -#define stc25 0x0458 -#define stc26 0x0459 -#define stc27 0x045a -#define stc28 0x045b -#define stc29 0x045c -#define stc30 0x045d -#define stc31 0x045e -#define stc32 0x045f -#define lst1 0x0460 -#define lst2 0x0461 -#define lst3 0x0462 -#define lst4 0x0463 -#define lst5 0x0464 -#define lst6 0x0465 -#define lst7 0x0466 -#define lst8 0x0467 -#define lst9 0x0468 -#define lst10 0x0469 -#define lst11 0x046a -#define lst12 0x046b -#define lst13 0x046c -#define lst14 0x046d -#define lst15 0x046e -#define lst16 0x046f -#define cmb1 0x0470 -#define cmb2 0x0471 -#define cmb3 0x0472 -#define cmb4 0x0473 -#define cmb5 0x0474 -#define cmb6 0x0475 -#define cmb7 0x0476 -#define cmb8 0x0477 -#define cmb9 0x0478 -#define cmb10 0x0479 -#define cmb11 0x047a -#define cmb12 0x047b -#define cmb13 0x047c -#define cmb14 0x047d -#define cmb15 0x047e -#define cmb16 0x047f -#define edt1 0x0480 -#define edt2 0x0481 -#define edt3 0x0482 -#define edt4 0x0483 -#define edt5 0x0484 -#define edt6 0x0485 -#define edt7 0x0486 -#define edt8 0x0487 -#define edt9 0x0488 -#define edt10 0x0489 -#define edt11 0x048a -#define edt12 0x048b -#define edt13 0x048c -#define edt14 0x048d -#define edt15 0x048e -#define edt16 0x048f -#define scr1 0x0490 -#define scr2 0x0491 -#define scr3 0x0492 -#define scr4 0x0493 -#define scr5 0x0494 -#define scr6 0x0495 -#define scr7 0x0496 -#define scr8 0x0497 -#define ctl1 0x04A0 -#define ctlLast 0x04ff -#define _WIN32_WINNT_WIN2K 0x0500 -#define _WIN32_IE_IE50 0x0500 -#define _WIN32_WINNT_WINXP 0x0501 -#define _WIN32_IE_IE501 0x0501 -#define _WIN32_WINNT_WS03 0x0502 -#define _WIN32_IE_IE55 0x0550 -#define _WIN32_WINNT_LONGHORN 0x0600 -#define _WIN32_IE_IE60 0x0600 -#define FILEOPENORD 1536 -#define _WIN32_IE_IE60SP1 0x0601 -#define MULTIFILEOPENORD 1537 -#define _WIN32_IE_WS03 0x0602 -#define PRINTDLGORD 1538 -#define _WIN32_IE_IE60SP2 0x0603 -#define PRNSETUPDLGORD 1539 -#define FINDDLGORD 1540 -#define REPLACEDLGORD 1541 -#define FONTDLGORD 1542 -#define FORMATDLGORD31 1543 -#define FORMATDLGORD30 1544 -#define RUNDLGORD 1545 -#define PAGESETUPDLGORD 1546 -#define NEWFILEOPENORD 1547 -#define PRINTDLGEXORD 1549 -#define PAGESETUPDLGORDMOTIF 1550 -#define COLORMGMTDLGORD 1551 -#define NEWFILEOPENV2ORD 1552 -#define NEWFILEOPENV3ORD 1553 -#define _WIN32_IE_IE70 0x0700 -#define CS_SAVEBITS 0x0800 -#define HDS_NOSIZING 0x0800 -#define TBSTYLE_FLAT 0x0800 -#define RBS_FIXEDORDER 0x00000800 -#define SBARS_TOOLTIPS 0x0800 -#define SBT_TOOLTIPS 0x0800 
-#define TBS_NOTIFYBEFOREMOVE 0x0800 -#define LVS_ALIGNLEFT 0x0800 -#define TVS_INFOTIP 0x0800 -#define TCS_RAGGEDRIGHT 0x0800 -#define LVS_ALIGNMASK 0x0c00 -#define CS_BYTEALIGNCLIENT 0x1000 -#define HDS_OVERFLOW 0x1000 -#define TBSTYLE_LIST 0x1000 -#define RBS_REGISTERDROP 0x00001000 -#define TBS_TRANSPARENTBKGND 0x1000 -#define LVS_OWNERDATA 0x1000 -#define TVS_FULLROWSELECT 0x1000 -#define TCS_FOCUSONBUTTONDOWN 0x1000 -#define CS_BYTEALIGNWINDOW 0x2000 -#define TBSTYLE_CUSTOMERASE 0x2000 -#define RBS_AUTOSIZE 0x00002000 -#define LVS_NOSCROLL 0x2000 -#define TVS_NOSCROLL 0x2000 -#define TCS_OWNERDRAWFIXED 0x2000 -#define CS_GLOBALCLASS 0x4000 -#define TBSTYLE_REGISTERDROP 0x4000 -#define RBS_VERTICALGRIPPER 0x00004000 -#define LVS_NOCOLUMNHEADER 0x4000 -#define TVS_NONEVENHEIGHT 0x4000 -#define TCS_TOOLTIPS 0x4000 -#define IDH_NO_HELP 28440 -#define IDH_MISSING_CONTEXT 28441 -#define IDH_GENERIC_HELP_BUTTON 28442 -#define IDH_OK 28443 -#define IDH_CANCEL 28444 -#define IDH_HELP 28445 -#define LANG_BOSNIAN_NEUTRAL 0x781a -#define LANG_CHINESE_TRADITIONAL 0x7c04 -#define LANG_SERBIAN_NEUTRAL 0x7c1a -#define IDTIMEOUT 32000 -#define OCR_NORMAL 32512 -#define OIC_SAMPLE 32512 -#define OCR_IBEAM 32513 -#define OIC_HAND 32513 -#define OCR_WAIT 32514 -#define OIC_QUES 32514 -#define OCR_CROSS 32515 -#define OIC_BANG 32515 -#define OCR_UP 32516 -#define OIC_NOTE 32516 -#define OIC_WINLOGO 32517 -#define OIC_SHIELD 32518 -#define OCR_SIZE 32640 -#define OCR_ICON 32641 -#define OCR_SIZENWSE 32642 -#define OCR_SIZENESW 32643 -#define OCR_SIZEWE 32644 -#define OCR_SIZENS 32645 -#define OCR_SIZEALL 32646 -#define OCR_ICOCUR 32647 -#define OCR_NO 32648 -#define OCR_HAND 32649 -#define OCR_APPSTARTING 32650 -#define OBM_LFARROWI 32734 -#define OBM_RGARROWI 32735 -#define OBM_DNARROWI 32736 -#define OBM_UPARROWI 32737 -#define OBM_COMBO 32738 -#define OBM_MNARROW 32739 -#define OBM_LFARROWD 32740 -#define OBM_RGARROWD 32741 -#define OBM_DNARROWD 32742 -#define OBM_UPARROWD 32743 -#define OBM_RESTORED 32744 -#define OBM_ZOOMD 32745 -#define OBM_REDUCED 32746 -#define OBM_RESTORE 32747 -#define OBM_ZOOM 32748 -#define OBM_REDUCE 32749 -#define OBM_LFARROW 32750 -#define OBM_RGARROW 32751 -#define OBM_DNARROW 32752 -#define OBM_UPARROW 32753 -#define OBM_CLOSE 32754 -#define OBM_OLD_RESTORE 32755 -#define OBM_OLD_ZOOM 32756 -#define OBM_OLD_REDUCE 32757 -#define OBM_BTNCORNERS 32758 -#define OBM_CHECKBOXES 32759 -#define OBM_CHECK 32760 -#define OBM_BTSIZE 32761 -#define OBM_OLD_LFARROW 32762 -#define OBM_OLD_RGARROW 32763 -#define OBM_OLD_DNARROW 32764 -#define OBM_OLD_UPARROW 32765 -#define OBM_SIZE 32766 -#define OBM_OLD_CLOSE 32767 -#define WM_APP 0x8000 -#define HELP_TCARD 0x8000 -#define TBSTYLE_TRANSPARENT 0x8000 -#define RBS_DBLCLKTOGGLE 0x00008000 -#define LVS_NOSORTHEADER 0x8000 -#define TVS_NOHSCROLL 0x8000 -#define TCS_FOCUSNEVER 0x8000 -#define SC_SIZE 0xF000 -#define SC_SEPARATOR 0xF00F -#define SC_MOVE 0xF010 -#define SC_MINIMIZE 0xF020 -#define SC_MAXIMIZE 0xF030 -#define SC_NEXTWINDOW 0xF040 -#define SC_PREVWINDOW 0xF050 -#define SC_CLOSE 0xF060 -#define SC_VSCROLL 0xF070 -#define SC_HSCROLL 0xF080 -#define SC_MOUSEMENU 0xF090 -#define SC_KEYMENU 0xF100 -#define SC_ARRANGE 0xF110 -#define SC_RESTORE 0xF120 -#define SC_TASKLIST 0xF130 -#define SC_SCREENSAVE 0xF140 -#define SC_HOTKEY 0xF150 -#define SC_DEFAULT 0xF160 -#define SC_MONITORPOWER 0xF170 -#define SC_CONTEXTHELP 0xF180 -#define LVS_TYPESTYLEMASK 0xfc00 -#define SPVERSION_MASK 0x0000FF00 -#define UNICODE_NOCHAR 0xFFFF -#define 
IDC_STATIC -1 - -// Next default values for new objects -// -#ifdef APSTUDIO_INVOKED -#ifndef APSTUDIO_READONLY_SYMBOLS -#define _APS_NEXT_RESOURCE_VALUE 101 -#define _APS_NEXT_COMMAND_VALUE 40001 -#define _APS_NEXT_CONTROL_VALUE 1002 -#define _APS_NEXT_SYMED_VALUE 101 -#endif -#endif diff --git a/PC/associator.rc b/PC/associator.rc deleted file mode 100644 --- a/PC/associator.rc +++ /dev/null @@ -1,97 +0,0 @@ -// Microsoft Visual C++ generated resource script. -// -#include "associator.h" -#include "winuser.h" -///////////////////////////////////////////////////////////////////////////// -// English (U.K.) resources - -#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENG) -#ifdef _WIN32 -LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_UK -#pragma code_page(1252) -#endif //_WIN32 - -///////////////////////////////////////////////////////////////////////////// -// -// Icon -// - -// Icon with lowest ID value placed first to ensure application icon -// remains consistent on all systems. -DLG_ICON ICON "launcher.ico" - -///////////////////////////////////////////////////////////////////////////// -// -// Dialog -// - -DLG_MAIN DIALOGEX 20, 40, 236, 183 -// Make the dialog visible after positioning at centre -STYLE DS_SETFONT | DS_3DLOOK | WS_MINIMIZEBOX | WS_CAPTION | WS_SYSMENU -EXSTYLE WS_EX_NOPARENTNOTIFY -CAPTION "Python File Associations Have Ceased To Be!" -FONT 10, "Arial", 400, 0, 0x0 -BEGIN - LTEXT "You've uninstalled the Python Launcher, so now there are no applications associated with Python files.",IDC_STATIC,7,7,225,18 - LTEXT "You may wish to associate Python files with one of the Python versions installed on your machine, listed below:",IDC_STATIC,7,27,225,18 - CONTROL "",IDC_LIST1,"SysListView32",LVS_REPORT | LVS_SINGLESEL | LVS_ALIGNLEFT | WS_BORDER | WS_TABSTOP,6,49,224,105 - PUSHBUTTON "&Associate with selected Python",IDOK,6,164,117,14,WS_DISABLED - PUSHBUTTON "Do&n't associate Python files",IDCANCEL,128,164,102,14 -END - - -#ifdef APSTUDIO_INVOKED -///////////////////////////////////////////////////////////////////////////// -// -// TEXTINCLUDE -// - -1 TEXTINCLUDE -BEGIN - "resource.h\0" -END - -2 TEXTINCLUDE -BEGIN - "\0" -END - -3 TEXTINCLUDE -BEGIN - "\r\n" - "\0" -END - -#endif // APSTUDIO_INVOKED - - -///////////////////////////////////////////////////////////////////////////// -// -// DESIGNINFO -// - -#ifdef APSTUDIO_INVOKED -GUIDELINES DESIGNINFO -BEGIN - DLG_MAIN, DIALOG - BEGIN - RIGHTMARGIN, 238 - END -END -#endif // APSTUDIO_INVOKED - -#endif // English (U.K.) resources -///////////////////////////////////////////////////////////////////////////// - - - -#ifndef APSTUDIO_INVOKED -///////////////////////////////////////////////////////////////////////////// -// -// Generated from the TEXTINCLUDE 3 resource. -// - - -///////////////////////////////////////////////////////////////////////////// -#endif // not APSTUDIO_INVOKED - diff --git a/PCbuild/associator.vcxproj b/PCbuild/associator.vcxproj deleted file mode 100644 --- a/PCbuild/associator.vcxproj +++ /dev/null @@ -1,84 +0,0 @@ -? 
- - - - Debug - Win32 - - - Release - Win32 - - - - {023B3CDA-59C8-45FD-95DC-F8973322ED34} - associator - - - - Application - true - Unicode - - - Application - false - true - Unicode - - - - - - - - - - - - - - - - - - - Level3 - Disabled - _DEBUG;_WINDOWS;%(PreprocessorDefinitions) - - - true - comctl32.lib;%(AdditionalDependencies) - false - - - - - Level3 - MaxSpeed - true - true - NDEBUG;_WINDOWS;%(PreprocessorDefinitions) - - - true - true - true - comctl32.lib;%(AdditionalDependencies) - false - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/PCbuild/associator.vcxproj.filters b/PCbuild/associator.vcxproj.filters deleted file mode 100644 --- a/PCbuild/associator.vcxproj.filters +++ /dev/null @@ -1,32 +0,0 @@ -? - - - - {4FC737F1-C7A5-4376-A066-2A32D752A2FF} - cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx - - - {93995380-89BD-4b04-88EB-625FBE52EBFB} - h;hpp;hxx;hm;inl;inc;xsd - - - {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} - rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms - - - - - Source Files - - - - - Resource Files - - - - - Header Files - - - \ No newline at end of file diff --git a/PCbuild/pcbuild.sln b/PCbuild/pcbuild.sln --- a/PCbuild/pcbuild.sln +++ b/PCbuild/pcbuild.sln @@ -72,8 +72,6 @@ EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "pywlauncher", "pywlauncher.vcxproj", "{1D4B18D3-7C12-4ECB-9179-8531FF876CE6}" EndProject -Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "associator", "associator.vcxproj", "{023B3CDA-59C8-45FD-95DC-F8973322ED34}" -EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Win32 = Debug|Win32 -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 00:13:10 2012 From: python-checkins at python.org (brian.curtin) Date: Fri, 22 Jun 2012 00:13:10 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Remove_the_original_license?= =?utf8?q?=2C_as_this_was_contributed_under_Vinay_Sajip=27s?= Message-ID: http://hg.python.org/cpython/rev/2c4101b71328 changeset: 77554:2c4101b71328 parent: 77552:13964ca5fb53 user: Martin v. L?wis date: Thu Jun 21 19:29:37 2012 +0200 summary: Remove the original license, as this was contributed under Vinay Sajip's agreement. files: PC/associator.c | 25 ++----------------------- PC/launcher.c | 25 ++----------------------- 2 files changed, 4 insertions(+), 46 deletions(-) diff --git a/PC/associator.c b/PC/associator.c --- a/PC/associator.c +++ b/PC/associator.c @@ -1,27 +1,6 @@ /* - * Copyright (C) 2011-2012 Vinay Sajip. All rights reserved. - * - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. + * Copyright (C) 2011-2012 Vinay Sajip. + * Licensed to PSF under a contributor agreement. */ #include #include diff --git a/PC/launcher.c b/PC/launcher.c --- a/PC/launcher.c +++ b/PC/launcher.c @@ -1,27 +1,6 @@ /* - * Copyright (C) 2011-2012 Vinay Sajip. All rights reserved. - * - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. + * Copyright (C) 2011-2012 Vinay Sajip. + * Licensed to PSF under a contributor agreement. * * Based on the work of: * -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 00:13:11 2012 From: python-checkins at python.org (brian.curtin) Date: Fri, 22 Jun 2012 00:13:11 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_branch_merge?= Message-ID: http://hg.python.org/cpython/rev/dedc2cade108 changeset: 77555:dedc2cade108 parent: 77553:f0eb280ba898 parent: 77554:2c4101b71328 user: Brian Curtin date: Thu Jun 21 16:36:05 2012 -0500 summary: branch merge files: PC/launcher.c | 25 ++----------------------- 1 files changed, 2 insertions(+), 23 deletions(-) diff --git a/PC/launcher.c b/PC/launcher.c --- a/PC/launcher.c +++ b/PC/launcher.c @@ -1,27 +1,6 @@ /* - * Copyright (C) 2011-2012 Vinay Sajip. All rights reserved. - * - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. 
Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. + * Copyright (C) 2011-2012 Vinay Sajip. + * Licensed to PSF under a contributor agreement. * * Based on the work of: * -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 00:13:11 2012 From: python-checkins at python.org (brian.curtin) Date: Fri, 22 Jun 2012 00:13:11 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_Merge_/features/pep397_changes?= Message-ID: http://hg.python.org/cpython/rev/a7ecbb2ad967 changeset: 77556:a7ecbb2ad967 parent: 77539:464cf523485e parent: 77555:dedc2cade108 user: Brian Curtin date: Thu Jun 21 17:11:45 2012 -0500 summary: Merge /features/pep397 changes files: PC/launcher.c | 1365 +++++++++++++++ PC/launcher.ico | Bin PC/pylauncher.rc | 51 + PCbuild/pcbuild.sln | 44 + PCbuild/pylauncher.vcxproj | 165 + PCbuild/pylauncher.vcxproj.filters | 32 + PCbuild/pywlauncher.vcxproj | 160 + PCbuild/pywlauncher.vcxproj.filters | 32 + Tools/msi/msi.py | 37 +- 9 files changed, 1880 insertions(+), 6 deletions(-) diff --git a/PC/launcher.c b/PC/launcher.c new file mode 100644 --- /dev/null +++ b/PC/launcher.c @@ -0,0 +1,1365 @@ +/* + * Copyright (C) 2011-2012 Vinay Sajip. + * Licensed to PSF under a contributor agreement. + * + * Based on the work of: + * + * Mark Hammond (original author of Python version) + * Curt Hagenlocher (job management) + */ + +#include +#include +#include +#include + +#define BUFSIZE 256 +#define MSGSIZE 1024 + +/* Build options. */ +#define SKIP_PREFIX +/* #define SEARCH_PATH */ + +/* Just for now - static definition */ + +static FILE * log_fp = NULL; + +static wchar_t * +skip_whitespace(wchar_t * p) +{ + while (*p && isspace(*p)) + ++p; + return p; +} + +/* + * This function is here to simplify memory management + * and to treat blank values as if they are absent. + */ +static wchar_t * get_env(wchar_t * key) +{ + /* This is not thread-safe, just like getenv */ + static wchar_t buf[256]; + DWORD result = GetEnvironmentVariableW(key, buf, 256); + + if (result > 255) { + /* Large environment variable. Accept some leakage */ + wchar_t *buf2 = (wchar_t*)malloc(sizeof(wchar_t) * (result+1)); + GetEnvironmentVariableW(key, buf2, result); + return buf2; + } + + if (result == 0) + /* Either some error, e.g. ERROR_ENVVAR_NOT_FOUND, + or an empty environment variable. */ + return NULL; + + return buf; +} + + +static void +debug(wchar_t * format, ...) 
+{ + va_list va; + + if (log_fp != NULL) { + va_start(va, format); + vfwprintf_s(log_fp, format, va); + } +} + +static void +winerror(int rc, wchar_t * message, int size) +{ + FormatMessageW( + FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS, + NULL, rc, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), + message, size, NULL); +} + +static void +error(int rc, wchar_t * format, ... ) +{ + va_list va; + wchar_t message[MSGSIZE]; + wchar_t win_message[MSGSIZE]; + int len; + + va_start(va, format); + len = _vsnwprintf_s(message, MSGSIZE, _TRUNCATE, format, va); + + if (rc == 0) { /* a Windows error */ + winerror(GetLastError(), win_message, MSGSIZE); + if (len >= 0) { + _snwprintf_s(&message[len], MSGSIZE - len, _TRUNCATE, L": %s", + win_message); + } + } + +#if !defined(_WINDOWS) + fwprintf(stderr, L"%s\n", message); +#else + MessageBox(NULL, message, TEXT("Python Launcher is sorry to say ..."), MB_OK); +#endif + ExitProcess(rc); +} + +#if defined(_WINDOWS) + +#define PYTHON_EXECUTABLE L"pythonw.exe" + +#else + +#define PYTHON_EXECUTABLE L"python.exe" + +#endif + +#define RC_NO_STD_HANDLES 100 +#define RC_CREATE_PROCESS 101 +#define RC_BAD_VIRTUAL_PATH 102 +#define RC_NO_PYTHON 103 + +#define MAX_VERSION_SIZE 4 + +typedef struct { + wchar_t version[MAX_VERSION_SIZE]; /* m.n */ + int bits; /* 32 or 64 */ + wchar_t executable[MAX_PATH]; +} INSTALLED_PYTHON; + +/* + * To avoid messing about with heap allocations, just assume we can allocate + * statically and never have to deal with more versions than this. + */ +#define MAX_INSTALLED_PYTHONS 100 + +static INSTALLED_PYTHON installed_pythons[MAX_INSTALLED_PYTHONS]; + +static size_t num_installed_pythons = 0; + +/* to hold SOFTWARE\Python\PythonCore\X.Y\InstallPath */ +#define IP_BASE_SIZE 40 +#define IP_SIZE (IP_BASE_SIZE + MAX_VERSION_SIZE) +#define CORE_PATH L"SOFTWARE\\Python\\PythonCore" + +static wchar_t * location_checks[] = { + L"\\", + L"\\PCBuild\\", + L"\\PCBuild\\amd64\\", + NULL +}; + +static INSTALLED_PYTHON * +find_existing_python(wchar_t * path) +{ + INSTALLED_PYTHON * result = NULL; + size_t i; + INSTALLED_PYTHON * ip; + + for (i = 0, ip = installed_pythons; i < num_installed_pythons; i++, ip++) { + if (_wcsicmp(path, ip->executable) == 0) { + result = ip; + break; + } + } + return result; +} + +static void +locate_pythons_for_key(HKEY root, REGSAM flags) +{ + HKEY core_root, ip_key; + LSTATUS status = RegOpenKeyExW(root, CORE_PATH, 0, flags, &core_root); + wchar_t message[MSGSIZE]; + DWORD i; + size_t n; + BOOL ok; + DWORD type, data_size, attrs; + INSTALLED_PYTHON * ip, * pip; + wchar_t ip_path[IP_SIZE]; + wchar_t * check; + wchar_t ** checkp; + wchar_t *key_name = (root == HKEY_LOCAL_MACHINE) ? 
L"HKLM" : L"HKCU"; + + if (status != ERROR_SUCCESS) + debug(L"locate_pythons_for_key: unable to open PythonCore key in %s\n", + key_name); + else { + ip = &installed_pythons[num_installed_pythons]; + for (i = 0; num_installed_pythons < MAX_INSTALLED_PYTHONS; i++) { + status = RegEnumKeyW(core_root, i, ip->version, MAX_VERSION_SIZE); + if (status != ERROR_SUCCESS) { + if (status != ERROR_NO_MORE_ITEMS) { + /* unexpected error */ + winerror(status, message, MSGSIZE); + debug(L"Can't enumerate registry key for version %s: %s\n", + ip->version, message); + } + break; + } + else { + _snwprintf_s(ip_path, IP_SIZE, _TRUNCATE, + L"%s\\%s\\InstallPath", CORE_PATH, ip->version); + status = RegOpenKeyExW(root, ip_path, 0, flags, &ip_key); + if (status != ERROR_SUCCESS) { + winerror(status, message, MSGSIZE); + // Note: 'message' already has a trailing \n + debug(L"%s\\%s: %s", key_name, ip_path, message); + continue; + } + data_size = sizeof(ip->executable) - 1; + status = RegQueryValueExW(ip_key, NULL, NULL, &type, + (LPBYTE)ip->executable, &data_size); + RegCloseKey(ip_key); + if (status != ERROR_SUCCESS) { + winerror(status, message, MSGSIZE); + debug(L"%s\\%s: %s\n", key_name, ip_path, message); + continue; + } + if (type == REG_SZ) { + data_size = data_size / sizeof(wchar_t) - 1; /* for NUL */ + if (ip->executable[data_size - 1] == L'\\') + --data_size; /* reg value ended in a backslash */ + /* ip->executable is data_size long */ + for (checkp = location_checks; *checkp; ++checkp) { + check = *checkp; + _snwprintf_s(&ip->executable[data_size], + MAX_PATH - data_size, + MAX_PATH - data_size, + L"%s%s", check, PYTHON_EXECUTABLE); + attrs = GetFileAttributesW(ip->executable); + if (attrs == INVALID_FILE_ATTRIBUTES) { + winerror(GetLastError(), message, MSGSIZE); + debug(L"locate_pythons_for_key: %s: %s", + ip->executable, message); + } + else if (attrs & FILE_ATTRIBUTE_DIRECTORY) { + debug(L"locate_pythons_for_key: '%s' is a \ +directory\n", + ip->executable, attrs); + } + else if (find_existing_python(ip->executable)) { + debug(L"locate_pythons_for_key: %s: already \ +found: %s\n", ip->executable); + } + else { + /* check the executable type. 
*/ + ok = GetBinaryTypeW(ip->executable, &attrs); + if (!ok) { + debug(L"Failure getting binary type: %s\n", + ip->executable); + } + else { + if (attrs == SCS_64BIT_BINARY) + ip->bits = 64; + else if (attrs == SCS_32BIT_BINARY) + ip->bits = 32; + else + ip->bits = 0; + if (ip->bits == 0) { + debug(L"locate_pythons_for_key: %s: \ +invalid binary type: %X\n", + ip->executable, attrs); + } + else { + if (wcschr(ip->executable, L' ') != NULL) { + /* has spaces, so quote */ + n = wcslen(ip->executable); + memmove(&ip->executable[1], + ip->executable, n * sizeof(wchar_t)); + ip->executable[0] = L'\"'; + ip->executable[n + 1] = L'\"'; + ip->executable[n + 2] = L'\0'; + } + debug(L"locate_pythons_for_key: %s \ +is a %dbit executable\n", + ip->executable, ip->bits); + ++num_installed_pythons; + pip = ip++; + if (num_installed_pythons >= + MAX_INSTALLED_PYTHONS) + break; + /* Copy over the attributes for the next */ + *ip = *pip; + } + } + } + } + } + } + } + RegCloseKey(core_root); + } +} + +static int +compare_pythons(const void * p1, const void * p2) +{ + INSTALLED_PYTHON * ip1 = (INSTALLED_PYTHON *) p1; + INSTALLED_PYTHON * ip2 = (INSTALLED_PYTHON *) p2; + /* note reverse sorting on version */ + int result = wcscmp(ip2->version, ip1->version); + + if (result == 0) + result = ip2->bits - ip1->bits; /* 64 before 32 */ + return result; +} + +static void +locate_all_pythons() +{ +#if defined(_M_X64) + // If we are a 64bit process, first hit the 32bit keys. + debug(L"locating Pythons in 32bit registry\n"); + locate_pythons_for_key(HKEY_CURRENT_USER, KEY_READ | KEY_WOW64_32KEY); + locate_pythons_for_key(HKEY_LOCAL_MACHINE, KEY_READ | KEY_WOW64_32KEY); +#else + // If we are a 32bit process on a 64bit Windows, first hit the 64bit keys. + BOOL f64 = FALSE; + if (IsWow64Process(GetCurrentProcess(), &f64) && f64) { + debug(L"locating Pythons in 64bit registry\n"); + locate_pythons_for_key(HKEY_CURRENT_USER, KEY_READ | KEY_WOW64_64KEY); + locate_pythons_for_key(HKEY_LOCAL_MACHINE, KEY_READ | KEY_WOW64_64KEY); + } +#endif + // now hit the "native" key for this process bittedness. + debug(L"locating Pythons in native registry\n"); + locate_pythons_for_key(HKEY_CURRENT_USER, KEY_READ); + locate_pythons_for_key(HKEY_LOCAL_MACHINE, KEY_READ); + qsort(installed_pythons, num_installed_pythons, sizeof(INSTALLED_PYTHON), + compare_pythons); +} + +static INSTALLED_PYTHON * +find_python_by_version(wchar_t const * wanted_ver) +{ + INSTALLED_PYTHON * result = NULL; + INSTALLED_PYTHON * ip = installed_pythons; + size_t i, n; + size_t wlen = wcslen(wanted_ver); + int bits = 0; + + if (wcsstr(wanted_ver, L"-32")) + bits = 32; + for (i = 0; i < num_installed_pythons; i++, ip++) { + n = wcslen(ip->version); + if (n > wlen) + n = wlen; + if ((wcsncmp(ip->version, wanted_ver, n) == 0) && + /* bits == 0 => don't care */ + ((bits == 0) || (ip->bits == bits))) { + result = ip; + break; + } + } + return result; +} + + +static wchar_t appdata_ini_path[MAX_PATH]; +static wchar_t launcher_ini_path[MAX_PATH]; + +/* + * Get a value either from the environment or a configuration file. + * The key passed in will either be "python", "python2" or "python3". + */ +static wchar_t * +get_configured_value(wchar_t * key) +{ +/* + * Note: this static value is used to return a configured value + * obtained either from the environment or configuration file. + * This should be OK since there wouldn't be any concurrent calls. 
+ */ + static wchar_t configured_value[MSGSIZE]; + wchar_t * result = NULL; + wchar_t * found_in = L"environment"; + DWORD size; + + /* First, search the environment. */ + _snwprintf_s(configured_value, MSGSIZE, _TRUNCATE, L"py_%s", key); + result = get_env(configured_value); + if (result == NULL && appdata_ini_path[0]) { + /* Not in environment: check local configuration. */ + size = GetPrivateProfileStringW(L"defaults", key, NULL, + configured_value, MSGSIZE, + appdata_ini_path); + if (size > 0) { + result = configured_value; + found_in = appdata_ini_path; + } + } + if (result == NULL && launcher_ini_path[0]) { + /* Not in environment or local: check global configuration. */ + size = GetPrivateProfileStringW(L"defaults", key, NULL, + configured_value, MSGSIZE, + launcher_ini_path); + if (size > 0) { + result = configured_value; + found_in = launcher_ini_path; + } + } + if (result) { + debug(L"found configured value '%s=%s' in %s\n", + key, result, found_in ? found_in : L"(unknown)"); + } else { + debug(L"found no configured value for '%s'\n", key); + } + return result; +} + +static INSTALLED_PYTHON * +locate_python(wchar_t * wanted_ver) +{ + static wchar_t config_key [] = { L"pythonX" }; + static wchar_t * last_char = &config_key[sizeof(config_key) / + sizeof(wchar_t) - 2]; + INSTALLED_PYTHON * result = NULL; + size_t n = wcslen(wanted_ver); + wchar_t * configured_value; + + if (num_installed_pythons == 0) + locate_all_pythons(); + + if (n == 1) { /* just major version specified */ + *last_char = *wanted_ver; + configured_value = get_configured_value(config_key); + if (configured_value != NULL) + wanted_ver = configured_value; + } + if (*wanted_ver) { + result = find_python_by_version(wanted_ver); + debug(L"search for Python version '%s' found ", wanted_ver); + if (result) { + debug(L"'%s'\n", result->executable); + } else { + debug(L"no interpreter\n"); + } + } + else { + *last_char = L'\0'; /* look for an overall default */ + configured_value = get_configured_value(config_key); + if (configured_value) + result = find_python_by_version(configured_value); + if (result == NULL) + result = find_python_by_version(L"2"); + if (result == NULL) + result = find_python_by_version(L"3"); + debug(L"search for default Python found "); + if (result) { + debug(L"version %s at '%s'\n", + result->version, result->executable); + } else { + debug(L"no interpreter\n"); + } + } + return result; +} + +/* + * Process creation code + */ + +static BOOL +safe_duplicate_handle(HANDLE in, HANDLE * pout) +{ + BOOL ok; + HANDLE process = GetCurrentProcess(); + DWORD rc; + + *pout = NULL; + ok = DuplicateHandle(process, in, process, pout, 0, TRUE, + DUPLICATE_SAME_ACCESS); + if (!ok) { + rc = GetLastError(); + if (rc == ERROR_INVALID_HANDLE) { + debug(L"DuplicateHandle returned ERROR_INVALID_HANDLE\n"); + ok = TRUE; + } + else { + debug(L"DuplicateHandle returned %d\n", rc); + } + } + return ok; +} + +static BOOL WINAPI +ctrl_c_handler(DWORD code) +{ + return TRUE; /* We just ignore all control events. 
*/ +} + +static void +run_child(wchar_t * cmdline) +{ + HANDLE job; + JOBOBJECT_EXTENDED_LIMIT_INFORMATION info; + DWORD rc; + BOOL ok; + STARTUPINFOW si; + PROCESS_INFORMATION pi; + + debug(L"run_child: about to run '%s'\n", cmdline); + job = CreateJobObject(NULL, NULL); + ok = QueryInformationJobObject(job, JobObjectExtendedLimitInformation, + &info, sizeof(info), &rc); + if (!ok || (rc != sizeof(info)) || !job) + error(RC_CREATE_PROCESS, L"Job information querying failed"); + info.BasicLimitInformation.LimitFlags |= JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE | + JOB_OBJECT_LIMIT_SILENT_BREAKAWAY_OK; + ok = SetInformationJobObject(job, JobObjectExtendedLimitInformation, &info, + sizeof(info)); + if (!ok) + error(RC_CREATE_PROCESS, L"Job information setting failed"); + memset(&si, 0, sizeof(si)); + si.cb = sizeof(si); + ok = safe_duplicate_handle(GetStdHandle(STD_INPUT_HANDLE), &si.hStdInput); + if (!ok) + error(RC_NO_STD_HANDLES, L"stdin duplication failed"); + ok = safe_duplicate_handle(GetStdHandle(STD_OUTPUT_HANDLE), &si.hStdOutput); + if (!ok) + error(RC_NO_STD_HANDLES, L"stdout duplication failed"); + ok = safe_duplicate_handle(GetStdHandle(STD_ERROR_HANDLE), &si.hStdError); + if (!ok) + error(RC_NO_STD_HANDLES, L"stderr duplication failed"); + + ok = SetConsoleCtrlHandler(ctrl_c_handler, TRUE); + if (!ok) + error(RC_CREATE_PROCESS, L"control handler setting failed"); + + si.dwFlags = STARTF_USESTDHANDLES; + ok = CreateProcessW(NULL, cmdline, NULL, NULL, TRUE, + 0, NULL, NULL, &si, &pi); + if (!ok) + error(RC_CREATE_PROCESS, L"Unable to create process using '%s'", cmdline); + AssignProcessToJobObject(job, pi.hProcess); + CloseHandle(pi.hThread); + WaitForSingleObject(pi.hProcess, INFINITE); + ok = GetExitCodeProcess(pi.hProcess, &rc); + if (!ok) + error(RC_CREATE_PROCESS, L"Failed to get exit code of process"); + debug(L"child process exit code: %d\n", rc); + ExitProcess(rc); +} + +static void +invoke_child(wchar_t * executable, wchar_t * suffix, wchar_t * cmdline) +{ + wchar_t * child_command; + size_t child_command_size; + BOOL no_suffix = (suffix == NULL) || (*suffix == L'\0'); + BOOL no_cmdline = (*cmdline == L'\0'); + + if (no_suffix && no_cmdline) + run_child(executable); + else { + if (no_suffix) { + /* add 2 for space separator + terminating NUL. */ + child_command_size = wcslen(executable) + wcslen(cmdline) + 2; + } + else { + /* add 3 for 2 space separators + terminating NUL. */ + child_command_size = wcslen(executable) + wcslen(suffix) + + wcslen(cmdline) + 3; + } + child_command = calloc(child_command_size, sizeof(wchar_t)); + if (child_command == NULL) + error(RC_CREATE_PROCESS, L"unable to allocate %d bytes for child command.", + child_command_size); + if (no_suffix) + _snwprintf_s(child_command, child_command_size, + child_command_size - 1, L"%s %s", + executable, cmdline); + else + _snwprintf_s(child_command, child_command_size, + child_command_size - 1, L"%s %s %s", + executable, suffix, cmdline); + run_child(child_command); + free(child_command); + } +} + +static wchar_t * builtin_virtual_paths [] = { + L"/usr/bin/env python", + L"/usr/bin/python", + L"/usr/local/bin/python", + L"python", + NULL +}; + +/* For now, a static array of commands. 
*/ + +#define MAX_COMMANDS 100 + +typedef struct { + wchar_t key[MAX_PATH]; + wchar_t value[MSGSIZE]; +} COMMAND; + +static COMMAND commands[MAX_COMMANDS]; +static int num_commands = 0; + +#if defined(SKIP_PREFIX) + +static wchar_t * builtin_prefixes [] = { + /* These must be in an order that the longest matches should be found, + * i.e. if the prefix is "/usr/bin/env ", it should match that entry + * *before* matching "/usr/bin/". + */ + L"/usr/bin/env ", + L"/usr/bin/", + L"/usr/local/bin/", + NULL +}; + +static wchar_t * skip_prefix(wchar_t * name) +{ + wchar_t ** pp = builtin_prefixes; + wchar_t * result = name; + wchar_t * p; + size_t n; + + for (; p = *pp; pp++) { + n = wcslen(p); + if (_wcsnicmp(p, name, n) == 0) { + result += n; /* skip the prefix */ + if (p[n - 1] == L' ') /* No empty strings in table, so n > 1 */ + result = skip_whitespace(result); + break; + } + } + return result; +} + +#endif + +#if defined(SEARCH_PATH) + +static COMMAND path_command; + +static COMMAND * find_on_path(wchar_t * name) +{ + wchar_t * pathext; + size_t varsize; + wchar_t * context = NULL; + wchar_t * extension; + COMMAND * result = NULL; + DWORD len; + errno_t rc; + + wcscpy_s(path_command.key, MAX_PATH, name); + if (wcschr(name, L'.') != NULL) { + /* assume it has an extension. */ + len = SearchPathW(NULL, name, NULL, MSGSIZE, path_command.value, NULL); + if (len) { + result = &path_command; + } + } + else { + /* No extension - search using registered extensions. */ + rc = _wdupenv_s(&pathext, &varsize, L"PATHEXT"); + if (rc == 0) { + extension = wcstok_s(pathext, L";", &context); + while (extension) { + len = SearchPathW(NULL, name, extension, MSGSIZE, path_command.value, NULL); + if (len) { + result = &path_command; + break; + } + extension = wcstok_s(NULL, L";", &context); + } + free(pathext); + } + } + return result; +} + +#endif + +static COMMAND * find_command(wchar_t * name) +{ + COMMAND * result = NULL; + COMMAND * cp = commands; + int i; + + for (i = 0; i < num_commands; i++, cp++) { + if (_wcsicmp(cp->key, name) == 0) { + result = cp; + break; + } + } +#if defined(SEARCH_PATH) + if (result == NULL) + result = find_on_path(name); +#endif + return result; +} + +static void +update_command(COMMAND * cp, wchar_t * name, wchar_t * cmdline) +{ + wcsncpy_s(cp->key, MAX_PATH, name, _TRUNCATE); + wcsncpy_s(cp->value, MSGSIZE, cmdline, _TRUNCATE); +} + +static void +add_command(wchar_t * name, wchar_t * cmdline) +{ + if (num_commands >= MAX_COMMANDS) { + debug(L"can't add %s = '%s': no room\n", name, cmdline); + } + else { + COMMAND * cp = &commands[num_commands++]; + + update_command(cp, name, cmdline); + } +} + +static void +read_config_file(wchar_t * config_path) +{ + wchar_t keynames[MSGSIZE]; + wchar_t value[MSGSIZE]; + DWORD read; + wchar_t * key; + COMMAND * cp; + wchar_t * cmdp; + + read = GetPrivateProfileStringW(L"commands", NULL, NULL, keynames, MSGSIZE, + config_path); + if (read == MSGSIZE - 1) { + debug(L"read_commands: %s: not enough space for names\n", config_path); + } + key = keynames; + while (*key) { + read = GetPrivateProfileStringW(L"commands", key, NULL, value, MSGSIZE, + config_path); + if (read == MSGSIZE - 1) { + debug(L"read_commands: %s: not enough space for %s\n", + config_path, key); + } + cmdp = skip_whitespace(value); + if (*cmdp) { + cp = find_command(key); + if (cp == NULL) + add_command(key, value); + else + update_command(cp, key, value); + } + key += wcslen(key) + 1; + } +} + +static void read_commands() +{ + if (launcher_ini_path[0]) + 
read_config_file(launcher_ini_path); + if (appdata_ini_path[0]) + read_config_file(appdata_ini_path); +} + +static BOOL +parse_shebang(wchar_t * shebang_line, int nchars, wchar_t ** command, + wchar_t ** suffix) +{ + BOOL rc = FALSE; + wchar_t ** vpp; + size_t plen; + wchar_t * p; + wchar_t zapped; + wchar_t * endp = shebang_line + nchars - 1; + COMMAND * cp; + wchar_t * skipped; + + *command = NULL; /* failure return */ + *suffix = NULL; + + if ((*shebang_line++ == L'#') && (*shebang_line++ == L'!')) { + shebang_line = skip_whitespace(shebang_line); + if (*shebang_line) { + *command = shebang_line; + for (vpp = builtin_virtual_paths; *vpp; ++vpp) { + plen = wcslen(*vpp); + if (wcsncmp(shebang_line, *vpp, plen) == 0) { + rc = TRUE; + /* We can do this because all builtin commands contain + * "python". + */ + *command = wcsstr(shebang_line, L"python"); + break; + } + } + if (*vpp == NULL) { + /* + * Not found in builtins - look in customised commands. + * + * We can't permanently modify the shebang line in case + * it's not a customised command, but we can temporarily + * stick a NUL after the command while searching for it, + * then put back the char we zapped. + */ +#if defined(SKIP_PREFIX) + skipped = skip_prefix(shebang_line); +#else + skipped = shebang_line; +#endif + p = wcspbrk(skipped, L" \t\r\n"); + if (p != NULL) { + zapped = *p; + *p = L'\0'; + } + cp = find_command(skipped); + if (p != NULL) + *p = zapped; + if (cp != NULL) { + *command = cp->value; + if (p != NULL) + *suffix = skip_whitespace(p); + } + } + /* remove trailing whitespace */ + while ((endp > shebang_line) && isspace(*endp)) + --endp; + if (endp > shebang_line) + endp[1] = L'\0'; + } + } + return rc; +} + +/* #define CP_UTF8 65001 defined in winnls.h */ +#define CP_UTF16LE 1200 +#define CP_UTF16BE 1201 +#define CP_UTF32LE 12000 +#define CP_UTF32BE 12001 + +typedef struct { + int length; + char sequence[4]; + UINT code_page; +} BOM; + +/* + * Strictly, we don't need to handle UTF-16 anf UTF-32, since Python itself + * doesn't. Never mind, one day it might - there's no harm leaving it in. + */ +static BOM BOMs[] = { + { 3, { 0xEF, 0xBB, 0xBF }, CP_UTF8 }, /* UTF-8 - keep first */ + { 2, { 0xFF, 0xFE }, CP_UTF16LE }, /* UTF-16LE */ + { 2, { 0xFE, 0xFF }, CP_UTF16BE }, /* UTF-16BE */ + { 4, { 0xFF, 0xFE, 0x00, 0x00 }, CP_UTF32LE }, /* UTF-32LE */ + { 4, { 0x00, 0x00, 0xFE, 0xFF }, CP_UTF32BE }, /* UTF-32BE */ + { 0 } /* sentinel */ +}; + +static BOM * +find_BOM(char * buffer) +{ +/* + * Look for a BOM in the input and return a pointer to the + * corresponding structure, or NULL if not found. 
+ */ + BOM * result = NULL; + BOM *bom; + + for (bom = BOMs; bom->length; bom++) { + if (strncmp(bom->sequence, buffer, bom->length) == 0) { + result = bom; + break; + } + } + return result; +} + +static char * +find_terminator(char * buffer, int len, BOM *bom) +{ + char * result = NULL; + char * end = buffer + len; + char * p; + char c; + int cp; + + for (p = buffer; p < end; p++) { + c = *p; + if (c == '\r') { + result = p; + break; + } + if (c == '\n') { + result = p; + break; + } + } + if (result != NULL) { + cp = bom->code_page; + + /* adjustments to include all bytes of the char */ + /* no adjustment needed for UTF-8 or big endian */ + if (cp == CP_UTF16LE) + ++result; + else if (cp == CP_UTF32LE) + result += 3; + ++result; /* point just past terminator */ + } + return result; +} + +static BOOL +validate_version(wchar_t * p) +{ + BOOL result = TRUE; + + if (!isdigit(*p)) /* expect major version */ + result = FALSE; + else if (*++p) { /* more to do */ + if (*p != L'.') /* major/minor separator */ + result = FALSE; + else { + ++p; + if (!isdigit(*p)) /* expect minor version */ + result = FALSE; + else { + ++p; + if (*p) { /* more to do */ + if (*p != L'-') + result = FALSE; + else { + ++p; + if ((*p != '3') && (*++p != '2') && !*++p) + result = FALSE; + } + } + } + } + } + return result; +} + +typedef struct { + unsigned short min; + unsigned short max; + wchar_t version[MAX_VERSION_SIZE]; +} PYC_MAGIC; + +static PYC_MAGIC magic_values[] = { + { 0xc687, 0xc687, L"2.0" }, + { 0xeb2a, 0xeb2a, L"2.1" }, + { 0xed2d, 0xed2d, L"2.2" }, + { 0xf23b, 0xf245, L"2.3" }, + { 0xf259, 0xf26d, L"2.4" }, + { 0xf277, 0xf2b3, L"2.5" }, + { 0xf2c7, 0xf2d1, L"2.6" }, + { 0xf2db, 0xf303, L"2.7" }, + { 0x0bb8, 0x0c3b, L"3.0" }, + { 0x0c45, 0x0c4f, L"3.1" }, + { 0x0c58, 0x0c6c, L"3.2" }, + { 0x0c76, 0x0c76, L"3.3" }, + { 0 } +}; + +static INSTALLED_PYTHON * +find_by_magic(unsigned short magic) +{ + INSTALLED_PYTHON * result = NULL; + PYC_MAGIC * mp; + + for (mp = magic_values; mp->min; mp++) { + if ((magic >= mp->min) && (magic <= mp->max)) { + result = locate_python(mp->version); + if (result != NULL) + break; + } + } + return result; +} + +static void +maybe_handle_shebang(wchar_t ** argv, wchar_t * cmdline) +{ +/* + * Look for a shebang line in the first argument. If found + * and we spawn a child process, this never returns. If it + * does return then we process the args "normally". + * + * argv[0] might be a filename with a shebang. 
+ */ + FILE * fp; + errno_t rc = _wfopen_s(&fp, *argv, L"rb"); + unsigned char buffer[BUFSIZE]; + wchar_t shebang_line[BUFSIZE + 1]; + size_t read; + char *p; + char * start; + char * shebang_alias = (char *) shebang_line; + BOM* bom; + int i, j, nchars = 0; + int header_len; + BOOL is_virt; + wchar_t * command; + wchar_t * suffix; + INSTALLED_PYTHON * ip; + + if (rc == 0) { + read = fread(buffer, sizeof(char), BUFSIZE, fp); + debug(L"maybe_handle_shebang: read %d bytes\n", read); + fclose(fp); + + if ((read >= 4) && (buffer[3] == '\n') && (buffer[2] == '\r')) { + ip = find_by_magic((buffer[1] << 8 | buffer[0]) & 0xFFFF); + if (ip != NULL) { + debug(L"script file is compiled against Python %s\n", + ip->version); + invoke_child(ip->executable, NULL, cmdline); + } + } + /* Look for BOM */ + bom = find_BOM(buffer); + if (bom == NULL) { + start = buffer; + debug(L"maybe_handle_shebang: BOM not found, using UTF-8\n"); + bom = BOMs; /* points to UTF-8 entry - the default */ + } + else { + debug(L"maybe_handle_shebang: BOM found, code page %d\n", + bom->code_page); + start = &buffer[bom->length]; + } + p = find_terminator(start, BUFSIZE, bom); + /* + * If no CR or LF was found in the heading, + * we assume it's not a shebang file. + */ + if (p == NULL) { + debug(L"maybe_handle_shebang: No line terminator found\n"); + } + else { + /* + * Found line terminator - parse the shebang. + * + * Strictly, we don't need to handle UTF-16 anf UTF-32, + * since Python itself doesn't. + * Never mind, one day it might. + */ + header_len = (int) (p - start); + switch(bom->code_page) { + case CP_UTF8: + nchars = MultiByteToWideChar(bom->code_page, + 0, + start, header_len, shebang_line, + BUFSIZE); + break; + case CP_UTF16BE: + if (header_len % 2 != 0) { + debug(L"maybe_handle_shebang: UTF-16BE, but an odd number \ +of bytes: %d\n", header_len); + /* nchars = 0; Not needed - initialised to 0. */ + } + else { + for (i = header_len; i > 0; i -= 2) { + shebang_alias[i - 1] = start[i - 2]; + shebang_alias[i - 2] = start[i - 1]; + } + nchars = header_len / sizeof(wchar_t); + } + break; + case CP_UTF16LE: + if ((header_len % 2) != 0) { + debug(L"UTF-16LE, but an odd number of bytes: %d\n", + header_len); + /* nchars = 0; Not needed - initialised to 0. */ + } + else { + /* no actual conversion needed. */ + memcpy(shebang_line, start, header_len); + nchars = header_len / sizeof(wchar_t); + } + break; + case CP_UTF32BE: + if (header_len % 4 != 0) { + debug(L"UTF-32BE, but not divisible by 4: %d\n", + header_len); + /* nchars = 0; Not needed - initialised to 0. */ + } + else { + for (i = header_len, j = header_len / 2; i > 0; i -= 4, + j -= 2) { + shebang_alias[j - 1] = start[i - 2]; + shebang_alias[j - 2] = start[i - 1]; + } + nchars = header_len / sizeof(wchar_t); + } + break; + case CP_UTF32LE: + if (header_len % 4 != 0) { + debug(L"UTF-32LE, but not divisible by 4: %d\n", + header_len); + /* nchars = 0; Not needed - initialised to 0. 
*/ + } + else { + for (i = header_len, j = header_len / 2; i > 0; i -= 4, + j -= 2) { + shebang_alias[j - 1] = start[i - 3]; + shebang_alias[j - 2] = start[i - 4]; + } + nchars = header_len / sizeof(wchar_t); + } + break; + } + if (nchars > 0) { + shebang_line[--nchars] = L'\0'; + is_virt = parse_shebang(shebang_line, nchars, &command, + &suffix); + if (command != NULL) { + debug(L"parse_shebang: found command: %s\n", command); + if (!is_virt) { + invoke_child(command, suffix, cmdline); + } + else { + suffix = wcschr(command, L' '); + if (suffix != NULL) { + *suffix++ = L'\0'; + suffix = skip_whitespace(suffix); + } + if (wcsncmp(command, L"python", 6)) + error(RC_BAD_VIRTUAL_PATH, L"Unknown virtual \ +path '%s'", command); + command += 6; /* skip past "python" */ + if (*command && !validate_version(command)) + error(RC_BAD_VIRTUAL_PATH, L"Invalid version \ +specification: '%s'.\nIn the first line of the script, 'python' needs to be \ +followed by a valid version specifier.\nPlease check the documentation.", + command); + /* TODO could call validate_version(command) */ + ip = locate_python(command); + if (ip == NULL) { + error(RC_NO_PYTHON, L"Requested Python version \ +(%s) is not installed", command); + } + else { + invoke_child(ip->executable, suffix, cmdline); + } + } + } + } + } + } +} + +static wchar_t * +skip_me(wchar_t * cmdline) +{ + BOOL quoted; + wchar_t c; + wchar_t * result = cmdline; + + quoted = cmdline[0] == L'\"'; + if (!quoted) + c = L' '; + else { + c = L'\"'; + ++result; + } + result = wcschr(result, c); + if (result == NULL) /* when, for example, just exe name on command line */ + result = L""; + else { + ++result; /* skip past space or closing quote */ + result = skip_whitespace(result); + } + return result; +} + +static DWORD version_high = 0; +static DWORD version_low = 0; + +static void +get_version_info(wchar_t * version_text, size_t size) +{ + WORD maj, min, rel, bld; + + if (!version_high && !version_low) + wcsncpy_s(version_text, size, L"0.1", _TRUNCATE); /* fallback */ + else { + maj = HIWORD(version_high); + min = LOWORD(version_high); + rel = HIWORD(version_low); + bld = LOWORD(version_low); + _snwprintf_s(version_text, size, _TRUNCATE, L"%d.%d.%d.%d", maj, + min, rel, bld); + } +} + +static int +process(int argc, wchar_t ** argv) +{ + wchar_t * wp; + wchar_t * command; + wchar_t * p; + int rc = 0; + size_t plen; + INSTALLED_PYTHON * ip; + BOOL valid; + DWORD size, attrs; + HRESULT hr; + wchar_t message[MSGSIZE]; + wchar_t version_text [MAX_PATH]; + void * version_data; + VS_FIXEDFILEINFO * file_info; + UINT block_size; + + wp = get_env(L"PYLAUNCH_DEBUG"); + if ((wp != NULL) && (*wp != L'\0')) + log_fp = stderr; + +#if defined(_M_X64) + debug(L"launcher build: 64bit\n"); +#else + debug(L"launcher build: 32bit\n"); +#endif +#if defined(_WINDOWS) + debug(L"launcher executable: Windows\n"); +#else + debug(L"launcher executable: Console\n"); +#endif + /* Get the local appdata folder (non-roaming) */ + hr = SHGetFolderPathW(NULL, CSIDL_LOCAL_APPDATA, + NULL, 0, appdata_ini_path); + if (hr != S_OK) { + debug(L"SHGetFolderPath failed: %X\n", hr); + appdata_ini_path[0] = L'\0'; + } + else { + plen = wcslen(appdata_ini_path); + p = &appdata_ini_path[plen]; + wcsncpy_s(p, MAX_PATH - plen, L"\\py.ini", _TRUNCATE); + attrs = GetFileAttributesW(appdata_ini_path); + if (attrs == INVALID_FILE_ATTRIBUTES) { + debug(L"File '%s' non-existent\n", appdata_ini_path); + appdata_ini_path[0] = L'\0'; + } else { + debug(L"Using local configuration file '%s'\n", appdata_ini_path); + 
} + } + plen = GetModuleFileNameW(NULL, launcher_ini_path, MAX_PATH); + size = GetFileVersionInfoSizeW(launcher_ini_path, &size); + if (size == 0) { + winerror(GetLastError(), message, MSGSIZE); + debug(L"GetFileVersionInfoSize failed: %s\n", message); + } + else { + version_data = malloc(size); + if (version_data) { + valid = GetFileVersionInfoW(launcher_ini_path, 0, size, + version_data); + if (!valid) + debug(L"GetFileVersionInfo failed: %X\n", GetLastError()); + else { + valid = VerQueryValueW(version_data, L"\\", &file_info, + &block_size); + if (!valid) + debug(L"VerQueryValue failed: %X\n", GetLastError()); + else { + version_high = file_info->dwFileVersionMS; + version_low = file_info->dwFileVersionLS; + } + } + free(version_data); + } + } + p = wcsrchr(launcher_ini_path, L'\\'); + if (p == NULL) { + debug(L"GetModuleFileNameW returned value has no backslash: %s\n", + launcher_ini_path); + launcher_ini_path[0] = L'\0'; + } + else { + wcsncpy_s(p, MAX_PATH - (p - launcher_ini_path), L"\\py.ini", + _TRUNCATE); + attrs = GetFileAttributesW(launcher_ini_path); + if (attrs == INVALID_FILE_ATTRIBUTES) { + debug(L"File '%s' non-existent\n", launcher_ini_path); + launcher_ini_path[0] = L'\0'; + } else { + debug(L"Using global configuration file '%s'\n", launcher_ini_path); + } + } + + command = skip_me(GetCommandLineW()); + debug(L"Called with command line: %s", command); + if (argc <= 1) { + valid = FALSE; + p = NULL; + } + else { + p = argv[1]; + plen = wcslen(p); + if (p[0] != L'-') { + read_commands(); + maybe_handle_shebang(&argv[1], command); + } + /* No file with shebang, or an unrecognised shebang. + * Is the first arg a special version qualifier? + */ + valid = (*p == L'-') && validate_version(&p[1]); + if (valid) { + ip = locate_python(&p[1]); + if (ip == NULL) + error(RC_NO_PYTHON, L"Requested Python version (%s) not \ +installed", &p[1]); + command += wcslen(p); + command = skip_whitespace(command); + } + } + if (!valid) { + ip = locate_python(L""); + if (ip == NULL) + error(RC_NO_PYTHON, L"Can't find a default Python."); + if ((argc == 2) && (!_wcsicmp(p, L"-h") || !_wcsicmp(p, L"--help"))) { +#if defined(_M_X64) + BOOL canDo64bit = TRUE; +#else + // If we are a 32bit process on a 64bit Windows, first hit the 64bit keys. 
+ BOOL canDo64bit = FALSE; + IsWow64Process(GetCurrentProcess(), &canDo64bit); +#endif + + get_version_info(version_text, MAX_PATH); + fwprintf(stdout, L"\ +Python Launcher for Windows Version %s\n\n", version_text); + fwprintf(stdout, L"\ +usage: %s [ launcher-arguments ] script [ script-arguments ]\n\n", argv[0]); + fputws(L"\ +Launcher arguments:\n\n\ +-2 : Launch the latest Python 2.x version\n\ +-3 : Launch the latest Python 3.x version\n\ +-X.Y : Launch the specified Python version\n", stdout); + if (canDo64bit) { + fputws(L"\ +-X.Y-32: Launch the specified 32bit Python version", stdout); + } + fputws(L"\n\nThe following help text is from Python:\n\n", stdout); + fflush(stdout); + } + } + invoke_child(ip->executable, NULL, command); + return rc; +} + +#if defined(_WINDOWS) + +int WINAPI wWinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, + LPWSTR lpstrCmd, int nShow) +{ + return process(__argc, __wargv); +} + +#else + +int cdecl wmain(int argc, wchar_t ** argv) +{ + return process(argc, argv); +} + +#endif \ No newline at end of file diff --git a/PC/launcher.ico b/PC/launcher.ico new file mode 100644 index 0000000000000000000000000000000000000000..dad7d572ce781b7b0916ed669207f1ae3b9ad83c GIT binary patch [stripped] diff --git a/PC/pylauncher.rc b/PC/pylauncher.rc new file mode 100644 --- /dev/null +++ b/PC/pylauncher.rc @@ -0,0 +1,51 @@ +#include + +#define MS_WINDOWS +#include "..\Include\modsupport.h" +#include "..\Include\patchlevel.h" +#ifdef _DEBUG +# include "pythonnt_rc_d.h" +#else +# include "pythonnt_rc.h" +#endif + +#define PYTHON_VERSION PY_VERSION "\0" +#define PYVERSION64 PY_MAJOR_VERSION, PY_MINOR_VERSION, FIELD3, PYTHON_API_VERSION + +VS_VERSION_INFO VERSIONINFO + FILEVERSION PYVERSION64 + PRODUCTVERSION PYVERSION64 + FILEFLAGSMASK 0x17L +#ifdef _DEBUG + FILEFLAGS 0x1L +#else + FILEFLAGS 0x0L +#endif + FILEOS 0x4L + FILETYPE 0x1L + FILESUBTYPE 0x0L +BEGIN + BLOCK "StringFileInfo" + BEGIN + BLOCK "080904b0" + BEGIN + VALUE "Comments", "Python Launcher for Windows" + VALUE "CompanyName", "Python Software Foundation" + VALUE "FileDescription", "Python Launcher for Windows (Console)" + VALUE "FileVersion", PYTHON_VERSION + VALUE "InternalName", "py" + VALUE "LegalCopyright", "Copyright (C) 2011-2012 Python Software Foundation" + VALUE "OriginalFilename", "py" + VALUE "ProductName", "Python Launcher for Windows" + VALUE "ProductVersion", PYTHON_VERSION + END + END + BLOCK "VarFileInfo" + BEGIN + VALUE "Translation", 0x809, 1200 + END +END + +IDI_ICON1 ICON "launcher.ico" + + diff --git a/PCbuild/pcbuild.sln b/PCbuild/pcbuild.sln --- a/PCbuild/pcbuild.sln +++ b/PCbuild/pcbuild.sln @@ -68,6 +68,10 @@ EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_testbuffer", "_testbuffer.vcxproj", "{A2697BD3-28C1-4AEC-9106-8B748639FD16}" EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "pylauncher", "pylauncher.vcxproj", "{7B2727B5-5A3F-40EE-A866-43A13CD31446}" +EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "pywlauncher", "pywlauncher.vcxproj", "{1D4B18D3-7C12-4ECB-9179-8531FF876CE6}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Win32 = Debug|Win32 @@ -565,6 +569,46 @@ {A2697BD3-28C1-4AEC-9106-8B748639FD16}.Release|Win32.Build.0 = Release|Win32 {A2697BD3-28C1-4AEC-9106-8B748639FD16}.Release|x64.ActiveCfg = Release|x64 {A2697BD3-28C1-4AEC-9106-8B748639FD16}.Release|x64.Build.0 = Release|x64 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Debug|Win32.ActiveCfg = Debug|Win32 + 
{7B2727B5-5A3F-40EE-A866-43A13CD31446}.Debug|Win32.Build.0 = Debug|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Debug|x64.ActiveCfg = Debug|x64 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Debug|x64.Build.0 = Debug|x64 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.PGInstrument|Win32.ActiveCfg = Release|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.PGInstrument|Win32.Build.0 = Release|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.PGInstrument|x64.ActiveCfg = Release|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.PGUpdate|Win32.ActiveCfg = Release|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.PGUpdate|Win32.Build.0 = Release|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.PGUpdate|x64.ActiveCfg = Release|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Release|Win32.ActiveCfg = Release|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Release|Win32.Build.0 = Release|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Release|x64.ActiveCfg = Release|Win32 + {7B2727B5-5A3F-40EE-A866-43A13CD31446}.Release|x64.Build.0 = Release|Win32 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Debug|Win32.ActiveCfg = Debug|Win32 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Debug|Win32.Build.0 = Debug|Win32 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Debug|x64.ActiveCfg = Debug|x64 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Debug|x64.Build.0 = Debug|x64 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.PGInstrument|Win32.ActiveCfg = Release|x64 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.PGInstrument|x64.ActiveCfg = Release|Win32 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.PGInstrument|x64.Build.0 = Release|Win32 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.PGUpdate|Win32.ActiveCfg = Release|x64 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.PGUpdate|x64.ActiveCfg = Release|Win32 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.PGUpdate|x64.Build.0 = Release|Win32 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Release|Win32.ActiveCfg = Release|Win32 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Release|Win32.Build.0 = Release|Win32 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Release|x64.ActiveCfg = Release|Win32 + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6}.Release|x64.Build.0 = Release|Win32 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.Debug|Win32.ActiveCfg = Debug|Win32 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.Debug|Win32.Build.0 = Debug|Win32 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.Debug|x64.ActiveCfg = Debug|Win32 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.PGInstrument|Win32.ActiveCfg = Release|Win32 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.PGInstrument|Win32.Build.0 = Release|Win32 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.PGInstrument|x64.ActiveCfg = Release|Win32 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.PGUpdate|Win32.ActiveCfg = Release|Win32 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.PGUpdate|Win32.Build.0 = Release|Win32 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.PGUpdate|x64.ActiveCfg = Release|Win32 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.Release|Win32.ActiveCfg = Release|Win32 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.Release|Win32.Build.0 = Release|Win32 + {023B3CDA-59C8-45FD-95DC-F8973322ED34}.Release|x64.ActiveCfg = Release|Win32 EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/PCbuild/pylauncher.vcxproj b/PCbuild/pylauncher.vcxproj new file mode 100644 --- /dev/null +++ b/PCbuild/pylauncher.vcxproj @@ -0,0 +1,165 @@ +? 
+ + + + Debug + Win32 + + + Debug + x64 + + + Release + Win32 + + + Release + x64 + + + + {7B2727B5-5A3F-40EE-A866-43A13CD31446} + pylauncher + + + + Application + true + MultiByte + + + Application + true + MultiByte + + + Application + false + true + MultiByte + + + Application + false + true + MultiByte + + + + + + + + + + + + + + + + + + + + + + + + + + + + + py_d + + + py_d + + + py + + + py + + + + Level3 + Disabled + _CONSOLE;%(PreprocessorDefinitions) + + + true + version.lib;%(AdditionalDependencies) + false + Console + $(OutDir)$(TargetName)$(TargetExt) + + + + + Level3 + Disabled + _CONSOLE;%(PreprocessorDefinitions) + + + true + version.lib;%(AdditionalDependencies) + false + Console + $(OutDir)$(TargetName)$(TargetExt) + + + + + Level3 + MaxSpeed + true + true + _CONSOLE;NDEBUG;%(PreprocessorDefinitions) + + + true + true + true + false + version.lib;%(AdditionalDependencies) + Console + + + + + Level3 + MaxSpeed + true + true + _CONSOLE;NDEBUG;%(PreprocessorDefinitions) + + + true + true + true + false + version.lib;%(AdditionalDependencies) + Console + + + + + + + + + + + + + + {f0e0541e-f17d-430b-97c4-93adf0dd284e} + + + + + + \ No newline at end of file diff --git a/PCbuild/pylauncher.vcxproj.filters b/PCbuild/pylauncher.vcxproj.filters new file mode 100644 --- /dev/null +++ b/PCbuild/pylauncher.vcxproj.filters @@ -0,0 +1,32 @@ +? + + + + {4FC737F1-C7A5-4376-A066-2A32D752A2FF} + cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx + + + {93995380-89BD-4b04-88EB-625FBE52EBFB} + h;hpp;hxx;hm;inl;inc;xsd + + + {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} + rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms + + + + + Source Files + + + + + Resource Files + + + + + Resource Files + + + \ No newline at end of file diff --git a/PCbuild/pywlauncher.vcxproj b/PCbuild/pywlauncher.vcxproj new file mode 100644 --- /dev/null +++ b/PCbuild/pywlauncher.vcxproj @@ -0,0 +1,160 @@ +? + + + + Debug + Win32 + + + Debug + x64 + + + Release + Win32 + + + Release + x64 + + + + {1D4B18D3-7C12-4ECB-9179-8531FF876CE6} + pywlauncher + + + + Application + true + Unicode + + + Application + true + Unicode + + + Application + false + true + Unicode + + + Application + false + true + Unicode + + + + + + + + + + + + + + + + + + + + + + + + + + + + + pyw_d + + + pyw_d + + + pyw + + + pyw + + + + Level3 + Disabled + _WINDOWS;%(PreprocessorDefinitions) + + + true + version.lib;%(AdditionalDependencies) + false + Windows + $(OutDir)$(TargetName)$(TargetExt) + + + + + Level3 + Disabled + _WINDOWS;%(PreprocessorDefinitions) + + + true + version.lib;%(AdditionalDependencies) + false + Windows + $(OutDir)$(TargetName)$(TargetExt) + + + + + Level3 + MaxSpeed + true + true + _WINDOWS;NDEBUG;%(PreprocessorDefinitions) + + + true + true + true + false + version.lib;%(AdditionalDependencies) + Windows + + + + + Level3 + MaxSpeed + true + true + _WINDOWS;NDEBUG;%(PreprocessorDefinitions) + + + true + true + true + false + version.lib;%(AdditionalDependencies) + Windows + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/PCbuild/pywlauncher.vcxproj.filters b/PCbuild/pywlauncher.vcxproj.filters new file mode 100644 --- /dev/null +++ b/PCbuild/pywlauncher.vcxproj.filters @@ -0,0 +1,32 @@ +? 
+ + + + {4FC737F1-C7A5-4376-A066-2A32D752A2FF} + cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx + + + {93995380-89BD-4b04-88EB-625FBE52EBFB} + h;hpp;hxx;hm;inl;inc;xsd + + + {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} + rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms + + + + + Source Files + + + + + Resource Files + + + + + Resource Files + + + \ No newline at end of file diff --git a/Tools/msi/msi.py b/Tools/msi/msi.py --- a/Tools/msi/msi.py +++ b/Tools/msi/msi.py @@ -286,7 +286,7 @@ None, migrate_features, None, "REMOVEOLDSNAPSHOT")]) props = "REMOVEOLDSNAPSHOT;REMOVEOLDVERSION" - props += ";TARGETDIR;DLLDIR" + props += ";TARGETDIR;DLLDIR;LAUNCHERDIR" # Installer collects the product codes of the earlier releases in # these properties. In order to allow modification of the properties, # they must be declared as secure. See "SecureCustomProperties Property" @@ -426,6 +426,8 @@ "[WindowsVolume]Python%s%s" % (major, minor)), ("SetDLLDirToTarget", 307, "DLLDIR", "[TARGETDIR]"), ("SetDLLDirToSystem32", 307, "DLLDIR", SystemFolderName), + ("SetLauncherDirToTarget", 307, "LAUNCHERDIR", "[TARGETDIR]"), + ("SetLauncherDirToWindows", 307, "LAUNCHERDIR", "[WindowsFolder]"), # msidbCustomActionTypeExe + msidbCustomActionTypeSourceFile # See "Custom Action Type 18" ("CompilePyc", 18, "python.exe", compileargs), @@ -442,6 +444,8 @@ # In the user interface, assume all-users installation if privileged. ("SetDLLDirToSystem32", 'DLLDIR="" and ' + sys32cond, 751), ("SetDLLDirToTarget", 'DLLDIR="" and not ' + sys32cond, 752), + ("SetLauncherDirToWindows", 'LAUNCHERDIR="" and ' + sys32cond, 753), + ("SetLauncherDirToTarget", 'LAUNCHERDIR="" and not ' + sys32cond, 754), ("SelectDirectoryDlg", "Not Installed", 1230), # XXX no support for resume installations yet #("ResumeDlg", "Installed AND (RESUME OR Preselected)", 1240), @@ -450,6 +454,7 @@ add_data(db, "AdminUISequence", [("InitialTargetDir", 'TARGETDIR=""', 750), ("SetDLLDirToTarget", 'DLLDIR=""', 751), + ("SetLauncherDirToTarget", 'LAUNCHERDIR=""', 752), ]) # Prepend TARGETDIR to the system path, and remove it on uninstall. 
@@ -461,6 +466,8 @@ [("InitialTargetDir", 'TARGETDIR=""', 750), ("SetDLLDirToSystem32", 'DLLDIR="" and ' + sys32cond, 751), ("SetDLLDirToTarget", 'DLLDIR="" and not ' + sys32cond, 752), + ("SetLauncherDirToWindows", 'LAUNCHERDIR="" and ' + sys32cond, 753), + ("SetLauncherDirToTarget", 'LAUNCHERDIR="" and not ' + sys32cond, 754), ("UpdateEditIDLE", None, 1050), ("CompilePyc", "COMPILEALL", 6800), ("CompilePyo", "COMPILEALL", 6801), @@ -469,6 +476,7 @@ add_data(db, "AdminExecuteSequence", [("InitialTargetDir", 'TARGETDIR=""', 750), ("SetDLLDirToTarget", 'DLLDIR=""', 751), + ("SetLauncherDirToTarget", 'LAUNCHERDIR=""', 752), ("CompilePyc", "COMPILEALL", 6800), ("CompilePyo", "COMPILEALL", 6801), ("CompileGrammar", "COMPILEALL", 6802), @@ -904,7 +912,7 @@ dirs = glob.glob(srcdir+"/../"+pat) if not dirs: raise ValueError, "Could not find "+srcdir+"/../"+pat - if len(dirs) > 2: + if len(dirs) > 2 and not snapshot: raise ValueError, "Multiple copies of "+pat dir = dirs[0] shutil.copyfileobj(open(os.path.join(dir, file)), out) @@ -939,6 +947,7 @@ # See "File Table", "Component Table", "Directory Table", # "FeatureComponents Table" def add_files(db): + installer = msilib.MakeInstaller() hgfiles = hgmanifest() cab = CAB("python") tmpfiles = [] @@ -958,11 +967,27 @@ # msidbComponentAttributesSharedDllRefCount = 8, see "Component Table" dlldir = PyDirectory(db, cab, root, srcdir, "DLLDIR", ".") + launcherdir = PyDirectory(db, cab, root, srcdir, "LAUNCHERDIR", ".") + + # msidbComponentAttributes64bit = 256; this disables registry redirection + # to allow setting the SharedDLLs key in the 64-bit portion even for a + # 32-bit installer. + # XXX does this still allow to install the component on a 32-bit system? + # Pick up 32-bit binary always + launcher = os.path.join(srcdir, "PCBuild", "py.exe") + launcherdir.start_component("launcher", flags = 8+256, keyfile="py.exe") + launcherdir.add_file("%s/py.exe" % PCBUILD, + version=installer.FileVersion(launcher, 0), + language=installer.FileVersion(launcher, 1)) + launcherw = os.path.join(srcdir, "PCBuild", "pyw.exe") + launcherdir.start_component("launcherw", flags = 8+256, keyfile="pyw.exe") + launcherdir.add_file("%s/pyw.exe" % PCBUILD, + version=installer.FileVersion(launcherw, 0), + language=installer.FileVersion(launcherw, 1)) pydll = "python%s%s.dll" % (major, minor) pydllsrc = os.path.join(srcdir, PCBUILD, pydll) dlldir.start_component("DLLDIR", flags = 8, keyfile = pydll, uuid = pythondll_uuid) - installer = msilib.MakeInstaller() pyversion = installer.FileVersion(pydllsrc, 0) if not snapshot: # For releases, the Python DLL has the same version as the @@ -1211,11 +1236,11 @@ "text/plain", "REGISTRY.def"), #Verbs ("py.open", -1, pat % (testprefix, "", "open"), "", - r'"[TARGETDIR]python.exe" "%1" %*', "REGISTRY.def"), + r'"[LAUNCHERDIR]py.exe" "%1" %*', "REGISTRY.def"), ("pyw.open", -1, pat % (testprefix, "NoCon", "open"), "", - r'"[TARGETDIR]pythonw.exe" "%1" %*', "REGISTRY.def"), + r'"[LAUNCHERDIR]pyw.exe" "%1" %*', "REGISTRY.def"), ("pyc.open", -1, pat % (testprefix, "Compiled", "open"), "", - r'"[TARGETDIR]python.exe" "%1" %*', "REGISTRY.def"), + r'"[LAUNCHERDIR]py.exe" "%1" %*', "REGISTRY.def"), ] + tcl_verbs + [ #Icons ("py.icon", -1, pat2 % (testprefix, ""), "", -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 01:51:39 2012 From: python-checkins at python.org (nadeem.vawda) Date: Fri, 22 Jun 2012 01:51:39 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Make_lzma=2E=7Bencode=2Cdec?= 
=?utf8?q?ode=7D=5Ffilter=5Fproperties_private=2E?= Message-ID: http://hg.python.org/cpython/rev/0f470b91b710 changeset: 77557:0f470b91b710 user: Nadeem Vawda date: Thu Jun 21 23:36:48 2012 +0200 summary: Make lzma.{encode,decode}_filter_properties private. These functions were originally added to support LZMA compression in the zipfile module, and are not of interest for the majority of users. They can be made public in 3.4 if there is user interest, but in the meanwhile, I've opted to present a smaller, simpler API for the module's initial release. files: Doc/library/lzma.rst | 26 ----------------------- Lib/lzma.py | 2 +- Lib/test/test_lzma.py | 26 +++++++++++----------- Lib/zipfile.py | 6 ++-- Modules/_lzmamodule.c | 34 ++++++++++++------------------ 5 files changed, 31 insertions(+), 63 deletions(-) diff --git a/Doc/library/lzma.rst b/Doc/library/lzma.rst --- a/Doc/library/lzma.rst +++ b/Doc/library/lzma.rst @@ -268,32 +268,6 @@ feature set. -.. function:: encode_filter_properties(filter) - - Return a :class:`bytes` object encoding the options (properties) of the - filter specified by *filter* (a dictionary). - - *filter* is interpreted as a filter specifier, as described in - :ref:`filter-chain-specs`. - - The returned data does not include the filter ID itself, only the options. - - This function is primarily of interest to users implementing custom file - formats. - - -.. function:: decode_filter_properties(filter_id, encoded_props) - - Return a dictionary describing a filter with ID *filter_id*, and options - (properties) decoded from the :class:`bytes` object *encoded_props*. - - The returned dictionary is a filter specifier, as described in - :ref:`filter-chain-specs`. - - This function is primarily of interest to users implementing custom file - formats. - - .. _filter-chain-specs: Specifying custom filter chains diff --git a/Lib/lzma.py b/Lib/lzma.py --- a/Lib/lzma.py +++ b/Lib/lzma.py @@ -19,12 +19,12 @@ "LZMACompressor", "LZMADecompressor", "LZMAFile", "LZMAError", "open", "compress", "decompress", "is_check_supported", - "encode_filter_properties", "decode_filter_properties", ] import builtins import io from _lzma import * +from _lzma import _encode_filter_properties, _decode_filter_properties _MODE_CLOSED = 0 diff --git a/Lib/test/test_lzma.py b/Lib/test/test_lzma.py --- a/Lib/test/test_lzma.py +++ b/Lib/test/test_lzma.py @@ -1073,19 +1073,19 @@ # This value should not be a valid check ID. self.assertFalse(lzma.is_check_supported(lzma.CHECK_UNKNOWN)) - def test_encode_filter_properties(self): + def test__encode_filter_properties(self): with self.assertRaises(TypeError): - lzma.encode_filter_properties(b"not a dict") + lzma._encode_filter_properties(b"not a dict") with self.assertRaises(ValueError): - lzma.encode_filter_properties({"id": 0x100}) + lzma._encode_filter_properties({"id": 0x100}) with self.assertRaises(ValueError): - lzma.encode_filter_properties({"id": lzma.FILTER_LZMA2, "junk": 12}) + lzma._encode_filter_properties({"id": lzma.FILTER_LZMA2, "junk": 12}) with self.assertRaises(lzma.LZMAError): - lzma.encode_filter_properties({"id": lzma.FILTER_DELTA, + lzma._encode_filter_properties({"id": lzma.FILTER_DELTA, "dist": 9001}) # Test with parameters used by zipfile module. 
- props = lzma.encode_filter_properties({ + props = lzma._encode_filter_properties({ "id": lzma.FILTER_LZMA1, "pb": 2, "lp": 0, @@ -1094,14 +1094,14 @@ }) self.assertEqual(props, b"]\x00\x00\x80\x00") - def test_decode_filter_properties(self): + def test__decode_filter_properties(self): with self.assertRaises(TypeError): - lzma.decode_filter_properties(lzma.FILTER_X86, {"should be": bytes}) + lzma._decode_filter_properties(lzma.FILTER_X86, {"should be": bytes}) with self.assertRaises(lzma.LZMAError): - lzma.decode_filter_properties(lzma.FILTER_DELTA, b"too long") + lzma._decode_filter_properties(lzma.FILTER_DELTA, b"too long") # Test with parameters used by zipfile module. - filterspec = lzma.decode_filter_properties( + filterspec = lzma._decode_filter_properties( lzma.FILTER_LZMA1, b"]\x00\x00\x80\x00") self.assertEqual(filterspec["id"], lzma.FILTER_LZMA1) self.assertEqual(filterspec["pb"], 2) @@ -1110,10 +1110,10 @@ self.assertEqual(filterspec["dict_size"], 8 << 20) def test_filter_properties_roundtrip(self): - spec1 = lzma.decode_filter_properties( + spec1 = lzma._decode_filter_properties( lzma.FILTER_LZMA1, b"]\x00\x00\x80\x00") - reencoded = lzma.encode_filter_properties(spec1) - spec2 = lzma.decode_filter_properties(lzma.FILTER_LZMA1, reencoded) + reencoded = lzma._encode_filter_properties(spec1) + spec2 = lzma._decode_filter_properties(lzma.FILTER_LZMA1, reencoded) self.assertEqual(spec1, spec2) diff --git a/Lib/zipfile.py b/Lib/zipfile.py --- a/Lib/zipfile.py +++ b/Lib/zipfile.py @@ -495,9 +495,9 @@ self._comp = None def _init(self): - props = lzma.encode_filter_properties({'id': lzma.FILTER_LZMA1}) + props = lzma._encode_filter_properties({'id': lzma.FILTER_LZMA1}) self._comp = lzma.LZMACompressor(lzma.FORMAT_RAW, filters=[ - lzma.decode_filter_properties(lzma.FILTER_LZMA1, props) + lzma._decode_filter_properties(lzma.FILTER_LZMA1, props) ]) return struct.pack(' bytes\n" +PyDoc_STRVAR(_encode_filter_properties_doc, +"_encode_filter_properties(filter) -> bytes\n" "\n" "Return a bytes object encoding the options (properties) of the filter\n" "specified by *filter* (a dict).\n" "\n" -"The result does not include the filter ID itself, only the options.\n" -"\n" -"This function is primarily of interest to users implementing custom\n" -"file formats.\n"); +"The result does not include the filter ID itself, only the options.\n"); static PyObject * -encode_filter_properties(PyObject *self, PyObject *args) +_encode_filter_properties(PyObject *self, PyObject *args) { PyObject *filterspec; lzma_filter filter; @@ -1117,7 +1114,7 @@ uint32_t encoded_size; PyObject *result = NULL; - if (!PyArg_ParseTuple(args, "O:encode_filter_properties", &filterspec)) + if (!PyArg_ParseTuple(args, "O:_encode_filter_properties", &filterspec)) return NULL; if (parse_filter_spec(&filter, filterspec) == NULL) @@ -1146,24 +1143,21 @@ } -PyDoc_STRVAR(decode_filter_properties_doc, -"decode_filter_properties(filter_id, encoded_props) -> dict\n" +PyDoc_STRVAR(_decode_filter_properties_doc, +"_decode_filter_properties(filter_id, encoded_props) -> dict\n" "\n" "Return a dict describing a filter with ID *filter_id*, and options\n" -"(properties) decoded from the bytes object *encoded_props*.\n" -"\n" -"This function is primarily of interest to users implementing custom\n" -"file formats.\n"); +"(properties) decoded from the bytes object *encoded_props*.\n"); static PyObject * -decode_filter_properties(PyObject *self, PyObject *args) +_decode_filter_properties(PyObject *self, PyObject *args) { Py_buffer encoded_props; 
lzma_filter filter; lzma_ret lzret; PyObject *result = NULL; - if (!PyArg_ParseTuple(args, "O&y*:decode_filter_properties", + if (!PyArg_ParseTuple(args, "O&y*:_decode_filter_properties", lzma_vli_converter, &filter.id, &encoded_props)) return NULL; @@ -1187,10 +1181,10 @@ static PyMethodDef module_methods[] = { {"is_check_supported", (PyCFunction)is_check_supported, METH_VARARGS, is_check_supported_doc}, - {"encode_filter_properties", (PyCFunction)encode_filter_properties, - METH_VARARGS, encode_filter_properties_doc}, - {"decode_filter_properties", (PyCFunction)decode_filter_properties, - METH_VARARGS, decode_filter_properties_doc}, + {"_encode_filter_properties", (PyCFunction)_encode_filter_properties, + METH_VARARGS, _encode_filter_properties_doc}, + {"_decode_filter_properties", (PyCFunction)_decode_filter_properties, + METH_VARARGS, _decode_filter_properties_doc}, {NULL} }; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 01:51:40 2012 From: python-checkins at python.org (nadeem.vawda) Date: Fri, 22 Jun 2012 01:51:40 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Tidy_up_comments_from_dd4f7?= =?utf8?q?d5c51c7_=28zlib_compression_dictionary_support=29=2E?= Message-ID: http://hg.python.org/cpython/rev/4931cbc58cde changeset: 77558:4931cbc58cde user: Nadeem Vawda date: Fri Jun 22 00:35:57 2012 +0200 summary: Tidy up comments from dd4f7d5c51c7 (zlib compression dictionary support). files: Lib/test/test_zlib.py | 13 ++++++------- Modules/zlibmodule.c | 2 +- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/Lib/test/test_zlib.py b/Lib/test/test_zlib.py --- a/Lib/test/test_zlib.py +++ b/Lib/test/test_zlib.py @@ -427,24 +427,23 @@ def test_dictionary(self): h = HAMLET_SCENE - # build a simulated dictionary out of the words in HAMLET + # Build a simulated dictionary out of the words in HAMLET. words = h.split() random.shuffle(words) zdict = b''.join(words) - # use it to compress HAMLET + # Use it to compress HAMLET. co = zlib.compressobj(zdict=zdict) cd = co.compress(h) + co.flush() - # verify that it will decompress with the dictionary + # Verify that it will decompress with the dictionary. dco = zlib.decompressobj(zdict=zdict) self.assertEqual(dco.decompress(cd) + dco.flush(), h) - # verify that it fails when not given the dictionary + # Verify that it fails when not given the dictionary. dco = zlib.decompressobj() self.assertRaises(zlib.error, dco.decompress, cd) def test_dictionary_streaming(self): - # this is simulating the needs of SPDY to be able to reuse the same - # stream object (with its compression state) between sets of compressed - # headers. + # This simulates the reuse of a compressor object for compressing + # several separate data streams. co = zlib.compressobj(zdict=HAMLET_SCENE) do = zlib.decompressobj(zdict=HAMLET_SCENE) piece = HAMLET_SCENE[1000:1500] diff --git a/Modules/zlibmodule.c b/Modules/zlibmodule.c --- a/Modules/zlibmodule.c +++ b/Modules/zlibmodule.c @@ -619,7 +619,7 @@ RetVal = NULL; goto error; } - /* repeat the call to inflate! */ + /* Repeat the call to inflate. 
*/ Py_BEGIN_ALLOW_THREADS err = inflate(&(self->zst), Z_SYNC_FLUSH); Py_END_ALLOW_THREADS -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 01:51:41 2012 From: python-checkins at python.org (nadeem.vawda) Date: Fri, 22 Jun 2012 01:51:41 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Document_the_rest_of_zlib?= =?utf8?b?LmNvbXByZXNzb2JqKCkncyBhcmd1bWVudHMu?= Message-ID: http://hg.python.org/cpython/rev/1cfa44cb5af0 changeset: 77559:1cfa44cb5af0 user: Nadeem Vawda date: Fri Jun 22 01:40:49 2012 +0200 summary: Document the rest of zlib.compressobj()'s arguments. Original patch by Jim Jewett; see issue 14684. files: Doc/library/zlib.rst | 22 ++++++++++++++++++---- Misc/ACKS | 1 + Modules/zlibmodule.c | 21 +++++++++++++++++---- 3 files changed, 36 insertions(+), 8 deletions(-) diff --git a/Doc/library/zlib.rst b/Doc/library/zlib.rst --- a/Doc/library/zlib.rst +++ b/Doc/library/zlib.rst @@ -58,14 +58,28 @@ exception if any error occurs. -.. function:: compressobj([level[, method[, wbits[, memlevel[, strategy[, zdict]]]]]]) +.. function:: compressobj(level=-1, method=DEFLATED, wbits=15, memlevel=8, strategy=Z_DEFAULT_STRATEGY, [zdict]) Returns a compression object, to be used for compressing data streams that won't fit into memory at once. - *level* is an integer from ``1`` to ``9`` controlling the level of - compression; ``1`` is fastest and produces the least compression, ``9`` is - slowest and produces the most. The default value is ``6``. + *level* is the compression level -- an integer from ``1`` to ``9``. A value + of ``1`` is fastest and produces the least compression, while a value of + ``9`` is slowest and produces the most. The default value is ``6``. + + *method* is the compression algorithm. Currently, the only supported value is + ``DEFLATED``. + + *wbits* is the base two logarithm of the size of the window buffer. This + should be an integer from ``8`` to ``15``. Higher values give better + compression, but use more memory. + + *memlevel* controls the amount of memory used for internal compression state. + Valid values range from ``1`` to ``9``. Higher values using more memory, + but are faster and produce smaller output. + + *strategy* is used to tune the compression algorithm. Possible values are + ``Z_DEFAULT_STRATEGY``, ``Z_FILTERED``, and ``Z_HUFFMAN_ONLY``. *zdict* is a predefined compression dictionary. This is a sequence of bytes (such as a :class:`bytes` object) containing subsequences that are expected diff --git a/Misc/ACKS b/Misc/ACKS --- a/Misc/ACKS +++ b/Misc/ACKS @@ -503,6 +503,7 @@ Drew Jenkins Flemming Kj?r Jensen MunSic Jeong +Jim Jewett Orjan Johansen Fredrik Johansson Gregory K. Johnson diff --git a/Modules/zlibmodule.c b/Modules/zlibmodule.c --- a/Modules/zlibmodule.c +++ b/Modules/zlibmodule.c @@ -81,13 +81,26 @@ } PyDoc_STRVAR(compressobj__doc__, -"compressobj([level[, method[, wbits[, memlevel[, strategy[, zdict]]]]]])\n" +"compressobj(level=-1, method=DEFLATED, wbits=15, memlevel=8,\n" +" strategy=Z_DEFAULT_STRATEGY[, zdict])\n" " -- Return a compressor object.\n" "\n" -"Optional arg level is the compression level, in 1-9.\n" +"level is the compression level (an integer in the range 0-9; default is 6).\n" +"Higher compression levels are slower, but produce smaller results.\n" "\n" -"Optional arg zdict is the predefined compression dictionary - a sequence of\n" -"bytes containing subsequences that are likely to occur in the input data."); +"method is the compression algorithm. 
If given, this must be DEFLATED.\n" +"\n" +"wbits is the base two logarithm of the window size (range: 8..15).\n" +"\n" +"memlevel controls the amount of memory used for internal compression state.\n" +"Valid values range from 1 to 9. Higher values result in higher memory usage,\n" +"faster compression, and smaller output.\n" +"\n" +"strategy is used to tune the compression algorithm. Possible values are\n" +"Z_DEFAULT_STRATEGY, Z_FILTERED, and Z_HUFFMAN_ONLY.\n" +"\n" +"zdict is the predefined compression dictionary - a sequence of bytes\n" +"containing subsequences that are likely to occur in the input data."); PyDoc_STRVAR(decompressobj__doc__, "decompressobj([wbits[, zdict]]) -- Return a decompressor object.\n" -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 02:49:12 2012 From: python-checkins at python.org (alexander.belopolsky) Date: Fri, 22 Jun 2012 02:49:12 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogSXNzdWUgIzE0NjUz?= =?utf8?q?=3A_email=2Eutils=2Emktime=5Ftz=28=29_no_longer_relies_on_system?= Message-ID: http://hg.python.org/cpython/rev/ffc048f43a70 changeset: 77560:ffc048f43a70 branch: 3.2 parent: 77538:d042bd8625f3 user: Alexander Belopolsky date: Thu Jun 21 20:34:09 2012 -0400 summary: Issue #14653: email.utils.mktime_tz() no longer relies on system mktime() when timezone offest is supplied. files: Lib/email/_parseaddr.py | 8 ++++---- Lib/email/test/test_email.py | 6 ++++++ Misc/NEWS | 3 +++ 3 files changed, 13 insertions(+), 4 deletions(-) diff --git a/Lib/email/_parseaddr.py b/Lib/email/_parseaddr.py --- a/Lib/email/_parseaddr.py +++ b/Lib/email/_parseaddr.py @@ -13,7 +13,7 @@ 'quote', ] -import time +import time, calendar SPACE = ' ' EMPTYSTRING = '' @@ -152,13 +152,13 @@ def mktime_tz(data): - """Turn a 10-tuple as returned by parsedate_tz() into a UTC timestamp.""" + """Turn a 10-tuple as returned by parsedate_tz() into a POSIX timestamp.""" if data[9] is None: # No zone info, so localtime is better assumption than GMT return time.mktime(data[:8] + (-1,)) else: - t = time.mktime(data[:8] + (0,)) - return t - data[9] - time.timezone + t = calendar.timegm(data) + return t - data[9] def quote(str): diff --git a/Lib/email/test/test_email.py b/Lib/email/test/test_email.py --- a/Lib/email/test/test_email.py +++ b/Lib/email/test/test_email.py @@ -2585,6 +2585,12 @@ eq(time.localtime(t)[:6], timetup[:6]) eq(int(time.strftime('%Y', timetup[:9])), 2003) + def test_mktime_tz(self): + self.assertEqual(utils.mktime_tz((1970, 1, 1, 0, 0, 0, + -1, -1, -1, 0)), 0) + self.assertEqual(utils.mktime_tz((1970, 1, 1, 0, 0, 0, + -1, -1, -1, 1234)), -1234) + def test_parsedate_y2k(self): """Test for parsing a date with a two-digit year. diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -73,6 +73,9 @@ Library ------- +- Issue #14653: email.utils.mktime_tz() no longer relies on system + mktime() when timezone offest is supplied. + - Fix GzipFile's handling of filenames given as bytes objects. - Issue #15101: Make pool finalizer avoid joining current thread. 
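As a short illustration (a minimal sketch, not part of the patch above; the date string is just an example), the practical effect of the change: when the parsed date carries an explicit zone, mktime_tz() now derives the POSIX timestamp from that offset alone, so the result no longer depends on the local timezone of the machine running the code.

    from email.utils import parsedate_tz, mktime_tz

    # Parse a date with an explicit numeric offset (+0200 here).
    parsed = parsedate_tz("Fri, 22 Jun 2012 01:51:39 +0200")
    # The timestamp is computed from the supplied offset, not from the
    # host's timezone settings, so it is the same on every machine.
    print(mktime_tz(parsed))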
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 02:49:13 2012 From: python-checkins at python.org (alexander.belopolsky) Date: Fri, 22 Jun 2012 02:49:13 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Issue_=2314653=3A_email=2Eutils=2Emktime=5Ftz=28=29_no_longe?= =?utf8?q?r_relies_on_system?= Message-ID: http://hg.python.org/cpython/rev/9f88c38318ac changeset: 77561:9f88c38318ac parent: 77559:1cfa44cb5af0 parent: 77560:ffc048f43a70 user: Alexander Belopolsky date: Thu Jun 21 20:48:23 2012 -0400 summary: Issue #14653: email.utils.mktime_tz() no longer relies on system mktime() when timezone offest is supplied. files: Lib/email/_parseaddr.py | 8 ++++---- Lib/test/test_email/test_email.py | 6 ++++++ Misc/NEWS | 3 +++ 3 files changed, 13 insertions(+), 4 deletions(-) diff --git a/Lib/email/_parseaddr.py b/Lib/email/_parseaddr.py --- a/Lib/email/_parseaddr.py +++ b/Lib/email/_parseaddr.py @@ -13,7 +13,7 @@ 'quote', ] -import time +import time, calendar SPACE = ' ' EMPTYSTRING = '' @@ -177,13 +177,13 @@ def mktime_tz(data): - """Turn a 10-tuple as returned by parsedate_tz() into a UTC timestamp.""" + """Turn a 10-tuple as returned by parsedate_tz() into a POSIX timestamp.""" if data[9] is None: # No zone info, so localtime is better assumption than GMT return time.mktime(data[:8] + (-1,)) else: - t = time.mktime(data[:8] + (0,)) - return t - data[9] - time.timezone + t = calendar.timegm(data) + return t - data[9] def quote(str): diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py --- a/Lib/test/test_email/test_email.py +++ b/Lib/test/test_email/test_email.py @@ -2722,6 +2722,12 @@ eq(time.localtime(t)[:6], timetup[:6]) eq(int(time.strftime('%Y', timetup[:9])), 2003) + def test_mktime_tz(self): + self.assertEqual(utils.mktime_tz((1970, 1, 1, 0, 0, 0, + -1, -1, -1, 0)), 0) + self.assertEqual(utils.mktime_tz((1970, 1, 1, 0, 0, 0, + -1, -1, -1, 1234)), -1234) + def test_parsedate_y2k(self): """Test for parsing a date with a two-digit year. diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -37,6 +37,9 @@ Library ------- +- Issue #14653: email.utils.mktime_tz() no longer relies on system + mktime() when timezone offest is supplied. + - Issue #14684: zlib.compressobj() and zlib.decompressobj() now support the use of predefined compression dictionaries. Original patch by Sam Rushing. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 02:57:51 2012 From: python-checkins at python.org (alexander.belopolsky) Date: Fri, 22 Jun 2012 02:57:51 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMi43KTogSXNzdWUgIzE0NjUz?= =?utf8?q?=3A_email=2Eutils=2Emktime=5Ftz=28=29_no_longer_relies_on_system?= Message-ID: http://hg.python.org/cpython/rev/a283563c8cc4 changeset: 77562:a283563c8cc4 branch: 2.7 parent: 77537:981ad5254d07 user: Alexander Belopolsky date: Thu Jun 21 20:57:39 2012 -0400 summary: Issue #14653: email.utils.mktime_tz() no longer relies on system mktime() when timezone offest is supplied. 
files: Lib/email/_parseaddr.py | 8 ++++---- Lib/email/test/test_email.py | 6 ++++++ Misc/NEWS | 3 +++ 3 files changed, 13 insertions(+), 4 deletions(-) diff --git a/Lib/email/_parseaddr.py b/Lib/email/_parseaddr.py --- a/Lib/email/_parseaddr.py +++ b/Lib/email/_parseaddr.py @@ -13,7 +13,7 @@ 'quote', ] -import time +import time, calendar SPACE = ' ' EMPTYSTRING = '' @@ -150,13 +150,13 @@ def mktime_tz(data): - """Turn a 10-tuple as returned by parsedate_tz() into a UTC timestamp.""" + """Turn a 10-tuple as returned by parsedate_tz() into a POSIX timestamp.""" if data[9] is None: # No zone info, so localtime is better assumption than GMT return time.mktime(data[:8] + (-1,)) else: - t = time.mktime(data[:8] + (0,)) - return t - data[9] - time.timezone + t = calendar.timegm(data) + return t - data[9] def quote(str): diff --git a/Lib/email/test/test_email.py b/Lib/email/test/test_email.py --- a/Lib/email/test/test_email.py +++ b/Lib/email/test/test_email.py @@ -2262,6 +2262,12 @@ eq(time.localtime(t)[:6], timetup[:6]) eq(int(time.strftime('%Y', timetup[:9])), 2003) + def test_mktime_tz(self): + self.assertEqual(utils.mktime_tz((1970, 1, 1, 0, 0, 0, + -1, -1, -1, 0)), 0) + self.assertEqual(utils.mktime_tz((1970, 1, 1, 0, 0, 0, + -1, -1, -1, 1234)), -1234) + def test_parsedate_y2k(self): """Test for parsing a date with a two-digit year. diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -70,6 +70,9 @@ Library ------- +- Issue #14653: email.utils.mktime_tz() no longer relies on system + mktime() when timezone offest is supplied. + - Issue #15101: Make pool finalizer avoid joining current thread. - Issue #15054: A bug in tokenize.tokenize that caused string literals -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Fri Jun 22 05:44:07 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Fri, 22 Jun 2012 05:44:07 +0200 Subject: [Python-checkins] Daily reference leaks (9f88c38318ac): sum=0 Message-ID: results for 9f88c38318ac on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogOaCSgZ', '-x'] From python-checkins at python.org Fri Jun 22 09:34:05 2012 From: python-checkins at python.org (hynek.schlawack) Date: Fri, 22 Jun 2012 09:34:05 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Simplify_code_in_fileio=5Fi?= =?utf8?q?nit?= Message-ID: http://hg.python.org/cpython/rev/701d4989504d changeset: 77563:701d4989504d parent: 77561:9f88c38318ac user: Hynek Schlawack date: Fri Jun 22 09:32:22 2012 +0200 summary: Simplify code in fileio_init If an identical code line is in both at the end of if and else, it can as well stand after the block. :) The code is from 464cf523485e, I didn't see it before checking the commits in the web interface of course. 
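The same simplification, expressed as a small Python sketch (illustrative only; the function and variable names are made up, the real change is in the C function fileio_init shown below):

    import os

    def open_fd(name, flags, opener=None):
        # "fd_is_own = 1" used to end both branches; hoisting it below the
        # if/else block, as the patch does, leaves the behaviour unchanged.
        if opener is None:
            fd = os.open(name, flags, 0o666)
        else:
            fd = opener(name, flags)
        fd_is_own = True
        return fd, fd_is_own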
files: Modules/_io/fileio.c | 3 +-- 1 files changed, 1 insertions(+), 2 deletions(-) diff --git a/Modules/_io/fileio.c b/Modules/_io/fileio.c --- a/Modules/_io/fileio.c +++ b/Modules/_io/fileio.c @@ -377,7 +377,6 @@ #endif self->fd = open(name, flags, 0666); Py_END_ALLOW_THREADS - fd_is_own = 1; } else { PyObject *fdobj = PyObject_CallFunction( opener, "Oi", nameobj, flags); @@ -395,9 +394,9 @@ if (self->fd == -1) { goto error; } - fd_is_own = 1; } + fd_is_own = 1; if (self->fd < 0) { #ifdef MS_WINDOWS if (widename != NULL) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 12:21:25 2012 From: python-checkins at python.org (martin.v.loewis) Date: Fri, 22 Jun 2012 12:21:25 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315042=3A_Add_PySta?= =?utf8?q?te=5FAddModule_and_PyState=5FRemoveModule=2E?= Message-ID: http://hg.python.org/cpython/rev/55e8cba34b11 changeset: 77564:55e8cba34b11 user: Martin v. L?wis date: Fri Jun 22 12:20:55 2012 +0200 summary: Issue #15042: Add PyState_AddModule and PyState_RemoveModule. Add version guard for Py_LIMITED_API additions. Issue #15081: Document PyState_FindModule. Patch by Robin Schreiber. files: Doc/c-api/module.rst | 22 ++++++++++++++- Include/pystate.h | 5 +++ Misc/NEWS | 6 ++++ PC/python3.def | 2 + Python/pystate.c | 45 ++++++++++++++++++++++++++++++- 5 files changed, 77 insertions(+), 3 deletions(-) diff --git a/Doc/c-api/module.rst b/Doc/c-api/module.rst --- a/Doc/c-api/module.rst +++ b/Doc/c-api/module.rst @@ -113,8 +113,28 @@ Return a pointer to the :c:type:`PyModuleDef` struct from which the module was created, or *NULL* if the module wasn't created with - :c:func:`PyModule_Create`. + :c:func:`PyModule_Create`.i +.. c:function:: PyObject* PyState_FindModule(PyModuleDef *def) + + Returns the module object that was created from *def* for the current interpreter. + This method requires that the module object has been attached to the interpreter state with + :c:func:`PyState_AddModule` beforehand. In case the corresponding module object is not + found or has not been attached to the interpreter state yet, it returns NULL. + +.. c:function:: int PyState_AddModule(PyModuleDef *def, PyObject *module) + + Attaches the module object passed to the function to the interpreter state. This allows + the module object to be accessible via + :c:func:`PyState_FindModule`. + + .. versionadded:: 3.3 + +.. c:function:: int PyState_RemoveModule(PyModuleDef *def, PyObject *module) + + Removes the module object created from *def* from the interpreter state. + + .. versionadded:: 3.3 Initializing C modules ^^^^^^^^^^^^^^^^^^^^^^ diff --git a/Include/pystate.h b/Include/pystate.h --- a/Include/pystate.h +++ b/Include/pystate.h @@ -124,6 +124,11 @@ PyAPI_FUNC(void) PyInterpreterState_Clear(PyInterpreterState *); PyAPI_FUNC(void) PyInterpreterState_Delete(PyInterpreterState *); PyAPI_FUNC(int) _PyState_AddModule(PyObject*, struct PyModuleDef*); +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03030000 +/* New in 3.3 */ +PyAPI_FUNC(int) PyState_AddModule(PyObject*, struct PyModuleDef*); +PyAPI_FUNC(int) PyState_RemoveModule(struct PyModuleDef*); +#endif PyAPI_FUNC(PyObject*) PyState_FindModule(struct PyModuleDef*); PyAPI_FUNC(PyThreadState *) PyThreadState_New(PyInterpreterState *); diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,9 @@ Core and Builtins ----------------- +- Issue #15042: Add PyState_AddModule and PyState_RemoveModule. 
Add version + guard for Py_LIMITED_API additions. Patch by Robin Schreiber. + - Issue #10053: Don't close FDs when FileIO.__init__ fails. Loosely based on the work by Hirokazu Yamamoto. @@ -154,6 +157,9 @@ Documentation ------------- +- Issue #15081: Document PyState_FindModule. + Patch by Robin Schreiber. + - Issue #14814: Added first draft of ipaddress module API reference Tests diff --git a/PC/python3.def b/PC/python3.def --- a/PC/python3.def +++ b/PC/python3.def @@ -471,6 +471,8 @@ PySlice_Type=python33.PySlice_Type DATA PySortWrapper_Type=python33.PySortWrapper_Type DATA PyState_FindModule=python33.PyState_FindModule + PyState_AddModule=python33.PyState_AddModule + PyState_RemoveModule=python33.PyState_RemoveModule PyStructSequence_GetItem=python33.PyStructSequence_GetItem PyStructSequence_New=python33.PyStructSequence_New PyStructSequence_NewType=python33.PyStructSequence_NewType diff --git a/Python/pystate.c b/Python/pystate.c --- a/Python/pystate.c +++ b/Python/pystate.c @@ -239,9 +239,9 @@ } PyObject* -PyState_FindModule(struct PyModuleDef* m) +PyState_FindModule(struct PyModuleDef* module) { - Py_ssize_t index = m->m_base.m_index; + Py_ssize_t index = module->m_base.m_index; PyInterpreterState *state = PyThreadState_GET()->interp; PyObject *res; if (index == 0) @@ -273,6 +273,47 @@ def->m_base.m_index, module); } +int +PyState_AddModule(PyObject* module, struct PyModuleDef* def) +{ + Py_ssize_t index; + PyInterpreterState *state = PyThreadState_GET()->interp; + if (!def) { + Py_FatalError("PyState_AddModule: Module Definition is NULL"); + return -1; + } + index = def->m_base.m_index; + if (state->modules_by_index) { + if(PyList_GET_SIZE(state->modules_by_index) >= index) { + if(module == PyList_GET_ITEM(state->modules_by_index, index)) { + Py_FatalError("PyState_AddModule: Module already added!"); + return -1; + } + } + } + return _PyState_AddModule(module, def); +} + +int +PyState_RemoveModule(struct PyModuleDef* def) +{ + Py_ssize_t index = def->m_base.m_index; + PyInterpreterState *state = PyThreadState_GET()->interp; + if (index == 0) { + Py_FatalError("PyState_RemoveModule: Module index invalid."); + return -1; + } + if (state->modules_by_index == NULL) { + Py_FatalError("PyState_RemoveModule: Interpreters module-list not acessible."); + return -1; + } + if (index > PyList_GET_SIZE(state->modules_by_index)) { + Py_FatalError("PyState_RemoveModule: Module index out of bounds."); + return -1; + } + return PyList_SetItem(state->modules_by_index, index, Py_None); +} + void PyThreadState_Clear(PyThreadState *tstate) { -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 12:50:02 2012 From: python-checkins at python.org (martin.v.loewis) Date: Fri, 22 Jun 2012 12:50:02 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Add_Stable_ABI_documentatio?= =?utf8?q?n=2E?= Message-ID: http://hg.python.org/cpython/rev/2b150996d4c6 changeset: 77565:2b150996d4c6 user: Martin v. L?wis date: Fri Jun 22 12:49:08 2012 +0200 summary: Add Stable ABI documentation. files: Doc/c-api/index.rst | 1 + Doc/c-api/stable.rst | 39 ++++++++++++++++++++++++++++++++ 2 files changed, 40 insertions(+), 0 deletions(-) diff --git a/Doc/c-api/index.rst b/Doc/c-api/index.rst --- a/Doc/c-api/index.rst +++ b/Doc/c-api/index.rst @@ -25,3 +25,4 @@ init.rst memory.rst objimpl.rst + stable.rst diff --git a/Doc/c-api/stable.rst b/Doc/c-api/stable.rst new file mode 100644 --- /dev/null +++ b/Doc/c-api/stable.rst @@ -0,0 +1,39 @@ +.. highlightlang:: c + +.. 
_stable: + +*********************************** +Stable Application Binary Interface +*********************************** + +Traditionally, the C API of Python will change with every release. +Most changes will be source-compatible, typically by only adding API, +rather than changing existing API or removing API (although some +interfaces do get removed after being deprecated first). + +Unfortunately, the API compatibility does not extend to binary +compatibility (the ABI). The reason is primarily the evolution of +struct definitions, where addition of a new field, or changing +the type of a field, might not break the API, but can break the ABI. +As a consequence, extension modules need to be recompiled for +every Python release (although an exception is possible on Unix +when none of the affected interfaces are used). In addition, on +Windows, extension modules link with a specific pythonXY.dll and +need to be recompiled to link with a newer one. + +Since Python 3.2, a subset of the API has been declared to guarantee +a stable ABI. Extension modules wishing to use this API need to define +Py_LIMITED_API. A number of interpreter details then become hidden +from the extension module; in return, a module is built that works +on any 3.x version (x>=2) without recompilation. In some cases, the +stable ABI needs to be extended with new functions. Extension modules +wishing to use these new APIs need to set Py_LIMITED_API to the +PY_VERSION_HEX value of the minimum Python version they want to +support (e.g. 0x03030000 for Python 3.3). Such modules will work +on all subsequent Python releases, but fail to load (because of +missing symbols) on the older releases. + +As of Python 3.2, the set of functions available to the limited API +is documented in PEP 384. + +.. XXX copy exact list here? Into each function's definition? -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 12:50:03 2012 From: python-checkins at python.org (martin.v.loewis) Date: Fri, 22 Jun 2012 12:50:03 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Whitespace_normalization?= Message-ID: http://hg.python.org/cpython/rev/8f5eeee8e70b changeset: 77566:8f5eeee8e70b user: Martin v. Löwis date: Fri Jun 22 12:49:59 2012 +0200 summary: Whitespace normalization files: Doc/c-api/stable.rst | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Doc/c-api/stable.rst b/Doc/c-api/stable.rst --- a/Doc/c-api/stable.rst +++ b/Doc/c-api/stable.rst @@ -15,8 +15,8 @@ compatibility (the ABI). The reason is primarily the evolution of struct definitions, where addition of a new field, or changing the type of a field, might not break the API, but can break the ABI. -As a consequence, extension modules need to be recompiled for -every Python release (although an exception is possible on Unix +As a consequence, extension modules need to be recompiled for +every Python release (although an exception is possible on Unix when none of the affected interfaces are used). In addition, on Windows, extension modules link with a specific pythonXY.dll and need to be recompiled to link with a newer one.
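For illustration, a minimal setup.py sketch (not part of the checkins above; the module name "spam" and its source file are hypothetical) showing how an extension opts into the stable ABI described in the new document:

    from distutils.core import setup, Extension

    setup(
        name="spam",
        version="1.0",
        ext_modules=[
            Extension(
                "spam",
                sources=["spammodule.c"],
                # Defining Py_LIMITED_API restricts the module to the stable
                # ABI; 0x03030000 declares Python 3.3 as the oldest release
                # the module intends to support.
                define_macros=[("Py_LIMITED_API", "0x03030000")],
            )
        ],
    )

A module built this way should keep loading on later 3.x releases without being recompiled, at the cost of giving up the parts of the C API that are hidden under Py_LIMITED_API.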
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 12:57:11 2012 From: python-checkins at python.org (larry.hastings) Date: Fri, 22 Jun 2012 12:57:11 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314769=3A_test=5Fca?= =?utf8?q?pi_now_has_SkipitemTest=2C_which_cleverly_checks?= Message-ID: http://hg.python.org/cpython/rev/ace45d23628a changeset: 77567:ace45d23628a user: Larry Hastings date: Fri Jun 22 03:56:29 2012 -0700 summary: Issue #14769: test_capi now has SkipitemTest, which cleverly checks for "parity" between PyArg_ParseTuple() and the Python/getargs.c static function skipitem() for all possible "format units". files: Lib/test/test_capi.py | 71 +++++++++++++++++++- Misc/NEWS | 4 + Modules/_testcapimodule.c | 98 ++++++++++++++++---------- 3 files changed, 134 insertions(+), 39 deletions(-) diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py --- a/Lib/test/test_capi.py +++ b/Lib/test/test_capi.py @@ -214,9 +214,78 @@ finally: os.chdir(oldcwd) +class SkipitemTest(unittest.TestCase): + + def test_skipitem(self): + """ + If this test failed, you probably added a new "format unit" + in Python/getargs.c, but neglected to update our poor friend + skipitem() in the same file. (If so, shame on you!) + + This function brute-force tests all** ASCII characters (1 to 127 + inclusive) as format units, checking to see that + PyArg_ParseTupleAndKeywords() return consistent errors both when + the unit is attempted to be used and when it is skipped. If the + format unit doesn't exist, we'll get one of two specific error + messages (one for used, one for skipped); if it does exist we + *won't* get that error--we'll get either no error or some other + error. If we get the "does not exist" error for one test and + not for the other, there's a mismatch, and the test fails. + + ** Okay, it actually skips some ASCII characters. Some characters + have special funny semantics, and it would be difficult to + accomodate them here. + """ + empty_tuple = () + tuple_1 = (0,) + dict_b = {'b':1} + keywords = ["a", "b"] + + # Python C source files must be ASCII, + # therefore we'll never have a format unit > 127 + for i in range(1, 128): + c = chr(i) + + # skip non-printable characters, no one is insane enough to define + # one as a format unit + # skip parentheses, the error reporting is inconsistent about them + # skip 'e', it's always a two-character code + # skip '|' and '$', they don't represent arguments anyway + if (not c.isprintable()) or (c in '()e|$'): + continue + + # test the format unit when not skipped + format = c + "i" + try: + # (note: the format string must be bytes!) 
+ _testcapi.parse_tuple_and_keywords(tuple_1, dict_b, + format.encode("ascii"), keywords) + when_not_skipped = False + except TypeError as e: + s = "argument 1 must be impossible, not int" + when_not_skipped = (str(e) == s) + except RuntimeError as e: + when_not_skipped = False + + # test the format unit when skipped + optional_format = "|" + format + try: + _testcapi.parse_tuple_and_keywords(empty_tuple, dict_b, + optional_format.encode("ascii"), keywords) + when_skipped = False + except RuntimeError as e: + s = "impossible: '{}'".format(format) + when_skipped = (str(e) == s) + + message = ("test_skipitem_parity: " + "detected mismatch between convertsimple and skipitem " + "for format unit '{}' ({}), not skipped {}, skipped {}".format( + c, i, when_skipped, when_not_skipped)) + self.assertIs(when_skipped, when_not_skipped, message) def test_main(): - support.run_unittest(CAPITest, TestPendingCalls, Test6012, EmbeddingTest) + support.run_unittest(CAPITest, TestPendingCalls, + Test6012, EmbeddingTest, SkipitemTest) for name in dir(_testcapi): if name.startswith('test_'): diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -165,6 +165,10 @@ Tests ----- +- Issue #14769: test_capi now has SkipitemTest, which cleverly checks + for "parity" between PyArg_ParseTuple() and the Python/getargs.c static + function skipitem() for all possible "format units". + - test_nntplib now tolerates being run from behind NNTP gateways that add "X-Antivirus" headers to articles diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -1195,51 +1195,73 @@ } static PyObject * -test_bug_7414(PyObject *self) +parse_tuple_and_keywords(PyObject *self, PyObject *args) { - /* Issue #7414: for PyArg_ParseTupleAndKeywords, 'C' code wasn't being - skipped properly in skipitem() */ - int a = 0, b = 0, result; - char *kwlist[] = {"a", "b", NULL}; - PyObject *tuple = NULL, *dict = NULL, *b_str; + PyObject *sub_args; + PyObject *sub_kwargs; + char *sub_format; + PyObject *sub_keywords; - tuple = PyTuple_New(0); - if (tuple == NULL) - goto failure; - dict = PyDict_New(); - if (dict == NULL) - goto failure; - b_str = PyUnicode_FromString("b"); - if (b_str == NULL) - goto failure; - result = PyDict_SetItemString(dict, "b", b_str); - Py_DECREF(b_str); - if (result < 0) - goto failure; + Py_ssize_t i, size; + char *keywords[8 + 1]; /* space for NULL at end */ + PyObject *o; + PyObject *converted[8]; - result = PyArg_ParseTupleAndKeywords(tuple, dict, "|CC", - kwlist, &a, &b); - if (!result) - goto failure; + int result; + PyObject *return_value = NULL; - if (a != 0) - return raiseTestError("test_bug_7414", - "C format code not skipped properly"); - if (b != 'b') - return raiseTestError("test_bug_7414", - "C format code returned wrong value"); + char buffers[32][8]; - Py_DECREF(dict); - Py_DECREF(tuple); - Py_RETURN_NONE; + if (!PyArg_ParseTuple(args, "OOyO:parse_tuple_and_keywords", + &sub_args, &sub_kwargs, + &sub_format, &sub_keywords)) + return NULL; - failure: - Py_XDECREF(dict); - Py_XDECREF(tuple); - return NULL; + if (!(PyList_CheckExact(sub_keywords) || PyTuple_CheckExact(sub_keywords))) { + PyErr_SetString(PyExc_ValueError, + "parse_tuple_and_keywords: sub_keywords must be either list or tuple"); + return NULL; + } + + memset(buffers, 0, sizeof(buffers)); + memset(converted, 0, sizeof(converted)); + memset(keywords, 0, sizeof(keywords)); + + size = PySequence_Fast_GET_SIZE(sub_keywords); + if (size > 8) { + 
PyErr_SetString(PyExc_ValueError, + "parse_tuple_and_keywords: too many keywords in sub_keywords"); + goto exit; + } + + for (i = 0; i < size; i++) { + o = PySequence_Fast_GET_ITEM(sub_keywords, i); + if (!PyUnicode_FSConverter(o, (void *)(converted + i))) { + PyErr_Format(PyExc_ValueError, + "parse_tuple_and_keywords: could not convert keywords[%s] to narrow string", i); + goto exit; + } + keywords[i] = PyBytes_AS_STRING(converted[i]); + } + + result = PyArg_ParseTupleAndKeywords(sub_args, sub_kwargs, + sub_format, keywords, + buffers + 0, buffers + 1, buffers + 2, buffers + 3, + buffers + 4, buffers + 5, buffers + 6, buffers + 7); + + if (result) { + return_value = Py_None; + Py_INCREF(Py_None); + } + +exit: + size = sizeof(converted) / sizeof(converted[0]); + for (i = 0; i < size; i++) { + Py_XDECREF(converted[i]); + } + return return_value; } - static volatile int x; /* Test the u and u# codes for PyArg_ParseTuple. May leak memory in case @@ -2426,7 +2448,7 @@ {"test_long_numbits", (PyCFunction)test_long_numbits, METH_NOARGS}, {"test_k_code", (PyCFunction)test_k_code, METH_NOARGS}, {"test_empty_argparse", (PyCFunction)test_empty_argparse,METH_NOARGS}, - {"test_bug_7414", (PyCFunction)test_bug_7414, METH_NOARGS}, + {"parse_tuple_and_keywords", parse_tuple_and_keywords, METH_VARARGS}, {"test_null_strings", (PyCFunction)test_null_strings, METH_NOARGS}, {"test_string_from_format", (PyCFunction)test_string_from_format, METH_NOARGS}, {"test_with_docstring", (PyCFunction)test_with_docstring, METH_NOARGS, -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 16:39:00 2012 From: python-checkins at python.org (alexander.belopolsky) Date: Fri, 22 Jun 2012 16:39:00 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Fixed_the_name_?= =?utf8?q?of_the_=27email=2EUtils=27_module_in_tests=2E?= Message-ID: http://hg.python.org/cpython/rev/10faad45905a changeset: 77568:10faad45905a branch: 2.7 parent: 77562:a283563c8cc4 user: Alexander Belopolsky date: Fri Jun 22 10:38:48 2012 -0400 summary: Fixed the name of the 'email.Utils' module in tests. files: Lib/email/test/test_email.py | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Lib/email/test/test_email.py b/Lib/email/test/test_email.py --- a/Lib/email/test/test_email.py +++ b/Lib/email/test/test_email.py @@ -2263,9 +2263,9 @@ eq(int(time.strftime('%Y', timetup[:9])), 2003) def test_mktime_tz(self): - self.assertEqual(utils.mktime_tz((1970, 1, 1, 0, 0, 0, + self.assertEqual(Utils.mktime_tz((1970, 1, 1, 0, 0, 0, -1, -1, -1, 0)), 0) - self.assertEqual(utils.mktime_tz((1970, 1, 1, 0, 0, 0, + self.assertEqual(Utils.mktime_tz((1970, 1, 1, 0, 0, 0, -1, -1, -1, 1234)), -1234) def test_parsedate_y2k(self): -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 18:26:16 2012 From: python-checkins at python.org (alexander.belopolsky) Date: Fri, 22 Jun 2012 18:26:16 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=239527=3A_datetime?= =?utf8?q?=2Eastimezone=28=29_method_will_now_supply_a_class?= Message-ID: http://hg.python.org/cpython/rev/88a5f2730579 changeset: 77569:88a5f2730579 parent: 77567:ace45d23628a user: Alexander Belopolsky date: Fri Jun 22 12:23:23 2012 -0400 summary: Issue #9527: datetime.astimezone() method will now supply a class timezone instance corresponding to the system local timezone when called with no arguments. 
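A minimal sketch of the new behaviour (illustrative, not part of the patch; the resulting offset and zone name depend on the host's TZ configuration):

    from datetime import datetime, timezone

    utc_now = datetime.now(timezone.utc)
    # With no argument, astimezone() converts to the system local timezone
    # and attaches a timezone instance built from data supplied by the OS.
    local_now = utc_now.astimezone()
    assert local_now == utc_now      # same instant, different tzinfo
    print(local_now.tzinfo)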
files: Doc/library/datetime.rst | 11 ++- Lib/datetime.py | 28 +++++++- Lib/test/datetimetester.py | 21 +++++- Misc/NEWS | 4 + Modules/_datetimemodule.c | 86 ++++++++++++++++++++++++- 5 files changed, 138 insertions(+), 12 deletions(-) diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst --- a/Doc/library/datetime.rst +++ b/Doc/library/datetime.rst @@ -958,17 +958,22 @@ datetime with no conversion of date and time data. -.. method:: datetime.astimezone(tz) +.. method:: datetime.astimezone(tz=None) - Return a :class:`.datetime` object with new :attr:`tzinfo` attribute *tz*, + Return a :class:`datetime` object with new :attr:`tzinfo` attribute *tz*, adjusting the date and time data so the result is the same UTC time as *self*, but in *tz*'s local time. - *tz* must be an instance of a :class:`tzinfo` subclass, and its + If provided, *tz* must be an instance of a :class:`tzinfo` subclass, and its :meth:`utcoffset` and :meth:`dst` methods must not return ``None``. *self* must be aware (``self.tzinfo`` must not be ``None``, and ``self.utcoffset()`` must not return ``None``). + If called without arguments (or with ``tz=None``) the system local + timezone is assumed. The ``tzinfo`` attribute of the converted + datetime instance will be set to an instance of :class:`timezone` + with the zone name and offset obtained from the OS. + If ``self.tzinfo`` is *tz*, ``self.astimezone(tz)`` is equal to *self*: no adjustment of date or time data is performed. Else the result is local time in time zone *tz*, representing the same UTC time as *self*: after diff --git a/Lib/datetime.py b/Lib/datetime.py --- a/Lib/datetime.py +++ b/Lib/datetime.py @@ -1493,8 +1493,32 @@ return datetime(year, month, day, hour, minute, second, microsecond, tzinfo) - def astimezone(self, tz): - if not isinstance(tz, tzinfo): + def astimezone(self, tz=None): + if tz is None: + if self.tzinfo is None: + raise ValueError("astimezone() requires an aware datetime") + ts = (self - _EPOCH) // timedelta(seconds=1) + localtm = _time.localtime(ts) + local = datetime(*localtm[:6]) + try: + # Extract TZ data if available + gmtoff = localtm.tm_gmtoff + zone = localtm.tm_zone + except AttributeError: + # Compute UTC offset and compare with the value implied + # by tm_isdst. If the values match, use the zone name + # implied by tm_isdst. + delta = local - datetime(*_time.gmtime(ts)[:6]) + dst = _time.daylight and localtm.tm_isdst > 0 + gmtoff = _time.altzone if dst else _time.timezone + if delta == timedelta(seconds=-gmtoff): + tz = timezone(delta, _time.tzname[dst]) + else: + tz = timezone(delta) + else: + tz = timezone(timedelta(seconds=-gmtoff), zone) + + elif not isinstance(tz, tzinfo): raise TypeError("tz argument must be an instance of tzinfo") mytz = self.tzinfo diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -1972,7 +1972,7 @@ # simply can't be applied to a naive object. dt = self.theclass.now() f = FixedOffset(44, "") - self.assertRaises(TypeError, dt.astimezone) # not enough args + self.assertRaises(ValueError, dt.astimezone) # naive self.assertRaises(TypeError, dt.astimezone, f, f) # too many args self.assertRaises(TypeError, dt.astimezone, dt) # arg wrong type self.assertRaises(ValueError, dt.astimezone, f) # naive @@ -3253,8 +3253,6 @@ self.assertTrue(dt.tzinfo is f44m) # Replacing with degenerate tzinfo raises an exception. self.assertRaises(ValueError, dt.astimezone, fnone) - # Ditto with None tz. 
- self.assertRaises(TypeError, dt.astimezone, None) # Replacing with same tzinfo makes no change. x = dt.astimezone(dt.tzinfo) self.assertTrue(x.tzinfo is f44m) @@ -3274,6 +3272,23 @@ self.assertTrue(got.tzinfo is expected.tzinfo) self.assertEqual(got, expected) + @support.run_with_tz('UTC') + def test_astimezone_default_utc(self): + dt = self.theclass.now(timezone.utc) + self.assertEqual(dt.astimezone(None), dt) + self.assertEqual(dt.astimezone(), dt) + + @support.run_with_tz('EST+05EDT,M3.2.0,M11.1.0') + def test_astimezone_default_eastern(self): + dt = self.theclass(2012, 11, 4, 6, 30, tzinfo=timezone.utc) + local = dt.astimezone() + self.assertEqual(dt, local) + self.assertEqual(local.strftime("%z %Z"), "+0500 EST") + dt = self.theclass(2012, 11, 4, 5, 30, tzinfo=timezone.utc) + local = dt.astimezone() + self.assertEqual(dt, local) + self.assertEqual(local.strftime("%z %Z"), "+0400 EDT") + def test_aware_subtract(self): cls = self.theclass diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -40,6 +40,10 @@ Library ------- +- Issue #9527: datetime.astimezone() method will now supply a class + timezone instance corresponding to the system local timezone when + called with no arguments. + - Issue #14653: email.utils.mktime_tz() no longer relies on system mktime() when timezone offest is supplied. diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -4686,17 +4686,87 @@ } static PyObject * +local_timezone(PyObject *utc_time) +{ + PyObject *result = NULL; + struct tm *timep; + time_t timestamp; + long offset; + PyObject *delta; + PyObject *one_second; + PyObject *seconds; + PyObject *nameo = NULL; + const char *zone = NULL; + + delta = datetime_subtract((PyObject *)utc_time, PyDateTime_Epoch); + if (delta == NULL) + return NULL; + one_second = new_delta(0, 1, 0, 0); + if (one_second == NULL) + goto error; + seconds = divide_timedelta_timedelta((PyDateTime_Delta *)delta, + (PyDateTime_Delta *)one_second); + Py_DECREF(one_second); + if (seconds == NULL) + goto error; + Py_DECREF(delta); + timestamp = PyLong_AsLong(seconds); + Py_DECREF(seconds); + if (timestamp == -1 && PyErr_Occurred()) + return NULL; + timep = localtime(×tamp); +#ifdef HAVE_STRUCT_TM_TM_ZONE + offset = timep->tm_gmtoff; + zone = timep->tm_zone; + delta = new_delta(0, -offset, 0, 0); +#else /* HAVE_STRUCT_TM_TM_ZONE */ + { + PyObject *local_time; + Py_INCREF(utc_time->tzinfo); + local_time = new_datetime(timep->tm_year + 1900, timep->tm_mon + 1, + timep->tm_mday, timep->tm_hour, timep->tm_min, + timep->tm_sec, utc_time->tzinfo); + if (local_time == NULL) { + Py_DECREF(utc_time->tzinfo); + goto error; + } + delta = datetime_subtract(local_time, utc_time); + /* XXX: before relying on tzname, we should compare delta + to the offset implied by timezone/altzone */ + if (daylight && timep->tm_isdst >= 0) + zone = tzname[timep->tm_isdst % 2]; + else + zone = tzname[0]; + Py_DECREF(local_time); + } +#endif /* HAVE_STRUCT_TM_TM_ZONE */ + if (zone != NULL) { + nameo = PyUnicode_DecodeLocale(zone, "surrogateescape"); + if (nameo == NULL) + goto error; + } + result = new_timezone(delta, nameo); + Py_DECREF(nameo); + error: + Py_DECREF(delta); + return result; +} + +static PyObject * datetime_astimezone(PyDateTime_DateTime *self, PyObject *args, PyObject *kw) { PyObject *result; PyObject *offset; PyObject *temp; - PyObject *tzinfo; + PyObject *tzinfo = Py_None; _Py_IDENTIFIER(fromutc); static char *keywords[] = {"tz", NULL}; - if (! 
PyArg_ParseTupleAndKeywords(args, kw, "O!:astimezone", keywords, - &PyDateTime_TZInfoType, &tzinfo)) + if (! PyArg_ParseTupleAndKeywords(args, kw, "|O:astimezone", keywords, + &tzinfo)) + return NULL; + + if (check_tzinfo_subclass(tzinfo) == -1) return NULL; if (!HASTZINFO(self) || self->tzinfo == Py_None) @@ -4729,8 +4799,16 @@ /* Attach new tzinfo and let fromutc() do the rest. */ temp = ((PyDateTime_DateTime *)result)->tzinfo; + if (tzinfo == Py_None) { + tzinfo = local_timezone(result); + if (tzinfo == NULL) { + Py_DECREF(result); + return NULL; + } + } + else + Py_INCREF(tzinfo); ((PyDateTime_DateTime *)result)->tzinfo = tzinfo; - Py_INCREF(tzinfo); Py_DECREF(temp); temp = result; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 18:26:17 2012 From: python-checkins at python.org (alexander.belopolsky) Date: Fri, 22 Jun 2012 18:26:17 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=239527=3A_datetime?= =?utf8?q?=2Eastimezone=28=29_method_will_now_supply_a_class?= Message-ID: http://hg.python.org/cpython/rev/336c53c1f547 changeset: 77570:336c53c1f547 user: Alexander Belopolsky date: Fri Jun 22 12:25:57 2012 -0400 summary: Issue #9527: datetime.astimezone() method will now supply a class timezone instance corresponding to the system local timezone when called with no arguments. files: Lib/datetime.py | 4 ++-- Lib/test/datetimetester.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Lib/datetime.py b/Lib/datetime.py --- a/Lib/datetime.py +++ b/Lib/datetime.py @@ -1501,7 +1501,7 @@ localtm = _time.localtime(ts) local = datetime(*localtm[:6]) try: - # Extract TZ data if available + # Extract TZ data if available gmtoff = localtm.tm_gmtoff zone = localtm.tm_zone except AttributeError: @@ -1517,7 +1517,7 @@ tz = timezone(delta) else: tz = timezone(timedelta(seconds=-gmtoff), zone) - + elif not isinstance(tz, tzinfo): raise TypeError("tz argument must be an instance of tzinfo") diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -3283,11 +3283,11 @@ dt = self.theclass(2012, 11, 4, 6, 30, tzinfo=timezone.utc) local = dt.astimezone() self.assertEqual(dt, local) - self.assertEqual(local.strftime("%z %Z"), "+0500 EST") + self.assertEqual(local.strftime("%z %Z"), "+0500 EST") dt = self.theclass(2012, 11, 4, 5, 30, tzinfo=timezone.utc) local = dt.astimezone() self.assertEqual(dt, local) - self.assertEqual(local.strftime("%z %Z"), "+0400 EDT") + self.assertEqual(local.strftime("%z %Z"), "+0400 EDT") def test_aware_subtract(self): cls = self.theclass -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 18:33:10 2012 From: python-checkins at python.org (jesus.cea) Date: Fri, 22 Jun 2012 18:33:10 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Closes_=2310142=3A_Support_?= =?utf8?q?for_SEEK=5FHOLE/SEEK=5FDATA?= Message-ID: http://hg.python.org/cpython/rev/de2a0cb6ba52 changeset: 77571:de2a0cb6ba52 user: Jesus Cea date: Fri Jun 22 18:32:07 2012 +0200 summary: Closes #10142: Support for SEEK_HOLE/SEEK_DATA files: Doc/library/io.rst | 5 +++++ Doc/library/os.rst | 4 ++++ Lib/_pyio.py | 21 +++++++++++++-------- Lib/os.py | 1 + Lib/test/test_io.py | 2 +- Lib/test/test_posix.py | 20 ++++++++++++++++++++ Misc/NEWS | 2 ++ Modules/_io/bufferedio.c | 21 ++++++++++++++++++--- Modules/posixmodule.c | 7 +++++++ 9 files changed, 71 insertions(+), 12 deletions(-) diff --git a/Doc/library/io.rst 
b/Doc/library/io.rst --- a/Doc/library/io.rst +++ b/Doc/library/io.rst @@ -291,6 +291,11 @@ .. versionadded:: 3.1 The ``SEEK_*`` constants. + .. versionadded:: 3.3 + Some operating systems could support additional values, like + :data:`os.SEEK_HOLE` or :data:`os.SEEK_DATA`. The valid values + for a file could depend on it being open in text or binary mode. + .. method:: seekable() Return ``True`` if the stream supports random access. If ``False``, diff --git a/Doc/library/os.rst b/Doc/library/os.rst --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -995,6 +995,10 @@ Parameters to the :func:`lseek` function. Their values are 0, 1, and 2, respectively. Availability: Windows, Unix. + .. versionadded:: 3.3 + Some operating systems could support additional values, like + :data:`os.SEEK_HOLE` or :data:`os.SEEK_DATA`. + .. function:: mkdirat(dirfd, path, mode=0o777) diff --git a/Lib/_pyio.py b/Lib/_pyio.py --- a/Lib/_pyio.py +++ b/Lib/_pyio.py @@ -16,6 +16,11 @@ import io from io import (__all__, SEEK_SET, SEEK_CUR, SEEK_END) +valid_seek_flags = {0, 1, 2} # Hardwired values +if hasattr(os, 'SEEK_HOLE') : + valid_seek_flags.add(os.SEEK_HOLE) + valid_seek_flags.add(os.SEEK_DATA) + # open() uses st_blksize whenever we can DEFAULT_BUFFER_SIZE = 8 * 1024 # bytes @@ -306,6 +311,7 @@ * 0 -- start of stream (the default); offset should be zero or positive * 1 -- current stream position; offset may be negative * 2 -- end of stream; offset is usually negative + Some operating systems / file systems could provide additional values. Return an int indicating the new absolute position. """ @@ -866,7 +872,7 @@ elif whence == 2: self._pos = max(0, len(self._buffer) + pos) else: - raise ValueError("invalid whence value") + raise ValueError("unsupported whence value") return self._pos def tell(self): @@ -1041,7 +1047,7 @@ return _BufferedIOMixin.tell(self) - len(self._read_buf) + self._read_pos def seek(self, pos, whence=0): - if not (0 <= whence <= 2): + if whence not in valid_seek_flags: raise ValueError("invalid whence value") with self._read_lock: if whence == 1: @@ -1138,8 +1144,8 @@ return _BufferedIOMixin.tell(self) + len(self._write_buf) def seek(self, pos, whence=0): - if not (0 <= whence <= 2): - raise ValueError("invalid whence") + if whence not in valid_seek_flags: + raise ValueError("invalid whence value") with self._write_lock: self._flush_unlocked() return _BufferedIOMixin.seek(self, pos, whence) @@ -1235,8 +1241,8 @@ BufferedWriter.__init__(self, raw, buffer_size, max_buffer_size) def seek(self, pos, whence=0): - if not (0 <= whence <= 2): - raise ValueError("invalid whence") + if whence not in valid_seek_flags: + raise ValueError("invalid whence value") self.flush() if self._read_buf: # Undo read ahead. 
@@ -1852,8 +1858,7 @@ self._decoder.reset() return position if whence != 0: - raise ValueError("invalid whence (%r, should be 0, 1 or 2)" % - (whence,)) + raise ValueError("unsupported whence (%r)" % (whence,)) if cookie < 0: raise ValueError("negative seek position %r" % (cookie,)) self.flush() diff --git a/Lib/os.py b/Lib/os.py --- a/Lib/os.py +++ b/Lib/os.py @@ -121,6 +121,7 @@ # Python uses fixed values for the SEEK_ constants; they are mapped # to native constants if necessary in posixmodule.c +# Other possible SEEK values are directly imported from posixmodule.c SEEK_SET = 0 SEEK_CUR = 1 SEEK_END = 2 diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py --- a/Lib/test/test_io.py +++ b/Lib/test/test_io.py @@ -706,7 +706,7 @@ bufio = self.tp(rawio) # Invalid whence self.assertRaises(ValueError, bufio.seek, 0, -1) - self.assertRaises(ValueError, bufio.seek, 0, 3) + self.assertRaises(ValueError, bufio.seek, 0, 9) def test_override_destructor(self): tp = self.tp diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -1015,6 +1015,26 @@ posix.RTLD_GLOBAL posix.RTLD_LOCAL + @unittest.skipUnless(hasattr(os, 'SEEK_HOLE'), + "test needs an OS that reports file holes") + def test_fs_holes(self) : + # Even if the filesystem doesn't report holes, + # if the OS supports it the SEEK_* constants + # will be defined and will have a consistent + # behaviour: + # os.SEEK_DATA = current position + # os.SEEK_HOLE = end of file position + with open(support.TESTFN, 'r+b') as fp : + fp.write(b"hello") + fp.flush() + size = fp.tell() + fno = fp.fileno() + for i in range(size) : + self.assertEqual(i, os.lseek(fno, i, os.SEEK_DATA)) + self.assertLessEqual(size, os.lseek(fno, i, os.SEEK_HOLE)) + self.assertRaises(OSError, os.lseek, fno, size, os.SEEK_DATA) + self.assertRaises(OSError, os.lseek, fno, size, os.SEEK_HOLE) + class PosixGroupsTester(unittest.TestCase): def setUp(self): diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -935,6 +935,8 @@ - Issue #14259: The finditer() method of re objects did not take any keyword arguments, contrary to the documentation. +- Issue #10142: Support for SEEK_HOLE/SEEK_DATA (for example, under ZFS). + Tests ----- diff --git a/Modules/_io/bufferedio.c b/Modules/_io/bufferedio.c --- a/Modules/_io/bufferedio.c +++ b/Modules/_io/bufferedio.c @@ -1157,9 +1157,20 @@ if (!PyArg_ParseTuple(args, "O|i:seek", &targetobj, &whence)) { return NULL; } - if (whence < 0 || whence > 2) { + + /* Do some error checking instead of trusting OS 'seek()' + ** error detection, just in case. + */ + if ((whence < 0 || whence >2) +#ifdef SEEK_HOLE + && (whence != SEEK_HOLE) +#endif +#ifdef SEEK_DATA + && (whence != SEEK_DATA) +#endif + ) { PyErr_Format(PyExc_ValueError, - "whence must be between 0 and 2, not %d", whence); + "whence value %d unsupported", whence); return NULL; } @@ -1172,7 +1183,11 @@ if (target == -1 && PyErr_Occurred()) return NULL; - if (whence != 2 && self->readable) { + /* SEEK_SET and SEEK_CUR are special because we could seek inside the + buffer. Other whence values must be managed without this optimization. + Some Operating Systems can provide additional values, like + SEEK_HOLE/SEEK_DATA. */ + if (((whence == 0) || (whence == 1)) && self->readable) { Py_off_t current, avail; /* Check if seeking leaves us inside the current buffer, so as to return quickly if possible. 
Also, we needn't take the diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -11292,6 +11292,13 @@ #endif +#ifdef SEEK_HOLE + if (ins(d, "SEEK_HOLE", (long)SEEK_HOLE)) return -1; +#endif +#ifdef SEEK_DATA + if (ins(d, "SEEK_DATA", (long)SEEK_DATA)) return -1; +#endif + /* MS Windows */ #ifdef O_NOINHERIT /* Don't inherit in child processes. */ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 18:48:29 2012 From: python-checkins at python.org (alexander.belopolsky) Date: Fri, 22 Jun 2012 18:48:29 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=237582=3A_Use_ISO_ti?= =?utf8?q?mestamp_in_diff=2Epy?= Message-ID: http://hg.python.org/cpython/rev/ec95b94ea831 changeset: 77572:ec95b94ea831 parent: 77570:336c53c1f547 user: Alexander Belopolsky date: Fri Jun 22 12:46:19 2012 -0400 summary: Issue #7582: Use ISO timestamp in diff.py files: Tools/scripts/diff.py | 16 ++++++++++++---- 1 files changed, 12 insertions(+), 4 deletions(-) diff --git a/Tools/scripts/diff.py b/Tools/scripts/diff.py --- a/Tools/scripts/diff.py +++ b/Tools/scripts/diff.py @@ -9,6 +9,12 @@ """ import sys, os, time, difflib, optparse +from datetime import datetime, timezone + +def file_mtime(path): + t = datetime.fromtimestamp(os.stat(path).st_mtime, + timezone.utc) + return t.astimezone().isoformat() def main(): @@ -30,10 +36,12 @@ n = options.lines fromfile, tofile = args - fromdate = time.ctime(os.stat(fromfile).st_mtime) - todate = time.ctime(os.stat(tofile).st_mtime) - fromlines = open(fromfile, 'U').readlines() - tolines = open(tofile, 'U').readlines() + fromdate = file_mtime(fromfile) + todate = file_mtime(tofile) + with open(fromfile, 'U') as ff: + fromlines = ff.readlines() + with open(tofile, 'U') as tf: + tolines = tf.readlines() if options.u: diff = difflib.unified_diff(fromlines, tolines, fromfile, tofile, fromdate, todate, n=n) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 18:48:30 2012 From: python-checkins at python.org (alexander.belopolsky) Date: Fri, 22 Jun 2012 18:48:30 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_merge?= Message-ID: http://hg.python.org/cpython/rev/4bb933184df4 changeset: 77573:4bb933184df4 parent: 77572:ec95b94ea831 parent: 77571:de2a0cb6ba52 user: Alexander Belopolsky date: Fri Jun 22 12:48:08 2012 -0400 summary: merge files: Doc/library/io.rst | 5 +++++ Doc/library/os.rst | 4 ++++ Lib/_pyio.py | 21 +++++++++++++-------- Lib/os.py | 1 + Lib/test/test_io.py | 2 +- Lib/test/test_posix.py | 20 ++++++++++++++++++++ Misc/NEWS | 2 ++ Modules/_io/bufferedio.c | 21 ++++++++++++++++++--- Modules/posixmodule.c | 7 +++++++ 9 files changed, 71 insertions(+), 12 deletions(-) diff --git a/Doc/library/io.rst b/Doc/library/io.rst --- a/Doc/library/io.rst +++ b/Doc/library/io.rst @@ -291,6 +291,11 @@ .. versionadded:: 3.1 The ``SEEK_*`` constants. + .. versionadded:: 3.3 + Some operating systems could support additional values, like + :data:`os.SEEK_HOLE` or :data:`os.SEEK_DATA`. The valid values + for a file could depend on it being open in text or binary mode. + .. method:: seekable() Return ``True`` if the stream supports random access. If ``False``, diff --git a/Doc/library/os.rst b/Doc/library/os.rst --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -995,6 +995,10 @@ Parameters to the :func:`lseek` function. Their values are 0, 1, and 2, respectively. Availability: Windows, Unix. 
+ .. versionadded:: 3.3 + Some operating systems could support additional values, like + :data:`os.SEEK_HOLE` or :data:`os.SEEK_DATA`. + .. function:: mkdirat(dirfd, path, mode=0o777) diff --git a/Lib/_pyio.py b/Lib/_pyio.py --- a/Lib/_pyio.py +++ b/Lib/_pyio.py @@ -16,6 +16,11 @@ import io from io import (__all__, SEEK_SET, SEEK_CUR, SEEK_END) +valid_seek_flags = {0, 1, 2} # Hardwired values +if hasattr(os, 'SEEK_HOLE') : + valid_seek_flags.add(os.SEEK_HOLE) + valid_seek_flags.add(os.SEEK_DATA) + # open() uses st_blksize whenever we can DEFAULT_BUFFER_SIZE = 8 * 1024 # bytes @@ -306,6 +311,7 @@ * 0 -- start of stream (the default); offset should be zero or positive * 1 -- current stream position; offset may be negative * 2 -- end of stream; offset is usually negative + Some operating systems / file systems could provide additional values. Return an int indicating the new absolute position. """ @@ -866,7 +872,7 @@ elif whence == 2: self._pos = max(0, len(self._buffer) + pos) else: - raise ValueError("invalid whence value") + raise ValueError("unsupported whence value") return self._pos def tell(self): @@ -1041,7 +1047,7 @@ return _BufferedIOMixin.tell(self) - len(self._read_buf) + self._read_pos def seek(self, pos, whence=0): - if not (0 <= whence <= 2): + if whence not in valid_seek_flags: raise ValueError("invalid whence value") with self._read_lock: if whence == 1: @@ -1138,8 +1144,8 @@ return _BufferedIOMixin.tell(self) + len(self._write_buf) def seek(self, pos, whence=0): - if not (0 <= whence <= 2): - raise ValueError("invalid whence") + if whence not in valid_seek_flags: + raise ValueError("invalid whence value") with self._write_lock: self._flush_unlocked() return _BufferedIOMixin.seek(self, pos, whence) @@ -1235,8 +1241,8 @@ BufferedWriter.__init__(self, raw, buffer_size, max_buffer_size) def seek(self, pos, whence=0): - if not (0 <= whence <= 2): - raise ValueError("invalid whence") + if whence not in valid_seek_flags: + raise ValueError("invalid whence value") self.flush() if self._read_buf: # Undo read ahead. 
@@ -1852,8 +1858,7 @@ self._decoder.reset() return position if whence != 0: - raise ValueError("invalid whence (%r, should be 0, 1 or 2)" % - (whence,)) + raise ValueError("unsupported whence (%r)" % (whence,)) if cookie < 0: raise ValueError("negative seek position %r" % (cookie,)) self.flush() diff --git a/Lib/os.py b/Lib/os.py --- a/Lib/os.py +++ b/Lib/os.py @@ -121,6 +121,7 @@ # Python uses fixed values for the SEEK_ constants; they are mapped # to native constants if necessary in posixmodule.c +# Other possible SEEK values are directly imported from posixmodule.c SEEK_SET = 0 SEEK_CUR = 1 SEEK_END = 2 diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py --- a/Lib/test/test_io.py +++ b/Lib/test/test_io.py @@ -706,7 +706,7 @@ bufio = self.tp(rawio) # Invalid whence self.assertRaises(ValueError, bufio.seek, 0, -1) - self.assertRaises(ValueError, bufio.seek, 0, 3) + self.assertRaises(ValueError, bufio.seek, 0, 9) def test_override_destructor(self): tp = self.tp diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -1015,6 +1015,26 @@ posix.RTLD_GLOBAL posix.RTLD_LOCAL + @unittest.skipUnless(hasattr(os, 'SEEK_HOLE'), + "test needs an OS that reports file holes") + def test_fs_holes(self) : + # Even if the filesystem doesn't report holes, + # if the OS supports it the SEEK_* constants + # will be defined and will have a consistent + # behaviour: + # os.SEEK_DATA = current position + # os.SEEK_HOLE = end of file position + with open(support.TESTFN, 'r+b') as fp : + fp.write(b"hello") + fp.flush() + size = fp.tell() + fno = fp.fileno() + for i in range(size) : + self.assertEqual(i, os.lseek(fno, i, os.SEEK_DATA)) + self.assertLessEqual(size, os.lseek(fno, i, os.SEEK_HOLE)) + self.assertRaises(OSError, os.lseek, fno, size, os.SEEK_DATA) + self.assertRaises(OSError, os.lseek, fno, size, os.SEEK_HOLE) + class PosixGroupsTester(unittest.TestCase): def setUp(self): diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -935,6 +935,8 @@ - Issue #14259: The finditer() method of re objects did not take any keyword arguments, contrary to the documentation. +- Issue #10142: Support for SEEK_HOLE/SEEK_DATA (for example, under ZFS). + Tests ----- diff --git a/Modules/_io/bufferedio.c b/Modules/_io/bufferedio.c --- a/Modules/_io/bufferedio.c +++ b/Modules/_io/bufferedio.c @@ -1157,9 +1157,20 @@ if (!PyArg_ParseTuple(args, "O|i:seek", &targetobj, &whence)) { return NULL; } - if (whence < 0 || whence > 2) { + + /* Do some error checking instead of trusting OS 'seek()' + ** error detection, just in case. + */ + if ((whence < 0 || whence >2) +#ifdef SEEK_HOLE + && (whence != SEEK_HOLE) +#endif +#ifdef SEEK_DATA + && (whence != SEEK_DATA) +#endif + ) { PyErr_Format(PyExc_ValueError, - "whence must be between 0 and 2, not %d", whence); + "whence value %d unsupported", whence); return NULL; } @@ -1172,7 +1183,11 @@ if (target == -1 && PyErr_Occurred()) return NULL; - if (whence != 2 && self->readable) { + /* SEEK_SET and SEEK_CUR are special because we could seek inside the + buffer. Other whence values must be managed without this optimization. + Some Operating Systems can provide additional values, like + SEEK_HOLE/SEEK_DATA. */ + if (((whence == 0) || (whence == 1)) && self->readable) { Py_off_t current, avail; /* Check if seeking leaves us inside the current buffer, so as to return quickly if possible. 
Also, we needn't take the diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -11292,6 +11292,13 @@ #endif +#ifdef SEEK_HOLE + if (ins(d, "SEEK_HOLE", (long)SEEK_HOLE)) return -1; +#endif +#ifdef SEEK_DATA + if (ins(d, "SEEK_DATA", (long)SEEK_DATA)) return -1; +#endif + /* MS Windows */ #ifdef O_NOINHERIT /* Don't inherit in child processes. */ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 19:27:48 2012 From: python-checkins at python.org (alexander.belopolsky) Date: Fri, 22 Jun 2012 19:27:48 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=239527=3A_Fixes_for_?= =?utf8?q?platforms_without_tm=5Fzone?= Message-ID: http://hg.python.org/cpython/rev/a7237f157625 changeset: 77574:a7237f157625 user: Alexander Belopolsky date: Fri Jun 22 13:23:21 2012 -0400 summary: Issue #9527: Fixes for platforms without tm_zone files: Modules/_datetimemodule.c | 42 ++++++++++++++------------ 1 files changed, 22 insertions(+), 20 deletions(-) diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -809,14 +809,16 @@ } if (GET_TD_MICROSECONDS(offset) != 0 || GET_TD_SECONDS(offset) % 60 != 0) { PyErr_Format(PyExc_ValueError, "offset must be a timedelta" - " representing a whole number of minutes"); + " representing a whole number of minutes," + " not %R.", offset); return NULL; } if ((GET_TD_DAYS(offset) == -1 && GET_TD_SECONDS(offset) == 0) || GET_TD_DAYS(offset) < -1 || GET_TD_DAYS(offset) >= 1) { PyErr_Format(PyExc_ValueError, "offset must be a timedelta" " strictly between -timedelta(hours=24) and" - " timedelta(hours=24)."); + " timedelta(hours=24)," + " not %R.", offset); return NULL; } @@ -4686,12 +4688,11 @@ } static PyObject * -local_timezone(PyObject *utc_time) +local_timezone(PyDateTime_DateTime *utc_time) { PyObject *result = NULL; struct tm *timep; time_t timestamp; - long offset; PyObject *delta; PyObject *one_second; PyObject *seconds; @@ -4716,21 +4717,22 @@ return NULL; timep = localtime(×tamp); #ifdef HAVE_STRUCT_TM_TM_ZONE - offset = timep->tm_gmtoff; - zone = timep->tm_zone; - delta = new_delta(0, -offset, 0, 0); + { + long offset; + offset = timep->tm_gmtoff; + zone = timep->tm_zone; + delta = new_delta(0, -offset, 0, 0); + } #else /* HAVE_STRUCT_TM_TM_ZONE */ { PyObject *local_time; - Py_INCREF(utc_time->tzinfo); local_time = new_datetime(timep->tm_year + 1900, timep->tm_mon + 1, timep->tm_mday, timep->tm_hour, timep->tm_min, - timep->tm_sec, utc_time->tzinfo); - if (local_time == NULL) { - Py_DECREF(utc_time->tzinfo); + timep->tm_sec, DATE_GET_MICROSECOND(utc_time), + utc_time->tzinfo); + if (local_time == NULL) goto error; - } - delta = datetime_subtract(local_time, utc_time); + delta = datetime_subtract((PyObject*)utc_time, local_time); /* XXX: before relying on tzname, we should compare delta to the offset implied by timezone/altzone */ if (daylight && timep->tm_isdst >= 0) @@ -4755,7 +4757,7 @@ static PyObject * datetime_astimezone(PyDateTime_DateTime *self, PyObject *args, PyObject *kw) { - PyObject *result; + PyDateTime_DateTime *result; PyObject *offset; PyObject *temp; PyObject *tzinfo = Py_None; @@ -4791,14 +4793,14 @@ } /* result = self - offset */ - result = add_datetime_timedelta(self, - (PyDateTime_Delta *)offset, -1); + result = (PyDateTime_DateTime *)add_datetime_timedelta(self, + (PyDateTime_Delta *)offset, -1); Py_DECREF(offset); if (result == NULL) return NULL; /* Attach 
new tzinfo and let fromutc() do the rest. */ - temp = ((PyDateTime_DateTime *)result)->tzinfo; + temp = result->tzinfo; if (tzinfo == Py_None) { tzinfo = local_timezone(result); if (tzinfo == NULL) { @@ -4808,14 +4810,14 @@ } else Py_INCREF(tzinfo); - ((PyDateTime_DateTime *)result)->tzinfo = tzinfo; + result->tzinfo = tzinfo; Py_DECREF(temp); - temp = result; + temp = (PyObject *)result; result = _PyObject_CallMethodId(tzinfo, &PyId_fromutc, "O", temp); Py_DECREF(temp); - return result; + return (PyObject *)result; } static PyObject * -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 20:13:00 2012 From: python-checkins at python.org (alexander.belopolsky) Date: Fri, 22 Jun 2012 20:13:00 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fixed_compiler_warnings_in_?= =?utf8?q?datetime=5Fastimezone=28=29?= Message-ID: http://hg.python.org/cpython/rev/b9a6592c6250 changeset: 77575:b9a6592c6250 user: Alexander Belopolsky date: Fri Jun 22 14:11:58 2012 -0400 summary: Fixed compiler warnings in datetime_astimezone() files: Modules/_datetimemodule.c | 9 +++++---- 1 files changed, 5 insertions(+), 4 deletions(-) diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -4754,7 +4754,7 @@ return result; } -static PyObject * +static PyDateTime_DateTime * datetime_astimezone(PyDateTime_DateTime *self, PyObject *args, PyObject *kw) { PyDateTime_DateTime *result; @@ -4777,7 +4777,7 @@ /* Conversion to self's own time zone is a NOP. */ if (self->tzinfo == tzinfo) { Py_INCREF(self); - return (PyObject *)self; + return self; } /* Convert self to UTC. */ @@ -4814,10 +4814,11 @@ Py_DECREF(temp); temp = (PyObject *)result; - result = _PyObject_CallMethodId(tzinfo, &PyId_fromutc, "O", temp); + result = (PyDateTime_DateTime *) + _PyObject_CallMethodId(tzinfo, &PyId_fromutc, "O", temp); Py_DECREF(temp); - return (PyObject *)result; + return result; } static PyObject * -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 20:44:55 2012 From: python-checkins at python.org (kristjan.jonsson) Date: Fri, 22 Jun 2012 20:44:55 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315124=3A_Optimize_?= =?utf8?q?=5Fthread=2ELockType_deletion_and_acquisition_when?= Message-ID: http://hg.python.org/cpython/rev/dfc7fd24983a changeset: 77576:dfc7fd24983a user: Kristjan Valur Jonsson date: Fri Jun 22 18:40:02 2012 +0000 summary: Issue #15124: Optimize _thread.LockType deletion and acquisition when not contested, similar to what _thread.RLock already has. 
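A minimal sketch (not taken from the patch) of the uncontended fast path this change targets, using only the existing _thread API:

    import _thread

    lock = _thread.allocate_lock()
    lock.acquire()        # uncontended: succeeds on the non-blocking fast path
    assert lock.locked()  # state now tracked by the new 'locked' flag
    lock.release()
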
files: Modules/_threadmodule.c | 38 ++++++++++++---------------- 1 files changed, 17 insertions(+), 21 deletions(-) diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c --- a/Modules/_threadmodule.c +++ b/Modules/_threadmodule.c @@ -23,6 +23,7 @@ PyObject_HEAD PyThread_type_lock lock_lock; PyObject *in_weakreflist; + char locked; /* for sanity checking */ } lockobject; static void @@ -32,9 +33,8 @@ PyObject_ClearWeakRefs((PyObject *) self); if (self->lock_lock != NULL) { /* Unlock the lock so it's safe to free it */ - PyThread_acquire_lock(self->lock_lock, 0); - PyThread_release_lock(self->lock_lock); - + if (self->locked) + PyThread_release_lock(self->lock_lock); PyThread_free_lock(self->lock_lock); } PyObject_Del(self); @@ -62,9 +62,13 @@ do { - Py_BEGIN_ALLOW_THREADS - r = PyThread_acquire_lock_timed(lock, microseconds, 1); - Py_END_ALLOW_THREADS + /* first a simple non-blocking try without releasing the GIL */ + r = PyThread_acquire_lock_timed(lock, 0, 0); + if (r == PY_LOCK_FAILURE && microseconds != 0) { + Py_BEGIN_ALLOW_THREADS + r = PyThread_acquire_lock_timed(lock, microseconds, 1); + Py_END_ALLOW_THREADS + } if (r == PY_LOCK_INTR) { /* Run signal handlers if we were interrupted. Propagate @@ -135,6 +139,8 @@ return NULL; } + if (r == PY_LOCK_ACQUIRED) + self->locked = 1; return PyBool_FromLong(r == PY_LOCK_ACQUIRED); } @@ -153,13 +159,13 @@ lock_PyThread_release_lock(lockobject *self) { /* Sanity check: the lock must be locked */ - if (PyThread_acquire_lock(self->lock_lock, 0)) { - PyThread_release_lock(self->lock_lock); + if (!self->locked) { PyErr_SetString(ThreadError, "release unlocked lock"); return NULL; } PyThread_release_lock(self->lock_lock); + self->locked = 0; Py_INCREF(Py_None); return Py_None; } @@ -175,11 +181,7 @@ static PyObject * lock_locked_lock(lockobject *self) { - if (PyThread_acquire_lock(self->lock_lock, 0)) { - PyThread_release_lock(self->lock_lock); - return PyBool_FromLong(0L); - } - return PyBool_FromLong(1L); + return PyBool_FromLong((long)self->locked); } PyDoc_STRVAR(locked_doc, @@ -313,14 +315,7 @@ self->rlock_count = count; Py_RETURN_TRUE; } - - if (self->rlock_count > 0 || - !PyThread_acquire_lock(self->rlock_lock, 0)) { - if (microseconds == 0) { - Py_RETURN_FALSE; - } - r = acquire_timed(self->rlock_lock, microseconds); - } + r = acquire_timed(self->rlock_lock, microseconds); if (r == PY_LOCK_ACQUIRED) { assert(self->rlock_count == 0); self->rlock_owner = tid; @@ -548,6 +543,7 @@ if (self == NULL) return NULL; self->lock_lock = PyThread_allocate_lock(); + self->locked = 0; self->in_weakreflist = NULL; if (self->lock_lock == NULL) { Py_DECREF(self); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 20:56:56 2012 From: python-checkins at python.org (david.malcolm) Date: Fri, 22 Jun 2012 20:56:56 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314785=3A_Add_sys?= =?utf8?q?=2E=5Fdebugmallocstats=28=29_to_help_debug_low-level_memory?= Message-ID: http://hg.python.org/cpython/rev/d63a80abfbec changeset: 77577:d63a80abfbec user: David Malcolm date: Fri Jun 22 14:55:41 2012 -0400 summary: Issue #14785: Add sys._debugmallocstats() to help debug low-level memory allocation issues files: Doc/library/sys.rst | 16 +++++ Include/dictobject.h | 1 + Include/floatobject.h | 2 + Include/frameobject.h | 2 + Include/listobject.h | 1 + Include/methodobject.h | 5 + Include/object.h | 8 ++ Include/objimpl.h | 4 +- Include/setobject.h | 1 + Include/tupleobject.h | 3 + Lib/test/test_sys.py | 6 ++ 
Misc/NEWS | 3 + Objects/classobject.c | 9 +++ Objects/dictobject.c | 9 +++ Objects/floatobject.c | 10 +++ Objects/frameobject.c | 10 +++ Objects/listobject.c | 9 +++ Objects/methodobject.c | 9 +++ Objects/object.c | 12 ++++ Objects/obmalloc.c | 81 +++++++++++++++++------------ Objects/setobject.c | 10 +++ Objects/tupleobject.c | 16 +++++ Python/pythonrun.c | 2 +- Python/sysmodule.c | 23 ++++++++ 24 files changed, 217 insertions(+), 35 deletions(-) diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -106,6 +106,22 @@ This function should be used for internal and specialized purposes only. +.. function:: _debugmallocstats() + + Print low-level information to stderr about the state of CPython's memory + allocator. + + If Python is configured --with-pydebug, it also performs some expensive + internal consistency checks. + + .. versionadded:: 3.3 + + .. impl-detail:: + + This function is specific to CPython. The exact output format is not + defined here, and may change. + + .. data:: dllhandle Integer specifying the handle of the Python DLL. Availability: Windows. diff --git a/Include/dictobject.h b/Include/dictobject.h --- a/Include/dictobject.h +++ b/Include/dictobject.h @@ -111,6 +111,7 @@ #ifndef Py_LIMITED_API int _PyObjectDict_SetItem(PyTypeObject *tp, PyObject **dictptr, PyObject *name, PyObject *value); PyObject *_PyDict_LoadGlobal(PyDictObject *, PyDictObject *, PyObject *); +PyAPI_FUNC(void) _PyDict_DebugMallocStats(FILE *out); #endif #ifdef __cplusplus diff --git a/Include/floatobject.h b/Include/floatobject.h --- a/Include/floatobject.h +++ b/Include/floatobject.h @@ -110,6 +110,8 @@ /* free list api */ PyAPI_FUNC(int) PyFloat_ClearFreeList(void); +PyAPI_FUNC(void) _PyFloat_DebugMallocStats(FILE* out); + /* Format the object based on the format_spec, as defined in PEP 3101 (Advanced String Formatting). */ PyAPI_FUNC(int) _PyFloat_FormatAdvancedWriter( diff --git a/Include/frameobject.h b/Include/frameobject.h --- a/Include/frameobject.h +++ b/Include/frameobject.h @@ -79,6 +79,8 @@ PyAPI_FUNC(int) PyFrame_ClearFreeList(void); +PyAPI_FUNC(void) _PyFrame_DebugMallocStats(FILE *out); + /* Return the line of code the frame is currently executing. 
*/ PyAPI_FUNC(int) PyFrame_GetLineNumber(PyFrameObject *); diff --git a/Include/listobject.h b/Include/listobject.h --- a/Include/listobject.h +++ b/Include/listobject.h @@ -64,6 +64,7 @@ PyAPI_FUNC(PyObject *) _PyList_Extend(PyListObject *, PyObject *); PyAPI_FUNC(int) PyList_ClearFreeList(void); +PyAPI_FUNC(void) _PyList_DebugMallocStats(FILE *out); #endif /* Macro, trading safety for speed */ diff --git a/Include/methodobject.h b/Include/methodobject.h --- a/Include/methodobject.h +++ b/Include/methodobject.h @@ -82,6 +82,11 @@ PyAPI_FUNC(int) PyCFunction_ClearFreeList(void); +#ifndef Py_LIMITED_API +PyAPI_FUNC(void) _PyCFunction_DebugMallocStats(FILE *out); +PyAPI_FUNC(void) _PyMethod_DebugMallocStats(FILE *out); +#endif + #ifdef __cplusplus } #endif diff --git a/Include/object.h b/Include/object.h --- a/Include/object.h +++ b/Include/object.h @@ -977,6 +977,14 @@ else \ _PyTrash_deposit_object((PyObject*)op); +#ifndef Py_LIMITED_API +PyAPI_FUNC(void) +_PyDebugAllocatorStats(FILE *out, const char *block_name, int num_blocks, + size_t sizeof_block); +PyAPI_FUNC(void) +_PyObject_DebugTypeStats(FILE *out); +#endif /* ifndef Py_LIMITED_API */ + #ifdef __cplusplus } #endif diff --git a/Include/objimpl.h b/Include/objimpl.h --- a/Include/objimpl.h +++ b/Include/objimpl.h @@ -101,13 +101,15 @@ /* Macros */ #ifdef WITH_PYMALLOC +#ifndef Py_LIMITED_API +PyAPI_FUNC(void) _PyObject_DebugMallocStats(FILE *out); +#endif /* #ifndef Py_LIMITED_API */ #ifdef PYMALLOC_DEBUG /* WITH_PYMALLOC && PYMALLOC_DEBUG */ PyAPI_FUNC(void *) _PyObject_DebugMalloc(size_t nbytes); PyAPI_FUNC(void *) _PyObject_DebugRealloc(void *p, size_t nbytes); PyAPI_FUNC(void) _PyObject_DebugFree(void *p); PyAPI_FUNC(void) _PyObject_DebugDumpAddress(const void *p); PyAPI_FUNC(void) _PyObject_DebugCheckAddress(const void *p); -PyAPI_FUNC(void) _PyObject_DebugMallocStats(void); PyAPI_FUNC(void *) _PyObject_DebugMallocApi(char api, size_t nbytes); PyAPI_FUNC(void *) _PyObject_DebugReallocApi(char api, void *p, size_t nbytes); PyAPI_FUNC(void) _PyObject_DebugFreeApi(char api, void *p); diff --git a/Include/setobject.h b/Include/setobject.h --- a/Include/setobject.h +++ b/Include/setobject.h @@ -101,6 +101,7 @@ PyAPI_FUNC(int) _PySet_Update(PyObject *set, PyObject *iterable); PyAPI_FUNC(int) PySet_ClearFreeList(void); +PyAPI_FUNC(void) _PySet_DebugMallocStats(FILE *out); #endif #ifdef __cplusplus diff --git a/Include/tupleobject.h b/Include/tupleobject.h --- a/Include/tupleobject.h +++ b/Include/tupleobject.h @@ -63,6 +63,9 @@ #endif PyAPI_FUNC(int) PyTuple_ClearFreeList(void); +#ifndef Py_LIMITED_API +PyAPI_FUNC(void) _PyTuple_DebugMallocStats(FILE *out); +#endif /* Py_LIMITED_API */ #ifdef __cplusplus } diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -603,6 +603,12 @@ self.assertEqual(sys.implementation.name, sys.implementation.name.lower()) + def test_debugmallocstats(self): + # Test sys._debugmallocstats() + from test.script_helper import assert_python_ok + args = ['-c', 'import sys; sys._debugmallocstats()'] + ret, out, err = assert_python_ok(*args) + self.assertIn(b"free PyDictObjects", err) class SizeofTest(unittest.TestCase): diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -147,6 +147,9 @@ - Issue #14963: Convert contextlib.ExitStack.__exit__ to use an iterative algorithm (Patch by Alon Horev) +- Issue #14785: Add sys._debugmallocstats() to help debug low-level memory + allocation issues + C-API ----- diff --git a/Objects/classobject.c 
b/Objects/classobject.c --- a/Objects/classobject.c +++ b/Objects/classobject.c @@ -400,6 +400,15 @@ (void)PyMethod_ClearFreeList(); } +/* Print summary info about the state of the optimized allocator */ +void +_PyMethod_DebugMallocStats(FILE *out) +{ + _PyDebugAllocatorStats(out, + "free PyMethodObject", + numfree, sizeof(PyMethodObject)); +} + /* ------------------------------------------------------------------------ * instance method */ diff --git a/Objects/dictobject.c b/Objects/dictobject.c --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -255,6 +255,15 @@ return ret; } +/* Print summary info about the state of the optimized allocator */ +void +_PyDict_DebugMallocStats(FILE *out) +{ + _PyDebugAllocatorStats(out, + "free PyDictObject", numfree, sizeof(PyDictObject)); +} + + void PyDict_Fini(void) { diff --git a/Objects/floatobject.c b/Objects/floatobject.c --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -1933,6 +1933,16 @@ (void)PyFloat_ClearFreeList(); } +/* Print summary info about the state of the optimized allocator */ +void +_PyFloat_DebugMallocStats(FILE *out) +{ + _PyDebugAllocatorStats(out, + "free PyFloatObject", + numfree, sizeof(PyFloatObject)); +} + + /*---------------------------------------------------------------------------- * _PyFloat_{Pack,Unpack}{4,8}. See floatobject.h. */ diff --git a/Objects/frameobject.c b/Objects/frameobject.c --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -955,3 +955,13 @@ Py_XDECREF(builtin_object); builtin_object = NULL; } + +/* Print summary info about the state of the optimized allocator */ +void +_PyFrame_DebugMallocStats(FILE *out) +{ + _PyDebugAllocatorStats(out, + "free PyFrameObject", + numfree, sizeof(PyFrameObject)); +} + diff --git a/Objects/listobject.c b/Objects/listobject.c --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -117,6 +117,15 @@ PyList_ClearFreeList(); } +/* Print summary info about the state of the optimized allocator */ +void +_PyList_DebugMallocStats(FILE *out) +{ + _PyDebugAllocatorStats(out, + "free PyListObject", + numfree, sizeof(PyListObject)); +} + PyObject * PyList_New(Py_ssize_t size) { diff --git a/Objects/methodobject.c b/Objects/methodobject.c --- a/Objects/methodobject.c +++ b/Objects/methodobject.c @@ -338,6 +338,15 @@ (void)PyCFunction_ClearFreeList(); } +/* Print summary info about the state of the optimized allocator */ +void +_PyCFunction_DebugMallocStats(FILE *out) +{ + _PyDebugAllocatorStats(out, + "free PyCFunction", + numfree, sizeof(PyCFunction)); +} + /* PyCFunction_New() is now just a macro that calls PyCFunction_NewEx(), but it's part of the API so we need to keep a function around that existing C extensions can call. diff --git a/Objects/object.c b/Objects/object.c --- a/Objects/object.c +++ b/Objects/object.c @@ -1852,6 +1852,18 @@ PyMem_FREE(p); } +void +_PyObject_DebugTypeStats(FILE *out) +{ + _PyCFunction_DebugMallocStats(out); + _PyDict_DebugMallocStats(out); + _PyFloat_DebugMallocStats(out); + _PyFrame_DebugMallocStats(out); + _PyList_DebugMallocStats(out); + _PyMethod_DebugMallocStats(out); + _PySet_DebugMallocStats(out); + _PyTuple_DebugMallocStats(out); +} /* These methods are used to control infinite recursion in repr, str, print, etc. Container objects that may recursively contain themselves, diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c --- a/Objects/obmalloc.c +++ b/Objects/obmalloc.c @@ -523,12 +523,10 @@ /* Number of arenas allocated that haven't been free()'d. 
*/ static size_t narenas_currently_allocated = 0; -#ifdef PYMALLOC_DEBUG /* Total number of times malloc() called to allocate an arena. */ static size_t ntimes_arena_allocated = 0; /* High water mark (max value ever seen) for narenas_currently_allocated. */ static size_t narenas_highwater = 0; -#endif /* Allocate a new arena. If we run out of memory, return NULL. Else * allocate a new arena, and return the address of an arena_object @@ -545,7 +543,7 @@ #ifdef PYMALLOC_DEBUG if (Py_GETENV("PYTHONMALLOCSTATS")) - _PyObject_DebugMallocStats(); + _PyObject_DebugMallocStats(stderr); #endif if (unused_arena_objects == NULL) { uint i; @@ -613,11 +611,9 @@ arenaobj->address = (uptr)address; ++narenas_currently_allocated; -#ifdef PYMALLOC_DEBUG ++ntimes_arena_allocated; if (narenas_currently_allocated > narenas_highwater) narenas_highwater = narenas_currently_allocated; -#endif arenaobj->freepools = NULL; /* pool_address <- first pool-aligned address in the arena nfreepools <- number of whole pools that fit after alignment */ @@ -1723,17 +1719,19 @@ } } +#endif /* PYMALLOC_DEBUG */ + static size_t -printone(const char* msg, size_t value) +printone(FILE *out, const char* msg, size_t value) { int i, k; char buf[100]; size_t origvalue = value; - fputs(msg, stderr); + fputs(msg, out); for (i = (int)strlen(msg); i < 35; ++i) - fputc(' ', stderr); - fputc('=', stderr); + fputc(' ', out); + fputc('=', out); /* Write the value with commas. */ i = 22; @@ -1754,17 +1752,33 @@ while (i >= 0) buf[i--] = ' '; - fputs(buf, stderr); + fputs(buf, out); return origvalue; } -/* Print summary info to stderr about the state of pymalloc's structures. +void +_PyDebugAllocatorStats(FILE *out, + const char *block_name, int num_blocks, size_t sizeof_block) +{ + char buf1[128]; + char buf2[128]; + PyOS_snprintf(buf1, sizeof(buf1), + "%d %ss * %zd bytes each", + num_blocks, block_name, sizeof_block); + PyOS_snprintf(buf2, sizeof(buf2), + "%48s ", buf1); + (void)printone(out, buf2, num_blocks * sizeof_block); +} + +#ifdef WITH_PYMALLOC + +/* Print summary info to "out" about the state of pymalloc's structures. * In Py_DEBUG mode, also perform some expensive internal consistency * checks. 
*/ void -_PyObject_DebugMallocStats(void) +_PyObject_DebugMallocStats(FILE *out) { uint i; const uint numclasses = SMALL_REQUEST_THRESHOLD >> ALIGNMENT_SHIFT; @@ -1793,7 +1807,7 @@ size_t total; char buf[128]; - fprintf(stderr, "Small block threshold = %d, in %u size classes.\n", + fprintf(out, "Small block threshold = %d, in %u size classes.\n", SMALL_REQUEST_THRESHOLD, numclasses); for (i = 0; i < numclasses; ++i) @@ -1847,10 +1861,10 @@ } assert(narenas == narenas_currently_allocated); - fputc('\n', stderr); + fputc('\n', out); fputs("class size num pools blocks in use avail blocks\n" "----- ---- --------- ------------- ------------\n", - stderr); + out); for (i = 0; i < numclasses; ++i) { size_t p = numpools[i]; @@ -1861,7 +1875,7 @@ assert(b == 0 && f == 0); continue; } - fprintf(stderr, "%5u %6u " + fprintf(out, "%5u %6u " "%11" PY_FORMAT_SIZE_T "u " "%15" PY_FORMAT_SIZE_T "u " "%13" PY_FORMAT_SIZE_T "u\n", @@ -1871,35 +1885,36 @@ pool_header_bytes += p * POOL_OVERHEAD; quantization += p * ((POOL_SIZE - POOL_OVERHEAD) % size); } - fputc('\n', stderr); - (void)printone("# times object malloc called", serialno); - - (void)printone("# arenas allocated total", ntimes_arena_allocated); - (void)printone("# arenas reclaimed", ntimes_arena_allocated - narenas); - (void)printone("# arenas highwater mark", narenas_highwater); - (void)printone("# arenas allocated current", narenas); + fputc('\n', out); +#ifdef PYMALLOC_DEBUG + (void)printone(out, "# times object malloc called", serialno); +#endif + (void)printone(out, "# arenas allocated total", ntimes_arena_allocated); + (void)printone(out, "# arenas reclaimed", ntimes_arena_allocated - narenas); + (void)printone(out, "# arenas highwater mark", narenas_highwater); + (void)printone(out, "# arenas allocated current", narenas); PyOS_snprintf(buf, sizeof(buf), "%" PY_FORMAT_SIZE_T "u arenas * %d bytes/arena", narenas, ARENA_SIZE); - (void)printone(buf, narenas * ARENA_SIZE); + (void)printone(out, buf, narenas * ARENA_SIZE); - fputc('\n', stderr); + fputc('\n', out); - total = printone("# bytes in allocated blocks", allocated_bytes); - total += printone("# bytes in available blocks", available_bytes); + total = printone(out, "# bytes in allocated blocks", allocated_bytes); + total += printone(out, "# bytes in available blocks", available_bytes); PyOS_snprintf(buf, sizeof(buf), "%u unused pools * %d bytes", numfreepools, POOL_SIZE); - total += printone(buf, (size_t)numfreepools * POOL_SIZE); + total += printone(out, buf, (size_t)numfreepools * POOL_SIZE); - total += printone("# bytes lost to pool headers", pool_header_bytes); - total += printone("# bytes lost to quantization", quantization); - total += printone("# bytes lost to arena alignment", arena_alignment); - (void)printone("Total", total); + total += printone(out, "# bytes lost to pool headers", pool_header_bytes); + total += printone(out, "# bytes lost to quantization", quantization); + total += printone(out, "# bytes lost to arena alignment", arena_alignment); + (void)printone(out, "Total", total); } -#endif /* PYMALLOC_DEBUG */ +#endif /* #ifdef WITH_PYMALLOC */ #ifdef Py_USING_MEMORY_DEBUGGER /* Make this function last so gcc won't inline it since the definition is diff --git a/Objects/setobject.c b/Objects/setobject.c --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -1133,6 +1133,16 @@ Py_CLEAR(emptyfrozenset); } +/* Print summary info about the state of the optimized allocator */ +void +_PySet_DebugMallocStats(FILE *out) +{ + _PyDebugAllocatorStats(out, + "free PySetObject", + 
numfree, sizeof(PySetObject)); +} + + static PyObject * set_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -45,6 +45,22 @@ } #endif +/* Print summary info about the state of the optimized allocator */ +void +_PyTuple_DebugMallocStats(FILE *out) +{ +#if PyTuple_MAXSAVESIZE > 0 + int i; + char buf[128]; + for (i = 1; i < PyTuple_MAXSAVESIZE; i++) { + PyOS_snprintf(buf, sizeof(buf), + "free %d-sized PyTupleObject", i); + _PyDebugAllocatorStats(out, + buf, + numfree[i], _PyObject_VAR_SIZE(&PyTuple_Type, i)); + } +#endif +} PyObject * PyTuple_New(register Py_ssize_t size) diff --git a/Python/pythonrun.c b/Python/pythonrun.c --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -642,7 +642,7 @@ #endif /* Py_TRACE_REFS */ #ifdef PYMALLOC_DEBUG if (Py_GETENV("PYTHONMALLOCSTATS")) - _PyObject_DebugMallocStats(); + _PyObject_DebugMallocStats(stderr); #endif call_ll_exitfuncs(); diff --git a/Python/sysmodule.c b/Python/sysmodule.c --- a/Python/sysmodule.c +++ b/Python/sysmodule.c @@ -997,6 +997,27 @@ extern "C" { #endif +static PyObject * +sys_debugmallocstats(PyObject *self, PyObject *args) +{ +#ifdef WITH_PYMALLOC + _PyObject_DebugMallocStats(stderr); + fputc('\n', stderr); +#endif + _PyObject_DebugTypeStats(stderr); + + Py_RETURN_NONE; +} +PyDoc_STRVAR(debugmallocstats_doc, +"_debugmallocstats()\n\ +\n\ +Print summary info to stderr about the state of\n\ +pymalloc's structures.\n\ +\n\ +In Py_DEBUG mode, also perform some expensive internal consistency\n\ +checks.\n\ +"); + #ifdef Py_TRACE_REFS /* Defined in objects.c because it uses static globals if that file */ extern PyObject *_Py_GetObjects(PyObject *, PyObject *); @@ -1093,6 +1114,8 @@ {"settrace", sys_settrace, METH_O, settrace_doc}, {"gettrace", sys_gettrace, METH_NOARGS, gettrace_doc}, {"call_tracing", sys_call_tracing, METH_VARARGS, call_tracing_doc}, + {"_debugmallocstats", sys_debugmallocstats, METH_VARARGS, + debugmallocstats_doc}, {NULL, NULL} /* sentinel */ }; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 21:17:08 2012 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 22 Jun 2012 21:17:08 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314837=3A_SSL_error?= =?utf8?q?s_now_have_=60library=60_and_=60reason=60_attributes_describing?= Message-ID: http://hg.python.org/cpython/rev/96513d71e650 changeset: 77578:96513d71e650 parent: 77574:a7237f157625 user: Antoine Pitrou date: Fri Jun 22 21:11:52 2012 +0200 summary: Issue #14837: SSL errors now have `library` and `reason` attributes describing precisely what happened and in which OpenSSL submodule. The str() of a SSLError is also enhanced accordingly. NOTE: this commit creates a reference leak. The leak seems tied to the use of PyType_FromSpec() to create the SSLError type. The leak is on the type object when it is instantiated: >>> e = ssl.SSLError() >>> sys.getrefcount(ssl.SSLError) 35 >>> e = ssl.SSLError() >>> sys.getrefcount(ssl.SSLError) 36 >>> e = ssl.SSLError() >>> sys.getrefcount(ssl.SSLError) 37 files: Doc/library/ssl.rst | 16 + Lib/test/test_ssl.py | 45 +++++- Misc/NEWS | 4 + Modules/_ssl.c | 262 +++++++++++++++++++++++------- 4 files changed, 262 insertions(+), 65 deletions(-) diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst --- a/Doc/library/ssl.rst +++ b/Doc/library/ssl.rst @@ -59,6 +59,22 @@ .. 
versionchanged:: 3.3 :exc:`SSLError` used to be a subtype of :exc:`socket.error`. + .. attribute:: library + + A string mnemonic designating the OpenSSL submodule in which the error + occurred, such as ``SSL``, ``PEM`` or ``X509``. The range of possible + values depends on the OpenSSL version. + + .. versionadded:: 3.3 + + .. attribute:: reason + + A string mnemonic designating the reason this error occurred, for + example ``CERTIFICATE_VERIFY_FAILED``. The range of possible + values depends on the OpenSSL version. + + .. versionadded:: 3.3 + .. exception:: SSLZeroReturnError A subclass of :exc:`SSLError` raised when trying to read or write and diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py --- a/Lib/test/test_ssl.py +++ b/Lib/test/test_ssl.py @@ -552,7 +552,7 @@ with self.assertRaises(FileNotFoundError) as cm: ctx.load_dh_params(WRONGCERT) self.assertEqual(cm.exception.errno, errno.ENOENT) - with self.assertRaisesRegex(ssl.SSLError, "PEM routines"): + with self.assertRaises(ssl.SSLError) as cm: ctx.load_dh_params(CERTFILE) @skip_if_broken_ubuntu_ssl @@ -590,6 +590,47 @@ self.assertRaises(ValueError, ctx.set_ecdh_curve, b"foo") +class SSLErrorTests(unittest.TestCase): + + def test_str(self): + # The str() of a SSLError doesn't include the errno + e = ssl.SSLError(1, "foo") + self.assertEqual(str(e), "foo") + self.assertEqual(e.errno, 1) + # Same for a subclass + e = ssl.SSLZeroReturnError(1, "foo") + self.assertEqual(str(e), "foo") + self.assertEqual(e.errno, 1) + + def test_lib_reason(self): + # Test the library and reason attributes + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + with self.assertRaises(ssl.SSLError) as cm: + ctx.load_dh_params(CERTFILE) + self.assertEqual(cm.exception.library, 'PEM') + self.assertEqual(cm.exception.reason, 'NO_START_LINE') + s = str(cm.exception) + self.assertTrue(s.startswith("[PEM: NO_START_LINE] no start line"), s) + + def test_subclass(self): + # Check that the appropriate SSLError subclass is raised + # (this only tests one of them) + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + with socket.socket() as s: + s.bind(("127.0.0.1", 0)) + s.listen(5) + with socket.socket() as c: + c.connect(s.getsockname()) + c.setblocking(False) + c = ctx.wrap_socket(c, False, do_handshake_on_connect=False) + with self.assertRaises(ssl.SSLWantReadError) as cm: + c.do_handshake() + s = str(cm.exception) + self.assertTrue(s.startswith("The operation did not complete (read)"), s) + # For compatibility + self.assertEqual(cm.exception.errno, ssl.SSL_ERROR_WANT_READ) + + class NetworkedTests(unittest.TestCase): def test_connect(self): @@ -1931,7 +1972,7 @@ if not os.path.exists(filename): raise support.TestFailed("Can't read certificate file %r" % filename) - tests = [ContextTests, BasicSocketTests] + tests = [ContextTests, BasicSocketTests, SSLErrorTests] if support.is_resource_enabled('network'): tests.append(NetworkedTests) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -40,6 +40,10 @@ Library ------- +- Issue #14837: SSL errors now have ``library`` and ``reason`` attributes + describing precisely what happened and in which OpenSSL submodule. The + str() of a SSLError is also enhanced accordingly. + - Issue #9527: datetime.astimezone() method will now supply a class timezone instance corresponding to the system local timezone when called with no arguments. 
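A brief sketch (not part of the patch) of the new attributes in action, mirroring the PEM failure exercised in the test above; the file name is hypothetical:

    import ssl

    ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
    try:
        ctx.load_dh_params("cert.pem")    # a certificate, not DH parameters
    except ssl.SSLError as e:
        print(e.library, e.reason)        # e.g. PEM NO_START_LINE
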
diff --git a/Modules/_ssl.c b/Modules/_ssl.c --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -76,6 +76,16 @@ PY_SSL_VERSION_TLS1 }; +struct py_ssl_error_code { + const char *mnemonic; + int library, reason; +}; + +struct py_ssl_library_code { + const char *library; + int code; +}; + /* Include symbols from _socket module */ #include "socketmodule.h" @@ -97,6 +107,9 @@ #include "openssl/err.h" #include "openssl/rand.h" +/* Include generated data (error codes) */ +#include "_ssl_data.h" + /* SSL error object */ static PyObject *PySSLErrorObject; static PyObject *PySSLZeroReturnErrorObject; @@ -105,6 +118,11 @@ static PyObject *PySSLSyscallErrorObject; static PyObject *PySSLEOFErrorObject; +/* Error mappings */ +static PyObject *err_codes_to_names; +static PyObject *err_names_to_codes; +static PyObject *lib_codes_to_names; + #ifdef WITH_THREAD /* serves as a flag to see whether we've initialized the SSL thread support. */ @@ -202,22 +220,134 @@ #define ERRSTR1(x,y,z) (x ":" y ": " z) #define ERRSTR(x) ERRSTR1("_ssl.c", STRINGIFY2(__LINE__), x) -/* XXX It might be helpful to augment the error message generated - below with the name of the SSL function that generated the error. - I expect it's obvious most of the time. -*/ + +/* + * SSL errors. + */ + +PyDoc_STRVAR(SSLError_doc, +"An error occurred in the SSL implementation."); + +PyDoc_STRVAR(SSLZeroReturnError_doc, +"SSL/TLS session closed cleanly."); + +PyDoc_STRVAR(SSLWantReadError_doc, +"Non-blocking SSL socket needs to read more data\n" +"before the requested operation can be completed."); + +PyDoc_STRVAR(SSLWantWriteError_doc, +"Non-blocking SSL socket needs to write more data\n" +"before the requested operation can be completed."); + +PyDoc_STRVAR(SSLSyscallError_doc, +"System error when attempting SSL operation."); + +PyDoc_STRVAR(SSLEOFError_doc, +"SSL/TLS connection terminated abruptly."); + +static PyObject * +SSLError_str(PyOSErrorObject *self) +{ + if (self->strerror != NULL && PyUnicode_Check(self->strerror)) { + Py_INCREF(self->strerror); + return self->strerror; + } + else + return PyObject_Str(self->args); +} + +static PyType_Slot sslerror_type_slots[] = { + {Py_tp_base, NULL}, /* Filled out in module init as it's not a constant */ + {Py_tp_doc, SSLError_doc}, + {Py_tp_str, SSLError_str}, + {0, 0}, +}; + +static PyType_Spec sslerror_type_spec = { + "ssl.SSLError", + sizeof(PyOSErrorObject), + 0, + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, + sslerror_type_slots +}; + +static void +fill_and_set_sslerror(PyObject *type, int ssl_errno, const char *errstr, + int lineno, unsigned long errcode) +{ + PyObject *err_value = NULL, *reason_obj = NULL, *lib_obj = NULL; + PyObject *init_value, *msg, *key; + _Py_IDENTIFIER(reason); + _Py_IDENTIFIER(library); + + if (errcode != 0) { + int lib, reason; + + lib = ERR_GET_LIB(errcode); + reason = ERR_GET_REASON(errcode); + key = Py_BuildValue("ii", lib, reason); + if (key == NULL) + goto fail; + reason_obj = PyDict_GetItem(err_codes_to_names, key); + Py_DECREF(key); + if (reason_obj == NULL) { + /* XXX if reason < 100, it might reflect a library number (!!) 
*/ + PyErr_Clear(); + } + key = PyLong_FromLong(lib); + if (key == NULL) + goto fail; + lib_obj = PyDict_GetItem(lib_codes_to_names, key); + Py_DECREF(key); + if (lib_obj == NULL) { + PyErr_Clear(); + } + if (errstr == NULL) + errstr = ERR_reason_error_string(errcode); + } + if (errstr == NULL) + errstr = "unknown error"; + + if (reason_obj && lib_obj) + msg = PyUnicode_FromFormat("[%S: %S] %s (_ssl.c:%d)", + lib_obj, reason_obj, errstr, lineno); + else if (lib_obj) + msg = PyUnicode_FromFormat("[%S] %s (_ssl.c:%d)", + lib_obj, errstr, lineno); + else + msg = PyUnicode_FromFormat("%s (_ssl.c:%d)", errstr, lineno); + + if (msg == NULL) + goto fail; + init_value = Py_BuildValue("iN", ssl_errno, msg); + err_value = PyObject_CallObject(type, init_value); + Py_DECREF(init_value); + if (err_value == NULL) + goto fail; + if (reason_obj == NULL) + reason_obj = Py_None; + if (_PyObject_SetAttrId(err_value, &PyId_reason, reason_obj)) + goto fail; + if (lib_obj == NULL) + lib_obj = Py_None; + if (_PyObject_SetAttrId(err_value, &PyId_library, lib_obj)) + goto fail; + PyErr_SetObject(type, err_value); +fail: + Py_XDECREF(err_value); +} static PyObject * PySSL_SetError(PySSLSocket *obj, int ret, char *filename, int lineno) { - PyObject *v; PyObject *type = PySSLErrorObject; - char buf[2048]; - char *errstr; + char *errstr = NULL; int err; enum py_ssl_error p = PY_SSL_ERROR_NONE; + unsigned long e = 0; assert(ret <= 0); + e = ERR_peek_last_error(); if (obj->ssl != NULL) { err = SSL_get_error(obj->ssl, ret); @@ -248,7 +378,6 @@ break; case SSL_ERROR_SYSCALL: { - unsigned long e = ERR_get_error(); if (e == 0) { PySocketSockObject *s = (PySocketSockObject *) PyWeakref_GetObject(obj->Socket); @@ -260,9 +389,9 @@ /* underlying BIO reported an I/O error */ Py_INCREF(s); ERR_clear_error(); - v = s->errorhandler(); + s->errorhandler(); Py_DECREF(s); - return v; + return NULL; } else { /* possible? */ p = PY_SSL_ERROR_SYSCALL; type = PySSLSyscallErrorObject; @@ -270,60 +399,43 @@ } } else { p = PY_SSL_ERROR_SYSCALL; - /* XXX Protected by global interpreter lock */ - errstr = ERR_error_string(e, NULL); } break; } case SSL_ERROR_SSL: { - unsigned long e = ERR_get_error(); p = PY_SSL_ERROR_SSL; - if (e != 0) - /* XXX Protected by global interpreter lock */ - errstr = ERR_error_string(e, NULL); - else { /* possible? */ + if (e == 0) + /* possible? 
*/ errstr = "A failure in the SSL library occurred"; - } break; } default: p = PY_SSL_ERROR_INVALID_ERROR_CODE; errstr = "Invalid error code"; } - } else { - errstr = ERR_error_string(ERR_peek_last_error(), NULL); } - PyOS_snprintf(buf, sizeof(buf), "_ssl.c:%d: %s", lineno, errstr); + fill_and_set_sslerror(type, p, errstr, lineno, e); ERR_clear_error(); - v = Py_BuildValue("(is)", p, buf); - if (v != NULL) { - PyErr_SetObject(type, v); - Py_DECREF(v); - } return NULL; } static PyObject * _setSSLError (char *errstr, int errcode, char *filename, int lineno) { - char buf[2048]; - PyObject *v; - - if (errstr == NULL) { + if (errstr == NULL) errcode = ERR_peek_last_error(); - errstr = ERR_error_string(errcode, NULL); - } - PyOS_snprintf(buf, sizeof(buf), "_ssl.c:%d: %s", lineno, errstr); + else + errcode = 0; + fill_and_set_sslerror(PySSLErrorObject, errcode, errstr, lineno, errcode); ERR_clear_error(); - v = Py_BuildValue("(is)", errcode, buf); - if (v != NULL) { - PyErr_SetObject(PySSLErrorObject, v); - Py_DECREF(v); - } return NULL; } +/* + * SSL objects + */ + static PySSLSocket * newPySSLSocket(SSL_CTX *ctx, PySocketSockObject *sock, enum py_ssl_server_or_client socket_type, @@ -2520,27 +2632,6 @@ *major = libver & 0xFF; } -PyDoc_STRVAR(SSLError_doc, -"An error occurred in the SSL implementation."); - -PyDoc_STRVAR(SSLZeroReturnError_doc, -"SSL/TLS session closed cleanly."); - -PyDoc_STRVAR(SSLWantReadError_doc, -"Non-blocking SSL socket needs to read more data\n" -"before the requested operation can be completed."); - -PyDoc_STRVAR(SSLWantWriteError_doc, -"Non-blocking SSL socket needs to write more data\n" -"before the requested operation can be completed."); - -PyDoc_STRVAR(SSLSyscallError_doc, -"System error when attempting SSL operation."); - -PyDoc_STRVAR(SSLEOFError_doc, -"SSL/TLS connection terminated abruptly."); - - PyMODINIT_FUNC PyInit__ssl(void) { @@ -2548,6 +2639,8 @@ unsigned long libver; unsigned int major, minor, fix, patch, status; PySocketModule_APIObject *socket_api; + struct py_ssl_error_code *errcode; + struct py_ssl_library_code *libcode; if (PyType_Ready(&PySSLContext_Type) < 0) return NULL; @@ -2577,12 +2670,11 @@ OpenSSL_add_all_algorithms(); /* Add symbols to module dict */ - PySSLErrorObject = PyErr_NewExceptionWithDoc("ssl.SSLError", - SSLError_doc, - PyExc_OSError, - NULL); + sslerror_type_slots[0].pfunc = PyExc_OSError; + PySSLErrorObject = PyType_FromSpec(&sslerror_type_spec); if (PySSLErrorObject == NULL) return NULL; + PySSLZeroReturnErrorObject = PyErr_NewExceptionWithDoc( "ssl.SSLZeroReturnError", SSLZeroReturnError_doc, PySSLErrorObject, NULL); @@ -2705,6 +2797,50 @@ Py_INCREF(r); PyModule_AddObject(m, "HAS_NPN", r); + /* Mappings for error codes */ + err_codes_to_names = PyDict_New(); + err_names_to_codes = PyDict_New(); + if (err_codes_to_names == NULL || err_names_to_codes == NULL) + return NULL; + errcode = error_codes; + while (errcode->mnemonic != NULL) { + PyObject *mnemo, *key; + mnemo = PyUnicode_FromString(errcode->mnemonic); + key = Py_BuildValue("ii", errcode->library, errcode->reason); + if (mnemo == NULL || key == NULL) + return NULL; + if (PyDict_SetItem(err_codes_to_names, key, mnemo)) + return NULL; + if (PyDict_SetItem(err_names_to_codes, mnemo, key)) + return NULL; + Py_DECREF(key); + Py_DECREF(mnemo); + errcode++; + } + if (PyModule_AddObject(m, "err_codes_to_names", err_codes_to_names)) + return NULL; + if (PyModule_AddObject(m, "err_names_to_codes", err_names_to_codes)) + return NULL; + + lib_codes_to_names = PyDict_New(); + if 
(lib_codes_to_names == NULL) + return NULL; + libcode = library_codes; + while (libcode->library != NULL) { + PyObject *mnemo, *key; + key = PyLong_FromLong(libcode->code); + mnemo = PyUnicode_FromString(libcode->library); + if (key == NULL || mnemo == NULL) + return NULL; + if (PyDict_SetItem(lib_codes_to_names, key, mnemo)) + return NULL; + Py_DECREF(key); + Py_DECREF(mnemo); + libcode++; + } + if (PyModule_AddObject(m, "lib_codes_to_names", lib_codes_to_names)) + return NULL; + /* OpenSSL version */ /* SSLeay() gives us the version of the library linked against, which could be different from the headers version. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 21:17:09 2012 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 22 Jun 2012 21:17:09 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Add_forgotten_files_for_=23?= =?utf8?q?14837=2E?= Message-ID: http://hg.python.org/cpython/rev/e193fe3d017e changeset: 77579:e193fe3d017e user: Antoine Pitrou date: Fri Jun 22 21:12:59 2012 +0200 summary: Add forgotten files for #14837. files: Modules/_ssl_data.h | 1653 ++++++++++++++++++++++++ Tools/ssl/make_ssl_data.py | 57 + 2 files changed, 1710 insertions(+), 0 deletions(-) diff --git a/Modules/_ssl_data.h b/Modules/_ssl_data.h new file mode 100644 --- /dev/null +++ b/Modules/_ssl_data.h @@ -0,0 +1,1653 @@ +/* File generated by Tools/ssl/make_ssl_data.py */ +/* Generated on 2012-05-16T23:56:40.981382 */ + +static struct py_ssl_library_code library_codes[] = { + {"PEM", ERR_LIB_PEM}, + {"SSL", ERR_LIB_SSL}, + {"X509", ERR_LIB_X509}, + { NULL } +}; + +static struct py_ssl_error_code error_codes[] = { + #ifdef PEM_R_BAD_BASE64_DECODE + {"BAD_BASE64_DECODE", ERR_LIB_PEM, PEM_R_BAD_BASE64_DECODE}, + #else + {"BAD_BASE64_DECODE", ERR_LIB_PEM, 100}, + #endif + #ifdef PEM_R_BAD_DECRYPT + {"BAD_DECRYPT", ERR_LIB_PEM, PEM_R_BAD_DECRYPT}, + #else + {"BAD_DECRYPT", ERR_LIB_PEM, 101}, + #endif + #ifdef PEM_R_BAD_END_LINE + {"BAD_END_LINE", ERR_LIB_PEM, PEM_R_BAD_END_LINE}, + #else + {"BAD_END_LINE", ERR_LIB_PEM, 102}, + #endif + #ifdef PEM_R_BAD_IV_CHARS + {"BAD_IV_CHARS", ERR_LIB_PEM, PEM_R_BAD_IV_CHARS}, + #else + {"BAD_IV_CHARS", ERR_LIB_PEM, 103}, + #endif + #ifdef PEM_R_BAD_MAGIC_NUMBER + {"BAD_MAGIC_NUMBER", ERR_LIB_PEM, PEM_R_BAD_MAGIC_NUMBER}, + #else + {"BAD_MAGIC_NUMBER", ERR_LIB_PEM, 116}, + #endif + #ifdef PEM_R_BAD_PASSWORD_READ + {"BAD_PASSWORD_READ", ERR_LIB_PEM, PEM_R_BAD_PASSWORD_READ}, + #else + {"BAD_PASSWORD_READ", ERR_LIB_PEM, 104}, + #endif + #ifdef PEM_R_BAD_VERSION_NUMBER + {"BAD_VERSION_NUMBER", ERR_LIB_PEM, PEM_R_BAD_VERSION_NUMBER}, + #else + {"BAD_VERSION_NUMBER", ERR_LIB_PEM, 117}, + #endif + #ifdef PEM_R_BIO_WRITE_FAILURE + {"BIO_WRITE_FAILURE", ERR_LIB_PEM, PEM_R_BIO_WRITE_FAILURE}, + #else + {"BIO_WRITE_FAILURE", ERR_LIB_PEM, 118}, + #endif + #ifdef PEM_R_CIPHER_IS_NULL + {"CIPHER_IS_NULL", ERR_LIB_PEM, PEM_R_CIPHER_IS_NULL}, + #else + {"CIPHER_IS_NULL", ERR_LIB_PEM, 127}, + #endif + #ifdef PEM_R_ERROR_CONVERTING_PRIVATE_KEY + {"ERROR_CONVERTING_PRIVATE_KEY", ERR_LIB_PEM, PEM_R_ERROR_CONVERTING_PRIVATE_KEY}, + #else + {"ERROR_CONVERTING_PRIVATE_KEY", ERR_LIB_PEM, 115}, + #endif + #ifdef PEM_R_EXPECTING_PRIVATE_KEY_BLOB + {"EXPECTING_PRIVATE_KEY_BLOB", ERR_LIB_PEM, PEM_R_EXPECTING_PRIVATE_KEY_BLOB}, + #else + {"EXPECTING_PRIVATE_KEY_BLOB", ERR_LIB_PEM, 119}, + #endif + #ifdef PEM_R_EXPECTING_PUBLIC_KEY_BLOB + {"EXPECTING_PUBLIC_KEY_BLOB", ERR_LIB_PEM, PEM_R_EXPECTING_PUBLIC_KEY_BLOB}, + #else + 
{"EXPECTING_PUBLIC_KEY_BLOB", ERR_LIB_PEM, 120}, + #endif + #ifdef PEM_R_INCONSISTENT_HEADER + {"INCONSISTENT_HEADER", ERR_LIB_PEM, PEM_R_INCONSISTENT_HEADER}, + #else + {"INCONSISTENT_HEADER", ERR_LIB_PEM, 121}, + #endif + #ifdef PEM_R_KEYBLOB_HEADER_PARSE_ERROR + {"KEYBLOB_HEADER_PARSE_ERROR", ERR_LIB_PEM, PEM_R_KEYBLOB_HEADER_PARSE_ERROR}, + #else + {"KEYBLOB_HEADER_PARSE_ERROR", ERR_LIB_PEM, 122}, + #endif + #ifdef PEM_R_KEYBLOB_TOO_SHORT + {"KEYBLOB_TOO_SHORT", ERR_LIB_PEM, PEM_R_KEYBLOB_TOO_SHORT}, + #else + {"KEYBLOB_TOO_SHORT", ERR_LIB_PEM, 123}, + #endif + #ifdef PEM_R_NOT_DEK_INFO + {"NOT_DEK_INFO", ERR_LIB_PEM, PEM_R_NOT_DEK_INFO}, + #else + {"NOT_DEK_INFO", ERR_LIB_PEM, 105}, + #endif + #ifdef PEM_R_NOT_ENCRYPTED + {"NOT_ENCRYPTED", ERR_LIB_PEM, PEM_R_NOT_ENCRYPTED}, + #else + {"NOT_ENCRYPTED", ERR_LIB_PEM, 106}, + #endif + #ifdef PEM_R_NOT_PROC_TYPE + {"NOT_PROC_TYPE", ERR_LIB_PEM, PEM_R_NOT_PROC_TYPE}, + #else + {"NOT_PROC_TYPE", ERR_LIB_PEM, 107}, + #endif + #ifdef PEM_R_NO_START_LINE + {"NO_START_LINE", ERR_LIB_PEM, PEM_R_NO_START_LINE}, + #else + {"NO_START_LINE", ERR_LIB_PEM, 108}, + #endif + #ifdef PEM_R_PROBLEMS_GETTING_PASSWORD + {"PROBLEMS_GETTING_PASSWORD", ERR_LIB_PEM, PEM_R_PROBLEMS_GETTING_PASSWORD}, + #else + {"PROBLEMS_GETTING_PASSWORD", ERR_LIB_PEM, 109}, + #endif + #ifdef PEM_R_PUBLIC_KEY_NO_RSA + {"PUBLIC_KEY_NO_RSA", ERR_LIB_PEM, PEM_R_PUBLIC_KEY_NO_RSA}, + #else + {"PUBLIC_KEY_NO_RSA", ERR_LIB_PEM, 110}, + #endif + #ifdef PEM_R_PVK_DATA_TOO_SHORT + {"PVK_DATA_TOO_SHORT", ERR_LIB_PEM, PEM_R_PVK_DATA_TOO_SHORT}, + #else + {"PVK_DATA_TOO_SHORT", ERR_LIB_PEM, 124}, + #endif + #ifdef PEM_R_PVK_TOO_SHORT + {"PVK_TOO_SHORT", ERR_LIB_PEM, PEM_R_PVK_TOO_SHORT}, + #else + {"PVK_TOO_SHORT", ERR_LIB_PEM, 125}, + #endif + #ifdef PEM_R_READ_KEY + {"READ_KEY", ERR_LIB_PEM, PEM_R_READ_KEY}, + #else + {"READ_KEY", ERR_LIB_PEM, 111}, + #endif + #ifdef PEM_R_SHORT_HEADER + {"SHORT_HEADER", ERR_LIB_PEM, PEM_R_SHORT_HEADER}, + #else + {"SHORT_HEADER", ERR_LIB_PEM, 112}, + #endif + #ifdef PEM_R_UNSUPPORTED_CIPHER + {"UNSUPPORTED_CIPHER", ERR_LIB_PEM, PEM_R_UNSUPPORTED_CIPHER}, + #else + {"UNSUPPORTED_CIPHER", ERR_LIB_PEM, 113}, + #endif + #ifdef PEM_R_UNSUPPORTED_ENCRYPTION + {"UNSUPPORTED_ENCRYPTION", ERR_LIB_PEM, PEM_R_UNSUPPORTED_ENCRYPTION}, + #else + {"UNSUPPORTED_ENCRYPTION", ERR_LIB_PEM, 114}, + #endif + #ifdef PEM_R_UNSUPPORTED_KEY_COMPONENTS + {"UNSUPPORTED_KEY_COMPONENTS", ERR_LIB_PEM, PEM_R_UNSUPPORTED_KEY_COMPONENTS}, + #else + {"UNSUPPORTED_KEY_COMPONENTS", ERR_LIB_PEM, 126}, + #endif + #ifdef SSL_R_APP_DATA_IN_HANDSHAKE + {"APP_DATA_IN_HANDSHAKE", ERR_LIB_SSL, SSL_R_APP_DATA_IN_HANDSHAKE}, + #else + {"APP_DATA_IN_HANDSHAKE", ERR_LIB_SSL, 100}, + #endif + #ifdef SSL_R_ATTEMPT_TO_REUSE_SESSION_IN_DIFFERENT_CONTEXT + {"ATTEMPT_TO_REUSE_SESSION_IN_DIFFERENT_CONTEXT", ERR_LIB_SSL, SSL_R_ATTEMPT_TO_REUSE_SESSION_IN_DIFFERENT_CONTEXT}, + #else + {"ATTEMPT_TO_REUSE_SESSION_IN_DIFFERENT_CONTEXT", ERR_LIB_SSL, 272}, + #endif + #ifdef SSL_R_BAD_ALERT_RECORD + {"BAD_ALERT_RECORD", ERR_LIB_SSL, SSL_R_BAD_ALERT_RECORD}, + #else + {"BAD_ALERT_RECORD", ERR_LIB_SSL, 101}, + #endif + #ifdef SSL_R_BAD_AUTHENTICATION_TYPE + {"BAD_AUTHENTICATION_TYPE", ERR_LIB_SSL, SSL_R_BAD_AUTHENTICATION_TYPE}, + #else + {"BAD_AUTHENTICATION_TYPE", ERR_LIB_SSL, 102}, + #endif + #ifdef SSL_R_BAD_CHANGE_CIPHER_SPEC + {"BAD_CHANGE_CIPHER_SPEC", ERR_LIB_SSL, SSL_R_BAD_CHANGE_CIPHER_SPEC}, + #else + {"BAD_CHANGE_CIPHER_SPEC", ERR_LIB_SSL, 103}, + #endif + #ifdef SSL_R_BAD_CHECKSUM + {"BAD_CHECKSUM", 
ERR_LIB_SSL, SSL_R_BAD_CHECKSUM}, + #else + {"BAD_CHECKSUM", ERR_LIB_SSL, 104}, + #endif + #ifdef SSL_R_BAD_DATA_RETURNED_BY_CALLBACK + {"BAD_DATA_RETURNED_BY_CALLBACK", ERR_LIB_SSL, SSL_R_BAD_DATA_RETURNED_BY_CALLBACK}, + #else + {"BAD_DATA_RETURNED_BY_CALLBACK", ERR_LIB_SSL, 106}, + #endif + #ifdef SSL_R_BAD_DECOMPRESSION + {"BAD_DECOMPRESSION", ERR_LIB_SSL, SSL_R_BAD_DECOMPRESSION}, + #else + {"BAD_DECOMPRESSION", ERR_LIB_SSL, 107}, + #endif + #ifdef SSL_R_BAD_DH_G_LENGTH + {"BAD_DH_G_LENGTH", ERR_LIB_SSL, SSL_R_BAD_DH_G_LENGTH}, + #else + {"BAD_DH_G_LENGTH", ERR_LIB_SSL, 108}, + #endif + #ifdef SSL_R_BAD_DH_PUB_KEY_LENGTH + {"BAD_DH_PUB_KEY_LENGTH", ERR_LIB_SSL, SSL_R_BAD_DH_PUB_KEY_LENGTH}, + #else + {"BAD_DH_PUB_KEY_LENGTH", ERR_LIB_SSL, 109}, + #endif + #ifdef SSL_R_BAD_DH_P_LENGTH + {"BAD_DH_P_LENGTH", ERR_LIB_SSL, SSL_R_BAD_DH_P_LENGTH}, + #else + {"BAD_DH_P_LENGTH", ERR_LIB_SSL, 110}, + #endif + #ifdef SSL_R_BAD_DIGEST_LENGTH + {"BAD_DIGEST_LENGTH", ERR_LIB_SSL, SSL_R_BAD_DIGEST_LENGTH}, + #else + {"BAD_DIGEST_LENGTH", ERR_LIB_SSL, 111}, + #endif + #ifdef SSL_R_BAD_DSA_SIGNATURE + {"BAD_DSA_SIGNATURE", ERR_LIB_SSL, SSL_R_BAD_DSA_SIGNATURE}, + #else + {"BAD_DSA_SIGNATURE", ERR_LIB_SSL, 112}, + #endif + #ifdef SSL_R_BAD_ECC_CERT + {"BAD_ECC_CERT", ERR_LIB_SSL, SSL_R_BAD_ECC_CERT}, + #else + {"BAD_ECC_CERT", ERR_LIB_SSL, 304}, + #endif + #ifdef SSL_R_BAD_ECDSA_SIGNATURE + {"BAD_ECDSA_SIGNATURE", ERR_LIB_SSL, SSL_R_BAD_ECDSA_SIGNATURE}, + #else + {"BAD_ECDSA_SIGNATURE", ERR_LIB_SSL, 305}, + #endif + #ifdef SSL_R_BAD_ECPOINT + {"BAD_ECPOINT", ERR_LIB_SSL, SSL_R_BAD_ECPOINT}, + #else + {"BAD_ECPOINT", ERR_LIB_SSL, 306}, + #endif + #ifdef SSL_R_BAD_HANDSHAKE_LENGTH + {"BAD_HANDSHAKE_LENGTH", ERR_LIB_SSL, SSL_R_BAD_HANDSHAKE_LENGTH}, + #else + {"BAD_HANDSHAKE_LENGTH", ERR_LIB_SSL, 332}, + #endif + #ifdef SSL_R_BAD_HELLO_REQUEST + {"BAD_HELLO_REQUEST", ERR_LIB_SSL, SSL_R_BAD_HELLO_REQUEST}, + #else + {"BAD_HELLO_REQUEST", ERR_LIB_SSL, 105}, + #endif + #ifdef SSL_R_BAD_LENGTH + {"BAD_LENGTH", ERR_LIB_SSL, SSL_R_BAD_LENGTH}, + #else + {"BAD_LENGTH", ERR_LIB_SSL, 271}, + #endif + #ifdef SSL_R_BAD_MAC_DECODE + {"BAD_MAC_DECODE", ERR_LIB_SSL, SSL_R_BAD_MAC_DECODE}, + #else + {"BAD_MAC_DECODE", ERR_LIB_SSL, 113}, + #endif + #ifdef SSL_R_BAD_MAC_LENGTH + {"BAD_MAC_LENGTH", ERR_LIB_SSL, SSL_R_BAD_MAC_LENGTH}, + #else + {"BAD_MAC_LENGTH", ERR_LIB_SSL, 333}, + #endif + #ifdef SSL_R_BAD_MESSAGE_TYPE + {"BAD_MESSAGE_TYPE", ERR_LIB_SSL, SSL_R_BAD_MESSAGE_TYPE}, + #else + {"BAD_MESSAGE_TYPE", ERR_LIB_SSL, 114}, + #endif + #ifdef SSL_R_BAD_PACKET_LENGTH + {"BAD_PACKET_LENGTH", ERR_LIB_SSL, SSL_R_BAD_PACKET_LENGTH}, + #else + {"BAD_PACKET_LENGTH", ERR_LIB_SSL, 115}, + #endif + #ifdef SSL_R_BAD_PROTOCOL_VERSION_NUMBER + {"BAD_PROTOCOL_VERSION_NUMBER", ERR_LIB_SSL, SSL_R_BAD_PROTOCOL_VERSION_NUMBER}, + #else + {"BAD_PROTOCOL_VERSION_NUMBER", ERR_LIB_SSL, 116}, + #endif + #ifdef SSL_R_BAD_PSK_IDENTITY_HINT_LENGTH + {"BAD_PSK_IDENTITY_HINT_LENGTH", ERR_LIB_SSL, SSL_R_BAD_PSK_IDENTITY_HINT_LENGTH}, + #else + {"BAD_PSK_IDENTITY_HINT_LENGTH", ERR_LIB_SSL, 316}, + #endif + #ifdef SSL_R_BAD_RESPONSE_ARGUMENT + {"BAD_RESPONSE_ARGUMENT", ERR_LIB_SSL, SSL_R_BAD_RESPONSE_ARGUMENT}, + #else + {"BAD_RESPONSE_ARGUMENT", ERR_LIB_SSL, 117}, + #endif + #ifdef SSL_R_BAD_RSA_DECRYPT + {"BAD_RSA_DECRYPT", ERR_LIB_SSL, SSL_R_BAD_RSA_DECRYPT}, + #else + {"BAD_RSA_DECRYPT", ERR_LIB_SSL, 118}, + #endif + #ifdef SSL_R_BAD_RSA_ENCRYPT + {"BAD_RSA_ENCRYPT", ERR_LIB_SSL, SSL_R_BAD_RSA_ENCRYPT}, + #else + {"BAD_RSA_ENCRYPT", 
ERR_LIB_SSL, 119}, + #endif + #ifdef SSL_R_BAD_RSA_E_LENGTH + {"BAD_RSA_E_LENGTH", ERR_LIB_SSL, SSL_R_BAD_RSA_E_LENGTH}, + #else + {"BAD_RSA_E_LENGTH", ERR_LIB_SSL, 120}, + #endif + #ifdef SSL_R_BAD_RSA_MODULUS_LENGTH + {"BAD_RSA_MODULUS_LENGTH", ERR_LIB_SSL, SSL_R_BAD_RSA_MODULUS_LENGTH}, + #else + {"BAD_RSA_MODULUS_LENGTH", ERR_LIB_SSL, 121}, + #endif + #ifdef SSL_R_BAD_RSA_SIGNATURE + {"BAD_RSA_SIGNATURE", ERR_LIB_SSL, SSL_R_BAD_RSA_SIGNATURE}, + #else + {"BAD_RSA_SIGNATURE", ERR_LIB_SSL, 122}, + #endif + #ifdef SSL_R_BAD_SIGNATURE + {"BAD_SIGNATURE", ERR_LIB_SSL, SSL_R_BAD_SIGNATURE}, + #else + {"BAD_SIGNATURE", ERR_LIB_SSL, 123}, + #endif + #ifdef SSL_R_BAD_SSL_FILETYPE + {"BAD_SSL_FILETYPE", ERR_LIB_SSL, SSL_R_BAD_SSL_FILETYPE}, + #else + {"BAD_SSL_FILETYPE", ERR_LIB_SSL, 124}, + #endif + #ifdef SSL_R_BAD_SSL_SESSION_ID_LENGTH + {"BAD_SSL_SESSION_ID_LENGTH", ERR_LIB_SSL, SSL_R_BAD_SSL_SESSION_ID_LENGTH}, + #else + {"BAD_SSL_SESSION_ID_LENGTH", ERR_LIB_SSL, 125}, + #endif + #ifdef SSL_R_BAD_STATE + {"BAD_STATE", ERR_LIB_SSL, SSL_R_BAD_STATE}, + #else + {"BAD_STATE", ERR_LIB_SSL, 126}, + #endif + #ifdef SSL_R_BAD_WRITE_RETRY + {"BAD_WRITE_RETRY", ERR_LIB_SSL, SSL_R_BAD_WRITE_RETRY}, + #else + {"BAD_WRITE_RETRY", ERR_LIB_SSL, 127}, + #endif + #ifdef SSL_R_BIO_NOT_SET + {"BIO_NOT_SET", ERR_LIB_SSL, SSL_R_BIO_NOT_SET}, + #else + {"BIO_NOT_SET", ERR_LIB_SSL, 128}, + #endif + #ifdef SSL_R_BLOCK_CIPHER_PAD_IS_WRONG + {"BLOCK_CIPHER_PAD_IS_WRONG", ERR_LIB_SSL, SSL_R_BLOCK_CIPHER_PAD_IS_WRONG}, + #else + {"BLOCK_CIPHER_PAD_IS_WRONG", ERR_LIB_SSL, 129}, + #endif + #ifdef SSL_R_BN_LIB + {"BN_LIB", ERR_LIB_SSL, SSL_R_BN_LIB}, + #else + {"BN_LIB", ERR_LIB_SSL, 130}, + #endif + #ifdef SSL_R_CA_DN_LENGTH_MISMATCH + {"CA_DN_LENGTH_MISMATCH", ERR_LIB_SSL, SSL_R_CA_DN_LENGTH_MISMATCH}, + #else + {"CA_DN_LENGTH_MISMATCH", ERR_LIB_SSL, 131}, + #endif + #ifdef SSL_R_CA_DN_TOO_LONG + {"CA_DN_TOO_LONG", ERR_LIB_SSL, SSL_R_CA_DN_TOO_LONG}, + #else + {"CA_DN_TOO_LONG", ERR_LIB_SSL, 132}, + #endif + #ifdef SSL_R_CCS_RECEIVED_EARLY + {"CCS_RECEIVED_EARLY", ERR_LIB_SSL, SSL_R_CCS_RECEIVED_EARLY}, + #else + {"CCS_RECEIVED_EARLY", ERR_LIB_SSL, 133}, + #endif + #ifdef SSL_R_CERTIFICATE_VERIFY_FAILED + {"CERTIFICATE_VERIFY_FAILED", ERR_LIB_SSL, SSL_R_CERTIFICATE_VERIFY_FAILED}, + #else + {"CERTIFICATE_VERIFY_FAILED", ERR_LIB_SSL, 134}, + #endif + #ifdef SSL_R_CERT_LENGTH_MISMATCH + {"CERT_LENGTH_MISMATCH", ERR_LIB_SSL, SSL_R_CERT_LENGTH_MISMATCH}, + #else + {"CERT_LENGTH_MISMATCH", ERR_LIB_SSL, 135}, + #endif + #ifdef SSL_R_CHALLENGE_IS_DIFFERENT + {"CHALLENGE_IS_DIFFERENT", ERR_LIB_SSL, SSL_R_CHALLENGE_IS_DIFFERENT}, + #else + {"CHALLENGE_IS_DIFFERENT", ERR_LIB_SSL, 136}, + #endif + #ifdef SSL_R_CIPHER_CODE_WRONG_LENGTH + {"CIPHER_CODE_WRONG_LENGTH", ERR_LIB_SSL, SSL_R_CIPHER_CODE_WRONG_LENGTH}, + #else + {"CIPHER_CODE_WRONG_LENGTH", ERR_LIB_SSL, 137}, + #endif + #ifdef SSL_R_CIPHER_OR_HASH_UNAVAILABLE + {"CIPHER_OR_HASH_UNAVAILABLE", ERR_LIB_SSL, SSL_R_CIPHER_OR_HASH_UNAVAILABLE}, + #else + {"CIPHER_OR_HASH_UNAVAILABLE", ERR_LIB_SSL, 138}, + #endif + #ifdef SSL_R_CIPHER_TABLE_SRC_ERROR + {"CIPHER_TABLE_SRC_ERROR", ERR_LIB_SSL, SSL_R_CIPHER_TABLE_SRC_ERROR}, + #else + {"CIPHER_TABLE_SRC_ERROR", ERR_LIB_SSL, 139}, + #endif + #ifdef SSL_R_CLIENTHELLO_TLSEXT + {"CLIENTHELLO_TLSEXT", ERR_LIB_SSL, SSL_R_CLIENTHELLO_TLSEXT}, + #else + {"CLIENTHELLO_TLSEXT", ERR_LIB_SSL, 226}, + #endif + #ifdef SSL_R_COMPRESSED_LENGTH_TOO_LONG + {"COMPRESSED_LENGTH_TOO_LONG", ERR_LIB_SSL, SSL_R_COMPRESSED_LENGTH_TOO_LONG}, + #else + 
{"COMPRESSED_LENGTH_TOO_LONG", ERR_LIB_SSL, 140}, + #endif + #ifdef SSL_R_COMPRESSION_DISABLED + {"COMPRESSION_DISABLED", ERR_LIB_SSL, SSL_R_COMPRESSION_DISABLED}, + #else + {"COMPRESSION_DISABLED", ERR_LIB_SSL, 343}, + #endif + #ifdef SSL_R_COMPRESSION_FAILURE + {"COMPRESSION_FAILURE", ERR_LIB_SSL, SSL_R_COMPRESSION_FAILURE}, + #else + {"COMPRESSION_FAILURE", ERR_LIB_SSL, 141}, + #endif + #ifdef SSL_R_COMPRESSION_ID_NOT_WITHIN_PRIVATE_RANGE + {"COMPRESSION_ID_NOT_WITHIN_PRIVATE_RANGE", ERR_LIB_SSL, SSL_R_COMPRESSION_ID_NOT_WITHIN_PRIVATE_RANGE}, + #else + {"COMPRESSION_ID_NOT_WITHIN_PRIVATE_RANGE", ERR_LIB_SSL, 307}, + #endif + #ifdef SSL_R_COMPRESSION_LIBRARY_ERROR + {"COMPRESSION_LIBRARY_ERROR", ERR_LIB_SSL, SSL_R_COMPRESSION_LIBRARY_ERROR}, + #else + {"COMPRESSION_LIBRARY_ERROR", ERR_LIB_SSL, 142}, + #endif + #ifdef SSL_R_CONNECTION_ID_IS_DIFFERENT + {"CONNECTION_ID_IS_DIFFERENT", ERR_LIB_SSL, SSL_R_CONNECTION_ID_IS_DIFFERENT}, + #else + {"CONNECTION_ID_IS_DIFFERENT", ERR_LIB_SSL, 143}, + #endif + #ifdef SSL_R_CONNECTION_TYPE_NOT_SET + {"CONNECTION_TYPE_NOT_SET", ERR_LIB_SSL, SSL_R_CONNECTION_TYPE_NOT_SET}, + #else + {"CONNECTION_TYPE_NOT_SET", ERR_LIB_SSL, 144}, + #endif + #ifdef SSL_R_COOKIE_MISMATCH + {"COOKIE_MISMATCH", ERR_LIB_SSL, SSL_R_COOKIE_MISMATCH}, + #else + {"COOKIE_MISMATCH", ERR_LIB_SSL, 308}, + #endif + #ifdef SSL_R_DATA_BETWEEN_CCS_AND_FINISHED + {"DATA_BETWEEN_CCS_AND_FINISHED", ERR_LIB_SSL, SSL_R_DATA_BETWEEN_CCS_AND_FINISHED}, + #else + {"DATA_BETWEEN_CCS_AND_FINISHED", ERR_LIB_SSL, 145}, + #endif + #ifdef SSL_R_DATA_LENGTH_TOO_LONG + {"DATA_LENGTH_TOO_LONG", ERR_LIB_SSL, SSL_R_DATA_LENGTH_TOO_LONG}, + #else + {"DATA_LENGTH_TOO_LONG", ERR_LIB_SSL, 146}, + #endif + #ifdef SSL_R_DECRYPTION_FAILED + {"DECRYPTION_FAILED", ERR_LIB_SSL, SSL_R_DECRYPTION_FAILED}, + #else + {"DECRYPTION_FAILED", ERR_LIB_SSL, 147}, + #endif + #ifdef SSL_R_DECRYPTION_FAILED_OR_BAD_RECORD_MAC + {"DECRYPTION_FAILED_OR_BAD_RECORD_MAC", ERR_LIB_SSL, SSL_R_DECRYPTION_FAILED_OR_BAD_RECORD_MAC}, + #else + {"DECRYPTION_FAILED_OR_BAD_RECORD_MAC", ERR_LIB_SSL, 281}, + #endif + #ifdef SSL_R_DH_PUBLIC_VALUE_LENGTH_IS_WRONG + {"DH_PUBLIC_VALUE_LENGTH_IS_WRONG", ERR_LIB_SSL, SSL_R_DH_PUBLIC_VALUE_LENGTH_IS_WRONG}, + #else + {"DH_PUBLIC_VALUE_LENGTH_IS_WRONG", ERR_LIB_SSL, 148}, + #endif + #ifdef SSL_R_DIGEST_CHECK_FAILED + {"DIGEST_CHECK_FAILED", ERR_LIB_SSL, SSL_R_DIGEST_CHECK_FAILED}, + #else + {"DIGEST_CHECK_FAILED", ERR_LIB_SSL, 149}, + #endif + #ifdef SSL_R_DTLS_MESSAGE_TOO_BIG + {"DTLS_MESSAGE_TOO_BIG", ERR_LIB_SSL, SSL_R_DTLS_MESSAGE_TOO_BIG}, + #else + {"DTLS_MESSAGE_TOO_BIG", ERR_LIB_SSL, 334}, + #endif + #ifdef SSL_R_DUPLICATE_COMPRESSION_ID + {"DUPLICATE_COMPRESSION_ID", ERR_LIB_SSL, SSL_R_DUPLICATE_COMPRESSION_ID}, + #else + {"DUPLICATE_COMPRESSION_ID", ERR_LIB_SSL, 309}, + #endif + #ifdef SSL_R_ECC_CERT_NOT_FOR_KEY_AGREEMENT + {"ECC_CERT_NOT_FOR_KEY_AGREEMENT", ERR_LIB_SSL, SSL_R_ECC_CERT_NOT_FOR_KEY_AGREEMENT}, + #else + {"ECC_CERT_NOT_FOR_KEY_AGREEMENT", ERR_LIB_SSL, 317}, + #endif + #ifdef SSL_R_ECC_CERT_NOT_FOR_SIGNING + {"ECC_CERT_NOT_FOR_SIGNING", ERR_LIB_SSL, SSL_R_ECC_CERT_NOT_FOR_SIGNING}, + #else + {"ECC_CERT_NOT_FOR_SIGNING", ERR_LIB_SSL, 318}, + #endif + #ifdef SSL_R_ECC_CERT_SHOULD_HAVE_RSA_SIGNATURE + {"ECC_CERT_SHOULD_HAVE_RSA_SIGNATURE", ERR_LIB_SSL, SSL_R_ECC_CERT_SHOULD_HAVE_RSA_SIGNATURE}, + #else + {"ECC_CERT_SHOULD_HAVE_RSA_SIGNATURE", ERR_LIB_SSL, 322}, + #endif + #ifdef SSL_R_ECC_CERT_SHOULD_HAVE_SHA1_SIGNATURE + {"ECC_CERT_SHOULD_HAVE_SHA1_SIGNATURE", ERR_LIB_SSL, 
SSL_R_ECC_CERT_SHOULD_HAVE_SHA1_SIGNATURE}, + #else + {"ECC_CERT_SHOULD_HAVE_SHA1_SIGNATURE", ERR_LIB_SSL, 323}, + #endif + #ifdef SSL_R_ECGROUP_TOO_LARGE_FOR_CIPHER + {"ECGROUP_TOO_LARGE_FOR_CIPHER", ERR_LIB_SSL, SSL_R_ECGROUP_TOO_LARGE_FOR_CIPHER}, + #else + {"ECGROUP_TOO_LARGE_FOR_CIPHER", ERR_LIB_SSL, 310}, + #endif + #ifdef SSL_R_ENCRYPTED_LENGTH_TOO_LONG + {"ENCRYPTED_LENGTH_TOO_LONG", ERR_LIB_SSL, SSL_R_ENCRYPTED_LENGTH_TOO_LONG}, + #else + {"ENCRYPTED_LENGTH_TOO_LONG", ERR_LIB_SSL, 150}, + #endif + #ifdef SSL_R_ERROR_GENERATING_TMP_RSA_KEY + {"ERROR_GENERATING_TMP_RSA_KEY", ERR_LIB_SSL, SSL_R_ERROR_GENERATING_TMP_RSA_KEY}, + #else + {"ERROR_GENERATING_TMP_RSA_KEY", ERR_LIB_SSL, 282}, + #endif + #ifdef SSL_R_ERROR_IN_RECEIVED_CIPHER_LIST + {"ERROR_IN_RECEIVED_CIPHER_LIST", ERR_LIB_SSL, SSL_R_ERROR_IN_RECEIVED_CIPHER_LIST}, + #else + {"ERROR_IN_RECEIVED_CIPHER_LIST", ERR_LIB_SSL, 151}, + #endif + #ifdef SSL_R_EXCESSIVE_MESSAGE_SIZE + {"EXCESSIVE_MESSAGE_SIZE", ERR_LIB_SSL, SSL_R_EXCESSIVE_MESSAGE_SIZE}, + #else + {"EXCESSIVE_MESSAGE_SIZE", ERR_LIB_SSL, 152}, + #endif + #ifdef SSL_R_EXTRA_DATA_IN_MESSAGE + {"EXTRA_DATA_IN_MESSAGE", ERR_LIB_SSL, SSL_R_EXTRA_DATA_IN_MESSAGE}, + #else + {"EXTRA_DATA_IN_MESSAGE", ERR_LIB_SSL, 153}, + #endif + #ifdef SSL_R_GOT_A_FIN_BEFORE_A_CCS + {"GOT_A_FIN_BEFORE_A_CCS", ERR_LIB_SSL, SSL_R_GOT_A_FIN_BEFORE_A_CCS}, + #else + {"GOT_A_FIN_BEFORE_A_CCS", ERR_LIB_SSL, 154}, + #endif + #ifdef SSL_R_HTTPS_PROXY_REQUEST + {"HTTPS_PROXY_REQUEST", ERR_LIB_SSL, SSL_R_HTTPS_PROXY_REQUEST}, + #else + {"HTTPS_PROXY_REQUEST", ERR_LIB_SSL, 155}, + #endif + #ifdef SSL_R_HTTP_REQUEST + {"HTTP_REQUEST", ERR_LIB_SSL, SSL_R_HTTP_REQUEST}, + #else + {"HTTP_REQUEST", ERR_LIB_SSL, 156}, + #endif + #ifdef SSL_R_ILLEGAL_PADDING + {"ILLEGAL_PADDING", ERR_LIB_SSL, SSL_R_ILLEGAL_PADDING}, + #else + {"ILLEGAL_PADDING", ERR_LIB_SSL, 283}, + #endif + #ifdef SSL_R_INCONSISTENT_COMPRESSION + {"INCONSISTENT_COMPRESSION", ERR_LIB_SSL, SSL_R_INCONSISTENT_COMPRESSION}, + #else + {"INCONSISTENT_COMPRESSION", ERR_LIB_SSL, 340}, + #endif + #ifdef SSL_R_INVALID_CHALLENGE_LENGTH + {"INVALID_CHALLENGE_LENGTH", ERR_LIB_SSL, SSL_R_INVALID_CHALLENGE_LENGTH}, + #else + {"INVALID_CHALLENGE_LENGTH", ERR_LIB_SSL, 158}, + #endif + #ifdef SSL_R_INVALID_COMMAND + {"INVALID_COMMAND", ERR_LIB_SSL, SSL_R_INVALID_COMMAND}, + #else + {"INVALID_COMMAND", ERR_LIB_SSL, 280}, + #endif + #ifdef SSL_R_INVALID_COMPRESSION_ALGORITHM + {"INVALID_COMPRESSION_ALGORITHM", ERR_LIB_SSL, SSL_R_INVALID_COMPRESSION_ALGORITHM}, + #else + {"INVALID_COMPRESSION_ALGORITHM", ERR_LIB_SSL, 341}, + #endif + #ifdef SSL_R_INVALID_PURPOSE + {"INVALID_PURPOSE", ERR_LIB_SSL, SSL_R_INVALID_PURPOSE}, + #else + {"INVALID_PURPOSE", ERR_LIB_SSL, 278}, + #endif + #ifdef SSL_R_INVALID_STATUS_RESPONSE + {"INVALID_STATUS_RESPONSE", ERR_LIB_SSL, SSL_R_INVALID_STATUS_RESPONSE}, + #else + {"INVALID_STATUS_RESPONSE", ERR_LIB_SSL, 328}, + #endif + #ifdef SSL_R_INVALID_TICKET_KEYS_LENGTH + {"INVALID_TICKET_KEYS_LENGTH", ERR_LIB_SSL, SSL_R_INVALID_TICKET_KEYS_LENGTH}, + #else + {"INVALID_TICKET_KEYS_LENGTH", ERR_LIB_SSL, 325}, + #endif + #ifdef SSL_R_INVALID_TRUST + {"INVALID_TRUST", ERR_LIB_SSL, SSL_R_INVALID_TRUST}, + #else + {"INVALID_TRUST", ERR_LIB_SSL, 279}, + #endif + #ifdef SSL_R_KEY_ARG_TOO_LONG + {"KEY_ARG_TOO_LONG", ERR_LIB_SSL, SSL_R_KEY_ARG_TOO_LONG}, + #else + {"KEY_ARG_TOO_LONG", ERR_LIB_SSL, 284}, + #endif + #ifdef SSL_R_KRB5 + {"KRB5", ERR_LIB_SSL, SSL_R_KRB5}, + #else + {"KRB5", ERR_LIB_SSL, 285}, + #endif + #ifdef SSL_R_KRB5_C_CC_PRINC + 
{"KRB5_C_CC_PRINC", ERR_LIB_SSL, SSL_R_KRB5_C_CC_PRINC}, + #else + {"KRB5_C_CC_PRINC", ERR_LIB_SSL, 286}, + #endif + #ifdef SSL_R_KRB5_C_GET_CRED + {"KRB5_C_GET_CRED", ERR_LIB_SSL, SSL_R_KRB5_C_GET_CRED}, + #else + {"KRB5_C_GET_CRED", ERR_LIB_SSL, 287}, + #endif + #ifdef SSL_R_KRB5_C_INIT + {"KRB5_C_INIT", ERR_LIB_SSL, SSL_R_KRB5_C_INIT}, + #else + {"KRB5_C_INIT", ERR_LIB_SSL, 288}, + #endif + #ifdef SSL_R_KRB5_C_MK_REQ + {"KRB5_C_MK_REQ", ERR_LIB_SSL, SSL_R_KRB5_C_MK_REQ}, + #else + {"KRB5_C_MK_REQ", ERR_LIB_SSL, 289}, + #endif + #ifdef SSL_R_KRB5_S_BAD_TICKET + {"KRB5_S_BAD_TICKET", ERR_LIB_SSL, SSL_R_KRB5_S_BAD_TICKET}, + #else + {"KRB5_S_BAD_TICKET", ERR_LIB_SSL, 290}, + #endif + #ifdef SSL_R_KRB5_S_INIT + {"KRB5_S_INIT", ERR_LIB_SSL, SSL_R_KRB5_S_INIT}, + #else + {"KRB5_S_INIT", ERR_LIB_SSL, 291}, + #endif + #ifdef SSL_R_KRB5_S_RD_REQ + {"KRB5_S_RD_REQ", ERR_LIB_SSL, SSL_R_KRB5_S_RD_REQ}, + #else + {"KRB5_S_RD_REQ", ERR_LIB_SSL, 292}, + #endif + #ifdef SSL_R_KRB5_S_TKT_EXPIRED + {"KRB5_S_TKT_EXPIRED", ERR_LIB_SSL, SSL_R_KRB5_S_TKT_EXPIRED}, + #else + {"KRB5_S_TKT_EXPIRED", ERR_LIB_SSL, 293}, + #endif + #ifdef SSL_R_KRB5_S_TKT_NYV + {"KRB5_S_TKT_NYV", ERR_LIB_SSL, SSL_R_KRB5_S_TKT_NYV}, + #else + {"KRB5_S_TKT_NYV", ERR_LIB_SSL, 294}, + #endif + #ifdef SSL_R_KRB5_S_TKT_SKEW + {"KRB5_S_TKT_SKEW", ERR_LIB_SSL, SSL_R_KRB5_S_TKT_SKEW}, + #else + {"KRB5_S_TKT_SKEW", ERR_LIB_SSL, 295}, + #endif + #ifdef SSL_R_LENGTH_MISMATCH + {"LENGTH_MISMATCH", ERR_LIB_SSL, SSL_R_LENGTH_MISMATCH}, + #else + {"LENGTH_MISMATCH", ERR_LIB_SSL, 159}, + #endif + #ifdef SSL_R_LENGTH_TOO_SHORT + {"LENGTH_TOO_SHORT", ERR_LIB_SSL, SSL_R_LENGTH_TOO_SHORT}, + #else + {"LENGTH_TOO_SHORT", ERR_LIB_SSL, 160}, + #endif + #ifdef SSL_R_LIBRARY_BUG + {"LIBRARY_BUG", ERR_LIB_SSL, SSL_R_LIBRARY_BUG}, + #else + {"LIBRARY_BUG", ERR_LIB_SSL, 274}, + #endif + #ifdef SSL_R_LIBRARY_HAS_NO_CIPHERS + {"LIBRARY_HAS_NO_CIPHERS", ERR_LIB_SSL, SSL_R_LIBRARY_HAS_NO_CIPHERS}, + #else + {"LIBRARY_HAS_NO_CIPHERS", ERR_LIB_SSL, 161}, + #endif + #ifdef SSL_R_MESSAGE_TOO_LONG + {"MESSAGE_TOO_LONG", ERR_LIB_SSL, SSL_R_MESSAGE_TOO_LONG}, + #else + {"MESSAGE_TOO_LONG", ERR_LIB_SSL, 296}, + #endif + #ifdef SSL_R_MISSING_DH_DSA_CERT + {"MISSING_DH_DSA_CERT", ERR_LIB_SSL, SSL_R_MISSING_DH_DSA_CERT}, + #else + {"MISSING_DH_DSA_CERT", ERR_LIB_SSL, 162}, + #endif + #ifdef SSL_R_MISSING_DH_KEY + {"MISSING_DH_KEY", ERR_LIB_SSL, SSL_R_MISSING_DH_KEY}, + #else + {"MISSING_DH_KEY", ERR_LIB_SSL, 163}, + #endif + #ifdef SSL_R_MISSING_DH_RSA_CERT + {"MISSING_DH_RSA_CERT", ERR_LIB_SSL, SSL_R_MISSING_DH_RSA_CERT}, + #else + {"MISSING_DH_RSA_CERT", ERR_LIB_SSL, 164}, + #endif + #ifdef SSL_R_MISSING_DSA_SIGNING_CERT + {"MISSING_DSA_SIGNING_CERT", ERR_LIB_SSL, SSL_R_MISSING_DSA_SIGNING_CERT}, + #else + {"MISSING_DSA_SIGNING_CERT", ERR_LIB_SSL, 165}, + #endif + #ifdef SSL_R_MISSING_EXPORT_TMP_DH_KEY + {"MISSING_EXPORT_TMP_DH_KEY", ERR_LIB_SSL, SSL_R_MISSING_EXPORT_TMP_DH_KEY}, + #else + {"MISSING_EXPORT_TMP_DH_KEY", ERR_LIB_SSL, 166}, + #endif + #ifdef SSL_R_MISSING_EXPORT_TMP_RSA_KEY + {"MISSING_EXPORT_TMP_RSA_KEY", ERR_LIB_SSL, SSL_R_MISSING_EXPORT_TMP_RSA_KEY}, + #else + {"MISSING_EXPORT_TMP_RSA_KEY", ERR_LIB_SSL, 167}, + #endif + #ifdef SSL_R_MISSING_RSA_CERTIFICATE + {"MISSING_RSA_CERTIFICATE", ERR_LIB_SSL, SSL_R_MISSING_RSA_CERTIFICATE}, + #else + {"MISSING_RSA_CERTIFICATE", ERR_LIB_SSL, 168}, + #endif + #ifdef SSL_R_MISSING_RSA_ENCRYPTING_CERT + {"MISSING_RSA_ENCRYPTING_CERT", ERR_LIB_SSL, SSL_R_MISSING_RSA_ENCRYPTING_CERT}, + #else + 
{"MISSING_RSA_ENCRYPTING_CERT", ERR_LIB_SSL, 169}, + #endif + #ifdef SSL_R_MISSING_RSA_SIGNING_CERT + {"MISSING_RSA_SIGNING_CERT", ERR_LIB_SSL, SSL_R_MISSING_RSA_SIGNING_CERT}, + #else + {"MISSING_RSA_SIGNING_CERT", ERR_LIB_SSL, 170}, + #endif + #ifdef SSL_R_MISSING_TMP_DH_KEY + {"MISSING_TMP_DH_KEY", ERR_LIB_SSL, SSL_R_MISSING_TMP_DH_KEY}, + #else + {"MISSING_TMP_DH_KEY", ERR_LIB_SSL, 171}, + #endif + #ifdef SSL_R_MISSING_TMP_ECDH_KEY + {"MISSING_TMP_ECDH_KEY", ERR_LIB_SSL, SSL_R_MISSING_TMP_ECDH_KEY}, + #else + {"MISSING_TMP_ECDH_KEY", ERR_LIB_SSL, 311}, + #endif + #ifdef SSL_R_MISSING_TMP_RSA_KEY + {"MISSING_TMP_RSA_KEY", ERR_LIB_SSL, SSL_R_MISSING_TMP_RSA_KEY}, + #else + {"MISSING_TMP_RSA_KEY", ERR_LIB_SSL, 172}, + #endif + #ifdef SSL_R_MISSING_TMP_RSA_PKEY + {"MISSING_TMP_RSA_PKEY", ERR_LIB_SSL, SSL_R_MISSING_TMP_RSA_PKEY}, + #else + {"MISSING_TMP_RSA_PKEY", ERR_LIB_SSL, 173}, + #endif + #ifdef SSL_R_MISSING_VERIFY_MESSAGE + {"MISSING_VERIFY_MESSAGE", ERR_LIB_SSL, SSL_R_MISSING_VERIFY_MESSAGE}, + #else + {"MISSING_VERIFY_MESSAGE", ERR_LIB_SSL, 174}, + #endif + #ifdef SSL_R_NON_SSLV2_INITIAL_PACKET + {"NON_SSLV2_INITIAL_PACKET", ERR_LIB_SSL, SSL_R_NON_SSLV2_INITIAL_PACKET}, + #else + {"NON_SSLV2_INITIAL_PACKET", ERR_LIB_SSL, 175}, + #endif + #ifdef SSL_R_NO_CERTIFICATES_RETURNED + {"NO_CERTIFICATES_RETURNED", ERR_LIB_SSL, SSL_R_NO_CERTIFICATES_RETURNED}, + #else + {"NO_CERTIFICATES_RETURNED", ERR_LIB_SSL, 176}, + #endif + #ifdef SSL_R_NO_CERTIFICATE_ASSIGNED + {"NO_CERTIFICATE_ASSIGNED", ERR_LIB_SSL, SSL_R_NO_CERTIFICATE_ASSIGNED}, + #else + {"NO_CERTIFICATE_ASSIGNED", ERR_LIB_SSL, 177}, + #endif + #ifdef SSL_R_NO_CERTIFICATE_RETURNED + {"NO_CERTIFICATE_RETURNED", ERR_LIB_SSL, SSL_R_NO_CERTIFICATE_RETURNED}, + #else + {"NO_CERTIFICATE_RETURNED", ERR_LIB_SSL, 178}, + #endif + #ifdef SSL_R_NO_CERTIFICATE_SET + {"NO_CERTIFICATE_SET", ERR_LIB_SSL, SSL_R_NO_CERTIFICATE_SET}, + #else + {"NO_CERTIFICATE_SET", ERR_LIB_SSL, 179}, + #endif + #ifdef SSL_R_NO_CERTIFICATE_SPECIFIED + {"NO_CERTIFICATE_SPECIFIED", ERR_LIB_SSL, SSL_R_NO_CERTIFICATE_SPECIFIED}, + #else + {"NO_CERTIFICATE_SPECIFIED", ERR_LIB_SSL, 180}, + #endif + #ifdef SSL_R_NO_CIPHERS_AVAILABLE + {"NO_CIPHERS_AVAILABLE", ERR_LIB_SSL, SSL_R_NO_CIPHERS_AVAILABLE}, + #else + {"NO_CIPHERS_AVAILABLE", ERR_LIB_SSL, 181}, + #endif + #ifdef SSL_R_NO_CIPHERS_PASSED + {"NO_CIPHERS_PASSED", ERR_LIB_SSL, SSL_R_NO_CIPHERS_PASSED}, + #else + {"NO_CIPHERS_PASSED", ERR_LIB_SSL, 182}, + #endif + #ifdef SSL_R_NO_CIPHERS_SPECIFIED + {"NO_CIPHERS_SPECIFIED", ERR_LIB_SSL, SSL_R_NO_CIPHERS_SPECIFIED}, + #else + {"NO_CIPHERS_SPECIFIED", ERR_LIB_SSL, 183}, + #endif + #ifdef SSL_R_NO_CIPHER_LIST + {"NO_CIPHER_LIST", ERR_LIB_SSL, SSL_R_NO_CIPHER_LIST}, + #else + {"NO_CIPHER_LIST", ERR_LIB_SSL, 184}, + #endif + #ifdef SSL_R_NO_CIPHER_MATCH + {"NO_CIPHER_MATCH", ERR_LIB_SSL, SSL_R_NO_CIPHER_MATCH}, + #else + {"NO_CIPHER_MATCH", ERR_LIB_SSL, 185}, + #endif + #ifdef SSL_R_NO_CLIENT_CERT_METHOD + {"NO_CLIENT_CERT_METHOD", ERR_LIB_SSL, SSL_R_NO_CLIENT_CERT_METHOD}, + #else + {"NO_CLIENT_CERT_METHOD", ERR_LIB_SSL, 331}, + #endif + #ifdef SSL_R_NO_CLIENT_CERT_RECEIVED + {"NO_CLIENT_CERT_RECEIVED", ERR_LIB_SSL, SSL_R_NO_CLIENT_CERT_RECEIVED}, + #else + {"NO_CLIENT_CERT_RECEIVED", ERR_LIB_SSL, 186}, + #endif + #ifdef SSL_R_NO_COMPRESSION_SPECIFIED + {"NO_COMPRESSION_SPECIFIED", ERR_LIB_SSL, SSL_R_NO_COMPRESSION_SPECIFIED}, + #else + {"NO_COMPRESSION_SPECIFIED", ERR_LIB_SSL, 187}, + #endif + #ifdef SSL_R_NO_GOST_CERTIFICATE_SENT_BY_PEER + 
{"NO_GOST_CERTIFICATE_SENT_BY_PEER", ERR_LIB_SSL, SSL_R_NO_GOST_CERTIFICATE_SENT_BY_PEER}, + #else + {"NO_GOST_CERTIFICATE_SENT_BY_PEER", ERR_LIB_SSL, 330}, + #endif + #ifdef SSL_R_NO_METHOD_SPECIFIED + {"NO_METHOD_SPECIFIED", ERR_LIB_SSL, SSL_R_NO_METHOD_SPECIFIED}, + #else + {"NO_METHOD_SPECIFIED", ERR_LIB_SSL, 188}, + #endif + #ifdef SSL_R_NO_PRIVATEKEY + {"NO_PRIVATEKEY", ERR_LIB_SSL, SSL_R_NO_PRIVATEKEY}, + #else + {"NO_PRIVATEKEY", ERR_LIB_SSL, 189}, + #endif + #ifdef SSL_R_NO_PRIVATE_KEY_ASSIGNED + {"NO_PRIVATE_KEY_ASSIGNED", ERR_LIB_SSL, SSL_R_NO_PRIVATE_KEY_ASSIGNED}, + #else + {"NO_PRIVATE_KEY_ASSIGNED", ERR_LIB_SSL, 190}, + #endif + #ifdef SSL_R_NO_PROTOCOLS_AVAILABLE + {"NO_PROTOCOLS_AVAILABLE", ERR_LIB_SSL, SSL_R_NO_PROTOCOLS_AVAILABLE}, + #else + {"NO_PROTOCOLS_AVAILABLE", ERR_LIB_SSL, 191}, + #endif + #ifdef SSL_R_NO_PUBLICKEY + {"NO_PUBLICKEY", ERR_LIB_SSL, SSL_R_NO_PUBLICKEY}, + #else + {"NO_PUBLICKEY", ERR_LIB_SSL, 192}, + #endif + #ifdef SSL_R_NO_RENEGOTIATION + {"NO_RENEGOTIATION", ERR_LIB_SSL, SSL_R_NO_RENEGOTIATION}, + #else + {"NO_RENEGOTIATION", ERR_LIB_SSL, 339}, + #endif + #ifdef SSL_R_NO_REQUIRED_DIGEST + {"NO_REQUIRED_DIGEST", ERR_LIB_SSL, SSL_R_NO_REQUIRED_DIGEST}, + #else + {"NO_REQUIRED_DIGEST", ERR_LIB_SSL, 324}, + #endif + #ifdef SSL_R_NO_SHARED_CIPHER + {"NO_SHARED_CIPHER", ERR_LIB_SSL, SSL_R_NO_SHARED_CIPHER}, + #else + {"NO_SHARED_CIPHER", ERR_LIB_SSL, 193}, + #endif + #ifdef SSL_R_NO_VERIFY_CALLBACK + {"NO_VERIFY_CALLBACK", ERR_LIB_SSL, SSL_R_NO_VERIFY_CALLBACK}, + #else + {"NO_VERIFY_CALLBACK", ERR_LIB_SSL, 194}, + #endif + #ifdef SSL_R_NULL_SSL_CTX + {"NULL_SSL_CTX", ERR_LIB_SSL, SSL_R_NULL_SSL_CTX}, + #else + {"NULL_SSL_CTX", ERR_LIB_SSL, 195}, + #endif + #ifdef SSL_R_NULL_SSL_METHOD_PASSED + {"NULL_SSL_METHOD_PASSED", ERR_LIB_SSL, SSL_R_NULL_SSL_METHOD_PASSED}, + #else + {"NULL_SSL_METHOD_PASSED", ERR_LIB_SSL, 196}, + #endif + #ifdef SSL_R_OLD_SESSION_CIPHER_NOT_RETURNED + {"OLD_SESSION_CIPHER_NOT_RETURNED", ERR_LIB_SSL, SSL_R_OLD_SESSION_CIPHER_NOT_RETURNED}, + #else + {"OLD_SESSION_CIPHER_NOT_RETURNED", ERR_LIB_SSL, 197}, + #endif + #ifdef SSL_R_OLD_SESSION_COMPRESSION_ALGORITHM_NOT_RETURNED + {"OLD_SESSION_COMPRESSION_ALGORITHM_NOT_RETURNED", ERR_LIB_SSL, SSL_R_OLD_SESSION_COMPRESSION_ALGORITHM_NOT_RETURNED}, + #else + {"OLD_SESSION_COMPRESSION_ALGORITHM_NOT_RETURNED", ERR_LIB_SSL, 344}, + #endif + #ifdef SSL_R_ONLY_TLS_ALLOWED_IN_FIPS_MODE + {"ONLY_TLS_ALLOWED_IN_FIPS_MODE", ERR_LIB_SSL, SSL_R_ONLY_TLS_ALLOWED_IN_FIPS_MODE}, + #else + {"ONLY_TLS_ALLOWED_IN_FIPS_MODE", ERR_LIB_SSL, 297}, + #endif + #ifdef SSL_R_OPAQUE_PRF_INPUT_TOO_LONG + {"OPAQUE_PRF_INPUT_TOO_LONG", ERR_LIB_SSL, SSL_R_OPAQUE_PRF_INPUT_TOO_LONG}, + #else + {"OPAQUE_PRF_INPUT_TOO_LONG", ERR_LIB_SSL, 327}, + #endif + #ifdef SSL_R_PACKET_LENGTH_TOO_LONG + {"PACKET_LENGTH_TOO_LONG", ERR_LIB_SSL, SSL_R_PACKET_LENGTH_TOO_LONG}, + #else + {"PACKET_LENGTH_TOO_LONG", ERR_LIB_SSL, 198}, + #endif + #ifdef SSL_R_PARSE_TLSEXT + {"PARSE_TLSEXT", ERR_LIB_SSL, SSL_R_PARSE_TLSEXT}, + #else + {"PARSE_TLSEXT", ERR_LIB_SSL, 227}, + #endif + #ifdef SSL_R_PATH_TOO_LONG + {"PATH_TOO_LONG", ERR_LIB_SSL, SSL_R_PATH_TOO_LONG}, + #else + {"PATH_TOO_LONG", ERR_LIB_SSL, 270}, + #endif + #ifdef SSL_R_PEER_DID_NOT_RETURN_A_CERTIFICATE + {"PEER_DID_NOT_RETURN_A_CERTIFICATE", ERR_LIB_SSL, SSL_R_PEER_DID_NOT_RETURN_A_CERTIFICATE}, + #else + {"PEER_DID_NOT_RETURN_A_CERTIFICATE", ERR_LIB_SSL, 199}, + #endif + #ifdef SSL_R_PEER_ERROR + {"PEER_ERROR", ERR_LIB_SSL, SSL_R_PEER_ERROR}, + #else + {"PEER_ERROR", 
ERR_LIB_SSL, 200}, + #endif + #ifdef SSL_R_PEER_ERROR_CERTIFICATE + {"PEER_ERROR_CERTIFICATE", ERR_LIB_SSL, SSL_R_PEER_ERROR_CERTIFICATE}, + #else + {"PEER_ERROR_CERTIFICATE", ERR_LIB_SSL, 201}, + #endif + #ifdef SSL_R_PEER_ERROR_NO_CERTIFICATE + {"PEER_ERROR_NO_CERTIFICATE", ERR_LIB_SSL, SSL_R_PEER_ERROR_NO_CERTIFICATE}, + #else + {"PEER_ERROR_NO_CERTIFICATE", ERR_LIB_SSL, 202}, + #endif + #ifdef SSL_R_PEER_ERROR_NO_CIPHER + {"PEER_ERROR_NO_CIPHER", ERR_LIB_SSL, SSL_R_PEER_ERROR_NO_CIPHER}, + #else + {"PEER_ERROR_NO_CIPHER", ERR_LIB_SSL, 203}, + #endif + #ifdef SSL_R_PEER_ERROR_UNSUPPORTED_CERTIFICATE_TYPE + {"PEER_ERROR_UNSUPPORTED_CERTIFICATE_TYPE", ERR_LIB_SSL, SSL_R_PEER_ERROR_UNSUPPORTED_CERTIFICATE_TYPE}, + #else + {"PEER_ERROR_UNSUPPORTED_CERTIFICATE_TYPE", ERR_LIB_SSL, 204}, + #endif + #ifdef SSL_R_PRE_MAC_LENGTH_TOO_LONG + {"PRE_MAC_LENGTH_TOO_LONG", ERR_LIB_SSL, SSL_R_PRE_MAC_LENGTH_TOO_LONG}, + #else + {"PRE_MAC_LENGTH_TOO_LONG", ERR_LIB_SSL, 205}, + #endif + #ifdef SSL_R_PROBLEMS_MAPPING_CIPHER_FUNCTIONS + {"PROBLEMS_MAPPING_CIPHER_FUNCTIONS", ERR_LIB_SSL, SSL_R_PROBLEMS_MAPPING_CIPHER_FUNCTIONS}, + #else + {"PROBLEMS_MAPPING_CIPHER_FUNCTIONS", ERR_LIB_SSL, 206}, + #endif + #ifdef SSL_R_PROTOCOL_IS_SHUTDOWN + {"PROTOCOL_IS_SHUTDOWN", ERR_LIB_SSL, SSL_R_PROTOCOL_IS_SHUTDOWN}, + #else + {"PROTOCOL_IS_SHUTDOWN", ERR_LIB_SSL, 207}, + #endif + #ifdef SSL_R_PSK_IDENTITY_NOT_FOUND + {"PSK_IDENTITY_NOT_FOUND", ERR_LIB_SSL, SSL_R_PSK_IDENTITY_NOT_FOUND}, + #else + {"PSK_IDENTITY_NOT_FOUND", ERR_LIB_SSL, 223}, + #endif + #ifdef SSL_R_PSK_NO_CLIENT_CB + {"PSK_NO_CLIENT_CB", ERR_LIB_SSL, SSL_R_PSK_NO_CLIENT_CB}, + #else + {"PSK_NO_CLIENT_CB", ERR_LIB_SSL, 224}, + #endif + #ifdef SSL_R_PSK_NO_SERVER_CB + {"PSK_NO_SERVER_CB", ERR_LIB_SSL, SSL_R_PSK_NO_SERVER_CB}, + #else + {"PSK_NO_SERVER_CB", ERR_LIB_SSL, 225}, + #endif + #ifdef SSL_R_PUBLIC_KEY_ENCRYPT_ERROR + {"PUBLIC_KEY_ENCRYPT_ERROR", ERR_LIB_SSL, SSL_R_PUBLIC_KEY_ENCRYPT_ERROR}, + #else + {"PUBLIC_KEY_ENCRYPT_ERROR", ERR_LIB_SSL, 208}, + #endif + #ifdef SSL_R_PUBLIC_KEY_IS_NOT_RSA + {"PUBLIC_KEY_IS_NOT_RSA", ERR_LIB_SSL, SSL_R_PUBLIC_KEY_IS_NOT_RSA}, + #else + {"PUBLIC_KEY_IS_NOT_RSA", ERR_LIB_SSL, 209}, + #endif + #ifdef SSL_R_PUBLIC_KEY_NOT_RSA + {"PUBLIC_KEY_NOT_RSA", ERR_LIB_SSL, SSL_R_PUBLIC_KEY_NOT_RSA}, + #else + {"PUBLIC_KEY_NOT_RSA", ERR_LIB_SSL, 210}, + #endif + #ifdef SSL_R_READ_BIO_NOT_SET + {"READ_BIO_NOT_SET", ERR_LIB_SSL, SSL_R_READ_BIO_NOT_SET}, + #else + {"READ_BIO_NOT_SET", ERR_LIB_SSL, 211}, + #endif + #ifdef SSL_R_READ_TIMEOUT_EXPIRED + {"READ_TIMEOUT_EXPIRED", ERR_LIB_SSL, SSL_R_READ_TIMEOUT_EXPIRED}, + #else + {"READ_TIMEOUT_EXPIRED", ERR_LIB_SSL, 312}, + #endif + #ifdef SSL_R_READ_WRONG_PACKET_TYPE + {"READ_WRONG_PACKET_TYPE", ERR_LIB_SSL, SSL_R_READ_WRONG_PACKET_TYPE}, + #else + {"READ_WRONG_PACKET_TYPE", ERR_LIB_SSL, 212}, + #endif + #ifdef SSL_R_RECORD_LENGTH_MISMATCH + {"RECORD_LENGTH_MISMATCH", ERR_LIB_SSL, SSL_R_RECORD_LENGTH_MISMATCH}, + #else + {"RECORD_LENGTH_MISMATCH", ERR_LIB_SSL, 213}, + #endif + #ifdef SSL_R_RECORD_TOO_LARGE + {"RECORD_TOO_LARGE", ERR_LIB_SSL, SSL_R_RECORD_TOO_LARGE}, + #else + {"RECORD_TOO_LARGE", ERR_LIB_SSL, 214}, + #endif + #ifdef SSL_R_RECORD_TOO_SMALL + {"RECORD_TOO_SMALL", ERR_LIB_SSL, SSL_R_RECORD_TOO_SMALL}, + #else + {"RECORD_TOO_SMALL", ERR_LIB_SSL, 298}, + #endif + #ifdef SSL_R_RENEGOTIATE_EXT_TOO_LONG + {"RENEGOTIATE_EXT_TOO_LONG", ERR_LIB_SSL, SSL_R_RENEGOTIATE_EXT_TOO_LONG}, + #else + {"RENEGOTIATE_EXT_TOO_LONG", ERR_LIB_SSL, 335}, + #endif + #ifdef 
SSL_R_RENEGOTIATION_ENCODING_ERR + {"RENEGOTIATION_ENCODING_ERR", ERR_LIB_SSL, SSL_R_RENEGOTIATION_ENCODING_ERR}, + #else + {"RENEGOTIATION_ENCODING_ERR", ERR_LIB_SSL, 336}, + #endif + #ifdef SSL_R_RENEGOTIATION_MISMATCH + {"RENEGOTIATION_MISMATCH", ERR_LIB_SSL, SSL_R_RENEGOTIATION_MISMATCH}, + #else + {"RENEGOTIATION_MISMATCH", ERR_LIB_SSL, 337}, + #endif + #ifdef SSL_R_REQUIRED_CIPHER_MISSING + {"REQUIRED_CIPHER_MISSING", ERR_LIB_SSL, SSL_R_REQUIRED_CIPHER_MISSING}, + #else + {"REQUIRED_CIPHER_MISSING", ERR_LIB_SSL, 215}, + #endif + #ifdef SSL_R_REQUIRED_COMPRESSSION_ALGORITHM_MISSING + {"REQUIRED_COMPRESSSION_ALGORITHM_MISSING", ERR_LIB_SSL, SSL_R_REQUIRED_COMPRESSSION_ALGORITHM_MISSING}, + #else + {"REQUIRED_COMPRESSSION_ALGORITHM_MISSING", ERR_LIB_SSL, 342}, + #endif + #ifdef SSL_R_REUSE_CERT_LENGTH_NOT_ZERO + {"REUSE_CERT_LENGTH_NOT_ZERO", ERR_LIB_SSL, SSL_R_REUSE_CERT_LENGTH_NOT_ZERO}, + #else + {"REUSE_CERT_LENGTH_NOT_ZERO", ERR_LIB_SSL, 216}, + #endif + #ifdef SSL_R_REUSE_CERT_TYPE_NOT_ZERO + {"REUSE_CERT_TYPE_NOT_ZERO", ERR_LIB_SSL, SSL_R_REUSE_CERT_TYPE_NOT_ZERO}, + #else + {"REUSE_CERT_TYPE_NOT_ZERO", ERR_LIB_SSL, 217}, + #endif + #ifdef SSL_R_REUSE_CIPHER_LIST_NOT_ZERO + {"REUSE_CIPHER_LIST_NOT_ZERO", ERR_LIB_SSL, SSL_R_REUSE_CIPHER_LIST_NOT_ZERO}, + #else + {"REUSE_CIPHER_LIST_NOT_ZERO", ERR_LIB_SSL, 218}, + #endif + #ifdef SSL_R_SCSV_RECEIVED_WHEN_RENEGOTIATING + {"SCSV_RECEIVED_WHEN_RENEGOTIATING", ERR_LIB_SSL, SSL_R_SCSV_RECEIVED_WHEN_RENEGOTIATING}, + #else + {"SCSV_RECEIVED_WHEN_RENEGOTIATING", ERR_LIB_SSL, 345}, + #endif + #ifdef SSL_R_SERVERHELLO_TLSEXT + {"SERVERHELLO_TLSEXT", ERR_LIB_SSL, SSL_R_SERVERHELLO_TLSEXT}, + #else + {"SERVERHELLO_TLSEXT", ERR_LIB_SSL, 275}, + #endif + #ifdef SSL_R_SESSION_ID_CONTEXT_UNINITIALIZED + {"SESSION_ID_CONTEXT_UNINITIALIZED", ERR_LIB_SSL, SSL_R_SESSION_ID_CONTEXT_UNINITIALIZED}, + #else + {"SESSION_ID_CONTEXT_UNINITIALIZED", ERR_LIB_SSL, 277}, + #endif + #ifdef SSL_R_SHORT_READ + {"SHORT_READ", ERR_LIB_SSL, SSL_R_SHORT_READ}, + #else + {"SHORT_READ", ERR_LIB_SSL, 219}, + #endif + #ifdef SSL_R_SIGNATURE_FOR_NON_SIGNING_CERTIFICATE + {"SIGNATURE_FOR_NON_SIGNING_CERTIFICATE", ERR_LIB_SSL, SSL_R_SIGNATURE_FOR_NON_SIGNING_CERTIFICATE}, + #else + {"SIGNATURE_FOR_NON_SIGNING_CERTIFICATE", ERR_LIB_SSL, 220}, + #endif + #ifdef SSL_R_SSL23_DOING_SESSION_ID_REUSE + {"SSL23_DOING_SESSION_ID_REUSE", ERR_LIB_SSL, SSL_R_SSL23_DOING_SESSION_ID_REUSE}, + #else + {"SSL23_DOING_SESSION_ID_REUSE", ERR_LIB_SSL, 221}, + #endif + #ifdef SSL_R_SSL2_CONNECTION_ID_TOO_LONG + {"SSL2_CONNECTION_ID_TOO_LONG", ERR_LIB_SSL, SSL_R_SSL2_CONNECTION_ID_TOO_LONG}, + #else + {"SSL2_CONNECTION_ID_TOO_LONG", ERR_LIB_SSL, 299}, + #endif + #ifdef SSL_R_SSL3_EXT_INVALID_ECPOINTFORMAT + {"SSL3_EXT_INVALID_ECPOINTFORMAT", ERR_LIB_SSL, SSL_R_SSL3_EXT_INVALID_ECPOINTFORMAT}, + #else + {"SSL3_EXT_INVALID_ECPOINTFORMAT", ERR_LIB_SSL, 321}, + #endif + #ifdef SSL_R_SSL3_EXT_INVALID_SERVERNAME + {"SSL3_EXT_INVALID_SERVERNAME", ERR_LIB_SSL, SSL_R_SSL3_EXT_INVALID_SERVERNAME}, + #else + {"SSL3_EXT_INVALID_SERVERNAME", ERR_LIB_SSL, 319}, + #endif + #ifdef SSL_R_SSL3_EXT_INVALID_SERVERNAME_TYPE + {"SSL3_EXT_INVALID_SERVERNAME_TYPE", ERR_LIB_SSL, SSL_R_SSL3_EXT_INVALID_SERVERNAME_TYPE}, + #else + {"SSL3_EXT_INVALID_SERVERNAME_TYPE", ERR_LIB_SSL, 320}, + #endif + #ifdef SSL_R_SSL3_SESSION_ID_TOO_LONG + {"SSL3_SESSION_ID_TOO_LONG", ERR_LIB_SSL, SSL_R_SSL3_SESSION_ID_TOO_LONG}, + #else + {"SSL3_SESSION_ID_TOO_LONG", ERR_LIB_SSL, 300}, + #endif + #ifdef SSL_R_SSL3_SESSION_ID_TOO_SHORT + 
{"SSL3_SESSION_ID_TOO_SHORT", ERR_LIB_SSL, SSL_R_SSL3_SESSION_ID_TOO_SHORT}, + #else + {"SSL3_SESSION_ID_TOO_SHORT", ERR_LIB_SSL, 222}, + #endif + #ifdef SSL_R_SSLV3_ALERT_BAD_CERTIFICATE + {"SSLV3_ALERT_BAD_CERTIFICATE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_BAD_CERTIFICATE}, + #else + {"SSLV3_ALERT_BAD_CERTIFICATE", ERR_LIB_SSL, 1042}, + #endif + #ifdef SSL_R_SSLV3_ALERT_BAD_RECORD_MAC + {"SSLV3_ALERT_BAD_RECORD_MAC", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_BAD_RECORD_MAC}, + #else + {"SSLV3_ALERT_BAD_RECORD_MAC", ERR_LIB_SSL, 1020}, + #endif + #ifdef SSL_R_SSLV3_ALERT_CERTIFICATE_EXPIRED + {"SSLV3_ALERT_CERTIFICATE_EXPIRED", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_CERTIFICATE_EXPIRED}, + #else + {"SSLV3_ALERT_CERTIFICATE_EXPIRED", ERR_LIB_SSL, 1045}, + #endif + #ifdef SSL_R_SSLV3_ALERT_CERTIFICATE_REVOKED + {"SSLV3_ALERT_CERTIFICATE_REVOKED", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_CERTIFICATE_REVOKED}, + #else + {"SSLV3_ALERT_CERTIFICATE_REVOKED", ERR_LIB_SSL, 1044}, + #endif + #ifdef SSL_R_SSLV3_ALERT_CERTIFICATE_UNKNOWN + {"SSLV3_ALERT_CERTIFICATE_UNKNOWN", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_CERTIFICATE_UNKNOWN}, + #else + {"SSLV3_ALERT_CERTIFICATE_UNKNOWN", ERR_LIB_SSL, 1046}, + #endif + #ifdef SSL_R_SSLV3_ALERT_DECOMPRESSION_FAILURE + {"SSLV3_ALERT_DECOMPRESSION_FAILURE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_DECOMPRESSION_FAILURE}, + #else + {"SSLV3_ALERT_DECOMPRESSION_FAILURE", ERR_LIB_SSL, 1030}, + #endif + #ifdef SSL_R_SSLV3_ALERT_HANDSHAKE_FAILURE + {"SSLV3_ALERT_HANDSHAKE_FAILURE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_HANDSHAKE_FAILURE}, + #else + {"SSLV3_ALERT_HANDSHAKE_FAILURE", ERR_LIB_SSL, 1040}, + #endif + #ifdef SSL_R_SSLV3_ALERT_ILLEGAL_PARAMETER + {"SSLV3_ALERT_ILLEGAL_PARAMETER", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_ILLEGAL_PARAMETER}, + #else + {"SSLV3_ALERT_ILLEGAL_PARAMETER", ERR_LIB_SSL, 1047}, + #endif + #ifdef SSL_R_SSLV3_ALERT_NO_CERTIFICATE + {"SSLV3_ALERT_NO_CERTIFICATE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_NO_CERTIFICATE}, + #else + {"SSLV3_ALERT_NO_CERTIFICATE", ERR_LIB_SSL, 1041}, + #endif + #ifdef SSL_R_SSLV3_ALERT_UNEXPECTED_MESSAGE + {"SSLV3_ALERT_UNEXPECTED_MESSAGE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_UNEXPECTED_MESSAGE}, + #else + {"SSLV3_ALERT_UNEXPECTED_MESSAGE", ERR_LIB_SSL, 1010}, + #endif + #ifdef SSL_R_SSLV3_ALERT_UNSUPPORTED_CERTIFICATE + {"SSLV3_ALERT_UNSUPPORTED_CERTIFICATE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_UNSUPPORTED_CERTIFICATE}, + #else + {"SSLV3_ALERT_UNSUPPORTED_CERTIFICATE", ERR_LIB_SSL, 1043}, + #endif + #ifdef SSL_R_SSL_CTX_HAS_NO_DEFAULT_SSL_VERSION + {"SSL_CTX_HAS_NO_DEFAULT_SSL_VERSION", ERR_LIB_SSL, SSL_R_SSL_CTX_HAS_NO_DEFAULT_SSL_VERSION}, + #else + {"SSL_CTX_HAS_NO_DEFAULT_SSL_VERSION", ERR_LIB_SSL, 228}, + #endif + #ifdef SSL_R_SSL_HANDSHAKE_FAILURE + {"SSL_HANDSHAKE_FAILURE", ERR_LIB_SSL, SSL_R_SSL_HANDSHAKE_FAILURE}, + #else + {"SSL_HANDSHAKE_FAILURE", ERR_LIB_SSL, 229}, + #endif + #ifdef SSL_R_SSL_LIBRARY_HAS_NO_CIPHERS + {"SSL_LIBRARY_HAS_NO_CIPHERS", ERR_LIB_SSL, SSL_R_SSL_LIBRARY_HAS_NO_CIPHERS}, + #else + {"SSL_LIBRARY_HAS_NO_CIPHERS", ERR_LIB_SSL, 230}, + #endif + #ifdef SSL_R_SSL_SESSION_ID_CALLBACK_FAILED + {"SSL_SESSION_ID_CALLBACK_FAILED", ERR_LIB_SSL, SSL_R_SSL_SESSION_ID_CALLBACK_FAILED}, + #else + {"SSL_SESSION_ID_CALLBACK_FAILED", ERR_LIB_SSL, 301}, + #endif + #ifdef SSL_R_SSL_SESSION_ID_CONFLICT + {"SSL_SESSION_ID_CONFLICT", ERR_LIB_SSL, SSL_R_SSL_SESSION_ID_CONFLICT}, + #else + {"SSL_SESSION_ID_CONFLICT", ERR_LIB_SSL, 302}, + #endif + #ifdef SSL_R_SSL_SESSION_ID_CONTEXT_TOO_LONG + {"SSL_SESSION_ID_CONTEXT_TOO_LONG", ERR_LIB_SSL, SSL_R_SSL_SESSION_ID_CONTEXT_TOO_LONG}, + 
#else + {"SSL_SESSION_ID_CONTEXT_TOO_LONG", ERR_LIB_SSL, 273}, + #endif + #ifdef SSL_R_SSL_SESSION_ID_HAS_BAD_LENGTH + {"SSL_SESSION_ID_HAS_BAD_LENGTH", ERR_LIB_SSL, SSL_R_SSL_SESSION_ID_HAS_BAD_LENGTH}, + #else + {"SSL_SESSION_ID_HAS_BAD_LENGTH", ERR_LIB_SSL, 303}, + #endif + #ifdef SSL_R_SSL_SESSION_ID_IS_DIFFERENT + {"SSL_SESSION_ID_IS_DIFFERENT", ERR_LIB_SSL, SSL_R_SSL_SESSION_ID_IS_DIFFERENT}, + #else + {"SSL_SESSION_ID_IS_DIFFERENT", ERR_LIB_SSL, 231}, + #endif + #ifdef SSL_R_TLSV1_ALERT_ACCESS_DENIED + {"TLSV1_ALERT_ACCESS_DENIED", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_ACCESS_DENIED}, + #else + {"TLSV1_ALERT_ACCESS_DENIED", ERR_LIB_SSL, 1049}, + #endif + #ifdef SSL_R_TLSV1_ALERT_DECODE_ERROR + {"TLSV1_ALERT_DECODE_ERROR", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_DECODE_ERROR}, + #else + {"TLSV1_ALERT_DECODE_ERROR", ERR_LIB_SSL, 1050}, + #endif + #ifdef SSL_R_TLSV1_ALERT_DECRYPTION_FAILED + {"TLSV1_ALERT_DECRYPTION_FAILED", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_DECRYPTION_FAILED}, + #else + {"TLSV1_ALERT_DECRYPTION_FAILED", ERR_LIB_SSL, 1021}, + #endif + #ifdef SSL_R_TLSV1_ALERT_DECRYPT_ERROR + {"TLSV1_ALERT_DECRYPT_ERROR", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_DECRYPT_ERROR}, + #else + {"TLSV1_ALERT_DECRYPT_ERROR", ERR_LIB_SSL, 1051}, + #endif + #ifdef SSL_R_TLSV1_ALERT_EXPORT_RESTRICTION + {"TLSV1_ALERT_EXPORT_RESTRICTION", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_EXPORT_RESTRICTION}, + #else + {"TLSV1_ALERT_EXPORT_RESTRICTION", ERR_LIB_SSL, 1060}, + #endif + #ifdef SSL_R_TLSV1_ALERT_INSUFFICIENT_SECURITY + {"TLSV1_ALERT_INSUFFICIENT_SECURITY", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_INSUFFICIENT_SECURITY}, + #else + {"TLSV1_ALERT_INSUFFICIENT_SECURITY", ERR_LIB_SSL, 1071}, + #endif + #ifdef SSL_R_TLSV1_ALERT_INTERNAL_ERROR + {"TLSV1_ALERT_INTERNAL_ERROR", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_INTERNAL_ERROR}, + #else + {"TLSV1_ALERT_INTERNAL_ERROR", ERR_LIB_SSL, 1080}, + #endif + #ifdef SSL_R_TLSV1_ALERT_NO_RENEGOTIATION + {"TLSV1_ALERT_NO_RENEGOTIATION", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_NO_RENEGOTIATION}, + #else + {"TLSV1_ALERT_NO_RENEGOTIATION", ERR_LIB_SSL, 1100}, + #endif + #ifdef SSL_R_TLSV1_ALERT_PROTOCOL_VERSION + {"TLSV1_ALERT_PROTOCOL_VERSION", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_PROTOCOL_VERSION}, + #else + {"TLSV1_ALERT_PROTOCOL_VERSION", ERR_LIB_SSL, 1070}, + #endif + #ifdef SSL_R_TLSV1_ALERT_RECORD_OVERFLOW + {"TLSV1_ALERT_RECORD_OVERFLOW", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_RECORD_OVERFLOW}, + #else + {"TLSV1_ALERT_RECORD_OVERFLOW", ERR_LIB_SSL, 1022}, + #endif + #ifdef SSL_R_TLSV1_ALERT_UNKNOWN_CA + {"TLSV1_ALERT_UNKNOWN_CA", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_UNKNOWN_CA}, + #else + {"TLSV1_ALERT_UNKNOWN_CA", ERR_LIB_SSL, 1048}, + #endif + #ifdef SSL_R_TLSV1_ALERT_USER_CANCELLED + {"TLSV1_ALERT_USER_CANCELLED", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_USER_CANCELLED}, + #else + {"TLSV1_ALERT_USER_CANCELLED", ERR_LIB_SSL, 1090}, + #endif + #ifdef SSL_R_TLSV1_BAD_CERTIFICATE_HASH_VALUE + {"TLSV1_BAD_CERTIFICATE_HASH_VALUE", ERR_LIB_SSL, SSL_R_TLSV1_BAD_CERTIFICATE_HASH_VALUE}, + #else + {"TLSV1_BAD_CERTIFICATE_HASH_VALUE", ERR_LIB_SSL, 1114}, + #endif + #ifdef SSL_R_TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE + {"TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE", ERR_LIB_SSL, SSL_R_TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE}, + #else + {"TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE", ERR_LIB_SSL, 1113}, + #endif + #ifdef SSL_R_TLSV1_CERTIFICATE_UNOBTAINABLE + {"TLSV1_CERTIFICATE_UNOBTAINABLE", ERR_LIB_SSL, SSL_R_TLSV1_CERTIFICATE_UNOBTAINABLE}, + #else + {"TLSV1_CERTIFICATE_UNOBTAINABLE", ERR_LIB_SSL, 1111}, + #endif + #ifdef SSL_R_TLSV1_UNRECOGNIZED_NAME + 
{"TLSV1_UNRECOGNIZED_NAME", ERR_LIB_SSL, SSL_R_TLSV1_UNRECOGNIZED_NAME}, + #else + {"TLSV1_UNRECOGNIZED_NAME", ERR_LIB_SSL, 1112}, + #endif + #ifdef SSL_R_TLSV1_UNSUPPORTED_EXTENSION + {"TLSV1_UNSUPPORTED_EXTENSION", ERR_LIB_SSL, SSL_R_TLSV1_UNSUPPORTED_EXTENSION}, + #else + {"TLSV1_UNSUPPORTED_EXTENSION", ERR_LIB_SSL, 1110}, + #endif + #ifdef SSL_R_TLS_CLIENT_CERT_REQ_WITH_ANON_CIPHER + {"TLS_CLIENT_CERT_REQ_WITH_ANON_CIPHER", ERR_LIB_SSL, SSL_R_TLS_CLIENT_CERT_REQ_WITH_ANON_CIPHER}, + #else + {"TLS_CLIENT_CERT_REQ_WITH_ANON_CIPHER", ERR_LIB_SSL, 232}, + #endif + #ifdef SSL_R_TLS_INVALID_ECPOINTFORMAT_LIST + {"TLS_INVALID_ECPOINTFORMAT_LIST", ERR_LIB_SSL, SSL_R_TLS_INVALID_ECPOINTFORMAT_LIST}, + #else + {"TLS_INVALID_ECPOINTFORMAT_LIST", ERR_LIB_SSL, 157}, + #endif + #ifdef SSL_R_TLS_PEER_DID_NOT_RESPOND_WITH_CERTIFICATE_LIST + {"TLS_PEER_DID_NOT_RESPOND_WITH_CERTIFICATE_LIST", ERR_LIB_SSL, SSL_R_TLS_PEER_DID_NOT_RESPOND_WITH_CERTIFICATE_LIST}, + #else + {"TLS_PEER_DID_NOT_RESPOND_WITH_CERTIFICATE_LIST", ERR_LIB_SSL, 233}, + #endif + #ifdef SSL_R_TLS_RSA_ENCRYPTED_VALUE_LENGTH_IS_WRONG + {"TLS_RSA_ENCRYPTED_VALUE_LENGTH_IS_WRONG", ERR_LIB_SSL, SSL_R_TLS_RSA_ENCRYPTED_VALUE_LENGTH_IS_WRONG}, + #else + {"TLS_RSA_ENCRYPTED_VALUE_LENGTH_IS_WRONG", ERR_LIB_SSL, 234}, + #endif + #ifdef SSL_R_TRIED_TO_USE_UNSUPPORTED_CIPHER + {"TRIED_TO_USE_UNSUPPORTED_CIPHER", ERR_LIB_SSL, SSL_R_TRIED_TO_USE_UNSUPPORTED_CIPHER}, + #else + {"TRIED_TO_USE_UNSUPPORTED_CIPHER", ERR_LIB_SSL, 235}, + #endif + #ifdef SSL_R_UNABLE_TO_DECODE_DH_CERTS + {"UNABLE_TO_DECODE_DH_CERTS", ERR_LIB_SSL, SSL_R_UNABLE_TO_DECODE_DH_CERTS}, + #else + {"UNABLE_TO_DECODE_DH_CERTS", ERR_LIB_SSL, 236}, + #endif + #ifdef SSL_R_UNABLE_TO_DECODE_ECDH_CERTS + {"UNABLE_TO_DECODE_ECDH_CERTS", ERR_LIB_SSL, SSL_R_UNABLE_TO_DECODE_ECDH_CERTS}, + #else + {"UNABLE_TO_DECODE_ECDH_CERTS", ERR_LIB_SSL, 313}, + #endif + #ifdef SSL_R_UNABLE_TO_EXTRACT_PUBLIC_KEY + {"UNABLE_TO_EXTRACT_PUBLIC_KEY", ERR_LIB_SSL, SSL_R_UNABLE_TO_EXTRACT_PUBLIC_KEY}, + #else + {"UNABLE_TO_EXTRACT_PUBLIC_KEY", ERR_LIB_SSL, 237}, + #endif + #ifdef SSL_R_UNABLE_TO_FIND_DH_PARAMETERS + {"UNABLE_TO_FIND_DH_PARAMETERS", ERR_LIB_SSL, SSL_R_UNABLE_TO_FIND_DH_PARAMETERS}, + #else + {"UNABLE_TO_FIND_DH_PARAMETERS", ERR_LIB_SSL, 238}, + #endif + #ifdef SSL_R_UNABLE_TO_FIND_ECDH_PARAMETERS + {"UNABLE_TO_FIND_ECDH_PARAMETERS", ERR_LIB_SSL, SSL_R_UNABLE_TO_FIND_ECDH_PARAMETERS}, + #else + {"UNABLE_TO_FIND_ECDH_PARAMETERS", ERR_LIB_SSL, 314}, + #endif + #ifdef SSL_R_UNABLE_TO_FIND_PUBLIC_KEY_PARAMETERS + {"UNABLE_TO_FIND_PUBLIC_KEY_PARAMETERS", ERR_LIB_SSL, SSL_R_UNABLE_TO_FIND_PUBLIC_KEY_PARAMETERS}, + #else + {"UNABLE_TO_FIND_PUBLIC_KEY_PARAMETERS", ERR_LIB_SSL, 239}, + #endif + #ifdef SSL_R_UNABLE_TO_FIND_SSL_METHOD + {"UNABLE_TO_FIND_SSL_METHOD", ERR_LIB_SSL, SSL_R_UNABLE_TO_FIND_SSL_METHOD}, + #else + {"UNABLE_TO_FIND_SSL_METHOD", ERR_LIB_SSL, 240}, + #endif + #ifdef SSL_R_UNABLE_TO_LOAD_SSL2_MD5_ROUTINES + {"UNABLE_TO_LOAD_SSL2_MD5_ROUTINES", ERR_LIB_SSL, SSL_R_UNABLE_TO_LOAD_SSL2_MD5_ROUTINES}, + #else + {"UNABLE_TO_LOAD_SSL2_MD5_ROUTINES", ERR_LIB_SSL, 241}, + #endif + #ifdef SSL_R_UNABLE_TO_LOAD_SSL3_MD5_ROUTINES + {"UNABLE_TO_LOAD_SSL3_MD5_ROUTINES", ERR_LIB_SSL, SSL_R_UNABLE_TO_LOAD_SSL3_MD5_ROUTINES}, + #else + {"UNABLE_TO_LOAD_SSL3_MD5_ROUTINES", ERR_LIB_SSL, 242}, + #endif + #ifdef SSL_R_UNABLE_TO_LOAD_SSL3_SHA1_ROUTINES + {"UNABLE_TO_LOAD_SSL3_SHA1_ROUTINES", ERR_LIB_SSL, SSL_R_UNABLE_TO_LOAD_SSL3_SHA1_ROUTINES}, + #else + {"UNABLE_TO_LOAD_SSL3_SHA1_ROUTINES", ERR_LIB_SSL, 
243}, + #endif + #ifdef SSL_R_UNEXPECTED_MESSAGE + {"UNEXPECTED_MESSAGE", ERR_LIB_SSL, SSL_R_UNEXPECTED_MESSAGE}, + #else + {"UNEXPECTED_MESSAGE", ERR_LIB_SSL, 244}, + #endif + #ifdef SSL_R_UNEXPECTED_RECORD + {"UNEXPECTED_RECORD", ERR_LIB_SSL, SSL_R_UNEXPECTED_RECORD}, + #else + {"UNEXPECTED_RECORD", ERR_LIB_SSL, 245}, + #endif + #ifdef SSL_R_UNINITIALIZED + {"UNINITIALIZED", ERR_LIB_SSL, SSL_R_UNINITIALIZED}, + #else + {"UNINITIALIZED", ERR_LIB_SSL, 276}, + #endif + #ifdef SSL_R_UNKNOWN_ALERT_TYPE + {"UNKNOWN_ALERT_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_ALERT_TYPE}, + #else + {"UNKNOWN_ALERT_TYPE", ERR_LIB_SSL, 246}, + #endif + #ifdef SSL_R_UNKNOWN_CERTIFICATE_TYPE + {"UNKNOWN_CERTIFICATE_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_CERTIFICATE_TYPE}, + #else + {"UNKNOWN_CERTIFICATE_TYPE", ERR_LIB_SSL, 247}, + #endif + #ifdef SSL_R_UNKNOWN_CIPHER_RETURNED + {"UNKNOWN_CIPHER_RETURNED", ERR_LIB_SSL, SSL_R_UNKNOWN_CIPHER_RETURNED}, + #else + {"UNKNOWN_CIPHER_RETURNED", ERR_LIB_SSL, 248}, + #endif + #ifdef SSL_R_UNKNOWN_CIPHER_TYPE + {"UNKNOWN_CIPHER_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_CIPHER_TYPE}, + #else + {"UNKNOWN_CIPHER_TYPE", ERR_LIB_SSL, 249}, + #endif + #ifdef SSL_R_UNKNOWN_KEY_EXCHANGE_TYPE + {"UNKNOWN_KEY_EXCHANGE_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_KEY_EXCHANGE_TYPE}, + #else + {"UNKNOWN_KEY_EXCHANGE_TYPE", ERR_LIB_SSL, 250}, + #endif + #ifdef SSL_R_UNKNOWN_PKEY_TYPE + {"UNKNOWN_PKEY_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_PKEY_TYPE}, + #else + {"UNKNOWN_PKEY_TYPE", ERR_LIB_SSL, 251}, + #endif + #ifdef SSL_R_UNKNOWN_PROTOCOL + {"UNKNOWN_PROTOCOL", ERR_LIB_SSL, SSL_R_UNKNOWN_PROTOCOL}, + #else + {"UNKNOWN_PROTOCOL", ERR_LIB_SSL, 252}, + #endif + #ifdef SSL_R_UNKNOWN_REMOTE_ERROR_TYPE + {"UNKNOWN_REMOTE_ERROR_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_REMOTE_ERROR_TYPE}, + #else + {"UNKNOWN_REMOTE_ERROR_TYPE", ERR_LIB_SSL, 253}, + #endif + #ifdef SSL_R_UNKNOWN_SSL_VERSION + {"UNKNOWN_SSL_VERSION", ERR_LIB_SSL, SSL_R_UNKNOWN_SSL_VERSION}, + #else + {"UNKNOWN_SSL_VERSION", ERR_LIB_SSL, 254}, + #endif + #ifdef SSL_R_UNKNOWN_STATE + {"UNKNOWN_STATE", ERR_LIB_SSL, SSL_R_UNKNOWN_STATE}, + #else + {"UNKNOWN_STATE", ERR_LIB_SSL, 255}, + #endif + #ifdef SSL_R_UNSAFE_LEGACY_RENEGOTIATION_DISABLED + {"UNSAFE_LEGACY_RENEGOTIATION_DISABLED", ERR_LIB_SSL, SSL_R_UNSAFE_LEGACY_RENEGOTIATION_DISABLED}, + #else + {"UNSAFE_LEGACY_RENEGOTIATION_DISABLED", ERR_LIB_SSL, 338}, + #endif + #ifdef SSL_R_UNSUPPORTED_CIPHER + {"UNSUPPORTED_CIPHER", ERR_LIB_SSL, SSL_R_UNSUPPORTED_CIPHER}, + #else + {"UNSUPPORTED_CIPHER", ERR_LIB_SSL, 256}, + #endif + #ifdef SSL_R_UNSUPPORTED_COMPRESSION_ALGORITHM + {"UNSUPPORTED_COMPRESSION_ALGORITHM", ERR_LIB_SSL, SSL_R_UNSUPPORTED_COMPRESSION_ALGORITHM}, + #else + {"UNSUPPORTED_COMPRESSION_ALGORITHM", ERR_LIB_SSL, 257}, + #endif + #ifdef SSL_R_UNSUPPORTED_DIGEST_TYPE + {"UNSUPPORTED_DIGEST_TYPE", ERR_LIB_SSL, SSL_R_UNSUPPORTED_DIGEST_TYPE}, + #else + {"UNSUPPORTED_DIGEST_TYPE", ERR_LIB_SSL, 326}, + #endif + #ifdef SSL_R_UNSUPPORTED_ELLIPTIC_CURVE + {"UNSUPPORTED_ELLIPTIC_CURVE", ERR_LIB_SSL, SSL_R_UNSUPPORTED_ELLIPTIC_CURVE}, + #else + {"UNSUPPORTED_ELLIPTIC_CURVE", ERR_LIB_SSL, 315}, + #endif + #ifdef SSL_R_UNSUPPORTED_PROTOCOL + {"UNSUPPORTED_PROTOCOL", ERR_LIB_SSL, SSL_R_UNSUPPORTED_PROTOCOL}, + #else + {"UNSUPPORTED_PROTOCOL", ERR_LIB_SSL, 258}, + #endif + #ifdef SSL_R_UNSUPPORTED_SSL_VERSION + {"UNSUPPORTED_SSL_VERSION", ERR_LIB_SSL, SSL_R_UNSUPPORTED_SSL_VERSION}, + #else + {"UNSUPPORTED_SSL_VERSION", ERR_LIB_SSL, 259}, + #endif + #ifdef SSL_R_UNSUPPORTED_STATUS_TYPE + {"UNSUPPORTED_STATUS_TYPE", 
ERR_LIB_SSL, SSL_R_UNSUPPORTED_STATUS_TYPE}, + #else + {"UNSUPPORTED_STATUS_TYPE", ERR_LIB_SSL, 329}, + #endif + #ifdef SSL_R_WRITE_BIO_NOT_SET + {"WRITE_BIO_NOT_SET", ERR_LIB_SSL, SSL_R_WRITE_BIO_NOT_SET}, + #else + {"WRITE_BIO_NOT_SET", ERR_LIB_SSL, 260}, + #endif + #ifdef SSL_R_WRONG_CIPHER_RETURNED + {"WRONG_CIPHER_RETURNED", ERR_LIB_SSL, SSL_R_WRONG_CIPHER_RETURNED}, + #else + {"WRONG_CIPHER_RETURNED", ERR_LIB_SSL, 261}, + #endif + #ifdef SSL_R_WRONG_MESSAGE_TYPE + {"WRONG_MESSAGE_TYPE", ERR_LIB_SSL, SSL_R_WRONG_MESSAGE_TYPE}, + #else + {"WRONG_MESSAGE_TYPE", ERR_LIB_SSL, 262}, + #endif + #ifdef SSL_R_WRONG_NUMBER_OF_KEY_BITS + {"WRONG_NUMBER_OF_KEY_BITS", ERR_LIB_SSL, SSL_R_WRONG_NUMBER_OF_KEY_BITS}, + #else + {"WRONG_NUMBER_OF_KEY_BITS", ERR_LIB_SSL, 263}, + #endif + #ifdef SSL_R_WRONG_SIGNATURE_LENGTH + {"WRONG_SIGNATURE_LENGTH", ERR_LIB_SSL, SSL_R_WRONG_SIGNATURE_LENGTH}, + #else + {"WRONG_SIGNATURE_LENGTH", ERR_LIB_SSL, 264}, + #endif + #ifdef SSL_R_WRONG_SIGNATURE_SIZE + {"WRONG_SIGNATURE_SIZE", ERR_LIB_SSL, SSL_R_WRONG_SIGNATURE_SIZE}, + #else + {"WRONG_SIGNATURE_SIZE", ERR_LIB_SSL, 265}, + #endif + #ifdef SSL_R_WRONG_SSL_VERSION + {"WRONG_SSL_VERSION", ERR_LIB_SSL, SSL_R_WRONG_SSL_VERSION}, + #else + {"WRONG_SSL_VERSION", ERR_LIB_SSL, 266}, + #endif + #ifdef SSL_R_WRONG_VERSION_NUMBER + {"WRONG_VERSION_NUMBER", ERR_LIB_SSL, SSL_R_WRONG_VERSION_NUMBER}, + #else + {"WRONG_VERSION_NUMBER", ERR_LIB_SSL, 267}, + #endif + #ifdef SSL_R_X509_LIB + {"X509_LIB", ERR_LIB_SSL, SSL_R_X509_LIB}, + #else + {"X509_LIB", ERR_LIB_SSL, 268}, + #endif + #ifdef SSL_R_X509_VERIFICATION_SETUP_PROBLEMS + {"X509_VERIFICATION_SETUP_PROBLEMS", ERR_LIB_SSL, SSL_R_X509_VERIFICATION_SETUP_PROBLEMS}, + #else + {"X509_VERIFICATION_SETUP_PROBLEMS", ERR_LIB_SSL, 269}, + #endif + #ifdef X509_R_BAD_X509_FILETYPE + {"BAD_X509_FILETYPE", ERR_LIB_X509, X509_R_BAD_X509_FILETYPE}, + #else + {"BAD_X509_FILETYPE", ERR_LIB_X509, 100}, + #endif + #ifdef X509_R_BASE64_DECODE_ERROR + {"BASE64_DECODE_ERROR", ERR_LIB_X509, X509_R_BASE64_DECODE_ERROR}, + #else + {"BASE64_DECODE_ERROR", ERR_LIB_X509, 118}, + #endif + #ifdef X509_R_CANT_CHECK_DH_KEY + {"CANT_CHECK_DH_KEY", ERR_LIB_X509, X509_R_CANT_CHECK_DH_KEY}, + #else + {"CANT_CHECK_DH_KEY", ERR_LIB_X509, 114}, + #endif + #ifdef X509_R_CERT_ALREADY_IN_HASH_TABLE + {"CERT_ALREADY_IN_HASH_TABLE", ERR_LIB_X509, X509_R_CERT_ALREADY_IN_HASH_TABLE}, + #else + {"CERT_ALREADY_IN_HASH_TABLE", ERR_LIB_X509, 101}, + #endif + #ifdef X509_R_ERR_ASN1_LIB + {"ERR_ASN1_LIB", ERR_LIB_X509, X509_R_ERR_ASN1_LIB}, + #else + {"ERR_ASN1_LIB", ERR_LIB_X509, 102}, + #endif + #ifdef X509_R_INVALID_DIRECTORY + {"INVALID_DIRECTORY", ERR_LIB_X509, X509_R_INVALID_DIRECTORY}, + #else + {"INVALID_DIRECTORY", ERR_LIB_X509, 113}, + #endif + #ifdef X509_R_INVALID_FIELD_NAME + {"INVALID_FIELD_NAME", ERR_LIB_X509, X509_R_INVALID_FIELD_NAME}, + #else + {"INVALID_FIELD_NAME", ERR_LIB_X509, 119}, + #endif + #ifdef X509_R_INVALID_TRUST + {"INVALID_TRUST", ERR_LIB_X509, X509_R_INVALID_TRUST}, + #else + {"INVALID_TRUST", ERR_LIB_X509, 123}, + #endif + #ifdef X509_R_KEY_TYPE_MISMATCH + {"KEY_TYPE_MISMATCH", ERR_LIB_X509, X509_R_KEY_TYPE_MISMATCH}, + #else + {"KEY_TYPE_MISMATCH", ERR_LIB_X509, 115}, + #endif + #ifdef X509_R_KEY_VALUES_MISMATCH + {"KEY_VALUES_MISMATCH", ERR_LIB_X509, X509_R_KEY_VALUES_MISMATCH}, + #else + {"KEY_VALUES_MISMATCH", ERR_LIB_X509, 116}, + #endif + #ifdef X509_R_LOADING_CERT_DIR + {"LOADING_CERT_DIR", ERR_LIB_X509, X509_R_LOADING_CERT_DIR}, + #else + {"LOADING_CERT_DIR", ERR_LIB_X509, 103}, 
+ #endif + #ifdef X509_R_LOADING_DEFAULTS + {"LOADING_DEFAULTS", ERR_LIB_X509, X509_R_LOADING_DEFAULTS}, + #else + {"LOADING_DEFAULTS", ERR_LIB_X509, 104}, + #endif + #ifdef X509_R_METHOD_NOT_SUPPORTED + {"METHOD_NOT_SUPPORTED", ERR_LIB_X509, X509_R_METHOD_NOT_SUPPORTED}, + #else + {"METHOD_NOT_SUPPORTED", ERR_LIB_X509, 124}, + #endif + #ifdef X509_R_NO_CERT_SET_FOR_US_TO_VERIFY + {"NO_CERT_SET_FOR_US_TO_VERIFY", ERR_LIB_X509, X509_R_NO_CERT_SET_FOR_US_TO_VERIFY}, + #else + {"NO_CERT_SET_FOR_US_TO_VERIFY", ERR_LIB_X509, 105}, + #endif + #ifdef X509_R_PUBLIC_KEY_DECODE_ERROR + {"PUBLIC_KEY_DECODE_ERROR", ERR_LIB_X509, X509_R_PUBLIC_KEY_DECODE_ERROR}, + #else + {"PUBLIC_KEY_DECODE_ERROR", ERR_LIB_X509, 125}, + #endif + #ifdef X509_R_PUBLIC_KEY_ENCODE_ERROR + {"PUBLIC_KEY_ENCODE_ERROR", ERR_LIB_X509, X509_R_PUBLIC_KEY_ENCODE_ERROR}, + #else + {"PUBLIC_KEY_ENCODE_ERROR", ERR_LIB_X509, 126}, + #endif + #ifdef X509_R_SHOULD_RETRY + {"SHOULD_RETRY", ERR_LIB_X509, X509_R_SHOULD_RETRY}, + #else + {"SHOULD_RETRY", ERR_LIB_X509, 106}, + #endif + #ifdef X509_R_UNABLE_TO_FIND_PARAMETERS_IN_CHAIN + {"UNABLE_TO_FIND_PARAMETERS_IN_CHAIN", ERR_LIB_X509, X509_R_UNABLE_TO_FIND_PARAMETERS_IN_CHAIN}, + #else + {"UNABLE_TO_FIND_PARAMETERS_IN_CHAIN", ERR_LIB_X509, 107}, + #endif + #ifdef X509_R_UNABLE_TO_GET_CERTS_PUBLIC_KEY + {"UNABLE_TO_GET_CERTS_PUBLIC_KEY", ERR_LIB_X509, X509_R_UNABLE_TO_GET_CERTS_PUBLIC_KEY}, + #else + {"UNABLE_TO_GET_CERTS_PUBLIC_KEY", ERR_LIB_X509, 108}, + #endif + #ifdef X509_R_UNKNOWN_KEY_TYPE + {"UNKNOWN_KEY_TYPE", ERR_LIB_X509, X509_R_UNKNOWN_KEY_TYPE}, + #else + {"UNKNOWN_KEY_TYPE", ERR_LIB_X509, 117}, + #endif + #ifdef X509_R_UNKNOWN_NID + {"UNKNOWN_NID", ERR_LIB_X509, X509_R_UNKNOWN_NID}, + #else + {"UNKNOWN_NID", ERR_LIB_X509, 109}, + #endif + #ifdef X509_R_UNKNOWN_PURPOSE_ID + {"UNKNOWN_PURPOSE_ID", ERR_LIB_X509, X509_R_UNKNOWN_PURPOSE_ID}, + #else + {"UNKNOWN_PURPOSE_ID", ERR_LIB_X509, 121}, + #endif + #ifdef X509_R_UNKNOWN_TRUST_ID + {"UNKNOWN_TRUST_ID", ERR_LIB_X509, X509_R_UNKNOWN_TRUST_ID}, + #else + {"UNKNOWN_TRUST_ID", ERR_LIB_X509, 120}, + #endif + #ifdef X509_R_UNSUPPORTED_ALGORITHM + {"UNSUPPORTED_ALGORITHM", ERR_LIB_X509, X509_R_UNSUPPORTED_ALGORITHM}, + #else + {"UNSUPPORTED_ALGORITHM", ERR_LIB_X509, 111}, + #endif + #ifdef X509_R_WRONG_LOOKUP_TYPE + {"WRONG_LOOKUP_TYPE", ERR_LIB_X509, X509_R_WRONG_LOOKUP_TYPE}, + #else + {"WRONG_LOOKUP_TYPE", ERR_LIB_X509, 112}, + #endif + #ifdef X509_R_WRONG_TYPE + {"WRONG_TYPE", ERR_LIB_X509, X509_R_WRONG_TYPE}, + #else + {"WRONG_TYPE", ERR_LIB_X509, 122}, + #endif + { NULL } +}; diff --git a/Tools/ssl/make_ssl_data.py b/Tools/ssl/make_ssl_data.py new file mode 100644 --- /dev/null +++ b/Tools/ssl/make_ssl_data.py @@ -0,0 +1,57 @@ +#! 
/usr/bin/env python3
+
+import datetime
+import os
+import re
+import sys
+
+
+def parse_error_codes(h_file, prefix):
+    pat = re.compile(r"#define\W+(%s([\w]+))\W+(\d+)\b" % re.escape(prefix))
+    codes = []
+    with open(h_file, "r", encoding="latin1") as f:
+        for line in f:
+            match = pat.search(line)
+            if match:
+                code, name, num = match.groups()
+                num = int(num)
+                codes.append((code, name, num))
+    return codes
+
+if __name__ == "__main__":
+    openssl_inc = sys.argv[1]
+    outfile = sys.argv[2]
+    use_stdout = outfile == '-'
+    f = sys.stdout if use_stdout else open(outfile, "w")
+    error_libraries = (
+        # (library code, mnemonic, error prefix, header file)
+        ('ERR_LIB_PEM', 'PEM', 'PEM_R_', 'pem.h'),
+        ('ERR_LIB_SSL', 'SSL', 'SSL_R_', 'ssl.h'),
+        ('ERR_LIB_X509', 'X509', 'X509_R_', 'x509.h'),
+        )
+    def w(l):
+        f.write(l + "\n")
+    w("/* File generated by Tools/ssl/make_ssl_data.py */")
+    w("/* Generated on %s */" % datetime.datetime.now().isoformat())
+    w("")
+
+    w("static struct py_ssl_library_code library_codes[] = {")
+    for libcode, mnemo, _, _ in error_libraries:
+        w(' {"%s", %s},' % (mnemo, libcode))
+    w(' { NULL }')
+    w('};')
+    w("")
+
+    w("static struct py_ssl_error_code error_codes[] = {")
+    for libcode, _, prefix, h_file in error_libraries:
+        codes = parse_error_codes(os.path.join(openssl_inc, h_file), prefix)
+        for code, name, num in sorted(codes):
+            w(' #ifdef %s' % (code))
+            w(' {"%s", %s, %s},' % (name, libcode, code))
+            w(' #else')
+            w(' {"%s", %s, %d},' % (name, libcode, num))
+            w(' #endif')
+    w(' { NULL }')
+    w('};')
+    if not use_stdout:
+        f.close()

--
Repository URL: http://hg.python.org/cpython

From python-checkins at python.org Fri Jun 22 21:17:10 2012
From: python-checkins at python.org (antoine.pitrou)
Date: Fri, 22 Jun 2012 21:17:10 +0200
Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?=
 =?utf8?q?=29=3A_Merge?=
Message-ID:

http://hg.python.org/cpython/rev/34319fc28da6
changeset: 77580:34319fc28da6
parent: 77579:e193fe3d017e
parent: 77577:d63a80abfbec
user: Antoine Pitrou
date: Fri Jun 22 21:13:34 2012 +0200
summary:
  Merge

files:
  Doc/library/sys.rst | 16 +++++
  Include/dictobject.h | 1 +
  Include/floatobject.h | 2 +
  Include/frameobject.h | 2 +
  Include/listobject.h | 1 +
  Include/methodobject.h | 5 +
  Include/object.h | 8 ++
  Include/objimpl.h | 4 +-
  Include/setobject.h | 1 +
  Include/tupleobject.h | 3 +
  Lib/test/test_sys.py | 6 ++
  Misc/NEWS | 3 +
  Modules/_datetimemodule.c | 9 +-
  Modules/_threadmodule.c | 38 +++++-------
  Objects/classobject.c | 9 +++
  Objects/dictobject.c | 9 +++
  Objects/floatobject.c | 10 +++
  Objects/frameobject.c | 10 +++
  Objects/listobject.c | 9 +++
  Objects/methodobject.c | 9 +++
  Objects/object.c | 12 ++++
  Objects/obmalloc.c | 81 ++++++++++++++++-----------
  Objects/setobject.c | 10 +++
  Objects/tupleobject.c | 16 +++++
  Python/pythonrun.c | 2 +-
  Python/sysmodule.c | 23 +++++++

26 files changed, 239 insertions(+), 60 deletions(-)


diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst
--- a/Doc/library/sys.rst
+++ b/Doc/library/sys.rst
@@ -106,6 +106,22 @@
    This function should be used for internal and specialized purposes only.

+.. function:: _debugmallocstats()
+
+   Print low-level information to stderr about the state of CPython's memory
+   allocator.
+
+   If Python is configured --with-pydebug, it also performs some expensive
+   internal consistency checks.
+
+   .. versionadded:: 3.3
+
+   .. impl-detail::
+
+      This function is specific to CPython. The exact output format is not
+      defined here, and may change.
+
+
 ..
data:: dllhandle Integer specifying the handle of the Python DLL. Availability: Windows. diff --git a/Include/dictobject.h b/Include/dictobject.h --- a/Include/dictobject.h +++ b/Include/dictobject.h @@ -111,6 +111,7 @@ #ifndef Py_LIMITED_API int _PyObjectDict_SetItem(PyTypeObject *tp, PyObject **dictptr, PyObject *name, PyObject *value); PyObject *_PyDict_LoadGlobal(PyDictObject *, PyDictObject *, PyObject *); +PyAPI_FUNC(void) _PyDict_DebugMallocStats(FILE *out); #endif #ifdef __cplusplus diff --git a/Include/floatobject.h b/Include/floatobject.h --- a/Include/floatobject.h +++ b/Include/floatobject.h @@ -110,6 +110,8 @@ /* free list api */ PyAPI_FUNC(int) PyFloat_ClearFreeList(void); +PyAPI_FUNC(void) _PyFloat_DebugMallocStats(FILE* out); + /* Format the object based on the format_spec, as defined in PEP 3101 (Advanced String Formatting). */ PyAPI_FUNC(int) _PyFloat_FormatAdvancedWriter( diff --git a/Include/frameobject.h b/Include/frameobject.h --- a/Include/frameobject.h +++ b/Include/frameobject.h @@ -79,6 +79,8 @@ PyAPI_FUNC(int) PyFrame_ClearFreeList(void); +PyAPI_FUNC(void) _PyFrame_DebugMallocStats(FILE *out); + /* Return the line of code the frame is currently executing. */ PyAPI_FUNC(int) PyFrame_GetLineNumber(PyFrameObject *); diff --git a/Include/listobject.h b/Include/listobject.h --- a/Include/listobject.h +++ b/Include/listobject.h @@ -64,6 +64,7 @@ PyAPI_FUNC(PyObject *) _PyList_Extend(PyListObject *, PyObject *); PyAPI_FUNC(int) PyList_ClearFreeList(void); +PyAPI_FUNC(void) _PyList_DebugMallocStats(FILE *out); #endif /* Macro, trading safety for speed */ diff --git a/Include/methodobject.h b/Include/methodobject.h --- a/Include/methodobject.h +++ b/Include/methodobject.h @@ -82,6 +82,11 @@ PyAPI_FUNC(int) PyCFunction_ClearFreeList(void); +#ifndef Py_LIMITED_API +PyAPI_FUNC(void) _PyCFunction_DebugMallocStats(FILE *out); +PyAPI_FUNC(void) _PyMethod_DebugMallocStats(FILE *out); +#endif + #ifdef __cplusplus } #endif diff --git a/Include/object.h b/Include/object.h --- a/Include/object.h +++ b/Include/object.h @@ -977,6 +977,14 @@ else \ _PyTrash_deposit_object((PyObject*)op); +#ifndef Py_LIMITED_API +PyAPI_FUNC(void) +_PyDebugAllocatorStats(FILE *out, const char *block_name, int num_blocks, + size_t sizeof_block); +PyAPI_FUNC(void) +_PyObject_DebugTypeStats(FILE *out); +#endif /* ifndef Py_LIMITED_API */ + #ifdef __cplusplus } #endif diff --git a/Include/objimpl.h b/Include/objimpl.h --- a/Include/objimpl.h +++ b/Include/objimpl.h @@ -101,13 +101,15 @@ /* Macros */ #ifdef WITH_PYMALLOC +#ifndef Py_LIMITED_API +PyAPI_FUNC(void) _PyObject_DebugMallocStats(FILE *out); +#endif /* #ifndef Py_LIMITED_API */ #ifdef PYMALLOC_DEBUG /* WITH_PYMALLOC && PYMALLOC_DEBUG */ PyAPI_FUNC(void *) _PyObject_DebugMalloc(size_t nbytes); PyAPI_FUNC(void *) _PyObject_DebugRealloc(void *p, size_t nbytes); PyAPI_FUNC(void) _PyObject_DebugFree(void *p); PyAPI_FUNC(void) _PyObject_DebugDumpAddress(const void *p); PyAPI_FUNC(void) _PyObject_DebugCheckAddress(const void *p); -PyAPI_FUNC(void) _PyObject_DebugMallocStats(void); PyAPI_FUNC(void *) _PyObject_DebugMallocApi(char api, size_t nbytes); PyAPI_FUNC(void *) _PyObject_DebugReallocApi(char api, void *p, size_t nbytes); PyAPI_FUNC(void) _PyObject_DebugFreeApi(char api, void *p); diff --git a/Include/setobject.h b/Include/setobject.h --- a/Include/setobject.h +++ b/Include/setobject.h @@ -101,6 +101,7 @@ PyAPI_FUNC(int) _PySet_Update(PyObject *set, PyObject *iterable); PyAPI_FUNC(int) PySet_ClearFreeList(void); +PyAPI_FUNC(void) 
_PySet_DebugMallocStats(FILE *out); #endif #ifdef __cplusplus diff --git a/Include/tupleobject.h b/Include/tupleobject.h --- a/Include/tupleobject.h +++ b/Include/tupleobject.h @@ -63,6 +63,9 @@ #endif PyAPI_FUNC(int) PyTuple_ClearFreeList(void); +#ifndef Py_LIMITED_API +PyAPI_FUNC(void) _PyTuple_DebugMallocStats(FILE *out); +#endif /* Py_LIMITED_API */ #ifdef __cplusplus } diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -603,6 +603,12 @@ self.assertEqual(sys.implementation.name, sys.implementation.name.lower()) + def test_debugmallocstats(self): + # Test sys._debugmallocstats() + from test.script_helper import assert_python_ok + args = ['-c', 'import sys; sys._debugmallocstats()'] + ret, out, err = assert_python_ok(*args) + self.assertIn(b"free PyDictObjects", err) class SizeofTest(unittest.TestCase): diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -151,6 +151,9 @@ - Issue #14963: Convert contextlib.ExitStack.__exit__ to use an iterative algorithm (Patch by Alon Horev) +- Issue #14785: Add sys._debugmallocstats() to help debug low-level memory + allocation issues + C-API ----- diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -4754,7 +4754,7 @@ return result; } -static PyObject * +static PyDateTime_DateTime * datetime_astimezone(PyDateTime_DateTime *self, PyObject *args, PyObject *kw) { PyDateTime_DateTime *result; @@ -4777,7 +4777,7 @@ /* Conversion to self's own time zone is a NOP. */ if (self->tzinfo == tzinfo) { Py_INCREF(self); - return (PyObject *)self; + return self; } /* Convert self to UTC. */ @@ -4814,10 +4814,11 @@ Py_DECREF(temp); temp = (PyObject *)result; - result = _PyObject_CallMethodId(tzinfo, &PyId_fromutc, "O", temp); + result = (PyDateTime_DateTime *) + _PyObject_CallMethodId(tzinfo, &PyId_fromutc, "O", temp); Py_DECREF(temp); - return (PyObject *)result; + return result; } static PyObject * diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c --- a/Modules/_threadmodule.c +++ b/Modules/_threadmodule.c @@ -23,6 +23,7 @@ PyObject_HEAD PyThread_type_lock lock_lock; PyObject *in_weakreflist; + char locked; /* for sanity checking */ } lockobject; static void @@ -32,9 +33,8 @@ PyObject_ClearWeakRefs((PyObject *) self); if (self->lock_lock != NULL) { /* Unlock the lock so it's safe to free it */ - PyThread_acquire_lock(self->lock_lock, 0); - PyThread_release_lock(self->lock_lock); - + if (self->locked) + PyThread_release_lock(self->lock_lock); PyThread_free_lock(self->lock_lock); } PyObject_Del(self); @@ -62,9 +62,13 @@ do { - Py_BEGIN_ALLOW_THREADS - r = PyThread_acquire_lock_timed(lock, microseconds, 1); - Py_END_ALLOW_THREADS + /* first a simple non-blocking try without releasing the GIL */ + r = PyThread_acquire_lock_timed(lock, 0, 0); + if (r == PY_LOCK_FAILURE && microseconds != 0) { + Py_BEGIN_ALLOW_THREADS + r = PyThread_acquire_lock_timed(lock, microseconds, 1); + Py_END_ALLOW_THREADS + } if (r == PY_LOCK_INTR) { /* Run signal handlers if we were interrupted. 
Propagate @@ -135,6 +139,8 @@ return NULL; } + if (r == PY_LOCK_ACQUIRED) + self->locked = 1; return PyBool_FromLong(r == PY_LOCK_ACQUIRED); } @@ -153,13 +159,13 @@ lock_PyThread_release_lock(lockobject *self) { /* Sanity check: the lock must be locked */ - if (PyThread_acquire_lock(self->lock_lock, 0)) { - PyThread_release_lock(self->lock_lock); + if (!self->locked) { PyErr_SetString(ThreadError, "release unlocked lock"); return NULL; } PyThread_release_lock(self->lock_lock); + self->locked = 0; Py_INCREF(Py_None); return Py_None; } @@ -175,11 +181,7 @@ static PyObject * lock_locked_lock(lockobject *self) { - if (PyThread_acquire_lock(self->lock_lock, 0)) { - PyThread_release_lock(self->lock_lock); - return PyBool_FromLong(0L); - } - return PyBool_FromLong(1L); + return PyBool_FromLong((long)self->locked); } PyDoc_STRVAR(locked_doc, @@ -313,14 +315,7 @@ self->rlock_count = count; Py_RETURN_TRUE; } - - if (self->rlock_count > 0 || - !PyThread_acquire_lock(self->rlock_lock, 0)) { - if (microseconds == 0) { - Py_RETURN_FALSE; - } - r = acquire_timed(self->rlock_lock, microseconds); - } + r = acquire_timed(self->rlock_lock, microseconds); if (r == PY_LOCK_ACQUIRED) { assert(self->rlock_count == 0); self->rlock_owner = tid; @@ -548,6 +543,7 @@ if (self == NULL) return NULL; self->lock_lock = PyThread_allocate_lock(); + self->locked = 0; self->in_weakreflist = NULL; if (self->lock_lock == NULL) { Py_DECREF(self); diff --git a/Objects/classobject.c b/Objects/classobject.c --- a/Objects/classobject.c +++ b/Objects/classobject.c @@ -400,6 +400,15 @@ (void)PyMethod_ClearFreeList(); } +/* Print summary info about the state of the optimized allocator */ +void +_PyMethod_DebugMallocStats(FILE *out) +{ + _PyDebugAllocatorStats(out, + "free PyMethodObject", + numfree, sizeof(PyMethodObject)); +} + /* ------------------------------------------------------------------------ * instance method */ diff --git a/Objects/dictobject.c b/Objects/dictobject.c --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -255,6 +255,15 @@ return ret; } +/* Print summary info about the state of the optimized allocator */ +void +_PyDict_DebugMallocStats(FILE *out) +{ + _PyDebugAllocatorStats(out, + "free PyDictObject", numfree, sizeof(PyDictObject)); +} + + void PyDict_Fini(void) { diff --git a/Objects/floatobject.c b/Objects/floatobject.c --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -1933,6 +1933,16 @@ (void)PyFloat_ClearFreeList(); } +/* Print summary info about the state of the optimized allocator */ +void +_PyFloat_DebugMallocStats(FILE *out) +{ + _PyDebugAllocatorStats(out, + "free PyFloatObject", + numfree, sizeof(PyFloatObject)); +} + + /*---------------------------------------------------------------------------- * _PyFloat_{Pack,Unpack}{4,8}. See floatobject.h. 
*/ diff --git a/Objects/frameobject.c b/Objects/frameobject.c --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -955,3 +955,13 @@ Py_XDECREF(builtin_object); builtin_object = NULL; } + +/* Print summary info about the state of the optimized allocator */ +void +_PyFrame_DebugMallocStats(FILE *out) +{ + _PyDebugAllocatorStats(out, + "free PyFrameObject", + numfree, sizeof(PyFrameObject)); +} + diff --git a/Objects/listobject.c b/Objects/listobject.c --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -117,6 +117,15 @@ PyList_ClearFreeList(); } +/* Print summary info about the state of the optimized allocator */ +void +_PyList_DebugMallocStats(FILE *out) +{ + _PyDebugAllocatorStats(out, + "free PyListObject", + numfree, sizeof(PyListObject)); +} + PyObject * PyList_New(Py_ssize_t size) { diff --git a/Objects/methodobject.c b/Objects/methodobject.c --- a/Objects/methodobject.c +++ b/Objects/methodobject.c @@ -338,6 +338,15 @@ (void)PyCFunction_ClearFreeList(); } +/* Print summary info about the state of the optimized allocator */ +void +_PyCFunction_DebugMallocStats(FILE *out) +{ + _PyDebugAllocatorStats(out, + "free PyCFunction", + numfree, sizeof(PyCFunction)); +} + /* PyCFunction_New() is now just a macro that calls PyCFunction_NewEx(), but it's part of the API so we need to keep a function around that existing C extensions can call. diff --git a/Objects/object.c b/Objects/object.c --- a/Objects/object.c +++ b/Objects/object.c @@ -1852,6 +1852,18 @@ PyMem_FREE(p); } +void +_PyObject_DebugTypeStats(FILE *out) +{ + _PyCFunction_DebugMallocStats(out); + _PyDict_DebugMallocStats(out); + _PyFloat_DebugMallocStats(out); + _PyFrame_DebugMallocStats(out); + _PyList_DebugMallocStats(out); + _PyMethod_DebugMallocStats(out); + _PySet_DebugMallocStats(out); + _PyTuple_DebugMallocStats(out); +} /* These methods are used to control infinite recursion in repr, str, print, etc. Container objects that may recursively contain themselves, diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c --- a/Objects/obmalloc.c +++ b/Objects/obmalloc.c @@ -523,12 +523,10 @@ /* Number of arenas allocated that haven't been free()'d. */ static size_t narenas_currently_allocated = 0; -#ifdef PYMALLOC_DEBUG /* Total number of times malloc() called to allocate an arena. */ static size_t ntimes_arena_allocated = 0; /* High water mark (max value ever seen) for narenas_currently_allocated. */ static size_t narenas_highwater = 0; -#endif /* Allocate a new arena. If we run out of memory, return NULL. 
Else * allocate a new arena, and return the address of an arena_object @@ -545,7 +543,7 @@ #ifdef PYMALLOC_DEBUG if (Py_GETENV("PYTHONMALLOCSTATS")) - _PyObject_DebugMallocStats(); + _PyObject_DebugMallocStats(stderr); #endif if (unused_arena_objects == NULL) { uint i; @@ -613,11 +611,9 @@ arenaobj->address = (uptr)address; ++narenas_currently_allocated; -#ifdef PYMALLOC_DEBUG ++ntimes_arena_allocated; if (narenas_currently_allocated > narenas_highwater) narenas_highwater = narenas_currently_allocated; -#endif arenaobj->freepools = NULL; /* pool_address <- first pool-aligned address in the arena nfreepools <- number of whole pools that fit after alignment */ @@ -1723,17 +1719,19 @@ } } +#endif /* PYMALLOC_DEBUG */ + static size_t -printone(const char* msg, size_t value) +printone(FILE *out, const char* msg, size_t value) { int i, k; char buf[100]; size_t origvalue = value; - fputs(msg, stderr); + fputs(msg, out); for (i = (int)strlen(msg); i < 35; ++i) - fputc(' ', stderr); - fputc('=', stderr); + fputc(' ', out); + fputc('=', out); /* Write the value with commas. */ i = 22; @@ -1754,17 +1752,33 @@ while (i >= 0) buf[i--] = ' '; - fputs(buf, stderr); + fputs(buf, out); return origvalue; } -/* Print summary info to stderr about the state of pymalloc's structures. +void +_PyDebugAllocatorStats(FILE *out, + const char *block_name, int num_blocks, size_t sizeof_block) +{ + char buf1[128]; + char buf2[128]; + PyOS_snprintf(buf1, sizeof(buf1), + "%d %ss * %zd bytes each", + num_blocks, block_name, sizeof_block); + PyOS_snprintf(buf2, sizeof(buf2), + "%48s ", buf1); + (void)printone(out, buf2, num_blocks * sizeof_block); +} + +#ifdef WITH_PYMALLOC + +/* Print summary info to "out" about the state of pymalloc's structures. * In Py_DEBUG mode, also perform some expensive internal consistency * checks. 
*/ void -_PyObject_DebugMallocStats(void) +_PyObject_DebugMallocStats(FILE *out) { uint i; const uint numclasses = SMALL_REQUEST_THRESHOLD >> ALIGNMENT_SHIFT; @@ -1793,7 +1807,7 @@ size_t total; char buf[128]; - fprintf(stderr, "Small block threshold = %d, in %u size classes.\n", + fprintf(out, "Small block threshold = %d, in %u size classes.\n", SMALL_REQUEST_THRESHOLD, numclasses); for (i = 0; i < numclasses; ++i) @@ -1847,10 +1861,10 @@ } assert(narenas == narenas_currently_allocated); - fputc('\n', stderr); + fputc('\n', out); fputs("class size num pools blocks in use avail blocks\n" "----- ---- --------- ------------- ------------\n", - stderr); + out); for (i = 0; i < numclasses; ++i) { size_t p = numpools[i]; @@ -1861,7 +1875,7 @@ assert(b == 0 && f == 0); continue; } - fprintf(stderr, "%5u %6u " + fprintf(out, "%5u %6u " "%11" PY_FORMAT_SIZE_T "u " "%15" PY_FORMAT_SIZE_T "u " "%13" PY_FORMAT_SIZE_T "u\n", @@ -1871,35 +1885,36 @@ pool_header_bytes += p * POOL_OVERHEAD; quantization += p * ((POOL_SIZE - POOL_OVERHEAD) % size); } - fputc('\n', stderr); - (void)printone("# times object malloc called", serialno); - - (void)printone("# arenas allocated total", ntimes_arena_allocated); - (void)printone("# arenas reclaimed", ntimes_arena_allocated - narenas); - (void)printone("# arenas highwater mark", narenas_highwater); - (void)printone("# arenas allocated current", narenas); + fputc('\n', out); +#ifdef PYMALLOC_DEBUG + (void)printone(out, "# times object malloc called", serialno); +#endif + (void)printone(out, "# arenas allocated total", ntimes_arena_allocated); + (void)printone(out, "# arenas reclaimed", ntimes_arena_allocated - narenas); + (void)printone(out, "# arenas highwater mark", narenas_highwater); + (void)printone(out, "# arenas allocated current", narenas); PyOS_snprintf(buf, sizeof(buf), "%" PY_FORMAT_SIZE_T "u arenas * %d bytes/arena", narenas, ARENA_SIZE); - (void)printone(buf, narenas * ARENA_SIZE); + (void)printone(out, buf, narenas * ARENA_SIZE); - fputc('\n', stderr); + fputc('\n', out); - total = printone("# bytes in allocated blocks", allocated_bytes); - total += printone("# bytes in available blocks", available_bytes); + total = printone(out, "# bytes in allocated blocks", allocated_bytes); + total += printone(out, "# bytes in available blocks", available_bytes); PyOS_snprintf(buf, sizeof(buf), "%u unused pools * %d bytes", numfreepools, POOL_SIZE); - total += printone(buf, (size_t)numfreepools * POOL_SIZE); + total += printone(out, buf, (size_t)numfreepools * POOL_SIZE); - total += printone("# bytes lost to pool headers", pool_header_bytes); - total += printone("# bytes lost to quantization", quantization); - total += printone("# bytes lost to arena alignment", arena_alignment); - (void)printone("Total", total); + total += printone(out, "# bytes lost to pool headers", pool_header_bytes); + total += printone(out, "# bytes lost to quantization", quantization); + total += printone(out, "# bytes lost to arena alignment", arena_alignment); + (void)printone(out, "Total", total); } -#endif /* PYMALLOC_DEBUG */ +#endif /* #ifdef WITH_PYMALLOC */ #ifdef Py_USING_MEMORY_DEBUGGER /* Make this function last so gcc won't inline it since the definition is diff --git a/Objects/setobject.c b/Objects/setobject.c --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -1133,6 +1133,16 @@ Py_CLEAR(emptyfrozenset); } +/* Print summary info about the state of the optimized allocator */ +void +_PySet_DebugMallocStats(FILE *out) +{ + _PyDebugAllocatorStats(out, + "free PySetObject", + 
numfree, sizeof(PySetObject)); +} + + static PyObject * set_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -45,6 +45,22 @@ } #endif +/* Print summary info about the state of the optimized allocator */ +void +_PyTuple_DebugMallocStats(FILE *out) +{ +#if PyTuple_MAXSAVESIZE > 0 + int i; + char buf[128]; + for (i = 1; i < PyTuple_MAXSAVESIZE; i++) { + PyOS_snprintf(buf, sizeof(buf), + "free %d-sized PyTupleObject", i); + _PyDebugAllocatorStats(out, + buf, + numfree[i], _PyObject_VAR_SIZE(&PyTuple_Type, i)); + } +#endif +} PyObject * PyTuple_New(register Py_ssize_t size) diff --git a/Python/pythonrun.c b/Python/pythonrun.c --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -642,7 +642,7 @@ #endif /* Py_TRACE_REFS */ #ifdef PYMALLOC_DEBUG if (Py_GETENV("PYTHONMALLOCSTATS")) - _PyObject_DebugMallocStats(); + _PyObject_DebugMallocStats(stderr); #endif call_ll_exitfuncs(); diff --git a/Python/sysmodule.c b/Python/sysmodule.c --- a/Python/sysmodule.c +++ b/Python/sysmodule.c @@ -997,6 +997,27 @@ extern "C" { #endif +static PyObject * +sys_debugmallocstats(PyObject *self, PyObject *args) +{ +#ifdef WITH_PYMALLOC + _PyObject_DebugMallocStats(stderr); + fputc('\n', stderr); +#endif + _PyObject_DebugTypeStats(stderr); + + Py_RETURN_NONE; +} +PyDoc_STRVAR(debugmallocstats_doc, +"_debugmallocstats()\n\ +\n\ +Print summary info to stderr about the state of\n\ +pymalloc's structures.\n\ +\n\ +In Py_DEBUG mode, also perform some expensive internal consistency\n\ +checks.\n\ +"); + #ifdef Py_TRACE_REFS /* Defined in objects.c because it uses static globals if that file */ extern PyObject *_Py_GetObjects(PyObject *, PyObject *); @@ -1093,6 +1114,8 @@ {"settrace", sys_settrace, METH_O, settrace_doc}, {"gettrace", sys_gettrace, METH_NOARGS, gettrace_doc}, {"call_tracing", sys_call_tracing, METH_VARARGS, call_tracing_doc}, + {"_debugmallocstats", sys_debugmallocstats, METH_VARARGS, + debugmallocstats_doc}, {NULL, NULL} /* sentinel */ }; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 21:59:14 2012 From: python-checkins at python.org (larry.hastings) Date: Fri, 22 Jun 2012 21:59:14 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314769=3A_Incorpora?= =?utf8?q?ted_mildly_pedantic_feedback_from_python-dev=2E?= Message-ID: http://hg.python.org/cpython/rev/40d7869501a2 changeset: 77581:40d7869501a2 user: Larry Hastings date: Fri Jun 22 12:58:36 2012 -0700 summary: Issue #14769: Incorporated mildly pedantic feedback from python-dev. Mostly documentation changes; the code changes are clarifications, not semantic changes. files: Lib/test/test_capi.py | 38 ++++++++++++++++-------------- 1 files changed, 20 insertions(+), 18 deletions(-) diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py --- a/Lib/test/test_capi.py +++ b/Lib/test/test_capi.py @@ -222,36 +222,38 @@ in Python/getargs.c, but neglected to update our poor friend skipitem() in the same file. (If so, shame on you!) - This function brute-force tests all** ASCII characters (1 to 127 - inclusive) as format units, checking to see that - PyArg_ParseTupleAndKeywords() return consistent errors both when - the unit is attempted to be used and when it is skipped. If the - format unit doesn't exist, we'll get one of two specific error - messages (one for used, one for skipped); if it does exist we - *won't* get that error--we'll get either no error or some other - error. 
If we get the "does not exist" error for one test and - not for the other, there's a mismatch, and the test fails. + With a few exceptions**, this function brute-force tests all + printable ASCII*** characters (32 to 126 inclusive) as format units, + checking to see that PyArg_ParseTupleAndKeywords() return consistent + errors both when the unit is attempted to be used and when it is + skipped. If the format unit doesn't exist, we'll get one of two + specific error messages (one for used, one for skipped); if it does + exist we *won't* get that error--we'll get either no error or some + other error. If we get the specific "does not exist" error for one + test and not for the other, there's a mismatch, and the test fails. - ** Okay, it actually skips some ASCII characters. Some characters - have special funny semantics, and it would be difficult to - accomodate them here. + ** Some format units have special funny semantics and it would + be difficult to accomodate them here. Since these are all + well-established and properly skipped in skipitem() we can + get away with not testing them--this test is really intended + to catch *new* format units. + + *** Python C source files must be ASCII. Therefore it's impossible + to have non-ASCII format units. + """ empty_tuple = () tuple_1 = (0,) dict_b = {'b':1} keywords = ["a", "b"] - # Python C source files must be ASCII, - # therefore we'll never have a format unit > 127 - for i in range(1, 128): + for i in range(32, 127): c = chr(i) - # skip non-printable characters, no one is insane enough to define - # one as a format unit # skip parentheses, the error reporting is inconsistent about them # skip 'e', it's always a two-character code # skip '|' and '$', they don't represent arguments anyway - if (not c.isprintable()) or (c in '()e|$'): + if c in '()e|$': continue # test the format unit when not skipped -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 22:09:57 2012 From: python-checkins at python.org (alexander.belopolsky) Date: Fri, 22 Jun 2012 22:09:57 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=239527=3A_tm=5Fgmtof?= =?utf8?q?f_has_=27correct=27_sign=2E?= Message-ID: http://hg.python.org/cpython/rev/0f0e3ec22fce changeset: 77582:0f0e3ec22fce parent: 77580:34319fc28da6 user: Alexander Belopolsky date: Fri Jun 22 16:04:19 2012 -0400 summary: Issue #9527: tm_gmtoff has 'correct' sign. files: Doc/library/time.rst | 2 +- Lib/datetime.py | 6 +++--- Lib/test/datetimetester.py | 6 ++++-- Modules/_datetimemodule.c | 10 +++------- 4 files changed, 11 insertions(+), 13 deletions(-) diff --git a/Doc/library/time.rst b/Doc/library/time.rst --- a/Doc/library/time.rst +++ b/Doc/library/time.rst @@ -545,7 +545,7 @@ +-------+-------------------+---------------------------------+ | N/A | :attr:`tm_zone` | abbreviation of timezone name | +-------+-------------------+---------------------------------+ - | N/A | :attr:`tm_gmtoff` | offset from UTC in seconds | + | N/A | :attr:`tm_gmtoff` | offset east of UTC in seconds | +-------+-------------------+---------------------------------+ Note that unlike the C structure, the month value is a range of [1, 12], not diff --git a/Lib/datetime.py b/Lib/datetime.py --- a/Lib/datetime.py +++ b/Lib/datetime.py @@ -1510,13 +1510,13 @@ # implied by tm_isdst. 
delta = local - datetime(*_time.gmtime(ts)[:6]) dst = _time.daylight and localtm.tm_isdst > 0 - gmtoff = _time.altzone if dst else _time.timezone - if delta == timedelta(seconds=-gmtoff): + gmtoff = -(_time.altzone if dst else _time.timezone) + if delta == timedelta(seconds=gmtoff): tz = timezone(delta, _time.tzname[dst]) else: tz = timezone(delta) else: - tz = timezone(timedelta(seconds=-gmtoff), zone) + tz = timezone(timedelta(seconds=gmtoff), zone) elif not isinstance(tz, tzinfo): raise TypeError("tz argument must be an instance of tzinfo") diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -3278,16 +3278,18 @@ self.assertEqual(dt.astimezone(None), dt) self.assertEqual(dt.astimezone(), dt) + # Note that offset in TZ variable has the opposite sign to that + # produced by %z directive. @support.run_with_tz('EST+05EDT,M3.2.0,M11.1.0') def test_astimezone_default_eastern(self): dt = self.theclass(2012, 11, 4, 6, 30, tzinfo=timezone.utc) local = dt.astimezone() self.assertEqual(dt, local) - self.assertEqual(local.strftime("%z %Z"), "+0500 EST") + self.assertEqual(local.strftime("%z %Z"), "-0500 EST") dt = self.theclass(2012, 11, 4, 5, 30, tzinfo=timezone.utc) local = dt.astimezone() self.assertEqual(dt, local) - self.assertEqual(local.strftime("%z %Z"), "+0400 EDT") + self.assertEqual(local.strftime("%z %Z"), "-0400 EDT") def test_aware_subtract(self): cls = self.theclass diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -4717,12 +4717,8 @@ return NULL; timep = localtime(×tamp); #ifdef HAVE_STRUCT_TM_TM_ZONE - { - long offset; - offset = timep->tm_gmtoff; - zone = timep->tm_zone; - delta = new_delta(0, -offset, 0, 0); - } + zone = timep->tm_zone; + delta = new_delta(0, timep->tm_gmtoff, 0, 1); #else /* HAVE_STRUCT_TM_TM_ZONE */ { PyObject *local_time; @@ -4732,7 +4728,7 @@ utc_time->tzinfo); if (local_time == NULL) goto error; - delta = datetime_subtract((PyObject*)utc_time, local_time); + delta = datetime_subtract(local_time, (PyObject*)utc_time); /* XXX: before relying on tzname, we should compare delta to the offset implied by timezone/altzone */ if (daylight && timep->tm_isdst >= 0) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 22:09:58 2012 From: python-checkins at python.org (alexander.belopolsky) Date: Fri, 22 Jun 2012 22:09:58 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_merge?= Message-ID: http://hg.python.org/cpython/rev/74ee787306b9 changeset: 77583:74ee787306b9 parent: 77582:0f0e3ec22fce parent: 77581:40d7869501a2 user: Alexander Belopolsky date: Fri Jun 22 16:09:42 2012 -0400 summary: merge files: Lib/test/test_capi.py | 38 ++++++++++++++++-------------- 1 files changed, 20 insertions(+), 18 deletions(-) diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py --- a/Lib/test/test_capi.py +++ b/Lib/test/test_capi.py @@ -222,36 +222,38 @@ in Python/getargs.c, but neglected to update our poor friend skipitem() in the same file. (If so, shame on you!) - This function brute-force tests all** ASCII characters (1 to 127 - inclusive) as format units, checking to see that - PyArg_ParseTupleAndKeywords() return consistent errors both when - the unit is attempted to be used and when it is skipped. 
If the - format unit doesn't exist, we'll get one of two specific error - messages (one for used, one for skipped); if it does exist we - *won't* get that error--we'll get either no error or some other - error. If we get the "does not exist" error for one test and - not for the other, there's a mismatch, and the test fails. + With a few exceptions**, this function brute-force tests all + printable ASCII*** characters (32 to 126 inclusive) as format units, + checking to see that PyArg_ParseTupleAndKeywords() return consistent + errors both when the unit is attempted to be used and when it is + skipped. If the format unit doesn't exist, we'll get one of two + specific error messages (one for used, one for skipped); if it does + exist we *won't* get that error--we'll get either no error or some + other error. If we get the specific "does not exist" error for one + test and not for the other, there's a mismatch, and the test fails. - ** Okay, it actually skips some ASCII characters. Some characters - have special funny semantics, and it would be difficult to - accomodate them here. + ** Some format units have special funny semantics and it would + be difficult to accomodate them here. Since these are all + well-established and properly skipped in skipitem() we can + get away with not testing them--this test is really intended + to catch *new* format units. + + *** Python C source files must be ASCII. Therefore it's impossible + to have non-ASCII format units. + """ empty_tuple = () tuple_1 = (0,) dict_b = {'b':1} keywords = ["a", "b"] - # Python C source files must be ASCII, - # therefore we'll never have a format unit > 127 - for i in range(1, 128): + for i in range(32, 127): c = chr(i) - # skip non-printable characters, no one is insane enough to define - # one as a format unit # skip parentheses, the error reporting is inconsistent about them # skip 'e', it's always a two-character code # skip '|' and '$', they don't represent arguments anyway - if (not c.isprintable()) or (c in '()e|$'): + if c in '()e|$': continue # test the format unit when not skipped -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 22:10:57 2012 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 22 Jun 2012 22:10:57 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Add_a_docstring_at_the_top_?= =?utf8?q?of_Tools/ssl/make=5Fssl=5Fdata=2Epy?= Message-ID: http://hg.python.org/cpython/rev/ad9f6e294d09 changeset: 77584:ad9f6e294d09 parent: 77580:34319fc28da6 user: Antoine Pitrou date: Fri Jun 22 22:07:01 2012 +0200 summary: Add a docstring at the top of Tools/ssl/make_ssl_data.py files: Tools/ssl/make_ssl_data.py | 11 +++++++++++ 1 files changed, 11 insertions(+), 0 deletions(-) diff --git a/Tools/ssl/make_ssl_data.py b/Tools/ssl/make_ssl_data.py --- a/Tools/ssl/make_ssl_data.py +++ b/Tools/ssl/make_ssl_data.py @@ -1,5 +1,16 @@ #! /usr/bin/env python3 +""" +This script should be called *manually* when we want to upgrade SSLError +`library` and `reason` mnemnonics to a more recent OpenSSL version. + +It takes two arguments: +- the path to the OpenSSL include files' directory + (e.g. 
openssl-1.0.1-beta3/include/openssl/) +- the path to the C file to be generated + (probably Modules/_ssl_data.h) +""" + import datetime import os import re -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 22:10:58 2012 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 22 Jun 2012 22:10:58 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_Merge?= Message-ID: http://hg.python.org/cpython/rev/483d7dd6851f changeset: 77585:483d7dd6851f parent: 77584:ad9f6e294d09 parent: 77583:74ee787306b9 user: Antoine Pitrou date: Fri Jun 22 22:07:24 2012 +0200 summary: Merge files: Doc/library/time.rst | 2 +- Lib/datetime.py | 6 ++-- Lib/test/datetimetester.py | 6 ++- Lib/test/test_capi.py | 38 +++++++++++++------------ Modules/_datetimemodule.c | 10 ++---- 5 files changed, 31 insertions(+), 31 deletions(-) diff --git a/Doc/library/time.rst b/Doc/library/time.rst --- a/Doc/library/time.rst +++ b/Doc/library/time.rst @@ -545,7 +545,7 @@ +-------+-------------------+---------------------------------+ | N/A | :attr:`tm_zone` | abbreviation of timezone name | +-------+-------------------+---------------------------------+ - | N/A | :attr:`tm_gmtoff` | offset from UTC in seconds | + | N/A | :attr:`tm_gmtoff` | offset east of UTC in seconds | +-------+-------------------+---------------------------------+ Note that unlike the C structure, the month value is a range of [1, 12], not diff --git a/Lib/datetime.py b/Lib/datetime.py --- a/Lib/datetime.py +++ b/Lib/datetime.py @@ -1510,13 +1510,13 @@ # implied by tm_isdst. delta = local - datetime(*_time.gmtime(ts)[:6]) dst = _time.daylight and localtm.tm_isdst > 0 - gmtoff = _time.altzone if dst else _time.timezone - if delta == timedelta(seconds=-gmtoff): + gmtoff = -(_time.altzone if dst else _time.timezone) + if delta == timedelta(seconds=gmtoff): tz = timezone(delta, _time.tzname[dst]) else: tz = timezone(delta) else: - tz = timezone(timedelta(seconds=-gmtoff), zone) + tz = timezone(timedelta(seconds=gmtoff), zone) elif not isinstance(tz, tzinfo): raise TypeError("tz argument must be an instance of tzinfo") diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -3278,16 +3278,18 @@ self.assertEqual(dt.astimezone(None), dt) self.assertEqual(dt.astimezone(), dt) + # Note that offset in TZ variable has the opposite sign to that + # produced by %z directive. @support.run_with_tz('EST+05EDT,M3.2.0,M11.1.0') def test_astimezone_default_eastern(self): dt = self.theclass(2012, 11, 4, 6, 30, tzinfo=timezone.utc) local = dt.astimezone() self.assertEqual(dt, local) - self.assertEqual(local.strftime("%z %Z"), "+0500 EST") + self.assertEqual(local.strftime("%z %Z"), "-0500 EST") dt = self.theclass(2012, 11, 4, 5, 30, tzinfo=timezone.utc) local = dt.astimezone() self.assertEqual(dt, local) - self.assertEqual(local.strftime("%z %Z"), "+0400 EDT") + self.assertEqual(local.strftime("%z %Z"), "-0400 EDT") def test_aware_subtract(self): cls = self.theclass diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py --- a/Lib/test/test_capi.py +++ b/Lib/test/test_capi.py @@ -222,36 +222,38 @@ in Python/getargs.c, but neglected to update our poor friend skipitem() in the same file. (If so, shame on you!) 
- This function brute-force tests all** ASCII characters (1 to 127 - inclusive) as format units, checking to see that - PyArg_ParseTupleAndKeywords() return consistent errors both when - the unit is attempted to be used and when it is skipped. If the - format unit doesn't exist, we'll get one of two specific error - messages (one for used, one for skipped); if it does exist we - *won't* get that error--we'll get either no error or some other - error. If we get the "does not exist" error for one test and - not for the other, there's a mismatch, and the test fails. + With a few exceptions**, this function brute-force tests all + printable ASCII*** characters (32 to 126 inclusive) as format units, + checking to see that PyArg_ParseTupleAndKeywords() return consistent + errors both when the unit is attempted to be used and when it is + skipped. If the format unit doesn't exist, we'll get one of two + specific error messages (one for used, one for skipped); if it does + exist we *won't* get that error--we'll get either no error or some + other error. If we get the specific "does not exist" error for one + test and not for the other, there's a mismatch, and the test fails. - ** Okay, it actually skips some ASCII characters. Some characters - have special funny semantics, and it would be difficult to - accomodate them here. + ** Some format units have special funny semantics and it would + be difficult to accomodate them here. Since these are all + well-established and properly skipped in skipitem() we can + get away with not testing them--this test is really intended + to catch *new* format units. + + *** Python C source files must be ASCII. Therefore it's impossible + to have non-ASCII format units. + """ empty_tuple = () tuple_1 = (0,) dict_b = {'b':1} keywords = ["a", "b"] - # Python C source files must be ASCII, - # therefore we'll never have a format unit > 127 - for i in range(1, 128): + for i in range(32, 127): c = chr(i) - # skip non-printable characters, no one is insane enough to define - # one as a format unit # skip parentheses, the error reporting is inconsistent about them # skip 'e', it's always a two-character code # skip '|' and '$', they don't represent arguments anyway - if (not c.isprintable()) or (c in '()e|$'): + if c in '()e|$': continue # test the format unit when not skipped diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -4717,12 +4717,8 @@ return NULL; timep = localtime(×tamp); #ifdef HAVE_STRUCT_TM_TM_ZONE - { - long offset; - offset = timep->tm_gmtoff; - zone = timep->tm_zone; - delta = new_delta(0, -offset, 0, 0); - } + zone = timep->tm_zone; + delta = new_delta(0, timep->tm_gmtoff, 0, 1); #else /* HAVE_STRUCT_TM_TM_ZONE */ { PyObject *local_time; @@ -4732,7 +4728,7 @@ utc_time->tzinfo); if (local_time == NULL) goto error; - delta = datetime_subtract((PyObject*)utc_time, local_time); + delta = datetime_subtract(local_time, (PyObject*)utc_time); /* XXX: before relying on tzname, we should compare delta to the offset implied by timezone/altzone */ if (daylight && timep->tm_isdst >= 0) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 22:13:23 2012 From: python-checkins at python.org (larry.hastings) Date: Fri, 22 Jun 2012 22:13:23 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_One_last_edit_on_PEP_362=3B_I_?= =?utf8?q?think_Guido=27s_ready_to_give_this_the_green_light=2E?= Message-ID: http://hg.python.org/peps/rev/a1622b1bafe0 changeset: 
4474:a1622b1bafe0 user: Larry Hastings date: Fri Jun 22 13:12:59 2012 -0700 summary: One last edit on PEP 362; I think Guido's ready to give this the green light. files: pep-0362.txt | 73 ++++++++++++++++++++------------------- 1 files changed, 37 insertions(+), 36 deletions(-) diff --git a/pep-0362.txt b/pep-0362.txt --- a/pep-0362.txt +++ b/pep-0362.txt @@ -43,7 +43,8 @@ * return_annotation : object The "return" annotation for the function. If the function - has no "return" annotation, this attribute is not set. + has no "return" annotation, this attribute is set to + ``Signature.empty``. * parameters : OrderedDict An ordered mapping of parameters' names to the corresponding @@ -60,7 +61,7 @@ behavior.) Raises a ``TypeError`` if the passed arguments do not match the signature. -* replace(parameters, \*, return_annotation) -> Signature +* replace(parameters=, \*, return_annotation=) -> Signature Creates a new Signature instance based on the instance ``replace`` was invoked on. It is possible to pass different ``parameters`` and/or ``return_annotation`` to override the @@ -68,6 +69,9 @@ ``return_annotation`` from the copied ``Signature``, pass in ``Signature.empty``. + Note that the '=' notation, means that the argument is + optional. This notation applies to the rest of this PEP. + Signature objects are immutable. Use ``Signature.replace()`` to make a modified copy: :: @@ -90,7 +94,7 @@ There are two ways to instantiate a Signature class: -* Signature(parameters, \*, return_annotation) +* Signature(parameters=, \*, return_annotation=Signature.empty) Default Signature constructor. Accepts an optional sequence of ``Parameter`` objects, and an optional ``return_annotation``. Parameters sequence is validated to check that there are no @@ -136,11 +140,11 @@ * default : object The default value for the parameter. If the parameter has no - default value, this attribute is not set. + default value, this attribute is set to ``Parameter.empty``. * annotation : object The annotation for the parameter. If the parameter has no - annotation, this attribute is not set. + annotation, this attribute is set to ``Parameter.empty``. * kind Describes how argument values are bound to the parameter. @@ -176,7 +180,7 @@ Always use ``Parameter.*`` constants for setting and checking value of the ``kind`` attribute. -* replace(\*, name, kind, default, annotation) -> Parameter +* replace(\*, name=, kind=, default=, annotation=) -> Parameter Creates a new Parameter instance based on the instance ``replaced`` was invoked on. To override a Parameter attribute, pass the corresponding argument. To remove @@ -185,7 +189,7 @@ Parameter constructor: -* Parameter(name, kind, \*, annotation, default) +* Parameter(name, kind, \*, annotation=Parameter.empty, default=Parameter.empty) Instantiates a Parameter object. ``name`` and ``kind`` are required, while ``annotation`` and ``default`` are optional. @@ -277,9 +281,11 @@ - If the object is a an instance of ``FunctionType`` construct and return a new ``Signature`` for it - - If the object is a method, construct and return a new ``Signature`` + - If the object is a bound method, construct and return a new ``Signature`` object, with its first parameter (usually ``self`` or ``cls``) - removed + removed. (``classmethod`` and ``staticmethod`` are supported + too. Since both are descriptors, the former returns a bound method, + and the latter returns its wrapped function.) 
- If the object is an instance of ``functools.partial``, construct a new ``Signature`` from its ``partial.func`` attribute, and @@ -472,27 +478,23 @@ for param in sig.parameters.values(): # Iterate through function's parameters and build the list of # arguments types + type_ = param.annotation + if type_ is param.empty or not inspect.isclass(type_): + # Missing annotation or not a type, skip it + continue + + types[param.name] = type_ + + # If the argument has a type specified, let's check that its + # default value (if present) conforms with the type. try: - type_ = param.annotation + default = param.default except AttributeError: continue else: - if not inspect.isclass(type_): - # Not a type, skip it - continue - - types[param.name] = type_ - - # If the argument has a type specified, let's check that its - # default value (if present) conforms with the type. - try: - default = param.default - except AttributeError: - continue - else: - if not isinstance(default, type_): - raise ValueError("{func}: wrong type of a default value for {arg!r}". \ - format(func=func.__qualname__, arg=param.name)) + if not isinstance(default, type_): + raise ValueError("{func}: wrong type of a default value for {arg!r}". \ + format(func=func.__qualname__, arg=param.name)) def check_type(sig, arg_name, arg_type, arg_value): # Internal function that encapsulates arguments type checking @@ -530,17 +532,16 @@ check_type(sig, arg_name, type_, arg) result = func(*ba.args, **ba.kwargs) + # The last bit - let's check that the result is correct - try: - return_type = sig.return_annotation - except AttributeError: - # Looks like we don't have any restriction on the return type - pass - else: - if isinstance(return_type, type) and not isinstance(result, return_type): - raise ValueError('{func}: wrong return type, {exp} expected, got {got}'. \ - format(func=func.__qualname__, exp=return_type.__name__, - got=type(result).__name__)) + return_type = sig.return_annotation + if (return_type is not sig._empty and + isinstance(return_type, type) and + not isinstance(result, return_type)): + + raise ValueError('{func}: wrong return type, {exp} expected, got {got}'. \ + format(func=func.__qualname__, exp=return_type.__name__, + got=type(result).__name__)) return result return wrapper -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Fri Jun 22 22:20:18 2012 From: python-checkins at python.org (martin.v.loewis) Date: Fri, 22 Jun 2012 22:20:18 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315143=3A_Define_?= =?utf8?q?=5FDEBUG_when_compiling_resources=2E?= Message-ID: http://hg.python.org/cpython/rev/f59e6cc3d5eb changeset: 77586:f59e6cc3d5eb parent: 77580:34319fc28da6 user: Martin v. L?wis date: Fri Jun 22 22:16:42 2012 +0200 summary: Issue #15143: Define _DEBUG when compiling resources. 
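The Signature and Parameter API described in the pep-0362.txt hunks above is easiest to see in a short usage sketch. This is an illustration only, assuming the inspect.signature() entry point from the PEP 362 reference implementation (Python 3.3); the greet() function and the printed values are made-up examples, not part of any commit in this digest:

    import inspect

    def greet(name, *, greeting='Hello'):          # hypothetical example function
        return '{}, {}!'.format(greeting, name)

    sig = inspect.signature(greet)
    print(sig)                                      # (name, *, greeting='Hello')

    for param in sig.parameters.values():           # ordered mapping of Parameter objects
        # default is Parameter.empty when no default was given
        print(param.name, param.kind, param.default is param.empty)

    bound = sig.bind('world', greeting='Hi')         # raises TypeError on a mismatch
    print(bound.args, bound.kwargs)                  # ('world',) {'greeting': 'Hi'}
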
files: PCbuild/debug.props | 3 +++ 1 files changed, 3 insertions(+), 0 deletions(-) diff --git a/PCbuild/debug.props b/PCbuild/debug.props --- a/PCbuild/debug.props +++ b/PCbuild/debug.props @@ -12,6 +12,9 @@ _DEBUG;%(PreprocessorDefinitions) + + _DEBUG + -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 22:20:19 2012 From: python-checkins at python.org (martin.v.loewis) Date: Fri, 22 Jun 2012 22:20:19 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_Merge_heads?= Message-ID: http://hg.python.org/cpython/rev/c6e0ca1e3a0f changeset: 77587:c6e0ca1e3a0f parent: 77586:f59e6cc3d5eb parent: 77585:483d7dd6851f user: Martin v. L?wis date: Fri Jun 22 22:19:51 2012 +0200 summary: Merge heads files: Doc/library/time.rst | 2 +- Lib/datetime.py | 6 ++-- Lib/test/datetimetester.py | 6 ++- Lib/test/test_capi.py | 38 +++++++++++++------------ Modules/_datetimemodule.c | 10 ++---- Tools/ssl/make_ssl_data.py | 11 +++++++ 6 files changed, 42 insertions(+), 31 deletions(-) diff --git a/Doc/library/time.rst b/Doc/library/time.rst --- a/Doc/library/time.rst +++ b/Doc/library/time.rst @@ -545,7 +545,7 @@ +-------+-------------------+---------------------------------+ | N/A | :attr:`tm_zone` | abbreviation of timezone name | +-------+-------------------+---------------------------------+ - | N/A | :attr:`tm_gmtoff` | offset from UTC in seconds | + | N/A | :attr:`tm_gmtoff` | offset east of UTC in seconds | +-------+-------------------+---------------------------------+ Note that unlike the C structure, the month value is a range of [1, 12], not diff --git a/Lib/datetime.py b/Lib/datetime.py --- a/Lib/datetime.py +++ b/Lib/datetime.py @@ -1510,13 +1510,13 @@ # implied by tm_isdst. delta = local - datetime(*_time.gmtime(ts)[:6]) dst = _time.daylight and localtm.tm_isdst > 0 - gmtoff = _time.altzone if dst else _time.timezone - if delta == timedelta(seconds=-gmtoff): + gmtoff = -(_time.altzone if dst else _time.timezone) + if delta == timedelta(seconds=gmtoff): tz = timezone(delta, _time.tzname[dst]) else: tz = timezone(delta) else: - tz = timezone(timedelta(seconds=-gmtoff), zone) + tz = timezone(timedelta(seconds=gmtoff), zone) elif not isinstance(tz, tzinfo): raise TypeError("tz argument must be an instance of tzinfo") diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -3278,16 +3278,18 @@ self.assertEqual(dt.astimezone(None), dt) self.assertEqual(dt.astimezone(), dt) + # Note that offset in TZ variable has the opposite sign to that + # produced by %z directive. @support.run_with_tz('EST+05EDT,M3.2.0,M11.1.0') def test_astimezone_default_eastern(self): dt = self.theclass(2012, 11, 4, 6, 30, tzinfo=timezone.utc) local = dt.astimezone() self.assertEqual(dt, local) - self.assertEqual(local.strftime("%z %Z"), "+0500 EST") + self.assertEqual(local.strftime("%z %Z"), "-0500 EST") dt = self.theclass(2012, 11, 4, 5, 30, tzinfo=timezone.utc) local = dt.astimezone() self.assertEqual(dt, local) - self.assertEqual(local.strftime("%z %Z"), "+0400 EDT") + self.assertEqual(local.strftime("%z %Z"), "-0400 EDT") def test_aware_subtract(self): cls = self.theclass diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py --- a/Lib/test/test_capi.py +++ b/Lib/test/test_capi.py @@ -222,36 +222,38 @@ in Python/getargs.c, but neglected to update our poor friend skipitem() in the same file. (If so, shame on you!) 
- This function brute-force tests all** ASCII characters (1 to 127 - inclusive) as format units, checking to see that - PyArg_ParseTupleAndKeywords() return consistent errors both when - the unit is attempted to be used and when it is skipped. If the - format unit doesn't exist, we'll get one of two specific error - messages (one for used, one for skipped); if it does exist we - *won't* get that error--we'll get either no error or some other - error. If we get the "does not exist" error for one test and - not for the other, there's a mismatch, and the test fails. + With a few exceptions**, this function brute-force tests all + printable ASCII*** characters (32 to 126 inclusive) as format units, + checking to see that PyArg_ParseTupleAndKeywords() return consistent + errors both when the unit is attempted to be used and when it is + skipped. If the format unit doesn't exist, we'll get one of two + specific error messages (one for used, one for skipped); if it does + exist we *won't* get that error--we'll get either no error or some + other error. If we get the specific "does not exist" error for one + test and not for the other, there's a mismatch, and the test fails. - ** Okay, it actually skips some ASCII characters. Some characters - have special funny semantics, and it would be difficult to - accomodate them here. + ** Some format units have special funny semantics and it would + be difficult to accomodate them here. Since these are all + well-established and properly skipped in skipitem() we can + get away with not testing them--this test is really intended + to catch *new* format units. + + *** Python C source files must be ASCII. Therefore it's impossible + to have non-ASCII format units. + """ empty_tuple = () tuple_1 = (0,) dict_b = {'b':1} keywords = ["a", "b"] - # Python C source files must be ASCII, - # therefore we'll never have a format unit > 127 - for i in range(1, 128): + for i in range(32, 127): c = chr(i) - # skip non-printable characters, no one is insane enough to define - # one as a format unit # skip parentheses, the error reporting is inconsistent about them # skip 'e', it's always a two-character code # skip '|' and '$', they don't represent arguments anyway - if (not c.isprintable()) or (c in '()e|$'): + if c in '()e|$': continue # test the format unit when not skipped diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -4717,12 +4717,8 @@ return NULL; timep = localtime(×tamp); #ifdef HAVE_STRUCT_TM_TM_ZONE - { - long offset; - offset = timep->tm_gmtoff; - zone = timep->tm_zone; - delta = new_delta(0, -offset, 0, 0); - } + zone = timep->tm_zone; + delta = new_delta(0, timep->tm_gmtoff, 0, 1); #else /* HAVE_STRUCT_TM_TM_ZONE */ { PyObject *local_time; @@ -4732,7 +4728,7 @@ utc_time->tzinfo); if (local_time == NULL) goto error; - delta = datetime_subtract((PyObject*)utc_time, local_time); + delta = datetime_subtract(local_time, (PyObject*)utc_time); /* XXX: before relying on tzname, we should compare delta to the offset implied by timezone/altzone */ if (daylight && timep->tm_isdst >= 0) diff --git a/Tools/ssl/make_ssl_data.py b/Tools/ssl/make_ssl_data.py --- a/Tools/ssl/make_ssl_data.py +++ b/Tools/ssl/make_ssl_data.py @@ -1,5 +1,16 @@ #! /usr/bin/env python3 +""" +This script should be called *manually* when we want to upgrade SSLError +`library` and `reason` mnemnonics to a more recent OpenSSL version. 
+ +It takes two arguments: +- the path to the OpenSSL include files' directory + (e.g. openssl-1.0.1-beta3/include/openssl/) +- the path to the C file to be generated + (probably Modules/_ssl_data.h) +""" + import datetime import os import re -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 23:03:39 2012 From: python-checkins at python.org (brian.curtin) Date: Fri, 22 Jun 2012 23:03:39 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_=23444582=2E_Add_shutil?= =?utf8?q?=2Ewhich_function_for_finding_programs_on_the_system_path=2E?= Message-ID: http://hg.python.org/cpython/rev/0fe7439e470c changeset: 77588:0fe7439e470c user: Brian Curtin date: Fri Jun 22 16:00:30 2012 -0500 summary: Fix #444582. Add shutil.which function for finding programs on the system path. files: Doc/library/shutil.rst | 24 +++++++++++++ Lib/shutil.py | 50 ++++++++++++++++++++++++++++- Lib/test/test_shutil.py | 45 +++++++++++++++++++++++++- Misc/NEWS | 4 ++ 4 files changed, 121 insertions(+), 2 deletions(-) diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -247,6 +247,30 @@ .. versionadded:: 3.3 +.. function:: which(cmd, mode=os.F_OK | os.X_OK, path=None) + + Return the full path to an executable which would be run if the given + *cmd* was called. If no *cmd* would be called, return ``None``. + + *mode* is a permission mask passed a to :func:`os.access`, by default + determining if the file exists and executable. + + When no *path* is specified, the results of :func:`os.environ` are + used, returning either the "PATH" value or a fallback of :attr:`os.defpath`. + + On Windows, the current directory is always prepended to the *path* + whether or not you use the default or provide your own, which + is the behavior the command shell uses when finding executables. + Additionaly, when finding the *cmd* in the *path*, the + ``PATHEXT`` environment variable is checked. For example, if you + call ``shutil.which("python")``, :func:`which` will search + ``PATHEXT`` to know that it should look for ``python.exe`` within + the *path* directories. + + >>> print(shutil.which("python")) + 'c:\\python33\\python.exe' + + .. versionadded:: 3.3 .. exception:: Error diff --git a/Lib/shutil.py b/Lib/shutil.py --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -36,7 +36,7 @@ "register_archive_format", "unregister_archive_format", "get_unpack_formats", "register_unpack_format", "unregister_unpack_format", "unpack_archive", - "ignore_patterns", "chown"] + "ignore_patterns", "chown", "which"] # disk_usage is added later, if available on the platform class Error(EnvironmentError): @@ -961,3 +961,51 @@ lines = size.lines return os.terminal_size((columns, lines)) + +def which(cmd, mode=os.F_OK | os.X_OK, path=None): + """Given a file, mode, and a path string, return the path whichs conform + to the given mode on the path.""" + # Check that a given file can be accessed with the correct mode. + # Additionally check that `file` is not a directory, as on Windows + # directories pass the os.access check. + def _access_check(fn, mode): + if (os.path.exists(fn) and os.access(fn, mode) + and not os.path.isdir(fn)): + return True + return False + + # Short circuit. If we're given a full path which matches the mode + # and it exists, we're done here. 
+ if _access_check(cmd, mode): + return cmd + + path = (path or os.environ.get("PATH", os.defpath)).split(os.pathsep) + + if sys.platform == "win32": + # The current directory takes precedence on Windows. + if not os.curdir in path: + path.insert(0, os.curdir) + + # PATHEXT is necessary to check on Windows. + pathext = os.environ.get("PATHEXT", "").split(os.pathsep) + # See if the given file matches any of the expected path extensions. + # This will allow us to short circuit when given "python.exe". + matches = [cmd for ext in pathext if cmd.lower().endswith(ext.lower())] + # If it does match, only test that one, otherwise we have to try others. + files = [cmd + ext.lower() for ext in pathext] if not matches else [cmd] + else: + # On other platforms you don't have things like PATHEXT to tell you + # what file suffixes are executable, so just pass on cmd as-is. + files = [cmd] + + seen = set() + for dir in path: + dir = os.path.normcase(os.path.abspath(dir)) + if not dir in seen: + seen.add(dir) + for thefile in files: + name = os.path.join(dir, thefile) + if _access_check(name, mode): + return name + return None + diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -1128,6 +1128,49 @@ self.assertEqual(['foo'], os.listdir(rv)) +class TestWhich(unittest.TestCase): + + def setUp(self): + self.temp_dir = tempfile.mkdtemp() + # Give the temp_file an ".exe" suffix for all. + # It's needed on Windows and not harmful on other platforms. + self.temp_file = tempfile.NamedTemporaryFile(dir=self.temp_dir, + suffix=".exe") + os.chmod(self.temp_file.name, stat.S_IXUSR) + self.addCleanup(self.temp_file.close) + self.dir, self.file = os.path.split(self.temp_file.name) + + def test_basic(self): + # Given an EXE in a directory, it should be returned. + rv = shutil.which(self.file, path=self.dir) + self.assertEqual(rv, self.temp_file.name) + + def test_full_path_short_circuit(self): + # When given the fully qualified path to an executable that exists, + # it should be returned. + rv = shutil.which(self.temp_file.name, path=self.temp_dir) + self.assertEqual(self.temp_file.name, rv) + + def test_non_matching_mode(self): + # Set the file read-only and ask for writeable files. + os.chmod(self.temp_file.name, stat.S_IREAD) + rv = shutil.which(self.file, path=self.dir, mode=os.W_OK) + self.assertIsNone(rv) + + def test_nonexistent_file(self): + # Return None when no matching executable file is found on the path. + rv = shutil.which("foo.exe", path=self.dir) + self.assertIsNone(rv) + + @unittest.skipUnless(sys.platform == "win32", + "pathext check is Windows-only") + def test_pathext_checking(self): + # Ask for the file without the ".exe" extension, then ensure that + # it gets found properly with the extension. + rv = shutil.which(self.temp_file.name[:-4], path=self.dir) + self.assertEqual(self.temp_file.name, rv) + + class TestMove(unittest.TestCase): def setUp(self): @@ -1460,7 +1503,7 @@ def test_main(): support.run_unittest(TestShutil, TestMove, TestCopyFile, - TermsizeTests) + TermsizeTests, TestWhich) if __name__ == '__main__': test_main() diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -40,6 +40,10 @@ Library ------- +- Issue #444582: Add shutil.which, for finding programs on the system path. + Original patch by Erik Demaine, with later iterations by Jan Killian + and Brian Curtin. 
+ - Issue #14837: SSL errors now have ``library`` and ``reason`` attributes describing precisely what happened and in which OpenSSL submodule. The str() of a SSLError is also enhanced accordingly. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 23:03:40 2012 From: python-checkins at python.org (brian.curtin) Date: Fri, 22 Jun 2012 23:03:40 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_whitespace_normalize?= Message-ID: http://hg.python.org/cpython/rev/1997633ff1fe changeset: 77589:1997633ff1fe user: Brian Curtin date: Fri Jun 22 16:03:06 2012 -0500 summary: whitespace normalize files: Lib/shutil.py | 1 - 1 files changed, 0 insertions(+), 1 deletions(-) diff --git a/Lib/shutil.py b/Lib/shutil.py --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -1008,4 +1008,3 @@ if _access_check(name, mode): return name return None - -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Fri Jun 22 23:36:37 2012 From: python-checkins at python.org (antoine.pitrou) Date: Fri, 22 Jun 2012 23:36:37 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=23444582=3A_shutil?= =?utf8?q?=2Ewhich=28=29_respects_relative_paths=2E?= Message-ID: http://hg.python.org/cpython/rev/9d2fe615a400 changeset: 77590:9d2fe615a400 user: Antoine Pitrou date: Fri Jun 22 23:33:05 2012 +0200 summary: Issue #444582: shutil.which() respects relative paths. files: Doc/library/shutil.rst | 4 ++-- Lib/shutil.py | 2 +- Lib/test/test_shutil.py | 10 ++++++++++ 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -249,8 +249,8 @@ .. function:: which(cmd, mode=os.F_OK | os.X_OK, path=None) - Return the full path to an executable which would be run if the given - *cmd* was called. If no *cmd* would be called, return ``None``. + Return the path to an executable which would be run if the given *cmd* + was called. If no *cmd* would be called, return ``None``. *mode* is a permission mask passed a to :func:`os.access`, by default determining if the file exists and executable. diff --git a/Lib/shutil.py b/Lib/shutil.py --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -1000,7 +1000,7 @@ seen = set() for dir in path: - dir = os.path.normcase(os.path.abspath(dir)) + dir = os.path.normcase(dir) if not dir in seen: seen.add(dir) for thefile in files: diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -1157,6 +1157,16 @@ rv = shutil.which(self.file, path=self.dir, mode=os.W_OK) self.assertIsNone(rv) + def test_relative(self): + old_cwd = os.getcwd() + base_dir, tail_dir = os.path.split(self.dir) + os.chdir(base_dir) + try: + rv = shutil.which(self.file, path=tail_dir) + self.assertEqual(rv, os.path.join(tail_dir, self.file)) + finally: + os.chdir(old_cwd) + def test_nonexistent_file(self): # Return None when no matching executable file is found on the path. 
rv = shutil.which("foo.exe", path=self.dir) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 00:11:09 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sat, 23 Jun 2012 00:11:09 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Add_debug_output_for_tempor?= =?utf8?q?ary_buildbot_failures?= Message-ID: http://hg.python.org/cpython/rev/574c45f26280 changeset: 77591:574c45f26280 user: Antoine Pitrou date: Sat Jun 23 00:07:38 2012 +0200 summary: Add debug output for temporary buildbot failures files: Lib/test/test_reprlib.py | 4 +++- 1 files changed, 3 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_reprlib.py b/Lib/test/test_reprlib.py --- a/Lib/test/test_reprlib.py +++ b/Lib/test/test_reprlib.py @@ -10,7 +10,7 @@ import importlib import unittest -from test.support import run_unittest, create_empty_file +from test.support import run_unittest, create_empty_file, verbose from reprlib import repr as r # Don't shadow builtin repr from reprlib import Repr from reprlib import recursive_repr @@ -248,6 +248,8 @@ # (see http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247%28v=vs.85%29.aspx#maxpath) self.skipTest("test paths too long (%d characters) for Windows' 260 character limit" % cached_path_len) + elif os.name == 'nt' and verbose: + print("len(cached_path_len) =", len(cached_path_len)) def test_module(self): self._check_path_limitations(self.pkgname) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 00:16:57 2012 From: python-checkins at python.org (larry.hastings) Date: Sat, 23 Jun 2012 00:16:57 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Mark_PEP_362_as_accepted=2E__H?= =?utf8?q?uzzah!?= Message-ID: http://hg.python.org/peps/rev/5019413bf672 changeset: 4475:5019413bf672 user: Larry Hastings date: Fri Jun 22 15:16:35 2012 -0700 summary: Mark PEP 362 as accepted. Huzzah! files: pep-0362.txt | 9 ++++++++- 1 files changed, 8 insertions(+), 1 deletions(-) diff --git a/pep-0362.txt b/pep-0362.txt --- a/pep-0362.txt +++ b/pep-0362.txt @@ -4,7 +4,7 @@ Last-Modified: $Date$ Author: Brett Cannon , Jiwon Seo , Yury Selivanov , Larry Hastings -Status: Draft +Status: Final Type: Standards Track Content-Type: text/x-rst Created: 21-Aug-2006 @@ -546,12 +546,19 @@ return wrapper +Acceptance +========== + +PEP 362 was accepted by Guido, Friday, June 22, 2012 [#accepted]_ . +The reference implementation was committed to trunk later that day. + References ========== .. [#impl] pep362 branch (https://bitbucket.org/1st1/cpython/overview) .. [#issue] issue 15008 (http://bugs.python.org/issue15008) +.. [#accepted] "A Desperate Plea For Introspection (aka: BDFAP Needed)" (http://mail.python.org/pipermail/python-dev/2012-June/120682.html) Copyright -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Sat Jun 23 00:18:50 2012 From: python-checkins at python.org (guido.van.rossum) Date: Sat, 23 Jun 2012 00:18:50 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Speed_up_base64=2Eurlsafe?= =?utf8?b?X3tlbixkZX1jb2RlKCku?= Message-ID: http://hg.python.org/cpython/rev/2cbbc015d8a2 changeset: 77592:2cbbc015d8a2 parent: 77571:de2a0cb6ba52 user: Guido van Rossum date: Fri Jun 22 15:16:09 2012 -0700 summary: Speed up base64.urlsafe_{en,de}code(). 
files: Lib/base64.py | 24 +++++++++++------------- Misc/NEWS | 2 ++ 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/Lib/base64.py b/Lib/base64.py --- a/Lib/base64.py +++ b/Lib/base64.py @@ -40,14 +40,6 @@ else: raise TypeError("argument should be bytes or ASCII string, not %s" % s.__class__.__name__) -def _translate(s, altchars): - if not isinstance(s, bytes_types): - raise TypeError("expected bytes, not %s" % s.__class__.__name__) - translation = bytearray(range(256)) - for k, v in altchars.items(): - translation[ord(k)] = v[0] - return s.translate(translation) - # Base64 encoding/decoding uses binascii @@ -71,7 +63,7 @@ raise TypeError("expected bytes, not %s" % altchars.__class__.__name__) assert len(altchars) == 2, repr(altchars) - return _translate(encoded, {'+': altchars[0:1], '/': altchars[1:2]}) + return encoded.translate(bytes.maketrans(b'+/', altchars)) return encoded @@ -93,7 +85,7 @@ if altchars is not None: altchars = _bytes_from_decode_data(altchars) assert len(altchars) == 2, repr(altchars) - s = _translate(s, {chr(altchars[0]): b'+', chr(altchars[1]): b'/'}) + s = s.translate(bytes.maketrans(altchars, b'+/')) if validate and not re.match(b'^[A-Za-z0-9+/]*={0,2}$', s): raise binascii.Error('Non-base64 digit found') return binascii.a2b_base64(s) @@ -116,6 +108,10 @@ """ return b64decode(s) + +_urlsafe_encode_translation = bytes.maketrans(b'+/', b'-_') +_urlsafe_decode_translation = bytes.maketrans(b'-_', b'+/') + def urlsafe_b64encode(s): """Encode a byte string using a url-safe Base64 alphabet. @@ -123,7 +119,7 @@ returned. The alphabet uses '-' instead of '+' and '_' instead of '/'. """ - return b64encode(s, b'-_') + return b64encode(s).translate(_urlsafe_encode_translation) def urlsafe_b64decode(s): """Decode a byte string encoded with the standard Base64 alphabet. @@ -135,7 +131,9 @@ The alphabet uses '-' instead of '+' and '_' instead of '/'. """ - return b64decode(s, b'-_') + s = _bytes_from_decode_data(s) + s = s.translate(_urlsafe_decode_translation) + return b64decode(s) @@ -228,7 +226,7 @@ if map01 is not None: map01 = _bytes_from_decode_data(map01) assert len(map01) == 1, repr(map01) - s = _translate(s, {b'0': b'O', b'1': map01}) + s = s.translate(bytes.maketrans(b'01', b'O' + map01)) if casefold: s = s.upper() # Strip off pad characters from the right. We need to count the pad diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -40,6 +40,8 @@ Library ------- +- Issue: #15138: base64.urlsafe_{en,de}code() are now 3-4x faster. + - Issue #9527: datetime.astimezone() method will now supply a class timezone instance corresponding to the system local timezone when called with no arguments. 
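The Misc/NEWS hunk above also shows the earlier issue #9527 entry in context: datetime.astimezone() called with no argument now converts an aware datetime to the system local timezone. A short illustrative session, assuming a Python 3.3 interpreter with that change; the two checks below hold regardless of the machine's timezone setting:

    >>> from datetime import datetime, timezone
    >>> utc_dt = datetime(2012, 6, 22, 12, 0, tzinfo=timezone.utc)
    >>> local = utc_dt.astimezone()     # no argument: use the system local timezone
    >>> local.tzinfo is None            # the result is always an aware datetime
    False
    >>> local == utc_dt                 # same instant, expressed in local wall-clock time
    True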
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 00:18:52 2012 From: python-checkins at python.org (guido.van.rossum) Date: Sat, 23 Jun 2012 00:18:52 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_Merge?= Message-ID: http://hg.python.org/cpython/rev/a55912db5941 changeset: 77593:a55912db5941 parent: 77592:2cbbc015d8a2 parent: 77591:574c45f26280 user: Guido van Rossum date: Fri Jun 22 15:18:38 2012 -0700 summary: Merge files: Doc/library/shutil.rst | 24 + Doc/library/ssl.rst | 16 + Doc/library/sys.rst | 16 + Doc/library/time.rst | 2 +- Include/dictobject.h | 1 + Include/floatobject.h | 2 + Include/frameobject.h | 2 + Include/listobject.h | 1 + Include/methodobject.h | 5 + Include/object.h | 8 + Include/objimpl.h | 4 +- Include/setobject.h | 1 + Include/tupleobject.h | 3 + Lib/datetime.py | 6 +- Lib/shutil.py | 49 +- Lib/test/datetimetester.py | 6 +- Lib/test/test_capi.py | 38 +- Lib/test/test_reprlib.py | 4 +- Lib/test/test_shutil.py | 55 +- Lib/test/test_ssl.py | 45 +- Lib/test/test_sys.py | 6 + Misc/NEWS | 11 + Modules/_datetimemodule.c | 41 +- Modules/_ssl.c | 262 ++- Modules/_ssl_data.h | 1653 ++++++++++++++++++++++++ Modules/_threadmodule.c | 38 +- Objects/classobject.c | 9 + Objects/dictobject.c | 9 + Objects/floatobject.c | 10 + Objects/frameobject.c | 10 + Objects/listobject.c | 9 + Objects/methodobject.c | 9 + Objects/object.c | 12 + Objects/obmalloc.c | 81 +- Objects/setobject.c | 10 + Objects/tupleobject.c | 16 + PCbuild/debug.props | 3 + Python/pythonrun.c | 2 +- Python/sysmodule.c | 23 + Tools/scripts/diff.py | 16 +- Tools/ssl/make_ssl_data.py | 68 + 41 files changed, 2413 insertions(+), 173 deletions(-) diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -247,6 +247,30 @@ .. versionadded:: 3.3 +.. function:: which(cmd, mode=os.F_OK | os.X_OK, path=None) + + Return the path to an executable which would be run if the given *cmd* + was called. If no *cmd* would be called, return ``None``. + + *mode* is a permission mask passed a to :func:`os.access`, by default + determining if the file exists and executable. + + When no *path* is specified, the results of :func:`os.environ` are + used, returning either the "PATH" value or a fallback of :attr:`os.defpath`. + + On Windows, the current directory is always prepended to the *path* + whether or not you use the default or provide your own, which + is the behavior the command shell uses when finding executables. + Additionaly, when finding the *cmd* in the *path*, the + ``PATHEXT`` environment variable is checked. For example, if you + call ``shutil.which("python")``, :func:`which` will search + ``PATHEXT`` to know that it should look for ``python.exe`` within + the *path* directories. + + >>> print(shutil.which("python")) + 'c:\\python33\\python.exe' + + .. versionadded:: 3.3 .. exception:: Error diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst --- a/Doc/library/ssl.rst +++ b/Doc/library/ssl.rst @@ -59,6 +59,22 @@ .. versionchanged:: 3.3 :exc:`SSLError` used to be a subtype of :exc:`socket.error`. + .. attribute:: library + + A string mnemonic designating the OpenSSL submodule in which the error + occurred, such as ``SSL``, ``PEM`` or ``X509``. The range of possible + values depends on the OpenSSL version. + + .. versionadded:: 3.3 + + .. attribute:: reason + + A string mnemonic designating the reason this error occurred, for + example ``CERTIFICATE_VERIFY_FAILED``. 
The range of possible + values depends on the OpenSSL version. + + .. versionadded:: 3.3 + .. exception:: SSLZeroReturnError A subclass of :exc:`SSLError` raised when trying to read or write and diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -106,6 +106,22 @@ This function should be used for internal and specialized purposes only. +.. function:: _debugmallocstats() + + Print low-level information to stderr about the state of CPython's memory + allocator. + + If Python is configured --with-pydebug, it also performs some expensive + internal consistency checks. + + .. versionadded:: 3.3 + + .. impl-detail:: + + This function is specific to CPython. The exact output format is not + defined here, and may change. + + .. data:: dllhandle Integer specifying the handle of the Python DLL. Availability: Windows. diff --git a/Doc/library/time.rst b/Doc/library/time.rst --- a/Doc/library/time.rst +++ b/Doc/library/time.rst @@ -545,7 +545,7 @@ +-------+-------------------+---------------------------------+ | N/A | :attr:`tm_zone` | abbreviation of timezone name | +-------+-------------------+---------------------------------+ - | N/A | :attr:`tm_gmtoff` | offset from UTC in seconds | + | N/A | :attr:`tm_gmtoff` | offset east of UTC in seconds | +-------+-------------------+---------------------------------+ Note that unlike the C structure, the month value is a range of [1, 12], not diff --git a/Include/dictobject.h b/Include/dictobject.h --- a/Include/dictobject.h +++ b/Include/dictobject.h @@ -111,6 +111,7 @@ #ifndef Py_LIMITED_API int _PyObjectDict_SetItem(PyTypeObject *tp, PyObject **dictptr, PyObject *name, PyObject *value); PyObject *_PyDict_LoadGlobal(PyDictObject *, PyDictObject *, PyObject *); +PyAPI_FUNC(void) _PyDict_DebugMallocStats(FILE *out); #endif #ifdef __cplusplus diff --git a/Include/floatobject.h b/Include/floatobject.h --- a/Include/floatobject.h +++ b/Include/floatobject.h @@ -110,6 +110,8 @@ /* free list api */ PyAPI_FUNC(int) PyFloat_ClearFreeList(void); +PyAPI_FUNC(void) _PyFloat_DebugMallocStats(FILE* out); + /* Format the object based on the format_spec, as defined in PEP 3101 (Advanced String Formatting). */ PyAPI_FUNC(int) _PyFloat_FormatAdvancedWriter( diff --git a/Include/frameobject.h b/Include/frameobject.h --- a/Include/frameobject.h +++ b/Include/frameobject.h @@ -79,6 +79,8 @@ PyAPI_FUNC(int) PyFrame_ClearFreeList(void); +PyAPI_FUNC(void) _PyFrame_DebugMallocStats(FILE *out); + /* Return the line of code the frame is currently executing. 
*/ PyAPI_FUNC(int) PyFrame_GetLineNumber(PyFrameObject *); diff --git a/Include/listobject.h b/Include/listobject.h --- a/Include/listobject.h +++ b/Include/listobject.h @@ -64,6 +64,7 @@ PyAPI_FUNC(PyObject *) _PyList_Extend(PyListObject *, PyObject *); PyAPI_FUNC(int) PyList_ClearFreeList(void); +PyAPI_FUNC(void) _PyList_DebugMallocStats(FILE *out); #endif /* Macro, trading safety for speed */ diff --git a/Include/methodobject.h b/Include/methodobject.h --- a/Include/methodobject.h +++ b/Include/methodobject.h @@ -82,6 +82,11 @@ PyAPI_FUNC(int) PyCFunction_ClearFreeList(void); +#ifndef Py_LIMITED_API +PyAPI_FUNC(void) _PyCFunction_DebugMallocStats(FILE *out); +PyAPI_FUNC(void) _PyMethod_DebugMallocStats(FILE *out); +#endif + #ifdef __cplusplus } #endif diff --git a/Include/object.h b/Include/object.h --- a/Include/object.h +++ b/Include/object.h @@ -977,6 +977,14 @@ else \ _PyTrash_deposit_object((PyObject*)op); +#ifndef Py_LIMITED_API +PyAPI_FUNC(void) +_PyDebugAllocatorStats(FILE *out, const char *block_name, int num_blocks, + size_t sizeof_block); +PyAPI_FUNC(void) +_PyObject_DebugTypeStats(FILE *out); +#endif /* ifndef Py_LIMITED_API */ + #ifdef __cplusplus } #endif diff --git a/Include/objimpl.h b/Include/objimpl.h --- a/Include/objimpl.h +++ b/Include/objimpl.h @@ -101,13 +101,15 @@ /* Macros */ #ifdef WITH_PYMALLOC +#ifndef Py_LIMITED_API +PyAPI_FUNC(void) _PyObject_DebugMallocStats(FILE *out); +#endif /* #ifndef Py_LIMITED_API */ #ifdef PYMALLOC_DEBUG /* WITH_PYMALLOC && PYMALLOC_DEBUG */ PyAPI_FUNC(void *) _PyObject_DebugMalloc(size_t nbytes); PyAPI_FUNC(void *) _PyObject_DebugRealloc(void *p, size_t nbytes); PyAPI_FUNC(void) _PyObject_DebugFree(void *p); PyAPI_FUNC(void) _PyObject_DebugDumpAddress(const void *p); PyAPI_FUNC(void) _PyObject_DebugCheckAddress(const void *p); -PyAPI_FUNC(void) _PyObject_DebugMallocStats(void); PyAPI_FUNC(void *) _PyObject_DebugMallocApi(char api, size_t nbytes); PyAPI_FUNC(void *) _PyObject_DebugReallocApi(char api, void *p, size_t nbytes); PyAPI_FUNC(void) _PyObject_DebugFreeApi(char api, void *p); diff --git a/Include/setobject.h b/Include/setobject.h --- a/Include/setobject.h +++ b/Include/setobject.h @@ -101,6 +101,7 @@ PyAPI_FUNC(int) _PySet_Update(PyObject *set, PyObject *iterable); PyAPI_FUNC(int) PySet_ClearFreeList(void); +PyAPI_FUNC(void) _PySet_DebugMallocStats(FILE *out); #endif #ifdef __cplusplus diff --git a/Include/tupleobject.h b/Include/tupleobject.h --- a/Include/tupleobject.h +++ b/Include/tupleobject.h @@ -63,6 +63,9 @@ #endif PyAPI_FUNC(int) PyTuple_ClearFreeList(void); +#ifndef Py_LIMITED_API +PyAPI_FUNC(void) _PyTuple_DebugMallocStats(FILE *out); +#endif /* Py_LIMITED_API */ #ifdef __cplusplus } diff --git a/Lib/datetime.py b/Lib/datetime.py --- a/Lib/datetime.py +++ b/Lib/datetime.py @@ -1510,13 +1510,13 @@ # implied by tm_isdst. 
delta = local - datetime(*_time.gmtime(ts)[:6]) dst = _time.daylight and localtm.tm_isdst > 0 - gmtoff = _time.altzone if dst else _time.timezone - if delta == timedelta(seconds=-gmtoff): + gmtoff = -(_time.altzone if dst else _time.timezone) + if delta == timedelta(seconds=gmtoff): tz = timezone(delta, _time.tzname[dst]) else: tz = timezone(delta) else: - tz = timezone(timedelta(seconds=-gmtoff), zone) + tz = timezone(timedelta(seconds=gmtoff), zone) elif not isinstance(tz, tzinfo): raise TypeError("tz argument must be an instance of tzinfo") diff --git a/Lib/shutil.py b/Lib/shutil.py --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -36,7 +36,7 @@ "register_archive_format", "unregister_archive_format", "get_unpack_formats", "register_unpack_format", "unregister_unpack_format", "unpack_archive", - "ignore_patterns", "chown"] + "ignore_patterns", "chown", "which"] # disk_usage is added later, if available on the platform class Error(EnvironmentError): @@ -961,3 +961,50 @@ lines = size.lines return os.terminal_size((columns, lines)) + +def which(cmd, mode=os.F_OK | os.X_OK, path=None): + """Given a file, mode, and a path string, return the path whichs conform + to the given mode on the path.""" + # Check that a given file can be accessed with the correct mode. + # Additionally check that `file` is not a directory, as on Windows + # directories pass the os.access check. + def _access_check(fn, mode): + if (os.path.exists(fn) and os.access(fn, mode) + and not os.path.isdir(fn)): + return True + return False + + # Short circuit. If we're given a full path which matches the mode + # and it exists, we're done here. + if _access_check(cmd, mode): + return cmd + + path = (path or os.environ.get("PATH", os.defpath)).split(os.pathsep) + + if sys.platform == "win32": + # The current directory takes precedence on Windows. + if not os.curdir in path: + path.insert(0, os.curdir) + + # PATHEXT is necessary to check on Windows. + pathext = os.environ.get("PATHEXT", "").split(os.pathsep) + # See if the given file matches any of the expected path extensions. + # This will allow us to short circuit when given "python.exe". + matches = [cmd for ext in pathext if cmd.lower().endswith(ext.lower())] + # If it does match, only test that one, otherwise we have to try others. + files = [cmd + ext.lower() for ext in pathext] if not matches else [cmd] + else: + # On other platforms you don't have things like PATHEXT to tell you + # what file suffixes are executable, so just pass on cmd as-is. + files = [cmd] + + seen = set() + for dir in path: + dir = os.path.normcase(dir) + if not dir in seen: + seen.add(dir) + for thefile in files: + name = os.path.join(dir, thefile) + if _access_check(name, mode): + return name + return None diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py --- a/Lib/test/datetimetester.py +++ b/Lib/test/datetimetester.py @@ -3278,16 +3278,18 @@ self.assertEqual(dt.astimezone(None), dt) self.assertEqual(dt.astimezone(), dt) + # Note that offset in TZ variable has the opposite sign to that + # produced by %z directive. 
@support.run_with_tz('EST+05EDT,M3.2.0,M11.1.0') def test_astimezone_default_eastern(self): dt = self.theclass(2012, 11, 4, 6, 30, tzinfo=timezone.utc) local = dt.astimezone() self.assertEqual(dt, local) - self.assertEqual(local.strftime("%z %Z"), "+0500 EST") + self.assertEqual(local.strftime("%z %Z"), "-0500 EST") dt = self.theclass(2012, 11, 4, 5, 30, tzinfo=timezone.utc) local = dt.astimezone() self.assertEqual(dt, local) - self.assertEqual(local.strftime("%z %Z"), "+0400 EDT") + self.assertEqual(local.strftime("%z %Z"), "-0400 EDT") def test_aware_subtract(self): cls = self.theclass diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py --- a/Lib/test/test_capi.py +++ b/Lib/test/test_capi.py @@ -222,36 +222,38 @@ in Python/getargs.c, but neglected to update our poor friend skipitem() in the same file. (If so, shame on you!) - This function brute-force tests all** ASCII characters (1 to 127 - inclusive) as format units, checking to see that - PyArg_ParseTupleAndKeywords() return consistent errors both when - the unit is attempted to be used and when it is skipped. If the - format unit doesn't exist, we'll get one of two specific error - messages (one for used, one for skipped); if it does exist we - *won't* get that error--we'll get either no error or some other - error. If we get the "does not exist" error for one test and - not for the other, there's a mismatch, and the test fails. + With a few exceptions**, this function brute-force tests all + printable ASCII*** characters (32 to 126 inclusive) as format units, + checking to see that PyArg_ParseTupleAndKeywords() return consistent + errors both when the unit is attempted to be used and when it is + skipped. If the format unit doesn't exist, we'll get one of two + specific error messages (one for used, one for skipped); if it does + exist we *won't* get that error--we'll get either no error or some + other error. If we get the specific "does not exist" error for one + test and not for the other, there's a mismatch, and the test fails. - ** Okay, it actually skips some ASCII characters. Some characters - have special funny semantics, and it would be difficult to - accomodate them here. + ** Some format units have special funny semantics and it would + be difficult to accomodate them here. Since these are all + well-established and properly skipped in skipitem() we can + get away with not testing them--this test is really intended + to catch *new* format units. + + *** Python C source files must be ASCII. Therefore it's impossible + to have non-ASCII format units. 
+ """ empty_tuple = () tuple_1 = (0,) dict_b = {'b':1} keywords = ["a", "b"] - # Python C source files must be ASCII, - # therefore we'll never have a format unit > 127 - for i in range(1, 128): + for i in range(32, 127): c = chr(i) - # skip non-printable characters, no one is insane enough to define - # one as a format unit # skip parentheses, the error reporting is inconsistent about them # skip 'e', it's always a two-character code # skip '|' and '$', they don't represent arguments anyway - if (not c.isprintable()) or (c in '()e|$'): + if c in '()e|$': continue # test the format unit when not skipped diff --git a/Lib/test/test_reprlib.py b/Lib/test/test_reprlib.py --- a/Lib/test/test_reprlib.py +++ b/Lib/test/test_reprlib.py @@ -10,7 +10,7 @@ import importlib import unittest -from test.support import run_unittest, create_empty_file +from test.support import run_unittest, create_empty_file, verbose from reprlib import repr as r # Don't shadow builtin repr from reprlib import Repr from reprlib import recursive_repr @@ -248,6 +248,8 @@ # (see http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247%28v=vs.85%29.aspx#maxpath) self.skipTest("test paths too long (%d characters) for Windows' 260 character limit" % cached_path_len) + elif os.name == 'nt' and verbose: + print("len(cached_path_len) =", len(cached_path_len)) def test_module(self): self._check_path_limitations(self.pkgname) diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -1128,6 +1128,59 @@ self.assertEqual(['foo'], os.listdir(rv)) +class TestWhich(unittest.TestCase): + + def setUp(self): + self.temp_dir = tempfile.mkdtemp() + # Give the temp_file an ".exe" suffix for all. + # It's needed on Windows and not harmful on other platforms. + self.temp_file = tempfile.NamedTemporaryFile(dir=self.temp_dir, + suffix=".exe") + os.chmod(self.temp_file.name, stat.S_IXUSR) + self.addCleanup(self.temp_file.close) + self.dir, self.file = os.path.split(self.temp_file.name) + + def test_basic(self): + # Given an EXE in a directory, it should be returned. + rv = shutil.which(self.file, path=self.dir) + self.assertEqual(rv, self.temp_file.name) + + def test_full_path_short_circuit(self): + # When given the fully qualified path to an executable that exists, + # it should be returned. + rv = shutil.which(self.temp_file.name, path=self.temp_dir) + self.assertEqual(self.temp_file.name, rv) + + def test_non_matching_mode(self): + # Set the file read-only and ask for writeable files. + os.chmod(self.temp_file.name, stat.S_IREAD) + rv = shutil.which(self.file, path=self.dir, mode=os.W_OK) + self.assertIsNone(rv) + + def test_relative(self): + old_cwd = os.getcwd() + base_dir, tail_dir = os.path.split(self.dir) + os.chdir(base_dir) + try: + rv = shutil.which(self.file, path=tail_dir) + self.assertEqual(rv, os.path.join(tail_dir, self.file)) + finally: + os.chdir(old_cwd) + + def test_nonexistent_file(self): + # Return None when no matching executable file is found on the path. + rv = shutil.which("foo.exe", path=self.dir) + self.assertIsNone(rv) + + @unittest.skipUnless(sys.platform == "win32", + "pathext check is Windows-only") + def test_pathext_checking(self): + # Ask for the file without the ".exe" extension, then ensure that + # it gets found properly with the extension. 
+ rv = shutil.which(self.temp_file.name[:-4], path=self.dir) + self.assertEqual(self.temp_file.name, rv) + + class TestMove(unittest.TestCase): def setUp(self): @@ -1460,7 +1513,7 @@ def test_main(): support.run_unittest(TestShutil, TestMove, TestCopyFile, - TermsizeTests) + TermsizeTests, TestWhich) if __name__ == '__main__': test_main() diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py --- a/Lib/test/test_ssl.py +++ b/Lib/test/test_ssl.py @@ -552,7 +552,7 @@ with self.assertRaises(FileNotFoundError) as cm: ctx.load_dh_params(WRONGCERT) self.assertEqual(cm.exception.errno, errno.ENOENT) - with self.assertRaisesRegex(ssl.SSLError, "PEM routines"): + with self.assertRaises(ssl.SSLError) as cm: ctx.load_dh_params(CERTFILE) @skip_if_broken_ubuntu_ssl @@ -590,6 +590,47 @@ self.assertRaises(ValueError, ctx.set_ecdh_curve, b"foo") +class SSLErrorTests(unittest.TestCase): + + def test_str(self): + # The str() of a SSLError doesn't include the errno + e = ssl.SSLError(1, "foo") + self.assertEqual(str(e), "foo") + self.assertEqual(e.errno, 1) + # Same for a subclass + e = ssl.SSLZeroReturnError(1, "foo") + self.assertEqual(str(e), "foo") + self.assertEqual(e.errno, 1) + + def test_lib_reason(self): + # Test the library and reason attributes + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + with self.assertRaises(ssl.SSLError) as cm: + ctx.load_dh_params(CERTFILE) + self.assertEqual(cm.exception.library, 'PEM') + self.assertEqual(cm.exception.reason, 'NO_START_LINE') + s = str(cm.exception) + self.assertTrue(s.startswith("[PEM: NO_START_LINE] no start line"), s) + + def test_subclass(self): + # Check that the appropriate SSLError subclass is raised + # (this only tests one of them) + ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) + with socket.socket() as s: + s.bind(("127.0.0.1", 0)) + s.listen(5) + with socket.socket() as c: + c.connect(s.getsockname()) + c.setblocking(False) + c = ctx.wrap_socket(c, False, do_handshake_on_connect=False) + with self.assertRaises(ssl.SSLWantReadError) as cm: + c.do_handshake() + s = str(cm.exception) + self.assertTrue(s.startswith("The operation did not complete (read)"), s) + # For compatibility + self.assertEqual(cm.exception.errno, ssl.SSL_ERROR_WANT_READ) + + class NetworkedTests(unittest.TestCase): def test_connect(self): @@ -1931,7 +1972,7 @@ if not os.path.exists(filename): raise support.TestFailed("Can't read certificate file %r" % filename) - tests = [ContextTests, BasicSocketTests] + tests = [ContextTests, BasicSocketTests, SSLErrorTests] if support.is_resource_enabled('network'): tests.append(NetworkedTests) diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -603,6 +603,12 @@ self.assertEqual(sys.implementation.name, sys.implementation.name.lower()) + def test_debugmallocstats(self): + # Test sys._debugmallocstats() + from test.script_helper import assert_python_ok + args = ['-c', 'import sys; sys._debugmallocstats()'] + ret, out, err = assert_python_ok(*args) + self.assertIn(b"free PyDictObjects", err) class SizeofTest(unittest.TestCase): diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -42,6 +42,14 @@ - Issue: #15138: base64.urlsafe_{en,de}code() are now 3-4x faster. +- Issue #444582: Add shutil.which, for finding programs on the system path. + Original patch by Erik Demaine, with later iterations by Jan Killian + and Brian Curtin. 
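The shutil.which() function added by that entry (its implementation appears in the Lib/shutil.py hunk earlier in this merge) takes the cmd, mode and path parameters documented above. A small usage sketch; the printed paths are illustrative and depend on the system:

    import os
    import shutil

    # Default search over os.environ["PATH"] (or os.defpath as a fallback).
    print(shutil.which("python"))        # e.g. '/usr/local/bin/python', or None

    # Restrict the search to one directory and require the file to be writable too.
    print(shutil.which("python", mode=os.F_OK | os.W_OK, path="/usr/local/bin"))

    # No matching executable on the given path: the result is None.
    print(shutil.which("no-such-program"))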
+ +- Issue #14837: SSL errors now have ``library`` and ``reason`` attributes + describing precisely what happened and in which OpenSSL submodule. The + str() of a SSLError is also enhanced accordingly. + - Issue #9527: datetime.astimezone() method will now supply a class timezone instance corresponding to the system local timezone when called with no arguments. @@ -149,6 +157,9 @@ - Issue #14963: Convert contextlib.ExitStack.__exit__ to use an iterative algorithm (Patch by Alon Horev) +- Issue #14785: Add sys._debugmallocstats() to help debug low-level memory + allocation issues + C-API ----- diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -809,14 +809,16 @@ } if (GET_TD_MICROSECONDS(offset) != 0 || GET_TD_SECONDS(offset) % 60 != 0) { PyErr_Format(PyExc_ValueError, "offset must be a timedelta" - " representing a whole number of minutes"); + " representing a whole number of minutes," + " not %R.", offset); return NULL; } if ((GET_TD_DAYS(offset) == -1 && GET_TD_SECONDS(offset) == 0) || GET_TD_DAYS(offset) < -1 || GET_TD_DAYS(offset) >= 1) { PyErr_Format(PyExc_ValueError, "offset must be a timedelta" " strictly between -timedelta(hours=24) and" - " timedelta(hours=24)."); + " timedelta(hours=24)," + " not %R.", offset); return NULL; } @@ -4686,12 +4688,11 @@ } static PyObject * -local_timezone(PyObject *utc_time) +local_timezone(PyDateTime_DateTime *utc_time) { PyObject *result = NULL; struct tm *timep; time_t timestamp; - long offset; PyObject *delta; PyObject *one_second; PyObject *seconds; @@ -4716,21 +4717,18 @@ return NULL; timep = localtime(×tamp); #ifdef HAVE_STRUCT_TM_TM_ZONE - offset = timep->tm_gmtoff; zone = timep->tm_zone; - delta = new_delta(0, -offset, 0, 0); + delta = new_delta(0, timep->tm_gmtoff, 0, 1); #else /* HAVE_STRUCT_TM_TM_ZONE */ { PyObject *local_time; - Py_INCREF(utc_time->tzinfo); local_time = new_datetime(timep->tm_year + 1900, timep->tm_mon + 1, timep->tm_mday, timep->tm_hour, timep->tm_min, - timep->tm_sec, utc_time->tzinfo); - if (local_time == NULL) { - Py_DECREF(utc_time->tzinfo); + timep->tm_sec, DATE_GET_MICROSECOND(utc_time), + utc_time->tzinfo); + if (local_time == NULL) goto error; - } - delta = datetime_subtract(local_time, utc_time); + delta = datetime_subtract(local_time, (PyObject*)utc_time); /* XXX: before relying on tzname, we should compare delta to the offset implied by timezone/altzone */ if (daylight && timep->tm_isdst >= 0) @@ -4752,10 +4750,10 @@ return result; } -static PyObject * +static PyDateTime_DateTime * datetime_astimezone(PyDateTime_DateTime *self, PyObject *args, PyObject *kw) { - PyObject *result; + PyDateTime_DateTime *result; PyObject *offset; PyObject *temp; PyObject *tzinfo = Py_None; @@ -4775,7 +4773,7 @@ /* Conversion to self's own time zone is a NOP. */ if (self->tzinfo == tzinfo) { Py_INCREF(self); - return (PyObject *)self; + return self; } /* Convert self to UTC. */ @@ -4791,14 +4789,14 @@ } /* result = self - offset */ - result = add_datetime_timedelta(self, - (PyDateTime_Delta *)offset, -1); + result = (PyDateTime_DateTime *)add_datetime_timedelta(self, + (PyDateTime_Delta *)offset, -1); Py_DECREF(offset); if (result == NULL) return NULL; /* Attach new tzinfo and let fromutc() do the rest. 
*/ - temp = ((PyDateTime_DateTime *)result)->tzinfo; + temp = result->tzinfo; if (tzinfo == Py_None) { tzinfo = local_timezone(result); if (tzinfo == NULL) { @@ -4808,11 +4806,12 @@ } else Py_INCREF(tzinfo); - ((PyDateTime_DateTime *)result)->tzinfo = tzinfo; + result->tzinfo = tzinfo; Py_DECREF(temp); - temp = result; - result = _PyObject_CallMethodId(tzinfo, &PyId_fromutc, "O", temp); + temp = (PyObject *)result; + result = (PyDateTime_DateTime *) + _PyObject_CallMethodId(tzinfo, &PyId_fromutc, "O", temp); Py_DECREF(temp); return result; diff --git a/Modules/_ssl.c b/Modules/_ssl.c --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -76,6 +76,16 @@ PY_SSL_VERSION_TLS1 }; +struct py_ssl_error_code { + const char *mnemonic; + int library, reason; +}; + +struct py_ssl_library_code { + const char *library; + int code; +}; + /* Include symbols from _socket module */ #include "socketmodule.h" @@ -97,6 +107,9 @@ #include "openssl/err.h" #include "openssl/rand.h" +/* Include generated data (error codes) */ +#include "_ssl_data.h" + /* SSL error object */ static PyObject *PySSLErrorObject; static PyObject *PySSLZeroReturnErrorObject; @@ -105,6 +118,11 @@ static PyObject *PySSLSyscallErrorObject; static PyObject *PySSLEOFErrorObject; +/* Error mappings */ +static PyObject *err_codes_to_names; +static PyObject *err_names_to_codes; +static PyObject *lib_codes_to_names; + #ifdef WITH_THREAD /* serves as a flag to see whether we've initialized the SSL thread support. */ @@ -202,22 +220,134 @@ #define ERRSTR1(x,y,z) (x ":" y ": " z) #define ERRSTR(x) ERRSTR1("_ssl.c", STRINGIFY2(__LINE__), x) -/* XXX It might be helpful to augment the error message generated - below with the name of the SSL function that generated the error. - I expect it's obvious most of the time. -*/ + +/* + * SSL errors. 
+ */ + +PyDoc_STRVAR(SSLError_doc, +"An error occurred in the SSL implementation."); + +PyDoc_STRVAR(SSLZeroReturnError_doc, +"SSL/TLS session closed cleanly."); + +PyDoc_STRVAR(SSLWantReadError_doc, +"Non-blocking SSL socket needs to read more data\n" +"before the requested operation can be completed."); + +PyDoc_STRVAR(SSLWantWriteError_doc, +"Non-blocking SSL socket needs to write more data\n" +"before the requested operation can be completed."); + +PyDoc_STRVAR(SSLSyscallError_doc, +"System error when attempting SSL operation."); + +PyDoc_STRVAR(SSLEOFError_doc, +"SSL/TLS connection terminated abruptly."); + +static PyObject * +SSLError_str(PyOSErrorObject *self) +{ + if (self->strerror != NULL && PyUnicode_Check(self->strerror)) { + Py_INCREF(self->strerror); + return self->strerror; + } + else + return PyObject_Str(self->args); +} + +static PyType_Slot sslerror_type_slots[] = { + {Py_tp_base, NULL}, /* Filled out in module init as it's not a constant */ + {Py_tp_doc, SSLError_doc}, + {Py_tp_str, SSLError_str}, + {0, 0}, +}; + +static PyType_Spec sslerror_type_spec = { + "ssl.SSLError", + sizeof(PyOSErrorObject), + 0, + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, + sslerror_type_slots +}; + +static void +fill_and_set_sslerror(PyObject *type, int ssl_errno, const char *errstr, + int lineno, unsigned long errcode) +{ + PyObject *err_value = NULL, *reason_obj = NULL, *lib_obj = NULL; + PyObject *init_value, *msg, *key; + _Py_IDENTIFIER(reason); + _Py_IDENTIFIER(library); + + if (errcode != 0) { + int lib, reason; + + lib = ERR_GET_LIB(errcode); + reason = ERR_GET_REASON(errcode); + key = Py_BuildValue("ii", lib, reason); + if (key == NULL) + goto fail; + reason_obj = PyDict_GetItem(err_codes_to_names, key); + Py_DECREF(key); + if (reason_obj == NULL) { + /* XXX if reason < 100, it might reflect a library number (!!) 
*/ + PyErr_Clear(); + } + key = PyLong_FromLong(lib); + if (key == NULL) + goto fail; + lib_obj = PyDict_GetItem(lib_codes_to_names, key); + Py_DECREF(key); + if (lib_obj == NULL) { + PyErr_Clear(); + } + if (errstr == NULL) + errstr = ERR_reason_error_string(errcode); + } + if (errstr == NULL) + errstr = "unknown error"; + + if (reason_obj && lib_obj) + msg = PyUnicode_FromFormat("[%S: %S] %s (_ssl.c:%d)", + lib_obj, reason_obj, errstr, lineno); + else if (lib_obj) + msg = PyUnicode_FromFormat("[%S] %s (_ssl.c:%d)", + lib_obj, errstr, lineno); + else + msg = PyUnicode_FromFormat("%s (_ssl.c:%d)", errstr, lineno); + + if (msg == NULL) + goto fail; + init_value = Py_BuildValue("iN", ssl_errno, msg); + err_value = PyObject_CallObject(type, init_value); + Py_DECREF(init_value); + if (err_value == NULL) + goto fail; + if (reason_obj == NULL) + reason_obj = Py_None; + if (_PyObject_SetAttrId(err_value, &PyId_reason, reason_obj)) + goto fail; + if (lib_obj == NULL) + lib_obj = Py_None; + if (_PyObject_SetAttrId(err_value, &PyId_library, lib_obj)) + goto fail; + PyErr_SetObject(type, err_value); +fail: + Py_XDECREF(err_value); +} static PyObject * PySSL_SetError(PySSLSocket *obj, int ret, char *filename, int lineno) { - PyObject *v; PyObject *type = PySSLErrorObject; - char buf[2048]; - char *errstr; + char *errstr = NULL; int err; enum py_ssl_error p = PY_SSL_ERROR_NONE; + unsigned long e = 0; assert(ret <= 0); + e = ERR_peek_last_error(); if (obj->ssl != NULL) { err = SSL_get_error(obj->ssl, ret); @@ -248,7 +378,6 @@ break; case SSL_ERROR_SYSCALL: { - unsigned long e = ERR_get_error(); if (e == 0) { PySocketSockObject *s = (PySocketSockObject *) PyWeakref_GetObject(obj->Socket); @@ -260,9 +389,9 @@ /* underlying BIO reported an I/O error */ Py_INCREF(s); ERR_clear_error(); - v = s->errorhandler(); + s->errorhandler(); Py_DECREF(s); - return v; + return NULL; } else { /* possible? */ p = PY_SSL_ERROR_SYSCALL; type = PySSLSyscallErrorObject; @@ -270,60 +399,43 @@ } } else { p = PY_SSL_ERROR_SYSCALL; - /* XXX Protected by global interpreter lock */ - errstr = ERR_error_string(e, NULL); } break; } case SSL_ERROR_SSL: { - unsigned long e = ERR_get_error(); p = PY_SSL_ERROR_SSL; - if (e != 0) - /* XXX Protected by global interpreter lock */ - errstr = ERR_error_string(e, NULL); - else { /* possible? */ + if (e == 0) + /* possible? 
*/ errstr = "A failure in the SSL library occurred"; - } break; } default: p = PY_SSL_ERROR_INVALID_ERROR_CODE; errstr = "Invalid error code"; } - } else { - errstr = ERR_error_string(ERR_peek_last_error(), NULL); } - PyOS_snprintf(buf, sizeof(buf), "_ssl.c:%d: %s", lineno, errstr); + fill_and_set_sslerror(type, p, errstr, lineno, e); ERR_clear_error(); - v = Py_BuildValue("(is)", p, buf); - if (v != NULL) { - PyErr_SetObject(type, v); - Py_DECREF(v); - } return NULL; } static PyObject * _setSSLError (char *errstr, int errcode, char *filename, int lineno) { - char buf[2048]; - PyObject *v; - - if (errstr == NULL) { + if (errstr == NULL) errcode = ERR_peek_last_error(); - errstr = ERR_error_string(errcode, NULL); - } - PyOS_snprintf(buf, sizeof(buf), "_ssl.c:%d: %s", lineno, errstr); + else + errcode = 0; + fill_and_set_sslerror(PySSLErrorObject, errcode, errstr, lineno, errcode); ERR_clear_error(); - v = Py_BuildValue("(is)", errcode, buf); - if (v != NULL) { - PyErr_SetObject(PySSLErrorObject, v); - Py_DECREF(v); - } return NULL; } +/* + * SSL objects + */ + static PySSLSocket * newPySSLSocket(SSL_CTX *ctx, PySocketSockObject *sock, enum py_ssl_server_or_client socket_type, @@ -2520,27 +2632,6 @@ *major = libver & 0xFF; } -PyDoc_STRVAR(SSLError_doc, -"An error occurred in the SSL implementation."); - -PyDoc_STRVAR(SSLZeroReturnError_doc, -"SSL/TLS session closed cleanly."); - -PyDoc_STRVAR(SSLWantReadError_doc, -"Non-blocking SSL socket needs to read more data\n" -"before the requested operation can be completed."); - -PyDoc_STRVAR(SSLWantWriteError_doc, -"Non-blocking SSL socket needs to write more data\n" -"before the requested operation can be completed."); - -PyDoc_STRVAR(SSLSyscallError_doc, -"System error when attempting SSL operation."); - -PyDoc_STRVAR(SSLEOFError_doc, -"SSL/TLS connection terminated abruptly."); - - PyMODINIT_FUNC PyInit__ssl(void) { @@ -2548,6 +2639,8 @@ unsigned long libver; unsigned int major, minor, fix, patch, status; PySocketModule_APIObject *socket_api; + struct py_ssl_error_code *errcode; + struct py_ssl_library_code *libcode; if (PyType_Ready(&PySSLContext_Type) < 0) return NULL; @@ -2577,12 +2670,11 @@ OpenSSL_add_all_algorithms(); /* Add symbols to module dict */ - PySSLErrorObject = PyErr_NewExceptionWithDoc("ssl.SSLError", - SSLError_doc, - PyExc_OSError, - NULL); + sslerror_type_slots[0].pfunc = PyExc_OSError; + PySSLErrorObject = PyType_FromSpec(&sslerror_type_spec); if (PySSLErrorObject == NULL) return NULL; + PySSLZeroReturnErrorObject = PyErr_NewExceptionWithDoc( "ssl.SSLZeroReturnError", SSLZeroReturnError_doc, PySSLErrorObject, NULL); @@ -2705,6 +2797,50 @@ Py_INCREF(r); PyModule_AddObject(m, "HAS_NPN", r); + /* Mappings for error codes */ + err_codes_to_names = PyDict_New(); + err_names_to_codes = PyDict_New(); + if (err_codes_to_names == NULL || err_names_to_codes == NULL) + return NULL; + errcode = error_codes; + while (errcode->mnemonic != NULL) { + PyObject *mnemo, *key; + mnemo = PyUnicode_FromString(errcode->mnemonic); + key = Py_BuildValue("ii", errcode->library, errcode->reason); + if (mnemo == NULL || key == NULL) + return NULL; + if (PyDict_SetItem(err_codes_to_names, key, mnemo)) + return NULL; + if (PyDict_SetItem(err_names_to_codes, mnemo, key)) + return NULL; + Py_DECREF(key); + Py_DECREF(mnemo); + errcode++; + } + if (PyModule_AddObject(m, "err_codes_to_names", err_codes_to_names)) + return NULL; + if (PyModule_AddObject(m, "err_names_to_codes", err_names_to_codes)) + return NULL; + + lib_codes_to_names = PyDict_New(); + if 
(lib_codes_to_names == NULL) + return NULL; + libcode = library_codes; + while (libcode->library != NULL) { + PyObject *mnemo, *key; + key = PyLong_FromLong(libcode->code); + mnemo = PyUnicode_FromString(libcode->library); + if (key == NULL || mnemo == NULL) + return NULL; + if (PyDict_SetItem(lib_codes_to_names, key, mnemo)) + return NULL; + Py_DECREF(key); + Py_DECREF(mnemo); + libcode++; + } + if (PyModule_AddObject(m, "lib_codes_to_names", lib_codes_to_names)) + return NULL; + /* OpenSSL version */ /* SSLeay() gives us the version of the library linked against, which could be different from the headers version. diff --git a/Modules/_ssl_data.h b/Modules/_ssl_data.h new file mode 100644 --- /dev/null +++ b/Modules/_ssl_data.h @@ -0,0 +1,1653 @@ +/* File generated by Tools/ssl/make_ssl_data.py */ +/* Generated on 2012-05-16T23:56:40.981382 */ + +static struct py_ssl_library_code library_codes[] = { + {"PEM", ERR_LIB_PEM}, + {"SSL", ERR_LIB_SSL}, + {"X509", ERR_LIB_X509}, + { NULL } +}; + +static struct py_ssl_error_code error_codes[] = { + #ifdef PEM_R_BAD_BASE64_DECODE + {"BAD_BASE64_DECODE", ERR_LIB_PEM, PEM_R_BAD_BASE64_DECODE}, + #else + {"BAD_BASE64_DECODE", ERR_LIB_PEM, 100}, + #endif + #ifdef PEM_R_BAD_DECRYPT + {"BAD_DECRYPT", ERR_LIB_PEM, PEM_R_BAD_DECRYPT}, + #else + {"BAD_DECRYPT", ERR_LIB_PEM, 101}, + #endif + #ifdef PEM_R_BAD_END_LINE + {"BAD_END_LINE", ERR_LIB_PEM, PEM_R_BAD_END_LINE}, + #else + {"BAD_END_LINE", ERR_LIB_PEM, 102}, + #endif + #ifdef PEM_R_BAD_IV_CHARS + {"BAD_IV_CHARS", ERR_LIB_PEM, PEM_R_BAD_IV_CHARS}, + #else + {"BAD_IV_CHARS", ERR_LIB_PEM, 103}, + #endif + #ifdef PEM_R_BAD_MAGIC_NUMBER + {"BAD_MAGIC_NUMBER", ERR_LIB_PEM, PEM_R_BAD_MAGIC_NUMBER}, + #else + {"BAD_MAGIC_NUMBER", ERR_LIB_PEM, 116}, + #endif + #ifdef PEM_R_BAD_PASSWORD_READ + {"BAD_PASSWORD_READ", ERR_LIB_PEM, PEM_R_BAD_PASSWORD_READ}, + #else + {"BAD_PASSWORD_READ", ERR_LIB_PEM, 104}, + #endif + #ifdef PEM_R_BAD_VERSION_NUMBER + {"BAD_VERSION_NUMBER", ERR_LIB_PEM, PEM_R_BAD_VERSION_NUMBER}, + #else + {"BAD_VERSION_NUMBER", ERR_LIB_PEM, 117}, + #endif + #ifdef PEM_R_BIO_WRITE_FAILURE + {"BIO_WRITE_FAILURE", ERR_LIB_PEM, PEM_R_BIO_WRITE_FAILURE}, + #else + {"BIO_WRITE_FAILURE", ERR_LIB_PEM, 118}, + #endif + #ifdef PEM_R_CIPHER_IS_NULL + {"CIPHER_IS_NULL", ERR_LIB_PEM, PEM_R_CIPHER_IS_NULL}, + #else + {"CIPHER_IS_NULL", ERR_LIB_PEM, 127}, + #endif + #ifdef PEM_R_ERROR_CONVERTING_PRIVATE_KEY + {"ERROR_CONVERTING_PRIVATE_KEY", ERR_LIB_PEM, PEM_R_ERROR_CONVERTING_PRIVATE_KEY}, + #else + {"ERROR_CONVERTING_PRIVATE_KEY", ERR_LIB_PEM, 115}, + #endif + #ifdef PEM_R_EXPECTING_PRIVATE_KEY_BLOB + {"EXPECTING_PRIVATE_KEY_BLOB", ERR_LIB_PEM, PEM_R_EXPECTING_PRIVATE_KEY_BLOB}, + #else + {"EXPECTING_PRIVATE_KEY_BLOB", ERR_LIB_PEM, 119}, + #endif + #ifdef PEM_R_EXPECTING_PUBLIC_KEY_BLOB + {"EXPECTING_PUBLIC_KEY_BLOB", ERR_LIB_PEM, PEM_R_EXPECTING_PUBLIC_KEY_BLOB}, + #else + {"EXPECTING_PUBLIC_KEY_BLOB", ERR_LIB_PEM, 120}, + #endif + #ifdef PEM_R_INCONSISTENT_HEADER + {"INCONSISTENT_HEADER", ERR_LIB_PEM, PEM_R_INCONSISTENT_HEADER}, + #else + {"INCONSISTENT_HEADER", ERR_LIB_PEM, 121}, + #endif + #ifdef PEM_R_KEYBLOB_HEADER_PARSE_ERROR + {"KEYBLOB_HEADER_PARSE_ERROR", ERR_LIB_PEM, PEM_R_KEYBLOB_HEADER_PARSE_ERROR}, + #else + {"KEYBLOB_HEADER_PARSE_ERROR", ERR_LIB_PEM, 122}, + #endif + #ifdef PEM_R_KEYBLOB_TOO_SHORT + {"KEYBLOB_TOO_SHORT", ERR_LIB_PEM, PEM_R_KEYBLOB_TOO_SHORT}, + #else + {"KEYBLOB_TOO_SHORT", ERR_LIB_PEM, 123}, + #endif + #ifdef PEM_R_NOT_DEK_INFO + {"NOT_DEK_INFO", ERR_LIB_PEM, 
PEM_R_NOT_DEK_INFO}, + #else + {"NOT_DEK_INFO", ERR_LIB_PEM, 105}, + #endif + #ifdef PEM_R_NOT_ENCRYPTED + {"NOT_ENCRYPTED", ERR_LIB_PEM, PEM_R_NOT_ENCRYPTED}, + #else + {"NOT_ENCRYPTED", ERR_LIB_PEM, 106}, + #endif + #ifdef PEM_R_NOT_PROC_TYPE + {"NOT_PROC_TYPE", ERR_LIB_PEM, PEM_R_NOT_PROC_TYPE}, + #else + {"NOT_PROC_TYPE", ERR_LIB_PEM, 107}, + #endif + #ifdef PEM_R_NO_START_LINE + {"NO_START_LINE", ERR_LIB_PEM, PEM_R_NO_START_LINE}, + #else + {"NO_START_LINE", ERR_LIB_PEM, 108}, + #endif + #ifdef PEM_R_PROBLEMS_GETTING_PASSWORD + {"PROBLEMS_GETTING_PASSWORD", ERR_LIB_PEM, PEM_R_PROBLEMS_GETTING_PASSWORD}, + #else + {"PROBLEMS_GETTING_PASSWORD", ERR_LIB_PEM, 109}, + #endif + #ifdef PEM_R_PUBLIC_KEY_NO_RSA + {"PUBLIC_KEY_NO_RSA", ERR_LIB_PEM, PEM_R_PUBLIC_KEY_NO_RSA}, + #else + {"PUBLIC_KEY_NO_RSA", ERR_LIB_PEM, 110}, + #endif + #ifdef PEM_R_PVK_DATA_TOO_SHORT + {"PVK_DATA_TOO_SHORT", ERR_LIB_PEM, PEM_R_PVK_DATA_TOO_SHORT}, + #else + {"PVK_DATA_TOO_SHORT", ERR_LIB_PEM, 124}, + #endif + #ifdef PEM_R_PVK_TOO_SHORT + {"PVK_TOO_SHORT", ERR_LIB_PEM, PEM_R_PVK_TOO_SHORT}, + #else + {"PVK_TOO_SHORT", ERR_LIB_PEM, 125}, + #endif + #ifdef PEM_R_READ_KEY + {"READ_KEY", ERR_LIB_PEM, PEM_R_READ_KEY}, + #else + {"READ_KEY", ERR_LIB_PEM, 111}, + #endif + #ifdef PEM_R_SHORT_HEADER + {"SHORT_HEADER", ERR_LIB_PEM, PEM_R_SHORT_HEADER}, + #else + {"SHORT_HEADER", ERR_LIB_PEM, 112}, + #endif + #ifdef PEM_R_UNSUPPORTED_CIPHER + {"UNSUPPORTED_CIPHER", ERR_LIB_PEM, PEM_R_UNSUPPORTED_CIPHER}, + #else + {"UNSUPPORTED_CIPHER", ERR_LIB_PEM, 113}, + #endif + #ifdef PEM_R_UNSUPPORTED_ENCRYPTION + {"UNSUPPORTED_ENCRYPTION", ERR_LIB_PEM, PEM_R_UNSUPPORTED_ENCRYPTION}, + #else + {"UNSUPPORTED_ENCRYPTION", ERR_LIB_PEM, 114}, + #endif + #ifdef PEM_R_UNSUPPORTED_KEY_COMPONENTS + {"UNSUPPORTED_KEY_COMPONENTS", ERR_LIB_PEM, PEM_R_UNSUPPORTED_KEY_COMPONENTS}, + #else + {"UNSUPPORTED_KEY_COMPONENTS", ERR_LIB_PEM, 126}, + #endif + #ifdef SSL_R_APP_DATA_IN_HANDSHAKE + {"APP_DATA_IN_HANDSHAKE", ERR_LIB_SSL, SSL_R_APP_DATA_IN_HANDSHAKE}, + #else + {"APP_DATA_IN_HANDSHAKE", ERR_LIB_SSL, 100}, + #endif + #ifdef SSL_R_ATTEMPT_TO_REUSE_SESSION_IN_DIFFERENT_CONTEXT + {"ATTEMPT_TO_REUSE_SESSION_IN_DIFFERENT_CONTEXT", ERR_LIB_SSL, SSL_R_ATTEMPT_TO_REUSE_SESSION_IN_DIFFERENT_CONTEXT}, + #else + {"ATTEMPT_TO_REUSE_SESSION_IN_DIFFERENT_CONTEXT", ERR_LIB_SSL, 272}, + #endif + #ifdef SSL_R_BAD_ALERT_RECORD + {"BAD_ALERT_RECORD", ERR_LIB_SSL, SSL_R_BAD_ALERT_RECORD}, + #else + {"BAD_ALERT_RECORD", ERR_LIB_SSL, 101}, + #endif + #ifdef SSL_R_BAD_AUTHENTICATION_TYPE + {"BAD_AUTHENTICATION_TYPE", ERR_LIB_SSL, SSL_R_BAD_AUTHENTICATION_TYPE}, + #else + {"BAD_AUTHENTICATION_TYPE", ERR_LIB_SSL, 102}, + #endif + #ifdef SSL_R_BAD_CHANGE_CIPHER_SPEC + {"BAD_CHANGE_CIPHER_SPEC", ERR_LIB_SSL, SSL_R_BAD_CHANGE_CIPHER_SPEC}, + #else + {"BAD_CHANGE_CIPHER_SPEC", ERR_LIB_SSL, 103}, + #endif + #ifdef SSL_R_BAD_CHECKSUM + {"BAD_CHECKSUM", ERR_LIB_SSL, SSL_R_BAD_CHECKSUM}, + #else + {"BAD_CHECKSUM", ERR_LIB_SSL, 104}, + #endif + #ifdef SSL_R_BAD_DATA_RETURNED_BY_CALLBACK + {"BAD_DATA_RETURNED_BY_CALLBACK", ERR_LIB_SSL, SSL_R_BAD_DATA_RETURNED_BY_CALLBACK}, + #else + {"BAD_DATA_RETURNED_BY_CALLBACK", ERR_LIB_SSL, 106}, + #endif + #ifdef SSL_R_BAD_DECOMPRESSION + {"BAD_DECOMPRESSION", ERR_LIB_SSL, SSL_R_BAD_DECOMPRESSION}, + #else + {"BAD_DECOMPRESSION", ERR_LIB_SSL, 107}, + #endif + #ifdef SSL_R_BAD_DH_G_LENGTH + {"BAD_DH_G_LENGTH", ERR_LIB_SSL, SSL_R_BAD_DH_G_LENGTH}, + #else + {"BAD_DH_G_LENGTH", ERR_LIB_SSL, 108}, + #endif + #ifdef SSL_R_BAD_DH_PUB_KEY_LENGTH + 
{"BAD_DH_PUB_KEY_LENGTH", ERR_LIB_SSL, SSL_R_BAD_DH_PUB_KEY_LENGTH}, + #else + {"BAD_DH_PUB_KEY_LENGTH", ERR_LIB_SSL, 109}, + #endif + #ifdef SSL_R_BAD_DH_P_LENGTH + {"BAD_DH_P_LENGTH", ERR_LIB_SSL, SSL_R_BAD_DH_P_LENGTH}, + #else + {"BAD_DH_P_LENGTH", ERR_LIB_SSL, 110}, + #endif + #ifdef SSL_R_BAD_DIGEST_LENGTH + {"BAD_DIGEST_LENGTH", ERR_LIB_SSL, SSL_R_BAD_DIGEST_LENGTH}, + #else + {"BAD_DIGEST_LENGTH", ERR_LIB_SSL, 111}, + #endif + #ifdef SSL_R_BAD_DSA_SIGNATURE + {"BAD_DSA_SIGNATURE", ERR_LIB_SSL, SSL_R_BAD_DSA_SIGNATURE}, + #else + {"BAD_DSA_SIGNATURE", ERR_LIB_SSL, 112}, + #endif + #ifdef SSL_R_BAD_ECC_CERT + {"BAD_ECC_CERT", ERR_LIB_SSL, SSL_R_BAD_ECC_CERT}, + #else + {"BAD_ECC_CERT", ERR_LIB_SSL, 304}, + #endif + #ifdef SSL_R_BAD_ECDSA_SIGNATURE + {"BAD_ECDSA_SIGNATURE", ERR_LIB_SSL, SSL_R_BAD_ECDSA_SIGNATURE}, + #else + {"BAD_ECDSA_SIGNATURE", ERR_LIB_SSL, 305}, + #endif + #ifdef SSL_R_BAD_ECPOINT + {"BAD_ECPOINT", ERR_LIB_SSL, SSL_R_BAD_ECPOINT}, + #else + {"BAD_ECPOINT", ERR_LIB_SSL, 306}, + #endif + #ifdef SSL_R_BAD_HANDSHAKE_LENGTH + {"BAD_HANDSHAKE_LENGTH", ERR_LIB_SSL, SSL_R_BAD_HANDSHAKE_LENGTH}, + #else + {"BAD_HANDSHAKE_LENGTH", ERR_LIB_SSL, 332}, + #endif + #ifdef SSL_R_BAD_HELLO_REQUEST + {"BAD_HELLO_REQUEST", ERR_LIB_SSL, SSL_R_BAD_HELLO_REQUEST}, + #else + {"BAD_HELLO_REQUEST", ERR_LIB_SSL, 105}, + #endif + #ifdef SSL_R_BAD_LENGTH + {"BAD_LENGTH", ERR_LIB_SSL, SSL_R_BAD_LENGTH}, + #else + {"BAD_LENGTH", ERR_LIB_SSL, 271}, + #endif + #ifdef SSL_R_BAD_MAC_DECODE + {"BAD_MAC_DECODE", ERR_LIB_SSL, SSL_R_BAD_MAC_DECODE}, + #else + {"BAD_MAC_DECODE", ERR_LIB_SSL, 113}, + #endif + #ifdef SSL_R_BAD_MAC_LENGTH + {"BAD_MAC_LENGTH", ERR_LIB_SSL, SSL_R_BAD_MAC_LENGTH}, + #else + {"BAD_MAC_LENGTH", ERR_LIB_SSL, 333}, + #endif + #ifdef SSL_R_BAD_MESSAGE_TYPE + {"BAD_MESSAGE_TYPE", ERR_LIB_SSL, SSL_R_BAD_MESSAGE_TYPE}, + #else + {"BAD_MESSAGE_TYPE", ERR_LIB_SSL, 114}, + #endif + #ifdef SSL_R_BAD_PACKET_LENGTH + {"BAD_PACKET_LENGTH", ERR_LIB_SSL, SSL_R_BAD_PACKET_LENGTH}, + #else + {"BAD_PACKET_LENGTH", ERR_LIB_SSL, 115}, + #endif + #ifdef SSL_R_BAD_PROTOCOL_VERSION_NUMBER + {"BAD_PROTOCOL_VERSION_NUMBER", ERR_LIB_SSL, SSL_R_BAD_PROTOCOL_VERSION_NUMBER}, + #else + {"BAD_PROTOCOL_VERSION_NUMBER", ERR_LIB_SSL, 116}, + #endif + #ifdef SSL_R_BAD_PSK_IDENTITY_HINT_LENGTH + {"BAD_PSK_IDENTITY_HINT_LENGTH", ERR_LIB_SSL, SSL_R_BAD_PSK_IDENTITY_HINT_LENGTH}, + #else + {"BAD_PSK_IDENTITY_HINT_LENGTH", ERR_LIB_SSL, 316}, + #endif + #ifdef SSL_R_BAD_RESPONSE_ARGUMENT + {"BAD_RESPONSE_ARGUMENT", ERR_LIB_SSL, SSL_R_BAD_RESPONSE_ARGUMENT}, + #else + {"BAD_RESPONSE_ARGUMENT", ERR_LIB_SSL, 117}, + #endif + #ifdef SSL_R_BAD_RSA_DECRYPT + {"BAD_RSA_DECRYPT", ERR_LIB_SSL, SSL_R_BAD_RSA_DECRYPT}, + #else + {"BAD_RSA_DECRYPT", ERR_LIB_SSL, 118}, + #endif + #ifdef SSL_R_BAD_RSA_ENCRYPT + {"BAD_RSA_ENCRYPT", ERR_LIB_SSL, SSL_R_BAD_RSA_ENCRYPT}, + #else + {"BAD_RSA_ENCRYPT", ERR_LIB_SSL, 119}, + #endif + #ifdef SSL_R_BAD_RSA_E_LENGTH + {"BAD_RSA_E_LENGTH", ERR_LIB_SSL, SSL_R_BAD_RSA_E_LENGTH}, + #else + {"BAD_RSA_E_LENGTH", ERR_LIB_SSL, 120}, + #endif + #ifdef SSL_R_BAD_RSA_MODULUS_LENGTH + {"BAD_RSA_MODULUS_LENGTH", ERR_LIB_SSL, SSL_R_BAD_RSA_MODULUS_LENGTH}, + #else + {"BAD_RSA_MODULUS_LENGTH", ERR_LIB_SSL, 121}, + #endif + #ifdef SSL_R_BAD_RSA_SIGNATURE + {"BAD_RSA_SIGNATURE", ERR_LIB_SSL, SSL_R_BAD_RSA_SIGNATURE}, + #else + {"BAD_RSA_SIGNATURE", ERR_LIB_SSL, 122}, + #endif + #ifdef SSL_R_BAD_SIGNATURE + {"BAD_SIGNATURE", ERR_LIB_SSL, SSL_R_BAD_SIGNATURE}, + #else + {"BAD_SIGNATURE", ERR_LIB_SSL, 123}, + 
#endif + #ifdef SSL_R_BAD_SSL_FILETYPE + {"BAD_SSL_FILETYPE", ERR_LIB_SSL, SSL_R_BAD_SSL_FILETYPE}, + #else + {"BAD_SSL_FILETYPE", ERR_LIB_SSL, 124}, + #endif + #ifdef SSL_R_BAD_SSL_SESSION_ID_LENGTH + {"BAD_SSL_SESSION_ID_LENGTH", ERR_LIB_SSL, SSL_R_BAD_SSL_SESSION_ID_LENGTH}, + #else + {"BAD_SSL_SESSION_ID_LENGTH", ERR_LIB_SSL, 125}, + #endif + #ifdef SSL_R_BAD_STATE + {"BAD_STATE", ERR_LIB_SSL, SSL_R_BAD_STATE}, + #else + {"BAD_STATE", ERR_LIB_SSL, 126}, + #endif + #ifdef SSL_R_BAD_WRITE_RETRY + {"BAD_WRITE_RETRY", ERR_LIB_SSL, SSL_R_BAD_WRITE_RETRY}, + #else + {"BAD_WRITE_RETRY", ERR_LIB_SSL, 127}, + #endif + #ifdef SSL_R_BIO_NOT_SET + {"BIO_NOT_SET", ERR_LIB_SSL, SSL_R_BIO_NOT_SET}, + #else + {"BIO_NOT_SET", ERR_LIB_SSL, 128}, + #endif + #ifdef SSL_R_BLOCK_CIPHER_PAD_IS_WRONG + {"BLOCK_CIPHER_PAD_IS_WRONG", ERR_LIB_SSL, SSL_R_BLOCK_CIPHER_PAD_IS_WRONG}, + #else + {"BLOCK_CIPHER_PAD_IS_WRONG", ERR_LIB_SSL, 129}, + #endif + #ifdef SSL_R_BN_LIB + {"BN_LIB", ERR_LIB_SSL, SSL_R_BN_LIB}, + #else + {"BN_LIB", ERR_LIB_SSL, 130}, + #endif + #ifdef SSL_R_CA_DN_LENGTH_MISMATCH + {"CA_DN_LENGTH_MISMATCH", ERR_LIB_SSL, SSL_R_CA_DN_LENGTH_MISMATCH}, + #else + {"CA_DN_LENGTH_MISMATCH", ERR_LIB_SSL, 131}, + #endif + #ifdef SSL_R_CA_DN_TOO_LONG + {"CA_DN_TOO_LONG", ERR_LIB_SSL, SSL_R_CA_DN_TOO_LONG}, + #else + {"CA_DN_TOO_LONG", ERR_LIB_SSL, 132}, + #endif + #ifdef SSL_R_CCS_RECEIVED_EARLY + {"CCS_RECEIVED_EARLY", ERR_LIB_SSL, SSL_R_CCS_RECEIVED_EARLY}, + #else + {"CCS_RECEIVED_EARLY", ERR_LIB_SSL, 133}, + #endif + #ifdef SSL_R_CERTIFICATE_VERIFY_FAILED + {"CERTIFICATE_VERIFY_FAILED", ERR_LIB_SSL, SSL_R_CERTIFICATE_VERIFY_FAILED}, + #else + {"CERTIFICATE_VERIFY_FAILED", ERR_LIB_SSL, 134}, + #endif + #ifdef SSL_R_CERT_LENGTH_MISMATCH + {"CERT_LENGTH_MISMATCH", ERR_LIB_SSL, SSL_R_CERT_LENGTH_MISMATCH}, + #else + {"CERT_LENGTH_MISMATCH", ERR_LIB_SSL, 135}, + #endif + #ifdef SSL_R_CHALLENGE_IS_DIFFERENT + {"CHALLENGE_IS_DIFFERENT", ERR_LIB_SSL, SSL_R_CHALLENGE_IS_DIFFERENT}, + #else + {"CHALLENGE_IS_DIFFERENT", ERR_LIB_SSL, 136}, + #endif + #ifdef SSL_R_CIPHER_CODE_WRONG_LENGTH + {"CIPHER_CODE_WRONG_LENGTH", ERR_LIB_SSL, SSL_R_CIPHER_CODE_WRONG_LENGTH}, + #else + {"CIPHER_CODE_WRONG_LENGTH", ERR_LIB_SSL, 137}, + #endif + #ifdef SSL_R_CIPHER_OR_HASH_UNAVAILABLE + {"CIPHER_OR_HASH_UNAVAILABLE", ERR_LIB_SSL, SSL_R_CIPHER_OR_HASH_UNAVAILABLE}, + #else + {"CIPHER_OR_HASH_UNAVAILABLE", ERR_LIB_SSL, 138}, + #endif + #ifdef SSL_R_CIPHER_TABLE_SRC_ERROR + {"CIPHER_TABLE_SRC_ERROR", ERR_LIB_SSL, SSL_R_CIPHER_TABLE_SRC_ERROR}, + #else + {"CIPHER_TABLE_SRC_ERROR", ERR_LIB_SSL, 139}, + #endif + #ifdef SSL_R_CLIENTHELLO_TLSEXT + {"CLIENTHELLO_TLSEXT", ERR_LIB_SSL, SSL_R_CLIENTHELLO_TLSEXT}, + #else + {"CLIENTHELLO_TLSEXT", ERR_LIB_SSL, 226}, + #endif + #ifdef SSL_R_COMPRESSED_LENGTH_TOO_LONG + {"COMPRESSED_LENGTH_TOO_LONG", ERR_LIB_SSL, SSL_R_COMPRESSED_LENGTH_TOO_LONG}, + #else + {"COMPRESSED_LENGTH_TOO_LONG", ERR_LIB_SSL, 140}, + #endif + #ifdef SSL_R_COMPRESSION_DISABLED + {"COMPRESSION_DISABLED", ERR_LIB_SSL, SSL_R_COMPRESSION_DISABLED}, + #else + {"COMPRESSION_DISABLED", ERR_LIB_SSL, 343}, + #endif + #ifdef SSL_R_COMPRESSION_FAILURE + {"COMPRESSION_FAILURE", ERR_LIB_SSL, SSL_R_COMPRESSION_FAILURE}, + #else + {"COMPRESSION_FAILURE", ERR_LIB_SSL, 141}, + #endif + #ifdef SSL_R_COMPRESSION_ID_NOT_WITHIN_PRIVATE_RANGE + {"COMPRESSION_ID_NOT_WITHIN_PRIVATE_RANGE", ERR_LIB_SSL, SSL_R_COMPRESSION_ID_NOT_WITHIN_PRIVATE_RANGE}, + #else + {"COMPRESSION_ID_NOT_WITHIN_PRIVATE_RANGE", ERR_LIB_SSL, 307}, + #endif + #ifdef 
SSL_R_COMPRESSION_LIBRARY_ERROR + {"COMPRESSION_LIBRARY_ERROR", ERR_LIB_SSL, SSL_R_COMPRESSION_LIBRARY_ERROR}, + #else + {"COMPRESSION_LIBRARY_ERROR", ERR_LIB_SSL, 142}, + #endif + #ifdef SSL_R_CONNECTION_ID_IS_DIFFERENT + {"CONNECTION_ID_IS_DIFFERENT", ERR_LIB_SSL, SSL_R_CONNECTION_ID_IS_DIFFERENT}, + #else + {"CONNECTION_ID_IS_DIFFERENT", ERR_LIB_SSL, 143}, + #endif + #ifdef SSL_R_CONNECTION_TYPE_NOT_SET + {"CONNECTION_TYPE_NOT_SET", ERR_LIB_SSL, SSL_R_CONNECTION_TYPE_NOT_SET}, + #else + {"CONNECTION_TYPE_NOT_SET", ERR_LIB_SSL, 144}, + #endif + #ifdef SSL_R_COOKIE_MISMATCH + {"COOKIE_MISMATCH", ERR_LIB_SSL, SSL_R_COOKIE_MISMATCH}, + #else + {"COOKIE_MISMATCH", ERR_LIB_SSL, 308}, + #endif + #ifdef SSL_R_DATA_BETWEEN_CCS_AND_FINISHED + {"DATA_BETWEEN_CCS_AND_FINISHED", ERR_LIB_SSL, SSL_R_DATA_BETWEEN_CCS_AND_FINISHED}, + #else + {"DATA_BETWEEN_CCS_AND_FINISHED", ERR_LIB_SSL, 145}, + #endif + #ifdef SSL_R_DATA_LENGTH_TOO_LONG + {"DATA_LENGTH_TOO_LONG", ERR_LIB_SSL, SSL_R_DATA_LENGTH_TOO_LONG}, + #else + {"DATA_LENGTH_TOO_LONG", ERR_LIB_SSL, 146}, + #endif + #ifdef SSL_R_DECRYPTION_FAILED + {"DECRYPTION_FAILED", ERR_LIB_SSL, SSL_R_DECRYPTION_FAILED}, + #else + {"DECRYPTION_FAILED", ERR_LIB_SSL, 147}, + #endif + #ifdef SSL_R_DECRYPTION_FAILED_OR_BAD_RECORD_MAC + {"DECRYPTION_FAILED_OR_BAD_RECORD_MAC", ERR_LIB_SSL, SSL_R_DECRYPTION_FAILED_OR_BAD_RECORD_MAC}, + #else + {"DECRYPTION_FAILED_OR_BAD_RECORD_MAC", ERR_LIB_SSL, 281}, + #endif + #ifdef SSL_R_DH_PUBLIC_VALUE_LENGTH_IS_WRONG + {"DH_PUBLIC_VALUE_LENGTH_IS_WRONG", ERR_LIB_SSL, SSL_R_DH_PUBLIC_VALUE_LENGTH_IS_WRONG}, + #else + {"DH_PUBLIC_VALUE_LENGTH_IS_WRONG", ERR_LIB_SSL, 148}, + #endif + #ifdef SSL_R_DIGEST_CHECK_FAILED + {"DIGEST_CHECK_FAILED", ERR_LIB_SSL, SSL_R_DIGEST_CHECK_FAILED}, + #else + {"DIGEST_CHECK_FAILED", ERR_LIB_SSL, 149}, + #endif + #ifdef SSL_R_DTLS_MESSAGE_TOO_BIG + {"DTLS_MESSAGE_TOO_BIG", ERR_LIB_SSL, SSL_R_DTLS_MESSAGE_TOO_BIG}, + #else + {"DTLS_MESSAGE_TOO_BIG", ERR_LIB_SSL, 334}, + #endif + #ifdef SSL_R_DUPLICATE_COMPRESSION_ID + {"DUPLICATE_COMPRESSION_ID", ERR_LIB_SSL, SSL_R_DUPLICATE_COMPRESSION_ID}, + #else + {"DUPLICATE_COMPRESSION_ID", ERR_LIB_SSL, 309}, + #endif + #ifdef SSL_R_ECC_CERT_NOT_FOR_KEY_AGREEMENT + {"ECC_CERT_NOT_FOR_KEY_AGREEMENT", ERR_LIB_SSL, SSL_R_ECC_CERT_NOT_FOR_KEY_AGREEMENT}, + #else + {"ECC_CERT_NOT_FOR_KEY_AGREEMENT", ERR_LIB_SSL, 317}, + #endif + #ifdef SSL_R_ECC_CERT_NOT_FOR_SIGNING + {"ECC_CERT_NOT_FOR_SIGNING", ERR_LIB_SSL, SSL_R_ECC_CERT_NOT_FOR_SIGNING}, + #else + {"ECC_CERT_NOT_FOR_SIGNING", ERR_LIB_SSL, 318}, + #endif + #ifdef SSL_R_ECC_CERT_SHOULD_HAVE_RSA_SIGNATURE + {"ECC_CERT_SHOULD_HAVE_RSA_SIGNATURE", ERR_LIB_SSL, SSL_R_ECC_CERT_SHOULD_HAVE_RSA_SIGNATURE}, + #else + {"ECC_CERT_SHOULD_HAVE_RSA_SIGNATURE", ERR_LIB_SSL, 322}, + #endif + #ifdef SSL_R_ECC_CERT_SHOULD_HAVE_SHA1_SIGNATURE + {"ECC_CERT_SHOULD_HAVE_SHA1_SIGNATURE", ERR_LIB_SSL, SSL_R_ECC_CERT_SHOULD_HAVE_SHA1_SIGNATURE}, + #else + {"ECC_CERT_SHOULD_HAVE_SHA1_SIGNATURE", ERR_LIB_SSL, 323}, + #endif + #ifdef SSL_R_ECGROUP_TOO_LARGE_FOR_CIPHER + {"ECGROUP_TOO_LARGE_FOR_CIPHER", ERR_LIB_SSL, SSL_R_ECGROUP_TOO_LARGE_FOR_CIPHER}, + #else + {"ECGROUP_TOO_LARGE_FOR_CIPHER", ERR_LIB_SSL, 310}, + #endif + #ifdef SSL_R_ENCRYPTED_LENGTH_TOO_LONG + {"ENCRYPTED_LENGTH_TOO_LONG", ERR_LIB_SSL, SSL_R_ENCRYPTED_LENGTH_TOO_LONG}, + #else + {"ENCRYPTED_LENGTH_TOO_LONG", ERR_LIB_SSL, 150}, + #endif + #ifdef SSL_R_ERROR_GENERATING_TMP_RSA_KEY + {"ERROR_GENERATING_TMP_RSA_KEY", ERR_LIB_SSL, SSL_R_ERROR_GENERATING_TMP_RSA_KEY}, + #else + 
{"ERROR_GENERATING_TMP_RSA_KEY", ERR_LIB_SSL, 282}, + #endif + #ifdef SSL_R_ERROR_IN_RECEIVED_CIPHER_LIST + {"ERROR_IN_RECEIVED_CIPHER_LIST", ERR_LIB_SSL, SSL_R_ERROR_IN_RECEIVED_CIPHER_LIST}, + #else + {"ERROR_IN_RECEIVED_CIPHER_LIST", ERR_LIB_SSL, 151}, + #endif + #ifdef SSL_R_EXCESSIVE_MESSAGE_SIZE + {"EXCESSIVE_MESSAGE_SIZE", ERR_LIB_SSL, SSL_R_EXCESSIVE_MESSAGE_SIZE}, + #else + {"EXCESSIVE_MESSAGE_SIZE", ERR_LIB_SSL, 152}, + #endif + #ifdef SSL_R_EXTRA_DATA_IN_MESSAGE + {"EXTRA_DATA_IN_MESSAGE", ERR_LIB_SSL, SSL_R_EXTRA_DATA_IN_MESSAGE}, + #else + {"EXTRA_DATA_IN_MESSAGE", ERR_LIB_SSL, 153}, + #endif + #ifdef SSL_R_GOT_A_FIN_BEFORE_A_CCS + {"GOT_A_FIN_BEFORE_A_CCS", ERR_LIB_SSL, SSL_R_GOT_A_FIN_BEFORE_A_CCS}, + #else + {"GOT_A_FIN_BEFORE_A_CCS", ERR_LIB_SSL, 154}, + #endif + #ifdef SSL_R_HTTPS_PROXY_REQUEST + {"HTTPS_PROXY_REQUEST", ERR_LIB_SSL, SSL_R_HTTPS_PROXY_REQUEST}, + #else + {"HTTPS_PROXY_REQUEST", ERR_LIB_SSL, 155}, + #endif + #ifdef SSL_R_HTTP_REQUEST + {"HTTP_REQUEST", ERR_LIB_SSL, SSL_R_HTTP_REQUEST}, + #else + {"HTTP_REQUEST", ERR_LIB_SSL, 156}, + #endif + #ifdef SSL_R_ILLEGAL_PADDING + {"ILLEGAL_PADDING", ERR_LIB_SSL, SSL_R_ILLEGAL_PADDING}, + #else + {"ILLEGAL_PADDING", ERR_LIB_SSL, 283}, + #endif + #ifdef SSL_R_INCONSISTENT_COMPRESSION + {"INCONSISTENT_COMPRESSION", ERR_LIB_SSL, SSL_R_INCONSISTENT_COMPRESSION}, + #else + {"INCONSISTENT_COMPRESSION", ERR_LIB_SSL, 340}, + #endif + #ifdef SSL_R_INVALID_CHALLENGE_LENGTH + {"INVALID_CHALLENGE_LENGTH", ERR_LIB_SSL, SSL_R_INVALID_CHALLENGE_LENGTH}, + #else + {"INVALID_CHALLENGE_LENGTH", ERR_LIB_SSL, 158}, + #endif + #ifdef SSL_R_INVALID_COMMAND + {"INVALID_COMMAND", ERR_LIB_SSL, SSL_R_INVALID_COMMAND}, + #else + {"INVALID_COMMAND", ERR_LIB_SSL, 280}, + #endif + #ifdef SSL_R_INVALID_COMPRESSION_ALGORITHM + {"INVALID_COMPRESSION_ALGORITHM", ERR_LIB_SSL, SSL_R_INVALID_COMPRESSION_ALGORITHM}, + #else + {"INVALID_COMPRESSION_ALGORITHM", ERR_LIB_SSL, 341}, + #endif + #ifdef SSL_R_INVALID_PURPOSE + {"INVALID_PURPOSE", ERR_LIB_SSL, SSL_R_INVALID_PURPOSE}, + #else + {"INVALID_PURPOSE", ERR_LIB_SSL, 278}, + #endif + #ifdef SSL_R_INVALID_STATUS_RESPONSE + {"INVALID_STATUS_RESPONSE", ERR_LIB_SSL, SSL_R_INVALID_STATUS_RESPONSE}, + #else + {"INVALID_STATUS_RESPONSE", ERR_LIB_SSL, 328}, + #endif + #ifdef SSL_R_INVALID_TICKET_KEYS_LENGTH + {"INVALID_TICKET_KEYS_LENGTH", ERR_LIB_SSL, SSL_R_INVALID_TICKET_KEYS_LENGTH}, + #else + {"INVALID_TICKET_KEYS_LENGTH", ERR_LIB_SSL, 325}, + #endif + #ifdef SSL_R_INVALID_TRUST + {"INVALID_TRUST", ERR_LIB_SSL, SSL_R_INVALID_TRUST}, + #else + {"INVALID_TRUST", ERR_LIB_SSL, 279}, + #endif + #ifdef SSL_R_KEY_ARG_TOO_LONG + {"KEY_ARG_TOO_LONG", ERR_LIB_SSL, SSL_R_KEY_ARG_TOO_LONG}, + #else + {"KEY_ARG_TOO_LONG", ERR_LIB_SSL, 284}, + #endif + #ifdef SSL_R_KRB5 + {"KRB5", ERR_LIB_SSL, SSL_R_KRB5}, + #else + {"KRB5", ERR_LIB_SSL, 285}, + #endif + #ifdef SSL_R_KRB5_C_CC_PRINC + {"KRB5_C_CC_PRINC", ERR_LIB_SSL, SSL_R_KRB5_C_CC_PRINC}, + #else + {"KRB5_C_CC_PRINC", ERR_LIB_SSL, 286}, + #endif + #ifdef SSL_R_KRB5_C_GET_CRED + {"KRB5_C_GET_CRED", ERR_LIB_SSL, SSL_R_KRB5_C_GET_CRED}, + #else + {"KRB5_C_GET_CRED", ERR_LIB_SSL, 287}, + #endif + #ifdef SSL_R_KRB5_C_INIT + {"KRB5_C_INIT", ERR_LIB_SSL, SSL_R_KRB5_C_INIT}, + #else + {"KRB5_C_INIT", ERR_LIB_SSL, 288}, + #endif + #ifdef SSL_R_KRB5_C_MK_REQ + {"KRB5_C_MK_REQ", ERR_LIB_SSL, SSL_R_KRB5_C_MK_REQ}, + #else + {"KRB5_C_MK_REQ", ERR_LIB_SSL, 289}, + #endif + #ifdef SSL_R_KRB5_S_BAD_TICKET + {"KRB5_S_BAD_TICKET", ERR_LIB_SSL, SSL_R_KRB5_S_BAD_TICKET}, + #else + 
{"KRB5_S_BAD_TICKET", ERR_LIB_SSL, 290}, + #endif + #ifdef SSL_R_KRB5_S_INIT + {"KRB5_S_INIT", ERR_LIB_SSL, SSL_R_KRB5_S_INIT}, + #else + {"KRB5_S_INIT", ERR_LIB_SSL, 291}, + #endif + #ifdef SSL_R_KRB5_S_RD_REQ + {"KRB5_S_RD_REQ", ERR_LIB_SSL, SSL_R_KRB5_S_RD_REQ}, + #else + {"KRB5_S_RD_REQ", ERR_LIB_SSL, 292}, + #endif + #ifdef SSL_R_KRB5_S_TKT_EXPIRED + {"KRB5_S_TKT_EXPIRED", ERR_LIB_SSL, SSL_R_KRB5_S_TKT_EXPIRED}, + #else + {"KRB5_S_TKT_EXPIRED", ERR_LIB_SSL, 293}, + #endif + #ifdef SSL_R_KRB5_S_TKT_NYV + {"KRB5_S_TKT_NYV", ERR_LIB_SSL, SSL_R_KRB5_S_TKT_NYV}, + #else + {"KRB5_S_TKT_NYV", ERR_LIB_SSL, 294}, + #endif + #ifdef SSL_R_KRB5_S_TKT_SKEW + {"KRB5_S_TKT_SKEW", ERR_LIB_SSL, SSL_R_KRB5_S_TKT_SKEW}, + #else + {"KRB5_S_TKT_SKEW", ERR_LIB_SSL, 295}, + #endif + #ifdef SSL_R_LENGTH_MISMATCH + {"LENGTH_MISMATCH", ERR_LIB_SSL, SSL_R_LENGTH_MISMATCH}, + #else + {"LENGTH_MISMATCH", ERR_LIB_SSL, 159}, + #endif + #ifdef SSL_R_LENGTH_TOO_SHORT + {"LENGTH_TOO_SHORT", ERR_LIB_SSL, SSL_R_LENGTH_TOO_SHORT}, + #else + {"LENGTH_TOO_SHORT", ERR_LIB_SSL, 160}, + #endif + #ifdef SSL_R_LIBRARY_BUG + {"LIBRARY_BUG", ERR_LIB_SSL, SSL_R_LIBRARY_BUG}, + #else + {"LIBRARY_BUG", ERR_LIB_SSL, 274}, + #endif + #ifdef SSL_R_LIBRARY_HAS_NO_CIPHERS + {"LIBRARY_HAS_NO_CIPHERS", ERR_LIB_SSL, SSL_R_LIBRARY_HAS_NO_CIPHERS}, + #else + {"LIBRARY_HAS_NO_CIPHERS", ERR_LIB_SSL, 161}, + #endif + #ifdef SSL_R_MESSAGE_TOO_LONG + {"MESSAGE_TOO_LONG", ERR_LIB_SSL, SSL_R_MESSAGE_TOO_LONG}, + #else + {"MESSAGE_TOO_LONG", ERR_LIB_SSL, 296}, + #endif + #ifdef SSL_R_MISSING_DH_DSA_CERT + {"MISSING_DH_DSA_CERT", ERR_LIB_SSL, SSL_R_MISSING_DH_DSA_CERT}, + #else + {"MISSING_DH_DSA_CERT", ERR_LIB_SSL, 162}, + #endif + #ifdef SSL_R_MISSING_DH_KEY + {"MISSING_DH_KEY", ERR_LIB_SSL, SSL_R_MISSING_DH_KEY}, + #else + {"MISSING_DH_KEY", ERR_LIB_SSL, 163}, + #endif + #ifdef SSL_R_MISSING_DH_RSA_CERT + {"MISSING_DH_RSA_CERT", ERR_LIB_SSL, SSL_R_MISSING_DH_RSA_CERT}, + #else + {"MISSING_DH_RSA_CERT", ERR_LIB_SSL, 164}, + #endif + #ifdef SSL_R_MISSING_DSA_SIGNING_CERT + {"MISSING_DSA_SIGNING_CERT", ERR_LIB_SSL, SSL_R_MISSING_DSA_SIGNING_CERT}, + #else + {"MISSING_DSA_SIGNING_CERT", ERR_LIB_SSL, 165}, + #endif + #ifdef SSL_R_MISSING_EXPORT_TMP_DH_KEY + {"MISSING_EXPORT_TMP_DH_KEY", ERR_LIB_SSL, SSL_R_MISSING_EXPORT_TMP_DH_KEY}, + #else + {"MISSING_EXPORT_TMP_DH_KEY", ERR_LIB_SSL, 166}, + #endif + #ifdef SSL_R_MISSING_EXPORT_TMP_RSA_KEY + {"MISSING_EXPORT_TMP_RSA_KEY", ERR_LIB_SSL, SSL_R_MISSING_EXPORT_TMP_RSA_KEY}, + #else + {"MISSING_EXPORT_TMP_RSA_KEY", ERR_LIB_SSL, 167}, + #endif + #ifdef SSL_R_MISSING_RSA_CERTIFICATE + {"MISSING_RSA_CERTIFICATE", ERR_LIB_SSL, SSL_R_MISSING_RSA_CERTIFICATE}, + #else + {"MISSING_RSA_CERTIFICATE", ERR_LIB_SSL, 168}, + #endif + #ifdef SSL_R_MISSING_RSA_ENCRYPTING_CERT + {"MISSING_RSA_ENCRYPTING_CERT", ERR_LIB_SSL, SSL_R_MISSING_RSA_ENCRYPTING_CERT}, + #else + {"MISSING_RSA_ENCRYPTING_CERT", ERR_LIB_SSL, 169}, + #endif + #ifdef SSL_R_MISSING_RSA_SIGNING_CERT + {"MISSING_RSA_SIGNING_CERT", ERR_LIB_SSL, SSL_R_MISSING_RSA_SIGNING_CERT}, + #else + {"MISSING_RSA_SIGNING_CERT", ERR_LIB_SSL, 170}, + #endif + #ifdef SSL_R_MISSING_TMP_DH_KEY + {"MISSING_TMP_DH_KEY", ERR_LIB_SSL, SSL_R_MISSING_TMP_DH_KEY}, + #else + {"MISSING_TMP_DH_KEY", ERR_LIB_SSL, 171}, + #endif + #ifdef SSL_R_MISSING_TMP_ECDH_KEY + {"MISSING_TMP_ECDH_KEY", ERR_LIB_SSL, SSL_R_MISSING_TMP_ECDH_KEY}, + #else + {"MISSING_TMP_ECDH_KEY", ERR_LIB_SSL, 311}, + #endif + #ifdef SSL_R_MISSING_TMP_RSA_KEY + {"MISSING_TMP_RSA_KEY", ERR_LIB_SSL, 
SSL_R_MISSING_TMP_RSA_KEY}, + #else + {"MISSING_TMP_RSA_KEY", ERR_LIB_SSL, 172}, + #endif + #ifdef SSL_R_MISSING_TMP_RSA_PKEY + {"MISSING_TMP_RSA_PKEY", ERR_LIB_SSL, SSL_R_MISSING_TMP_RSA_PKEY}, + #else + {"MISSING_TMP_RSA_PKEY", ERR_LIB_SSL, 173}, + #endif + #ifdef SSL_R_MISSING_VERIFY_MESSAGE + {"MISSING_VERIFY_MESSAGE", ERR_LIB_SSL, SSL_R_MISSING_VERIFY_MESSAGE}, + #else + {"MISSING_VERIFY_MESSAGE", ERR_LIB_SSL, 174}, + #endif + #ifdef SSL_R_NON_SSLV2_INITIAL_PACKET + {"NON_SSLV2_INITIAL_PACKET", ERR_LIB_SSL, SSL_R_NON_SSLV2_INITIAL_PACKET}, + #else + {"NON_SSLV2_INITIAL_PACKET", ERR_LIB_SSL, 175}, + #endif + #ifdef SSL_R_NO_CERTIFICATES_RETURNED + {"NO_CERTIFICATES_RETURNED", ERR_LIB_SSL, SSL_R_NO_CERTIFICATES_RETURNED}, + #else + {"NO_CERTIFICATES_RETURNED", ERR_LIB_SSL, 176}, + #endif + #ifdef SSL_R_NO_CERTIFICATE_ASSIGNED + {"NO_CERTIFICATE_ASSIGNED", ERR_LIB_SSL, SSL_R_NO_CERTIFICATE_ASSIGNED}, + #else + {"NO_CERTIFICATE_ASSIGNED", ERR_LIB_SSL, 177}, + #endif + #ifdef SSL_R_NO_CERTIFICATE_RETURNED + {"NO_CERTIFICATE_RETURNED", ERR_LIB_SSL, SSL_R_NO_CERTIFICATE_RETURNED}, + #else + {"NO_CERTIFICATE_RETURNED", ERR_LIB_SSL, 178}, + #endif + #ifdef SSL_R_NO_CERTIFICATE_SET + {"NO_CERTIFICATE_SET", ERR_LIB_SSL, SSL_R_NO_CERTIFICATE_SET}, + #else + {"NO_CERTIFICATE_SET", ERR_LIB_SSL, 179}, + #endif + #ifdef SSL_R_NO_CERTIFICATE_SPECIFIED + {"NO_CERTIFICATE_SPECIFIED", ERR_LIB_SSL, SSL_R_NO_CERTIFICATE_SPECIFIED}, + #else + {"NO_CERTIFICATE_SPECIFIED", ERR_LIB_SSL, 180}, + #endif + #ifdef SSL_R_NO_CIPHERS_AVAILABLE + {"NO_CIPHERS_AVAILABLE", ERR_LIB_SSL, SSL_R_NO_CIPHERS_AVAILABLE}, + #else + {"NO_CIPHERS_AVAILABLE", ERR_LIB_SSL, 181}, + #endif + #ifdef SSL_R_NO_CIPHERS_PASSED + {"NO_CIPHERS_PASSED", ERR_LIB_SSL, SSL_R_NO_CIPHERS_PASSED}, + #else + {"NO_CIPHERS_PASSED", ERR_LIB_SSL, 182}, + #endif + #ifdef SSL_R_NO_CIPHERS_SPECIFIED + {"NO_CIPHERS_SPECIFIED", ERR_LIB_SSL, SSL_R_NO_CIPHERS_SPECIFIED}, + #else + {"NO_CIPHERS_SPECIFIED", ERR_LIB_SSL, 183}, + #endif + #ifdef SSL_R_NO_CIPHER_LIST + {"NO_CIPHER_LIST", ERR_LIB_SSL, SSL_R_NO_CIPHER_LIST}, + #else + {"NO_CIPHER_LIST", ERR_LIB_SSL, 184}, + #endif + #ifdef SSL_R_NO_CIPHER_MATCH + {"NO_CIPHER_MATCH", ERR_LIB_SSL, SSL_R_NO_CIPHER_MATCH}, + #else + {"NO_CIPHER_MATCH", ERR_LIB_SSL, 185}, + #endif + #ifdef SSL_R_NO_CLIENT_CERT_METHOD + {"NO_CLIENT_CERT_METHOD", ERR_LIB_SSL, SSL_R_NO_CLIENT_CERT_METHOD}, + #else + {"NO_CLIENT_CERT_METHOD", ERR_LIB_SSL, 331}, + #endif + #ifdef SSL_R_NO_CLIENT_CERT_RECEIVED + {"NO_CLIENT_CERT_RECEIVED", ERR_LIB_SSL, SSL_R_NO_CLIENT_CERT_RECEIVED}, + #else + {"NO_CLIENT_CERT_RECEIVED", ERR_LIB_SSL, 186}, + #endif + #ifdef SSL_R_NO_COMPRESSION_SPECIFIED + {"NO_COMPRESSION_SPECIFIED", ERR_LIB_SSL, SSL_R_NO_COMPRESSION_SPECIFIED}, + #else + {"NO_COMPRESSION_SPECIFIED", ERR_LIB_SSL, 187}, + #endif + #ifdef SSL_R_NO_GOST_CERTIFICATE_SENT_BY_PEER + {"NO_GOST_CERTIFICATE_SENT_BY_PEER", ERR_LIB_SSL, SSL_R_NO_GOST_CERTIFICATE_SENT_BY_PEER}, + #else + {"NO_GOST_CERTIFICATE_SENT_BY_PEER", ERR_LIB_SSL, 330}, + #endif + #ifdef SSL_R_NO_METHOD_SPECIFIED + {"NO_METHOD_SPECIFIED", ERR_LIB_SSL, SSL_R_NO_METHOD_SPECIFIED}, + #else + {"NO_METHOD_SPECIFIED", ERR_LIB_SSL, 188}, + #endif + #ifdef SSL_R_NO_PRIVATEKEY + {"NO_PRIVATEKEY", ERR_LIB_SSL, SSL_R_NO_PRIVATEKEY}, + #else + {"NO_PRIVATEKEY", ERR_LIB_SSL, 189}, + #endif + #ifdef SSL_R_NO_PRIVATE_KEY_ASSIGNED + {"NO_PRIVATE_KEY_ASSIGNED", ERR_LIB_SSL, SSL_R_NO_PRIVATE_KEY_ASSIGNED}, + #else + {"NO_PRIVATE_KEY_ASSIGNED", ERR_LIB_SSL, 190}, + #endif + #ifdef 
SSL_R_NO_PROTOCOLS_AVAILABLE + {"NO_PROTOCOLS_AVAILABLE", ERR_LIB_SSL, SSL_R_NO_PROTOCOLS_AVAILABLE}, + #else + {"NO_PROTOCOLS_AVAILABLE", ERR_LIB_SSL, 191}, + #endif + #ifdef SSL_R_NO_PUBLICKEY + {"NO_PUBLICKEY", ERR_LIB_SSL, SSL_R_NO_PUBLICKEY}, + #else + {"NO_PUBLICKEY", ERR_LIB_SSL, 192}, + #endif + #ifdef SSL_R_NO_RENEGOTIATION + {"NO_RENEGOTIATION", ERR_LIB_SSL, SSL_R_NO_RENEGOTIATION}, + #else + {"NO_RENEGOTIATION", ERR_LIB_SSL, 339}, + #endif + #ifdef SSL_R_NO_REQUIRED_DIGEST + {"NO_REQUIRED_DIGEST", ERR_LIB_SSL, SSL_R_NO_REQUIRED_DIGEST}, + #else + {"NO_REQUIRED_DIGEST", ERR_LIB_SSL, 324}, + #endif + #ifdef SSL_R_NO_SHARED_CIPHER + {"NO_SHARED_CIPHER", ERR_LIB_SSL, SSL_R_NO_SHARED_CIPHER}, + #else + {"NO_SHARED_CIPHER", ERR_LIB_SSL, 193}, + #endif + #ifdef SSL_R_NO_VERIFY_CALLBACK + {"NO_VERIFY_CALLBACK", ERR_LIB_SSL, SSL_R_NO_VERIFY_CALLBACK}, + #else + {"NO_VERIFY_CALLBACK", ERR_LIB_SSL, 194}, + #endif + #ifdef SSL_R_NULL_SSL_CTX + {"NULL_SSL_CTX", ERR_LIB_SSL, SSL_R_NULL_SSL_CTX}, + #else + {"NULL_SSL_CTX", ERR_LIB_SSL, 195}, + #endif + #ifdef SSL_R_NULL_SSL_METHOD_PASSED + {"NULL_SSL_METHOD_PASSED", ERR_LIB_SSL, SSL_R_NULL_SSL_METHOD_PASSED}, + #else + {"NULL_SSL_METHOD_PASSED", ERR_LIB_SSL, 196}, + #endif + #ifdef SSL_R_OLD_SESSION_CIPHER_NOT_RETURNED + {"OLD_SESSION_CIPHER_NOT_RETURNED", ERR_LIB_SSL, SSL_R_OLD_SESSION_CIPHER_NOT_RETURNED}, + #else + {"OLD_SESSION_CIPHER_NOT_RETURNED", ERR_LIB_SSL, 197}, + #endif + #ifdef SSL_R_OLD_SESSION_COMPRESSION_ALGORITHM_NOT_RETURNED + {"OLD_SESSION_COMPRESSION_ALGORITHM_NOT_RETURNED", ERR_LIB_SSL, SSL_R_OLD_SESSION_COMPRESSION_ALGORITHM_NOT_RETURNED}, + #else + {"OLD_SESSION_COMPRESSION_ALGORITHM_NOT_RETURNED", ERR_LIB_SSL, 344}, + #endif + #ifdef SSL_R_ONLY_TLS_ALLOWED_IN_FIPS_MODE + {"ONLY_TLS_ALLOWED_IN_FIPS_MODE", ERR_LIB_SSL, SSL_R_ONLY_TLS_ALLOWED_IN_FIPS_MODE}, + #else + {"ONLY_TLS_ALLOWED_IN_FIPS_MODE", ERR_LIB_SSL, 297}, + #endif + #ifdef SSL_R_OPAQUE_PRF_INPUT_TOO_LONG + {"OPAQUE_PRF_INPUT_TOO_LONG", ERR_LIB_SSL, SSL_R_OPAQUE_PRF_INPUT_TOO_LONG}, + #else + {"OPAQUE_PRF_INPUT_TOO_LONG", ERR_LIB_SSL, 327}, + #endif + #ifdef SSL_R_PACKET_LENGTH_TOO_LONG + {"PACKET_LENGTH_TOO_LONG", ERR_LIB_SSL, SSL_R_PACKET_LENGTH_TOO_LONG}, + #else + {"PACKET_LENGTH_TOO_LONG", ERR_LIB_SSL, 198}, + #endif + #ifdef SSL_R_PARSE_TLSEXT + {"PARSE_TLSEXT", ERR_LIB_SSL, SSL_R_PARSE_TLSEXT}, + #else + {"PARSE_TLSEXT", ERR_LIB_SSL, 227}, + #endif + #ifdef SSL_R_PATH_TOO_LONG + {"PATH_TOO_LONG", ERR_LIB_SSL, SSL_R_PATH_TOO_LONG}, + #else + {"PATH_TOO_LONG", ERR_LIB_SSL, 270}, + #endif + #ifdef SSL_R_PEER_DID_NOT_RETURN_A_CERTIFICATE + {"PEER_DID_NOT_RETURN_A_CERTIFICATE", ERR_LIB_SSL, SSL_R_PEER_DID_NOT_RETURN_A_CERTIFICATE}, + #else + {"PEER_DID_NOT_RETURN_A_CERTIFICATE", ERR_LIB_SSL, 199}, + #endif + #ifdef SSL_R_PEER_ERROR + {"PEER_ERROR", ERR_LIB_SSL, SSL_R_PEER_ERROR}, + #else + {"PEER_ERROR", ERR_LIB_SSL, 200}, + #endif + #ifdef SSL_R_PEER_ERROR_CERTIFICATE + {"PEER_ERROR_CERTIFICATE", ERR_LIB_SSL, SSL_R_PEER_ERROR_CERTIFICATE}, + #else + {"PEER_ERROR_CERTIFICATE", ERR_LIB_SSL, 201}, + #endif + #ifdef SSL_R_PEER_ERROR_NO_CERTIFICATE + {"PEER_ERROR_NO_CERTIFICATE", ERR_LIB_SSL, SSL_R_PEER_ERROR_NO_CERTIFICATE}, + #else + {"PEER_ERROR_NO_CERTIFICATE", ERR_LIB_SSL, 202}, + #endif + #ifdef SSL_R_PEER_ERROR_NO_CIPHER + {"PEER_ERROR_NO_CIPHER", ERR_LIB_SSL, SSL_R_PEER_ERROR_NO_CIPHER}, + #else + {"PEER_ERROR_NO_CIPHER", ERR_LIB_SSL, 203}, + #endif + #ifdef SSL_R_PEER_ERROR_UNSUPPORTED_CERTIFICATE_TYPE + {"PEER_ERROR_UNSUPPORTED_CERTIFICATE_TYPE", 
ERR_LIB_SSL, SSL_R_PEER_ERROR_UNSUPPORTED_CERTIFICATE_TYPE}, + #else + {"PEER_ERROR_UNSUPPORTED_CERTIFICATE_TYPE", ERR_LIB_SSL, 204}, + #endif + #ifdef SSL_R_PRE_MAC_LENGTH_TOO_LONG + {"PRE_MAC_LENGTH_TOO_LONG", ERR_LIB_SSL, SSL_R_PRE_MAC_LENGTH_TOO_LONG}, + #else + {"PRE_MAC_LENGTH_TOO_LONG", ERR_LIB_SSL, 205}, + #endif + #ifdef SSL_R_PROBLEMS_MAPPING_CIPHER_FUNCTIONS + {"PROBLEMS_MAPPING_CIPHER_FUNCTIONS", ERR_LIB_SSL, SSL_R_PROBLEMS_MAPPING_CIPHER_FUNCTIONS}, + #else + {"PROBLEMS_MAPPING_CIPHER_FUNCTIONS", ERR_LIB_SSL, 206}, + #endif + #ifdef SSL_R_PROTOCOL_IS_SHUTDOWN + {"PROTOCOL_IS_SHUTDOWN", ERR_LIB_SSL, SSL_R_PROTOCOL_IS_SHUTDOWN}, + #else + {"PROTOCOL_IS_SHUTDOWN", ERR_LIB_SSL, 207}, + #endif + #ifdef SSL_R_PSK_IDENTITY_NOT_FOUND + {"PSK_IDENTITY_NOT_FOUND", ERR_LIB_SSL, SSL_R_PSK_IDENTITY_NOT_FOUND}, + #else + {"PSK_IDENTITY_NOT_FOUND", ERR_LIB_SSL, 223}, + #endif + #ifdef SSL_R_PSK_NO_CLIENT_CB + {"PSK_NO_CLIENT_CB", ERR_LIB_SSL, SSL_R_PSK_NO_CLIENT_CB}, + #else + {"PSK_NO_CLIENT_CB", ERR_LIB_SSL, 224}, + #endif + #ifdef SSL_R_PSK_NO_SERVER_CB + {"PSK_NO_SERVER_CB", ERR_LIB_SSL, SSL_R_PSK_NO_SERVER_CB}, + #else + {"PSK_NO_SERVER_CB", ERR_LIB_SSL, 225}, + #endif + #ifdef SSL_R_PUBLIC_KEY_ENCRYPT_ERROR + {"PUBLIC_KEY_ENCRYPT_ERROR", ERR_LIB_SSL, SSL_R_PUBLIC_KEY_ENCRYPT_ERROR}, + #else + {"PUBLIC_KEY_ENCRYPT_ERROR", ERR_LIB_SSL, 208}, + #endif + #ifdef SSL_R_PUBLIC_KEY_IS_NOT_RSA + {"PUBLIC_KEY_IS_NOT_RSA", ERR_LIB_SSL, SSL_R_PUBLIC_KEY_IS_NOT_RSA}, + #else + {"PUBLIC_KEY_IS_NOT_RSA", ERR_LIB_SSL, 209}, + #endif + #ifdef SSL_R_PUBLIC_KEY_NOT_RSA + {"PUBLIC_KEY_NOT_RSA", ERR_LIB_SSL, SSL_R_PUBLIC_KEY_NOT_RSA}, + #else + {"PUBLIC_KEY_NOT_RSA", ERR_LIB_SSL, 210}, + #endif + #ifdef SSL_R_READ_BIO_NOT_SET + {"READ_BIO_NOT_SET", ERR_LIB_SSL, SSL_R_READ_BIO_NOT_SET}, + #else + {"READ_BIO_NOT_SET", ERR_LIB_SSL, 211}, + #endif + #ifdef SSL_R_READ_TIMEOUT_EXPIRED + {"READ_TIMEOUT_EXPIRED", ERR_LIB_SSL, SSL_R_READ_TIMEOUT_EXPIRED}, + #else + {"READ_TIMEOUT_EXPIRED", ERR_LIB_SSL, 312}, + #endif + #ifdef SSL_R_READ_WRONG_PACKET_TYPE + {"READ_WRONG_PACKET_TYPE", ERR_LIB_SSL, SSL_R_READ_WRONG_PACKET_TYPE}, + #else + {"READ_WRONG_PACKET_TYPE", ERR_LIB_SSL, 212}, + #endif + #ifdef SSL_R_RECORD_LENGTH_MISMATCH + {"RECORD_LENGTH_MISMATCH", ERR_LIB_SSL, SSL_R_RECORD_LENGTH_MISMATCH}, + #else + {"RECORD_LENGTH_MISMATCH", ERR_LIB_SSL, 213}, + #endif + #ifdef SSL_R_RECORD_TOO_LARGE + {"RECORD_TOO_LARGE", ERR_LIB_SSL, SSL_R_RECORD_TOO_LARGE}, + #else + {"RECORD_TOO_LARGE", ERR_LIB_SSL, 214}, + #endif + #ifdef SSL_R_RECORD_TOO_SMALL + {"RECORD_TOO_SMALL", ERR_LIB_SSL, SSL_R_RECORD_TOO_SMALL}, + #else + {"RECORD_TOO_SMALL", ERR_LIB_SSL, 298}, + #endif + #ifdef SSL_R_RENEGOTIATE_EXT_TOO_LONG + {"RENEGOTIATE_EXT_TOO_LONG", ERR_LIB_SSL, SSL_R_RENEGOTIATE_EXT_TOO_LONG}, + #else + {"RENEGOTIATE_EXT_TOO_LONG", ERR_LIB_SSL, 335}, + #endif + #ifdef SSL_R_RENEGOTIATION_ENCODING_ERR + {"RENEGOTIATION_ENCODING_ERR", ERR_LIB_SSL, SSL_R_RENEGOTIATION_ENCODING_ERR}, + #else + {"RENEGOTIATION_ENCODING_ERR", ERR_LIB_SSL, 336}, + #endif + #ifdef SSL_R_RENEGOTIATION_MISMATCH + {"RENEGOTIATION_MISMATCH", ERR_LIB_SSL, SSL_R_RENEGOTIATION_MISMATCH}, + #else + {"RENEGOTIATION_MISMATCH", ERR_LIB_SSL, 337}, + #endif + #ifdef SSL_R_REQUIRED_CIPHER_MISSING + {"REQUIRED_CIPHER_MISSING", ERR_LIB_SSL, SSL_R_REQUIRED_CIPHER_MISSING}, + #else + {"REQUIRED_CIPHER_MISSING", ERR_LIB_SSL, 215}, + #endif + #ifdef SSL_R_REQUIRED_COMPRESSSION_ALGORITHM_MISSING + {"REQUIRED_COMPRESSSION_ALGORITHM_MISSING", ERR_LIB_SSL, 
SSL_R_REQUIRED_COMPRESSSION_ALGORITHM_MISSING}, + #else + {"REQUIRED_COMPRESSSION_ALGORITHM_MISSING", ERR_LIB_SSL, 342}, + #endif + #ifdef SSL_R_REUSE_CERT_LENGTH_NOT_ZERO + {"REUSE_CERT_LENGTH_NOT_ZERO", ERR_LIB_SSL, SSL_R_REUSE_CERT_LENGTH_NOT_ZERO}, + #else + {"REUSE_CERT_LENGTH_NOT_ZERO", ERR_LIB_SSL, 216}, + #endif + #ifdef SSL_R_REUSE_CERT_TYPE_NOT_ZERO + {"REUSE_CERT_TYPE_NOT_ZERO", ERR_LIB_SSL, SSL_R_REUSE_CERT_TYPE_NOT_ZERO}, + #else + {"REUSE_CERT_TYPE_NOT_ZERO", ERR_LIB_SSL, 217}, + #endif + #ifdef SSL_R_REUSE_CIPHER_LIST_NOT_ZERO + {"REUSE_CIPHER_LIST_NOT_ZERO", ERR_LIB_SSL, SSL_R_REUSE_CIPHER_LIST_NOT_ZERO}, + #else + {"REUSE_CIPHER_LIST_NOT_ZERO", ERR_LIB_SSL, 218}, + #endif + #ifdef SSL_R_SCSV_RECEIVED_WHEN_RENEGOTIATING + {"SCSV_RECEIVED_WHEN_RENEGOTIATING", ERR_LIB_SSL, SSL_R_SCSV_RECEIVED_WHEN_RENEGOTIATING}, + #else + {"SCSV_RECEIVED_WHEN_RENEGOTIATING", ERR_LIB_SSL, 345}, + #endif + #ifdef SSL_R_SERVERHELLO_TLSEXT + {"SERVERHELLO_TLSEXT", ERR_LIB_SSL, SSL_R_SERVERHELLO_TLSEXT}, + #else + {"SERVERHELLO_TLSEXT", ERR_LIB_SSL, 275}, + #endif + #ifdef SSL_R_SESSION_ID_CONTEXT_UNINITIALIZED + {"SESSION_ID_CONTEXT_UNINITIALIZED", ERR_LIB_SSL, SSL_R_SESSION_ID_CONTEXT_UNINITIALIZED}, + #else + {"SESSION_ID_CONTEXT_UNINITIALIZED", ERR_LIB_SSL, 277}, + #endif + #ifdef SSL_R_SHORT_READ + {"SHORT_READ", ERR_LIB_SSL, SSL_R_SHORT_READ}, + #else + {"SHORT_READ", ERR_LIB_SSL, 219}, + #endif + #ifdef SSL_R_SIGNATURE_FOR_NON_SIGNING_CERTIFICATE + {"SIGNATURE_FOR_NON_SIGNING_CERTIFICATE", ERR_LIB_SSL, SSL_R_SIGNATURE_FOR_NON_SIGNING_CERTIFICATE}, + #else + {"SIGNATURE_FOR_NON_SIGNING_CERTIFICATE", ERR_LIB_SSL, 220}, + #endif + #ifdef SSL_R_SSL23_DOING_SESSION_ID_REUSE + {"SSL23_DOING_SESSION_ID_REUSE", ERR_LIB_SSL, SSL_R_SSL23_DOING_SESSION_ID_REUSE}, + #else + {"SSL23_DOING_SESSION_ID_REUSE", ERR_LIB_SSL, 221}, + #endif + #ifdef SSL_R_SSL2_CONNECTION_ID_TOO_LONG + {"SSL2_CONNECTION_ID_TOO_LONG", ERR_LIB_SSL, SSL_R_SSL2_CONNECTION_ID_TOO_LONG}, + #else + {"SSL2_CONNECTION_ID_TOO_LONG", ERR_LIB_SSL, 299}, + #endif + #ifdef SSL_R_SSL3_EXT_INVALID_ECPOINTFORMAT + {"SSL3_EXT_INVALID_ECPOINTFORMAT", ERR_LIB_SSL, SSL_R_SSL3_EXT_INVALID_ECPOINTFORMAT}, + #else + {"SSL3_EXT_INVALID_ECPOINTFORMAT", ERR_LIB_SSL, 321}, + #endif + #ifdef SSL_R_SSL3_EXT_INVALID_SERVERNAME + {"SSL3_EXT_INVALID_SERVERNAME", ERR_LIB_SSL, SSL_R_SSL3_EXT_INVALID_SERVERNAME}, + #else + {"SSL3_EXT_INVALID_SERVERNAME", ERR_LIB_SSL, 319}, + #endif + #ifdef SSL_R_SSL3_EXT_INVALID_SERVERNAME_TYPE + {"SSL3_EXT_INVALID_SERVERNAME_TYPE", ERR_LIB_SSL, SSL_R_SSL3_EXT_INVALID_SERVERNAME_TYPE}, + #else + {"SSL3_EXT_INVALID_SERVERNAME_TYPE", ERR_LIB_SSL, 320}, + #endif + #ifdef SSL_R_SSL3_SESSION_ID_TOO_LONG + {"SSL3_SESSION_ID_TOO_LONG", ERR_LIB_SSL, SSL_R_SSL3_SESSION_ID_TOO_LONG}, + #else + {"SSL3_SESSION_ID_TOO_LONG", ERR_LIB_SSL, 300}, + #endif + #ifdef SSL_R_SSL3_SESSION_ID_TOO_SHORT + {"SSL3_SESSION_ID_TOO_SHORT", ERR_LIB_SSL, SSL_R_SSL3_SESSION_ID_TOO_SHORT}, + #else + {"SSL3_SESSION_ID_TOO_SHORT", ERR_LIB_SSL, 222}, + #endif + #ifdef SSL_R_SSLV3_ALERT_BAD_CERTIFICATE + {"SSLV3_ALERT_BAD_CERTIFICATE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_BAD_CERTIFICATE}, + #else + {"SSLV3_ALERT_BAD_CERTIFICATE", ERR_LIB_SSL, 1042}, + #endif + #ifdef SSL_R_SSLV3_ALERT_BAD_RECORD_MAC + {"SSLV3_ALERT_BAD_RECORD_MAC", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_BAD_RECORD_MAC}, + #else + {"SSLV3_ALERT_BAD_RECORD_MAC", ERR_LIB_SSL, 1020}, + #endif + #ifdef SSL_R_SSLV3_ALERT_CERTIFICATE_EXPIRED + {"SSLV3_ALERT_CERTIFICATE_EXPIRED", ERR_LIB_SSL, 
SSL_R_SSLV3_ALERT_CERTIFICATE_EXPIRED}, + #else + {"SSLV3_ALERT_CERTIFICATE_EXPIRED", ERR_LIB_SSL, 1045}, + #endif + #ifdef SSL_R_SSLV3_ALERT_CERTIFICATE_REVOKED + {"SSLV3_ALERT_CERTIFICATE_REVOKED", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_CERTIFICATE_REVOKED}, + #else + {"SSLV3_ALERT_CERTIFICATE_REVOKED", ERR_LIB_SSL, 1044}, + #endif + #ifdef SSL_R_SSLV3_ALERT_CERTIFICATE_UNKNOWN + {"SSLV3_ALERT_CERTIFICATE_UNKNOWN", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_CERTIFICATE_UNKNOWN}, + #else + {"SSLV3_ALERT_CERTIFICATE_UNKNOWN", ERR_LIB_SSL, 1046}, + #endif + #ifdef SSL_R_SSLV3_ALERT_DECOMPRESSION_FAILURE + {"SSLV3_ALERT_DECOMPRESSION_FAILURE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_DECOMPRESSION_FAILURE}, + #else + {"SSLV3_ALERT_DECOMPRESSION_FAILURE", ERR_LIB_SSL, 1030}, + #endif + #ifdef SSL_R_SSLV3_ALERT_HANDSHAKE_FAILURE + {"SSLV3_ALERT_HANDSHAKE_FAILURE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_HANDSHAKE_FAILURE}, + #else + {"SSLV3_ALERT_HANDSHAKE_FAILURE", ERR_LIB_SSL, 1040}, + #endif + #ifdef SSL_R_SSLV3_ALERT_ILLEGAL_PARAMETER + {"SSLV3_ALERT_ILLEGAL_PARAMETER", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_ILLEGAL_PARAMETER}, + #else + {"SSLV3_ALERT_ILLEGAL_PARAMETER", ERR_LIB_SSL, 1047}, + #endif + #ifdef SSL_R_SSLV3_ALERT_NO_CERTIFICATE + {"SSLV3_ALERT_NO_CERTIFICATE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_NO_CERTIFICATE}, + #else + {"SSLV3_ALERT_NO_CERTIFICATE", ERR_LIB_SSL, 1041}, + #endif + #ifdef SSL_R_SSLV3_ALERT_UNEXPECTED_MESSAGE + {"SSLV3_ALERT_UNEXPECTED_MESSAGE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_UNEXPECTED_MESSAGE}, + #else + {"SSLV3_ALERT_UNEXPECTED_MESSAGE", ERR_LIB_SSL, 1010}, + #endif + #ifdef SSL_R_SSLV3_ALERT_UNSUPPORTED_CERTIFICATE + {"SSLV3_ALERT_UNSUPPORTED_CERTIFICATE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_UNSUPPORTED_CERTIFICATE}, + #else + {"SSLV3_ALERT_UNSUPPORTED_CERTIFICATE", ERR_LIB_SSL, 1043}, + #endif + #ifdef SSL_R_SSL_CTX_HAS_NO_DEFAULT_SSL_VERSION + {"SSL_CTX_HAS_NO_DEFAULT_SSL_VERSION", ERR_LIB_SSL, SSL_R_SSL_CTX_HAS_NO_DEFAULT_SSL_VERSION}, + #else + {"SSL_CTX_HAS_NO_DEFAULT_SSL_VERSION", ERR_LIB_SSL, 228}, + #endif + #ifdef SSL_R_SSL_HANDSHAKE_FAILURE + {"SSL_HANDSHAKE_FAILURE", ERR_LIB_SSL, SSL_R_SSL_HANDSHAKE_FAILURE}, + #else + {"SSL_HANDSHAKE_FAILURE", ERR_LIB_SSL, 229}, + #endif + #ifdef SSL_R_SSL_LIBRARY_HAS_NO_CIPHERS + {"SSL_LIBRARY_HAS_NO_CIPHERS", ERR_LIB_SSL, SSL_R_SSL_LIBRARY_HAS_NO_CIPHERS}, + #else + {"SSL_LIBRARY_HAS_NO_CIPHERS", ERR_LIB_SSL, 230}, + #endif + #ifdef SSL_R_SSL_SESSION_ID_CALLBACK_FAILED + {"SSL_SESSION_ID_CALLBACK_FAILED", ERR_LIB_SSL, SSL_R_SSL_SESSION_ID_CALLBACK_FAILED}, + #else + {"SSL_SESSION_ID_CALLBACK_FAILED", ERR_LIB_SSL, 301}, + #endif + #ifdef SSL_R_SSL_SESSION_ID_CONFLICT + {"SSL_SESSION_ID_CONFLICT", ERR_LIB_SSL, SSL_R_SSL_SESSION_ID_CONFLICT}, + #else + {"SSL_SESSION_ID_CONFLICT", ERR_LIB_SSL, 302}, + #endif + #ifdef SSL_R_SSL_SESSION_ID_CONTEXT_TOO_LONG + {"SSL_SESSION_ID_CONTEXT_TOO_LONG", ERR_LIB_SSL, SSL_R_SSL_SESSION_ID_CONTEXT_TOO_LONG}, + #else + {"SSL_SESSION_ID_CONTEXT_TOO_LONG", ERR_LIB_SSL, 273}, + #endif + #ifdef SSL_R_SSL_SESSION_ID_HAS_BAD_LENGTH + {"SSL_SESSION_ID_HAS_BAD_LENGTH", ERR_LIB_SSL, SSL_R_SSL_SESSION_ID_HAS_BAD_LENGTH}, + #else + {"SSL_SESSION_ID_HAS_BAD_LENGTH", ERR_LIB_SSL, 303}, + #endif + #ifdef SSL_R_SSL_SESSION_ID_IS_DIFFERENT + {"SSL_SESSION_ID_IS_DIFFERENT", ERR_LIB_SSL, SSL_R_SSL_SESSION_ID_IS_DIFFERENT}, + #else + {"SSL_SESSION_ID_IS_DIFFERENT", ERR_LIB_SSL, 231}, + #endif + #ifdef SSL_R_TLSV1_ALERT_ACCESS_DENIED + {"TLSV1_ALERT_ACCESS_DENIED", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_ACCESS_DENIED}, + #else + {"TLSV1_ALERT_ACCESS_DENIED", 
ERR_LIB_SSL, 1049}, + #endif + #ifdef SSL_R_TLSV1_ALERT_DECODE_ERROR + {"TLSV1_ALERT_DECODE_ERROR", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_DECODE_ERROR}, + #else + {"TLSV1_ALERT_DECODE_ERROR", ERR_LIB_SSL, 1050}, + #endif + #ifdef SSL_R_TLSV1_ALERT_DECRYPTION_FAILED + {"TLSV1_ALERT_DECRYPTION_FAILED", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_DECRYPTION_FAILED}, + #else + {"TLSV1_ALERT_DECRYPTION_FAILED", ERR_LIB_SSL, 1021}, + #endif + #ifdef SSL_R_TLSV1_ALERT_DECRYPT_ERROR + {"TLSV1_ALERT_DECRYPT_ERROR", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_DECRYPT_ERROR}, + #else + {"TLSV1_ALERT_DECRYPT_ERROR", ERR_LIB_SSL, 1051}, + #endif + #ifdef SSL_R_TLSV1_ALERT_EXPORT_RESTRICTION + {"TLSV1_ALERT_EXPORT_RESTRICTION", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_EXPORT_RESTRICTION}, + #else + {"TLSV1_ALERT_EXPORT_RESTRICTION", ERR_LIB_SSL, 1060}, + #endif + #ifdef SSL_R_TLSV1_ALERT_INSUFFICIENT_SECURITY + {"TLSV1_ALERT_INSUFFICIENT_SECURITY", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_INSUFFICIENT_SECURITY}, + #else + {"TLSV1_ALERT_INSUFFICIENT_SECURITY", ERR_LIB_SSL, 1071}, + #endif + #ifdef SSL_R_TLSV1_ALERT_INTERNAL_ERROR + {"TLSV1_ALERT_INTERNAL_ERROR", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_INTERNAL_ERROR}, + #else + {"TLSV1_ALERT_INTERNAL_ERROR", ERR_LIB_SSL, 1080}, + #endif + #ifdef SSL_R_TLSV1_ALERT_NO_RENEGOTIATION + {"TLSV1_ALERT_NO_RENEGOTIATION", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_NO_RENEGOTIATION}, + #else + {"TLSV1_ALERT_NO_RENEGOTIATION", ERR_LIB_SSL, 1100}, + #endif + #ifdef SSL_R_TLSV1_ALERT_PROTOCOL_VERSION + {"TLSV1_ALERT_PROTOCOL_VERSION", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_PROTOCOL_VERSION}, + #else + {"TLSV1_ALERT_PROTOCOL_VERSION", ERR_LIB_SSL, 1070}, + #endif + #ifdef SSL_R_TLSV1_ALERT_RECORD_OVERFLOW + {"TLSV1_ALERT_RECORD_OVERFLOW", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_RECORD_OVERFLOW}, + #else + {"TLSV1_ALERT_RECORD_OVERFLOW", ERR_LIB_SSL, 1022}, + #endif + #ifdef SSL_R_TLSV1_ALERT_UNKNOWN_CA + {"TLSV1_ALERT_UNKNOWN_CA", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_UNKNOWN_CA}, + #else + {"TLSV1_ALERT_UNKNOWN_CA", ERR_LIB_SSL, 1048}, + #endif + #ifdef SSL_R_TLSV1_ALERT_USER_CANCELLED + {"TLSV1_ALERT_USER_CANCELLED", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_USER_CANCELLED}, + #else + {"TLSV1_ALERT_USER_CANCELLED", ERR_LIB_SSL, 1090}, + #endif + #ifdef SSL_R_TLSV1_BAD_CERTIFICATE_HASH_VALUE + {"TLSV1_BAD_CERTIFICATE_HASH_VALUE", ERR_LIB_SSL, SSL_R_TLSV1_BAD_CERTIFICATE_HASH_VALUE}, + #else + {"TLSV1_BAD_CERTIFICATE_HASH_VALUE", ERR_LIB_SSL, 1114}, + #endif + #ifdef SSL_R_TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE + {"TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE", ERR_LIB_SSL, SSL_R_TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE}, + #else + {"TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE", ERR_LIB_SSL, 1113}, + #endif + #ifdef SSL_R_TLSV1_CERTIFICATE_UNOBTAINABLE + {"TLSV1_CERTIFICATE_UNOBTAINABLE", ERR_LIB_SSL, SSL_R_TLSV1_CERTIFICATE_UNOBTAINABLE}, + #else + {"TLSV1_CERTIFICATE_UNOBTAINABLE", ERR_LIB_SSL, 1111}, + #endif + #ifdef SSL_R_TLSV1_UNRECOGNIZED_NAME + {"TLSV1_UNRECOGNIZED_NAME", ERR_LIB_SSL, SSL_R_TLSV1_UNRECOGNIZED_NAME}, + #else + {"TLSV1_UNRECOGNIZED_NAME", ERR_LIB_SSL, 1112}, + #endif + #ifdef SSL_R_TLSV1_UNSUPPORTED_EXTENSION + {"TLSV1_UNSUPPORTED_EXTENSION", ERR_LIB_SSL, SSL_R_TLSV1_UNSUPPORTED_EXTENSION}, + #else + {"TLSV1_UNSUPPORTED_EXTENSION", ERR_LIB_SSL, 1110}, + #endif + #ifdef SSL_R_TLS_CLIENT_CERT_REQ_WITH_ANON_CIPHER + {"TLS_CLIENT_CERT_REQ_WITH_ANON_CIPHER", ERR_LIB_SSL, SSL_R_TLS_CLIENT_CERT_REQ_WITH_ANON_CIPHER}, + #else + {"TLS_CLIENT_CERT_REQ_WITH_ANON_CIPHER", ERR_LIB_SSL, 232}, + #endif + #ifdef SSL_R_TLS_INVALID_ECPOINTFORMAT_LIST + 
{"TLS_INVALID_ECPOINTFORMAT_LIST", ERR_LIB_SSL, SSL_R_TLS_INVALID_ECPOINTFORMAT_LIST}, + #else + {"TLS_INVALID_ECPOINTFORMAT_LIST", ERR_LIB_SSL, 157}, + #endif + #ifdef SSL_R_TLS_PEER_DID_NOT_RESPOND_WITH_CERTIFICATE_LIST + {"TLS_PEER_DID_NOT_RESPOND_WITH_CERTIFICATE_LIST", ERR_LIB_SSL, SSL_R_TLS_PEER_DID_NOT_RESPOND_WITH_CERTIFICATE_LIST}, + #else + {"TLS_PEER_DID_NOT_RESPOND_WITH_CERTIFICATE_LIST", ERR_LIB_SSL, 233}, + #endif + #ifdef SSL_R_TLS_RSA_ENCRYPTED_VALUE_LENGTH_IS_WRONG + {"TLS_RSA_ENCRYPTED_VALUE_LENGTH_IS_WRONG", ERR_LIB_SSL, SSL_R_TLS_RSA_ENCRYPTED_VALUE_LENGTH_IS_WRONG}, + #else + {"TLS_RSA_ENCRYPTED_VALUE_LENGTH_IS_WRONG", ERR_LIB_SSL, 234}, + #endif + #ifdef SSL_R_TRIED_TO_USE_UNSUPPORTED_CIPHER + {"TRIED_TO_USE_UNSUPPORTED_CIPHER", ERR_LIB_SSL, SSL_R_TRIED_TO_USE_UNSUPPORTED_CIPHER}, + #else + {"TRIED_TO_USE_UNSUPPORTED_CIPHER", ERR_LIB_SSL, 235}, + #endif + #ifdef SSL_R_UNABLE_TO_DECODE_DH_CERTS + {"UNABLE_TO_DECODE_DH_CERTS", ERR_LIB_SSL, SSL_R_UNABLE_TO_DECODE_DH_CERTS}, + #else + {"UNABLE_TO_DECODE_DH_CERTS", ERR_LIB_SSL, 236}, + #endif + #ifdef SSL_R_UNABLE_TO_DECODE_ECDH_CERTS + {"UNABLE_TO_DECODE_ECDH_CERTS", ERR_LIB_SSL, SSL_R_UNABLE_TO_DECODE_ECDH_CERTS}, + #else + {"UNABLE_TO_DECODE_ECDH_CERTS", ERR_LIB_SSL, 313}, + #endif + #ifdef SSL_R_UNABLE_TO_EXTRACT_PUBLIC_KEY + {"UNABLE_TO_EXTRACT_PUBLIC_KEY", ERR_LIB_SSL, SSL_R_UNABLE_TO_EXTRACT_PUBLIC_KEY}, + #else + {"UNABLE_TO_EXTRACT_PUBLIC_KEY", ERR_LIB_SSL, 237}, + #endif + #ifdef SSL_R_UNABLE_TO_FIND_DH_PARAMETERS + {"UNABLE_TO_FIND_DH_PARAMETERS", ERR_LIB_SSL, SSL_R_UNABLE_TO_FIND_DH_PARAMETERS}, + #else + {"UNABLE_TO_FIND_DH_PARAMETERS", ERR_LIB_SSL, 238}, + #endif + #ifdef SSL_R_UNABLE_TO_FIND_ECDH_PARAMETERS + {"UNABLE_TO_FIND_ECDH_PARAMETERS", ERR_LIB_SSL, SSL_R_UNABLE_TO_FIND_ECDH_PARAMETERS}, + #else + {"UNABLE_TO_FIND_ECDH_PARAMETERS", ERR_LIB_SSL, 314}, + #endif + #ifdef SSL_R_UNABLE_TO_FIND_PUBLIC_KEY_PARAMETERS + {"UNABLE_TO_FIND_PUBLIC_KEY_PARAMETERS", ERR_LIB_SSL, SSL_R_UNABLE_TO_FIND_PUBLIC_KEY_PARAMETERS}, + #else + {"UNABLE_TO_FIND_PUBLIC_KEY_PARAMETERS", ERR_LIB_SSL, 239}, + #endif + #ifdef SSL_R_UNABLE_TO_FIND_SSL_METHOD + {"UNABLE_TO_FIND_SSL_METHOD", ERR_LIB_SSL, SSL_R_UNABLE_TO_FIND_SSL_METHOD}, + #else + {"UNABLE_TO_FIND_SSL_METHOD", ERR_LIB_SSL, 240}, + #endif + #ifdef SSL_R_UNABLE_TO_LOAD_SSL2_MD5_ROUTINES + {"UNABLE_TO_LOAD_SSL2_MD5_ROUTINES", ERR_LIB_SSL, SSL_R_UNABLE_TO_LOAD_SSL2_MD5_ROUTINES}, + #else + {"UNABLE_TO_LOAD_SSL2_MD5_ROUTINES", ERR_LIB_SSL, 241}, + #endif + #ifdef SSL_R_UNABLE_TO_LOAD_SSL3_MD5_ROUTINES + {"UNABLE_TO_LOAD_SSL3_MD5_ROUTINES", ERR_LIB_SSL, SSL_R_UNABLE_TO_LOAD_SSL3_MD5_ROUTINES}, + #else + {"UNABLE_TO_LOAD_SSL3_MD5_ROUTINES", ERR_LIB_SSL, 242}, + #endif + #ifdef SSL_R_UNABLE_TO_LOAD_SSL3_SHA1_ROUTINES + {"UNABLE_TO_LOAD_SSL3_SHA1_ROUTINES", ERR_LIB_SSL, SSL_R_UNABLE_TO_LOAD_SSL3_SHA1_ROUTINES}, + #else + {"UNABLE_TO_LOAD_SSL3_SHA1_ROUTINES", ERR_LIB_SSL, 243}, + #endif + #ifdef SSL_R_UNEXPECTED_MESSAGE + {"UNEXPECTED_MESSAGE", ERR_LIB_SSL, SSL_R_UNEXPECTED_MESSAGE}, + #else + {"UNEXPECTED_MESSAGE", ERR_LIB_SSL, 244}, + #endif + #ifdef SSL_R_UNEXPECTED_RECORD + {"UNEXPECTED_RECORD", ERR_LIB_SSL, SSL_R_UNEXPECTED_RECORD}, + #else + {"UNEXPECTED_RECORD", ERR_LIB_SSL, 245}, + #endif + #ifdef SSL_R_UNINITIALIZED + {"UNINITIALIZED", ERR_LIB_SSL, SSL_R_UNINITIALIZED}, + #else + {"UNINITIALIZED", ERR_LIB_SSL, 276}, + #endif + #ifdef SSL_R_UNKNOWN_ALERT_TYPE + {"UNKNOWN_ALERT_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_ALERT_TYPE}, + #else + {"UNKNOWN_ALERT_TYPE", ERR_LIB_SSL, 
246}, + #endif + #ifdef SSL_R_UNKNOWN_CERTIFICATE_TYPE + {"UNKNOWN_CERTIFICATE_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_CERTIFICATE_TYPE}, + #else + {"UNKNOWN_CERTIFICATE_TYPE", ERR_LIB_SSL, 247}, + #endif + #ifdef SSL_R_UNKNOWN_CIPHER_RETURNED + {"UNKNOWN_CIPHER_RETURNED", ERR_LIB_SSL, SSL_R_UNKNOWN_CIPHER_RETURNED}, + #else + {"UNKNOWN_CIPHER_RETURNED", ERR_LIB_SSL, 248}, + #endif + #ifdef SSL_R_UNKNOWN_CIPHER_TYPE + {"UNKNOWN_CIPHER_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_CIPHER_TYPE}, + #else + {"UNKNOWN_CIPHER_TYPE", ERR_LIB_SSL, 249}, + #endif + #ifdef SSL_R_UNKNOWN_KEY_EXCHANGE_TYPE + {"UNKNOWN_KEY_EXCHANGE_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_KEY_EXCHANGE_TYPE}, + #else + {"UNKNOWN_KEY_EXCHANGE_TYPE", ERR_LIB_SSL, 250}, + #endif + #ifdef SSL_R_UNKNOWN_PKEY_TYPE + {"UNKNOWN_PKEY_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_PKEY_TYPE}, + #else + {"UNKNOWN_PKEY_TYPE", ERR_LIB_SSL, 251}, + #endif + #ifdef SSL_R_UNKNOWN_PROTOCOL + {"UNKNOWN_PROTOCOL", ERR_LIB_SSL, SSL_R_UNKNOWN_PROTOCOL}, + #else + {"UNKNOWN_PROTOCOL", ERR_LIB_SSL, 252}, + #endif + #ifdef SSL_R_UNKNOWN_REMOTE_ERROR_TYPE + {"UNKNOWN_REMOTE_ERROR_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_REMOTE_ERROR_TYPE}, + #else + {"UNKNOWN_REMOTE_ERROR_TYPE", ERR_LIB_SSL, 253}, + #endif + #ifdef SSL_R_UNKNOWN_SSL_VERSION + {"UNKNOWN_SSL_VERSION", ERR_LIB_SSL, SSL_R_UNKNOWN_SSL_VERSION}, + #else + {"UNKNOWN_SSL_VERSION", ERR_LIB_SSL, 254}, + #endif + #ifdef SSL_R_UNKNOWN_STATE + {"UNKNOWN_STATE", ERR_LIB_SSL, SSL_R_UNKNOWN_STATE}, + #else + {"UNKNOWN_STATE", ERR_LIB_SSL, 255}, + #endif + #ifdef SSL_R_UNSAFE_LEGACY_RENEGOTIATION_DISABLED + {"UNSAFE_LEGACY_RENEGOTIATION_DISABLED", ERR_LIB_SSL, SSL_R_UNSAFE_LEGACY_RENEGOTIATION_DISABLED}, + #else + {"UNSAFE_LEGACY_RENEGOTIATION_DISABLED", ERR_LIB_SSL, 338}, + #endif + #ifdef SSL_R_UNSUPPORTED_CIPHER + {"UNSUPPORTED_CIPHER", ERR_LIB_SSL, SSL_R_UNSUPPORTED_CIPHER}, + #else + {"UNSUPPORTED_CIPHER", ERR_LIB_SSL, 256}, + #endif + #ifdef SSL_R_UNSUPPORTED_COMPRESSION_ALGORITHM + {"UNSUPPORTED_COMPRESSION_ALGORITHM", ERR_LIB_SSL, SSL_R_UNSUPPORTED_COMPRESSION_ALGORITHM}, + #else + {"UNSUPPORTED_COMPRESSION_ALGORITHM", ERR_LIB_SSL, 257}, + #endif + #ifdef SSL_R_UNSUPPORTED_DIGEST_TYPE + {"UNSUPPORTED_DIGEST_TYPE", ERR_LIB_SSL, SSL_R_UNSUPPORTED_DIGEST_TYPE}, + #else + {"UNSUPPORTED_DIGEST_TYPE", ERR_LIB_SSL, 326}, + #endif + #ifdef SSL_R_UNSUPPORTED_ELLIPTIC_CURVE + {"UNSUPPORTED_ELLIPTIC_CURVE", ERR_LIB_SSL, SSL_R_UNSUPPORTED_ELLIPTIC_CURVE}, + #else + {"UNSUPPORTED_ELLIPTIC_CURVE", ERR_LIB_SSL, 315}, + #endif + #ifdef SSL_R_UNSUPPORTED_PROTOCOL + {"UNSUPPORTED_PROTOCOL", ERR_LIB_SSL, SSL_R_UNSUPPORTED_PROTOCOL}, + #else + {"UNSUPPORTED_PROTOCOL", ERR_LIB_SSL, 258}, + #endif + #ifdef SSL_R_UNSUPPORTED_SSL_VERSION + {"UNSUPPORTED_SSL_VERSION", ERR_LIB_SSL, SSL_R_UNSUPPORTED_SSL_VERSION}, + #else + {"UNSUPPORTED_SSL_VERSION", ERR_LIB_SSL, 259}, + #endif + #ifdef SSL_R_UNSUPPORTED_STATUS_TYPE + {"UNSUPPORTED_STATUS_TYPE", ERR_LIB_SSL, SSL_R_UNSUPPORTED_STATUS_TYPE}, + #else + {"UNSUPPORTED_STATUS_TYPE", ERR_LIB_SSL, 329}, + #endif + #ifdef SSL_R_WRITE_BIO_NOT_SET + {"WRITE_BIO_NOT_SET", ERR_LIB_SSL, SSL_R_WRITE_BIO_NOT_SET}, + #else + {"WRITE_BIO_NOT_SET", ERR_LIB_SSL, 260}, + #endif + #ifdef SSL_R_WRONG_CIPHER_RETURNED + {"WRONG_CIPHER_RETURNED", ERR_LIB_SSL, SSL_R_WRONG_CIPHER_RETURNED}, + #else + {"WRONG_CIPHER_RETURNED", ERR_LIB_SSL, 261}, + #endif + #ifdef SSL_R_WRONG_MESSAGE_TYPE + {"WRONG_MESSAGE_TYPE", ERR_LIB_SSL, SSL_R_WRONG_MESSAGE_TYPE}, + #else + {"WRONG_MESSAGE_TYPE", ERR_LIB_SSL, 262}, + #endif + #ifdef 
SSL_R_WRONG_NUMBER_OF_KEY_BITS + {"WRONG_NUMBER_OF_KEY_BITS", ERR_LIB_SSL, SSL_R_WRONG_NUMBER_OF_KEY_BITS}, + #else + {"WRONG_NUMBER_OF_KEY_BITS", ERR_LIB_SSL, 263}, + #endif + #ifdef SSL_R_WRONG_SIGNATURE_LENGTH + {"WRONG_SIGNATURE_LENGTH", ERR_LIB_SSL, SSL_R_WRONG_SIGNATURE_LENGTH}, + #else + {"WRONG_SIGNATURE_LENGTH", ERR_LIB_SSL, 264}, + #endif + #ifdef SSL_R_WRONG_SIGNATURE_SIZE + {"WRONG_SIGNATURE_SIZE", ERR_LIB_SSL, SSL_R_WRONG_SIGNATURE_SIZE}, + #else + {"WRONG_SIGNATURE_SIZE", ERR_LIB_SSL, 265}, + #endif + #ifdef SSL_R_WRONG_SSL_VERSION + {"WRONG_SSL_VERSION", ERR_LIB_SSL, SSL_R_WRONG_SSL_VERSION}, + #else + {"WRONG_SSL_VERSION", ERR_LIB_SSL, 266}, + #endif + #ifdef SSL_R_WRONG_VERSION_NUMBER + {"WRONG_VERSION_NUMBER", ERR_LIB_SSL, SSL_R_WRONG_VERSION_NUMBER}, + #else + {"WRONG_VERSION_NUMBER", ERR_LIB_SSL, 267}, + #endif + #ifdef SSL_R_X509_LIB + {"X509_LIB", ERR_LIB_SSL, SSL_R_X509_LIB}, + #else + {"X509_LIB", ERR_LIB_SSL, 268}, + #endif + #ifdef SSL_R_X509_VERIFICATION_SETUP_PROBLEMS + {"X509_VERIFICATION_SETUP_PROBLEMS", ERR_LIB_SSL, SSL_R_X509_VERIFICATION_SETUP_PROBLEMS}, + #else + {"X509_VERIFICATION_SETUP_PROBLEMS", ERR_LIB_SSL, 269}, + #endif + #ifdef X509_R_BAD_X509_FILETYPE + {"BAD_X509_FILETYPE", ERR_LIB_X509, X509_R_BAD_X509_FILETYPE}, + #else + {"BAD_X509_FILETYPE", ERR_LIB_X509, 100}, + #endif + #ifdef X509_R_BASE64_DECODE_ERROR + {"BASE64_DECODE_ERROR", ERR_LIB_X509, X509_R_BASE64_DECODE_ERROR}, + #else + {"BASE64_DECODE_ERROR", ERR_LIB_X509, 118}, + #endif + #ifdef X509_R_CANT_CHECK_DH_KEY + {"CANT_CHECK_DH_KEY", ERR_LIB_X509, X509_R_CANT_CHECK_DH_KEY}, + #else + {"CANT_CHECK_DH_KEY", ERR_LIB_X509, 114}, + #endif + #ifdef X509_R_CERT_ALREADY_IN_HASH_TABLE + {"CERT_ALREADY_IN_HASH_TABLE", ERR_LIB_X509, X509_R_CERT_ALREADY_IN_HASH_TABLE}, + #else + {"CERT_ALREADY_IN_HASH_TABLE", ERR_LIB_X509, 101}, + #endif + #ifdef X509_R_ERR_ASN1_LIB + {"ERR_ASN1_LIB", ERR_LIB_X509, X509_R_ERR_ASN1_LIB}, + #else + {"ERR_ASN1_LIB", ERR_LIB_X509, 102}, + #endif + #ifdef X509_R_INVALID_DIRECTORY + {"INVALID_DIRECTORY", ERR_LIB_X509, X509_R_INVALID_DIRECTORY}, + #else + {"INVALID_DIRECTORY", ERR_LIB_X509, 113}, + #endif + #ifdef X509_R_INVALID_FIELD_NAME + {"INVALID_FIELD_NAME", ERR_LIB_X509, X509_R_INVALID_FIELD_NAME}, + #else + {"INVALID_FIELD_NAME", ERR_LIB_X509, 119}, + #endif + #ifdef X509_R_INVALID_TRUST + {"INVALID_TRUST", ERR_LIB_X509, X509_R_INVALID_TRUST}, + #else + {"INVALID_TRUST", ERR_LIB_X509, 123}, + #endif + #ifdef X509_R_KEY_TYPE_MISMATCH + {"KEY_TYPE_MISMATCH", ERR_LIB_X509, X509_R_KEY_TYPE_MISMATCH}, + #else + {"KEY_TYPE_MISMATCH", ERR_LIB_X509, 115}, + #endif + #ifdef X509_R_KEY_VALUES_MISMATCH + {"KEY_VALUES_MISMATCH", ERR_LIB_X509, X509_R_KEY_VALUES_MISMATCH}, + #else + {"KEY_VALUES_MISMATCH", ERR_LIB_X509, 116}, + #endif + #ifdef X509_R_LOADING_CERT_DIR + {"LOADING_CERT_DIR", ERR_LIB_X509, X509_R_LOADING_CERT_DIR}, + #else + {"LOADING_CERT_DIR", ERR_LIB_X509, 103}, + #endif + #ifdef X509_R_LOADING_DEFAULTS + {"LOADING_DEFAULTS", ERR_LIB_X509, X509_R_LOADING_DEFAULTS}, + #else + {"LOADING_DEFAULTS", ERR_LIB_X509, 104}, + #endif + #ifdef X509_R_METHOD_NOT_SUPPORTED + {"METHOD_NOT_SUPPORTED", ERR_LIB_X509, X509_R_METHOD_NOT_SUPPORTED}, + #else + {"METHOD_NOT_SUPPORTED", ERR_LIB_X509, 124}, + #endif + #ifdef X509_R_NO_CERT_SET_FOR_US_TO_VERIFY + {"NO_CERT_SET_FOR_US_TO_VERIFY", ERR_LIB_X509, X509_R_NO_CERT_SET_FOR_US_TO_VERIFY}, + #else + {"NO_CERT_SET_FOR_US_TO_VERIFY", ERR_LIB_X509, 105}, + #endif + #ifdef X509_R_PUBLIC_KEY_DECODE_ERROR + 
{"PUBLIC_KEY_DECODE_ERROR", ERR_LIB_X509, X509_R_PUBLIC_KEY_DECODE_ERROR}, + #else + {"PUBLIC_KEY_DECODE_ERROR", ERR_LIB_X509, 125}, + #endif + #ifdef X509_R_PUBLIC_KEY_ENCODE_ERROR + {"PUBLIC_KEY_ENCODE_ERROR", ERR_LIB_X509, X509_R_PUBLIC_KEY_ENCODE_ERROR}, + #else + {"PUBLIC_KEY_ENCODE_ERROR", ERR_LIB_X509, 126}, + #endif + #ifdef X509_R_SHOULD_RETRY + {"SHOULD_RETRY", ERR_LIB_X509, X509_R_SHOULD_RETRY}, + #else + {"SHOULD_RETRY", ERR_LIB_X509, 106}, + #endif + #ifdef X509_R_UNABLE_TO_FIND_PARAMETERS_IN_CHAIN + {"UNABLE_TO_FIND_PARAMETERS_IN_CHAIN", ERR_LIB_X509, X509_R_UNABLE_TO_FIND_PARAMETERS_IN_CHAIN}, + #else + {"UNABLE_TO_FIND_PARAMETERS_IN_CHAIN", ERR_LIB_X509, 107}, + #endif + #ifdef X509_R_UNABLE_TO_GET_CERTS_PUBLIC_KEY + {"UNABLE_TO_GET_CERTS_PUBLIC_KEY", ERR_LIB_X509, X509_R_UNABLE_TO_GET_CERTS_PUBLIC_KEY}, + #else + {"UNABLE_TO_GET_CERTS_PUBLIC_KEY", ERR_LIB_X509, 108}, + #endif + #ifdef X509_R_UNKNOWN_KEY_TYPE + {"UNKNOWN_KEY_TYPE", ERR_LIB_X509, X509_R_UNKNOWN_KEY_TYPE}, + #else + {"UNKNOWN_KEY_TYPE", ERR_LIB_X509, 117}, + #endif + #ifdef X509_R_UNKNOWN_NID + {"UNKNOWN_NID", ERR_LIB_X509, X509_R_UNKNOWN_NID}, + #else + {"UNKNOWN_NID", ERR_LIB_X509, 109}, + #endif + #ifdef X509_R_UNKNOWN_PURPOSE_ID + {"UNKNOWN_PURPOSE_ID", ERR_LIB_X509, X509_R_UNKNOWN_PURPOSE_ID}, + #else + {"UNKNOWN_PURPOSE_ID", ERR_LIB_X509, 121}, + #endif + #ifdef X509_R_UNKNOWN_TRUST_ID + {"UNKNOWN_TRUST_ID", ERR_LIB_X509, X509_R_UNKNOWN_TRUST_ID}, + #else + {"UNKNOWN_TRUST_ID", ERR_LIB_X509, 120}, + #endif + #ifdef X509_R_UNSUPPORTED_ALGORITHM + {"UNSUPPORTED_ALGORITHM", ERR_LIB_X509, X509_R_UNSUPPORTED_ALGORITHM}, + #else + {"UNSUPPORTED_ALGORITHM", ERR_LIB_X509, 111}, + #endif + #ifdef X509_R_WRONG_LOOKUP_TYPE + {"WRONG_LOOKUP_TYPE", ERR_LIB_X509, X509_R_WRONG_LOOKUP_TYPE}, + #else + {"WRONG_LOOKUP_TYPE", ERR_LIB_X509, 112}, + #endif + #ifdef X509_R_WRONG_TYPE + {"WRONG_TYPE", ERR_LIB_X509, X509_R_WRONG_TYPE}, + #else + {"WRONG_TYPE", ERR_LIB_X509, 122}, + #endif + { NULL } +}; diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c --- a/Modules/_threadmodule.c +++ b/Modules/_threadmodule.c @@ -23,6 +23,7 @@ PyObject_HEAD PyThread_type_lock lock_lock; PyObject *in_weakreflist; + char locked; /* for sanity checking */ } lockobject; static void @@ -32,9 +33,8 @@ PyObject_ClearWeakRefs((PyObject *) self); if (self->lock_lock != NULL) { /* Unlock the lock so it's safe to free it */ - PyThread_acquire_lock(self->lock_lock, 0); - PyThread_release_lock(self->lock_lock); - + if (self->locked) + PyThread_release_lock(self->lock_lock); PyThread_free_lock(self->lock_lock); } PyObject_Del(self); @@ -62,9 +62,13 @@ do { - Py_BEGIN_ALLOW_THREADS - r = PyThread_acquire_lock_timed(lock, microseconds, 1); - Py_END_ALLOW_THREADS + /* first a simple non-blocking try without releasing the GIL */ + r = PyThread_acquire_lock_timed(lock, 0, 0); + if (r == PY_LOCK_FAILURE && microseconds != 0) { + Py_BEGIN_ALLOW_THREADS + r = PyThread_acquire_lock_timed(lock, microseconds, 1); + Py_END_ALLOW_THREADS + } if (r == PY_LOCK_INTR) { /* Run signal handlers if we were interrupted. 
Propagate @@ -135,6 +139,8 @@ return NULL; } + if (r == PY_LOCK_ACQUIRED) + self->locked = 1; return PyBool_FromLong(r == PY_LOCK_ACQUIRED); } @@ -153,13 +159,13 @@ lock_PyThread_release_lock(lockobject *self) { /* Sanity check: the lock must be locked */ - if (PyThread_acquire_lock(self->lock_lock, 0)) { - PyThread_release_lock(self->lock_lock); + if (!self->locked) { PyErr_SetString(ThreadError, "release unlocked lock"); return NULL; } PyThread_release_lock(self->lock_lock); + self->locked = 0; Py_INCREF(Py_None); return Py_None; } @@ -175,11 +181,7 @@ static PyObject * lock_locked_lock(lockobject *self) { - if (PyThread_acquire_lock(self->lock_lock, 0)) { - PyThread_release_lock(self->lock_lock); - return PyBool_FromLong(0L); - } - return PyBool_FromLong(1L); + return PyBool_FromLong((long)self->locked); } PyDoc_STRVAR(locked_doc, @@ -313,14 +315,7 @@ self->rlock_count = count; Py_RETURN_TRUE; } - - if (self->rlock_count > 0 || - !PyThread_acquire_lock(self->rlock_lock, 0)) { - if (microseconds == 0) { - Py_RETURN_FALSE; - } - r = acquire_timed(self->rlock_lock, microseconds); - } + r = acquire_timed(self->rlock_lock, microseconds); if (r == PY_LOCK_ACQUIRED) { assert(self->rlock_count == 0); self->rlock_owner = tid; @@ -548,6 +543,7 @@ if (self == NULL) return NULL; self->lock_lock = PyThread_allocate_lock(); + self->locked = 0; self->in_weakreflist = NULL; if (self->lock_lock == NULL) { Py_DECREF(self); diff --git a/Objects/classobject.c b/Objects/classobject.c --- a/Objects/classobject.c +++ b/Objects/classobject.c @@ -400,6 +400,15 @@ (void)PyMethod_ClearFreeList(); } +/* Print summary info about the state of the optimized allocator */ +void +_PyMethod_DebugMallocStats(FILE *out) +{ + _PyDebugAllocatorStats(out, + "free PyMethodObject", + numfree, sizeof(PyMethodObject)); +} + /* ------------------------------------------------------------------------ * instance method */ diff --git a/Objects/dictobject.c b/Objects/dictobject.c --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -255,6 +255,15 @@ return ret; } +/* Print summary info about the state of the optimized allocator */ +void +_PyDict_DebugMallocStats(FILE *out) +{ + _PyDebugAllocatorStats(out, + "free PyDictObject", numfree, sizeof(PyDictObject)); +} + + void PyDict_Fini(void) { diff --git a/Objects/floatobject.c b/Objects/floatobject.c --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -1933,6 +1933,16 @@ (void)PyFloat_ClearFreeList(); } +/* Print summary info about the state of the optimized allocator */ +void +_PyFloat_DebugMallocStats(FILE *out) +{ + _PyDebugAllocatorStats(out, + "free PyFloatObject", + numfree, sizeof(PyFloatObject)); +} + + /*---------------------------------------------------------------------------- * _PyFloat_{Pack,Unpack}{4,8}. See floatobject.h. 
*/ diff --git a/Objects/frameobject.c b/Objects/frameobject.c --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -955,3 +955,13 @@ Py_XDECREF(builtin_object); builtin_object = NULL; } + +/* Print summary info about the state of the optimized allocator */ +void +_PyFrame_DebugMallocStats(FILE *out) +{ + _PyDebugAllocatorStats(out, + "free PyFrameObject", + numfree, sizeof(PyFrameObject)); +} + diff --git a/Objects/listobject.c b/Objects/listobject.c --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -117,6 +117,15 @@ PyList_ClearFreeList(); } +/* Print summary info about the state of the optimized allocator */ +void +_PyList_DebugMallocStats(FILE *out) +{ + _PyDebugAllocatorStats(out, + "free PyListObject", + numfree, sizeof(PyListObject)); +} + PyObject * PyList_New(Py_ssize_t size) { diff --git a/Objects/methodobject.c b/Objects/methodobject.c --- a/Objects/methodobject.c +++ b/Objects/methodobject.c @@ -338,6 +338,15 @@ (void)PyCFunction_ClearFreeList(); } +/* Print summary info about the state of the optimized allocator */ +void +_PyCFunction_DebugMallocStats(FILE *out) +{ + _PyDebugAllocatorStats(out, + "free PyCFunction", + numfree, sizeof(PyCFunction)); +} + /* PyCFunction_New() is now just a macro that calls PyCFunction_NewEx(), but it's part of the API so we need to keep a function around that existing C extensions can call. diff --git a/Objects/object.c b/Objects/object.c --- a/Objects/object.c +++ b/Objects/object.c @@ -1852,6 +1852,18 @@ PyMem_FREE(p); } +void +_PyObject_DebugTypeStats(FILE *out) +{ + _PyCFunction_DebugMallocStats(out); + _PyDict_DebugMallocStats(out); + _PyFloat_DebugMallocStats(out); + _PyFrame_DebugMallocStats(out); + _PyList_DebugMallocStats(out); + _PyMethod_DebugMallocStats(out); + _PySet_DebugMallocStats(out); + _PyTuple_DebugMallocStats(out); +} /* These methods are used to control infinite recursion in repr, str, print, etc. Container objects that may recursively contain themselves, diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c --- a/Objects/obmalloc.c +++ b/Objects/obmalloc.c @@ -523,12 +523,10 @@ /* Number of arenas allocated that haven't been free()'d. */ static size_t narenas_currently_allocated = 0; -#ifdef PYMALLOC_DEBUG /* Total number of times malloc() called to allocate an arena. */ static size_t ntimes_arena_allocated = 0; /* High water mark (max value ever seen) for narenas_currently_allocated. */ static size_t narenas_highwater = 0; -#endif /* Allocate a new arena. If we run out of memory, return NULL. 
Else * allocate a new arena, and return the address of an arena_object @@ -545,7 +543,7 @@ #ifdef PYMALLOC_DEBUG if (Py_GETENV("PYTHONMALLOCSTATS")) - _PyObject_DebugMallocStats(); + _PyObject_DebugMallocStats(stderr); #endif if (unused_arena_objects == NULL) { uint i; @@ -613,11 +611,9 @@ arenaobj->address = (uptr)address; ++narenas_currently_allocated; -#ifdef PYMALLOC_DEBUG ++ntimes_arena_allocated; if (narenas_currently_allocated > narenas_highwater) narenas_highwater = narenas_currently_allocated; -#endif arenaobj->freepools = NULL; /* pool_address <- first pool-aligned address in the arena nfreepools <- number of whole pools that fit after alignment */ @@ -1723,17 +1719,19 @@ } } +#endif /* PYMALLOC_DEBUG */ + static size_t -printone(const char* msg, size_t value) +printone(FILE *out, const char* msg, size_t value) { int i, k; char buf[100]; size_t origvalue = value; - fputs(msg, stderr); + fputs(msg, out); for (i = (int)strlen(msg); i < 35; ++i) - fputc(' ', stderr); - fputc('=', stderr); + fputc(' ', out); + fputc('=', out); /* Write the value with commas. */ i = 22; @@ -1754,17 +1752,33 @@ while (i >= 0) buf[i--] = ' '; - fputs(buf, stderr); + fputs(buf, out); return origvalue; } -/* Print summary info to stderr about the state of pymalloc's structures. +void +_PyDebugAllocatorStats(FILE *out, + const char *block_name, int num_blocks, size_t sizeof_block) +{ + char buf1[128]; + char buf2[128]; + PyOS_snprintf(buf1, sizeof(buf1), + "%d %ss * %zd bytes each", + num_blocks, block_name, sizeof_block); + PyOS_snprintf(buf2, sizeof(buf2), + "%48s ", buf1); + (void)printone(out, buf2, num_blocks * sizeof_block); +} + +#ifdef WITH_PYMALLOC + +/* Print summary info to "out" about the state of pymalloc's structures. * In Py_DEBUG mode, also perform some expensive internal consistency * checks. 
*/ void -_PyObject_DebugMallocStats(void) +_PyObject_DebugMallocStats(FILE *out) { uint i; const uint numclasses = SMALL_REQUEST_THRESHOLD >> ALIGNMENT_SHIFT; @@ -1793,7 +1807,7 @@ size_t total; char buf[128]; - fprintf(stderr, "Small block threshold = %d, in %u size classes.\n", + fprintf(out, "Small block threshold = %d, in %u size classes.\n", SMALL_REQUEST_THRESHOLD, numclasses); for (i = 0; i < numclasses; ++i) @@ -1847,10 +1861,10 @@ } assert(narenas == narenas_currently_allocated); - fputc('\n', stderr); + fputc('\n', out); fputs("class size num pools blocks in use avail blocks\n" "----- ---- --------- ------------- ------------\n", - stderr); + out); for (i = 0; i < numclasses; ++i) { size_t p = numpools[i]; @@ -1861,7 +1875,7 @@ assert(b == 0 && f == 0); continue; } - fprintf(stderr, "%5u %6u " + fprintf(out, "%5u %6u " "%11" PY_FORMAT_SIZE_T "u " "%15" PY_FORMAT_SIZE_T "u " "%13" PY_FORMAT_SIZE_T "u\n", @@ -1871,35 +1885,36 @@ pool_header_bytes += p * POOL_OVERHEAD; quantization += p * ((POOL_SIZE - POOL_OVERHEAD) % size); } - fputc('\n', stderr); - (void)printone("# times object malloc called", serialno); - - (void)printone("# arenas allocated total", ntimes_arena_allocated); - (void)printone("# arenas reclaimed", ntimes_arena_allocated - narenas); - (void)printone("# arenas highwater mark", narenas_highwater); - (void)printone("# arenas allocated current", narenas); + fputc('\n', out); +#ifdef PYMALLOC_DEBUG + (void)printone(out, "# times object malloc called", serialno); +#endif + (void)printone(out, "# arenas allocated total", ntimes_arena_allocated); + (void)printone(out, "# arenas reclaimed", ntimes_arena_allocated - narenas); + (void)printone(out, "# arenas highwater mark", narenas_highwater); + (void)printone(out, "# arenas allocated current", narenas); PyOS_snprintf(buf, sizeof(buf), "%" PY_FORMAT_SIZE_T "u arenas * %d bytes/arena", narenas, ARENA_SIZE); - (void)printone(buf, narenas * ARENA_SIZE); + (void)printone(out, buf, narenas * ARENA_SIZE); - fputc('\n', stderr); + fputc('\n', out); - total = printone("# bytes in allocated blocks", allocated_bytes); - total += printone("# bytes in available blocks", available_bytes); + total = printone(out, "# bytes in allocated blocks", allocated_bytes); + total += printone(out, "# bytes in available blocks", available_bytes); PyOS_snprintf(buf, sizeof(buf), "%u unused pools * %d bytes", numfreepools, POOL_SIZE); - total += printone(buf, (size_t)numfreepools * POOL_SIZE); + total += printone(out, buf, (size_t)numfreepools * POOL_SIZE); - total += printone("# bytes lost to pool headers", pool_header_bytes); - total += printone("# bytes lost to quantization", quantization); - total += printone("# bytes lost to arena alignment", arena_alignment); - (void)printone("Total", total); + total += printone(out, "# bytes lost to pool headers", pool_header_bytes); + total += printone(out, "# bytes lost to quantization", quantization); + total += printone(out, "# bytes lost to arena alignment", arena_alignment); + (void)printone(out, "Total", total); } -#endif /* PYMALLOC_DEBUG */ +#endif /* #ifdef WITH_PYMALLOC */ #ifdef Py_USING_MEMORY_DEBUGGER /* Make this function last so gcc won't inline it since the definition is diff --git a/Objects/setobject.c b/Objects/setobject.c --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -1133,6 +1133,16 @@ Py_CLEAR(emptyfrozenset); } +/* Print summary info about the state of the optimized allocator */ +void +_PySet_DebugMallocStats(FILE *out) +{ + _PyDebugAllocatorStats(out, + "free PySetObject", + 
numfree, sizeof(PySetObject)); +} + + static PyObject * set_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -45,6 +45,22 @@ } #endif +/* Print summary info about the state of the optimized allocator */ +void +_PyTuple_DebugMallocStats(FILE *out) +{ +#if PyTuple_MAXSAVESIZE > 0 + int i; + char buf[128]; + for (i = 1; i < PyTuple_MAXSAVESIZE; i++) { + PyOS_snprintf(buf, sizeof(buf), + "free %d-sized PyTupleObject", i); + _PyDebugAllocatorStats(out, + buf, + numfree[i], _PyObject_VAR_SIZE(&PyTuple_Type, i)); + } +#endif +} PyObject * PyTuple_New(register Py_ssize_t size) diff --git a/PCbuild/debug.props b/PCbuild/debug.props --- a/PCbuild/debug.props +++ b/PCbuild/debug.props @@ -12,6 +12,9 @@ _DEBUG;%(PreprocessorDefinitions) + + _DEBUG + diff --git a/Python/pythonrun.c b/Python/pythonrun.c --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -642,7 +642,7 @@ #endif /* Py_TRACE_REFS */ #ifdef PYMALLOC_DEBUG if (Py_GETENV("PYTHONMALLOCSTATS")) - _PyObject_DebugMallocStats(); + _PyObject_DebugMallocStats(stderr); #endif call_ll_exitfuncs(); diff --git a/Python/sysmodule.c b/Python/sysmodule.c --- a/Python/sysmodule.c +++ b/Python/sysmodule.c @@ -997,6 +997,27 @@ extern "C" { #endif +static PyObject * +sys_debugmallocstats(PyObject *self, PyObject *args) +{ +#ifdef WITH_PYMALLOC + _PyObject_DebugMallocStats(stderr); + fputc('\n', stderr); +#endif + _PyObject_DebugTypeStats(stderr); + + Py_RETURN_NONE; +} +PyDoc_STRVAR(debugmallocstats_doc, +"_debugmallocstats()\n\ +\n\ +Print summary info to stderr about the state of\n\ +pymalloc's structures.\n\ +\n\ +In Py_DEBUG mode, also perform some expensive internal consistency\n\ +checks.\n\ +"); + #ifdef Py_TRACE_REFS /* Defined in objects.c because it uses static globals if that file */ extern PyObject *_Py_GetObjects(PyObject *, PyObject *); @@ -1093,6 +1114,8 @@ {"settrace", sys_settrace, METH_O, settrace_doc}, {"gettrace", sys_gettrace, METH_NOARGS, gettrace_doc}, {"call_tracing", sys_call_tracing, METH_VARARGS, call_tracing_doc}, + {"_debugmallocstats", sys_debugmallocstats, METH_VARARGS, + debugmallocstats_doc}, {NULL, NULL} /* sentinel */ }; diff --git a/Tools/scripts/diff.py b/Tools/scripts/diff.py --- a/Tools/scripts/diff.py +++ b/Tools/scripts/diff.py @@ -9,6 +9,12 @@ """ import sys, os, time, difflib, optparse +from datetime import datetime, timezone + +def file_mtime(path): + t = datetime.fromtimestamp(os.stat(path).st_mtime, + timezone.utc) + return t.astimezone().isoformat() def main(): @@ -30,10 +36,12 @@ n = options.lines fromfile, tofile = args - fromdate = time.ctime(os.stat(fromfile).st_mtime) - todate = time.ctime(os.stat(tofile).st_mtime) - fromlines = open(fromfile, 'U').readlines() - tolines = open(tofile, 'U').readlines() + fromdate = file_mtime(fromfile) + todate = file_mtime(tofile) + with open(fromfile, 'U') as ff: + fromlines = ff.readlines() + with open(tofile, 'U') as tf: + tolines = tf.readlines() if options.u: diff = difflib.unified_diff(fromlines, tolines, fromfile, tofile, fromdate, todate, n=n) diff --git a/Tools/ssl/make_ssl_data.py b/Tools/ssl/make_ssl_data.py new file mode 100644 --- /dev/null +++ b/Tools/ssl/make_ssl_data.py @@ -0,0 +1,68 @@ +#! /usr/bin/env python3 + +""" +This script should be called *manually* when we want to upgrade SSLError +`library` and `reason` mnemnonics to a more recent OpenSSL version. 
+ +It takes two arguments: +- the path to the OpenSSL include files' directory + (e.g. openssl-1.0.1-beta3/include/openssl/) +- the path to the C file to be generated + (probably Modules/_ssl_data.h) +""" + +import datetime +import os +import re +import sys + + +def parse_error_codes(h_file, prefix): + pat = re.compile(r"#define\W+(%s([\w]+))\W+(\d+)\b" % re.escape(prefix)) + codes = [] + with open(h_file, "r", encoding="latin1") as f: + for line in f: + match = pat.search(line) + if match: + code, name, num = match.groups() + num = int(num) + codes.append((code, name, num)) + return codes + +if __name__ == "__main__": + openssl_inc = sys.argv[1] + outfile = sys.argv[2] + use_stdout = outfile == '-' + f = sys.stdout if use_stdout else open(outfile, "w") + error_libraries = ( + # (library code, mnemonic, error prefix, header file) + ('ERR_LIB_PEM', 'PEM', 'PEM_R_', 'pem.h'), + ('ERR_LIB_SSL', 'SSL', 'SSL_R_', 'ssl.h'), + ('ERR_LIB_X509', 'X509', 'X509_R_', 'x509.h'), + ) + def w(l): + f.write(l + "\n") + w("/* File generated by Tools/ssl/make_ssl_data.py */") + w("/* Generated on %s */" % datetime.datetime.now().isoformat()) + w("") + + w("static struct py_ssl_library_code library_codes[] = {") + for libcode, mnemo, _, _ in error_libraries: + w(' {"%s", %s},' % (mnemo, libcode)) + w(' { NULL }') + w('};') + w("") + + w("static struct py_ssl_error_code error_codes[] = {") + for libcode, _, prefix, h_file in error_libraries: + codes = parse_error_codes(os.path.join(openssl_inc, h_file), prefix) + for code, name, num in sorted(codes): + w(' #ifdef %s' % (code)) + w(' {"%s", %s, %s},' % (name, libcode, code)) + w(' #else') + w(' {"%s", %s, %d},' % (name, libcode, num)) + w(' #endif') + w(' { NULL }') + w('};') + if not use_stdout: + f.close() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 00:20:12 2012 From: python-checkins at python.org (larry.hastings) Date: Sat, 23 Jun 2012 00:20:12 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315008=3A_Implement?= =?utf8?q?_PEP_362_=22Signature_Objects=22=2E?= Message-ID: http://hg.python.org/cpython/rev/d892bf410478 changeset: 77594:d892bf410478 user: Larry Hastings date: Fri Jun 22 15:19:35 2012 -0700 summary: Issue #15008: Implement PEP 362 "Signature Objects". Patch by Yury Selivanov. files: Lib/inspect.py | 775 ++++++++++++++++++++++- Lib/test/test_inspect.py | 923 ++++++++++++++++++++++++++- Misc/NEWS | 3 + 3 files changed, 1697 insertions(+), 4 deletions(-) diff --git a/Lib/inspect.py b/Lib/inspect.py --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -22,12 +22,14 @@ getouterframes(), getinnerframes() - get info about frames currentframe() - get the current stack frame stack(), trace() - get info about frames on the stack or in a traceback + + signature() - get a Signature object for the callable """ # This module is in the public domain. No warranties. 
-__author__ = 'Ka-Ping Yee ' -__date__ = '1 Jan 2001' +__author__ = ('Ka-Ping Yee ', + 'Yury Selivanov ') import imp import importlib.machinery @@ -39,8 +41,9 @@ import tokenize import types import warnings +import functools from operator import attrgetter -from collections import namedtuple +from collections import namedtuple, OrderedDict # Create constants for the compiler flags in Include/code.h # We try to get them from dis to avoid duplication, but fall @@ -1223,3 +1226,769 @@ if generator.gi_frame.f_lasti == -1: return GEN_CREATED return GEN_SUSPENDED + + +############################################################################### +### Function Signature Object (PEP 362) +############################################################################### + + +_WrapperDescriptor = type(type.__call__) +_MethodWrapper = type(all.__call__) + +_NonUserDefinedCallables = (_WrapperDescriptor, + _MethodWrapper, + types.BuiltinFunctionType) + + +def _get_user_defined_method(cls, method_name): + try: + meth = getattr(cls, method_name) + except AttributeError: + return + else: + if not isinstance(meth, _NonUserDefinedCallables): + # Once '__signature__' will be added to 'C'-level + # callables, this check won't be necessary + return meth + + +def signature(obj): + '''Get a signature object for the passed callable.''' + + if not callable(obj): + raise TypeError('{!r} is not a callable object'.format(obj)) + + if isinstance(obj, types.MethodType): + # In this case we skip the first parameter of the underlying + # function (usually `self` or `cls`). + sig = signature(obj.__func__) + return sig.replace(parameters=tuple(sig.parameters.values())[1:]) + + try: + sig = obj.__signature__ + except AttributeError: + pass + else: + if sig is not None: + return sig + + try: + # Was this function wrapped by a decorator? + wrapped = obj.__wrapped__ + except AttributeError: + pass + else: + return signature(wrapped) + + if isinstance(obj, types.FunctionType): + return Signature.from_function(obj) + + if isinstance(obj, functools.partial): + sig = signature(obj.func) + + new_params = OrderedDict(sig.parameters.items()) + + partial_args = obj.args or () + partial_keywords = obj.keywords or {} + try: + ba = sig.bind_partial(*partial_args, **partial_keywords) + except TypeError as ex: + msg = 'partial object {!r} has incorrect arguments'.format(obj) + raise ValueError(msg) from ex + + for arg_name, arg_value in ba.arguments.items(): + param = new_params[arg_name] + if arg_name in partial_keywords: + # We set a new default value, because the following code + # is correct: + # + # >>> def foo(a): print(a) + # >>> print(partial(partial(foo, a=10), a=20)()) + # 20 + # >>> print(partial(partial(foo, a=10), a=20)(a=30)) + # 30 + # + # So, with 'partial' objects, passing a keyword argument is + # like setting a new default value for the corresponding + # parameter + # + # We also mark this parameter with '_partial_kwarg' + # flag. Later, in '_bind', the 'default' value of this + # parameter will be added to 'kwargs', to simulate + # the 'functools.partial' real call. 
+ new_params[arg_name] = param.replace(default=arg_value, + _partial_kwarg=True) + + elif (param.kind not in (_VAR_KEYWORD, _VAR_POSITIONAL) and + not param._partial_kwarg): + new_params.pop(arg_name) + + return sig.replace(parameters=new_params.values()) + + sig = None + if isinstance(obj, type): + # obj is a class or a metaclass + + # First, let's see if it has an overloaded __call__ defined + # in its metaclass + call = _get_user_defined_method(type(obj), '__call__') + if call is not None: + sig = signature(call) + else: + # Now we check if the 'obj' class has a '__new__' method + new = _get_user_defined_method(obj, '__new__') + if new is not None: + sig = signature(new) + else: + # Finally, we should have at least __init__ implemented + init = _get_user_defined_method(obj, '__init__') + if init is not None: + sig = signature(init) + elif not isinstance(obj, _NonUserDefinedCallables): + # An object with __call__ + # We also check that the 'obj' is not an instance of + # _WrapperDescriptor or _MethodWrapper to avoid + # infinite recursion (and even potential segfault) + call = _get_user_defined_method(type(obj), '__call__') + if call is not None: + sig = signature(call) + + if sig is not None: + # For classes and objects we skip the first parameter of their + # __call__, __new__, or __init__ methods + return sig.replace(parameters=tuple(sig.parameters.values())[1:]) + + if isinstance(obj, types.BuiltinFunctionType): + # Raise a nicer error message for builtins + msg = 'no signature found for builtin function {!r}'.format(obj) + raise ValueError(msg) + + raise ValueError('callable {!r} is not supported by signature'.format(obj)) + + +class _void: + '''A private marker - used in Parameter & Signature''' + + +class _empty: + pass + + +class _ParameterKind(int): + def __new__(self, *args, name): + obj = int.__new__(self, *args) + obj._name = name + return obj + + def __str__(self): + return self._name + + def __repr__(self): + return '<_ParameterKind: {!r}>'.format(self._name) + + +_POSITIONAL_ONLY = _ParameterKind(0, name='POSITIONAL_ONLY') +_POSITIONAL_OR_KEYWORD = _ParameterKind(1, name='POSITIONAL_OR_KEYWORD') +_VAR_POSITIONAL = _ParameterKind(2, name='VAR_POSITIONAL') +_KEYWORD_ONLY = _ParameterKind(3, name='KEYWORD_ONLY') +_VAR_KEYWORD = _ParameterKind(4, name='VAR_KEYWORD') + + +class Parameter: + '''Represents a parameter in a function signature. + + Has the following public attributes: + + * name : str + The name of the parameter as a string. + * default : object + The default value for the parameter if specified. If the + parameter has no default value, this attribute is not set. + * annotation + The annotation for the parameter if specified. If the + parameter has no annotation, this attribute is not set. + * kind : str + Describes how argument values are bound to the parameter. + Possible values: `Parameter.POSITIONAL_ONLY`, + `Parameter.POSITIONAL_OR_KEYWORD`, `Parameter.VAR_POSITIONAL`, + `Parameter.KEYWORD_ONLY`, `Parameter.VAR_KEYWORD`. 
+ ''' + + __slots__ = ('_name', '_kind', '_default', '_annotation', '_partial_kwarg') + + POSITIONAL_ONLY = _POSITIONAL_ONLY + POSITIONAL_OR_KEYWORD = _POSITIONAL_OR_KEYWORD + VAR_POSITIONAL = _VAR_POSITIONAL + KEYWORD_ONLY = _KEYWORD_ONLY + VAR_KEYWORD = _VAR_KEYWORD + + empty = _empty + + def __init__(self, name, kind, *, default=_empty, annotation=_empty, + _partial_kwarg=False): + + if kind not in (_POSITIONAL_ONLY, _POSITIONAL_OR_KEYWORD, + _VAR_POSITIONAL, _KEYWORD_ONLY, _VAR_KEYWORD): + raise ValueError("invalid value for 'Parameter.kind' attribute") + self._kind = kind + + if default is not _empty: + if kind in (_VAR_POSITIONAL, _VAR_KEYWORD): + msg = '{} parameters cannot have default values'.format(kind) + raise ValueError(msg) + self._default = default + self._annotation = annotation + + if name is None: + if kind != _POSITIONAL_ONLY: + raise ValueError("None is not a valid name for a " + "non-positional-only parameter") + self._name = name + else: + name = str(name) + if kind != _POSITIONAL_ONLY and not name.isidentifier(): + msg = '{!r} is not a valid parameter name'.format(name) + raise ValueError(msg) + self._name = name + + self._partial_kwarg = _partial_kwarg + + @property + def name(self): + return self._name + + @property + def default(self): + return self._default + + @property + def annotation(self): + return self._annotation + + @property + def kind(self): + return self._kind + + def replace(self, *, name=_void, kind=_void, annotation=_void, + default=_void, _partial_kwarg=_void): + '''Creates a customized copy of the Parameter.''' + + if name is _void: + name = self._name + + if kind is _void: + kind = self._kind + + if annotation is _void: + annotation = self._annotation + + if default is _void: + default = self._default + + if _partial_kwarg is _void: + _partial_kwarg = self._partial_kwarg + + return type(self)(name, kind, default=default, annotation=annotation, + _partial_kwarg=_partial_kwarg) + + def __str__(self): + kind = self.kind + + formatted = self._name + if kind == _POSITIONAL_ONLY: + if formatted is None: + formatted = '' + formatted = '<{}>'.format(formatted) + + # Add annotation and default value + if self._annotation is not _empty: + formatted = '{}:{}'.format(formatted, + formatannotation(self._annotation)) + + if self._default is not _empty: + formatted = '{}={}'.format(formatted, repr(self._default)) + + if kind == _VAR_POSITIONAL: + formatted = '*' + formatted + elif kind == _VAR_KEYWORD: + formatted = '**' + formatted + + return formatted + + def __repr__(self): + return '<{} at {:#x} {!r}>'.format(self.__class__.__name__, + id(self), self.name) + + def __eq__(self, other): + return (issubclass(other.__class__, Parameter) and + self._name == other._name and + self._kind == other._kind and + self._default == other._default and + self._annotation == other._annotation) + + def __ne__(self, other): + return not self.__eq__(other) + + +class BoundArguments: + '''Result of `Signature.bind` call. Holds the mapping of arguments + to the function's parameters. + + Has the following public attributes: + + * arguments : OrderedDict + An ordered mutable mapping of parameters' names to arguments' values. + Does not contain arguments' default values. + * signature : Signature + The Signature object that created this instance. + * args : tuple + Tuple of positional arguments values. + * kwargs : dict + Dict of keyword arguments values. 
+ ''' + + def __init__(self, signature, arguments): + self.arguments = arguments + self._signature = signature + + @property + def signature(self): + return self._signature + + @property + def args(self): + args = [] + for param_name, param in self._signature.parameters.items(): + if (param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or + param._partial_kwarg): + # Keyword arguments mapped by 'functools.partial' + # (Parameter._partial_kwarg is True) are mapped + # in 'BoundArguments.kwargs', along with VAR_KEYWORD & + # KEYWORD_ONLY + break + + try: + arg = self.arguments[param_name] + except KeyError: + # We're done here. Other arguments + # will be mapped in 'BoundArguments.kwargs' + break + else: + if param.kind == _VAR_POSITIONAL: + # *args + args.extend(arg) + else: + # plain argument + args.append(arg) + + return tuple(args) + + @property + def kwargs(self): + kwargs = {} + kwargs_started = False + for param_name, param in self._signature.parameters.items(): + if not kwargs_started: + if (param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY) or + param._partial_kwarg): + kwargs_started = True + else: + if param_name not in self.arguments: + kwargs_started = True + continue + + if not kwargs_started: + continue + + try: + arg = self.arguments[param_name] + except KeyError: + pass + else: + if param.kind == _VAR_KEYWORD: + # **kwargs + kwargs.update(arg) + else: + # plain keyword argument + kwargs[param_name] = arg + + return kwargs + + def __eq__(self, other): + return (issubclass(other.__class__, BoundArguments) and + self.signature == other.signature and + self.arguments == other.arguments) + + def __ne__(self, other): + return not self.__eq__(other) + + +class Signature: + '''A Signature object represents the overall signature of a function. + It stores a Parameter object for each parameter accepted by the + function, as well as information specific to the function itself. + + A Signature object has the following public attributes and methods: + + * parameters : OrderedDict + An ordered mapping of parameters' names to the corresponding + Parameter objects (keyword-only arguments are in the same order + as listed in `code.co_varnames`). + * return_annotation : object + The annotation for the return type of the function if specified. + If the function has no annotation for its return type, this + attribute is not set. + * bind(*args, **kwargs) -> BoundArguments + Creates a mapping from positional and keyword arguments to + parameters. + * bind_partial(*args, **kwargs) -> BoundArguments + Creates a partial mapping from positional and keyword arguments + to parameters (simulating 'functools.partial' behavior.) + ''' + + __slots__ = ('_return_annotation', '_parameters') + + _parameter_cls = Parameter + _bound_arguments_cls = BoundArguments + + empty = _empty + + def __init__(self, parameters=None, *, return_annotation=_empty, + __validate_parameters__=True): + '''Constructs Signature from the given list of Parameter + objects and 'return_annotation'. All arguments are optional. 
+ ''' + + if parameters is None: + params = OrderedDict() + else: + if __validate_parameters__: + params = OrderedDict() + top_kind = _POSITIONAL_ONLY + + for idx, param in enumerate(parameters): + kind = param.kind + if kind < top_kind: + msg = 'wrong parameter order: {} before {}' + msg = msg.format(top_kind, param.kind) + raise ValueError(msg) + else: + top_kind = kind + + name = param.name + if name is None: + name = str(idx) + param = param.replace(name=name) + + if name in params: + msg = 'duplicate parameter name: {!r}'.format(name) + raise ValueError(msg) + params[name] = param + else: + params = OrderedDict(((param.name, param) + for param in parameters)) + + self._parameters = types.MappingProxyType(params) + self._return_annotation = return_annotation + + @classmethod + def from_function(cls, func): + '''Constructs Signature for the given python function''' + + if not isinstance(func, types.FunctionType): + raise TypeError('{!r} is not a Python function'.format(func)) + + Parameter = cls._parameter_cls + + # Parameter information. + func_code = func.__code__ + pos_count = func_code.co_argcount + arg_names = func_code.co_varnames + positional = tuple(arg_names[:pos_count]) + keyword_only_count = func_code.co_kwonlyargcount + keyword_only = arg_names[pos_count:(pos_count + keyword_only_count)] + annotations = func.__annotations__ + defaults = func.__defaults__ + kwdefaults = func.__kwdefaults__ + + if defaults: + pos_default_count = len(defaults) + else: + pos_default_count = 0 + + parameters = [] + + # Non-keyword-only parameters w/o defaults. + non_default_count = pos_count - pos_default_count + for name in positional[:non_default_count]: + annotation = annotations.get(name, _empty) + parameters.append(Parameter(name, annotation=annotation, + kind=_POSITIONAL_OR_KEYWORD)) + + # ... w/ defaults. + for offset, name in enumerate(positional[non_default_count:]): + annotation = annotations.get(name, _empty) + parameters.append(Parameter(name, annotation=annotation, + kind=_POSITIONAL_OR_KEYWORD, + default=defaults[offset])) + + # *args + if func_code.co_flags & 0x04: + name = arg_names[pos_count + keyword_only_count] + annotation = annotations.get(name, _empty) + parameters.append(Parameter(name, annotation=annotation, + kind=_VAR_POSITIONAL)) + + # Keyword-only parameters. + for name in keyword_only: + default = _empty + if kwdefaults is not None: + default = kwdefaults.get(name, _empty) + + annotation = annotations.get(name, _empty) + parameters.append(Parameter(name, annotation=annotation, + kind=_KEYWORD_ONLY, + default=default)) + # **kwargs + if func_code.co_flags & 0x08: + index = pos_count + keyword_only_count + if func_code.co_flags & 0x04: + index += 1 + + name = arg_names[index] + annotation = annotations.get(name, _empty) + parameters.append(Parameter(name, annotation=annotation, + kind=_VAR_KEYWORD)) + + return cls(parameters, + return_annotation=annotations.get('return', _empty), + __validate_parameters__=False) + + @property + def parameters(self): + return self._parameters + + @property + def return_annotation(self): + return self._return_annotation + + def replace(self, *, parameters=_void, return_annotation=_void): + '''Creates a customized copy of the Signature. + Pass 'parameters' and/or 'return_annotation' arguments + to override them in the new copy. 
+ ''' + + if parameters is _void: + parameters = self.parameters.values() + + if return_annotation is _void: + return_annotation = self._return_annotation + + return type(self)(parameters, + return_annotation=return_annotation) + + def __eq__(self, other): + if (not issubclass(type(other), Signature) or + self.return_annotation != other.return_annotation or + len(self.parameters) != len(other.parameters)): + return False + + other_positions = {param: idx + for idx, param in enumerate(other.parameters.keys())} + + for idx, (param_name, param) in enumerate(self.parameters.items()): + if param.kind == _KEYWORD_ONLY: + try: + other_param = other.parameters[param_name] + except KeyError: + return False + else: + if param != other_param: + return False + else: + try: + other_idx = other_positions[param_name] + except KeyError: + return False + else: + if (idx != other_idx or + param != other.parameters[param_name]): + return False + + return True + + def __ne__(self, other): + return not self.__eq__(other) + + def _bind(self, args, kwargs, *, partial=False): + '''Private method. Don't use directly.''' + + arguments = OrderedDict() + + parameters = iter(self.parameters.values()) + parameters_ex = () + arg_vals = iter(args) + + if partial: + # Support for binding arguments to 'functools.partial' objects. + # See 'functools.partial' case in 'signature()' implementation + # for details. + for param_name, param in self.parameters.items(): + if (param._partial_kwarg and param_name not in kwargs): + # Simulating 'functools.partial' behavior + kwargs[param_name] = param.default + + while True: + # Let's iterate through the positional arguments and corresponding + # parameters + try: + arg_val = next(arg_vals) + except StopIteration: + # No more positional arguments + try: + param = next(parameters) + except StopIteration: + # No more parameters. That's it. Just need to check that + # we have no `kwargs` after this while loop + break + else: + if param.kind == _VAR_POSITIONAL: + # That's OK, just empty *args. Let's start parsing + # kwargs + break + elif param.name in kwargs: + if param.kind == _POSITIONAL_ONLY: + msg = '{arg!r} parameter is positional only, ' \ + 'but was passed as a keyword' + msg = msg.format(arg=param.name) + raise TypeError(msg) from None + parameters_ex = (param,) + break + elif (param.kind == _VAR_KEYWORD or + param.default is not _empty): + # That's fine too - we have a default value for this + # parameter. 
So, lets start parsing `kwargs`, starting + # with the current parameter + parameters_ex = (param,) + break + else: + if partial: + parameters_ex = (param,) + break + else: + msg = '{arg!r} parameter lacking default value' + msg = msg.format(arg=param.name) + raise TypeError(msg) from None + else: + # We have a positional argument to process + try: + param = next(parameters) + except StopIteration: + raise TypeError('too many positional arguments') from None + else: + if param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY): + # Looks like we have no parameter for this positional + # argument + raise TypeError('too many positional arguments') + + if param.kind == _VAR_POSITIONAL: + # We have an '*args'-like argument, let's fill it with + # all positional arguments we have left and move on to + # the next phase + values = [arg_val] + values.extend(arg_vals) + arguments[param.name] = tuple(values) + break + + if param.name in kwargs: + raise TypeError('multiple values for argument ' + '{arg!r}'.format(arg=param.name)) + + arguments[param.name] = arg_val + + # Now, we iterate through the remaining parameters to process + # keyword arguments + kwargs_param = None + for param in itertools.chain(parameters_ex, parameters): + if param.kind == _POSITIONAL_ONLY: + # This should never happen in case of a properly built + # Signature object (but let's have this check here + # to ensure correct behaviour just in case) + raise TypeError('{arg!r} parameter is positional only, ' + 'but was passed as a keyword'. \ + format(arg=param.name)) + + if param.kind == _VAR_KEYWORD: + # Memorize that we have a '**kwargs'-like parameter + kwargs_param = param + continue + + param_name = param.name + try: + arg_val = kwargs.pop(param_name) + except KeyError: + # We have no value for this parameter. It's fine though, + # if it has a default value, or it is an '*args'-like + # parameter, left alone by the processing of positional + # arguments. + if (not partial and param.kind != _VAR_POSITIONAL and + param.default is _empty): + raise TypeError('{arg!r} parameter lacking default value'. \ + format(arg=param_name)) from None + + else: + arguments[param_name] = arg_val + + if kwargs: + if kwargs_param is not None: + # Process our '**kwargs'-like parameter + arguments[kwargs_param.name] = kwargs + else: + raise TypeError('too many keyword arguments') + + return self._bound_arguments_cls(self, arguments) + + def bind(self, *args, **kwargs): + '''Get a BoundArguments object, that maps the passed `args` + and `kwargs` to the function's signature. Raises `TypeError` + if the passed arguments can not be bound. + ''' + return self._bind(args, kwargs) + + def bind_partial(self, *args, **kwargs): + '''Get a BoundArguments object, that partially maps the + passed `args` and `kwargs` to the function's signature. + Raises `TypeError` if the passed arguments can not be bound. 
+ ''' + return self._bind(args, kwargs, partial=True) + + def __str__(self): + result = [] + render_kw_only_separator = True + for idx, param in enumerate(self.parameters.values()): + formatted = str(param) + + kind = param.kind + if kind == _VAR_POSITIONAL: + # OK, we have an '*args'-like parameter, so we won't need + # a '*' to separate keyword-only arguments + render_kw_only_separator = False + elif kind == _KEYWORD_ONLY and render_kw_only_separator: + # We have a keyword-only parameter to render and we haven't + # rendered an '*args'-like parameter before, so add a '*' + # separator to the parameters list ("foo(arg1, *, arg2)" case) + result.append('*') + # This condition should be only triggered once, so + # reset the flag + render_kw_only_separator = False + + result.append(formatted) + + rendered = '({})'.format(', '.join(result)) + + if self.return_annotation is not _empty: + anno = formatannotation(self.return_annotation) + rendered += ' -> {}'.format(anno) + + return rendered diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py --- a/Lib/test/test_inspect.py +++ b/Lib/test/test_inspect.py @@ -1173,13 +1173,934 @@ self.assertIn(name, str(state)) +class TestSignatureObject(unittest.TestCase): + @staticmethod + def signature(func): + sig = inspect.signature(func) + return (tuple((param.name, + (... if param.default is param.empty else param.default), + (... if param.annotation is param.empty + else param.annotation), + str(param.kind).lower()) + for param in sig.parameters.values()), + (... if sig.return_annotation is sig.empty + else sig.return_annotation)) + + def test_signature_object(self): + S = inspect.Signature + P = inspect.Parameter + + self.assertEqual(str(S()), '()') + + def test(po, pk, *args, ko, **kwargs): + pass + sig = inspect.signature(test) + po = sig.parameters['po'].replace(kind=P.POSITIONAL_ONLY) + pk = sig.parameters['pk'] + args = sig.parameters['args'] + ko = sig.parameters['ko'] + kwargs = sig.parameters['kwargs'] + + S((po, pk, args, ko, kwargs)) + + with self.assertRaisesRegexp(ValueError, 'wrong parameter order'): + S((pk, po, args, ko, kwargs)) + + with self.assertRaisesRegexp(ValueError, 'wrong parameter order'): + S((po, args, pk, ko, kwargs)) + + with self.assertRaisesRegexp(ValueError, 'wrong parameter order'): + S((args, po, pk, ko, kwargs)) + + with self.assertRaisesRegexp(ValueError, 'wrong parameter order'): + S((po, pk, args, kwargs, ko)) + + kwargs2 = kwargs.replace(name='args') + with self.assertRaisesRegexp(ValueError, 'duplicate parameter name'): + S((po, pk, args, kwargs2, ko)) + + def test_signature_immutability(self): + def test(a): + pass + sig = inspect.signature(test) + + with self.assertRaises(AttributeError): + sig.foo = 'bar' + + with self.assertRaises(TypeError): + sig.parameters['a'] = None + + def test_signature_on_noarg(self): + def test(): + pass + self.assertEqual(self.signature(test), ((), ...)) + + def test_signature_on_wargs(self): + def test(a, b:'foo') -> 123: + pass + self.assertEqual(self.signature(test), + ((('a', ..., ..., "positional_or_keyword"), + ('b', ..., 'foo', "positional_or_keyword")), + 123)) + + def test_signature_on_wkwonly(self): + def test(*, a:float, b:str) -> int: + pass + self.assertEqual(self.signature(test), + ((('a', ..., float, "keyword_only"), + ('b', ..., str, "keyword_only")), + int)) + + def test_signature_on_complex_args(self): + def test(a, b:'foo'=10, *args:'bar', spam:'baz', ham=123, **kwargs:int): + pass + self.assertEqual(self.signature(test), + ((('a', ..., ..., 
"positional_or_keyword"), + ('b', 10, 'foo', "positional_or_keyword"), + ('args', ..., 'bar', "var_positional"), + ('spam', ..., 'baz', "keyword_only"), + ('ham', 123, ..., "keyword_only"), + ('kwargs', ..., int, "var_keyword")), + ...)) + + def test_signature_on_builtin_function(self): + with self.assertRaisesRegexp(ValueError, 'not supported by signature'): + inspect.signature(type) + with self.assertRaisesRegexp(ValueError, 'not supported by signature'): + # support for 'wrapper_descriptor' + inspect.signature(type.__call__) + with self.assertRaisesRegexp(ValueError, 'not supported by signature'): + # support for 'method-wrapper' + inspect.signature(min.__call__) + with self.assertRaisesRegexp(ValueError, + 'no signature found for builtin function'): + # support for 'method-wrapper' + inspect.signature(min) + + def test_signature_on_non_function(self): + with self.assertRaisesRegexp(TypeError, 'is not a callable object'): + inspect.signature(42) + + with self.assertRaisesRegexp(TypeError, 'is not a Python function'): + inspect.Signature.from_function(42) + + def test_signature_on_method(self): + class Test: + def foo(self, arg1, arg2=1) -> int: + pass + + meth = Test().foo + + self.assertEqual(self.signature(meth), + ((('arg1', ..., ..., "positional_or_keyword"), + ('arg2', 1, ..., "positional_or_keyword")), + int)) + + def test_signature_on_classmethod(self): + class Test: + @classmethod + def foo(cls, arg1, *, arg2=1): + pass + + meth = Test().foo + self.assertEqual(self.signature(meth), + ((('arg1', ..., ..., "positional_or_keyword"), + ('arg2', 1, ..., "keyword_only")), + ...)) + + meth = Test.foo + self.assertEqual(self.signature(meth), + ((('arg1', ..., ..., "positional_or_keyword"), + ('arg2', 1, ..., "keyword_only")), + ...)) + + def test_signature_on_staticmethod(self): + class Test: + @staticmethod + def foo(cls, *, arg): + pass + + meth = Test().foo + self.assertEqual(self.signature(meth), + ((('cls', ..., ..., "positional_or_keyword"), + ('arg', ..., ..., "keyword_only")), + ...)) + + meth = Test.foo + self.assertEqual(self.signature(meth), + ((('cls', ..., ..., "positional_or_keyword"), + ('arg', ..., ..., "keyword_only")), + ...)) + + def test_signature_on_partial(self): + from functools import partial + + def test(): + pass + + self.assertEqual(self.signature(partial(test)), ((), ...)) + + with self.assertRaisesRegexp(ValueError, "has incorrect arguments"): + inspect.signature(partial(test, 1)) + + with self.assertRaisesRegexp(ValueError, "has incorrect arguments"): + inspect.signature(partial(test, a=1)) + + def test(a, b, *, c, d): + pass + + self.assertEqual(self.signature(partial(test)), + ((('a', ..., ..., "positional_or_keyword"), + ('b', ..., ..., "positional_or_keyword"), + ('c', ..., ..., "keyword_only"), + ('d', ..., ..., "keyword_only")), + ...)) + + self.assertEqual(self.signature(partial(test, 1)), + ((('b', ..., ..., "positional_or_keyword"), + ('c', ..., ..., "keyword_only"), + ('d', ..., ..., "keyword_only")), + ...)) + + self.assertEqual(self.signature(partial(test, 1, c=2)), + ((('b', ..., ..., "positional_or_keyword"), + ('c', 2, ..., "keyword_only"), + ('d', ..., ..., "keyword_only")), + ...)) + + self.assertEqual(self.signature(partial(test, b=1, c=2)), + ((('a', ..., ..., "positional_or_keyword"), + ('b', 1, ..., "positional_or_keyword"), + ('c', 2, ..., "keyword_only"), + ('d', ..., ..., "keyword_only")), + ...)) + + self.assertEqual(self.signature(partial(test, 0, b=1, c=2)), + ((('b', 1, ..., "positional_or_keyword"), + ('c', 2, ..., 
"keyword_only"), + ('d', ..., ..., "keyword_only"),), + ...)) + + def test(a, *args, b, **kwargs): + pass + + self.assertEqual(self.signature(partial(test, 1)), + ((('args', ..., ..., "var_positional"), + ('b', ..., ..., "keyword_only"), + ('kwargs', ..., ..., "var_keyword")), + ...)) + + self.assertEqual(self.signature(partial(test, 1, 2, 3)), + ((('args', ..., ..., "var_positional"), + ('b', ..., ..., "keyword_only"), + ('kwargs', ..., ..., "var_keyword")), + ...)) + + + self.assertEqual(self.signature(partial(test, 1, 2, 3, test=True)), + ((('args', ..., ..., "var_positional"), + ('b', ..., ..., "keyword_only"), + ('kwargs', ..., ..., "var_keyword")), + ...)) + + self.assertEqual(self.signature(partial(test, 1, 2, 3, test=1, b=0)), + ((('args', ..., ..., "var_positional"), + ('b', 0, ..., "keyword_only"), + ('kwargs', ..., ..., "var_keyword")), + ...)) + + self.assertEqual(self.signature(partial(test, b=0)), + ((('a', ..., ..., "positional_or_keyword"), + ('args', ..., ..., "var_positional"), + ('b', 0, ..., "keyword_only"), + ('kwargs', ..., ..., "var_keyword")), + ...)) + + self.assertEqual(self.signature(partial(test, b=0, test=1)), + ((('a', ..., ..., "positional_or_keyword"), + ('args', ..., ..., "var_positional"), + ('b', 0, ..., "keyword_only"), + ('kwargs', ..., ..., "var_keyword")), + ...)) + + def test(a, b, c:int) -> 42: + pass + + sig = test.__signature__ = inspect.signature(test) + + self.assertEqual(self.signature(partial(partial(test, 1))), + ((('b', ..., ..., "positional_or_keyword"), + ('c', ..., int, "positional_or_keyword")), + 42)) + + self.assertEqual(self.signature(partial(partial(test, 1), 2)), + ((('c', ..., int, "positional_or_keyword"),), + 42)) + + psig = inspect.signature(partial(partial(test, 1), 2)) + + def foo(a): + return a + _foo = partial(partial(foo, a=10), a=20) + self.assertEqual(self.signature(_foo), + ((('a', 20, ..., "positional_or_keyword"),), + ...)) + # check that we don't have any side-effects in signature(), + # and the partial object is still functioning + self.assertEqual(_foo(), 20) + + def foo(a, b, c): + return a, b, c + _foo = partial(partial(foo, 1, b=20), b=30) + self.assertEqual(self.signature(_foo), + ((('b', 30, ..., "positional_or_keyword"), + ('c', ..., ..., "positional_or_keyword")), + ...)) + self.assertEqual(_foo(c=10), (1, 30, 10)) + _foo = partial(_foo, 2) # now 'b' has two values - + # positional and keyword + with self.assertRaisesRegexp(ValueError, "has incorrect arguments"): + inspect.signature(_foo) + + def foo(a, b, c, *, d): + return a, b, c, d + _foo = partial(partial(foo, d=20, c=20), b=10, d=30) + self.assertEqual(self.signature(_foo), + ((('a', ..., ..., "positional_or_keyword"), + ('b', 10, ..., "positional_or_keyword"), + ('c', 20, ..., "positional_or_keyword"), + ('d', 30, ..., "keyword_only")), + ...)) + ba = inspect.signature(_foo).bind(a=200, b=11) + self.assertEqual(_foo(*ba.args, **ba.kwargs), (200, 11, 20, 30)) + + def foo(a=1, b=2, c=3): + return a, b, c + _foo = partial(foo, a=10, c=13) + ba = inspect.signature(_foo).bind(11) + self.assertEqual(_foo(*ba.args, **ba.kwargs), (11, 2, 13)) + ba = inspect.signature(_foo).bind(11, 12) + self.assertEqual(_foo(*ba.args, **ba.kwargs), (11, 12, 13)) + ba = inspect.signature(_foo).bind(11, b=12) + self.assertEqual(_foo(*ba.args, **ba.kwargs), (11, 12, 13)) + ba = inspect.signature(_foo).bind(b=12) + self.assertEqual(_foo(*ba.args, **ba.kwargs), (10, 12, 13)) + _foo = partial(_foo, b=10) + ba = inspect.signature(_foo).bind(12, 14) + self.assertEqual(_foo(*ba.args, 
**ba.kwargs), (12, 14, 13)) + + def test_signature_on_decorated(self): + import functools + + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kwargs) -> int: + return func(*args, **kwargs) + return wrapper + + class Foo: + @decorator + def bar(self, a, b): + pass + + self.assertEqual(self.signature(Foo.bar), + ((('self', ..., ..., "positional_or_keyword"), + ('a', ..., ..., "positional_or_keyword"), + ('b', ..., ..., "positional_or_keyword")), + ...)) + + self.assertEqual(self.signature(Foo().bar), + ((('a', ..., ..., "positional_or_keyword"), + ('b', ..., ..., "positional_or_keyword")), + ...)) + + # Test that we handle method wrappers correctly + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kwargs) -> int: + return func(42, *args, **kwargs) + sig = inspect.signature(func) + new_params = tuple(sig.parameters.values())[1:] + wrapper.__signature__ = sig.replace(parameters=new_params) + return wrapper + + class Foo: + @decorator + def __call__(self, a, b): + pass + + self.assertEqual(self.signature(Foo.__call__), + ((('a', ..., ..., "positional_or_keyword"), + ('b', ..., ..., "positional_or_keyword")), + ...)) + + self.assertEqual(self.signature(Foo().__call__), + ((('b', ..., ..., "positional_or_keyword"),), + ...)) + + def test_signature_on_class(self): + class C: + def __init__(self, a): + pass + + self.assertEqual(self.signature(C), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + class CM(type): + def __call__(cls, a): + pass + class C(metaclass=CM): + def __init__(self, b): + pass + + self.assertEqual(self.signature(C), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + class CM(type): + def __new__(mcls, name, bases, dct, *, foo=1): + return super().__new__(mcls, name, bases, dct) + class C(metaclass=CM): + def __init__(self, b): + pass + + self.assertEqual(self.signature(C), + ((('b', ..., ..., "positional_or_keyword"),), + ...)) + + self.assertEqual(self.signature(CM), + ((('name', ..., ..., "positional_or_keyword"), + ('bases', ..., ..., "positional_or_keyword"), + ('dct', ..., ..., "positional_or_keyword"), + ('foo', 1, ..., "keyword_only")), + ...)) + + class CMM(type): + def __new__(mcls, name, bases, dct, *, foo=1): + return super().__new__(mcls, name, bases, dct) + def __call__(cls, nm, bs, dt): + return type(nm, bs, dt) + class CM(type, metaclass=CMM): + def __new__(mcls, name, bases, dct, *, bar=2): + return super().__new__(mcls, name, bases, dct) + class C(metaclass=CM): + def __init__(self, b): + pass + + self.assertEqual(self.signature(CMM), + ((('name', ..., ..., "positional_or_keyword"), + ('bases', ..., ..., "positional_or_keyword"), + ('dct', ..., ..., "positional_or_keyword"), + ('foo', 1, ..., "keyword_only")), + ...)) + + self.assertEqual(self.signature(CM), + ((('nm', ..., ..., "positional_or_keyword"), + ('bs', ..., ..., "positional_or_keyword"), + ('dt', ..., ..., "positional_or_keyword")), + ...)) + + self.assertEqual(self.signature(C), + ((('b', ..., ..., "positional_or_keyword"),), + ...)) + + class CM(type): + def __init__(cls, name, bases, dct, *, bar=2): + return super().__init__(name, bases, dct) + class C(metaclass=CM): + def __init__(self, b): + pass + + self.assertEqual(self.signature(CM), + ((('name', ..., ..., "positional_or_keyword"), + ('bases', ..., ..., "positional_or_keyword"), + ('dct', ..., ..., "positional_or_keyword"), + ('bar', 2, ..., "keyword_only")), + ...)) + + def test_signature_on_callable_objects(self): + class Foo: + def __call__(self, a): + pass + + 
self.assertEqual(self.signature(Foo()), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + class Spam: + pass + with self.assertRaisesRegexp(TypeError, "is not a callable object"): + inspect.signature(Spam()) + + class Bar(Spam, Foo): + pass + + self.assertEqual(self.signature(Bar()), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + class ToFail: + __call__ = type + with self.assertRaisesRegexp(ValueError, "not supported by signature"): + inspect.signature(ToFail()) + + + class Wrapped: + pass + Wrapped.__wrapped__ = lambda a: None + self.assertEqual(self.signature(Wrapped), + ((('a', ..., ..., "positional_or_keyword"),), + ...)) + + def test_signature_on_lambdas(self): + self.assertEqual(self.signature((lambda a=10: a)), + ((('a', 10, ..., "positional_or_keyword"),), + ...)) + + def test_signature_equality(self): + def foo(a, *, b:int) -> float: pass + self.assertNotEqual(inspect.signature(foo), 42) + + def bar(a, *, b:int) -> float: pass + self.assertEqual(inspect.signature(foo), inspect.signature(bar)) + + def bar(a, *, b:int) -> int: pass + self.assertNotEqual(inspect.signature(foo), inspect.signature(bar)) + + def bar(a, *, b:int): pass + self.assertNotEqual(inspect.signature(foo), inspect.signature(bar)) + + def bar(a, *, b:int=42) -> float: pass + self.assertNotEqual(inspect.signature(foo), inspect.signature(bar)) + + def bar(a, *, c) -> float: pass + self.assertNotEqual(inspect.signature(foo), inspect.signature(bar)) + + def bar(a, b:int) -> float: pass + self.assertNotEqual(inspect.signature(foo), inspect.signature(bar)) + def spam(b:int, a) -> float: pass + self.assertNotEqual(inspect.signature(spam), inspect.signature(bar)) + + def foo(*, a, b, c): pass + def bar(*, c, b, a): pass + self.assertEqual(inspect.signature(foo), inspect.signature(bar)) + + def foo(*, a=1, b, c): pass + def bar(*, c, b, a=1): pass + self.assertEqual(inspect.signature(foo), inspect.signature(bar)) + + def foo(pos, *, a=1, b, c): pass + def bar(pos, *, c, b, a=1): pass + self.assertEqual(inspect.signature(foo), inspect.signature(bar)) + + def foo(pos, *, a, b, c): pass + def bar(pos, *, c, b, a=1): pass + self.assertNotEqual(inspect.signature(foo), inspect.signature(bar)) + + def foo(pos, *args, a=42, b, c, **kwargs:int): pass + def bar(pos, *args, c, b, a=42, **kwargs:int): pass + self.assertEqual(inspect.signature(foo), inspect.signature(bar)) + + def test_signature_unhashable(self): + def foo(a): pass + sig = inspect.signature(foo) + with self.assertRaisesRegexp(TypeError, 'unhashable type'): + hash(sig) + + def test_signature_str(self): + def foo(a:int=1, *, b, c=None, **kwargs) -> 42: + pass + self.assertEqual(str(inspect.signature(foo)), + '(a:int=1, *, b, c=None, **kwargs) -> 42') + + def foo(a:int=1, *args, b, c=None, **kwargs) -> 42: + pass + self.assertEqual(str(inspect.signature(foo)), + '(a:int=1, *args, b, c=None, **kwargs) -> 42') + + def foo(): + pass + self.assertEqual(str(inspect.signature(foo)), '()') + + def test_signature_str_positional_only(self): + P = inspect.Parameter + + def test(a_po, *, b, **kwargs): + return a_po, kwargs + + sig = inspect.signature(test) + new_params = list(sig.parameters.values()) + new_params[0] = new_params[0].replace(kind=P.POSITIONAL_ONLY) + test.__signature__ = sig.replace(parameters=new_params) + + self.assertEqual(str(inspect.signature(test)), + '(, *, b, **kwargs)') + + sig = inspect.signature(test) + new_params = list(sig.parameters.values()) + new_params[0] = new_params[0].replace(name=None) + test.__signature__ = 
sig.replace(parameters=new_params) + self.assertEqual(str(inspect.signature(test)), + '(<0>, *, b, **kwargs)') + + def test_signature_replace_anno(self): + def test() -> 42: + pass + + sig = inspect.signature(test) + sig = sig.replace(return_annotation=None) + self.assertIs(sig.return_annotation, None) + sig = sig.replace(return_annotation=sig.empty) + self.assertIs(sig.return_annotation, sig.empty) + sig = sig.replace(return_annotation=42) + self.assertEqual(sig.return_annotation, 42) + self.assertEqual(sig, inspect.signature(test)) + + +class TestParameterObject(unittest.TestCase): + def test_signature_parameter_kinds(self): + P = inspect.Parameter + self.assertTrue(P.POSITIONAL_ONLY < P.POSITIONAL_OR_KEYWORD < \ + P.VAR_POSITIONAL < P.KEYWORD_ONLY < P.VAR_KEYWORD) + + self.assertEqual(str(P.POSITIONAL_ONLY), 'POSITIONAL_ONLY') + self.assertTrue('POSITIONAL_ONLY' in repr(P.POSITIONAL_ONLY)) + + def test_signature_parameter_object(self): + p = inspect.Parameter('foo', default=10, + kind=inspect.Parameter.POSITIONAL_ONLY) + self.assertEqual(p.name, 'foo') + self.assertEqual(p.default, 10) + self.assertIs(p.annotation, p.empty) + self.assertEqual(p.kind, inspect.Parameter.POSITIONAL_ONLY) + + with self.assertRaisesRegexp(ValueError, 'invalid value'): + inspect.Parameter('foo', default=10, kind='123') + + with self.assertRaisesRegexp(ValueError, 'not a valid parameter name'): + inspect.Parameter('1', kind=inspect.Parameter.VAR_KEYWORD) + + with self.assertRaisesRegexp(ValueError, + 'non-positional-only parameter'): + inspect.Parameter(None, kind=inspect.Parameter.VAR_KEYWORD) + + with self.assertRaisesRegexp(ValueError, 'cannot have default values'): + inspect.Parameter('a', default=42, + kind=inspect.Parameter.VAR_KEYWORD) + + with self.assertRaisesRegexp(ValueError, 'cannot have default values'): + inspect.Parameter('a', default=42, + kind=inspect.Parameter.VAR_POSITIONAL) + + p = inspect.Parameter('a', default=42, + kind=inspect.Parameter.POSITIONAL_OR_KEYWORD) + with self.assertRaisesRegexp(ValueError, 'cannot have default values'): + p.replace(kind=inspect.Parameter.VAR_POSITIONAL) + + self.assertTrue(repr(p).startswith('') + + p = p.replace(name='1') + self.assertEqual(str(p), '<1>') + + def test_signature_parameter_immutability(self): + p = inspect.Parameter(None, kind=inspect.Parameter.POSITIONAL_ONLY) + + with self.assertRaises(AttributeError): + p.foo = 'bar' + + with self.assertRaises(AttributeError): + p.kind = 123 + + +class TestSignatureBind(unittest.TestCase): + @staticmethod + def call(func, *args, **kwargs): + sig = inspect.signature(func) + ba = sig.bind(*args, **kwargs) + return func(*ba.args, **ba.kwargs) + + def test_signature_bind_empty(self): + def test(): + return 42 + + self.assertEqual(self.call(test), 42) + with self.assertRaisesRegexp(TypeError, 'too many positional arguments'): + self.call(test, 1) + with self.assertRaisesRegexp(TypeError, 'too many positional arguments'): + self.call(test, 1, spam=10) + with self.assertRaisesRegexp(TypeError, 'too many keyword arguments'): + self.call(test, spam=1) + + def test_signature_bind_var(self): + def test(*args, **kwargs): + return args, kwargs + + self.assertEqual(self.call(test), ((), {})) + self.assertEqual(self.call(test, 1), ((1,), {})) + self.assertEqual(self.call(test, 1, 2), ((1, 2), {})) + self.assertEqual(self.call(test, foo='bar'), ((), {'foo': 'bar'})) + self.assertEqual(self.call(test, 1, foo='bar'), ((1,), {'foo': 'bar'})) + self.assertEqual(self.call(test, args=10), ((), {'args': 10})) + 
self.assertEqual(self.call(test, 1, 2, foo='bar'), + ((1, 2), {'foo': 'bar'})) + + def test_signature_bind_just_args(self): + def test(a, b, c): + return a, b, c + + self.assertEqual(self.call(test, 1, 2, 3), (1, 2, 3)) + + with self.assertRaisesRegexp(TypeError, 'too many positional arguments'): + self.call(test, 1, 2, 3, 4) + + with self.assertRaisesRegexp(TypeError, "'b' parameter lacking default"): + self.call(test, 1) + + with self.assertRaisesRegexp(TypeError, "'a' parameter lacking default"): + self.call(test) + + def test(a, b, c=10): + return a, b, c + self.assertEqual(self.call(test, 1, 2, 3), (1, 2, 3)) + self.assertEqual(self.call(test, 1, 2), (1, 2, 10)) + + def test(a=1, b=2, c=3): + return a, b, c + self.assertEqual(self.call(test, a=10, c=13), (10, 2, 13)) + self.assertEqual(self.call(test, a=10), (10, 2, 3)) + self.assertEqual(self.call(test, b=10), (1, 10, 3)) + + def test_signature_bind_varargs_order(self): + def test(*args): + return args + + self.assertEqual(self.call(test), ()) + self.assertEqual(self.call(test, 1, 2, 3), (1, 2, 3)) + + def test_signature_bind_args_and_varargs(self): + def test(a, b, c=3, *args): + return a, b, c, args + + self.assertEqual(self.call(test, 1, 2, 3, 4, 5), (1, 2, 3, (4, 5))) + self.assertEqual(self.call(test, 1, 2), (1, 2, 3, ())) + self.assertEqual(self.call(test, b=1, a=2), (2, 1, 3, ())) + self.assertEqual(self.call(test, 1, b=2), (1, 2, 3, ())) + + with self.assertRaisesRegexp(TypeError, + "multiple values for argument 'c'"): + self.call(test, 1, 2, 3, c=4) + + def test_signature_bind_just_kwargs(self): + def test(**kwargs): + return kwargs + + self.assertEqual(self.call(test), {}) + self.assertEqual(self.call(test, foo='bar', spam='ham'), + {'foo': 'bar', 'spam': 'ham'}) + + def test_signature_bind_args_and_kwargs(self): + def test(a, b, c=3, **kwargs): + return a, b, c, kwargs + + self.assertEqual(self.call(test, 1, 2), (1, 2, 3, {})) + self.assertEqual(self.call(test, 1, 2, foo='bar', spam='ham'), + (1, 2, 3, {'foo': 'bar', 'spam': 'ham'})) + self.assertEqual(self.call(test, b=2, a=1, foo='bar', spam='ham'), + (1, 2, 3, {'foo': 'bar', 'spam': 'ham'})) + self.assertEqual(self.call(test, a=1, b=2, foo='bar', spam='ham'), + (1, 2, 3, {'foo': 'bar', 'spam': 'ham'})) + self.assertEqual(self.call(test, 1, b=2, foo='bar', spam='ham'), + (1, 2, 3, {'foo': 'bar', 'spam': 'ham'})) + self.assertEqual(self.call(test, 1, b=2, c=4, foo='bar', spam='ham'), + (1, 2, 4, {'foo': 'bar', 'spam': 'ham'})) + self.assertEqual(self.call(test, 1, 2, 4, foo='bar'), + (1, 2, 4, {'foo': 'bar'})) + self.assertEqual(self.call(test, c=5, a=4, b=3), + (4, 3, 5, {})) + + def test_signature_bind_kwonly(self): + def test(*, foo): + return foo + with self.assertRaisesRegexp(TypeError, + 'too many positional arguments'): + self.call(test, 1) + self.assertEqual(self.call(test, foo=1), 1) + + def test(a, *, foo=1, bar): + return foo + with self.assertRaisesRegexp(TypeError, + "'bar' parameter lacking default value"): + self.call(test, 1) + + def test(foo, *, bar): + return foo, bar + self.assertEqual(self.call(test, 1, bar=2), (1, 2)) + self.assertEqual(self.call(test, bar=2, foo=1), (1, 2)) + + with self.assertRaisesRegexp(TypeError, + 'too many keyword arguments'): + self.call(test, bar=2, foo=1, spam=10) + + with self.assertRaisesRegexp(TypeError, + 'too many positional arguments'): + self.call(test, 1, 2) + + with self.assertRaisesRegexp(TypeError, + 'too many positional arguments'): + self.call(test, 1, 2, bar=2) + + with self.assertRaisesRegexp(TypeError, + 'too 
many keyword arguments'): + self.call(test, 1, bar=2, spam='ham') + + with self.assertRaisesRegexp(TypeError, + "'bar' parameter lacking default value"): + self.call(test, 1) + + def test(foo, *, bar, **bin): + return foo, bar, bin + self.assertEqual(self.call(test, 1, bar=2), (1, 2, {})) + self.assertEqual(self.call(test, foo=1, bar=2), (1, 2, {})) + self.assertEqual(self.call(test, 1, bar=2, spam='ham'), + (1, 2, {'spam': 'ham'})) + self.assertEqual(self.call(test, spam='ham', foo=1, bar=2), + (1, 2, {'spam': 'ham'})) + with self.assertRaisesRegexp(TypeError, + "'foo' parameter lacking default value"): + self.call(test, spam='ham', bar=2) + self.assertEqual(self.call(test, 1, bar=2, bin=1, spam=10), + (1, 2, {'bin': 1, 'spam': 10})) + + def test_signature_bind_arguments(self): + def test(a, *args, b, z=100, **kwargs): + pass + sig = inspect.signature(test) + ba = sig.bind(10, 20, b=30, c=40, args=50, kwargs=60) + # we won't have 'z' argument in the bound arguments object, as we didn't + # pass it to the 'bind' + self.assertEqual(tuple(ba.arguments.items()), + (('a', 10), ('args', (20,)), ('b', 30), + ('kwargs', {'c': 40, 'args': 50, 'kwargs': 60}))) + self.assertEqual(ba.kwargs, + {'b': 30, 'c': 40, 'args': 50, 'kwargs': 60}) + self.assertEqual(ba.args, (10, 20)) + + def test_signature_bind_positional_only(self): + P = inspect.Parameter + + def test(a_po, b_po, c_po=3, foo=42, *, bar=50, **kwargs): + return a_po, b_po, c_po, foo, bar, kwargs + + sig = inspect.signature(test) + new_params = collections.OrderedDict(tuple(sig.parameters.items())) + for name in ('a_po', 'b_po', 'c_po'): + new_params[name] = new_params[name].replace(kind=P.POSITIONAL_ONLY) + new_sig = sig.replace(parameters=new_params.values()) + test.__signature__ = new_sig + + self.assertEqual(self.call(test, 1, 2, 4, 5, bar=6), + (1, 2, 4, 5, 6, {})) + + with self.assertRaisesRegexp(TypeError, "parameter is positional only"): + self.call(test, 1, 2, c_po=4) + + with self.assertRaisesRegexp(TypeError, "parameter is positional only"): + self.call(test, a_po=1, b_po=2) + + +class TestBoundArguments(unittest.TestCase): + def test_signature_bound_arguments_unhashable(self): + def foo(a): pass + ba = inspect.signature(foo).bind(1) + + with self.assertRaisesRegexp(TypeError, 'unhashable type'): + hash(ba) + + def test_signature_bound_arguments_equality(self): + def foo(a): pass + ba = inspect.signature(foo).bind(1) + self.assertEqual(ba, ba) + + ba2 = inspect.signature(foo).bind(1) + self.assertEqual(ba, ba2) + + ba3 = inspect.signature(foo).bind(2) + self.assertNotEqual(ba, ba3) + ba3.arguments['a'] = 1 + self.assertEqual(ba, ba3) + + def bar(b): pass + ba4 = inspect.signature(bar).bind(1) + self.assertNotEqual(ba, ba4) + + def test_main(): run_unittest( TestDecorators, TestRetrievingSourceCode, TestOneliners, TestBuggyCases, TestInterpreterStack, TestClassesAndFunctions, TestPredicates, TestGetcallargsFunctions, TestGetcallargsMethods, TestGetcallargsUnboundMethods, TestGetattrStatic, TestGetGeneratorState, - TestNoEOL + TestNoEOL, TestSignatureObject, TestSignatureBind, TestParameterObject, + TestBoundArguments ) if __name__ == "__main__": diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -40,6 +40,9 @@ Library ------- +- Issue #15008: Implement PEP 362 "Signature Objects". + Patch by Yury Selivanov. + - Issue: #15138: base64.urlsafe_{en,de}code() are now 3-4x faster. - Issue #444582: Add shutil.which, for finding programs on the system path. 
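
As a quick orientation before the footer, here is a minimal interactive sketch of how the signature(), Parameter and BoundArguments pieces added above fit together. The function "foo" is hypothetical, and the expected output is inferred from the Signature.__str__, _ParameterKind and BoundArguments code in this patch rather than copied from it:

    >>> import inspect
    >>> def foo(a, b=10, *args, c, **kwargs) -> 42:
    ...     return (a, b, args, c, kwargs)
    ...
    >>> sig = inspect.signature(foo)
    >>> str(sig)
    '(a, b=10, *args, c, **kwargs) -> 42'
    >>> list(sig.parameters)
    ['a', 'b', 'args', 'c', 'kwargs']
    >>> str(sig.parameters['c'].kind)
    'KEYWORD_ONLY'
    >>> ba = sig.bind(1, c=3)
    >>> ba.arguments
    OrderedDict([('a', 1), ('c', 3)])
    >>> (ba.args, ba.kwargs)
    ((1,), {'c': 3})
    >>> foo(*ba.args, **ba.kwargs)
    (1, 10, (), 3, {})

Note that bind() records only the arguments actually passed: the parameter b, left at its default, does not appear in ba.arguments, which matches the BoundArguments docstring above.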
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 00:32:25 2012 From: python-checkins at python.org (stefan.krah) Date: Sat, 23 Jun 2012 00:32:25 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Whitespace=2E?= Message-ID: http://hg.python.org/cpython/rev/b586bd95cb9b changeset: 77595:b586bd95cb9b user: Stefan Krah date: Sat Jun 23 00:31:04 2012 +0200 summary: Whitespace. files: Modules/_decimal/libmpdec/mpdecimal.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/_decimal/libmpdec/mpdecimal.c b/Modules/_decimal/libmpdec/mpdecimal.c --- a/Modules/_decimal/libmpdec/mpdecimal.c +++ b/Modules/_decimal/libmpdec/mpdecimal.c @@ -6910,7 +6910,7 @@ void mpd_qtrunc(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx, - uint32_t *status) + uint32_t *status) { (void)_mpd_qround_to_integral(TO_INT_TRUNC, result, a, ctx, status); } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 00:53:14 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sat, 23 Jun 2012 00:53:14 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_silly_mistake_in_debugg?= =?utf8?q?ing_code?= Message-ID: http://hg.python.org/cpython/rev/7d9795773e5c changeset: 77596:7d9795773e5c user: Antoine Pitrou date: Sat Jun 23 00:49:44 2012 +0200 summary: Fix silly mistake in debugging code files: Lib/test/test_reprlib.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_reprlib.py b/Lib/test/test_reprlib.py --- a/Lib/test/test_reprlib.py +++ b/Lib/test/test_reprlib.py @@ -249,7 +249,7 @@ self.skipTest("test paths too long (%d characters) for Windows' 260 character limit" % cached_path_len) elif os.name == 'nt' and verbose: - print("len(cached_path_len) =", len(cached_path_len)) + print("cached_path_len =", cached_path_len) def test_module(self): self._check_path_limitations(self.pkgname) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 01:30:50 2012 From: python-checkins at python.org (larry.hastings) Date: Sat, 23 Jun 2012 01:30:50 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314626=3A_Large_ref?= =?utf8?q?actoring_of_functions_/_parameters_in_the_os_module=2E?= Message-ID: http://hg.python.org/cpython/rev/27f9c26fdd8b changeset: 77597:27f9c26fdd8b user: Larry Hastings date: Fri Jun 22 16:30:09 2012 -0700 summary: Issue #14626: Large refactoring of functions / parameters in the os module. Many functions now support "dir_fd" and "follow_symlinks" parameters; some also support accepting an open file descriptor in place of of a path string. Added os.support_* collections as LBYL helpers. Removed many functions only previously seen in 3.3 alpha releases (often starting with "f" or "l", or ending with "at"). Originally suggested by Serhiy Storchaka; implemented by Larry Hastings. files: Doc/library/os.rst | 952 ++-- Lib/os.py | 117 +- Lib/shutil.py | 61 +- Lib/test/support.py | 4 +- Lib/test/test_os.py | 166 +- Lib/test/test_posix.py | 224 +- Lib/test/test_shutil.py | 12 +- Misc/NEWS | 10 +- Modules/posixmodule.c | 5288 ++++++++++++++------------ 9 files changed, 3603 insertions(+), 3231 deletions(-) diff --git a/Doc/library/os.rst b/Doc/library/os.rst --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -627,20 +627,6 @@ descriptor directly will bypass the file object methods, ignoring aspects such as internal buffering of data. -.. 
data:: AT_SYMLINK_NOFOLLOW - AT_EACCESS - AT_FDCWD - AT_REMOVEDIR - AT_SYMLINK_FOLLOW - UTIME_NOW - UTIME_OMIT - - These parameters are used as flags to the \*at family of functions. - - Availability: Unix. - - .. versionadded:: 3.3 - .. function:: close(fd) @@ -690,58 +676,23 @@ Availability: Unix, Windows. -.. function:: faccessat(dirfd, path, mode, flags=0) - - Like :func:`access` but if *path* is relative, it is taken as relative to *dirfd*. - *flags* is optional and can be constructed by ORing together zero or more - of these values: :data:`AT_SYMLINK_NOFOLLOW`, :data:`AT_EACCESS`. - If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path* - is interpreted relative to the current working directory. - - Availability: Unix. - - .. versionadded:: 3.3 - - .. function:: fchmod(fd, mode) Change the mode of the file given by *fd* to the numeric *mode*. See the docs for :func:`chmod` for possible values of *mode*. + Equivalent to ``os.chmod(fd, mode)``. Availability: Unix. -.. function:: fchmodat(dirfd, path, mode, flags=0) - - Like :func:`chmod` but if *path* is relative, it is taken as relative to *dirfd*. - *flags* is optional and may be 0 or :data:`AT_SYMLINK_NOFOLLOW`. - If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path* - is interpreted relative to the current working directory. +.. function:: fchown(fd, uid, gid) + + Change the owner and group id of the file given by *fd* to the numeric *uid* + and *gid*. To leave one of the ids unchanged, set it to -1. Equivalent + to ``os.chown(fd, uid, gid)``. Availability: Unix. - .. versionadded:: 3.3 - - -.. function:: fchown(fd, uid, gid) - - Change the owner and group id of the file given by *fd* to the numeric *uid* - and *gid*. To leave one of the ids unchanged, set it to -1. - - Availability: Unix. - - -.. function:: fchownat(dirfd, path, uid, gid, flags=0) - - Like :func:`chown` but if *path* is relative, it is taken as relative to *dirfd*. - *flags* is optional and may be 0 or :data:`AT_SYMLINK_NOFOLLOW`. - If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path* - is interpreted relative to the current working directory. - - Availability: Unix. - - .. versionadded:: 3.3 - .. function:: fdatasync(fd) @@ -754,47 +705,6 @@ This function is not available on MacOS. -.. function:: fgetxattr(fd, attr) - - This works exactly like :func:`getxattr` but operates on a file descriptor, - *fd*, instead of a path. - - Availability: Linux - - .. versionadded:: 3.3 - - -.. function:: flistxattr(fd) - - This is exactly like :func:`listxattr` but operates on a file descriptor, - *fd*, instead of a path. - - Availability: Linux - - .. versionadded:: 3.3 - - -.. function:: flistdir(fd) - - Like :func:`listdir`, but uses a file descriptor instead and always returns - strings. - - Availability: Unix. - - .. versionadded:: 3.3 - - -.. function:: fexecve(fd, args, env) - - Execute the program specified by a file descriptor *fd* with arguments given - by *args* and environment given by *env*, replacing the current process. - *args* and *env* are given as in :func:`execve`. - - Availability: Unix. - - .. versionadded:: 3.3 - - .. function:: fpathconf(fd, name) Return system configuration information relevant to an open file. *name* @@ -819,18 +729,6 @@ Availability: Unix, Windows. -.. function:: fstatat(dirfd, path, flags=0) - - Like :func:`stat` but if *path* is relative, it is taken as relative to *dirfd*. - *flags* is optional and may be 0 or :data:`AT_SYMLINK_NOFOLLOW`. 
- If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path* - is interpreted relative to the current working directory. - - Availability: Unix. - - .. versionadded:: 3.3 - - .. function:: fstatvfs(fd) Return information about the filesystem containing the file associated with file @@ -859,78 +757,6 @@ Availability: Unix. -.. function:: fremovexattr(fd, attr) - - This works exactly like :func:`removexattr` but operates on a file - descriptor, *fd*, instead of a path. - - Availability: Linux - - .. versionadded:: 3.3 - - -.. function:: fsetxattr(fd, attr, value, flags=0) - - This works exactly like :func:`setxattr` but on a file descriptor, *fd*, - instead of a path. - - - Availability: Linux - - .. versionadded:: 3.3 - - -.. function:: futimesat(dirfd, path[, times]) - - Like :func:`utime` but if *path* is relative, it is taken as relative to *dirfd*. - If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path* - is interpreted relative to the current working directory. *times* must be a - 2-tuple of numbers, of the form ``(atime, mtime)``, or None. - - Availability: Unix. - - .. versionadded:: 3.3 - - -.. function:: futimens(fd[, atimes, mtimes]) - - Updates the timestamps of a file specified by the file descriptor *fd*, with - nanosecond precision. - If no second argument is given, set *atime* and *mtime* to the current time. - *atimes* and *mtimes* must be 2-tuples of numbers, of the form - ``(atime_sec, atime_nsec)`` and ``(mtime_sec, mtime_nsec)`` respectively, - or ``None``. - If *atime_nsec* or *mtime_nsec* is specified as :data:`UTIME_NOW`, the corresponding - timestamp is updated to the current time. - If *atime_nsec* or *mtime_nsec* is specified as :data:`UTIME_OMIT`, the corresponding - timestamp is not updated. - - Availability: Unix. - - .. versionadded:: 3.3 - - -.. data:: UTIME_NOW - UTIME_OMIT - - Flags used with :func:`futimens` to specify that the timestamp must be - updated either to the current time or not updated at all. - - Availability: Unix. - - .. versionadded:: 3.3 - - -.. function:: futimes(fd[, times, *, ns=times]) - - Set the access and modified time of the file specified by the file - descriptor *fd* to the given values. See :func:`utime` for proper - use of the *times* and *ns* arguments. - Availability: Unix. - - .. versionadded:: 3.3 - - .. function:: isatty(fd) Return ``True`` if the file descriptor *fd* is open and connected to a @@ -939,20 +765,6 @@ Availability: Unix. -.. function:: linkat(srcfd, srcpath, dstfd, dstpath, flags=0) - - Like :func:`link` but if *srcpath* is relative, it is taken as relative to *srcfd* - and if *dstpath* is relative, it is taken as relative to *dstfd*. - *flags* is optional and may be 0 or :data:`AT_SYMLINK_FOLLOW`. - If *srcpath* is relative and *srcfd* is the special value :data:`AT_FDCWD`, then - *srcpath* is interpreted relative to the current working directory. This - also applies for *dstpath*. - - Availability: Unix. - - .. versionadded:: 3.3 - - .. function:: lockf(fd, cmd, len) Apply, test or remove a POSIX lock on an open file descriptor. @@ -1000,51 +812,25 @@ :data:`os.SEEK_HOLE` or :data:`os.SEEK_DATA`. -.. function:: mkdirat(dirfd, path, mode=0o777) - - Like :func:`mkdir` but if *path* is relative, it is taken as relative to *dirfd*. - If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path* - is interpreted relative to the current working directory. - - Availability: Unix. - - .. versionadded:: 3.3 - - -.. 
function:: mkfifoat(dirfd, path, mode=0o666) - - Like :func:`mkfifo` but if *path* is relative, it is taken as relative to *dirfd*. - If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path* - is interpreted relative to the current working directory. - - Availability: Unix. - - .. versionadded:: 3.3 - - -.. function:: mknodat(dirfd, path, mode=0o600, device=0) - - Like :func:`mknod` but if *path* is relative, it is taken as relative to *dirfd*. - If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path* - is interpreted relative to the current working directory. - - Availability: Unix. - - .. versionadded:: 3.3 - - -.. function:: open(file, flags[, mode]) +.. function:: open(file, flags, mode=0o777, *, dir_fd=None) Open the file *file* and set various flags according to *flags* and possibly - its mode according to *mode*. The default *mode* is ``0o777`` (octal), and - the current umask value is first masked out. Return the file descriptor for - the newly opened file. + its mode according to *mode*. When computing *mode*, the current umask value + is first masked out. Return the file descriptor for the newly opened file. For a description of the flag and mode values, see the C run-time documentation; flag constants (like :const:`O_RDONLY` and :const:`O_WRONLY`) are defined in this module too (see :ref:`open-constants`). In particular, on Windows adding :const:`O_BINARY` is needed to open files in binary mode. + If *dir_fd* is not ``None``, it should be a file descriptor referring to a + directory, and *path* should be relative; path will then be relative to + that directory. (If *path* is absolute, *dir_fd* is ignored.) + *dir_fd* may not be supported on your platform; + you can check whether or not it is available using + :data:`os.supports_dir_fd`. If it is unavailable, using it will raise + a :exc:`NotImplementedError`. + Availability: Unix, Windows. .. note:: @@ -1054,16 +840,8 @@ :meth:`~file.read` and :meth:`~file.write` methods (and many more). To wrap a file descriptor in a file object, use :func:`fdopen`. - -.. function:: openat(dirfd, path, flags, mode=0o777) - - Like :func:`open` but if *path* is relative, it is taken as relative to *dirfd*. - If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path* - is interpreted relative to the current working directory. - - Availability: Unix. - .. versionadded:: 3.3 + The *dir_fd* argument. .. function:: openpty() @@ -1216,41 +994,6 @@ .. versionadded:: 3.3 -.. function:: readlinkat(dirfd, path) - - Like :func:`readlink` but if *path* is relative, it is taken as relative to *dirfd*. - If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path* - is interpreted relative to the current working directory. - - Availability: Unix. - - .. versionadded:: 3.3 - - -.. function:: renameat(olddirfd, oldpath, newdirfd, newpath) - - Like :func:`rename` but if *oldpath* is relative, it is taken as relative to - *olddirfd* and if *newpath* is relative, it is taken as relative to *newdirfd*. - If *oldpath* is relative and *olddirfd* is the special value :data:`AT_FDCWD`, then - *oldpath* is interpreted relative to the current working directory. This - also applies for *newpath*. - - Availability: Unix. - - .. versionadded:: 3.3 - - -.. function:: symlinkat(src, dstfd, dst) - - Like :func:`symlink` but if *dst* is relative, it is taken as relative to *dstfd*. 
- If *dst* is relative and *dstfd* is the special value :data:`AT_FDCWD`, then *dst* - is interpreted relative to the current working directory. - - Availability: Unix. - - .. versionadded:: 3.3 - - .. function:: readv(fd, buffers) Read from a file descriptor into a number of writable buffers. *buffers* is @@ -1287,38 +1030,6 @@ Availability: Unix. -.. function:: unlinkat(dirfd, path, flags=0) - - Like :func:`unlink` but if *path* is relative, it is taken as relative to *dirfd*. - *flags* is optional and may be 0 or :data:`AT_REMOVEDIR`. If :data:`AT_REMOVEDIR` is - specified, :func:`unlinkat` behaves like :func:`rmdir`. - If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path* - is interpreted relative to the current working directory. - - Availability: Unix. - - .. versionadded:: 3.3 - - -.. function:: utimensat(dirfd, path[, atime=(atime_sec, atime_nsec), mtime=(mtime_sec, mtime_nsec), flags=0]) - - Updates the timestamps of a file with nanosecond precision. - The *atime* and *mtime* tuples default to ``None``, which sets those - values to the current time. - If *atime_nsec* or *mtime_nsec* is specified as :data:`UTIME_NOW`, the corresponding - timestamp is updated to the current time. - If *atime_nsec* or *mtime_nsec* is specified as :data:`UTIME_OMIT`, the corresponding - timestamp is not updated. - If *path* is relative, it is taken as relative to *dirfd*. - *flags* is optional and may be 0 (the default) or :data:`AT_SYMLINK_NOFOLLOW`. - If *path* is relative and *dirfd* is the special value :data:`AT_FDCWD`, then *path* - is interpreted relative to the current working directory. - - Availability: Unix. - - .. versionadded:: 3.3 - - .. function:: write(fd, str) Write the bytestring in *str* to file descriptor *fd*. Return the number of @@ -1460,7 +1171,7 @@ Files and Directories --------------------- -.. function:: access(path, mode) +.. function:: access(path, mode, *, dir_fd=None, effective_ids=False, follow_symlinks=True) Use the real uid/gid to test for access to *path*. Note that most operations will use the effective uid/gid, therefore this routine can be used in a @@ -1471,6 +1182,27 @@ :const:`False` if not. See the Unix man page :manpage:`access(2)` for more information. + If *dir_fd* is not ``None``, it should be a file descriptor referring to a + directory, and *path* should be relative; path will then be relative to + that directory. (If *path* is absolute, *dir_fd* is ignored.) + *dir_fd* may not be supported on your platform; + you can check whether or not it is available using + :data:`os.supports_dir_fd`. If it is unavailable, using it will raise + a :exc:`NotImplementedError`. + + If *effective_ids* is ``True``, :func:`access` will perform its access + checks using the effective uid/gid instead of the real uid/gid. + *effective_ids* may not be supported on your platform; you can check whether + or not it is available using :data:`os.supports_effective_ids`. If it is + unavailable, using it will raise a :exc:`NotImplementedError`. + + If *follow_symlinks* is ``False``, and the last element of the path is a + symbolic link, :func:`access` will examine the symbolic link itself instead + of the file the link points to. *follow_symlinks* may not be supported + on your platform; you can check whether or not it is available using + :data:`os.supports_follow_symlinks`. If it is unavailable, + using it will raise a :exc:`NotImplementedError`. + Availability: Unix, Windows. .. 
note:: @@ -1502,6 +1234,9 @@ succeed, particularly for operations on network filesystems which may have permissions semantics beyond the usual POSIX permission-bit model. + .. versionchanged:: 3.3 + Added the *dir_fd*, *effective_ids*, and *follow_symlinks* parameters. + .. data:: F_OK @@ -1533,14 +1268,24 @@ Change the current working directory to *path*. + On some platforms, *path* may also be specified as an open file descriptor. + This functionality may not be supported on your platform; you can check + whether or not it is available using :data:`os.supports_fd`. If it is + unavailable, using it will raise a :exc:`NotImplementedError`. + Availability: Unix, Windows. + .. versionadded:: 3.3 + Added support for specifying *path* as a file descriptor + on some platforms, and the *dir_fd*, *effective_ids*, and + *follow_symlinks* parameters. + .. function:: fchdir(fd) Change the current working directory to the directory represented by the file descriptor *fd*. The descriptor must refer to an opened directory, not an open - file. + file. Equivalent to ``os.chdir(fd)``. Availability: Unix. @@ -1559,7 +1304,7 @@ Availability: Unix, Windows. -.. function:: chflags(path, flags) +.. function:: chflags(path, flags, *, follow_symlinks=True) Set the flags of *path* to the numeric *flags*. *flags* may take a combination (bitwise OR) of the following values (as defined in the :mod:`stat` module): @@ -1577,8 +1322,18 @@ * :data:`stat.SF_NOUNLINK` * :data:`stat.SF_SNAPSHOT` + If *follow_symlinks* is ``False``, and the last element of the path is a + symbolic link, :func:`follow_symlinks` will examine the symbolic link itself + instead of the file the link points to. *follow_symlinks* may not be + supported on your platform; you can check whether or not it is available + using :data:`os.supports_follow_symlinks`. If it is unavailable, + using it will raise a :exc:`NotImplementedError`. + Availability: Unix. + .. versionadded:: 3.3 + The *follow_symlinks* argument. + .. function:: chroot(path) @@ -1586,7 +1341,7 @@ Unix. -.. function:: chmod(path, mode) +.. function:: chmod(path, mode, *, dir_fd=None, follow_symlinks=True) Change the mode of *path* to the numeric *mode*. *mode* may take one of the following values (as defined in the :mod:`stat` module) or bitwise ORed @@ -1612,6 +1367,29 @@ * :data:`stat.S_IWOTH` * :data:`stat.S_IXOTH` + On some platforms, *path* may also be specified as an open file descriptor. + This functionality may not be supported on your platform; you can check + whether or not it is available using :data:`os.supports_fd`. If it is + unavailable, using it will raise a :exc:`NotImplementedError`. + + If *dir_fd* is not ``None``, it should be a file descriptor referring to a + directory, and *path* should be relative; path will then be relative to + that directory. (If *path* is absolute, *dir_fd* is ignored.) + *dir_fd* may not be supported on your platform; + you can check whether or not it is available using + :data:`os.supports_dir_fd`. If it is unavailable, using it will raise + a :exc:`NotImplementedError`. + + If *follow_symlinks* is ``False``, and the last element of the path is a + symbolic link, :func:`chmod` will examine the symbolic link itself instead + of the file the link points to. *follow_symlinks* may not be supported + on your platform; you can check whether or not it is available using + :data:`os.supports_follow_symlinks`. If it is unavailable, + using it will raise a :exc:`NotImplementedError`. 
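A minimal sketch of how the new *follow_symlinks* keyword and the :data:`os.supports_follow_symlinks` set (documented further down) fit together; the file name and mode here are illustrative only, not part of the patch::

    import os
    import stat

    target = "example.conf"                 # hypothetical path; may be a symlink
    mode = stat.S_IRUSR | stat.S_IWUSR
    if os.chmod in os.supports_follow_symlinks:
        # change the link itself rather than whatever it points to
        os.chmod(target, mode, follow_symlinks=False)
    else:
        # platform cannot avoid following the link; use the default behaviour
        os.chmod(target, mode)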
+ + It is an error to use *dir_fd* or *follow_symlinks* when specifying + *path* as an open file descriptor. + Availability: Unix, Windows. .. note:: @@ -1621,23 +1399,61 @@ constants or a corresponding integer value). All other bits are ignored. - -.. function:: chown(path, uid, gid) + .. versionadded:: 3.3 + Added support for specifying *path* as an open file descriptor, + and the *dir_fd* and *follow_symlinks* arguments. + + +.. function:: chown(path, uid, gid, *, dir_fd=None, follow_symlinks=True) Change the owner and group id of *path* to the numeric *uid* and *gid*. To leave one of the ids unchanged, set it to -1. + On some platforms, *path* may also be specified as an open file descriptor. + This functionality may not be supported on your platform; you can check + whether or not it is available using :data:`os.supports_fd`. If it is + unavailable, using it will raise a :exc:`NotImplementedError`. + + If *dir_fd* is not ``None``, it should be a file descriptor referring to a + directory, and *path* should be relative; path will then be relative to + that directory. (If *path* is absolute, *dir_fd* is ignored.) + *dir_fd* may not be supported on your platform; + you can check whether or not it is available using + :data:`os.supports_dir_fd`. If it is unavailable, using it will raise + a :exc:`NotImplementedError`. + + If *follow_symlinks* is ``False``, and the last element of the path is a + symbolic link, :func:`chown` will examine the symbolic link itself instead + of the file the link points to. *follow_symlinks* may not be supported + on your platform; you can check whether or not it is available using + :data:`os.supports_follow_symlinks`. If it is unavailable, + using it will raise a :exc:`NotImplementedError`. + + It is an error to use *dir_fd* or *follow_symlinks* when specifying + *path* as an open file descriptor. + See :func:`shutil.chown` for a higher-level function that accepts names in addition to numeric ids. Availability: Unix. - -.. function:: getxattr(path, attr) - - Return the value of the extended filesystem attribute *attr* for - *path*. *attr* can be bytes or str. If it is str, it is encoded with the - filesystem encoding. + .. versionadded:: 3.3 + Added support for specifying an open file descriptor for *path*, + and the *dir_fd* and *follow_symlinks* arguments. + + +.. function:: getxattr(path, attribute, *, follow_symlinks=True) + + Return the value of the extended filesystem attribute *attribute* for + *path*. *attribute* can be bytes or str. If it is str, it is encoded + with the filesystem encoding. + + *path* may be specified as either a string or an open file descriptor. + + If *follow_symlinks* is ``False``, and the last element of the path is a + symbolic link, :func:`setxattr` will examine the symbolic link itself + instead of the file the link points to. It is an error to use + *follow_symlinks* when specifying *path* as an open file descriptor. Availability: Linux @@ -1648,6 +1464,7 @@ Set the flags of *path* to the numeric *flags*, like :func:`chflags`, but do not follow symbolic links. + Equivalent to ``os.chflags(path, flags, follow_symlinks=False)``. Availability: Unix. @@ -1657,6 +1474,7 @@ Change the mode of *path* to the numeric *mode*. If path is a symlink, this affects the symlink rather than the target. See the docs for :func:`chmod` for possible values of *mode*. + Equivalent to ``os.chmod(path, mode, follow_symlinks=False)``. Availability: Unix. 
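The ``Equivalent to ...`` notes in these entries can be exercised directly; a rough Unix-only sketch, assuming the platform supports *follow_symlinks* for :func:`chown` and :func:`stat` (the link name is hypothetical)::

    import os

    os.symlink("/no/such/target", "tmp_link")    # a dangling link is fine here
    try:
        # the legacy l*() spelling and the new keyword spelling both act on
        # the link itself, never on the (nonexistent) target
        os.lchown("tmp_link", -1, -1)
        os.chown("tmp_link", -1, -1, follow_symlinks=False)
        assert (os.lstat("tmp_link").st_ino ==
                os.stat("tmp_link", follow_symlinks=False).st_ino)
    finally:
        os.unlink("tmp_link")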
@@ -1665,28 +1483,39 @@ Change the owner and group id of *path* to the numeric *uid* and *gid*. This function will not follow symbolic links. + Equivalent to ``os.chown(path, uid, gid, follow_symlinks=False)``. Availability: Unix. -.. function:: lgetxattr(path, attr) - - This works exactly like :func:`getxattr` but doesn't follow symlinks. - - Availability: Linux - - .. versionadded:: 3.3 - - -.. function:: link(source, link_name) - - Create a hard link pointing to *source* named *link_name*. +.. function:: link(src, dst, *, src_dir_fd=None, dst_dir_fd=None, follow_symlinks=True) + + Create a hard link pointing to *src* named *dst*. + + If either *src_dir_fd* or *dst_dir_fd* is not ``None``, it should be a + file descriptor referring to a directory, and the corresponding path + (*src* or *dst*) should be relative; that path will then be relative to + that directory. (If *src* is absolute, *src_dir_fd* is ignored; the same + goes for *dst* and *dst_dir_fd*.) + *src_dir_fd* and *dst_dir_fd* may not be supported on your platform; + you can check whether or not they are available using :data:`os.supports_dir_fd`. + If they are unavailable, using either will raise a :exc:`NotImplementedError`. + + If *follow_symlinks* is ``False``, and the last element of *src* is a + symbolic link, :func:`link` will use the symbolic link itself instead + of the file the link points to. *follow_symlinks* may not be supported + on your platform; you can check whether or not it is available using + :data:`os.supports_follow_symlinks`. If it is unavailable, + using it will raise a :exc:`NotImplementedError`. Availability: Unix, Windows. .. versionchanged:: 3.2 Added Windows support. + .. versionadded:: 3.3 + Added the *src_dir_fd*, *dst_dir_fd*, and *follow_symlinks* arguments. + .. function:: listdir(path='.') @@ -1697,76 +1526,73 @@ This function can be called with a bytes or string argument, and returns filenames of the same datatype. + On some platforms, *path* may also be specified as an open file descriptor. + This functionality may not be supported on your platform; you can check + whether or not it is available using :data:`os.supports_fd`. If it is + unavailable, using it will raise a :exc:`NotImplementedError`. + Availability: Unix, Windows. .. versionchanged:: 3.2 The *path* parameter became optional. - -.. function:: listxattr(path) - - Return a list of the extended filesystem attributes on *path*. Attributes are - returned as string decoded with the filesystem encoding. + .. versionadded:: 3.3 + Added support for specifying an open file descriptor for *path*. + +.. function:: listxattr(path=None, *, follow_symlinks=True) + + Return a list of the extended filesystem attributes on *path*. + The attributes in the list are represented as strings decoded + with the filesystem encoding. + + *path* may be specified as either ``None``, a string, or an open file + descriptor. If *path* is ``None``, :func:`listxattr` will examine the + current directory. + + If *follow_symlinks* is ``False``, and the last element of the path is a + symbolic link, :func:`listxattr` will examine the symbolic link itself + instead of the file the link points to. It is an error to use + *follow_symlinks* when specifying *path* as an open file descriptor. Availability: Linux .. versionadded:: 3.3 -.. function:: llistxattr(path) - - This works exactly like :func:`listxattr` but doesn't follow symlinks. - - Availability: Linux - - .. versionadded:: 3.3 - - -.. 
function:: lremovexattr(path, attr) - - This works exactly like :func:`removexattr` but doesn't follow symlinks. - - Availability: Linux - - .. versionadded:: 3.3 - - -.. function:: lsetxattr(path, attr, value, flags=0) - - This works exactly like :func:`setxattr` but doesn't follow symlinks. - - Availability: Linux - - .. versionadded:: 3.3 - - -.. function:: lstat(path) +.. function:: lstat(path, *, dir_fd=None) Perform the equivalent of an :c:func:`lstat` system call on the given path. Similar to :func:`~os.stat`, but does not follow symbolic links. On platforms that do not support symbolic links, this is an alias for - :func:`~os.stat`. + :func:`~os.stat`. (Equivalent to ``os.stat(path, follow_symlinks=False)``.) + + If *dir_fd* is not ``None``, it should be a file descriptor referring to a + directory, and *path* should be relative; path will then be relative to + that directory. (If *path* is absolute, *dir_fd* is ignored.) + *dir_fd* may not be supported on your platform; + you can check whether or not it is available using + :data:`os.supports_dir_fd`. If it is unavailable, using it will raise + a :exc:`NotImplementedError`. .. versionchanged:: 3.2 Added support for Windows 6.0 (Vista) symbolic links. - -.. function:: lutimes(path[, times, *, ns=times]) - - Like :func:`utime`, but if *path* is a symbolic link, it is not - dereferenced. See :func:`utime` for proper use of the - *times* and *ns* arguments. - - Availability: Unix. - - .. versionadded:: 3.3 - - -.. function:: mkfifo(path[, mode]) - - Create a FIFO (a named pipe) named *path* with numeric mode *mode*. The - default *mode* is ``0o666`` (octal). The current umask value is first masked - out from the mode. + .. versionchanged:: 3.3 + Added the *dir_fd* parameter. + + +.. function:: mkfifo(path, mode=0o666, *, dir_fd=None) + + Create a FIFO (a named pipe) named *path* with numeric mode *mode*. + The current umask value is first masked out from the mode. + + If *dir_fd* is not ``None``, it should be a file descriptor referring to a + directory, and *path* should be relative; path will then be relative to + that directory. (If *path* is absolute, *dir_fd* is ignored.) + *dir_fd* may not be supported on your platform; + you can check whether or not it is available using + :data:`os.supports_dir_fd`. If it is unavailable, using it will raise + a :exc:`NotImplementedError`. FIFOs are pipes that can be accessed like regular files. FIFOs exist until they are deleted (for example with :func:`os.unlink`). Generally, FIFOs are used as @@ -1776,8 +1602,11 @@ Availability: Unix. - -.. function:: mknod(filename[, mode=0o600[, device=0]]) + .. versionadded:: 3.3 + The *dir_fd* argument. + + +.. function:: mknod(filename, mode=0o600, device=0, *, dir_fd=None) Create a filesystem node (file, device special file or named pipe) named *filename*. *mode* specifies both the permissions to use and the type of node @@ -1787,6 +1616,17 @@ *device* defines the newly created device special file (probably using :func:`os.makedev`), otherwise it is ignored. + If *dir_fd* is not ``None``, it should be a file descriptor referring to a + directory, and *path* should be relative; path will then be relative to + that directory. (If *path* is absolute, *dir_fd* is ignored.) + *dir_fd* may not be supported on your platform; + you can check whether or not it is available using + :data:`os.supports_dir_fd`. If it is unavailable, using it will raise + a :exc:`NotImplementedError`. + + .. versionadded:: 3.3 + The *dir_fd* argument. + .. 
function:: major(device) @@ -1805,18 +1645,30 @@ Compose a raw device number from the major and minor device numbers. -.. function:: mkdir(path[, mode]) - - Create a directory named *path* with numeric mode *mode*. The default *mode* - is ``0o777`` (octal). On some systems, *mode* is ignored. Where it is used, - the current umask value is first masked out. If the directory already +.. function:: mkdir(path, mode=0o777, *, dir_fd=None) + + Create a directory named *path* with numeric mode *mode*. + + On some systems, *mode* is ignored. Where it is used, the current + umask value is first masked out. If the directory already exists, :exc:`OSError` is raised. + If *dir_fd* is not ``None``, it should be a file descriptor referring to a + directory, and *path* should be relative; path will then be relative to + that directory. (If *path* is absolute, *dir_fd* is ignored.) + *dir_fd* may not be supported on your platform; + you can check whether or not it is available using + :data:`os.supports_dir_fd`. If it is unavailable, using it will raise + a :exc:`NotImplementedError`. + It is also possible to create temporary directories; see the :mod:`tempfile` module's :func:`tempfile.mkdtemp` function. Availability: Unix, Windows. + .. versionadded:: 3.3 + The *dir_fd* argument. + .. function:: makedirs(path, mode=0o777, exist_ok=False) @@ -1870,7 +1722,7 @@ Unix. -.. function:: readlink(path) +.. function:: readlink(path, *, dir_fd=None) Return a string representing the path to which the symbolic link points. The result may be either an absolute or relative pathname; if it is relative, it may @@ -1881,23 +1733,49 @@ and the call may raise an UnicodeDecodeError. If the *path* is a bytes object, the result will be a bytes object. + If *dir_fd* is not ``None``, it should be a file descriptor referring to a + directory, and *path* should be relative; path will then be relative to + that directory. (If *path* is absolute, *dir_fd* is ignored.) + *dir_fd* may not be supported on your platform; + you can check whether or not it is available using + :data:`os.supports_dir_fd`. If it is unavailable, using it will raise + a :exc:`NotImplementedError`. + Availability: Unix, Windows .. versionchanged:: 3.2 Added support for Windows 6.0 (Vista) symbolic links. - -.. function:: remove(path) - - Remove (delete) the file *path*. If *path* is a directory, :exc:`OSError` is - raised; see :func:`rmdir` below to remove a directory. This is identical to - the :func:`unlink` function documented below. On Windows, attempting to - remove a file that is in use causes an exception to be raised; on Unix, the - directory entry is removed but the storage allocated to the file is not made - available until the original file is no longer in use. + .. versionadded:: 3.3 + The *dir_fd* argument. + + +.. function:: remove(path, *, dir_fd=None, rmdir=False) + + Remove (delete) the file *path*. This function is identical to + :func:`os.unlink`. + + Specify ``rmdir=True`` if *path* is a directory. Failing to do so + will raise an exception; likewise, specifying ``rmdir=True`` when + *path* is not a directory will also raise an exception. + + If *dir_fd* is not ``None``, it should be a file descriptor referring to a + directory, and *path* should be relative; path will then be relative to + that directory. (If *path* is absolute, *dir_fd* is ignored.) + *dir_fd* may not be supported on your platform; + you can check whether or not it is available using + :data:`os.supports_dir_fd`. 
If it is unavailable, using it will raise + a :exc:`NotImplementedError`. + + On Windows, attempting to remove a file that is in use causes an exception to + be raised; on Unix, the directory entry is removed but the storage allocated + to the file is not made available until the original file is no longer in use. Availability: Unix, Windows. + .. versionadded:: 3.3 + The *dir_fd* and *rmdir* arguments. + .. function:: removedirs(path) @@ -1913,18 +1791,26 @@ successfully removed. -.. function:: removexattr(path, attr) - - Removes the extended filesystem attribute *attr* from *path*. *attr* should - be bytes or str. If it is a string, it is encoded with the filesystem - encoding. +.. function:: removexattr(path, attribute, *, follow_symlinks=True) + + Removes the extended filesystem attribute *attribute* from *path*. + *attribute* should be bytes or str. If it is a string, it is encoded + with the filesystem encoding. + + *path* may be specified as either a string or an open file descriptor. + + If *follow_symlinks* is ``False``, and the last element of the path is a + symbolic link, :func:`removexattr` will remove the attribute from the + symbolic link itself instead of the file the link points to. It is an + error to use *follow_symlinks* when specifying *path* as an open file + descriptor. Availability: Linux .. versionadded:: 3.3 -.. function:: rename(src, dst) +.. function:: rename(src, dst, *, src_dir_fd=None, dst_dir_fd=None) Rename the file or directory *src* to *dst*. If *dst* is a directory, :exc:`OSError` will be raised. On Unix, if *dst* exists and is a file, it will @@ -1934,10 +1820,22 @@ Windows, if *dst* already exists, :exc:`OSError` will be raised even if it is a file. + If either *src_dir_fd* or *dst_dir_fd* is not ``None``, it should be a + file descriptor referring to a directory, and the corresponding path + (*src* or *dst*) should be relative; that path will then be relative to + that directory. (If *src* is absolute, *src_dir_fd* is ignored; the same + goes for *dst* and *dst_dir_fd*.) + *src_dir_fd* and *dst_dir_fd* may not be supported on your platform; + you can check whether or not they are available using :data:`os.supports_dir_fd`. + If they are unavailable, using either will raise a :exc:`NotImplementedError`. + If you want cross-platform overwriting of the destination, use :func:`replace`. Availability: Unix, Windows. + .. versionadded:: 3.3 + The *src_dir_fd* and *dst_dir_fd* arguments. + .. function:: renames(old, new) @@ -1952,7 +1850,7 @@ permissions needed to remove the leaf directory or file. -.. function:: replace(src, dst) +.. function:: replace(src, dst, *, src_dir_fd=None, dst_dir_fd=None) Rename the file or directory *src* to *dst*. If *dst* is a directory, :exc:`OSError` will be raised. If *dst* exists and is a file, it will @@ -1960,6 +1858,15 @@ if *src* and *dst* are on different filesystems. If successful, the renaming will be an atomic operation (this is a POSIX requirement). + If either *src_dir_fd* or *dst_dir_fd* is not ``None``, it should be a + file descriptor referring to a directory, and the corresponding path + (*src* or *dst*) should be relative; that path will then be relative to + that directory. (If *src* is absolute, *src_dir_fd* is ignored; the same + goes for *dst* and *dst_dir_fd*.) + *src_dir_fd* and *dst_dir_fd* may not be supported on your platform; + you can check whether or not they are available using :data:`os.supports_dir_fd`. + If they are unavailable, using either will raise a :exc:`NotImplementedError`. 
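A hedged sketch of the *src_dir_fd*/*dst_dir_fd* parameters described here; the directory and file names are illustrative only::

    import os

    if os.rename in os.supports_dir_fd:
        src_dfd = os.open("inbox", os.O_RDONLY)      # hypothetical directories
        dst_dfd = os.open("archive", os.O_RDONLY)
        try:
            # move inbox/msg.txt to archive/msg.txt without joining full paths
            os.rename("msg.txt", "msg.txt",
                      src_dir_fd=src_dfd, dst_dir_fd=dst_dfd)
        finally:
            os.close(src_dfd)
            os.close(dst_dfd)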
+ Availability: Unix, Windows .. versionadded:: 3.3 @@ -1992,15 +1899,22 @@ indicates the operation must replace an existing attribute. -.. function:: setxattr(path, attr, value, flags=0) - - Set the extended filesystem attribute *attr* on *path* to *value*. *attr* - must be a bytes or str with no embedded NULs. If it is str, it is encoded - with the filesystem encoding. *flags* may be :data:`XATTR_REPLACE` or - :data:`XATTR_CREATE`. If :data:`XATTR_REPLACE` is given and the attribute - does not exist, ``EEXISTS`` will be raised. If :data:`XATTR_CREATE` is given - and the attribute already exists, the attribute will not be created and - ``ENODATA`` will be raised. +.. function:: setxattr(path, attribute, value, flags=0, *, follow_symlinks=True) + + Set the extended filesystem attribute *attribute* on *path* to *value*. + *attribute* must be a bytes or str with no embedded NULs. If it is a str, + it is encoded with the filesystem encoding. *flags* may be + :data:`XATTR_REPLACE` or :data:`XATTR_CREATE`. If :data:`XATTR_REPLACE` is + given and the attribute does not exist, ``EEXISTS`` will be raised. + If :data:`XATTR_CREATE` is given and the attribute already exists, the + attribute will not be created and ``ENODATA`` will be raised. + + *path* may be specified as either a string or an open file descriptor. + + If *follow_symlinks* is ``False``, and the last element of the path is a + symbolic link, :func:`setxattr` will examine the symbolic link itself + instead of the file the link points to. It is an error to use + *follow_symlinks* when specifying *path* as an open file descriptor. Availability: Linux @@ -2012,10 +1926,12 @@ .. versionadded:: 3.3 -.. function:: stat(path) +.. function:: stat(path, *, dir_fd=None, follow_symlinks=True) Perform the equivalent of a :c:func:`stat` system call on the given path. - (This function follows symlinks; to stat a symlink use :func:`lstat`.) + *path* may be specified as either a string or as an open file descriptor. + (This function normally follows symlinks; to stat a symlink add the argument + ``follow_symlinks=False``, or use :func:`lstat`.) The return value is an object whose attributes correspond roughly to the members of the :c:type:`stat` structure, namely: @@ -2084,6 +2000,24 @@ :attr:`st_gid`, :attr:`st_size`, :attr:`st_atime`, :attr:`st_mtime`, :attr:`st_ctime`. More items may be added at the end by some implementations. + If *dir_fd* is not ``None``, it should be a file descriptor referring to a + directory, and *path* should be relative; path will then be relative to + that directory. (If *path* is absolute, *dir_fd* is ignored.) + *dir_fd* may not be supported on your platform; + you can check whether or not it is available using + :data:`os.supports_dir_fd`. If it is unavailable, using it will raise + a :exc:`NotImplementedError`. + + If *follow_symlinks* is ``False``, and the last element of the path is a + symbolic link, :func:`stat` will examine the symbolic link itself instead + of the file the link points to. *follow_symlinks* may not be supported + on your platform; you can check whether or not it is available using + :data:`os.supports_follow_symlinks`. If it is unavailable, + using it will raise a :exc:`NotImplementedError`. + + It is an error to use *dir_fd* or *follow_symlinks* when specifying + *path* as an open file descriptor. + .. index:: module: stat The standard module :mod:`stat` defines functions and constants that are useful @@ -2104,7 +2038,9 @@ Availability: Unix, Windows. .. 
versionadded:: 3.3 - The :attr:`st_atime_ns`, :attr:`st_mtime_ns`, + Added the *dir_fd* and *follow_symlinks* arguments, + specifying a file descriptor instead of a path, + and the :attr:`st_atime_ns`, :attr:`st_mtime_ns`, and :attr:`st_ctime_ns` members. @@ -2149,34 +2085,122 @@ read-only, and if :const:`ST_NOSUID` is set, the semantics of setuid/setgid bits are disabled or not supported. + On some platforms, *path* may also be specified as an open file descriptor. + This functionality may not be supported on your platform; you can check + whether or not it is available using :data:`os.supports_fd`. If it is + unavailable, using it will raise a :exc:`NotImplementedError`. + .. versionchanged:: 3.2 The :const:`ST_RDONLY` and :const:`ST_NOSUID` constants were added. Availability: Unix. - -.. function:: symlink(source, link_name) - symlink(source, link_name, target_is_directory=False) + .. versionadded:: 3.3 + Added support for specifying an open file descriptor for *path*. + + +.. data:: supports_dir_fd + + An object implementing collections.Set indicating which functions in the + :mod:`os` permit use of their *dir_fd* parameter. Different platforms + provide different functionality, and an option that might work on one might + be unsupported on another. For consistency's sakes, functions that support + *dir_fd* always allow specifying the parameter, but will throw an exception + if the functionality is not actually available. + + To check whether a particular function permits use of its *dir_fd* + parameter, use the ``in`` operator on ``supports_dir_fd``. As an example, + this expression determines whether the *dir_fd* parameter of :func:`os.stat` + is locally available:: + + os.stat in os.supports_dir_fd + + Currently *dir_fd* parameters only work on UNIX platforms; + none of them work on Windows. + + .. versionadded:: 3.3 + +.. data:: supports_effective_ids + + An object implementing collections.Set indicating which functions in the + :mod:`os` permit use of the *effective_id* parameter for :func:`os.access`. + If the local platform supports it, the collection will contain + :func:`os.access`, otherwise it will be empty. + + To check whether you can use the *effective_id* parameter for + :func:`os.access`, use the ``in`` operator on ``supports_dir_fd``, like so:: + + os.access in os.supports_effective_ids + + Currently *effective_id* only works on UNIX platforms; + it does not work on Windows. + + .. versionadded:: 3.3 + +.. data:: supports_fd + + An object implementing collections.Set indicating which functions in the + :mod:`os` permit specifying their *path* parameter as an open file + descriptor. Different platforms provide different functionality, and an + option that might work on one might be unsupported on another. For + consistency's sakes, functions that support *fd* always allow specifying + the parameter, but will throw an exception if the functionality is not + actually available. + + To check whether a particular function permits specifying an open file + descriptor for its *path* parameter, use the ``in`` operator on + ``supports_fd``. As an example, this expression determines whether + :func:`os.chdir` accepts open file descriptors when called on your local + platform:: + + os.chdir in os.supports_fd + + .. versionadded:: 3.3 + +.. data:: supports_follow_symlinks + + An object implementing collections.Set indicating which functions in the + :mod:`os` permit use of their *follow_symlinks* parameter. 
Different + platforms provide different functionality, and an option that might work on + one might be unsupported on another. For consistency's sakes, functions that + support *follow_symlinks* always allow specifying the parameter, but will + throw an exception if the functionality is not actually available. + + To check whether a particular function permits use of its *follow_symlinks* + parameter, use the ``in`` operator on ``supports_follow_symlinks``. As an + example, this expression determines whether the *follow_symlinks* parameter + of :func:`os.stat` is locally available:: + + os.stat in os.supports_follow_symlinks + + .. versionadded:: 3.3 + +.. function:: symlink(source, link_name, target_is_directory=False, *, dir_fd=None) Create a symbolic link pointing to *source* named *link_name*. - On Windows, symlink version takes an additional optional parameter, - *target_is_directory*, which defaults to ``False``. - - On Windows, a symlink represents a file or a directory, and does not morph to - the target dynamically. If *target_is_directory* is set to ``True``, the - symlink will be created as a directory symlink, otherwise as a file symlink - (the default). + On Windows, a symlink represents either a file or a directory, and does not + morph to the target dynamically. If *target_is_directory* is set to ``True``, + the symlink will be created as a directory symlink, otherwise as a file symlink + (the default). On non-Window platforms, *target_is_directory* is ignored. Symbolic link support was introduced in Windows 6.0 (Vista). :func:`symlink` will raise a :exc:`NotImplementedError` on Windows versions earlier than 6.0. + If *dir_fd* is not ``None``, it should be a file descriptor referring to a + directory, and *path* should be relative; path will then be relative to + that directory. (If *path* is absolute, *dir_fd* is ignored.) + *dir_fd* may not be supported on your platform; + you can check whether or not it is available using + :data:`os.supports_dir_fd`. If it is unavailable, using it will raise + a :exc:`NotImplementedError`. + .. note:: - The *SeCreateSymbolicLinkPrivilege* is required in order to successfully - create symlinks. This privilege is not typically granted to regular - users but is available to accounts which can escalate privileges to the - administrator level. Either obtaining the privilege or running your + On Windows, the *SeCreateSymbolicLinkPrivilege* is required in order to + successfully create symlinks. This privilege is not typically granted to + regular users but is available to accounts which can escalate privileges + to the administrator level. Either obtaining the privilege or running your application as an administrator are ways to successfully create symlinks. :exc:`OSError` is raised when the function is called by an unprivileged @@ -2187,6 +2211,10 @@ .. versionchanged:: 3.2 Added support for Windows 6.0 (Vista) symbolic links. + .. versionadded:: 3.3 + Added the *dir_fd* argument, and now allow *target_is_directory* + on non-Windows platforms. + .. function:: sync() @@ -2207,37 +2235,39 @@ .. versionadded:: 3.3 -.. function:: unlink(path) +.. function:: unlink(path, *, dir_fd=None, rmdir=False) Remove (delete) the file *path*. This is the same function as :func:`remove`; the :func:`unlink` name is its traditional Unix - name. + name. Please see the documentation for :func:`remove` for + further information. Availability: Unix, Windows. - -.. function:: utime(path[, times, *, ns=(atime_ns, mtime_ns)]) + .. 
versionadded:: 3.3 + The *dir_fd* and *rmdir* parameters. + + +.. function:: utime(path, times=None, *, ns=None, dir_fd=None, follow_symlinks=True) Set the access and modified times of the file specified by *path*. :func:`utime` takes two optional parameters, *times* and *ns*. These specify the times set on *path* and are used as follows: - - If *ns* is specified, + - If *ns* is not ``None``, it must be a 2-tuple of the form ``(atime_ns, mtime_ns)`` where each member is an int expressing nanoseconds. - - If *times* is specified and is not ``None``, + - If *times* is not ``None``, it must be a 2-tuple of the form ``(atime, mtime)`` where each member is an int or float expressing seconds. - - If *times* is specified as ``None``, - this is equivalent to specifying an ``(atime, mtime)`` + - If *times* and *ns* are both ``None``, + this is equivalent to specifying ``ns=(atime_ns, mtime_ns)`` where both times are the current time. (The effect is similar to running the Unix program :program:`touch` on *path*.) - - If neither *ns* nor *times* is specified, this is - equivalent to specifying *times* as ``None``. - - Specifying both *times* and *ns* simultaneously is an error. + + It is an error to specify tuples for both *times* and *ns*. Whether a directory can be given for *path* depends on whether the operating system implements directories as files @@ -2248,10 +2278,34 @@ use the *st_atime_ns* and *st_mtime_ns* fields from the :func:`os.stat` result object with the *ns* parameter to `utime`. + On some platforms, *path* may also be specified as an open file descriptor. + This functionality may not be supported on your platform; you can check + whether or not it is available using :data:`os.supports_fd`. If it is + unavailable, using it will raise a :exc:`NotImplementedError`. + + If *dir_fd* is not ``None``, it should be a file descriptor referring to a + directory, and *path* should be relative; path will then be relative to + that directory. (If *path* is absolute, *dir_fd* is ignored.) + *dir_fd* may not be supported on your platform; + you can check whether or not it is available using + :data:`os.supports_dir_fd`. If it is unavailable, using it will raise + a :exc:`NotImplementedError`. + + If *follow_symlinks* is ``False``, and the last element of the path is a + symbolic link, :func:`utime` will examine the symbolic link itself instead + of the file the link points to. *follow_symlinks* may not be supported + on your platform; you can check whether or not it is available using + :data:`os.supports_follow_symlinks`. If it is unavailable, + using it will raise a :exc:`NotImplementedError`. + + It is an error to use *dir_fd* or *follow_symlinks* when specifying + *path* as an open file descriptor. + Availability: Unix, Windows. .. versionadded:: 3.3 - The :attr:`ns` keyword parameter. + Added support for specifying an open file descriptor for *path*, + and the *dir_fd*, *follow_symlinks*, and *ns* parameters. .. function:: walk(top, topdown=True, onerror=None, followlinks=False) @@ -2461,8 +2515,16 @@ :func:`execlp`, :func:`execv`, and :func:`execvp` all cause the new process to inherit the environment of the current process. + For :func:`execve` on some platforms, *path* may also be specified as an open + file descriptor. This functionality may not be supported on your platform; + you can check whether or not it is available using :data:`os.supports_fd`. + If it is unavailable, using it will raise a :exc:`NotImplementedError`. + Availability: Unix, Windows. + .. 
versionadded:: 3.3 + Added support for specifying an open file descriptor for *path* + for :func:`execve`. .. function:: _exit(n) diff --git a/Lib/os.py b/Lib/os.py --- a/Lib/os.py +++ b/Lib/os.py @@ -56,9 +56,10 @@ pass import posixpath as path - import posix - __all__.extend(_get_exports_list(posix)) - del posix + try: + from posix import _have_functions + except ImportError: + pass elif 'nt' in _names: name = 'nt' @@ -75,6 +76,11 @@ __all__.extend(_get_exports_list(nt)) del nt + try: + from nt import _have_functions + except ImportError: + pass + elif 'os2' in _names: name = 'os2' linesep = '\r\n' @@ -94,6 +100,11 @@ __all__.extend(_get_exports_list(os2)) del os2 + try: + from os2 import _have_functions + except ImportError: + pass + elif 'ce' in _names: name = 'ce' linesep = '\r\n' @@ -110,6 +121,11 @@ __all__.extend(_get_exports_list(ce)) del ce + try: + from ce import _have_functions + except ImportError: + pass + else: raise ImportError('no os specific module found') @@ -119,6 +135,84 @@ del _names + +if _exists("_have_functions"): + _globals = globals() + def _add(str, fn): + if (fn in _globals) and (str in _have_functions): + _set.add(_globals[fn]) + + _set = set() + _add("HAVE_FACCESSAT", "access") + _add("HAVE_FCHMODAT", "chmod") + _add("HAVE_FCHOWNAT", "chown") + _add("HAVE_FSTATAT", "stat") + _add("HAVE_FUTIMESAT", "utime") + _add("HAVE_LINKAT", "link") + _add("HAVE_MKDIRAT", "mkdir") + _add("HAVE_MKFIFOAT", "mkfifo") + _add("HAVE_MKNODAT", "mknod") + _add("HAVE_OPENAT", "open") + _add("HAVE_READLINKAT", "readlink") + _add("HAVE_RENAMEAT", "rename") + _add("HAVE_SYMLINKAT", "symlink") + _add("HAVE_UNLINKAT", "unlink") + _add("HAVE_UTIMENSAT", "utime") + supports_dir_fd = _set + + _set = set() + _add("HAVE_FACCESSAT", "access") + supports_effective_ids = _set + + _set = set() + _add("HAVE_FCHDIR", "chdir") + _add("HAVE_FCHMOD", "chmod") + _add("HAVE_FCHOWN", "chown") + _add("HAVE_FDOPENDIR", "listdir") + _add("HAVE_FEXECVE", "execve") + _set.add(stat) # fstat always works + _add("HAVE_FUTIMENS", "utime") + _add("HAVE_FUTIMES", "utime") + if _exists("statvfs") and _exists("fstatvfs"): # mac os x10.3 + _add("HAVE_FSTATVFS", "statvfs") + supports_fd = _set + + _set = set() + _add("HAVE_FACCESSAT", "access") + # Current linux (kernel 3.2, glibc 2.15) doesn't support lchmod. + # (The function exists, but it's a stub that always returns ENOSUP.) + # Now, linux *does* have fchmodat, which says it can ignore + # symbolic links. But that doesn't work either (also returns ENOSUP). + # I'm guessing that if they fix fchmodat, they'll also add lchmod at + # the same time. So, for now, assume that fchmodat doesn't support + # follow_symlinks unless lchmod works. 
+ if ((sys.platform != "linux") or + ("HAVE_LCHMOD" in _have_functions)): + _add("HAVE_FCHMODAT", "chmod") + _add("HAVE_FCHOWNAT", "chown") + _add("HAVE_FSTATAT", "stat") + _add("HAVE_LCHFLAGS", "chflags") + _add("HAVE_LCHMOD", "chmod") + if _exists("lchown"): # mac os x10.3 + _add("HAVE_LCHOWN", "chown") + _add("HAVE_LINKAT", "link") + _add("HAVE_LUTIMES", "utime") + _add("HAVE_LSTAT", "stat") + _add("HAVE_FSTATAT", "stat") + _add("HAVE_UTIMENSAT", "utime") + _add("MS_WINDOWS", "stat") + supports_follow_symlinks = _set + + _set = set() + _add("HAVE_UNLINKAT", "unlink") + supports_remove_directory = _set + + del _set + del _have_functions + del _globals + del _add + + # Python uses fixed values for the SEEK_ constants; they are mapped # to native constants if necessary in posixmodule.c # Other possible SEEK values are directly imported from posixmodule.c @@ -318,7 +412,7 @@ __all__.append("walk") -if _exists("openat"): +if open in supports_dir_fd: def fwalk(top, topdown=True, onerror=None, followlinks=False): """Directory tree generator. @@ -343,7 +437,7 @@ import os for root, dirs, files, rootfd in os.fwalk('python/Lib/email'): print(root, "consumes", end="") - print(sum([os.fstatat(rootfd, name).st_size for name in files]), + print(sum([os.stat(name, dir_fd=rootfd).st_size for name in files]), end="") print("bytes in", len(files), "non-directory files") if 'CVS' in dirs: @@ -365,10 +459,7 @@ # necessary, it can be adapted to only require O(1) FDs, see issue # #13734. - # whether to follow symlinks - flag = 0 if followlinks else AT_SYMLINK_NOFOLLOW - - names = flistdir(topfd) + names = listdir(topfd) dirs, nondirs = [], [] for name in names: try: @@ -376,14 +467,14 @@ # walk() which reports symlinks to directories as directories. # We do however check for symlinks before recursing into # a subdirectory. - if st.S_ISDIR(fstatat(topfd, name).st_mode): + if st.S_ISDIR(stat(name, dir_fd=topfd).st_mode): dirs.append(name) else: nondirs.append(name) except FileNotFoundError: try: # Add dangling symlinks, ignore disappeared files - if st.S_ISLNK(fstatat(topfd, name, AT_SYMLINK_NOFOLLOW) + if st.S_ISLNK(stat(name, dir_fd=topfd, follow_symlinks=False) .st_mode): nondirs.append(name) except FileNotFoundError: @@ -394,8 +485,8 @@ for name in dirs: try: - orig_st = fstatat(topfd, name, flag) - dirfd = openat(topfd, name, O_RDONLY) + orig_st = stat(name, dir_fd=topfd, follow_symlinks=followlinks) + dirfd = open(name, O_RDONLY, dir_fd=topfd) except error as err: if onerror is not None: onerror(err) diff --git a/Lib/shutil.py b/Lib/shutil.py --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -139,27 +139,45 @@ only if both `src` and `dst` are symlinks. 
""" - def _nop(*args, ns=None): + def _nop(*args, ns=None, follow_symlinks=None): pass - if symlinks and os.path.islink(src) and os.path.islink(dst): - stat_func = os.lstat - utime_func = os.lutimes if hasattr(os, 'lutimes') else _nop - chmod_func = os.lchmod if hasattr(os, 'lchmod') else _nop - chflags_func = os.lchflags if hasattr(os, 'lchflags') else _nop + # follow symlinks (aka don't not follow symlinks) + follow = not (symlinks and os.path.islink(src) and os.path.islink(dst)) + if follow: + # use the real function if it exists + def lookup(name): + return getattr(os, name, _nop) else: - stat_func = os.stat - utime_func = os.utime if hasattr(os, 'utime') else _nop - chmod_func = os.chmod if hasattr(os, 'chmod') else _nop - chflags_func = os.chflags if hasattr(os, 'chflags') else _nop + # use the real function only if it exists + # *and* it supports follow_symlinks + def lookup(name): + fn = getattr(os, name, _nop) + if fn in os.supports_follow_symlinks: + return fn + return _nop - st = stat_func(src) + st = lookup("stat")(src, follow_symlinks=follow) mode = stat.S_IMODE(st.st_mode) - utime_func(dst, ns=(st.st_atime_ns, st.st_mtime_ns)) - chmod_func(dst, mode) + lookup("utime")(dst, ns=(st.st_atime_ns, st.st_mtime_ns), + follow_symlinks=follow) + try: + lookup("chmod")(dst, mode, follow_symlinks=follow) + except NotImplementedError: + # if we got a NotImplementedError, it's because + # * follow_symlinks=False, + # * lchown() is unavailable, and + # * either + # * fchownat() is unvailable or + # * fchownat() doesn't implement AT_SYMLINK_NOFOLLOW. + # (it returned ENOSUP.) + # therefore we're out of options--we simply cannot chown the + # symlink. give up, suppress the error. + # (which is what shutil always did in this circumstance.) + pass if hasattr(st, 'st_flags'): try: - chflags_func(dst, st.st_flags) + lookup("chflags")(dst, st.st_flags, follow_symlinks=follow) except OSError as why: for err in 'EOPNOTSUPP', 'ENOTSUP': if hasattr(errno, err) and why.errno == getattr(errno, err): @@ -176,20 +194,11 @@ If the optional flag `symlinks` is set, symlinks won't be followed. """ - if symlinks: - listxattr = os.llistxattr - removexattr = os.lremovexattr - setxattr = os.lsetxattr - getxattr = os.lgetxattr - else: - listxattr = os.listxattr - removexattr = os.removexattr - setxattr = os.setxattr - getxattr = os.getxattr - for attr in listxattr(src): + for name in os.listxattr(src, follow_symlinks=symlinks): try: - setxattr(dst, attr, getxattr(src, attr)) + value = os.getxattr(src, name, follow_symlinks=symlinks) + os.setxattr(dst, name, value, follow_symlinks=symlinks) except OSError as e: if e.errno not in (errno.EPERM, errno.ENOTSUP, errno.ENODATA): raise diff --git a/Lib/test/support.py b/Lib/test/support.py --- a/Lib/test/support.py +++ b/Lib/test/support.py @@ -1703,8 +1703,8 @@ try: # TESTFN & tempfile may use different file systems with # different capabilities - os.fsetxattr(tmp_fp, b"user.test", b"") - os.fsetxattr(fp.fileno(), b"user.test", b"") + os.setxattr(tmp_fp, b"user.test", b"") + os.setxattr(fp.fileno(), b"user.test", b"") # Kernels < 2.6.39 don't respect setxattr flags. 
kernel_version = platform.release() m = re.match("2.6.(\d{1,2})", kernel_version) diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -345,40 +345,36 @@ return os.utime(file, ns=times) self._test_utime_ns(utime_ns) - requires_lutimes = unittest.skipUnless(hasattr(os, 'lutimes'), - "os.lutimes required for this test.") - requires_futimes = unittest.skipUnless(hasattr(os, 'futimes'), - "os.futimes required for this test.") + requires_utime_dir_fd = unittest.skipUnless( + os.utime in os.supports_dir_fd, + "dir_fd support for utime required for this test.") + requires_utime_fd = unittest.skipUnless( + os.utime in os.supports_fd, + "fd support for utime required for this test.") + requires_utime_nofollow_symlinks = unittest.skipUnless( + os.utime in os.supports_follow_symlinks, + "follow_symlinks support for utime required for this test.") - @requires_lutimes + @requires_utime_nofollow_symlinks def test_lutimes_ns(self): def lutimes_ns(file, times): - return os.lutimes(file, ns=times) + return os.utime(file, ns=times, follow_symlinks=False) self._test_utime_ns(lutimes_ns) - @requires_futimes + @requires_utime_fd def test_futimes_ns(self): def futimes_ns(file, times): with open(file, "wb") as f: - os.futimes(f.fileno(), ns=times) + os.utime(f.fileno(), ns=times) self._test_utime_ns(futimes_ns, test_dir=False) def _utime_invalid_arguments(self, name, arg): - with self.assertRaises(RuntimeError): + with self.assertRaises(ValueError): getattr(os, name)(arg, (5, 5), ns=(5, 5)) def test_utime_invalid_arguments(self): self._utime_invalid_arguments('utime', self.fname) - @requires_lutimes - def test_lutimes_invalid_arguments(self): - self._utime_invalid_arguments('lutimes', self.fname) - - @requires_futimes - def test_futimes_invalid_arguments(self): - with open(self.fname, "wb") as f: - self._utime_invalid_arguments('futimes', f.fileno()) - @unittest.skipUnless(stat_supports_subsecond, "os.stat() doesn't has a subsecond resolution") @@ -402,64 +398,46 @@ os.utime(filename, (atime, mtime)) self._test_utime_subsecond(set_time) - @requires_futimes + @requires_utime_fd def test_futimes_subsecond(self): def set_time(filename, atime, mtime): with open(filename, "wb") as f: - os.futimes(f.fileno(), (atime, mtime)) + os.utime(f.fileno(), times=(atime, mtime)) self._test_utime_subsecond(set_time) - @unittest.skipUnless(hasattr(os, 'futimens'), - "os.futimens required for this test.") + @requires_utime_fd def test_futimens_subsecond(self): def set_time(filename, atime, mtime): with open(filename, "wb") as f: - asec, ansec = divmod(atime, 1.0) - asec = int(asec) - ansec = int(ansec * 1e9) - msec, mnsec = divmod(mtime, 1.0) - msec = int(msec) - mnsec = int(mnsec * 1e9) - os.futimens(f.fileno(), - (asec, ansec), - (msec, mnsec)) + os.utime(f.fileno(), times=(atime, mtime)) self._test_utime_subsecond(set_time) - @unittest.skipUnless(hasattr(os, 'futimesat'), - "os.futimesat required for this test.") + @requires_utime_dir_fd def test_futimesat_subsecond(self): def set_time(filename, atime, mtime): dirname = os.path.dirname(filename) dirfd = os.open(dirname, os.O_RDONLY) try: - os.futimesat(dirfd, os.path.basename(filename), - (atime, mtime)) + os.utime(os.path.basename(filename), dir_fd=dirfd, + times=(atime, mtime)) finally: os.close(dirfd) self._test_utime_subsecond(set_time) - @requires_lutimes + @requires_utime_nofollow_symlinks def test_lutimes_subsecond(self): def set_time(filename, atime, mtime): - os.lutimes(filename, (atime, mtime)) + 
os.utime(filename, (atime, mtime), follow_symlinks=False) self._test_utime_subsecond(set_time) - @unittest.skipUnless(hasattr(os, 'utimensat'), - "os.utimensat required for this test.") + @requires_utime_dir_fd def test_utimensat_subsecond(self): def set_time(filename, atime, mtime): dirname = os.path.dirname(filename) dirfd = os.open(dirname, os.O_RDONLY) try: - asec, ansec = divmod(atime, 1.0) - asec = int(asec) - ansec = int(ansec * 1e9) - msec, mnsec = divmod(mtime, 1.0) - msec = int(msec) - mnsec = int(mnsec * 1e9) - os.utimensat(dirfd, os.path.basename(filename), - (asec, ansec), - (msec, mnsec)) + os.utime(os.path.basename(filename), dir_fd=dirfd, + times=(atime, mtime)) finally: os.close(dirfd) self._test_utime_subsecond(set_time) @@ -782,8 +760,10 @@ for root, dirs, files, rootfd in os.fwalk(*args): # check that the FD is valid os.fstat(rootfd) - # check that flistdir() returns consistent information - self.assertEqual(set(os.flistdir(rootfd)), set(dirs) | set(files)) + # redundant check + os.stat(rootfd) + # check that listdir() returns consistent information + self.assertEqual(set(os.listdir(rootfd)), set(dirs) | set(files)) def test_fd_leak(self): # Since we're opening a lot of FDs, we must be careful to avoid leaks: @@ -802,13 +782,10 @@ # cleanup for root, dirs, files, rootfd in os.fwalk(support.TESTFN, topdown=False): for name in files: - os.unlinkat(rootfd, name) + os.unlink(name, dir_fd=rootfd) for name in dirs: - st = os.fstatat(rootfd, name, os.AT_SYMLINK_NOFOLLOW) - if stat.S_ISDIR(st.st_mode): - os.unlinkat(rootfd, name, os.AT_REMOVEDIR) - else: - os.unlinkat(rootfd, name) + st = os.stat(name, dir_fd=rootfd, follow_symlinks=False) + os.unlink(name, dir_fd=rootfd, rmdir=stat.S_ISDIR(st.st_mode)) os.rmdir(support.TESTFN) @@ -1262,6 +1239,13 @@ expected = self.unicodefn found = set(os.listdir(self.dir)) self.assertEqual(found, expected) + # test listdir without arguments + current_directory = os.getcwd() + try: + os.chdir(os.sep) + self.assertEqual(set(os.listdir()), set(os.listdir(os.sep))) + finally: + os.chdir(current_directory) def test_open(self): for fn in self.unicodefn: @@ -1846,79 +1830,97 @@ raise - at support.skip_unless_xattr +def supports_extended_attributes(): + if not hasattr(os, "setxattr"): + return False + try: + with open(support.TESTFN, "wb") as fp: + try: + os.setxattr(fp.fileno(), b"user.test", b"") + except OSError: + return False + finally: + support.unlink(support.TESTFN) + # Kernels < 2.6.39 don't respect setxattr flags. 
+ kernel_version = platform.release() + m = re.match("2.6.(\d{1,2})", kernel_version) + return m is None or int(m.group(1)) >= 39 + + + at unittest.skipUnless(supports_extended_attributes(), + "no non-broken extended attribute support") class ExtendedAttributeTests(unittest.TestCase): def tearDown(self): support.unlink(support.TESTFN) - def _check_xattrs_str(self, s, getxattr, setxattr, removexattr, listxattr): + def _check_xattrs_str(self, s, getxattr, setxattr, removexattr, listxattr, **kwargs): fn = support.TESTFN open(fn, "wb").close() with self.assertRaises(OSError) as cm: - getxattr(fn, s("user.test")) + getxattr(fn, s("user.test"), **kwargs) self.assertEqual(cm.exception.errno, errno.ENODATA) init_xattr = listxattr(fn) self.assertIsInstance(init_xattr, list) - setxattr(fn, s("user.test"), b"") + setxattr(fn, s("user.test"), b"", **kwargs) xattr = set(init_xattr) xattr.add("user.test") self.assertEqual(set(listxattr(fn)), xattr) - self.assertEqual(getxattr(fn, b"user.test"), b"") - setxattr(fn, s("user.test"), b"hello", os.XATTR_REPLACE) - self.assertEqual(getxattr(fn, b"user.test"), b"hello") + self.assertEqual(getxattr(fn, b"user.test", **kwargs), b"") + setxattr(fn, s("user.test"), b"hello", os.XATTR_REPLACE, **kwargs) + self.assertEqual(getxattr(fn, b"user.test", **kwargs), b"hello") with self.assertRaises(OSError) as cm: - setxattr(fn, s("user.test"), b"bye", os.XATTR_CREATE) + setxattr(fn, s("user.test"), b"bye", os.XATTR_CREATE, **kwargs) self.assertEqual(cm.exception.errno, errno.EEXIST) with self.assertRaises(OSError) as cm: - setxattr(fn, s("user.test2"), b"bye", os.XATTR_REPLACE) + setxattr(fn, s("user.test2"), b"bye", os.XATTR_REPLACE, **kwargs) self.assertEqual(cm.exception.errno, errno.ENODATA) - setxattr(fn, s("user.test2"), b"foo", os.XATTR_CREATE) + setxattr(fn, s("user.test2"), b"foo", os.XATTR_CREATE, **kwargs) xattr.add("user.test2") self.assertEqual(set(listxattr(fn)), xattr) - removexattr(fn, s("user.test")) + removexattr(fn, s("user.test"), **kwargs) with self.assertRaises(OSError) as cm: - getxattr(fn, s("user.test")) + getxattr(fn, s("user.test"), **kwargs) self.assertEqual(cm.exception.errno, errno.ENODATA) xattr.remove("user.test") self.assertEqual(set(listxattr(fn)), xattr) - self.assertEqual(getxattr(fn, s("user.test2")), b"foo") - setxattr(fn, s("user.test"), b"a"*1024) - self.assertEqual(getxattr(fn, s("user.test")), b"a"*1024) - removexattr(fn, s("user.test")) + self.assertEqual(getxattr(fn, s("user.test2"), **kwargs), b"foo") + setxattr(fn, s("user.test"), b"a"*1024, **kwargs) + self.assertEqual(getxattr(fn, s("user.test"), **kwargs), b"a"*1024) + removexattr(fn, s("user.test"), **kwargs) many = sorted("user.test{}".format(i) for i in range(100)) for thing in many: - setxattr(fn, thing, b"x") + setxattr(fn, thing, b"x", **kwargs) self.assertEqual(set(listxattr(fn)), set(init_xattr) | set(many)) - def _check_xattrs(self, *args): + def _check_xattrs(self, *args, **kwargs): def make_bytes(s): return bytes(s, "ascii") - self._check_xattrs_str(str, *args) + self._check_xattrs_str(str, *args, **kwargs) support.unlink(support.TESTFN) - self._check_xattrs_str(make_bytes, *args) + self._check_xattrs_str(make_bytes, *args, **kwargs) def test_simple(self): self._check_xattrs(os.getxattr, os.setxattr, os.removexattr, os.listxattr) def test_lpath(self): - self._check_xattrs(os.lgetxattr, os.lsetxattr, os.lremovexattr, - os.llistxattr) + self._check_xattrs(os.getxattr, os.setxattr, os.removexattr, + os.listxattr, follow_symlinks=False) def test_fds(self): def 
getxattr(path, *args): with open(path, "rb") as fp: - return os.fgetxattr(fp.fileno(), *args) + return os.getxattr(fp.fileno(), *args) def setxattr(path, *args): with open(path, "wb") as fp: - os.fsetxattr(fp.fileno(), *args) + os.setxattr(fp.fileno(), *args) def removexattr(path, *args): with open(path, "wb") as fp: - os.fremovexattr(fp.fileno(), *args) + os.removexattr(fp.fileno(), *args) def listxattr(path, *args): with open(path, "rb") as fp: - return os.flistxattr(fp.fileno(), *args) + return os.listxattr(fp.fileno(), *args) self._check_xattrs(getxattr, setxattr, removexattr, listxattr) diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -129,6 +129,7 @@ fp = open(support.TESTFN) try: self.assertTrue(posix.fstatvfs(fp.fileno())) + self.assertTrue(posix.statvfs(fp.fileno())) finally: fp.close() @@ -150,7 +151,7 @@ fp.flush() posix.truncate(support.TESTFN, 0) - @unittest.skipUnless(hasattr(posix, 'fexecve'), "test needs posix.fexecve()") + @unittest.skipUnless(getattr(os, 'execve', None) in os.supports_fd, "test needs execve() to support the fd parameter") @unittest.skipUnless(hasattr(os, 'fork'), "test needs os.fork()") @unittest.skipUnless(hasattr(os, 'waitpid'), "test needs os.waitpid()") def test_fexecve(self): @@ -159,7 +160,7 @@ pid = os.fork() if pid == 0: os.chdir(os.path.split(sys.executable)[0]) - posix.fexecve(fp, [sys.executable, '-c', 'pass'], os.environ) + posix.execve(fp, [sys.executable, '-c', 'pass'], os.environ) else: self.assertEqual(os.waitpid(pid, 0), (pid, 0)) finally: @@ -234,45 +235,37 @@ finally: os.close(fd) - @unittest.skipUnless(hasattr(posix, 'futimes'), "test needs posix.futimes()") - def test_futimes(self): + @unittest.skipUnless(os.utime in os.supports_fd, "test needs fd support in os.utime") + def test_utime_with_fd(self): now = time.time() fd = os.open(support.TESTFN, os.O_RDONLY) try: - posix.futimes(fd, None) - posix.futimes(fd) - self.assertRaises(TypeError, posix.futimes, fd, (None, None)) - self.assertRaises(TypeError, posix.futimes, fd, (now, None)) - self.assertRaises(TypeError, posix.futimes, fd, (None, now)) - posix.futimes(fd, (int(now), int(now))) - posix.futimes(fd, (now, now)) + posix.utime(fd) + posix.utime(fd, None) + self.assertRaises(TypeError, posix.utime, fd, (None, None)) + self.assertRaises(TypeError, posix.utime, fd, (now, None)) + self.assertRaises(TypeError, posix.utime, fd, (None, now)) + posix.utime(fd, (int(now), int(now))) + posix.utime(fd, (now, now)) + self.assertRaises(ValueError, posix.utime, fd, (now, now), ns=(now, now)) + self.assertRaises(ValueError, posix.utime, fd, (now, 0), ns=(None, None)) + self.assertRaises(ValueError, posix.utime, fd, (None, None), ns=(now, 0)) + posix.utime(fd, (int(now), int((now - int(now)) * 1e9))) + posix.utime(fd, ns=(int(now), int((now - int(now)) * 1e9))) + finally: os.close(fd) - @unittest.skipUnless(hasattr(posix, 'lutimes'), "test needs posix.lutimes()") - def test_lutimes(self): + @unittest.skipUnless(os.utime in os.supports_follow_symlinks, "test needs follow_symlinks support in os.utime") + def test_utime_nofollow_symlinks(self): now = time.time() - posix.lutimes(support.TESTFN, None) - self.assertRaises(TypeError, posix.lutimes, support.TESTFN, (None, None)) - self.assertRaises(TypeError, posix.lutimes, support.TESTFN, (now, None)) - self.assertRaises(TypeError, posix.lutimes, support.TESTFN, (None, now)) - posix.lutimes(support.TESTFN, (int(now), int(now))) - posix.lutimes(support.TESTFN, (now, now)) - 
posix.lutimes(support.TESTFN) - - @unittest.skipUnless(hasattr(posix, 'futimens'), "test needs posix.futimens()") - def test_futimens(self): - now = time.time() - fd = os.open(support.TESTFN, os.O_RDONLY) - try: - self.assertRaises(TypeError, posix.futimens, fd, (None, None), (None, None)) - self.assertRaises(TypeError, posix.futimens, fd, (now, 0), None) - self.assertRaises(TypeError, posix.futimens, fd, None, (now, 0)) - posix.futimens(fd, (int(now), int((now - int(now)) * 1e9)), - (int(now), int((now - int(now)) * 1e9))) - posix.futimens(fd) - finally: - os.close(fd) + posix.utime(support.TESTFN, None, follow_symlinks=False) + self.assertRaises(TypeError, posix.utime, support.TESTFN, (None, None), follow_symlinks=False) + self.assertRaises(TypeError, posix.utime, support.TESTFN, (now, None), follow_symlinks=False) + self.assertRaises(TypeError, posix.utime, support.TESTFN, (None, now), follow_symlinks=False) + posix.utime(support.TESTFN, (int(now), int(now)), follow_symlinks=False) + posix.utime(support.TESTFN, (now, now), follow_symlinks=False) + posix.utime(support.TESTFN, follow_symlinks=False) @unittest.skipUnless(hasattr(posix, 'writev'), "test needs posix.writev()") def test_writev(self): @@ -364,6 +357,7 @@ fp = open(support.TESTFN) try: self.assertTrue(posix.fstat(fp.fileno())) + self.assertTrue(posix.stat(fp.fileno())) finally: fp.close() @@ -462,18 +456,18 @@ if hasattr(posix, 'listdir'): self.assertTrue(support.TESTFN in posix.listdir()) - @unittest.skipUnless(hasattr(posix, 'flistdir'), "test needs posix.flistdir()") + @unittest.skipUnless(os.listdir in os.supports_fd, "test needs fd support for os.listdir()") def test_flistdir(self): f = posix.open(posix.getcwd(), posix.O_RDONLY) self.addCleanup(posix.close, f) self.assertEqual( sorted(posix.listdir('.')), - sorted(posix.flistdir(f)) + sorted(posix.listdir(f)) ) # Check that the fd offset was reset (issue #13739) self.assertEqual( sorted(posix.listdir('.')), - sorted(posix.flistdir(f)) + sorted(posix.listdir(f)) ) def test_access(self): @@ -532,10 +526,10 @@ posix.utime(support.TESTFN, (int(now), int(now))) posix.utime(support.TESTFN, (now, now)) - def _test_chflags_regular_file(self, chflags_func, target_file): + def _test_chflags_regular_file(self, chflags_func, target_file, **kwargs): st = os.stat(target_file) self.assertTrue(hasattr(st, 'st_flags')) - chflags_func(target_file, st.st_flags | stat.UF_IMMUTABLE) + chflags_func(target_file, st.st_flags | stat.UF_IMMUTABLE, **kwargs) try: new_st = os.stat(target_file) self.assertEqual(st.st_flags | stat.UF_IMMUTABLE, new_st.st_flags) @@ -553,6 +547,7 @@ @unittest.skipUnless(hasattr(posix, 'lchflags'), 'test needs os.lchflags()') def test_lchflags_regular_file(self): self._test_chflags_regular_file(posix.lchflags, support.TESTFN) + self._test_chflags_regular_file(posix.chflags, support.TESTFN, follow_symlinks=False) @unittest.skipUnless(hasattr(posix, 'lchflags'), 'test needs os.lchflags()') def test_lchflags_symlink(self): @@ -564,17 +559,21 @@ self.teardown_files.append(_DUMMY_SYMLINK) dummy_symlink_st = os.lstat(_DUMMY_SYMLINK) - posix.lchflags(_DUMMY_SYMLINK, - dummy_symlink_st.st_flags | stat.UF_IMMUTABLE) - try: - new_testfn_st = os.stat(support.TESTFN) - new_dummy_symlink_st = os.lstat(_DUMMY_SYMLINK) + def chflags_nofollow(path, flags): + return posix.chflags(path, flags, follow_symlinks=False) - self.assertEqual(testfn_st.st_flags, new_testfn_st.st_flags) - self.assertEqual(dummy_symlink_st.st_flags | stat.UF_IMMUTABLE, - new_dummy_symlink_st.st_flags) - finally: - 
posix.lchflags(_DUMMY_SYMLINK, dummy_symlink_st.st_flags) + for fn in (posix.lchflags, chflags_nofollow): + fn(_DUMMY_SYMLINK, + dummy_symlink_st.st_flags | stat.UF_IMMUTABLE) + try: + new_testfn_st = os.stat(support.TESTFN) + new_dummy_symlink_st = os.lstat(_DUMMY_SYMLINK) + + self.assertEqual(testfn_st.st_flags, new_testfn_st.st_flags) + self.assertEqual(dummy_symlink_st.st_flags | stat.UF_IMMUTABLE, + new_dummy_symlink_st.st_flags) + finally: + fn(_DUMMY_SYMLINK, dummy_symlink_st.st_flags) def test_environ(self): if os.name == "nt": @@ -657,40 +656,40 @@ # tests for the posix *at functions follow - @unittest.skipUnless(hasattr(posix, 'faccessat'), "test needs posix.faccessat()") - def test_faccessat(self): + @unittest.skipUnless(os.access in os.supports_dir_fd, "test needs dir_fd support for os.access()") + def test_access_dir_fd(self): f = posix.open(posix.getcwd(), posix.O_RDONLY) try: - self.assertTrue(posix.faccessat(f, support.TESTFN, os.R_OK)) + self.assertTrue(posix.access(support.TESTFN, os.R_OK, dir_fd=f)) finally: posix.close(f) - @unittest.skipUnless(hasattr(posix, 'fchmodat'), "test needs posix.fchmodat()") - def test_fchmodat(self): + @unittest.skipUnless(os.chmod in os.supports_dir_fd, "test needs dir_fd support in os.chmod()") + def test_chmod_dir_fd(self): os.chmod(support.TESTFN, stat.S_IRUSR) f = posix.open(posix.getcwd(), posix.O_RDONLY) try: - posix.fchmodat(f, support.TESTFN, stat.S_IRUSR | stat.S_IWUSR) + posix.chmod(support.TESTFN, stat.S_IRUSR | stat.S_IWUSR, dir_fd=f) s = posix.stat(support.TESTFN) self.assertEqual(s[0] & stat.S_IRWXU, stat.S_IRUSR | stat.S_IWUSR) finally: posix.close(f) - @unittest.skipUnless(hasattr(posix, 'fchownat'), "test needs posix.fchownat()") - def test_fchownat(self): + @unittest.skipUnless(os.chown in os.supports_dir_fd, "test needs dir_fd support in os.chown()") + def test_chown_dir_fd(self): support.unlink(support.TESTFN) support.create_empty_file(support.TESTFN) f = posix.open(posix.getcwd(), posix.O_RDONLY) try: - posix.fchownat(f, support.TESTFN, os.getuid(), os.getgid()) + posix.chown(support.TESTFN, os.getuid(), os.getgid(), dir_fd=f) finally: posix.close(f) - @unittest.skipUnless(hasattr(posix, 'fstatat'), "test needs posix.fstatat()") - def test_fstatat(self): + @unittest.skipUnless(os.stat in os.supports_dir_fd, "test needs dir_fd support in os.stat()") + def test_stat_dir_fd(self): support.unlink(support.TESTFN) with open(support.TESTFN, 'w') as outfile: outfile.write("testline\n") @@ -698,31 +697,41 @@ f = posix.open(posix.getcwd(), posix.O_RDONLY) try: s1 = posix.stat(support.TESTFN) - s2 = posix.fstatat(f, support.TESTFN) + s2 = posix.stat(support.TESTFN, dir_fd=f) self.assertEqual(s1, s2) finally: posix.close(f) - @unittest.skipUnless(hasattr(posix, 'futimesat'), "test needs posix.futimesat()") - def test_futimesat(self): + @unittest.skipUnless(os.utime in os.supports_dir_fd, "test needs dir_fd support in os.utime()") + def test_utime_dir_fd(self): f = posix.open(posix.getcwd(), posix.O_RDONLY) try: now = time.time() - posix.futimesat(f, support.TESTFN, None) - posix.futimesat(f, support.TESTFN) - self.assertRaises(TypeError, posix.futimesat, f, support.TESTFN, (None, None)) - self.assertRaises(TypeError, posix.futimesat, f, support.TESTFN, (now, None)) - self.assertRaises(TypeError, posix.futimesat, f, support.TESTFN, (None, now)) - posix.futimesat(f, support.TESTFN, (int(now), int(now))) - posix.futimesat(f, support.TESTFN, (now, now)) + posix.utime(support.TESTFN, None, dir_fd=f) + posix.utime(support.TESTFN, 
dir_fd=f) + self.assertRaises(TypeError, posix.utime, support.TESTFN, now, dir_fd=f) + self.assertRaises(TypeError, posix.utime, support.TESTFN, (None, None), dir_fd=f) + self.assertRaises(TypeError, posix.utime, support.TESTFN, (now, None), dir_fd=f) + self.assertRaises(TypeError, posix.utime, support.TESTFN, (None, now), dir_fd=f) + self.assertRaises(TypeError, posix.utime, support.TESTFN, (now, "x"), dir_fd=f) + posix.utime(support.TESTFN, (int(now), int(now)), dir_fd=f) + posix.utime(support.TESTFN, (now, now), dir_fd=f) + posix.utime(support.TESTFN, + (int(now), int((now - int(now)) * 1e9)), dir_fd=f) + posix.utime(support.TESTFN, dir_fd=f, + times=(int(now), int((now - int(now)) * 1e9))) + + if os.utime in os.supports_follow_symlinks: + posix.utime(support.TESTFN, follow_symlinks=False, dir_fd=f) + finally: posix.close(f) - @unittest.skipUnless(hasattr(posix, 'linkat'), "test needs posix.linkat()") - def test_linkat(self): + @unittest.skipUnless(os.link in os.supports_dir_fd, "test needs dir_fd support in os.link()") + def test_link_dir_fd(self): f = posix.open(posix.getcwd(), posix.O_RDONLY) try: - posix.linkat(f, support.TESTFN, f, support.TESTFN + 'link') + posix.link(support.TESTFN, support.TESTFN + 'link', src_dir_fd=f, dst_dir_fd=f) # should have same inodes self.assertEqual(posix.stat(support.TESTFN)[1], posix.stat(support.TESTFN + 'link')[1]) @@ -730,26 +739,26 @@ posix.close(f) support.unlink(support.TESTFN + 'link') - @unittest.skipUnless(hasattr(posix, 'mkdirat'), "test needs posix.mkdirat()") - def test_mkdirat(self): + @unittest.skipUnless(os.mkdir in os.supports_dir_fd, "test needs dir_fd support in os.mkdir()") + def test_mkdir_dir_fd(self): f = posix.open(posix.getcwd(), posix.O_RDONLY) try: - posix.mkdirat(f, support.TESTFN + 'dir') + posix.mkdir(support.TESTFN + 'dir', dir_fd=f) posix.stat(support.TESTFN + 'dir') # should not raise exception finally: posix.close(f) support.rmtree(support.TESTFN + 'dir') - @unittest.skipUnless(hasattr(posix, 'mknodat') and hasattr(stat, 'S_IFIFO'), - "don't have mknodat()/S_IFIFO") - def test_mknodat(self): + @unittest.skipUnless((os.mknod in os.supports_dir_fd) and hasattr(stat, 'S_IFIFO'), + "test requires both stat.S_IFIFO and dir_fd support for os.mknod()") + def test_mknod_dir_fd(self): # Test using mknodat() to create a FIFO (the only use specified # by POSIX). support.unlink(support.TESTFN) mode = stat.S_IFIFO | stat.S_IRUSR | stat.S_IWUSR f = posix.open(posix.getcwd(), posix.O_RDONLY) try: - posix.mknodat(f, support.TESTFN, mode, 0) + posix.mknod(support.TESTFN, mode, 0, dir_fd=f) except OSError as e: # Some old systems don't allow unprivileged users to use # mknod(), or only support creating device nodes. 
@@ -759,13 +768,13 @@ finally: posix.close(f) - @unittest.skipUnless(hasattr(posix, 'openat'), "test needs posix.openat()") - def test_openat(self): + @unittest.skipUnless(os.open in os.supports_dir_fd, "test needs dir_fd support in os.open()") + def test_open_dir_fd(self): support.unlink(support.TESTFN) with open(support.TESTFN, 'w') as outfile: outfile.write("testline\n") a = posix.open(posix.getcwd(), posix.O_RDONLY) - b = posix.openat(a, support.TESTFN, posix.O_RDONLY) + b = posix.open(support.TESTFN, posix.O_RDONLY, dir_fd=a) try: res = posix.read(b, 9).decode(encoding="utf-8") self.assertEqual("testline\n", res) @@ -773,24 +782,24 @@ posix.close(a) posix.close(b) - @unittest.skipUnless(hasattr(posix, 'readlinkat'), "test needs posix.readlinkat()") - def test_readlinkat(self): + @unittest.skipUnless(os.readlink in os.supports_dir_fd, "test needs dir_fd support in os.readlink()") + def test_readlink_dir_fd(self): os.symlink(support.TESTFN, support.TESTFN + 'link') f = posix.open(posix.getcwd(), posix.O_RDONLY) try: self.assertEqual(posix.readlink(support.TESTFN + 'link'), - posix.readlinkat(f, support.TESTFN + 'link')) + posix.readlink(support.TESTFN + 'link', dir_fd=f)) finally: support.unlink(support.TESTFN + 'link') posix.close(f) - @unittest.skipUnless(hasattr(posix, 'renameat'), "test needs posix.renameat()") - def test_renameat(self): + @unittest.skipUnless(os.rename in os.supports_dir_fd, "test needs dir_fd support in os.rename()") + def test_rename_dir_fd(self): support.unlink(support.TESTFN) support.create_empty_file(support.TESTFN + 'ren') f = posix.open(posix.getcwd(), posix.O_RDONLY) try: - posix.renameat(f, support.TESTFN + 'ren', f, support.TESTFN) + posix.rename(support.TESTFN + 'ren', support.TESTFN, src_dir_fd=f, dst_dir_fd=f) except: posix.rename(support.TESTFN + 'ren', support.TESTFN) raise @@ -799,23 +808,23 @@ finally: posix.close(f) - @unittest.skipUnless(hasattr(posix, 'symlinkat'), "test needs posix.symlinkat()") - def test_symlinkat(self): + @unittest.skipUnless(os.symlink in os.supports_dir_fd, "test needs dir_fd support in os.symlink()") + def test_symlink_dir_fd(self): f = posix.open(posix.getcwd(), posix.O_RDONLY) try: - posix.symlinkat(support.TESTFN, f, support.TESTFN + 'link') + posix.symlink(support.TESTFN, support.TESTFN + 'link', dir_fd=f) self.assertEqual(posix.readlink(support.TESTFN + 'link'), support.TESTFN) finally: posix.close(f) support.unlink(support.TESTFN + 'link') - @unittest.skipUnless(hasattr(posix, 'unlinkat'), "test needs posix.unlinkat()") - def test_unlinkat(self): + @unittest.skipUnless(os.unlink in os.supports_dir_fd, "test needs dir_fd support in os.unlink()") + def test_unlink_dir_fd(self): f = posix.open(posix.getcwd(), posix.O_RDONLY) support.create_empty_file(support.TESTFN + 'del') posix.stat(support.TESTFN + 'del') # should not throw exception try: - posix.unlinkat(f, support.TESTFN + 'del') + posix.unlink(support.TESTFN + 'del', dir_fd=f) except: support.unlink(support.TESTFN + 'del') raise @@ -824,31 +833,12 @@ finally: posix.close(f) - @unittest.skipUnless(hasattr(posix, 'utimensat'), "test needs posix.utimensat()") - def test_utimensat(self): - f = posix.open(posix.getcwd(), posix.O_RDONLY) - try: - now = time.time() - posix.utimensat(f, support.TESTFN, None, None) - posix.utimensat(f, support.TESTFN) - posix.utimensat(f, support.TESTFN, flags=os.AT_SYMLINK_NOFOLLOW) - self.assertRaises(TypeError, posix.utimensat, f, support.TESTFN, (None, None), (None, None)) - self.assertRaises(TypeError, posix.utimensat, f, 
support.TESTFN, (now, 0), None) - self.assertRaises(TypeError, posix.utimensat, f, support.TESTFN, None, (now, 0)) - posix.utimensat(f, support.TESTFN, (int(now), int((now - int(now)) * 1e9)), - (int(now), int((now - int(now)) * 1e9))) - posix.utimensat(dirfd=f, path=support.TESTFN, - atime=(int(now), int((now - int(now)) * 1e9)), - mtime=(int(now), int((now - int(now)) * 1e9))) - finally: - posix.close(f) - - @unittest.skipUnless(hasattr(posix, 'mkfifoat'), "don't have mkfifoat()") - def test_mkfifoat(self): + @unittest.skipUnless(os.mkfifo in os.supports_dir_fd, "test needs dir_fd support in os.mkfifo()") + def test_mkfifo_dir_fd(self): support.unlink(support.TESTFN) f = posix.open(posix.getcwd(), posix.O_RDONLY) try: - posix.mkfifoat(f, support.TESTFN, stat.S_IRUSR | stat.S_IWUSR) + posix.mkfifo(support.TESTFN, stat.S_IRUSR | stat.S_IWUSR, dir_fd=f) self.assertTrue(stat.S_ISFIFO(posix.stat(support.TESTFN).st_mode)) finally: posix.close(f) diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -268,7 +268,7 @@ # don't follow shutil.copystat(src_link, dst_link, symlinks=True) dst_link_stat = os.lstat(dst_link) - if hasattr(os, 'lutimes'): + if os.utime in os.supports_follow_symlinks: for attr in 'st_atime', 'st_mtime': # The modification times may be truncated in the new file. self.assertLessEqual(getattr(src_link_stat, attr), @@ -334,11 +334,11 @@ write_file(dst, 'bar') os_error = OSError(errno.EPERM, 'EPERM') - def _raise_on_user_foo(fname, attr, val): + def _raise_on_user_foo(fname, attr, val, **kwargs): if attr == 'user.foo': raise os_error else: - orig_setxattr(fname, attr, val) + orig_setxattr(fname, attr, val, **kwargs) try: orig_setxattr = os.setxattr os.setxattr = _raise_on_user_foo @@ -361,13 +361,13 @@ write_file(src, 'foo') os.symlink(src, src_link) os.setxattr(src, 'trusted.foo', b'42') - os.lsetxattr(src_link, 'trusted.foo', b'43') + os.setxattr(src_link, 'trusted.foo', b'43', follow_symlinks=False) dst = os.path.join(tmp_dir, 'bar') dst_link = os.path.join(tmp_dir, 'qux') write_file(dst, 'bar') os.symlink(dst, dst_link) shutil._copyxattr(src_link, dst_link, symlinks=True) - self.assertEqual(os.lgetxattr(dst_link, 'trusted.foo'), b'43') + self.assertEqual(os.getxattr(dst_link, 'trusted.foo', follow_symlinks=False), b'43') self.assertRaises(OSError, os.getxattr, dst, 'trusted.foo') shutil._copyxattr(src_link, dst, symlinks=True) self.assertEqual(os.getxattr(dst, 'trusted.foo'), b'43') @@ -419,7 +419,7 @@ self.assertTrue(os.path.islink(dst)) self.assertEqual(os.readlink(dst), os.readlink(src_link)) dst_stat = os.lstat(dst) - if hasattr(os, 'lutimes'): + if os.utime in os.supports_follow_symlinks: for attr in 'st_atime', 'st_mtime': # The modification times may be truncated in the new file. self.assertLessEqual(getattr(src_link_stat, attr), diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -40,6 +40,14 @@ Library ------- +- Issue #14626: Large refactoring of functions / parameters in the os module. + Many functions now support "dir_fd" and "follow_symlinks" parameters; + some also support accepting an open file descriptor in place of of a path + string. Added os.support_* collections as LBYL helpers. Removed many + functions only previously seen in 3.3 alpha releases (often starting with + "f" or "l", or ending with "at"). Originally suggested by Serhiy Storchaka; + implemented by Larry Hastings. + - Issue #15008: Implement PEP 362 "Signature Objects". Patch by Yury Selivanov. 
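The Issue #14626 entry above summarizes the new calling conventions. As an illustration only (not part of the patch), here is a minimal sketch of the LBYL pattern the os.supports_* collections enable, assuming a POSIX platform with a writable temporary directory; the file and link names are placeholders:

    import os
    import tempfile

    with tempfile.TemporaryDirectory() as d:
        target = os.path.join(d, 'target')
        open(target, 'w').close()

        # dir_fd: operate on a name relative to an open directory fd.
        if os.utime in os.supports_dir_fd:
            dir_fd = os.open(d, os.O_RDONLY)
            try:
                os.utime('target', dir_fd=dir_fd)
            finally:
                os.close(dir_fd)
        else:
            os.utime(target)

        # follow_symlinks: examine the link itself, not what it points to.
        link = os.path.join(d, 'link')
        os.symlink(target, link)
        if os.stat in os.supports_follow_symlinks:
            st = os.stat(link, follow_symlinks=False)
        else:
            st = os.lstat(link)

        # An open file descriptor in place of a path, where supported.
        if os.stat in os.supports_fd:
            with open(target, 'rb') as fp:
                print(os.stat(fp.fileno()).st_size)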
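The fwalk()-based cleanup in the updated test_os tests above can be generalized into a small removal helper. This is a sketch only, assuming os.fwalk is available and that os.stat, os.unlink and os.rmdir accept dir_fd on the platform; it uses the rmdir()/unlink() split rather than the interim rmdir= keyword seen in the test:

    import os
    import stat

    def remove_tree(top):
        # Walk bottom-up; each entry is removed relative to the directory
        # fd yielded by fwalk, so no absolute paths need to be rebuilt.
        for root, dirs, files, rootfd in os.fwalk(top, topdown=False):
            for name in files:
                os.unlink(name, dir_fd=rootfd)
            for name in dirs:
                st = os.stat(name, dir_fd=rootfd, follow_symlinks=False)
                if stat.S_ISDIR(st.st_mode):
                    os.rmdir(name, dir_fd=rootfd)
                else:
                    # A symlink to a directory can show up in dirs but
                    # must be unlinked, not rmdir'ed.
                    os.unlink(name, dir_fd=rootfd)
        os.rmdir(top)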
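Similarly, the former l*/f* extended-attribute spellings collapse into keyword and descriptor forms of a single function family, as exercised by the updated ExtendedAttributeTests. A short sketch, Linux-only and assuming the filesystem permits user.* attributes; the file name and attribute are placeholders:

    import os

    fn = 'xattr-demo.txt'
    open(fn, 'wb').close()

    os.setxattr(fn, 'user.comment', b'hello')       # plain path form
    print(os.getxattr(fn, 'user.comment'))          # b'hello'
    print(os.listxattr(fn))

    with open(fn, 'rb') as fp:                      # fd form (was fgetxattr)
        print(os.getxattr(fp.fileno(), 'user.comment'))

    # follow_symlinks=False takes over from the old lgetxattr/lsetxattr.
    os.removexattr(fn, 'user.comment')
    os.remove(fn)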
@@ -56,7 +64,7 @@ - Issue #9527: datetime.astimezone() method will now supply a class timezone instance corresponding to the system local timezone when called with no arguments. - + - Issue #14653: email.utils.mktime_tz() no longer relies on system mktime() when timezone offest is supplied. diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -379,10 +379,12 @@ #undef STRUCT_STAT #if defined(MS_WIN64) || defined(MS_WINDOWS) # define STAT win32_stat +# define LSTAT win32_lstat # define FSTAT win32_fstat # define STRUCT_STAT struct win32_stat #else # define STAT stat +# define LSTAT lstat # define FSTAT fstat # define STRUCT_STAT struct stat #endif @@ -398,6 +400,379 @@ #endif #endif + +#ifdef MS_WINDOWS +static int +win32_warn_bytes_api() +{ + return PyErr_WarnEx(PyExc_DeprecationWarning, + "The Windows bytes API has been deprecated, " + "use Unicode filenames instead", + 1); +} +#endif + + +#ifdef AT_FDCWD +#define DEFAULT_DIR_FD AT_FDCWD +#else +#define DEFAULT_DIR_FD (-100) +#endif + +static int +_fd_converter(PyObject *o, int *p, int default_value) { + long long_value; + if (o == Py_None) { + *p = default_value; + return 1; + } + if (PyFloat_Check(o)) { + PyErr_SetString(PyExc_TypeError, + "integer argument expected, got float" ); + return 0; + } + long_value = PyLong_AsLong(o); + if (long_value == -1 && PyErr_Occurred()) + return 0; + if (long_value > INT_MAX) { + PyErr_SetString(PyExc_OverflowError, + "signed integer is greater than maximum"); + return 0; + } + if (long_value < INT_MIN) { + PyErr_SetString(PyExc_OverflowError, + "signed integer is less than minimum"); + return 0; + } + *p = (int)long_value; + return 1; +} + +static int +dir_fd_converter(PyObject *o, void *p) { + return _fd_converter(o, (int *)p, DEFAULT_DIR_FD); +} + + + +/* + * A PyArg_ParseTuple "converter" function + * that handles filesystem paths in the manner + * preferred by the os module. + * + * path_converter accepts (Unicode) strings and their + * subclasses, and bytes and their subclasses. What + * it does with the argument depends on the platform: + * + * * On Windows, if we get a (Unicode) string we + * extract the wchar_t * and return it; if we get + * bytes we extract the char * and return that. + * + * * On all other platforms, strings are encoded + * to bytes using PyUnicode_FSConverter, then we + * extract the char * from the bytes object and + * return that. + * + * path_converter also optionally accepts signed + * integers (representing open file descriptors) instead + * of path strings. + * + * Input fields: + * path.nullable + * If nonzero, the path is permitted to be None. + * path.allow_fd + * If nonzero, the path is permitted to be a file handle + * (a signed int) instead of a string. + * path.function_name + * If non-NULL, path_converter will use that as the name + * of the function in error messages. + * (If path.argument_name is NULL it omits the function name.) + * path.argument_name + * If non-NULL, path_converter will use that as the name + * of the parameter in error messages. + * (If path.argument_name is NULL it uses "path".) + * + * Output fields: + * path.wide + * Points to the path if it was expressed as Unicode + * and was not encoded. (Only used on Windows.) + * path.narrow + * Points to the path if it was expressed as bytes, + * or it was Unicode and was encoded to bytes. 
+ * path.fd + * Contains a file descriptor if path.accept_fd was true + * and the caller provided a signed integer instead of any + * sort of string. + * + * WARNING: if your "path" parameter is optional, and is + * unspecified, path_converter will never get called. + * So if you set allow_fd, you *MUST* initialize path.fd = -1 + * yourself! + * path.length + * The length of the path in characters, if specified as + * a string. + * path.object + * The original object passed in. + * path.cleanup + * For internal use only. May point to a temporary object. + * (Pay no attention to the man behind the curtain.) + * + * At most one of path.wide or path.narrow will be non-NULL. + * If path was None and path.nullable was set, + * or if path was an integer and path.allow_fd was set, + * both path.wide and path.narrow will be NULL + * and path.length will be 0. + * + * path_converter takes care to not write to the path_t + * unless it's successful. However it must reset the + * "cleanup" field each time it's called. + * + * Use as follows: + * path_t path; + * memset(&path, 0, sizeof(path)); + * PyArg_ParseTuple(args, "O&", path_converter, &path); + * // ... use values from path ... + * path_cleanup(&path); + * + * (Note that if PyArg_Parse fails you don't need to call + * path_cleanup(). However it is safe to do so.) + */ +typedef struct { + char *function_name; + char *argument_name; + int nullable; + int allow_fd; + wchar_t *wide; + char *narrow; + int fd; + Py_ssize_t length; + PyObject *object; + PyObject *cleanup; +} path_t; + +static void +path_cleanup(path_t *path) { + if (path->cleanup) { + Py_DECREF(path->cleanup); + path->cleanup = NULL; + } +} + +static int +path_converter(PyObject *o, void *p) { + path_t *path = (path_t *)p; + PyObject *unicode, *bytes; + Py_ssize_t length; + char *narrow; + +#define FORMAT_EXCEPTION(exc, fmt) \ + PyErr_Format(exc, "%s%s" fmt, \ + path->function_name ? path->function_name : "", \ + path->function_name ? ": " : "", \ + path->argument_name ? path->argument_name : "path") + + /* Py_CLEANUP_SUPPORTED support */ + if (o == NULL) { + path_cleanup(path); + return 1; + } + + /* ensure it's always safe to call path_cleanup() */ + path->cleanup = NULL; + + if (o == Py_None) { + if (!path->nullable) { + FORMAT_EXCEPTION(PyExc_TypeError, + "can't specify None for %s argument"); + return 0; + } + path->wide = NULL; + path->narrow = NULL; + path->length = 0; + path->object = o; + path->fd = -1; + return 1; + } + + unicode = PyUnicode_FromObject(o); + if (unicode) { +#ifdef MS_WINDOWS + wchar_t *wide; + length = PyUnicode_GET_SIZE(unicode); + if (length > 32767) { + FORMAT_EXCEPTION(PyExc_ValueError, "%s too long for Windows"); + Py_DECREF(unicode); + return 0; + } + + wide = PyUnicode_AsUnicode(unicode); + if (!wide) { + Py_DECREF(unicode); + return 0; + } + + path->wide = wide; + path->narrow = NULL; + path->length = length; + path->object = o; + path->fd = -1; + path->cleanup = unicode; + return Py_CLEANUP_SUPPORTED; +#else + int converted = PyUnicode_FSConverter(unicode, &bytes); + Py_DECREF(unicode); + if (!converted) + bytes = NULL; +#endif + } + else { + PyErr_Clear(); + bytes = PyBytes_FromObject(o); + if (!bytes) { + PyErr_Clear(); + if (path->allow_fd) { + int fd; + /* + * note: _fd_converter always permits None. + * but we've already done our None check. + * so o cannot be None at this point. 
+ */ + int result = _fd_converter(o, &fd, -1); + if (result) { + path->wide = NULL; + path->narrow = NULL; + path->length = 0; + path->object = o; + path->fd = fd; + return result; + } + } + } + } + + if (!bytes) { + if (!PyErr_Occurred()) + FORMAT_EXCEPTION(PyExc_TypeError, "illegal type for %s parameter"); + return 0; + } + +#ifdef MS_WINDOWS + if (win32_warn_bytes_api()) { + Py_DECREF(bytes); + return 0; + } +#endif + + length = PyBytes_GET_SIZE(bytes); +#ifdef MS_WINDOWS + if (length > MAX_PATH) { + FORMAT_EXCEPTION(PyExc_ValueError, "%s too long for Windows"); + Py_DECREF(bytes); + return 0; + } +#endif + + narrow = PyBytes_AS_STRING(bytes); + if (length != strlen(narrow)) { + FORMAT_EXCEPTION(PyExc_ValueError, "embedded NUL character in %s"); + Py_DECREF(bytes); + return 0; + } + + path->wide = NULL; + path->narrow = narrow; + path->length = length; + path->object = o; + path->fd = -1; + path->cleanup = bytes; + return Py_CLEANUP_SUPPORTED; +} + +static void +argument_unavailable_error(char *function_name, char *argument_name) { + PyErr_Format(PyExc_NotImplementedError, + "%s%s%s unavailable on this platform", + (function_name != NULL) ? function_name : "", + (function_name != NULL) ? ": ": "", + argument_name); +} + +static int +dir_fd_unavailable(PyObject *o, void *p) { + int *dir_fd = (int *)p; + int return_value = _fd_converter(o, dir_fd, DEFAULT_DIR_FD); + if (!return_value) + return 0; + if (*dir_fd == DEFAULT_DIR_FD) + return 1; + argument_unavailable_error(NULL, "dir_fd"); + return 0; +} + +static int +fd_specified(char *function_name, int fd) { + if (fd == -1) + return 0; + + argument_unavailable_error(function_name, "fd"); + return 1; +} + +static int +follow_symlinks_specified(char *function_name, int follow_symlinks) { + if (follow_symlinks) + return 0; + + argument_unavailable_error(function_name, "follow_symlinks"); + return 1; +} + +static int +path_and_dir_fd_invalid(char *function_name, path_t *path, int dir_fd) { + if (!path->narrow && !path->wide && (dir_fd != DEFAULT_DIR_FD)) { + PyErr_Format(PyExc_ValueError, + "%s: can't specify dir_fd without matching path", + function_name); + return 1; + } + return 0; +} + +static int +dir_fd_and_fd_invalid(char *function_name, int dir_fd, int fd) { + if ((dir_fd != DEFAULT_DIR_FD) && (fd != -1)) { + PyErr_Format(PyExc_ValueError, + "%s: can't specify both dir_fd and fd", + function_name); + return 1; + } + return 0; +} + +static int +fd_and_follow_symlinks_invalid(char *function_name, int fd, + int follow_symlinks) { + if ((fd > 0) && (!follow_symlinks)) { + PyErr_Format(PyExc_ValueError, + "%s: cannot use fd and follow_symlinks together", + function_name); + return 1; + } + return 0; +} + +static int +dir_fd_and_follow_symlinks_invalid(char *function_name, int dir_fd, + int follow_symlinks) { + if ((dir_fd != DEFAULT_DIR_FD) && (!follow_symlinks)) { + PyErr_Format(PyExc_ValueError, + "%s: cannot use dir_fd and follow_symlinks together", + function_name); + return 1; + } + return 0; +} + /* A helper used by a number of POSIX-only functions */ #ifndef MS_WINDOWS static int @@ -566,33 +941,6 @@ return TRUE; } -static int -win32_warn_bytes_api() -{ - return PyErr_WarnEx(PyExc_DeprecationWarning, - "The Windows bytes API has been deprecated, " - "use Unicode filenames instead", - 1); -} - -static PyObject* -win32_decode_filename(PyObject *obj) -{ - PyObject *unicode; - if (PyUnicode_Check(obj)) { - if (PyUnicode_READY(obj)) - return NULL; - Py_INCREF(obj); - return obj; - } - if (!PyUnicode_FSDecoder(obj, &unicode)) - return 
NULL; - if (win32_warn_bytes_api()) { - Py_DECREF(unicode); - return NULL; - } - return unicode; -} #endif /* MS_WINDOWS */ /* Return a dictionary corresponding to the POSIX environment table */ @@ -774,6 +1122,32 @@ #endif /* MS_WINDOWS */ +/* + * Some functions return Win32 errors, others only ever use posix_error + * (this is for backwards compatibility with exceptions) + */ +static PyObject * +path_posix_error(char *function_name, path_t *path) +{ + if (path->narrow) + return posix_error_with_filename(path->narrow); + return posix_error(); +} + +static PyObject * +path_error(char *function_name, path_t *path) +{ +#ifdef MS_WINDOWS + if (path->narrow) + return win32_error(function_name, path->narrow); + if (path->wide) + return win32_error_unicode(function_name, path->wide); + return win32_error(function_name, NULL); +#else + return path_posix_error(function_name, path); +#endif +} + #if defined(PYOS_OS2) /********************************************************************** * Helper Function to Trim and Format OS/2 Messages @@ -892,32 +1266,6 @@ return Py_None; } -static PyObject * -posix_2str(PyObject *args, - char *format, - int (*func)(const char *, const char *)) -{ - PyObject *opath1 = NULL, *opath2 = NULL; - char *path1, *path2; - int res; - if (!PyArg_ParseTuple(args, format, - PyUnicode_FSConverter, &opath1, - PyUnicode_FSConverter, &opath2)) { - return NULL; - } - path1 = PyBytes_AsString(opath1); - path2 = PyBytes_AsString(opath2); - Py_BEGIN_ALLOW_THREADS - res = (*func)(path1, path2); - Py_END_ALLOW_THREADS - Py_DECREF(opath1); - Py_DECREF(opath2); - if (res != 0) - /* XXX how to report both path1 and path2??? */ - return posix_error(); - Py_INCREF(Py_None); - return Py_None; -} #ifdef MS_WINDOWS static PyObject* @@ -1525,7 +1873,7 @@ #endif /* MS_WINDOWS */ PyDoc_STRVAR(stat_result__doc__, -"stat_result: Result from stat or lstat.\n\n\ +"stat_result: Result from stat, fstat, or lstat.\n\n\ This object may be accessed either as a tuple of\n\ (mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime)\n\ or via the attributes st_mode, st_ino, st_dev, st_nlink, st_uid, and so on.\n\ @@ -1879,132 +2227,238 @@ return v; } -static PyObject * -posix_do_stat(PyObject *self, PyObject *args, - char *format, -#ifdef __VMS - int (*statfunc)(const char *, STRUCT_STAT *, ...), -#else - int (*statfunc)(const char *, STRUCT_STAT *), -#endif - char *wformat, - int (*wstatfunc)(const wchar_t *, STRUCT_STAT *)) +/* POSIX methods */ + + +static PyObject * +posix_do_stat(char *function_name, path_t *path, + int dir_fd, int follow_symlinks) { STRUCT_STAT st; - PyObject *opath; - char *path; - int res; - PyObject *result; - + int result; + +#if !defined(MS_WINDOWS) && !defined(HAVE_FSTATAT) && !defined(HAVE_LSTAT) + if (follow_symlinks_specified(function_name, follow_symlinks)) + return NULL; +#endif + + if (path_and_dir_fd_invalid("stat", path, dir_fd) || + dir_fd_and_fd_invalid("stat", dir_fd, path->fd) || + fd_and_follow_symlinks_invalid("stat", path->fd, follow_symlinks)) + return NULL; + + Py_BEGIN_ALLOW_THREADS + if (path->fd != -1) + result = FSTAT(path->fd, &st); + else #ifdef MS_WINDOWS - PyObject *po; - if (PyArg_ParseTuple(args, wformat, &po)) { - wchar_t *wpath = PyUnicode_AsUnicode(po); - if (wpath == NULL) - return NULL; - - Py_BEGIN_ALLOW_THREADS - res = wstatfunc(wpath, &st); - Py_END_ALLOW_THREADS - - if (res != 0) - return win32_error_object("stat", po); - return _pystat_fromstructstat(&st); - } - /* Drop the argument parsing error as narrow strings - are also valid. 
*/ - PyErr_Clear(); -#endif - - if (!PyArg_ParseTuple(args, format, - PyUnicode_FSConverter, &opath)) - return NULL; -#ifdef MS_WINDOWS - if (win32_warn_bytes_api()) { - Py_DECREF(opath); - return NULL; - } -#endif - path = PyBytes_AsString(opath); - Py_BEGIN_ALLOW_THREADS - res = (*statfunc)(path, &st); + if (path->wide) { + if (follow_symlinks) + result = win32_stat_w(path->wide, &st); + else + result = win32_lstat_w(path->wide, &st); + } + else +#endif +#if defined(HAVE_LSTAT) || defined(MS_WINDOWS) + if ((!follow_symlinks) && (dir_fd == DEFAULT_DIR_FD)) + result = LSTAT(path->narrow, &st); + else +#endif +#ifdef HAVE_FSTATAT + if ((dir_fd != DEFAULT_DIR_FD) || !follow_symlinks) + result = fstatat(dir_fd, path->narrow, &st, + follow_symlinks ? 0 : AT_SYMLINK_NOFOLLOW); + else +#endif + result = STAT(path->narrow, &st); Py_END_ALLOW_THREADS - if (res != 0) { -#ifdef MS_WINDOWS - result = win32_error("stat", path); -#else - result = posix_error_with_filename(path); -#endif - } - else - result = _pystat_fromstructstat(&st); - - Py_DECREF(opath); - return result; -} - -/* POSIX methods */ + if (result != 0) + return path_error("stat", path); + + return _pystat_fromstructstat(&st); +} + +PyDoc_STRVAR(posix_stat__doc__, +"stat(path, *, dir_fd=None, follow_symlinks=True) -> stat result\n\n\ +Perform a stat system call on the given path.\n\ +\n\ +path may be specified as either a string or as an open file descriptor.\n\ +\n\ +If dir_fd is not None, it should be a file descriptor open to a directory,\n\ + and path should be relative; path will then be relative to that directory.\n\ + dir_fd may not be supported on your platform; if it is unavailable, using\n\ + it will raise a NotImplementedError.\n\ +If follow_symlinks is False, and the last element of the path is a symbolic\n\ + link, stat will examine the symbolic link itself instead of the file the\n\ + link points to.\n\ +It is an error to use dir_fd or follow_symlinks when specifying path as\n\ + an open file descriptor."); + +static PyObject * +posix_stat(PyObject *self, PyObject *args, PyObject *kwargs) +{ + static char *keywords[] = {"path", "dir_fd", "follow_symlinks", NULL}; + path_t path; + int dir_fd = DEFAULT_DIR_FD; + int follow_symlinks = 1; + PyObject *return_value; + + memset(&path, 0, sizeof(path)); + path.allow_fd = 1; + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&|$O&p:stat", keywords, + path_converter, &path, +#ifdef HAVE_FSTATAT + dir_fd_converter, &dir_fd, +#else + dir_fd_unavailable, &dir_fd, +#endif + &follow_symlinks)) + return NULL; + return_value = posix_do_stat("stat", &path, dir_fd, follow_symlinks); + path_cleanup(&path); + return return_value; +} + +PyDoc_STRVAR(posix_lstat__doc__, +"lstat(path, *, dir_fd=None) -> stat result\n\n\ +Like stat(), but do not follow symbolic links.\n\ +Equivalent to stat(path, follow_symlinks=False)."); + +static PyObject * +posix_lstat(PyObject *self, PyObject *args, PyObject *kwargs) +{ + static char *keywords[] = {"path", "dir_fd", NULL}; + path_t path; + int dir_fd = DEFAULT_DIR_FD; + int follow_symlinks = 0; + PyObject *return_value; + + memset(&path, 0, sizeof(path)); + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&|$O&:lstat", keywords, + path_converter, &path, +#ifdef HAVE_FSTATAT + dir_fd_converter, &dir_fd +#else + dir_fd_unavailable, &dir_fd +#endif + )) + return NULL; + return_value = posix_do_stat("stat", &path, dir_fd, follow_symlinks); + path_cleanup(&path); + return return_value; +} PyDoc_STRVAR(posix_access__doc__, -"access(path, mode) -> True if granted, 
False otherwise\n\n\ -Use the real uid/gid to test for access to a path. Note that most\n\ -operations will use the effective uid/gid, therefore this routine can\n\ -be used in a suid/sgid environment to test if the invoking user has the\n\ -specified access to the path. The mode argument can be F_OK to test\n\ -existence, or the inclusive-OR of R_OK, W_OK, and X_OK."); - -static PyObject * -posix_access(PyObject *self, PyObject *args) -{ - const char *path; +"access(path, mode, *, dir_fd=None, effective_ids=False,\ + follow_symlinks=True)\n\n\ +Use the real uid/gid to test for access to a path. Returns True if granted,\n\ +False otherwise.\n\ +\n\ +If dir_fd is not None, it should be a file descriptor open to a directory,\n\ + and path should be relative; path will then be relative to that directory.\n\ +If effective_ids is True, access will use the effective uid/gid instead of\n\ + the real uid/gid.\n\ +If follow_symlinks is False, and the last element of the path is a symbolic\n\ + link, access will examine the symbolic link itself instead of the file the\n\ + link points to.\n\ +dir_fd, effective_ids, and follow_symlinks may not be implemented\n\ + on your platform. If they are unavailable, using them will raise a\n\ + NotImplementedError.\n\ +\n\ +Note that most operations will use the effective uid/gid, therefore this\n\ + routine can be used in a suid/sgid environment to test if the invoking user\n\ + has the specified access to the path.\n\ +The mode argument can be F_OK to test existence, or the inclusive-OR\n\ + of R_OK, W_OK, and X_OK."); + +static PyObject * +posix_access(PyObject *self, PyObject *args, PyObject *kwargs) +{ + static char *keywords[] = {"path", "mode", "dir_fd", "effective_ids", + "follow_symlinks", NULL}; + path_t path; int mode; + int dir_fd = DEFAULT_DIR_FD; + int effective_ids = 0; + int follow_symlinks = 1; + PyObject *return_value = NULL; #ifdef MS_WINDOWS DWORD attr; - PyObject *po; - if (PyArg_ParseTuple(args, "Ui:access", &po, &mode)) { - wchar_t* wpath = PyUnicode_AsUnicode(po); - if (wpath == NULL) - return NULL; - Py_BEGIN_ALLOW_THREADS - attr = GetFileAttributesW(wpath); - Py_END_ALLOW_THREADS - goto finish; - } - /* Drop the argument parsing error as narrow strings - are also valid. */ - PyErr_Clear(); - if (!PyArg_ParseTuple(args, "yi:access", &path, &mode)) - return NULL; - if (win32_warn_bytes_api()) - return NULL; +#else + int result; +#endif + + memset(&path, 0, sizeof(path)); + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&i|$O&pp:access", keywords, + path_converter, &path, &mode, +#ifdef HAVE_FACCESSAT + dir_fd_converter, &dir_fd, +#else + dir_fd_unavailable, &dir_fd, +#endif + &effective_ids, &follow_symlinks)) + return NULL; + +#ifndef HAVE_FACCESSAT + if (follow_symlinks_specified("access", follow_symlinks)) + goto exit; + + if (effective_ids) { + argument_unavailable_error("access", "effective_ids"); + goto exit; + } +#endif + +#ifdef MS_WINDOWS Py_BEGIN_ALLOW_THREADS - attr = GetFileAttributesA(path); + if (path.wide != NULL) + attr = GetFileAttributesW(path.wide); + else + attr = GetFileAttributesA(path.narrow); Py_END_ALLOW_THREADS -finish: - if (attr == 0xFFFFFFFF) - /* File does not exist, or cannot read attributes */ - return PyBool_FromLong(0); - /* Access is possible if either write access wasn't requested, or - the file isn't read-only, or if it's a directory, as there are - no read-only directories on Windows. 
*/ - return PyBool_FromLong(!(mode & 2) - || !(attr & FILE_ATTRIBUTE_READONLY) - || (attr & FILE_ATTRIBUTE_DIRECTORY)); -#else - PyObject *opath; - int res; - if (!PyArg_ParseTuple(args, "O&i:access", - PyUnicode_FSConverter, &opath, &mode)) - return NULL; - path = PyBytes_AsString(opath); + + /* + * Access is possible if + * * we didn't get a -1, and + * * write access wasn't requested, + * * or the file isn't read-only, + * * or it's a directory. + * (Directories cannot be read-only on Windows.) + */ + return_value = PyBool_FromLong( + (attr != 0xFFFFFFFF) && + ((mode & 2) || + !(attr & FILE_ATTRIBUTE_READONLY) || + (attr & FILE_ATTRIBUTE_DIRECTORY))); +#else + Py_BEGIN_ALLOW_THREADS - res = access(path, mode); +#ifdef HAVE_FACCESSAT + if ((dir_fd != DEFAULT_DIR_FD) || + effective_ids || + !follow_symlinks) { + int flags = 0; + if (!follow_symlinks) + flags |= AT_SYMLINK_NOFOLLOW; + if (effective_ids) + flags |= AT_EACCESS; + result = faccessat(dir_fd, path.narrow, mode, flags); + } + else +#endif + result = access(path.narrow, mode); Py_END_ALLOW_THREADS - Py_DECREF(opath); - return PyBool_FromLong(res == 0); -#endif + return_value = PyBool_FromLong(!result); +#endif + +#ifndef HAVE_FACCESSAT +exit: +#endif + path_cleanup(&path); + return return_value; } #ifndef F_OK @@ -2075,27 +2529,66 @@ PyDoc_STRVAR(posix_chdir__doc__, "chdir(path)\n\n\ -Change the current working directory to the specified path."); - -static PyObject * -posix_chdir(PyObject *self, PyObject *args) -{ +Change the current working directory to the specified path.\n\ +\n\ +path may always be specified as a string.\n\ +On some platforms, path may also be specified as an open file descriptor.\n\ + If this functionality is unavailable, using it raises an exception."); + +static PyObject * +posix_chdir(PyObject *self, PyObject *args, PyObject *kwargs) +{ + path_t path; + int result; + PyObject *return_value = NULL; + static char *keywords[] = {"path", NULL}; + + memset(&path, 0, sizeof(path)); +#ifdef HAVE_FCHDIR + path.allow_fd = 1; +#endif + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&:chdir", keywords, + path_converter, &path + )) + return NULL; + + Py_BEGIN_ALLOW_THREADS #ifdef MS_WINDOWS - return win32_1str(args, "chdir", "y:chdir", win32_chdir, "U:chdir", win32_wchdir); + if (path.wide) + result = win32_wchdir(path.wide); + else + result = win32_chdir(path.narrow); + result = !result; /* on unix, success = 0, on windows, success = !0 */ #elif defined(PYOS_OS2) && defined(PYCC_GCC) - return posix_1str(args, "O&:chdir", _chdir2); -#elif defined(__VMS) - return posix_1str(args, "O&:chdir", (int (*)(const char *))chdir); -#else - return posix_1str(args, "O&:chdir", chdir); -#endif + result = _chdir2(path.narrow); +#else +#ifdef HAVE_FCHDIR + if (path.fd != -1) + result = fchdir(path.fd); + else +#endif + result = chdir(path.narrow); +#endif + Py_END_ALLOW_THREADS + + if (result) { + return_value = path_error("chdir", &path); + goto exit; + } + + return_value = Py_None; + Py_INCREF(Py_None); + +exit: + path_cleanup(&path); + return return_value; } #ifdef HAVE_FCHDIR PyDoc_STRVAR(posix_fchdir__doc__, -"fchdir(fildes)\n\n\ -Change to the directory of the given file descriptor. fildes must be\n\ -opened on a directory, not a file."); +"fchdir(fd)\n\n\ +Change to the directory of the given file descriptor. fd must be\n\ +opened on a directory, not a file. 
Equivalent to os.chdir(fd)."); static PyObject * posix_fchdir(PyObject *self, PyObject *fdobj) @@ -2106,87 +2599,152 @@ PyDoc_STRVAR(posix_chmod__doc__, -"chmod(path, mode)\n\n\ -Change the access permissions of a file."); - -static PyObject * -posix_chmod(PyObject *self, PyObject *args) -{ - PyObject *opath = NULL; - const char *path = NULL; - int i; - int res; +"chmod(path, mode, *, dir_fd=None, follow_symlinks=True)\n\n\ +Change the access permissions of a file.\n\ +\n\ +path may always be specified as a string.\n\ +On some platforms, path may also be specified as an open file descriptor.\n\ + If this functionality is unavailable, using it raises an exception.\n\ +If dir_fd is not None, it should be a file descriptor open to a directory,\n\ + and path should be relative; path will then be relative to that directory.\n\ +If follow_symlinks is False, and the last element of the path is a symbolic\n\ + link, chmod will modify the symbolic link itself instead of the file the\n\ + link points to.\n\ +It is an error to use dir_fd or follow_symlinks when specifying path as\n\ + an open file descriptor.\n\ +dir_fd and follow_symlinks may not be implemented on your platform.\n\ + If they are unavailable, using them will raise a NotImplementedError."); + +static PyObject * +posix_chmod(PyObject *self, PyObject *args, PyObject *kwargs) +{ + path_t path; + int mode; + int dir_fd = DEFAULT_DIR_FD; + int follow_symlinks = 1; + int result; + PyObject *return_value = NULL; + static char *keywords[] = {"path", "mode", "dir_fd", + "follow_symlinks", NULL}; + #ifdef MS_WINDOWS DWORD attr; - PyObject *po; - if (PyArg_ParseTuple(args, "Ui|:chmod", &po, &i)) { - wchar_t *wpath = PyUnicode_AsUnicode(po); - if (wpath == NULL) - return NULL; - Py_BEGIN_ALLOW_THREADS - attr = GetFileAttributesW(wpath); - if (attr != 0xFFFFFFFF) { - if (i & _S_IWRITE) - attr &= ~FILE_ATTRIBUTE_READONLY; - else - attr |= FILE_ATTRIBUTE_READONLY; - res = SetFileAttributesW(wpath, attr); - } - else - res = 0; - Py_END_ALLOW_THREADS - if (!res) - return win32_error_object("chmod", po); - Py_INCREF(Py_None); - return Py_None; - } - /* Drop the argument parsing error as narrow strings - are also valid. 
*/ - PyErr_Clear(); - - if (!PyArg_ParseTuple(args, "yi:chmod", &path, &i)) - return NULL; - if (win32_warn_bytes_api()) - return NULL; +#endif + +#ifdef HAVE_FCHMODAT + int fchmodat_nofollow_unsupported = 0; +#endif + + memset(&path, 0, sizeof(path)); +#ifdef HAVE_FCHMOD + path.allow_fd = 1; +#endif + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&i|$O&p:chmod", keywords, + path_converter, &path, + &mode, +#ifdef HAVE_FCHMODAT + dir_fd_converter, &dir_fd, +#else + dir_fd_unavailable, &dir_fd, +#endif + &follow_symlinks)) + return NULL; + +#if !(defined(HAVE_FCHMODAT) || defined(HAVE_LCHMOD)) + if (follow_symlinks_specified("chmod", follow_symlinks)) + goto exit; +#endif + +#ifdef MS_WINDOWS Py_BEGIN_ALLOW_THREADS - attr = GetFileAttributesA(path); - if (attr != 0xFFFFFFFF) { - if (i & _S_IWRITE) + if (path.wide) + attr = GetFileAttributesW(path.wide); + else + attr = GetFileAttributesA(path.narrow); + if (attr == 0xFFFFFFFF) + result = 0; + else { + if (mode & _S_IWRITE) attr &= ~FILE_ATTRIBUTE_READONLY; else attr |= FILE_ATTRIBUTE_READONLY; - res = SetFileAttributesA(path, attr); - } + if (path.wide) + result = SetFileAttributesW(path.wide, attr); + else + result = SetFileAttributesA(path.narrow, attr); + } + Py_END_ALLOW_THREADS + + if (!result) { + return_value = win32_error_object("chmod", path.object); + goto exit; + } +#else /* MS_WINDOWS */ + Py_BEGIN_ALLOW_THREADS +#ifdef HAVE_FCHMOD + if (path.fd != -1) + result = fchmod(path.fd, mode); else - res = 0; +#endif +#ifdef HAVE_LCHMOD + if ((!follow_symlinks) && (dir_fd == DEFAULT_DIR_FD)) + result = lchmod(path.narrow, mode); + else +#endif +#ifdef HAVE_FCHMODAT + if ((dir_fd != DEFAULT_DIR_FD) || !follow_symlinks) { + /* + * fchmodat() doesn't currently support AT_SYMLINK_NOFOLLOW! + * The documentation specifically shows how to use it, + * and then says it isn't implemented yet. (glibc 2.15) + * + * Once it is supported, os.chmod will automatically + * support dir_fd and follow_symlinks=False. (Hopefully.) + * Until then, we need to be careful what exception we raise. + */ + result = fchmodat(dir_fd, path.narrow, mode, + follow_symlinks ? 0 : AT_SYMLINK_NOFOLLOW); + /* + * But wait! We can't throw the exception without allowing threads, + * and we can't do that in this nested scope. (Macro trickery, sigh.) + */ + fchmodat_nofollow_unsupported = + result && (errno == ENOTSUP) && !follow_symlinks; + } + else +#endif + result = chmod(path.narrow, mode); Py_END_ALLOW_THREADS - if (!res) { - win32_error("chmod", path); - return NULL; - } + + if (result) { +#ifdef HAVE_FCHMODAT + if (fchmodat_nofollow_unsupported) { + if (dir_fd != DEFAULT_DIR_FD) + dir_fd_and_follow_symlinks_invalid("chmod", + dir_fd, follow_symlinks); + else + follow_symlinks_specified("chmod", follow_symlinks); + } + else +#endif + return_value = path_error("chmod", &path); + goto exit; + } +#endif + Py_INCREF(Py_None); - return Py_None; -#else /* MS_WINDOWS */ - if (!PyArg_ParseTuple(args, "O&i:chmod", PyUnicode_FSConverter, - &opath, &i)) - return NULL; - path = PyBytes_AsString(opath); - Py_BEGIN_ALLOW_THREADS - res = chmod(path, i); - Py_END_ALLOW_THREADS - if (res < 0) - return posix_error_with_allocated_filename(opath); - Py_DECREF(opath); - Py_INCREF(Py_None); - return Py_None; -#endif -} + return_value = Py_None; +exit: + path_cleanup(&path); + return return_value; +} + #ifdef HAVE_FCHMOD PyDoc_STRVAR(posix_fchmod__doc__, "fchmod(fd, mode)\n\n\ Change the access permissions of the file given by file\n\ -descriptor fd."); +descriptor fd. 
Equivalent to os.chmod(fd, mode)."); static PyObject * posix_fchmod(PyObject *self, PyObject *args) @@ -2207,7 +2765,8 @@ PyDoc_STRVAR(posix_lchmod__doc__, "lchmod(path, mode)\n\n\ Change the access permissions of a file. If path is a symlink, this\n\ -affects the link itself rather than the target."); +affects the link itself rather than the target.\n\ +Equivalent to chmod(path, mode, follow_symlinks=False)."); static PyObject * posix_lchmod(PyObject *self, PyObject *args) @@ -2233,28 +2792,56 @@ #ifdef HAVE_CHFLAGS PyDoc_STRVAR(posix_chflags__doc__, -"chflags(path, flags)\n\n\ -Set file flags."); - -static PyObject * -posix_chflags(PyObject *self, PyObject *args) -{ - PyObject *opath; - char *path; +"chflags(path, flags, *, follow_symlinks=True)\n\n\ +Set file flags.\n\ +\n\ +If follow_symlinks is False, and the last element of the path is a symbolic\n\ + link, chflags will change flags on the symbolic link itself instead of the\n\ + file the link points to.\n\ +follow_symlinks may not be implemented on your platform. If it is\n\ +unavailable, using it will raise a NotImplementedError."); + +static PyObject * +posix_chflags(PyObject *self, PyObject *args, PyObject *kwargs) +{ + path_t path; unsigned long flags; - int res; - if (!PyArg_ParseTuple(args, "O&k:chflags", - PyUnicode_FSConverter, &opath, &flags)) - return NULL; - path = PyBytes_AsString(opath); + int follow_symlinks = 1; + int result; + PyObject *return_value; + static char *keywords[] = {"path", "flags", "follow_symlinks", NULL}; + + memset(&path, 0, sizeof(path)); + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&k|$i:chflags", keywords, + path_converter, &path, + &flags, &follow_symlinks)) + return NULL; + +#ifndef HAVE_LCHFLAGS + if (follow_symlinks_specified("chflags", follow_symlinks)) + goto exit; +#endif + Py_BEGIN_ALLOW_THREADS - res = chflags(path, flags); +#ifdef HAVE_LCHFLAGS + if (!follow_symlinks) + result = lchflags(path.narrow, flags); + else +#endif + result = chflags(path.narrow, flags); Py_END_ALLOW_THREADS - if (res < 0) - return posix_error_with_allocated_filename(opath); - Py_DECREF(opath); + + if (result) { + return_value = path_posix_error("chflags", &path); + goto exit; + } + + return_value = Py_None; Py_INCREF(Py_None); - return Py_None; + +exit: + path_cleanup(&path); + return return_value; } #endif /* HAVE_CHFLAGS */ @@ -2262,7 +2849,8 @@ PyDoc_STRVAR(posix_lchflags__doc__, "lchflags(path, flags)\n\n\ Set file flags.\n\ -This function will not follow symbolic links."); +This function will not follow symbolic links.\n\ +Equivalent to chflags(path, flags, follow_symlinks=False)."); static PyObject * posix_lchflags(PyObject *self, PyObject *args) @@ -2346,29 +2934,105 @@ #ifdef HAVE_CHOWN PyDoc_STRVAR(posix_chown__doc__, -"chown(path, uid, gid)\n\n\ -Change the owner and group id of path to the numeric uid and gid."); - -static PyObject * -posix_chown(PyObject *self, PyObject *args) -{ - PyObject *opath; - char *path; - long uid, gid; - int res; - if (!PyArg_ParseTuple(args, "O&ll:chown", - PyUnicode_FSConverter, &opath, - &uid, &gid)) - return NULL; - path = PyBytes_AsString(opath); +"chown(path, uid, gid, *, dir_fd=None, follow_symlinks=True)\n\n\ +Change the owner and group id of path to the numeric uid and gid.\n\ +\n\ +path may always be specified as a string.\n\ +On some platforms, path may also be specified as an open file descriptor.\n\ + If this functionality is unavailable, using it raises an exception.\n\ +If dir_fd is not None, it should be a file descriptor open to a directory,\n\ + and 
path should be relative; path will then be relative to that directory.\n\ +If follow_symlinks is False, and the last element of the path is a symbolic\n\ + link, chown will modify the symbolic link itself instead of the file the\n\ + link points to.\n\ +It is an error to use dir_fd or follow_symlinks when specifying path as\n\ + an open file descriptor.\n\ +dir_fd and follow_symlinks may not be implemented on your platform.\n\ + If they are unavailable, using them will raise a NotImplementedError."); + +static PyObject * +posix_chown(PyObject *self, PyObject *args, PyObject *kwargs) +{ + path_t path; + long uid_l, gid_l; + uid_t uid; + gid_t gid; + int dir_fd = DEFAULT_DIR_FD; + int follow_symlinks = 1; + int result; + PyObject *return_value = NULL; + static char *keywords[] = {"path", "uid", "gid", "dir_fd", + "follow_symlinks", NULL}; + + memset(&path, 0, sizeof(path)); +#ifdef HAVE_FCHOWN + path.allow_fd = 1; +#endif + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&ll|$O&p:chown", keywords, + path_converter, &path, + &uid_l, &gid_l, +#ifdef HAVE_FCHOWNAT + dir_fd_converter, &dir_fd, +#else + dir_fd_unavailable, &dir_fd, +#endif + &follow_symlinks)) + return NULL; + +#if !(defined(HAVE_LCHOWN) || defined(HAVE_FCHOWNAT)) + if (follow_symlinks_specified("chown", follow_symlinks)) + goto exit; +#endif + if (dir_fd_and_fd_invalid("chown", dir_fd, path.fd) || + fd_and_follow_symlinks_invalid("chown", path.fd, follow_symlinks)) + goto exit; + +#ifdef __APPLE__ + /* + * This is for Mac OS X 10.3, which doesn't have lchown. + * (But we still have an lchown symbol because of weak-linking.) + * It doesn't have fchownat either. So there's no possibility + * of a graceful failover. + */ + if ((!follow_symlinks) && (lchown == NULL)) { + follow_symlinks_specified("chown", follow_symlinks); + goto exit; + } +#endif + Py_BEGIN_ALLOW_THREADS - res = chown(path, (uid_t) uid, (gid_t) gid); + uid = (uid_t)uid_l; + gid = (uid_t)gid_l; +#ifdef HAVE_FCHOWN + if (path.fd != -1) + result = fchown(path.fd, uid, gid); + else +#endif +#ifdef HAVE_LCHOWN + if ((!follow_symlinks) && (dir_fd == DEFAULT_DIR_FD)) + result = lchown(path.narrow, uid, gid); + else +#endif +#ifdef HAVE_FCHOWNAT + if ((dir_fd != DEFAULT_DIR_FD) || (!follow_symlinks)) + result = fchownat(dir_fd, path.narrow, uid, gid, + follow_symlinks ? 0 : AT_SYMLINK_NOFOLLOW); + else +#endif + result = chown(path.narrow, uid, gid); Py_END_ALLOW_THREADS - if (res < 0) - return posix_error_with_allocated_filename(opath); - Py_DECREF(opath); + + if (result) { + return_value = path_posix_error("chown", &path); + goto exit; + } + + return_value = Py_None; Py_INCREF(Py_None); - return Py_None; + +exit: + path_cleanup(&path); + return return_value; } #endif /* HAVE_CHOWN */ @@ -2376,7 +3040,7 @@ PyDoc_STRVAR(posix_fchown__doc__, "fchown(fd, uid, gid)\n\n\ Change the owner and group id of the file given by file descriptor\n\ -fd to the numeric uid and gid."); +fd to the numeric uid and gid. 
Equivalent to os.chown(fd, uid, gid)."); static PyObject * posix_fchown(PyObject *self, PyObject *args) @@ -2399,7 +3063,8 @@ PyDoc_STRVAR(posix_lchown__doc__, "lchown(path, uid, gid)\n\n\ Change the owner and group id of path to the numeric uid and gid.\n\ -This function will not follow symbolic links."); +This function will not follow symbolic links.\n\ +Equivalent to os.chown(path, uid, gid, follow_symlinks=False)."); static PyObject * posix_lchown(PyObject *self, PyObject *args) @@ -2501,125 +3166,186 @@ } #endif +#if ((!defined(HAVE_LINK)) && defined(MS_WINDOWS)) +#define HAVE_LINK 1 +#endif #ifdef HAVE_LINK PyDoc_STRVAR(posix_link__doc__, -"link(src, dst)\n\n\ -Create a hard link to a file."); - -static PyObject * -posix_link(PyObject *self, PyObject *args) -{ - return posix_2str(args, "O&O&:link", link); -} -#endif /* HAVE_LINK */ - +"link(src, dst, *, src_dir_fd=None, dst_dir_fd=None, follow_symlinks=True)\n\n\ +Create a hard link to a file.\n\ +\n\ +If either src_dir_fd or dst_dir_fd is not None, it should be a file\n\ + descriptor open to a directory, and the respective path string (src or dst)\n\ + should be relative; the path will then be relative to that directory.\n\ +If follow_symlinks is False, and the last element of src is a symbolic\n\ + link, link will create a link to the symbolic link itself instead of the\n\ + file the link points to.\n\ +src_dir_fd, dst_dir_fd, and follow_symlinks may not be implemented on your\n\ + platform. If they are unavailable, using them will raise a\n\ + NotImplementedError."); + +static PyObject * +posix_link(PyObject *self, PyObject *args, PyObject *kwargs) +{ + path_t src, dst; + int src_dir_fd = DEFAULT_DIR_FD; + int dst_dir_fd = DEFAULT_DIR_FD; + int follow_symlinks = 1; + PyObject *return_value = NULL; + static char *keywords[] = {"src", "dst", "src_dir_fd", "dst_dir_fd", + "follow_symlinks", NULL}; #ifdef MS_WINDOWS -PyDoc_STRVAR(win32_link__doc__, -"link(src, dst)\n\n\ -Create a hard link to a file."); - -static PyObject * -win32_link(PyObject *self, PyObject *args) -{ - PyObject *src, *dst; - BOOL ok; - - if (PyArg_ParseTuple(args, "UU:link", &src, &dst)) - { - wchar_t *wsrc, *wdst; - - wsrc = PyUnicode_AsUnicode(src); - if (wsrc == NULL) - goto error; - wdst = PyUnicode_AsUnicode(dst); - if (wdst == NULL) - goto error; - - Py_BEGIN_ALLOW_THREADS - ok = CreateHardLinkW(wdst, wsrc, NULL); - Py_END_ALLOW_THREADS - - if (!ok) - return win32_error("link", NULL); - Py_RETURN_NONE; - } - else { - PyErr_Clear(); - if (!PyArg_ParseTuple(args, "O&O&:link", - PyUnicode_FSConverter, &src, - PyUnicode_FSConverter, &dst)) - return NULL; - - if (win32_warn_bytes_api()) - goto error; - - Py_BEGIN_ALLOW_THREADS - ok = CreateHardLinkA(PyBytes_AS_STRING(dst), - PyBytes_AS_STRING(src), - NULL); - Py_END_ALLOW_THREADS - - Py_XDECREF(src); - Py_XDECREF(dst); - - if (!ok) - return win32_error("link", NULL); - Py_RETURN_NONE; - - error: - Py_XDECREF(src); - Py_XDECREF(dst); - return NULL; - } -} -#endif /* MS_WINDOWS */ + BOOL result; +#else + int result; +#endif + + memset(&src, 0, sizeof(src)); + memset(&dst, 0, sizeof(dst)); + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&O&|O&O&p:link", keywords, + path_converter, &src, + path_converter, &dst, + dir_fd_converter, &src_dir_fd, + dir_fd_converter, &dst_dir_fd, + &follow_symlinks)) + return NULL; + +#ifndef HAVE_LINKAT + if ((src_dir_fd != DEFAULT_DIR_FD) || (dst_dir_fd != DEFAULT_DIR_FD)) { + argument_unavailable_error("link", "src_dir_fd and dst_dir_fd"); + goto exit; + } +#endif + + if 
((src.narrow && dst.wide) || (src.wide && dst.narrow)) { + PyErr_SetString(PyExc_NotImplementedError, + "link: src and dst must be the same type"); + goto exit; + } + +#ifdef MS_WINDOWS + Py_BEGIN_ALLOW_THREADS + if (src.wide) + result = CreateHardLinkW(dst.wide, src.wide, NULL); + else + result = CreateHardLinkA(dst.narrow, src.narrow, NULL); + Py_END_ALLOW_THREADS + + if (!result) { + return_value = win32_error_object("link", dst.object); + goto exit; + } +#else + Py_BEGIN_ALLOW_THREADS +#ifndef HAVE_LINKAT + if ((src_dir_fd != DEFAULT_DIR_FD) || + (dst_dir_fd != DEFAULT_DIR_FD) || + (!follow_symlinks)) + result = linkat(src_dir_fd, src.narrow, + dst_dir_fd, dst.narrow, + follow_symlinks ? AT_SYMLINK_FOLLOW : 0); + else +#endif + result = link(src.narrow, dst.narrow); + Py_END_ALLOW_THREADS + + if (result) { + return_value = path_error("link", &dst); + goto exit; + } +#endif + + return_value = Py_None; + Py_INCREF(Py_None); + +exit: + path_cleanup(&src); + path_cleanup(&dst); + return return_value; +} +#endif + PyDoc_STRVAR(posix_listdir__doc__, -"listdir([path]) -> list_of_strings\n\n\ +"listdir(path='.') -> list_of_strings\n\n\ Return a list containing the names of the entries in the directory.\n\ \n\ - path: path of directory to list (default: '.')\n\ +The list is in arbitrary order. It does not include the special\n\ +entries '.' and '..' even if they are present in the directory.\n\ \n\ -The list is in arbitrary order. It does not include the special\n\ -entries '.' and '..' even if they are present in the directory."); - -static PyObject * -posix_listdir(PyObject *self, PyObject *args) -{ +path can always be specified as a string.\n\ +On some platforms, path may also be specified as an open file descriptor.\n\ + If this functionality is unavailable, using it raises NotImplementedError."); + +static PyObject * +posix_listdir(PyObject *self, PyObject *args, PyObject *kwargs) +{ + path_t path; + PyObject *list = NULL; + static char *keywords[] = {"path", NULL}; + int fd = -1; + +#if defined(MS_WINDOWS) && !defined(HAVE_OPENDIR) + PyObject *v; + HANDLE hFindFile = INVALID_HANDLE_VALUE; + BOOL result; + WIN32_FIND_DATA FileData; + char namebuf[MAX_PATH+5]; /* Overallocate for \\*.*\0 */ + char *bufptr = namebuf; + /* only claim to have space for MAX_PATH */ + Py_ssize_t len = sizeof(namebuf)-5; + PyObject *po = NULL; + wchar_t *wnamebuf = NULL; +#elif defined(PYOS_OS2) +#ifndef MAX_PATH +#define MAX_PATH CCHMAXPATH +#endif + char *pt; + PyObject *v; + char namebuf[MAX_PATH+5]; + HDIR hdir = 1; + ULONG srchcnt = 1; + FILEFINDBUF3 ep; + APIRET rc; +#else + PyObject *v; + DIR *dirp = NULL; + struct dirent *ep; + int arg_is_unicode = 1; +#endif + + memset(&path, 0, sizeof(path)); + path.nullable = 1; +#ifdef HAVE_FDOPENDIR + path.allow_fd = 1; + path.fd = -1; +#endif + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|O&:listdir", keywords, + path_converter, &path + )) + return NULL; + /* XXX Should redo this putting the (now four) versions of opendir in separate files instead of having them all here... 
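
A sketch of how the documented link() keywords would be used, with hypothetical names; the src_dir_fd, dst_dir_fd, and follow_symlinks forms need linkat() and otherwise raise NotImplementedError:

    import os

    # Ordinary hard link:
    os.link("original.txt", "copy.txt")          # hypothetical names

    # Both names interpreted relative to open directory descriptors:
    src_fd = os.open("srcdir", os.O_RDONLY)      # hypothetical directories
    dst_fd = os.open("dstdir", os.O_RDONLY)
    try:
        os.link("original.txt", "copy.txt",
                src_dir_fd=src_fd, dst_dir_fd=dst_fd, follow_symlinks=False)
    except NotImplementedError:
        pass                                     # no linkat() on this platform
    finally:
        os.close(src_fd)
        os.close(dst_fd)
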
*/ #if defined(MS_WINDOWS) && !defined(HAVE_OPENDIR) - - PyObject *d, *v; - HANDLE hFindFile; - BOOL result; - WIN32_FIND_DATA FileData; - const char *path; - Py_ssize_t pathlen; - char namebuf[MAX_PATH+5]; /* Overallocate for \\*.*\0 */ - char *bufptr = namebuf; - Py_ssize_t len = sizeof(namebuf)-5; /* only claim to have space for MAX_PATH */ - - PyObject *po = NULL; - if (PyArg_ParseTuple(args, "|U:listdir", &po)) { + if (!path.narrow) { WIN32_FIND_DATAW wFileData; - wchar_t *wnamebuf, *po_wchars; - - if (po == NULL) { /* Default arg: "." */ + wchar_t *po_wchars; + + if (!path.wide) { /* Default arg: "." */ po_wchars = L"."; len = 1; } else { - po_wchars = PyUnicode_AsUnicodeAndSize(po, &len); - if (po_wchars == NULL) - return NULL; + po_wchars = path.wide; + len = wcslen(path.wide); } - /* Overallocate for \\*.*\0 */ + /* The +5 is so we can append "\\*.*\0" */ wnamebuf = malloc((len + 5) * sizeof(wchar_t)); if (!wnamebuf) { PyErr_NoMemory(); - return NULL; + goto exit; } wcscpy(wnamebuf, po_wchars); if (len > 0) { @@ -2628,38 +3354,36 @@ wnamebuf[len++] = L'\\'; wcscpy(wnamebuf + len, L"*.*"); } - if ((d = PyList_New(0)) == NULL) { - free(wnamebuf); - return NULL; + if ((list = PyList_New(0)) == NULL) { + goto exit; } Py_BEGIN_ALLOW_THREADS hFindFile = FindFirstFileW(wnamebuf, &wFileData); Py_END_ALLOW_THREADS if (hFindFile == INVALID_HANDLE_VALUE) { int error = GetLastError(); - if (error == ERROR_FILE_NOT_FOUND) { - free(wnamebuf); - return d; - } - Py_DECREF(d); + if (error == ERROR_FILE_NOT_FOUND) + goto exit; + Py_DECREF(list); + list = NULL; win32_error_unicode("FindFirstFileW", wnamebuf); - free(wnamebuf); - return NULL; + goto exit; } do { /* Skip over . and .. */ if (wcscmp(wFileData.cFileName, L".") != 0 && wcscmp(wFileData.cFileName, L"..") != 0) { - v = PyUnicode_FromWideChar(wFileData.cFileName, wcslen(wFileData.cFileName)); + v = PyUnicode_FromWideChar(wFileData.cFileName, + wcslen(wFileData.cFileName)); if (v == NULL) { - Py_DECREF(d); - d = NULL; + Py_DECREF(list); + list = NULL; break; } - if (PyList_Append(d, v) != 0) { + if (PyList_Append(list, v) != 0) { Py_DECREF(v); - Py_DECREF(d); - d = NULL; + Py_DECREF(list); + list = NULL; break; } Py_DECREF(v); @@ -2670,37 +3394,16 @@ /* FindNextFile sets error to ERROR_NO_MORE_FILES if it got to the end of the directory. */ if (!result && GetLastError() != ERROR_NO_MORE_FILES) { - Py_DECREF(d); - win32_error_unicode("FindNextFileW", wnamebuf); - FindClose(hFindFile); - free(wnamebuf); - return NULL; + Py_DECREF(list); + list = win32_error_unicode("FindNextFileW", wnamebuf); + goto exit; } } while (result == TRUE); - if (FindClose(hFindFile) == FALSE) { - Py_DECREF(d); - win32_error_unicode("FindClose", wnamebuf); - free(wnamebuf); - return NULL; - } - free(wnamebuf); - return d; - } - /* Drop the argument parsing error as narrow strings - are also valid. 
*/ - PyErr_Clear(); - - if (!PyArg_ParseTuple(args, "y#:listdir", &path, &pathlen)) - return NULL; - if (win32_warn_bytes_api()) - return NULL; - if (pathlen+1 > MAX_PATH) { - PyErr_SetString(PyExc_ValueError, "path too long"); - return NULL; - } - strcpy(namebuf, path); - len = pathlen; + goto exit; + } + strcpy(namebuf, path.narrow); + len = path.length; if (len > 0) { char ch = namebuf[len-1]; if (ch != SEP && ch != ALTSEP && ch != ':') @@ -2708,7 +3411,7 @@ strcpy(namebuf + len, "*.*"); } - if ((d = PyList_New(0)) == NULL) + if ((list = PyList_New(0)) == NULL) return NULL; Py_BEGIN_ALLOW_THREADS @@ -2717,9 +3420,10 @@ if (hFindFile == INVALID_HANDLE_VALUE) { int error = GetLastError(); if (error == ERROR_FILE_NOT_FOUND) - return d; - Py_DECREF(d); - return win32_error("FindFirstFile", namebuf); + goto exit; + Py_DECREF(list); + list = win32_error("FindFirstFile", namebuf); + goto exit; } do { /* Skip over . and .. */ @@ -2727,14 +3431,14 @@ strcmp(FileData.cFileName, "..") != 0) { v = PyBytes_FromString(FileData.cFileName); if (v == NULL) { - Py_DECREF(d); - d = NULL; + Py_DECREF(list); + list = NULL; break; } - if (PyList_Append(d, v) != 0) { + if (PyList_Append(list, v) != 0) { Py_DECREF(v); - Py_DECREF(d); - d = NULL; + Py_DECREF(list); + list = NULL; break; } Py_DECREF(v); @@ -2745,46 +3449,33 @@ /* FindNextFile sets error to ERROR_NO_MORE_FILES if it got to the end of the directory. */ if (!result && GetLastError() != ERROR_NO_MORE_FILES) { - Py_DECREF(d); - win32_error("FindNextFile", namebuf); - FindClose(hFindFile); - return NULL; + Py_DECREF(list); + list = win32_error("FindNextFile", namebuf); + goto exit; } } while (result == TRUE); - if (FindClose(hFindFile) == FALSE) { - Py_DECREF(d); - return win32_error("FindClose", namebuf); - } - - return d; +exit: + if (hFindFile != INVALID_HANDLE_VALUE) { + if (FindClose(hFindFile) == FALSE) { + if (list != NULL) { + Py_DECREF(list); + list = win32_error_object("FindClose", path.object); + } + } + } + if (wnamebuf) + free(wnamebuf); + path_cleanup(&path); + + return list; #elif defined(PYOS_OS2) - -#ifndef MAX_PATH -#define MAX_PATH CCHMAXPATH -#endif - PyObject *oname; - char *name, *pt; - Py_ssize_t len; - PyObject *d, *v; - char namebuf[MAX_PATH+5]; - HDIR hdir = 1; - ULONG srchcnt = 1; - FILEFINDBUF3 ep; - APIRET rc; - - if (!PyArg_ParseTuple(args, "O&:listdir", - PyUnicode_FSConverter, &oname)) - return NULL; - name = PyBytes_AsString(oname); - len = PyBytes_GET_SIZE(oname); - if (len >= MAX_PATH) { - Py_DECREF(oname); + if (path.length >= MAX_PATH) { PyErr_SetString(PyExc_ValueError, "path too long"); - return NULL; - } - strcpy(namebuf, name); + goto exit; + } + strcpy(namebuf, path.narrow); for (pt = namebuf; *pt; pt++) if (*pt == ALTSEP) *pt = SEP; @@ -2792,9 +3483,8 @@ namebuf[len++] = SEP; strcpy(namebuf + len, "*.*"); - if ((d = PyList_New(0)) == NULL) { - Py_DECREF(oname); - return NULL; + if ((list = PyList_New(0)) == NULL) { + goto exit; } rc = DosFindFirst(namebuf, /* Wildcard Pattern to Match */ @@ -2806,7 +3496,9 @@ if (rc != NO_ERROR) { errno = ENOENT; - return posix_error_with_allocated_filename(oname); + Py_DECREF(list); + list = posix_error_with_filename(path.narrow); + goto exit; } if (srchcnt > 0) { /* If Directory is NOT Totally Empty, */ @@ -2822,55 +3514,63 @@ v = PyBytes_FromString(namebuf); if (v == NULL) { - Py_DECREF(d); - d = NULL; + Py_DECREF(list); + list = NULL; break; } - if (PyList_Append(d, v) != 0) { + if (PyList_Append(list, v) != 0) { Py_DECREF(v); - Py_DECREF(d); - d = NULL; + 
Py_DECREF(list); + list = NULL; break; } Py_DECREF(v); } while (DosFindNext(hdir, &ep, sizeof(ep), &srchcnt) == NO_ERROR && srchcnt > 0); } - Py_DECREF(oname); - return d; -#else - PyObject *oname; - char *name; - PyObject *d, *v; - DIR *dirp; - struct dirent *ep; - int arg_is_unicode = 1; +exit: + path_cleanup(&path); + + return list; +#else errno = 0; /* v is never read, so it does not need to be initialized yet. */ - if (!PyArg_ParseTuple(args, "|U:listdir", &v)) { + if (path.narrow && !PyArg_ParseTuple(args, "U:listdir", &v)) { arg_is_unicode = 0; PyErr_Clear(); } - oname = NULL; - if (!PyArg_ParseTuple(args, "|O&:listdir", PyUnicode_FSConverter, &oname)) - return NULL; - if (oname == NULL) { /* Default arg: "." */ - oname = PyBytes_FromString("."); - } - name = PyBytes_AsString(oname); - Py_BEGIN_ALLOW_THREADS - dirp = opendir(name); - Py_END_ALLOW_THREADS +#ifdef HAVE_FDOPENDIR + if (path.fd != -1) { + /* closedir() closes the FD, so we duplicate it */ + Py_BEGIN_ALLOW_THREADS + fd = dup(path.fd); + Py_END_ALLOW_THREADS + + if (fd == -1) { + list = posix_error(); + goto exit; + } + + Py_BEGIN_ALLOW_THREADS + dirp = fdopendir(fd); + Py_END_ALLOW_THREADS + } + else +#endif + { + char *name = path.narrow ? path.narrow : "."; + Py_BEGIN_ALLOW_THREADS + dirp = opendir(name); + Py_END_ALLOW_THREADS + } + if (dirp == NULL) { - return posix_error_with_allocated_filename(oname); - } - if ((d = PyList_New(0)) == NULL) { - Py_BEGIN_ALLOW_THREADS - closedir(dirp); - Py_END_ALLOW_THREADS - Py_DECREF(oname); - return NULL; + list = path_error("listdir", &path); + goto exit; + } + if ((list = PyList_New(0)) == NULL) { + goto exit; } for (;;) { errno = 0; @@ -2881,11 +3581,9 @@ if (errno == 0) { break; } else { - Py_BEGIN_ALLOW_THREADS - closedir(dirp); - Py_END_ALLOW_THREADS - Py_DECREF(d); - return posix_error_with_allocated_filename(oname); + Py_DECREF(list); + list = path_error("listdir", &path); + goto exit; } } if (ep->d_name[0] == '.' 
&& @@ -2897,101 +3595,33 @@ else v = PyBytes_FromStringAndSize(ep->d_name, NAMLEN(ep)); if (v == NULL) { - Py_CLEAR(d); + Py_CLEAR(list); break; } - if (PyList_Append(d, v) != 0) { + if (PyList_Append(list, v) != 0) { Py_DECREF(v); - Py_CLEAR(d); + Py_CLEAR(list); break; } Py_DECREF(v); } - Py_BEGIN_ALLOW_THREADS - closedir(dirp); - Py_END_ALLOW_THREADS - Py_DECREF(oname); - - return d; + +exit: + if (dirp != NULL) { + Py_BEGIN_ALLOW_THREADS + if (fd > -1) + rewinddir(dirp); + closedir(dirp); + Py_END_ALLOW_THREADS + } + + path_cleanup(&path); + + return list; #endif /* which OS */ } /* end of posix_listdir */ -#ifdef HAVE_FDOPENDIR -PyDoc_STRVAR(posix_flistdir__doc__, -"flistdir(fd) -> list_of_strings\n\n\ -Like listdir(), but uses a file descriptor instead."); - -static PyObject * -posix_flistdir(PyObject *self, PyObject *args) -{ - PyObject *d, *v; - DIR *dirp; - struct dirent *ep; - int fd; - - errno = 0; - if (!PyArg_ParseTuple(args, "i:flistdir", &fd)) - return NULL; - /* closedir() closes the FD, so we duplicate it */ - fd = dup(fd); - if (fd < 0) - return posix_error(); - Py_BEGIN_ALLOW_THREADS - dirp = fdopendir(fd); - Py_END_ALLOW_THREADS - if (dirp == NULL) { - close(fd); - return posix_error(); - } - if ((d = PyList_New(0)) == NULL) { - Py_BEGIN_ALLOW_THREADS - closedir(dirp); - Py_END_ALLOW_THREADS - return NULL; - } - for (;;) { - errno = 0; - Py_BEGIN_ALLOW_THREADS - ep = readdir(dirp); - Py_END_ALLOW_THREADS - if (ep == NULL) { - if (errno == 0) { - break; - } else { - Py_BEGIN_ALLOW_THREADS - rewinddir(dirp); - closedir(dirp); - Py_END_ALLOW_THREADS - Py_DECREF(d); - return posix_error(); - } - } - if (ep->d_name[0] == '.' && - (NAMLEN(ep) == 1 || - (ep->d_name[1] == '.' && NAMLEN(ep) == 2))) - continue; - v = PyUnicode_DecodeFSDefaultAndSize(ep->d_name, NAMLEN(ep)); - if (v == NULL) { - Py_CLEAR(d); - break; - } - if (PyList_Append(d, v) != 0) { - Py_DECREF(v); - Py_CLEAR(d); - break; - } - Py_DECREF(v); - } - Py_BEGIN_ALLOW_THREADS - rewinddir(dirp); - closedir(dirp); - Py_END_ALLOW_THREADS - - return d; -} -#endif - #ifdef MS_WINDOWS /* A helper function for abspath on win32 */ static PyObject * @@ -3182,68 +3812,72 @@ #endif /* MS_WINDOWS */ PyDoc_STRVAR(posix_mkdir__doc__, -"mkdir(path [, mode=0777])\n\n\ -Create a directory."); - -static PyObject * -posix_mkdir(PyObject *self, PyObject *args) -{ - int res; - const char *path; +"mkdir(path, mode=0o777, *, dir_fd=None)\n\n\ +Create a directory.\n\ +\n\ +If dir_fd is not None, it should be a file descriptor open to a directory,\n\ + and path should be relative; path will then be relative to that directory.\n\ +dir_fd may not be implemented on your platform.\n\ + If it is unavailable, using it will raise a NotImplementedError.\n\ +\n\ +The mode argument is ignored on Windows."); + +static PyObject * +posix_mkdir(PyObject *self, PyObject *args, PyObject *kwargs) +{ + path_t path; int mode = 0777; + int dir_fd = DEFAULT_DIR_FD; + static char *keywords[] = {"path", "mode", "dir_fd", NULL}; + PyObject *return_value = NULL; + int result; + + memset(&path, 0, sizeof(path)); + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&|i$O&:mkdir", keywords, + path_converter, &path, &mode, +#ifdef HAVE_MKDIRAT + dir_fd_converter, &dir_fd +#else + dir_fd_unavailable, &dir_fd +#endif + )) + return NULL; #ifdef MS_WINDOWS - PyObject *po; - if (PyArg_ParseTuple(args, "U|i:mkdir", &po, &mode)) - { - wchar_t *wpath = PyUnicode_AsUnicode(po); - if (wpath == NULL) - return NULL; - - Py_BEGIN_ALLOW_THREADS - res = CreateDirectoryW(wpath, NULL); 
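
A sketch of the listdir() behaviour described above, using a hypothetical directory; passing an open descriptor needs fdopendir():

    import os

    names = os.listdir()          # defaults to '.', never includes '.' or '..'

    # The descriptor is duplicated internally (closedir() would otherwise
    # close it), so it remains usable after the call.
    fd = os.open("somedir", os.O_RDONLY)         # hypothetical directory
    try:
        names = os.listdir(fd)
    finally:
        os.close(fd)
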
- Py_END_ALLOW_THREADS - if (!res) - return win32_error_object("mkdir", po); - Py_INCREF(Py_None); - return Py_None; - } - /* Drop the argument parsing error as narrow strings - are also valid. */ - PyErr_Clear(); - if (!PyArg_ParseTuple(args, "y|i:mkdir", &path, &mode)) - return NULL; - if (win32_warn_bytes_api()) - return NULL; Py_BEGIN_ALLOW_THREADS - res = CreateDirectoryA(path, NULL); + if (path.wide) + result = CreateDirectoryW(path.wide, NULL); + else + result = CreateDirectoryA(path.narrow, NULL); Py_END_ALLOW_THREADS - if (!res) { - win32_error("mkdir", path); - return NULL; - } + + if (!result) { + return_value = win32_error_object("mkdir", path.object); + goto exit; + } +#else + Py_BEGIN_ALLOW_THREADS +#if HAVE_MKDIRAT + if (dir_fd != DEFAULT_DIR_FD) + result = mkdirat(dir_fd, path.narrow, mode); + else +#endif +#if ( defined(__WATCOMC__) || defined(PYCC_VACPP) ) && !defined(__QNX__) + result = mkdir(path.narrow); +#else + result = mkdir(path.narrow, mode); +#endif + Py_END_ALLOW_THREADS + if (result < 0) { + return_value = path_error("mkdir", &path); + goto exit; + } +#endif + return_value = Py_None; Py_INCREF(Py_None); - return Py_None; -#else - PyObject *opath; - - if (!PyArg_ParseTuple(args, "O&|i:mkdir", - PyUnicode_FSConverter, &opath, &mode)) - return NULL; - path = PyBytes_AsString(opath); - Py_BEGIN_ALLOW_THREADS -#if ( defined(__WATCOMC__) || defined(PYCC_VACPP) ) && !defined(__QNX__) - res = mkdir(path); -#else - res = mkdir(path, mode); -#endif - Py_END_ALLOW_THREADS - if (res < 0) - return posix_error_with_allocated_filename(opath); - Py_DECREF(opath); - Py_INCREF(Py_None); - return Py_None; -#endif +exit: + path_cleanup(&path); + return return_value; } @@ -3332,85 +3966,118 @@ static PyObject * -internal_rename(PyObject *self, PyObject *args, int is_replace) -{ +internal_rename(PyObject *args, PyObject *kwargs, int is_replace) +{ + char *function_name = is_replace ? "replace" : "rename"; + path_t src; + path_t dst; + int src_dir_fd = DEFAULT_DIR_FD; + int dst_dir_fd = DEFAULT_DIR_FD; + int dir_fd_specified; + PyObject *return_value = NULL; + char format[24]; + static char *keywords[] = {"src", "dst", "src_dir_fd", "dst_dir_fd", NULL}; + #ifdef MS_WINDOWS - PyObject *src, *dst; BOOL result; int flags = is_replace ? MOVEFILE_REPLACE_EXISTING : 0; - if (PyArg_ParseTuple(args, - is_replace ? "UU:replace" : "UU:rename", - &src, &dst)) - { - wchar_t *wsrc, *wdst; - - wsrc = PyUnicode_AsUnicode(src); - if (wsrc == NULL) - return NULL; - wdst = PyUnicode_AsUnicode(dst); - if (wdst == NULL) - return NULL; - Py_BEGIN_ALLOW_THREADS - result = MoveFileExW(wsrc, wdst, flags); - Py_END_ALLOW_THREADS - if (!result) - return win32_error(is_replace ? "replace" : "rename", NULL); - Py_INCREF(Py_None); - return Py_None; - } - else { - PyErr_Clear(); - if (!PyArg_ParseTuple(args, - is_replace ? "O&O&:replace" : "O&O&:rename", - PyUnicode_FSConverter, &src, - PyUnicode_FSConverter, &dst)) - return NULL; - - if (win32_warn_bytes_api()) - goto error; - - Py_BEGIN_ALLOW_THREADS - result = MoveFileExA(PyBytes_AS_STRING(src), - PyBytes_AS_STRING(dst), flags); - Py_END_ALLOW_THREADS - - Py_XDECREF(src); - Py_XDECREF(dst); - - if (!result) - return win32_error(is_replace ? "replace" : "rename", NULL); - Py_INCREF(Py_None); - return Py_None; - -error: - Py_XDECREF(src); - Py_XDECREF(dst); - return NULL; - } -#else - return posix_2str(args, - is_replace ? 
"O&O&:replace" : "O&O&:rename", rename); -#endif +#else + int result; +#endif + + memset(&src, 0, sizeof(src)); + memset(&dst, 0, sizeof(dst)); + strcpy(format, "O&O&|$O&O&:"); + strcat(format, function_name); + if (!PyArg_ParseTupleAndKeywords(args, kwargs, format, keywords, + path_converter, &src, + path_converter, &dst, + dir_fd_converter, &src_dir_fd, + dir_fd_converter, &dst_dir_fd)) + return NULL; + + dir_fd_specified = (src_dir_fd != DEFAULT_DIR_FD) || + (dst_dir_fd != DEFAULT_DIR_FD); +#ifndef HAVE_RENAMEAT + if (dir_fd_specified) { + argument_unavailable_error(function_name, "src_dir_fd and dst_dir_fd"); + goto exit; + } +#endif + + if ((src.narrow && dst.wide) || (src.wide && dst.narrow)) { + PyErr_Format(PyExc_ValueError, + "%s: src and dst must be the same type", function_name); + goto exit; + } + +#ifdef MS_WINDOWS + Py_BEGIN_ALLOW_THREADS + if (src.wide) + result = MoveFileExW(src.wide, dst.wide, flags); + else + result = MoveFileExA(src.narrow, dst.narrow, flags); + Py_END_ALLOW_THREADS + + if (!result) { + return_value = win32_error_object(function_name, dst.object); + goto exit; + } + +#else + Py_BEGIN_ALLOW_THREADS +#ifdef HAVE_RENAMEAT + if (dir_fd_specified) + result = renameat(src_dir_fd, src.narrow, dst_dir_fd, dst.narrow); + else +#endif + result = rename(src.narrow, dst.narrow); + Py_END_ALLOW_THREADS + + if (result) { + return_value = path_error(function_name, &dst); + goto exit; + } +#endif + + Py_INCREF(Py_None); + return_value = Py_None; +exit: + path_cleanup(&src); + path_cleanup(&dst); + return return_value; } PyDoc_STRVAR(posix_rename__doc__, -"rename(old, new)\n\n\ -Rename a file or directory."); - -static PyObject * -posix_rename(PyObject *self, PyObject *args) -{ - return internal_rename(self, args, 0); +"rename(src, dst, *, src_dir_fd=None, dst_dir_fd=None)\n\n\ +Rename a file or directory.\n\ +\n\ +If either src_dir_fd or dst_dir_fd is not None, it should be a file\n\ + descriptor open to a directory, and the respective path string (src or dst)\n\ + should be relative; the path will then be relative to that directory.\n\ +src_dir_fd and dst_dir_fd, may not be implemented on your platform.\n\ + If they are unavailable, using them will raise a NotImplementedError."); + +static PyObject * +posix_rename(PyObject *self, PyObject *args, PyObject *kwargs) +{ + return internal_rename(args, kwargs, 0); } PyDoc_STRVAR(posix_replace__doc__, -"replace(old, new)\n\n\ -Rename a file or directory, overwriting the destination."); - -static PyObject * -posix_replace(PyObject *self, PyObject *args) -{ - return internal_rename(self, args, 1); +"replace(src, dst, *, src_dir_fd=None, dst_dir_fd=None)\n\n\ +Rename a file or directory, overwriting the destination.\n\ +\n\ +If either src_dir_fd or dst_dir_fd is not None, it should be a file\n\ + descriptor open to a directory, and the respective path string (src or dst)\n\ + should be relative; the path will then be relative to that directory.\n\ +src_dir_fd and dst_dir_fd, may not be implemented on your platform.\n\ + If they are unavailable, using them will raise a NotImplementedError."); + +static PyObject * +posix_replace(PyObject *self, PyObject *args, PyObject *kwargs) +{ + return internal_rename(args, kwargs, 1); } PyDoc_STRVAR(posix_rmdir__doc__, @@ -3428,21 +4095,6 @@ } -PyDoc_STRVAR(posix_stat__doc__, -"stat(path) -> stat result\n\n\ -Perform a stat system call on the given path."); - -static PyObject * -posix_stat(PyObject *self, PyObject *args) -{ -#ifdef MS_WINDOWS - return posix_do_stat(self, args, "O&:stat", 
STAT, "U:stat", win32_stat_w); -#else - return posix_do_stat(self, args, "O&:stat", STAT, NULL, NULL); -#endif -} - - #ifdef HAVE_SYSTEM PyDoc_STRVAR(posix_system__doc__, "system(command) -> exit_status\n\n\ @@ -3531,22 +4183,85 @@ #endif /* MS_WINDOWS */ PyDoc_STRVAR(posix_unlink__doc__, -"unlink(path)\n\n\ -Remove a file (same as remove(path))."); +"unlink(path, *, dir_fd=None, rmdir=False)\n\n\ +Remove a file (same as remove()).\n\ +\n\ +If dir_fd is not None, it should be a file descriptor open to a directory,\n\ + and path should be relative; path will then be relative to that directory.\n\ +dir_fd may not be implemented on your platform.\n\ + If it is unavailable, using it will raise a NotImplementedError.\n\ +If rmdir is True, unlink will behave like os.rmdir()."); PyDoc_STRVAR(posix_remove__doc__, -"remove(path)\n\n\ -Remove a file (same as unlink(path))."); - -static PyObject * -posix_unlink(PyObject *self, PyObject *args) -{ +"remove(path, *, dir_fd=None, rmdir=False)\n\n\ +Remove a file (same as unlink()).\n\ +\n\ +If dir_fd is not None, it should be a file descriptor open to a directory,\n\ + and path should be relative; path will then be relative to that directory.\n\ +dir_fd may not be implemented on your platform.\n\ + If it is unavailable, using it will raise a NotImplementedError.\n\ +If rmdir is True, remove will behave like os.rmdir()."); + +static PyObject * +posix_unlink(PyObject *self, PyObject *args, PyObject *kwargs) +{ + path_t path; + int dir_fd = DEFAULT_DIR_FD; + int remove_dir = 0; + static char *keywords[] = {"path", "dir_fd", "rmdir", NULL}; + int result; + PyObject *return_value = NULL; + + memset(&path, 0, sizeof(path)); + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&|$O&p:unlink", keywords, + path_converter, &path, +#ifdef HAVE_UNLINKAT + dir_fd_converter, &dir_fd, +#else + dir_fd_unavailable, &dir_fd, +#endif + &remove_dir)) + return NULL; + + Py_BEGIN_ALLOW_THREADS #ifdef MS_WINDOWS - return win32_1str(args, "remove", "y:remove", DeleteFileA, - "U:remove", Py_DeleteFileW); -#else - return posix_1str(args, "O&:remove", unlink); -#endif + if (remove_dir) { + if (path.wide) + result = RemoveDirectoryW(path.wide); + else + result = RemoveDirectoryA(path.narrow); + } + else { + if (path.wide) + result = Py_DeleteFileW(path.wide); + else + result = DeleteFileA(path.narrow); + } + result = !result; /* Windows, success=1, UNIX, success=0 */ +#else + if (remove_dir && (dir_fd == DEFAULT_DIR_FD)) + result = rmdir(path.narrow); + else +#ifdef HAVE_UNLINKAT + if (dir_fd != DEFAULT_DIR_FD) + result = unlinkat(dir_fd, path.narrow, remove_dir ? 
AT_REMOVEDIR : 0); + else +#endif /* HAVE_UNLINKAT */ + result = unlink(path.narrow); +#endif + Py_END_ALLOW_THREADS + + if (result) { + return_value = path_error("unlink", &path); + goto exit; + } + + return_value = Py_None; + Py_INCREF(Py_None); + +exit: + path_cleanup(&path); + return return_value; } @@ -3576,6 +4291,178 @@ #endif /* HAVE_UNAME */ +PyDoc_STRVAR(posix_utime__doc__, +"utime(path, times=None, *, ns=None, dir_fd=None, follow_symlinks=True)\n\ +Set the access and modified time of path.\n\ +\n\ +path may always be specified as a string.\n\ +On some platforms, path may also be specified as an open file descriptor.\n\ + If this functionality is unavailable, using it raises an exception.\n\ +\n\ +If times is not None, it must be a tuple (atime, mtime);\n\ + atime and mtime should be expressed as float seconds since the epoch.\n\ +If ns is not None, it must be a tuple (atime_ns, mtime_ns);\n\ + atime_ns and mtime_ns should be expressed as integer nanoseconds\n\ + since the epoch.\n\ +If both times and ns are None, utime uses the current time.\n\ +Specifying tuples for both times and ns is an error.\n\ +\n\ +If dir_fd is not None, it should be a file descriptor open to a directory,\n\ + and path should be relative; path will then be relative to that directory.\n\ +If follow_symlinks is False, and the last element of the path is a symbolic\n\ + link, utime will modify the symbolic link itself instead of the file the\n\ + link points to.\n\ +It is an error to use dir_fd or follow_symlinks when specifying path\n\ + as an open file descriptor.\n\ +dir_fd and follow_symlinks may not be available on your platform.\n\ + If they are unavailable, using them will raise a NotImplementedError."); + +typedef struct { + int now; + time_t atime_s; + long atime_ns; + time_t mtime_s; + long mtime_ns; +} utime_t; + +/* + * these macros assume that "utime" is a pointer to a utime_t + * they also intentionally leak the declaration of a pointer named "time" + */ +#define UTIME_TO_TIMESPEC \ + struct timespec ts[2]; \ + struct timespec *time; \ + if (utime->now) \ + time = NULL; \ + else { \ + ts[0].tv_sec = utime->atime_s; \ + ts[0].tv_nsec = utime->atime_ns; \ + ts[1].tv_sec = utime->mtime_s; \ + ts[1].tv_nsec = utime->mtime_ns; \ + time = ts; \ + } \ + +#define UTIME_TO_TIMEVAL \ + struct timeval tv[2]; \ + struct timeval *time; \ + if (utime->now) \ + time = NULL; \ + else { \ + tv[0].tv_sec = utime->atime_s; \ + tv[0].tv_usec = utime->atime_ns / 1000; \ + tv[1].tv_sec = utime->mtime_s; \ + tv[1].tv_usec = utime->mtime_ns / 1000; \ + time = tv; \ + } \ + +#define UTIME_TO_UTIMBUF \ + struct utimbuf u[2]; \ + struct utimbuf *time; \ + if (utime->now) \ + time = NULL; \ + else { \ + u.actime = utime->atime_s; \ + u.modtime = utime->mtime_s; \ + time = u; \ + } + +#define UTIME_TO_TIME_T \ + time_t timet[2]; \ + struct timet time; \ + if (utime->now) \ + time = NULL; \ + else { \ + timet[0] = utime->atime_s; \ + timet[1] = utime->mtime_s; \ + time = &timet; \ + } \ + + +#define UTIME_HAVE_DIR_FD (defined(HAVE_FUTIMESAT) || defined(HAVE_UTIMENSAT)) + +#if UTIME_HAVE_DIR_FD + +static int +utime_dir_fd(utime_t *utime, int dir_fd, char *path, int follow_symlinks) +{ +#ifdef HAVE_UTIMENSAT + int flags = follow_symlinks ? 
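
A sketch of the unlink()/remove() keywords described above, with hypothetical names; dir_fd needs unlinkat(), and rmdir=True removes a directory instead of a file:

    import os

    dfd = os.open("spool", os.O_RDONLY)          # hypothetical directory
    try:
        os.unlink("job.tmp", dir_fd=dfd)              # plain file
        os.remove("done", dir_fd=dfd, rmdir=True)     # behaves like rmdir()
    except NotImplementedError:
        pass                                     # no unlinkat() on this platform
    finally:
        os.close(dfd)
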
0 : AT_SYMLINK_NOFOLLOW; + UTIME_TO_TIMESPEC; + return utimensat(dir_fd, path, time, flags); +#elif defined(HAVE_FUTIMESAT) + UTIME_TO_TIMEVAL; + /* + * follow_symlinks will never be false here; + * we only allow !follow_symlinks and dir_fd together + * if we have utimensat() + */ + assert(follow_symlinks); + return futimesat(dir_fd, path, time); +#endif +} + +#endif + +#define UTIME_HAVE_FD (defined(HAVE_FUTIMES) || defined(HAVE_FUTIMENS)) + +#if UTIME_HAVE_FD + +static int +utime_fd(utime_t *utime, int fd) +{ +#ifdef HAVE_FUTIMENS + UTIME_TO_TIMESPEC; + return futimens(fd, time); +#else + UTIME_TO_TIMEVAL; + return futimes(fd, time); +#endif +} + +#endif + + +#define UTIME_HAVE_NOFOLLOW_SYMLINKS \ + (defined(HAVE_UTIMENSAT) || defined(HAVE_LUTIMES)) + +#if UTIME_HAVE_NOFOLLOW_SYMLINKS + +static int +utime_nofollow_symlinks(utime_t *utime, char *path) +{ +#ifdef HAVE_UTIMENSAT + UTIME_TO_TIMESPEC; + return utimensat(DEFAULT_DIR_FD, path, time, AT_SYMLINK_NOFOLLOW); +#else + UTIME_TO_TIMEVAL; + return lutimes(path, time); +#endif +} + +#endif + +#ifndef MS_WINDOWS + +static int +utime_default(utime_t *utime, char *path) +{ +#ifdef HAVE_UTIMENSAT + UTIME_TO_TIMESPEC; + return utimensat(DEFAULT_DIR_FD, path, time, 0); +#elif defined(HAVE_UTIMES) + UTIME_TO_TIMEVAL; + return utimes(path, time); +#elif defined(HAVE_UTIME_H) + UTIME_TO_UTIMBUF; + return utime(path, time); +#else + UTIME_TO_TIME_T; + return utime(path, time); +#endif +} + +#endif + static int split_py_long_to_s_and_ns(PyObject *py_long, time_t *s, long *ns) { @@ -3597,250 +4484,133 @@ return result; } - -typedef int (*parameter_converter_t)(PyObject *, void *); - -typedef struct { - /* input only */ - char path_format; - parameter_converter_t converter; - char *function_name; - char *first_argument_name; - PyObject *args; - PyObject *kwargs; - - /* input/output */ - PyObject **path; - - /* output only */ - int now; - time_t atime_s; - long atime_ns; - time_t mtime_s; - long mtime_ns; -} utime_arguments; - -#define DECLARE_UA(ua, fname) \ - utime_arguments ua; \ - memset(&ua, 0, sizeof(ua)); \ - ua.function_name = fname; \ - ua.args = args; \ - ua.kwargs = kwargs; \ - ua.first_argument_name = "path"; \ - -/* UA_TO_FILETIME doesn't declare atime and mtime for you */ -#define UA_TO_FILETIME(ua, atime, mtime) \ - time_t_to_FILE_TIME(ua.atime_s, ua.atime_ns, &atime); \ - time_t_to_FILE_TIME(ua.mtime_s, ua.mtime_ns, &mtime) - -/* the rest of these macros declare the output variable for you */ -#define UA_TO_TIMESPEC(ua, ts) \ - struct timespec ts[2]; \ - ts[0].tv_sec = ua.atime_s; \ - ts[0].tv_nsec = ua.atime_ns; \ - ts[1].tv_sec = ua.mtime_s; \ - ts[1].tv_nsec = ua.mtime_ns - -#define UA_TO_TIMEVAL(ua, tv) \ - struct timeval tv[2]; \ - tv[0].tv_sec = ua.atime_s; \ - tv[0].tv_usec = ua.atime_ns / 1000; \ - tv[1].tv_sec = ua.mtime_s; \ - tv[1].tv_usec = ua.mtime_ns / 1000 - -#define UA_TO_UTIMBUF(ua, u) \ - struct utimbuf u; \ - utimbuf.actime = ua.atime_s; \ - utimbuf.modtime = ua.mtime_s - -#define UA_TO_TIME_T(ua, timet) \ - time_t timet[2]; \ - timet[0] = ua.atime_s; \ - timet[1] = ua.mtime_s - - -/* - * utime_read_time_arguments() processes arguments for the utime - * family of functions. 
- */ - -typedef enum { - UTIME_SUCCESS = 0, - UTIME_PARSE_FAILURE = 1, - UTIME_TIMES_AND_NS_COLLISION = 2, - UTIME_TIMES_CONVERSION_FAILURE = 3, - UTIME_NS_CONVERSION_FAILURE = 4, -} utime_status; - -static utime_status -utime_read_time_arguments(utime_arguments *ua) -{ +static PyObject * +posix_utime(PyObject *self, PyObject *args, PyObject *kwargs) +{ + path_t path; PyObject *times = NULL; PyObject *ns = NULL; - char format[24]; - char *kwlist[4]; - char **kw = kwlist; - utime_status return_value; - int parse_result; - - *kw++ = ua->first_argument_name; - *kw++ = "times"; - *kw++ = "ns"; - *kw = NULL; - - sprintf(format, "%c%s|O$O:%s", - ua->path_format, - ua->converter ? "&" : "", - ua->function_name); - - if (ua->converter) - parse_result = PyArg_ParseTupleAndKeywords(ua->args, ua->kwargs, - format, kwlist, ua->converter, ua->path, ×, &ns); - else - parse_result = PyArg_ParseTupleAndKeywords(ua->args, ua->kwargs, - format, kwlist, ua->path, ×, &ns); - - if (!parse_result) - return UTIME_PARSE_FAILURE; - - if (times && ns) { - PyErr_Format(PyExc_RuntimeError, - "%s: you may specify either 'times'" - " or 'ns' but not both", - ua->function_name); - return_value = UTIME_TIMES_AND_NS_COLLISION; - goto fail; + int dir_fd = DEFAULT_DIR_FD; + int follow_symlinks = 1; + char *keywords[] = {"path", "times", "ns", "dir_fd", + "follow_symlinks", NULL}; + + utime_t utime; + +#ifdef MS_WINDOWS + HANDLE hFile; + FILETIME atime, mtime; +#else + int result; +#endif + + PyObject *return_value = NULL; + + memset(&path, 0, sizeof(path)); +#if UTIME_HAVE_FD + path.allow_fd = 1; +#endif + if (!PyArg_ParseTupleAndKeywords(args, kwargs, + "O&|O$OO&p:utime", keywords, + path_converter, &path, + ×, &ns, +#if UTIME_HAVE_DIR_FD + dir_fd_converter, &dir_fd, +#else + dir_fd_unavailable, &dir_fd, +#endif + &follow_symlinks + )) + return NULL; + + if (times && (times != Py_None) && ns) { + PyErr_SetString(PyExc_ValueError, + "utime: you may specify either 'times'" + " or 'ns' but not both"); + goto exit; } if (times && (times != Py_None)) { if (!PyTuple_CheckExact(times) || (PyTuple_Size(times) != 2)) { - PyErr_Format(PyExc_TypeError, - "%s: 'times' must be either" - " a tuple of two ints or None", - ua->function_name); - return_value = UTIME_TIMES_CONVERSION_FAILURE; - goto fail; + PyErr_SetString(PyExc_TypeError, + "utime: 'times' must be either" + " a tuple of two ints or None"); + goto exit; } - ua->now = 0; + utime.now = 0; if (_PyTime_ObjectToTimespec(PyTuple_GET_ITEM(times, 0), - &ua->atime_s, &ua->atime_ns) == -1 || + &utime.atime_s, &utime.atime_ns) == -1 || _PyTime_ObjectToTimespec(PyTuple_GET_ITEM(times, 1), - &ua->mtime_s, &ua->mtime_ns) == -1) { - return_value = UTIME_TIMES_CONVERSION_FAILURE; - goto fail; + &utime.mtime_s, &utime.mtime_ns) == -1) { + goto exit; } - return UTIME_SUCCESS; - } - - if (ns) { + } + else if (ns) { if (!PyTuple_CheckExact(ns) || (PyTuple_Size(ns) != 2)) { - PyErr_Format(PyExc_TypeError, - "%s: 'ns' must be a tuple of two ints", - ua->function_name); - return_value = UTIME_NS_CONVERSION_FAILURE; - goto fail; + PyErr_SetString(PyExc_TypeError, + "utime: 'ns' must be a tuple of two ints"); + goto exit; } - ua->now = 0; + utime.now = 0; if (!split_py_long_to_s_and_ns(PyTuple_GET_ITEM(ns, 0), - &ua->atime_s, &ua->atime_ns) || + &utime.atime_s, &utime.atime_ns) || !split_py_long_to_s_and_ns(PyTuple_GET_ITEM(ns, 1), - &ua->mtime_s, &ua->mtime_ns)) { - return_value = UTIME_NS_CONVERSION_FAILURE; - goto fail; + &utime.mtime_s, &utime.mtime_ns)) { + goto exit; } - return UTIME_SUCCESS; 
- } - - /* either times=None, or neither times nor ns was specified. use "now". */ - ua->now = 1; - return UTIME_SUCCESS; - - fail: - if (ua->converter) - Py_DECREF(*ua->path); - return return_value; -} - - -PyDoc_STRVAR(posix_utime__doc__, -"utime(path[, times=(atime, mtime), *, ns=(atime_ns, mtime_ns)])\n\ -Set the access and modified time of the file.\n\ -If the second argument ('times') is specified,\n\ - the values should be expressed as float seconds since the epoch.\n\ -If the keyword argument 'ns' is specified,\n\ - the values should be expressed as integer nanoseconds since the epoch.\n\ -If neither the second nor the 'ns' argument is specified,\n\ - utime uses the current time.\n\ -Specifying both 'times' and 'ns' is an error."); - -static PyObject * -posix_utime(PyObject *self, PyObject *args, PyObject *kwargs) -{ + } + else { + /* times and ns are both None/unspecified. use "now". */ + utime.now = 1; + } + +#if !UTIME_HAVE_NOFOLLOW_SYMLINKS + if (follow_symlinks_specified("utime", follow_symlinks)) + goto exit; +#endif + + if (path_and_dir_fd_invalid("utime", &path, dir_fd) || + dir_fd_and_fd_invalid("utime", dir_fd, path.fd) || + fd_and_follow_symlinks_invalid("utime", path.fd, follow_symlinks)) + goto exit; + +#if !defined(HAVE_UTIMENSAT) + if ((dir_fd != DEFAULT_DIR_FD) && (!follow_symlinks)) { + PyErr_SetString(PyExc_RuntimeError, + "utime: cannot use dir_fd and follow_symlinks " + "together on this platform"); + goto exit; + } +#endif + #ifdef MS_WINDOWS - PyObject *upath; - HANDLE hFile; - PyObject *result = NULL; - FILETIME atime, mtime; - - DECLARE_UA(ua, "utime"); - - ua.path_format = 'U'; - ua.path = &upath; - - switch (utime_read_time_arguments(&ua)) { - default: - return NULL; - case UTIME_SUCCESS: { - wchar_t *wpath = PyUnicode_AsUnicode(upath); - if (wpath == NULL) - return NULL; - Py_BEGIN_ALLOW_THREADS - hFile = CreateFileW(wpath, FILE_WRITE_ATTRIBUTES, 0, + Py_BEGIN_ALLOW_THREADS + if (path.wide) + hFile = CreateFileW(path.wide, FILE_WRITE_ATTRIBUTES, 0, NULL, OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL); - Py_END_ALLOW_THREADS - if (hFile == INVALID_HANDLE_VALUE) - return win32_error_object("utime", upath); - break; - } - case UTIME_PARSE_FAILURE: { - const char *apath; - /* Drop the argument parsing error as narrow strings - are also valid. 
*/ - PyErr_Clear(); - - ua.path_format = 'y'; - ua.path = (PyObject **)&apath; - if (utime_read_time_arguments(&ua) != UTIME_SUCCESS) - return NULL; - if (win32_warn_bytes_api()) - return NULL; - - Py_BEGIN_ALLOW_THREADS - hFile = CreateFileA(apath, FILE_WRITE_ATTRIBUTES, 0, + else + hFile = CreateFileA(path.narrow, FILE_WRITE_ATTRIBUTES, 0, NULL, OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, NULL); - Py_END_ALLOW_THREADS - if (hFile == INVALID_HANDLE_VALUE) { - win32_error("utime", apath); - return NULL; - } - break; - } - - } - - if (ua.now) { + Py_END_ALLOW_THREADS + if (hFile == INVALID_HANDLE_VALUE) { + win32_error_object("utime", path.object); + goto exit; + } + + if (utime.now) { SYSTEMTIME now; GetSystemTime(&now); if (!SystemTimeToFileTime(&now, &mtime) || !SystemTimeToFileTime(&now, &atime)) { win32_error("utime", NULL); - goto done; + goto exit; } } else { - UA_TO_FILETIME(ua, atime, mtime); + time_t_to_FILE_TIME(utime.atime_s, utime.atime_ns, &atime); + time_t_to_FILE_TIME(utime.mtime_s, utime.mtime_ns, &mtime); } if (!SetFileTime(hFile, NULL, &atime, &mtime)) { /* Avoid putting the file name into the error here, @@ -3848,151 +4618,52 @@ something is wrong with the file, when it also could be the time stamp that gives a problem. */ win32_error("utime", NULL); - goto done; - } + goto exit; + } +#else /* MS_WINDOWS */ + Py_BEGIN_ALLOW_THREADS + +#if UTIME_HAVE_NOFOLLOW_SYMLINKS + if ((!follow_symlinks) && (dir_fd == DEFAULT_DIR_FD)) + result = utime_nofollow_symlinks(&utime, path.narrow); + else +#endif + +#if UTIME_HAVE_DIR_FD + if ((dir_fd != DEFAULT_DIR_FD) || (!follow_symlinks)) + result = utime_dir_fd(&utime, dir_fd, path.narrow, follow_symlinks); + else +#endif + +#if UTIME_HAVE_FD + if (path.fd != -1) + result = utime_fd(&utime, path.fd); + else +#endif + + result = utime_default(&utime, path.narrow); + + Py_END_ALLOW_THREADS + + if (result < 0) { + /* see previous comment about not putting filename in error here */ + return_value = posix_error(); + goto exit; + } + +#endif /* MS_WINDOWS */ + Py_INCREF(Py_None); - result = Py_None; -done: - CloseHandle(hFile); - return result; -#else /* MS_WINDOWS */ - PyObject *opath; - char *path; - int res; - - DECLARE_UA(ua, "utime"); - - ua.path_format = 'O'; - ua.path = &opath; - ua.converter = PyUnicode_FSConverter; - - if (utime_read_time_arguments(&ua) != UTIME_SUCCESS) - return NULL; - path = PyBytes_AsString(opath); - if (ua.now) { - Py_BEGIN_ALLOW_THREADS - res = utime(path, NULL); - Py_END_ALLOW_THREADS - } - else { - Py_BEGIN_ALLOW_THREADS -#ifdef HAVE_UTIMENSAT - UA_TO_TIMESPEC(ua, buf); - res = utimensat(AT_FDCWD, path, buf, 0); -#elif defined(HAVE_UTIMES) - UA_TO_TIMEVAL(ua, buf); - res = utimes(path, buf); -#elif defined(HAVE_UTIME_H) - /* XXX should define struct utimbuf instead, above */ - UA_TO_UTIMBUF(ua, buf); - res = utime(path, &buf); -#else - UA_TO_TIME_T(ua, buf); - res = utime(path, buf); -#endif - Py_END_ALLOW_THREADS - } - - if (res < 0) { - return posix_error_with_allocated_filename(opath); - } - Py_DECREF(opath); - Py_RETURN_NONE; -#undef UTIME_EXTRACT -#endif /* MS_WINDOWS */ -} - -#ifdef HAVE_FUTIMES -PyDoc_STRVAR(posix_futimes__doc__, -"futimes(fd[, times=(atime, mtime), *, ns=(atime_ns, mtime_ns)])\n\ -Set the access and modified time of the file specified by the file\n\ -descriptor fd. 
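
A usage sketch of the utime() keywords documented above, with hypothetical file names; follow_symlinks=False needs utimensat() or lutimes() and otherwise raises NotImplementedError:

    import os, time

    now = time.time()

    # Seconds as floats ...
    os.utime("report.log", times=(now, now))     # hypothetical file

    # ... or integer nanoseconds, but never both at once:
    os.utime("report.log", ns=(int(now * 1e9), int(now * 1e9)))

    # With neither times nor ns, the current time is used:
    os.utime("report.log")

    # Touch a symlink itself rather than its target:
    os.utime("alink", follow_symlinks=False)      # hypothetical symlink
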
See utime for the semantics of the times and ns parameters."); - -static PyObject * -posix_futimes(PyObject *self, PyObject *args, PyObject *kwargs) -{ - int res, fd; - - DECLARE_UA(ua, "futimes"); - - ua.path_format = 'i'; - ua.path = (PyObject **)&fd; - ua.first_argument_name = "fd"; - - if (utime_read_time_arguments(&ua) != UTIME_SUCCESS) - return NULL; - - if (ua.now) { - Py_BEGIN_ALLOW_THREADS - res = futimes(fd, NULL); - Py_END_ALLOW_THREADS - } - else { - Py_BEGIN_ALLOW_THREADS - { -#ifdef HAVE_FUTIMENS - UA_TO_TIMESPEC(ua, buf); - res = futimens(fd, buf); -#else - UA_TO_TIMEVAL(ua, buf); - res = futimes(fd, buf); -#endif - } - Py_END_ALLOW_THREADS - } - if (res < 0) - return posix_error(); - Py_RETURN_NONE; -} -#endif - -#ifdef HAVE_LUTIMES -PyDoc_STRVAR(posix_lutimes__doc__, -"lutimes(path[, times=(atime, mtime), *, ns=(atime_ns, mtime_ns)])\n\ -Like utime(), but if path is a symbolic link, it is not dereferenced."); - -static PyObject * -posix_lutimes(PyObject *self, PyObject *args, PyObject *kwargs) -{ - PyObject *opath; - const char *path; - int res; - - DECLARE_UA(ua, "lutimes"); - - ua.path_format = 'O'; - ua.path = &opath; - ua.converter = PyUnicode_FSConverter; - - if (utime_read_time_arguments(&ua) != UTIME_SUCCESS) - return NULL; - path = PyBytes_AsString(opath); - - if (ua.now) { - /* optional time values not given */ - Py_BEGIN_ALLOW_THREADS - res = lutimes(path, NULL); - Py_END_ALLOW_THREADS - } - else { - Py_BEGIN_ALLOW_THREADS - { -#ifdef HAVE_UTIMENSAT - UA_TO_TIMESPEC(ua, buf); - res = utimensat(AT_FDCWD, path, buf, AT_SYMLINK_NOFOLLOW); -#else - UA_TO_TIMEVAL(ua, buf); - res = lutimes(path, buf); -#endif - } - Py_END_ALLOW_THREADS - } - Py_DECREF(opath); - if (res < 0) - return posix_error(); - Py_RETURN_NONE; -} -#endif + return_value = Py_None; + +exit: + path_cleanup(&path); +#ifdef MS_WINDOWS + if (hFile != INVALID_HANDLE_VALUE) + CloseHandle(hFile); +#endif + return return_value; +} /* Process operations */ @@ -4206,121 +4877,79 @@ \n\ path: path of executable file\n\ args: tuple or list of arguments\n\ - env: dictionary of strings mapping to strings"); - -static PyObject * -posix_execve(PyObject *self, PyObject *args) -{ - PyObject *opath; - char *path; + env: dictionary of strings mapping to strings\n\ +\n\ +On some platforms, you may specify an open file descriptor for path;\n\ + execve will execute the program the file descriptor is open to.\n\ + If this functionality is unavailable, using it raises NotImplementedError."); + +static PyObject * +posix_execve(PyObject *self, PyObject *args, PyObject *kwargs) +{ + path_t path; PyObject *argv, *env; - char **argvlist; + char **argvlist = NULL; char **envlist; Py_ssize_t argc, envc; + static char *keywords[] = {"path", "argv", "environment", NULL}; /* execve has three arguments: (path, argv, env), where argv is a list or tuple of strings and env is a dictionary like posix.environ. 
*/ - if (!PyArg_ParseTuple(args, "O&OO:execve", - PyUnicode_FSConverter, - &opath, &argv, &env)) - return NULL; - path = PyBytes_AsString(opath); + memset(&path, 0, sizeof(path)); +#ifdef HAVE_FEXECVE + path.allow_fd = 1; +#endif + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&OO:execve", keywords, + path_converter, &path, + &argv, &env + )) + return NULL; + if (!PyList_Check(argv) && !PyTuple_Check(argv)) { PyErr_SetString(PyExc_TypeError, - "execve() arg 2 must be a tuple or list"); - goto fail_0; + "execve: argv must be a tuple or list"); + goto fail; } argc = PySequence_Size(argv); if (!PyMapping_Check(env)) { PyErr_SetString(PyExc_TypeError, - "execve() arg 3 must be a mapping object"); - goto fail_0; + "execve: environment must be a mapping object"); + goto fail; } argvlist = parse_arglist(argv, &argc); if (argvlist == NULL) { - goto fail_0; - } - - envlist = parse_envlist(env, &envc); - if (envlist == NULL) - goto fail_1; - - execve(path, argvlist, envlist); - - /* If we get here it's definitely an error */ - - (void) posix_error(); - - while (--envc >= 0) - PyMem_DEL(envlist[envc]); - PyMem_DEL(envlist); - fail_1: - free_string_array(argvlist, argc); - fail_0: - Py_DECREF(opath); - return NULL; -} -#endif /* HAVE_EXECV */ - -#ifdef HAVE_FEXECVE -PyDoc_STRVAR(posix_fexecve__doc__, -"fexecve(fd, args, env)\n\n\ -Execute the program specified by a file descriptor with arguments and\n\ -environment, replacing the current process.\n\ -\n\ - fd: file descriptor of executable\n\ - args: tuple or list of arguments\n\ - env: dictionary of strings mapping to strings"); - -static PyObject * -posix_fexecve(PyObject *self, PyObject *args) -{ - int fd; - PyObject *argv, *env; - char **argvlist; - char **envlist; - Py_ssize_t argc, envc; - - if (!PyArg_ParseTuple(args, "iOO:fexecve", - &fd, &argv, &env)) - return NULL; - if (!PyList_Check(argv) && !PyTuple_Check(argv)) { - PyErr_SetString(PyExc_TypeError, - "fexecve() arg 2 must be a tuple or list"); - return NULL; - } - argc = PySequence_Size(argv); - if (!PyMapping_Check(env)) { - PyErr_SetString(PyExc_TypeError, - "fexecve() arg 3 must be a mapping object"); - return NULL; - } - - argvlist = parse_arglist(argv, &argc); - if (argvlist == NULL) - return NULL; + goto fail; + } envlist = parse_envlist(env, &envc); if (envlist == NULL) goto fail; - fexecve(fd, argvlist, envlist); +#ifdef HAVE_FEXECVE + if (path.fd > -1) + fexecve(path.fd, argvlist, envlist); + else +#endif + execve(path.narrow, argvlist, envlist); /* If we get here it's definitely an error */ - (void) posix_error(); + path_posix_error("execve", &path); while (--envc >= 0) PyMem_DEL(envlist[envc]); PyMem_DEL(envlist); fail: - free_string_array(argvlist, argc); + if (argvlist) + free_string_array(argvlist, argc); + path_cleanup(&path); return NULL; } -#endif /* HAVE_FEXECVE */ +#endif /* HAVE_EXECV */ + #ifdef HAVE_SPAWNV PyDoc_STRVAR(posix_spawnv__doc__, @@ -6433,107 +7062,224 @@ #endif -PyDoc_STRVAR(posix_lstat__doc__, -"lstat(path) -> stat result\n\n\ -Like stat(path), but do not follow symbolic links."); - -static PyObject * -posix_lstat(PyObject *self, PyObject *args) -{ -#ifdef HAVE_LSTAT - return posix_do_stat(self, args, "O&:lstat", lstat, NULL, NULL); -#else /* !HAVE_LSTAT */ +#if defined(HAVE_READLINK) || defined(MS_WINDOWS) +PyDoc_STRVAR(readlink__doc__, +"readlink(path, *, dir_fd=None) -> path\n\n\ +Return a string representing the path to which the symbolic link points.\n\ +\n\ +If dir_fd is not None, it should be a file descriptor open to a directory,\n\ + and path 
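
A sketch of execve() as documented above, with a hypothetical program and environment, run in a forked child on a POSIX platform since a successful execve() never returns:

    import os

    prog = "/bin/echo"                # hypothetical program path
    args = [prog, "hello"]
    env = {"LANG": "C"}

    pid = os.fork()
    if pid == 0:
        # Where fexecve() exists, an open descriptor may be passed instead
        # of a path string; either way the process image is replaced.
        os.execve(prog, args, env)
    else:
        os.waitpid(pid, 0)
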
should be relative; path will then be relative to that directory.\n\ +dir_fd may not be implemented on your platform.\n\ + If it is unavailable, using it will raise a NotImplementedError."); +#endif + +#ifdef HAVE_READLINK + +static PyObject * +posix_readlink(PyObject *self, PyObject *args, PyObject *kwargs) +{ + path_t path; + int dir_fd = DEFAULT_DIR_FD; + char buffer[MAXPATHLEN]; + ssize_t length; + PyObject *return_value = NULL; + static char *keywords[] = {"path", "dir_fd", NULL}; + + memset(&path, 0, sizeof(path)); + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&|$O&:readlink", keywords, + path_converter, &path, +#ifdef HAVE_READLINKAT + dir_fd_converter, &dir_fd +#else + dir_fd_unavailable, &dir_fd +#endif + )) + return NULL; + + Py_BEGIN_ALLOW_THREADS +#ifdef HAVE_READLINKAT + if (dir_fd != DEFAULT_DIR_FD) + length = readlinkat(dir_fd, path.narrow, buffer, sizeof(buffer)); + else +#endif + length = readlink(path.narrow, buffer, sizeof(buffer)); + Py_END_ALLOW_THREADS + + if (length < 0) { + return_value = path_posix_error("readlink", &path); + goto exit; + } + + if (PyUnicode_Check(path.object)) + return_value = PyUnicode_DecodeFSDefaultAndSize(buffer, length); + else + return_value = PyBytes_FromStringAndSize(buffer, length); +exit: + path_cleanup(&path); + return return_value; +} + + +#endif /* HAVE_READLINK */ + + +#ifdef HAVE_SYMLINK +PyDoc_STRVAR(posix_symlink__doc__, +"symlink(src, dst, target_is_directory=False, *, dir_fd=None)\n\n\ +Create a symbolic link pointing to src named dst.\n\n\ +target_is_directory is required on Windows if the target is to be\n\ + interpreted as a directory. (On Windows, symlink requires\n\ + Windows 6.0 or greater, and raises a NotImplementedError otherwise.)\n\ + target_is_directory is ignored on non-Windows platforms.\n\ +\n\ +If dir_fd is not None, it should be a file descriptor open to a directory,\n\ + and path should be relative; path will then be relative to that directory.\n\ +dir_fd may not be implemented on your platform.\n\ + If it is unavailable, using it will raise a NotImplementedError."); + +#if defined(MS_WINDOWS) + +/* Grab CreateSymbolicLinkW dynamically from kernel32 */ +static DWORD (CALLBACK *Py_CreateSymbolicLinkW)(LPWSTR, LPWSTR, DWORD) = NULL; +static DWORD (CALLBACK *Py_CreateSymbolicLinkA)(LPSTR, LPSTR, DWORD) = NULL; +static int +check_CreateSymbolicLink() +{ + HINSTANCE hKernel32; + /* only recheck */ + if (Py_CreateSymbolicLinkW && Py_CreateSymbolicLinkA) + return 1; + hKernel32 = GetModuleHandleW(L"KERNEL32"); + *(FARPROC*)&Py_CreateSymbolicLinkW = GetProcAddress(hKernel32, + "CreateSymbolicLinkW"); + *(FARPROC*)&Py_CreateSymbolicLinkA = GetProcAddress(hKernel32, + "CreateSymbolicLinkA"); + return (Py_CreateSymbolicLinkW && Py_CreateSymbolicLinkA); +} + +#endif + +static PyObject * +posix_symlink(PyObject *self, PyObject *args, PyObject *kwargs) +{ + path_t src; + path_t dst; + int dir_fd = DEFAULT_DIR_FD; + int target_is_directory = 0; + static char *keywords[] = {"src", "dst", "target_is_directory", + "dir_fd", NULL}; + PyObject *return_value; #ifdef MS_WINDOWS - return posix_do_stat(self, args, "O&:lstat", win32_lstat, "U:lstat", - win32_lstat_w); -#else - return posix_do_stat(self, args, "O&:lstat", STAT, NULL, NULL); -#endif -#endif /* !HAVE_LSTAT */ -} - - -#ifdef HAVE_READLINK -PyDoc_STRVAR(posix_readlink__doc__, -"readlink(path) -> path\n\n\ -Return a string representing the path to which the symbolic link points."); - -static PyObject * -posix_readlink(PyObject *self, PyObject *args) -{ - PyObject* v; - 
char buf[MAXPATHLEN]; - PyObject *opath; - char *path; - int n; - int arg_is_unicode = 0; - - if (!PyArg_ParseTuple(args, "O&:readlink", - PyUnicode_FSConverter, &opath)) - return NULL; - path = PyBytes_AsString(opath); - v = PySequence_GetItem(args, 0); - if (v == NULL) { - Py_DECREF(opath); - return NULL; - } - - if (PyUnicode_Check(v)) { - arg_is_unicode = 1; - } - Py_DECREF(v); - + DWORD result; +#else + int result; +#endif + + memset(&src, 0, sizeof(src)); + src.argument_name = "src"; + memset(&dst, 0, sizeof(dst)); + dst.argument_name = "dst"; + +#ifdef MS_WINDOWS + if (!check_CreateSymbolicLink()) { + PyErr_SetString(PyExc_NotImplementedError, + "CreateSymbolicLink functions not found"); + return NULL; + } + if (!win32_can_symlink) { + PyErr_SetString(PyExc_OSError, "symbolic link privilege not held"); + return NULL; + } +#endif + + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&O&|i$O&:symlink", + keywords, + path_converter, &src, + path_converter, &dst, + &target_is_directory, +#ifdef HAVE_SYMLINKAT + dir_fd_converter, &dir_fd +#else + dir_fd_unavailable, &dir_fd +#endif + )) + return NULL; + + if ((src.narrow && dst.wide) || (src.wide && dst.narrow)) { + PyErr_SetString(PyExc_ValueError, + "symlink: src and dst must be the same type"); + return_value = NULL; + goto exit; + } + +#ifdef MS_WINDOWS Py_BEGIN_ALLOW_THREADS - n = readlink(path, buf, (int) sizeof buf); + if (dst.wide) + result = Py_CreateSymbolicLinkW(dst.wide, src.wide, + target_is_directory); + else + result = Py_CreateSymbolicLinkA(dst.narrow, src.narrow, + target_is_directory); Py_END_ALLOW_THREADS - if (n < 0) - return posix_error_with_allocated_filename(opath); - - Py_DECREF(opath); - if (arg_is_unicode) - return PyUnicode_DecodeFSDefaultAndSize(buf, n); + + if (!result) { + return_value = win32_error_object("symlink", src.object); + goto exit; + } + +#else + + Py_BEGIN_ALLOW_THREADS +#if HAVE_SYMLINKAT + if (dir_fd != DEFAULT_DIR_FD) + result = symlinkat(src.narrow, dir_fd, dst.narrow); else - return PyBytes_FromStringAndSize(buf, n); -} -#endif /* HAVE_READLINK */ - - -#if defined(HAVE_SYMLINK) && !defined(MS_WINDOWS) -PyDoc_STRVAR(posix_symlink__doc__, -"symlink(src, dst)\n\n\ -Create a symbolic link pointing to src named dst."); - -static PyObject * -posix_symlink(PyObject *self, PyObject *args) -{ - return posix_2str(args, "O&O&:symlink", symlink); -} +#endif + result = symlink(src.narrow, dst.narrow); + Py_END_ALLOW_THREADS + + if (result) { + return_value = path_error("symlink", &dst); + goto exit; + } +#endif + + return_value = Py_None; + Py_INCREF(Py_None); + goto exit; /* silence "unused label" warning */ +exit: + path_cleanup(&src); + path_cleanup(&dst); + return return_value; +} + #endif /* HAVE_SYMLINK */ + #if !defined(HAVE_READLINK) && defined(MS_WINDOWS) -PyDoc_STRVAR(win_readlink__doc__, -"readlink(path) -> path\n\n\ -Return a string representing the path to which the symbolic link points."); - -/* Windows readlink implementation */ -static PyObject * -win_readlink(PyObject *self, PyObject *args) +static PyObject * +win_readlink(PyObject *self, PyObject *args, PyObject *kwargs) { wchar_t *path; DWORD n_bytes_returned; DWORD io_result; PyObject *po, *result; + int dir_fd; HANDLE reparse_point_handle; char target_buffer[MAXIMUM_REPARSE_DATA_BUFFER_SIZE]; REPARSE_DATA_BUFFER *rdb = (REPARSE_DATA_BUFFER *)target_buffer; wchar_t *print_name; - if (!PyArg_ParseTuple(args, - "U:readlink", - &po)) - return NULL; + static char *keywords[] = {"path", "dir_fd", NULL}; + + if 
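
A usage sketch of the symlink()/readlink() signatures documented above, with hypothetical names; the dir_fd forms need symlinkat()/readlinkat() and otherwise raise NotImplementedError:

    import os

    os.symlink("target.txt", "alias.txt")        # hypothetical names

    # readlink() echoes the argument type: str in, str out; bytes in, bytes out.
    print(os.readlink("alias.txt"))

    # Both calls accept dir_fd where the *at() variants exist:
    dfd = os.open("links", os.O_RDONLY)           # hypothetical directory
    try:
        os.symlink("target.txt", "alias2.txt", dir_fd=dfd)
        print(os.readlink("alias2.txt", dir_fd=dfd))
    finally:
        os.close(dfd)
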
(!PyArg_ParseTupleAndKeywords(args, kwargs, "U|$O&:readlink", keywords, + &po, + dir_fd_unavailable, &dir_fd + )) + return NULL; + path = PyUnicode_AsUnicode(po); if (path == NULL) return NULL; @@ -6585,90 +7331,6 @@ #endif /* !defined(HAVE_READLINK) && defined(MS_WINDOWS) */ -#if defined(HAVE_SYMLINK) && defined(MS_WINDOWS) - -/* Grab CreateSymbolicLinkW dynamically from kernel32 */ -static int has_CreateSymbolicLinkW = 0; -static DWORD (CALLBACK *Py_CreateSymbolicLinkW)(LPWSTR, LPWSTR, DWORD); -static int -check_CreateSymbolicLinkW() -{ - HINSTANCE hKernel32; - /* only recheck */ - if (has_CreateSymbolicLinkW) - return has_CreateSymbolicLinkW; - hKernel32 = GetModuleHandleW(L"KERNEL32"); - *(FARPROC*)&Py_CreateSymbolicLinkW = GetProcAddress(hKernel32, - "CreateSymbolicLinkW"); - if (Py_CreateSymbolicLinkW) - has_CreateSymbolicLinkW = 1; - return has_CreateSymbolicLinkW; -} - -PyDoc_STRVAR(win_symlink__doc__, -"symlink(src, dst, target_is_directory=False)\n\n\ -Create a symbolic link pointing to src named dst.\n\ -target_is_directory is required if the target is to be interpreted as\n\ -a directory.\n\ -This function requires Windows 6.0 or greater, and raises a\n\ -NotImplementedError otherwise."); - -static PyObject * -win_symlink(PyObject *self, PyObject *args, PyObject *kwargs) -{ - static char *kwlist[] = {"src", "dest", "target_is_directory", NULL}; - PyObject *osrc, *odest; - PyObject *usrc = NULL, *udest = NULL; - wchar_t *wsrc, *wdest; - int target_is_directory = 0; - DWORD res; - - if (!check_CreateSymbolicLinkW()) - { - /* raise NotImplementedError */ - return PyErr_Format(PyExc_NotImplementedError, - "CreateSymbolicLinkW not found"); - } - if (!PyArg_ParseTupleAndKeywords( - args, kwargs, "OO|i:symlink", kwlist, - &osrc, &odest, &target_is_directory)) - return NULL; - - usrc = win32_decode_filename(osrc); - if (!usrc) - return NULL; - udest = win32_decode_filename(odest); - if (!udest) - goto error; - - if (win32_can_symlink == 0) - return PyErr_Format(PyExc_OSError, "symbolic link privilege not held"); - - wsrc = PyUnicode_AsUnicode(usrc); - if (wsrc == NULL) - goto error; - wdest = PyUnicode_AsUnicode(udest); - if (wsrc == NULL) - goto error; - - Py_BEGIN_ALLOW_THREADS - res = Py_CreateSymbolicLinkW(wdest, wsrc, target_is_directory); - Py_END_ALLOW_THREADS - - Py_DECREF(usrc); - Py_DECREF(udest); - if (!res) - return win32_error_object("symlink", osrc); - - Py_INCREF(Py_None); - return Py_None; - -error: - Py_XDECREF(usrc); - Py_XDECREF(udest); - return NULL; -} -#endif /* defined(HAVE_SYMLINK) && defined(MS_WINDOWS) */ #ifdef HAVE_TIMES #if defined(PYCC_VACPP) && defined(PYOS_OS2) @@ -6848,57 +7510,68 @@ /* Functions acting on file descriptors */ PyDoc_STRVAR(posix_open__doc__, -"open(filename, flag [, mode=0777]) -> fd\n\n\ -Open a file (for low level IO)."); - -static PyObject * -posix_open(PyObject *self, PyObject *args) -{ - PyObject *ofile; - char *file; - int flag; +"open(path, flags, mode=0o777, *, dir_fd=None)\n\n\ +Open a file for low level IO. 
Returns a file handle (integer).\n\ +\n\ +If dir_fd is not None, it should be a file descriptor open to a directory,\n\ + and path should be relative; path will then be relative to that directory.\n\ +dir_fd may not be implemented on your platform.\n\ + If it is unavailable, using it will raise a NotImplementedError."); + +static PyObject * +posix_open(PyObject *self, PyObject *args, PyObject *kwargs) +{ + path_t path; + int flags; int mode = 0777; + int dir_fd = DEFAULT_DIR_FD; int fd; - + PyObject *return_value = NULL; + static char *keywords[] = {"path", "flags", "mode", "dir_fd", NULL}; + + memset(&path, 0, sizeof(path)); + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&i|i$O&:open", keywords, + path_converter, &path, + &flags, &mode, +#ifdef HAVE_OPENAT + dir_fd_converter, &dir_fd +#else + dir_fd_unavailable, &dir_fd +#endif + )) + return NULL; + + Py_BEGIN_ALLOW_THREADS #ifdef MS_WINDOWS - PyObject *po; - if (PyArg_ParseTuple(args, "Ui|i:open", &po, &flag, &mode)) { - wchar_t *wpath = PyUnicode_AsUnicode(po); - if (wpath == NULL) - return NULL; - - Py_BEGIN_ALLOW_THREADS - fd = _wopen(wpath, flag, mode); - Py_END_ALLOW_THREADS - if (fd < 0) - return posix_error(); - return PyLong_FromLong((long)fd); - } - /* Drop the argument parsing error as narrow strings - are also valid. */ - PyErr_Clear(); -#endif - - if (!PyArg_ParseTuple(args, "O&i|i:open", - PyUnicode_FSConverter, &ofile, - &flag, &mode)) - return NULL; + if (path.wide) + fd = _wopen(path.wide, flags, mode); + else +#endif +#ifdef HAVE_OPENAT + if (dir_fd != DEFAULT_DIR_FD) + fd = openat(dir_fd, path.narrow, flags, mode); + else +#endif + fd = open(path.narrow, flags, mode); + Py_END_ALLOW_THREADS + + if (fd == -1) { #ifdef MS_WINDOWS - if (win32_warn_bytes_api()) { - Py_DECREF(ofile); - return NULL; - } -#endif - file = PyBytes_AsString(ofile); - Py_BEGIN_ALLOW_THREADS - fd = open(file, flag, mode); - Py_END_ALLOW_THREADS - if (fd < 0) - return posix_error_with_allocated_filename(ofile); - Py_DECREF(ofile); - return PyLong_FromLong((long)fd); -} - + /* force use of posix_error here for exact backwards compatibility */ + if (path.wide) + return_value = posix_error(); + else +#endif + return_value = path_error("open", &path); + goto exit; + } + + return_value = PyLong_FromLong((long)fd); + +exit: + path_cleanup(&path); + return return_value; +} PyDoc_STRVAR(posix_close__doc__, "close(fd)\n\n\ @@ -7407,7 +8080,8 @@ PyDoc_STRVAR(posix_fstat__doc__, "fstat(fd) -> stat result\n\n\ -Like stat(), but for an open file descriptor."); +Like stat(), but for an open file descriptor.\n\ +Equivalent to stat(fd=fd)."); static PyObject * posix_fstat(PyObject *self, PyObject *args) @@ -7589,63 +8263,118 @@ #ifdef HAVE_MKFIFO PyDoc_STRVAR(posix_mkfifo__doc__, -"mkfifo(filename [, mode=0666])\n\n\ -Create a FIFO (a POSIX named pipe)."); - -static PyObject * -posix_mkfifo(PyObject *self, PyObject *args) -{ - PyObject *opath; - char *filename; +"mkfifo(path, mode=0o666, *, dir_fd=None)\n\n\ +Create a FIFO (a POSIX named pipe).\n\ +\n\ +If dir_fd is not None, it should be a file descriptor open to a directory,\n\ + and path should be relative; path will then be relative to that directory.\n\ +dir_fd may not be implemented on your platform.\n\ + If it is unavailable, using it will raise a NotImplementedError."); + +static PyObject * +posix_mkfifo(PyObject *self, PyObject *args, PyObject *kwargs) +{ + path_t path; int mode = 0666; - int res; - if (!PyArg_ParseTuple(args, "O&|i:mkfifo", PyUnicode_FSConverter, &opath, - &mode)) - return NULL; - 
filename = PyBytes_AS_STRING(opath); + int dir_fd = DEFAULT_DIR_FD; + int result; + PyObject *return_value = NULL; + static char *keywords[] = {"path", "mode", "dir_fd", NULL}; + + memset(&path, 0, sizeof(path)); + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&|i$O&:mkfifo", keywords, + path_converter, &path, + &mode, +#ifdef HAVE_MKFIFOAT + dir_fd_converter, &dir_fd +#else + dir_fd_unavailable, &dir_fd +#endif + )) + return NULL; + Py_BEGIN_ALLOW_THREADS - res = mkfifo(filename, mode); +#ifdef HAVE_MKFIFOAT + if (dir_fd != DEFAULT_DIR_FD) + result = mkfifoat(dir_fd, path.narrow, mode); + else +#endif + result = mkfifo(path.narrow, mode); Py_END_ALLOW_THREADS - Py_DECREF(opath); - if (res < 0) - return posix_error(); + + if (result < 0) { + return_value = posix_error(); + goto exit; + } + + return_value = Py_None; Py_INCREF(Py_None); - return Py_None; -} -#endif - + +exit: + path_cleanup(&path); + return return_value; +} +#endif #if defined(HAVE_MKNOD) && defined(HAVE_MAKEDEV) PyDoc_STRVAR(posix_mknod__doc__, -"mknod(filename [, mode=0600, device])\n\n\ +"mknod(filename, mode=0o600, device=0, *, dir_fd=None)\n\n\ Create a filesystem node (file, device special file or named pipe)\n\ named filename. mode specifies both the permissions to use and the\n\ type of node to be created, being combined (bitwise OR) with one of\n\ S_IFREG, S_IFCHR, S_IFBLK, and S_IFIFO. For S_IFCHR and S_IFBLK,\n\ device defines the newly created device special file (probably using\n\ -os.makedev()), otherwise it is ignored."); - - -static PyObject * -posix_mknod(PyObject *self, PyObject *args) -{ - PyObject *opath; - char *filename; - int mode = 0600; +os.makedev()), otherwise it is ignored.\n\ +\n\ +If dir_fd is not None, it should be a file descriptor open to a directory,\n\ + and path should be relative; path will then be relative to that directory.\n\ +dir_fd may not be implemented on your platform.\n\ + If it is unavailable, using it will raise a NotImplementedError."); + + +static PyObject * +posix_mknod(PyObject *self, PyObject *args, PyObject *kwargs) +{ + path_t path; + int mode = 0666; int device = 0; - int res; - if (!PyArg_ParseTuple(args, "O&|ii:mknod", PyUnicode_FSConverter, &opath, - &mode, &device)) - return NULL; - filename = PyBytes_AS_STRING(opath); + int dir_fd = DEFAULT_DIR_FD; + int result; + PyObject *return_value = NULL; + static char *keywords[] = {"path", "mode", "device", "dir_fd", NULL}; + + memset(&path, 0, sizeof(path)); + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&|ii$O&:mknod", keywords, + path_converter, &path, + &mode, &device, +#ifdef HAVE_MKNODAT + dir_fd_converter, &dir_fd +#else + dir_fd_unavailable, &dir_fd +#endif + )) + return NULL; + Py_BEGIN_ALLOW_THREADS - res = mknod(filename, mode, device); +#ifdef HAVE_MKNODAT + if (dir_fd != DEFAULT_DIR_FD) + result = mknodat(dir_fd, path.narrow, mode, device); + else +#endif + result = mknod(path.narrow, mode, device); Py_END_ALLOW_THREADS - Py_DECREF(opath); - if (res < 0) - return posix_error(); + + if (result < 0) { + return_value = posix_error(); + goto exit; + } + + return_value = Py_None; Py_INCREF(Py_None); - return Py_None; + +exit: + path_cleanup(&path); + return return_value; } #endif @@ -8163,7 +8892,8 @@ PyDoc_STRVAR(posix_fstatvfs__doc__, "fstatvfs(fd) -> statvfs result\n\n\ -Perform an fstatvfs system call on the given fd."); +Perform an fstatvfs system call on the given fd.\n\ +Equivalent to statvfs(fd)."); static PyObject * posix_fstatvfs(PyObject *self, PyObject *args) @@ -8188,28 +8918,58 @@ #include 
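The functions converted above (readlink, symlink, open, mkfifo, mknod) all gain the keyword-only dir_fd argument described in their docstrings. As a rough illustration of the intended calling pattern from Python, here is a minimal POSIX-oriented sketch; it assumes the signatures land exactly as documented, and that availability is probed through the os.supports_dir_fd set (name assumed here) that os.py builds from this module's feature list:

    import os
    import tempfile

    with tempfile.TemporaryDirectory() as d:
        # Open the directory itself; later relative paths resolve against it
        # rather than against the process working directory.
        dfd = os.open(d, os.O_RDONLY)
        try:
            if os.open in os.supports_dir_fd:
                fd = os.open("scratch.txt", os.O_WRONLY | os.O_CREAT, 0o644, dir_fd=dfd)
                os.close(fd)
            if os.symlink in os.supports_dir_fd and os.readlink in os.supports_dir_fd:
                os.symlink("scratch.txt", "link0", dir_fd=dfd)
                print(os.readlink("link0", dir_fd=dfd))   # -> 'scratch.txt'
            if os.mkfifo in os.supports_dir_fd:
                os.mkfifo("fifo0", 0o666, dir_fd=dfd)
        finally:
            os.close(dfd)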
PyDoc_STRVAR(posix_statvfs__doc__, -"statvfs(path) -> statvfs result\n\n\ -Perform a statvfs system call on the given path."); - -static PyObject * -posix_statvfs(PyObject *self, PyObject *args) -{ - PyObject *path; - int res; +"statvfs(path)\n\n\ +Perform a statvfs system call on the given path.\n\ +\n\ +path may always be specified as a string.\n\ +On some platforms, path may also be specified as an open file descriptor.\n\ + If this functionality is unavailable, using it raises an exception."); + +static PyObject * +posix_statvfs(PyObject *self, PyObject *args, PyObject *kwargs) +{ + static char *keywords[] = {"path", NULL}; + path_t path; + int result; + PyObject *return_value = NULL; struct statvfs st; - if (!PyArg_ParseTuple(args, "O&:statvfs", PyUnicode_FSConverter, &path)) - return NULL; + + memset(&path, 0, sizeof(path)); +#ifdef HAVE_FSTATVFS + path.allow_fd = 1; +#endif + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&:statvfs", keywords, + path_converter, &path + )) + return NULL; + Py_BEGIN_ALLOW_THREADS - res = statvfs(PyBytes_AS_STRING(path), &st); +#ifdef HAVE_FSTATVFS + if (path.fd != -1) { +#ifdef __APPLE__ + /* handle weak-linking on Mac OS X 10.3 */ + if (fstatvfs == NULL) { + fd_specified("statvfs", path.fd); + goto exit; + } +#endif + result = fstatvfs(path.fd, &st); + } + else +#endif + result = statvfs(path.narrow, &st); Py_END_ALLOW_THREADS - if (res != 0) { - posix_error_with_filename(PyBytes_AS_STRING(path)); - Py_DECREF(path); - return NULL; - } - Py_DECREF(path); - - return _pystatvfs_fromstructstatvfs(st); + + if (result) { + return_value = path_posix_error("statvfs", &path); + goto exit; + } + + return_value = _pystatvfs_fromstructstatvfs(st); + +exit: + path_cleanup(&path); + return return_value; } #endif /* HAVE_STATVFS */ @@ -9517,946 +10277,301 @@ } #endif -/* Posix *at family of functions: - faccessat, fchmodat, fchownat, fstatat, futimesat, - linkat, mkdirat, mknodat, openat, readlinkat, renameat, symlinkat, - unlinkat, utimensat, mkfifoat */ - -#ifdef HAVE_FACCESSAT -PyDoc_STRVAR(posix_faccessat__doc__, -"faccessat(dirfd, path, mode, flags=0) -> True if granted, False otherwise\n\n\ -Like access() but if path is relative, it is taken as relative to dirfd.\n\ -flags is optional and can be constructed by ORing together zero or more\n\ -of these values: AT_SYMLINK_NOFOLLOW, AT_EACCESS.\n\ -If path is relative and dirfd is the special value AT_FDCWD, then path\n\ -is interpreted relative to the current working directory."); - -static PyObject * -posix_faccessat(PyObject *self, PyObject *args) -{ - PyObject *opath; - char *path; - int mode; - int res; - int dirfd, flags = 0; - if (!PyArg_ParseTuple(args, "iO&i|i:faccessat", - &dirfd, PyUnicode_FSConverter, &opath, &mode, &flags)) - return NULL; - path = PyBytes_AsString(opath); - Py_BEGIN_ALLOW_THREADS - res = faccessat(dirfd, path, mode, flags); - Py_END_ALLOW_THREADS - Py_DECREF(opath); - return PyBool_FromLong(res == 0); -} -#endif - -#ifdef HAVE_FCHMODAT -PyDoc_STRVAR(posix_fchmodat__doc__, -"fchmodat(dirfd, path, mode, flags=0)\n\n\ -Like chmod() but if path is relative, it is taken as relative to dirfd.\n\ -flags is optional and may be 0 or AT_SYMLINK_NOFOLLOW.\n\ -If path is relative and dirfd is the special value AT_FDCWD, then path\n\ -is interpreted relative to the current working directory."); - -static PyObject * -posix_fchmodat(PyObject *self, PyObject *args) -{ - int dirfd, mode, res; +#ifdef USE_XATTRS + +PyDoc_STRVAR(posix_getxattr__doc__, +"getxattr(path, attribute, *, 
follow_symlinks=True) -> value\n\n\ +Return the value of extended attribute attribute on path.\n\ +\n\ +path may be either a string or an open file descriptor.\n\ +If follow_symlinks is False, and the last element of the path is a symbolic\n\ + link, getxattr will examine the symbolic link itself instead of the file\n\ + the link points to."); + +static PyObject * +posix_getxattr(PyObject *self, PyObject *args, PyObject *kwargs) +{ + path_t path; + path_t attribute; + int follow_symlinks = 1; + PyObject *buffer = NULL; + int i; + static char *keywords[] = {"path", "attribute", "follow_symlinks", NULL}; + + memset(&path, 0, sizeof(path)); + memset(&attribute, 0, sizeof(attribute)); + path.allow_fd = 1; + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&O&|$p:getxattr", keywords, + path_converter, &path, + path_converter, &attribute, + &follow_symlinks)) + return NULL; + + if (fd_and_follow_symlinks_invalid("getxattr", path.fd, follow_symlinks)) + goto exit; + + for (i = 0; ; i++) { + void *ptr; + ssize_t result; + static Py_ssize_t buffer_sizes[] = {128, XATTR_SIZE_MAX, 0}; + Py_ssize_t buffer_size = buffer_sizes[i]; + if (!buffer_size) { + path_error("getxattr", &path); + goto exit; + } + buffer = PyBytes_FromStringAndSize(NULL, buffer_size); + if (!buffer) + goto exit; + ptr = PyBytes_AS_STRING(buffer); + + Py_BEGIN_ALLOW_THREADS; + if (path.fd >= 0) + result = fgetxattr(path.fd, attribute.narrow, ptr, buffer_size); + else if (follow_symlinks) + result = getxattr(path.narrow, attribute.narrow, ptr, buffer_size); + else + result = lgetxattr(path.narrow, attribute.narrow, ptr, buffer_size); + Py_END_ALLOW_THREADS; + + if (result < 0) { + Py_DECREF(buffer); + buffer = NULL; + if (errno == ERANGE) + continue; + path_error("getxattr", &path); + goto exit; + } + + if (result != buffer_size) { + /* Can only shrink. 
*/ + _PyBytes_Resize(&buffer, result); + } + break; + } + +exit: + path_cleanup(&path); + path_cleanup(&attribute); + return buffer; +} + +PyDoc_STRVAR(posix_setxattr__doc__, +"setxattr(path, attribute, value, flags=0, *, follow_symlinks=True)\n\n\ +Set extended attribute attribute on path to value.\n\ +path may be either a string or an open file descriptor.\n\ +If follow_symlinks is False, and the last element of the path is a symbolic\n\ + link, setxattr will modify the symbolic link itself instead of the file\n\ + the link points to."); + +static PyObject * +posix_setxattr(PyObject *self, PyObject *args, PyObject *kwargs) +{ + path_t path; + path_t attribute; + Py_buffer value; int flags = 0; - PyObject *opath; - char *path; - - if (!PyArg_ParseTuple(args, "iO&i|i:fchmodat", - &dirfd, PyUnicode_FSConverter, &opath, &mode, &flags)) - return NULL; - - path = PyBytes_AsString(opath); - - Py_BEGIN_ALLOW_THREADS - res = fchmodat(dirfd, path, mode, flags); - Py_END_ALLOW_THREADS - Py_DECREF(opath); - if (res < 0) - return posix_error(); - Py_RETURN_NONE; -} -#endif /* HAVE_FCHMODAT */ - -#ifdef HAVE_FCHOWNAT -PyDoc_STRVAR(posix_fchownat__doc__, -"fchownat(dirfd, path, uid, gid, flags=0)\n\n\ -Like chown() but if path is relative, it is taken as relative to dirfd.\n\ -flags is optional and may be 0 or AT_SYMLINK_NOFOLLOW.\n\ -If path is relative and dirfd is the special value AT_FDCWD, then path\n\ -is interpreted relative to the current working directory."); - -static PyObject * -posix_fchownat(PyObject *self, PyObject *args) -{ - PyObject *opath; - int dirfd, res; - long uid, gid; - int flags = 0; - char *path; - - if (!PyArg_ParseTuple(args, "iO&ll|i:fchownat", - &dirfd, PyUnicode_FSConverter, &opath, &uid, &gid, &flags)) - return NULL; - - path = PyBytes_AsString(opath); - - Py_BEGIN_ALLOW_THREADS - res = fchownat(dirfd, path, (uid_t) uid, (gid_t) gid, flags); - Py_END_ALLOW_THREADS - Py_DECREF(opath); - if (res < 0) - return posix_error(); - Py_RETURN_NONE; -} -#endif /* HAVE_FCHOWNAT */ - -#ifdef HAVE_FSTATAT -PyDoc_STRVAR(posix_fstatat__doc__, -"fstatat(dirfd, path, flags=0) -> stat result\n\n\ -Like stat() but if path is relative, it is taken as relative to dirfd.\n\ -flags is optional and may be 0 or AT_SYMLINK_NOFOLLOW.\n\ -If path is relative and dirfd is the special value AT_FDCWD, then path\n\ -is interpreted relative to the current working directory."); - -static PyObject * -posix_fstatat(PyObject *self, PyObject *args) -{ - PyObject *opath; - char *path; - STRUCT_STAT st; - int dirfd, res, flags = 0; - - if (!PyArg_ParseTuple(args, "iO&|i:fstatat", - &dirfd, PyUnicode_FSConverter, &opath, &flags)) - return NULL; - path = PyBytes_AsString(opath); - - Py_BEGIN_ALLOW_THREADS - res = fstatat(dirfd, path, &st, flags); - Py_END_ALLOW_THREADS - Py_DECREF(opath); - if (res != 0) - return posix_error(); - - return _pystat_fromstructstat(&st); -} -#endif - -#ifdef HAVE_FUTIMESAT -PyDoc_STRVAR(posix_futimesat__doc__, -"futimesat(dirfd, path[, (atime, mtime)])\n\ -Like utime() but if path is relative, it is taken as relative to dirfd.\n\ -If path is relative and dirfd is the special value AT_FDCWD, then path\n\ -is interpreted relative to the current working directory."); - -static PyObject * -posix_futimesat(PyObject *self, PyObject *args) -{ - PyObject *opath; - char *path; - int res, dirfd; - PyObject* arg = Py_None; - time_t atime, mtime; - long ansec, mnsec; - - if (!PyArg_ParseTuple(args, "iO&|O:futimesat", - &dirfd, PyUnicode_FSConverter, &opath, &arg)) - return NULL; - path = 
PyBytes_AsString(opath); - if (arg == Py_None) { - /* optional time values not given */ - Py_BEGIN_ALLOW_THREADS - res = futimesat(dirfd, path, NULL); - Py_END_ALLOW_THREADS - } - else if (!PyTuple_Check(arg) || PyTuple_Size(arg) != 2) { - PyErr_SetString(PyExc_TypeError, - "futimesat() arg 3 must be a tuple (atime, mtime)"); - Py_DECREF(opath); - return NULL; - } - else { - if (_PyTime_ObjectToTimespec(PyTuple_GET_ITEM(arg, 0), - &atime, &ansec) == -1) { - Py_DECREF(opath); - return NULL; + int follow_symlinks = 1; + int result; + PyObject *return_value = NULL; + static char *keywords[] = {"path", "attribute", "value", + "flags", "follow_symlinks", NULL}; + + memset(&path, 0, sizeof(path)); + path.allow_fd = 1; + memset(&attribute, 0, sizeof(attribute)); + memset(&value, 0, sizeof(value)); + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&O&y*|i$p:setxattr", + keywords, + path_converter, &path, + path_converter, &attribute, + &value, &flags, + &follow_symlinks)) + return NULL; + + if (fd_and_follow_symlinks_invalid("setxattr", path.fd, follow_symlinks)) + goto exit; + + Py_BEGIN_ALLOW_THREADS; + if (path.fd > -1) + result = fsetxattr(path.fd, attribute.narrow, + value.buf, value.len, flags); + else if (follow_symlinks) + result = setxattr(path.narrow, attribute.narrow, + value.buf, value.len, flags); + else + result = lsetxattr(path.narrow, attribute.narrow, + value.buf, value.len, flags); + Py_END_ALLOW_THREADS; + + if (result) { + return_value = path_error("setxattr", &path); + goto exit; + } + + return_value = Py_None; + Py_INCREF(return_value); + +exit: + path_cleanup(&path); + path_cleanup(&attribute); + PyBuffer_Release(&value); + + return return_value; +} + +PyDoc_STRVAR(posix_removexattr__doc__, +"removexattr(path, attribute, *, follow_symlinks=True)\n\n\ +Remove extended attribute attribute on path.\n\ +path may be either a string or an open file descriptor.\n\ +If follow_symlinks is False, and the last element of the path is a symbolic\n\ + link, removexattr will modify the symbolic link itself instead of the file\n\ + the link points to."); + +static PyObject * +posix_removexattr(PyObject *self, PyObject *args, PyObject *kwargs) +{ + path_t path; + path_t attribute; + int follow_symlinks = 1; + int result; + PyObject *return_value = NULL; + static char *keywords[] = {"path", "attribute", "follow_symlinks", NULL}; + + memset(&path, 0, sizeof(path)); + memset(&attribute, 0, sizeof(attribute)); + path.allow_fd = 1; + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&O&|$p:removexattr", + keywords, + path_converter, &path, + path_converter, &attribute, + &follow_symlinks)) + return NULL; + + if (fd_and_follow_symlinks_invalid("removexattr", path.fd, follow_symlinks)) + goto exit; + + Py_BEGIN_ALLOW_THREADS; + if (path.fd > -1) + result = fremovexattr(path.fd, attribute.narrow); + else if (follow_symlinks) + result = removexattr(path.narrow, attribute.narrow); + else + result = lremovexattr(path.narrow, attribute.narrow); + Py_END_ALLOW_THREADS; + + if (result) { + return_value = path_error("removexattr", &path); + goto exit; + } + + return_value = Py_None; + Py_INCREF(return_value); + +exit: + path_cleanup(&path); + path_cleanup(&attribute); + + return return_value; +} + +PyDoc_STRVAR(posix_listxattr__doc__, +"listxattr(path='.', *, follow_symlinks=True)\n\n\ +Return a list of extended attributes on path.\n\ +\n\ +path may be either None, a string, or an open file descriptor.\n\ +if path is None, listxattr will examine the current directory.\n\ +If follow_symlinks is False, and 
the last element of the path is a symbolic\n\ + link, listxattr will examine the symbolic link itself instead of the file\n\ + the link points to."); + +static PyObject * +posix_listxattr(PyObject *self, PyObject *args, PyObject *kwargs) +{ + path_t path; + int follow_symlinks = 1; + Py_ssize_t i; + PyObject *result = NULL; + char *buffer = NULL; + char *name; + static char *keywords[] = {"path", "follow_symlinks", NULL}; + + memset(&path, 0, sizeof(path)); + path.allow_fd = 1; + path.fd = -1; + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|O&$p:listxattr", keywords, + path_converter, &path, + &follow_symlinks)) + return NULL; + + if (fd_and_follow_symlinks_invalid("listxattr", path.fd, follow_symlinks)) + goto exit; + + name = path.narrow ? path.narrow : "."; + for (i = 0; ; i++) { + char *start, *trace, *end; + ssize_t length; + static Py_ssize_t buffer_sizes[] = { 256, XATTR_LIST_MAX, 0 }; + Py_ssize_t buffer_size = buffer_sizes[i]; + if (!buffer_size) { + // ERANGE + path_error("listxattr", &path); + break; } - if (_PyTime_ObjectToTimespec(PyTuple_GET_ITEM(arg, 1), - &mtime, &mnsec) == -1) { - Py_DECREF(opath); - return NULL; + buffer = PyMem_MALLOC(buffer_size); + if (!buffer) { + PyErr_NoMemory(); + break; } - Py_BEGIN_ALLOW_THREADS - { -#ifdef HAVE_UTIMENSAT - struct timespec buf[2]; - buf[0].tv_sec = atime; - buf[0].tv_nsec = ansec; - buf[1].tv_sec = mtime; - buf[1].tv_nsec = mnsec; - res = utimensat(dirfd, path, buf, 0); -#else - struct timeval buf[2]; - buf[0].tv_sec = atime; - buf[0].tv_usec = ansec / 1000; - buf[1].tv_sec = mtime; - buf[1].tv_usec = mnsec / 1000; - res = futimesat(dirfd, path, buf); -#endif + Py_BEGIN_ALLOW_THREADS; + if (path.fd > -1) + length = flistxattr(path.fd, buffer, buffer_size); + else if (follow_symlinks) + length = listxattr(name, buffer, buffer_size); + else + length = llistxattr(name, buffer, buffer_size); + Py_END_ALLOW_THREADS; + + if (length < 0) { + if (errno == ERANGE) + continue; + path_error("listxattr", &path); + break; } - Py_END_ALLOW_THREADS - } - Py_DECREF(opath); - if (res < 0) { - return posix_error(); - } - Py_RETURN_NONE; -} -#endif - -#ifdef HAVE_LINKAT -PyDoc_STRVAR(posix_linkat__doc__, -"linkat(srcfd, srcpath, dstfd, dstpath, flags=0)\n\n\ -Like link() but if srcpath is relative, it is taken as relative to srcfd\n\ -and if dstpath is relative, it is taken as relative to dstfd.\n\ -flags is optional and may be 0 or AT_SYMLINK_FOLLOW.\n\ -If srcpath is relative and srcfd is the special value AT_FDCWD, then\n\ -srcpath is interpreted relative to the current working directory. 
This\n\ -also applies for dstpath."); - -static PyObject * -posix_linkat(PyObject *self, PyObject *args) -{ - PyObject *osrc, *odst; - char *src, *dst; - int res, srcfd, dstfd; - int flags = 0; - - if (!PyArg_ParseTuple(args, "iO&iO&|i:linkat", - &srcfd, PyUnicode_FSConverter, &osrc, &dstfd, PyUnicode_FSConverter, &odst, &flags)) - return NULL; - src = PyBytes_AsString(osrc); - dst = PyBytes_AsString(odst); - Py_BEGIN_ALLOW_THREADS - res = linkat(srcfd, src, dstfd, dst, flags); - Py_END_ALLOW_THREADS - Py_DECREF(osrc); - Py_DECREF(odst); - if (res < 0) - return posix_error(); - Py_RETURN_NONE; -} -#endif /* HAVE_LINKAT */ - -#ifdef HAVE_MKDIRAT -PyDoc_STRVAR(posix_mkdirat__doc__, -"mkdirat(dirfd, path, mode=0o777)\n\n\ -Like mkdir() but if path is relative, it is taken as relative to dirfd.\n\ -If path is relative and dirfd is the special value AT_FDCWD, then path\n\ -is interpreted relative to the current working directory."); - -static PyObject * -posix_mkdirat(PyObject *self, PyObject *args) -{ - int res, dirfd; - PyObject *opath; - char *path; - int mode = 0777; - - if (!PyArg_ParseTuple(args, "iO&|i:mkdirat", - &dirfd, PyUnicode_FSConverter, &opath, &mode)) - return NULL; - path = PyBytes_AsString(opath); - Py_BEGIN_ALLOW_THREADS - res = mkdirat(dirfd, path, mode); - Py_END_ALLOW_THREADS - Py_DECREF(opath); - if (res < 0) - return posix_error(); - Py_RETURN_NONE; -} -#endif - -#if defined(HAVE_MKNODAT) && defined(HAVE_MAKEDEV) -PyDoc_STRVAR(posix_mknodat__doc__, -"mknodat(dirfd, path, mode=0o600, device=0)\n\n\ -Like mknod() but if path is relative, it is taken as relative to dirfd.\n\ -If path is relative and dirfd is the special value AT_FDCWD, then path\n\ -is interpreted relative to the current working directory."); - -static PyObject * -posix_mknodat(PyObject *self, PyObject *args) -{ - PyObject *opath; - char *filename; - int mode = 0600; - int device = 0; - int res, dirfd; - if (!PyArg_ParseTuple(args, "iO&|ii:mknodat", &dirfd, - PyUnicode_FSConverter, &opath, &mode, &device)) - return NULL; - filename = PyBytes_AS_STRING(opath); - Py_BEGIN_ALLOW_THREADS - res = mknodat(dirfd, filename, mode, device); - Py_END_ALLOW_THREADS - Py_DECREF(opath); - if (res < 0) - return posix_error(); - Py_RETURN_NONE; -} -#endif - -#ifdef HAVE_OPENAT -PyDoc_STRVAR(posix_openat__doc__, -"openat(dirfd, path, flag, mode=0o777) -> fd\n\n\ -Like open() but if path is relative, it is taken as relative to dirfd.\n\ -If path is relative and dirfd is the special value AT_FDCWD, then path\n\ -is interpreted relative to the current working directory."); - -static PyObject * -posix_openat(PyObject *self, PyObject *args) -{ - PyObject *ofile; - char *file; - int flag, dirfd, fd; - int mode = 0777; - - if (!PyArg_ParseTuple(args, "iO&i|i:openat", - &dirfd, PyUnicode_FSConverter, &ofile, - &flag, &mode)) - return NULL; - file = PyBytes_AsString(ofile); - Py_BEGIN_ALLOW_THREADS - fd = openat(dirfd, file, flag, mode); - Py_END_ALLOW_THREADS - Py_DECREF(ofile); - if (fd < 0) - return posix_error(); - return PyLong_FromLong((long)fd); -} -#endif - -#ifdef HAVE_READLINKAT -PyDoc_STRVAR(posix_readlinkat__doc__, -"readlinkat(dirfd, path) -> path\n\n\ -Like readlink() but if path is relative, it is taken as relative to dirfd.\n\ -If path is relative and dirfd is the special value AT_FDCWD, then path\n\ -is interpreted relative to the current working directory."); - -static PyObject * -posix_readlinkat(PyObject *self, PyObject *args) -{ - PyObject *v, *opath; - char buf[MAXPATHLEN]; - char *path; - int n, dirfd; - int 
arg_is_unicode = 0; - - if (!PyArg_ParseTuple(args, "iO&:readlinkat", - &dirfd, PyUnicode_FSConverter, &opath)) - return NULL; - path = PyBytes_AsString(opath); - v = PySequence_GetItem(args, 1); - if (v == NULL) { - Py_DECREF(opath); - return NULL; - } - - if (PyUnicode_Check(v)) { - arg_is_unicode = 1; - } - Py_DECREF(v); - - Py_BEGIN_ALLOW_THREADS - n = readlinkat(dirfd, path, buf, (int) sizeof buf); - Py_END_ALLOW_THREADS - Py_DECREF(opath); - if (n < 0) - return posix_error(); - - if (arg_is_unicode) - return PyUnicode_DecodeFSDefaultAndSize(buf, n); - else - return PyBytes_FromStringAndSize(buf, n); -} -#endif /* HAVE_READLINKAT */ - -#ifdef HAVE_RENAMEAT -PyDoc_STRVAR(posix_renameat__doc__, -"renameat(olddirfd, oldpath, newdirfd, newpath)\n\n\ -Like rename() but if oldpath is relative, it is taken as relative to\n\ -olddirfd and if newpath is relative, it is taken as relative to newdirfd.\n\ -If oldpath is relative and olddirfd is the special value AT_FDCWD, then\n\ -oldpath is interpreted relative to the current working directory. This\n\ -also applies for newpath."); - -static PyObject * -posix_renameat(PyObject *self, PyObject *args) -{ - int res; - PyObject *opathold, *opathnew; - char *opath, *npath; - int oldfd, newfd; - - if (!PyArg_ParseTuple(args, "iO&iO&:renameat", - &oldfd, PyUnicode_FSConverter, &opathold, &newfd, PyUnicode_FSConverter, &opathnew)) - return NULL; - opath = PyBytes_AsString(opathold); - npath = PyBytes_AsString(opathnew); - Py_BEGIN_ALLOW_THREADS - res = renameat(oldfd, opath, newfd, npath); - Py_END_ALLOW_THREADS - Py_DECREF(opathold); - Py_DECREF(opathnew); - if (res < 0) - return posix_error(); - Py_RETURN_NONE; -} -#endif - -#if HAVE_SYMLINKAT -PyDoc_STRVAR(posix_symlinkat__doc__, -"symlinkat(src, dstfd, dst)\n\n\ -Like symlink() but if dst is relative, it is taken as relative to dstfd.\n\ -If dst is relative and dstfd is the special value AT_FDCWD, then dst\n\ -is interpreted relative to the current working directory."); - -static PyObject * -posix_symlinkat(PyObject *self, PyObject *args) -{ - int res, dstfd; - PyObject *osrc, *odst; - char *src, *dst; - - if (!PyArg_ParseTuple(args, "O&iO&:symlinkat", - PyUnicode_FSConverter, &osrc, &dstfd, PyUnicode_FSConverter, &odst)) - return NULL; - src = PyBytes_AsString(osrc); - dst = PyBytes_AsString(odst); - Py_BEGIN_ALLOW_THREADS - res = symlinkat(src, dstfd, dst); - Py_END_ALLOW_THREADS - Py_DECREF(osrc); - Py_DECREF(odst); - if (res < 0) - return posix_error(); - Py_RETURN_NONE; -} -#endif /* HAVE_SYMLINKAT */ - -#ifdef HAVE_UNLINKAT -PyDoc_STRVAR(posix_unlinkat__doc__, -"unlinkat(dirfd, path, flags=0)\n\n\ -Like unlink() but if path is relative, it is taken as relative to dirfd.\n\ -flags is optional and may be 0 or AT_REMOVEDIR. 
If AT_REMOVEDIR is\n\ -specified, unlinkat() behaves like rmdir().\n\ -If path is relative and dirfd is the special value AT_FDCWD, then path\n\ -is interpreted relative to the current working directory."); - -static PyObject * -posix_unlinkat(PyObject *self, PyObject *args) -{ - int dirfd, res, flags = 0; - PyObject *opath; - char *path; - - if (!PyArg_ParseTuple(args, "iO&|i:unlinkat", - &dirfd, PyUnicode_FSConverter, &opath, &flags)) - return NULL; - path = PyBytes_AsString(opath); - Py_BEGIN_ALLOW_THREADS - res = unlinkat(dirfd, path, flags); - Py_END_ALLOW_THREADS - Py_DECREF(opath); - if (res < 0) - return posix_error(); - Py_RETURN_NONE; -} -#endif - -#ifdef HAVE_UTIMENSAT -PyDoc_STRVAR(posix_utimensat__doc__, -"utimensat(dirfd, path[, atime=(atime_sec, atime_nsec),\n\ - mtime=(mtime_sec, mtime_nsec), flags=0])\n\ -utimensat(dirfd, path, None, None, flags)\n\n\ -Updates the timestamps of a file with nanosecond precision. If path is\n\ -relative, it is taken as relative to dirfd.\n\ -If atime and mtime are both None, which is the default, set atime and\n\ -mtime to the current time.\n\ -flags is optional and may be 0 or AT_SYMLINK_NOFOLLOW.\n\ -If path is relative and dirfd is the special value AT_FDCWD, then path\n\ -is interpreted relative to the current working directory.\n\ -If *_nsec is specified as UTIME_NOW, the timestamp is updated to the\n\ -current time.\n\ -If *_nsec is specified as UTIME_OMIT, the timestamp is not updated."); - -static PyObject * -posix_utimensat(PyObject *self, PyObject *args, PyObject *kwargs) -{ - PyObject *opath; - char *path; - int res, dirfd, flags = 0; - PyObject *atime = Py_None; - PyObject *mtime = Py_None; - - static char *kwlist[] = {"dirfd", "path", "atime", "mtime", "flags", NULL}; - - struct timespec buf[2]; - - if (!PyArg_ParseTupleAndKeywords(args, kwargs, "iO&|OOi:utimensat", kwlist, - &dirfd, PyUnicode_FSConverter, &opath, &atime, &mtime, &flags)) - return NULL; - path = PyBytes_AsString(opath); - if (atime == Py_None && mtime == Py_None) { - /* optional time values not given */ - Py_BEGIN_ALLOW_THREADS - res = utimensat(dirfd, path, NULL, flags); - Py_END_ALLOW_THREADS - } - else if (!PyTuple_Check(atime) || PyTuple_Size(atime) != 2) { - PyErr_SetString(PyExc_TypeError, - "utimensat() arg 3 must be a tuple (atime_sec, atime_nsec)"); - Py_DECREF(opath); - return NULL; - } - else if (!PyTuple_Check(mtime) || PyTuple_Size(mtime) != 2) { - PyErr_SetString(PyExc_TypeError, - "utimensat() arg 4 must be a tuple (mtime_sec, mtime_nsec)"); - Py_DECREF(opath); - return NULL; - } - else { - if (!PyArg_ParseTuple(atime, "ll:utimensat", - &(buf[0].tv_sec), &(buf[0].tv_nsec))) { - Py_DECREF(opath); - return NULL; + + result = PyList_New(0); + if (!result) { + goto exit; } - if (!PyArg_ParseTuple(mtime, "ll:utimensat", - &(buf[1].tv_sec), &(buf[1].tv_nsec))) { - Py_DECREF(opath); - return NULL; + + end = buffer + length; + for (trace = start = buffer; trace != end; trace++) { + if (!*trace) { + int error; + PyObject *attribute = PyUnicode_DecodeFSDefaultAndSize(start, + trace - start); + if (!attribute) { + Py_DECREF(result); + result = NULL; + goto exit; + } + error = PyList_Append(result, attribute); + Py_DECREF(attribute); + if (error) { + Py_DECREF(result); + result = NULL; + goto exit; + } + start = trace + 1; + } } - Py_BEGIN_ALLOW_THREADS - res = utimensat(dirfd, path, buf, flags); - Py_END_ALLOW_THREADS - } - Py_DECREF(opath); - if (res < 0) { - return posix_error(); - } - Py_RETURN_NONE; -} -#endif - -#ifdef HAVE_MKFIFOAT 
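A little further down, the individual xattr wrappers (lgetxattr, fgetxattr, lsetxattr and friends) are removed in favour of the unified getxattr, setxattr, removexattr and listxattr functions added earlier in this patch, which pick the l*/f* variant based on whether the path is a file descriptor and on the follow_symlinks flag. A minimal sketch of the new calling convention, illustrative only and assuming a filesystem that permits user.* attributes:

    import os

    fname = "xattr-demo.txt"            # hypothetical scratch file
    with open(fname, "w") as f:
        f.write("hello")

    os.setxattr(fname, "user.comment", b"demo value")   # value is bytes-like
    print(os.getxattr(fname, "user.comment"))            # b'demo value'
    print(os.listxattr(fname))                           # ['user.comment']
    os.removexattr(fname, "user.comment")
    os.unlink(fname)

    # An open file descriptor may be passed in place of the path, and
    # follow_symlinks=False operates on a symlink itself rather than its target.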
-PyDoc_STRVAR(posix_mkfifoat__doc__, -"mkfifoat(dirfd, path, mode=0o666)\n\n\ -Like mkfifo() but if path is relative, it is taken as relative to dirfd.\n\ -If path is relative and dirfd is the special value AT_FDCWD, then path\n\ -is interpreted relative to the current working directory."); - -static PyObject * -posix_mkfifoat(PyObject *self, PyObject *args) -{ - PyObject *opath; - char *filename; - int mode = 0666; - int res, dirfd; - if (!PyArg_ParseTuple(args, "iO&|i:mkfifoat", - &dirfd, PyUnicode_FSConverter, &opath, &mode)) - return NULL; - filename = PyBytes_AS_STRING(opath); - Py_BEGIN_ALLOW_THREADS - res = mkfifoat(dirfd, filename, mode); - Py_END_ALLOW_THREADS - Py_DECREF(opath); - if (res < 0) - return posix_error(); - Py_RETURN_NONE; -} -#endif - -#ifdef USE_XATTRS - -static int -try_getxattr(const char *path, const char *name, - ssize_t (*get)(const char *, const char *, void *, size_t), - Py_ssize_t buf_size, PyObject **res) -{ - PyObject *value; - Py_ssize_t len; - - assert(buf_size <= XATTR_SIZE_MAX); - value = PyBytes_FromStringAndSize(NULL, buf_size); - if (!value) - return 0; - Py_BEGIN_ALLOW_THREADS; - len = get(path, name, PyBytes_AS_STRING(value), buf_size); - Py_END_ALLOW_THREADS; - if (len < 0) { - Py_DECREF(value); - if (errno == ERANGE) { - value = NULL; - } - else { - posix_error(); - return 0; - } - } - else if (len != buf_size) { - /* Can only shrink. */ - _PyBytes_Resize(&value, len); - } - *res = value; - return 1; -} - -static PyObject * -getxattr_common(const char *path, PyObject *name_obj, - ssize_t (*get)(const char *, const char *, void *, size_t)) -{ - PyObject *value; - const char *name = PyBytes_AS_STRING(name_obj); - - /* Try a small value first. */ - if (!try_getxattr(path, name, get, 128, &value)) - return NULL; - if (value) - return value; - /* Now the maximum possible one. */ - if (!try_getxattr(path, name, get, XATTR_SIZE_MAX, &value)) - return NULL; - assert(value); - return value; -} - -PyDoc_STRVAR(posix_getxattr__doc__, -"getxattr(path, attr) -> value\n\n\ -Return the value of extended attribute *name* on *path*."); - -static PyObject * -posix_getxattr(PyObject *self, PyObject *args) -{ - PyObject *path, *res, *name; - - if (!PyArg_ParseTuple(args, "O&O&:getxattr", PyUnicode_FSConverter, &path, - PyUnicode_FSConverter, &name)) - return NULL; - res = getxattr_common(PyBytes_AS_STRING(path), name, getxattr); - Py_DECREF(path); - Py_DECREF(name); - return res; -} - -PyDoc_STRVAR(posix_lgetxattr__doc__, -"lgetxattr(path, attr) -> value\n\n\ -Like getxattr but don't follow symlinks."); - -static PyObject * -posix_lgetxattr(PyObject *self, PyObject *args) -{ - PyObject *path, *res, *name; - - if (!PyArg_ParseTuple(args, "O&O&:lgetxattr", PyUnicode_FSConverter, &path, - PyUnicode_FSConverter, &name)) - return NULL; - res = getxattr_common(PyBytes_AS_STRING(path), name, lgetxattr); - Py_DECREF(path); - Py_DECREF(name); - return res; -} - -static ssize_t -wrap_fgetxattr(const char *path, const char *name, void *value, size_t size) -{ - /* Hack to share code. 
*/ - return fgetxattr((int)(Py_uintptr_t)path, name, value, size); -} - -PyDoc_STRVAR(posix_fgetxattr__doc__, -"fgetxattr(fd, attr) -> value\n\n\ -Like getxattr but operate on a fd instead of a path."); - -static PyObject * -posix_fgetxattr(PyObject *self, PyObject *args) -{ - PyObject *res, *name; - int fd; - - if (!PyArg_ParseTuple(args, "iO&:fgetxattr", &fd, PyUnicode_FSConverter, &name)) - return NULL; - res = getxattr_common((const char *)(Py_uintptr_t)fd, name, wrap_fgetxattr); - Py_DECREF(name); - return res; -} - -PyDoc_STRVAR(posix_setxattr__doc__, -"setxattr(path, attr, value, flags=0)\n\n\ -Set extended attribute *attr* on *path* to *value*."); - -static PyObject * -posix_setxattr(PyObject *self, PyObject *args) -{ - PyObject *path, *name; - Py_buffer data; - int flags = 0, err; - - if (!PyArg_ParseTuple(args, "O&O&y*|i:setxattr", PyUnicode_FSConverter, - &path, PyUnicode_FSConverter, &name, &data, &flags)) - return NULL; - Py_BEGIN_ALLOW_THREADS; - err = setxattr(PyBytes_AS_STRING(path), PyBytes_AS_STRING(name), - data.buf, data.len, flags); - Py_END_ALLOW_THREADS; - Py_DECREF(path); - Py_DECREF(name); - PyBuffer_Release(&data); - if (err) - return posix_error(); - Py_RETURN_NONE; -} - -PyDoc_STRVAR(posix_lsetxattr__doc__, -"lsetxattr(path, attr, value, flags=0)\n\n\ -Like setxattr but don't follow symlinks."); - -static PyObject * -posix_lsetxattr(PyObject *self, PyObject *args) -{ - PyObject *path, *name; - Py_buffer data; - int flags = 0, err; - - if (!PyArg_ParseTuple(args, "O&O&y*|i:lsetxattr", PyUnicode_FSConverter, - &path, PyUnicode_FSConverter, &name, &data, &flags)) - return NULL; - Py_BEGIN_ALLOW_THREADS; - err = lsetxattr(PyBytes_AS_STRING(path), PyBytes_AS_STRING(name), - data.buf, data.len, flags); - Py_END_ALLOW_THREADS; - Py_DECREF(path); - Py_DECREF(name); - PyBuffer_Release(&data); - if (err) - return posix_error(); - Py_RETURN_NONE; -} - -PyDoc_STRVAR(posix_fsetxattr__doc__, -"fsetxattr(fd, attr, value, flags=0)\n\n\ -Like setxattr but operates on *fd* instead of a path."); - -static PyObject * -posix_fsetxattr(PyObject *self, PyObject *args) -{ - Py_buffer data; - const char *name; - int fd, flags = 0, err; - - if (!PyArg_ParseTuple(args, "iO&y*|i:fsetxattr", &fd, PyUnicode_FSConverter, - &name, &data, &flags)) - return NULL; - Py_BEGIN_ALLOW_THREADS; - err = fsetxattr(fd, PyBytes_AS_STRING(name), data.buf, data.len, flags); - Py_END_ALLOW_THREADS; - Py_DECREF(name); - PyBuffer_Release(&data); - if (err) - return posix_error(); - Py_RETURN_NONE; -} - -PyDoc_STRVAR(posix_removexattr__doc__, -"removexattr(path, attr)\n\n\ -Remove extended attribute *attr* on *path*."); - -static PyObject * -posix_removexattr(PyObject *self, PyObject *args) -{ - PyObject *path, *name; - int err; - - if (!PyArg_ParseTuple(args, "O&O&:removexattr", PyUnicode_FSConverter, &path, - PyUnicode_FSConverter, &name)) - return NULL; - Py_BEGIN_ALLOW_THREADS; - err = removexattr(PyBytes_AS_STRING(path), PyBytes_AS_STRING(name)); - Py_END_ALLOW_THREADS; - Py_DECREF(path); - Py_DECREF(name); - if (err) - return posix_error(); - Py_RETURN_NONE; -} - -PyDoc_STRVAR(posix_lremovexattr__doc__, -"lremovexattr(path, attr)\n\n\ -Like removexattr but don't follow symlinks."); - -static PyObject * -posix_lremovexattr(PyObject *self, PyObject *args) -{ - PyObject *path, *name; - int err; - - if (!PyArg_ParseTuple(args, "O&O&:lremovexattr", PyUnicode_FSConverter, &path, - PyUnicode_FSConverter, &name)) - return NULL; - Py_BEGIN_ALLOW_THREADS; - err = lremovexattr(PyBytes_AS_STRING(path), 
PyBytes_AS_STRING(name)); - Py_END_ALLOW_THREADS; - Py_DECREF(path); - Py_DECREF(name); - if (err) - return posix_error(); - Py_RETURN_NONE; -} - -PyDoc_STRVAR(posix_fremovexattr__doc__, -"fremovexattr(fd, attr)\n\n\ -Like removexattr but operates on a file descriptor."); - -static PyObject * -posix_fremovexattr(PyObject *self, PyObject *args) -{ - PyObject *name; - int fd, err; - - if (!PyArg_ParseTuple(args, "iO&:fremovexattr", &fd, - PyUnicode_FSConverter, &name)) - return NULL; - Py_BEGIN_ALLOW_THREADS; - err = fremovexattr(fd, PyBytes_AS_STRING(name)); - Py_END_ALLOW_THREADS; - Py_DECREF(name); - if (err) - return posix_error(); - Py_RETURN_NONE; -} - -static Py_ssize_t -try_listxattr(const char *path, ssize_t (*list)(const char *, char *, size_t), - Py_ssize_t buf_size, char **buf) -{ - Py_ssize_t len; - - *buf = PyMem_MALLOC(buf_size); - if (!*buf) { - PyErr_NoMemory(); - return -1; - } - Py_BEGIN_ALLOW_THREADS; - len = list(path, *buf, buf_size); - Py_END_ALLOW_THREADS; - if (len < 0) { - PyMem_FREE(*buf); - if (errno != ERANGE) - posix_error(); - return -1; - } - return len; -} - -static PyObject * -listxattr_common(const char *path, ssize_t (*list)(const char *, char *, size_t)) -{ - PyObject *res, *attr; - Py_ssize_t len, err, start, i; - char *buf; - - len = try_listxattr(path, list, 256, &buf); - if (len < 0) { - if (PyErr_Occurred()) - return NULL; - len = try_listxattr(path, list, XATTR_LIST_MAX, &buf); - if (len < 0) - return NULL; - } - res = PyList_New(0); - if (!res) { - PyMem_FREE(buf); - return NULL; - } - for (start = i = 0; i < len; i++) { - if (!buf[i]) { - attr = PyUnicode_DecodeFSDefaultAndSize(&buf[start], i - start); - if (!attr) { - Py_DECREF(res); - PyMem_FREE(buf); - return NULL; - } - err = PyList_Append(res, attr); - Py_DECREF(attr); - if (err) { - Py_DECREF(res); - PyMem_FREE(buf); - return NULL; - } - start = i + 1; - } - } - PyMem_FREE(buf); - return res; -} - -PyDoc_STRVAR(posix_listxattr__doc__, -"listxattr(path)\n\n\ -Return a list of extended attributes on *path*."); - -static PyObject * -posix_listxattr(PyObject *self, PyObject *args) -{ - PyObject *path, *res; - - if (!PyArg_ParseTuple(args, "O&:listxattr", PyUnicode_FSConverter, &path)) - return NULL; - res = listxattr_common(PyBytes_AS_STRING(path), listxattr); - Py_DECREF(path); - return res; -} - -PyDoc_STRVAR(posix_llistxattr__doc__, -"llistxattr(path)\n\n\ -Like listxattr but don't follow symlinks.."); - -static PyObject * -posix_llistxattr(PyObject *self, PyObject *args) -{ - PyObject *path, *res; - - if (!PyArg_ParseTuple(args, "O&:llistxattr", PyUnicode_FSConverter, &path)) - return NULL; - res = listxattr_common(PyBytes_AS_STRING(path), llistxattr); - Py_DECREF(path); - return res; -} - -static ssize_t -wrap_flistxattr(const char *path, char *buf, size_t len) -{ - /* Hack to share code. 
*/ - return flistxattr((int)(Py_uintptr_t)path, buf, len); -} - -PyDoc_STRVAR(posix_flistxattr__doc__, -"flistxattr(path)\n\n\ -Like flistxattr but operates on a file descriptor."); - -static PyObject * -posix_flistxattr(PyObject *self, PyObject *args) -{ - long fd; - - if (!PyArg_ParseTuple(args, "i:flistxattr", &fd)) - return NULL; - return listxattr_common((const char *)(Py_uintptr_t)fd, wrap_flistxattr); + break; + } +exit: + path_cleanup(&path); + if (buffer) + PyMem_FREE(buffer); + return result; } #endif /* USE_XATTRS */ @@ -10600,20 +10715,30 @@ static PyMethodDef posix_methods[] = { - {"access", posix_access, METH_VARARGS, posix_access__doc__}, + {"access", (PyCFunction)posix_access, + METH_VARARGS | METH_KEYWORDS, + posix_access__doc__}, #ifdef HAVE_TTYNAME {"ttyname", posix_ttyname, METH_VARARGS, posix_ttyname__doc__}, #endif - {"chdir", posix_chdir, METH_VARARGS, posix_chdir__doc__}, + {"chdir", (PyCFunction)posix_chdir, + METH_VARARGS | METH_KEYWORDS, + posix_chdir__doc__}, #ifdef HAVE_CHFLAGS - {"chflags", posix_chflags, METH_VARARGS, posix_chflags__doc__}, + {"chflags", (PyCFunction)posix_chflags, + METH_VARARGS | METH_KEYWORDS, + posix_chflags__doc__}, #endif /* HAVE_CHFLAGS */ - {"chmod", posix_chmod, METH_VARARGS, posix_chmod__doc__}, + {"chmod", (PyCFunction)posix_chmod, + METH_VARARGS | METH_KEYWORDS, + posix_chmod__doc__}, #ifdef HAVE_FCHMOD {"fchmod", posix_fchmod, METH_VARARGS, posix_fchmod__doc__}, #endif /* HAVE_FCHMOD */ #ifdef HAVE_CHOWN - {"chown", posix_chown, METH_VARARGS, posix_chown__doc__}, + {"chown", (PyCFunction)posix_chown, + METH_VARARGS | METH_KEYWORDS, + posix_chown__doc__}, #endif /* HAVE_CHOWN */ #ifdef HAVE_LCHMOD {"lchmod", posix_lchmod, METH_VARARGS, posix_lchmod__doc__}, @@ -10639,15 +10764,20 @@ {"getcwdb", (PyCFunction)posix_getcwd_bytes, METH_NOARGS, posix_getcwdb__doc__}, #endif -#ifdef HAVE_LINK - {"link", posix_link, METH_VARARGS, posix_link__doc__}, +#if defined(HAVE_LINK) || defined(MS_WINDOWS) + {"link", (PyCFunction)posix_link, + METH_VARARGS | METH_KEYWORDS, + posix_link__doc__}, #endif /* HAVE_LINK */ - {"listdir", posix_listdir, METH_VARARGS, posix_listdir__doc__}, -#ifdef HAVE_FDOPENDIR - {"flistdir", posix_flistdir, METH_VARARGS, posix_flistdir__doc__}, -#endif - {"lstat", posix_lstat, METH_VARARGS, posix_lstat__doc__}, - {"mkdir", posix_mkdir, METH_VARARGS, posix_mkdir__doc__}, + {"listdir", (PyCFunction)posix_listdir, + METH_VARARGS | METH_KEYWORDS, + posix_listdir__doc__}, + {"lstat", (PyCFunction)posix_lstat, + METH_VARARGS | METH_KEYWORDS, + posix_lstat__doc__}, + {"mkdir", (PyCFunction)posix_mkdir, + METH_VARARGS | METH_KEYWORDS, + posix_mkdir__doc__}, #ifdef HAVE_NICE {"nice", posix_nice, METH_VARARGS, posix_nice__doc__}, #endif /* HAVE_NICE */ @@ -10658,23 +10788,31 @@ {"setpriority", posix_setpriority, METH_VARARGS, posix_setpriority__doc__}, #endif /* HAVE_SETPRIORITY */ #ifdef HAVE_READLINK - {"readlink", posix_readlink, METH_VARARGS, posix_readlink__doc__}, + {"readlink", (PyCFunction)posix_readlink, + METH_VARARGS | METH_KEYWORDS, + readlink__doc__}, #endif /* HAVE_READLINK */ #if !defined(HAVE_READLINK) && defined(MS_WINDOWS) - {"readlink", win_readlink, METH_VARARGS, win_readlink__doc__}, + {"readlink", (PyCFunction)win_readlink, + METH_VARARGS | METH_KEYWORDS, + readlink__doc__}, #endif /* !defined(HAVE_READLINK) && defined(MS_WINDOWS) */ - {"rename", posix_rename, METH_VARARGS, posix_rename__doc__}, - {"replace", posix_replace, METH_VARARGS, posix_replace__doc__}, + {"rename", (PyCFunction)posix_rename, + 
METH_VARARGS | METH_KEYWORDS, + posix_rename__doc__}, + {"replace", (PyCFunction)posix_replace, + METH_VARARGS | METH_KEYWORDS, + posix_replace__doc__}, {"rmdir", posix_rmdir, METH_VARARGS, posix_rmdir__doc__}, - {"stat", posix_stat, METH_VARARGS, posix_stat__doc__}, + {"stat", (PyCFunction)posix_stat, + METH_VARARGS | METH_KEYWORDS, + posix_stat__doc__}, {"stat_float_times", stat_float_times, METH_VARARGS, stat_float_times__doc__}, -#if defined(HAVE_SYMLINK) && !defined(MS_WINDOWS) - {"symlink", posix_symlink, METH_VARARGS, posix_symlink__doc__}, +#if defined(HAVE_SYMLINK) + {"symlink", (PyCFunction)posix_symlink, + METH_VARARGS | METH_KEYWORDS, + posix_symlink__doc__}, #endif /* HAVE_SYMLINK */ -#if defined(HAVE_SYMLINK) && defined(MS_WINDOWS) - {"symlink", (PyCFunction)win_symlink, METH_VARARGS | METH_KEYWORDS, - win_symlink__doc__}, -#endif /* defined(HAVE_SYMLINK) && defined(MS_WINDOWS) */ #ifdef HAVE_SYSTEM {"system", posix_system, METH_VARARGS, posix_system__doc__}, #endif @@ -10682,29 +10820,24 @@ #ifdef HAVE_UNAME {"uname", posix_uname, METH_NOARGS, posix_uname__doc__}, #endif /* HAVE_UNAME */ - {"unlink", posix_unlink, METH_VARARGS, posix_unlink__doc__}, - {"remove", posix_unlink, METH_VARARGS, posix_remove__doc__}, + {"unlink", (PyCFunction)posix_unlink, + METH_VARARGS | METH_KEYWORDS, + posix_unlink__doc__}, + {"remove", (PyCFunction)posix_unlink, + METH_VARARGS | METH_KEYWORDS, + posix_remove__doc__}, {"utime", (PyCFunction)posix_utime, METH_VARARGS | METH_KEYWORDS, posix_utime__doc__}, -#ifdef HAVE_FUTIMES - {"futimes", (PyCFunction)posix_futimes, - METH_VARARGS | METH_KEYWORDS, posix_futimes__doc__}, -#endif -#ifdef HAVE_LUTIMES - {"lutimes", (PyCFunction)posix_lutimes, - METH_VARARGS | METH_KEYWORDS, posix_lutimes__doc__}, -#endif #ifdef HAVE_TIMES {"times", posix_times, METH_NOARGS, posix_times__doc__}, #endif /* HAVE_TIMES */ {"_exit", posix__exit, METH_VARARGS, posix__exit__doc__}, #ifdef HAVE_EXECV {"execv", posix_execv, METH_VARARGS, posix_execv__doc__}, - {"execve", posix_execve, METH_VARARGS, posix_execve__doc__}, + {"execve", (PyCFunction)posix_execve, + METH_VARARGS | METH_KEYWORDS, + posix_execve__doc__}, #endif /* HAVE_EXECV */ -#ifdef HAVE_FEXECVE - {"fexecve", posix_fexecve, METH_VARARGS, posix_fexecve__doc__}, -#endif #ifdef HAVE_SPAWNV {"spawnv", posix_spawnv, METH_VARARGS, posix_spawnv__doc__}, {"spawnve", posix_spawnve, METH_VARARGS, posix_spawnve__doc__}, @@ -10791,7 +10924,6 @@ #ifdef MS_WINDOWS {"startfile", win32_startfile, METH_VARARGS, win32_startfile__doc__}, {"kill", win32_kill, METH_VARARGS, win32_kill__doc__}, - {"link", win32_link, METH_VARARGS, win32_link__doc__}, #endif #ifdef HAVE_SETUID {"setuid", posix_setuid, METH_VARARGS, posix_setuid__doc__}, @@ -10853,7 +10985,9 @@ #ifdef HAVE_TCSETPGRP {"tcsetpgrp", posix_tcsetpgrp, METH_VARARGS, posix_tcsetpgrp__doc__}, #endif /* HAVE_TCSETPGRP */ - {"open", posix_open, METH_VARARGS, posix_open__doc__}, + {"open", (PyCFunction)posix_open,\ + METH_VARARGS | METH_KEYWORDS, + posix_open__doc__}, {"close", posix_close, METH_VARARGS, posix_close__doc__}, {"closerange", posix_closerange, METH_VARARGS, posix_closerange__doc__}, {"device_encoding", device_encoding, METH_VARARGS, device_encoding__doc__}, @@ -10890,10 +11024,14 @@ {"pipe2", posix_pipe2, METH_O, posix_pipe2__doc__}, #endif #ifdef HAVE_MKFIFO - {"mkfifo", posix_mkfifo, METH_VARARGS, posix_mkfifo__doc__}, + {"mkfifo", (PyCFunction)posix_mkfifo, + METH_VARARGS | METH_KEYWORDS, + posix_mkfifo__doc__}, #endif #if defined(HAVE_MKNOD) && 
defined(HAVE_MAKEDEV) - {"mknod", posix_mknod, METH_VARARGS, posix_mknod__doc__}, + {"mknod", (PyCFunction)posix_mknod, + METH_VARARGS | METH_KEYWORDS, + posix_mknod__doc__}, #endif #ifdef HAVE_DEVICE_MACROS {"major", posix_major, METH_VARARGS, posix_major__doc__}, @@ -10961,7 +11099,9 @@ {"fstatvfs", posix_fstatvfs, METH_VARARGS, posix_fstatvfs__doc__}, #endif #if defined(HAVE_STATVFS) && defined(HAVE_SYS_STATVFS_H) - {"statvfs", posix_statvfs, METH_VARARGS, posix_statvfs__doc__}, + {"statvfs", (PyCFunction)posix_statvfs, + METH_VARARGS | METH_KEYWORDS, + posix_statvfs__doc__}, #endif #ifdef HAVE_CONFSTR {"confstr", posix_confstr, METH_VARARGS, posix_confstr__doc__}, @@ -11000,67 +11140,19 @@ {"getresgid", posix_getresgid, METH_NOARGS, posix_getresgid__doc__}, #endif -/* posix *at family of functions */ -#ifdef HAVE_FACCESSAT - {"faccessat", posix_faccessat, METH_VARARGS, posix_faccessat__doc__}, -#endif -#ifdef HAVE_FCHMODAT - {"fchmodat", posix_fchmodat, METH_VARARGS, posix_fchmodat__doc__}, -#endif /* HAVE_FCHMODAT */ -#ifdef HAVE_FCHOWNAT - {"fchownat", posix_fchownat, METH_VARARGS, posix_fchownat__doc__}, -#endif /* HAVE_FCHOWNAT */ -#ifdef HAVE_FSTATAT - {"fstatat", posix_fstatat, METH_VARARGS, posix_fstatat__doc__}, -#endif -#ifdef HAVE_FUTIMESAT - {"futimesat", posix_futimesat, METH_VARARGS, posix_futimesat__doc__}, -#endif -#ifdef HAVE_LINKAT - {"linkat", posix_linkat, METH_VARARGS, posix_linkat__doc__}, -#endif /* HAVE_LINKAT */ -#ifdef HAVE_MKDIRAT - {"mkdirat", posix_mkdirat, METH_VARARGS, posix_mkdirat__doc__}, -#endif -#if defined(HAVE_MKNODAT) && defined(HAVE_MAKEDEV) - {"mknodat", posix_mknodat, METH_VARARGS, posix_mknodat__doc__}, -#endif -#ifdef HAVE_OPENAT - {"openat", posix_openat, METH_VARARGS, posix_openat__doc__}, -#endif -#ifdef HAVE_READLINKAT - {"readlinkat", posix_readlinkat, METH_VARARGS, posix_readlinkat__doc__}, -#endif /* HAVE_READLINKAT */ -#ifdef HAVE_RENAMEAT - {"renameat", posix_renameat, METH_VARARGS, posix_renameat__doc__}, -#endif -#if HAVE_SYMLINKAT - {"symlinkat", posix_symlinkat, METH_VARARGS, posix_symlinkat__doc__}, -#endif /* HAVE_SYMLINKAT */ -#ifdef HAVE_UNLINKAT - {"unlinkat", posix_unlinkat, METH_VARARGS, posix_unlinkat__doc__}, -#endif -#ifdef HAVE_UTIMENSAT - {"utimensat", (PyCFunction)posix_utimensat, - METH_VARARGS | METH_KEYWORDS, - posix_utimensat__doc__}, -#endif -#ifdef HAVE_MKFIFOAT - {"mkfifoat", posix_mkfifoat, METH_VARARGS, posix_mkfifoat__doc__}, -#endif #ifdef USE_XATTRS - {"setxattr", posix_setxattr, METH_VARARGS, posix_setxattr__doc__}, - {"lsetxattr", posix_lsetxattr, METH_VARARGS, posix_lsetxattr__doc__}, - {"fsetxattr", posix_fsetxattr, METH_VARARGS, posix_fsetxattr__doc__}, - {"getxattr", posix_getxattr, METH_VARARGS, posix_getxattr__doc__}, - {"lgetxattr", posix_lgetxattr, METH_VARARGS, posix_lgetxattr__doc__}, - {"fgetxattr", posix_fgetxattr, METH_VARARGS, posix_fgetxattr__doc__}, - {"removexattr", posix_removexattr, METH_VARARGS, posix_removexattr__doc__}, - {"lremovexattr", posix_lremovexattr, METH_VARARGS, posix_lremovexattr__doc__}, - {"fremovexattr", posix_fremovexattr, METH_VARARGS, posix_fremovexattr__doc__}, - {"listxattr", posix_listxattr, METH_VARARGS, posix_listxattr__doc__}, - {"llistxattr", posix_llistxattr, METH_VARARGS, posix_llistxattr__doc__}, - {"flistxattr", posix_flistxattr, METH_VARARGS, posix_flistxattr__doc__}, + {"setxattr", (PyCFunction)posix_setxattr, + METH_VARARGS | METH_KEYWORDS, + posix_setxattr__doc__}, + {"getxattr", (PyCFunction)posix_getxattr, + METH_VARARGS | METH_KEYWORDS, + 
posix_getxattr__doc__}, + {"removexattr", (PyCFunction)posix_removexattr, + METH_VARARGS | METH_KEYWORDS, + posix_removexattr__doc__}, + {"listxattr", (PyCFunction)posix_listxattr, + METH_VARARGS | METH_KEYWORDS, + posix_listxattr__doc__}, #endif #if defined(TERMSIZE_USE_CONIO) || defined(TERMSIZE_USE_IOCTL) {"get_terminal_size", get_terminal_size, METH_VARARGS, termsize__doc__}, @@ -11268,28 +11360,6 @@ #ifdef O_ACCMODE if (ins(d, "O_ACCMODE", (long)O_ACCMODE)) return -1; #endif -/* posix - constants for *at functions */ -#ifdef AT_SYMLINK_NOFOLLOW - if (ins(d, "AT_SYMLINK_NOFOLLOW", (long)AT_SYMLINK_NOFOLLOW)) return -1; -#endif -#ifdef AT_EACCESS - if (ins(d, "AT_EACCESS", (long)AT_EACCESS)) return -1; -#endif -#ifdef AT_FDCWD - if (ins(d, "AT_FDCWD", (long)AT_FDCWD)) return -1; -#endif -#ifdef AT_REMOVEDIR - if (ins(d, "AT_REMOVEDIR", (long)AT_REMOVEDIR)) return -1; -#endif -#ifdef AT_SYMLINK_FOLLOW - if (ins(d, "AT_SYMLINK_FOLLOW", (long)AT_SYMLINK_FOLLOW)) return -1; -#endif -#ifdef UTIME_NOW - if (ins(d, "UTIME_NOW", (long)UTIME_NOW)) return -1; -#endif -#ifdef UTIME_OMIT - if (ins(d, "UTIME_OMIT", (long)UTIME_OMIT)) return -1; -#endif #ifdef SEEK_HOLE @@ -11483,14 +11553,6 @@ if (ins(d, "F_TEST", (long)F_TEST)) return -1; #endif - /* constants for futimens */ -#ifdef UTIME_NOW - if (ins(d, "UTIME_NOW", (long)UTIME_NOW)) return -1; -#endif -#ifdef UTIME_OMIT - if (ins(d, "UTIME_OMIT", (long)UTIME_OMIT)) return -1; -#endif - #ifdef HAVE_SPAWNV #if defined(PYOS_OS2) && defined(PYCC_GCC) if (ins(d, "P_WAIT", (long)P_WAIT)) return -1; @@ -11613,10 +11675,130 @@ }; +static char *have_functions[] = { + +#ifdef HAVE_FACCESSAT + "HAVE_FACCESSAT", +#endif + +#ifdef HAVE_FCHDIR + "HAVE_FCHDIR", +#endif + +#ifdef HAVE_FCHMOD + "HAVE_FCHMOD", +#endif + +#ifdef HAVE_FCHMODAT + "HAVE_FCHMODAT", +#endif + +#ifdef HAVE_FCHOWN + "HAVE_FCHOWN", +#endif + +#ifdef HAVE_FEXECVE + "HAVE_FEXECVE", +#endif + +#ifdef HAVE_FDOPENDIR + "HAVE_FDOPENDIR", +#endif + +#ifdef HAVE_FSTATAT + "HAVE_FSTATAT", +#endif + +#ifdef HAVE_FSTATVFS + "HAVE_FSTATVFS", +#endif + +#ifdef HAVE_FUTIMENS + "HAVE_FUTIMENS", +#endif + +#ifdef HAVE_FUTIMES + "HAVE_FUTIMES", +#endif + +#ifdef HAVE_FUTIMESAT + "HAVE_FUTIMESAT", +#endif + +#ifdef HAVE_LINKAT + "HAVE_LINKAT", +#endif + +#ifdef HAVE_LCHFLAGS + "HAVE_LCHFLAGS", +#endif + +#ifdef HAVE_LCHMOD + "HAVE_LCHMOD", +#endif + +#ifdef HAVE_LCHOWN + "HAVE_LCHOWN", +#endif + +#ifdef HAVE_LSTAT + "HAVE_LSTAT", +#endif + +#ifdef HAVE_LUTIMES + "HAVE_LUTIMES", +#endif + +#ifdef HAVE_MKDIRAT + "HAVE_MKDIRAT", +#endif + +#ifdef HAVE_MKFIFOAT + "HAVE_MKFIFOAT", +#endif + +#ifdef HAVE_MKNODAT + "HAVE_MKNODAT", +#endif + +#ifdef HAVE_OPENAT + "HAVE_OPENAT", +#endif + +#ifdef HAVE_READLINKAT + "HAVE_READLINKAT", +#endif + +#ifdef HAVE_RENAMEAT + "HAVE_RENAMEAT", +#endif + +#ifdef HAVE_SYMLINKAT + "HAVE_SYMLINKAT", +#endif + +#ifdef HAVE_UNLINKAT + "HAVE_UNLINKAT", +#endif + +#ifdef HAVE_UTIMENSAT + "HAVE_UTIMENSAT", +#endif + +#ifdef MS_WINDOWS + "MS_WINDOWS", +#endif + + NULL +}; + + PyMODINIT_FUNC INITFUNC(void) { PyObject *m, *v; + PyObject *list; + char **trace; #if defined(HAVE_SYMLINK) && defined(MS_WINDOWS) win32_can_symlink = enable_symlink(); @@ -11704,7 +11886,6 @@ Py_INCREF(&SchedParamType); PyModule_AddObject(m, "sched_param", (PyObject *)&SchedParamType); #endif - initialized = 1; #ifdef __APPLE__ /* @@ -11714,7 +11895,7 @@ * currently active platform. 
* * This block allow one to use a python binary that was build on - * OSX 10.4 on OSX 10.3, without loosing access to new APIs on + * OSX 10.4 on OSX 10.3, without losing access to new APIs on * OSX 10.4. */ #ifdef HAVE_FSTATVFS @@ -11750,6 +11931,35 @@ if (!billion) return NULL; + /* suppress "function not used" warnings */ + { + int ignored; + fd_specified("", -1); + follow_symlinks_specified("", 1); + dir_fd_and_follow_symlinks_invalid("chmod", DEFAULT_DIR_FD, 1); + dir_fd_converter(Py_None, &ignored); + dir_fd_unavailable(Py_None, &ignored); + } + + /* + * provide list of locally available functions + * so os.py can populate support_* lists + */ + list = PyList_New(0); + if (!list) + return NULL; + for (trace = have_functions; *trace; trace++) { + PyObject *unicode = PyUnicode_DecodeASCII(*trace, strlen(*trace), NULL); + if (!unicode) + return NULL; + if (PyList_Append(list, unicode)) + return NULL; + Py_DECREF(unicode); + } + PyModule_AddObject(m, "_have_functions", list); + + initialized = 1; + return m; } -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 02:02:13 2012 From: python-checkins at python.org (larry.hastings) Date: Sat, 23 Jun 2012 02:02:13 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314626=3A_Fix_build?= =?utf8?q?bot_issues_on_FreeBSD_=28AMD64=29=2E__=28Fingers_crossed=2E=29?= Message-ID: http://hg.python.org/cpython/rev/04fd8f77a58e changeset: 77598:04fd8f77a58e user: Larry Hastings date: Fri Jun 22 17:01:41 2012 -0700 summary: Issue #14626: Fix buildbot issues on FreeBSD (AMD64). (Fingers crossed.) files: Lib/test/test_posix.py | 7 ++++++- Lib/test/test_shutil.py | 2 +- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -721,8 +721,13 @@ posix.utime(support.TESTFN, dir_fd=f, times=(int(now), int((now - int(now)) * 1e9))) + # try dir_fd and follow_symlinks together if os.utime in os.supports_follow_symlinks: - posix.utime(support.TESTFN, follow_symlinks=False, dir_fd=f) + try: + posix.utime(support.TESTFN, follow_symlinks=False, dir_fd=f) + except RuntimeError: + # whoops! using both together not supported on this platform. + pass finally: posix.close(f) diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -296,7 +296,7 @@ def make_chflags_raiser(err): ex = OSError() - def _chflags_raiser(path, flags): + def _chflags_raiser(path, flags, *, follow_symlinks=True): ex.errno = err raise ex return _chflags_raiser -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 02:07:33 2012 From: python-checkins at python.org (larry.hastings) Date: Sat, 23 Jun 2012 02:07:33 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314626=3A_Fix_build?= =?utf8?q?bot_issue_on_x86_Tiger_3=2Ex=2E?= Message-ID: http://hg.python.org/cpython/rev/e1e0eeb07398 changeset: 77599:e1e0eeb07398 user: Larry Hastings date: Fri Jun 22 17:06:48 2012 -0700 summary: Issue #14626: Fix buildbot issue on x86 Tiger 3.x. 
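The test_posix change in the FreeBSD changeset above guards the combined use of dir_fd and follow_symlinks, since some platforms reject that pairing at runtime. A minimal, illustrative sketch of the same defensive pattern for application code (it assumes the os.supports_follow_symlinks set used by the test above and mirrors its RuntimeError fallback; the helper name is made up for illustration):

    import os

    def utime_no_follow(path, dir_fd=None):
        # Prefer operating on the symlink itself when the platform advertises support.
        if os.utime in os.supports_follow_symlinks:
            try:
                os.utime(path, follow_symlinks=False, dir_fd=dir_fd)
                return
            except RuntimeError:
                # Some platforms reject dir_fd and follow_symlinks=False together,
                # exactly the case the test change above works around.
                pass
        # Fall back to the plain call, which follows symlinks.
        os.utime(path, dir_fd=dir_fd)
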
files: Modules/posixmodule.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -3238,7 +3238,7 @@ } #else Py_BEGIN_ALLOW_THREADS -#ifndef HAVE_LINKAT +#ifdef HAVE_LINKAT if ((src_dir_fd != DEFAULT_DIR_FD) || (dst_dir_fd != DEFAULT_DIR_FD) || (!follow_symlinks)) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 02:17:58 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sat, 23 Jun 2012 02:17:58 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_A_better_repr=28=29_for_Fil?= =?utf8?q?eFinder?= Message-ID: http://hg.python.org/cpython/rev/df856963fc6c changeset: 77600:df856963fc6c user: Antoine Pitrou date: Sat Jun 23 02:12:56 2012 +0200 summary: A better repr() for FileFinder files: Lib/importlib/_bootstrap.py | 2 + Python/importlib.h | 2162 +++++++++++----------- 2 files changed, 1090 insertions(+), 1074 deletions(-) diff --git a/Lib/importlib/_bootstrap.py b/Lib/importlib/_bootstrap.py --- a/Lib/importlib/_bootstrap.py +++ b/Lib/importlib/_bootstrap.py @@ -1169,6 +1169,8 @@ return path_hook_for_FileFinder + def __repr__(self): + return "FileFinder(%r)" % (self.path,) # Import itself ############################################################### diff --git a/Python/importlib.h b/Python/importlib.h --- a/Python/importlib.h +++ b/Python/importlib.h [stripped] -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 02:56:06 2012 From: python-checkins at python.org (jesus.cea) Date: Sat, 23 Jun 2012 02:56:06 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Kernel_bug_in_freebsd9_-_?= =?utf8?q?=2310142=3A_Support_for_SEEK=5FHOLE/SEEK=5FDATA?= Message-ID: http://hg.python.org/cpython/rev/13f5a329d5ea changeset: 77601:13f5a329d5ea user: Jesus Cea date: Sat Jun 23 02:55:36 2012 +0200 summary: Kernel bug in freebsd9 - #10142: Support for SEEK_HOLE/SEEK_DATA files: Lib/test/test_posix.py | 3 +++ 1 files changed, 3 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -1010,6 +1010,9 @@ posix.RTLD_GLOBAL posix.RTLD_LOCAL + @unittest.skipIf(sys.platform == 'freebsd9', + "Skip test because known kernel bug - " \ + "http://lists.freebsd.org/pipermail/freebsd-amd64/2012-January/014332.html") @unittest.skipUnless(hasattr(os, 'SEEK_HOLE'), "test needs an OS that reports file holes") def test_fs_holes(self) : -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 02:58:48 2012 From: python-checkins at python.org (jesus.cea) Date: Sat, 23 Jun 2012 02:58:48 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Skip_the_test_only_if_necce?= =?utf8?q?sary_-_Kernel_bug_in_freebsd9_-_=2310142=3A_Support_for?= Message-ID: http://hg.python.org/cpython/rev/8acaa341df53 changeset: 77602:8acaa341df53 user: Jesus Cea date: Sat Jun 23 02:58:14 2012 +0200 summary: Skip the test only if neccesary - Kernel bug in freebsd9 - #10142: Support for SEEK_HOLE/SEEK_DATA files: Lib/test/test_posix.py | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -1010,11 +1010,11 @@ posix.RTLD_GLOBAL posix.RTLD_LOCAL + @unittest.skipUnless(hasattr(os, 'SEEK_HOLE'), + "test needs an OS that reports file holes") @unittest.skipIf(sys.platform == 'freebsd9', "Skip 
test because known kernel bug - " \ "http://lists.freebsd.org/pipermail/freebsd-amd64/2012-January/014332.html") - @unittest.skipUnless(hasattr(os, 'SEEK_HOLE'), - "test needs an OS that reports file holes") def test_fs_holes(self) : # Even if the filesystem doesn't report holes, # if the OS supports it the SEEK_* constants -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 03:11:01 2012 From: python-checkins at python.org (alexander.belopolsky) Date: Sat, 23 Jun 2012 03:11:01 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issues_=2311024=3A_Fixes_an?= =?utf8?q?d_additional_tests_for_Time2Internaldate=2E?= Message-ID: http://hg.python.org/cpython/rev/42b9d9d795f7 changeset: 77603:42b9d9d795f7 user: Alexander Belopolsky date: Fri Jun 22 21:03:39 2012 -0400 summary: Issues #11024: Fixes and additional tests for Time2Internaldate. files: Doc/library/imaplib.rst | 16 +++++---- Lib/imaplib.py | 45 ++++++++++++++++----------- Lib/test/test_imaplib.py | 34 +++++++++++++++----- Misc/NEWS | 2 + 4 files changed, 62 insertions(+), 35 deletions(-) diff --git a/Doc/library/imaplib.rst b/Doc/library/imaplib.rst --- a/Doc/library/imaplib.rst +++ b/Doc/library/imaplib.rst @@ -113,13 +113,15 @@ .. function:: Time2Internaldate(date_time) - Convert *date_time* to an IMAP4 ``INTERNALDATE`` representation. The - return value is a string in the form: ``"DD-Mmm-YYYY HH:MM:SS - +HHMM"`` (including double-quotes). The *date_time* argument can be a - number (int or float) representing seconds since epoch (as returned - by :func:`time.time`), a 9-tuple representing local time (as returned by - :func:`time.localtime`), or a double-quoted string. In the last case, it - is assumed to already be in the correct format. + Convert *date_time* to an IMAP4 ``INTERNALDATE`` representation. + The return value is a string in the form: ``"DD-Mmm-YYYY HH:MM:SS + +HHMM"`` (including double-quotes). The *date_time* argument can + be a number (int or float) representing seconds since epoch (as + returned by :func:`time.time`), a 9-tuple representing local time + an instance of :class:`time.struct_time` (as returned by + :func:`time.localtime`), an aware instance of + :class:`datetime.datetime`, or a double-quoted string. In the last + case, it is assumed to already be in the correct format. Note that IMAP4 message numbers change as the mailbox changes; in particular, after an ``EXPUNGE`` command performs deletions the remaining messages are diff --git a/Lib/imaplib.py b/Lib/imaplib.py --- a/Lib/imaplib.py +++ b/Lib/imaplib.py @@ -23,7 +23,7 @@ __version__ = "2.58" import binascii, errno, random, re, socket, subprocess, sys, time, calendar - +from datetime import datetime, timezone, timedelta try: import ssl HAVE_SSL = True @@ -1313,10 +1313,8 @@ return '' return binascii.a2b_base64(inp) - - -Mon2num = {b'Jan': 1, b'Feb': 2, b'Mar': 3, b'Apr': 4, b'May': 5, b'Jun': 6, - b'Jul': 7, b'Aug': 8, b'Sep': 9, b'Oct': 10, b'Nov': 11, b'Dec': 12} +Months = ' Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec'.split(' ') +Mon2num = {s.encode():n+1 for n, s in enumerate(Months[1:])} def Internaldate2tuple(resp): """Parse an IMAP4 INTERNALDATE string. @@ -1384,28 +1382,37 @@ Return string in form: '"DD-Mmm-YYYY HH:MM:SS +HHMM"'. 
The date_time argument can be a number (int or float) representing seconds since epoch (as returned by time.time()), a 9-tuple - representing local time (as returned by time.localtime()), or a + representing local time, an instance of time.struct_time (as + returned by time.localtime()), an aware datetime instance or a double-quoted string. In the last case, it is assumed to already be in the correct format. """ - if isinstance(date_time, (int, float)): - tt = time.localtime(date_time) - elif isinstance(date_time, (tuple, time.struct_time)): - tt = date_time + dt = datetime.fromtimestamp(date_time, + timezone.utc).astimezone() + elif isinstance(date_time, tuple): + try: + gmtoff = date_time.tm_gmtoff + except AttributeError: + if time.daylight: + dst = date_time[8] + if dst == -1: + dst = time.localtime(time.mktime(date_time))[8] + gmtoff = -(time.timezone, time.altzone)[dst] + else: + gmtoff = -time.timezone + delta = timedelta(seconds=gmtoff) + dt = datetime(*date_time[:6], tzinfo=timezone(delta)) + elif isinstance(date_time, datetime): + if date_time.tzinfo is None: + raise ValueError("date_time must be aware") + dt = date_time elif isinstance(date_time, str) and (date_time[0],date_time[-1]) == ('"','"'): return date_time # Assume in correct format else: raise ValueError("date_time not of a known type") - - dt = time.strftime("%d-%b-%Y %H:%M:%S", tt) - if dt[0] == '0': - dt = ' ' + dt[1:] - if time.daylight and tt[-1]: - zone = -time.altzone - else: - zone = -time.timezone - return '"' + dt + " %+03d%02d" % divmod(zone//60, 60) + '"' + fmt = '"%d-{}-%Y %H:%M:%S %z"'.format(Months[dt.month]) + return dt.strftime(fmt) diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py --- a/Lib/test/test_imaplib.py +++ b/Lib/test/test_imaplib.py @@ -11,9 +11,9 @@ import time import calendar -from test.support import reap_threads, verbose, transient_internet, run_with_tz +from test.support import reap_threads, verbose, transient_internet, run_with_tz, run_with_locale import unittest - +from datetime import datetime, timezone, timedelta try: import ssl except ImportError: @@ -43,14 +43,30 @@ imaplib.Internaldate2tuple( b'25 (INTERNALDATE "02-Apr-2000 03:30:00 +0000")')) + + + def timevalues(self): + return [2000000000, 2000000000.0, time.localtime(2000000000), + (2033, 5, 18, 5, 33, 20, -1, -1, -1), + (2033, 5, 18, 5, 33, 20, -1, -1, 1), + datetime.fromtimestamp(2000000000, + timezone(timedelta(0, 2*60*60))), + '"18-May-2033 05:33:20 +0200"'] + + @run_with_locale('LC_ALL', 'de_DE', 'fr_FR') + @run_with_tz('STD-1DST') + def test_Time2Internaldate(self): + expected = '"18-May-2033 05:33:20 +0200"' + + for t in self.timevalues(): + internal = imaplib.Time2Internaldate(t) + self.assertEqual(internal, expected) + def test_that_Time2Internaldate_returns_a_result(self): - # We can check only that it successfully produces a result, - # not the correctness of the result itself, since the result - # depends on the timezone the machine is in. - timevalues = [2000000000, 2000000000.0, time.localtime(2000000000), - '"18-May-2033 05:33:20 +0200"'] - - for t in timevalues: + # Without tzset, we can check only that it successfully + # produces a result, not the correctness of the result itself, + # since the result depends on the timezone the machine is in. + for t in self.timevalues(): imaplib.Time2Internaldate(t) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -40,6 +40,8 @@ Library ------- +- Issues #11024: Fixes and additional tests for Time2Internaldate. 
+ - Issue #14626: Large refactoring of functions / parameters in the os module. Many functions now support "dir_fd" and "follow_symlinks" parameters; some also support accepting an open file descriptor in place of of a path -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 03:11:04 2012 From: python-checkins at python.org (alexander.belopolsky) Date: Sat, 23 Jun 2012 03:11:04 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_fixed_whitespace?= Message-ID: http://hg.python.org/cpython/rev/48a073385660 changeset: 77604:48a073385660 user: Alexander Belopolsky date: Fri Jun 22 21:10:50 2012 -0400 summary: fixed whitespace files: Lib/test/test_imaplib.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py --- a/Lib/test/test_imaplib.py +++ b/Lib/test/test_imaplib.py @@ -49,7 +49,7 @@ return [2000000000, 2000000000.0, time.localtime(2000000000), (2033, 5, 18, 5, 33, 20, -1, -1, -1), (2033, 5, 18, 5, 33, 20, -1, -1, 1), - datetime.fromtimestamp(2000000000, + datetime.fromtimestamp(2000000000, timezone(timedelta(0, 2*60*60))), '"18-May-2033 05:33:20 +0200"'] -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 03:56:47 2012 From: python-checkins at python.org (alexander.belopolsky) Date: Sat, 23 Jun 2012 03:56:47 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315148=3A_Fixed_typ?= =?utf8?q?os_in_shutil=2Ewhich=28=29_docstring?= Message-ID: http://hg.python.org/cpython/rev/5975292ddf82 changeset: 77605:5975292ddf82 user: Alexander Belopolsky date: Fri Jun 22 21:56:42 2012 -0400 summary: Issue #15148: Fixed typos in shutil.which() docstring files: Lib/shutil.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/shutil.py b/Lib/shutil.py --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -972,7 +972,7 @@ return os.terminal_size((columns, lines)) def which(cmd, mode=os.F_OK | os.X_OK, path=None): - """Given a file, mode, and a path string, return the path whichs conform + """Given a file, mode, and a path string, return the path which conforms to the given mode on the path.""" # Check that a given file can be accessed with the correct mode. # Additionally check that `file` is not a directory, as on Windows -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 04:14:57 2012 From: python-checkins at python.org (brian.curtin) Date: Sat, 23 Jun 2012 04:14:57 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_file_was_the_old_arg_name_f?= =?utf8?q?rom_an_earlier_patch=2E_command_matches_the?= Message-ID: http://hg.python.org/cpython/rev/973b4806f760 changeset: 77606:973b4806f760 user: Brian Curtin date: Fri Jun 22 21:14:34 2012 -0500 summary: file was the old arg name from an earlier patch. command matches the implementation files: Lib/shutil.py | 4 +- Tools/msi/msi.py | 46 ++++++++++++++++++++++++++++++++--- 2 files changed, 44 insertions(+), 6 deletions(-) diff --git a/Lib/shutil.py b/Lib/shutil.py --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -972,8 +972,8 @@ return os.terminal_size((columns, lines)) def which(cmd, mode=os.F_OK | os.X_OK, path=None): - """Given a file, mode, and a path string, return the path which conforms - to the given mode on the path.""" + """Given a command, mode, and a path string, return the path which + conforms to the given mode on the path.""" # Check that a given file can be accessed with the correct mode. 
# Additionally check that `file` is not a directory, as on Windows # directories pass the os.access check. diff --git a/Tools/msi/msi.py b/Tools/msi/msi.py --- a/Tools/msi/msi.py +++ b/Tools/msi/msi.py @@ -405,6 +405,7 @@ ("ErrorDialog", "ErrorDlg"), ("Progress1", "Install"), # modified in maintenance type dlg ("Progress2", "installs"), + ("ModifyPath", "0"), ("MaintenanceForm_Action", "Repair")]) # Fonts, see "TextStyle Table" @@ -634,7 +635,11 @@ c.event("SpawnDialog", "ExistingDirectoryDlg", 'TargetExists=1 and REMOVEOLDVERSION="" and REMOVEOLDSNAPSHOT=""', 2) c.event("SetTargetPath", "TARGETDIR", 'TargetExists=0 or REMOVEOLDVERSION<>"" or REMOVEOLDSNAPSHOT<>""', 3) c.event("SpawnWaitDialog", "WaitForCostingDlg", "CostingComplete=1", 4) - c.event("NewDialog", "SelectFeaturesDlg", 'TargetExists=0 or REMOVEOLDVERSION<>"" or REMOVEOLDSNAPSHOT<>""', 5) + #c.event("NewDialog", "PathInfoDlg", "1=1", 5) # Show this once no matter what. + c.event("NewDialog", "PathInfoDlg", 'TargetExists=0 or REMOVEOLDVERSION<>"" or REMOVEOLDSNAPSHOT<>""', 5) + + # SelectFeaturesDlg is no longer directly shown from here. PathInfoDlg + # currently takes care of showing it. c = seldlg.cancel("Cancel", "DirectoryCombo") c.event("SpawnDialog", "CancelDlg") @@ -649,6 +654,38 @@ c = seldlg.pushbutton("NewDir", 324, 70, 30, 18, 3, "New", None) c.event("DirectoryListNew", "0") + + ##################################################################### + # PathInfoDlg + path_dialog = PyDialog(db, "PathInfoDlg", x, y, w, h, modal, title, + "Yes", "No", "Yes") + path_dialog.title("New for Python 3.3") + path_dialog.text("News", 135, 65, 240, 130, 0x30003, + "New in 3.3 is the ability to add [TARGETDIR] to\n" + "your system's Path variable. This option allows you\n" + "to type `python` at a command prompt without\n" + "requiring anything else on your part.\n\n" + "However, users of multiple versions need to be\n" + "aware that this will overrule the behavior of any\n" + "existing Python installations that you have placed\n" + "on the Path.\n\n" + "If you choose to enable this feature, it will be\n" + "applied after you logout." 
+ ) + + path_dialog.text("Question", 135, 235, 240, 40, 0x30003, + "{\\VerdanaBold10}Would you like to add Python to the Path?") + + c = path_dialog.back("< Back", "No") + c.event("NewDialog", "SelectDirectoryDlg") + + c = path_dialog.next("Yes", "Back", name="Yes") + c.event("[ModifyPath]", "1", order=1) + c.event("NewDialog", "SelectFeaturesDlg", order=2) + + c = path_dialog.cancel("No", "Yes", name="No") + c.event("NewDialog", "SelectFeaturesDlg", order=1) + ##################################################################### # SelectFeaturesDlg features = PyDialog(db, "SelectFeaturesDlg", x, y, w, h, modal|track_disk_space, @@ -660,7 +697,7 @@ "Click on the icons in the tree below to change the way features will be installed.") c=features.back("< Back", "Next") - c.event("NewDialog", "SelectDirectoryDlg") + c.event("NewDialog", "PathInfoDlg") c=features.next("Next >", "Cancel") c.mapping("SelectionNoItems", "Enabled") @@ -853,8 +890,6 @@ level=0) private_crt = Feature(db, "PrivateCRT", "MSVCRT", "C Run-Time (private)", 0, level=0) - add_data(db, "Condition", [("SharedCRT", 1, sys32cond), - ("PrivateCRT", 1, "not "+sys32cond)]) # We don't support advertisement of extensions ext_feature = Feature(db, "Extensions", "Register Extensions", "Make this Python installation the default Python installation", 3, @@ -879,6 +914,9 @@ "prompt without needing the full path.", 13, parent = default_feature, attributes=2|8, level=2) + add_data(db, "Condition", [("SharedCRT", 1, sys32cond), + ("PrivateCRT", 1, "not "+sys32cond), + ("PrependPath", 1, "ModifyPath='0'")]) def extract_msvcr100(): # Find the redistributable files -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 04:41:06 2012 From: python-checkins at python.org (brian.curtin) Date: Sat, 23 Jun 2012 04:41:06 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Revert_unintended_change_in?= =?utf8?q?_973b4806f760?= Message-ID: http://hg.python.org/cpython/rev/130af4c43ce1 changeset: 77607:130af4c43ce1 user: Brian Curtin date: Fri Jun 22 21:40:52 2012 -0500 summary: Revert unintended change in 973b4806f760 files: Tools/msi/msi.py | 46 +++-------------------------------- 1 files changed, 4 insertions(+), 42 deletions(-) diff --git a/Tools/msi/msi.py b/Tools/msi/msi.py --- a/Tools/msi/msi.py +++ b/Tools/msi/msi.py @@ -405,7 +405,6 @@ ("ErrorDialog", "ErrorDlg"), ("Progress1", "Install"), # modified in maintenance type dlg ("Progress2", "installs"), - ("ModifyPath", "0"), ("MaintenanceForm_Action", "Repair")]) # Fonts, see "TextStyle Table" @@ -635,11 +634,7 @@ c.event("SpawnDialog", "ExistingDirectoryDlg", 'TargetExists=1 and REMOVEOLDVERSION="" and REMOVEOLDSNAPSHOT=""', 2) c.event("SetTargetPath", "TARGETDIR", 'TargetExists=0 or REMOVEOLDVERSION<>"" or REMOVEOLDSNAPSHOT<>""', 3) c.event("SpawnWaitDialog", "WaitForCostingDlg", "CostingComplete=1", 4) - #c.event("NewDialog", "PathInfoDlg", "1=1", 5) # Show this once no matter what. - c.event("NewDialog", "PathInfoDlg", 'TargetExists=0 or REMOVEOLDVERSION<>"" or REMOVEOLDSNAPSHOT<>""', 5) - - # SelectFeaturesDlg is no longer directly shown from here. PathInfoDlg - # currently takes care of showing it. 
+ c.event("NewDialog", "SelectFeaturesDlg", 'TargetExists=0 or REMOVEOLDVERSION<>"" or REMOVEOLDSNAPSHOT<>""', 5) c = seldlg.cancel("Cancel", "DirectoryCombo") c.event("SpawnDialog", "CancelDlg") @@ -654,38 +649,6 @@ c = seldlg.pushbutton("NewDir", 324, 70, 30, 18, 3, "New", None) c.event("DirectoryListNew", "0") - - ##################################################################### - # PathInfoDlg - path_dialog = PyDialog(db, "PathInfoDlg", x, y, w, h, modal, title, - "Yes", "No", "Yes") - path_dialog.title("New for Python 3.3") - path_dialog.text("News", 135, 65, 240, 130, 0x30003, - "New in 3.3 is the ability to add [TARGETDIR] to\n" - "your system's Path variable. This option allows you\n" - "to type `python` at a command prompt without\n" - "requiring anything else on your part.\n\n" - "However, users of multiple versions need to be\n" - "aware that this will overrule the behavior of any\n" - "existing Python installations that you have placed\n" - "on the Path.\n\n" - "If you choose to enable this feature, it will be\n" - "applied after you logout." - ) - - path_dialog.text("Question", 135, 235, 240, 40, 0x30003, - "{\\VerdanaBold10}Would you like to add Python to the Path?") - - c = path_dialog.back("< Back", "No") - c.event("NewDialog", "SelectDirectoryDlg") - - c = path_dialog.next("Yes", "Back", name="Yes") - c.event("[ModifyPath]", "1", order=1) - c.event("NewDialog", "SelectFeaturesDlg", order=2) - - c = path_dialog.cancel("No", "Yes", name="No") - c.event("NewDialog", "SelectFeaturesDlg", order=1) - ##################################################################### # SelectFeaturesDlg features = PyDialog(db, "SelectFeaturesDlg", x, y, w, h, modal|track_disk_space, @@ -697,7 +660,7 @@ "Click on the icons in the tree below to change the way features will be installed.") c=features.back("< Back", "Next") - c.event("NewDialog", "PathInfoDlg") + c.event("NewDialog", "SelectDirectoryDlg") c=features.next("Next >", "Cancel") c.mapping("SelectionNoItems", "Enabled") @@ -890,6 +853,8 @@ level=0) private_crt = Feature(db, "PrivateCRT", "MSVCRT", "C Run-Time (private)", 0, level=0) + add_data(db, "Condition", [("SharedCRT", 1, sys32cond), + ("PrivateCRT", 1, "not "+sys32cond)]) # We don't support advertisement of extensions ext_feature = Feature(db, "Extensions", "Register Extensions", "Make this Python installation the default Python installation", 3, @@ -914,9 +879,6 @@ "prompt without needing the full path.", 13, parent = default_feature, attributes=2|8, level=2) - add_data(db, "Condition", [("SharedCRT", 1, sys32cond), - ("PrivateCRT", 1, "not "+sys32cond), - ("PrependPath", 1, "ModifyPath='0'")]) def extract_msvcr100(): # Find the redistributable files -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 04:51:04 2012 From: python-checkins at python.org (larry.hastings) Date: Sat, 23 Jun 2012 04:51:04 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314626=3A_Fix_build?= =?utf8?q?bot_issue_on_OpenIndiana_3=2Ex_machines=2E__=28Hopefully=2E=29?= Message-ID: http://hg.python.org/cpython/rev/66f7377547d5 changeset: 77608:66f7377547d5 user: Larry Hastings date: Fri Jun 22 19:50:21 2012 -0700 summary: Issue #14626: Fix buildbot issue on OpenIndiana 3.x machines. (Hopefully.) 
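The changeset below stops counting fchmodat() as proof that chmod() can skip symlinks, so only platforms with a working lchmod() advertise that capability. A short, hypothetical sketch of how calling code can probe the capability instead of hard-coding platform assumptions (it relies on the os.supports_follow_symlinks set referenced elsewhere in this issue; the lchmod() branch applies only where the platform provides it):

    import os
    import stat

    def clear_write_bits(path):
        # Drop the write permission bits without following a final symlink, if possible.
        mode = stat.S_IMODE(os.lstat(path).st_mode) & ~(stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
        if os.chmod in os.supports_follow_symlinks:
            # Platform advertises a working "don't follow" mode for chmod.
            os.chmod(path, mode, follow_symlinks=False)
        elif hasattr(os, "lchmod"):
            os.lchmod(path, mode)
        else:
            # No lchmod() and no follow_symlinks support: this follows the symlink.
            os.chmod(path, mode)

The same membership test works for the other supports_* sets that os.py builds from the _have_functions list exposed by posixmodule.c.
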
files: Lib/os.py | 31 +++++++++++++++++++++---------- Modules/posixmodule.c | 7 +++++-- 2 files changed, 26 insertions(+), 12 deletions(-) diff --git a/Lib/os.py b/Lib/os.py --- a/Lib/os.py +++ b/Lib/os.py @@ -179,16 +179,27 @@ _set = set() _add("HAVE_FACCESSAT", "access") - # Current linux (kernel 3.2, glibc 2.15) doesn't support lchmod. - # (The function exists, but it's a stub that always returns ENOSUP.) - # Now, linux *does* have fchmodat, which says it can ignore - # symbolic links. But that doesn't work either (also returns ENOSUP). - # I'm guessing that if they fix fchmodat, they'll also add lchmod at - # the same time. So, for now, assume that fchmodat doesn't support - # follow_symlinks unless lchmod works. - if ((sys.platform != "linux") or - ("HAVE_LCHMOD" in _have_functions)): - _add("HAVE_FCHMODAT", "chmod") + # Some platforms don't support lchmod(). Often the function exists + # anyway, as a stub that always returns ENOSUP or perhaps EOPNOTSUPP. + # (No, I don't know why that's a good design.) ./configure will detect + # this and reject it--so HAVE_LCHMOD still won't be defined on such + # platforms. This is Very Helpful. + # + # However, sometimes platforms without a working lchmod() *do* have + # fchmodat(). (Examples: Linux kernel 3.2 with glibc 2.15, + # OpenIndiana 3.x.) And fchmodat() has a flag that theoretically makes + # it behave like lchmod(). So in theory it would be a suitable + # replacement for lchmod(). But when lchmod() doesn't work, fchmodat()'s + # flag doesn't work *either*. Sadly ./configure isn't sophisticated + # enough to detect this condition--it only determines whether or not + # fchmodat() minimally works. + # + # Therefore we simply ignore fchmodat() when deciding whether or not + # os.chmod supports follow_symlinks. Just checking lchmod() is + # sufficient. After all--if you have a working fchmodat(), your + # lchmod() almost certainly works too. + # + # _add("HAVE_FCHMODAT", "chmod") _add("HAVE_FCHOWNAT", "chown") _add("HAVE_FSTATAT", "stat") _add("HAVE_LCHFLAGS", "chflags") diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -2696,7 +2696,8 @@ /* * fchmodat() doesn't currently support AT_SYMLINK_NOFOLLOW! * The documentation specifically shows how to use it, - * and then says it isn't implemented yet. (glibc 2.15) + * and then says it isn't implemented yet. + * (true on linux with glibc 2.15, and openindiana 3.x) * * Once it is supported, os.chmod will automatically * support dir_fd and follow_symlinks=False. (Hopefully.) @@ -2709,7 +2710,9 @@ * and we can't do that in this nested scope. (Macro trickery, sigh.) 
*/ fchmodat_nofollow_unsupported = - result && (errno == ENOTSUP) && !follow_symlinks; + result && + ((errno == ENOTSUP) || (errno == EOPNOTSUPP)) && + !follow_symlinks; } else #endif -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Sat Jun 23 05:44:16 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Sat, 23 Jun 2012 05:44:16 +0200 Subject: [Python-checkins] Daily reference leaks (48a073385660): sum=129 Message-ID: results for 48a073385660 on branch "default" -------------------------------------------- test_httplib leaked [1, 1, 1] references, sum=3 test_ssl leaked [38, 38, 38] references, sum=114 test_urllib2_localnet leaked [4, 4, 4] references, sum=12 Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogqLQNUS', '-x'] From python-checkins at python.org Sat Jun 23 05:49:24 2012 From: python-checkins at python.org (brian.curtin) Date: Sat, 23 Jun 2012 05:49:24 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_=2315148=2E_Make_the_sh?= =?utf8?q?util=2Ewhich_docstring_more_thorough?= Message-ID: http://hg.python.org/cpython/rev/5f18d9d34f73 changeset: 77609:5f18d9d34f73 user: Brian Curtin date: Fri Jun 22 22:48:06 2012 -0500 summary: Fix #15148. Make the shutil.which docstring more thorough files: Lib/shutil.py | 4 +++- 1 files changed, 3 insertions(+), 1 deletions(-) diff --git a/Lib/shutil.py b/Lib/shutil.py --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -973,7 +973,9 @@ def which(cmd, mode=os.F_OK | os.X_OK, path=None): """Given a command, mode, and a path string, return the path which - conforms to the given mode on the path.""" + conforms to the given mode on the PATH, or None if there is no such file. + `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result of + os.environ.get("PATH"), or can be overridden with a custom search path.""" # Check that a given file can be accessed with the correct mode. # Additionally check that `file` is not a directory, as on Windows # directories pass the os.access check. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 05:49:25 2012 From: python-checkins at python.org (brian.curtin) Date: Sat, 23 Jun 2012 05:49:25 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_=2315148=2E_Capitalize_?= =?utf8?q?PATH=2C_hopefully_leading_to_less_confusion?= Message-ID: http://hg.python.org/cpython/rev/aa153b827d17 changeset: 77610:aa153b827d17 user: Brian Curtin date: Fri Jun 22 22:49:12 2012 -0500 summary: Fix #15148. Capitalize PATH, hopefully leading to less confusion files: Lib/shutil.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/shutil.py b/Lib/shutil.py --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -972,7 +972,7 @@ return os.terminal_size((columns, lines)) def which(cmd, mode=os.F_OK | os.X_OK, path=None): - """Given a command, mode, and a path string, return the path which + """Given a command, mode, and a PATH string, return the path which conforms to the given mode on the PATH, or None if there is no such file. `mode` defaults to os.F_OK | os.X_OK. 
`path` defaults to the result of os.environ.get("PATH"), or can be overridden with a custom search path.""" -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 08:55:29 2012 From: python-checkins at python.org (georg.brandl) Date: Sat, 23 Jun 2012 08:55:29 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Release_schedule_update_for_3?= =?utf8?b?LjMu?= Message-ID: http://hg.python.org/peps/rev/2b207a5a4b23 changeset: 4476:2b207a5a4b23 user: Georg Brandl date: Sat Jun 23 08:51:34 2012 +0200 summary: Release schedule update for 3.3. files: pep-0398.txt | 16 ++++++++-------- 1 files changed, 8 insertions(+), 8 deletions(-) diff --git a/pep-0398.txt b/pep-0398.txt --- a/pep-0398.txt +++ b/pep-0398.txt @@ -38,7 +38,7 @@ - 3.3.0 alpha 2: April 1, 2012 - 3.3.0 alpha 3: May 1, 2012 - 3.3.0 alpha 4: May 26, 2012 -- 3.3.0 beta 1: June 23, 2012 +- 3.3.0 beta 1: June 26, 2012 (No new features beyond this point.) @@ -61,6 +61,7 @@ * PEP 380: Syntax for Delegating to a Subgenerator * PEP 393: Flexible String Representation +* PEP 397: Python launcher for Windows * PEP 399: Pure Python/C Accelerator Module Compatibility Requirements * PEP 405: Python Virtual Environments * PEP 409: Suppressing exception context @@ -83,29 +84,28 @@ * Addition of the "lzma" module, and lzma/xz support in tarfile * Implementing ``__import__`` using importlib * Addition of the C decimal implementation +* Switch of Windows build toolchain to VS 2010 Candidate PEPs: * PEP 362: Function Signature Object -* PEP 397: Python launcher for Windows -* PEP 3154: Pickle protocol version 4 (Note that these are not accepted yet and even if they are, they might not be finished in time for Python 3.3.) Other planned large-scale changes: -* Addition of the "regex" module -* Email version 6 -* A standard event-loop interface (PEP by Jim Fulton pending) -* Switch of Windows build toolchain to VS 2010 or 2012 (depending on - availability and platform support) +* None Deferred to post-3.3: * PEP 395: Qualified Names for Modules * PEP 3143: Standard daemon process library +* PEP 3154: Pickle protocol version 4 * Breaking out standard library and docs in separate repos +* Addition of the "regex" module +* Email version 6 +* A standard event-loop interface (PEP by Jim Fulton pending) Copyright ========= -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Sat Jun 23 10:27:15 2012 From: python-checkins at python.org (charles-francois.natali) Date: Sat, 23 Jun 2012 10:27:15 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=282=2E7=29=3A_Remove_useless_?= =?utf8?q?test_=28flowinfo_is_unsigned=29=2E?= Message-ID: http://hg.python.org/cpython/rev/e5478b6b93b5 changeset: 77611:e5478b6b93b5 branch: 2.7 parent: 77568:10faad45905a user: Charles-Fran?ois Natali date: Sat Jun 23 10:06:56 2012 +0200 summary: Remove useless test (flowinfo is unsigned). 
files: Modules/socketmodule.c | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c --- a/Modules/socketmodule.c +++ b/Modules/socketmodule.c @@ -1310,7 +1310,7 @@ "getsockaddrarg: port must be 0-65535."); return 0; } - if (flowinfo < 0 || flowinfo > 0xfffff) { + if (flowinfo > 0xfffff) { PyErr_SetString( PyExc_OverflowError, "getsockaddrarg: flowinfo must be 0-1048575."); @@ -4181,7 +4181,7 @@ if (!PyArg_ParseTuple(sa, "si|II", &hostp, &port, &flowinfo, &scope_id)) return NULL; - if (flowinfo < 0 || flowinfo > 0xfffff) { + if (flowinfo > 0xfffff) { PyErr_SetString(PyExc_OverflowError, "getsockaddrarg: flowinfo must be 0-1048575."); return NULL; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 10:27:16 2012 From: python-checkins at python.org (charles-francois.natali) Date: Sat, 23 Jun 2012 10:27:16 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_Remove_useless_?= =?utf8?q?test_=28flowinfo_is_unsigned=29=2E?= Message-ID: http://hg.python.org/cpython/rev/55849442d7a9 changeset: 77612:55849442d7a9 branch: 3.2 parent: 77560:ffc048f43a70 user: Charles-Fran?ois Natali date: Sat Jun 23 10:17:05 2012 +0200 summary: Remove useless test (flowinfo is unsigned). files: Modules/socketmodule.c | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c --- a/Modules/socketmodule.c +++ b/Modules/socketmodule.c @@ -1347,7 +1347,7 @@ "getsockaddrarg: port must be 0-65535."); return 0; } - if (flowinfo < 0 || flowinfo > 0xfffff) { + if (flowinfo > 0xfffff) { PyErr_SetString( PyExc_OverflowError, "getsockaddrarg: flowinfo must be 0-1048575."); @@ -4129,7 +4129,7 @@ if (!PyArg_ParseTuple(sa, "si|II", &hostp, &port, &flowinfo, &scope_id)) return NULL; - if (flowinfo < 0 || flowinfo > 0xfffff) { + if (flowinfo > 0xfffff) { PyErr_SetString(PyExc_OverflowError, "getsockaddrarg: flowinfo must be 0-1048575."); return NULL; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 10:27:16 2012 From: python-checkins at python.org (charles-francois.natali) Date: Sat, 23 Jun 2012 10:27:16 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Remove_useless_test_=28flowinfo_is_unsigned=29=2E?= Message-ID: http://hg.python.org/cpython/rev/9cf08b779954 changeset: 77613:9cf08b779954 parent: 77610:aa153b827d17 parent: 77612:55849442d7a9 user: Charles-Fran?ois Natali date: Sat Jun 23 10:26:54 2012 +0200 summary: Remove useless test (flowinfo is unsigned). 
files: Modules/socketmodule.c | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c --- a/Modules/socketmodule.c +++ b/Modules/socketmodule.c @@ -1437,7 +1437,7 @@ "getsockaddrarg: port must be 0-65535."); return 0; } - if (flowinfo < 0 || flowinfo > 0xfffff) { + if (flowinfo > 0xfffff) { PyErr_SetString( PyExc_OverflowError, "getsockaddrarg: flowinfo must be 0-1048575."); @@ -5119,7 +5119,7 @@ if (!PyArg_ParseTuple(sa, "si|II", &hostp, &port, &flowinfo, &scope_id)) return NULL; - if (flowinfo < 0 || flowinfo > 0xfffff) { + if (flowinfo > 0xfffff) { PyErr_SetString(PyExc_OverflowError, "getsockaddrarg: flowinfo must be 0-1048575."); return NULL; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 10:28:15 2012 From: python-checkins at python.org (mark.dickinson) Date: Sat, 23 Jun 2012 10:28:15 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314742=3A_test=5Fun?= =?utf8?q?parse_now_only_checks_a_limited_number_of_files_unless_the?= Message-ID: http://hg.python.org/cpython/rev/0e5a698d3c4c changeset: 77614:0e5a698d3c4c parent: 77610:aa153b827d17 user: Mark Dickinson date: Sat Jun 23 09:27:47 2012 +0100 summary: Issue #14742: test_unparse now only checks a limited number of files unless the 'cpu' resource is specified. files: Lib/test/test_tools.py | 1 + Tools/parser/test_unparse.py | 7 ++++++- 2 files changed, 7 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_tools.py b/Lib/test/test_tools.py --- a/Lib/test/test_tools.py +++ b/Lib/test/test_tools.py @@ -127,6 +127,7 @@ # Run the tests in Tools/parser/test_unparse.py with support.DirsOnSysPath(os.path.join(basepath, 'parser')): from test_unparse import UnparseTestCase + from test_unparse import DirectoryTestCase def test_main(): diff --git a/Tools/parser/test_unparse.py b/Tools/parser/test_unparse.py --- a/Tools/parser/test_unparse.py +++ b/Tools/parser/test_unparse.py @@ -2,9 +2,10 @@ import test.support import io import os +import random import tokenize +import unparse import ast -import unparse def read_pyfile(filename): """Read and return the contents of a Python source file (as a @@ -257,6 +258,10 @@ if n.endswith('.py') and not n.startswith('bad'): names.append(os.path.join(test_dir, n)) + # Test limited subset of files unless the 'cpu' resource is specified. 
+ if not test.support.is_resource_enabled("cpu"): + names = random.sample(names, 10) + for filename in names: if test.support.verbose: print('Testing %s' % filename) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 10:28:17 2012 From: python-checkins at python.org (mark.dickinson) Date: Sat, 23 Jun 2012 10:28:17 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_merge?= Message-ID: http://hg.python.org/cpython/rev/205b169c8446 changeset: 77615:205b169c8446 parent: 77614:0e5a698d3c4c parent: 77613:9cf08b779954 user: Mark Dickinson date: Sat Jun 23 09:28:08 2012 +0100 summary: merge files: Modules/socketmodule.c | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c --- a/Modules/socketmodule.c +++ b/Modules/socketmodule.c @@ -1437,7 +1437,7 @@ "getsockaddrarg: port must be 0-65535."); return 0; } - if (flowinfo < 0 || flowinfo > 0xfffff) { + if (flowinfo > 0xfffff) { PyErr_SetString( PyExc_OverflowError, "getsockaddrarg: flowinfo must be 0-1048575."); @@ -5119,7 +5119,7 @@ if (!PyArg_ParseTuple(sa, "si|II", &hostp, &port, &flowinfo, &scope_id)) return NULL; - if (flowinfo < 0 || flowinfo > 0xfffff) { + if (flowinfo > 0xfffff) { PyErr_SetString(PyExc_OverflowError, "getsockaddrarg: flowinfo must be 0-1048575."); return NULL; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 10:39:52 2012 From: python-checkins at python.org (georg.brandl) Date: Sat, 23 Jun 2012 10:39:52 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_362_is_done_too=2E?= Message-ID: http://hg.python.org/peps/rev/d2b49105cdc9 changeset: 4477:d2b49105cdc9 user: Georg Brandl date: Sat Jun 23 10:38:04 2012 +0200 summary: 362 is done too. files: pep-0398.txt | 7 ++++--- 1 files changed, 4 insertions(+), 3 deletions(-) diff --git a/pep-0398.txt b/pep-0398.txt --- a/pep-0398.txt +++ b/pep-0398.txt @@ -59,6 +59,7 @@ Implemented / Final PEPs: +* PEP 362: Function Signature Object * PEP 380: Syntax for Delegating to a Subgenerator * PEP 393: Flexible String Representation * PEP 397: Python launcher for Windows @@ -88,10 +89,10 @@ Candidate PEPs: -* PEP 362: Function Signature Object +* None -(Note that these are not accepted yet and even if they are, they might -not be finished in time for Python 3.3.) +.. (Note that these are not accepted yet and even if they are, they might + not be finished in time for Python 3.3.) Other planned large-scale changes: -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Sat Jun 23 11:40:20 2012 From: python-checkins at python.org (nick.coghlan) Date: Sat, 23 Jun 2012 11:40:20 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Close_=2313062=3A_Add_inspe?= =?utf8?q?ct=2Egetclosurevars_to_simplify_testing_stateful_closures?= Message-ID: http://hg.python.org/cpython/rev/487fe648de56 changeset: 77616:487fe648de56 user: Nick Coghlan date: Sat Jun 23 19:39:55 2012 +1000 summary: Close #13062: Add inspect.getclosurevars to simplify testing stateful closures files: Doc/library/inspect.rst | 16 ++++ Doc/whatsnew/3.3.rst | 10 ++ Lib/inspect.py | 54 ++++++++++++++ Lib/test/test_inspect.py | 101 ++++++++++++++++++++++++++- Misc/NEWS | 5 +- 5 files changed, 184 insertions(+), 2 deletions(-) diff --git a/Doc/library/inspect.rst b/Doc/library/inspect.rst --- a/Doc/library/inspect.rst +++ b/Doc/library/inspect.rst @@ -497,6 +497,22 @@ .. versionadded:: 3.2 +.. 
function:: getclosurevars(func) + + Get the mapping of external name references in a Python function or + method *func* to their current values. A + :term:`named tuple` ``ClosureVars(nonlocals, globals, builtins, unbound)`` + is returned. *nonlocals* maps referenced names to lexical closure + variables, *globals* to the function's module globals and *builtins* to + the builtins visible from the function body. *unbound* is the set of names + referenced in the function that could not be resolved at all given the + current module globals and builtins. + + :exc:`TypeError` is raised if *func* is not a Python function or method. + + .. versionadded:: 3.3 + + .. _inspect-stack: The interpreter stack diff --git a/Doc/whatsnew/3.3.rst b/Doc/whatsnew/3.3.rst --- a/Doc/whatsnew/3.3.rst +++ b/Doc/whatsnew/3.3.rst @@ -1027,6 +1027,16 @@ (Contributed by Sijin Joseph in :issue:`8808`) +inspect +------- + +A new :func:`~inspect.getclosurevars` function has been added. This function +reports the current binding of all names referenced from the function body and +where those names were resolved, making it easier to verify correct internal +state when testing code that relies on stateful closures. + +(Contributed by Meador Inge and Nick Coghlan in :issue:`13062`) + io -- diff --git a/Lib/inspect.py b/Lib/inspect.py --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -42,6 +42,7 @@ import types import warnings import functools +import builtins from operator import attrgetter from collections import namedtuple, OrderedDict @@ -1036,6 +1037,59 @@ _missing_arguments(f_name, kwonlyargs, False, arg2value) return arg2value +ClosureVars = namedtuple('ClosureVars', 'nonlocals globals builtins unbound') + +def getclosurevars(func): + """ + Get the mapping of free variables to their current values. + + Returns a named tuple of dics mapping the current nonlocal, global + and builtin references as seen by the body of the function. A final + set of unbound names that could not be resolved is also provided. + """ + + if ismethod(func): + func = func.__func__ + + if not isfunction(func): + raise TypeError("'{!r}' is not a Python function".format(func)) + + code = func.__code__ + # Nonlocal references are named in co_freevars and resolved + # by looking them up in __closure__ by positional index + if func.__closure__ is None: + nonlocal_vars = {} + else: + nonlocal_vars = { + var : cell.cell_contents + for var, cell in zip(code.co_freevars, func.__closure__) + } + + # Global and builtin references are named in co_names and resolved + # by looking them up in __globals__ or __builtins__ + global_ns = func.__globals__ + builtin_ns = global_ns.get("__builtins__", builtins.__dict__) + if ismodule(builtin_ns): + builtin_ns = builtin_ns.__dict__ + global_vars = {} + builtin_vars = {} + unbound_names = set() + for name in code.co_names: + if name in ("None", "True", "False"): + # Because these used to be builtins instead of keywords, they + # may still show up as name references. We ignore them. 
+ continue + try: + global_vars[name] = global_ns[name] + except KeyError: + try: + builtin_vars[name] = builtin_ns[name] + except KeyError: + unbound_names.add(name) + + return ClosureVars(nonlocal_vars, global_vars, + builtin_vars, unbound_names) + # -------------------------------------------------- stack frame extraction Traceback = namedtuple('Traceback', 'filename lineno function code_context index') diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py --- a/Lib/test/test_inspect.py +++ b/Lib/test/test_inspect.py @@ -665,6 +665,105 @@ self.assertIn(('f', b.f), inspect.getmembers(b, inspect.ismethod)) +_global_ref = object() +class TestGetClosureVars(unittest.TestCase): + + def test_name_resolution(self): + # Basic test of the 4 different resolution mechanisms + def f(nonlocal_ref): + def g(local_ref): + print(local_ref, nonlocal_ref, _global_ref, unbound_ref) + return g + _arg = object() + nonlocal_vars = {"nonlocal_ref": _arg} + global_vars = {"_global_ref": _global_ref} + builtin_vars = {"print": print} + unbound_names = {"unbound_ref"} + expected = inspect.ClosureVars(nonlocal_vars, global_vars, + builtin_vars, unbound_names) + self.assertEqual(inspect.getclosurevars(f(_arg)), expected) + + def test_generator_closure(self): + def f(nonlocal_ref): + def g(local_ref): + print(local_ref, nonlocal_ref, _global_ref, unbound_ref) + yield + return g + _arg = object() + nonlocal_vars = {"nonlocal_ref": _arg} + global_vars = {"_global_ref": _global_ref} + builtin_vars = {"print": print} + unbound_names = {"unbound_ref"} + expected = inspect.ClosureVars(nonlocal_vars, global_vars, + builtin_vars, unbound_names) + self.assertEqual(inspect.getclosurevars(f(_arg)), expected) + + def test_method_closure(self): + class C: + def f(self, nonlocal_ref): + def g(local_ref): + print(local_ref, nonlocal_ref, _global_ref, unbound_ref) + return g + _arg = object() + nonlocal_vars = {"nonlocal_ref": _arg} + global_vars = {"_global_ref": _global_ref} + builtin_vars = {"print": print} + unbound_names = {"unbound_ref"} + expected = inspect.ClosureVars(nonlocal_vars, global_vars, + builtin_vars, unbound_names) + self.assertEqual(inspect.getclosurevars(C().f(_arg)), expected) + + def test_nonlocal_vars(self): + # More complex tests of nonlocal resolution + def _nonlocal_vars(f): + return inspect.getclosurevars(f).nonlocals + + def make_adder(x): + def add(y): + return x + y + return add + + def curry(func, arg1): + return lambda arg2: func(arg1, arg2) + + def less_than(a, b): + return a < b + + # The infamous Y combinator. 
+ def Y(le): + def g(f): + return le(lambda x: f(f)(x)) + Y.g_ref = g + return g(g) + + def check_y_combinator(func): + self.assertEqual(_nonlocal_vars(func), {'f': Y.g_ref}) + + inc = make_adder(1) + add_two = make_adder(2) + greater_than_five = curry(less_than, 5) + + self.assertEqual(_nonlocal_vars(inc), {'x': 1}) + self.assertEqual(_nonlocal_vars(add_two), {'x': 2}) + self.assertEqual(_nonlocal_vars(greater_than_five), + {'arg1': 5, 'func': less_than}) + self.assertEqual(_nonlocal_vars((lambda x: lambda y: x + y)(3)), + {'x': 3}) + Y(check_y_combinator) + + def test_getclosurevars_empty(self): + def foo(): pass + _empty = inspect.ClosureVars({}, {}, {}, set()) + self.assertEqual(inspect.getclosurevars(lambda: True), _empty) + self.assertEqual(inspect.getclosurevars(foo), _empty) + + def test_getclosurevars_error(self): + class T: pass + self.assertRaises(TypeError, inspect.getclosurevars, 1) + self.assertRaises(TypeError, inspect.getclosurevars, list) + self.assertRaises(TypeError, inspect.getclosurevars, {}) + + class TestGetcallargsFunctions(unittest.TestCase): def assertEqualCallArgs(self, func, call_params_string, locs=None): @@ -2100,7 +2199,7 @@ TestGetcallargsFunctions, TestGetcallargsMethods, TestGetcallargsUnboundMethods, TestGetattrStatic, TestGetGeneratorState, TestNoEOL, TestSignatureObject, TestSignatureBind, TestParameterObject, - TestBoundArguments + TestBoundArguments, TestGetClosureVars ) if __name__ == "__main__": diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -40,7 +40,10 @@ Library ------- -- Issues #11024: Fixes and additional tests for Time2Internaldate. +- Issue #13062: Added inspect.getclosurevars to simplify testing stateful + closures + +- Issue #11024: Fixes and additional tests for Time2Internaldate. - Issue #14626: Large refactoring of functions / parameters in the os module. Many functions now support "dir_fd" and "follow_symlinks" parameters; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 11:50:04 2012 From: python-checkins at python.org (mark.dickinson) Date: Sat, 23 Jun 2012 11:50:04 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogSXNzdWUgIzEyOTY1?= =?utf8?q?=3A_Clean_up_C-API_docs_for_PyLong=5FAsLong=28AndOverflow=29=3B_?= =?utf8?q?clarify_that?= Message-ID: http://hg.python.org/cpython/rev/5ca9a51f3d85 changeset: 77617:5ca9a51f3d85 branch: 3.2 parent: 77612:55849442d7a9 user: Mark Dickinson date: Sat Jun 23 10:49:12 2012 +0100 summary: Issue #12965: Clean up C-API docs for PyLong_AsLong(AndOverflow); clarify that __int__ will be called for non-PyLongs files: Doc/c-api/long.rst | 29 ++++++++++++++++------------- 1 files changed, 16 insertions(+), 13 deletions(-) diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst --- a/Doc/c-api/long.rst +++ b/Doc/c-api/long.rst @@ -108,26 +108,29 @@ .. XXX alias PyLong_AS_LONG (for now) -.. c:function:: long PyLong_AsLong(PyObject *pylong) +.. c:function:: long PyLong_AsLong(PyObject *obj) .. index:: single: LONG_MAX single: OverflowError (built-in exception) - Return a C :c:type:`long` representation of the contents of *pylong*. If - *pylong* is greater than :const:`LONG_MAX`, raise an :exc:`OverflowError`, - and return -1. Convert non-long objects automatically to long first, - and return -1 if that raises exceptions. + Return a C :c:type:`long` representation of *obj*. If *obj* is not an + instance of :c:type:`PyLongObject`, first call its :meth:`__int__` method + (if present) to convert it to a :c:type:`PyLongObject`. -.. 
c:function:: long PyLong_AsLongAndOverflow(PyObject *pylong, int *overflow) + Raise :exc:`OverflowError` if the value of *obj* is out of range for a + :c:type:`long`. - Return a C :c:type:`long` representation of the contents of - *pylong*. If *pylong* is greater than :const:`LONG_MAX` or less - than :const:`LONG_MIN`, set *\*overflow* to ``1`` or ``-1``, - respectively, and return ``-1``; otherwise, set *\*overflow* to - ``0``. If any other exception occurs (for example a TypeError or - MemoryError), then ``-1`` will be returned and *\*overflow* will - be ``0``. +.. c:function:: long PyLong_AsLongAndOverflow(PyObject *obj, int *overflow) + + Return a C :c:type:`long` representation of *obj*. If *obj* is not an + instance of :c:type:`PyLongObject`, first call its :meth:`__int__` method + (if present) to convert it to a :c:type:`PyLongObject`. + + If the value of *obj* is greater than :const:`LONG_MAX` or less than + :const:`LONG_MIN`, set *\*overflow* to ``1`` or ``-1``, respectively, and + return ``-1``; otherwise, set *\*overflow* to ``0``. If any other exception + occurs set *\*overflow* to ``0`` and return ``-1`` as usual. .. c:function:: PY_LONG_LONG PyLong_AsLongLongAndOverflow(PyObject *pylong, int *overflow) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 11:50:05 2012 From: python-checkins at python.org (mark.dickinson) Date: Sat, 23 Jun 2012 11:50:05 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Issue_=2312965=3A_Merge_from_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/63fc1552cd36 changeset: 77618:63fc1552cd36 parent: 77616:487fe648de56 parent: 77617:5ca9a51f3d85 user: Mark Dickinson date: Sat Jun 23 10:49:36 2012 +0100 summary: Issue #12965: Merge from 3.2 files: Doc/c-api/long.rst | 29 ++++++++++++++++------------- 1 files changed, 16 insertions(+), 13 deletions(-) diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst --- a/Doc/c-api/long.rst +++ b/Doc/c-api/long.rst @@ -122,26 +122,29 @@ .. XXX alias PyLong_AS_LONG (for now) -.. c:function:: long PyLong_AsLong(PyObject *pylong) +.. c:function:: long PyLong_AsLong(PyObject *obj) .. index:: single: LONG_MAX single: OverflowError (built-in exception) - Return a C :c:type:`long` representation of the contents of *pylong*. If - *pylong* is greater than :const:`LONG_MAX`, raise an :exc:`OverflowError`, - and return -1. Convert non-long objects automatically to long first, - and return -1 if that raises exceptions. + Return a C :c:type:`long` representation of *obj*. If *obj* is not an + instance of :c:type:`PyLongObject`, first call its :meth:`__int__` method + (if present) to convert it to a :c:type:`PyLongObject`. -.. c:function:: long PyLong_AsLongAndOverflow(PyObject *pylong, int *overflow) + Raise :exc:`OverflowError` if the value of *obj* is out of range for a + :c:type:`long`. - Return a C :c:type:`long` representation of the contents of - *pylong*. If *pylong* is greater than :const:`LONG_MAX` or less - than :const:`LONG_MIN`, set *\*overflow* to ``1`` or ``-1``, - respectively, and return ``-1``; otherwise, set *\*overflow* to - ``0``. If any other exception occurs (for example a TypeError or - MemoryError), then ``-1`` will be returned and *\*overflow* will - be ``0``. +.. c:function:: long PyLong_AsLongAndOverflow(PyObject *obj, int *overflow) + + Return a C :c:type:`long` representation of *obj*. 
If *obj* is not an + instance of :c:type:`PyLongObject`, first call its :meth:`__int__` method + (if present) to convert it to a :c:type:`PyLongObject`. + + If the value of *obj* is greater than :const:`LONG_MAX` or less than + :const:`LONG_MIN`, set *\*overflow* to ``1`` or ``-1``, respectively, and + return ``-1``; otherwise, set *\*overflow* to ``0``. If any other exception + occurs set *\*overflow* to ``0`` and return ``-1`` as usual. .. c:function:: PY_LONG_LONG PyLong_AsLongLongAndOverflow(PyObject *pylong, int *overflow) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 11:52:18 2012 From: python-checkins at python.org (nick.coghlan) Date: Sat, 23 Jun 2012 11:52:18 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Close_=2315153=3A_Added_ins?= =?utf8?q?pect=2Egetgeneratorlocals_to_simplify_whitebox_testing_of?= Message-ID: http://hg.python.org/cpython/rev/dd82a910eb07 changeset: 77619:dd82a910eb07 user: Nick Coghlan date: Sat Jun 23 19:52:05 2012 +1000 summary: Close #15153: Added inspect.getgeneratorlocals to simplify whitebox testing of generator state updates files: Doc/library/inspect.rst | 24 ++++++++++++++ Doc/whatsnew/3.3.rst | 7 ++++ Lib/inspect.py | 18 ++++++++++ Lib/test/test_inspect.py | 46 ++++++++++++++++++++++++++++ Misc/NEWS | 3 + 5 files changed, 98 insertions(+), 0 deletions(-) diff --git a/Doc/library/inspect.rst b/Doc/library/inspect.rst --- a/Doc/library/inspect.rst +++ b/Doc/library/inspect.rst @@ -676,3 +676,27 @@ * GEN_CLOSED: Execution has completed. .. versionadded:: 3.2 + +The current internal state of the generator can also be queried. This is +mostly useful for testing purposes, to ensure that internal state is being +updated as expected: + +.. function:: getgeneratorlocals(generator) + + Get the mapping of live local variables in *generator* to their current + values. A dictionary is returned that maps from variable names to values. + This is the equivalent of calling :func:`locals` in the body of the + generator, and all the same caveats apply. + + If *generator* is a :term:`generator` with no currently associated frame, + then an empty dictionary is returned. :exc:`TypeError` is raised if + *generator* is not a Python generator object. + + .. impl-detail:: + + This function relies on the generator exposing a Python stack frame + for introspection, which isn't guaranteed to be the case in all + implementations of Python. In such cases, this function will always + return an empty dictionary. + + .. versionadded:: 3.3 diff --git a/Doc/whatsnew/3.3.rst b/Doc/whatsnew/3.3.rst --- a/Doc/whatsnew/3.3.rst +++ b/Doc/whatsnew/3.3.rst @@ -1037,6 +1037,13 @@ (Contributed by Meador Inge and Nick Coghlan in :issue:`13062`) +A new :func:`~inspect.getgeneratorlocals` function has been added. This +function reports the current binding of local variables in the generator's +stack frame, making it easier to verify correct internal state when testing +generators. + +(Contributed by Meador Inge in :issue:`15153`) + io -- diff --git a/Lib/inspect.py b/Lib/inspect.py --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -1259,6 +1259,8 @@ raise AttributeError(attr) +# ------------------------------------------------ generator introspection + GEN_CREATED = 'GEN_CREATED' GEN_RUNNING = 'GEN_RUNNING' GEN_SUSPENDED = 'GEN_SUSPENDED' @@ -1282,6 +1284,22 @@ return GEN_SUSPENDED +def getgeneratorlocals(generator): + """ + Get the mapping of generator local variables to their current values. 
+ + A dict is returned, with the keys the local variable names and values the + bound values.""" + + if not isgenerator(generator): + raise TypeError("'{!r}' is not a Python generator".format(generator)) + + frame = getattr(generator, "gi_frame", None) + if frame is not None: + return generator.gi_frame.f_locals + else: + return {} + ############################################################################### ### Function Signature Object (PEP 362) ############################################################################### diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py --- a/Lib/test/test_inspect.py +++ b/Lib/test/test_inspect.py @@ -1271,6 +1271,52 @@ self.assertIn(name, repr(state)) self.assertIn(name, str(state)) + def test_getgeneratorlocals(self): + def each(lst, a=None): + b=(1, 2, 3) + for v in lst: + if v == 3: + c = 12 + yield v + + numbers = each([1, 2, 3]) + self.assertEqual(inspect.getgeneratorlocals(numbers), + {'a': None, 'lst': [1, 2, 3]}) + next(numbers) + self.assertEqual(inspect.getgeneratorlocals(numbers), + {'a': None, 'lst': [1, 2, 3], 'v': 1, + 'b': (1, 2, 3)}) + next(numbers) + self.assertEqual(inspect.getgeneratorlocals(numbers), + {'a': None, 'lst': [1, 2, 3], 'v': 2, + 'b': (1, 2, 3)}) + next(numbers) + self.assertEqual(inspect.getgeneratorlocals(numbers), + {'a': None, 'lst': [1, 2, 3], 'v': 3, + 'b': (1, 2, 3), 'c': 12}) + try: + next(numbers) + except StopIteration: + pass + self.assertEqual(inspect.getgeneratorlocals(numbers), {}) + + def test_getgeneratorlocals_empty(self): + def yield_one(): + yield 1 + one = yield_one() + self.assertEqual(inspect.getgeneratorlocals(one), {}) + try: + next(one) + except StopIteration: + pass + self.assertEqual(inspect.getgeneratorlocals(one), {}) + + def test_getgeneratorlocals_error(self): + self.assertRaises(TypeError, inspect.getgeneratorlocals, 1) + self.assertRaises(TypeError, inspect.getgeneratorlocals, lambda x: True) + self.assertRaises(TypeError, inspect.getgeneratorlocals, set) + self.assertRaises(TypeError, inspect.getgeneratorlocals, (2,3)) + class TestSignatureObject(unittest.TestCase): @staticmethod diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -40,6 +40,9 @@ Library ------- +- Issue #15153: Added inspect.getgeneratorlocals to simplify white box + testing of generator state updates + - Issue #13062: Added inspect.getclosurevars to simplify testing stateful closures -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 12:15:10 2012 From: python-checkins at python.org (mark.dickinson) Date: Sat, 23 Jun 2012 12:15:10 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogSXNzdWUgIzEyOTY1?= =?utf8?q?=3A_Clean_up_C-API_docs_for_PyLong=5FAsLongLong=28AndOverflow=29?= =?utf8?q?=3B_clarify?= Message-ID: http://hg.python.org/cpython/rev/3ace8e17074a changeset: 77620:3ace8e17074a branch: 3.2 parent: 77617:5ca9a51f3d85 user: Mark Dickinson date: Sat Jun 23 11:14:22 2012 +0100 summary: Issue #12965: Clean up C-API docs for PyLong_AsLongLong(AndOverflow); clarify that __int__ will be called for non-PyLongs files: Doc/c-api/long.rst | 41 +++++++++++++++++++-------------- 1 files changed, 23 insertions(+), 18 deletions(-) diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst --- a/Doc/c-api/long.rst +++ b/Doc/c-api/long.rst @@ -121,6 +121,7 @@ Raise :exc:`OverflowError` if the value of *obj* is out of range for a :c:type:`long`. + .. 
c:function:: long PyLong_AsLongAndOverflow(PyObject *obj, int *overflow) Return a C :c:type:`long` representation of *obj*. If *obj* is not an @@ -133,15 +134,29 @@ occurs set *\*overflow* to ``0`` and return ``-1`` as usual. -.. c:function:: PY_LONG_LONG PyLong_AsLongLongAndOverflow(PyObject *pylong, int *overflow) +.. c:function:: PY_LONG_LONG PyLong_AsLongLong(PyObject *obj) - Return a C :c:type:`long long` representation of the contents of - *pylong*. If *pylong* is greater than :const:`PY_LLONG_MAX` or less - than :const:`PY_LLONG_MIN`, set *\*overflow* to ``1`` or ``-1``, - respectively, and return ``-1``; otherwise, set *\*overflow* to - ``0``. If any other exception occurs (for example a TypeError or - MemoryError), then ``-1`` will be returned and *\*overflow* will - be ``0``. + .. index:: + single: OverflowError (built-in exception) + + Return a C :c:type:`long long` representation of *obj*. If *obj* is not an + instance of :c:type:`PyLongObject`, first call its :meth:`__int__` method + (if present) to convert it to a :c:type:`PyLongObject`. + + Raise :exc:`OverflowError` if the value of *obj* is out of range for a + :c:type:`long`. + + +.. c:function:: PY_LONG_LONG PyLong_AsLongLongAndOverflow(PyObject *obj, int *overflow) + + Return a C :c:type:`long long` representation of *obj*. If *obj* is not an + instance of :c:type:`PyLongObject`, first call its :meth:`__int__` method + (if present) to convert it to a :c:type:`PyLongObject`. + + If the value of *obj* is greater than :const:`PY_LLONG_MAX` or less than + :const:`PY_LLONG_MIN`, set *\*overflow* to ``1`` or ``-1``, respectively, + and return ``-1``; otherwise, set *\*overflow* to ``0``. If any other + exception occurs set *\*overflow* to ``0`` and return ``-1`` as usual. .. versionadded:: 3.2 @@ -175,16 +190,6 @@ :exc:`OverflowError` is raised. -.. c:function:: PY_LONG_LONG PyLong_AsLongLong(PyObject *pylong) - - .. index:: - single: OverflowError (built-in exception) - - Return a C :c:type:`long long` from a Python integer. If *pylong* - cannot be represented as a :c:type:`long long`, an - :exc:`OverflowError` is raised and ``-1`` is returned. - - .. c:function:: unsigned PY_LONG_LONG PyLong_AsUnsignedLongLong(PyObject *pylong) .. index:: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 12:15:11 2012 From: python-checkins at python.org (mark.dickinson) Date: Sat, 23 Jun 2012 12:15:11 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Issue_=2312965=3A_Merge_from_3=2E2=2E?= Message-ID: http://hg.python.org/cpython/rev/85683f005fc8 changeset: 77621:85683f005fc8 parent: 77619:dd82a910eb07 parent: 77620:3ace8e17074a user: Mark Dickinson date: Sat Jun 23 11:14:55 2012 +0100 summary: Issue #12965: Merge from 3.2. files: Doc/c-api/long.rst | 41 +++++++++++++++++++-------------- 1 files changed, 23 insertions(+), 18 deletions(-) diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst --- a/Doc/c-api/long.rst +++ b/Doc/c-api/long.rst @@ -135,6 +135,7 @@ Raise :exc:`OverflowError` if the value of *obj* is out of range for a :c:type:`long`. + .. c:function:: long PyLong_AsLongAndOverflow(PyObject *obj, int *overflow) Return a C :c:type:`long` representation of *obj*. If *obj* is not an @@ -147,15 +148,29 @@ occurs set *\*overflow* to ``0`` and return ``-1`` as usual. -.. c:function:: PY_LONG_LONG PyLong_AsLongLongAndOverflow(PyObject *pylong, int *overflow) +.. 
c:function:: PY_LONG_LONG PyLong_AsLongLong(PyObject *obj) - Return a C :c:type:`long long` representation of the contents of - *pylong*. If *pylong* is greater than :const:`PY_LLONG_MAX` or less - than :const:`PY_LLONG_MIN`, set *\*overflow* to ``1`` or ``-1``, - respectively, and return ``-1``; otherwise, set *\*overflow* to - ``0``. If any other exception occurs (for example a TypeError or - MemoryError), then ``-1`` will be returned and *\*overflow* will - be ``0``. + .. index:: + single: OverflowError (built-in exception) + + Return a C :c:type:`long long` representation of *obj*. If *obj* is not an + instance of :c:type:`PyLongObject`, first call its :meth:`__int__` method + (if present) to convert it to a :c:type:`PyLongObject`. + + Raise :exc:`OverflowError` if the value of *obj* is out of range for a + :c:type:`long`. + + +.. c:function:: PY_LONG_LONG PyLong_AsLongLongAndOverflow(PyObject *obj, int *overflow) + + Return a C :c:type:`long long` representation of *obj*. If *obj* is not an + instance of :c:type:`PyLongObject`, first call its :meth:`__int__` method + (if present) to convert it to a :c:type:`PyLongObject`. + + If the value of *obj* is greater than :const:`PY_LLONG_MAX` or less than + :const:`PY_LLONG_MIN`, set *\*overflow* to ``1`` or ``-1``, respectively, + and return ``-1``; otherwise, set *\*overflow* to ``0``. If any other + exception occurs set *\*overflow* to ``0`` and return ``-1`` as usual. .. versionadded:: 3.2 @@ -189,16 +204,6 @@ :exc:`OverflowError` is raised. -.. c:function:: PY_LONG_LONG PyLong_AsLongLong(PyObject *pylong) - - .. index:: - single: OverflowError (built-in exception) - - Return a C :c:type:`long long` from a Python integer. If *pylong* - cannot be represented as a :c:type:`long long`, an - :exc:`OverflowError` is raised and ``-1`` is returned. - - .. c:function:: unsigned PY_LONG_LONG PyLong_AsUnsignedLongLong(PyObject *pylong) .. index:: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 12:48:45 2012 From: python-checkins at python.org (georg.brandl) Date: Sat, 23 Jun 2012 12:48:45 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Try_to_fix_shutil=2Ewhich?= =?utf8?q?=28=29_tests_on_Windows_by_fixing_a_typo_introduced_in?= Message-ID: http://hg.python.org/cpython/rev/5a7080995fd9 changeset: 77622:5a7080995fd9 user: Georg Brandl date: Sat Jun 23 12:48:40 2012 +0200 summary: Try to fix shutil.which() tests on Windows by fixing a typo introduced in 27f9c26fdd8b in posix_access(). files: Modules/posixmodule.c | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -2430,7 +2430,7 @@ */ return_value = PyBool_FromLong( (attr != 0xFFFFFFFF) && - ((mode & 2) || + (!(mode & 2) || !(attr & FILE_ATTRIBUTE_READONLY) || (attr & FILE_ATTRIBUTE_DIRECTORY))); #else -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 13:13:28 2012 From: python-checkins at python.org (mark.dickinson) Date: Sat, 23 Jun 2012 13:13:28 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogSXNzdWUgIzEyOTY1?= =?utf8?q?=3A__More_PyLong=5FAs*_clarifications=2E__Thanks_Stefan_Krah=2E?= Message-ID: http://hg.python.org/cpython/rev/e1416a4d728a changeset: 77623:e1416a4d728a branch: 3.2 parent: 77620:3ace8e17074a user: Mark Dickinson date: Sat Jun 23 12:12:52 2012 +0100 summary: Issue #12965: More PyLong_As* clarifications. Thanks Stefan Krah. 
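As a minimal sketch, the overflow semantics documented in the hunks above can be observed from Python through ctypes; this assumes a CPython interpreter (ctypes.pythonapi is CPython-specific) and is illustrative rather than part of the patch:

    import ctypes

    # Call the C API directly to watch the documented behaviour:
    # value in range -> (value, overflow=0); too large -> (-1, 1); too small -> (-1, -1).
    as_long = ctypes.pythonapi.PyLong_AsLongAndOverflow
    as_long.restype = ctypes.c_long
    as_long.argtypes = [ctypes.py_object, ctypes.POINTER(ctypes.c_int)]

    overflow = ctypes.c_int(0)
    print(as_long(12345, ctypes.byref(overflow)), overflow.value)      # 12345 0
    print(as_long(2 ** 200, ctypes.byref(overflow)), overflow.value)   # -1 1
    print(as_long(-2 ** 200, ctypes.byref(overflow)), overflow.value)  # -1 -1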
files: Doc/c-api/long.rst | 61 +++++++++++++++++++++------------ 1 files changed, 39 insertions(+), 22 deletions(-) diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst --- a/Doc/c-api/long.rst +++ b/Doc/c-api/long.rst @@ -167,9 +167,11 @@ single: PY_SSIZE_T_MAX single: OverflowError (built-in exception) - Return a C :c:type:`Py_ssize_t` representation of the contents of *pylong*. - If *pylong* is greater than :const:`PY_SSIZE_T_MAX`, an :exc:`OverflowError` - is raised and ``-1`` will be returned. + Return a C :c:type:`Py_ssize_t` representation of *pylong*. *pylong* must + be an instance of :c:type:`PyLongObject`. + + Raise :exc:`OverflowError` if the value of *pylong* is out of range for a + :c:type:`Py_ssize_t`. .. c:function:: unsigned long PyLong_AsUnsignedLong(PyObject *pylong) @@ -178,16 +180,20 @@ single: ULONG_MAX single: OverflowError (built-in exception) - Return a C :c:type:`unsigned long` representation of the contents of *pylong*. - If *pylong* is greater than :const:`ULONG_MAX`, an :exc:`OverflowError` is - raised. + Return a C :c:type:`unsigned long` representation of *pylong*. *pylong* + must be an instance of :c:type:`PyLongObject`. + + Raise :exc:`OverflowError` if the value of *pylong* is out of range for a + :c:type:`unsigned long`. .. c:function:: size_t PyLong_AsSize_t(PyObject *pylong) - Return a :c:type:`size_t` representation of the contents of *pylong*. If - *pylong* is greater than the maximum value for a :c:type:`size_t`, an - :exc:`OverflowError` is raised. + Return a C :c:type:`size_t` representation of of *pylong*. *pylong* must be + an instance of :c:type:`PyLongObject`. + + Raise :exc:`OverflowError` if the value of *pylong* is out of range for a + :c:type:`size_t`. .. c:function:: unsigned PY_LONG_LONG PyLong_AsUnsignedLongLong(PyObject *pylong) @@ -195,32 +201,43 @@ .. index:: single: OverflowError (built-in exception) - Return a C :c:type:`unsigned long long` from a Python integer. If - *pylong* cannot be represented as an :c:type:`unsigned long long`, - an :exc:`OverflowError` is raised and ``(unsigned long long)-1`` is - returned. + Return a C :c:type:`unsigned PY_LONG_LONG` representation of of *pylong*. + *pylong* must be an instance of :c:type:`PyLongObject`. + + Raise :exc:`OverflowError` if the value of *pylong* is out of range for an + :c:type:`unsigned PY_LONG_LONG`. .. versionchanged:: 3.1 A negative *pylong* now raises :exc:`OverflowError`, not :exc:`TypeError`. -.. c:function:: unsigned long PyLong_AsUnsignedLongMask(PyObject *io) +.. c:function:: unsigned long PyLong_AsUnsignedLongMask(PyObject *obj) - Return a C :c:type:`unsigned long` from a Python integer, without checking for - overflow. + Return a C :c:type:`unsigned long` representation of *obj*. If *obj* + is not an instance of :c:type:`PyLongObject`, first call its :meth:`__int__` + method (if present) to convert it to a :c:type:`PyLongObject`. + If the value of *obj* is out of range for an :c:type:`unsigned long`, + return the reduction of that value modulo :const:`ULONG_MAX + 1`. -.. c:function:: unsigned PY_LONG_LONG PyLong_AsUnsignedLongLongMask(PyObject *io) - Return a C :c:type:`unsigned long long` from a Python integer, without - checking for overflow. +.. c:function:: unsigned PY_LONG_LONG PyLong_AsUnsignedLongLongMask(PyObject *obj) + + Return a C :c:type:`unsigned long long` representation of *obj*. If *obj* + is not an instance of :c:type:`PyLongObject`, first call its :meth:`__int__` + method (if present) to convert it to a :c:type:`PyLongObject`. 
+ + If the value of *obj* is out of range for an :c:type:`unsigned long long`, + return the reduction of that value modulo :const:`PY_ULLONG_MAX + 1`. .. c:function:: double PyLong_AsDouble(PyObject *pylong) - Return a C :c:type:`double` representation of the contents of *pylong*. If - *pylong* cannot be approximately represented as a :c:type:`double`, an - :exc:`OverflowError` exception is raised and ``-1.0`` will be returned. + Return a C :c:type:`double` representation of *pylong*. *pylong* must be + an instance of :c:type:`PyLongObject`. + + Raise :exc:`OverflowError` if the value of *pylong* is out of range for a + :c:type:`double`. .. c:function:: void* PyLong_AsVoidPtr(PyObject *pylong) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 13:13:28 2012 From: python-checkins at python.org (mark.dickinson) Date: Sat, 23 Jun 2012 13:13:28 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Issue_=2312965=3A_Merge_from_3=2E2=2E?= Message-ID: http://hg.python.org/cpython/rev/349bc58e8c66 changeset: 77624:349bc58e8c66 parent: 77622:5a7080995fd9 parent: 77623:e1416a4d728a user: Mark Dickinson date: Sat Jun 23 12:13:15 2012 +0100 summary: Issue #12965: Merge from 3.2. files: Doc/c-api/long.rst | 61 +++++++++++++++++++++------------ 1 files changed, 39 insertions(+), 22 deletions(-) diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst --- a/Doc/c-api/long.rst +++ b/Doc/c-api/long.rst @@ -181,9 +181,11 @@ single: PY_SSIZE_T_MAX single: OverflowError (built-in exception) - Return a C :c:type:`Py_ssize_t` representation of the contents of *pylong*. - If *pylong* is greater than :const:`PY_SSIZE_T_MAX`, an :exc:`OverflowError` - is raised and ``-1`` will be returned. + Return a C :c:type:`Py_ssize_t` representation of *pylong*. *pylong* must + be an instance of :c:type:`PyLongObject`. + + Raise :exc:`OverflowError` if the value of *pylong* is out of range for a + :c:type:`Py_ssize_t`. .. c:function:: unsigned long PyLong_AsUnsignedLong(PyObject *pylong) @@ -192,16 +194,20 @@ single: ULONG_MAX single: OverflowError (built-in exception) - Return a C :c:type:`unsigned long` representation of the contents of *pylong*. - If *pylong* is greater than :const:`ULONG_MAX`, an :exc:`OverflowError` is - raised. + Return a C :c:type:`unsigned long` representation of *pylong*. *pylong* + must be an instance of :c:type:`PyLongObject`. + + Raise :exc:`OverflowError` if the value of *pylong* is out of range for a + :c:type:`unsigned long`. .. c:function:: size_t PyLong_AsSize_t(PyObject *pylong) - Return a :c:type:`size_t` representation of the contents of *pylong*. If - *pylong* is greater than the maximum value for a :c:type:`size_t`, an - :exc:`OverflowError` is raised. + Return a C :c:type:`size_t` representation of of *pylong*. *pylong* must be + an instance of :c:type:`PyLongObject`. + + Raise :exc:`OverflowError` if the value of *pylong* is out of range for a + :c:type:`size_t`. .. c:function:: unsigned PY_LONG_LONG PyLong_AsUnsignedLongLong(PyObject *pylong) @@ -209,32 +215,43 @@ .. index:: single: OverflowError (built-in exception) - Return a C :c:type:`unsigned long long` from a Python integer. If - *pylong* cannot be represented as an :c:type:`unsigned long long`, - an :exc:`OverflowError` is raised and ``(unsigned long long)-1`` is - returned. + Return a C :c:type:`unsigned PY_LONG_LONG` representation of of *pylong*. + *pylong* must be an instance of :c:type:`PyLongObject`. 
+ + Raise :exc:`OverflowError` if the value of *pylong* is out of range for an + :c:type:`unsigned PY_LONG_LONG`. .. versionchanged:: 3.1 A negative *pylong* now raises :exc:`OverflowError`, not :exc:`TypeError`. -.. c:function:: unsigned long PyLong_AsUnsignedLongMask(PyObject *io) +.. c:function:: unsigned long PyLong_AsUnsignedLongMask(PyObject *obj) - Return a C :c:type:`unsigned long` from a Python integer, without checking for - overflow. + Return a C :c:type:`unsigned long` representation of *obj*. If *obj* + is not an instance of :c:type:`PyLongObject`, first call its :meth:`__int__` + method (if present) to convert it to a :c:type:`PyLongObject`. + If the value of *obj* is out of range for an :c:type:`unsigned long`, + return the reduction of that value modulo :const:`ULONG_MAX + 1`. -.. c:function:: unsigned PY_LONG_LONG PyLong_AsUnsignedLongLongMask(PyObject *io) - Return a C :c:type:`unsigned long long` from a Python integer, without - checking for overflow. +.. c:function:: unsigned PY_LONG_LONG PyLong_AsUnsignedLongLongMask(PyObject *obj) + + Return a C :c:type:`unsigned long long` representation of *obj*. If *obj* + is not an instance of :c:type:`PyLongObject`, first call its :meth:`__int__` + method (if present) to convert it to a :c:type:`PyLongObject`. + + If the value of *obj* is out of range for an :c:type:`unsigned long long`, + return the reduction of that value modulo :const:`PY_ULLONG_MAX + 1`. .. c:function:: double PyLong_AsDouble(PyObject *pylong) - Return a C :c:type:`double` representation of the contents of *pylong*. If - *pylong* cannot be approximately represented as a :c:type:`double`, an - :exc:`OverflowError` exception is raised and ``-1.0`` will be returned. + Return a C :c:type:`double` representation of *pylong*. *pylong* must be + an instance of :c:type:`PyLongObject`. + + Raise :exc:`OverflowError` if the value of *pylong* is out of range for a + :c:type:`double`. .. c:function:: void* PyLong_AsVoidPtr(PyObject *pylong) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 13:32:50 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sat, 23 Jun 2012 13:32:50 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=233665=3A_=5Cu_and_?= =?utf8?q?=5CU_escapes_are_now_supported_in_unicode_regular_expressions=2E?= Message-ID: http://hg.python.org/cpython/rev/b1dbd8827e79 changeset: 77625:b1dbd8827e79 user: Antoine Pitrou date: Sat Jun 23 13:29:19 2012 +0200 summary: Issue #3665: \u and \U escapes are now supported in unicode regular expressions. Patch by Serhiy Storchaka. files: Doc/library/re.rst | 11 +++- Lib/sre_parse.py | 66 ++++++++++++++++----- Lib/test/test_re.py | 98 +++++++++++++++++++++++++++----- Misc/NEWS | 3 + 4 files changed, 144 insertions(+), 34 deletions(-) diff --git a/Doc/library/re.rst b/Doc/library/re.rst --- a/Doc/library/re.rst +++ b/Doc/library/re.rst @@ -414,17 +414,24 @@ accepted by the regular expression parser:: \a \b \f \n - \r \t \v \x - \\ + \r \t \u \U + \v \x \\ (Note that ``\b`` is used to represent word boundaries, and means "backspace" only inside character classes.) +``'\u'`` and ``'\U'`` escape sequences are only recognized in Unicode +patterns. In bytes patterns they are not treated specially. + Octal escapes are included in a limited form. If the first digit is a 0, or if there are three octal digits, it is considered an octal escape. Otherwise, it is a group reference. As for string literals, octal escapes are always at most three digits in length. +.. 
versionchanged:: 3.3 + The ``'\u'`` and ``'\U'`` escape sequences have been added. + + .. _contents-of-module-re: diff --git a/Lib/sre_parse.py b/Lib/sre_parse.py --- a/Lib/sre_parse.py +++ b/Lib/sre_parse.py @@ -177,6 +177,7 @@ class Tokenizer: def __init__(self, string): + self.istext = isinstance(string, str) self.string = string self.index = 0 self.__next() @@ -187,14 +188,14 @@ char = self.string[self.index:self.index+1] # Special case for the str8, since indexing returns a integer # XXX This is only needed for test_bug_926075 in test_re.py - if char and isinstance(char, bytes): + if char and not self.istext: char = chr(char[0]) if char == "\\": try: c = self.string[self.index + 1] except IndexError: raise error("bogus escape (end of line)") - if isinstance(self.string, bytes): + if not self.istext: c = chr(c) char = char + c self.index = self.index + len(char) @@ -209,6 +210,15 @@ this = self.next self.__next() return this + def getwhile(self, n, charset): + result = '' + for _ in range(n): + c = self.next + if c not in charset: + break + result += c + self.__next() + return result def tell(self): return self.index, self.next def seek(self, index): @@ -241,20 +251,30 @@ c = escape[1:2] if c == "x": # hexadecimal escape (exactly two digits) - while source.next in HEXDIGITS and len(escape) < 4: - escape = escape + source.get() - escape = escape[2:] - if len(escape) != 2: - raise error("bogus escape: %s" % repr("\\" + escape)) - return LITERAL, int(escape, 16) & 0xff + escape += source.getwhile(2, HEXDIGITS) + if len(escape) != 4: + raise ValueError + return LITERAL, int(escape[2:], 16) & 0xff + elif c == "u" and source.istext: + # unicode escape (exactly four digits) + escape += source.getwhile(4, HEXDIGITS) + if len(escape) != 6: + raise ValueError + return LITERAL, int(escape[2:], 16) + elif c == "U" and source.istext: + # unicode escape (exactly eight digits) + escape += source.getwhile(8, HEXDIGITS) + if len(escape) != 10: + raise ValueError + c = int(escape[2:], 16) + chr(c) # raise ValueError for invalid code + return LITERAL, c elif c in OCTDIGITS: # octal escape (up to three digits) - while source.next in OCTDIGITS and len(escape) < 4: - escape = escape + source.get() - escape = escape[1:] - return LITERAL, int(escape, 8) & 0xff + escape += source.getwhile(2, OCTDIGITS) + return LITERAL, int(escape[1:], 8) & 0xff elif c in DIGITS: - raise error("bogus escape: %s" % repr(escape)) + raise ValueError if len(escape) == 2: return LITERAL, ord(escape[1]) except ValueError: @@ -273,15 +293,27 @@ c = escape[1:2] if c == "x": # hexadecimal escape - while source.next in HEXDIGITS and len(escape) < 4: - escape = escape + source.get() + escape += source.getwhile(2, HEXDIGITS) if len(escape) != 4: raise ValueError return LITERAL, int(escape[2:], 16) & 0xff + elif c == "u" and source.istext: + # unicode escape (exactly four digits) + escape += source.getwhile(4, HEXDIGITS) + if len(escape) != 6: + raise ValueError + return LITERAL, int(escape[2:], 16) + elif c == "U" and source.istext: + # unicode escape (exactly eight digits) + escape += source.getwhile(8, HEXDIGITS) + if len(escape) != 10: + raise ValueError + c = int(escape[2:], 16) + chr(c) # raise ValueError for invalid code + return LITERAL, c elif c == "0": # octal escape - while source.next in OCTDIGITS and len(escape) < 4: - escape = escape + source.get() + escape += source.getwhile(2, OCTDIGITS) return LITERAL, int(escape[1:], 8) & 0xff elif c in DIGITS: # octal escape *or* decimal group reference (sigh) diff --git 
a/Lib/test/test_re.py b/Lib/test/test_re.py --- a/Lib/test/test_re.py +++ b/Lib/test/test_re.py @@ -526,24 +526,92 @@ self.assertNotEqual(re.compile('^pattern$', flag), None) def test_sre_character_literals(self): - for i in [0, 8, 16, 32, 64, 127, 128, 255]: - self.assertNotEqual(re.match(r"\%03o" % i, chr(i)), None) - self.assertNotEqual(re.match(r"\%03o0" % i, chr(i)+"0"), None) - self.assertNotEqual(re.match(r"\%03o8" % i, chr(i)+"8"), None) - self.assertNotEqual(re.match(r"\x%02x" % i, chr(i)), None) - self.assertNotEqual(re.match(r"\x%02x0" % i, chr(i)+"0"), None) - self.assertNotEqual(re.match(r"\x%02xz" % i, chr(i)+"z"), None) - self.assertRaises(re.error, re.match, "\911", "") + for i in [0, 8, 16, 32, 64, 127, 128, 255, 256, 0xFFFF, 0x10000, 0x10FFFF]: + if i < 256: + self.assertIsNotNone(re.match(r"\%03o" % i, chr(i))) + self.assertIsNotNone(re.match(r"\%03o0" % i, chr(i)+"0")) + self.assertIsNotNone(re.match(r"\%03o8" % i, chr(i)+"8")) + self.assertIsNotNone(re.match(r"\x%02x" % i, chr(i))) + self.assertIsNotNone(re.match(r"\x%02x0" % i, chr(i)+"0")) + self.assertIsNotNone(re.match(r"\x%02xz" % i, chr(i)+"z")) + if i < 0x10000: + self.assertIsNotNone(re.match(r"\u%04x" % i, chr(i))) + self.assertIsNotNone(re.match(r"\u%04x0" % i, chr(i)+"0")) + self.assertIsNotNone(re.match(r"\u%04xz" % i, chr(i)+"z")) + self.assertIsNotNone(re.match(r"\U%08x" % i, chr(i))) + self.assertIsNotNone(re.match(r"\U%08x0" % i, chr(i)+"0")) + self.assertIsNotNone(re.match(r"\U%08xz" % i, chr(i)+"z")) + self.assertIsNotNone(re.match(r"\0", "\000")) + self.assertIsNotNone(re.match(r"\08", "\0008")) + self.assertIsNotNone(re.match(r"\01", "\001")) + self.assertIsNotNone(re.match(r"\018", "\0018")) + self.assertIsNotNone(re.match(r"\567", chr(0o167))) + self.assertRaises(re.error, re.match, r"\911", "") + self.assertRaises(re.error, re.match, r"\x1", "") + self.assertRaises(re.error, re.match, r"\x1z", "") + self.assertRaises(re.error, re.match, r"\u123", "") + self.assertRaises(re.error, re.match, r"\u123z", "") + self.assertRaises(re.error, re.match, r"\U0001234", "") + self.assertRaises(re.error, re.match, r"\U0001234z", "") + self.assertRaises(re.error, re.match, r"\U00110000", "") def test_sre_character_class_literals(self): + for i in [0, 8, 16, 32, 64, 127, 128, 255, 256, 0xFFFF, 0x10000, 0x10FFFF]: + if i < 256: + self.assertIsNotNone(re.match(r"[\%o]" % i, chr(i))) + self.assertIsNotNone(re.match(r"[\%o8]" % i, chr(i))) + self.assertIsNotNone(re.match(r"[\%03o]" % i, chr(i))) + self.assertIsNotNone(re.match(r"[\%03o0]" % i, chr(i))) + self.assertIsNotNone(re.match(r"[\%03o8]" % i, chr(i))) + self.assertIsNotNone(re.match(r"[\x%02x]" % i, chr(i))) + self.assertIsNotNone(re.match(r"[\x%02x0]" % i, chr(i))) + self.assertIsNotNone(re.match(r"[\x%02xz]" % i, chr(i))) + if i < 0x10000: + self.assertIsNotNone(re.match(r"[\u%04x]" % i, chr(i))) + self.assertIsNotNone(re.match(r"[\u%04x0]" % i, chr(i))) + self.assertIsNotNone(re.match(r"[\u%04xz]" % i, chr(i))) + self.assertIsNotNone(re.match(r"[\U%08x]" % i, chr(i))) + self.assertIsNotNone(re.match(r"[\U%08x0]" % i, chr(i)+"0")) + self.assertIsNotNone(re.match(r"[\U%08xz]" % i, chr(i)+"z")) + self.assertRaises(re.error, re.match, r"[\911]", "") + self.assertRaises(re.error, re.match, r"[\x1z]", "") + self.assertRaises(re.error, re.match, r"[\u123z]", "") + self.assertRaises(re.error, re.match, r"[\U0001234z]", "") + self.assertRaises(re.error, re.match, r"[\U00110000]", "") + + def test_sre_byte_literals(self): for i in [0, 8, 16, 32, 64, 127, 128, 255]: 
- self.assertNotEqual(re.match(r"[\%03o]" % i, chr(i)), None) - self.assertNotEqual(re.match(r"[\%03o0]" % i, chr(i)), None) - self.assertNotEqual(re.match(r"[\%03o8]" % i, chr(i)), None) - self.assertNotEqual(re.match(r"[\x%02x]" % i, chr(i)), None) - self.assertNotEqual(re.match(r"[\x%02x0]" % i, chr(i)), None) - self.assertNotEqual(re.match(r"[\x%02xz]" % i, chr(i)), None) - self.assertRaises(re.error, re.match, "[\911]", "") + self.assertIsNotNone(re.match((r"\%03o" % i).encode(), bytes([i]))) + self.assertIsNotNone(re.match((r"\%03o0" % i).encode(), bytes([i])+b"0")) + self.assertIsNotNone(re.match((r"\%03o8" % i).encode(), bytes([i])+b"8")) + self.assertIsNotNone(re.match((r"\x%02x" % i).encode(), bytes([i]))) + self.assertIsNotNone(re.match((r"\x%02x0" % i).encode(), bytes([i])+b"0")) + self.assertIsNotNone(re.match((r"\x%02xz" % i).encode(), bytes([i])+b"z")) + self.assertIsNotNone(re.match(br"\u", b'u')) + self.assertIsNotNone(re.match(br"\U", b'U')) + self.assertIsNotNone(re.match(br"\0", b"\000")) + self.assertIsNotNone(re.match(br"\08", b"\0008")) + self.assertIsNotNone(re.match(br"\01", b"\001")) + self.assertIsNotNone(re.match(br"\018", b"\0018")) + self.assertIsNotNone(re.match(br"\567", bytes([0o167]))) + self.assertRaises(re.error, re.match, br"\911", b"") + self.assertRaises(re.error, re.match, br"\x1", b"") + self.assertRaises(re.error, re.match, br"\x1z", b"") + + def test_sre_byte_class_literals(self): + for i in [0, 8, 16, 32, 64, 127, 128, 255]: + self.assertIsNotNone(re.match((r"[\%o]" % i).encode(), bytes([i]))) + self.assertIsNotNone(re.match((r"[\%o8]" % i).encode(), bytes([i]))) + self.assertIsNotNone(re.match((r"[\%03o]" % i).encode(), bytes([i]))) + self.assertIsNotNone(re.match((r"[\%03o0]" % i).encode(), bytes([i]))) + self.assertIsNotNone(re.match((r"[\%03o8]" % i).encode(), bytes([i]))) + self.assertIsNotNone(re.match((r"[\x%02x]" % i).encode(), bytes([i]))) + self.assertIsNotNone(re.match((r"[\x%02x0]" % i).encode(), bytes([i]))) + self.assertIsNotNone(re.match((r"[\x%02xz]" % i).encode(), bytes([i]))) + self.assertIsNotNone(re.match(br"[\u]", b'u')) + self.assertIsNotNone(re.match(br"[\U]", b'U')) + self.assertRaises(re.error, re.match, br"[\911]", "") + self.assertRaises(re.error, re.match, br"[\x1z]", "") def test_bug_113254(self): self.assertEqual(re.match(r'(a)|(b)', 'b').start(1), -1) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -40,6 +40,9 @@ Library ------- +- Issue #3665: \u and \U escapes are now supported in unicode regular + expressions. Patch by Serhiy Storchaka. + - Issue #15153: Added inspect.getgeneratorlocals to simplify white box testing of generator state updates -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 14:21:11 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sat, 23 Jun 2012 14:21:11 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_test=5Fre_failure_under?= =?utf8?q?_Windows=2E?= Message-ID: http://hg.python.org/cpython/rev/c67b7e0c818a changeset: 77626:c67b7e0c818a user: Antoine Pitrou date: Sat Jun 23 14:17:39 2012 +0200 summary: Fix test_re failure under Windows. 
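A short sketch of the new escape support exercised by the test changes above; the assertions assume Python 3.3 with this patch applied, and the particular code points are arbitrary examples:

    import re

    # \u and \U escapes are honoured in str patterns (issue #3665);
    # in bytes patterns the sequences stay ordinary literals.
    assert re.match(r"\u00e9", "\u00e9")            # 4-digit escape
    assert re.match(r"\U0001F600", "\U0001F600")    # 8-digit escape, non-BMP code point
    assert re.match(r"[\u0041-\u005a]+", "PYTHON")  # escapes inside a character class
    assert re.match(br"\u", b"u")                   # bytes pattern: '\u' matches 'u'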
files: Modules/sre.h | 6 +----- 1 files changed, 1 insertions(+), 5 deletions(-) diff --git a/Modules/sre.h b/Modules/sre.h --- a/Modules/sre.h +++ b/Modules/sre.h @@ -14,12 +14,8 @@ #include "sre_constants.h" /* size of a code word (must be unsigned short or larger, and - large enough to hold a Py_UNICODE character) */ -#ifdef Py_UNICODE_WIDE + large enough to hold a UCS4 character) */ #define SRE_CODE Py_UCS4 -#else -#define SRE_CODE unsigned short -#endif typedef struct { PyObject_VAR_HEAD -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 14:25:16 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sat, 23 Jun 2012 14:25:16 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Remove_outdated_statement?= Message-ID: http://hg.python.org/cpython/rev/837d51ba1aa2 changeset: 77627:837d51ba1aa2 user: Antoine Pitrou date: Sat Jun 23 14:19:58 2012 +0200 summary: Remove outdated statement files: Python/bltinmodule.c | 8 +------- 1 files changed, 1 insertions(+), 7 deletions(-) diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -1490,13 +1490,7 @@ "ord(c) -> integer\n\ \n\ Return the integer ordinal of a one-character string." -) -#ifndef Py_UNICODE_WIDE -PyDoc_STR( -"\nA valid surrogate pair is also accepted." -) -#endif -; +); static PyObject * -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 14:48:54 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sat, 23 Jun 2012 14:48:54 +0200 Subject: [Python-checkins] =?utf8?b?Y3B5dGhvbiAoMy4yKTogSXNzdWUgIzE1MTQy?= =?utf8?q?=3A_Fix_reference_leak_when_deallocating_instances_of_types_crea?= =?utf8?q?ted?= Message-ID: http://hg.python.org/cpython/rev/1794308c1ea7 changeset: 77628:1794308c1ea7 branch: 3.2 parent: 77623:e1416a4d728a user: Antoine Pitrou date: Sat Jun 23 14:42:38 2012 +0200 summary: Issue #15142: Fix reference leak when deallocating instances of types created using PyType_FromSpec(). files: Misc/NEWS | 3 +++ Objects/typeobject.c | 6 ++++++ 2 files changed, 9 insertions(+), 0 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,9 @@ Core and Builtins ----------------- +- Issue #15142: Fix reference leak when deallocating instances of types + created using PyType_FromSpec(). + - Issue #10053: Don't close FDs when FileIO.__init__ fails. Loosely based on the work by Hirokazu Yamamoto. diff --git a/Objects/typeobject.c b/Objects/typeobject.c --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -2387,6 +2387,12 @@ res->ht_type.tp_doc = tp_doc; } } + if (res->ht_type.tp_dealloc == NULL) { + /* It's a heap type, so needs the heap types' dealloc. + subtype_dealloc will call the base type's tp_dealloc, if + necessary. 
*/ + res->ht_type.tp_dealloc = subtype_dealloc; + } if (PyType_Ready(&res->ht_type) < 0) goto fail; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 14:48:55 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sat, 23 Jun 2012 14:48:55 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Issue_=2315142=3A_Fix_reference_leak_when_deallocating_insta?= =?utf8?q?nces_of_types_created?= Message-ID: http://hg.python.org/cpython/rev/9945d7dfa72c changeset: 77629:9945d7dfa72c parent: 77627:837d51ba1aa2 parent: 77628:1794308c1ea7 user: Antoine Pitrou date: Sat Jun 23 14:45:21 2012 +0200 summary: Issue #15142: Fix reference leak when deallocating instances of types created using PyType_FromSpec(). files: Misc/NEWS | 3 +++ Objects/typeobject.c | 6 ++++++ 2 files changed, 9 insertions(+), 0 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,9 @@ Core and Builtins ----------------- +- Issue #15142: Fix reference leak when deallocating instances of types + created using PyType_FromSpec(). + - Issue #15042: Add PyState_AddModule and PyState_RemoveModule. Add version guard for Py_LIMITED_API additions. Patch by Robin Schreiber. diff --git a/Objects/typeobject.c b/Objects/typeobject.c --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -2417,6 +2417,12 @@ if (res->ht_type.tp_dictoffset) { res->ht_cached_keys = _PyDict_NewKeysForClass(); } + if (res->ht_type.tp_dealloc == NULL) { + /* It's a heap type, so needs the heap types' dealloc. + subtype_dealloc will call the base type's tp_dealloc, if + necessary. */ + res->ht_type.tp_dealloc = subtype_dealloc; + } if (PyType_Ready(&res->ht_type) < 0) goto fail; -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 15:28:00 2012 From: python-checkins at python.org (ezio.melotti) Date: Sat, 23 Jun 2012 15:28:00 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_=2315114=3A_the_strict_mode?= =?utf8?q?_of_HTMLParser_and_the_HTMLParseError_exception_are?= Message-ID: http://hg.python.org/cpython/rev/8dd2f5754b2f changeset: 77630:8dd2f5754b2f user: Ezio Melotti date: Sat Jun 23 15:27:51 2012 +0200 summary: #15114: the strict mode of HTMLParser and the HTMLParseError exception are deprecated now that the parser is able to parse invalid markup. files: Doc/library/html.parser.rst | 21 +++++++++++++++------ Lib/html/parser.py | 21 ++++++++++++--------- Lib/test/test_htmlparser.py | 6 ++++-- Misc/NEWS | 5 ++++- 4 files changed, 35 insertions(+), 18 deletions(-) diff --git a/Doc/library/html.parser.rst b/Doc/library/html.parser.rst --- a/Doc/library/html.parser.rst +++ b/Doc/library/html.parser.rst @@ -16,13 +16,14 @@ This module defines a class :class:`HTMLParser` which serves as the basis for parsing text files formatted in HTML (HyperText Mark-up Language) and XHTML. -.. class:: HTMLParser(strict=True) +.. class:: HTMLParser(strict=False) - Create a parser instance. If *strict* is ``True`` (the default), invalid - HTML results in :exc:`~html.parser.HTMLParseError` exceptions [#]_. If - *strict* is ``False``, the parser uses heuristics to make a best guess at - the intention of any invalid HTML it encounters, similar to the way most - browsers do. Using ``strict=False`` is advised. + Create a parser instance. If *strict* is ``False`` (the default), the parser + will accept and parse invalid markup. 
If *strict* is ``True`` the parser + will raise an :exc:`~html.parser.HTMLParseError` exception instead [#]_ when + it's not able to parse the markup. + The use of ``strict=True`` is discouraged and the *strict* argument is + deprecated. An :class:`.HTMLParser` instance is fed HTML data and calls handler methods when start tags, end tags, text, comments, and other markup elements are @@ -34,6 +35,10 @@ .. versionchanged:: 3.2 *strict* keyword added + .. deprecated-removed:: 3.3 3.5 + The *strict* argument and the strict mode have been deprecated. + The parser is now able to accept and parse invalid markup too. + An exception is defined as well: @@ -46,6 +51,10 @@ detected, and :attr:`offset` is the number of characters into the line at which the construct starts. + .. deprecated-removed:: 3.3 3.5 + This exception has been deprecated because it's never raised by the parser + (when the default non-strict mode is used). + Example HTML Parser Application ------------------------------- diff --git a/Lib/html/parser.py b/Lib/html/parser.py --- a/Lib/html/parser.py +++ b/Lib/html/parser.py @@ -10,6 +10,7 @@ import _markupbase import re +import warnings # Regular expressions used for parsing @@ -113,14 +114,16 @@ CDATA_CONTENT_ELEMENTS = ("script", "style") - def __init__(self, strict=True): + def __init__(self, strict=False): """Initialize and reset this instance. - If strict is set to True (the default), errors are raised when invalid - HTML is encountered. If set to False, an attempt is instead made to - continue parsing, making "best guesses" about the intended meaning, in - a fashion similar to what browsers typically do. + If strict is set to False (the default) the parser will parse invalid + markup, otherwise it will raise an error. Note that the strict mode + is deprecated. """ + if strict: + warnings.warn("The strict mode is deprecated.", + DeprecationWarning, stacklevel=2) self.strict = strict self.reset() @@ -271,8 +274,8 @@ # See also parse_declaration in _markupbase def parse_html_declaration(self, i): rawdata = self.rawdata - if rawdata[i:i+2] != '', i+2) if pos == -1: return -1 diff --git a/Lib/test/test_htmlparser.py b/Lib/test/test_htmlparser.py --- a/Lib/test/test_htmlparser.py +++ b/Lib/test/test_htmlparser.py @@ -102,7 +102,8 @@ class HTMLParserStrictTestCase(TestCaseBase): def get_collector(self): - return EventCollector(strict=True) + with support.check_warnings(("", DeprecationWarning), quite=False): + return EventCollector(strict=True) def test_processing_instruction_only(self): self._run_check("", [ @@ -594,7 +595,8 @@ class AttributesStrictTestCase(TestCaseBase): def get_collector(self): - return EventCollector(strict=True) + with support.check_warnings(("", DeprecationWarning), quite=False): + return EventCollector(strict=True) def test_attr_syntax(self): output = [ diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -43,6 +43,9 @@ Library ------- +- Issue #15114: the strict mode of HTMLParser and the HTMLParseError exception + are deprecated now that the parser is able to parse invalid markup. + - Issue #3665: \u and \U escapes are now supported in unicode regular expressions. Patch by Serhiy Storchaka. @@ -78,7 +81,7 @@ - Issue #9527: datetime.astimezone() method will now supply a class timezone instance corresponding to the system local timezone when called with no arguments. - + - Issue #14653: email.utils.mktime_tz() no longer relies on system mktime() when timezone offest is supplied. 
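A minimal sketch of the now-default non-strict mode; the subclass name and the sample markup below are made up for illustration:

    from html.parser import HTMLParser

    class Collector(HTMLParser):
        def handle_starttag(self, tag, attrs):
            print("start:", tag, attrs)
        def handle_data(self, data):
            print("data :", data)

    p = Collector()                 # strict now defaults to False
    # Unquoted attribute value, a stray '<' and an unterminated comment:
    # the parser recovers instead of raising HTMLParseError.
    p.feed("<p class=unquoted>a < b <!-- oops")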
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 16:48:23 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sat, 23 Jun 2012 16:48:23 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_On_behalf_of_Nadeem_Vawda?= =?utf8?q?=3A_issue_=2310376=3A_micro-optimize_reading_from_a_Zipfile=2E?= Message-ID: http://hg.python.org/cpython/rev/0e8285321659 changeset: 77631:0e8285321659 user: Antoine Pitrou date: Sat Jun 23 16:44:48 2012 +0200 summary: On behalf of Nadeem Vawda: issue #10376: micro-optimize reading from a Zipfile. (patch by Serhiy) files: Lib/zipfile.py | 18 ++++++++++-------- 1 files changed, 10 insertions(+), 8 deletions(-) diff --git a/Lib/zipfile.py b/Lib/zipfile.py --- a/Lib/zipfile.py +++ b/Lib/zipfile.py @@ -733,12 +733,13 @@ buf += self._read1(self.MAX_N) return buf - n -= len(self._readbuffer) - self._offset - if n < 0: - buf = self._readbuffer[self._offset:n] - self._offset += len(buf) + end = n + self._offset + if end < len(self._readbuffer): + buf = self._readbuffer[self._offset:end] + self._offset = end return buf + n = end - len(self._readbuffer) buf = self._readbuffer[self._offset:] self._readbuffer = b'' self._offset = 0 @@ -774,12 +775,13 @@ buf += data return buf - n -= len(self._readbuffer) - self._offset - if n < 0: - buf = self._readbuffer[self._offset:n] - self._offset += len(buf) + end = n + self._offset + if end < len(self._readbuffer): + buf = self._readbuffer[self._offset:end] + self._offset = end return buf + n = end - len(self._readbuffer) buf = self._readbuffer[self._offset:] self._readbuffer = b'' self._offset = 0 -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 17:45:34 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sat, 23 Jun 2012 17:45:34 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Try_to_further_debug_occasi?= =?utf8?q?onal_buildbot_failure?= Message-ID: http://hg.python.org/cpython/rev/de2eeedf3f69 changeset: 77632:de2eeedf3f69 user: Antoine Pitrou date: Sat Jun 23 17:27:56 2012 +0200 summary: Try to further debug occasional buildbot failure files: Lib/test/test_imp.py | 6 ++++++ 1 files changed, 6 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py --- a/Lib/test/test_imp.py +++ b/Lib/test/test_imp.py @@ -320,6 +320,12 @@ '/foo/bar/foo.cpython-32.foo.pyc') def test_package___file__(self): + try: + m = __import__('pep3147') + except ImportError: + pass + else: + self.fail("pep3147 module already exists: %r" % (m,)) # Test that a package's __file__ points to the right source directory. os.mkdir('pep3147') sys.path.insert(0, os.curdir) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 18:13:43 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sat, 23 Jun 2012 18:13:43 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Properly_cleanup_pep3147_mo?= =?utf8?q?dules_in_test=5Fimport?= Message-ID: http://hg.python.org/cpython/rev/1ac6a2f6cbee changeset: 77633:1ac6a2f6cbee user: Antoine Pitrou date: Sat Jun 23 18:09:55 2012 +0200 summary: Properly cleanup pep3147 modules in test_import files: Lib/test/test_import.py | 8 ++++---- 1 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Lib/test/test_import.py b/Lib/test/test_import.py --- a/Lib/test/test_import.py +++ b/Lib/test/test_import.py @@ -636,6 +636,8 @@ # Like test___cached__ but for packages. 
def cleanup(): rmtree('pep3147') + unload('pep3147.foo') + unload('pep3147') os.mkdir('pep3147') self.addCleanup(cleanup) # Touch the __init__.py @@ -643,8 +645,6 @@ pass with open(os.path.join('pep3147', 'foo.py'), 'w'): pass - unload('pep3147.foo') - unload('pep3147') importlib.invalidate_caches() m = __import__('pep3147.foo') init_pyc = imp.cache_from_source( @@ -659,10 +659,10 @@ # PEP 3147 pyc file. def cleanup(): rmtree('pep3147') + unload('pep3147.foo') + unload('pep3147') os.mkdir('pep3147') self.addCleanup(cleanup) - unload('pep3147.foo') - unload('pep3147') # Touch the __init__.py with open(os.path.join('pep3147', '__init__.py'), 'w'): pass -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 18:15:39 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sat, 23 Jun 2012 18:15:39 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Use_non-deprecated_speling?= =?utf8?q?=2E?= Message-ID: http://hg.python.org/cpython/rev/46e29122d3f7 changeset: 77634:46e29122d3f7 user: Antoine Pitrou date: Sat Jun 23 18:11:59 2012 +0200 summary: Use non-deprecated speling. files: Lib/test/test_inspect.py | 88 ++++++++++++++-------------- 1 files changed, 44 insertions(+), 44 deletions(-) diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py --- a/Lib/test/test_inspect.py +++ b/Lib/test/test_inspect.py @@ -1348,20 +1348,20 @@ S((po, pk, args, ko, kwargs)) - with self.assertRaisesRegexp(ValueError, 'wrong parameter order'): + with self.assertRaisesRegex(ValueError, 'wrong parameter order'): S((pk, po, args, ko, kwargs)) - with self.assertRaisesRegexp(ValueError, 'wrong parameter order'): + with self.assertRaisesRegex(ValueError, 'wrong parameter order'): S((po, args, pk, ko, kwargs)) - with self.assertRaisesRegexp(ValueError, 'wrong parameter order'): + with self.assertRaisesRegex(ValueError, 'wrong parameter order'): S((args, po, pk, ko, kwargs)) - with self.assertRaisesRegexp(ValueError, 'wrong parameter order'): + with self.assertRaisesRegex(ValueError, 'wrong parameter order'): S((po, pk, args, kwargs, ko)) kwargs2 = kwargs.replace(name='args') - with self.assertRaisesRegexp(ValueError, 'duplicate parameter name'): + with self.assertRaisesRegex(ValueError, 'duplicate parameter name'): S((po, pk, args, kwargs2, ko)) def test_signature_immutability(self): @@ -1409,24 +1409,24 @@ ...)) def test_signature_on_builtin_function(self): - with self.assertRaisesRegexp(ValueError, 'not supported by signature'): + with self.assertRaisesRegex(ValueError, 'not supported by signature'): inspect.signature(type) - with self.assertRaisesRegexp(ValueError, 'not supported by signature'): + with self.assertRaisesRegex(ValueError, 'not supported by signature'): # support for 'wrapper_descriptor' inspect.signature(type.__call__) - with self.assertRaisesRegexp(ValueError, 'not supported by signature'): + with self.assertRaisesRegex(ValueError, 'not supported by signature'): # support for 'method-wrapper' inspect.signature(min.__call__) - with self.assertRaisesRegexp(ValueError, + with self.assertRaisesRegex(ValueError, 'no signature found for builtin function'): # support for 'method-wrapper' inspect.signature(min) def test_signature_on_non_function(self): - with self.assertRaisesRegexp(TypeError, 'is not a callable object'): + with self.assertRaisesRegex(TypeError, 'is not a callable object'): inspect.signature(42) - with self.assertRaisesRegexp(TypeError, 'is not a Python function'): + with self.assertRaisesRegex(TypeError, 'is not a Python function'): 
inspect.Signature.from_function(42) def test_signature_on_method(self): @@ -1485,10 +1485,10 @@ self.assertEqual(self.signature(partial(test)), ((), ...)) - with self.assertRaisesRegexp(ValueError, "has incorrect arguments"): + with self.assertRaisesRegex(ValueError, "has incorrect arguments"): inspect.signature(partial(test, 1)) - with self.assertRaisesRegexp(ValueError, "has incorrect arguments"): + with self.assertRaisesRegex(ValueError, "has incorrect arguments"): inspect.signature(partial(test, a=1)) def test(a, b, *, c, d): @@ -1604,7 +1604,7 @@ self.assertEqual(_foo(c=10), (1, 30, 10)) _foo = partial(_foo, 2) # now 'b' has two values - # positional and keyword - with self.assertRaisesRegexp(ValueError, "has incorrect arguments"): + with self.assertRaisesRegex(ValueError, "has incorrect arguments"): inspect.signature(_foo) def foo(a, b, c, *, d): @@ -1775,7 +1775,7 @@ class Spam: pass - with self.assertRaisesRegexp(TypeError, "is not a callable object"): + with self.assertRaisesRegex(TypeError, "is not a callable object"): inspect.signature(Spam()) class Bar(Spam, Foo): @@ -1787,7 +1787,7 @@ class ToFail: __call__ = type - with self.assertRaisesRegexp(ValueError, "not supported by signature"): + with self.assertRaisesRegex(ValueError, "not supported by signature"): inspect.signature(ToFail()) @@ -1850,7 +1850,7 @@ def test_signature_unhashable(self): def foo(a): pass sig = inspect.signature(foo) - with self.assertRaisesRegexp(TypeError, 'unhashable type'): + with self.assertRaisesRegex(TypeError, 'unhashable type'): hash(sig) def test_signature_str(self): @@ -1920,27 +1920,27 @@ self.assertIs(p.annotation, p.empty) self.assertEqual(p.kind, inspect.Parameter.POSITIONAL_ONLY) - with self.assertRaisesRegexp(ValueError, 'invalid value'): + with self.assertRaisesRegex(ValueError, 'invalid value'): inspect.Parameter('foo', default=10, kind='123') - with self.assertRaisesRegexp(ValueError, 'not a valid parameter name'): + with self.assertRaisesRegex(ValueError, 'not a valid parameter name'): inspect.Parameter('1', kind=inspect.Parameter.VAR_KEYWORD) - with self.assertRaisesRegexp(ValueError, + with self.assertRaisesRegex(ValueError, 'non-positional-only parameter'): inspect.Parameter(None, kind=inspect.Parameter.VAR_KEYWORD) - with self.assertRaisesRegexp(ValueError, 'cannot have default values'): + with self.assertRaisesRegex(ValueError, 'cannot have default values'): inspect.Parameter('a', default=42, kind=inspect.Parameter.VAR_KEYWORD) - with self.assertRaisesRegexp(ValueError, 'cannot have default values'): + with self.assertRaisesRegex(ValueError, 'cannot have default values'): inspect.Parameter('a', default=42, kind=inspect.Parameter.VAR_POSITIONAL) p = inspect.Parameter('a', default=42, kind=inspect.Parameter.POSITIONAL_OR_KEYWORD) - with self.assertRaisesRegexp(ValueError, 'cannot have default values'): + with self.assertRaisesRegex(ValueError, 'cannot have default values'): p.replace(kind=inspect.Parameter.VAR_POSITIONAL) self.assertTrue(repr(p).startswith(' http://hg.python.org/cpython/rev/c910af2e3c98 changeset: 77635:c910af2e3c98 user: Hynek Schlawack date: Sat Jun 23 17:58:42 2012 +0200 summary: #4489: Add a shutil.rmtree that isn't suspectible to symlink attacks It is used automatically on platforms supporting the necessary os.openat() and os.unlinkat() functions. Main code by Martin von L?wis. 
files: Doc/library/shutil.rst | 27 +++++-- Lib/shutil.py | 99 +++++++++++++++++++++++----- Lib/test/test_shutil.py | 63 +++++++++++++----- Misc/NEWS | 4 + 4 files changed, 150 insertions(+), 43 deletions(-) diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -190,14 +190,27 @@ handled by calling a handler specified by *onerror* or, if that is omitted, they raise an exception. + .. warning:: + + The default :func:`rmtree` function is susceptible to a symlink attack: + given proper timing and circumstances, attackers can use it to delete + files they wouldn't be able to access otherwise. Thus -- on platforms + that support the necessary fd-based functions :func:`os.openat` and + :func:`os.unlinkat` -- a safe version of :func:`rmtree` is used, which + isn't vulnerable. + If *onerror* is provided, it must be a callable that accepts three - parameters: *function*, *path*, and *excinfo*. The first parameter, - *function*, is the function which raised the exception; it will be - :func:`os.path.islink`, :func:`os.listdir`, :func:`os.remove` or - :func:`os.rmdir`. The second parameter, *path*, will be the path name passed - to *function*. The third parameter, *excinfo*, will be the exception - information return by :func:`sys.exc_info`. Exceptions raised by *onerror* - will not be caught. + parameters: *function*, *path*, and *excinfo*. + + The first parameter, *function*, is the function which raised the exception; + it depends on the platform and implementation. The second parameter, + *path*, will be the path name passed to *function*. The third parameter, + *excinfo*, will be the exception information returned by + :func:`sys.exc_info`. Exceptions raised by *onerror* will not be caught. + + .. versionchanged:: 3.3 + Added a safe version that is used automatically if platform supports + the fd-based functions :func:`os.openat` and :func:`os.unlinkat`. .. function:: move(src, dst) diff --git a/Lib/shutil.py b/Lib/shutil.py --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -337,23 +337,8 @@ raise Error(errors) return dst -def rmtree(path, ignore_errors=False, onerror=None): - """Recursively delete a directory tree. - - If ignore_errors is set, errors are ignored; otherwise, if onerror - is set, it is called to handle the error with arguments (func, - path, exc_info) where func is os.listdir, os.remove, or os.rmdir; - path is the argument to that function that caused it to fail; and - exc_info is a tuple returned by sys.exc_info(). If ignore_errors - is false and onerror is None, an exception is raised. 
- - """ - if ignore_errors: - def onerror(*args): - pass - elif onerror is None: - def onerror(*args): - raise +# version vulnerable to race conditions +def _rmtree_unsafe(path, onerror): try: if os.path.islink(path): # symlinks to directories are forbidden, see bug #1669 @@ -374,7 +359,7 @@ except os.error: mode = 0 if stat.S_ISDIR(mode): - rmtree(fullname, ignore_errors, onerror) + _rmtree_unsafe(fullname, onerror) else: try: os.remove(fullname) @@ -385,6 +370,84 @@ except os.error: onerror(os.rmdir, path, sys.exc_info()) +# Version using fd-based APIs to protect against races +def _rmtree_safe_fd(topfd, path, onerror): + names = [] + try: + names = os.flistdir(topfd) + except os.error: + onerror(os.flistdir, path, sys.exc_info()) + for name in names: + fullname = os.path.join(path, name) + try: + orig_st = os.fstatat(topfd, name) + mode = orig_st.st_mode + except os.error: + mode = 0 + if stat.S_ISDIR(mode): + try: + dirfd = os.openat(topfd, name, os.O_RDONLY) + except os.error: + onerror(os.openat, fullname, sys.exc_info()) + else: + try: + if os.path.samestat(orig_st, os.fstat(dirfd)): + _rmtree_safe_fd(dirfd, fullname, onerror) + finally: + os.close(dirfd) + else: + try: + os.unlinkat(topfd, name) + except os.error: + onerror(os.unlinkat, fullname, sys.exc_info()) + try: + os.rmdir(path) + except os.error: + onerror(os.rmdir, path, sys.exc_info()) + +_use_fd_functions = hasattr(os, 'openat') and hasattr(os, 'unlinkat') +def rmtree(path, ignore_errors=False, onerror=None): + """Recursively delete a directory tree. + + If ignore_errors is set, errors are ignored; otherwise, if onerror + is set, it is called to handle the error with arguments (func, + path, exc_info) where func is os.listdir, os.remove, or os.rmdir; + path is the argument to that function that caused it to fail; and + exc_info is a tuple returned by sys.exc_info(). If ignore_errors + is false and onerror is None, an exception is raised. + + """ + if ignore_errors: + def onerror(*args): + pass + elif onerror is None: + def onerror(*args): + raise + if _use_fd_functions: + # Note: To guard against symlink races, we use the standard + # lstat()/open()/fstat() trick. + try: + orig_st = os.lstat(path) + except Exception: + onerror(os.lstat, path, sys.exc_info()) + return + try: + fd = os.open(path, os.O_RDONLY) + except Exception: + onerror(os.lstat, path, sys.exc_info()) + return + try: + if (stat.S_ISDIR(orig_st.st_mode) and + os.path.samestat(orig_st, os.fstat(fd))): + _rmtree_safe_fd(fd, path, onerror) + elif (stat.S_ISREG(orig_st.st_mode)): + raise NotADirectoryError(20, + "Not a directory: '{}'".format(path)) + finally: + os.close(fd) + else: + return _rmtree_unsafe(path, onerror) + def _basename(path): # A basename() variant which first strips the trailing slash, if present. diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -120,29 +120,36 @@ def test_on_error(self): self.errorState = 0 os.mkdir(TESTFN) - self.childpath = os.path.join(TESTFN, 'a') - support.create_empty_file(self.childpath) + self.child_file_path = os.path.join(TESTFN, 'a') + self.child_dir_path = os.path.join(TESTFN, 'b') + support.create_empty_file(self.child_file_path) + os.mkdir(self.child_dir_path) old_dir_mode = os.stat(TESTFN).st_mode - old_child_mode = os.stat(self.childpath).st_mode + old_child_file_mode = os.stat(self.child_file_path).st_mode + old_child_dir_mode = os.stat(self.child_dir_path).st_mode # Make unwritable. 
- os.chmod(self.childpath, stat.S_IREAD) - os.chmod(TESTFN, stat.S_IREAD) + new_mode = stat.S_IREAD|stat.S_IEXEC + os.chmod(self.child_file_path, new_mode) + os.chmod(self.child_dir_path, new_mode) + os.chmod(TESTFN, new_mode) shutil.rmtree(TESTFN, onerror=self.check_args_to_onerror) # Test whether onerror has actually been called. - self.assertEqual(self.errorState, 2, - "Expected call to onerror function did not happen.") + self.assertEqual(self.errorState, 3, + "Expected call to onerror function did not " + "happen.") # Make writable again. os.chmod(TESTFN, old_dir_mode) - os.chmod(self.childpath, old_child_mode) + os.chmod(self.child_file_path, old_child_file_mode) + os.chmod(self.child_dir_path, old_child_dir_mode) # Clean up. shutil.rmtree(TESTFN) def check_args_to_onerror(self, func, arg, exc): # test_rmtree_errors deliberately runs rmtree - # on a directory that is chmod 400, which will fail. + # on a directory that is chmod 500, which will fail. # This function is run when shutil.rmtree fails. # 99.9% of the time it initially fails to remove # a file in the directory, so the first time through @@ -151,20 +158,39 @@ # FUSE experienced a failure earlier in the process # at os.listdir. The first failure may legally # be either. - if self.errorState == 0: - if func is os.remove: - self.assertEqual(arg, self.childpath) + if 0 <= self.errorState < 2: + if (func is os.remove or + hasattr(os, 'unlinkat') and func is os.unlinkat): + self.assertIn(arg, [self.child_file_path, self.child_dir_path]) else: - self.assertIs(func, os.listdir, - "func must be either os.remove or os.listdir") - self.assertEqual(arg, TESTFN) + if self.errorState == 1: + self.assertEqual(func, os.rmdir) + else: + self.assertIs(func, os.listdir, "func must be os.listdir") + self.assertIn(arg, [TESTFN, self.child_dir_path]) self.assertTrue(issubclass(exc[0], OSError)) - self.errorState = 1 + self.errorState += 1 else: self.assertEqual(func, os.rmdir) self.assertEqual(arg, TESTFN) self.assertTrue(issubclass(exc[0], OSError)) - self.errorState = 2 + self.errorState = 3 + + def test_rmtree_does_not_choke_on_failing_lstat(self): + try: + orig_lstat = os.lstat + def raiser(fn): + if fn != TESTFN: + raise OSError() + else: + return orig_lstat(fn) + os.lstat = raiser + + os.mkdir(TESTFN) + write_file((TESTFN, 'foo'), 'foo') + shutil.rmtree(TESTFN) + finally: + os.lstat = orig_lstat @unittest.skipUnless(hasattr(os, 'chmod'), 'requires os.chmod') @support.skip_unless_symlink @@ -464,7 +490,7 @@ # When called on a file instead of a directory, don't delete it. handle, path = tempfile.mkstemp() os.close(handle) - self.assertRaises(OSError, shutil.rmtree, path) + self.assertRaises(NotADirectoryError, shutil.rmtree, path) os.remove(path) def test_copytree_simple(self): @@ -629,6 +655,7 @@ os.mkdir(src) os.symlink(src, dst) self.assertRaises(OSError, shutil.rmtree, dst) + shutil.rmtree(dst, ignore_errors=True) finally: shutil.rmtree(TESTFN, ignore_errors=True) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -43,6 +43,10 @@ Library ------- +- Issue #4489: Add a shutil.rmtree that isn't suspectible to symlink attacks. + It is used automatically on platforms supporting the necessary os.openat() + and os.unlinkat() functions. Main code by Martin von L?wis. + - Issue #15114: the strict mode of HTMLParser and the HTMLParseError exception are deprecated now that the parser is able to parse invalid markup. 
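The patch above guards the top-level rmtree() call with what its comment calls the standard lstat()/open()/fstat() trick. As a rough illustration of that idea, and not the patch's actual code, here is a minimal single-level sketch; the name remove_tree and the flat deletion loop are invented for brevity, and os.listdir(fd) plus the dir_fd keyword need Python 3.3 on a platform that supports them:

    import os
    import stat

    def remove_tree(path):
        # 1. Inspect the path without following a symlink.
        orig_st = os.lstat(path)
        if not stat.S_ISDIR(orig_st.st_mode):
            raise NotADirectoryError("Not a directory: {!r}".format(path))
        # 2. Open it; if an attacker swapped in a symlink after the lstat(),
        #    this open() follows the link ...
        fd = os.open(path, os.O_RDONLY)
        try:
            # 3. ... which the samestat() comparison then detects, so nothing
            #    under the attacker-chosen target is touched.
            if not os.path.samestat(orig_st, os.fstat(fd)):
                raise OSError("path changed between lstat() and open(): {!r}"
                              .format(path))
            # From here on, work relative to the open fd instead of the path
            # string (simplified here to one non-recursive level of plain files).
            for name in os.listdir(fd):
                os.unlink(name, dir_fd=fd)
            os.rmdir(path)
        finally:
            os.close(fd)

The real implementation recurses via _rmtree_safe_fd() and routes every failure through onerror, as shown in the diff.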
-- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 18:21:16 2012 From: python-checkins at python.org (hynek.schlawack) Date: Sat, 23 Jun 2012 18:21:16 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_typo_in_Misc/NEWS?= Message-ID: http://hg.python.org/cpython/rev/93263cd0b7d0 changeset: 77636:93263cd0b7d0 user: Hynek Schlawack date: Sat Jun 23 18:21:11 2012 +0200 summary: Fix typo in Misc/NEWS files: Misc/NEWS | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -43,7 +43,7 @@ Library ------- -- Issue #4489: Add a shutil.rmtree that isn't suspectible to symlink attacks. +- Issue #4489: Add a shutil.rmtree that isn't susceptible to symlink attacks. It is used automatically on platforms supporting the necessary os.openat() and os.unlinkat() functions. Main code by Martin von L?wis. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 18:28:53 2012 From: python-checkins at python.org (nick.coghlan) Date: Sat, 23 Jun 2012 18:28:53 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Properly_test_the_various_b?= =?utf8?q?uiltins_lookup_cases_in_inspect=2Egetclosurevars?= Message-ID: http://hg.python.org/cpython/rev/61c1aeb7fb10 changeset: 77637:61c1aeb7fb10 user: Nick Coghlan date: Sat Jun 23 20:07:39 2012 +1000 summary: Properly test the various builtins lookup cases in inspect.getclosurevars files: Lib/test/test_inspect.py | 24 ++++++++++++++++++++++++ 1 files changed, 24 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py --- a/Lib/test/test_inspect.py +++ b/Lib/test/test_inspect.py @@ -763,6 +763,30 @@ self.assertRaises(TypeError, inspect.getclosurevars, list) self.assertRaises(TypeError, inspect.getclosurevars, {}) + def _private_globals(self): + code = """def f(): print(path)""" + ns = {} + exec(code, ns) + return ns["f"], ns + + def test_builtins_fallback(self): + f, ns = self._private_globals() + ns.pop("__builtins__", None) + expected = inspect.ClosureVars({}, {}, {"print":print}, {"path"}) + self.assertEqual(inspect.getclosurevars(f), expected) + + def test_builtins_as_dict(self): + f, ns = self._private_globals() + ns["__builtins__"] = {"path":1} + expected = inspect.ClosureVars({}, {}, {"path":1}, {"print"}) + self.assertEqual(inspect.getclosurevars(f), expected) + + def test_builtins_as_module(self): + f, ns = self._private_globals() + ns["__builtins__"] = os + expected = inspect.ClosureVars({}, {}, {"path":os.path}, {"print"}) + self.assertEqual(inspect.getclosurevars(f), expected) + class TestGetcallargsFunctions(unittest.TestCase): -- Repository URL: http://hg.python.org/cpython From ncoghlan at gmail.com Sat Jun 23 18:40:14 2012 From: ncoghlan at gmail.com (Nick Coghlan) Date: Sun, 24 Jun 2012 02:40:14 +1000 Subject: [Python-checkins] cpython: #4489: Add a shutil.rmtree that isn't suspectible to symlink attacks In-Reply-To: References: Message-ID: On Sun, Jun 24, 2012 at 2:18 AM, hynek.schlawack wrote: > http://hg.python.org/cpython/rev/c910af2e3c98 > changeset: ? 77635:c910af2e3c98 > user: ? ? ? ?Hynek Schlawack > date: ? ? ? ?Sat Jun 23 17:58:42 2012 +0200 > summary: > ?#4489: Add a shutil.rmtree that isn't suspectible to symlink attacks > > It is used automatically on platforms supporting the necessary os.openat() and > os.unlinkat() functions. Main code by Martin von L?wis. 
Unfortunately, this isn't actually having any effect at the moment since the os module APIs changed for the beta release. The "hasattr(os, 'unlinkat')" and "hasattr(os, 'openat')" checks need to become "os.unlink in os.supports_dir_fd" and "os.open in os.supports_dir_fd", and the affected calls need to be updated to pass "dir_fd" as an argument to the normal versions of the functions. At least we know the graceful fallback to the old behaviour is indeed graceful, though :) Cheers, Nick. -- Nick Coghlan?? |?? ncoghlan at gmail.com?? |?? Brisbane, Australia From python-checkins at python.org Sat Jun 23 18:59:15 2012 From: python-checkins at python.org (stefan.krah) Date: Sat, 23 Jun 2012 18:59:15 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315102=3A_Allow_pla?= =?utf8?q?tform-specific_settings_for_the_current_project?= Message-ID: http://hg.python.org/cpython/rev/979567d33376 changeset: 77638:979567d33376 user: Stefan Krah date: Sat Jun 23 18:57:45 2012 +0200 summary: Issue #15102: Allow platform-specific settings for the current project to override environment variables. files: Tools/buildbot/build-amd64.bat | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Tools/buildbot/build-amd64.bat b/Tools/buildbot/build-amd64.bat --- a/Tools/buildbot/build-amd64.bat +++ b/Tools/buildbot/build-amd64.bat @@ -2,6 +2,6 @@ cmd /c Tools\buildbot\external-amd64.bat call "%VS100COMNTOOLS%\..\..\VC\vcvarsall.bat" x86_amd64 cmd /c Tools\buildbot\clean-amd64.bat -msbuild /p:useenv=true PCbuild\kill_python.vcxproj /p:Configuration=Debug /p:PlatformTarget=x64 +msbuild PCbuild\kill_python.vcxproj /p:Configuration=Debug /p:PlatformTarget=x64 PCbuild\amd64\kill_python_d.exe -msbuild /p:useenv=true PCbuild\pcbuild.sln /p:Configuration=Debug /p:Platform=x64 +msbuild PCbuild\pcbuild.sln /p:Configuration=Debug /p:Platform=x64 -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 19:52:21 2012 From: python-checkins at python.org (martin.v.loewis) Date: Sat, 23 Jun 2012 19:52:21 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=283=2E2=29=3A_The_build_targe?= =?utf8?q?t_of_this_makefile_is_python3=2Edll=2C_not_python32=2Edll=2E?= Message-ID: http://hg.python.org/cpython/rev/b6efb4c2995b changeset: 77639:b6efb4c2995b branch: 3.2 parent: 77628:1794308c1ea7 user: Martin v. L?wis date: Sat Jun 23 19:36:08 2012 +0200 summary: The build target of this makefile is python3.dll, not python32.dll. 
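Picking up Nick's point above about the post-beta os API: the availability check becomes a membership test on os.supports_dir_fd, and the directory fd is passed as a dir_fd keyword to the ordinary functions instead of calling the removed *at variants. A small sketch of the before/after spelling; unlink_in, dirfd and name are made-up names for illustration:

    import os

    # Old spelling from the original commit (the *at functions are gone):
    #     _use_fd_functions = hasattr(os, 'openat') and hasattr(os, 'unlinkat')
    #     os.unlinkat(topfd, name)

    # New spelling:
    use_fd_functions = (os.open in os.supports_dir_fd and
                        os.unlink in os.supports_dir_fd)

    def unlink_in(dirfd, name):
        # Remove `name` relative to the already-open directory fd `dirfd`,
        # i.e. the replacement for the old os.unlinkat(dirfd, name).
        if use_fd_functions:
            os.unlink(name, dir_fd=dirfd)
        else:
            raise NotImplementedError("no dir_fd support on this platform")

The follow-up fix that lands later in this digest (changeset 77642) adopts this spelling.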
files: PC/python3.mak | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/PC/python3.mak b/PC/python3.mak --- a/PC/python3.mak +++ b/PC/python3.mak @@ -1,4 +1,4 @@ -$(OutDir)python32.dll: python3.def $(OutDir)python32stub.lib +$(OutDir)python3.dll: python3.def $(OutDir)python32stub.lib cl /LD /Fe$(OutDir)python3.dll python3dll.c python3.def $(OutDir)python32stub.lib $(OutDir)python32stub.lib: python32stub.def @@ -7,4 +7,4 @@ clean: del $(OutDir)python3.dll $(OutDir)python3.lib $(OutDir)python32stub.lib $(OutDir)python3.exp $(OutDir)python32stub.exp -rebuild: clean $(OutDir)python32.dll +rebuild: clean $(OutDir)python3.dll -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 19:52:22 2012 From: python-checkins at python.org (martin.v.loewis) Date: Sat, 23 Jun 2012 19:52:22 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_3=2E2_-=3E_default=29?= =?utf8?q?=3A_Merge_with_3=2E2?= Message-ID: http://hg.python.org/cpython/rev/412e33fb716f changeset: 77640:412e33fb716f parent: 77638:979567d33376 parent: 77639:b6efb4c2995b user: Martin v. L?wis date: Sat Jun 23 19:51:48 2012 +0200 summary: Merge with 3.2 files: PC/python3.mak | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/PC/python3.mak b/PC/python3.mak --- a/PC/python3.mak +++ b/PC/python3.mak @@ -1,4 +1,4 @@ -$(OutDir)python33.dll: python3.def $(OutDir)python33stub.lib +$(OutDir)python3.dll: python3.def $(OutDir)python32stub.lib cl /LD /Fe$(OutDir)python3.dll python3dll.c python3.def $(OutDir)python33stub.lib $(OutDir)python33stub.lib: python33stub.def @@ -7,4 +7,4 @@ clean: del $(OutDir)python3.dll $(OutDir)python3.lib $(OutDir)python33stub.lib $(OutDir)python3.exp $(OutDir)python33stub.exp -rebuild: clean $(OutDir)python33.dll +rebuild: clean $(OutDir)python3.dll -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 19:57:19 2012 From: python-checkins at python.org (martin.v.loewis) Date: Sat, 23 Jun 2012 19:57:19 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315150=3A_regenerat?= =?utf8?q?e_python3stub=2Edef=2E?= Message-ID: http://hg.python.org/cpython/rev/f3d28feef2ea changeset: 77641:f3d28feef2ea user: Martin v. L?wis date: Sat Jun 23 19:56:19 2012 +0200 summary: Issue #15150: regenerate python3stub.def. 
files: PC/python3.mak | 2 +- PC/python33stub.def | 2 ++ 2 files changed, 3 insertions(+), 1 deletions(-) diff --git a/PC/python3.mak b/PC/python3.mak --- a/PC/python3.mak +++ b/PC/python3.mak @@ -1,4 +1,4 @@ -$(OutDir)python3.dll: python3.def $(OutDir)python32stub.lib +$(OutDir)python3.dll: python3.def $(OutDir)python33stub.lib cl /LD /Fe$(OutDir)python3.dll python3dll.c python3.def $(OutDir)python33stub.lib $(OutDir)python33stub.lib: python33stub.def diff --git a/PC/python33stub.def b/PC/python33stub.def --- a/PC/python33stub.def +++ b/PC/python33stub.def @@ -471,6 +471,8 @@ PySlice_Type PySortWrapper_Type PyState_FindModule +PyState_AddModule +PyState_RemoveModule PyStructSequence_GetItem PyStructSequence_New PyStructSequence_NewType -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 20:30:11 2012 From: python-checkins at python.org (hynek.schlawack) Date: Sat, 23 Jun 2012 20:30:11 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_=234489=3A_Fix_usage_of_fd-?= =?utf8?q?based_functions_to_new_api_introduced_earlier_today?= Message-ID: http://hg.python.org/cpython/rev/53fc7f59c7bb changeset: 77642:53fc7f59c7bb user: Hynek Schlawack date: Sat Jun 23 20:28:32 2012 +0200 summary: #4489: Fix usage of fd-based functions to new api introduced earlier today Also add an explicit test for safe implementation usage on supported platforms. As a side effect, this commit adds a module-level attribute 'rmtree_is_safe' which offers introspection whether the current rmtree implementation is safe against symlink attacks. files: Doc/library/shutil.rst | 15 +++++++++++---- Lib/shutil.py | 23 ++++++++++++----------- Lib/test/test_shutil.py | 23 +++++++++++++++++++++-- 3 files changed, 44 insertions(+), 17 deletions(-) diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -195,9 +195,9 @@ The default :func:`rmtree` function is susceptible to a symlink attack: given proper timing and circumstances, attackers can use it to delete files they wouldn't be able to access otherwise. Thus -- on platforms - that support the necessary fd-based functions :func:`os.openat` and - :func:`os.unlinkat` -- a safe version of :func:`rmtree` is used, which - isn't vulnerable. + that support the necessary fd-based functions -- a safe version of + :func:`rmtree` is used, which isn't vulnerable. In this case + :data:`rmtree_is_safe` is set to True. If *onerror* is provided, it must be a callable that accepts three parameters: *function*, *path*, and *excinfo*. @@ -210,8 +210,15 @@ .. versionchanged:: 3.3 Added a safe version that is used automatically if platform supports - the fd-based functions :func:`os.openat` and :func:`os.unlinkat`. + fd-based functions. +.. data:: rmtree_is_safe + + Indicates whether the current platform and implementation has a symlink + attack-proof version of :func:`rmtree`. Currently this is only true for + platforms supporting fd-based directory access functions. + + .. versionadded:: 3.3 .. 
function:: move(src, dst) diff --git a/Lib/shutil.py b/Lib/shutil.py --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -362,9 +362,9 @@ _rmtree_unsafe(fullname, onerror) else: try: - os.remove(fullname) + os.unlink(fullname) except os.error: - onerror(os.remove, fullname, sys.exc_info()) + onerror(os.unlink, fullname, sys.exc_info()) try: os.rmdir(path) except os.error: @@ -374,21 +374,21 @@ def _rmtree_safe_fd(topfd, path, onerror): names = [] try: - names = os.flistdir(topfd) + names = os.listdir(topfd) except os.error: - onerror(os.flistdir, path, sys.exc_info()) + onerror(os.listdir, path, sys.exc_info()) for name in names: fullname = os.path.join(path, name) try: - orig_st = os.fstatat(topfd, name) + orig_st = os.stat(name, dir_fd=topfd) mode = orig_st.st_mode except os.error: mode = 0 if stat.S_ISDIR(mode): try: - dirfd = os.openat(topfd, name, os.O_RDONLY) + dirfd = os.open(name, os.O_RDONLY, dir_fd=topfd) except os.error: - onerror(os.openat, fullname, sys.exc_info()) + onerror(os.open, fullname, sys.exc_info()) else: try: if os.path.samestat(orig_st, os.fstat(dirfd)): @@ -397,21 +397,22 @@ os.close(dirfd) else: try: - os.unlinkat(topfd, name) + os.unlink(name, dir_fd=topfd) except os.error: - onerror(os.unlinkat, fullname, sys.exc_info()) + onerror(os.unlink, fullname, sys.exc_info()) try: os.rmdir(path) except os.error: onerror(os.rmdir, path, sys.exc_info()) -_use_fd_functions = hasattr(os, 'openat') and hasattr(os, 'unlinkat') +rmtree_is_safe = _use_fd_functions = (os.unlink in os.supports_dir_fd and + os.open in os.supports_dir_fd) def rmtree(path, ignore_errors=False, onerror=None): """Recursively delete a directory tree. If ignore_errors is set, errors are ignored; otherwise, if onerror is set, it is called to handle the error with arguments (func, - path, exc_info) where func is os.listdir, os.remove, or os.rmdir; + path, exc_info) where func is platform and implementation dependent; path is the argument to that function that caused it to fail; and exc_info is a tuple returned by sys.exc_info(). If ignore_errors is false and onerror is None, an exception is raised. diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -159,8 +159,7 @@ # at os.listdir. The first failure may legally # be either. if 0 <= self.errorState < 2: - if (func is os.remove or - hasattr(os, 'unlinkat') and func is os.unlinkat): + if func is os.unlink: self.assertIn(arg, [self.child_file_path, self.child_dir_path]) else: if self.errorState == 1: @@ -486,6 +485,26 @@ shutil.copyfile(link, dst) self.assertFalse(os.path.islink(dst)) + def test_rmtree_uses_safe_fd_version_if_available(self): + if os.unlink in os.supports_dir_fd and os.open in os.supports_dir_fd: + self.assertTrue(shutil._use_fd_functions) + self.assertTrue(shutil.rmtree_is_safe) + tmp_dir = self.mkdtemp() + d = os.path.join(tmp_dir, 'a') + os.mkdir(d) + try: + real_rmtree = shutil._rmtree_safe_fd + class Called(Exception): pass + def _raiser(*args, **kwargs): + raise Called + shutil._rmtree_safe_fd = _raiser + self.assertRaises(Called, shutil.rmtree, d) + finally: + shutil._rmtree_safe_fd = real_rmtree + else: + self.assertFalse(shutil._use_fd_functions) + self.assertFalse(shutil.rmtree_is_safe) + def test_rmtree_dont_delete_file(self): # When called on a file instead of a directory, don't delete it. 
handle, path = tempfile.mkstemp() -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 21:08:12 2012 From: python-checkins at python.org (martin.v.loewis) Date: Sat, 23 Jun 2012 21:08:12 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315102=3A_Use_HOST?= =?utf8?q?=5FPYTHON_only_if_it_is_set=2E?= Message-ID: http://hg.python.org/cpython/rev/a9d4cf7d15b9 changeset: 77643:a9d4cf7d15b9 user: Martin v. L?wis date: Sat Jun 23 21:07:39 2012 +0200 summary: Issue #15102: Use HOST_PYTHON only if it is set. files: PCbuild/x64.props | 4 ++-- 1 files changed, 2 insertions(+), 2 deletions(-) diff --git a/PCbuild/x64.props b/PCbuild/x64.props --- a/PCbuild/x64.props +++ b/PCbuild/x64.props @@ -1,6 +1,6 @@ ? - + $(HOST_PYTHON) @@ -23,4 +23,4 @@ $(PythonExe) -
\ No newline at end of file + -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 21:31:47 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sat, 23 Jun 2012 21:31:47 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Proper_cleanup_in_test=5Fsh?= =?utf8?q?util=2C_even_in_case_of_error=2E?= Message-ID: http://hg.python.org/cpython/rev/58c0f261a3b7 changeset: 77644:58c0f261a3b7 user: Antoine Pitrou date: Sat Jun 23 21:28:15 2012 +0200 summary: Proper cleanup in test_shutil, even in case of error. files: Lib/test/test_shutil.py | 14 ++++++-------- 1 files changed, 6 insertions(+), 8 deletions(-) diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -120,6 +120,8 @@ def test_on_error(self): self.errorState = 0 os.mkdir(TESTFN) + self.addCleanup(shutil.rmtree, TESTFN) + self.child_file_path = os.path.join(TESTFN, 'a') self.child_dir_path = os.path.join(TESTFN, 'b') support.create_empty_file(self.child_file_path) @@ -133,20 +135,16 @@ os.chmod(self.child_dir_path, new_mode) os.chmod(TESTFN, new_mode) + self.addCleanup(os.chmod, TESTFN, old_dir_mode) + self.addCleanup(os.chmod, self.child_file_path, old_child_file_mode) + self.addCleanup(os.chmod, self.child_dir_path, old_child_dir_mode) + shutil.rmtree(TESTFN, onerror=self.check_args_to_onerror) # Test whether onerror has actually been called. self.assertEqual(self.errorState, 3, "Expected call to onerror function did not " "happen.") - # Make writable again. - os.chmod(TESTFN, old_dir_mode) - os.chmod(self.child_file_path, old_child_file_mode) - os.chmod(self.child_dir_path, old_child_dir_mode) - - # Clean up. - shutil.rmtree(TESTFN) - def check_args_to_onerror(self, func, arg, exc): # test_rmtree_errors deliberately runs rmtree # on a directory that is chmod 500, which will fail. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 21:36:23 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sat, 23 Jun 2012 21:36:23 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Add_debug_output_to_test=5F?= =?utf8?q?shutil?= Message-ID: http://hg.python.org/cpython/rev/baaaed041e49 changeset: 77645:baaaed041e49 user: Antoine Pitrou date: Sat Jun 23 21:32:36 2012 +0200 summary: Add debug output to test_shutil files: Lib/test/test_shutil.py | 2 ++ 1 files changed, 2 insertions(+), 0 deletions(-) diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -156,6 +156,8 @@ # FUSE experienced a failure earlier in the process # at os.listdir. The first failure may legally # be either. 
+ if support.verbose: + print("onerror [%d]: %r" % (self.errorState, (func, arg, exc[1]))) if 0 <= self.errorState < 2: if func is os.unlink: self.assertIn(arg, [self.child_file_path, self.child_dir_path]) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 22:08:44 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sat, 23 Jun 2012 22:08:44 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Avoid_depending_on_director?= =?utf8?q?y_iteration_order_in_test=5Fshutil?= Message-ID: http://hg.python.org/cpython/rev/ed0510588e2b changeset: 77646:ed0510588e2b user: Antoine Pitrou date: Sat Jun 23 22:05:11 2012 +0200 summary: Avoid depending on directory iteration order in test_shutil files: Lib/test/test_shutil.py | 11 +++++------ 1 files changed, 5 insertions(+), 6 deletions(-) diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -158,14 +158,13 @@ # be either. if support.verbose: print("onerror [%d]: %r" % (self.errorState, (func, arg, exc[1]))) - if 0 <= self.errorState < 2: + if self.errorState < 2: if func is os.unlink: - self.assertIn(arg, [self.child_file_path, self.child_dir_path]) + self.assertEqual(arg, self.child_file_path) + elif func is os.rmdir: + self.assertEqual(arg, self.child_dir_path) else: - if self.errorState == 1: - self.assertEqual(func, os.rmdir) - else: - self.assertIs(func, os.listdir, "func must be os.listdir") + self.assertIs(func, os.listdir) self.assertIn(arg, [TESTFN, self.child_dir_path]) self.assertTrue(issubclass(exc[0], OSError)) self.errorState += 1 -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 22:15:31 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sat, 23 Jun 2012 22:15:31 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Add_MSVC-related_entries_to?= =?utf8?q?_=2Ehgignore?= Message-ID: http://hg.python.org/cpython/rev/59ae57197bad changeset: 77647:59ae57197bad user: Antoine Pitrou date: Sat Jun 23 22:11:58 2012 +0200 summary: Add MSVC-related entries to .hgignore files: .hgignore | 4 ++++ 1 files changed, 4 insertions(+), 0 deletions(-) diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -56,6 +56,10 @@ PC/*.obj PC/*.exe PC/*/*.exe +PC/*/*.exp +PC/*/*.lib +PC/*/*.bsc +PC/*/*.dll PC/*/*.pdb PC/*/*.user PC/*/*.ncb -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 22:19:53 2012 From: python-checkins at python.org (stefan.krah) Date: Sat, 23 Jun 2012 22:19:53 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315102=3A_Fix_paths?= =?utf8?q?_in_external-amd64=2Ebat=2E?= Message-ID: http://hg.python.org/cpython/rev/c56783fe2e3b changeset: 77648:c56783fe2e3b user: Stefan Krah date: Sat Jun 23 22:18:19 2012 +0200 summary: Issue #15102: Fix paths in external-amd64.bat. 
files: Tools/buildbot/external-amd64.bat | 10 +++++----- 1 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Tools/buildbot/external-amd64.bat b/Tools/buildbot/external-amd64.bat --- a/Tools/buildbot/external-amd64.bat +++ b/Tools/buildbot/external-amd64.bat @@ -5,17 +5,17 @@ call "%VS100COMNTOOLS%\..\..\VC\vcvarsall.bat" x86_amd64 if not exist tcltk64\bin\tcl85g.dll ( - cd tcl-8.5.9.0\win + cd tcl-8.5.11.0\win nmake -f makefile.vc COMPILERFLAGS=-DWINVER=0x0500 DEBUG=1 MACHINE=AMD64 INSTALLDIR=..\..\tcltk64 clean all nmake -f makefile.vc COMPILERFLAGS=-DWINVER=0x0500 DEBUG=1 MACHINE=AMD64 INSTALLDIR=..\..\tcltk64 install cd ..\.. ) if not exist tcltk64\bin\tk85g.dll ( - cd tk-8.5.9.0\win - nmake -f makefile.vc COMPILERFLAGS=-DWINVER=0x0500 OPTS=noxp DEBUG=1 MACHINE=AMD64 INSTALLDIR=..\..\tcltk64 TCLDIR=..\..\tcl-8.5.9.0 clean - nmake -f makefile.vc COMPILERFLAGS=-DWINVER=0x0500 OPTS=noxp DEBUG=1 MACHINE=AMD64 INSTALLDIR=..\..\tcltk64 TCLDIR=..\..\tcl-8.5.9.0 all - nmake -f makefile.vc COMPILERFLAGS=-DWINVER=0x0500 OPTS=noxp DEBUG=1 MACHINE=AMD64 INSTALLDIR=..\..\tcltk64 TCLDIR=..\..\tcl-8.5.9.0 install + cd tk-8.5.11.0\win + nmake -f makefile.vc COMPILERFLAGS=-DWINVER=0x0500 OPTS=noxp DEBUG=1 MACHINE=AMD64 INSTALLDIR=..\..\tcltk64 TCLDIR=..\..\tcl-8.5.11.0 clean + nmake -f makefile.vc COMPILERFLAGS=-DWINVER=0x0500 OPTS=noxp DEBUG=1 MACHINE=AMD64 INSTALLDIR=..\..\tcltk64 TCLDIR=..\..\tcl-8.5.11.0 all + nmake -f makefile.vc COMPILERFLAGS=-DWINVER=0x0500 OPTS=noxp DEBUG=1 MACHINE=AMD64 INSTALLDIR=..\..\tcltk64 TCLDIR=..\..\tcl-8.5.11.0 install cd ..\.. ) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 22:45:25 2012 From: python-checkins at python.org (mark.dickinson) Date: Sat, 23 Jun 2012 22:45:25 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2314923=3A_Optimize_?= =?utf8?q?continuation-byte_check_in_UTF-8_decoding=2E__Patch_by?= Message-ID: http://hg.python.org/cpython/rev/3214c9ebcf5e changeset: 77649:3214c9ebcf5e user: Mark Dickinson date: Sat Jun 23 21:45:14 2012 +0100 summary: Issue #14923: Optimize continuation-byte check in UTF-8 decoding. Patch by Serhiy Storchaka. files: Objects/stringlib/codecs.h | 16 ++++++++++------ 1 files changed, 10 insertions(+), 6 deletions(-) diff --git a/Objects/stringlib/codecs.h b/Objects/stringlib/codecs.h --- a/Objects/stringlib/codecs.h +++ b/Objects/stringlib/codecs.h @@ -15,6 +15,9 @@ # error C 'long' size should be either 4 or 8! 
#endif +/* 10xxxxxx */ +#define IS_CONTINUATION_BYTE(ch) ((ch) >= 0x80 && (ch) < 0xC0) + Py_LOCAL_INLINE(Py_UCS4) STRINGLIB(utf8_decode)(const char **inptr, const char *end, STRINGLIB_CHAR *dest, @@ -107,7 +110,7 @@ break; } ch2 = (unsigned char)s[1]; - if ((ch2 & 0xC0) != 0x80) + if (!IS_CONTINUATION_BYTE(ch2)) /* invalid continuation byte */ goto InvalidContinuation; ch = (ch << 6) + ch2 - @@ -131,8 +134,8 @@ } ch2 = (unsigned char)s[1]; ch3 = (unsigned char)s[2]; - if ((ch2 & 0xC0) != 0x80 || - (ch3 & 0xC0) != 0x80) { + if (!IS_CONTINUATION_BYTE(ch2) || + !IS_CONTINUATION_BYTE(ch3)) { /* invalid continuation byte */ goto InvalidContinuation; } @@ -172,9 +175,9 @@ ch2 = (unsigned char)s[1]; ch3 = (unsigned char)s[2]; ch4 = (unsigned char)s[3]; - if ((ch2 & 0xC0) != 0x80 || - (ch3 & 0xC0) != 0x80 || - (ch4 & 0xC0) != 0x80) { + if (!IS_CONTINUATION_BYTE(ch2) || + !IS_CONTINUATION_BYTE(ch3) || + !IS_CONTINUATION_BYTE(ch4)) { /* invalid continuation byte */ goto InvalidContinuation; } @@ -216,6 +219,7 @@ } #undef ASCII_CHAR_MASK +#undef IS_CONTINUATION_BYTE /* UTF-8 encoder specialized for a Unicode kind to avoid the slow -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 22:59:31 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sat, 23 Jun 2012 22:59:31 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Lower_test-skipping_thresho?= =?utf8?q?ld_in_test=5Freprlib?= Message-ID: http://hg.python.org/cpython/rev/ec656d79b8ac changeset: 77650:ec656d79b8ac user: Antoine Pitrou date: Sat Jun 23 22:55:58 2012 +0200 summary: Lower test-skipping threshold in test_reprlib files: Lib/test/test_reprlib.py | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_reprlib.py b/Lib/test/test_reprlib.py --- a/Lib/test/test_reprlib.py +++ b/Lib/test/test_reprlib.py @@ -242,7 +242,7 @@ # a path separator + `module_name` + ".py" source_path_len += len(module_name) + 1 + len(".py") cached_path_len = source_path_len + len(imp.cache_from_source("x.py")) - len("x.py") - if os.name == 'nt' and cached_path_len >= 259: + if os.name == 'nt' and cached_path_len >= 258: # Under Windows, the max path len is 260 including C's terminating # NUL character. # (see http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247%28v=vs.85%29.aspx#maxpath) -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 23:21:54 2012 From: python-checkins at python.org (martin.v.loewis) Date: Sat, 23 Jun 2012 23:21:54 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315146=3A_Add_PyTyp?= =?utf8?q?e=5FFromSpecWithBases=2E_Patch_by_Robin_Schreiber=2E?= Message-ID: http://hg.python.org/cpython/rev/bb6df3781edb changeset: 77651:bb6df3781edb user: Martin v. L?wis date: Sat Jun 23 23:20:45 2012 +0200 summary: Issue #15146: Add PyType_FromSpecWithBases. Patch by Robin Schreiber. files: Doc/c-api/type.rst | 12 ++++ Include/object.h | 3 + Misc/NEWS | 2 + Objects/typeobject.c | 80 ++++++++++++++++++++++++++++++- PC/python3.def | 2 + PC/python33stub.def | 1 + 6 files changed, 96 insertions(+), 4 deletions(-) diff --git a/Doc/c-api/type.rst b/Doc/c-api/type.rst --- a/Doc/c-api/type.rst +++ b/Doc/c-api/type.rst @@ -85,3 +85,15 @@ their initialization. This function is responsible for adding inherited slots from a type's base class. Return ``0`` on success, or return ``-1`` and sets an exception on error. + +.. 
c:function:: PyObject* PyType_FromSpec(PyType_Spec *spec) + + Creates and returns a heap type object from the *spec* passed to the function. + +.. c:function:: PyObject* PyType_FromSpecWithBases(PyType_Spec *spec, PyObject *bases) + + Creates and returns a heap type object from the *spec*. In addition to that, + the created heap type contains all types contained by the *bases* tuple as base + types. This allows the caller to reference other heap types as base types. + + .. versionadded:: 3.3 diff --git a/Include/object.h b/Include/object.h --- a/Include/object.h +++ b/Include/object.h @@ -433,6 +433,9 @@ } PyType_Spec; PyAPI_FUNC(PyObject*) PyType_FromSpec(PyType_Spec*); +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03030000 +PyAPI_FUNC(PyObject*) PyType_FromSpecWithBases(PyType_Spec*, PyObject*); +#endif #ifndef Py_LIMITED_API /* The *real* layout of a type object when allocated on the heap */ diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,8 @@ Core and Builtins ----------------- +- Issue #15146: Add PyType_FromSpecWithBases. Patch by Robin Schreiber. + - Issue #15142: Fix reference leak when deallocating instances of types created using PyType_FromSpec(). diff --git a/Objects/typeobject.c b/Objects/typeobject.c --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -48,6 +48,9 @@ static PyObject * _PyType_LookupId(PyTypeObject *type, struct _Py_Identifier *name); +static PyObject * +slot_tp_new(PyTypeObject *type, PyObject *args, PyObject *kwds); + unsigned int PyType_ClearCache(void) { @@ -2375,22 +2378,75 @@ }; PyObject * -PyType_FromSpec(PyType_Spec *spec) +PyType_FromSpecWithBases(PyType_Spec *spec, PyObject *bases) { PyHeapTypeObject *res = (PyHeapTypeObject*)PyType_GenericAlloc(&PyType_Type, 0); + PyTypeObject *type, *base; + char *s; char *res_start = (char*)res; PyType_Slot *slot; + + /* Set the type name and qualname */ + s = strrchr(spec->name, '.'); + if (s == NULL) + s = (char*)spec->name; + else + s++; if (res == NULL) return NULL; - res->ht_name = PyUnicode_FromString(spec->name); + res->ht_name = PyUnicode_FromString(s); if (!res->ht_name) goto fail; res->ht_qualname = res->ht_name; Py_INCREF(res->ht_qualname); - res->ht_type.tp_name = _PyUnicode_AsString(res->ht_name); + res->ht_type.tp_name = spec->name; if (!res->ht_type.tp_name) goto fail; + + /* Adjust for empty tuple bases */ + if (!bases) { + base = &PyBaseObject_Type; + /* See whether Py_tp_base(s) was specified */ + for (slot = spec->slots; slot->slot; slot++) { + if (slot->slot == Py_tp_base) + base = slot->pfunc; + else if (slot->slot == Py_tp_bases) { + bases = slot->pfunc; + Py_INCREF(bases); + } + } + if (!bases) + bases = PyTuple_Pack(1, base); + if (!bases) + goto fail; + } + else + Py_INCREF(bases); + + /* Calculate best base, and check that all bases are type objects */ + base = best_base(bases); + if (base == NULL) { + goto fail; + } + if (!PyType_HasFeature(base, Py_TPFLAGS_BASETYPE)) { + PyErr_Format(PyExc_TypeError, + "type '%.100s' is not an acceptable base type", + base->tp_name); + goto fail; + } + + type = (PyTypeObject *)res; + /* Initialize essential fields */ + type->tp_as_number = &res->as_number; + type->tp_as_sequence = &res->as_sequence; + type->tp_as_mapping = &res->as_mapping; + type->tp_as_buffer = &res->as_buffer; + /* Set tp_base and tp_bases */ + type->tp_bases = bases; + bases = NULL; + Py_INCREF(base); + type->tp_base = base; res->ht_type.tp_basicsize = spec->basicsize; res->ht_type.tp_itemsize = spec->itemsize; @@ -2401,6 +2457,9 @@ 
PyErr_SetString(PyExc_RuntimeError, "invalid slot offset"); goto fail; } + if (slot->slot == Py_tp_base || slot->slot == Py_tp_bases) + /* Processed above */ + continue; *(void**)(res_start + slotoffsets[slot->slot]) = slot->pfunc; /* need to make a copy of the docstring slot, which usually @@ -2427,6 +2486,13 @@ if (PyType_Ready(&res->ht_type) < 0) goto fail; + /* Set type.__module__ */ + s = strrchr(spec->name, '.'); + if (s != NULL) + _PyDict_SetItemId(type->tp_dict, &PyId___module__, + PyUnicode_FromStringAndSize( + spec->name, (Py_ssize_t)(s - spec->name))); + return (PyObject*)res; fail: @@ -2434,6 +2500,12 @@ return NULL; } +PyObject * +PyType_FromSpec(PyType_Spec *spec) +{ + return PyType_FromSpecWithBases(spec, NULL); +} + /* Internal API to look for a name through the MRO. This returns a borrowed reference, and doesn't set an exception! */ @@ -4763,7 +4835,7 @@ object.__new__(dict). To do this, we check that the most derived base that's not a heap type is this type. */ staticbase = subtype; - while (staticbase && (staticbase->tp_flags & Py_TPFLAGS_HEAPTYPE)) + while (staticbase && (staticbase->tp_new == slot_tp_new)) staticbase = staticbase->tp_base; /* If staticbase is NULL now, it is a really weird type. In the spirit of backwards compatibility (?), just shut up. */ diff --git a/PC/python3.def b/PC/python3.def --- a/PC/python3.def +++ b/PC/python3.def @@ -1,3 +1,4 @@ +; When changing this file, run python33gen.py LIBRARY "python3" EXPORTS PyArg_Parse=python33.PyArg_Parse @@ -513,6 +514,7 @@ PyTuple_Type=python33.PyTuple_Type DATA PyType_ClearCache=python33.PyType_ClearCache PyType_FromSpec=python33.PyType_FromSpec + PyType_FromSpecWithBases=python33.PyType_FromSpecWithBases PyType_GenericAlloc=python33.PyType_GenericAlloc PyType_GenericNew=python33.PyType_GenericNew PyType_GetFlags=python33.PyType_GetFlags diff --git a/PC/python33stub.def b/PC/python33stub.def --- a/PC/python33stub.def +++ b/PC/python33stub.def @@ -513,6 +513,7 @@ PyTuple_Type PyType_ClearCache PyType_FromSpec +PyType_FromSpecWithBases PyType_GenericAlloc PyType_GenericNew PyType_GetFlags -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sat Jun 23 23:21:57 2012 From: python-checkins at python.org (martin.v.loewis) Date: Sat, 23 Jun 2012 23:21:57 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Fix_whitespace=2E?= Message-ID: http://hg.python.org/cpython/rev/fae226a1d3c4 changeset: 77652:fae226a1d3c4 user: Martin v. L?wis date: Sat Jun 23 23:21:48 2012 +0200 summary: Fix whitespace. files: Doc/c-api/type.rst | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/Doc/c-api/type.rst b/Doc/c-api/type.rst --- a/Doc/c-api/type.rst +++ b/Doc/c-api/type.rst @@ -85,7 +85,7 @@ their initialization. This function is responsible for adding inherited slots from a type's base class. Return ``0`` on success, or return ``-1`` and sets an exception on error. - + .. c:function:: PyObject* PyType_FromSpec(PyType_Spec *spec) Creates and returns a heap type object from the *spec* passed to the function. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 24 00:01:15 2012 From: python-checkins at python.org (martin.v.loewis) Date: Sun, 24 Jun 2012 00:01:15 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2311626=3A_Add_=5FSi?= =?utf8?q?zeT_functions_to_stable_ABI=2E?= Message-ID: http://hg.python.org/cpython/rev/3f14119e4af7 changeset: 77653:3f14119e4af7 user: Martin v. 
L?wis date: Sun Jun 24 00:00:30 2012 +0200 summary: Issue #11626: Add _SizeT functions to stable ABI. files: Include/modsupport.h | 5 ++++- Misc/NEWS | 2 ++ PC/python3.def | 6 ++++++ PC/python33gen.py | 4 +++- PC/python33stub.def | 6 ++++++ 5 files changed, 21 insertions(+), 2 deletions(-) diff --git a/Include/modsupport.h b/Include/modsupport.h --- a/Include/modsupport.h +++ b/Include/modsupport.h @@ -23,6 +23,8 @@ PyAPI_FUNC(PyObject *) _Py_VaBuildValue_SizeT(const char *, va_list); #endif +/* Due to a glitch in 3.2, the _SizeT versions weren't exported from the DLL. */ +#if !defined(PY_SSIZE_T_CLEAN) || !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03030000 PyAPI_FUNC(int) PyArg_Parse(PyObject *, const char *, ...); PyAPI_FUNC(int) PyArg_ParseTuple(PyObject *, const char *, ...) Py_FORMAT_PARSETUPLE(PyArg_ParseTuple, 2, 3); PyAPI_FUNC(int) PyArg_ParseTupleAndKeywords(PyObject *, PyObject *, @@ -31,13 +33,14 @@ PyAPI_FUNC(int) PyArg_UnpackTuple(PyObject *, const char *, Py_ssize_t, Py_ssize_t, ...); PyAPI_FUNC(PyObject *) Py_BuildValue(const char *, ...); PyAPI_FUNC(PyObject *) _Py_BuildValue_SizeT(const char *, ...); +#endif #ifndef Py_LIMITED_API PyAPI_FUNC(int) _PyArg_NoKeywords(const char *funcname, PyObject *kw); -#endif PyAPI_FUNC(int) PyArg_VaParse(PyObject *, const char *, va_list); PyAPI_FUNC(int) PyArg_VaParseTupleAndKeywords(PyObject *, PyObject *, const char *, char **, va_list); +#endif PyAPI_FUNC(PyObject *) Py_VaBuildValue(const char *, va_list); PyAPI_FUNC(int) PyModule_AddObject(PyObject *, const char *, PyObject *); diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,8 @@ Core and Builtins ----------------- +- Issue #11626: Add _SizeT functions to stable ABI. + - Issue #15146: Add PyType_FromSpecWithBases. Patch by Robin Schreiber. 
- Issue #15142: Fix reference leak when deallocating instances of types diff --git a/PC/python3.def b/PC/python3.def --- a/PC/python3.def +++ b/PC/python3.def @@ -691,3 +691,9 @@ _Py_SwappedOp=python33._Py_SwappedOp DATA _Py_TrueStruct=python33._Py_TrueStruct DATA _Py_VaBuildValue_SizeT=python33._Py_VaBuildValue_SizeT + _PyArg_Parse_SizeT=python33._PyArg_Parse_SizeT + _PyArg_ParseTuple_SizeT=python33._PyArg_ParseTuple_SizeT + _PyArg_ParseTupleAndKeywords_SizeT=python33._PyArg_ParseTupleAndKeywords_SizeT + _PyArg_VaParse_SizeT=python33._PyArg_VaParse_SizeT + _PyArg_VaParseTupleAndKeywords_SizeT=python33._PyArg_VaParseTupleAndKeywords_SizeT + _Py_BuildValue_SizeT=python33._Py_BuildValue_SizeT diff --git a/PC/python33gen.py b/PC/python33gen.py --- a/PC/python33gen.py +++ b/PC/python33gen.py @@ -7,8 +7,10 @@ out.write('EXPORTS\n') inp = open("python3.def") -inp.readline() line = inp.readline() +while line.strip().startswith(';'): + line = inp.readline() +line = inp.readline() # LIBRARY assert line.strip()=='EXPORTS' for line in inp: diff --git a/PC/python33stub.def b/PC/python33stub.def --- a/PC/python33stub.def +++ b/PC/python33stub.def @@ -690,3 +690,9 @@ _Py_SwappedOp _Py_TrueStruct _Py_VaBuildValue_SizeT +_PyArg_Parse_SizeT +_PyArg_ParseTuple_SizeT +_PyArg_ParseTupleAndKeywords_SizeT +_PyArg_VaParse_SizeT +_PyArg_VaParseTupleAndKeywords_SizeT +_Py_BuildValue_SizeT -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 24 00:22:35 2012 From: python-checkins at python.org (martin.v.loewis) Date: Sun, 24 Jun 2012 00:22:35 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315102=3A_find_pyth?= =?utf8?q?on=2Eexe_in_OutDir=2C_not_SolutionDir=2E?= Message-ID: http://hg.python.org/cpython/rev/9c71d7f057b3 changeset: 77654:9c71d7f057b3 user: Martin v. L?wis date: Sun Jun 24 00:22:28 2012 +0200 summary: Issue #15102: find python.exe in OutDir, not SolutionDir. files: PCbuild/pyproject.props | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/PCbuild/pyproject.props b/PCbuild/pyproject.props --- a/PCbuild/pyproject.props +++ b/PCbuild/pyproject.props @@ -2,7 +2,7 @@ python33$(PyDebugExt) - $(SolutionDir)python$(PyDebugExt).exe + $(OutDir)python$(PyDebugExt).exe $(OutDir)kill_python$(PyDebugExt).exe ..\.. $(externalsDir)\sqlite-3.7.12 -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 24 00:24:04 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sun, 24 Jun 2012 00:24:04 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Use_struct_member_=28ht=5Ft?= =?utf8?q?ype=29_instead_of_casting_pointers=2E?= Message-ID: http://hg.python.org/cpython/rev/7ce45e2625c3 changeset: 77655:7ce45e2625c3 parent: 77652:fae226a1d3c4 user: Antoine Pitrou date: Sun Jun 24 00:18:27 2012 +0200 summary: Use struct member (ht_type) instead of casting pointers. 
files: Objects/typeobject.c | 28 ++++++++++++++-------------- 1 files changed, 14 insertions(+), 14 deletions(-) diff --git a/Objects/typeobject.c b/Objects/typeobject.c --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -2394,14 +2394,15 @@ s++; if (res == NULL) - return NULL; + return NULL; + type = &res->ht_type; res->ht_name = PyUnicode_FromString(s); if (!res->ht_name) goto fail; res->ht_qualname = res->ht_name; Py_INCREF(res->ht_qualname); - res->ht_type.tp_name = spec->name; - if (!res->ht_type.tp_name) + type->tp_name = spec->name; + if (!type->tp_name) goto fail; /* Adjust for empty tuple bases */ @@ -2436,7 +2437,6 @@ goto fail; } - type = (PyTypeObject *)res; /* Initialize essential fields */ type->tp_as_number = &res->as_number; type->tp_as_sequence = &res->as_sequence; @@ -2448,9 +2448,9 @@ Py_INCREF(base); type->tp_base = base; - res->ht_type.tp_basicsize = spec->basicsize; - res->ht_type.tp_itemsize = spec->itemsize; - res->ht_type.tp_flags = spec->flags | Py_TPFLAGS_HEAPTYPE; + type->tp_basicsize = spec->basicsize; + type->tp_itemsize = spec->itemsize; + type->tp_flags = spec->flags | Py_TPFLAGS_HEAPTYPE; for (slot = spec->slots; slot->slot; slot++) { if (slot->slot >= Py_ARRAY_LENGTH(slotoffsets)) { @@ -2470,20 +2470,20 @@ if (tp_doc == NULL) goto fail; memcpy(tp_doc, slot->pfunc, len); - res->ht_type.tp_doc = tp_doc; + type->tp_doc = tp_doc; } } - if (res->ht_type.tp_dictoffset) { + if (type->tp_dictoffset) { res->ht_cached_keys = _PyDict_NewKeysForClass(); } - if (res->ht_type.tp_dealloc == NULL) { + if (type->tp_dealloc == NULL) { /* It's a heap type, so needs the heap types' dealloc. subtype_dealloc will call the base type's tp_dealloc, if necessary. */ - res->ht_type.tp_dealloc = subtype_dealloc; - } - - if (PyType_Ready(&res->ht_type) < 0) + type->tp_dealloc = subtype_dealloc; + } + + if (PyType_Ready(type) < 0) goto fail; /* Set type.__module__ */ -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 24 00:24:05 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sun, 24 Jun 2012 00:24:05 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_Merge?= Message-ID: http://hg.python.org/cpython/rev/b0c7e968bb68 changeset: 77656:b0c7e968bb68 parent: 77655:7ce45e2625c3 parent: 77653:3f14119e4af7 user: Antoine Pitrou date: Sun Jun 24 00:20:11 2012 +0200 summary: Merge files: Include/modsupport.h | 5 ++++- Misc/NEWS | 2 ++ PC/python3.def | 6 ++++++ PC/python33gen.py | 4 +++- PC/python33stub.def | 6 ++++++ 5 files changed, 21 insertions(+), 2 deletions(-) diff --git a/Include/modsupport.h b/Include/modsupport.h --- a/Include/modsupport.h +++ b/Include/modsupport.h @@ -23,6 +23,8 @@ PyAPI_FUNC(PyObject *) _Py_VaBuildValue_SizeT(const char *, va_list); #endif +/* Due to a glitch in 3.2, the _SizeT versions weren't exported from the DLL. */ +#if !defined(PY_SSIZE_T_CLEAN) || !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03030000 PyAPI_FUNC(int) PyArg_Parse(PyObject *, const char *, ...); PyAPI_FUNC(int) PyArg_ParseTuple(PyObject *, const char *, ...) 
Py_FORMAT_PARSETUPLE(PyArg_ParseTuple, 2, 3); PyAPI_FUNC(int) PyArg_ParseTupleAndKeywords(PyObject *, PyObject *, @@ -31,13 +33,14 @@ PyAPI_FUNC(int) PyArg_UnpackTuple(PyObject *, const char *, Py_ssize_t, Py_ssize_t, ...); PyAPI_FUNC(PyObject *) Py_BuildValue(const char *, ...); PyAPI_FUNC(PyObject *) _Py_BuildValue_SizeT(const char *, ...); +#endif #ifndef Py_LIMITED_API PyAPI_FUNC(int) _PyArg_NoKeywords(const char *funcname, PyObject *kw); -#endif PyAPI_FUNC(int) PyArg_VaParse(PyObject *, const char *, va_list); PyAPI_FUNC(int) PyArg_VaParseTupleAndKeywords(PyObject *, PyObject *, const char *, char **, va_list); +#endif PyAPI_FUNC(PyObject *) Py_VaBuildValue(const char *, va_list); PyAPI_FUNC(int) PyModule_AddObject(PyObject *, const char *, PyObject *); diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -10,6 +10,8 @@ Core and Builtins ----------------- +- Issue #11626: Add _SizeT functions to stable ABI. + - Issue #15146: Add PyType_FromSpecWithBases. Patch by Robin Schreiber. - Issue #15142: Fix reference leak when deallocating instances of types diff --git a/PC/python3.def b/PC/python3.def --- a/PC/python3.def +++ b/PC/python3.def @@ -691,3 +691,9 @@ _Py_SwappedOp=python33._Py_SwappedOp DATA _Py_TrueStruct=python33._Py_TrueStruct DATA _Py_VaBuildValue_SizeT=python33._Py_VaBuildValue_SizeT + _PyArg_Parse_SizeT=python33._PyArg_Parse_SizeT + _PyArg_ParseTuple_SizeT=python33._PyArg_ParseTuple_SizeT + _PyArg_ParseTupleAndKeywords_SizeT=python33._PyArg_ParseTupleAndKeywords_SizeT + _PyArg_VaParse_SizeT=python33._PyArg_VaParse_SizeT + _PyArg_VaParseTupleAndKeywords_SizeT=python33._PyArg_VaParseTupleAndKeywords_SizeT + _Py_BuildValue_SizeT=python33._Py_BuildValue_SizeT diff --git a/PC/python33gen.py b/PC/python33gen.py --- a/PC/python33gen.py +++ b/PC/python33gen.py @@ -7,8 +7,10 @@ out.write('EXPORTS\n') inp = open("python3.def") -inp.readline() line = inp.readline() +while line.strip().startswith(';'): + line = inp.readline() +line = inp.readline() # LIBRARY assert line.strip()=='EXPORTS' for line in inp: diff --git a/PC/python33stub.def b/PC/python33stub.def --- a/PC/python33stub.def +++ b/PC/python33stub.def @@ -690,3 +690,9 @@ _Py_SwappedOp _Py_TrueStruct _Py_VaBuildValue_SizeT +_PyArg_Parse_SizeT +_PyArg_ParseTuple_SizeT +_PyArg_ParseTupleAndKeywords_SizeT +_PyArg_VaParse_SizeT +_PyArg_VaParseTupleAndKeywords_SizeT +_Py_BuildValue_SizeT -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 24 00:24:06 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sun, 24 Jun 2012 00:24:06 +0200 Subject: [Python-checkins] =?utf8?q?cpython_=28merge_default_-=3E_default?= =?utf8?q?=29=3A_Merge?= Message-ID: http://hg.python.org/cpython/rev/192e97dd5acf changeset: 77657:192e97dd5acf parent: 77656:b0c7e968bb68 parent: 77654:9c71d7f057b3 user: Antoine Pitrou date: Sun Jun 24 00:20:26 2012 +0200 summary: Merge files: PCbuild/pyproject.props | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/PCbuild/pyproject.props b/PCbuild/pyproject.props --- a/PCbuild/pyproject.props +++ b/PCbuild/pyproject.props @@ -2,7 +2,7 @@ python33$(PyDebugExt) - $(SolutionDir)python$(PyDebugExt).exe + $(OutDir)python$(PyDebugExt).exe $(OutDir)kill_python$(PyDebugExt).exe ..\.. 
$(externalsDir)\sqlite-3.7.12 -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 24 00:34:12 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sun, 24 Jun 2012 00:34:12 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Replace_assert=28=29_with_a?= =?utf8?q?_more_informative_fatal_error=2E?= Message-ID: http://hg.python.org/cpython/rev/515747aa191f changeset: 77658:515747aa191f user: Antoine Pitrou date: Sun Jun 24 00:30:12 2012 +0200 summary: Replace assert() with a more informative fatal error. files: Objects/typeobject.c | 7 ++++++- 1 files changed, 6 insertions(+), 1 deletions(-) diff --git a/Objects/typeobject.c b/Objects/typeobject.c --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -2858,7 +2858,12 @@ { /* Because of type_is_gc(), the collector only calls this for heaptypes. */ - assert(type->tp_flags & Py_TPFLAGS_HEAPTYPE); + if (!(type->tp_flags & Py_TPFLAGS_HEAPTYPE)) { + char msg[200]; + sprintf(msg, "type_traverse() called for non-heap type '%.100s'", + type->tp_name); + Py_FatalError(msg); + } Py_VISIT(type->tp_dict); Py_VISIT(type->tp_cache); -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 24 00:46:33 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sun, 24 Jun 2012 00:46:33 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Try_to_fix_crash_on_x86_Ope?= =?utf8?q?nIndiana_buildbot=2E?= Message-ID: http://hg.python.org/cpython/rev/33737210c906 changeset: 77659:33737210c906 user: Antoine Pitrou date: Sun Jun 24 00:42:59 2012 +0200 summary: Try to fix crash on x86 OpenIndiana buildbot. files: Objects/typeobject.c | 3 ++- 1 files changed, 2 insertions(+), 1 deletions(-) diff --git a/Objects/typeobject.c b/Objects/typeobject.c --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -2396,6 +2396,8 @@ if (res == NULL) return NULL; type = &res->ht_type; + /* The flags must be initialized early, before the GC traverses us */ + type->tp_flags = spec->flags | Py_TPFLAGS_HEAPTYPE; res->ht_name = PyUnicode_FromString(s); if (!res->ht_name) goto fail; @@ -2450,7 +2452,6 @@ type->tp_basicsize = spec->basicsize; type->tp_itemsize = spec->itemsize; - type->tp_flags = spec->flags | Py_TPFLAGS_HEAPTYPE; for (slot = spec->slots; slot->slot; slot++) { if (slot->slot >= Py_ARRAY_LENGTH(slotoffsets)) { -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 24 00:55:56 2012 From: python-checkins at python.org (philip.jenvey) Date: Sun, 24 Jun 2012 00:55:56 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_small_refactor_and_PEP8_lin?= =?utf8?q?e_width/docstring?= Message-ID: http://hg.python.org/cpython/rev/48623d846f80 changeset: 77660:48623d846f80 user: Philip Jenvey date: Sat Jun 23 15:54:38 2012 -0700 summary: small refactor and PEP8 line width/docstring files: Lib/shutil.py | 22 +++++++++++++--------- 1 files changed, 13 insertions(+), 9 deletions(-) diff --git a/Lib/shutil.py b/Lib/shutil.py --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -1037,17 +1037,20 @@ def which(cmd, mode=os.F_OK | os.X_OK, path=None): """Given a command, mode, and a PATH string, return the path which - conforms to the given mode on the PATH, or None if there is no such file. - `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result of - os.environ.get("PATH"), or can be overridden with a custom search path.""" + conforms to the given mode on the PATH, or None if there is no such + file. + + `mode` defaults to os.F_OK | os.X_OK. 
`path` defaults to the result + of os.environ.get("PATH"), or can be overridden with a custom search + path. + + """ # Check that a given file can be accessed with the correct mode. # Additionally check that `file` is not a directory, as on Windows # directories pass the os.access check. def _access_check(fn, mode): - if (os.path.exists(fn) and os.access(fn, mode) - and not os.path.isdir(fn)): - return True - return False + return (os.path.exists(fn) and os.access(fn, mode) + and not os.path.isdir(fn)) # Short circuit. If we're given a full path which matches the mode # and it exists, we're done here. @@ -1066,8 +1069,9 @@ # See if the given file matches any of the expected path extensions. # This will allow us to short circuit when given "python.exe". matches = [cmd for ext in pathext if cmd.lower().endswith(ext.lower())] - # If it does match, only test that one, otherwise we have to try others. - files = [cmd + ext.lower() for ext in pathext] if not matches else [cmd] + # If it does match, only test that one, otherwise we have to try + # others. + files = [cmd] if matches else [cmd + ext.lower() for ext in pathext] else: # On other platforms you don't have things like PATHEXT to tell you # what file suffixes are executable, so just pass on cmd as-is. -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 24 01:03:22 2012 From: python-checkins at python.org (ned.deily) Date: Sun, 24 Jun 2012 01:03:22 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2313590=3A_Improve_s?= =?utf8?q?upport_for_OS_X_Xcode_4=3A?= Message-ID: http://hg.python.org/cpython/rev/688d48e434e4 changeset: 77661:688d48e434e4 user: Ned Deily date: Sat Jun 23 16:02:19 2012 -0700 summary: Issue #13590: Improve support for OS X Xcode 4: - Try to avoid building Python or extension modules with problematic llvm-gcc compiler. - Since Xcode 4 removes ppc support, extension module builds now check for ppc compiler support and automatically remove ppc and ppc64 archs when not available. - Since Xcode 4 no longer install SDKs in default locations, extension module builds now revert to using installed headers and libs if the SDK used to build the interpreter is not available. - Update ./configure to use better defaults for universal builds; in particular, --enable-universalsdk=yes uses the Xcode default SDK and --with-universal-archs now defaults to "intel" if ppc not available. files: Lib/distutils/sysconfig.py | 147 +++++- Lib/distutils/unixccompiler.py | 21 +- Misc/NEWS | 15 + configure | 405 ++++++++++++++------- configure.ac | 87 ++++- 5 files changed, 498 insertions(+), 177 deletions(-) diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py --- a/Lib/distutils/sysconfig.py +++ b/Lib/distutils/sysconfig.py @@ -162,7 +162,7 @@ "I don't know where Python installs its library " "on platform '%s'" % os.name) -_USE_CLANG = None + def customize_compiler(compiler): """Do any platform-specific customization of a CCompiler instance. @@ -177,36 +177,7 @@ newcc = None if 'CC' in os.environ: - newcc = os.environ['CC'] - elif sys.platform == 'darwin' and cc == 'gcc-4.2': - # Issue #13590: - # Since Apple removed gcc-4.2 in Xcode 4.2, we can no - # longer assume it is available for extension module builds. - # If Python was built with gcc-4.2, check first to see if - # it is available on this system; if not, try to use clang - # instead unless the caller explicitly set CC. 
- global _USE_CLANG - if _USE_CLANG is None: - from distutils import log - from subprocess import Popen, PIPE - p = Popen("! type gcc-4.2 && type clang && exit 2", - shell=True, stdout=PIPE, stderr=PIPE) - p.wait() - if p.returncode == 2: - _USE_CLANG = True - log.warn("gcc-4.2 not found, using clang instead") - else: - _USE_CLANG = False - if _USE_CLANG: - newcc = 'clang' - if newcc: - # On OS X, if CC is overridden, use that as the default - # command for LDSHARED as well - if (sys.platform == 'darwin' - and 'LDSHARED' not in os.environ - and ldshared.startswith(cc)): - ldshared = newcc + ldshared[len(cc):] - cc = newcc + cc = os.environ['CC'] if 'CXX' in os.environ: cxx = os.environ['CXX'] if 'LDSHARED' in os.environ: @@ -522,6 +493,29 @@ _config_vars = g +def _read_output(commandstring): + """ + Returns os.popen(commandstring, "r").read(), but + without actually using os.popen because that + function is not usable during python bootstrap + """ + # NOTE: tempfile is also not useable during + # bootstrap + import contextlib + try: + import tempfile + fp = tempfile.NamedTemporaryFile() + except ImportError: + fp = open("/tmp/distutils.%s"%( + os.getpid(),), "w+b") + + with contextlib.closing(fp) as fp: + cmd = "%s >'%s'"%(commandstring, fp.name) + os.system(cmd) + data = fp.read() + + return data.decode('utf-8') + def get_config_vars(*args): """With no arguments, return a dictionary of all configuration variables relevant for the current platform. Generally this includes @@ -561,9 +555,70 @@ _config_vars['srcdir'] = os.path.normpath(srcdir) if sys.platform == 'darwin': + from distutils.spawn import find_executable + kernel_version = os.uname()[2] # Kernel version (8.4.3) major_version = int(kernel_version.split('.')[0]) + # Issue #13590: + # The OSX location for the compiler varies between OSX + # (or rather Xcode) releases. With older releases (up-to 10.5) + # the compiler is in /usr/bin, with newer releases the compiler + # can only be found inside Xcode.app if the "Command Line Tools" + # are not installed. + # + # Futhermore, the compiler that can be used varies between + # Xcode releases. Upto Xcode 4 it was possible to use 'gcc-4.2' + # as the compiler, after that 'clang' should be used because + # gcc-4.2 is either not present, or a copy of 'llvm-gcc' that + # miscompiles Python. + + # skip checks if the compiler was overriden with a CC env variable + if 'CC' not in os.environ: + cc = oldcc = _config_vars['CC'] + if not find_executable(cc): + # Compiler is not found on the shell search PATH. + # Now search for clang, first on PATH (if the Command LIne + # Tools have been installed in / or if the user has provided + # another location via CC). If not found, try using xcrun + # to find an uninstalled clang (within a selected Xcode). 
+ + # NOTE: Cannot use subprocess here because of bootstrap + # issues when building Python itself (and os.popen is + # implemented on top of subprocess and is therefore not + # usable as well) + + data = (find_executable('clang') or + _read_output( + "/usr/bin/xcrun -find clang 2>/dev/null").strip()) + if not data: + raise DistutilsPlatformError( + "Cannot locate working compiler") + + _config_vars['CC'] = cc = data + _config_vars['CXX'] = cc + '++' + + elif os.path.basename(cc).startswith('gcc'): + # Compiler is GCC, check if it is LLVM-GCC + data = _read_output("'%s' --version 2>/dev/null" + % (cc.replace("'", "'\"'\"'"),)) + if 'llvm-gcc' in data: + # Found LLVM-GCC, fall back to clang + data = (find_executable('clang') or + _read_output( + "/usr/bin/xcrun -find clang 2>/dev/null").strip()) + if find_executable(data): + _config_vars['CC'] = cc = data + _config_vars['CXX'] = cc + '++' + + if (cc != oldcc + and 'LDSHARED' in _config_vars + and 'LDSHARED' not in os.environ): + # modify LDSHARED if we modified CC + ldshared = _config_vars['LDSHARED'] + if ldshared.startswith(oldcc): + _config_vars['LDSHARED'] = cc + ldshared[len(oldcc):] + if major_version < 8: # On Mac OS X before 10.4, check if -arch and -isysroot # are in CFLAGS or LDFLAGS and remove them if they are. @@ -579,19 +634,45 @@ _config_vars[key] = flags else: + # Different Xcode releases support different sets for '-arch' + # flags. In particular, Xcode 4.x no longer supports the + # PPC architectures. + # + # This code automatically removes '-arch ppc' and '-arch ppc64' + # when these are not supported. That makes it possible to + # build extensions on OSX 10.7 and later with the prebuilt + # 32-bit installer on the python.org website. + flags = _config_vars['CFLAGS'] + if re.search('-arch\s+ppc', flags) is not None: + # NOTE: Cannot use subprocess here because of bootstrap + # issues when building Python itself + status = os.system("'%s' -arch ppc -x c /dev/null 2>/dev/null"%( + _config_vars['CC'].replace("'", "'\"'\"'"),)) + + if status != 0: + # Compiler doesn't support PPC, remove the related + # '-arch' flags. + for key in ('LDFLAGS', 'BASECFLAGS', + # a number of derived variables. These need to be + # patched up as well. + 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED', 'LDSHARED'): + + flags = _config_vars[key] + flags = re.sub('-arch\s+ppc\w*\s', ' ', flags) + _config_vars[key] = flags + # Allow the user to override the architecture flags using # an environment variable. # NOTE: This name was introduced by Apple in OSX 10.5 and # is used by several scripting languages distributed with # that OS release. - if 'ARCHFLAGS' in os.environ: arch = os.environ['ARCHFLAGS'] for key in ('LDFLAGS', 'BASECFLAGS', # a number of derived variables. These need to be # patched up as well. - 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): + 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED', 'LDSHARED'): flags = _config_vars[key] flags = re.sub('-arch\s+\w+\s', ' ', flags) diff --git a/Lib/distutils/unixccompiler.py b/Lib/distutils/unixccompiler.py --- a/Lib/distutils/unixccompiler.py +++ b/Lib/distutils/unixccompiler.py @@ -83,9 +83,8 @@ except ValueError: pass - # Check if the SDK that is used during compilation actually exists, - # the universal build requires the usage of a universal SDK and not all - # users have that installed by default. + # Check if the SDK that is used during compilation actually exists. + # If not, revert to using the installed headers and hope for the best. 
sysroot = None if '-isysroot' in cc_args: idx = cc_args.index('-isysroot') @@ -97,7 +96,21 @@ if sysroot and not os.path.isdir(sysroot): log.warn("Compiling with an SDK that doesn't seem to exist: %s", sysroot) - log.warn("Please check your Xcode installation") + log.warn("Attempting to compile without the SDK") + while True: + try: + index = cc_args.index('-isysroot') + # Strip this argument and the next one: + del cc_args[index:index+2] + except ValueError: + break + while True: + try: + index = compiler_so.index('-isysroot') + # Strip this argument and the next one: + del compiler_so[index:index+2] + except ValueError: + break return compiler_so diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -234,6 +234,21 @@ Build ----- +- Issue #13590: Improve support for OS X Xcode 4: + * Try to avoid building Python or extension modules with problematic + llvm-gcc compiler. + * Since Xcode 4 removes ppc support, extension module builds now + check for ppc compiler support and automatically remove ppc and + ppc64 archs when not available. + * Since Xcode 4 no longer install SDKs in default locations, + extension module builds now revert to using installed headers + and libs if the SDK used to build the interpreter is not + available. + * Update ./configure to use better defaults for universal builds; + in particular, --enable-universalsdk=yes uses the Xcode default + SDK and --with-universal-archs now defaults to "intel" if ppc + not available. + - Issue #14225: Fix Unicode support for curses (#12567) on OS X - Issue #14928: Fix importlib bootstrap issues by using a custom executable diff --git a/configure b/configure --- a/configure +++ b/configure @@ -1,13 +1,11 @@ #! /bin/sh # Guess values for system-dependent variables and create Makefiles. -# Generated by GNU Autoconf 2.68 for python 3.3. +# Generated by GNU Autoconf 2.69 for python 3.3. # # Report bugs to . # # -# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, -# 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software -# Foundation, Inc. +# Copyright (C) 1992-1996, 1998-2012 Free Software Foundation, Inc. # # # This configure script is free software; the Free Software Foundation @@ -136,6 +134,31 @@ # CDPATH. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH +# Use a proper internal environment variable to ensure we don't fall + # into an infinite loop, continuously re-executing ourselves. + if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then + _as_can_reexec=no; export _as_can_reexec; + # We cannot yet assume a decent shell, so we have to provide a +# neutralization value for shells without unset; and this also +# works around shells that cannot unset nonexistent variables. +# Preserve -v and -x to the replacement shell. +BASH_ENV=/dev/null +ENV=/dev/null +(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV +case $- in # (((( + *v*x* | *x*v* ) as_opts=-vx ;; + *v* ) as_opts=-v ;; + *x* ) as_opts=-x ;; + * ) as_opts= ;; +esac +exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} +# Admittedly, this is quite paranoid, since all the known shells bail +# out after a failed `exec'. +$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 +as_fn_exit 255 + fi + # We don't want this to propagate to other subprocesses. 
+ { _as_can_reexec=; unset _as_can_reexec;} if test "x$CONFIG_SHELL" = x; then as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then : emulate sh @@ -169,7 +192,8 @@ else exitcode=1; echo positional parameters were not saved. fi -test x\$exitcode = x0 || exit 1" +test x\$exitcode = x0 || exit 1 +test -x / || exit 1" as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && @@ -214,21 +238,25 @@ if test "x$CONFIG_SHELL" != x; then : - # We cannot yet assume a decent shell, so we have to provide a - # neutralization value for shells without unset; and this also - # works around shells that cannot unset nonexistent variables. - # Preserve -v and -x to the replacement shell. - BASH_ENV=/dev/null - ENV=/dev/null - (unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV - export CONFIG_SHELL - case $- in # (((( - *v*x* | *x*v* ) as_opts=-vx ;; - *v* ) as_opts=-v ;; - *x* ) as_opts=-x ;; - * ) as_opts= ;; - esac - exec "$CONFIG_SHELL" $as_opts "$as_myself" ${1+"$@"} + export CONFIG_SHELL + # We cannot yet assume a decent shell, so we have to provide a +# neutralization value for shells without unset; and this also +# works around shells that cannot unset nonexistent variables. +# Preserve -v and -x to the replacement shell. +BASH_ENV=/dev/null +ENV=/dev/null +(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV +case $- in # (((( + *v*x* | *x*v* ) as_opts=-vx ;; + *v* ) as_opts=-v ;; + *x* ) as_opts=-x ;; + * ) as_opts= ;; +esac +exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} +# Admittedly, this is quite paranoid, since all the known shells bail +# out after a failed `exec'. +$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 +exit 255 fi if test x$as_have_required = xno; then : @@ -331,6 +359,14 @@ } # as_fn_mkdir_p + +# as_fn_executable_p FILE +# ----------------------- +# Test if FILE is an executable regular file. +as_fn_executable_p () +{ + test -f "$1" && test -x "$1" +} # as_fn_executable_p # as_fn_append VAR VALUE # ---------------------- # Append the text in VALUE to the end of the definition contained in VAR. Take @@ -452,6 +488,10 @@ chmod +x "$as_me.lineno" || { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } + # If we had to re-execute with $CONFIG_SHELL, we're ensured to have + # already done that, so ensure we don't try to do so again and fall + # in an infinite loop. This has already happened in practice. + _as_can_reexec=no; export _as_can_reexec # Don't try to exec as it changes $[0], causing all sort of problems # (the dirname of $[0] is not the place where we might find the # original and so on. Autoconf is especially sensitive to this). @@ -486,16 +526,16 @@ # ... but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. - # In both cases, we have to default to `cp -p'. + # In both cases, we have to default to `cp -pR'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! 
-f conf$$.exe || - as_ln_s='cp -p' + as_ln_s='cp -pR' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else - as_ln_s='cp -p' - fi -else - as_ln_s='cp -p' + as_ln_s='cp -pR' + fi +else + as_ln_s='cp -pR' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null @@ -507,28 +547,8 @@ as_mkdir_p=false fi -if test -x / >/dev/null 2>&1; then - as_test_x='test -x' -else - if ls -dL / >/dev/null 2>&1; then - as_ls_L_option=L - else - as_ls_L_option= - fi - as_test_x=' - eval sh -c '\'' - if test -d "$1"; then - test -d "$1/."; - else - case $1 in #( - -*)set "./$1";; - esac; - case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #(( - ???[sx]*):;;*)false;;esac;fi - '\'' sh - ' -fi -as_executable_p=$as_test_x +as_test_x='test -x' +as_executable_p=as_fn_executable_p # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" @@ -1243,8 +1263,6 @@ if test "x$host_alias" != x; then if test "x$build_alias" = x; then cross_compiling=maybe - $as_echo "$as_me: WARNING: if you wanted to set the --build type, don't use --host. - If a cross compiler is detected then cross compile mode will be used" >&2 elif test "x$build_alias" != "x$host_alias"; then cross_compiling=yes fi @@ -1404,7 +1422,7 @@ --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no) --enable-FEATURE[=ARG] include FEATURE [ARG=yes] --enable-universalsdk[=SDKDIR] - Build against Mac OS X 10.4u SDK (ppc/i386) + Build fat binary against Mac OS X SDK --enable-framework[=INSTALLDIR] Build (MacOSX|Darwin) framework --enable-shared disable/enable building shared python library @@ -1534,9 +1552,9 @@ if $ac_init_version; then cat <<\_ACEOF python configure 3.3 -generated by GNU Autoconf 2.68 - -Copyright (C) 2010 Free Software Foundation, Inc. +generated by GNU Autoconf 2.69 + +Copyright (C) 2012 Free Software Foundation, Inc. This configure script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it. _ACEOF @@ -1612,7 +1630,7 @@ test ! 
-s conftest.err } && test -s conftest$ac_exeext && { test "$cross_compiling" = yes || - $as_test_x conftest$ac_exeext + test -x conftest$ac_exeext }; then : ac_retval=0 else @@ -1910,7 +1928,8 @@ main () { static int test_array [1 - 2 * !((($ac_type) -1 >> ($2 / 2 - 1)) >> ($2 / 2 - 1) == 3)]; -test_array [0] = 0 +test_array [0] = 0; +return test_array [0]; ; return 0; @@ -1964,7 +1983,8 @@ main () { static int test_array [1 - 2 * !(0 < ($ac_type) ((((($ac_type) 1 << N) << N) - 1) * 2 + 1))]; -test_array [0] = 0 +test_array [0] = 0; +return test_array [0]; ; return 0; @@ -1980,7 +2000,8 @@ { static int test_array [1 - 2 * !(($ac_type) ((((($ac_type) 1 << N) << N) - 1) * 2 + 1) < ($ac_type) ((((($ac_type) 1 << N) << N) - 1) * 2 + 2))]; -test_array [0] = 0 +test_array [0] = 0; +return test_array [0]; ; return 0; @@ -2030,7 +2051,8 @@ main () { static int test_array [1 - 2 * !(($2) >= 0)]; -test_array [0] = 0 +test_array [0] = 0; +return test_array [0]; ; return 0; @@ -2046,7 +2068,8 @@ main () { static int test_array [1 - 2 * !(($2) <= $ac_mid)]; -test_array [0] = 0 +test_array [0] = 0; +return test_array [0]; ; return 0; @@ -2072,7 +2095,8 @@ main () { static int test_array [1 - 2 * !(($2) < 0)]; -test_array [0] = 0 +test_array [0] = 0; +return test_array [0]; ; return 0; @@ -2088,7 +2112,8 @@ main () { static int test_array [1 - 2 * !(($2) >= $ac_mid)]; -test_array [0] = 0 +test_array [0] = 0; +return test_array [0]; ; return 0; @@ -2122,7 +2147,8 @@ main () { static int test_array [1 - 2 * !(($2) <= $ac_mid)]; -test_array [0] = 0 +test_array [0] = 0; +return test_array [0]; ; return 0; @@ -2365,7 +2391,7 @@ running configure, to aid debugging if configure makes a mistake. It was created by python $as_me 3.3, which was -generated by GNU Autoconf 2.68. Invocation command line was +generated by GNU Autoconf 2.69. Invocation command line was $ $0 $@ @@ -2735,7 +2761,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_HAS_HG="found" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -2944,10 +2970,16 @@ enableval=$enable_universalsdk; case $enableval in yes) - enableval=/Developer/SDKs/MacOSX10.4u.sdk - if test ! -d "${enableval}" + # Locate the best usable SDK, see Mac/README.txt for more + # information + enableval="`/usr/bin/xcodebuild -version -sdk macosx Path 2>/dev/null`" + if test -z "${enableval}" then - enableval=/ + enableval=/Developer/SDKs/MacOSX10.4u.sdk + if test ! -d "${enableval}" + then + enableval=/ + fi fi ;; esac @@ -2985,7 +3017,20 @@ +# For backward compatibility reasons we prefer to select '32-bit' if available, +# otherwise use 'intel' UNIVERSAL_ARCHS="32-bit" +if test "`uname -s`" = "Darwin" +then + if test -n "${UNIVERSALSDK}" + then + if test -z "`/usr/bin/file "${UNIVERSALSDK}/usr/lib/libSystem.dylib" | grep ppc`" + then + UNIVERSAL_ARCHS="intel" + fi + fi +fi + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-universal-archs" >&5 $as_echo_n "checking for --with-universal-archs... 
" >&6; } @@ -2999,8 +3044,8 @@ else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: 32-bit" >&5 -$as_echo "32-bit" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${UNIVERSAL_ARCHS}" >&5 +$as_echo "${UNIVERSAL_ARCHS}" >&6; } fi @@ -3377,6 +3422,66 @@ if test -z "$CFLAGS"; then CFLAGS= fi + +if test "$ac_sys_system" = "Darwin" +then + # Compiler selection on MacOSX is more complicated than + # AC_PROG_CC can handle, see Mac/README.txt for more + # information + if test -z "${CC}" + then + found_gcc= + found_clang= + as_save_IFS=$IFS; IFS=: + for as_dir in $PATH + do + IFS=$as_save_IFS + if test -x $as_dir/gcc; then + if test -z "${found_gcc}"; then + found_gcc=$as_dir/gcc + fi + fi + if test -x $as_dir/clang; then + if test -z "${found_clang}"; then + found_clang=$as_dir/clang + fi + fi + done + IFS=$as_save_IFS + + if test -n "$found_gcc" -a -n "$found_clang" + then + if test -n "`"$found_gcc" --version | grep llvm-gcc`" + then + { $as_echo "$as_me:${as_lineno-$LINENO}: Detected llvm-gcc, falling back to clang" >&5 +$as_echo "$as_me: Detected llvm-gcc, falling back to clang" >&6;} + CC="$found_clang" + CXX="$found_clang++" + fi + + + elif test -z "$found_gcc" -a -n "$found_clang" + then + { $as_echo "$as_me:${as_lineno-$LINENO}: No GCC found, use CLANG" >&5 +$as_echo "$as_me: No GCC found, use CLANG" >&6;} + CC="$found_clang" + CXX="$found_clang++" + + elif test -z "$found_gcc" -a -z "$found_clang" + then + found_clang=`/usr/bin/xcrun -find clang 2>/dev/null` + if test -n "${found_clang}" + then + { $as_echo "$as_me:${as_lineno-$LINENO}: Using clang from Xcode.app" >&5 +$as_echo "$as_me: Using clang from Xcode.app" >&6;} + CC="${found_clang}" + CXX="`/usr/bin/xcrun -find clang++`" + + # else: use default behaviour + fi + fi + fi +fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' @@ -3399,7 +3504,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="${ac_tool_prefix}gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -3439,7 +3544,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -3492,7 +3597,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="${ac_tool_prefix}cc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -3533,7 +3638,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then ac_prog_rejected=yes continue @@ -3591,7 +3696,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -3635,7 +3740,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -4081,8 +4186,7 @@ /* end confdefs.h. */ #include #include -#include -#include +struct stat; /* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */ struct buf { int x; }; FILE * (*rcsopen) (struct buf *, struct stat *, int); @@ -4221,7 +4325,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_CXX="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -4262,7 +4366,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_CXX="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -4285,6 +4389,47 @@ fi ;; + clang|*/clang) # Extract the first word of "clang++", so it can be a program name with args. +set dummy clang++; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_path_CXX+:} false; then : + $as_echo_n "(cached) " >&6 +else + case $CXX in + [\\/]* | ?:[\\/]*) + ac_cv_path_CXX="$CXX" # Let the user override the test with a path. + ;; + *) + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in notfound +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_path_CXX="$as_dir/$ac_word$ac_exec_ext" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + + test -z "$ac_cv_path_CXX" && ac_cv_path_CXX="clang++" + ;; +esac +fi +CXX=$ac_cv_path_CXX +if test -n "$CXX"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXX" >&5 +$as_echo "$CXX" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + ;; esac if test "$CXX" = "notfound" then @@ -4311,7 +4456,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CXX="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -4542,7 +4687,7 @@ for ac_prog in grep ggrep; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext" - { test -f "$ac_path_GREP" && $as_test_x "$ac_path_GREP"; } || continue + as_fn_executable_p "$ac_path_GREP" || continue # Check for GNU ac_path_GREP and select it if it is found. # Check for GNU $ac_path_GREP case `"$ac_path_GREP" --version 2>&1` in @@ -4608,7 +4753,7 @@ for ac_prog in egrep; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext" - { test -f "$ac_path_EGREP" && $as_test_x "$ac_path_EGREP"; } || continue + as_fn_executable_p "$ac_path_EGREP" || continue # Check for GNU ac_path_EGREP and select it if it is found. # Check for GNU $ac_path_EGREP case `"$ac_path_EGREP" --version 2>&1` in @@ -4815,8 +4960,8 @@ cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ -# define __EXTENSIONS__ 1 - $ac_includes_default +# define __EXTENSIONS__ 1 + $ac_includes_default int main () { @@ -5210,7 +5355,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5250,7 +5395,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_RANLIB="ranlib" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5304,7 +5449,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_AR="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5355,7 +5500,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_HAS_PYTHON="found" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5428,7 +5573,7 @@ # by default. for ac_prog in ginstall scoinst install; do for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext"; then if test $ac_prog = install && grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # AIX install. It has an incompatible calling convention. @@ -5497,7 +5642,7 @@ test -z "$as_dir" && as_dir=. 
for ac_prog in mkdir gmkdir; do for ac_exec_ext in '' $ac_executable_extensions; do - { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; } || continue + as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext" || continue case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #( 'mkdir (GNU coreutils) '* | \ 'mkdir (coreutils) '* | \ @@ -8387,7 +8532,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -8430,7 +8575,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_ac_pt_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -10032,7 +10177,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_TRUE="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -11469,7 +11614,8 @@ main () { static int test_array [1 - 2 * !(((char) -1) < 0)]; -test_array [0] = 0 +test_array [0] = 0; +return test_array [0]; ; return 0; @@ -11500,11 +11646,11 @@ int main () { -/* FIXME: Include the comments suggested by Paul. */ + #ifndef __cplusplus - /* Ultrix mips cc rejects this. */ + /* Ultrix mips cc rejects this sort of thing. */ typedef int charset[2]; - const charset cs; + const charset cs = { 0, 0 }; /* SunOS 4.1.1 cc rejects this. */ char const *const *pcpcc; char **ppc; @@ -11521,8 +11667,9 @@ ++pcpcc; ppc = (char**) pcpcc; pcpcc = (char const *const *) ppc; - { /* SCO 3.2v4 cc rejects this. */ - char *t; + { /* SCO 3.2v4 cc rejects this sort of thing. */ + char tx; + char *t = &tx; char const *s = 0 ? (char *) 0 : (char const *) 0; *t++ = 0; @@ -11538,10 +11685,10 @@ iptr p = 0; ++p; } - { /* AIX XL C 1.02.0.0 rejects this saying + { /* AIX XL C 1.02.0.0 rejects this sort of thing, saying "k.c", line 2.27: 1506-025 (S) Operand must be a modifiable lvalue. */ - struct s { int j; const int *ap[3]; }; - struct s *b; b->j = 5; + struct s { int j; const int *ap[3]; } bx; + struct s *b = &bx; b->j = 5; } { /* ULTRIX-32 V3.1 (Rev 9) vcc rejects this */ const int foo = 10; @@ -14830,16 +14977,16 @@ # ... but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. - # In both cases, we have to default to `cp -p'. + # In both cases, we have to default to `cp -pR'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! 
-f conf$$.exe || - as_ln_s='cp -p' + as_ln_s='cp -pR' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else - as_ln_s='cp -p' - fi -else - as_ln_s='cp -p' + as_ln_s='cp -pR' + fi +else + as_ln_s='cp -pR' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null @@ -14899,28 +15046,16 @@ as_mkdir_p=false fi -if test -x / >/dev/null 2>&1; then - as_test_x='test -x' -else - if ls -dL / >/dev/null 2>&1; then - as_ls_L_option=L - else - as_ls_L_option= - fi - as_test_x=' - eval sh -c '\'' - if test -d "$1"; then - test -d "$1/."; - else - case $1 in #( - -*)set "./$1";; - esac; - case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #(( - ???[sx]*):;;*)false;;esac;fi - '\'' sh - ' -fi -as_executable_p=$as_test_x + +# as_fn_executable_p FILE +# ----------------------- +# Test if FILE is an executable regular file. +as_fn_executable_p () +{ + test -f "$1" && test -x "$1" +} # as_fn_executable_p +as_test_x='test -x' +as_executable_p=as_fn_executable_p # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" @@ -14942,7 +15077,7 @@ # values after options handling. ac_log=" This file was extended by python $as_me 3.3, which was -generated by GNU Autoconf 2.68. Invocation command line was +generated by GNU Autoconf 2.69. Invocation command line was CONFIG_FILES = $CONFIG_FILES CONFIG_HEADERS = $CONFIG_HEADERS @@ -15004,10 +15139,10 @@ ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" ac_cs_version="\\ python config.status 3.3 -configured by $0, generated by GNU Autoconf 2.68, +configured by $0, generated by GNU Autoconf 2.69, with options \\"\$ac_cs_config\\" -Copyright (C) 2010 Free Software Foundation, Inc. +Copyright (C) 2012 Free Software Foundation, Inc. This config.status script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it." @@ -15097,7 +15232,7 @@ _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 if \$ac_cs_recheck; then - set X '$SHELL' '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion + set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion shift \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 CONFIG_SHELL='$SHELL' diff --git a/configure.ac b/configure.ac --- a/configure.ac +++ b/configure.ac @@ -104,14 +104,20 @@ AC_MSG_CHECKING([for --enable-universalsdk]) AC_ARG_ENABLE(universalsdk, - AS_HELP_STRING([--enable-universalsdk@<:@=SDKDIR@:>@], [Build against Mac OS X 10.4u SDK (ppc/i386)]), + AS_HELP_STRING([--enable-universalsdk@<:@=SDKDIR@:>@], [Build fat binary against Mac OS X SDK]), [ case $enableval in yes) - enableval=/Developer/SDKs/MacOSX10.4u.sdk - if test ! -d "${enableval}" + # Locate the best usable SDK, see Mac/README.txt for more + # information + enableval="`/usr/bin/xcodebuild -version -sdk macosx Path 2>/dev/null`" + if test -z "${enableval}" then - enableval=/ + enableval=/Developer/SDKs/MacOSX10.4u.sdk + if test ! 
-d "${enableval}" + then + enableval=/ + fi fi ;; esac @@ -143,7 +149,20 @@ AC_SUBST(ARCH_RUN_32BIT) +# For backward compatibility reasons we prefer to select '32-bit' if available, +# otherwise use 'intel' UNIVERSAL_ARCHS="32-bit" +if test "`uname -s`" = "Darwin" +then + if test -n "${UNIVERSALSDK}" + then + if test -z "`/usr/bin/file "${UNIVERSALSDK}/usr/lib/libSystem.dylib" | grep ppc`" + then + UNIVERSAL_ARCHS="intel" + fi + fi +fi + AC_SUBST(LIPO_32BIT_FLAGS) AC_MSG_CHECKING(for --with-universal-archs) AC_ARG_WITH(universal-archs, @@ -153,7 +172,7 @@ UNIVERSAL_ARCHS="$withval" ], [ - AC_MSG_RESULT(32-bit) + AC_MSG_RESULT(${UNIVERSAL_ARCHS}) ]) @@ -501,6 +520,63 @@ if test -z "$CFLAGS"; then CFLAGS= fi + +if test "$ac_sys_system" = "Darwin" +then + # Compiler selection on MacOSX is more complicated than + # AC_PROG_CC can handle, see Mac/README.txt for more + # information + if test -z "${CC}" + then + found_gcc= + found_clang= + as_save_IFS=$IFS; IFS=: + for as_dir in $PATH + do + IFS=$as_save_IFS + if test -x $as_dir/gcc; then + if test -z "${found_gcc}"; then + found_gcc=$as_dir/gcc + fi + fi + if test -x $as_dir/clang; then + if test -z "${found_clang}"; then + found_clang=$as_dir/clang + fi + fi + done + IFS=$as_save_IFS + + if test -n "$found_gcc" -a -n "$found_clang" + then + if test -n "`"$found_gcc" --version | grep llvm-gcc`" + then + AC_MSG_NOTICE([Detected llvm-gcc, falling back to clang]) + CC="$found_clang" + CXX="$found_clang++" + fi + + + elif test -z "$found_gcc" -a -n "$found_clang" + then + AC_MSG_NOTICE([No GCC found, use CLANG]) + CC="$found_clang" + CXX="$found_clang++" + + elif test -z "$found_gcc" -a -z "$found_clang" + then + found_clang=`/usr/bin/xcrun -find clang 2>/dev/null` + if test -n "${found_clang}" + then + AC_MSG_NOTICE([Using clang from Xcode.app]) + CC="${found_clang}" + CXX="`/usr/bin/xcrun -find clang++`" + + # else: use default behaviour + fi + fi + fi +fi AC_PROG_CC AC_SUBST(CXX) @@ -534,6 +610,7 @@ case "$CC" in gcc) AC_PATH_PROG(CXX, [g++], [g++], [notfound]) ;; cc) AC_PATH_PROG(CXX, [c++], [c++], [notfound]) ;; + clang|*/clang) AC_PATH_PROG(CXX, [clang++], [clang++], [notfound]) ;; esac if test "$CXX" = "notfound" then -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 24 01:37:48 2012 From: python-checkins at python.org (antoine.pitrou) Date: Sun, 24 Jun 2012 01:37:48 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Errors_in_=22client=22_meth?= =?utf8?q?ods_in_test=5Fsocket_were_ignored_because_of_a_bogus?= Message-ID: http://hg.python.org/cpython/rev/e2c749bafd6d changeset: 77662:e2c749bafd6d user: Antoine Pitrou date: Sun Jun 24 01:34:13 2012 +0200 summary: Errors in "client" methods in test_socket were ignored because of a bogus except clause. (this could reveal test failures!) 
files: Lib/test/test_socket.py | 3 ++- 1 files changed, 2 insertions(+), 1 deletions(-) diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py --- a/Lib/test/test_socket.py +++ b/Lib/test/test_socket.py @@ -2,6 +2,7 @@ import unittest from test import support +from unittest.case import _ExpectedFailure import errno import io @@ -241,7 +242,7 @@ raise TypeError("test_func must be a callable function") try: test_func() - except unittest._ExpectedFailure: + except _ExpectedFailure: # We deliberately ignore expected failures pass except BaseException as e: -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 24 01:55:45 2012 From: python-checkins at python.org (larry.hastings) Date: Sun, 24 Jun 2012 01:55:45 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Issue_=2315154=3A_Add_=22di?= =?utf8?q?r=5Ffd=22_parameter_to_os=2Ermdir=2C_remove_=22rmdir=22?= Message-ID: http://hg.python.org/cpython/rev/3b7230997425 changeset: 77663:3b7230997425 user: Larry Hastings date: Sat Jun 23 16:55:07 2012 -0700 summary: Issue #15154: Add "dir_fd" parameter to os.rmdir, remove "rmdir" parameter from os.remove / os.unlink. Patch written by Georg Brandl. (I'm really looking forward to George getting commit privileges so I don't have to keep doing checkins on his behalf.) files: Doc/library/os.rst | 37 ++++++--- Lib/os.py | 5 +- Lib/test/test_os.py | 5 +- Misc/NEWS | 3 + Modules/posixmodule.c | 115 +++++++++++++++++++---------- 5 files changed, 105 insertions(+), 60 deletions(-) diff --git a/Doc/library/os.rst b/Doc/library/os.rst --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -1750,14 +1750,10 @@ The *dir_fd* argument. -.. function:: remove(path, *, dir_fd=None, rmdir=False) - - Remove (delete) the file *path*. This function is identical to - :func:`os.unlink`. - - Specify ``rmdir=True`` if *path* is a directory. Failing to do so - will raise an exception; likewise, specifying ``rmdir=True`` when - *path* is not a directory will also raise an exception. +.. function:: remove(path, *, dir_fd=None) + + Remove (delete) the file *path*. If *path* is a directory, :exc:`OSError` + is raised. Use :func:`rmdir` to remove directories. If *dir_fd* is not ``None``, it should be a file descriptor referring to a directory, and *path* should be relative; path will then be relative to @@ -1771,10 +1767,12 @@ be raised; on Unix, the directory entry is removed but the storage allocated to the file is not made available until the original file is no longer in use. + This function is identical to :func:`unlink`. + Availability: Unix, Windows. .. versionadded:: 3.3 - The *dir_fd* and *rmdir* arguments. + The *dir_fd* argument. .. function:: removedirs(path) @@ -1872,14 +1870,25 @@ .. versionadded:: 3.3 -.. function:: rmdir(path) +.. function:: rmdir(path, *, dir_fd=None) Remove (delete) the directory *path*. Only works when the directory is empty, otherwise, :exc:`OSError` is raised. In order to remove whole directory trees, :func:`shutil.rmtree` can be used. + If *dir_fd* is not ``None``, it should be a file descriptor referring to a + directory, and *path* should be relative; path will then be relative to + that directory. (If *path* is absolute, *dir_fd* is ignored.) + *dir_fd* may not be supported on your platform; + you can check whether or not it is available using + :data:`os.supports_dir_fd`. If it is unavailable, using it will raise + a :exc:`NotImplementedError`. + Availability: Unix, Windows. + .. versionadded:: 3.3 + The *dir_fd* parameter. + .. 
data:: XATTR_SIZE_MAX @@ -2235,9 +2244,9 @@ .. versionadded:: 3.3 -.. function:: unlink(path, *, dir_fd=None, rmdir=False) - - Remove (delete) the file *path*. This is the same function as +.. function:: unlink(path, *, dir_fd=None) + + Remove (delete) the file *path*. This function is identical to :func:`remove`; the :func:`unlink` name is its traditional Unix name. Please see the documentation for :func:`remove` for further information. @@ -2245,7 +2254,7 @@ Availability: Unix, Windows. .. versionadded:: 3.3 - The *dir_fd* and *rmdir* parameters. + The *dir_fd* parameter. .. function:: utime(path, times=None, *, ns=None, dir_fd=None, follow_symlinks=True) diff --git a/Lib/os.py b/Lib/os.py --- a/Lib/os.py +++ b/Lib/os.py @@ -157,6 +157,7 @@ _add("HAVE_RENAMEAT", "rename") _add("HAVE_SYMLINKAT", "symlink") _add("HAVE_UNLINKAT", "unlink") + _add("HAVE_UNLINKAT", "rmdir") _add("HAVE_UTIMENSAT", "utime") supports_dir_fd = _set @@ -214,10 +215,6 @@ _add("MS_WINDOWS", "stat") supports_follow_symlinks = _set - _set = set() - _add("HAVE_UNLINKAT", "unlink") - supports_remove_directory = _set - del _set del _have_functions del _globals diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -785,7 +785,10 @@ os.unlink(name, dir_fd=rootfd) for name in dirs: st = os.stat(name, dir_fd=rootfd, follow_symlinks=False) - os.unlink(name, dir_fd=rootfd, rmdir=stat.S_ISDIR(st.st_mode)) + if stat.S_ISDIR(st.st_mode): + os.rmdir(name, dir_fd=rootfd) + else: + os.unlink(name, dir_fd=rootfd) os.rmdir(support.TESTFN) diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -47,6 +47,9 @@ Library ------- +- Issue #15154: Add "dir_fd" parameter to os.rmdir, remove "rmdir" + parameter from os.remove / os.unlink. + - Issue #4489: Add a shutil.rmtree that isn't susceptible to symlink attacks. It is used automatically on platforms supporting the necessary os.openat() and os.unlinkat() functions. Main code by Martin von L?wis. 
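For illustration only (not part of the patch): a minimal sketch of the reworked API described in the documentation and test changes above, assuming a POSIX platform where dir_fd is supported. Instead of passing rmdir=True to os.unlink(), callers now choose os.rmdir() or os.unlink() themselves:

    import os, stat

    def remove_entry(dirfd, name):
        # dirfd is an open directory file descriptor; name is relative to it.
        if not {os.stat, os.unlink, os.rmdir} <= os.supports_dir_fd:
            raise NotImplementedError("dir_fd is not supported here")
        st = os.stat(name, dir_fd=dirfd, follow_symlinks=False)
        if stat.S_ISDIR(st.st_mode):
            os.rmdir(name, dir_fd=dirfd)
        else:
            os.unlink(name, dir_fd=dirfd)

    # Hypothetical usage:
    # dirfd = os.open("/some/directory", os.O_RDONLY)
    # remove_entry(dirfd, "stale-entry")
    # os.close(dirfd)
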
diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -4084,17 +4084,62 @@ } PyDoc_STRVAR(posix_rmdir__doc__, -"rmdir(path)\n\n\ -Remove a directory."); - -static PyObject * -posix_rmdir(PyObject *self, PyObject *args) -{ +"rmdir(path, *, dir_fd=None)\n\n\ +Remove a directory.\n\ +\n\ +If dir_fd is not None, it should be a file descriptor open to a directory,\n\ + and path should be relative; path will then be relative to that directory.\n\ +dir_fd may not be implemented on your platform.\n\ + If it is unavailable, using it will raise a NotImplementedError."); + +static PyObject * +posix_rmdir(PyObject *self, PyObject *args, PyObject *kwargs) +{ + path_t path; + int dir_fd = DEFAULT_DIR_FD; + static char *keywords[] = {"path", "dir_fd", NULL}; + int result; + PyObject *return_value = NULL; + + memset(&path, 0, sizeof(path)); + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&|$O&:rmdir", keywords, + path_converter, &path, +#ifdef HAVE_UNLINKAT + dir_fd_converter, &dir_fd +#else + dir_fd_unavailable, &dir_fd +#endif + )) + return NULL; + + Py_BEGIN_ALLOW_THREADS #ifdef MS_WINDOWS - return win32_1str(args, "rmdir", "y:rmdir", RemoveDirectoryA, "U:rmdir", RemoveDirectoryW); -#else - return posix_1str(args, "O&:rmdir", rmdir); -#endif + if (path.wide) + result = RemoveDirectoryW(path.wide); + else + result = RemoveDirectoryA(path.narrow); + result = !result; /* Windows, success=1, UNIX, success=0 */ +#else +#ifdef HAVE_UNLINKAT + if (dir_fd != DEFAULT_DIR_FD) + result = unlinkat(dir_fd, path.narrow, AT_REMOVEDIR); + else +#endif + result = rmdir(path.narrow); +#endif + Py_END_ALLOW_THREADS + + if (result) { + return_value = path_error("rmdir", &path); + goto exit; + } + + return_value = Py_None; + Py_INCREF(Py_None); + +exit: + path_cleanup(&path); + return return_value; } @@ -4186,68 +4231,54 @@ #endif /* MS_WINDOWS */ PyDoc_STRVAR(posix_unlink__doc__, -"unlink(path, *, dir_fd=None, rmdir=False)\n\n\ +"unlink(path, *, dir_fd=None)\n\n\ Remove a file (same as remove()).\n\ \n\ If dir_fd is not None, it should be a file descriptor open to a directory,\n\ and path should be relative; path will then be relative to that directory.\n\ dir_fd may not be implemented on your platform.\n\ - If it is unavailable, using it will raise a NotImplementedError.\n\ -If rmdir is True, unlink will behave like os.rmdir()."); + If it is unavailable, using it will raise a NotImplementedError."); PyDoc_STRVAR(posix_remove__doc__, -"remove(path, *, dir_fd=None, rmdir=False)\n\n\ +"remove(path, *, dir_fd=None)\n\n\ Remove a file (same as unlink()).\n\ \n\ If dir_fd is not None, it should be a file descriptor open to a directory,\n\ and path should be relative; path will then be relative to that directory.\n\ dir_fd may not be implemented on your platform.\n\ - If it is unavailable, using it will raise a NotImplementedError.\n\ -If rmdir is True, remove will behave like os.rmdir()."); + If it is unavailable, using it will raise a NotImplementedError."); static PyObject * posix_unlink(PyObject *self, PyObject *args, PyObject *kwargs) { path_t path; int dir_fd = DEFAULT_DIR_FD; - int remove_dir = 0; - static char *keywords[] = {"path", "dir_fd", "rmdir", NULL}; + static char *keywords[] = {"path", "dir_fd", NULL}; int result; PyObject *return_value = NULL; memset(&path, 0, sizeof(path)); - if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&|$O&p:unlink", keywords, + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O&|$O&:unlink", keywords, path_converter, 
&path, #ifdef HAVE_UNLINKAT - dir_fd_converter, &dir_fd, -#else - dir_fd_unavailable, &dir_fd, -#endif - &remove_dir)) + dir_fd_converter, &dir_fd +#else + dir_fd_unavailable, &dir_fd +#endif + )) return NULL; Py_BEGIN_ALLOW_THREADS #ifdef MS_WINDOWS - if (remove_dir) { - if (path.wide) - result = RemoveDirectoryW(path.wide); - else - result = RemoveDirectoryA(path.narrow); - } - else { - if (path.wide) - result = Py_DeleteFileW(path.wide); - else - result = DeleteFileA(path.narrow); - } + if (path.wide) + result = Py_DeleteFileW(path.wide); + else + result = DeleteFileA(path.narrow); result = !result; /* Windows, success=1, UNIX, success=0 */ #else - if (remove_dir && (dir_fd == DEFAULT_DIR_FD)) - result = rmdir(path.narrow); - else #ifdef HAVE_UNLINKAT if (dir_fd != DEFAULT_DIR_FD) - result = unlinkat(dir_fd, path.narrow, remove_dir ? AT_REMOVEDIR : 0); + result = unlinkat(dir_fd, path.narrow, 0); else #endif /* HAVE_UNLINKAT */ result = unlink(path.narrow); @@ -10806,7 +10837,9 @@ {"replace", (PyCFunction)posix_replace, METH_VARARGS | METH_KEYWORDS, posix_replace__doc__}, - {"rmdir", posix_rmdir, METH_VARARGS, posix_rmdir__doc__}, + {"rmdir", (PyCFunction)posix_rmdir, + METH_VARARGS | METH_KEYWORDS, + posix_rmdir__doc__}, {"stat", (PyCFunction)posix_stat, METH_VARARGS | METH_KEYWORDS, posix_stat__doc__}, -- Repository URL: http://hg.python.org/cpython From python-checkins at python.org Sun Jun 24 04:28:08 2012 From: python-checkins at python.org (eric.araujo) Date: Sun, 24 Jun 2012 04:28:08 +0200 Subject: [Python-checkins] =?utf8?q?peps=3A_Remove_packaging_module_from_3?= =?utf8?q?=2E3_additions?= Message-ID: http://hg.python.org/peps/rev/3b4eb5655e20 changeset: 4478:3b4eb5655e20 user: ?ric Araujo date: Sat Jun 23 22:27:28 2012 -0400 summary: Remove packaging module from 3.3 additions files: pep-0398.txt | 2 +- 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/pep-0398.txt b/pep-0398.txt --- a/pep-0398.txt +++ b/pep-0398.txt @@ -80,7 +80,6 @@ Other final large-scale changes: -* Addition of the "packaging" module, deprecating "distutils" * Addition of the "faulthandler" module * Addition of the "lzma" module, and lzma/xz support in tarfile * Implementing ``__import__`` using importlib @@ -104,6 +103,7 @@ * PEP 3143: Standard daemon process library * PEP 3154: Pickle protocol version 4 * Breaking out standard library and docs in separate repos +* Addition of the "packaging" module, deprecating "distutils" * Addition of the "regex" module * Email version 6 * A standard event-loop interface (PEP by Jim Fulton pending) -- Repository URL: http://hg.python.org/peps From python-checkins at python.org Sun Jun 24 04:37:50 2012 From: python-checkins at python.org (ezio.melotti) Date: Sun, 24 Jun 2012 04:37:50 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_=2311113=3A_add_a_new_=22ht?= =?utf8?q?ml5=22_dictionary_containing_the_named_character_references?= Message-ID: http://hg.python.org/cpython/rev/2b54e25d6ecb changeset: 77664:2b54e25d6ecb user: Ezio Melotti date: Sun Jun 24 04:37:41 2012 +0200 summary: #11113: add a new "html5" dictionary containing the named character references defined by the HTML5 standard and the equivalent Unicode character(s) to the html.entities module. 
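For illustration only (not part of the patch), the new mapping is used like this; note that keys carry the trailing semicolon, and the handful of legacy names that HTML5 also accepts without it appear twice, as the hunk below shows:

    from html.entities import html5

    print(html5['gt;'])      # '>'
    print(html5['copy;'])    # '\xa9', the copyright sign
    print(html5['copy'])     # same value, semicolon-less legacy name
    print(html5['fjlig;'])   # 'fj' -- some references expand to several characters
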
files: Doc/library/html.entities.rst | 21 +- Lib/html/entities.py | 2236 +++++++++++++++++++++ Misc/NEWS | 4 + 3 files changed, 2259 insertions(+), 2 deletions(-) diff --git a/Doc/library/html.entities.rst b/Doc/library/html.entities.rst --- a/Doc/library/html.entities.rst +++ b/Doc/library/html.entities.rst @@ -9,13 +9,25 @@ -------------- -This module defines three dictionaries, ``name2codepoint``, ``codepoint2name``, -and ``entitydefs``. ``entitydefs`` is used to provide the :attr:`entitydefs` +This module defines four dictionaries, :data:`html5`, +:data:`name2codepoint`, :data:`codepoint2name`, and :data:`entitydefs`. +:data:`entitydefs` is used to provide the :attr:`entitydefs` attribute of the :class:`html.parser.HTMLParser` class. The definition provided here contains all the entities defined by XHTML 1.0 that can be handled using simple textual substitution in the Latin-1 character set (ISO-8859-1). +.. data:: html5 + + A dictionary that maps HTML5 named character references [#]_ to the + equivalent Unicode character(s), e.g. ``html5['gt;'] == '>'``. + Note that the trailing semicolon is included in the name (e.g. ``'gt;'``), + however some of the names are accepted by the standard even without the + semicolon: in this case the name is present with and without the ``';'``. + + .. versionadded:: 3.3 + + .. data:: entitydefs A dictionary mapping XHTML 1.0 entity definitions to their replacement text in @@ -30,3 +42,8 @@ .. data:: codepoint2name A dictionary that maps Unicode codepoints to HTML entity names. + + +.. rubric:: Footnotes + +.. [#] See http://www.w3.org/TR/html5/named-character-references.html diff --git a/Lib/html/entities.py b/Lib/html/entities.py --- a/Lib/html/entities.py +++ b/Lib/html/entities.py @@ -256,6 +256,2242 @@ 'zwnj': 0x200c, # zero width non-joiner, U+200C NEW RFC 2070 } + +# maps the HTML5 named character references to the equivalent Unicode character(s) +html5 = { + 'Aacute;': '\xc1', + 'Aacute': '\xc1', + 'aacute;': '\xe1', + 'aacute': '\xe1', + 'Abreve;': '\u0102', + 'abreve;': '\u0103', + 'ac;': '\u223e', + 'acd;': '\u223f', + 'acE;': '\u223e\u0333', + 'Acirc;': '\xc2', + 'Acirc': '\xc2', + 'acirc;': '\xe2', + 'acirc': '\xe2', + 'acute;': '\xb4', + 'acute': '\xb4', + 'Acy;': '\u0410', + 'acy;': '\u0430', + 'AElig;': '\xc6', + 'AElig': '\xc6', + 'aelig;': '\xe6', + 'aelig': '\xe6', + 'af;': '\u2061', + 'Afr;': '\U0001d504', + 'afr;': '\U0001d51e', + 'Agrave;': '\xc0', + 'Agrave': '\xc0', + 'agrave;': '\xe0', + 'agrave': '\xe0', + 'alefsym;': '\u2135', + 'aleph;': '\u2135', + 'Alpha;': '\u0391', + 'alpha;': '\u03b1', + 'Amacr;': '\u0100', + 'amacr;': '\u0101', + 'amalg;': '\u2a3f', + 'AMP;': '&', + 'AMP': '&', + 'amp;': '&', + 'amp': '&', + 'And;': '\u2a53', + 'and;': '\u2227', + 'andand;': '\u2a55', + 'andd;': '\u2a5c', + 'andslope;': '\u2a58', + 'andv;': '\u2a5a', + 'ang;': '\u2220', + 'ange;': '\u29a4', + 'angle;': '\u2220', + 'angmsd;': '\u2221', + 'angmsdaa;': '\u29a8', + 'angmsdab;': '\u29a9', + 'angmsdac;': '\u29aa', + 'angmsdad;': '\u29ab', + 'angmsdae;': '\u29ac', + 'angmsdaf;': '\u29ad', + 'angmsdag;': '\u29ae', + 'angmsdah;': '\u29af', + 'angrt;': '\u221f', + 'angrtvb;': '\u22be', + 'angrtvbd;': '\u299d', + 'angsph;': '\u2222', + 'angst;': '\xc5', + 'angzarr;': '\u237c', + 'Aogon;': '\u0104', + 'aogon;': '\u0105', + 'Aopf;': '\U0001d538', + 'aopf;': '\U0001d552', + 'ap;': '\u2248', + 'apacir;': '\u2a6f', + 'apE;': '\u2a70', + 'ape;': '\u224a', + 'apid;': '\u224b', + 'apos;': "'", + 'ApplyFunction;': '\u2061', + 'approx;': '\u2248', + 
'approxeq;': '\u224a', + 'Aring;': '\xc5', + 'Aring': '\xc5', + 'aring;': '\xe5', + 'aring': '\xe5', + 'Ascr;': '\U0001d49c', + 'ascr;': '\U0001d4b6', + 'Assign;': '\u2254', + 'ast;': '*', + 'asymp;': '\u2248', + 'asympeq;': '\u224d', + 'Atilde;': '\xc3', + 'Atilde': '\xc3', + 'atilde;': '\xe3', + 'atilde': '\xe3', + 'Auml;': '\xc4', + 'Auml': '\xc4', + 'auml;': '\xe4', + 'auml': '\xe4', + 'awconint;': '\u2233', + 'awint;': '\u2a11', + 'backcong;': '\u224c', + 'backepsilon;': '\u03f6', + 'backprime;': '\u2035', + 'backsim;': '\u223d', + 'backsimeq;': '\u22cd', + 'Backslash;': '\u2216', + 'Barv;': '\u2ae7', + 'barvee;': '\u22bd', + 'Barwed;': '\u2306', + 'barwed;': '\u2305', + 'barwedge;': '\u2305', + 'bbrk;': '\u23b5', + 'bbrktbrk;': '\u23b6', + 'bcong;': '\u224c', + 'Bcy;': '\u0411', + 'bcy;': '\u0431', + 'bdquo;': '\u201e', + 'becaus;': '\u2235', + 'Because;': '\u2235', + 'because;': '\u2235', + 'bemptyv;': '\u29b0', + 'bepsi;': '\u03f6', + 'bernou;': '\u212c', + 'Bernoullis;': '\u212c', + 'Beta;': '\u0392', + 'beta;': '\u03b2', + 'beth;': '\u2136', + 'between;': '\u226c', + 'Bfr;': '\U0001d505', + 'bfr;': '\U0001d51f', + 'bigcap;': '\u22c2', + 'bigcirc;': '\u25ef', + 'bigcup;': '\u22c3', + 'bigodot;': '\u2a00', + 'bigoplus;': '\u2a01', + 'bigotimes;': '\u2a02', + 'bigsqcup;': '\u2a06', + 'bigstar;': '\u2605', + 'bigtriangledown;': '\u25bd', + 'bigtriangleup;': '\u25b3', + 'biguplus;': '\u2a04', + 'bigvee;': '\u22c1', + 'bigwedge;': '\u22c0', + 'bkarow;': '\u290d', + 'blacklozenge;': '\u29eb', + 'blacksquare;': '\u25aa', + 'blacktriangle;': '\u25b4', + 'blacktriangledown;': '\u25be', + 'blacktriangleleft;': '\u25c2', + 'blacktriangleright;': '\u25b8', + 'blank;': '\u2423', + 'blk12;': '\u2592', + 'blk14;': '\u2591', + 'blk34;': '\u2593', + 'block;': '\u2588', + 'bne;': '=\u20e5', + 'bnequiv;': '\u2261\u20e5', + 'bNot;': '\u2aed', + 'bnot;': '\u2310', + 'Bopf;': '\U0001d539', + 'bopf;': '\U0001d553', + 'bot;': '\u22a5', + 'bottom;': '\u22a5', + 'bowtie;': '\u22c8', + 'boxbox;': '\u29c9', + 'boxDL;': '\u2557', + 'boxDl;': '\u2556', + 'boxdL;': '\u2555', + 'boxdl;': '\u2510', + 'boxDR;': '\u2554', + 'boxDr;': '\u2553', + 'boxdR;': '\u2552', + 'boxdr;': '\u250c', + 'boxH;': '\u2550', + 'boxh;': '\u2500', + 'boxHD;': '\u2566', + 'boxHd;': '\u2564', + 'boxhD;': '\u2565', + 'boxhd;': '\u252c', + 'boxHU;': '\u2569', + 'boxHu;': '\u2567', + 'boxhU;': '\u2568', + 'boxhu;': '\u2534', + 'boxminus;': '\u229f', + 'boxplus;': '\u229e', + 'boxtimes;': '\u22a0', + 'boxUL;': '\u255d', + 'boxUl;': '\u255c', + 'boxuL;': '\u255b', + 'boxul;': '\u2518', + 'boxUR;': '\u255a', + 'boxUr;': '\u2559', + 'boxuR;': '\u2558', + 'boxur;': '\u2514', + 'boxV;': '\u2551', + 'boxv;': '\u2502', + 'boxVH;': '\u256c', + 'boxVh;': '\u256b', + 'boxvH;': '\u256a', + 'boxvh;': '\u253c', + 'boxVL;': '\u2563', + 'boxVl;': '\u2562', + 'boxvL;': '\u2561', + 'boxvl;': '\u2524', + 'boxVR;': '\u2560', + 'boxVr;': '\u255f', + 'boxvR;': '\u255e', + 'boxvr;': '\u251c', + 'bprime;': '\u2035', + 'Breve;': '\u02d8', + 'breve;': '\u02d8', + 'brvbar;': '\xa6', + 'brvbar': '\xa6', + 'Bscr;': '\u212c', + 'bscr;': '\U0001d4b7', + 'bsemi;': '\u204f', + 'bsim;': '\u223d', + 'bsime;': '\u22cd', + 'bsol;': '\\', + 'bsolb;': '\u29c5', + 'bsolhsub;': '\u27c8', + 'bull;': '\u2022', + 'bullet;': '\u2022', + 'bump;': '\u224e', + 'bumpE;': '\u2aae', + 'bumpe;': '\u224f', + 'Bumpeq;': '\u224e', + 'bumpeq;': '\u224f', + 'Cacute;': '\u0106', + 'cacute;': '\u0107', + 'Cap;': '\u22d2', + 'cap;': '\u2229', + 'capand;': '\u2a44', + 'capbrcup;': '\u2a49', + 
'capcap;': '\u2a4b', + 'capcup;': '\u2a47', + 'capdot;': '\u2a40', + 'CapitalDifferentialD;': '\u2145', + 'caps;': '\u2229\ufe00', + 'caret;': '\u2041', + 'caron;': '\u02c7', + 'Cayleys;': '\u212d', + 'ccaps;': '\u2a4d', + 'Ccaron;': '\u010c', + 'ccaron;': '\u010d', + 'Ccedil;': '\xc7', + 'Ccedil': '\xc7', + 'ccedil;': '\xe7', + 'ccedil': '\xe7', + 'Ccirc;': '\u0108', + 'ccirc;': '\u0109', + 'Cconint;': '\u2230', + 'ccups;': '\u2a4c', + 'ccupssm;': '\u2a50', + 'Cdot;': '\u010a', + 'cdot;': '\u010b', + 'cedil;': '\xb8', + 'cedil': '\xb8', + 'Cedilla;': '\xb8', + 'cemptyv;': '\u29b2', + 'cent;': '\xa2', + 'cent': '\xa2', + 'CenterDot;': '\xb7', + 'centerdot;': '\xb7', + 'Cfr;': '\u212d', + 'cfr;': '\U0001d520', + 'CHcy;': '\u0427', + 'chcy;': '\u0447', + 'check;': '\u2713', + 'checkmark;': '\u2713', + 'Chi;': '\u03a7', + 'chi;': '\u03c7', + 'cir;': '\u25cb', + 'circ;': '\u02c6', + 'circeq;': '\u2257', + 'circlearrowleft;': '\u21ba', + 'circlearrowright;': '\u21bb', + 'circledast;': '\u229b', + 'circledcirc;': '\u229a', + 'circleddash;': '\u229d', + 'CircleDot;': '\u2299', + 'circledR;': '\xae', + 'circledS;': '\u24c8', + 'CircleMinus;': '\u2296', + 'CirclePlus;': '\u2295', + 'CircleTimes;': '\u2297', + 'cirE;': '\u29c3', + 'cire;': '\u2257', + 'cirfnint;': '\u2a10', + 'cirmid;': '\u2aef', + 'cirscir;': '\u29c2', + 'ClockwiseContourIntegral;': '\u2232', + 'CloseCurlyDoubleQuote;': '\u201d', + 'CloseCurlyQuote;': '\u2019', + 'clubs;': '\u2663', + 'clubsuit;': '\u2663', + 'Colon;': '\u2237', + 'colon;': ':', + 'Colone;': '\u2a74', + 'colone;': '\u2254', + 'coloneq;': '\u2254', + 'comma;': ',', + 'commat;': '@', + 'comp;': '\u2201', + 'compfn;': '\u2218', + 'complement;': '\u2201', + 'complexes;': '\u2102', + 'cong;': '\u2245', + 'congdot;': '\u2a6d', + 'Congruent;': '\u2261', + 'Conint;': '\u222f', + 'conint;': '\u222e', + 'ContourIntegral;': '\u222e', + 'Copf;': '\u2102', + 'copf;': '\U0001d554', + 'coprod;': '\u2210', + 'Coproduct;': '\u2210', + 'COPY;': '\xa9', + 'COPY': '\xa9', + 'copy;': '\xa9', + 'copy': '\xa9', + 'copysr;': '\u2117', + 'CounterClockwiseContourIntegral;': '\u2233', + 'crarr;': '\u21b5', + 'Cross;': '\u2a2f', + 'cross;': '\u2717', + 'Cscr;': '\U0001d49e', + 'cscr;': '\U0001d4b8', + 'csub;': '\u2acf', + 'csube;': '\u2ad1', + 'csup;': '\u2ad0', + 'csupe;': '\u2ad2', + 'ctdot;': '\u22ef', + 'cudarrl;': '\u2938', + 'cudarrr;': '\u2935', + 'cuepr;': '\u22de', + 'cuesc;': '\u22df', + 'cularr;': '\u21b6', + 'cularrp;': '\u293d', + 'Cup;': '\u22d3', + 'cup;': '\u222a', + 'cupbrcap;': '\u2a48', + 'CupCap;': '\u224d', + 'cupcap;': '\u2a46', + 'cupcup;': '\u2a4a', + 'cupdot;': '\u228d', + 'cupor;': '\u2a45', + 'cups;': '\u222a\ufe00', + 'curarr;': '\u21b7', + 'curarrm;': '\u293c', + 'curlyeqprec;': '\u22de', + 'curlyeqsucc;': '\u22df', + 'curlyvee;': '\u22ce', + 'curlywedge;': '\u22cf', + 'curren;': '\xa4', + 'curren': '\xa4', + 'curvearrowleft;': '\u21b6', + 'curvearrowright;': '\u21b7', + 'cuvee;': '\u22ce', + 'cuwed;': '\u22cf', + 'cwconint;': '\u2232', + 'cwint;': '\u2231', + 'cylcty;': '\u232d', + 'Dagger;': '\u2021', + 'dagger;': '\u2020', + 'daleth;': '\u2138', + 'Darr;': '\u21a1', + 'dArr;': '\u21d3', + 'darr;': '\u2193', + 'dash;': '\u2010', + 'Dashv;': '\u2ae4', + 'dashv;': '\u22a3', + 'dbkarow;': '\u290f', + 'dblac;': '\u02dd', + 'Dcaron;': '\u010e', + 'dcaron;': '\u010f', + 'Dcy;': '\u0414', + 'dcy;': '\u0434', + 'DD;': '\u2145', + 'dd;': '\u2146', + 'ddagger;': '\u2021', + 'ddarr;': '\u21ca', + 'DDotrahd;': '\u2911', + 'ddotseq;': '\u2a77', + 'deg;': '\xb0', + 'deg': 
'\xb0', + 'Del;': '\u2207', + 'Delta;': '\u0394', + 'delta;': '\u03b4', + 'demptyv;': '\u29b1', + 'dfisht;': '\u297f', + 'Dfr;': '\U0001d507', + 'dfr;': '\U0001d521', + 'dHar;': '\u2965', + 'dharl;': '\u21c3', + 'dharr;': '\u21c2', + 'DiacriticalAcute;': '\xb4', + 'DiacriticalDot;': '\u02d9', + 'DiacriticalDoubleAcute;': '\u02dd', + 'DiacriticalGrave;': '`', + 'DiacriticalTilde;': '\u02dc', + 'diam;': '\u22c4', + 'Diamond;': '\u22c4', + 'diamond;': '\u22c4', + 'diamondsuit;': '\u2666', + 'diams;': '\u2666', + 'die;': '\xa8', + 'DifferentialD;': '\u2146', + 'digamma;': '\u03dd', + 'disin;': '\u22f2', + 'div;': '\xf7', + 'divide;': '\xf7', + 'divide': '\xf7', + 'divideontimes;': '\u22c7', + 'divonx;': '\u22c7', + 'DJcy;': '\u0402', + 'djcy;': '\u0452', + 'dlcorn;': '\u231e', + 'dlcrop;': '\u230d', + 'dollar;': '$', + 'Dopf;': '\U0001d53b', + 'dopf;': '\U0001d555', + 'Dot;': '\xa8', + 'dot;': '\u02d9', + 'DotDot;': '\u25cc\u20dc', + 'doteq;': '\u2250', + 'doteqdot;': '\u2251', + 'DotEqual;': '\u2250', + 'dotminus;': '\u2238', + 'dotplus;': '\u2214', + 'dotsquare;': '\u22a1', + 'doublebarwedge;': '\u2306', + 'DoubleContourIntegral;': '\u222f', + 'DoubleDot;': '\xa8', + 'DoubleDownArrow;': '\u21d3', + 'DoubleLeftArrow;': '\u21d0', + 'DoubleLeftRightArrow;': '\u21d4', + 'DoubleLeftTee;': '\u2ae4', + 'DoubleLongLeftArrow;': '\u27f8', + 'DoubleLongLeftRightArrow;': '\u27fa', + 'DoubleLongRightArrow;': '\u27f9', + 'DoubleRightArrow;': '\u21d2', + 'DoubleRightTee;': '\u22a8', + 'DoubleUpArrow;': '\u21d1', + 'DoubleUpDownArrow;': '\u21d5', + 'DoubleVerticalBar;': '\u2225', + 'DownArrow;': '\u2193', + 'Downarrow;': '\u21d3', + 'downarrow;': '\u2193', + 'DownArrowBar;': '\u2913', + 'DownArrowUpArrow;': '\u21f5', + 'DownBreve;': '\u25cc\u0311', + 'downdownarrows;': '\u21ca', + 'downharpoonleft;': '\u21c3', + 'downharpoonright;': '\u21c2', + 'DownLeftRightVector;': '\u2950', + 'DownLeftTeeVector;': '\u295e', + 'DownLeftVector;': '\u21bd', + 'DownLeftVectorBar;': '\u2956', + 'DownRightTeeVector;': '\u295f', + 'DownRightVector;': '\u21c1', + 'DownRightVectorBar;': '\u2957', + 'DownTee;': '\u22a4', + 'DownTeeArrow;': '\u21a7', + 'drbkarow;': '\u2910', + 'drcorn;': '\u231f', + 'drcrop;': '\u230c', + 'Dscr;': '\U0001d49f', + 'dscr;': '\U0001d4b9', + 'DScy;': '\u0405', + 'dscy;': '\u0455', + 'dsol;': '\u29f6', + 'Dstrok;': '\u0110', + 'dstrok;': '\u0111', + 'dtdot;': '\u22f1', + 'dtri;': '\u25bf', + 'dtrif;': '\u25be', + 'duarr;': '\u21f5', + 'duhar;': '\u296f', + 'dwangle;': '\u29a6', + 'DZcy;': '\u040f', + 'dzcy;': '\u045f', + 'dzigrarr;': '\u27ff', + 'Eacute;': '\xc9', + 'Eacute': '\xc9', + 'eacute;': '\xe9', + 'eacute': '\xe9', + 'easter;': '\u2a6e', + 'Ecaron;': '\u011a', + 'ecaron;': '\u011b', + 'ecir;': '\u2256', + 'Ecirc;': '\xca', + 'Ecirc': '\xca', + 'ecirc;': '\xea', + 'ecirc': '\xea', + 'ecolon;': '\u2255', + 'Ecy;': '\u042d', + 'ecy;': '\u044d', + 'eDDot;': '\u2a77', + 'Edot;': '\u0116', + 'eDot;': '\u2251', + 'edot;': '\u0117', + 'ee;': '\u2147', + 'efDot;': '\u2252', + 'Efr;': '\U0001d508', + 'efr;': '\U0001d522', + 'eg;': '\u2a9a', + 'Egrave;': '\xc8', + 'Egrave': '\xc8', + 'egrave;': '\xe8', + 'egrave': '\xe8', + 'egs;': '\u2a96', + 'egsdot;': '\u2a98', + 'el;': '\u2a99', + 'Element;': '\u2208', + 'elinters;': '\u23e7', + 'ell;': '\u2113', + 'els;': '\u2a95', + 'elsdot;': '\u2a97', + 'Emacr;': '\u0112', + 'emacr;': '\u0113', + 'empty;': '\u2205', + 'emptyset;': '\u2205', + 'EmptySmallSquare;': '\u25fb', + 'emptyv;': '\u2205', + 'EmptyVerySmallSquare;': '\u25ab', + 'emsp;': '\u2003', + 
'emsp13;': '\u2004', + 'emsp14;': '\u2005', + 'ENG;': '\u014a', + 'eng;': '\u014b', + 'ensp;': '\u2002', + 'Eogon;': '\u0118', + 'eogon;': '\u0119', + 'Eopf;': '\U0001d53c', + 'eopf;': '\U0001d556', + 'epar;': '\u22d5', + 'eparsl;': '\u29e3', + 'eplus;': '\u2a71', + 'epsi;': '\u03b5', + 'Epsilon;': '\u0395', + 'epsilon;': '\u03b5', + 'epsiv;': '\u03f5', + 'eqcirc;': '\u2256', + 'eqcolon;': '\u2255', + 'eqsim;': '\u2242', + 'eqslantgtr;': '\u2a96', + 'eqslantless;': '\u2a95', + 'Equal;': '\u2a75', + 'equals;': '=', + 'EqualTilde;': '\u2242', + 'equest;': '\u225f', + 'Equilibrium;': '\u21cc', + 'equiv;': '\u2261', + 'equivDD;': '\u2a78', + 'eqvparsl;': '\u29e5', + 'erarr;': '\u2971', + 'erDot;': '\u2253', + 'Escr;': '\u2130', + 'escr;': '\u212f', + 'esdot;': '\u2250', + 'Esim;': '\u2a73', + 'esim;': '\u2242', + 'Eta;': '\u0397', + 'eta;': '\u03b7', + 'ETH;': '\xd0', + 'ETH': '\xd0', + 'eth;': '\xf0', + 'eth': '\xf0', + 'Euml;': '\xcb', + 'Euml': '\xcb', + 'euml;': '\xeb', + 'euml': '\xeb', + 'euro;': '\u20ac', + 'excl;': '!', + 'exist;': '\u2203', + 'Exists;': '\u2203', + 'expectation;': '\u2130', + 'ExponentialE;': '\u2147', + 'exponentiale;': '\u2147', + 'fallingdotseq;': '\u2252', + 'Fcy;': '\u0424', + 'fcy;': '\u0444', + 'female;': '\u2640', + 'ffilig;': '\ufb03', + 'fflig;': '\ufb00', + 'ffllig;': '\ufb04', + 'Ffr;': '\U0001d509', + 'ffr;': '\U0001d523', + 'filig;': '\ufb01', + 'FilledSmallSquare;': '\u25fc', + 'FilledVerySmallSquare;': '\u25aa', + 'fjlig;': 'fj', + 'flat;': '\u266d', + 'fllig;': '\ufb02', + 'fltns;': '\u25b1', + 'fnof;': '\u0192', + 'Fopf;': '\U0001d53d', + 'fopf;': '\U0001d557', + 'ForAll;': '\u2200', + 'forall;': '\u2200', + 'fork;': '\u22d4', + 'forkv;': '\u2ad9', + 'Fouriertrf;': '\u2131', + 'fpartint;': '\u2a0d', + 'frac12;': '\xbd', + 'frac12': '\xbd', + 'frac13;': '\u2153', + 'frac14;': '\xbc', + 'frac14': '\xbc', + 'frac15;': '\u2155', + 'frac16;': '\u2159', + 'frac18;': '\u215b', + 'frac23;': '\u2154', + 'frac25;': '\u2156', + 'frac34;': '\xbe', + 'frac34': '\xbe', + 'frac35;': '\u2157', + 'frac38;': '\u215c', + 'frac45;': '\u2158', + 'frac56;': '\u215a', + 'frac58;': '\u215d', + 'frac78;': '\u215e', + 'frasl;': '\u2044', + 'frown;': '\u2322', + 'Fscr;': '\u2131', + 'fscr;': '\U0001d4bb', + 'gacute;': '\u01f5', + 'Gamma;': '\u0393', + 'gamma;': '\u03b3', + 'Gammad;': '\u03dc', + 'gammad;': '\u03dd', + 'gap;': '\u2a86', + 'Gbreve;': '\u011e', + 'gbreve;': '\u011f', + 'Gcedil;': '\u0122', + 'Gcirc;': '\u011c', + 'gcirc;': '\u011d', + 'Gcy;': '\u0413', + 'gcy;': '\u0433', + 'Gdot;': '\u0120', + 'gdot;': '\u0121', + 'gE;': '\u2267', + 'ge;': '\u2265', + 'gEl;': '\u2a8c', + 'gel;': '\u22db', + 'geq;': '\u2265', + 'geqq;': '\u2267', + 'geqslant;': '\u2a7e', + 'ges;': '\u2a7e', + 'gescc;': '\u2aa9', + 'gesdot;': '\u2a80', + 'gesdoto;': '\u2a82', + 'gesdotol;': '\u2a84', + 'gesl;': '\u22db\ufe00', + 'gesles;': '\u2a94', + 'Gfr;': '\U0001d50a', + 'gfr;': '\U0001d524', + 'Gg;': '\u22d9', + 'gg;': '\u226b', + 'ggg;': '\u22d9', + 'gimel;': '\u2137', + 'GJcy;': '\u0403', + 'gjcy;': '\u0453', + 'gl;': '\u2277', + 'gla;': '\u2aa5', + 'glE;': '\u2a92', + 'glj;': '\u2aa4', + 'gnap;': '\u2a8a', + 'gnapprox;': '\u2a8a', + 'gnE;': '\u2269', + 'gne;': '\u2a88', + 'gneq;': '\u2a88', + 'gneqq;': '\u2269', + 'gnsim;': '\u22e7', + 'Gopf;': '\U0001d53e', + 'gopf;': '\U0001d558', + 'grave;': '`', + 'GreaterEqual;': '\u2265', + 'GreaterEqualLess;': '\u22db', + 'GreaterFullEqual;': '\u2267', + 'GreaterGreater;': '\u2aa2', + 'GreaterLess;': '\u2277', + 'GreaterSlantEqual;': '\u2a7e', + 
'GreaterTilde;': '\u2273', + 'Gscr;': '\U0001d4a2', + 'gscr;': '\u210a', + 'gsim;': '\u2273', + 'gsime;': '\u2a8e', + 'gsiml;': '\u2a90', + 'GT;': '>', + 'GT': '>', + 'Gt;': '\u226b', + 'gt;': '>', + 'gt': '>', + 'gtcc;': '\u2aa7', + 'gtcir;': '\u2a7a', + 'gtdot;': '\u22d7', + 'gtlPar;': '\u2995', + 'gtquest;': '\u2a7c', + 'gtrapprox;': '\u2a86', + 'gtrarr;': '\u2978', + 'gtrdot;': '\u22d7', + 'gtreqless;': '\u22db', + 'gtreqqless;': '\u2a8c', + 'gtrless;': '\u2277', + 'gtrsim;': '\u2273', + 'gvertneqq;': '\u2269\ufe00', + 'gvnE;': '\u2269\ufe00', + 'Hacek;': '\u02c7', + 'hairsp;': '\u200a', + 'half;': '\xbd', + 'hamilt;': '\u210b', + 'HARDcy;': '\u042a', + 'hardcy;': '\u044a', + 'hArr;': '\u21d4', + 'harr;': '\u2194', + 'harrcir;': '\u2948', + 'harrw;': '\u21ad', + 'Hat;': '^', + 'hbar;': '\u210f', + 'Hcirc;': '\u0124', + 'hcirc;': '\u0125', + 'hearts;': '\u2665', + 'heartsuit;': '\u2665', + 'hellip;': '\u2026', + 'hercon;': '\u22b9', + 'Hfr;': '\u210c', + 'hfr;': '\U0001d525', + 'HilbertSpace;': '\u210b', + 'hksearow;': '\u2925', + 'hkswarow;': '\u2926', + 'hoarr;': '\u21ff', + 'homtht;': '\u223b', + 'hookleftarrow;': '\u21a9', + 'hookrightarrow;': '\u21aa', + 'Hopf;': '\u210d', + 'hopf;': '\U0001d559', + 'horbar;': '\u2015', + 'HorizontalLine;': '\u2500', + 'Hscr;': '\u210b', + 'hscr;': '\U0001d4bd', + 'hslash;': '\u210f', + 'Hstrok;': '\u0126', + 'hstrok;': '\u0127', + 'HumpDownHump;': '\u224e', + 'HumpEqual;': '\u224f', + 'hybull;': '\u2043', + 'hyphen;': '\u2010', + 'Iacute;': '\xcd', + 'Iacute': '\xcd', + 'iacute;': '\xed', + 'iacute': '\xed', + 'ic;': '\u2063', + 'Icirc;': '\xce', + 'Icirc': '\xce', + 'icirc;': '\xee', + 'icirc': '\xee', + 'Icy;': '\u0418', + 'icy;': '\u0438', + 'Idot;': '\u0130', + 'IEcy;': '\u0415', + 'iecy;': '\u0435', + 'iexcl;': '\xa1', + 'iexcl': '\xa1', + 'iff;': '\u21d4', + 'Ifr;': '\u2111', + 'ifr;': '\U0001d526', + 'Igrave;': '\xcc', + 'Igrave': '\xcc', + 'igrave;': '\xec', + 'igrave': '\xec', + 'ii;': '\u2148', + 'iiiint;': '\u2a0c', + 'iiint;': '\u222d', + 'iinfin;': '\u29dc', + 'iiota;': '\u2129', + 'IJlig;': '\u0132', + 'ijlig;': '\u0133', + 'Im;': '\u2111', + 'Imacr;': '\u012a', + 'imacr;': '\u012b', + 'image;': '\u2111', + 'ImaginaryI;': '\u2148', + 'imagline;': '\u2110', + 'imagpart;': '\u2111', + 'imath;': '\u0131', + 'imof;': '\u22b7', + 'imped;': '\u01b5', + 'Implies;': '\u21d2', + 'in;': '\u2208', + 'incare;': '\u2105', + 'infin;': '\u221e', + 'infintie;': '\u29dd', + 'inodot;': '\u0131', + 'Int;': '\u222c', + 'int;': '\u222b', + 'intcal;': '\u22ba', + 'integers;': '\u2124', + 'Integral;': '\u222b', + 'intercal;': '\u22ba', + 'Intersection;': '\u22c2', + 'intlarhk;': '\u2a17', + 'intprod;': '\u2a3c', + 'InvisibleComma;': '\u2063', + 'InvisibleTimes;': '\u2062', + 'IOcy;': '\u0401', + 'iocy;': '\u0451', + 'Iogon;': '\u012e', + 'iogon;': '\u012f', + 'Iopf;': '\U0001d540', + 'iopf;': '\U0001d55a', + 'Iota;': '\u0399', + 'iota;': '\u03b9', + 'iprod;': '\u2a3c', + 'iquest;': '\xbf', + 'iquest': '\xbf', + 'Iscr;': '\u2110', + 'iscr;': '\U0001d4be', + 'isin;': '\u2208', + 'isindot;': '\u22f5', + 'isinE;': '\u22f9', + 'isins;': '\u22f4', + 'isinsv;': '\u22f3', + 'isinv;': '\u2208', + 'it;': '\u2062', + 'Itilde;': '\u0128', + 'itilde;': '\u0129', + 'Iukcy;': '\u0406', + 'iukcy;': '\u0456', + 'Iuml;': '\xcf', + 'Iuml': '\xcf', + 'iuml;': '\xef', + 'iuml': '\xef', + 'Jcirc;': '\u0134', + 'jcirc;': '\u0135', + 'Jcy;': '\u0419', + 'jcy;': '\u0439', + 'Jfr;': '\U0001d50d', + 'jfr;': '\U0001d527', + 'jmath;': '\u0237', + 'Jopf;': '\U0001d541', + 'jopf;': 
'\U0001d55b', + 'Jscr;': '\U0001d4a5', + 'jscr;': '\U0001d4bf', + 'Jsercy;': '\u0408', + 'jsercy;': '\u0458', + 'Jukcy;': '\u0404', + 'jukcy;': '\u0454', + 'Kappa;': '\u039a', + 'kappa;': '\u03ba', + 'kappav;': '\u03f0', + 'Kcedil;': '\u0136', + 'kcedil;': '\u0137', + 'Kcy;': '\u041a', + 'kcy;': '\u043a', + 'Kfr;': '\U0001d50e', + 'kfr;': '\U0001d528', + 'kgreen;': '\u0138', + 'KHcy;': '\u0425', + 'khcy;': '\u0445', + 'KJcy;': '\u040c', + 'kjcy;': '\u045c', + 'Kopf;': '\U0001d542', + 'kopf;': '\U0001d55c', + 'Kscr;': '\U0001d4a6', + 'kscr;': '\U0001d4c0', + 'lAarr;': '\u21da', + 'Lacute;': '\u0139', + 'lacute;': '\u013a', + 'laemptyv;': '\u29b4', + 'lagran;': '\u2112', + 'Lambda;': '\u039b', + 'lambda;': '\u03bb', + 'Lang;': '\u27ea', + 'lang;': '\u2329', + 'langd;': '\u2991', + 'langle;': '\u2329', + 'lap;': '\u2a85', + 'Laplacetrf;': '\u2112', + 'laquo;': '\xab', + 'laquo': '\xab', + 'Larr;': '\u219e', + 'lArr;': '\u21d0', + 'larr;': '\u2190', + 'larrb;': '\u21e4', + 'larrbfs;': '\u291f', + 'larrfs;': '\u291d', + 'larrhk;': '\u21a9', + 'larrlp;': '\u21ab', + 'larrpl;': '\u2939', + 'larrsim;': '\u2973', + 'larrtl;': '\u21a2', + 'lat;': '\u2aab', + 'lAtail;': '\u291b', + 'latail;': '\u2919', + 'late;': '\u2aad', + 'lates;': '\u2aad\ufe00', + 'lBarr;': '\u290e', + 'lbarr;': '\u290c', + 'lbbrk;': '\u2772', + 'lbrace;': '{', + 'lbrack;': '[', + 'lbrke;': '\u298b', + 'lbrksld;': '\u298f', + 'lbrkslu;': '\u298d', + 'Lcaron;': '\u013d', + 'lcaron;': '\u013e', + 'Lcedil;': '\u013b', + 'lcedil;': '\u013c', + 'lceil;': '\u2308', + 'lcub;': '{', + 'Lcy;': '\u041b', + 'lcy;': '\u043b', + 'ldca;': '\u2936', + 'ldquo;': '\u201c', + 'ldquor;': '\u201e', + 'ldrdhar;': '\u2967', + 'ldrushar;': '\u294b', + 'ldsh;': '\u21b2', + 'lE;': '\u2266', + 'le;': '\u2264', + 'LeftAngleBracket;': '\u2329', + 'LeftArrow;': '\u2190', + 'Leftarrow;': '\u21d0', + 'leftarrow;': '\u2190', + 'LeftArrowBar;': '\u21e4', + 'LeftArrowRightArrow;': '\u21c6', + 'leftarrowtail;': '\u21a2', + 'LeftCeiling;': '\u2308', + 'LeftDoubleBracket;': '\u27e6', + 'LeftDownTeeVector;': '\u2961', + 'LeftDownVector;': '\u21c3', + 'LeftDownVectorBar;': '\u2959', + 'LeftFloor;': '\u230a', + 'leftharpoondown;': '\u21bd', + 'leftharpoonup;': '\u21bc', + 'leftleftarrows;': '\u21c7', + 'LeftRightArrow;': '\u2194', + 'Leftrightarrow;': '\u21d4', + 'leftrightarrow;': '\u2194', + 'leftrightarrows;': '\u21c6', + 'leftrightharpoons;': '\u21cb', + 'leftrightsquigarrow;': '\u21ad', + 'LeftRightVector;': '\u294e', + 'LeftTee;': '\u22a3', + 'LeftTeeArrow;': '\u21a4', + 'LeftTeeVector;': '\u295a', + 'leftthreetimes;': '\u22cb', + 'LeftTriangle;': '\u22b2', + 'LeftTriangleBar;': '\u29cf', + 'LeftTriangleEqual;': '\u22b4', + 'LeftUpDownVector;': '\u2951', + 'LeftUpTeeVector;': '\u2960', + 'LeftUpVector;': '\u21bf', + 'LeftUpVectorBar;': '\u2958', + 'LeftVector;': '\u21bc', + 'LeftVectorBar;': '\u2952', + 'lEg;': '\u2a8b', + 'leg;': '\u22da', + 'leq;': '\u2264', + 'leqq;': '\u2266', + 'leqslant;': '\u2a7d', + 'les;': '\u2a7d', + 'lescc;': '\u2aa8', + 'lesdot;': '\u2a7f', + 'lesdoto;': '\u2a81', + 'lesdotor;': '\u2a83', + 'lesg;': '\u22da\ufe00', + 'lesges;': '\u2a93', + 'lessapprox;': '\u2a85', + 'lessdot;': '\u22d6', + 'lesseqgtr;': '\u22da', + 'lesseqqgtr;': '\u2a8b', + 'LessEqualGreater;': '\u22da', + 'LessFullEqual;': '\u2266', + 'LessGreater;': '\u2276', + 'lessgtr;': '\u2276', + 'LessLess;': '\u2aa1', + 'lesssim;': '\u2272', + 'LessSlantEqual;': '\u2a7d', + 'LessTilde;': '\u2272', + 'lfisht;': '\u297c', + 'lfloor;': '\u230a', + 'Lfr;': '\U0001d50f', + 
'lfr;': '\U0001d529', + 'lg;': '\u2276', + 'lgE;': '\u2a91', + 'lHar;': '\u2962', + 'lhard;': '\u21bd', + 'lharu;': '\u21bc', + 'lharul;': '\u296a', + 'lhblk;': '\u2584', + 'LJcy;': '\u0409', + 'ljcy;': '\u0459', + 'Ll;': '\u22d8', + 'll;': '\u226a', + 'llarr;': '\u21c7', + 'llcorner;': '\u231e', + 'Lleftarrow;': '\u21da', + 'llhard;': '\u296b', + 'lltri;': '\u25fa', + 'Lmidot;': '\u013f', + 'lmidot;': '\u0140', + 'lmoust;': '\u23b0', + 'lmoustache;': '\u23b0', + 'lnap;': '\u2a89', + 'lnapprox;': '\u2a89', + 'lnE;': '\u2268', + 'lne;': '\u2a87', + 'lneq;': '\u2a87', + 'lneqq;': '\u2268', + 'lnsim;': '\u22e6', + 'loang;': '\u27ec', + 'loarr;': '\u21fd', + 'lobrk;': '\u27e6', + 'LongLeftArrow;': '\u27f5', + 'Longleftarrow;': '\u27f8', + 'longleftarrow;': '\u27f5', + 'LongLeftRightArrow;': '\u27f7', + 'Longleftrightarrow;': '\u27fa', + 'longleftrightarrow;': '\u27f7', + 'longmapsto;': '\u27fc', + 'LongRightArrow;': '\u27f6', + 'Longrightarrow;': '\u27f9', + 'longrightarrow;': '\u27f6', + 'looparrowleft;': '\u21ab', + 'looparrowright;': '\u21ac', + 'lopar;': '\u2985', + 'Lopf;': '\U0001d543', + 'lopf;': '\U0001d55d', + 'loplus;': '\u2a2d', + 'lotimes;': '\u2a34', + 'lowast;': '\u2217', + 'lowbar;': '_', + 'LowerLeftArrow;': '\u2199', + 'LowerRightArrow;': '\u2198', + 'loz;': '\u25ca', + 'lozenge;': '\u25ca', + 'lozf;': '\u29eb', + 'lpar;': '(', + 'lparlt;': '\u2993', + 'lrarr;': '\u21c6', + 'lrcorner;': '\u231f', + 'lrhar;': '\u21cb', + 'lrhard;': '\u296d', + 'lrm;': '\u200e', + 'lrtri;': '\u22bf', + 'lsaquo;': '\u2039', + 'Lscr;': '\u2112', + 'lscr;': '\U0001d4c1', + 'Lsh;': '\u21b0', + 'lsh;': '\u21b0', + 'lsim;': '\u2272', + 'lsime;': '\u2a8d', + 'lsimg;': '\u2a8f', + 'lsqb;': '[', + 'lsquo;': '\u2018', + 'lsquor;': '\u201a', + 'Lstrok;': '\u0141', + 'lstrok;': '\u0142', + 'LT;': '<', + 'LT': '<', + 'Lt;': '\u226a', + 'lt;': '<', + 'lt': '<', + 'ltcc;': '\u2aa6', + 'ltcir;': '\u2a79', + 'ltdot;': '\u22d6', + 'lthree;': '\u22cb', + 'ltimes;': '\u22c9', + 'ltlarr;': '\u2976', + 'ltquest;': '\u2a7b', + 'ltri;': '\u25c3', + 'ltrie;': '\u22b4', + 'ltrif;': '\u25c2', + 'ltrPar;': '\u2996', + 'lurdshar;': '\u294a', + 'luruhar;': '\u2966', + 'lvertneqq;': '\u2268\ufe00', + 'lvnE;': '\u2268\ufe00', + 'macr;': '\xaf', + 'macr': '\xaf', + 'male;': '\u2642', + 'malt;': '\u2720', + 'maltese;': '\u2720', + 'Map;': '\u2905', + 'map;': '\u21a6', + 'mapsto;': '\u21a6', + 'mapstodown;': '\u21a7', + 'mapstoleft;': '\u21a4', + 'mapstoup;': '\u21a5', + 'marker;': '\u25ae', + 'mcomma;': '\u2a29', + 'Mcy;': '\u041c', + 'mcy;': '\u043c', + 'mdash;': '\u2014', + 'mDDot;': '\u223a', + 'measuredangle;': '\u2221', + 'MediumSpace;': '\u205f', + 'Mellintrf;': '\u2133', + 'Mfr;': '\U0001d510', + 'mfr;': '\U0001d52a', + 'mho;': '\u2127', + 'micro;': '\xb5', + 'micro': '\xb5', + 'mid;': '\u2223', + 'midast;': '*', + 'midcir;': '\u2af0', + 'middot;': '\xb7', + 'middot': '\xb7', + 'minus;': '\u2212', + 'minusb;': '\u229f', + 'minusd;': '\u2238', + 'minusdu;': '\u2a2a', + 'MinusPlus;': '\u2213', + 'mlcp;': '\u2adb', + 'mldr;': '\u2026', + 'mnplus;': '\u2213', + 'models;': '\u22a7', + 'Mopf;': '\U0001d544', + 'mopf;': '\U0001d55e', + 'mp;': '\u2213', + 'Mscr;': '\u2133', + 'mscr;': '\U0001d4c2', + 'mstpos;': '\u223e', + 'Mu;': '\u039c', + 'mu;': '\u03bc', + 'multimap;': '\u22b8', + 'mumap;': '\u22b8', + 'nabla;': '\u2207', + 'Nacute;': '\u0143', + 'nacute;': '\u0144', + 'nang;': '\u2220\u20d2', + 'nap;': '\u2249', + 'napE;': '\u2a70\u0338', + 'napid;': '\u224b\u0338', + 'napos;': '\u0149', + 'napprox;': '\u2249', + 'natur;': 
'\u266e', + 'natural;': '\u266e', + 'naturals;': '\u2115', + 'nbsp;': '\xa0', + 'nbsp': '\xa0', + 'nbump;': '\u224e\u0338', + 'nbumpe;': '\u224f\u0338', + 'ncap;': '\u2a43', + 'Ncaron;': '\u0147', + 'ncaron;': '\u0148', + 'Ncedil;': '\u0145', + 'ncedil;': '\u0146', + 'ncong;': '\u2247', + 'ncongdot;': '\u2a6d\u0338', + 'ncup;': '\u2a42', + 'Ncy;': '\u041d', + 'ncy;': '\u043d', + 'ndash;': '\u2013', + 'ne;': '\u2260', + 'nearhk;': '\u2924', + 'neArr;': '\u21d7', + 'nearr;': '\u2197', + 'nearrow;': '\u2197', + 'nedot;': '\u2250\u0338', + 'NegativeMediumSpace;': '\u200b', + 'NegativeThickSpace;': '\u200b', + 'NegativeThinSpace;': '\u200b', + 'NegativeVeryThinSpace;': '\u200b', + 'nequiv;': '\u2262', + 'nesear;': '\u2928', + 'nesim;': '\u2242\u0338', + 'NestedGreaterGreater;': '\u226b', + 'NestedLessLess;': '\u226a', + 'NewLine;': '\u240a', + 'nexist;': '\u2204', + 'nexists;': '\u2204', + 'Nfr;': '\U0001d511', + 'nfr;': '\U0001d52b', + 'ngE;': '\u2267\u0338', + 'nge;': '\u2271', + 'ngeq;': '\u2271', + 'ngeqq;': '\u2267\u0338', + 'ngeqslant;': '\u2a7e\u0338', + 'nges;': '\u2a7e\u0338', + 'nGg;': '\u22d9\u0338', + 'ngsim;': '\u2275', + 'nGt;': '\u226b\u20d2', + 'ngt;': '\u226f', + 'ngtr;': '\u226f', + 'nGtv;': '\u226b\u0338', + 'nhArr;': '\u21ce', + 'nharr;': '\u21ae', + 'nhpar;': '\u2af2', + 'ni;': '\u220b', + 'nis;': '\u22fc', + 'nisd;': '\u22fa', + 'niv;': '\u220b', + 'NJcy;': '\u040a', + 'njcy;': '\u045a', + 'nlArr;': '\u21cd', + 'nlarr;': '\u219a', + 'nldr;': '\u2025', + 'nlE;': '\u2266\u0338', + 'nle;': '\u2270', + 'nLeftarrow;': '\u21cd', + 'nleftarrow;': '\u219a', + 'nLeftrightarrow;': '\u21ce', + 'nleftrightarrow;': '\u21ae', + 'nleq;': '\u2270', + 'nleqq;': '\u2266\u0338', + 'nleqslant;': '\u2a7d\u0338', + 'nles;': '\u2a7d\u0338', + 'nless;': '\u226e', + 'nLl;': '\u22d8\u0338', + 'nlsim;': '\u2274', + 'nLt;': '\u226a\u20d2', + 'nlt;': '\u226e', + 'nltri;': '\u22ea', + 'nltrie;': '\u22ec', + 'nLtv;': '\u226a\u0338', + 'nmid;': '\u2224', + 'NoBreak;': '\u2060', + 'NonBreakingSpace;': '\xa0', + 'Nopf;': '\u2115', + 'nopf;': '\U0001d55f', + 'Not;': '\u2aec', + 'not;': '\xac', + 'not': '\xac', + 'NotCongruent;': '\u2262', + 'NotCupCap;': '\u226d', + 'NotDoubleVerticalBar;': '\u2226', + 'NotElement;': '\u2209', + 'NotEqual;': '\u2260', + 'NotEqualTilde;': '\u2242\u0338', + 'NotExists;': '\u2204', + 'NotGreater;': '\u226f', + 'NotGreaterEqual;': '\u2271', + 'NotGreaterFullEqual;': '\u2267\u0338', + 'NotGreaterGreater;': '\u226b\u0338', + 'NotGreaterLess;': '\u2279', + 'NotGreaterSlantEqual;': '\u2a7e\u0338', + 'NotGreaterTilde;': '\u2275', + 'NotHumpDownHump;': '\u224e\u0338', + 'NotHumpEqual;': '\u224f\u0338', + 'notin;': '\u2209', + 'notindot;': '\u22f5\u0338', + 'notinE;': '\u22f9\u0338', + 'notinva;': '\u2209', + 'notinvb;': '\u22f7', + 'notinvc;': '\u22f6', + 'NotLeftTriangle;': '\u22ea', + 'NotLeftTriangleBar;': '\u29cf\u0338', + 'NotLeftTriangleEqual;': '\u22ec', + 'NotLess;': '\u226e', + 'NotLessEqual;': '\u2270', + 'NotLessGreater;': '\u2278', + 'NotLessLess;': '\u226a\u0338', + 'NotLessSlantEqual;': '\u2a7d\u0338', + 'NotLessTilde;': '\u2274', + 'NotNestedGreaterGreater;': '\u2aa2\u0338', + 'NotNestedLessLess;': '\u2aa1\u0338', + 'notni;': '\u220c', + 'notniva;': '\u220c', + 'notnivb;': '\u22fe', + 'notnivc;': '\u22fd', + 'NotPrecedes;': '\u2280', + 'NotPrecedesEqual;': '\u2aaf\u0338', + 'NotPrecedesSlantEqual;': '\u22e0', + 'NotReverseElement;': '\u220c', + 'NotRightTriangle;': '\u22eb', + 'NotRightTriangleBar;': '\u29d0\u0338', + 'NotRightTriangleEqual;': '\u22ed', + 
'NotSquareSubset;': '\u228f\u0338', + 'NotSquareSubsetEqual;': '\u22e2', + 'NotSquareSuperset;': '\u2290\u0338', + 'NotSquareSupersetEqual;': '\u22e3', + 'NotSubset;': '\u2282\u20d2', + 'NotSubsetEqual;': '\u2288', + 'NotSucceeds;': '\u2281', + 'NotSucceedsEqual;': '\u2ab0\u0338', + 'NotSucceedsSlantEqual;': '\u22e1', + 'NotSucceedsTilde;': '\u227f\u0338', + 'NotSuperset;': '\u2283\u20d2', + 'NotSupersetEqual;': '\u2289', + 'NotTilde;': '\u2241', + 'NotTildeEqual;': '\u2244', + 'NotTildeFullEqual;': '\u2247', + 'NotTildeTilde;': '\u2249', + 'NotVerticalBar;': '\u2224', + 'npar;': '\u2226', + 'nparallel;': '\u2226', + 'nparsl;': '\u2afd\u20e5', + 'npart;': '\u2202\u0338', + 'npolint;': '\u2a14', + 'npr;': '\u2280', + 'nprcue;': '\u22e0', + 'npre;': '\u2aaf\u0338', + 'nprec;': '\u2280', + 'npreceq;': '\u2aaf\u0338', + 'nrArr;': '\u21cf', + 'nrarr;': '\u219b', + 'nrarrc;': '\u2933\u0338', + 'nrarrw;': '\u219d\u0338', + 'nRightarrow;': '\u21cf', + 'nrightarrow;': '\u219b', + 'nrtri;': '\u22eb', + 'nrtrie;': '\u22ed', + 'nsc;': '\u2281', + 'nsccue;': '\u22e1', + 'nsce;': '\u2ab0\u0338', + 'Nscr;': '\U0001d4a9', + 'nscr;': '\U0001d4c3', + 'nshortmid;': '\u2224', + 'nshortparallel;': '\u2226', + 'nsim;': '\u2241', + 'nsime;': '\u2244', + 'nsimeq;': '\u2244', + 'nsmid;': '\u2224', + 'nspar;': '\u2226', + 'nsqsube;': '\u22e2', + 'nsqsupe;': '\u22e3', + 'nsub;': '\u2284', + 'nsubE;': '\u2ac5\u0338', + 'nsube;': '\u2288', + 'nsubset;': '\u2282\u20d2', + 'nsubseteq;': '\u2288', + 'nsubseteqq;': '\u2ac5\u0338', + 'nsucc;': '\u2281', + 'nsucceq;': '\u2ab0\u0338', + 'nsup;': '\u2285', + 'nsupE;': '\u2ac6\u0338', + 'nsupe;': '\u2289', + 'nsupset;': '\u2283\u20d2', + 'nsupseteq;': '\u2289', + 'nsupseteqq;': '\u2ac6\u0338', + 'ntgl;': '\u2279', + 'Ntilde;': '\xd1', + 'Ntilde': '\xd1', + 'ntilde;': '\xf1', + 'ntilde': '\xf1', + 'ntlg;': '\u2278', + 'ntriangleleft;': '\u22ea', + 'ntrianglelefteq;': '\u22ec', + 'ntriangleright;': '\u22eb', + 'ntrianglerighteq;': '\u22ed', + 'Nu;': '\u039d', + 'nu;': '\u03bd', + 'num;': '#', + 'numero;': '\u2116', + 'numsp;': '\u2007', + 'nvap;': '\u224d\u20d2', + 'nVDash;': '\u22af', + 'nVdash;': '\u22ae', + 'nvDash;': '\u22ad', + 'nvdash;': '\u22ac', + 'nvge;': '\u2265\u20d2', + 'nvgt;': '>\u20d2', + 'nvHarr;': '\u2904', + 'nvinfin;': '\u29de', + 'nvlArr;': '\u2902', + 'nvle;': '\u2264\u20d2', + 'nvlt;': '<\u20d2', + 'nvltrie;': '\u22b4\u20d2', + 'nvrArr;': '\u2903', + 'nvrtrie;': '\u22b5\u20d2', + 'nvsim;': '\u223c\u20d2', + 'nwarhk;': '\u2923', + 'nwArr;': '\u21d6', + 'nwarr;': '\u2196', + 'nwarrow;': '\u2196', + 'nwnear;': '\u2927', + 'Oacute;': '\xd3', + 'Oacute': '\xd3', + 'oacute;': '\xf3', + 'oacute': '\xf3', + 'oast;': '\u229b', + 'ocir;': '\u229a', + 'Ocirc;': '\xd4', + 'Ocirc': '\xd4', + 'ocirc;': '\xf4', + 'ocirc': '\xf4', + 'Ocy;': '\u041e', + 'ocy;': '\u043e', + 'odash;': '\u229d', + 'Odblac;': '\u0150', + 'odblac;': '\u0151', + 'odiv;': '\u2a38', + 'odot;': '\u2299', + 'odsold;': '\u29bc', + 'OElig;': '\u0152', + 'oelig;': '\u0153', + 'ofcir;': '\u29bf', + 'Ofr;': '\U0001d512', + 'ofr;': '\U0001d52c', + 'ogon;': '\u02db', + 'Ograve;': '\xd2', + 'Ograve': '\xd2', + 'ograve;': '\xf2', + 'ograve': '\xf2', + 'ogt;': '\u29c1', + 'ohbar;': '\u29b5', + 'ohm;': '\u03a9', + 'oint;': '\u222e', + 'olarr;': '\u21ba', + 'olcir;': '\u29be', + 'olcross;': '\u29bb', + 'oline;': '\u203e', + 'olt;': '\u29c0', + 'Omacr;': '\u014c', + 'omacr;': '\u014d', + 'Omega;': '\u03a9', + 'omega;': '\u03c9', + 'Omicron;': '\u039f', + 'omicron;': '\u03bf', + 'omid;': '\u29b6', + 'ominus;': 
'\u2296', + 'Oopf;': '\U0001d546', + 'oopf;': '\U0001d560', + 'opar;': '\u29b7', + 'OpenCurlyDoubleQuote;': '\u201c', + 'OpenCurlyQuote;': '\u2018', + 'operp;': '\u29b9', + 'oplus;': '\u2295', + 'Or;': '\u2a54', + 'or;': '\u2228', + 'orarr;': '\u21bb', + 'ord;': '\u2a5d', + 'order;': '\u2134', + 'orderof;': '\u2134', + 'ordf;': '\xaa', + 'ordf': '\xaa', + 'ordm;': '\xba', + 'ordm': '\xba', + 'origof;': '\u22b6', + 'oror;': '\u2a56', + 'orslope;': '\u2a57', + 'orv;': '\u2a5b', + 'oS;': '\u24c8', + 'Oscr;': '\U0001d4aa', + 'oscr;': '\u2134', + 'Oslash;': '\xd8', + 'Oslash': '\xd8', + 'oslash;': '\xf8', + 'oslash': '\xf8', + 'osol;': '\u2298', + 'Otilde;': '\xd5', + 'Otilde': '\xd5', + 'otilde;': '\xf5', + 'otilde': '\xf5', + 'Otimes;': '\u2a37', + 'otimes;': '\u2297', + 'otimesas;': '\u2a36', + 'Ouml;': '\xd6', + 'Ouml': '\xd6', + 'ouml;': '\xf6', + 'ouml': '\xf6', + 'ovbar;': '\u233d', + 'OverBar;': '\u203e', + 'OverBrace;': '\u23de', + 'OverBracket;': '\u23b4', + 'OverParenthesis;': '\u23dc', + 'par;': '\u2225', + 'para;': '\xb6', + 'para': '\xb6', + 'parallel;': '\u2225', + 'parsim;': '\u2af3', + 'parsl;': '\u2afd', + 'part;': '\u2202', + 'PartialD;': '\u2202', + 'Pcy;': '\u041f', + 'pcy;': '\u043f', + 'percnt;': '%', + 'period;': '.', + 'permil;': '\u2030', + 'perp;': '\u22a5', + 'pertenk;': '\u2031', + 'Pfr;': '\U0001d513', + 'pfr;': '\U0001d52d', + 'Phi;': '\u03a6', + 'phi;': '\u03c6', + 'phiv;': '\u03d5', + 'phmmat;': '\u2133', + 'phone;': '\u260e', + 'Pi;': '\u03a0', + 'pi;': '\u03c0', + 'pitchfork;': '\u22d4', + 'piv;': '\u03d6', + 'planck;': '\u210f', + 'planckh;': '\u210e', + 'plankv;': '\u210f', + 'plus;': '+', + 'plusacir;': '\u2a23', + 'plusb;': '\u229e', + 'pluscir;': '\u2a22', + 'plusdo;': '\u2214', + 'plusdu;': '\u2a25', + 'pluse;': '\u2a72', + 'PlusMinus;': '\xb1', + 'plusmn;': '\xb1', + 'plusmn': '\xb1', + 'plussim;': '\u2a26', + 'plustwo;': '\u2a27', + 'pm;': '\xb1', + 'Poincareplane;': '\u210c', + 'pointint;': '\u2a15', + 'Popf;': '\u2119', + 'popf;': '\U0001d561', + 'pound;': '\xa3', + 'pound': '\xa3', + 'Pr;': '\u2abb', + 'pr;': '\u227a', + 'prap;': '\u2ab7', + 'prcue;': '\u227c', + 'prE;': '\u2ab3', + 'pre;': '\u2aaf', + 'prec;': '\u227a', + 'precapprox;': '\u2ab7', + 'preccurlyeq;': '\u227c', + 'Precedes;': '\u227a', + 'PrecedesEqual;': '\u2aaf', + 'PrecedesSlantEqual;': '\u227c', + 'PrecedesTilde;': '\u227e', + 'preceq;': '\u2aaf', + 'precnapprox;': '\u2ab9', + 'precneqq;': '\u2ab5', + 'precnsim;': '\u22e8', + 'precsim;': '\u227e', + 'Prime;': '\u2033', + 'prime;': '\u2032', + 'primes;': '\u2119', + 'prnap;': '\u2ab9', + 'prnE;': '\u2ab5', + 'prnsim;': '\u22e8', + 'prod;': '\u220f', + 'Product;': '\u220f', + 'profalar;': '\u232e', + 'profline;': '\u2312', + 'profsurf;': '\u2313', + 'prop;': '\u221d', + 'Proportion;': '\u2237', + 'Proportional;': '\u221d', + 'propto;': '\u221d', + 'prsim;': '\u227e', + 'prurel;': '\u22b0', + 'Pscr;': '\U0001d4ab', + 'pscr;': '\U0001d4c5', + 'Psi;': '\u03a8', + 'psi;': '\u03c8', + 'puncsp;': '\u2008', + 'Qfr;': '\U0001d514', + 'qfr;': '\U0001d52e', + 'qint;': '\u2a0c', + 'Qopf;': '\u211a', + 'qopf;': '\U0001d562', + 'qprime;': '\u2057', + 'Qscr;': '\U0001d4ac', + 'qscr;': '\U0001d4c6', + 'quaternions;': '\u210d', + 'quatint;': '\u2a16', + 'quest;': '?', + 'questeq;': '\u225f', + 'QUOT;': '"', + 'QUOT': '"', + 'quot;': '"', + 'quot': '"', + 'rAarr;': '\u21db', + 'race;': '\u223d\u0331', + 'Racute;': '\u0154', + 'racute;': '\u0155', + 'radic;': '\u221a', + 'raemptyv;': '\u29b3', + 'Rang;': '\u27eb', + 'rang;': '\u232a', + 'rangd;': 
'\u2992', + 'range;': '\u29a5', + 'rangle;': '\u232a', + 'raquo;': '\xbb', + 'raquo': '\xbb', + 'Rarr;': '\u21a0', + 'rArr;': '\u21d2', + 'rarr;': '\u2192', + 'rarrap;': '\u2975', + 'rarrb;': '\u21e5', + 'rarrbfs;': '\u2920', + 'rarrc;': '\u2933', + 'rarrfs;': '\u291e', + 'rarrhk;': '\u21aa', + 'rarrlp;': '\u21ac', + 'rarrpl;': '\u2945', + 'rarrsim;': '\u2974', + 'Rarrtl;': '\u2916', + 'rarrtl;': '\u21a3', + 'rarrw;': '\u219d', + 'rAtail;': '\u291c', + 'ratail;': '\u291a', + 'ratio;': '\u2236', + 'rationals;': '\u211a', + 'RBarr;': '\u2910', + 'rBarr;': '\u290f', + 'rbarr;': '\u290d', + 'rbbrk;': '\u2773', + 'rbrace;': '}', + 'rbrack;': ']', + 'rbrke;': '\u298c', + 'rbrksld;': '\u298e', + 'rbrkslu;': '\u2990', + 'Rcaron;': '\u0158', + 'rcaron;': '\u0159', + 'Rcedil;': '\u0156', + 'rcedil;': '\u0157', + 'rceil;': '\u2309', + 'rcub;': '}', + 'Rcy;': '\u0420', + 'rcy;': '\u0440', + 'rdca;': '\u2937', + 'rdldhar;': '\u2969', + 'rdquo;': '\u201d', + 'rdquor;': '\u201d', + 'rdsh;': '\u21b3', + 'Re;': '\u211c', + 'real;': '\u211c', + 'realine;': '\u211b', + 'realpart;': '\u211c', + 'reals;': '\u211d', + 'rect;': '\u25ad', + 'REG;': '\xae', + 'REG': '\xae', + 'reg;': '\xae', + 'reg': '\xae', + 'ReverseElement;': '\u220b', + 'ReverseEquilibrium;': '\u21cb', + 'ReverseUpEquilibrium;': '\u296f', + 'rfisht;': '\u297d', + 'rfloor;': '\u230b', + 'Rfr;': '\u211c', + 'rfr;': '\U0001d52f', + 'rHar;': '\u2964', + 'rhard;': '\u21c1', + 'rharu;': '\u21c0', + 'rharul;': '\u296c', + 'Rho;': '\u03a1', + 'rho;': '\u03c1', + 'rhov;': '\u03f1', + 'RightAngleBracket;': '\u232a', + 'RightArrow;': '\u2192', + 'Rightarrow;': '\u21d2', + 'rightarrow;': '\u2192', + 'RightArrowBar;': '\u21e5', + 'RightArrowLeftArrow;': '\u21c4', + 'rightarrowtail;': '\u21a3', + 'RightCeiling;': '\u2309', + 'RightDoubleBracket;': '\u27e7', + 'RightDownTeeVector;': '\u295d', + 'RightDownVector;': '\u21c2', + 'RightDownVectorBar;': '\u2955', + 'RightFloor;': '\u230b', + 'rightharpoondown;': '\u21c1', + 'rightharpoonup;': '\u21c0', + 'rightleftarrows;': '\u21c4', + 'rightleftharpoons;': '\u21cc', + 'rightrightarrows;': '\u21c9', + 'rightsquigarrow;': '\u219d', + 'RightTee;': '\u22a2', + 'RightTeeArrow;': '\u21a6', + 'RightTeeVector;': '\u295b', + 'rightthreetimes;': '\u22cc', + 'RightTriangle;': '\u22b3', + 'RightTriangleBar;': '\u29d0', + 'RightTriangleEqual;': '\u22b5', + 'RightUpDownVector;': '\u294f', + 'RightUpTeeVector;': '\u295c', + 'RightUpVector;': '\u21be', + 'RightUpVectorBar;': '\u2954', + 'RightVector;': '\u21c0', + 'RightVectorBar;': '\u2953', + 'ring;': '\u02da', + 'risingdotseq;': '\u2253', + 'rlarr;': '\u21c4', + 'rlhar;': '\u21cc', + 'rlm;': '\u200f', + 'rmoust;': '\u23b1', + 'rmoustache;': '\u23b1', + 'rnmid;': '\u2aee', + 'roang;': '\u27ed', + 'roarr;': '\u21fe', + 'robrk;': '\u27e7', + 'ropar;': '\u2986', + 'Ropf;': '\u211d', + 'ropf;': '\U0001d563', + 'roplus;': '\u2a2e', + 'rotimes;': '\u2a35', + 'RoundImplies;': '\u2970', + 'rpar;': ')', + 'rpargt;': '\u2994', + 'rppolint;': '\u2a12', + 'rrarr;': '\u21c9', + 'Rrightarrow;': '\u21db', + 'rsaquo;': '\u203a', + 'Rscr;': '\u211b', + 'rscr;': '\U0001d4c7', + 'Rsh;': '\u21b1', + 'rsh;': '\u21b1', + 'rsqb;': ']', + 'rsquo;': '\u2019', + 'rsquor;': '\u2019', + 'rthree;': '\u22cc', + 'rtimes;': '\u22ca', + 'rtri;': '\u25b9', + 'rtrie;': '\u22b5', + 'rtrif;': '\u25b8', + 'rtriltri;': '\u29ce', + 'RuleDelayed;': '\u29f4', + 'ruluhar;': '\u2968', + 'rx;': '\u211e', + 'Sacute;': '\u015a', + 'sacute;': '\u015b', + 'sbquo;': '\u201a', + 'Sc;': '\u2abc', + 'sc;': '\u227b', + 'scap;': 
'\u2ab8', + 'Scaron;': '\u0160', + 'scaron;': '\u0161', + 'sccue;': '\u227d', + 'scE;': '\u2ab4', + 'sce;': '\u2ab0', + 'Scedil;': '\u015e', + 'scedil;': '\u015f', + 'Scirc;': '\u015c', + 'scirc;': '\u015d', + 'scnap;': '\u2aba', + 'scnE;': '\u2ab6', + 'scnsim;': '\u22e9', + 'scpolint;': '\u2a13', + 'scsim;': '\u227f', + 'Scy;': '\u0421', + 'scy;': '\u0441', + 'sdot;': '\u22c5', + 'sdotb;': '\u22a1', + 'sdote;': '\u2a66', + 'searhk;': '\u2925', + 'seArr;': '\u21d8', + 'searr;': '\u2198', + 'searrow;': '\u2198', + 'sect;': '\xa7', + 'sect': '\xa7', + 'semi;': ';', + 'seswar;': '\u2929', + 'setminus;': '\u2216', + 'setmn;': '\u2216', + 'sext;': '\u2736', + 'Sfr;': '\U0001d516', + 'sfr;': '\U0001d530', + 'sfrown;': '\u2322', + 'sharp;': '\u266f', + 'SHCHcy;': '\u0429', + 'shchcy;': '\u0449', + 'SHcy;': '\u0428', + 'shcy;': '\u0448', + 'ShortDownArrow;': '\u2193', + 'ShortLeftArrow;': '\u2190', + 'shortmid;': '\u2223', + 'shortparallel;': '\u2225', + 'ShortRightArrow;': '\u2192', + 'ShortUpArrow;': '\u2191', + 'shy;': '\xad', + 'shy': '\xad', + 'Sigma;': '\u03a3', + 'sigma;': '\u03c3', + 'sigmaf;': '\u03c2', + 'sigmav;': '\u03c2', + 'sim;': '\u223c', + 'simdot;': '\u2a6a', + 'sime;': '\u2243', + 'simeq;': '\u2243', + 'simg;': '\u2a9e', + 'simgE;': '\u2aa0', + 'siml;': '\u2a9d', + 'simlE;': '\u2a9f', + 'simne;': '\u2246', + 'simplus;': '\u2a24', + 'simrarr;': '\u2972', + 'slarr;': '\u2190', + 'SmallCircle;': '\u2218', + 'smallsetminus;': '\u2216', + 'smashp;': '\u2a33', + 'smeparsl;': '\u29e4', + 'smid;': '\u2223', + 'smile;': '\u2323', + 'smt;': '\u2aaa', + 'smte;': '\u2aac', + 'smtes;': '\u2aac\ufe00', + 'SOFTcy;': '\u042c', + 'softcy;': '\u044c', + 'sol;': '/', + 'solb;': '\u29c4', + 'solbar;': '\u233f', + 'Sopf;': '\U0001d54a', + 'sopf;': '\U0001d564', + 'spades;': '\u2660', + 'spadesuit;': '\u2660', + 'spar;': '\u2225', + 'sqcap;': '\u2293', + 'sqcaps;': '\u2293\ufe00', + 'sqcup;': '\u2294', + 'sqcups;': '\u2294\ufe00', + 'Sqrt;': '\u221a', + 'sqsub;': '\u228f', + 'sqsube;': '\u2291', + 'sqsubset;': '\u228f', + 'sqsubseteq;': '\u2291', + 'sqsup;': '\u2290', + 'sqsupe;': '\u2292', + 'sqsupset;': '\u2290', + 'sqsupseteq;': '\u2292', + 'squ;': '\u25a1', + 'Square;': '\u25a1', + 'square;': '\u25a1', + 'SquareIntersection;': '\u2293', + 'SquareSubset;': '\u228f', + 'SquareSubsetEqual;': '\u2291', + 'SquareSuperset;': '\u2290', + 'SquareSupersetEqual;': '\u2292', + 'SquareUnion;': '\u2294', + 'squarf;': '\u25aa', + 'squf;': '\u25aa', + 'srarr;': '\u2192', + 'Sscr;': '\U0001d4ae', + 'sscr;': '\U0001d4c8', + 'ssetmn;': '\u2216', + 'ssmile;': '\u2323', + 'sstarf;': '\u22c6', + 'Star;': '\u22c6', + 'star;': '\u2606', + 'starf;': '\u2605', + 'straightepsilon;': '\u03f5', + 'straightphi;': '\u03d5', + 'strns;': '\xaf', + 'Sub;': '\u22d0', + 'sub;': '\u2282', + 'subdot;': '\u2abd', + 'subE;': '\u2ac5', + 'sube;': '\u2286', + 'subedot;': '\u2ac3', + 'submult;': '\u2ac1', + 'subnE;': '\u2acb', + 'subne;': '\u228a', + 'subplus;': '\u2abf', + 'subrarr;': '\u2979', + 'Subset;': '\u22d0', + 'subset;': '\u2282', + 'subseteq;': '\u2286', + 'subseteqq;': '\u2ac5', + 'SubsetEqual;': '\u2286', + 'subsetneq;': '\u228a', + 'subsetneqq;': '\u2acb', + 'subsim;': '\u2ac7', + 'subsub;': '\u2ad5', + 'subsup;': '\u2ad3', + 'succ;': '\u227b', + 'succapprox;': '\u2ab8', + 'succcurlyeq;': '\u227d', + 'Succeeds;': '\u227b', + 'SucceedsEqual;': '\u2ab0', + 'SucceedsSlantEqual;': '\u227d', + 'SucceedsTilde;': '\u227f', + 'succeq;': '\u2ab0', + 'succnapprox;': '\u2aba', + 'succneqq;': '\u2ab6', + 'succnsim;': '\u22e9', + 
'succsim;': '\u227f', + 'SuchThat;': '\u220b', + 'Sum;': '\u2211', + 'sum;': '\u2211', + 'sung;': '\u266a', + 'Sup;': '\u22d1', + 'sup;': '\u2283', + 'sup1;': '\xb9', + 'sup1': '\xb9', + 'sup2;': '\xb2', + 'sup2': '\xb2', + 'sup3;': '\xb3', + 'sup3': '\xb3', + 'supdot;': '\u2abe', + 'supdsub;': '\u2ad8', + 'supE;': '\u2ac6', + 'supe;': '\u2287', + 'supedot;': '\u2ac4', + 'Superset;': '\u2283', + 'SupersetEqual;': '\u2287', + 'suphsol;': '\u27c9', + 'suphsub;': '\u2ad7', + 'suplarr;': '\u297b', + 'supmult;': '\u2ac2', + 'supnE;': '\u2acc', + 'supne;': '\u228b', + 'supplus;': '\u2ac0', + 'Supset;': '\u22d1', + 'supset;': '\u2283', + 'supseteq;': '\u2287', + 'supseteqq;': '\u2ac6', + 'supsetneq;': '\u228b', + 'supsetneqq;': '\u2acc', + 'supsim;': '\u2ac8', + 'supsub;': '\u2ad4', + 'supsup;': '\u2ad6', + 'swarhk;': '\u2926', + 'swArr;': '\u21d9', + 'swarr;': '\u2199', + 'swarrow;': '\u2199', + 'swnwar;': '\u292a', + 'szlig;': '\xdf', + 'szlig': '\xdf', + 'Tab;': '\u2409', + 'target;': '\u2316', + 'Tau;': '\u03a4', + 'tau;': '\u03c4', + 'tbrk;': '\u23b4', + 'Tcaron;': '\u0164', + 'tcaron;': '\u0165', + 'Tcedil;': '\u0162', + 'tcedil;': '\u0163', + 'Tcy;': '\u0422', + 'tcy;': '\u0442', + 'tdot;': '\u25cc\u20db', + 'telrec;': '\u2315', + 'Tfr;': '\U0001d517', + 'tfr;': '\U0001d531', + 'there4;': '\u2234', + 'Therefore;': '\u2234', + 'therefore;': '\u2234', + 'Theta;': '\u0398', + 'theta;': '\u03b8', + 'thetasym;': '\u03d1', + 'thetav;': '\u03d1', + 'thickapprox;': '\u2248', + 'thicksim;': '\u223c', + 'ThickSpace;': '\u205f\u200a', + 'thinsp;': '\u2009', + 'ThinSpace;': '\u2009', + 'thkap;': '\u2248', + 'thksim;': '\u223c', + 'THORN;': '\xde', + 'THORN': '\xde', + 'thorn;': '\xfe', + 'thorn': '\xfe', + 'Tilde;': '\u223c', + 'tilde;': '\u02dc', + 'TildeEqual;': '\u2243', + 'TildeFullEqual;': '\u2245', + 'TildeTilde;': '\u2248', + 'times;': '\xd7', + 'times': '\xd7', + 'timesb;': '\u22a0', + 'timesbar;': '\u2a31', + 'timesd;': '\u2a30', + 'tint;': '\u222d', + 'toea;': '\u2928', + 'top;': '\u22a4', + 'topbot;': '\u2336', + 'topcir;': '\u2af1', + 'Topf;': '\U0001d54b', + 'topf;': '\U0001d565', + 'topfork;': '\u2ada', + 'tosa;': '\u2929', + 'tprime;': '\u2034', + 'TRADE;': '\u2122', + 'trade;': '\u2122', + 'triangle;': '\u25b5', + 'triangledown;': '\u25bf', + 'triangleleft;': '\u25c3', + 'trianglelefteq;': '\u22b4', + 'triangleq;': '\u225c', + 'triangleright;': '\u25b9', + 'trianglerighteq;': '\u22b5', + 'tridot;': '\u25ec', + 'trie;': '\u225c', + 'triminus;': '\u2a3a', + 'TripleDot;': '\u25cc\u20db', + 'triplus;': '\u2a39', + 'trisb;': '\u29cd', + 'tritime;': '\u2a3b', + 'trpezium;': '\u23e2', + 'Tscr;': '\U0001d4af', + 'tscr;': '\U0001d4c9', + 'TScy;': '\u0426', + 'tscy;': '\u0446', + 'TSHcy;': '\u040b', + 'tshcy;': '\u045b', + 'Tstrok;': '\u0166', + 'tstrok;': '\u0167', + 'twixt;': '\u226c', + 'twoheadleftarrow;': '\u219e', + 'twoheadrightarrow;': '\u21a0', + 'Uacute;': '\xda', + 'Uacute': '\xda', + 'uacute;': '\xfa', + 'uacute': '\xfa', + 'Uarr;': '\u219f', + 'uArr;': '\u21d1', + 'uarr;': '\u2191', + 'Uarrocir;': '\u2949', + 'Ubrcy;': '\u040e', + 'ubrcy;': '\u045e', + 'Ubreve;': '\u016c', + 'ubreve;': '\u016d', + 'Ucirc;': '\xdb', + 'Ucirc': '\xdb', + 'ucirc;': '\xfb', + 'ucirc': '\xfb', + 'Ucy;': '\u0423', + 'ucy;': '\u0443', + 'udarr;': '\u21c5', + 'Udblac;': '\u0170', + 'udblac;': '\u0171', + 'udhar;': '\u296e', + 'ufisht;': '\u297e', + 'Ufr;': '\U0001d518', + 'ufr;': '\U0001d532', + 'Ugrave;': '\xd9', + 'Ugrave': '\xd9', + 'ugrave;': '\xf9', + 'ugrave': '\xf9', + 'uHar;': '\u2963', + 'uharl;': 
'\u21bf', + 'uharr;': '\u21be', + 'uhblk;': '\u2580', + 'ulcorn;': '\u231c', + 'ulcorner;': '\u231c', + 'ulcrop;': '\u230f', + 'ultri;': '\u25f8', + 'Umacr;': '\u016a', + 'umacr;': '\u016b', + 'uml;': '\xa8', + 'uml': '\xa8', + 'UnderBar;': '_', + 'UnderBrace;': '\u23df', + 'UnderBracket;': '\u23b5', + 'UnderParenthesis;': '\u23dd', + 'Union;': '\u22c3', + 'UnionPlus;': '\u228e', + 'Uogon;': '\u0172', + 'uogon;': '\u0173', + 'Uopf;': '\U0001d54c', + 'uopf;': '\U0001d566', + 'UpArrow;': '\u2191', + 'Uparrow;': '\u21d1', + 'uparrow;': '\u2191', + 'UpArrowBar;': '\u2912', + 'UpArrowDownArrow;': '\u21c5', + 'UpDownArrow;': '\u2195', + 'Updownarrow;': '\u21d5', + 'updownarrow;': '\u2195', + 'UpEquilibrium;': '\u296e', + 'upharpoonleft;': '\u21bf', + 'upharpoonright;': '\u21be', + 'uplus;': '\u228e', + 'UpperLeftArrow;': '\u2196', + 'UpperRightArrow;': '\u2197', + 'Upsi;': '\u03d2', + 'upsi;': '\u03c5', + 'upsih;': '\u03d2', + 'Upsilon;': '\u03a5', + 'upsilon;': '\u03c5', + 'UpTee;': '\u22a5', + 'UpTeeArrow;': '\u21a5', + 'upuparrows;': '\u21c8', + 'urcorn;': '\u231d', + 'urcorner;': '\u231d', + 'urcrop;': '\u230e', + 'Uring;': '\u016e', + 'uring;': '\u016f', + 'urtri;': '\u25f9', + 'Uscr;': '\U0001d4b0', + 'uscr;': '\U0001d4ca', + 'utdot;': '\u22f0', + 'Utilde;': '\u0168', + 'utilde;': '\u0169', + 'utri;': '\u25b5', + 'utrif;': '\u25b4', + 'uuarr;': '\u21c8', + 'Uuml;': '\xdc', + 'Uuml': '\xdc', + 'uuml;': '\xfc', + 'uuml': '\xfc', + 'uwangle;': '\u29a7', + 'vangrt;': '\u299c', + 'varepsilon;': '\u03f5', + 'varkappa;': '\u03f0', + 'varnothing;': '\u2205', + 'varphi;': '\u03d5', + 'varpi;': '\u03d6', + 'varpropto;': '\u221d', + 'vArr;': '\u21d5', + 'varr;': '\u2195', + 'varrho;': '\u03f1', + 'varsigma;': '\u03c2', + 'varsubsetneq;': '\u228a\ufe00', + 'varsubsetneqq;': '\u2acb\ufe00', + 'varsupsetneq;': '\u228b\ufe00', + 'varsupsetneqq;': '\u2acc\ufe00', + 'vartheta;': '\u03d1', + 'vartriangleleft;': '\u22b2', + 'vartriangleright;': '\u22b3', + 'Vbar;': '\u2aeb', + 'vBar;': '\u2ae8', + 'vBarv;': '\u2ae9', + 'Vcy;': '\u0412', + 'vcy;': '\u0432', + 'VDash;': '\u22ab', + 'Vdash;': '\u22a9', + 'vDash;': '\u22a8', + 'vdash;': '\u22a2', + 'Vdashl;': '\u2ae6', + 'Vee;': '\u22c1', + 'vee;': '\u2228', + 'veebar;': '\u22bb', + 'veeeq;': '\u225a', + 'vellip;': '\u22ee', + 'Verbar;': '\u2016', + 'verbar;': '|', + 'Vert;': '\u2016', + 'vert;': '|', + 'VerticalBar;': '\u2223', + 'VerticalLine;': '|', + 'VerticalSeparator;': '\u2758', + 'VerticalTilde;': '\u2240', + 'VeryThinSpace;': '\u200a', + 'Vfr;': '\U0001d519', + 'vfr;': '\U0001d533', + 'vltri;': '\u22b2', + 'vnsub;': '\u2282\u20d2', + 'vnsup;': '\u2283\u20d2', + 'Vopf;': '\U0001d54d', + 'vopf;': '\U0001d567', + 'vprop;': '\u221d', + 'vrtri;': '\u22b3', + 'Vscr;': '\U0001d4b1', + 'vscr;': '\U0001d4cb', + 'vsubnE;': '\u2acb\ufe00', + 'vsubne;': '\u228a\ufe00', + 'vsupnE;': '\u2acc\ufe00', + 'vsupne;': '\u228b\ufe00', + 'Vvdash;': '\u22aa', + 'vzigzag;': '\u299a', + 'Wcirc;': '\u0174', + 'wcirc;': '\u0175', + 'wedbar;': '\u2a5f', + 'Wedge;': '\u22c0', + 'wedge;': '\u2227', + 'wedgeq;': '\u2259', + 'weierp;': '\u2118', + 'Wfr;': '\U0001d51a', + 'wfr;': '\U0001d534', + 'Wopf;': '\U0001d54e', + 'wopf;': '\U0001d568', + 'wp;': '\u2118', + 'wr;': '\u2240', + 'wreath;': '\u2240', + 'Wscr;': '\U0001d4b2', + 'wscr;': '\U0001d4cc', + 'xcap;': '\u22c2', + 'xcirc;': '\u25ef', + 'xcup;': '\u22c3', + 'xdtri;': '\u25bd', + 'Xfr;': '\U0001d51b', + 'xfr;': '\U0001d535', + 'xhArr;': '\u27fa', + 'xharr;': '\u27f7', + 'Xi;': '\u039e', + 'xi;': '\u03be', + 'xlArr;': '\u27f8', 
+ 'xlarr;': '\u27f5', + 'xmap;': '\u27fc', + 'xnis;': '\u22fb', + 'xodot;': '\u2a00', + 'Xopf;': '\U0001d54f', + 'xopf;': '\U0001d569', + 'xoplus;': '\u2a01', + 'xotime;': '\u2a02', + 'xrArr;': '\u27f9', + 'xrarr;': '\u27f6', + 'Xscr;': '\U0001d4b3', + 'xscr;': '\U0001d4cd', + 'xsqcup;': '\u2a06', + 'xuplus;': '\u2a04', + 'xutri;': '\u25b3', + 'xvee;': '\u22c1', + 'xwedge;': '\u22c0', + 'Yacute;': '\xdd', + 'Yacute': '\xdd', + 'yacute;': '\xfd', + 'yacute': '\xfd', + 'YAcy;': '\u042f', + 'yacy;': '\u044f', + 'Ycirc;': '\u0176', + 'ycirc;': '\u0177', + 'Ycy;': '\u042b', + 'ycy;': '\u044b', + 'yen;': '\xa5', + 'yen': '\xa5', + 'Yfr;': '\U0001d51c', + 'yfr;': '\U0001d536', + 'YIcy;': '\u0407', + 'yicy;': '\u0457', + 'Yopf;': '\U0001d550', + 'yopf;': '\U0001d56a', + 'Yscr;': '\U0001d4b4', + 'yscr;': '\U0001d4ce', + 'YUcy;': '\u042e', + 'yucy;': '\u044e', + 'Yuml;': '\u0178', + 'yuml;': '\xff', + 'yuml': '\xff', + 'Zacute;': '\u0179', + 'zacute;': '\u017a', + 'Zcaron;': '\u017d', + 'zcaron;': '\u017e', + 'Zcy;': '\u0417', + 'zcy;': '\u0437', + 'Zdot;': '\u017b', + 'zdot;': '\u017c', + 'zeetrf;': '\u2128', + 'ZeroWidthSpace;': '\u200b', + 'Zeta;': '\u0396', + 'zeta;': '\u03b6', + 'Zfr;': '\u2128', + 'zfr;': '\U0001d537', + 'ZHcy;': '\u0416', + 'zhcy;': '\u0436', + 'zigrarr;': '\u21dd', + 'Zopf;': '\u2124', + 'zopf;': '\U0001d56b', + 'Zscr;': '\U0001d4b5', + 'zscr;': '\U0001d4cf', + 'zwj;': '\u200d', + 'zwnj;': '\u200c', +} + # maps the Unicode codepoint to the HTML entity name codepoint2name = {} diff --git a/Misc/NEWS b/Misc/NEWS --- a/Misc/NEWS +++ b/Misc/NEWS @@ -54,6 +54,10 @@ It is used automatically on platforms supporting the necessary os.openat() and os.unlinkat() functions. Main code by Martin von L?wis. +- Issue #11113: add a new "html5" dictionary containing the named character + references defined by the HTML5 standard and the equivalent Unicode + character(s) to the html.entities module. + - Issue #15114: the strict mode of HTMLParser and the HTMLParseError exception are deprecated now that the parser is able to parse invalid markup. -- Repository URL: http://hg.python.org/cpython From solipsis at pitrou.net Sun Jun 24 06:03:43 2012 From: solipsis at pitrou.net (solipsis at pitrou.net) Date: Sun, 24 Jun 2012 06:03:43 +0200 Subject: [Python-checkins] Daily reference leaks (3b7230997425): sum=0 Message-ID: results for 3b7230997425 on branch "default" -------------------------------------------- Command line was: ['./python', '-m', 'test.regrtest', '-uall', '-R', '3:3:/home/antoine/cpython/refleaks/reflogjhN_HO', '-x'] From python-checkins at python.org Sun Jun 24 06:07:54 2012 From: python-checkins at python.org (eric.araujo) Date: Sun, 24 Jun 2012 06:07:54 +0200 Subject: [Python-checkins] =?utf8?q?cpython=3A_Remove_packaging_from_the_s?= =?utf8?q?tandard_library=2E?= Message-ID: http://hg.python.org/cpython/rev/576b8b182039 changeset: 77665:576b8b182039 user: ?ric Araujo date: Sun Jun 24 00:07:41 2012 -0400 summary: Remove packaging from the standard library. Distutils2 will live on on PyPI and be included in the stdlib when it is ready. 
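As a quick illustration of the Issue #11113 change shown above: the new html5 table is a plain dict in the html.entities module, keyed by entity name (with the trailing ';', plus a handful of legacy names without it), and the values are the replacement strings listed in the diff. A minimal usage sketch, assuming a build that includes this changeset::

    # Look up HTML5 named character references via the new html5 dict;
    # the example keys and values come straight from the mapping above.
    from html.entities import html5

    print(html5['gt;'])         # '>'    -- canonical names keep the ';'
    print(html5['copy'])        # '\xa9' -- some legacy names omit it
    print(html5['Backslash;'])  # '\u2216'
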
See discussion starting at http://mail.python.org/pipermail/python-dev/2012-June/120430.html files: Doc/contents.rst | 2 +- Doc/distutils/index.rst | 12 +- Doc/install/index.rst | 56 - Doc/install/install.rst | 1101 ------- Doc/install/pysetup-config.rst | 43 - Doc/install/pysetup-servers.rst | 59 - Doc/install/pysetup.rst | 158 - Doc/library/distutils.rst | 12 - Doc/library/packaging-misc.rst | 27 - Doc/library/packaging.command.rst | 110 - Doc/library/packaging.compiler.rst | 679 ---- Doc/library/packaging.database.rst | 341 -- Doc/library/packaging.depgraph.rst | 196 - Doc/library/packaging.dist.rst | 108 - Doc/library/packaging.fancy_getopt.rst | 75 - Doc/library/packaging.install.rst | 111 - Doc/library/packaging.metadata.rst | 119 - Doc/library/packaging.pypi.dist.rst | 110 - Doc/library/packaging.pypi.rst | 74 - Doc/library/packaging.pypi.simple.rst | 216 - Doc/library/packaging.pypi.xmlrpc.rst | 141 - Doc/library/packaging.rst | 72 - Doc/library/packaging.tests.pypi_server.rst | 103 - Doc/library/packaging.util.rst | 155 - Doc/library/packaging.version.rst | 101 - Doc/library/python.rst | 1 - Doc/library/site.rst | 4 +- Doc/library/venv.rst | 3 +- Doc/packaging/builtdist.rst | 300 -- Doc/packaging/commandhooks.rst | 47 - Doc/packaging/commandref.rst | 351 -- Doc/packaging/configfile.rst | 124 - Doc/packaging/examples.rst | 334 -- Doc/packaging/extending.rst | 95 - Doc/packaging/index.rst | 45 - Doc/packaging/introduction.rst | 193 - Doc/packaging/packageindex.rst | 104 - Doc/packaging/setupcfg.rst | 883 ----- Doc/packaging/setupscript.rst | 688 ---- Doc/packaging/sourcedist.rst | 264 - Doc/packaging/tutorial.rst | 109 - Doc/packaging/uploading.rst | 80 - Doc/tools/sphinxext/indexcontent.html | 8 +- Doc/tools/sphinxext/susp-ignored.csv | 22 - Doc/using/cmdline.rst | 4 +- Doc/using/scripts.rst | 3 +- Doc/whatsnew/3.3.rst | 46 +- Lib/packaging/__init__.py | 17 - Lib/packaging/_trove.py | 571 --- Lib/packaging/command/__init__.py | 53 - Lib/packaging/command/bdist.py | 141 - Lib/packaging/command/bdist_dumb.py | 139 - Lib/packaging/command/bdist_msi.py | 743 ----- Lib/packaging/command/bdist_wininst.py | 345 -- Lib/packaging/command/build.py | 151 - Lib/packaging/command/build_clib.py | 197 - Lib/packaging/command/build_ext.py | 644 ---- Lib/packaging/command/build_py.py | 392 -- Lib/packaging/command/build_scripts.py | 154 - Lib/packaging/command/check.py | 88 - Lib/packaging/command/clean.py | 76 - Lib/packaging/command/cmd.py | 461 --- Lib/packaging/command/command_template | 35 - Lib/packaging/command/config.py | 349 -- Lib/packaging/command/install_data.py | 79 - Lib/packaging/command/install_dist.py | 605 ---- Lib/packaging/command/install_distinfo.py | 143 - Lib/packaging/command/install_headers.py | 43 - Lib/packaging/command/install_lib.py | 188 - Lib/packaging/command/install_scripts.py | 59 - Lib/packaging/command/register.py | 263 - Lib/packaging/command/sdist.py | 347 -- Lib/packaging/command/test.py | 80 - Lib/packaging/command/upload.py | 168 - Lib/packaging/command/upload_docs.py | 131 - Lib/packaging/command/wininst-10.0-amd64.exe | Bin Lib/packaging/command/wininst-10.0.exe | Bin Lib/packaging/command/wininst-6.0.exe | Bin Lib/packaging/command/wininst-7.1.exe | Bin Lib/packaging/command/wininst-8.0.exe | Bin Lib/packaging/command/wininst-9.0-amd64.exe | Bin Lib/packaging/command/wininst-9.0.exe | Bin Lib/packaging/compat.py | 50 - Lib/packaging/compiler/__init__.py | 274 - Lib/packaging/compiler/bcppcompiler.py | 355 -- Lib/packaging/compiler/ccompiler.py | 863 ----- 
Lib/packaging/compiler/cygwinccompiler.py | 355 -- Lib/packaging/compiler/extension.py | 121 - Lib/packaging/compiler/msvc9compiler.py | 721 ---- Lib/packaging/compiler/msvccompiler.py | 635 ---- Lib/packaging/compiler/unixccompiler.py | 339 -- Lib/packaging/config.py | 391 -- Lib/packaging/create.py | 682 ---- Lib/packaging/database.py | 651 ---- Lib/packaging/depgraph.py | 270 - Lib/packaging/dist.py | 769 ----- Lib/packaging/errors.py | 138 - Lib/packaging/fancy_getopt.py | 388 -- Lib/packaging/install.py | 529 --- Lib/packaging/manifest.py | 381 -- Lib/packaging/markers.py | 189 - Lib/packaging/metadata.py | 570 --- Lib/packaging/pypi/__init__.py | 9 - Lib/packaging/pypi/base.py | 48 - Lib/packaging/pypi/dist.py | 544 --- Lib/packaging/pypi/errors.py | 39 - Lib/packaging/pypi/mirrors.py | 52 - Lib/packaging/pypi/simple.py | 462 --- Lib/packaging/pypi/wrapper.py | 99 - Lib/packaging/pypi/xmlrpc.py | 200 - Lib/packaging/run.py | 663 ---- Lib/packaging/tests/LONG_DESC.txt | 44 - Lib/packaging/tests/PKG-INFO | 57 - Lib/packaging/tests/SETUPTOOLS-PKG-INFO | 182 - Lib/packaging/tests/SETUPTOOLS-PKG-INFO2 | 183 - Lib/packaging/tests/__init__.py | 28 - Lib/packaging/tests/__main__.py | 24 - Lib/packaging/tests/fake_dists/babar-0.1.dist-info/INSTALLER | 0 Lib/packaging/tests/fake_dists/babar-0.1.dist-info/METADATA | 4 - Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RECORD | 0 Lib/packaging/tests/fake_dists/babar-0.1.dist-info/REQUESTED | 0 Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RESOURCES | 2 - Lib/packaging/tests/fake_dists/babar.cfg | 1 - Lib/packaging/tests/fake_dists/babar.png | 0 Lib/packaging/tests/fake_dists/bacon-0.1.egg-info/PKG-INFO | 6 - Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/PKG-INFO | 18 - Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/SOURCES.txt | 0 Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/dependency_links.txt | 1 - Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/entry_points.txt | 3 - Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/not-zip-safe | 1 - Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/requires.txt | 6 - Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/top_level.txt | 0 Lib/packaging/tests/fake_dists/cheese-2.0.2.egg-info | 5 - Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/INSTALLER | 0 Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA | 9 - Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/RECORD | 0 Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/REQUESTED | 0 Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/__init__.py | 1 - Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/chocolate.py | 10 - Lib/packaging/tests/fake_dists/choxie-2.0.0.9/truffles.py | 5 - Lib/packaging/tests/fake_dists/coconuts-aster-10.3.egg-info/PKG-INFO | 5 - Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/INSTALLER | 0 Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/METADATA | 5 - Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/RECORD | 0 Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/REQUESTED | 0 Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/__init__.py | 1 - Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/utils.py | 8 - Lib/packaging/tests/fake_dists/nut-funkyversion.egg-info | 3 - Lib/packaging/tests/fake_dists/strawberry-0.6.egg | Bin Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/INSTALLER | 0 Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/METADATA | 7 - 
Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/RECORD | 0 Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/REQUESTED | 0 Lib/packaging/tests/fake_dists/towel_stuff-0.1/towel_stuff/__init__.py | 18 - Lib/packaging/tests/fake_dists/truffles-5.0.egg-info | 3 - Lib/packaging/tests/fixer/__init__.py | 0 Lib/packaging/tests/fixer/fix_echo.py | 16 - Lib/packaging/tests/fixer/fix_echo2.py | 16 - Lib/packaging/tests/pypi_server.py | 449 --- Lib/packaging/tests/pypi_test_server.py | 59 - Lib/packaging/tests/pypiserver/downloads_with_md5/packages/source/f/foobar/foobar-0.1.tar.gz | Bin Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/badmd5-0.1.tar.gz | 0 Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/index.html | 3 - Lib/packaging/tests/pypiserver/downloads_with_md5/simple/foobar/index.html | 3 - Lib/packaging/tests/pypiserver/downloads_with_md5/simple/index.html | 2 - Lib/packaging/tests/pypiserver/foo_bar_baz/simple/bar/index.html | 6 - Lib/packaging/tests/pypiserver/foo_bar_baz/simple/baz/index.html | 6 - Lib/packaging/tests/pypiserver/foo_bar_baz/simple/foo/index.html | 6 - Lib/packaging/tests/pypiserver/foo_bar_baz/simple/index.html | 3 - Lib/packaging/tests/pypiserver/project_list/simple/index.html | 5 - Lib/packaging/tests/pypiserver/test_found_links/simple/foobar/index.html | 6 - Lib/packaging/tests/pypiserver/test_found_links/simple/index.html | 1 - Lib/packaging/tests/pypiserver/test_pypi_server/external/index.html | 1 - Lib/packaging/tests/pypiserver/test_pypi_server/simple/index.html | 1 - Lib/packaging/tests/pypiserver/with_externals/external/external.html | 3 - Lib/packaging/tests/pypiserver/with_externals/simple/foobar/index.html | 4 - Lib/packaging/tests/pypiserver/with_externals/simple/index.html | 1 - Lib/packaging/tests/pypiserver/with_norel_links/external/homepage.html | 7 - Lib/packaging/tests/pypiserver/with_norel_links/external/nonrel.html | 1 - Lib/packaging/tests/pypiserver/with_norel_links/simple/foobar/index.html | 6 - Lib/packaging/tests/pypiserver/with_norel_links/simple/index.html | 1 - Lib/packaging/tests/pypiserver/with_real_externals/simple/foobar/index.html | 4 - Lib/packaging/tests/pypiserver/with_real_externals/simple/index.html | 1 - Lib/packaging/tests/support.py | 400 -- Lib/packaging/tests/test_ccompiler.py | 15 - Lib/packaging/tests/test_command_bdist.py | 61 - Lib/packaging/tests/test_command_bdist_dumb.py | 91 - Lib/packaging/tests/test_command_bdist_msi.py | 25 - Lib/packaging/tests/test_command_bdist_wininst.py | 32 - Lib/packaging/tests/test_command_build.py | 56 - Lib/packaging/tests/test_command_build_clib.py | 141 - Lib/packaging/tests/test_command_build_ext.py | 394 -- Lib/packaging/tests/test_command_build_py.py | 146 - Lib/packaging/tests/test_command_build_scripts.py | 109 - Lib/packaging/tests/test_command_check.py | 161 - Lib/packaging/tests/test_command_clean.py | 46 - Lib/packaging/tests/test_command_cmd.py | 102 - Lib/packaging/tests/test_command_config.py | 76 - Lib/packaging/tests/test_command_install_data.py | 148 - Lib/packaging/tests/test_command_install_dist.py | 241 - Lib/packaging/tests/test_command_install_distinfo.py | 252 - Lib/packaging/tests/test_command_install_headers.py | 38 - Lib/packaging/tests/test_command_install_lib.py | 110 - Lib/packaging/tests/test_command_install_scripts.py | 75 - Lib/packaging/tests/test_command_register.py | 260 - Lib/packaging/tests/test_command_sdist.py | 394 -- Lib/packaging/tests/test_command_test.py | 223 - 
Lib/packaging/tests/test_command_upload.py | 159 - Lib/packaging/tests/test_command_upload_docs.py | 186 - Lib/packaging/tests/test_compiler.py | 66 - Lib/packaging/tests/test_config.py | 519 --- Lib/packaging/tests/test_create.py | 233 - Lib/packaging/tests/test_cygwinccompiler.py | 88 - Lib/packaging/tests/test_database.py | 686 ---- Lib/packaging/tests/test_depgraph.py | 310 -- Lib/packaging/tests/test_dist.py | 264 - Lib/packaging/tests/test_extension.py | 15 - Lib/packaging/tests/test_install.py | 391 -- Lib/packaging/tests/test_manifest.py | 331 -- Lib/packaging/tests/test_markers.py | 75 - Lib/packaging/tests/test_metadata.py | 454 --- Lib/packaging/tests/test_mixin2to3.py | 87 - Lib/packaging/tests/test_msvc9compiler.py | 140 - Lib/packaging/tests/test_pypi_dist.py | 287 - Lib/packaging/tests/test_pypi_server.py | 88 - Lib/packaging/tests/test_pypi_simple.py | 353 -- Lib/packaging/tests/test_pypi_xmlrpc.py | 101 - Lib/packaging/tests/test_run.py | 92 - Lib/packaging/tests/test_support.py | 78 - Lib/packaging/tests/test_uninstall.py | 124 - Lib/packaging/tests/test_unixccompiler.py | 132 - Lib/packaging/tests/test_util.py | 1013 ------ Lib/packaging/tests/test_version.py | 271 - Lib/packaging/util.py | 1480 ---------- Lib/packaging/version.py | 451 --- Lib/sysconfig.cfg | 3 +- Lib/sysconfig.py | 2 +- Lib/test/regrtest.py | 41 - Lib/test/test_packaging.py | 5 - Lib/test/test_venv.py | 4 - Lib/venv/scripts/nt/pysetup3.py | 11 - Lib/venv/scripts/posix/pysetup3 | 11 - Makefile.pre.in | 54 - Misc/NEWS | 2 + Tools/scripts/pysetup3 | 4 - setup.py | 3 +- 246 files changed, 35 insertions(+), 38764 deletions(-) diff --git a/Doc/contents.rst b/Doc/contents.rst --- a/Doc/contents.rst +++ b/Doc/contents.rst @@ -11,7 +11,7 @@ library/index.rst extending/index.rst c-api/index.rst - packaging/index.rst + distutils/index.rst install/index.rst howto/index.rst faq/index.rst diff --git a/Doc/distutils/index.rst b/Doc/distutils/index.rst --- a/Doc/distutils/index.rst +++ b/Doc/distutils/index.rst @@ -14,12 +14,9 @@ make Python modules and extensions easily available to a wider audience with very little overhead for build/release/install mechanics. -.. deprecated:: 3.3 - :mod:`packaging` replaces Distutils. See :ref:`packaging-index` and - :ref:`packaging-install-index`. - .. toctree:: :maxdepth: 2 + :numbered: introduction.rst setupscript.rst @@ -32,10 +29,3 @@ extending.rst commandref.rst apiref.rst - -Another document describes how to install modules and extensions packaged -following the above guidelines: - -.. toctree:: - - install.rst diff --git a/Doc/install/index.rst b/Doc/install/index.rst deleted file mode 100644 --- a/Doc/install/index.rst +++ /dev/null @@ -1,56 +0,0 @@ -.. _packaging-install-index: - -****************************** - Installing Python Projects -****************************** - -:Author: The Fellowship of the Packaging -:Release: |version| -:Date: |today| - -.. TODO: Fill in XXX comments - -.. The audience for this document includes people who don't know anything - about Python and aren't about to learn the language just in order to - install and maintain it for their users, i.e. system administrators. - Thus, I have to be sure to explain the basics at some point: - sys.path and PYTHONPATH at least. Should probably give pointers to - other docs on "import site", PYTHONSTARTUP, PYTHONHOME, etc. - - Finally, it might be useful to include all the material from my "Care - and Feeding of a Python Installation" talk in here somewhere. Yow! - -.. 
topic:: Abstract - - This document describes Packaging from the end-user's point of view: it - explains how to extend the functionality of a standard Python installation by - building and installing third-party Python modules and applications. - - -This guide is split into a simple overview followed by a longer presentation of -the :program:`pysetup` script, the Python package management tool used to -build, distribute, search for, install, remove and list Python distributions. - -.. TODO integrate install and pysetup instead of duplicating - -.. toctree:: - :maxdepth: 2 - :numbered: - - install - pysetup - pysetup-config - pysetup-servers - - -.. seealso:: - - :ref:`packaging-index` - The manual for developers of Python projects who want to package and - distribute them. This describes how to use :mod:`packaging` to make - projects easily found and added to an existing Python installation. - - :mod:`packaging` - A library reference for developers of packaging tools wanting to use - standalone building blocks like :mod:`~packaging.version` or - :mod:`~packaging.metadata`, or extend Packaging itself. diff --git a/Doc/install/install.rst b/Doc/install/install.rst deleted file mode 100644 --- a/Doc/install/install.rst +++ /dev/null @@ -1,1119 +0,0 @@ -.. highlightlang:: none - -==================================== -Installing Python projects: overview -==================================== - -.. _packaging-install-intro: - -Introduction -============ - -Although Python's extensive standard library covers many programming needs, -there often comes a time when you need to add new functionality to your Python -installation in the form of third-party modules. This might be necessary to -support your own programming, or to support an application that you want to use -and that happens to be written in Python. - -In the past, there was little support for adding third-party modules to an -existing Python installation. With the introduction of the Python Distribution -Utilities (Distutils for short) in Python 2.0, this changed. However, not all -problems were solved; end-users had to rely on ``easy_install`` or -``pip`` to download third-party modules from PyPI, uninstall distributions or do -other maintenance operations. Packaging is a more complete replacement for -Distutils, in the standard library, with a backport named Distutils2 available -for older Python versions. - -This document is aimed primarily at people who need to install third-party -Python modules: end-users and system administrators who just need to get some -Python application running, and existing Python programmers who want to add -new goodies to their toolbox. You don't need to know Python to read this -document; there will be some brief forays into using Python's interactive mode -to explore your installation, but that's it. If you're looking for information -on how to distribute your own Python modules so that others may use them, see -the :ref:`packaging-index` manual. - - -.. _packaging-trivial-install: - -Best case: trivial installation -------------------------------- - -In the best case, someone will have prepared a special version of the module -distribution you want to install that is targeted specifically at your platform -and can be installed just like any other software on your platform. 
For example, -the module's developer might make an executable installer available for Windows -users, an RPM package for users of RPM-based Linux systems (Red Hat, SuSE, -Mandrake, and many others), a Debian package for users of Debian and derivative -systems, and so forth. - -In that case, you would use the standard system tools to download and install -the specific installer for your platform and its dependencies. - -Of course, things will not always be that easy. You might be interested in a -module whose distribution doesn't have an easy-to-use installer for your -platform. In that case, you'll have to start with the source distribution -released by the module's author/maintainer. Installing from a source -distribution is not too hard, as long as the modules are packaged in the -standard way. The bulk of this document addresses the building and installing -of modules from standard source distributions. - - -.. _packaging-distutils: - -The Python standard: Distutils ------------------------------- - -If you download a source distribution of a module, it will be obvious whether -it was packaged and distributed using Distutils. First, the distribution's name -and version number will be featured prominently in the name of the downloaded -archive, e.g. :file:`foo-1.0.tar.gz` or :file:`widget-0.9.7.zip`. Next, the -archive will unpack into a similarly-named directory: :file:`foo-1.0` or -:file:`widget-0.9.7`. Additionally, the distribution may contain a -:file:`setup.cfg` file and a file named :file:`README.txt` ---or possibly just -:file:`README`--- explaining that building and installing the module -distribution is a simple matter of issuing the following command at your shell's -prompt:: - - python setup.py install - -Third-party projects have extended Distutils to work around its limitations or -add functionality. After some years of near-inactivity in Distutils, a new -maintainer has started to standardize good ideas in PEPs and implement them in a -new, improved version of Distutils, called Distutils2 or Packaging. - - -.. _packaging-new-standard: - -The new standard: Packaging ---------------------------- - -The rules described in the first paragraph above apply to Packaging-based -projects too: a source distribution will have a name like -:file:`widget-0.9.7.zip`. One of the main differences with Distutils is that -distributions no longer have a :file:`setup.py` script; it used to cause a -number of issues. Now there is a unique script installed with Python itself:: - - pysetup install widget-0.9.7.zip - -Running this command is enough to build and install projects (Python modules or -packages, scripts or whole applications), without even having to unpack the -archive. It is also compatible with Distutils-based distributions. - -Unless you have to perform non-standard installations or customize the build -process, you can stop reading this manual ---the above command is everything you -need to get out of it. - -With :program:`pysetup`, you won't even have to manually download a distribution -before installing it; see :ref:`packaging-pysetup`. - - -.. _packaging-standard-install: - -Standard build and install -========================== - -As described in section :ref:`packaging-new-standard`, building and installing -a module distribution using Packaging usually comes down to one simple -command:: - - pysetup run install_dist - -This is a command that should be run in a terminal. 
On Windows, it is called a -command prompt and found in :menuselection:`Start --> Accessories`; Powershell -is a popular alternative. - - -.. _packaging-platform-variations: - -Platform variations -------------------- - -The setup command is meant to be run from the root directory of the source -distribution, i.e. the top-level subdirectory that the module source -distribution unpacks into. For example, if you've just downloaded a module -source distribution :file:`foo-1.0.tar.gz` onto a Unix system, the normal -steps to follow are these:: - - gunzip -c foo-1.0.tar.gz | tar xf - # unpacks into directory foo-1.0 - cd foo-1.0 - pysetup run install_dist - -On Windows, you'd probably download :file:`foo-1.0.zip`. If you downloaded the -archive file to :file:`C:\\Temp`, then it would unpack into -:file:`C:\\Temp\\foo-1.0`. To actually unpack the archive, you can use either -an archive manipulator with a graphical user interface (such as WinZip or 7-Zip) -or a command-line tool (such as :program:`unzip`, :program:`pkunzip` or, again, -:program:`7z`). Then, open a command prompt window and run:: - - cd c:\Temp\foo-1.0 - pysetup run install_dist - - -.. _packaging-splitting-up: - -Splitting the job up --------------------- - -Running ``pysetup run install_dist`` builds and installs all modules in one go. If you -prefer to work incrementally ---especially useful if you want to customize the -build process, or if things are going wrong--- you can use the setup script to -do one thing at a time. This is a valuable tool when different users will perform -separately the build and install steps. For example, you might want to build a -module distribution and hand it off to a system administrator for installation -(or do it yourself, but with super-user or admin privileges). - -For example, to build everything in one step and then install everything -in a second step, you aptly invoke two distinct Packaging commands:: - - pysetup run build - pysetup run install_dist - -If you do this, you will notice that invoking the :command:`install_dist` command -first runs the :command:`build` command, which ---in this case--- quickly -notices it can spare itself the work, since everything in the :file:`build` -directory is up-to-date. - -You may often ignore this ability to divide the process in steps if all you do -is installing modules downloaded from the Internet, but it's very handy for -more advanced tasks. If you find yourself in the need for distributing your own -Python modules and extensions, though, you'll most likely run many individual -Packaging commands. - - -.. _packaging-how-build-works: - -How building works ------------------- - -As implied above, the :command:`build` command is responsible for collecting -and placing the files to be installed into a *build directory*. By default, -this is :file:`build`, under the distribution root. If you're excessively -concerned with speed, or want to keep the source tree pristine, you can specify -a different build directory with the :option:`--build-base` option. For example:: - - pysetup run build --build-base /tmp/pybuild/foo-1.0 - -(Or you could do this permanently with a directive in your system or personal -Packaging configuration file; see section :ref:`packaging-config-files`.) -In the usual case, however, all this is unnecessary. - -The build tree's default layout looks like so:: - - --- build/ --- lib/ - or - --- build/ --- lib./ - temp./ - -where ```` expands to a brief description of the current OS/hardware -platform and Python version. 
The first form, with just a :file:`lib` directory, -is used for pure module distributions (module distributions that -include only pure Python modules). If a module distribution contains any -extensions (modules written in C/C++), then the second form, with two ```` -directories, is used. In that case, the :file:`temp.{plat}` directory holds -temporary files generated during the compile/link process which are not intended -to be installed. In either case, the :file:`lib` (or :file:`lib.{plat}`) directory -contains all Python modules (pure Python and extensions) to be installed. - -In the future, more directories will be added to handle Python scripts, -documentation, binary executables, and whatever else is required to install -Python modules and applications. - - -.. _packaging-how-install-works: - -How installation works ----------------------- - -After the :command:`build` command is run (whether explicitly or by the -:command:`install_dist` command on your behalf), the work of the :command:`install_dist` -command is relatively simple: all it has to do is copy the contents of -:file:`build/lib` (or :file:`build/lib.{plat}`) to the installation directory -of your choice. - -If you don't choose an installation directory ---i.e., if you just run -``pysetup run install_dist``\ --- then the :command:`install_dist` command -installs to the standard location for third-party Python modules. This location -varies by platform and depending on how you built/installed Python itself. On -Unix (and Mac OS X, which is also Unix-based), it also depends on whether the -module distribution being installed is pure Python or contains extensions -("non-pure"): - -+-----------------+-----------------------------------------------------+--------------------------------------------------+-------+ -| Platform | Standard installation location | Default value | Notes | -+=================+=====================================================+==================================================+=======+ -| Unix (pure) | :file:`{prefix}/lib/python{X.Y}/site-packages` | :file:`/usr/local/lib/python{X.Y}/site-packages` | \(1) | -+-----------------+-----------------------------------------------------+--------------------------------------------------+-------+ -| Unix (non-pure) | :file:`{exec-prefix}/lib/python{X.Y}/site-packages` | :file:`/usr/local/lib/python{X.Y}/site-packages` | \(1) | -+-----------------+-----------------------------------------------------+--------------------------------------------------+-------+ -| Windows | :file:`{prefix}\\Lib\\site-packages` | :file:`C:\\Python{XY}\\Lib\\site-packages` | \(2) | -+-----------------+-----------------------------------------------------+--------------------------------------------------+-------+ - -Notes: - -(1) - Most Linux distributions include Python as a standard part of the system, so - :file:`{prefix}` and :file:`{exec-prefix}` are usually both :file:`/usr` on - Linux. If you build Python yourself on Linux (or any Unix-like system), the - default :file:`{prefix}` and :file:`{exec-prefix}` are :file:`/usr/local`. - -(2) - The default installation directory on Windows was :file:`C:\\Program - Files\\Python` under Python 1.6a1, 1.5.2, and earlier. - -:file:`{prefix}` and :file:`{exec-prefix}` stand for the directories that Python -is installed to, and where it finds its libraries at run-time. They are always -the same under Windows, and very often the same under Unix and Mac OS X. 
You -can find out what your Python installation uses for :file:`{prefix}` and -:file:`{exec-prefix}` by running Python in interactive mode and typing a few -simple commands. - -.. TODO link to Doc/using instead of duplicating - -To start the interactive Python interpreter, you need to follow a slightly -different recipe for each platform. Under Unix, just type :command:`python` at -the shell prompt. Under Windows (assuming the Python executable is on your -:envvar:`PATH`, which is the usual case), you can choose :menuselection:`Start --> Run`, -type ``python`` and press ``enter``. Alternatively, you can simply execute -:command:`python` at a command prompt (:menuselection:`Start --> Accessories`) -or in Powershell. - -Once the interpreter is started, you type Python code at the prompt. For -example, on my Linux system, I type the three Python statements shown below, -and get the output as shown, to find out my :file:`{prefix}` and :file:`{exec-prefix}`:: - - Python 3.3 (r32:88445, Apr 2 2011, 10:43:54) - Type "help", "copyright", "credits" or "license" for more information. - >>> import sys - >>> sys.prefix - '/usr' - >>> sys.exec_prefix - '/usr' - -A few other placeholders are used in this document: :file:`{X.Y}` stands for the -version of Python, for example ``3.2``; :file:`{abiflags}` will be replaced by -the value of :data:`sys.abiflags` or the empty string for platforms which don't -define ABI flags; :file:`{distname}` will be replaced by the name of the module -distribution being installed. Dots and capitalization are important in the -paths; for example, a value that uses ``python3.2`` on UNIX will typically use -``Python32`` on Windows. - -If you don't want to install modules to the standard location, or if you don't -have permission to write there, then you need to read about alternate -installations in section :ref:`packaging-alt-install`. If you want to customize your -installation directories more heavily, see section :ref:`packaging-custom-install`. - - -.. _packaging-alt-install: - -Alternate installation -====================== - -Often, it is necessary or desirable to install modules to a location other than -the standard location for third-party Python modules. For example, on a Unix -system you might not have permission to write to the standard third-party module -directory. Or you might wish to try out a module before making it a standard -part of your local Python installation. This is especially true when upgrading -a distribution already present: you want to make sure your existing base of -scripts still works with the new version before actually upgrading. - -The Packaging :command:`install_dist` command is designed to make installing module -distributions to an alternate location simple and painless. The basic idea is -that you supply a base directory for the installation, and the -:command:`install_dist` command picks a set of directories (called an *installation -scheme*) under this base directory in which to install files. The details -differ across platforms, so read whichever of the following sections applies to -you. - -Note that the various alternate installation schemes are mutually exclusive: you -can pass ``--user``, or ``--home``, or ``--prefix`` and ``--exec-prefix``, or -``--install-base`` and ``--install-platbase``, but you can't mix from these -groups. - - -.. 
_packaging-alt-install-user: - -Alternate installation: the user scheme ---------------------------------------- - -This scheme is designed to be the most convenient solution for users that don't -have write permission to the global site-packages directory or don't want to -install into it. It is enabled with a simple option:: - - pysetup run install_dist --user - -Files will be installed into subdirectories of :data:`site.USER_BASE` (written -as :file:`{userbase}` hereafter). This scheme installs pure Python modules and -extension modules in the same location (also known as :data:`site.USER_SITE`). -Here are the values for UNIX, including non-framework builds on Mac OS X: - -=============== =========================================================== -Type of file Installation directory -=============== =========================================================== -modules :file:`{userbase}/lib/python{X.Y}/site-packages` -scripts :file:`{userbase}/bin` -data :file:`{userbase}` -C headers :file:`{userbase}/include/python{X.Y}` -=============== =========================================================== - -Framework builds on Mac OS X use these paths: - -=============== =========================================================== -Type of file Installation directory -=============== =========================================================== -modules :file:`{userbase}/lib/python/site-packages` -scripts :file:`{userbase}/bin` -data :file:`{userbase}` -C headers :file:`{userbase}/include/python` -=============== =========================================================== - -And here are the values used on Windows: - -=============== =========================================================== -Type of file Installation directory -=============== =========================================================== -modules :file:`{userbase}\\Python{XY}\\site-packages` -scripts :file:`{userbase}\\Scripts` -data :file:`{userbase}` -C headers :file:`{userbase}\\Python{XY}\\Include` -=============== =========================================================== - -The advantage of using this scheme compared to the other ones described below is -that the user site-packages directory is under normal conditions always included -in :data:`sys.path` (see :mod:`site` for more information), which means that -there is no additional step to perform after running ``pysetup`` to finalize the -installation. - -The :command:`build_ext` command also has a ``--user`` option to add -:file:`{userbase}/include` to the compiler search path for header files and -:file:`{userbase}/lib` to the compiler search path for libraries as well as to -the runtime search path for shared C libraries (rpath). - - -.. _packaging-alt-install-home: - -Alternate installation: the home scheme ---------------------------------------- - -The idea behind the "home scheme" is that you build and maintain a personal -stash of Python modules. This scheme's name is derived from the concept of a -"home" directory on Unix, since it's not unusual for a Unix user to make their -home directory have a layout similar to :file:`/usr/` or :file:`/usr/local/`. -In spite of its name's origin, this scheme can be used by anyone, regardless -of the operating system. - -Installing a new module distribution in this way is as simple as :: - - pysetup run install_dist --home - -where you can supply any directory you like for the :option:`--home` option. 
On -Unix, lazy typists can just type a tilde (``~``); the :command:`install_dist` command -will expand this to your home directory:: - - pysetup run install_dist --home ~ - -To make Python find the distributions installed with this scheme, you may have -to :ref:`modify Python's search path ` or edit -:mod:`sitecustomize` (see :mod:`site`) to call :func:`site.addsitedir` or edit -:data:`sys.path`. - -The :option:`--home` option defines the base directory for the installation. -Under it, files are installed to the following directories: - -=============== =========================================================== -Type of file Installation directory -=============== =========================================================== -modules :file:`{home}/lib/python` -scripts :file:`{home}/bin` -data :file:`{home}` -C headers :file:`{home}/include/python` -=============== =========================================================== - -(Mentally replace slashes with backslashes if you're on Windows.) - - -.. _packaging-alt-install-prefix-unix: - -Alternate installation: Unix (the prefix scheme) ------------------------------------------------- - -The "prefix scheme" is useful when you wish to use one Python installation to -run the build command, but install modules into the third-party module directory -of a different Python installation (or something that looks like a different -Python installation). If this sounds a trifle unusual, it is ---that's why the -user and home schemes come before. However, there are at least two known cases -where the prefix scheme will be useful. - -First, consider that many Linux distributions put Python in :file:`/usr`, rather -than the more traditional :file:`/usr/local`. This is entirely appropriate, -since in those cases Python is part of "the system" rather than a local add-on. -However, if you are installing Python modules from source, you probably want -them to go in :file:`/usr/local/lib/python2.{X}` rather than -:file:`/usr/lib/python2.{X}`. This can be done with :: - - pysetup run install_dist --prefix /usr/local - -Another possibility is a network filesystem where the name used to write to a -remote directory is different from the name used to read it: for example, the -Python interpreter accessed as :file:`/usr/local/bin/python` might search for -modules in :file:`/usr/local/lib/python2.{X}`, but those modules would have to -be installed to, say, :file:`/mnt/{@server}/export/lib/python2.{X}`. This could -be done with :: - - pysetup run install_dist --prefix=/mnt/@server/export - -In either case, the :option:`--prefix` option defines the installation base, and -the :option:`--exec-prefix` option defines the platform-specific installation -base, which is used for platform-specific files. (Currently, this just means -non-pure module distributions, but could be expanded to C libraries, binary -executables, etc.) If :option:`--exec-prefix` is not supplied, it defaults to -:option:`--prefix`. Files are installed as follows: - -================= ========================================================== -Type of file Installation directory -================= ========================================================== -Python modules :file:`{prefix}/lib/python{X.Y}/site-packages` -extension modules :file:`{exec-prefix}/lib/python{X.Y}/site-packages` -scripts :file:`{prefix}/bin` -data :file:`{prefix}` -C headers :file:`{prefix}/include/python{X.Y}{abiflags}` -================= ========================================================== - -.. 
XXX misses an entry for platinclude - -There is no requirement that :option:`--prefix` or :option:`--exec-prefix` -actually point to an alternate Python installation; if the directories listed -above do not already exist, they are created at installation time. - -Incidentally, the real reason the prefix scheme is important is simply that a -standard Unix installation uses the prefix scheme, but with :option:`--prefix` -and :option:`--exec-prefix` supplied by Python itself as ``sys.prefix`` and -``sys.exec_prefix``. Thus, you might think you'll never use the prefix scheme, -but every time you run ``pysetup run install_dist`` without any other -options, you're using it. - -Note that installing extensions to an alternate Python installation doesn't have -anything to do with how those extensions are built: in particular, extensions -will be compiled using the Python header files (:file:`Python.h` and friends) -installed with the Python interpreter used to run the build command. It is -therefore your responsibility to ensure compatibility between the interpreter -intended to run extensions installed in this way and the interpreter used to -build these same extensions. To avoid problems, it is best to make sure that -the two interpreters are the same version of Python (possibly different builds, -or possibly copies of the same build). (Of course, if your :option:`--prefix` -and :option:`--exec-prefix` don't even point to an alternate Python installation, -this is immaterial.) - - -.. _packaging-alt-install-prefix-windows: - -Alternate installation: Windows (the prefix scheme) ---------------------------------------------------- - -Windows has a different and vaguer notion of home directories than Unix, and -since its standard Python installation is simpler, the :option:`--prefix` option -has traditionally been used to install additional packages to arbitrary -locations. :: - - pysetup run install_dist --prefix "\Temp\Python" - -to install modules to the :file:`\\Temp\\Python` directory on the current drive. - -The installation base is defined by the :option:`--prefix` option; the -:option:`--exec-prefix` option is not supported under Windows, which means that -pure Python modules and extension modules are installed into the same location. -Files are installed as follows: - -=============== ========================================================== -Type of file Installation directory -=============== ========================================================== -modules :file:`{prefix}\\Lib\\site-packages` -scripts :file:`{prefix}\\Scripts` -data :file:`{prefix}` -C headers :file:`{prefix}\\Include` -=============== ========================================================== - - -.. _packaging-custom-install: - -Custom installation -=================== - -Sometimes, the alternate installation schemes described in section -:ref:`packaging-alt-install` just don't do what you want. You might want to tweak -just one or two directories while keeping everything under the same base -directory, or you might want to completely redefine the installation scheme. -In either case, you're creating a *custom installation scheme*. 
- -To create a custom installation scheme, you start with one of the alternate -schemes and override some of the installation directories used for the various -types of files, using these options: - -====================== ======================= -Type of file Override option -====================== ======================= -Python modules ``--install-purelib`` -extension modules ``--install-platlib`` -all modules ``--install-lib`` -scripts ``--install-scripts`` -data ``--install-data`` -C headers ``--install-headers`` -====================== ======================= - -These override options can be relative, absolute, -or explicitly defined in terms of one of the installation base directories. -(There are two installation base directories, and they are normally the same ----they only differ when you use the Unix "prefix scheme" and supply different -``--prefix`` and ``--exec-prefix`` options; using ``--install-lib`` will -override values computed or given for ``--install-purelib`` and -``--install-platlib``, and is recommended for schemes that don't make a -difference between Python and extension modules.) - -For example, say you're installing a module distribution to your home directory -under Unix, but you want scripts to go in :file:`~/scripts` rather than -:file:`~/bin`. As you might expect, you can override this directory with the -:option:`--install-scripts` option and, in this case, it makes most sense to supply -a relative path, which will be interpreted relative to the installation base -directory (in our example, your home directory):: - - pysetup run install_dist --home ~ --install-scripts scripts - -Another Unix example: suppose your Python installation was built and installed -with a prefix of :file:`/usr/local/python`. Thus, in a standard installation, -scripts will wind up in :file:`/usr/local/python/bin`. If you want them in -:file:`/usr/local/bin` instead, you would supply this absolute directory for -the :option:`--install-scripts` option:: - - pysetup run install_dist --install-scripts /usr/local/bin - -This command performs an installation using the "prefix scheme", where the -prefix is whatever your Python interpreter was installed with ---in this case, -:file:`/usr/local/python`. - -If you maintain Python on Windows, you might want third-party modules to live in -a subdirectory of :file:`{prefix}`, rather than right in :file:`{prefix}` -itself. This is almost as easy as customizing the script installation directory ----you just have to remember that there are two types of modules to worry about, -Python and extension modules, which can conveniently be both controlled by one -option:: - - pysetup run install_dist --install-lib Site - -.. XXX Nothing is installed right under prefix in windows, is it?? - -The specified installation directory is relative to :file:`{prefix}`. Of -course, you also have to ensure that this directory is in Python's module -search path, such as by putting a :file:`.pth` file in a site directory (see -:mod:`site`). See section :ref:`packaging-search-path` to find out how to modify -Python's search path. - -If you want to define an entire installation scheme, you just have to supply all -of the installation directory options. Using relative paths is recommended here. 
-For example, if you want to maintain all Python module-related files under -:file:`python` in your home directory, and you want a separate directory for -each platform that you use your home directory from, you might define the -following installation scheme:: - - pysetup run install_dist --home ~ \ - --install-purelib python/lib \ - --install-platlib python/'lib.$PLAT' \ - --install-scripts python/scripts \ - --install-data python/data - -or, equivalently, :: - - pysetup run install_dist --home ~/python \ - --install-purelib lib \ - --install-platlib 'lib.$PLAT' \ - --install-scripts scripts \ - --install-data data - -``$PLAT`` doesn't need to be defined as an environment variable ---it will also -be expanded by Packaging as it parses your command line options, just as it -does when parsing your configuration file(s). (More on that later.) - -Obviously, specifying the entire installation scheme every time you install a -new module distribution would be very tedious. To spare you all that work, you -can store it in a Packaging configuration file instead (see section -:ref:`packaging-config-files`), like so:: - - [install_dist] - install-base = $HOME - install-purelib = python/lib - install-platlib = python/lib.$PLAT - install-scripts = python/scripts - install-data = python/data - -or, equivalently, :: - - [install_dist] - install-base = $HOME/python - install-purelib = lib - install-platlib = lib.$PLAT - install-scripts = scripts - install-data = data - -Note that these two are *not* equivalent if you override their installation -base directory when running the setup script. For example, :: - - pysetup run install_dist --install-base /tmp - -would install pure modules to :file:`/tmp/python/lib` in the first case, and -to :file:`/tmp/lib` in the second case. (For the second case, you'd probably -want to supply an installation base of :file:`/tmp/python`.) - -You may have noticed the use of ``$HOME`` and ``$PLAT`` in the sample -configuration file. These are Packaging configuration variables, which -bear a strong resemblance to environment variables. In fact, you can use -environment variables in configuration files on platforms that have such a notion, but -Packaging additionally defines a few extra variables that may not be in your -environment, such as ``$PLAT``. Of course, on systems that don't have -environment variables, such as Mac OS 9, the configuration variables supplied by -the Packaging are the only ones you can use. See section :ref:`packaging-config-files` -for details. - -.. XXX which vars win out eventually in case of clash env or Packaging? - -.. XXX need some Windows examples---when would custom installation schemes be - needed on those platforms? - - -.. XXX Move this section to Doc/using - -.. _packaging-search-path: - -Modifying Python's search path ------------------------------- - -When the Python interpreter executes an :keyword:`import` statement, it searches -for both Python code and extension modules along a search path. A default value -for this path is configured into the Python binary when the interpreter is built. -You can obtain the search path by importing the :mod:`sys` module and printing -the value of ``sys.path``. :: - - $ python - Python 2.2 (#11, Oct 3 2002, 13:31:27) - [GCC 2.96 20000731 (Red Hat Linux 7.3 2.96-112)] on linux2 - Type "help", "copyright", "credits" or "license" for more information. 
- >>> import sys - >>> sys.path - ['', '/usr/local/lib/python2.3', '/usr/local/lib/python2.3/plat-linux2', - '/usr/local/lib/python2.3/lib-tk', '/usr/local/lib/python2.3/lib-dynload', - '/usr/local/lib/python2.3/site-packages'] - >>> - -The null string in ``sys.path`` represents the current working directory. - -The expected convention for locally installed packages is to put them in the -:file:`{...}/site-packages/` directory, but you may want to choose a different -location for some reason. For example, if your site kept by convention all web -server-related software under :file:`/www`. Add-on Python modules might then -belong in :file:`/www/python`, and in order to import them, this directory would -have to be added to ``sys.path``. There are several ways to solve this problem. - -The most convenient way is to add a path configuration file to a directory -that's already on Python's path, usually to the :file:`.../site-packages/` -directory. Path configuration files have an extension of :file:`.pth`, and each -line must contain a single path that will be appended to ``sys.path``. (Because -the new paths are appended to ``sys.path``, modules in the added directories -will not override standard modules. This means you can't use this mechanism for -installing fixed versions of standard modules.) - -Paths can be absolute or relative, in which case they're relative to the -directory containing the :file:`.pth` file. See the documentation of -the :mod:`site` module for more information. - -A slightly less convenient way is to edit the :file:`site.py` file in Python's -standard library, and modify ``sys.path``. :file:`site.py` is automatically -imported when the Python interpreter is executed, unless the :option:`-S` switch -is supplied to suppress this behaviour. So you could simply edit -:file:`site.py` and add two lines to it:: - - import sys - sys.path.append('/www/python/') - -However, if you reinstall the same major version of Python (perhaps when -upgrading from 3.3 to 3.3.1, for example) :file:`site.py` will be overwritten by -the stock version. You'd have to remember that it was modified and save a copy -before doing the installation. - -Alternatively, there are two environment variables that can modify ``sys.path``. -:envvar:`PYTHONHOME` sets an alternate value for the prefix of the Python -installation. For example, if :envvar:`PYTHONHOME` is set to ``/www/python``, -the search path will be set to ``['', '/www/python/lib/pythonX.Y/', -'/www/python/lib/pythonX.Y/plat-linux2', ...]``. - -The :envvar:`PYTHONPATH` variable can be set to a list of paths that will be -added to the beginning of ``sys.path``. For example, if :envvar:`PYTHONPATH` is -set to ``/www/python:/opt/py``, the search path will begin with -``['/www/python', '/opt/py']``. (Note that directories must exist in order to -be added to ``sys.path``; the :mod:`site` module removes non-existent paths.) - -Finally, ``sys.path`` is just a regular Python list, so any Python application -can modify it by adding or removing entries. - - -.. _packaging-config-files: - -Configuration files for Packaging -================================= - -As mentioned above, you can use configuration files to store personal or site -preferences for any option supported by any Packaging command. Depending on your -platform, you can use one of two or three possible configuration files. These -files will be read before parsing the command-line, so they take precedence over -default values. In turn, the command-line will override configuration files. 
-Lastly, if there are multiple configuration files, values from files read -earlier will be overridden by values from files read later. - -.. XXX "one of two or three possible..." seems wrong info. Below always 3 files - are indicated in the tables. - - -.. _packaging-config-filenames: - -Location and names of configuration files ------------------------------------------ - -The name and location of the configuration files vary slightly across -platforms. On Unix and Mac OS X, these are the three configuration files listed -in the order they are processed: - -+--------------+----------------------------------------------------------+-------+ -| Type of file | Location and filename | Notes | -+==============+==========================================================+=======+ -| system | :file:`{prefix}/lib/python{ver}/packaging/packaging.cfg` | \(1) | -+--------------+----------------------------------------------------------+-------+ -| personal | :file:`$HOME/.pydistutils.cfg` | \(2) | -+--------------+----------------------------------------------------------+-------+ -| local | :file:`setup.cfg` | \(3) | -+--------------+----------------------------------------------------------+-------+ - -Similarly, the configuration files on Windows ---also listed in the order they -are processed--- are these: - -+--------------+-------------------------------------------------+-------+ -| Type of file | Location and filename | Notes | -+==============+=================================================+=======+ -| system | :file:`{prefix}\\Lib\\packaging\\packaging.cfg` | \(4) | -+--------------+-------------------------------------------------+-------+ -| personal | :file:`%HOME%\\pydistutils.cfg` | \(5) | -+--------------+-------------------------------------------------+-------+ -| local | :file:`setup.cfg` | \(3) | -+--------------+-------------------------------------------------+-------+ - -On all platforms, the *personal* file can be temporarily disabled by -means of the `--no-user-cfg` option. - -Notes: - -(1) - Strictly speaking, the system-wide configuration file lives in the directory - where Packaging is installed. - -(2) - On Unix, if the :envvar:`HOME` environment variable is not defined, the - user's home directory will be determined with the :func:`getpwuid` function - from the standard :mod:`pwd` module. Packaging uses the - :func:`os.path.expanduser` function to do this. - -(3) - I.e., in the current directory (usually the location of the setup script). - -(4) - (See also note (1).) Python's default installation prefix is - :file:`C:\\Python`, so the system configuration file is normally - :file:`C:\\Python\\Lib\\packaging\\packaging.cfg`. - -(5) - On Windows, if the :envvar:`HOME` environment variable is not defined, - :envvar:`USERPROFILE` then :envvar:`HOMEDRIVE` and :envvar:`HOMEPATH` will - be tried. Packaging uses the :func:`os.path.expanduser` function to do this. - - -.. _packaging-config-syntax: - -Syntax of configuration files ------------------------------ - -All Packaging configuration files share the same syntax. Options defined in -them are grouped into sections, and each Packaging command gets its own section. -Additionally, there's a ``global`` section for options that affect every command. -Sections consist of one or more lines containing a single option specified as -``option = value``. - -.. 
XXX use dry-run in the next example or use a pysetup option as example - -For example, here's a complete configuration file that forces all commands to -run quietly by default:: - - [global] - verbose = 0 - -If this was the system configuration file, it would affect all processing -of any Python module distribution by any user on the current system. If it was -installed as your personal configuration file (on systems that support them), -it would affect only module distributions processed by you. Lastly, if it was -used as the :file:`setup.cfg` for a particular module distribution, it would -affect that distribution only. - -.. XXX "(on systems that support them)" seems wrong info - -If you wanted to, you could override the default "build base" directory and -make the :command:`build\*` commands always forcibly rebuild all files with -the following:: - - [build] - build-base = blib - force = 1 - -which corresponds to the command-line arguments:: - - pysetup run build --build-base blib --force - -except that including the :command:`build` command on the command-line means -that command will be run. Including a particular command in configuration files -has no such implication; it only means that if the command is run, the options -for it in the configuration file will apply. (This is also true if you run -other commands that derive values from it.) - -You can find out the complete list of options for any command using the -:option:`--help` option, e.g.:: - - pysetup run build --help - -and you can find out the complete list of global options by using -:option:`--help` without a command:: - - pysetup run --help - -See also the "Reference" section of the "Distributing Python Modules" manual. - -.. XXX no links to the relevant section exist. - - -.. _packaging-building-ext: - -Building extensions: tips and tricks -==================================== - -Whenever possible, Packaging tries to use the configuration information made -available by the Python interpreter used to run `pysetup`. -For example, the same compiler and linker flags used to compile Python will also -be used for compiling extensions. Usually this will work well, but in -complicated situations this might be inappropriate. This section discusses how -to override the usual Packaging behaviour. - - -.. _packaging-tweak-flags: - -Tweaking compiler/linker flags ------------------------------- - -Compiling a Python extension written in C or C++ will sometimes require -specifying custom flags for the compiler and linker in order to use a particular -library or produce a special kind of object code. This is especially true if the -extension hasn't been tested on your platform, or if you're trying to -cross-compile Python. - -.. TODO update to new setup.cfg - -In the most general case, the extension author might have foreseen that -compiling the extensions would be complicated, and provided a :file:`Setup` file -for you to edit. This will likely only be done if the module distribution -contains many separate extension modules, or if they often require elaborate -sets of compiler flags in order to work. - -A :file:`Setup` file, if present, is parsed in order to get a list of extensions -to build. Each line in a :file:`Setup` describes a single module. Lines have -the following structure:: - - module ... [sourcefile ...] [cpparg ...] [library ...] - - -Let's examine each of the fields in turn. - -* *module* is the name of the extension module to be built, and should be a - valid Python identifier. 
You can't just change this in order to rename a module - (edits to the source code would also be needed), so this should be left alone. - -* *sourcefile* is anything that's likely to be a source code file, at least - judging by the filename. Filenames ending in :file:`.c` are assumed to be - written in C, filenames ending in :file:`.C`, :file:`.cc`, and :file:`.c++` are - assumed to be C++, and filenames ending in :file:`.m` or :file:`.mm` are assumed - to be in Objective C. - -* *cpparg* is an argument for the C preprocessor, and is anything starting with - :option:`-I`, :option:`-D`, :option:`-U` or :option:`-C`. - -* *library* is anything ending in :file:`.a` or beginning with :option:`-l` or - :option:`-L`. - -If a particular platform requires a special library on your platform, you can -add it by editing the :file:`Setup` file and running ``pysetup run build``. -For example, if the module defined by the line :: - - foo foomodule.c - -must be linked with the math library :file:`libm.a` on your platform, simply add -:option:`-lm` to the line:: - - foo foomodule.c -lm - -Arbitrary switches intended for the compiler or the linker can be supplied with -the :option:`-Xcompiler` *arg* and :option:`-Xlinker` *arg* options:: - - foo foomodule.c -Xcompiler -o32 -Xlinker -shared -lm - -The next option after :option:`-Xcompiler` and :option:`-Xlinker` will be -appended to the proper command line, so in the above example the compiler will -be passed the :option:`-o32` option, and the linker will be passed -:option:`-shared`. If a compiler option requires an argument, you'll have to -supply multiple :option:`-Xcompiler` options; for example, to pass ``-x c++`` -the :file:`Setup` file would have to contain ``-Xcompiler -x -Xcompiler c++``. - -Compiler flags can also be supplied through setting the :envvar:`CFLAGS` -environment variable. If set, the contents of :envvar:`CFLAGS` will be added to -the compiler flags specified in the :file:`Setup` file. - - -.. _packaging-non-ms-compilers: - -Using non-Microsoft compilers on Windows ----------------------------------------- - -.. sectionauthor:: Rene Liebscher - - - -Borland/CodeGear C++ -^^^^^^^^^^^^^^^^^^^^ - -This subsection describes the necessary steps to use Packaging with the Borland -C++ compiler version 5.5. First you have to know that Borland's object file -format (OMF) is different from the format used by the Python version you can -download from the Python or ActiveState Web site. (Python is built with -Microsoft Visual C++, which uses COFF as the object file format.) For this -reason, you have to convert Python's library :file:`python25.lib` into the -Borland format. You can do this as follows: - -.. Should we mention that users have to create cfg-files for the compiler? -.. see also http://community.borland.com/article/0,1410,21205,00.html - -:: - - coff2omf python25.lib python25_bcpp.lib - -The :file:`coff2omf` program comes with the Borland compiler. The file -:file:`python25.lib` is in the :file:`Libs` directory of your Python -installation. If your extension uses other libraries (zlib, ...) you have to -convert them too. - -The converted files have to reside in the same directories as the normal -libraries. - -How does Packaging manage to use these libraries with their changed names? If -the extension needs a library (eg. :file:`foo`) Packaging checks first if it -finds a library with suffix :file:`_bcpp` (eg. :file:`foo_bcpp.lib`) and then -uses this library. 
In the case it doesn't find such a special library it uses -the default name (:file:`foo.lib`.) [#]_ - -To let Packaging compile your extension with Borland, C++ you now have to -type:: - - pysetup run build --compiler bcpp - -If you want to use the Borland C++ compiler as the default, you could specify -this in your personal or system-wide configuration file for Packaging (see -section :ref:`packaging-config-files`.) - - -.. seealso:: - - `C++Builder Compiler `_ - Information about the free C++ compiler from Borland, including links to the - download pages. - - `Creating Python Extensions Using Borland's Free Compiler `_ - Document describing how to use Borland's free command-line C++ compiler to build - Python. - - -GNU C / Cygwin / MinGW -^^^^^^^^^^^^^^^^^^^^^^ - -This section describes the necessary steps to use Packaging with the GNU C/C++ -compilers in their Cygwin and MinGW distributions. [#]_ For a Python interpreter -that was built with Cygwin, everything should work without any of these -following steps. - -Not all extensions can be built with MinGW or Cygwin, but many can. Extensions -most likely to not work are those that use C++ or depend on Microsoft Visual C -extensions. - -To let Packaging compile your extension with Cygwin, you have to type:: - - pysetup run build --compiler=cygwin - -and for Cygwin in no-cygwin mode [#]_ or for MinGW, type:: - - pysetup run build --compiler=mingw32 - -If you want to use any of these options/compilers as default, you should -consider writing it in your personal or system-wide configuration file for -Packaging (see section :ref:`packaging-config-files`.) - -Older Versions of Python and MinGW -"""""""""""""""""""""""""""""""""" -The following instructions only apply if you're using a version of Python -inferior to 2.4.1 with a MinGW inferior to 3.0.0 (with -:file:`binutils-2.13.90-20030111-1`). - -These compilers require some special libraries. This task is more complex than -for Borland's C++, because there is no program to convert the library. First -you have to create a list of symbols which the Python DLL exports. (You can find -a good program for this task at -http://www.emmestech.com/software/pexports-0.43/download_pexports.html). - -.. I don't understand what the next line means. --amk - (inclusive the references on data structures.) - -:: - - pexports python25.dll > python25.def - -The location of an installed :file:`python25.dll` will depend on the -installation options and the version and language of Windows. In a "just for -me" installation, it will appear in the root of the installation directory. In -a shared installation, it will be located in the system directory. - -Then you can create from these information an import library for gcc. :: - - /cygwin/bin/dlltool --dllname python25.dll --def python25.def --output-lib libpython25.a - -The resulting library has to be placed in the same directory as -:file:`python25.lib`. (Should be the :file:`libs` directory under your Python -installation directory.) - -If your extension uses other libraries (zlib,...) you might have to convert -them too. The converted files have to reside in the same directories as the -normal libraries do. - - -.. seealso:: - - `Building Python modules on MS Windows platform with MinGW `_ - Information about building the required libraries for the MinGW - environment. - - -.. rubric:: Footnotes - -.. [#] This also means you could replace all existing COFF-libraries with - OMF-libraries of the same name. - -.. 
[#] Check http://sources.redhat.com/cygwin/ and http://www.mingw.org/ for - more information. - -.. [#] Then you have no POSIX emulation available, but you also don't need - :file:`cygwin1.dll`. diff --git a/Doc/install/pysetup-config.rst b/Doc/install/pysetup-config.rst deleted file mode 100644 --- a/Doc/install/pysetup-config.rst +++ /dev/null @@ -1,44 +0,0 @@ -.. _packaging-pysetup-config: - -===================== -Pysetup Configuration -===================== - -Pysetup supports two configuration files: :file:`.pypirc` and :file:`packaging.cfg`. - -.. FIXME integrate with configfile instead of duplicating - -Configuring indexes -------------------- - -You can configure additional indexes in :file:`.pypirc` to be used for index-related -operations. By default, all configured index-servers and package-servers will be used -in an additive fashion. To limit operations to specific indexes, use the :option:`--index` -and :option:`--package-server options`:: - - $ pysetup install --index pypi --package-server django some.project - -Adding indexes to :file:`.pypirc`:: - - [packaging] - index-servers = - pypi - other - - package-servers = - django - - [pypi] - repository: - username: - password: - - [other] - repository: - username: - password: - - [django] - repository: - username: - password: diff --git a/Doc/install/pysetup-servers.rst b/Doc/install/pysetup-servers.rst deleted file mode 100644 --- a/Doc/install/pysetup-servers.rst +++ /dev/null @@ -1,61 +0,0 @@ -.. _packaging-pysetup-servers: - -=============== -Package Servers -=============== - -Pysetup supports installing Python packages from *Package Servers* in addition -to PyPI indexes and mirrors. - -Package Servers are simple directory listings of Python distributions. Directories -can be served via HTTP or a local file system. This is useful when you want to -dump source distributions in a directory and not worry about the full index structure. - -Serving distributions from Apache ---------------------------------- -:: - - $ mkdir -p /var/www/html/python/distributions - $ cp *.tar.gz /var/www/html/python/distributions/ - - - ServerAdmin webmaster at domain.com - DocumentRoot "/var/www/html/python" - ServerName python.example.org - ErrorLog logs/python.example.org-error.log - CustomLog logs/python.example.org-access.log common - Options Indexes FollowSymLinks MultiViews - DirectoryIndex index.html index.htm - - - Options Indexes FollowSymLinks MultiViews - Order allow,deny - Allow from all - - - -Add the Apache based distribution server to :file:`.pypirc`:: - - [packaging] - package-servers = - apache - - [apache] - repository: http://python.example.org/distributions/ - - -Serving distributions from a file system ----------------------------------------- -:: - - $ mkdir -p /data/python/distributions - $ cp *.tar.gz /data/python/distributions/ - -Add the directory to :file:`.pypirc`:: - - [packaging] - package-servers = - local - - [local] - repository: file:///data/python/distributions/ diff --git a/Doc/install/pysetup.rst b/Doc/install/pysetup.rst deleted file mode 100644 --- a/Doc/install/pysetup.rst +++ /dev/null @@ -1,164 +0,0 @@ -.. _packaging-pysetup: - -================ -Pysetup Tutorial -================ - -Getting started ---------------- - -Pysetup is a simple script that supports the following features: - -- install, remove, list, and verify Python packages; -- search for available packages on PyPI or any *Simple Index*; -- verify installed packages (md5sum, installed files, version). 
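These command-line features sit on top of the :mod:`packaging` library itself, which can also be queried directly from Python. A rough sketch of listing installed distributions through :mod:`packaging.database` follows; the ``get_distributions`` function and the ``name``/``version`` attributes are assumptions based on the PEP 376 database API and may differ in detail::

    # Sketch: enumerate installed distributions programmatically.
    # The packaging.database names used here are assumptions.
    from packaging.database import get_distributions

    for dist in get_distributions():
        print(dist.name, dist.version)
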
- - -Finding out what's installed ----------------------------- - -Pysetup makes it easy to find out what Python packages are installed:: - - $ pysetup list virtualenv - 'virtualenv' 1.6 at '/opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info' - - $ pysetup list - 'pyverify' 0.8.1 at '/opt/python3.3/lib/python3.3/site-packages/pyverify-0.8.1.dist-info' - 'virtualenv' 1.6 at '/opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info' - ... - - -Installing a distribution -------------------------- - -Pysetup can install a Python project from the following sources: - -- PyPI and Simple Indexes; -- source directories containing a valid :file:`setup.py` or :file:`setup.cfg`; -- distribution source archives (:file:`project-1.0.tar.gz`, :file:`project-1.0.zip`); -- HTTP (http://host/packages/project-1.0.tar.gz). - - -Installing from PyPI and Simple Indexes:: - - $ pysetup install project - $ pysetup install project==1.0 - -Installing from a distribution source archive:: - - $ pysetup install project-1.0.tar.gz - -Installing from a source directory containing a valid :file:`setup.py` or -:file:`setup.cfg`:: - - $ cd path/to/source/directory - $ pysetup install - - $ pysetup install path/to/source/directory - -Installing from HTTP:: - - $ pysetup install http://host/packages/project-1.0.tar.gz - - -Retrieving metadata -------------------- - -You can gather metadata from two sources, a project's source directory or an -installed distribution. The `pysetup metadata` command can retrieve one or -more metadata fields using the `-f` option and a metadata field as the -argument. :: - - $ pysetup metadata virtualenv -f version -f name - Version: - 1.6 - Name: - virtualenv - - $ pysetup metadata virtualenv - Metadata-Version: - 1.0 - Name: - virtualenv - Version: - 1.6 - Platform: - UNKNOWN - Summary: - Virtual Python Environment builder - ... - -.. seealso:: - - There are three metadata versions, 1.0, 1.1, and 1.2. The following PEPs - describe specifics of the field names, and their semantics and usage. 1.0 - :PEP:`241`, 1.1 :PEP:`314`, and 1.2 :PEP:`345` - - -Removing a distribution ------------------------ - -You can remove one or more installed distributions using the `pysetup remove` -command:: - - $ pysetup remove virtualenv - removing 'virtualenv': - /opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info/dependency_links.txt - /opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info/entry_points.txt - /opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info/not-zip-safe - /opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info/PKG-INFO - /opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info/SOURCES.txt - /opt/python3.3/lib/python3.3/site-packages/virtualenv-1.6-py3.3.egg-info/top_level.txt - Proceed (y/n)? y - success: removed 6 files and 1 dirs - -The optional '-y' argument auto confirms, skipping the conformation prompt:: - - $ pysetup remove virtualenv -y - - -Getting help ------------- - -All pysetup actions take the `-h` and `--help` options which prints the commands -help string to stdout. :: - - $ pysetup remove -h - Usage: pysetup remove dist [-y] - or: pysetup remove --help - - Uninstall a Python package. 
- - positional arguments: - dist installed distribution name - - optional arguments: - -y auto confirm package removal - -Getting a list of all pysetup actions and global options:: - - $ pysetup --help - Usage: pysetup [options] action [action_options] - - Actions: - run: Run one or several commands - metadata: Display the metadata of a project - install: Install a project - remove: Remove a project - search: Search for a project in the indexes - list: List installed projects - graph: Display a graph - create: Create a project - generate-setup: Generate a backward-compatible setup.py - - To get more help on an action, use: - - pysetup action --help - - Global options: - --verbose (-v) run verbosely (default) - --quiet (-q) run quietly (turns verbosity off) - --dry-run (-n) don't actually do anything - --help (-h) show detailed help message - --no-user-cfg ignore pydistutils.cfg in your home directory - --version Display the version diff --git a/Doc/library/distutils.rst b/Doc/library/distutils.rst --- a/Doc/library/distutils.rst +++ b/Doc/library/distutils.rst @@ -12,10 +12,6 @@ 100%-pure Python, or may be extension modules written in C, or may be collections of Python packages which include modules coded in both Python and C. -.. deprecated:: 3.3 - :mod:`packaging` replaces Distutils. See :ref:`packaging-index` and - :ref:`packaging-install-index`. - User documentation and API reference are provided in another document: @@ -27,11 +23,3 @@ easily installed into an existing Python installation. If also contains instructions for end-users wanting to install a distutils-based package, :ref:`install-index`. - - -.. trick to silence a Sphinx warning - -.. toctree:: - :hidden: - - ../distutils/index diff --git a/Doc/library/packaging-misc.rst b/Doc/library/packaging-misc.rst deleted file mode 100644 --- a/Doc/library/packaging-misc.rst +++ /dev/null @@ -1,27 +0,0 @@ -.. temporary file for modules that don't need a dedicated file yet - -:mod:`packaging.errors` --- Packaging exceptions -================================================ - -.. module:: packaging.errors - :synopsis: Packaging exceptions. - - -Provides exceptions used by the Packaging modules. Note that Packaging modules -may raise standard exceptions; in particular, SystemExit is usually raised for -errors that are obviously the end-user's fault (e.g. bad command-line arguments). - -This module is safe to use in ``from ... import *`` mode; it only exports -symbols whose names start with ``Packaging`` and end with ``Error``. - - -:mod:`packaging.manifest` --- The Manifest class -================================================ - -.. module:: packaging.manifest - :synopsis: The Manifest class, used for poking about the file system and - building lists of files. - - -This module provides the :class:`Manifest` class, used for poking about the -filesystem and building lists of files. diff --git a/Doc/library/packaging.command.rst b/Doc/library/packaging.command.rst deleted file mode 100644 --- a/Doc/library/packaging.command.rst +++ /dev/null @@ -1,111 +0,0 @@ -:mod:`packaging.command` --- Standard Packaging commands -======================================================== - -.. module:: packaging.command - :synopsis: Standard packaging commands. - - -This subpackage contains one module for each standard Packaging command, such as -:command:`build` or :command:`upload`. Each command is implemented as a -separate module, with the command name as the name of the module and of the -class defined therein. 
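As a rough sketch of the command pattern documented in the next section (the ``peel_banana`` name comes from the example below; the ``description`` and ``user_options`` attributes are illustrative assumptions carried over from the distutils convention, not taken verbatim from the removed sources)::

    # hypothetical packaging/command/peel_banana.py -- illustrative sketch only
    from packaging.command.cmd import Command

    class peel_banana(Command):

        description = "peel one or more bananas"          # assumed attribute
        user_options = [('count=', 'c', 'number of bananas to peel')]

        def initialize_options(self):
            # defaults only; no inter-option dependencies belong here
            self.count = None

        def finalize_options(self):
            # called after the command line and config files have been read
            if self.count is None:
                self.count = 1
            self.count = int(self.count)

        def run(self):
            # the actual work, driven by the finalized option values
            for i in range(self.count):
                print('peeling banana %d' % (i + 1))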
- - - -:mod:`packaging.command.cmd` --- Abstract base class for Packaging commands -=========================================================================== - -.. module:: packaging.command.cmd - :synopsis: Abstract base class for commands. - - -This module supplies the abstract base class :class:`Command`. This class is -subclassed by the modules in the packaging.command subpackage. - - -.. class:: Command(dist) - - Abstract base class for defining command classes, the "worker bees" of the - Packaging. A useful analogy for command classes is to think of them as - subroutines with local variables called *options*. The options are declared - in :meth:`initialize_options` and defined (given their final values) in - :meth:`finalize_options`, both of which must be defined by every command - class. The distinction between the two is necessary because option values - might come from the outside world (command line, config file, ...), and any - options dependent on other options must be computed after these outside - influences have been processed --- hence :meth:`finalize_options`. The body - of the subroutine, where it does all its work based on the values of its - options, is the :meth:`run` method, which must also be implemented by every - command class. - - The class constructor takes a single argument *dist*, a - :class:`~packaging.dist.Distribution` instance. - - -Creating a new Packaging command --------------------------------- - -This section outlines the steps to create a new Packaging command. - -.. XXX the following paragraph is focused on the stdlib; expand it to document - how to write and register a command in third-party projects - -A new command lives in a module in the :mod:`packaging.command` package. There -is a sample template in that directory called :file:`command_template`. Copy -this file to a new module with the same name as the new command you're -implementing. This module should implement a class with the same name as the -module (and the command). So, for instance, to create the command -``peel_banana`` (so that users can run ``setup.py peel_banana``), you'd copy -:file:`command_template` to :file:`packaging/command/peel_banana.py`, then edit -it so that it's implementing the class :class:`peel_banana`, a subclass of -:class:`Command`. It must define the following methods: - -.. method:: Command.initialize_options() - - Set default values for all the options that this command supports. Note that - these defaults may be overridden by other commands, by the setup script, by - config files, or by the command line. Thus, this is not the place to code - dependencies between options; generally, :meth:`initialize_options` - implementations are just a bunch of ``self.foo = None`` assignments. - - -.. method:: Command.finalize_options() - - Set final values for all the options that this command supports. This is - always called as late as possible, i.e. after any option assignments from the - command line or from other commands have been done. Thus, this is the place - to code option dependencies: if *foo* depends on *bar*, then it is safe to - set *foo* from *bar* as long as *foo* still has the same value it was - assigned in :meth:`initialize_options`. - - -.. method:: Command.run() - - A command's raison d'etre: carry out the action it exists to perform, - controlled by the options initialized in :meth:`initialize_options`, - customized by other commands, the setup script, the command line, and config - files, and finalized in :meth:`finalize_options`. 
All terminal output and - filesystem interaction should be done by :meth:`run`. - - -Command classes may define this attribute: - - -.. attribute:: Command.sub_commands - - *sub_commands* formalizes the notion of a "family" of commands, - e.g. ``install_dist`` as the parent with sub-commands ``install_lib``, - ``install_headers``, etc. The parent of a family of commands defines - *sub_commands* as a class attribute; it's a list of 2-tuples ``(command_name, - predicate)``, with *command_name* a string and *predicate* a function, a - string or ``None``. *predicate* is a method of the parent command that - determines whether the corresponding command is applicable in the current - situation. (E.g. ``install_headers`` is only applicable if we have any C - header files to install.) If *predicate* is ``None``, that command is always - applicable. - - *sub_commands* is usually defined at the *end* of a class, because - predicates can be methods of the class, so they must already have been - defined. The canonical example is the :command:`install_dist` command. - -.. XXX document how to add a custom command to another one's subcommands diff --git a/Doc/library/packaging.compiler.rst b/Doc/library/packaging.compiler.rst deleted file mode 100644 --- a/Doc/library/packaging.compiler.rst +++ /dev/null @@ -1,681 +0,0 @@ -:mod:`packaging.compiler` --- Compiler classes -============================================== - -.. module:: packaging.compiler - :synopsis: Compiler classes to build C/C++ extensions or libraries. - - -This subpackage contains an abstract base class representing a compiler and -concrete implementations for common compilers. The compiler classes should not -be instantiated directly, but created using the :func:`new_compiler` factory -function. Compiler types provided by Packaging are listed in -:ref:`packaging-standard-compilers`. - - -Public functions ----------------- - -.. function:: new_compiler(plat=None, compiler=None, dry_run=False, force=False) - - Factory function to generate an instance of some - :class:`~.ccompiler.CCompiler` subclass for the requested platform or - compiler type. - - If no argument is given for *plat* and *compiler*, the default compiler type - for the platform (:attr:`os.name`) will be used: ``'unix'`` for Unix and - Mac OS X, ``'msvc'`` for Windows. - - If *plat* is given, it must be one of ``'posix'``, ``'darwin'`` or ``'nt'``. - An invalid value will not raise an exception but use the default compiler - type for the current platform. - - .. XXX errors should never pass silently; this behavior is particularly - harmful when a compiler type is given as first argument - - If *compiler* is given, *plat* will be ignored, allowing you to get for - example a ``'unix'`` compiler object under Windows or an ``'msvc'`` compiler - under Unix. However, not all compiler types can be instantiated on every - platform. - - -.. function:: customize_compiler(compiler) - - Do any platform-specific customization of a CCompiler instance. Mainly - needed on Unix to plug in the information that varies across Unices and is - stored in CPython's Makefile. - - -.. function:: gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries) - - Generate linker options for searching library directories and linking with - specific libraries. *libraries* and *library_dirs* are, respectively, lists - of library names (not filenames!) and search directories. 
Returns a list of - command-line options suitable for use with some compiler (depending on the - two format strings passed in). - - -.. function:: gen_preprocess_options(macros, include_dirs) - - Generate C preprocessor options (:option:`-D`, :option:`-U`, :option:`-I`) as - used by at least two types of compilers: the typical Unix compiler and Visual - C++. *macros* is the usual thing, a list of 1- or 2-tuples, where ``(name,)`` - means undefine (:option:`-U`) macro *name*, and ``(name, value)`` means - define (:option:`-D`) macro *name* to *value*. *include_dirs* is just a list - of directory names to be added to the header file search path (:option:`-I`). - Returns a list of command-line options suitable for either Unix compilers or - Visual C++. - - -.. function:: get_default_compiler(osname, platform) - - Determine the default compiler to use for the given platform. - - *osname* should be one of the standard Python OS names (i.e. the ones - returned by ``os.name``) and *platform* the common value returned by - ``sys.platform`` for the platform in question. - - The default values are ``os.name`` and ``sys.platform``. - - -.. function:: set_compiler(location) - - Add or change a compiler - - -.. function:: show_compilers() - - Print list of available compilers (used by the :option:`--help-compiler` - options to :command:`build`, :command:`build_ext`, :command:`build_clib`). - - -.. _packaging-standard-compilers: - -Standard compilers ------------------- - -Concrete subclasses of :class:`~.ccompiler.CCompiler` are provided in submodules -of the :mod:`packaging.compiler` package. You do not need to import them, using -:func:`new_compiler` is the public API to use. This table documents the -standard compilers; be aware that they can be replaced by other classes on your -platform. - -=============== ======================================================== ======= -name description notes -=============== ======================================================== ======= -``'unix'`` typical Unix-style command-line C compiler [#]_ -``'msvc'`` Microsoft compiler [#]_ -``'bcpp'`` Borland C++ compiler -``'cygwin'`` Cygwin compiler (Windows port of GCC) -``'mingw32'`` Mingw32 port of GCC (same as Cygwin in no-Cygwin mode) -=============== ======================================================== ======= - - -.. [#] The Unix compiler class assumes this behavior: - - * macros defined with :option:`-Dname[=value]` - - * macros undefined with :option:`-Uname` - - * include search directories specified with :option:`-Idir` - - * libraries specified with :option:`-llib` - - * library search directories specified with :option:`-Ldir` - - * compile handled by :program:`cc` (or similar) executable with - :option:`-c` option: compiles :file:`.c` to :file:`.o` - - * link static library handled by :program:`ar` command (possibly with - :program:`ranlib`) - - * link shared library handled by :program:`cc` :option:`-shared` - - -.. [#] On Windows, extension modules typically need to be compiled with the same - compiler that was used to compile CPython (for example Microsoft Visual - Studio .NET 2003 for CPython 2.4 and 2.5). The AMD64 and Itanium - binaries are created using the Platform SDK. - - Under the hood, there are actually two different subclasses of - :class:`~.ccompiler.CCompiler` defined: one is compatible with MSVC 2005 - and 2008, the other works with older versions. This should not be a - concern for regular use of the functions in this module. 
- - Packaging will normally choose the right compiler, linker etc. on its - own. To override this choice, the environment variables - *DISTUTILS_USE_SDK* and *MSSdk* must be both set. *MSSdk* indicates that - the current environment has been setup by the SDK's ``SetEnv.Cmd`` - script, or that the environment variables had been registered when the - SDK was installed; *DISTUTILS_USE_SDK* indicates that the user has made - an explicit choice to override the compiler selection done by Packaging. - - .. TODO document the envvars in Doc/using and the man page - - -:mod:`packaging.compiler.ccompiler` --- CCompiler base class -============================================================ - -.. module:: packaging.compiler.ccompiler - :synopsis: Abstract CCompiler class. - - -This module provides the abstract base class for the :class:`CCompiler` -classes. A :class:`CCompiler` instance can be used for all the compile and -link steps needed to build a single project. Methods are provided to set -options for the compiler --- macro definitions, include directories, link path, -libraries and the like. - -.. class:: CCompiler(dry_run=False, force=False) - - The abstract base class :class:`CCompiler` defines the interface that must be - implemented by real compiler classes. The class also has some utility - methods used by several compiler classes. - - The basic idea behind a compiler abstraction class is that each instance can - be used for all the compile/link steps in building a single project. Thus, - attributes common to all of those compile and link steps --- include - directories, macros to define, libraries to link against, etc. --- are - attributes of the compiler instance. To allow for variability in how - individual files are treated, most of those attributes may be varied on a - per-compilation or per-link basis. - - The constructor for each subclass creates an instance of the Compiler object. - Flags are *dry_run* (don't actually execute - the steps) and *force* (rebuild everything, regardless of dependencies). All - of these flags default to ``False`` (off). Note that you probably don't want to - instantiate :class:`CCompiler` or one of its subclasses directly - use the - :func:`new_compiler` factory function instead. - - The following methods allow you to manually alter compiler options for the - instance of the Compiler class. - - - .. method:: CCompiler.add_include_dir(dir) - - Add *dir* to the list of directories that will be searched for header - files. The compiler is instructed to search directories in the order in - which they are supplied by successive calls to :meth:`add_include_dir`. - - - .. method:: CCompiler.set_include_dirs(dirs) - - Set the list of directories that will be searched to *dirs* (a list of - strings). Overrides any preceding calls to :meth:`add_include_dir`; - subsequent calls to :meth:`add_include_dir` add to the list passed to - :meth:`set_include_dirs`. This does not affect any list of standard - include directories that the compiler may search by default. - - - .. method:: CCompiler.add_library(libname) - - Add *libname* to the list of libraries that will be included in all links - driven by this compiler object. Note that *libname* should *not* be the - name of a file containing a library, but the name of the library itself: - the actual filename will be inferred by the linker, the compiler, or the - compiler class (depending on the platform). 
- - The linker will be instructed to link against libraries in the order they - were supplied to :meth:`add_library` and/or :meth:`set_libraries`. It is - perfectly valid to duplicate library names; the linker will be instructed - to link against libraries as many times as they are mentioned. - - - .. method:: CCompiler.set_libraries(libnames) - - Set the list of libraries to be included in all links driven by this - compiler object to *libnames* (a list of strings). This does not affect - any standard system libraries that the linker may include by default. - - - .. method:: CCompiler.add_library_dir(dir) - - Add *dir* to the list of directories that will be searched for libraries - specified to :meth:`add_library` and :meth:`set_libraries`. The linker - will be instructed to search for libraries in the order they are supplied - to :meth:`add_library_dir` and/or :meth:`set_library_dirs`. - - - .. method:: CCompiler.set_library_dirs(dirs) - - Set the list of library search directories to *dirs* (a list of strings). - This does not affect any standard library search path that the linker may - search by default. - - - .. method:: CCompiler.add_runtime_library_dir(dir) - - Add *dir* to the list of directories that will be searched for shared - libraries at runtime. - - - .. method:: CCompiler.set_runtime_library_dirs(dirs) - - Set the list of directories to search for shared libraries at runtime to - *dirs* (a list of strings). This does not affect any standard search path - that the runtime linker may search by default. - - - .. method:: CCompiler.define_macro(name, value=None) - - Define a preprocessor macro for all compilations driven by this compiler - object. The optional parameter *value* should be a string; if it is not - supplied, then the macro will be defined without an explicit value and the - exact outcome depends on the compiler used (XXX true? does ANSI say - anything about this?) - - - .. method:: CCompiler.undefine_macro(name) - - Undefine a preprocessor macro for all compilations driven by this compiler - object. If the same macro is defined by :meth:`define_macro` and - undefined by :meth:`undefine_macro` the last call takes precedence - (including multiple redefinitions or undefinitions). If the macro is - redefined/undefined on a per-compilation basis (i.e. in the call to - :meth:`compile`), then that takes precedence. - - - .. method:: CCompiler.add_link_object(object) - - Add *object* to the list of object files (or analogues, such as explicitly - named library files or the output of "resource compilers") to be included - in every link driven by this compiler object. - - - .. method:: CCompiler.set_link_objects(objects) - - Set the list of object files (or analogues) to be included in every link - to *objects*. This does not affect any standard object files that the - linker may include by default (such as system libraries). - - The following methods implement methods for autodetection of compiler - options, providing some functionality similar to GNU :program:`autoconf`. - - - .. method:: CCompiler.detect_language(sources) - - Detect the language of a given file, or list of files. Uses the instance - attributes :attr:`language_map` (a dictionary), and :attr:`language_order` - (a list) to do the job. - - - .. method:: CCompiler.find_library_file(dirs, lib, debug=0) - - Search the specified list of directories for a static or shared library file - *lib* and return the full path to that file. 
If *debug* is true, look for a - debugging version (if that makes sense on the current platform). Return - ``None`` if *lib* wasn't found in any of the specified directories. - - - .. method:: CCompiler.has_function(funcname, includes=None, include_dirs=None, libraries=None, library_dirs=None) - - Return a boolean indicating whether *funcname* is supported on the current - platform. The optional arguments can be used to augment the compilation - environment by providing additional include files and paths and libraries and - paths. - - - .. method:: CCompiler.library_dir_option(dir) - - Return the compiler option to add *dir* to the list of directories searched for - libraries. - - - .. method:: CCompiler.library_option(lib) - - Return the compiler option to add *dir* to the list of libraries linked into the - shared library or executable. - - - .. method:: CCompiler.runtime_library_dir_option(dir) - - Return the compiler option to add *dir* to the list of directories searched for - runtime libraries. - - - .. method:: CCompiler.set_executables(**args) - - Define the executables (and options for them) that will be run to perform the - various stages of compilation. The exact set of executables that may be - specified here depends on the compiler class (via the 'executables' class - attribute), but most will have: - - +--------------+------------------------------------------+ - | attribute | description | - +==============+==========================================+ - | *compiler* | the C/C++ compiler | - +--------------+------------------------------------------+ - | *linker_so* | linker used to create shared objects and | - | | libraries | - +--------------+------------------------------------------+ - | *linker_exe* | linker used to create binary executables | - +--------------+------------------------------------------+ - | *archiver* | static library creator | - +--------------+------------------------------------------+ - - On platforms with a command line (Unix, DOS/Windows), each of these is a string - that will be split into executable name and (optional) list of arguments. - (Splitting the string is done similarly to how Unix shells operate: words are - delimited by spaces, but quotes and backslashes can override this. See - :func:`packaging.util.split_quoted`.) - - The following methods invoke stages in the build process. - - - .. method:: CCompiler.compile(sources, output_dir=None, macros=None, include_dirs=None, debug=0, extra_preargs=None, extra_postargs=None, depends=None) - - Compile one or more source files. Generates object files (e.g. transforms a - :file:`.c` file to a :file:`.o` file.) - - *sources* must be a list of filenames, most likely C/C++ files, but in reality - anything that can be handled by a particular compiler and compiler class (e.g. - an ``'msvc'`` compiler can handle resource files in *sources*). Return a list of - object filenames, one per source filename in *sources*. Depending on the - implementation, not all source files will necessarily be compiled, but all - corresponding object filenames will be returned. - - If *output_dir* is given, object files will be put under it, while retaining - their original path component. That is, :file:`foo/bar.c` normally compiles to - :file:`foo/bar.o` (for a Unix implementation); if *output_dir* is *build*, then - it would compile to :file:`build/foo/bar.o`. - - *macros*, if given, must be a list of macro definitions. A macro definition is - either a ``(name, value)`` 2-tuple or a ``(name,)`` 1-tuple. 
The former defines - a macro; if the value is ``None``, the macro is defined without an explicit - value. The 1-tuple case undefines a macro. Later - definitions/redefinitions/undefinitions take precedence. - - *include_dirs*, if given, must be a list of strings, the directories to add to - the default include file search path for this compilation only. - - *debug* is a boolean; if true, the compiler will be instructed to output debug - symbols in (or alongside) the object file(s). - - *extra_preargs* and *extra_postargs* are implementation-dependent. On platforms - that have the notion of a command line (e.g. Unix, DOS/Windows), they are most - likely lists of strings: extra command-line arguments to prepend/append to the - compiler command line. On other platforms, consult the implementation class - documentation. In any event, they are intended as an escape hatch for those - occasions when the abstract compiler framework doesn't cut the mustard. - - *depends*, if given, is a list of filenames that all targets depend on. If a - source file is older than any file in depends, then the source file will be - recompiled. This supports dependency tracking, but only at a coarse - granularity. - - Raises :exc:`CompileError` on failure. - - - .. method:: CCompiler.create_static_lib(objects, output_libname, output_dir=None, debug=0, target_lang=None) - - Link a bunch of stuff together to create a static library file. The "bunch of - stuff" consists of the list of object files supplied as *objects*, the extra - object files supplied to :meth:`add_link_object` and/or - :meth:`set_link_objects`, the libraries supplied to :meth:`add_library` and/or - :meth:`set_libraries`, and the libraries supplied as *libraries* (if any). - - *output_libname* should be a library name, not a filename; the filename will be - inferred from the library name. *output_dir* is the directory where the library - file will be put. XXX defaults to what? - - *debug* is a boolean; if true, debugging information will be included in the - library (note that on most platforms, it is the compile step where this matters: - the *debug* flag is included here just for consistency). - - *target_lang* is the target language for which the given objects are being - compiled. This allows specific linkage time treatment of certain languages. - - Raises :exc:`LibError` on failure. - - - .. method:: CCompiler.link(target_desc, objects, output_filename, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None) - - Link a bunch of stuff together to create an executable or shared library file. - - The "bunch of stuff" consists of the list of object files supplied as *objects*. - *output_filename* should be a filename. If *output_dir* is supplied, - *output_filename* is relative to it (i.e. *output_filename* can provide - directory components if needed). - - *libraries* is a list of libraries to link against. These are library names, - not filenames, since they're translated into filenames in a platform-specific - way (e.g. *foo* becomes :file:`libfoo.a` on Unix and :file:`foo.lib` on - DOS/Windows). However, they can include a directory component, which means the - linker will look in that specific directory rather than searching all the normal - locations. - - *library_dirs*, if supplied, should be a list of directories to search for - libraries that were specified as bare library names (i.e. no directory - component). 
These are on top of the system default and those supplied to - :meth:`add_library_dir` and/or :meth:`set_library_dirs`. *runtime_library_dirs* - is a list of directories that will be embedded into the shared library and used - to search for other shared libraries that \*it\* depends on at run-time. (This - may only be relevant on Unix.) - - *export_symbols* is a list of symbols that the shared library will export. - (This appears to be relevant only on Windows.) - - *debug* is as for :meth:`compile` and :meth:`create_static_lib`, with the - slight distinction that it actually matters on most platforms (as opposed to - :meth:`create_static_lib`, which includes a *debug* flag mostly for form's - sake). - - *extra_preargs* and *extra_postargs* are as for :meth:`compile` (except of - course that they supply command-line arguments for the particular linker being - used). - - *target_lang* is the target language for which the given objects are being - compiled. This allows specific linkage time treatment of certain languages. - - Raises :exc:`LinkError` on failure. - - - .. method:: CCompiler.link_executable(objects, output_progname, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, debug=0, extra_preargs=None, extra_postargs=None, target_lang=None) - - Link an executable. *output_progname* is the name of the file executable, while - *objects* are a list of object filenames to link in. Other arguments are as for - the :meth:`link` method. - - - .. method:: CCompiler.link_shared_lib(objects, output_libname, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None) - - Link a shared library. *output_libname* is the name of the output library, - while *objects* is a list of object filenames to link in. Other arguments are - as for the :meth:`link` method. - - - .. method:: CCompiler.link_shared_object(objects, output_filename, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None) - - Link a shared object. *output_filename* is the name of the shared object that - will be created, while *objects* is a list of object filenames to link in. - Other arguments are as for the :meth:`link` method. - - - .. method:: CCompiler.preprocess(source, output_file=None, macros=None, include_dirs=None, extra_preargs=None, extra_postargs=None) - - Preprocess a single C/C++ source file, named in *source*. Output will be written - to file named *output_file*, or *stdout* if *output_file* not supplied. - *macros* is a list of macro definitions as for :meth:`compile`, which will - augment the macros set with :meth:`define_macro` and :meth:`undefine_macro`. - *include_dirs* is a list of directory names that will be added to the default - list, in the same way as :meth:`add_include_dir`. - - Raises :exc:`PreprocessError` on failure. - - The following utility methods are defined by the :class:`CCompiler` class, for - use by the various concrete subclasses. - - - .. method:: CCompiler.executable_filename(basename, strip_dir=0, output_dir='') - - Returns the filename of the executable for the given *basename*. Typically for - non-Windows platforms this is the same as the basename, while Windows will get - a :file:`.exe` added. - - - .. 
method:: CCompiler.library_filename(libname, lib_type='static', strip_dir=0, output_dir='') - - Returns the filename for the given library name on the current platform. On Unix - a library with *lib_type* of ``'static'`` will typically be of the form - :file:`liblibname.a`, while a *lib_type* of ``'dynamic'`` will be of the form - :file:`liblibname.so`. - - - .. method:: CCompiler.object_filenames(source_filenames, strip_dir=0, output_dir='') - - Returns the name of the object files for the given source files. - *source_filenames* should be a list of filenames. - - - .. method:: CCompiler.shared_object_filename(basename, strip_dir=0, output_dir='') - - Returns the name of a shared object file for the given file name *basename*. - - - .. method:: CCompiler.execute(func, args, msg=None, level=1) - - Invokes :func:`packaging.util.execute` This method invokes a Python function - *func* with the given arguments *args*, after logging and taking into account - the *dry_run* flag. XXX see also. - - - .. method:: CCompiler.spawn(cmd) - - Invokes :func:`packaging.util.spawn`. This invokes an external process to run - the given command. XXX see also. - - - .. method:: CCompiler.mkpath(name, mode=511) - - Invokes :func:`packaging.dir_util.mkpath`. This creates a directory and any - missing ancestor directories. XXX see also. - - - .. method:: CCompiler.move_file(src, dst) - - Invokes :meth:`packaging.file_util.move_file`. Renames *src* to *dst*. XXX see - also. - - -:mod:`packaging.compiler.extension` --- The Extension class -=========================================================== - -.. module:: packaging.compiler.extension - :synopsis: Class used to represent C/C++ extension modules. - - -This module provides the :class:`Extension` class, used to represent C/C++ -extension modules. - -.. class:: Extension - - The Extension class describes a single C or C++ extension module. It accepts - the following keyword arguments in its constructor: - - +------------------------+--------------------------------+---------------------------+ - | argument name | value | type | - +========================+================================+===========================+ - | *name* | the full name of the | string | - | | extension, including any | | - | | packages --- i.e. *not* a | | - | | filename or pathname, but | | - | | Python dotted name | | - +------------------------+--------------------------------+---------------------------+ - | *sources* | list of source filenames, | list of strings | - | | relative to the distribution | | - | | root (where the setup script | | - | | lives), in Unix form (slash- | | - | | separated) for portability. | | - | | Source files may be C, C++, | | - | | SWIG (.i), platform-specific | | - | | resource files, or whatever | | - | | else is recognized by the | | - | | :command:`build_ext` command | | - | | as source for a Python | | - | | extension. 
| | - +------------------------+--------------------------------+---------------------------+ - | *include_dirs* | list of directories to search | list of strings | - | | for C/C++ header files (in | | - | | Unix form for portability) | | - +------------------------+--------------------------------+---------------------------+ - | *define_macros* | list of macros to define; each | list of tuples | - | | macro is defined using a | | - | | 2-tuple ``(name, value)``, | | - | | where *value* is | | - | | either the string to define it | | - | | to or ``None`` to define it | | - | | without a particular value | | - | | (equivalent of ``#define FOO`` | | - | | in source or :option:`-DFOO` | | - | | on Unix C compiler command | | - | | line) | | - +------------------------+--------------------------------+---------------------------+ - | *undef_macros* | list of macros to undefine | list of strings | - | | explicitly | | - +------------------------+--------------------------------+---------------------------+ - | *library_dirs* | list of directories to search | list of strings | - | | for C/C++ libraries at link | | - | | time | | - +------------------------+--------------------------------+---------------------------+ - | *libraries* | list of library names (not | list of strings | - | | filenames or paths) to link | | - | | against | | - +------------------------+--------------------------------+---------------------------+ - | *runtime_library_dirs* | list of directories to search | list of strings | - | | for C/C++ libraries at run | | - | | time (for shared extensions, | | - | | this is when the extension is | | - | | loaded) | | - +------------------------+--------------------------------+---------------------------+ - | *extra_objects* | list of extra files to link | list of strings | - | | with (e.g. object files not | | - | | implied by 'sources', static | | - | | library that must be | | - | | explicitly specified, binary | | - | | resource files, etc.) | | - +------------------------+--------------------------------+---------------------------+ - | *extra_compile_args* | any extra platform- and | list of strings | - | | compiler-specific information | | - | | to use when compiling the | | - | | source files in 'sources'. For | | - | | platforms and compilers where | | - | | a command line makes sense, | | - | | this is typically a list of | | - | | command-line arguments, but | | - | | for other platforms it could | | - | | be anything. | | - +------------------------+--------------------------------+---------------------------+ - | *extra_link_args* | any extra platform- and | list of strings | - | | compiler-specific information | | - | | to use when linking object | | - | | files together to create the | | - | | extension (or to create a new | | - | | static Python interpreter). | | - | | Similar interpretation as for | | - | | 'extra_compile_args'. | | - +------------------------+--------------------------------+---------------------------+ - | *export_symbols* | list of symbols to be exported | list of strings | - | | from a shared extension. Not | | - | | used on all platforms, and not | | - | | generally necessary for Python | | - | | extensions, which typically | | - | | export exactly one symbol: | | - | | ``init`` + extension_name. 
| | - +------------------------+--------------------------------+---------------------------+ - | *depends* | list of files that the | list of strings | - | | extension depends on | | - +------------------------+--------------------------------+---------------------------+ - | *language* | extension language (i.e. | string | - | | ``'c'``, ``'c++'``, | | - | | ``'objc'``). Will be detected | | - | | from the source extensions if | | - | | not provided. | | - +------------------------+--------------------------------+---------------------------+ - | *optional* | specifies that a build failure | boolean | - | | in the extension should not | | - | | abort the build process, but | | - | | simply skip the extension. | | - +------------------------+--------------------------------+---------------------------+ - -To distribute extension modules that live in a package (e.g. ``package.ext``), -you need to create a :file:`{package}/__init__.py` file to let Python recognize -and import your module. diff --git a/Doc/library/packaging.database.rst b/Doc/library/packaging.database.rst deleted file mode 100644 --- a/Doc/library/packaging.database.rst +++ /dev/null @@ -1,345 +0,0 @@ -:mod:`packaging.database` --- Database of installed distributions -================================================================= - -.. module:: packaging.database - :synopsis: Functions to query and manipulate installed distributions. - - -This module provides an implementation of :PEP:`376`. It was originally -intended to land in :mod:`pkgutil`, but with the inclusion of Packaging in the -standard library, it was thought best to include it in a submodule of -:mod:`packaging`, leaving :mod:`pkgutil` to deal with imports. - -Installed Python distributions are represented by instances of -:class:`Distribution`, or :class:`EggInfoDistribution` for legacy egg formats. -Most functions also provide an extra argument ``use_egg_info`` to take legacy -distributions into account. - -For the purpose of this module, "installed" means that the distribution's -:file:`.dist-info`, :file:`.egg-info` or :file:`egg` directory or file is found -on :data:`sys.path`. For example, if the parent directory of a -:file:`dist-info` directory is added to :envvar:`PYTHONPATH`, then it will be -available in the database. - -Classes representing installed distributions --------------------------------------------- - -.. class:: Distribution(path) - - Class representing an installed distribution. It is different from - :class:`packaging.dist.Distribution` which holds the list of files, the - metadata and options during the run of a Packaging command. - - Instantiate with the *path* to a ``.dist-info`` directory. Instances can be - compared and sorted. Other available methods are: - - .. XXX describe how comparison works - - .. method:: get_distinfo_file(path, binary=False) - - Return a read-only file object for a file located at - :file:`{project}-{version}.dist-info/{path}`. *path* should be a - ``'/'``-separated path relative to the ``.dist-info`` directory or an - absolute path; if it is an absolute path and doesn't start with the path - to the :file:`.dist-info` directory, a :class:`PackagingError` is raised. - - If *binary* is ``True``, the file is opened in binary mode. - - .. method:: get_resource_path(relative_path) - - .. TODO - - .. method:: list_distinfo_files(local=False) - - Return an iterator over all files located in the :file:`.dist-info` - directory. 
If *local* is ``True``, each returned path is transformed into - a local absolute path, otherwise the raw value found in the :file:`RECORD` - file is returned. - - .. method:: list_installed_files(local=False) - - Iterate over the files installed with the distribution and registered in - the :file:`RECORD` file and yield a tuple ``(path, md5, size)`` for each - line. If *local* is ``True``, the returned path is transformed into a - local absolute path, otherwise the raw value is returned. - - A local absolute path is an absolute path in which occurrences of ``'/'`` - have been replaced by :data:`os.sep`. - - .. method:: uses(path) - - Check whether *path* was installed by this distribution (i.e. if the path - is present in the :file:`RECORD` file). *path* can be a local absolute - path or a relative ``'/'``-separated path. Returns a boolean. - - Available attributes: - - .. attribute:: metadata - - Instance of :class:`packaging.metadata.Metadata` filled with the contents - of the :file:`{project}-{version}.dist-info/METADATA` file. - - .. attribute:: name - - Shortcut for ``metadata['Name']``. - - .. attribute:: version - - Shortcut for ``metadata['Version']``. - - .. attribute:: requested - - Boolean indicating whether this distribution was requested by the user of - automatically installed as a dependency. - - -.. class:: EggInfoDistribution(path) - - Class representing a legacy distribution. It is compatible with distutils' - and setuptools' :file:`.egg-info` and :file:`.egg` files and directories. - - .. FIXME should be named EggDistribution - - Instantiate with the *path* to an egg file or directory. Instances can be - compared and sorted. Other available methods are: - - .. method:: list_installed_files(local=False) - - .. method:: uses(path) - - Available attributes: - - .. attribute:: metadata - - Instance of :class:`packaging.metadata.Metadata` filled with the contents - of the :file:`{project-version}.egg-info/PKG-INFO` or - :file:`{project-version}.egg` file. - - .. attribute:: name - - Shortcut for ``metadata['Name']``. - - .. attribute:: version - - Shortcut for ``metadata['Version']``. - - -Functions to work with the database ------------------------------------ - -.. function:: get_distribution(name, use_egg_info=False, paths=None) - - Return an instance of :class:`Distribution` or :class:`EggInfoDistribution` - for the first installed distribution matching *name*. Egg distributions are - considered only if *use_egg_info* is true; if both a dist-info and an egg - file are found, the dist-info prevails. The directories to be searched are - given in *paths*, which defaults to :data:`sys.path`. Returns ``None`` if no - matching distribution is found. - - .. FIXME param should be named use_egg - - -.. function:: get_distributions(use_egg_info=False, paths=None) - - Return an iterator of :class:`Distribution` instances for all installed - distributions found in *paths* (defaults to :data:`sys.path`). If - *use_egg_info* is true, also return instances of :class:`EggInfoDistribution` - for legacy distributions found. - - -.. function:: get_file_users(path) - - Return an iterator over all distributions using *path*, a local absolute path - or a relative ``'/'``-separated path. - - .. XXX does this work with prefixes or full file path only? - - -.. function:: obsoletes_distribution(name, version=None, use_egg_info=False) - - Return an iterator over all distributions that declare they obsolete *name*. 
- *version* is an optional argument to match only specific releases (see - :mod:`packaging.version`). If *use_egg_info* is true, legacy egg - distributions will be considered as well. - - -.. function:: provides_distribution(name, version=None, use_egg_info=False) - - Return an iterator over all distributions that declare they provide *name*. - *version* is an optional argument to match only specific releases (see - :mod:`packaging.version`). If *use_egg_info* is true, legacy egg - distributions will be considered as well. - - -Utility functions ------------------ - -.. function:: distinfo_dirname(name, version) - - Escape *name* and *version* into a filename-safe form and return the - directory name built from them, for example - :file:`{safename}-{safeversion}.dist-info.` In *name*, runs of - non-alphanumeric characters are replaced with one ``'_'``; in *version*, - spaces become dots, and runs of other non-alphanumeric characters (except - dots) a replaced by one ``'-'``. - - .. XXX wth spaces in version numbers? - -For performance purposes, the list of distributions is being internally -cached. Caching is enabled by default, but you can control it with these -functions: - -.. function:: clear_cache() - - Clear the cache. - -.. function:: disable_cache() - - Disable the cache, without clearing it. - -.. function:: enable_cache() - - Enable the internal cache, without clearing it. - - -Examples --------- - -Printing all information about a distribution -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Given the name of an installed distribution, we shall print out all -information that can be obtained using functions provided in this module:: - - import sys - import packaging.database - - try: - name = sys.argv[1] - except ValueError: - sys.exit('Not enough arguments') - - # first create the Distribution instance - dist = packaging.database.Distribution(path) - if dist is None: - sys.exit('No such distribution') - - print('Information about %r' % dist.name) - print() - - print('Files') - print('=====') - for path, md5, size in dist.list_installed_files(): - print('* Path: %s' % path) - print(' Hash %s, Size: %s bytes' % (md5, size)) - print() - - print('Metadata') - print('========') - for key, value in dist.metadata.items(): - print('%20s: %s' % (key, value)) - print() - - print('Extra') - print('=====') - if dist.requested: - print('* It was installed by user request') - else: - print('* It was installed as a dependency') - -If we save the script above as ``print_info.py``, we can use it to extract -information from a :file:`.dist-info` directory. By typing in the console: - -.. code-block:: sh - - python print_info.py choxie - -we get the following output: - -.. 
code-block:: none - - Information about 'choxie' - - Files - ===== - * Path: ../tmp/distutils2/tests/fake_dists/choxie-2.0.0.9/truffles.py - Hash 5e052db6a478d06bad9ae033e6bc08af, Size: 111 bytes - * Path: ../tmp/distutils2/tests/fake_dists/choxie-2.0.0.9/choxie/chocolate.py - Hash ac56bf496d8d1d26f866235b95f31030, Size: 214 bytes - * Path: ../tmp/distutils2/tests/fake_dists/choxie-2.0.0.9/choxie/__init__.py - Hash 416aab08dfa846f473129e89a7625bbc, Size: 25 bytes - * Path: ../tmp/distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/INSTALLER - Hash d41d8cd98f00b204e9800998ecf8427e, Size: 0 bytes - * Path: ../tmp/distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA - Hash 696a209967fef3c8b8f5a7bb10386385, Size: 225 bytes - * Path: ../tmp/distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/REQUESTED - Hash d41d8cd98f00b204e9800998ecf8427e, Size: 0 bytes - * Path: ../tmp/distutils2/tests/fake_dists/choxie-2.0.0.9.dist-info/RECORD - Hash None, Size: None bytes - - Metadata - ======== - Metadata-Version: 1.2 - Name: choxie - Version: 2.0.0.9 - Platform: [] - Supported-Platform: UNKNOWN - Summary: Chocolate with a kick! - Description: UNKNOWN - Keywords: [] - Home-page: UNKNOWN - Author: UNKNOWN - Author-email: UNKNOWN - Maintainer: UNKNOWN - Maintainer-email: UNKNOWN - License: UNKNOWN - Classifier: [] - Download-URL: UNKNOWN - Obsoletes-Dist: ['truffles (<=0.8,>=0.5)', 'truffles (<=0.9,>=0.6)'] - Project-URL: [] - Provides-Dist: ['truffles (1.0)'] - Requires-Dist: ['towel-stuff (0.1)'] - Requires-Python: UNKNOWN - Requires-External: [] - - Extra - ===== - * It was installed as a dependency - - -Getting metadata about a distribution -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Sometimes you're not interested about the packaging information contained in a -full :class:`Distribution` object but just want to do something with its -:attr:`~Distribution.metadata`:: - - >>> from packaging.database import get_distribution - >>> info = get_distribution('chocolate').metadata - >>> info['Keywords'] - ['cooking', 'happiness'] - - -Finding out obsoleted distributions -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Now, we tackle a different problem, we are interested in finding out -which distributions have been obsoleted. This can be easily done as follows:: - - import packaging.database - - # iterate over all distributions in the system - for dist in packaging.database.get_distributions(): - name, version = dist.name, dist.version - # find out which distributions obsolete this name/version combination - replacements = packaging.database.obsoletes_distribution(name, version) - if replacements: - print('%r %s is obsoleted by' % (name, version), - ', '.join(repr(r.name) for r in replacements)) - -This is how the output might look like: - -.. code-block:: none - - 'strawberry' 0.6 is obsoleted by 'choxie' - 'grammar' 1.0a4 is obsoleted by 'towel-stuff' diff --git a/Doc/library/packaging.depgraph.rst b/Doc/library/packaging.depgraph.rst deleted file mode 100644 --- a/Doc/library/packaging.depgraph.rst +++ /dev/null @@ -1,199 +0,0 @@ -:mod:`packaging.depgraph` --- Dependency graph builder -====================================================== - -.. module:: packaging.depgraph - :synopsis: Graph builder for dependencies between releases. - - -This module provides the means to analyse the dependencies between various -distributions and to create a graph representing these dependency relationships. 
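A quick sketch of the typical flow, using only the functions and attributes
documented in this section (full, runnable examples appear under Example Usage
below)::

    from packaging.database import get_distributions
    from packaging.depgraph import generate_graph

    # build the graph from everything installed on sys.path
    graph = generate_graph(list(get_distributions()))

    # report requirements that no installed distribution satisfies
    for dist, missing in graph.missing.items():
        if missing:
            print('%s is missing: %s' % (dist.name, ', '.join(missing)))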
-In this document, "distribution" refers to an instance of -:class:`packaging.database.Distribution` or -:class:`packaging.database.EggInfoDistribution`. - -.. XXX terminology problem with dist vs. release: dists are installed, but deps - use releases - -.. XXX explain how to use it with dists not installed: Distribution can only be - instantiated with a path, but this module is useful for remote dist too - -.. XXX functions should accept and return iterators, not lists - - -The :class:`DependencyGraph` class ----------------------------------- - -.. class:: DependencyGraph - - Represent a dependency graph between releases. The nodes are distribution - instances; the edge model dependencies. An edge from ``a`` to ``b`` means - that ``a`` depends on ``b``. - - .. method:: add_distribution(distribution) - - Add *distribution* to the graph. - - .. method:: add_edge(x, y, label=None) - - Add an edge from distribution *x* to distribution *y* with the given - *label* (string). - - .. method:: add_missing(distribution, requirement) - - Add a missing *requirement* (string) for the given *distribution*. - - .. method:: repr_node(dist, level=1) - - Print a subgraph starting from *dist*. *level* gives the depth of the - subgraph. - - Direct access to the graph nodes and edges is provided through these - attributes: - - .. attribute:: adjacency_list - - Dictionary mapping distributions to a list of ``(other, label)`` tuples - where ``other`` is a distribution and the edge is labeled with ``label`` - (i.e. the version specifier, if such was provided). - - .. attribute:: reverse_list - - Dictionary mapping distributions to a list of predecessors. This allows - efficient traversal. - - .. attribute:: missing - - Dictionary mapping distributions to a list of requirements that were not - provided by any distribution. - - -Auxiliary functions -------------------- - -.. function:: dependent_dists(dists, dist) - - Recursively generate a list of distributions from *dists* that are dependent - on *dist*. - - .. XXX what does member mean here: "dist is a member of *dists* for which we - are interested" - -.. function:: generate_graph(dists) - - Generate a :class:`DependencyGraph` from the given list of distributions. - - .. XXX make this alternate constructor a DepGraph classmethod or rename; - 'generate' can suggest it creates a file or an image, use 'make' - -.. function:: graph_to_dot(graph, f, skip_disconnected=True) - - Write a DOT output for the graph to the file-like object *f*. - - If *skip_disconnected* is true, all distributions that are not dependent on - any other distribution are skipped. - - .. XXX why is this not a DepGraph method? - - -Example Usage -------------- - -Depict all dependenciess in the system -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -First, we shall generate a graph of all the distributions on the system -and then create an image out of it using the tools provided by -`Graphviz `_:: - - from packaging.database import get_distributions - from packaging.depgraph import generate_graph - - dists = list(get_distributions()) - graph = generate_graph(dists) - -It would be interesting to print out the missing requirements. This can be done -as follows:: - - for dist, reqs in graph.missing.items(): - if reqs: - reqs = ' ,'.join(repr(req) for req in reqs) - print('Missing dependencies for %r: %s' % (dist.name, reqs)) - -Example output is: - -.. 
code-block:: none - - Missing dependencies for 'TurboCheetah': 'Cheetah' - Missing dependencies for 'TurboGears': 'ConfigObj', 'DecoratorTools', 'RuleDispatch' - Missing dependencies for 'jockey': 'PyKDE4.kdecore', 'PyKDE4.kdeui', 'PyQt4.QtCore', 'PyQt4.QtGui' - Missing dependencies for 'TurboKid': 'kid' - Missing dependencies for 'TurboJson: 'DecoratorTools', 'RuleDispatch' - -Now, we proceed with generating a graphical representation of the graph. First -we write it to a file, and then we generate a PNG image using the -:program:`dot` command-line tool:: - - from packaging.depgraph import graph_to_dot - with open('output.dot', 'w') as f: - # only show the interesting distributions, skipping the disconnected ones - graph_to_dot(graph, f, skip_disconnected=True) - -We can create the final picture using: - -.. code-block:: sh - - $ dot -Tpng output.dot > output.png - -An example result is: - -.. figure:: depgraph-output.png - :alt: Example PNG output from packaging.depgraph and dot - -If you want to include egg distributions as well, then the code requires only -one change, namely the line:: - - dists = list(packaging.database.get_distributions()) - -has to be replaced with:: - - dists = list(packaging.database.get_distributions(use_egg_info=True)) - -On many platforms, a richer graph is obtained because at the moment most -distributions are provided in the egg rather than the new standard -``.dist-info`` format. - -.. XXX missing image - - An example of a more involved graph for illustrative reasons can be seen - here: - - .. image:: depgraph_big.png - - -List all dependent distributions -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -We will list all distributions that are dependent on some given distibution. -This time, egg distributions will be considered as well:: - - import sys - from packaging.database import get_distribution, get_distributions - from packaging.depgraph import dependent_dists - - dists = list(get_distributions(use_egg_info=True)) - dist = get_distribution('bacon', use_egg_info=True) - if dist is None: - sys.exit('No such distribution in the system') - - deps = dependent_dists(dists, dist) - deps = ', '.join(repr(x.name) for x in deps) - print('Distributions depending on %r: %s' % (dist.name, deps)) - -And this is example output: - -.. with the dependency relationships as in the previous section - (depgraph_big) - -.. code-block:: none - - Distributions depending on 'bacon': 'towel-stuff', 'choxie', 'grammar' diff --git a/Doc/library/packaging.dist.rst b/Doc/library/packaging.dist.rst deleted file mode 100644 --- a/Doc/library/packaging.dist.rst +++ /dev/null @@ -1,108 +0,0 @@ -:mod:`packaging.dist` --- The Distribution class -================================================ - -.. module:: packaging.dist - :synopsis: Core Distribution class. - - -This module provides the :class:`Distribution` class, which represents the -module distribution being built/packaged/distributed/installed. - -.. class:: Distribution(arguments) - - A :class:`Distribution` describes how to build, package, distribute and - install a Python project. - - The arguments accepted by the constructor are laid out in the following - table. Some of them will end up in a metadata object, the rest will become - data attributes of the :class:`Distribution` instance. - - .. TODO improve constructor to take a Metadata object + named params? - (i.e. Distribution(metadata, cmdclass, py_modules, etc) - .. TODO also remove obsolete(?) script_name, etc. parameters? 
see what - py2exe and other tools need - - +--------------------+--------------------------------+-------------------------------------------------------------+ - | argument name | value | type | - +====================+================================+=============================================================+ - | *name* | The name of the project | a string | - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *version* | The version number of the | a string | - | | release; see | | - | | :mod:`packaging.version` | | - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *summary* | A single line describing the | a string | - | | project | | - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *description* | Longer description of the | a string | - | | project | | - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *author* | The name of the project author | a string | - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *author_email* | The email address of the | a string | - | | project author | | - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *maintainer* | The name of the current | a string | - | | maintainer, if different from | | - | | the author | | - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *maintainer_email* | The email address of the | a string | - | | current maintainer, if | | - | | different from the author | | - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *home_page* | A URL for the proejct | a string | - | | (homepage) | | - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *download_url* | A URL to download the project | a string | - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *packages* | A list of Python packages that | a list of strings | - | | packaging will manipulate | | - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *py_modules* | A list of Python modules that | a list of strings | - | | packaging will manipulate | | - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *scripts* | A list of standalone scripts | a list of strings | - | | to be built and installed | | - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *ext_modules* | A list of Python extensions to | a list of instances of | - | | be built | :class:`packaging.compiler.extension.Extension` | - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *classifiers* | A list of categories for the | a list of strings; valid classifiers are listed on `PyPi | - | | distribution | `_. 
| - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *distclass* | the :class:`Distribution` | a subclass of | - | | class to use | :class:`packaging.dist.Distribution` | - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *script_name* | The name of the setup.py | a string | - | | script - defaults to | | - | | ``sys.argv[0]`` | | - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *script_args* | Arguments to supply to the | a list of strings | - | | setup script | | - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *options* | default options for the setup | a string | - | | script | | - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *license* | The license for the | a string | - | | distribution; should be used | | - | | when there is no suitable | | - | | License classifier, or to | | - | | refine a classifier | | - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *keywords* | Descriptive keywords; used by | a list of strings or a comma-separated string | - | | catalogs such as PyPI | | - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *platforms* | Platforms compatible with this | a list of strings or a comma-separated string | - | | distribution; should be used | | - | | when there is no suitable | | - | | Platform classifier | | - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *cmdclass* | A mapping of command names to | a dictionary | - | | :class:`Command` subclasses | | - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *data_files* | A list of data files to | a list | - | | install | | - +--------------------+--------------------------------+-------------------------------------------------------------+ - | *package_dir* | A mapping of Python packages | a dictionary | - | | to directory names | | - +--------------------+--------------------------------+-------------------------------------------------------------+ diff --git a/Doc/library/packaging.fancy_getopt.rst b/Doc/library/packaging.fancy_getopt.rst deleted file mode 100644 --- a/Doc/library/packaging.fancy_getopt.rst +++ /dev/null @@ -1,75 +0,0 @@ -:mod:`packaging.fancy_getopt` --- Wrapper around the getopt module -================================================================== - -.. module:: packaging.fancy_getopt - :synopsis: Additional getopt functionality. - - -.. warning:: - This module is deprecated and will be replaced with :mod:`optparse`. - -This module provides a wrapper around the standard :mod:`getopt` module that -provides the following additional features: - -* short and long options are tied together - -* options have help strings, so :func:`fancy_getopt` could potentially create a - complete usage summary - -* options set attributes of a passed-in object - -* boolean options can have "negative aliases" --- e.g. if :option:`--quiet` is - the "negative alias" of :option:`--verbose`, then :option:`--quiet` on the - command line sets *verbose* to false. 
- -.. function:: fancy_getopt(options, negative_opt, object, args) - - Wrapper function. *options* is a list of ``(long_option, short_option, - help_string)`` 3-tuples as described in the constructor for - :class:`FancyGetopt`. *negative_opt* should be a dictionary mapping option names - to option names, both the key and value should be in the *options* list. - *object* is an object which will be used to store values (see the :meth:`getopt` - method of the :class:`FancyGetopt` class). *args* is the argument list. Will use - ``sys.argv[1:]`` if you pass ``None`` as *args*. - - -.. class:: FancyGetopt(option_table=None) - - The option_table is a list of 3-tuples: ``(long_option, short_option, - help_string)`` - - If an option takes an argument, its *long_option* should have ``'='`` appended; - *short_option* should just be a single character, no ``':'`` in any case. - *short_option* should be ``None`` if a *long_option* doesn't have a - corresponding *short_option*. All option tuples must have long options. - -The :class:`FancyGetopt` class provides the following methods: - - -.. method:: FancyGetopt.getopt(args=None, object=None) - - Parse command-line options in args. Store as attributes on *object*. - - If *args* is ``None`` or not supplied, uses ``sys.argv[1:]``. If *object* is - ``None`` or not supplied, creates a new :class:`OptionDummy` instance, stores - option values there, and returns a tuple ``(args, object)``. If *object* is - supplied, it is modified in place and :func:`getopt` just returns *args*; in - both cases, the returned *args* is a modified copy of the passed-in *args* list, - which is left untouched. - - .. TODO and args returned are? - - -.. method:: FancyGetopt.get_option_order() - - Returns the list of ``(option, value)`` tuples processed by the previous run of - :meth:`getopt` Raises :exc:`RuntimeError` if :meth:`getopt` hasn't been called - yet. - - -.. method:: FancyGetopt.generate_help(header=None) - - Generate help text (a list of strings, one per suggested line of output) from - the option table for this :class:`FancyGetopt` object. - - If supplied, prints the supplied *header* at the top of the help. diff --git a/Doc/library/packaging.install.rst b/Doc/library/packaging.install.rst deleted file mode 100644 --- a/Doc/library/packaging.install.rst +++ /dev/null @@ -1,112 +0,0 @@ -:mod:`packaging.install` --- Installation tools -=============================================== - -.. module:: packaging.install - :synopsis: Download and installation building blocks - - -Packaging provides a set of tools to deal with downloads and installation of -distributions. Their role is to download the distribution from indexes, resolve -the dependencies, and provide a safe way to install distributions. An operation -that fails will cleanly roll back, not leave half-installed distributions on the -system. Here's the basic process followed: - -#. Move all distributions that will be removed to a temporary location. - -#. Install all the distributions that will be installed in a temporary location. - -#. If the installation fails, move the saved distributions back to their - location and delete the installed distributions. - -#. Otherwise, move the installed distributions to the right location and delete - the temporary locations. - -This is a higher-level module built on :mod:`packaging.database` and -:mod:`packaging.pypi`. - - -Public functions ----------------- - -.. 
function:: get_infos(requirements, index=None, installed=None, \ - prefer_final=True) - - Return information about what's going to be installed and upgraded. - *requirements* is a string containing the requirements for this - project, for example ``'FooBar 1.1'`` or ``'BarBaz (<1.2)'``. - - .. XXX are requirements comma-separated? - - If you want to use another index than the main PyPI, give its URI as *index* - argument. - - *installed* is a list of already installed distributions used to find - satisfied dependencies, obsoleted distributions and eventual conflicts. - - By default, alpha, beta and candidate versions are not picked up. Set - *prefer_final* to false to accept them too. - - The results are returned in a dictionary containing all the information - needed to perform installation of the requirements with the - :func:`install_from_infos` function: - - >>> get_install_info("FooBar (<=1.2)") - {'install': [], 'remove': [], 'conflict': []} - - .. TODO should return tuple or named tuple, not dict - .. TODO use "predicate" or "requirement" consistently in version and here - .. FIXME "info" cannot be plural in English, s/infos/info/ - - -.. function:: install(project) - - -.. function:: install_dists(dists, path, paths=None) - - Safely install all distributions provided in *dists* into *path*. *paths* is - a list of paths where already-installed distributions will be looked for to - find satisfied dependencies and conflicts (default: :data:`sys.path`). - Returns a list of installed dists. - - .. FIXME dists are instances of what? - - -.. function:: install_from_infos(install_path=None, install=[], remove=[], \ - conflicts=[], paths=None) - - Safely install and remove given distributions. This function is designed to - work with the return value of :func:`get_infos`: *install*, *remove* and - *conflicts* should be list of distributions returned by :func:`get_infos`. - If *install* is not empty, *install_path* must be given to specify the path - where the distributions should be installed. *paths* is a list of paths - where already-installed distributions will be looked for (default: - :data:`sys.path`). - - This function is a very basic installer; if *conflicts* is not empty, the - system will be in a conflicting state after the function completes. It is a - building block for more sophisticated installers with conflict resolution - systems. - - .. TODO document typical value for install_path - .. TODO document integration with default schemes, esp. user site-packages - - -.. function:: install_local_project(path) - - Install a distribution from a source directory, which must contain either a - Packaging-compliant :file:`setup.cfg` file or a legacy Distutils - :file:`setup.py` script (in which case Distutils will be used under the hood - to perform the installation). - - -.. function:: remove(project_name, paths=None, auto_confirm=True) - - Remove one distribution from the system. - - .. FIXME this is the only function using "project" instead of dist/release - -.. - Example usage - -------------- - - Get the scheme of what's gonna be installed if we install "foobar": diff --git a/Doc/library/packaging.metadata.rst b/Doc/library/packaging.metadata.rst deleted file mode 100644 --- a/Doc/library/packaging.metadata.rst +++ /dev/null @@ -1,122 +0,0 @@ -:mod:`packaging.metadata` --- Metadata handling -=============================================== - -.. module:: packaging.metadata - :synopsis: Class holding the metadata of a release. - - -.. 
TODO use sphinx-autogen to generate basic doc from the docstrings - -.. class:: Metadata - - This class can read and write metadata files complying with any of the - defined versions: 1.0 (:PEP:`241`), 1.1 (:PEP:`314`) and 1.2 (:PEP:`345`). It - implements methods to parse Metadata files and write them, and a mapping - interface to its contents. - - The :PEP:`345` implementation supports the micro-language for the environment - markers, and displays warnings when versions that are supposed to be - :PEP:`386`-compliant are violating the specification. - - -Reading metadata ----------------- - -The :class:`Metadata` class can be instantiated -with the path of the metadata file, and provides a dict-like interface to the -values:: - - >>> from packaging.metadata import Metadata - >>> metadata = Metadata('PKG-INFO') - >>> metadata.keys()[:5] - ('Metadata-Version', 'Name', 'Version', 'Platform', 'Supported-Platform') - >>> metadata['Name'] - 'CLVault' - >>> metadata['Version'] - '0.5' - >>> metadata['Requires-Dist'] - ["pywin32; sys.platform == 'win32'", "Sphinx"] - - -The fields that support environment markers can be automatically ignored if -the object is instantiated using the ``platform_dependent`` option. -:class:`Metadata` will interpret in this case -the markers and will automatically remove the fields that are not compliant -with the running environment. Here's an example under Mac OS X. The win32 -dependency we saw earlier is ignored:: - - >>> from packaging.metadata import Metadata - >>> metadata = Metadata('PKG-INFO', platform_dependent=True) - >>> metadata['Requires-Dist'] - ['Sphinx'] - - -If you want to provide your own execution context, let's say to test the -metadata under a particular environment that is not the current environment, -you can provide your own values in the ``execution_context`` option, which -is the dict that may contain one or more keys of the context the micro-language -expects. - -Here's an example, simulating a win32 environment:: - - >>> from packaging.metadata import Metadata - >>> context = {'sys.platform': 'win32'} - >>> metadata = Metadata('PKG-INFO', platform_dependent=True, - ... execution_context=context) - ... - >>> metadata['Requires-Dist'] = ["pywin32; sys.platform == 'win32'", - ... "Sphinx"] - ... - >>> metadata['Requires-Dist'] - ['pywin32', 'Sphinx'] - - -Writing metadata ----------------- - -Writing metadata can be done using the ``write`` method:: - - >>> metadata.write('/to/my/PKG-INFO') - -The class will pick the best version for the metadata, depending on the values -provided. If all the values provided exist in all versions, the class will -use :attr:`PKG_INFO_PREFERRED_VERSION`. It is set by default to 1.0, the most -widespread version. - - -Conflict checking and best version ----------------------------------- - -Some fields in :PEP:`345` have to comply with the version number specification -defined in :PEP:`386`. When they don't comply, a warning is emitted:: - - >>> from packaging.metadata import Metadata - >>> metadata = Metadata() - >>> metadata['Requires-Dist'] = ['Funky (Groovie)'] - "Funky (Groovie)" is not a valid predicate - >>> metadata['Requires-Dist'] = ['Funky (1.2)'] - -See also :mod:`packaging.version`. - - -.. TODO talk about check() - - -:mod:`packaging.markers` --- Environment markers -================================================ - -.. module:: packaging.markers - :synopsis: Micro-language for environment markers - - -This is an implementation of environment markers `as defined in PEP 345 -`_. 
It is used -for some metadata fields. - -.. function:: interpret(marker, execution_context=None) - - Interpret a marker and return a boolean result depending on the environment. - Example: - - >>> interpret("python_version > '1.0'") - True diff --git a/Doc/library/packaging.pypi.dist.rst b/Doc/library/packaging.pypi.dist.rst deleted file mode 100644 --- a/Doc/library/packaging.pypi.dist.rst +++ /dev/null @@ -1,114 +0,0 @@ -:mod:`packaging.pypi.dist` --- Classes representing query results -================================================================= - -.. module:: packaging.pypi.dist - :synopsis: Classes representing the results of queries to indexes. - - -Information coming from the indexes is held in instances of the classes defined -in this module. - -Keep in mind that each project (eg. FooBar) can have several releases -(eg. 1.1, 1.2, 1.3), and each of these releases can be provided in multiple -distributions (eg. a source distribution, a binary one, etc). - - -ReleaseInfo ------------ - -Each release has a project name, version, metadata, and related distributions. - -This information is stored in :class:`ReleaseInfo` -objects. - -.. class:: ReleaseInfo - - -DistInfo ---------- - -:class:`DistInfo` is a simple class that contains -information related to distributions; mainly the URLs where distributions -can be found. - -.. class:: DistInfo - - -ReleasesList ------------- - -The :mod:`~packaging.pypi.dist` module provides a class which works -with lists of :class:`ReleaseInfo` classes; -used to filter and order results. - -.. class:: ReleasesList - - -Example usage -------------- - -Build a list of releases and order them -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Assuming we have a list of releases:: - - >>> from packaging.pypi.dist import ReleasesList, ReleaseInfo - >>> fb10 = ReleaseInfo("FooBar", "1.0") - >>> fb11 = ReleaseInfo("FooBar", "1.1") - >>> fb11a = ReleaseInfo("FooBar", "1.1a1") - >>> ReleasesList("FooBar", [fb11, fb11a, fb10]) - >>> releases.sort_releases() - >>> releases.get_versions() - ['1.1', '1.1a1', '1.0'] - >>> releases.add_release("1.2a1") - >>> releases.get_versions() - ['1.1', '1.1a1', '1.0', '1.2a1'] - >>> releases.sort_releases() - ['1.2a1', '1.1', '1.1a1', '1.0'] - >>> releases.sort_releases(prefer_final=True) - >>> releases.get_versions() - ['1.1', '1.0', '1.2a1', '1.1a1'] - - -Add distribution related information to releases -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -It's easy to add distribution information to releases:: - - >>> from packaging.pypi.dist import ReleasesList, ReleaseInfo - >>> r = ReleaseInfo("FooBar", "1.0") - >>> r.add_distribution("sdist", url="http://example.org/foobar-1.0.tar.gz") - >>> r.dists - {'sdist': FooBar 1.0 sdist} - >>> r['sdist'].url - {'url': 'http://example.org/foobar-1.0.tar.gz', 'hashname': None, 'hashval': - None, 'is_external': True} - - -Getting attributes from the dist objects -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -To abstract querying information returned from the indexes, attributes and -release information can be retrieved directly from dist objects. - -For instance, if you have a release instance that does not contain the metadata -attribute, it can be fetched by using the "fetch_metadata" method:: - - >>> r = Release("FooBar", "1.1") - >>> print r.metadata - None # metadata field is actually set to "None" - >>> r.fetch_metadata() - - -.. 
XXX add proper roles to these constructs - - -It's possible to retrieve a project's releases (`fetch_releases`), -metadata (`fetch_metadata`) and distributions (`fetch_distributions`) using -a similar workflow. - -.. XXX what is possible? - -Internally, this is possible because while retrieving information about -projects, releases or distributions, a reference to the client used is -stored which can be accessed using the object's `_index` attribute. diff --git a/Doc/library/packaging.pypi.rst b/Doc/library/packaging.pypi.rst deleted file mode 100644 --- a/Doc/library/packaging.pypi.rst +++ /dev/null @@ -1,74 +0,0 @@ -:mod:`packaging.pypi` --- Interface to projects indexes -======================================================= - -.. module:: packaging.pypi - :synopsis: Low-level and high-level APIs to query projects indexes. - - -Packaging queries PyPI to get information about projects or download them. The -low-level facilities used internally are also part of the public API designed to -be used by other tools. - -The :mod:`packaging.pypi` package provides those facilities, which can be -used to access information about Python projects registered at indexes, the -main one being PyPI, located at http://pypi.python.org/. - -There are two ways to retrieve data from these indexes: a screen-scraping -interface called the "simple API", and XML-RPC. The first one uses HTML pages -located under http://pypi.python.org/simple/, the second one makes XML-RPC -requests to http://pypi.python.org/pypi/. All functions and classes also work -with other indexes such as mirrors, which typically implement only the simple -interface. - -Packaging provides a class that wraps both APIs to provide full query and -download functionality: :class:`packaging.pypi.client.ClientWrapper`. If you -want more control, you can use the underlying classes -:class:`packaging.pypi.simple.Crawler` and :class:`packaging.pypi.xmlrpc.Client` -to connect to one specific interface. - - -:mod:`packaging.pypi.client` --- High-level query API -===================================================== - -.. module:: packaging.pypi.client - :synopsis: Wrapper around :mod:`packaging.pypi.xmlrpc` and - :mod:`packaging.pypi.simple` to query indexes. - - -This module provides a high-level API to query indexes and search -for releases and distributions. The aim of this module is to choose the best -way to query the API automatically, either using XML-RPC or the simple index, -with a preference toward the latter. - -.. class:: ClientWrapper - - Instances of this class will use the simple interface or XML-RPC requests to - query indexes and return :class:`packaging.pypi.dist.ReleaseInfo` and - :class:`packaging.pypi.dist.ReleasesList` objects. - - .. method:: find_projects - - .. method:: get_release - - .. method:: get_releases - - -:mod:`packaging.pypi.base` --- Base class for index crawlers -============================================================ - -.. module:: packaging.pypi.base - :synopsis: Base class used to implement crawlers. - - -.. class:: BaseClient(prefer_final, prefer_source) - - Base class containing common methods for the index crawlers or clients. One - method is currently defined: - - .. method:: download_distribution(requirements, temp_path=None, \ - prefer_source=None, prefer_final=None) - - Download a distribution from the last release according to the - requirements. If *temp_path* is provided, download to this path, - otherwise, create a temporary directory for the download. 
If a release is -found, the full path to the downloaded file is returned. diff --git a/Doc/library/packaging.pypi.simple.rst b/Doc/library/packaging.pypi.simple.rst deleted file mode 100644 --- a/Doc/library/packaging.pypi.simple.rst +++ /dev/null @@ -1,218 +0,0 @@ -:mod:`packaging.pypi.simple` --- Crawler using the PyPI "simple" interface -========================================================================== - -.. module:: packaging.pypi.simple - :synopsis: Crawler using the screen-scraping "simple" interface to fetch info - and distributions. - - -The class provided by :mod:`packaging.pypi.simple` can access project indexes -and provide useful information about distributions. PyPI, other indexes and -local indexes are supported. - -You should use this module to search for distributions by name and version, to -process external index pages and to download distributions. It is not suited -for operations that require lengthy index processing (like "finding all distributions -with a specific version, no matter the name"); use :mod:`packaging.pypi.xmlrpc` -for that. - - -API ---- - -.. class:: Crawler(index_url=DEFAULT_SIMPLE_INDEX_URL, \ - prefer_final=False, prefer_source=True, \ - hosts=('*',), follow_externals=False, \ - mirrors_url=None, mirrors=None, timeout=15, \ - mirrors_max_tries=0) - - *index_url* is the address of the index to use for requests. - - The first two parameters control the query results. *prefer_final* - indicates whether a final version (not alpha, beta or candidate) is to be - preferred over a newer but non-final version (for example, whether to pick - up 1.0 over 2.0a3). It is used only for queries that don't give a version - argument. Likewise, *prefer_source* tells whether to prefer a source - distribution over a binary one, if no distribution argument was provided. - - Other parameters are related to external links (that is, links that go - outside the simple index): *hosts* is a list of hosts allowed to be - processed if *follow_externals* is true (default behavior is to follow all - hosts), *follow_externals* enables or disables following external links - (default is false, meaning disabled). - - The remaining parameters are related to the mirroring infrastructure - defined in :PEP:`381`. *mirrors_url* gives a URL to look on for DNS - records giving mirror addresses; *mirrors* is a list of mirror URLs (see - the PEP). If both *mirrors* and *mirrors_url* are given, *mirrors_url* - will only be used if *mirrors* is set to ``None``. *timeout* is the time - (in seconds) to wait before considering a URL has timed out; - *mirrors_max_tries* is the number of times to try requesting information - on mirrors before switching. - - The following methods are defined: - - .. method:: get_distributions(project_name, version) - - Return the distributions found in the index for the given release. - - .. method:: get_metadata(project_name, version) - - Return the metadata found on the index for this project name and - version. Currently downloads and unpacks a distribution to read the - PKG-INFO file. - - .. method:: get_release(requirements, prefer_final=None) - - Return one release that fulfills the given requirements. - - .. method:: get_releases(requirements, prefer_final=None, force_update=False) - - Search for releases and return a - :class:`~packaging.pypi.dist.ReleasesList` object containing the - results. - - .. method:: search_projects(name=None) - - Search the index for projects containing the given name and return a - list of matching names. 
- - See also the base class :class:`packaging.pypi.base.BaseClient` for inherited - methods. - - -.. data:: DEFAULT_SIMPLE_INDEX_URL - - The address used by default by the crawler class. It is currently - ``'http://a.pypi.python.org/simple/'``, the main PyPI installation. - - - - -Usage Examples ---------------- - -To help you understand how to use the `Crawler` class, here are some basic -usage examples. - -Request the simple index to get a specific distribution -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Suppose you want to scan an index to get a list of distributions for -the "foobar" project. You can use the "get_releases" method for that. -The get_releases method will browse the project page, and return -:class:`ReleaseInfo` objects for each download link it finds. :: - - >>> from packaging.pypi.simple import Crawler - >>> crawler = Crawler() - >>> crawler.get_releases("FooBar") - [, ] - - -Note that you can also ask the client for specific versions, using version -specifiers (described in `PEP 345 -`_):: - - >>> client.get_releases("FooBar < 1.2") - [, ] - - -`get_releases` returns a list of :class:`ReleaseInfo`, but you can also get the -best distribution that fulfills your requirements, using "get_release":: - - >>> client.get_release("FooBar < 1.2") - - - -Download distributions -^^^^^^^^^^^^^^^^^^^^^^ - -As it can get the URLs of distributions provided by PyPI, the `Crawler` -client can also download the distributions and put them in a temporary -destination for you:: - - >>> client.download("foobar") - /tmp/temp_dir/foobar-1.2.tar.gz - - -You can also specify the directory you want to download to:: - - >>> client.download("foobar", "/path/to/my/dir") - /path/to/my/dir/foobar-1.2.tar.gz - - -While downloading, the MD5 hash of the archive is checked; if it does not match, -the download is tried once more, and if it fails again, `MD5HashDoesNotMatchError` -is raised. - -Internally, it is not the Crawler that downloads the distributions, but the -`DistributionInfo` class. Please refer to its documentation for more details. - - -Following PyPI external links -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The default behavior for packaging is to *not* follow the links provided -by HTML pages in the "simple index" when looking for distribution-related -downloads. - -It's possible to tell the crawler to follow external links by setting the -`follow_externals` attribute, on instantiation or afterwards:: - - >>> client = Crawler(follow_externals=True) - -or :: - - >>> client = Crawler() - >>> client.follow_externals = True - - -Working with external indexes, and mirrors -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The default `Crawler` behavior is to rely on the Python Package Index stored -on PyPI (http://pypi.python.org/simple). - -If you need to work with a local index, or with private indexes, you can specify -it using the index_url parameter:: - - >>> client = Crawler(index_url="file://filesystem/path/") - -or :: - - >>> client = Crawler(index_url="http://some.specific.url/") - - -You can also specify mirrors to fall back on in case the first index_url you -provided does not respond, or does not respond correctly. The default behavior for -`Crawler` is to use the list provided by Python.org DNS records, as -described in :PEP:`381` about the mirroring infrastructure. - -If you don't want to rely on these, you can specify the list of mirrors you -want to try by specifying the `mirrors` attribute. 
It's a simple iterable:: - - >>> mirrors = ["http://first.mirror","http://second.mirror"] - >>> client = Crawler(mirrors=mirrors) - - -Searching in the simple index -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -It's possible to search for projects with specific names in the package index. -Assuming you want to find all projects containing the "distutils" keyword:: - - >>> c.search_projects("distutils") - [, , , , , , ] - - -You can also search for projects whose names start or end with a specific text, -using a wildcard:: - - >>> c.search_projects("distutils*") - [, , ] - - >>> c.search_projects("*distutils") - [, , , , ] diff --git a/Doc/library/packaging.pypi.xmlrpc.rst b/Doc/library/packaging.pypi.xmlrpc.rst deleted file mode 100644 --- a/Doc/library/packaging.pypi.xmlrpc.rst +++ /dev/null @@ -1,143 +0,0 @@ -:mod:`packaging.pypi.xmlrpc` --- Crawler using the PyPI XML-RPC interface -========================================================================= - -.. module:: packaging.pypi.xmlrpc - :synopsis: Client using XML-RPC requests to fetch info and distributions. - - -Indexes can be queried using XML-RPC calls, and Packaging provides a simple -way to interface with XML-RPC. - -You should **use** XML-RPC when: - -* Searching the index for projects **on fields other than project - names**. For instance, you can search for projects based on the - author_email field. -* Searching all the versions that have existed for a project. -* Retrieving metadata information from releases or - distributions. - - -You should **avoid using** XML-RPC method calls when: - -* Retrieving the latest version of a project -* Getting the projects with a specific name and version. -* The simple index can match your needs - - -When dealing with indexes, keep in mind that the index queries will always -return you :class:`packaging.pypi.dist.ReleaseInfo` and -:class:`packaging.pypi.dist.ReleasesList` objects. - -Some methods here share a common API with the ones you can find in -:class:`packaging.pypi.simple`; internally, :class:`packaging.pypi.client` -is inherited by :class:`Client`. - - -API ---- - -.. class:: Client - - -Usage examples --------------- - -The use cases described here are those that are not common to the other clients. -If you want to see all the methods, please refer to the API or to the usage examples -described in :class:`packaging.pypi.client.Client`. - - -Finding releases -^^^^^^^^^^^^^^^^ - -It's a common use case to search for "things" within the index. We can -basically search for projects by their name, which is the most common way for -users (e.g. "give me the latest version of the FooBar project"). - -This can be accomplished using the following syntax:: - - >>> client = xmlrpc.Client() - >>> client.get_release("Foobar (<= 1.3)") - - >>> client.get_releases("FooBar (<= 1.3)") - [FooBar 1.1, FooBar 1.1.1, FooBar 1.2, FooBar 1.2.1] - - -We can also search on specific fields:: - - >>> client.search_projects(field=value) - - -You can specify the operator to use; the default is "or":: - - >>> client.search_projects(field=value, operator="and") - - -The specific fields you can search are: - -* name -* version -* author -* author_email -* maintainer -* maintainer_email -* home_page -* license -* summary -* description -* keywords -* platform -* download_url - - -Getting metadata information -^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -XML-RPC is a preferred way to retrieve metadata information from indexes. 
-It's really simple to do so:: - - >>> client = xmlrpc.Client() - >>> client.get_metadata("FooBar", "1.1") - - - -Assuming we already have a :class:`packaging.pypi.ReleaseInfo` object defined, -it's possible to pass it to the xmlrpc client to retrieve and complete its -metadata:: - - >>> foobar11 = ReleaseInfo("FooBar", "1.1") - >>> client = xmlrpc.Client() - >>> returned_release = client.get_metadata(release=foobar11) - >>> returned_release - - - -Get all the releases of a project -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -To retrieve all the releases for a project, you can build them using -`get_releases`:: - - >>> client = xmlrpc.Client() - >>> client.get_releases("FooBar") - [, , ] - - -Get information about distributions -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Indexes have information about projects, releases **and** distributions. -If you're not familiar with those, please refer to the documentation of -:mod:`packaging.pypi.dist`. - -It's possible to retrieve information about distributions, e.g "what are the -existing distributions for this release ? How to retrieve them ?":: - - >>> client = xmlrpc.Client() - >>> release = client.get_distributions("FooBar", "1.1") - >>> release.dists - {'sdist': , 'bdist': } - -As you see, this does not return a list of distributions, but a release, -because a release can be used like a list of distributions. diff --git a/Doc/library/packaging.rst b/Doc/library/packaging.rst deleted file mode 100644 --- a/Doc/library/packaging.rst +++ /dev/null @@ -1,75 +0,0 @@ -:mod:`packaging` --- Packaging support -====================================== - -.. module:: packaging - :synopsis: Packaging system and building blocks for other packaging systems. -.. sectionauthor:: Fred L. Drake, Jr. , distutils and packaging - contributors - - -The :mod:`packaging` package provides support for building, packaging, -distributing and installing additional projects into a Python installation. -Projects may include Python modules, extension modules, packages and scripts. -:mod:`packaging` also provides building blocks for other packaging systems -that are not tied to the command system. - -This manual is the reference documentation for those standalone building -blocks and for extending Packaging. If you're looking for the user-centric -guides to install a project or package your own code, head to `See also`__. - - -Building blocks ---------------- - -.. toctree:: - :maxdepth: 2 - - packaging-misc - packaging.version - packaging.metadata - packaging.database - packaging.depgraph - packaging.pypi - packaging.pypi.dist - packaging.pypi.simple - packaging.pypi.xmlrpc - packaging.install - - -The command machinery ---------------------- - -.. toctree:: - :maxdepth: 2 - - packaging.dist - packaging.command - packaging.compiler - packaging.fancy_getopt - - -Other utilities ----------------- - -.. toctree:: - :maxdepth: 2 - - packaging.util - packaging.tests.pypi_server - -.. XXX missing: compat config create (dir_util) run pypi.{base,mirrors} - - -.. __: - -.. seealso:: - - :ref:`packaging-index` - The manual for developers of Python projects who want to package and - distribute them. This describes how to use :mod:`packaging` to make - projects easily found and added to an existing Python installation. - - :ref:`packaging-install-index` - A user-centered manual which includes information on adding projects - into an existing Python installation. You do not need to be a Python - programmer to read this manual. 
diff --git a/Doc/library/packaging.tests.pypi_server.rst b/Doc/library/packaging.tests.pypi_server.rst deleted file mode 100644 --- a/Doc/library/packaging.tests.pypi_server.rst +++ /dev/null @@ -1,105 +0,0 @@ -:mod:`packaging.tests.pypi_server` --- PyPI mock server -======================================================= - -.. module:: packaging.tests.pypi_server - :synopsis: Mock server used to test PyPI-related modules and commands. - - -When you are testing code that works with Packaging, you might find these tools -useful. - - -The mock server ---------------- - -.. class:: PyPIServer - - PyPIServer is a class that implements an HTTP server running in a separate - thread. All it does is record the requests for further inspection. The recorded - data is available under ``requests`` attribute. The default - HTTP response can be overridden with the ``default_response_status``, - ``default_response_headers`` and ``default_response_data`` attributes. - - By default, when accessing the server with urls beginning with `/simple/`, - the server also record your requests, but will look for files under - the `/tests/pypiserver/simple/` path. - - You can tell the sever to serve static files for other paths. This could be - accomplished by using the `static_uri_paths` parameter, as below:: - - server = PyPIServer(static_uri_paths=["first_path", "second_path"]) - - - You need to create the content that will be served under the - `/tests/pypiserver/default` path. If you want to serve content from another - place, you also can specify another filesystem path (which needs to be under - `tests/pypiserver/`. This will replace the default behavior of the server, and - it will not serve content from the `default` dir :: - - server = PyPIServer(static_filesystem_paths=["path/to/your/dir"]) - - - If you just need to add some paths to the existing ones, you can do as shown, - keeping in mind that the server will always try to load paths in reverse order - (e.g here, try "another/super/path" then the default one) :: - - server = PyPIServer(test_static_path="another/super/path") - server = PyPIServer("another/super/path") - # or - server.static_filesystem_paths.append("another/super/path") - - - As a result of what, in your tests, while you need to use the PyPIServer, in - order to isolates the test cases, the best practice is to place the common files - in the `default` folder, and to create a directory for each specific test case:: - - server = PyPIServer(static_filesystem_paths = ["default", "test_pypi_server"], - static_uri_paths=["simple", "external"]) - - -Base class and decorator for tests ----------------------------------- - -.. class:: PyPIServerTestCase - - ``PyPIServerTestCase`` is a test case class with setUp and tearDown methods that - take care of a single PyPIServer instance attached as a ``pypi`` attribute on - the test class. Use it as one of the base classes in your test case:: - - - class UploadTestCase(PyPIServerTestCase): - - def test_something(self): - cmd = self.prepare_command() - cmd.ensure_finalized() - cmd.repository = self.pypi.full_address - cmd.run() - - environ, request_data = self.pypi.requests[-1] - self.assertEqual(request_data, EXPECTED_REQUEST_DATA) - - -.. decorator:: use_pypi_server - - You also can use a decorator for your tests, if you do not need the same server - instance along all you test case. So, you can specify, for each test method, - some initialisation parameters for the server. 
- - For this, you need to add a `server` parameter to your method, like this:: - - class SampleTestCase(TestCase): - - @use_pypi_server() - def test_something(self, server): - ... - - - The decorator will instantiate the server for you, and run and stop it just - before and after your method call. You also can pass the server initializer, - just like this:: - - class SampleTestCase(TestCase): - - @use_pypi_server("test_case_name") - def test_something(self, server): - ... diff --git a/Doc/library/packaging.util.rst b/Doc/library/packaging.util.rst deleted file mode 100644 --- a/Doc/library/packaging.util.rst +++ /dev/null @@ -1,155 +0,0 @@ -:mod:`packaging.util` --- Miscellaneous utility functions -========================================================= - -.. module:: packaging.util - :synopsis: Miscellaneous utility functions. - - -This module contains various helpers for the other modules. - -.. XXX a number of functions are missing, but the module may be split first - (it's ginormous right now, some things could go to compat for example) - -.. function:: get_platform() - - Return a string that identifies the current platform. This is used mainly to - distinguish platform-specific build directories and platform-specific built - distributions. Typically includes the OS name and version and the - architecture (as supplied by 'os.uname()'), although the exact information - included depends on the OS; e.g. for IRIX the architecture isn't particularly - important (IRIX only runs on SGI hardware), but for Linux the kernel version - isn't particularly important. - - Examples of returned values: - - * ``linux-i586`` - * ``linux-alpha`` - * ``solaris-2.6-sun4u`` - * ``irix-5.3`` - * ``irix64-6.2`` - - For non-POSIX platforms, currently just returns ``sys.platform``. - - For Mac OS X systems the OS version reflects the minimal version on which - binaries will run (that is, the value of ``MACOSX_DEPLOYMENT_TARGET`` - during the build of Python), not the OS version of the current system. - - For universal binary builds on Mac OS X the architecture value reflects - the univeral binary status instead of the architecture of the current - processor. For 32-bit universal binaries the architecture is ``fat``, - for 64-bit universal binaries the architecture is ``fat64``, and - for 4-way universal binaries the architecture is ``universal``. Starting - from Python 2.7 and Python 3.2 the architecture ``fat3`` is used for - a 3-way universal build (ppc, i386, x86_64) and ``intel`` is used for - a univeral build with the i386 and x86_64 architectures - - Examples of returned values on Mac OS X: - - * ``macosx-10.3-ppc`` - - * ``macosx-10.3-fat`` - - * ``macosx-10.5-universal`` - - * ``macosx-10.6-intel`` - - .. XXX reinvention of platform module? - - -.. function:: convert_path(pathname) - - Return 'pathname' as a name that will work on the native filesystem, i.e. - split it on '/' and put it back together again using the current directory - separator. Needed because filenames in the setup script are always supplied - in Unix style, and have to be converted to the local convention before we - can actually use them in the filesystem. Raises :exc:`ValueError` on - non-Unix-ish systems if *pathname* either starts or ends with a slash. - - -.. function:: change_root(new_root, pathname) - - Return *pathname* with *new_root* prepended. 
If *pathname* is relative, this - is equivalent to ``os.path.join(new_root,pathname)`` Otherwise, it requires - making *pathname* relative and then joining the two, which is tricky on - DOS/Windows. - - -.. function:: check_environ() - - Ensure that 'os.environ' has all the environment variables we guarantee that - users can use in config files, command-line options, etc. Currently this - includes: - - * :envvar:`HOME` - user's home directory (Unix only) - * :envvar:`PLAT` - description of the current platform, including hardware - and OS (see :func:`get_platform`) - - -.. function:: find_executable(executable, path=None) - - Search the path for a given executable name. - - -.. function:: execute(func, args, msg=None, dry_run=False) - - Perform some action that affects the outside world (for instance, writing to - the filesystem). Such actions are special because they are disabled by the - *dry_run* flag. This method takes care of all that bureaucracy for you; - all you have to do is supply the function to call and an argument tuple for - it (to embody the "external action" being performed), and an optional message - to print. - - -.. function:: newer(source, target) - - Return true if *source* exists and is more recently modified than *target*, - or if *source* exists and *target* doesn't. Return false if both exist and - *target* is the same age or newer than *source*. Raise - :exc:`PackagingFileError` if *source* does not exist. - - -.. function:: strtobool(val) - - Convert a string representation of truth to true (1) or false (0). - - True values are ``y``, ``yes``, ``t``, ``true``, ``on`` and ``1``; false - values are ``n``, ``no``, ``f``, ``false``, ``off`` and ``0``. Raises - :exc:`ValueError` if *val* is anything else. - - -.. function:: byte_compile(py_files, optimize=0, force=0, prefix=None, \ - base_dir=None, dry_run=0, direct=None) - - Byte-compile a collection of Python source files to either :file:`.pyc` or - :file:`.pyo` files in a :file:`__pycache__` subdirectory (see :pep:`3147`), - or to the same directory when using the distutils2 backport on Python - versions older than 3.2. - - *py_files* is a list of files to compile; any files that don't end in - :file:`.py` are silently skipped. *optimize* must be one of the following: - - * ``0`` - don't optimize (generate :file:`.pyc`) - * ``1`` - normal optimization (like ``python -O``) - * ``2`` - extra optimization (like ``python -OO``) - - This function is independent from the running Python's :option:`-O` or - :option:`-B` options; it is fully controlled by the parameters passed in. - - If *force* is true, all files are recompiled regardless of timestamps. - - The source filename encoded in each :term:`bytecode` file defaults to the filenames - listed in *py_files*; you can modify these with *prefix* and *basedir*. - *prefix* is a string that will be stripped off of each source filename, and - *base_dir* is a directory name that will be prepended (after *prefix* is - stripped). You can supply either or both (or neither) of *prefix* and - *base_dir*, as you wish. - - If *dry_run* is true, doesn't actually do anything that would affect the - filesystem. - - Byte-compilation is either done directly in this interpreter process with the - standard :mod:`py_compile` module, or indirectly by writing a temporary - script and executing it. Normally, you should let :func:`byte_compile` - figure out to use direct compilation or not (see the source for details). 
- The *direct* flag is used by the script generated in indirect mode; unless - you know what you're doing, leave it set to ``None``. diff --git a/Doc/library/packaging.version.rst b/Doc/library/packaging.version.rst deleted file mode 100644 --- a/Doc/library/packaging.version.rst +++ /dev/null @@ -1,104 +0,0 @@ -:mod:`packaging.version` --- Version number classes -=================================================== - -.. module:: packaging.version - :synopsis: Classes that represent project version numbers. - - -This module contains classes and functions useful to deal with version numbers. -It's an implementation of version specifiers `as defined in PEP 345 -`_. - - -Version numbers ---------------- - -.. class:: NormalizedVersion(self, s, error_on_huge_major_num=True) - - A specific version of a distribution, as described in PEP 345. *s* is a - string object containing the version number (for example ``'1.2b1'``), - *error_on_huge_major_num* a boolean specifying whether to consider an - apparent use of a year or full date as the major version number an error. - - The rationale for the second argument is that there were projects using years - or full dates as version numbers, which could cause problems with some - packaging systems sorting. - - Instances of this class can be compared and sorted:: - - >>> NormalizedVersion('1.2b1') < NormalizedVersion('1.2') - True - - :class:`NormalizedVersion` is used internally by :class:`VersionPredicate` to - do its work. - - -.. class:: IrrationalVersionError - - Exception raised when an invalid string is given to - :class:`NormalizedVersion`. - - >>> NormalizedVersion("irrational_version_number") - ... - IrrationalVersionError: irrational_version_number - - -.. function:: suggest_normalized_version(s) - - Before standardization in PEP 386, various schemes were in use. Packaging - provides a function to try to convert any string to a valid, normalized - version:: - - >>> suggest_normalized_version('2.1-rc1') - 2.1c1 - - - If :func:`suggest_normalized_version` can't make sense of the given string, - it will return ``None``:: - - >>> print(suggest_normalized_version('not a version')) - None - - -Version predicates ------------------- - -.. class:: VersionPredicate(predicate) - - This class deals with the parsing of field values like - ``ProjectName (>=version)``. - - .. method:: match(version) - - Test if a version number matches the predicate: - - >>> version = VersionPredicate("ProjectName (<1.2, >1.0)") - >>> version.match("1.2.1") - False - >>> version.match("1.1.1") - True - - -Validation helpers ------------------- - -If you want to use :term:`LBYL`-style checks instead of instantiating the -classes and catching :class:`IrrationalVersionError` and :class:`ValueError`, -you can use these functions: - -.. function:: is_valid_version(predicate) - - Check whether the given string is a valid version number. Example of valid - strings: ``'1.2'``, ``'4.2.0.dev4'``, ``'2.5.4.post2'``. - - -.. function:: is_valid_versions(predicate) - - Check whether the given string is a valid value for specifying multiple - versions, such as in the Requires-Python field. Example: ``'2.7, >=3.2'``. - - -.. function:: is_valid_predicate(predicate) - - Check whether the given string is a valid version predicate. Examples: - ``'some.project == 4.5, <= 4.7'``, ``'speciallib (> 1.0, != 1.4.2, < 2.0)'``. 
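The validation helpers just described are documented only in prose; as an illustrative aside (not part of the deleted file), here is a minimal doctest-style sketch of such LBYL checks, assuming the helpers simply return booleans for the example strings the text above calls valid or invalid::

    >>> from packaging.version import is_valid_version, is_valid_versions
    >>> from packaging.version import is_valid_predicate
    >>> is_valid_version('4.2.0.dev4')     # listed above as a valid version
    True
    >>> is_valid_version('not a version')  # not a parseable version number
    False
    >>> is_valid_versions('2.7, >=3.2')    # multi-version field value from the text above
    True
    >>> is_valid_predicate('speciallib (> 1.0, != 1.4.2, < 2.0)')
    True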
diff --git a/Doc/library/python.rst b/Doc/library/python.rst --- a/Doc/library/python.rst +++ b/Doc/library/python.rst @@ -25,5 +25,4 @@ inspect.rst site.rst fpectl.rst - packaging.rst distutils.rst diff --git a/Doc/library/site.rst b/Doc/library/site.rst --- a/Doc/library/site.rst +++ b/Doc/library/site.rst @@ -134,9 +134,9 @@ :func:`getuserbase` hasn't been called yet. Default value is :file:`~/.local` for UNIX and Mac OS X non-framework builds, :file:`~/Library/Python/{X.Y}` for Mac framework builds, and - :file:`{%APPDATA%}\\Python` for Windows. This value is used by Packaging to + :file:`{%APPDATA%}\\Python` for Windows. This value is used by Distutils to compute the installation directories for scripts, data files, Python modules, - etc. for the :ref:`user installation scheme `. + etc. for the :ref:`user installation scheme `. See also :envvar:`PYTHONUSERBASE`. diff --git a/Doc/library/venv.rst b/Doc/library/venv.rst --- a/Doc/library/venv.rst +++ b/Doc/library/venv.rst @@ -35,8 +35,7 @@ in it with a ``home`` key pointing to the Python installation the command was run from. It also creates a ``bin`` (or ``Scripts`` on Windows) subdirectory containing a copy of the ``python`` binary (or -binaries, in the case of Windows) and the ``pysetup3`` script (to -facilitate easy installation of packages from PyPI into the new virtualenv). +binaries, in the case of Windows). It also creates an (initially empty) ``lib/pythonX.Y/site-packages`` subdirectory (on Windows, this is ``Lib\site-packages``). diff --git a/Doc/packaging/builtdist.rst b/Doc/packaging/builtdist.rst deleted file mode 100644 --- a/Doc/packaging/builtdist.rst +++ /dev/null @@ -1,302 +0,0 @@ -.. _packaging-built-dist: - -**************************** -Creating Built Distributions -**************************** - -A "built distribution" is what you're probably used to thinking of either as a -"binary package" or an "installer" (depending on your background). It's not -necessarily binary, though, because it might contain only Python source code -and/or byte-code; and we don't call it a package, because that word is already -spoken for in Python. (And "installer" is a term specific to the world of -mainstream desktop systems.) - -A built distribution is how you make life as easy as possible for installers of -your module distribution: for users of RPM-based Linux systems, it's a binary -RPM; for Windows users, it's an executable installer; for Debian-based Linux -users, it's a Debian package; and so forth. Obviously, no one person will be -able to create built distributions for every platform under the sun, so the -Distutils are designed to enable module developers to concentrate on their -specialty---writing code and creating source distributions---while an -intermediary species called *packagers* springs up to turn source distributions -into built distributions for as many platforms as there are packagers. - -Of course, the module developer could be his own packager; or the packager could -be a volunteer "out there" somewhere who has access to a platform which the -original developer does not; or it could be software periodically grabbing new -source distributions and turning them into built distributions for as many -platforms as the software has access to. Regardless of who they are, a packager -uses the setup script and the :command:`bdist` command family to generate built -distributions. 
- -As a simple example, if I run the following command in the Distutils source -tree:: - - python setup.py bdist - -then the Distutils builds my module distribution (the Distutils itself in this -case), does a "fake" installation (also in the :file:`build` directory), and -creates the default type of built distribution for my platform. The default -format for built distributions is a "dumb" tar file on Unix, and a simple -executable installer on Windows. (That tar file is considered "dumb" because it -has to be unpacked in a specific location to work.) - -Thus, the above command on a Unix system creates -:file:`Distutils-1.0.{plat}.tar.gz`; unpacking this tarball from the right place -installs the Distutils just as though you had downloaded the source distribution -and run ``python setup.py install``. (The "right place" is either the root of -the filesystem or Python's :file:`{prefix}` directory, depending on the options -given to the :command:`bdist_dumb` command; the default is to make dumb -distributions relative to :file:`{prefix}`.) - -Obviously, for pure Python distributions, this isn't any simpler than just -running ``python setup.py install``\ ---but for non-pure distributions, which -include extensions that would need to be compiled, it can mean the difference -between someone being able to use your extensions or not. And creating "smart" -built distributions, such as an executable installer for -Windows, is far more convenient for users even if your distribution doesn't -include any extensions. - -The :command:`bdist` command has a :option:`--formats` option, similar to the -:command:`sdist` command, which you can use to select the types of built -distribution to generate: for example, :: - - python setup.py bdist --format=zip - -would, when run on a Unix system, create :file:`Distutils-1.0.{plat}.zip`\ ----again, this archive would be unpacked from the root directory to install the -Distutils. - -The available formats for built distributions are: - -+-------------+------------------------------+---------+ -| Format | Description | Notes | -+=============+==============================+=========+ -| ``gztar`` | gzipped tar file | (1),(3) | -| | (:file:`.tar.gz`) | | -+-------------+------------------------------+---------+ -| ``tar`` | tar file (:file:`.tar`) | \(3) | -+-------------+------------------------------+---------+ -| ``zip`` | zip file (:file:`.zip`) | (2),(4) | -+-------------+------------------------------+---------+ -| ``wininst`` | self-extracting ZIP file for | \(4) | -| | Windows | | -+-------------+------------------------------+---------+ -| ``msi`` | Microsoft Installer. | | -+-------------+------------------------------+---------+ - - -Notes: - -(1) - default on Unix - -(2) - default on Windows - -(3) - requires external utilities: :program:`tar` and possibly one of :program:`gzip` - or :program:`bzip2` - -(4) - requires either external :program:`zip` utility or :mod:`zipfile` module (part - of the standard Python library since Python 1.6) - -You don't have to use the :command:`bdist` command with the :option:`--formats` -option; you can also use the command that directly implements the format you're -interested in. Some of these :command:`bdist` "sub-commands" actually generate -several similar formats; for instance, the :command:`bdist_dumb` command -generates all the "dumb" archive formats (``tar``, ``gztar``, and -``zip``). 
The :command:`bdist` sub-commands, and the formats generated by -each, are: - -+--------------------------+-----------------------+ -| Command | Formats | -+==========================+=======================+ -| :command:`bdist_dumb` | tar, gztar, zip | -+--------------------------+-----------------------+ -| :command:`bdist_wininst` | wininst | -+--------------------------+-----------------------+ -| :command:`bdist_msi` | msi | -+--------------------------+-----------------------+ - -The following sections give details on the individual :command:`bdist_\*` -commands. - - -.. _packaging-creating-dumb: - -Creating dumb built distributions -================================= - -.. XXX Need to document absolute vs. prefix-relative packages here, but first - I have to implement it! - - -.. _packaging-creating-wininst: - -Creating Windows Installers -=========================== - -Executable installers are the natural format for binary distributions on -Windows. They display a nice graphical user interface, display some information -about the module distribution to be installed taken from the metadata in the -setup script, let the user select a few options, and start or cancel the -installation. - -Since the metadata is taken from the setup script, creating Windows installers -is usually as easy as running:: - - python setup.py bdist_wininst - -or the :command:`bdist` command with the :option:`--formats` option:: - - python setup.py bdist --formats=wininst - -If you have a pure module distribution (only containing pure Python modules and -packages), the resulting installer will be version independent and have a name -like :file:`foo-1.0.win32.exe`. These installers can even be created on Unix -platforms or Mac OS X. - -If you have a non-pure distribution, the extensions can only be created on a -Windows platform, and will be Python version dependent. The installer filename -will reflect this and now has the form :file:`foo-1.0.win32-py2.0.exe`. You -have to create a separate installer for every Python version you want to -support. - -The installer will try to compile pure modules into :term:`bytecode` after installation -on the target system in normal and optimizing mode. If you don't want this to -happen for some reason, you can run the :command:`bdist_wininst` command with -the :option:`--no-target-compile` and/or the :option:`--no-target-optimize` -option. - -By default the installer will display the cool "Python Powered" logo when it is -run, but you can also supply your own 152x261 bitmap which must be a Windows -:file:`.bmp` file with the :option:`--bitmap` option. - -The installer will also display a large title on the desktop background window -when it is run, which is constructed from the name of your distribution and the -version number. This can be changed to another text by using the -:option:`--title` option. - -The installer file will be written to the "distribution directory" --- normally -:file:`dist/`, but customizable with the :option:`--dist-dir` option. - -.. _packaging-cross-compile-windows: - -Cross-compiling on Windows -========================== - -Starting with Python 2.6, packaging is capable of cross-compiling between -Windows platforms. In practice, this means that with the correct tools -installed, you can use a 32bit version of Windows to create 64bit extensions -and vice-versa. - -To build for an alternate platform, specify the :option:`--plat-name` option -to the build command. Valid values are currently 'win32', 'win-amd64' and -'win-ia64'. 
For example, on a 32bit version of Windows, you could execute:: - - python setup.py build --plat-name=win-amd64 - -to build a 64bit version of your extension. The Windows Installers also -support this option, so the command:: - - python setup.py build --plat-name=win-amd64 bdist_wininst - -would create a 64bit installation executable on your 32bit version of Windows. - -To cross-compile, you must download the Python source code and cross-compile -Python itself for the platform you are targeting - it is not possible from a -binary installation of Python (as the .lib and related files for other platforms are -not included.) In practice, this means the user of a 32 bit operating -system will need to use Visual Studio 2008 to open the -:file:`PCBuild/PCbuild.sln` solution in the Python source tree and build the -"x64" configuration of the 'pythoncore' project before cross-compiling -extensions is possible. - -Note that by default, Visual Studio 2008 does not install 64bit compilers or -tools. You may need to re-execute the Visual Studio setup process and select -these tools (using Control Panel->[Add/Remove] Programs is a convenient way to -check or modify your existing install.) - -.. _packaging-postinstallation-script: - -The Postinstallation script ---------------------------- - -Starting with Python 2.3, a postinstallation script can be specified with the -:option:`--install-script` option. The basename of the script must be -specified, and the script filename must also be listed in the scripts argument -to the setup function. - -This script will be run at installation time on the target system after all the -files have been copied, with ``argv[1]`` set to :option:`-install`, and again at -uninstallation time before the files are removed with ``argv[1]`` set to -:option:`-remove`. - -The installation script runs embedded in the Windows installer; all output -(``sys.stdout``, ``sys.stderr``) is redirected into a buffer and will be -displayed in the GUI after the script has finished. - -Some functions especially useful in this context are available as additional -built-in functions in the installation script. - -.. currentmodule:: bdist_wininst-postinst-script - -.. function:: directory_created(path) - file_created(path) - - These functions should be called when a directory or file is created by the - postinstall script at installation time. They will register *path* with the - uninstaller, so that it will be removed when the distribution is uninstalled. - To be safe, directories are only removed if they are empty. - - -.. function:: get_special_folder_path(csidl_string) - - This function can be used to retrieve special folder locations on Windows like - the Start Menu or the Desktop. It returns the full path to the folder. - *csidl_string* must be one of the following strings:: - - "CSIDL_APPDATA" - - "CSIDL_COMMON_STARTMENU" - "CSIDL_STARTMENU" - - "CSIDL_COMMON_DESKTOPDIRECTORY" - "CSIDL_DESKTOPDIRECTORY" - - "CSIDL_COMMON_STARTUP" - "CSIDL_STARTUP" - - "CSIDL_COMMON_PROGRAMS" - "CSIDL_PROGRAMS" - - "CSIDL_FONTS" - - If the folder cannot be retrieved, :exc:`OSError` is raised. - - Which folders are available depends on the exact Windows version, and probably - also the configuration. For details refer to Microsoft's documentation of the - :c:func:`SHGetSpecialFolderPath` function. - - -.. function:: create_shortcut(target, description, filename[, arguments[, workdir[, iconpath[, iconindex]]]]) - - This function creates a shortcut. *target* is the path to the program to be - started by the shortcut.
*description* is the description of the shortcut. - *filename* is the title of the shortcut that the user will see. *arguments* - specifies the command-line arguments, if any. *workdir* is the working directory - for the program. *iconpath* is the file containing the icon for the shortcut, - and *iconindex* is the index of the icon in the file *iconpath*. Again, for - details consult the Microsoft documentation for the :class:`IShellLink` - interface. - - -Vista User Access Control (UAC) -=============================== - -Starting with Python 2.6, bdist_wininst supports a :option:`--user-access-control` -option. The default is 'none' (meaning no UAC handling is done), and other -valid values are 'auto' (meaning prompt for UAC elevation if Python was -installed for all users) and 'force' (meaning always prompt for elevation). diff --git a/Doc/packaging/commandhooks.rst b/Doc/packaging/commandhooks.rst deleted file mode 100644 --- a/Doc/packaging/commandhooks.rst +++ /dev/null @@ -1,47 +0,0 @@ -.. TODO integrate this in commandref and configfile - -.. _packaging-command-hooks: - -============= -Command hooks -============= - -Packaging provides a way of extending its commands by the use of pre- and -post-command hooks. Hooks are Python functions (or any callable object) that -take a command object as argument. They're specified in :ref:`config files -` using their fully qualified names. After a -command is finalized (its options are processed), the pre-command hooks are -executed, then the command itself is run, and finally the post-command hooks are -executed. - -See also global setup hooks in :ref:`setupcfg-spec`. - - -.. _packaging-finding-hooks: - -Finding hooks -============= - -As a hook is configured with a Python dotted name, it must either be defined in -a module installed on the system, or in a module present in the project -directory, where the :file:`setup.cfg` file lives:: - - # file: _setuphooks.py - - def hook(install_cmd): - metadata = install_cmd.dist.metadata - print('Hooked while installing %r %s!' % (metadata['Name'], - metadata['Version'])) - -Then you need to configure it in :file:`setup.cfg`:: - - [install_dist] - pre-hook.a = _setuphooks.hook - -Packaging will add the project directory to :data:`sys.path` and find the -``_setuphooks`` module. - -Hooks defined in different config files (system-wide, user-wide and -project-wide) do not override each other as long as they are specified with -different aliases (additional names after the dot). The alias in the example -above is ``a``. diff --git a/Doc/packaging/commandref.rst b/Doc/packaging/commandref.rst deleted file mode 100644 --- a/Doc/packaging/commandref.rst +++ /dev/null @@ -1,374 +0,0 @@ -.. _packaging-command-reference: - -***************** -Command Reference -***************** - -This reference briefly documents all standard Packaging commands and some of -their options. - -.. FIXME does not work: Use pysetup run --help-commands to list all - standard and extra commands availavble on your system, with their - description. Use pysetup run --help to get help about the options - of one command. - -.. XXX sections from this document should be merged with other docs (e.g. check - and upload with uploading.rst, install_* with install/install.rst, etc.); - there is no value in partially duplicating information. this file could - however serve as an index, i.e. 
just a list of all commands with links to - every section that describes options or usage - - -Preparing distributions -======================= - -:command:`check` ----------------- - -Perform some tests on the metadata of a distribution. - -For example, it verifies that all required metadata fields are provided in the -:file:`setup.cfg` file. - -.. TODO document reST checks - - -:command:`test` ---------------- - -Run a test suite. - -When doing test-driven development, or running automated builds that need -testing before they are installed for downloading or use, it's often useful to -be able to run a project's unit tests without actually installing the project -anywhere. The :command:`test` command runs project's unit tests without -actually installing it, by temporarily putting the project's source on -:data:`sys.path`, after first running :command:`build_ext -i` to ensure that any -C extensions are built. - -You can use this command in one of two ways: either by specifying a -unittest-compatible test suite for your project (or any callable that returns -it) or by passing a test runner function that will run your tests and display -results in the console. Both options take a Python dotted name in the form -``package.module.callable`` to specify the object to use. - -If none of these options are specified, Packaging will try to perform test -discovery using either unittest (for Python 3.2 and higher) or unittest2 (for -older versions, if installed). - -.. this is a pseudo-command name used to disambiguate the options in indexes and - links -.. program:: packaging test - -.. cmdoption:: --suite=NAME, -s NAME - - Specify the test suite (or module, class, or method) to be run. The default - for this option can be set by in the project's :file:`setup.cfg` file: - - .. code-block:: cfg - - [test] - suite = mypackage.tests.get_all_tests - -.. cmdoption:: --runner=NAME, -r NAME - - Specify the test runner to be called. - - -:command:`config` ------------------ - -Perform distribution configuration. - - -The build step -============== - -This step is mainly useful to compile C/C++ libraries or extension modules. The -build commands can be run manually to check for syntax errors or packaging -issues (for example if the addition of a new source file was forgotten in the -:file:`setup.cfg` file), and is also run automatically by commands which need -it. Packaging checks the mtime of source and built files to avoid re-building -if it's not necessary. - - -:command:`build` ----------------- - -Build all files of a distribution, delegating to the other :command:`build_*` -commands to do the work. - - -:command:`build_clib` ---------------------- - -Build C libraries. - - -:command:`build_ext` --------------------- - -Build C/C++ extension modules. - - -:command:`build_py` -------------------- - -Build the Python modules (just copy them to the build directory) and -:term:`byte-compile ` them to :file:`.pyc` and/or :file:`.pyo` files. - -The byte compilation is controlled by two sets of options: - -- ``--compile`` and ``--no-compile`` are used to control the creation of - :file:`.pyc` files; the default is ``--no-compile``. - -- ``--optimize N`` (or ``-ON``) is used to control the creation of :file:`.pyo` - files: ``-O1`` turns on basic optimizations, ``-O2`` also discards docstrings, - ``-O0`` does not create :file:`.pyo` files; the default is ``-O0``. - -You can mix and match these options: for example, ``--no-compile --optimize 2`` -will create :file:`.pyo` files but no :file:`.pyc` files. - -.. 
XXX these option roles do not work - -Calling Python with :option:`-O` or :option:`-B` does not control the creation -of bytecode files, only the options described above do. - - -:command:`build_scripts` ------------------------- -Build the scripts (just copy them to the build directory and adjust their -shebang if they're Python scripts). - - -:command:`clean` ----------------- - -Clean the build tree of the release. - -.. program:: packaging clean - -.. cmdoption:: --all, -a - - Remove build directories for modules, scripts, etc., not only temporary build - by-products. - - -Creating source and built distributions -======================================= - -:command:`sdist` ----------------- - -Build a source distribution for a release. - -It is recommended that you always build and upload a source distribution. Users -of OSes with easy access to compilers and users of advanced packaging tools will -prefer to compile from source rather than using pre-built distributions. For -Windows users, providing a binary installer is also recommended practice. - - -:command:`bdist` ----------------- - -Build a binary distribution for a release. - -This command will call other :command:`bdist_*` commands to create one or more -distributions depending on the options given. The default is to create a -.tar.gz archive on Unix and a zip archive on Windows or OS/2. - -.. program:: packaging bdist - -.. cmdoption:: --formats - - Binary formats to build (comma-separated list). - -.. cmdoption:: --show-formats - - Dump list of available formats. - - -:command:`bdist_dumb` ---------------------- - -Build a "dumb" installer, a simple archive of files that could be unpacked under -``$prefix`` or ``$exec_prefix``. - - -:command:`bdist_wininst` ------------------------- - -Build a Windows installer. - - -:command:`bdist_msi` --------------------- - -Build a `Microsoft Installer`_ (.msi) file. - -.. _Microsoft Installer: http://msdn.microsoft.com/en-us/library/cc185688(VS.85).aspx - -In most cases, the :command:`bdist_msi` installer is a better choice than the -:command:`bdist_wininst` installer, because it provides better support for Win64 -platforms, allows administrators to perform non-interactive installations, and -allows installation through group policies. - - -Publishing distributions -======================== - -:command:`register` -------------------- - -This command registers the current release with the Python Package Index. This -is described in more detail in :PEP:`301`. - -.. TODO explain user and project registration with the web UI - - -:command:`upload` ------------------ - -Upload source and/or binary distributions to PyPI. - -The distributions have to be built on the same command line as the -:command:`upload` command; see :ref:`packaging-package-upload` for more info. - -.. program:: packaging upload - -.. cmdoption:: --sign, -s - - Sign each uploaded file using GPG (GNU Privacy Guard). The ``gpg`` program - must be available for execution on the system ``PATH``. - -.. cmdoption:: --identity=NAME, -i NAME - - Specify the identity or key name for GPG to use when signing. The value of - this option will be passed through the ``--local-user`` option of the - ``gpg`` program. - -.. cmdoption:: --show-response - - Display the full response text from server; this is useful for debugging - PyPI problems. - -.. cmdoption:: --repository=URL, -r URL - - The URL of the repository to upload to. Defaults to - http://pypi.python.org/pypi (i.e., the main PyPI installation). - -.. 
cmdoption:: --upload-docs - - Also run :command:`upload_docs`. Mainly useful as a default value in - :file:`setup.cfg` (on the command line, it's shorter to just type both - commands). - - -:command:`upload_docs` ----------------------- - -Upload HTML documentation to PyPI. - -PyPI now supports publishing project documentation at a URI of the form -``http://packages.python.org/``. :command:`upload_docs` will create -the necessary zip file out of a documentation directory and will post to the -repository. - -Note that to upload the documentation of a project, the corresponding version -must already be registered with PyPI, using the :command:`register` command --- -just like with :command:`upload`. - -Assuming there is an ``Example`` project with documentation in the subdirectory -:file:`docs`, for example:: - - Example/ - example.py - setup.cfg - docs/ - build/ - html/ - index.html - tips_tricks.html - conf.py - index.txt - tips_tricks.txt - -You can simply specify the directory with the HTML files in your -:file:`setup.cfg` file: - -.. code-block:: cfg - - [upload_docs] - upload-dir = docs/build/html - - -.. program:: packaging upload_docs - -.. cmdoption:: --upload-dir - - The directory to be uploaded to the repository. By default documentation - is searched for in ``docs`` (or ``doc``) directory in project root. - -.. cmdoption:: --show-response - - Display the full response text from server; this is useful for debugging - PyPI problems. - -.. cmdoption:: --repository=URL, -r URL - - The URL of the repository to upload to. Defaults to - http://pypi.python.org/pypi (i.e., the main PyPI installation). - - -The install step -================ - -These commands are used by end-users of a project using :program:`pysetup` or -another compatible installer. Each command will run the corresponding -:command:`build_*` command and then move the built files to their destination on -the target system. - - -:command:`install_dist` ------------------------ - -Install a distribution, delegating to the other :command:`install_*` commands to -do the work. See :ref:`packaging-how-install-works` for complete usage -instructions. - - -:command:`install_data` ------------------------ - -Install data files. - - -:command:`install_distinfo` ---------------------------- - -Install files recording details of the installation as specified in :PEP:`376`. - - -:command:`install_headers` --------------------------- - -Install C/C++ header files. - - -:command:`install_lib` ----------------------- - -Install all modules (extensions and pure Python). - -.. XXX what about C libraries created with build_clib? - -Similarly to ``build_py``, there are options to control the compilation of -Python code to :term:`bytecode` files (see above). By default, :file:`.pyc` -files will be created (``--compile``) and :file:`.pyo` files will not -(``--optimize 0``). - - -:command:`install_scripts` --------------------------- - -Install scripts. diff --git a/Doc/packaging/configfile.rst b/Doc/packaging/configfile.rst deleted file mode 100644 --- a/Doc/packaging/configfile.rst +++ /dev/null @@ -1,125 +0,0 @@ -.. _packaging-setup-config: - -************************************ -Writing the Setup Configuration File -************************************ - -Often, it's not possible to write down everything needed to build a distribution -*a priori*: you may need to get some information from the user, or from the -user's system, in order to proceed. 
As long as that information is fairly -simple---a list of directories to search for C header files or libraries, for -example---then providing a configuration file, :file:`setup.cfg`, for users to -edit is a cheap and easy way to solicit it. Configuration files also let you -provide default values for any command option, which the installer can then -override either on the command line or by editing the config file. - -The setup configuration file is a useful middle-ground between the setup script ----which, ideally, would be opaque to installers [#]_---and the command line to -the setup script, which is outside of your control and entirely up to the -installer. In fact, :file:`setup.cfg` (and any other Distutils configuration -files present on the target system) are processed after the contents of the -setup script, but before the command line. This has several useful -consequences: - -.. If you have more advanced needs, such as determining which extensions to - build based on what capabilities are present on the target system, then you - need the Distutils auto-configuration facility. This started to appear in - Distutils 0.9 but, as of this writing, isn't mature or stable enough yet - for real-world use. - -* installers can override some of what you put in :file:`setup.py` by editing - :file:`setup.cfg` - -* you can provide non-standard defaults for options that are not easily set in - :file:`setup.py` - -* installers can override anything in :file:`setup.cfg` using the command-line - options to :file:`setup.py` - -The basic syntax of the configuration file is simple:: - - [command] - option = value - ... - -where *command* is one of the Distutils commands (e.g. :command:`build_py`, -:command:`install_dist`), and *option* is one of the options that command supports. -Any number of options can be supplied for each command, and any number of -command sections can be included in the file. Blank lines are ignored, as are -comments, which run from a ``'#'`` character until the end of the line. Long -option values can be split across multiple lines simply by indenting the -continuation lines. - -You can find out the list of options supported by a particular command with the -universal :option:`--help` option, e.g. :: - - > python setup.py --help build_ext - [...] - Options for 'build_ext' command: - --build-lib (-b) directory for compiled extension modules - --build-temp (-t) directory for temporary files (build by-products) - --inplace (-i) ignore build-lib and put compiled extensions into the - source directory alongside your pure Python modules - --include-dirs (-I) list of directories to search for header files - --define (-D) C preprocessor macros to define - --undef (-U) C preprocessor macros to undefine - --swig-opts list of SWIG command-line options - [...] - -.. XXX do we want to support ``setup.py --help metadata``? - -Note that an option spelled :option:`--foo-bar` on the command line is spelled -:option:`foo_bar` in configuration files. - -For example, say you want your extensions to be built "in-place"---that is, you -have an extension :mod:`pkg.ext`, and you want the compiled extension file -(:file:`ext.so` on Unix, say) to be put in the same source directory as your -pure Python modules :mod:`pkg.mod1` and :mod:`pkg.mod2`. 
You can always use the -:option:`--inplace` option on the command line to ensure this:: - - python setup.py build_ext --inplace - -But this requires that you always specify the :command:`build_ext` command -explicitly, and remember to provide :option:`--inplace`. An easier way is to -"set and forget" this option, by encoding it in :file:`setup.cfg`, the -configuration file for this distribution:: - - [build_ext] - inplace = 1 - -This will affect all builds of this module distribution, whether or not you -explicitly specify :command:`build_ext`. If you include :file:`setup.cfg` in -your source distribution, it will also affect end-user builds---which is -probably a bad idea for this option, since always building extensions in-place -would break installation of the module distribution. In certain peculiar cases, -though, modules are built right in their installation directory, so this is -conceivably a useful ability. (Distributing extensions that expect to be built -in their installation directory is almost always a bad idea, though.) - -Another example: certain commands take options that vary from project to -project but not depending on the installation system, for example, -:command:`test` needs to know where your test suite is located and what test -runner to use; likewise, :command:`upload_docs` can find HTML documentation in -a :file:`doc` or :file:`docs` directory, but needs an option to find files in -:file:`docs/build/html`. Instead of having to type out these options each -time you want to run the command, you can put them in the project's -:file:`setup.cfg`:: - - [test] - suite = packaging.tests - - [upload_docs] - upload-dir = docs/build/html - - -.. seealso:: - - :ref:`packaging-config-syntax` in "Installing Python Projects" - More information on the configuration files is available in the manual for - system administrators. - - -.. rubric:: Footnotes - -.. [#] This ideal probably won't be achieved until auto-configuration is fully - supported by the Distutils. diff --git a/Doc/packaging/examples.rst b/Doc/packaging/examples.rst deleted file mode 100644 --- a/Doc/packaging/examples.rst +++ /dev/null @@ -1,334 +0,0 @@ -.. _packaging-examples: - -******** -Examples -******** - -This chapter provides a number of basic examples to help get started with -Packaging. - - -.. _packaging-pure-mod: - -Pure Python distribution (by module) -==================================== - -If you're just distributing a couple of modules, especially if they don't live -in a particular package, you can specify them individually using the -:option:`py_modules` option in the setup script. - -In the simplest case, you'll have two files to worry about: a setup script and -the single module you're distributing, :file:`foo.py` in this example:: - - / - setup.py - foo.py - -(In all diagrams in this section, ** will refer to the distribution root -directory.) A minimal setup script to describe this situation would be:: - - from packaging.core import setup - setup(name='foo', - version='1.0', - py_modules=['foo']) - -Note that the name of the distribution is specified independently with the -:option:`name` option, and there's no rule that says it has to be the same as -the name of the sole module in the distribution (although that's probably a good -convention to follow). However, the distribution name is used to generate -filenames, so you should stick to letters, digits, underscores, and hyphens. - -Since :option:`py_modules` is a list, you can of course specify multiple -modules, e.g. 
if you're distributing modules :mod:`foo` and :mod:`bar`, your -setup might look like this:: - - / - setup.py - foo.py - bar.py - -and the setup script might be :: - - from packaging.core import setup - setup(name='foobar', - version='1.0', - py_modules=['foo', 'bar']) - -You can put module source files into another directory, but if you have enough -modules to do that, it's probably easier to specify modules by package rather -than listing them individually. - - -.. _packaging-pure-pkg: - -Pure Python distribution (by package) -===================================== - -If you have more than a couple of modules to distribute, especially if they are -in multiple packages, it's probably easier to specify whole packages rather than -individual modules. This works even if your modules are not in a package; you -can just tell the Distutils to process modules from the root package, and that -works the same as any other package (except that you don't have to have an -:file:`__init__.py` file). - -The setup script from the last example could also be written as :: - - from packaging.core import setup - setup(name='foobar', - version='1.0', - packages=['']) - -(The empty string stands for the root package.) - -If those two files are moved into a subdirectory, but remain in the root -package, e.g.:: - - / - setup.py - src/ - foo.py - bar.py - -then you would still specify the root package, but you have to tell the -Distutils where source files in the root package live:: - - from packaging.core import setup - setup(name='foobar', - version='1.0', - package_dir={'': 'src'}, - packages=['']) - -More typically, though, you will want to distribute multiple modules in the same -package (or in sub-packages). For example, if the :mod:`foo` and :mod:`bar` -modules belong in package :mod:`foobar`, one way to lay out your source tree is - -:: - - / - setup.py - foobar/ - __init__.py - foo.py - bar.py - -This is in fact the default layout expected by the Distutils, and the one that -requires the least work to describe in your setup script:: - - from packaging.core import setup - setup(name='foobar', - version='1.0', - packages=['foobar']) - -If you want to put modules in directories not named for their package, then you -need to use the :option:`package_dir` option again. For example, if the -:file:`src` directory holds modules in the :mod:`foobar` package:: - - / - setup.py - src/ - __init__.py - foo.py - bar.py - -an appropriate setup script would be :: - - from packaging.core import setup - setup(name='foobar', - version='1.0', - package_dir={'foobar': 'src'}, - packages=['foobar']) - -Or, you might put modules from your main package right in the distribution -root:: - - / - setup.py - __init__.py - foo.py - bar.py - -in which case your setup script would be :: - - from packaging.core import setup - setup(name='foobar', - version='1.0', - package_dir={'foobar': ''}, - packages=['foobar']) - -(The empty string also stands for the current directory.) - -If you have sub-packages, they must be explicitly listed in :option:`packages`, -but any entries in :option:`package_dir` automatically extend to sub-packages. -(In other words, the Distutils does *not* scan your source tree, trying to -figure out which directories correspond to Python packages by looking for -:file:`__init__.py` files.) 
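To make that last point concrete, here is a sketch (hypothetical names) combining :option:`package_dir` with a sub-package: a single entry mapping :mod:`foobar` to :file:`src` also covers :mod:`foobar.subfoo`, but both packages still have to be listed::

   from packaging.core import setup

   # 'foobar' lives in src/; the package_dir entry extends to
   # foobar.subfoo (found in src/subfoo/), but the sub-package must
   # still be listed explicitly in 'packages'.
   setup(name='foobar',
         version='1.0',
         package_dir={'foobar': 'src'},
         packages=['foobar', 'foobar.subfoo'])

The same listing rule applies to the default layout, as the next example shows.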
Thus, if the default layout grows a sub-package:: - - / - setup.py - foobar/ - __init__.py - foo.py - bar.py - subfoo/ - __init__.py - blah.py - -then the corresponding setup script would be :: - - from packaging.core import setup - setup(name='foobar', - version='1.0', - packages=['foobar', 'foobar.subfoo']) - -(Again, the empty string in :option:`package_dir` stands for the current -directory.) - - -.. _packaging-single-ext: - -Single extension module -======================= - -Extension modules are specified using the :option:`ext_modules` option. -:option:`package_dir` has no effect on where extension source files are found; -it only affects the source for pure Python modules. The simplest case, a -single extension module in a single C source file, is:: - - / - setup.py - foo.c - -If the :mod:`foo` extension belongs in the root package, the setup script for -this could be :: - - from packaging.core import setup, Extension - setup(name='foobar', - version='1.0', - ext_modules=[Extension('foo', ['foo.c'])]) - -If the extension actually belongs in a package, say :mod:`foopkg`, then - -With exactly the same source tree layout, this extension can be put in the -:mod:`foopkg` package simply by changing the name of the extension:: - - from packaging.core import setup, Extension - setup(name='foobar', - version='1.0', - packages=['foopkg'], - ext_modules=[Extension('foopkg.foo', ['foo.c'])]) - - -Checking metadata -================= - -The ``check`` command allows you to verify if your project's metadata -meets the minimum requirements to build a distribution. - -To run it, just call it using your :file:`setup.py` script. If something is -missing, ``check`` will display a warning. - -Let's take an example with a simple script:: - - from packaging.core import setup - - setup(name='foobar') - -.. TODO configure logging StreamHandler to match this output - -Running the ``check`` command will display some warnings:: - - $ python setup.py check - running check - warning: check: missing required metadata: version, home_page - warning: check: missing metadata: either (author and author_email) or - (maintainer and maintainer_email) must be supplied - - -If you use the reStructuredText syntax in the ``long_description`` field and -`Docutils `_ is installed you can check if -the syntax is fine with the ``check`` command, using the ``restructuredtext`` -option. - -For example, if the :file:`setup.py` script is changed like this:: - - from packaging.core import setup - - desc = """\ - Welcome to foobar! - =============== - - This is the description of the ``foobar`` project. - """ - - setup(name='foobar', - version='1.0', - author=u'Tarek Ziad?', - author_email='tarek at ziade.org', - summary='Foobar utilities' - description=desc, - home_page='http://example.com') - -Where the long description is broken, ``check`` will be able to detect it -by using the :mod:`docutils` parser:: - - $ python setup.py check --restructuredtext - running check - warning: check: Title underline too short. (line 2) - warning: check: Could not finish the parsing. - - -.. _packaging-reading-metadata: - -Reading the metadata -==================== - -The :func:`packaging.core.setup` function provides a command-line interface -that allows you to query the metadata fields of a project through the -:file:`setup.py` script of a given project:: - - $ python setup.py --name - foobar - -This call reads the ``name`` metadata by running the -:func:`packaging.core.setup` function. 
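Other fields can be queried the same way; assuming the usual Distutils-style display options such as ``--version`` are also available, a hypothetical session (with a made-up value) would look like::

   $ python setup.py --version
   1.0
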
When a source or binary -distribution is created with Distutils, the metadata fields are written -in a static file called :file:`PKG-INFO`. When a Distutils-based project is -installed in Python, the :file:`PKG-INFO` file is copied alongside the modules -and packages of the distribution under :file:`NAME-VERSION-pyX.X.egg-info`, -where ``NAME`` is the name of the project, ``VERSION`` its version as defined -in the Metadata, and ``pyX.X`` the major and minor version of Python like -``2.7`` or ``3.2``. - -You can read back this static file, by using the -:class:`packaging.dist.Metadata` class and its -:func:`read_pkg_file` method:: - - >>> from packaging.metadata import Metadata - >>> metadata = Metadata() - >>> metadata.read_pkg_file(open('distribute-0.6.8-py2.7.egg-info')) - >>> metadata.name - 'distribute' - >>> metadata.version - '0.6.8' - >>> metadata.description - 'Easily download, build, install, upgrade, and uninstall Python packages' - -Notice that the class can also be instantiated with a metadata file path to -loads its values:: - - >>> pkg_info_path = 'distribute-0.6.8-py2.7.egg-info' - >>> Metadata(pkg_info_path).name - 'distribute' - - -.. XXX These comments have been here for at least ten years. Write the - sections or delete the comments (we can maybe ask Greg Ward about - the planned contents). (Unindent to make them section titles) - - .. multiple-ext:: - - Multiple extension modules - ========================== - - Putting it all together - ======================= diff --git a/Doc/packaging/extending.rst b/Doc/packaging/extending.rst deleted file mode 100644 --- a/Doc/packaging/extending.rst +++ /dev/null @@ -1,95 +0,0 @@ -.. _extending-packaging: - -******************* -Extending Distutils -******************* - -Distutils can be extended in various ways. Most extensions take the form of new -commands or replacements for existing commands. New commands may be written to -support new types of platform-specific packaging, for example, while -replacements for existing commands may be made to modify details of how the -command operates on a package. - -Most extensions of the packaging are made within :file:`setup.py` scripts that -want to modify existing commands; many simply add a few file extensions that -should be copied into packages in addition to :file:`.py` files as a -convenience. - -Most packaging command implementations are subclasses of the -:class:`packaging.cmd.Command` class. New commands may directly inherit from -:class:`Command`, while replacements often derive from :class:`Command` -indirectly, directly subclassing the command they are replacing. Commands are -required to derive from :class:`Command`. - -.. .. _extend-existing: - Extending existing commands - =========================== - - -.. .. _new-commands: - Writing new commands - ==================== - - -Integrating new commands -======================== - -There are different ways to integrate new command implementations into -packaging. The most difficult is to lobby for the inclusion of the new features -in packaging itself, and wait for (and require) a version of Python that -provides that support. This is really hard for many reasons. 
- -The most common, and possibly the most reasonable for most needs, is to include -the new implementations with your :file:`setup.py` script, and cause the -:func:`packaging.core.setup` function use them:: - - from packaging.core import setup - from packaging.command.build_py import build_py as _build_py - - class build_py(_build_py): - """Specialized Python source builder.""" - - # implement whatever needs to be different... - - setup(..., cmdclass={'build_py': build_py}) - -This approach is most valuable if the new implementations must be used to use a -particular package, as everyone interested in the package will need to have the -new command implementation. - -Beginning with Python 2.4, a third option is available, intended to allow new -commands to be added which can support existing :file:`setup.py` scripts without -requiring modifications to the Python installation. This is expected to allow -third-party extensions to provide support for additional packaging systems, but -the commands can be used for anything packaging commands can be used for. A new -configuration option, :option:`command_packages` (command-line option -:option:`--command-packages`), can be used to specify additional packages to be -searched for modules implementing commands. Like all packaging options, this -can be specified on the command line or in a configuration file. This option -can only be set in the ``[global]`` section of a configuration file, or before -any commands on the command line. If set in a configuration file, it can be -overridden from the command line; setting it to an empty string on the command -line causes the default to be used. This should never be set in a configuration -file provided with a package. - -This new option can be used to add any number of packages to the list of -packages searched for command implementations; multiple package names should be -separated by commas. When not specified, the search is only performed in the -:mod:`packaging.command` package. When :file:`setup.py` is run with the option -:option:`--command-packages` :option:`distcmds,buildcmds`, however, the packages -:mod:`packaging.command`, :mod:`distcmds`, and :mod:`buildcmds` will be searched -in that order. New commands are expected to be implemented in modules of the -same name as the command by classes sharing the same name. Given the example -command-line option above, the command :command:`bdist_openpkg` could be -implemented by the class :class:`distcmds.bdist_openpkg.bdist_openpkg` or -:class:`buildcmds.bdist_openpkg.bdist_openpkg`. - - -Adding new distribution types -============================= - -Commands that create distributions (files in the :file:`dist/` directory) need -to add ``(command, filename)`` pairs to ``self.distribution.dist_files`` so that -:command:`upload` can upload it to PyPI. The *filename* in the pair contains no -path information, only the name of the file itself. In dry-run mode, pairs -should still be added to represent what would have been created. diff --git a/Doc/packaging/index.rst b/Doc/packaging/index.rst deleted file mode 100644 --- a/Doc/packaging/index.rst +++ /dev/null @@ -1,45 +0,0 @@ -.. 
_packaging-index: - -############################## - Distributing Python Projects -############################## - -:Authors: The Fellowship of the Packaging -:Email: distutils-sig at python.org -:Release: |version| -:Date: |today| - -This document describes Packaging for Python authors, describing how to use the -module to make Python applications, packages or modules easily available to a -wider audience with very little overhead for build/release/install mechanics. - -.. toctree:: - :maxdepth: 2 - :numbered: - - tutorial - setupcfg - introduction - setupscript - configfile - sourcedist - builtdist - packageindex - uploading - examples - extending - commandhooks - commandref - - -.. seealso:: - - :ref:`packaging-install-index` - A user-centered manual which includes information on adding projects - into an existing Python installation. You do not need to be a Python - programmer to read this manual. - - :mod:`packaging` - A library reference for developers of packaging tools wanting to use - standalone building blocks like :mod:`~packaging.version` or - :mod:`~packaging.metadata`, or extend Packaging itself. diff --git a/Doc/packaging/introduction.rst b/Doc/packaging/introduction.rst deleted file mode 100644 --- a/Doc/packaging/introduction.rst +++ /dev/null @@ -1,193 +0,0 @@ -.. _packaging-intro: - -***************************** -An Introduction to Packaging -***************************** - -This document covers using Packaging to distribute your Python modules, -concentrating on the role of developer/distributor. If you're looking for -information on installing Python modules you should refer to the -:ref:`packaging-install-index` chapter. - -Throughout this documentation, the terms "Distutils", "the Distutils" and -"Packaging" will be used interchangeably. - -.. _packaging-concepts: - -Concepts & Terminology -====================== - -Using Distutils is quite simple both for module developers and for -users/administrators installing third-party modules. As a developer, your -responsibilities (apart from writing solid, well-documented and well-tested -code, of course!) are: - -* writing a setup script (:file:`setup.py` by convention) - -* (optional) writing a setup configuration file - -* creating a source distribution - -* (optional) creating one or more "built" (binary) distributions of your - project - -All of these tasks are covered in this document. - -Not all module developers have access to multiple platforms, so one cannot -expect them to create buildt distributions for every platform. To remedy -this, it is hoped that intermediaries called *packagers* will arise to address -this need. Packagers take source distributions released by module developers, -build them on one or more platforms and release the resulting built -distributions. Thus, users on a greater range of platforms will be able to -install the most popular Python modules in the most natural way for their -platform without having to run a setup script or compile a single line of code. - - -.. _packaging-simple-example: - -A Simple Example -================ - -A setup script is usually quite simple, although since it's written in Python -there are no arbitrary limits to what you can do with it, though you should be -careful about putting expensive operations in your setup script. -Unlike, say, Autoconf-style configure scripts the setup script may be run -multiple times in the course of building and installing a module -distribution. 
- -If all you want to do is distribute a module called :mod:`foo`, contained in a -file :file:`foo.py`, then your setup script can be as simple as:: - - from packaging.core import setup - setup(name='foo', - version='1.0', - py_modules=['foo']) - -Some observations: - -* most information that you supply to the Distutils is supplied as keyword - arguments to the :func:`setup` function - -* those keyword arguments fall into two categories: package metadata (name, - version number, etc.) and information about what's in the package (a list - of pure Python modules in this case) - -* modules are specified by module name, not filename (the same will hold true - for packages and extensions) - -* it's recommended that you supply a little more metadata than we have in the - example. In particular your name, email address and a URL for the - project if appropriate (see section :ref:`packaging-setup-script` for an example) - -To create a source distribution for this module you would create a setup -script, :file:`setup.py`, containing the above code and run:: - - python setup.py sdist - -which will create an archive file (e.g., tarball on Unix, ZIP file on Windows) -containing your setup script :file:`setup.py`, and your module :file:`foo.py`. -The archive file will be named :file:`foo-1.0.tar.gz` (or :file:`.zip`), and -will unpack into a directory :file:`foo-1.0`. - -If an end-user wishes to install your :mod:`foo` module all he has to do is -download :file:`foo-1.0.tar.gz` (or :file:`.zip`), unpack it, and from the -:file:`foo-1.0` directory run :: - - python setup.py install - -which will copy :file:`foo.py` to the appropriate directory for -third-party modules in their Python installation. - -This simple example demonstrates some fundamental concepts of Distutils. -First, both developers and installers have the same basic user interface, i.e. -the setup script. The difference is which Distutils *commands* they use: the -:command:`sdist` command is almost exclusively for module developers, while -:command:`install` is more often used by installers (although some developers -will want to install their own code occasionally). - -If you want to make things really easy for your users, you can create more -than one built distributions for them. For instance, if you are running on a -Windows machine and want to make things easy for other Windows users, you can -create an executable installer (the most appropriate type of built distribution -for this platform) with the :command:`bdist_wininst` command. For example:: - - python setup.py bdist_wininst - -will create an executable installer, :file:`foo-1.0.win32.exe`, in the current -directory. You can find out what distribution formats are available at any time -by running :: - - python setup.py bdist --help-formats - - -.. _packaging-python-terms: - -General Python terminology -========================== - -If you're reading this document, you probably have a good idea of what Python -modules, extensions and so forth are. Nevertheless, just to be sure that -everyone is on the same page, here's a quick overview of Python terms: - -module - The basic unit of code reusability in Python: a block of code imported by - some other code. Three types of modules are important to us here: pure - Python modules, extension modules and packages. - -pure Python module - A module written in Python and contained in a single :file:`.py` file (and - possibly associated :file:`.pyc` and/or :file:`.pyo` files). Sometimes - referred to as a "pure module." 
- -extension module - A module written in the low-level language of the Python implementation: C/C++ - for Python, Java for Jython. Typically contained in a single dynamically - loaded pre-compiled file, e.g. a shared object (:file:`.so`) file for Python - extensions on Unix, a DLL (given the :file:`.pyd` extension) for Python - extensions on Windows, or a Java class file for Jython extensions. Note that - currently Distutils only handles C/C++ extensions for Python. - -package - A module that contains other modules, typically contained in a directory of - the filesystem and distinguished from other directories by the presence of a - file :file:`__init__.py`. - -root package - The root of the hierarchy of packages. (This isn't really a package, - since it doesn't have an :file:`__init__.py` file. But... we have to - call it something, right?) The vast majority of the standard library is - in the root package, as are many small standalone third-party modules that - don't belong to a larger module collection. Unlike regular packages, - modules in the root package can be found in many directories: in fact, - every directory listed in ``sys.path`` contributes modules to the root - package. - - -.. _packaging-term: - -Distutils-specific terminology -============================== - -The following terms apply more specifically to the domain of distributing Python -modules using Distutils: - -module distribution - A collection of Python modules distributed together as a single downloadable - resource and meant to be installed all as one. Examples of some well-known - module distributions are NumPy, SciPy, PIL (the Python Imaging - Library) or mxBase. (Module distributions would be called a *package*, - except that term is already taken in the Python context: a single module - distribution may contain zero, one, or many Python packages.) - -pure module distribution - A module distribution that contains only pure Python modules and packages. - Sometimes referred to as a "pure distribution." - -non-pure module distribution - A module distribution that contains at least one extension module. Sometimes - referred to as a "non-pure distribution." - -distribution root - The top-level directory of your source tree (or source distribution). The - directory where :file:`setup.py` exists. Generally :file:`setup.py` will - be run from this directory. diff --git a/Doc/packaging/packageindex.rst b/Doc/packaging/packageindex.rst deleted file mode 100644 --- a/Doc/packaging/packageindex.rst +++ /dev/null @@ -1,104 +0,0 @@ -.. _packaging-package-index: - -********************************** -Registering with the Package Index -********************************** - -The Python Package Index (PyPI) holds metadata describing distributions -packaged with packaging. The packaging command :command:`register` is used to -submit your distribution's metadata to the index. It is invoked as follows:: - - python setup.py register - -Distutils will respond with the following prompt:: - - running register - We need to know who you are, so please choose either: - 1. use your existing login, - 2. register as a new user, - 3. have the server generate a new password for you (and email it to you), or - 4. quit - Your selection [default 1]: - -Note: if your username and password are saved locally, you will not see this -menu. - -If you have not registered with PyPI, then you will need to do so now. You -should choose option 2, and enter your details as required. 
Soon after -submitting your details, you will receive an email which will be used to confirm -your registration. - -Once you are registered, you may choose option 1 from the menu. You will be -prompted for your PyPI username and password, and :command:`register` will then -submit your metadata to the index. - -You may submit any number of versions of your distribution to the index. If you -alter the metadata for a particular version, you may submit it again and the -index will be updated. - -PyPI holds a record for each (name, version) combination submitted. The first -user to submit information for a given name is designated the Owner of that -name. They may submit changes through the :command:`register` command or through -the web interface. They may also designate other users as Owners or Maintainers. -Maintainers may edit the package information, but not designate other Owners or -Maintainers. - -By default PyPI will list all versions of a given package. To hide certain -versions, the Hidden property should be set to yes. This must be edited through -the web interface. - - -.. _packaging-pypirc: - -The .pypirc file -================ - -The format of the :file:`.pypirc` file is as follows:: - - [packaging] - index-servers = - pypi - - [pypi] - repository: - username: - password: - -The *packaging* section defines a *index-servers* variable that lists the -name of all sections describing a repository. - -Each section describing a repository defines three variables: - -- *repository*, that defines the url of the PyPI server. Defaults to - ``http://www.python.org/pypi``. -- *username*, which is the registered username on the PyPI server. -- *password*, that will be used to authenticate. If omitted the user - will be prompt to type it when needed. - -If you want to define another server a new section can be created and -listed in the *index-servers* variable:: - - [packaging] - index-servers = - pypi - other - - [pypi] - repository: - username: - password: - - [other] - repository: http://example.com/pypi - username: - password: - -:command:`register` can then be called with the -r option to point the -repository to work with:: - - python setup.py register -r http://example.com/pypi - -For convenience, the name of the section that describes the repository -may also be used:: - - python setup.py register -r other diff --git a/Doc/packaging/setupcfg.rst b/Doc/packaging/setupcfg.rst deleted file mode 100644 --- a/Doc/packaging/setupcfg.rst +++ /dev/null @@ -1,890 +0,0 @@ -.. highlightlang:: cfg - -.. _setupcfg-spec: - -******************************************* -Specification of the :file:`setup.cfg` file -******************************************* - -:version: 0.9 - -This document describes the :file:`setup.cfg`, an ini-style configuration file -used by Packaging to replace the :file:`setup.py` file used by Distutils. -This specification is language-agnostic, and will therefore repeat some -information that's already documented for Python in the -:class:`configparser.RawConfigParser` documentation. - -.. contents:: - :depth: 3 - :local: - - -.. _setupcfg-syntax: - -Syntax -====== - -The ini-style format used in the configuration file is a simple collection of -sections that group sets of key-value fields separated by ``=`` or ``:`` and -optional whitespace. Lines starting with ``#`` or ``;`` are comments and will -be ignored. Empty lines are also ignored. 
Example:: - - [section1] - # comment - name = value - name2 = "other value" - - [section2] - foo = bar - - -Parsing values ---------------- - -Here are a set of rules to parse values: - -- If a value is quoted with ``"`` chars, it's a string. If a quote character is - present in the quoted value, it can be escaped as ``\"`` or left as-is. - -- If the value is ``true``, ``t``, ``yes``, ``y`` (case-insensitive) or ``1``, - it's converted to the language equivalent of a ``True`` value; if it's - ``false``, ``f``, ``no``, ``n`` (case-insensitive) or ``0``, it's converted to - the equivalent of ``False``. - -- A value can contain multiple lines. When read, lines are converted into a - sequence of values. Each line after the first must start with a least one - space or tab character; this leading indentation will be stripped. - -- All other values are considered strings. - -Examples:: - - [section] - foo = one - two - three - - bar = false - baz = 1.3 - boo = "ok" - beee = "wqdqw pojpj w\"ddq" - - -Extending files ---------------- - -A configuration file can be extended (i.e. included) by other files. For this, -a ``DEFAULT`` section must contain an ``extends`` key whose value points to one -or more files which will be merged into the current files by adding new sections -and fields. If a file loaded by ``extends`` contains sections or keys that -already exist in the original file, they will not override the previous values. - -Contents of :file:`one.cfg`:: - - [section1] - name = value - - [section2] - foo = foo from one.cfg - -Contents of :file:`two.cfg`:: - - [DEFAULT] - extends = one.cfg - - [section2] - foo = foo from two.cfg - baz = baz from two.cfg - -The result of parsing :file:`two.cfg` is equivalent to this file:: - - [section1] - name = value - - [section2] - foo = foo from one.cfg - baz = baz from two.cfg - -Example use of multi-line notation to include more than one file:: - - [DEFAULT] - extends = one.cfg - two.cfg - -When several files are provided, they are processed sequentially, following the -precedence rules explained above. This means that the list of files should go -from most specialized to most common. - -**Tools will need to provide a way to produce a merged version of the -file**. This will be useful to let users publish a single file. - - -.. _setupcfg-sections: - -Description of sections and fields -================================== - -Each section contains a description of its options. - -- Options that are marked *multi* can have multiple values, one value per - line. -- Options that are marked *optional* can be omitted. -- Options that are marked *environ* can use environment markers, as described - in :PEP:`345`. - - -The sections are: - -global - Global options not related to one command. - -metadata - Name, version and other information defined by :PEP:`345`. - -files - Modules, scripts, data, documentation and other files to include in the - distribution. - -extension sections - Options used to build extension modules. - -command sections - Options given for specific commands, identical to those that can be given - on the command line. - - -.. _setupcfg-section-global: - -Global options --------------- - -Contains global options for Packaging. This section is shared with Distutils. - - -commands - Defined Packaging command. A command is defined by its fully - qualified name. *optional*, *multi* - - Examples:: - - [global] - commands = - package.setup.CustomSdistCommand - package.setup.BdistDeb - -compilers - Defined Packaging compiler. 
A compiler is defined by its fully - qualified name. *optional*, *multi* - - Example:: - - [global] - compilers = - hotcompiler.SmartCCompiler - -setup_hooks - Defines a list of callables to be called right after the :file:`setup.cfg` - file is read, before any other processing. Each value is a Python dotted - name to an object, which has to be defined in a module present in the project - directory alonside :file:`setup.cfg` or on Python's :data:`sys.path` (see - :ref:`packaging-finding-hooks`). The callables are executed in the - order they're found in the file; if one of them cannot be found, tools should - not stop, but for example produce a warning and continue with the next line. - Each callable receives the configuration as a dictionary (keys are - :file:`setup.cfg` sections, values are dictionaries of fields) and can make - any change to it. *optional*, *multi* - - Example:: - - [global] - setup_hooks = _setuphooks.customize_config - - - -.. _setupcfg-section-metadata: - -Metadata --------- - -The metadata section contains the metadata for the project as described in -:PEP:`345`. Field names are case-insensitive. - -Fields: - -name - Name of the project. - -version - Version of the project. Must comply with :PEP:`386`. - -platform - Platform specification describing an operating system - supported by the distribution which is not listed in the "Operating System" - Trove classifiers (:PEP:`301`). *optional*, *multi* - -supported-platform - Binary distributions containing a PKG-INFO file will - use the Supported-Platform field in their metadata to specify the OS and - CPU for which the binary distribution was compiled. The semantics of - the Supported-Platform field are free form. *optional*, *multi* - -summary - A one-line summary of what the distribution does. - (Used to be called *description* in Distutils1.) - -description - A longer description. (Used to be called *long_description* - in Distutils1.) A file can be provided in the *description-file* field. - *optional* - -keywords - A list of additional keywords to be used to assist searching - for the distribution in a larger catalog. Comma or space-separated. - *optional* - -home-page - The URL for the distribution's home page. - -download-url - The URL from which this version of the distribution - can be downloaded. *optional* - -author - Author's name. *optional* - -author-email - Author's e-mail. *optional* - -maintainer - Maintainer's name. *optional* - -maintainer-email - Maintainer's e-mail. *optional* - -license - A text indicating the term of uses, when a trove classifier does - not match. *optional*. - -classifiers - Classification for the distribution, as described in PEP 301. - *optional*, *multi*, *environ* - -requires-dist - name of another packaging project required as a dependency. - The format is *name (version)* where version is an optional - version declaration, as described in PEP 345. *optional*, *multi*, *environ* - -provides-dist - name of another packaging project contained within this - distribution. Same format than *requires-dist*. *optional*, *multi*, - *environ* - -obsoletes-dist - name of another packaging project this version obsoletes. - Same format than *requires-dist*. *optional*, *multi*, *environ* - -requires-python - Specifies the Python version the distribution requires. The value is a - comma-separated list of version predicates, as described in PEP 345. - *optional*, *environ* - -requires-externals - a dependency in the system. 
This field is free-form, - and just a hint for downstream maintainers. *optional*, *multi*, - *environ* - -project-url - A label, followed by a browsable URL for the project. - "label, url". The label is limited to 32 signs. *optional*, *multi* - -One extra field not present in PEP 345 is supported: - -description-file - Path to a text file that will be used to fill the ``description`` field. - Multiple values are accepted; they must be separated by whitespace. - ``description-file`` and ``description`` are mutually exclusive. *optional* - - - -Example:: - - [metadata] - name = pypi2rpm - version = 0.1 - author = Tarek Ziad? - author-email = tarek at ziade.org - summary = Script that transforms an sdist archive into a RPM package - description-file = README - home-page = http://bitbucket.org/tarek/pypi2rpm/wiki/Home - project-url: - Repository, http://bitbucket.org/tarek/pypi2rpm/ - RSS feed, https://bitbucket.org/tarek/pypi2rpm/rss - classifier = - Development Status :: 3 - Alpha - License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1) - -You should not give any explicit value for metadata-version: it will be guessed -from the fields present in the file. - - -.. _setupcfg-section-files: - -Files ------ - -This section describes the files included in the project. - -packages_root - the root directory containing all packages and modules - (default: current directory, i.e. the project's top-level - directory where :file:`setup.cfg` lives). *optional* - -packages - a list of packages the project includes *optional*, *multi* - -modules - a list of packages the project includes *optional*, *multi* - -scripts - a list of scripts the project includes *optional*, *multi* - -extra_files - a list of patterns for additional files to include in source distributions - (see :ref:`packaging-manifest`) *optional*, *multi* - -Example:: - - [files] - packages_root = src - packages = - pypi2rpm - pypi2rpm.command - - scripts = - pypi2rpm/pypi2rpm.py - - extra_files = - setup.py - README - - -.. Note:: - The :file:`setup.cfg` configuration file is included by default. Contrary to - Distutils, :file:`README` (or :file:`README.txt`) and :file:`setup.py` are - not included by default. - - -Resources -^^^^^^^^^ - -This section describes the files used by the project which must not be installed -in the same place that python modules or libraries, they are called -**resources**. They are for example documentation files, script files, -databases, etc... - -For declaring resources, you must use this notation:: - - source = destination - -Data-files are declared in the **resources** field in the **file** section, for -example:: - - [files] - resources = - source1 = destination1 - source2 = destination2 - -The **source** part of the declaration are relative paths of resources files -(using unix path separator **/**). For example, if you've this source tree:: - - foo/ - doc/ - doc.man - scripts/ - foo.sh - -Your setup.cfg will look like:: - - [files] - resources = - doc/doc.man = destination_doc - scripts/foo.sh = destination_scripts - -The final paths where files will be placed are composed by : **source** + -**destination**. In the previous example, **doc/doc.man** will be placed in -**destination_doc/doc/doc.man** and **scripts/foo.sh** will be placed in -**destination_scripts/scripts/foo.sh**. (If you want more control on the final -path, take a look at :ref:`setupcfg-resources-base-prefix`). - -The **destination** part of resources declaration are paths with categories. 
-Indeed, it's generally a bad idea to give absolute path as it will be cross -incompatible. So, you must use resources categories in your **destination** -declaration. Categories will be replaced by their real path at the installation -time. Using categories is all benefit, your declaration will be simpler, cross -platform and it will allow packager to place resources files where they want -without breaking your code. - -Categories can be specified by using this syntax:: - - {category} - -Default categories are: - -* config -* appdata -* appdata.arch -* appdata.persistent -* appdata.disposable -* help -* icon -* scripts -* doc -* info -* man - -A special category also exists **{distribution.name}** that will be replaced by -the name of the distribution, but as most of the defaults categories use them, -so it's not necessary to add **{distribution.name}** into your destination. - -If you use categories in your declarations, and you are encouraged to do, final -path will be:: - - source + destination_expanded - -.. _example_final_path: - -For example, if you have this setup.cfg:: - - [metadata] - name = foo - - [files] - resources = - doc/doc.man = {doc} - -And if **{doc}** is replaced by **{datadir}/doc/{distribution.name}**, final -path will be:: - - {datadir}/doc/foo/doc/doc.man - -Where {datafir} category will be platform-dependent. - - -More control on source part -""""""""""""""""""""""""""" - -Glob syntax -''''''''''' - -When you declare source file, you can use a glob-like syntax to match multiples file, for example:: - - scripts/* = {script} - -Will match all the files in the scripts directory and placed them in the script category. - -Glob tokens are: - - * ``*``: match all files. - * ``?``: match any character. - * ``**``: match any level of tree recursion (even 0). - * ``{}``: will match any part separated by comma (example: ``{sh,bat}``). - -.. TODO Add examples - -Order of declaration -'''''''''''''''''''' - -The order of declaration is important if one file match multiple rules. The last -rules matched by file is used, this is useful if you have this source tree:: - - foo/ - doc/ - index.rst - setup.rst - documentation.txt - doc.tex - README - -And you want all the files in the doc directory to be placed in {doc} category, -but README must be placed in {help} category, instead of listing all the files -one by one, you can declare them in this way:: - - [files] - resources = - doc/* = {doc} - doc/README = {help} - -Exclude -''''''' - -You can exclude some files of resources declaration by giving no destination, it -can be useful if you have a non-resources file in the same directory of -resources files:: - - foo/ - doc/ - RELEASES - doc.tex - documentation.txt - docu.rst - -Your **files** section will be:: - - [files] - resources = - doc/* = {doc} - doc/RELEASES = - -More control on destination part -"""""""""""""""""""""""""""""""" - -.. _setupcfg-resources-base-prefix: - -Defining a base prefix -'''''''''''''''''''''' - -When you define your resources, you can have more control of how the final path -is computed. - -By default, the final path is:: - - destination + source - -This can generate long paths, for example (example_final_path_):: - - {datadir}/doc/foo/doc/doc.man - -When you declare your source, you can use whitespace to split the source in -**prefix** **suffix**. So, for example, if you have this source:: - - docs/ doc.man - -The **prefix** is "docs/" and the **suffix** is "doc.html". - -.. note:: - - Separator can be placed after a path separator or replace it. 
So these two - sources are equivalent:: - - docs/ doc.man - docs doc.man - -.. note:: - - Glob syntax is working the same way with standard source and split source. - So these rules:: - - docs/* - docs/ * - docs * - - Will match all the files in the docs directory. - -When you use split source, the final path is computed this way:: - - destination + prefix - -So for example, if you have this setup.cfg:: - - [metadata] - name = foo - - [files] - resources = - doc/ doc.man = {doc} - -And if **{doc}** is replaced by **{datadir}/doc/{distribution.name}**, final -path will be:: - - {datadir}/doc/foo/doc.man - - -Overwriting paths for categories -"""""""""""""""""""""""""""""""" - -This part is intended for system administrators or downstream OS packagers. - -The real paths of categories are registered in the *sysconfig.cfg* file -installed in your python installation. This file uses an ini format too. -The content of the file is organized into several sections: - -* globals: Standard categories's paths. -* posix_prefix: Standard paths for categories and installation paths for posix - system. -* other ones XXX - -Standard categories paths are platform independent, they generally refers to -other categories, which are platform dependent. :mod:`sysconfig` will choose -these category from sections matching os.name. For example:: - - doc = {datadir}/doc/{distribution.name} - -It refers to datadir category, which can be different between platforms. In -posix system, it may be:: - - datadir = /usr/share - -So the final path will be:: - - doc = /usr/share/doc/{distribution.name} - -The platform-dependent categories are: - -* confdir -* datadir -* libdir -* base - - -Defining extra categories -""""""""""""""""""""""""" - -.. TODO - - -Examples -"""""""" - -These examples are incremental but work unitarily. - -Resources in root dir -''''''''''''''''''''' - -Source tree:: - - babar-1.0/ - README - babar.sh - launch.sh - babar.py - -:file:`setup.cfg`:: - - [files] - resources = - README = {doc} - *.sh = {scripts} - -So babar.sh and launch.sh will be placed in {scripts} directory. - -Now let's move all the scripts into a scripts directory. - -Resources in sub-directory -'''''''''''''''''''''''''' - -Source tree:: - - babar-1.1/ - README - scripts/ - babar.sh - launch.sh - LAUNCH - babar.py - -:file:`setup.cfg`:: - - [files] - resources = - README = {doc} - scripts/ LAUNCH = {doc} - scripts/ *.sh = {scripts} - -It's important to use the separator after scripts/ to install all the shell -scripts into {scripts} instead of {scripts}/scripts. - -Now let's add some docs. - -Resources in multiple sub-directories -''''''''''''''''''''''''''''''''''''' - -Source tree:: - - babar-1.2/ - README - scripts/ - babar.sh - launch.sh - LAUNCH - docs/ - api - man - babar.py - -:file:`setup.cfg`:: - - [files] - resources = - README = {doc} - scripts/ LAUNCH = {doc} - scripts/ *.sh = {scripts} - doc/ * = {doc} - doc/ man = {man} - -You want to place all the file in the docs script into {doc} category, instead -of man, which must be placed into {man} category, we will use the order of -declaration of globs to choose the destination, the last glob that match the -file is used. - -Now let's add some scripts for windows users. 
- -Complete example -'''''''''''''''' - -Source tree:: - - babar-1.3/ - README - doc/ - api - man - scripts/ - babar.sh - launch.sh - babar.bat - launch.bat - LAUNCH - -:file:`setup.cfg`:: - - [files] - resources = - README = {doc} - scripts/ LAUNCH = {doc} - scripts/ *.{sh,bat} = {scripts} - doc/ * = {doc} - doc/ man = {man} - -We use brace expansion syntax to place all the shell and batch scripts into -{scripts} category. - - -.. _setupcfg-section-extensions: - -Extension modules sections --------------------------- - -If a project includes extension modules written in C or C++, each one of them -needs to have its options defined in a dedicated section. Here's an example:: - - [files] - packages = coconut - - [extension: coconut._fastcoconut] - language = cxx - sources = cxx_src/cononut_utils.cxx - cxx_src/python_module.cxx - include_dirs = /usr/include/gecode - /usr/include/blitz - extra_compile_args = - -fPIC -O2 - -DGECODE_VERSION=$(./gecode_version) -- sys.platform != 'win32' - /DGECODE_VERSION=win32 -- sys.platform == 'win32' - -The section name must start with ``extension:``; the right-hand part is used as -the full name (including a parent package, if any) of the extension. Whitespace -around the extension name is allowed. If the extension module is not standalone -(e.g. ``_bisect``) but part of a package (e.g. ``thing._speedups``), the parent -package must be listed in the ``packages`` field. -Valid fields and their values are listed in the documentation of the -:class:`packaging.compiler.extension.Extension` class; values documented as -Python lists translate to multi-line values in the configuration file. In -addition, multi-line values accept environment markers on each line, after a -``--``. - - -.. _setupcfg-section-commands: - -Commands sections ------------------ - -To pass options to commands without having to type them on the command line -for each invocation, you can write them in the :file:`setup.cfg` file, in a -section named after the command. Example:: - - [sdist] - # special function to add custom files - manifest-builders = package.setup.list_extra_files - - [build] - use-2to3 = True - - [build_ext] - inplace = on - - [check] - strict = on - all = on - -Option values given in the configuration file can be overriden on the command -line. See :ref:`packaging-setup-config` for more information. - -These sections are also used to define :ref:`command hooks -`. - - -.. _setupcfg-extensibility: - -Extensibility -============= - -Every section can have fields that are not part of this specification. They are -called **extensions**. - -An extension field starts with ``X-``. Example:: - - [metadata] - name = Distribute - X-Debian-Name = python-distribute - - -.. _setupcfg-changes: - -Changes in the specification -============================ - -The versioning scheme for this specification is **MAJOR.MINOR**. Changes in the -specification will cause the version number to be updated. - -Changes to the minor number reflect backwards-compatible changes: - -- New fields and sections (optional or mandatory) can be added. -- Optional fields can be removed. - -The major number will be incremented for backwards-incompatible changes: - -- Mandatory fields or sections are removed. -- Fields change their meaning. - -As a consequence, a tool written to consume 1.5 has these properties: - -- Can read 1.1, 1.2 and all versions < 1.5, since the tool knows what - optional fields weren't there. - - .. 
XXX clarify
-
-- Can also read 1.6 and other 1.x versions: The tool will just ignore fields it
-  doesn't know about, even if they are mandatory in the new version. If
-  optional fields were removed, the tool will just consider them absent.
-
-- Cannot read 2.x and should refuse to interpret such files.
-
-A tool written to produce 1.x should have these properties:
-
-- Writes all mandatory fields.
-- May write optional fields.
-
-
-.. _setupcfg-acks:
-
-Acknowledgments
-===============
-
-This specification includes work and feedback from these people:
-
-- Tarek Ziadé
-- Julien Jehannet
-- Boris Feld
-- Éric Araujo
-
-(If your name is missing, please :ref:`let us know `.)
diff --git a/Doc/packaging/setupscript.rst b/Doc/packaging/setupscript.rst
deleted file mode 100644
--- a/Doc/packaging/setupscript.rst
+++ /dev/null
@@ -1,693 +0,0 @@
-.. _packaging-setup-script:
-
-************************
-Writing the Setup Script
-************************
-
-The setup script is the center of all activity in building, distributing, and
-installing modules using Distutils. The main purpose of the setup script is
-to describe your module distribution to Distutils, so that the various
-commands that operate on your modules do the right thing. As we saw in section
-:ref:`packaging-simple-example`, the setup script consists mainly of a
-call to :func:`setup` where most of the information is supplied as
-keyword arguments to :func:`setup`.
-
-Here's a slightly more involved example, which we'll follow for the next couple
-of sections: a setup script that could be used for Packaging itself::
-
-   #!/usr/bin/env python
-
-   from packaging.core import setup, find_packages
-
-   setup(name='Packaging',
-         version='1.0',
-         summary='Python Distribution Utilities',
-         keywords=['packaging'],
-         author='Tarek Ziadé',
-         author_email='tarek at ziade.org',
-         home_page='http://bitbucket.org/tarek/packaging/wiki/Home',
-         license='PSF',
-         packages=find_packages())
-
-
-There are only two differences between this and the trivial one-file
-distribution presented in section :ref:`packaging-simple-example`: more
-metadata and the specification of pure Python modules by package rather than
-by module. This is important since Packaging consists of a couple of dozen
-modules split into (so far) two packages; an explicit list of every module
-would be tedious to generate and difficult to maintain. For more information
-on the additional metadata, see section :ref:`packaging-metadata`.
-
-Note that any pathnames (files or directories) supplied in the setup script
-should be written using the Unix convention, i.e. slash-separated. The
-Distutils will take care of converting this platform-neutral representation into
-whatever is appropriate on your current platform before actually using the
-pathname. This makes your setup script portable across operating systems, which
-of course is one of the major goals of the Distutils. In this spirit, all
-pathnames in this document are slash-separated.
-
-This, of course, only applies to pathnames given to Distutils functions. If
-you, for example, use standard Python functions such as :func:`glob.glob` or
-:func:`os.listdir` to specify files, you should be careful to write portable
-code instead of hardcoding path separators::
-
-   glob.glob(os.path.join('mydir', 'subdir', '*.html'))
-   os.listdir(os.path.join('mydir', 'subdir'))
-
-
-.. 
_packaging-listing-packages: - -Listing whole packages -====================== - -The :option:`packages` option tells the Distutils to process (build, distribute, -install, etc.) all pure Python modules found in each package mentioned in the -:option:`packages` list. In order to do this, of course, there has to be a -correspondence between package names and directories in the filesystem. The -default correspondence is the most obvious one, i.e. package :mod:`packaging` is -found in the directory :file:`packaging` relative to the distribution root. -Thus, when you say ``packages = ['foo']`` in your setup script, you are -promising that the Distutils will find a file :file:`foo/__init__.py` (which -might be spelled differently on your system, but you get the idea) relative to -the directory where your setup script lives. If you break this promise, the -Distutils will issue a warning but still process the broken package anyway. - -If you use a different convention to lay out your source directory, that's no -problem: you just have to supply the :option:`package_dir` option to tell the -Distutils about your convention. For example, say you keep all Python source -under :file:`lib`, so that modules in the "root package" (i.e., not in any -package at all) are in :file:`lib`, modules in the :mod:`foo` package are in -:file:`lib/foo`, and so forth. Then you would put :: - - package_dir = {'': 'lib'} - -in your setup script. The keys to this dictionary are package names, and an -empty package name stands for the root package. The values are directory names -relative to your distribution root. In this case, when you say ``packages = -['foo']``, you are promising that the file :file:`lib/foo/__init__.py` exists. - -Another possible convention is to put the :mod:`foo` package right in -:file:`lib`, the :mod:`foo.bar` package in :file:`lib/bar`, etc. This would be -written in the setup script as :: - - package_dir = {'foo': 'lib'} - -A ``package: dir`` entry in the :option:`package_dir` dictionary implicitly -applies to all packages below *package*, so the :mod:`foo.bar` case is -automatically handled here. In this example, having ``packages = ['foo', -'foo.bar']`` tells the Distutils to look for :file:`lib/__init__.py` and -:file:`lib/bar/__init__.py`. (Keep in mind that although :option:`package_dir` -applies recursively, you must explicitly list all packages in -:option:`packages`: the Distutils will *not* recursively scan your source tree -looking for any directory with an :file:`__init__.py` file.) - - -.. _packaging-listing-modules: - -Listing individual modules -========================== - -For a small module distribution, you might prefer to list all modules rather -than listing packages---especially the case of a single module that goes in the -"root package" (i.e., no package at all). This simplest case was shown in -section :ref:`packaging-simple-example`; here is a slightly more involved -example:: - - py_modules = ['mod1', 'pkg.mod2'] - -This describes two modules, one of them in the "root" package, the other in the -:mod:`pkg` package. Again, the default package/directory layout implies that -these two modules can be found in :file:`mod1.py` and :file:`pkg/mod2.py`, and -that :file:`pkg/__init__.py` exists as well. And again, you can override the -package/directory correspondence using the :option:`package_dir` option. - - -.. 
_packaging-describing-extensions: - -Describing extension modules -============================ - -Just as writing Python extension modules is a bit more complicated than writing -pure Python modules, describing them to the Distutils is a bit more complicated. -Unlike pure modules, it's not enough just to list modules or packages and expect -the Distutils to go out and find the right files; you have to specify the -extension name, source file(s), and any compile/link requirements (include -directories, libraries to link with, etc.). - -.. XXX read over this section - -All of this is done through another keyword argument to :func:`setup`, the -:option:`ext_modules` option. :option:`ext_modules` is just a list of -:class:`Extension` instances, each of which describes a single extension module. -Suppose your distribution includes a single extension, called :mod:`foo` and -implemented by :file:`foo.c`. If no additional instructions to the -compiler/linker are needed, describing this extension is quite simple:: - - Extension('foo', ['foo.c']) - -The :class:`Extension` class can be imported from :mod:`packaging.core` along -with :func:`setup`. Thus, the setup script for a module distribution that -contains only this one extension and nothing else might be:: - - from packaging.core import setup, Extension - setup(name='foo', - version='1.0', - ext_modules=[Extension('foo', ['foo.c'])]) - -The :class:`Extension` class (actually, the underlying extension-building -machinery implemented by the :command:`build_ext` command) supports a great deal -of flexibility in describing Python extensions, which is explained in the -following sections. - - -Extension names and packages ----------------------------- - -The first argument to the :class:`Extension` constructor is always the name of -the extension, including any package names. For example, :: - - Extension('foo', ['src/foo1.c', 'src/foo2.c']) - -describes an extension that lives in the root package, while :: - - Extension('pkg.foo', ['src/foo1.c', 'src/foo2.c']) - -describes the same extension in the :mod:`pkg` package. The source files and -resulting object code are identical in both cases; the only difference is where -in the filesystem (and therefore where in Python's namespace hierarchy) the -resulting extension lives. - -If your distribution contains only one or more extension modules in a package, -you need to create a :file:`{package}/__init__.py` file anyway, otherwise Python -won't be able to import anything. - -If you have a number of extensions all in the same package (or all under the -same base package), use the :option:`ext_package` keyword argument to -:func:`setup`. For example, :: - - setup(..., - ext_package='pkg', - ext_modules=[Extension('foo', ['foo.c']), - Extension('subpkg.bar', ['bar.c'])]) - -will compile :file:`foo.c` to the extension :mod:`pkg.foo`, and :file:`bar.c` to -:mod:`pkg.subpkg.bar`. - - -Extension source files ----------------------- - -The second argument to the :class:`Extension` constructor is a list of source -files. Since the Distutils currently only support C, C++, and Objective-C -extensions, these are normally C/C++/Objective-C source files. (Be sure to use -appropriate extensions to distinguish C++\ source files: :file:`.cc` and -:file:`.cpp` seem to be recognized by both Unix and Windows compilers.) 
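-
-As a quick sketch (the package and file names here are hypothetical), an
-extension that mixes C and C++ sources simply lists every file in the second
-argument::
-
-   Extension('pkg._native',
-             ['src/native_core.c', 'src/native_wrapper.cpp'])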
- -However, you can also include SWIG interface (:file:`.i`) files in the list; the -:command:`build_ext` command knows how to deal with SWIG extensions: it will run -SWIG on the interface file and compile the resulting C/C++ file into your -extension. - -.. XXX SWIG support is rough around the edges and largely untested! - -This warning notwithstanding, options to SWIG can be currently passed like -this:: - - setup(..., - ext_modules=[Extension('_foo', ['foo.i'], - swig_opts=['-modern', '-I../include'])], - py_modules=['foo']) - -Or on the command line like this:: - - > python setup.py build_ext --swig-opts="-modern -I../include" - -On some platforms, you can include non-source files that are processed by the -compiler and included in your extension. Currently, this just means Windows -message text (:file:`.mc`) files and resource definition (:file:`.rc`) files for -Visual C++. These will be compiled to binary resource (:file:`.res`) files and -linked into the executable. - - -Preprocessor options --------------------- - -Three optional arguments to :class:`Extension` will help if you need to specify -include directories to search or preprocessor macros to define/undefine: -``include_dirs``, ``define_macros``, and ``undef_macros``. - -For example, if your extension requires header files in the :file:`include` -directory under your distribution root, use the ``include_dirs`` option:: - - Extension('foo', ['foo.c'], include_dirs=['include']) - -You can specify absolute directories there; if you know that your extension will -only be built on Unix systems with X11R6 installed to :file:`/usr`, you can get -away with :: - - Extension('foo', ['foo.c'], include_dirs=['/usr/include/X11']) - -You should avoid this sort of non-portable usage if you plan to distribute your -code: it's probably better to write C code like :: - - #include - -If you need to include header files from some other Python extension, you can -take advantage of the fact that header files are installed in a consistent way -by the Distutils :command:`install_header` command. For example, the Numerical -Python header files are installed (on a standard Unix installation) to -:file:`/usr/local/include/python1.5/Numerical`. (The exact location will differ -according to your platform and Python installation.) Since the Python include -directory---\ :file:`/usr/local/include/python1.5` in this case---is always -included in the search path when building Python extensions, the best approach -is to write C code like :: - - #include - -.. TODO check if it's d2.sysconfig or the new sysconfig module now - -If you must put the :file:`Numerical` include directory right into your header -search path, though, you can find that directory using the Distutils -:mod:`packaging.sysconfig` module:: - - from packaging.sysconfig import get_python_inc - incdir = os.path.join(get_python_inc(plat_specific=1), 'Numerical') - setup(..., - Extension(..., include_dirs=[incdir])) - -Even though this is quite portable---it will work on any Python installation, -regardless of platform---it's probably easier to just write your C code in the -sensible way. - -You can define and undefine preprocessor macros with the ``define_macros`` and -``undef_macros`` options. ``define_macros`` takes a list of ``(name, value)`` -tuples, where ``name`` is the name of the macro to define (a string) and -``value`` is its value: either a string or ``None``. 
(Defining a macro ``FOO`` -to ``None`` is the equivalent of a bare ``#define FOO`` in your C source: with -most compilers, this sets ``FOO`` to the string ``1``.) ``undef_macros`` is -just a list of macros to undefine. - -For example:: - - Extension(..., - define_macros=[('NDEBUG', '1'), - ('HAVE_STRFTIME', None)], - undef_macros=['HAVE_FOO', 'HAVE_BAR']) - -is the equivalent of having this at the top of every C source file:: - - #define NDEBUG 1 - #define HAVE_STRFTIME - #undef HAVE_FOO - #undef HAVE_BAR - - -Library options ---------------- - -You can also specify the libraries to link against when building your extension, -and the directories to search for those libraries. The ``libraries`` option is -a list of libraries to link against, ``library_dirs`` is a list of directories -to search for libraries at link-time, and ``runtime_library_dirs`` is a list of -directories to search for shared (dynamically loaded) libraries at run-time. - -For example, if you need to link against libraries known to be in the standard -library search path on target systems :: - - Extension(..., - libraries=['gdbm', 'readline']) - -If you need to link with libraries in a non-standard location, you'll have to -include the location in ``library_dirs``:: - - Extension(..., - library_dirs=['/usr/X11R6/lib'], - libraries=['X11', 'Xt']) - -(Again, this sort of non-portable construct should be avoided if you intend to -distribute your code.) - -.. XXX Should mention clib libraries here or somewhere else! - - -Other options -------------- - -There are still some other options which can be used to handle special cases. - -The :option:`optional` option is a boolean; if it is true, -a build failure in the extension will not abort the build process, but -instead simply not install the failing extension. - -The :option:`extra_objects` option is a list of object files to be passed to the -linker. These files must not have extensions, as the default extension for the -compiler is used. - -:option:`extra_compile_args` and :option:`extra_link_args` can be used to -specify additional command-line options for the respective compiler and linker -command lines. - -:option:`export_symbols` is only useful on Windows. It can contain a list of -symbols (functions or variables) to be exported. This option is not needed when -building compiled extensions: Distutils will automatically add ``initmodule`` -to the list of exported symbols. - -The :option:`depends` option is a list of files that the extension depends on -(for example header files). The build command will call the compiler on the -sources to rebuild extension if any on this files has been modified since the -previous build. - -Relationships between Distributions and Packages -================================================ - -.. FIXME rewrite to update to PEP 345 (but without dist/release confusion) - -A distribution may relate to packages in three specific ways: - -#. It can require packages or modules. - -#. It can provide packages or modules. - -#. It can obsolete packages or modules. - -These relationships can be specified using keyword arguments to the -:func:`packaging.core.setup` function. - -Dependencies on other Python modules and packages can be specified by supplying -the *requires* keyword argument to :func:`setup`. The value must be a list of -strings. Each string specifies a package that is required, and optionally what -versions are sufficient. 
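-
-As a minimal sketch of how these strings are passed (the requirement names
-are only placeholders), the keyword argument might look like this::
-
-   setup(...,
-         requires=['mymodule', 'xml.parsers.expat'])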
- -To specify that any version of a module or package is required, the string -should consist entirely of the module or package name. Examples include -``'mymodule'`` and ``'xml.parsers.expat'``. - -If specific versions are required, a sequence of qualifiers can be supplied in -parentheses. Each qualifier may consist of a comparison operator and a version -number. The accepted comparison operators are:: - - < > == - <= >= != - -These can be combined by using multiple qualifiers separated by commas (and -optional whitespace). In this case, all of the qualifiers must be matched; a -logical AND is used to combine the evaluations. - -Let's look at a bunch of examples: - -+-------------------------+----------------------------------------------+ -| Requires Expression | Explanation | -+=========================+==============================================+ -| ``==1.0`` | Only version ``1.0`` is compatible | -+-------------------------+----------------------------------------------+ -| ``>1.0, !=1.5.1, <2.0`` | Any version after ``1.0`` and before ``2.0`` | -| | is compatible, except ``1.5.1`` | -+-------------------------+----------------------------------------------+ - -Now that we can specify dependencies, we also need to be able to specify what we -provide that other distributions can require. This is done using the *provides* -keyword argument to :func:`setup`. The value for this keyword is a list of -strings, each of which names a Python module or package, and optionally -identifies the version. If the version is not specified, it is assumed to match -that of the distribution. - -Some examples: - -+---------------------+----------------------------------------------+ -| Provides Expression | Explanation | -+=====================+==============================================+ -| ``mypkg`` | Provide ``mypkg``, using the distribution | -| | version | -+---------------------+----------------------------------------------+ -| ``mypkg (1.1)`` | Provide ``mypkg`` version 1.1, regardless of | -| | the distribution version | -+---------------------+----------------------------------------------+ - -A package can declare that it obsoletes other packages using the *obsoletes* -keyword argument. The value for this is similar to that of the *requires* -keyword: a list of strings giving module or package specifiers. Each specifier -consists of a module or package name optionally followed by one or more version -qualifiers. Version qualifiers are given in parentheses after the module or -package name. - -The versions identified by the qualifiers are those that are obsoleted by the -distribution being described. If no qualifiers are given, all versions of the -named module or package are understood to be obsoleted. - -.. _packaging-installing-scripts: - -Installing Scripts -================== - -So far we have been dealing with pure and non-pure Python modules, which are -usually not run by themselves but imported by scripts. - -Scripts are files containing Python source code, intended to be started from the -command line. Scripts don't require Distutils to do anything very complicated. -The only clever feature is that if the first line of the script starts with -``#!`` and contains the word "python", the Distutils will adjust the first line -to refer to the current interpreter location. By default, it is replaced with -the current interpreter location. The :option:`--executable` (or :option:`-e`) -option will allow the interpreter path to be explicitly overridden. 
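-
-For example (a sketch only; which command accepts the option may differ
-between tools), the interpreter path could be forced explicitly at build
-time::
-
-   python setup.py build --executable=/usr/local/bin/python3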
- -The :option:`scripts` option simply is a list of files to be handled in this -way. From the PyXML setup script:: - - setup(..., - scripts=['scripts/xmlproc_parse', 'scripts/xmlproc_val']) - -All the scripts will also be added to the ``MANIFEST`` file if no template is -provided. See :ref:`packaging-manifest`. - -.. _packaging-installing-package-data: - -Installing Package Data -======================= - -Often, additional files need to be installed into a package. These files are -often data that's closely related to the package's implementation, or text files -containing documentation that might be of interest to programmers using the -package. These files are called :dfn:`package data`. - -Package data can be added to packages using the ``package_data`` keyword -argument to the :func:`setup` function. The value must be a mapping from -package name to a list of relative path names that should be copied into the -package. The paths are interpreted as relative to the directory containing the -package (information from the ``package_dir`` mapping is used if appropriate); -that is, the files are expected to be part of the package in the source -directories. They may contain glob patterns as well. - -The path names may contain directory portions; any necessary directories will be -created in the installation. - -For example, if a package should contain a subdirectory with several data files, -the files can be arranged like this in the source tree:: - - setup.py - src/ - mypkg/ - __init__.py - module.py - data/ - tables.dat - spoons.dat - forks.dat - -The corresponding call to :func:`setup` might be:: - - setup(..., - packages=['mypkg'], - package_dir={'mypkg': 'src/mypkg'}, - package_data={'mypkg': ['data/*.dat']}) - - -All the files that match ``package_data`` will be added to the ``MANIFEST`` -file if no template is provided. See :ref:`packaging-manifest`. - - -.. _packaging-additional-files: - -Installing Additional Files -=========================== - -The :option:`data_files` option can be used to specify additional files needed -by the module distribution: configuration files, message catalogs, data files, -anything which doesn't fit in the previous categories. - -:option:`data_files` specifies a sequence of (*directory*, *files*) pairs in the -following way:: - - setup(..., - data_files=[('bitmaps', ['bm/b1.gif', 'bm/b2.gif']), - ('config', ['cfg/data.cfg']), - ('/etc/init.d', ['init-script'])]) - -Note that you can specify the directory names where the data files will be -installed, but you cannot rename the data files themselves. - -Each (*directory*, *files*) pair in the sequence specifies the installation -directory and the files to install there. If *directory* is a relative path, it -is interpreted relative to the installation prefix (Python's ``sys.prefix`` for -pure-Python packages, ``sys.exec_prefix`` for packages that contain extension -modules). Each file name in *files* is interpreted relative to the -:file:`setup.py` script at the top of the package source distribution. No -directory information from *files* is used to determine the final location of -the installed file; only the name of the file is used. - -You can specify the :option:`data_files` options as a simple sequence of files -without specifying a target directory, but this is not recommended, and the -:command:`install_dist` command will print a warning in this case. To install data -files directly in the target directory, an empty string should be given as the -directory. 
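-
-For instance, a minimal sketch (the file name is a placeholder) that installs
-a single data file directly into the target directory::
-
-   setup(...,
-         data_files=[('', ['runtime-notes.txt'])])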
- -All the files that match ``data_files`` will be added to the ``MANIFEST`` file -if no template is provided. See :ref:`packaging-manifest`. - - - -.. _packaging-metadata: - -Metadata reference -================== - -The setup script may include additional metadata beyond the name and version. -This table describes required and additional information: - -.. TODO synchronize with setupcfg; link to it (but don't remove it, it's a - useful summary) - -+----------------------+---------------------------+-----------------+--------+ -| Meta-Data | Description | Value | Notes | -+======================+===========================+=================+========+ -| ``name`` | name of the project | short string | \(1) | -+----------------------+---------------------------+-----------------+--------+ -| ``version`` | version of this release | short string | (1)(2) | -+----------------------+---------------------------+-----------------+--------+ -| ``author`` | project author's name | short string | \(3) | -+----------------------+---------------------------+-----------------+--------+ -| ``author_email`` | email address of the | email address | \(3) | -| | project author | | | -+----------------------+---------------------------+-----------------+--------+ -| ``maintainer`` | project maintainer's name | short string | \(3) | -+----------------------+---------------------------+-----------------+--------+ -| ``maintainer_email`` | email address of the | email address | \(3) | -| | project maintainer | | | -+----------------------+---------------------------+-----------------+--------+ -| ``home_page`` | home page for the project | URL | \(1) | -+----------------------+---------------------------+-----------------+--------+ -| ``summary`` | short description of the | short string | | -| | project | | | -+----------------------+---------------------------+-----------------+--------+ -| ``description`` | longer description of the | long string | \(5) | -| | project | | | -+----------------------+---------------------------+-----------------+--------+ -| ``download_url`` | location where the | URL | | -| | project may be downloaded | | | -+----------------------+---------------------------+-----------------+--------+ -| ``classifiers`` | a list of classifiers | list of strings | \(4) | -+----------------------+---------------------------+-----------------+--------+ -| ``platforms`` | a list of platforms | list of strings | | -+----------------------+---------------------------+-----------------+--------+ -| ``license`` | license for the release | short string | \(6) | -+----------------------+---------------------------+-----------------+--------+ - -Notes: - -(1) - These fields are required. - -(2) - It is recommended that versions take the form *major.minor[.patch[.sub]]*. - -(3) - Either the author or the maintainer must be identified. - -(4) - The list of classifiers is available from the `PyPI website - `_. See also :mod:`packaging.create`. - -(5) - The ``description`` field is used by PyPI when you are registering a - release, to build its PyPI page. - -(6) - The ``license`` field is a text indicating the license covering the - distribution where the license is not a selection from the "License" Trove - classifiers. See the ``Classifier`` field. Notice that - there's a ``licence`` distribution option which is deprecated but still - acts as an alias for ``license``. - -'short string' - A single line of text, not more than 200 characters. 
- -'long string' - Multiple lines of plain text in reStructuredText format (see - http://docutils.sf.net/). - -'list of strings' - See below. - -In Python 2.x, "string value" means a unicode object. If a byte string (str or -bytes) is given, it has to be valid ASCII. - -.. TODO move this section to the version document, keep a summary, add a link - -Encoding the version information is an art in itself. Python projects generally -adhere to the version format *major.minor[.patch][sub]*. The major number is 0 -for initial, experimental releases of software. It is incremented for releases -that represent major milestones in a project. The minor number is incremented -when important new features are added to the project. The patch number -increments when bug-fix releases are made. Additional trailing version -information is sometimes used to indicate sub-releases. These are -"a1,a2,...,aN" (for alpha releases, where functionality and API may change), -"b1,b2,...,bN" (for beta releases, which only fix bugs) and "pr1,pr2,...,prN" -(for final pre-release release testing). Some examples: - -0.1.0 - the first, experimental release of a project - -1.0.1a2 - the second alpha release of the first patch version of 1.0 - -:option:`classifiers` are specified in a Python list:: - - setup(..., - classifiers=[ - 'Development Status :: 4 - Beta', - 'Environment :: Console', - 'Environment :: Web Environment', - 'Intended Audience :: End Users/Desktop', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'License :: OSI Approved :: Python Software Foundation License', - 'Operating System :: MacOS :: MacOS X', - 'Operating System :: Microsoft :: Windows', - 'Operating System :: POSIX', - 'Programming Language :: Python', - 'Topic :: Communications :: Email', - 'Topic :: Office/Business', - 'Topic :: Software Development :: Bug Tracking', - ]) - - -Debugging the setup script -========================== - -Sometimes things go wrong, and the setup script doesn't do what the developer -wants. - -Distutils catches any exceptions when running the setup script, and print a -simple error message before the script is terminated. The motivation for this -behaviour is to not confuse administrators who don't know much about Python and -are trying to install a project. If they get a big long traceback from deep -inside the guts of Distutils, they may think the project or the Python -installation is broken because they don't read all the way down to the bottom -and see that it's a permission problem. - -.. FIXME DISTUTILS_DEBUG is dead, document logging/warnings here - -On the other hand, this doesn't help the developer to find the cause of the -failure. For this purpose, the DISTUTILS_DEBUG environment variable can be set -to anything except an empty string, and Packaging will now print detailed -information about what it is doing, and prints the full traceback in case an -exception occurs. diff --git a/Doc/packaging/sourcedist.rst b/Doc/packaging/sourcedist.rst deleted file mode 100644 --- a/Doc/packaging/sourcedist.rst +++ /dev/null @@ -1,266 +0,0 @@ -.. _packaging-source-dist: - -****************************** -Creating a Source Distribution -****************************** - -As shown in section :ref:`packaging-simple-example`, you use the :command:`sdist` command -to create a source distribution. 
In the simplest case, :: - - python setup.py sdist - -(assuming you haven't specified any :command:`sdist` options in the setup script -or config file), :command:`sdist` creates the archive of the default format for -the current platform. The default format is a gzip'ed tar file -(:file:`.tar.gz`) on Unix, and ZIP file on Windows. - -You can specify as many formats as you like using the :option:`--formats` -option, for example:: - - python setup.py sdist --formats=gztar,zip - -to create a gzipped tarball and a zip file. The available formats are: - -+-----------+-------------------------+---------+ -| Format | Description | Notes | -+===========+=========================+=========+ -| ``zip`` | zip file (:file:`.zip`) | (1),(3) | -+-----------+-------------------------+---------+ -| ``gztar`` | gzip'ed tar file | \(2) | -| | (:file:`.tar.gz`) | | -+-----------+-------------------------+---------+ -| ``bztar`` | bzip2'ed tar file | | -| | (:file:`.tar.bz2`) | | -+-----------+-------------------------+---------+ -| ``tar`` | tar file (:file:`.tar`) | | -+-----------+-------------------------+---------+ - -Notes: - -(1) - default on Windows - -(2) - default on Unix - -(3) - requires either external :program:`zip` utility or :mod:`zipfile` module (part - of the standard Python library since Python 1.6) - -When using any ``tar`` format (``gztar``, ``bztar`` or -``tar``) under Unix, you can specify the ``owner`` and ``group`` names -that will be set for each member of the archive. - -For example, if you want all files of the archive to be owned by root:: - - python setup.py sdist --owner=root --group=root - - -.. _packaging-manifest: - -Specifying the files to distribute -================================== - -If you don't supply an explicit list of files (or instructions on how to -generate one), the :command:`sdist` command puts a minimal default set into the -source distribution: - -* all Python source files implied by the :option:`py_modules` and - :option:`packages` options - -* all C source files mentioned in the :option:`ext_modules` or - :option:`libraries` options - -* scripts identified by the :option:`scripts` option - See :ref:`packaging-installing-scripts`. - -* anything that looks like a test script: :file:`test/test\*.py` (currently, the - Packaging don't do anything with test scripts except include them in source - distributions, but in the future there will be a standard for testing Python - module distributions) - -* the configuration file :file:`setup.cfg` - -* all files that matches the ``package_data`` metadata. - See :ref:`packaging-installing-package-data`. - -* all files that matches the ``data_files`` metadata. - See :ref:`packaging-additional-files`. - -Contrary to Distutils, :file:`README` (or :file:`README.txt`) and -:file:`setup.py` are not included by default. - -Sometimes this is enough, but usually you will want to specify additional files -to distribute. The typical way to do this is to write a *manifest template*, -called :file:`MANIFEST.in` by default. The manifest template is just a list of -instructions for how to generate your manifest file, :file:`MANIFEST`, which is -the exact list of files to include in your source distribution. The -:command:`sdist` command processes this template and generates a manifest based -on its instructions and what it finds in the filesystem. - -If you prefer to roll your own manifest file, the format is simple: one filename -per line, regular files (or symlinks to them) only. 
If you do supply your own -:file:`MANIFEST`, you must specify everything: the default set of files -described above does not apply in this case. - -:file:`MANIFEST` files start with a comment indicating they are generated. -Files without this comment are not overwritten or removed. - -See :ref:`packaging-manifest-template` section for a syntax reference. - - -.. _packaging-manifest-options: - -Manifest-related options -======================== - -The normal course of operations for the :command:`sdist` command is as follows: - -* if the manifest file, :file:`MANIFEST` doesn't exist, read :file:`MANIFEST.in` - and create the manifest - -* if neither :file:`MANIFEST` nor :file:`MANIFEST.in` exist, create a manifest - with just the default file set - -* if either :file:`MANIFEST.in` or the setup script (:file:`setup.py`) are more - recent than :file:`MANIFEST`, recreate :file:`MANIFEST` by reading - :file:`MANIFEST.in` - -* use the list of files now in :file:`MANIFEST` (either just generated or read - in) to create the source distribution archive(s) - -There are a couple of options that modify this behaviour. First, use the -:option:`--no-defaults` and :option:`--no-prune` to disable the standard -"include" and "exclude" sets. - -Second, you might just want to (re)generate the manifest, but not create a -source distribution:: - - python setup.py sdist --manifest-only - -:option:`-o` is a shortcut for :option:`--manifest-only`. - - -.. _packaging-manifest-template: - -The MANIFEST.in template -======================== - -A :file:`MANIFEST.in` file can be added in a project to define the list of -files to include in the distribution built by the :command:`sdist` command. - -When :command:`sdist` is run, it will look for the :file:`MANIFEST.in` file -and interpret it to generate the :file:`MANIFEST` file that contains the -list of files that will be included in the package. - -This mechanism can be used when the default list of files is not enough. -(See :ref:`packaging-manifest`). - -Principle ---------- - -The manifest template has one command per line, where each command specifies a -set of files to include or exclude from the source distribution. For an -example, let's look at the Packaging' own manifest template:: - - include *.txt - recursive-include examples *.txt *.py - prune examples/sample?/build - -The meanings should be fairly clear: include all files in the distribution root -matching :file:`\*.txt`, all files anywhere under the :file:`examples` directory -matching :file:`\*.txt` or :file:`\*.py`, and exclude all directories matching -:file:`examples/sample?/build`. All of this is done *after* the standard -include set, so you can exclude files from the standard set with explicit -instructions in the manifest template. (Or, you can use the -:option:`--no-defaults` option to disable the standard set entirely.) - -The order of commands in the manifest template matters: initially, we have the -list of default files as described above, and each command in the template adds -to or removes from that list of files. 
Once we have fully processed the
-manifest template, we remove files that should not be included in the source
-distribution:
-
-* all files in the Packaging "build" tree (default :file:`build/`)
-
-* all files in directories named :file:`RCS`, :file:`CVS`, :file:`.svn`,
-  :file:`.hg`, :file:`.git`, :file:`.bzr` or :file:`_darcs`
-
-Now we have our complete list of files, which is written to the manifest for
-future reference, and then used to build the source distribution archive(s).
-
-You can disable the default set of included files with the
-:option:`--no-defaults` option, and you can disable the standard exclude set
-with :option:`--no-prune`.
-
-Following Packaging's own manifest template, let's trace how the
-:command:`sdist` command builds the list of files to include in the Packaging
-source distribution:
-
-#. include all Python source files in the :file:`packaging` and
-   :file:`packaging/command` subdirectories (because packages corresponding to
-   those two directories were mentioned in the :option:`packages` option in the
-   setup script---see section :ref:`packaging-setup-script`)
-
-#. include :file:`README.txt`, :file:`setup.py`, and :file:`setup.cfg` (standard
-   files)
-
-#. include :file:`test/test\*.py` (standard files)
-
-#. include :file:`\*.txt` in the distribution root (this will find
-   :file:`README.txt` a second time, but such redundancies are weeded out later)
-
-#. include anything matching :file:`\*.txt` or :file:`\*.py` in the sub-tree
-   under :file:`examples`
-
-#. exclude all files in the sub-trees starting at directories matching
-   :file:`examples/sample?/build`\ ---this may exclude files included by the
-   previous two steps, so it's important that the ``prune`` command in the manifest
-   template comes after the ``recursive-include`` command
-
-#. exclude the entire :file:`build` tree, and any :file:`RCS`, :file:`CVS`,
-   :file:`.svn`, :file:`.hg`, :file:`.git`, :file:`.bzr` and :file:`_darcs`
-   directories
-
-Just like in the setup script, file and directory names in the manifest template
-should always be slash-separated; Packaging will take care of converting
-them to the standard representation on your platform. That way, the manifest
-template is portable across operating systems.
- -Commands --------- - -The manifest template commands are: - -+-------------------------------------------+-----------------------------------------------+ -| Command | Description | -+===========================================+===============================================+ -| :command:`include pat1 pat2 ...` | include all files matching any of the listed | -| | patterns | -+-------------------------------------------+-----------------------------------------------+ -| :command:`exclude pat1 pat2 ...` | exclude all files matching any of the listed | -| | patterns | -+-------------------------------------------+-----------------------------------------------+ -| :command:`recursive-include dir pat1 pat2 | include all files under *dir* matching any of | -| ...` | the listed patterns | -+-------------------------------------------+-----------------------------------------------+ -| :command:`recursive-exclude dir pat1 pat2 | exclude all files under *dir* matching any of | -| ...` | the listed patterns | -+-------------------------------------------+-----------------------------------------------+ -| :command:`global-include pat1 pat2 ...` | include all files anywhere in the source tree | -| | matching --- & any of the listed patterns | -+-------------------------------------------+-----------------------------------------------+ -| :command:`global-exclude pat1 pat2 ...` | exclude all files anywhere in the source tree | -| | matching --- & any of the listed patterns | -+-------------------------------------------+-----------------------------------------------+ -| :command:`prune dir` | exclude all files under *dir* | -+-------------------------------------------+-----------------------------------------------+ -| :command:`graft dir` | include all files under *dir* | -+-------------------------------------------+-----------------------------------------------+ - -The patterns here are Unix-style "glob" patterns: ``*`` matches any sequence of -regular filename characters, ``?`` matches any single regular filename -character, and ``[range]`` matches any of the characters in *range* (e.g., -``a-z``, ``a-zA-Z``, ``a-f0-9_.``). The definition of "regular filename -character" is platform-specific: on Unix it is anything except slash; on Windows -anything except backslash or colon. diff --git a/Doc/packaging/tutorial.rst b/Doc/packaging/tutorial.rst deleted file mode 100644 --- a/Doc/packaging/tutorial.rst +++ /dev/null @@ -1,112 +0,0 @@ -================== -Packaging tutorial -================== - -Welcome to the Packaging tutorial! We will learn how to use Packaging -to package your project. - -.. TODO merge with introduction.rst - - -Getting started ---------------- - -Packaging works with the *setup.cfg* file. It contains all the metadata for -your project, as defined in PEP 345, but also declare what your project -contains. - -Let's say you have a project called *CLVault* containing one package called -*clvault*, and a few scripts inside. You can use the *pysetup* script to create -a *setup.cfg* file for the project. The script will ask you a few questions:: - - $ mkdir CLVault - $ cd CLVault - $ pysetup create - Project name [CLVault]: - Current version number: 0.1 - Package description: - >Command-line utility to store and retrieve passwords - Author name: Tarek Ziade - Author e-mail address: tarek at ziade.org - Project Home Page: http://bitbucket.org/tarek/clvault - Do you want to add a package ? (y/n): y - Package name: clvault - Do you want to add a package ? 
(y/n): n - Do you want to set Trove classifiers? (y/n): y - Please select the project status: - - 1 - Planning - 2 - Pre-Alpha - 3 - Alpha - 4 - Beta - 5 - Production/Stable - 6 - Mature - 7 - Inactive - - Status: 3 - What license do you use: GPL - Matching licenses: - - 1) License :: OSI Approved :: GNU General Public License (GPL) - 2) License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL) - - Type the number of the license you wish to use or ? to try again:: 1 - Do you want to set other trove identifiers (y/n) [n]: n - Wrote "setup.cfg". - - -A setup.cfg file is created, containing the metadata of your project and the -list of the packages it contains:: - - $ cat setup.cfg - [metadata] - name = CLVault - version = 0.1 - author = Tarek Ziade - author_email = tarek at ziade.org - description = Command-line utility to store and retrieve passwords - home_page = http://bitbucket.org/tarek/clvault - - classifier = Development Status :: 3 - Alpha - License :: OSI Approved :: GNU General Public License (GPL) - - [files] - packages = clvault - - -Our project will depend on the *keyring* project. Let's add it in the -[metadata] section:: - - [metadata] - ... - requires_dist = - keyring - - -Running commands ----------------- - -You can run useful commands on your project once the setup.cfg file is ready: - -- sdist: creates a source distribution -- register: register your project to PyPI -- upload: upload the distribution to PyPI -- install_dist: install it - -All commands are run using the run script:: - - $ pysetup run install_dist - $ pysetup run sdist - $ pysetup run upload - -If you want to push a source distribution of your project to PyPI, do:: - - $ pysetup run sdist register upload - - -Installing the project ----------------------- - -The project can be installed by manually running the packaging install command:: - - $ pysetup run install_dist diff --git a/Doc/packaging/uploading.rst b/Doc/packaging/uploading.rst deleted file mode 100644 --- a/Doc/packaging/uploading.rst +++ /dev/null @@ -1,80 +0,0 @@ -.. _packaging-package-upload: - -*************************************** -Uploading Packages to the Package Index -*************************************** - -The Python Package Index (PyPI) not only stores the package info, but also the -package data if the author of the package wishes to. The packaging command -:command:`upload` pushes the distribution files to PyPI. - -The command is invoked immediately after building one or more distribution -files. For example, the command :: - - python setup.py sdist bdist_wininst upload - -will cause the source distribution and the Windows installer to be uploaded to -PyPI. Note that these will be uploaded even if they are built using an earlier -invocation of :file:`setup.py`, but that only distributions named on the command -line for the invocation including the :command:`upload` command are uploaded. - -The :command:`upload` command uses the username, password, and repository URL -from the :file:`$HOME/.pypirc` file (see section :ref:`packaging-pypirc` for more on this -file). If a :command:`register` command was previously called in the same -command, and if the password was entered in the prompt, :command:`upload` will -reuse the entered password. This is useful if you do not want to store a clear -text password in the :file:`$HOME/.pypirc` file. 
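For example, a single invocation that registers the project and then uploads a freshly built source distribution, so that the password typed at the :command:`register` prompt is reused by :command:`upload`, might look like this sketch::

   python setup.py register sdist upload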
- -You can specify another PyPI server with the :option:`--repository=*url*` -option:: - - python setup.py sdist bdist_wininst upload -r http://example.com/pypi - -See section :ref:`packaging-pypirc` for more on defining several servers. - -You can use the :option:`--sign` option to tell :command:`upload` to sign each -uploaded file using GPG (GNU Privacy Guard). The :program:`gpg` program must -be available for execution on the system :envvar:`PATH`. You can also specify -which key to use for signing using the :option:`--identity=*name*` option. - -Other :command:`upload` options include :option:`--repository=` or -:option:`--repository=
` where *url* is the url of the server and -*section* the name of the section in :file:`$HOME/.pypirc`, and -:option:`--show-response` (which displays the full response text from the PyPI -server for help in debugging upload problems). - -PyPI package display -==================== - -The ``description`` field plays a special role at PyPI. It is used by -the server to display a home page for the registered package. - -If you use the `reStructuredText `_ -syntax for this field, PyPI will parse it and display an HTML output for -the package home page. - -The ``description`` field can be filled from a text file located in the -project:: - - from packaging.core import setup - - fp = open('README.txt') - try: - description = fp.read() - finally: - fp.close() - - setup(name='Packaging', - description=description) - -In that case, :file:`README.txt` is a regular reStructuredText text file located -in the root of the package besides :file:`setup.py`. - -To prevent registering broken reStructuredText content, you can use the -:program:`rst2html` program that is provided by the :mod:`docutils` package -and check the ``description`` from the command line:: - - $ python setup.py --description | rst2html.py > output.html - -:mod:`docutils` will display a warning if there's something wrong with your -syntax. diff --git a/Doc/tools/sphinxext/indexcontent.html b/Doc/tools/sphinxext/indexcontent.html --- a/Doc/tools/sphinxext/indexcontent.html +++ b/Doc/tools/sphinxext/indexcontent.html @@ -20,10 +20,10 @@ tutorial for C/C++ programmers

- - + + diff --git a/Doc/tools/sphinxext/susp-ignored.csv b/Doc/tools/sphinxext/susp-ignored.csv --- a/Doc/tools/sphinxext/susp-ignored.csv +++ b/Doc/tools/sphinxext/susp-ignored.csv @@ -243,28 +243,6 @@ license,,`,* THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY license,,`,THIS SOFTWARE IS PROVIDED BY THE PROJECT AND CONTRIBUTORS ``AS IS'' AND license,,:zooko,mailto:zooko at zooko.com -packaging/examples,,`,This is the description of the ``foobar`` project. -packaging/setupcfg,,::,Development Status :: 3 - Alpha -packaging/setupcfg,,::,License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1) -packaging/setupscript,,::,"'Development Status :: 4 - Beta'," -packaging/setupscript,,::,"'Environment :: Console'," -packaging/setupscript,,::,"'Environment :: Web Environment'," -packaging/setupscript,,::,"'Intended Audience :: Developers'," -packaging/setupscript,,::,"'Intended Audience :: End Users/Desktop'," -packaging/setupscript,,::,"'Intended Audience :: System Administrators'," -packaging/setupscript,,::,"'License :: OSI Approved :: Python Software Foundation License'," -packaging/setupscript,,::,"'Operating System :: MacOS :: MacOS X'," -packaging/setupscript,,::,"'Operating System :: Microsoft :: Windows'," -packaging/setupscript,,::,"'Operating System :: POSIX'," -packaging/setupscript,,::,"'Programming Language :: Python'," -packaging/setupscript,,::,"'Topic :: Communications :: Email'," -packaging/setupscript,,::,"'Topic :: Office/Business'," -packaging/setupscript,,::,"'Topic :: Software Development :: Bug Tracking'," -packaging/tutorial,,::,1) License :: OSI Approved :: GNU General Public License (GPL) -packaging/tutorial,,::,2) License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL) -packaging/tutorial,,::,classifier = Development Status :: 3 - Alpha -packaging/tutorial,,::,License :: OSI Approved :: GNU General Public License (GPL) -packaging/tutorial,,::,Type the number of the license you wish to use or ? to try again:: 1 reference/datamodel,,:max, reference/datamodel,,:step,a[i:j:step] reference/expressions,,:datum,{key:datum...} diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -528,8 +528,8 @@ Defines the :data:`user base directory `, which is used to compute the path of the :data:`user site-packages directory ` - and :ref:`Packaging installation paths ` for - ``pysetup run install_dist --user``. + and :ref:`Distutils installation paths ` for + ``python setup.py install --user``. .. seealso:: diff --git a/Doc/using/scripts.rst b/Doc/using/scripts.rst --- a/Doc/using/scripts.rst +++ b/Doc/using/scripts.rst @@ -16,8 +16,7 @@ in it with a ``home`` key pointing to the Python installation the command was run from. It also creates a ``bin`` (or ``Scripts`` on Windows) subdirectory containing a copy of the ``python`` binary (or -binaries, in the case of Windows) and the ``pysetup3`` script (to -facilitate easy installation of packages from PyPI into the new virtualenv). +binaries, in the case of Windows). It also creates an (initially empty) ``lib/pythonX.Y/site-packages`` subdirectory (on Windows, this is ``Lib\site-packages``). diff --git a/Doc/whatsnew/3.3.rst b/Doc/whatsnew/3.3.rst --- a/Doc/whatsnew/3.3.rst +++ b/Doc/whatsnew/3.3.rst @@ -53,23 +53,28 @@ release, so it's worth checking back even after reading earlier versions. 
-New packaging infrastructure -============================ +PEP 405: Virtual Environments +============================= -The standard library's packaging infrastructure has been updated to adopt -some of the features developed by the wider community. +- inspired by ``virtualenv``, a tool widely used by the community +- change to the interpreter to avoid hacks -* the :mod:`packaging` package and ``pysetup`` script (inspired by - ``setuptools``, ``distribute``, ``distutil2`` and ``pip``) -* the :mod:`venv` module and ``pyvenv`` script (inspired by ``virtualenv``) - (Note: at time of writing, :pep:`405` is accepted, but not yet implemented) -* native support for package directories that don't require ``__init__.py`` - marker files and can automatically span multiple path segments - (inspired by various third party approaches to namespace packages, as - described in :pep:`420`) +The :mod:`venv` module and ``pyvenv`` script (inspired by ``virtualenv``, a +tool widely used by the community). +.. also mention the interpreter changes that avoid the hacks used in virtualenv -.. pep-3118-update: + +PEP 420: Namespace Packages +=========================== + +Native support for package directories that don't require ``__init__.py`` +marker files and can automatically span multiple path segments (inspired by +various third party approaches to namespace packages, as described in +:pep:`420`) + + +.. _pep-3118-update: PEP 3118: New memoryview implementation and buffer protocol documentation ========================================================================= @@ -1219,20 +1224,6 @@ * :func:`~os.getgrouplist` (:issue:`9344`) -packaging ---------- - -:mod:`distutils` has undergone additions and refactoring under a new name, -:mod:`packaging`, to allow developers to make far-reaching changes without -being constrained by backward compatibility. -:mod:`distutils` is still provided in the standard library, but users are -encouraged to transition to :mod:`packaging`. For older versions of Python, a -backport compatible with Python 2.5 and newer and 3.2 is available on PyPI -under the name `distutils2 `_. - -.. TODO add examples and howto to the packaging docs and link to them - - pdb --- @@ -1560,8 +1551,6 @@ Deprecated Python modules, functions and methods ------------------------------------------------ -* The :mod:`distutils` module has been deprecated. Use the new - :mod:`packaging` module instead. * The ``unicode_internal`` codec has been deprecated because of the :pep:`393`, use UTF-8, UTF-16 (``utf-16-le`` or ``utf-16-be``), or UTF-32 (``utf-32-le`` or ``utf-32-be``) diff --git a/Lib/packaging/__init__.py b/Lib/packaging/__init__.py deleted file mode 100644 --- a/Lib/packaging/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -"""Support for packaging, distribution and installation of Python projects. 
- -Third-party tools can use parts of packaging as building blocks -without causing the other modules to be imported: - - import packaging.version - import packaging.metadata - import packaging.pypi.simple - import packaging.tests.pypi_server -""" - -from logging import getLogger - -__all__ = ['__version__', 'logger'] - -__version__ = "1.0a3" -logger = getLogger('packaging') diff --git a/Lib/packaging/_trove.py b/Lib/packaging/_trove.py deleted file mode 100644 --- a/Lib/packaging/_trove.py +++ /dev/null @@ -1,571 +0,0 @@ -"""Temporary helper for create.""" - -# XXX get the list from PyPI and cache it instead of hardcoding - -# XXX see if it would be more useful to store it as another structure -# than a list of strings - -all_classifiers = [ -'Development Status :: 1 - Planning', -'Development Status :: 2 - Pre-Alpha', -'Development Status :: 3 - Alpha', -'Development Status :: 4 - Beta', -'Development Status :: 5 - Production/Stable', -'Development Status :: 6 - Mature', -'Development Status :: 7 - Inactive', -'Environment :: Console', -'Environment :: Console :: Curses', -'Environment :: Console :: Framebuffer', -'Environment :: Console :: Newt', -'Environment :: Console :: svgalib', -"Environment :: Handhelds/PDA's", -'Environment :: MacOS X', -'Environment :: MacOS X :: Aqua', -'Environment :: MacOS X :: Carbon', -'Environment :: MacOS X :: Cocoa', -'Environment :: No Input/Output (Daemon)', -'Environment :: Other Environment', -'Environment :: Plugins', -'Environment :: Web Environment', -'Environment :: Web Environment :: Buffet', -'Environment :: Web Environment :: Mozilla', -'Environment :: Web Environment :: ToscaWidgets', -'Environment :: Win32 (MS Windows)', -'Environment :: X11 Applications', -'Environment :: X11 Applications :: Gnome', -'Environment :: X11 Applications :: GTK', -'Environment :: X11 Applications :: KDE', -'Environment :: X11 Applications :: Qt', -'Framework :: BFG', -'Framework :: Buildout', -'Framework :: Buildout :: Extension', -'Framework :: Buildout :: Recipe', -'Framework :: Chandler', -'Framework :: CherryPy', -'Framework :: CubicWeb', -'Framework :: Django', -'Framework :: IDLE', -'Framework :: Paste', -'Framework :: Plone', -'Framework :: Plone :: 3.2', -'Framework :: Plone :: 3.3', -'Framework :: Plone :: 4.0', -'Framework :: Plone :: 4.1', -'Framework :: Plone :: 4.2', -'Framework :: Plone :: 4.3', -'Framework :: Pylons', -'Framework :: Setuptools Plugin', -'Framework :: Trac', -'Framework :: Tryton', -'Framework :: TurboGears', -'Framework :: TurboGears :: Applications', -'Framework :: TurboGears :: Widgets', -'Framework :: Twisted', -'Framework :: ZODB', -'Framework :: Zope2', -'Framework :: Zope3', -'Intended Audience :: Customer Service', -'Intended Audience :: Developers', -'Intended Audience :: Education', -'Intended Audience :: End Users/Desktop', -'Intended Audience :: Financial and Insurance Industry', -'Intended Audience :: Healthcare Industry', -'Intended Audience :: Information Technology', -'Intended Audience :: Legal Industry', -'Intended Audience :: Manufacturing', -'Intended Audience :: Other Audience', -'Intended Audience :: Religion', -'Intended Audience :: Science/Research', -'Intended Audience :: System Administrators', -'Intended Audience :: Telecommunications Industry', -'License :: Aladdin Free Public License (AFPL)', -'License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication', -'License :: DFSG approved', -'License :: Eiffel Forum License (EFL)', -'License :: Free For Educational Use', -'License :: Free For Home Use', 
-'License :: Free for non-commercial use', -'License :: Freely Distributable', -'License :: Free To Use But Restricted', -'License :: Freeware', -'License :: Netscape Public License (NPL)', -'License :: Nokia Open Source License (NOKOS)', -'License :: OSI Approved', -'License :: OSI Approved :: Academic Free License (AFL)', -'License :: OSI Approved :: Apache Software License', -'License :: OSI Approved :: Apple Public Source License', -'License :: OSI Approved :: Artistic License', -'License :: OSI Approved :: Attribution Assurance License', -'License :: OSI Approved :: BSD License', -'License :: OSI Approved :: Common Public License', -'License :: OSI Approved :: Eiffel Forum License', -'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)', -'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)', -'License :: OSI Approved :: GNU Affero General Public License v3', -'License :: OSI Approved :: GNU Free Documentation License (FDL)', -'License :: OSI Approved :: GNU General Public License (GPL)', -'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)', -'License :: OSI Approved :: IBM Public License', -'License :: OSI Approved :: Intel Open Source License', -'License :: OSI Approved :: ISC License (ISCL)', -'License :: OSI Approved :: Jabber Open Source License', -'License :: OSI Approved :: MIT License', -'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)', -'License :: OSI Approved :: Motosoto License', -'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)', -'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)', -'License :: OSI Approved :: Nethack General Public License', -'License :: OSI Approved :: Nokia Open Source License', -'License :: OSI Approved :: Open Group Test Suite License', -'License :: OSI Approved :: Python License (CNRI Python License)', -'License :: OSI Approved :: Python Software Foundation License', -'License :: OSI Approved :: Qt Public License (QPL)', -'License :: OSI Approved :: Ricoh Source Code Public License', -'License :: OSI Approved :: Sleepycat License', -'License :: OSI Approved :: Sun Industry Standards Source License (SISSL)', -'License :: OSI Approved :: Sun Public License', -'License :: OSI Approved :: University of Illinois/NCSA Open Source License', -'License :: OSI Approved :: Vovida Software License 1.0', -'License :: OSI Approved :: W3C License', -'License :: OSI Approved :: X.Net License', -'License :: OSI Approved :: zlib/libpng License', -'License :: OSI Approved :: Zope Public License', -'License :: Other/Proprietary License', -'License :: Public Domain', -'License :: Repoze Public License', -'Natural Language :: Afrikaans', -'Natural Language :: Arabic', -'Natural Language :: Bengali', -'Natural Language :: Bosnian', -'Natural Language :: Bulgarian', -'Natural Language :: Catalan', -'Natural Language :: Chinese (Simplified)', -'Natural Language :: Chinese (Traditional)', -'Natural Language :: Croatian', -'Natural Language :: Czech', -'Natural Language :: Danish', -'Natural Language :: Dutch', -'Natural Language :: English', -'Natural Language :: Esperanto', -'Natural Language :: Finnish', -'Natural Language :: French', -'Natural Language :: German', -'Natural Language :: Greek', -'Natural Language :: Hebrew', -'Natural Language :: Hindi', -'Natural Language :: Hungarian', -'Natural Language :: Icelandic', -'Natural Language :: Indonesian', -'Natural Language :: Italian', -'Natural Language :: Japanese', -'Natural Language :: 
Javanese', -'Natural Language :: Korean', -'Natural Language :: Latin', -'Natural Language :: Latvian', -'Natural Language :: Macedonian', -'Natural Language :: Malay', -'Natural Language :: Marathi', -'Natural Language :: Norwegian', -'Natural Language :: Panjabi', -'Natural Language :: Persian', -'Natural Language :: Polish', -'Natural Language :: Portuguese', -'Natural Language :: Portuguese (Brazilian)', -'Natural Language :: Romanian', -'Natural Language :: Russian', -'Natural Language :: Serbian', -'Natural Language :: Slovak', -'Natural Language :: Slovenian', -'Natural Language :: Spanish', -'Natural Language :: Swedish', -'Natural Language :: Tamil', -'Natural Language :: Telugu', -'Natural Language :: Thai', -'Natural Language :: Turkish', -'Natural Language :: Ukranian', -'Natural Language :: Urdu', -'Natural Language :: Vietnamese', -'Operating System :: BeOS', -'Operating System :: MacOS', -'Operating System :: MacOS :: MacOS 9', -'Operating System :: MacOS :: MacOS X', -'Operating System :: Microsoft', -'Operating System :: Microsoft :: MS-DOS', -'Operating System :: Microsoft :: Windows', -'Operating System :: Microsoft :: Windows :: Windows 3.1 or Earlier', -'Operating System :: Microsoft :: Windows :: Windows 95/98/2000', -'Operating System :: Microsoft :: Windows :: Windows CE', -'Operating System :: Microsoft :: Windows :: Windows NT/2000', -'Operating System :: OS/2', -'Operating System :: OS Independent', -'Operating System :: Other OS', -'Operating System :: PalmOS', -'Operating System :: PDA Systems', -'Operating System :: POSIX', -'Operating System :: POSIX :: AIX', -'Operating System :: POSIX :: BSD', -'Operating System :: POSIX :: BSD :: BSD/OS', -'Operating System :: POSIX :: BSD :: FreeBSD', -'Operating System :: POSIX :: BSD :: NetBSD', -'Operating System :: POSIX :: BSD :: OpenBSD', -'Operating System :: POSIX :: GNU Hurd', -'Operating System :: POSIX :: HP-UX', -'Operating System :: POSIX :: IRIX', -'Operating System :: POSIX :: Linux', -'Operating System :: POSIX :: Other', -'Operating System :: POSIX :: SCO', -'Operating System :: POSIX :: SunOS/Solaris', -'Operating System :: Unix', -'Programming Language :: Ada', -'Programming Language :: APL', -'Programming Language :: ASP', -'Programming Language :: Assembly', -'Programming Language :: Awk', -'Programming Language :: Basic', -'Programming Language :: C', -'Programming Language :: C#', -'Programming Language :: C++', -'Programming Language :: Cold Fusion', -'Programming Language :: Cython', -'Programming Language :: Delphi/Kylix', -'Programming Language :: Dylan', -'Programming Language :: Eiffel', -'Programming Language :: Emacs-Lisp', -'Programming Language :: Erlang', -'Programming Language :: Euler', -'Programming Language :: Euphoria', -'Programming Language :: Forth', -'Programming Language :: Fortran', -'Programming Language :: Haskell', -'Programming Language :: Java', -'Programming Language :: JavaScript', -'Programming Language :: Lisp', -'Programming Language :: Logo', -'Programming Language :: ML', -'Programming Language :: Modula', -'Programming Language :: Objective C', -'Programming Language :: Object Pascal', -'Programming Language :: OCaml', -'Programming Language :: Other', -'Programming Language :: Other Scripting Engines', -'Programming Language :: Pascal', -'Programming Language :: Perl', -'Programming Language :: PHP', -'Programming Language :: Pike', -'Programming Language :: Pliant', -'Programming Language :: PL/SQL', -'Programming Language :: PROGRESS', -'Programming Language :: 
Prolog', -'Programming Language :: Python', -'Programming Language :: Python :: 2', -'Programming Language :: Python :: 2.3', -'Programming Language :: Python :: 2.4', -'Programming Language :: Python :: 2.5', -'Programming Language :: Python :: 2.6', -'Programming Language :: Python :: 2.7', -'Programming Language :: Python :: 3', -'Programming Language :: Python :: 3.0', -'Programming Language :: Python :: 3.1', -'Programming Language :: Python :: 3.2', -'Programming Language :: Python :: Implementation', -'Programming Language :: Python :: Implementation :: CPython', -'Programming Language :: Python :: Implementation :: IronPython', -'Programming Language :: Python :: Implementation :: Jython', -'Programming Language :: Python :: Implementation :: PyPy', -'Programming Language :: Python :: Implementation :: Stackless', -'Programming Language :: REBOL', -'Programming Language :: Rexx', -'Programming Language :: Ruby', -'Programming Language :: Scheme', -'Programming Language :: Simula', -'Programming Language :: Smalltalk', -'Programming Language :: SQL', -'Programming Language :: Tcl', -'Programming Language :: Unix Shell', -'Programming Language :: Visual Basic', -'Programming Language :: XBasic', -'Programming Language :: YACC', -'Programming Language :: Zope', -'Topic :: Adaptive Technologies', -'Topic :: Artistic Software', -'Topic :: Communications', -'Topic :: Communications :: BBS', -'Topic :: Communications :: Chat', -'Topic :: Communications :: Chat :: AOL Instant Messenger', -'Topic :: Communications :: Chat :: ICQ', -'Topic :: Communications :: Chat :: Internet Relay Chat', -'Topic :: Communications :: Chat :: Unix Talk', -'Topic :: Communications :: Conferencing', -'Topic :: Communications :: Email', -'Topic :: Communications :: Email :: Address Book', -'Topic :: Communications :: Email :: Email Clients (MUA)', -'Topic :: Communications :: Email :: Filters', -'Topic :: Communications :: Email :: Mailing List Servers', -'Topic :: Communications :: Email :: Mail Transport Agents', -'Topic :: Communications :: Email :: Post-Office', -'Topic :: Communications :: Email :: Post-Office :: IMAP', -'Topic :: Communications :: Email :: Post-Office :: POP3', -'Topic :: Communications :: Fax', -'Topic :: Communications :: FIDO', -'Topic :: Communications :: File Sharing', -'Topic :: Communications :: File Sharing :: Gnutella', -'Topic :: Communications :: File Sharing :: Napster', -'Topic :: Communications :: Ham Radio', -'Topic :: Communications :: Internet Phone', -'Topic :: Communications :: Telephony', -'Topic :: Communications :: Usenet News', -'Topic :: Database', -'Topic :: Database :: Database Engines/Servers', -'Topic :: Database :: Front-Ends', -'Topic :: Desktop Environment', -'Topic :: Desktop Environment :: File Managers', -'Topic :: Desktop Environment :: Gnome', -'Topic :: Desktop Environment :: GNUstep', -'Topic :: Desktop Environment :: K Desktop Environment (KDE)', -'Topic :: Desktop Environment :: K Desktop Environment (KDE) :: Themes', -'Topic :: Desktop Environment :: PicoGUI', -'Topic :: Desktop Environment :: PicoGUI :: Applications', -'Topic :: Desktop Environment :: PicoGUI :: Themes', -'Topic :: Desktop Environment :: Screen Savers', -'Topic :: Desktop Environment :: Window Managers', -'Topic :: Desktop Environment :: Window Managers :: Afterstep', -'Topic :: Desktop Environment :: Window Managers :: Afterstep :: Themes', -'Topic :: Desktop Environment :: Window Managers :: Applets', -'Topic :: Desktop Environment :: Window Managers :: Blackbox', -'Topic :: 
Desktop Environment :: Window Managers :: Blackbox :: Themes', -'Topic :: Desktop Environment :: Window Managers :: CTWM', -'Topic :: Desktop Environment :: Window Managers :: CTWM :: Themes', -'Topic :: Desktop Environment :: Window Managers :: Enlightenment', -'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Epplets', -'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR15', -'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR16', -'Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes DR17', -'Topic :: Desktop Environment :: Window Managers :: Fluxbox', -'Topic :: Desktop Environment :: Window Managers :: Fluxbox :: Themes', -'Topic :: Desktop Environment :: Window Managers :: FVWM', -'Topic :: Desktop Environment :: Window Managers :: FVWM :: Themes', -'Topic :: Desktop Environment :: Window Managers :: IceWM', -'Topic :: Desktop Environment :: Window Managers :: IceWM :: Themes', -'Topic :: Desktop Environment :: Window Managers :: MetaCity', -'Topic :: Desktop Environment :: Window Managers :: MetaCity :: Themes', -'Topic :: Desktop Environment :: Window Managers :: Oroborus', -'Topic :: Desktop Environment :: Window Managers :: Oroborus :: Themes', -'Topic :: Desktop Environment :: Window Managers :: Sawfish', -'Topic :: Desktop Environment :: Window Managers :: Sawfish :: Themes 0.30', -'Topic :: Desktop Environment :: Window Managers :: Sawfish :: Themes pre-0.30', -'Topic :: Desktop Environment :: Window Managers :: Waimea', -'Topic :: Desktop Environment :: Window Managers :: Waimea :: Themes', -'Topic :: Desktop Environment :: Window Managers :: Window Maker', -'Topic :: Desktop Environment :: Window Managers :: Window Maker :: Applets', -'Topic :: Desktop Environment :: Window Managers :: Window Maker :: Themes', -'Topic :: Desktop Environment :: Window Managers :: XFCE', -'Topic :: Desktop Environment :: Window Managers :: XFCE :: Themes', -'Topic :: Documentation', -'Topic :: Education', -'Topic :: Education :: Computer Aided Instruction (CAI)', -'Topic :: Education :: Testing', -'Topic :: Games/Entertainment', -'Topic :: Games/Entertainment :: Arcade', -'Topic :: Games/Entertainment :: Board Games', -'Topic :: Games/Entertainment :: First Person Shooters', -'Topic :: Games/Entertainment :: Fortune Cookies', -'Topic :: Games/Entertainment :: Multi-User Dungeons (MUD)', -'Topic :: Games/Entertainment :: Puzzle Games', -'Topic :: Games/Entertainment :: Real Time Strategy', -'Topic :: Games/Entertainment :: Role-Playing', -'Topic :: Games/Entertainment :: Side-Scrolling/Arcade Games', -'Topic :: Games/Entertainment :: Simulation', -'Topic :: Games/Entertainment :: Turn Based Strategy', -'Topic :: Home Automation', -'Topic :: Internet', -'Topic :: Internet :: File Transfer Protocol (FTP)', -'Topic :: Internet :: Finger', -'Topic :: Internet :: Log Analysis', -'Topic :: Internet :: Name Service (DNS)', -'Topic :: Internet :: Proxy Servers', -'Topic :: Internet :: WAP', -'Topic :: Internet :: WWW/HTTP', -'Topic :: Internet :: WWW/HTTP :: Browsers', -'Topic :: Internet :: WWW/HTTP :: Dynamic Content', -'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries', -'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Message Boards', -'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: News/Diary', -'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Page Counters', -'Topic :: Internet :: WWW/HTTP :: HTTP Servers', -'Topic :: Internet :: WWW/HTTP :: Indexing/Search', -'Topic 
:: Internet :: WWW/HTTP :: Session', -'Topic :: Internet :: WWW/HTTP :: Site Management', -'Topic :: Internet :: WWW/HTTP :: Site Management :: Link Checking', -'Topic :: Internet :: WWW/HTTP :: WSGI', -'Topic :: Internet :: WWW/HTTP :: WSGI :: Application', -'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware', -'Topic :: Internet :: WWW/HTTP :: WSGI :: Server', -'Topic :: Internet :: Z39.50', -'Topic :: Multimedia', -'Topic :: Multimedia :: Graphics', -'Topic :: Multimedia :: Graphics :: 3D Modeling', -'Topic :: Multimedia :: Graphics :: 3D Rendering', -'Topic :: Multimedia :: Graphics :: Capture', -'Topic :: Multimedia :: Graphics :: Capture :: Digital Camera', -'Topic :: Multimedia :: Graphics :: Capture :: Scanners', -'Topic :: Multimedia :: Graphics :: Capture :: Screen Capture', -'Topic :: Multimedia :: Graphics :: Editors', -'Topic :: Multimedia :: Graphics :: Editors :: Raster-Based', -'Topic :: Multimedia :: Graphics :: Editors :: Vector-Based', -'Topic :: Multimedia :: Graphics :: Graphics Conversion', -'Topic :: Multimedia :: Graphics :: Presentation', -'Topic :: Multimedia :: Graphics :: Viewers', -'Topic :: Multimedia :: Sound/Audio', -'Topic :: Multimedia :: Sound/Audio :: Analysis', -'Topic :: Multimedia :: Sound/Audio :: Capture/Recording', -'Topic :: Multimedia :: Sound/Audio :: CD Audio', -'Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Playing', -'Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Ripping', -'Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Writing', -'Topic :: Multimedia :: Sound/Audio :: Conversion', -'Topic :: Multimedia :: Sound/Audio :: Editors', -'Topic :: Multimedia :: Sound/Audio :: MIDI', -'Topic :: Multimedia :: Sound/Audio :: Mixers', -'Topic :: Multimedia :: Sound/Audio :: Players', -'Topic :: Multimedia :: Sound/Audio :: Players :: MP3', -'Topic :: Multimedia :: Sound/Audio :: Sound Synthesis', -'Topic :: Multimedia :: Sound/Audio :: Speech', -'Topic :: Multimedia :: Video', -'Topic :: Multimedia :: Video :: Capture', -'Topic :: Multimedia :: Video :: Conversion', -'Topic :: Multimedia :: Video :: Display', -'Topic :: Multimedia :: Video :: Non-Linear Editor', -'Topic :: Office/Business', -'Topic :: Office/Business :: Financial', -'Topic :: Office/Business :: Financial :: Accounting', -'Topic :: Office/Business :: Financial :: Investment', -'Topic :: Office/Business :: Financial :: Point-Of-Sale', -'Topic :: Office/Business :: Financial :: Spreadsheet', -'Topic :: Office/Business :: Groupware', -'Topic :: Office/Business :: News/Diary', -'Topic :: Office/Business :: Office Suites', -'Topic :: Office/Business :: Scheduling', -'Topic :: Other/Nonlisted Topic', -'Topic :: Printing', -'Topic :: Religion', -'Topic :: Scientific/Engineering', -'Topic :: Scientific/Engineering :: Artificial Life', -'Topic :: Scientific/Engineering :: Artificial Intelligence', -'Topic :: Scientific/Engineering :: Astronomy', -'Topic :: Scientific/Engineering :: Atmospheric Science', -'Topic :: Scientific/Engineering :: Bio-Informatics', -'Topic :: Scientific/Engineering :: Chemistry', -'Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)', -'Topic :: Scientific/Engineering :: GIS', -'Topic :: Scientific/Engineering :: Human Machine Interfaces', -'Topic :: Scientific/Engineering :: Image Recognition', -'Topic :: Scientific/Engineering :: Information Analysis', -'Topic :: Scientific/Engineering :: Interface Engine/Protocol Translator', -'Topic :: Scientific/Engineering :: Mathematics', -'Topic :: Scientific/Engineering :: Medical 
Science Apps.', -'Topic :: Scientific/Engineering :: Physics', -'Topic :: Scientific/Engineering :: Visualization', -'Topic :: Security', -'Topic :: Security :: Cryptography', -'Topic :: Sociology', -'Topic :: Sociology :: Genealogy', -'Topic :: Sociology :: History', -'Topic :: Software Development', -'Topic :: Software Development :: Assemblers', -'Topic :: Software Development :: Bug Tracking', -'Topic :: Software Development :: Build Tools', -'Topic :: Software Development :: Code Generators', -'Topic :: Software Development :: Compilers', -'Topic :: Software Development :: Debuggers', -'Topic :: Software Development :: Disassemblers', -'Topic :: Software Development :: Documentation', -'Topic :: Software Development :: Embedded Systems', -'Topic :: Software Development :: Internationalization', -'Topic :: Software Development :: Interpreters', -'Topic :: Software Development :: Libraries', -'Topic :: Software Development :: Libraries :: Application Frameworks', -'Topic :: Software Development :: Libraries :: Java Libraries', -'Topic :: Software Development :: Libraries :: Perl Modules', -'Topic :: Software Development :: Libraries :: PHP Classes', -'Topic :: Software Development :: Libraries :: Pike Modules', -'Topic :: Software Development :: Libraries :: pygame', -'Topic :: Software Development :: Libraries :: Python Modules', -'Topic :: Software Development :: Libraries :: Ruby Modules', -'Topic :: Software Development :: Libraries :: Tcl Extensions', -'Topic :: Software Development :: Localization', -'Topic :: Software Development :: Object Brokering', -'Topic :: Software Development :: Object Brokering :: CORBA', -'Topic :: Software Development :: Pre-processors', -'Topic :: Software Development :: Quality Assurance', -'Topic :: Software Development :: Testing', -'Topic :: Software Development :: Testing :: Traffic Generation', -'Topic :: Software Development :: User Interfaces', -'Topic :: Software Development :: Version Control', -'Topic :: Software Development :: Version Control :: CVS', -'Topic :: Software Development :: Version Control :: RCS', -'Topic :: Software Development :: Version Control :: SCCS', -'Topic :: Software Development :: Widget Sets', -'Topic :: System', -'Topic :: System :: Archiving', -'Topic :: System :: Archiving :: Backup', -'Topic :: System :: Archiving :: Compression', -'Topic :: System :: Archiving :: Mirroring', -'Topic :: System :: Archiving :: Packaging', -'Topic :: System :: Benchmark', -'Topic :: System :: Boot', -'Topic :: System :: Boot :: Init', -'Topic :: System :: Clustering', -'Topic :: System :: Console Fonts', -'Topic :: System :: Distributed Computing', -'Topic :: System :: Emulators', -'Topic :: System :: Filesystems', -'Topic :: System :: Hardware', -'Topic :: System :: Hardware :: Hardware Drivers', -'Topic :: System :: Hardware :: Mainframes', -'Topic :: System :: Hardware :: Symmetric Multi-processing', -'Topic :: System :: Installation/Setup', -'Topic :: System :: Logging', -'Topic :: System :: Monitoring', -'Topic :: System :: Networking', -'Topic :: System :: Networking :: Firewalls', -'Topic :: System :: Networking :: Monitoring', -'Topic :: System :: Networking :: Monitoring :: Hardware Watchdog', -'Topic :: System :: Networking :: Time Synchronization', -'Topic :: System :: Operating System', -'Topic :: System :: Operating System Kernels', -'Topic :: System :: Operating System Kernels :: BSD', -'Topic :: System :: Operating System Kernels :: GNU Hurd', -'Topic :: System :: Operating System Kernels :: Linux', -'Topic :: 
System :: Power (UPS)', -'Topic :: System :: Recovery Tools', -'Topic :: System :: Shells', -'Topic :: System :: Software Distribution', -'Topic :: System :: Systems Administration', -'Topic :: System :: Systems Administration :: Authentication/Directory', -'Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP', -'Topic :: System :: Systems Administration :: Authentication/Directory :: NIS', -'Topic :: System :: System Shells', -'Topic :: Terminals', -'Topic :: Terminals :: Serial', -'Topic :: Terminals :: Telnet', -'Topic :: Terminals :: Terminal Emulators/X Terminals', -'Topic :: Text Editors', -'Topic :: Text Editors :: Documentation', -'Topic :: Text Editors :: Emacs', -'Topic :: Text Editors :: Integrated Development Environments (IDE)', -'Topic :: Text Editors :: Text Processing', -'Topic :: Text Editors :: Word Processors', -'Topic :: Text Processing', -'Topic :: Text Processing :: Filters', -'Topic :: Text Processing :: Fonts', -'Topic :: Text Processing :: General', -'Topic :: Text Processing :: Indexing', -'Topic :: Text Processing :: Linguistic', -'Topic :: Text Processing :: Markup', -'Topic :: Text Processing :: Markup :: HTML', -'Topic :: Text Processing :: Markup :: LaTeX', -'Topic :: Text Processing :: Markup :: SGML', -'Topic :: Text Processing :: Markup :: VRML', -'Topic :: Text Processing :: Markup :: XML', -'Topic :: Utilities', -] diff --git a/Lib/packaging/command/__init__.py b/Lib/packaging/command/__init__.py deleted file mode 100644 --- a/Lib/packaging/command/__init__.py +++ /dev/null @@ -1,53 +0,0 @@ -"""Subpackage containing all standard commands.""" -import os -from packaging.errors import PackagingModuleError -from packaging.util import resolve_name - -__all__ = ['get_command_names', 'set_command', 'get_command_class', - 'STANDARD_COMMANDS'] - - -STANDARD_COMMANDS = [ - # packaging - 'check', 'test', - # building - 'build', 'build_py', 'build_ext', 'build_clib', 'build_scripts', 'clean', - # installing - 'install_dist', 'install_lib', 'install_headers', 'install_scripts', - 'install_data', 'install_distinfo', - # distributing - 'sdist', 'bdist', 'bdist_dumb', 'bdist_wininst', - 'register', 'upload', 'upload_docs', - ] - -if os.name == 'nt': - STANDARD_COMMANDS.insert(STANDARD_COMMANDS.index('bdist_wininst'), - 'bdist_msi') - -# XXX maybe we need more than one registry, so that --list-comands can display -# standard, custom and overriden standard commands differently -_COMMANDS = dict((name, 'packaging.command.%s.%s' % (name, name)) - for name in STANDARD_COMMANDS) - - -def get_command_names(): - """Return registered commands""" - return sorted(_COMMANDS) - - -def set_command(location): - cls = resolve_name(location) - # XXX we want to do the duck-type checking here - _COMMANDS[cls.get_command_name()] = cls - - -def get_command_class(name): - """Return the registered command""" - try: - cls = _COMMANDS[name] - except KeyError: - raise PackagingModuleError("Invalid command %s" % name) - if isinstance(cls, str): - cls = resolve_name(cls) - _COMMANDS[name] = cls - return cls diff --git a/Lib/packaging/command/bdist.py b/Lib/packaging/command/bdist.py deleted file mode 100644 --- a/Lib/packaging/command/bdist.py +++ /dev/null @@ -1,141 +0,0 @@ -"""Create a built (binary) distribution. - -If a --formats option was given on the command line, this command will -call the corresponding bdist_* commands; if the option was absent, a -bdist_* command depending on the current platform will be called. 
-""" - -import os - -from packaging import util -from packaging.command.cmd import Command -from packaging.errors import PackagingPlatformError, PackagingOptionError - - -def show_formats(): - """Print list of available formats (arguments to "--format" option). - """ - from packaging.fancy_getopt import FancyGetopt - formats = [] - for format in bdist.format_commands: - formats.append(("formats=" + format, None, - bdist.format_command[format][1])) - pretty_printer = FancyGetopt(formats) - pretty_printer.print_help("List of available distribution formats:") - - -class bdist(Command): - - description = "create a built (binary) distribution" - - user_options = [('bdist-base=', 'b', - "temporary directory for creating built distributions"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % util.get_platform()), - ('formats=', None, - "formats for distribution (comma-separated list)"), - ('dist-dir=', 'd', - "directory to put final built distributions in " - "[default: dist]"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ('owner=', 'u', - "Owner name used when creating a tar file" - " [default: current user]"), - ('group=', 'g', - "Group name used when creating a tar file" - " [default: current group]"), - ] - - boolean_options = ['skip-build'] - - help_options = [ - ('help-formats', None, - "lists available distribution formats", show_formats), - ] - - # This is of course very simplistic. The various UNIX family operating - # systems have their specific formats, but they are out of scope for us; - # bdist_dumb is, well, dumb; it's more a building block for other - # packaging tools than a real end-user binary format. - default_format = {'posix': 'gztar', - 'nt': 'zip', - 'os2': 'zip'} - - # Establish the preferred order (for the --help-formats option). - format_commands = ['gztar', 'bztar', 'tar', - 'wininst', 'zip', 'msi'] - - # And the real information. - format_command = {'gztar': ('bdist_dumb', "gzip'ed tar file"), - 'bztar': ('bdist_dumb', "bzip2'ed tar file"), - 'tar': ('bdist_dumb', "tar file"), - 'wininst': ('bdist_wininst', - "Windows executable installer"), - 'zip': ('bdist_dumb', "ZIP file"), - 'msi': ('bdist_msi', "Microsoft Installer"), - } - - def initialize_options(self): - self.bdist_base = None - self.plat_name = None - self.formats = None - self.dist_dir = None - self.skip_build = False - self.group = None - self.owner = None - - def finalize_options(self): - # have to finalize 'plat_name' before 'bdist_base' - if self.plat_name is None: - if self.skip_build: - self.plat_name = util.get_platform() - else: - self.plat_name = self.get_finalized_command('build').plat_name - - # 'bdist_base' -- parent of per-built-distribution-format - # temporary directories (eg. we'll probably have - # "build/bdist./dumb", etc.) - if self.bdist_base is None: - build_base = self.get_finalized_command('build').build_base - self.bdist_base = os.path.join(build_base, - 'bdist.' + self.plat_name) - - self.ensure_string_list('formats') - if self.formats is None: - try: - self.formats = [self.default_format[os.name]] - except KeyError: - raise PackagingPlatformError( - "don't know how to create built distributions " - "on platform %s" % os.name) - - if self.dist_dir is None: - self.dist_dir = "dist" - - def run(self): - # Figure out which sub-commands we need to run. 
- commands = [] - for format in self.formats: - try: - commands.append(self.format_command[format][0]) - except KeyError: - raise PackagingOptionError("invalid format '%s'" % format) - - # Reinitialize and run each command. - for i in range(len(self.formats)): - cmd_name = commands[i] - sub_cmd = self.reinitialize_command(cmd_name) - sub_cmd.format = self.formats[i] - - # passing the owner and group names for tar archiving - if cmd_name == 'bdist_dumb': - sub_cmd.owner = self.owner - sub_cmd.group = self.group - - # If we're going to need to run this command again, tell it to - # keep its temporary files around so subsequent runs go faster. - if cmd_name in commands[i+1:]: - sub_cmd.keep_temp = True - self.run_command(cmd_name) diff --git a/Lib/packaging/command/bdist_dumb.py b/Lib/packaging/command/bdist_dumb.py deleted file mode 100644 --- a/Lib/packaging/command/bdist_dumb.py +++ /dev/null @@ -1,139 +0,0 @@ -"""Create a "dumb" built distribution. - -A dumb distribution is just an archive meant to be unpacked under -sys.prefix or sys.exec_prefix. -""" - -import os -from shutil import rmtree -from sysconfig import get_python_version - -from packaging.util import get_platform -from packaging.command.cmd import Command -from packaging.errors import PackagingPlatformError -from packaging import logger - - -class bdist_dumb(Command): - - description = 'create a "dumb" built distribution' - - user_options = [('bdist-dir=', 'd', - "temporary directory for creating the distribution"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_platform()), - ('format=', 'f', - "archive format to create (tar, gztar, bztar, zip)"), - ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), - ('dist-dir=', 'd', - "directory to put final built distributions in"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ('relative', None, - "build the archive using relative paths" - "(default: false)"), - ('owner=', 'u', - "Owner name used when creating a tar file" - " [default: current user]"), - ('group=', 'g', - "Group name used when creating a tar file" - " [default: current group]"), - ] - - boolean_options = ['keep-temp', 'skip-build', 'relative'] - - default_format = {'posix': 'gztar', - 'nt': 'zip', - 'os2': 'zip'} - - def initialize_options(self): - self.bdist_dir = None - self.plat_name = None - self.format = None - self.keep_temp = False - self.dist_dir = None - self.skip_build = None - self.relative = False - self.owner = None - self.group = None - - def finalize_options(self): - if self.bdist_dir is None: - bdist_base = self.get_finalized_command('bdist').bdist_base - self.bdist_dir = os.path.join(bdist_base, 'dumb') - - if self.format is None: - try: - self.format = self.default_format[os.name] - except KeyError: - raise PackagingPlatformError( - "don't know how to create dumb built distributions " - "on platform %s" % os.name) - - self.set_undefined_options('bdist', - 'dist_dir', 'plat_name', 'skip_build') - - def run(self): - if not self.skip_build: - self.run_command('build') - - install = self.reinitialize_command('install_dist', - reinit_subcommands=True) - install.root = self.bdist_dir - install.skip_build = self.skip_build - install.warn_dir = False - - logger.info("installing to %s", self.bdist_dir) - self.run_command('install_dist') - - # And make an archive relative to the root of the - # pseudo-installation tree. 
- archive_basename = "%s.%s" % (self.distribution.get_fullname(), - self.plat_name) - - # OS/2 objects to any ":" characters in a filename (such as when - # a timestamp is used in a version) so change them to hyphens. - if os.name == "os2": - archive_basename = archive_basename.replace(":", "-") - - pseudoinstall_root = os.path.join(self.dist_dir, archive_basename) - if not self.relative: - archive_root = self.bdist_dir - else: - if (self.distribution.has_ext_modules() and - (install.install_base != install.install_platbase)): - raise PackagingPlatformError( - "can't make a dumb built distribution where base and " - "platbase are different (%r, %r)" % - (install.install_base, install.install_platbase)) - else: - archive_root = os.path.join( - self.bdist_dir, - self._ensure_relative(install.install_base)) - - # Make the archive - filename = self.make_archive(pseudoinstall_root, - self.format, root_dir=archive_root, - owner=self.owner, group=self.group) - if self.distribution.has_ext_modules(): - pyversion = get_python_version() - else: - pyversion = 'any' - self.distribution.dist_files.append(('bdist_dumb', pyversion, - filename)) - - if not self.keep_temp: - if self.dry_run: - logger.info('removing %s', self.bdist_dir) - else: - rmtree(self.bdist_dir) - - def _ensure_relative(self, path): - # copied from dir_util, deleted - drive, path = os.path.splitdrive(path) - if path[0:1] == os.sep: - path = drive + path[1:] - return path diff --git a/Lib/packaging/command/bdist_msi.py b/Lib/packaging/command/bdist_msi.py deleted file mode 100644 --- a/Lib/packaging/command/bdist_msi.py +++ /dev/null @@ -1,743 +0,0 @@ -"""Create a Microsoft Installer (.msi) binary distribution.""" - -# Copyright (C) 2005, 2006 Martin von L?wis -# Licensed to PSF under a Contributor Agreement. - -import sys -import os -import msilib - -from shutil import rmtree -from sysconfig import get_python_version -from packaging.command.cmd import Command -from packaging.version import NormalizedVersion -from packaging.errors import PackagingOptionError -from packaging import logger as log -from packaging.util import get_platform -from msilib import schema, sequence, text -from msilib import Directory, Feature, Dialog, add_data - -class MSIVersion(NormalizedVersion): - """ - MSI ProductVersion must be strictly numeric. - MSIVersion disallows prerelease and postrelease versions. - """ - def __init__(self, *args, **kwargs): - super(MSIVersion, self).__init__(*args, **kwargs) - if not self.is_final: - raise ValueError("ProductVersion must be strictly numeric") - -class PyDialog(Dialog): - """Dialog class with a fixed layout: controls at the top, then a ruler, - then a list of buttons: back, next, cancel. Optionally a bitmap at the - left.""" - def __init__(self, *args, **kw): - """Dialog(database, name, x, y, w, h, attributes, title, first, - default, cancel, bitmap=true)""" - super(PyDialog, self).__init__(*args) - ruler = self.h - 36 - #if kw.get("bitmap", True): - # self.bitmap("Bitmap", 0, 0, bmwidth, ruler, "PythonWin") - self.line("BottomLine", 0, ruler, self.w, 0) - - def title(self, title): - "Set the title text of the dialog at the top." - # name, x, y, w, h, flags=Visible|Enabled|Transparent|NoPrefix, - # text, in VerdanaBold10 - self.text("Title", 15, 10, 320, 60, 0x30003, - r"{\VerdanaBold10}%s" % title) - - def back(self, title, next, name = "Back", active = 1): - """Add a back button with a given title, the tab-next button, - its name in the Control table, possibly initially disabled. 
- - Return the button, so that events can be associated""" - if active: - flags = 3 # Visible|Enabled - else: - flags = 1 # Visible - return self.pushbutton(name, 180, self.h-27 , 56, 17, flags, title, next) - - def cancel(self, title, next, name = "Cancel", active = 1): - """Add a cancel button with a given title, the tab-next button, - its name in the Control table, possibly initially disabled. - - Return the button, so that events can be associated""" - if active: - flags = 3 # Visible|Enabled - else: - flags = 1 # Visible - return self.pushbutton(name, 304, self.h-27, 56, 17, flags, title, next) - - def next(self, title, next, name = "Next", active = 1): - """Add a Next button with a given title, the tab-next button, - its name in the Control table, possibly initially disabled. - - Return the button, so that events can be associated""" - if active: - flags = 3 # Visible|Enabled - else: - flags = 1 # Visible - return self.pushbutton(name, 236, self.h-27, 56, 17, flags, title, next) - - def xbutton(self, name, title, next, xpos): - """Add a button with a given title, the tab-next button, - its name in the Control table, giving its x position; the - y-position is aligned with the other buttons. - - Return the button, so that events can be associated""" - return self.pushbutton(name, int(self.w*xpos - 28), self.h-27, 56, 17, 3, title, next) - -class bdist_msi(Command): - - description = "create a Microsoft Installer (.msi) binary distribution" - - user_options = [('bdist-dir=', None, - "temporary directory for creating the distribution"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_platform()), - ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), - ('target-version=', None, - "require a specific python version" + - " on the target system"), - ('no-target-compile', 'c', - "do not compile .py to .pyc on the target system"), - ('no-target-optimize', 'o', - "do not compile .py to .pyo (optimized)" - "on the target system"), - ('dist-dir=', 'd', - "directory to put final built distributions in"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ('install-script=', None, - "basename of installation script to be run after" - "installation or before deinstallation"), - ('pre-install-script=', None, - "Fully qualified filename of a script to be run before " - "any files are installed. 
This script need not be in the " - "distribution"), - ] - - boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize', - 'skip-build'] - - all_versions = ['2.0', '2.1', '2.2', '2.3', '2.4', - '2.5', '2.6', '2.7', '2.8', '2.9', - '3.0', '3.1', '3.2', '3.3', '3.4', - '3.5', '3.6', '3.7', '3.8', '3.9'] - other_version = 'X' - - def initialize_options(self): - self.bdist_dir = None - self.plat_name = None - self.keep_temp = False - self.no_target_compile = False - self.no_target_optimize = False - self.target_version = None - self.dist_dir = None - self.skip_build = None - self.install_script = None - self.pre_install_script = None - self.versions = None - - def finalize_options(self): - self.set_undefined_options('bdist', 'skip_build') - - if self.bdist_dir is None: - bdist_base = self.get_finalized_command('bdist').bdist_base - self.bdist_dir = os.path.join(bdist_base, 'msi') - - short_version = get_python_version() - if (not self.target_version) and self.distribution.has_ext_modules(): - self.target_version = short_version - - if self.target_version: - self.versions = [self.target_version] - if not self.skip_build and self.distribution.has_ext_modules()\ - and self.target_version != short_version: - raise PackagingOptionError("target version can only be %s, or the '--skip-build'" \ - " option must be specified" % (short_version,)) - else: - self.versions = list(self.all_versions) - - self.set_undefined_options('bdist', 'dist_dir', 'plat_name') - - if self.pre_install_script: - raise PackagingOptionError("the pre-install-script feature is not yet implemented") - - if self.install_script: - for script in self.distribution.scripts: - if self.install_script == os.path.basename(script): - break - else: - raise PackagingOptionError("install_script '%s' not found in scripts" % \ - self.install_script) - self.install_script_key = None - - - def run(self): - if not self.skip_build: - self.run_command('build') - - install = self.reinitialize_command('install_dist', - reinit_subcommands=True) - install.prefix = self.bdist_dir - install.skip_build = self.skip_build - install.warn_dir = False - - install_lib = self.reinitialize_command('install_lib') - # we do not want to include pyc or pyo files - install_lib.compile = False - install_lib.optimize = 0 - - if self.distribution.has_ext_modules(): - # If we are building an installer for a Python version other - # than the one we are currently running, then we need to ensure - # our build_lib reflects the other Python version rather than ours. - # Note that for target_version!=sys.version, we must have skipped the - # build step, so there is no issue with enforcing the build of this - # version. 
- target_version = self.target_version - if not target_version: - assert self.skip_build, "Should have already checked this" - target_version = '%s.%s' % sys.version_info[:2] - plat_specifier = ".%s-%s" % (self.plat_name, target_version) - build = self.get_finalized_command('build') - build.build_lib = os.path.join(build.build_base, - 'lib' + plat_specifier) - - log.info("installing to %s", self.bdist_dir) - install.ensure_finalized() - - # avoid warning of 'install_lib' about installing - # into a directory not in sys.path - sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB')) - - install.run() - - del sys.path[0] - - self.mkpath(self.dist_dir) - fullname = self.distribution.get_fullname() - installer_name = self.get_installer_filename(fullname) - installer_name = os.path.abspath(installer_name) - if os.path.exists(installer_name): os.unlink(installer_name) - - metadata = self.distribution.metadata - author = metadata.author - if not author: - author = metadata.maintainer - if not author: - author = "UNKNOWN" - version = MSIVersion(metadata.get_version()) - # Prefix ProductName with Python x.y, so that - # it sorts together with the other Python packages - # in Add-Remove-Programs (APR) - fullname = self.distribution.get_fullname() - if self.target_version: - product_name = "Python %s %s" % (self.target_version, fullname) - else: - product_name = "Python %s" % (fullname) - self.db = msilib.init_database(installer_name, schema, - product_name, msilib.gen_uuid(), - str(version), author) - msilib.add_tables(self.db, sequence) - props = [('DistVersion', version)] - email = metadata.author_email or metadata.maintainer_email - if email: - props.append(("ARPCONTACT", email)) - if metadata.url: - props.append(("ARPURLINFOABOUT", metadata.url)) - if props: - add_data(self.db, 'Property', props) - - self.add_find_python() - self.add_files() - self.add_scripts() - self.add_ui() - self.db.Commit() - - if hasattr(self.distribution, 'dist_files'): - tup = 'bdist_msi', self.target_version or 'any', fullname - self.distribution.dist_files.append(tup) - - if not self.keep_temp: - log.info("removing temporary build directory %s", self.bdist_dir) - if not self.dry_run: - rmtree(self.bdist_dir) - - def add_files(self): - db = self.db - cab = msilib.CAB("distfiles") - rootdir = os.path.abspath(self.bdist_dir) - - root = Directory(db, cab, None, rootdir, "TARGETDIR", "SourceDir") - f = Feature(db, "Python", "Python", "Everything", - 0, 1, directory="TARGETDIR") - - items = [(f, root, '')] - for version in self.versions + [self.other_version]: - target = "TARGETDIR" + version - name = default = "Python" + version - desc = "Everything" - if version is self.other_version: - title = "Python from another location" - level = 2 - else: - title = "Python %s from registry" % version - level = 1 - f = Feature(db, name, title, desc, 1, level, directory=target) - dir = Directory(db, cab, root, rootdir, target, default) - items.append((f, dir, version)) - db.Commit() - - seen = {} - for feature, dir, version in items: - todo = [dir] - while todo: - dir = todo.pop() - for file in os.listdir(dir.absolute): - afile = os.path.join(dir.absolute, file) - if os.path.isdir(afile): - short = "%s|%s" % (dir.make_short(file), file) - default = file + version - newdir = Directory(db, cab, dir, file, default, short) - todo.append(newdir) - else: - if not dir.component: - dir.start_component(dir.logical, feature, 0) - if afile not in seen: - key = seen[afile] = dir.add_file(file) - if file==self.install_script: - if 
self.install_script_key: - raise PackagingOptionError( - "Multiple files with name %s" % file) - self.install_script_key = '[#%s]' % key - else: - key = seen[afile] - add_data(self.db, "DuplicateFile", - [(key + version, dir.component, key, None, dir.logical)]) - db.Commit() - cab.commit(db) - - def add_find_python(self): - """Adds code to the installer to compute the location of Python. - - Properties PYTHON.MACHINE.X.Y and PYTHON.USER.X.Y will be set from the - registry for each version of Python. - - Properties TARGETDIRX.Y will be set from PYTHON.USER.X.Y if defined, - else from PYTHON.MACHINE.X.Y. - - Properties PYTHONX.Y will be set to TARGETDIRX.Y\\python.exe""" - - start = 402 - for ver in self.versions: - install_path = r"SOFTWARE\Python\PythonCore\%s\InstallPath" % ver - machine_reg = "python.machine." + ver - user_reg = "python.user." + ver - machine_prop = "PYTHON.MACHINE." + ver - user_prop = "PYTHON.USER." + ver - machine_action = "PythonFromMachine" + ver - user_action = "PythonFromUser" + ver - exe_action = "PythonExe" + ver - target_dir_prop = "TARGETDIR" + ver - exe_prop = "PYTHON" + ver - if msilib.Win64: - # type: msidbLocatorTypeRawValue + msidbLocatorType64bit - Type = 2+16 - else: - Type = 2 - add_data(self.db, "RegLocator", - [(machine_reg, 2, install_path, None, Type), - (user_reg, 1, install_path, None, Type)]) - add_data(self.db, "AppSearch", - [(machine_prop, machine_reg), - (user_prop, user_reg)]) - add_data(self.db, "CustomAction", - [(machine_action, 51+256, target_dir_prop, "[" + machine_prop + "]"), - (user_action, 51+256, target_dir_prop, "[" + user_prop + "]"), - (exe_action, 51+256, exe_prop, "[" + target_dir_prop + "]\\python.exe"), - ]) - add_data(self.db, "InstallExecuteSequence", - [(machine_action, machine_prop, start), - (user_action, user_prop, start + 1), - (exe_action, None, start + 2), - ]) - add_data(self.db, "InstallUISequence", - [(machine_action, machine_prop, start), - (user_action, user_prop, start + 1), - (exe_action, None, start + 2), - ]) - add_data(self.db, "Condition", - [("Python" + ver, 0, "NOT TARGETDIR" + ver)]) - start += 4 - assert start < 500 - - def add_scripts(self): - if self.install_script: - start = 6800 - for ver in self.versions + [self.other_version]: - install_action = "install_script." + ver - exe_prop = "PYTHON" + ver - add_data(self.db, "CustomAction", - [(install_action, 50, exe_prop, self.install_script_key)]) - add_data(self.db, "InstallExecuteSequence", - [(install_action, "&Python%s=3" % ver, start)]) - start += 1 - # XXX pre-install scripts are currently refused in finalize_options() - # but if this feature is completed, it will also need to add - # entries for each version as the above code does - if self.pre_install_script: - scriptfn = os.path.join(self.bdist_dir, "preinstall.bat") - with open(scriptfn, "w") as f: - # The batch file will be executed with [PYTHON], so that %1 - # is the path to the Python interpreter; %0 will be the path - # of the batch file. 
- # rem =""" - # %1 %0 - # exit - # """ - # - f.write('rem ="""\n%1 %0\nexit\n"""\n') - with open(self.pre_install_script) as fp: - f.write(fp.read()) - add_data(self.db, "Binary", - [("PreInstall", msilib.Binary(scriptfn)), - ]) - add_data(self.db, "CustomAction", - [("PreInstall", 2, "PreInstall", None), - ]) - add_data(self.db, "InstallExecuteSequence", - [("PreInstall", "NOT Installed", 450), - ]) - - def add_ui(self): - db = self.db - x = y = 50 - w = 370 - h = 300 - title = "[ProductName] Setup" - - # see "Dialog Style Bits" - modal = 3 # visible | modal - modeless = 1 # visible - - # UI customization properties - add_data(db, "Property", - # See "DefaultUIFont Property" - [("DefaultUIFont", "DlgFont8"), - # See "ErrorDialog Style Bit" - ("ErrorDialog", "ErrorDlg"), - ("Progress1", "Install"), # modified in maintenance type dlg - ("Progress2", "installs"), - ("MaintenanceForm_Action", "Repair"), - # possible values: ALL, JUSTME - ("WhichUsers", "ALL") - ]) - - # Fonts, see "TextStyle Table" - add_data(db, "TextStyle", - [("DlgFont8", "Tahoma", 9, None, 0), - ("DlgFontBold8", "Tahoma", 8, None, 1), #bold - ("VerdanaBold10", "Verdana", 10, None, 1), - ("VerdanaRed9", "Verdana", 9, 255, 0), - ]) - - # UI Sequences, see "InstallUISequence Table", "Using a Sequence Table" - # Numbers indicate sequence; see sequence.py for how these action integrate - add_data(db, "InstallUISequence", - [("PrepareDlg", "Not Privileged or Windows9x or Installed", 140), - ("WhichUsersDlg", "Privileged and not Windows9x and not Installed", 141), - # In the user interface, assume all-users installation if privileged. - ("SelectFeaturesDlg", "Not Installed", 1230), - # XXX no support for resume installations yet - #("ResumeDlg", "Installed AND (RESUME OR Preselected)", 1240), - ("MaintenanceTypeDlg", "Installed AND NOT RESUME AND NOT Preselected", 1250), - ("ProgressDlg", None, 1280)]) - - add_data(db, 'ActionText', text.ActionText) - add_data(db, 'UIText', text.UIText) - ##################################################################### - # Standard dialogs: FatalError, UserExit, ExitDialog - fatal=PyDialog(db, "FatalError", x, y, w, h, modal, title, - "Finish", "Finish", "Finish") - fatal.title("[ProductName] Installer ended prematurely") - fatal.back("< Back", "Finish", active = 0) - fatal.cancel("Cancel", "Back", active = 0) - fatal.text("Description1", 15, 70, 320, 80, 0x30003, - "[ProductName] setup ended prematurely because of an error. Your system has not been modified. To install this program at a later time, please run the installation again.") - fatal.text("Description2", 15, 155, 320, 20, 0x30003, - "Click the Finish button to exit the Installer.") - c=fatal.next("Finish", "Cancel", name="Finish") - c.event("EndDialog", "Exit") - - user_exit=PyDialog(db, "UserExit", x, y, w, h, modal, title, - "Finish", "Finish", "Finish") - user_exit.title("[ProductName] Installer was interrupted") - user_exit.back("< Back", "Finish", active = 0) - user_exit.cancel("Cancel", "Back", active = 0) - user_exit.text("Description1", 15, 70, 320, 80, 0x30003, - "[ProductName] setup was interrupted. Your system has not been modified. 
" - "To install this program at a later time, please run the installation again.") - user_exit.text("Description2", 15, 155, 320, 20, 0x30003, - "Click the Finish button to exit the Installer.") - c = user_exit.next("Finish", "Cancel", name="Finish") - c.event("EndDialog", "Exit") - - exit_dialog = PyDialog(db, "ExitDialog", x, y, w, h, modal, title, - "Finish", "Finish", "Finish") - exit_dialog.title("Completing the [ProductName] Installer") - exit_dialog.back("< Back", "Finish", active = 0) - exit_dialog.cancel("Cancel", "Back", active = 0) - exit_dialog.text("Description", 15, 235, 320, 20, 0x30003, - "Click the Finish button to exit the Installer.") - c = exit_dialog.next("Finish", "Cancel", name="Finish") - c.event("EndDialog", "Return") - - ##################################################################### - # Required dialog: FilesInUse, ErrorDlg - inuse = PyDialog(db, "FilesInUse", - x, y, w, h, - 19, # KeepModeless|Modal|Visible - title, - "Retry", "Retry", "Retry", bitmap=False) - inuse.text("Title", 15, 6, 200, 15, 0x30003, - r"{\DlgFontBold8}Files in Use") - inuse.text("Description", 20, 23, 280, 20, 0x30003, - "Some files that need to be updated are currently in use.") - inuse.text("Text", 20, 55, 330, 50, 3, - "The following applications are using files that need to be updated by this setup. Close these applications and then click Retry to continue the installation or Cancel to exit it.") - inuse.control("List", "ListBox", 20, 107, 330, 130, 7, "FileInUseProcess", - None, None, None) - c=inuse.back("Exit", "Ignore", name="Exit") - c.event("EndDialog", "Exit") - c=inuse.next("Ignore", "Retry", name="Ignore") - c.event("EndDialog", "Ignore") - c=inuse.cancel("Retry", "Exit", name="Retry") - c.event("EndDialog","Retry") - - # See "Error Dialog". See "ICE20" for the required names of the controls. 
- error = Dialog(db, "ErrorDlg", - 50, 10, 330, 101, - 65543, # Error|Minimize|Modal|Visible - title, - "ErrorText", None, None) - error.text("ErrorText", 50,9,280,48,3, "") - #error.control("ErrorIcon", "Icon", 15, 9, 24, 24, 5242881, None, "py.ico", None, None) - error.pushbutton("N",120,72,81,21,3,"No",None).event("EndDialog","ErrorNo") - error.pushbutton("Y",240,72,81,21,3,"Yes",None).event("EndDialog","ErrorYes") - error.pushbutton("A",0,72,81,21,3,"Abort",None).event("EndDialog","ErrorAbort") - error.pushbutton("C",42,72,81,21,3,"Cancel",None).event("EndDialog","ErrorCancel") - error.pushbutton("I",81,72,81,21,3,"Ignore",None).event("EndDialog","ErrorIgnore") - error.pushbutton("O",159,72,81,21,3,"Ok",None).event("EndDialog","ErrorOk") - error.pushbutton("R",198,72,81,21,3,"Retry",None).event("EndDialog","ErrorRetry") - - ##################################################################### - # Global "Query Cancel" dialog - cancel = Dialog(db, "CancelDlg", 50, 10, 260, 85, 3, title, - "No", "No", "No") - cancel.text("Text", 48, 15, 194, 30, 3, - "Are you sure you want to cancel [ProductName] installation?") - #cancel.control("Icon", "Icon", 15, 15, 24, 24, 5242881, None, - # "py.ico", None, None) - c=cancel.pushbutton("Yes", 72, 57, 56, 17, 3, "Yes", "No") - c.event("EndDialog", "Exit") - - c=cancel.pushbutton("No", 132, 57, 56, 17, 3, "No", "Yes") - c.event("EndDialog", "Return") - - ##################################################################### - # Global "Wait for costing" dialog - costing = Dialog(db, "WaitForCostingDlg", 50, 10, 260, 85, modal, title, - "Return", "Return", "Return") - costing.text("Text", 48, 15, 194, 30, 3, - "Please wait while the installer finishes determining your disk space requirements.") - c = costing.pushbutton("Return", 102, 57, 56, 17, 3, "Return", None) - c.event("EndDialog", "Exit") - - ##################################################################### - # Preparation dialog: no user input except cancellation - prep = PyDialog(db, "PrepareDlg", x, y, w, h, modeless, title, - "Cancel", "Cancel", "Cancel") - prep.text("Description", 15, 70, 320, 40, 0x30003, - "Please wait while the Installer prepares to guide you through the installation.") - prep.title("Welcome to the [ProductName] Installer") - c=prep.text("ActionText", 15, 110, 320, 20, 0x30003, "Pondering...") - c.mapping("ActionText", "Text") - c=prep.text("ActionData", 15, 135, 320, 30, 0x30003, None) - c.mapping("ActionData", "Text") - prep.back("Back", None, active=0) - prep.next("Next", None, active=0) - c=prep.cancel("Cancel", None) - c.event("SpawnDialog", "CancelDlg") - - ##################################################################### - # Feature (Python directory) selection - seldlg = PyDialog(db, "SelectFeaturesDlg", x, y, w, h, modal, title, - "Next", "Next", "Cancel") - seldlg.title("Select Python Installations") - - seldlg.text("Hint", 15, 30, 300, 20, 3, - "Select the Python locations where %s should be installed." 
- % self.distribution.get_fullname()) - - seldlg.back("< Back", None, active=0) - c = seldlg.next("Next >", "Cancel") - order = 1 - c.event("[TARGETDIR]", "[SourceDir]", ordering=order) - for version in self.versions + [self.other_version]: - order += 1 - c.event("[TARGETDIR]", "[TARGETDIR%s]" % version, - "FEATURE_SELECTED AND &Python%s=3" % version, - ordering=order) - c.event("SpawnWaitDialog", "WaitForCostingDlg", ordering=order + 1) - c.event("EndDialog", "Return", ordering=order + 2) - c = seldlg.cancel("Cancel", "Features") - c.event("SpawnDialog", "CancelDlg") - - c = seldlg.control("Features", "SelectionTree", 15, 60, 300, 120, 3, - "FEATURE", None, "PathEdit", None) - c.event("[FEATURE_SELECTED]", "1") - ver = self.other_version - install_other_cond = "FEATURE_SELECTED AND &Python%s=3" % ver - dont_install_other_cond = "FEATURE_SELECTED AND &Python%s<>3" % ver - - c = seldlg.text("Other", 15, 200, 300, 15, 3, - "Provide an alternate Python location") - c.condition("Enable", install_other_cond) - c.condition("Show", install_other_cond) - c.condition("Disable", dont_install_other_cond) - c.condition("Hide", dont_install_other_cond) - - c = seldlg.control("PathEdit", "PathEdit", 15, 215, 300, 16, 1, - "TARGETDIR" + ver, None, "Next", None) - c.condition("Enable", install_other_cond) - c.condition("Show", install_other_cond) - c.condition("Disable", dont_install_other_cond) - c.condition("Hide", dont_install_other_cond) - - ##################################################################### - # Disk cost - cost = PyDialog(db, "DiskCostDlg", x, y, w, h, modal, title, - "OK", "OK", "OK", bitmap=False) - cost.text("Title", 15, 6, 200, 15, 0x30003, - "{\DlgFontBold8}Disk Space Requirements") - cost.text("Description", 20, 20, 280, 20, 0x30003, - "The disk space required for the installation of the selected features.") - cost.text("Text", 20, 53, 330, 60, 3, - "The highlighted volumes (if any) do not have enough disk space " - "available for the currently selected features. You can either " - "remove some files from the highlighted volumes, or choose to " - "install less features onto local drive(s), or select different " - "destination drive(s).") - cost.control("VolumeList", "VolumeCostList", 20, 100, 330, 150, 393223, - None, "{120}{70}{70}{70}{70}", None, None) - cost.xbutton("OK", "Ok", None, 0.5).event("EndDialog", "Return") - - ##################################################################### - # WhichUsers Dialog. Only available on NT, and for privileged users. - # This must be run before FindRelatedProducts, because that will - # take into account whether the previous installation was per-user - # or per-machine. We currently don't support going back to this - # dialog after "Next" was selected; to support this, we would need to - # find how to reset the ALLUSERS property, and how to re-run - # FindRelatedProducts. - # On Windows9x, the ALLUSERS property is ignored on the command line - # and in the Property table, but installer fails according to the documentation - # if a dialog attempts to set ALLUSERS. 
- whichusers = PyDialog(db, "WhichUsersDlg", x, y, w, h, modal, title, - "AdminInstall", "Next", "Cancel") - whichusers.title("Select whether to install [ProductName] for all users of this computer.") - # A radio group with two options: allusers, justme - g = whichusers.radiogroup("AdminInstall", 15, 60, 260, 50, 3, - "WhichUsers", "", "Next") - g.add("ALL", 0, 5, 150, 20, "Install for all users") - g.add("JUSTME", 0, 25, 150, 20, "Install just for me") - - whichusers.back("Back", None, active=0) - - c = whichusers.next("Next >", "Cancel") - c.event("[ALLUSERS]", "1", 'WhichUsers="ALL"', 1) - c.event("EndDialog", "Return", ordering = 2) - - c = whichusers.cancel("Cancel", "AdminInstall") - c.event("SpawnDialog", "CancelDlg") - - ##################################################################### - # Installation Progress dialog (modeless) - progress = PyDialog(db, "ProgressDlg", x, y, w, h, modeless, title, - "Cancel", "Cancel", "Cancel", bitmap=False) - progress.text("Title", 20, 15, 200, 15, 0x30003, - "{\DlgFontBold8}[Progress1] [ProductName]") - progress.text("Text", 35, 65, 300, 30, 3, - "Please wait while the Installer [Progress2] [ProductName]. " - "This may take several minutes.") - progress.text("StatusLabel", 35, 100, 35, 20, 3, "Status:") - - c=progress.text("ActionText", 70, 100, w-70, 20, 3, "Pondering...") - c.mapping("ActionText", "Text") - - #c=progress.text("ActionData", 35, 140, 300, 20, 3, None) - #c.mapping("ActionData", "Text") - - c=progress.control("ProgressBar", "ProgressBar", 35, 120, 300, 10, 65537, - None, "Progress done", None, None) - c.mapping("SetProgress", "Progress") - - progress.back("< Back", "Next", active=False) - progress.next("Next >", "Cancel", active=False) - progress.cancel("Cancel", "Back").event("SpawnDialog", "CancelDlg") - - ################################################################### - # Maintenance type: repair/uninstall - maint = PyDialog(db, "MaintenanceTypeDlg", x, y, w, h, modal, title, - "Next", "Next", "Cancel") - maint.title("Welcome to the [ProductName] Setup Wizard") - maint.text("BodyText", 15, 63, 330, 42, 3, - "Select whether you want to repair or remove [ProductName].") - g=maint.radiogroup("RepairRadioGroup", 15, 108, 330, 60, 3, - "MaintenanceForm_Action", "", "Next") - #g.add("Change", 0, 0, 200, 17, "&Change [ProductName]") - g.add("Repair", 0, 18, 200, 17, "&Repair [ProductName]") - g.add("Remove", 0, 36, 200, 17, "Re&move [ProductName]") - - maint.back("< Back", None, active=False) - c=maint.next("Finish", "Cancel") - # Change installation: Change progress dialog to "Change", then ask - # for feature selection - #c.event("[Progress1]", "Change", 'MaintenanceForm_Action="Change"', 1) - #c.event("[Progress2]", "changes", 'MaintenanceForm_Action="Change"', 2) - - # Reinstall: Change progress dialog to "Repair", then invoke reinstall - # Also set list of reinstalled features to "ALL" - c.event("[REINSTALL]", "ALL", 'MaintenanceForm_Action="Repair"', 5) - c.event("[Progress1]", "Repairing", 'MaintenanceForm_Action="Repair"', 6) - c.event("[Progress2]", "repairs", 'MaintenanceForm_Action="Repair"', 7) - c.event("Reinstall", "ALL", 'MaintenanceForm_Action="Repair"', 8) - - # Uninstall: Change progress to "Remove", then invoke uninstall - # Also set list of removed features to "ALL" - c.event("[REMOVE]", "ALL", 'MaintenanceForm_Action="Remove"', 11) - c.event("[Progress1]", "Removing", 'MaintenanceForm_Action="Remove"', 12) - c.event("[Progress2]", "removes", 'MaintenanceForm_Action="Remove"', 13) - c.event("Remove", 
"ALL", 'MaintenanceForm_Action="Remove"', 14) - - # Close dialog when maintenance action scheduled - c.event("EndDialog", "Return", 'MaintenanceForm_Action<>"Change"', 20) - #c.event("NewDialog", "SelectFeaturesDlg", 'MaintenanceForm_Action="Change"', 21) - - maint.cancel("Cancel", "RepairRadioGroup").event("SpawnDialog", "CancelDlg") - - def get_installer_filename(self, fullname): - # Factored out to allow overriding in subclasses - if self.target_version: - base_name = "%s.%s-py%s.msi" % (fullname, self.plat_name, - self.target_version) - else: - base_name = "%s.%s.msi" % (fullname, self.plat_name) - installer_name = os.path.join(self.dist_dir, base_name) - return installer_name diff --git a/Lib/packaging/command/bdist_wininst.py b/Lib/packaging/command/bdist_wininst.py deleted file mode 100644 --- a/Lib/packaging/command/bdist_wininst.py +++ /dev/null @@ -1,345 +0,0 @@ -"""Create an executable installer for Windows.""" - -import sys -import os - -from shutil import rmtree -from sysconfig import get_python_version -from packaging.command.cmd import Command -from packaging.errors import PackagingOptionError, PackagingPlatformError -from packaging import logger -from packaging.util import get_platform - - -class bdist_wininst(Command): - - description = "create an executable installer for Windows" - - user_options = [('bdist-dir=', None, - "temporary directory for creating the distribution"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_platform()), - ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), - ('target-version=', None, - "require a specific python version" + - " on the target system"), - ('no-target-compile', 'c', - "do not compile .py to .pyc on the target system"), - ('no-target-optimize', 'o', - "do not compile .py to .pyo (optimized)" - "on the target system"), - ('dist-dir=', 'd', - "directory to put final built distributions in"), - ('bitmap=', 'b', - "bitmap to use for the installer instead of python-powered logo"), - ('title=', 't', - "title to display on the installer background instead of default"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ('install-script=', None, - "basename of installation script to be run after" - "installation or before deinstallation"), - ('pre-install-script=', None, - "Fully qualified filename of a script to be run before " - "any files are installed. This script need not be in the " - "distribution"), - ('user-access-control=', None, - "specify Vista's UAC handling - 'none'/default=no " - "handling, 'auto'=use UAC if target Python installed for " - "all users, 'force'=always use UAC"), - ] - - boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize', - 'skip-build'] - - def initialize_options(self): - self.bdist_dir = None - self.plat_name = None - self.keep_temp = False - self.no_target_compile = False - self.no_target_optimize = False - self.target_version = None - self.dist_dir = None - self.bitmap = None - self.title = None - self.skip_build = None - self.install_script = None - self.pre_install_script = None - self.user_access_control = None - - - def finalize_options(self): - self.set_undefined_options('bdist', 'skip_build') - - if self.bdist_dir is None: - if self.skip_build and self.plat_name: - # If build is skipped and plat_name is overridden, bdist will - # not see the correct 'plat_name' - so set that up manually. 
- bdist = self.distribution.get_command_obj('bdist') - bdist.plat_name = self.plat_name - # next the command will be initialized using that name - bdist_base = self.get_finalized_command('bdist').bdist_base - self.bdist_dir = os.path.join(bdist_base, 'wininst') - - if not self.target_version: - self.target_version = "" - - if not self.skip_build and self.distribution.has_ext_modules(): - short_version = get_python_version() - if self.target_version and self.target_version != short_version: - raise PackagingOptionError("target version can only be %s, or the '--skip-build'" \ - " option must be specified" % (short_version,)) - self.target_version = short_version - - self.set_undefined_options('bdist', 'dist_dir', 'plat_name') - - if self.install_script: - for script in self.distribution.scripts: - if self.install_script == os.path.basename(script): - break - else: - raise PackagingOptionError("install_script '%s' not found in scripts" % \ - self.install_script) - - def run(self): - if (sys.platform != "win32" and - (self.distribution.has_ext_modules() or - self.distribution.has_c_libraries())): - raise PackagingPlatformError \ - ("distribution contains extensions and/or C libraries; " - "must be compiled on a Windows 32 platform") - - if not self.skip_build: - self.run_command('build') - - install = self.reinitialize_command('install', reinit_subcommands=True) - install.root = self.bdist_dir - install.skip_build = self.skip_build - install.warn_dir = False - install.plat_name = self.plat_name - - install_lib = self.reinitialize_command('install_lib') - # we do not want to include pyc or pyo files - install_lib.compile = False - install_lib.optimize = 0 - - if self.distribution.has_ext_modules(): - # If we are building an installer for a Python version other - # than the one we are currently running, then we need to ensure - # our build_lib reflects the other Python version rather than ours. - # Note that for target_version!=sys.version, we must have skipped the - # build step, so there is no issue with enforcing the build of this - # version. - target_version = self.target_version - if not target_version: - assert self.skip_build, "Should have already checked this" - target_version = '%s.%s' % sys.version_info[:2] - plat_specifier = ".%s-%s" % (self.plat_name, target_version) - build = self.get_finalized_command('build') - build.build_lib = os.path.join(build.build_base, - 'lib' + plat_specifier) - - # Use a custom scheme for the zip-file, because we have to decide - # at installation time which scheme to use. - for key in ('purelib', 'platlib', 'headers', 'scripts', 'data'): - value = key.upper() - if key == 'headers': - value = value + '/Include/$dist_name' - setattr(install, - 'install_' + key, - value) - - logger.info("installing to %s", self.bdist_dir) - install.ensure_finalized() - - # avoid warning of 'install_lib' about installing - # into a directory not in sys.path - sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB')) - - install.run() - - del sys.path[0] - - # And make an archive relative to the root of the - # pseudo-installation tree. 
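(A minimal standalone sketch of that archiving step, using the stdlib helper directly; the directory and project names are invented examples. shutil.make_archive stores member paths relative to root_dir, which is what makes the pseudo-installation tree usable as the archive root:)

    import os
    import shutil

    root = 'build/bdist.win-amd64/wininst'                       # stands in for the command's bdist_dir
    os.makedirs(os.path.join(root, 'PURELIB', 'example'), exist_ok=True)
    arcname = shutil.make_archive('example-1.0', 'zip', root_dir=root)
    # arcname -> 'example-1.0.zip'; entries are stored relative to root_dir
    # (e.g. 'PURELIB/example/'), not as absolute paths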
- from tempfile import NamedTemporaryFile - archive_basename = NamedTemporaryFile().name - fullname = self.distribution.get_fullname() - arcname = self.make_archive(archive_basename, "zip", - root_dir=self.bdist_dir) - # create an exe containing the zip-file - self.create_exe(arcname, fullname, self.bitmap) - if self.distribution.has_ext_modules(): - pyversion = get_python_version() - else: - pyversion = 'any' - self.distribution.dist_files.append(('bdist_wininst', pyversion, - self.get_installer_filename(fullname))) - # remove the zip-file again - logger.debug("removing temporary file '%s'", arcname) - os.remove(arcname) - - if not self.keep_temp: - logger.info('removing %s', self.bdist_dir) - if not self.dry_run: - rmtree(self.bdist_dir) - - def get_inidata(self): - # Return data describing the installation. - - lines = [] - metadata = self.distribution.metadata - - # Write the [metadata] section. - lines.append("[metadata]") - - # 'info' will be displayed in the installer's dialog box, - # describing the items to be installed. - info = (metadata.long_description or '') + '\n' - - # Escape newline characters - def escape(s): - return s.replace("\n", "\\n") - - for name in ["author", "author_email", "description", "maintainer", - "maintainer_email", "name", "url", "version"]: - data = getattr(metadata, name, "") - if data: - info = info + ("\n %s: %s" % \ - (name.capitalize(), escape(data))) - lines.append("%s=%s" % (name, escape(data))) - - # The [setup] section contains entries controlling - # the installer runtime. - lines.append("\n[Setup]") - if self.install_script: - lines.append("install_script=%s" % self.install_script) - lines.append("info=%s" % escape(info)) - lines.append("target_compile=%d" % (not self.no_target_compile)) - lines.append("target_optimize=%d" % (not self.no_target_optimize)) - if self.target_version: - lines.append("target_version=%s" % self.target_version) - if self.user_access_control: - lines.append("user_access_control=%s" % self.user_access_control) - - title = self.title or self.distribution.get_fullname() - lines.append("title=%s" % escape(title)) - import time - import packaging - build_info = "Built %s with packaging-%s" % \ - (time.ctime(time.time()), packaging.__version__) - lines.append("build_info=%s" % build_info) - return "\n".join(lines) - - def create_exe(self, arcname, fullname, bitmap=None): - import struct - - self.mkpath(self.dist_dir) - - cfgdata = self.get_inidata() - - installer_name = self.get_installer_filename(fullname) - logger.info("creating %s", installer_name) - - if bitmap: - with open(bitmap, "rb") as fp: - bitmapdata = fp.read() - bitmaplen = len(bitmapdata) - else: - bitmaplen = 0 - - with open(installer_name, "wb") as file: - file.write(self.get_exe_bytes()) - if bitmap: - file.write(bitmapdata) - - # Convert cfgdata from unicode to ascii, mbcs encoded - if isinstance(cfgdata, str): - cfgdata = cfgdata.encode("mbcs") - - # Append the pre-install script - cfgdata = cfgdata + b"\0" - if self.pre_install_script: - # We need to normalize newlines, so we open in text mode and - # convert back to bytes. "latin-1" simply avoids any possible - # failures. - with open(self.pre_install_script, encoding="latin-1") as fp: - script_data = fp.read().encode("latin-1") - cfgdata = cfgdata + script_data + b"\n\0" - else: - # empty pre-install script - cfgdata = cfgdata + b"\0" - file.write(cfgdata) - - # The 'magic number' 0x1234567B is used to make sure that the - # binary layout of 'cfgdata' is what the wininst.exe binary - # expects. 
If the layout changes, increment that number, make - # the corresponding changes to the wininst.exe sources, and - # recompile them. - header = struct.pack(" cur_version: - bv = get_build_version() - else: - if self.target_version < "2.4": - bv = 6.0 - else: - bv = 7.1 - else: - # for current version - use authoritative check. - bv = get_build_version() - - # wininst-x.y.exe is in the same directory as this file - directory = os.path.dirname(__file__) - # we must use a wininst-x.y.exe built with the same C compiler - # used for python. XXX What about mingw, borland, and so on? - - # if plat_name starts with "win" but is not "win32" - # we want to strip "win" and leave the rest (e.g. -amd64) - # for all other cases, we don't want any suffix - if self.plat_name != 'win32' and self.plat_name[:3] == 'win': - sfix = self.plat_name[3:] - else: - sfix = '' - - filename = os.path.join(directory, "wininst-%.1f%s.exe" % (bv, sfix)) - with open(filename, "rb") as fp: - return fp.read() diff --git a/Lib/packaging/command/build.py b/Lib/packaging/command/build.py deleted file mode 100644 --- a/Lib/packaging/command/build.py +++ /dev/null @@ -1,151 +0,0 @@ -"""Main build command, which calls the other build_* commands.""" - -import sys -import os - -from packaging.util import get_platform -from packaging.command.cmd import Command -from packaging.errors import PackagingOptionError -from packaging.compiler import show_compilers - - -class build(Command): - - description = "build everything needed to install" - - user_options = [ - ('build-base=', 'b', - "base directory for build library"), - ('build-purelib=', None, - "build directory for platform-neutral distributions"), - ('build-platlib=', None, - "build directory for platform-specific distributions"), - ('build-lib=', None, - "build directory for all distribution (defaults to either " + - "build-purelib or build-platlib"), - ('build-scripts=', None, - "build directory for scripts"), - ('build-temp=', 't', - "temporary build directory"), - ('plat-name=', 'p', - "platform name to build for, if supported " - "(default: %s)" % get_platform()), - ('compiler=', 'c', - "specify the compiler type"), - ('debug', 'g', - "compile extensions and libraries with debugging information"), - ('force', 'f', - "forcibly build everything (ignore file timestamps)"), - ('executable=', 'e', - "specify final destination interpreter path (build.py)"), - ('use-2to3', None, - "use 2to3 to make source python 3.x compatible"), - ('convert-2to3-doctests', None, - "use 2to3 to convert doctests in separate text files"), - ('use-2to3-fixers', None, - "list additional fixers opted for during 2to3 conversion"), - ] - - boolean_options = ['debug', 'force'] - - help_options = [ - ('help-compiler', None, - "list available compilers", show_compilers), - ] - - def initialize_options(self): - self.build_base = 'build' - # these are decided only after 'build_base' has its final value - # (unless overridden by the user or client) - self.build_purelib = None - self.build_platlib = None - self.build_lib = None - self.build_temp = None - self.build_scripts = None - self.compiler = None - self.plat_name = None - self.debug = None - self.force = False - self.executable = None - self.use_2to3 = False - self.convert_2to3_doctests = None - self.use_2to3_fixers = None - - def finalize_options(self): - if self.plat_name is None: - self.plat_name = get_platform() - else: - # plat-name only supported for windows (other platforms are - # supported via ./configure flags, if at all). 
Avoid misleading - # other platforms. - if os.name != 'nt': - raise PackagingOptionError( - "--plat-name only supported on Windows (try " - "using './configure --help' on your platform)") - pyversion = '%s.%s' % sys.version_info[:2] - plat_specifier = ".%s-%s" % (self.plat_name, pyversion) - - # Make it so Python 2.x and Python 2.x with --with-pydebug don't - # share the same build directories. Doing so confuses the build - # process for C modules - if hasattr(sys, 'gettotalrefcount'): - plat_specifier += '-pydebug' - - # 'build_purelib' and 'build_platlib' just default to 'lib' and - # 'lib.' under the base build directory. We only use one of - # them for a given distribution, though -- - if self.build_purelib is None: - self.build_purelib = os.path.join(self.build_base, 'lib') - if self.build_platlib is None: - self.build_platlib = os.path.join(self.build_base, - 'lib' + plat_specifier) - - # 'build_lib' is the actual directory that we will use for this - # particular module distribution -- if user didn't supply it, pick - # one of 'build_purelib' or 'build_platlib'. - if self.build_lib is None: - if self.distribution.ext_modules: - self.build_lib = self.build_platlib - else: - self.build_lib = self.build_purelib - - # 'build_temp' -- temporary directory for compiler turds, - # "build/temp." - if self.build_temp is None: - self.build_temp = os.path.join(self.build_base, - 'temp' + plat_specifier) - if self.build_scripts is None: - self.build_scripts = os.path.join(self.build_base, - 'scripts-' + pyversion) - - if self.executable is None: - self.executable = os.path.normpath(sys.executable) - - def run(self): - # Run all relevant sub-commands. This will be some subset of: - # - build_py - pure Python modules - # - build_clib - standalone C libraries - # - build_ext - Python extension modules - # - build_scripts - Python scripts - for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - # -- Predicates for the sub-command list --------------------------- - - def has_pure_modules(self): - return self.distribution.has_pure_modules() - - def has_c_libraries(self): - return self.distribution.has_c_libraries() - - def has_ext_modules(self): - return self.distribution.has_ext_modules() - - def has_scripts(self): - return self.distribution.has_scripts() - - sub_commands = [('build_py', has_pure_modules), - ('build_clib', has_c_libraries), - ('build_ext', has_ext_modules), - ('build_scripts', has_scripts), - ] diff --git a/Lib/packaging/command/build_clib.py b/Lib/packaging/command/build_clib.py deleted file mode 100644 --- a/Lib/packaging/command/build_clib.py +++ /dev/null @@ -1,197 +0,0 @@ -"""Build C/C++ libraries. - -This command is useful to build libraries that are included in the -distribution and needed by extension modules. -""" - -# XXX this module has *lots* of code ripped-off quite transparently from -# build_ext.py -- not surprisingly really, as the work required to build -# a static library from a collection of C source files is not really all -# that different from what's required to build a shared object file from -# a collection of C source files. Nevertheless, I haven't done the -# necessary refactoring to account for the overlap in code between the -# two modules, mainly because a number of subtle details changed in the -# cut 'n paste. Sigh. 
- -import os -from packaging.command.cmd import Command -from packaging.errors import PackagingSetupError -from packaging.compiler import customize_compiler, new_compiler -from packaging import logger - - -def show_compilers(): - from packaging.compiler import show_compilers - show_compilers() - - -class build_clib(Command): - - description = "build C/C++ libraries used by extension modules" - - user_options = [ - ('build-clib=', 'b', - "directory to build C/C++ libraries to"), - ('build-temp=', 't', - "directory to put temporary build by-products"), - ('debug', 'g', - "compile with debugging information"), - ('force', 'f', - "forcibly build everything (ignore file timestamps)"), - ('compiler=', 'c', - "specify the compiler type"), - ] - - boolean_options = ['debug', 'force'] - - help_options = [ - ('help-compiler', None, - "list available compilers", show_compilers), - ] - - def initialize_options(self): - self.build_clib = None - self.build_temp = None - - # List of libraries to build - self.libraries = None - - # Compilation options for all libraries - self.include_dirs = None - self.define = None - self.undef = None - self.debug = None - self.force = False - self.compiler = None - - - def finalize_options(self): - # This might be confusing: both build-clib and build-temp default - # to build-temp as defined by the "build" command. This is because - # I think that C libraries are really just temporary build - # by-products, at least from the point of view of building Python - # extensions -- but I want to keep my options open. - self.set_undefined_options('build', - ('build_temp', 'build_clib'), - ('build_temp', 'build_temp'), - 'compiler', 'debug', 'force') - - self.libraries = self.distribution.libraries - if self.libraries: - self.check_library_list(self.libraries) - - if self.include_dirs is None: - self.include_dirs = self.distribution.include_dirs or [] - if isinstance(self.include_dirs, str): - self.include_dirs = self.include_dirs.split(os.pathsep) - - # XXX same as for build_ext -- what about 'self.define' and - # 'self.undef' ? - - def run(self): - if not self.libraries: - return - - # Yech -- this is cut 'n pasted from build_ext.py! - self.compiler = new_compiler(compiler=self.compiler, - dry_run=self.dry_run, - force=self.force) - customize_compiler(self.compiler) - - if self.include_dirs is not None: - self.compiler.set_include_dirs(self.include_dirs) - if self.define is not None: - # 'define' option is a list of (name,value) tuples - for name, value in self.define: - self.compiler.define_macro(name, value) - if self.undef is not None: - for macro in self.undef: - self.compiler.undefine_macro(macro) - - self.build_libraries(self.libraries) - - - def check_library_list(self, libraries): - """Ensure that the list of libraries is valid. - - `library` is presumably provided as a command option 'libraries'. - This method checks that it is a list of 2-tuples, where the tuples - are (library_name, build_info_dict). - - Raise PackagingSetupError if the structure is invalid anywhere; - just returns otherwise. 
- """ - if not isinstance(libraries, list): - raise PackagingSetupError("'libraries' option must be a list of tuples") - - for lib in libraries: - if not isinstance(lib, tuple) and len(lib) != 2: - raise PackagingSetupError("each element of 'libraries' must a 2-tuple") - - name, build_info = lib - - if not isinstance(name, str): - raise PackagingSetupError("first element of each tuple in 'libraries' " + \ - "must be a string (the library name)") - if '/' in name or (os.sep != '/' and os.sep in name): - raise PackagingSetupError(("bad library name '%s': " + - "may not contain directory separators") % \ - lib[0]) - - if not isinstance(build_info, dict): - raise PackagingSetupError("second element of each tuple in 'libraries' " + \ - "must be a dictionary (build info)") - - def get_library_names(self): - # Assume the library list is valid -- 'check_library_list()' is - # called from 'finalize_options()', so it should be! - if not self.libraries: - return None - - lib_names = [] - for lib_name, build_info in self.libraries: - lib_names.append(lib_name) - return lib_names - - - def get_source_files(self): - self.check_library_list(self.libraries) - filenames = [] - for lib_name, build_info in self.libraries: - sources = build_info.get('sources') - if sources is None or not isinstance(sources, (list, tuple)): - raise PackagingSetupError(("in 'libraries' option (library '%s'), " - "'sources' must be present and must be " - "a list of source filenames") % lib_name) - - filenames.extend(sources) - return filenames - - def build_libraries(self, libraries): - for lib_name, build_info in libraries: - sources = build_info.get('sources') - if sources is None or not isinstance(sources, (list, tuple)): - raise PackagingSetupError(("in 'libraries' option (library '%s'), " + - "'sources' must be present and must be " + - "a list of source filenames") % lib_name) - sources = list(sources) - - logger.info("building '%s' library", lib_name) - - # First, compile the source code to object files in the library - # directory. (This should probably change to putting object - # files in a temporary build directory.) - macros = build_info.get('macros') - include_dirs = build_info.get('include_dirs') - objects = self.compiler.compile(sources, - output_dir=self.build_temp, - macros=macros, - include_dirs=include_dirs, - debug=self.debug) - - # Now "link" the object files together into a static library. - # (On Unix at least, this isn't really linking -- it just - # builds an archive. Whatever.) - self.compiler.create_static_lib(objects, lib_name, - output_dir=self.build_clib, - debug=self.debug) diff --git a/Lib/packaging/command/build_ext.py b/Lib/packaging/command/build_ext.py deleted file mode 100644 --- a/Lib/packaging/command/build_ext.py +++ /dev/null @@ -1,644 +0,0 @@ -"""Build extension modules.""" - -import os -import re -import sys -import site -import sysconfig - -from packaging.util import get_platform -from packaging.command.cmd import Command -from packaging.errors import (CCompilerError, CompileError, PackagingError, - PackagingPlatformError, PackagingSetupError) -from packaging.compiler import customize_compiler, show_compilers -from packaging.util import newer_group -from packaging.compiler.extension import Extension -from packaging import logger - -if os.name == 'nt': - from packaging.compiler.msvccompiler import get_build_version - MSVC_VERSION = int(get_build_version()) - -# An extension name is just a dot-separated list of Python NAMEs (ie. -# the same as a fully-qualified module name). 
-extension_name_re = re.compile \ - (r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$') - - -class build_ext(Command): - - description = "build C/C++ extension modules (compile/link to build directory)" - - # XXX thoughts on how to deal with complex command-line options like - # these, i.e. how to make it so fancy_getopt can suck them off the - # command line and turn them into the appropriate - # lists of tuples of what-have-you. - # - each command needs a callback to process its command-line options - # - Command.__init__() needs access to its share of the whole - # command line (must ultimately come from - # Distribution.parse_command_line()) - # - it then calls the current command class' option-parsing - # callback to deal with weird options like -D, which have to - # parse the option text and churn out some custom data - # structure - # - that data structure (in this case, a list of 2-tuples) - # will then be present in the command object by the time - # we get to finalize_options() (i.e. the constructor - # takes care of both command-line and client options - # in between initialize_options() and finalize_options()) - - sep_by = " (separated by '%s')" % os.pathsep - user_options = [ - ('build-lib=', 'b', - "directory for compiled extension modules"), - ('build-temp=', 't', - "directory for temporary files (build by-products)"), - ('plat-name=', 'p', - "platform name to cross-compile for, if supported " - "(default: %s)" % get_platform()), - ('inplace', 'i', - "ignore build-lib and put compiled extensions into the source " + - "directory alongside your pure Python modules"), - ('user', None, - "add user include, library and rpath"), - ('include-dirs=', 'I', - "list of directories to search for header files" + sep_by), - ('define=', 'D', - "C preprocessor macros to define"), - ('undef=', 'U', - "C preprocessor macros to undefine"), - ('libraries=', 'l', - "external C libraries to link with"), - ('library-dirs=', 'L', - "directories to search for external C libraries" + sep_by), - ('rpath=', 'R', - "directories to search for shared C libraries at runtime"), - ('link-objects=', 'O', - "extra explicit link objects to include in the link"), - ('debug', 'g', - "compile/link with debugging information"), - ('force', 'f', - "forcibly build everything (ignore file timestamps)"), - ('compiler=', 'c', - "specify the compiler type"), - ('swig-opts=', None, - "list of SWIG command-line options"), - ('swig=', None, - "path to the SWIG executable"), - ] - - boolean_options = ['inplace', 'debug', 'force', 'user'] - - - help_options = [ - ('help-compiler', None, - "list available compilers", show_compilers), - ] - - def initialize_options(self): - self.extensions = None - self.build_lib = None - self.plat_name = None - self.build_temp = None - self.inplace = False - self.package = None - - self.include_dirs = None - self.define = None - self.undef = None - self.libraries = None - self.library_dirs = None - self.rpath = None - self.link_objects = None - self.debug = None - self.force = None - self.compiler = None - self.swig = None - self.swig_opts = None - self.user = None - - def finalize_options(self): - self.set_undefined_options('build', - 'build_lib', 'build_temp', 'compiler', - 'debug', 'force', 'plat_name') - - if self.package is None: - self.package = self.distribution.ext_package - - # Ensure that the list of extensions is valid, i.e. it is a list of - # Extension objects. 
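(For reference, a minimal example of a value that would pass this check; the module and file names are invented, the Extension class is the one imported at the top of this file, and a distutils-style Extension(name, sources) signature is assumed:)

    from packaging.compiler.extension import Extension

    ext_modules = [
        Extension('spam._speedups',                    # dotted module name
                  sources=['src/_speedupsmodule.c']),  # example C source path
    ]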
- self.extensions = self.distribution.ext_modules - if self.extensions: - if not isinstance(self.extensions, (list, tuple)): - type_name = (self.extensions is None and 'None' - or type(self.extensions).__name__) - raise PackagingSetupError( - "'ext_modules' must be a sequence of Extension instances," - " not %s" % (type_name,)) - for i, ext in enumerate(self.extensions): - if isinstance(ext, Extension): - continue # OK! (assume type-checking done - # by Extension constructor) - type_name = (ext is None and 'None' or type(ext).__name__) - raise PackagingSetupError( - "'ext_modules' item %d must be an Extension instance," - " not %s" % (i, type_name)) - - # Make sure Python's include directories (for Python.h, pyconfig.h, - # etc.) are in the include search path. - py_include = sysconfig.get_path('include') - plat_py_include = sysconfig.get_path('platinclude') - if self.include_dirs is None: - self.include_dirs = self.distribution.include_dirs or [] - if isinstance(self.include_dirs, str): - self.include_dirs = self.include_dirs.split(os.pathsep) - - # Put the Python "system" include dir at the end, so that - # any local include dirs take precedence. - self.include_dirs.append(py_include) - if plat_py_include != py_include: - self.include_dirs.append(plat_py_include) - - self.ensure_string_list('libraries') - - # Life is easier if we're not forever checking for None, so - # simplify these options to empty lists if unset - if self.libraries is None: - self.libraries = [] - if self.library_dirs is None: - self.library_dirs = [] - elif isinstance(self.library_dirs, str): - self.library_dirs = self.library_dirs.split(os.pathsep) - - if self.rpath is None: - self.rpath = [] - elif isinstance(self.rpath, str): - self.rpath = self.rpath.split(os.pathsep) - - # for extensions under windows use different directories - # for Release and Debug builds. - # also Python's library directory must be appended to library_dirs - if os.name == 'nt': - # the 'libs' directory is for binary installs - we assume that - # must be the *native* platform. But we don't really support - # cross-compiling via a binary install anyway, so we let it go. - # Note that we must use sys.base_exec_prefix here rather than - # exec_prefix, since the Python libs are not copied to a virtual - # environment. 
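(A quick illustration of the prefix distinction; the paths in the comments are invented examples. Inside a virtual environment sys.exec_prefix points at the environment, while sys.base_exec_prefix still points at the base installation, which is the only place the 'libs' import libraries live:)

    import os
    import sys

    # In a venv created from C:\Python33 one would expect roughly:
    #   sys.exec_prefix      -> 'C:\\myproject\\env'
    #   sys.base_exec_prefix -> 'C:\\Python33'
    libs_dir = os.path.join(sys.base_exec_prefix, 'libs')
    # e.g. 'C:\\Python33\\libs' -- the directory appended to library_dirs below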
- self.library_dirs.append(os.path.join(sys.base_exec_prefix, 'libs')) - if self.debug: - self.build_temp = os.path.join(self.build_temp, "Debug") - else: - self.build_temp = os.path.join(self.build_temp, "Release") - - # Append the source distribution include and library directories, - # this allows distutils on windows to work in the source tree - self.include_dirs.append(os.path.join(sys.exec_prefix, 'PC')) - if MSVC_VERSION >= 9: - # Use the .lib files for the correct architecture - if self.plat_name == 'win32': - suffix = '' - else: - # win-amd64 or win-ia64 - suffix = self.plat_name[4:] - new_lib = os.path.join(sys.exec_prefix, 'PCbuild') - if suffix: - new_lib = os.path.join(new_lib, suffix) - self.library_dirs.append(new_lib) - - elif MSVC_VERSION == 8: - self.library_dirs.append(os.path.join(sys.exec_prefix, - 'PC', 'VS8.0')) - elif MSVC_VERSION == 7: - self.library_dirs.append(os.path.join(sys.exec_prefix, - 'PC', 'VS7.1')) - else: - self.library_dirs.append(os.path.join(sys.exec_prefix, - 'PC', 'VC6')) - - # OS/2 (EMX) doesn't support Debug vs Release builds, but has the - # import libraries in its "Config" subdirectory - if os.name == 'os2': - self.library_dirs.append(os.path.join(sys.exec_prefix, 'Config')) - - # for extensions under Cygwin and AtheOS Python's library directory must be - # appended to library_dirs - if sys.platform[:6] == 'cygwin' or sys.platform[:6] == 'atheos': - if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")): - # building third party extensions - self.library_dirs.append(os.path.join(sys.prefix, "lib", - "python" + sysconfig.get_python_version(), - "config")) - else: - # building python standard extensions - self.library_dirs.append(os.curdir) - - # for extensions under Linux or Solaris with a shared Python library, - # Python's library directory must be appended to library_dirs - sysconfig.get_config_var('Py_ENABLE_SHARED') - if (sys.platform.startswith(('linux', 'gnu', 'sunos')) - and sysconfig.get_config_var('Py_ENABLE_SHARED')): - if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")): - # building third party extensions - self.library_dirs.append(sysconfig.get_config_var('LIBDIR')) - else: - # building python standard extensions - self.library_dirs.append(os.curdir) - - # The argument parsing will result in self.define being a string, but - # it has to be a list of 2-tuples. All the preprocessor symbols - # specified by the 'define' option will be set to '1'. Multiple - # symbols can be separated with commas. - - if self.define: - defines = self.define.split(',') - self.define = [(symbol, '1') for symbol in defines] - - # The option for macros to undefine is also a string from the - # option parsing, but has to be a list. Multiple symbols can also - # be separated with commas here. 
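(Roughly, with invented option values, these two conversions amount to:)

    define = 'WITH_SSL,USE_CACHE'          # as received from the option parser
    undef = 'NDEBUG,HAVE_OLD_API'

    define = [(symbol, '1') for symbol in define.split(',')]
    # -> [('WITH_SSL', '1'), ('USE_CACHE', '1')]
    undef = undef.split(',')
    # -> ['NDEBUG', 'HAVE_OLD_API']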
- if self.undef: - self.undef = self.undef.split(',') - - if self.swig_opts is None: - self.swig_opts = [] - else: - self.swig_opts = self.swig_opts.split(' ') - - # Finally add the user include and library directories if requested - if self.user: - user_include = os.path.join(site.USER_BASE, "include") - user_lib = os.path.join(site.USER_BASE, "lib") - if os.path.isdir(user_include): - self.include_dirs.append(user_include) - if os.path.isdir(user_lib): - self.library_dirs.append(user_lib) - self.rpath.append(user_lib) - - def run(self): - from packaging.compiler import new_compiler - - if not self.extensions: - return - - # If we were asked to build any C/C++ libraries, make sure that the - # directory where we put them is in the library search path for - # linking extensions. - if self.distribution.has_c_libraries(): - build_clib = self.get_finalized_command('build_clib') - self.libraries.extend(build_clib.get_library_names() or []) - self.library_dirs.append(build_clib.build_clib) - - # Setup the CCompiler object that we'll use to do all the - # compiling and linking - self.compiler_obj = new_compiler(compiler=self.compiler, - dry_run=self.dry_run, - force=self.force) - - customize_compiler(self.compiler_obj) - # If we are cross-compiling, init the compiler now (if we are not - # cross-compiling, init would not hurt, but people may rely on - # late initialization of compiler even if they shouldn't...) - if os.name == 'nt' and self.plat_name != get_platform(): - self.compiler_obj.initialize(self.plat_name) - - # And make sure that any compile/link-related options (which might - # come from the command line or from the setup script) are set in - # that CCompiler object -- that way, they automatically apply to - # all compiling and linking done here. - if self.include_dirs is not None: - self.compiler_obj.set_include_dirs(self.include_dirs) - if self.define is not None: - # 'define' option is a list of (name,value) tuples - for name, value in self.define: - self.compiler_obj.define_macro(name, value) - if self.undef is not None: - for macro in self.undef: - self.compiler_obj.undefine_macro(macro) - if self.libraries is not None: - self.compiler_obj.set_libraries(self.libraries) - if self.library_dirs is not None: - self.compiler_obj.set_library_dirs(self.library_dirs) - if self.rpath is not None: - self.compiler_obj.set_runtime_library_dirs(self.rpath) - if self.link_objects is not None: - self.compiler_obj.set_link_objects(self.link_objects) - - # Now actually compile and link everything. - self.build_extensions() - - def get_source_files(self): - filenames = [] - - # Wouldn't it be neat if we knew the names of header files too... - for ext in self.extensions: - filenames.extend(ext.sources) - - return filenames - - def get_outputs(self): - # And build the list of output (built) filenames. Note that this - # ignores the 'inplace' flag, and assumes everything goes in the - # "build" tree. 
- outputs = [] - for ext in self.extensions: - outputs.append(self.get_ext_fullpath(ext.name)) - return outputs - - def build_extensions(self): - for ext in self.extensions: - try: - self.build_extension(ext) - except (CCompilerError, PackagingError, CompileError) as e: - if not ext.optional: - raise - logger.warning('%s: building extension %r failed: %s', - self.get_command_name(), ext.name, e) - - def build_extension(self, ext): - sources = ext.sources - if sources is None or not isinstance(sources, (list, tuple)): - raise PackagingSetupError(("in 'ext_modules' option (extension '%s'), " + - "'sources' must be present and must be " + - "a list of source filenames") % ext.name) - sources = list(sources) - - ext_path = self.get_ext_fullpath(ext.name) - depends = sources + ext.depends - if not (self.force or newer_group(depends, ext_path, 'newer')): - logger.debug("skipping '%s' extension (up-to-date)", ext.name) - return - else: - logger.info("building '%s' extension", ext.name) - - # First, scan the sources for SWIG definition files (.i), run - # SWIG on 'em to create .c files, and modify the sources list - # accordingly. - sources = self.swig_sources(sources, ext) - - # Next, compile the source code to object files. - - # XXX not honouring 'define_macros' or 'undef_macros' -- the - # CCompiler API needs to change to accommodate this, and I - # want to do one thing at a time! - - # Two possible sources for extra compiler arguments: - # - 'extra_compile_args' in Extension object - # - CFLAGS environment variable (not particularly - # elegant, but people seem to expect it and I - # guess it's useful) - # The environment variable should take precedence, and - # any sensible compiler will give precedence to later - # command-line args. Hence we combine them in order: - extra_args = ext.extra_compile_args or [] - - macros = ext.define_macros[:] - for undef in ext.undef_macros: - macros.append((undef,)) - - objects = self.compiler_obj.compile(sources, - output_dir=self.build_temp, - macros=macros, - include_dirs=ext.include_dirs, - debug=self.debug, - extra_postargs=extra_args, - depends=ext.depends) - - # XXX -- this is a Vile HACK! - # - # The setup.py script for Python on Unix needs to be able to - # get this list so it can perform all the clean up needed to - # avoid keeping object files around when cleaning out a failed - # build of an extension module. Since Packaging does not - # track dependencies, we have to get rid of intermediates to - # ensure all the intermediates will be properly re-built. - # - self._built_objects = objects[:] - - # Now link the object files together into a "shared object" -- - # of course, first we have to figure out all the other things - # that go into the mix. - if ext.extra_objects: - objects.extend(ext.extra_objects) - extra_args = ext.extra_link_args or [] - - # Detect target language, if not provided - language = ext.language or self.compiler_obj.detect_language(sources) - - self.compiler_obj.link_shared_object( - objects, ext_path, - libraries=self.get_libraries(ext), - library_dirs=ext.library_dirs, - runtime_library_dirs=ext.runtime_library_dirs, - extra_postargs=extra_args, - export_symbols=self.get_export_symbols(ext), - debug=self.debug, - build_temp=self.build_temp, - target_lang=language) - - - def swig_sources(self, sources, extension): - """Walk the list of source files in 'sources', looking for SWIG - interface (.i) files. 
Run SWIG on all that are found, and - return a modified 'sources' list with SWIG source files replaced - by the generated C (or C++) files. - """ - new_sources = [] - swig_sources = [] - swig_targets = {} - - # XXX this drops generated C/C++ files into the source tree, which - # is fine for developers who want to distribute the generated - # source -- but there should be an option to put SWIG output in - # the temp dir. - - if ('-c++' in self.swig_opts or '-c++' in extension.swig_opts): - target_ext = '.cpp' - else: - target_ext = '.c' - - for source in sources: - base, ext = os.path.splitext(source) - if ext == ".i": # SWIG interface file - new_sources.append(base + '_wrap' + target_ext) - swig_sources.append(source) - swig_targets[source] = new_sources[-1] - else: - new_sources.append(source) - - if not swig_sources: - return new_sources - - swig = self.swig or self.find_swig() - swig_cmd = [swig, "-python"] - swig_cmd.extend(self.swig_opts) - - # Do not override commandline arguments - if not self.swig_opts: - for o in extension.swig_opts: - swig_cmd.append(o) - - for source in swig_sources: - target = swig_targets[source] - logger.info("swigging %s to %s", source, target) - self.spawn(swig_cmd + ["-o", target, source]) - - return new_sources - - def find_swig(self): - """Return the name of the SWIG executable. On Unix, this is - just "swig" -- it should be in the PATH. Tries a bit harder on - Windows. - """ - - if os.name == "posix": - return "swig" - elif os.name == "nt": - - # Look for SWIG in its standard installation directory on - # Windows (or so I presume!). If we find it there, great; - # if not, act like Unix and assume it's in the PATH. - for vers in ("1.3", "1.2", "1.1"): - fn = os.path.join("c:\\swig%s" % vers, "swig.exe") - if os.path.isfile(fn): - return fn - else: - return "swig.exe" - - elif os.name == "os2": - # assume swig available in the PATH. - return "swig.exe" - - else: - raise PackagingPlatformError(("I don't know how to find (much less run) SWIG " - "on platform '%s'") % os.name) - - # -- Name generators ----------------------------------------------- - # (extension names, filenames, whatever) - def get_ext_fullpath(self, ext_name): - """Returns the path of the filename for a given extension. - - The file is located in `build_lib` or directly in the package - (inplace option). - """ - fullname = self.get_ext_fullname(ext_name) - modpath = fullname.split('.') - filename = self.get_ext_filename(modpath[-1]) - - if not self.inplace: - # no further work needed - # returning : - # build_dir/package/path/filename - filename = os.path.join(*modpath[:-1]+[filename]) - return os.path.join(self.build_lib, filename) - - # the inplace option requires to find the package directory - # using the build_py command for that - package = '.'.join(modpath[0:-1]) - build_py = self.get_finalized_command('build_py') - package_dir = os.path.abspath(build_py.get_package_dir(package)) - - # returning - # package_dir/filename - return os.path.join(package_dir, filename) - - def get_ext_fullname(self, ext_name): - """Returns the fullname of a given extension name. - - Adds the `package.` prefix""" - if self.package is None: - return ext_name - else: - return self.package + '.' + ext_name - - def get_ext_filename(self, ext_name): - r"""Convert the name of an extension (eg. "foo.bar") into the name - of the file from which it will be loaded (eg. "foo/bar.so", or - "foo\bar.pyd"). 
- """ - ext_path = ext_name.split('.') - # OS/2 has an 8 character module (extension) limit :-( - if os.name == "os2": - ext_path[len(ext_path) - 1] = ext_path[len(ext_path) - 1][:8] - # extensions in debug_mode are named 'module_d.pyd' under windows - so_ext = sysconfig.get_config_var('SO') - if os.name == 'nt' and self.debug: - return os.path.join(*ext_path) + '_d' + so_ext - return os.path.join(*ext_path) + so_ext - - def get_export_symbols(self, ext): - """Return the list of symbols that a shared extension has to - export. This either uses 'ext.export_symbols' or, if it's not - provided, "init" + module_name. Only relevant on Windows, where - the .pyd file (DLL) must export the module "init" function. - """ - initfunc_name = "PyInit_" + ext.name.split('.')[-1] - if initfunc_name not in ext.export_symbols: - ext.export_symbols.append(initfunc_name) - return ext.export_symbols - - def get_libraries(self, ext): - """Return the list of libraries to link against when building a - shared extension. On most platforms, this is just 'ext.libraries'; - on Windows and OS/2, we add the Python library (eg. python20.dll). - """ - # The python library is always needed on Windows. For MSVC, this - # is redundant, since the library is mentioned in a pragma in - # pyconfig.h that MSVC groks. The other Windows compilers all seem - # to need it mentioned explicitly, though, so that's what we do. - # Append '_d' to the python import library on debug builds. - if sys.platform == "win32": - from packaging.compiler.msvccompiler import MSVCCompiler - if not isinstance(self.compiler_obj, MSVCCompiler): - template = "python%d%d" - if self.debug: - template = template + '_d' - pythonlib = template % sys.version_info[:2] - # don't extend ext.libraries, it may be shared with other - # extensions, it is a reference to the original list - return ext.libraries + [pythonlib] - else: - return ext.libraries - elif sys.platform == "os2emx": - # EMX/GCC requires the python library explicitly, and I - # believe VACPP does as well (though not confirmed) - AIM Apr01 - template = "python%d%d" - # debug versions of the main DLL aren't supported, at least - # not at this time - AIM Apr01 - #if self.debug: - # template = template + '_d' - pythonlib = template % sys.version_info[:2] - # don't extend ext.libraries, it may be shared with other - # extensions, it is a reference to the original list - return ext.libraries + [pythonlib] - elif sys.platform[:6] == "cygwin": - template = "python%d.%d" - pythonlib = template % sys.version_info[:2] - # don't extend ext.libraries, it may be shared with other - # extensions, it is a reference to the original list - return ext.libraries + [pythonlib] - elif sys.platform[:6] == "atheos": - template = "python%d.%d" - pythonlib = template % sys.version_info[:2] - # Get SHLIBS from Makefile - extra = [] - for lib in sysconfig.get_config_var('SHLIBS').split(): - if lib.startswith('-l'): - extra.append(lib[2:]) - else: - extra.append(lib) - # don't extend ext.libraries, it may be shared with other - # extensions, it is a reference to the original list - return ext.libraries + [pythonlib, "m"] + extra - - elif sys.platform == 'darwin': - # Don't use the default code below - return ext.libraries - - else: - if sysconfig.get_config_var('Py_ENABLE_SHARED'): - template = 'python%d.%d' + sys.abiflags - pythonlib = template % sys.version_info[:2] - return ext.libraries + [pythonlib] - else: - return ext.libraries diff --git a/Lib/packaging/command/build_py.py b/Lib/packaging/command/build_py.py deleted 
file mode 100644 --- a/Lib/packaging/command/build_py.py +++ /dev/null @@ -1,392 +0,0 @@ -"""Build pure Python modules (just copy to build directory).""" - -import os -import imp -from glob import glob - -from packaging import logger -from packaging.command.cmd import Command -from packaging.errors import PackagingOptionError, PackagingFileError -from packaging.util import convert_path -from packaging.compat import Mixin2to3 - -# marking public APIs -__all__ = ['build_py'] - - -class build_py(Command, Mixin2to3): - - description = "build pure Python modules (copy to build directory)" - - # The options for controlling byte compilation are two independent sets; - # more info in install_lib or the reST docs - - user_options = [ - ('build-lib=', 'd', "directory to build (copy) to"), - ('compile', 'c', "compile .py to .pyc"), - ('no-compile', None, "don't compile .py files [default]"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), - ('force', 'f', "forcibly build everything (ignore file timestamps)"), - ('use-2to3', None, - "use 2to3 to make source python 3.x compatible"), - ('convert-2to3-doctests', None, - "use 2to3 to convert doctests in separate text files"), - ('use-2to3-fixers', None, - "list additional fixers opted for during 2to3 conversion"), - ] - - boolean_options = ['compile', 'force'] - - negative_opt = {'no-compile': 'compile'} - - def initialize_options(self): - self.build_lib = None - self.py_modules = None - self.package = None - self.package_data = None - self.package_dir = None - self.compile = False - self.optimize = 0 - self.force = None - self._updated_files = [] - self._doctests_2to3 = [] - self.use_2to3 = False - self.convert_2to3_doctests = None - self.use_2to3_fixers = None - - def finalize_options(self): - self.set_undefined_options('build', - 'use_2to3', 'use_2to3_fixers', - 'convert_2to3_doctests', 'build_lib', - 'force') - - # Get the distribution options that are aliases for build_py - # options -- list of packages and list of modules. - self.packages = self.distribution.packages - self.py_modules = self.distribution.py_modules - self.package_data = self.distribution.package_data - self.package_dir = None - if self.distribution.package_dir is not None: - self.package_dir = convert_path(self.distribution.package_dir) - self.data_files = self.get_data_files() - - # Ick, copied straight from install_lib.py (fancy_getopt needs a - # type system! Hell, *everything* needs a type system!!!) - if not isinstance(self.optimize, int): - try: - self.optimize = int(self.optimize) - assert 0 <= self.optimize <= 2 - except (ValueError, AssertionError): - raise PackagingOptionError("optimize must be 0, 1, or 2") - - def run(self): - # XXX copy_file by default preserves atime and mtime. IMHO this is - # the right thing to do, but perhaps it should be an option -- in - # particular, a site administrator might want installed files to - # reflect the time of installation rather than the last - # modification time before the installed release. - - # XXX copy_file by default preserves mode, which appears to be the - # wrong thing to do: if a file is read-only in the working - # directory, we want it to be installed read/write so that the next - # installation of the same module distribution can overwrite it - # without problems. (This might be a Unix-specific issue.) 
Thus - # we turn off 'preserve_mode' when copying to the build directory, - # since the build directory is supposed to be exactly what the - # installation will look like (ie. we preserve mode when - # installing). - - # Two options control which modules will be installed: 'packages' - # and 'py_modules'. The former lets us work with whole packages, not - # specifying individual modules at all; the latter is for - # specifying modules one-at-a-time. - - if self.py_modules: - self.build_modules() - if self.packages: - self.build_packages() - self.build_package_data() - - if self.use_2to3 and self._updated_files: - self.run_2to3(self._updated_files, self._doctests_2to3, - self.use_2to3_fixers) - - self.byte_compile(self.get_outputs(include_bytecode=False), - prefix=self.build_lib) - - # -- Top-level worker functions ------------------------------------ - - def get_data_files(self): - """Generate list of '(package,src_dir,build_dir,filenames)' tuples. - - Helper function for finalize_options. - """ - data = [] - if not self.packages: - return data - for package in self.packages: - # Locate package source directory - src_dir = self.get_package_dir(package) - - # Compute package build directory - build_dir = os.path.join(*([self.build_lib] + package.split('.'))) - - # Length of path to strip from found files - plen = 0 - if src_dir: - plen = len(src_dir) + 1 - - # Strip directory from globbed filenames - filenames = [ - file[plen:] for file in self.find_data_files(package, src_dir) - ] - data.append((package, src_dir, build_dir, filenames)) - return data - - def find_data_files(self, package, src_dir): - """Return filenames for package's data files in 'src_dir'. - - Helper function for get_data_files. - """ - globs = (self.package_data.get('', []) - + self.package_data.get(package, [])) - files = [] - for pattern in globs: - # Each pattern has to be converted to a platform-specific path - filelist = glob(os.path.join(src_dir, convert_path(pattern))) - # Files that match more than one pattern are only added once - files.extend(fn for fn in filelist if fn not in files) - return files - - def build_package_data(self): - """Copy data files into build directory. - - Helper function for run. - """ - # FIXME add tests for this method - for package, src_dir, build_dir, filenames in self.data_files: - for filename in filenames: - target = os.path.join(build_dir, filename) - srcfile = os.path.join(src_dir, filename) - self.mkpath(os.path.dirname(target)) - outf, copied = self.copy_file(srcfile, - target, preserve_mode=False) - doctests = self.distribution.convert_2to3_doctests - if copied and srcfile in doctests: - self._doctests_2to3.append(outf) - - # XXX - this should be moved to the Distribution class as it is not - # only needed for build_py. It also has no dependencies on this class. - def get_package_dir(self, package): - """Return the directory, relative to the top of the source - distribution, where package 'package' should be found - (at least according to the 'package_dir' option, if any). - """ - path = package.split('.') - if self.package_dir is not None: - path.insert(0, self.package_dir) - - if len(path) > 0: - return os.path.join(*path) - - return '' - - def check_package(self, package, package_dir): - """Helper function for find_package_modules and find_modules.""" - # Empty dir name means current directory, which we can probably - # assume exists. Also, os.path.exists and isdir don't know about - # my "empty string means current dir" convention, so we have to - # circumvent them. 
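(Editorial aside, not part of the changeset: the package_data matching in find_data_files() above boils down to "glob each pattern relative to the package source directory, keep each match once". A small self-contained sketch of that idea; the function name and sample patterns are hypothetical:

    import os
    from glob import glob

    def match_package_data(src_dir, patterns):
        """Return de-duplicated files under src_dir matching the glob patterns."""
        files = []
        for pattern in patterns:
            for fn in glob(os.path.join(src_dir, pattern)):
                if fn not in files:   # a file matching several patterns is added once
                    files.append(fn)
        return files

    # e.g. match_package_data('mypkg', ['*.txt', 'templates/*.html'])
)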
- if package_dir != "": - if not os.path.exists(package_dir): - raise PackagingFileError( - "package directory '%s' does not exist" % package_dir) - if not os.path.isdir(package_dir): - raise PackagingFileError( - "supposed package directory '%s' exists, " - "but is not a directory" % package_dir) - - # Require __init__.py for all but the "root package" - if package: - init_py = os.path.join(package_dir, "__init__.py") - if os.path.isfile(init_py): - return init_py - else: - logger.warning("package init file %r not found " - "(or not a regular file)", init_py) - - # Either not in a package at all (__init__.py not expected), or - # __init__.py doesn't exist -- so don't return the filename. - return None - - def check_module(self, module, module_file): - if not os.path.isfile(module_file): - logger.warning("file %r (for module %r) not found", - module_file, module) - return False - else: - return True - - def find_package_modules(self, package, package_dir): - self.check_package(package, package_dir) - module_files = glob(os.path.join(package_dir, "*.py")) - modules = [] - if self.distribution.script_name is not None: - setup_script = os.path.abspath(self.distribution.script_name) - else: - setup_script = None - - for f in module_files: - abs_f = os.path.abspath(f) - if abs_f != setup_script: - module = os.path.splitext(os.path.basename(f))[0] - modules.append((package, module, f)) - else: - logger.debug("excluding %r", setup_script) - return modules - - def find_modules(self): - """Finds individually-specified Python modules, ie. those listed by - module name in 'self.py_modules'. Returns a list of tuples (package, - module_base, filename): 'package' is a tuple of the path through - package-space to the module; 'module_base' is the bare (no - packages, no dots) module name, and 'filename' is the path to the - ".py" file (relative to the distribution root) that implements the - module. - """ - # Map package names to tuples of useful info about the package: - # (package_dir, checked) - # package_dir - the directory where we'll find source files for - # this package - # checked - true if we have checked that the package directory - # is valid (exists, contains __init__.py, ... ?) - packages = {} - - # List of (package, module, filename) tuples to return - modules = [] - - # We treat modules-in-packages almost the same as toplevel modules, - # just the "package" for a toplevel is empty (either an empty - # string or empty list, depending on context). Differences: - # - don't check for __init__.py in directory for empty package - for module in self.py_modules: - path = module.split('.') - package = '.'.join(path[0:-1]) - module_base = path[-1] - - try: - package_dir, checked = packages[package] - except KeyError: - package_dir = self.get_package_dir(package) - checked = False - - if not checked: - init_py = self.check_package(package, package_dir) - packages[package] = (package_dir, 1) - if init_py: - modules.append((package, "__init__", init_py)) - - # XXX perhaps we should also check for just .pyc files - # (so greedy closed-source bastards can distribute Python - # modules too) - module_file = os.path.join(package_dir, module_base + ".py") - if not self.check_module(module, module_file): - continue - - modules.append((package, module_base, module_file)) - - return modules - - def find_all_modules(self): - """Compute the list of all modules that will be built, whether - they are specified one-module-at-a-time ('self.py_modules') or - by whole packages ('self.packages'). 
Return a list of tuples - (package, module, module_file), just like 'find_modules()' and - 'find_package_modules()' do.""" - modules = [] - if self.py_modules: - modules.extend(self.find_modules()) - if self.packages: - for package in self.packages: - package_dir = self.get_package_dir(package) - m = self.find_package_modules(package, package_dir) - modules.extend(m) - return modules - - def get_source_files(self): - sources = [module[-1] for module in self.find_all_modules()] - sources += [ - os.path.join(src_dir, filename) - for package, src_dir, build_dir, filenames in self.data_files - for filename in filenames] - return sources - - def get_module_outfile(self, build_dir, package, module): - outfile_path = [build_dir] + list(package) + [module + ".py"] - return os.path.join(*outfile_path) - - def get_outputs(self, include_bytecode=True): - modules = self.find_all_modules() - outputs = [] - for package, module, module_file in modules: - package = package.split('.') - filename = self.get_module_outfile(self.build_lib, package, module) - outputs.append(filename) - if include_bytecode: - if self.compile: - outputs.append(imp.cache_from_source(filename, True)) - if self.optimize: - outputs.append(imp.cache_from_source(filename, False)) - - outputs += [ - os.path.join(build_dir, filename) - for package, src_dir, build_dir, filenames in self.data_files - for filename in filenames] - - return outputs - - def build_module(self, module, module_file, package): - if isinstance(package, str): - package = package.split('.') - elif not isinstance(package, (list, tuple)): - raise TypeError( - "'package' must be a string (dot-separated), list, or tuple") - - # Now put the module source file into the "build" area -- this is - # easy, we just copy it somewhere under self.build_lib (the build - # directory for Python source). - outfile = self.get_module_outfile(self.build_lib, package, module) - dir = os.path.dirname(outfile) - self.mkpath(dir) - return self.copy_file(module_file, outfile, preserve_mode=False) - - def build_modules(self): - modules = self.find_modules() - for package, module, module_file in modules: - # Now "build" the module -- ie. copy the source file to - # self.build_lib (the build directory for Python source). - # (Actually, it gets copied to the directory for this package - # under self.build_lib.) - self.build_module(module, module_file, package) - - def build_packages(self): - for package in self.packages: - # Get list of (package, module, module_file) tuples based on - # scanning the package directory. 'package' is only included - # in the tuple so that 'find_modules()' and - # 'find_package_tuples()' have a consistent interface; it's - # ignored here (apart from a sanity check). Also, 'module' is - # the *unqualified* module name (ie. no dots, no package -- we - # already know its package!), and 'module_file' is the path to - # the .py file, relative to the current directory - # (ie. including 'package_dir'). - package_dir = self.get_package_dir(package) - modules = self.find_package_modules(package, package_dir) - - # Now loop over the modules we found, "building" each one (just - # copy it to self.build_lib). 
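(Editorial aside, not part of the changeset: the bytecode paths that get_outputs() computes above come from imp.cache_from_source(), which maps a .py path to its PEP 3147 __pycache__ location; the second argument selects the .pyc vs. .pyo variant. In current Python the equivalent helper is importlib.util.cache_from_source(); a tiny sketch, where the interpreter tag in the result depends on the running Python:

    import importlib.util

    # Maps 'build/lib/mypkg/mod.py' to something like
    # 'build/lib/mypkg/__pycache__/mod.cpython-312.pyc' (tag varies by interpreter).
    print(importlib.util.cache_from_source('build/lib/mypkg/mod.py'))
)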
- for package_, module, module_file in modules: - assert package == package_ - self.build_module(module, module_file, package) diff --git a/Lib/packaging/command/build_scripts.py b/Lib/packaging/command/build_scripts.py deleted file mode 100644 --- a/Lib/packaging/command/build_scripts.py +++ /dev/null @@ -1,154 +0,0 @@ -"""Build scripts (copy to build dir and fix up shebang line).""" - -import os -import re -import sysconfig -from tokenize import detect_encoding - -from packaging.command.cmd import Command -from packaging.util import convert_path, newer -from packaging import logger -from packaging.compat import Mixin2to3 - - -# check if Python is called on the first line with this expression -first_line_re = re.compile(b'^#!.*python[0-9.]*([ \t].*)?$') - -class build_scripts(Command, Mixin2to3): - - description = "build scripts (copy and fix up shebang line)" - - user_options = [ - ('build-dir=', 'd', "directory to build (copy) to"), - ('force', 'f', "forcibly build everything (ignore file timestamps"), - ('executable=', 'e', "specify final destination interpreter path"), - ] - - boolean_options = ['force'] - - - def initialize_options(self): - self.build_dir = None - self.scripts = None - self.force = None - self.executable = None - self.outfiles = None - self.use_2to3 = False - self.convert_2to3_doctests = None - self.use_2to3_fixers = None - - def finalize_options(self): - self.set_undefined_options('build', - ('build_scripts', 'build_dir'), - 'use_2to3', 'use_2to3_fixers', - 'convert_2to3_doctests', 'force', - 'executable') - self.scripts = self.distribution.scripts - - def get_source_files(self): - return self.scripts - - def run(self): - if not self.scripts: - return - copied_files = self.copy_scripts() - if self.use_2to3 and copied_files: - self._run_2to3(copied_files, fixers=self.use_2to3_fixers) - - def copy_scripts(self): - """Copy each script listed in 'self.scripts'; if it's marked as a - Python script in the Unix way (first line matches 'first_line_re', - ie. starts with "\#!" and contains "python"), then adjust the first - line to refer to the current Python interpreter as we copy. - """ - self.mkpath(self.build_dir) - outfiles = [] - for script in self.scripts: - adjust = False - script = convert_path(script) - outfile = os.path.join(self.build_dir, os.path.basename(script)) - outfiles.append(outfile) - - if not self.force and not newer(script, outfile): - logger.debug("not copying %s (up-to-date)", script) - continue - - # Always open the file, but ignore failures in dry-run mode -- - # that way, we'll get accurate feedback if we can read the - # script. - try: - f = open(script, "rb") - except IOError: - if not self.dry_run: - raise - f = None - else: - encoding, lines = detect_encoding(f.readline) - f.seek(0) - first_line = f.readline() - if not first_line: - logger.warning('%s: %s is an empty file (skipping)', - self.get_command_name(), script) - continue - - match = first_line_re.match(first_line) - if match: - adjust = True - post_interp = match.group(1) or b'' - - if adjust: - logger.info("copying and adjusting %s -> %s", script, - self.build_dir) - if not self.dry_run: - if not sysconfig.is_python_build(): - executable = self.executable - else: - executable = os.path.join( - sysconfig.get_config_var("BINDIR"), - "python%s%s" % (sysconfig.get_config_var("VERSION"), - sysconfig.get_config_var("EXE"))) - executable = os.fsencode(executable) - shebang = b"#!" 
+ executable + post_interp + b"\n" - # Python parser starts to read a script using UTF-8 until - # it gets a #coding:xxx cookie. The shebang has to be the - # first line of a file, the #coding:xxx cookie cannot be - # written before. So the shebang has to be decodable from - # UTF-8. - try: - shebang.decode('utf-8') - except UnicodeDecodeError: - raise ValueError( - "The shebang ({!r}) is not decodable " - "from utf-8".format(shebang)) - # If the script is encoded to a custom encoding (use a - # #coding:xxx cookie), the shebang has to be decodable from - # the script encoding too. - try: - shebang.decode(encoding) - except UnicodeDecodeError: - raise ValueError( - "The shebang ({!r}) is not decodable " - "from the script encoding ({})" - .format(shebang, encoding)) - with open(outfile, "wb") as outf: - outf.write(shebang) - outf.writelines(f.readlines()) - if f: - f.close() - else: - if f: - f.close() - self.copy_file(script, outfile) - - if os.name == 'posix': - for file in outfiles: - if self.dry_run: - logger.info("changing mode of %s", file) - else: - oldmode = os.stat(file).st_mode & 0o7777 - newmode = (oldmode | 0o555) & 0o7777 - if newmode != oldmode: - logger.info("changing mode of %s from %o to %o", - file, oldmode, newmode) - os.chmod(file, newmode) - return outfiles diff --git a/Lib/packaging/command/check.py b/Lib/packaging/command/check.py deleted file mode 100644 --- a/Lib/packaging/command/check.py +++ /dev/null @@ -1,88 +0,0 @@ -"""Check PEP compliance of metadata.""" - -from packaging import logger -from packaging.command.cmd import Command -from packaging.errors import PackagingSetupError -from packaging.util import resolve_name - -class check(Command): - - description = "check PEP compliance of metadata" - - user_options = [('metadata', 'm', 'Verify metadata'), - ('all', 'a', - ('runs extended set of checks')), - ('strict', 's', - 'Will exit with an error if a check fails')] - - boolean_options = ['metadata', 'all', 'strict'] - - def initialize_options(self): - """Sets default values for options.""" - self.all = False - self.metadata = True - self.strict = False - self._warnings = [] - - def finalize_options(self): - pass - - def warn(self, msg, *args): - """Wrapper around logging that also remembers messages.""" - # XXX we could use a special handler for this, but would need to test - # if it works even if the logger has a too high level - self._warnings.append((msg, args)) - return logger.warning('%s: %s' % (self.get_command_name(), msg), *args) - - def run(self): - """Runs the command.""" - # perform the various tests - if self.metadata: - self.check_metadata() - if self.all: - self.check_restructuredtext() - self.check_hooks_resolvable() - - # let's raise an error in strict mode, if we have at least - # one warning - if self.strict and len(self._warnings) > 0: - msg = '\n'.join(msg % args for msg, args in self._warnings) - raise PackagingSetupError(msg) - - def check_metadata(self): - """Ensures that all required elements of metadata are supplied. - - name, version, URL, author - - Warns if any are missing. 
- """ - missing, warnings = self.distribution.metadata.check(strict=True) - if missing != []: - self.warn('missing required metadata: %s', ', '.join(missing)) - for warning in warnings: - self.warn(warning) - - def check_restructuredtext(self): - """Checks if the long string fields are reST-compliant.""" - missing, warnings = self.distribution.metadata.check(restructuredtext=True) - if self.distribution.metadata.docutils_support: - for warning in warnings: - line = warning[-1].get('line') - if line is None: - warning = warning[1] - else: - warning = '%s (line %s)' % (warning[1], line) - self.warn(warning) - elif self.strict: - raise PackagingSetupError('The docutils package is needed.') - - def check_hooks_resolvable(self): - for options in self.distribution.command_options.values(): - for hook_kind in ("pre_hook", "post_hook"): - if hook_kind not in options: - break - for hook_name in options[hook_kind][1].values(): - try: - resolve_name(hook_name) - except ImportError: - self.warn('name %r cannot be resolved', hook_name) diff --git a/Lib/packaging/command/clean.py b/Lib/packaging/command/clean.py deleted file mode 100644 --- a/Lib/packaging/command/clean.py +++ /dev/null @@ -1,76 +0,0 @@ -"""Clean up temporary files created by the build command.""" - -# Contributed by Bastian Kleineidam - -import os -from shutil import rmtree -from packaging.command.cmd import Command -from packaging import logger - -class clean(Command): - - description = "clean up temporary files from 'build' command" - user_options = [ - ('build-base=', 'b', - "base build directory (default: 'build.build-base')"), - ('build-lib=', None, - "build directory for all modules (default: 'build.build-lib')"), - ('build-temp=', 't', - "temporary build directory (default: 'build.build-temp')"), - ('build-scripts=', None, - "build directory for scripts (default: 'build.build-scripts')"), - ('bdist-base=', None, - "temporary directory for built distributions"), - ('all', 'a', - "remove all build output, not just temporary by-products") - ] - - boolean_options = ['all'] - - def initialize_options(self): - self.build_base = None - self.build_lib = None - self.build_temp = None - self.build_scripts = None - self.bdist_base = None - self.all = None - - def finalize_options(self): - self.set_undefined_options('build', 'build_base', 'build_lib', - 'build_scripts', 'build_temp') - self.set_undefined_options('bdist', 'bdist_base') - - def run(self): - # remove the build/temp. 
directory (unless it's already - # gone) - if os.path.exists(self.build_temp): - if self.dry_run: - logger.info('removing %s', self.build_temp) - else: - rmtree(self.build_temp) - else: - logger.debug("'%s' does not exist -- can't clean it", - self.build_temp) - - if self.all: - # remove build directories - for directory in (self.build_lib, - self.bdist_base, - self.build_scripts): - if os.path.exists(directory): - if self.dry_run: - logger.info('removing %s', directory) - else: - rmtree(directory) - else: - logger.warning("'%s' does not exist -- can't clean it", - directory) - - # just for the heck of it, try to remove the base build directory: - # we might have emptied it right now, but if not we don't care - if not self.dry_run: - try: - os.rmdir(self.build_base) - logger.info("removing '%s'", self.build_base) - except OSError: - pass diff --git a/Lib/packaging/command/cmd.py b/Lib/packaging/command/cmd.py deleted file mode 100644 --- a/Lib/packaging/command/cmd.py +++ /dev/null @@ -1,461 +0,0 @@ -"""Base class for commands.""" - -import os -import re -from shutil import copyfile, move, make_archive -from packaging import util -from packaging import logger -from packaging.errors import PackagingOptionError - - -class Command: - """Abstract base class for defining command classes, the "worker bees" - of Packaging. A useful analogy for command classes is to think of - them as subroutines with local variables called "options". The options - are "declared" in 'initialize_options()' and "defined" (given their - final values, aka "finalized") in 'finalize_options()', both of which - must be defined by every command class. The distinction between the - two is necessary because option values might come from the outside - world (command line, config file, ...), and any options dependent on - other options must be computed *after* these outside influences have - been processed -- hence 'finalize_options()'. The "body" of the - subroutine, where it does all its work based on the values of its - options, is the 'run()' method, which must also be implemented by every - command class. - """ - - # 'sub_commands' formalizes the notion of a "family" of commands, - # eg. "install_dist" as the parent with sub-commands "install_lib", - # "install_headers", etc. The parent of a family of commands - # defines 'sub_commands' as a class attribute; it's a list of - # (command_name : string, predicate : unbound_method | string | None) - # tuples, where 'predicate' is a method of the parent command that - # determines whether the corresponding command is applicable in the - # current situation. (Eg. we "install_headers" is only applicable if - # we have any C header files to install.) If 'predicate' is None, - # that command is always applicable. - # - # 'sub_commands' is usually defined at the *end* of a class, because - # predicates can be unbound methods, so they must already have been - # defined. The canonical example is the "install_dist" command. - sub_commands = [] - - # Pre and post command hooks are run just before or just after the command - # itself. They are simple functions that receive the command instance. They - # are specified as callable objects or dotted strings (for lazy loading). - pre_hook = None - post_hook = None - - # -- Creation/initialization methods ------------------------------- - - def __init__(self, dist): - """Create and initialize a new Command object. 
Most importantly, - invokes the 'initialize_options()' method, which is the real - initializer and depends on the actual command being instantiated. - """ - # late import because of mutual dependence between these classes - from packaging.dist import Distribution - - if not isinstance(dist, Distribution): - raise TypeError("dist must be an instance of Distribution, not %r" - % type(dist)) - if self.__class__ is Command: - raise RuntimeError("Command is an abstract class") - - self.distribution = dist - self.initialize_options() - - # Per-command versions of the global flags, so that the user can - # customize Packaging' behaviour command-by-command and let some - # commands fall back on the Distribution's behaviour. None means - # "not defined, check self.distribution's copy", while 0 or 1 mean - # false and true (duh). Note that this means figuring out the real - # value of each flag is a touch complicated -- hence "self._dry_run" - # will be handled by a property, below. - # XXX This needs to be fixed. [I changed it to a property--does that - # "fix" it?] - self._dry_run = None - - # Some commands define a 'self.force' option to ignore file - # timestamps, but methods defined *here* assume that - # 'self.force' exists for all commands. So define it here - # just to be safe. - self.force = None - - # The 'help' flag is just used for command line parsing, so - # none of that complicated bureaucracy is needed. - self.help = False - - # 'finalized' records whether or not 'finalize_options()' has been - # called. 'finalize_options()' itself should not pay attention to - # this flag: it is the business of 'ensure_finalized()', which - # always calls 'finalize_options()', to respect/update it. - self.finalized = False - - # XXX A more explicit way to customize dry_run would be better. - @property - def dry_run(self): - if self._dry_run is None: - return getattr(self.distribution, 'dry_run') - else: - return self._dry_run - - def ensure_finalized(self): - if not self.finalized: - self.finalize_options() - self.finalized = True - - # Subclasses must define: - # initialize_options() - # provide default values for all options; may be customized by - # setup script, by options from config file(s), or by command-line - # options - # finalize_options() - # decide on the final values for all options; this is called - # after all possible intervention from the outside world - # (command line, option file, etc.) has been processed - # run() - # run the command: do whatever it is we're here to do, - # controlled by the command's various option values - - def initialize_options(self): - """Set default values for all the options that this command - supports. Note that these defaults may be overridden by other - commands, by the setup script, by config files, or by the - command line. Thus, this is not the place to code dependencies - between options; generally, 'initialize_options()' implementations - are just a bunch of "self.foo = None" assignments. - - This method must be implemented by all command classes. - """ - raise RuntimeError( - "abstract method -- subclass %s must override" % self.__class__) - - def finalize_options(self): - """Set final values for all the options that this command supports. - This is always called as late as possible, ie. after any option - assignments from the command line or from other commands have been - done. 
Thus, this is the place to code option dependencies: if - 'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as - long as 'foo' still has the same value it was assigned in - 'initialize_options()'. - - This method must be implemented by all command classes. - """ - raise RuntimeError( - "abstract method -- subclass %s must override" % self.__class__) - - def dump_options(self, header=None, indent=""): - if header is None: - header = "command options for '%s':" % self.get_command_name() - logger.info(indent + header) - indent = indent + " " - negative_opt = getattr(self, 'negative_opt', ()) - for option, _, _ in self.user_options: - if option in negative_opt: - continue - option = option.replace('-', '_') - if option[-1] == "=": - option = option[:-1] - value = getattr(self, option) - logger.info(indent + "%s = %s", option, value) - - def run(self): - """A command's raison d'etre: carry out the action it exists to - perform, controlled by the options initialized in - 'initialize_options()', customized by other commands, the setup - script, the command line and config files, and finalized in - 'finalize_options()'. All terminal output and filesystem - interaction should be done by 'run()'. - - This method must be implemented by all command classes. - """ - raise RuntimeError( - "abstract method -- subclass %s must override" % self.__class__) - - # -- External interface -------------------------------------------- - # (called by outsiders) - - def get_source_files(self): - """Return the list of files that are used as inputs to this command, - i.e. the files used to generate the output files. The result is used - by the `sdist` command in determining the set of default files. - - Command classes should implement this method if they operate on files - from the source tree. - """ - return [] - - def get_outputs(self): - """Return the list of files that would be produced if this command - were actually run. Not affected by the "dry-run" flag or whether - any other commands have been run. - - Command classes should implement this method if they produce any - output files that get consumed by another command. e.g., `build_ext` - returns the list of built extension modules, but not any temporary - files used in the compilation process. - """ - return [] - - # -- Option validation methods ------------------------------------- - # (these are very handy in writing the 'finalize_options()' method) - # - # NB. the general philosophy here is to ensure that a particular option - # value meets certain type and value constraints. If not, we try to - # force it into conformance (eg. if we expect a list but have a string, - # split the string on comma and/or whitespace). If we can't force the - # option into conformance, raise PackagingOptionError. Thus, command - # classes need do nothing more than (eg.) - # self.ensure_string_list('foo') - # and they can be guaranteed that thereafter, self.foo will be - # a list of strings. - - def _ensure_stringlike(self, option, what, default=None): - val = getattr(self, option) - if val is None: - setattr(self, option, default) - return default - elif not isinstance(val, str): - raise PackagingOptionError("'%s' must be a %s (got `%s`)" % - (option, what, val)) - return val - - def ensure_string(self, option, default=None): - """Ensure that 'option' is a string; if not defined, set it to - 'default'. - """ - self._ensure_stringlike(option, "string", default) - - def ensure_string_list(self, option): - r"""Ensure that 'option' is a list of strings. 
If 'option' is - currently a string, we split it either on /,\s*/ or /\s+/, so - "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become - ["foo", "bar", "baz"]. - """ - val = getattr(self, option) - if val is None: - return - elif isinstance(val, str): - setattr(self, option, re.split(r',\s*|\s+', val)) - else: - if isinstance(val, list): - # checks if all elements are str - ok = True - for element in val: - if not isinstance(element, str): - ok = False - break - else: - ok = False - - if not ok: - raise PackagingOptionError( - "'%s' must be a list of strings (got %r)" % (option, val)) - - def _ensure_tested_string(self, option, tester, - what, error_fmt, default=None): - val = self._ensure_stringlike(option, what, default) - if val is not None and not tester(val): - raise PackagingOptionError( - ("error in '%s' option: " + error_fmt) % (option, val)) - - def ensure_filename(self, option): - """Ensure that 'option' is the name of an existing file.""" - self._ensure_tested_string(option, os.path.isfile, - "filename", - "'%s' does not exist or is not a file") - - def ensure_dirname(self, option): - self._ensure_tested_string(option, os.path.isdir, - "directory name", - "'%s' does not exist or is not a directory") - - # -- Convenience methods for commands ------------------------------ - - @classmethod - def get_command_name(cls): - if hasattr(cls, 'command_name'): - return cls.command_name - else: - return cls.__name__ - - def set_undefined_options(self, src_cmd, *options): - """Set values of undefined options from another command. - - Undefined options are options set to None, which is the convention - used to indicate that an option has not been changed between - 'initialize_options()' and 'finalize_options()'. This method is - usually called from 'finalize_options()' for options that depend on - some other command rather than another option of the same command, - typically subcommands. - - The 'src_cmd' argument is the other command from which option values - will be taken (a command object will be created for it if necessary); - the remaining positional arguments are strings that give the name of - the option to set. If the name is different on the source and target - command, you can pass a tuple with '(name_on_source, name_on_dest)' so - that 'self.name_on_dest' will be set from 'src_cmd.name_on_source'. - """ - src_cmd_obj = self.distribution.get_command_obj(src_cmd) - src_cmd_obj.ensure_finalized() - for obj in options: - if isinstance(obj, tuple): - src_option, dst_option = obj - else: - src_option, dst_option = obj, obj - if getattr(self, dst_option) is None: - setattr(self, dst_option, - getattr(src_cmd_obj, src_option)) - - def get_finalized_command(self, command, create=True): - """Wrapper around Distribution's 'get_command_obj()' method: find - (create if necessary and 'create' is true) the command object for - 'command', call its 'ensure_finalized()' method, and return the - finalized command object. - """ - cmd_obj = self.distribution.get_command_obj(command, create) - cmd_obj.ensure_finalized() - return cmd_obj - - def reinitialize_command(self, command, reinit_subcommands=False): - return self.distribution.reinitialize_command( - command, reinit_subcommands) - - def run_command(self, command): - """Run some other command: uses the 'run_command()' method of - Distribution, which creates and finalizes the command object if - necessary and then invokes its 'run()' method. 
- """ - self.distribution.run_command(command) - - def get_sub_commands(self): - """Determine the sub-commands that are relevant in the current - distribution (ie., that need to be run). This is based on the - 'sub_commands' class attribute: each tuple in that list may include - a method that we call to determine if the subcommand needs to be - run for the current distribution. Return a list of command names. - """ - commands = [] - for sub_command in self.sub_commands: - if len(sub_command) == 2: - cmd_name, method = sub_command - if method is None or method(self): - commands.append(cmd_name) - else: - commands.append(sub_command) - return commands - - # -- External world manipulation ----------------------------------- - - def execute(self, func, args, msg=None, level=1): - util.execute(func, args, msg, dry_run=self.dry_run) - - def mkpath(self, name, mode=0o777, dry_run=None): - if dry_run is None: - dry_run = self.dry_run - name = os.path.normpath(name) - if os.path.isdir(name) or name == '': - return - if dry_run: - head = '' - for part in name.split(os.sep): - logger.info("created directory %s%s", head, part) - head += part + os.sep - return - os.makedirs(name, mode) - - def copy_file(self, infile, outfile, - preserve_mode=True, preserve_times=True, link=None, level=1): - """Copy a file respecting dry-run and force flags. - - (dry-run defaults to whatever is in the Distribution object, and - force to false for commands that don't define it.) - """ - if self.dry_run: - # XXX add a comment - return - if os.path.isdir(outfile): - outfile = os.path.join(outfile, os.path.split(infile)[-1]) - copyfile(infile, outfile) - return outfile, None # XXX - - def copy_tree(self, infile, outfile, preserve_mode=True, - preserve_times=True, preserve_symlinks=False, level=1): - """Copy an entire directory tree respecting dry-run - and force flags. - """ - if self.dry_run: - # XXX should not return but let copy_tree log and decide to execute - # or not based on its dry_run argument - return - - return util.copy_tree(infile, outfile, preserve_mode, preserve_times, - preserve_symlinks, not self.force, dry_run=self.dry_run) - - def move_file(self, src, dst, level=1): - """Move a file respecting the dry-run flag.""" - if self.dry_run: - return # XXX same thing - return move(src, dst) - - def spawn(self, cmd, search_path=True, level=1): - """Spawn an external command respecting dry-run flag.""" - from packaging.util import spawn - spawn(cmd, search_path, dry_run=self.dry_run) - - def make_archive(self, base_name, format, root_dir=None, base_dir=None, - owner=None, group=None): - return make_archive(base_name, format, root_dir, - base_dir, dry_run=self.dry_run, - owner=owner, group=group) - - def make_file(self, infiles, outfile, func, args, - exec_msg=None, skip_msg=None, level=1): - """Special case of 'execute()' for operations that process one or - more input files and generate one output file. Works just like - 'execute()', except the operation is skipped and a different - message printed if 'outfile' already exists and is newer than all - files listed in 'infiles'. If the command defined 'self.force', - and it is true, then the command is unconditionally run -- does no - timestamp checks. 
- """ - if skip_msg is None: - skip_msg = "skipping %s (inputs unchanged)" % outfile - - # Allow 'infiles' to be a single string - if isinstance(infiles, str): - infiles = (infiles,) - elif not isinstance(infiles, (list, tuple)): - raise TypeError( - "'infiles' must be a string, or a list or tuple of strings") - - if exec_msg is None: - exec_msg = "generating %s from %s" % (outfile, ', '.join(infiles)) - - # If 'outfile' must be regenerated (either because it doesn't - # exist, is out-of-date, or the 'force' flag is true) then - # perform the action that presumably regenerates it - if self.force or util.newer_group(infiles, outfile): - self.execute(func, args, exec_msg, level) - - # Otherwise, print the "skip" message - else: - logger.debug(skip_msg) - - def byte_compile(self, files, prefix=None): - """Byte-compile files to pyc and/or pyo files. - - This method requires that the calling class define compile and - optimize options, like build_py and install_lib. It also - automatically respects the force and dry-run options. - - prefix, if given, is a string that will be stripped off the - filenames encoded in bytecode files. - """ - if self.compile: - util.byte_compile(files, optimize=False, prefix=prefix, - force=self.force, dry_run=self.dry_run) - if self.optimize: - util.byte_compile(files, optimize=self.optimize, prefix=prefix, - force=self.force, dry_run=self.dry_run) diff --git a/Lib/packaging/command/command_template b/Lib/packaging/command/command_template deleted file mode 100644 --- a/Lib/packaging/command/command_template +++ /dev/null @@ -1,35 +0,0 @@ -"""Do X and Y.""" - -from packaging import logger -from packaging.command.cmd import Command - - -class x(Command): - - # Brief (40-50 characters) description of the command - description = "" - - # List of option tuples: long name, short name (None if no short - # name), and help string. - user_options = [ - ('', '', # long option, short option (one letter) or None - ""), # help text - ] - - def initialize_options(self): - self. = None - self. = None - self. = None - - def finalize_options(self): - if self.x is None: - self.x = ... - - def run(self): - ... - logger.info(...) - - if not self.dry_run: - ... - - self.execute(..., dry_run=self.dry_run) diff --git a/Lib/packaging/command/config.py b/Lib/packaging/command/config.py deleted file mode 100644 --- a/Lib/packaging/command/config.py +++ /dev/null @@ -1,349 +0,0 @@ -"""Prepare the build. - -This module provides config, a (mostly) empty command class -that exists mainly to be sub-classed by specific module distributions and -applications. The idea is that while every "config" command is different, -at least they're all named the same, and users always see "config" in the -list of standard commands. Also, this is a good place to put common -configure-like tasks: "try to compile this C code", or "figure out where -this header file lives". 
-""" - -import os -import re - -from packaging.command.cmd import Command -from packaging.errors import PackagingExecError -from packaging.compiler import customize_compiler -from packaging import logger - -LANG_EXT = {'c': '.c', 'c++': '.cxx'} - -class config(Command): - - description = "prepare the build" - - user_options = [ - ('compiler=', None, - "specify the compiler type"), - ('cc=', None, - "specify the compiler executable"), - ('include-dirs=', 'I', - "list of directories to search for header files"), - ('define=', 'D', - "C preprocessor macros to define"), - ('undef=', 'U', - "C preprocessor macros to undefine"), - ('libraries=', 'l', - "external C libraries to link with"), - ('library-dirs=', 'L', - "directories to search for external C libraries"), - - ('noisy', None, - "show every action (compile, link, run, ...) taken"), - ('dump-source', None, - "dump generated source files before attempting to compile them"), - ] - - - # The three standard command methods: since the "config" command - # does nothing by default, these are empty. - - def initialize_options(self): - self.compiler = None - self.cc = None - self.include_dirs = None - self.libraries = None - self.library_dirs = None - - # maximal output for now - self.noisy = True - self.dump_source = True - - # list of temporary files generated along-the-way that we have - # to clean at some point - self.temp_files = [] - - def finalize_options(self): - if self.include_dirs is None: - self.include_dirs = self.distribution.include_dirs or [] - elif isinstance(self.include_dirs, str): - self.include_dirs = self.include_dirs.split(os.pathsep) - - if self.libraries is None: - self.libraries = [] - elif isinstance(self.libraries, str): - self.libraries = [self.libraries] - - if self.library_dirs is None: - self.library_dirs = [] - elif isinstance(self.library_dirs, str): - self.library_dirs = self.library_dirs.split(os.pathsep) - - def run(self): - pass - - - # Utility methods for actual "config" commands. The interfaces are - # loosely based on Autoconf macros of similar names. Sub-classes - # may use these freely. - - def _check_compiler(self): - """Check that 'self.compiler' really is a CCompiler object; - if not, make it one. - """ - # We do this late, and only on-demand, because this is an expensive - # import. 
- from packaging.compiler.ccompiler import CCompiler - from packaging.compiler import new_compiler - if not isinstance(self.compiler, CCompiler): - self.compiler = new_compiler(compiler=self.compiler, - dry_run=self.dry_run, force=True) - customize_compiler(self.compiler) - if self.include_dirs: - self.compiler.set_include_dirs(self.include_dirs) - if self.libraries: - self.compiler.set_libraries(self.libraries) - if self.library_dirs: - self.compiler.set_library_dirs(self.library_dirs) - - - def _gen_temp_sourcefile(self, body, headers, lang): - filename = "_configtest" + LANG_EXT[lang] - with open(filename, "w") as file: - if headers: - for header in headers: - file.write("#include <%s>\n" % header) - file.write("\n") - file.write(body) - if body[-1] != "\n": - file.write("\n") - return filename - - def _preprocess(self, body, headers, include_dirs, lang): - src = self._gen_temp_sourcefile(body, headers, lang) - out = "_configtest.i" - self.temp_files.extend((src, out)) - self.compiler.preprocess(src, out, include_dirs=include_dirs) - return src, out - - def _compile(self, body, headers, include_dirs, lang): - src = self._gen_temp_sourcefile(body, headers, lang) - if self.dump_source: - dump_file(src, "compiling '%s':" % src) - obj = self.compiler.object_filenames([src])[0] - self.temp_files.extend((src, obj)) - self.compiler.compile([src], include_dirs=include_dirs) - return src, obj - - def _link(self, body, headers, include_dirs, libraries, library_dirs, - lang): - src, obj = self._compile(body, headers, include_dirs, lang) - prog = os.path.splitext(os.path.basename(src))[0] - self.compiler.link_executable([obj], prog, - libraries=libraries, - library_dirs=library_dirs, - target_lang=lang) - - if self.compiler.exe_extension is not None: - prog = prog + self.compiler.exe_extension - self.temp_files.append(prog) - - return src, obj, prog - - def _clean(self, *filenames): - if not filenames: - filenames = self.temp_files - self.temp_files = [] - logger.info("removing: %s", ' '.join(filenames)) - for filename in filenames: - try: - os.remove(filename) - except OSError: - pass - - - # XXX these ignore the dry-run flag: what to do, what to do? even if - # you want a dry-run build, you still need some sort of configuration - # info. My inclination is to make it up to the real config command to - # consult 'dry_run', and assume a default (minimal) configuration if - # true. The problem with trying to do it here is that you'd have to - # return either true or false from all the 'try' methods, neither of - # which is correct. - - # XXX need access to the header search path and maybe default macros. - - def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"): - """Construct a source file from 'body' (a string containing lines - of C/C++ code) and 'headers' (a list of header files to include) - and run it through the preprocessor. Return true if the - preprocessor succeeded, false if there were any errors. - ('body' probably isn't of much use, but what the heck.) - """ - from packaging.compiler.ccompiler import CompileError - self._check_compiler() - ok = True - try: - self._preprocess(body, headers, include_dirs, lang) - except CompileError: - ok = False - - self._clean() - return ok - - def search_cpp(self, pattern, body=None, headers=None, include_dirs=None, - lang="c"): - """Construct a source file (just like 'try_cpp()'), run it through - the preprocessor, and return true if any line of the output matches - 'pattern'. 
'pattern' should either be a compiled regex object or a - string containing a regex. If both 'body' and 'headers' are None, - preprocesses an empty file -- which can be useful to determine the - symbols the preprocessor and compiler set by default. - """ - self._check_compiler() - src, out = self._preprocess(body, headers, include_dirs, lang) - - if isinstance(pattern, str): - pattern = re.compile(pattern) - - with open(out) as file: - match = False - while True: - line = file.readline() - if line == '': - break - if pattern.search(line): - match = True - break - - self._clean() - return match - - def try_compile(self, body, headers=None, include_dirs=None, lang="c"): - """Try to compile a source file built from 'body' and 'headers'. - Return true on success, false otherwise. - """ - from packaging.compiler.ccompiler import CompileError - self._check_compiler() - try: - self._compile(body, headers, include_dirs, lang) - ok = True - except CompileError: - ok = False - - logger.info(ok and "success!" or "failure.") - self._clean() - return ok - - def try_link(self, body, headers=None, include_dirs=None, libraries=None, - library_dirs=None, lang="c"): - """Try to compile and link a source file, built from 'body' and - 'headers', to executable form. Return true on success, false - otherwise. - """ - from packaging.compiler.ccompiler import CompileError, LinkError - self._check_compiler() - try: - self._link(body, headers, include_dirs, - libraries, library_dirs, lang) - ok = True - except (CompileError, LinkError): - ok = False - - logger.info(ok and "success!" or "failure.") - self._clean() - return ok - - def try_run(self, body, headers=None, include_dirs=None, libraries=None, - library_dirs=None, lang="c"): - """Try to compile, link to an executable, and run a program - built from 'body' and 'headers'. Return true on success, false - otherwise. - """ - from packaging.compiler.ccompiler import CompileError, LinkError - self._check_compiler() - try: - src, obj, exe = self._link(body, headers, include_dirs, - libraries, library_dirs, lang) - self.spawn([exe]) - ok = True - except (CompileError, LinkError, PackagingExecError): - ok = False - - logger.info(ok and "success!" or "failure.") - self._clean() - return ok - - - # -- High-level methods -------------------------------------------- - # (these are the ones that are actually likely to be useful - # when implementing a real-world config command!) - - def check_func(self, func, headers=None, include_dirs=None, - libraries=None, library_dirs=None, decl=False, call=False): - - """Determine if function 'func' is available by constructing a - source file that refers to 'func', and compiles and links it. - If everything succeeds, returns true; otherwise returns false. - - The constructed source file starts out by including the header - files listed in 'headers'. If 'decl' is true, it then declares - 'func' (as "int func()"); you probably shouldn't supply 'headers' - and set 'decl' true in the same call, or you might get errors about - a conflicting declarations for 'func'. Finally, the constructed - 'main()' function either references 'func' or (if 'call' is true) - calls it. 'libraries' and 'library_dirs' are used when - linking. 
- """ - - self._check_compiler() - body = [] - if decl: - body.append("int %s ();" % func) - body.append("int main () {") - if call: - body.append(" %s();" % func) - else: - body.append(" %s;" % func) - body.append("}") - body = "\n".join(body) + "\n" - - return self.try_link(body, headers, include_dirs, - libraries, library_dirs) - - def check_lib(self, library, library_dirs=None, headers=None, - include_dirs=None, other_libraries=[]): - """Determine if 'library' is available to be linked against, - without actually checking that any particular symbols are provided - by it. 'headers' will be used in constructing the source file to - be compiled, but the only effect of this is to check if all the - header files listed are available. Any libraries listed in - 'other_libraries' will be included in the link, in case 'library' - has symbols that depend on other libraries. - """ - self._check_compiler() - return self.try_link("int main (void) { }", - headers, include_dirs, - [library]+other_libraries, library_dirs) - - def check_header(self, header, include_dirs=None, library_dirs=None, - lang="c"): - """Determine if the system header file named by 'header_file' - exists and can be found by the preprocessor; return true if so, - false otherwise. - """ - return self.try_cpp(body="/* No body */", headers=[header], - include_dirs=include_dirs) - - -def dump_file(filename, head=None): - """Dumps a file content into log.info. - - If head is not None, will be dumped before the file content. - """ - if head is None: - logger.info(filename) - else: - logger.info(head) - with open(filename) as file: - logger.info(file.read()) diff --git a/Lib/packaging/command/install_data.py b/Lib/packaging/command/install_data.py deleted file mode 100644 --- a/Lib/packaging/command/install_data.py +++ /dev/null @@ -1,79 +0,0 @@ -"""Install platform-independent data files.""" - -# Contributed by Bastian Kleineidam - -import os -from shutil import Error -from sysconfig import get_paths, format_value -from packaging import logger -from packaging.util import convert_path -from packaging.command.cmd import Command - - -class install_data(Command): - - description = "install platform-independent data files" - - user_options = [ - ('install-dir=', 'd', - "base directory for installing data files " - "(default: installation base dir)"), - ('root=', None, - "install everything relative to this alternate root directory"), - ('force', 'f', "force installation (overwrite existing files)"), - ] - - boolean_options = ['force'] - - def initialize_options(self): - self.install_dir = None - self.outfiles = [] - self.data_files_out = [] - self.root = None - self.force = False - self.data_files = self.distribution.data_files - self.warn_dir = True - - def finalize_options(self): - self.set_undefined_options('install_dist', - ('install_data', 'install_dir'), - 'root', 'force') - - def run(self): - self.mkpath(self.install_dir) - for _file in self.data_files.items(): - destination = convert_path(self.expand_categories(_file[1])) - dir_dest = os.path.abspath(os.path.dirname(destination)) - - self.mkpath(dir_dest) - try: - out = self.copy_file(_file[0], dir_dest)[0] - except Error as e: - logger.warning('%s: %s', self.get_command_name(), e) - out = destination - - self.outfiles.append(out) - self.data_files_out.append((_file[0], destination)) - - def expand_categories(self, path_with_categories): - local_vars = get_paths() - local_vars['distribution.name'] = self.distribution.metadata['Name'] - expanded_path = 
format_value(path_with_categories, local_vars) - expanded_path = format_value(expanded_path, local_vars) - if '{' in expanded_path and '}' in expanded_path: - logger.warning( - '%s: unable to expand %s, some categories may be missing', - self.get_command_name(), path_with_categories) - return expanded_path - - def get_source_files(self): - return list(self.data_files) - - def get_inputs(self): - return list(self.data_files) - - def get_outputs(self): - return self.outfiles - - def get_resources_out(self): - return self.data_files_out diff --git a/Lib/packaging/command/install_dist.py b/Lib/packaging/command/install_dist.py deleted file mode 100644 --- a/Lib/packaging/command/install_dist.py +++ /dev/null @@ -1,605 +0,0 @@ -"""Main install command, which calls the other install_* commands.""" - -import sys -import os - -import sysconfig -from sysconfig import get_config_vars, get_paths, get_path, get_config_var - -from packaging import logger -from packaging.command.cmd import Command -from packaging.errors import PackagingPlatformError -from packaging.util import write_file -from packaging.util import convert_path, change_root, get_platform -from packaging.errors import PackagingOptionError - - -class install_dist(Command): - - description = "install everything from build directory" - - user_options = [ - # Select installation scheme and set base director(y|ies) - ('prefix=', None, - "installation prefix"), - ('exec-prefix=', None, - "(Unix only) prefix for platform-specific files"), - ('user', None, - "install in user site-packages directory [%s]" % - get_path('purelib', '%s_user' % os.name)), - ('home=', None, - "(Unix only) home directory to install under"), - - # Or just set the base director(y|ies) - ('install-base=', None, - "base installation directory (instead of --prefix or --home)"), - ('install-platbase=', None, - "base installation directory for platform-specific files " + - "(instead of --exec-prefix or --home)"), - ('root=', None, - "install everything relative to this alternate root directory"), - - # Or explicitly set the installation scheme - ('install-purelib=', None, - "installation directory for pure Python module distributions"), - ('install-platlib=', None, - "installation directory for non-pure module distributions"), - ('install-lib=', None, - "installation directory for all module distributions " + - "(overrides --install-purelib and --install-platlib)"), - - ('install-headers=', None, - "installation directory for C/C++ headers"), - ('install-scripts=', None, - "installation directory for Python scripts"), - ('install-data=', None, - "installation directory for data files"), - - # Byte-compilation options -- see install_lib for details - ('compile', 'c', "compile .py to .pyc [default]"), - ('no-compile', None, "don't compile .py files"), - ('optimize=', 'O', - 'also compile with optimization: -O1 for "python -O", ' - '-O2 for "python -OO", and -O0 to disable [default: -O0]'), - - # Miscellaneous control options - ('force', 'f', - "force installation (overwrite any existing files)"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - - # Where to install documentation (eventually!) 
- #('doc-format=', None, "format of documentation to generate"), - #('install-man=', None, "directory for Unix man pages"), - #('install-html=', None, "directory for HTML documentation"), - #('install-info=', None, "directory for GNU info files"), - - # XXX use a name that makes clear this is the old format - ('record=', None, - "filename in which to record a list of installed files " - "(not PEP 376-compliant)"), - ('resources=', None, - "data files mapping"), - - # .dist-info related arguments, read by install_dist_info - ('no-distinfo', None, - "do not create a .dist-info directory"), - ('installer=', None, - "the name of the installer"), - ('requested', None, - "generate a REQUESTED file (i.e."), - ('no-requested', None, - "do not generate a REQUESTED file"), - ('no-record', None, - "do not generate a RECORD file"), - ] - - boolean_options = ['compile', 'force', 'skip-build', 'no-distinfo', - 'requested', 'no-record', 'user'] - - negative_opt = {'no-compile': 'compile', 'no-requested': 'requested'} - - def initialize_options(self): - # High-level options: these select both an installation base - # and scheme. - self.prefix = None - self.exec_prefix = None - self.home = None - self.user = False - - # These select only the installation base; it's up to the user to - # specify the installation scheme (currently, that means supplying - # the --install-{platlib,purelib,scripts,data} options). - self.install_base = None - self.install_platbase = None - self.root = None - - # These options are the actual installation directories; if not - # supplied by the user, they are filled in using the installation - # scheme implied by prefix/exec-prefix/home and the contents of - # that installation scheme. - self.install_purelib = None # for pure module distributions - self.install_platlib = None # non-pure (dists w/ extensions) - self.install_headers = None # for C/C++ headers - self.install_lib = None # set to either purelib or platlib - self.install_scripts = None - self.install_data = None - self.install_userbase = get_config_var('userbase') - self.install_usersite = get_path('purelib', '%s_user' % os.name) - - self.compile = None - self.optimize = None - - # These two are for putting non-packagized distributions into their - # own directory and creating a .pth file if it makes sense. - # 'extra_path' comes from the setup file; 'install_path_file' can - # be turned off if it makes no sense to install a .pth file. (But - # better to install it uselessly than to guess wrong and not - # install it when it's necessary and would be used!) Currently, - # 'install_path_file' is always true unless some outsider meddles - # with it. - self.extra_path = None - self.install_path_file = True - - # 'force' forces installation, even if target files are not - # out-of-date. 'skip_build' skips running the "build" command, - # handy if you know it's not necessary. 'warn_dir' (which is *not* - # a user option, it's just there so the bdist_* commands can turn - # it off) determines whether we warn about installing to a - # directory not in sys.path. - self.force = False - self.skip_build = False - self.warn_dir = True - - # These are only here as a conduit from the 'build' command to the - # 'install_*' commands that do the real work. ('build_base' isn't - # actually used anywhere, but it might be useful in future.) They - # are not user options, because if the user told the install - # command where the build directory is, that wouldn't affect the - # build command. 
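
The boolean_options and negative_opt tables above drive the generic option parsing: a negative flag such as --no-compile is stored under the positive attribute name with its value inverted, which is also why dump_dirs() below special-cases names found in negative_opt. A standalone sketch of that mapping (apply_option is a hypothetical helper, not the actual cmd machinery):

    # Hedged sketch: how a negative_opt table is applied.  A negative flag is
    # stored under the positive attribute name with the value inverted; plain
    # boolean options are stored as given.
    negative_opt = {'no-compile': 'compile', 'no-requested': 'requested'}

    def apply_option(opts, name, value=True):
        if name in negative_opt:
            opts[negative_opt[name].replace('-', '_')] = not value
        else:
            opts[name.replace('-', '_')] = value
        return opts

    print(apply_option({}, 'no-compile'))   # {'compile': False}
    print(apply_option({}, 'force'))        # {'force': True}
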
- self.build_base = None - self.build_lib = None - - # Not defined yet because we don't know anything about - # documentation yet. - #self.install_man = None - #self.install_html = None - #self.install_info = None - - self.record = None - self.resources = None - - # .dist-info related options - self.no_distinfo = None - self.installer = None - self.requested = None - self.no_record = None - - # -- Option finalizing methods ------------------------------------- - # (This is rather more involved than for most commands, - # because this is where the policy for installing third- - # party Python modules on various platforms given a wide - # array of user input is decided. Yes, it's quite complex!) - - def finalize_options(self): - # This method (and its pliant slaves, like 'finalize_unix()', - # 'finalize_other()', and 'select_scheme()') is where the default - # installation directories for modules, extension modules, and - # anything else we care to install from a Python module - # distribution. Thus, this code makes a pretty important policy - # statement about how third-party stuff is added to a Python - # installation! Note that the actual work of installation is done - # by the relatively simple 'install_*' commands; they just take - # their orders from the installation directory options determined - # here. - - # Check for errors/inconsistencies in the options; first, stuff - # that's wrong on any platform. - - if ((self.prefix or self.exec_prefix or self.home) and - (self.install_base or self.install_platbase)): - raise PackagingOptionError( - "must supply either prefix/exec-prefix/home or " - "install-base/install-platbase -- not both") - - if self.home and (self.prefix or self.exec_prefix): - raise PackagingOptionError( - "must supply either home or prefix/exec-prefix -- not both") - - if self.user and (self.prefix or self.exec_prefix or self.home or - self.install_base or self.install_platbase): - raise PackagingOptionError( - "can't combine user with prefix/exec_prefix/home or " - "install_base/install_platbase") - - # Next, stuff that's wrong (or dubious) only on certain platforms. - if os.name != "posix": - if self.exec_prefix: - logger.warning( - '%s: exec-prefix option ignored on this platform', - self.get_command_name()) - self.exec_prefix = None - - # Now the interesting logic -- so interesting that we farm it out - # to other methods. The goal of these methods is to set the final - # values for the install_{lib,scripts,data,...} options, using as - # input a heady brew of prefix, exec_prefix, home, install_base, - # install_platbase, user-supplied versions of - # install_{purelib,platlib,lib,scripts,data,...}, and the - # INSTALL_SCHEME dictionary above. Phew! - - self.dump_dirs("pre-finalize_{unix,other}") - - if os.name == 'posix': - self.finalize_unix() - else: - self.finalize_other() - - self.dump_dirs("post-finalize_{unix,other}()") - - # Expand configuration variables, tilde, etc. in self.install_base - # and self.install_platbase -- that way, we can use $base or - # $platbase in the other installation directories and not worry - # about needing recursive variable expansion (shudder). 
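
The comment above describes a two-stage expansion: install_base and install_platbase are expanded first, so that the remaining directories can refer to $base and $platbase without any recursive substitution. A rough illustration of the idea using string.Template (the real code goes through a private sysconfig helper, so this is only a stand-in):

    import os
    from string import Template

    # Stand-in for the two-stage expansion described above, using
    # string.Template rather than the private sysconfig substitution helper.
    config_vars = {'py_version_short': '3.3'}
    # Stage 1: expand the base directory itself (tilde, config vars, ...).
    config_vars['base'] = os.path.expanduser('~/.local')
    # Stage 2: the other directories may refer to $base without recursion.
    template = Template('$base/lib/python$py_version_short/site-packages')
    print(template.safe_substitute(config_vars))
    # e.g. /home/alice/.local/lib/python3.3/site-packages
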
- - py_version = '%s.%s' % sys.version_info[:2] - prefix, exec_prefix, srcdir, projectbase = get_config_vars( - 'prefix', 'exec_prefix', 'srcdir', 'projectbase') - - metadata = self.distribution.metadata - self.config_vars = { - 'dist_name': metadata['Name'], - 'dist_version': metadata['Version'], - 'dist_fullname': metadata.get_fullname(), - 'py_version': py_version, - 'py_version_short': py_version[:3], - 'py_version_nodot': py_version[:3:2], - 'sys_prefix': prefix, - 'prefix': prefix, - 'sys_exec_prefix': exec_prefix, - 'exec_prefix': exec_prefix, - 'srcdir': srcdir, - 'projectbase': projectbase, - 'userbase': self.install_userbase, - 'usersite': self.install_usersite, - } - - self.expand_basedirs() - - self.dump_dirs("post-expand_basedirs()") - - # Now define config vars for the base directories so we can expand - # everything else. - self.config_vars['base'] = self.install_base - self.config_vars['platbase'] = self.install_platbase - - # Expand "~" and configuration variables in the installation - # directories. - self.expand_dirs() - - self.dump_dirs("post-expand_dirs()") - - # Create directories under USERBASE - if self.user: - self.create_user_dirs() - - # Pick the actual directory to install all modules to: either - # install_purelib or install_platlib, depending on whether this - # module distribution is pure or not. Of course, if the user - # already specified install_lib, use their selection. - if self.install_lib is None: - if self.distribution.ext_modules: # has extensions: non-pure - self.install_lib = self.install_platlib - else: - self.install_lib = self.install_purelib - - # Convert directories from Unix /-separated syntax to the local - # convention. - self.convert_paths('lib', 'purelib', 'platlib', 'scripts', - 'data', 'headers', 'userbase', 'usersite') - - # Well, we're not actually fully completely finalized yet: we still - # have to deal with 'extra_path', which is the hack for allowing - # non-packagized module distributions (hello, Numerical Python!) to - # get their own directories. - self.handle_extra_path() - self.install_libbase = self.install_lib # needed for .pth file - self.install_lib = os.path.join(self.install_lib, self.extra_dirs) - - # If a new root directory was supplied, make all the installation - # dirs relative to it. - if self.root is not None: - self.change_roots('libbase', 'lib', 'purelib', 'platlib', - 'scripts', 'data', 'headers') - - self.dump_dirs("after prepending root") - - # Find out the build directories, ie. where to install from. - self.set_undefined_options('build', 'build_base', 'build_lib') - - # Punt on doc directories for now -- after all, we're punting on - # documentation completely! 
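
When a --root is supplied, every install_* directory computed above is re-rooted under that alternate directory via change_roots(). A simplified, posix-only sketch of what that rebasing amounts to (the real change_root() helper also handles nt paths):

    import os

    # Simplified, posix-only sketch of the --root rebasing: an absolute
    # target path is re-created underneath the alternate root directory.
    def change_root_posix(new_root, pathname):
        return os.path.join(new_root, pathname.lstrip('/'))

    print(change_root_posix('/tmp/stage', '/usr/lib/python3.3/site-packages'))
    # /tmp/stage/usr/lib/python3.3/site-packages
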
- - if self.no_distinfo is None: - self.no_distinfo = False - - def finalize_unix(self): - """Finalize options for posix platforms.""" - if self.install_base is not None or self.install_platbase is not None: - if ((self.install_lib is None and - self.install_purelib is None and - self.install_platlib is None) or - self.install_headers is None or - self.install_scripts is None or - self.install_data is None): - raise PackagingOptionError( - "install-base or install-platbase supplied, but " - "installation scheme is incomplete") - return - - if self.user: - if self.install_userbase is None: - raise PackagingPlatformError( - "user base directory is not specified") - self.install_base = self.install_platbase = self.install_userbase - self.select_scheme("posix_user") - elif self.home is not None: - self.install_base = self.install_platbase = self.home - self.select_scheme("posix_home") - else: - if self.prefix is None: - if self.exec_prefix is not None: - raise PackagingOptionError( - "must not supply exec-prefix without prefix") - - self.prefix = os.path.normpath(sys.prefix) - self.exec_prefix = os.path.normpath(sys.exec_prefix) - - else: - if self.exec_prefix is None: - self.exec_prefix = self.prefix - - self.install_base = self.prefix - self.install_platbase = self.exec_prefix - self.select_scheme("posix_prefix") - - def finalize_other(self): - """Finalize options for non-posix platforms""" - if self.user: - if self.install_userbase is None: - raise PackagingPlatformError( - "user base directory is not specified") - self.install_base = self.install_platbase = self.install_userbase - self.select_scheme(os.name + "_user") - elif self.home is not None: - self.install_base = self.install_platbase = self.home - self.select_scheme("posix_home") - else: - if self.prefix is None: - self.prefix = os.path.normpath(sys.prefix) - - self.install_base = self.install_platbase = self.prefix - try: - self.select_scheme(os.name) - except KeyError: - raise PackagingPlatformError( - "no support for installation on '%s'" % os.name) - - def dump_dirs(self, msg): - """Dump the list of user options.""" - logger.debug(msg + ":") - for opt in self.user_options: - opt_name = opt[0] - if opt_name[-1] == "=": - opt_name = opt_name[0:-1] - if opt_name in self.negative_opt: - opt_name = self.negative_opt[opt_name] - opt_name = opt_name.replace('-', '_') - val = not getattr(self, opt_name) - else: - opt_name = opt_name.replace('-', '_') - val = getattr(self, opt_name) - logger.debug(" %s: %s", opt_name, val) - - def select_scheme(self, name): - """Set the install directories by applying the install schemes.""" - # it's the caller's problem if they supply a bad name! 
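
select_scheme(), whose body follows, fills in any install_* directory the user left unset from one of sysconfig's named installation schemes. The unexpanded templates it draws from can be inspected directly; the exact placeholder strings vary by Python version, so the output shown is only indicative:

    import sysconfig

    # Peek at the unexpanded path templates behind a named install scheme;
    # select_scheme() copies these into the unset install_* attributes.
    scheme = sysconfig.get_paths('posix_prefix', expand=False)
    print(scheme['purelib'])   # e.g. {base}/lib/python{py_version_short}/site-packages
    print(scheme['scripts'])   # e.g. {base}/bin
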
- scheme = get_paths(name, expand=False) - for key, value in scheme.items(): - if key == 'platinclude': - key = 'headers' - value = os.path.join(value, self.distribution.metadata['Name']) - attrname = 'install_' + key - if hasattr(self, attrname): - if getattr(self, attrname) is None: - setattr(self, attrname, value) - - def _expand_attrs(self, attrs): - for attr in attrs: - val = getattr(self, attr) - if val is not None: - if os.name == 'posix' or os.name == 'nt': - val = os.path.expanduser(val) - # see if we want to push this work in sysconfig XXX - val = sysconfig._subst_vars(val, self.config_vars) - setattr(self, attr, val) - - def expand_basedirs(self): - """Call `os.path.expanduser` on install_{base,platbase} and root.""" - self._expand_attrs(['install_base', 'install_platbase', 'root']) - - def expand_dirs(self): - """Call `os.path.expanduser` on install dirs.""" - self._expand_attrs(['install_purelib', 'install_platlib', - 'install_lib', 'install_headers', - 'install_scripts', 'install_data']) - - def convert_paths(self, *names): - """Call `convert_path` over `names`.""" - for name in names: - attr = "install_" + name - setattr(self, attr, convert_path(getattr(self, attr))) - - def handle_extra_path(self): - """Set `path_file` and `extra_dirs` using `extra_path`.""" - if self.extra_path is None: - self.extra_path = self.distribution.extra_path - - if self.extra_path is not None: - if isinstance(self.extra_path, str): - self.extra_path = self.extra_path.split(',') - - if len(self.extra_path) == 1: - path_file = extra_dirs = self.extra_path[0] - elif len(self.extra_path) == 2: - path_file, extra_dirs = self.extra_path - else: - raise PackagingOptionError( - "'extra_path' option must be a list, tuple, or " - "comma-separated string with 1 or 2 elements") - - # convert to local form in case Unix notation used (as it - # should be in setup scripts) - extra_dirs = convert_path(extra_dirs) - else: - path_file = None - extra_dirs = '' - - # XXX should we warn if path_file and not extra_dirs? (in which - # case the path file would be harmless but pointless) - self.path_file = path_file - self.extra_dirs = extra_dirs - - def change_roots(self, *names): - """Change the install direcories pointed by name using root.""" - for name in names: - attr = "install_" + name - setattr(self, attr, change_root(self.root, getattr(self, attr))) - - def create_user_dirs(self): - """Create directories under USERBASE as needed.""" - home = convert_path(os.path.expanduser("~")) - for name, path in self.config_vars.items(): - if path.startswith(home) and not os.path.isdir(path): - os.makedirs(path, 0o700) - - # -- Command execution methods ------------------------------------- - - def run(self): - """Runs the command.""" - # Obviously have to build before we can install - if not self.skip_build: - self.run_command('build') - # If we built for any other platform, we can't install. - build_plat = self.distribution.get_command_obj('build').plat_name - # check warn_dir - it is a clue that the 'install_dist' is happening - # internally, and not to sys.path, so we don't check the platform - # matches what we are running. - if self.warn_dir and build_plat != get_platform(): - raise PackagingPlatformError("Can't install when " - "cross-compiling") - - # Run all sub-commands (at least those that need to be run) - for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - if self.path_file: - self.create_path_file() - - # write list of installed files, if requested. 
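
The path_file and extra_dirs values computed by handle_extra_path() above end up as a one-line .pth file next to the installed modules; at startup the site module adds each directory listed in such a file to sys.path. A hedged sketch of what create_path_file() further down effectively produces (write_pth is a hypothetical helper):

    import os

    # Hedged sketch: a '<path_file>.pth' file containing the extra directory
    # is written into the installation's libbase directory; the site module
    # later appends that directory to sys.path.
    def write_pth(install_libbase, path_file, extra_dirs):
        filename = os.path.join(install_libbase, path_file + '.pth')
        with open(filename, 'w') as f:
            f.write(extra_dirs + '\n')
        return filename

    # write_pth('/usr/lib/python3.3/site-packages', 'Numeric', 'Numeric')
    # would create .../Numeric.pth containing the single line "Numeric"
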
- if self.record: - outputs = self.get_outputs() - if self.root: # strip any package prefix - root_len = len(self.root) - for counter in range(len(outputs)): - outputs[counter] = outputs[counter][root_len:] - self.execute(write_file, - (self.record, outputs), - "writing list of installed files to '%s'" % - self.record) - - normpath, normcase = os.path.normpath, os.path.normcase - sys_path = [normcase(normpath(p)) for p in sys.path] - install_lib = normcase(normpath(self.install_lib)) - if (self.warn_dir and - not (self.path_file and self.install_path_file) and - install_lib not in sys_path): - logger.debug(("modules installed to '%s', which is not in " - "Python's module search path (sys.path) -- " - "you'll have to change the search path yourself"), - self.install_lib) - - def create_path_file(self): - """Creates the .pth file""" - filename = os.path.join(self.install_libbase, - self.path_file + ".pth") - if self.install_path_file: - self.execute(write_file, - (filename, [self.extra_dirs]), - "creating %s" % filename) - else: - logger.warning('%s: path file %r not created', - self.get_command_name(), filename) - - # -- Reporting methods --------------------------------------------- - - def get_outputs(self): - """Assembles the outputs of all the sub-commands.""" - outputs = [] - for cmd_name in self.get_sub_commands(): - cmd = self.get_finalized_command(cmd_name) - # Add the contents of cmd.get_outputs(), ensuring - # that outputs doesn't contain duplicate entries - for filename in cmd.get_outputs(): - if filename not in outputs: - outputs.append(filename) - - if self.path_file and self.install_path_file: - outputs.append(os.path.join(self.install_libbase, - self.path_file + ".pth")) - - return outputs - - def get_inputs(self): - """Returns the inputs of all the sub-commands""" - # XXX gee, this looks familiar ;-( - inputs = [] - for cmd_name in self.get_sub_commands(): - cmd = self.get_finalized_command(cmd_name) - inputs.extend(cmd.get_inputs()) - - return inputs - - # -- Predicates for sub-command list ------------------------------- - - def has_lib(self): - """Returns true if the current distribution has any Python - modules to install.""" - return (self.distribution.has_pure_modules() or - self.distribution.has_ext_modules()) - - def has_headers(self): - """Returns true if the current distribution has any headers to - install.""" - return self.distribution.has_headers() - - def has_scripts(self): - """Returns true if the current distribution has any scripts to. - install.""" - return self.distribution.has_scripts() - - def has_data(self): - """Returns true if the current distribution has any data to. - install.""" - return self.distribution.has_data_files() - - # 'sub_commands': a list of commands this command might have to run to - # get its work done. See cmd.py for more info. 
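
The has_* predicates above are referenced by the sub_commands table that follows: each entry pairs a command name with a predicate (or None for "always run"), and only the entries whose predicate returns true are executed. A simplified, hypothetical sketch of that filtering (the real logic lives in the base Command class in cmd.py):

    # Simplified sketch of how a (name, predicate) table is filtered; None
    # means "always run", otherwise the predicate is called with the command.
    def get_sub_commands(cmd, sub_commands):
        return [name for name, predicate in sub_commands
                if predicate is None or predicate(cmd)]

    class FakeInstall:
        def has_lib(self):
            return True
        def has_data(self):
            return False

    table = [('install_lib', FakeInstall.has_lib),
             ('install_data', FakeInstall.has_data)]
    print(get_sub_commands(FakeInstall(), table))   # ['install_lib']
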
- sub_commands = [('install_lib', has_lib), - ('install_headers', has_headers), - ('install_scripts', has_scripts), - ('install_data', has_data), - # keep install_distinfo last, as it needs the record - # with files to be completely generated - ('install_distinfo', lambda self: not self.no_distinfo), - ] diff --git a/Lib/packaging/command/install_distinfo.py b/Lib/packaging/command/install_distinfo.py deleted file mode 100644 --- a/Lib/packaging/command/install_distinfo.py +++ /dev/null @@ -1,143 +0,0 @@ -"""Create the PEP 376-compliant .dist-info directory.""" - -# Forked from the former install_egg_info command by Josip Djolonga - -import os -import csv -import hashlib -from shutil import rmtree - -from packaging import logger -from packaging.command.cmd import Command - - -class install_distinfo(Command): - - description = 'create a .dist-info directory for the distribution' - - user_options = [ - ('install-dir=', None, - "directory where the the .dist-info directory will be created"), - ('installer=', None, - "the name of the installer"), - ('requested', None, - "generate a REQUESTED file"), - ('no-requested', None, - "do not generate a REQUESTED file"), - ('no-record', None, - "do not generate a RECORD file"), - ('no-resources', None, - "do not generate a RESOURCES file"), - ] - - boolean_options = ['requested', 'no-record', 'no-resources'] - - negative_opt = {'no-requested': 'requested'} - - def initialize_options(self): - self.install_dir = None - self.installer = None - self.requested = None - self.no_record = None - self.no_resources = None - self.outfiles = [] - - def finalize_options(self): - self.set_undefined_options('install_dist', - 'installer', 'requested', 'no_record') - - self.set_undefined_options('install_lib', 'install_dir') - - if self.installer is None: - # FIXME distutils or packaging? 
- # + document default in the option help text above and in install - self.installer = 'distutils' - if self.requested is None: - self.requested = True - if self.no_record is None: - self.no_record = False - if self.no_resources is None: - self.no_resources = False - - metadata = self.distribution.metadata - - basename = metadata.get_fullname(filesafe=True) + ".dist-info" - - self.install_dir = os.path.join(self.install_dir, basename) - - def run(self): - target = self.install_dir - - if os.path.isdir(target) and not os.path.islink(target): - if not self.dry_run: - rmtree(target) - elif os.path.exists(target): - self.execute(os.unlink, (self.install_dir,), - "removing " + target) - - self.execute(os.makedirs, (target,), "creating " + target) - - metadata_path = os.path.join(self.install_dir, 'METADATA') - self.execute(self.distribution.metadata.write, (metadata_path,), - "creating " + metadata_path) - self.outfiles.append(metadata_path) - - installer_path = os.path.join(self.install_dir, 'INSTALLER') - logger.info('creating %s', installer_path) - if not self.dry_run: - with open(installer_path, 'w') as f: - f.write(self.installer) - self.outfiles.append(installer_path) - - if self.requested: - requested_path = os.path.join(self.install_dir, 'REQUESTED') - logger.info('creating %s', requested_path) - if not self.dry_run: - open(requested_path, 'wb').close() - self.outfiles.append(requested_path) - - if not self.no_resources: - install_data = self.get_finalized_command('install_data') - if install_data.get_resources_out() != []: - resources_path = os.path.join(self.install_dir, - 'RESOURCES') - logger.info('creating %s', resources_path) - if not self.dry_run: - with open(resources_path, 'w') as f: - writer = csv.writer(f, delimiter=',', - lineterminator='\n', - quotechar='"') - for row in install_data.get_resources_out(): - writer.writerow(row) - - self.outfiles.append(resources_path) - - if not self.no_record: - record_path = os.path.join(self.install_dir, 'RECORD') - logger.info('creating %s', record_path) - if not self.dry_run: - with open(record_path, 'w', encoding='utf-8') as f: - writer = csv.writer(f, delimiter=',', - lineterminator='\n', - quotechar='"') - - install = self.get_finalized_command('install_dist') - - for fpath in install.get_outputs(): - if fpath.endswith('.pyc') or fpath.endswith('.pyo'): - # do not put size and md5 hash, as in PEP-376 - writer.writerow((fpath, '', '')) - else: - size = os.path.getsize(fpath) - with open(fpath, 'rb') as fp: - hash = hashlib.md5() - hash.update(fp.read()) - md5sum = hash.hexdigest() - writer.writerow((fpath, md5sum, size)) - - # add the RECORD file itself - writer.writerow((record_path, '', '')) - self.outfiles.append(record_path) - - def get_outputs(self): - return self.outfiles diff --git a/Lib/packaging/command/install_headers.py b/Lib/packaging/command/install_headers.py deleted file mode 100644 --- a/Lib/packaging/command/install_headers.py +++ /dev/null @@ -1,43 +0,0 @@ -"""Install C/C++ header files to the Python include directory.""" - -from packaging.command.cmd import Command - - -# XXX force is never used -class install_headers(Command): - - description = "install C/C++ header files" - - user_options = [('install-dir=', 'd', - "directory to install header files to"), - ('force', 'f', - "force installation (overwrite existing files)"), - ] - - boolean_options = ['force'] - - def initialize_options(self): - self.install_dir = None - self.force = False - self.outfiles = [] - - def finalize_options(self): - 
self.set_undefined_options('install_dist', - ('install_headers', 'install_dir'), - 'force') - - def run(self): - headers = self.distribution.headers - if not headers: - return - - self.mkpath(self.install_dir) - for header in headers: - out = self.copy_file(header, self.install_dir)[0] - self.outfiles.append(out) - - def get_inputs(self): - return self.distribution.headers or [] - - def get_outputs(self): - return self.outfiles diff --git a/Lib/packaging/command/install_lib.py b/Lib/packaging/command/install_lib.py deleted file mode 100644 --- a/Lib/packaging/command/install_lib.py +++ /dev/null @@ -1,188 +0,0 @@ -"""Install all modules (extensions and pure Python).""" - -import os -import imp - -from packaging import logger -from packaging.command.cmd import Command -from packaging.errors import PackagingOptionError - - -# Extension for Python source files. -# XXX dead code? most of the codebase checks for literal '.py' -if hasattr(os, 'extsep'): - PYTHON_SOURCE_EXTENSION = os.extsep + "py" -else: - PYTHON_SOURCE_EXTENSION = ".py" - - -class install_lib(Command): - - description = "install all modules (extensions and pure Python)" - - # The options for controlling byte compilation are two independent sets: - # 'compile' is strictly boolean, and only decides whether to - # generate .pyc files. 'optimize' is three-way (0, 1, or 2), and - # decides both whether to generate .pyo files and what level of - # optimization to use. - - user_options = [ - ('install-dir=', 'd', "directory to install to"), - ('build-dir=', 'b', "build directory (where to install from)"), - ('force', 'f', "force installation (overwrite existing files)"), - ('compile', 'c', "compile .py to .pyc [default]"), - ('no-compile', None, "don't compile .py files"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), - ('skip-build', None, "skip the build steps"), - ] - - boolean_options = ['force', 'compile', 'skip-build'] - - negative_opt = {'no-compile': 'compile'} - - def initialize_options(self): - # let the 'install_dist' command dictate our installation directory - self.install_dir = None - self.build_dir = None - self.force = False - self.compile = None - self.optimize = None - self.skip_build = None - - def finalize_options(self): - # Get all the information we need to install pure Python modules - # from the umbrella 'install_dist' command -- build (source) directory, - # install (target) directory, and whether to compile .py files. - self.set_undefined_options('install_dist', - ('build_lib', 'build_dir'), - ('install_lib', 'install_dir'), - 'force', 'compile', 'optimize', - 'skip_build') - - if self.compile is None: - self.compile = True - if self.optimize is None: - self.optimize = 0 - - if not isinstance(self.optimize, int): - try: - self.optimize = int(self.optimize) - if self.optimize not in (0, 1, 2): - raise AssertionError - except (ValueError, AssertionError): - raise PackagingOptionError("optimize must be 0, 1, or 2") - - def run(self): - # Make sure we have built everything we need first - self.build() - - # Install everything: simply dump the entire contents of the build - # directory to the installation directory (that's the beauty of - # having a build directory!) 
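
The compile/optimize options described above decide which bytecode cache files get generated and later reported by get_outputs(). The deleted code computes those names with imp.cache_from_source() (see _bytecode_filenames() further down); importlib.util.cache_from_source() is today's spelling of the same call, shown here only as a rough illustration (plain .pyo files have since been replaced by opt-tagged .pyc files):

    import importlib.util

    # Rough illustration of bytecode cache-file naming; the exact interpreter
    # tag in the output depends on the Python version running the snippet.
    print(importlib.util.cache_from_source('pkg/module.py'))
    # e.g. pkg/__pycache__/module.cpython-312.pyc
    print(importlib.util.cache_from_source('pkg/module.py', optimization=2))
    # e.g. pkg/__pycache__/module.cpython-312.opt-2.pyc
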
- outfiles = self.install() - - # (Optionally) compile .py to .pyc and/or .pyo - if outfiles is not None and self.distribution.has_pure_modules(): - # XXX comment from distutils: "This [prefix stripping] is far from - # complete, but it should at least generate usable bytecode in RPM - # distributions." -> need to find exact requirements for - # byte-compiled files and fix it - install_root = self.get_finalized_command('install_dist').root - self.byte_compile(outfiles, prefix=install_root) - - # -- Top-level worker functions ------------------------------------ - # (called from 'run()') - - def build(self): - if not self.skip_build: - if self.distribution.has_pure_modules(): - self.run_command('build_py') - if self.distribution.has_ext_modules(): - self.run_command('build_ext') - - def install(self): - if os.path.isdir(self.build_dir): - outfiles = self.copy_tree(self.build_dir, self.install_dir) - else: - logger.warning( - '%s: %r does not exist -- no Python modules to install', - self.get_command_name(), self.build_dir) - return - return outfiles - - # -- Utility methods ----------------------------------------------- - - def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir): - if not has_any: - return [] - - build_cmd = self.get_finalized_command(build_cmd) - build_files = build_cmd.get_outputs() - build_dir = getattr(build_cmd, cmd_option) - - prefix_len = len(build_dir) + len(os.sep) - outputs = [] - for file in build_files: - outputs.append(os.path.join(output_dir, file[prefix_len:])) - - return outputs - - def _bytecode_filenames(self, py_filenames): - bytecode_files = [] - for py_file in py_filenames: - # Since build_py handles package data installation, the - # list of outputs can contain more than just .py files. - # Make sure we only report bytecode for the .py files. - ext = os.path.splitext(os.path.normcase(py_file))[1] - if ext != PYTHON_SOURCE_EXTENSION: - continue - if self.compile: - bytecode_files.append(imp.cache_from_source(py_file, True)) - if self.optimize: - bytecode_files.append(imp.cache_from_source(py_file, False)) - - return bytecode_files - - # -- External interface -------------------------------------------- - # (called by outsiders) - - def get_outputs(self): - """Return the list of files that would be installed if this command - were actually run. Not affected by the "dry-run" flag or whether - modules have actually been built yet. - """ - pure_outputs = \ - self._mutate_outputs(self.distribution.has_pure_modules(), - 'build_py', 'build_lib', - self.install_dir) - if self.compile: - bytecode_outputs = self._bytecode_filenames(pure_outputs) - else: - bytecode_outputs = [] - - ext_outputs = \ - self._mutate_outputs(self.distribution.has_ext_modules(), - 'build_ext', 'build_lib', - self.install_dir) - - return pure_outputs + bytecode_outputs + ext_outputs - - def get_inputs(self): - """Get the list of files that are input to this command, ie. the - files that get installed as they are named in the build tree. - The files in this list correspond one-to-one to the output - filenames returned by 'get_outputs()'. 
- """ - inputs = [] - - if self.distribution.has_pure_modules(): - build_py = self.get_finalized_command('build_py') - inputs.extend(build_py.get_outputs()) - - if self.distribution.has_ext_modules(): - build_ext = self.get_finalized_command('build_ext') - inputs.extend(build_ext.get_outputs()) - - return inputs diff --git a/Lib/packaging/command/install_scripts.py b/Lib/packaging/command/install_scripts.py deleted file mode 100644 --- a/Lib/packaging/command/install_scripts.py +++ /dev/null @@ -1,59 +0,0 @@ -"""Install scripts.""" - -# Contributed by Bastian Kleineidam - -import os -from packaging.command.cmd import Command -from packaging import logger - -class install_scripts(Command): - - description = "install scripts (Python or otherwise)" - - user_options = [ - ('install-dir=', 'd', "directory to install scripts to"), - ('build-dir=','b', "build directory (where to install from)"), - ('force', 'f', "force installation (overwrite existing files)"), - ('skip-build', None, "skip the build steps"), - ] - - boolean_options = ['force', 'skip-build'] - - - def initialize_options(self): - self.install_dir = None - self.force = False - self.build_dir = None - self.skip_build = None - - def finalize_options(self): - self.set_undefined_options('build', ('build_scripts', 'build_dir')) - self.set_undefined_options('install_dist', - ('install_scripts', 'install_dir'), - 'force', 'skip_build') - - def run(self): - if not self.skip_build: - self.run_command('build_scripts') - - if not os.path.exists(self.build_dir): - self.outfiles = [] - return - - self.outfiles = self.copy_tree(self.build_dir, self.install_dir) - if os.name == 'posix': - # Set the executable bits (owner, group, and world) on - # all the scripts we just installed. - for file in self.get_outputs(): - if self.dry_run: - logger.info("changing mode of %s", file) - else: - mode = (os.stat(file).st_mode | 0o555) & 0o7777 - logger.info("changing mode of %s to %o", file, mode) - os.chmod(file, mode) - - def get_inputs(self): - return self.distribution.scripts or [] - - def get_outputs(self): - return self.outfiles or [] diff --git a/Lib/packaging/command/register.py b/Lib/packaging/command/register.py deleted file mode 100644 --- a/Lib/packaging/command/register.py +++ /dev/null @@ -1,263 +0,0 @@ -"""Register a release with a project index.""" - -# Contributed by Richard Jones - -import getpass -import urllib.error -import urllib.parse -import urllib.request - -from packaging import logger -from packaging.util import (read_pypirc, generate_pypirc, DEFAULT_REPOSITORY, - DEFAULT_REALM, get_pypirc_path, encode_multipart) -from packaging.command.cmd import Command - -class register(Command): - - description = "register a release with PyPI" - user_options = [ - ('repository=', 'r', - "repository URL [default: %s]" % DEFAULT_REPOSITORY), - ('show-response', None, - "display full response text from server"), - ('list-classifiers', None, - "list valid Trove classifiers"), - ('strict', None , - "stop the registration if the metadata is not fully compliant") - ] - - boolean_options = ['show-response', 'list-classifiers', 'strict'] - - def initialize_options(self): - self.repository = None - self.realm = None - self.show_response = False - self.list_classifiers = False - self.strict = False - - def finalize_options(self): - if self.repository is None: - self.repository = DEFAULT_REPOSITORY - if self.realm is None: - self.realm = DEFAULT_REALM - - def run(self): - self._set_config() - - # Check the package metadata - check = 
self.distribution.get_command_obj('check') - if check.strict != self.strict and not check.all: - # If check was already run but with different options, - # re-run it - check.strict = self.strict - check.all = True - self.distribution.have_run.pop('check', None) - self.run_command('check') - - if self.dry_run: - self.verify_metadata() - elif self.list_classifiers: - self.classifiers() - else: - self.send_metadata() - - def _set_config(self): - ''' Reads the configuration file and set attributes. - ''' - config = read_pypirc(self.repository, self.realm) - if config != {}: - self.username = config['username'] - self.password = config['password'] - self.repository = config['repository'] - self.realm = config['realm'] - self.has_config = True - else: - if self.repository not in ('pypi', DEFAULT_REPOSITORY): - raise ValueError('%s not found in .pypirc' % self.repository) - if self.repository == 'pypi': - self.repository = DEFAULT_REPOSITORY - self.has_config = False - - def classifiers(self): - ''' Fetch the list of classifiers from the server. - ''' - response = urllib.request.urlopen(self.repository+'?:action=list_classifiers') - logger.info(response.read()) - - def verify_metadata(self): - ''' Send the metadata to the package index server to be checked. - ''' - # send the info to the server and report the result - code, result = self.post_to_server(self.build_post_data('verify')) - logger.info('server response (%s): %s', code, result) - - - def send_metadata(self): - ''' Send the metadata to the package index server. - - Well, do the following: - 1. figure who the user is, and then - 2. send the data as a Basic auth'ed POST. - - First we try to read the username/password from $HOME/.pypirc, - which is a ConfigParser-formatted file with a section - [distutils] containing username and password entries (both - in clear text). Eg: - - [distutils] - index-servers = - pypi - - [pypi] - username: fred - password: sekrit - - Otherwise, to figure who the user is, we offer the user three - choices: - - 1. use existing login, - 2. register as a new user, or - 3. set the password to a random string and email the user. - - ''' - # TODO factor registration out into another method - # TODO use print to print, not logging - - # see if we can short-cut and get the username/password from the - # config - if self.has_config: - choice = '1' - username = self.username - password = self.password - else: - choice = 'x' - username = password = '' - - # get the user's login info - choices = '1 2 3 4'.split() - while choice not in choices: - logger.info('''\ -We need to know who you are, so please choose either: - 1. use your existing login, - 2. register as a new user, - 3. have the server generate a new password for you (and email it to you), or - 4. 
quit -Your selection [default 1]: ''') - - choice = input() - if not choice: - choice = '1' - elif choice not in choices: - print('Please choose one of the four options!') - - if choice == '1': - # get the username and password - while not username: - username = input('Username: ') - while not password: - password = getpass.getpass('Password: ') - - # set up the authentication - auth = urllib.request.HTTPPasswordMgr() - host = urllib.parse.urlparse(self.repository)[1] - auth.add_password(self.realm, host, username, password) - # send the info to the server and report the result - code, result = self.post_to_server(self.build_post_data('submit'), - auth) - logger.info('Server response (%s): %s', code, result) - - # possibly save the login - if code == 200: - if self.has_config: - # sharing the password in the distribution instance - # so the upload command can reuse it - self.distribution.password = password - else: - logger.info( - 'I can store your PyPI login so future submissions ' - 'will be faster.\n(the login will be stored in %s)', - get_pypirc_path()) - choice = 'X' - while choice.lower() not in ('y', 'n'): - choice = input('Save your login (y/N)?') - if not choice: - choice = 'n' - if choice.lower() == 'y': - generate_pypirc(username, password) - - elif choice == '2': - data = {':action': 'user'} - data['name'] = data['password'] = data['email'] = '' - data['confirm'] = None - while not data['name']: - data['name'] = input('Username: ') - while data['password'] != data['confirm']: - while not data['password']: - data['password'] = getpass.getpass('Password: ') - while not data['confirm']: - data['confirm'] = getpass.getpass(' Confirm: ') - if data['password'] != data['confirm']: - data['password'] = '' - data['confirm'] = None - print("Password and confirm don't match!") - while not data['email']: - data['email'] = input(' EMail: ') - code, result = self.post_to_server(data) - if code != 200: - logger.info('server response (%s): %s', code, result) - else: - logger.info('you will receive an email shortly; follow the ' - 'instructions in it to complete registration.') - elif choice == '3': - data = {':action': 'password_reset'} - data['email'] = '' - while not data['email']: - data['email'] = input('Your email address: ') - code, result = self.post_to_server(data) - logger.info('server response (%s): %s', code, result) - - def build_post_data(self, action): - # figure the data to send - the metadata plus some additional - # information used by the package server - data = self.distribution.metadata.todict() - data[':action'] = action - return data - - # XXX to be refactored with upload.upload_file - def post_to_server(self, data, auth=None): - ''' Post a query to the server, and return a string response. 
- ''' - if 'name' in data: - logger.info('Registering %s to %s', data['name'], self.repository) - # Build up the MIME payload for the urllib2 POST data - content_type, body = encode_multipart(data.items(), []) - - # build the Request - headers = { - 'Content-type': content_type, - 'Content-length': str(len(body)) - } - req = urllib.request.Request(self.repository, body, headers) - - # handle HTTP and include the Basic Auth handler - opener = urllib.request.build_opener( - urllib.request.HTTPBasicAuthHandler(password_mgr=auth) - ) - data = '' - try: - result = opener.open(req) - except urllib.error.HTTPError as e: - if self.show_response: - data = e.fp.read() - result = e.code, e.msg - except urllib.error.URLError as e: - result = 500, str(e) - else: - if self.show_response: - data = result.read() - result = 200, 'OK' - if self.show_response: - dashes = '-' * 75 - logger.info('%s%s%s', dashes, data, dashes) - - return result diff --git a/Lib/packaging/command/sdist.py b/Lib/packaging/command/sdist.py deleted file mode 100644 --- a/Lib/packaging/command/sdist.py +++ /dev/null @@ -1,347 +0,0 @@ -"""Create a source distribution.""" - -import os -import re -import sys -from io import StringIO -from shutil import get_archive_formats, rmtree - -from packaging import logger -from packaging.util import resolve_name -from packaging.errors import (PackagingPlatformError, PackagingOptionError, - PackagingModuleError, PackagingFileError) -from packaging.command import get_command_names -from packaging.command.cmd import Command -from packaging.manifest import Manifest - - -def show_formats(): - """Print all possible values for the 'formats' option (used by - the "--help-formats" command-line option). - """ - from packaging.fancy_getopt import FancyGetopt - formats = sorted(('formats=' + name, None, desc) - for name, desc in get_archive_formats()) - FancyGetopt(formats).print_help( - "List of available source distribution formats:") - -# a \ followed by some spaces + EOL -_COLLAPSE_PATTERN = re.compile('\\\w\n', re.M) -_COMMENTED_LINE = re.compile('^#.*\n$|^\w*\n$', re.M) - - -class sdist(Command): - - description = "create a source distribution (tarball, zip file, etc.)" - - user_options = [ - ('manifest=', 'm', - "name of manifest file [default: MANIFEST]"), - ('use-defaults', None, - "include the default file set in the manifest " - "[default; disable with --no-defaults]"), - ('no-defaults', None, - "don't include the default file set"), - ('prune', None, - "specifically exclude files/directories that should not be " - "distributed (build tree, RCS/CVS dirs, etc.) " - "[default; disable with --no-prune]"), - ('no-prune', None, - "don't automatically exclude anything"), - ('manifest-only', 'o', - "just regenerate the manifest and then stop "), - ('formats=', None, - "formats for source distribution (comma-separated list)"), - ('keep-temp', 'k', - "keep the distribution tree around after creating " + - "archive file(s)"), - ('dist-dir=', 'd', - "directory to put the source distribution archive(s) in " - "[default: dist]"), - ('check-metadata', None, - "Ensure that all required elements of metadata " - "are supplied. Warn if any missing. 
[default]"), - ('owner=', 'u', - "Owner name used when creating a tar file [default: current user]"), - ('group=', 'g', - "Group name used when creating a tar file [default: current group]"), - ('manifest-builders=', None, - "manifest builders (comma-separated list)"), - ] - - boolean_options = ['use-defaults', 'prune', - 'manifest-only', 'keep-temp', 'check-metadata'] - - help_options = [ - ('help-formats', None, - "list available distribution formats", show_formats), - ] - - negative_opt = {'no-defaults': 'use-defaults', - 'no-prune': 'prune'} - - default_format = {'posix': 'gztar', - 'nt': 'zip'} - - def initialize_options(self): - self.manifest = None - # 'use_defaults': if true, we will include the default file set - # in the manifest - self.use_defaults = True - self.prune = True - self.manifest_only = False - self.formats = None - self.keep_temp = False - self.dist_dir = None - - self.archive_files = None - self.metadata_check = True - self.owner = None - self.group = None - self.filelist = None - self.manifest_builders = None - - def _check_archive_formats(self, formats): - supported_formats = [name for name, desc in get_archive_formats()] - for format in formats: - if format not in supported_formats: - return format - return None - - def finalize_options(self): - if self.manifest is None: - self.manifest = "MANIFEST" - - self.ensure_string_list('formats') - if self.formats is None: - try: - self.formats = [self.default_format[os.name]] - except KeyError: - raise PackagingPlatformError("don't know how to create source " - "distributions on platform %s" % os.name) - - bad_format = self._check_archive_formats(self.formats) - if bad_format: - raise PackagingOptionError("unknown archive format '%s'" \ - % bad_format) - - if self.dist_dir is None: - self.dist_dir = "dist" - - if self.filelist is None: - self.filelist = Manifest() - - if self.manifest_builders is None: - self.manifest_builders = [] - else: - if isinstance(self.manifest_builders, str): - self.manifest_builders = self.manifest_builders.split(',') - builders = [] - for builder in self.manifest_builders: - builder = builder.strip() - if builder == '': - continue - try: - builder = resolve_name(builder) - except ImportError as e: - raise PackagingModuleError(e) - - builders.append(builder) - - self.manifest_builders = builders - - def run(self): - # 'filelist' contains the list of files that will make up the - # manifest - self.filelist.clear() - - # Check the package metadata - if self.metadata_check: - self.run_command('check') - - # Do whatever it takes to get the list of files to process - # (process the manifest template, read an existing manifest, - # whatever). File list is accumulated in 'self.filelist'. - self.get_file_list() - - # If user just wanted us to regenerate the manifest, stop now. - if self.manifest_only: - return - - # Otherwise, go ahead and create the source distribution tarball, - # or zipfile, or whatever. - self.make_distribution() - - def get_file_list(self): - """Figure out the list of files to include in the source - distribution, and put it in 'self.filelist'. This might involve - reading the manifest template (and writing the manifest), or just - reading the manifest, or just using the default file set -- it all - depends on the user's options. 
- """ - template_exists = len(self.distribution.extra_files) > 0 - if not template_exists: - logger.warning('%s: using default file list', - self.get_command_name()) - self.filelist.findall() - - if self.use_defaults: - self.add_defaults() - if template_exists: - template = '\n'.join(self.distribution.extra_files) - self.filelist.read_template(StringIO(template)) - - # call manifest builders, if any. - for builder in self.manifest_builders: - builder(self.distribution, self.filelist) - - if self.prune: - self.prune_file_list() - - self.filelist.write(self.manifest) - - def add_defaults(self): - """Add all default files to self.filelist. - - In addition to the setup.cfg file, this will include all files returned - by the get_source_files of every registered command. This will find - Python modules and packages, data files listed in package_data_, - data_files and extra_files, scripts, C sources of extension modules or - C libraries (headers are missing). - """ - if os.path.exists('setup.cfg'): - self.filelist.append('setup.cfg') - else: - logger.warning("%s: standard 'setup.cfg' file not found", - self.get_command_name()) - - for cmd_name in get_command_names(): - try: - cmd_obj = self.get_finalized_command(cmd_name) - except PackagingOptionError: - pass - else: - self.filelist.extend(cmd_obj.get_source_files()) - - def prune_file_list(self): - """Prune off branches that might slip into the file list as created - by 'read_template()', but really don't belong there: - * the build tree (typically "build") - * the release tree itself (only an issue if we ran "sdist" - previously with --keep-temp, or it aborted) - * any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories - """ - build = self.get_finalized_command('build') - base_dir = self.distribution.get_fullname() - - self.filelist.exclude_pattern(None, prefix=build.build_base) - self.filelist.exclude_pattern(None, prefix=base_dir) - - # pruning out vcs directories - # both separators are used under win32 - if sys.platform == 'win32': - seps = r'/|\\' - else: - seps = '/' - - vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr', - '_darcs'] - vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps) - self.filelist.exclude_pattern(vcs_ptrn, is_regex=True) - - def make_release_tree(self, base_dir, files): - """Create the directory tree that will become the source - distribution archive. All directories implied by the filenames in - 'files' are created under 'base_dir', and then we hard link or copy - (if hard linking is unavailable) those files into place. - Essentially, this duplicates the developer's source tree, but in a - directory named after the distribution, containing only the files - to be distributed. - """ - # Create all the directories under 'base_dir' necessary to - # put 'files' there; the 'mkpath()' is just so we don't die - # if the manifest happens to be empty. - self.mkpath(base_dir) - self.create_tree(base_dir, files, dry_run=self.dry_run) - - # And walk over the list of files, either making a hard link (if - # os.link exists) to each one that doesn't already exist in its - # corresponding location under 'base_dir', or copying each file - # that's out-of-date in 'base_dir'. (Usually, all files will be - # out-of-date, because by default we blow away 'base_dir' when - # we're done making the distribution archives.) - - if hasattr(os, 'link'): # can make hard links on this system - link = 'hard' - msg = "making hard links in %s..." 
% base_dir - else: # nope, have to copy - link = None - msg = "copying files to %s..." % base_dir - - if not files: - logger.warning("no files to distribute -- empty manifest?") - else: - logger.info(msg) - - for file in self.distribution.metadata.requires_files: - if file not in files: - msg = "'%s' must be included explicitly in 'extra_files'" \ - % file - raise PackagingFileError(msg) - - for file in files: - if not os.path.isfile(file): - logger.warning("'%s' not a regular file -- skipping", file) - else: - dest = os.path.join(base_dir, file) - self.copy_file(file, dest, link=link) - - self.distribution.metadata.write(os.path.join(base_dir, 'PKG-INFO')) - - def make_distribution(self): - """Create the source distribution(s). First, we create the release - tree with 'make_release_tree()'; then, we create all required - archive files (according to 'self.formats') from the release tree. - Finally, we clean up by blowing away the release tree (unless - 'self.keep_temp' is true). The list of archive files created is - stored so it can be retrieved later by 'get_archive_files()'. - """ - # Don't warn about missing metadata here -- should be (and is!) - # done elsewhere. - base_dir = self.distribution.get_fullname() - base_name = os.path.join(self.dist_dir, base_dir) - - self.make_release_tree(base_dir, self.filelist.files) - archive_files = [] # remember names of files we create - # tar archive must be created last to avoid overwrite and remove - if 'tar' in self.formats: - self.formats.append(self.formats.pop(self.formats.index('tar'))) - - for fmt in self.formats: - file = self.make_archive(base_name, fmt, base_dir=base_dir, - owner=self.owner, group=self.group) - archive_files.append(file) - self.distribution.dist_files.append(('sdist', '', file)) - - self.archive_files = archive_files - - if not self.keep_temp: - if self.dry_run: - logger.info('removing %s', base_dir) - else: - rmtree(base_dir) - - def get_archive_files(self): - """Return the list of archive files created when the command - was run, or None if the command hasn't run yet. 
- """ - return self.archive_files - - def create_tree(self, base_dir, files, mode=0o777, dry_run=False): - need_dir = set() - for file in files: - need_dir.add(os.path.join(base_dir, os.path.dirname(file))) - - # Now create them - for dir in sorted(need_dir): - self.mkpath(dir, mode, dry_run=dry_run) diff --git a/Lib/packaging/command/test.py b/Lib/packaging/command/test.py deleted file mode 100644 --- a/Lib/packaging/command/test.py +++ /dev/null @@ -1,80 +0,0 @@ -"""Run the project's test suite.""" - -import os -import sys -import logging -import unittest - -from packaging import logger -from packaging.command.cmd import Command -from packaging.database import get_distribution -from packaging.errors import PackagingOptionError -from packaging.util import resolve_name - - -class test(Command): - - description = "run the project's test suite" - - user_options = [ - ('suite=', 's', - "test suite to run (for example: 'some_module.test_suite')"), - ('runner=', None, - "test runner to be called."), - ('tests-require=', None, - "list of distributions required to run the test suite."), - ] - - def initialize_options(self): - self.suite = None - self.runner = None - self.tests_require = [] - - def finalize_options(self): - self.build_lib = self.get_finalized_command("build").build_lib - for requirement in self.tests_require: - if get_distribution(requirement) is None: - logger.warning("test dependency %s is not installed, " - "tests may fail", requirement) - if (not self.suite and not self.runner and - self.get_ut_with_discovery() is None): - raise PackagingOptionError( - "no test discovery available, please give a 'suite' or " - "'runner' option or install unittest2") - - def get_ut_with_discovery(self): - if hasattr(unittest.TestLoader, "discover"): - return unittest - else: - try: - import unittest2 - return unittest2 - except ImportError: - return None - - def run(self): - prev_syspath = sys.path[:] - try: - # build release - build = self.reinitialize_command('build') - self.run_command('build') - sys.path.insert(0, build.build_lib) - - # XXX maybe we could pass the verbose argument of pysetup here - logger = logging.getLogger('packaging') - verbose = logger.getEffectiveLevel() >= logging.DEBUG - verbosity = verbose + 1 - - # run the tests - if self.runner: - resolve_name(self.runner)() - elif self.suite: - runner = unittest.TextTestRunner(verbosity=verbosity) - runner.run(resolve_name(self.suite)()) - elif self.get_ut_with_discovery(): - ut = self.get_ut_with_discovery() - test_suite = ut.TestLoader().discover(os.curdir) - runner = ut.TextTestRunner(verbosity=verbosity) - runner.run(test_suite) - finally: - sys.path[:] = prev_syspath diff --git a/Lib/packaging/command/upload.py b/Lib/packaging/command/upload.py deleted file mode 100644 --- a/Lib/packaging/command/upload.py +++ /dev/null @@ -1,168 +0,0 @@ -"""Upload a distribution to a project index.""" - -import os -import socket -import logging -import platform -import urllib.parse -from base64 import standard_b64encode -from hashlib import md5 -from urllib.error import HTTPError -from urllib.request import urlopen, Request - -from packaging import logger -from packaging.errors import PackagingOptionError -from packaging.util import (spawn, read_pypirc, DEFAULT_REPOSITORY, - DEFAULT_REALM, encode_multipart) -from packaging.command.cmd import Command - - -class upload(Command): - - description = "upload distribution to PyPI" - - user_options = [ - ('repository=', 'r', - "repository URL [default: %s]" % DEFAULT_REPOSITORY), - 
('show-response', None, - "display full response text from server"), - ('sign', 's', - "sign files to upload using gpg"), - ('identity=', 'i', - "GPG identity used to sign files"), - ('upload-docs', None, - "upload documentation too"), - ] - - boolean_options = ['show-response', 'sign'] - - def initialize_options(self): - self.repository = None - self.realm = None - self.show_response = False - self.username = '' - self.password = '' - self.show_response = False - self.sign = False - self.identity = None - self.upload_docs = False - - def finalize_options(self): - if self.repository is None: - self.repository = DEFAULT_REPOSITORY - if self.realm is None: - self.realm = DEFAULT_REALM - if self.identity and not self.sign: - raise PackagingOptionError( - "Must use --sign for --identity to have meaning") - config = read_pypirc(self.repository, self.realm) - if config != {}: - self.username = config['username'] - self.password = config['password'] - self.repository = config['repository'] - self.realm = config['realm'] - - # getting the password from the distribution - # if previously set by the register command - if not self.password and self.distribution.password: - self.password = self.distribution.password - - def run(self): - if not self.distribution.dist_files: - raise PackagingOptionError( - "No dist file created in earlier command") - for command, pyversion, filename in self.distribution.dist_files: - self.upload_file(command, pyversion, filename) - if self.upload_docs: - upload_docs = self.get_finalized_command("upload_docs") - upload_docs.repository = self.repository - upload_docs.username = self.username - upload_docs.password = self.password - upload_docs.run() - - # XXX to be refactored with register.post_to_server - def upload_file(self, command, pyversion, filename): - # Makes sure the repository URL is compliant - scheme, netloc, url, params, query, fragments = \ - urllib.parse.urlparse(self.repository) - if params or query or fragments: - raise AssertionError("Incompatible url %s" % self.repository) - - if scheme not in ('http', 'https'): - raise AssertionError("unsupported scheme " + scheme) - - # Sign if requested - if self.sign: - gpg_args = ["gpg", "--detach-sign", "-a", filename] - if self.identity: - gpg_args[2:2] = ["--local-user", self.identity] - spawn(gpg_args, - dry_run=self.dry_run) - - # Fill in the data - send all the metadata in case we need to - # register a new release - with open(filename, 'rb') as f: - content = f.read() - - data = self.distribution.metadata.todict() - - # extra upload infos - data[':action'] = 'file_upload' - data['protcol_version'] = '1' - data['content'] = (os.path.basename(filename), content) - data['filetype'] = command - data['pyversion'] = pyversion - data['md5_digest'] = md5(content).hexdigest() - - if command == 'bdist_dumb': - data['comment'] = 'built for %s' % platform.platform(terse=True) - - if self.sign: - with open(filename + '.asc') as fp: - sig = fp.read() - data['gpg_signature'] = [ - (os.path.basename(filename) + ".asc", sig)] - - # set up the authentication - # The exact encoding of the authentication string is debated. - # Anyway PyPI only accepts ascii for both username or password. 
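As a side note on the upload_file() logic above: the archive is read once, an MD5 digest of its contents goes into the form data, and an optional detached GPG signature is produced next to the file. A minimal standalone sketch of those preparation steps, assuming gpg is on PATH (the helper name is illustrative, not part of this changeset):

    import subprocess
    from hashlib import md5

    def prepare_upload(filename, sign=False, identity=None):
        # Read the archive once; both the digest and the POST body need the bytes.
        with open(filename, 'rb') as f:
            content = f.read()
        digest = md5(content).hexdigest()
        if sign:
            # Detached ASCII-armored signature written as '<filename>.asc',
            # mirroring the spawn(gpg_args) call in upload_file() above.
            gpg_args = ['gpg', '--detach-sign', '-a', filename]
            if identity:
                gpg_args[2:2] = ['--local-user', identity]
            subprocess.check_call(gpg_args)
        return content, digest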
- user_pass = (self.username + ":" + self.password).encode('ascii') - auth = b"Basic " + standard_b64encode(user_pass) - - # Build up the MIME payload for the POST data - files = [] - for key in ('content', 'gpg_signature'): - if key in data: - filename_, value = data.pop(key) - files.append((key, filename_, value)) - - content_type, body = encode_multipart(data.items(), files) - - logger.info("Submitting %s to %s", filename, self.repository) - - # build the Request - headers = {'Content-type': content_type, - 'Content-length': str(len(body)), - 'Authorization': auth} - - request = Request(self.repository, body, headers) - # send the data - try: - result = urlopen(request) - status = result.code - reason = result.msg - except socket.error as e: - logger.error(e) - return - except HTTPError as e: - status = e.code - reason = e.msg - - if status == 200: - logger.info('Server response (%s): %s', status, reason) - else: - logger.error('Upload failed (%s): %s', status, reason) - - if self.show_response and logger.isEnabledFor(logging.INFO): - sep = '-' * 75 - logger.info('%s\n%s\n%s', sep, result.read().decode(), sep) diff --git a/Lib/packaging/command/upload_docs.py b/Lib/packaging/command/upload_docs.py deleted file mode 100644 --- a/Lib/packaging/command/upload_docs.py +++ /dev/null @@ -1,131 +0,0 @@ -"""Upload HTML documentation to a project index.""" - -import os -import base64 -import socket -import zipfile -import logging -import http.client -import urllib.parse -from io import BytesIO - -from packaging import logger -from packaging.util import (read_pypirc, DEFAULT_REPOSITORY, DEFAULT_REALM, - encode_multipart) -from packaging.errors import PackagingFileError -from packaging.command.cmd import Command - - -def zip_dir(directory): - """Compresses recursively contents of directory into a BytesIO object""" - destination = BytesIO() - with zipfile.ZipFile(destination, "w") as zip_file: - for root, dirs, files in os.walk(directory): - for name in files: - full = os.path.join(root, name) - relative = root[len(directory):].lstrip(os.path.sep) - dest = os.path.join(relative, name) - zip_file.write(full, dest) - return destination - - -class upload_docs(Command): - - description = "upload HTML documentation to PyPI" - - user_options = [ - ('repository=', 'r', - "repository URL [default: %s]" % DEFAULT_REPOSITORY), - ('show-response', None, - "display full response text from server"), - ('upload-dir=', None, - "directory to upload"), - ] - - def initialize_options(self): - self.repository = None - self.realm = None - self.show_response = False - self.upload_dir = None - self.username = '' - self.password = '' - - def finalize_options(self): - if self.repository is None: - self.repository = DEFAULT_REPOSITORY - if self.realm is None: - self.realm = DEFAULT_REALM - if self.upload_dir is None: - build = self.get_finalized_command('build') - self.upload_dir = os.path.join(build.build_base, "docs") - if not os.path.isdir(self.upload_dir): - self.upload_dir = os.path.join(build.build_base, "doc") - logger.info('Using upload directory %s', self.upload_dir) - self.verify_upload_dir(self.upload_dir) - config = read_pypirc(self.repository, self.realm) - if config != {}: - self.username = config['username'] - self.password = config['password'] - self.repository = config['repository'] - self.realm = config['realm'] - - def verify_upload_dir(self, upload_dir): - self.ensure_dirname('upload_dir') - index_location = os.path.join(upload_dir, "index.html") - if not os.path.exists(index_location): - mesg = "No 
'index.html found in docs directory (%s)" - raise PackagingFileError(mesg % upload_dir) - - def run(self): - name = self.distribution.metadata['Name'] - version = self.distribution.metadata['Version'] - zip_file = zip_dir(self.upload_dir) - - fields = [(':action', 'doc_upload'), - ('name', name), ('version', version)] - files = [('content', name, zip_file.getvalue())] - content_type, body = encode_multipart(fields, files) - - credentials = self.username + ':' + self.password - # FIXME should use explicit encoding - auth = b"Basic " + base64.encodebytes(credentials.encode()).strip() - - logger.info("Submitting documentation to %s", self.repository) - - scheme, netloc, url, params, query, fragments = urllib.parse.urlparse( - self.repository) - if scheme == "http": - conn = http.client.HTTPConnection(netloc) - elif scheme == "https": - conn = http.client.HTTPSConnection(netloc) - else: - raise AssertionError("unsupported scheme %r" % scheme) - - try: - conn.connect() - conn.putrequest("POST", url) - conn.putheader('Content-type', content_type) - conn.putheader('Content-length', str(len(body))) - conn.putheader('Authorization', auth) - conn.endheaders() - conn.send(body) - - except socket.error as e: - logger.error(e) - return - - r = conn.getresponse() - - if r.status == 200: - logger.info('Server response (%s): %s', r.status, r.reason) - elif r.status == 301: - location = r.getheader('Location') - if location is None: - location = 'http://packages.python.org/%s/' % name - logger.info('Upload successful. Visit %s', location) - else: - logger.error('Upload failed (%s): %s', r.status, r.reason) - - if self.show_response and logger.isEnabledFor(logging.INFO): - sep = '-' * 75 - logger.info('%s\n%s\n%s', sep, r.read().decode('utf-8'), sep) diff --git a/Lib/packaging/command/wininst-10.0-amd64.exe b/Lib/packaging/command/wininst-10.0-amd64.exe deleted file mode 100644 Binary file Lib/packaging/command/wininst-10.0-amd64.exe has changed diff --git a/Lib/packaging/command/wininst-10.0.exe b/Lib/packaging/command/wininst-10.0.exe deleted file mode 100644 Binary file Lib/packaging/command/wininst-10.0.exe has changed diff --git a/Lib/packaging/command/wininst-6.0.exe b/Lib/packaging/command/wininst-6.0.exe deleted file mode 100644 Binary file Lib/packaging/command/wininst-6.0.exe has changed diff --git a/Lib/packaging/command/wininst-7.1.exe b/Lib/packaging/command/wininst-7.1.exe deleted file mode 100644 Binary file Lib/packaging/command/wininst-7.1.exe has changed diff --git a/Lib/packaging/command/wininst-8.0.exe b/Lib/packaging/command/wininst-8.0.exe deleted file mode 100644 Binary file Lib/packaging/command/wininst-8.0.exe has changed diff --git a/Lib/packaging/command/wininst-9.0-amd64.exe b/Lib/packaging/command/wininst-9.0-amd64.exe deleted file mode 100644 Binary file Lib/packaging/command/wininst-9.0-amd64.exe has changed diff --git a/Lib/packaging/command/wininst-9.0.exe b/Lib/packaging/command/wininst-9.0.exe deleted file mode 100644 Binary file Lib/packaging/command/wininst-9.0.exe has changed diff --git a/Lib/packaging/compat.py b/Lib/packaging/compat.py deleted file mode 100644 --- a/Lib/packaging/compat.py +++ /dev/null @@ -1,50 +0,0 @@ -"""Support for build-time 2to3 conversion.""" - -from packaging import logger - - -# XXX Having two classes with the same name is not a good thing. 
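The zip_dir() helper removed above (in upload_docs.py) builds the documentation archive entirely in memory before it is posted. A standalone sketch of the same technique using only the standard library; the function name below is illustrative:

    import os
    import zipfile
    from io import BytesIO

    def zip_directory(directory):
        # Store each file under a path relative to 'directory' so the archive
        # root matches the docs root instead of an absolute filesystem path.
        destination = BytesIO()
        with zipfile.ZipFile(destination, 'w') as zip_file:
            for root, dirs, files in os.walk(directory):
                for name in files:
                    full = os.path.join(root, name)
                    relative = os.path.relpath(root, directory)
                    zip_file.write(full, os.path.join(relative, name))
        return destination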
-# XXX 2to3-related code should move from util to this module - -try: - from packaging.util import Mixin2to3 as _Mixin2to3 - _CONVERT = True - _KLASS = _Mixin2to3 -except ImportError: - _CONVERT = False - _KLASS = object - -__all__ = ['Mixin2to3'] - - -class Mixin2to3(_KLASS): - """ The base class which can be used for refactoring. When run under - Python 3.0, the run_2to3 method provided by Mixin2to3 is overridden. - When run on Python 2.x, it merely creates a class which overrides run_2to3, - yet does nothing in particular with it. - """ - if _CONVERT: - - def _run_2to3(self, files=[], doctests=[], fixers=[]): - """ Takes a list of files and doctests, and performs conversion - on those. - - First, the files which contain the code(`files`) are converted. - - Second, the doctests in `files` are converted. - - Thirdly, the doctests in `doctests` are converted. - """ - if fixers: - self.fixer_names = fixers - - if files: - logger.info('converting Python code and doctests') - _KLASS.run_2to3(self, files) - _KLASS.run_2to3(self, files, doctests_only=True) - - if doctests: - logger.info('converting doctests in text files') - _KLASS.run_2to3(self, doctests, doctests_only=True) - else: - # If run on Python 2.x, there is nothing to do. - - def _run_2to3(self, files=[], doctests=[], fixers=[]): - pass diff --git a/Lib/packaging/compiler/__init__.py b/Lib/packaging/compiler/__init__.py deleted file mode 100644 --- a/Lib/packaging/compiler/__init__.py +++ /dev/null @@ -1,274 +0,0 @@ -"""Compiler abstraction model used by packaging. - -An abstract base class is defined in the ccompiler submodule, and -concrete implementations suitable for various platforms are defined in -the other submodules. The extension module is also placed in this -package. - -In general, code should not instantiate compiler classes directly but -use the new_compiler and customize_compiler functions provided in this -module. - -The compiler system has a registration API: get_default_compiler, -set_compiler, show_compilers. -""" - -import os -import sys -import re -import sysconfig - -from packaging.util import resolve_name -from packaging.errors import PackagingPlatformError -from packaging import logger - -def customize_compiler(compiler): - """Do any platform-specific customization of a CCompiler instance. - - Mainly needed on Unix, so we can plug in the information that - varies across Unices and is stored in Python's Makefile. 
- """ - if compiler.name == "unix": - cc, cxx, opt, cflags, ccshared, ldshared, so_ext, ar, ar_flags = ( - sysconfig.get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS', - 'CCSHARED', 'LDSHARED', 'SO', 'AR', - 'ARFLAGS')) - - if 'CC' in os.environ: - cc = os.environ['CC'] - if 'CXX' in os.environ: - cxx = os.environ['CXX'] - if 'LDSHARED' in os.environ: - ldshared = os.environ['LDSHARED'] - if 'CPP' in os.environ: - cpp = os.environ['CPP'] - else: - cpp = cc + " -E" # not always - if 'LDFLAGS' in os.environ: - ldshared = ldshared + ' ' + os.environ['LDFLAGS'] - if 'CFLAGS' in os.environ: - cflags = opt + ' ' + os.environ['CFLAGS'] - ldshared = ldshared + ' ' + os.environ['CFLAGS'] - if 'CPPFLAGS' in os.environ: - cpp = cpp + ' ' + os.environ['CPPFLAGS'] - cflags = cflags + ' ' + os.environ['CPPFLAGS'] - ldshared = ldshared + ' ' + os.environ['CPPFLAGS'] - if 'AR' in os.environ: - ar = os.environ['AR'] - if 'ARFLAGS' in os.environ: - archiver = ar + ' ' + os.environ['ARFLAGS'] - else: - if ar_flags is not None: - archiver = ar + ' ' + ar_flags - else: - # see if its the proper default value - # mmm I don't want to backport the makefile - archiver = ar + ' rc' - - cc_cmd = cc + ' ' + cflags - compiler.set_executables( - preprocessor=cpp, - compiler=cc_cmd, - compiler_so=cc_cmd + ' ' + ccshared, - compiler_cxx=cxx, - linker_so=ldshared, - linker_exe=cc, - archiver=archiver) - - compiler.shared_lib_extension = so_ext - - -# Map a sys.platform/os.name ('posix', 'nt') to the default compiler -# type for that platform. Keys are interpreted as re match -# patterns. Order is important; platform mappings are preferred over -# OS names. -_default_compilers = ( - # Platform string mappings - - # on a cygwin built python we can use gcc like an ordinary UNIXish - # compiler - ('cygwin.*', 'unix'), - - # OS name mappings - ('posix', 'unix'), - ('nt', 'msvc'), -) - -def get_default_compiler(osname=None, platform=None): - """ Determine the default compiler to use for the given platform. - - osname should be one of the standard Python OS names (i.e. the - ones returned by os.name) and platform the common value - returned by sys.platform for the platform in question. - - The default values are os.name and sys.platform in case the - parameters are not given. - - """ - if osname is None: - osname = os.name - if platform is None: - platform = sys.platform - for pattern, compiler in _default_compilers: - if re.match(pattern, platform) is not None or \ - re.match(pattern, osname) is not None: - return compiler - # Defaults to Unix compiler - return 'unix' - - -# compiler mapping -# XXX useful to expose them? (i.e. get_compiler_names) -_COMPILERS = { - 'unix': 'packaging.compiler.unixccompiler.UnixCCompiler', - 'msvc': 'packaging.compiler.msvccompiler.MSVCCompiler', - 'cygwin': 'packaging.compiler.cygwinccompiler.CygwinCCompiler', - 'mingw32': 'packaging.compiler.cygwinccompiler.Mingw32CCompiler', - 'bcpp': 'packaging.compiler.bcppcompiler.BCPPCompiler', -} - -def set_compiler(location): - """Add or change a compiler""" - cls = resolve_name(location) - # XXX we want to check the class here - _COMPILERS[cls.name] = cls - - -def show_compilers(): - """Print list of available compilers (used by the "--help-compiler" - options to "build", "build_ext", "build_clib"). 
- """ - from packaging.fancy_getopt import FancyGetopt - compilers = [] - - for name, cls in _COMPILERS.items(): - if isinstance(cls, str): - cls = resolve_name(cls) - _COMPILERS[name] = cls - - compilers.append(("compiler=" + name, None, cls.description)) - - compilers.sort() - pretty_printer = FancyGetopt(compilers) - pretty_printer.print_help("List of available compilers:") - - -def new_compiler(plat=None, compiler=None, dry_run=False, force=False): - """Generate an instance of some CCompiler subclass for the supplied - platform/compiler combination. 'plat' defaults to 'os.name' - (eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler - for that platform. Currently only 'posix' and 'nt' are supported, and - the default compilers are "traditional Unix interface" (UnixCCompiler - class) and Visual C++ (MSVCCompiler class). Note that it's perfectly - possible to ask for a Unix compiler object under Windows, and a - Microsoft compiler object under Unix -- if you supply a value for - 'compiler', 'plat' is ignored. - """ - if plat is None: - plat = os.name - - try: - if compiler is None: - compiler = get_default_compiler(plat) - - cls = _COMPILERS[compiler] - except KeyError: - msg = "don't know how to compile C/C++ code on platform '%s'" % plat - if compiler is not None: - msg = msg + " with '%s' compiler" % compiler - raise PackagingPlatformError(msg) - - if isinstance(cls, str): - cls = resolve_name(cls) - _COMPILERS[compiler] = cls - - return cls(dry_run, force) - - -def gen_preprocess_options(macros, include_dirs): - """Generate C pre-processor options (-D, -U, -I) as used by at least - two types of compilers: the typical Unix compiler and Visual C++. - 'macros' is the usual thing, a list of 1- or 2-tuples, where (name,) - means undefine (-U) macro 'name', and (name,value) means define (-D) - macro 'name' to 'value'. 'include_dirs' is just a list of directory - names to be added to the header file search path (-I). Returns a list - of command-line options suitable for either Unix compilers or Visual - C++. - """ - # XXX it would be nice (mainly aesthetic, and so we don't generate - # stupid-looking command lines) to go over 'macros' and eliminate - # redundant definitions/undefinitions (ie. ensure that only the - # latest mention of a particular macro winds up on the command - # line). I don't think it's essential, though, since most (all?) - # Unix C compilers only pay attention to the latest -D or -U - # mention of a macro on their command line. Similar situation for - # 'include_dirs'. I'm punting on both for now. Anyways, weeding out - # redundancies like this should probably be the province of - # CCompiler, since the data structures used are inherited from it - # and therefore common to all CCompiler classes. - - pp_opts = [] - for macro in macros: - - if not isinstance(macro, tuple) and 1 <= len(macro) <= 2: - raise TypeError( - "bad macro definition '%s': each element of 'macros'" - "list must be a 1- or 2-tuple" % macro) - - if len(macro) == 1: # undefine this macro - pp_opts.append("-U%s" % macro[0]) - elif len(macro) == 2: - if macro[1] is None: # define with no explicit value - pp_opts.append("-D%s" % macro[0]) - else: - # XXX *don't* need to be clever about quoting the - # macro value here, because we're going to avoid the - # shell at all costs when we spawn the command! 
- pp_opts.append("-D%s=%s" % macro) - - for dir in include_dirs: - pp_opts.append("-I%s" % dir) - - return pp_opts - - -def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries): - """Generate linker options for searching library directories and - linking with specific libraries. - - 'libraries' and 'library_dirs' are, respectively, lists of library names - (not filenames!) and search directories. Returns a list of command-line - options suitable for use with some compiler (depending on the two format - strings passed in). - """ - lib_opts = [] - - for dir in library_dirs: - lib_opts.append(compiler.library_dir_option(dir)) - - for dir in runtime_library_dirs: - opt = compiler.runtime_library_dir_option(dir) - if isinstance(opt, list): - lib_opts.extend(opt) - else: - lib_opts.append(opt) - - # XXX it's important that we *not* remove redundant library mentions! - # sometimes you really do have to say "-lfoo -lbar -lfoo" in order to - # resolve all symbols. I just hope we never have to say "-lfoo obj.o - # -lbar" to get things to work -- that's certainly a possibility, but a - # pretty nasty way to arrange your C code. - - for lib in libraries: - lib_dir, lib_name = os.path.split(lib) - if lib_dir != '': - lib_file = compiler.find_library_file([lib_dir], lib_name) - if lib_file is not None: - lib_opts.append(lib_file) - else: - logger.warning("no library file corresponding to " - "'%s' found (skipping)" % lib) - else: - lib_opts.append(compiler.library_option(lib)) - - return lib_opts diff --git a/Lib/packaging/compiler/bcppcompiler.py b/Lib/packaging/compiler/bcppcompiler.py deleted file mode 100644 --- a/Lib/packaging/compiler/bcppcompiler.py +++ /dev/null @@ -1,355 +0,0 @@ -"""CCompiler implementation for the Borland C++ compiler.""" - -# This implementation by Lyle Johnson, based on the original msvccompiler.py -# module and using the directions originally published by Gordon Williams. - -# XXX looks like there's a LOT of overlap between these two classes: -# someone should sit down and factor out the common code as -# WindowsCCompiler! --GPW - -import os - -from packaging.errors import (PackagingExecError, CompileError, LibError, - LinkError, UnknownFileError) -from packaging.compiler.ccompiler import CCompiler -from packaging.compiler import gen_preprocess_options -from packaging.file_util import write_file -from packaging.dep_util import newer -from packaging import logger - - -class BCPPCompiler(CCompiler) : - """Concrete class that implements an interface to the Borland C/C++ - compiler, as defined by the CCompiler abstract class. - """ - - name = 'bcpp' - description = 'Borland C++ Compiler' - - # Just set this so CCompiler's constructor doesn't barf. We currently - # don't use the 'set_executables()' bureaucracy provided by CCompiler, - # as it really isn't necessary for this sort of single-compiler class. - # Would be nice to have a consistent interface with UnixCCompiler, - # though, so it's worth thinking about. - executables = {} - - # Private class data (need to distinguish C from C++ source for compiler) - _c_extensions = ['.c'] - _cpp_extensions = ['.cc', '.cpp', '.cxx'] - - # Needed for the filename generation methods provided by the - # base class, CCompiler. 
- src_extensions = _c_extensions + _cpp_extensions - obj_extension = '.obj' - static_lib_extension = '.lib' - shared_lib_extension = '.dll' - static_lib_format = shared_lib_format = '%s%s' - exe_extension = '.exe' - - - def __init__(self, dry_run=False, force=False): - super(BCPPCompiler, self).__init__(dry_run, force) - - # These executables are assumed to all be in the path. - # Borland doesn't seem to use any special registry settings to - # indicate their installation locations. - - self.cc = "bcc32.exe" - self.linker = "ilink32.exe" - self.lib = "tlib.exe" - - self.preprocess_options = None - self.compile_options = ['/tWM', '/O2', '/q', '/g0'] - self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0'] - - self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x'] - self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x'] - self.ldflags_static = [] - self.ldflags_exe = ['/Gn', '/q', '/x'] - self.ldflags_exe_debug = ['/Gn', '/q', '/x','/r'] - - - # -- Worker methods ------------------------------------------------ - - def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=False, - extra_preargs=None, extra_postargs=None, depends=None): - - macros, objects, extra_postargs, pp_opts, build = \ - self._setup_compile(output_dir, macros, include_dirs, sources, - depends, extra_postargs) - compile_opts = extra_preargs or [] - compile_opts.append('-c') - if debug: - compile_opts.extend(self.compile_options_debug) - else: - compile_opts.extend(self.compile_options) - - for obj in objects: - try: - src, ext = build[obj] - except KeyError: - continue - # XXX why do the normpath here? - src = os.path.normpath(src) - obj = os.path.normpath(obj) - # XXX _setup_compile() did a mkpath() too but before the normpath. - # Is it possible to skip the normpath? - self.mkpath(os.path.dirname(obj)) - - if ext == '.res': - # This is already a binary file -- skip it. - continue # the 'for' loop - if ext == '.rc': - # This needs to be compiled to a .res file -- do it now. - try: - self.spawn(["brcc32", "-fo", obj, src]) - except PackagingExecError as msg: - raise CompileError(msg) - continue # the 'for' loop - - # The next two are both for the real compiler. - if ext in self._c_extensions: - input_opt = "" - elif ext in self._cpp_extensions: - input_opt = "-P" - else: - # Unknown file type -- no extra options. The compiler - # will probably fail, but let it just in case this is a - # file the compiler recognizes even if we don't. - input_opt = "" - - output_opt = "-o" + obj - - # Compiler command line syntax is: "bcc32 [options] file(s)". - # Note that the source file names must appear at the end of - # the command line. - try: - self.spawn([self.cc] + compile_opts + pp_opts + - [input_opt, output_opt] + - extra_postargs + [src]) - except PackagingExecError as msg: - raise CompileError(msg) - - return objects - - - def create_static_lib(self, objects, output_libname, output_dir=None, - debug=False, target_lang=None): - objects, output_dir = self._fix_object_args(objects, output_dir) - output_filename = \ - self.library_filename(output_libname, output_dir=output_dir) - - if self._need_link(objects, output_filename): - lib_args = [output_filename, '/u'] + objects - if debug: - pass # XXX what goes here? 
- try: - self.spawn([self.lib] + lib_args) - except PackagingExecError as msg: - raise LibError(msg) - else: - logger.debug("skipping %s (up-to-date)", output_filename) - - - def link(self, target_desc, objects, output_filename, output_dir=None, - libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=False, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None): - - # XXX this ignores 'build_temp'! should follow the lead of - # msvccompiler.py - - objects, output_dir = self._fix_object_args(objects, output_dir) - libraries, library_dirs, runtime_library_dirs = \ - self._fix_lib_args(libraries, library_dirs, runtime_library_dirs) - - if runtime_library_dirs: - logger.warning("don't know what to do with " - "'runtime_library_dirs': %r", runtime_library_dirs) - - if output_dir is not None: - output_filename = os.path.join(output_dir, output_filename) - - if self._need_link(objects, output_filename): - - # Figure out linker args based on type of target. - if target_desc == CCompiler.EXECUTABLE: - startup_obj = 'c0w32' - if debug: - ld_args = self.ldflags_exe_debug[:] - else: - ld_args = self.ldflags_exe[:] - else: - startup_obj = 'c0d32' - if debug: - ld_args = self.ldflags_shared_debug[:] - else: - ld_args = self.ldflags_shared[:] - - - # Create a temporary exports file for use by the linker - if export_symbols is None: - def_file = '' - else: - head, tail = os.path.split(output_filename) - modname, ext = os.path.splitext(tail) - temp_dir = os.path.dirname(objects[0]) # preserve tree structure - def_file = os.path.join(temp_dir, '%s.def' % modname) - contents = ['EXPORTS'] - for sym in (export_symbols or []): - contents.append(' %s=_%s' % (sym, sym)) - self.execute(write_file, (def_file, contents), - "writing %s" % def_file) - - # Borland C++ has problems with '/' in paths - objects2 = [os.path.normpath(o) for o in objects] - # split objects in .obj and .res files - # Borland C++ needs them at different positions in the command line - objects = [startup_obj] - resources = [] - for file in objects2: - base, ext = os.path.splitext(os.path.normcase(file)) - if ext == '.res': - resources.append(file) - else: - objects.append(file) - - - for l in library_dirs: - ld_args.append("/L%s" % os.path.normpath(l)) - ld_args.append("/L.") # we sometimes use relative paths - - # list of object files - ld_args.extend(objects) - - # XXX the command line syntax for Borland C++ is a bit wonky; - # certain filenames are jammed together in one big string, but - # comma-delimited. This doesn't mesh too well with the - # Unix-centric attitude (with a DOS/Windows quoting hack) of - # 'spawn()', so constructing the argument list is a bit - # awkward. Note that doing the obvious thing and jamming all - # the filenames and commas into one argument would be wrong, - # because 'spawn()' would quote any filenames with spaces in - # them. Arghghh!. Apparently it works fine as coded... 
- - # name of dll/exe file - ld_args.extend((',',output_filename)) - # no map file and start libraries - ld_args.append(',,') - - for lib in libraries: - # see if we find it and if there is a bcpp specific lib - # (xxx_bcpp.lib) - libfile = self.find_library_file(library_dirs, lib, debug) - if libfile is None: - ld_args.append(lib) - # probably a BCPP internal library -- don't warn - else: - # full name which prefers bcpp_xxx.lib over xxx.lib - ld_args.append(libfile) - - # some default libraries - ld_args.append('import32') - ld_args.append('cw32mt') - - # def file for export symbols - ld_args.extend((',',def_file)) - # add resource files - ld_args.append(',') - ld_args.extend(resources) - - - if extra_preargs: - ld_args[:0] = extra_preargs - if extra_postargs: - ld_args.extend(extra_postargs) - - self.mkpath(os.path.dirname(output_filename)) - try: - self.spawn([self.linker] + ld_args) - except PackagingExecError as msg: - raise LinkError(msg) - - else: - logger.debug("skipping %s (up-to-date)", output_filename) - - # -- Miscellaneous methods ----------------------------------------- - - - def find_library_file(self, dirs, lib, debug=False): - # List of effective library names to try, in order of preference: - # xxx_bcpp.lib is better than xxx.lib - # and xxx_d.lib is better than xxx.lib if debug is set - # - # The "_bcpp" suffix is to handle a Python installation for people - # with multiple compilers (primarily Packaging hackers, I suspect - # ;-). The idea is they'd have one static library for each - # compiler they care about, since (almost?) every Windows compiler - # seems to have a different format for static libraries. - if debug: - dlib = (lib + "_d") - try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib) - else: - try_names = (lib + "_bcpp", lib) - - for dir in dirs: - for name in try_names: - libfile = os.path.join(dir, self.library_filename(name)) - if os.path.exists(libfile): - return libfile - else: - # Oops, didn't find it in *any* of 'dirs' - return None - - # overwrite the one from CCompiler to support rc and res-files - def object_filenames(self, source_filenames, strip_dir=False, - output_dir=''): - if output_dir is None: - output_dir = '' - obj_names = [] - for src_name in source_filenames: - # use normcase to make sure '.rc' is really '.rc' and not '.RC' - base, ext = os.path.splitext(os.path.normcase(src_name)) - if ext not in (self.src_extensions + ['.rc','.res']): - raise UnknownFileError("unknown file type '%s' (from '%s')" % \ - (ext, src_name)) - if strip_dir: - base = os.path.basename(base) - if ext == '.res': - # these can go unchanged - obj_names.append(os.path.join(output_dir, base + ext)) - elif ext == '.rc': - # these need to be compiled to .res-files - obj_names.append(os.path.join(output_dir, base + '.res')) - else: - obj_names.append(os.path.join(output_dir, - base + self.obj_extension)) - return obj_names - - - def preprocess(self, source, output_file=None, macros=None, - include_dirs=None, extra_preargs=None, - extra_postargs=None): - _, macros, include_dirs = \ - self._fix_compile_args(None, macros, include_dirs) - pp_opts = gen_preprocess_options(macros, include_dirs) - pp_args = ['cpp32.exe'] + pp_opts - if output_file is not None: - pp_args.append('-o' + output_file) - if extra_preargs: - pp_args[:0] = extra_preargs - if extra_postargs: - pp_args.extend(extra_postargs) - pp_args.append(source) - - # We need to preprocess: either we're being forced to, or the - # source file is newer than the target (or the target doesn't - # exist). 
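The preprocess() comment above relies on newer(source, output_file) from packaging.dep_util; a minimal sketch of what such an mtime-based check presumably does (its exact behaviour is stated here as an assumption, not quoted from dep_util):

    import os

    def newer(source, target):
        # True when 'target' is missing or older than 'source'; the real
        # helper raises an error for a missing source, so mirror that here.
        if not os.path.exists(source):
            raise OSError("file '%s' does not exist" % source)
        if not os.path.exists(target):
            return True
        return os.path.getmtime(source) > os.path.getmtime(target)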
- if self.force or output_file is None or newer(source, output_file): - if output_file: - self.mkpath(os.path.dirname(output_file)) - try: - self.spawn(pp_args) - except PackagingExecError as msg: - raise CompileError(msg) diff --git a/Lib/packaging/compiler/ccompiler.py b/Lib/packaging/compiler/ccompiler.py deleted file mode 100644 --- a/Lib/packaging/compiler/ccompiler.py +++ /dev/null @@ -1,863 +0,0 @@ -"""Abstract base class for compilers. - -This modules contains CCompiler, an abstract base class that defines the -interface for the compiler abstraction model used by packaging. -""" - -import os -from shutil import move -from packaging import logger -from packaging.util import split_quoted, execute, newer_group, spawn -from packaging.errors import (CompileError, LinkError, UnknownFileError) -from packaging.compiler import gen_preprocess_options - - -class CCompiler: - """Abstract base class to define the interface that must be implemented - by real compiler classes. Also has some utility methods used by - several compiler classes. - - The basic idea behind a compiler abstraction class is that each - instance can be used for all the compile/link steps in building a - single project. Thus, attributes common to all of those compile and - link steps -- include directories, macros to define, libraries to link - against, etc. -- are attributes of the compiler instance. To allow for - variability in how individual files are treated, most of those - attributes may be varied on a per-compilation or per-link basis. - """ - - # 'name' is a class attribute that identifies this class. It - # keeps code that wants to know what kind of compiler it's dealing with - # from having to import all possible compiler classes just to do an - # 'isinstance'. - name = None - description = None - - # XXX things not handled by this compiler abstraction model: - # * client can't provide additional options for a compiler, - # e.g. warning, optimization, debugging flags. Perhaps this - # should be the domain of concrete compiler abstraction classes - # (UnixCCompiler, MSVCCompiler, etc.) -- or perhaps the base - # class should have methods for the common ones. - # * can't completely override the include or library searchg - # path, ie. no "cc -I -Idir1 -Idir2" or "cc -L -Ldir1 -Ldir2". - # I'm not sure how widely supported this is even by Unix - # compilers, much less on other platforms. And I'm even less - # sure how useful it is; maybe for cross-compiling, but - # support for that is a ways off. (And anyways, cross - # compilers probably have a dedicated binary with the - # right paths compiled in. I hope.) - # * can't do really freaky things with the library list/library - # dirs, e.g. "-Ldir1 -lfoo -Ldir2 -lfoo" to link against - # different versions of libfoo.a in different locations. I - # think this is useless without the ability to null out the - # library search path anyways. - - - # Subclasses that rely on the standard filename generation methods - # implemented below should override these; see the comment near - # those methods ('object_filenames()' et. al.) for details: - src_extensions = None # list of strings - obj_extension = None # string - static_lib_extension = None - shared_lib_extension = None # string - static_lib_format = None # format string - shared_lib_format = None # prob. same as static_lib_format - exe_extension = None # string - - # Default language settings. language_map is used to detect a source - # file or Extension target language, checking source filenames. 
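Putting the class docstring above into practice, the intended call sequence is roughly: obtain a compiler with new_compiler(), let customize_compiler() plug in the sysconfig values, then drive compile() and one of the link methods. A hypothetical driver, assuming the packaging.compiler package from this tree is importable and a C toolchain is present:

    from packaging.compiler import new_compiler, customize_compiler

    def build_shared_object(sources, include_dirs, libraries, output):
        compiler = new_compiler()        # default compiler for this platform
        customize_compiler(compiler)     # CC/CFLAGS/LDSHARED etc. from sysconfig
        objects = compiler.compile(sources, include_dirs=include_dirs)
        compiler.link_shared_object(objects, output, libraries=libraries)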
- # language_order is used to detect the language precedence, when deciding - # what language to use when mixing source types. For example, if some - # extension has two files with ".c" extension, and one with ".cpp", it - # is still linked as c++. - language_map = {".c": "c", - ".cc": "c++", - ".cpp": "c++", - ".cxx": "c++", - ".m": "objc", - } - language_order = ["c++", "objc", "c"] - - def __init__(self, dry_run=False, force=False): - self.dry_run = dry_run - self.force = force - - # 'output_dir': a common output directory for object, library, - # shared object, and shared library files - self.output_dir = None - - # 'macros': a list of macro definitions (or undefinitions). A - # macro definition is a 2-tuple (name, value), where the value is - # either a string or None (no explicit value). A macro - # undefinition is a 1-tuple (name,). - self.macros = [] - - # 'include_dirs': a list of directories to search for include files - self.include_dirs = [] - - # 'libraries': a list of libraries to include in any link - # (library names, not filenames: eg. "foo" not "libfoo.a") - self.libraries = [] - - # 'library_dirs': a list of directories to search for libraries - self.library_dirs = [] - - # 'runtime_library_dirs': a list of directories to search for - # shared libraries/objects at runtime - self.runtime_library_dirs = [] - - # 'objects': a list of object files (or similar, such as explicitly - # named library files) to include on any link - self.objects = [] - - for key, value in self.executables.items(): - self.set_executable(key, value) - - def set_executables(self, **args): - """Define the executables (and options for them) that will be run - to perform the various stages of compilation. The exact set of - executables that may be specified here depends on the compiler - class (via the 'executables' class attribute), but most will have: - compiler the C/C++ compiler - linker_so linker used to create shared objects and libraries - linker_exe linker used to create binary executables - archiver static library creator - - On platforms with a command line (Unix, DOS/Windows), each of these - is a string that will be split into executable name and (optional) - list of arguments. (Splitting the string is done similarly to how - Unix shells operate: words are delimited by spaces, but quotes and - backslashes can override this. See - 'distutils.util.split_quoted()'.) - """ - - # Note that some CCompiler implementation classes will define class - # attributes 'cpp', 'cc', etc. with hard-coded executable names; - # this is appropriate when a compiler class is for exactly one - # compiler/OS combination (eg. MSVCCompiler). Other compiler - # classes (UnixCCompiler, in particular) are driven by information - # discovered at run-time, since there are many different ways to do - # basically the same things with Unix C compilers. - - for key, value in args.items(): - if key not in self.executables: - raise ValueError("unknown executable '%s' for class %s" % \ - (key, self.__class__.__name__)) - self.set_executable(key, value) - - def set_executable(self, key, value): - if isinstance(value, str): - setattr(self, key, split_quoted(value)) - else: - setattr(self, key, value) - - def _find_macro(self, name): - i = 0 - for defn in self.macros: - if defn[0] == name: - return i - i = i + 1 - return None - - def _check_macro_definitions(self, definitions): - """Ensures that every element of 'definitions' is a valid macro - definition, ie. either (name,value) 2-tuple or a (name,) tuple. 
Do - nothing if all definitions are OK, raise TypeError otherwise. - """ - for defn in definitions: - if not (isinstance(defn, tuple) and - (len(defn) == 1 or - (len(defn) == 2 and - (isinstance(defn[1], str) or defn[1] is None))) and - isinstance(defn[0], str)): - raise TypeError(("invalid macro definition '%s': " % defn) + \ - "must be tuple (string,), (string, string), or " + \ - "(string, None)") - - - # -- Bookkeeping methods ------------------------------------------- - - def define_macro(self, name, value=None): - """Define a preprocessor macro for all compilations driven by this - compiler object. The optional parameter 'value' should be a - string; if it is not supplied, then the macro will be defined - without an explicit value and the exact outcome depends on the - compiler used (XXX true? does ANSI say anything about this?) - """ - # Delete from the list of macro definitions/undefinitions if - # already there (so that this one will take precedence). - i = self._find_macro(name) - if i is not None: - del self.macros[i] - - defn = (name, value) - self.macros.append(defn) - - def undefine_macro(self, name): - """Undefine a preprocessor macro for all compilations driven by - this compiler object. If the same macro is defined by - 'define_macro()' and undefined by 'undefine_macro()' the last call - takes precedence (including multiple redefinitions or - undefinitions). If the macro is redefined/undefined on a - per-compilation basis (ie. in the call to 'compile()'), then that - takes precedence. - """ - # Delete from the list of macro definitions/undefinitions if - # already there (so that this one will take precedence). - i = self._find_macro(name) - if i is not None: - del self.macros[i] - - undefn = (name,) - self.macros.append(undefn) - - def add_include_dir(self, dir): - """Add 'dir' to the list of directories that will be searched for - header files. The compiler is instructed to search directories in - the order in which they are supplied by successive calls to - 'add_include_dir()'. - """ - self.include_dirs.append(dir) - - def set_include_dirs(self, dirs): - """Set the list of directories that will be searched to 'dirs' (a - list of strings). Overrides any preceding calls to - 'add_include_dir()'; subsequence calls to 'add_include_dir()' add - to the list passed to 'set_include_dirs()'. This does not affect - any list of standard include directories that the compiler may - search by default. - """ - self.include_dirs = dirs[:] - - def add_library(self, libname): - """Add 'libname' to the list of libraries that will be included in - all links driven by this compiler object. Note that 'libname' - should *not* be the name of a file containing a library, but the - name of the library itself: the actual filename will be inferred by - the linker, the compiler, or the compiler class (depending on the - platform). - - The linker will be instructed to link against libraries in the - order they were supplied to 'add_library()' and/or - 'set_libraries()'. It is perfectly valid to duplicate library - names; the linker will be instructed to link against libraries as - many times as they are mentioned. - """ - self.libraries.append(libname) - - def set_libraries(self, libnames): - """Set the list of libraries to be included in all links driven by - this compiler object to 'libnames' (a list of strings). This does - not affect any standard system libraries that the linker may - include by default. 
- """ - self.libraries = libnames[:] - - - def add_library_dir(self, dir): - """Add 'dir' to the list of directories that will be searched for - libraries specified to 'add_library()' and 'set_libraries()'. The - linker will be instructed to search for libraries in the order they - are supplied to 'add_library_dir()' and/or 'set_library_dirs()'. - """ - self.library_dirs.append(dir) - - def set_library_dirs(self, dirs): - """Set the list of library search directories to 'dirs' (a list of - strings). This does not affect any standard library search path - that the linker may search by default. - """ - self.library_dirs = dirs[:] - - def add_runtime_library_dir(self, dir): - """Add 'dir' to the list of directories that will be searched for - shared libraries at runtime. - """ - self.runtime_library_dirs.append(dir) - - def set_runtime_library_dirs(self, dirs): - """Set the list of directories to search for shared libraries at - runtime to 'dirs' (a list of strings). This does not affect any - standard search path that the runtime linker may search by - default. - """ - self.runtime_library_dirs = dirs[:] - - def add_link_object(self, object): - """Add 'object' to the list of object files (or analogues, such as - explicitly named library files or the output of "resource - compilers") to be included in every link driven by this compiler - object. - """ - self.objects.append(object) - - def set_link_objects(self, objects): - """Set the list of object files (or analogues) to be included in - every link to 'objects'. This does not affect any standard object - files that the linker may include by default (such as system - libraries). - """ - self.objects = objects[:] - - - # -- Private utility methods -------------------------------------- - # (here for the convenience of subclasses) - - # Helper method to prep compiler in subclass compile() methods - def _setup_compile(self, outdir, macros, incdirs, sources, depends, - extra): - """Process arguments and decide which source files to compile.""" - if outdir is None: - outdir = self.output_dir - elif not isinstance(outdir, str): - raise TypeError("'output_dir' must be a string or None") - - if macros is None: - macros = self.macros - elif isinstance(macros, list): - macros = macros + (self.macros or []) - else: - raise TypeError("'macros' (if supplied) must be a list of tuples") - - if incdirs is None: - incdirs = self.include_dirs - elif isinstance(incdirs, (list, tuple)): - incdirs = list(incdirs) + (self.include_dirs or []) - else: - raise TypeError( - "'include_dirs' (if supplied) must be a list of strings") - - if extra is None: - extra = [] - - # Get the list of expected output (object) files - objects = self.object_filenames(sources, - strip_dir=False, - output_dir=outdir) - assert len(objects) == len(sources) - - pp_opts = gen_preprocess_options(macros, incdirs) - - build = {} - for i in range(len(sources)): - src = sources[i] - obj = objects[i] - ext = os.path.splitext(src)[1] - self.mkpath(os.path.dirname(obj)) - build[obj] = (src, ext) - - return macros, objects, extra, pp_opts, build - - def _get_cc_args(self, pp_opts, debug, before): - # works for unixccompiler and cygwinccompiler - cc_args = pp_opts + ['-c'] - if debug: - cc_args[:0] = ['-g'] - if before: - cc_args[:0] = before - return cc_args - - def _fix_compile_args(self, output_dir, macros, include_dirs): - """Typecheck and fix-up some of the arguments to the 'compile()' - method, and return fixed-up values. 
Specifically: if 'output_dir' - is None, replaces it with 'self.output_dir'; ensures that 'macros' - is a list, and augments it with 'self.macros'; ensures that - 'include_dirs' is a list, and augments it with 'self.include_dirs'. - Guarantees that the returned values are of the correct type, - i.e. for 'output_dir' either string or None, and for 'macros' and - 'include_dirs' either list or None. - """ - if output_dir is None: - output_dir = self.output_dir - elif not isinstance(output_dir, str): - raise TypeError("'output_dir' must be a string or None") - - if macros is None: - macros = self.macros - elif isinstance(macros, list): - macros = macros + (self.macros or []) - else: - raise TypeError("'macros' (if supplied) must be a list of tuples") - - if include_dirs is None: - include_dirs = self.include_dirs - elif isinstance(include_dirs, (list, tuple)): - include_dirs = list(include_dirs) + (self.include_dirs or []) - else: - raise TypeError( - "'include_dirs' (if supplied) must be a list of strings") - - return output_dir, macros, include_dirs - - def _fix_object_args(self, objects, output_dir): - """Typecheck and fix up some arguments supplied to various methods. - Specifically: ensure that 'objects' is a list; if output_dir is - None, replace with self.output_dir. Return fixed versions of - 'objects' and 'output_dir'. - """ - if not isinstance(objects, (list, tuple)): - raise TypeError("'objects' must be a list or tuple of strings") - objects = list(objects) - - if output_dir is None: - output_dir = self.output_dir - elif not isinstance(output_dir, str): - raise TypeError("'output_dir' must be a string or None") - - return objects, output_dir - - def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs): - """Typecheck and fix up some of the arguments supplied to the - 'link_*' methods. Specifically: ensure that all arguments are - lists, and augment them with their permanent versions - (eg. 'self.libraries' augments 'libraries'). Return a tuple with - fixed versions of all arguments. - """ - if libraries is None: - libraries = self.libraries - elif isinstance(libraries, (list, tuple)): - libraries = list(libraries) + (self.libraries or []) - else: - raise TypeError( - "'libraries' (if supplied) must be a list of strings") - - if library_dirs is None: - library_dirs = self.library_dirs - elif isinstance(library_dirs, (list, tuple)): - library_dirs = list(library_dirs) + (self.library_dirs or []) - else: - raise TypeError( - "'library_dirs' (if supplied) must be a list of strings") - - if runtime_library_dirs is None: - runtime_library_dirs = self.runtime_library_dirs - elif isinstance(runtime_library_dirs, (list, tuple)): - runtime_library_dirs = (list(runtime_library_dirs) + - (self.runtime_library_dirs or [])) - else: - raise TypeError("'runtime_library_dirs' (if supplied) " - "must be a list of strings") - - return libraries, library_dirs, runtime_library_dirs - - def _need_link(self, objects, output_file): - """Return true if we need to relink the files listed in 'objects' - to recreate 'output_file'. - """ - if self.force: - return True - else: - if self.dry_run: - newer = newer_group(objects, output_file, missing='newer') - else: - newer = newer_group(objects, output_file) - return newer - - def detect_language(self, sources): - """Detect the language of a given file, or list of files. Uses - language_map, and language_order to do the job. 
- """ - if not isinstance(sources, list): - sources = [sources] - lang = None - index = len(self.language_order) - for source in sources: - base, ext = os.path.splitext(source) - extlang = self.language_map.get(ext) - try: - extindex = self.language_order.index(extlang) - if extindex < index: - lang = extlang - index = extindex - except ValueError: - pass - return lang - - # -- Worker methods ------------------------------------------------ - # (must be implemented by subclasses) - - def preprocess(self, source, output_file=None, macros=None, - include_dirs=None, extra_preargs=None, extra_postargs=None): - """Preprocess a single C/C++ source file, named in 'source'. - Output will be written to file named 'output_file', or stdout if - 'output_file' not supplied. 'macros' is a list of macro - definitions as for 'compile()', which will augment the macros set - with 'define_macro()' and 'undefine_macro()'. 'include_dirs' is a - list of directory names that will be added to the default list. - - Raises PreprocessError on failure. - """ - pass - - def compile(self, sources, output_dir=None, macros=None, - include_dirs=None, debug=False, extra_preargs=None, - extra_postargs=None, depends=None): - """Compile one or more source files. - - 'sources' must be a list of filenames, most likely C/C++ - files, but in reality anything that can be handled by a - particular compiler and compiler class (eg. MSVCCompiler can - handle resource files in 'sources'). Return a list of object - filenames, one per source filename in 'sources'. Depending on - the implementation, not all source files will necessarily be - compiled, but all corresponding object filenames will be - returned. - - If 'output_dir' is given, object files will be put under it, while - retaining their original path component. That is, "foo/bar.c" - normally compiles to "foo/bar.o" (for a Unix implementation); if - 'output_dir' is "build", then it would compile to - "build/foo/bar.o". - - 'macros', if given, must be a list of macro definitions. A macro - definition is either a (name, value) 2-tuple or a (name,) 1-tuple. - The former defines a macro; if the value is None, the macro is - defined without an explicit value. The 1-tuple case undefines a - macro. Later definitions/redefinitions/ undefinitions take - precedence. - - 'include_dirs', if given, must be a list of strings, the - directories to add to the default include file search path for this - compilation only. - - 'debug' is a boolean; if true, the compiler will be instructed to - output debug symbols in (or alongside) the object file(s). - - 'extra_preargs' and 'extra_postargs' are implementation- dependent. - On platforms that have the notion of a command line (e.g. Unix, - DOS/Windows), they are most likely lists of strings: extra - command-line arguments to prepand/append to the compiler command - line. On other platforms, consult the implementation class - documentation. In any event, they are intended as an escape hatch - for those occasions when the abstract compiler framework doesn't - cut the mustard. - - 'depends', if given, is a list of filenames that all targets - depend on. If a source file is older than any file in - depends, then the source file will be recompiled. This - supports dependency tracking, but only at a coarse - granularity. - - Raises CompileError on failure. - """ - # A concrete compiler class can either override this method - # entirely or implement _compile(). 
- - macros, objects, extra_postargs, pp_opts, build = \ - self._setup_compile(output_dir, macros, include_dirs, sources, - depends, extra_postargs) - cc_args = self._get_cc_args(pp_opts, debug, extra_preargs) - - for obj in objects: - try: - src, ext = build[obj] - except KeyError: - continue - self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts) - - # Return *all* object filenames, not just the ones we just built. - return objects - - def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): - """Compile 'src' to product 'obj'.""" - - # A concrete compiler class that does not override compile() - # should implement _compile(). - pass - - def create_static_lib(self, objects, output_libname, output_dir=None, - debug=False, target_lang=None): - """Link a bunch of stuff together to create a static library file. - The "bunch of stuff" consists of the list of object files supplied - as 'objects', the extra object files supplied to - 'add_link_object()' and/or 'set_link_objects()', the libraries - supplied to 'add_library()' and/or 'set_libraries()', and the - libraries supplied as 'libraries' (if any). - - 'output_libname' should be a library name, not a filename; the - filename will be inferred from the library name. 'output_dir' is - the directory where the library file will be put. - - 'debug' is a boolean; if true, debugging information will be - included in the library (note that on most platforms, it is the - compile step where this matters: the 'debug' flag is included here - just for consistency). - - 'target_lang' is the target language for which the given objects - are being compiled. This allows specific linkage time treatment of - certain languages. - - Raises LibError on failure. - """ - pass - - # values for target_desc parameter in link() - SHARED_OBJECT = "shared_object" - SHARED_LIBRARY = "shared_library" - EXECUTABLE = "executable" - - def link(self, target_desc, objects, output_filename, output_dir=None, - libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=False, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None): - """Link a bunch of stuff together to create an executable or - shared library file. - - The "bunch of stuff" consists of the list of object files supplied - as 'objects'. 'output_filename' should be a filename. If - 'output_dir' is supplied, 'output_filename' is relative to it - (i.e. 'output_filename' can provide directory components if - needed). - - 'libraries' is a list of libraries to link against. These are - library names, not filenames, since they're translated into - filenames in a platform-specific way (eg. "foo" becomes "libfoo.a" - on Unix and "foo.lib" on DOS/Windows). However, they can include a - directory component, which means the linker will look in that - specific directory rather than searching all the normal locations. - - 'library_dirs', if supplied, should be a list of directories to - search for libraries that were specified as bare library names - (ie. no directory component). These are on top of the system - default and those supplied to 'add_library_dir()' and/or - 'set_library_dirs()'. 'runtime_library_dirs' is a list of - directories that will be embedded into the shared library and used - to search for other shared libraries that *it* depends on at - run-time. (This may only be relevant on Unix.) - - 'export_symbols' is a list of symbols that the shared library will - export. (This appears to be relevant only on Windows.) 
- - 'debug' is as for 'compile()' and 'create_static_lib()', with the - slight distinction that it actually matters on most platforms (as - opposed to 'create_static_lib()', which includes a 'debug' flag - mostly for form's sake). - - 'extra_preargs' and 'extra_postargs' are as for 'compile()' (except - of course that they supply command-line arguments for the - particular linker being used). - - 'target_lang' is the target language for which the given objects - are being compiled. This allows specific linkage time treatment of - certain languages. - - Raises LinkError on failure. - """ - raise NotImplementedError - - - # Old 'link_*()' methods, rewritten to use the new 'link()' method. - - def link_shared_lib(self, objects, output_libname, output_dir=None, - libraries=None, library_dirs=None, - runtime_library_dirs=None, export_symbols=None, - debug=False, extra_preargs=None, extra_postargs=None, - build_temp=None, target_lang=None): - self.link(CCompiler.SHARED_LIBRARY, objects, - self.library_filename(output_libname, lib_type='shared'), - output_dir, - libraries, library_dirs, runtime_library_dirs, - export_symbols, debug, - extra_preargs, extra_postargs, build_temp, target_lang) - - def link_shared_object(self, objects, output_filename, output_dir=None, - libraries=None, library_dirs=None, - runtime_library_dirs=None, export_symbols=None, - debug=False, extra_preargs=None, extra_postargs=None, - build_temp=None, target_lang=None): - self.link(CCompiler.SHARED_OBJECT, objects, - output_filename, output_dir, - libraries, library_dirs, runtime_library_dirs, - export_symbols, debug, - extra_preargs, extra_postargs, build_temp, target_lang) - - def link_executable(self, objects, output_progname, output_dir=None, - libraries=None, library_dirs=None, - runtime_library_dirs=None, debug=False, - extra_preargs=None, extra_postargs=None, - target_lang=None): - self.link(CCompiler.EXECUTABLE, objects, - self.executable_filename(output_progname), output_dir, - libraries, library_dirs, runtime_library_dirs, None, - debug, extra_preargs, extra_postargs, None, target_lang) - - - # -- Miscellaneous methods ----------------------------------------- - # These are all used by the 'gen_lib_options() function; there is - # no appropriate default implementation so subclasses should - # implement all of these. - - def library_dir_option(self, dir): - """Return the compiler option to add 'dir' to the list of - directories searched for libraries. - """ - raise NotImplementedError - - def runtime_library_dir_option(self, dir): - """Return the compiler option to add 'dir' to the list of - directories searched for runtime libraries. - """ - raise NotImplementedError - - def library_option(self, lib): - """Return the compiler option to add 'dir' to the list of libraries - linked into the shared library or executable. - """ - raise NotImplementedError - - def has_function(self, funcname, includes=None, include_dirs=None, - libraries=None, library_dirs=None): - """Return a boolean indicating whether funcname is supported on - the current platform. The optional arguments can be used to - augment the compilation environment. - """ - - # this can't be included at module scope because it tries to - # import math which might not be available at that point - maybe - # the necessary logic should just be inlined? 
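The library_dir_option(), runtime_library_dir_option() and library_option() hooks above each return a bare string that gen_lib_options() splices into the link command line; a hypothetical Unix-flavoured implementation might look like this (the exact flags differ per platform and linker):

    def library_dir_option(dir):
        return "-L" + dir              # e.g. -L/opt/lib

    def runtime_library_dir_option(dir):
        return "-Wl,-R" + dir          # rpath handling varies by platform

    def library_option(lib):
        return "-l" + lib              # e.g. -lssl

    print(library_dir_option("/opt/lib"),
          runtime_library_dir_option("/opt/lib"),
          library_option("ssl"))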
- import tempfile - if includes is None: - includes = [] - if include_dirs is None: - include_dirs = [] - if libraries is None: - libraries = [] - if library_dirs is None: - library_dirs = [] - fd, fname = tempfile.mkstemp(".c", funcname, text=True) - with os.fdopen(fd, "w") as f: - for incl in includes: - f.write("""#include "%s"\n""" % incl) - f.write("""\ -main (int argc, char **argv) { - %s(); -} -""" % funcname) - try: - objects = self.compile([fname], include_dirs=include_dirs) - except CompileError: - return False - - try: - self.link_executable(objects, "a.out", - libraries=libraries, - library_dirs=library_dirs) - except (LinkError, TypeError): - return False - return True - - def find_library_file(self, dirs, lib, debug=False): - """Search the specified list of directories for a static or shared - library file 'lib' and return the full path to that file. If - 'debug' is true, look for a debugging version (if that makes sense on - the current platform). Return None if 'lib' wasn't found in any of - the specified directories. - """ - raise NotImplementedError - - # -- Filename generation methods ----------------------------------- - - # The default implementation of the filename generating methods are - # prejudiced towards the Unix/DOS/Windows view of the world: - # * object files are named by replacing the source file extension - # (eg. .c/.cpp -> .o/.obj) - # * library files (shared or static) are named by plugging the - # library name and extension into a format string, eg. - # "lib%s.%s" % (lib_name, ".a") for Unix static libraries - # * executables are named by appending an extension (possibly - # empty) to the program name: eg. progname + ".exe" for - # Windows - # - # To reduce redundant code, these methods expect to find - # several attributes in the current object (presumably defined - # as class attributes): - # * src_extensions - - # list of C/C++ source file extensions, eg. ['.c', '.cpp'] - # * obj_extension - - # object file extension, eg. '.o' or '.obj' - # * static_lib_extension - - # extension for static library files, eg. '.a' or '.lib' - # * shared_lib_extension - - # extension for shared library/object files, eg. '.so', '.dll' - # * static_lib_format - - # format string for generating static library filenames, - # eg. 'lib%s.%s' or '%s.%s' - # * shared_lib_format - # format string for generating shared library filenames - # (probably same as static_lib_format, since the extension - # is one of the intended parameters to the format string) - # * exe_extension - - # extension for executable files, eg. 
'' or '.exe' - - def object_filenames(self, source_filenames, strip_dir=False, output_dir=''): - if output_dir is None: - output_dir = '' - obj_names = [] - for src_name in source_filenames: - base, ext = os.path.splitext(src_name) - base = os.path.splitdrive(base)[1] # Chop off the drive - base = base[os.path.isabs(base):] # If abs, chop off leading / - if ext not in self.src_extensions: - raise UnknownFileError("unknown file type '%s' (from '%s')" % - (ext, src_name)) - if strip_dir: - base = os.path.basename(base) - obj_names.append(os.path.join(output_dir, - base + self.obj_extension)) - return obj_names - - def shared_object_filename(self, basename, strip_dir=False, output_dir=''): - assert output_dir is not None - if strip_dir: - basename = os.path.basename(basename) - return os.path.join(output_dir, basename + self.shared_lib_extension) - - def executable_filename(self, basename, strip_dir=False, output_dir=''): - assert output_dir is not None - if strip_dir: - basename = os.path.basename(basename) - return os.path.join(output_dir, basename + (self.exe_extension or '')) - - def library_filename(self, libname, lib_type='static', # or 'shared' - strip_dir=False, output_dir=''): - assert output_dir is not None - if lib_type not in ("static", "shared", "dylib"): - raise ValueError( - "'lib_type' must be 'static', 'shared' or 'dylib'") - fmt = getattr(self, lib_type + "_lib_format") - ext = getattr(self, lib_type + "_lib_extension") - - dir, base = os.path.split(libname) - filename = fmt % (base, ext) - if strip_dir: - dir = '' - - return os.path.join(output_dir, dir, filename) - - - # -- Utility methods ----------------------------------------------- - - def execute(self, func, args, msg=None, level=1): - execute(func, args, msg, self.dry_run) - - def spawn(self, cmd): - spawn(cmd, dry_run=self.dry_run) - - def move_file(self, src, dst): - logger.info("moving %r to %r", src, dst) - if self.dry_run: - return - return move(src, dst) - - def mkpath(self, name, mode=0o777): - name = os.path.normpath(name) - if os.path.isdir(name) or name == '': - return - if self.dry_run: - head = '' - for part in name.split(os.sep): - logger.info("created directory %s%s", head, part) - head += part + os.sep - return - os.makedirs(name, mode) diff --git a/Lib/packaging/compiler/cygwinccompiler.py b/Lib/packaging/compiler/cygwinccompiler.py deleted file mode 100644 --- a/Lib/packaging/compiler/cygwinccompiler.py +++ /dev/null @@ -1,355 +0,0 @@ -"""CCompiler implementations for Cygwin and mingw32 versions of GCC. - -This module contains the CygwinCCompiler class, a subclass of -UnixCCompiler that handles the Cygwin port of the GNU C compiler to -Windows, and the Mingw32CCompiler class which handles the mingw32 port -of GCC (same as cygwin in no-cygwin mode). -""" - -# problems: -# -# * if you use a msvc compiled python version (1.5.2) -# 1. you have to insert a __GNUC__ section in its config.h -# 2. you have to generate a import library for its dll -# - create a def-file for python??.dll -# - create a import library using -# dlltool --dllname python15.dll --def python15.def \ -# --output-lib libpython15.a -# -# see also http://starship.python.net/crew/kernr/mingw32/Notes.html -# -# * We put export_symbols in a def-file, and don't use -# --export-all-symbols because it doesn't worked reliable in some -# tested configurations. And because other windows compilers also -# need their symbols specified this no serious problem. 
-# -# tested configurations: -# -# * cygwin gcc 2.91.57/ld 2.9.4/dllwrap 0.2.4 works -# (after patching python's config.h and for C++ some other include files) -# see also http://starship.python.net/crew/kernr/mingw32/Notes.html -# * mingw32 gcc 2.95.2/ld 2.9.4/dllwrap 0.2.4 works -# (ld doesn't support -shared, so we use dllwrap) -# * cygwin gcc 2.95.2/ld 2.10.90/dllwrap 2.10.90 works now -# - its dllwrap doesn't work, there is a bug in binutils 2.10.90 -# see also http://sources.redhat.com/ml/cygwin/2000-06/msg01274.html -# - using gcc -mdll instead dllwrap doesn't work without -static because -# it tries to link against dlls instead their import libraries. (If -# it finds the dll first.) -# By specifying -static we force ld to link against the import libraries, -# this is windows standard and there are normally not the necessary symbols -# in the dlls. -# *** only the version of June 2000 shows these problems -# * cygwin gcc 3.2/ld 2.13.90 works -# (ld supports -shared) -# * mingw gcc 3.2/ld 2.13 works -# (ld supports -shared) - - -import os -import sys - -from packaging import logger -from packaging.compiler.unixccompiler import UnixCCompiler -from packaging.util import write_file -from packaging.errors import PackagingExecError, CompileError, UnknownFileError -from packaging.util import get_compiler_versions -import sysconfig - -# TODO use platform instead of sys.version -# (platform does unholy sys.version parsing too, but at least it gives other -# VMs a chance to override the returned values) - - -def get_msvcr(): - """Include the appropriate MSVC runtime library if Python was built - with MSVC 7.0 or later. - """ - msc_pos = sys.version.find('MSC v.') - if msc_pos != -1: - msc_ver = sys.version[msc_pos+6:msc_pos+10] - if msc_ver == '1300': - # MSVC 7.0 - return ['msvcr70'] - elif msc_ver == '1310': - # MSVC 7.1 - return ['msvcr71'] - elif msc_ver == '1400': - # VS2005 / MSVC 8.0 - return ['msvcr80'] - elif msc_ver == '1500': - # VS2008 / MSVC 9.0 - return ['msvcr90'] - else: - raise ValueError("Unknown MS Compiler version %s " % msc_ver) - - -class CygwinCCompiler(UnixCCompiler): - """ Handles the Cygwin port of the GNU C compiler to Windows. - """ - name = 'cygwin' - description = 'Cygwin port of GNU C Compiler for Win32' - obj_extension = ".o" - static_lib_extension = ".a" - shared_lib_extension = ".dll" - static_lib_format = "lib%s%s" - shared_lib_format = "%s%s" - exe_extension = ".exe" - - def __init__(self, dry_run=False, force=False): - super(CygwinCCompiler, self).__init__(dry_run, force) - - status, details = check_config_h() - logger.debug("Python's GCC status: %s (details: %s)", status, details) - if status is not CONFIG_H_OK: - self.warn( - "Python's pyconfig.h doesn't seem to support your compiler. " - "Reason: %s. " - "Compiling may fail because of undefined preprocessor macros." 
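A condensed sketch of the sys.version parsing done by get_msvcr() above, run against a canned version string instead of the real sys.version:

    fake_version = "3.3.0a4 (default) [MSC v.1500 32 bit (Intel)]"
    msc_pos = fake_version.find("MSC v.")
    msc_ver = fake_version[msc_pos + 6:msc_pos + 10]          # '1500'
    runtime = {"1300": "msvcr70", "1310": "msvcr71",
               "1400": "msvcr80", "1500": "msvcr90"}[msc_ver]
    print(runtime)                                            # msvcr90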
- % details) - - self.gcc_version, self.ld_version, self.dllwrap_version = \ - get_compiler_versions() - logger.debug(self.name + ": gcc %s, ld %s, dllwrap %s\n", - self.gcc_version, - self.ld_version, - self.dllwrap_version) - - # ld_version >= "2.10.90" and < "2.13" should also be able to use - # gcc -mdll instead of dllwrap - # Older dllwraps had own version numbers, newer ones use the - # same as the rest of binutils ( also ld ) - # dllwrap 2.10.90 is buggy - if self.ld_version >= "2.10.90": - self.linker_dll = "gcc" - else: - self.linker_dll = "dllwrap" - - # ld_version >= "2.13" support -shared so use it instead of - # -mdll -static - if self.ld_version >= "2.13": - shared_option = "-shared" - else: - shared_option = "-mdll -static" - - # Hard-code GCC because that's what this is all about. - # XXX optimization, warnings etc. should be customizable. - self.set_executables(compiler='gcc -mcygwin -O -Wall', - compiler_so='gcc -mcygwin -mdll -O -Wall', - compiler_cxx='g++ -mcygwin -O -Wall', - linker_exe='gcc -mcygwin', - linker_so=('%s -mcygwin %s' % - (self.linker_dll, shared_option))) - - # cygwin and mingw32 need different sets of libraries - if self.gcc_version == "2.91.57": - # cygwin shouldn't need msvcrt, but without the dlls will crash - # (gcc version 2.91.57) -- perhaps something about initialization - self.dll_libraries=["msvcrt"] - self.warn( - "Consider upgrading to a newer version of gcc") - else: - # Include the appropriate MSVC runtime library if Python was built - # with MSVC 7.0 or later. - self.dll_libraries = get_msvcr() - - def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): - """Compile the source by spawning GCC and windres if needed.""" - if ext == '.rc' or ext == '.res': - # gcc needs '.res' and '.rc' compiled to object files !!! - try: - self.spawn(["windres", "-i", src, "-o", obj]) - except PackagingExecError as msg: - raise CompileError(msg) - else: # for other files use the C-compiler - try: - self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + - extra_postargs) - except PackagingExecError as msg: - raise CompileError(msg) - - def link(self, target_desc, objects, output_filename, output_dir=None, - libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=False, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None): - """Link the objects.""" - # use separate copies, so we can modify the lists - extra_preargs = list(extra_preargs or []) - libraries = list(libraries or []) - objects = list(objects or []) - - # Additional libraries - libraries.extend(self.dll_libraries) - - # handle export symbols by creating a def-file - # with executables this only works with gcc/ld as linker - if ((export_symbols is not None) and - (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")): - # (The linker doesn't do anything if output is up-to-date. - # So it would probably better to check if we really need this, - # but for this we had to insert some unchanged parts of - # UnixCCompiler, and this is not what we want.) 
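For reference, this is the external command that _compile() above spawns for a Windows resource file; the file names here are placeholders:

    src, obj = "spam.rc", "build/temp/spam.o"
    cmd = ["windres", "-i", src, "-o", obj]
    print(" ".join(cmd))      # windres -i spam.rc -o build/temp/spam.o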
- - # we want to put some files in the same directory as the - # object files are, build_temp doesn't help much - # where are the object files - temp_dir = os.path.dirname(objects[0]) - # name of dll to give the helper files the same base name - dll_name, dll_extension = os.path.splitext( - os.path.basename(output_filename)) - - # generate the filenames for these files - def_file = os.path.join(temp_dir, dll_name + ".def") - lib_file = os.path.join(temp_dir, 'lib' + dll_name + ".a") - - # Generate .def file - contents = [ - "LIBRARY %s" % os.path.basename(output_filename), - "EXPORTS"] - for sym in export_symbols: - contents.append(sym) - self.execute(write_file, (def_file, contents), - "writing %s" % def_file) - - # next add options for def-file and to creating import libraries - - # dllwrap uses different options than gcc/ld - if self.linker_dll == "dllwrap": - extra_preargs.extend(("--output-lib", lib_file)) - # for dllwrap we have to use a special option - extra_preargs.extend(("--def", def_file)) - # we use gcc/ld here and can be sure ld is >= 2.9.10 - else: - # doesn't work: bfd_close build\...\libfoo.a: Invalid operation - #extra_preargs.extend(("-Wl,--out-implib,%s" % lib_file)) - # for gcc/ld the def-file is specified as any object files - objects.append(def_file) - - #end: if ((export_symbols is not None) and - # (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")): - - # who wants symbols and a many times larger output file - # should explicitly switch the debug mode on - # otherwise we let dllwrap/ld strip the output file - # (On my machine: 10KB < stripped_file < ??100KB - # unstripped_file = stripped_file + XXX KB - # ( XXX=254 for a typical python extension)) - if not debug: - extra_preargs.append("-s") - - super(CygwinCCompiler, self).link( - target_desc, objects, output_filename, output_dir, libraries, - library_dirs, runtime_library_dirs, - None, # export_symbols, we do this in our def-file - debug, extra_preargs, extra_postargs, build_temp, target_lang) - - # -- Miscellaneous methods ----------------------------------------- - - def object_filenames(self, source_filenames, strip_dir=False, - output_dir=''): - """Adds supports for rc and res files.""" - if output_dir is None: - output_dir = '' - obj_names = [] - for src_name in source_filenames: - # use normcase to make sure '.rc' is really '.rc' and not '.RC' - base, ext = os.path.splitext(os.path.normcase(src_name)) - if ext not in (self.src_extensions + ['.rc','.res']): - raise UnknownFileError("unknown file type '%s' (from '%s')" % (ext, src_name)) - if strip_dir: - base = os.path.basename(base) - if ext in ('.res', '.rc'): - # these need to be compiled to object files - obj_names.append(os.path.join(output_dir, - base + ext + self.obj_extension)) - else: - obj_names.append(os.path.join(output_dir, - base + self.obj_extension)) - return obj_names - -# the same as cygwin plus some additional parameters -class Mingw32CCompiler(CygwinCCompiler): - """ Handles the Mingw32 port of the GNU C compiler to Windows. - """ - name = 'mingw32' - description = 'MinGW32 compiler' - - def __init__(self, dry_run=False, force=False): - super(Mingw32CCompiler, self).__init__(dry_run, force) - - # ld_version >= "2.13" support -shared so use it instead of - # -mdll -static - if self.ld_version >= "2.13": - shared_option = "-shared" - else: - shared_option = "-mdll -static" - - # A real mingw32 doesn't need to specify a different entry point, - # but cygwin 2.91.57 in no-cygwin-mode needs it. 
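A minimal sketch of the .def file contents that link() above writes when export symbols are given; the DLL name and the symbol are made up:

    export_symbols = ["PyInit_spam"]
    contents = ["LIBRARY spam.pyd", "EXPORTS"] + export_symbols
    print("\n".join(contents))
    # LIBRARY spam.pyd
    # EXPORTS
    # PyInit_spam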
- if self.gcc_version <= "2.91.57": - entry_point = '--entry _DllMain at 12' - else: - entry_point = '' - - self.set_executables(compiler='gcc -mno-cygwin -O -Wall', - compiler_so='gcc -mno-cygwin -mdll -O -Wall', - compiler_cxx='g++ -mno-cygwin -O -Wall', - linker_exe='gcc -mno-cygwin', - linker_so='%s -mno-cygwin %s %s' - % (self.linker_dll, shared_option, - entry_point)) - # Maybe we should also append -mthreads, but then the finished - # dlls need another dll (mingwm10.dll see Mingw32 docs) - # (-mthreads: Support thread-safe exception handling on `Mingw32') - - # no additional libraries needed - self.dll_libraries=[] - - # Include the appropriate MSVC runtime library if Python was built - # with MSVC 7.0 or later. - self.dll_libraries = get_msvcr() - -# Because these compilers aren't configured in Python's pyconfig.h file by -# default, we should at least warn the user if he is using a unmodified -# version. - -CONFIG_H_OK = "ok" -CONFIG_H_NOTOK = "not ok" -CONFIG_H_UNCERTAIN = "uncertain" - -def check_config_h(): - """Check if the current Python installation appears amenable to building - extensions with GCC. - - Returns a tuple (status, details), where 'status' is one of the following - constants: - - - CONFIG_H_OK: all is well, go ahead and compile - - CONFIG_H_NOTOK: doesn't look good - - CONFIG_H_UNCERTAIN: not sure -- unable to read pyconfig.h - - 'details' is a human-readable string explaining the situation. - - Note there are two ways to conclude "OK": either 'sys.version' contains - the string "GCC" (implying that this Python was built with GCC), or the - installed "pyconfig.h" contains the string "__GNUC__". - """ - - # XXX since this function also checks sys.version, it's not strictly a - # "pyconfig.h" check -- should probably be renamed... - # if sys.version contains GCC then python was compiled with GCC, and the - # pyconfig.h file should be OK - if "GCC" in sys.version: - return CONFIG_H_OK, "sys.version mentions 'GCC'" - - # let's see if __GNUC__ is mentioned in python.h - fn = sysconfig.get_config_h_filename() - try: - with open(fn) as config_h: - if "__GNUC__" in config_h.read(): - return CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn - else: - return CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn - except IOError as exc: - return (CONFIG_H_UNCERTAIN, - "couldn't read '%s': %s" % (fn, exc.strerror)) diff --git a/Lib/packaging/compiler/extension.py b/Lib/packaging/compiler/extension.py deleted file mode 100644 --- a/Lib/packaging/compiler/extension.py +++ /dev/null @@ -1,121 +0,0 @@ -"""Class representing C/C++ extension modules.""" - -from packaging import logger - -# This class is really only used by the "build_ext" command, so it might -# make sense to put it in distutils.command.build_ext. However, that -# module is already big enough, and I want to make this class a bit more -# complex to simplify some common cases ("foo" module in "foo.c") and do -# better error-checking ("foo.c" actually exists). -# -# Also, putting this in build_ext.py means every setup script would have to -# import that large-ish module (indirectly, through distutils.core) in -# order to do anything. - - -class Extension: - """Just a collection of attributes that describes an extension - module and everything needed to build it (hopefully in a portable - way, but there are hooks that let you be as unportable as you need). - - Instance attributes: - name : string - the full name of the extension, including any packages -- ie. 
- *not* a filename or pathname, but Python dotted name - sources : [string] - list of source filenames, relative to the distribution root - (where the setup script lives), in Unix form (slash-separated) - for portability. Source files may be C, C++, SWIG (.i), - platform-specific resource files, or whatever else is recognized - by the "build_ext" command as source for a Python extension. - include_dirs : [string] - list of directories to search for C/C++ header files (in Unix - form for portability) - define_macros : [(name : string, value : string|None)] - list of macros to define; each macro is defined using a 2-tuple, - where 'value' is either the string to define it to or None to - define it without a particular value (equivalent of "#define - FOO" in source or -DFOO on Unix C compiler command line) - undef_macros : [string] - list of macros to undefine explicitly - library_dirs : [string] - list of directories to search for C/C++ libraries at link time - libraries : [string] - list of library names (not filenames or paths) to link against - runtime_library_dirs : [string] - list of directories to search for C/C++ libraries at run time - (for shared extensions, this is when the extension is loaded) - extra_objects : [string] - list of extra files to link with (eg. object files not implied - by 'sources', static library that must be explicitly specified, - binary resource files, etc.) - extra_compile_args : [string] - any extra platform- and compiler-specific information to use - when compiling the source files in 'sources'. For platforms and - compilers where "command line" makes sense, this is typically a - list of command-line arguments, but for other platforms it could - be anything. - extra_link_args : [string] - any extra platform- and compiler-specific information to use - when linking object files together to create the extension (or - to create a new static Python interpreter). Similar - interpretation as for 'extra_compile_args'. - export_symbols : [string] - list of symbols to be exported from a shared extension. Not - used on all platforms, and not generally necessary for Python - extensions, which typically export exactly one symbol: "init" + - extension_name. - swig_opts : [string] - any extra options to pass to SWIG if a source file has the .i - extension. - depends : [string] - list of files that the extension depends on - language : string - extension language (i.e. "c", "c++", "objc"). Will be detected - from the source extensions if not provided. - optional : boolean - specifies that a build failure in the extension should not abort the - build process, but simply not install the failing extension. 
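To make the attribute list above concrete, a hypothetical instantiation (the module name, source file and macro are invented; the import only works on an interpreter that still ships Lib/packaging, which the diff above deletes):

    from packaging.compiler.extension import Extension

    ext = Extension("spam._speedups",
                    sources=["src/speedups.c"],
                    define_macros=[("NDEBUG", None)],
                    libraries=["m"],
                    optional=True)
    print(ext.name, ext.sources, ext.define_macros)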
- """ - - # **kwargs are allowed so that a warning is emitted instead of an - # exception - def __init__(self, name, sources, include_dirs=None, define_macros=None, - undef_macros=None, library_dirs=None, libraries=None, - runtime_library_dirs=None, extra_objects=None, - extra_compile_args=None, extra_link_args=None, - export_symbols=None, swig_opts=None, depends=None, - language=None, optional=None, **kw): - if not isinstance(name, str): - raise AssertionError("'name' must be a string") - - if not isinstance(sources, list): - raise AssertionError("'sources' must be a list of strings") - - for v in sources: - if not isinstance(v, str): - raise AssertionError("'sources' must be a list of strings") - - self.name = name - self.sources = sources - self.include_dirs = include_dirs or [] - self.define_macros = define_macros or [] - self.undef_macros = undef_macros or [] - self.library_dirs = library_dirs or [] - self.libraries = libraries or [] - self.runtime_library_dirs = runtime_library_dirs or [] - self.extra_objects = extra_objects or [] - self.extra_compile_args = extra_compile_args or [] - self.extra_link_args = extra_link_args or [] - self.export_symbols = export_symbols or [] - self.swig_opts = swig_opts or [] - self.depends = depends or [] - self.language = language - self.optional = optional - - # If there are unknown keyword options, warn about them - if len(kw) > 0: - options = [repr(option) for option in kw] - options = ', '.join(sorted(options)) - logger.warning( - 'unknown arguments given to Extension: %s', options) diff --git a/Lib/packaging/compiler/msvc9compiler.py b/Lib/packaging/compiler/msvc9compiler.py deleted file mode 100644 --- a/Lib/packaging/compiler/msvc9compiler.py +++ /dev/null @@ -1,721 +0,0 @@ -"""CCompiler implementation for the Microsoft Visual Studio 2008 compiler. - -The MSVCCompiler class is compatible with VS 2005 and VS 2008. Legacy -support for older versions of VS are in the msvccompiler module. -""" - -# Written by Perry Stoll -# hacked by Robin Becker and Thomas Heller to do a better job of -# finding DevStudio (through the registry) -# ported to VS2005 and VS 2008 by Christian Heimes -import os -import subprocess -import sys -import re - -from packaging.errors import (PackagingExecError, PackagingPlatformError, - CompileError, LibError, LinkError) -from packaging.compiler.ccompiler import CCompiler -from packaging.compiler import gen_lib_options -from packaging import logger -from packaging.util import get_platform - -import winreg - -RegOpenKeyEx = winreg.OpenKeyEx -RegEnumKey = winreg.EnumKey -RegEnumValue = winreg.EnumValue -RegError = winreg.error - -HKEYS = (winreg.HKEY_USERS, - winreg.HKEY_CURRENT_USER, - winreg.HKEY_LOCAL_MACHINE, - winreg.HKEY_CLASSES_ROOT) - -VS_BASE = r"Software\Microsoft\VisualStudio\%0.1f" -WINSDK_BASE = r"Software\Microsoft\Microsoft SDKs\Windows" -NET_BASE = r"Software\Microsoft\.NETFramework" - -# A map keyed by get_platform() return values to values accepted by -# 'vcvarsall.bat'. Note a cross-compile may combine these (eg, 'x86_amd64' is -# the param to cross-compile on x86 targetting amd64.) 
-PLAT_TO_VCVARS = { - 'win32' : 'x86', - 'win-amd64' : 'amd64', - 'win-ia64' : 'ia64', -} - - -class Reg: - """Helper class to read values from the registry - """ - - def get_value(cls, path, key): - for base in HKEYS: - d = cls.read_values(base, path) - if d and key in d: - return d[key] - raise KeyError(key) - get_value = classmethod(get_value) - - def read_keys(cls, base, key): - """Return list of registry keys.""" - try: - handle = RegOpenKeyEx(base, key) - except RegError: - return None - L = [] - i = 0 - while True: - try: - k = RegEnumKey(handle, i) - except RegError: - break - L.append(k) - i += 1 - return L - read_keys = classmethod(read_keys) - - def read_values(cls, base, key): - """Return dict of registry keys and values. - - All names are converted to lowercase. - """ - try: - handle = RegOpenKeyEx(base, key) - except RegError: - return None - d = {} - i = 0 - while True: - try: - name, value, type = RegEnumValue(handle, i) - except RegError: - break - name = name.lower() - d[cls.convert_mbcs(name)] = cls.convert_mbcs(value) - i += 1 - return d - read_values = classmethod(read_values) - - def convert_mbcs(s): - dec = getattr(s, "decode", None) - if dec is not None: - try: - s = dec("mbcs") - except UnicodeError: - pass - return s - convert_mbcs = staticmethod(convert_mbcs) - -class MacroExpander: - - def __init__(self, version): - self.macros = {} - self.vsbase = VS_BASE % version - self.load_macros(version) - - def set_macro(self, macro, path, key): - self.macros["$(%s)" % macro] = Reg.get_value(path, key) - - def load_macros(self, version): - self.set_macro("VCInstallDir", self.vsbase + r"\Setup\VC", "productdir") - self.set_macro("VSInstallDir", self.vsbase + r"\Setup\VS", "productdir") - self.set_macro("FrameworkDir", NET_BASE, "installroot") - try: - if version >= 8.0: - self.set_macro("FrameworkSDKDir", NET_BASE, - "sdkinstallrootv2.0") - else: - raise KeyError("sdkinstallrootv2.0") - except KeyError: - raise PackagingPlatformError( -"""Python was built with Visual Studio 2008; extensions must be built with a -compiler than can generate compatible binaries. Visual Studio 2008 was not -found on this system. If you have Cygwin installed, you can try compiling -with MingW32, by passing "-c mingw32" to pysetup.""") - - if version >= 9.0: - self.set_macro("FrameworkVersion", self.vsbase, "clr version") - self.set_macro("WindowsSdkDir", WINSDK_BASE, "currentinstallfolder") - else: - p = r"Software\Microsoft\NET Framework Setup\Product" - for base in HKEYS: - try: - h = RegOpenKeyEx(base, p) - except RegError: - continue - key = RegEnumKey(h, 0) - d = Reg.get_value(base, r"%s\%s" % (p, key)) - self.macros["$(FrameworkVersion)"] = d["version"] - - def sub(self, s): - for k, v in self.macros.items(): - s = s.replace(k, v) - return s - -def get_build_version(): - """Return the version of MSVC that was used to build Python. - - For Python 2.3 and up, the version number is included in - sys.version. For earlier versions, assume the compiler is MSVC 6. - """ - prefix = "MSC v." 
- i = sys.version.find(prefix) - if i == -1: - return 6 - i = i + len(prefix) - s, rest = sys.version[i:].split(" ", 1) - majorVersion = int(s[:-2]) - 6 - minorVersion = int(s[2:3]) / 10.0 - # I don't think paths are affected by minor version in version 6 - if majorVersion == 6: - minorVersion = 0 - if majorVersion >= 6: - return majorVersion + minorVersion - # else we don't know what version of the compiler this is - return None - -def normalize_and_reduce_paths(paths): - """Return a list of normalized paths with duplicates removed. - - The current order of paths is maintained. - """ - # Paths are normalized so things like: /a and /a/ aren't both preserved. - reduced_paths = [] - for p in paths: - np = os.path.normpath(p) - # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set. - if np not in reduced_paths: - reduced_paths.append(np) - return reduced_paths - -def removeDuplicates(variable): - """Remove duplicate values of an environment variable. - """ - oldList = variable.split(os.pathsep) - newList = [] - for i in oldList: - if i not in newList: - newList.append(i) - newVariable = os.pathsep.join(newList) - return newVariable - -def find_vcvarsall(version): - """Find the vcvarsall.bat file - - At first it tries to find the productdir of VS 2008 in the registry. If - that fails it falls back to the VS90COMNTOOLS env var. - """ - vsbase = VS_BASE % version - try: - productdir = Reg.get_value(r"%s\Setup\VC" % vsbase, - "productdir") - except KeyError: - logger.debug("Unable to find productdir in registry") - productdir = None - - if not productdir or not os.path.isdir(productdir): - toolskey = "VS%0.f0COMNTOOLS" % version - toolsdir = os.environ.get(toolskey, None) - - if toolsdir and os.path.isdir(toolsdir): - productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC") - productdir = os.path.abspath(productdir) - if not os.path.isdir(productdir): - logger.debug("%s is not a valid directory", productdir) - return None - else: - logger.debug("env var %s is not set or invalid", toolskey) - if not productdir: - logger.debug("no productdir found") - return None - vcvarsall = os.path.join(productdir, "vcvarsall.bat") - if os.path.isfile(vcvarsall): - return vcvarsall - logger.debug("unable to find vcvarsall.bat") - return None - -def query_vcvarsall(version, arch="x86"): - """Launch vcvarsall.bat and read the settings from its environment - """ - vcvarsall = find_vcvarsall(version) - interesting = set(("include", "lib", "libpath", "path")) - result = {} - - if vcvarsall is None: - raise PackagingPlatformError("Unable to find vcvarsall.bat") - logger.debug("calling 'vcvarsall.bat %s' (version=%s)", arch, version) - popen = subprocess.Popen('"%s" %s & set' % (vcvarsall, arch), - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - - stdout, stderr = popen.communicate() - if popen.wait() != 0: - raise PackagingPlatformError(stderr.decode("mbcs")) - - stdout = stdout.decode("mbcs") - for line in stdout.split("\n"): - line = Reg.convert_mbcs(line) - if '=' not in line: - continue - line = line.strip() - key, value = line.split('=', 1) - key = key.lower() - if key in interesting: - if value.endswith(os.pathsep): - value = value[:-1] - result[key] = removeDuplicates(value) - - if len(result) != len(interesting): - raise ValueError(str(list(result))) - - return result - -# More globals -VERSION = get_build_version() -if VERSION < 8.0: - raise PackagingPlatformError("VC %0.1f is not supported by this module" % VERSION) -# MACROS = MacroExpander(VERSION) - -class 
MSVCCompiler(CCompiler) : - """Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class.""" - - name = 'msvc' - description = 'Microsoft Visual C++' - - # Just set this so CCompiler's constructor doesn't barf. We currently - # don't use the 'set_executables()' bureaucracy provided by CCompiler, - # as it really isn't necessary for this sort of single-compiler class. - # Would be nice to have a consistent interface with UnixCCompiler, - # though, so it's worth thinking about. - executables = {} - - # Private class data (need to distinguish C from C++ source for compiler) - _c_extensions = ['.c'] - _cpp_extensions = ['.cc', '.cpp', '.cxx'] - _rc_extensions = ['.rc'] - _mc_extensions = ['.mc'] - - # Needed for the filename generation methods provided by the - # base class, CCompiler. - src_extensions = (_c_extensions + _cpp_extensions + - _rc_extensions + _mc_extensions) - res_extension = '.res' - obj_extension = '.obj' - static_lib_extension = '.lib' - shared_lib_extension = '.dll' - static_lib_format = shared_lib_format = '%s%s' - exe_extension = '.exe' - - def __init__(self, dry_run=False, force=False): - super(MSVCCompiler, self).__init__(dry_run, force) - self.__version = VERSION - self.__root = r"Software\Microsoft\VisualStudio" - # self.__macros = MACROS - self.__paths = [] - # target platform (.plat_name is consistent with 'bdist') - self.plat_name = None - self.__arch = None # deprecated name - self.initialized = False - - def initialize(self, plat_name=None): - # multi-init means we would need to check platform same each time... - assert not self.initialized, "don't init multiple times" - if plat_name is None: - plat_name = get_platform() - # sanity check for platforms to prevent obscure errors later. - ok_plats = 'win32', 'win-amd64', 'win-ia64' - if plat_name not in ok_plats: - raise PackagingPlatformError("--plat-name must be one of %s" % - (ok_plats,)) - - if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"): - # Assume that the SDK set up everything alright; don't try to be - # smarter - self.cc = "cl.exe" - self.linker = "link.exe" - self.lib = "lib.exe" - self.rc = "rc.exe" - self.mc = "mc.exe" - else: - # On x86, 'vcvars32.bat amd64' creates an env that doesn't work; - # to cross compile, you use 'x86_amd64'. - # On AMD64, 'vcvars32.bat amd64' is a native build env; to cross - # compile use 'x86' (ie, it runs the x86 compiler directly) - # No idea how itanium handles this, if at all. - if plat_name == get_platform() or plat_name == 'win32': - # native build or cross-compile to win32 - plat_spec = PLAT_TO_VCVARS[plat_name] - else: - # cross compile from win32 -> some 64bit - plat_spec = PLAT_TO_VCVARS[get_platform()] + '_' + \ - PLAT_TO_VCVARS[plat_name] - - vc_env = query_vcvarsall(VERSION, plat_spec) - - # take care to only use strings in the environment. - self.__paths = vc_env['path'].split(os.pathsep) - os.environ['lib'] = vc_env['lib'] - os.environ['include'] = vc_env['include'] - - if len(self.__paths) == 0: - raise PackagingPlatformError("Python was built with %s, " - "and extensions need to be built with the same " - "version of the compiler, but it isn't installed." 
- % self.__product) - - self.cc = self.find_exe("cl.exe") - self.linker = self.find_exe("link.exe") - self.lib = self.find_exe("lib.exe") - self.rc = self.find_exe("rc.exe") # resource compiler - self.mc = self.find_exe("mc.exe") # message compiler - #self.set_path_env_var('lib') - #self.set_path_env_var('include') - - # extend the MSVC path with the current path - try: - for p in os.environ['path'].split(';'): - self.__paths.append(p) - except KeyError: - pass - self.__paths = normalize_and_reduce_paths(self.__paths) - os.environ['path'] = ";".join(self.__paths) - - self.preprocess_options = None - if self.__arch == "x86": - self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', - '/Z7', '/D_DEBUG'] - else: - # Win64 - self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GS-' , - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-', - '/Z7', '/D_DEBUG'] - - self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO'] - if self.__version >= 7: - self.ldflags_shared_debug = [ - '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG', '/pdb:None' - ] - self.ldflags_static = [ '/nologo'] - - self.initialized = True - - # -- Worker methods ------------------------------------------------ - - def object_filenames(self, - source_filenames, - strip_dir=False, - output_dir=''): - # Copied from ccompiler.py, extended to return .res as 'object'-file - # for .rc input file - if output_dir is None: output_dir = '' - obj_names = [] - for src_name in source_filenames: - base, ext = os.path.splitext(src_name) - base = os.path.splitdrive(base)[1] # Chop off the drive - base = base[os.path.isabs(base):] # If abs, chop off leading / - if ext not in self.src_extensions: - # Better to raise an exception instead of silently continuing - # and later complain about sources and targets having - # different lengths - raise CompileError("Don't know how to compile %s" % src_name) - if strip_dir: - base = os.path.basename(base) - if ext in self._rc_extensions: - obj_names.append(os.path.join(output_dir, - base + self.res_extension)) - elif ext in self._mc_extensions: - obj_names.append(os.path.join(output_dir, - base + self.res_extension)) - else: - obj_names.append(os.path.join(output_dir, - base + self.obj_extension)) - return obj_names - - - def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=False, - extra_preargs=None, extra_postargs=None, depends=None): - - if not self.initialized: - self.initialize() - compile_info = self._setup_compile(output_dir, macros, include_dirs, - sources, depends, extra_postargs) - macros, objects, extra_postargs, pp_opts, build = compile_info - - compile_opts = extra_preargs or [] - compile_opts.append('/c') - if debug: - compile_opts.extend(self.compile_options_debug) - else: - compile_opts.extend(self.compile_options) - - for obj in objects: - try: - src, ext = build[obj] - except KeyError: - continue - if debug: - # pass the full pathname to MSVC in debug mode, - # this allows the debugger to find the source file - # without asking the user to browse for it - src = os.path.abspath(src) - - if ext in self._c_extensions: - input_opt = "/Tc" + src - elif ext in self._cpp_extensions: - input_opt = "/Tp" + src - elif ext in self._rc_extensions: - # compile .RC to .RES file - input_opt = src - output_opt = "/fo" + obj - try: - self.spawn([self.rc] + pp_opts + - [output_opt] + [input_opt]) - except PackagingExecError as msg: - raise 
CompileError(msg) - continue - elif ext in self._mc_extensions: - # Compile .MC to .RC file to .RES file. - # * '-h dir' specifies the directory for the - # generated include file - # * '-r dir' specifies the target directory of the - # generated RC file and the binary message resource - # it includes - # - # For now (since there are no options to change this), - # we use the source-directory for the include file and - # the build directory for the RC file and message - # resources. This works at least for win32all. - h_dir = os.path.dirname(src) - rc_dir = os.path.dirname(obj) - try: - # first compile .MC to .RC and .H file - self.spawn([self.mc] + - ['-h', h_dir, '-r', rc_dir] + [src]) - base, _ = os.path.splitext(os.path.basename(src)) - rc_file = os.path.join(rc_dir, base + '.rc') - # then compile .RC to .RES file - self.spawn([self.rc] + - ["/fo" + obj] + [rc_file]) - - except PackagingExecError as msg: - raise CompileError(msg) - continue - else: - # how to handle this file? - raise CompileError("Don't know how to compile %s to %s" - % (src, obj)) - - output_opt = "/Fo" + obj - try: - self.spawn([self.cc] + compile_opts + pp_opts + - [input_opt, output_opt] + - extra_postargs) - except PackagingExecError as msg: - raise CompileError(msg) - - return objects - - - def create_static_lib(self, - objects, - output_libname, - output_dir=None, - debug=False, - target_lang=None): - - if not self.initialized: - self.initialize() - objects, output_dir = self._fix_object_args(objects, output_dir) - output_filename = self.library_filename(output_libname, - output_dir=output_dir) - - if self._need_link(objects, output_filename): - lib_args = objects + ['/OUT:' + output_filename] - if debug: - pass # XXX what goes here? - try: - self.spawn([self.lib] + lib_args) - except PackagingExecError as msg: - raise LibError(msg) - else: - logger.debug("skipping %s (up-to-date)", output_filename) - - - def link(self, target_desc, objects, output_filename, output_dir=None, - libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=False, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None): - if not self.initialized: - self.initialize() - objects, output_dir = self._fix_object_args(objects, output_dir) - fixed_args = self._fix_lib_args(libraries, library_dirs, - runtime_library_dirs) - libraries, library_dirs, runtime_library_dirs = fixed_args - - if runtime_library_dirs: - self.warn("don't know what to do with 'runtime_library_dirs': " - + str(runtime_library_dirs)) - - lib_opts = gen_lib_options(self, - library_dirs, runtime_library_dirs, - libraries) - if output_dir is not None: - output_filename = os.path.join(output_dir, output_filename) - - if self._need_link(objects, output_filename): - if target_desc == CCompiler.EXECUTABLE: - if debug: - ldflags = self.ldflags_shared_debug[1:] - else: - ldflags = self.ldflags_shared[1:] - else: - if debug: - ldflags = self.ldflags_shared_debug - else: - ldflags = self.ldflags_shared - - export_opts = [] - for sym in (export_symbols or []): - export_opts.append("/EXPORT:" + sym) - - ld_args = (ldflags + lib_opts + export_opts + - objects + ['/OUT:' + output_filename]) - - # The MSVC linker generates .lib and .exp files, which cannot be - # suppressed by any linker switches. The .lib files may even be - # needed! Make sure they are generated in the temporary build - # directory. Since they have different names for debug and release - # builds, they can go into the same directory. 
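A toy reconstruction of how link() above assembles the MSVC linker arguments (lib_opts omitted; the object and output names are invented):

    export_symbols = ["PyInit_spam"]
    objects = ["build/temp/spam.obj"]
    output_filename = "build/lib/spam.pyd"
    ldflags = ['/DLL', '/nologo', '/INCREMENTAL:NO']
    export_opts = ["/EXPORT:" + sym for sym in export_symbols]
    ld_args = ldflags + export_opts + objects + ['/OUT:' + output_filename]
    print(ld_args)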
- build_temp = os.path.dirname(objects[0]) - if export_symbols is not None: - dll_name, dll_ext = os.path.splitext( - os.path.basename(output_filename)) - implib_file = os.path.join( - build_temp, - self.library_filename(dll_name)) - ld_args.append('/IMPLIB:' + implib_file) - - # Embedded manifests are recommended - see MSDN article titled - # "How to: Embed a Manifest Inside a C/C++ Application" - # (currently at http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx) - # Ask the linker to generate the manifest in the temp dir, so - # we can embed it later. - temp_manifest = os.path.join( - build_temp, - os.path.basename(output_filename) + ".manifest") - ld_args.append('/MANIFESTFILE:' + temp_manifest) - - if extra_preargs: - ld_args[:0] = extra_preargs - if extra_postargs: - ld_args.extend(extra_postargs) - - self.mkpath(os.path.dirname(output_filename)) - try: - self.spawn([self.linker] + ld_args) - except PackagingExecError as msg: - raise LinkError(msg) - - # embed the manifest - # XXX - this is somewhat fragile - if mt.exe fails, distutils - # will still consider the DLL up-to-date, but it will not have a - # manifest. Maybe we should link to a temp file? OTOH, that - # implies a build environment error that shouldn't go undetected. - if target_desc == CCompiler.EXECUTABLE: - mfid = 1 - else: - mfid = 2 - self._remove_visual_c_ref(temp_manifest) - out_arg = '-outputresource:%s;%s' % (output_filename, mfid) - if self.__version < 10: - try: - self.spawn(['mt.exe', '-nologo', '-manifest', - temp_manifest, out_arg]) - except PackagingExecError as msg: - raise LinkError(msg) - else: - logger.debug("skipping %s (up-to-date)", output_filename) - - def _remove_visual_c_ref(self, manifest_file): - try: - # Remove references to the Visual C runtime, so they will - # fall through to the Visual C dependency of Python.exe. - # This way, when installed for a restricted user (e.g. - # runtimes are not in WinSxS folder, but in Python's own - # folder), the runtimes do not need to be in every folder - # with .pyd's. - with open(manifest_file) as manifest_f: - manifest_buf = manifest_f.read() - pattern = re.compile( - r"""|)""", - re.DOTALL) - manifest_buf = re.sub(pattern, "", manifest_buf) - pattern = "\s*" - manifest_buf = re.sub(pattern, "", manifest_buf) - with open(manifest_file, 'w') as manifest_f: - manifest_f.write(manifest_buf) - except IOError: - pass - - # -- Miscellaneous methods ----------------------------------------- - # These are all used by the 'gen_lib_options() function, in - # ccompiler.py. - - def library_dir_option(self, dir): - return "/LIBPATH:" + dir - - def runtime_library_dir_option(self, dir): - raise PackagingPlatformError( - "don't know how to set runtime library search path for MSVC++") - - def library_option(self, lib): - return self.library_filename(lib) - - - def find_library_file(self, dirs, lib, debug=False): - # Prefer a debugging library if found (and requested), but deal - # with it if we don't have one. - if debug: - try_names = [lib + "_d", lib] - else: - try_names = [lib] - for dir in dirs: - for name in try_names: - libfile = os.path.join(dir, self.library_filename(name)) - if os.path.exists(libfile): - return libfile - else: - # Oops, didn't find it in *any* of 'dirs' - return None - - # Helper methods for using the MSVC registry settings - - def find_exe(self, exe): - """Return path to an MSVC executable program. 
- - Tries to find the program in several places: first, one of the - MSVC program search paths from the registry; next, the directories - in the PATH environment variable. If any of those work, return an - absolute path that is known to exist. If none of them work, just - return the original program name, 'exe'. - """ - for p in self.__paths: - fn = os.path.join(os.path.abspath(p), exe) - if os.path.isfile(fn): - return fn - - # didn't find it; try existing path - for p in os.environ['Path'].split(';'): - fn = os.path.join(os.path.abspath(p),exe) - if os.path.isfile(fn): - return fn - - return exe diff --git a/Lib/packaging/compiler/msvccompiler.py b/Lib/packaging/compiler/msvccompiler.py deleted file mode 100644 --- a/Lib/packaging/compiler/msvccompiler.py +++ /dev/null @@ -1,635 +0,0 @@ -"""CCompiler implementation for old Microsoft Visual Studio compilers. - -For a compiler compatible with VS 2005 and 2008, use msvc9compiler. -""" - -# Written by Perry Stoll -# hacked by Robin Becker and Thomas Heller to do a better job of -# finding DevStudio (through the registry) - - -import sys -import os - -from packaging.errors import (PackagingExecError, PackagingPlatformError, - CompileError, LibError, LinkError) -from packaging.compiler.ccompiler import CCompiler -from packaging.compiler import gen_lib_options -from packaging import logger - -_can_read_reg = False -try: - import winreg - - _can_read_reg = True - hkey_mod = winreg - - RegOpenKeyEx = winreg.OpenKeyEx - RegEnumKey = winreg.EnumKey - RegEnumValue = winreg.EnumValue - RegError = winreg.error - -except ImportError: - try: - import win32api - import win32con - _can_read_reg = True - hkey_mod = win32con - - RegOpenKeyEx = win32api.RegOpenKeyEx - RegEnumKey = win32api.RegEnumKey - RegEnumValue = win32api.RegEnumValue - RegError = win32api.error - - except ImportError: - logger.warning( - "can't read registry to find the necessary compiler setting;\n" - "make sure that Python modules _winreg, win32api or win32con " - "are installed.") - -if _can_read_reg: - HKEYS = (hkey_mod.HKEY_USERS, - hkey_mod.HKEY_CURRENT_USER, - hkey_mod.HKEY_LOCAL_MACHINE, - hkey_mod.HKEY_CLASSES_ROOT) - - -def read_keys(base, key): - """Return list of registry keys.""" - - try: - handle = RegOpenKeyEx(base, key) - except RegError: - return None - L = [] - i = 0 - while True: - try: - k = RegEnumKey(handle, i) - except RegError: - break - L.append(k) - i = i + 1 - return L - - -def read_values(base, key): - """Return dict of registry keys and values. - - All names are converted to lowercase. 
- """ - try: - handle = RegOpenKeyEx(base, key) - except RegError: - return None - d = {} - i = 0 - while True: - try: - name, value, type = RegEnumValue(handle, i) - except RegError: - break - name = name.lower() - d[convert_mbcs(name)] = convert_mbcs(value) - i = i + 1 - return d - - -def convert_mbcs(s): - enc = getattr(s, "encode", None) - if enc is not None: - try: - s = enc("mbcs") - except UnicodeError: - pass - return s - - -class MacroExpander: - - def __init__(self, version): - self.macros = {} - self.load_macros(version) - - def set_macro(self, macro, path, key): - for base in HKEYS: - d = read_values(base, path) - if d: - self.macros["$(%s)" % macro] = d[key] - break - - def load_macros(self, version): - vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version - self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir") - self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir") - net = r"Software\Microsoft\.NETFramework" - self.set_macro("FrameworkDir", net, "installroot") - try: - if version > 7.0: - self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1") - else: - self.set_macro("FrameworkSDKDir", net, "sdkinstallroot") - except KeyError: - raise PackagingPlatformError( -"""Python was built with Visual Studio 2003; extensions must be built with -a compiler than can generate compatible binaries. Visual Studio 2003 was -not found on this system. If you have Cygwin installed, you can try -compiling with MingW32, by passing "-c mingw32" to pysetup.""") - - p = r"Software\Microsoft\NET Framework Setup\Product" - for base in HKEYS: - try: - h = RegOpenKeyEx(base, p) - except RegError: - continue - key = RegEnumKey(h, 0) - d = read_values(base, r"%s\%s" % (p, key)) - self.macros["$(FrameworkVersion)"] = d["version"] - - def sub(self, s): - for k, v in self.macros.items(): - s = s.replace(k, v) - return s - - -def get_build_version(): - """Return the version of MSVC that was used to build Python. - - For Python 2.3 and up, the version number is included in - sys.version. For earlier versions, assume the compiler is MSVC 6. - """ - - prefix = "MSC v." - i = sys.version.find(prefix) - if i == -1: - return 6 - i = i + len(prefix) - s, rest = sys.version[i:].split(" ", 1) - majorVersion = int(s[:-2]) - 6 - minorVersion = int(s[2:3]) / 10.0 - # I don't think paths are affected by minor version in version 6 - if majorVersion == 6: - minorVersion = 0 - if majorVersion >= 6: - return majorVersion + minorVersion - # else we don't know what version of the compiler this is - return None - - -def get_build_architecture(): - """Return the processor architecture. - - Possible results are "Intel", "Itanium", or "AMD64". - """ - - prefix = " bit (" - i = sys.version.find(prefix) - if i == -1: - return "Intel" - j = sys.version.find(")", i) - return sys.version[i+len(prefix):j] - - -def normalize_and_reduce_paths(paths): - """Return a list of normalized paths with duplicates removed. - - The current order of paths is maintained. - """ - # Paths are normalized so things like: /a and /a/ aren't both preserved. - reduced_paths = [] - for p in paths: - np = os.path.normpath(p) - # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set. 
- if np not in reduced_paths: - reduced_paths.append(np) - return reduced_paths - - -class MSVCCompiler(CCompiler): - """Concrete class that implements an interface to Microsoft Visual C++, - as defined by the CCompiler abstract class.""" - - name = 'msvc' - description = "Microsoft Visual C++" - - # Just set this so CCompiler's constructor doesn't barf. We currently - # don't use the 'set_executables()' bureaucracy provided by CCompiler, - # as it really isn't necessary for this sort of single-compiler class. - # Would be nice to have a consistent interface with UnixCCompiler, - # though, so it's worth thinking about. - executables = {} - - # Private class data (need to distinguish C from C++ source for compiler) - _c_extensions = ['.c'] - _cpp_extensions = ['.cc', '.cpp', '.cxx'] - _rc_extensions = ['.rc'] - _mc_extensions = ['.mc'] - - # Needed for the filename generation methods provided by the - # base class, CCompiler. - src_extensions = (_c_extensions + _cpp_extensions + - _rc_extensions + _mc_extensions) - res_extension = '.res' - obj_extension = '.obj' - static_lib_extension = '.lib' - shared_lib_extension = '.dll' - static_lib_format = shared_lib_format = '%s%s' - exe_extension = '.exe' - - def __init__(self, dry_run=False, force=False): - super(MSVCCompiler, self).__init__(dry_run, force) - self.__version = get_build_version() - self.__arch = get_build_architecture() - if self.__arch == "Intel": - # x86 - if self.__version >= 7: - self.__root = r"Software\Microsoft\VisualStudio" - self.__macros = MacroExpander(self.__version) - else: - self.__root = r"Software\Microsoft\Devstudio" - self.__product = "Visual Studio version %s" % self.__version - else: - # Win64. Assume this was built with the platform SDK - self.__product = "Microsoft SDK compiler %s" % (self.__version + 6) - - self.initialized = False - - def initialize(self): - self.__paths = [] - if ("DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and - self.find_exe("cl.exe")): - # Assume that the SDK set up everything alright; don't try to be - # smarter - self.cc = "cl.exe" - self.linker = "link.exe" - self.lib = "lib.exe" - self.rc = "rc.exe" - self.mc = "mc.exe" - else: - self.__paths = self.get_msvc_paths("path") - - if len(self.__paths) == 0: - raise PackagingPlatformError("Python was built with %s " - "and extensions need to be built with the same " - "version of the compiler, but it isn't installed." 
% - self.__product) - - self.cc = self.find_exe("cl.exe") - self.linker = self.find_exe("link.exe") - self.lib = self.find_exe("lib.exe") - self.rc = self.find_exe("rc.exe") # resource compiler - self.mc = self.find_exe("mc.exe") # message compiler - self.set_path_env_var('lib') - self.set_path_env_var('include') - - # extend the MSVC path with the current path - try: - for p in os.environ['path'].split(';'): - self.__paths.append(p) - except KeyError: - pass - self.__paths = normalize_and_reduce_paths(self.__paths) - os.environ['path'] = ';'.join(self.__paths) - - self.preprocess_options = None - if self.__arch == "Intel": - self.compile_options = ['/nologo', '/Ox', '/MD', '/W3', '/GX', - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GX', - '/Z7', '/D_DEBUG'] - else: - # Win64 - self.compile_options = ['/nologo', '/Ox', '/MD', '/W3', '/GS-', - '/DNDEBUG'] - self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-', - '/Z7', '/D_DEBUG'] - - self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO'] - if self.__version >= 7: - self.ldflags_shared_debug = [ - '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG' - ] - else: - self.ldflags_shared_debug = [ - '/DLL', '/nologo', '/INCREMENTAL:no', '/pdb:None', '/DEBUG' - ] - self.ldflags_static = [ '/nologo'] - - self.initialized = True - - # -- Worker methods ------------------------------------------------ - - def object_filenames(self, source_filenames, strip_dir=False, output_dir=''): - # Copied from ccompiler.py, extended to return .res as 'object'-file - # for .rc input file - if output_dir is None: - output_dir = '' - obj_names = [] - for src_name in source_filenames: - base, ext = os.path.splitext(src_name) - base = os.path.splitdrive(base)[1] # Chop off the drive - base = base[os.path.isabs(base):] # If abs, chop off leading / - if ext not in self.src_extensions: - # Better to raise an exception instead of silently continuing - # and later complain about sources and targets having - # different lengths - raise CompileError("Don't know how to compile %s" % src_name) - if strip_dir: - base = os.path.basename(base) - if ext in self._rc_extensions: - obj_names.append(os.path.join(output_dir, - base + self.res_extension)) - elif ext in self._mc_extensions: - obj_names.append(os.path.join(output_dir, - base + self.res_extension)) - else: - obj_names.append(os.path.join(output_dir, - base + self.obj_extension)) - return obj_names - - def compile(self, sources, - output_dir=None, macros=None, include_dirs=None, debug=False, - extra_preargs=None, extra_postargs=None, depends=None): - - if not self.initialized: - self.initialize() - macros, objects, extra_postargs, pp_opts, build = \ - self._setup_compile(output_dir, macros, include_dirs, sources, - depends, extra_postargs) - - compile_opts = extra_preargs or [] - compile_opts.append('/c') - if debug: - compile_opts.extend(self.compile_options_debug) - else: - compile_opts.extend(self.compile_options) - - for obj in objects: - try: - src, ext = build[obj] - except KeyError: - continue - if debug: - # pass the full pathname to MSVC in debug mode, - # this allows the debugger to find the source file - # without asking the user to browse for it - src = os.path.abspath(src) - - if ext in self._c_extensions: - input_opt = "/Tc" + src - elif ext in self._cpp_extensions: - input_opt = "/Tp" + src - elif ext in self._rc_extensions: - # compile .RC to .RES file - input_opt = src - output_opt = "/fo" + obj - try: - self.spawn([self.rc] + pp_opts + - [output_opt] + 
[input_opt]) - except PackagingExecError as msg: - raise CompileError(msg) - continue - elif ext in self._mc_extensions: - - # Compile .MC to .RC file to .RES file. - # * '-h dir' specifies the directory for the - # generated include file - # * '-r dir' specifies the target directory of the - # generated RC file and the binary message resource - # it includes - # - # For now (since there are no options to change this), - # we use the source-directory for the include file and - # the build directory for the RC file and message - # resources. This works at least for win32all. - - h_dir = os.path.dirname(src) - rc_dir = os.path.dirname(obj) - try: - # first compile .MC to .RC and .H file - self.spawn([self.mc] + - ['-h', h_dir, '-r', rc_dir] + [src]) - base, _ = os.path.splitext(os.path.basename(src)) - rc_file = os.path.join(rc_dir, base + '.rc') - # then compile .RC to .RES file - self.spawn([self.rc] + - ["/fo" + obj] + [rc_file]) - - except PackagingExecError as msg: - raise CompileError(msg) - continue - else: - # how to handle this file? - raise CompileError( - "Don't know how to compile %s to %s" % - (src, obj)) - - output_opt = "/Fo" + obj - try: - self.spawn([self.cc] + compile_opts + pp_opts + - [input_opt, output_opt] + - extra_postargs) - except PackagingExecError as msg: - raise CompileError(msg) - - return objects - - def create_static_lib(self, objects, output_libname, output_dir=None, - debug=False, target_lang=None): - if not self.initialized: - self.initialize() - objects, output_dir = self._fix_object_args(objects, output_dir) - output_filename = \ - self.library_filename(output_libname, output_dir=output_dir) - - if self._need_link(objects, output_filename): - lib_args = objects + ['/OUT:' + output_filename] - if debug: - pass # XXX what goes here? - try: - self.spawn([self.lib] + lib_args) - except PackagingExecError as msg: - raise LibError(msg) - - else: - logger.debug("skipping %s (up-to-date)", output_filename) - - def link(self, target_desc, objects, output_filename, output_dir=None, - libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=False, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None): - - if not self.initialized: - self.initialize() - objects, output_dir = self._fix_object_args(objects, output_dir) - libraries, library_dirs, runtime_library_dirs = \ - self._fix_lib_args(libraries, library_dirs, runtime_library_dirs) - - if runtime_library_dirs: - self.warn("don't know what to do with 'runtime_library_dirs': %s" - % (runtime_library_dirs,)) - - lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, - libraries) - if output_dir is not None: - output_filename = os.path.join(output_dir, output_filename) - - if self._need_link(objects, output_filename): - - if target_desc == CCompiler.EXECUTABLE: - if debug: - ldflags = self.ldflags_shared_debug[1:] - else: - ldflags = self.ldflags_shared[1:] - else: - if debug: - ldflags = self.ldflags_shared_debug - else: - ldflags = self.ldflags_shared - - export_opts = [] - for sym in (export_symbols or []): - export_opts.append("/EXPORT:" + sym) - - ld_args = (ldflags + lib_opts + export_opts + - objects + ['/OUT:' + output_filename]) - - # The MSVC linker generates .lib and .exp files, which cannot be - # suppressed by any linker switches. The .lib files may even be - # needed! Make sure they are generated in the temporary build - # directory. Since they have different names for debug and release - # builds, they can go into the same directory. 
- if export_symbols is not None: - dll_name, dll_ext = os.path.splitext( - os.path.basename(output_filename)) - implib_file = os.path.join( - os.path.dirname(objects[0]), - self.library_filename(dll_name)) - ld_args.append('/IMPLIB:' + implib_file) - - if extra_preargs: - ld_args[:0] = extra_preargs - if extra_postargs: - ld_args.extend(extra_postargs) - - self.mkpath(os.path.dirname(output_filename)) - try: - self.spawn([self.linker] + ld_args) - except PackagingExecError as msg: - raise LinkError(msg) - - else: - logger.debug("skipping %s (up-to-date)", output_filename) - - # -- Miscellaneous methods ----------------------------------------- - # These are all used by the 'gen_lib_options() function, in - # ccompiler.py. - - def library_dir_option(self, dir): - return "/LIBPATH:" + dir - - def runtime_library_dir_option(self, dir): - raise PackagingPlatformError("don't know how to set runtime library search path for MSVC++") - - def library_option(self, lib): - return self.library_filename(lib) - - def find_library_file(self, dirs, lib, debug=False): - # Prefer a debugging library if found (and requested), but deal - # with it if we don't have one. - if debug: - try_names = [lib + "_d", lib] - else: - try_names = [lib] - for dir in dirs: - for name in try_names: - libfile = os.path.join(dir, self.library_filename(name)) - if os.path.exists(libfile): - return libfile - else: - # Oops, didn't find it in *any* of 'dirs' - return None - - # Helper methods for using the MSVC registry settings - - def find_exe(self, exe): - """Return path to an MSVC executable program. - - Tries to find the program in several places: first, one of the - MSVC program search paths from the registry; next, the directories - in the PATH environment variable. If any of those work, return an - absolute path that is known to exist. If none of them work, just - return the original program name, 'exe'. - """ - - for p in self.__paths: - fn = os.path.join(os.path.abspath(p), exe) - if os.path.isfile(fn): - return fn - - # didn't find it; try existing path - for p in os.environ['Path'].split(';'): - fn = os.path.join(os.path.abspath(p), exe) - if os.path.isfile(fn): - return fn - - return exe - - def get_msvc_paths(self, path, platform='x86'): - """Get a list of devstudio directories (include, lib or path). - - Return a list of strings. The list will be empty if unable to - access the registry or appropriate registry keys not found. - """ - - if not _can_read_reg: - return [] - - path = path + " dirs" - if self.__version >= 7: - key = (r"%s\%0.1f\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories" - % (self.__root, self.__version)) - else: - key = (r"%s\6.0\Build System\Components\Platforms" - r"\Win32 (%s)\Directories" % (self.__root, platform)) - - for base in HKEYS: - d = read_values(base, key) - if d: - if self.__version >= 7: - return self.__macros.sub(d[path]).split(";") - else: - return d[path].split(";") - # MSVC 6 seems to create the registry entries we need only when - # the GUI is run. - if self.__version == 6: - for base in HKEYS: - if read_values(base, r"%s\6.0" % self.__root) is not None: - self.warn("It seems you have Visual Studio 6 installed, " - "but the expected registry settings are not present.\n" - "You must at least run the Visual Studio GUI once " - "so that these entries are created.") - break - return [] - - def set_path_env_var(self, name): - """Set environment variable 'name' to an MSVC path type value. - - This is equivalent to a SET command prior to execution of spawned - commands. 
- """ - - if name == "lib": - p = self.get_msvc_paths("library") - else: - p = self.get_msvc_paths(name) - if p: - os.environ[name] = ';'.join(p) - - -if get_build_version() >= 8.0: - logger.debug("importing new compiler from distutils.msvc9compiler") - OldMSVCCompiler = MSVCCompiler - from packaging.compiler.msvc9compiler import MSVCCompiler - # get_build_architecture not really relevant now we support cross-compile - from packaging.compiler.msvc9compiler import MacroExpander diff --git a/Lib/packaging/compiler/unixccompiler.py b/Lib/packaging/compiler/unixccompiler.py deleted file mode 100644 --- a/Lib/packaging/compiler/unixccompiler.py +++ /dev/null @@ -1,339 +0,0 @@ -"""CCompiler implementation for Unix compilers. - -This module contains the UnixCCompiler class, a subclass of CCompiler -that handles the "typical" Unix-style command-line C compiler: - * macros defined with -Dname[=value] - * macros undefined with -Uname - * include search directories specified with -Idir - * libraries specified with -lllib - * library search directories specified with -Ldir - * compile handled by 'cc' (or similar) executable with -c option: - compiles .c to .o - * link static library handled by 'ar' command (possibly with 'ranlib') - * link shared library handled by 'cc -shared' -""" - -import os, sys - -from packaging.util import newer -from packaging.compiler.ccompiler import CCompiler -from packaging.compiler import gen_preprocess_options, gen_lib_options -from packaging.errors import (PackagingExecError, CompileError, - LibError, LinkError) -from packaging import logger -import sysconfig - - -# XXX Things not currently handled: -# * optimization/debug/warning flags; we just use whatever's in Python's -# Makefile and live with it. Is this adequate? If not, we might -# have to have a bunch of subclasses GNUCCompiler, SGICCompiler, -# SunCCompiler, and I suspect down that road lies madness. -# * even if we don't know a warning flag from an optimization flag, -# we need some way for outsiders to feed preprocessor/compiler/linker -# flags in to us -- eg. a sysadmin might want to mandate certain flags -# via a site config file, or a user might want to set something for -# compiling this module distribution only via the pysetup command -# line, whatever. As long as these options come from something on the -# current system, they can be as system-dependent as they like, and we -# should just happily stuff them into the preprocessor/compiler/linker -# options and carry on. - -def _darwin_compiler_fixup(compiler_so, cc_args): - """ - This function will strip '-isysroot PATH' and '-arch ARCH' from the - compile flags if the user has specified one them in extra_compile_flags. - - This is needed because '-arch ARCH' adds another architecture to the - build, without a way to remove an architecture. Furthermore GCC will - barf if multiple '-isysroot' arguments are present. - """ - stripArch = stripSysroot = False - - compiler_so = list(compiler_so) - kernel_version = os.uname()[2] # 8.4.3 - major_version = int(kernel_version.split('.')[0]) - - if major_version < 8: - # OSX before 10.4.0, these don't support -arch and -isysroot at - # all. 
- stripArch = stripSysroot = True - else: - stripArch = '-arch' in cc_args - stripSysroot = '-isysroot' in cc_args - - if stripArch or 'ARCHFLAGS' in os.environ: - while True: - try: - index = compiler_so.index('-arch') - # Strip this argument and the next one: - del compiler_so[index:index+2] - except ValueError: - break - - if 'ARCHFLAGS' in os.environ and not stripArch: - # User specified different -arch flags in the environ, - # see also the sysconfig - compiler_so = compiler_so + os.environ['ARCHFLAGS'].split() - - if stripSysroot: - try: - index = compiler_so.index('-isysroot') - # Strip this argument and the next one: - del compiler_so[index:index+2] - except ValueError: - pass - - # Check if the SDK that is used during compilation actually exists, - # the universal build requires the usage of a universal SDK and not all - # users have that installed by default. - sysroot = None - if '-isysroot' in cc_args: - idx = cc_args.index('-isysroot') - sysroot = cc_args[idx+1] - elif '-isysroot' in compiler_so: - idx = compiler_so.index('-isysroot') - sysroot = compiler_so[idx+1] - - if sysroot and not os.path.isdir(sysroot): - logger.warning( - "compiling with an SDK that doesn't seem to exist: %r;\n" - "please check your Xcode installation", sysroot) - - return compiler_so - -class UnixCCompiler(CCompiler): - - name = 'unix' - description = 'Standard UNIX-style compiler' - - # These are used by CCompiler in two places: the constructor sets - # instance attributes 'preprocessor', 'compiler', etc. from them, and - # 'set_executable()' allows any of these to be set. The defaults here - # are pretty generic; they will probably have to be set by an outsider - # (eg. using information discovered by the sysconfig about building - # Python extensions). - executables = {'preprocessor' : None, - 'compiler' : ["cc"], - 'compiler_so' : ["cc"], - 'compiler_cxx' : ["cc"], - 'linker_so' : ["cc", "-shared"], - 'linker_exe' : ["cc"], - 'archiver' : ["ar", "-cr"], - 'ranlib' : None, - } - - if sys.platform[:6] == "darwin": - executables['ranlib'] = ["ranlib"] - - # Needed for the filename generation methods provided by the base - # class, CCompiler. XXX whoever instantiates/uses a particular - # UnixCCompiler instance should set 'shared_lib_ext' -- we set a - # reasonable common default here, but it's not necessarily used on all - # Unices! - - src_extensions = [".c",".C",".cc",".cxx",".cpp",".m"] - obj_extension = ".o" - static_lib_extension = ".a" - shared_lib_extension = ".so" - dylib_lib_extension = ".dylib" - static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s" - if sys.platform == "cygwin": - exe_extension = ".exe" - - def preprocess(self, source, - output_file=None, macros=None, include_dirs=None, - extra_preargs=None, extra_postargs=None): - ignore, macros, include_dirs = \ - self._fix_compile_args(None, macros, include_dirs) - pp_opts = gen_preprocess_options(macros, include_dirs) - pp_args = self.preprocessor + pp_opts - if output_file: - pp_args.extend(('-o', output_file)) - if extra_preargs: - pp_args[:0] = extra_preargs - if extra_postargs: - pp_args.extend(extra_postargs) - pp_args.append(source) - - # We need to preprocess: either we're being forced to, or we're - # generating output to stdout, or there's a target output file and - # the source file is newer than the target (or the target doesn't - # exist). 
- if self.force or output_file is None or newer(source, output_file): - if output_file: - self.mkpath(os.path.dirname(output_file)) - try: - self.spawn(pp_args) - except PackagingExecError as msg: - raise CompileError(msg) - - def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): - compiler_so = self.compiler_so - if sys.platform == 'darwin': - compiler_so = _darwin_compiler_fixup(compiler_so, cc_args + extra_postargs) - try: - self.spawn(compiler_so + cc_args + [src, '-o', obj] + - extra_postargs) - except PackagingExecError as msg: - raise CompileError(msg) - - def create_static_lib(self, objects, output_libname, - output_dir=None, debug=False, target_lang=None): - objects, output_dir = self._fix_object_args(objects, output_dir) - - output_filename = \ - self.library_filename(output_libname, output_dir=output_dir) - - if self._need_link(objects, output_filename): - self.mkpath(os.path.dirname(output_filename)) - self.spawn(self.archiver + - [output_filename] + - objects + self.objects) - - # Not many Unices required ranlib anymore -- SunOS 4.x is, I - # think the only major Unix that does. Maybe we need some - # platform intelligence here to skip ranlib if it's not - # needed -- or maybe Python's configure script took care of - # it for us, hence the check for leading colon. - if self.ranlib: - try: - self.spawn(self.ranlib + [output_filename]) - except PackagingExecError as msg: - raise LibError(msg) - else: - logger.debug("skipping %s (up-to-date)", output_filename) - - def link(self, target_desc, objects, - output_filename, output_dir=None, libraries=None, - library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=False, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None): - objects, output_dir = self._fix_object_args(objects, output_dir) - libraries, library_dirs, runtime_library_dirs = \ - self._fix_lib_args(libraries, library_dirs, runtime_library_dirs) - - lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, - libraries) - if type(output_dir) not in (str, type(None)): - raise TypeError("'output_dir' must be a string or None") - if output_dir is not None: - output_filename = os.path.join(output_dir, output_filename) - - if self._need_link(objects, output_filename): - ld_args = (objects + self.objects + - lib_opts + ['-o', output_filename]) - if debug: - ld_args[:0] = ['-g'] - if extra_preargs: - ld_args[:0] = extra_preargs - if extra_postargs: - ld_args.extend(extra_postargs) - self.mkpath(os.path.dirname(output_filename)) - try: - if target_desc == CCompiler.EXECUTABLE: - linker = self.linker_exe[:] - else: - linker = self.linker_so[:] - if target_lang == "c++" and self.compiler_cxx: - # skip over environment variable settings if /usr/bin/env - # is used to set up the linker's environment. - # This is needed on OSX. Note: this assumes that the - # normal and C++ compiler have the same environment - # settings. - i = 0 - if os.path.basename(linker[0]) == "env": - i = 1 - while '=' in linker[i]: - i = i + 1 - - linker[i] = self.compiler_cxx[i] - - if sys.platform == 'darwin': - linker = _darwin_compiler_fixup(linker, ld_args) - - self.spawn(linker + ld_args) - except PackagingExecError as msg: - raise LinkError(msg) - else: - logger.debug("skipping %s (up-to-date)", output_filename) - - # -- Miscellaneous methods ----------------------------------------- - # These are all used by the 'gen_lib_options() function, in - # ccompiler.py. 
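[Editor's note] gen_lib_options itself (from packaging.compiler's ccompiler module, not shown in this diff) is what ties these hooks together. A simplified sketch of how such a function presumably combines them, matching the call sites used in link() above; this is an illustration, not the packaging implementation:

    def gen_lib_options_sketch(compiler, library_dirs, runtime_library_dirs,
                               libraries):
        # Defer to the per-compiler hooks defined below.
        opts = []
        for dir in library_dirs:
            opts.append(compiler.library_dir_option(dir))
        for dir in runtime_library_dirs:
            opt = compiler.runtime_library_dir_option(dir)
            # some platforms return a list of flags, others a single string
            if isinstance(opt, list):
                opts.extend(opt)
            else:
                opts.append(opt)
        for lib in libraries:
            opts.append(compiler.library_option(lib))
        return opts
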
- - def library_dir_option(self, dir): - return "-L" + dir - - def _is_gcc(self, compiler_name): - return "gcc" in compiler_name or "g++" in compiler_name - - def runtime_library_dir_option(self, dir): - # XXX Hackish, at the very least. See Python bug #445902: - # http://sourceforge.net/tracker/index.php - # ?func=detail&aid=445902&group_id=5470&atid=105470 - # Linkers on different platforms need different options to - # specify that directories need to be added to the list of - # directories searched for dependencies when a dynamic library - # is sought. GCC on GNU systems (Linux, FreeBSD, ...) has to - # be told to pass the -R option through to the linker, whereas - # other compilers and gcc on other systems just know this. - # Other compilers may need something slightly different. At - # this time, there's no way to determine this information from - # the configuration data stored in the Python installation, so - # we use this hack. - - compiler = os.path.basename(sysconfig.get_config_var("CC")) - if sys.platform[:6] == "darwin": - # MacOSX's linker doesn't understand the -R flag at all - return "-L" + dir - elif sys.platform[:5] == "hp-ux": - if self._is_gcc(compiler): - return ["-Wl,+s", "-L" + dir] - return ["+s", "-L" + dir] - elif sys.platform[:7] == "irix646" or sys.platform[:6] == "osf1V5": - return ["-rpath", dir] - elif self._is_gcc(compiler): - # gcc on non-GNU systems does not need -Wl, but can - # use it anyway. Since distutils has always passed in - # -Wl whenever gcc was used in the past it is probably - # safest to keep doing so. - if sysconfig.get_config_var("GNULD") == "yes": - # GNU ld needs an extra option to get a RUNPATH - # instead of just an RPATH. - return "-Wl,--enable-new-dtags,-R" + dir - else: - return "-Wl,-R" + dir - elif sys.platform[:3] == "aix": - return "-blibpath:" + dir - else: - # No idea how --enable-new-dtags would be passed on to - # ld if this system was using GNU ld. Don't know if a - # system like this even exists. - return "-R" + dir - - def library_option(self, lib): - return "-l" + lib - - def find_library_file(self, dirs, lib, debug=False): - shared_f = self.library_filename(lib, lib_type='shared') - dylib_f = self.library_filename(lib, lib_type='dylib') - static_f = self.library_filename(lib, lib_type='static') - - for dir in dirs: - shared = os.path.join(dir, shared_f) - dylib = os.path.join(dir, dylib_f) - static = os.path.join(dir, static_f) - # We're second-guessing the linker here, with not much hard - # data to go on: GCC seems to prefer the shared library, so I'm - # assuming that *all* Unix C compilers do. And of course I'm - # ignoring even GCC's "-static" option. So sue me. 
- if os.path.exists(dylib): - return dylib - elif os.path.exists(shared): - return shared - elif os.path.exists(static): - return static - - # Oops, didn't find it in *any* of 'dirs' - return None diff --git a/Lib/packaging/config.py b/Lib/packaging/config.py deleted file mode 100644 --- a/Lib/packaging/config.py +++ /dev/null @@ -1,391 +0,0 @@ -"""Utilities to find and read config files used by packaging.""" - -import os -import sys -import logging - -from shlex import split -from configparser import RawConfigParser -from packaging import logger -from packaging.errors import PackagingOptionError -from packaging.compiler.extension import Extension -from packaging.util import (check_environ, iglob, resolve_name, strtobool, - split_multiline) -from packaging.compiler import set_compiler -from packaging.command import set_command -from packaging.markers import interpret - - -def _check_name(name, packages): - if '.' not in name: - return - parts = name.split('.') - parent = '.'.join(parts[:-1]) - if parent not in packages: - # we could log a warning instead of raising, but what's the use - # of letting people build modules they can't import? - raise PackagingOptionError( - 'parent package for extension %r not found' % name) - - -def _pop_values(values_dct, key): - """Remove values from the dictionary and convert them as a list""" - vals_str = values_dct.pop(key, '') - if not vals_str: - return - fields = [] - # the line separator is \n for setup.cfg files - for field in vals_str.split('\n'): - tmp_vals = field.split('--') - if len(tmp_vals) == 2 and not interpret(tmp_vals[1]): - continue - fields.append(tmp_vals[0]) - # Get bash options like `gcc -print-file-name=libgcc.a` XXX bash options? - vals = split(' '.join(fields)) - if vals: - return vals - - -def _rel_path(base, path): - # normalizes and returns a lstripped-/-separated path - base = base.replace(os.path.sep, '/') - path = path.replace(os.path.sep, '/') - assert path.startswith(base) - return path[len(base):].lstrip('/') - - -def get_resources_dests(resources_root, rules): - """Find destinations for resources files""" - destinations = {} - for base, suffix, dest in rules: - prefix = os.path.join(resources_root, base) - for abs_base in iglob(prefix): - abs_glob = os.path.join(abs_base, suffix) - for abs_path in iglob(abs_glob): - resource_file = _rel_path(resources_root, abs_path) - if dest is None: # remove the entry if it was here - destinations.pop(resource_file, None) - else: - rel_path = _rel_path(abs_base, abs_path) - rel_dest = dest.replace(os.path.sep, '/').rstrip('/') - destinations[resource_file] = rel_dest + '/' + rel_path - return destinations - - -class Config: - """Class used to work with configuration files""" - def __init__(self, dist): - self.dist = dist - self.setup_hooks = [] - - def run_hooks(self, config): - """Run setup hooks in the order defined in the spec.""" - for hook in self.setup_hooks: - hook(config) - - def find_config_files(self): - """Find as many configuration files as should be processed for this - platform, and return a list of filenames in the order in which they - should be parsed. The filenames returned are guaranteed to exist - (modulo nasty race conditions). - - There are three possible config files: packaging.cfg in the - Packaging installation directory (ie. where the top-level - Packaging __inst__.py file lives), a file in the user's home - directory named .pydistutils.cfg on Unix and pydistutils.cfg - on Windows/Mac; and setup.cfg in the current directory. 
- - The file in the user's home directory can be disabled with the - --no-user-cfg option. - """ - files = [] - check_environ() - - # Where to look for the system-wide Packaging config file - sys_dir = os.path.dirname(sys.modules['packaging'].__file__) - - # Look for the system config file - sys_file = os.path.join(sys_dir, "packaging.cfg") - if os.path.isfile(sys_file): - files.append(sys_file) - - # What to call the per-user config file - if os.name == 'posix': - user_filename = ".pydistutils.cfg" - else: - user_filename = "pydistutils.cfg" - - # And look for the user config file - if self.dist.want_user_cfg: - user_file = os.path.join(os.path.expanduser('~'), user_filename) - if os.path.isfile(user_file): - files.append(user_file) - - # All platforms support local setup.cfg - local_file = "setup.cfg" - if os.path.isfile(local_file): - files.append(local_file) - - if logger.isEnabledFor(logging.DEBUG): - logger.debug("using config files: %s", ', '.join(files)) - return files - - def _convert_metadata(self, name, value): - # converts a value found in setup.cfg into a valid metadata - # XXX - return value - - def _read_setup_cfg(self, parser, cfg_filename): - cfg_directory = os.path.dirname(os.path.abspath(cfg_filename)) - content = {} - for section in parser.sections(): - content[section] = dict(parser.items(section)) - - # global setup hooks are called first - if 'global' in content: - if 'setup_hooks' in content['global']: - setup_hooks = split_multiline(content['global']['setup_hooks']) - - # add project directory to sys.path, to allow hooks to be - # distributed with the project - sys.path.insert(0, cfg_directory) - try: - for line in setup_hooks: - try: - hook = resolve_name(line) - except ImportError as e: - logger.warning('cannot find setup hook: %s', - e.args[0]) - else: - self.setup_hooks.append(hook) - self.run_hooks(content) - finally: - sys.path.pop(0) - - metadata = self.dist.metadata - - # setting the metadata values - if 'metadata' in content: - for key, value in content['metadata'].items(): - key = key.replace('_', '-') - if metadata.is_multi_field(key): - value = split_multiline(value) - - if key == 'project-url': - value = [(label.strip(), url.strip()) - for label, url in - [v.split(',') for v in value]] - - if key == 'description-file': - if 'description' in content['metadata']: - msg = ("description and description-file' are " - "mutually exclusive") - raise PackagingOptionError(msg) - - filenames = value.split() - - # concatenate all files - value = [] - for filename in filenames: - # will raise if file not found - with open(filename) as description_file: - value.append(description_file.read().strip()) - # add filename as a required file - if filename not in metadata.requires_files: - metadata.requires_files.append(filename) - value = '\n'.join(value).strip() - key = 'description' - - if metadata.is_metadata_field(key): - metadata[key] = self._convert_metadata(key, value) - - if 'files' in content: - files = content['files'] - self.dist.package_dir = files.pop('packages_root', None) - - files = dict((key, split_multiline(value)) for key, value in - files.items()) - - self.dist.packages = [] - - packages = files.get('packages', []) - if isinstance(packages, str): - packages = [packages] - - for package in packages: - if ':' in package: - dir_, package = package.split(':') - self.dist.package_dir[package] = dir_ - self.dist.packages.append(package) - - self.dist.py_modules = files.get('modules', []) - if isinstance(self.dist.py_modules, str): - self.dist.py_modules = 
[self.dist.py_modules] - self.dist.scripts = files.get('scripts', []) - if isinstance(self.dist.scripts, str): - self.dist.scripts = [self.dist.scripts] - - self.dist.package_data = {} - # bookkeeping for the loop below - firstline = True - prev = None - - for line in files.get('package_data', []): - if '=' in line: - # package name -- file globs or specs - key, value = line.split('=') - prev = self.dist.package_data[key.strip()] = value.split() - elif firstline: - # invalid continuation on the first line - raise PackagingOptionError( - 'malformed package_data first line: %r (misses "=")' % - line) - else: - # continuation, add to last seen package name - prev.extend(line.split()) - - firstline = False - - self.dist.data_files = [] - for data in files.get('data_files', []): - data = data.split('=') - if len(data) != 2: - continue - key, value = data - values = [v.strip() for v in value.split(',')] - self.dist.data_files.append((key, values)) - - # manifest template - self.dist.extra_files = files.get('extra_files', []) - - resources = [] - for rule in files.get('resources', []): - glob, destination = rule.split('=', 1) - rich_glob = glob.strip().split(' ', 1) - if len(rich_glob) == 2: - prefix, suffix = rich_glob - else: - assert len(rich_glob) == 1 - prefix = '' - suffix = glob - if destination == '': - destination = None - resources.append( - (prefix.strip(), suffix.strip(), destination.strip())) - self.dist.data_files = get_resources_dests( - cfg_directory, resources) - - ext_modules = self.dist.ext_modules - for section_key in content: - # no str.partition in 2.4 :( - labels = section_key.split(':') - if len(labels) == 2 and labels[0] == 'extension': - values_dct = content[section_key] - if 'name' in values_dct: - raise PackagingOptionError( - 'extension name should be given as [extension: name], ' - 'not as key') - name = labels[1].strip() - _check_name(name, self.dist.packages) - ext_modules.append(Extension( - name, - _pop_values(values_dct, 'sources'), - _pop_values(values_dct, 'include_dirs'), - _pop_values(values_dct, 'define_macros'), - _pop_values(values_dct, 'undef_macros'), - _pop_values(values_dct, 'library_dirs'), - _pop_values(values_dct, 'libraries'), - _pop_values(values_dct, 'runtime_library_dirs'), - _pop_values(values_dct, 'extra_objects'), - _pop_values(values_dct, 'extra_compile_args'), - _pop_values(values_dct, 'extra_link_args'), - _pop_values(values_dct, 'export_symbols'), - _pop_values(values_dct, 'swig_opts'), - _pop_values(values_dct, 'depends'), - values_dct.pop('language', None), - values_dct.pop('optional', None), - **values_dct)) - - def parse_config_files(self, filenames=None): - if filenames is None: - filenames = self.find_config_files() - - logger.debug("Distribution.parse_config_files():") - - parser = RawConfigParser() - - for filename in filenames: - logger.debug(" reading %s", filename) - parser.read(filename, encoding='utf-8') - - if os.path.split(filename)[-1] == 'setup.cfg': - self._read_setup_cfg(parser, filename) - - for section in parser.sections(): - if section == 'global': - if parser.has_option('global', 'compilers'): - self._load_compilers(parser.get('global', 'compilers')) - - if parser.has_option('global', 'commands'): - self._load_commands(parser.get('global', 'commands')) - - options = parser.options(section) - opt_dict = self.dist.get_option_dict(section) - - for opt in options: - if opt == '__name__': - continue - val = parser.get(section, opt) - opt = opt.replace('-', '_') - - if opt == 'sub_commands': - val = split_multiline(val) 
- if isinstance(val, str): - val = [val] - - # Hooks use a suffix system to prevent being overriden - # by a config file processed later (i.e. a hook set in - # the user config file cannot be replaced by a hook - # set in a project config file, unless they have the - # same suffix). - if (opt.startswith("pre_hook.") or - opt.startswith("post_hook.")): - hook_type, alias = opt.split(".") - hook_dict = opt_dict.setdefault( - hook_type, (filename, {}))[1] - hook_dict[alias] = val - else: - opt_dict[opt] = filename, val - - # Make the RawConfigParser forget everything (so we retain - # the original filenames that options come from) - parser.__init__() - - # If there was a "global" section in the config file, use it - # to set Distribution options. - if 'global' in self.dist.command_options: - for opt, (src, val) in self.dist.command_options['global'].items(): - alias = self.dist.negative_opt.get(opt) - try: - if alias: - setattr(self.dist, alias, not strtobool(val)) - elif opt == 'dry_run': # FIXME ugh! - setattr(self.dist, opt, strtobool(val)) - else: - setattr(self.dist, opt, val) - except ValueError as msg: - raise PackagingOptionError(msg) - - def _load_compilers(self, compilers): - compilers = split_multiline(compilers) - if isinstance(compilers, str): - compilers = [compilers] - for compiler in compilers: - set_compiler(compiler.strip()) - - def _load_commands(self, commands): - commands = split_multiline(commands) - if isinstance(commands, str): - commands = [commands] - for command in commands: - set_command(command.strip()) diff --git a/Lib/packaging/create.py b/Lib/packaging/create.py deleted file mode 100644 --- a/Lib/packaging/create.py +++ /dev/null @@ -1,682 +0,0 @@ -"""Interactive helper used to create a setup.cfg file. - -This script will generate a packaging configuration file by looking at -the current directory and asking the user questions. It is intended to -be called as *pysetup create*. -""" - -# Original code by Sean Reifschneider - -# Original TODO list: -# Look for a license file and automatically add the category. -# When a .c file is found during the walk, can we add it as an extension? -# Ask if there is a maintainer different that the author -# Ask for the platform (can we detect this via "import win32" or something?) -# Ask for the dependencies. -# Ask for the Requires-Dist -# Ask for the Provides-Dist -# Ask for a description -# Detect scripts (not sure how. #! outside of package?) - -import os -import re -import imp -import sys -import glob -import shutil -import sysconfig -from hashlib import md5 -from textwrap import dedent -from tokenize import detect_encoding -from configparser import RawConfigParser - -from packaging import logger -# importing this with an underscore as it should be replaced by the -# dict form or another structures for all purposes -from packaging._trove import all_classifiers as _CLASSIFIERS_LIST -from packaging.version import is_valid_version - -_FILENAME = 'setup.cfg' -_DEFAULT_CFG = '.pypkgcreate' # FIXME use a section in user .pydistutils.cfg - -_helptext = { - 'name': ''' -The name of the project to be packaged, usually a single word composed -of lower-case characters such as "zope.interface", "sqlalchemy" or -"CherryPy". -''', - 'version': ''' -Version number of the software, typically 2 or 3 numbers separated by -dots such as "1.0", "0.6b3", or "3.2.1". "0.1.0" is recommended for -initial development. -''', - 'summary': ''' -A one-line summary of what this project is or does, typically a sentence -80 characters or less in length. 
-''', - 'author': ''' -The full name of the author (typically you). -''', - 'author_email': ''' -Email address of the project author. -''', - 'do_classifier': ''' -Trove classifiers are optional identifiers that allow you to specify the -intended audience by saying things like "Beta software with a text UI -for Linux under the PSF license". However, this can be a somewhat -involved process. -''', - 'packages': ''' -Python packages included in the project. -''', - 'modules': ''' -Pure Python modules included in the project. -''', - 'extra_files': ''' -You can provide extra files/dirs contained in your project. -It has to follow the template syntax. XXX add help here. -''', - - 'home_page': ''' -The home page for the project, typically a public Web page. -''', - 'trove_license': ''' -Optionally you can specify a license. Type a string that identifies a -common license, and then you can select a list of license specifiers. -''', - 'trove_generic': ''' -Optionally, you can set other trove identifiers for things such as the -human language, programming language, user interface, etc. -''', - 'setup.py found': ''' -The setup.py script will be executed to retrieve the metadata. -An interactive helper will be run if you answer "n", -''', -} - -PROJECT_MATURITY = ['Development Status :: 1 - Planning', - 'Development Status :: 2 - Pre-Alpha', - 'Development Status :: 3 - Alpha', - 'Development Status :: 4 - Beta', - 'Development Status :: 5 - Production/Stable', - 'Development Status :: 6 - Mature', - 'Development Status :: 7 - Inactive'] - -# XXX everything needs docstrings and tests (both low-level tests of various -# methods and functional tests of running the script) - - -def load_setup(): - """run the setup script (i.e the setup.py file) - - This function load the setup file in all cases (even if it have already - been loaded before, because we are monkey patching its setup function with - a particular one""" - with open("setup.py", "rb") as f: - encoding, lines = detect_encoding(f.readline) - with open("setup.py", encoding=encoding) as f: - imp.load_module("setup", f, "setup.py", (".py", "r", imp.PY_SOURCE)) - - -def ask_yn(question, default=None, helptext=None): - question += ' (y/n)' - while True: - answer = ask(question, default, helptext, required=True) - if answer and answer[0].lower() in ('y', 'n'): - return answer[0].lower() - - logger.error('You must select "Y" or "N".') - - -# XXX use util.ask -# FIXME: if prompt ends with '?', don't add ':' - - -def ask(question, default=None, helptext=None, required=True, - lengthy=False, multiline=False): - prompt = '%s: ' % (question,) - if default: - prompt = '%s [%s]: ' % (question, default) - if default and len(question) + len(default) > 70: - prompt = '%s\n [%s]: ' % (question, default) - if lengthy or multiline: - prompt += '\n > ' - - if not helptext: - helptext = 'No additional help available.' 
- - helptext = helptext.strip("\n") - - while True: - line = input(prompt).strip() - if line == '?': - print('=' * 70) - print(helptext) - print('=' * 70) - continue - if default and not line: - return default - if not line and required: - print('*' * 70) - print('This value cannot be empty.') - print('===========================') - if helptext: - print(helptext) - print('*' * 70) - continue - return line - - -def convert_yn_to_bool(yn, yes=True, no=False): - """Convert a y/yes or n/no to a boolean value.""" - if yn.lower().startswith('y'): - return yes - else: - return no - - -def _build_classifiers_dict(classifiers): - d = {} - for key in classifiers: - subdict = d - for subkey in key.split(' :: '): - if subkey not in subdict: - subdict[subkey] = {} - subdict = subdict[subkey] - return d - -CLASSIFIERS = _build_classifiers_dict(_CLASSIFIERS_LIST) - - -def _build_licences(classifiers): - res = [] - for index, item in enumerate(classifiers): - if not item.startswith('License :: '): - continue - res.append((index, item.split(' :: ')[-1].lower())) - return res - -LICENCES = _build_licences(_CLASSIFIERS_LIST) - - -class MainProgram: - """Make a project setup configuration file (setup.cfg).""" - - def __init__(self): - self.configparser = None - self.classifiers = set() - self.data = {'name': '', - 'version': '1.0.0', - 'classifier': self.classifiers, - 'packages': [], - 'modules': [], - 'platform': [], - 'resources': [], - 'extra_files': [], - 'scripts': [], - } - self._load_defaults() - - def __call__(self): - setupcfg_defined = False - if self.has_setup_py() and self._prompt_user_for_conversion(): - setupcfg_defined = self.convert_py_to_cfg() - if not setupcfg_defined: - self.define_cfg_values() - self._write_cfg() - - def has_setup_py(self): - """Test for the existence of a setup.py file.""" - return os.path.exists('setup.py') - - def define_cfg_values(self): - self.inspect() - self.query_user() - - def _lookup_option(self, key): - if not self.configparser.has_option('DEFAULT', key): - return None - return self.configparser.get('DEFAULT', key) - - def _load_defaults(self): - # Load default values from a user configuration file - self.configparser = RawConfigParser() - # TODO replace with section in distutils config file - default_cfg = os.path.expanduser(os.path.join('~', _DEFAULT_CFG)) - self.configparser.read(default_cfg) - self.data['author'] = self._lookup_option('author') - self.data['author_email'] = self._lookup_option('author_email') - - def _prompt_user_for_conversion(self): - # Prompt the user about whether they would like to use the setup.py - # conversion utility to generate a setup.cfg or generate the setup.cfg - # from scratch - answer = ask_yn(('A legacy setup.py has been found.\n' - 'Would you like to convert it to a setup.cfg?'), - default="y", - helptext=_helptext['setup.py found']) - return convert_yn_to_bool(answer) - - def _dotted_packages(self, data): - packages = sorted(data) - modified_pkgs = [] - for pkg in packages: - pkg = pkg.lstrip('./') - pkg = pkg.replace('/', '.') - modified_pkgs.append(pkg) - return modified_pkgs - - def _write_cfg(self): - if os.path.exists(_FILENAME): - if os.path.exists('%s.old' % _FILENAME): - message = ("ERROR: %(name)s.old backup exists, please check " - "that current %(name)s is correct and remove " - "%(name)s.old" % {'name': _FILENAME}) - logger.error(message) - return - shutil.move(_FILENAME, '%s.old' % _FILENAME) - - with open(_FILENAME, 'w', encoding='utf-8') as fp: - fp.write('[metadata]\n') - # TODO use metadata module 
instead of hard-coding field-specific - # behavior here - - # simple string entries - for name in ('name', 'version', 'summary', 'download_url'): - fp.write('%s = %s\n' % (name, self.data.get(name, 'UNKNOWN'))) - - # optional string entries - if 'keywords' in self.data and self.data['keywords']: - # XXX shoud use comma to separate, not space - fp.write('keywords = %s\n' % ' '.join(self.data['keywords'])) - for name in ('home_page', 'author', 'author_email', - 'maintainer', 'maintainer_email', 'description-file'): - if name in self.data and self.data[name]: - fp.write('%s = %s\n' % (name, self.data[name])) - if 'description' in self.data: - fp.write( - 'description = %s\n' - % '\n |'.join(self.data['description'].split('\n'))) - - # multiple use string entries - for name in ('platform', 'supported-platform', 'classifier', - 'requires-dist', 'provides-dist', 'obsoletes-dist', - 'requires-external'): - if not(name in self.data and self.data[name]): - continue - fp.write('%s = ' % name) - fp.write(''.join(' %s\n' % val - for val in self.data[name]).lstrip()) - - fp.write('\n[files]\n') - - for name in ('packages', 'modules', 'scripts', 'extra_files'): - if not(name in self.data and self.data[name]): - continue - fp.write('%s = %s\n' - % (name, '\n '.join(self.data[name]).strip())) - - if self.data.get('package_data'): - fp.write('package_data =\n') - for pkg, spec in sorted(self.data['package_data'].items()): - # put one spec per line, indented under the package name - indent = ' ' * (len(pkg) + 7) - spec = ('\n' + indent).join(spec) - fp.write(' %s = %s\n' % (pkg, spec)) - fp.write('\n') - - if self.data.get('resources'): - fp.write('resources =\n') - for src, dest in self.data['resources']: - fp.write(' %s = %s\n' % (src, dest)) - fp.write('\n') - - os.chmod(_FILENAME, 0o644) - logger.info('Wrote "%s".' % _FILENAME) - - def convert_py_to_cfg(self): - """Generate a setup.cfg from an existing setup.py. - - It only exports the distutils metadata (setuptools specific metadata - is not currently supported). - """ - data = self.data - - def setup_mock(**attrs): - """Mock the setup(**attrs) in order to retrieve metadata.""" - - # TODO use config and metadata instead of Distribution - from distutils.dist import Distribution - dist = Distribution(attrs) - dist.parse_config_files() - - # 1. retrieve metadata fields that are quite similar in - # PEP 314 and PEP 345 - labels = (('name',) * 2, - ('version',) * 2, - ('author',) * 2, - ('author_email',) * 2, - ('maintainer',) * 2, - ('maintainer_email',) * 2, - ('description', 'summary'), - ('long_description', 'description'), - ('url', 'home_page'), - ('platforms', 'platform'), - ('provides', 'provides-dist'), - ('obsoletes', 'obsoletes-dist'), - ('requires', 'requires-dist')) - - get = lambda lab: getattr(dist.metadata, lab.replace('-', '_')) - data.update((new, get(old)) for old, new in labels if get(old)) - - # 2. 
retrieve data that requires special processing - data['classifier'].update(dist.get_classifiers() or []) - data['scripts'].extend(dist.scripts or []) - data['packages'].extend(dist.packages or []) - data['modules'].extend(dist.py_modules or []) - # 2.1 data_files -> resources - if dist.data_files: - if (len(dist.data_files) < 2 or - isinstance(dist.data_files[1], str)): - dist.data_files = [('', dist.data_files)] - # add tokens in the destination paths - vars = {'distribution.name': data['name']} - path_tokens = sysconfig.get_paths(vars=vars).items() - # sort tokens to use the longest one first - path_tokens = sorted(path_tokens, key=lambda x: len(x[1])) - for dest, srcs in (dist.data_files or []): - dest = os.path.join(sys.prefix, dest) - dest = dest.replace(os.path.sep, '/') - for tok, path in path_tokens: - path = path.replace(os.path.sep, '/') - if not dest.startswith(path): - continue - - dest = ('{%s}' % tok) + dest[len(path):] - files = [('/ '.join(src.rsplit('/', 1)), dest) - for src in srcs] - data['resources'].extend(files) - - # 2.2 package_data - data['package_data'] = dist.package_data.copy() - - # Use README file if its content is the desciption - if "description" in data: - ref = md5(re.sub('\s', '', - self.data['description']).lower().encode()) - ref = ref.digest() - for readme in glob.glob('README*'): - with open(readme, encoding='utf-8') as fp: - contents = fp.read() - contents = re.sub('\s', '', contents.lower()).encode() - val = md5(contents).digest() - if val == ref: - del data['description'] - data['description-file'] = readme - break - - # apply monkey patch to distutils (v1) and setuptools (if needed) - # (abort the feature if distutils v1 has been killed) - try: - from distutils import core - core.setup # make sure it's not d2 maskerading as d1 - except (ImportError, AttributeError): - return - saved_setups = [(core, core.setup)] - core.setup = setup_mock - try: - import setuptools - except ImportError: - pass - else: - saved_setups.append((setuptools, setuptools.setup)) - setuptools.setup = setup_mock - # get metadata by executing the setup.py with the patched setup(...) - success = False # for python < 2.4 - try: - load_setup() - success = True - finally: # revert monkey patches - for patched_module, original_setup in saved_setups: - patched_module.setup = original_setup - if not self.data: - raise ValueError('Unable to load metadata from setup.py') - return success - - def inspect(self): - """Inspect the current working diretory for a name and version. - - This information is harvested in where the directory is named - like [name]-[version]. - """ - dir_name = os.path.basename(os.getcwd()) - self.data['name'] = dir_name - match = re.match(r'(.*)-(\d.+)', dir_name) - if match: - self.data['name'] = match.group(1) - self.data['version'] = match.group(2) - # TODO needs testing! 
- if not is_valid_version(self.data['version']): - msg = "Invalid version discovered: %s" % self.data['version'] - raise ValueError(msg) - - def query_user(self): - self.data['name'] = ask('Project name', self.data['name'], - _helptext['name']) - - self.data['version'] = ask('Current version number', - self.data.get('version'), _helptext['version']) - self.data['summary'] = ask('Project description summary', - self.data.get('summary'), _helptext['summary'], - lengthy=True) - self.data['author'] = ask('Author name', - self.data.get('author'), _helptext['author']) - self.data['author_email'] = ask('Author email address', - self.data.get('author_email'), _helptext['author_email']) - self.data['home_page'] = ask('Project home page', - self.data.get('home_page'), _helptext['home_page'], - required=False) - - if ask_yn('Do you want me to automatically build the file list ' - 'with everything I can find in the current directory? ' - 'If you say no, you will have to define them manually.') == 'y': - self._find_files() - else: - while ask_yn('Do you want to add a single module?' - ' (you will be able to add full packages next)', - helptext=_helptext['modules']) == 'y': - self._set_multi('Module name', 'modules') - - while ask_yn('Do you want to add a package?', - helptext=_helptext['packages']) == 'y': - self._set_multi('Package name', 'packages') - - while ask_yn('Do you want to add an extra file?', - helptext=_helptext['extra_files']) == 'y': - self._set_multi('Extra file/dir name', 'extra_files') - - if ask_yn('Do you want to set Trove classifiers?', - helptext=_helptext['do_classifier']) == 'y': - self.set_classifier() - - def _find_files(self): - # we are looking for python modules and packages, - # other stuff are added as regular files - pkgs = self.data['packages'] - modules = self.data['modules'] - extra_files = self.data['extra_files'] - - def is_package(path): - return os.path.exists(os.path.join(path, '__init__.py')) - - curdir = os.getcwd() - scanned = [] - _pref = ['lib', 'include', 'dist', 'build', '.', '~'] - _suf = ['.pyc'] - - def to_skip(path): - path = relative(path) - - for pref in _pref: - if path.startswith(pref): - return True - - for suf in _suf: - if path.endswith(suf): - return True - - return False - - def relative(path): - return path[len(curdir) + 1:] - - def dotted(path): - res = relative(path).replace(os.path.sep, '.') - if res.endswith('.py'): - res = res[:-len('.py')] - return res - - # first pass: packages - for root, dirs, files in os.walk(curdir): - if to_skip(root): - continue - for dir_ in sorted(dirs): - if to_skip(dir_): - continue - fullpath = os.path.join(root, dir_) - dotted_name = dotted(fullpath) - if is_package(fullpath) and dotted_name not in pkgs: - pkgs.append(dotted_name) - scanned.append(fullpath) - - # modules and extra files - for root, dirs, files in os.walk(curdir): - if to_skip(root): - continue - - if any(root.startswith(path) for path in scanned): - continue - - for file in sorted(files): - fullpath = os.path.join(root, file) - if to_skip(fullpath): - continue - # single module? 
- if os.path.splitext(file)[-1] == '.py': - modules.append(dotted(fullpath)) - else: - extra_files.append(relative(fullpath)) - - def _set_multi(self, question, name): - existing_values = self.data[name] - value = ask(question, helptext=_helptext[name]).strip() - if value not in existing_values: - existing_values.append(value) - - def set_classifier(self): - self.set_maturity_status(self.classifiers) - self.set_license(self.classifiers) - self.set_other_classifier(self.classifiers) - - def set_other_classifier(self, classifiers): - if ask_yn('Do you want to set other trove identifiers?', 'n', - _helptext['trove_generic']) != 'y': - return - self.walk_classifiers(classifiers, [CLASSIFIERS], '') - - def walk_classifiers(self, classifiers, trovepath, desc): - trove = trovepath[-1] - - if not trove: - return - - for key in sorted(trove): - if len(trove[key]) == 0: - if ask_yn('Add "%s"' % desc[4:] + ' :: ' + key, 'n') == 'y': - classifiers.add(desc[4:] + ' :: ' + key) - continue - - if ask_yn('Do you want to set items under\n "%s" (%d sub-items)?' - % (key, len(trove[key])), 'n', - _helptext['trove_generic']) == 'y': - self.walk_classifiers(classifiers, trovepath + [trove[key]], - desc + ' :: ' + key) - - def set_license(self, classifiers): - while True: - license = ask('What license do you use?', - helptext=_helptext['trove_license'], required=False) - if not license: - return - - license_words = license.lower().split(' ') - found_list = [] - - for index, licence in LICENCES: - for word in license_words: - if word in licence: - found_list.append(index) - break - - if len(found_list) == 0: - logger.error('Could not find a matching license for "%s"' % - license) - continue - - question = 'Matching licenses:\n\n' - - for index, list_index in enumerate(found_list): - question += ' %s) %s\n' % (index + 1, - _CLASSIFIERS_LIST[list_index]) - - question += ('\nType the number of the license you wish to use or ' - '? 
to try again:') - choice = ask(question, required=False) - - if choice == '?': - continue - if choice == '': - return - - try: - index = found_list[int(choice) - 1] - except ValueError: - logger.error( - "Invalid selection, type a number from the list above.") - - classifiers.add(_CLASSIFIERS_LIST[index]) - - def set_maturity_status(self, classifiers): - maturity_name = lambda mat: mat.split('- ')[-1] - maturity_question = '''\ - Please select the project status: - - %s - - Status''' % '\n'.join('%s - %s' % (i, maturity_name(n)) - for i, n in enumerate(PROJECT_MATURITY)) - while True: - choice = ask(dedent(maturity_question), required=False) - - if choice: - try: - choice = int(choice) - 1 - key = PROJECT_MATURITY[choice] - classifiers.add(key) - return - except (IndexError, ValueError): - logger.error( - "Invalid selection, type a single digit number.") - - -def main(): - """Main entry point.""" - program = MainProgram() - # # uncomment when implemented - # if not program.load_existing_setup_script(): - # program.inspect_directory() - # program.query_user() - # program.update_config_file() - # program.write_setup_script() - # packaging.util.cfg_to_args() - program() diff --git a/Lib/packaging/database.py b/Lib/packaging/database.py deleted file mode 100644 --- a/Lib/packaging/database.py +++ /dev/null @@ -1,651 +0,0 @@ -"""PEP 376 implementation.""" - -import os -import re -import csv -import sys -import zipimport -from io import StringIO -from hashlib import md5 - -from packaging import logger -from packaging.errors import PackagingError -from packaging.version import suggest_normalized_version, VersionPredicate -from packaging.metadata import Metadata - - -__all__ = [ - 'Distribution', 'EggInfoDistribution', 'distinfo_dirname', - 'get_distributions', 'get_distribution', 'get_file_users', - 'provides_distribution', 'obsoletes_distribution', - 'enable_cache', 'disable_cache', 'clear_cache', - # XXX these functions' names look like get_file_users but are not related - 'get_file_path', 'get_file'] - - -# TODO update docs - -DIST_FILES = ('INSTALLER', 'METADATA', 'RECORD', 'REQUESTED', 'RESOURCES') - -# Cache -_cache_name = {} # maps names to Distribution instances -_cache_name_egg = {} # maps names to EggInfoDistribution instances -_cache_path = {} # maps paths to Distribution instances -_cache_path_egg = {} # maps paths to EggInfoDistribution instances -_cache_generated = False # indicates if .dist-info distributions are cached -_cache_generated_egg = False # indicates if .dist-info and .egg are cached -_cache_enabled = True - - -def enable_cache(): - """ - Enables the internal cache. - - Note that this function will not clear the cache in any case, for that - functionality see :func:`clear_cache`. - """ - global _cache_enabled - - _cache_enabled = True - - -def disable_cache(): - """ - Disables the internal cache. - - Note that this function will not clear the cache in any case, for that - functionality see :func:`clear_cache`. - """ - global _cache_enabled - - _cache_enabled = False - - -def clear_cache(): - """ Clears the internal cache. 
""" - global _cache_generated, _cache_generated_egg - - _cache_name.clear() - _cache_name_egg.clear() - _cache_path.clear() - _cache_path_egg.clear() - _cache_generated = False - _cache_generated_egg = False - - -def _yield_distributions(include_dist, include_egg, paths): - """ - Yield .dist-info and .egg(-info) distributions, based on the arguments - - :parameter include_dist: yield .dist-info distributions - :parameter include_egg: yield .egg(-info) distributions - """ - for path in paths: - realpath = os.path.realpath(path) - if not os.path.isdir(realpath): - continue - for dir in os.listdir(realpath): - dist_path = os.path.join(realpath, dir) - if include_dist and dir.endswith('.dist-info'): - yield Distribution(dist_path) - elif include_egg and (dir.endswith('.egg-info') or - dir.endswith('.egg')): - yield EggInfoDistribution(dist_path) - - -def _generate_cache(use_egg_info, paths): - global _cache_generated, _cache_generated_egg - - if _cache_generated_egg or (_cache_generated and not use_egg_info): - return - else: - gen_dist = not _cache_generated - gen_egg = use_egg_info - - for dist in _yield_distributions(gen_dist, gen_egg, paths): - if isinstance(dist, Distribution): - _cache_path[dist.path] = dist - if dist.name not in _cache_name: - _cache_name[dist.name] = [] - _cache_name[dist.name].append(dist) - else: - _cache_path_egg[dist.path] = dist - if dist.name not in _cache_name_egg: - _cache_name_egg[dist.name] = [] - _cache_name_egg[dist.name].append(dist) - - if gen_dist: - _cache_generated = True - if gen_egg: - _cache_generated_egg = True - - -class Distribution: - """Created with the *path* of the ``.dist-info`` directory provided to the - constructor. It reads the metadata contained in ``METADATA`` when it is - instantiated.""" - - name = '' - """The name of the distribution.""" - - version = '' - """The version of the distribution.""" - - metadata = None - """A :class:`packaging.metadata.Metadata` instance loaded with - the distribution's ``METADATA`` file.""" - - requested = False - """A boolean that indicates whether the ``REQUESTED`` metadata file is - present (in other words, whether the package was installed by user - request or it was installed as a dependency).""" - - def __init__(self, path): - if _cache_enabled and path in _cache_path: - self.metadata = _cache_path[path].metadata - else: - metadata_path = os.path.join(path, 'METADATA') - self.metadata = Metadata(path=metadata_path) - - self.name = self.metadata['Name'] - self.version = self.metadata['Version'] - self.path = path - - if _cache_enabled and path not in _cache_path: - _cache_path[path] = self - - def __repr__(self): - return '' % ( - self.name, self.version, self.path) - - def _get_records(self, local=False): - results = [] - with self.get_distinfo_file('RECORD') as record: - record_reader = csv.reader(record, delimiter=',', - lineterminator='\n') - for row in record_reader: - missing = [None for i in range(len(row), 3)] - path, checksum, size = row + missing - if local: - path = path.replace('/', os.sep) - path = os.path.join(sys.prefix, path) - results.append((path, checksum, size)) - return results - - def get_resource_path(self, relative_path): - with self.get_distinfo_file('RESOURCES') as resources_file: - resources_reader = csv.reader(resources_file, delimiter=',', - lineterminator='\n') - for relative, destination in resources_reader: - if relative == relative_path: - return destination - raise KeyError( - 'no resource file with relative path %r is installed' % - relative_path) - - def 
list_installed_files(self, local=False): - """ - Iterates over the ``RECORD`` entries and returns a tuple - ``(path, md5, size)`` for each line. If *local* is ``True``, - the returned path is transformed into a local absolute path. - Otherwise the raw value from RECORD is returned. - - A local absolute path is an absolute path in which occurrences of - ``'/'`` have been replaced by the system separator given by ``os.sep``. - - :parameter local: flag to say if the path should be returned as a local - absolute path - - :type local: boolean - :returns: iterator of (path, md5, size) - """ - for result in self._get_records(local): - yield result - - def uses(self, path): - """ - Returns ``True`` if path is listed in ``RECORD``. *path* can be a local - absolute path or a relative ``'/'``-separated path. - - :rtype: boolean - """ - for p, checksum, size in self._get_records(): - local_absolute = os.path.join(sys.prefix, p) - if path == p or path == local_absolute: - return True - return False - - def get_distinfo_file(self, path, binary=False): - """ - Returns a file located under the ``.dist-info`` directory. Returns a - ``file`` instance for the file pointed by *path*. - - :parameter path: a ``'/'``-separated path relative to the - ``.dist-info`` directory or an absolute path; - If *path* is an absolute path and doesn't start - with the ``.dist-info`` directory path, - a :class:`PackagingError` is raised - :type path: string - :parameter binary: If *binary* is ``True``, opens the file in read-only - binary mode (``rb``), otherwise opens it in - read-only mode (``r``). - :rtype: file object - """ - open_flags = 'r' - if binary: - open_flags += 'b' - - # Check if it is an absolute path # XXX use relpath, add tests - if path.find(os.sep) >= 0: - # it's an absolute path? - distinfo_dirname, path = path.split(os.sep)[-2:] - if distinfo_dirname != self.path.split(os.sep)[-1]: - raise PackagingError( - 'dist-info file %r does not belong to the %r %s ' - 'distribution' % (path, self.name, self.version)) - - # The file must be relative - if path not in DIST_FILES: - raise PackagingError('invalid path for a dist-info file: %r' % - path) - - path = os.path.join(self.path, path) - return open(path, open_flags) - - def list_distinfo_files(self, local=False): - """ - Iterates over the ``RECORD`` entries and returns paths for each line if - the path is pointing to a file located in the ``.dist-info`` directory - or one of its subdirectories. - - :parameter local: If *local* is ``True``, each returned path is - transformed into a local absolute path. Otherwise the - raw value from ``RECORD`` is returned. - :type local: boolean - :returns: iterator of paths - """ - for path, checksum, size in self._get_records(local): - # XXX add separator or use real relpath algo - if path.startswith(self.path): - yield path - - def __eq__(self, other): - return isinstance(other, Distribution) and self.path == other.path - - # See http://docs.python.org/reference/datamodel#object.__hash__ - __hash__ = object.__hash__ - - -class EggInfoDistribution: - """Created with the *path* of the ``.egg-info`` directory or file provided - to the constructor. 
It reads the metadata contained in the file itself, or - if the given path happens to be a directory, the metadata is read from the - file ``PKG-INFO`` under that directory.""" - - name = '' - """The name of the distribution.""" - - version = '' - """The version of the distribution.""" - - metadata = None - """A :class:`packaging.metadata.Metadata` instance loaded with - the distribution's ``METADATA`` file.""" - - _REQUIREMENT = re.compile( - r'(?P<name>[-A-Za-z0-9_.]+)\s*' - r'(?P<first>(?:<|<=|!=|==|>=|>)[-A-Za-z0-9_.]+)?\s*' - r'(?P<rest>(?:\s*,\s*(?:<|<=|!=|==|>=|>)[-A-Za-z0-9_.]+)*)\s*' - r'(?P<extras>\[.*\])?') - - def __init__(self, path): - self.path = path - if _cache_enabled and path in _cache_path_egg: - self.metadata = _cache_path_egg[path].metadata - self.name = self.metadata['Name'] - self.version = self.metadata['Version'] - return - - # reused from Distribute's pkg_resources - def yield_lines(strs): - """Yield non-empty/non-comment lines of a ``basestring`` - or sequence""" - if isinstance(strs, str): - for s in strs.splitlines(): - s = s.strip() - # skip blank lines/comments - if s and not s.startswith('#'): - yield s - else: - for ss in strs: - for s in yield_lines(ss): - yield s - - requires = None - - if path.endswith('.egg'): - if os.path.isdir(path): - meta_path = os.path.join(path, 'EGG-INFO', 'PKG-INFO') - self.metadata = Metadata(path=meta_path) - try: - req_path = os.path.join(path, 'EGG-INFO', 'requires.txt') - with open(req_path, 'r') as fp: - requires = fp.read() - except IOError: - requires = None - else: - # FIXME handle the case where zipfile is not available - zipf = zipimport.zipimporter(path) - fileobj = StringIO( - zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8')) - self.metadata = Metadata(fileobj=fileobj) - try: - requires = zipf.get_data('EGG-INFO/requires.txt') - except IOError: - requires = None - self.name = self.metadata['Name'] - self.version = self.metadata['Version'] - - elif path.endswith('.egg-info'): - if os.path.isdir(path): - path = os.path.join(path, 'PKG-INFO') - try: - with open(os.path.join(path, 'requires.txt'), 'r') as fp: - requires = fp.read() - except IOError: - requires = None - self.metadata = Metadata(path=path) - self.name = self.metadata['Name'] - self.version = self.metadata['Version'] - - else: - raise ValueError('path must end with .egg-info or .egg, got %r' % - path) - - if requires is not None: - if self.metadata['Metadata-Version'] == '1.1': - # we can't have 1.1 metadata *and* Setuptools requires - for field in ('Obsoletes', 'Requires', 'Provides'): - del self.metadata[field] - - reqs = [] - - if requires is not None: - for line in yield_lines(requires): - if line.startswith('['): - logger.warning( - 'extensions in requires.txt are not supported ' - '(used by %r %s)', self.name, self.version) - break - else: - match = self._REQUIREMENT.match(line.strip()) - if not match: - # this happens when we encounter extras; since they - # are written at the end of the file we just exit - break - else: - if match.group('extras'): - msg = ('extra requirements are not supported ' - '(used by %r %s)', self.name, self.version) - logger.warning(msg, self.name) - name = match.group('name') - version = None - if match.group('first'): - version = match.group('first') - if match.group('rest'): - version += match.group('rest') - version = version.replace(' ', '') # trim spaces - if version is None: - reqs.append(name) - else: - reqs.append('%s (%s)' % (name, version)) - - if len(reqs) > 0: - self.metadata['Requires-Dist'] += reqs - - if _cache_enabled: -
_cache_path_egg[self.path] = self - - def __repr__(self): - return '<EggInfoDistribution %r %s at %r>' % ( - self.name, self.version, self.path) - - def list_installed_files(self, local=False): - - def _md5(path): - with open(path, 'rb') as f: - content = f.read() - return md5(content).hexdigest() - - def _size(path): - return os.stat(path).st_size - - path = self.path - if local: - path = path.replace('/', os.sep) - - # XXX What about scripts and data files ? - if os.path.isfile(path): - return [(path, _md5(path), _size(path))] - else: - files = [] - for root, dir, files_ in os.walk(path): - for item in files_: - item = os.path.join(root, item) - files.append((item, _md5(item), _size(item))) - return files - - return [] - - def uses(self, path): - return False - - def __eq__(self, other): - return (isinstance(other, EggInfoDistribution) and - self.path == other.path) - - # See http://docs.python.org/reference/datamodel#object.__hash__ - __hash__ = object.__hash__ - - -def distinfo_dirname(name, version): - """ - The *name* and *version* parameters are converted into their - filename-escaped form, i.e. any ``'-'`` characters are replaced - with ``'_'`` other than the one in ``'dist-info'`` and the one - separating the name from the version number. - - :parameter name: is converted to a standard distribution name by replacing - any runs of non- alphanumeric characters with a single - ``'-'``. - :type name: string - :parameter version: is converted to a standard version string. Spaces - become dots, and all other non-alphanumeric characters - (except dots) become dashes, with runs of multiple - dashes condensed to a single dash. - :type version: string - :returns: directory name - :rtype: string""" - file_extension = '.dist-info' - name = name.replace('-', '_') - normalized_version = suggest_normalized_version(version) - # Because this is a lookup procedure, something will be returned even if - # it is a version that cannot be normalized - if normalized_version is None: - # Unable to achieve normality? - normalized_version = version - return '-'.join([name, normalized_version]) + file_extension - - -def get_distributions(use_egg_info=False, paths=None): - """ - Provides an iterator that looks for ``.dist-info`` directories in - ``sys.path`` and returns :class:`Distribution` instances for each one of - them. If the parameters *use_egg_info* is ``True``, then the ``.egg-info`` - files and directores are iterated as well. - - :rtype: iterator of :class:`Distribution` and :class:`EggInfoDistribution` - instances - """ - if paths is None: - paths = sys.path - - if not _cache_enabled: - for dist in _yield_distributions(True, use_egg_info, paths): - yield dist - else: - _generate_cache(use_egg_info, paths) - - for dist in _cache_path.values(): - yield dist - - if use_egg_info: - for dist in _cache_path_egg.values(): - yield dist - - -def get_distribution(name, use_egg_info=False, paths=None): - """ - Scans all elements in ``sys.path`` and looks for all directories - ending with ``.dist-info``. Returns a :class:`Distribution` - corresponding to the ``.dist-info`` directory that contains the - ``METADATA`` that matches *name* for the *name* metadata field. - If no distribution exists with the given *name* and the parameter - *use_egg_info* is set to ``True``, then all files and directories ending - with ``.egg-info`` are scanned. A :class:`EggInfoDistribution` instance is - returned if one is found that has metadata that matches *name* for the - *name* metadata field.
- - This function only returns the first result found, as no more than one - value is expected. If the directory is not found, ``None`` is returned. - - :rtype: :class:`Distribution` or :class:`EggInfoDistribution` or None - """ - if paths is None: - paths = sys.path - - if not _cache_enabled: - for dist in _yield_distributions(True, use_egg_info, paths): - if dist.name == name: - return dist - else: - _generate_cache(use_egg_info, paths) - - if name in _cache_name: - return _cache_name[name][0] - elif use_egg_info and name in _cache_name_egg: - return _cache_name_egg[name][0] - else: - return None - - -def obsoletes_distribution(name, version=None, use_egg_info=False): - """ - Iterates over all distributions to find which distributions obsolete - *name*. - - If a *version* is provided, it will be used to filter the results. - If the argument *use_egg_info* is set to ``True``, then ``.egg-info`` - distributions will be considered as well. - - :type name: string - :type version: string - :parameter name: - """ - for dist in get_distributions(use_egg_info): - obsoleted = (dist.metadata['Obsoletes-Dist'] + - dist.metadata['Obsoletes']) - for obs in obsoleted: - o_components = obs.split(' ', 1) - if len(o_components) == 1 or version is None: - if name == o_components[0]: - yield dist - break - else: - try: - predicate = VersionPredicate(obs) - except ValueError: - raise PackagingError( - 'distribution %r has ill-formed obsoletes field: ' - '%r' % (dist.name, obs)) - if name == o_components[0] and predicate.match(version): - yield dist - break - - -def provides_distribution(name, version=None, use_egg_info=False): - """ - Iterates over all distributions to find which distributions provide *name*. - If a *version* is provided, it will be used to filter the results. Scans - all elements in ``sys.path`` and looks for all directories ending with - ``.dist-info``. Returns a :class:`Distribution` corresponding to the - ``.dist-info`` directory that contains a ``METADATA`` that matches *name* - for the name metadata. If the argument *use_egg_info* is set to ``True``, - then all files and directories ending with ``.egg-info`` are considered - as well and returns an :class:`EggInfoDistribution` instance. - - This function only returns the first result found, since no more than - one values are expected. If the directory is not found, returns ``None``. - - :parameter version: a version specifier that indicates the version - required, conforming to the format in ``PEP-345`` - - :type name: string - :type version: string - """ - predicate = None - if not version is None: - try: - predicate = VersionPredicate(name + ' (' + version + ')') - except ValueError: - raise PackagingError('invalid name or version: %r, %r' % - (name, version)) - - for dist in get_distributions(use_egg_info): - provided = dist.metadata['Provides-Dist'] + dist.metadata['Provides'] - - for p in provided: - p_components = p.rsplit(' ', 1) - if len(p_components) == 1 or predicate is None: - if name == p_components[0]: - yield dist - break - else: - p_name, p_ver = p_components - if len(p_ver) < 2 or p_ver[0] != '(' or p_ver[-1] != ')': - raise PackagingError( - 'distribution %r has invalid Provides field: %r' % - (dist.name, p)) - p_ver = p_ver[1:-1] # trim off the parenthesis - if p_name == name and predicate.match(p_ver): - yield dist - break - - -def get_file_users(path): - """ - Iterates over all distributions to find out which distributions use - *path*. 
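
# Illustration (not part of the removed module): a minimal sketch of how the
# lookup helpers above were typically used.  The project name, version and
# file path below are made-up placeholders.
    from packaging.database import (get_distribution, provides_distribution,
                                    get_file_users)

    dist = get_distribution('example-project', use_egg_info=True)
    if dist is not None:
        print(dist.name, dist.version)
        for path, checksum, size in dist.list_installed_files():
            print(path, size)
    for d in provides_distribution('example-project', '1.0'):
        print('provided by', d.name)
    for d in get_file_users('example/__init__.py'):
        print('recorded in RECORD of', d.name)
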
- - :parameter path: can be a local absolute path or a relative - ``'/'``-separated path. - :type path: string - :rtype: iterator of :class:`Distribution` instances - """ - for dist in get_distributions(): - if dist.uses(path): - yield dist - - -def get_file_path(distribution_name, relative_path): - """Return the path to a resource file.""" - dist = get_distribution(distribution_name) - if dist is not None: - return dist.get_resource_path(relative_path) - raise LookupError('no distribution named %r found' % distribution_name) - - -def get_file(distribution_name, relative_path, *args, **kwargs): - """Open and return a resource file.""" - return open(get_file_path(distribution_name, relative_path), - *args, **kwargs) diff --git a/Lib/packaging/depgraph.py b/Lib/packaging/depgraph.py deleted file mode 100644 --- a/Lib/packaging/depgraph.py +++ /dev/null @@ -1,270 +0,0 @@ -"""Class and functions dealing with dependencies between distributions. - -This module provides a DependencyGraph class to represent the -dependencies between distributions. Auxiliary functions can generate a -graph, find reverse dependencies, and print a graph in DOT format. -""" - -import sys - -from io import StringIO -from packaging.errors import PackagingError -from packaging.version import VersionPredicate, IrrationalVersionError - -__all__ = ['DependencyGraph', 'generate_graph', 'dependent_dists', - 'graph_to_dot'] - - -class DependencyGraph: - """ - Represents a dependency graph between distributions. - - The dependency relationships are stored in an ``adjacency_list`` that maps - distributions to a list of ``(other, label)`` tuples where ``other`` - is a distribution and the edge is labeled with ``label`` (i.e. the version - specifier, if such was provided). Also, for more efficient traversal, for - every distribution ``x``, a list of predecessors is kept in - ``reverse_list[x]``. An edge from distribution ``a`` to - distribution ``b`` means that ``a`` depends on ``b``. If any missing - dependencies are found, they are stored in ``missing``, which is a - dictionary that maps distributions to a list of requirements that were not - provided by any other distributions. - """ - - def __init__(self): - self.adjacency_list = {} - self.reverse_list = {} - self.missing = {} - - def add_distribution(self, distribution): - """Add the *distribution* to the graph. - - :type distribution: :class:`packaging.database.Distribution` or - :class:`packaging.database.EggInfoDistribution` - """ - self.adjacency_list[distribution] = [] - self.reverse_list[distribution] = [] - self.missing[distribution] = [] - - def add_edge(self, x, y, label=None): - """Add an edge from distribution *x* to distribution *y* with the given - *label*. - - :type x: :class:`packaging.database.Distribution` or - :class:`packaging.database.EggInfoDistribution` - :type y: :class:`packaging.database.Distribution` or - :class:`packaging.database.EggInfoDistribution` - :type label: ``str`` or ``None`` - """ - self.adjacency_list[x].append((y, label)) - # multiple edges are allowed, so be careful - if x not in self.reverse_list[y]: - self.reverse_list[y].append(x) - - def add_missing(self, distribution, requirement): - """ - Add a missing *requirement* for the given *distribution*. 
- - :type distribution: :class:`packaging.database.Distribution` or - :class:`packaging.database.EggInfoDistribution` - :type requirement: ``str`` - """ - self.missing[distribution].append(requirement) - - def _repr_dist(self, dist): - return '%r %s' % (dist.name, dist.version) - - def repr_node(self, dist, level=1): - """Prints only a subgraph""" - output = [] - output.append(self._repr_dist(dist)) - for other, label in self.adjacency_list[dist]: - dist = self._repr_dist(other) - if label is not None: - dist = '%s [%s]' % (dist, label) - output.append(' ' * level + str(dist)) - suboutput = self.repr_node(other, level + 1) - subs = suboutput.split('\n') - output.extend(subs[1:]) - return '\n'.join(output) - - def __repr__(self): - """Representation of the graph""" - output = [] - for dist, adjs in self.adjacency_list.items(): - output.append(self.repr_node(dist)) - return '\n'.join(output) - - -def graph_to_dot(graph, f, skip_disconnected=True): - """Writes a DOT output for the graph to the provided file *f*. - - If *skip_disconnected* is set to ``True``, then all distributions - that are not dependent on any other distribution are skipped. - - :type f: has to support ``file``-like operations - :type skip_disconnected: ``bool`` - """ - disconnected = [] - - f.write("digraph dependencies {\n") - for dist, adjs in graph.adjacency_list.items(): - if len(adjs) == 0 and not skip_disconnected: - disconnected.append(dist) - for other, label in adjs: - if not label is None: - f.write('"%s" -> "%s" [label="%s"]\n' % - (dist.name, other.name, label)) - else: - f.write('"%s" -> "%s"\n' % (dist.name, other.name)) - if not skip_disconnected and len(disconnected) > 0: - f.write('subgraph disconnected {\n') - f.write('label = "Disconnected"\n') - f.write('bgcolor = red\n') - - for dist in disconnected: - f.write('"%s"' % dist.name) - f.write('\n') - f.write('}\n') - f.write('}\n') - - -def generate_graph(dists): - """Generates a dependency graph from the given distributions. 
- - :parameter dists: a list of distributions - :type dists: list of :class:`packaging.database.Distribution` and - :class:`packaging.database.EggInfoDistribution` instances - :rtype: a :class:`DependencyGraph` instance - """ - graph = DependencyGraph() - provided = {} # maps names to lists of (version, dist) tuples - - # first, build the graph and find out the provides - for dist in dists: - graph.add_distribution(dist) - provides = (dist.metadata['Provides-Dist'] + - dist.metadata['Provides'] + - ['%s (%s)' % (dist.name, dist.version)]) - - for p in provides: - comps = p.strip().rsplit(" ", 1) - name = comps[0] - version = None - if len(comps) == 2: - version = comps[1] - if len(version) < 3 or version[0] != '(' or version[-1] != ')': - raise PackagingError('distribution %r has ill-formed' - 'provides field: %r' % (dist.name, p)) - version = version[1:-1] # trim off parenthesis - if name not in provided: - provided[name] = [] - provided[name].append((version, dist)) - - # now make the edges - for dist in dists: - requires = dist.metadata['Requires-Dist'] + dist.metadata['Requires'] - for req in requires: - try: - predicate = VersionPredicate(req) - except IrrationalVersionError: - # XXX compat-mode if cannot read the version - name = req.split()[0] - predicate = VersionPredicate(name) - - name = predicate.name - - if name not in provided: - graph.add_missing(dist, req) - else: - matched = False - for version, provider in provided[name]: - try: - match = predicate.match(version) - except IrrationalVersionError: - # XXX small compat-mode - if version.split(' ') == 1: - match = True - else: - match = False - - if match: - graph.add_edge(dist, provider, req) - matched = True - break - if not matched: - graph.add_missing(dist, req) - return graph - - -def dependent_dists(dists, dist): - """Recursively generate a list of distributions from *dists* that are - dependent on *dist*. 
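
# Illustration: a sketch of how the graph helpers above fit together; the
# input is whatever is installed, and the DOT output can go to any file-like
# object.
    from io import StringIO
    from packaging.database import get_distributions
    from packaging.depgraph import generate_graph, graph_to_dot, dependent_dists

    dists = list(get_distributions(use_egg_info=True))
    graph = generate_graph(dists)
    buf = StringIO()
    graph_to_dot(graph, buf, skip_disconnected=True)
    print(buf.getvalue())                       # DOT source for the graph
    if dists:
        print(dependent_dists(dists, dists[0]))  # reverse dependencies of one dist
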
- - :param dists: a list of distributions - :param dist: a distribution, member of *dists* for which we are interested - """ - if dist not in dists: - raise ValueError('given distribution %r is not a member of the list' % - dist.name) - graph = generate_graph(dists) - - dep = [dist] # dependent distributions - fringe = graph.reverse_list[dist] # list of nodes we should inspect - - while not len(fringe) == 0: - node = fringe.pop() - dep.append(node) - for prev in graph.reverse_list[node]: - if prev not in dep: - fringe.append(prev) - - dep.pop(0) # remove dist from dep, was there to prevent infinite loops - return dep - - -def main(): - # XXX move to run._graph - from packaging.database import get_distributions - tempout = StringIO() - try: - old = sys.stderr - sys.stderr = tempout - try: - dists = list(get_distributions(use_egg_info=True)) - graph = generate_graph(dists) - finally: - sys.stderr = old - except Exception as e: - tempout.seek(0) - tempout = tempout.read() - print('Could not generate the graph') - print(tempout) - print(e) - sys.exit(1) - - for dist, reqs in graph.missing.items(): - if len(reqs) > 0: - print("Warning: Missing dependencies for %r:" % dist.name, - ", ".join(reqs)) - # XXX replace with argparse - if len(sys.argv) == 1: - print('Dependency graph:') - print(' ', repr(graph).replace('\n', '\n ')) - sys.exit(0) - elif len(sys.argv) > 1 and sys.argv[1] in ('-d', '--dot'): - if len(sys.argv) > 2: - filename = sys.argv[2] - else: - filename = 'depgraph.dot' - - with open(filename, 'w') as f: - graph_to_dot(graph, f, True) - tempout.seek(0) - tempout = tempout.read() - print(tempout) - print('Dot file written at %r' % filename) - sys.exit(0) - else: - print('Supported option: -d [filename]') - sys.exit(1) diff --git a/Lib/packaging/dist.py b/Lib/packaging/dist.py deleted file mode 100644 --- a/Lib/packaging/dist.py +++ /dev/null @@ -1,769 +0,0 @@ -"""Class representing the project being built/installed/etc.""" - -import os -import re - -from packaging import logger -from packaging.util import strtobool, resolve_name -from packaging.config import Config -from packaging.errors import (PackagingOptionError, PackagingArgError, - PackagingModuleError, PackagingClassError) -from packaging.command import get_command_class, STANDARD_COMMANDS -from packaging.command.cmd import Command -from packaging.metadata import Metadata -from packaging.fancy_getopt import FancyGetopt - -# Regex to define acceptable Packaging command names. This is not *quite* -# the same as a Python name -- leading underscores are not allowed. The fact -# that they're very similar is no coincidence: the default naming scheme is -# to look for a Python module named after the command. -command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$') - -USAGE = """\ -usage: %(script)s [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...] - or: %(script)s --help [cmd1 cmd2 ...] - or: %(script)s --help-commands - or: %(script)s cmd --help -""" - - -def gen_usage(script_name): - script = os.path.basename(script_name) - return USAGE % {'script': script} - - -class Distribution: - """Class used to represent a project and work with it. - - Most of the work hiding behind 'pysetup run' is really done within a - Distribution instance, which farms the work out to the commands - specified on the command line. - """ - - # 'global_options' describes the command-line options that may be - # supplied to the setup script prior to any actual commands. - # Eg. 
"pysetup run -n" or "pysetup run --dry-run" both take advantage of - # these global options. This list should be kept to a bare minimum, - # since every global option is also valid as a command option -- and we - # don't want to pollute the commands with too many options that they - # have minimal control over. - global_options = [ - ('dry-run', 'n', "don't actually do anything"), - ('help', 'h', "show detailed help message"), - ('no-user-cfg', None, 'ignore pydistutils.cfg in your home directory'), - ] - - # 'common_usage' is a short (2-3 line) string describing the common - # usage of the setup script. - common_usage = """\ -Common commands: (see '--help-commands' for more) - - pysetup run build will build the project underneath 'build/' - pysetup run install will install the project -""" - - # options that are not propagated to the commands - display_options = [ - ('help-commands', None, - "list all available commands"), - ('use-2to3', None, - "use 2to3 to make source python 3.x compatible"), - ('convert-2to3-doctests', None, - "use 2to3 to convert doctests in separate text files"), - ] - display_option_names = [x[0].replace('-', '_') for x in display_options] - - # negative options are options that exclude other options - negative_opt = {} - - # -- Creation/initialization methods ------------------------------- - def __init__(self, attrs=None): - """Construct a new Distribution instance: initialize all the - attributes of a Distribution, and then use 'attrs' (a dictionary - mapping attribute names to values) to assign some of those - attributes their "real" values. (Any attributes not mentioned in - 'attrs' will be assigned to some null value: 0, None, an empty list - or dictionary, etc.) Most importantly, initialize the - 'command_obj' attribute to the empty dictionary; this will be - filled in with real command objects by 'parse_command_line()'. - """ - - # Default values for our command-line options - self.dry_run = False - self.help = False - for attr in self.display_option_names: - setattr(self, attr, False) - - # Store the configuration - self.config = Config(self) - - # Store the distribution metadata (name, version, author, and so - # forth) in a separate object -- we're getting to have enough - # information here (and enough command-line options) that it's - # worth it. - self.metadata = Metadata() - - # 'cmdclass' maps command names to class objects, so we - # can 1) quickly figure out which class to instantiate when - # we need to create a new command object, and 2) have a way - # for the setup script to override command classes - self.cmdclass = {} - - # 'script_name' and 'script_args' are usually set to sys.argv[0] - # and sys.argv[1:], but they can be overridden when the caller is - # not necessarily a setup script run from the command line. - self.script_name = None - self.script_args = None - - # 'command_options' is where we store command options between - # parsing them (from config files, the command line, etc.) and when - # they are actually needed -- ie. when the command in question is - # instantiated. It is a dictionary of dictionaries of 2-tuples: - # command_options = { command_name : { option : (source, value) } } - self.command_options = {} - - # 'dist_files' is the list of (command, pyversion, file) that - # have been created by any dist commands run so far. This is - # filled regardless of whether the run is dry or not. 
pyversion - # gives sysconfig.get_python_version() if the dist file is - # specific to a Python version, 'any' if it is good for all - # Python versions on the target platform, and '' for a source - # file. pyversion should not be used to specify minimum or - # maximum required Python versions; use the metainfo for that - # instead. - self.dist_files = [] - - # These options are really the business of various commands, rather - # than of the Distribution itself. We provide aliases for them in - # Distribution as a convenience to the developer. - self.packages = [] - self.package_data = {} - self.package_dir = None - self.py_modules = [] - self.libraries = [] - self.headers = [] - self.ext_modules = [] - self.ext_package = None - self.include_dirs = [] - self.extra_path = None - self.scripts = [] - self.data_files = {} - self.password = '' - self.use_2to3 = False - self.convert_2to3_doctests = [] - self.extra_files = [] - - # And now initialize bookkeeping stuff that can't be supplied by - # the caller at all. 'command_obj' maps command names to - # Command instances -- that's how we enforce that every command - # class is a singleton. - self.command_obj = {} - - # 'have_run' maps command names to boolean values; it keeps track - # of whether we have actually run a particular command, to make it - # cheap to "run" a command whenever we think we might need to -- if - # it's already been done, no need for expensive filesystem - # operations, we just check the 'have_run' dictionary and carry on. - # It's only safe to query 'have_run' for a command class that has - # been instantiated -- a false value will be inserted when the - # command object is created, and replaced with a true value when - # the command is successfully run. Thus it's probably best to use - # '.get()' rather than a straight lookup. - self.have_run = {} - - # Now we'll use the attrs dictionary (ultimately, keyword args from - # the setup script) to possibly override any or all of these - # distribution options. - - if attrs is not None: - # Pull out the set of command options and work on them - # specifically. Note that this order guarantees that aliased - # command options will override any supplied redundantly - # through the general options dictionary. - options = attrs.get('options') - if options is not None: - del attrs['options'] - for command, cmd_options in options.items(): - opt_dict = self.get_option_dict(command) - for opt, val in cmd_options.items(): - opt_dict[opt] = ("setup script", val) - - # Now work on the rest of the attributes. Any attribute that's - # not already defined is invalid! - for key, val in attrs.items(): - if self.metadata.is_metadata_field(key): - self.metadata[key] = val - elif hasattr(self, key): - setattr(self, key, val) - else: - logger.warning( - 'unknown argument given to Distribution: %r', key) - - # no-user-cfg is handled before other command line args - # because other args override the config files, and this - # one is needed before we can load the config files. - # If attrs['script_args'] wasn't passed, assume false. - # - # This also make sure we just look at the global options - self.want_user_cfg = True - - if self.script_args is not None: - for arg in self.script_args: - if not arg.startswith('-'): - break - if arg == '--no-user-cfg': - self.want_user_cfg = False - break - - self.finalize_options() - - def get_option_dict(self, command): - """Get the option dictionary for a given command. 
If that - command's option dictionary hasn't been created yet, then create it - and return the new dictionary; otherwise, return the existing - option dictionary. - """ - d = self.command_options.get(command) - if d is None: - d = self.command_options[command] = {} - return d - - def get_fullname(self, filesafe=False): - return self.metadata.get_fullname(filesafe) - - def dump_option_dicts(self, header=None, commands=None, indent=""): - from pprint import pformat - - if commands is None: # dump all command option dicts - commands = sorted(self.command_options) - - if header is not None: - logger.info(indent + header) - indent = indent + " " - - if not commands: - logger.info(indent + "no commands known yet") - return - - for cmd_name in commands: - opt_dict = self.command_options.get(cmd_name) - if opt_dict is None: - logger.info(indent + "no option dict for %r command", - cmd_name) - else: - logger.info(indent + "option dict for %r command:", cmd_name) - out = pformat(opt_dict) - for line in out.split('\n'): - logger.info(indent + " " + line) - - # -- Config file finding/parsing methods --------------------------- - # XXX to be removed - def parse_config_files(self, filenames=None): - return self.config.parse_config_files(filenames) - - def find_config_files(self): - return self.config.find_config_files() - - # -- Command-line parsing methods ---------------------------------- - - def parse_command_line(self): - """Parse the setup script's command line, taken from the - 'script_args' instance attribute (which defaults to 'sys.argv[1:]' - -- see 'setup()' in run.py). This list is first processed for - "global options" -- options that set attributes of the Distribution - instance. Then, it is alternately scanned for Packaging commands - and options for that command. Each new command terminates the - options for the previous command. The allowed options for a - command are determined by the 'user_options' attribute of the - command class -- thus, we have to be able to load command classes - in order to parse the command line. Any error in that 'options' - attribute raises PackagingGetoptError; any error on the - command line raises PackagingArgError. If no Packaging commands - were found on the command line, raises PackagingArgError. Return - true if command line was successfully parsed and we should carry - on with executing commands; false if no errors but we shouldn't - execute commands (currently, this only happens if user asks for - help). - """ - # - # We now have enough information to show the Macintosh dialog - # that allows the user to interactively specify the "command line". - # - toplevel_options = self._get_toplevel_options() - - # We have to parse the command line a bit at a time -- global - # options, then the first command, then its options, and so on -- - # because each command will be handled by a different class, and - # the options that are valid for a particular class aren't known - # until we have loaded the command class, which doesn't happen - # until we know what the command is. 
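
# Illustration: a sketch of driving this Distribution machinery
# programmatically; the script name, arguments and command options are
# hypothetical placeholders, not values taken from this change.
    from packaging.dist import Distribution

    d = Distribution(attrs={'script_name': 'setup.py',
                            'script_args': ['build'],
                            'options': {'build': {'build_base': 'out'}}})
    if d.parse_command_line():   # falsy when only help/display options were given
        d.run_commands()
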
- - self.commands = [] - parser = FancyGetopt(toplevel_options + self.display_options) - parser.set_negative_aliases(self.negative_opt) - args = parser.getopt(args=self.script_args, object=self) - option_order = parser.get_option_order() - - # for display options we return immediately - if self.handle_display_options(option_order): - return - - while args: - args = self._parse_command_opts(parser, args) - if args is None: # user asked for help (and got it) - return - - # Handle the cases of --help as a "global" option, ie. - # "pysetup run --help" and "pysetup run --help command ...". For the - # former, we show global options (--dry-run, etc.) - # and display-only options (--name, --version, etc.); for the - # latter, we omit the display-only options and show help for - # each command listed on the command line. - if self.help: - self._show_help(parser, - display_options=len(self.commands) == 0, - commands=self.commands) - return - - return True - - def _get_toplevel_options(self): - """Return the non-display options recognized at the top level. - - This includes options that are recognized *only* at the top - level as well as options recognized for commands. - """ - return self.global_options - - def _parse_command_opts(self, parser, args): - """Parse the command-line options for a single command. - 'parser' must be a FancyGetopt instance; 'args' must be the list - of arguments, starting with the current command (whose options - we are about to parse). Returns a new version of 'args' with - the next command at the front of the list; will be the empty - list if there are no more commands on the command line. Returns - None if the user asked for help on this command. - """ - # Pull the current command from the head of the command line - command = args[0] - if not command_re.match(command): - raise SystemExit("invalid command name %r" % command) - self.commands.append(command) - - # Dig up the command class that implements this command, so we - # 1) know that it's a valid command, and 2) know which options - # it takes. - try: - cmd_class = get_command_class(command) - except PackagingModuleError as msg: - raise PackagingArgError(msg) - - # XXX We want to push this in packaging.command - # - # Require that the command class be derived from Command -- want - # to be sure that the basic "command" interface is implemented. - for meth in ('initialize_options', 'finalize_options', 'run'): - if hasattr(cmd_class, meth): - continue - raise PackagingClassError( - 'command %r must implement %r' % (cmd_class, meth)) - - # Also make sure that the command object provides a list of its - # known options. - if not (hasattr(cmd_class, 'user_options') and - isinstance(cmd_class.user_options, list)): - raise PackagingClassError( - "command class %s must provide " - "'user_options' attribute (a list of tuples)" % cmd_class) - - # If the command class has a list of negative alias options, - # merge it in with the global negative aliases. - negative_opt = self.negative_opt - if hasattr(cmd_class, 'negative_opt'): - negative_opt = negative_opt.copy() - negative_opt.update(cmd_class.negative_opt) - - # Check for help_options in command class. They have a different - # format (tuple of four) so we need to preprocess them here. - if (hasattr(cmd_class, 'help_options') and - isinstance(cmd_class.help_options, list)): - help_options = cmd_class.help_options[:] - else: - help_options = [] - - # All commands support the global options too, just by adding - # in 'global_options'. 
- parser.set_option_table(self.global_options + - cmd_class.user_options + - help_options) - parser.set_negative_aliases(negative_opt) - args, opts = parser.getopt(args[1:]) - if hasattr(opts, 'help') and opts.help: - self._show_help(parser, display_options=False, - commands=[cmd_class]) - return - - if (hasattr(cmd_class, 'help_options') and - isinstance(cmd_class.help_options, list)): - help_option_found = False - for help_option, short, desc, func in cmd_class.help_options: - if hasattr(opts, help_option.replace('-', '_')): - help_option_found = True - if callable(func): - func() - else: - raise PackagingClassError( - "invalid help function %r for help option %r: " - "must be a callable object (function, etc.)" - % (func, help_option)) - - if help_option_found: - return - - # Put the options from the command line into their official - # holding pen, the 'command_options' dictionary. - opt_dict = self.get_option_dict(command) - for name, value in vars(opts).items(): - opt_dict[name] = ("command line", value) - - return args - - def finalize_options(self): - """Set final values for all the options on the Distribution - instance, analogous to the .finalize_options() method of Command - objects. - """ - if getattr(self, 'convert_2to3_doctests', None): - self.convert_2to3_doctests = [os.path.join(p) - for p in self.convert_2to3_doctests] - else: - self.convert_2to3_doctests = [] - - def _show_help(self, parser, global_options=True, display_options=True, - commands=[]): - """Show help for the setup script command line in the form of - several lists of command-line options. 'parser' should be a - FancyGetopt instance; do not expect it to be returned in the - same state, as its option table will be reset to make it - generate the correct help text. - - If 'global_options' is true, lists the global options: - --dry-run, etc. If 'display_options' is true, lists - the "display-only" options: --help-commands. Finally, - lists per-command help for every command name or command class - in 'commands'. - """ - if global_options: - if display_options: - options = self._get_toplevel_options() - else: - options = self.global_options - parser.set_option_table(options) - parser.print_help(self.common_usage + "\nGlobal options:") - print() - - if display_options: - parser.set_option_table(self.display_options) - parser.print_help( - "Information display options (just display " + - "information, ignore any commands)") - print() - - for command in self.commands: - if isinstance(command, type) and issubclass(command, Command): - cls = command - else: - cls = get_command_class(command) - if (hasattr(cls, 'help_options') and - isinstance(cls.help_options, list)): - parser.set_option_table(cls.user_options + cls.help_options) - else: - parser.set_option_table(cls.user_options) - parser.print_help("Options for %r command:" % cls.__name__) - print() - - print(gen_usage(self.script_name)) - - def handle_display_options(self, option_order): - """If there were any non-global "display-only" options - (--help-commands) on the command line, display the requested info and - return true; else return false. - """ - # User just wants a list of commands -- we'll print it out and stop - # processing now (ie. if they ran "setup --help-commands foo bar", - # we ignore "foo bar"). 
- if self.help_commands: - self.print_commands() - print() - print(gen_usage(self.script_name)) - return True - - # If user supplied any of the "display metadata" options, then - # display that metadata in the order in which the user supplied the - # metadata options. - any_display_options = False - is_display_option = set() - for option in self.display_options: - is_display_option.add(option[0]) - - for opt, val in option_order: - if val and opt in is_display_option: - opt = opt.replace('-', '_') - value = self.metadata[opt] - if opt in ('keywords', 'platform'): - print(','.join(value)) - elif opt in ('classifier', 'provides', 'requires', - 'obsoletes'): - print('\n'.join(value)) - else: - print(value) - any_display_options = True - - return any_display_options - - def print_command_list(self, commands, header, max_length): - """Print a subset of the list of all commands -- used by - 'print_commands()'. - """ - print(header + ":") - - for cmd in commands: - cls = self.cmdclass.get(cmd) or get_command_class(cmd) - description = getattr(cls, 'description', - '(no description available)') - - print(" %-*s %s" % (max_length, cmd, description)) - - def _get_command_groups(self): - """Helper function to retrieve all the command class names divided - into standard commands (listed in - packaging.command.STANDARD_COMMANDS) and extra commands (given in - self.cmdclass and not standard commands). - """ - extra_commands = [cmd for cmd in self.cmdclass - if cmd not in STANDARD_COMMANDS] - return STANDARD_COMMANDS, extra_commands - - def print_commands(self): - """Print out a help message listing all available commands with a - description of each. The list is divided into standard commands - (listed in packaging.command.STANDARD_COMMANDS) and extra commands - (given in self.cmdclass and not standard commands). The - descriptions come from the command class attribute - 'description'. - """ - std_commands, extra_commands = self._get_command_groups() - max_length = 0 - for cmd in (std_commands + extra_commands): - if len(cmd) > max_length: - max_length = len(cmd) - - self.print_command_list(std_commands, - "Standard commands", - max_length) - if extra_commands: - print() - self.print_command_list(extra_commands, - "Extra commands", - max_length) - - # -- Command class/object methods ---------------------------------- - - def get_command_obj(self, command, create=True): - """Return the command object for 'command'. Normally this object - is cached on a previous call to 'get_command_obj()'; if no command - object for 'command' is in the cache, then we either create and - return it (if 'create' is true) or return None. - """ - cmd_obj = self.command_obj.get(command) - if not cmd_obj and create: - logger.debug("Distribution.get_command_obj(): " - "creating %r command object", command) - - cls = get_command_class(command) - cmd_obj = self.command_obj[command] = cls(self) - self.have_run[command] = 0 - - # Set any options that were supplied in config files or on the - # command line. (XXX support for error reporting is suboptimal - # here: errors aren't reported until finalize_options is called, - # which means we won't report the source of the error.) - options = self.command_options.get(command) - if options: - self._set_command_options(cmd_obj, options) - - return cmd_obj - - def _set_command_options(self, command_obj, option_dict=None): - """Set the options for 'command_obj' from 'option_dict'. 
Basically - this means copying elements of a dictionary ('option_dict') to - attributes of an instance ('command'). - - 'command_obj' must be a Command instance. If 'option_dict' is not - supplied, uses the standard option dictionary for this command - (from 'self.command_options'). - """ - command_name = command_obj.get_command_name() - if option_dict is None: - option_dict = self.get_option_dict(command_name) - - logger.debug(" setting options for %r command:", command_name) - - for option, (source, value) in option_dict.items(): - logger.debug(" %s = %s (from %s)", option, value, source) - try: - bool_opts = [x.replace('-', '_') - for x in command_obj.boolean_options] - except AttributeError: - bool_opts = [] - try: - neg_opt = command_obj.negative_opt - except AttributeError: - neg_opt = {} - - try: - is_string = isinstance(value, str) - if option in neg_opt and is_string: - setattr(command_obj, neg_opt[option], not strtobool(value)) - elif option in bool_opts and is_string: - setattr(command_obj, option, strtobool(value)) - elif hasattr(command_obj, option): - setattr(command_obj, option, value) - else: - raise PackagingOptionError( - "error in %s: command %r has no such option %r" % - (source, command_name, option)) - except ValueError as msg: - raise PackagingOptionError(msg) - - def reinitialize_command(self, command, reinit_subcommands=False): - """Reinitializes a command to the state it was in when first - returned by 'get_command_obj()': i.e., initialized but not yet - finalized. This provides the opportunity to sneak option - values in programmatically, overriding or supplementing - user-supplied values from the config files and command line. - You'll have to re-finalize the command object (by calling - 'finalize_options()' or 'ensure_finalized()') before using it for - real. - - 'command' should be a command name (string) or command object. If - 'reinit_subcommands' is true, also reinitializes the command's - sub-commands, as declared by the 'sub_commands' class attribute (if - it has one). See the "install_dist" command for an example. Only - reinitializes the sub-commands that actually matter, i.e. those - whose test predicate return true. - - Returns the reinitialized command object. It will be the same - object as the one stored in the self.command_obj attribute. - """ - if not isinstance(command, Command): - command_name = command - command = self.get_command_obj(command_name) - else: - command_name = command.get_command_name() - - if not command.finalized: - return command - - command.initialize_options() - self.have_run[command_name] = 0 - command.finalized = False - self._set_command_options(command) - - if reinit_subcommands: - for sub in command.get_sub_commands(): - self.reinitialize_command(sub, reinit_subcommands) - - return command - - # -- Methods that operate on the Distribution ---------------------- - - def run_commands(self): - """Run each command that was seen on the setup script command line. - Uses the list of commands found and cache of command objects - created by 'get_command_obj()'. - """ - for cmd in self.commands: - self.run_command(cmd) - - # -- Methods that operate on its Commands -------------------------- - - def run_command(self, command, options=None): - """Do whatever it takes to run a command (including nothing at all, - if the command has already been run). Specifically: if we have - already created and run the command named by 'command', return - silently without doing anything. 
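
# Illustration: options can also be handed to a single command at run time;
# the stored values are (source, value) pairs as described above.  The
# command and option names here are placeholders only.
    from packaging.dist import Distribution

    d = Distribution()
    d.run_command('clean', options={'all': ('programmatic', '1')})
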
If the command named by 'command' - doesn't even have a command object yet, create one. Then invoke - 'run()' on that command object (or an existing one). - """ - # Already been here, done that? then return silently. - if self.have_run.get(command): - return - - if options is not None: - self.command_options[command] = options - - cmd_obj = self.get_command_obj(command) - cmd_obj.ensure_finalized() - self.run_command_hooks(cmd_obj, 'pre_hook') - logger.info("running %s", command) - cmd_obj.run() - self.run_command_hooks(cmd_obj, 'post_hook') - self.have_run[command] = 1 - - def run_command_hooks(self, cmd_obj, hook_kind): - """Run hooks registered for that command and phase. - - *cmd_obj* is a finalized command object; *hook_kind* is either - 'pre_hook' or 'post_hook'. - """ - if hook_kind not in ('pre_hook', 'post_hook'): - raise ValueError('invalid hook kind: %r' % hook_kind) - - hooks = getattr(cmd_obj, hook_kind, None) - - if hooks is None: - return - - for hook in hooks.values(): - if isinstance(hook, str): - try: - hook_obj = resolve_name(hook) - except ImportError as e: - raise PackagingModuleError(e) - else: - hook_obj = hook - - if not callable(hook_obj): - raise PackagingOptionError('hook %r is not callable' % hook) - - logger.info('running %s %s for command %s', - hook_kind, hook, cmd_obj.get_command_name()) - hook_obj(cmd_obj) - - # -- Distribution query methods ------------------------------------ - def has_pure_modules(self): - return len(self.packages or self.py_modules or []) > 0 - - def has_ext_modules(self): - return self.ext_modules and len(self.ext_modules) > 0 - - def has_c_libraries(self): - return self.libraries and len(self.libraries) > 0 - - def has_modules(self): - return self.has_pure_modules() or self.has_ext_modules() - - def has_headers(self): - return self.headers and len(self.headers) > 0 - - def has_scripts(self): - return self.scripts and len(self.scripts) > 0 - - def has_data_files(self): - return self.data_files and len(self.data_files) > 0 - - def is_pure(self): - return (self.has_pure_modules() and - not self.has_ext_modules() and - not self.has_c_libraries()) diff --git a/Lib/packaging/errors.py b/Lib/packaging/errors.py deleted file mode 100644 --- a/Lib/packaging/errors.py +++ /dev/null @@ -1,138 +0,0 @@ -"""Exceptions used throughout the package. - -Submodules of packaging may raise exceptions defined in this module as -well as standard exceptions; in particular, SystemExit is usually raised -for errors that are obviously the end-user's fault (e.g. bad -command-line arguments). -""" - - -class PackagingError(Exception): - """The root of all Packaging evil.""" - - -class PackagingModuleError(PackagingError): - """Unable to load an expected module, or to find an expected class - within some module (in particular, command modules and classes).""" - - -class PackagingClassError(PackagingError): - """Some command class (or possibly distribution class, if anyone - feels a need to subclass Distribution) is found not to be holding - up its end of the bargain, ie. implementing some part of the - "command "interface.""" - - -class PackagingGetoptError(PackagingError): - """The option table provided to 'fancy_getopt()' is bogus.""" - - -class PackagingArgError(PackagingError): - """Raised by fancy_getopt in response to getopt.error -- ie. an - error in the command line usage.""" - - -class PackagingFileError(PackagingError): - """Any problems in the filesystem: expected file not found, etc. 
- Typically this is for problems that we detect before IOError or - OSError could be raised.""" - - -class PackagingOptionError(PackagingError): - """Syntactic/semantic errors in command options, such as use of - mutually conflicting options, or inconsistent options, - badly-spelled values, etc. No distinction is made between option - values originating in the setup script, the command line, config - files, or what-have-you -- but if we *know* something originated in - the setup script, we'll raise PackagingSetupError instead.""" - - -class PackagingSetupError(PackagingError): - """For errors that can be definitely blamed on the setup script, - such as invalid keyword arguments to 'setup()'.""" - - -class PackagingPlatformError(PackagingError): - """We don't know how to do something on the current platform (but - we do know how to do it on some platform) -- eg. trying to compile - C files on a platform not supported by a CCompiler subclass.""" - - -class PackagingExecError(PackagingError): - """Any problems executing an external program (such as the C - compiler, when compiling C files).""" - - -class PackagingInternalError(PackagingError): - """Internal inconsistencies or impossibilities (obviously, this - should never be seen if the code is working!).""" - - -class PackagingTemplateError(PackagingError): - """Syntax error in a file list template.""" - - -class PackagingPyPIError(PackagingError): - """Any problem occuring during using the indexes.""" - - -# Exception classes used by the CCompiler implementation classes -class CCompilerError(Exception): - """Some compile/link operation failed.""" - - -class PreprocessError(CCompilerError): - """Failure to preprocess one or more C/C++ files.""" - - -class CompileError(CCompilerError): - """Failure to compile one or more C/C++ source files.""" - - -class LibError(CCompilerError): - """Failure to create a static library from one or more C/C++ object - files.""" - - -class LinkError(CCompilerError): - """Failure to link one or more C/C++ object files into an executable - or shared library file.""" - - -class UnknownFileError(CCompilerError): - """Attempt to process an unknown file type.""" - - -class MetadataMissingError(PackagingError): - """A required metadata is missing""" - - -class MetadataConflictError(PackagingError): - """Attempt to read or write metadata fields that are conflictual.""" - - -class MetadataUnrecognizedVersionError(PackagingError): - """Unknown metadata version number.""" - - -class IrrationalVersionError(Exception): - """This is an irrational version.""" - pass - - -class HugeMajorVersionNumError(IrrationalVersionError): - """An irrational version because the major version number is huge - (often because a year or date was used). - - See `error_on_huge_major_num` option in `NormalizedVersion` for details. - This guard can be disabled by setting that option False. - """ - pass - - -class InstallationException(Exception): - """Base exception for installation scripts""" - - -class InstallationConflict(InstallationException): - """Raised when a conflict is detected""" diff --git a/Lib/packaging/fancy_getopt.py b/Lib/packaging/fancy_getopt.py deleted file mode 100644 --- a/Lib/packaging/fancy_getopt.py +++ /dev/null @@ -1,388 +0,0 @@ -"""Command line parsing machinery. 
- -The FancyGetopt class is a Wrapper around the getopt module that -provides the following additional features: - * short and long options are tied together - * options have help strings, so fancy_getopt could potentially - create a complete usage summary - * options set attributes of a passed-in object. - -It is used under the hood by the command classes. Do not use directly. -""" - -import getopt -import re -import sys -import textwrap - -from packaging.errors import PackagingGetoptError, PackagingArgError - -# Much like command_re in packaging.core, this is close to but not quite -# the same as a Python NAME -- except, in the spirit of most GNU -# utilities, we use '-' in place of '_'. (The spirit of LISP lives on!) -# The similarities to NAME are again not a coincidence... -longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)' -longopt_re = re.compile(r'^%s$' % longopt_pat) - -# For recognizing "negative alias" options, eg. "quiet=!verbose" -neg_alias_re = re.compile("^(%s)=!(%s)$" % (longopt_pat, longopt_pat)) - - -class FancyGetopt: - """Wrapper around the standard 'getopt()' module that provides some - handy extra functionality: - * short and long options are tied together - * options have help strings, and help text can be assembled - from them - * options set attributes of a passed-in object - * boolean options can have "negative aliases" -- eg. if - --quiet is the "negative alias" of --verbose, then "--quiet" - on the command line sets 'verbose' to false - """ - - def __init__(self, option_table=None): - - # The option table is (currently) a list of tuples. The - # tuples may have 3 or four values: - # (long_option, short_option, help_string [, repeatable]) - # if an option takes an argument, its long_option should have '=' - # appended; short_option should just be a single character, no ':' - # in any case. If a long_option doesn't have a corresponding - # short_option, short_option should be None. All option tuples - # must have long options. - self.option_table = option_table - - # 'option_index' maps long option names to entries in the option - # table (ie. those 3-tuples). - self.option_index = {} - if self.option_table: - self._build_index() - - # 'alias' records (duh) alias options; {'foo': 'bar'} means - # --foo is an alias for --bar - self.alias = {} - - # 'negative_alias' keeps track of options that are the boolean - # opposite of some other option - self.negative_alias = {} - - # These keep track of the information in the option table. We - # don't actually populate these structures until we're ready to - # parse the command line, since the 'option_table' passed in here - # isn't necessarily the final word. - self.short_opts = [] - self.long_opts = [] - self.short2long = {} - self.attr_name = {} - self.takes_arg = {} - - # And 'option_order' is filled up in 'getopt()'; it records the - # original order of options (and their values) on the command line, - # but expands short options, converts aliases, etc. 
- self.option_order = [] - - def _build_index(self): - self.option_index.clear() - for option in self.option_table: - self.option_index[option[0]] = option - - def set_option_table(self, option_table): - self.option_table = option_table - self._build_index() - - def add_option(self, long_option, short_option=None, help_string=None): - if long_option in self.option_index: - raise PackagingGetoptError( - "option conflict: already an option '%s'" % long_option) - else: - option = (long_option, short_option, help_string) - self.option_table.append(option) - self.option_index[long_option] = option - - def has_option(self, long_option): - """Return true if the option table for this parser has an - option with long name 'long_option'.""" - return long_option in self.option_index - - def _check_alias_dict(self, aliases, what): - assert isinstance(aliases, dict) - for alias, opt in aliases.items(): - if alias not in self.option_index: - raise PackagingGetoptError( - ("invalid %s '%s': " - "option '%s' not defined") % (what, alias, alias)) - if opt not in self.option_index: - raise PackagingGetoptError( - ("invalid %s '%s': " - "aliased option '%s' not defined") % (what, alias, opt)) - - def set_aliases(self, alias): - """Set the aliases for this option parser.""" - self._check_alias_dict(alias, "alias") - self.alias = alias - - def set_negative_aliases(self, negative_alias): - """Set the negative aliases for this option parser. - 'negative_alias' should be a dictionary mapping option names to - option names, both the key and value must already be defined - in the option table.""" - self._check_alias_dict(negative_alias, "negative alias") - self.negative_alias = negative_alias - - def _grok_option_table(self): - """Populate the various data structures that keep tabs on the - option table. Called by 'getopt()' before it can do anything - worthwhile. - """ - self.long_opts = [] - self.short_opts = [] - self.short2long.clear() - self.repeat = {} - - for option in self.option_table: - if len(option) == 3: - longopt, short, help = option - repeat = 0 - elif len(option) == 4: - longopt, short, help, repeat = option - else: - # the option table is part of the code, so simply - # assert that it is correct - raise ValueError("invalid option tuple: %r" % option) - - # Type- and value-check the option names - if not isinstance(longopt, str) or len(longopt) < 2: - raise PackagingGetoptError( - ("invalid long option '%s': " - "must be a string of length >= 2") % longopt) - - if (not ((short is None) or - (isinstance(short, str) and len(short) == 1))): - raise PackagingGetoptError( - ("invalid short option '%s': " - "must be a single character or None") % short) - - self.repeat[longopt] = repeat - self.long_opts.append(longopt) - - if longopt[-1] == '=': # option takes an argument? - if short: - short = short + ':' - longopt = longopt[0:-1] - self.takes_arg[longopt] = 1 - else: - - # Is option is a "negative alias" for some other option (eg. - # "quiet" == "!verbose")? - alias_to = self.negative_alias.get(longopt) - if alias_to is not None: - if self.takes_arg[alias_to]: - raise PackagingGetoptError( - ("invalid negative alias '%s': " - "aliased option '%s' takes a value") % \ - (longopt, alias_to)) - - self.long_opts[-1] = longopt # XXX redundant?! - self.takes_arg[longopt] = 0 - - else: - self.takes_arg[longopt] = 0 - - # If this is an alias option, make sure its "takes arg" flag is - # the same as the option it's aliased to. 
- alias_to = self.alias.get(longopt) - if alias_to is not None: - if self.takes_arg[longopt] != self.takes_arg[alias_to]: - raise PackagingGetoptError( - ("invalid alias '%s': inconsistent with " - "aliased option '%s' (one of them takes a value, " - "the other doesn't") % (longopt, alias_to)) - - # Now enforce some bondage on the long option name, so we can - # later translate it to an attribute name on some object. Have - # to do this a bit late to make sure we've removed any trailing - # '='. - if not longopt_re.match(longopt): - raise PackagingGetoptError( - ("invalid long option name '%s' " + - "(must be letters, numbers, hyphens only") % longopt) - - self.attr_name[longopt] = longopt.replace('-', '_') - if short: - self.short_opts.append(short) - self.short2long[short[0]] = longopt - - def getopt(self, args=None, object=None): - """Parse command-line options in args. Store as attributes on object. - - If 'args' is None or not supplied, uses 'sys.argv[1:]'. If - 'object' is None or not supplied, creates a new OptionDummy - object, stores option values there, and returns a tuple (args, - object). If 'object' is supplied, it is modified in place and - 'getopt()' just returns 'args'; in both cases, the returned - 'args' is a modified copy of the passed-in 'args' list, which - is left untouched. - """ - if args is None: - args = sys.argv[1:] - if object is None: - object = OptionDummy() - created_object = 1 - else: - created_object = 0 - - self._grok_option_table() - - short_opts = ' '.join(self.short_opts) - - try: - opts, args = getopt.getopt(args, short_opts, self.long_opts) - except getopt.error as msg: - raise PackagingArgError(msg) - - for opt, val in opts: - if len(opt) == 2 and opt[0] == '-': # it's a short option - opt = self.short2long[opt[1]] - else: - assert len(opt) > 2 and opt[:2] == '--' - opt = opt[2:] - - alias = self.alias.get(opt) - if alias: - opt = alias - - if not self.takes_arg[opt]: # boolean option? - assert val == '', "boolean option can't have value" - alias = self.negative_alias.get(opt) - if alias: - opt = alias - val = 0 - else: - val = 1 - - attr = self.attr_name[opt] - # The only repeating option at the moment is 'verbose'. - # It has a negative option -q quiet, which should set verbose = 0. - if val and self.repeat.get(attr) is not None: - val = getattr(object, attr, 0) + 1 - setattr(object, attr, val) - self.option_order.append((opt, val)) - - # for opts - if created_object: - return args, object - else: - return args - - def get_option_order(self): - """Returns the list of (option, value) tuples processed by the - previous run of 'getopt()'. Raises RuntimeError if - 'getopt()' hasn't been called yet. - """ - if self.option_order is None: - raise RuntimeError("'getopt()' hasn't been called yet") - else: - return self.option_order - - return self.option_order - - def generate_help(self, header=None): - """Generate help text (a list of strings, one per suggested line of - output) from the option table for this FancyGetopt object. - """ - # Blithely assume the option table is good: probably wouldn't call - # 'generate_help()' unless you've already called 'getopt()'. 
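Help generation only needs the option table, so a hedged sketch of driving it directly looks like this (option and header text invented; the exact column spacing depends on the format strings below):

    parser = FancyGetopt([('verbose', 'v', "run verbosely")])
    for line in parser.generate_help("Options for this command:"):
        print(line)
    # prints, roughly:
    #   Options for this command:
    #     --verbose (-v)  run verbosely
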
- - # First pass: determine maximum length of long option names - max_opt = 0 - for option in self.option_table: - longopt = option[0] - short = option[1] - l = len(longopt) - if longopt[-1] == '=': - l = l - 1 - if short is not None: - l = l + 5 # " (-x)" where short == 'x' - if l > max_opt: - max_opt = l - - opt_width = max_opt + 2 + 2 + 2 # room for indent + dashes + gutter - - # Typical help block looks like this: - # --foo controls foonabulation - # Help block for longest option looks like this: - # --flimflam set the flim-flam level - # and with wrapped text: - # --flimflam set the flim-flam level (must be between - # 0 and 100, except on Tuesdays) - # Options with short names will have the short name shown (but - # it doesn't contribute to max_opt): - # --foo (-f) controls foonabulation - # If adding the short option would make the left column too wide, - # we push the explanation off to the next line - # --flimflam (-l) - # set the flim-flam level - # Important parameters: - # - 2 spaces before option block start lines - # - 2 dashes for each long option name - # - min. 2 spaces between option and explanation (gutter) - # - 5 characters (incl. space) for short option name - - # Now generate lines of help text. (If 80 columns were good enough - # for Jesus, then 78 columns are good enough for me!) - line_width = 78 - text_width = line_width - opt_width - big_indent = ' ' * opt_width - if header: - lines = [header] - else: - lines = ['Option summary:'] - - for option in self.option_table: - longopt, short, help = option[:3] - text = textwrap.wrap(help, text_width) - - # Case 1: no short option at all (makes life easy) - if short is None: - if text: - lines.append(" --%-*s %s" % (max_opt, longopt, text[0])) - else: - lines.append(" --%-*s " % (max_opt, longopt)) - - # Case 2: we have a short option, so we have to include it - # just after the long option - else: - opt_names = "%s (-%s)" % (longopt, short) - if text: - lines.append(" --%-*s %s" % - (max_opt, opt_names, text[0])) - else: - lines.append(" --%-*s" % opt_names) - - for l in text[1:]: - lines.append(big_indent + l) - - return lines - - def print_help(self, header=None, file=None): - if file is None: - file = sys.stdout - for line in self.generate_help(header): - file.write(line + "\n") - - -def fancy_getopt(options, negative_opt, object, args): - parser = FancyGetopt(options) - parser.set_negative_aliases(negative_opt) - return parser.getopt(args, object) - - -class OptionDummy: - """Dummy class just used as a place to hold command-line option - values as instance attributes.""" - - def __init__(self, options=[]): - """Create a new OptionDummy instance. The attributes listed in - 'options' will be initialized to None.""" - for opt in options: - setattr(self, opt, None) diff --git a/Lib/packaging/install.py b/Lib/packaging/install.py deleted file mode 100644 --- a/Lib/packaging/install.py +++ /dev/null @@ -1,529 +0,0 @@ -"""Building blocks for installers. - -When used as a script, this module installs a release thanks to info -obtained from an index (e.g. PyPI), with dependencies. - -This is a higher-level module built on packaging.database and -packaging.pypi. 
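A short sketch of the high-level entry points this module exposed; the project name and path are made up for illustration:

    from packaging.install import install, install_local_project, remove

    # install a project by name, resolving dependencies through the index
    install('FooBar')

    # or install directly from an unpacked source directory or an archive
    install_local_project('/tmp/FooBar-1.0')

    # remove an installed project again
    remove('FooBar')
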
-""" -import os -import sys -import stat -import errno -import shutil -import logging -import tempfile -from sysconfig import get_config_var, get_path, is_python_build - -from packaging import logger -from packaging.dist import Distribution -from packaging.util import (_is_archive_file, ask, get_install_method, - egginfo_to_distinfo) -from packaging.pypi import wrapper -from packaging.version import get_version_predicate -from packaging.database import get_distributions, get_distribution -from packaging.depgraph import generate_graph - -from packaging.errors import (PackagingError, InstallationException, - InstallationConflict, CCompilerError) -from packaging.pypi.errors import ProjectNotFound, ReleaseNotFound -from packaging import database - - -__all__ = ['install_dists', 'install_from_infos', 'get_infos', 'remove', - 'install', 'install_local_project'] - - -def _move_files(files, destination): - """Move the list of files in the destination folder, keeping the same - structure. - - Return a list of tuple (old, new) emplacement of files - - :param files: a list of files to move. - :param destination: the destination directory to put on the files. - """ - - for old in files: - filename = os.path.split(old)[-1] - new = os.path.join(destination, filename) - # try to make the paths. - try: - os.makedirs(os.path.dirname(new)) - except OSError as e: - if e.errno != errno.EEXIST: - raise - os.rename(old, new) - yield old, new - - -def _run_distutils_install(path): - # backward compat: using setuptools or plain-distutils - cmd = '%s setup.py install --record=%s' - record_file = os.path.join(path, 'RECORD') - os.system(cmd % (sys.executable, record_file)) - if not os.path.exists(record_file): - raise ValueError('failed to install') - else: - egginfo_to_distinfo(record_file, remove_egginfo=True) - - -def _run_setuptools_install(path): - cmd = '%s setup.py install --record=%s --single-version-externally-managed' - record_file = os.path.join(path, 'RECORD') - - os.system(cmd % (sys.executable, record_file)) - if not os.path.exists(record_file): - raise ValueError('failed to install') - else: - egginfo_to_distinfo(record_file, remove_egginfo=True) - - -def _run_packaging_install(path): - # XXX check for a valid setup.cfg? - dist = Distribution() - dist.parse_config_files() - try: - dist.run_command('install_dist') - name = dist.metadata['Name'] - return database.get_distribution(name) is not None - except (IOError, os.error, PackagingError, CCompilerError) as msg: - raise ValueError("Failed to install, " + str(msg)) - - -def _install_dist(dist, path): - """Install a distribution into a path. - - This: - - * unpack the distribution - * copy the files in "path" - * determine if the distribution is packaging or distutils1. - """ - where = dist.unpack() - - if where is None: - raise ValueError('Cannot locate the unpacked archive') - - return _run_install_from_archive(where) - - -def install_local_project(path): - """Install a distribution from a source directory. - - If the source directory contains a setup.py install using distutils1. - If a setup.cfg is found, install using the install_dist command. - - Returns True on success, False on Failure. 
- """ - path = os.path.abspath(path) - if os.path.isdir(path): - logger.info('Installing from source directory: %r', path) - return _run_install_from_dir(path) - elif _is_archive_file(path): - logger.info('Installing from archive: %r', path) - _unpacked_dir = tempfile.mkdtemp() - try: - shutil.unpack_archive(path, _unpacked_dir) - return _run_install_from_archive(_unpacked_dir) - finally: - shutil.rmtree(_unpacked_dir) - else: - logger.warning('No project to install.') - return False - - -def _run_install_from_archive(source_dir): - # XXX need a better way - for item in os.listdir(source_dir): - fullpath = os.path.join(source_dir, item) - if os.path.isdir(fullpath): - source_dir = fullpath - break - return _run_install_from_dir(source_dir) - - -install_methods = { - 'packaging': _run_packaging_install, - 'setuptools': _run_setuptools_install, - 'distutils': _run_distutils_install} - - -def _run_install_from_dir(source_dir): - old_dir = os.getcwd() - os.chdir(source_dir) - install_method = get_install_method(source_dir) - func = install_methods[install_method] - try: - func = install_methods[install_method] - try: - func(source_dir) - return True - except ValueError as err: - # failed to install - logger.info(str(err)) - return False - finally: - os.chdir(old_dir) - - -def install_dists(dists, path, paths=None): - """Install all distributions provided in dists, with the given prefix. - - If an error occurs while installing one of the distributions, uninstall all - the installed distribution (in the context if this function). - - Return a list of installed dists. - - :param dists: distributions to install - :param path: base path to install distribution in - :param paths: list of paths (defaults to sys.path) to look for info - """ - - installed_dists = [] - for dist in dists: - logger.info('Installing %r %s...', dist.name, dist.version) - try: - _install_dist(dist, path) - installed_dists.append(dist) - except Exception as e: - logger.info('Failed: %s', e) - - # reverting - for installed_dist in installed_dists: - logger.info('Reverting %r', installed_dist) - remove(installed_dist.name, paths) - raise e - return installed_dists - - -def install_from_infos(install_path=None, install=[], remove=[], conflicts=[], - paths=None): - """Install and remove the given distributions. - - The function signature is made to be compatible with the one of get_infos. - The aim of this script is to povide a way to install/remove what's asked, - and to rollback if needed. - - So, it's not possible to be in an inconsistant state, it could be either - installed, either uninstalled, not half-installed. - - The process follow those steps: - - 1. Move all distributions that will be removed in a temporary location - 2. Install all the distributions that will be installed in a temp. loc. - 3. If the installation fails, rollback (eg. move back) those - distributions, or remove what have been installed. - 4. Else, move the distributions to the right locations, and remove for - real the distributions thats need to be removed. - - :param install_path: the installation path where we want to install the - distributions. - :param install: list of distributions that will be installed; install_path - must be provided if this list is not empty. - :param remove: list of distributions that will be removed. - :param conflicts: list of conflicting distributions, eg. that will be in - conflict once the install and remove distribution will be - processed. 
- :param paths: list of paths (defaults to sys.path) to look for info - """ - # first of all, if we have conflicts, stop here. - if conflicts: - raise InstallationConflict(conflicts) - - if install and not install_path: - raise ValueError("Distributions are to be installed but `install_path`" - " is not provided.") - - # before removing the files, we will start by moving them away - # then, if any error occurs, we could replace them in the good place. - temp_files = {} # contains lists of {dist: (old, new)} paths - temp_dir = None - if remove: - temp_dir = tempfile.mkdtemp() - for dist in remove: - files = dist.list_installed_files() - temp_files[dist] = _move_files(files, temp_dir) - try: - if install: - install_dists(install, install_path, paths) - except: - # if an error occurs, put back the files in the right place. - for files in temp_files.values(): - for old, new in files: - shutil.move(new, old) - if temp_dir: - shutil.rmtree(temp_dir) - # now re-raising - raise - - # we can remove them for good - for files in temp_files.values(): - for old, new in files: - os.remove(new) - if temp_dir: - shutil.rmtree(temp_dir) - - -def _get_setuptools_deps(release): - # NotImplementedError - pass - - -def get_infos(requirements, index=None, installed=None, prefer_final=True): - """Return the informations on what's going to be installed and upgraded. - - :param requirements: is a *string* containing the requirements for this - project (for instance "FooBar 1.1" or "BarBaz (<1.2)") - :param index: If an index is specified, use this one, otherwise, use - :class index.ClientWrapper: to get project metadatas. - :param installed: a list of already installed distributions. - :param prefer_final: when picking up the releases, prefer a "final" one - over a beta/alpha/etc one. - - The results are returned in a dict, containing all the operations - needed to install the given requirements:: - - >>> get_install_info("FooBar (<=1.2)") - {'install': [], 'remove': [], 'conflict': []} - - Conflict contains all the conflicting distributions, if there is a - conflict. - """ - # this function does several things: - # 1. get a release specified by the requirements - # 2. gather its metadata, using setuptools compatibility if needed - # 3. compare this tree with what is currently installed on the system, - # return the requirements of what is missing - # 4. do that recursively and merge back the results - # 5. return a dict containing information about what is needed to install - # or remove - - if not installed: - logger.debug('Reading installed distributions') - installed = list(get_distributions(use_egg_info=True)) - - infos = {'install': [], 'remove': [], 'conflict': []} - # Is a compatible version of the project already installed ? - predicate = get_version_predicate(requirements) - found = False - - # check that the project isn't already installed - for installed_project in installed: - # is it a compatible project ? 
- if predicate.name.lower() != installed_project.name.lower(): - continue - found = True - logger.info('Found %r %s', installed_project.name, - installed_project.version) - - # if we already have something installed, check it matches the - # requirements - if predicate.match(installed_project.version): - return infos - break - - if not found: - logger.debug('Project not installed') - - if not index: - index = wrapper.ClientWrapper() - - if not installed: - installed = get_distributions(use_egg_info=True) - - # Get all the releases that match the requirements - try: - release = index.get_release(requirements) - except (ReleaseNotFound, ProjectNotFound): - raise InstallationException('Release not found: %r' % requirements) - - if release is None: - logger.info('Could not find a matching project') - return infos - - metadata = release.fetch_metadata() - - # we need to build setuptools deps if any - if 'requires_dist' not in metadata: - metadata['requires_dist'] = _get_setuptools_deps(release) - - # build the dependency graph with local and required dependencies - dists = list(installed) - dists.append(release) - depgraph = generate_graph(dists) - - # Get what the missing deps are - dists = depgraph.missing[release] - if dists: - logger.info("Missing dependencies found, retrieving metadata") - # we have missing deps - for dist in dists: - _update_infos(infos, get_infos(dist, index, installed)) - - # Fill in the infos - existing = [d for d in installed if d.name == release.name] - if existing: - infos['remove'].append(existing[0]) - infos['conflict'].extend(depgraph.reverse_list[existing[0]]) - infos['install'].append(release) - return infos - - -def _update_infos(infos, new_infos): - """extends the lists contained in the `info` dict with those contained - in the `new_info` one - """ - for key, value in infos.items(): - if key in new_infos: - infos[key].extend(new_infos[key]) - - -def remove(project_name, paths=None, auto_confirm=True): - """Removes a single project from the installation. - - Returns True on success - """ - dist = get_distribution(project_name, use_egg_info=True, paths=paths) - if dist is None: - raise PackagingError('Distribution %r not found' % project_name) - files = dist.list_installed_files(local=True) - rmdirs = [] - rmfiles = [] - tmp = tempfile.mkdtemp(prefix=project_name + '-uninstall') - - def _move_file(source, target): - try: - os.rename(source, target) - except OSError as err: - return err - return None - - success = True - error = None - try: - for file_, md5, size in files: - if os.path.isfile(file_): - dirname, filename = os.path.split(file_) - tmpfile = os.path.join(tmp, filename) - try: - error = _move_file(file_, tmpfile) - if error is not None: - success = False - break - finally: - if not os.path.isfile(file_): - os.rename(tmpfile, file_) - if file_ not in rmfiles: - rmfiles.append(file_) - if dirname not in rmdirs: - rmdirs.append(dirname) - finally: - shutil.rmtree(tmp) - - if not success: - logger.info('%r cannot be removed.', project_name) - logger.info('Error: %s', error) - return False - - logger.info('Removing %r: ', project_name) - - for file_ in rmfiles: - logger.info(' %s', file_) - - # Taken from the pip project - if auto_confirm: - response = 'y' - else: - response = ask('Proceed (y/n)? 
', ('y', 'n')) - - if response == 'y': - file_count = 0 - for file_ in rmfiles: - os.remove(file_) - file_count += 1 - - dir_count = 0 - for dirname in rmdirs: - if not os.path.exists(dirname): - # could - continue - - files_count = 0 - for root, dir, files in os.walk(dirname): - files_count += len(files) - - if files_count > 0: - # XXX Warning - continue - - # empty dirs with only empty dirs - if os.stat(dirname).st_mode & stat.S_IWUSR: - # XXX Add a callable in shutil.rmtree to count - # the number of deleted elements - shutil.rmtree(dirname) - dir_count += 1 - - # removing the top path - # XXX count it ? - if os.path.exists(dist.path): - shutil.rmtree(dist.path) - - logger.info('Success: removed %d files and %d dirs', - file_count, dir_count) - - return True - - -def install(project): - """Installs a project. - - Returns True on success, False on failure - """ - if is_python_build(): - # Python would try to install into the site-packages directory under - # $PREFIX, but when running from an uninstalled code checkout we don't - # want to create directories under the installation root - message = ('installing third-party projects from an uninstalled ' - 'Python is not supported') - logger.error(message) - return False - - logger.info('Checking the installation location...') - purelib_path = get_path('purelib') - - # trying to write a file there - try: - with tempfile.NamedTemporaryFile(suffix=project, - dir=purelib_path) as testfile: - testfile.write(b'test') - except OSError: - # FIXME this should check the errno, or be removed altogether (race - # condition: the directory permissions could be changed between here - # and the actual install) - logger.info('Unable to write in "%s". Do you have the permissions ?' - % purelib_path) - return False - - logger.info('Getting information about %r...', project) - try: - info = get_infos(project) - except InstallationException: - logger.info('Cound not find %r', project) - return False - - if info['install'] == []: - logger.info('Nothing to install') - return False - - install_path = get_config_var('base') - try: - install_from_infos(install_path, - info['install'], info['remove'], info['conflict']) - - except InstallationConflict as e: - if logger.isEnabledFor(logging.INFO): - projects = ('%r %s' % (p.name, p.version) for p in e.args[0]) - logger.info('%r conflicts with %s', project, ','.join(projects)) - - return True diff --git a/Lib/packaging/manifest.py b/Lib/packaging/manifest.py deleted file mode 100644 --- a/Lib/packaging/manifest.py +++ /dev/null @@ -1,381 +0,0 @@ -"""Class representing the list of files in a distribution. - -The Manifest class can be used to: - - - read or write a MANIFEST file - - read a template file and find out the file list -""" -# XXX todo: document + add tests -import re -import os -import fnmatch - -from packaging import logger -from packaging.util import write_file, convert_path -from packaging.errors import (PackagingTemplateError, - PackagingInternalError) - -__all__ = ['Manifest'] - -# a \ followed by some spaces + EOL -_COLLAPSE_PATTERN = re.compile('\\\w*\n', re.M) -_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S) - - -class Manifest(object): - """A list of files built by on exploring the filesystem and filtered by - applying various patterns to what we find there. 
- """ - - def __init__(self): - self.allfiles = None - self.files = [] - - # - # Public API - # - - def findall(self, dir=os.curdir): - self.allfiles = _findall(dir) - - def append(self, item): - self.files.append(item) - - def extend(self, items): - self.files.extend(items) - - def sort(self): - # Not a strict lexical sort! - self.files = [os.path.join(*path_tuple) for path_tuple in - sorted(os.path.split(path) for path in self.files)] - - def clear(self): - """Clear all collected files.""" - self.files = [] - if self.allfiles is not None: - self.allfiles = [] - - def remove_duplicates(self): - # Assumes list has been sorted! - for i in range(len(self.files) - 1, 0, -1): - if self.files[i] == self.files[i - 1]: - del self.files[i] - - def read_template(self, path_or_file): - """Read and parse a manifest template file. - 'path' can be a path or a file-like object. - - Updates the list accordingly. - """ - if isinstance(path_or_file, str): - f = open(path_or_file) - else: - f = path_or_file - - try: - content = f.read() - # first, let's unwrap collapsed lines - content = _COLLAPSE_PATTERN.sub('', content) - # next, let's remove commented lines and empty lines - content = _COMMENTED_LINE.sub('', content) - - # now we have our cleaned up lines - lines = [line.strip() for line in content.split('\n')] - finally: - f.close() - - for line in lines: - if line == '': - continue - try: - self._process_template_line(line) - except PackagingTemplateError as msg: - logger.warning("%s, %s", path_or_file, msg) - - def write(self, path): - """Write the file list in 'self.filelist' (presumably as filled in - by 'add_defaults()' and 'read_template()') to the manifest file - named by 'self.manifest'. - """ - if os.path.isfile(path): - with open(path) as fp: - first_line = fp.readline() - - if first_line != '# file GENERATED by packaging, do NOT edit\n': - logger.info("not writing to manually maintained " - "manifest file %r", path) - return - - self.sort() - self.remove_duplicates() - content = self.files[:] - content.insert(0, '# file GENERATED by packaging, do NOT edit') - logger.info("writing manifest file %r", path) - write_file(path, content) - - def read(self, path): - """Read the manifest file (named by 'self.manifest') and use it to - fill in 'self.filelist', the list of files to include in the source - distribution. - """ - logger.info("reading manifest file %r", path) - with open(path) as manifest: - for line in manifest.readlines(): - self.append(line) - - def exclude_pattern(self, pattern, anchor=True, prefix=None, - is_regex=False): - """Remove strings (presumably filenames) from 'files' that match - 'pattern'. - - Other parameters are the same as for 'include_pattern()', above. - The list 'self.files' is modified in place. Return True if files are - found. 
- """ - files_found = False - pattern_re = _translate_pattern(pattern, anchor, prefix, is_regex) - for i in range(len(self.files) - 1, -1, -1): - if pattern_re.search(self.files[i]): - del self.files[i] - files_found = True - - return files_found - - # - # Private API - # - - def _parse_template_line(self, line): - words = line.split() - if len(words) == 1 and words[0] not in ( - 'include', 'exclude', 'global-include', 'global-exclude', - 'recursive-include', 'recursive-exclude', 'graft', 'prune'): - # no action given, let's use the default 'include' - words.insert(0, 'include') - - action = words[0] - patterns = dir = dir_pattern = None - - if action in ('include', 'exclude', - 'global-include', 'global-exclude'): - if len(words) < 2: - raise PackagingTemplateError( - "%r expects ..." % action) - - patterns = [convert_path(word) for word in words[1:]] - - elif action in ('recursive-include', 'recursive-exclude'): - if len(words) < 3: - raise PackagingTemplateError( - "%r expects ..." % action) - - dir = convert_path(words[1]) - patterns = [convert_path(word) for word in words[2:]] - - elif action in ('graft', 'prune'): - if len(words) != 2: - raise PackagingTemplateError( - "%r expects a single " % action) - - dir_pattern = convert_path(words[1]) - - else: - raise PackagingTemplateError("unknown action %r" % action) - - return action, patterns, dir, dir_pattern - - def _process_template_line(self, line): - # Parse the line: split it up, make sure the right number of words - # is there, and return the relevant words. 'action' is always - # defined: it's the first word of the line. Which of the other - # three are defined depends on the action; it'll be either - # patterns, (dir and patterns), or (dir_pattern). - action, patterns, dir, dir_pattern = self._parse_template_line(line) - - # OK, now we know that the action is valid and we have the - # right number of words on the line for that action -- so we - # can proceed with minimal error-checking. 
- if action == 'include': - for pattern in patterns: - if not self._include_pattern(pattern, anchor=True): - logger.warning("no files found matching %r", pattern) - - elif action == 'exclude': - for pattern in patterns: - if not self.exclude_pattern(pattern, anchor=True): - logger.warning("no previously-included files " - "found matching %r", pattern) - - elif action == 'global-include': - for pattern in patterns: - if not self._include_pattern(pattern, anchor=False): - logger.warning("no files found matching %r " - "anywhere in distribution", pattern) - - elif action == 'global-exclude': - for pattern in patterns: - if not self.exclude_pattern(pattern, anchor=False): - logger.warning("no previously-included files " - "matching %r found anywhere in " - "distribution", pattern) - - elif action == 'recursive-include': - for pattern in patterns: - if not self._include_pattern(pattern, prefix=dir): - logger.warning("no files found matching %r " - "under directory %r", pattern, dir) - - elif action == 'recursive-exclude': - for pattern in patterns: - if not self.exclude_pattern(pattern, prefix=dir): - logger.warning("no previously-included files " - "matching %r found under directory %r", - pattern, dir) - - elif action == 'graft': - if not self._include_pattern(None, prefix=dir_pattern): - logger.warning("no directories found matching %r", - dir_pattern) - - elif action == 'prune': - if not self.exclude_pattern(None, prefix=dir_pattern): - logger.warning("no previously-included directories found " - "matching %r", dir_pattern) - else: - raise PackagingInternalError( - "this cannot happen: invalid action %r" % action) - - def _include_pattern(self, pattern, anchor=True, prefix=None, - is_regex=False): - """Select strings (presumably filenames) from 'self.files' that - match 'pattern', a Unix-style wildcard (glob) pattern. - - Patterns are not quite the same as implemented by the 'fnmatch' - module: '*' and '?' match non-special characters, where "special" - is platform-dependent: slash on Unix; colon, slash, and backslash on - DOS/Windows; and colon on Mac OS. - - If 'anchor' is true (the default), then the pattern match is more - stringent: "*.py" will match "foo.py" but not "foo/bar.py". If - 'anchor' is false, both of these will match. - - If 'prefix' is supplied, then only filenames starting with 'prefix' - (itself a pattern) and ending with 'pattern', with anything in between - them, will match. 'anchor' is ignored in this case. - - If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and - 'pattern' is assumed to be either a string containing a regex or a - regex object -- no translation is done, the regex is just compiled - and used as-is. - - Selected strings will be added to self.files. - - Return True if files are found. - """ - # XXX docstring lying about what the special chars are? - files_found = False - pattern_re = _translate_pattern(pattern, anchor, prefix, is_regex) - - # delayed loading of allfiles list - if self.allfiles is None: - self.findall() - - for name in self.allfiles: - if pattern_re.search(name): - self.files.append(name) - files_found = True - - return files_found - - -# -# Utility functions -# -def _findall(dir=os.curdir): - """Find all files under 'dir' and return the list of full filenames - (relative to 'dir'). 
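To make the anchoring semantics concrete, a small illustrative sketch; it pokes at the private helper purely for demonstration, and the patterns are invented:

    m = Manifest()
    m.findall()                                  # scan the current directory
    m._include_pattern('*.py', anchor=True)      # top-level *.py files only
    m._include_pattern('*.py', anchor=False)     # *.py files anywhere
    m._include_pattern('*.txt', prefix='docs')   # *.txt files under docs/
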
- """ - from stat import S_ISREG, S_ISDIR, S_ISLNK - - list = [] - stack = [dir] - pop = stack.pop - push = stack.append - - while stack: - dir = pop() - names = os.listdir(dir) - - for name in names: - if dir != os.curdir: # avoid the dreaded "./" syndrome - fullname = os.path.join(dir, name) - else: - fullname = name - - # Avoid excess stat calls -- just one will do, thank you! - stat = os.stat(fullname) - mode = stat.st_mode - if S_ISREG(mode): - list.append(fullname) - elif S_ISDIR(mode) and not S_ISLNK(mode): - push(fullname) - - return list - - -def _glob_to_re(pattern): - """Translate a shell-like glob pattern to a regular expression. - - Return a string containing the regex. Differs from - 'fnmatch.translate()' in that '*' does not match "special characters" - (which are platform-specific). - """ - pattern_re = fnmatch.translate(pattern) - - # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which - # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix, - # and by extension they shouldn't match such "special characters" under - # any OS. So change all non-escaped dots in the RE to match any - # character except the special characters (currently: just os.sep). - sep = os.sep - if os.sep == '\\': - # we're using a regex to manipulate a regex, so we need - # to escape the backslash twice - sep = r'\\\\' - escaped = r'\1[^%s]' % sep - pattern_re = re.sub(r'((?': lambda x, y: x > y, - '>=': lambda x, y: x >= y, - '<': lambda x, y: x < y, - '<=': lambda x, y: x <= y, - 'in': lambda x, y: x in y, - 'not in': lambda x, y: x not in y} - - -def _operate(operation, x, y): - return _OPERATORS[operation](x, y) - - -# restricted set of variables -_VARS = {'sys.platform': sys.platform, - 'python_version': '%s.%s' % sys.version_info[:2], - # FIXME parsing sys.platform is not reliable, but there is no other - # way to get e.g. 
2.7.2+, and the PEP is defined with sys.version - 'python_full_version': sys.version.split(' ', 1)[0], - 'os.name': os.name, - 'platform.version': platform.version(), - 'platform.machine': platform.machine(), - 'platform.python_implementation': platform.python_implementation(), - } - - -class _Operation: - - def __init__(self, execution_context=None): - self.left = None - self.op = None - self.right = None - if execution_context is None: - execution_context = {} - self.execution_context = execution_context - - def _get_var(self, name): - if name in self.execution_context: - return self.execution_context[name] - return _VARS[name] - - def __repr__(self): - return '%s %s %s' % (self.left, self.op, self.right) - - def _is_string(self, value): - if value is None or len(value) < 2: - return False - for delimiter in '"\'': - if value[0] == value[-1] == delimiter: - return True - return False - - def _is_name(self, value): - return value in _VARS - - def _convert(self, value): - if value in _VARS: - return self._get_var(value) - return value.strip('"\'') - - def _check_name(self, value): - if value not in _VARS: - raise NameError(value) - - def _nonsense_op(self): - msg = 'This operation is not supported : "%s"' % self - raise SyntaxError(msg) - - def __call__(self): - # make sure we do something useful - if self._is_string(self.left): - if self._is_string(self.right): - self._nonsense_op() - self._check_name(self.right) - else: - if not self._is_string(self.right): - self._nonsense_op() - self._check_name(self.left) - - if self.op not in _OPERATORS: - raise TypeError('Operator not supported "%s"' % self.op) - - left = self._convert(self.left) - right = self._convert(self.right) - return _operate(self.op, left, right) - - -class _OR: - def __init__(self, left, right=None): - self.left = left - self.right = right - - def filled(self): - return self.right is not None - - def __repr__(self): - return 'OR(%r, %r)' % (self.left, self.right) - - def __call__(self): - return self.left() or self.right() - - -class _AND: - def __init__(self, left, right=None): - self.left = left - self.right = right - - def filled(self): - return self.right is not None - - def __repr__(self): - return 'AND(%r, %r)' % (self.left, self.right) - - def __call__(self): - return self.left() and self.right() - - -def interpret(marker, execution_context=None): - """Interpret a marker and return a result depending on environment.""" - marker = marker.strip().encode() - ops = [] - op_starting = True - for token in tokenize(BytesIO(marker).readline): - # Unpack token - toktype, tokval, rowcol, line, logical_line = token - if toktype not in (NAME, OP, STRING, ENDMARKER, ENCODING): - raise SyntaxError('Type not supported "%s"' % tokval) - - if op_starting: - op = _Operation(execution_context) - if len(ops) > 0: - last = ops[-1] - if isinstance(last, (_OR, _AND)) and not last.filled(): - last.right = op - else: - ops.append(op) - else: - ops.append(op) - op_starting = False - else: - op = ops[-1] - - if (toktype == ENDMARKER or - (toktype == NAME and tokval in ('and', 'or'))): - if toktype == NAME and tokval == 'and': - ops.append(_AND(ops.pop())) - elif toktype == NAME and tokval == 'or': - ops.append(_OR(ops.pop())) - op_starting = True - continue - - if isinstance(op, (_OR, _AND)) and op.right is not None: - op = op.right - - if ((toktype in (NAME, STRING) and tokval not in ('in', 'not')) - or (toktype == OP and tokval == '.')): - if op.op is None: - if op.left is None: - op.left = tokval - else: - op.left += tokval - else: - if 
op.right is None: - op.right = tokval - else: - op.right += tokval - elif toktype == OP or tokval in ('in', 'not'): - if tokval == 'in' and op.op == 'not': - op.op = 'not in' - else: - op.op = tokval - - for op in ops: - if not op(): - return False - return True diff --git a/Lib/packaging/metadata.py b/Lib/packaging/metadata.py deleted file mode 100644 --- a/Lib/packaging/metadata.py +++ /dev/null @@ -1,570 +0,0 @@ -"""Implementation of the Metadata for Python packages PEPs. - -Supports all metadata formats (1.0, 1.1, 1.2). -""" - -import re -import logging - -from io import StringIO -from email import message_from_file -from packaging import logger -from packaging.markers import interpret -from packaging.version import (is_valid_predicate, is_valid_version, - is_valid_versions) -from packaging.errors import (MetadataMissingError, - MetadataConflictError, - MetadataUnrecognizedVersionError) - -try: - # docutils is installed - from docutils.utils import Reporter - from docutils.parsers.rst import Parser - from docutils import frontend - from docutils import nodes - - class SilentReporter(Reporter): - - def __init__(self, source, report_level, halt_level, stream=None, - debug=0, encoding='ascii', error_handler='replace'): - self.messages = [] - super(SilentReporter, self).__init__( - source, report_level, halt_level, stream, - debug, encoding, error_handler) - - def system_message(self, level, message, *children, **kwargs): - self.messages.append((level, message, children, kwargs)) - - _HAS_DOCUTILS = True -except ImportError: - # docutils is not installed - _HAS_DOCUTILS = False - -# public API of this module -__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION'] - -# Encoding used for the PKG-INFO files -PKG_INFO_ENCODING = 'utf-8' - -# preferred version. 
Hopefully will be changed -# to 1.2 once PEP 345 is supported everywhere -PKG_INFO_PREFERRED_VERSION = '1.0' - -_LINE_PREFIX = re.compile('\n \|') -_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', - 'Summary', 'Description', - 'Keywords', 'Home-page', 'Author', 'Author-email', - 'License') - -_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', - 'Supported-Platform', 'Summary', 'Description', - 'Keywords', 'Home-page', 'Author', 'Author-email', - 'License', 'Classifier', 'Download-URL', 'Obsoletes', - 'Provides', 'Requires') - -_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier', - 'Download-URL') - -_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', - 'Supported-Platform', 'Summary', 'Description', - 'Keywords', 'Home-page', 'Author', 'Author-email', - 'Maintainer', 'Maintainer-email', 'License', - 'Classifier', 'Download-URL', 'Obsoletes-Dist', - 'Project-URL', 'Provides-Dist', 'Requires-Dist', - 'Requires-Python', 'Requires-External') - -_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python', - 'Obsoletes-Dist', 'Requires-External', 'Maintainer', - 'Maintainer-email', 'Project-URL') - -_ALL_FIELDS = set() -_ALL_FIELDS.update(_241_FIELDS) -_ALL_FIELDS.update(_314_FIELDS) -_ALL_FIELDS.update(_345_FIELDS) - - -def _version2fieldlist(version): - if version == '1.0': - return _241_FIELDS - elif version == '1.1': - return _314_FIELDS - elif version == '1.2': - return _345_FIELDS - raise MetadataUnrecognizedVersionError(version) - - -def _best_version(fields): - """Detect the best version depending on the fields used.""" - def _has_marker(keys, markers): - for marker in markers: - if marker in keys: - return True - return False - - keys = list(fields) - possible_versions = ['1.0', '1.1', '1.2'] - - # first let's try to see if a field is not part of one of the version - for key in keys: - if key not in _241_FIELDS and '1.0' in possible_versions: - possible_versions.remove('1.0') - if key not in _314_FIELDS and '1.1' in possible_versions: - possible_versions.remove('1.1') - if key not in _345_FIELDS and '1.2' in possible_versions: - possible_versions.remove('1.2') - - # possible_version contains qualified versions - if len(possible_versions) == 1: - return possible_versions[0] # found ! 
- elif len(possible_versions) == 0: - raise MetadataConflictError('Unknown metadata set') - - # let's see if one unique marker is found - is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS) - is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS) - if is_1_1 and is_1_2: - raise MetadataConflictError('You used incompatible 1.1 and 1.2 fields') - - # we have the choice, either 1.0, or 1.2 - # - 1.0 has a broken Summary field but works with all tools - # - 1.1 is to avoid - # - 1.2 fixes Summary but is not widespread yet - if not is_1_1 and not is_1_2: - # we couldn't find any specific marker - if PKG_INFO_PREFERRED_VERSION in possible_versions: - return PKG_INFO_PREFERRED_VERSION - if is_1_1: - return '1.1' - - # default marker when 1.0 is disqualified - return '1.2' - - -_ATTR2FIELD = { - 'metadata_version': 'Metadata-Version', - 'name': 'Name', - 'version': 'Version', - 'platform': 'Platform', - 'supported_platform': 'Supported-Platform', - 'summary': 'Summary', - 'description': 'Description', - 'keywords': 'Keywords', - 'home_page': 'Home-page', - 'author': 'Author', - 'author_email': 'Author-email', - 'maintainer': 'Maintainer', - 'maintainer_email': 'Maintainer-email', - 'license': 'License', - 'classifier': 'Classifier', - 'download_url': 'Download-URL', - 'obsoletes_dist': 'Obsoletes-Dist', - 'provides_dist': 'Provides-Dist', - 'requires_dist': 'Requires-Dist', - 'requires_python': 'Requires-Python', - 'requires_external': 'Requires-External', - 'requires': 'Requires', - 'provides': 'Provides', - 'obsoletes': 'Obsoletes', - 'project_url': 'Project-URL', -} - -_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist') -_VERSIONS_FIELDS = ('Requires-Python',) -_VERSION_FIELDS = ('Version',) -_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes', - 'Requires', 'Provides', 'Obsoletes-Dist', - 'Provides-Dist', 'Requires-Dist', 'Requires-External', - 'Project-URL', 'Supported-Platform') -_LISTTUPLEFIELDS = ('Project-URL',) - -_ELEMENTSFIELD = ('Keywords',) - -_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description') - -_MISSING = object() - -_FILESAFE = re.compile('[^A-Za-z0-9.]+') - - -class Metadata: - """The metadata of a release. - - Supports versions 1.0, 1.1 and 1.2 (auto-detected). 
You can - instantiate the class with one of these arguments (or none): - - *path*, the path to a METADATA file - - *fileobj* give a file-like object with METADATA as content - - *mapping* is a dict-like object - """ - # TODO document that execution_context and platform_dependent are used - # to filter on query, not when setting a key - # also document the mapping API and UNKNOWN default key - - def __init__(self, path=None, platform_dependent=False, - execution_context=None, fileobj=None, mapping=None): - self._fields = {} - self.requires_files = [] - self.docutils_support = _HAS_DOCUTILS - self.platform_dependent = platform_dependent - self.execution_context = execution_context - if [path, fileobj, mapping].count(None) < 2: - raise TypeError('path, fileobj and mapping are exclusive') - if path is not None: - self.read(path) - elif fileobj is not None: - self.read_file(fileobj) - elif mapping is not None: - self.update(mapping) - - def _set_best_version(self): - self._fields['Metadata-Version'] = _best_version(self._fields) - - def _write_field(self, file, name, value): - file.write('%s: %s\n' % (name, value)) - - def __getitem__(self, name): - return self.get(name) - - def __setitem__(self, name, value): - return self.set(name, value) - - def __delitem__(self, name): - field_name = self._convert_name(name) - try: - del self._fields[field_name] - except KeyError: - raise KeyError(name) - self._set_best_version() - - def __contains__(self, name): - return (name in self._fields or - self._convert_name(name) in self._fields) - - def _convert_name(self, name): - if name in _ALL_FIELDS: - return name - name = name.replace('-', '_').lower() - return _ATTR2FIELD.get(name, name) - - def _default_value(self, name): - if name in _LISTFIELDS or name in _ELEMENTSFIELD: - return [] - return 'UNKNOWN' - - def _check_rst_data(self, data): - """Return warnings when the provided data has syntax errors.""" - source_path = StringIO() - parser = Parser() - settings = frontend.OptionParser().get_default_values() - settings.tab_width = 4 - settings.pep_references = None - settings.rfc_references = None - reporter = SilentReporter(source_path, - settings.report_level, - settings.halt_level, - stream=settings.warning_stream, - debug=settings.debug, - encoding=settings.error_encoding, - error_handler=settings.error_encoding_error_handler) - - document = nodes.document(settings, reporter, source=source_path) - document.note_source(source_path, -1) - try: - parser.parse(data, document) - except AttributeError: - reporter.messages.append((-1, 'Could not finish the parsing.', - '', {})) - - return reporter.messages - - def _platform(self, value): - if not self.platform_dependent or ';' not in value: - return True, value - value, marker = value.split(';') - return interpret(marker, self.execution_context), value - - def _remove_line_prefix(self, value): - return _LINE_PREFIX.sub('\n', value) - - # - # Public API - # - def get_fullname(self, filesafe=False): - """Return the distribution name with version. - - If filesafe is true, return a filename-escaped form.""" - name, version = self['Name'], self['Version'] - if filesafe: - # For both name and version any runs of non-alphanumeric or '.' - # characters are replaced with a single '-'. Additionally any - # spaces in the version string become '.' 
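A brief, hedged sketch of the mapping-style API just described; the project data is invented:

    from packaging.metadata import Metadata

    meta = Metadata(mapping={'name': 'FooBar', 'version': '1.1',
                             'requires_dist': ['BarBaz (>=1.0)']})
    meta['Name']                 # 'FooBar'
    meta.get_fullname()          # 'FooBar-1.1'
    meta['Metadata-Version']     # '1.2', forced by the Requires-Dist field
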
- name = _FILESAFE.sub('-', name) - version = _FILESAFE.sub('-', version.replace(' ', '.')) - return '%s-%s' % (name, version) - - def is_metadata_field(self, name): - """return True if name is a valid metadata key""" - name = self._convert_name(name) - return name in _ALL_FIELDS - - def is_multi_field(self, name): - name = self._convert_name(name) - return name in _LISTFIELDS - - def read(self, filepath): - """Read the metadata values from a file path.""" - with open(filepath, 'r', encoding='utf-8') as fp: - self.read_file(fp) - - def read_file(self, fileob): - """Read the metadata values from a file object.""" - msg = message_from_file(fileob) - self._fields['Metadata-Version'] = msg['metadata-version'] - - for field in _version2fieldlist(self['Metadata-Version']): - if field in _LISTFIELDS: - # we can have multiple lines - values = msg.get_all(field) - if field in _LISTTUPLEFIELDS and values is not None: - values = [tuple(value.split(',')) for value in values] - self.set(field, values) - else: - # single line - value = msg[field] - if value is not None and value != 'UNKNOWN': - self.set(field, value) - - def write(self, filepath): - """Write the metadata fields to filepath.""" - with open(filepath, 'w', encoding='utf-8') as fp: - self.write_file(fp) - - def write_file(self, fileobject): - """Write the PKG-INFO format data to a file object.""" - self._set_best_version() - for field in _version2fieldlist(self['Metadata-Version']): - values = self.get(field) - if field in _ELEMENTSFIELD: - self._write_field(fileobject, field, ','.join(values)) - continue - if field not in _LISTFIELDS: - if field == 'Description': - values = values.replace('\n', '\n |') - values = [values] - - if field in _LISTTUPLEFIELDS: - values = [','.join(value) for value in values] - - for value in values: - self._write_field(fileobject, field, value) - - def update(self, other=None, **kwargs): - """Set metadata values from the given iterable `other` and kwargs. - - Behavior is like `dict.update`: If `other` has a ``keys`` method, - they are looped over and ``self[key]`` is assigned ``other[key]``. - Else, ``other`` is an iterable of ``(key, value)`` iterables. - - Keys that don't match a metadata field or that have an empty value are - dropped. - """ - # XXX the code should just use self.set, which does tbe same checks and - # conversions already, but that would break packaging.pypi: it uses the - # update method, which does not call _set_best_version (which set - # does), and thus allows having a Metadata object (as long as you don't - # modify or write it) with extra fields from PyPI that are not fields - # defined in Metadata PEPs. 
to solve it, the best_version system - # should be reworked so that it's called only for writing, or in a new - # strict mode, or with a new, more lax Metadata subclass in p7g.pypi - def _set(key, value): - if key in _ATTR2FIELD and value: - self.set(self._convert_name(key), value) - - if not other: - # other is None or empty container - pass - elif hasattr(other, 'keys'): - for k in other.keys(): - _set(k, other[k]) - else: - for k, v in other: - _set(k, v) - - if kwargs: - for k, v in kwargs.items(): - _set(k, v) - - def set(self, name, value): - """Control then set a metadata field.""" - name = self._convert_name(name) - - if ((name in _ELEMENTSFIELD or name == 'Platform') and - not isinstance(value, (list, tuple))): - if isinstance(value, str): - value = [v.strip() for v in value.split(',')] - else: - value = [] - elif (name in _LISTFIELDS and - not isinstance(value, (list, tuple))): - if isinstance(value, str): - value = [value] - else: - value = [] - - if logger.isEnabledFor(logging.WARNING): - project_name = self['Name'] - - if name in _PREDICATE_FIELDS and value is not None: - for v in value: - # check that the values are valid predicates - if not is_valid_predicate(v.split(';')[0]): - logger.warning( - '%r: %r is not a valid predicate (field %r)', - project_name, v, name) - # FIXME this rejects UNKNOWN, is that right? - elif name in _VERSIONS_FIELDS and value is not None: - if not is_valid_versions(value): - logger.warning('%r: %r is not a valid version (field %r)', - project_name, value, name) - elif name in _VERSION_FIELDS and value is not None: - if not is_valid_version(value): - logger.warning('%r: %r is not a valid version (field %r)', - project_name, value, name) - - if name in _UNICODEFIELDS: - if name == 'Description': - value = self._remove_line_prefix(value) - - self._fields[name] = value - self._set_best_version() - - def get(self, name, default=_MISSING): - """Get a metadata field.""" - name = self._convert_name(name) - if name not in self._fields: - if default is _MISSING: - default = self._default_value(name) - return default - if name in _UNICODEFIELDS: - value = self._fields[name] - return value - elif name in _LISTFIELDS: - value = self._fields[name] - if value is None: - return [] - res = [] - for val in value: - valid, val = self._platform(val) - if not valid: - continue - if name not in _LISTTUPLEFIELDS: - res.append(val) - else: - # That's for Project-URL - res.append((val[0], val[1])) - return res - - elif name in _ELEMENTSFIELD: - valid, value = self._platform(self._fields[name]) - if not valid: - return [] - if isinstance(value, str): - return value.split(',') - valid, value = self._platform(self._fields[name]) - if not valid: - return None - return value - - def check(self, strict=False, restructuredtext=False): - """Check if the metadata is compliant. 
If strict is False then raise if - no Name or Version are provided""" - # XXX should check the versions (if the file was loaded) - missing, warnings = [], [] - - for attr in ('Name', 'Version'): # required by PEP 345 - if attr not in self: - missing.append(attr) - - if strict and missing != []: - msg = 'missing required metadata: %s' % ', '.join(missing) - raise MetadataMissingError(msg) - - for attr in ('Home-page', 'Author'): - if attr not in self: - missing.append(attr) - - if _HAS_DOCUTILS and restructuredtext: - warnings.extend(self._check_rst_data(self['Description'])) - - # checking metadata 1.2 (XXX needs to check 1.1, 1.0) - if self['Metadata-Version'] != '1.2': - return missing, warnings - - def is_valid_predicates(value): - for v in value: - if not is_valid_predicate(v.split(';')[0]): - return False - return True - - for fields, controller in ((_PREDICATE_FIELDS, is_valid_predicates), - (_VERSIONS_FIELDS, is_valid_versions), - (_VERSION_FIELDS, is_valid_version)): - for field in fields: - value = self.get(field, None) - if value is not None and not controller(value): - warnings.append('Wrong value for %r: %s' % (field, value)) - - return missing, warnings - - def todict(self): - """Return fields as a dict. - - Field names will be converted to use the underscore-lowercase style - instead of hyphen-mixed case (i.e. home_page instead of Home-page). - """ - data = { - 'metadata_version': self['Metadata-Version'], - 'name': self['Name'], - 'version': self['Version'], - 'summary': self['Summary'], - 'home_page': self['Home-page'], - 'author': self['Author'], - 'author_email': self['Author-email'], - 'license': self['License'], - 'description': self['Description'], - 'keywords': self['Keywords'], - 'platform': self['Platform'], - 'classifier': self['Classifier'], - 'download_url': self['Download-URL'], - } - - if self['Metadata-Version'] == '1.2': - data['requires_dist'] = self['Requires-Dist'] - data['requires_python'] = self['Requires-Python'] - data['requires_external'] = self['Requires-External'] - data['provides_dist'] = self['Provides-Dist'] - data['obsoletes_dist'] = self['Obsoletes-Dist'] - data['project_url'] = [','.join(url) for url in - self['Project-URL']] - - elif self['Metadata-Version'] == '1.1': - data['provides'] = self['Provides'] - data['requires'] = self['Requires'] - data['obsoletes'] = self['Obsoletes'] - - return data - - # Mapping API - # XXX these methods should return views or sets in 3.x - - def keys(self): - return list(_version2fieldlist(self['Metadata-Version'])) - - def __iter__(self): - for key in self.keys(): - yield key - - def values(self): - return [self[key] for key in self.keys()] - - def items(self): - return [(key, self[key]) for key in self.keys()] diff --git a/Lib/packaging/pypi/__init__.py b/Lib/packaging/pypi/__init__.py deleted file mode 100644 --- a/Lib/packaging/pypi/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -"""Low-level and high-level APIs to interact with project indexes.""" - -__all__ = ['simple', - 'xmlrpc', - 'dist', - 'errors', - 'mirrors'] - -from packaging.pypi.dist import ReleaseInfo, ReleasesList, DistInfo diff --git a/Lib/packaging/pypi/base.py b/Lib/packaging/pypi/base.py deleted file mode 100644 --- a/Lib/packaging/pypi/base.py +++ /dev/null @@ -1,48 +0,0 @@ -"""Base class for index crawlers.""" - -from packaging.pypi.dist import ReleasesList - - -class BaseClient: - """Base class containing common methods for the index crawlers/clients""" - - def __init__(self, prefer_final, prefer_source): - self._prefer_final = 
prefer_final - self._prefer_source = prefer_source - self._index = self - - def _get_prefer_final(self, prefer_final=None): - """Return the prefer_final internal parameter or the specified one if - provided""" - if prefer_final: - return prefer_final - else: - return self._prefer_final - - def _get_prefer_source(self, prefer_source=None): - """Return the prefer_source internal parameter or the specified one if - provided""" - if prefer_source: - return prefer_source - else: - return self._prefer_source - - def _get_project(self, project_name): - """Return an project instance, create it if necessary""" - return self._projects.setdefault(project_name.lower(), - ReleasesList(project_name, index=self._index)) - - def download_distribution(self, requirements, temp_path=None, - prefer_source=None, prefer_final=None): - """Download a distribution from the last release according to the - requirements. - - If temp_path is provided, download to this path, otherwise, create a - temporary location for the download and return it. - """ - prefer_final = self._get_prefer_final(prefer_final) - prefer_source = self._get_prefer_source(prefer_source) - release = self.get_release(requirements, prefer_final) - if release: - dist = release.get_distribution(prefer_source=prefer_source) - return dist.download(temp_path) diff --git a/Lib/packaging/pypi/dist.py b/Lib/packaging/pypi/dist.py deleted file mode 100644 --- a/Lib/packaging/pypi/dist.py +++ /dev/null @@ -1,544 +0,0 @@ -"""Classes representing releases and distributions retrieved from indexes. - -A project (= unique name) can have several releases (= versions) and -each release can have several distributions (= sdist and bdists). - -Release objects contain metadata-related information (see PEP 376); -distribution objects contain download-related information. -""" - -import re -import hashlib -import tempfile -import urllib.request -import urllib.parse -import urllib.error -import urllib.parse -from shutil import unpack_archive - -from packaging.errors import IrrationalVersionError -from packaging.version import (suggest_normalized_version, NormalizedVersion, - get_version_predicate) -from packaging.metadata import Metadata -from packaging.pypi.errors import (HashDoesNotMatch, UnsupportedHashName, - CantParseArchiveName) - - -__all__ = ['ReleaseInfo', 'DistInfo', 'ReleasesList', 'get_infos_from_url'] - -EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz .egg".split() -MD5_HASH = re.compile(r'^.*#md5=([a-f0-9]+)$') -DIST_TYPES = ['bdist', 'sdist'] - - -class IndexReference: - """Mixin used to store the index reference""" - def set_index(self, index=None): - self._index = index - - -class ReleaseInfo(IndexReference): - """Represent a release of a project (a project with a specific version). - The release contain the _metadata informations related to this specific - version, and is also a container for distribution related informations. - - See the DistInfo class for more information about distributions. - """ - - def __init__(self, name, version, metadata=None, hidden=False, - index=None, **kwargs): - """ - :param name: the name of the distribution - :param version: the version of the distribution - :param metadata: the metadata fields of the release. - :type metadata: dict - :param kwargs: optional arguments for a new distribution. 
- """ - self.set_index(index) - self.name = name - self._version = None - self.version = version - if metadata: - self.metadata = Metadata(mapping=metadata) - else: - self.metadata = None - self.dists = {} - self.hidden = hidden - - if 'dist_type' in kwargs: - dist_type = kwargs.pop('dist_type') - self.add_distribution(dist_type, **kwargs) - - def set_version(self, version): - try: - self._version = NormalizedVersion(version) - except IrrationalVersionError: - suggestion = suggest_normalized_version(version) - if suggestion: - self.version = suggestion - else: - raise IrrationalVersionError(version) - - def get_version(self): - return self._version - - version = property(get_version, set_version) - - def fetch_metadata(self): - """If the metadata is not set, use the indexes to get it""" - if not self.metadata: - self._index.get_metadata(self.name, str(self.version)) - return self.metadata - - @property - def is_final(self): - """proxy to version.is_final""" - return self.version.is_final - - def fetch_distributions(self): - if self.dists is None: - self._index.get_distributions(self.name, str(self.version)) - if self.dists is None: - self.dists = {} - return self.dists - - def add_distribution(self, dist_type='sdist', python_version=None, - **params): - """Add distribution informations to this release. - If distribution information is already set for this distribution type, - add the given url paths to the distribution. This can be useful while - some of them fails to download. - - :param dist_type: the distribution type (eg. "sdist", "bdist", etc.) - :param params: the fields to be passed to the distribution object - (see the :class:DistInfo constructor). - """ - if dist_type not in DIST_TYPES: - raise ValueError(dist_type) - if dist_type in self.dists: - self.dists[dist_type].add_url(**params) - else: - self.dists[dist_type] = DistInfo(self, dist_type, - index=self._index, **params) - if python_version: - self.dists[dist_type].python_version = python_version - - def get_distribution(self, dist_type=None, prefer_source=True): - """Return a distribution. - - If dist_type is set, find first for this distribution type, and just - act as an alias of __get_item__. - - If prefer_source is True, search first for source distribution, and if - not return one existing distribution. - """ - if len(self.dists) == 0: - raise LookupError - if dist_type: - return self[dist_type] - if prefer_source: - if "sdist" in self.dists: - dist = self["sdist"] - else: - dist = next(self.dists.values()) - return dist - - def unpack(self, path=None, prefer_source=True): - """Unpack the distribution to the given path. - - If not destination is given, creates a temporary location. - - Returns the location of the extracted files (root). - """ - return self.get_distribution(prefer_source=prefer_source)\ - .unpack(path=path) - - def download(self, temp_path=None, prefer_source=True): - """Download the distribution, using the requirements. - - If more than one distribution match the requirements, use the last - version. - Download the distribution, and put it in the temp_path. If no temp_path - is given, creates and return one. - - Returns the complete absolute path to the downloaded archive. 
- """ - return self.get_distribution(prefer_source=prefer_source)\ - .download(path=temp_path) - - def set_metadata(self, metadata): - if not self.metadata: - self.metadata = Metadata() - self.metadata.update(metadata) - - def __getitem__(self, item): - """distributions are available using release["sdist"]""" - return self.dists[item] - - def _check_is_comparable(self, other): - if not isinstance(other, ReleaseInfo): - raise TypeError("cannot compare %s and %s" - % (type(self).__name__, type(other).__name__)) - elif self.name != other.name: - raise TypeError("cannot compare %s and %s" - % (self.name, other.name)) - - def __repr__(self): - return "<%s %s>" % (self.name, self.version) - - def __eq__(self, other): - self._check_is_comparable(other) - return self.version == other.version - - def __lt__(self, other): - self._check_is_comparable(other) - return self.version < other.version - - def __ne__(self, other): - return not self.__eq__(other) - - def __gt__(self, other): - return not (self.__lt__(other) or self.__eq__(other)) - - def __le__(self, other): - return self.__eq__(other) or self.__lt__(other) - - def __ge__(self, other): - return self.__eq__(other) or self.__gt__(other) - - # See http://docs.python.org/reference/datamodel#object.__hash__ - __hash__ = object.__hash__ - - -class DistInfo(IndexReference): - """Represents a distribution retrieved from an index (sdist, bdist, ...) - """ - - def __init__(self, release, dist_type=None, url=None, hashname=None, - hashval=None, is_external=True, python_version=None, - index=None): - """Create a new instance of DistInfo. - - :param release: a DistInfo class is relative to a release. - :param dist_type: the type of the dist (eg. source, bin-*, etc.) - :param url: URL where we found this distribution - :param hashname: the name of the hash we want to use. Refer to the - hashlib.new documentation for more information. - :param hashval: the hash value. - :param is_external: we need to know if the provided url comes from - an index browsing, or from an external resource. - - """ - self.set_index(index) - self.release = release - self.dist_type = dist_type - self.python_version = python_version - self._unpacked_dir = None - # set the downloaded path to None by default. The goal here - # is to not download distributions multiple times - self.downloaded_location = None - # We store urls in dict, because we need to have a bit more infos - # than the simple URL. It will be used later to find the good url to - # use. - # We have two _url* attributes: _url and urls. urls contains a list - # of dict for the different urls, and _url contains the choosen url, in - # order to dont make the selection process multiple times. - self.urls = [] - self._url = None - self.add_url(url, hashname, hashval, is_external) - - def add_url(self, url=None, hashname=None, hashval=None, is_external=True): - """Add a new url to the list of urls""" - if hashname is not None: - try: - hashlib.new(hashname) - except ValueError: - raise UnsupportedHashName(hashname) - if url not in [u['url'] for u in self.urls]: - self.urls.append({ - 'url': url, - 'hashname': hashname, - 'hashval': hashval, - 'is_external': is_external, - }) - # reset the url selection process - self._url = None - - @property - def url(self): - """Pick up the right url for the list of urls in self.urls""" - # We return internal urls over externals. - # If there is more than one internal or external, return the first - # one. 
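        # [Illustrative note added in editing, not part of the original
        #  distutils2/packaging source: with, say,
        #      urls = [{'url': 'http://mirror.example/p-1.0.tar.gz',
        #               'is_external': True, 'hashname': None, 'hashval': None},
        #              {'url': 'http://a.pypi.python.org/packages/p-1.0.tar.gz',
        #               'is_external': False, 'hashname': None, 'hashval': None}]
        #  the selection below settles on the second, internal entry; urls[0]
        #  is only used as a fallback when no internal URL is known.]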
- if self._url is None: - if len(self.urls) > 1: - internals_urls = [u for u in self.urls \ - if u['is_external'] == False] - if len(internals_urls) >= 1: - self._url = internals_urls[0] - if self._url is None: - self._url = self.urls[0] - return self._url - - @property - def is_source(self): - """return if the distribution is a source one or not""" - return self.dist_type == 'sdist' - - def download(self, path=None): - """Download the distribution to a path, and return it. - - If the path is given in path, use this, otherwise, generates a new one - Return the download location. - """ - if path is None: - path = tempfile.mkdtemp() - - # if we do not have downloaded it yet, do it. - if self.downloaded_location is None: - url = self.url['url'] - archive_name = urllib.parse.urlparse(url)[2].split('/')[-1] - filename, headers = urllib.request.urlretrieve(url, - path + "/" + archive_name) - self.downloaded_location = filename - self._check_md5(filename) - return self.downloaded_location - - def unpack(self, path=None): - """Unpack the distribution to the given path. - - If not destination is given, creates a temporary location. - - Returns the location of the extracted files (root). - """ - if not self._unpacked_dir: - if path is None: - path = tempfile.mkdtemp() - - filename = self.download(path) - unpack_archive(filename, path) - self._unpacked_dir = path - - return path - - def _check_md5(self, filename): - """Check that the md5 checksum of the given file matches the one in - url param""" - hashname = self.url['hashname'] - expected_hashval = self.url['hashval'] - if None not in (expected_hashval, hashname): - with open(filename, 'rb') as f: - hashval = hashlib.new(hashname) - hashval.update(f.read()) - - if hashval.hexdigest() != expected_hashval: - raise HashDoesNotMatch("got %s instead of %s" - % (hashval.hexdigest(), expected_hashval)) - - def __repr__(self): - if self.release is None: - return "" % self.dist_type - - return "<%s %s %s>" % ( - self.release.name, self.release.version, self.dist_type or "") - - -class ReleasesList(IndexReference): - """A container of Release. - - Provides useful methods and facilities to sort and filter releases. - """ - def __init__(self, name, releases=None, contains_hidden=False, index=None): - self.set_index(index) - self.releases = [] - self.name = name - self.contains_hidden = contains_hidden - if releases: - self.add_releases(releases) - - def fetch_releases(self): - self._index.get_releases(self.name) - return self.releases - - def filter(self, predicate): - """Filter and return a subset of releases matching the given predicate. - """ - return ReleasesList(self.name, [release for release in self.releases - if predicate.match(release.version)], - index=self._index) - - def get_last(self, requirements, prefer_final=None): - """Return the "last" release, that satisfy the given predicates. - - "last" is defined by the version number of the releases, you also could - set prefer_final parameter to True or False to change the order results - """ - predicate = get_version_predicate(requirements) - releases = self.filter(predicate) - if len(releases) == 0: - return None - releases.sort_releases(prefer_final, reverse=True) - return releases[0] - - def add_releases(self, releases): - """Add releases in the release list. - - :param: releases is a list of ReleaseInfo objects. - """ - for r in releases: - self.add_release(release=r) - - def add_release(self, version=None, dist_type='sdist', release=None, - **dist_args): - """Add a release to the list. 
- - The release can be passed in the `release` parameter, and in this case, - it will be crawled to extract the useful informations if necessary, or - the release informations can be directly passed in the `version` and - `dist_type` arguments. - - Other keywords arguments can be provided, and will be forwarded to the - distribution creation (eg. the arguments of the DistInfo constructor). - """ - if release: - if release.name.lower() != self.name.lower(): - raise ValueError("%s is not the same project as %s" % - (release.name, self.name)) - version = str(release.version) - - if version not in self.get_versions(): - # append only if not already exists - self.releases.append(release) - for dist in release.dists.values(): - for url in dist.urls: - self.add_release(version, dist.dist_type, **url) - else: - matches = [r for r in self.releases - if str(r.version) == version and r.name == self.name] - if not matches: - release = ReleaseInfo(self.name, version, index=self._index) - self.releases.append(release) - else: - release = matches[0] - - release.add_distribution(dist_type=dist_type, **dist_args) - - def sort_releases(self, prefer_final=False, reverse=True, *args, **kwargs): - """Sort the results with the given properties. - - The `prefer_final` argument can be used to specify if final - distributions (eg. not dev, beta or alpha) would be preferred or not. - - Results can be inverted by using `reverse`. - - Any other parameter provided will be forwarded to the sorted call. You - cannot redefine the key argument of "sorted" here, as it is used - internally to sort the releases. - """ - - sort_by = [] - if prefer_final: - sort_by.append("is_final") - sort_by.append("version") - - self.releases.sort( - key=lambda i: tuple(getattr(i, arg) for arg in sort_by), - reverse=reverse, *args, **kwargs) - - def get_release(self, version): - """Return a release from its version.""" - matches = [r for r in self.releases if str(r.version) == version] - if len(matches) != 1: - raise KeyError(version) - return matches[0] - - def get_versions(self): - """Return a list of releases versions contained""" - return [str(r.version) for r in self.releases] - - def __getitem__(self, key): - return self.releases[key] - - def __len__(self): - return len(self.releases) - - def __repr__(self): - string = 'Project "%s"' % self.name - if self.get_versions(): - string += ' versions: %s' % ', '.join(self.get_versions()) - return '<%s>' % string - - -def get_infos_from_url(url, probable_dist_name=None, is_external=True): - """Get useful informations from an URL. - - Return a dict of (name, version, url, hashtype, hash, is_external) - - :param url: complete url of the distribution - :param probable_dist_name: A probable name of the project. - :param is_external: Tell if the url commes from an index or from - an external URL. - """ - # if the url contains a md5 hash, get it. 
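    # [Illustrative note added in editing, not part of the original source:
    #  MD5_HASH, defined near the top of this module, captures the hex digest
    #  from a fragment such as
    #      "http://host.example/p-1.0.tar.gz#md5=0123456789abcdef"
    #  so match.group(1) below would yield '0123456789abcdef', and the
    #  "#md5=..." suffix is then stripped from the url.]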
- md5_hash = None - match = MD5_HASH.match(url) - if match is not None: - md5_hash = match.group(1) - # remove the hash - url = url.replace("#md5=%s" % md5_hash, "") - - # parse the archive name to find dist name and version - archive_name = urllib.parse.urlparse(url)[2].split('/')[-1] - extension_matched = False - # remove the extension from the name - for ext in EXTENSIONS: - if archive_name.endswith(ext): - archive_name = archive_name[:-len(ext)] - extension_matched = True - - name, version = split_archive_name(archive_name) - if extension_matched is True: - return {'name': name, - 'version': version, - 'url': url, - 'hashname': "md5", - 'hashval': md5_hash, - 'is_external': is_external, - 'dist_type': 'sdist'} - - -def split_archive_name(archive_name, probable_name=None): - """Split an archive name into two parts: name and version. - - Return the tuple (name, version) - """ - # Try to determine wich part is the name and wich is the version using the - # "-" separator. Take the larger part to be the version number then reduce - # if this not works. - def eager_split(str, maxsplit=2): - # split using the "-" separator - splits = str.rsplit("-", maxsplit) - name = splits[0] - version = "-".join(splits[1:]) - if version.startswith("-"): - version = version[1:] - if suggest_normalized_version(version) is None and maxsplit >= 0: - # we dont get a good version number: recurse ! - return eager_split(str, maxsplit - 1) - else: - return name, version - if probable_name is not None: - probable_name = probable_name.lower() - name = None - if probable_name is not None and probable_name in archive_name: - # we get the name from probable_name, if given. - name = probable_name - version = archive_name.lstrip(name) - else: - name, version = eager_split(archive_name) - - version = suggest_normalized_version(version) - if version is not None and name != "": - return name.lower(), version - else: - raise CantParseArchiveName(archive_name) diff --git a/Lib/packaging/pypi/errors.py b/Lib/packaging/pypi/errors.py deleted file mode 100644 --- a/Lib/packaging/pypi/errors.py +++ /dev/null @@ -1,39 +0,0 @@ -"""Exceptions raised by packaging.pypi code.""" - -from packaging.errors import PackagingPyPIError - - -class ProjectNotFound(PackagingPyPIError): - """Project has not been found""" - - -class DistributionNotFound(PackagingPyPIError): - """The release has not been found""" - - -class ReleaseNotFound(PackagingPyPIError): - """The release has not been found""" - - -class CantParseArchiveName(PackagingPyPIError): - """An archive name can't be parsed to find distribution name and version""" - - -class DownloadError(PackagingPyPIError): - """An error has occurs while downloading""" - - -class HashDoesNotMatch(DownloadError): - """Compared hashes does not match""" - - -class UnsupportedHashName(PackagingPyPIError): - """A unsupported hashname has been used""" - - -class UnableToDownload(PackagingPyPIError): - """All mirrors have been tried, without success""" - - -class InvalidSearchField(PackagingPyPIError): - """An invalid search field has been used""" diff --git a/Lib/packaging/pypi/mirrors.py b/Lib/packaging/pypi/mirrors.py deleted file mode 100644 --- a/Lib/packaging/pypi/mirrors.py +++ /dev/null @@ -1,52 +0,0 @@ -"""Utilities related to the mirror infrastructure defined in PEP 381.""" - -from string import ascii_lowercase -import socket - -DEFAULT_MIRROR_URL = "last.pypi.python.org" - - -def get_mirrors(hostname=None): - """Return the list of mirrors from the last record found on the DNS - entry:: - - >>> 
from packaging.pypi.mirrors import get_mirrors - >>> get_mirrors() - ['a.pypi.python.org', 'b.pypi.python.org', 'c.pypi.python.org', - 'd.pypi.python.org'] - - """ - if hostname is None: - hostname = DEFAULT_MIRROR_URL - - # return the last mirror registered on PyPI. - try: - hostname = socket.gethostbyname_ex(hostname)[0] - except socket.gaierror: - return [] - end_letter = hostname.split(".", 1) - - # determine the list from the last one. - return ["%s.%s" % (s, end_letter[1]) for s in string_range(end_letter[0])] - - -def string_range(last): - """Compute the range of string between "a" and last. - - This works for simple "a to z" lists, but also for "a to zz" lists. - """ - for k in range(len(last)): - for x in product(ascii_lowercase, repeat=(k + 1)): - result = ''.join(x) - yield result - if result == last: - return - - -def product(*args, **kwds): - pools = [tuple(arg) for arg in args] * kwds.get('repeat', 1) - result = [[]] - for pool in pools: - result = [x + [y] for x in result for y in pool] - for prod in result: - yield tuple(prod) diff --git a/Lib/packaging/pypi/simple.py b/Lib/packaging/pypi/simple.py deleted file mode 100644 --- a/Lib/packaging/pypi/simple.py +++ /dev/null @@ -1,462 +0,0 @@ -"""Spider using the screen-scraping "simple" PyPI API. - -This module contains the class Crawler, a simple spider that -can be used to find and retrieve distributions from a project index -(like the Python Package Index), using its so-called simple API (see -reference implementation available at http://pypi.python.org/simple/). -""" - -import http.client -import re -import socket -import sys -import urllib.request -import urllib.parse -import urllib.error -import os - -from fnmatch import translate -from functools import wraps -from packaging import logger -from packaging.metadata import Metadata -from packaging.version import get_version_predicate -from packaging import __version__ as packaging_version -from packaging.pypi.base import BaseClient -from packaging.pypi.dist import (ReleasesList, EXTENSIONS, - get_infos_from_url, MD5_HASH) -from packaging.pypi.errors import (PackagingPyPIError, DownloadError, - UnableToDownload, CantParseArchiveName, - ReleaseNotFound, ProjectNotFound) -from packaging.pypi.mirrors import get_mirrors - -__all__ = ['Crawler', 'DEFAULT_SIMPLE_INDEX_URL'] - -# -- Constants ----------------------------------------------- -DEFAULT_SIMPLE_INDEX_URL = "http://a.pypi.python.org/simple/" -DEFAULT_HOSTS = ("*",) -SOCKET_TIMEOUT = 15 -USER_AGENT = "Python-urllib/%s.%s packaging/%s" % ( - sys.version_info[0], sys.version_info[1], packaging_version) - -# -- Regexps ------------------------------------------------- -EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$') -HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I) -URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):', re.I).match - -# This pattern matches a character entity reference (a decimal numeric -# references, a hexadecimal numeric reference, or a named reference). -ENTITY_SUB = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub -REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I) - - -def socket_timeout(timeout=SOCKET_TIMEOUT): - """Decorator to add a socket timeout when requesting pages on PyPI. 
- """ - def wrapper(func): - @wraps(func) - def wrapped(self, *args, **kwargs): - old_timeout = socket.getdefaulttimeout() - if hasattr(self, "_timeout"): - timeout = self._timeout - socket.setdefaulttimeout(timeout) - try: - return func(self, *args, **kwargs) - finally: - socket.setdefaulttimeout(old_timeout) - return wrapped - return wrapper - - -def with_mirror_support(): - """Decorator that makes the mirroring support easier""" - def wrapper(func): - @wraps(func) - def wrapped(self, *args, **kwargs): - try: - return func(self, *args, **kwargs) - except DownloadError: - # if an error occurs, try with the next index_url - if self._mirrors_tries >= self._mirrors_max_tries: - try: - self._switch_to_next_mirror() - except KeyError: - raise UnableToDownload("Tried all mirrors") - else: - self._mirrors_tries += 1 - self._projects.clear() - return wrapped(self, *args, **kwargs) - return wrapped - return wrapper - - -class Crawler(BaseClient): - """Provides useful tools to request the Python Package Index simple API. - - You can specify both mirrors and mirrors_url, but mirrors_url will only be - used if mirrors is set to None. - - :param index_url: the url of the simple index to search on. - :param prefer_final: if the version is not mentioned, and the last - version is not a "final" one (alpha, beta, etc.), - pick up the last final version. - :param prefer_source: if the distribution type is not mentioned, pick up - the source one if available. - :param follow_externals: tell if following external links is needed or - not. Default is False. - :param hosts: a list of hosts allowed to be processed while using - follow_externals=True. Default behavior is to follow all - hosts. - :param follow_externals: tell if following external links is needed or - not. Default is False. - :param mirrors_url: the url to look on for DNS records giving mirror - addresses. - :param mirrors: a list of mirrors (see PEP 381). - :param timeout: time in seconds to consider a url has timeouted. - :param mirrors_max_tries": number of times to try requesting informations - on mirrors before switching. - """ - - def __init__(self, index_url=DEFAULT_SIMPLE_INDEX_URL, prefer_final=False, - prefer_source=True, hosts=DEFAULT_HOSTS, - follow_externals=False, mirrors_url=None, mirrors=None, - timeout=SOCKET_TIMEOUT, mirrors_max_tries=0): - super(Crawler, self).__init__(prefer_final, prefer_source) - self.follow_externals = follow_externals - - # mirroring attributes. - parsed = urllib.parse.urlparse(index_url) - self.scheme = parsed[0] - if self.scheme == 'file': - ender = os.path.sep - else: - ender = '/' - if not index_url.endswith(ender): - index_url += ender - # if no mirrors are defined, use the method described in PEP 381. - if mirrors is None: - mirrors = get_mirrors(mirrors_url) - self._mirrors = set(mirrors) - self._mirrors_used = set() - self.index_url = index_url - self._mirrors_max_tries = mirrors_max_tries - self._mirrors_tries = 0 - self._timeout = timeout - - # create a regexp to match all given hosts - self._allowed_hosts = re.compile('|'.join(map(translate, hosts))).match - - # we keep an index of pages we have processed, in order to avoid - # scanning them multple time (eg. if there is multiple pages pointing - # on one) - self._processed_urls = [] - self._projects = {} - - @with_mirror_support() - def search_projects(self, name=None, **kwargs): - """Search the index for projects containing the given name. - - Return a list of names. 
- """ - if '*' in name: - name.replace('*', '.*') - else: - name = "%s%s%s" % ('*.?', name, '*.?') - name = name.replace('*', '[^<]*') # avoid matching end tag - pattern = (']*>(%s)' % name).encode('utf-8') - projectname = re.compile(pattern, re.I) - matching_projects = [] - - with self._open_url(self.index_url) as index: - index_content = index.read() - - for match in projectname.finditer(index_content): - project_name = match.group(1).decode('utf-8') - matching_projects.append(self._get_project(project_name)) - return matching_projects - - def get_releases(self, requirements, prefer_final=None, - force_update=False): - """Search for releases and return a ReleasesList object containing - the results. - """ - predicate = get_version_predicate(requirements) - if predicate.name.lower() in self._projects and not force_update: - return self._projects.get(predicate.name.lower()) - prefer_final = self._get_prefer_final(prefer_final) - logger.debug('Reading info on PyPI about %s', predicate.name) - self._process_index_page(predicate.name) - - if predicate.name.lower() not in self._projects: - raise ProjectNotFound - - releases = self._projects.get(predicate.name.lower()) - releases.sort_releases(prefer_final=prefer_final) - return releases - - def get_release(self, requirements, prefer_final=None): - """Return only one release that fulfill the given requirements""" - predicate = get_version_predicate(requirements) - release = self.get_releases(predicate, prefer_final)\ - .get_last(predicate) - if not release: - raise ReleaseNotFound("No release matches the given criterias") - return release - - def get_distributions(self, project_name, version): - """Return the distributions found on the index for the specific given - release""" - # as the default behavior of get_release is to return a release - # containing the distributions, just alias it. - return self.get_release("%s (%s)" % (project_name, version)) - - def get_metadata(self, project_name, version): - """Return the metadatas from the simple index. - - Currently, download one archive, extract it and use the PKG-INFO file. - """ - release = self.get_distributions(project_name, version) - if not release.metadata: - location = release.get_distribution().unpack() - pkg_info = os.path.join(location, 'PKG-INFO') - release.metadata = Metadata(pkg_info) - return release - - def _switch_to_next_mirror(self): - """Switch to the next mirror (eg. point self.index_url to the next - mirror url. - - Raise a KeyError if all mirrors have been tried. - """ - self._mirrors_used.add(self.index_url) - index_url = self._mirrors.pop() - # XXX use urllib.parse for a real check of missing scheme part - if not index_url.startswith(("http://", "https://", "file://")): - index_url = "http://%s" % index_url - - if not index_url.endswith("/simple"): - index_url = "%s/simple/" % index_url - - self.index_url = index_url - - def _is_browsable(self, url): - """Tell if the given URL can be browsed or not. - - It uses the follow_externals and the hosts list to tell if the given - url is browsable or not. - """ - # if _index_url is contained in the given URL, we are browsing the - # index, and it's always "browsable". 
- # local files are always considered browable resources - if self.index_url in url or urllib.parse.urlparse(url)[0] == "file": - return True - elif self.follow_externals: - if self._allowed_hosts(urllib.parse.urlparse(url)[1]): # 1 is netloc - return True - else: - return False - return False - - def _is_distribution(self, link): - """Tell if the given URL matches to a distribution name or not. - """ - #XXX find a better way to check that links are distributions - # Using a regexp ? - for ext in EXTENSIONS: - if ext in link: - return True - return False - - def _register_release(self, release=None, release_info={}): - """Register a new release. - - Both a release or a dict of release_info can be provided, the preferred - way (eg. the quicker) is the dict one. - - Return the list of existing releases for the given project. - """ - # Check if the project already has a list of releases (refering to - # the project name). If not, create a new release list. - # Then, add the release to the list. - if release: - name = release.name - else: - name = release_info['name'] - if name.lower() not in self._projects: - self._projects[name.lower()] = ReleasesList(name, index=self._index) - - if release: - self._projects[name.lower()].add_release(release=release) - else: - name = release_info.pop('name') - version = release_info.pop('version') - dist_type = release_info.pop('dist_type') - self._projects[name.lower()].add_release(version, dist_type, - **release_info) - return self._projects[name.lower()] - - def _process_url(self, url, project_name=None, follow_links=True): - """Process an url and search for distributions packages. - - For each URL found, if it's a download, creates a PyPIdistribution - object. If it's a homepage and we can follow links, process it too. - - :param url: the url to process - :param project_name: the project name we are searching for. - :param follow_links: Do not want to follow links more than from one - level. This parameter tells if we want to follow - the links we find (eg. run recursively this - method on it) - """ - with self._open_url(url) as f: - base_url = f.url - if url not in self._processed_urls: - self._processed_urls.append(url) - link_matcher = self._get_link_matcher(url) - for link, is_download in link_matcher(f.read().decode(), base_url): - if link not in self._processed_urls: - if self._is_distribution(link) or is_download: - self._processed_urls.append(link) - # it's a distribution, so create a dist object - try: - infos = get_infos_from_url(link, project_name, - is_external=self.index_url not in url) - except CantParseArchiveName as e: - logger.warning( - "version has not been parsed: %s", e) - else: - self._register_release(release_info=infos) - else: - if self._is_browsable(link) and follow_links: - self._process_url(link, project_name, - follow_links=False) - - def _get_link_matcher(self, url): - """Returns the right link matcher function of the given url - """ - if self.index_url in url: - return self._simple_link_matcher - else: - return self._default_link_matcher - - def _get_full_url(self, url, base_url): - return urllib.parse.urljoin(base_url, self._htmldecode(url)) - - def _simple_link_matcher(self, content, base_url): - """Yield all links with a rel="download" or rel="homepage". - - This matches the simple index requirements for matching links. - If follow_externals is set to False, dont yeld the external - urls. - - :param content: the content of the page we want to parse - :param base_url: the url of this page. 
- """ - for match in HREF.finditer(content): - url = self._get_full_url(match.group(1), base_url) - if MD5_HASH.match(url): - yield (url, True) - - for match in REL.finditer(content): - # search for rel links. - tag, rel = match.groups() - rels = [s.strip() for s in rel.lower().split(',')] - if 'homepage' in rels or 'download' in rels: - for match in HREF.finditer(tag): - url = self._get_full_url(match.group(1), base_url) - if 'download' in rels or self._is_browsable(url): - # yield a list of (url, is_download) - yield (url, 'download' in rels) - - def _default_link_matcher(self, content, base_url): - """Yield all links found on the page. - """ - for match in HREF.finditer(content): - url = self._get_full_url(match.group(1), base_url) - if self._is_browsable(url): - yield (url, False) - - @with_mirror_support() - def _process_index_page(self, name): - """Find and process a PyPI page for the given project name. - - :param name: the name of the project to find the page - """ - # Browse and index the content of the given PyPI page. - if self.scheme == 'file': - ender = os.path.sep - else: - ender = '/' - url = self.index_url + name + ender - self._process_url(url, name) - - @socket_timeout() - def _open_url(self, url): - """Open a urllib2 request, handling HTTP authentication, and local - files support. - - """ - scheme, netloc, path, params, query, frag = urllib.parse.urlparse(url) - - # authentication stuff - if scheme in ('http', 'https'): - auth, host = urllib.parse.splituser(netloc) - else: - auth = None - - # add index.html automatically for filesystem paths - if scheme == 'file': - if url.endswith(os.path.sep): - url += "index.html" - - # add authorization headers if auth is provided - if auth: - auth = "Basic " + \ - urllib.parse.unquote(auth).encode('base64').strip() - new_url = urllib.parse.urlunparse(( - scheme, host, path, params, query, frag)) - request = urllib.request.Request(new_url) - request.add_header("Authorization", auth) - else: - request = urllib.request.Request(url) - request.add_header('User-Agent', USER_AGENT) - try: - fp = urllib.request.urlopen(request) - except (ValueError, http.client.InvalidURL) as v: - msg = ' '.join([str(arg) for arg in v.args]) - raise PackagingPyPIError('%s %s' % (url, msg)) - except urllib.error.HTTPError as v: - return v - except urllib.error.URLError as v: - raise DownloadError("Download error for %s: %s" % (url, v.reason)) - except http.client.BadStatusLine as v: - raise DownloadError('%s returned a bad status line. 
' - 'The server might be down, %s' % (url, v.line)) - except http.client.HTTPException as v: - raise DownloadError("Download error for %s: %s" % (url, v)) - except socket.timeout: - raise DownloadError("The server timeouted") - - if auth: - # Put authentication info back into request URL if same host, - # so that links found on the page will work - s2, h2, path2, param2, query2, frag2 = \ - urllib.parse.urlparse(fp.url) - if s2 == scheme and h2 == host: - fp.url = urllib.parse.urlunparse( - (s2, netloc, path2, param2, query2, frag2)) - return fp - - def _decode_entity(self, match): - what = match.group(1) - if what.startswith('#x'): - what = int(what[2:], 16) - elif what.startswith('#'): - what = int(what[1:]) - else: - from html.entities import name2codepoint - what = name2codepoint.get(what, match.group(0)) - return chr(what) - - def _htmldecode(self, text): - """Decode HTML entities in the given text.""" - return ENTITY_SUB(self._decode_entity, text) diff --git a/Lib/packaging/pypi/wrapper.py b/Lib/packaging/pypi/wrapper.py deleted file mode 100644 --- a/Lib/packaging/pypi/wrapper.py +++ /dev/null @@ -1,99 +0,0 @@ -"""Convenient client for all PyPI APIs. - -This module provides a ClientWrapper class which will use the "simple" -or XML-RPC API to request information or files from an index. -""" - -from packaging.pypi import simple, xmlrpc - -_WRAPPER_MAPPINGS = {'get_release': 'simple', - 'get_releases': 'simple', - 'search_projects': 'simple', - 'get_metadata': 'xmlrpc', - 'get_distributions': 'simple'} - -_WRAPPER_INDEXES = {'xmlrpc': xmlrpc.Client, - 'simple': simple.Crawler} - - -def switch_index_if_fails(func, wrapper): - """Decorator that switch of index (for instance from xmlrpc to simple) - if the first mirror return an empty list or raises an exception. - """ - def decorator(*args, **kwargs): - retry = True - exception = None - methods = [func] - for f in wrapper._indexes.values(): - if f != func.__self__ and hasattr(f, func.__name__): - methods.append(getattr(f, func.__name__)) - for method in methods: - try: - response = method(*args, **kwargs) - retry = False - except Exception as e: - exception = e - if not retry: - break - if retry and exception: - raise exception - else: - return response - return decorator - - -class ClientWrapper: - """Wrapper around simple and xmlrpc clients, - - Choose the best implementation to use depending the needs, using the given - mappings. - If one of the indexes returns an error, tries to use others indexes. - - :param index: tell which index to rely on by default. - :param index_classes: a dict of name:class to use as indexes. - :param indexes: a dict of name:index already instantiated - :param mappings: the mappings to use for this wrapper - """ - - def __init__(self, default_index='simple', index_classes=_WRAPPER_INDEXES, - indexes={}, mappings=_WRAPPER_MAPPINGS): - self._projects = {} - self._mappings = mappings - self._indexes = indexes - self._default_index = default_index - - # instantiate the classes and set their _project attribute to the one - # of the wrapper. - for name, cls in index_classes.items(): - obj = self._indexes.setdefault(name, cls()) - obj._projects = self._projects - obj._index = self - - def __getattr__(self, method_name): - """When asking for methods of the wrapper, return the implementation of - the wrapped classes, depending the mapping. 
- - Decorate the methods to switch of implementation if an error occurs - """ - real_method = None - if method_name in _WRAPPER_MAPPINGS: - obj = self._indexes[_WRAPPER_MAPPINGS[method_name]] - real_method = getattr(obj, method_name) - else: - # the method is not defined in the mappings, so we try first to get - # it via the default index, and rely on others if needed. - try: - real_method = getattr(self._indexes[self._default_index], - method_name) - except AttributeError: - other_indexes = [i for i in self._indexes - if i != self._default_index] - for index in other_indexes: - real_method = getattr(self._indexes[index], method_name, - None) - if real_method: - break - if real_method: - return switch_index_if_fails(real_method, self) - else: - raise AttributeError("No index have attribute '%s'" % method_name) diff --git a/Lib/packaging/pypi/xmlrpc.py b/Lib/packaging/pypi/xmlrpc.py deleted file mode 100644 --- a/Lib/packaging/pypi/xmlrpc.py +++ /dev/null @@ -1,200 +0,0 @@ -"""Spider using the XML-RPC PyPI API. - -This module contains the class Client, a spider that can be used to find -and retrieve distributions from a project index (like the Python Package -Index), using its XML-RPC API (see documentation of the reference -implementation at http://wiki.python.org/moin/PyPiXmlRpc). -""" - -import xmlrpc.client - -from packaging import logger -from packaging.errors import IrrationalVersionError -from packaging.version import get_version_predicate -from packaging.pypi.base import BaseClient -from packaging.pypi.errors import (ProjectNotFound, InvalidSearchField, - ReleaseNotFound) -from packaging.pypi.dist import ReleaseInfo - -__all__ = ['Client', 'DEFAULT_XMLRPC_INDEX_URL'] - -DEFAULT_XMLRPC_INDEX_URL = 'http://python.org/pypi' - -_SEARCH_FIELDS = ['name', 'version', 'author', 'author_email', 'maintainer', - 'maintainer_email', 'home_page', 'license', 'summary', - 'description', 'keywords', 'platform', 'download_url'] - - -class Client(BaseClient): - """Client to query indexes using XML-RPC method calls. - - If no server_url is specified, use the default PyPI XML-RPC URL, - defined in the DEFAULT_XMLRPC_INDEX_URL constant:: - - >>> client = Client() - >>> client.server_url == DEFAULT_XMLRPC_INDEX_URL - True - - >>> client = Client("http://someurl/") - >>> client.server_url - 'http://someurl/' - """ - - def __init__(self, server_url=DEFAULT_XMLRPC_INDEX_URL, prefer_final=False, - prefer_source=True): - super(Client, self).__init__(prefer_final, prefer_source) - self.server_url = server_url - self._projects = {} - - def get_release(self, requirements, prefer_final=False): - """Return a release with all complete metadata and distribution - related informations. - """ - prefer_final = self._get_prefer_final(prefer_final) - predicate = get_version_predicate(requirements) - releases = self.get_releases(predicate.name) - release = releases.get_last(predicate, prefer_final) - self.get_metadata(release.name, str(release.version)) - self.get_distributions(release.name, str(release.version)) - return release - - def get_releases(self, requirements, prefer_final=None, show_hidden=True, - force_update=False): - """Return the list of existing releases for a specific project. - - Cache the results from one call to another. - - If show_hidden is True, return the hidden releases too. - If force_update is True, reprocess the index to update the - informations (eg. make a new XML-RPC call). 
- :: - - >>> client = Client() - >>> client.get_releases('Foo') - ['1.1', '1.2', '1.3'] - - If no such project exists, raise a ProjectNotFound exception:: - - >>> client.get_project_versions('UnexistingProject') - ProjectNotFound: UnexistingProject - - """ - def get_versions(project_name, show_hidden): - return self.proxy.package_releases(project_name, show_hidden) - - predicate = get_version_predicate(requirements) - prefer_final = self._get_prefer_final(prefer_final) - project_name = predicate.name - if not force_update and (project_name.lower() in self._projects): - project = self._projects[project_name.lower()] - if not project.contains_hidden and show_hidden: - # if hidden releases are requested, and have an existing - # list of releases that does not contains hidden ones - all_versions = get_versions(project_name, show_hidden) - existing_versions = project.get_versions() - hidden_versions = set(all_versions) - set(existing_versions) - for version in hidden_versions: - project.add_release(release=ReleaseInfo(project_name, - version, index=self._index)) - else: - versions = get_versions(project_name, show_hidden) - if not versions: - raise ProjectNotFound(project_name) - project = self._get_project(project_name) - project.add_releases([ReleaseInfo(project_name, version, - index=self._index) - for version in versions]) - project = project.filter(predicate) - if len(project) == 0: - raise ReleaseNotFound("%s" % predicate) - project.sort_releases(prefer_final) - return project - - - def get_distributions(self, project_name, version): - """Grab informations about distributions from XML-RPC. - - Return a ReleaseInfo object, with distribution-related informations - filled in. - """ - url_infos = self.proxy.release_urls(project_name, version) - project = self._get_project(project_name) - if version not in project.get_versions(): - project.add_release(release=ReleaseInfo(project_name, version, - index=self._index)) - release = project.get_release(version) - for info in url_infos: - packagetype = info['packagetype'] - dist_infos = {'url': info['url'], - 'hashval': info['md5_digest'], - 'hashname': 'md5', - 'is_external': False, - 'python_version': info['python_version']} - release.add_distribution(packagetype, **dist_infos) - return release - - def get_metadata(self, project_name, version): - """Retrieve project metadata. - - Return a ReleaseInfo object, with metadata informations filled in. - """ - # to be case-insensitive, get the informations from the XMLRPC API - projects = [d['name'] for d in - self.proxy.search({'name': project_name}) - if d['name'].lower() == project_name] - if len(projects) > 0: - project_name = projects[0] - - metadata = self.proxy.release_data(project_name, version) - project = self._get_project(project_name) - if version not in project.get_versions(): - project.add_release(release=ReleaseInfo(project_name, version, - index=self._index)) - release = project.get_release(version) - release.set_metadata(metadata) - return release - - def search_projects(self, name=None, operator="or", **kwargs): - """Find using the keys provided in kwargs. - - You can set operator to "and" or "or". 
- """ - for key in kwargs: - if key not in _SEARCH_FIELDS: - raise InvalidSearchField(key) - if name: - kwargs["name"] = name - projects = self.proxy.search(kwargs, operator) - for p in projects: - project = self._get_project(p['name']) - try: - project.add_release(release=ReleaseInfo(p['name'], - p['version'], metadata={'summary': p['summary']}, - index=self._index)) - except IrrationalVersionError as e: - logger.warning("Irrational version error found: %s", e) - return [self._projects[p['name'].lower()] for p in projects] - - def get_all_projects(self): - """Return the list of all projects registered in the package index""" - projects = self.proxy.list_packages() - for name in projects: - self.get_releases(name, show_hidden=True) - - return [self._projects[name.lower()] for name in set(projects)] - - @property - def proxy(self): - """Property used to return the XMLRPC server proxy. - - If no server proxy is defined yet, creates a new one:: - - >>> client = Client() - >>> client.proxy() - - - """ - if not hasattr(self, '_server_proxy'): - self._server_proxy = xmlrpc.client.ServerProxy(self.server_url) - - return self._server_proxy diff --git a/Lib/packaging/run.py b/Lib/packaging/run.py deleted file mode 100644 --- a/Lib/packaging/run.py +++ /dev/null @@ -1,663 +0,0 @@ -"""Main command line parser. Implements the pysetup script.""" - -import os -import re -import sys -import getopt -import logging - -from packaging import logger -from packaging.dist import Distribution -from packaging.util import _is_archive_file, generate_setup_py -from packaging.command import get_command_class, STANDARD_COMMANDS -from packaging.install import install, install_local_project, remove -from packaging.database import get_distribution, get_distributions -from packaging.depgraph import generate_graph -from packaging.fancy_getopt import FancyGetopt -from packaging.errors import (PackagingArgError, PackagingError, - PackagingModuleError, PackagingClassError, - CCompilerError) - - -command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$') - -common_usage = """\ -Actions: -%(actions)s - -To get more help on an action, use: - - pysetup action --help -""" - -global_options = [ - # The fourth entry for verbose means that it can be repeated. - ('verbose', 'v', "run verbosely (default)", True), - ('quiet', 'q', "run quietly (turns verbosity off)"), - ('dry-run', 'n', "don't actually do anything"), - ('help', 'h', "show detailed help message"), - ('no-user-cfg', None, 'ignore pydistutils.cfg in your home directory'), - ('version', None, 'Display the version'), -] - -negative_opt = {'quiet': 'verbose'} - -display_options = [ - ('help-commands', None, "list all available commands"), -] - -display_option_names = [x[0].replace('-', '_') for x in display_options] - - -def _parse_args(args, options, long_options): - """Transform sys.argv input into a dict. - - :param args: the args to parse (i.e sys.argv) - :param options: the list of options to pass to getopt - :param long_options: the list of string with the names of the long options - to be passed to getopt. - - The function returns a dict with options/long_options as keys and matching - values as values. 
- """ - optlist, args = getopt.gnu_getopt(args, options, long_options) - optdict = {} - optdict['args'] = args - for k, v in optlist: - k = k.lstrip('-') - if k not in optdict: - optdict[k] = [] - if v: - optdict[k].append(v) - else: - optdict[k].append(v) - return optdict - - -class action_help: - """Prints a help message when the standard help flags: -h and --help - are used on the commandline. - """ - - def __init__(self, help_msg): - self.help_msg = help_msg - - def __call__(self, f): - def wrapper(*args, **kwargs): - f_args = args[1] - if '--help' in f_args or '-h' in f_args: - print(self.help_msg) - return - return f(*args, **kwargs) - return wrapper - - - at action_help("""\ -Usage: pysetup create - or: pysetup create --help - -Create a new Python project. -""") -def _create(distpatcher, args, **kw): - from packaging.create import main - return main() - - - at action_help("""\ -Usage: pysetup generate-setup - or: pysetup generate-setup --help - -Generate a setup.py script for backward-compatibility purposes. -""") -def _generate(distpatcher, args, **kw): - generate_setup_py() - logger.info('The setup.py was generated') - - - at action_help("""\ -Usage: pysetup graph dist - or: pysetup graph --help - -Print dependency graph for the distribution. - -positional arguments: - dist installed distribution name -""") -def _graph(dispatcher, args, **kw): - name = args[1] - dist = get_distribution(name, use_egg_info=True) - if dist is None: - logger.warning('Distribution not found.') - return 1 - else: - dists = get_distributions(use_egg_info=True) - graph = generate_graph(dists) - print(graph.repr_node(dist)) - - - at action_help("""\ -Usage: pysetup install [dist] - or: pysetup install [archive] - or: pysetup install [src_dir] - or: pysetup install --help - -Install a Python distribution from the indexes, source directory, or sdist. - -positional arguments: - archive path to source distribution (zip, tar.gz) - dist distribution name to install from the indexes - scr_dir path to source directory -""") -def _install(dispatcher, args, **kw): - # first check if we are in a source directory - if len(args) < 2: - # are we inside a project dir? - if os.path.isfile('setup.cfg') or os.path.isfile('setup.py'): - args.insert(1, os.getcwd()) - else: - logger.warning('No project to install.') - return 1 - - target = args[1] - # installing from a source dir or archive file? - if os.path.isdir(target) or _is_archive_file(target): - return not install_local_project(target) - else: - # download from PyPI - return not install(target) - - - at action_help("""\ -Usage: pysetup metadata [dist] - or: pysetup metadata [dist] [-f field ...] - or: pysetup metadata --help - -Print metadata for the distribution. 
- -positional arguments: - dist installed distribution name - -optional arguments: - -f metadata field to print; omit to get all fields -""") -def _metadata(dispatcher, args, **kw): - opts = _parse_args(args[1:], 'f:', []) - if opts['args']: - name = opts['args'][0] - dist = get_distribution(name, use_egg_info=True) - if dist is None: - logger.warning('%r not installed', name) - return 1 - elif os.path.isfile('setup.cfg'): - logger.info('searching local dir for metadata') - dist = Distribution() # XXX use config module - dist.parse_config_files() - else: - logger.warning('no argument given and no local setup.cfg found') - return 1 - - metadata = dist.metadata - - if 'f' in opts: - keys = (k for k in opts['f'] if k in metadata) - else: - keys = metadata.keys() - - for key in keys: - if key in metadata: - print(metadata._convert_name(key) + ':') - value = metadata[key] - if isinstance(value, list): - for v in value: - print(' ', v) - else: - print(' ', value.replace('\n', '\n ')) - - - at action_help("""\ -Usage: pysetup remove dist [-y] - or: pysetup remove --help - -Uninstall a Python distribution. - -positional arguments: - dist installed distribution name - -optional arguments: - -y auto confirm distribution removal -""") -def _remove(distpatcher, args, **kw): - opts = _parse_args(args[1:], 'y', []) - if 'y' in opts: - auto_confirm = True - else: - auto_confirm = False - - retcode = 0 - for dist in set(opts['args']): - try: - remove(dist, auto_confirm=auto_confirm) - except PackagingError: - logger.warning('%r not installed', dist) - retcode = 1 - - return retcode - - - at action_help("""\ -Usage: pysetup run [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...] - or: pysetup run --help - or: pysetup run --list-commands - or: pysetup run cmd --help -""") -def _run(dispatcher, args, **kw): - parser = dispatcher.parser - args = args[1:] - - commands = STANDARD_COMMANDS # FIXME display extra commands - - if args == ['--list-commands']: - print('List of available commands:') - for cmd in commands: - cls = dispatcher.cmdclass.get(cmd) or get_command_class(cmd) - desc = getattr(cls, 'description', '(no description available)') - print(' %s: %s' % (cmd, desc)) - return - - while args: - args = dispatcher._parse_command_opts(parser, args) - if args is None: - return - - # create the Distribution class - # need to feed setup.cfg here ! - dist = Distribution() - - # Find and parse the config file(s): they will override options from - # the setup script, but be overridden by the command line. - - # XXX still need to be extracted from Distribution - dist.parse_config_files() - - for cmd in dispatcher.commands: - # FIXME need to catch MetadataMissingError here (from the check command - # e.g.)--or catch any exception, print an error message and exit with 1 - dist.run_command(cmd, dispatcher.command_options[cmd]) - - return 0 - - - at action_help("""\ -Usage: pysetup list [dist ...] - or: pysetup list --help - -Print name, version and location for the matching installed distributions. 
- -positional arguments: - dist installed distribution name; omit to get all distributions -""") -def _list(dispatcher, args, **kw): - opts = _parse_args(args[1:], '', []) - dists = get_distributions(use_egg_info=True) - if opts['args']: - results = (d for d in dists if d.name.lower() in opts['args']) - listall = False - else: - results = dists - listall = True - - number = 0 - for dist in results: - print('%r %s (from %r)' % (dist.name, dist.version, dist.path)) - number += 1 - - if number == 0: - if listall: - logger.info('Nothing seems to be installed.') - else: - logger.warning('No matching distribution found.') - return 1 - else: - logger.info('Found %d projects installed.', number) - - - at action_help("""\ -Usage: pysetup search [project] [--simple [url]] [--xmlrpc [url] [--fieldname value ...] --operator or|and] - or: pysetup search --help - -Search the indexes for the matching projects. - -positional arguments: - project the project pattern to search for - -optional arguments: - --xmlrpc [url] whether to use the xmlrpc index or not. If an url is - specified, it will be used rather than the default one. - - --simple [url] whether to use the simple index or not. If an url is - specified, it will be used rather than the default one. - - --fieldname value Make a search on this field. Can only be used if - --xmlrpc has been selected or is the default index. - - --operator or|and Defines what is the operator to use when doing xmlrpc - searchs with multiple fieldnames. Can only be used if - --xmlrpc has been selected or is the default index. -""") -def _search(dispatcher, args, **kw): - """The search action. - - It is able to search for a specific index (specified with --index), using - the simple or xmlrpc index types (with --type xmlrpc / --type simple) - """ - #opts = _parse_args(args[1:], '', ['simple', 'xmlrpc']) - # 1. what kind of index is requested ? (xmlrpc / simple) - logger.error('not implemented') - return 1 - - -actions = [ - ('run', 'Run one or several commands', _run), - ('metadata', 'Display the metadata of a project', _metadata), - ('install', 'Install a project', _install), - ('remove', 'Remove a project', _remove), - ('search', 'Search for a project in the indexes', _search), - ('list', 'List installed projects', _list), - ('graph', 'Display a graph', _graph), - ('create', 'Create a project', _create), - ('generate-setup', 'Generate a backward-compatible setup.py', _generate), -] - - -class Dispatcher: - """Reads the command-line options - """ - def __init__(self, args=None): - self.verbose = 1 - self.dry_run = False - self.help = False - self.cmdclass = {} - self.commands = [] - self.command_options = {} - - for attr in display_option_names: - setattr(self, attr, False) - - self.parser = FancyGetopt(global_options + display_options) - self.parser.set_negative_aliases(negative_opt) - # FIXME this parses everything, including command options (e.g. 
"run - # build -i" errors with "option -i not recognized") - args = self.parser.getopt(args=args, object=self) - - # if first arg is "run", we have some commands - if len(args) == 0: - self.action = None - else: - self.action = args[0] - - allowed = [action[0] for action in actions] + [None] - if self.action not in allowed: - msg = 'Unrecognized action "%s"' % self.action - raise PackagingArgError(msg) - - self._set_logger() - self.args = args - - # for display options we return immediately - if self.help or self.action is None: - self._show_help(self.parser, display_options_=False) - - def _set_logger(self): - # setting up the logging level from the command-line options - # -q gets warning, error and critical - if self.verbose == 0: - level = logging.WARNING - # default level or -v gets info too - # XXX there's a bug somewhere: the help text says that -v is default - # (and verbose is set to 1 above), but when the user explicitly gives - # -v on the command line, self.verbose is incremented to 2! Here we - # compensate for that (I tested manually). On a related note, I think - # it's a good thing to use -q/nothing/-v/-vv on the command line - # instead of logging constants; it will be easy to add support for - # logging configuration in setup.cfg for advanced users. --merwok - elif self.verbose in (1, 2): - level = logging.INFO - else: # -vv and more for debug - level = logging.DEBUG - - # setting up the stream handler - handler = logging.StreamHandler(sys.stderr) - handler.setLevel(level) - logger.addHandler(handler) - logger.setLevel(level) - - def _parse_command_opts(self, parser, args): - # Pull the current command from the head of the command line - command = args[0] - if not command_re.match(command): - raise SystemExit("invalid command name %r" % (command,)) - self.commands.append(command) - - # Dig up the command class that implements this command, so we - # 1) know that it's a valid command, and 2) know which options - # it takes. - try: - cmd_class = get_command_class(command) - except PackagingModuleError as msg: - raise PackagingArgError(msg) - - # XXX We want to push this in packaging.command - # - # Require that the command class be derived from Command -- want - # to be sure that the basic "command" interface is implemented. - for meth in ('initialize_options', 'finalize_options', 'run'): - if hasattr(cmd_class, meth): - continue - raise PackagingClassError( - 'command %r must implement %r' % (cmd_class, meth)) - - # Also make sure that the command object provides a list of its - # known options. - if not (hasattr(cmd_class, 'user_options') and - isinstance(cmd_class.user_options, list)): - raise PackagingClassError( - "command class %s must provide " - "'user_options' attribute (a list of tuples)" % cmd_class) - - # If the command class has a list of negative alias options, - # merge it in with the global negative aliases. - _negative_opt = negative_opt.copy() - - if hasattr(cmd_class, 'negative_opt'): - _negative_opt.update(cmd_class.negative_opt) - - # Check for help_options in command class. They have a different - # format (tuple of four) so we need to preprocess them here. - if (hasattr(cmd_class, 'help_options') and - isinstance(cmd_class.help_options, list)): - help_options = cmd_class.help_options[:] - else: - help_options = [] - - # All commands support the global options too, just by adding - # in 'global_options'. 
- parser.set_option_table(global_options + - cmd_class.user_options + - help_options) - parser.set_negative_aliases(_negative_opt) - args, opts = parser.getopt(args[1:]) - - if hasattr(opts, 'help') and opts.help: - self._show_command_help(cmd_class) - return - - if (hasattr(cmd_class, 'help_options') and - isinstance(cmd_class.help_options, list)): - help_option_found = False - for help_option, short, desc, func in cmd_class.help_options: - if hasattr(opts, help_option.replace('-', '_')): - help_option_found = True - if callable(func): - func() - else: - raise PackagingClassError( - "invalid help function %r for help option %r: " - "must be a callable object (function, etc.)" - % (func, help_option)) - - if help_option_found: - return - - # Put the options from the command line into their official - # holding pen, the 'command_options' dictionary. - opt_dict = self.get_option_dict(command) - for name, value in vars(opts).items(): - opt_dict[name] = ("command line", value) - - return args - - def get_option_dict(self, command): - """Get the option dictionary for a given command. If that - command's option dictionary hasn't been created yet, then create it - and return the new dictionary; otherwise, return the existing - option dictionary. - """ - d = self.command_options.get(command) - if d is None: - d = self.command_options[command] = {} - return d - - def show_help(self): - self._show_help(self.parser) - - def print_usage(self, parser): - parser.set_option_table(global_options) - - actions_ = [' %s: %s' % (name, desc) for name, desc, __ in actions] - usage = common_usage % {'actions': '\n'.join(actions_)} - - parser.print_help(usage + "\nGlobal options:") - - def _show_help(self, parser, global_options_=True, display_options_=True, - commands=[]): - # late import because of mutual dependence between these modules - from packaging.command.cmd import Command - - print('Usage: pysetup [options] action [action_options]') - print() - if global_options_: - self.print_usage(self.parser) - print() - - if display_options_: - parser.set_option_table(display_options) - parser.print_help( - "Information display options (just display " + - "information, ignore any commands)") - print() - - for command in commands: - if isinstance(command, type) and issubclass(command, Command): - cls = command - else: - cls = get_command_class(command) - if (hasattr(cls, 'help_options') and - isinstance(cls.help_options, list)): - parser.set_option_table(cls.user_options + cls.help_options) - else: - parser.set_option_table(cls.user_options) - - parser.print_help("Options for %r command:" % cls.__name__) - print() - - def _show_command_help(self, command): - if isinstance(command, str): - command = get_command_class(command) - - desc = getattr(command, 'description', '(no description available)') - print('Description:', desc) - print() - - if (hasattr(command, 'help_options') and - isinstance(command.help_options, list)): - self.parser.set_option_table(command.user_options + - command.help_options) - else: - self.parser.set_option_table(command.user_options) - - self.parser.print_help("Options:") - print() - - def _get_command_groups(self): - """Helper function to retrieve all the command class names divided - into standard commands (listed in - packaging.command.STANDARD_COMMANDS) and extra commands (given in - self.cmdclass and not standard commands). 
- """ - extra_commands = [cmd for cmd in self.cmdclass - if cmd not in STANDARD_COMMANDS] - return STANDARD_COMMANDS, extra_commands - - def print_commands(self): - """Print out a help message listing all available commands with a - description of each. The list is divided into standard commands - (listed in packaging.command.STANDARD_COMMANDS) and extra commands - (given in self.cmdclass and not standard commands). The - descriptions come from the command class attribute - 'description'. - """ - std_commands, extra_commands = self._get_command_groups() - max_length = max(len(command) - for commands in (std_commands, extra_commands) - for command in commands) - - self.print_command_list(std_commands, "Standard commands", max_length) - if extra_commands: - print() - self.print_command_list(extra_commands, "Extra commands", - max_length) - - def print_command_list(self, commands, header, max_length): - """Print a subset of the list of all commands -- used by - 'print_commands()'. - """ - print(header + ":") - - for cmd in commands: - cls = self.cmdclass.get(cmd) or get_command_class(cmd) - description = getattr(cls, 'description', - '(no description available)') - - print(" %-*s %s" % (max_length, cmd, description)) - - def __call__(self): - if self.action is None: - return - - for action, desc, func in actions: - if action == self.action: - return func(self, self.args) - return -1 - - -def main(args=None): - old_level = logger.level - old_handlers = list(logger.handlers) - try: - dispatcher = Dispatcher(args) - if dispatcher.action is None: - return - return dispatcher() - except KeyboardInterrupt: - logger.info('interrupted') - return 1 - except (IOError, os.error, PackagingError, CCompilerError) as exc: - logger.exception(exc) - return 1 - finally: - logger.setLevel(old_level) - logger.handlers[:] = old_handlers - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/Lib/packaging/tests/LONG_DESC.txt b/Lib/packaging/tests/LONG_DESC.txt deleted file mode 100644 --- a/Lib/packaging/tests/LONG_DESC.txt +++ /dev/null @@ -1,44 +0,0 @@ -CLVault -======= - -CLVault uses Keyring to provide a command-line utility to safely store -and retrieve passwords. - -Install it using pip or the setup.py script:: - - $ python setup.py install - - $ pip install clvault - -Once it's installed, you will have three scripts installed in your -Python scripts folder, you can use to list, store and retrieve passwords:: - - $ clvault-set blog - Set your password: - Set the associated username (can be blank): tarek - Set a description (can be blank): My blog password - Password set. - - $ clvault-get blog - The username is "tarek" - The password has been copied in your clipboard - - $ clvault-list - Registered services: - blog My blog password - - -*clvault-set* takes a service name then prompt you for a password, and some -optional information about your service. The password is safely stored in -a keyring while the description is saved in a ``.clvault`` file in your -home directory. This file is created automatically the first time the command -is used. - -*clvault-get* copies the password for a given service in your clipboard, and -displays the associated user if any. - -*clvault-list* lists all registered services, with their description when -given. 
- - -Project page: http://bitbucket.org/tarek/clvault diff --git a/Lib/packaging/tests/PKG-INFO b/Lib/packaging/tests/PKG-INFO deleted file mode 100644 --- a/Lib/packaging/tests/PKG-INFO +++ /dev/null @@ -1,57 +0,0 @@ -Metadata-Version: 1.2 -Name: CLVault -Version: 0.5 -Summary: Command-Line utility to store and retrieve passwords -Home-page: http://bitbucket.org/tarek/clvault -Author: Tarek Ziade -Author-email: tarek at ziade.org -License: PSF -Keywords: keyring,password,crypt -Requires-Dist: foo; sys.platform == 'okook' -Requires-Dist: bar; sys.platform == '%s' -Platform: UNKNOWN -Description: CLVault - |======= - | - |CLVault uses Keyring to provide a command-line utility to safely store - |and retrieve passwords. - | - |Install it using pip or the setup.py script:: - | - | $ python setup.py install - | - | $ pip install clvault - | - |Once it's installed, you will have three scripts installed in your - |Python scripts folder, you can use to list, store and retrieve passwords:: - | - | $ clvault-set blog - | Set your password: - | Set the associated username (can be blank): tarek - | Set a description (can be blank): My blog password - | Password set. - | - | $ clvault-get blog - | The username is "tarek" - | The password has been copied in your clipboard - | - | $ clvault-list - | Registered services: - | blog My blog password - | - | - |*clvault-set* takes a service name then prompt you for a password, and some - |optional information about your service. The password is safely stored in - |a keyring while the description is saved in a ``.clvault`` file in your - |home directory. This file is created automatically the first time the command - |is used. - | - |*clvault-get* copies the password for a given service in your clipboard, and - |displays the associated user if any. - | - |*clvault-list* lists all registered services, with their description when - |given. - | - | - |Project page: http://bitbucket.org/tarek/clvault - | diff --git a/Lib/packaging/tests/SETUPTOOLS-PKG-INFO b/Lib/packaging/tests/SETUPTOOLS-PKG-INFO deleted file mode 100644 --- a/Lib/packaging/tests/SETUPTOOLS-PKG-INFO +++ /dev/null @@ -1,182 +0,0 @@ -Metadata-Version: 1.0 -Name: setuptools -Version: 0.6c9 -Summary: Download, build, install, upgrade, and uninstall Python packages -- easily! -Home-page: http://pypi.python.org/pypi/setuptools -Author: Phillip J. Eby -Author-email: distutils-sig at python.org -License: PSF or ZPL -Description: =============================== - Installing and Using Setuptools - =============================== - - .. contents:: **Table of Contents** - - - ------------------------- - Installation Instructions - ------------------------- - - Windows - ======= - - Install setuptools using the provided ``.exe`` installer. If you've previously - installed older versions of setuptools, please delete all ``setuptools*.egg`` - and ``setuptools.pth`` files from your system's ``site-packages`` directory - (and any other ``sys.path`` directories) FIRST. - - If you are upgrading a previous version of setuptools that was installed using - an ``.exe`` installer, please be sure to also *uninstall that older version* - via your system's "Add/Remove Programs" feature, BEFORE installing the newer - version. - - Once installation is complete, you will find an ``easy_install.exe`` program in - your Python ``Scripts`` subdirectory. Be sure to add this directory to your - ``PATH`` environment variable, if you haven't already done so. 
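Editor's note: the CLVault PKG-INFO above carries conditional dependencies: ``Requires-Dist: foo; sys.platform == 'okook'`` only applies when its environment marker evaluates true (the second marker, ``'%s'``, looks like placeholder fixture data). A plain-Python illustration of what evaluating such a marker amounts to, not the library's own marker evaluator::

    import sys

    requires_dist = [
        ("foo", "sys.platform == 'okook'"),
        ("bar", "sys.platform == '%s'"),
    ]
    for name, marker in requires_dist:
        wanted = marker.split('==', 1)[1].strip().strip("'")
        print('%s applies on this platform: %s' % (name, sys.platform == wanted))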
- - - RPM-Based Systems - ================= - - Install setuptools using the provided source RPM. The included ``.spec`` file - assumes you are installing using the default ``python`` executable, and is not - specific to a particular Python version. The ``easy_install`` executable will - be installed to a system ``bin`` directory such as ``/usr/bin``. - - If you wish to install to a location other than the default Python - installation's default ``site-packages`` directory (and ``$prefix/bin`` for - scripts), please use the ``.egg``-based installation approach described in the - following section. - - - Cygwin, Mac OS X, Linux, Other - ============================== - - 1. Download the appropriate egg for your version of Python (e.g. - ``setuptools-0.6c9-py2.4.egg``). Do NOT rename it. - - 2. Run it as if it were a shell script, e.g. ``sh setuptools-0.6c9-py2.4.egg``. - Setuptools will install itself using the matching version of Python (e.g. - ``python2.4``), and will place the ``easy_install`` executable in the - default location for installing Python scripts (as determined by the - standard distutils configuration files, or by the Python installation). - - If you want to install setuptools to somewhere other than ``site-packages`` or - your default distutils installation locations for libraries and scripts, you - may include EasyInstall command-line options such as ``--prefix``, - ``--install-dir``, and so on, following the ``.egg`` filename on the same - command line. For example:: - - sh setuptools-0.6c9-py2.4.egg --prefix=~ - - You can use ``--help`` to get a full options list, but we recommend consulting - the `EasyInstall manual`_ for detailed instructions, especially `the section - on custom installation locations`_. - - .. _EasyInstall manual: http://peak.telecommunity.com/DevCenter/EasyInstall - .. _the section on custom installation locations: http://peak.telecommunity.com/DevCenter/EasyInstall#custom-installation-locations - - - Cygwin Note - ----------- - - If you are trying to install setuptools for the **Windows** version of Python - (as opposed to the Cygwin version that lives in ``/usr/bin``), you must make - sure that an appropriate executable (``python2.3``, ``python2.4``, or - ``python2.5``) is on your **Cygwin** ``PATH`` when invoking the egg. For - example, doing the following at a Cygwin bash prompt will install setuptools - for the **Windows** Python found at ``C:\\Python24``:: - - ln -s /cygdrive/c/Python24/python.exe python2.4 - PATH=.:$PATH sh setuptools-0.6c9-py2.4.egg - rm python2.4 - - - Downloads - ========= - - All setuptools downloads can be found at `the project's home page in the Python - Package Index`_. Scroll to the very bottom of the page to find the links. - - .. _the project's home page in the Python Package Index: http://pypi.python.org/pypi/setuptools - - In addition to the PyPI downloads, the development version of ``setuptools`` - is available from the `Python SVN sandbox`_, and in-development versions of the - `0.6 branch`_ are available as well. - - .. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06 - - .. 
_Python SVN sandbox: http://svn.python.org/projects/sandbox/trunk/setuptools/#egg=setuptools-dev - - -------------------------------- - Using Setuptools and EasyInstall - -------------------------------- - - Here are some of the available manuals, tutorials, and other resources for - learning about Setuptools, Python Eggs, and EasyInstall: - - * `The EasyInstall user's guide and reference manual`_ - * `The setuptools Developer's Guide`_ - * `The pkg_resources API reference`_ - * `Package Compatibility Notes`_ (user-maintained) - * `The Internal Structure of Python Eggs`_ - - Questions, comments, and bug reports should be directed to the `distutils-sig - mailing list`_. If you have written (or know of) any tutorials, documentation, - plug-ins, or other resources for setuptools users, please let us know about - them there, so this reference list can be updated. If you have working, - *tested* patches to correct problems or add features, you may submit them to - the `setuptools bug tracker`_. - - .. _setuptools bug tracker: http://bugs.python.org/setuptools/ - .. _Package Compatibility Notes: http://peak.telecommunity.com/DevCenter/PackageNotes - .. _The Internal Structure of Python Eggs: http://peak.telecommunity.com/DevCenter/EggFormats - .. _The setuptools Developer's Guide: http://peak.telecommunity.com/DevCenter/setuptools - .. _The pkg_resources API reference: http://peak.telecommunity.com/DevCenter/PkgResources - .. _The EasyInstall user's guide and reference manual: http://peak.telecommunity.com/DevCenter/EasyInstall - .. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/ - - - ------- - Credits - ------- - - * The original design for the ``.egg`` format and the ``pkg_resources`` API was - co-created by Phillip Eby and Bob Ippolito. Bob also implemented the first - version of ``pkg_resources``, and supplied the OS X operating system version - compatibility algorithm. - - * Ian Bicking implemented many early "creature comfort" features of - easy_install, including support for downloading via Sourceforge and - Subversion repositories. Ian's comments on the Web-SIG about WSGI - application deployment also inspired the concept of "entry points" in eggs, - and he has given talks at PyCon and elsewhere to inform and educate the - community about eggs and setuptools. - - * Jim Fulton contributed time and effort to build automated tests of various - aspects of ``easy_install``, and supplied the doctests for the command-line - ``.exe`` wrappers on Windows. - - * Phillip J. Eby is the principal author and maintainer of setuptools, and - first proposed the idea of an importable binary distribution format for - Python application plug-ins. - - * Significant parts of the implementation of setuptools were funded by the Open - Source Applications Foundation, to provide a plug-in infrastructure for the - Chandler PIM application. In addition, many OSAF staffers (such as Mike - "Code Bear" Taylor) contributed their time and stress as guinea pigs for the - use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!) 
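Editor's note: PKG-INFO files such as the setuptools fixtures being removed here are plain RFC 2822-style header blocks, which is why repeated fields like ``Classifier`` simply appear once per value. A short sketch of reading one with the stdlib email parser; the path is an assumption for illustration::

    from email.parser import Parser

    with open('Lib/packaging/tests/SETUPTOOLS-PKG-INFO') as f:
        pkg_info = Parser().parse(f)

    print(pkg_info['Metadata-Version'], pkg_info['Name'], pkg_info['Version'])
    for classifier in pkg_info.get_all('Classifier'):
        print('   ', classifier)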
- - -Keywords: CPAN PyPI distutils eggs package management -Platform: UNKNOWN -Classifier: Development Status :: 3 - Alpha -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Python Software Foundation License -Classifier: License :: OSI Approved :: Zope Public License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: System :: Archiving :: Packaging -Classifier: Topic :: System :: Systems Administration -Classifier: Topic :: Utilities diff --git a/Lib/packaging/tests/SETUPTOOLS-PKG-INFO2 b/Lib/packaging/tests/SETUPTOOLS-PKG-INFO2 deleted file mode 100644 --- a/Lib/packaging/tests/SETUPTOOLS-PKG-INFO2 +++ /dev/null @@ -1,183 +0,0 @@ -Metadata-Version: 1.1 -Name: setuptools -Version: 0.6c9 -Summary: Download, build, install, upgrade, and uninstall Python packages -- easily! -Home-page: http://pypi.python.org/pypi/setuptools -Author: Phillip J. Eby -Author-email: distutils-sig at python.org -License: PSF or ZPL -Description: =============================== - Installing and Using Setuptools - =============================== - - .. contents:: **Table of Contents** - - - ------------------------- - Installation Instructions - ------------------------- - - Windows - ======= - - Install setuptools using the provided ``.exe`` installer. If you've previously - installed older versions of setuptools, please delete all ``setuptools*.egg`` - and ``setuptools.pth`` files from your system's ``site-packages`` directory - (and any other ``sys.path`` directories) FIRST. - - If you are upgrading a previous version of setuptools that was installed using - an ``.exe`` installer, please be sure to also *uninstall that older version* - via your system's "Add/Remove Programs" feature, BEFORE installing the newer - version. - - Once installation is complete, you will find an ``easy_install.exe`` program in - your Python ``Scripts`` subdirectory. Be sure to add this directory to your - ``PATH`` environment variable, if you haven't already done so. - - - RPM-Based Systems - ================= - - Install setuptools using the provided source RPM. The included ``.spec`` file - assumes you are installing using the default ``python`` executable, and is not - specific to a particular Python version. The ``easy_install`` executable will - be installed to a system ``bin`` directory such as ``/usr/bin``. - - If you wish to install to a location other than the default Python - installation's default ``site-packages`` directory (and ``$prefix/bin`` for - scripts), please use the ``.egg``-based installation approach described in the - following section. - - - Cygwin, Mac OS X, Linux, Other - ============================== - - 1. Download the appropriate egg for your version of Python (e.g. - ``setuptools-0.6c9-py2.4.egg``). Do NOT rename it. - - 2. Run it as if it were a shell script, e.g. ``sh setuptools-0.6c9-py2.4.egg``. - Setuptools will install itself using the matching version of Python (e.g. - ``python2.4``), and will place the ``easy_install`` executable in the - default location for installing Python scripts (as determined by the - standard distutils configuration files, or by the Python installation). 
- - If you want to install setuptools to somewhere other than ``site-packages`` or - your default distutils installation locations for libraries and scripts, you - may include EasyInstall command-line options such as ``--prefix``, - ``--install-dir``, and so on, following the ``.egg`` filename on the same - command line. For example:: - - sh setuptools-0.6c9-py2.4.egg --prefix=~ - - You can use ``--help`` to get a full options list, but we recommend consulting - the `EasyInstall manual`_ for detailed instructions, especially `the section - on custom installation locations`_. - - .. _EasyInstall manual: http://peak.telecommunity.com/DevCenter/EasyInstall - .. _the section on custom installation locations: http://peak.telecommunity.com/DevCenter/EasyInstall#custom-installation-locations - - - Cygwin Note - ----------- - - If you are trying to install setuptools for the **Windows** version of Python - (as opposed to the Cygwin version that lives in ``/usr/bin``), you must make - sure that an appropriate executable (``python2.3``, ``python2.4``, or - ``python2.5``) is on your **Cygwin** ``PATH`` when invoking the egg. For - example, doing the following at a Cygwin bash prompt will install setuptools - for the **Windows** Python found at ``C:\\Python24``:: - - ln -s /cygdrive/c/Python24/python.exe python2.4 - PATH=.:$PATH sh setuptools-0.6c9-py2.4.egg - rm python2.4 - - - Downloads - ========= - - All setuptools downloads can be found at `the project's home page in the Python - Package Index`_. Scroll to the very bottom of the page to find the links. - - .. _the project's home page in the Python Package Index: http://pypi.python.org/pypi/setuptools - - In addition to the PyPI downloads, the development version of ``setuptools`` - is available from the `Python SVN sandbox`_, and in-development versions of the - `0.6 branch`_ are available as well. - - .. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06 - - .. _Python SVN sandbox: http://svn.python.org/projects/sandbox/trunk/setuptools/#egg=setuptools-dev - - -------------------------------- - Using Setuptools and EasyInstall - -------------------------------- - - Here are some of the available manuals, tutorials, and other resources for - learning about Setuptools, Python Eggs, and EasyInstall: - - * `The EasyInstall user's guide and reference manual`_ - * `The setuptools Developer's Guide`_ - * `The pkg_resources API reference`_ - * `Package Compatibility Notes`_ (user-maintained) - * `The Internal Structure of Python Eggs`_ - - Questions, comments, and bug reports should be directed to the `distutils-sig - mailing list`_. If you have written (or know of) any tutorials, documentation, - plug-ins, or other resources for setuptools users, please let us know about - them there, so this reference list can be updated. If you have working, - *tested* patches to correct problems or add features, you may submit them to - the `setuptools bug tracker`_. - - .. _setuptools bug tracker: http://bugs.python.org/setuptools/ - .. _Package Compatibility Notes: http://peak.telecommunity.com/DevCenter/PackageNotes - .. _The Internal Structure of Python Eggs: http://peak.telecommunity.com/DevCenter/EggFormats - .. _The setuptools Developer's Guide: http://peak.telecommunity.com/DevCenter/setuptools - .. _The pkg_resources API reference: http://peak.telecommunity.com/DevCenter/PkgResources - .. _The EasyInstall user's guide and reference manual: http://peak.telecommunity.com/DevCenter/EasyInstall - .. 
_distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/ - - - ------- - Credits - ------- - - * The original design for the ``.egg`` format and the ``pkg_resources`` API was - co-created by Phillip Eby and Bob Ippolito. Bob also implemented the first - version of ``pkg_resources``, and supplied the OS X operating system version - compatibility algorithm. - - * Ian Bicking implemented many early "creature comfort" features of - easy_install, including support for downloading via Sourceforge and - Subversion repositories. Ian's comments on the Web-SIG about WSGI - application deployment also inspired the concept of "entry points" in eggs, - and he has given talks at PyCon and elsewhere to inform and educate the - community about eggs and setuptools. - - * Jim Fulton contributed time and effort to build automated tests of various - aspects of ``easy_install``, and supplied the doctests for the command-line - ``.exe`` wrappers on Windows. - - * Phillip J. Eby is the principal author and maintainer of setuptools, and - first proposed the idea of an importable binary distribution format for - Python application plug-ins. - - * Significant parts of the implementation of setuptools were funded by the Open - Source Applications Foundation, to provide a plug-in infrastructure for the - Chandler PIM application. In addition, many OSAF staffers (such as Mike - "Code Bear" Taylor) contributed their time and stress as guinea pigs for the - use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!) - - -Keywords: CPAN PyPI distutils eggs package management -Platform: UNKNOWN -Classifier: Development Status :: 3 - Alpha -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Python Software Foundation License -Classifier: License :: OSI Approved :: Zope Public License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: System :: Archiving :: Packaging -Classifier: Topic :: System :: Systems Administration -Classifier: Topic :: Utilities -Requires: Foo diff --git a/Lib/packaging/tests/__init__.py b/Lib/packaging/tests/__init__.py deleted file mode 100644 --- a/Lib/packaging/tests/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Test suite for packaging. - -This test suite consists of a collection of test modules in the -packaging.tests package. Each test module has a name starting with -'test' and contains a function test_suite(). The function is expected -to return an initialized unittest.TestSuite instance. - -Utility code is included in packaging.tests.support. - -Always import unittest from this module: it will be unittest from the -standard library for packaging tests and unittest2 for distutils2 tests. -""" - -import os -import sys -import unittest - - -def test_suite(): - suite = unittest.TestSuite() - here = os.path.dirname(__file__) or os.curdir - for fn in os.listdir(here): - if fn.startswith("test") and fn.endswith(".py"): - modname = "packaging.tests." + fn[:-3] - __import__(modname) - module = sys.modules[modname] - suite.addTest(module.test_suite()) - return suite diff --git a/Lib/packaging/tests/__main__.py b/Lib/packaging/tests/__main__.py deleted file mode 100644 --- a/Lib/packaging/tests/__main__.py +++ /dev/null @@ -1,24 +0,0 @@ -"""Packaging test suite runner.""" - -# Ripped from importlib tests, thanks Brett! 
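Editor's note: the ``__main__`` module whose removal starts here drives the whole suite through unittest discovery, so while the package was still in the tree it could be invoked as ``python -m packaging.tests``. A rough stand-alone equivalent of its ``test_main()``, with the directories spelled out as assumptions::

    import unittest

    # discover() defaults to the 'test*.py' pattern, which matches the
    # packaging.tests.test_* modules removed in this changeset.
    loader = unittest.TestLoader()
    suite = loader.discover('Lib/packaging/tests', top_level_dir='Lib')
    unittest.TextTestRunner(verbosity=1).run(suite)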
- -import os -import unittest -from test.support import run_unittest, reap_children, reap_threads - - - at reap_threads -def test_main(): - try: - start_dir = os.path.dirname(__file__) - top_dir = os.path.dirname(os.path.dirname(start_dir)) - test_loader = unittest.TestLoader() - # XXX find out how to use unittest.main, to get command-line options - # (failfast, catch, etc.) - run_unittest(test_loader.discover(start_dir, top_level_dir=top_dir)) - finally: - reap_children() - - -if __name__ == '__main__': - test_main() diff --git a/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/INSTALLER b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/INSTALLER deleted file mode 100644 diff --git a/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/METADATA b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/METADATA deleted file mode 100644 --- a/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/METADATA +++ /dev/null @@ -1,4 +0,0 @@ -Metadata-version: 1.2 -Name: babar -Version: 0.1 -Author: FELD Boris \ No newline at end of file diff --git a/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RECORD b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RECORD deleted file mode 100644 diff --git a/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/REQUESTED b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/REQUESTED deleted file mode 100644 diff --git a/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RESOURCES b/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RESOURCES deleted file mode 100644 --- a/Lib/packaging/tests/fake_dists/babar-0.1.dist-info/RESOURCES +++ /dev/null @@ -1,2 +0,0 @@ -babar.png,babar.png -babar.cfg,babar.cfg \ No newline at end of file diff --git a/Lib/packaging/tests/fake_dists/babar.cfg b/Lib/packaging/tests/fake_dists/babar.cfg deleted file mode 100644 --- a/Lib/packaging/tests/fake_dists/babar.cfg +++ /dev/null @@ -1,1 +0,0 @@ -Config \ No newline at end of file diff --git a/Lib/packaging/tests/fake_dists/babar.png b/Lib/packaging/tests/fake_dists/babar.png deleted file mode 100644 diff --git a/Lib/packaging/tests/fake_dists/bacon-0.1.egg-info/PKG-INFO b/Lib/packaging/tests/fake_dists/bacon-0.1.egg-info/PKG-INFO deleted file mode 100644 --- a/Lib/packaging/tests/fake_dists/bacon-0.1.egg-info/PKG-INFO +++ /dev/null @@ -1,6 +0,0 @@ -Metadata-Version: 1.2 -Name: bacon -Version: 0.1 -Provides-Dist: truffles (2.0) -Provides-Dist: bacon (0.1) -Obsoletes-Dist: truffles (>=0.9,<=1.5) diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/PKG-INFO b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/PKG-INFO deleted file mode 100644 --- a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/PKG-INFO +++ /dev/null @@ -1,18 +0,0 @@ -Metadata-Version: 1.0 -Name: banana -Version: 0.4 -Summary: A yellow fruit -Home-page: http://en.wikipedia.org/wiki/Banana -Author: Josip Djolonga -Author-email: foo at nbar.com -License: BSD -Description: A fruit -Keywords: foo bar -Platform: UNKNOWN -Classifier: Development Status :: 4 - Beta -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: Science/Research -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Scientific/Engineering :: GIS diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/SOURCES.txt b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/SOURCES.txt deleted file mode 100644 diff --git 
a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/dependency_links.txt b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/dependency_links.txt deleted file mode 100644 --- a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/dependency_links.txt +++ /dev/null @@ -1,1 +0,0 @@ - diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/entry_points.txt b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/entry_points.txt deleted file mode 100644 --- a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/entry_points.txt +++ /dev/null @@ -1,3 +0,0 @@ - - # -*- Entry points: -*- - \ No newline at end of file diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/not-zip-safe b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/not-zip-safe deleted file mode 100644 --- a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/not-zip-safe +++ /dev/null @@ -1,1 +0,0 @@ - diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/requires.txt b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/requires.txt deleted file mode 100644 --- a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/requires.txt +++ /dev/null @@ -1,6 +0,0 @@ -# this should be ignored - -strawberry >=0.5 - -[section ignored] -foo ==0.5 diff --git a/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/top_level.txt b/Lib/packaging/tests/fake_dists/banana-0.4.egg/EGG-INFO/top_level.txt deleted file mode 100644 diff --git a/Lib/packaging/tests/fake_dists/cheese-2.0.2.egg-info b/Lib/packaging/tests/fake_dists/cheese-2.0.2.egg-info deleted file mode 100644 --- a/Lib/packaging/tests/fake_dists/cheese-2.0.2.egg-info +++ /dev/null @@ -1,5 +0,0 @@ -Metadata-Version: 1.2 -Name: cheese -Version: 2.0.2 -Provides-Dist: truffles (1.0.2) -Obsoletes-Dist: truffles (!=1.2,<=2.0) diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/INSTALLER b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/INSTALLER deleted file mode 100644 diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA deleted file mode 100644 --- a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/METADATA +++ /dev/null @@ -1,9 +0,0 @@ -Metadata-Version: 1.2 -Name: choxie -Version: 2.0.0.9 -Summary: Chocolate with a kick! 
-Requires-Dist: towel-stuff (0.1) -Requires-Dist: nut -Provides-Dist: truffles (1.0) -Obsoletes-Dist: truffles (<=0.8,>=0.5) -Obsoletes-Dist: truffles (<=0.9,>=0.6) diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/RECORD b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/RECORD deleted file mode 100644 diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/REQUESTED b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9.dist-info/REQUESTED deleted file mode 100644 diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/__init__.py b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/__init__.py deleted file mode 100644 --- a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/chocolate.py b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/chocolate.py deleted file mode 100644 --- a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/choxie/chocolate.py +++ /dev/null @@ -1,10 +0,0 @@ -# -*- coding: utf-8 -*- -from towel_stuff import Towel - -class Chocolate(object): - """A piece of chocolate.""" - - def wrap_with_towel(self): - towel = Towel() - towel.wrap(self) - return towel diff --git a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/truffles.py b/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/truffles.py deleted file mode 100644 --- a/Lib/packaging/tests/fake_dists/choxie-2.0.0.9/truffles.py +++ /dev/null @@ -1,5 +0,0 @@ -# -*- coding: utf-8 -*- -from choxie.chocolate import Chocolate - -class Truffle(Chocolate): - """A truffle.""" diff --git a/Lib/packaging/tests/fake_dists/coconuts-aster-10.3.egg-info/PKG-INFO b/Lib/packaging/tests/fake_dists/coconuts-aster-10.3.egg-info/PKG-INFO deleted file mode 100644 --- a/Lib/packaging/tests/fake_dists/coconuts-aster-10.3.egg-info/PKG-INFO +++ /dev/null @@ -1,5 +0,0 @@ -Metadata-Version: 1.2 -Name: coconuts-aster -Version: 10.3 -Provides-Dist: strawberry (0.6) -Provides-Dist: banana (0.4) diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/INSTALLER b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/INSTALLER deleted file mode 100644 diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/METADATA b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/METADATA deleted file mode 100644 --- a/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/METADATA +++ /dev/null @@ -1,5 +0,0 @@ -Metadata-Version: 1.2 -Name: grammar -Version: 1.0a4 -Requires-Dist: truffles (>=1.2) -Author: Sherlock Holmes diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/RECORD b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/RECORD deleted file mode 100644 diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/REQUESTED b/Lib/packaging/tests/fake_dists/grammar-1.0a4.dist-info/REQUESTED deleted file mode 100644 diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/__init__.py b/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/__init__.py deleted file mode 100644 --- a/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/utils.py b/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/utils.py deleted file mode 100644 --- a/Lib/packaging/tests/fake_dists/grammar-1.0a4/grammar/utils.py +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -from random import randint - -def 
is_valid_grammar(sentence): - if randint(0, 10) < 2: - return False - else: - return True diff --git a/Lib/packaging/tests/fake_dists/nut-funkyversion.egg-info b/Lib/packaging/tests/fake_dists/nut-funkyversion.egg-info deleted file mode 100644 --- a/Lib/packaging/tests/fake_dists/nut-funkyversion.egg-info +++ /dev/null @@ -1,3 +0,0 @@ -Metadata-Version: 1.2 -Name: nut -Version: funkyversion diff --git a/Lib/packaging/tests/fake_dists/strawberry-0.6.egg b/Lib/packaging/tests/fake_dists/strawberry-0.6.egg deleted file mode 100644 Binary file Lib/packaging/tests/fake_dists/strawberry-0.6.egg has changed diff --git a/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/INSTALLER b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/INSTALLER deleted file mode 100644 diff --git a/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/METADATA b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/METADATA deleted file mode 100644 --- a/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/METADATA +++ /dev/null @@ -1,7 +0,0 @@ -Metadata-Version: 1.2 -Name: towel-stuff -Version: 0.1 -Provides-Dist: truffles (1.1.2) -Provides-Dist: towel-stuff (0.1) -Obsoletes-Dist: truffles (!=0.8,<1.0) -Requires-Dist: bacon (<=0.2) diff --git a/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/RECORD b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/RECORD deleted file mode 100644 diff --git a/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/REQUESTED b/Lib/packaging/tests/fake_dists/towel_stuff-0.1.dist-info/REQUESTED deleted file mode 100644 diff --git a/Lib/packaging/tests/fake_dists/towel_stuff-0.1/towel_stuff/__init__.py b/Lib/packaging/tests/fake_dists/towel_stuff-0.1/towel_stuff/__init__.py deleted file mode 100644 --- a/Lib/packaging/tests/fake_dists/towel_stuff-0.1/towel_stuff/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# -*- coding: utf-8 -*- - -class Towel(object): - """A towel, that one should never be without.""" - - def __init__(self, color='tie-dye'): - self.color = color - self.wrapped_obj = None - - def wrap(self, obj): - """Wrap an object up in our towel.""" - self.wrapped_obj = obj - - def unwrap(self): - """Unwrap whatever is in our towel and return whatever it is.""" - obj = self.wrapped_obj - self.wrapped_obj = None - return obj diff --git a/Lib/packaging/tests/fake_dists/truffles-5.0.egg-info b/Lib/packaging/tests/fake_dists/truffles-5.0.egg-info deleted file mode 100644 --- a/Lib/packaging/tests/fake_dists/truffles-5.0.egg-info +++ /dev/null @@ -1,3 +0,0 @@ -Metadata-Version: 1.2 -Name: truffles -Version: 5.0 diff --git a/Lib/packaging/tests/fixer/__init__.py b/Lib/packaging/tests/fixer/__init__.py deleted file mode 100644 diff --git a/Lib/packaging/tests/fixer/fix_echo.py b/Lib/packaging/tests/fixer/fix_echo.py deleted file mode 100644 --- a/Lib/packaging/tests/fixer/fix_echo.py +++ /dev/null @@ -1,16 +0,0 @@ -# Example custom fixer, derived from fix_raw_input by Andre Roberge - -from lib2to3 import fixer_base -from lib2to3.fixer_util import Name - - -class FixEcho(fixer_base.BaseFix): - - BM_compatible = True - PATTERN = """ - power< name='echo' trailer< '(' [any] ')' > any* > - """ - - def transform(self, node, results): - name = results['name'] - name.replace(Name('print', prefix=name.prefix)) diff --git a/Lib/packaging/tests/fixer/fix_echo2.py b/Lib/packaging/tests/fixer/fix_echo2.py deleted file mode 100644 --- a/Lib/packaging/tests/fixer/fix_echo2.py +++ /dev/null @@ -1,16 +0,0 @@ -# Example custom fixer, derived from fix_raw_input by 
Andre Roberge - -from lib2to3 import fixer_base -from lib2to3.fixer_util import Name - - -class FixEcho2(fixer_base.BaseFix): - - BM_compatible = True - PATTERN = """ - power< name='echo2' trailer< '(' [any] ')' > any* > - """ - - def transform(self, node, results): - name = results['name'] - name.replace(Name('print', prefix=name.prefix)) diff --git a/Lib/packaging/tests/pypi_server.py b/Lib/packaging/tests/pypi_server.py deleted file mode 100644 --- a/Lib/packaging/tests/pypi_server.py +++ /dev/null @@ -1,449 +0,0 @@ -"""Mock PyPI Server implementation, to use in tests. - -This module also provides a simple test case to extend if you need to use -the PyPIServer all along your test case. Be sure to read the documentation -before any use. - -XXX TODO: - -The mock server can handle simple HTTP request (to simulate a simple index) or -XMLRPC requests, over HTTP. Both does not have the same intergface to deal -with, and I think it's a pain. - -A good idea could be to re-think a bit the way dstributions are handled in the -mock server. As it should return malformed HTML pages, we need to keep the -static behavior. - -I think of something like that: - - >>> server = PyPIMockServer() - >>> server.startHTTP() - >>> server.startXMLRPC() - -Then, the server must have only one port to rely on, eg. - - >>> server.fulladdress() - "http://ip:port/" - -It could be simple to have one HTTP server, relaying the requests to the two -implementations (static HTTP and XMLRPC over HTTP). -""" - -import os -import queue -import select -import threading -from functools import wraps -from http.server import HTTPServer, SimpleHTTPRequestHandler -from xmlrpc.server import SimpleXMLRPCServer - -from packaging.tests import unittest - - -PYPI_DEFAULT_STATIC_PATH = os.path.join( - os.path.dirname(os.path.abspath(__file__)), 'pypiserver') - - -def use_xmlrpc_server(*server_args, **server_kwargs): - server_kwargs['serve_xmlrpc'] = True - return use_pypi_server(*server_args, **server_kwargs) - - -def use_http_server(*server_args, **server_kwargs): - server_kwargs['serve_xmlrpc'] = False - return use_pypi_server(*server_args, **server_kwargs) - - -def use_pypi_server(*server_args, **server_kwargs): - """Decorator to make use of the PyPIServer for test methods, - just when needed, and not for the entire duration of the testcase. - """ - def wrapper(func): - @wraps(func) - def wrapped(*args, **kwargs): - server = PyPIServer(*server_args, **server_kwargs) - server.start() - try: - func(server=server, *args, **kwargs) - finally: - server.stop() - return wrapped - return wrapper - - -class PyPIServerTestCase(unittest.TestCase): - - def setUp(self): - super(PyPIServerTestCase, self).setUp() - self.pypi = PyPIServer() - self.pypi.start() - self.addCleanup(self.pypi.stop) - - -class PyPIServer(threading.Thread): - """PyPI Mocked server. - Provides a mocked version of the PyPI API's, to ease tests. - - Support serving static content and serving previously given text. - """ - - def __init__(self, test_static_path=None, - static_filesystem_paths=None, - static_uri_paths=["simple", "packages"], serve_xmlrpc=False): - """Initialize the server. - - Default behavior is to start the HTTP server. You can either start the - xmlrpc server by setting xmlrpc to True. Caution: Only one server will - be started. - - static_uri_paths and static_base_path are parameters used to provides - respectively the http_paths to serve statically, and where to find the - matching files on the filesystem. 
- """ - # we want to launch the server in a new dedicated thread, to not freeze - # tests. - super(PyPIServer, self).__init__() - self._run = True - self._serve_xmlrpc = serve_xmlrpc - if static_filesystem_paths is None: - static_filesystem_paths = ["default"] - - #TODO allow to serve XMLRPC and HTTP static files at the same time. - if not self._serve_xmlrpc: - self.server = HTTPServer(('127.0.0.1', 0), PyPIRequestHandler) - self.server.RequestHandlerClass.pypi_server = self - - self.request_queue = queue.Queue() - self._requests = [] - self.default_response_status = 404 - self.default_response_headers = [('Content-type', 'text/plain')] - self.default_response_data = "The page does not exists" - - # initialize static paths / filesystems - self.static_uri_paths = static_uri_paths - - # append the static paths defined locally - if test_static_path is not None: - static_filesystem_paths.append(test_static_path) - self.static_filesystem_paths = [ - PYPI_DEFAULT_STATIC_PATH + "/" + path - for path in static_filesystem_paths] - else: - # XMLRPC server - self.server = PyPIXMLRPCServer(('127.0.0.1', 0)) - self.xmlrpc = XMLRPCMockIndex() - # register the xmlrpc methods - self.server.register_introspection_functions() - self.server.register_instance(self.xmlrpc) - - self.address = ('127.0.0.1', self.server.server_port) - # to not have unwanted outputs. - self.server.RequestHandlerClass.log_request = lambda *_: None - - def run(self): - # loop because we can't stop it otherwise, for python < 2.6 - while self._run: - r, w, e = select.select([self.server], [], [], 0.5) - if r: - self.server.handle_request() - - def stop(self): - """self shutdown is not supported for python < 2.6""" - self._run = False - if self.is_alive(): - self.join() - self.server.server_close() - - def get_next_response(self): - return (self.default_response_status, - self.default_response_headers, - self.default_response_data) - - @property - def requests(self): - """Use this property to get all requests that have been made - to the server - """ - while True: - try: - self._requests.append(self.request_queue.get_nowait()) - except queue.Empty: - break - return self._requests - - @property - def full_address(self): - return "http://%s:%s" % self.address - - -class PyPIRequestHandler(SimpleHTTPRequestHandler): - # we need to access the pypi server while serving the content - pypi_server = None - - def serve_request(self): - """Serve the content. - - Also record the requests to be accessed later. If trying to access an - url matching a static uri, serve static content, otherwise serve - what is provided by the `get_next_response` method. - - If nothing is defined there, return a 404 header. - """ - # record the request. Read the input only on PUT or POST requests - if self.command in ("PUT", "POST"): - if 'content-length' in self.headers: - request_data = self.rfile.read( - int(self.headers['content-length'])) - else: - request_data = self.rfile.read() - - elif self.command in ("GET", "DELETE"): - request_data = '' - - self.pypi_server.request_queue.put((self, request_data)) - - # serve the content from local disc if we request an URL beginning - # by a pattern defined in `static_paths` - url_parts = self.path.split("/") - if (len(url_parts) > 1 and - url_parts[1] in self.pypi_server.static_uri_paths): - data = None - # always take the last first. 
- fs_paths = [] - fs_paths.extend(self.pypi_server.static_filesystem_paths) - fs_paths.reverse() - relative_path = self.path - for fs_path in fs_paths: - try: - if self.path.endswith("/"): - relative_path += "index.html" - - if relative_path.endswith('.tar.gz'): - with open(fs_path + relative_path, 'rb') as file: - data = file.read() - headers = [('Content-type', 'application/x-gtar')] - else: - with open(fs_path + relative_path) as file: - data = file.read().encode() - headers = [('Content-type', 'text/html')] - - headers.append(('Content-Length', len(data))) - self.make_response(data, headers=headers) - - except IOError: - pass - - if data is None: - self.make_response("Not found", 404) - - # otherwise serve the content from get_next_response - else: - # send back a response - status, headers, data = self.pypi_server.get_next_response() - self.make_response(data, status, headers) - - do_POST = do_GET = do_DELETE = do_PUT = serve_request - - def make_response(self, data, status=200, - headers=[('Content-type', 'text/html')]): - """Send the response to the HTTP client""" - if not isinstance(status, int): - try: - status = int(status) - except ValueError: - # we probably got something like YYY Codename. - # Just get the first 3 digits - status = int(status[:3]) - - self.send_response(status) - for header, value in headers: - self.send_header(header, value) - self.end_headers() - - if isinstance(data, str): - data = data.encode('utf-8') - - self.wfile.write(data) - - -class PyPIXMLRPCServer(SimpleXMLRPCServer): - def server_bind(self): - """Override server_bind to store the server name.""" - super(PyPIXMLRPCServer, self).server_bind() - host, port = self.socket.getsockname()[:2] - self.server_port = port - - -class MockDist: - """Fake distribution, used in the Mock PyPI Server""" - - def __init__(self, name, version="1.0", hidden=False, url="http://url/", - type="sdist", filename="", size=10000, - digest="123456", downloads=7, has_sig=False, - python_version="source", comment="comment", - author="John Doe", author_email="john at doe.name", - maintainer="Main Tayner", maintainer_email="maintainer_mail", - project_url="http://project_url/", homepage="http://homepage/", - keywords="", platform="UNKNOWN", classifiers=[], licence="", - description="Description", summary="Summary", stable_version="", - ordering="", documentation_id="", code_kwalitee_id="", - installability_id="", obsoletes=[], obsoletes_dist=[], - provides=[], provides_dist=[], requires=[], requires_dist=[], - requires_external=[], requires_python=""): - - # basic fields - self.name = name - self.version = version - self.hidden = hidden - - # URL infos - self.url = url - self.digest = digest - self.downloads = downloads - self.has_sig = has_sig - self.python_version = python_version - self.comment = comment - self.type = type - - # metadata - self.author = author - self.author_email = author_email - self.maintainer = maintainer - self.maintainer_email = maintainer_email - self.project_url = project_url - self.homepage = homepage - self.keywords = keywords - self.platform = platform - self.classifiers = classifiers - self.licence = licence - self.description = description - self.summary = summary - self.stable_version = stable_version - self.ordering = ordering - self.cheesecake_documentation_id = documentation_id - self.cheesecake_code_kwalitee_id = code_kwalitee_id - self.cheesecake_installability_id = installability_id - - self.obsoletes = obsoletes - self.obsoletes_dist = obsoletes_dist - self.provides = provides - 
self.provides_dist = provides_dist - self.requires = requires - self.requires_dist = requires_dist - self.requires_external = requires_external - self.requires_python = requires_python - - def url_infos(self): - return { - 'url': self.url, - 'packagetype': self.type, - 'filename': 'filename.tar.gz', - 'size': '6000', - 'md5_digest': self.digest, - 'downloads': self.downloads, - 'has_sig': self.has_sig, - 'python_version': self.python_version, - 'comment_text': self.comment, - } - - def metadata(self): - return { - 'maintainer': self.maintainer, - 'project_url': [self.project_url], - 'maintainer_email': self.maintainer_email, - 'cheesecake_code_kwalitee_id': self.cheesecake_code_kwalitee_id, - 'keywords': self.keywords, - 'obsoletes_dist': self.obsoletes_dist, - 'requires_external': self.requires_external, - 'author': self.author, - 'author_email': self.author_email, - 'download_url': self.url, - 'platform': self.platform, - 'version': self.version, - 'obsoletes': self.obsoletes, - 'provides': self.provides, - 'cheesecake_documentation_id': self.cheesecake_documentation_id, - '_pypi_hidden': self.hidden, - 'description': self.description, - '_pypi_ordering': 19, - 'requires_dist': self.requires_dist, - 'requires_python': self.requires_python, - 'classifiers': [], - 'name': self.name, - 'licence': self.licence, # XXX licence or license? - 'summary': self.summary, - 'home_page': self.homepage, - 'stable_version': self.stable_version, - # FIXME doesn't that reproduce the bug from 6527d3106e9f? - 'provides_dist': (self.provides_dist or - "%s (%s)" % (self.name, self.version)), - 'requires': self.requires, - 'cheesecake_installability_id': self.cheesecake_installability_id, - } - - def search_result(self): - return { - '_pypi_ordering': 0, - 'version': self.version, - 'name': self.name, - 'summary': self.summary, - } - - -class XMLRPCMockIndex: - """Mock XMLRPC server""" - - def __init__(self, dists=[]): - self._dists = dists - self._search_result = [] - - def add_distributions(self, dists): - for dist in dists: - self._dists.append(MockDist(**dist)) - - def set_distributions(self, dists): - self._dists = [] - self.add_distributions(dists) - - def set_search_result(self, result): - """set a predefined search result""" - self._search_result = result - - def _get_search_results(self): - results = [] - for name in self._search_result: - found_dist = [d for d in self._dists if d.name == name] - if found_dist: - results.append(found_dist[0]) - else: - dist = MockDist(name) - results.append(dist) - self._dists.append(dist) - return [r.search_result() for r in results] - - def list_packages(self): - return [d.name for d in self._dists] - - def package_releases(self, package_name, show_hidden=False): - if show_hidden: - # return all - return [d.version for d in self._dists if d.name == package_name] - else: - # return only un-hidden - return [d.version for d in self._dists if d.name == package_name - and not d.hidden] - - def release_urls(self, package_name, version): - return [d.url_infos() for d in self._dists - if d.name == package_name and d.version == version] - - def release_data(self, package_name, version): - release = [d for d in self._dists - if d.name == package_name and d.version == version] - if release: - return release[0].metadata() - else: - return {} - - def search(self, spec, operator="and"): - return self._get_search_results() diff --git a/Lib/packaging/tests/pypi_test_server.py b/Lib/packaging/tests/pypi_test_server.py deleted file mode 100644 --- 
a/Lib/packaging/tests/pypi_test_server.py +++ /dev/null @@ -1,59 +0,0 @@ -"""Test PyPI Server implementation at testpypi.python.org, to use in tests. - -This is a drop-in replacement for the mock pypi server for testing against a -real pypi server hosted by python.org especially for testing against. -""" - -import unittest - -PYPI_DEFAULT_STATIC_PATH = None - - -def use_xmlrpc_server(*server_args, **server_kwargs): - server_kwargs['serve_xmlrpc'] = True - return use_pypi_server(*server_args, **server_kwargs) - - -def use_http_server(*server_args, **server_kwargs): - server_kwargs['serve_xmlrpc'] = False - return use_pypi_server(*server_args, **server_kwargs) - - -def use_pypi_server(*server_args, **server_kwargs): - """Decorator to make use of the PyPIServer for test methods, - just when needed, and not for the entire duration of the testcase. - """ - def wrapper(func): - def wrapped(*args, **kwargs): - server = PyPIServer(*server_args, **server_kwargs) - func(server=server, *args, **kwargs) - return wrapped - return wrapper - - -class PyPIServerTestCase(unittest.TestCase): - - def setUp(self): - super(PyPIServerTestCase, self).setUp() - self.pypi = PyPIServer() - self.pypi.start() - self.addCleanup(self.pypi.stop) - - -class PyPIServer: - """Shim to access testpypi.python.org, for testing a real server.""" - - def __init__(self, test_static_path=None, - static_filesystem_paths=["default"], - static_uri_paths=["simple"], serve_xmlrpc=False): - self.address = ('testpypi.python.org', '80') - - def start(self): - pass - - def stop(self): - pass - - @property - def full_address(self): - return "http://%s:%s" % self.address diff --git a/Lib/packaging/tests/pypiserver/downloads_with_md5/packages/source/f/foobar/foobar-0.1.tar.gz b/Lib/packaging/tests/pypiserver/downloads_with_md5/packages/source/f/foobar/foobar-0.1.tar.gz deleted file mode 100644 Binary file Lib/packaging/tests/pypiserver/downloads_with_md5/packages/source/f/foobar/foobar-0.1.tar.gz has changed diff --git a/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/badmd5-0.1.tar.gz b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/badmd5-0.1.tar.gz deleted file mode 100644 diff --git a/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/index.html b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/index.html deleted file mode 100644 --- a/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/badmd5/index.html +++ /dev/null @@ -1,3 +0,0 @@ - -badmd5-0.1.tar.gz
- diff --git a/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/foobar/index.html b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/foobar/index.html deleted file mode 100644 --- a/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/foobar/index.html +++ /dev/null @@ -1,3 +0,0 @@ - -foobar-0.1.tar.gz
- diff --git a/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/index.html b/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/index.html deleted file mode 100644 --- a/Lib/packaging/tests/pypiserver/downloads_with_md5/simple/index.html +++ /dev/null @@ -1,2 +0,0 @@ -foobar/ -badmd5/ diff --git a/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/bar/index.html b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/bar/index.html deleted file mode 100644 --- a/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/bar/index.html +++ /dev/null @@ -1,6 +0,0 @@ -Links for bar
-bar-1.0.tar.gz
-bar-1.0.1.tar.gz
-bar-2.0.tar.gz
-bar-2.0.1.tar.gz
- diff --git a/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/baz/index.html b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/baz/index.html deleted file mode 100644 --- a/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/baz/index.html +++ /dev/null @@ -1,6 +0,0 @@ -Links for baz
-baz-1.0.tar.gz
-baz-1.0.1.tar.gz
-baz-2.0.tar.gz
-baz-2.0.1.tar.gz
- diff --git a/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/foo/index.html b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/foo/index.html deleted file mode 100644 --- a/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/foo/index.html +++ /dev/null @@ -1,6 +0,0 @@ -Links for foo
-foo-1.0.tar.gz
-foo-1.0.1.tar.gz
-foo-2.0.tar.gz
-foo-2.0.1.tar.gz
- diff --git a/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/index.html b/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/index.html deleted file mode 100644 --- a/Lib/packaging/tests/pypiserver/foo_bar_baz/simple/index.html +++ /dev/null @@ -1,3 +0,0 @@ -foo/ -bar/ -baz/ diff --git a/Lib/packaging/tests/pypiserver/project_list/simple/index.html b/Lib/packaging/tests/pypiserver/project_list/simple/index.html deleted file mode 100644 --- a/Lib/packaging/tests/pypiserver/project_list/simple/index.html +++ /dev/null @@ -1,5 +0,0 @@ -FooBar-bar -Foobar-baz -Baz-FooBar -Baz -Foo diff --git a/Lib/packaging/tests/pypiserver/test_found_links/simple/foobar/index.html b/Lib/packaging/tests/pypiserver/test_found_links/simple/foobar/index.html deleted file mode 100644 --- a/Lib/packaging/tests/pypiserver/test_found_links/simple/foobar/index.html +++ /dev/null @@ -1,6 +0,0 @@ -Links for Foobar
-Foobar-1.0.tar.gz
-Foobar-1.0.1.tar.gz
-Foobar-2.0.tar.gz
-Foobar-2.0.1.tar.gz
- diff --git a/Lib/packaging/tests/pypiserver/test_found_links/simple/index.html b/Lib/packaging/tests/pypiserver/test_found_links/simple/index.html deleted file mode 100644 --- a/Lib/packaging/tests/pypiserver/test_found_links/simple/index.html +++ /dev/null @@ -1,1 +0,0 @@ -foobar/ diff --git a/Lib/packaging/tests/pypiserver/test_pypi_server/external/index.html b/Lib/packaging/tests/pypiserver/test_pypi_server/external/index.html deleted file mode 100644 --- a/Lib/packaging/tests/pypiserver/test_pypi_server/external/index.html +++ /dev/null @@ -1,1 +0,0 @@ -index.html from external server diff --git a/Lib/packaging/tests/pypiserver/test_pypi_server/simple/index.html b/Lib/packaging/tests/pypiserver/test_pypi_server/simple/index.html deleted file mode 100644 --- a/Lib/packaging/tests/pypiserver/test_pypi_server/simple/index.html +++ /dev/null @@ -1,1 +0,0 @@ -Yeah diff --git a/Lib/packaging/tests/pypiserver/with_externals/external/external.html b/Lib/packaging/tests/pypiserver/with_externals/external/external.html deleted file mode 100644 --- a/Lib/packaging/tests/pypiserver/with_externals/external/external.html +++ /dev/null @@ -1,3 +0,0 @@ - -bad old link - diff --git a/Lib/packaging/tests/pypiserver/with_externals/simple/foobar/index.html b/Lib/packaging/tests/pypiserver/with_externals/simple/foobar/index.html deleted file mode 100644 --- a/Lib/packaging/tests/pypiserver/with_externals/simple/foobar/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -foobar-0.1.tar.gz
-external homepage
- diff --git a/Lib/packaging/tests/pypiserver/with_externals/simple/index.html b/Lib/packaging/tests/pypiserver/with_externals/simple/index.html deleted file mode 100644 --- a/Lib/packaging/tests/pypiserver/with_externals/simple/index.html +++ /dev/null @@ -1,1 +0,0 @@ -foobar/ diff --git a/Lib/packaging/tests/pypiserver/with_norel_links/external/homepage.html b/Lib/packaging/tests/pypiserver/with_norel_links/external/homepage.html deleted file mode 100644 --- a/Lib/packaging/tests/pypiserver/with_norel_links/external/homepage.html +++ /dev/null @@ -1,7 +0,0 @@ - - -
a rel=homepage HTML page
-foobar 2.0 - - - diff --git a/Lib/packaging/tests/pypiserver/with_norel_links/external/nonrel.html b/Lib/packaging/tests/pypiserver/with_norel_links/external/nonrel.html deleted file mode 100644 --- a/Lib/packaging/tests/pypiserver/with_norel_links/external/nonrel.html +++ /dev/null @@ -1,1 +0,0 @@ -A page linked without rel="download" or rel="homepage" link. diff --git a/Lib/packaging/tests/pypiserver/with_norel_links/simple/foobar/index.html b/Lib/packaging/tests/pypiserver/with_norel_links/simple/foobar/index.html deleted file mode 100644 --- a/Lib/packaging/tests/pypiserver/with_norel_links/simple/foobar/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -foobar-0.1.tar.gz
-external homepage
-unrelated link
-unrelated download
- diff --git a/Lib/packaging/tests/pypiserver/with_norel_links/simple/index.html b/Lib/packaging/tests/pypiserver/with_norel_links/simple/index.html deleted file mode 100644 --- a/Lib/packaging/tests/pypiserver/with_norel_links/simple/index.html +++ /dev/null @@ -1,1 +0,0 @@ -foobar/ diff --git a/Lib/packaging/tests/pypiserver/with_real_externals/simple/foobar/index.html b/Lib/packaging/tests/pypiserver/with_real_externals/simple/foobar/index.html deleted file mode 100644 --- a/Lib/packaging/tests/pypiserver/with_real_externals/simple/foobar/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -foobar-0.1.tar.gz
-external homepage
- diff --git a/Lib/packaging/tests/pypiserver/with_real_externals/simple/index.html b/Lib/packaging/tests/pypiserver/with_real_externals/simple/index.html deleted file mode 100644 --- a/Lib/packaging/tests/pypiserver/with_real_externals/simple/index.html +++ /dev/null @@ -1,1 +0,0 @@ -foobar/ diff --git a/Lib/packaging/tests/support.py b/Lib/packaging/tests/support.py deleted file mode 100644 --- a/Lib/packaging/tests/support.py +++ /dev/null @@ -1,400 +0,0 @@ -"""Support code for packaging test cases. - -*This module should not be considered public: its content and API may -change in incompatible ways.* - -A few helper classes are provided: LoggingCatcher, TempdirManager and -EnvironRestorer. They are written to be used as mixins:: - - from packaging.tests import unittest - from packaging.tests.support import LoggingCatcher - - class SomeTestCase(LoggingCatcher, unittest.TestCase): - ... - -If you need to define a setUp method on your test class, you have to -call the mixin class' setUp method or it won't work (same thing for -tearDown): - - def setUp(self): - super(SomeTestCase, self).setUp() - ... # other setup code - -Also provided is a DummyCommand class, useful to mock commands in the -tests of another command that needs them, for example to fake -compilation in build_ext (this requires that the mock build_ext command -be injected into the distribution object's command_obj dictionary). - -For tests that need to compile an extension module, use the -copy_xxmodule_c and fixup_build_ext functions. - -Each class or function has a docstring to explain its purpose and usage. -Existing tests should also be used as examples. -""" - -import os -import sys -import shutil -import logging -import weakref -import tempfile -import sysconfig - -from packaging.dist import Distribution -from packaging.util import resolve_name -from packaging.command import set_command, _COMMANDS - -from packaging.tests import unittest -from test.support import requires_zlib, unlink - -# define __all__ to make pydoc more useful -__all__ = [ - # TestCase mixins - 'LoggingCatcher', 'TempdirManager', 'EnvironRestorer', - # mocks - 'DummyCommand', 'TestDistribution', 'Inputs', - # misc. functions and decorators - 'fake_dec', 'create_distribution', 'use_command', - 'copy_xxmodule_c', 'fixup_build_ext', - 'skip_2to3_optimize', - # imported from this module for backport purposes - 'unittest', 'requires_zlib', 'skip_unless_symlink', -] - - -logger = logging.getLogger('packaging') -logger2to3 = logging.getLogger('RefactoringTool') - - -class _TestHandler(logging.handlers.BufferingHandler): - # stolen and adapted from test.support - - def __init__(self): - super(_TestHandler, self).__init__(0) - self.setLevel(logging.DEBUG) - - def shouldFlush(self): - return False - - def emit(self, record): - self.buffer.append(record) - - -class LoggingCatcher: - """TestCase-compatible mixin to receive logging calls. - - Upon setUp, instances of this classes get a BufferingHandler that's - configured to record all messages logged to the 'packaging' logger. - - Use get_logs to retrieve messages and self.loghandler.flush to discard - them. get_logs automatically flushes the logs, unless you pass - *flush=False*, for example to make multiple calls to the method with - different level arguments. 
If your test calls some code that generates - logging message and then you don't call get_logs, you will need to flush - manually before testing other code in the same test_* method, otherwise - get_logs in the next lines will see messages from the previous lines. - See example in test_command_check. - """ - - def setUp(self): - super(LoggingCatcher, self).setUp() - self.loghandler = handler = _TestHandler() - self._old_levels = logger.level, logger2to3.level - logger.addHandler(handler) - logger.setLevel(logging.DEBUG) # we want all messages - logger2to3.setLevel(logging.CRITICAL) # we don't want 2to3 messages - - def tearDown(self): - handler = self.loghandler - # All this is necessary to properly shut down the logging system and - # avoid a regrtest complaint. Thanks to Vinay Sajip for the help. - handler.close() - logger.removeHandler(handler) - for ref in weakref.getweakrefs(handler): - logging._removeHandlerRef(ref) - del self.loghandler - logger.setLevel(self._old_levels[0]) - logger2to3.setLevel(self._old_levels[1]) - super(LoggingCatcher, self).tearDown() - - def get_logs(self, level=logging.WARNING, flush=True): - """Return all log messages with given level. - - *level* defaults to logging.WARNING. - - For log calls with arguments (i.e. logger.info('bla bla %r', arg)), - the messages will be formatted before being returned (e.g. "bla bla - 'thing'"). - - Returns a list. Automatically flushes the loghandler after being - called, unless *flush* is False (this is useful to get e.g. all - warnings then all info messages). - """ - messages = [log.getMessage() for log in self.loghandler.buffer - if log.levelno == level] - if flush: - self.loghandler.flush() - return messages - - -class TempdirManager: - """TestCase-compatible mixin to create temporary directories and files. - - Directories and files created in a test_* method will be removed after it - has run. - """ - - def setUp(self): - super(TempdirManager, self).setUp() - self._olddir = os.getcwd() - self._basetempdir = tempfile.mkdtemp() - self._files = [] - - def tearDown(self): - for handle, name in self._files: - handle.close() - unlink(name) - - os.chdir(self._olddir) - shutil.rmtree(self._basetempdir) - super(TempdirManager, self).tearDown() - - def mktempfile(self): - """Create a read-write temporary file and return it.""" - fd, fn = tempfile.mkstemp(dir=self._basetempdir) - os.close(fd) - fp = open(fn, 'w+') - self._files.append((fp, fn)) - return fp - - def mkdtemp(self): - """Create a temporary directory and return its path.""" - d = tempfile.mkdtemp(dir=self._basetempdir) - return d - - def write_file(self, path, content='xxx', encoding=None): - """Write a file at the given path. - - path can be a string, a tuple or a list; if it's a tuple or list, - os.path.join will be used to produce a path. - """ - if isinstance(path, (list, tuple)): - path = os.path.join(*path) - with open(path, 'w', encoding=encoding) as f: - f.write(content) - - def create_dist(self, **kw): - """Create a stub distribution object and files. - - This function creates a Distribution instance (use keyword arguments - to customize it) and a temporary directory with a project structure - (currently an empty directory). - - It returns the path to the directory and the Distribution instance. - You can use self.write_file to write any file in that - directory, e.g. setup scripts or Python modules. 
- """ - if 'name' not in kw: - kw['name'] = 'foo' - tmp_dir = self.mkdtemp() - project_dir = os.path.join(tmp_dir, kw['name']) - os.mkdir(project_dir) - dist = Distribution(attrs=kw) - return project_dir, dist - - def assertIsFile(self, *args): - path = os.path.join(*args) - dirname = os.path.dirname(path) - file = os.path.basename(path) - if os.path.isdir(dirname): - files = os.listdir(dirname) - msg = "%s not found in %s: %s" % (file, dirname, files) - assert os.path.isfile(path), msg - else: - raise AssertionError( - '%s not found. %s does not exist' % (file, dirname)) - - def assertIsNotFile(self, *args): - path = os.path.join(*args) - self.assertFalse(os.path.isfile(path), "%r exists" % path) - - -class EnvironRestorer: - """TestCase-compatible mixin to restore or delete environment variables. - - The variables to restore (or delete if they were not originally present) - must be explicitly listed in self.restore_environ. It's better to be - aware of what we're modifying instead of saving and restoring the whole - environment. - """ - - def setUp(self): - super(EnvironRestorer, self).setUp() - self._saved = [] - self._added = [] - for key in self.restore_environ: - if key in os.environ: - self._saved.append((key, os.environ[key])) - else: - self._added.append(key) - - def tearDown(self): - for key, value in self._saved: - os.environ[key] = value - for key in self._added: - os.environ.pop(key, None) - super(EnvironRestorer, self).tearDown() - - -class DummyCommand: - """Class to store options for retrieval via set_undefined_options(). - - Useful for mocking one dependency command in the tests for another - command, see e.g. the dummy build command in test_build_scripts. - """ - # XXX does not work with dist.reinitialize_command, which typechecks - # and wants a finalized attribute - - def __init__(self, **kwargs): - for kw, val in kwargs.items(): - setattr(self, kw, val) - - def ensure_finalized(self): - pass - - -class TestDistribution(Distribution): - """Distribution subclasses that avoids the default search for - configuration files. - - The ._config_files attribute must be set before - .parse_config_files() is called. - """ - - def find_config_files(self): - return self._config_files - - -class Inputs: - """Fakes user inputs.""" - # TODO document usage - # TODO use context manager or something for auto cleanup - - def __init__(self, *answers): - self.answers = answers - self.index = 0 - - def __call__(self, prompt=''): - try: - return self.answers[self.index] - finally: - self.index += 1 - - -def create_distribution(configfiles=()): - """Prepares a distribution with given config files parsed.""" - d = TestDistribution() - d.config.find_config_files = d.find_config_files - d._config_files = configfiles - d.parse_config_files() - d.parse_command_line() - return d - - -def use_command(testcase, fullname): - """Register command at *fullname* for the duration of a test.""" - set_command(fullname) - # XXX maybe set_command should return the class object - name = resolve_name(fullname).get_command_name() - # XXX maybe we need a public API to remove commands - testcase.addCleanup(_COMMANDS.__delitem__, name) - - -def fake_dec(*args, **kw): - """Fake decorator""" - def _wrap(func): - def __wrap(*args, **kw): - return func(*args, **kw) - return __wrap - return _wrap - - -def copy_xxmodule_c(directory): - """Helper for tests that need the xxmodule.c source file. 
- - Example use: - - def test_compile(self): - copy_xxmodule_c(self.tmpdir) - self.assertIn('xxmodule.c', os.listdir(self.tmpdir)) - - If the source file can be found, it will be copied to *directory*. If not, - the test will be skipped. Errors during copy are not caught. - """ - filename = _get_xxmodule_path() - if filename is None: - raise unittest.SkipTest('cannot find xxmodule.c') - shutil.copy(filename, directory) - - -def _get_xxmodule_path(): - if sysconfig.is_python_build(): - srcdir = sysconfig.get_config_var('projectbase') - path = os.path.join(os.getcwd(), srcdir, 'Modules', 'xxmodule.c') - else: - path = os.path.join(os.path.dirname(__file__), 'xxmodule.c') - if os.path.exists(path): - return path - - -def fixup_build_ext(cmd): - """Function needed to make build_ext tests pass. - - When Python was built with --enable-shared on Unix, -L. is not enough to - find libpython.so, because regrtest runs in a tempdir, not in the - source directory where the .so lives. (Mac OS X embeds absolute paths - to shared libraries into executables, so the fixup is a no-op on that - platform.) - - When Python was built with in debug mode on Windows, build_ext commands - need their debug attribute set, and it is not done automatically for - some reason. - - This function handles both of these things, and also fixes - cmd.distribution.include_dirs if the running Python is an uninstalled - build. Example use: - - cmd = build_ext(dist) - support.fixup_build_ext(cmd) - cmd.ensure_finalized() - """ - if os.name == 'nt': - cmd.debug = sys.executable.endswith('_d.exe') - elif sysconfig.get_config_var('Py_ENABLE_SHARED'): - # To further add to the shared builds fun on Unix, we can't just add - # library_dirs to the Extension() instance because that doesn't get - # plumbed through to the final compiler command. 
- runshared = sysconfig.get_config_var('RUNSHARED') - if runshared is None: - cmd.library_dirs = ['.'] - else: - if sys.platform == 'darwin': - cmd.library_dirs = [] - else: - name, equals, value = runshared.partition('=') - cmd.library_dirs = value.split(os.pathsep) - - # Allow tests to run with an uninstalled Python - if sysconfig.is_python_build(): - pysrcdir = sysconfig.get_config_var('projectbase') - cmd.distribution.include_dirs.append(os.path.join(pysrcdir, 'Include')) - - -try: - from test.support import skip_unless_symlink -except ImportError: - skip_unless_symlink = unittest.skip( - 'requires test.support.skip_unless_symlink') - -skip_2to3_optimize = unittest.skipIf(sys.flags.optimize, - "2to3 doesn't work under -O") diff --git a/Lib/packaging/tests/test_ccompiler.py b/Lib/packaging/tests/test_ccompiler.py deleted file mode 100644 --- a/Lib/packaging/tests/test_ccompiler.py +++ /dev/null @@ -1,15 +0,0 @@ -"""Tests for distutils.compiler.ccompiler.""" - -from packaging.compiler import ccompiler -from packaging.tests import unittest, support - - -class CCompilerTestCase(unittest.TestCase): - pass # XXX need some tests on CCompiler - - -def test_suite(): - return unittest.makeSuite(CCompilerTestCase) - -if __name__ == "__main__": - unittest.main(defaultTest="test_suite") diff --git a/Lib/packaging/tests/test_command_bdist.py b/Lib/packaging/tests/test_command_bdist.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_bdist.py +++ /dev/null @@ -1,61 +0,0 @@ -"""Tests for distutils.command.bdist.""" -import os -from test.support import captured_stdout -from packaging.command.bdist import bdist, show_formats -from packaging.tests import unittest, support - - -class BuildTestCase(support.TempdirManager, - support.LoggingCatcher, - unittest.TestCase): - - def test_formats(self): - # let's create a command and make sure - # we can set the format - dist = self.create_dist()[1] - cmd = bdist(dist) - cmd.formats = ['msi'] - cmd.ensure_finalized() - self.assertEqual(cmd.formats, ['msi']) - - # what formats does bdist offer? 
- # XXX hard-coded lists are not the best way to find available bdist_* - # commands; we should add a registry - formats = ['bztar', 'gztar', 'msi', 'tar', 'wininst', 'zip'] - found = sorted(cmd.format_command) - self.assertEqual(found, formats) - - def test_skip_build(self): - # bug #10946: bdist --skip-build should trickle down to subcommands - dist = self.create_dist()[1] - cmd = bdist(dist) - cmd.skip_build = True - cmd.ensure_finalized() - dist.command_obj['bdist'] = cmd - - names = ['bdist_dumb', 'bdist_wininst'] - if os.name == 'nt': - names.append('bdist_msi') - - for name in names: - subcmd = cmd.get_finalized_command(name) - self.assertTrue(subcmd.skip_build, - '%s should take --skip-build from bdist' % name) - - def test_show_formats(self): - with captured_stdout() as stdout: - show_formats() - stdout = stdout.getvalue() - - # the output should be a header line + one line per format - num_formats = len(bdist.format_commands) - output = [line for line in stdout.split('\n') - if line.strip().startswith('--formats=')] - self.assertEqual(len(output), num_formats) - - -def test_suite(): - return unittest.makeSuite(BuildTestCase) - -if __name__ == '__main__': - unittest.main(defaultTest='test_suite') diff --git a/Lib/packaging/tests/test_command_bdist_dumb.py b/Lib/packaging/tests/test_command_bdist_dumb.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_bdist_dumb.py +++ /dev/null @@ -1,91 +0,0 @@ -"""Tests for distutils.command.bdist_dumb.""" - -import os -import imp -import sys -import zipfile -import packaging.util - -from packaging.dist import Distribution -from packaging.command.bdist_dumb import bdist_dumb -from packaging.tests import unittest, support -from packaging.tests.support import requires_zlib - - -class BuildDumbTestCase(support.TempdirManager, - support.LoggingCatcher, - unittest.TestCase): - - def setUp(self): - super(BuildDumbTestCase, self).setUp() - self.old_location = os.getcwd() - - def tearDown(self): - os.chdir(self.old_location) - packaging.util._path_created.clear() - super(BuildDumbTestCase, self).tearDown() - - @requires_zlib - def test_simple_built(self): - - # let's create a simple package - tmp_dir = self.mkdtemp() - pkg_dir = os.path.join(tmp_dir, 'foo') - os.mkdir(pkg_dir) - self.write_file((pkg_dir, 'foo.py'), '#') - self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py') - self.write_file((pkg_dir, 'README'), '') - - dist = Distribution({'name': 'foo', 'version': '0.1', - 'py_modules': ['foo'], - 'home_page': 'xxx', 'author': 'xxx', - 'author_email': 'xxx'}) - os.chdir(pkg_dir) - cmd = bdist_dumb(dist) - - # so the output is the same no matter - # what is the platform - cmd.format = 'zip' - - cmd.ensure_finalized() - cmd.run() - - # see what we have - dist_created = os.listdir(os.path.join(pkg_dir, 'dist')) - base = "%s.%s.zip" % (dist.get_fullname(), cmd.plat_name) - if os.name == 'os2': - base = base.replace(':', '-') - - self.assertEqual(dist_created, [base]) - - # now let's check what we have in the zip file - with zipfile.ZipFile(os.path.join('dist', base)) as fp: - contents = fp.namelist() - - contents = sorted(os.path.basename(fn) for fn in contents) - wanted = ['foo.py', - 'foo.%s.pyc' % imp.get_tag(), - 'METADATA', 'INSTALLER', 'REQUESTED', 'RECORD'] - self.assertEqual(contents, sorted(wanted)) - - def test_finalize_options(self): - pkg_dir, dist = self.create_dist() - os.chdir(pkg_dir) - cmd = bdist_dumb(dist) - self.assertEqual(cmd.bdist_dir, None) - cmd.finalize_options() - - # bdist_dir is initialized to 
bdist_base/dumb if not set - base = cmd.get_finalized_command('bdist').bdist_base - self.assertEqual(cmd.bdist_dir, os.path.join(base, 'dumb')) - - # the format is set to a default value depending on the os.name - default = cmd.default_format[os.name] - self.assertEqual(cmd.format, default) - - -def test_suite(): - return unittest.makeSuite(BuildDumbTestCase) - -if __name__ == '__main__': - unittest.main(defaultTest='test_suite') diff --git a/Lib/packaging/tests/test_command_bdist_msi.py b/Lib/packaging/tests/test_command_bdist_msi.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_bdist_msi.py +++ /dev/null @@ -1,25 +0,0 @@ -"""Tests for distutils.command.bdist_msi.""" -import sys - -from packaging.tests import unittest, support - - - at unittest.skipUnless(sys.platform == 'win32', 'these tests require Windows') -class BDistMSITestCase(support.TempdirManager, - support.LoggingCatcher, - unittest.TestCase): - - def test_minimal(self): - # minimal test XXX need more tests - from packaging.command.bdist_msi import bdist_msi - project_dir, dist = self.create_dist() - cmd = bdist_msi(dist) - cmd.ensure_finalized() - - -def test_suite(): - return unittest.makeSuite(BDistMSITestCase) - - -if __name__ == '__main__': - unittest.main(defaultTest='test_suite') diff --git a/Lib/packaging/tests/test_command_bdist_wininst.py b/Lib/packaging/tests/test_command_bdist_wininst.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_bdist_wininst.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Tests for distutils.command.bdist_wininst.""" - -from packaging.command.bdist_wininst import bdist_wininst -from packaging.tests import unittest, support - - -class BuildWinInstTestCase(support.TempdirManager, - support.LoggingCatcher, - unittest.TestCase): - - def test_get_exe_bytes(self): - - # issue5731: command was broken on non-windows platforms - # this test makes sure it works now for every platform - # let's create a command - pkg_pth, dist = self.create_dist() - cmd = bdist_wininst(dist) - cmd.ensure_finalized() - - # let's run the code that finds the right wininst*.exe file - # and make sure it finds it and returns its content - # no matter what platform we have - exe_file = cmd.get_exe_bytes() - self.assertGreater(len(exe_file), 10) - - -def test_suite(): - return unittest.makeSuite(BuildWinInstTestCase) - - -if __name__ == '__main__': - unittest.main(defaultTest='test_suite') diff --git a/Lib/packaging/tests/test_command_build.py b/Lib/packaging/tests/test_command_build.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_build.py +++ /dev/null @@ -1,56 +0,0 @@ -"""Tests for distutils.command.build.""" -import os -import sys - -from packaging.command.build import build -from sysconfig import get_platform -from packaging.tests import unittest, support - - -class BuildTestCase(support.TempdirManager, - support.LoggingCatcher, - unittest.TestCase): - - def test_finalize_options(self): - pkg_dir, dist = self.create_dist() - cmd = build(dist) - cmd.finalize_options() - - # if not specified, plat_name gets the current platform - self.assertEqual(cmd.plat_name, get_platform()) - - # build_purelib is build + lib - wanted = os.path.join(cmd.build_base, 'lib') - self.assertEqual(cmd.build_purelib, wanted) - - # build_platlib is 'build/lib.platform-x.x[-pydebug]' - # examples: - # build/lib.macosx-10.3-i386-2.7 - pyversion = '%s.%s' % sys.version_info[:2] - plat_spec = '.%s-%s' % (cmd.plat_name, pyversion) - if hasattr(sys, 'gettotalrefcount'): - 
self.assertTrue(cmd.build_platlib.endswith('-pydebug')) - plat_spec += '-pydebug' - wanted = os.path.join(cmd.build_base, 'lib' + plat_spec) - self.assertEqual(cmd.build_platlib, wanted) - - # by default, build_lib = build_purelib - self.assertEqual(cmd.build_lib, cmd.build_purelib) - - # build_temp is build/temp. - wanted = os.path.join(cmd.build_base, 'temp' + plat_spec) - self.assertEqual(cmd.build_temp, wanted) - - # build_scripts is build/scripts-x.x - wanted = os.path.join(cmd.build_base, 'scripts-' + pyversion) - self.assertEqual(cmd.build_scripts, wanted) - - # executable is os.path.normpath(sys.executable) - self.assertEqual(cmd.executable, os.path.normpath(sys.executable)) - - -def test_suite(): - return unittest.makeSuite(BuildTestCase) - -if __name__ == "__main__": - unittest.main(defaultTest="test_suite") diff --git a/Lib/packaging/tests/test_command_build_clib.py b/Lib/packaging/tests/test_command_build_clib.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_build_clib.py +++ /dev/null @@ -1,141 +0,0 @@ -"""Tests for distutils.command.build_clib.""" -import os -import sys - -from packaging.util import find_executable -from packaging.command.build_clib import build_clib -from packaging.errors import PackagingSetupError -from packaging.tests import unittest, support - - -class BuildCLibTestCase(support.TempdirManager, - support.LoggingCatcher, - unittest.TestCase): - - def test_check_library_dist(self): - pkg_dir, dist = self.create_dist() - cmd = build_clib(dist) - - # 'libraries' option must be a list - self.assertRaises(PackagingSetupError, cmd.check_library_list, 'foo') - - # each element of 'libraries' must a 2-tuple - self.assertRaises(PackagingSetupError, cmd.check_library_list, - ['foo1', 'foo2']) - - # first element of each tuple in 'libraries' - # must be a string (the library name) - self.assertRaises(PackagingSetupError, cmd.check_library_list, - [(1, 'foo1'), ('name', 'foo2')]) - - # library name may not contain directory separators - self.assertRaises(PackagingSetupError, cmd.check_library_list, - [('name', 'foo1'), - ('another/name', 'foo2')]) - - # second element of each tuple must be a dictionary (build info) - self.assertRaises(PackagingSetupError, cmd.check_library_list, - [('name', {}), - ('another', 'foo2')]) - - # those work - libs = [('name', {}), ('name', {'ok': 'good'})] - cmd.check_library_list(libs) - - def test_get_source_files(self): - pkg_dir, dist = self.create_dist() - cmd = build_clib(dist) - - # "in 'libraries' option 'sources' must be present and must be - # a list of source filenames - cmd.libraries = [('name', {})] - self.assertRaises(PackagingSetupError, cmd.get_source_files) - - cmd.libraries = [('name', {'sources': 1})] - self.assertRaises(PackagingSetupError, cmd.get_source_files) - - cmd.libraries = [('name', {'sources': ['a', 'b']})] - self.assertEqual(cmd.get_source_files(), ['a', 'b']) - - cmd.libraries = [('name', {'sources': ('a', 'b')})] - self.assertEqual(cmd.get_source_files(), ['a', 'b']) - - cmd.libraries = [('name', {'sources': ('a', 'b')}), - ('name2', {'sources': ['c', 'd']})] - self.assertEqual(cmd.get_source_files(), ['a', 'b', 'c', 'd']) - - def test_build_libraries(self): - pkg_dir, dist = self.create_dist() - cmd = build_clib(dist) - - class FakeCompiler: - def compile(*args, **kw): - pass - create_static_lib = compile - - cmd.compiler = FakeCompiler() - - # build_libraries is also doing a bit of type checking - lib = [('name', {'sources': 'notvalid'})] - self.assertRaises(PackagingSetupError, 
cmd.build_libraries, lib) - - lib = [('name', {'sources': []})] - cmd.build_libraries(lib) - - lib = [('name', {'sources': ()})] - cmd.build_libraries(lib) - - def test_finalize_options(self): - pkg_dir, dist = self.create_dist() - cmd = build_clib(dist) - - cmd.include_dirs = 'one-dir' - cmd.finalize_options() - self.assertEqual(cmd.include_dirs, ['one-dir']) - - cmd.include_dirs = None - cmd.finalize_options() - self.assertEqual(cmd.include_dirs, []) - - cmd.distribution.libraries = 'WONTWORK' - self.assertRaises(PackagingSetupError, cmd.finalize_options) - - @unittest.skipIf(sys.platform == 'win32', 'disabled on win32') - def test_run(self): - pkg_dir, dist = self.create_dist() - cmd = build_clib(dist) - - foo_c = os.path.join(pkg_dir, 'foo.c') - self.write_file(foo_c, 'int main(void) { return 1;}\n') - cmd.libraries = [('foo', {'sources': [foo_c]})] - - build_temp = os.path.join(pkg_dir, 'build') - os.mkdir(build_temp) - cmd.build_temp = build_temp - cmd.build_clib = build_temp - - # before we run the command, we want to make sure - # all commands are present on the system - # by creating a compiler and checking its executables - from packaging.compiler import new_compiler, customize_compiler - - compiler = new_compiler() - customize_compiler(compiler) - for ccmd in compiler.executables.values(): - if ccmd is None: - continue - if find_executable(ccmd[0]) is None: - raise unittest.SkipTest("can't test") - - # this should work - cmd.run() - - # let's check the result - self.assertIn('libfoo.a', os.listdir(build_temp)) - - -def test_suite(): - return unittest.makeSuite(BuildCLibTestCase) - -if __name__ == "__main__": - unittest.main(defaultTest="test_suite") diff --git a/Lib/packaging/tests/test_command_build_ext.py b/Lib/packaging/tests/test_command_build_ext.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_build_ext.py +++ /dev/null @@ -1,394 +0,0 @@ -import os -import sys -import site -import sysconfig -import textwrap -from packaging.dist import Distribution -from packaging.errors import (UnknownFileError, CompileError, - PackagingPlatformError) -from packaging.command.build_ext import build_ext -from packaging.compiler.extension import Extension - -from test.script_helper import assert_python_ok -from packaging.tests import support, unittest - - -class BuildExtTestCase(support.TempdirManager, - support.LoggingCatcher, - unittest.TestCase): - def setUp(self): - super(BuildExtTestCase, self).setUp() - self.tmp_dir = self.mkdtemp() - self.old_user_base = site.USER_BASE - site.USER_BASE = self.mkdtemp() - - def tearDown(self): - site.USER_BASE = self.old_user_base - super(BuildExtTestCase, self).tearDown() - - def test_build_ext(self): - support.copy_xxmodule_c(self.tmp_dir) - xx_c = os.path.join(self.tmp_dir, 'xxmodule.c') - xx_ext = Extension('xx', [xx_c]) - dist = Distribution({'name': 'xx', 'ext_modules': [xx_ext]}) - dist.package_dir = self.tmp_dir - cmd = build_ext(dist) - support.fixup_build_ext(cmd) - cmd.build_lib = self.tmp_dir - cmd.build_temp = self.tmp_dir - cmd.ensure_finalized() - cmd.run() - - code = textwrap.dedent("""\ - import sys - sys.path.insert(0, %r) - - import xx - - for attr in ('error', 'foo', 'new', 'roj'): - assert hasattr(xx, attr) - - assert xx.foo(2, 5) == 7 - assert xx.foo(13, 15) == 28 - assert xx.new().demo() is None - doc = 'This is a template module just for instruction.' 
- assert xx.__doc__ == doc - assert isinstance(xx.Null(), xx.Null) - assert isinstance(xx.Str(), xx.Str) - """) - code = code % self.tmp_dir - assert_python_ok('-c', code) - - def test_solaris_enable_shared(self): - dist = Distribution({'name': 'xx'}) - cmd = build_ext(dist) - old = sys.platform - - sys.platform = 'sunos' # fooling finalize_options - - old_var = sysconfig.get_config_var('Py_ENABLE_SHARED') - sysconfig._CONFIG_VARS['Py_ENABLE_SHARED'] = 1 - try: - cmd.ensure_finalized() - finally: - sys.platform = old - if old_var is None: - del sysconfig._CONFIG_VARS['Py_ENABLE_SHARED'] - else: - sysconfig._CONFIG_VARS['Py_ENABLE_SHARED'] = old_var - - # make sure we get some library dirs under solaris - self.assertGreater(len(cmd.library_dirs), 0) - - def test_user_site(self): - dist = Distribution({'name': 'xx'}) - cmd = build_ext(dist) - - # making sure the user option is there - options = [name for name, short, label in - cmd.user_options] - self.assertIn('user', options) - - # setting a value - cmd.user = True - - # setting user based lib and include - lib = os.path.join(site.USER_BASE, 'lib') - incl = os.path.join(site.USER_BASE, 'include') - os.mkdir(lib) - os.mkdir(incl) - - # let's run finalize - cmd.ensure_finalized() - - # see if include_dirs and library_dirs - # were set - self.assertIn(lib, cmd.library_dirs) - self.assertIn(lib, cmd.rpath) - self.assertIn(incl, cmd.include_dirs) - - def test_optional_extension(self): - - # this extension will fail, but let's ignore this failure - # with the optional argument. - modules = [Extension('foo', ['xxx'], optional=False)] - dist = Distribution({'name': 'xx', 'ext_modules': modules}) - cmd = build_ext(dist) - cmd.ensure_finalized() - self.assertRaises((UnknownFileError, CompileError), - cmd.run) # should raise an error - - modules = [Extension('foo', ['xxx'], optional=True)] - dist = Distribution({'name': 'xx', 'ext_modules': modules}) - cmd = build_ext(dist) - cmd.ensure_finalized() - cmd.run() # should pass - - def test_finalize_options(self): - # Make sure Python's include directories (for Python.h, pyconfig.h, - # etc.) are in the include search path. 
- modules = [Extension('foo', ['xxx'], optional=False)] - dist = Distribution({'name': 'xx', 'ext_modules': modules}) - cmd = build_ext(dist) - cmd.finalize_options() - - py_include = sysconfig.get_path('include') - self.assertIn(py_include, cmd.include_dirs) - - plat_py_include = sysconfig.get_path('platinclude') - self.assertIn(plat_py_include, cmd.include_dirs) - - # make sure cmd.libraries is turned into a list - # if it's a string - cmd = build_ext(dist) - cmd.libraries = 'my_lib, other_lib lastlib' - cmd.finalize_options() - self.assertEqual(cmd.libraries, ['my_lib', 'other_lib', 'lastlib']) - - # make sure cmd.library_dirs is turned into a list - # if it's a string - cmd = build_ext(dist) - cmd.library_dirs = 'my_lib_dir%sother_lib_dir' % os.pathsep - cmd.finalize_options() - self.assertIn('my_lib_dir', cmd.library_dirs) - self.assertIn('other_lib_dir', cmd.library_dirs) - - # make sure rpath is turned into a list - # if it's a string - cmd = build_ext(dist) - cmd.rpath = 'one%stwo' % os.pathsep - cmd.finalize_options() - self.assertEqual(cmd.rpath, ['one', 'two']) - - # XXX more tests to perform for win32 - - # make sure define is turned into 2-tuples - # strings if they are ','-separated strings - cmd = build_ext(dist) - cmd.define = 'one,two' - cmd.finalize_options() - self.assertEqual(cmd.define, [('one', '1'), ('two', '1')]) - - # make sure undef is turned into a list of - # strings if they are ','-separated strings - cmd = build_ext(dist) - cmd.undef = 'one,two' - cmd.finalize_options() - self.assertEqual(cmd.undef, ['one', 'two']) - - # make sure swig_opts is turned into a list - cmd = build_ext(dist) - cmd.swig_opts = None - cmd.finalize_options() - self.assertEqual(cmd.swig_opts, []) - - cmd = build_ext(dist) - cmd.swig_opts = '1 2' - cmd.finalize_options() - self.assertEqual(cmd.swig_opts, ['1', '2']) - - def test_get_source_files(self): - modules = [Extension('foo', ['xxx'], optional=False)] - dist = Distribution({'name': 'xx', 'ext_modules': modules}) - cmd = build_ext(dist) - cmd.ensure_finalized() - self.assertEqual(cmd.get_source_files(), ['xxx']) - - def test_compiler_option(self): - # cmd.compiler is an option and - # should not be overriden by a compiler instance - # when the command is run - dist = Distribution() - cmd = build_ext(dist) - cmd.compiler = 'unix' - cmd.ensure_finalized() - cmd.run() - self.assertEqual(cmd.compiler, 'unix') - - def test_get_outputs(self): - tmp_dir = self.mkdtemp() - c_file = os.path.join(tmp_dir, 'foo.c') - self.write_file(c_file, 'void PyInit_foo(void) {}\n') - ext = Extension('foo', [c_file], optional=False) - dist = Distribution({'name': 'xx', - 'ext_modules': [ext]}) - cmd = build_ext(dist) - support.fixup_build_ext(cmd) - cmd.ensure_finalized() - self.assertEqual(len(cmd.get_outputs()), 1) - - cmd.build_lib = os.path.join(self.tmp_dir, 'build') - cmd.build_temp = os.path.join(self.tmp_dir, 'tempt') - - # issue #5977 : distutils build_ext.get_outputs - # returns wrong result with --inplace - other_tmp_dir = os.path.realpath(self.mkdtemp()) - old_wd = os.getcwd() - os.chdir(other_tmp_dir) - try: - cmd.inplace = True - cmd.run() - so_file = cmd.get_outputs()[0] - finally: - os.chdir(old_wd) - self.assertTrue(os.path.exists(so_file)) - so_ext = sysconfig.get_config_var('SO') - self.assertTrue(so_file.endswith(so_ext)) - so_dir = os.path.dirname(so_file) - self.assertEqual(so_dir, other_tmp_dir) - - cmd.inplace = False - cmd.run() - so_file = cmd.get_outputs()[0] - self.assertTrue(os.path.exists(so_file)) - 
self.assertTrue(so_file.endswith(so_ext)) - so_dir = os.path.dirname(so_file) - self.assertEqual(so_dir, cmd.build_lib) - - # inplace = False, cmd.package = 'bar' - build_py = cmd.get_finalized_command('build_py') - build_py.package_dir = 'bar' - path = cmd.get_ext_fullpath('foo') - # checking that the last directory is the build_dir - path = os.path.split(path)[0] - self.assertEqual(path, cmd.build_lib) - - # inplace = True, cmd.package = 'bar' - cmd.inplace = True - other_tmp_dir = os.path.realpath(self.mkdtemp()) - old_wd = os.getcwd() - os.chdir(other_tmp_dir) - try: - path = cmd.get_ext_fullpath('foo') - finally: - os.chdir(old_wd) - # checking that the last directory is bar - path = os.path.split(path)[0] - lastdir = os.path.split(path)[-1] - self.assertEqual(lastdir, 'bar') - - def test_ext_fullpath(self): - ext = sysconfig.get_config_vars()['SO'] - # building lxml.etree inplace - #etree_c = os.path.join(self.tmp_dir, 'lxml.etree.c') - #etree_ext = Extension('lxml.etree', [etree_c]) - #dist = Distribution({'name': 'lxml', 'ext_modules': [etree_ext]}) - dist = Distribution() - cmd = build_ext(dist) - cmd.inplace = True - cmd.distribution.package_dir = 'src' - cmd.distribution.packages = ['lxml', 'lxml.html'] - curdir = os.getcwd() - wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext) - path = cmd.get_ext_fullpath('lxml.etree') - self.assertEqual(wanted, path) - - # building lxml.etree not inplace - cmd.inplace = False - cmd.build_lib = os.path.join(curdir, 'tmpdir') - wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + ext) - path = cmd.get_ext_fullpath('lxml.etree') - self.assertEqual(wanted, path) - - # building twisted.runner.portmap not inplace - build_py = cmd.get_finalized_command('build_py') - build_py.package_dir = None - cmd.distribution.packages = ['twisted', 'twisted.runner.portmap'] - path = cmd.get_ext_fullpath('twisted.runner.portmap') - wanted = os.path.join(curdir, 'tmpdir', 'twisted', 'runner', - 'portmap' + ext) - self.assertEqual(wanted, path) - - # building twisted.runner.portmap inplace - cmd.inplace = True - path = cmd.get_ext_fullpath('twisted.runner.portmap') - wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + ext) - self.assertEqual(wanted, path) - - @unittest.skipUnless(sys.platform == 'darwin', - 'test only relevant for Mac OS X') - def test_deployment_target_default(self): - # Issue 9516: Test that, in the absence of the environment variable, - # an extension module is compiled with the same deployment target as - # the interpreter. - self._try_compile_deployment_target('==', None) - - @unittest.skipUnless(sys.platform == 'darwin', - 'test only relevant for Mac OS X') - def test_deployment_target_too_low(self): - # Issue 9516: Test that an extension module is not allowed to be - # compiled with a deployment target less than that of the interpreter. - self.assertRaises(PackagingPlatformError, - self._try_compile_deployment_target, '>', '10.1') - - @unittest.skipUnless(sys.platform == 'darwin', - 'test only relevant for Mac OS X') - def test_deployment_target_higher_ok(self): - # Issue 9516: Test that an extension module can be compiled with a - # deployment target higher than that of the interpreter: the ext - # module may depend on some newer OS feature. - deptarget = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') - if deptarget: - # increment the minor version number (i.e. 
10.6 -> 10.7) - deptarget = [int(x) for x in deptarget.split('.')] - deptarget[-1] += 1 - deptarget = '.'.join(str(i) for i in deptarget) - self._try_compile_deployment_target('<', deptarget) - - def _try_compile_deployment_target(self, operator, target): - orig_environ = os.environ - os.environ = orig_environ.copy() - self.addCleanup(setattr, os, 'environ', orig_environ) - - if target is None: - if os.environ.get('MACOSX_DEPLOYMENT_TARGET'): - del os.environ['MACOSX_DEPLOYMENT_TARGET'] - else: - os.environ['MACOSX_DEPLOYMENT_TARGET'] = target - - deptarget_c = os.path.join(self.tmp_dir, 'deptargetmodule.c') - - with open(deptarget_c, 'w') as fp: - fp.write(textwrap.dedent('''\ - #include - - int dummy; - - #if TARGET %s MAC_OS_X_VERSION_MIN_REQUIRED - #else - #error "Unexpected target" - #endif - - ''' % operator)) - - # get the deployment target that the interpreter was built with - target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') - target = tuple(map(int, target.split('.'))) - target = '%02d%01d0' % target - - deptarget_ext = Extension( - 'deptarget', - [deptarget_c], - extra_compile_args=['-DTARGET=%s' % (target,)], - ) - dist = Distribution({ - 'name': 'deptarget', - 'ext_modules': [deptarget_ext], - }) - dist.package_dir = self.tmp_dir - cmd = build_ext(dist) - cmd.build_lib = self.tmp_dir - cmd.build_temp = self.tmp_dir - - try: - cmd.ensure_finalized() - cmd.run() - except CompileError: - self.fail("Wrong deployment target during compilation") - - -def test_suite(): - return unittest.makeSuite(BuildExtTestCase) - -if __name__ == '__main__': - unittest.main(defaultTest='test_suite') diff --git a/Lib/packaging/tests/test_command_build_py.py b/Lib/packaging/tests/test_command_build_py.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_build_py.py +++ /dev/null @@ -1,146 +0,0 @@ -"""Tests for distutils.command.build_py.""" - -import os -import sys -import imp - -from packaging.command.build_py import build_py -from packaging.dist import Distribution -from packaging.errors import PackagingFileError - -from packaging.tests import unittest, support - - -class BuildPyTestCase(support.TempdirManager, - support.LoggingCatcher, - unittest.TestCase): - - def test_package_data(self): - sources = self.mkdtemp() - pkg_dir = os.path.join(sources, 'pkg') - os.mkdir(pkg_dir) - f = open(os.path.join(pkg_dir, "__init__.py"), "w") - try: - f.write("# Pretend this is a package.") - finally: - f.close() - # let's have two files to make sure globbing works - f = open(os.path.join(pkg_dir, "README.txt"), "w") - try: - f.write("Info about this package") - finally: - f.close() - f = open(os.path.join(pkg_dir, "HACKING.txt"), "w") - try: - f.write("How to contribute") - finally: - f.close() - - destination = self.mkdtemp() - - dist = Distribution({"packages": ["pkg"], - "package_dir": sources}) - - dist.command_obj["build"] = support.DummyCommand( - force=False, - build_lib=destination, - use_2to3_fixers=None, - convert_2to3_doctests=None, - use_2to3=False) - dist.packages = ["pkg"] - dist.package_data = {"pkg": ["*.txt"]} - dist.package_dir = sources - - cmd = build_py(dist) - cmd.compile = True - cmd.ensure_finalized() - self.assertEqual(cmd.package_data, dist.package_data) - - cmd.run() - - # This makes sure the list of outputs includes byte-compiled - # files for Python modules but not for package data files - # (there shouldn't *be* byte-code files for those!). 
- # FIXME the test below is not doing what the comment above says, and - # if it did it would show a code bug: if we add a demo.py file to - # package_data, it gets byte-compiled! - outputs = cmd.get_outputs() - self.assertEqual(len(outputs), 4, outputs) - pkgdest = os.path.join(destination, "pkg") - files = os.listdir(pkgdest) - pycache_dir = os.path.join(pkgdest, "__pycache__") - self.assertIn("__init__.py", files) - self.assertIn("README.txt", files) - self.assertIn("HACKING.txt", files) - pyc_files = os.listdir(pycache_dir) - self.assertEqual(["__init__.%s.pyc" % imp.get_tag()], pyc_files) - - def test_empty_package_dir(self): - # See SF 1668596/1720897. - # create the distribution files. - sources = self.mkdtemp() - pkg = os.path.join(sources, 'pkg') - os.mkdir(pkg) - open(os.path.join(pkg, "__init__.py"), "wb").close() - testdir = os.path.join(pkg, "doc") - os.mkdir(testdir) - open(os.path.join(testdir, "testfile"), "wb").close() - - os.chdir(sources) - dist = Distribution({"packages": ["pkg"], - "package_dir": sources, - "package_data": {"pkg": ["doc/*"]}}) - dist.script_args = ["build"] - dist.parse_command_line() - - try: - dist.run_commands() - except PackagingFileError: - self.fail("failed package_data test when package_dir is ''") - - def test_byte_compile(self): - project_dir, dist = self.create_dist(py_modules=['boiledeggs']) - os.chdir(project_dir) - self.write_file('boiledeggs.py', 'import antigravity') - cmd = build_py(dist) - cmd.compile = True - cmd.build_lib = 'here' - cmd.finalize_options() - cmd.run() - - found = os.listdir(cmd.build_lib) - self.assertEqual(sorted(found), ['__pycache__', 'boiledeggs.py']) - found = os.listdir(os.path.join(cmd.build_lib, '__pycache__')) - self.assertEqual(found, ['boiledeggs.%s.pyc' % imp.get_tag()]) - - def test_byte_compile_optimized(self): - project_dir, dist = self.create_dist(py_modules=['boiledeggs']) - os.chdir(project_dir) - self.write_file('boiledeggs.py', 'import antigravity') - cmd = build_py(dist) - cmd.compile = True - cmd.optimize = 1 - cmd.build_lib = 'here' - cmd.finalize_options() - cmd.run() - - found = os.listdir(cmd.build_lib) - self.assertEqual(sorted(found), ['__pycache__', 'boiledeggs.py']) - found = os.listdir(os.path.join(cmd.build_lib, '__pycache__')) - self.assertEqual(sorted(found), ['boiledeggs.%s.pyc' % imp.get_tag(), - 'boiledeggs.%s.pyo' % imp.get_tag()]) - - def test_byte_compile_under_B(self): - # make sure byte compilation works under -B (dont_write_bytecode) - self.addCleanup(setattr, sys, 'dont_write_bytecode', - sys.dont_write_bytecode) - sys.dont_write_bytecode = True - self.test_byte_compile() - self.test_byte_compile_optimized() - - -def test_suite(): - return unittest.makeSuite(BuildPyTestCase) - -if __name__ == "__main__": - unittest.main(defaultTest="test_suite") diff --git a/Lib/packaging/tests/test_command_build_scripts.py b/Lib/packaging/tests/test_command_build_scripts.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_build_scripts.py +++ /dev/null @@ -1,109 +0,0 @@ -"""Tests for distutils.command.build_scripts.""" - -import os -import sys -import sysconfig -from packaging.dist import Distribution -from packaging.command.build_scripts import build_scripts - -from packaging.tests import unittest, support - - -class BuildScriptsTestCase(support.TempdirManager, - support.LoggingCatcher, - unittest.TestCase): - - def test_default_settings(self): - cmd = self.get_build_scripts_cmd("/foo/bar", []) - self.assertFalse(cmd.force) - self.assertIs(cmd.build_dir, None) - - 
cmd.finalize_options() - - self.assertTrue(cmd.force) - self.assertEqual(cmd.build_dir, "/foo/bar") - - def test_build(self): - source = self.mkdtemp() - target = self.mkdtemp() - expected = self.write_sample_scripts(source) - - cmd = self.get_build_scripts_cmd(target, - [os.path.join(source, fn) - for fn in expected]) - cmd.finalize_options() - cmd.run() - - built = os.listdir(target) - for name in expected: - self.assertIn(name, built) - - def get_build_scripts_cmd(self, target, scripts): - dist = Distribution() - dist.scripts = scripts - dist.command_obj["build"] = support.DummyCommand( - build_scripts=target, - force=True, - executable=sys.executable, - use_2to3=False, - use_2to3_fixers=None, - convert_2to3_doctests=None - ) - return build_scripts(dist) - - def write_sample_scripts(self, dir): - expected = [] - expected.append("script1.py") - self.write_script(dir, "script1.py", - ("#! /usr/bin/env python2.3\n" - "# bogus script w/ Python sh-bang\n" - "pass\n")) - expected.append("script2.py") - self.write_script(dir, "script2.py", - ("#!/usr/bin/python\n" - "# bogus script w/ Python sh-bang\n" - "pass\n")) - expected.append("shell.sh") - self.write_script(dir, "shell.sh", - ("#!/bin/sh\n" - "# bogus shell script w/ sh-bang\n" - "exit 0\n")) - return expected - - def write_script(self, dir, name, text): - with open(os.path.join(dir, name), "w") as f: - f.write(text) - - def test_version_int(self): - source = self.mkdtemp() - target = self.mkdtemp() - expected = self.write_sample_scripts(source) - - - cmd = self.get_build_scripts_cmd(target, - [os.path.join(source, fn) - for fn in expected]) - cmd.finalize_options() - - # http://bugs.python.org/issue4524 - # - # On linux-g++-32 with command line `./configure --enable-ipv6 - # --with-suffix=3`, python is compiled okay but the build scripts - # failed when writing the name of the executable - old = sysconfig.get_config_vars().get('VERSION') - sysconfig._CONFIG_VARS['VERSION'] = 4 - try: - cmd.run() - finally: - if old is not None: - sysconfig._CONFIG_VARS['VERSION'] = old - - built = os.listdir(target) - for name in expected: - self.assertIn(name, built) - -def test_suite(): - return unittest.makeSuite(BuildScriptsTestCase) - -if __name__ == "__main__": - unittest.main(defaultTest="test_suite") diff --git a/Lib/packaging/tests/test_command_check.py b/Lib/packaging/tests/test_command_check.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_check.py +++ /dev/null @@ -1,161 +0,0 @@ -"""Tests for distutils.command.check.""" - -from packaging.command.check import check -from packaging.metadata import _HAS_DOCUTILS -from packaging.errors import PackagingSetupError, MetadataMissingError -from packaging.tests import unittest, support - - -class CheckTestCase(support.LoggingCatcher, - support.TempdirManager, - unittest.TestCase): - - def _run(self, metadata=None, **options): - if metadata is None: - metadata = {'name': 'xxx', 'version': '1.2'} - pkg_info, dist = self.create_dist(**metadata) - cmd = check(dist) - cmd.initialize_options() - for name, value in options.items(): - setattr(cmd, name, value) - cmd.ensure_finalized() - cmd.run() - return cmd - - def test_check_metadata(self): - # let's run the command with no metadata at all - # by default, check is checking the metadata - # should have some warnings - self._run() - # trick: using assertNotEqual with an empty list will give us a more - # useful error message than assertGreater(.., 0) when the code change - # and the test fails - self.assertNotEqual(self.get_logs(), []) - - 
# now let's add the required fields - # and run it again, to make sure we don't get - # any warning anymore - metadata = {'home_page': 'xxx', 'author': 'xxx', - 'author_email': 'xxx', - 'name': 'xxx', 'version': '4.2', - } - self._run(metadata) - self.assertEqual(self.get_logs(), []) - - # now with the strict mode, we should - # get an error if there are missing metadata - self.assertRaises(MetadataMissingError, self._run, {}, **{'strict': 1}) - self.assertRaises(PackagingSetupError, self._run, - {'name': 'xxx', 'version': 'xxx'}, **{'strict': 1}) - - # clear warnings from the previous calls - self.loghandler.flush() - - # and of course, no error when all metadata fields are present - self._run(metadata, strict=True) - self.assertEqual(self.get_logs(), []) - - # now a test with non-ASCII characters - metadata = {'home_page': 'xxx', 'author': '\u00c9ric', - 'author_email': 'xxx', 'name': 'xxx', - 'version': '1.2', - 'summary': 'Something about esszet \u00df', - 'description': 'More things about esszet \u00df'} - self._run(metadata) - self.assertEqual(self.get_logs(), []) - - def test_check_metadata_1_2(self): - # let's run the command with no metadata at all - # by default, check is checking the metadata - # should have some warnings - self._run() - self.assertNotEqual(self.get_logs(), []) - - # now let's add the required fields and run it again, to make sure we - # don't get any warning anymore let's use requires_python as a marker - # to enforce Metadata-Version 1.2 - metadata = {'home_page': 'xxx', 'author': 'xxx', - 'author_email': 'xxx', - 'name': 'xxx', 'version': '4.2', - 'requires_python': '2.4', - } - self._run(metadata) - self.assertEqual(self.get_logs(), []) - - # now with the strict mode, we should - # get an error if there are missing metadata - self.assertRaises(MetadataMissingError, self._run, {}, **{'strict': 1}) - self.assertRaises(PackagingSetupError, self._run, - {'name': 'xxx', 'version': 'xxx'}, **{'strict': 1}) - - # complain about version format - metadata['version'] = 'xxx' - self.assertRaises(PackagingSetupError, self._run, metadata, - **{'strict': 1}) - - # clear warnings from the previous calls - self.loghandler.flush() - - # now with correct version format again - metadata['version'] = '4.2' - self._run(metadata, strict=True) - self.assertEqual(self.get_logs(), []) - - @unittest.skipUnless(_HAS_DOCUTILS, "requires docutils") - def test_check_restructuredtext(self): - # let's see if it detects broken rest in description - broken_rest = 'title\n===\n\ntest' - pkg_info, dist = self.create_dist(description=broken_rest) - cmd = check(dist) - cmd.check_restructuredtext() - self.assertEqual(len(self.get_logs()), 1) - - # let's see if we have an error with strict=1 - metadata = {'home_page': 'xxx', 'author': 'xxx', - 'author_email': 'xxx', - 'name': 'xxx', 'version': '1.2', - 'description': broken_rest} - self.assertRaises(PackagingSetupError, self._run, metadata, - strict=True, all=True) - self.loghandler.flush() - - # and non-broken rest, including a non-ASCII character to test #12114 - dist = self.create_dist(description='title\n=====\n\ntest \u00df')[1] - cmd = check(dist) - cmd.check_restructuredtext() - self.assertEqual(self.get_logs(), []) - - def test_check_all(self): - self.assertRaises(PackagingSetupError, self._run, - {'name': 'xxx', 'version': 'xxx'}, **{'strict': 1, - 'all': 1}) - self.assertRaises(MetadataMissingError, self._run, - {}, **{'strict': 1, - 'all': 1}) - - def test_check_hooks(self): - pkg_info, dist = self.create_dist() - 
dist.command_options['install_dist'] = { - 'pre_hook': ('file', {"a": 'some.nonextistant.hook.ghrrraarrhll'}), - } - cmd = check(dist) - cmd.check_hooks_resolvable() - self.assertEqual(len(self.get_logs()), 1) - - def test_warn(self): - _, dist = self.create_dist() - cmd = check(dist) - self.assertEqual(self.get_logs(), []) - cmd.warn('hello') - self.assertEqual(self.get_logs(), ['check: hello']) - cmd.warn('hello %s', 'world') - self.assertEqual(self.get_logs(), ['check: hello world']) - cmd.warn('hello %s %s', 'beautiful', 'world') - self.assertEqual(self.get_logs(), ['check: hello beautiful world']) - - -def test_suite(): - return unittest.makeSuite(CheckTestCase) - -if __name__ == "__main__": - unittest.main(defaultTest="test_suite") diff --git a/Lib/packaging/tests/test_command_clean.py b/Lib/packaging/tests/test_command_clean.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_clean.py +++ /dev/null @@ -1,46 +0,0 @@ -"""Tests for distutils.command.clean.""" -import os - -from packaging.command.clean import clean -from packaging.tests import unittest, support - - -class cleanTestCase(support.TempdirManager, support.LoggingCatcher, - unittest.TestCase): - - def test_simple_run(self): - pkg_dir, dist = self.create_dist() - cmd = clean(dist) - - # let's add some elements clean should remove - dirs = [(d, os.path.join(pkg_dir, d)) - for d in ('build_temp', 'build_lib', 'bdist_base', - 'build_scripts', 'build_base')] - - for name, path in dirs: - os.mkdir(path) - setattr(cmd, name, path) - if name == 'build_base': - continue - for f in ('one', 'two', 'three'): - self.write_file((path, f)) - - # let's run the command - cmd.all = True - cmd.ensure_finalized() - cmd.run() - - # make sure the files where removed - for name, path in dirs: - self.assertFalse(os.path.exists(path), - '%r was not removed' % path) - - # let's run the command again (should spit warnings but succeed) - cmd.run() - - -def test_suite(): - return unittest.makeSuite(cleanTestCase) - -if __name__ == "__main__": - unittest.main(defaultTest="test_suite") diff --git a/Lib/packaging/tests/test_command_cmd.py b/Lib/packaging/tests/test_command_cmd.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_cmd.py +++ /dev/null @@ -1,102 +0,0 @@ -"""Tests for distutils.cmd.""" -import os -import logging - -from packaging.command.cmd import Command -from packaging.dist import Distribution -from packaging.errors import PackagingOptionError -from packaging.tests import support, unittest - - -class MyCmd(Command): - def initialize_options(self): - pass - - -class CommandTestCase(support.LoggingCatcher, - unittest.TestCase): - - def setUp(self): - super(CommandTestCase, self).setUp() - dist = Distribution() - self.cmd = MyCmd(dist) - - def test_make_file(self): - cmd = self.cmd - - # making sure it raises when infiles is not a string or a list/tuple - self.assertRaises(TypeError, cmd.make_file, - infiles=1, outfile='', func='func', args=()) - - # making sure execute gets called properly - def _execute(func, args, exec_msg, level): - self.assertEqual(exec_msg, 'generating out from in') - cmd.force = True - cmd.execute = _execute - cmd.make_file(infiles='in', outfile='out', func='func', args=()) - - def test_dump_options(self): - cmd = self.cmd - cmd.option1 = 1 - cmd.option2 = 1 - cmd.user_options = [('option1', '', ''), ('option2', '', '')] - cmd.dump_options() - - wanted = ["command options for 'MyCmd':", ' option1 = 1', - ' option2 = 1'] - msgs = self.get_logs(logging.INFO) - self.assertEqual(msgs, wanted) - - 
def test_ensure_string(self): - cmd = self.cmd - cmd.option1 = 'ok' - cmd.ensure_string('option1') - - cmd.option2 = None - cmd.ensure_string('option2', 'xxx') - self.assertTrue(hasattr(cmd, 'option2')) - - cmd.option3 = 1 - self.assertRaises(PackagingOptionError, cmd.ensure_string, 'option3') - - def test_ensure_string_list(self): - cmd = self.cmd - cmd.option1 = 'ok,dok' - cmd.ensure_string_list('option1') - self.assertEqual(cmd.option1, ['ok', 'dok']) - - cmd.yes_string_list = ['one', 'two', 'three'] - cmd.yes_string_list2 = 'ok' - cmd.ensure_string_list('yes_string_list') - cmd.ensure_string_list('yes_string_list2') - self.assertEqual(cmd.yes_string_list, ['one', 'two', 'three']) - self.assertEqual(cmd.yes_string_list2, ['ok']) - - cmd.not_string_list = ['one', 2, 'three'] - cmd.not_string_list2 = object() - self.assertRaises(PackagingOptionError, - cmd.ensure_string_list, 'not_string_list') - - self.assertRaises(PackagingOptionError, - cmd.ensure_string_list, 'not_string_list2') - - def test_ensure_filename(self): - cmd = self.cmd - cmd.option1 = __file__ - cmd.ensure_filename('option1') - cmd.option2 = 'xxx' - self.assertRaises(PackagingOptionError, cmd.ensure_filename, 'option2') - - def test_ensure_dirname(self): - cmd = self.cmd - cmd.option1 = os.path.dirname(__file__) or os.curdir - cmd.ensure_dirname('option1') - cmd.option2 = 'xxx' - self.assertRaises(PackagingOptionError, cmd.ensure_dirname, 'option2') - - -def test_suite(): - return unittest.makeSuite(CommandTestCase) - -if __name__ == '__main__': - unittest.main(defaultTest='test_suite') diff --git a/Lib/packaging/tests/test_command_config.py b/Lib/packaging/tests/test_command_config.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_config.py +++ /dev/null @@ -1,76 +0,0 @@ -"""Tests for distutils.command.config.""" -import os -import sys -import logging - -from packaging.command.config import dump_file, config -from packaging.tests import unittest, support - - -class ConfigTestCase(support.LoggingCatcher, - support.TempdirManager, - unittest.TestCase): - - def test_dump_file(self): - this_file = __file__.rstrip('co') - with open(this_file) as f: - numlines = len(f.readlines()) - - dump_file(this_file, 'I am the header') - - logs = [] - for log in self.get_logs(logging.INFO): - logs.extend(line for line in log.split('\n')) - self.assertEqual(len(logs), numlines + 2) - - @unittest.skipIf(sys.platform == 'win32', 'disabled on win32') - def test_search_cpp(self): - pkg_dir, dist = self.create_dist() - cmd = config(dist) - - # simple pattern searches - match = cmd.search_cpp(pattern='xxx', body='/* xxx */') - self.assertEqual(match, 0) - - match = cmd.search_cpp(pattern='_configtest', body='/* xxx */') - self.assertEqual(match, 1) - - def test_finalize_options(self): - # finalize_options does a bit of transformation - # on options - pkg_dir, dist = self.create_dist() - cmd = config(dist) - cmd.include_dirs = 'one%stwo' % os.pathsep - cmd.libraries = 'one' - cmd.library_dirs = 'three%sfour' % os.pathsep - cmd.ensure_finalized() - - self.assertEqual(cmd.include_dirs, ['one', 'two']) - self.assertEqual(cmd.libraries, ['one']) - self.assertEqual(cmd.library_dirs, ['three', 'four']) - - def test_clean(self): - # _clean removes files - tmp_dir = self.mkdtemp() - f1 = os.path.join(tmp_dir, 'one') - f2 = os.path.join(tmp_dir, 'two') - - self.write_file(f1, 'xxx') - self.write_file(f2, 'xxx') - - for f in (f1, f2): - self.assertTrue(os.path.exists(f)) - - pkg_dir, dist = self.create_dist() - cmd = config(dist) - 
cmd._clean(f1, f2) - - for f in (f1, f2): - self.assertFalse(os.path.exists(f)) - - -def test_suite(): - return unittest.makeSuite(ConfigTestCase) - -if __name__ == "__main__": - unittest.main(defaultTest="test_suite") diff --git a/Lib/packaging/tests/test_command_install_data.py b/Lib/packaging/tests/test_command_install_data.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_install_data.py +++ /dev/null @@ -1,148 +0,0 @@ -"""Tests for packaging.command.install_data.""" -import os -import sys -import sysconfig -import packaging.database -from sysconfig import _get_default_scheme -from packaging.tests import unittest, support -from packaging.command.install_data import install_data -from packaging.command.install_dist import install_dist -from packaging.command.install_distinfo import install_distinfo - - -class InstallDataTestCase(support.TempdirManager, - support.LoggingCatcher, - unittest.TestCase): - - def setUp(self): - super(InstallDataTestCase, self).setUp() - scheme = _get_default_scheme() - old_items = sysconfig._SCHEMES.items(scheme) - - def restore(): - sysconfig._SCHEMES.remove_section(scheme) - sysconfig._SCHEMES.add_section(scheme) - for option, value in old_items: - sysconfig._SCHEMES.set(scheme, option, value) - - self.addCleanup(restore) - - def test_simple_run(self): - pkg_dir, dist = self.create_dist() - cmd = install_data(dist) - cmd.install_dir = inst = os.path.join(pkg_dir, 'inst') - scheme = _get_default_scheme() - - sysconfig._SCHEMES.set(scheme, 'inst', - os.path.join(pkg_dir, 'inst')) - sysconfig._SCHEMES.set(scheme, 'inst2', - os.path.join(pkg_dir, 'inst2')) - - one = os.path.join(pkg_dir, 'one') - self.write_file(one, 'xxx') - inst2 = os.path.join(pkg_dir, 'inst2') - two = os.path.join(pkg_dir, 'two') - self.write_file(two, 'xxx') - - # FIXME this creates a literal \{inst2\} directory! 
- cmd.data_files = {one: '{inst}/one', two: '{inst2}/two'} - self.assertCountEqual(cmd.get_inputs(), [one, two]) - - # let's run the command - cmd.ensure_finalized() - cmd.run() - - # let's check the result - self.assertEqual(len(cmd.get_outputs()), 2) - rtwo = os.path.split(two)[-1] - self.assertTrue(os.path.exists(os.path.join(inst2, rtwo))) - rone = os.path.split(one)[-1] - self.assertTrue(os.path.exists(os.path.join(inst, rone))) - cmd.outfiles = [] - - # let's try with warn_dir one - cmd.warn_dir = True - cmd.finalized = False - cmd.ensure_finalized() - cmd.run() - - # let's check the result - self.assertEqual(len(cmd.get_outputs()), 2) - self.assertTrue(os.path.exists(os.path.join(inst2, rtwo))) - self.assertTrue(os.path.exists(os.path.join(inst, rone))) - cmd.outfiles = [] - - # now using root and empty dir - cmd.root = os.path.join(pkg_dir, 'root') - three = os.path.join(cmd.install_dir, 'three') - self.write_file(three, 'xx') - - sysconfig._SCHEMES.set(scheme, 'inst3', cmd.install_dir) - - cmd.data_files = {one: '{inst}/one', two: '{inst2}/two', - three: '{inst3}/three'} - cmd.finalized = False - cmd.ensure_finalized() - cmd.run() - - # let's check the result - self.assertEqual(len(cmd.get_outputs()), 3) - self.assertTrue(os.path.exists(os.path.join(inst2, rtwo))) - self.assertTrue(os.path.exists(os.path.join(inst, rone))) - - def test_resources(self): - install_dir = self.mkdtemp() - scripts_dir = self.mkdtemp() - project_dir, dist = self.create_dist( - name='Spamlib', version='0.1', - data_files={'spamd': '{scripts}/spamd'}) - - os.chdir(project_dir) - self.write_file('spamd', '# Python script') - sysconfig._SCHEMES.set(_get_default_scheme(), 'scripts', scripts_dir) - sys.path.insert(0, install_dir) - packaging.database.disable_cache() - self.addCleanup(sys.path.remove, install_dir) - self.addCleanup(packaging.database.enable_cache) - - cmd = install_dist(dist) - cmd.outputs = ['spamd'] - cmd.install_lib = install_dir - dist.command_obj['install_dist'] = cmd - - cmd = install_data(dist) - cmd.install_dir = install_dir - cmd.ensure_finalized() - dist.command_obj['install_data'] = cmd - cmd.run() - - cmd = install_distinfo(dist) - cmd.ensure_finalized() - dist.command_obj['install_distinfo'] = cmd - cmd.run() - - # first a few sanity checks - self.assertEqual(os.listdir(scripts_dir), ['spamd']) - self.assertEqual(os.listdir(install_dir), ['Spamlib-0.1.dist-info']) - - # now the real test - fn = os.path.join(install_dir, 'Spamlib-0.1.dist-info', 'RESOURCES') - with open(fn, encoding='utf-8') as fp: - content = fp.read().strip() - - expected = 'spamd,%s' % os.path.join(scripts_dir, 'spamd') - self.assertEqual(content, expected) - - # just to be sure, we also test that get_file works here, even though - # packaging.database has its own test file - with packaging.database.get_file('Spamlib', 'spamd') as fp: - content = fp.read() - - self.assertEqual('# Python script', content) - - -def test_suite(): - return unittest.makeSuite(InstallDataTestCase) - -if __name__ == "__main__": - unittest.main(defaultTest="test_suite") diff --git a/Lib/packaging/tests/test_command_install_dist.py b/Lib/packaging/tests/test_command_install_dist.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_install_dist.py +++ /dev/null @@ -1,241 +0,0 @@ -"""Tests for packaging.command.install.""" - -import os -import imp -import sys -from sysconfig import (get_scheme_names, get_config_vars, - _SCHEMES, get_config_var, get_path) - -from packaging.command.build_ext import build_ext -from 
packaging.command.install_dist import install_dist -from packaging.compiler.extension import Extension -from packaging.dist import Distribution -from packaging.errors import PackagingOptionError - -from packaging.tests import unittest, support - - -_CONFIG_VARS = get_config_vars() - - -def _make_ext_name(modname): - if os.name == 'nt' and sys.executable.endswith('_d.exe'): - modname += '_d' - return modname + get_config_var('SO') - - -class InstallTestCase(support.TempdirManager, - support.LoggingCatcher, - unittest.TestCase): - - def test_home_installation_scheme(self): - # This ensure two things: - # - that --home generates the desired set of directory names - # - test --home is supported on all platforms - builddir = self.mkdtemp() - destination = os.path.join(builddir, "installation") - - dist = Distribution({"name": "foopkg"}) - dist.command_obj["build"] = support.DummyCommand( - build_base=builddir, - build_lib=os.path.join(builddir, "lib"), - ) - - old_posix_prefix = _SCHEMES.get('posix_prefix', 'platinclude') - old_posix_home = _SCHEMES.get('posix_home', 'platinclude') - - new_path = '{platbase}/include/python{py_version_short}' - _SCHEMES.set('posix_prefix', 'platinclude', new_path) - _SCHEMES.set('posix_home', 'platinclude', '{platbase}/include/python') - - try: - cmd = install_dist(dist) - cmd.home = destination - cmd.ensure_finalized() - finally: - _SCHEMES.set('posix_prefix', 'platinclude', old_posix_prefix) - _SCHEMES.set('posix_home', 'platinclude', old_posix_home) - - self.assertEqual(cmd.install_base, destination) - self.assertEqual(cmd.install_platbase, destination) - - def check_path(got, expected): - got = os.path.normpath(got) - expected = os.path.normpath(expected) - self.assertEqual(got, expected) - - libdir = os.path.join(destination, "lib", "python") - check_path(cmd.install_lib, libdir) - check_path(cmd.install_platlib, libdir) - check_path(cmd.install_purelib, libdir) - check_path(cmd.install_headers, - os.path.join(destination, "include", "python", "foopkg")) - check_path(cmd.install_scripts, os.path.join(destination, "bin")) - check_path(cmd.install_data, destination) - - def test_user_site(self): - # test install with --user - # preparing the environment for the test - self.old_user_base = get_config_var('userbase') - self.old_user_site = get_path('purelib', '%s_user' % os.name) - self.tmpdir = self.mkdtemp() - self.user_base = os.path.join(self.tmpdir, 'B') - self.user_site = os.path.join(self.tmpdir, 'S') - _CONFIG_VARS['userbase'] = self.user_base - scheme = '%s_user' % os.name - _SCHEMES.set(scheme, 'purelib', self.user_site) - - def _expanduser(path): - if path[0] == '~': - path = os.path.normpath(self.tmpdir) + path[1:] - return path - - self.old_expand = os.path.expanduser - os.path.expanduser = _expanduser - - def cleanup(): - _CONFIG_VARS['userbase'] = self.old_user_base - _SCHEMES.set(scheme, 'purelib', self.old_user_site) - os.path.expanduser = self.old_expand - - self.addCleanup(cleanup) - - schemes = get_scheme_names() - for key in ('nt_user', 'posix_user', 'os2_home'): - self.assertIn(key, schemes) - - dist = Distribution({'name': 'xx'}) - cmd = install_dist(dist) - - # making sure the user option is there - options = [name for name, short, lable in - cmd.user_options] - self.assertIn('user', options) - - # setting a value - cmd.user = True - - # user base and site shouldn't be created yet - self.assertFalse(os.path.exists(self.user_base)) - self.assertFalse(os.path.exists(self.user_site)) - - # let's run finalize - cmd.ensure_finalized() - - # 
now they should - self.assertTrue(os.path.exists(self.user_base)) - self.assertTrue(os.path.exists(self.user_site)) - - self.assertIn('userbase', cmd.config_vars) - self.assertIn('usersite', cmd.config_vars) - - def test_handle_extra_path(self): - dist = Distribution({'name': 'xx', 'extra_path': 'path,dirs'}) - cmd = install_dist(dist) - - # two elements - cmd.handle_extra_path() - self.assertEqual(cmd.extra_path, ['path', 'dirs']) - self.assertEqual(cmd.extra_dirs, 'dirs') - self.assertEqual(cmd.path_file, 'path') - - # one element - cmd.extra_path = ['path'] - cmd.handle_extra_path() - self.assertEqual(cmd.extra_path, ['path']) - self.assertEqual(cmd.extra_dirs, 'path') - self.assertEqual(cmd.path_file, 'path') - - # none - dist.extra_path = cmd.extra_path = None - cmd.handle_extra_path() - self.assertEqual(cmd.extra_path, None) - self.assertEqual(cmd.extra_dirs, '') - self.assertEqual(cmd.path_file, None) - - # three elements (no way !) - cmd.extra_path = 'path,dirs,again' - self.assertRaises(PackagingOptionError, cmd.handle_extra_path) - - def test_finalize_options(self): - dist = Distribution({'name': 'xx'}) - cmd = install_dist(dist) - - # must supply either prefix/exec-prefix/home or - # install-base/install-platbase -- not both - cmd.prefix = 'prefix' - cmd.install_base = 'base' - self.assertRaises(PackagingOptionError, cmd.finalize_options) - - # must supply either home or prefix/exec-prefix -- not both - cmd.install_base = None - cmd.home = 'home' - self.assertRaises(PackagingOptionError, cmd.finalize_options) - - # can't combine user with with prefix/exec_prefix/home or - # install_(plat)base - cmd.prefix = None - cmd.user = 'user' - self.assertRaises(PackagingOptionError, cmd.finalize_options) - - def test_old_record(self): - # test pre-PEP 376 --record option (outside dist-info dir) - install_dir = self.mkdtemp() - project_dir, dist = self.create_dist(py_modules=['hello'], - scripts=['sayhi']) - os.chdir(project_dir) - self.write_file('hello.py', "def main(): print('o hai')") - self.write_file('sayhi', 'from hello import main; main()') - - cmd = install_dist(dist) - dist.command_obj['install_dist'] = cmd - cmd.root = install_dir - cmd.record = os.path.join(project_dir, 'filelist') - cmd.ensure_finalized() - cmd.run() - - with open(cmd.record) as f: - content = f.read() - - found = [os.path.basename(line) for line in content.splitlines()] - expected = ['hello.py', 'hello.%s.pyc' % imp.get_tag(), 'sayhi', - 'METADATA', 'INSTALLER', 'REQUESTED', 'RECORD'] - self.assertEqual(sorted(found), sorted(expected)) - - # XXX test that fancy_getopt is okay with options named - # record and no-record but unrelated - - def test_old_record_extensions(self): - # test pre-PEP 376 --record option with ext modules - install_dir = self.mkdtemp() - project_dir, dist = self.create_dist(ext_modules=[ - Extension('xx', ['xxmodule.c'])]) - os.chdir(project_dir) - support.copy_xxmodule_c(project_dir) - - buildextcmd = build_ext(dist) - support.fixup_build_ext(buildextcmd) - buildextcmd.ensure_finalized() - - cmd = install_dist(dist) - dist.command_obj['install_dist'] = cmd - dist.command_obj['build_ext'] = buildextcmd - cmd.root = install_dir - cmd.record = os.path.join(project_dir, 'filelist') - cmd.ensure_finalized() - cmd.run() - - with open(cmd.record) as f: - content = f.read() - - found = [os.path.basename(line) for line in content.splitlines()] - expected = [_make_ext_name('xx'), - 'METADATA', 'INSTALLER', 'REQUESTED', 'RECORD'] - self.assertEqual(found, expected) - - -def test_suite(): - return 
unittest.makeSuite(InstallTestCase) - -if __name__ == "__main__": - unittest.main(defaultTest="test_suite") diff --git a/Lib/packaging/tests/test_command_install_distinfo.py b/Lib/packaging/tests/test_command_install_distinfo.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_install_distinfo.py +++ /dev/null @@ -1,252 +0,0 @@ -"""Tests for ``packaging.command.install_distinfo``. - -Writing of the RESOURCES file is tested in test_command_install_data. -""" - -import os -import csv -import hashlib -import sysconfig - -from packaging.command.install_distinfo import install_distinfo -from packaging.command.cmd import Command -from packaging.compiler.extension import Extension -from packaging.metadata import Metadata -from packaging.tests import unittest, support - - -class DummyInstallCmd(Command): - - def __init__(self, dist=None): - self.outputs = [] - self.distribution = dist - - def __getattr__(self, name): - return None - - def ensure_finalized(self): - pass - - def get_outputs(self): - return (self.outputs + - self.get_finalized_command('install_distinfo').get_outputs()) - - -class InstallDistinfoTestCase(support.TempdirManager, - support.LoggingCatcher, - unittest.TestCase): - - checkLists = lambda self, x, y: self.assertListEqual(sorted(x), sorted(y)) - - def test_empty_install(self): - pkg_dir, dist = self.create_dist(name='foo', - version='1.0') - install_dir = self.mkdtemp() - - install = DummyInstallCmd(dist) - dist.command_obj['install_dist'] = install - - cmd = install_distinfo(dist) - dist.command_obj['install_distinfo'] = cmd - - cmd.install_dir = install_dir - cmd.ensure_finalized() - cmd.run() - - self.checkLists(os.listdir(install_dir), ['foo-1.0.dist-info']) - - dist_info = os.path.join(install_dir, 'foo-1.0.dist-info') - self.checkLists(os.listdir(dist_info), - ['METADATA', 'RECORD', 'REQUESTED', 'INSTALLER']) - with open(os.path.join(dist_info, 'INSTALLER')) as fp: - self.assertEqual(fp.read(), 'distutils') - with open(os.path.join(dist_info, 'REQUESTED')) as fp: - self.assertEqual(fp.read(), '') - meta_path = os.path.join(dist_info, 'METADATA') - self.assertTrue(Metadata(path=meta_path).check()) - - def test_installer(self): - pkg_dir, dist = self.create_dist(name='foo', - version='1.0') - install_dir = self.mkdtemp() - - install = DummyInstallCmd(dist) - dist.command_obj['install_dist'] = install - - cmd = install_distinfo(dist) - dist.command_obj['install_distinfo'] = cmd - - cmd.install_dir = install_dir - cmd.installer = 'bacon-python' - cmd.ensure_finalized() - cmd.run() - - dist_info = os.path.join(install_dir, 'foo-1.0.dist-info') - with open(os.path.join(dist_info, 'INSTALLER')) as fp: - self.assertEqual(fp.read(), 'bacon-python') - - def test_requested(self): - pkg_dir, dist = self.create_dist(name='foo', - version='1.0') - install_dir = self.mkdtemp() - - install = DummyInstallCmd(dist) - dist.command_obj['install_dist'] = install - - cmd = install_distinfo(dist) - dist.command_obj['install_distinfo'] = cmd - - cmd.install_dir = install_dir - cmd.requested = False - cmd.ensure_finalized() - cmd.run() - - dist_info = os.path.join(install_dir, 'foo-1.0.dist-info') - self.checkLists(os.listdir(dist_info), - ['METADATA', 'RECORD', 'INSTALLER']) - - def test_no_record(self): - pkg_dir, dist = self.create_dist(name='foo', - version='1.0') - install_dir = self.mkdtemp() - - install = DummyInstallCmd(dist) - dist.command_obj['install_dist'] = install - - cmd = install_distinfo(dist) - dist.command_obj['install_distinfo'] = cmd - - cmd.install_dir = 
install_dir - cmd.no_record = True - cmd.ensure_finalized() - cmd.run() - - dist_info = os.path.join(install_dir, 'foo-1.0.dist-info') - self.checkLists(os.listdir(dist_info), - ['METADATA', 'REQUESTED', 'INSTALLER']) - - def test_record_basic(self): - install_dir = self.mkdtemp() - modules_dest = os.path.join(install_dir, 'lib') - scripts_dest = os.path.join(install_dir, 'bin') - project_dir, dist = self.create_dist( - name='Spamlib', version='0.1', - py_modules=['spam'], scripts=['spamd'], - ext_modules=[Extension('_speedspam', ['_speedspam.c'])]) - - # using a real install_dist command is too painful, so we use a mock - # class that's only a holder for options to be used by install_distinfo - # and we create placeholder files manually instead of using build_*. - # the install_* commands will still be consulted by install_distinfo. - os.chdir(project_dir) - self.write_file('spam', '# Python module') - self.write_file('spamd', '# Python script') - extmod = '_speedspam' + sysconfig.get_config_var('SO') - self.write_file(extmod, '') - - install = DummyInstallCmd(dist) - install.outputs = ['spam', 'spamd', extmod] - install.install_lib = modules_dest - install.install_scripts = scripts_dest - dist.command_obj['install_dist'] = install - - cmd = install_distinfo(dist) - cmd.ensure_finalized() - dist.command_obj['install_distinfo'] = cmd - cmd.run() - - # checksum and size are not hard-coded for METADATA as it is - # platform-dependent (line endings) - metadata = os.path.join(modules_dest, 'Spamlib-0.1.dist-info', - 'METADATA') - with open(metadata, 'rb') as fp: - content = fp.read() - - metadata_size = str(len(content)) - metadata_md5 = hashlib.md5(content).hexdigest() - - record = os.path.join(modules_dest, 'Spamlib-0.1.dist-info', 'RECORD') - with open(record, encoding='utf-8') as fp: - content = fp.read() - - found = [] - for line in content.splitlines(): - filename, checksum, size = line.split(',') - filename = os.path.basename(filename) - found.append((filename, checksum, size)) - - expected = [ - ('spam', '6ab2f288ef2545868effe68757448b45', '15'), - ('spamd', 'd13e6156ce78919a981e424b2fdcd974', '15'), - (extmod, 'd41d8cd98f00b204e9800998ecf8427e', '0'), - ('METADATA', metadata_md5, metadata_size), - ('INSTALLER', '44e3fde05f3f537ed85831969acf396d', '9'), - ('REQUESTED', 'd41d8cd98f00b204e9800998ecf8427e', '0'), - ('RECORD', '', ''), - ] - self.assertEqual(found, expected) - - def test_record(self): - pkg_dir, dist = self.create_dist(name='foo', - version='1.0') - install_dir = self.mkdtemp() - - install = DummyInstallCmd(dist) - dist.command_obj['install_dist'] = install - - fake_dists = os.path.join(os.path.dirname(__file__), 'fake_dists') - fake_dists = os.path.realpath(fake_dists) - - # for testing, we simply add all files from _backport's fake_dists - dirs = [] - for dir in os.listdir(fake_dists): - full_path = os.path.join(fake_dists, dir) - if (not dir.endswith('.egg') or dir.endswith('.egg-info') or - dir.endswith('.dist-info')) and os.path.isdir(full_path): - dirs.append(full_path) - - for dir in dirs: - for path, subdirs, files in os.walk(dir): - install.outputs += [os.path.join(path, f) for f in files] - install.outputs += [os.path.join('path', f + 'c') - for f in files if f.endswith('.py')] - - cmd = install_distinfo(dist) - dist.command_obj['install_distinfo'] = cmd - - cmd.install_dir = install_dir - cmd.ensure_finalized() - cmd.run() - - dist_info = os.path.join(install_dir, 'foo-1.0.dist-info') - - expected = [] - for f in install.get_outputs(): - if (f.endswith(('.pyc', 
'.pyo')) or f == os.path.join( - install_dir, 'foo-1.0.dist-info', 'RECORD')): - expected.append([f, '', '']) - else: - size = os.path.getsize(f) - md5 = hashlib.md5() - with open(f, 'rb') as fp: - md5.update(fp.read()) - hash = md5.hexdigest() - expected.append([f, hash, str(size)]) - - parsed = [] - with open(os.path.join(dist_info, 'RECORD'), 'r') as f: - reader = csv.reader(f, delimiter=',', - lineterminator=os.linesep, - quotechar='"') - parsed = list(reader) - - self.maxDiff = None - self.checkLists(parsed, expected) - - -def test_suite(): - return unittest.makeSuite(InstallDistinfoTestCase) - - -if __name__ == "__main__": - unittest.main(defaultTest="test_suite") diff --git a/Lib/packaging/tests/test_command_install_headers.py b/Lib/packaging/tests/test_command_install_headers.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_install_headers.py +++ /dev/null @@ -1,38 +0,0 @@ -"""Tests for packaging.command.install_headers.""" -import os - -from packaging.command.install_headers import install_headers -from packaging.tests import unittest, support - - -class InstallHeadersTestCase(support.TempdirManager, - support.LoggingCatcher, - unittest.TestCase): - - def test_simple_run(self): - # we have two headers - header_list = self.mkdtemp() - header1 = os.path.join(header_list, 'header1') - header2 = os.path.join(header_list, 'header2') - self.write_file(header1) - self.write_file(header2) - headers = [header1, header2] - - pkg_dir, dist = self.create_dist(headers=headers) - cmd = install_headers(dist) - self.assertEqual(cmd.get_inputs(), headers) - - # let's run the command - cmd.install_dir = os.path.join(pkg_dir, 'inst') - cmd.ensure_finalized() - cmd.run() - - # let's check the results - self.assertEqual(len(cmd.get_outputs()), 2) - - -def test_suite(): - return unittest.makeSuite(InstallHeadersTestCase) - -if __name__ == "__main__": - unittest.main(defaultTest="test_suite") diff --git a/Lib/packaging/tests/test_command_install_lib.py b/Lib/packaging/tests/test_command_install_lib.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_install_lib.py +++ /dev/null @@ -1,110 +0,0 @@ -"""Tests for packaging.command.install_data.""" -import os -import sys -import imp - -from packaging.tests import unittest, support -from packaging.command.install_lib import install_lib -from packaging.compiler.extension import Extension -from packaging.errors import PackagingOptionError - - -class InstallLibTestCase(support.TempdirManager, - support.LoggingCatcher, - support.EnvironRestorer, - unittest.TestCase): - - restore_environ = ['PYTHONPATH'] - - def test_finalize_options(self): - dist = self.create_dist()[1] - cmd = install_lib(dist) - - cmd.finalize_options() - self.assertTrue(cmd.compile) - self.assertEqual(cmd.optimize, 0) - - # optimize must be 0, 1, or 2 - cmd.optimize = 'foo' - self.assertRaises(PackagingOptionError, cmd.finalize_options) - cmd.optimize = '4' - self.assertRaises(PackagingOptionError, cmd.finalize_options) - - cmd.optimize = '2' - cmd.finalize_options() - self.assertEqual(cmd.optimize, 2) - - def test_byte_compile(self): - project_dir, dist = self.create_dist() - os.chdir(project_dir) - cmd = install_lib(dist) - cmd.compile = True - cmd.optimize = 1 - - f = os.path.join(project_dir, 'foo.py') - self.write_file(f, '# python file') - cmd.byte_compile([f]) - pyc_file = imp.cache_from_source('foo.py', True) - pyo_file = imp.cache_from_source('foo.py', False) - self.assertTrue(os.path.exists(pyc_file)) - self.assertTrue(os.path.exists(pyo_file)) - - 
def test_byte_compile_under_B(self): - # make sure byte compilation works under -B (dont_write_bytecode) - self.addCleanup(setattr, sys, 'dont_write_bytecode', - sys.dont_write_bytecode) - sys.dont_write_bytecode = True - self.test_byte_compile() - - def test_get_outputs(self): - project_dir, dist = self.create_dist() - os.chdir(project_dir) - os.mkdir('spam') - cmd = install_lib(dist) - - # setting up a dist environment - cmd.compile = True - cmd.optimize = 1 - cmd.install_dir = self.mkdtemp() - f = os.path.join(project_dir, 'spam', '__init__.py') - self.write_file(f, '# python package') - cmd.distribution.ext_modules = [Extension('foo', ['xxx'])] - cmd.distribution.packages = ['spam'] - - # make sure the build_lib is set the temp dir # XXX what? this is not - # needed in the same distutils test and should work without manual - # intervention - build_dir = os.path.split(project_dir)[0] - cmd.get_finalized_command('build_py').build_lib = build_dir - - # get_outputs should return 4 elements: spam/__init__.py, .pyc and - # .pyo, foo.import-tag-abiflags.so / foo.pyd - outputs = cmd.get_outputs() - self.assertEqual(len(outputs), 4, outputs) - - def test_get_inputs(self): - project_dir, dist = self.create_dist() - os.chdir(project_dir) - os.mkdir('spam') - cmd = install_lib(dist) - - # setting up a dist environment - cmd.compile = True - cmd.optimize = 1 - cmd.install_dir = self.mkdtemp() - f = os.path.join(project_dir, 'spam', '__init__.py') - self.write_file(f, '# python package') - cmd.distribution.ext_modules = [Extension('foo', ['xxx'])] - cmd.distribution.packages = ['spam'] - - # get_inputs should return 2 elements: spam/__init__.py and - # foo.import-tag-abiflags.so / foo.pyd - inputs = cmd.get_inputs() - self.assertEqual(len(inputs), 2, inputs) - - -def test_suite(): - return unittest.makeSuite(InstallLibTestCase) - -if __name__ == "__main__": - unittest.main(defaultTest="test_suite") diff --git a/Lib/packaging/tests/test_command_install_scripts.py b/Lib/packaging/tests/test_command_install_scripts.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_install_scripts.py +++ /dev/null @@ -1,75 +0,0 @@ -"""Tests for packaging.command.install_scripts.""" -import os - -from packaging.tests import unittest, support -from packaging.command.install_scripts import install_scripts -from packaging.dist import Distribution - - -class InstallScriptsTestCase(support.TempdirManager, - support.LoggingCatcher, - unittest.TestCase): - - def test_default_settings(self): - dist = Distribution() - dist.command_obj["build"] = support.DummyCommand( - build_scripts="/foo/bar") - dist.command_obj["install_dist"] = support.DummyCommand( - install_scripts="/splat/funk", - force=True, - skip_build=True, - ) - cmd = install_scripts(dist) - self.assertFalse(cmd.force) - self.assertFalse(cmd.skip_build) - self.assertIs(cmd.build_dir, None) - self.assertIs(cmd.install_dir, None) - - cmd.finalize_options() - - self.assertTrue(cmd.force) - self.assertTrue(cmd.skip_build) - self.assertEqual(cmd.build_dir, "/foo/bar") - self.assertEqual(cmd.install_dir, "/splat/funk") - - def test_installation(self): - source = self.mkdtemp() - expected = [] - - def write_script(name, text): - expected.append(name) - with open(os.path.join(source, name), "w") as f: - f.write(text) - - write_script("script1.py", ("#! 
/usr/bin/env python2.3\n" - "# bogus script w/ Python sh-bang\n" - "pass\n")) - write_script("script2.py", ("#!/usr/bin/python\n" - "# bogus script w/ Python sh-bang\n" - "pass\n")) - write_script("shell.sh", ("#!/bin/sh\n" - "# bogus shell script w/ sh-bang\n" - "exit 0\n")) - - target = self.mkdtemp() - dist = Distribution() - dist.command_obj["build"] = support.DummyCommand(build_scripts=source) - dist.command_obj["install_dist"] = support.DummyCommand( - install_scripts=target, - force=True, - skip_build=True, - ) - cmd = install_scripts(dist) - cmd.finalize_options() - cmd.run() - - installed = os.listdir(target) - for name in expected: - self.assertIn(name, installed) - - -def test_suite(): - return unittest.makeSuite(InstallScriptsTestCase) - -if __name__ == "__main__": - unittest.main(defaultTest="test_suite") diff --git a/Lib/packaging/tests/test_command_register.py b/Lib/packaging/tests/test_command_register.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_register.py +++ /dev/null @@ -1,260 +0,0 @@ -"""Tests for packaging.command.register.""" -import os -import getpass -import urllib.request -import urllib.error -import urllib.parse - -try: - import docutils - DOCUTILS_SUPPORT = True -except ImportError: - DOCUTILS_SUPPORT = False - -from packaging.tests import unittest, support -from packaging.tests.support import Inputs -from packaging.command import register as register_module -from packaging.command.register import register -from packaging.errors import PackagingSetupError - - -PYPIRC_NOPASSWORD = """\ -[distutils] - -index-servers = - server1 - -[server1] -username:me -""" - -WANTED_PYPIRC = """\ -[distutils] -index-servers = - pypi - -[pypi] -username:tarek -password:password -""" - - -class FakeOpener: - """Fakes a PyPI server""" - def __init__(self): - self.reqs = [] - - def __call__(self, *args): - return self - - def open(self, req): - self.reqs.append(req) - return self - - def read(self): - return 'xxx' - - -class RegisterTestCase(support.TempdirManager, - support.EnvironRestorer, - support.LoggingCatcher, - unittest.TestCase): - - restore_environ = ['HOME'] - - def setUp(self): - super(RegisterTestCase, self).setUp() - self.tmp_dir = self.mkdtemp() - self.rc = os.path.join(self.tmp_dir, '.pypirc') - os.environ['HOME'] = self.tmp_dir - - # patching the password prompt - self._old_getpass = getpass.getpass - - def _getpass(prompt): - return 'password' - - getpass.getpass = _getpass - self.old_opener = urllib.request.build_opener - self.conn = urllib.request.build_opener = FakeOpener() - - def tearDown(self): - getpass.getpass = self._old_getpass - urllib.request.build_opener = self.old_opener - if hasattr(register_module, 'input'): - del register_module.input - super(RegisterTestCase, self).tearDown() - - def _get_cmd(self, metadata=None): - if metadata is None: - metadata = {'home_page': 'xxx', 'author': 'xxx', - 'author_email': 'xxx', - 'name': 'xxx', 'version': 'xxx'} - pkg_info, dist = self.create_dist(**metadata) - return register(dist) - - def test_create_pypirc(self): - # this test makes sure a .pypirc file - # is created when requested. - - # let's create a register instance - cmd = self._get_cmd() - - # we shouldn't have a .pypirc file yet - self.assertFalse(os.path.exists(self.rc)) - - # patching input and getpass.getpass - # so register gets happy - # Here's what we are faking : - # use your existing login (choice 1.) - # Username : 'tarek' - # Password : 'password' - # Save your login (y/N)? 
: 'y' - inputs = Inputs('1', 'tarek', 'y') - register_module.input = inputs - cmd.ensure_finalized() - cmd.run() - - # we should have a brand new .pypirc file - self.assertTrue(os.path.exists(self.rc)) - - # with the content similar to WANTED_PYPIRC - with open(self.rc) as fp: - content = fp.read() - self.assertEqual(content, WANTED_PYPIRC) - - # now let's make sure the .pypirc file generated - # really works : we shouldn't be asked anything - # if we run the command again - def _no_way(prompt=''): - raise AssertionError(prompt) - - register_module.input = _no_way - cmd.show_response = True - cmd.finalized = False - cmd.ensure_finalized() - cmd.run() - - # let's see what the server received : we should - # have 2 similar requests - self.assertEqual(len(self.conn.reqs), 2) - req1 = dict(self.conn.reqs[0].headers) - req2 = dict(self.conn.reqs[1].headers) - self.assertEqual(req2['Content-length'], req1['Content-length']) - self.assertIn(b'xxx', self.conn.reqs[1].data) - - def test_password_not_in_file(self): - - self.write_file(self.rc, PYPIRC_NOPASSWORD) - cmd = self._get_cmd() - cmd.finalize_options() - cmd._set_config() - cmd.send_metadata() - - # dist.password should be set - # therefore used afterwards by other commands - self.assertEqual(cmd.distribution.password, 'password') - - def test_registration(self): - # this test runs choice 2 - cmd = self._get_cmd() - inputs = Inputs('2', 'tarek', 'tarek at ziade.org') - register_module.input = inputs - # let's run the command - # FIXME does this send a real request? use a mock server - cmd.ensure_finalized() - cmd.run() - - # we should have send a request - self.assertEqual(len(self.conn.reqs), 1) - req = self.conn.reqs[0] - headers = dict(req.headers) - self.assertEqual(headers['Content-length'], '628') - self.assertIn(b'tarek', req.data) - - def test_password_reset(self): - # this test runs choice 3 - cmd = self._get_cmd() - inputs = Inputs('3', 'tarek at ziade.org') - register_module.input = inputs - cmd.ensure_finalized() - cmd.run() - - # we should have send a request - self.assertEqual(len(self.conn.reqs), 1) - req = self.conn.reqs[0] - headers = dict(req.headers) - self.assertEqual(headers['Content-length'], '298') - self.assertIn(b'tarek', req.data) - - @unittest.skipUnless(DOCUTILS_SUPPORT, 'needs docutils') - def test_strict(self): - # testing the strict option: when on, the register command stops if the - # metadata is incomplete or if description contains bad reST - - # empty metadata # XXX this is not really empty.. 
- cmd = self._get_cmd({'name': 'xxx', 'version': 'xxx'}) - cmd.ensure_finalized() - cmd.strict = True - inputs = Inputs('1', 'tarek', 'y') - register_module.input = inputs - self.assertRaises(PackagingSetupError, cmd.run) - - # metadata is OK but description is broken - metadata = {'home_page': 'xxx', 'author': 'xxx', - 'author_email': '?x?x?', - 'name': 'xxx', 'version': '4.2', - 'description': 'title\n==\n\ntext'} - - cmd = self._get_cmd(metadata) - cmd.ensure_finalized() - cmd.strict = True - self.assertRaises(PackagingSetupError, cmd.run) - - # now something that works - metadata['description'] = 'title\n=====\n\ntext' - cmd = self._get_cmd(metadata) - cmd.ensure_finalized() - cmd.strict = True - inputs = Inputs('1', 'tarek', 'y') - register_module.input = inputs - cmd.ensure_finalized() - cmd.run() - - # strict is not by default - cmd = self._get_cmd() - cmd.ensure_finalized() - inputs = Inputs('1', 'tarek', 'y') - register_module.input = inputs - cmd.ensure_finalized() - cmd.run() - - # and finally a Unicode test (bug #12114) - metadata = {'home_page': 'xxx', 'author': '\u00c9ric', - 'author_email': 'xxx', 'name': 'xxx', - 'version': 'xxx', - 'summary': 'Something about esszet \u00df', - 'description': 'More things about esszet \u00df'} - - cmd = self._get_cmd(metadata) - cmd.ensure_finalized() - cmd.strict = True - inputs = Inputs('1', 'tarek', 'y') - register_module.input = inputs - cmd.ensure_finalized() - cmd.run() - - def test_register_pep345(self): - cmd = self._get_cmd({}) - cmd.ensure_finalized() - cmd.distribution.metadata['Requires-Dist'] = ['lxml'] - data = cmd.build_post_data('submit') - self.assertEqual(data['metadata_version'], '1.2') - self.assertEqual(data['requires_dist'], ['lxml']) - - -def test_suite(): - return unittest.makeSuite(RegisterTestCase) - -if __name__ == "__main__": - unittest.main(defaultTest="test_suite") diff --git a/Lib/packaging/tests/test_command_sdist.py b/Lib/packaging/tests/test_command_sdist.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_sdist.py +++ /dev/null @@ -1,394 +0,0 @@ -"""Tests for packaging.command.sdist.""" -import os -import tarfile -import zipfile - -try: - import grp - import pwd - UID_GID_SUPPORT = True -except ImportError: - UID_GID_SUPPORT = False - -from shutil import get_archive_formats -from os.path import join -from packaging.dist import Distribution -from packaging.util import find_executable -from packaging.errors import PackagingOptionError -from packaging.command.sdist import sdist, show_formats - -from test.support import captured_stdout -from packaging.tests import support, unittest -from packaging.tests.support import requires_zlib - - -MANIFEST = """\ -# file GENERATED by packaging, do NOT edit -inroot.txt -setup.cfg -data%(sep)sdata.dt -scripts%(sep)sscript.py -some%(sep)sfile.txt -some%(sep)sother_file.txt -somecode%(sep)s__init__.py -somecode%(sep)sdoc.dat -somecode%(sep)sdoc.txt -""" - - -def builder(dist, filelist): - filelist.append('bah') - - -class SDistTestCase(support.TempdirManager, - support.LoggingCatcher, - support.EnvironRestorer, - unittest.TestCase): - - restore_environ = ['HOME'] - - def setUp(self): - super(SDistTestCase, self).setUp() - self.tmp_dir = self.mkdtemp() - os.environ['HOME'] = self.tmp_dir - # setting up an environment - self.old_path = os.getcwd() - os.mkdir(join(self.tmp_dir, 'somecode')) - os.mkdir(join(self.tmp_dir, 'dist')) - # a package, and a README - self.write_file((self.tmp_dir, 'README'), 'xxx') - self.write_file((self.tmp_dir, 'somecode', 
'__init__.py'), '#') - os.chdir(self.tmp_dir) - - def tearDown(self): - # back to normal - os.chdir(self.old_path) - super(SDistTestCase, self).tearDown() - - def get_cmd(self, metadata=None): - """Returns a cmd""" - if metadata is None: - metadata = {'name': 'fake', 'version': '1.0', - 'home_page': 'xxx', 'author': 'xxx', - 'author_email': 'xxx'} - dist = Distribution(metadata) - dist.packages = ['somecode'] - cmd = sdist(dist) - cmd.dist_dir = 'dist' - return dist, cmd - - @requires_zlib - def test_prune_file_list(self): - # this test creates a package with some vcs dirs in it - # and launch sdist to make sure they get pruned - # on all systems - - # creating VCS directories with some files in them - os.mkdir(join(self.tmp_dir, 'somecode', '.svn')) - self.write_file((self.tmp_dir, 'somecode', '.svn', 'ok.py'), 'xxx') - - os.mkdir(join(self.tmp_dir, 'somecode', '.hg')) - self.write_file((self.tmp_dir, 'somecode', '.hg', - 'ok'), 'xxx') - - os.mkdir(join(self.tmp_dir, 'somecode', '.git')) - self.write_file((self.tmp_dir, 'somecode', '.git', - 'ok'), 'xxx') - - # now building a sdist - dist, cmd = self.get_cmd() - - # zip is available universally - # (tar might not be installed under win32) - cmd.formats = ['zip'] - - cmd.ensure_finalized() - cmd.run() - - # now let's check what we have - dist_folder = join(self.tmp_dir, 'dist') - files = os.listdir(dist_folder) - self.assertEqual(files, ['fake-1.0.zip']) - - with zipfile.ZipFile(join(dist_folder, 'fake-1.0.zip')) as zip_file: - content = zip_file.namelist() - - # making sure everything has been pruned correctly - self.assertEqual(len(content), 2) - - @requires_zlib - @unittest.skipIf(find_executable('tar') is None or - find_executable('gzip') is None, - 'requires tar and gzip programs') - def test_make_distribution(self): - # building a sdist - dist, cmd = self.get_cmd() - - # creating a gztar then a tar - cmd.formats = ['gztar', 'tar'] - cmd.ensure_finalized() - cmd.run() - - # making sure we have two files - dist_folder = join(self.tmp_dir, 'dist') - result = sorted(os.listdir(dist_folder)) - self.assertEqual(result, ['fake-1.0.tar', 'fake-1.0.tar.gz']) - - os.remove(join(dist_folder, 'fake-1.0.tar')) - os.remove(join(dist_folder, 'fake-1.0.tar.gz')) - - # now trying a tar then a gztar - cmd.formats = ['tar', 'gztar'] - cmd.finalized = False - cmd.ensure_finalized() - cmd.run() - - result = sorted(os.listdir(dist_folder)) - self.assertEqual(result, ['fake-1.0.tar', 'fake-1.0.tar.gz']) - - @requires_zlib - def test_add_defaults(self): - - # http://bugs.python.org/issue2279 - - # add_default should also include - # data_files and package_data - dist, cmd = self.get_cmd() - - # filling data_files by pointing files - # in package_data - dist.package_data = {'': ['*.cfg', '*.dat'], - 'somecode': ['*.txt']} - self.write_file((self.tmp_dir, 'setup.cfg'), '#') - self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#') - self.write_file((self.tmp_dir, 'somecode', 'doc.dat'), '#') - - # adding some data in data_files - data_dir = join(self.tmp_dir, 'data') - os.mkdir(data_dir) - self.write_file((data_dir, 'data.dt'), '#') - some_dir = join(self.tmp_dir, 'some') - os.mkdir(some_dir) - self.write_file((self.tmp_dir, 'inroot.txt'), '#') - self.write_file((some_dir, 'file.txt'), '#') - self.write_file((some_dir, 'other_file.txt'), '#') - - dist.data_files = {'data/data.dt': '{appdata}/data.dt', - 'inroot.txt': '{appdata}/inroot.txt', - 'some/file.txt': '{appdata}/file.txt', - 'some/other_file.txt': '{appdata}/other_file.txt'} - - # adding a script 
- script_dir = join(self.tmp_dir, 'scripts') - os.mkdir(script_dir) - self.write_file((script_dir, 'script.py'), '#') - dist.scripts = [join('scripts', 'script.py')] - - cmd.formats = ['zip'] - cmd.use_defaults = True - - cmd.ensure_finalized() - cmd.run() - - # now let's check what we have - dist_folder = join(self.tmp_dir, 'dist') - files = os.listdir(dist_folder) - self.assertEqual(files, ['fake-1.0.zip']) - - with zipfile.ZipFile(join(dist_folder, 'fake-1.0.zip')) as zip_file: - content = zip_file.namelist() - - # Making sure everything was added. This includes 8 code and data - # files in addition to PKG-INFO and setup.cfg - self.assertEqual(len(content), 10) - - # Checking the MANIFEST - with open(join(self.tmp_dir, 'MANIFEST')) as fp: - manifest = fp.read() - self.assertEqual(manifest, MANIFEST % {'sep': os.sep}) - - @requires_zlib - def test_metadata_check_option(self): - # testing the `check-metadata` option - dist, cmd = self.get_cmd(metadata={'name': 'xxx', 'version': 'xxx'}) - - # this should cause the check subcommand to log two warnings: - # version is invalid, home-page and author are missing - cmd.ensure_finalized() - cmd.run() - warnings = self.get_logs() - check_warnings = [msg for msg in warnings if - not msg.startswith('sdist:')] - self.assertEqual(len(check_warnings), 2, warnings) - - # trying with a complete set of metadata - self.loghandler.flush() - dist, cmd = self.get_cmd() - cmd.ensure_finalized() - cmd.metadata_check = False - cmd.run() - warnings = self.get_logs() - self.assertEqual(len(warnings), 2) - self.assertIn('using default file list', warnings[0]) - self.assertIn("'setup.cfg' file not found", warnings[1]) - - def test_show_formats(self): - with captured_stdout() as stdout: - show_formats() - stdout = stdout.getvalue() - - # the output should be a header line + one line per format - num_formats = len(get_archive_formats()) - output = [line for line in stdout.split('\n') - if line.strip().startswith('--formats=')] - self.assertEqual(len(output), num_formats) - - def test_finalize_options(self): - dist, cmd = self.get_cmd() - cmd.finalize_options() - - # default options set by finalize - self.assertEqual(cmd.manifest, 'MANIFEST') - self.assertEqual(cmd.dist_dir, 'dist') - - # formats has to be a string splitable on (' ', ',') or - # a stringlist - cmd.formats = 1 - self.assertRaises(PackagingOptionError, cmd.finalize_options) - cmd.formats = ['zip'] - cmd.finalize_options() - - # formats has to be known - cmd.formats = 'supazipa' - self.assertRaises(PackagingOptionError, cmd.finalize_options) - - @requires_zlib - def test_template(self): - dist, cmd = self.get_cmd() - dist.extra_files = ['include yeah'] - cmd.ensure_finalized() - self.write_file((self.tmp_dir, 'yeah'), 'xxx') - cmd.run() - with open(cmd.manifest) as f: - content = f.read() - - self.assertIn('yeah', content) - - @requires_zlib - @unittest.skipUnless(UID_GID_SUPPORT, "requires grp and pwd support") - @unittest.skipIf(find_executable('tar') is None or - find_executable('gzip') is None, - 'requires tar and gzip programs') - def test_make_distribution_owner_group(self): - # building a sdist - dist, cmd = self.get_cmd() - - # creating a gztar and specifying the owner+group - cmd.formats = ['gztar'] - cmd.owner = pwd.getpwuid(0)[0] - cmd.group = grp.getgrgid(0)[0] - cmd.ensure_finalized() - cmd.run() - - # making sure we have the good rights - archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz') - with tarfile.open(archive_name) as archive: - for member in archive.getmembers(): - 
self.assertEqual(member.uid, 0) - self.assertEqual(member.gid, 0) - - # building a sdist again - dist, cmd = self.get_cmd() - - # creating a gztar - cmd.formats = ['gztar'] - cmd.ensure_finalized() - cmd.run() - - # making sure we have the good rights - archive_name = join(self.tmp_dir, 'dist', 'fake-1.0.tar.gz') - with tarfile.open(archive_name) as archive: - - # note that we are not testing the group ownership here - # because, depending on the platforms and the container - # rights (see #7408) - for member in archive.getmembers(): - self.assertEqual(member.uid, os.getuid()) - - @requires_zlib - def test_get_file_list(self): - # make sure MANIFEST is recalculated - dist, cmd = self.get_cmd() - # filling data_files by pointing files in package_data - dist.package_data = {'somecode': ['*.txt']} - self.write_file((self.tmp_dir, 'somecode', 'doc.txt'), '#') - cmd.ensure_finalized() - cmd.run() - - # Should produce four lines. Those lines are one comment, one default - # (README) and two package files. - with open(cmd.manifest) as f: - manifest = [line.strip() for line in f.read().split('\n') - if line.strip() != ''] - self.assertEqual(len(manifest), 3) - - # Adding a file - self.write_file((self.tmp_dir, 'somecode', 'doc2.txt'), '#') - - # make sure build_py is reinitialized, like a fresh run - build_py = dist.get_command_obj('build_py') - build_py.finalized = False - build_py.ensure_finalized() - - cmd.run() - - with open(cmd.manifest) as f: - manifest2 = [line.strip() for line in f.read().split('\n') - if line.strip() != ''] - - # Do we have the new file in MANIFEST? - self.assertEqual(len(manifest2), 4) - self.assertIn('doc2.txt', manifest2[-1]) - - @requires_zlib - def test_manifest_marker(self): - # check that autogenerated MANIFESTs have a marker - dist, cmd = self.get_cmd() - cmd.ensure_finalized() - cmd.run() - - with open(cmd.manifest) as f: - manifest = [line.strip() for line in f.read().split('\n') - if line.strip() != ''] - - self.assertEqual(manifest[0], - '# file GENERATED by packaging, do NOT edit') - - @requires_zlib - def test_manual_manifest(self): - # check that a MANIFEST without a marker is left alone - dist, cmd = self.get_cmd() - cmd.ensure_finalized() - self.write_file((self.tmp_dir, cmd.manifest), 'README.manual') - cmd.run() - - with open(cmd.manifest) as f: - manifest = [line.strip() for line in f.read().split('\n') - if line.strip() != ''] - - self.assertEqual(manifest, ['README.manual']) - - @requires_zlib - def test_manifest_builder(self): - dist, cmd = self.get_cmd() - cmd.manifest_builders = 'packaging.tests.test_command_sdist.builder' - cmd.ensure_finalized() - cmd.run() - self.assertIn('bah', cmd.filelist.files) - - -def test_suite(): - return unittest.makeSuite(SDistTestCase) - -if __name__ == "__main__": - unittest.main(defaultTest="test_suite") diff --git a/Lib/packaging/tests/test_command_test.py b/Lib/packaging/tests/test_command_test.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_test.py +++ /dev/null @@ -1,224 +0,0 @@ -import os -import re -import sys -import shutil -import unittest as ut1 -import packaging.database - -from os.path import join -from operator import getitem, setitem, delitem -from packaging.command.build import build -from packaging.tests import unittest -from packaging.tests.support import (TempdirManager, EnvironRestorer, - LoggingCatcher) -from packaging.command.test import test -from packaging.command import set_command -from packaging.dist import Distribution - - -EXPECTED_OUTPUT_RE = r'''FAIL: test_blah 
\(myowntestmodule.SomeTest\) ----------------------------------------------------------------------- -Traceback \(most recent call last\): - File ".+/myowntestmodule.py", line \d+, in test_blah - self.fail\("horribly"\) -AssertionError: horribly -''' - -here = os.path.dirname(os.path.abspath(__file__)) - - -class MockBuildCmd(build): - build_lib = "mock build lib" - command_name = 'build' - plat_name = 'whatever' - - def initialize_options(self): - pass - - def finalize_options(self): - pass - - def run(self): - self._record.append("build has run") - - -class TestTest(TempdirManager, - EnvironRestorer, - LoggingCatcher, - unittest.TestCase): - - restore_environ = ['PYTHONPATH'] - - def setUp(self): - super(TestTest, self).setUp() - self.addCleanup(packaging.database.clear_cache) - new_pythonpath = os.path.dirname(os.path.dirname(here)) - pythonpath = os.environ.get('PYTHONPATH') - if pythonpath is not None: - new_pythonpath = os.pathsep.join((new_pythonpath, pythonpath)) - os.environ['PYTHONPATH'] = new_pythonpath - - def assert_re_match(self, pattern, string): - def quote(s): - lines = ['## ' + line for line in s.split('\n')] - sep = ["#" * 60] - return [''] + sep + lines + sep - msg = quote(pattern) + ["didn't match"] + quote(string) - msg = "\n".join(msg) - if not re.search(pattern, string): - self.fail(msg) - - def prepare_dist(self, dist_name): - pkg_dir = join(os.path.dirname(__file__), "dists", dist_name) - temp_pkg_dir = join(self.mkdtemp(), dist_name) - shutil.copytree(pkg_dir, temp_pkg_dir) - return temp_pkg_dir - - def safely_replace(self, obj, attr, - new_val=None, delete=False, dictionary=False): - """Replace a object's attribute returning to its original state at the - end of the test run. Creates the attribute if not present before - (deleting afterwards). When delete=True, makes sure the value is del'd - for the test run. 
If dictionary is set to True, operates of its items - rather than attributes.""" - if dictionary: - _setattr, _getattr, _delattr = setitem, getitem, delitem - - def _hasattr(_dict, value): - return value in _dict - else: - _setattr, _getattr, _delattr, _hasattr = (setattr, getattr, - delattr, hasattr) - - orig_has_attr = _hasattr(obj, attr) - if orig_has_attr: - orig_val = _getattr(obj, attr) - - if delete is False: - _setattr(obj, attr, new_val) - elif orig_has_attr: - _delattr(obj, attr) - - def do_cleanup(): - if orig_has_attr: - _setattr(obj, attr, orig_val) - elif _hasattr(obj, attr): - _delattr(obj, attr) - - self.addCleanup(do_cleanup) - - def test_runs_unittest(self): - module_name, a_module = self.prepare_a_module() - record = [] - a_module.recorder = lambda *args: record.append("suite") - - class MockTextTestRunner: - def __init__(*_, **__): - pass - - def run(_self, suite): - record.append("run") - - self.safely_replace(ut1, "TextTestRunner", MockTextTestRunner) - - dist = Distribution() - cmd = test(dist) - cmd.suite = "%s.recorder" % module_name - cmd.run() - self.assertEqual(record, ["suite", "run"]) - - def test_builds_before_running_tests(self): - self.addCleanup(set_command, 'packaging.command.build.build') - set_command('packaging.tests.test_command_test.MockBuildCmd') - - dist = Distribution() - dist.get_command_obj('build')._record = record = [] - cmd = test(dist) - cmd.runner = self.prepare_named_function(lambda: None) - cmd.ensure_finalized() - cmd.run() - self.assertEqual(['build has run'], record) - - @unittest.skip('needs to be written') - def test_works_with_2to3(self): - pass - - def test_checks_requires(self): - dist = Distribution() - cmd = test(dist) - phony_project = 'ohno_ohno-impossible_1234-name_stop-that!' - cmd.tests_require = [phony_project] - cmd.ensure_finalized() - logs = self.get_logs() - self.assertIn(phony_project, logs[-1]) - - def prepare_a_module(self): - tmp_dir = self.mkdtemp() - sys.path.append(tmp_dir) - self.addCleanup(sys.path.remove, tmp_dir) - - self.write_file((tmp_dir, 'packaging_tests_a.py'), '') - import packaging_tests_a as a_module - return "packaging_tests_a", a_module - - def prepare_named_function(self, func): - module_name, a_module = self.prepare_a_module() - a_module.recorder = func - return "%s.recorder" % module_name - - def test_custom_runner(self): - dist = Distribution() - cmd = test(dist) - record = [] - cmd.runner = self.prepare_named_function( - lambda: record.append("runner called")) - cmd.ensure_finalized() - cmd.run() - self.assertEqual(["runner called"], record) - - def prepare_mock_ut2(self): - class MockUTClass: - def __init__(*_, **__): - pass - - def discover(self): - pass - - def run(self, _): - pass - - class MockUTModule: - TestLoader = MockUTClass - TextTestRunner = MockUTClass - - mock_ut2 = MockUTModule() - self.safely_replace(sys.modules, "unittest2", - mock_ut2, dictionary=True) - return mock_ut2 - - def test_gets_unittest_discovery(self): - mock_ut2 = self.prepare_mock_ut2() - dist = Distribution() - cmd = test(dist) - self.safely_replace(ut1.TestLoader, "discover", lambda: None) - self.assertEqual(cmd.get_ut_with_discovery(), ut1) - - del ut1.TestLoader.discover - self.assertEqual(cmd.get_ut_with_discovery(), mock_ut2) - - def test_calls_discover(self): - self.safely_replace(ut1.TestLoader, "discover", delete=True) - mock_ut2 = self.prepare_mock_ut2() - record = [] - mock_ut2.TestLoader.discover = lambda self, path: record.append(path) - dist = Distribution() - cmd = test(dist) - cmd.run() - 
self.assertEqual([os.curdir], record) - - -def test_suite(): - return unittest.makeSuite(TestTest) - -if __name__ == "__main__": - unittest.main(defaultTest="test_suite") diff --git a/Lib/packaging/tests/test_command_upload.py b/Lib/packaging/tests/test_command_upload.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_upload.py +++ /dev/null @@ -1,159 +0,0 @@ -"""Tests for packaging.command.upload.""" -import os - -from packaging.command.upload import upload -from packaging.dist import Distribution -from packaging.errors import PackagingOptionError - -from packaging.tests import unittest, support -try: - import threading - from packaging.tests.pypi_server import PyPIServerTestCase -except ImportError: - threading = None - PyPIServerTestCase = unittest.TestCase - - -PYPIRC_NOPASSWORD = """\ -[distutils] - -index-servers = - server1 - -[server1] -username:me -""" - -PYPIRC = """\ -[distutils] - -index-servers = - server1 - server2 - -[server1] -username:me -password:secret - -[server2] -username:meagain -password: secret -realm:acme -repository:http://another.pypi/ -""" - - - at unittest.skipIf(threading is None, 'needs threading') -class UploadTestCase(support.TempdirManager, support.EnvironRestorer, - support.LoggingCatcher, PyPIServerTestCase): - - restore_environ = ['HOME'] - - def setUp(self): - super(UploadTestCase, self).setUp() - self.tmp_dir = self.mkdtemp() - self.rc = os.path.join(self.tmp_dir, '.pypirc') - os.environ['HOME'] = self.tmp_dir - - def test_finalize_options(self): - # new format - self.write_file(self.rc, PYPIRC) - dist = Distribution() - cmd = upload(dist) - cmd.finalize_options() - for attr, expected in (('username', 'me'), ('password', 'secret'), - ('realm', 'pypi'), - ('repository', 'http://pypi.python.org/pypi')): - self.assertEqual(getattr(cmd, attr), expected) - - def test_finalize_options_unsigned_identity_raises_exception(self): - self.write_file(self.rc, PYPIRC) - dist = Distribution() - cmd = upload(dist) - cmd.identity = True - cmd.sign = False - self.assertRaises(PackagingOptionError, cmd.finalize_options) - - def test_saved_password(self): - # file with no password - self.write_file(self.rc, PYPIRC_NOPASSWORD) - - # make sure it passes - dist = Distribution() - cmd = upload(dist) - cmd.ensure_finalized() - self.assertEqual(cmd.password, None) - - # make sure we get it as well, if another command - # initialized it at the dist level - dist.password = 'xxx' - cmd = upload(dist) - cmd.finalize_options() - self.assertEqual(cmd.password, 'xxx') - - def test_upload_without_files_raises_exception(self): - dist = Distribution() - cmd = upload(dist) - self.assertRaises(PackagingOptionError, cmd.run) - - def test_upload(self): - path = os.path.join(self.tmp_dir, 'xxx') - self.write_file(path) - command, pyversion, filename = 'xxx', '3.3', path - dist_files = [(command, pyversion, filename)] - - # let's run it - dist = self.create_dist(dist_files=dist_files, author='d?d?')[1] - cmd = upload(dist) - cmd.ensure_finalized() - cmd.repository = self.pypi.full_address - cmd.run() - - # what did we send? 
- handler, request_data = self.pypi.requests[-1] - headers = handler.headers - self.assertIn('d?d?'.encode('utf-8'), request_data) - self.assertIn(b'xxx', request_data) - - self.assertEqual(int(headers['content-length']), len(request_data)) - self.assertLess(int(headers['content-length']), 2500) - self.assertTrue(headers['content-type'].startswith( - 'multipart/form-data')) - self.assertEqual(handler.command, 'POST') - self.assertNotIn('\n', headers['authorization']) - - def test_upload_docs(self): - path = os.path.join(self.tmp_dir, 'xxx') - self.write_file(path) - command, pyversion, filename = 'xxx', '3.3', path - dist_files = [(command, pyversion, filename)] - docs_path = os.path.join(self.tmp_dir, "build", "docs") - os.makedirs(docs_path) - self.write_file((docs_path, "index.html"), "yellow") - self.write_file(self.rc, PYPIRC) - - # let's run it - dist = self.create_dist(dist_files=dist_files, author='d?d?')[1] - - cmd = upload(dist) - cmd.get_finalized_command("build").run() - cmd.upload_docs = True - cmd.ensure_finalized() - cmd.repository = self.pypi.full_address - os.chdir(self.tmp_dir) - cmd.run() - - handler, request_data = self.pypi.requests[-1] - action, name, content = request_data.split( - "----------------GHSKFJDLGDS7543FJKLFHRE75642756743254" - .encode())[1:4] - - self.assertIn(b'name=":action"', action) - self.assertIn(b'doc_upload', action) - - -def test_suite(): - return unittest.makeSuite(UploadTestCase) - -if __name__ == "__main__": - unittest.main(defaultTest="test_suite") diff --git a/Lib/packaging/tests/test_command_upload_docs.py b/Lib/packaging/tests/test_command_upload_docs.py deleted file mode 100644 --- a/Lib/packaging/tests/test_command_upload_docs.py +++ /dev/null @@ -1,186 +0,0 @@ -"""Tests for packaging.command.upload_docs.""" -import os -import shutil -import logging -import zipfile -try: - import _ssl -except ImportError: - _ssl = None - -from packaging.command import upload_docs as upload_docs_mod -from packaging.command.upload_docs import upload_docs, zip_dir -from packaging.dist import Distribution -from packaging.errors import PackagingFileError, PackagingOptionError - -from packaging.tests import unittest, support -try: - import threading - from packaging.tests.pypi_server import PyPIServerTestCase -except ImportError: - threading = None - PyPIServerTestCase = unittest.TestCase - - -PYPIRC = """\ -[distutils] -index-servers = server1 - -[server1] -repository = %s -username = real_slim_shady -password = long_island -""" - - - at unittest.skipIf(threading is None, "Needs threading") -class UploadDocsTestCase(support.TempdirManager, - support.EnvironRestorer, - support.LoggingCatcher, - PyPIServerTestCase): - - restore_environ = ['HOME'] - - def setUp(self): - super(UploadDocsTestCase, self).setUp() - self.tmp_dir = self.mkdtemp() - self.rc = os.path.join(self.tmp_dir, '.pypirc') - os.environ['HOME'] = self.tmp_dir - self.dist = Distribution() - self.dist.metadata['Name'] = "distr-name" - self.cmd = upload_docs(self.dist) - - def test_default_uploaddir(self): - sandbox = self.mkdtemp() - os.chdir(sandbox) - os.mkdir("build") - self.prepare_sample_dir("build") - self.cmd.ensure_finalized() - self.assertEqual(self.cmd.upload_dir, os.path.join("build", "docs")) - - def test_default_uploaddir_looks_for_doc_also(self): - sandbox = self.mkdtemp() - os.chdir(sandbox) - os.mkdir("build") - self.prepare_sample_dir("build") - os.rename(os.path.join("build", "docs"), os.path.join("build", "doc")) - self.cmd.ensure_finalized() - 
self.assertEqual(self.cmd.upload_dir, os.path.join("build", "doc")) - - def prepare_sample_dir(self, sample_dir=None): - if sample_dir is None: - sample_dir = self.mkdtemp() - os.mkdir(os.path.join(sample_dir, "docs")) - self.write_file((sample_dir, "docs", "index.html"), "Ce mortel ennui") - self.write_file((sample_dir, "index.html"), "Oh la la") - return sample_dir - - def test_zip_dir(self): - source_dir = self.prepare_sample_dir() - compressed = zip_dir(source_dir) - - zip_f = zipfile.ZipFile(compressed) - self.assertEqual(zip_f.namelist(), ['index.html', 'docs/index.html']) - - def prepare_command(self): - self.cmd.upload_dir = self.prepare_sample_dir() - self.cmd.ensure_finalized() - self.cmd.repository = self.pypi.full_address - self.cmd.username = "username" - self.cmd.password = "password" - - def test_upload(self): - self.prepare_command() - self.cmd.run() - - self.assertEqual(len(self.pypi.requests), 1) - handler, request_data = self.pypi.requests[-1] - self.assertIn(b"content", request_data) - self.assertIn("Basic", handler.headers['authorization']) - self.assertTrue(handler.headers['content-type'] - .startswith('multipart/form-data;')) - - action, name, version, content = request_data.split( - b'----------------GHSKFJDLGDS7543FJKLFHRE75642756743254')[1:5] - - # check that we picked the right chunks - self.assertIn(b'name=":action"', action) - self.assertIn(b'name="name"', name) - self.assertIn(b'name="version"', version) - self.assertIn(b'name="content"', content) - - # check their contents - self.assertIn(b'doc_upload', action) - self.assertIn(b'distr-name', name) - self.assertIn(b'docs/index.html', content) - self.assertIn(b'Ce mortel ennui', content) - - @unittest.skipIf(_ssl is None, 'Needs SSL support') - def test_https_connection(self): - self.https_called = False - self.addCleanup( - setattr, upload_docs_mod.http.client, 'HTTPSConnection', - upload_docs_mod.http.client.HTTPSConnection) - - def https_conn_wrapper(*args): - self.https_called = True - # the testing server is http - return upload_docs_mod.http.client.HTTPConnection(*args) - - upload_docs_mod.http.client.HTTPSConnection = https_conn_wrapper - - self.prepare_command() - self.cmd.run() - self.assertFalse(self.https_called) - - self.cmd.repository = self.cmd.repository.replace("http", "https") - self.cmd.run() - self.assertTrue(self.https_called) - - def test_handling_response(self): - self.pypi.default_response_status = '403 Forbidden' - self.prepare_command() - self.cmd.run() - errors = self.get_logs(logging.ERROR) - self.assertEqual(len(errors), 1) - self.assertIn('Upload failed (403): Forbidden', errors[0]) - - self.pypi.default_response_status = '301 Moved Permanently' - self.pypi.default_response_headers.append( - ("Location", "brand_new_location")) - self.cmd.run() - lastlog = self.get_logs(logging.INFO)[-1] - self.assertIn('brand_new_location', lastlog) - - def test_reads_pypirc_data(self): - self.write_file(self.rc, PYPIRC % self.pypi.full_address) - self.cmd.repository = self.pypi.full_address - self.cmd.upload_dir = self.prepare_sample_dir() - self.cmd.ensure_finalized() - self.assertEqual(self.cmd.username, "real_slim_shady") - self.assertEqual(self.cmd.password, "long_island") - - def test_checks_index_html_presence(self): - self.cmd.upload_dir = self.prepare_sample_dir() - os.remove(os.path.join(self.cmd.upload_dir, "index.html")) - self.assertRaises(PackagingFileError, self.cmd.ensure_finalized) - - def test_checks_upload_dir(self): - self.cmd.upload_dir = self.prepare_sample_dir() - 
shutil.rmtree(os.path.join(self.cmd.upload_dir)) - self.assertRaises(PackagingOptionError, self.cmd.ensure_finalized) - - def test_show_response(self): - self.prepare_command() - self.cmd.show_response = True - self.cmd.run() - record = self.get_logs(logging.INFO)[-1] - self.assertTrue(record, "should report the response") - self.assertIn(self.pypi.default_response_data, record) - - -def test_suite(): - return unittest.makeSuite(UploadDocsTestCase) - -if __name__ == "__main__": - unittest.main(defaultTest="test_suite") diff --git a/Lib/packaging/tests/test_compiler.py b/Lib/packaging/tests/test_compiler.py deleted file mode 100644 --- a/Lib/packaging/tests/test_compiler.py +++ /dev/null @@ -1,66 +0,0 @@ -"""Tests for distutils.compiler.""" -import os - -from packaging.compiler import (get_default_compiler, customize_compiler, - gen_lib_options) -from packaging.tests import unittest, support - - -class FakeCompiler: - - name = 'fake' - description = 'Fake' - - def library_dir_option(self, dir): - return "-L" + dir - - def runtime_library_dir_option(self, dir): - return ["-cool", "-R" + dir] - - def find_library_file(self, dirs, lib, debug=False): - return 'found' - - def library_option(self, lib): - return "-l" + lib - - -class CompilerTestCase(support.EnvironRestorer, unittest.TestCase): - - restore_environ = ['AR', 'ARFLAGS'] - - @unittest.skipUnless(get_default_compiler() == 'unix', - 'irrelevant if default compiler is not unix') - def test_customize_compiler(self): - - os.environ['AR'] = 'my_ar' - os.environ['ARFLAGS'] = '-arflags' - - # make sure AR gets caught - class compiler: - name = 'unix' - - def set_executables(self, **kw): - self.exes = kw - - comp = compiler() - customize_compiler(comp) - self.assertEqual(comp.exes['archiver'], 'my_ar -arflags') - - def test_gen_lib_options(self): - compiler = FakeCompiler() - libdirs = ['lib1', 'lib2'] - runlibdirs = ['runlib1'] - libs = [os.path.join('dir', 'name'), 'name2'] - - opts = gen_lib_options(compiler, libdirs, runlibdirs, libs) - wanted = ['-Llib1', '-Llib2', '-cool', '-Rrunlib1', 'found', - '-lname2'] - self.assertEqual(opts, wanted) - - -def test_suite(): - return unittest.makeSuite(CompilerTestCase) - - -if __name__ == "__main__": - unittest.main(defaultTest="test_suite") diff --git a/Lib/packaging/tests/test_config.py b/Lib/packaging/tests/test_config.py deleted file mode 100644 --- a/Lib/packaging/tests/test_config.py +++ /dev/null @@ -1,519 +0,0 @@ -"""Tests for packaging.config.""" -import os -import sys - -from packaging import command -from packaging.dist import Distribution -from packaging.errors import PackagingFileError, PackagingOptionError -from packaging.compiler import new_compiler, _COMPILERS -from packaging.command.sdist import sdist - -from packaging.tests import unittest, support -from packaging.tests.support import requires_zlib - - -SETUP_CFG = """ -[metadata] -name = RestingParrot -version = 0.6.4 -author = Carl Meyer -author_email = carl at oddbird.net -maintainer = ?ric Araujo -maintainer_email = merwok at netwok.org -summary = A sample project demonstrating packaging -description-file = %(description-file)s -keywords = packaging, sample project - -classifier = - Development Status :: 4 - Beta - Environment :: Console (Text Based) - Environment :: X11 Applications :: GTK; python_version < '3' - License :: OSI Approved :: MIT License - Programming Language :: Python - Programming Language :: Python :: 2 - Programming Language :: Python :: 3 - -requires_python = >=2.4, <3.2 - -requires_dist = - PetShoppe - 
MichaelPalin (> 1.1) - pywin32; sys.platform == 'win32' - pysqlite2; python_version < '2.5' - inotify (0.0.1); sys.platform == 'linux2' - -requires_external = libxml2 - -provides_dist = packaging-sample-project (0.2) - unittest2-sample-project - -project_url = - Main repository, http://bitbucket.org/carljm/sample-distutils2-project - Fork in progress, http://bitbucket.org/Merwok/sample-distutils2-project - -[files] -packages_root = src - -packages = one - two - three - -modules = haven - -scripts = - script1.py - scripts/find-coconuts - bin/taunt - -package_data = - cheese = data/templates/* doc/* - doc/images/*.png - - -extra_files = %(extra-files)s - -# Replaces MANIFEST.in -# FIXME no, it's extra_files -# (but sdist_extra is a better name, should use it) -sdist_extra = - include THANKS HACKING - recursive-include examples *.txt *.py - prune examples/sample?/build - -resources= - bm/ {b1,b2}.gif = {icon} - Cf*/ *.CFG = {config}/baBar/ - init_script = {script}/JunGle/ - -[global] -commands = - packaging.tests.test_config.FooBarBazTest - -compilers = - packaging.tests.test_config.DCompiler - -setup_hooks = %(setup-hooks)s - - - -[install_dist] -sub_commands = foo -""" - -SETUP_CFG_PKGDATA_BUGGY_1 = """ -[files] -package_data = foo.* -""" - -SETUP_CFG_PKGDATA_BUGGY_2 = """ -[files] -package_data = - foo.* -""" - -# Can not be merged with SETUP_CFG else install_dist -# command will fail when trying to compile C sources -# TODO use a DummyCommand to mock build_ext -EXT_SETUP_CFG = """ -[files] -packages = one - two - parent.undeclared - -[extension:one.speed_coconuts] -sources = c_src/speed_coconuts.c -extra_link_args = "`gcc -print-file-name=libgcc.a`" -shared -define_macros = HAVE_CAIRO HAVE_GTK2 -libraries = gecodeint gecodekernel -- sys.platform != 'win32' - GecodeInt GecodeKernel -- sys.platform == 'win32' - -[extension: two.fast_taunt] -sources = cxx_src/utils_taunt.cxx - cxx_src/python_module.cxx -include_dirs = /usr/include/gecode - /usr/include/blitz -extra_compile_args = -fPIC -O2 - -DGECODE_VERSION=$(./gecode_version) -- sys.platform != 'win32' - /DGECODE_VERSION=win32 -- sys.platform == 'win32' -language = cxx - -# corner case: if the parent package of an extension is declared but -# not its grandparent, it's legal -[extension: parent.undeclared._speed] -sources = parent/undeclared/_speed.c -""" - -EXT_SETUP_CFG_BUGGY_1 = """ -[extension: realname] -name = crash_here -""" - -EXT_SETUP_CFG_BUGGY_2 = """ -[files] -packages = ham - -[extension: spam.eggs] -""" - -EXT_SETUP_CFG_BUGGY_3 = """ -[files] -packages = ok - ok.works - -[extension: ok.works.breaks._ext] -""" - -HOOKS_MODULE = """ -import logging - -logger = logging.getLogger('packaging') - -def logging_hook(config): - logger.warning('logging_hook called') -""" - - -class DCompiler: - name = 'd' - description = 'D Compiler' - - def __init__(self, *args): - pass - - -def version_hook(config): - config['metadata']['version'] += '.dev1' - - -def first_hook(config): - config['files']['modules'] += '\n first' - - -def third_hook(config): - config['files']['modules'] += '\n third' - - -class FooBarBazTest: - - def __init__(self, dist): - self.distribution = dist - self._record = [] - - @classmethod - def get_command_name(cls): - return 'foo' - - def run(self): - self._record.append('foo has run') - - def nothing(self): - pass - - def get_source_files(self): - return [] - - ensure_finalized = finalize_options = initialize_options = nothing - - -class ConfigTestCase(support.TempdirManager, - support.EnvironRestorer, - 
support.LoggingCatcher, - unittest.TestCase): - - restore_environ = ['PLAT'] - - def setUp(self): - super(ConfigTestCase, self).setUp() - tempdir = self.mkdtemp() - self.working_dir = os.getcwd() - os.chdir(tempdir) - self.tempdir = tempdir - - def write_setup(self, kwargs=None): - opts = {'description-file': 'README', 'extra-files': '', - 'setup-hooks': 'packaging.tests.test_config.version_hook'} - if kwargs: - opts.update(kwargs) - self.write_file('setup.cfg', SETUP_CFG % opts, encoding='utf-8') - - def get_dist(self): - dist = Distribution() - dist.parse_config_files() - return dist - - def test_config(self): - self.write_setup() - self.write_file('README', 'yeah') - os.mkdir('bm') - self.write_file(('bm', 'b1.gif'), '') - self.write_file(('bm', 'b2.gif'), '') - os.mkdir('Cfg') - self.write_file(('Cfg', 'data.CFG'), '') - self.write_file('init_script', '') - - # try to load the metadata now - dist = self.get_dist() - - # check what was done - self.assertEqual(dist.metadata['Author'], 'Carl Meyer') - self.assertEqual(dist.metadata['Author-Email'], 'carl at oddbird.net') - - # the hook adds .dev1 - self.assertEqual(dist.metadata['Version'], '0.6.4.dev1') - - wanted = [ - 'Development Status :: 4 - Beta', - 'Environment :: Console (Text Based)', - "Environment :: X11 Applications :: GTK; python_version < '3'", - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 3'] - self.assertEqual(dist.metadata['Classifier'], wanted) - - wanted = ['packaging', 'sample project'] - self.assertEqual(dist.metadata['Keywords'], wanted) - - self.assertEqual(dist.metadata['Requires-Python'], '>=2.4, <3.2') - - wanted = ['PetShoppe', - 'MichaelPalin (> 1.1)', - "pywin32; sys.platform == 'win32'", - "pysqlite2; python_version < '2.5'", - "inotify (0.0.1); sys.platform == 'linux2'"] - - self.assertEqual(dist.metadata['Requires-Dist'], wanted) - urls = [('Main repository', - 'http://bitbucket.org/carljm/sample-distutils2-project'), - ('Fork in progress', - 'http://bitbucket.org/Merwok/sample-distutils2-project')] - self.assertEqual(dist.metadata['Project-Url'], urls) - - self.assertEqual(dist.packages, ['one', 'two', 'three']) - self.assertEqual(dist.py_modules, ['haven']) - self.assertEqual(dist.package_data, - {'cheese': ['data/templates/*', 'doc/*', - 'doc/images/*.png']}) - self.assertEqual(dist.data_files, - {'bm/b1.gif': '{icon}/b1.gif', - 'bm/b2.gif': '{icon}/b2.gif', - 'Cfg/data.CFG': '{config}/baBar/data.CFG', - 'init_script': '{script}/JunGle/init_script'}) - - self.assertEqual(dist.package_dir, 'src') - - # Make sure we get the foo command loaded. We use a string comparison - # instead of assertIsInstance because the class is not the same when - # this test is run directly: foo is packaging.tests.test_config.Foo - # because get_command_class uses the full name, but a bare "Foo" in - # this file would be __main__.Foo when run as "python test_config.py". - # The name FooBarBazTest should be unique enough to prevent - # collisions. - self.assertEqual(dist.get_command_obj('foo').__class__.__name__, - 'FooBarBazTest') - - # did the README got loaded ? - self.assertEqual(dist.metadata['description'], 'yeah') - - # do we have the D Compiler enabled ? 
- self.assertIn('d', _COMPILERS) - d = new_compiler(compiler='d') - self.assertEqual(d.description, 'D Compiler') - - # check error reporting for invalid package_data value - self.write_file('setup.cfg', SETUP_CFG_PKGDATA_BUGGY_1) - self.assertRaises(PackagingOptionError, self.get_dist) - - self.write_file('setup.cfg', SETUP_CFG_PKGDATA_BUGGY_2) - self.assertRaises(PackagingOptionError, self.get_dist) - - def test_multiple_description_file(self): - self.write_setup({'description-file': 'README CHANGES'}) - self.write_file('README', 'yeah') - self.write_file('CHANGES', 'changelog2') - dist = self.get_dist() - self.assertEqual(dist.metadata.requires_files, ['README', 'CHANGES']) - - def test_multiline_description_file(self): - self.write_setup({'description-file': 'README\n CHANGES'}) - self.write_file('README', 'yeah') - self.write_file('CHANGES', 'changelog') - dist = self.get_dist() - self.assertEqual(dist.metadata['description'], 'yeah\nchangelog') - self.assertEqual(dist.metadata.requires_files, ['README', 'CHANGES']) - - def test_parse_extensions_in_config(self): - self.write_file('setup.cfg', EXT_SETUP_CFG) - dist = self.get_dist() - - ext_modules = dict((mod.name, mod) for mod in dist.ext_modules) - self.assertEqual(len(ext_modules), 3) - ext = ext_modules.get('one.speed_coconuts') - self.assertEqual(ext.sources, ['c_src/speed_coconuts.c']) - self.assertEqual(ext.define_macros, ['HAVE_CAIRO', 'HAVE_GTK2']) - libs = ['gecodeint', 'gecodekernel'] - if sys.platform == 'win32': - libs = ['GecodeInt', 'GecodeKernel'] - self.assertEqual(ext.libraries, libs) - self.assertEqual(ext.extra_link_args, - ['`gcc -print-file-name=libgcc.a`', '-shared']) - - ext = ext_modules.get('two.fast_taunt') - self.assertEqual(ext.sources, - ['cxx_src/utils_taunt.cxx', 'cxx_src/python_module.cxx']) - self.assertEqual(ext.include_dirs, - ['/usr/include/gecode', '/usr/include/blitz']) - cargs = ['-fPIC', '-O2'] - if sys.platform == 'win32': - cargs.append("/DGECODE_VERSION=win32") - else: - cargs.append('-DGECODE_VERSION=$(./gecode_version)') - self.assertEqual(ext.extra_compile_args, cargs) - self.assertEqual(ext.language, 'cxx') - - self.write_file('setup.cfg', EXT_SETUP_CFG_BUGGY_1) - self.assertRaises(PackagingOptionError, self.get_dist) - - self.write_file('setup.cfg', EXT_SETUP_CFG_BUGGY_2) - self.assertRaises(PackagingOptionError, self.get_dist) - - self.write_file('setup.cfg', EXT_SETUP_CFG_BUGGY_3) - self.assertRaises(PackagingOptionError, self.get_dist) - - def test_project_setup_hook_works(self): - # Bug #11637: ensure the project directory is on sys.path to allow - # project-specific hooks - self.write_setup({'setup-hooks': 'hooks.logging_hook'}) - self.write_file('README', 'yeah') - self.write_file('hooks.py', HOOKS_MODULE) - self.get_dist() - self.assertEqual(['logging_hook called'], self.get_logs()) - self.assertIn('hooks', sys.modules) - - def test_missing_setup_hook_warns(self): - self.write_setup({'setup-hooks': 'does._not.exist'}) - self.write_file('README', 'yeah') - self.get_dist() - logs = self.get_logs() - self.assertEqual(1, len(logs)) - self.assertIn('cannot find setup hook', logs[0]) - - def test_multiple_setup_hooks(self): - self.write_setup({ - 'setup-hooks': '\n packaging.tests.test_config.first_hook' - '\n packaging.tests.test_config.missing_hook' - '\n packaging.tests.test_config.third_hook', - }) - self.write_file('README', 'yeah') - dist = self.get_dist() - - self.assertEqual(['haven', 'first', 'third'], dist.py_modules) - logs = self.get_logs() - self.assertEqual(1, len(logs)) 
- self.assertIn('cannot find setup hook', logs[0]) - - def test_metadata_requires_description_files_missing(self): - self.write_setup({'description-file': 'README README2'}) - self.write_file('README', 'yeah') - self.write_file('README2', 'yeah') - os.mkdir('src') - self.write_file(('src', 'haven.py'), '#') - self.write_file('script1.py', '#') - os.mkdir('scripts') - self.write_file(('scripts', 'find-coconuts'), '#') - os.mkdir('bin') - self.write_file(('bin', 'taunt'), '#') - - for pkg in ('one', 'two', 'three'): - pkg = os.path.join('src', pkg) - os.mkdir(pkg) - self.write_file((pkg, '__init__.py'), '#') - - dist = self.get_dist() - cmd = sdist(dist) - cmd.finalize_options() - cmd.get_file_list() - self.assertRaises(PackagingFileError, cmd.make_distribution) - - @requires_zlib - def test_metadata_requires_description_files(self): - # Create the following file structure: - # README - # README2 - # script1.py - # scripts/ - # find-coconuts - # bin/ - # taunt - # src/ - # haven.py - # one/__init__.py - # two/__init__.py - # three/__init__.py - - self.write_setup({'description-file': 'README\n README2', - 'extra-files': '\n README3'}) - self.write_file('README', 'yeah 1') - self.write_file('README2', 'yeah 2') - self.write_file('README3', 'yeah 3') - os.mkdir('src') - self.write_file(('src', 'haven.py'), '#') - self.write_file('script1.py', '#') - os.mkdir('scripts') - self.write_file(('scripts', 'find-coconuts'), '#') - os.mkdir('bin') - self.write_file(('bin', 'taunt'), '#') - - for pkg in ('one', 'two', 'three'): - pkg = os.path.join('src', pkg) - os.mkdir(pkg) - self.write_file((pkg, '__init__.py'), '#') - - dist = self.get_dist() - self.assertIn('yeah 1\nyeah 2', dist.metadata['description']) - - cmd = sdist(dist) - cmd.finalize_options() - cmd.get_file_list() - self.assertRaises(PackagingFileError, cmd.make_distribution) - - self.write_setup({'description-file': 'README\n README2', - 'extra-files': '\n README2\n README'}) - dist = self.get_dist() - cmd = sdist(dist) - cmd.finalize_options() - cmd.get_file_list() - cmd.make_distribution() - with open('MANIFEST') as fp: - self.assertIn('README\nREADME2\n', fp.read()) - - def test_sub_commands(self): - self.write_setup() - self.write_file('README', 'yeah') - os.mkdir('src') - self.write_file(('src', 'haven.py'), '#') - self.write_file('script1.py', '#') - os.mkdir('scripts') - self.write_file(('scripts', 'find-coconuts'), '#') - os.mkdir('bin') - self.write_file(('bin', 'taunt'), '#') - - for pkg in ('one', 'two', 'three'): - pkg = os.path.join('src', pkg) - os.mkdir(pkg) - self.write_file((pkg, '__init__.py'), '#') - - # try to run the install command to see if foo is called - self.addCleanup(command._COMMANDS.__delitem__, 'foo') - dist = self.get_dist() - dist.run_command('install_dist') - cmd = dist.get_command_obj('foo') - self.assertEqual(cmd.__class__.__name__, 'FooBarBazTest') - self.assertEqual(cmd._record, ['foo has run']) - - -def test_suite(): - return unittest.makeSuite(ConfigTestCase) - -if __name__ == '__main__': - unittest.main(defaultTest='test_suite') diff --git a/Lib/packaging/tests/test_create.py b/Lib/packaging/tests/test_create.py deleted file mode 100644 --- a/Lib/packaging/tests/test_create.py +++ /dev/null @@ -1,233 +0,0 @@ -"""Tests for packaging.create.""" -import os -import sys -import sysconfig -from textwrap import dedent -from packaging import create -from packaging.create import MainProgram, ask_yn, ask, main - -from packaging.tests import support, unittest -from packaging.tests.support import Inputs - - 
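
The setup_hooks behaviour exercised by ConfigTestCase above boils down to this: each
dotted name listed under setup_hooks in setup.cfg is resolved to a callable, every
callable receives the parsed configuration (a dict of section dicts) and may mutate it
in place, and a hook that cannot be imported is logged and skipped instead of aborting
the run. A rough, self-contained sketch of that dispatch; the resolve() and
run_setup_hooks() helpers and the sample config are illustrative assumptions, not the
packaging implementation:

    import importlib
    import logging

    logger = logging.getLogger('packaging')


    def resolve(dotted_name):
        # 'some.module.callable' -> the callable object (assumed helper)
        module_name, _, attr = dotted_name.rpartition('.')
        return getattr(importlib.import_module(module_name), attr)


    def run_setup_hooks(config, hook_names):
        # config is the parsed setup.cfg: a dict of section dicts
        for name in hook_names:
            try:
                hook = resolve(name)
            except (ImportError, AttributeError):
                # same message that test_missing_setup_hook_warns looks for
                logger.warning('cannot find setup hook: %s', name)
                continue
            hook(config)  # hooks mutate the configuration in place


    # a hook with the same shape as version_hook in the removed test module
    def add_dev_tag(config):
        config['metadata']['version'] += '.dev1'


    config = {'metadata': {'version': '0.6.4'}, 'files': {'modules': 'haven'}}
    run_setup_hooks(config, [__name__ + '.add_dev_tag', 'does._not.exist'])
    assert config['metadata']['version'] == '0.6.4.dev1'

This is the same shape as version_hook in the removed tests, which appends '.dev1' to
the configured version, and as the 'cannot find setup hook' warning that
test_missing_setup_hook_warns and test_multiple_setup_hooks assert on.
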
-class CreateTestCase(support.TempdirManager, - support.EnvironRestorer, - support.LoggingCatcher, - unittest.TestCase): - - maxDiff = None - restore_environ = ['PLAT'] - - def setUp(self): - super(CreateTestCase, self).setUp() - self.wdir = self.mkdtemp() - os.chdir(self.wdir) - # patch sysconfig - self._old_get_paths = sysconfig.get_paths - sysconfig.get_paths = lambda *args, **kwargs: { - 'man': sys.prefix + '/share/man', - 'doc': sys.prefix + '/share/doc/pyxfoil', } - - def tearDown(self): - sysconfig.get_paths = self._old_get_paths - if hasattr(create, 'input'): - del create.input - super(CreateTestCase, self).tearDown() - - def test_ask_yn(self): - create.input = Inputs('y') - self.assertEqual('y', ask_yn('is this a test')) - - def test_ask(self): - create.input = Inputs('a', 'b') - self.assertEqual('a', ask('is this a test')) - self.assertEqual('b', ask(str(list(range(0, 70))), default='c', - lengthy=True)) - - def test_set_multi(self): - mainprogram = MainProgram() - create.input = Inputs('aaaaa') - mainprogram.data['author'] = [] - mainprogram._set_multi('_set_multi test', 'author') - self.assertEqual(['aaaaa'], mainprogram.data['author']) - - def test_find_files(self): - # making sure we scan a project dir correctly - mainprogram = MainProgram() - - # building the structure - tempdir = self.wdir - dirs = ['pkg1', 'data', 'pkg2', 'pkg2/sub'] - files = [ - 'README', - 'data/data1', - 'foo.py', - 'pkg1/__init__.py', - 'pkg1/bar.py', - 'pkg2/__init__.py', - 'pkg2/sub/__init__.py', - ] - - for dir_ in dirs: - os.mkdir(os.path.join(tempdir, dir_)) - - for file_ in files: - self.write_file((tempdir, file_), 'xxx') - - mainprogram._find_files() - mainprogram.data['packages'].sort() - - # do we have what we want? - self.assertEqual(mainprogram.data['packages'], - ['pkg1', 'pkg2', 'pkg2.sub']) - self.assertEqual(mainprogram.data['modules'], ['foo']) - data_fn = os.path.join('data', 'data1') - self.assertEqual(mainprogram.data['extra_files'], - ['README', data_fn]) - - def test_convert_setup_py_to_cfg(self): - self.write_file((self.wdir, 'setup.py'), - dedent(""" - # coding: utf-8 - from distutils.core import setup - - long_description = '''My super Death-scription - barbar is now on the public domain, - ho, baby !''' - - setup(name='pyxfoil', - version='0.2', - description='Python bindings for the Xfoil engine', - long_description=long_description, - maintainer='Andr? Espaze', - maintainer_email='andre.espaze at logilab.fr', - url='http://www.python-science.org/project/pyxfoil', - license='GPLv2', - packages=['pyxfoil', 'babar', 'me'], - data_files=[ - ('share/doc/pyxfoil', ['README.rst']), - ('share/man', ['pyxfoil.1']), - ], - py_modules=['my_lib', 'mymodule'], - package_dir={ - 'babar': '', - 'me': 'Martinique/Lamentin', - }, - package_data={ - 'babar': ['Pom', 'Flora', 'Alexander'], - 'me': ['dady', 'mumy', 'sys', 'bro'], - 'pyxfoil': ['fengine.so'], - }, - scripts=['my_script', 'bin/run'], - ) - """), encoding='utf-8') - create.input = Inputs('y') - main() - - path = os.path.join(self.wdir, 'setup.cfg') - with open(path, encoding='utf-8') as fp: - contents = fp.read() - - self.assertEqual(contents, dedent("""\ - [metadata] - name = pyxfoil - version = 0.2 - summary = Python bindings for the Xfoil engine - download_url = UNKNOWN - home_page = http://www.python-science.org/project/pyxfoil - maintainer = Andr? Espaze - maintainer_email = andre.espaze at logilab.fr - description = My super Death-scription - |barbar is now on the public domain, - |ho, baby ! 
- - [files] - packages = pyxfoil - babar - me - modules = my_lib - mymodule - scripts = my_script - bin/run - package_data = - babar = Pom - Flora - Alexander - me = dady - mumy - sys - bro - pyxfoil = fengine.so - - resources = - README.rst = {doc} - pyxfoil.1 = {man} - - """)) - - def test_convert_setup_py_to_cfg_with_description_in_readme(self): - self.write_file((self.wdir, 'setup.py'), - dedent(""" - # coding: utf-8 - from distutils.core import setup - with open('README.txt') as fp: - long_description = fp.read() - - setup(name='pyxfoil', - version='0.2', - description='Python bindings for the Xfoil engine', - long_description=long_description, - maintainer='Andr? Espaze', - maintainer_email='andre.espaze at logilab.fr', - url='http://www.python-science.org/project/pyxfoil', - license='GPLv2', - packages=['pyxfoil'], - package_data={'pyxfoil': ['fengine.so', 'babar.so']}, - data_files=[ - ('share/doc/pyxfoil', ['README.rst']), - ('share/man', ['pyxfoil.1']), - ], - ) - """), encoding='utf-8') - self.write_file((self.wdir, 'README.txt'), - dedent(''' -My super Death-scription -barbar is now in the public domain, -ho, baby! - ''')) - create.input = Inputs('y') - main() - - path = os.path.join(self.wdir, 'setup.cfg') - with open(path, encoding='utf-8') as fp: - contents = fp.read() - - self.assertEqual(contents, dedent("""\ - [metadata] - name = pyxfoil - version = 0.2 - summary = Python bindings for the Xfoil engine - download_url = UNKNOWN - home_page = http://www.python-science.org/project/pyxfoil - maintainer = Andr? Espaze - maintainer_email = andre.espaze at logilab.fr - description-file = README.txt - - [files] - packages = pyxfoil - package_data = - pyxfoil = fengine.so - babar.so - - resources = - README.rst = {doc} - pyxfoil.1 = {man} - - """)) - - -def test_suite(): - return unittest.makeSuite(CreateTestCase) - -if __name__ == '__main__': - unittest.main(defaultTest='test_suite') diff --git a/Lib/packaging/tests/test_cygwinccompiler.py b/Lib/packaging/tests/test_cygwinccompiler.py deleted file mode 100644 --- a/Lib/packaging/tests/test_cygwinccompiler.py +++ /dev/null @@ -1,88 +0,0 @@ -"""Tests for packaging.cygwinccompiler.""" -import os -import sys -import sysconfig -from packaging.compiler.cygwinccompiler import ( - check_config_h, get_msvcr, - CONFIG_H_OK, CONFIG_H_NOTOK, CONFIG_H_UNCERTAIN) - -from packaging.tests import unittest, support - - -class CygwinCCompilerTestCase(support.TempdirManager, - unittest.TestCase): - - def setUp(self): - super(CygwinCCompilerTestCase, self).setUp() - self.version = sys.version - self.python_h = os.path.join(self.mkdtemp(), 'python.h') - self.old_get_config_h_filename = sysconfig.get_config_h_filename - sysconfig.get_config_h_filename = self._get_config_h_filename - - def tearDown(self): - sys.version = self.version - sysconfig.get_config_h_filename = self.old_get_config_h_filename - super(CygwinCCompilerTestCase, self).tearDown() - - def _get_config_h_filename(self): - return self.python_h - - def test_check_config_h(self): - # check_config_h looks for "GCC" in sys.version first - # returns CONFIG_H_OK if found - sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) \n[GCC ' - '4.0.1 (Apple Computer, Inc. 
build 5370)]') - - self.assertEqual(check_config_h()[0], CONFIG_H_OK) - - # then it tries to see if it can find "__GNUC__" in pyconfig.h - sys.version = 'something without the *CC word' - - # if the file doesn't exist it returns CONFIG_H_UNCERTAIN - self.assertEqual(check_config_h()[0], CONFIG_H_UNCERTAIN) - - # if it exists but does not contain __GNUC__, it returns CONFIG_H_NOTOK - self.write_file(self.python_h, 'xxx') - self.assertEqual(check_config_h()[0], CONFIG_H_NOTOK) - - # and CONFIG_H_OK if __GNUC__ is found - self.write_file(self.python_h, 'xxx __GNUC__ xxx') - self.assertEqual(check_config_h()[0], CONFIG_H_OK) - - def test_get_msvcr(self): - # none - sys.version = ('2.6.1 (r261:67515, Dec 6 2008, 16:42:21) ' - '\n[GCC 4.0.1 (Apple Computer, Inc. build 5370)]') - self.assertEqual(get_msvcr(), None) - - # MSVC 7.0 - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.1300 32 bits (Intel)]') - self.assertEqual(get_msvcr(), ['msvcr70']) - - # MSVC 7.1 - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.1310 32 bits (Intel)]') - self.assertEqual(get_msvcr(), ['msvcr71']) - - # VS2005 / MSVC 8.0 - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.1400 32 bits (Intel)]') - self.assertEqual(get_msvcr(), ['msvcr80']) - - # VS2008 / MSVC 9.0 - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.1500 32 bits (Intel)]') - self.assertEqual(get_msvcr(), ['msvcr90']) - - # unknown - sys.version = ('2.5.1 (r251:54863, Apr 18 2007, 08:51:08) ' - '[MSC v.1999 32 bits (Intel)]') - self.assertRaises(ValueError, get_msvcr) - - -def test_suite(): - return unittest.makeSuite(CygwinCCompilerTestCase) - -if __name__ == '__main__': - unittest.main(defaultTest='test_suite') diff --git a/Lib/packaging/tests/test_database.py b/Lib/packaging/tests/test_database.py deleted file mode 100644 --- a/Lib/packaging/tests/test_database.py +++ /dev/null @@ -1,686 +0,0 @@ -import os -import io -import csv -import sys -import shutil -import tempfile -from hashlib import md5 -from textwrap import dedent - -from packaging.tests.test_util import GlobTestCaseBase -from packaging.tests.support import requires_zlib - -import packaging.database -from packaging.config import get_resources_dests -from packaging.errors import PackagingError -from packaging.metadata import Metadata -from packaging.tests import unittest, support -from packaging.database import ( - Distribution, EggInfoDistribution, get_distribution, get_distributions, - provides_distribution, obsoletes_distribution, get_file_users, - enable_cache, disable_cache, distinfo_dirname, _yield_distributions, - get_file, get_file_path) - -# TODO Add a test for getting a distribution provided by another distribution -# TODO Add a test for absolute pathed RECORD items (e.g. /etc/myapp/config.ini) -# TODO Add tests from the former pep376 project (zipped site-packages, etc.) 
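
The MSC runtime lookup walked through by test_get_msvcr above is essentially a table
keyed on the 'MSC v.XXXX' token that a Windows build embeds in sys.version. A small
sketch of that logic, reconstructed only from the assertions in the test; the function
name and the regular expression are assumptions rather than the code from
packaging.compiler.cygwinccompiler:

    import re
    import sys

    # runtime DLL per MSC version, as asserted in test_get_msvcr above
    _MSVCR = {
        '1300': ['msvcr70'],   # MSVC 7.0
        '1310': ['msvcr71'],   # MSVC 7.1
        '1400': ['msvcr80'],   # VS2005 / MSVC 8.0
        '1500': ['msvcr90'],   # VS2008 / MSVC 9.0
    }


    def guess_msvcr(version_string=None):
        if version_string is None:
            version_string = sys.version
        match = re.search(r'MSC v\.(\d+)', version_string)
        if match is None:
            # e.g. a GCC build: no MSC token, hence the expected None above
            return None
        try:
            return _MSVCR[match.group(1)]
        except KeyError:
            raise ValueError('unknown MSC version %s' % match.group(1))


    print(guess_msvcr('2.5.1 (r251:54863) [MSC v.1400 32 bits (Intel)]'))
    # -> ['msvcr80']

A GCC build carries no MSC token at all, which is why the first assertion above expects
None, and an unrecognised version number raises ValueError.
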
- - -def get_hexdigest(filename): - with open(filename, 'rb') as file: - checksum = md5(file.read()) - return checksum.hexdigest() - - -def record_pieces(path): - path = os.path.join(*path) - digest = get_hexdigest(path) - size = os.path.getsize(path) - return path, digest, size - - -class FakeDistsMixin: - - def setUp(self): - super(FakeDistsMixin, self).setUp() - self.addCleanup(enable_cache) - disable_cache() - - # make a copy that we can write into for our fake installed - # distributions - tmpdir = tempfile.mkdtemp() - self.addCleanup(shutil.rmtree, tmpdir) - self.fake_dists_path = os.path.realpath( - os.path.join(tmpdir, 'fake_dists')) - fake_dists_src = os.path.abspath( - os.path.join(os.path.dirname(__file__), 'fake_dists')) - shutil.copytree(fake_dists_src, self.fake_dists_path) - # XXX ugly workaround: revert copystat calls done by shutil behind our - # back (to avoid getting a read-only copy of a read-only file). we - # could pass a custom copy_function to change the mode of files, but - # shutil gives no control over the mode of directories :( - # see http://bugs.python.org/issue1666318 - for root, dirs, files in os.walk(self.fake_dists_path): - os.chmod(root, 0o755) - for f in files: - os.chmod(os.path.join(root, f), 0o644) - for d in dirs: - os.chmod(os.path.join(root, d), 0o755) - - -class CommonDistributionTests(FakeDistsMixin): - """Mixin used to test the interface common to both Distribution classes. - - Derived classes define cls, sample_dist, dirs and records. These - attributes are used in test methods. See source code for details. - """ - - def test_instantiation(self): - # check that useful attributes are here - name, version, distdir = self.sample_dist - here = os.path.abspath(os.path.dirname(__file__)) - dist_path = os.path.join(here, 'fake_dists', distdir) - - dist = self.dist = self.cls(dist_path) - self.assertEqual(dist.path, dist_path) - self.assertEqual(dist.name, name) - self.assertEqual(dist.metadata['Name'], name) - self.assertIsInstance(dist.metadata, Metadata) - self.assertEqual(dist.version, version) - self.assertEqual(dist.metadata['Version'], version) - - @requires_zlib - def test_repr(self): - dist = self.cls(self.dirs[0]) - # just check that the class name is in the repr - self.assertIn(self.cls.__name__, repr(dist)) - - @requires_zlib - def test_comparison(self): - # tests for __eq__ and __hash__ - dist = self.cls(self.dirs[0]) - dist2 = self.cls(self.dirs[0]) - dist3 = self.cls(self.dirs[1]) - self.assertIn(dist, {dist: True}) - self.assertEqual(dist, dist) - - self.assertIsNot(dist, dist2) - self.assertEqual(dist, dist2) - self.assertNotEqual(dist, dist3) - self.assertNotEqual(dist, ()) - - def test_list_installed_files(self): - for dir_ in self.dirs: - dist = self.cls(dir_) - for path, md5_, size in dist.list_installed_files(): - record_data = self.records[dist.path] - self.assertIn(path, record_data) - self.assertEqual(md5_, record_data[path][0]) - self.assertEqual(size, record_data[path][1]) - - -class TestDistribution(CommonDistributionTests, unittest.TestCase): - - cls = Distribution - sample_dist = 'choxie', '2.0.0.9', 'choxie-2.0.0.9.dist-info' - - def setUp(self): - super(TestDistribution, self).setUp() - self.dirs = [os.path.join(self.fake_dists_path, f) - for f in os.listdir(self.fake_dists_path) - if f.endswith('.dist-info')] - - self.records = {} - for distinfo_dir in self.dirs: - - record_file = os.path.join(distinfo_dir, 'RECORD') - with open(record_file, 'w') as file: - record_writer = csv.writer( - file, delimiter=',', 
quoting=csv.QUOTE_NONE, - lineterminator='\n') - - dist_location = distinfo_dir.replace('.dist-info', '') - - for path, dirs, files in os.walk(dist_location): - for f in files: - record_writer.writerow(record_pieces((path, f))) - for file in ('INSTALLER', 'METADATA', 'REQUESTED'): - record_writer.writerow(record_pieces((distinfo_dir, file))) - record_writer.writerow([record_file]) - - with open(record_file) as file: - record_reader = csv.reader(file, lineterminator='\n') - record_data = {} - for row in record_reader: - if row == []: - continue - path, md5_, size = (row[:] + - [None for i in range(len(row), 3)]) - record_data[path] = md5_, size - self.records[distinfo_dir] = record_data - - def test_instantiation(self): - super(TestDistribution, self).test_instantiation() - self.assertIsInstance(self.dist.requested, bool) - - def test_uses(self): - # Test to determine if a distribution uses a specified file. - # Criteria to test against - distinfo_name = 'grammar-1.0a4' - distinfo_dir = os.path.join(self.fake_dists_path, - distinfo_name + '.dist-info') - true_path = [self.fake_dists_path, distinfo_name, - 'grammar', 'utils.py'] - true_path = os.path.join(*true_path) - false_path = [self.fake_dists_path, 'towel_stuff-0.1', 'towel_stuff', - '__init__.py'] - false_path = os.path.join(*false_path) - - # Test if the distribution uses the file in question - dist = Distribution(distinfo_dir) - self.assertTrue(dist.uses(true_path), 'dist %r is supposed to use %r' % - (dist, true_path)) - self.assertFalse(dist.uses(false_path), 'dist %r is not supposed to ' - 'use %r' % (dist, true_path)) - - def test_get_distinfo_file(self): - # Test the retrieval of dist-info file objects. - distinfo_name = 'choxie-2.0.0.9' - other_distinfo_name = 'grammar-1.0a4' - distinfo_dir = os.path.join(self.fake_dists_path, - distinfo_name + '.dist-info') - dist = Distribution(distinfo_dir) - # Test for known good file matches - distinfo_files = [ - # Relative paths - 'INSTALLER', 'METADATA', - # Absolute paths - os.path.join(distinfo_dir, 'RECORD'), - os.path.join(distinfo_dir, 'REQUESTED'), - ] - - for distfile in distinfo_files: - with dist.get_distinfo_file(distfile) as value: - self.assertIsInstance(value, io.TextIOWrapper) - # Is it the correct file? 
- self.assertEqual(value.name, - os.path.join(distinfo_dir, distfile)) - - # Test an absolute path that is part of another distributions dist-info - other_distinfo_file = os.path.join( - self.fake_dists_path, other_distinfo_name + '.dist-info', - 'REQUESTED') - self.assertRaises(PackagingError, dist.get_distinfo_file, - other_distinfo_file) - # Test for a file that should not exist - self.assertRaises(PackagingError, dist.get_distinfo_file, - 'MAGICFILE') - - def test_list_distinfo_files(self): - distinfo_name = 'towel_stuff-0.1' - distinfo_dir = os.path.join(self.fake_dists_path, - distinfo_name + '.dist-info') - dist = Distribution(distinfo_dir) - # Test for the iteration of the raw path - distinfo_files = [os.path.join(distinfo_dir, filename) for filename in - os.listdir(distinfo_dir)] - found = dist.list_distinfo_files() - self.assertEqual(sorted(found), sorted(distinfo_files)) - # Test for the iteration of local absolute paths - distinfo_files = [os.path.join(sys.prefix, distinfo_dir, path) for - path in distinfo_files] - found = sorted(dist.list_distinfo_files(local=True)) - if os.sep != '/': - self.assertNotIn('/', found[0]) - self.assertIn(os.sep, found[0]) - self.assertEqual(found, sorted(distinfo_files)) - - def test_get_resources_path(self): - distinfo_name = 'babar-0.1' - distinfo_dir = os.path.join(self.fake_dists_path, - distinfo_name + '.dist-info') - dist = Distribution(distinfo_dir) - resource_path = dist.get_resource_path('babar.png') - self.assertEqual(resource_path, 'babar.png') - self.assertRaises(KeyError, dist.get_resource_path, 'notexist') - - -class TestEggInfoDistribution(CommonDistributionTests, - support.LoggingCatcher, - unittest.TestCase): - - cls = EggInfoDistribution - sample_dist = 'bacon', '0.1', 'bacon-0.1.egg-info' - - def setUp(self): - super(TestEggInfoDistribution, self).setUp() - - self.dirs = [os.path.join(self.fake_dists_path, f) - for f in os.listdir(self.fake_dists_path) - if f.endswith('.egg') or f.endswith('.egg-info')] - - self.records = {} - - @unittest.skip('not implemented yet') - def test_list_installed_files(self): - # EggInfoDistribution defines list_installed_files but there is no - # test for it yet; someone with setuptools expertise needs to add a - # file with the list of installed files for one of the egg fake dists - # and write the support code to populate self.records (and then delete - # this method) - pass - - -class TestDatabase(support.LoggingCatcher, - FakeDistsMixin, - unittest.TestCase): - - def setUp(self): - super(TestDatabase, self).setUp() - sys.path.insert(0, self.fake_dists_path) - self.addCleanup(sys.path.remove, self.fake_dists_path) - - def test_caches(self): - # sanity check for internal caches - for name in ('_cache_name', '_cache_name_egg', - '_cache_path', '_cache_path_egg'): - self.assertEqual(getattr(packaging.database, name), {}) - - def test_distinfo_dirname(self): - # Given a name and a version, we expect the distinfo_dirname function - # to return a standard distribution information directory name. 
- - items = [ - # (name, version, standard_dirname) - # Test for a very simple single word name and decimal version - # number - ('docutils', '0.5', 'docutils-0.5.dist-info'), - # Test for another except this time with a '-' in the name, which - # needs to be transformed during the name lookup - ('python-ldap', '2.5', 'python_ldap-2.5.dist-info'), - # Test for both '-' in the name and a funky version number - ('python-ldap', '2.5 a---5', 'python_ldap-2.5 a---5.dist-info'), - ] - - # Loop through the items to validate the results - for name, version, standard_dirname in items: - dirname = distinfo_dirname(name, version) - self.assertEqual(dirname, standard_dirname) - - @requires_zlib - def test_get_distributions(self): - # Lookup all distributions found in the ``sys.path``. - # This test could potentially pick up other installed distributions - fake_dists = [('grammar', '1.0a4'), ('choxie', '2.0.0.9'), - ('towel-stuff', '0.1'), ('babar', '0.1')] - found_dists = [] - - # Verify the fake dists have been found. - dists = [dist for dist in get_distributions()] - for dist in dists: - self.assertIsInstance(dist, Distribution) - if (dist.name in dict(fake_dists) and - dist.path.startswith(self.fake_dists_path)): - found_dists.append((dist.name, dist.version)) - else: - # check that it doesn't find anything more than this - self.assertFalse(dist.path.startswith(self.fake_dists_path)) - # otherwise we don't care what other distributions are found - - # Finally, test that we found all that we were looking for - self.assertEqual(sorted(found_dists), sorted(fake_dists)) - - # Now, test if the egg-info distributions are found correctly as well - fake_dists += [('bacon', '0.1'), ('cheese', '2.0.2'), - ('coconuts-aster', '10.3'), - ('banana', '0.4'), ('strawberry', '0.6'), - ('truffles', '5.0'), ('nut', 'funkyversion')] - found_dists = [] - - dists = [dist for dist in get_distributions(use_egg_info=True)] - for dist in dists: - self.assertIsInstance(dist, (Distribution, EggInfoDistribution)) - if (dist.name in dict(fake_dists) and - dist.path.startswith(self.fake_dists_path)): - found_dists.append((dist.name, dist.version)) - else: - self.assertFalse(dist.path.startswith(self.fake_dists_path)) - - self.assertEqual(sorted(fake_dists), sorted(found_dists)) - - @requires_zlib - def test_get_distribution(self): - # Test for looking up a distribution by name. - # Test the lookup of the towel-stuff distribution - name = 'towel-stuff' # Note: This is different from the directory name - - # Lookup the distribution - dist = get_distribution(name) - self.assertIsInstance(dist, Distribution) - self.assertEqual(dist.name, name) - - # Verify that an unknown distribution returns None - self.assertIsNone(get_distribution('bogus')) - - # Verify partial name matching doesn't work - self.assertIsNone(get_distribution('towel')) - - # Verify that it does not find egg-info distributions, when not - # instructed to - self.assertIsNone(get_distribution('bacon')) - self.assertIsNone(get_distribution('cheese')) - self.assertIsNone(get_distribution('strawberry')) - self.assertIsNone(get_distribution('banana')) - - # Now check that it works well in both situations, when egg-info - # is a file and directory respectively. 
- dist = get_distribution('cheese', use_egg_info=True) - self.assertIsInstance(dist, EggInfoDistribution) - self.assertEqual(dist.name, 'cheese') - - dist = get_distribution('bacon', use_egg_info=True) - self.assertIsInstance(dist, EggInfoDistribution) - self.assertEqual(dist.name, 'bacon') - - dist = get_distribution('banana', use_egg_info=True) - self.assertIsInstance(dist, EggInfoDistribution) - self.assertEqual(dist.name, 'banana') - - dist = get_distribution('strawberry', use_egg_info=True) - self.assertIsInstance(dist, EggInfoDistribution) - self.assertEqual(dist.name, 'strawberry') - - def test_get_file_users(self): - # Test the iteration of distributions that use a file. - name = 'towel_stuff-0.1' - path = os.path.join(self.fake_dists_path, name, - 'towel_stuff', '__init__.py') - for dist in get_file_users(path): - self.assertIsInstance(dist, Distribution) - self.assertEqual(dist.name, name) - - @requires_zlib - def test_provides(self): - # Test for looking up distributions by what they provide - checkLists = lambda x, y: self.assertEqual(sorted(x), sorted(y)) - - l = [dist.name for dist in provides_distribution('truffles')] - checkLists(l, ['choxie', 'towel-stuff']) - - l = [dist.name for dist in provides_distribution('truffles', '1.0')] - checkLists(l, ['choxie']) - - l = [dist.name for dist in provides_distribution('truffles', '1.0', - use_egg_info=True)] - checkLists(l, ['choxie', 'cheese']) - - l = [dist.name for dist in provides_distribution('truffles', '1.1.2')] - checkLists(l, ['towel-stuff']) - - l = [dist.name for dist in provides_distribution('truffles', '1.1')] - checkLists(l, ['towel-stuff']) - - l = [dist.name for dist in provides_distribution('truffles', - '!=1.1,<=2.0')] - checkLists(l, ['choxie']) - - l = [dist.name for dist in provides_distribution('truffles', - '!=1.1,<=2.0', - use_egg_info=True)] - checkLists(l, ['choxie', 'bacon', 'cheese']) - - l = [dist.name for dist in provides_distribution('truffles', '>1.0')] - checkLists(l, ['towel-stuff']) - - l = [dist.name for dist in provides_distribution('truffles', '>1.5')] - checkLists(l, []) - - l = [dist.name for dist in provides_distribution('truffles', '>1.5', - use_egg_info=True)] - checkLists(l, ['bacon']) - - l = [dist.name for dist in provides_distribution('truffles', '>=1.0')] - checkLists(l, ['choxie', 'towel-stuff']) - - l = [dist.name for dist in provides_distribution('strawberry', '0.6', - use_egg_info=True)] - checkLists(l, ['coconuts-aster']) - - l = [dist.name for dist in provides_distribution('strawberry', '>=0.5', - use_egg_info=True)] - checkLists(l, ['coconuts-aster']) - - l = [dist.name for dist in provides_distribution('strawberry', '>0.6', - use_egg_info=True)] - checkLists(l, []) - - l = [dist.name for dist in provides_distribution('banana', '0.4', - use_egg_info=True)] - checkLists(l, ['coconuts-aster']) - - l = [dist.name for dist in provides_distribution('banana', '>=0.3', - use_egg_info=True)] - checkLists(l, ['coconuts-aster']) - - l = [dist.name for dist in provides_distribution('banana', '!=0.4', - use_egg_info=True)] - checkLists(l, []) - - @requires_zlib - def test_obsoletes(self): - # Test looking for distributions based on what they obsolete - checkLists = lambda x, y: self.assertEqual(sorted(x), sorted(y)) - - l = [dist.name for dist in obsoletes_distribution('truffles', '1.0')] - checkLists(l, []) - - l = [dist.name for dist in obsoletes_distribution('truffles', '1.0', - use_egg_info=True)] - checkLists(l, ['cheese', 'bacon']) - - l = [dist.name for dist in 
obsoletes_distribution('truffles', '0.8')] - checkLists(l, ['choxie']) - - l = [dist.name for dist in obsoletes_distribution('truffles', '0.8', - use_egg_info=True)] - checkLists(l, ['choxie', 'cheese']) - - l = [dist.name for dist in obsoletes_distribution('truffles', '0.9.6')] - checkLists(l, ['choxie', 'towel-stuff']) - - l = [dist.name for dist in obsoletes_distribution('truffles', - '0.5.2.3')] - checkLists(l, ['choxie', 'towel-stuff']) - - l = [dist.name for dist in obsoletes_distribution('truffles', '0.2')] - checkLists(l, ['towel-stuff']) - - @requires_zlib - def test_yield_distribution(self): - # tests the internal function _yield_distributions - checkLists = lambda x, y: self.assertEqual(sorted(x), sorted(y)) - - eggs = [('bacon', '0.1'), ('banana', '0.4'), ('strawberry', '0.6'), - ('truffles', '5.0'), ('cheese', '2.0.2'), - ('coconuts-aster', '10.3'), ('nut', 'funkyversion')] - dists = [('choxie', '2.0.0.9'), ('grammar', '1.0a4'), - ('towel-stuff', '0.1'), ('babar', '0.1')] - - checkLists([], _yield_distributions(False, False, sys.path)) - - found = [(dist.name, dist.version) - for dist in _yield_distributions(False, True, sys.path) - if dist.path.startswith(self.fake_dists_path)] - checkLists(eggs, found) - - found = [(dist.name, dist.version) - for dist in _yield_distributions(True, False, sys.path) - if dist.path.startswith(self.fake_dists_path)] - checkLists(dists, found) - - found = [(dist.name, dist.version) - for dist in _yield_distributions(True, True, sys.path) - if dist.path.startswith(self.fake_dists_path)] - checkLists(dists + eggs, found) - - -class DataFilesTestCase(GlobTestCaseBase): - - def assertRulesMatch(self, rules, spec): - tempdir = self.build_files_tree(spec) - expected = self.clean_tree(spec) - result = get_resources_dests(tempdir, rules) - self.assertEqual(expected, result) - - def clean_tree(self, spec): - files = {} - for path, value in spec.items(): - if value is not None: - files[path] = value - return files - - def test_simple_glob(self): - rules = [('', '*.tpl', '{data}')] - spec = {'coucou.tpl': '{data}/coucou.tpl', - 'Donotwant': None} - self.assertRulesMatch(rules, spec) - - def test_multiple_match(self): - rules = [('scripts', '*.bin', '{appdata}'), - ('scripts', '*', '{appscript}')] - spec = {'scripts/script.bin': '{appscript}/script.bin', - 'Babarlikestrawberry': None} - self.assertRulesMatch(rules, spec) - - def test_set_match(self): - rules = [('scripts', '*.{bin,sh}', '{appscript}')] - spec = {'scripts/script.bin': '{appscript}/script.bin', - 'scripts/babar.sh': '{appscript}/babar.sh', - 'Babarlikestrawberry': None} - self.assertRulesMatch(rules, spec) - - def test_set_match_multiple(self): - rules = [('scripts', 'script{s,}.{bin,sh}', '{appscript}')] - spec = {'scripts/scripts.bin': '{appscript}/scripts.bin', - 'scripts/script.sh': '{appscript}/script.sh', - 'Babarlikestrawberry': None} - self.assertRulesMatch(rules, spec) - - def test_set_match_exclude(self): - rules = [('scripts', '*', '{appscript}'), - ('', os.path.join('**', '*.sh'), None)] - spec = {'scripts/scripts.bin': '{appscript}/scripts.bin', - 'scripts/script.sh': None, - 'Babarlikestrawberry': None} - self.assertRulesMatch(rules, spec) - - def test_glob_in_base(self): - rules = [('scrip*', '*.bin', '{appscript}')] - spec = {'scripts/scripts.bin': '{appscript}/scripts.bin', - 'scripouille/babar.bin': '{appscript}/babar.bin', - 'scriptortu/lotus.bin': '{appscript}/lotus.bin', - 'Babarlikestrawberry': None} - self.assertRulesMatch(rules, spec) - - def test_recursive_glob(self): 
- rules = [('', os.path.join('**', '*.bin'), '{binary}')] - spec = {'binary0.bin': '{binary}/binary0.bin', - 'scripts/binary1.bin': '{binary}/scripts/binary1.bin', - 'scripts/bin/binary2.bin': '{binary}/scripts/bin/binary2.bin', - 'you/kill/pandabear.guy': None} - self.assertRulesMatch(rules, spec) - - def test_final_exemple_glob(self): - rules = [ - ('mailman/database/schemas/', '*', '{appdata}/schemas'), - ('', os.path.join('**', '*.tpl'), '{appdata}/templates'), - ('', os.path.join('developer-docs', '**', '*.txt'), '{doc}'), - ('', 'README', '{doc}'), - ('mailman/etc/', '*', '{config}'), - ('mailman/foo/', os.path.join('**', 'bar', '*.cfg'), - '{config}/baz'), - ('mailman/foo/', os.path.join('**', '*.cfg'), '{config}/hmm'), - ('', 'some-new-semantic.sns', '{funky-crazy-category}'), - ] - spec = { - 'README': '{doc}/README', - 'some.tpl': '{appdata}/templates/some.tpl', - 'some-new-semantic.sns': - '{funky-crazy-category}/some-new-semantic.sns', - 'mailman/database/mailman.db': None, - 'mailman/database/schemas/blah.schema': - '{appdata}/schemas/blah.schema', - 'mailman/etc/my.cnf': '{config}/my.cnf', - 'mailman/foo/some/path/bar/my.cfg': - '{config}/hmm/some/path/bar/my.cfg', - 'mailman/foo/some/path/other.cfg': - '{config}/hmm/some/path/other.cfg', - 'developer-docs/index.txt': '{doc}/developer-docs/index.txt', - 'developer-docs/api/toc.txt': '{doc}/developer-docs/api/toc.txt', - } - self.maxDiff = None - self.assertRulesMatch(rules, spec) - - def test_get_file(self): - # Create a fake dist - temp_site_packages = tempfile.mkdtemp() - self.addCleanup(shutil.rmtree, temp_site_packages) - - dist_name = 'test' - dist_info = os.path.join(temp_site_packages, 'test-0.1.dist-info') - os.mkdir(dist_info) - - metadata_path = os.path.join(dist_info, 'METADATA') - resources_path = os.path.join(dist_info, 'RESOURCES') - - with open(metadata_path, 'w') as fp: - fp.write(dedent("""\ - Metadata-Version: 1.2 - Name: test - Version: 0.1 - Summary: test - Author: me - """)) - - test_path = 'test.cfg' - - fd, test_resource_path = tempfile.mkstemp() - os.close(fd) - self.addCleanup(os.remove, test_resource_path) - - with open(test_resource_path, 'w') as fp: - fp.write('Config') - - with open(resources_path, 'w') as fp: - fp.write('%s,%s' % (test_path, test_resource_path)) - - # Add fake site-packages to sys.path to retrieve fake dist - self.addCleanup(sys.path.remove, temp_site_packages) - sys.path.insert(0, temp_site_packages) - - # Force packaging.database to rescan the sys.path - self.addCleanup(enable_cache) - disable_cache() - - # Try to retrieve resources paths and files - self.assertEqual(get_file_path(dist_name, test_path), - test_resource_path) - self.assertRaises(KeyError, get_file_path, dist_name, 'i-dont-exist') - - with get_file(dist_name, test_path) as fp: - self.assertEqual(fp.read(), 'Config') - self.assertRaises(KeyError, get_file, dist_name, 'i-dont-exist') - - -def test_suite(): - suite = unittest.TestSuite() - load = unittest.defaultTestLoader.loadTestsFromTestCase - suite.addTest(load(TestDistribution)) - suite.addTest(load(TestEggInfoDistribution)) - suite.addTest(load(TestDatabase)) - suite.addTest(load(DataFilesTestCase)) - return suite - - -if __name__ == "__main__": - unittest.main(defaultTest='test_suite') diff --git a/Lib/packaging/tests/test_depgraph.py b/Lib/packaging/tests/test_depgraph.py deleted file mode 100644 --- a/Lib/packaging/tests/test_depgraph.py +++ /dev/null @@ -1,310 +0,0 @@ -"""Tests for packaging.depgraph """ -import os -import re -import sys -from io import 
StringIO
-
-from packaging import depgraph
-from packaging.database import get_distribution, enable_cache, disable_cache
-
-from packaging.tests import unittest, support
-from packaging.tests.support import requires_zlib
-
-
-class DepGraphTestCase(support.LoggingCatcher,
-                       unittest.TestCase):
-
-    DISTROS_DIST = ('choxie', 'grammar', 'towel-stuff')
-    DISTROS_EGG = ('bacon', 'banana', 'strawberry', 'cheese')
-    BAD_EGGS = ('nut',)
-
-    EDGE = re.compile(
-        r'"(?P<from>.*)" -> "(?P<to>.*)" \[label="(?P
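
For reference, the EDGE attribute quoted (and truncated) above is a named-group regular
expression over dot-style edge lines of the form "a" -> "b" [label="requirement"], which
is how these tests pick a dependency graph apart after it has been rendered to dot
format. A self-contained illustration of that kind of parsing; the sample dot text, the
edges() helper and the group names are illustrative, not taken from the depgraph module:

    import re

    # same idea as the EDGE attribute above; the group names are assumptions
    EDGE = re.compile(r'"(?P<from>.*)" -> "(?P<to>.*)" \[label="(?P<label>.*)"\]')

    DOT_SNIPPET = '''\
    digraph dependencies {
    "choxie" -> "towel-stuff" [label="towel-stuff (0.1)"]
    "grammar" -> "bacon" [label="truffles (>=1.2)"]
    }
    '''


    def edges(dot_text):
        # yield one (from, to, label) triple per edge line of the dot output
        for line in dot_text.splitlines():
            match = EDGE.match(line.strip())
            if match is not None:
                yield (match.group('from'), match.group('to'),
                       match.group('label'))


    print(list(edges(DOT_SNIPPET)))
    # -> [('choxie', 'towel-stuff', 'towel-stuff (0.1)'),
    #     ('grammar', 'bacon', 'truffles (>=1.2)')]
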