From webhook-mailer at python.org Mon Apr 1 02:16:50 2019 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Mon, 01 Apr 2019 06:16:50 -0000 Subject: [Python-checkins] bpo-36492: Deprecate passing some arguments as keyword arguments. (GH-12637) Message-ID: https://github.com/python/cpython/commit/42a139ed88c487f325a241c6ee8b308b3c045975 commit: 42a139ed88c487f325a241c6ee8b308b3c045975 branch: master author: Serhiy Storchaka committer: GitHub date: 2019-04-01T09:16:35+03:00 summary: bpo-36492: Deprecate passing some arguments as keyword arguments. (GH-12637) Deprecated passing the following arguments as keyword arguments: - "func" in functools.partialmethod(), weakref.finalize(), profile.Profile.runcall(), cProfile.Profile.runcall(), bdb.Bdb.runcall(), trace.Trace.runfunc() and curses.wrapper(). - "function" in unittest.addModuleCleanup() and unittest.TestCase.addCleanup(). - "fn" in the submit() method of concurrent.futures.ThreadPoolExecutor and concurrent.futures.ProcessPoolExecutor. - "callback" in contextlib.ExitStack.callback(), contextlib.AsyncExitStack.callback() and contextlib.AsyncExitStack.push_async_callback(). - "c" and "typeid" in the create() method of multiprocessing.managers.Server and multiprocessing.managers.SharedMemoryServer. - "obj" in weakref.finalize(). Also allowed to pass arbitrary keyword arguments (even "self" and "func") if the above arguments are passed as positional argument. files: A Misc/NEWS.d/next/Library/2019-03-31-10-21-54.bpo-36492.f7vyUs.rst M Doc/whatsnew/3.8.rst M Lib/bdb.py M Lib/cProfile.py M Lib/concurrent/futures/_base.py M Lib/concurrent/futures/process.py M Lib/concurrent/futures/thread.py M Lib/contextlib.py M Lib/curses/__init__.py M Lib/functools.py M Lib/multiprocessing/managers.py M Lib/profile.py M Lib/test/test_concurrent_futures.py M Lib/test/test_contextlib.py M Lib/test/test_contextlib_async.py M Lib/test/test_functools.py M Lib/test/test_trace.py M Lib/test/test_weakref.py M Lib/trace.py M Lib/unittest/case.py M Lib/unittest/test/test_runner.py M Lib/weakref.py diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst index ccd8bbd81042..411f2299b290 100644 --- a/Doc/whatsnew/3.8.rst +++ b/Doc/whatsnew/3.8.rst @@ -594,6 +594,29 @@ Deprecated version they will be errors. (Contributed by Serhiy Storchaka in :issue:`36048`.) +* Deprecated passing the following arguments as keyword arguments: + + - *func* in :func:`functools.partialmethod`, :func:`weakref.finalize`, + :meth:`profile.Profile.runcall`, :meth:`cProfile.Profile.runcall`, + :meth:`bdb.Bdb.runcall`, :meth:`trace.Trace.runfunc` and + :func:`curses.wrapper`. + - *function* in :func:`unittest.addModuleCleanup` and + :meth:`unittest.TestCase.addCleanup`. + - *fn* in the :meth:`~concurrent.futures.Executor.submit` method of + :class:`concurrent.futures.ThreadPoolExecutor` and + :class:`concurrent.futures.ProcessPoolExecutor`. + - *callback* in :meth:`contextlib.ExitStack.callback`, + :meth:`contextlib.AsyncExitStack.callback` and + :meth:`contextlib.AsyncExitStack.push_async_callback`. + - *c* and *typeid* in the :meth:`~multiprocessing.managers.Server.create` + method of :class:`multiprocessing.managers.Server` and + :class:`multiprocessing.managers.SharedMemoryServer`. + - *obj* in :func:`weakref.finalize`. + + In future releases of Python they will be :ref:`positional-only + `. + (Contributed by Serhiy Storchaka in :issue:`36492`.) 
+ API and Feature Removals ======================== diff --git a/Lib/bdb.py b/Lib/bdb.py index ec0f92c06a78..54aa98437450 100644 --- a/Lib/bdb.py +++ b/Lib/bdb.py @@ -618,11 +618,26 @@ def runctx(self, cmd, globals, locals): # This method is more useful to debug a single function call. - def runcall(self, func, *args, **kwds): + def runcall(*args, **kwds): """Debug a single function call. Return the result of the function call. """ + if len(args) >= 2: + self, func, *args = args + elif not args: + raise TypeError("descriptor 'runcall' of 'Bdb' object " + "needs an argument") + elif 'func' in kwds: + func = kwds.pop('func') + self, *args = args + import warnings + warnings.warn("Passing 'func' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + raise TypeError('runcall expected at least 1 positional argument, ' + 'got %d' % (len(args)-1)) + self.reset() sys.settrace(self.trace_dispatch) res = None diff --git a/Lib/cProfile.py b/Lib/cProfile.py index 305e79e28049..2e449cc576ce 100755 --- a/Lib/cProfile.py +++ b/Lib/cProfile.py @@ -103,7 +103,22 @@ def runctx(self, cmd, globals, locals): return self # This method is more useful to profile a single function call. - def runcall(self, func, *args, **kw): + def runcall(*args, **kw): + if len(args) >= 2: + self, func, *args = args + elif not args: + raise TypeError("descriptor 'runcall' of 'Profile' object " + "needs an argument") + elif 'func' in kw: + func = kw.pop('func') + self, *args = args + import warnings + warnings.warn("Passing 'func' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + raise TypeError('runcall expected at least 1 positional argument, ' + 'got %d' % (len(args)-1)) + self.enable() try: return func(*args, **kw) diff --git a/Lib/concurrent/futures/_base.py b/Lib/concurrent/futures/_base.py index 8b9dc507138e..ea16eef841c5 100644 --- a/Lib/concurrent/futures/_base.py +++ b/Lib/concurrent/futures/_base.py @@ -544,7 +544,7 @@ def set_exception(self, exception): class Executor(object): """This is an abstract base class for concrete asynchronous executors.""" - def submit(self, fn, *args, **kwargs): + def submit(*args, **kwargs): """Submits a callable to be executed with the given arguments. Schedules the callable to be executed as fn(*args, **kwargs) and returns @@ -553,6 +553,19 @@ def submit(self, fn, *args, **kwargs): Returns: A Future representing the given call. 
""" + if len(args) >= 2: + pass + elif not args: + raise TypeError("descriptor 'submit' of 'Executor' object " + "needs an argument") + elif 'fn' in kwargs: + import warnings + warnings.warn("Passing 'fn' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + raise TypeError('submit expected at least 1 positional argument, ' + 'got %d' % (len(args)-1)) + raise NotImplementedError() def map(self, fn, *iterables, timeout=None, chunksize=1): diff --git a/Lib/concurrent/futures/process.py b/Lib/concurrent/futures/process.py index 9b85e7f33769..306e9ce47a6b 100644 --- a/Lib/concurrent/futures/process.py +++ b/Lib/concurrent/futures/process.py @@ -594,7 +594,22 @@ def _adjust_process_count(self): p.start() self._processes[p.pid] = p - def submit(self, fn, *args, **kwargs): + def submit(*args, **kwargs): + if len(args) >= 2: + self, fn, *args = args + elif not args: + raise TypeError("descriptor 'submit' of 'ProcessPoolExecutor' object " + "needs an argument") + elif 'fn' in kwargs: + fn = kwargs.pop('fn') + self, *args = args + import warnings + warnings.warn("Passing 'fn' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + raise TypeError('submit expected at least 1 positional argument, ' + 'got %d' % (len(args)-1)) + with self._shutdown_lock: if self._broken: raise BrokenProcessPool(self._broken) diff --git a/Lib/concurrent/futures/thread.py b/Lib/concurrent/futures/thread.py index 78359711d5d9..0a61e3a9ac1b 100644 --- a/Lib/concurrent/futures/thread.py +++ b/Lib/concurrent/futures/thread.py @@ -142,7 +142,22 @@ def __init__(self, max_workers=None, thread_name_prefix='', self._initializer = initializer self._initargs = initargs - def submit(self, fn, *args, **kwargs): + def submit(*args, **kwargs): + if len(args) >= 2: + self, fn, *args = args + elif not args: + raise TypeError("descriptor 'submit' of 'ThreadPoolExecutor' object " + "needs an argument") + elif 'fn' in kwargs: + fn = kwargs.pop('fn') + self, *args = args + import warnings + warnings.warn("Passing 'fn' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + raise TypeError('submit expected at least 1 positional argument, ' + 'got %d' % (len(args)-1)) + with self._shutdown_lock: if self._broken: raise BrokenThreadPool(self._broken) diff --git a/Lib/contextlib.py b/Lib/contextlib.py index c06ec73f489d..ae498a2b6ef5 100644 --- a/Lib/contextlib.py +++ b/Lib/contextlib.py @@ -377,7 +377,8 @@ def _create_exit_wrapper(cm, cm_exit): return MethodType(cm_exit, cm) @staticmethod - def _create_cb_wrapper(callback, *args, **kwds): + def _create_cb_wrapper(*args, **kwds): + callback, *args = args def _exit_wrapper(exc_type, exc, tb): callback(*args, **kwds) return _exit_wrapper @@ -426,11 +427,26 @@ def enter_context(self, cm): self._push_cm_exit(cm, _exit) return result - def callback(self, callback, *args, **kwds): + def callback(*args, **kwds): """Registers an arbitrary callback and arguments. Cannot suppress exceptions. 
""" + if len(args) >= 2: + self, callback, *args = args + elif not args: + raise TypeError("descriptor 'callback' of '_BaseExitStack' object " + "needs an argument") + elif 'callback' in kwds: + callback = kwds.pop('callback') + self, *args = args + import warnings + warnings.warn("Passing 'callback' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + raise TypeError('callback expected at least 1 positional argument, ' + 'got %d' % (len(args)-1)) + _exit_wrapper = self._create_cb_wrapper(callback, *args, **kwds) # We changed the signature, so using @wraps is not appropriate, but @@ -536,7 +552,8 @@ def _create_async_exit_wrapper(cm, cm_exit): return MethodType(cm_exit, cm) @staticmethod - def _create_async_cb_wrapper(callback, *args, **kwds): + def _create_async_cb_wrapper(*args, **kwds): + callback, *args = args async def _exit_wrapper(exc_type, exc, tb): await callback(*args, **kwds) return _exit_wrapper @@ -571,11 +588,26 @@ def push_async_exit(self, exit): self._push_async_cm_exit(exit, exit_method) return exit # Allow use as a decorator - def push_async_callback(self, callback, *args, **kwds): + def push_async_callback(*args, **kwds): """Registers an arbitrary coroutine function and arguments. Cannot suppress exceptions. """ + if len(args) >= 2: + self, callback, *args = args + elif not args: + raise TypeError("descriptor 'push_async_callback' of " + "'AsyncExitStack' object needs an argument") + elif 'callback' in kwds: + callback = kwds.pop('callback') + self, *args = args + import warnings + warnings.warn("Passing 'callback' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + raise TypeError('push_async_callback expected at least 1 ' + 'positional argument, got %d' % (len(args)-1)) + _exit_wrapper = self._create_async_cb_wrapper(callback, *args, **kwds) # We changed the signature, so using @wraps is not appropriate, but diff --git a/Lib/curses/__init__.py b/Lib/curses/__init__.py index 47378741acc2..44a198428820 100644 --- a/Lib/curses/__init__.py +++ b/Lib/curses/__init__.py @@ -60,7 +60,7 @@ def start_color(): # raises an exception, wrapper() will restore the terminal to a sane state so # you can read the resulting traceback. -def wrapper(func, *args, **kwds): +def wrapper(*args, **kwds): """Wrapper function that initializes curses and calls another function, restoring normal keyboard/screen behavior on error. The callable object 'func' is then passed the main window 'stdscr' @@ -68,6 +68,17 @@ def wrapper(func, *args, **kwds): wrapper(). """ + if args: + func, *args = args + elif 'func' in kwds: + func = kwds.pop('func') + import warnings + warnings.warn("Passing 'func' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + raise TypeError('wrapper expected at least 1 positional argument, ' + 'got %d' % len(args)) + try: # Initialize curses stdscr = initscr() diff --git a/Lib/functools.py b/Lib/functools.py index 426653f13f6d..1f1874db9b4c 100644 --- a/Lib/functools.py +++ b/Lib/functools.py @@ -354,7 +354,23 @@ class partialmethod(object): callables as instance methods. 
""" - def __init__(self, func, *args, **keywords): + def __init__(*args, **keywords): + if len(args) >= 2: + self, func, *args = args + elif not args: + raise TypeError("descriptor '__init__' of partialmethod " + "needs an argument") + elif 'func' in keywords: + func = keywords.pop('func') + self, *args = args + import warnings + warnings.warn("Passing 'func' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + raise TypeError("type 'partialmethod' takes at least one argument, " + "got %d" % (len(args)-1)) + args = tuple(args) + if not callable(func) and not hasattr(func, "__get__"): raise TypeError("{!r} is not callable or a descriptor" .format(func)) diff --git a/Lib/multiprocessing/managers.py b/Lib/multiprocessing/managers.py index 7973012b98d1..80c3ddb9154a 100644 --- a/Lib/multiprocessing/managers.py +++ b/Lib/multiprocessing/managers.py @@ -358,10 +358,36 @@ def shutdown(self, c): finally: self.stop_event.set() - def create(self, c, typeid, *args, **kwds): + def create(*args, **kwds): ''' Create a new shared object and return its id ''' + if len(args) >= 3: + self, c, typeid, *args = args + elif not args: + raise TypeError("descriptor 'create' of 'Server' object " + "needs an argument") + else: + if 'typeid' not in kwds: + raise TypeError('create expected at least 2 positional ' + 'arguments, got %d' % (len(args)-1)) + typeid = kwds.pop('typeid') + if len(args) >= 2: + self, c, *args = args + import warnings + warnings.warn("Passing 'typeid' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + if 'c' not in kwds: + raise TypeError('create expected at least 2 positional ' + 'arguments, got %d' % (len(args)-1)) + c = kwds.pop('c') + self, *args = args + import warnings + warnings.warn("Passing 'c' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + args = tuple(args) + with self.mutex: callable, exposed, method_to_typeid, proxytype = \ self.registry[typeid] @@ -583,10 +609,13 @@ def _run_server(cls, registry, address, authkey, serializer, writer, util.info('manager serving at %r', server.address) server.serve_forever() - def _create(self, typeid, *args, **kwds): + def _create(*args, **kwds): ''' Create a new shared object; return the token and exposed tuple ''' + self, typeid, *args = args + args = tuple(args) + assert self._state.value == State.STARTED, 'server not yet started' conn = self._Client(self._address, authkey=self._authkey) try: @@ -1261,15 +1290,25 @@ def __init__(self, *args, **kwargs): _SharedMemoryTracker(f"shmm_{self.address}_{getpid()}") util.debug(f"SharedMemoryServer started by pid {getpid()}") - def create(self, c, typeid, *args, **kwargs): + def create(*args, **kwargs): """Create a new distributed-shared object (not backed by a shared memory block) and return its id to be used in a Proxy Object.""" # Unless set up as a shared proxy, don't make shared_memory_context # a standard part of kwargs. This makes things easier for supplying # simple functions. 
+ if len(args) >= 3: + typeod = args[2] + elif 'typeid' in kwargs: + typeid = kwargs['typeid'] + elif not args: + raise TypeError("descriptor 'create' of 'SharedMemoryServer' " + "object needs an argument") + else: + raise TypeError('create expected at least 2 positional ' + 'arguments, got %d' % (len(args)-1)) if hasattr(self.registry[typeid][-1], "_shared_memory_proxy"): kwargs['shared_memory_context'] = self.shared_memory_context - return Server.create(self, c, typeid, *args, **kwargs) + return Server.create(*args, **kwargs) def shutdown(self, c): "Call unlink() on all tracked shared memory, terminate the Server." diff --git a/Lib/profile.py b/Lib/profile.py index 5df43604acdd..9a865d3f6f6e 100755 --- a/Lib/profile.py +++ b/Lib/profile.py @@ -425,7 +425,22 @@ def runctx(self, cmd, globals, locals): return self # This method is more useful to profile a single function call. - def runcall(self, func, *args, **kw): + def runcall(*args, **kw): + if len(args) >= 2: + self, func, *args = args + elif not args: + raise TypeError("descriptor 'runcall' of 'Profile' object " + "needs an argument") + elif 'func' in kw: + func = kw.pop('func') + self, *args = args + import warnings + warnings.warn("Passing 'func' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + raise TypeError('runcall expected at least 1 positional argument, ' + 'got %d' % (len(args)-1)) + self.set_cmd(repr(func)) sys.setprofile(self.dispatcher) try: diff --git a/Lib/test/test_concurrent_futures.py b/Lib/test/test_concurrent_futures.py index 01125c79ba57..903afbd2a4f6 100644 --- a/Lib/test/test_concurrent_futures.py +++ b/Lib/test/test_concurrent_futures.py @@ -49,6 +49,9 @@ def create_future(state=PENDING, exception=None, result=None): def mul(x, y): return x * y +def capture(*args, **kwargs): + return args, kwargs + def sleep_and_raise(t): time.sleep(t) raise Exception('this is an exception') @@ -658,6 +661,13 @@ def test_submit(self): def test_submit_keyword(self): future = self.executor.submit(mul, 2, y=8) self.assertEqual(16, future.result()) + future = self.executor.submit(capture, 1, self=2, fn=3) + self.assertEqual(future.result(), ((1,), {'self': 2, 'fn': 3})) + with self.assertWarns(DeprecationWarning): + future = self.executor.submit(fn=capture, arg=1) + self.assertEqual(future.result(), ((), {'arg': 1})) + with self.assertRaises(TypeError): + self.executor.submit(arg=1) def test_map(self): self.assertEqual( diff --git a/Lib/test/test_contextlib.py b/Lib/test/test_contextlib.py index 755d9b95a677..188a29d9f9fd 100644 --- a/Lib/test/test_contextlib.py +++ b/Lib/test/test_contextlib.py @@ -574,6 +574,7 @@ def test_callback(self): ((), dict(example=1)), ((1,), dict(example=1)), ((1,2), dict(example=1)), + ((1,2), dict(self=3, callback=4)), ] result = [] def _exit(*args, **kwds): @@ -596,6 +597,16 @@ def _exit(*args, **kwds): self.assertIsNone(wrapper[1].__doc__, _exit.__doc__) self.assertEqual(result, expected) + result = [] + with self.exit_stack() as stack: + with self.assertRaises(TypeError): + stack.callback(arg=1) + with self.assertRaises(TypeError): + self.exit_stack.callback(arg=2) + with self.assertWarns(DeprecationWarning): + stack.callback(callback=_exit, arg=3) + self.assertEqual(result, [((), {'arg': 3})]) + def test_push(self): exc_raised = ZeroDivisionError def _expect_exc(exc_type, exc, exc_tb): diff --git a/Lib/test/test_contextlib_async.py b/Lib/test/test_contextlib_async.py index 57716aea9059..492b226a0d54 100644 --- a/Lib/test/test_contextlib_async.py +++ 
b/Lib/test/test_contextlib_async.py @@ -352,6 +352,16 @@ def setUp(self): self.assertEqual(result, expected) + result = [] + async with AsyncExitStack() as stack: + with self.assertRaises(TypeError): + stack.push_async_callback(arg=1) + with self.assertRaises(TypeError): + self.exit_stack.push_async_callback(arg=2) + with self.assertWarns(DeprecationWarning): + stack.push_async_callback(callback=_exit, arg=3) + self.assertEqual(result, [((), {'arg': 3})]) + @_async_test async def test_async_push(self): exc_raised = ZeroDivisionError diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index 63a9ade54806..4b2b9ab61fa7 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -464,6 +464,7 @@ class A(object): positional = functools.partialmethod(capture, 1) keywords = functools.partialmethod(capture, a=2) both = functools.partialmethod(capture, 3, b=4) + spec_keywords = functools.partialmethod(capture, self=1, func=2) nested = functools.partialmethod(positional, 5) @@ -497,6 +498,8 @@ def test_arg_combinations(self): self.assertEqual(self.A.both(self.a, 5, c=6), ((self.a, 3, 5), {'b': 4, 'c': 6})) + self.assertEqual(self.a.spec_keywords(), ((self.a,), {'self': 1, 'func': 2})) + def test_nested(self): self.assertEqual(self.a.nested(), ((self.a, 1, 5), {})) self.assertEqual(self.a.nested(6), ((self.a, 1, 5, 6), {})) @@ -550,6 +553,14 @@ def test_invalid_args(self): with self.assertRaises(TypeError): class B(object): method = functools.partialmethod(None, 1) + with self.assertRaises(TypeError): + class B: + method = functools.partialmethod() + with self.assertWarns(DeprecationWarning): + class B: + method = functools.partialmethod(func=capture, a=1) + b = B() + self.assertEqual(b.method(2, x=3), ((b, 2), {'a': 1, 'x': 3})) def test_repr(self): self.assertEqual(repr(vars(self.A)['both']), diff --git a/Lib/test/test_trace.py b/Lib/test/test_trace.py index 5c333b7a0a5e..afe790267661 100644 --- a/Lib/test/test_trace.py +++ b/Lib/test/test_trace.py @@ -70,6 +70,9 @@ def traced_func_calling_generator(): def traced_doubler(num): return num * 2 +def traced_capturer(*args, **kwargs): + return args, kwargs + def traced_caller_list_comprehension(): k = 10 mylist = [traced_doubler(i) for i in range(k)] @@ -270,6 +273,15 @@ def test_simple_caller(self): } self.assertEqual(self.tracer.results().calledfuncs, expected) + def test_arg_errors(self): + res = self.tracer.runfunc(traced_capturer, 1, 2, self=3, func=4) + self.assertEqual(res, ((1, 2), {'self': 3, 'func': 4})) + with self.assertWarns(DeprecationWarning): + res = self.tracer.runfunc(func=traced_capturer, arg=1) + self.assertEqual(res, ((), {'arg': 1})) + with self.assertRaises(TypeError): + self.tracer.runfunc() + def test_loop_caller_importing(self): self.tracer.runfunc(traced_func_importing_caller, 1) diff --git a/Lib/test/test_weakref.py b/Lib/test/test_weakref.py index 1fac08dafc7d..50a46f817f9f 100644 --- a/Lib/test/test_weakref.py +++ b/Lib/test/test_weakref.py @@ -1839,6 +1839,35 @@ def add(x,y,z): self.assertEqual(f.alive, False) self.assertEqual(res, [199]) + def test_arg_errors(self): + def fin(*args, **kwargs): + res.append((args, kwargs)) + + a = self.A() + + res = [] + f = weakref.finalize(a, fin, 1, 2, func=3, obj=4) + self.assertEqual(f.peek(), (a, fin, (1, 2), {'func': 3, 'obj': 4})) + f() + self.assertEqual(res, [((1, 2), {'func': 3, 'obj': 4})]) + + res = [] + with self.assertWarns(DeprecationWarning): + f = weakref.finalize(a, func=fin, arg=1) + self.assertEqual(f.peek(), (a, fin, (), {'arg': 1})) + 
f() + self.assertEqual(res, [((), {'arg': 1})]) + + res = [] + with self.assertWarns(DeprecationWarning): + f = weakref.finalize(obj=a, func=fin, arg=1) + self.assertEqual(f.peek(), (a, fin, (), {'arg': 1})) + f() + self.assertEqual(res, [((), {'arg': 1})]) + + self.assertRaises(TypeError, weakref.finalize, a) + self.assertRaises(TypeError, weakref.finalize) + def test_order(self): a = self.A() res = [] diff --git a/Lib/trace.py b/Lib/trace.py index 3049e4ec6839..fd40fbae8505 100755 --- a/Lib/trace.py +++ b/Lib/trace.py @@ -451,7 +451,22 @@ def runctx(self, cmd, globals=None, locals=None): sys.settrace(None) threading.settrace(None) - def runfunc(self, func, *args, **kw): + def runfunc(*args, **kw): + if len(args) >= 2: + self, func, *args = args + elif not args: + raise TypeError("descriptor 'runfunc' of 'Trace' object " + "needs an argument") + elif 'func' in kw: + func = kw.pop('func') + self, *args = args + import warnings + warnings.warn("Passing 'func' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + raise TypeError('runfunc expected at least 1 positional argument, ' + 'got %d' % (len(args)-1)) + result = None if not self.donothing: sys.settrace(self.globaltrace) diff --git a/Lib/unittest/case.py b/Lib/unittest/case.py index a157ae8a14bc..972a4658b17b 100644 --- a/Lib/unittest/case.py +++ b/Lib/unittest/case.py @@ -86,9 +86,21 @@ def _id(obj): _module_cleanups = [] -def addModuleCleanup(function, *args, **kwargs): +def addModuleCleanup(*args, **kwargs): """Same as addCleanup, except the cleanup items are called even if setUpModule fails (unlike tearDownModule).""" + if args: + function, *args = args + elif 'function' in kwargs: + function = kwargs.pop('function') + import warnings + warnings.warn("Passing 'function' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + raise TypeError('addModuleCleanup expected at least 1 positional ' + 'argument, got %d' % (len(args)-1)) + args = tuple(args) + _module_cleanups.append((function, args, kwargs)) @@ -463,18 +475,44 @@ def addTypeEqualityFunc(self, typeobj, function): """ self._type_equality_funcs[typeobj] = function - def addCleanup(self, function, *args, **kwargs): + def addCleanup(*args, **kwargs): """Add a function, with arguments, to be called when the test is completed. Functions added are called on a LIFO basis and are called after tearDown on test failure or success. 
Cleanup items are called even if setUp fails (unlike tearDown).""" + if len(args) >= 2: + self, function, *args = args + elif not args: + raise TypeError("descriptor 'addCleanup' of 'TestCase' object " + "needs an argument") + elif 'function' in kwargs: + function = kwargs.pop('function') + self, *args = args + import warnings + warnings.warn("Passing 'function' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + raise TypeError('addCleanup expected at least 1 positional ' + 'argument, got %d' % (len(args)-1)) + args = tuple(args) + self._cleanups.append((function, args, kwargs)) @classmethod - def addClassCleanup(cls, function, *args, **kwargs): + def addClassCleanup(*args, **kwargs): """Same as addCleanup, except the cleanup items are called even if setUpClass fails (unlike tearDownClass).""" + if len(args) >= 2: + cls, function, *args = args + elif not args: + raise TypeError("descriptor 'addClassCleanup' of 'TestCase' object " + "needs an argument") + else: + raise TypeError('addClassCleanup expected at least 1 positional ' + 'argument, got %d' % (len(args)-1)) + args = tuple(args) + cls._class_cleanups.append((function, args, kwargs)) def setUp(self): diff --git a/Lib/unittest/test/test_runner.py b/Lib/unittest/test/test_runner.py index 2b475c2d8566..443b689dbea0 100644 --- a/Lib/unittest/test/test_runner.py +++ b/Lib/unittest/test/test_runner.py @@ -403,6 +403,22 @@ class Module(object): self.assertEqual(str(e.exception), 'CleanUpExc') self.assertEqual(unittest.case._module_cleanups, []) + def test_addModuleCleanup_arg_errors(self): + cleanups = [] + def cleanup(*args, **kwargs): + cleanups.append((args, kwargs)) + + class Module(object): + unittest.addModuleCleanup(cleanup, 1, 2, function='hello') + with self.assertWarns(DeprecationWarning): + unittest.addModuleCleanup(function=cleanup, arg='hello') + with self.assertRaises(TypeError): + unittest.addModuleCleanup() + unittest.case.doModuleCleanups() + self.assertEqual(cleanups, + [((), {'arg': 'hello'}), + ((1, 2), {'function': 'hello'})]) + def test_run_module_cleanUp(self): blowUp = True ordering = [] @@ -547,6 +563,50 @@ def tearDownClass(cls): 'tearDownModule', 'cleanup_good']) self.assertEqual(unittest.case._module_cleanups, []) + def test_addClassCleanup_arg_errors(self): + cleanups = [] + def cleanup(*args, **kwargs): + cleanups.append((args, kwargs)) + + class TestableTest(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.addClassCleanup(cleanup, 1, 2, function=3, cls=4) + with self.assertRaises(TypeError): + cls.addClassCleanup(function=cleanup, arg='hello') + def testNothing(self): + pass + + with self.assertRaises(TypeError): + TestableTest.addClassCleanup() + with self.assertRaises(TypeError): + unittest.TestCase.addCleanup(cls=TestableTest(), function=cleanup) + runTests(TestableTest) + self.assertEqual(cleanups, + [((1, 2), {'function': 3, 'cls': 4})]) + + def test_addCleanup_arg_errors(self): + cleanups = [] + def cleanup(*args, **kwargs): + cleanups.append((args, kwargs)) + + class TestableTest(unittest.TestCase): + def setUp(self2): + self2.addCleanup(cleanup, 1, 2, function=3, self=4) + with self.assertWarns(DeprecationWarning): + self2.addCleanup(function=cleanup, arg='hello') + def testNothing(self): + pass + + with self.assertRaises(TypeError): + TestableTest().addCleanup() + with self.assertRaises(TypeError): + unittest.TestCase.addCleanup(self=TestableTest(), function=cleanup) + runTests(TestableTest) + self.assertEqual(cleanups, + [((), {'arg': 'hello'}), + ((1, 2), 
{'function': 3, 'self': 4})]) + def test_with_errors_in_addClassCleanup(self): ordering = [] diff --git a/Lib/weakref.py b/Lib/weakref.py index 753f07291e20..285c70792e0b 100644 --- a/Lib/weakref.py +++ b/Lib/weakref.py @@ -527,7 +527,33 @@ class finalize: class _Info: __slots__ = ("weakref", "func", "args", "kwargs", "atexit", "index") - def __init__(self, obj, func, *args, **kwargs): + def __init__(*args, **kwargs): + if len(args) >= 3: + self, obj, func, *args = args + elif not args: + raise TypeError("descriptor '__init__' of 'finalize' object " + "needs an argument") + else: + if 'func' not in kwargs: + raise TypeError('finalize expected at least 2 positional ' + 'arguments, got %d' % (len(args)-1)) + func = kwargs.pop('func') + if len(args) >= 2: + self, obj, *args = args + import warnings + warnings.warn("Passing 'func' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + else: + if 'obj' not in kwargs: + raise TypeError('finalize expected at least 2 positional ' + 'arguments, got %d' % (len(args)-1)) + obj = kwargs.pop('obj') + self, *args = args + import warnings + warnings.warn("Passing 'obj' as keyword argument is deprecated", + DeprecationWarning, stacklevel=2) + args = tuple(args) + if not self._registered_with_atexit: # We may register the exit function more than once because # of a thread race, but that is harmless diff --git a/Misc/NEWS.d/next/Library/2019-03-31-10-21-54.bpo-36492.f7vyUs.rst b/Misc/NEWS.d/next/Library/2019-03-31-10-21-54.bpo-36492.f7vyUs.rst new file mode 100644 index 000000000000..f294bd27b25d --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-03-31-10-21-54.bpo-36492.f7vyUs.rst @@ -0,0 +1,5 @@ +Deprecated passing required arguments like *func* as keyword arguments +in functions which should accept arbitrary keyword arguments and pass them +to other function. Arbitrary keyword arguments (even with names "self" and +"func") can now be passed to these functions if the required arguments are +passed as positional arguments. From webhook-mailer at python.org Mon Apr 1 03:59:30 2019 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Mon, 01 Apr 2019 07:59:30 -0000 Subject: [Python-checkins] [3.7] bpo-36492: Fix passing special keyword arguments to some functions. (GH-12637) (GH-12645) Message-ID: https://github.com/python/cpython/commit/a37f356de19828241bf19129f804369794c72ed3 commit: a37f356de19828241bf19129f804369794c72ed3 branch: 3.7 author: Serhiy Storchaka committer: GitHub date: 2019-04-01T10:59:24+03:00 summary: [3.7] bpo-36492: Fix passing special keyword arguments to some functions. (GH-12637) (GH-12645) The following arguments can be passed as keyword arguments for passing to other function if the corresponding required argument is passed as positional: - "func" in functools.partialmethod(), weakref.finalize(), profile.Profile.runcall(), cProfile.Profile.runcall(), bdb.Bdb.runcall(), trace.Trace.runfunc() and curses.wrapper(). - "function" in unittest.addModuleCleanup() and unittest.TestCase.addCleanup(). - "fn" in the submit() method of concurrent.futures.ThreadPoolExecutor and concurrent.futures.ProcessPoolExecutor. - "callback" in contextlib.ExitStack.callback(), contextlib.AsyncExitStack.callback() and contextlib.AsyncExitStack.push_async_callback(). - "c" and "typeid" in multiprocessing.managers.Server.create(). - "obj" in weakref.finalize(). 
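A minimal sketch of the resulting calling behaviour, adapted from the new test_submit_keyword test in this patch (capture() is the small helper the tests add; it simply returns its arguments):

    from concurrent.futures import ThreadPoolExecutor

    def capture(*args, **kwargs):
        return args, kwargs

    with ThreadPoolExecutor() as executor:
        # 'self' and 'fn' are no longer consumed by submit() itself;
        # they are forwarded to the submitted callable.
        future = executor.submit(capture, 1, self=2, fn=3)
        print(future.result())   # ((1,), {'self': 2, 'fn': 3})

        # Passing the callable via the keyword 'fn' keeps working for
        # backward compatibility (the master-branch change above
        # additionally emits a DeprecationWarning for this form).
        future = executor.submit(fn=capture, arg=1)
        print(future.result())   # ((), {'arg': 1})

The same pattern applies to the other functions listed above; for example, functools.partialmethod(capture, self=1, func=2) now forwards both keywords to capture() instead of failing with a TypeError about duplicate 'self'.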
(cherry picked from commit 42a139ed88c487f325a241c6ee8b308b3c045975) files: A Misc/NEWS.d/next/Library/2019-03-31-10-21-54.bpo-36492.f7vyUs.rst M Lib/bdb.py M Lib/cProfile.py M Lib/concurrent/futures/_base.py M Lib/concurrent/futures/process.py M Lib/concurrent/futures/thread.py M Lib/contextlib.py M Lib/curses/__init__.py M Lib/functools.py M Lib/multiprocessing/managers.py M Lib/profile.py M Lib/test/test_concurrent_futures.py M Lib/test/test_contextlib.py M Lib/test/test_contextlib_async.py M Lib/test/test_functools.py M Lib/test/test_trace.py M Lib/test/test_weakref.py M Lib/trace.py M Lib/unittest/case.py M Lib/weakref.py diff --git a/Lib/bdb.py b/Lib/bdb.py index 25c6260c47c7..caf207733b73 100644 --- a/Lib/bdb.py +++ b/Lib/bdb.py @@ -616,11 +616,23 @@ def runctx(self, cmd, globals, locals): # This method is more useful to debug a single function call. - def runcall(self, func, *args, **kwds): + def runcall(*args, **kwds): """Debug a single function call. Return the result of the function call. """ + if len(args) >= 2: + self, func, *args = args + elif not args: + raise TypeError("descriptor 'runcall' of 'Bdb' object " + "needs an argument") + elif 'func' in kwds: + func = kwds.pop('func') + self, *args = args + else: + raise TypeError('runcall expected at least 1 positional argument, ' + 'got %d' % (len(args)-1)) + self.reset() sys.settrace(self.trace_dispatch) res = None diff --git a/Lib/cProfile.py b/Lib/cProfile.py index f6e423b3dd1a..9485d3d515e4 100755 --- a/Lib/cProfile.py +++ b/Lib/cProfile.py @@ -103,7 +103,19 @@ def runctx(self, cmd, globals, locals): return self # This method is more useful to profile a single function call. - def runcall(self, func, *args, **kw): + def runcall(*args, **kw): + if len(args) >= 2: + self, func, *args = args + elif not args: + raise TypeError("descriptor 'runcall' of 'Profile' object " + "needs an argument") + elif 'func' in kw: + func = kw.pop('func') + self, *args = args + else: + raise TypeError('runcall expected at least 1 positional argument, ' + 'got %d' % (len(args)-1)) + self.enable() try: return func(*args, **kw) diff --git a/Lib/concurrent/futures/_base.py b/Lib/concurrent/futures/_base.py index 61c81bb7fe68..0b847307a328 100644 --- a/Lib/concurrent/futures/_base.py +++ b/Lib/concurrent/futures/_base.py @@ -536,7 +536,7 @@ def set_exception(self, exception): class Executor(object): """This is an abstract base class for concrete asynchronous executors.""" - def submit(self, fn, *args, **kwargs): + def submit(*args, **kwargs): """Submits a callable to be executed with the given arguments. Schedules the callable to be executed as fn(*args, **kwargs) and returns @@ -545,6 +545,15 @@ def submit(self, fn, *args, **kwargs): Returns: A Future representing the given call. 
""" + if len(args) >= 2: + pass + elif not args: + raise TypeError("descriptor 'submit' of 'Executor' object " + "needs an argument") + elif 'fn' not in kwargs: + raise TypeError('submit expected at least 1 positional argument, ' + 'got %d' % (len(args)-1)) + raise NotImplementedError() def map(self, fn, *iterables, timeout=None, chunksize=1): diff --git a/Lib/concurrent/futures/process.py b/Lib/concurrent/futures/process.py index ce7d642b098a..8a0ed98b3e88 100644 --- a/Lib/concurrent/futures/process.py +++ b/Lib/concurrent/futures/process.py @@ -593,7 +593,19 @@ def _adjust_process_count(self): p.start() self._processes[p.pid] = p - def submit(self, fn, *args, **kwargs): + def submit(*args, **kwargs): + if len(args) >= 2: + self, fn, *args = args + elif not args: + raise TypeError("descriptor 'submit' of 'ProcessPoolExecutor' object " + "needs an argument") + elif 'fn' in kwargs: + fn = kwargs.pop('fn') + self, *args = args + else: + raise TypeError('submit expected at least 1 positional argument, ' + 'got %d' % (len(args)-1)) + with self._shutdown_lock: if self._broken: raise BrokenProcessPool(self._broken) diff --git a/Lib/concurrent/futures/thread.py b/Lib/concurrent/futures/thread.py index 78359711d5d9..c7c9ef44c60b 100644 --- a/Lib/concurrent/futures/thread.py +++ b/Lib/concurrent/futures/thread.py @@ -142,7 +142,19 @@ def __init__(self, max_workers=None, thread_name_prefix='', self._initializer = initializer self._initargs = initargs - def submit(self, fn, *args, **kwargs): + def submit(*args, **kwargs): + if len(args) >= 2: + self, fn, *args = args + elif not args: + raise TypeError("descriptor 'submit' of 'ThreadPoolExecutor' object " + "needs an argument") + elif 'fn' in kwargs: + fn = kwargs.pop('fn') + self, *args = args + else: + raise TypeError('submit expected at least 1 positional argument, ' + 'got %d' % (len(args)-1)) + with self._shutdown_lock: if self._broken: raise BrokenThreadPool(self._broken) diff --git a/Lib/contextlib.py b/Lib/contextlib.py index 1ff8cdf1cecf..2d745ea3e3c5 100644 --- a/Lib/contextlib.py +++ b/Lib/contextlib.py @@ -378,7 +378,8 @@ def _exit_wrapper(exc_type, exc, tb): return _exit_wrapper @staticmethod - def _create_cb_wrapper(callback, *args, **kwds): + def _create_cb_wrapper(*args, **kwds): + callback, *args = args def _exit_wrapper(exc_type, exc, tb): callback(*args, **kwds) return _exit_wrapper @@ -427,11 +428,23 @@ def enter_context(self, cm): self._push_cm_exit(cm, _exit) return result - def callback(self, callback, *args, **kwds): + def callback(*args, **kwds): """Registers an arbitrary callback and arguments. Cannot suppress exceptions. 
""" + if len(args) >= 2: + self, callback, *args = args + elif not args: + raise TypeError("descriptor 'callback' of '_BaseExitStack' object " + "needs an argument") + elif 'callback' in kwds: + callback = kwds.pop('callback') + self, *args = args + else: + raise TypeError('callback expected at least 1 positional argument, ' + 'got %d' % (len(args)-1)) + _exit_wrapper = self._create_cb_wrapper(callback, *args, **kwds) # We changed the signature, so using @wraps is not appropriate, but @@ -540,7 +553,8 @@ def _create_async_exit_wrapper(cm, cm_exit): return _exit_wrapper @staticmethod - def _create_async_cb_wrapper(callback, *args, **kwds): + def _create_async_cb_wrapper(*args, **kwds): + callback, *args = args async def _exit_wrapper(exc_type, exc, tb): await callback(*args, **kwds) return _exit_wrapper @@ -575,11 +589,23 @@ def push_async_exit(self, exit): self._push_async_cm_exit(exit, exit_method) return exit # Allow use as a decorator - def push_async_callback(self, callback, *args, **kwds): + def push_async_callback(*args, **kwds): """Registers an arbitrary coroutine function and arguments. Cannot suppress exceptions. """ + if len(args) >= 2: + self, callback, *args = args + elif not args: + raise TypeError("descriptor 'push_async_callback' of " + "'AsyncExitStack' object needs an argument") + elif 'callback' in kwds: + callback = kwds.pop('callback') + self, *args = args + else: + raise TypeError('push_async_callback expected at least 1 ' + 'positional argument, got %d' % (len(args)-1)) + _exit_wrapper = self._create_async_cb_wrapper(callback, *args, **kwds) # We changed the signature, so using @wraps is not appropriate, but diff --git a/Lib/curses/__init__.py b/Lib/curses/__init__.py index 47378741acc2..b3373af70e88 100644 --- a/Lib/curses/__init__.py +++ b/Lib/curses/__init__.py @@ -60,7 +60,7 @@ def start_color(): # raises an exception, wrapper() will restore the terminal to a sane state so # you can read the resulting traceback. -def wrapper(func, *args, **kwds): +def wrapper(*args, **kwds): """Wrapper function that initializes curses and calls another function, restoring normal keyboard/screen behavior on error. The callable object 'func' is then passed the main window 'stdscr' @@ -68,6 +68,14 @@ def wrapper(func, *args, **kwds): wrapper(). """ + if args: + func, *args = args + elif 'func' in kwds: + func = kwds.pop('func') + else: + raise TypeError('wrapper expected at least 1 positional argument, ' + 'got %d' % len(args)) + try: # Initialize curses stdscr = initscr() diff --git a/Lib/functools.py b/Lib/functools.py index b734899b56de..1daa1d177591 100644 --- a/Lib/functools.py +++ b/Lib/functools.py @@ -323,7 +323,20 @@ class partialmethod(object): callables as instance methods. 
""" - def __init__(self, func, *args, **keywords): + def __init__(*args, **keywords): + if len(args) >= 2: + self, func, *args = args + elif not args: + raise TypeError("descriptor '__init__' of partialmethod " + "needs an argument") + elif 'func' in keywords: + func = keywords.pop('func') + self, *args = args + else: + raise TypeError("type 'partialmethod' takes at least one argument, " + "got %d" % (len(args)-1)) + args = tuple(args) + if not callable(func) and not hasattr(func, "__get__"): raise TypeError("{!r} is not callable or a descriptor" .format(func)) diff --git a/Lib/multiprocessing/managers.py b/Lib/multiprocessing/managers.py index 4ae8ddc77018..8e8d28f4b7cd 100644 --- a/Lib/multiprocessing/managers.py +++ b/Lib/multiprocessing/managers.py @@ -351,10 +351,30 @@ def shutdown(self, c): finally: self.stop_event.set() - def create(self, c, typeid, *args, **kwds): + def create(*args, **kwds): ''' Create a new shared object and return its id ''' + if len(args) >= 3: + self, c, typeid, *args = args + elif not args: + raise TypeError("descriptor 'create' of 'Server' object " + "needs an argument") + else: + if 'typeid' not in kwds: + raise TypeError('create expected at least 2 positional ' + 'arguments, got %d' % (len(args)-1)) + typeid = kwds.pop('typeid') + if len(args) >= 2: + self, c, *args = args + else: + if 'c' not in kwds: + raise TypeError('create expected at least 2 positional ' + 'arguments, got %d' % (len(args)-1)) + c = kwds.pop('c') + self, *args = args + args = tuple(args) + with self.mutex: callable, exposed, method_to_typeid, proxytype = \ self.registry[typeid] @@ -576,10 +596,13 @@ def _run_server(cls, registry, address, authkey, serializer, writer, util.info('manager serving at %r', server.address) server.serve_forever() - def _create(self, typeid, *args, **kwds): + def _create(*args, **kwds): ''' Create a new shared object; return the token and exposed tuple ''' + self, typeid, *args = args + args = tuple(args) + assert self._state.value == State.STARTED, 'server not yet started' conn = self._Client(self._address, authkey=self._authkey) try: diff --git a/Lib/profile.py b/Lib/profile.py index 0340a7907bfd..c26f0c8bfa93 100755 --- a/Lib/profile.py +++ b/Lib/profile.py @@ -425,7 +425,19 @@ def runctx(self, cmd, globals, locals): return self # This method is more useful to profile a single function call. 
- def runcall(self, func, *args, **kw): + def runcall(*args, **kw): + if len(args) >= 2: + self, func, *args = args + elif not args: + raise TypeError("descriptor 'runcall' of 'Profile' object " + "needs an argument") + elif 'func' in kw: + func = kw.pop('func') + self, *args = args + else: + raise TypeError('runcall expected at least 1 positional argument, ' + 'got %d' % (len(args)-1)) + self.set_cmd(repr(func)) sys.setprofile(self.dispatcher) try: diff --git a/Lib/test/test_concurrent_futures.py b/Lib/test/test_concurrent_futures.py index 7e5a4edf80b2..59aa0f46f59f 100644 --- a/Lib/test/test_concurrent_futures.py +++ b/Lib/test/test_concurrent_futures.py @@ -49,6 +49,9 @@ def create_future(state=PENDING, exception=None, result=None): def mul(x, y): return x * y +def capture(*args, **kwargs): + return args, kwargs + def sleep_and_raise(t): time.sleep(t) raise Exception('this is an exception') @@ -658,6 +661,12 @@ def test_submit(self): def test_submit_keyword(self): future = self.executor.submit(mul, 2, y=8) self.assertEqual(16, future.result()) + future = self.executor.submit(capture, 1, self=2, fn=3) + self.assertEqual(future.result(), ((1,), {'self': 2, 'fn': 3})) + future = self.executor.submit(fn=capture, arg=1) + self.assertEqual(future.result(), ((), {'arg': 1})) + with self.assertRaises(TypeError): + self.executor.submit(arg=1) def test_map(self): self.assertEqual( diff --git a/Lib/test/test_contextlib.py b/Lib/test/test_contextlib.py index 2a44404a603e..30c2e27b3c7e 100644 --- a/Lib/test/test_contextlib.py +++ b/Lib/test/test_contextlib.py @@ -575,6 +575,7 @@ def test_callback(self): ((), dict(example=1)), ((1,), dict(example=1)), ((1,2), dict(example=1)), + ((1,2), dict(self=3, callback=4)), ] result = [] def _exit(*args, **kwds): @@ -597,6 +598,15 @@ def _exit(*args, **kwds): self.assertIsNone(wrapper[1].__doc__, _exit.__doc__) self.assertEqual(result, expected) + result = [] + with self.exit_stack() as stack: + with self.assertRaises(TypeError): + stack.callback(arg=1) + with self.assertRaises(TypeError): + self.exit_stack.callback(arg=2) + stack.callback(callback=_exit, arg=3) + self.assertEqual(result, [((), {'arg': 3})]) + def test_push(self): exc_raised = ZeroDivisionError def _expect_exc(exc_type, exc, exc_tb): diff --git a/Lib/test/test_contextlib_async.py b/Lib/test/test_contextlib_async.py index e34fc38d1180..cc38dcf8c45c 100644 --- a/Lib/test/test_contextlib_async.py +++ b/Lib/test/test_contextlib_async.py @@ -329,6 +329,15 @@ def setUp(self): self.assertEqual(result, expected) + result = [] + async with AsyncExitStack() as stack: + with self.assertRaises(TypeError): + stack.push_async_callback(arg=1) + with self.assertRaises(TypeError): + self.exit_stack.push_async_callback(arg=2) + stack.push_async_callback(callback=_exit, arg=3) + self.assertEqual(result, [((), {'arg': 3})]) + @_async_test async def test_async_push(self): exc_raised = ZeroDivisionError diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index a91c6348e709..a7625d609039 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -464,6 +464,7 @@ class A(object): positional = functools.partialmethod(capture, 1) keywords = functools.partialmethod(capture, a=2) both = functools.partialmethod(capture, 3, b=4) + spec_keywords = functools.partialmethod(capture, self=1, func=2) nested = functools.partialmethod(positional, 5) @@ -497,6 +498,8 @@ def test_arg_combinations(self): self.assertEqual(self.A.both(self.a, 5, c=6), ((self.a, 3, 5), {'b': 4, 'c': 6})) + 
self.assertEqual(self.a.spec_keywords(), ((self.a,), {'self': 1, 'func': 2})) + def test_nested(self): self.assertEqual(self.a.nested(), ((self.a, 1, 5), {})) self.assertEqual(self.a.nested(6), ((self.a, 1, 5, 6), {})) @@ -550,6 +553,13 @@ def test_invalid_args(self): with self.assertRaises(TypeError): class B(object): method = functools.partialmethod(None, 1) + with self.assertRaises(TypeError): + class B: + method = functools.partialmethod() + class B: + method = functools.partialmethod(func=capture, a=1) + b = B() + self.assertEqual(b.method(2, x=3), ((b, 2), {'a': 1, 'x': 3})) def test_repr(self): self.assertEqual(repr(vars(self.A)['both']), diff --git a/Lib/test/test_trace.py b/Lib/test/test_trace.py index 63f474179d68..66fff1cef470 100644 --- a/Lib/test/test_trace.py +++ b/Lib/test/test_trace.py @@ -70,6 +70,9 @@ def traced_func_calling_generator(): def traced_doubler(num): return num * 2 +def traced_capturer(*args, **kwargs): + return args, kwargs + def traced_caller_list_comprehension(): k = 10 mylist = [traced_doubler(i) for i in range(k)] @@ -238,6 +241,14 @@ def test_simple_caller(self): } self.assertEqual(self.tracer.results().calledfuncs, expected) + def test_arg_errors(self): + res = self.tracer.runfunc(traced_capturer, 1, 2, self=3, func=4) + self.assertEqual(res, ((1, 2), {'self': 3, 'func': 4})) + res = self.tracer.runfunc(func=traced_capturer, arg=1) + self.assertEqual(res, ((), {'arg': 1})) + with self.assertRaises(TypeError): + self.tracer.runfunc() + def test_loop_caller_importing(self): self.tracer.runfunc(traced_func_importing_caller, 1) diff --git a/Lib/test/test_weakref.py b/Lib/test/test_weakref.py index 1fac08dafc7d..ad7a6acfcc7d 100644 --- a/Lib/test/test_weakref.py +++ b/Lib/test/test_weakref.py @@ -1839,6 +1839,33 @@ def add(x,y,z): self.assertEqual(f.alive, False) self.assertEqual(res, [199]) + def test_arg_errors(self): + def fin(*args, **kwargs): + res.append((args, kwargs)) + + a = self.A() + + res = [] + f = weakref.finalize(a, fin, 1, 2, func=3, obj=4) + self.assertEqual(f.peek(), (a, fin, (1, 2), {'func': 3, 'obj': 4})) + f() + self.assertEqual(res, [((1, 2), {'func': 3, 'obj': 4})]) + + res = [] + f = weakref.finalize(a, func=fin, arg=1) + self.assertEqual(f.peek(), (a, fin, (), {'arg': 1})) + f() + self.assertEqual(res, [((), {'arg': 1})]) + + res = [] + f = weakref.finalize(obj=a, func=fin, arg=1) + self.assertEqual(f.peek(), (a, fin, (), {'arg': 1})) + f() + self.assertEqual(res, [((), {'arg': 1})]) + + self.assertRaises(TypeError, weakref.finalize, a) + self.assertRaises(TypeError, weakref.finalize) + def test_order(self): a = self.A() res = [] diff --git a/Lib/trace.py b/Lib/trace.py index 0ed7ba95b520..206bd2b689f1 100755 --- a/Lib/trace.py +++ b/Lib/trace.py @@ -452,7 +452,19 @@ def runctx(self, cmd, globals=None, locals=None): sys.settrace(None) threading.settrace(None) - def runfunc(self, func, *args, **kw): + def runfunc(*args, **kw): + if len(args) >= 2: + self, func, *args = args + elif not args: + raise TypeError("descriptor 'runfunc' of 'Trace' object " + "needs an argument") + elif 'func' in kw: + func = kw.pop('func') + self, *args = args + else: + raise TypeError('runfunc expected at least 1 positional argument, ' + 'got %d' % (len(args)-1)) + result = None if not self.donothing: sys.settrace(self.globaltrace) diff --git a/Lib/unittest/case.py b/Lib/unittest/case.py index 758924d80113..811f5df23dd1 100644 --- a/Lib/unittest/case.py +++ b/Lib/unittest/case.py @@ -442,12 +442,25 @@ def addTypeEqualityFunc(self, typeobj, function): """ 
self._type_equality_funcs[typeobj] = function - def addCleanup(self, function, *args, **kwargs): + def addCleanup(*args, **kwargs): """Add a function, with arguments, to be called when the test is completed. Functions added are called on a LIFO basis and are called after tearDown on test failure or success. Cleanup items are called even if setUp fails (unlike tearDown).""" + if len(args) >= 2: + self, function, *args = args + elif not args: + raise TypeError("descriptor 'addCleanup' of 'TestCase' object " + "needs an argument") + elif 'function' in kwargs: + function = kwargs.pop('function') + self, *args = args + else: + raise TypeError('addCleanup expected at least 1 positional ' + 'argument, got %d' % (len(args)-1)) + args = tuple(args) + self._cleanups.append((function, args, kwargs)) def setUp(self): diff --git a/Lib/weakref.py b/Lib/weakref.py index 753f07291e20..59b3aa5621a3 100644 --- a/Lib/weakref.py +++ b/Lib/weakref.py @@ -527,7 +527,27 @@ class finalize: class _Info: __slots__ = ("weakref", "func", "args", "kwargs", "atexit", "index") - def __init__(self, obj, func, *args, **kwargs): + def __init__(*args, **kwargs): + if len(args) >= 3: + self, obj, func, *args = args + elif not args: + raise TypeError("descriptor '__init__' of 'finalize' object " + "needs an argument") + else: + if 'func' not in kwargs: + raise TypeError('finalize expected at least 2 positional ' + 'arguments, got %d' % (len(args)-1)) + func = kwargs.pop('func') + if len(args) >= 2: + self, obj, *args = args + else: + if 'obj' not in kwargs: + raise TypeError('finalize expected at least 2 positional ' + 'arguments, got %d' % (len(args)-1)) + obj = kwargs.pop('obj') + self, *args = args + args = tuple(args) + if not self._registered_with_atexit: # We may register the exit function more than once because # of a thread race, but that is harmless diff --git a/Misc/NEWS.d/next/Library/2019-03-31-10-21-54.bpo-36492.f7vyUs.rst b/Misc/NEWS.d/next/Library/2019-03-31-10-21-54.bpo-36492.f7vyUs.rst new file mode 100644 index 000000000000..749e2a87ab6c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-03-31-10-21-54.bpo-36492.f7vyUs.rst @@ -0,0 +1,5 @@ +Arbitrary keyword arguments (even with names "self" and "func") can now be +passed to some functions which should accept arbitrary keyword arguments and +pass them to other function (for example partialmethod(), TestCase.addCleanup() +and Profile.runcall()) if the required arguments are passed as positional +arguments. From webhook-mailer at python.org Mon Apr 1 04:56:23 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Mon, 01 Apr 2019 08:56:23 -0000 Subject: [Python-checkins] bpo-36026: make descr error message consistent (GH-11930) Message-ID: https://github.com/python/cpython/commit/62f9588663ebfea1735e9d142ef527395a6c2b95 commit: 62f9588663ebfea1735e9d142ef527395a6c2b95 branch: master author: Inada Naoki committer: GitHub date: 2019-04-01T17:56:11+09:00 summary: bpo-36026: make descr error message consistent (GH-11930) set.add(0) and set.add.__get__(0) now raise TypeError with same error message. 
files: M Lib/test/test_descr.py M Objects/descrobject.c diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py index 09eef8c56f30..e37a98417f50 100644 --- a/Lib/test/test_descr.py +++ b/Lib/test/test_descr.py @@ -1614,10 +1614,14 @@ class SubSpam(spam.spamlist): pass with self.assertRaises(TypeError) as cm: spam_cm(list) - self.assertEqual( - str(cm.exception), + expected_errmsg = ( "descriptor 'classmeth' requires a subtype of 'xxsubtype.spamlist' " "but received 'list'") + self.assertEqual(str(cm.exception), expected_errmsg) + + with self.assertRaises(TypeError) as cm: + spam_cm.__get__(None, list) + self.assertEqual(str(cm.exception), expected_errmsg) def test_staticmethods(self): # Testing static methods... @@ -1952,6 +1956,29 @@ class E(object): self.assertEqual(E().foo.__func__, C.foo) # i.e., unbound self.assertTrue(repr(C.foo.__get__(C(1))).startswith("d_type)) { PyErr_Format(PyExc_TypeError, - "descriptor '%V' for '%s' objects " - "doesn't apply to '%s' object", + "descriptor '%V' for '%.100s' objects " + "doesn't apply to a '%.100s' object", descr_name((PyDescrObject *)descr), "?", descr->d_type->tp_name, obj->ob_type->tp_name); @@ -99,7 +99,7 @@ classmethod_get(PyMethodDescrObject *descr, PyObject *obj, PyObject *type) else { /* Wot - no type?! */ PyErr_Format(PyExc_TypeError, - "descriptor '%V' for type '%s' " + "descriptor '%V' for type '%.100s' " "needs either an object or a type", descr_name((PyDescrObject *)descr), "?", PyDescr_TYPE(descr)->tp_name); @@ -108,8 +108,8 @@ classmethod_get(PyMethodDescrObject *descr, PyObject *obj, PyObject *type) } if (!PyType_Check(type)) { PyErr_Format(PyExc_TypeError, - "descriptor '%V' for type '%s' " - "needs a type, not a '%s' as arg 2", + "descriptor '%V' for type '%.100s' " + "needs a type, not a '%.100s' as arg 2", descr_name((PyDescrObject *)descr), "?", PyDescr_TYPE(descr)->tp_name, type->ob_type->tp_name); @@ -117,8 +117,8 @@ classmethod_get(PyMethodDescrObject *descr, PyObject *obj, PyObject *type) } if (!PyType_IsSubtype((PyTypeObject *)type, PyDescr_TYPE(descr))) { PyErr_Format(PyExc_TypeError, - "descriptor '%V' for type '%s' " - "doesn't apply to type '%s'", + "descriptor '%V' requires a subtype of '%.100s' " + "but received '%.100s'", descr_name((PyDescrObject *)descr), "?", PyDescr_TYPE(descr)->tp_name, ((PyTypeObject *)type)->tp_name); @@ -181,7 +181,7 @@ descr_setcheck(PyDescrObject *descr, PyObject *obj, PyObject *value, if (!PyObject_TypeCheck(obj, descr->d_type)) { PyErr_Format(PyExc_TypeError, "descriptor '%V' for '%.100s' objects " - "doesn't apply to '%.100s' object", + "doesn't apply to a '%.100s' object", descr_name(descr), "?", descr->d_type->tp_name, obj->ob_type->tp_name); @@ -239,9 +239,8 @@ methoddescr_call(PyMethodDescrObject *descr, PyObject *args, PyObject *kwargs) if (!_PyObject_RealIsSubclass((PyObject *)Py_TYPE(self), (PyObject *)PyDescr_TYPE(descr))) { PyErr_Format(PyExc_TypeError, - "descriptor '%V' " - "requires a '%.100s' object " - "but received a '%.100s'", + "descriptor '%V' for '%.100s' objects " + "doesn't apply to a '%.100s' object", descr_name((PyDescrObject *)descr), "?", PyDescr_TYPE(descr)->tp_name, self->ob_type->tp_name); @@ -278,9 +277,8 @@ _PyMethodDescr_FastCallKeywords(PyObject *descrobj, if (!_PyObject_RealIsSubclass((PyObject *)Py_TYPE(self), (PyObject *)PyDescr_TYPE(descr))) { PyErr_Format(PyExc_TypeError, - "descriptor '%V' " - "requires a '%.100s' object " - "but received a '%.100s'", + "descriptor '%V' for '%.100s' objects " + "doesn't apply to a '%.100s' object", 
descr_name((PyDescrObject *)descr), "?", PyDescr_TYPE(descr)->tp_name, self->ob_type->tp_name); From webhook-mailer at python.org Mon Apr 1 05:35:35 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Mon, 01 Apr 2019 09:35:35 -0000 Subject: [Python-checkins] bpo-20844: open script file with "rb" mode (GH-12616) Message-ID: https://github.com/python/cpython/commit/10654c19b5e6efdf3c529ff9bf7bcab89bdca1c1 commit: 10654c19b5e6efdf3c529ff9bf7bcab89bdca1c1 branch: master author: Inada Naoki committer: GitHub date: 2019-04-01T18:35:20+09:00 summary: bpo-20844: open script file with "rb" mode (GH-12616) files: A Misc/NEWS.d/next/Core and Builtins/2019-03-29-18-47-50.bpo-20844.ge-7SM.rst M Doc/c-api/veryhigh.rst M Lib/test/test_cmd_line_script.py M Modules/main.c diff --git a/Doc/c-api/veryhigh.rst b/Doc/c-api/veryhigh.rst index c891f6320f94..317093e95615 100644 --- a/Doc/c-api/veryhigh.rst +++ b/Doc/c-api/veryhigh.rst @@ -109,6 +109,10 @@ the same library that the Python runtime is using. (:func:`sys.getfilesystemencoding`). If *closeit* is true, the file is closed before PyRun_SimpleFileExFlags returns. + .. note:: + On Windows, *fp* should be opened as binary mode (e.g. ``fopen(filename, "rb")``. + Otherwise, Python may not handle script file with LF line ending correctly. + .. c:function:: int PyRun_InteractiveOne(FILE *fp, const char *filename) diff --git a/Lib/test/test_cmd_line_script.py b/Lib/test/test_cmd_line_script.py index 85d2a4be069b..d138ca027c68 100644 --- a/Lib/test/test_cmd_line_script.py +++ b/Lib/test/test_cmd_line_script.py @@ -409,6 +409,23 @@ def test_issue8202_dash_m_file_ignored(self): script_name, script_name, script_dir, '', importlib.machinery.SourceFileLoader) + def test_issue20884(self): + # On Windows, script with encoding cookie and LF line ending + # will be failed. + with support.temp_dir() as script_dir: + script_name = os.path.join(script_dir, "issue20884.py") + with open(script_name, "w", newline='\n') as f: + f.write("#coding: iso-8859-1\n") + f.write('"""\n') + for _ in range(30): + f.write('x'*80 + '\n') + f.write('"""\n') + + with support.change_cwd(path=script_dir): + rc, out, err = assert_python_ok(script_name) + self.assertEqual(b"", out) + self.assertEqual(b"", err) + @contextlib.contextmanager def setup_test_pkg(self, *args): with support.temp_dir() as script_dir, \ diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-03-29-18-47-50.bpo-20844.ge-7SM.rst b/Misc/NEWS.d/next/Core and Builtins/2019-03-29-18-47-50.bpo-20844.ge-7SM.rst new file mode 100644 index 000000000000..22a400ae7c8f --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2019-03-29-18-47-50.bpo-20844.ge-7SM.rst @@ -0,0 +1,2 @@ +Fix running script with encoding cookie and LF line ending +may fail on Windows. 
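The symptom being fixed is easiest to reproduce with a script shaped like the one written by the new test_issue20884 regression test; a rough standalone sketch (the temporary paths and subprocess call are illustrative, not part of the patch):

    import os
    import subprocess
    import sys
    import tempfile

    with tempfile.TemporaryDirectory() as tmpdir:
        script = os.path.join(tmpdir, "issue20884.py")
        # An encoding cookie plus LF-only line endings and a long docstring,
        # exactly the shape of script the new test writes.
        with open(script, "w", newline="\n") as f:
            f.write("#coding: iso-8859-1\n")
            f.write('"""\n')
            for _ in range(30):
                f.write("x" * 80 + "\n")
            f.write('"""\n')
        # Before the "rb" change in Modules/main.c below, running such a
        # script could fail on Windows; afterwards it exits cleanly with
        # no output.
        result = subprocess.run([sys.executable, script],
                                capture_output=True)
        print(result.returncode, result.stdout, result.stderr)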
diff --git a/Modules/main.c b/Modules/main.c index 42d2c3c2aeec..6a7f735ed692 100644 --- a/Modules/main.c +++ b/Modules/main.c @@ -283,7 +283,7 @@ static int pymain_run_file(_PyCoreConfig *config, PyCompilerFlags *cf) { const wchar_t *filename = config->run_filename; - FILE *fp = _Py_wfopen(filename, L"r"); + FILE *fp = _Py_wfopen(filename, L"rb"); if (fp == NULL) { char *cfilename_buffer; const char *cfilename; From webhook-mailer at python.org Mon Apr 1 08:03:02 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Mon, 01 Apr 2019 12:03:02 -0000 Subject: [Python-checkins] bpo-20844: open script file with "rb" mode (GH-12616) Message-ID: https://github.com/python/cpython/commit/8384670615a90418fc52c3881242b7c10d1f2b13 commit: 8384670615a90418fc52c3881242b7c10d1f2b13 branch: 3.7 author: Inada Naoki committer: GitHub date: 2019-04-01T21:02:51+09:00 summary: bpo-20844: open script file with "rb" mode (GH-12616) (cherry picked from commit 10654c19b5e6efdf3c529ff9bf7bcab89bdca1c1) files: A Misc/NEWS.d/next/Core and Builtins/2019-03-29-18-47-50.bpo-20844.ge-7SM.rst M Doc/c-api/veryhigh.rst M Lib/test/test_cmd_line_script.py M Modules/main.c diff --git a/Doc/c-api/veryhigh.rst b/Doc/c-api/veryhigh.rst index c891f6320f94..317093e95615 100644 --- a/Doc/c-api/veryhigh.rst +++ b/Doc/c-api/veryhigh.rst @@ -109,6 +109,10 @@ the same library that the Python runtime is using. (:func:`sys.getfilesystemencoding`). If *closeit* is true, the file is closed before PyRun_SimpleFileExFlags returns. + .. note:: + On Windows, *fp* should be opened as binary mode (e.g. ``fopen(filename, "rb")``. + Otherwise, Python may not handle script file with LF line ending correctly. + .. c:function:: int PyRun_InteractiveOne(FILE *fp, const char *filename) diff --git a/Lib/test/test_cmd_line_script.py b/Lib/test/test_cmd_line_script.py index 5ec9bbbb1230..b2632602d454 100644 --- a/Lib/test/test_cmd_line_script.py +++ b/Lib/test/test_cmd_line_script.py @@ -387,6 +387,23 @@ def test_issue8202_dash_m_file_ignored(self): script_name, script_name, script_dir, '', importlib.machinery.SourceFileLoader) + def test_issue20884(self): + # On Windows, script with encoding cookie and LF line ending + # will be failed. + with support.temp_dir() as script_dir: + script_name = os.path.join(script_dir, "issue20884.py") + with open(script_name, "w", newline='\n') as f: + f.write("#coding: iso-8859-1\n") + f.write('"""\n') + for _ in range(30): + f.write('x'*80 + '\n') + f.write('"""\n') + + with support.change_cwd(path=script_dir): + rc, out, err = assert_python_ok(script_name) + self.assertEqual(b"", out) + self.assertEqual(b"", err) + @contextlib.contextmanager def setup_test_pkg(self, *args): with support.temp_dir() as script_dir, \ diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-03-29-18-47-50.bpo-20844.ge-7SM.rst b/Misc/NEWS.d/next/Core and Builtins/2019-03-29-18-47-50.bpo-20844.ge-7SM.rst new file mode 100644 index 000000000000..22a400ae7c8f --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2019-03-29-18-47-50.bpo-20844.ge-7SM.rst @@ -0,0 +1,2 @@ +Fix running script with encoding cookie and LF line ending +may fail on Windows. 
diff --git a/Modules/main.c b/Modules/main.c index 9011bd1f69cb..e3683b941754 100644 --- a/Modules/main.c +++ b/Modules/main.c @@ -1534,7 +1534,7 @@ pymain_open_filename(_PyMain *pymain) const _PyCoreConfig *config = &_PyGILState_GetInterpreterStateUnsafe()->core_config; FILE* fp; - fp = _Py_wfopen(pymain->filename, L"r"); + fp = _Py_wfopen(pymain->filename, L"rb"); if (fp == NULL) { char *cfilename_buffer; const char *cfilename; From webhook-mailer at python.org Mon Apr 1 10:36:28 2019 From: webhook-mailer at python.org (Guido van Rossum) Date: Mon, 01 Apr 2019 14:36:28 -0000 Subject: [Python-checkins] bpo-36495: Fix two out-of-bounds array reads (GH-12641) Message-ID: https://github.com/python/cpython/commit/a4d78362397fc3bced6ea80fbc7b5f4827aec55e commit: a4d78362397fc3bced6ea80fbc7b5f4827aec55e branch: master author: Brad Larsen committer: Guido van Rossum date: 2019-04-01T07:36:05-07:00 summary: bpo-36495: Fix two out-of-bounds array reads (GH-12641) Research and fix by @bradlarsen. files: M Python/ast.c diff --git a/Python/ast.c b/Python/ast.c index e9154fecff06..913e53ad7937 100644 --- a/Python/ast.c +++ b/Python/ast.c @@ -1400,7 +1400,7 @@ handle_keywordonly_args(struct compiling *c, const node *n, int start, goto error; asdl_seq_SET(kwonlyargs, j++, arg); i += 1; /* the name */ - if (TYPE(CHILD(n, i)) == COMMA) + if (i < NCH(n) && TYPE(CHILD(n, i)) == COMMA) i += 1; /* the comma, if present */ break; case TYPE_COMMENT: @@ -1599,7 +1599,7 @@ ast_for_arguments(struct compiling *c, const node *n) if (!kwarg) return NULL; i += 2; /* the double star and the name */ - if (TYPE(CHILD(n, i)) == COMMA) + if (i < NCH(n) && TYPE(CHILD(n, i)) == COMMA) i += 1; /* the comma, if present */ break; case TYPE_COMMENT: From webhook-mailer at python.org Mon Apr 1 11:09:00 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 01 Apr 2019 15:09:00 -0000 Subject: [Python-checkins] bpo-36157:Document PyInterpreterState_Main() (GH-12238) Message-ID: https://github.com/python/cpython/commit/8c61739defd88c7f79e86537886c33745843ce01 commit: 8c61739defd88c7f79e86537886c33745843ce01 branch: master author: Joannah Nanjekye <33177550+nanjekyejoannah at users.noreply.github.com> committer: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> date: 2019-04-01T08:08:43-07:00 summary: bpo-36157:Document PyInterpreterState_Main() (GH-12238) I have added documentation for `PyInterpreterState_Main()`. I chose to place it under Advanced Debugger Support together with similar functions like `PyInterpreterState_Head()`, `PyInterpreterState_Next(`), and `PyInterpreterState_ThreadHead()` . https://bugs.python.org/issue36157 files: A Misc/NEWS.d/next/Documentation/2019-03-08-15-39-47.bpo-36157.nF1pP1.rst M Doc/c-api/init.rst diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst index b87e999fe02b..7c1f0ffa44a3 100644 --- a/Doc/c-api/init.rst +++ b/Doc/c-api/init.rst @@ -1395,6 +1395,11 @@ These functions are only intended to be used by advanced debugging tools. Return the interpreter state object at the head of the list of all such objects. +.. c:function:: PyInterpreterState* PyInterpreterState_Main() + + Return the main interpreter state object. + + .. 
c:function:: PyInterpreterState* PyInterpreterState_Next(PyInterpreterState *interp) Return the next interpreter state object after *interp* from the list of all diff --git a/Misc/NEWS.d/next/Documentation/2019-03-08-15-39-47.bpo-36157.nF1pP1.rst b/Misc/NEWS.d/next/Documentation/2019-03-08-15-39-47.bpo-36157.nF1pP1.rst new file mode 100644 index 000000000000..ff0293e8407f --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2019-03-08-15-39-47.bpo-36157.nF1pP1.rst @@ -0,0 +1 @@ +Added Documention for PyInterpreterState_Main(). \ No newline at end of file From webhook-mailer at python.org Mon Apr 1 11:15:17 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 01 Apr 2019 15:15:17 -0000 Subject: [Python-checkins] bpo-36157:Document PyInterpreterState_Main() (GH-12238) Message-ID: https://github.com/python/cpython/commit/35fc38e5e82d18ceec95af9af2103319497e2eaf commit: 35fc38e5e82d18ceec95af9af2103319497e2eaf branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-01T08:15:10-07:00 summary: bpo-36157:Document PyInterpreterState_Main() (GH-12238) I have added documentation for `PyInterpreterState_Main()`. I chose to place it under Advanced Debugger Support together with similar functions like `PyInterpreterState_Head()`, `PyInterpreterState_Next(`), and `PyInterpreterState_ThreadHead()` . https://bugs.python.org/issue36157 (cherry picked from commit 8c61739defd88c7f79e86537886c33745843ce01) Co-authored-by: Joannah Nanjekye <33177550+nanjekyejoannah at users.noreply.github.com> files: A Misc/NEWS.d/next/Documentation/2019-03-08-15-39-47.bpo-36157.nF1pP1.rst M Doc/c-api/init.rst diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst index 2cdc0527dff9..a88873bd8f23 100644 --- a/Doc/c-api/init.rst +++ b/Doc/c-api/init.rst @@ -1383,6 +1383,11 @@ These functions are only intended to be used by advanced debugging tools. Return the interpreter state object at the head of the list of all such objects. +.. c:function:: PyInterpreterState* PyInterpreterState_Main() + + Return the main interpreter state object. + + .. c:function:: PyInterpreterState* PyInterpreterState_Next(PyInterpreterState *interp) Return the next interpreter state object after *interp* from the list of all diff --git a/Misc/NEWS.d/next/Documentation/2019-03-08-15-39-47.bpo-36157.nF1pP1.rst b/Misc/NEWS.d/next/Documentation/2019-03-08-15-39-47.bpo-36157.nF1pP1.rst new file mode 100644 index 000000000000..ff0293e8407f --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2019-03-08-15-39-47.bpo-36157.nF1pP1.rst @@ -0,0 +1 @@ +Added Documention for PyInterpreterState_Main(). 
\ No newline at end of file From webhook-mailer at python.org Mon Apr 1 12:10:26 2019 From: webhook-mailer at python.org (Steve Dower) Date: Mon, 01 Apr 2019 16:10:26 -0000 Subject: [Python-checkins] Temporary workaround for an ACL issue on Ubuntu on Azure Pipelines (GH-12649) Message-ID: https://github.com/python/cpython/commit/b4bcefe5fe689ef5caf9c775f72c6d150f3e8ece commit: b4bcefe5fe689ef5caf9c775f72c6d150f3e8ece branch: master author: Steve Dower committer: GitHub date: 2019-04-01T09:10:20-07:00 summary: Temporary workaround for an ACL issue on Ubuntu on Azure Pipelines (GH-12649) files: M .azure-pipelines/posix-steps.yml diff --git a/.azure-pipelines/posix-steps.yml b/.azure-pipelines/posix-steps.yml index 2affb50dc10e..3ed3abd02a71 100644 --- a/.azure-pipelines/posix-steps.yml +++ b/.azure-pipelines/posix-steps.yml @@ -10,6 +10,10 @@ steps: clean: true fetchDepth: 5 +# Work around a known issue affecting Ubuntu VMs on Pipelines +- script: sudo setfacl -Rb /home/vsts + displayName: 'Workaround ACL issue' + - script: ${{ parameters.sudo_dependencies }} ./.azure-pipelines/posix-deps-${{ parameters.dependencies }}.sh $(openssl_version) displayName: 'Install dependencies' From webhook-mailer at python.org Mon Apr 1 12:31:04 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 01 Apr 2019 16:31:04 -0000 Subject: [Python-checkins] Temporary workaround for an ACL issue on Ubuntu on Azure Pipelines (GH-12649) Message-ID: https://github.com/python/cpython/commit/85730b84fbe6675c5f61cb23514e06362f8053ec commit: 85730b84fbe6675c5f61cb23514e06362f8053ec branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-01T09:30:58-07:00 summary: Temporary workaround for an ACL issue on Ubuntu on Azure Pipelines (GH-12649) (cherry picked from commit b4bcefe5fe689ef5caf9c775f72c6d150f3e8ece) Co-authored-by: Steve Dower files: M .azure-pipelines/posix-steps.yml diff --git a/.azure-pipelines/posix-steps.yml b/.azure-pipelines/posix-steps.yml index a4160e5a1bf5..15e3f0b9ad1b 100644 --- a/.azure-pipelines/posix-steps.yml +++ b/.azure-pipelines/posix-steps.yml @@ -10,6 +10,10 @@ steps: clean: true fetchDepth: 5 +# Work around a known issue affecting Ubuntu VMs on Pipelines +- script: sudo setfacl -Rb /home/vsts + displayName: 'Workaround ACL issue' + - script: ${{ parameters.sudo_dependencies }} ./.azure-pipelines/posix-deps-${{ parameters.dependencies }}.sh $(openssl_version) displayName: 'Install dependencies' From webhook-mailer at python.org Mon Apr 1 17:59:53 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 01 Apr 2019 21:59:53 -0000 Subject: [Python-checkins] bpo-13120: fix typo with test_issue13120() method name (GH-12250) Message-ID: https://github.com/python/cpython/commit/9139f926a8d8e5b71830cb7e10b0807836b5e9a4 commit: 9139f926a8d8e5b71830cb7e10b0807836b5e9a4 branch: master author: Daniel Hahler committer: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> date: 2019-04-01T14:59:50-07:00 summary: bpo-13120: fix typo with test_issue13120() method name (GH-12250) Incorrect issue number '13210' added in 539ee5da6f. 
https://bugs.python.org/issue13120 files: M Lib/test/test_pdb.py diff --git a/Lib/test/test_pdb.py b/Lib/test/test_pdb.py index 7e03df02946b..56d823249544 100644 --- a/Lib/test/test_pdb.py +++ b/Lib/test/test_pdb.py @@ -1266,9 +1266,9 @@ def bar(): any('main.py(5)foo()->None' in l for l in stdout.splitlines()), 'Fail to step into the caller after a return') - def test_issue13210(self): - # invoking "continue" on a non-main thread triggered an exception - # inside signal.signal + def test_issue13120(self): + # Invoking "continue" on a non-main thread triggered an exception + # inside signal.signal. with open(support.TESTFN, 'wb') as f: f.write(textwrap.dedent(""" From webhook-mailer at python.org Tue Apr 2 00:52:45 2019 From: webhook-mailer at python.org (Raymond Hettinger) Date: Tue, 02 Apr 2019 04:52:45 -0000 Subject: [Python-checkins] bpo-36377: Specify that range() can not be compared (GH-12468) Message-ID: https://github.com/python/cpython/commit/b00479d42aaaed589d8b374bf5e5c6f443b0b499 commit: b00479d42aaaed589d8b374bf5e5c6f443b0b499 branch: master author: Emmanuel Arias committer: Raymond Hettinger date: 2019-04-01T21:52:42-07:00 summary: bpo-36377: Specify that range() can not be compared (GH-12468) files: M Doc/tutorial/datastructures.rst diff --git a/Doc/tutorial/datastructures.rst b/Doc/tutorial/datastructures.rst index b4db3f015912..01e437bb5da8 100644 --- a/Doc/tutorial/datastructures.rst +++ b/Doc/tutorial/datastructures.rst @@ -678,18 +678,17 @@ intended. Comparing Sequences and Other Types =================================== - -Sequence objects may be compared to other objects with the same sequence type. -The comparison uses *lexicographical* ordering: first the first two items are -compared, and if they differ this determines the outcome of the comparison; if -they are equal, the next two items are compared, and so on, until either -sequence is exhausted. If two items to be compared are themselves sequences of -the same type, the lexicographical comparison is carried out recursively. If -all items of two sequences compare equal, the sequences are considered equal. -If one sequence is an initial sub-sequence of the other, the shorter sequence is -the smaller (lesser) one. Lexicographical ordering for strings uses the Unicode -code point number to order individual characters. Some examples of comparisons -between sequences of the same type:: +Sequence objects typically may be compared to other objects with the same sequence +type. The comparison uses *lexicographical* ordering: first the first two +items are compared, and if they differ this determines the outcome of the +comparison; if they are equal, the next two items are compared, and so on, until +either sequence is exhausted. If two items to be compared are themselves +sequences of the same type, the lexicographical comparison is carried out +recursively. If all items of two sequences compare equal, the sequences are +considered equal. If one sequence is an initial sub-sequence of the other, the +shorter sequence is the smaller (lesser) one. Lexicographical ordering for +strings uses the Unicode code point number to order individual characters. 
+Some examples of comparisons between sequences of the same type:: (1, 2, 3) < (1, 2, 4) [1, 2, 3] < [1, 2, 4] From webhook-mailer at python.org Tue Apr 2 04:16:34 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 02 Apr 2019 08:16:34 -0000 Subject: [Python-checkins] fix confusing argument name in unicodeobject.c (GH-12653) Message-ID: https://github.com/python/cpython/commit/e6a0e804bfad4a5ef03c04d0e3d9a94472966f9a commit: e6a0e804bfad4a5ef03c04d0e3d9a94472966f9a branch: master author: Max Bernstein committer: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> date: 2019-04-02T01:16:22-07:00 summary: fix confusing argument name in unicodeobject.c (GH-12653) files: M Objects/exceptions.c diff --git a/Objects/exceptions.c b/Objects/exceptions.c index ad2a54a2b6b0..b40ecb78d456 100644 --- a/Objects/exceptions.c +++ b/Objects/exceptions.c @@ -1765,9 +1765,9 @@ PyUnicodeDecodeError_GetEnd(PyObject *exc, Py_ssize_t *end) int -PyUnicodeTranslateError_GetEnd(PyObject *exc, Py_ssize_t *start) +PyUnicodeTranslateError_GetEnd(PyObject *exc, Py_ssize_t *end) { - return PyUnicodeEncodeError_GetEnd(exc, start); + return PyUnicodeEncodeError_GetEnd(exc, end); } From webhook-mailer at python.org Tue Apr 2 04:17:30 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 02 Apr 2019 08:17:30 -0000 Subject: [Python-checkins] bpo-13120: fix typo with test_issue13120() method name (GH-12250) Message-ID: https://github.com/python/cpython/commit/5ca4fe04784fa278c319a3764c9a755f49cc0944 commit: 5ca4fe04784fa278c319a3764c9a755f49cc0944 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-02T01:17:25-07:00 summary: bpo-13120: fix typo with test_issue13120() method name (GH-12250) Incorrect issue number '13210' added in 539ee5da6f. https://bugs.python.org/issue13120 (cherry picked from commit 9139f926a8d8e5b71830cb7e10b0807836b5e9a4) Co-authored-by: Daniel Hahler files: M Lib/test/test_pdb.py diff --git a/Lib/test/test_pdb.py b/Lib/test/test_pdb.py index f573f5f54b0f..c2c4ca248d1b 100644 --- a/Lib/test/test_pdb.py +++ b/Lib/test/test_pdb.py @@ -1262,9 +1262,9 @@ def bar(): any('main.py(5)foo()->None' in l for l in stdout.splitlines()), 'Fail to step into the caller after a return') - def test_issue13210(self): - # invoking "continue" on a non-main thread triggered an exception - # inside signal.signal + def test_issue13120(self): + # Invoking "continue" on a non-main thread triggered an exception + # inside signal.signal. with open(support.TESTFN, 'wb') as f: f.write(textwrap.dedent(""" From webhook-mailer at python.org Tue Apr 2 05:08:52 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Tue, 02 Apr 2019 09:08:52 -0000 Subject: [Python-checkins] bpo-35838: document optionxform must be idempotent (GH-12656) Message-ID: https://github.com/python/cpython/commit/04694a306b8f4ab54ef5fc4ba673c26fa53b0ac1 commit: 04694a306b8f4ab54ef5fc4ba673c26fa53b0ac1 branch: master author: Inada Naoki committer: GitHub date: 2019-04-02T18:08:46+09:00 summary: bpo-35838: document optionxform must be idempotent (GH-12656) files: M Doc/library/configparser.rst diff --git a/Doc/library/configparser.rst b/Doc/library/configparser.rst index 185b4a10ec99..04b52dc7b215 100644 --- a/Doc/library/configparser.rst +++ b/Doc/library/configparser.rst @@ -721,6 +721,12 @@ be overridden by subclasses or by attribute assignment. >>> list(custom['Section2'].keys()) ['AnotherKey'] + .. 
note:: + The optionxform function transforms option names to a canonical form. + This should be an idempotent function: if the name is already in + canonical form, it should be returned unchanged. + + .. attribute:: ConfigParser.SECTCRE A compiled regular expression used to parse section headers. The default From webhook-mailer at python.org Tue Apr 2 05:29:20 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 02 Apr 2019 09:29:20 -0000 Subject: [Python-checkins] bpo-35838: document optionxform must be idempotent (GH-12656) Message-ID: https://github.com/python/cpython/commit/9a838c593f6ada69a37025d7ded8ac822816a74c commit: 9a838c593f6ada69a37025d7ded8ac822816a74c branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-02T02:29:16-07:00 summary: bpo-35838: document optionxform must be idempotent (GH-12656) (cherry picked from commit 04694a306b8f4ab54ef5fc4ba673c26fa53b0ac1) Co-authored-by: Inada Naoki files: M Doc/library/configparser.rst diff --git a/Doc/library/configparser.rst b/Doc/library/configparser.rst index 95cc352010e0..68b663ff7f65 100644 --- a/Doc/library/configparser.rst +++ b/Doc/library/configparser.rst @@ -723,6 +723,12 @@ be overridden by subclasses or by attribute assignment. >>> list(custom['Section2'].keys()) ['AnotherKey'] + .. note:: + The optionxform function transforms option names to a canonical form. + This should be an idempotent function: if the name is already in + canonical form, it should be returned unchanged. + + .. attribute:: ConfigParser.SECTCRE A compiled regular expression used to parse section headers. The default From webhook-mailer at python.org Tue Apr 2 05:30:13 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Tue, 02 Apr 2019 09:30:13 -0000 Subject: [Python-checkins] bpo-36473: add maximum iteration check for dict .values() and .items() (GH-12619) Message-ID: https://github.com/python/cpython/commit/b8311cf5e5d72f8a8aa688b7da1760d6a74a4d72 commit: b8311cf5e5d72f8a8aa688b7da1760d6a74a4d72 branch: master author: Thomas Perl committer: Inada Naoki date: 2019-04-02T18:30:10+09:00 summary: bpo-36473: add maximum iteration check for dict .values() and .items() (GH-12619) files: M Lib/test/test_dict.py M Misc/NEWS.d/next/Core and Builtins/2019-03-27-23-53-00.bpo-36452.xhK2lT.rst M Objects/dictobject.c diff --git a/Lib/test/test_dict.py b/Lib/test/test_dict.py index eecdc8beec69..13be857f7ab6 100644 --- a/Lib/test/test_dict.py +++ b/Lib/test/test_dict.py @@ -477,7 +477,25 @@ def test_mutating_iteration_delete(self): with self.assertRaises(RuntimeError): for i in d: del d[0] - d[1] = 1 + d[0] = 0 + + def test_mutating_iteration_delete_over_values(self): + # change dict content during iteration + d = {} + d[0] = 0 + with self.assertRaises(RuntimeError): + for i in d.values(): + del d[0] + d[0] = 0 + + def test_mutating_iteration_delete_over_items(self): + # change dict content during iteration + d = {} + d[0] = 0 + with self.assertRaises(RuntimeError): + for i in d.items(): + del d[0] + d[0] = 0 def test_mutating_lookup(self): # changing dict during a lookup (issue #14417) diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-03-27-23-53-00.bpo-36452.xhK2lT.rst b/Misc/NEWS.d/next/Core and Builtins/2019-03-27-23-53-00.bpo-36452.xhK2lT.rst index 37c0c503edec..26d85682f882 100644 --- a/Misc/NEWS.d/next/Core and Builtins/2019-03-27-23-53-00.bpo-36452.xhK2lT.rst +++ b/Misc/NEWS.d/next/Core and Builtins/2019-03-27-23-53-00.bpo-36452.xhK2lT.rst @@ -1 +1,5 @@ 
-Changing `dict` keys during iteration will now be detected in certain corner cases where the number of keys isn't changed (but they keys themselves are), and a `RuntimeError` will be raised. \ No newline at end of file +Changing ``dict`` keys during iteration of the dict itself, ``keys()``, +``values()``, or ``items()`` will now be detected in certain corner cases where +keys are deleted/added so that the number of keys isn't changed. +A `RuntimeError` will be raised after ``len(dict)`` iterations. +Contributed by Thomas Perl. diff --git a/Objects/dictobject.c b/Objects/dictobject.c index 7ea979cd1761..bba27dd321a9 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -3630,6 +3630,12 @@ dictiter_iternextvalue(dictiterobject *di) goto fail; value = entry_ptr->me_value; } + // We found an element, but did not expect it + if (di->len == 0) { + PyErr_SetString(PyExc_RuntimeError, + "dictionary keys changed during iteration"); + goto fail; + } di->di_pos = i+1; di->len--; Py_INCREF(value); @@ -3713,6 +3719,12 @@ dictiter_iternextitem(dictiterobject *di) key = entry_ptr->me_key; value = entry_ptr->me_value; } + // We found an element, but did not expect it + if (di->len == 0) { + PyErr_SetString(PyExc_RuntimeError, + "dictionary keys changed during iteration"); + goto fail; + } di->di_pos = i+1; di->len--; Py_INCREF(key); From webhook-mailer at python.org Tue Apr 2 06:48:01 2019 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Tue, 02 Apr 2019 10:48:01 -0000 Subject: [Python-checkins] bpo-36504: Fix signed integer overflow in _ctypes.c's PyCArrayType_new(). (GH-12660) Message-ID: https://github.com/python/cpython/commit/487b73ab39c80157474821ef9083f51e0846bd62 commit: 487b73ab39c80157474821ef9083f51e0846bd62 branch: master author: Zackery Spytz committer: Serhiy Storchaka date: 2019-04-02T13:47:51+03:00 summary: bpo-36504: Fix signed integer overflow in _ctypes.c's PyCArrayType_new(). (GH-12660) files: A Misc/NEWS.d/next/Core and Builtins/2019-04-02-04-10-32.bpo-36504.k_V8Bm.rst M Lib/ctypes/test/test_arrays.py M Modules/_ctypes/_ctypes.c diff --git a/Lib/ctypes/test/test_arrays.py b/Lib/ctypes/test/test_arrays.py index 6cfda8b7d2e6..0fc5d7ebf841 100644 --- a/Lib/ctypes/test/test_arrays.py +++ b/Lib/ctypes/test/test_arrays.py @@ -197,6 +197,12 @@ class T(Array): _type_ = c_int _length_ = 0 + def test_bpo36504_signed_int_overflow(self): + # The overflow check in PyCArrayType_new() could cause signed integer + # overflow. + with self.assertRaises(OverflowError): + c_char * sys.maxsize * 2 + @unittest.skipUnless(sys.maxsize > 2**32, 'requires 64bit platform') @bigmemtest(size=_2G, memuse=1, dry_run=False) def test_large_array(self, size): diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-04-02-04-10-32.bpo-36504.k_V8Bm.rst b/Misc/NEWS.d/next/Core and Builtins/2019-04-02-04-10-32.bpo-36504.k_V8Bm.rst new file mode 100644 index 000000000000..8ac209d4a789 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2019-04-02-04-10-32.bpo-36504.k_V8Bm.rst @@ -0,0 +1 @@ +Fix signed integer overflow in _ctypes.c's ``PyCArrayType_new()``. 
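The _ctypes.c hunk that follows replaces the old ``length * itemsize < 0`` test, which itself relied on signed overflow (undefined behaviour in C), with a division-based guard. A small sketch of that pattern in isolation is given below; the function and variable names are illustrative assumptions, not taken from the patch:

    #include <Python.h>
    #include <assert.h>

    /* Overflow-safe check that length * itemsize fits in Py_ssize_t:
       compare length against PY_SSIZE_T_MAX / itemsize instead of
       inspecting the (possibly already overflowed) product. */
    static int
    array_size_fits(Py_ssize_t length, Py_ssize_t itemsize)
    {
        assert(itemsize > 0);
        return length <= PY_SSIZE_T_MAX / itemsize;
    }
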
diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c index b3a20309472d..ac071bbb708b 100644 --- a/Modules/_ctypes/_ctypes.c +++ b/Modules/_ctypes/_ctypes.c @@ -1518,7 +1518,7 @@ PyCArrayType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) } itemsize = itemdict->size; - if (length * itemsize < 0) { + if (length > PY_SSIZE_T_MAX / itemsize) { PyErr_SetString(PyExc_OverflowError, "array too large"); goto error; From webhook-mailer at python.org Tue Apr 2 10:03:55 2019 From: webhook-mailer at python.org (Petr Viktorin) Date: Tue, 02 Apr 2019 14:03:55 -0000 Subject: [Python-checkins] bpo-33261: guard access to __code__ attribute in inspect (GH-6448) Message-ID: https://github.com/python/cpython/commit/fcef60f59d04c63b3540b4c4886226098c1bacd1 commit: fcef60f59d04c63b3540b4c4886226098c1bacd1 branch: master author: Jeroen Demeyer committer: Petr Viktorin date: 2019-04-02T16:03:42+02:00 summary: bpo-33261: guard access to __code__ attribute in inspect (GH-6448) files: A Misc/NEWS.d/next/Library/2018-04-11-11-41-52.bpo-33291.-xLGf8.rst M Lib/inspect.py M Lib/test/inspect_fodder.py M Lib/test/test_inspect.py diff --git a/Lib/inspect.py b/Lib/inspect.py index 8c398bd3534c..d8475c63f901 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -168,23 +168,30 @@ def isfunction(object): __kwdefaults__ dict of keyword only parameters with defaults""" return isinstance(object, types.FunctionType) +def _has_code_flag(f, flag): + """Return true if ``f`` is a function (or a method or functools.partial + wrapper wrapping a function) whose code object has the given ``flag`` + set in its flags.""" + while ismethod(f): + f = f.__func__ + f = functools._unwrap_partial(f) + if not isfunction(f): + return False + return bool(f.__code__.co_flags & flag) + def isgeneratorfunction(obj): """Return true if the object is a user-defined generator function. Generator function objects provide the same attributes as functions. See help(isfunction) for a list of attributes.""" - obj = functools._unwrap_partial(obj) - return bool((isfunction(obj) or ismethod(obj)) and - obj.__code__.co_flags & CO_GENERATOR) + return _has_code_flag(obj, CO_GENERATOR) def iscoroutinefunction(obj): """Return true if the object is a coroutine function. Coroutine functions are defined with "async def" syntax. """ - obj = functools._unwrap_partial(obj) - return bool(((isfunction(obj) or ismethod(obj)) and - obj.__code__.co_flags & CO_COROUTINE)) + return _has_code_flag(obj, CO_COROUTINE) def isasyncgenfunction(obj): """Return true if the object is an asynchronous generator function. @@ -192,9 +199,7 @@ def isasyncgenfunction(obj): Asynchronous generator functions are defined with "async def" syntax and have "yield" expressions in their body. 
""" - obj = functools._unwrap_partial(obj) - return bool((isfunction(obj) or ismethod(obj)) and - obj.__code__.co_flags & CO_ASYNC_GENERATOR) + return _has_code_flag(obj, CO_ASYNC_GENERATOR) def isasyncgen(object): """Return true if the object is an asynchronous generator.""" diff --git a/Lib/test/inspect_fodder.py b/Lib/test/inspect_fodder.py index ff3f0e4b73b9..667507768ccb 100644 --- a/Lib/test/inspect_fodder.py +++ b/Lib/test/inspect_fodder.py @@ -80,3 +80,14 @@ def contradiction(self): raise Exception() except: tb = sys.exc_info()[2] + +class Callable: + def __call__(self, *args): + return args + + def as_method_of(self, obj): + from types import MethodType + return MethodType(self, obj) + +custom_method = Callable().as_method_of(42) +del Callable diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py index bc675aa5df21..7d74746b48b4 100644 --- a/Lib/test/test_inspect.py +++ b/Lib/test/test_inspect.py @@ -146,6 +146,7 @@ def test_excluding_predicates(self): self.istest(inspect.isfunction, 'mod.spam') self.istest(inspect.isfunction, 'mod.StupidGit.abuse') self.istest(inspect.ismethod, 'git.argue') + self.istest(inspect.ismethod, 'mod.custom_method') self.istest(inspect.ismodule, 'mod') self.istest(inspect.isdatadescriptor, 'collections.defaultdict.default_factory') self.istest(inspect.isgenerator, '(x for x in range(2))') diff --git a/Misc/NEWS.d/next/Library/2018-04-11-11-41-52.bpo-33291.-xLGf8.rst b/Misc/NEWS.d/next/Library/2018-04-11-11-41-52.bpo-33291.-xLGf8.rst new file mode 100644 index 000000000000..1ffb9ddccbb0 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2018-04-11-11-41-52.bpo-33291.-xLGf8.rst @@ -0,0 +1,3 @@ +Do not raise AttributeError when calling the inspect functions +isgeneratorfunction, iscoroutinefunction, isasyncgenfunction on a method +created from an arbitrary callable. Instead, return False. From webhook-mailer at python.org Tue Apr 2 13:59:11 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 02 Apr 2019 17:59:11 -0000 Subject: [Python-checkins] bpo-32413: Add documentation that at the module level, locals(), globals() are the same dictionary (GH-5004) Message-ID: https://github.com/python/cpython/commit/1c5fa5af8a95f25119e45e40a4ed8183d06f4a5b commit: 1c5fa5af8a95f25119e45e40a4ed8183d06f4a5b branch: master author: Srinivas Reddy Thatiparthy (?????????? ?????? ?????????) committer: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> date: 2019-04-02T10:58:50-07:00 summary: bpo-32413: Add documentation that at the module level, locals(), globals() are the same dictionary (GH-5004) https://bugs.python.org/issue32413 files: M Doc/library/functions.rst diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst index 6342ee3bb08f..613e4f74ac41 100644 --- a/Doc/library/functions.rst +++ b/Doc/library/functions.rst @@ -843,7 +843,8 @@ are always available. They are listed here in alphabetical order. Update and return a dictionary representing the current local symbol table. Free variables are returned by :func:`locals` when it is called in function - blocks, but not in class blocks. + blocks, but not in class blocks. Note that at the module level, :func:`locals` + and :func:`globals` are the same dictionary. .. 
note:: The contents of this dictionary should not be modified; changes may not From webhook-mailer at python.org Tue Apr 2 14:14:57 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 02 Apr 2019 18:14:57 -0000 Subject: [Python-checkins] bpo-32413: Add documentation that at the module level, locals(), globals() are the same dictionary (GH-5004) Message-ID: https://github.com/python/cpython/commit/ef516d11c1a0f885dba0aba8cf5366502077cdd4 commit: ef516d11c1a0f885dba0aba8cf5366502077cdd4 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-02T11:14:50-07:00 summary: bpo-32413: Add documentation that at the module level, locals(), globals() are the same dictionary (GH-5004) https://bugs.python.org/issue32413 (cherry picked from commit 1c5fa5af8a95f25119e45e40a4ed8183d06f4a5b) Co-authored-by: Srinivas Reddy Thatiparthy (?????????? ?????? ?????????) files: M Doc/library/functions.rst diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst index 2ed6fb3ba2b0..11147525d7e8 100644 --- a/Doc/library/functions.rst +++ b/Doc/library/functions.rst @@ -842,7 +842,8 @@ are always available. They are listed here in alphabetical order. Update and return a dictionary representing the current local symbol table. Free variables are returned by :func:`locals` when it is called in function - blocks, but not in class blocks. + blocks, but not in class blocks. Note that at the module level, :func:`locals` + and :func:`globals` are the same dictionary. .. note:: The contents of this dictionary should not be modified; changes may not From webhook-mailer at python.org Tue Apr 2 17:47:53 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 02 Apr 2019 21:47:53 -0000 Subject: [Python-checkins] Have UserDict.__init__() implicitly check for updating w/ bool(kwargs) instead of len() (GH-12139) Message-ID: https://github.com/python/cpython/commit/76b387bf7402863c5e64e3459e2f91ddc3b9d2d3 commit: 76b387bf7402863c5e64e3459e2f91ddc3b9d2d3 branch: master author: Slam <3lnc.slam at gmail.com> committer: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> date: 2019-04-02T14:47:41-07:00 summary: Have UserDict.__init__() implicitly check for updating w/ bool(kwargs) instead of len() (GH-12139) Semantically the same, but more idiomatic by checking against `kwargs` instead of `len(kwargs)`. 
files: M Lib/collections/__init__.py diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py index cff75a48d627..9657c1cf83bc 100644 --- a/Lib/collections/__init__.py +++ b/Lib/collections/__init__.py @@ -1016,7 +1016,7 @@ def __init__(*args, **kwargs): self.data = {} if dict is not None: self.update(dict) - if len(kwargs): + if kwargs: self.update(kwargs) def __len__(self): return len(self.data) def __getitem__(self, key): From webhook-mailer at python.org Wed Apr 3 01:12:16 2019 From: webhook-mailer at python.org (Pablo Galindo) Date: Wed, 03 Apr 2019 05:12:16 -0000 Subject: [Python-checkins] bpo-36440: include node names in ParserError messages, instead of numeric IDs (GH-12565) Message-ID: https://github.com/python/cpython/commit/cb0748d3939c31168ab5d3b80e3677494497d5e3 commit: cb0748d3939c31168ab5d3b80e3677494497d5e3 branch: master author: tyomitch committer: Pablo Galindo date: 2019-04-03T01:12:07-04:00 summary: bpo-36440: include node names in ParserError messages, instead of numeric IDs (GH-12565) The error messages in the parser module are referring to numeric IDs for the nodes. To improve readability, use the node names when reporting errors. files: A Misc/NEWS.d/next/Core and Builtins/2019-03-25-13-45-19.bpo-36440.gkvzhi.rst M Lib/test/test_parser.py M Modules/parsermodule.c diff --git a/Lib/test/test_parser.py b/Lib/test/test_parser.py index bfa0a5a34e2d..ff587c365f2b 100644 --- a/Lib/test/test_parser.py +++ b/Lib/test/test_parser.py @@ -749,6 +749,22 @@ def test_illegal_encoding(self): with self.assertRaises(UnicodeEncodeError): parser.sequence2st(tree) + def test_invalid_node_id(self): + tree = (257, (269, (-7, ''))) + self.check_bad_tree(tree, "negative node id") + tree = (257, (269, (99, ''))) + self.check_bad_tree(tree, "invalid token id") + tree = (257, (269, (9999, (0, '')))) + self.check_bad_tree(tree, "invalid symbol id") + + def test_ParserError_message(self): + try: + parser.sequence2st((257,(269,(257,(0,''))))) + except parser.ParserError as why: + self.assertIn("compound_stmt", str(why)) # Expected + self.assertIn("file_input", str(why)) # Got + + class CompileTestCase(unittest.TestCase): diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-03-25-13-45-19.bpo-36440.gkvzhi.rst b/Misc/NEWS.d/next/Core and Builtins/2019-03-25-13-45-19.bpo-36440.gkvzhi.rst new file mode 100644 index 000000000000..372b1f771009 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2019-03-25-13-45-19.bpo-36440.gkvzhi.rst @@ -0,0 +1,2 @@ +Include node names in ``ParserError`` messages, instead of numeric IDs. +Patch by A. Skrobov. diff --git a/Modules/parsermodule.c b/Modules/parsermodule.c index fd330b5fbe02..a215c7ecacd7 100644 --- a/Modules/parsermodule.c +++ b/Modules/parsermodule.c @@ -24,10 +24,6 @@ * Py_[X]DECREF() and Py_[X]INCREF() macros. The lint annotations * look like "NOTE(...)". * - * To debug parser errors like - * "parser.ParserError: Expected node type 12, got 333." - * decode symbol numbers using the automatically-generated files - * Lib/symbol.h and Include/token.h. 
*/ #include "Python.h" /* general Python API */ @@ -666,6 +662,13 @@ validate_node(node *tree) for (pos = 0; pos < nch; ++pos) { node *ch = CHILD(tree, pos); int ch_type = TYPE(ch); + if ((ch_type >= NT_OFFSET + _PyParser_Grammar.g_ndfas) + || (ISTERMINAL(ch_type) && (ch_type >= N_TOKENS)) + || (ch_type < 0) + ) { + PyErr_Format(parser_error, "Unrecognized node type %d.", ch_type); + return 0; + } if (ch_type == suite && TYPE(tree) == funcdef) { /* This is the opposite hack of what we do in parser.c (search for func_body_suite), except we don't ever @@ -700,8 +703,10 @@ validate_node(node *tree) const char *expected_str = _PyParser_Grammar.g_ll.ll_label[a_label].lb_str; if (ISNONTERMINAL(next_type)) { - PyErr_Format(parser_error, "Expected node type %d, got %d.", - next_type, ch_type); + PyErr_Format(parser_error, "Expected %s, got %s.", + _PyParser_Grammar.g_dfa[next_type - NT_OFFSET].d_name, + ISTERMINAL(ch_type) ? _PyParser_TokenNames[ch_type] : + _PyParser_Grammar.g_dfa[ch_type - NT_OFFSET].d_name); } else if (expected_str != NULL) { PyErr_Format(parser_error, "Illegal terminal: expected '%s'.", From webhook-mailer at python.org Wed Apr 3 13:55:33 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 03 Apr 2019 17:55:33 -0000 Subject: [Python-checkins] bpo-36504: Fix signed integer overflow in _ctypes.c's PyCArrayType_new(). (GH-12660) Message-ID: https://github.com/python/cpython/commit/9c08eeb30ca0e551323467b62ae40e08e30839b3 commit: 9c08eeb30ca0e551323467b62ae40e08e30839b3 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-03T10:55:26-07:00 summary: bpo-36504: Fix signed integer overflow in _ctypes.c's PyCArrayType_new(). (GH-12660) (cherry picked from commit 487b73ab39c80157474821ef9083f51e0846bd62) Co-authored-by: Zackery Spytz files: A Misc/NEWS.d/next/Core and Builtins/2019-04-02-04-10-32.bpo-36504.k_V8Bm.rst M Lib/ctypes/test/test_arrays.py M Modules/_ctypes/_ctypes.c diff --git a/Lib/ctypes/test/test_arrays.py b/Lib/ctypes/test/test_arrays.py index 6e562cfd24e6..ca271341ed9a 100644 --- a/Lib/ctypes/test/test_arrays.py +++ b/Lib/ctypes/test/test_arrays.py @@ -183,6 +183,12 @@ class T(Array): _type_ = c_int _length_ = 1.87 + def test_bpo36504_signed_int_overflow(self): + # The overflow check in PyCArrayType_new() could cause signed integer + # overflow. + with self.assertRaises(OverflowError): + c_char * sys.maxsize * 2 + @unittest.skipUnless(sys.maxsize > 2**32, 'requires 64bit platform') @bigmemtest(size=_2G, memuse=1, dry_run=False) def test_large_array(self, size): diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-04-02-04-10-32.bpo-36504.k_V8Bm.rst b/Misc/NEWS.d/next/Core and Builtins/2019-04-02-04-10-32.bpo-36504.k_V8Bm.rst new file mode 100644 index 000000000000..8ac209d4a789 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2019-04-02-04-10-32.bpo-36504.k_V8Bm.rst @@ -0,0 +1 @@ +Fix signed integer overflow in _ctypes.c's ``PyCArrayType_new()``. 
diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c index be0b321bad03..153990309b7a 100644 --- a/Modules/_ctypes/_ctypes.c +++ b/Modules/_ctypes/_ctypes.c @@ -1466,7 +1466,7 @@ PyCArrayType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) } itemsize = itemdict->size; - if (length * itemsize < 0) { + if (length > PY_SSIZE_T_MAX / itemsize) { PyErr_SetString(PyExc_OverflowError, "array too large"); goto error; From webhook-mailer at python.org Wed Apr 3 14:35:10 2019 From: webhook-mailer at python.org (Pablo Galindo) Date: Wed, 03 Apr 2019 18:35:10 -0000 Subject: [Python-checkins] [3.7] bpo-36440: include node names in ParserError messages, instead of numeric IDs (GH-12565) (GH-12671) Message-ID: https://github.com/python/cpython/commit/513d142993bb8c13e6803727fa086e44eafc360f commit: 513d142993bb8c13e6803727fa086e44eafc360f branch: 3.7 author: Pablo Galindo committer: GitHub date: 2019-04-03T14:34:59-04:00 summary: [3.7] bpo-36440: include node names in ParserError messages, instead of numeric IDs (GH-12565) (GH-12671) The error messages in the parser module are referring to numeric IDs for the nodes. To improve readability, use the node names when reporting errors.. (cherry picked from commit cb0748d3939c31168ab5d3b80e3677494497d5e3) Co-authored-by: tyomitch files: A Misc/NEWS.d/next/Core and Builtins/2019-03-25-13-45-19.bpo-36440.gkvzhi.rst M Lib/test/test_parser.py M Modules/parsermodule.c diff --git a/Lib/test/test_parser.py b/Lib/test/test_parser.py index 94e454663573..e49afd2ba1d8 100644 --- a/Lib/test/test_parser.py +++ b/Lib/test/test_parser.py @@ -713,6 +713,22 @@ def test_illegal_encoding(self): with self.assertRaises(UnicodeEncodeError): parser.sequence2st(tree) + def test_invalid_node_id(self): + tree = (257, (269, (-7, ''))) + self.check_bad_tree(tree, "negative node id") + tree = (257, (269, (99, ''))) + self.check_bad_tree(tree, "invalid token id") + tree = (257, (269, (9999, (0, '')))) + self.check_bad_tree(tree, "invalid symbol id") + + def test_ParserError_message(self): + try: + parser.sequence2st((257,(269,(257,(0,''))))) + except parser.ParserError as why: + self.assertIn("simple_stmt", str(why)) # Expected + self.assertIn("file_input", str(why)) # Got + + class CompileTestCase(unittest.TestCase): diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-03-25-13-45-19.bpo-36440.gkvzhi.rst b/Misc/NEWS.d/next/Core and Builtins/2019-03-25-13-45-19.bpo-36440.gkvzhi.rst new file mode 100644 index 000000000000..372b1f771009 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2019-03-25-13-45-19.bpo-36440.gkvzhi.rst @@ -0,0 +1,2 @@ +Include node names in ``ParserError`` messages, instead of numeric IDs. +Patch by A. Skrobov. diff --git a/Modules/parsermodule.c b/Modules/parsermodule.c index 67c874267f24..799a813468f1 100644 --- a/Modules/parsermodule.c +++ b/Modules/parsermodule.c @@ -24,10 +24,6 @@ * Py_[X]DECREF() and Py_[X]INCREF() macros. The lint annotations * look like "NOTE(...)". * - * To debug parser errors like - * "parser.ParserError: Expected node type 12, got 333." - * decode symbol numbers using the automatically-generated files - * Lib/symbol.h and Include/token.h. 
*/ #include "Python.h" /* general Python API */ @@ -663,6 +659,13 @@ validate_node(node *tree) for (pos = 0; pos < nch; ++pos) { node *ch = CHILD(tree, pos); int ch_type = TYPE(ch); + if ((ch_type >= NT_OFFSET + _PyParser_Grammar.g_ndfas) + || (ISTERMINAL(ch_type) && (ch_type >= N_TOKENS)) + || (ch_type < 0) + ) { + PyErr_Format(parser_error, "Unrecognized node type %d.", ch_type); + return 0; + } for (arc = 0; arc < dfa_state->s_narcs; ++arc) { short a_label = dfa_state->s_arc[arc].a_lbl; assert(a_label < _PyParser_Grammar.g_ll.ll_nlabels); @@ -691,8 +694,10 @@ validate_node(node *tree) const char *expected_str = _PyParser_Grammar.g_ll.ll_label[a_label].lb_str; if (ISNONTERMINAL(next_type)) { - PyErr_Format(parser_error, "Expected node type %d, got %d.", - next_type, ch_type); + PyErr_Format(parser_error, "Expected %s, got %s.", + _PyParser_Grammar.g_dfa[next_type - NT_OFFSET].d_name, + ISTERMINAL(ch_type) ? _PyParser_TokenNames[ch_type] : + _PyParser_Grammar.g_dfa[ch_type - NT_OFFSET].d_name); } else if (expected_str != NULL) { PyErr_Format(parser_error, "Illegal terminal: expected '%s'.", From webhook-mailer at python.org Wed Apr 3 15:00:13 2019 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Wed, 03 Apr 2019 19:00:13 -0000 Subject: [Python-checkins] bpo-36504: Fix signed integer overflow in _ctypes.c's PyCArrayType_new(). (GH-12660) (GH-12678) Message-ID: https://github.com/python/cpython/commit/fd83a823a6f268dc97ee2bf7d8a1a88d948446e5 commit: fd83a823a6f268dc97ee2bf7d8a1a88d948446e5 branch: 2.7 author: Zackery Spytz committer: Serhiy Storchaka date: 2019-04-03T21:59:51+03:00 summary: bpo-36504: Fix signed integer overflow in _ctypes.c's PyCArrayType_new(). (GH-12660) (GH-12678) (cherry picked from commit 487b73ab39c80157474821ef9083f51e0846bd62) files: A Misc/NEWS.d/next/Core and Builtins/2019-04-02-04-10-32.bpo-36504.k_V8Bm.rst M Lib/ctypes/test/test_arrays.py M Modules/_ctypes/_ctypes.c diff --git a/Lib/ctypes/test/test_arrays.py b/Lib/ctypes/test/test_arrays.py index 53859a3e5e93..29fd422a68cc 100644 --- a/Lib/ctypes/test/test_arrays.py +++ b/Lib/ctypes/test/test_arrays.py @@ -134,6 +134,12 @@ class my_int(c_int): t2 = my_int * 1 self.assertIs(t1, t2) + def test_bpo36504_signed_int_overflow(self): + # The overflow check in PyCArrayType_new() could cause signed integer + # overflow. + with self.assertRaises(OverflowError): + c_char * sys.maxsize * 2 + @unittest.skipUnless(sys.maxsize > 2**32, 'requires 64bit platform') @precisionbigmemtest(size=_2G, memuse=1, dry_run=False) def test_large_array(self, size): diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-04-02-04-10-32.bpo-36504.k_V8Bm.rst b/Misc/NEWS.d/next/Core and Builtins/2019-04-02-04-10-32.bpo-36504.k_V8Bm.rst new file mode 100644 index 000000000000..8ac209d4a789 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2019-04-02-04-10-32.bpo-36504.k_V8Bm.rst @@ -0,0 +1 @@ +Fix signed integer overflow in _ctypes.c's ``PyCArrayType_new()``. 
diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c index 33e224386fb1..d608100243d8 100644 --- a/Modules/_ctypes/_ctypes.c +++ b/Modules/_ctypes/_ctypes.c @@ -1534,7 +1534,7 @@ PyCArrayType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) } itemsize = itemdict->size; - if (length * itemsize < 0) { + if (length > PY_SSIZE_T_MAX / itemsize) { PyErr_SetString(PyExc_OverflowError, "array too large"); Py_DECREF(stgdict); From webhook-mailer at python.org Thu Apr 4 00:36:53 2019 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Thu, 04 Apr 2019 04:36:53 -0000 Subject: [Python-checkins] bpo-36522: Print all values for headers with multiple values. (GH-12681) Message-ID: https://github.com/python/cpython/commit/461c416dd78a98f2bba7f323af8c9738e060b6f2 commit: 461c416dd78a98f2bba7f323af8c9738e060b6f2 branch: master author: Matt Houglum committer: Serhiy Storchaka date: 2019-04-04T07:36:47+03:00 summary: bpo-36522: Print all values for headers with multiple values. (GH-12681) files: A Misc/NEWS.d/next/Library/2019-04-03-20-46-47.bpo-36522.g5x3By.rst M Lib/http/client.py M Lib/test/test_httplib.py diff --git a/Lib/http/client.py b/Lib/http/client.py index 5aa178d7b127..1de151c38e92 100644 --- a/Lib/http/client.py +++ b/Lib/http/client.py @@ -320,8 +320,8 @@ def begin(self): self.headers = self.msg = parse_headers(self.fp) if self.debuglevel > 0: - for hdr in self.headers: - print("header:", hdr + ":", self.headers.get(hdr)) + for hdr, val in self.headers.items(): + print("header:", hdr + ":", val) # are we using the chunked-style of transfer encoding? tr_enc = self.headers.get("transfer-encoding") diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py index f816eac83b68..4755f8b4b9de 100644 --- a/Lib/test/test_httplib.py +++ b/Lib/test/test_httplib.py @@ -348,7 +348,8 @@ def test_headers_debuglevel(self): body = ( b'HTTP/1.1 200 OK\r\n' b'First: val\r\n' - b'Second: val\r\n' + b'Second: val1\r\n' + b'Second: val2\r\n' ) sock = FakeSocket(body) resp = client.HTTPResponse(sock, debuglevel=1) @@ -357,7 +358,8 @@ def test_headers_debuglevel(self): lines = output.getvalue().splitlines() self.assertEqual(lines[0], "reply: 'HTTP/1.1 200 OK\\r\\n'") self.assertEqual(lines[1], "header: First: val") - self.assertEqual(lines[2], "header: Second: val") + self.assertEqual(lines[2], "header: Second: val1") + self.assertEqual(lines[3], "header: Second: val2") class TransferEncodingTest(TestCase): diff --git a/Misc/NEWS.d/next/Library/2019-04-03-20-46-47.bpo-36522.g5x3By.rst b/Misc/NEWS.d/next/Library/2019-04-03-20-46-47.bpo-36522.g5x3By.rst new file mode 100644 index 000000000000..7869526b71c7 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-04-03-20-46-47.bpo-36522.g5x3By.rst @@ -0,0 +1 @@ +If *debuglevel* is set to >0 in :mod:`http.client`, print all values for headers with multiple values for the same header name. Patch by Matt Houglum. From webhook-mailer at python.org Thu Apr 4 04:26:05 2019 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Thu, 04 Apr 2019 08:26:05 -0000 Subject: [Python-checkins] bpo-36522: Print all values for headers with multiple values. (GH-12681) (GH-12682) Message-ID: https://github.com/python/cpython/commit/6f9cd142a20d0d2a5825a9db376ce0ddf1933694 commit: 6f9cd142a20d0d2a5825a9db376ce0ddf1933694 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: Serhiy Storchaka date: 2019-04-04T11:25:59+03:00 summary: bpo-36522: Print all values for headers with multiple values. 
(GH-12681) (GH-12682) (cherry picked from commit 461c416dd78a98f2bba7f323af8c9738e060b6f2) Co-authored-by: Matt Houglum files: A Misc/NEWS.d/next/Library/2019-04-03-20-46-47.bpo-36522.g5x3By.rst M Lib/http/client.py M Lib/test/test_httplib.py diff --git a/Lib/http/client.py b/Lib/http/client.py index 5aa178d7b127..1de151c38e92 100644 --- a/Lib/http/client.py +++ b/Lib/http/client.py @@ -320,8 +320,8 @@ def begin(self): self.headers = self.msg = parse_headers(self.fp) if self.debuglevel > 0: - for hdr in self.headers: - print("header:", hdr + ":", self.headers.get(hdr)) + for hdr, val in self.headers.items(): + print("header:", hdr + ":", val) # are we using the chunked-style of transfer encoding? tr_enc = self.headers.get("transfer-encoding") diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py index f816eac83b68..4755f8b4b9de 100644 --- a/Lib/test/test_httplib.py +++ b/Lib/test/test_httplib.py @@ -348,7 +348,8 @@ def test_headers_debuglevel(self): body = ( b'HTTP/1.1 200 OK\r\n' b'First: val\r\n' - b'Second: val\r\n' + b'Second: val1\r\n' + b'Second: val2\r\n' ) sock = FakeSocket(body) resp = client.HTTPResponse(sock, debuglevel=1) @@ -357,7 +358,8 @@ def test_headers_debuglevel(self): lines = output.getvalue().splitlines() self.assertEqual(lines[0], "reply: 'HTTP/1.1 200 OK\\r\\n'") self.assertEqual(lines[1], "header: First: val") - self.assertEqual(lines[2], "header: Second: val") + self.assertEqual(lines[2], "header: Second: val1") + self.assertEqual(lines[3], "header: Second: val2") class TransferEncodingTest(TestCase): diff --git a/Misc/NEWS.d/next/Library/2019-04-03-20-46-47.bpo-36522.g5x3By.rst b/Misc/NEWS.d/next/Library/2019-04-03-20-46-47.bpo-36522.g5x3By.rst new file mode 100644 index 000000000000..7869526b71c7 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-04-03-20-46-47.bpo-36522.g5x3By.rst @@ -0,0 +1 @@ +If *debuglevel* is set to >0 in :mod:`http.client`, print all values for headers with multiple values for the same header name. Patch by Matt Houglum. From webhook-mailer at python.org Thu Apr 4 06:38:47 2019 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Thu, 04 Apr 2019 10:38:47 -0000 Subject: [Python-checkins] Fix duplicated test case for re. (GH-12662) Message-ID: https://github.com/python/cpython/commit/ded4737989316653469763230036b04513cb62b3 commit: ded4737989316653469763230036b04513cb62b3 branch: master author: MakDon <379857334 at qq.com> committer: Serhiy Storchaka date: 2019-04-04T13:38:42+03:00 summary: Fix duplicated test case for re. 
(GH-12662) files: M Lib/test/re_tests.py diff --git a/Lib/test/re_tests.py b/Lib/test/re_tests.py index a379d33aec61..5ba9f1652e18 100755 --- a/Lib/test/re_tests.py +++ b/Lib/test/re_tests.py @@ -109,7 +109,7 @@ ('(?s)a.b', 'a\nb', SUCCEED, 'found', 'a\nb'), ('(?s)a.*b', 'acc\nccb', SUCCEED, 'found', 'acc\nccb'), ('(?s)a.{4,5}b', 'acc\nccb', SUCCEED, 'found', 'acc\nccb'), - ('(?s)a.b', 'a\nb', SUCCEED, 'found', 'a\nb'), + ('(?s)a.b', 'a\rb', SUCCEED, 'found', 'a\rb'), (')', '', SYNTAX_ERROR), # Unmatched right bracket ('', '', SUCCEED, 'found', ''), # Empty pattern From webhook-mailer at python.org Fri Apr 5 03:18:33 2019 From: webhook-mailer at python.org (Petr Viktorin) Date: Fri, 05 Apr 2019 07:18:33 -0000 Subject: [Python-checkins] Tools/importbench: Fix a misplaced stderr= (GH-12690) Message-ID: https://github.com/python/cpython/commit/2a4ce4387ff3e2b41c474e2b0522c8164162cafb commit: 2a4ce4387ff3e2b41c474e2b0522c8164162cafb branch: master author: Anthony Sottile committer: Petr Viktorin date: 2019-04-05T09:18:19+02:00 summary: Tools/importbench: Fix a misplaced stderr= (GH-12690) files: M Tools/importbench/importbench.py diff --git a/Tools/importbench/importbench.py b/Tools/importbench/importbench.py index e2ef75836ed4..6c4a537ad86e 100644 --- a/Tools/importbench/importbench.py +++ b/Tools/importbench/importbench.py @@ -183,8 +183,8 @@ def main(import_, options): benchmarks = [b] break else: - print('Unknown benchmark: {!r}'.format(options.benchmark, - file=sys.stderr)) + print('Unknown benchmark: {!r}'.format(options.benchmark), + file=sys.stderr) sys.exit(1) seconds = 1 seconds_plural = 's' if seconds > 1 else '' From webhook-mailer at python.org Fri Apr 5 04:54:31 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Fri, 05 Apr 2019 08:54:31 -0000 Subject: [Python-checkins] bpo-36404: recommend DeprecationWarning over PendingDeprecationWarning (GH-12505) Message-ID: https://github.com/python/cpython/commit/176d26364bb67801fa522f52f20cbe44420d6942 commit: 176d26364bb67801fa522f52f20cbe44420d6942 branch: master author: Inada Naoki committer: GitHub date: 2019-04-05T17:54:24+09:00 summary: bpo-36404: recommend DeprecationWarning over PendingDeprecationWarning (GH-12505) files: M Doc/library/exceptions.rst M Doc/library/warnings.rst diff --git a/Doc/library/exceptions.rst b/Doc/library/exceptions.rst index 57ed29145816..250938003c07 100644 --- a/Doc/library/exceptions.rst +++ b/Doc/library/exceptions.rst @@ -694,6 +694,14 @@ The following exceptions are used as warning categories; see the Base class for warnings about features which will be deprecated in the future. + .. note:: + PendingDeprecationWarning was introduced as an "ignored by default" + version of DeprecationWarning. But :exc:`DeprecationWarning` is also + ignored by default since Python 2.7 and 3.2. + There is not much difference between PendingDeprecationWarning and + DeprecationWarning nowadays. DeprecationWarning is recommended + in general. + .. exception:: SyntaxWarning diff --git a/Doc/library/warnings.rst b/Doc/library/warnings.rst index b04bd79e4bbd..d121f320d6a3 100644 --- a/Doc/library/warnings.rst +++ b/Doc/library/warnings.rst @@ -109,11 +109,11 @@ The following warnings category classes are currently defined: +----------------------------------+-----------------------------------------------+ .. versionchanged:: 3.7 - Previously :exc:`DeprecationWarning` and :exc:`FutureWarning` were - distinguished based on whether a feature was being removed entirely or - changing its behaviour. 
They are now distinguished based on their - intended audience and the way they're handled by the default warnings - filters. + Previously :exc:`DeprecationWarning` and :exc:`FutureWarning` were + distinguished based on whether a feature was being removed entirely or + changing its behaviour. They are now distinguished based on their + intended audience and the way they're handled by the default warnings + filters. .. _warning-filter: From webhook-mailer at python.org Fri Apr 5 05:07:30 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 05 Apr 2019 09:07:30 -0000 Subject: [Python-checkins] bpo-36404: recommend DeprecationWarning over PendingDeprecationWarning (GH-12505) Message-ID: https://github.com/python/cpython/commit/86fbe0287dd774022fd2b6c2dcbfbb5573a0b874 commit: 86fbe0287dd774022fd2b6c2dcbfbb5573a0b874 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-05T02:07:21-07:00 summary: bpo-36404: recommend DeprecationWarning over PendingDeprecationWarning (GH-12505) (cherry picked from commit 176d26364bb67801fa522f52f20cbe44420d6942) Co-authored-by: Inada Naoki files: M Doc/library/exceptions.rst M Doc/library/warnings.rst diff --git a/Doc/library/exceptions.rst b/Doc/library/exceptions.rst index 57ed29145816..250938003c07 100644 --- a/Doc/library/exceptions.rst +++ b/Doc/library/exceptions.rst @@ -694,6 +694,14 @@ The following exceptions are used as warning categories; see the Base class for warnings about features which will be deprecated in the future. + .. note:: + PendingDeprecationWarning was introduced as an "ignored by default" + version of DeprecationWarning. But :exc:`DeprecationWarning` is also + ignored by default since Python 2.7 and 3.2. + There is not much difference between PendingDeprecationWarning and + DeprecationWarning nowadays. DeprecationWarning is recommended + in general. + .. exception:: SyntaxWarning diff --git a/Doc/library/warnings.rst b/Doc/library/warnings.rst index b04bd79e4bbd..d121f320d6a3 100644 --- a/Doc/library/warnings.rst +++ b/Doc/library/warnings.rst @@ -109,11 +109,11 @@ The following warnings category classes are currently defined: +----------------------------------+-----------------------------------------------+ .. versionchanged:: 3.7 - Previously :exc:`DeprecationWarning` and :exc:`FutureWarning` were - distinguished based on whether a feature was being removed entirely or - changing its behaviour. They are now distinguished based on their - intended audience and the way they're handled by the default warnings - filters. + Previously :exc:`DeprecationWarning` and :exc:`FutureWarning` were + distinguished based on whether a feature was being removed entirely or + changing its behaviour. They are now distinguished based on their + intended audience and the way they're handled by the default warnings + filters. .. _warning-filter: From webhook-mailer at python.org Fri Apr 5 05:44:10 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Fri, 05 Apr 2019 09:44:10 -0000 Subject: [Python-checkins] bpo-36301: Fix _PyPreConfig_Read() compiler warning (GH-12695) Message-ID: https://github.com/python/cpython/commit/6a8c3139ae9ada89d4a95985ec7cf8bb7d03bc01 commit: 6a8c3139ae9ada89d4a95985ec7cf8bb7d03bc01 branch: master author: Victor Stinner committer: GitHub date: 2019-04-05T11:44:04+02:00 summary: bpo-36301: Fix _PyPreConfig_Read() compiler warning (GH-12695) Initialize init_utf8_mode earlier to fix a compiler warning. 
files: M Python/preconfig.c diff --git a/Python/preconfig.c b/Python/preconfig.c index 7ac645d7f08b..78377cf6e3a2 100644 --- a/Python/preconfig.c +++ b/Python/preconfig.c @@ -720,6 +720,11 @@ _PyPreConfig_Read(_PyPreConfig *config, const _PyArgv *args) _Py_SetLocaleFromEnv(LC_CTYPE); _PyPreCmdline cmdline = _PyPreCmdline_INIT; + int init_utf8_mode = Py_UTF8Mode; +#ifdef MS_WINDOWS + int init_legacy_encoding = Py_LegacyWindowsFSEncodingFlag; +#endif + if (args) { err = _PyPreCmdline_SetArgv(&cmdline, args); if (_Py_INIT_FAILED(err)) { @@ -727,10 +732,6 @@ _PyPreConfig_Read(_PyPreConfig *config, const _PyArgv *args) } } - int init_utf8_mode = Py_UTF8Mode; -#ifdef MS_WINDOWS - int init_legacy_encoding = Py_LegacyWindowsFSEncodingFlag; -#endif int locale_coerced = 0; int loops = 0; From webhook-mailer at python.org Fri Apr 5 06:08:53 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Fri, 05 Apr 2019 10:08:53 -0000 Subject: [Python-checkins] bpo-29202: improve dict iteration (GH-11900) Message-ID: https://github.com/python/cpython/commit/f66e336f455b5a6bb0ca857d61c43be410d0df13 commit: f66e336f455b5a6bb0ca857d61c43be410d0df13 branch: master author: Cheryl Sabella committer: Inada Naoki date: 2019-04-05T19:08:43+09:00 summary: bpo-29202: improve dict iteration (GH-11900) Use fewer iterations instead of iterating over the whole entry table. files: M Objects/dictobject.c diff --git a/Objects/dictobject.c b/Objects/dictobject.c index bba27dd321a9..c1187c2cb8ed 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -2147,7 +2147,7 @@ dict_keys(PyDictObject *mp) PyObject *v; Py_ssize_t i, j; PyDictKeyEntry *ep; - Py_ssize_t size, n, offset; + Py_ssize_t n, offset; PyObject **value_ptr; again: @@ -2163,7 +2163,6 @@ dict_keys(PyDictObject *mp) goto again; } ep = DK_ENTRIES(mp->ma_keys); - size = mp->ma_keys->dk_nentries; if (mp->ma_values) { value_ptr = mp->ma_values; offset = sizeof(PyObject *); @@ -2172,7 +2171,7 @@ dict_keys(PyDictObject *mp) value_ptr = &ep[0].me_value; offset = sizeof(PyDictKeyEntry); } - for (i = 0, j = 0; i < size; i++) { + for (i = 0, j = 0; j < n; i++) { if (*value_ptr != NULL) { PyObject *key = ep[i].me_key; Py_INCREF(key); @@ -2191,7 +2190,7 @@ dict_values(PyDictObject *mp) PyObject *v; Py_ssize_t i, j; PyDictKeyEntry *ep; - Py_ssize_t size, n, offset; + Py_ssize_t n, offset; PyObject **value_ptr; again: @@ -2207,7 +2206,6 @@ dict_values(PyDictObject *mp) goto again; } ep = DK_ENTRIES(mp->ma_keys); - size = mp->ma_keys->dk_nentries; if (mp->ma_values) { value_ptr = mp->ma_values; offset = sizeof(PyObject *); @@ -2216,7 +2214,7 @@ dict_values(PyDictObject *mp) value_ptr = &ep[0].me_value; offset = sizeof(PyDictKeyEntry); } - for (i = 0, j = 0; i < size; i++) { + for (i = 0, j = 0; j < n; i++) { PyObject *value = *value_ptr; value_ptr = (PyObject **)(((char *)value_ptr) + offset); if (value != NULL) { @@ -2234,7 +2232,7 @@ dict_items(PyDictObject *mp) { PyObject *v; Py_ssize_t i, j, n; - Py_ssize_t size, offset; + Py_ssize_t offset; PyObject *item, *key; PyDictKeyEntry *ep; PyObject **value_ptr; @@ -2265,7 +2263,6 @@ dict_items(PyDictObject *mp) } /* Nothing we do below makes any function calls. 
*/ ep = DK_ENTRIES(mp->ma_keys); - size = mp->ma_keys->dk_nentries; if (mp->ma_values) { value_ptr = mp->ma_values; offset = sizeof(PyObject *); @@ -2274,7 +2271,7 @@ dict_items(PyDictObject *mp) value_ptr = &ep[0].me_value; offset = sizeof(PyDictKeyEntry); } - for (i = 0, j = 0; i < size; i++) { + for (i = 0, j = 0; j < n; i++) { PyObject *value = *value_ptr; value_ptr = (PyObject **)(((char *)value_ptr) + offset); if (value != NULL) { From webhook-mailer at python.org Fri Apr 5 06:17:18 2019 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Fri, 05 Apr 2019 10:17:18 -0000 Subject: [Python-checkins] bpo-25451: Add transparency methods to tkinter.PhotoImage. (GH-10406) Message-ID: https://github.com/python/cpython/commit/50866e9ed3e4e0ebb60c20c3483a8df424c02722 commit: 50866e9ed3e4e0ebb60c20c3483a8df424c02722 branch: master author: Zackery Spytz committer: Serhiy Storchaka date: 2019-04-05T13:17:13+03:00 summary: bpo-25451: Add transparency methods to tkinter.PhotoImage. (GH-10406) files: A Misc/NEWS.d/next/Library/2018-11-07-23-44-25.bpo-25451.re_8db.rst M Doc/whatsnew/3.8.rst M Lib/tkinter/__init__.py M Lib/tkinter/test/test_tkinter/test_images.py diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst index 411f2299b290..0fc4d774bcde 100644 --- a/Doc/whatsnew/3.8.rst +++ b/Doc/whatsnew/3.8.rst @@ -379,6 +379,11 @@ Added method :meth:`~tkinter.Canvas.moveto` in the :class:`tkinter.Canvas` class. (Contributed by Juliette Monsel in :issue:`23831`.) +The :class:`tkinter.PhotoImage` class now has +:meth:`~tkinter.PhotoImage.transparency_get` and +:meth:`~tkinter.PhotoImage.transparency_set` methods. (Contributed by +Zackery Spytz in :issue:`25451`.) + time ---- diff --git a/Lib/tkinter/__init__.py b/Lib/tkinter/__init__.py index ae493ed3aaaf..57d5b2572822 100644 --- a/Lib/tkinter/__init__.py +++ b/Lib/tkinter/__init__.py @@ -4124,6 +4124,15 @@ def write(self, filename, format=None, from_coords=None): args = args + ('-from',) + tuple(from_coords) self.tk.call(args) + def transparency_get(self, x, y): + """Return True if the pixel at x,y is transparent.""" + return self.tk.getboolean(self.tk.call( + self.name, 'transparency', 'get', x, y)) + + def transparency_set(self, x, y, boolean): + """Set the transparency of the pixel at x,y.""" + self.tk.call(self.name, 'transparency', 'set', x, y, boolean) + class BitmapImage(Image): """Widget which can display images in XBM format.""" diff --git a/Lib/tkinter/test/test_tkinter/test_images.py b/Lib/tkinter/test/test_tkinter/test_images.py index 85a8cd0495ba..2805d35a1f5b 100644 --- a/Lib/tkinter/test/test_tkinter/test_images.py +++ b/Lib/tkinter/test/test_tkinter/test_images.py @@ -320,6 +320,15 @@ def test_write(self): self.assertEqual(image3.get(0, 0), image.get(4, 6)) self.assertEqual(image3.get(1, 2), image.get(5, 8)) + def test_transparency(self): + image = self.create() + self.assertEqual(image.transparency_get(0, 0), True) + self.assertEqual(image.transparency_get(4, 6), False) + image.transparency_set(4, 6, True) + self.assertEqual(image.transparency_get(4, 6), True) + image.transparency_set(4, 6, False) + self.assertEqual(image.transparency_get(4, 6), False) + tests_gui = (MiscTest, BitmapImageTest, PhotoImageTest,) diff --git a/Misc/NEWS.d/next/Library/2018-11-07-23-44-25.bpo-25451.re_8db.rst b/Misc/NEWS.d/next/Library/2018-11-07-23-44-25.bpo-25451.re_8db.rst new file mode 100644 index 000000000000..e0a9ea0c1fbd --- /dev/null +++ b/Misc/NEWS.d/next/Library/2018-11-07-23-44-25.bpo-25451.re_8db.rst @@ -0,0 +1,2 @@ +Add transparency 
methods to :class:`tkinter.PhotoImage`. Patch by Zackery +Spytz. From webhook-mailer at python.org Fri Apr 5 10:02:35 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 05 Apr 2019 14:02:35 -0000 Subject: [Python-checkins] Fix doc for create_subprocess_exec (GH-12598) Message-ID: https://github.com/python/cpython/commit/1328375ad1c91f25a1500945a67b0ef36e387527 commit: 1328375ad1c91f25a1500945a67b0ef36e387527 branch: master author: Dima Tisnek committer: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> date: 2019-04-05T07:02:28-07:00 summary: Fix doc for create_subprocess_exec (GH-12598) Add missing `program` argument to asyncio.create_subprocess_exec documentation. files: M Doc/library/asyncio-subprocess.rst diff --git a/Doc/library/asyncio-subprocess.rst b/Doc/library/asyncio-subprocess.rst index a2cf51734848..00dc66c48b21 100644 --- a/Doc/library/asyncio-subprocess.rst +++ b/Doc/library/asyncio-subprocess.rst @@ -56,7 +56,7 @@ See also the `Examples`_ subsection. Creating Subprocesses ===================== -.. coroutinefunction:: create_subprocess_exec(\*args, stdin=None, \ +.. coroutinefunction:: create_subprocess_exec(program, \*args, stdin=None, \ stdout=None, stderr=None, loop=None, \ limit=None, \*\*kwds) From webhook-mailer at python.org Fri Apr 5 10:08:40 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 05 Apr 2019 14:08:40 -0000 Subject: [Python-checkins] Fix doc for create_subprocess_exec (GH-12598) Message-ID: https://github.com/python/cpython/commit/1bc6cd70667096b8735391fd698d1d702ab419d7 commit: 1bc6cd70667096b8735391fd698d1d702ab419d7 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-05T07:08:31-07:00 summary: Fix doc for create_subprocess_exec (GH-12598) Add missing `program` argument to asyncio.create_subprocess_exec documentation. (cherry picked from commit 1328375ad1c91f25a1500945a67b0ef36e387527) Co-authored-by: Dima Tisnek files: M Doc/library/asyncio-subprocess.rst diff --git a/Doc/library/asyncio-subprocess.rst b/Doc/library/asyncio-subprocess.rst index 0bcf66175ce3..af7e36ed49a9 100644 --- a/Doc/library/asyncio-subprocess.rst +++ b/Doc/library/asyncio-subprocess.rst @@ -56,7 +56,7 @@ See also the `Examples`_ subsection. Creating Subprocesses ===================== -.. coroutinefunction:: create_subprocess_exec(\*args, stdin=None, \ +.. 
coroutinefunction:: create_subprocess_exec(program, \*args, stdin=None, \ stdout=None, stderr=None, loop=None, \ limit=None, \*\*kwds) From webhook-mailer at python.org Sat Apr 6 05:04:52 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Sat, 06 Apr 2019 09:04:52 -0000 Subject: [Python-checkins] bpo-36527: silence -Wunused-parameter warnings in object.h (GH-12688) Message-ID: https://github.com/python/cpython/commit/a0da131a74acdb147ecd64d729c86d65fecd0cff commit: a0da131a74acdb147ecd64d729c86d65fecd0cff branch: master author: Dmitry Marakasov committer: Inada Naoki date: 2019-04-06T18:04:47+09:00 summary: bpo-36527: silence -Wunused-parameter warnings in object.h (GH-12688) files: M Include/object.h diff --git a/Include/object.h b/Include/object.h index a729335750c7..86cbfc581ed1 100644 --- a/Include/object.h +++ b/Include/object.h @@ -440,6 +440,7 @@ static inline void _Py_NewReference(PyObject *op) static inline void _Py_ForgetReference(PyObject *op) { + (void)op; /* may be unused, shut up -Wunused-parameter */ _Py_INC_TPFREES(op); } #endif /* !Py_TRACE_REFS */ @@ -458,6 +459,8 @@ static inline void _Py_INCREF(PyObject *op) static inline void _Py_DECREF(const char *filename, int lineno, PyObject *op) { + (void)filename; /* may be unused, shut up -Wunused-parameter */ + (void)lineno; /* may be unused, shut up -Wunused-parameter */ _Py_DEC_REFTOTAL; if (--op->ob_refcnt != 0) { #ifdef Py_REF_DEBUG From webhook-mailer at python.org Sat Apr 6 05:06:22 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Sat, 06 Apr 2019 09:06:22 -0000 Subject: [Python-checkins] bpo-36050: optimize HTTPResponse.read() (GH-12698) Message-ID: https://github.com/python/cpython/commit/d6bf6f2d0c83f0c64ce86e7b9340278627798090 commit: d6bf6f2d0c83f0c64ce86e7b9340278627798090 branch: master author: Inada Naoki committer: GitHub date: 2019-04-06T18:06:19+09:00 summary: bpo-36050: optimize HTTPResponse.read() (GH-12698) * No need to chunking for now. * No need to partial read caused by EINTR for now. files: A Misc/NEWS.d/next/Library/2019-04-05-21-29-53.bpo-36050.x9DRKE.rst M Lib/http/client.py diff --git a/Lib/http/client.py b/Lib/http/client.py index 1de151c38e92..5a2225276b1a 100644 --- a/Lib/http/client.py +++ b/Lib/http/client.py @@ -105,9 +105,6 @@ # Mapping status codes to official W3C names responses = {v: v.phrase for v in http.HTTPStatus.__members__.values()} -# maximal amount of data to read at one time in _safe_read -MAXAMOUNT = 1048576 - # maximal line length when calling readline(). _MAXLINE = 65536 _MAXHEADERS = 100 @@ -592,43 +589,24 @@ def _readinto_chunked(self, b): raise IncompleteRead(bytes(b[0:total_bytes])) def _safe_read(self, amt): - """Read the number of bytes requested, compensating for partial reads. - - Normally, we have a blocking socket, but a read() can be interrupted - by a signal (resulting in a partial read). - - Note that we cannot distinguish between EOF and an interrupt when zero - bytes have been read. IncompleteRead() will be raised in this - situation. + """Read the number of bytes requested. This function should be used when bytes "should" be present for reading. If the bytes are truly not available (due to EOF), then the IncompleteRead exception can be used to detect the problem. 
""" - s = [] - while amt > 0: - chunk = self.fp.read(min(amt, MAXAMOUNT)) - if not chunk: - raise IncompleteRead(b''.join(s), amt) - s.append(chunk) - amt -= len(chunk) - return b"".join(s) + data = self.fp.read(amt) + if len(data) < amt: + raise IncompleteRead(data, amt-len(data)) + return data def _safe_readinto(self, b): """Same as _safe_read, but for reading into a buffer.""" - total_bytes = 0 - mvb = memoryview(b) - while total_bytes < len(b): - if MAXAMOUNT < len(mvb): - temp_mvb = mvb[0:MAXAMOUNT] - n = self.fp.readinto(temp_mvb) - else: - n = self.fp.readinto(mvb) - if not n: - raise IncompleteRead(bytes(mvb[0:total_bytes]), len(b)) - mvb = mvb[n:] - total_bytes += n - return total_bytes + amt = len(b) + n = self.fp.readinto(b) + if n < amt: + raise IncompleteRead(bytes(b[:n]), amt-n) + return n def read1(self, n=-1): """Read with at most one underlying system call. If at least one diff --git a/Misc/NEWS.d/next/Library/2019-04-05-21-29-53.bpo-36050.x9DRKE.rst b/Misc/NEWS.d/next/Library/2019-04-05-21-29-53.bpo-36050.x9DRKE.rst new file mode 100644 index 000000000000..92318f877b60 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-04-05-21-29-53.bpo-36050.x9DRKE.rst @@ -0,0 +1,2 @@ +Optimized ``http.client.HTTPResponse.read()`` for large response. Patch by +Inada Naoki. From webhook-mailer at python.org Sat Apr 6 12:57:46 2019 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Sat, 06 Apr 2019 16:57:46 -0000 Subject: [Python-checkins] bpo-9883: Update list of unimplemented interfaces in minidom. (GH-12677) Message-ID: https://github.com/python/cpython/commit/2ea8099523581cf2ecc060831a53debb57ff98ee commit: 2ea8099523581cf2ecc060831a53debb57ff98ee branch: master author: Stefan Behnel committer: Serhiy Storchaka date: 2019-04-06T19:57:43+03:00 summary: bpo-9883: Update list of unimplemented interfaces in minidom. (GH-12677) Remove names from the "unimplemented interfaces" list in the minidom docs that are actually implemented. files: M Doc/library/xml.dom.minidom.rst diff --git a/Doc/library/xml.dom.minidom.rst b/Doc/library/xml.dom.minidom.rst index af09ea98c818..2423a0c15691 100644 --- a/Doc/library/xml.dom.minidom.rst +++ b/Doc/library/xml.dom.minidom.rst @@ -238,22 +238,8 @@ The following interfaces have no implementation in :mod:`xml.dom.minidom`: * :class:`DOMTimeStamp` -* :class:`DocumentType` - -* :class:`DOMImplementation` - -* :class:`CharacterData` - -* :class:`CDATASection` - -* :class:`Notation` - -* :class:`Entity` - * :class:`EntityReference` -* :class:`DocumentFragment` - Most of these reflect information in the XML document that is not of general utility to most DOM users. From webhook-mailer at python.org Sun Apr 7 00:47:54 2019 From: webhook-mailer at python.org (Nick Coghlan) Date: Sun, 07 Apr 2019 04:47:54 -0000 Subject: [Python-checkins] bpo-30661: Improve docs for tarfile pax change and effect on shutil (GH-12635) Message-ID: https://github.com/python/cpython/commit/89a894403cfa880d7f9d1d67070f61456d14cbde commit: 89a894403cfa880d7f9d1d67070f61456d14cbde branch: master author: CAM Gerlach committer: Nick Coghlan date: 2019-04-07T14:47:49+10:00 summary: bpo-30661: Improve docs for tarfile pax change and effect on shutil (GH-12635) The shutil archive creation helpers use the default tarfile format, so that API is also switching to use `pax` by default. 
files: M Doc/library/shutil.rst M Doc/library/tarfile.rst M Doc/whatsnew/3.8.rst diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst index 587be3befa09..2dc872fd0777 100644 --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -559,6 +559,10 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules. The *verbose* argument is unused and deprecated. + .. versionchanged:: 3.8 + The modern pax (POSIX.1-2001) format is now used instead of + the legacy GNU format for archives created with ``format="tar"``. + .. function:: get_archive_formats() @@ -568,7 +572,7 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules. By default :mod:`shutil` provides these formats: - *zip*: ZIP file (if the :mod:`zlib` module is available). - - *tar*: uncompressed tar file. + - *tar*: Uncompressed tar file. Uses POSIX.1-2001 pax format for new archives. - *gztar*: gzip'ed tar-file (if the :mod:`zlib` module is available). - *bztar*: bzip2'ed tar-file (if the :mod:`bz2` module is available). - *xztar*: xz'ed tar-file (if the :mod:`lzma` module is available). diff --git a/Doc/library/tarfile.rst b/Doc/library/tarfile.rst index c7012a7d48f6..f25af8ca6a33 100644 --- a/Doc/library/tarfile.rst +++ b/Doc/library/tarfile.rst @@ -231,9 +231,9 @@ details. The default format for creating archives. This is currently :const:`PAX_FORMAT`. - .. versionchanged:: 3.8 - The default format for new archives was changed to - :const:`PAX_FORMAT` from :const:`GNU_FORMAT`. + .. versionchanged:: 3.8 + The default format for new archives was changed to + :const:`PAX_FORMAT` from :const:`GNU_FORMAT`. .. seealso:: @@ -813,8 +813,8 @@ Supported tar formats There are three tar formats that can be created with the :mod:`tarfile` module: * The POSIX.1-1988 ustar format (:const:`USTAR_FORMAT`). It supports filenames - up to a length of at best 256 characters and linknames up to 100 characters. The - maximum file size is 8 GiB. This is an old and limited but widely + up to a length of at best 256 characters and linknames up to 100 characters. + The maximum file size is 8 GiB. This is an old and limited but widely supported format. * The GNU tar format (:const:`GNU_FORMAT`). It supports long filenames and @@ -826,14 +826,15 @@ There are three tar formats that can be created with the :mod:`tarfile` module: format with virtually no limits. It supports long filenames and linknames, large files and stores pathnames in a portable way. Modern tar implementations, including GNU tar, bsdtar/libarchive and star, fully support extended *pax* - features; some older or unmaintained libraries may not, but should treat + features; some old or unmaintained libraries may not, but should treat *pax* archives as if they were in the universally-supported *ustar* format. + It is the current default format for new archives. - The *pax* format is an extension to the existing *ustar* format. It uses extra - headers for information that cannot be stored otherwise. There are two flavours - of pax headers: Extended headers only affect the subsequent file header, global - headers are valid for the complete archive and affect all following files. All - the data in a pax header is encoded in *UTF-8* for portability reasons. + It extends the existing *ustar* format with extra headers for information + that cannot be stored otherwise. There are two flavours of pax headers: + Extended headers only affect the subsequent file header, global + headers are valid for the complete archive and affect all following files. 
+ All the data in a pax header is encoded in *UTF-8* for portability reasons. There are some more variants of the tar format which can be read, but not created: diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst index 0fc4d774bcde..ac20ee3aa57c 100644 --- a/Doc/whatsnew/3.8.rst +++ b/Doc/whatsnew/3.8.rst @@ -300,6 +300,11 @@ shutil :func:`shutil.copytree` now accepts a new ``dirs_exist_ok`` keyword argument. (Contributed by Josh Bronson in :issue:`20849`.) +:func:`shutil.make_archive` now defaults to the modern pax (POSIX.1-2001) +format for new archives to improve portability and standards conformance, +inherited from the corresponding change to the :mod:`tarfile` module. +(Contributed by C.A.M. Gerlach in :issue:`30661`.) + ssl --- From webhook-mailer at python.org Sun Apr 7 03:51:41 2019 From: webhook-mailer at python.org (Vinay Sajip) Date: Sun, 07 Apr 2019 07:51:41 -0000 Subject: [Python-checkins] bpo-35726: Add test for QueueHandler with multiple handlers (GH-11659) Message-ID: https://github.com/python/cpython/commit/2dad96013ca24abdc5ba5a369ea42d70ff02487a commit: 2dad96013ca24abdc5ba5a369ea42d70ff02487a branch: master author: Xtreak committer: Vinay Sajip date: 2019-04-07T08:51:27+01:00 summary: bpo-35726: Add test for QueueHandler with multiple handlers (GH-11659) files: M Lib/test/test_logging.py diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py index b23ae24920b7..1805249e48bc 100644 --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -3505,6 +3505,19 @@ def test_queue_listener_with_StreamHandler(self): listener.stop() self.assertEqual(self.stream.getvalue().strip().count('Traceback'), 1) + @unittest.skipUnless(hasattr(logging.handlers, 'QueueListener'), + 'logging.handlers.QueueListener required for this test') + def test_queue_listener_with_multiple_handlers(self): + # Test that queue handler format doesn't affect other handler formats (bpo-35726). + self.que_hdlr.setFormatter(self.root_formatter) + self.que_logger.addHandler(self.root_hdlr) + + listener = logging.handlers.QueueListener(self.queue, self.que_hdlr) + listener.start() + self.que_logger.error("error") + listener.stop() + self.assertEqual(self.stream.getvalue().strip(), "que -> ERROR: error") + if hasattr(logging.handlers, 'QueueListener'): import multiprocessing from unittest.mock import patch From webhook-mailer at python.org Sun Apr 7 04:00:48 2019 From: webhook-mailer at python.org (Nick Coghlan) Date: Sun, 07 Apr 2019 08:00:48 -0000 Subject: [Python-checkins] bpo-35936: Updates to modulefinder (GH-11787) Message-ID: https://github.com/python/cpython/commit/9d7b2c0909b78800d1376fd696f73824ea680463 commit: 9d7b2c0909b78800d1376fd696f73824ea680463 branch: master author: Brandt Bucher committer: Nick Coghlan date: 2019-04-07T18:00:41+10:00 summary: bpo-35936: Updates to modulefinder (GH-11787) * Properly handle SyntaxErrors in Python source files. SyntaxErrors in the target module will rise normally, while SyntaxErrors in dependencies will be added to badmodules. This includes a new regression test. * Fix name collision bug. This fixes an issue where a "fromlist" import with the same name as a previously failed import would be incorrectly added to badmodules. This includes a new regression test. * Replace mutable default values. Bound empty lists have been replaced with the "if param is None" idiom. * Replace deprecated imp usage. Constants imported from imp have been moved to private module-level constants, and ModuleFinder.find_module has been refactored to use importlib. 
Other than an improvement on how frozen builtin imports are reported (as the frozen imports they are, rather than the stdlib modules they *may* have originated from), these changes maintain complete compatibility with past versions... including odd behavior for returning relative (below current directory, but not a C extension) vs. absolute (above current directory, or a C extension) paths. Patch by Brandt Bucher. files: A Misc/NEWS.d/next/Library/2019-02-13-18-56-22.bpo-17396.oKRkrD.rst A Misc/NEWS.d/next/Library/2019-02-13-18-56-27.bpo-35376.UFhYLj.rst A Misc/NEWS.d/next/Library/2019-02-16-22-19-32.bpo-35936.Ay5WtD.rst M Lib/modulefinder.py M Lib/test/test_modulefinder.py M Misc/ACKS diff --git a/Lib/modulefinder.py b/Lib/modulefinder.py index 10320a74d942..0061ef415ce3 100644 --- a/Lib/modulefinder.py +++ b/Lib/modulefinder.py @@ -8,9 +8,7 @@ import sys import types import warnings -with warnings.catch_warnings(): - warnings.simplefilter('ignore', DeprecationWarning) - import imp + LOAD_CONST = dis.opmap['LOAD_CONST'] IMPORT_NAME = dis.opmap['IMPORT_NAME'] @@ -19,6 +17,16 @@ STORE_OPS = STORE_NAME, STORE_GLOBAL EXTENDED_ARG = dis.EXTENDED_ARG +# Old imp constants: + +_SEARCH_ERROR = 0 +_PY_SOURCE = 1 +_PY_COMPILED = 2 +_C_EXTENSION = 3 +_PKG_DIRECTORY = 5 +_C_BUILTIN = 6 +_PY_FROZEN = 7 + # Modulefinder does a good job at simulating Python's, but it can not # handle __path__ modifications packages make at runtime. Therefore there # is a mechanism whereby you can register extra paths in this map for a @@ -43,6 +51,54 @@ def ReplacePackage(oldname, newname): replacePackageMap[oldname] = newname +def _find_module(name, path=None): + """An importlib reimplementation of imp.find_module (for our purposes).""" + + # It's necessary to clear the caches for our Finder first, in case any + # modules are being added/deleted/modified at runtime. In particular, + # test_modulefinder.py changes file tree contents in a cache-breaking way: + + importlib.machinery.PathFinder.invalidate_caches() + + spec = importlib.machinery.PathFinder.find_spec(name, path) + + if spec is None: + raise ImportError("No module named {name!r}".format(name=name), name=name) + + # Some special cases: + + if spec.loader is importlib.machinery.BuiltinImporter: + return None, None, ("", "", _C_BUILTIN) + + if spec.loader is importlib.machinery.FrozenImporter: + return None, None, ("", "", _PY_FROZEN) + + file_path = spec.origin + + if spec.loader.is_package(name): + return None, os.path.dirname(file_path), ("", "", _PKG_DIRECTORY) + + if isinstance(spec.loader, importlib.machinery.SourceFileLoader): + kind = _PY_SOURCE + mode = "r" + + elif isinstance(spec.loader, importlib.machinery.ExtensionFileLoader): + kind = _C_EXTENSION + mode = "rb" + + elif isinstance(spec.loader, importlib.machinery.SourcelessFileLoader): + kind = _PY_COMPILED + mode = "rb" + + else: # Should never happen. 
+ return None, None, ("", "", _SEARCH_ERROR) + + file = open(file_path, mode) + suffix = os.path.splitext(file_path)[-1] + + return file, file_path, (suffix, mode, kind) + + class Module: def __init__(self, name, file=None, path=None): @@ -69,7 +125,7 @@ def __repr__(self): class ModuleFinder: - def __init__(self, path=None, debug=0, excludes=[], replace_paths=[]): + def __init__(self, path=None, debug=0, excludes=None, replace_paths=None): if path is None: path = sys.path self.path = path @@ -77,8 +133,8 @@ def __init__(self, path=None, debug=0, excludes=[], replace_paths=[]): self.badmodules = {} self.debug = debug self.indent = 0 - self.excludes = excludes - self.replace_paths = replace_paths + self.excludes = excludes if excludes is not None else [] + self.replace_paths = replace_paths if replace_paths is not None else [] self.processed_paths = [] # Used in debugging only def msg(self, level, str, *args): @@ -105,14 +161,14 @@ def msgout(self, *args): def run_script(self, pathname): self.msg(2, "run_script", pathname) with open(pathname) as fp: - stuff = ("", "r", imp.PY_SOURCE) + stuff = ("", "r", _PY_SOURCE) self.load_module('__main__', fp, pathname, stuff) def load_file(self, pathname): dir, name = os.path.split(pathname) name, ext = os.path.splitext(name) with open(pathname) as fp: - stuff = (ext, "r", imp.PY_SOURCE) + stuff = (ext, "r", _PY_SOURCE) self.load_module(name, fp, pathname, stuff) def import_hook(self, name, caller=None, fromlist=None, level=-1): @@ -279,13 +335,13 @@ def import_module(self, partname, fqname, parent): def load_module(self, fqname, fp, pathname, file_info): suffix, mode, type = file_info self.msgin(2, "load_module", fqname, fp and "fp", pathname) - if type == imp.PKG_DIRECTORY: + if type == _PKG_DIRECTORY: m = self.load_package(fqname, pathname) self.msgout(2, "load_module ->", m) return m - if type == imp.PY_SOURCE: + if type == _PY_SOURCE: co = compile(fp.read()+'\n', pathname, 'exec') - elif type == imp.PY_COMPILED: + elif type == _PY_COMPILED: try: data = fp.read() importlib._bootstrap_external._classify_pyc(data, fqname, {}) @@ -323,17 +379,20 @@ def _safe_import_hook(self, name, caller, fromlist, level=-1): except ImportError as msg: self.msg(2, "ImportError:", str(msg)) self._add_badmodule(name, caller) + except SyntaxError as msg: + self.msg(2, "SyntaxError:", str(msg)) + self._add_badmodule(name, caller) else: if fromlist: for sub in fromlist: - if sub in self.badmodules: - self._add_badmodule(sub, caller) + fullname = name + "." + sub + if fullname in self.badmodules: + self._add_badmodule(fullname, caller) continue try: self.import_hook(name, caller, [sub], level=level) except ImportError as msg: self.msg(2, "ImportError:", str(msg)) - fullname = name + "." 
+ sub self._add_badmodule(fullname, caller) def scan_opcodes(self, co): @@ -445,10 +504,11 @@ def find_module(self, name, path, parent=None): if path is None: if name in sys.builtin_module_names: - return (None, None, ("", "", imp.C_BUILTIN)) + return (None, None, ("", "", _C_BUILTIN)) path = self.path - return imp.find_module(name, path) + + return _find_module(name, path) def report(self): """Print a report to stdout, listing the found modules with their diff --git a/Lib/test/test_modulefinder.py b/Lib/test/test_modulefinder.py index e4df2a90d4a4..ebd96e1c8a2d 100644 --- a/Lib/test/test_modulefinder.py +++ b/Lib/test/test_modulefinder.py @@ -218,6 +218,33 @@ def foo(): pass "" ] +syntax_error_test = [ + "a.module", + ["a", "a.module", "b"], + ["b.module"], [], + """\ +a/__init__.py +a/module.py + import b.module +b/__init__.py +b/module.py + ? # SyntaxError: invalid syntax +"""] + + +same_name_as_bad_test = [ + "a.module", + ["a", "a.module", "b", "b.c"], + ["c"], [], + """\ +a/__init__.py +a/module.py + import c + from b import c +b/__init__.py +b/c.py +"""] + def open_file(path): dirname = os.path.dirname(path) @@ -299,6 +326,12 @@ def test_relative_imports_3(self): def test_relative_imports_4(self): self._do_test(relative_import_test_4) + def test_syntax_error(self): + self._do_test(syntax_error_test) + + def test_same_name_as_bad(self): + self._do_test(same_name_as_bad_test) + def test_bytecode(self): base_path = os.path.join(TEST_DIR, 'a') source_path = base_path + importlib.machinery.SOURCE_SUFFIXES[0] diff --git a/Misc/ACKS b/Misc/ACKS index 9cddcb3a871f..df6be5912785 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -222,6 +222,7 @@ Ian Bruntlett Floris Bruynooghe Matt Bryant Stan Bubrouski +Brandt Bucher Colm Buckley Erik de Bueger Jan-Hein B?hrman diff --git a/Misc/NEWS.d/next/Library/2019-02-13-18-56-22.bpo-17396.oKRkrD.rst b/Misc/NEWS.d/next/Library/2019-02-13-18-56-22.bpo-17396.oKRkrD.rst new file mode 100644 index 000000000000..50596cf9e43f --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-02-13-18-56-22.bpo-17396.oKRkrD.rst @@ -0,0 +1,2 @@ +:mod:`modulefinder` no longer crashes when encountering syntax errors in followed imports. +Patch by Brandt Bucher. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Library/2019-02-13-18-56-27.bpo-35376.UFhYLj.rst b/Misc/NEWS.d/next/Library/2019-02-13-18-56-27.bpo-35376.UFhYLj.rst new file mode 100644 index 000000000000..a9bf8c9a636c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-02-13-18-56-27.bpo-35376.UFhYLj.rst @@ -0,0 +1,2 @@ +:mod:`modulefinder` correctly handles modules that have the same name as a bad package. +Patch by Brandt Bucher. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Library/2019-02-16-22-19-32.bpo-35936.Ay5WtD.rst b/Misc/NEWS.d/next/Library/2019-02-16-22-19-32.bpo-35936.Ay5WtD.rst new file mode 100644 index 000000000000..55a028ec8349 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-02-16-22-19-32.bpo-35936.Ay5WtD.rst @@ -0,0 +1,2 @@ +:mod:`modulefinder` no longer depends on the deprecated :mod:`imp` module, and the initializer for :class:`modulefinder.ModuleFinder` now has immutable default arguments. +Patch by Brandt Bucher. 
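For context on the API being reworked in this commit, here is a tiny self-contained run of ModuleFinder; the throwaway demo.py script and the bogus module name are invented for the example:

    import os, tempfile
    from modulefinder import ModuleFinder

    with tempfile.TemporaryDirectory() as tmp:
        script = os.path.join(tmp, "demo.py")
        with open(script, "w") as f:
            f.write("import json\nimport definitely_not_a_real_module\n")

        finder = ModuleFinder()
        finder.run_script(script)

        print("json" in finder.modules)                              # True
        print("definitely_not_a_real_module" in finder.badmodules)   # True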
\ No newline at end of file From webhook-mailer at python.org Sun Apr 7 12:20:07 2019 From: webhook-mailer at python.org (Raymond Hettinger) Date: Sun, 07 Apr 2019 16:20:07 -0000 Subject: [Python-checkins] bpo-27181: Add statistics.geometric_mean() (GH-12638) Message-ID: https://github.com/python/cpython/commit/6463ba3061bd311413d2951dc83c565907e10459 commit: 6463ba3061bd311413d2951dc83c565907e10459 branch: master author: Raymond Hettinger committer: GitHub date: 2019-04-07T09:20:03-07:00 summary: bpo-27181: Add statistics.geometric_mean() (GH-12638) files: A Misc/NEWS.d/next/Library/2019-03-31-01-18-52.bpo-27181.LVUWcc.rst M Doc/library/statistics.rst M Doc/whatsnew/3.8.rst M Lib/statistics.py M Lib/test/test_statistics.py diff --git a/Doc/library/statistics.rst b/Doc/library/statistics.rst index 1d52d98b2997..8bb2bdf7b697 100644 --- a/Doc/library/statistics.rst +++ b/Doc/library/statistics.rst @@ -40,6 +40,7 @@ or sample. ======================= =============================================================== :func:`mean` Arithmetic mean ("average") of data. :func:`fmean` Fast, floating point arithmetic mean. +:func:`geometric_mean` Geometric mean of data. :func:`harmonic_mean` Harmonic mean of data. :func:`median` Median (middle value) of data. :func:`median_low` Low median of data. @@ -130,6 +131,24 @@ However, for reading convenience, most of the examples show sorted sequences. .. versionadded:: 3.8 +.. function:: geometric_mean(data) + + Convert *data* to floats and compute the geometric mean. + + Raises a :exc:`StatisticsError` if the input dataset is empty, + if it contains a zero, or if it contains a negative value. + + No special efforts are made to achieve exact results. + (However, this may change in the future.) + + .. doctest:: + + >>> round(geometric_mean([54, 24, 36]), 9) + 36.0 + + .. versionadded:: 3.8 + + .. function:: harmonic_mean(data) Return the harmonic mean of *data*, a sequence or iterator of diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst index ac20ee3aa57c..4347b3ee4118 100644 --- a/Doc/whatsnew/3.8.rst +++ b/Doc/whatsnew/3.8.rst @@ -322,6 +322,9 @@ Added :func:`statistics.fmean` as a faster, floating point variant of :func:`statistics.mean()`. (Contributed by Raymond Hettinger and Steven D'Aprano in :issue:`35904`.) +Added :func:`statistics.geometric_mean()` +(Contributed by Raymond Hettinger in :issue:`27181`.) + Added :func:`statistics.multimode` that returns a list of the most common values. (Contributed by Raymond Hettinger in :issue:`35892`.) diff --git a/Lib/statistics.py b/Lib/statistics.py index bd8a6f96381a..262ad976b65c 100644 --- a/Lib/statistics.py +++ b/Lib/statistics.py @@ -11,13 +11,14 @@ Function Description ================== ============================================= mean Arithmetic mean (average) of data. +geometric_mean Geometric mean of data. harmonic_mean Harmonic mean of data. median Median (middle value) of data. median_low Low median of data. median_high High median of data. median_grouped Median, or 50th percentile, of grouped data. mode Mode (most common value) of data. -multimode List of modes (most common values of data) +multimode List of modes (most common values of data). 
================== ============================================= Calculate the arithmetic mean ("the average") of data: @@ -81,6 +82,7 @@ 'pstdev', 'pvariance', 'stdev', 'variance', 'median', 'median_low', 'median_high', 'median_grouped', 'mean', 'mode', 'multimode', 'harmonic_mean', 'fmean', + 'geometric_mean', ] import math @@ -328,6 +330,24 @@ def count(x): except ZeroDivisionError: raise StatisticsError('fmean requires at least one data point') from None +def geometric_mean(data): + """Convert data to floats and compute the geometric mean. + + Raises a StatisticsError if the input dataset is empty, + if it contains a zero, or if it contains a negative value. + + No special efforts are made to achieve exact results. + (However, this may change in the future.) + + >>> round(geometric_mean([54, 24, 36]), 9) + 36.0 + """ + try: + return exp(fmean(map(log, data))) + except ValueError: + raise StatisticsError('geometric mean requires a non-empty dataset ' + ' containing positive numbers') from None + def harmonic_mean(data): """Return the harmonic mean of data. diff --git a/Lib/test/test_statistics.py b/Lib/test/test_statistics.py index 7f7839de4600..4d397eb1265d 100644 --- a/Lib/test/test_statistics.py +++ b/Lib/test/test_statistics.py @@ -2038,6 +2038,94 @@ def test_compare_to_variance(self): expected = math.sqrt(statistics.variance(data)) self.assertEqual(self.func(data), expected) +class TestGeometricMean(unittest.TestCase): + + def test_basics(self): + geometric_mean = statistics.geometric_mean + self.assertAlmostEqual(geometric_mean([54, 24, 36]), 36.0) + self.assertAlmostEqual(geometric_mean([4.0, 9.0]), 6.0) + self.assertAlmostEqual(geometric_mean([17.625]), 17.625) + + random.seed(86753095551212) + for rng in [ + range(1, 100), + range(1, 1_000), + range(1, 10_000), + range(500, 10_000, 3), + range(10_000, 500, -3), + [12, 17, 13, 5, 120, 7], + [random.expovariate(50.0) for i in range(1_000)], + [random.lognormvariate(20.0, 3.0) for i in range(2_000)], + [random.triangular(2000, 3000, 2200) for i in range(3_000)], + ]: + gm_decimal = math.prod(map(Decimal, rng)) ** (Decimal(1) / len(rng)) + gm_float = geometric_mean(rng) + self.assertTrue(math.isclose(gm_float, float(gm_decimal))) + + def test_various_input_types(self): + geometric_mean = statistics.geometric_mean + D = Decimal + F = Fraction + # https://www.wolframalpha.com/input/?i=geometric+mean+3.5,+4.0,+5.25 + expected_mean = 4.18886 + for data, kind in [ + ([3.5, 4.0, 5.25], 'floats'), + ([D('3.5'), D('4.0'), D('5.25')], 'decimals'), + ([F(7, 2), F(4, 1), F(21, 4)], 'fractions'), + ([3.5, 4, F(21, 4)], 'mixed types'), + ((3.5, 4.0, 5.25), 'tuple'), + (iter([3.5, 4.0, 5.25]), 'iterator'), + ]: + actual_mean = geometric_mean(data) + self.assertIs(type(actual_mean), float, kind) + self.assertAlmostEqual(actual_mean, expected_mean, places=5) + + def test_big_and_small(self): + geometric_mean = statistics.geometric_mean + + # Avoid overflow to infinity + large = 2.0 ** 1000 + big_gm = geometric_mean([54.0 * large, 24.0 * large, 36.0 * large]) + self.assertTrue(math.isclose(big_gm, 36.0 * large)) + self.assertFalse(math.isinf(big_gm)) + + # Avoid underflow to zero + small = 2.0 ** -1000 + small_gm = geometric_mean([54.0 * small, 24.0 * small, 36.0 * small]) + self.assertTrue(math.isclose(small_gm, 36.0 * small)) + self.assertNotEqual(small_gm, 0.0) + + def test_error_cases(self): + geometric_mean = statistics.geometric_mean + StatisticsError = statistics.StatisticsError + with self.assertRaises(StatisticsError): + geometric_mean([]) 
# empty input + with self.assertRaises(StatisticsError): + geometric_mean([3.5, 0.0, 5.25]) # zero input + with self.assertRaises(StatisticsError): + geometric_mean([3.5, -4.0, 5.25]) # negative input + with self.assertRaises(StatisticsError): + geometric_mean(iter([])) # empty iterator + with self.assertRaises(TypeError): + geometric_mean(None) # non-iterable input + with self.assertRaises(TypeError): + geometric_mean([10, None, 20]) # non-numeric input + with self.assertRaises(TypeError): + geometric_mean() # missing data argument + with self.assertRaises(TypeError): + geometric_mean([10, 20, 60], 70) # too many arguments + + def test_special_values(self): + # Rules for special values are inherited from math.fsum() + geometric_mean = statistics.geometric_mean + NaN = float('Nan') + Inf = float('Inf') + self.assertTrue(math.isnan(geometric_mean([10, NaN])), 'nan') + self.assertTrue(math.isnan(geometric_mean([NaN, Inf])), 'nan and infinity') + self.assertTrue(math.isinf(geometric_mean([10, Inf])), 'infinity') + with self.assertRaises(ValueError): + geometric_mean([Inf, -Inf]) + class TestNormalDist(unittest.TestCase): # General note on precision: The pdf(), cdf(), and overlap() methods diff --git a/Misc/NEWS.d/next/Library/2019-03-31-01-18-52.bpo-27181.LVUWcc.rst b/Misc/NEWS.d/next/Library/2019-03-31-01-18-52.bpo-27181.LVUWcc.rst new file mode 100644 index 000000000000..3ce41c557982 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-03-31-01-18-52.bpo-27181.LVUWcc.rst @@ -0,0 +1 @@ +Add statistics.geometric_mean(). From webhook-mailer at python.org Sun Apr 7 20:51:36 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Mon, 08 Apr 2019 00:51:36 -0000 Subject: [Python-checkins] bpo-36503: remove references to 'aix3' and 'aix4' (GH-12658) Message-ID: https://github.com/python/cpython/commit/b7eec94c0e86f8ac318b135ca9146fff32b7203a commit: b7eec94c0e86f8ac318b135ca9146fff32b7203a branch: master author: Michael Felt committer: Inada Naoki date: 2019-04-08T09:51:33+09:00 summary: bpo-36503: remove references to 'aix3' and 'aix4' (GH-12658) files: A Misc/NEWS.d/next/Build/2019-04-02-09-25-23.bpo-36503.0xzfkQ.rst M Lib/test/test_fcntl.py M Misc/ACKS M setup.py diff --git a/Lib/test/test_fcntl.py b/Lib/test/test_fcntl.py index acd5c7cc5864..5d4abe388f78 100644 --- a/Lib/test/test_fcntl.py +++ b/Lib/test/test_fcntl.py @@ -34,7 +34,7 @@ def get_lockdata(): fcntl.F_WRLCK, 0) elif sys.platform.startswith('gnukfreebsd'): lockdata = struct.pack('qqihhi', 0, 0, 0, fcntl.F_WRLCK, 0, 0) - elif sys.platform in ['aix3', 'aix4', 'hp-uxB', 'unixware7']: + elif sys.platform in ['hp-uxB', 'unixware7']: lockdata = struct.pack('hhlllii', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0) else: lockdata = struct.pack('hh'+start_len+'hh', fcntl.F_WRLCK, 0, 0, 0, 0, 0) diff --git a/Misc/ACKS b/Misc/ACKS index df6be5912785..19c7e4305351 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -476,6 +476,7 @@ Jim Fasarakis-Hilliard Mark Favas Sergey Fedoseev Boris Feld +M. Felt Thomas Fenzl Niels Ferguson Francisco Fern?ndez Casta?o diff --git a/Misc/NEWS.d/next/Build/2019-04-02-09-25-23.bpo-36503.0xzfkQ.rst b/Misc/NEWS.d/next/Build/2019-04-02-09-25-23.bpo-36503.0xzfkQ.rst new file mode 100644 index 000000000000..764c397f8f8d --- /dev/null +++ b/Misc/NEWS.d/next/Build/2019-04-02-09-25-23.bpo-36503.0xzfkQ.rst @@ -0,0 +1,2 @@ +Remove references to "aix3" and "aix4". +Patch by M. Felt. 
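Circling back to the statistics change a few commits up (bpo-27181), a short sketch of the new function, assuming an interpreter that already has it; the numbers are arbitrary and mirror the docstring example:

    from statistics import StatisticsError, geometric_mean

    print(round(geometric_mean([54, 24, 36]), 1))   # 36.0
    print(round(geometric_mean([4.0, 9.0]), 1))     # 6.0

    try:
        geometric_mean([])          # empty (or non-positive) data is rejected
    except StatisticsError as exc:
        print(exc)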
diff --git a/setup.py b/setup.py index c278f08b8e6d..d03596029149 100644 --- a/setup.py +++ b/setup.py @@ -1882,9 +1882,6 @@ def detect_tkinter(self): libs.append('tk'+ version) libs.append('tcl'+ version) - if HOST_PLATFORM in ['aix3', 'aix4']: - libs.append('ld') - # Finally, link with the X11 libraries (not appropriate on cygwin) if not CYGWIN: libs.append('X11') From webhook-mailer at python.org Sun Apr 7 22:56:04 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 08 Apr 2019 02:56:04 -0000 Subject: [Python-checkins] bpo-9883: Update list of unimplemented interfaces in minidom. (GH-12677) Message-ID: https://github.com/python/cpython/commit/a9a065addd175ed37a959118c90377ba60f90036 commit: a9a065addd175ed37a959118c90377ba60f90036 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-07T19:55:58-07:00 summary: bpo-9883: Update list of unimplemented interfaces in minidom. (GH-12677) Remove names from the "unimplemented interfaces" list in the minidom docs that are actually implemented. (cherry picked from commit 2ea8099523581cf2ecc060831a53debb57ff98ee) Co-authored-by: Stefan Behnel files: M Doc/library/xml.dom.minidom.rst diff --git a/Doc/library/xml.dom.minidom.rst b/Doc/library/xml.dom.minidom.rst index a37caf62a374..96080c3e318c 100644 --- a/Doc/library/xml.dom.minidom.rst +++ b/Doc/library/xml.dom.minidom.rst @@ -227,22 +227,8 @@ The following interfaces have no implementation in :mod:`xml.dom.minidom`: * :class:`DOMTimeStamp` -* :class:`DocumentType` - -* :class:`DOMImplementation` - -* :class:`CharacterData` - -* :class:`CDATASection` - -* :class:`Notation` - -* :class:`Entity` - * :class:`EntityReference` -* :class:`DocumentFragment` - Most of these reflect information in the XML document that is not of general utility to most DOM users. From webhook-mailer at python.org Mon Apr 8 04:01:21 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Mon, 08 Apr 2019 08:01:21 -0000 Subject: [Python-checkins] remove duplicate code in biscet (GH-1270) Message-ID: https://github.com/python/cpython/commit/96be3400a97946b0b3d032dd8c3c0561786796f1 commit: 96be3400a97946b0b3d032dd8c3c0561786796f1 branch: master author: Chillar Anand committer: Inada Naoki date: 2019-04-08T17:01:09+09:00 summary: remove duplicate code in biscet (GH-1270) files: M Lib/bisect.py diff --git a/Lib/bisect.py b/Lib/bisect.py index 7732c639e386..9786fc9d87c5 100644 --- a/Lib/bisect.py +++ b/Lib/bisect.py @@ -9,14 +9,7 @@ def insort_right(a, x, lo=0, hi=None): slice of a to be searched. """ - if lo < 0: - raise ValueError('lo must be non-negative') - if hi is None: - hi = len(a) - while lo < hi: - mid = (lo+hi)//2 - if x < a[mid]: hi = mid - else: lo = mid+1 + lo = bisect_right(a, x, lo, hi) a.insert(lo, x) def bisect_right(a, x, lo=0, hi=None): @@ -49,14 +42,7 @@ def insort_left(a, x, lo=0, hi=None): slice of a to be searched. 
""" - if lo < 0: - raise ValueError('lo must be non-negative') - if hi is None: - hi = len(a) - while lo < hi: - mid = (lo+hi)//2 - if a[mid] < x: lo = mid+1 - else: hi = mid + lo = bisect_left(a, x, lo, hi) a.insert(lo, x) From webhook-mailer at python.org Mon Apr 8 05:14:50 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Mon, 08 Apr 2019 09:14:50 -0000 Subject: [Python-checkins] Correct "inplace" with "in-place" (GH-10480) Message-ID: https://github.com/python/cpython/commit/f4efa312d14bc792f59514c5696e29041e05deca commit: f4efa312d14bc792f59514c5696e29041e05deca branch: master author: Andre Delfino committer: Inada Naoki date: 2019-04-08T18:14:43+09:00 summary: Correct "inplace" with "in-place" (GH-10480) files: M Doc/c-api/typeobj.rst M Doc/library/operator.rst diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst index f36cfe551e4c..0647a493303d 100644 --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -2052,7 +2052,7 @@ Sequence Object Structures signature. It should modify its first operand, and return it. This slot may be left to *NULL*, in this case :c:func:`!PySequence_InPlaceConcat` will fall back to :c:func:`PySequence_Concat`. It is also used by the - augmented assignment ``+=``, after trying numeric inplace addition + augmented assignment ``+=``, after trying numeric in-place addition via the :c:member:`~PyNumberMethods.nb_inplace_add` slot. .. c:member:: ssizeargfunc PySequenceMethods.sq_inplace_repeat @@ -2061,7 +2061,7 @@ Sequence Object Structures signature. It should modify its first operand, and return it. This slot may be left to *NULL*, in this case :c:func:`!PySequence_InPlaceRepeat` will fall back to :c:func:`PySequence_Repeat`. It is also used by the - augmented assignment ``*=``, after trying numeric inplace multiplication + augmented assignment ``*=``, after trying numeric in-place multiplication via the :c:member:`~PyNumberMethods.nb_inplace_multiply` slot. diff --git a/Doc/library/operator.rst b/Doc/library/operator.rst index 250cbb6b7c07..5d0ea7dfdd89 100644 --- a/Doc/library/operator.rst +++ b/Doc/library/operator.rst @@ -440,8 +440,8 @@ Python syntax and the functions in the :mod:`operator` module. | Ordering | ``a > b`` | ``gt(a, b)`` | +-----------------------+-------------------------+---------------------------------------+ -Inplace Operators ------------------ +In-place Operators +------------------ Many operations have an "in-place" version. 
Listed below are functions providing a more primitive access to in-place operators than the usual syntax @@ -464,7 +464,7 @@ value is computed, but not assigned back to the input variable: >>> a 'hello' -For mutable targets such as lists and dictionaries, the inplace method +For mutable targets such as lists and dictionaries, the in-place method will perform the update, so no subsequent assignment is necessary: >>> s = ['h', 'e', 'l', 'l', 'o'] From webhook-mailer at python.org Mon Apr 8 05:21:43 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 08 Apr 2019 09:21:43 -0000 Subject: [Python-checkins] Correct "inplace" with "in-place" (GH-10480) Message-ID: https://github.com/python/cpython/commit/1f0ff57acbb6f3c2e8715af74b98984c6aa45ea6 commit: 1f0ff57acbb6f3c2e8715af74b98984c6aa45ea6 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-08T02:21:38-07:00 summary: Correct "inplace" with "in-place" (GH-10480) (cherry picked from commit f4efa312d14bc792f59514c5696e29041e05deca) Co-authored-by: Andre Delfino files: M Doc/c-api/typeobj.rst M Doc/library/operator.rst diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst index 6cbcc273c1f1..532508ee7228 100644 --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -1256,7 +1256,7 @@ Sequence Object Structures signature. It should modify its first operand, and return it. This slot may be left to *NULL*, in this case :c:func:`!PySequence_InPlaceConcat` will fall back to :c:func:`PySequence_Concat`. It is also used by the - augmented assignment ``+=``, after trying numeric inplace addition + augmented assignment ``+=``, after trying numeric in-place addition via the :c:member:`~PyNumberMethods.nb_inplace_add` slot. .. c:member:: ssizeargfunc PySequenceMethods.sq_inplace_repeat @@ -1265,7 +1265,7 @@ Sequence Object Structures signature. It should modify its first operand, and return it. This slot may be left to *NULL*, in this case :c:func:`!PySequence_InPlaceRepeat` will fall back to :c:func:`PySequence_Repeat`. It is also used by the - augmented assignment ``*=``, after trying numeric inplace multiplication + augmented assignment ``*=``, after trying numeric in-place multiplication via the :c:member:`~PyNumberMethods.nb_inplace_multiply` slot. diff --git a/Doc/library/operator.rst b/Doc/library/operator.rst index e4d6d05a23a7..11152f6a70b2 100644 --- a/Doc/library/operator.rst +++ b/Doc/library/operator.rst @@ -438,8 +438,8 @@ Python syntax and the functions in the :mod:`operator` module. | Ordering | ``a > b`` | ``gt(a, b)`` | +-----------------------+-------------------------+---------------------------------------+ -Inplace Operators ------------------ +In-place Operators +------------------ Many operations have an "in-place" version. Listed below are functions providing a more primitive access to in-place operators than the usual syntax @@ -462,7 +462,7 @@ value is computed, but not assigned back to the input variable: >>> a 'hello' -For mutable targets such as lists and dictionaries, the inplace method +For mutable targets such as lists and dictionaries, the in-place method will perform the update, so no subsequent assignment is necessary: >>> s = ['h', 'e', 'l', 'l', 'o'] From webhook-mailer at python.org Mon Apr 8 07:34:09 2019 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Mon, 08 Apr 2019 11:34:09 -0000 Subject: [Python-checkins] Add a What's New entry for bpo-35459. 
(GH-12706) Message-ID: https://github.com/python/cpython/commit/7a0630c530121725136526a88c49589b54da6492 commit: 7a0630c530121725136526a88c49589b54da6492 branch: master author: Serhiy Storchaka committer: GitHub date: 2019-04-08T14:34:04+03:00 summary: Add a What's New entry for bpo-35459. (GH-12706) files: M Doc/whatsnew/3.8.rst diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst index 4347b3ee4118..29d370cc8a28 100644 --- a/Doc/whatsnew/3.8.rst +++ b/Doc/whatsnew/3.8.rst @@ -686,6 +686,11 @@ Changes in Python behavior to use equality tests (``==`` and ``!=``) instead. (Contributed by Serhiy Storchaka in :issue:`34850`.) +* The CPython interpreter can swallow exceptions in some circumstances. + In Python 3.8 this happens in less cases. In particular, exceptions + raised when getting the attribute from the type dictionary are no longer + ignored. (Contributed by Serhiy Storchaka in :issue:`35459`.) + Changes in the Python API ------------------------- From webhook-mailer at python.org Mon Apr 8 09:08:55 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Mon, 08 Apr 2019 13:08:55 -0000 Subject: [Python-checkins] bpo-35416: fix potential resource warnings in distutils (GH-10918) Message-ID: https://github.com/python/cpython/commit/58721a903074d28151d008d8990c98fc31d1e798 commit: 58721a903074d28151d008d8990c98fc31d1e798 branch: master author: Micka?l Schoentgen committer: Inada Naoki date: 2019-04-08T22:08:48+09:00 summary: bpo-35416: fix potential resource warnings in distutils (GH-10918) files: A Misc/NEWS.d/next/Library/2018-12-05-09-55-05.bpo-35416.XALKZG.rst M Lib/distutils/command/bdist_rpm.py M Lib/distutils/command/bdist_wininst.py M Lib/distutils/command/upload.py diff --git a/Lib/distutils/command/bdist_rpm.py b/Lib/distutils/command/bdist_rpm.py index 02f10dd89d91..20ca7ac6dcff 100644 --- a/Lib/distutils/command/bdist_rpm.py +++ b/Lib/distutils/command/bdist_rpm.py @@ -537,7 +537,8 @@ def _make_spec_file(self): '', '%' + rpm_opt,]) if val: - spec_file.extend(open(val, 'r').read().split('\n')) + with open(val) as f: + spec_file.extend(f.read().split('\n')) else: spec_file.append(default) diff --git a/Lib/distutils/command/bdist_wininst.py b/Lib/distutils/command/bdist_wininst.py index fde56754e891..1cf2e963e0e7 100644 --- a/Lib/distutils/command/bdist_wininst.py +++ b/Lib/distutils/command/bdist_wininst.py @@ -247,47 +247,49 @@ def create_exe(self, arcname, fullname, bitmap=None): self.announce("creating %s" % installer_name) if bitmap: - bitmapdata = open(bitmap, "rb").read() + with open(bitmap, "rb") as f: + bitmapdata = f.read() bitmaplen = len(bitmapdata) else: bitmaplen = 0 - file = open(installer_name, "wb") - file.write(self.get_exe_bytes()) - if bitmap: - file.write(bitmapdata) - - # Convert cfgdata from unicode to ascii, mbcs encoded - if isinstance(cfgdata, str): - cfgdata = cfgdata.encode("mbcs") - - # Append the pre-install script - cfgdata = cfgdata + b"\0" - if self.pre_install_script: - # We need to normalize newlines, so we open in text mode and - # convert back to bytes. "latin-1" simply avoids any possible - # failures. 
- with open(self.pre_install_script, "r", - encoding="latin-1") as script: - script_data = script.read().encode("latin-1") - cfgdata = cfgdata + script_data + b"\n\0" - else: - # empty pre-install script + with open(installer_name, "wb") as file: + file.write(self.get_exe_bytes()) + if bitmap: + file.write(bitmapdata) + + # Convert cfgdata from unicode to ascii, mbcs encoded + if isinstance(cfgdata, str): + cfgdata = cfgdata.encode("mbcs") + + # Append the pre-install script cfgdata = cfgdata + b"\0" - file.write(cfgdata) - - # The 'magic number' 0x1234567B is used to make sure that the - # binary layout of 'cfgdata' is what the wininst.exe binary - # expects. If the layout changes, increment that number, make - # the corresponding changes to the wininst.exe sources, and - # recompile them. - header = struct.pack(" https://github.com/python/cpython/commit/eb7e29f2a9d075accc1ab3faf3612ac44f5e2183 commit: eb7e29f2a9d075accc1ab3faf3612ac44f5e2183 branch: master author: Giampaolo Rodola committer: GitHub date: 2019-04-09T00:34:02+02:00 summary: bpo-35934: Add socket.create_server() utility function (GH-11784) files: A Misc/NEWS.d/next/Library/2019-02-07-20-25-39.bpo-35934.QmfNmY.rst M Doc/library/socket.rst M Doc/whatsnew/3.8.rst M Lib/ftplib.py M Lib/socket.py M Lib/test/_test_multiprocessing.py M Lib/test/eintrdata/eintr_tester.py M Lib/test/test_asyncio/functional.py M Lib/test/test_asyncio/test_events.py M Lib/test/test_asyncio/test_streams.py M Lib/test/test_epoll.py M Lib/test/test_ftplib.py M Lib/test/test_httplib.py M Lib/test/test_kqueue.py M Lib/test/test_socket.py M Lib/test/test_ssl.py M Lib/test/test_support.py diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst index d466884d6135..b4a07bd5d5d2 100644 --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -595,6 +595,50 @@ The following functions all create :ref:`socket objects `. .. versionchanged:: 3.2 *source_address* was added. +.. function:: create_server(address, *, family=AF_INET, backlog=0, reuse_port=False, dualstack_ipv6=False) + + Convenience function which creates a TCP socket bound to *address* (a 2-tuple + ``(host, port)``) and return the socket object. + + *family* should be either :data:`AF_INET` or :data:`AF_INET6`. + *backlog* is the queue size passed to :meth:`socket.listen`; when ``0`` + a default reasonable value is chosen. + *reuse_port* dictates whether to set the :data:`SO_REUSEPORT` socket option. + + If *dualstack_ipv6* is true and the platform supports it the socket will + be able to accept both IPv4 and IPv6 connections, else it will raise + :exc:`ValueError`. Most POSIX platforms and Windows are supposed to support + this functionality. + When this functionality is enabled the address returned by + :meth:`socket.getpeername` when an IPv4 connection occurs will be an IPv6 + address represented as an IPv4-mapped IPv6 address. + If *dualstack_ipv6* is false it will explicitly disable this functionality + on platforms that enable it by default (e.g. Linux). + This parameter can be used in conjunction with :func:`has_dualstack_ipv6`: + + :: + + import socket + + addr = ("", 8080) # all interfaces, port 8080 + if socket.has_dualstack_ipv6(): + s = socket.create_server(addr, family=socket.AF_INET6, dualstack_ipv6=True) + else: + s = socket.create_server(addr) + + .. note:: + On POSIX platforms the :data:`SO_REUSEADDR` socket option is set in order to + immediately reuse previous sockets which were bound on the same *address* + and remained in TIME_WAIT state. + + .. versionadded:: 3.8 + +.. 
function:: has_dualstack_ipv6() + + Return ``True`` if the platform supports creating a TCP socket which can + handle both IPv4 and IPv6 connections. + + .. versionadded:: 3.8 .. function:: fromfd(fd, family, type, proto=0) @@ -1778,7 +1822,6 @@ sends traffic to the first one connected successfully. :: data = s.recv(1024) print('Received', repr(data)) - The next example shows how to write a very simple network sniffer with raw sockets on Windows. The example requires administrator privileges to modify the interface:: diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst index 29d370cc8a28..aa75beeabe70 100644 --- a/Doc/whatsnew/3.8.rst +++ b/Doc/whatsnew/3.8.rst @@ -294,6 +294,15 @@ contain characters unrepresentable at the OS level. (Contributed by Serhiy Storchaka in :issue:`33721`.) +socket +------ + +Added :meth:`~socket.create_server()` and :meth:`~socket.has_dualstack_ipv6()` +convenience functions to automate the necessary tasks usually involved when +creating a server socket, including accepting both IPv4 and IPv6 connections +on the same socket. (Contributed by Giampaolo Rodola in :issue:`17561`.) + + shutil ------ diff --git a/Lib/ftplib.py b/Lib/ftplib.py index 9611282ecacb..a9b1aee39e4a 100644 --- a/Lib/ftplib.py +++ b/Lib/ftplib.py @@ -302,26 +302,7 @@ def sendeprt(self, host, port): def makeport(self): '''Create a new socket and send a PORT command for it.''' - err = None - sock = None - for res in socket.getaddrinfo(None, 0, self.af, socket.SOCK_STREAM, 0, socket.AI_PASSIVE): - af, socktype, proto, canonname, sa = res - try: - sock = socket.socket(af, socktype, proto) - sock.bind(sa) - except OSError as _: - err = _ - if sock: - sock.close() - sock = None - continue - break - if sock is None: - if err is not None: - raise err - else: - raise OSError("getaddrinfo returns an empty list") - sock.listen(1) + sock = socket.create_server(("", 0), family=self.af, backlog=1) port = sock.getsockname()[1] # Get proper port host = self.sock.getsockname()[0] # Get proper host if self.af == socket.AF_INET: diff --git a/Lib/socket.py b/Lib/socket.py index 772b9e185bf1..2e51cd16f3ac 100644 --- a/Lib/socket.py +++ b/Lib/socket.py @@ -60,8 +60,8 @@ EAGAIN = getattr(errno, 'EAGAIN', 11) EWOULDBLOCK = getattr(errno, 'EWOULDBLOCK', 11) -__all__ = ["fromfd", "getfqdn", "create_connection", - "AddressFamily", "SocketKind"] +__all__ = ["fromfd", "getfqdn", "create_connection", "create_server", + "has_dualstack_ipv6", "AddressFamily", "SocketKind"] __all__.extend(os._get_exports_list(_socket)) # Set up the socket.AF_* socket.SOCK_* constants as members of IntEnums for @@ -728,6 +728,89 @@ def create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT, else: raise error("getaddrinfo returns an empty list") + +def has_dualstack_ipv6(): + """Return True if the platform supports creating a SOCK_STREAM socket + which can handle both AF_INET and AF_INET6 (IPv4 / IPv6) connections. + """ + if not has_ipv6 \ + or not hasattr(_socket, 'IPPROTO_IPV6') \ + or not hasattr(_socket, 'IPV6_V6ONLY'): + return False + try: + with socket(AF_INET6, SOCK_STREAM) as sock: + sock.setsockopt(IPPROTO_IPV6, IPV6_V6ONLY, 0) + return True + except error: + return False + + +def create_server(address, *, family=AF_INET, backlog=0, reuse_port=False, + dualstack_ipv6=False): + """Convenience function which creates a SOCK_STREAM type socket + bound to *address* (a 2-tuple (host, port)) and return the socket + object. + + *family* should be either AF_INET or AF_INET6. + *backlog* is the queue size passed to socket.listen(). 
+ *reuse_port* dictates whether to use the SO_REUSEPORT socket option. + *dualstack_ipv6*: if true and the platform supports it, it will + create an AF_INET6 socket able to accept both IPv4 or IPv6 + connections. When false it will explicitly disable this option on + platforms that enable it by default (e.g. Linux). + + >>> with create_server((None, 8000)) as server: + ... while True: + ... conn, addr = server.accept() + ... # handle new connection + """ + if reuse_port and not hasattr(_socket, "SO_REUSEPORT"): + raise ValueError("SO_REUSEPORT not supported on this platform") + if dualstack_ipv6: + if not has_dualstack_ipv6(): + raise ValueError("dualstack_ipv6 not supported on this platform") + if family != AF_INET6: + raise ValueError("dualstack_ipv6 requires AF_INET6 family") + sock = socket(family, SOCK_STREAM) + try: + # Note about Windows. We don't set SO_REUSEADDR because: + # 1) It's unnecessary: bind() will succeed even in case of a + # previous closed socket on the same address and still in + # TIME_WAIT state. + # 2) If set, another socket is free to bind() on the same + # address, effectively preventing this one from accepting + # connections. Also, it may set the process in a state where + # it'll no longer respond to any signals or graceful kills. + # See: msdn2.microsoft.com/en-us/library/ms740621(VS.85).aspx + if os.name not in ('nt', 'cygwin') and \ + hasattr(_socket, 'SO_REUSEADDR'): + try: + sock.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1) + except error: + # Fail later on bind(), for platforms which may not + # support this option. + pass + if reuse_port: + sock.setsockopt(SOL_SOCKET, SO_REUSEPORT, 1) + if has_ipv6 and family == AF_INET6: + if dualstack_ipv6: + sock.setsockopt(IPPROTO_IPV6, IPV6_V6ONLY, 0) + elif hasattr(_socket, "IPV6_V6ONLY") and \ + hasattr(_socket, "IPPROTO_IPV6"): + sock.setsockopt(IPPROTO_IPV6, IPV6_V6ONLY, 1) + try: + sock.bind(address) + except error as err: + msg = '%s (while attempting to bind on address %r)' % \ + (err.strerror, address) + raise error(err.errno, msg) from None + sock.listen(backlog) + return sock + except error: + sock.close() + raise + + def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): """Resolve host and port into list of address info entries. 
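A minimal usage sketch of the two helpers added above, mirroring the example in the documentation hunk earlier in this checkin and assuming an interpreter that includes this patch (the port number and the one-shot handler are arbitrary illustrations, not part of the commit):

    import socket

    addr = ("", 8080)  # all interfaces, port 8080 (arbitrary choice)
    if socket.has_dualstack_ipv6():
        # A single socket that accepts both IPv4 and IPv6 clients.
        srv = socket.create_server(addr, family=socket.AF_INET6,
                                   dualstack_ipv6=True)
    else:
        # Fall back to a plain IPv4 listening socket.
        srv = socket.create_server(addr)

    with srv:
        conn, peer = srv.accept()      # blocks until a client connects
        with conn:
            conn.sendall(b"hello from create_server\n")
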
diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index f4239badfe8b..553ab8178316 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -3334,9 +3334,7 @@ def _listener(cls, conn, families): new_conn.close() l.close() - l = socket.socket() - l.bind((test.support.HOST, 0)) - l.listen() + l = socket.create_server((test.support.HOST, 0)) conn.send(l.getsockname()) new_conn, addr = l.accept() conn.send(new_conn) @@ -4345,9 +4343,7 @@ def _child_test_wait_socket(cls, address, slow): def test_wait_socket(self, slow=False): from multiprocessing.connection import wait - l = socket.socket() - l.bind((test.support.HOST, 0)) - l.listen() + l = socket.create_server((test.support.HOST, 0)) addr = l.getsockname() readers = [] procs = [] diff --git a/Lib/test/eintrdata/eintr_tester.py b/Lib/test/eintrdata/eintr_tester.py index 5f956b548fc4..404934ce97a2 100644 --- a/Lib/test/eintrdata/eintr_tester.py +++ b/Lib/test/eintrdata/eintr_tester.py @@ -285,12 +285,9 @@ def test_sendmsg(self): self._test_send(lambda sock, data: sock.sendmsg([data])) def test_accept(self): - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock = socket.create_server((support.HOST, 0)) self.addCleanup(sock.close) - - sock.bind((support.HOST, 0)) port = sock.getsockname()[1] - sock.listen() code = '\n'.join(( 'import socket, time', diff --git a/Lib/test/test_asyncio/functional.py b/Lib/test/test_asyncio/functional.py index 6b5b3cc907cc..70cd140f4796 100644 --- a/Lib/test/test_asyncio/functional.py +++ b/Lib/test/test_asyncio/functional.py @@ -60,21 +60,13 @@ def tcp_server(self, server_prog, *, else: addr = ('127.0.0.1', 0) - sock = socket.socket(family, socket.SOCK_STREAM) - + sock = socket.create_server(addr, family=family, backlog=backlog) if timeout is None: raise RuntimeError('timeout is required') if timeout <= 0: raise RuntimeError('only blocking sockets are supported') sock.settimeout(timeout) - try: - sock.bind(addr) - sock.listen(backlog) - except OSError as ex: - sock.close() - raise ex - return TestThreadedServer( self, sock, server_prog, timeout, max_clients) diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index a2b954eec4ad..b46b614e556e 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -667,9 +667,7 @@ def data_received(self, data): super().data_received(data) self.transport.write(expected_response) - lsock = socket.socket() - lsock.bind(('127.0.0.1', 0)) - lsock.listen(1) + lsock = socket.create_server(('127.0.0.1', 0), backlog=1) addr = lsock.getsockname() message = b'test data' @@ -1118,9 +1116,7 @@ def connection_made(self, transport): super().connection_made(transport) proto.set_result(self) - sock_ob = socket.socket(type=socket.SOCK_STREAM) - sock_ob.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - sock_ob.bind(('0.0.0.0', 0)) + sock_ob = socket.create_server(('0.0.0.0', 0)) f = self.loop.create_server(TestMyProto, sock=sock_ob) server = self.loop.run_until_complete(f) @@ -1136,9 +1132,7 @@ def connection_made(self, transport): server.close() def test_create_server_addr_in_use(self): - sock_ob = socket.socket(type=socket.SOCK_STREAM) - sock_ob.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - sock_ob.bind(('0.0.0.0', 0)) + sock_ob = socket.create_server(('0.0.0.0', 0)) f = self.loop.create_server(MyProto, sock=sock_ob) server = self.loop.run_until_complete(f) diff --git a/Lib/test/test_asyncio/test_streams.py 
b/Lib/test/test_asyncio/test_streams.py index 043fac7c6a2d..630f91dbf478 100644 --- a/Lib/test/test_asyncio/test_streams.py +++ b/Lib/test/test_asyncio/test_streams.py @@ -592,8 +592,7 @@ def __init__(self, loop): await client_writer.wait_closed() def start(self): - sock = socket.socket() - sock.bind(('127.0.0.1', 0)) + sock = socket.create_server(('127.0.0.1', 0)) self.server = self.loop.run_until_complete( asyncio.start_server(self.handle_client, sock=sock, @@ -605,8 +604,7 @@ def handle_client_callback(self, client_reader, client_writer): client_writer)) def start_callback(self): - sock = socket.socket() - sock.bind(('127.0.0.1', 0)) + sock = socket.create_server(('127.0.0.1', 0)) addr = sock.getsockname() sock.close() self.server = self.loop.run_until_complete( @@ -796,10 +794,7 @@ def test_drain_raises(self): def server(): # Runs in a separate thread. - sock = socket.socket() - with sock: - sock.bind(('localhost', 0)) - sock.listen(1) + with socket.create_server(('localhost', 0)) as sock: addr = sock.getsockname() q.put(addr) clt, _ = sock.accept() diff --git a/Lib/test/test_epoll.py b/Lib/test/test_epoll.py index 53ce1d55ff9c..8ac0f31d8051 100644 --- a/Lib/test/test_epoll.py +++ b/Lib/test/test_epoll.py @@ -41,9 +41,7 @@ class TestEPoll(unittest.TestCase): def setUp(self): - self.serverSocket = socket.socket() - self.serverSocket.bind(('127.0.0.1', 0)) - self.serverSocket.listen() + self.serverSocket = socket.create_server(('127.0.0.1', 0)) self.connections = [self.serverSocket] def tearDown(self): diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py index da8ba32917be..b0e46411a2e2 100644 --- a/Lib/test/test_ftplib.py +++ b/Lib/test/test_ftplib.py @@ -132,9 +132,7 @@ def cmd_port(self, arg): self.push('200 active data connection established') def cmd_pasv(self, arg): - with socket.socket() as sock: - sock.bind((self.socket.getsockname()[0], 0)) - sock.listen() + with socket.create_server((self.socket.getsockname()[0], 0)) as sock: sock.settimeout(TIMEOUT) ip, port = sock.getsockname()[:2] ip = ip.replace('.', ','); p1 = port / 256; p2 = port % 256 @@ -150,9 +148,8 @@ def cmd_eprt(self, arg): self.push('200 active data connection established') def cmd_epsv(self, arg): - with socket.socket(socket.AF_INET6) as sock: - sock.bind((self.socket.getsockname()[0], 0)) - sock.listen() + with socket.create_server((self.socket.getsockname()[0], 0), + family=socket.AF_INET6) as sock: sock.settimeout(TIMEOUT) port = sock.getsockname()[1] self.push('229 entering extended passive mode (|||%d|)' %port) diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py index 4755f8b4b9de..65914616c7b5 100644 --- a/Lib/test/test_httplib.py +++ b/Lib/test/test_httplib.py @@ -1118,11 +1118,8 @@ def test_read1_bound_content_length(self): def test_response_fileno(self): # Make sure fd returned by fileno is valid. 
- serv = socket.socket( - socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP) + serv = socket.create_server((HOST, 0)) self.addCleanup(serv.close) - serv.bind((HOST, 0)) - serv.listen() result = None def run_server(): diff --git a/Lib/test/test_kqueue.py b/Lib/test/test_kqueue.py index 1099c759a791..998fd9d46496 100644 --- a/Lib/test/test_kqueue.py +++ b/Lib/test/test_kqueue.py @@ -110,9 +110,7 @@ def test_create_event(self): def test_queue_event(self): - serverSocket = socket.socket() - serverSocket.bind(('127.0.0.1', 0)) - serverSocket.listen() + serverSocket = socket.create_server(('127.0.0.1', 0)) client = socket.socket() client.setblocking(False) try: diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py index 8a990ea31410..b0bdb11d9028 100644 --- a/Lib/test/test_socket.py +++ b/Lib/test/test_socket.py @@ -6068,9 +6068,133 @@ def test_new_tcp_flags(self): self.assertEqual([], unknown, "New TCP flags were discovered. See bpo-32394 for more information") + +class CreateServerTest(unittest.TestCase): + + def test_address(self): + port = support.find_unused_port() + with socket.create_server(("127.0.0.1", port)) as sock: + self.assertEqual(sock.getsockname()[0], "127.0.0.1") + self.assertEqual(sock.getsockname()[1], port) + if support.IPV6_ENABLED: + with socket.create_server(("::1", port), + family=socket.AF_INET6) as sock: + self.assertEqual(sock.getsockname()[0], "::1") + self.assertEqual(sock.getsockname()[1], port) + + def test_family_and_type(self): + with socket.create_server(("127.0.0.1", 0)) as sock: + self.assertEqual(sock.family, socket.AF_INET) + self.assertEqual(sock.type, socket.SOCK_STREAM) + if support.IPV6_ENABLED: + with socket.create_server(("::1", 0), family=socket.AF_INET6) as s: + self.assertEqual(s.family, socket.AF_INET6) + self.assertEqual(sock.type, socket.SOCK_STREAM) + + def test_reuse_port(self): + if not hasattr(socket, "SO_REUSEPORT"): + with self.assertRaises(ValueError): + socket.create_server(("localhost", 0), reuse_port=True) + else: + with socket.create_server(("localhost", 0)) as sock: + opt = sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) + self.assertEqual(opt, 0) + with socket.create_server(("localhost", 0), reuse_port=True) as sock: + opt = sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) + self.assertNotEqual(opt, 0) + + @unittest.skipIf(not hasattr(_socket, 'IPPROTO_IPV6') or + not hasattr(_socket, 'IPV6_V6ONLY'), + "IPV6_V6ONLY option not supported") + @unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 required for this test') + def test_ipv6_only_default(self): + with socket.create_server(("::1", 0), family=socket.AF_INET6) as sock: + assert sock.getsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY) + + @unittest.skipIf(not socket.has_dualstack_ipv6(), + "dualstack_ipv6 not supported") + @unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 required for this test') + def test_dualstack_ipv6_family(self): + with socket.create_server(("::1", 0), family=socket.AF_INET6, + dualstack_ipv6=True) as sock: + self.assertEqual(sock.family, socket.AF_INET6) + + +class CreateServerFunctionalTest(unittest.TestCase): + timeout = 3 + + def setUp(self): + self.thread = None + + def tearDown(self): + if self.thread is not None: + self.thread.join(self.timeout) + + def echo_server(self, sock): + def run(sock): + with sock: + conn, _ = sock.accept() + with conn: + event.wait(self.timeout) + msg = conn.recv(1024) + if not msg: + return + conn.sendall(msg) + + event = threading.Event() + sock.settimeout(self.timeout) + self.thread = 
threading.Thread(target=run, args=(sock, )) + self.thread.start() + event.set() + + def echo_client(self, addr, family): + with socket.socket(family=family) as sock: + sock.settimeout(self.timeout) + sock.connect(addr) + sock.sendall(b'foo') + self.assertEqual(sock.recv(1024), b'foo') + + def test_tcp4(self): + port = support.find_unused_port() + with socket.create_server(("", port)) as sock: + self.echo_server(sock) + self.echo_client(("127.0.0.1", port), socket.AF_INET) + + @unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 required for this test') + def test_tcp6(self): + port = support.find_unused_port() + with socket.create_server(("", port), + family=socket.AF_INET6) as sock: + self.echo_server(sock) + self.echo_client(("::1", port), socket.AF_INET6) + + # --- dual stack tests + + @unittest.skipIf(not socket.has_dualstack_ipv6(), + "dualstack_ipv6 not supported") + @unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 required for this test') + def test_dual_stack_client_v4(self): + port = support.find_unused_port() + with socket.create_server(("", port), family=socket.AF_INET6, + dualstack_ipv6=True) as sock: + self.echo_server(sock) + self.echo_client(("127.0.0.1", port), socket.AF_INET) + + @unittest.skipIf(not socket.has_dualstack_ipv6(), + "dualstack_ipv6 not supported") + @unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 required for this test') + def test_dual_stack_client_v6(self): + port = support.find_unused_port() + with socket.create_server(("", port), family=socket.AF_INET6, + dualstack_ipv6=True) as sock: + self.echo_server(sock) + self.echo_client(("::1", port), socket.AF_INET6) + + def test_main(): tests = [GeneralModuleTests, BasicTCPTest, TCPCloserTest, TCPTimeoutTest, - TestExceptions, BufferIOTest, BasicTCPTest2, BasicUDPTest, UDPTimeoutTest ] + TestExceptions, BufferIOTest, BasicTCPTest2, BasicUDPTest, + UDPTimeoutTest, CreateServerTest, CreateServerFunctionalTest] tests.extend([ NonBlockingTCPTests, diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py index 55718220d88d..4444e945952f 100644 --- a/Lib/test/test_ssl.py +++ b/Lib/test/test_ssl.py @@ -765,9 +765,7 @@ def test_server_side(self): def test_unknown_channel_binding(self): # should raise ValueError for unknown type - s = socket.socket(socket.AF_INET) - s.bind(('127.0.0.1', 0)) - s.listen() + s = socket.create_server(('127.0.0.1', 0)) c = socket.socket(socket.AF_INET) c.connect(s.getsockname()) with test_wrap_socket(c, do_handshake_on_connect=False) as ss: @@ -1663,11 +1661,8 @@ def test_subclass(self): ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) ctx.check_hostname = False ctx.verify_mode = ssl.CERT_NONE - with socket.socket() as s: - s.bind(("127.0.0.1", 0)) - s.listen() - c = socket.socket() - c.connect(s.getsockname()) + with socket.create_server(("127.0.0.1", 0)) as s: + c = socket.create_connection(s.getsockname()) c.setblocking(False) with ctx.wrap_socket(c, False, do_handshake_on_connect=False) as c: with self.assertRaises(ssl.SSLWantReadError) as cm: diff --git a/Lib/test/test_support.py b/Lib/test/test_support.py index 4a8f3c581872..cb664bab1710 100644 --- a/Lib/test/test_support.py +++ b/Lib/test/test_support.py @@ -91,14 +91,12 @@ def test_forget(self): support.rmtree('__pycache__') def test_HOST(self): - s = socket.socket() - s.bind((support.HOST, 0)) + s = socket.create_server((support.HOST, 0)) s.close() def test_find_unused_port(self): port = support.find_unused_port() - s = socket.socket() - s.bind((support.HOST, port)) + s = socket.create_server((support.HOST, port)) s.close() def 
test_bind_port(self): diff --git a/Misc/NEWS.d/next/Library/2019-02-07-20-25-39.bpo-35934.QmfNmY.rst b/Misc/NEWS.d/next/Library/2019-02-07-20-25-39.bpo-35934.QmfNmY.rst new file mode 100644 index 000000000000..0601ac915fc8 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-02-07-20-25-39.bpo-35934.QmfNmY.rst @@ -0,0 +1,4 @@ +Added :meth:`~socket.create_server()` and :meth:`~socket.has_dualstack_ipv6()` +convenience functions to automate the necessary tasks usually involved when +creating a server socket, including accepting both IPv4 and IPv6 connections +on the same socket. (Contributed by Giampaolo Rodola in :issue:`17561`.) From webhook-mailer at python.org Mon Apr 8 19:36:51 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 08 Apr 2019 23:36:51 -0000 Subject: [Python-checkins] bpo-36565: Fix libregrtest for Python without builtin _abc (GH-12733) Message-ID: https://github.com/python/cpython/commit/79b5d29041bd85ea3baa050b3fa2481344ea35c9 commit: 79b5d29041bd85ea3baa050b3fa2481344ea35c9 branch: master author: Victor Stinner committer: GitHub date: 2019-04-09T01:36:34+02:00 summary: bpo-36565: Fix libregrtest for Python without builtin _abc (GH-12733) Fix reference hunting (``python3 -m test -R 3:3``) when Python has no built-in abc module: fix _get_dump() reimplementation of libregrtest. files: A Misc/NEWS.d/next/Tests/2019-04-08-19-01-21.bpo-36565.2bxgtU.rst M Lib/test/libregrtest/refleak.py diff --git a/Lib/test/libregrtest/refleak.py b/Lib/test/libregrtest/refleak.py index 6724488fcfb0..d68ea63b5b3c 100644 --- a/Lib/test/libregrtest/refleak.py +++ b/Lib/test/libregrtest/refleak.py @@ -8,9 +8,13 @@ try: from _abc import _get_dump except ImportError: + import weakref + def _get_dump(cls): - # For legacy Python version - return (cls._abc_registry, cls._abc_cache, + # Reimplement _get_dump() for pure-Python implementation of + # the abc module (Lib/_py_abc.py) + registry_weakrefs = set(weakref.ref(obj) for obj in cls._abc_registry) + return (registry_weakrefs, cls._abc_cache, cls._abc_negative_cache, cls._abc_negative_cache_version) diff --git a/Misc/NEWS.d/next/Tests/2019-04-08-19-01-21.bpo-36565.2bxgtU.rst b/Misc/NEWS.d/next/Tests/2019-04-08-19-01-21.bpo-36565.2bxgtU.rst new file mode 100644 index 000000000000..8a14d08ba88f --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2019-04-08-19-01-21.bpo-36565.2bxgtU.rst @@ -0,0 +1,2 @@ +Fix reference hunting (``python3 -m test -R 3:3``) when Python has no +built-in abc module. From webhook-mailer at python.org Mon Apr 8 19:54:25 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 08 Apr 2019 23:54:25 -0000 Subject: [Python-checkins] bpo-36565: Fix libregrtest for Python without builtin _abc (GH-12733) (GH-12734) Message-ID: https://github.com/python/cpython/commit/2368d86ed1249505b10561e005fc57f4884619c1 commit: 2368d86ed1249505b10561e005fc57f4884619c1 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: Victor Stinner date: 2019-04-09T01:54:16+02:00 summary: bpo-36565: Fix libregrtest for Python without builtin _abc (GH-12733) (GH-12734) Fix reference hunting (``python3 -m test -R 3:3``) when Python has no built-in abc module: fix _get_dump() reimplementation of libregrtest. 
(cherry picked from commit 79b5d29041bd85ea3baa050b3fa2481344ea35c9) Co-authored-by: Victor Stinner files: A Misc/NEWS.d/next/Tests/2019-04-08-19-01-21.bpo-36565.2bxgtU.rst M Lib/test/libregrtest/refleak.py diff --git a/Lib/test/libregrtest/refleak.py b/Lib/test/libregrtest/refleak.py index 6724488fcfb0..d68ea63b5b3c 100644 --- a/Lib/test/libregrtest/refleak.py +++ b/Lib/test/libregrtest/refleak.py @@ -8,9 +8,13 @@ try: from _abc import _get_dump except ImportError: + import weakref + def _get_dump(cls): - # For legacy Python version - return (cls._abc_registry, cls._abc_cache, + # Reimplement _get_dump() for pure-Python implementation of + # the abc module (Lib/_py_abc.py) + registry_weakrefs = set(weakref.ref(obj) for obj in cls._abc_registry) + return (registry_weakrefs, cls._abc_cache, cls._abc_negative_cache, cls._abc_negative_cache_version) diff --git a/Misc/NEWS.d/next/Tests/2019-04-08-19-01-21.bpo-36565.2bxgtU.rst b/Misc/NEWS.d/next/Tests/2019-04-08-19-01-21.bpo-36565.2bxgtU.rst new file mode 100644 index 000000000000..8a14d08ba88f --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2019-04-08-19-01-21.bpo-36565.2bxgtU.rst @@ -0,0 +1,2 @@ +Fix reference hunting (``python3 -m test -R 3:3``) when Python has no +built-in abc module. From webhook-mailer at python.org Mon Apr 8 22:42:17 2019 From: webhook-mailer at python.org (Giampaolo Rodola) Date: Tue, 09 Apr 2019 02:42:17 -0000 Subject: [Python-checkins] BPO-17561: set create_server backlog default to None (GH-12735) Message-ID: https://github.com/python/cpython/commit/8702b67dad62a9084f6c1823dce10653743667c8 commit: 8702b67dad62a9084f6c1823dce10653743667c8 branch: master author: Giampaolo Rodola committer: GitHub date: 2019-04-09T04:42:06+02:00 summary: BPO-17561: set create_server backlog default to None (GH-12735) It turns out doing socket.listen(0) does not equal to "choose a reasonable default". It actually means "set backlog to 0". As such set backlog=None as the default for socket.create_server. Fixes the following BB failures: https://github.com/python/cpython/pull/11784#issuecomment-481036369 Ref. BPO-1756, GH-11784. files: A Misc/NEWS.d/next/Library/2019-04-09-04-08-46.bpo-17561.hOhVnh.rst M Doc/library/socket.rst M Lib/socket.py diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst index b4a07bd5d5d2..62c83470271c 100644 --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -595,7 +595,7 @@ The following functions all create :ref:`socket objects `. .. versionchanged:: 3.2 *source_address* was added. -.. function:: create_server(address, *, family=AF_INET, backlog=0, reuse_port=False, dualstack_ipv6=False) +.. function:: create_server(address, *, family=AF_INET, backlog=None, reuse_port=False, dualstack_ipv6=False) Convenience function which creates a TCP socket bound to *address* (a 2-tuple ``(host, port)``) and return the socket object. 
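To make the backlog behaviour change concrete, a small sketch assuming a build with this patch applied (the addresses are arbitrary):

    import socket

    # backlog omitted (None): create_server() calls sock.listen() with no
    # argument, letting the OS choose a reasonable queue size.
    srv_default = socket.create_server(("127.0.0.1", 0))

    # backlog given explicitly: the value is passed straight to sock.listen(),
    # so 0 really means "set the backlog to zero", not "pick a default".
    srv_zero = socket.create_server(("127.0.0.1", 0), backlog=0)

    srv_default.close()
    srv_zero.close()
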
diff --git a/Lib/socket.py b/Lib/socket.py index 2e51cd16f3ac..0dd8ec70e168 100644 --- a/Lib/socket.py +++ b/Lib/socket.py @@ -745,7 +745,7 @@ def has_dualstack_ipv6(): return False -def create_server(address, *, family=AF_INET, backlog=0, reuse_port=False, +def create_server(address, *, family=AF_INET, backlog=None, reuse_port=False, dualstack_ipv6=False): """Convenience function which creates a SOCK_STREAM type socket bound to *address* (a 2-tuple (host, port)) and return the socket @@ -804,7 +804,10 @@ def create_server(address, *, family=AF_INET, backlog=0, reuse_port=False, msg = '%s (while attempting to bind on address %r)' % \ (err.strerror, address) raise error(err.errno, msg) from None - sock.listen(backlog) + if backlog is None: + sock.listen() + else: + sock.listen(backlog) return sock except error: sock.close() diff --git a/Misc/NEWS.d/next/Library/2019-04-09-04-08-46.bpo-17561.hOhVnh.rst b/Misc/NEWS.d/next/Library/2019-04-09-04-08-46.bpo-17561.hOhVnh.rst new file mode 100644 index 000000000000..e281c22305b9 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-04-09-04-08-46.bpo-17561.hOhVnh.rst @@ -0,0 +1 @@ +Set backlog=None as the default for socket.create_server. From webhook-mailer at python.org Tue Apr 9 00:35:31 2019 From: webhook-mailer at python.org (Benjamin Peterson) Date: Tue, 09 Apr 2019 04:35:31 -0000 Subject: [Python-checkins] closes bpo-35848: Move all documentation regarding the readinto out of IOBase. (GH-11893) Message-ID: https://github.com/python/cpython/commit/7b97ab35b28b761ab1253df427ee674b1a90f465 commit: 7b97ab35b28b761ab1253df427ee674b1a90f465 branch: master author: Steve Palmer committer: Benjamin Peterson date: 2019-04-08T21:35:27-07:00 summary: closes bpo-35848: Move all documentation regarding the readinto out of IOBase. (GH-11893) Move all documentation regarding the readinto method into either io.RawIOBase or io.BufferedIOBase. Corresponding changes to documentation in the _pyio.py module. files: M Doc/library/io.rst M Lib/_pyio.py diff --git a/Doc/library/io.rst b/Doc/library/io.rst index e623a041acf8..9738c5c2ad27 100644 --- a/Doc/library/io.rst +++ b/Doc/library/io.rst @@ -226,7 +226,7 @@ I/O Base Classes implementations represent a file that cannot be read, written or seeked. - Even though :class:`IOBase` does not declare :meth:`read`, :meth:`readinto`, + Even though :class:`IOBase` does not declare :meth:`read` or :meth:`write` because their signatures will vary, implementations and clients should consider those methods part of the interface. Also, implementations may raise a :exc:`ValueError` (or :exc:`UnsupportedOperation`) @@ -234,9 +234,7 @@ I/O Base Classes The basic type used for binary data read from or written to a file is :class:`bytes`. Other :term:`bytes-like objects ` are - accepted as method arguments too. In some cases, such as - :meth:`~RawIOBase.readinto`, a writable object such as :class:`bytearray` - is required. Text I/O classes work with :class:`str` data. + accepted as method arguments too. Text I/O classes work with :class:`str` data. Note that calling any method (even inquiries) on a closed stream is undefined. Implementations may raise :exc:`ValueError` in this case. @@ -405,7 +403,8 @@ I/O Base Classes Read bytes into a pre-allocated, writable :term:`bytes-like object` *b*, and return the - number of bytes read. If the object is in non-blocking mode and no bytes + number of bytes read. For example, *b* might be a :class:`bytearray`. 
+ If the object is in non-blocking mode and no bytes are available, ``None`` is returned. .. method:: write(b) @@ -495,6 +494,7 @@ I/O Base Classes Read bytes into a pre-allocated, writable :term:`bytes-like object` *b* and return the number of bytes read. + For example, *b* might be a :class:`bytearray`. Like :meth:`read`, multiple reads may be issued to the underlying raw stream, unless the latter is interactive. @@ -757,8 +757,7 @@ Text I/O .. class:: TextIOBase Base class for text streams. This class provides a character and line based - interface to stream I/O. There is no :meth:`readinto` method because - Python's character strings are immutable. It inherits :class:`IOBase`. + interface to stream I/O. It inherits :class:`IOBase`. There is no public constructor. :class:`TextIOBase` provides or overrides these data attributes and @@ -1048,4 +1047,3 @@ The above implicitly extends to text files, since the :func:`open()` function will wrap a buffered object inside a :class:`TextIOWrapper`. This includes standard streams and therefore affects the built-in function :func:`print()` as well. - diff --git a/Lib/_pyio.py b/Lib/_pyio.py index b0593c3d3ab5..e868fdc7cbc5 100644 --- a/Lib/_pyio.py +++ b/Lib/_pyio.py @@ -292,16 +292,15 @@ class IOBase(metaclass=abc.ABCMeta): derived classes can override selectively; the default implementations represent a file that cannot be read, written or seeked. - Even though IOBase does not declare read, readinto, or write because + Even though IOBase does not declare read or write because their signatures will vary, implementations and clients should consider those methods part of the interface. Also, implementations may raise UnsupportedOperation when operations they do not support are called. The basic type used for binary data read from or written to a file is - bytes. Other bytes-like objects are accepted as method arguments too. In - some cases (such as readinto), a writable object is required. Text I/O - classes work with str data. + bytes. Other bytes-like objects are accepted as method arguments too. + Text I/O classes work with str data. Note that calling any method (even inquiries) on a closed stream is undefined. Implementations may raise OSError in this case. @@ -1763,8 +1762,7 @@ class TextIOBase(IOBase): """Base class for text I/O. This class provides a character and line based interface to stream - I/O. There is no readinto method because Python's character strings - are immutable. There is no public constructor. + I/O. There is no public constructor. """ def read(self, size=-1): From webhook-mailer at python.org Tue Apr 9 00:57:43 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 09 Apr 2019 04:57:43 -0000 Subject: [Python-checkins] closes bpo-35848: Move all documentation regarding the readinto out of IOBase. (GH-11893) Message-ID: https://github.com/python/cpython/commit/0a16bb15afec28f355bc28203b6b10610293f026 commit: 0a16bb15afec28f355bc28203b6b10610293f026 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-08T21:57:31-07:00 summary: closes bpo-35848: Move all documentation regarding the readinto out of IOBase. (GH-11893) Move all documentation regarding the readinto method into either io.RawIOBase or io.BufferedIOBase. Corresponding changes to documentation in the _pyio.py module. 
(cherry picked from commit 7b97ab35b28b761ab1253df427ee674b1a90f465) Co-authored-by: Steve Palmer files: M Doc/library/io.rst M Lib/_pyio.py diff --git a/Doc/library/io.rst b/Doc/library/io.rst index e623a041acf8..9738c5c2ad27 100644 --- a/Doc/library/io.rst +++ b/Doc/library/io.rst @@ -226,7 +226,7 @@ I/O Base Classes implementations represent a file that cannot be read, written or seeked. - Even though :class:`IOBase` does not declare :meth:`read`, :meth:`readinto`, + Even though :class:`IOBase` does not declare :meth:`read` or :meth:`write` because their signatures will vary, implementations and clients should consider those methods part of the interface. Also, implementations may raise a :exc:`ValueError` (or :exc:`UnsupportedOperation`) @@ -234,9 +234,7 @@ I/O Base Classes The basic type used for binary data read from or written to a file is :class:`bytes`. Other :term:`bytes-like objects ` are - accepted as method arguments too. In some cases, such as - :meth:`~RawIOBase.readinto`, a writable object such as :class:`bytearray` - is required. Text I/O classes work with :class:`str` data. + accepted as method arguments too. Text I/O classes work with :class:`str` data. Note that calling any method (even inquiries) on a closed stream is undefined. Implementations may raise :exc:`ValueError` in this case. @@ -405,7 +403,8 @@ I/O Base Classes Read bytes into a pre-allocated, writable :term:`bytes-like object` *b*, and return the - number of bytes read. If the object is in non-blocking mode and no bytes + number of bytes read. For example, *b* might be a :class:`bytearray`. + If the object is in non-blocking mode and no bytes are available, ``None`` is returned. .. method:: write(b) @@ -495,6 +494,7 @@ I/O Base Classes Read bytes into a pre-allocated, writable :term:`bytes-like object` *b* and return the number of bytes read. + For example, *b* might be a :class:`bytearray`. Like :meth:`read`, multiple reads may be issued to the underlying raw stream, unless the latter is interactive. @@ -757,8 +757,7 @@ Text I/O .. class:: TextIOBase Base class for text streams. This class provides a character and line based - interface to stream I/O. There is no :meth:`readinto` method because - Python's character strings are immutable. It inherits :class:`IOBase`. + interface to stream I/O. It inherits :class:`IOBase`. There is no public constructor. :class:`TextIOBase` provides or overrides these data attributes and @@ -1048,4 +1047,3 @@ The above implicitly extends to text files, since the :func:`open()` function will wrap a buffered object inside a :class:`TextIOWrapper`. This includes standard streams and therefore affects the built-in function :func:`print()` as well. - diff --git a/Lib/_pyio.py b/Lib/_pyio.py index f0d4f4ed27a2..0c0cb84a48e2 100644 --- a/Lib/_pyio.py +++ b/Lib/_pyio.py @@ -287,16 +287,15 @@ class IOBase(metaclass=abc.ABCMeta): derived classes can override selectively; the default implementations represent a file that cannot be read, written or seeked. - Even though IOBase does not declare read, readinto, or write because + Even though IOBase does not declare read or write because their signatures will vary, implementations and clients should consider those methods part of the interface. Also, implementations may raise UnsupportedOperation when operations they do not support are called. The basic type used for binary data read from or written to a file is - bytes. Other bytes-like objects are accepted as method arguments too. 
In - some cases (such as readinto), a writable object is required. Text I/O - classes work with str data. + bytes. Other bytes-like objects are accepted as method arguments too. + Text I/O classes work with str data. Note that calling any method (even inquiries) on a closed stream is undefined. Implementations may raise OSError in this case. @@ -1759,8 +1758,7 @@ class TextIOBase(IOBase): """Base class for text I/O. This class provides a character and line based interface to stream - I/O. There is no readinto method because Python's character strings - are immutable. There is no public constructor. + I/O. There is no public constructor. """ def read(self, size=-1): From webhook-mailer at python.org Tue Apr 9 01:43:15 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Tue, 09 Apr 2019 05:43:15 -0000 Subject: [Python-checkins] bpo-30134: add an explanation of BytesWarning (GH-1249) Message-ID: https://github.com/python/cpython/commit/d012d64b6a513e760abb6745f7f7ce5e6a31f274 commit: d012d64b6a513e760abb6745f7f7ce5e6a31f274 branch: 2.7 author: cocoatomo committer: Inada Naoki date: 2019-04-09T14:43:11+09:00 summary: bpo-30134: add an explanation of BytesWarning (GH-1249) files: M Doc/library/exceptions.rst diff --git a/Doc/library/exceptions.rst b/Doc/library/exceptions.rst index 8757c6cb4468..2bc2bce00adc 100644 --- a/Doc/library/exceptions.rst +++ b/Doc/library/exceptions.rst @@ -524,6 +524,13 @@ module for more information. .. versionadded:: 2.5 +.. exception:: BytesWarning + + Base class for warnings related to :class:`str` and :class:`bytearray`. + + .. versionadded:: 2.6 + + Exception hierarchy ------------------- From webhook-mailer at python.org Tue Apr 9 01:54:34 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Tue, 09 Apr 2019 05:54:34 -0000 Subject: [Python-checkins] fix code styling (GH-12737) Message-ID: https://github.com/python/cpython/commit/5909ad1217aad200c69ffa794fcab285bacb609e commit: 5909ad1217aad200c69ffa794fcab285bacb609e branch: master author: Inada Naoki committer: GitHub date: 2019-04-09T14:54:30+09:00 summary: fix code styling (GH-12737) files: M Lib/distutils/command/bdist_wininst.py diff --git a/Lib/distutils/command/bdist_wininst.py b/Lib/distutils/command/bdist_wininst.py index 1cf2e963e0e7..3a616883bee5 100644 --- a/Lib/distutils/command/bdist_wininst.py +++ b/Lib/distutils/command/bdist_wininst.py @@ -269,7 +269,7 @@ def create_exe(self, arcname, fullname, bitmap=None): # convert back to bytes. "latin-1" simply avoids any possible # failures. 
with open(self.pre_install_script, "r", - encoding="latin-1") as script: + encoding="latin-1") as script: script_data = script.read().encode("latin-1") cfgdata = cfgdata + script_data + b"\n\0" else: From webhook-mailer at python.org Tue Apr 9 03:00:57 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Tue, 09 Apr 2019 07:00:57 -0000 Subject: [Python-checkins] bpo-30134: add BytesWarning in warnings.rst (GH-12738) Message-ID: https://github.com/python/cpython/commit/87ed1beb3e15c619f25b9a9308d1ec35659feffd commit: 87ed1beb3e15c619f25b9a9308d1ec35659feffd branch: 2.7 author: Inada Naoki committer: GitHub date: 2019-04-09T16:00:51+09:00 summary: bpo-30134: add BytesWarning in warnings.rst (GH-12738) files: M Doc/library/warnings.rst diff --git a/Doc/library/warnings.rst b/Doc/library/warnings.rst index e82bb97b6c68..b5b89c97c560 100644 --- a/Doc/library/warnings.rst +++ b/Doc/library/warnings.rst @@ -91,6 +91,9 @@ following warnings category classes are currently defined: | :exc:`UnicodeWarning` | Base category for warnings related to | | | Unicode. | +----------------------------------+-----------------------------------------------+ +| :exc:`BytesWarning` | Base category for warnings related to | +| | str and bytearray. | ++----------------------------------+-----------------------------------------------+ While these are technically built-in exceptions, they are documented here, because conceptually they belong to the warnings mechanism. From webhook-mailer at python.org Tue Apr 9 03:17:28 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Tue, 09 Apr 2019 07:17:28 -0000 Subject: [Python-checkins] bpo-33461: emit DeprecationWarning when json.loads(encoding=...) is used (GH-6762) Message-ID: https://github.com/python/cpython/commit/a8abe097c1165db25b429ca02a65c4f8acbc062b commit: a8abe097c1165db25b429ca02a65c4f8acbc062b branch: master author: Matthias Bussonnier committer: Inada Naoki date: 2019-04-09T16:17:25+09:00 summary: bpo-33461: emit DeprecationWarning when json.loads(encoding=...) is used (GH-6762) files: A Misc/NEWS.d/next/Library/2019-04-09-14-46-28.bpo-33461.SYJM-E.rst M Doc/library/json.rst M Lib/json/__init__.py M Lib/test/test_json/test_decode.py diff --git a/Doc/library/json.rst b/Doc/library/json.rst index 589e86ca8107..b476c372370d 100644 --- a/Doc/library/json.rst +++ b/Doc/library/json.rst @@ -265,18 +265,21 @@ Basic Usage *fp* can now be a :term:`binary file`. The input encoding should be UTF-8, UTF-16 or UTF-32. -.. function:: loads(s, *, encoding=None, cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, **kw) +.. function:: loads(s, *, cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, **kw) Deserialize *s* (a :class:`str`, :class:`bytes` or :class:`bytearray` instance containing a JSON document) to a Python object using this :ref:`conversion table `. The other arguments have the same meaning as in :func:`load`, except - *encoding* which is ignored and deprecated. + *encoding* which is ignored and deprecated since Python 3.1. If the data being deserialized is not a valid JSON document, a :exc:`JSONDecodeError` will be raised. + .. deprecated-removed:: 3.1 3.9 + *encoding* keyword argument. + .. versionchanged:: 3.6 *s* can now be of type :class:`bytes` or :class:`bytearray`. The input encoding should be UTF-8, UTF-16 or UTF-32. 
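A short sketch of the new warning behaviour, mirroring the test added in this patch (it assumes an interpreter with this change; on earlier releases the keyword is silently ignored without a warning):

    import json
    import warnings

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        json.loads('{"spam": 1}', encoding="utf-8")  # 'encoding' is ignored

    assert any(issubclass(w.category, DeprecationWarning) for w in caught)
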
diff --git a/Lib/json/__init__.py b/Lib/json/__init__.py index 3bb4490e818b..1ba8b48bd78c 100644 --- a/Lib/json/__init__.py +++ b/Lib/json/__init__.py @@ -296,7 +296,7 @@ def load(fp, *, cls=None, object_hook=None, parse_float=None, parse_constant=parse_constant, object_pairs_hook=object_pairs_hook, **kw) -def loads(s, *, encoding=None, cls=None, object_hook=None, parse_float=None, +def loads(s, *, cls=None, object_hook=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, **kw): """Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance containing a JSON document) to a Python object. @@ -330,7 +330,7 @@ def loads(s, *, encoding=None, cls=None, object_hook=None, parse_float=None, To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` kwarg; otherwise ``JSONDecoder`` is used. - The ``encoding`` argument is ignored and deprecated. + The ``encoding`` argument is ignored and deprecated since Python 3.1. """ if isinstance(s, str): if s.startswith('\ufeff'): @@ -342,6 +342,15 @@ def loads(s, *, encoding=None, cls=None, object_hook=None, parse_float=None, f'not {s.__class__.__name__}') s = s.decode(detect_encoding(s), 'surrogatepass') + if "encoding" in kw: + import warnings + warnings.warn( + "'encoding' is ignored and deprecated. It will be removed in Python 3.9", + DeprecationWarning, + stacklevel=2 + ) + del kw['encoding'] + if (cls is None and object_hook is None and parse_int is None and parse_float is None and parse_constant is None and object_pairs_hook is None and not kw): diff --git a/Lib/test/test_json/test_decode.py b/Lib/test/test_json/test_decode.py index fdb9e62124ec..895c95b54c3b 100644 --- a/Lib/test/test_json/test_decode.py +++ b/Lib/test/test_json/test_decode.py @@ -95,5 +95,9 @@ def test_negative_index(self): d = self.json.JSONDecoder() self.assertRaises(ValueError, d.raw_decode, 'a'*42, -50000) + def test_deprecated_encode(self): + with self.assertWarns(DeprecationWarning): + self.loads('{}', encoding='fake') + class TestPyDecode(TestDecode, PyTest): pass class TestCDecode(TestDecode, CTest): pass diff --git a/Misc/NEWS.d/next/Library/2019-04-09-14-46-28.bpo-33461.SYJM-E.rst b/Misc/NEWS.d/next/Library/2019-04-09-14-46-28.bpo-33461.SYJM-E.rst new file mode 100644 index 000000000000..12b3bceaf8e3 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-04-09-14-46-28.bpo-33461.SYJM-E.rst @@ -0,0 +1,2 @@ +``json.loads`` now emits ``DeprecationWarning`` when ``encoding`` option is +specified. Patch by Matthias Bussonnier. From webhook-mailer at python.org Tue Apr 9 03:27:41 2019 From: webhook-mailer at python.org (Vinay Sajip) Date: Tue, 09 Apr 2019 07:27:41 -0000 Subject: [Python-checkins] bpo-33456: site module documentation - fix wrong default for key in pyvenv.cfg (GH-6755) Message-ID: https://github.com/python/cpython/commit/c324c748871804f31f56b3bd02a8650b3bf1bae7 commit: c324c748871804f31f56b3bd02a8650b3bf1bae7 branch: master author: Lukas Waymann committer: Vinay Sajip date: 2019-04-09T08:27:36+01:00 summary: bpo-33456: site module documentation - fix wrong default for key in pyvenv.cfg (GH-6755) files: M Doc/library/site.rst diff --git a/Doc/library/site.rst b/Doc/library/site.rst index 7974e20974f1..dfc40d179443 100644 --- a/Doc/library/site.rst +++ b/Doc/library/site.rst @@ -45,9 +45,9 @@ sys.prefix and sys.exec_prefix are set to that directory and it is also checked for site-packages (sys.base_prefix and sys.base_exec_prefix will always be the "real" prefixes of the Python installation). 
If "pyvenv.cfg" (a bootstrap configuration file) contains -the key "include-system-site-packages" set to anything other than "false" -(case-insensitive), the system-level prefixes will still also be -searched for site-packages; otherwise they won't. +the key "include-system-site-packages" set to anything other than "true" +(case-insensitive), the system-level prefixes will not be +searched for site-packages; otherwise they will. .. index:: single: # (hash); comment From webhook-mailer at python.org Tue Apr 9 08:20:48 2019 From: webhook-mailer at python.org (Cheryl Sabella) Date: Tue, 09 Apr 2019 12:20:48 -0000 Subject: [Python-checkins] bpo-34060: Report system load when running test suite for Windows (GH-8357) Message-ID: https://github.com/python/cpython/commit/e16467af0bfcc9f399df251495ff2d2ad20a1669 commit: e16467af0bfcc9f399df251495ff2d2ad20a1669 branch: master author: Ammar Askar committer: Cheryl Sabella date: 2019-04-09T08:20:41-04:00 summary: bpo-34060: Report system load when running test suite for Windows (GH-8357) While Windows exposes the system processor queue length, the raw value used for load calculations on Unix systems, it does not provide an API to access the averaged value. Hence to calculate the load we must track and average it ourselves. We can't use multiprocessing or a thread to read it in the background while the tests run since using those would conflict with test_multiprocessing and test_xxsubprocess. Thus, we use Window's asynchronous IO API to run the tracker in the background with it sampling at the correct rate. When we wish to access the load we check to see if there's new data on the stream, if there is, we update our load values. files: A Lib/test/libregrtest/win_utils.py A Misc/NEWS.d/next/Windows/2018-07-20-13-09-19.bpo-34060.v-z87j.rst M Lib/test/libregrtest/main.py diff --git a/Lib/test/libregrtest/main.py b/Lib/test/libregrtest/main.py index 32ac44029bc3..18ef6d0609cd 100644 --- a/Lib/test/libregrtest/main.py +++ b/Lib/test/libregrtest/main.py @@ -146,8 +146,8 @@ def display_progress(self, test_index, test): line = f"[{line}] {test}" # add the system load prefix: "load avg: 1.80 " - if hasattr(os, 'getloadavg'): - load_avg_1min = os.getloadavg()[0] + if self.getloadavg: + load_avg_1min = self.getloadavg() line = f"load avg: {load_avg_1min:.2f} {line}" # add the timestamp prefix: "0:01:05 " @@ -616,6 +616,19 @@ def _main(self, tests, kwargs): self.list_cases() sys.exit(0) + self.getloadavg = None + # If we're on windows and this is the parent runner (not a worker), + # report the load average. 
+ if hasattr(os, 'getloadavg'): + def getloadavg_1m(): + return os.getloadavg()[0] + self.getloadavg = getloadavg_1m + elif sys.platform == 'win32' and (self.ns.worker_args is None): + from test.libregrtest.win_utils import WindowsLoadTracker + + load_tracker = WindowsLoadTracker() + self.getloadavg = load_tracker.getloadavg + self.run_tests() self.display_result() diff --git a/Lib/test/libregrtest/win_utils.py b/Lib/test/libregrtest/win_utils.py new file mode 100644 index 000000000000..2e6492289e15 --- /dev/null +++ b/Lib/test/libregrtest/win_utils.py @@ -0,0 +1,100 @@ +import subprocess +import sys +import os +import _winapi +import msvcrt +import uuid +from test import support + + +# Max size of asynchronous reads +BUFSIZE = 8192 +# Exponential damping factor (see below) +LOAD_FACTOR_1 = 0.9200444146293232478931553241 +# Seconds per measurement +SAMPLING_INTERVAL = 5 +COUNTER_NAME = r'\System\Processor Queue Length' + + +class WindowsLoadTracker(): + """ + This class asynchronously interacts with the `typeperf` command to read + the system load on Windows. Mulitprocessing and threads can't be used + here because they interfere with the test suite's cases for those + modules. + """ + + def __init__(self): + self.load = 0.0 + self.start() + + def start(self): + # Create a named pipe which allows for asynchronous IO in Windows + pipe_name = r'\\.\pipe\typeperf_output_' + str(uuid.uuid4()) + + open_mode = _winapi.PIPE_ACCESS_INBOUND + open_mode |= _winapi.FILE_FLAG_FIRST_PIPE_INSTANCE + open_mode |= _winapi.FILE_FLAG_OVERLAPPED + + # This is the read end of the pipe, where we will be grabbing output + self.pipe = _winapi.CreateNamedPipe( + pipe_name, open_mode, _winapi.PIPE_WAIT, + 1, BUFSIZE, BUFSIZE, _winapi.NMPWAIT_WAIT_FOREVER, _winapi.NULL + ) + # The write end of the pipe which is passed to the created process + pipe_write_end = _winapi.CreateFile( + pipe_name, _winapi.GENERIC_WRITE, 0, _winapi.NULL, + _winapi.OPEN_EXISTING, 0, _winapi.NULL + ) + # Open up the handle as a python file object so we can pass it to + # subprocess + command_stdout = msvcrt.open_osfhandle(pipe_write_end, 0) + + # Connect to the read end of the pipe in overlap/async mode + overlap = _winapi.ConnectNamedPipe(self.pipe, overlapped=True) + overlap.GetOverlappedResult(True) + + # Spawn off the load monitor + command = ['typeperf', COUNTER_NAME, '-si', str(SAMPLING_INTERVAL)] + self.p = subprocess.Popen(command, stdout=command_stdout, cwd=support.SAVEDCWD) + + # Close our copy of the write end of the pipe + os.close(command_stdout) + + def __del__(self): + self.p.kill() + self.p.wait() + + def read_output(self): + import _winapi + + overlapped, _ = _winapi.ReadFile(self.pipe, BUFSIZE, True) + bytes_read, res = overlapped.GetOverlappedResult(False) + if res != 0: + return + + return overlapped.getbuffer().decode() + + def getloadavg(self): + typeperf_output = self.read_output() + # Nothing to update, just return the current load + if not typeperf_output: + return self.load + + # Process the backlog of load values + for line in typeperf_output.splitlines(): + # typeperf outputs in a CSV format like this: + # "07/19/2018 01:32:26.605","3.000000" + toks = line.split(',') + # Ignore blank lines and the initial header + if line.strip() == '' or (COUNTER_NAME in line) or len(toks) != 2: + continue + + load = float(toks[1].replace('"', '')) + # We use an exponentially weighted moving average, imitating the + # load calculation on Unix systems. 
+ # https://en.wikipedia.org/wiki/Load_(computing)#Unix-style_load_calculation + new_load = self.load * LOAD_FACTOR_1 + load * (1.0 - LOAD_FACTOR_1) + self.load = new_load + + return self.load diff --git a/Misc/NEWS.d/next/Windows/2018-07-20-13-09-19.bpo-34060.v-z87j.rst b/Misc/NEWS.d/next/Windows/2018-07-20-13-09-19.bpo-34060.v-z87j.rst new file mode 100644 index 000000000000..b77d805b7f2a --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2018-07-20-13-09-19.bpo-34060.v-z87j.rst @@ -0,0 +1,2 @@ +Report system load when running test suite on Windows. Patch by Ammar Askar. +Based on prior work by Jeremy Kloth. From webhook-mailer at python.org Tue Apr 9 08:23:52 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 09 Apr 2019 12:23:52 -0000 Subject: [Python-checkins] bpo-36560: Fix reference leak hunting in regrtest (GH-12744) Message-ID: https://github.com/python/cpython/commit/5aaac94eeb44697e92b0951385cd557bc27e0f6a commit: 5aaac94eeb44697e92b0951385cd557bc27e0f6a branch: master author: Victor Stinner committer: GitHub date: 2019-04-09T14:23:47+02:00 summary: bpo-36560: Fix reference leak hunting in regrtest (GH-12744) Fix reference leak hunting in regrtest: compute also deltas (of reference count, allocated memory blocks, file descriptor count) during warmup, to ensure that everything is initialized before starting to hunt reference leaks. Other changes: * Replace gc.collect() with support.gc_collect() * Move calls to read memory statistics from dash_R_cleanup() to dash_R() * Pass regrtest 'ns' to dash_R() * dash_R() is now more quiet with --quiet option (don't display progress). * Precompute the full range for "for it in range(repcount):" to ensure that the iteration doesn't allocate anything new. * dash_R() now is responsible to call warm_caches(). files: A Misc/NEWS.d/next/Tests/2019-04-09-14-08-02.bpo-36560._ejeOr.rst M Lib/test/libregrtest/refleak.py M Lib/test/libregrtest/runtest.py M Lib/test/libregrtest/setup.py diff --git a/Lib/test/libregrtest/refleak.py b/Lib/test/libregrtest/refleak.py index d68ea63b5b3c..0bb8a0a2bf08 100644 --- a/Lib/test/libregrtest/refleak.py +++ b/Lib/test/libregrtest/refleak.py @@ -18,7 +18,7 @@ def _get_dump(cls): cls._abc_negative_cache, cls._abc_negative_cache_version) -def dash_R(the_module, test, indirect_test, huntrleaks): +def dash_R(ns, the_module, test_name, test_func): """Run a test multiple times, looking for reference leaks. Returns: @@ -32,6 +32,10 @@ def dash_R(the_module, test, indirect_test, huntrleaks): raise Exception("Tracking reference leaks requires a debug build " "of Python") + # Avoid false positives due to various caches + # filling slowly with random data: + warm_caches() + # Save current values for dash_R_cleanup() to restore. 
fs = warnings.filters[:] ps = copyreg.dispatch_table.copy() @@ -57,31 +61,50 @@ def dash_R(the_module, test, indirect_test, huntrleaks): def get_pooled_int(value): return int_pool.setdefault(value, value) - nwarmup, ntracked, fname = huntrleaks + nwarmup, ntracked, fname = ns.huntrleaks fname = os.path.join(support.SAVEDCWD, fname) repcount = nwarmup + ntracked + + # Pre-allocate to ensure that the loop doesn't allocate anything new + rep_range = list(range(repcount)) rc_deltas = [0] * repcount alloc_deltas = [0] * repcount fd_deltas = [0] * repcount + getallocatedblocks = sys.getallocatedblocks + gettotalrefcount = sys.gettotalrefcount + fd_count = support.fd_count - print("beginning", repcount, "repetitions", file=sys.stderr) - print(("1234567890"*(repcount//10 + 1))[:repcount], file=sys.stderr, - flush=True) # initialize variables to make pyflakes quiet rc_before = alloc_before = fd_before = 0 - for i in range(repcount): - indirect_test() - alloc_after, rc_after, fd_after = dash_R_cleanup(fs, ps, pic, zdc, - abcs) - print('.', end='', file=sys.stderr, flush=True) - if i >= nwarmup: - rc_deltas[i] = get_pooled_int(rc_after - rc_before) - alloc_deltas[i] = get_pooled_int(alloc_after - alloc_before) - fd_deltas[i] = get_pooled_int(fd_after - fd_before) + + if not ns.quiet: + print("beginning", repcount, "repetitions", file=sys.stderr) + print(("1234567890"*(repcount//10 + 1))[:repcount], file=sys.stderr, + flush=True) + + for i in rep_range: + test_func() + dash_R_cleanup(fs, ps, pic, zdc, abcs) + + # Collect cyclic trash and read memory statistics immediately after. + support.gc_collect() + alloc_after = getallocatedblocks() + rc_after = gettotalrefcount() + fd_after = fd_count() + + if not ns.quiet: + print('.', end='', file=sys.stderr, flush=True) + + rc_deltas[i] = get_pooled_int(rc_after - rc_before) + alloc_deltas[i] = get_pooled_int(alloc_after - alloc_before) + fd_deltas[i] = get_pooled_int(fd_after - fd_before) + alloc_before = alloc_after rc_before = rc_after fd_before = fd_after - print(file=sys.stderr) + + if not ns.quiet: + print(file=sys.stderr) # These checkers return False on success, True on failure def check_rc_deltas(deltas): @@ -112,7 +135,7 @@ def check_fd_deltas(deltas): deltas = deltas[nwarmup:] if checker(deltas): msg = '%s leaked %s %s, sum=%s' % ( - test, deltas, item_name, sum(deltas)) + test_name, deltas, item_name, sum(deltas)) print(msg, file=sys.stderr, flush=True) with open(fname, "a") as refrep: print(msg, file=refrep) @@ -122,7 +145,7 @@ def check_fd_deltas(deltas): def dash_R_cleanup(fs, ps, pic, zdc, abcs): - import gc, copyreg + import copyreg import collections.abc # Restore some original values. @@ -154,16 +177,8 @@ def dash_R_cleanup(fs, ps, pic, zdc, abcs): clear_caches() - # Collect cyclic trash and read memory statistics immediately after. 
- func1 = sys.getallocatedblocks - func2 = sys.gettotalrefcount - gc.collect() - return func1(), func2(), support.fd_count() - def clear_caches(): - import gc - # Clear the warnings registry, so they can be displayed again for mod in sys.modules.values(): if hasattr(mod, '__warningregistry__'): @@ -256,7 +271,7 @@ def clear_caches(): for f in typing._cleanups: f() - gc.collect() + support.gc_collect() def warm_caches(): diff --git a/Lib/test/libregrtest/runtest.py b/Lib/test/libregrtest/runtest.py index 4f218b769f98..0a9533c8a573 100644 --- a/Lib/test/libregrtest/runtest.py +++ b/Lib/test/libregrtest/runtest.py @@ -177,7 +177,7 @@ def test_runner(): raise Exception("errors while loading tests") support.run_unittest(tests) if ns.huntrleaks: - refleak = dash_R(the_module, test, test_runner, ns.huntrleaks) + refleak = dash_R(ns, the_module, test, test_runner) else: test_runner() test_time = time.perf_counter() - start_time diff --git a/Lib/test/libregrtest/setup.py b/Lib/test/libregrtest/setup.py index 910aca1b1a6c..9a6585af9d0d 100644 --- a/Lib/test/libregrtest/setup.py +++ b/Lib/test/libregrtest/setup.py @@ -10,8 +10,6 @@ except ImportError: gc = None -from test.libregrtest.refleak import warm_caches - def setup_tests(ns): try: @@ -79,10 +77,6 @@ def setup_tests(ns): if ns.huntrleaks: unittest.BaseTestSuite._cleanup = False - # Avoid false positives due to various caches - # filling slowly with random data: - warm_caches() - if ns.memlimit is not None: support.set_memlimit(ns.memlimit) diff --git a/Misc/NEWS.d/next/Tests/2019-04-09-14-08-02.bpo-36560._ejeOr.rst b/Misc/NEWS.d/next/Tests/2019-04-09-14-08-02.bpo-36560._ejeOr.rst new file mode 100644 index 000000000000..ad0f681ae877 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2019-04-09-14-08-02.bpo-36560._ejeOr.rst @@ -0,0 +1,4 @@ +Fix reference leak hunting in regrtest: compute also deltas (of reference +count, allocated memory blocks, file descriptor count) during warmup, to +ensure that everything is initialized before starting to hunt reference +leaks. From webhook-mailer at python.org Tue Apr 9 08:49:54 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Tue, 09 Apr 2019 12:49:54 -0000 Subject: [Python-checkins] bpo-30134: fix BytesWarning doc, docstring and message (GH-12739) Message-ID: https://github.com/python/cpython/commit/4e8e8aabc6e6ae1c989ef288be7bddfdbbc3187d commit: 4e8e8aabc6e6ae1c989ef288be7bddfdbbc3187d branch: 2.7 author: Inada Naoki committer: GitHub date: 2019-04-09T21:49:49+09:00 summary: bpo-30134: fix BytesWarning doc, docstring and message (GH-12739) files: M Doc/library/exceptions.rst M Doc/library/warnings.rst M Include/code.h M Objects/bytearrayobject.c M Objects/exceptions.c M Python/pythonrun.c diff --git a/Doc/library/exceptions.rst b/Doc/library/exceptions.rst index 2bc2bce00adc..3cb944555bd9 100644 --- a/Doc/library/exceptions.rst +++ b/Doc/library/exceptions.rst @@ -526,7 +526,7 @@ module for more information. .. exception:: BytesWarning - Base class for warnings related to :class:`str` and :class:`bytearray`. + Base class for warnings related to bytes and bytearray. .. versionadded:: 2.6 diff --git a/Doc/library/warnings.rst b/Doc/library/warnings.rst index b5b89c97c560..2f699ea8f551 100644 --- a/Doc/library/warnings.rst +++ b/Doc/library/warnings.rst @@ -92,7 +92,7 @@ following warnings category classes are currently defined: | | Unicode. 
| +----------------------------------+-----------------------------------------------+ | :exc:`BytesWarning` | Base category for warnings related to | -| | str and bytearray. | +| | bytes and bytearray. | +----------------------------------+-----------------------------------------------+ While these are technically built-in exceptions, they are documented here, diff --git a/Include/code.h b/Include/code.h index 7456fd610f98..26c571ae8727 100644 --- a/Include/code.h +++ b/Include/code.h @@ -104,7 +104,7 @@ PyAPI_FUNC(int) _PyCode_CheckLineNumber(PyCodeObject* co, * * Return (type(obj), obj, ...): a tuple with variable size (at least 2 items) * depending on the type and the value. The type is the first item to not - * compare bytes and str which can raise a BytesWarning exception. */ + * compare bytes and unicode which can raise a BytesWarning exception. */ PyAPI_FUNC(PyObject*) _PyCode_ConstantKey(PyObject *obj); PyAPI_FUNC(PyObject*) PyCode_Optimize(PyObject *code, PyObject* consts, diff --git a/Objects/bytearrayobject.c b/Objects/bytearrayobject.c index a96d6d981ddd..582483247769 100644 --- a/Objects/bytearrayobject.c +++ b/Objects/bytearrayobject.c @@ -1027,14 +1027,6 @@ bytearray_repr(PyByteArrayObject *self) static PyObject * bytearray_str(PyObject *op) { -#if 0 - if (Py_BytesWarningFlag) { - if (PyErr_WarnEx(PyExc_BytesWarning, - "str() on a bytearray instance", 1)) - return NULL; - } - return bytearray_repr((PyByteArrayObject*)op); -#endif return PyBytes_FromStringAndSize(((PyByteArrayObject*)op)->ob_bytes, Py_SIZE(op)); } @@ -1059,7 +1051,7 @@ bytearray_richcompare(PyObject *self, PyObject *other, int op) if (rc) { if (Py_BytesWarningFlag && op == Py_EQ) { if (PyErr_WarnEx(PyExc_BytesWarning, - "Comparison between bytearray and string", 1)) + "Comparison between bytearray and unicode", 1)) return NULL; } diff --git a/Objects/exceptions.c b/Objects/exceptions.c index 224d1ba08afb..fc6015212081 100644 --- a/Objects/exceptions.c +++ b/Objects/exceptions.c @@ -2014,8 +2014,8 @@ SimpleExtendsException(PyExc_Warning, UnicodeWarning, * BytesWarning extends Warning */ SimpleExtendsException(PyExc_Warning, BytesWarning, - "Base class for warnings about bytes and buffer related problems, mostly\n" - "related to conversion from str or comparing to str."); + "Base class for warnings about bytes and bytearray related problems, \n" + "mostly related to comparing to str."); /* Pre-computed MemoryError instance. Best to create this as early as * possible and not wait until a MemoryError is actually raised! 
diff --git a/Python/pythonrun.c b/Python/pythonrun.c index 2c9f55fbd1df..abdfb146ae51 100644 --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -70,7 +70,7 @@ int Py_VerboseFlag; /* Needed by import.c */ int Py_InteractiveFlag; /* Needed by Py_FdIsInteractive() below */ int Py_InspectFlag; /* Needed to determine whether to exit at SystemExit */ int Py_NoSiteFlag; /* Suppress 'import site' */ -int Py_BytesWarningFlag; /* Warn on str(bytes) and str(buffer) */ +int Py_BytesWarningFlag; /* Warn on comparison between bytearray and unicode */ int Py_DontWriteBytecodeFlag; /* Suppress writing bytecode files (*.py[co]) */ int Py_UseClassExceptionsFlag = 1; /* Needed by bltinmodule.c: deprecated */ int Py_FrozenFlag; /* Needed by getpath.c */ From webhook-mailer at python.org Tue Apr 9 09:41:16 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 09 Apr 2019 13:41:16 -0000 Subject: [Python-checkins] bpo-34139: Remove unix datagram socket from FS before binding (GH-8323) Message-ID: https://github.com/python/cpython/commit/56065d4c8ac03042cb7e29ffda9b1ac544a37b4d commit: 56065d4c8ac03042cb7e29ffda9b1ac544a37b4d branch: master author: Quentin Dawans committer: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> date: 2019-04-09T06:40:59-07:00 summary: bpo-34139: Remove unix datagram socket from FS before binding (GH-8323) https://bugs.python.org/issue34139 files: A Misc/NEWS.d/next/Library/2018-07-18-11-25-34.bpo-34139.tKbmW7.rst M Lib/asyncio/base_events.py M Lib/test/test_asyncio/test_base_events.py diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py index 36fe7e0076c9..9b4b846131de 100644 --- a/Lib/asyncio/base_events.py +++ b/Lib/asyncio/base_events.py @@ -20,6 +20,7 @@ import itertools import os import socket +import stat import subprocess import threading import time @@ -1183,6 +1184,19 @@ def _check_sendfile_params(self, sock, file, offset, count): for addr in (local_addr, remote_addr): if addr is not None and not isinstance(addr, str): raise TypeError('string is expected') + + if local_addr and local_addr[0] not in (0, '\x00'): + try: + if stat.S_ISSOCK(os.stat(local_addr).st_mode): + os.remove(local_addr) + except FileNotFoundError: + pass + except OSError as err: + # Directory may have permissions only to create socket. 
+ logger.error('Unable to check or remove stale UNIX ' + 'socket %r: %r', + local_addr, err) + addr_pairs_info = (((family, proto), (local_addr, remote_addr)), ) else: diff --git a/Lib/test/test_asyncio/test_base_events.py b/Lib/test/test_asyncio/test_base_events.py index 53854758a27d..c245c472996e 100644 --- a/Lib/test/test_asyncio/test_base_events.py +++ b/Lib/test/test_asyncio/test_base_events.py @@ -1662,6 +1662,20 @@ def test_create_datagram_endpoint_sock_unix(self): self.loop.run_until_complete(protocol.done) self.assertEqual('CLOSED', protocol.state) + @unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets') + def test_create_datagram_endpoint_existing_sock_unix(self): + with test_utils.unix_socket_path() as path: + sock = socket.socket(socket.AF_UNIX, type=socket.SOCK_DGRAM) + sock.bind(path) + sock.close() + + coro = self.loop.create_datagram_endpoint( + lambda: MyDatagramProto(create_future=True, loop=self.loop), + path, family=socket.AF_UNIX) + transport, protocol = self.loop.run_until_complete(coro) + transport.close() + self.loop.run_until_complete(protocol.done) + def test_create_datagram_endpoint_sock_sockopts(self): class FakeSock: type = socket.SOCK_DGRAM diff --git a/Misc/NEWS.d/next/Library/2018-07-18-11-25-34.bpo-34139.tKbmW7.rst b/Misc/NEWS.d/next/Library/2018-07-18-11-25-34.bpo-34139.tKbmW7.rst new file mode 100644 index 000000000000..44284a72ad8a --- /dev/null +++ b/Misc/NEWS.d/next/Library/2018-07-18-11-25-34.bpo-34139.tKbmW7.rst @@ -0,0 +1 @@ +Remove stale unix datagram socket before binding From webhook-mailer at python.org Tue Apr 9 11:20:19 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 09 Apr 2019 15:20:19 -0000 Subject: [Python-checkins] bpo-36544 : Fix regression introduced in bpo-36146 (GH-12708) Message-ID: https://github.com/python/cpython/commit/2ee077f7955e0349074f16a7afee40b4914619f7 commit: 2ee077f7955e0349074f16a7afee40b4914619f7 branch: master author: xdegaye committer: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> date: 2019-04-09T08:20:08-07:00 summary: bpo-36544 : Fix regression introduced in bpo-36146 (GH-12708) https://bugs.python.org/issue36544 files: A Misc/NEWS.d/next/Build/2019-04-06-18-53-03.bpo-36544.hJr2_a.rst M setup.py diff --git a/Misc/NEWS.d/next/Build/2019-04-06-18-53-03.bpo-36544.hJr2_a.rst b/Misc/NEWS.d/next/Build/2019-04-06-18-53-03.bpo-36544.hJr2_a.rst new file mode 100644 index 000000000000..71f5c21847b6 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2019-04-06-18-53-03.bpo-36544.hJr2_a.rst @@ -0,0 +1 @@ +Fix regression introduced in bpo-36146 refactoring setup.py diff --git a/setup.py b/setup.py index d03596029149..85c706cf6610 100644 --- a/setup.py +++ b/setup.py @@ -1642,6 +1642,7 @@ def detect_modules(self): self.detect_crypt() self.detect_socket() self.detect_openssl_hashlib() + self.detect_hash_builtins() self.detect_dbm_gdbm() self.detect_sqlite() self.detect_platform_specific_exts() @@ -2184,6 +2185,7 @@ def split_var(name, sep): library_dirs=openssl_libdirs, libraries=openssl_libs)) + def detect_hash_builtins(self): # We always compile these even when OpenSSL is available (issue #14693). # It's harmless and the object code is tiny (40-50 KiB per module, # only loaded when actually used). 
From webhook-mailer at python.org Tue Apr 9 12:01:20 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 09 Apr 2019 16:01:20 -0000 Subject: [Python-checkins] bpo-36560: Fix reference leak hunting in regrtest (GH-12744) (GH-12745) Message-ID: https://github.com/python/cpython/commit/9c14061a2c2df9a9b84d0aab190a50c24a0d52f4 commit: 9c14061a2c2df9a9b84d0aab190a50c24a0d52f4 branch: 2.7 author: Victor Stinner committer: GitHub date: 2019-04-09T18:01:17+02:00 summary: bpo-36560: Fix reference leak hunting in regrtest (GH-12744) (GH-12745) Fix reference leak hunting in regrtest: compute also deltas (of reference count and file descriptor count) during warmup, to ensure that everything is initialized before starting to hunt reference leaks. Other changes: * Replace gc.collect() with support.gc_collect() in clear_caches() * dash_R() is now more quiet with --quiet option (don't display progress). * Precompute the full range for "for it in range(repcount):" to ensure that the iteration doesn't allocate anything new. * dash_R() now is responsible to call warm_caches(). (cherry picked from commit 5aaac94eeb44697e92b0951385cd557bc27e0f6a) files: A Misc/NEWS.d/next/Tests/2019-04-09-14-08-02.bpo-36560._ejeOr.rst M Lib/test/regrtest.py diff --git a/Lib/test/regrtest.py b/Lib/test/regrtest.py index 70c51226e923..046f6560a210 100755 --- a/Lib/test/regrtest.py +++ b/Lib/test/regrtest.py @@ -529,8 +529,6 @@ def main(tests=None, testdir=None, verbose=0, quiet=False, if slaveargs is not None: args, kwargs = json.loads(slaveargs) - if kwargs['huntrleaks']: - warm_caches() if testdir: kwargs['testdir'] = testdir try: @@ -541,9 +539,6 @@ def main(tests=None, testdir=None, verbose=0, quiet=False, print json.dumps(result) sys.exit(0) - if huntrleaks: - warm_caches() - good = [] bad = [] skipped = [] @@ -1332,7 +1327,7 @@ def runtest_inner(test, verbose, quiet, huntrleaks=False, pgo=False, testdir=Non indirect_test = getattr(the_module, "test_main", None) if huntrleaks: refleak = dash_R(the_module, test, indirect_test, - huntrleaks) + huntrleaks, quiet) else: if indirect_test is not None: indirect_test() @@ -1425,7 +1420,7 @@ def cleanup_test_droppings(testname, verbose): print >> sys.stderr, ("%r left behind %s %r and it couldn't be " "removed: %s" % (testname, kind, name, msg)) -def dash_R(the_module, test, indirect_test, huntrleaks): +def dash_R(the_module, test, indirect_test, huntrleaks, quiet): """Run a test multiple times, looking for reference leaks. Returns: @@ -1438,6 +1433,10 @@ def dash_R(the_module, test, indirect_test, huntrleaks): raise Exception("Tracking reference leaks requires a debug build " "of Python") + # Avoid false positives due to various caches + # filling slowly with random data: + warm_caches() + # Save current values for dash_R_cleanup() to restore. fs = warnings.filters[:] ps = copy_reg.dispatch_table.copy() @@ -1457,6 +1456,14 @@ def dash_R(the_module, test, indirect_test, huntrleaks): for obj in abc.__subclasses__() + [abc]: abcs[obj] = obj._abc_registry.copy() + # bpo-31217: Integer pool to get a single integer object for the same + # value. The pool is used to prevent false alarm when checking for memory + # block leaks. Fill the pool with values in -1000..1000 which are the most + # common (reference, memory block, file descriptor) differences. 
+ int_pool = {value: value for value in range(-1000, 1000)} + def get_pooled_int(value): + return int_pool.setdefault(value, value) + if indirect_test: def run_the_test(): indirect_test() @@ -1467,27 +1474,39 @@ def run_the_test(): deltas = [] nwarmup, ntracked, fname = huntrleaks fname = os.path.join(support.SAVEDCWD, fname) + + # Pre-allocate to ensure that the loop doesn't allocate anything new repcount = nwarmup + ntracked - rc_deltas = [0] * ntracked - fd_deltas = [0] * ntracked + rc_deltas = [0] * repcount + fd_deltas = [0] * repcount + rep_range = list(range(repcount)) + + if not quiet: + print >> sys.stderr, "beginning", repcount, "repetitions" + print >> sys.stderr, ("1234567890"*(repcount//10 + 1))[:repcount] - print >> sys.stderr, "beginning", repcount, "repetitions" - print >> sys.stderr, ("1234567890"*(repcount//10 + 1))[:repcount] dash_R_cleanup(fs, ps, pic, zdc, abcs) + # initialize variables to make pyflakes quiet rc_before = fd_before = 0 - for i in range(repcount): + + for i in rep_range: run_the_test() - sys.stderr.write('.') + + if not quiet: + sys.stderr.write('.') + dash_R_cleanup(fs, ps, pic, zdc, abcs) + rc_after = sys.gettotalrefcount() fd_after = support.fd_count() - if i >= nwarmup: - rc_deltas[i - nwarmup] = rc_after - rc_before - fd_deltas[i - nwarmup] = fd_after - fd_before + rc_deltas[i] = get_pooled_int(rc_after - rc_before) + fd_deltas[i] = get_pooled_int(fd_after - fd_before) rc_before = rc_after fd_before = fd_after - print >> sys.stderr + + if not quiet: + print >> sys.stderr # These checkers return False on success, True on failure def check_rc_deltas(deltas): @@ -1513,6 +1532,7 @@ def check_fd_deltas(deltas): (rc_deltas, 'references', check_rc_deltas), (fd_deltas, 'file descriptors', check_fd_deltas) ]: + deltas = deltas[nwarmup:] if checker(deltas): msg = '%s leaked %s %s, sum=%s' % (test, deltas, item_name, sum(deltas)) print >> sys.stderr, msg @@ -1647,7 +1667,7 @@ def clear_caches(): ctypes._reset_cache() # Collect cyclic trash. - gc.collect() + support.gc_collect() def warm_caches(): """Create explicitly internal singletons which are created on demand diff --git a/Misc/NEWS.d/next/Tests/2019-04-09-14-08-02.bpo-36560._ejeOr.rst b/Misc/NEWS.d/next/Tests/2019-04-09-14-08-02.bpo-36560._ejeOr.rst new file mode 100644 index 000000000000..a8de72c9ab13 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2019-04-09-14-08-02.bpo-36560._ejeOr.rst @@ -0,0 +1,3 @@ +Fix reference leak hunting in regrtest: compute also deltas (of reference count +and file descriptor count) during warmup, to ensure that everything is +initialized before starting to hunt reference leaks. From webhook-mailer at python.org Tue Apr 9 12:12:49 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 09 Apr 2019 16:12:49 -0000 Subject: [Python-checkins] bpo-36508: python-config don't export LINKFORSHARED (GH-12661) Message-ID: https://github.com/python/cpython/commit/e65f01f78d7bda3013fc5be485afa87ff56511d9 commit: e65f01f78d7bda3013fc5be485afa87ff56511d9 branch: master author: Victor Stinner committer: GitHub date: 2019-04-09T18:12:44+02:00 summary: bpo-36508: python-config don't export LINKFORSHARED (GH-12661) python-config --ldflags no longer includes flags of the LINKFORSHARED variable. The LINKFORSHARED variable must only be used to build executables. 
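As a quick illustration of the variables involved (an editor's sketch, not part of the patch): the values that python-config reads can be inspected from Python with sysconfig, and LINKFORSHARED only matters when linking the python executable itself, which is why --ldflags stops emitting it.

    import sysconfig

    # Inspect the build variables python-config works from (illustration only;
    # some of these are None on non-POSIX builds).
    for name in ("LINKFORSHARED", "LIBS", "LIBPL", "PY_ENABLE_SHARED"):
        print(name, "=", sysconfig.get_config_var(name))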
files: A Misc/NEWS.d/next/Build/2019-04-02-17-01-23.bpo-36508.SN5Y6N.rst M Makefile.pre.in M Misc/python-config.in M Misc/python-config.sh.in diff --git a/Makefile.pre.in b/Makefile.pre.in index 174b12c5de8b..1cb8a590d45b 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -104,6 +104,8 @@ PY_LDFLAGS_NODIST=$(CONFIGURE_LDFLAGS_NODIST) $(LDFLAGS_NODIST) NO_AS_NEEDED= @NO_AS_NEEDED@ SGI_ABI= @SGI_ABI@ CCSHARED= @CCSHARED@ +# LINKFORSHARED are the flags passed to the $(CC) command that links +# the python executable -- this is only needed for a few systems LINKFORSHARED= @LINKFORSHARED@ ARFLAGS= @ARFLAGS@ # Extra C flags added for building the interpreter object files. diff --git a/Misc/NEWS.d/next/Build/2019-04-02-17-01-23.bpo-36508.SN5Y6N.rst b/Misc/NEWS.d/next/Build/2019-04-02-17-01-23.bpo-36508.SN5Y6N.rst new file mode 100644 index 000000000000..62f80840a044 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2019-04-02-17-01-23.bpo-36508.SN5Y6N.rst @@ -0,0 +1,3 @@ +``python-config --ldflags`` no longer includes flags of the +``LINKFORSHARED`` variable. The ``LINKFORSHARED`` variable must only be used +to build executables. diff --git a/Misc/python-config.in b/Misc/python-config.in index e13da7543c9f..714415222798 100644 --- a/Misc/python-config.in +++ b/Misc/python-config.in @@ -55,8 +55,6 @@ for opt in opt_flags: if opt == '--ldflags': if not getvar('Py_ENABLE_SHARED'): libs.insert(0, '-L' + getvar('LIBPL')) - if not getvar('PYTHONFRAMEWORK'): - libs.extend(getvar('LINKFORSHARED').split()) print(' '.join(libs)) elif opt == '--extension-suffix': diff --git a/Misc/python-config.sh.in b/Misc/python-config.sh.in index d1d3275fa275..a3c479ce571f 100644 --- a/Misc/python-config.sh.in +++ b/Misc/python-config.sh.in @@ -44,7 +44,6 @@ ABIFLAGS="@ABIFLAGS@" LIBS="-lpython${VERSION}${ABIFLAGS} @LIBS@ $SYSLIBS" BASECFLAGS="@BASECFLAGS@" LDLIBRARY="@LDLIBRARY@" -LINKFORSHARED="@LINKFORSHARED@" OPT="@OPT@" PY_ENABLE_SHARED="@PY_ENABLE_SHARED@" LDVERSION="@LDVERSION@" @@ -89,15 +88,11 @@ do echo "$LIBS" ;; --ldflags) - LINKFORSHAREDUSED= - if [ -z "$PYTHONFRAMEWORK" ] ; then - LINKFORSHAREDUSED=$LINKFORSHARED - fi LIBPLUSED= if [ "$PY_ENABLE_SHARED" = "0" ] ; then LIBPLUSED="-L$LIBPL" fi - echo "$LIBPLUSED -L$libdir $LIBS $LINKFORSHAREDUSED" + echo "$LIBPLUSED -L$libdir $LIBS" ;; --extension-suffix) echo "$SO" From webhook-mailer at python.org Tue Apr 9 12:26:22 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 09 Apr 2019 16:26:22 -0000 Subject: [Python-checkins] bpo-36560: regrtest: don't collect the GC twice (GH-12747) Message-ID: https://github.com/python/cpython/commit/bb4447897a5f141eecf42987a1191a3330c5d7ed commit: bb4447897a5f141eecf42987a1191a3330c5d7ed branch: master author: Victor Stinner committer: GitHub date: 2019-04-09T18:26:16+02:00 summary: bpo-36560: regrtest: don't collect the GC twice (GH-12747) dash_R() function of libregrtest doesn't call support.gc_collect() directly anymore: it's already called by dash_R_cleanup(). Call dash_R_cleanup() before starting the loop. 
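A minimal sketch of the pattern described above, with assumed helper names rather than regrtest's real API: the cleanup step ends with a collection, so the caller reads the counters immediately afterwards instead of collecting a second time.

    import gc
    import sys

    def cleanup():
        # ... restore caches and registries here ...
        gc.collect()          # cleanup is responsible for the single collection

    def read_counters():
        cleanup()
        # sys.gettotalrefcount() exists only in debug builds, so guard it.
        refs = sys.gettotalrefcount() if hasattr(sys, "gettotalrefcount") else -1
        return sys.getallocatedblocks(), refs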
files: M Lib/test/libregrtest/refleak.py diff --git a/Lib/test/libregrtest/refleak.py b/Lib/test/libregrtest/refleak.py index 0bb8a0a2bf08..235d6bfd3af6 100644 --- a/Lib/test/libregrtest/refleak.py +++ b/Lib/test/libregrtest/refleak.py @@ -82,12 +82,14 @@ def get_pooled_int(value): print(("1234567890"*(repcount//10 + 1))[:repcount], file=sys.stderr, flush=True) + dash_R_cleanup(fs, ps, pic, zdc, abcs) + for i in rep_range: test_func() dash_R_cleanup(fs, ps, pic, zdc, abcs) - # Collect cyclic trash and read memory statistics immediately after. - support.gc_collect() + # dash_R_cleanup() ends with collecting cyclic trash: + # read memory statistics immediately after. alloc_after = getallocatedblocks() rc_after = gettotalrefcount() fd_after = fd_count() From webhook-mailer at python.org Tue Apr 9 12:40:17 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 09 Apr 2019 16:40:17 -0000 Subject: [Python-checkins] bpo-36577: setup.py reports missing OpenSSL again (GH-12746) Message-ID: https://github.com/python/cpython/commit/8abc3f4f91e6b523c761c7a6fa2e3405019803a1 commit: 8abc3f4f91e6b523c761c7a6fa2e3405019803a1 branch: master author: Christian Heimes committer: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> date: 2019-04-09T09:40:12-07:00 summary: bpo-36577: setup.py reports missing OpenSSL again (GH-12746) [bpo-36146](https://bugs.python.org/issue36146) introduced another regression. In case of missing OpenSSL libraries or headers, setup.py no longer reported _hashlib and _ssl to be missing. Signed-off-by: Christian Heimes https://bugs.python.org/issue36577 files: A Misc/NEWS.d/next/Build/2019-04-09-17-31-47.bpo-36577.34kuUW.rst M setup.py diff --git a/Misc/NEWS.d/next/Build/2019-04-09-17-31-47.bpo-36577.34kuUW.rst b/Misc/NEWS.d/next/Build/2019-04-09-17-31-47.bpo-36577.34kuUW.rst new file mode 100644 index 000000000000..58c015127309 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2019-04-09-17-31-47.bpo-36577.34kuUW.rst @@ -0,0 +1 @@ +setup.py now correctly reports missing OpenSSL headers and libraries again. diff --git a/setup.py b/setup.py index 85c706cf6610..30caed5b51c1 100644 --- a/setup.py +++ b/setup.py @@ -2153,6 +2153,7 @@ def split_var(name, sep): openssl_libs = split_var('OPENSSL_LIBS', '-l') if not openssl_libs: # libssl and libcrypto not found + self.missing.extend(['_ssl', '_hashlib']) return None, None # Find OpenSSL includes @@ -2160,6 +2161,7 @@ def split_var(name, sep): 'openssl/ssl.h', self.inc_dirs, openssl_includes ) if ssl_incs is None: + self.missing.extend(['_ssl', '_hashlib']) return None, None # OpenSSL 1.0.2 uses Kerberos for KRB5 ciphers From webhook-mailer at python.org Tue Apr 9 12:56:08 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 09 Apr 2019 16:56:08 -0000 Subject: [Python-checkins] [3.7] bpo-36560: regrtest: don't collect the GC twice (GH-12747) (GH-12749) Message-ID: https://github.com/python/cpython/commit/86f0354fcb815312295b923c55e39364d85d0388 commit: 86f0354fcb815312295b923c55e39364d85d0388 branch: 3.7 author: Victor Stinner committer: GitHub date: 2019-04-09T18:55:50+02:00 summary: [3.7] bpo-36560: regrtest: don't collect the GC twice (GH-12747) (GH-12749) * bpo-36560: Fix reference leak hunting in regrtest (GH-12744) Fix reference leak hunting in regrtest: compute also deltas (of reference count, allocated memory blocks, file descriptor count) during warmup, to ensure that everything is initialized before starting to hunt reference leaks. 
Other changes: * Replace gc.collect() with support.gc_collect() * Move calls to read memory statistics from dash_R_cleanup() to dash_R() * Pass regrtest 'ns' to dash_R() * dash_R() is now more quiet with --quiet option (don't display progress). * Precompute the full range for "for it in range(repcount):" to ensure that the iteration doesn't allocate anything new. * dash_R() now is responsible to call warm_caches(). (cherry picked from commit 5aaac94eeb44697e92b0951385cd557bc27e0f6a) * bpo-36560: regrtest: don't collect the GC twice (GH-12747) dash_R() function of libregrtest doesn't call support.gc_collect() directly anymore: it's already called by dash_R_cleanup(). Call dash_R_cleanup() before starting the loop. (cherry picked from commit bb4447897a5f141eecf42987a1191a3330c5d7ed) files: A Misc/NEWS.d/next/Tests/2019-04-09-14-08-02.bpo-36560._ejeOr.rst M Lib/test/libregrtest/refleak.py M Lib/test/libregrtest/runtest.py M Lib/test/libregrtest/setup.py diff --git a/Lib/test/libregrtest/refleak.py b/Lib/test/libregrtest/refleak.py index d68ea63b5b3c..235d6bfd3af6 100644 --- a/Lib/test/libregrtest/refleak.py +++ b/Lib/test/libregrtest/refleak.py @@ -18,7 +18,7 @@ def _get_dump(cls): cls._abc_negative_cache, cls._abc_negative_cache_version) -def dash_R(the_module, test, indirect_test, huntrleaks): +def dash_R(ns, the_module, test_name, test_func): """Run a test multiple times, looking for reference leaks. Returns: @@ -32,6 +32,10 @@ def dash_R(the_module, test, indirect_test, huntrleaks): raise Exception("Tracking reference leaks requires a debug build " "of Python") + # Avoid false positives due to various caches + # filling slowly with random data: + warm_caches() + # Save current values for dash_R_cleanup() to restore. fs = warnings.filters[:] ps = copyreg.dispatch_table.copy() @@ -57,31 +61,52 @@ def dash_R(the_module, test, indirect_test, huntrleaks): def get_pooled_int(value): return int_pool.setdefault(value, value) - nwarmup, ntracked, fname = huntrleaks + nwarmup, ntracked, fname = ns.huntrleaks fname = os.path.join(support.SAVEDCWD, fname) repcount = nwarmup + ntracked + + # Pre-allocate to ensure that the loop doesn't allocate anything new + rep_range = list(range(repcount)) rc_deltas = [0] * repcount alloc_deltas = [0] * repcount fd_deltas = [0] * repcount + getallocatedblocks = sys.getallocatedblocks + gettotalrefcount = sys.gettotalrefcount + fd_count = support.fd_count - print("beginning", repcount, "repetitions", file=sys.stderr) - print(("1234567890"*(repcount//10 + 1))[:repcount], file=sys.stderr, - flush=True) # initialize variables to make pyflakes quiet rc_before = alloc_before = fd_before = 0 - for i in range(repcount): - indirect_test() - alloc_after, rc_after, fd_after = dash_R_cleanup(fs, ps, pic, zdc, - abcs) - print('.', end='', file=sys.stderr, flush=True) - if i >= nwarmup: - rc_deltas[i] = get_pooled_int(rc_after - rc_before) - alloc_deltas[i] = get_pooled_int(alloc_after - alloc_before) - fd_deltas[i] = get_pooled_int(fd_after - fd_before) + + if not ns.quiet: + print("beginning", repcount, "repetitions", file=sys.stderr) + print(("1234567890"*(repcount//10 + 1))[:repcount], file=sys.stderr, + flush=True) + + dash_R_cleanup(fs, ps, pic, zdc, abcs) + + for i in rep_range: + test_func() + dash_R_cleanup(fs, ps, pic, zdc, abcs) + + # dash_R_cleanup() ends with collecting cyclic trash: + # read memory statistics immediately after. 
+ alloc_after = getallocatedblocks() + rc_after = gettotalrefcount() + fd_after = fd_count() + + if not ns.quiet: + print('.', end='', file=sys.stderr, flush=True) + + rc_deltas[i] = get_pooled_int(rc_after - rc_before) + alloc_deltas[i] = get_pooled_int(alloc_after - alloc_before) + fd_deltas[i] = get_pooled_int(fd_after - fd_before) + alloc_before = alloc_after rc_before = rc_after fd_before = fd_after - print(file=sys.stderr) + + if not ns.quiet: + print(file=sys.stderr) # These checkers return False on success, True on failure def check_rc_deltas(deltas): @@ -112,7 +137,7 @@ def check_fd_deltas(deltas): deltas = deltas[nwarmup:] if checker(deltas): msg = '%s leaked %s %s, sum=%s' % ( - test, deltas, item_name, sum(deltas)) + test_name, deltas, item_name, sum(deltas)) print(msg, file=sys.stderr, flush=True) with open(fname, "a") as refrep: print(msg, file=refrep) @@ -122,7 +147,7 @@ def check_fd_deltas(deltas): def dash_R_cleanup(fs, ps, pic, zdc, abcs): - import gc, copyreg + import copyreg import collections.abc # Restore some original values. @@ -154,16 +179,8 @@ def dash_R_cleanup(fs, ps, pic, zdc, abcs): clear_caches() - # Collect cyclic trash and read memory statistics immediately after. - func1 = sys.getallocatedblocks - func2 = sys.gettotalrefcount - gc.collect() - return func1(), func2(), support.fd_count() - def clear_caches(): - import gc - # Clear the warnings registry, so they can be displayed again for mod in sys.modules.values(): if hasattr(mod, '__warningregistry__'): @@ -256,7 +273,7 @@ def clear_caches(): for f in typing._cleanups: f() - gc.collect() + support.gc_collect() def warm_caches(): diff --git a/Lib/test/libregrtest/runtest.py b/Lib/test/libregrtest/runtest.py index dc2abf237bc0..99486c72db3e 100644 --- a/Lib/test/libregrtest/runtest.py +++ b/Lib/test/libregrtest/runtest.py @@ -177,7 +177,7 @@ def test_runner(): raise Exception("errors while loading tests") support.run_unittest(tests) if ns.huntrleaks: - refleak = dash_R(the_module, test, test_runner, ns.huntrleaks) + refleak = dash_R(ns, the_module, test, test_runner) else: test_runner() test_time = time.perf_counter() - start_time diff --git a/Lib/test/libregrtest/setup.py b/Lib/test/libregrtest/setup.py index 910aca1b1a6c..9a6585af9d0d 100644 --- a/Lib/test/libregrtest/setup.py +++ b/Lib/test/libregrtest/setup.py @@ -10,8 +10,6 @@ except ImportError: gc = None -from test.libregrtest.refleak import warm_caches - def setup_tests(ns): try: @@ -79,10 +77,6 @@ def setup_tests(ns): if ns.huntrleaks: unittest.BaseTestSuite._cleanup = False - # Avoid false positives due to various caches - # filling slowly with random data: - warm_caches() - if ns.memlimit is not None: support.set_memlimit(ns.memlimit) diff --git a/Misc/NEWS.d/next/Tests/2019-04-09-14-08-02.bpo-36560._ejeOr.rst b/Misc/NEWS.d/next/Tests/2019-04-09-14-08-02.bpo-36560._ejeOr.rst new file mode 100644 index 000000000000..ad0f681ae877 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2019-04-09-14-08-02.bpo-36560._ejeOr.rst @@ -0,0 +1,4 @@ +Fix reference leak hunting in regrtest: compute also deltas (of reference +count, allocated memory blocks, file descriptor count) during warmup, to +ensure that everything is initialized before starting to hunt reference +leaks. 
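A toy illustration of the reporting change in the two commits above (example numbers and hypothetical variable names): deltas are now recorded for every repetition, including warmup, and the warmup entries are only dropped when the leak check runs.

    nwarmup, ntracked = 2, 4
    rc_deltas = [7, 2, 0, 0, 0, 0]      # one entry per repetition, warmup included
    tracked = rc_deltas[nwarmup:]       # mirrors "deltas = deltas[nwarmup:]"
    print("checked deltas:", tracked, "sum =", sum(tracked))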
From webhook-mailer at python.org Tue Apr 9 13:12:31 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 09 Apr 2019 17:12:31 -0000 Subject: [Python-checkins] bpo-34373: Fix time.mktime() on AIX (GH-12726) Message-ID: https://github.com/python/cpython/commit/8709490f48fc27b3dd1a16acb33bea2299c6a575 commit: 8709490f48fc27b3dd1a16acb33bea2299c6a575 branch: master author: Victor Stinner committer: GitHub date: 2019-04-09T19:12:26+02:00 summary: bpo-34373: Fix time.mktime() on AIX (GH-12726) Fix time.mktime() error handling on AIX for year before 1970. Other changes: * mktime(): rename variable 'buf' to 'tm'. * _PyTime_localtime(): * Use "localtime" rather than "ctime" in the error message (specific to AIX). * Always initialize errno to 0 just in case if localtime_r() doesn't set errno on error. * On AIX, avoid abs() which is limited to int type. * EINVAL constant is now always available. files: A Misc/NEWS.d/next/Library/2019-04-08-14-41-22.bpo-34373.lEAl_-.rst M Modules/timemodule.c M Python/pytime.c diff --git a/Misc/NEWS.d/next/Library/2019-04-08-14-41-22.bpo-34373.lEAl_-.rst b/Misc/NEWS.d/next/Library/2019-04-08-14-41-22.bpo-34373.lEAl_-.rst new file mode 100644 index 000000000000..19b38fef6414 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-04-08-14-41-22.bpo-34373.lEAl_-.rst @@ -0,0 +1 @@ +Fix :func:`time.mktime` error handling on AIX for year before 1970. diff --git a/Modules/timemodule.c b/Modules/timemodule.c index 7c01cefa4d3c..724a064f5ceb 100644 --- a/Modules/timemodule.c +++ b/Modules/timemodule.c @@ -990,60 +990,68 @@ not present, current time as returned by localtime() is used."); #ifdef HAVE_MKTIME static PyObject * -time_mktime(PyObject *self, PyObject *tup) +time_mktime(PyObject *self, PyObject *tm_tuple) { - struct tm buf; + struct tm tm; time_t tt; -#ifdef _AIX - time_t clk; - int year = buf.tm_year; - int delta_days = 0; -#endif - if (!gettmarg(tup, &buf, + if (!gettmarg(tm_tuple, &tm, "iiiiiiiii;mktime(): illegal time tuple argument")) { return NULL; } -#ifndef _AIX - buf.tm_wday = -1; /* sentinel; original value ignored */ - tt = mktime(&buf); -#else - /* year < 1902 or year > 2037 */ - if ((buf.tm_year < 2) || (buf.tm_year > 137)) { - /* Issue #19748: On AIX, mktime() doesn't report overflow error for - * timestamp < -2^31 or timestamp > 2**31-1. */ + +#ifdef _AIX + /* bpo-19748: AIX mktime() valid range is 00:00:00 UTC, January 1, 1970 + to 03:14:07 UTC, January 19, 2038. Thanks to the workaround below, + it is possible to support years in range [1902; 2037] */ + if (tm.tm_year < 2 || tm.tm_year > 137) { + /* bpo-19748: On AIX, mktime() does not report overflow error + for timestamp < -2^31 or timestamp > 2**31-1. */ PyErr_SetString(PyExc_OverflowError, "mktime argument out of range"); return NULL; } - year = buf.tm_year; - /* year < 1970 - adjust buf.tm_year into legal range */ - while (buf.tm_year < 70) { - buf.tm_year += 4; + + /* bpo-34373: AIX mktime() has an integer overflow for years in range + [1902; 1969]. Workaround the issue by using a year greater or equal than + 1970 (tm_year >= 70): mktime() behaves correctly in that case + (ex: properly report errors). tm_year and tm_wday are adjusted after + mktime() call. 
*/ + int orig_tm_year = tm.tm_year; + int delta_days = 0; + while (tm.tm_year < 70) { + /* Use 4 years to account properly leap years */ + tm.tm_year += 4; delta_days -= (366 + (365 * 3)); } +#endif - buf.tm_wday = -1; - clk = mktime(&buf); - buf.tm_year = year; - - if ((buf.tm_wday != -1) && delta_days) - buf.tm_wday = (buf.tm_wday + delta_days) % 7; + tm.tm_wday = -1; /* sentinel; original value ignored */ + tt = mktime(&tm); - tt = clk + (delta_days * (24 * 3600)); -#endif /* Return value of -1 does not necessarily mean an error, but tm_wday * cannot remain set to -1 if mktime succeeded. */ if (tt == (time_t)(-1) /* Return value of -1 does not necessarily mean an error, but * tm_wday cannot remain set to -1 if mktime succeeded. */ - && buf.tm_wday == -1) + && tm.tm_wday == -1) { PyErr_SetString(PyExc_OverflowError, "mktime argument out of range"); return NULL; } + +#ifdef _AIX + if (delta_days != 0) { + tm.tm_year = orig_tm_year; + if (tm.tm_wday != -1) { + tm.tm_wday = (tm.tm_wday + delta_days) % 7; + } + tt += delta_days * (24 * 3600); + } +#endif + return PyFloat_FromDouble((double)tt); } diff --git a/Python/pytime.c b/Python/pytime.c index 68c49a86da25..9ff300699f04 100644 --- a/Python/pytime.c +++ b/Python/pytime.c @@ -1062,26 +1062,23 @@ _PyTime_localtime(time_t t, struct tm *tm) } return 0; #else /* !MS_WINDOWS */ + #ifdef _AIX - /* AIX does not return NULL on an error - so test ranges - asif! - (1902-01-01, -2145916800.0) - (2038-01-01, 2145916800.0) */ - if (abs(t) > (time_t) 2145916800) { -#ifdef EINVAL + /* bpo-34373: AIX does not return NULL if t is too small or too large */ + if (t < -2145916800 /* 1902-01-01 */ + || t > 2145916800 /* 2038-01-01 */) { errno = EINVAL; -#endif PyErr_SetString(PyExc_OverflowError, - "ctime argument out of range"); + "localtime argument out of range"); return -1; } #endif + + errno = 0; if (localtime_r(&t, tm) == NULL) { -#ifdef EINVAL if (errno == 0) { errno = EINVAL; } -#endif PyErr_SetFromErrno(PyExc_OSError); return -1; } From webhook-mailer at python.org Tue Apr 9 13:54:29 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 09 Apr 2019 17:54:29 -0000 Subject: [Python-checkins] bpo-36235: Fix distutils test_customize_compiler() on macOS (GH-12751) Message-ID: https://github.com/python/cpython/commit/22de4ce498b656063e236350e8404981c13e1cd8 commit: 22de4ce498b656063e236350e8404981c13e1cd8 branch: 2.7 author: Victor Stinner committer: GitHub date: 2019-04-09T19:54:10+02:00 summary: bpo-36235: Fix distutils test_customize_compiler() on macOS (GH-12751) Set CUSTOMIZED_OSX_COMPILER to True to disable _osx_support.customize_compiler(). 
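A condensed sketch of the test pattern, assuming distutils' private _config_vars cache is already populated (it is not a public API, and distutils itself is only present in the interpreters this commit targets): the test swaps in controlled values, including the macOS-only flag that makes customize_compiler() skip _osx_support.customize_compiler(), and restores the originals afterwards.

    from distutils import sysconfig
    from distutils.ccompiler import new_compiler

    sysconfig.get_config_vars()                      # make sure the cache exists
    old_vars = dict(sysconfig._config_vars)
    try:
        # On macOS this flag makes customize_compiler() skip
        # _osx_support.customize_compiler().
        sysconfig._config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True'
        compiler = new_compiler()
        sysconfig.customize_compiler(compiler)
    finally:
        sysconfig._config_vars.clear()
        sysconfig._config_vars.update(old_vars)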
files: M Lib/distutils/tests/test_sysconfig.py diff --git a/Lib/distutils/tests/test_sysconfig.py b/Lib/distutils/tests/test_sysconfig.py index 71754ddba17f..9e2aeb833bfd 100644 --- a/Lib/distutils/tests/test_sysconfig.py +++ b/Lib/distutils/tests/test_sysconfig.py @@ -73,6 +73,9 @@ def set_executables(self, **kw): comp = compiler() old_vars = dict(sysconfig._config_vars) try: + # On macOS, disable _osx_support.customize_compiler() + sysconfig._config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' + for key, value in sysconfig_vars.items(): sysconfig._config_vars[key] = value sysconfig.customize_compiler(comp) From webhook-mailer at python.org Tue Apr 9 14:19:51 2019 From: webhook-mailer at python.org (Steve Dower) Date: Tue, 09 Apr 2019 18:19:51 -0000 Subject: [Python-checkins] bpo-31512: Add non-elevated symlink support for Windows (GH-3652) Message-ID: https://github.com/python/cpython/commit/0e10766574f4e287cd6b5e5860a1ca75488f4119 commit: 0e10766574f4e287cd6b5e5860a1ca75488f4119 branch: master author: Vidar Tonaas Fauske committer: Steve Dower date: 2019-04-09T11:19:46-07:00 summary: bpo-31512: Add non-elevated symlink support for Windows (GH-3652) files: A Misc/NEWS.d/next/Windows/2017-10-04-12-40-45.bpo-31512.YQeBt2.rst M Doc/library/os.rst M Modules/posixmodule.c M Modules/winreparse.h diff --git a/Doc/library/os.rst b/Doc/library/os.rst index 85e240a0006a..f3b5d964ac58 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -2699,19 +2699,15 @@ features: as a directory if *target_is_directory* is ``True`` or a file symlink (the default) otherwise. On non-Windows platforms, *target_is_directory* is ignored. - Symbolic link support was introduced in Windows 6.0 (Vista). :func:`symlink` - will raise a :exc:`NotImplementedError` on Windows versions earlier than 6.0. - This function can support :ref:`paths relative to directory descriptors `. .. note:: - On Windows, the *SeCreateSymbolicLinkPrivilege* is required in order to - successfully create symlinks. This privilege is not typically granted to - regular users but is available to accounts which can escalate privileges - to the administrator level. Either obtaining the privilege or running your - application as an administrator are ways to successfully create symlinks. + On newer versions of Windows 10, unprivileged accounts can create symlinks + if Developer Mode is enabled. When Developer Mode is not available/enabled, + the *SeCreateSymbolicLinkPrivilege* privilege is required, or the process + must be run as an administrator. :exc:`OSError` is raised when the function is called by an unprivileged @@ -2729,6 +2725,9 @@ features: .. versionchanged:: 3.6 Accepts a :term:`path-like object` for *src* and *dst*. + .. versionchanged:: 3.8 + Added support for unelevated symlinks on Windows with Developer Mode. + .. function:: sync() diff --git a/Misc/NEWS.d/next/Windows/2017-10-04-12-40-45.bpo-31512.YQeBt2.rst b/Misc/NEWS.d/next/Windows/2017-10-04-12-40-45.bpo-31512.YQeBt2.rst new file mode 100644 index 000000000000..a6dbb5c0639b --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2017-10-04-12-40-45.bpo-31512.YQeBt2.rst @@ -0,0 +1,2 @@ +With the Windows 10 Creators Update, non-elevated users can now create +symlinks as long as the computer has Developer Mode enabled. 
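As a standalone illustration of the behaviour change (not taken from the patch): once this lands, the call below succeeds on Windows 10 without elevation when Developer Mode is enabled; on other platforms, and on Windows with the symlink privilege, it behaves as before.

    import os
    import tempfile

    with tempfile.TemporaryDirectory() as d:
        target = os.path.join(d, "target.txt")
        link = os.path.join(d, "link.txt")
        open(target, "w").close()
        try:
            os.symlink(target, link)
            print("created", link, "->", os.readlink(link))
        except OSError as exc:
            # Raised when neither Developer Mode nor the symlink privilege is available.
            print("symlink not permitted:", exc)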
diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 7c4e5f082b5d..e8dbdcc94aa7 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -284,10 +284,7 @@ extern char *ctermid_r(char *); #include #include /* for ShellExecute() */ #include /* for UNLEN */ -#ifdef SE_CREATE_SYMBOLIC_LINK_NAME /* Available starting with Vista */ #define HAVE_SYMLINK -static int win32_can_symlink = 0; -#endif #endif /* _MSC_VER */ #ifndef MAXPATHLEN @@ -7755,26 +7752,6 @@ os_readlink_impl(PyObject *module, path_t *path, int dir_fd) #if defined(MS_WINDOWS) -/* Grab CreateSymbolicLinkW dynamically from kernel32 */ -static BOOLEAN (CALLBACK *Py_CreateSymbolicLinkW)(LPCWSTR, LPCWSTR, DWORD) = NULL; - -static int -check_CreateSymbolicLink(void) -{ - HINSTANCE hKernel32; - /* only recheck */ - if (Py_CreateSymbolicLinkW) - return 1; - - Py_BEGIN_ALLOW_THREADS - hKernel32 = GetModuleHandleW(L"KERNEL32"); - *(FARPROC*)&Py_CreateSymbolicLinkW = GetProcAddress(hKernel32, - "CreateSymbolicLinkW"); - Py_END_ALLOW_THREADS - - return Py_CreateSymbolicLinkW != NULL; -} - /* Remove the last portion of the path - return 0 on success */ static int _dirnameW(WCHAR *path) @@ -7878,33 +7855,57 @@ os_symlink_impl(PyObject *module, path_t *src, path_t *dst, { #ifdef MS_WINDOWS DWORD result; + DWORD flags = 0; + + /* Assumed true, set to false if detected to not be available. */ + static int windows_has_symlink_unprivileged_flag = TRUE; #else int result; #endif #ifdef MS_WINDOWS - if (!check_CreateSymbolicLink()) { - PyErr_SetString(PyExc_NotImplementedError, - "CreateSymbolicLink functions not found"); - return NULL; - } - if (!win32_can_symlink) { - PyErr_SetString(PyExc_OSError, "symbolic link privilege not held"); - return NULL; - } -#endif -#ifdef MS_WINDOWS + if (windows_has_symlink_unprivileged_flag) { + /* Allow non-admin symlinks if system allows it. */ + flags |= SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE; + } Py_BEGIN_ALLOW_THREADS _Py_BEGIN_SUPPRESS_IPH - /* if src is a directory, ensure target_is_directory==1 */ - target_is_directory |= _check_dirW(src->wide, dst->wide); - result = Py_CreateSymbolicLinkW(dst->wide, src->wide, - target_is_directory); + /* if src is a directory, ensure flags==1 (target_is_directory bit) */ + if (target_is_directory || _check_dirW(src->wide, dst->wide)) { + flags |= SYMBOLIC_LINK_FLAG_DIRECTORY; + } + + result = CreateSymbolicLinkW(dst->wide, src->wide, flags); _Py_END_SUPPRESS_IPH Py_END_ALLOW_THREADS + if (windows_has_symlink_unprivileged_flag && !result && + ERROR_INVALID_PARAMETER == GetLastError()) { + + Py_BEGIN_ALLOW_THREADS + _Py_BEGIN_SUPPRESS_IPH + /* This error might be caused by + SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE not being supported. + Try again, and update windows_has_symlink_unprivileged_flag if we + are successful this time. + + NOTE: There is a risk of a race condition here if there are other + conditions than the flag causing ERROR_INVALID_PARAMETER, and + another process (or thread) changes that condition in between our + calls to CreateSymbolicLink. 
+ */ + flags &= ~(SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE); + result = CreateSymbolicLinkW(dst->wide, src->wide, flags); + _Py_END_SUPPRESS_IPH + Py_END_ALLOW_THREADS + + if (result || ERROR_INVALID_PARAMETER != GetLastError()) { + windows_has_symlink_unprivileged_flag = FALSE; + } + } + if (!result) return path_error2(src, dst); @@ -13469,35 +13470,6 @@ static PyMethodDef posix_methods[] = { {NULL, NULL} /* Sentinel */ }; - -#if defined(HAVE_SYMLINK) && defined(MS_WINDOWS) -static int -enable_symlink() -{ - HANDLE tok; - TOKEN_PRIVILEGES tok_priv; - LUID luid; - - if (!OpenProcessToken(GetCurrentProcess(), TOKEN_ALL_ACCESS, &tok)) - return 0; - - if (!LookupPrivilegeValue(NULL, SE_CREATE_SYMBOLIC_LINK_NAME, &luid)) - return 0; - - tok_priv.PrivilegeCount = 1; - tok_priv.Privileges[0].Luid = luid; - tok_priv.Privileges[0].Attributes = SE_PRIVILEGE_ENABLED; - - if (!AdjustTokenPrivileges(tok, FALSE, &tok_priv, - sizeof(TOKEN_PRIVILEGES), - (PTOKEN_PRIVILEGES) NULL, (PDWORD) NULL)) - return 0; - - /* ERROR_NOT_ALL_ASSIGNED returned when the privilege can't be assigned. */ - return GetLastError() == ERROR_NOT_ALL_ASSIGNED ? 0 : 1; -} -#endif /* defined(HAVE_SYMLINK) && defined(MS_WINDOWS) */ - static int all_ins(PyObject *m) { @@ -14105,10 +14077,6 @@ INITFUNC(void) PyObject *list; const char * const *trace; -#if defined(HAVE_SYMLINK) && defined(MS_WINDOWS) - win32_can_symlink = enable_symlink(); -#endif - m = PyModule_Create(&posixmodule); if (m == NULL) return NULL; diff --git a/Modules/winreparse.h b/Modules/winreparse.h index 28049c9af906..f06f701f999c 100644 --- a/Modules/winreparse.h +++ b/Modules/winreparse.h @@ -45,6 +45,11 @@ typedef struct { FIELD_OFFSET(_Py_REPARSE_DATA_BUFFER, GenericReparseBuffer) #define _Py_MAXIMUM_REPARSE_DATA_BUFFER_SIZE ( 16 * 1024 ) +// Defined in WinBase.h in 'recent' versions of Windows 10 SDK +#ifndef SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE +#define SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE 0x2 +#endif + #ifdef __cplusplus } #endif From webhook-mailer at python.org Tue Apr 9 19:37:03 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 09 Apr 2019 23:37:03 -0000 Subject: [Python-checkins] Fix typos in compile.c comments (GH-12752) Message-ID: https://github.com/python/cpython/commit/63b5fc5f42c95a9ef25f9ef9f69ef218763d28bd commit: 63b5fc5f42c95a9ef25f9ef9f69ef218763d28bd branch: master author: Simeon committer: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> date: 2019-04-09T16:36:57-07:00 summary: Fix typos in compile.c comments (GH-12752) files: M Python/compile.c diff --git a/Python/compile.c b/Python/compile.c index a992e4b4653c..ecf7d357c856 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -1216,7 +1216,7 @@ merge_consts_recursive(struct compiler *c, PyObject *o) } // We registered o in c_const_cache. - // When o is a tuple or frozenset, we want to merge it's + // When o is a tuple or frozenset, we want to merge its // items too. if (PyTuple_CheckExact(o)) { Py_ssize_t len = PyTuple_GET_SIZE(o); @@ -1246,7 +1246,7 @@ merge_consts_recursive(struct compiler *c, PyObject *o) } } else if (PyFrozenSet_CheckExact(o)) { - // *key* is tuple. And it's first item is frozenset of + // *key* is tuple. And its first item is frozenset of // constant keys. // See _PyCode_ConstantKey() for detail. 
assert(PyTuple_CheckExact(key)); From webhook-mailer at python.org Tue Apr 9 20:31:23 2019 From: webhook-mailer at python.org (Senthil Kumaran) Date: Wed, 10 Apr 2019 00:31:23 -0000 Subject: [Python-checkins] bpo-12910: update and correct quote docstring (#2568) Message-ID: https://github.com/python/cpython/commit/750d74fac5c510e39958b3f79641fe54096ee54f commit: 750d74fac5c510e39958b3f79641fe54096ee54f branch: master author: J?rn Hees committer: Senthil Kumaran date: 2019-04-09T17:31:18-07:00 summary: bpo-12910: update and correct quote docstring (#2568) Fixes some mistakes and misleadings in the quote function docstring: - reserved chars are never actually used by quote code, unreserved chars are - reserved chars were wrong and incomplete - mentioned that use-case is not minimal quoting wrt. RFC, but cautious quoting files: M Lib/urllib/parse.py diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py index 8b6c9b106091..fb518a97749c 100644 --- a/Lib/urllib/parse.py +++ b/Lib/urllib/parse.py @@ -785,25 +785,32 @@ def quote(string, safe='/', encoding=None, errors=None): """quote('abc def') -> 'abc%20def' Each part of a URL, e.g. the path info, the query, etc., has a - different set of reserved characters that must be quoted. + different set of reserved characters that must be quoted. The + quote function offers a cautious (not minimal) way to quote a + string for most of these parts. - RFC 3986 Uniform Resource Identifiers (URI): Generic Syntax lists - the following reserved characters. + RFC 3986 Uniform Resource Identifier (URI): Generic Syntax lists + the following (un)reserved characters. - reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" | - "$" | "," | "~" + unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" + reserved = gen-delims / sub-delims + gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@" + sub-delims = "!" / "$" / "&" / "'" / "(" / ")" + / "*" / "+" / "," / ";" / "=" - Each of these characters is reserved in some component of a URL, + Each of the reserved characters is reserved in some component of a URL, but not necessarily in all of them. - Python 3.7 updates from using RFC 2396 to RFC 3986 to quote URL strings. - Now, "~" is included in the set of reserved characters. + The quote function %-escapes all characters that are neither in the + unreserved chars ("always safe") nor the additional chars set via the + safe arg. + + The default for the safe arg is '/'. The character is reserved, but in + typical usage the quote function is being called on a path where the + existing slash characters are to be preserved. - By default, the quote function is intended for quoting the path - section of a URL. Thus, it will not encode '/'. This character - is reserved, but in typical usage the quote function is being - called on a path where the existing slash characters are used as - reserved characters. + Python 3.7 updates from using RFC 2396 to RFC 3986 to quote URL strings. + Now, "~" is included in the set of unreserved characters. string and safe may be either str or bytes objects. encoding and errors must not be specified if string is a bytes object. 
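A short example of the behaviour the updated docstring describes (standard library calls, expected output shown in comments): '~' is left alone as an unreserved character, '/' is kept by default because it is in the default safe set, and everything else outside the unreserved set is %-escaped.

    from urllib.parse import quote

    print(quote("/~user/file name.txt"))           # /~user/file%20name.txt
    print(quote("/~user/file name.txt", safe=""))  # %2F~user%2Ffile%20name.txt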
From webhook-mailer at python.org Tue Apr 9 20:53:07 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 10 Apr 2019 00:53:07 -0000 Subject: [Python-checkins] bpo-12910: update and correct quote docstring (GH-2568) Message-ID: https://github.com/python/cpython/commit/796698adf558f2255474945082856538b1effb0b commit: 796698adf558f2255474945082856538b1effb0b branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-09T17:53:03-07:00 summary: bpo-12910: update and correct quote docstring (GH-2568) Fixes some mistakes and misleadings in the quote function docstring: - reserved chars are never actually used by quote code, unreserved chars are - reserved chars were wrong and incomplete - mentioned that use-case is not minimal quoting wrt. RFC, but cautious quoting (cherry picked from commit 750d74fac5c510e39958b3f79641fe54096ee54f) Co-authored-by: J?rn Hees files: M Lib/urllib/parse.py diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py index 39c5d6a80824..1eec26e0f1f3 100644 --- a/Lib/urllib/parse.py +++ b/Lib/urllib/parse.py @@ -780,25 +780,32 @@ def quote(string, safe='/', encoding=None, errors=None): """quote('abc def') -> 'abc%20def' Each part of a URL, e.g. the path info, the query, etc., has a - different set of reserved characters that must be quoted. + different set of reserved characters that must be quoted. The + quote function offers a cautious (not minimal) way to quote a + string for most of these parts. - RFC 3986 Uniform Resource Identifiers (URI): Generic Syntax lists - the following reserved characters. + RFC 3986 Uniform Resource Identifier (URI): Generic Syntax lists + the following (un)reserved characters. - reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" | - "$" | "," | "~" + unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" + reserved = gen-delims / sub-delims + gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@" + sub-delims = "!" / "$" / "&" / "'" / "(" / ")" + / "*" / "+" / "," / ";" / "=" - Each of these characters is reserved in some component of a URL, + Each of the reserved characters is reserved in some component of a URL, but not necessarily in all of them. - Python 3.7 updates from using RFC 2396 to RFC 3986 to quote URL strings. - Now, "~" is included in the set of reserved characters. + The quote function %-escapes all characters that are neither in the + unreserved chars ("always safe") nor the additional chars set via the + safe arg. + + The default for the safe arg is '/'. The character is reserved, but in + typical usage the quote function is being called on a path where the + existing slash characters are to be preserved. - By default, the quote function is intended for quoting the path - section of a URL. Thus, it will not encode '/'. This character - is reserved, but in typical usage the quote function is being - called on a path where the existing slash characters are used as - reserved characters. + Python 3.7 updates from using RFC 2396 to RFC 3986 to quote URL strings. + Now, "~" is included in the set of unreserved characters. string and safe may be either str or bytes objects. encoding and errors must not be specified if string is a bytes object. From webhook-mailer at python.org Wed Apr 10 05:17:55 2019 From: webhook-mailer at python.org (Gregory P. Smith) Date: Wed, 10 Apr 2019 09:17:55 -0000 Subject: [Python-checkins] bpo-14826: document that URLopener quotes fullurl. 
(GH-12758) Message-ID: https://github.com/python/cpython/commit/2fb2bc81c3f40d73945c6102569495140e1182c7 commit: 2fb2bc81c3f40d73945c6102569495140e1182c7 branch: master author: Gregory P. Smith committer: GitHub date: 2019-04-10T02:17:48-07:00 summary: bpo-14826: document that URLopener quotes fullurl. (GH-12758) files: M Doc/library/urllib.request.rst diff --git a/Doc/library/urllib.request.rst b/Doc/library/urllib.request.rst index f56da1b341fe..14fa27bb08af 100644 --- a/Doc/library/urllib.request.rst +++ b/Doc/library/urllib.request.rst @@ -1435,6 +1435,7 @@ some point in the future. The *data* argument has the same meaning as the *data* argument of :func:`urlopen`. + This method always quotes *fullurl* using :func:`~urllib.parse.quote`. .. method:: open_unknown(fullurl, data=None) From webhook-mailer at python.org Wed Apr 10 05:30:36 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 10 Apr 2019 09:30:36 -0000 Subject: [Python-checkins] bpo-14826: document that URLopener quotes fullurl. (GH-12758) Message-ID: https://github.com/python/cpython/commit/9d2ccf173e2e8ff069153f603d2e5b1ea757e734 commit: 9d2ccf173e2e8ff069153f603d2e5b1ea757e734 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-10T02:30:22-07:00 summary: bpo-14826: document that URLopener quotes fullurl. (GH-12758) (cherry picked from commit 2fb2bc81c3f40d73945c6102569495140e1182c7) Co-authored-by: Gregory P. Smith files: M Doc/library/urllib.request.rst diff --git a/Doc/library/urllib.request.rst b/Doc/library/urllib.request.rst index 289bfcaebc3d..1bc81e05b38b 100644 --- a/Doc/library/urllib.request.rst +++ b/Doc/library/urllib.request.rst @@ -1435,6 +1435,7 @@ some point in the future. The *data* argument has the same meaning as the *data* argument of :func:`urlopen`. + This method always quotes *fullurl* using :func:`~urllib.parse.quote`. .. method:: open_unknown(fullurl, data=None) From webhook-mailer at python.org Wed Apr 10 13:19:26 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 10 Apr 2019 17:19:26 -0000 Subject: [Python-checkins] bpo-34144: Fix of venv acvtivate.bat for win 10 (GH-8321) Message-ID: https://github.com/python/cpython/commit/6955d44b41058e3bcc59ff41860bd4cc8948c441 commit: 6955d44b41058e3bcc59ff41860bd4cc8948c441 branch: master author: Lorenz Mende committer: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> date: 2019-04-10T10:19:16-07:00 summary: bpo-34144: Fix of venv acvtivate.bat for win 10 (GH-8321) The script needs to be updated to support win 10/ 1803 chcp.com command (output has trailing dot) https://bugs.python.org/issue34144 files: A Misc/NEWS.d/next/Windows/2019-04-10-04-35-31.bpo-34144._KzB5z.rst M Lib/venv/scripts/nt/activate.bat diff --git a/Lib/venv/scripts/nt/activate.bat b/Lib/venv/scripts/nt/activate.bat index 126049f495fe..da831bb42c79 100644 --- a/Lib/venv/scripts/nt/activate.bat +++ b/Lib/venv/scripts/nt/activate.bat @@ -1,7 +1,7 @@ @echo off rem This file is UTF-8 encoded, so we need to update the current code page while executing it -for /f "tokens=2 delims=:" %%a in ('"%SystemRoot%\System32\chcp.com"') do ( +for /f "tokens=2 delims=:." 
%%a in ('"%SystemRoot%\System32\chcp.com"') do ( set "_OLD_CODEPAGE=%%a" ) if defined _OLD_CODEPAGE ( diff --git a/Misc/NEWS.d/next/Windows/2019-04-10-04-35-31.bpo-34144._KzB5z.rst b/Misc/NEWS.d/next/Windows/2019-04-10-04-35-31.bpo-34144._KzB5z.rst new file mode 100644 index 000000000000..7b8ca821b401 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2019-04-10-04-35-31.bpo-34144._KzB5z.rst @@ -0,0 +1,2 @@ +Fixed activate.bat to correctly update codepage when chcp.com returns dots in output. +Patch by Lorenz Mende. From webhook-mailer at python.org Wed Apr 10 16:18:24 2019 From: webhook-mailer at python.org (Raymond Hettinger) Date: Wed, 10 Apr 2019 20:18:24 -0000 Subject: [Python-checkins] bpo-36559: random module: optimize sha512 import (GH-12742) Message-ID: https://github.com/python/cpython/commit/d914596a671c4b0f13641359cf43aa0d6fc05070 commit: d914596a671c4b0f13641359cf43aa0d6fc05070 branch: master author: Christian Heimes committer: Raymond Hettinger date: 2019-04-10T13:18:02-07:00 summary: bpo-36559: random module: optimize sha512 import (GH-12742) The random module now prefers the lean internal _sha512 module over hashlib for seed(version=2) to optimize import time. Signed-off-by: Christian Heimes files: A Misc/NEWS.d/next/Library/2019-04-09-12-02-35.bpo-36559.LbDRrw.rst M Lib/random.py diff --git a/Lib/random.py b/Lib/random.py index 79ef30d7d18d..53981f3e4f89 100644 --- a/Lib/random.py +++ b/Lib/random.py @@ -42,11 +42,18 @@ from math import sqrt as _sqrt, acos as _acos, cos as _cos, sin as _sin from os import urandom as _urandom from _collections_abc import Set as _Set, Sequence as _Sequence -from hashlib import sha512 as _sha512 from itertools import accumulate as _accumulate, repeat as _repeat from bisect import bisect as _bisect import os as _os +try: + # hashlib is pretty heavy to load, try lean internal module first + from _sha512 import sha512 as _sha512 +except ImportError: + # fallback to official implementation + from hashlib import sha512 as _sha512 + + __all__ = ["Random","seed","random","uniform","randint","choice","sample", "randrange","shuffle","normalvariate","lognormvariate", "expovariate","vonmisesvariate","gammavariate","triangular", diff --git a/Misc/NEWS.d/next/Library/2019-04-09-12-02-35.bpo-36559.LbDRrw.rst b/Misc/NEWS.d/next/Library/2019-04-09-12-02-35.bpo-36559.LbDRrw.rst new file mode 100644 index 000000000000..2f6ee785e7de --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-04-09-12-02-35.bpo-36559.LbDRrw.rst @@ -0,0 +1,2 @@ +The random module now prefers the lean internal _sha512 module over hashlib +for seed(version=2) to optimize import time. From webhook-mailer at python.org Wed Apr 10 18:03:06 2019 From: webhook-mailer at python.org (=?utf-8?q?=C3=89ric?= Araujo) Date: Wed, 10 Apr 2019 22:03:06 -0000 Subject: [Python-checkins] fix typo in doc (#12686) Message-ID: https://github.com/python/cpython/commit/42f55ee50011ae822c0b5d4dabf231df8ea9469f commit: 42f55ee50011ae822c0b5d4dabf231df8ea9469f branch: master author: Caleb Marchent <37041441+calebmarchent at users.noreply.github.com> committer: ?ric Araujo date: 2019-04-10T18:03:02-04:00 summary: fix typo in doc (#12686) files: M Doc/extending/building.rst diff --git a/Doc/extending/building.rst b/Doc/extending/building.rst index 9fe12c2424c4..9bfad7fc3187 100644 --- a/Doc/extending/building.rst +++ b/Doc/extending/building.rst @@ -145,7 +145,7 @@ that distutils gets the invocations right. 
Distributing your extension modules =================================== -When an extension has been successfully build, there are three ways to use it. +When an extension has been successfully built, there are three ways to use it. End-users will typically want to install the module, they do so by running :: @@ -158,7 +158,7 @@ Module maintainers should produce source packages; to do so, they run :: In some cases, additional files need to be included in a source distribution; this is done through a :file:`MANIFEST.in` file; see :ref:`manifest` for details. -If the source distribution has been build successfully, maintainers can also +If the source distribution has been built successfully, maintainers can also create binary distributions. Depending on the platform, one of the following commands can be used to do so. :: From webhook-mailer at python.org Wed Apr 10 19:38:51 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 10 Apr 2019 23:38:51 -0000 Subject: [Python-checkins] bpo-36235: Fix distutils test_customize_compiler() on macOS (GH-12764) Message-ID: https://github.com/python/cpython/commit/a9bd8925c7fa50dd3cfab125b824ec192133ef49 commit: a9bd8925c7fa50dd3cfab125b824ec192133ef49 branch: master author: Victor Stinner committer: GitHub date: 2019-04-11T01:38:48+02:00 summary: bpo-36235: Fix distutils test_customize_compiler() on macOS (GH-12764) Set CUSTOMIZED_OSX_COMPILER to True to disable _osx_support.customize_compiler(). files: M Lib/distutils/tests/test_sysconfig.py diff --git a/Lib/distutils/tests/test_sysconfig.py b/Lib/distutils/tests/test_sysconfig.py index 245a6c86b111..236755d09527 100644 --- a/Lib/distutils/tests/test_sysconfig.py +++ b/Lib/distutils/tests/test_sysconfig.py @@ -92,6 +92,9 @@ def set_executables(self, **kw): 'CCSHARED': '--sc-ccshared', 'LDSHARED': 'sc_ldshared', 'SHLIB_SUFFIX': 'sc_shutil_suffix', + + # On macOS, disable _osx_support.customize_compiler() + 'CUSTOMIZED_OSX_COMPILER': 'True', } comp = compiler() From webhook-mailer at python.org Wed Apr 10 19:58:59 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 10 Apr 2019 23:58:59 -0000 Subject: [Python-checkins] bpo-36235: Fix distutils test_customize_compiler() on macOS (GH-12764) Message-ID: https://github.com/python/cpython/commit/d9b25a2627ff6f4e10d46b4de4fff941b63497c7 commit: d9b25a2627ff6f4e10d46b4de4fff941b63497c7 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-10T16:58:55-07:00 summary: bpo-36235: Fix distutils test_customize_compiler() on macOS (GH-12764) Set CUSTOMIZED_OSX_COMPILER to True to disable _osx_support.customize_compiler(). 
(cherry picked from commit a9bd8925c7fa50dd3cfab125b824ec192133ef49) Co-authored-by: Victor Stinner files: M Lib/distutils/tests/test_sysconfig.py diff --git a/Lib/distutils/tests/test_sysconfig.py b/Lib/distutils/tests/test_sysconfig.py index 245a6c86b111..236755d09527 100644 --- a/Lib/distutils/tests/test_sysconfig.py +++ b/Lib/distutils/tests/test_sysconfig.py @@ -92,6 +92,9 @@ def set_executables(self, **kw): 'CCSHARED': '--sc-ccshared', 'LDSHARED': 'sc_ldshared', 'SHLIB_SUFFIX': 'sc_shutil_suffix', + + # On macOS, disable _osx_support.customize_compiler() + 'CUSTOMIZED_OSX_COMPILER': 'True', } comp = compiler() From webhook-mailer at python.org Thu Apr 11 02:09:39 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Thu, 11 Apr 2019 06:09:39 -0000 Subject: [Python-checkins] Doc: fix typo in IncrementalDecoder.setstate (GH-12724) Message-ID: https://github.com/python/cpython/commit/b5e2959b27088d39f9954a207b91ab0ebbd149f4 commit: b5e2959b27088d39f9954a207b91ab0ebbd149f4 branch: master author: Christopher Thorne committer: Inada Naoki date: 2019-04-11T15:09:29+09:00 summary: Doc: fix typo in IncrementalDecoder.setstate (GH-12724) files: M Doc/library/codecs.rst diff --git a/Doc/library/codecs.rst b/Doc/library/codecs.rst index d2a0c8b33690..b3246376846d 100644 --- a/Doc/library/codecs.rst +++ b/Doc/library/codecs.rst @@ -638,7 +638,7 @@ define in order to be compatible with the Python codec registry. .. method:: setstate(state) - Set the state of the encoder to *state*. *state* must be a decoder state + Set the state of the decoder to *state*. *state* must be a decoder state returned by :meth:`getstate`. From webhook-mailer at python.org Thu Apr 11 02:10:38 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Thu, 11 Apr 2019 06:10:38 -0000 Subject: [Python-checkins] better __init__.py explanation in tutorial (#12763) Message-ID: https://github.com/python/cpython/commit/5410d3d283b11e2375f0c1f79728a230edd12bd0 commit: 5410d3d283b11e2375f0c1f79728a230edd12bd0 branch: master author: Inada Naoki committer: GitHub date: 2019-04-11T15:10:35+09:00 summary: better __init__.py explanation in tutorial (#12763) * better __init__.py explanation in tutorial * Update Doc/tutorial/modules.rst Co-Authored-By: methane files: M Doc/tutorial/modules.rst diff --git a/Doc/tutorial/modules.rst b/Doc/tutorial/modules.rst index accc30649f24..fd594fd97af4 100644 --- a/Doc/tutorial/modules.rst +++ b/Doc/tutorial/modules.rst @@ -425,9 +425,9 @@ your package (expressed in terms of a hierarchical filesystem): When importing the package, Python searches through the directories on ``sys.path`` looking for the package subdirectory. -The :file:`__init__.py` files are required to make Python treat the directories -as containing packages; this is done to prevent directories with a common name, -such as ``string``, from unintentionally hiding valid modules that occur later +The :file:`__init__.py` files are required to make Python treat directories +containing the file as packages. This prevents directories with a common name, +such as ``string``, unintentionally hiding valid modules that occur later on the module search path. In the simplest case, :file:`__init__.py` can just be an empty file, but it can also execute initialization code for the package or set the ``__all__`` variable, described later. 
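The tutorial change above is easy to check with a short, self-contained sketch:
a directory becomes importable as a regular package once it contains an
__init__.py file. The names below (pkgdemo, mod, VALUE) are illustrative only,
not part of the patch.

    import importlib
    import os
    import sys
    import tempfile

    # Build a throwaway package on disk: pkgdemo/__init__.py + pkgdemo/mod.py
    root = tempfile.mkdtemp()
    os.makedirs(os.path.join(root, "pkgdemo"))
    open(os.path.join(root, "pkgdemo", "__init__.py"), "w").close()  # may be empty
    with open(os.path.join(root, "pkgdemo", "mod.py"), "w") as f:
        f.write("VALUE = 42\n")

    # Because pkgdemo/ contains __init__.py, Python treats the directory as a
    # package and the submodule import below succeeds.
    sys.path.insert(0, root)
    mod = importlib.import_module("pkgdemo.mod")
    print(mod.VALUE)  # -> 42
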
From webhook-mailer at python.org Thu Apr 11 02:16:38 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 11 Apr 2019 06:16:38 -0000 Subject: [Python-checkins] better __init__.py explanation in tutorial (GH-12763) Message-ID: https://github.com/python/cpython/commit/59fd08c25ccd3dcdd18682494e8bd1245565c46a commit: 59fd08c25ccd3dcdd18682494e8bd1245565c46a branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-10T23:16:34-07:00 summary: better __init__.py explanation in tutorial (GH-12763) * better __init__.py explanation in tutorial * Update Doc/tutorial/modules.rst Co-Authored-By: methane (cherry picked from commit 5410d3d283b11e2375f0c1f79728a230edd12bd0) Co-authored-by: Inada Naoki files: M Doc/tutorial/modules.rst diff --git a/Doc/tutorial/modules.rst b/Doc/tutorial/modules.rst index accc30649f24..fd594fd97af4 100644 --- a/Doc/tutorial/modules.rst +++ b/Doc/tutorial/modules.rst @@ -425,9 +425,9 @@ your package (expressed in terms of a hierarchical filesystem): When importing the package, Python searches through the directories on ``sys.path`` looking for the package subdirectory. -The :file:`__init__.py` files are required to make Python treat the directories -as containing packages; this is done to prevent directories with a common name, -such as ``string``, from unintentionally hiding valid modules that occur later +The :file:`__init__.py` files are required to make Python treat directories +containing the file as packages. This prevents directories with a common name, +such as ``string``, unintentionally hiding valid modules that occur later on the module search path. In the simplest case, :file:`__init__.py` can just be an empty file, but it can also execute initialization code for the package or set the ``__all__`` variable, described later. From webhook-mailer at python.org Thu Apr 11 02:18:10 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 11 Apr 2019 06:18:10 -0000 Subject: [Python-checkins] Doc: fix typo in IncrementalDecoder.setstate (GH-12724) Message-ID: https://github.com/python/cpython/commit/a8c4fa531920f0025f570906b92372dd6ae5c43e commit: a8c4fa531920f0025f570906b92372dd6ae5c43e branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-10T23:18:07-07:00 summary: Doc: fix typo in IncrementalDecoder.setstate (GH-12724) (cherry picked from commit b5e2959b27088d39f9954a207b91ab0ebbd149f4) Co-authored-by: Christopher Thorne files: M Doc/library/codecs.rst diff --git a/Doc/library/codecs.rst b/Doc/library/codecs.rst index ef788bf241c8..ff4f4933b37d 100644 --- a/Doc/library/codecs.rst +++ b/Doc/library/codecs.rst @@ -638,7 +638,7 @@ define in order to be compatible with the Python codec registry. .. method:: setstate(state) - Set the state of the encoder to *state*. *state* must be a decoder state + Set the state of the decoder to *state*. *state* must be a decoder state returned by :meth:`getstate`. 
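The setstate() wording fixed above is easiest to read next to a concrete round
trip. A minimal sketch with the stdlib UTF-8 incremental codec: the state of a
partially fed decoder is captured with getstate() and restored into a fresh
decoder with setstate().

    import codecs

    dec = codecs.getincrementaldecoder("utf-8")()
    dec.decode(b"\xc3")            # first byte of a two-byte sequence, buffered
    state = dec.getstate()         # snapshot of the pending input and flags

    fresh = codecs.getincrementaldecoder("utf-8")()
    fresh.setstate(state)          # resume decoding where the first one stopped
    print(fresh.decode(b"\xa9"))   # -> 'é'
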
From webhook-mailer at python.org Thu Apr 11 02:58:50 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Thu, 11 Apr 2019 06:58:50 -0000 Subject: [Python-checkins] bpo-36416: Correct bytes.rpartition documentation (GH-12543) Message-ID: https://github.com/python/cpython/commit/efc48701496ef020e896fc6a91af3c0c612ac69a commit: efc48701496ef020e896fc6a91af3c0c612ac69a branch: master author: pewscorner committer: Inada Naoki date: 2019-04-11T15:58:43+09:00 summary: bpo-36416: Correct bytes.rpartition documentation (GH-12543) files: M Doc/library/stdtypes.rst diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index 66915a7c560a..bae989e6b3a9 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -2696,8 +2696,8 @@ arbitrary binary data. containing the part before the separator, the separator itself or its bytearray copy, and the part after the separator. If the separator is not found, return a 3-tuple - containing a copy of the original sequence, followed by two empty bytes or - bytearray objects. + containing two empty bytes or bytearray objects, followed by a copy of the + original sequence. The separator to search for may be any :term:`bytes-like object`. From webhook-mailer at python.org Thu Apr 11 03:46:58 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 11 Apr 2019 07:46:58 -0000 Subject: [Python-checkins] bpo-36597: Disable Travis CI doctest job (GH-12775) Message-ID: https://github.com/python/cpython/commit/adff01e81d291c698581eadd693caa43ae5c78ac commit: adff01e81d291c698581eadd693caa43ae5c78ac branch: master author: Victor Stinner committer: GitHub date: 2019-04-11T09:46:46+02:00 summary: bpo-36597: Disable Travis CI doctest job (GH-12775) Disable the job to unblock the CI until the issue is properly understood. 
files: M .travis.yml diff --git a/.travis.yml b/.travis.yml index 6d57ebb1d2fb..23f79d0c3766 100644 --- a/.travis.yml +++ b/.travis.yml @@ -56,20 +56,22 @@ matrix: - python -m pip install sphinx==1.8.2 blurb python-docs-theme script: - make check suspicious html SPHINXOPTS="-q -W -j4" - - os: linux - language: c - compiler: clang - env: TESTING=doctest - addons: - apt: - packages: - - xvfb - before_script: - - ./configure - - make -j4 - - make -C Doc/ PYTHON=../python venv - script: - xvfb-run make -C Doc/ PYTHON=../python SPHINXOPTS="-q -W -j4" doctest + # FIXME: bpo-36597: the doctest job fails because it fails + # FIXME: for an unknown reason + #- os: linux + # language: c + # compiler: clang + # env: TESTING=doctest + # addons: + # apt: + # packages: + # - xvfb + # before_script: + # - ./configure + # - make -j4 + # - make -C Doc/ PYTHON=../python venv + # script: + # xvfb-run make -C Doc/ PYTHON=../python SPHINXOPTS="-q -W -j4" doctest - os: osx language: c compiler: clang From webhook-mailer at python.org Thu Apr 11 04:53:54 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Thu, 11 Apr 2019 08:53:54 -0000 Subject: [Python-checkins] bpo-36597: fix random doctest failure (GH-12776) Message-ID: https://github.com/python/cpython/commit/57b1a2862a99677f09614e9e456d36aae9ddd87c commit: 57b1a2862a99677f09614e9e456d36aae9ddd87c branch: master author: Inada Naoki committer: GitHub date: 2019-04-11T17:53:49+09:00 summary: bpo-36597: fix random doctest failure (GH-12776) files: M .travis.yml M Doc/library/weakref.rst diff --git a/.travis.yml b/.travis.yml index 23f79d0c3766..6d57ebb1d2fb 100644 --- a/.travis.yml +++ b/.travis.yml @@ -56,22 +56,20 @@ matrix: - python -m pip install sphinx==1.8.2 blurb python-docs-theme script: - make check suspicious html SPHINXOPTS="-q -W -j4" - # FIXME: bpo-36597: the doctest job fails because it fails - # FIXME: for an unknown reason - #- os: linux - # language: c - # compiler: clang - # env: TESTING=doctest - # addons: - # apt: - # packages: - # - xvfb - # before_script: - # - ./configure - # - make -j4 - # - make -C Doc/ PYTHON=../python venv - # script: - # xvfb-run make -C Doc/ PYTHON=../python SPHINXOPTS="-q -W -j4" doctest + - os: linux + language: c + compiler: clang + env: TESTING=doctest + addons: + apt: + packages: + - xvfb + before_script: + - ./configure + - make -j4 + - make -C Doc/ PYTHON=../python venv + script: + xvfb-run make -C Doc/ PYTHON=../python SPHINXOPTS="-q -W -j4" doctest - os: osx language: c compiler: clang diff --git a/Doc/library/weakref.rst b/Doc/library/weakref.rst index 40bb06adfd44..b9e887cab5d1 100644 --- a/Doc/library/weakref.rst +++ b/Doc/library/weakref.rst @@ -492,7 +492,7 @@ is still alive. For instance >>> obj = Object() >>> weakref.finalize(obj, print, "obj dead or exiting") #doctest:+ELLIPSIS - >>> exit() #doctest:+SKIP + >>> del obj obj dead or exiting From webhook-mailer at python.org Thu Apr 11 05:33:30 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 11 Apr 2019 09:33:30 -0000 Subject: [Python-checkins] bpo-36389: _PyObject_IsFreed() now also detects uninitialized memory (GH-12770) Message-ID: https://github.com/python/cpython/commit/2b00db68554422ec37faba2a80179a0172df6349 commit: 2b00db68554422ec37faba2a80179a0172df6349 branch: master author: Victor Stinner committer: GitHub date: 2019-04-11T11:33:27+02:00 summary: bpo-36389: _PyObject_IsFreed() now also detects uninitialized memory (GH-12770) Replace _PyMem_IsFreed() function with _PyMem_IsPtrFreed() inline function. 
The function is now way more efficient, it became a simple comparison on integers, rather than a short loop. It detects also uninitialized bytes and "forbidden bytes" filled by debug hooks on memory allocators. Add unit tests on _PyObject_IsFreed(). files: M Include/internal/pycore_pymem.h M Include/pymem.h M Lib/test/test_capi.py M Modules/_testcapimodule.c M Objects/object.c M Objects/obmalloc.c diff --git a/Include/internal/pycore_pymem.h b/Include/internal/pycore_pymem.h index 1e7da87cd75c..78d457d67e82 100644 --- a/Include/internal/pycore_pymem.h +++ b/Include/internal/pycore_pymem.h @@ -155,6 +155,31 @@ PyAPI_FUNC(int) _PyMem_SetDefaultAllocator( PyMemAllocatorDomain domain, PyMemAllocatorEx *old_alloc); +/* Heuristic checking if a pointer value is newly allocated + (uninitialized) or newly freed. The pointer is not dereferenced, only the + pointer value is checked. + + The heuristic relies on the debug hooks on Python memory allocators which + fills newly allocated memory with CLEANBYTE (0xCB) and newly freed memory + with DEADBYTE (0xDB). Detect also "untouchable bytes" marked + with FORBIDDENBYTE (0xFB). */ +static inline int _PyMem_IsPtrFreed(void *ptr) +{ + uintptr_t value = (uintptr_t)ptr; +#if SIZEOF_VOID_P == 8 + return (value == (uintptr_t)0xCBCBCBCBCBCBCBCB + || value == (uintptr_t)0xDBDBDBDBDBDBDBDB + || value == (uintptr_t)0xFBFBFBFBFBFBFBFB + ); +#elif SIZEOF_VOID_P == 4 + return (value == (uintptr_t)0xCBCBCBCB + || value == (uintptr_t)0xDBDBDBDB + || value == (uintptr_t)0xFBFBFBFB); +#else +# error "unknown pointer size" +#endif +} + #ifdef __cplusplus } #endif diff --git a/Include/pymem.h b/Include/pymem.h index 23457adb5a45..93243f8553b8 100644 --- a/Include/pymem.h +++ b/Include/pymem.h @@ -23,8 +23,6 @@ PyAPI_FUNC(int) _PyMem_SetupAllocators(const char *opt); /* Try to get the allocators name set by _PyMem_SetupAllocators(). 
*/ PyAPI_FUNC(const char*) _PyMem_GetAllocatorsName(void); - -PyAPI_FUNC(int) _PyMem_IsFreed(void *ptr, size_t size); #endif /* !defined(Py_LIMITED_API) */ diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py index 7c68b2c0fc2c..3cd39d4e8b28 100644 --- a/Lib/test/test_capi.py +++ b/Lib/test/test_capi.py @@ -526,6 +526,29 @@ def test_pyobject_malloc_without_gil(self): code = 'import _testcapi; _testcapi.pyobject_malloc_without_gil()' self.check_malloc_without_gil(code) + def check_pyobject_is_freed(self, func): + code = textwrap.dedent(''' + import gc, os, sys, _testcapi + # Disable the GC to avoid crash on GC collection + gc.disable() + obj = _testcapi.{func}() + error = (_testcapi.pyobject_is_freed(obj) == False) + # Exit immediately to avoid a crash while deallocating + # the invalid object + os._exit(int(error)) + ''') + code = code.format(func=func) + assert_python_ok('-c', code, PYTHONMALLOC=self.PYTHONMALLOC) + + def test_pyobject_is_freed_uninitialized(self): + self.check_pyobject_is_freed('pyobject_uninitialized') + + def test_pyobject_is_freed_forbidden_bytes(self): + self.check_pyobject_is_freed('pyobject_forbidden_bytes') + + def test_pyobject_is_freed_free(self): + self.check_pyobject_is_freed('pyobject_freed') + class PyMemMallocDebugTests(PyMemDebugTests): PYTHONMALLOC = 'malloc_debug' diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index c515efe660b5..1180b4b176e9 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -4236,6 +4236,59 @@ test_pymem_getallocatorsname(PyObject *self, PyObject *args) } +static PyObject* +pyobject_is_freed(PyObject *self, PyObject *op) +{ + int res = _PyObject_IsFreed(op); + return PyBool_FromLong(res); +} + + +static PyObject* +pyobject_uninitialized(PyObject *self, PyObject *args) +{ + PyObject *op = (PyObject *)PyObject_Malloc(sizeof(PyObject)); + if (op == NULL) { + return NULL; + } + /* Initialize reference count to avoid early crash in ceval or GC */ + Py_REFCNT(op) = 1; + /* object fields like ob_type are uninitialized! */ + return op; +} + + +static PyObject* +pyobject_forbidden_bytes(PyObject *self, PyObject *args) +{ + /* Allocate an incomplete PyObject structure: truncate 'ob_type' field */ + PyObject *op = (PyObject *)PyObject_Malloc(offsetof(PyObject, ob_type)); + if (op == NULL) { + return NULL; + } + /* Initialize reference count to avoid early crash in ceval or GC */ + Py_REFCNT(op) = 1; + /* ob_type field is after the memory block: part of "forbidden bytes" + when using debug hooks on memory allocatrs! */ + return op; +} + + +static PyObject* +pyobject_freed(PyObject *self, PyObject *args) +{ + PyObject *op = _PyObject_CallNoArg((PyObject *)&PyBaseObject_Type); + if (op == NULL) { + return NULL; + } + Py_TYPE(op)->tp_dealloc(op); + /* Reset reference count to avoid early crash in ceval or GC */ + Py_REFCNT(op) = 1; + /* object memory is freed! 
*/ + return op; +} + + static PyObject* pyobject_malloc_without_gil(PyObject *self, PyObject *args) { @@ -4907,6 +4960,10 @@ static PyMethodDef TestMethods[] = { {"pymem_api_misuse", pymem_api_misuse, METH_NOARGS}, {"pymem_malloc_without_gil", pymem_malloc_without_gil, METH_NOARGS}, {"pymem_getallocatorsname", test_pymem_getallocatorsname, METH_NOARGS}, + {"pyobject_is_freed", (PyCFunction)(void(*)(void))pyobject_is_freed, METH_O}, + {"pyobject_uninitialized", pyobject_uninitialized, METH_NOARGS}, + {"pyobject_forbidden_bytes", pyobject_forbidden_bytes, METH_NOARGS}, + {"pyobject_freed", pyobject_freed, METH_NOARGS}, {"pyobject_malloc_without_gil", pyobject_malloc_without_gil, METH_NOARGS}, {"tracemalloc_track", tracemalloc_track, METH_VARARGS}, {"tracemalloc_untrack", tracemalloc_untrack, METH_VARARGS}, diff --git a/Objects/object.c b/Objects/object.c index bd44acacb615..c9aa479abdc1 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -425,18 +425,17 @@ _Py_BreakPoint(void) int _PyObject_IsFreed(PyObject *op) { - uintptr_t ptr = (uintptr_t)op; - if (_PyMem_IsFreed(&ptr, sizeof(ptr))) { + if (_PyMem_IsPtrFreed(op) || _PyMem_IsPtrFreed(op->ob_type)) { return 1; } - int freed = _PyMem_IsFreed(&op->ob_type, sizeof(op->ob_type)); - /* ignore op->ob_ref: the value can have be modified + /* ignore op->ob_ref: its value can have be modified by Py_INCREF() and Py_DECREF(). */ #ifdef Py_TRACE_REFS - freed &= _PyMem_IsFreed(&op->_ob_next, sizeof(op->_ob_next)); - freed &= _PyMem_IsFreed(&op->_ob_prev, sizeof(op->_ob_prev)); + if (_PyMem_IsPtrFreed(op->_ob_next) || _PyMem_IsPtrFreed(op->_ob_prev)) { + return 1; + } #endif - return freed; + return 0; } @@ -453,7 +452,7 @@ _PyObject_Dump(PyObject* op) if (_PyObject_IsFreed(op)) { /* It seems like the object memory has been freed: don't access it to prevent a segmentation fault. */ - fprintf(stderr, "\n"); + fprintf(stderr, "\n"); return; } diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c index 1c2a32050f93..e919fad595be 100644 --- a/Objects/obmalloc.c +++ b/Objects/obmalloc.c @@ -1914,7 +1914,7 @@ _Py_GetAllocatedBlocks(void) /* Special bytes broadcast into debug memory blocks at appropriate times. * Strings of these are unlikely to be valid addresses, floats, ints or - * 7-bit ASCII. + * 7-bit ASCII. If modified, _PyMem_IsPtrFreed() should be updated as well. */ #undef CLEANBYTE #undef DEADBYTE @@ -2059,22 +2059,6 @@ _PyMem_DebugRawCalloc(void *ctx, size_t nelem, size_t elsize) } -/* Heuristic checking if the memory has been freed. Rely on the debug hooks on - Python memory allocators which fills the memory with DEADBYTE (0xDB) when - memory is deallocated. */ -int -_PyMem_IsFreed(void *ptr, size_t size) -{ - unsigned char *bytes = ptr; - for (size_t i=0; i < size; i++) { - if (bytes[i] != DEADBYTE) { - return 0; - } - } - return 1; -} - - /* The debug free first checks the 2*SST bytes on each end for sanity (in particular, that the FORBIDDENBYTEs with the api ID are still intact). Then fills the original bytes with DEADBYTE. 
From webhook-mailer at python.org Thu Apr 11 06:05:40 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Thu, 11 Apr 2019 10:05:40 -0000 Subject: [Python-checkins] bpo-36597: fix weakref example code (GH-12779) Message-ID: https://github.com/python/cpython/commit/b3c92c6ae923ffb2b9ac5f80b28ecd689de48662 commit: b3c92c6ae923ffb2b9ac5f80b28ecd689de48662 branch: master author: Inada Naoki committer: GitHub date: 2019-04-11T19:05:32+09:00 summary: bpo-36597: fix weakref example code (GH-12779) Commit 57b1a2862 fixed doctest, but example code is not match with document. Just skip doctest for the block. files: M Doc/library/weakref.rst diff --git a/Doc/library/weakref.rst b/Doc/library/weakref.rst index b9e887cab5d1..7f3d267d74c2 100644 --- a/Doc/library/weakref.rst +++ b/Doc/library/weakref.rst @@ -489,11 +489,14 @@ Unless you set the :attr:`~finalize.atexit` attribute to :const:`False`, a finalizer will be called when the program exits if it is still alive. For instance - >>> obj = Object() - >>> weakref.finalize(obj, print, "obj dead or exiting") #doctest:+ELLIPSIS - - >>> del obj - obj dead or exiting +.. doctest:: + :options: +SKIP + + >>> obj = Object() + >>> weakref.finalize(obj, print, "obj dead or exiting") + + >>> exit() + obj dead or exiting Comparing finalizers with :meth:`__del__` methods From webhook-mailer at python.org Thu Apr 11 06:11:50 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Thu, 11 Apr 2019 10:11:50 -0000 Subject: [Python-checkins] bpo-36575: lsprof: Use _PyTime_GetPerfCounter() (GH-8378) Message-ID: https://github.com/python/cpython/commit/536a35b3f14888999f1ffa5be7239d0c26b73d7a commit: 536a35b3f14888999f1ffa5be7239d0c26b73d7a branch: master author: Inada Naoki committer: GitHub date: 2019-04-11T19:11:46+09:00 summary: bpo-36575: lsprof: Use _PyTime_GetPerfCounter() (GH-8378) files: A Misc/NEWS.d/next/Library/2019-04-09-22-40-52.bpo-36575.Vg_p92.rst M Modules/_lsprof.c diff --git a/Misc/NEWS.d/next/Library/2019-04-09-22-40-52.bpo-36575.Vg_p92.rst b/Misc/NEWS.d/next/Library/2019-04-09-22-40-52.bpo-36575.Vg_p92.rst new file mode 100644 index 000000000000..3e305f132c07 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-04-09-22-40-52.bpo-36575.Vg_p92.rst @@ -0,0 +1,4 @@ +The ``_lsprof`` module now uses internal timer same to ``time.perf_counter()`` by default. +``gettimeofday(2)`` was used on Unix. New timer has better resolution on most Unix +platforms and timings are no longer impacted by system clock updates since ``perf_counter()`` +is monotonic. Patch by Inada Naoki. diff --git a/Modules/_lsprof.c b/Modules/_lsprof.c index 4508f5e65695..c4e0f52389d9 100644 --- a/Modules/_lsprof.c +++ b/Modules/_lsprof.c @@ -2,62 +2,6 @@ #include "frameobject.h" #include "rotatingtree.h" -/*** Selection of a high-precision timer ***/ - -#ifdef MS_WINDOWS - -#include - -static long long -hpTimer(void) -{ - LARGE_INTEGER li; - QueryPerformanceCounter(&li); - return li.QuadPart; -} - -static double -hpTimerUnit(void) -{ - LARGE_INTEGER li; - if (QueryPerformanceFrequency(&li)) - return 1.0 / li.QuadPart; - else - return 0.000001; /* unlikely */ -} - -#else /* !MS_WINDOWS */ - -#ifndef HAVE_GETTIMEOFDAY -#error "This module requires gettimeofday() on non-Windows platforms!" 
-#endif - -#include -#include - -static long long -hpTimer(void) -{ - struct timeval tv; - long long ret; -#ifdef GETTIMEOFDAY_NO_TZ - gettimeofday(&tv); -#else - gettimeofday(&tv, (struct timezone *)NULL); -#endif - ret = tv.tv_sec; - ret = ret * 1000000 + tv.tv_usec; - return ret; -} - -static double -hpTimerUnit(void) -{ - return 0.000001; -} - -#endif /* MS_WINDOWS */ - /************************************************************/ /* Written by Brett Rosen and Ted Czotter */ @@ -66,8 +10,8 @@ struct _ProfilerEntry; /* represents a function called from another function */ typedef struct _ProfilerSubEntry { rotating_node_t header; - long long tt; - long long it; + _PyTime_t tt; + _PyTime_t it; long callcount; long recursivecallcount; long recursionLevel; @@ -77,8 +21,8 @@ typedef struct _ProfilerSubEntry { typedef struct _ProfilerEntry { rotating_node_t header; PyObject *userObj; /* PyCodeObject, or a descriptive str for builtins */ - long long tt; /* total time in this entry */ - long long it; /* inline time in this entry (not in subcalls) */ + _PyTime_t tt; /* total time in this entry */ + _PyTime_t it; /* inline time in this entry (not in subcalls) */ long callcount; /* how many times this was called */ long recursivecallcount; /* how many times called recursively */ long recursionLevel; @@ -86,8 +30,8 @@ typedef struct _ProfilerEntry { } ProfilerEntry; typedef struct _ProfilerContext { - long long t0; - long long subt; + _PyTime_t t0; + _PyTime_t subt; struct _ProfilerContext *previous; ProfilerEntry *ctxEntry; } ProfilerContext; @@ -114,41 +58,46 @@ static PyTypeObject PyProfiler_Type; /*** External Timers ***/ -#define DOUBLE_TIMER_PRECISION 4294967296.0 -static PyObject *empty_tuple; - -static long long CallExternalTimer(ProfilerObject *pObj) +static _PyTime_t CallExternalTimer(ProfilerObject *pObj) { - long long result; - PyObject *o = PyObject_Call(pObj->externalTimer, empty_tuple, NULL); + PyObject *o = _PyObject_CallNoArg(pObj->externalTimer); if (o == NULL) { PyErr_WriteUnraisable(pObj->externalTimer); return 0; } + + _PyTime_t result; + int err; if (pObj->externalTimerUnit > 0.0) { /* interpret the result as an integer that will be scaled in profiler_getstats() */ - result = PyLong_AsLongLong(o); + err = _PyTime_FromNanosecondsObject(&result, o); } else { /* interpret the result as a double measured in seconds. - As the profiler works with long long internally + As the profiler works with _PyTime_t internally we convert it to a large integer */ - double val = PyFloat_AsDouble(o); - /* error handling delayed to the code below */ - result = (long long) (val * DOUBLE_TIMER_PRECISION); + err = _PyTime_FromSecondsObject(&result, o, _PyTime_ROUND_FLOOR); } Py_DECREF(o); - if (PyErr_Occurred()) { + if (err < 0) { PyErr_WriteUnraisable(pObj->externalTimer); return 0; } return result; } -#define CALL_TIMER(pObj) ((pObj)->externalTimer ? 
\ - CallExternalTimer(pObj) : \ - hpTimer()) +static inline _PyTime_t +call_timer(ProfilerObject *pObj) +{ + if (pObj->externalTimer != NULL) { + return CallExternalTimer(pObj); + } + else { + return _PyTime_GetPerfCounter(); + } +} + /*** ProfilerObject ***/ @@ -332,14 +281,14 @@ initContext(ProfilerObject *pObj, ProfilerContext *self, ProfilerEntry *entry) if (subentry) ++subentry->recursionLevel; } - self->t0 = CALL_TIMER(pObj); + self->t0 = call_timer(pObj); } static void Stop(ProfilerObject *pObj, ProfilerContext *self, ProfilerEntry *entry) { - long long tt = CALL_TIMER(pObj) - self->t0; - long long it = tt - self->subt; + _PyTime_t tt = call_timer(pObj) - self->t0; + _PyTime_t it = tt - self->subt; if (self->previous) self->previous->subt += tt; pObj->currentProfilerContext = self->previous; @@ -631,12 +580,14 @@ profiler_getstats(ProfilerObject *pObj, PyObject* noarg) statscollector_t collect; if (pending_exception(pObj)) return NULL; - if (!pObj->externalTimer) - collect.factor = hpTimerUnit(); - else if (pObj->externalTimerUnit > 0.0) + if (!pObj->externalTimer || pObj->externalTimerUnit == 0.0) { + _PyTime_t onesec = _PyTime_FromSeconds(1); + collect.factor = (double)1 / onesec; + } + else { collect.factor = pObj->externalTimerUnit; - else - collect.factor = 1.0 / DOUBLE_TIMER_PRECISION; + } + collect.list = PyList_New(0); if (collect.list == NULL) return NULL; @@ -882,7 +833,6 @@ PyInit__lsprof(void) (PyObject*) &StatsEntryType); PyModule_AddObject(module, "profiler_subentry", (PyObject*) &StatsSubEntryType); - empty_tuple = PyTuple_New(0); initialized = 1; return module; } From webhook-mailer at python.org Thu Apr 11 06:18:51 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 11 Apr 2019 10:18:51 -0000 Subject: [Python-checkins] bpo-36416: Correct bytes.rpartition documentation (GH-12543) Message-ID: https://github.com/python/cpython/commit/04b114eede82c7ffd7b3d9b40e8bf707780b022b commit: 04b114eede82c7ffd7b3d9b40e8bf707780b022b branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-11T03:18:48-07:00 summary: bpo-36416: Correct bytes.rpartition documentation (GH-12543) (cherry picked from commit efc48701496ef020e896fc6a91af3c0c612ac69a) Co-authored-by: pewscorner files: M Doc/library/stdtypes.rst diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index c21cb0d9ea0f..3b74331e51f2 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -2677,8 +2677,8 @@ arbitrary binary data. containing the part before the separator, the separator itself or its bytearray copy, and the part after the separator. If the separator is not found, return a 3-tuple - containing a copy of the original sequence, followed by two empty bytes or - bytearray objects. + containing two empty bytes or bytearray objects, followed by a copy of the + original sequence. The separator to search for may be any :term:`bytes-like object`. 
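For reference, the corrected rpartition() wording above matches the actual
behaviour: when the separator is missing, the two empty elements come first and
the copy of the original sequence comes last (the mirror image of partition()).
A quick interactive check with throwaway values:

    >>> b"spam-egg-ham".rpartition(b"-")   # split at the last separator
    (b'spam-egg', b'-', b'ham')
    >>> b"spam".rpartition(b"-")           # not found: empties first
    (b'', b'', b'spam')
    >>> b"spam".partition(b"-")            # compare: partition() puts them last
    (b'spam', b'', b'')
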
From webhook-mailer at python.org Thu Apr 11 06:37:56 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Thu, 11 Apr 2019 10:37:56 -0000 Subject: [Python-checkins] bpo-36597: fix random doctest failure (GH-12778) Message-ID: https://github.com/python/cpython/commit/ac31da8f3710f9f9b8dbb4c36b2108fb1e5b4a48 commit: ac31da8f3710f9f9b8dbb4c36b2108fb1e5b4a48 branch: 3.7 author: Inada Naoki committer: GitHub date: 2019-04-11T19:37:53+09:00 summary: bpo-36597: fix random doctest failure (GH-12778) files: M Doc/library/weakref.rst diff --git a/Doc/library/weakref.rst b/Doc/library/weakref.rst index 40bb06adfd44..7f3d267d74c2 100644 --- a/Doc/library/weakref.rst +++ b/Doc/library/weakref.rst @@ -489,11 +489,14 @@ Unless you set the :attr:`~finalize.atexit` attribute to :const:`False`, a finalizer will be called when the program exits if it is still alive. For instance - >>> obj = Object() - >>> weakref.finalize(obj, print, "obj dead or exiting") #doctest:+ELLIPSIS - - >>> exit() #doctest:+SKIP - obj dead or exiting +.. doctest:: + :options: +SKIP + + >>> obj = Object() + >>> weakref.finalize(obj, print, "obj dead or exiting") + + >>> exit() + obj dead or exiting Comparing finalizers with :meth:`__del__` methods From webhook-mailer at python.org Thu Apr 11 07:01:20 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 11 Apr 2019 11:01:20 -0000 Subject: [Python-checkins] bpo-36389: Change PyMem_SetupDebugHooks() constants (GH-12782) Message-ID: https://github.com/python/cpython/commit/4c409beb4c360a73d054f37807d3daad58d1b567 commit: 4c409beb4c360a73d054f37807d3daad58d1b567 branch: master author: Victor Stinner committer: GitHub date: 2019-04-11T13:01:15+02:00 summary: bpo-36389: Change PyMem_SetupDebugHooks() constants (GH-12782) Modify CLEANBYTE, DEADDYTE and FORBIDDENBYTE constants: use 0xCD, 0xDD and 0xFD, rather than 0xCB, 0xBB and 0xFB, to use the same byte patterns than Windows CRT debug malloc() and free(). files: A Misc/NEWS.d/next/C API/2019-04-11-12-20-35.bpo-36389.P9QFoP.rst M Doc/c-api/memory.rst M Include/internal/pycore_pymem.h M Lib/test/test_capi.py M Objects/object.c M Objects/obmalloc.c diff --git a/Doc/c-api/memory.rst b/Doc/c-api/memory.rst index b79b7e49b67e..65a207691b8a 100644 --- a/Doc/c-api/memory.rst +++ b/Doc/c-api/memory.rst @@ -440,8 +440,9 @@ Customize Memory Allocators Setup hooks to detect bugs in the Python memory allocator functions. - Newly allocated memory is filled with the byte ``0xCB``, freed memory is - filled with the byte ``0xDB``. + Newly allocated memory is filled with the byte ``0xCD`` (``CLEANBYTE``), + freed memory is filled with the byte ``0xDD`` (``DEADBYTE``). Memory blocks + are surrounded by "forbidden bytes" (``FORBIDDENBYTE``: byte ``0xFD``). Runtime checks: @@ -471,6 +472,12 @@ Customize Memory Allocators if the GIL is held when functions of :c:data:`PYMEM_DOMAIN_OBJ` and :c:data:`PYMEM_DOMAIN_MEM` domains are called. + .. versionchanged:: 3.8.0 + Byte patterns ``0xCB`` (``CLEANBYTE``), ``0xDB`` (``DEADBYTE``) and + ``0xFB`` (``FORBIDDENBYTE``) have been replaced with ``0xCD``, ``0xDD`` + and ``0xFD`` to use the same values than Windows CRT debug ``malloc()`` + and ``free()``. + .. _pymalloc: diff --git a/Include/internal/pycore_pymem.h b/Include/internal/pycore_pymem.h index 78d457d67e82..8da1bd9e304a 100644 --- a/Include/internal/pycore_pymem.h +++ b/Include/internal/pycore_pymem.h @@ -160,21 +160,20 @@ PyAPI_FUNC(int) _PyMem_SetDefaultAllocator( pointer value is checked. 
The heuristic relies on the debug hooks on Python memory allocators which - fills newly allocated memory with CLEANBYTE (0xCB) and newly freed memory - with DEADBYTE (0xDB). Detect also "untouchable bytes" marked - with FORBIDDENBYTE (0xFB). */ + fills newly allocated memory with CLEANBYTE (0xCD) and newly freed memory + with DEADBYTE (0xDD). Detect also "untouchable bytes" marked + with FORBIDDENBYTE (0xFD). */ static inline int _PyMem_IsPtrFreed(void *ptr) { uintptr_t value = (uintptr_t)ptr; #if SIZEOF_VOID_P == 8 - return (value == (uintptr_t)0xCBCBCBCBCBCBCBCB - || value == (uintptr_t)0xDBDBDBDBDBDBDBDB - || value == (uintptr_t)0xFBFBFBFBFBFBFBFB - ); + return (value == (uintptr_t)0xCDCDCDCDCDCDCDCD + || value == (uintptr_t)0xDDDDDDDDDDDDDDDD + || value == (uintptr_t)0xFDFDFDFDFDFDFDFD); #elif SIZEOF_VOID_P == 4 - return (value == (uintptr_t)0xCBCBCBCB - || value == (uintptr_t)0xDBDBDBDB - || value == (uintptr_t)0xFBFBFBFB); + return (value == (uintptr_t)0xCDCDCDCD + || value == (uintptr_t)0xDDDDDDDD + || value == (uintptr_t)0xFDFDFDFD); #else # error "unknown pointer size" #endif diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py index 3cd39d4e8b28..33c98ac28bc5 100644 --- a/Lib/test/test_capi.py +++ b/Lib/test/test_capi.py @@ -480,11 +480,11 @@ def test_buffer_overflow(self): r" The [0-9] pad bytes at p-[0-9] are FORBIDDENBYTE, as expected.\n" r" The [0-9] pad bytes at tail={ptr} are not all FORBIDDENBYTE \(0x[0-9a-f]{{2}}\):\n" r" at tail\+0: 0x78 \*\*\* OUCH\n" - r" at tail\+1: 0xfb\n" - r" at tail\+2: 0xfb\n" + r" at tail\+1: 0xfd\n" + r" at tail\+2: 0xfd\n" r" .*\n" r" The block was made by call #[0-9]+ to debug malloc/realloc.\n" - r" Data at p: cb cb cb .*\n" + r" Data at p: cd cd cd .*\n" r"\n" r"Enable tracemalloc to get the memory block allocation traceback\n" r"\n" @@ -500,7 +500,7 @@ def test_api_misuse(self): r" The [0-9] pad bytes at p-[0-9] are FORBIDDENBYTE, as expected.\n" r" The [0-9] pad bytes at tail={ptr} are FORBIDDENBYTE, as expected.\n" r" The block was made by call #[0-9]+ to debug malloc/realloc.\n" - r" Data at p: cb cb cb .*\n" + r" Data at p: cd cd cd .*\n" r"\n" r"Enable tracemalloc to get the memory block allocation traceback\n" r"\n" diff --git a/Misc/NEWS.d/next/C API/2019-04-11-12-20-35.bpo-36389.P9QFoP.rst b/Misc/NEWS.d/next/C API/2019-04-11-12-20-35.bpo-36389.P9QFoP.rst new file mode 100644 index 000000000000..f2b507a9c230 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2019-04-11-12-20-35.bpo-36389.P9QFoP.rst @@ -0,0 +1,5 @@ +Change the value of ``CLEANBYTE``, ``DEADDYTE`` and ``FORBIDDENBYTE`` internal +constants used by debug hooks on Python memory allocators +(:c:func:`PyMem_SetupDebugHooks` function). Byte patterns ``0xCB``, ``0xDB`` +and ``0xFB`` have been replaced with ``0xCD``, ``0xDD`` and ``0xFD`` to use the +same values than Windows CRT debug ``malloc()`` and ``free()``. diff --git a/Objects/object.c b/Objects/object.c index c9aa479abdc1..3fad73c493db 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -415,13 +415,12 @@ _Py_BreakPoint(void) } -/* Heuristic checking if the object memory has been deallocated. - Rely on the debug hooks on Python memory allocators which fills the memory - with DEADBYTE (0xDB) when memory is deallocated. +/* Heuristic checking if the object memory is uninitialized or deallocated. + Rely on the debug hooks on Python memory allocators: + see _PyMem_IsPtrFreed(). The function can be used to prevent segmentation fault on dereferencing - pointers like 0xdbdbdbdbdbdbdbdb. 
Such pointer is very unlikely to be mapped - in memory. */ + pointers like 0xDDDDDDDDDDDDDDDD. */ int _PyObject_IsFreed(PyObject *op) { diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c index e919fad595be..be43c7a1c2b8 100644 --- a/Objects/obmalloc.c +++ b/Objects/obmalloc.c @@ -1915,13 +1915,16 @@ _Py_GetAllocatedBlocks(void) /* Special bytes broadcast into debug memory blocks at appropriate times. * Strings of these are unlikely to be valid addresses, floats, ints or * 7-bit ASCII. If modified, _PyMem_IsPtrFreed() should be updated as well. + * + * Byte patterns 0xCB, 0xBB and 0xFB have been replaced with 0xCD, 0xDD and + * 0xFD to use the same values than Windows CRT debug malloc() and free(). */ #undef CLEANBYTE #undef DEADBYTE #undef FORBIDDENBYTE -#define CLEANBYTE 0xCB /* clean (newly allocated) memory */ -#define DEADBYTE 0xDB /* dead (newly freed) memory */ -#define FORBIDDENBYTE 0xFB /* untouchable bytes at each end of a block */ +#define CLEANBYTE 0xCD /* clean (newly allocated) memory */ +#define DEADBYTE 0xDD /* dead (newly freed) memory */ +#define FORBIDDENBYTE 0xFD /* untouchable bytes at each end of a block */ static size_t serialno = 0; /* incremented on each debug {m,re}alloc */ From webhook-mailer at python.org Thu Apr 11 16:28:18 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 11 Apr 2019 20:28:18 -0000 Subject: [Python-checkins] bpo-36605: make tags: parse Modules/_io directory (GH-12789) Message-ID: https://github.com/python/cpython/commit/21a74a9d77c5ac628808b9faace18b824ca056f7 commit: 21a74a9d77c5ac628808b9faace18b824ca056f7 branch: master author: Victor Stinner committer: GitHub date: 2019-04-11T22:28:12+02:00 summary: bpo-36605: make tags: parse Modules/_io directory (GH-12789) "make tags" and "make TAGS" now also parse Modules/_io/*.c and Modules/_io/*.h. files: A Misc/NEWS.d/next/Build/2019-04-11-18-50-58.bpo-36605.gk5czf.rst M configure M configure.ac diff --git a/Misc/NEWS.d/next/Build/2019-04-11-18-50-58.bpo-36605.gk5czf.rst b/Misc/NEWS.d/next/Build/2019-04-11-18-50-58.bpo-36605.gk5czf.rst new file mode 100644 index 000000000000..4a558fa94d6f --- /dev/null +++ b/Misc/NEWS.d/next/Build/2019-04-11-18-50-58.bpo-36605.gk5czf.rst @@ -0,0 +1,2 @@ +``make tags`` and ``make TAGS`` now also parse ``Modules/_io/*.c`` and +``Modules/_io/*.h``. 
diff --git a/configure b/configure index d0ef8a601b28..fcfa71496e40 100755 --- a/configure +++ b/configure @@ -783,7 +783,6 @@ infodir docdir oldincludedir includedir -runstatedir localstatedir sharedstatedir sysconfdir @@ -895,7 +894,6 @@ datadir='${datarootdir}' sysconfdir='${prefix}/etc' sharedstatedir='${prefix}/com' localstatedir='${prefix}/var' -runstatedir='${localstatedir}/run' includedir='${prefix}/include' oldincludedir='/usr/include' docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' @@ -1148,15 +1146,6 @@ do | -silent | --silent | --silen | --sile | --sil) silent=yes ;; - -runstatedir | --runstatedir | --runstatedi | --runstated \ - | --runstate | --runstat | --runsta | --runst | --runs \ - | --run | --ru | --r) - ac_prev=runstatedir ;; - -runstatedir=* | --runstatedir=* | --runstatedi=* | --runstated=* \ - | --runstate=* | --runstat=* | --runsta=* | --runst=* | --runs=* \ - | --run=* | --ru=* | --r=*) - runstatedir=$ac_optarg ;; - -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) ac_prev=sbindir ;; -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ @@ -1294,7 +1283,7 @@ fi for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ datadir sysconfdir sharedstatedir localstatedir includedir \ oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ - libdir localedir mandir runstatedir + libdir localedir mandir do eval ac_val=\$$ac_var # Remove trailing slashes. @@ -1447,7 +1436,6 @@ Fine tuning of the installation directories: --sysconfdir=DIR read-only single-machine data [PREFIX/etc] --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] --localstatedir=DIR modifiable single-machine data [PREFIX/var] - --runstatedir=DIR modifiable per-process data [LOCALSTATEDIR/run] --libdir=DIR object code libraries [EPREFIX/lib] --includedir=DIR C header files [PREFIX/include] --oldincludedir=DIR C header files for non-gcc [/usr/include] @@ -16544,7 +16532,7 @@ do done -SRCDIRS="Parser Objects Python Modules Programs" +SRCDIRS="Parser Objects Python Modules Modules/_io Programs" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for build directories" >&5 $as_echo_n "checking for build directories... " >&6; } for dir in $SRCDIRS; do diff --git a/configure.ac b/configure.ac index 73ee71c6d249..880e47f70fb3 100644 --- a/configure.ac +++ b/configure.ac @@ -2706,7 +2706,7 @@ then # when running test_compile.py. LINKFORSHARED='-Wl,-E -N 2048K';; VxWorks*) - LINKFORSHARED='--export-dynamic';; + LINKFORSHARED='--export-dynamic';; esac fi AC_MSG_RESULT($LINKFORSHARED) @@ -5245,7 +5245,7 @@ do done AC_SUBST(SRCDIRS) -SRCDIRS="Parser Objects Python Modules Programs" +SRCDIRS="Parser Objects Python Modules Modules/_io Programs" AC_MSG_CHECKING(for build directories) for dir in $SRCDIRS; do if test ! 
-d $dir; then From webhook-mailer at python.org Thu Apr 11 16:30:36 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 11 Apr 2019 20:30:36 -0000 Subject: [Python-checkins] [3.7] bpo-36389: _PyObject_IsFreed() now also detects uninitialized memory (GH-12770) (GH-12788) Message-ID: https://github.com/python/cpython/commit/9e23f0a27cb8bf6e4ea1d2aef36a91502282bbc9 commit: 9e23f0a27cb8bf6e4ea1d2aef36a91502282bbc9 branch: 3.7 author: Victor Stinner committer: GitHub date: 2019-04-11T22:30:31+02:00 summary: [3.7] bpo-36389: _PyObject_IsFreed() now also detects uninitialized memory (GH-12770) (GH-12788) * bpo-36389: _PyObject_IsFreed() now also detects uninitialized memory (GH-12770) Replace _PyMem_IsFreed() function with _PyMem_IsPtrFreed() inline function. The function is now way more efficient, it became a simple comparison on integers, rather than a short loop. It detects also uninitialized bytes and "forbidden bytes" filled by debug hooks on memory allocators. Add unit tests on _PyObject_IsFreed(). (cherry picked from commit 2b00db68554422ec37faba2a80179a0172df6349) * bpo-36389: Change PyMem_SetupDebugHooks() constants (GH-12782) Modify CLEANBYTE, DEADDYTE and FORBIDDENBYTE constants: use 0xCD, 0xDD and 0xFD, rather than 0xCB, 0xBB and 0xFB, to use the same byte patterns than Windows CRT debug malloc() and free(). (cherry picked from commit 4c409beb4c360a73d054f37807d3daad58d1b567) files: A Misc/NEWS.d/next/C API/2019-04-11-12-20-35.bpo-36389.P9QFoP.rst M Doc/c-api/memory.rst M Include/internal/mem.h M Include/pymem.h M Lib/test/test_capi.py M Modules/_testcapimodule.c M Objects/object.c M Objects/obmalloc.c diff --git a/Doc/c-api/memory.rst b/Doc/c-api/memory.rst index b79b7e49b67e..9b42900b4350 100644 --- a/Doc/c-api/memory.rst +++ b/Doc/c-api/memory.rst @@ -440,8 +440,9 @@ Customize Memory Allocators Setup hooks to detect bugs in the Python memory allocator functions. - Newly allocated memory is filled with the byte ``0xCB``, freed memory is - filled with the byte ``0xDB``. + Newly allocated memory is filled with the byte ``0xCD`` (``CLEANBYTE``), + freed memory is filled with the byte ``0xDD`` (``DEADBYTE``). Memory blocks + are surrounded by "forbidden bytes" (``FORBIDDENBYTE``: byte ``0xFD``). Runtime checks: @@ -471,6 +472,12 @@ Customize Memory Allocators if the GIL is held when functions of :c:data:`PYMEM_DOMAIN_OBJ` and :c:data:`PYMEM_DOMAIN_MEM` domains are called. + .. versionchanged:: 3.7.3 + Byte patterns ``0xCB`` (``CLEANBYTE``), ``0xDB`` (``DEADBYTE``) and + ``0xFB`` (``FORBIDDENBYTE``) have been replaced with ``0xCD``, ``0xDD`` + and ``0xFD`` to use the same values than Windows CRT debug ``malloc()`` + and ``free()``. + .. _pymalloc: diff --git a/Include/internal/mem.h b/Include/internal/mem.h index a731e30e6af7..5896e4a05055 100644 --- a/Include/internal/mem.h +++ b/Include/internal/mem.h @@ -145,6 +145,30 @@ PyAPI_FUNC(void) _PyGC_Initialize(struct _gc_runtime_state *); #define _PyGC_generation0 _PyRuntime.gc.generation0 +/* Heuristic checking if a pointer value is newly allocated + (uninitialized) or newly freed. The pointer is not dereferenced, only the + pointer value is checked. + + The heuristic relies on the debug hooks on Python memory allocators which + fills newly allocated memory with CLEANBYTE (0xCD) and newly freed memory + with DEADBYTE (0xDD). Detect also "untouchable bytes" marked + with FORBIDDENBYTE (0xFD). 
*/ +static inline int _PyMem_IsPtrFreed(void *ptr) +{ + uintptr_t value = (uintptr_t)ptr; +#if SIZEOF_VOID_P == 8 + return (value == (uintptr_t)0xCDCDCDCDCDCDCDCD + || value == (uintptr_t)0xDDDDDDDDDDDDDDDD + || value == (uintptr_t)0xFDFDFDFDFDFDFDFD); +#elif SIZEOF_VOID_P == 4 + return (value == (uintptr_t)0xCDCDCDCD + || value == (uintptr_t)0xDDDDDDDD + || value == (uintptr_t)0xFDFDFDFD); +#else +# error "unknown pointer size" +#endif +} + #ifdef __cplusplus } #endif diff --git a/Include/pymem.h b/Include/pymem.h index ef6e0bb5e25f..458a6489c75d 100644 --- a/Include/pymem.h +++ b/Include/pymem.h @@ -55,8 +55,6 @@ PyAPI_FUNC(int) PyTraceMalloc_Untrack( PyAPI_FUNC(PyObject*) _PyTraceMalloc_GetTraceback( unsigned int domain, uintptr_t ptr); - -PyAPI_FUNC(int) _PyMem_IsFreed(void *ptr, size_t size); #endif /* !defined(Py_LIMITED_API) */ diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py index 65e0795aba84..d94ee0227c87 100644 --- a/Lib/test/test_capi.py +++ b/Lib/test/test_capi.py @@ -462,11 +462,11 @@ def test_buffer_overflow(self): r" The [0-9] pad bytes at p-[0-9] are FORBIDDENBYTE, as expected.\n" r" The [0-9] pad bytes at tail={ptr} are not all FORBIDDENBYTE \(0x[0-9a-f]{{2}}\):\n" r" at tail\+0: 0x78 \*\*\* OUCH\n" - r" at tail\+1: 0xfb\n" - r" at tail\+2: 0xfb\n" + r" at tail\+1: 0xfd\n" + r" at tail\+2: 0xfd\n" r" .*\n" r" The block was made by call #[0-9]+ to debug malloc/realloc.\n" - r" Data at p: cb cb cb .*\n" + r" Data at p: cd cd cd .*\n" r"\n" r"Enable tracemalloc to get the memory block allocation traceback\n" r"\n" @@ -482,7 +482,7 @@ def test_api_misuse(self): r" The [0-9] pad bytes at p-[0-9] are FORBIDDENBYTE, as expected.\n" r" The [0-9] pad bytes at tail={ptr} are FORBIDDENBYTE, as expected.\n" r" The block was made by call #[0-9]+ to debug malloc/realloc.\n" - r" Data at p: cb cb cb .*\n" + r" Data at p: cd cd cd .*\n" r"\n" r"Enable tracemalloc to get the memory block allocation traceback\n" r"\n" @@ -508,6 +508,29 @@ def test_pyobject_malloc_without_gil(self): code = 'import _testcapi; _testcapi.pyobject_malloc_without_gil()' self.check_malloc_without_gil(code) + def check_pyobject_is_freed(self, func): + code = textwrap.dedent(''' + import gc, os, sys, _testcapi + # Disable the GC to avoid crash on GC collection + gc.disable() + obj = _testcapi.{func}() + error = (_testcapi.pyobject_is_freed(obj) == False) + # Exit immediately to avoid a crash while deallocating + # the invalid object + os._exit(int(error)) + ''') + code = code.format(func=func) + assert_python_ok('-c', code, PYTHONMALLOC=self.PYTHONMALLOC) + + def test_pyobject_is_freed_uninitialized(self): + self.check_pyobject_is_freed('pyobject_uninitialized') + + def test_pyobject_is_freed_forbidden_bytes(self): + self.check_pyobject_is_freed('pyobject_forbidden_bytes') + + def test_pyobject_is_freed_free(self): + self.check_pyobject_is_freed('pyobject_freed') + class PyMemMallocDebugTests(PyMemDebugTests): PYTHONMALLOC = 'malloc_debug' diff --git a/Misc/NEWS.d/next/C API/2019-04-11-12-20-35.bpo-36389.P9QFoP.rst b/Misc/NEWS.d/next/C API/2019-04-11-12-20-35.bpo-36389.P9QFoP.rst new file mode 100644 index 000000000000..f2b507a9c230 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2019-04-11-12-20-35.bpo-36389.P9QFoP.rst @@ -0,0 +1,5 @@ +Change the value of ``CLEANBYTE``, ``DEADDYTE`` and ``FORBIDDENBYTE`` internal +constants used by debug hooks on Python memory allocators +(:c:func:`PyMem_SetupDebugHooks` function). 
Byte patterns ``0xCB``, ``0xDB`` +and ``0xFB`` have been replaced with ``0xCD``, ``0xDD`` and ``0xFD`` to use the +same values than Windows CRT debug ``malloc()`` and ``free()``. diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index 1e33ca872d45..b864f9270e9d 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -4227,6 +4227,59 @@ test_pymem_getallocatorsname(PyObject *self, PyObject *args) } +static PyObject* +pyobject_is_freed(PyObject *self, PyObject *op) +{ + int res = _PyObject_IsFreed(op); + return PyBool_FromLong(res); +} + + +static PyObject* +pyobject_uninitialized(PyObject *self, PyObject *args) +{ + PyObject *op = (PyObject *)PyObject_Malloc(sizeof(PyObject)); + if (op == NULL) { + return NULL; + } + /* Initialize reference count to avoid early crash in ceval or GC */ + Py_REFCNT(op) = 1; + /* object fields like ob_type are uninitialized! */ + return op; +} + + +static PyObject* +pyobject_forbidden_bytes(PyObject *self, PyObject *args) +{ + /* Allocate an incomplete PyObject structure: truncate 'ob_type' field */ + PyObject *op = (PyObject *)PyObject_Malloc(offsetof(PyObject, ob_type)); + if (op == NULL) { + return NULL; + } + /* Initialize reference count to avoid early crash in ceval or GC */ + Py_REFCNT(op) = 1; + /* ob_type field is after the memory block: part of "forbidden bytes" + when using debug hooks on memory allocatrs! */ + return op; +} + + +static PyObject* +pyobject_freed(PyObject *self, PyObject *args) +{ + PyObject *op = _PyObject_CallNoArg((PyObject *)&PyBaseObject_Type); + if (op == NULL) { + return NULL; + } + Py_TYPE(op)->tp_dealloc(op); + /* Reset reference count to avoid early crash in ceval or GC */ + Py_REFCNT(op) = 1; + /* object memory is freed! */ + return op; +} + + static PyObject* pyobject_malloc_without_gil(PyObject *self, PyObject *args) { @@ -4788,6 +4841,10 @@ static PyMethodDef TestMethods[] = { {"pymem_api_misuse", pymem_api_misuse, METH_NOARGS}, {"pymem_malloc_without_gil", pymem_malloc_without_gil, METH_NOARGS}, {"pymem_getallocatorsname", test_pymem_getallocatorsname, METH_NOARGS}, + {"pyobject_is_freed", (PyCFunction)(void(*)(void))pyobject_is_freed, METH_O}, + {"pyobject_uninitialized", pyobject_uninitialized, METH_NOARGS}, + {"pyobject_forbidden_bytes", pyobject_forbidden_bytes, METH_NOARGS}, + {"pyobject_freed", pyobject_freed, METH_NOARGS}, {"pyobject_malloc_without_gil", pyobject_malloc_without_gil, METH_NOARGS}, {"tracemalloc_track", tracemalloc_track, METH_VARARGS}, {"tracemalloc_untrack", tracemalloc_untrack, METH_VARARGS}, diff --git a/Objects/object.c b/Objects/object.c index 138df4488027..420af9465b5c 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -411,28 +411,26 @@ _Py_BreakPoint(void) } -/* Heuristic checking if the object memory has been deallocated. - Rely on the debug hooks on Python memory allocators which fills the memory - with DEADBYTE (0xDB) when memory is deallocated. +/* Heuristic checking if the object memory is uninitialized or deallocated. + Rely on the debug hooks on Python memory allocators: + see _PyMem_IsPtrFreed(). The function can be used to prevent segmentation fault on dereferencing - pointers like 0xdbdbdbdbdbdbdbdb. Such pointer is very unlikely to be mapped - in memory. */ + pointers like 0xDDDDDDDDDDDDDDDD. 
*/ int _PyObject_IsFreed(PyObject *op) { - uintptr_t ptr = (uintptr_t)op; - if (_PyMem_IsFreed(&ptr, sizeof(ptr))) { + if (_PyMem_IsPtrFreed(op) || _PyMem_IsPtrFreed(op->ob_type)) { return 1; } - int freed = _PyMem_IsFreed(&op->ob_type, sizeof(op->ob_type)); - /* ignore op->ob_ref: the value can have be modified + /* ignore op->ob_ref: its value can have be modified by Py_INCREF() and Py_DECREF(). */ #ifdef Py_TRACE_REFS - freed &= _PyMem_IsFreed(&op->_ob_next, sizeof(op->_ob_next)); - freed &= _PyMem_IsFreed(&op->_ob_prev, sizeof(op->_ob_prev)); + if (_PyMem_IsPtrFreed(op->_ob_next) || _PyMem_IsPtrFreed(op->_ob_prev)) { + return 1; + } #endif - return freed; + return 0; } @@ -449,7 +447,7 @@ _PyObject_Dump(PyObject* op) if (_PyObject_IsFreed(op)) { /* It seems like the object memory has been freed: don't access it to prevent a segmentation fault. */ - fprintf(stderr, "\n"); + fprintf(stderr, "\n"); return; } diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c index 3b0c35bcc941..46e84270b26e 100644 --- a/Objects/obmalloc.c +++ b/Objects/obmalloc.c @@ -1946,14 +1946,17 @@ _Py_GetAllocatedBlocks(void) /* Special bytes broadcast into debug memory blocks at appropriate times. * Strings of these are unlikely to be valid addresses, floats, ints or - * 7-bit ASCII. + * 7-bit ASCII. If modified, _PyMem_IsPtrFreed() should be updated as well. + * + * Byte patterns 0xCB, 0xBB and 0xFB have been replaced with 0xCD, 0xDD and + * 0xFD to use the same values than Windows CRT debug malloc() and free(). */ #undef CLEANBYTE #undef DEADBYTE #undef FORBIDDENBYTE -#define CLEANBYTE 0xCB /* clean (newly allocated) memory */ -#define DEADBYTE 0xDB /* dead (newly freed) memory */ -#define FORBIDDENBYTE 0xFB /* untouchable bytes at each end of a block */ +#define CLEANBYTE 0xCD /* clean (newly allocated) memory */ +#define DEADBYTE 0xDD /* dead (newly freed) memory */ +#define FORBIDDENBYTE 0xFD /* untouchable bytes at each end of a block */ static size_t serialno = 0; /* incremented on each debug {m,re}alloc */ @@ -2091,22 +2094,6 @@ _PyMem_DebugRawCalloc(void *ctx, size_t nelem, size_t elsize) } -/* Heuristic checking if the memory has been freed. Rely on the debug hooks on - Python memory allocators which fills the memory with DEADBYTE (0xDB) when - memory is deallocated. */ -int -_PyMem_IsFreed(void *ptr, size_t size) -{ - unsigned char *bytes = ptr; - for (size_t i=0; i < size; i++) { - if (bytes[i] != DEADBYTE) { - return 0; - } - } - return 1; -} - - /* The debug free first checks the 2*SST bytes on each end for sanity (in particular, that the FORBIDDENBYTEs with the api ID are still intact). Then fills the original bytes with DEADBYTE. 
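The CLEANBYTE/DEADBYTE/FORBIDDENBYTE patterns above only matter when the debug
hooks are installed, which the test changes drive through the PYTHONMALLOC
environment variable. A minimal sketch of doing the same by hand, assuming a
child interpreter is acceptable and with a placeholder -c payload:

    import os
    import subprocess
    import sys

    # Run a child interpreter with debug hooks on the memory allocators,
    # plus tracemalloc so any allocator report includes a traceback.
    env = dict(os.environ, PYTHONMALLOC="debug")
    subprocess.run(
        [sys.executable, "-X", "tracemalloc",
         "-c", "print('allocator debug hooks active')"],
        env=env,
        check=True,
    )
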
From webhook-mailer at python.org Thu Apr 11 17:13:41 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 11 Apr 2019 21:13:41 -0000 Subject: [Python-checkins] bpo-20582: add link for manpage for flags on getnameinfo() (GH-11977) Message-ID: https://github.com/python/cpython/commit/3993ccb6820d4239ce3d9e1c5d31f13b86e0000b commit: 3993ccb6820d4239ce3d9e1c5d31f13b86e0000b branch: master author: Emmanuel Arias committer: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> date: 2019-04-11T14:13:37-07:00 summary: bpo-20582: add link for manpage for flags on getnameinfo() (GH-11977) files: M Doc/library/socket.rst diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst index 62c83470271c..379633a3b605 100644 --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -796,6 +796,8 @@ The :mod:`socket` module also offers various network-related services: For IPv6 addresses, ``%scope`` is appended to the host part if *sockaddr* contains meaningful *scopeid*. Usually this happens for multicast addresses. + For more information about *flags* you can consult :manpage:`getnameinfo(3)`. + .. function:: getprotobyname(protocolname) Translate an Internet protocol name (for example, ``'icmp'``) to a constant From webhook-mailer at python.org Fri Apr 12 02:27:32 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 12 Apr 2019 06:27:32 -0000 Subject: [Python-checkins] bpo-34839: Add a 'before 3.6' in the section 'warnings' of doctest (GH-9736) Message-ID: https://github.com/python/cpython/commit/a910c2c6f3542b61f084de2ece0d8dab09c5a0fa commit: a910c2c6f3542b61f084de2ece0d8dab09c5a0fa branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-11T23:27:27-07:00 summary: bpo-34839: Add a 'before 3.6' in the section 'warnings' of doctest (GH-9736) (cherry picked from commit 0522fd81dc6e3482c2d4c8719f1f85ad5924eede) Co-authored-by: St?phane Wirtel files: M Doc/library/doctest.rst diff --git a/Doc/library/doctest.rst b/Doc/library/doctest.rst index a138e6874a05..e7c0033eb6bc 100644 --- a/Doc/library/doctest.rst +++ b/Doc/library/doctest.rst @@ -771,23 +771,27 @@ Warnings :mod:`doctest` is serious about requiring exact matches in expected output. If even a single character doesn't match, the test fails. This will probably surprise you a few times, as you learn exactly what Python does and doesn't -guarantee about output. For example, when printing a dict, Python doesn't -guarantee that the key-value pairs will be printed in any particular order, so a -test like :: +guarantee about output. For example, when printing a set, Python doesn't +guarantee that the element is printed in any particular order, so a test like :: >>> foo() - {"Hermione": "hippogryph", "Harry": "broomstick"} + {"Hermione", "Harry"} is vulnerable! One workaround is to do :: - >>> foo() == {"Hermione": "hippogryph", "Harry": "broomstick"} + >>> foo() == {"Hermione", "Harry"} True instead. Another is to do :: - >>> d = sorted(foo().items()) + >>> d = sorted(foo()) >>> d - [('Harry', 'broomstick'), ('Hermione', 'hippogryph')] + ['Harry', 'Hermione'] + +.. note:: + + Before Python 3.6, when printing a dict, Python did not guarantee that + the key-value pairs was printed in any particular order. There are others, but you get the idea. 
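The doctest warning rewritten above comes down to one rule: only compare against
a literal when the repr is deterministic, otherwise normalise first. A
self-contained module showing both workarounds (foo() is a stand-in for real
code):

    import doctest

    def foo():
        """Return a set of names; set repr order is not guaranteed.

        >>> foo() == {"Hermione", "Harry"}   # order-independent comparison
        True
        >>> sorted(foo())                    # or normalise before comparing
        ['Harry', 'Hermione']
        """
        return {"Hermione", "Harry"}

    if __name__ == "__main__":
        doctest.testmod()
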
From webhook-mailer at python.org Fri Apr 12 03:11:34 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Fri, 12 Apr 2019 07:11:34 -0000 Subject: [Python-checkins] bpo-20180: Use argument clinic for dict.pop() and dict.popitem() (GH-12792) Message-ID: https://github.com/python/cpython/commit/9e4f2f3a6b8ee995c365e86d976937c141d867f8 commit: 9e4f2f3a6b8ee995c365e86d976937c141d867f8 branch: master author: Inada Naoki committer: GitHub date: 2019-04-12T16:11:28+09:00 summary: bpo-20180: Use argument clinic for dict.pop() and dict.popitem() (GH-12792) files: A Misc/NEWS.d/next/Core and Builtins/2019-04-12-15-49-15.bpo-20180.KUqVk7.rst M Objects/clinic/dictobject.c.h M Objects/dictobject.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-04-12-15-49-15.bpo-20180.KUqVk7.rst b/Misc/NEWS.d/next/Core and Builtins/2019-04-12-15-49-15.bpo-20180.KUqVk7.rst new file mode 100644 index 000000000000..8c9067081aa1 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2019-04-12-15-49-15.bpo-20180.KUqVk7.rst @@ -0,0 +1,2 @@ +``dict.pop()`` is now up to 33% faster thanks to Argument Clinic. Patch by +Inada Naoki. diff --git a/Objects/clinic/dictobject.c.h b/Objects/clinic/dictobject.c.h index 713781ce8806..b87244d87348 100644 --- a/Objects/clinic/dictobject.c.h +++ b/Objects/clinic/dictobject.c.h @@ -116,6 +116,63 @@ dict_setdefault(PyDictObject *self, PyObject *const *args, Py_ssize_t nargs) return return_value; } +PyDoc_STRVAR(dict_pop__doc__, +"pop($self, key, default=None, /)\n" +"--\n" +"\n" +"Remove specified key and return the corresponding value.\n" +"\n" +"If key is not found, default is returned if given, otherwise KeyError is raised"); + +#define DICT_POP_METHODDEF \ + {"pop", (PyCFunction)(void(*)(void))dict_pop, METH_FASTCALL, dict_pop__doc__}, + +static PyObject * +dict_pop_impl(PyDictObject *self, PyObject *key, PyObject *default_value); + +static PyObject * +dict_pop(PyDictObject *self, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *key; + PyObject *default_value = NULL; + + if (!_PyArg_CheckPositional("pop", nargs, 1, 2)) { + goto exit; + } + key = args[0]; + if (nargs < 2) { + goto skip_optional; + } + default_value = args[1]; +skip_optional: + return_value = dict_pop_impl(self, key, default_value); + +exit: + return return_value; +} + +PyDoc_STRVAR(dict_popitem__doc__, +"popitem($self, /)\n" +"--\n" +"\n" +"Remove and return a (key, value) pair as a 2-tuple.\n" +"\n" +"Pairs are returned in LIFO (last-in, first-out) order.\n" +"Raises KeyError if the dict is empty."); + +#define DICT_POPITEM_METHODDEF \ + {"popitem", (PyCFunction)dict_popitem, METH_NOARGS, dict_popitem__doc__}, + +static PyObject * +dict_popitem_impl(PyDictObject *self); + +static PyObject * +dict_popitem(PyDictObject *self, PyObject *Py_UNUSED(ignored)) +{ + return dict_popitem_impl(self); +} + PyDoc_STRVAR(dict___reversed____doc__, "__reversed__($self, /)\n" "--\n" @@ -133,4 +190,4 @@ dict___reversed__(PyDictObject *self, PyObject *Py_UNUSED(ignored)) { return dict___reversed___impl(self); } -/*[clinic end generated code: output=12c21ce3552d9617 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=0fd5cafc61a51d3c input=a9049054013a1b77]*/ diff --git a/Objects/dictobject.c b/Objects/dictobject.c index c1187c2cb8ed..9ff009f6aa4e 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -2985,19 +2985,37 @@ dict_clear(PyDictObject *mp, PyObject *Py_UNUSED(ignored)) Py_RETURN_NONE; } +/*[clinic input] +dict.pop + + key: object + default: object = NULL + 
/ + +Remove specified key and return the corresponding value. + +If key is not found, default is returned if given, otherwise KeyError is raised +[clinic start generated code]*/ + static PyObject * -dict_pop(PyDictObject *mp, PyObject *args) +dict_pop_impl(PyDictObject *self, PyObject *key, PyObject *default_value) +/*[clinic end generated code: output=3abb47b89f24c21c input=016f6a000e4e633b]*/ { - PyObject *key, *deflt = NULL; + return _PyDict_Pop((PyObject*)self, key, default_value); +} - if(!PyArg_UnpackTuple(args, "pop", 1, 2, &key, &deflt)) - return NULL; +/*[clinic input] +dict.popitem - return _PyDict_Pop((PyObject*)mp, key, deflt); -} +Remove and return a (key, value) pair as a 2-tuple. + +Pairs are returned in LIFO (last-in, first-out) order. +Raises KeyError if the dict is empty. +[clinic start generated code]*/ static PyObject * -dict_popitem(PyDictObject *mp, PyObject *Py_UNUSED(ignored)) +dict_popitem_impl(PyDictObject *self) +/*[clinic end generated code: output=e65fcb04420d230d input=1c38a49f21f64941]*/ { Py_ssize_t i, j; PyDictKeyEntry *ep0, *ep; @@ -3015,44 +3033,43 @@ dict_popitem(PyDictObject *mp, PyObject *Py_UNUSED(ignored)) res = PyTuple_New(2); if (res == NULL) return NULL; - if (mp->ma_used == 0) { + if (self->ma_used == 0) { Py_DECREF(res); - PyErr_SetString(PyExc_KeyError, - "popitem(): dictionary is empty"); + PyErr_SetString(PyExc_KeyError, "popitem(): dictionary is empty"); return NULL; } /* Convert split table to combined table */ - if (mp->ma_keys->dk_lookup == lookdict_split) { - if (dictresize(mp, DK_SIZE(mp->ma_keys))) { + if (self->ma_keys->dk_lookup == lookdict_split) { + if (dictresize(self, DK_SIZE(self->ma_keys))) { Py_DECREF(res); return NULL; } } - ENSURE_ALLOWS_DELETIONS(mp); + ENSURE_ALLOWS_DELETIONS(self); /* Pop last item */ - ep0 = DK_ENTRIES(mp->ma_keys); - i = mp->ma_keys->dk_nentries - 1; + ep0 = DK_ENTRIES(self->ma_keys); + i = self->ma_keys->dk_nentries - 1; while (i >= 0 && ep0[i].me_value == NULL) { i--; } assert(i >= 0); ep = &ep0[i]; - j = lookdict_index(mp->ma_keys, ep->me_hash, i); + j = lookdict_index(self->ma_keys, ep->me_hash, i); assert(j >= 0); - assert(dictkeys_get_index(mp->ma_keys, j) == i); - dictkeys_set_index(mp->ma_keys, j, DKIX_DUMMY); + assert(dictkeys_get_index(self->ma_keys, j) == i); + dictkeys_set_index(self->ma_keys, j, DKIX_DUMMY); PyTuple_SET_ITEM(res, 0, ep->me_key); PyTuple_SET_ITEM(res, 1, ep->me_value); ep->me_key = NULL; ep->me_value = NULL; /* We can't dk_usable++ since there is DKIX_DUMMY in indices */ - mp->ma_keys->dk_nentries = i; - mp->ma_used--; - mp->ma_version_tag = DICT_NEXT_VERSION(); - assert(_PyDict_CheckConsistency(mp)); + self->ma_keys->dk_nentries = i; + self->ma_used--; + self->ma_version_tag = DICT_NEXT_VERSION(); + assert(_PyDict_CheckConsistency(self)); return res; } @@ -3135,14 +3152,6 @@ PyDoc_STRVAR(getitem__doc__, "x.__getitem__(y) <==> x[y]"); PyDoc_STRVAR(sizeof__doc__, "D.__sizeof__() -> size of D in memory, in bytes"); -PyDoc_STRVAR(pop__doc__, -"D.pop(k[,d]) -> v, remove specified key and return the corresponding value.\n\ -If key is not found, d is returned if given, otherwise KeyError is raised"); - -PyDoc_STRVAR(popitem__doc__, -"D.popitem() -> (k, v), remove and return some (key, value) pair as a\n\ -2-tuple; but raise KeyError if D is empty."); - PyDoc_STRVAR(update__doc__, "D.update([E, ]**F) -> None. 
Update D from dict/iterable E and F.\n\ If E is present and has a .keys() method, then does: for k in E: D[k] = E[k]\n\ @@ -3175,10 +3184,8 @@ static PyMethodDef mapp_methods[] = { sizeof__doc__}, DICT_GET_METHODDEF DICT_SETDEFAULT_METHODDEF - {"pop", (PyCFunction)dict_pop, METH_VARARGS, - pop__doc__}, - {"popitem", (PyCFunction)(void(*)(void))dict_popitem, METH_NOARGS, - popitem__doc__}, + DICT_POP_METHODDEF + DICT_POPITEM_METHODDEF {"keys", dictkeys_new, METH_NOARGS, keys__doc__}, {"items", dictitems_new, METH_NOARGS, From webhook-mailer at python.org Fri Apr 12 09:15:10 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Fri, 12 Apr 2019 13:15:10 -0000 Subject: [Python-checkins] bpo-36611: Fix test_sys.test_getallocatedblocks() (GH-12797) Message-ID: https://github.com/python/cpython/commit/9b8314cfe29ca532fc335277f6c36b72e6132922 commit: 9b8314cfe29ca532fc335277f6c36b72e6132922 branch: master author: Victor Stinner committer: GitHub date: 2019-04-12T15:15:04+02:00 summary: bpo-36611: Fix test_sys.test_getallocatedblocks() (GH-12797) Fix test_sys.test_getallocatedblocks() when tracemalloc is enabled. If the name of Python memory allocators cannot get read, consider that pymalloc is disabled. Fix the following error: ./python -X tracemalloc -m test test_sys -v -m test_getallocatedblocks ERROR: test_getallocatedblocks (test.test_sys.SysModuleTest) ------------------------------------------------------------ Traceback (most recent call last): File "Lib/test/test_sys.py", line 770, in test_getallocatedblocks alloc_name = _testcapi.pymem_getallocatorsname() RuntimeError: cannot get allocators name files: A Misc/NEWS.d/next/Tests/2019-04-12-12-44-42.bpo-36611.UtorXL.rst M Lib/test/test_sys.py diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py index 4bd54af3629c..d1c7daad7bba 100644 --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -767,8 +767,13 @@ def test_getallocatedblocks(self): except ImportError: with_pymalloc = support.with_pymalloc() else: - alloc_name = _testcapi.pymem_getallocatorsname() - with_pymalloc = (alloc_name in ('pymalloc', 'pymalloc_debug')) + try: + alloc_name = _testcapi.pymem_getallocatorsname() + except RuntimeError as exc: + # "cannot get allocators name" (ex: tracemalloc is used) + with_pymalloc = True + else: + with_pymalloc = (alloc_name in ('pymalloc', 'pymalloc_debug')) # Some sanity checks a = sys.getallocatedblocks() diff --git a/Misc/NEWS.d/next/Tests/2019-04-12-12-44-42.bpo-36611.UtorXL.rst b/Misc/NEWS.d/next/Tests/2019-04-12-12-44-42.bpo-36611.UtorXL.rst new file mode 100644 index 000000000000..e4da7f1099f8 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2019-04-12-12-44-42.bpo-36611.UtorXL.rst @@ -0,0 +1,2 @@ +Fix ``test_sys.test_getallocatedblocks()`` when :mod:`tracemalloc` is +enabled. From webhook-mailer at python.org Fri Apr 12 09:33:35 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 12 Apr 2019 13:33:35 -0000 Subject: [Python-checkins] bpo-36611: Fix test_sys.test_getallocatedblocks() (GH-12797) Message-ID: https://github.com/python/cpython/commit/7182e653fb5c6f78f05892b6ed302fc8db8978d3 commit: 7182e653fb5c6f78f05892b6ed302fc8db8978d3 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-12T06:33:31-07:00 summary: bpo-36611: Fix test_sys.test_getallocatedblocks() (GH-12797) Fix test_sys.test_getallocatedblocks() when tracemalloc is enabled. If the name of Python memory allocators cannot get read, consider that pymalloc is disabled. 
Fix the following error: ./python -X tracemalloc -m test test_sys -v -m test_getallocatedblocks ERROR: test_getallocatedblocks (test.test_sys.SysModuleTest) ------------------------------------------------------------ Traceback (most recent call last): File "Lib/test/test_sys.py", line 770, in test_getallocatedblocks alloc_name = _testcapi.pymem_getallocatorsname() RuntimeError: cannot get allocators name (cherry picked from commit 9b8314cfe29ca532fc335277f6c36b72e6132922) Co-authored-by: Victor Stinner files: A Misc/NEWS.d/next/Tests/2019-04-12-12-44-42.bpo-36611.UtorXL.rst M Lib/test/test_sys.py diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py index 27f75901c63f..ef3fee13b961 100644 --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -768,8 +768,13 @@ def test_getallocatedblocks(self): except ImportError: with_pymalloc = support.with_pymalloc() else: - alloc_name = _testcapi.pymem_getallocatorsname() - with_pymalloc = (alloc_name in ('pymalloc', 'pymalloc_debug')) + try: + alloc_name = _testcapi.pymem_getallocatorsname() + except RuntimeError as exc: + # "cannot get allocators name" (ex: tracemalloc is used) + with_pymalloc = True + else: + with_pymalloc = (alloc_name in ('pymalloc', 'pymalloc_debug')) # Some sanity checks a = sys.getallocatedblocks() diff --git a/Misc/NEWS.d/next/Tests/2019-04-12-12-44-42.bpo-36611.UtorXL.rst b/Misc/NEWS.d/next/Tests/2019-04-12-12-44-42.bpo-36611.UtorXL.rst new file mode 100644 index 000000000000..e4da7f1099f8 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2019-04-12-12-44-42.bpo-36611.UtorXL.rst @@ -0,0 +1,2 @@ +Fix ``test_sys.test_getallocatedblocks()`` when :mod:`tracemalloc` is +enabled. From webhook-mailer at python.org Fri Apr 12 10:15:36 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Fri, 12 Apr 2019 14:15:36 -0000 Subject: [Python-checkins] bpo-36588: On AIX, remove major version from sys.platform (GH-12787) Message-ID: https://github.com/python/cpython/commit/9d949f7796da612f1b588d18c6f041376992a9fc commit: 9d949f7796da612f1b588d18c6f041376992a9fc branch: master author: Michael Felt committer: Victor Stinner date: 2019-04-12T16:15:32+02:00 summary: bpo-36588: On AIX, remove major version from sys.platform (GH-12787) On AIX, sys.platform doesn't contain the major version anymore. Always return 'aix', instead of 'aix3' .. 'aix7'. Since older Python versions include the version number, it is recommended to always use sys.platform.startswith('aix'). files: A Misc/NEWS.d/next/Core and Builtins/2019-04-11-14-36-55.bpo-36588.wejLoC.rst M Doc/library/sys.rst M Doc/whatsnew/3.8.rst M configure M configure.ac diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst index 52026f6a2bce..591972e9b783 100644 --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -1014,7 +1014,7 @@ always available. This string contains a platform identifier that can be used to append platform-specific components to :data:`sys.path`, for instance. - For Unix systems, except on Linux, this is the lowercased OS name as + For Unix systems, except on Linux and AIX, this is the lowercased OS name as returned by ``uname -s`` with the first part of the version as returned by ``uname -r`` appended, e.g. ``'sunos5'`` or ``'freebsd8'``, *at the time when Python was built*. Unless you want to test for a specific system @@ -1024,12 +1024,15 @@ always available. # FreeBSD-specific code here... elif sys.platform.startswith('linux'): # Linux-specific code here... + elif sys.platform.startswith('aix'): + # AIX-specific code here... 
For other systems, the values are: ================ =========================== System ``platform`` value ================ =========================== + AIX ``'aix'`` Linux ``'linux'`` Windows ``'win32'`` Windows/Cygwin ``'cygwin'`` @@ -1042,6 +1045,12 @@ always available. older Python versions include the version number, it is recommended to always use the ``startswith`` idiom presented above. + .. versionchanged:: 3.8 + On AIX, :attr:`sys.platform` doesn't contain the major version anymore. + It is always ``'aix'``, instead of ``'aix5'`` or ``'aix7'``. Since + older Python versions include the version number, it is recommended to + always use the ``startswith`` idiom presented above. + .. seealso:: :attr:`os.name` has a coarser granularity. :func:`os.uname` gives diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst index aa75beeabe70..bf28e5ff4b06 100644 --- a/Doc/whatsnew/3.8.rst +++ b/Doc/whatsnew/3.8.rst @@ -700,6 +700,11 @@ Changes in Python behavior raised when getting the attribute from the type dictionary are no longer ignored. (Contributed by Serhiy Storchaka in :issue:`35459`.) +* On AIX, :attr:`sys.platform` doesn't contain the major version anymore. + It is always ``'aix'``, instead of ``'aix3'`` .. ``'aix7'``. Since + older Python versions include the version number, it is recommended to + always use the ``sys.platform.startswith('aix')``. + (Contributed by M. Felt in :issue:`36588`.) Changes in the Python API ------------------------- diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-04-11-14-36-55.bpo-36588.wejLoC.rst b/Misc/NEWS.d/next/Core and Builtins/2019-04-11-14-36-55.bpo-36588.wejLoC.rst new file mode 100644 index 000000000000..77d2fa4e299b --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2019-04-11-14-36-55.bpo-36588.wejLoC.rst @@ -0,0 +1,5 @@ +On AIX, :attr:`sys.platform` doesn't contain the major version anymore. +Always return ``'aix'``, instead of ``'aix3'`` .. ``'aix7'``. Since +older Python versions include the version number, it is recommended to +always use ``sys.platform.startswith('aix')``. +Contributed by M. Felt. diff --git a/configure b/configure index fcfa71496e40..72589fdb78c5 100755 --- a/configure +++ b/configure @@ -3281,6 +3281,7 @@ then MACHDEP="$ac_md_system$ac_md_release" case $MACHDEP in + aix*) MACHDEP="aix";; linux*) MACHDEP="linux";; cygwin*) MACHDEP="cygwin";; darwin*) MACHDEP="darwin";; @@ -10199,7 +10200,6 @@ fi - if test "x$ac_cv_env_PKG_CONFIG_set" != "xset"; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}pkg-config", so it can be a program name with args. 
diff --git a/configure.ac b/configure.ac index 880e47f70fb3..30e8587cb08f 100644 --- a/configure.ac +++ b/configure.ac @@ -404,6 +404,7 @@ then MACHDEP="$ac_md_system$ac_md_release" case $MACHDEP in + aix*) MACHDEP="aix";; linux*) MACHDEP="linux";; cygwin*) MACHDEP="cygwin";; darwin*) MACHDEP="darwin";; From webhook-mailer at python.org Fri Apr 12 11:07:09 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Fri, 12 Apr 2019 15:07:09 -0000 Subject: [Python-checkins] bpo-18748: io.IOBase destructor now logs close() errors in dev mode (GH-12786) Message-ID: https://github.com/python/cpython/commit/44235041f3b957abd36d3792450c3540aa09e120 commit: 44235041f3b957abd36d3792450c3540aa09e120 branch: master author: Victor Stinner committer: GitHub date: 2019-04-12T17:06:47+02:00 summary: bpo-18748: io.IOBase destructor now logs close() errors in dev mode (GH-12786) In development mode (-X dev) and in debug build, the io.IOBase destructor now logs close() exceptions. These exceptions are silent by default in release mode. files: A Misc/NEWS.d/next/Library/2019-04-11-16-09-42.bpo-18748.QW7upB.rst M Doc/using/cmdline.rst M Lib/test/test_io.py M Modules/_io/iobase.c diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst index bd3cdef57390..0574336cf354 100644 --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -437,6 +437,7 @@ Miscellaneous options * Enable :ref:`asyncio debug mode `. * Set the :attr:`~sys.flags.dev_mode` attribute of :attr:`sys.flags` to ``True`` + * :class:`io.IOBase` destructor logs ``close()`` exceptions. * ``-X utf8`` enables UTF-8 mode for operating system interfaces, overriding the default locale-aware mode. ``-X utf8=0`` explicitly disables UTF-8 @@ -465,7 +466,8 @@ Miscellaneous options The ``-X importtime``, ``-X dev`` and ``-X utf8`` options. .. versionadded:: 3.8 - The ``-X pycache_prefix`` option. + The ``-X pycache_prefix`` option. The ``-X dev`` option now logs + ``close()`` exceptions in :class:`io.IOBase` destructor. Options you shouldn't use diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py index d245c5d846ad..811a446f92be 100644 --- a/Lib/test/test_io.py +++ b/Lib/test/test_io.py @@ -67,6 +67,11 @@ class EmptyStruct(ctypes.Structure): '--with-memory-sanitizer' in _config_args ) +# Does io.IOBase logs unhandled exceptions on calling close()? +# They are silenced by default in release build. 
+DESTRUCTOR_LOG_ERRORS = (hasattr(sys, "gettotalrefcount") or sys.flags.dev_mode) + + def _default_chunk_size(): """Get the default TextIOWrapper chunk size""" with open(__file__, "r", encoding="latin-1") as f: @@ -1097,9 +1102,16 @@ def f(): s = s.getvalue().strip() if s: # The destructor *may* have printed an unraisable error, check it - self.assertEqual(len(s.splitlines()), 1) - self.assertTrue(s.startswith("Exception OSError: "), s) - self.assertTrue(s.endswith(" ignored"), s) + lines = s.splitlines() + if DESTRUCTOR_LOG_ERRORS: + self.assertEqual(len(lines), 5) + self.assertTrue(lines[0].startswith("Exception ignored in: "), lines) + self.assertEqual(lines[1], "Traceback (most recent call last):", lines) + self.assertEqual(lines[4], 'OSError:', lines) + else: + self.assertEqual(len(lines), 1) + self.assertTrue(lines[-1].startswith("Exception OSError: "), lines) + self.assertTrue(lines[-1].endswith(" ignored"), lines) def test_repr(self): raw = self.MockRawIO() @@ -2833,9 +2845,16 @@ def f(): s = s.getvalue().strip() if s: # The destructor *may* have printed an unraisable error, check it - self.assertEqual(len(s.splitlines()), 1) - self.assertTrue(s.startswith("Exception OSError: "), s) - self.assertTrue(s.endswith(" ignored"), s) + lines = s.splitlines() + if DESTRUCTOR_LOG_ERRORS: + self.assertEqual(len(lines), 5) + self.assertTrue(lines[0].startswith("Exception ignored in: "), lines) + self.assertEqual(lines[1], "Traceback (most recent call last):", lines) + self.assertEqual(lines[4], 'OSError:', lines) + else: + self.assertEqual(len(lines), 1) + self.assertTrue(lines[-1].startswith("Exception OSError: "), lines) + self.assertTrue(lines[-1].endswith(" ignored"), lines) # Systematic tests of the text I/O API diff --git a/Misc/NEWS.d/next/Library/2019-04-11-16-09-42.bpo-18748.QW7upB.rst b/Misc/NEWS.d/next/Library/2019-04-11-16-09-42.bpo-18748.QW7upB.rst new file mode 100644 index 000000000000..2e0cef8d1818 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-04-11-16-09-42.bpo-18748.QW7upB.rst @@ -0,0 +1,3 @@ +In development mode (:option:`-X` ``dev``) and in debug build, the +:class:`io.IOBase` destructor now logs ``close()`` exceptions. These exceptions +are silent by default in release mode. diff --git a/Modules/_io/iobase.c b/Modules/_io/iobase.c index 9b063cd372fe..3a8f16ae0b65 100644 --- a/Modules/_io/iobase.c +++ b/Modules/_io/iobase.c @@ -286,10 +286,22 @@ iobase_finalize(PyObject *self) /* Silencing I/O errors is bad, but printing spurious tracebacks is equally as bad, and potentially more frequent (because of shutdown issues). */ - if (res == NULL) - PyErr_Clear(); - else + if (res == NULL) { +#ifndef Py_DEBUG + const _PyCoreConfig *config = &_PyInterpreterState_GET_UNSAFE()->core_config; + if (config->dev_mode) { + PyErr_WriteUnraisable(self); + } + else { + PyErr_Clear(); + } +#else + PyErr_WriteUnraisable(self); +#endif + } + else { Py_DECREF(res); + } } /* Restore the saved exception. */ From webhook-mailer at python.org Fri Apr 12 11:18:21 2019 From: webhook-mailer at python.org (Eric Snow) Date: Fri, 12 Apr 2019 15:18:21 -0000 Subject: [Python-checkins] bpo-33608: Factor out a private, per-interpreter _Py_AddPendingCall(). (gh-12360) Message-ID: https://github.com/python/cpython/commit/f13c5c8b9401a9dc19e95d8b420ee100ac022208 commit: f13c5c8b9401a9dc19e95d8b420ee100ac022208 branch: master author: Eric Snow committer: GitHub date: 2019-04-12T09:18:16-06:00 summary: bpo-33608: Factor out a private, per-interpreter _Py_AddPendingCall(). 
(gh-12360) This is effectively an un-revert of #11617 and #12024 (reverted in #12159). Portions of those were merged in other PRs (with lower risk) and this represents the remainder. Note that I found 3 different bugs in the original PRs and have fixed them here. files: A Misc/NEWS.d/next/Core and Builtins/2018-09-15-12-13-46.bpo-33608.avmvVP.rst M Include/ceval.h M Include/internal/pycore_ceval.h M Include/internal/pycore_pystate.h M Lib/test/test_capi.py M Modules/_testcapimodule.c M Modules/signalmodule.c M Python/ceval.c M Python/ceval_gil.h M Python/pylifecycle.c M Python/pystate.c diff --git a/Include/ceval.h b/Include/ceval.h index 11283c0a570b..9c6d420bc234 100644 --- a/Include/ceval.h +++ b/Include/ceval.h @@ -221,7 +221,7 @@ PyAPI_FUNC(Py_ssize_t) _PyEval_RequestCodeExtraIndex(freefunc); #ifndef Py_LIMITED_API PyAPI_FUNC(int) _PyEval_SliceIndex(PyObject *, Py_ssize_t *); PyAPI_FUNC(int) _PyEval_SliceIndexNotNone(PyObject *, Py_ssize_t *); -PyAPI_FUNC(void) _PyEval_SignalAsyncExc(void); +PyAPI_FUNC(void) _PyEval_SignalAsyncExc(PyInterpreterState *); #endif /* Masks and values used by FORMAT_VALUE opcode. */ diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h index 2ead96c7abe3..1bdcdf527af4 100644 --- a/Include/internal/pycore_ceval.h +++ b/Include/internal/pycore_ceval.h @@ -11,7 +11,11 @@ extern "C" { #include "pycore_atomic.h" #include "pythread.h" -PyAPI_FUNC(void) _Py_FinishPendingCalls(void); +struct _is; // See PyInterpreterState in cpython/pystate.h. + +PyAPI_FUNC(int) _Py_AddPendingCall(struct _is*, unsigned long, int (*)(void *), void *); +PyAPI_FUNC(int) _Py_MakePendingCalls(struct _is*); +PyAPI_FUNC(void) _Py_FinishPendingCalls(struct _is*); struct _pending_calls { int finishing; @@ -24,6 +28,7 @@ struct _pending_calls { int async_exc; #define NPENDINGCALLS 32 struct { + unsigned long thread_id; int (*func)(void *); void *arg; } calls[NPENDINGCALLS]; @@ -31,6 +36,13 @@ struct _pending_calls { int last; }; +struct _ceval_interpreter_state { + /* This single variable consolidates all requests to break out of + the fast path in the eval loop. */ + _Py_atomic_int eval_breaker; + struct _pending_calls pending; +}; + #include "pycore_gil.h" struct _ceval_runtime_state { @@ -41,12 +53,8 @@ struct _ceval_runtime_state { c_tracefunc. This speeds up the if statement in PyEval_EvalFrameEx() after fast_next_opcode. */ int tracing_possible; - /* This single variable consolidates all requests to break out of - the fast path in the eval loop. */ - _Py_atomic_int eval_breaker; /* Request for dropping the GIL */ _Py_atomic_int gil_drop_request; - struct _pending_calls pending; /* Request for checking signals. 
*/ _Py_atomic_int signals_pending; struct _gil_runtime_state gil; diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h index df3730f8014a..3ae2e0c60483 100644 --- a/Include/internal/pycore_pystate.h +++ b/Include/internal/pycore_pystate.h @@ -12,6 +12,7 @@ extern "C" { #include "pystate.h" #include "pythread.h" +#include "pycore_atomic.h" #include "pycore_ceval.h" #include "pycore_pathconfig.h" #include "pycore_pymem.h" @@ -83,6 +84,8 @@ struct _is { PyObject *pyexitmodule; uint64_t tstate_next_unique_id; + + struct _ceval_interpreter_state ceval; }; PyAPI_FUNC(struct _is*) _PyInterpreterState_LookUpID(PY_INT64_T); diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py index 33c98ac28bc5..02ae54804f5f 100644 --- a/Lib/test/test_capi.py +++ b/Lib/test/test_capi.py @@ -373,7 +373,7 @@ def pendingcalls_wait(self, l, n, context = None): def test_pendingcalls_threaded(self): #do every callback on a separate thread - n = 32 #total callbacks + n = 32 #total callbacks (see NPENDINGCALLS in pycore_ceval.h) threads = [] class foo(object):pass context = foo() diff --git a/Misc/NEWS.d/next/Core and Builtins/2018-09-15-12-13-46.bpo-33608.avmvVP.rst b/Misc/NEWS.d/next/Core and Builtins/2018-09-15-12-13-46.bpo-33608.avmvVP.rst new file mode 100644 index 000000000000..73a01a1f46bd --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2018-09-15-12-13-46.bpo-33608.avmvVP.rst @@ -0,0 +1,5 @@ +We added a new internal _Py_AddPendingCall() that operates relative to the +provided interpreter. This allows us to use the existing implementation to +ask another interpreter to do work that cannot be done in the current +interpreter, like decref an object the other interpreter owns. The existing +Py_AddPendingCall() only operates relative to the main interpreter. diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index 1180b4b176e9..71356fbac318 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -2445,6 +2445,7 @@ pending_threadfunc(PyObject *self, PyObject *arg) Py_INCREF(callable); Py_BEGIN_ALLOW_THREADS + /* XXX Use the internal _Py_AddPendingCall(). */ r = Py_AddPendingCall(&_pending_callback, callable); Py_END_ALLOW_THREADS diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c index 4590017c170a..962174dda944 100644 --- a/Modules/signalmodule.c +++ b/Modules/signalmodule.c @@ -19,6 +19,7 @@ #include #endif #endif +#include "internal/pycore_pystate.h" #ifdef HAVE_SIGNAL_H #include @@ -295,8 +296,10 @@ trip_signal(int sig_num) { /* Py_AddPendingCall() isn't signal-safe, but we still use it for this exceptional case. */ - Py_AddPendingCall(report_wakeup_send_error, - (void *)(intptr_t) last_error); + _Py_AddPendingCall(_PyRuntime.interpreters.main, + main_thread, + report_wakeup_send_error, + (void *)(intptr_t) last_error); } } } @@ -313,8 +316,10 @@ trip_signal(int sig_num) { /* Py_AddPendingCall() isn't signal-safe, but we still use it for this exceptional case. */ - Py_AddPendingCall(report_wakeup_write_error, - (void *)(intptr_t)errno); + _Py_AddPendingCall(_PyRuntime.interpreters.main, + main_thread, + report_wakeup_write_error, + (void *)(intptr_t)errno); } } } diff --git a/Python/ceval.c b/Python/ceval.c index 28e923219d38..b2fa20d61686 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -96,61 +96,61 @@ static long dxp[256]; /* This can set eval_breaker to 0 even though gil_drop_request became 1. We believe this is all right because the eval loop will release the GIL eventually anyway. 
*/ -#define COMPUTE_EVAL_BREAKER() \ +#define COMPUTE_EVAL_BREAKER(interp) \ _Py_atomic_store_relaxed( \ - &_PyRuntime.ceval.eval_breaker, \ + &interp->ceval.eval_breaker, \ GIL_REQUEST | \ _Py_atomic_load_relaxed(&_PyRuntime.ceval.signals_pending) | \ - _Py_atomic_load_relaxed(&_PyRuntime.ceval.pending.calls_to_do) | \ - _PyRuntime.ceval.pending.async_exc) + _Py_atomic_load_relaxed(&interp->ceval.pending.calls_to_do) | \ + interp->ceval.pending.async_exc) -#define SET_GIL_DROP_REQUEST() \ +#define SET_GIL_DROP_REQUEST(interp) \ do { \ _Py_atomic_store_relaxed(&_PyRuntime.ceval.gil_drop_request, 1); \ - _Py_atomic_store_relaxed(&_PyRuntime.ceval.eval_breaker, 1); \ + _Py_atomic_store_relaxed(&interp->ceval.eval_breaker, 1); \ } while (0) -#define RESET_GIL_DROP_REQUEST() \ +#define RESET_GIL_DROP_REQUEST(interp) \ do { \ _Py_atomic_store_relaxed(&_PyRuntime.ceval.gil_drop_request, 0); \ - COMPUTE_EVAL_BREAKER(); \ + COMPUTE_EVAL_BREAKER(interp); \ } while (0) /* Pending calls are only modified under pending_lock */ -#define SIGNAL_PENDING_CALLS() \ +#define SIGNAL_PENDING_CALLS(interp) \ do { \ - _Py_atomic_store_relaxed(&_PyRuntime.ceval.pending.calls_to_do, 1); \ - _Py_atomic_store_relaxed(&_PyRuntime.ceval.eval_breaker, 1); \ + _Py_atomic_store_relaxed(&interp->ceval.pending.calls_to_do, 1); \ + _Py_atomic_store_relaxed(&interp->ceval.eval_breaker, 1); \ } while (0) -#define UNSIGNAL_PENDING_CALLS() \ +#define UNSIGNAL_PENDING_CALLS(interp) \ do { \ - _Py_atomic_store_relaxed(&_PyRuntime.ceval.pending.calls_to_do, 0); \ - COMPUTE_EVAL_BREAKER(); \ + _Py_atomic_store_relaxed(&interp->ceval.pending.calls_to_do, 0); \ + COMPUTE_EVAL_BREAKER(interp); \ } while (0) #define SIGNAL_PENDING_SIGNALS() \ do { \ _Py_atomic_store_relaxed(&_PyRuntime.ceval.signals_pending, 1); \ - _Py_atomic_store_relaxed(&_PyRuntime.ceval.eval_breaker, 1); \ + _Py_atomic_store_relaxed(&_PyRuntime.interpreters.main->ceval.eval_breaker, 1); \ } while (0) #define UNSIGNAL_PENDING_SIGNALS() \ do { \ _Py_atomic_store_relaxed(&_PyRuntime.ceval.signals_pending, 0); \ - COMPUTE_EVAL_BREAKER(); \ + COMPUTE_EVAL_BREAKER(_PyRuntime.interpreters.main); \ } while (0) -#define SIGNAL_ASYNC_EXC() \ +#define SIGNAL_ASYNC_EXC(interp) \ do { \ - _PyRuntime.ceval.pending.async_exc = 1; \ - _Py_atomic_store_relaxed(&_PyRuntime.ceval.eval_breaker, 1); \ + interp->ceval.pending.async_exc = 1; \ + _Py_atomic_store_relaxed(&interp->ceval.eval_breaker, 1); \ } while (0) -#define UNSIGNAL_ASYNC_EXC() \ +#define UNSIGNAL_ASYNC_EXC(interp) \ do { \ - _PyRuntime.ceval.pending.async_exc = 0; \ - COMPUTE_EVAL_BREAKER(); \ + interp->ceval.pending.async_exc = 0; \ + COMPUTE_EVAL_BREAKER(interp); \ } while (0) @@ -177,10 +177,7 @@ PyEval_InitThreads(void) create_gil(); take_gil(_PyThreadState_GET()); - _PyRuntime.ceval.pending.lock = PyThread_allocate_lock(); - if (_PyRuntime.ceval.pending.lock == NULL) { - Py_FatalError("Can't initialize threads for pending calls"); - } + // The pending calls mutex is initialized in PyInterpreterState_New(). } void @@ -192,11 +189,6 @@ _PyEval_FiniThreads(void) destroy_gil(); assert(!gil_created()); - - if (_PyRuntime.ceval.pending.lock != NULL) { - PyThread_free_lock(_PyRuntime.ceval.pending.lock); - _PyRuntime.ceval.pending.lock = NULL; - } } void @@ -256,8 +248,10 @@ PyEval_ReInitThreads(void) recreate_gil(); take_gil(current_tstate); - _PyRuntime.ceval.pending.lock = PyThread_allocate_lock(); - if (_PyRuntime.ceval.pending.lock == NULL) { + // Only the main interpreter remains, so ignore the rest. 
+ PyInterpreterState *interp = _PyRuntime.interpreters.main; + interp->ceval.pending.lock = PyThread_allocate_lock(); + if (interp->ceval.pending.lock == NULL) { Py_FatalError("Can't initialize threads for pending calls"); } @@ -269,9 +263,9 @@ PyEval_ReInitThreads(void) raised. */ void -_PyEval_SignalAsyncExc(void) +_PyEval_SignalAsyncExc(PyInterpreterState *interp) { - SIGNAL_ASYNC_EXC(); + SIGNAL_ASYNC_EXC(interp); } PyThreadState * @@ -339,7 +333,7 @@ _PyEval_SignalReceived(void) /* Push one item onto the queue while holding the lock. */ static int -_push_pending_call(struct _pending_calls *pending, +_push_pending_call(struct _pending_calls *pending, unsigned long thread_id, int (*func)(void *), void *arg) { int i = pending->last; @@ -347,6 +341,7 @@ _push_pending_call(struct _pending_calls *pending, if (j == pending->first) { return -1; /* Queue full */ } + pending->calls[i].thread_id = thread_id; pending->calls[i].func = func; pending->calls[i].arg = arg; pending->last = j; @@ -355,7 +350,7 @@ _push_pending_call(struct _pending_calls *pending, /* Pop one item off the queue while holding the lock. */ static void -_pop_pending_call(struct _pending_calls *pending, +_pop_pending_call(struct _pending_calls *pending, unsigned long *thread_id, int (**func)(void *), void **arg) { int i = pending->first; @@ -365,6 +360,7 @@ _pop_pending_call(struct _pending_calls *pending, *func = pending->calls[i].func; *arg = pending->calls[i].arg; + *thread_id = pending->calls[i].thread_id; pending->first = (i + 1) % NPENDINGCALLS; } @@ -374,9 +370,10 @@ _pop_pending_call(struct _pending_calls *pending, */ int -Py_AddPendingCall(int (*func)(void *), void *arg) +_Py_AddPendingCall(PyInterpreterState *interp, unsigned long thread_id, + int (*func)(void *), void *arg) { - struct _pending_calls *pending = &_PyRuntime.ceval.pending; + struct _pending_calls *pending = &interp->ceval.pending; PyThread_acquire_lock(pending->lock, WAIT_LOCK); if (pending->finishing) { @@ -391,14 +388,23 @@ Py_AddPendingCall(int (*func)(void *), void *arg) PyErr_Restore(exc, val, tb); return -1; } - int result = _push_pending_call(pending, func, arg); + int result = _push_pending_call(pending, thread_id, func, arg); + /* signal main loop */ + SIGNAL_PENDING_CALLS(interp); PyThread_release_lock(pending->lock); - /* signal main loop */ - SIGNAL_PENDING_CALLS(); return result; } +/* Py_AddPendingCall() is a simple wrapper for the sake + of backward-compatibility. 
*/ +int +Py_AddPendingCall(int (*func)(void *), void *arg) +{ + PyInterpreterState *interp = _PyRuntime.interpreters.main; + return _Py_AddPendingCall(interp, _PyRuntime.main_thread, func, arg); +} + static int handle_signals(void) { @@ -425,15 +431,11 @@ handle_signals(void) } static int -make_pending_calls(struct _pending_calls* pending) +make_pending_calls(PyInterpreterState *interp) { + struct _pending_calls *pending = &interp->ceval.pending; static int busy = 0; - /* only service pending calls on main thread */ - if (PyThread_get_thread_ident() != _PyRuntime.main_thread) { - return 0; - } - /* don't perform recursive pending calls */ if (busy) { return 0; @@ -441,19 +443,27 @@ make_pending_calls(struct _pending_calls* pending) busy = 1; /* unsignal before starting to call callbacks, so that any callback added in-between re-signals */ - UNSIGNAL_PENDING_CALLS(); + UNSIGNAL_PENDING_CALLS(interp); int res = 0; /* perform a bounded number of calls, in case of recursion */ + unsigned long thread_id = 0; for (int i=0; ilock, WAIT_LOCK); - _pop_pending_call(pending, &func, &arg); + _pop_pending_call(pending, &thread_id, &func, &arg); PyThread_release_lock(pending->lock); + if (thread_id && PyThread_get_thread_ident() != thread_id) { + // Thread mismatch, so move it to the end of the list + // and start over. + _Py_AddPendingCall(interp, thread_id, func, arg); + goto error; + } + /* having released the lock, perform the callback */ if (func == NULL) { break; @@ -469,14 +479,14 @@ make_pending_calls(struct _pending_calls* pending) error: busy = 0; - SIGNAL_PENDING_CALLS(); + SIGNAL_PENDING_CALLS(interp); /* We're not done yet */ return res; } void -_Py_FinishPendingCalls(void) +_Py_FinishPendingCalls(PyInterpreterState *interp) { - struct _pending_calls *pending = &_PyRuntime.ceval.pending; + struct _pending_calls *pending = &interp->ceval.pending; assert(PyGILState_Check()); @@ -488,7 +498,7 @@ _Py_FinishPendingCalls(void) return; } - if (make_pending_calls(pending) < 0) { + if (make_pending_calls(interp) < 0) { PyObject *exc, *val, *tb; PyErr_Fetch(&exc, &val, &tb); PyErr_BadInternalCall(); @@ -497,6 +507,14 @@ _Py_FinishPendingCalls(void) } } +int +_Py_MakePendingCalls(PyInterpreterState *interp) +{ + assert(PyGILState_Check()); + + return make_pending_calls(interp); +} + /* Py_MakePendingCalls() is a simple wrapper for the sake of backward-compatibility. 
*/ int @@ -511,12 +529,8 @@ Py_MakePendingCalls(void) return res; } - res = make_pending_calls(&_PyRuntime.ceval.pending); - if (res != 0) { - return res; - } - - return 0; + PyInterpreterState *interp = _PyRuntime.interpreters.main; + return make_pending_calls(interp); } /* The interpreter's recursion limit */ @@ -638,7 +652,7 @@ _PyEval_EvalFrameDefault(PyFrameObject *f, int throwflag) PyObject **fastlocals, **freevars; PyObject *retval = NULL; /* Return value */ PyThreadState *tstate = _PyThreadState_GET(); - _Py_atomic_int *eval_breaker = &_PyRuntime.ceval.eval_breaker; + _Py_atomic_int *eval_breaker = &tstate->interp->ceval.eval_breaker; PyCodeObject *co; /* when tracing we set things up so that @@ -1059,9 +1073,9 @@ _PyEval_EvalFrameDefault(PyFrameObject *f, int throwflag) } } if (_Py_atomic_load_relaxed( - &_PyRuntime.ceval.pending.calls_to_do)) + &tstate->interp->ceval.pending.calls_to_do)) { - if (make_pending_calls(&_PyRuntime.ceval.pending) != 0) { + if (make_pending_calls(tstate->interp) != 0) { goto error; } } @@ -1093,7 +1107,7 @@ _PyEval_EvalFrameDefault(PyFrameObject *f, int throwflag) if (tstate->async_exc != NULL) { PyObject *exc = tstate->async_exc; tstate->async_exc = NULL; - UNSIGNAL_ASYNC_EXC(); + UNSIGNAL_ASYNC_EXC(tstate->interp); PyErr_SetNone(exc); Py_DECREF(exc); goto error; diff --git a/Python/ceval_gil.h b/Python/ceval_gil.h index f2d5fdba0153..d9ad3616fa24 100644 --- a/Python/ceval_gil.h +++ b/Python/ceval_gil.h @@ -176,7 +176,7 @@ static void drop_gil(PyThreadState *tstate) &_PyRuntime.ceval.gil.last_holder) ) == tstate) { - RESET_GIL_DROP_REQUEST(); + RESET_GIL_DROP_REQUEST(tstate->interp); /* NOTE: if COND_WAIT does not atomically start waiting when releasing the mutex, another thread can run through, take the GIL and drop it again, and reset the condition @@ -213,7 +213,7 @@ static void take_gil(PyThreadState *tstate) if (timed_out && _Py_atomic_load_relaxed(&_PyRuntime.ceval.gil.locked) && _PyRuntime.ceval.gil.switch_number == saved_switchnum) { - SET_GIL_DROP_REQUEST(); + SET_GIL_DROP_REQUEST(tstate->interp); } } _ready: @@ -239,10 +239,10 @@ static void take_gil(PyThreadState *tstate) MUTEX_UNLOCK(_PyRuntime.ceval.gil.switch_mutex); #endif if (_Py_atomic_load_relaxed(&_PyRuntime.ceval.gil_drop_request)) { - RESET_GIL_DROP_REQUEST(); + RESET_GIL_DROP_REQUEST(tstate->interp); } if (tstate->async_exc != NULL) { - _PyEval_SignalAsyncExc(); + _PyEval_SignalAsyncExc(tstate->interp); } MUTEX_UNLOCK(_PyRuntime.ceval.gil.mutex); diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index ad1447256cc6..44acba2d93e1 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1146,7 +1146,7 @@ Py_FinalizeEx(void) interp = tstate->interp; // Make any remaining pending calls. - _Py_FinishPendingCalls(); + _Py_FinishPendingCalls(interp); /* The interpreter is still entirely intact at this point, and the * exit funcs may be relying on that. In particular, if some thread @@ -1552,6 +1552,9 @@ Py_EndInterpreter(PyThreadState *tstate) // Wrap up existing "threading"-module-created, non-daemon threads. wait_for_thread_shutdown(); + // Make any remaining pending calls. 
+ _Py_FinishPendingCalls(interp); + call_py_exitfuncs(interp); if (tstate != interp->tstate_head || tstate->next != NULL) diff --git a/Python/pystate.c b/Python/pystate.c index a2464b6cf551..fee350100005 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -173,6 +173,14 @@ PyInterpreterState_New(void) memset(interp, 0, sizeof(*interp)); interp->id_refcount = -1; interp->check_interval = 100; + + interp->ceval.pending.lock = PyThread_allocate_lock(); + if (interp->ceval.pending.lock == NULL) { + PyErr_SetString(PyExc_RuntimeError, + "failed to create interpreter ceval pending mutex"); + return NULL; + } + interp->core_config = _PyCoreConfig_INIT; interp->eval_frame = _PyEval_EvalFrameDefault; #ifdef HAVE_DLOPEN @@ -279,6 +287,9 @@ PyInterpreterState_Delete(PyInterpreterState *interp) if (interp->id_mutex != NULL) { PyThread_free_lock(interp->id_mutex); } + if (interp->ceval.pending.lock != NULL) { + PyThread_free_lock(interp->ceval.pending.lock); + } PyMem_RawFree(interp); } @@ -928,7 +939,7 @@ PyThreadState_SetAsyncExc(unsigned long id, PyObject *exc) p->async_exc = exc; HEAD_UNLOCK(); Py_XDECREF(old_exc); - _PyEval_SignalAsyncExc(); + _PyEval_SignalAsyncExc(interp); return 1; } } @@ -1342,7 +1353,7 @@ _PyObject_GetCrossInterpreterData(PyObject *obj, _PyCrossInterpreterData *data) return 0; } -static void +static int _release_xidata(void *arg) { _PyCrossInterpreterData *data = (_PyCrossInterpreterData *)arg; @@ -1350,30 +1361,8 @@ _release_xidata(void *arg) data->free(data->data); } Py_XDECREF(data->obj); -} - -static void -_call_in_interpreter(PyInterpreterState *interp, - void (*func)(void *), void *arg) -{ - /* We would use Py_AddPendingCall() if it weren't specific to the - * main interpreter (see bpo-33608). In the meantime we take a - * naive approach. - */ - PyThreadState *save_tstate = NULL; - if (interp != _PyInterpreterState_Get()) { - // XXX Using the "head" thread isn't strictly correct. - PyThreadState *tstate = PyInterpreterState_ThreadHead(interp); - // XXX Possible GILState issues? - save_tstate = PyThreadState_Swap(tstate); - } - - func(arg); - - // Switch back. - if (save_tstate != NULL) { - PyThreadState_Swap(save_tstate); - } + PyMem_Free(data); + return 0; } void @@ -1384,7 +1373,7 @@ _PyCrossInterpreterData_Release(_PyCrossInterpreterData *data) return; } - // Switch to the original interpreter. + // Get the original interpreter. PyInterpreterState *interp = _PyInterpreterState_LookUpID(data->interp); if (interp == NULL) { // The intepreter was already destroyed. @@ -1393,9 +1382,24 @@ _PyCrossInterpreterData_Release(_PyCrossInterpreterData *data) } return; } + // XXX There's an ever-so-slight race here... + if (interp->finalizing) { + // XXX Someone leaked some memory... + return; + } // "Release" the data and/or the object. - _call_in_interpreter(interp, _release_xidata, data); + _PyCrossInterpreterData *copied = PyMem_Malloc(sizeof(_PyCrossInterpreterData)); + if (copied == NULL) { + PyErr_SetString(PyExc_MemoryError, + "Not enough memory to preserve cross-interpreter data"); + PyErr_Print(); + return; + } + memcpy(copied, data, sizeof(_PyCrossInterpreterData)); + if (_Py_AddPendingCall(interp, 0, _release_xidata, copied) != 0) { + // XXX Queue full or couldn't get lock. Try again somehow? 
+ } } PyObject * From webhook-mailer at python.org Fri Apr 12 11:35:49 2019 From: webhook-mailer at python.org (Steve Dower) Date: Fri, 12 Apr 2019 15:35:49 -0000 Subject: [Python-checkins] bpo-36549: str.capitalize now titlecases the first character instead of uppercasing it (GH-12804) Message-ID: https://github.com/python/cpython/commit/b015fc86f7b1f35283804bfee788cce0a5495df7 commit: b015fc86f7b1f35283804bfee788cce0a5495df7 branch: master author: Kingsley M <37349466+kingdom5500 at users.noreply.github.com> committer: Steve Dower date: 2019-04-12T08:35:39-07:00 summary: bpo-36549: str.capitalize now titlecases the first character instead of uppercasing it (GH-12804) files: A Misc/NEWS.d/next/Core and Builtins/2019-04-11-12-41-31.bpo-36549.QSp8of.rst M Doc/library/stdtypes.rst M Lib/test/string_tests.py M Lib/test/test_unicode.py M Objects/unicodeobject.c diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index bae989e6b3a9..aeecdbb24a57 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -1509,6 +1509,10 @@ expression support in the :mod:`re` module). Return a copy of the string with its first character capitalized and the rest lowercased. + .. versionchanged:: 3.8 + The first character is now put into titlecase rather than uppercase. + This means that characters like digraphs will only have their first + letter capitalized, instead of the full character. .. method:: str.casefold() @@ -2052,8 +2056,7 @@ expression support in the :mod:`re` module). >>> import re >>> def titlecase(s): ... return re.sub(r"[A-Za-z]+('[A-Za-z]+)?", - ... lambda mo: mo.group(0)[0].upper() + - ... mo.group(0)[1:].lower(), + ... lambda mo: mo.group(0).capitalize(), ... s) ... >>> titlecase("they're bill's friends.") diff --git a/Lib/test/string_tests.py b/Lib/test/string_tests.py index 561b09a2d5ee..836a43b81dd6 100644 --- a/Lib/test/string_tests.py +++ b/Lib/test/string_tests.py @@ -977,7 +977,7 @@ def test_hash(self): def test_capitalize_nonascii(self): # check that titlecased chars are lowered correctly # \u1ffc is the titlecased char - self.checkequal('\u03a9\u0399\u1ff3\u1ff3\u1ff3', + self.checkequal('\u1ffc\u1ff3\u1ff3\u1ff3', '\u1ff3\u1ff3\u1ffc\u1ffc', 'capitalize') # check with cased non-letter chars self.checkequal('\u24c5\u24e8\u24e3\u24d7\u24de\u24dd', diff --git a/Lib/test/test_unicode.py b/Lib/test/test_unicode.py index 1131efdd26ab..36b72e40c7e4 100644 --- a/Lib/test/test_unicode.py +++ b/Lib/test/test_unicode.py @@ -811,7 +811,7 @@ def test_capitalize(self): self.assertEqual('h\u0130'.capitalize(), 'H\u0069\u0307') exp = '\u0399\u0308\u0300\u0069\u0307' self.assertEqual('\u1fd2\u0130'.capitalize(), exp) - self.assertEqual('?nnish'.capitalize(), 'FInnish') + self.assertEqual('?nnish'.capitalize(), 'Finnish') self.assertEqual('A\u0345\u03a3'.capitalize(), 'A\u0345\u03c2') def test_title(self): diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-04-11-12-41-31.bpo-36549.QSp8of.rst b/Misc/NEWS.d/next/Core and Builtins/2019-04-11-12-41-31.bpo-36549.QSp8of.rst new file mode 100644 index 000000000000..9c6834cb3f90 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2019-04-11-12-41-31.bpo-36549.QSp8of.rst @@ -0,0 +1,2 @@ +Change str.capitalize to use titlecase for the first character instead of +uppercase. 
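The user-visible difference is limited to characters whose titlecase and uppercase mappings differ, such as digraphs and ligatures. A short illustration (the outputs assume a build that includes this change):

    # Titlecase maps only the first letter of a digraph or ligature.
    print("ǆungla".capitalize())   # 'ǅungla' with this change; 'Ǆungla' before
    print("ﬁnnish".capitalize())   # 'Finnish' with this change; 'FInnish' before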
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index c0b345be7e8d..e00dc37974f8 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -9675,7 +9675,7 @@ do_capitalize(int kind, void *data, Py_ssize_t length, Py_UCS4 *res, Py_UCS4 *ma Py_UCS4 c, mapped[3]; c = PyUnicode_READ(kind, data, 0); - n_res = _PyUnicode_ToUpperFull(c, mapped); + n_res = _PyUnicode_ToTitleFull(c, mapped); for (j = 0; j < n_res; j++) { *maxchar = Py_MAX(*maxchar, mapped[j]); res[k++] = mapped[j]; From webhook-mailer at python.org Fri Apr 12 11:50:58 2019 From: webhook-mailer at python.org (Benjamin Peterson) Date: Fri, 12 Apr 2019 15:50:58 -0000 Subject: [Python-checkins] Indicate that seek and tell are mandatory on BufferedRandom. (GH-11216) Message-ID: https://github.com/python/cpython/commit/b13552c4d7ce68fc9e61b5ade03cb5b951349c2b commit: b13552c4d7ce68fc9e61b5ade03cb5b951349c2b branch: master author: Christopher Head committer: Benjamin Peterson date: 2019-04-12T08:50:40-07:00 summary: Indicate that seek and tell are mandatory on BufferedRandom. (GH-11216) For BufferedReader and BufferedWriter, seek and tell operations are optional (they may or may not exist based on the underlying stream). For BufferedRandom, they are mandatory: a BufferedRandom should not be constructed over an unseekable underlying stream. Document this. files: M Doc/library/io.rst diff --git a/Doc/library/io.rst b/Doc/library/io.rst index 9738c5c2ad27..0f1251687aeb 100644 --- a/Doc/library/io.rst +++ b/Doc/library/io.rst @@ -719,15 +719,15 @@ than raw I/O does. .. class:: BufferedRandom(raw, buffer_size=DEFAULT_BUFFER_SIZE) A buffered interface to random access streams. It inherits - :class:`BufferedReader` and :class:`BufferedWriter`, and further supports - :meth:`seek` and :meth:`tell` functionality. + :class:`BufferedReader` and :class:`BufferedWriter`. The constructor creates a reader and writer for a seekable raw stream, given in the first argument. If the *buffer_size* is omitted it defaults to :data:`DEFAULT_BUFFER_SIZE`. :class:`BufferedRandom` is capable of anything :class:`BufferedReader` or - :class:`BufferedWriter` can do. + :class:`BufferedWriter` can do. In addition, :meth:`seek` and :meth:`tell` + are guaranteed to be implemented. .. class:: BufferedRWPair(reader, writer, buffer_size=DEFAULT_BUFFER_SIZE) From webhook-mailer at python.org Fri Apr 12 11:51:38 2019 From: webhook-mailer at python.org (Benjamin Peterson) Date: Fri, 12 Apr 2019 15:51:38 -0000 Subject: [Python-checkins] bpo-34652: Use AC_CHECK_FUNCS for lchmod. (GH-12799) Message-ID: https://github.com/python/cpython/commit/0fd5a7338cbaf7a61ab5bad270c1b0311047d0f9 commit: 0fd5a7338cbaf7a61ab5bad270c1b0311047d0f9 branch: 2.7 author: Joshua Root committer: Benjamin Peterson date: 2019-04-12T08:51:35-07:00 summary: bpo-34652: Use AC_CHECK_FUNCS for lchmod. (GH-12799) A fix for 69e96910153219b0b15a18323b917bd74336d229, which resulted in lchmod being disabled on all platforms, not just Linux. (cherry picked from commit ed709d5699716bf7237856dc20aba321e2dfff6d) files: M configure M configure.ac M pyconfig.h.in diff --git a/configure b/configure index dd30c11ee1fa..ced0a0043fb6 100755 --- a/configure +++ b/configure @@ -10632,10 +10632,16 @@ done # links. Some libc implementations have a stub lchmod implementation that always # returns an error. 
if test "$MACHDEP" != linux; then + for ac_func in lchmod +do : ac_fn_c_check_func "$LINENO" "lchmod" "ac_cv_func_lchmod" if test "x$ac_cv_func_lchmod" = xyes; then : + cat >>confdefs.h <<_ACEOF +#define HAVE_LCHMOD 1 +_ACEOF fi +done fi diff --git a/configure.ac b/configure.ac index 7396c1f2ff45..13b40f2edd80 100644 --- a/configure.ac +++ b/configure.ac @@ -3138,7 +3138,7 @@ AC_CHECK_FUNCS(alarm setitimer getitimer bind_textdomain_codeset chown \ # links. Some libc implementations have a stub lchmod implementation that always # returns an error. if test "$MACHDEP" != linux; then - AC_CHECK_FUNC(lchmod) + AC_CHECK_FUNCS(lchmod) fi # For some functions, having a definition is not sufficient, since diff --git a/pyconfig.h.in b/pyconfig.h.in index f828677dda01..11c4a66873c1 100644 --- a/pyconfig.h.in +++ b/pyconfig.h.in @@ -439,6 +439,9 @@ /* Define to 1 if you have the 'lchflags' function. */ #undef HAVE_LCHFLAGS +/* Define to 1 if you have the `lchmod' function. */ +#undef HAVE_LCHMOD + /* Define to 1 if you have the `lchown' function. */ #undef HAVE_LCHOWN From webhook-mailer at python.org Fri Apr 12 12:17:24 2019 From: webhook-mailer at python.org (Steve Dower) Date: Fri, 12 Apr 2019 16:17:24 -0000 Subject: [Python-checkins] Correct grammar in concurrent/futures/process.py docstring (GH-12761) Message-ID: https://github.com/python/cpython/commit/f938d8be4ce3e1ccca7c31cf673265634307147f commit: f938d8be4ce3e1ccca7c31cf673265634307147f branch: master author: Thomas Grainger committer: Steve Dower date: 2019-04-12T09:17:17-07:00 summary: Correct grammar in concurrent/futures/process.py docstring (GH-12761) files: M Lib/concurrent/futures/process.py diff --git a/Lib/concurrent/futures/process.py b/Lib/concurrent/futures/process.py index 306e9ce47a6b..e6ce278b5d44 100644 --- a/Lib/concurrent/futures/process.py +++ b/Lib/concurrent/futures/process.py @@ -3,7 +3,7 @@ """Implements ProcessPoolExecutor. -The follow diagram and text describe the data-flow through the system: +The following diagram and text describe the data-flow through the system: |======================= In-process =====================|== Out-of-process ==| From webhook-mailer at python.org Fri Apr 12 12:20:16 2019 From: webhook-mailer at python.org (Eric Snow) Date: Fri, 12 Apr 2019 16:20:16 -0000 Subject: [Python-checkins] bpo-33608: Revert "Factor out a private, per-interpreter _Py_AddPendingCall()." (gh-12806) Message-ID: https://github.com/python/cpython/commit/b75b1a3504a0cea6fac6ecba44c10b2629577025 commit: b75b1a3504a0cea6fac6ecba44c10b2629577025 branch: master author: Eric Snow committer: GitHub date: 2019-04-12T10:20:10-06:00 summary: bpo-33608: Revert "Factor out a private, per-interpreter _Py_AddPendingCall()." (gh-12806) This reverts commit f13c5c8b9401a9dc19e95d8b420ee100ac022208 (gh-12360). 
files: D Misc/NEWS.d/next/Core and Builtins/2018-09-15-12-13-46.bpo-33608.avmvVP.rst M Include/ceval.h M Include/internal/pycore_ceval.h M Include/internal/pycore_pystate.h M Lib/test/test_capi.py M Modules/_testcapimodule.c M Modules/signalmodule.c M Python/ceval.c M Python/ceval_gil.h M Python/pylifecycle.c M Python/pystate.c diff --git a/Include/ceval.h b/Include/ceval.h index 9c6d420bc234..11283c0a570b 100644 --- a/Include/ceval.h +++ b/Include/ceval.h @@ -221,7 +221,7 @@ PyAPI_FUNC(Py_ssize_t) _PyEval_RequestCodeExtraIndex(freefunc); #ifndef Py_LIMITED_API PyAPI_FUNC(int) _PyEval_SliceIndex(PyObject *, Py_ssize_t *); PyAPI_FUNC(int) _PyEval_SliceIndexNotNone(PyObject *, Py_ssize_t *); -PyAPI_FUNC(void) _PyEval_SignalAsyncExc(PyInterpreterState *); +PyAPI_FUNC(void) _PyEval_SignalAsyncExc(void); #endif /* Masks and values used by FORMAT_VALUE opcode. */ diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h index 1bdcdf527af4..2ead96c7abe3 100644 --- a/Include/internal/pycore_ceval.h +++ b/Include/internal/pycore_ceval.h @@ -11,11 +11,7 @@ extern "C" { #include "pycore_atomic.h" #include "pythread.h" -struct _is; // See PyInterpreterState in cpython/pystate.h. - -PyAPI_FUNC(int) _Py_AddPendingCall(struct _is*, unsigned long, int (*)(void *), void *); -PyAPI_FUNC(int) _Py_MakePendingCalls(struct _is*); -PyAPI_FUNC(void) _Py_FinishPendingCalls(struct _is*); +PyAPI_FUNC(void) _Py_FinishPendingCalls(void); struct _pending_calls { int finishing; @@ -28,7 +24,6 @@ struct _pending_calls { int async_exc; #define NPENDINGCALLS 32 struct { - unsigned long thread_id; int (*func)(void *); void *arg; } calls[NPENDINGCALLS]; @@ -36,13 +31,6 @@ struct _pending_calls { int last; }; -struct _ceval_interpreter_state { - /* This single variable consolidates all requests to break out of - the fast path in the eval loop. */ - _Py_atomic_int eval_breaker; - struct _pending_calls pending; -}; - #include "pycore_gil.h" struct _ceval_runtime_state { @@ -53,8 +41,12 @@ struct _ceval_runtime_state { c_tracefunc. This speeds up the if statement in PyEval_EvalFrameEx() after fast_next_opcode. */ int tracing_possible; + /* This single variable consolidates all requests to break out of + the fast path in the eval loop. */ + _Py_atomic_int eval_breaker; /* Request for dropping the GIL */ _Py_atomic_int gil_drop_request; + struct _pending_calls pending; /* Request for checking signals. 
*/ _Py_atomic_int signals_pending; struct _gil_runtime_state gil; diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h index 3ae2e0c60483..df3730f8014a 100644 --- a/Include/internal/pycore_pystate.h +++ b/Include/internal/pycore_pystate.h @@ -12,7 +12,6 @@ extern "C" { #include "pystate.h" #include "pythread.h" -#include "pycore_atomic.h" #include "pycore_ceval.h" #include "pycore_pathconfig.h" #include "pycore_pymem.h" @@ -84,8 +83,6 @@ struct _is { PyObject *pyexitmodule; uint64_t tstate_next_unique_id; - - struct _ceval_interpreter_state ceval; }; PyAPI_FUNC(struct _is*) _PyInterpreterState_LookUpID(PY_INT64_T); diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py index 02ae54804f5f..33c98ac28bc5 100644 --- a/Lib/test/test_capi.py +++ b/Lib/test/test_capi.py @@ -373,7 +373,7 @@ def pendingcalls_wait(self, l, n, context = None): def test_pendingcalls_threaded(self): #do every callback on a separate thread - n = 32 #total callbacks (see NPENDINGCALLS in pycore_ceval.h) + n = 32 #total callbacks threads = [] class foo(object):pass context = foo() diff --git a/Misc/NEWS.d/next/Core and Builtins/2018-09-15-12-13-46.bpo-33608.avmvVP.rst b/Misc/NEWS.d/next/Core and Builtins/2018-09-15-12-13-46.bpo-33608.avmvVP.rst deleted file mode 100644 index 73a01a1f46bd..000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2018-09-15-12-13-46.bpo-33608.avmvVP.rst +++ /dev/null @@ -1,5 +0,0 @@ -We added a new internal _Py_AddPendingCall() that operates relative to the -provided interpreter. This allows us to use the existing implementation to -ask another interpreter to do work that cannot be done in the current -interpreter, like decref an object the other interpreter owns. The existing -Py_AddPendingCall() only operates relative to the main interpreter. diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index 71356fbac318..1180b4b176e9 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -2445,7 +2445,6 @@ pending_threadfunc(PyObject *self, PyObject *arg) Py_INCREF(callable); Py_BEGIN_ALLOW_THREADS - /* XXX Use the internal _Py_AddPendingCall(). */ r = Py_AddPendingCall(&_pending_callback, callable); Py_END_ALLOW_THREADS diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c index 962174dda944..4590017c170a 100644 --- a/Modules/signalmodule.c +++ b/Modules/signalmodule.c @@ -19,7 +19,6 @@ #include #endif #endif -#include "internal/pycore_pystate.h" #ifdef HAVE_SIGNAL_H #include @@ -296,10 +295,8 @@ trip_signal(int sig_num) { /* Py_AddPendingCall() isn't signal-safe, but we still use it for this exceptional case. */ - _Py_AddPendingCall(_PyRuntime.interpreters.main, - main_thread, - report_wakeup_send_error, - (void *)(intptr_t) last_error); + Py_AddPendingCall(report_wakeup_send_error, + (void *)(intptr_t) last_error); } } } @@ -316,10 +313,8 @@ trip_signal(int sig_num) { /* Py_AddPendingCall() isn't signal-safe, but we still use it for this exceptional case. */ - _Py_AddPendingCall(_PyRuntime.interpreters.main, - main_thread, - report_wakeup_write_error, - (void *)(intptr_t)errno); + Py_AddPendingCall(report_wakeup_write_error, + (void *)(intptr_t)errno); } } } diff --git a/Python/ceval.c b/Python/ceval.c index b2fa20d61686..28e923219d38 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -96,61 +96,61 @@ static long dxp[256]; /* This can set eval_breaker to 0 even though gil_drop_request became 1. We believe this is all right because the eval loop will release the GIL eventually anyway. 
*/ -#define COMPUTE_EVAL_BREAKER(interp) \ +#define COMPUTE_EVAL_BREAKER() \ _Py_atomic_store_relaxed( \ - &interp->ceval.eval_breaker, \ + &_PyRuntime.ceval.eval_breaker, \ GIL_REQUEST | \ _Py_atomic_load_relaxed(&_PyRuntime.ceval.signals_pending) | \ - _Py_atomic_load_relaxed(&interp->ceval.pending.calls_to_do) | \ - interp->ceval.pending.async_exc) + _Py_atomic_load_relaxed(&_PyRuntime.ceval.pending.calls_to_do) | \ + _PyRuntime.ceval.pending.async_exc) -#define SET_GIL_DROP_REQUEST(interp) \ +#define SET_GIL_DROP_REQUEST() \ do { \ _Py_atomic_store_relaxed(&_PyRuntime.ceval.gil_drop_request, 1); \ - _Py_atomic_store_relaxed(&interp->ceval.eval_breaker, 1); \ + _Py_atomic_store_relaxed(&_PyRuntime.ceval.eval_breaker, 1); \ } while (0) -#define RESET_GIL_DROP_REQUEST(interp) \ +#define RESET_GIL_DROP_REQUEST() \ do { \ _Py_atomic_store_relaxed(&_PyRuntime.ceval.gil_drop_request, 0); \ - COMPUTE_EVAL_BREAKER(interp); \ + COMPUTE_EVAL_BREAKER(); \ } while (0) /* Pending calls are only modified under pending_lock */ -#define SIGNAL_PENDING_CALLS(interp) \ +#define SIGNAL_PENDING_CALLS() \ do { \ - _Py_atomic_store_relaxed(&interp->ceval.pending.calls_to_do, 1); \ - _Py_atomic_store_relaxed(&interp->ceval.eval_breaker, 1); \ + _Py_atomic_store_relaxed(&_PyRuntime.ceval.pending.calls_to_do, 1); \ + _Py_atomic_store_relaxed(&_PyRuntime.ceval.eval_breaker, 1); \ } while (0) -#define UNSIGNAL_PENDING_CALLS(interp) \ +#define UNSIGNAL_PENDING_CALLS() \ do { \ - _Py_atomic_store_relaxed(&interp->ceval.pending.calls_to_do, 0); \ - COMPUTE_EVAL_BREAKER(interp); \ + _Py_atomic_store_relaxed(&_PyRuntime.ceval.pending.calls_to_do, 0); \ + COMPUTE_EVAL_BREAKER(); \ } while (0) #define SIGNAL_PENDING_SIGNALS() \ do { \ _Py_atomic_store_relaxed(&_PyRuntime.ceval.signals_pending, 1); \ - _Py_atomic_store_relaxed(&_PyRuntime.interpreters.main->ceval.eval_breaker, 1); \ + _Py_atomic_store_relaxed(&_PyRuntime.ceval.eval_breaker, 1); \ } while (0) #define UNSIGNAL_PENDING_SIGNALS() \ do { \ _Py_atomic_store_relaxed(&_PyRuntime.ceval.signals_pending, 0); \ - COMPUTE_EVAL_BREAKER(_PyRuntime.interpreters.main); \ + COMPUTE_EVAL_BREAKER(); \ } while (0) -#define SIGNAL_ASYNC_EXC(interp) \ +#define SIGNAL_ASYNC_EXC() \ do { \ - interp->ceval.pending.async_exc = 1; \ - _Py_atomic_store_relaxed(&interp->ceval.eval_breaker, 1); \ + _PyRuntime.ceval.pending.async_exc = 1; \ + _Py_atomic_store_relaxed(&_PyRuntime.ceval.eval_breaker, 1); \ } while (0) -#define UNSIGNAL_ASYNC_EXC(interp) \ +#define UNSIGNAL_ASYNC_EXC() \ do { \ - interp->ceval.pending.async_exc = 0; \ - COMPUTE_EVAL_BREAKER(interp); \ + _PyRuntime.ceval.pending.async_exc = 0; \ + COMPUTE_EVAL_BREAKER(); \ } while (0) @@ -177,7 +177,10 @@ PyEval_InitThreads(void) create_gil(); take_gil(_PyThreadState_GET()); - // The pending calls mutex is initialized in PyInterpreterState_New(). + _PyRuntime.ceval.pending.lock = PyThread_allocate_lock(); + if (_PyRuntime.ceval.pending.lock == NULL) { + Py_FatalError("Can't initialize threads for pending calls"); + } } void @@ -189,6 +192,11 @@ _PyEval_FiniThreads(void) destroy_gil(); assert(!gil_created()); + + if (_PyRuntime.ceval.pending.lock != NULL) { + PyThread_free_lock(_PyRuntime.ceval.pending.lock); + _PyRuntime.ceval.pending.lock = NULL; + } } void @@ -248,10 +256,8 @@ PyEval_ReInitThreads(void) recreate_gil(); take_gil(current_tstate); - // Only the main interpreter remains, so ignore the rest. 
- PyInterpreterState *interp = _PyRuntime.interpreters.main; - interp->ceval.pending.lock = PyThread_allocate_lock(); - if (interp->ceval.pending.lock == NULL) { + _PyRuntime.ceval.pending.lock = PyThread_allocate_lock(); + if (_PyRuntime.ceval.pending.lock == NULL) { Py_FatalError("Can't initialize threads for pending calls"); } @@ -263,9 +269,9 @@ PyEval_ReInitThreads(void) raised. */ void -_PyEval_SignalAsyncExc(PyInterpreterState *interp) +_PyEval_SignalAsyncExc(void) { - SIGNAL_ASYNC_EXC(interp); + SIGNAL_ASYNC_EXC(); } PyThreadState * @@ -333,7 +339,7 @@ _PyEval_SignalReceived(void) /* Push one item onto the queue while holding the lock. */ static int -_push_pending_call(struct _pending_calls *pending, unsigned long thread_id, +_push_pending_call(struct _pending_calls *pending, int (*func)(void *), void *arg) { int i = pending->last; @@ -341,7 +347,6 @@ _push_pending_call(struct _pending_calls *pending, unsigned long thread_id, if (j == pending->first) { return -1; /* Queue full */ } - pending->calls[i].thread_id = thread_id; pending->calls[i].func = func; pending->calls[i].arg = arg; pending->last = j; @@ -350,7 +355,7 @@ _push_pending_call(struct _pending_calls *pending, unsigned long thread_id, /* Pop one item off the queue while holding the lock. */ static void -_pop_pending_call(struct _pending_calls *pending, unsigned long *thread_id, +_pop_pending_call(struct _pending_calls *pending, int (**func)(void *), void **arg) { int i = pending->first; @@ -360,7 +365,6 @@ _pop_pending_call(struct _pending_calls *pending, unsigned long *thread_id, *func = pending->calls[i].func; *arg = pending->calls[i].arg; - *thread_id = pending->calls[i].thread_id; pending->first = (i + 1) % NPENDINGCALLS; } @@ -370,10 +374,9 @@ _pop_pending_call(struct _pending_calls *pending, unsigned long *thread_id, */ int -_Py_AddPendingCall(PyInterpreterState *interp, unsigned long thread_id, - int (*func)(void *), void *arg) +Py_AddPendingCall(int (*func)(void *), void *arg) { - struct _pending_calls *pending = &interp->ceval.pending; + struct _pending_calls *pending = &_PyRuntime.ceval.pending; PyThread_acquire_lock(pending->lock, WAIT_LOCK); if (pending->finishing) { @@ -388,23 +391,14 @@ _Py_AddPendingCall(PyInterpreterState *interp, unsigned long thread_id, PyErr_Restore(exc, val, tb); return -1; } - int result = _push_pending_call(pending, thread_id, func, arg); - /* signal main loop */ - SIGNAL_PENDING_CALLS(interp); + int result = _push_pending_call(pending, func, arg); PyThread_release_lock(pending->lock); + /* signal main loop */ + SIGNAL_PENDING_CALLS(); return result; } -/* Py_AddPendingCall() is a simple wrapper for the sake - of backward-compatibility. 
*/ -int -Py_AddPendingCall(int (*func)(void *), void *arg) -{ - PyInterpreterState *interp = _PyRuntime.interpreters.main; - return _Py_AddPendingCall(interp, _PyRuntime.main_thread, func, arg); -} - static int handle_signals(void) { @@ -431,11 +425,15 @@ handle_signals(void) } static int -make_pending_calls(PyInterpreterState *interp) +make_pending_calls(struct _pending_calls* pending) { - struct _pending_calls *pending = &interp->ceval.pending; static int busy = 0; + /* only service pending calls on main thread */ + if (PyThread_get_thread_ident() != _PyRuntime.main_thread) { + return 0; + } + /* don't perform recursive pending calls */ if (busy) { return 0; @@ -443,27 +441,19 @@ make_pending_calls(PyInterpreterState *interp) busy = 1; /* unsignal before starting to call callbacks, so that any callback added in-between re-signals */ - UNSIGNAL_PENDING_CALLS(interp); + UNSIGNAL_PENDING_CALLS(); int res = 0; /* perform a bounded number of calls, in case of recursion */ - unsigned long thread_id = 0; for (int i=0; ilock, WAIT_LOCK); - _pop_pending_call(pending, &thread_id, &func, &arg); + _pop_pending_call(pending, &func, &arg); PyThread_release_lock(pending->lock); - if (thread_id && PyThread_get_thread_ident() != thread_id) { - // Thread mismatch, so move it to the end of the list - // and start over. - _Py_AddPendingCall(interp, thread_id, func, arg); - goto error; - } - /* having released the lock, perform the callback */ if (func == NULL) { break; @@ -479,14 +469,14 @@ make_pending_calls(PyInterpreterState *interp) error: busy = 0; - SIGNAL_PENDING_CALLS(interp); /* We're not done yet */ + SIGNAL_PENDING_CALLS(); return res; } void -_Py_FinishPendingCalls(PyInterpreterState *interp) +_Py_FinishPendingCalls(void) { - struct _pending_calls *pending = &interp->ceval.pending; + struct _pending_calls *pending = &_PyRuntime.ceval.pending; assert(PyGILState_Check()); @@ -498,7 +488,7 @@ _Py_FinishPendingCalls(PyInterpreterState *interp) return; } - if (make_pending_calls(interp) < 0) { + if (make_pending_calls(pending) < 0) { PyObject *exc, *val, *tb; PyErr_Fetch(&exc, &val, &tb); PyErr_BadInternalCall(); @@ -507,14 +497,6 @@ _Py_FinishPendingCalls(PyInterpreterState *interp) } } -int -_Py_MakePendingCalls(PyInterpreterState *interp) -{ - assert(PyGILState_Check()); - - return make_pending_calls(interp); -} - /* Py_MakePendingCalls() is a simple wrapper for the sake of backward-compatibility. 
*/ int @@ -529,8 +511,12 @@ Py_MakePendingCalls(void) return res; } - PyInterpreterState *interp = _PyRuntime.interpreters.main; - return make_pending_calls(interp); + res = make_pending_calls(&_PyRuntime.ceval.pending); + if (res != 0) { + return res; + } + + return 0; } /* The interpreter's recursion limit */ @@ -652,7 +638,7 @@ _PyEval_EvalFrameDefault(PyFrameObject *f, int throwflag) PyObject **fastlocals, **freevars; PyObject *retval = NULL; /* Return value */ PyThreadState *tstate = _PyThreadState_GET(); - _Py_atomic_int *eval_breaker = &tstate->interp->ceval.eval_breaker; + _Py_atomic_int *eval_breaker = &_PyRuntime.ceval.eval_breaker; PyCodeObject *co; /* when tracing we set things up so that @@ -1073,9 +1059,9 @@ _PyEval_EvalFrameDefault(PyFrameObject *f, int throwflag) } } if (_Py_atomic_load_relaxed( - &tstate->interp->ceval.pending.calls_to_do)) + &_PyRuntime.ceval.pending.calls_to_do)) { - if (make_pending_calls(tstate->interp) != 0) { + if (make_pending_calls(&_PyRuntime.ceval.pending) != 0) { goto error; } } @@ -1107,7 +1093,7 @@ _PyEval_EvalFrameDefault(PyFrameObject *f, int throwflag) if (tstate->async_exc != NULL) { PyObject *exc = tstate->async_exc; tstate->async_exc = NULL; - UNSIGNAL_ASYNC_EXC(tstate->interp); + UNSIGNAL_ASYNC_EXC(); PyErr_SetNone(exc); Py_DECREF(exc); goto error; diff --git a/Python/ceval_gil.h b/Python/ceval_gil.h index d9ad3616fa24..f2d5fdba0153 100644 --- a/Python/ceval_gil.h +++ b/Python/ceval_gil.h @@ -176,7 +176,7 @@ static void drop_gil(PyThreadState *tstate) &_PyRuntime.ceval.gil.last_holder) ) == tstate) { - RESET_GIL_DROP_REQUEST(tstate->interp); + RESET_GIL_DROP_REQUEST(); /* NOTE: if COND_WAIT does not atomically start waiting when releasing the mutex, another thread can run through, take the GIL and drop it again, and reset the condition @@ -213,7 +213,7 @@ static void take_gil(PyThreadState *tstate) if (timed_out && _Py_atomic_load_relaxed(&_PyRuntime.ceval.gil.locked) && _PyRuntime.ceval.gil.switch_number == saved_switchnum) { - SET_GIL_DROP_REQUEST(tstate->interp); + SET_GIL_DROP_REQUEST(); } } _ready: @@ -239,10 +239,10 @@ static void take_gil(PyThreadState *tstate) MUTEX_UNLOCK(_PyRuntime.ceval.gil.switch_mutex); #endif if (_Py_atomic_load_relaxed(&_PyRuntime.ceval.gil_drop_request)) { - RESET_GIL_DROP_REQUEST(tstate->interp); + RESET_GIL_DROP_REQUEST(); } if (tstate->async_exc != NULL) { - _PyEval_SignalAsyncExc(tstate->interp); + _PyEval_SignalAsyncExc(); } MUTEX_UNLOCK(_PyRuntime.ceval.gil.mutex); diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 44acba2d93e1..ad1447256cc6 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1146,7 +1146,7 @@ Py_FinalizeEx(void) interp = tstate->interp; // Make any remaining pending calls. - _Py_FinishPendingCalls(interp); + _Py_FinishPendingCalls(); /* The interpreter is still entirely intact at this point, and the * exit funcs may be relying on that. In particular, if some thread @@ -1552,9 +1552,6 @@ Py_EndInterpreter(PyThreadState *tstate) // Wrap up existing "threading"-module-created, non-daemon threads. wait_for_thread_shutdown(); - // Make any remaining pending calls. 
- _Py_FinishPendingCalls(interp); - call_py_exitfuncs(interp); if (tstate != interp->tstate_head || tstate->next != NULL) diff --git a/Python/pystate.c b/Python/pystate.c index fee350100005..a2464b6cf551 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -173,14 +173,6 @@ PyInterpreterState_New(void) memset(interp, 0, sizeof(*interp)); interp->id_refcount = -1; interp->check_interval = 100; - - interp->ceval.pending.lock = PyThread_allocate_lock(); - if (interp->ceval.pending.lock == NULL) { - PyErr_SetString(PyExc_RuntimeError, - "failed to create interpreter ceval pending mutex"); - return NULL; - } - interp->core_config = _PyCoreConfig_INIT; interp->eval_frame = _PyEval_EvalFrameDefault; #ifdef HAVE_DLOPEN @@ -287,9 +279,6 @@ PyInterpreterState_Delete(PyInterpreterState *interp) if (interp->id_mutex != NULL) { PyThread_free_lock(interp->id_mutex); } - if (interp->ceval.pending.lock != NULL) { - PyThread_free_lock(interp->ceval.pending.lock); - } PyMem_RawFree(interp); } @@ -939,7 +928,7 @@ PyThreadState_SetAsyncExc(unsigned long id, PyObject *exc) p->async_exc = exc; HEAD_UNLOCK(); Py_XDECREF(old_exc); - _PyEval_SignalAsyncExc(interp); + _PyEval_SignalAsyncExc(); return 1; } } @@ -1353,7 +1342,7 @@ _PyObject_GetCrossInterpreterData(PyObject *obj, _PyCrossInterpreterData *data) return 0; } -static int +static void _release_xidata(void *arg) { _PyCrossInterpreterData *data = (_PyCrossInterpreterData *)arg; @@ -1361,8 +1350,30 @@ _release_xidata(void *arg) data->free(data->data); } Py_XDECREF(data->obj); - PyMem_Free(data); - return 0; +} + +static void +_call_in_interpreter(PyInterpreterState *interp, + void (*func)(void *), void *arg) +{ + /* We would use Py_AddPendingCall() if it weren't specific to the + * main interpreter (see bpo-33608). In the meantime we take a + * naive approach. + */ + PyThreadState *save_tstate = NULL; + if (interp != _PyInterpreterState_Get()) { + // XXX Using the "head" thread isn't strictly correct. + PyThreadState *tstate = PyInterpreterState_ThreadHead(interp); + // XXX Possible GILState issues? + save_tstate = PyThreadState_Swap(tstate); + } + + func(arg); + + // Switch back. + if (save_tstate != NULL) { + PyThreadState_Swap(save_tstate); + } } void @@ -1373,7 +1384,7 @@ _PyCrossInterpreterData_Release(_PyCrossInterpreterData *data) return; } - // Get the original interpreter. + // Switch to the original interpreter. PyInterpreterState *interp = _PyInterpreterState_LookUpID(data->interp); if (interp == NULL) { // The intepreter was already destroyed. @@ -1382,24 +1393,9 @@ _PyCrossInterpreterData_Release(_PyCrossInterpreterData *data) } return; } - // XXX There's an ever-so-slight race here... - if (interp->finalizing) { - // XXX Someone leaked some memory... - return; - } // "Release" the data and/or the object. - _PyCrossInterpreterData *copied = PyMem_Malloc(sizeof(_PyCrossInterpreterData)); - if (copied == NULL) { - PyErr_SetString(PyExc_MemoryError, - "Not enough memory to preserve cross-interpreter data"); - PyErr_Print(); - return; - } - memcpy(copied, data, sizeof(_PyCrossInterpreterData)); - if (_Py_AddPendingCall(interp, 0, _release_xidata, copied) != 0) { - // XXX Queue full or couldn't get lock. Try again somehow? 
- } + _call_in_interpreter(interp, _release_xidata, data); } PyObject * From webhook-mailer at python.org Fri Apr 12 12:36:42 2019 From: webhook-mailer at python.org (Steve Dower) Date: Fri, 12 Apr 2019 16:36:42 -0000 Subject: [Python-checkins] Enable building nuget packages for ARM32 (GH-12669) Message-ID: https://github.com/python/cpython/commit/aa25d5d026b21a6bac66ff1c47e96cbf6227473a commit: aa25d5d026b21a6bac66ff1c47e96cbf6227473a branch: master author: Paul Monson committer: Steve Dower date: 2019-04-12T09:36:38-07:00 summary: Enable building nuget packages for ARM32 (GH-12669) files: A Tools/nuget/pythonarm32.nuspec M Tools/nuget/build.bat M Tools/nuget/make_pkg.proj diff --git a/Tools/nuget/build.bat b/Tools/nuget/build.bat index f75cb3f7ba47..b532bd742168 100644 --- a/Tools/nuget/build.bat +++ b/Tools/nuget/build.bat @@ -6,20 +6,24 @@ if "%Py_OutDir%"=="" set Py_OutDir=%PCBUILD% set BUILDX86= set BUILDX64= +set BUILDARM32= set REBUILD= set OUTPUT= set PACKAGES= +set PYTHON_EXE= :CheckOpts if "%~1" EQU "-h" goto Help if "%~1" EQU "-x86" (set BUILDX86=1) && shift && goto CheckOpts if "%~1" EQU "-x64" (set BUILDX64=1) && shift && goto CheckOpts +if "%~1" EQU "-arm32" (set BUILDARM32=1) && shift && goto CheckOpts if "%~1" EQU "-r" (set REBUILD=-r) && shift && goto CheckOpts if "%~1" EQU "-o" (set OUTPUT="/p:OutputPath=%~2") && shift && shift && goto CheckOpts if "%~1" EQU "--out" (set OUTPUT="/p:OutputPath=%~2") && shift && shift && goto CheckOpts if "%~1" EQU "-p" (set PACKAGES=%PACKAGES% %~2) && shift && shift && goto CheckOpts +if "%~1" EQU "--python-exe" (set PYTHON_EXE="/p:PythonExe=%~2") && shift && shift && goto CheckOpts -if not defined BUILDX86 if not defined BUILDX64 (set BUILDX86=1) && (set BUILDX64=1) +if not defined BUILDX86 if not defined BUILDX64 if not defined BUILDARM32 (set BUILDX86=1) && (set BUILDX64=1) && (set BUILDARM32=1) call "%D%..\msi\get_externals.bat" call "%PCBUILD%find_msbuild.bat" %MSBUILD% @@ -32,7 +36,7 @@ if defined BUILDX86 ( ) else if not exist "%Py_OutDir%win32\python.exe" call "%PCBUILD%build.bat" -e if errorlevel 1 goto :eof - %MSBUILD% "%D%make_pkg.proj" /p:Configuration=Release /p:Platform=x86 %OUTPUT% %PACKAGES% + %MSBUILD% "%D%make_pkg.proj" /p:Configuration=Release /p:Platform=x86 %OUTPUT% %PACKAGES% %PYTHON_EXE% if errorlevel 1 goto :eof ) @@ -41,7 +45,16 @@ if defined BUILDX64 ( ) else if not exist "%Py_OutDir%amd64\python.exe" call "%PCBUILD%build.bat" -p x64 -e if errorlevel 1 goto :eof - %MSBUILD% "%D%make_pkg.proj" /p:Configuration=Release /p:Platform=x64 %OUTPUT% %PACKAGES% + %MSBUILD% "%D%make_pkg.proj" /p:Configuration=Release /p:Platform=x64 %OUTPUT% %PACKAGES% %PYTHON_EXE% + if errorlevel 1 goto :eof +) + +if defined BUILDARM32 ( + if defined REBUILD ( call "%PCBUILD%build.bat" -p ARM -e -r --no-tkinter + ) else if not exist "%Py_OutDir%arm32\python.exe" call "%PCBUILD%build.bat" -p ARM -e --no-tkinter + if errorlevel 1 goto :eof + + %MSBUILD% "%D%make_pkg.proj" /p:Configuration=Release /p:Platform=ARM %OUTPUT% %PACKAGES% %PYTHON_EXE% if errorlevel 1 goto :eof ) diff --git a/Tools/nuget/make_pkg.proj b/Tools/nuget/make_pkg.proj index 5638952ac9f9..b387b8eef542 100644 --- a/Tools/nuget/make_pkg.proj +++ b/Tools/nuget/make_pkg.proj @@ -4,6 +4,7 @@ {10487945-15D1-4092-A214-338395C4116B} python $(OutputName)x86 + $(OutputName)arm32 $(OutputName)daily false diff --git a/Tools/nuget/pythonarm32.nuspec b/Tools/nuget/pythonarm32.nuspec new file mode 100644 index 000000000000..273d79a0312b --- /dev/null +++ b/Tools/nuget/pythonarm32.nuspec 
@@ -0,0 +1,19 @@ + + + + pythonarm32 + Python (ARM32) + Python Software Foundation + 0.0.0.0 + https://docs.python.org/3/license.html + https://www.python.org/ + false + Installs Python ARM32 for use in build scenarios. + https://www.python.org/static/favicon.ico + python + + + + + + From webhook-mailer at python.org Fri Apr 12 12:56:01 2019 From: webhook-mailer at python.org (Steve Dower) Date: Fri, 12 Apr 2019 16:56:01 -0000 Subject: [Python-checkins] bpo-36509: Add iot layout for Windows IoT containers (GH-12663) Message-ID: https://github.com/python/cpython/commit/f4e5661e85ac41c987165246d2b33f363cd01e34 commit: f4e5661e85ac41c987165246d2b33f363cd01e34 branch: master author: Paul Monson committer: Steve Dower date: 2019-04-12T09:55:57-07:00 summary: bpo-36509: Add iot layout for Windows IoT containers (GH-12663) This enables using the `--preset-iot` option with the PC/layout script, but does not enable IoT builds as part of any normal release. files: A Misc/NEWS.d/next/Windows/2019-04-02-10-11-18.bpo-36509.DdaM67.rst M PC/layout/main.py M PC/layout/support/options.py diff --git a/Misc/NEWS.d/next/Windows/2019-04-02-10-11-18.bpo-36509.DdaM67.rst b/Misc/NEWS.d/next/Windows/2019-04-02-10-11-18.bpo-36509.DdaM67.rst new file mode 100644 index 000000000000..722f7638a144 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2019-04-02-10-11-18.bpo-36509.DdaM67.rst @@ -0,0 +1,4 @@ +Added preset-iot layout for Windows IoT ARM containers. This layout doesn't +contain UI components like tkinter or IDLE. It also doesn't contain files to +support on-target builds since Windows ARM32 builds must be cross-compiled +when using MSVC. diff --git a/PC/layout/main.py b/PC/layout/main.py index 185e6498e1bc..624033e721b7 100644 --- a/PC/layout/main.py +++ b/PC/layout/main.py @@ -66,6 +66,18 @@ TOOLS_DIRS = FileNameSet("scripts", "i18n", "pynche", "demo", "parser") TOOLS_FILES = FileSuffixSet(".py", ".pyw", ".txt") +def copy_if_modified(src, dest): + try: + dest_stat = os.stat(dest) + except FileNotFoundError: + do_copy = True + else: + src_stat = os.stat(src) + do_copy = (src_stat.st_mtime != dest_stat.st_mtime or + src_stat.st_size != dest_stat.st_size) + + if do_copy: + shutil.copy2(src, dest) def get_lib_layout(ns): def _c(f): @@ -426,7 +438,7 @@ def copy_files(files, ns): need_compile.append((dest, ns.copy / dest)) else: (ns.temp / "Lib" / dest).parent.mkdir(parents=True, exist_ok=True) - shutil.copy2(src, ns.temp / "Lib" / dest) + copy_if_modified(src, ns.temp / "Lib" / dest) need_compile.append((dest, ns.temp / "Lib" / dest)) if src not in EXCLUDE_FROM_CATALOG: @@ -436,7 +448,7 @@ def copy_files(files, ns): log_debug("Copy {} -> {}", src, ns.copy / dest) (ns.copy / dest).parent.mkdir(parents=True, exist_ok=True) try: - shutil.copy2(src, ns.copy / dest) + copy_if_modified(src, ns.copy / dest) except shutil.SameFileError: pass diff --git a/PC/layout/support/options.py b/PC/layout/support/options.py index 22492f220d60..00f05667ebb7 100644 --- a/PC/layout/support/options.py +++ b/PC/layout/support/options.py @@ -63,6 +63,10 @@ def public(f): "props" ], }, + "iot": { + "help": "Windows IoT Core", + "options": ["stable", "pip"], + }, "default": { "help": "development kit package", "options": [ From webhook-mailer at python.org Fri Apr 12 14:24:38 2019 From: webhook-mailer at python.org (Steve Dower) Date: Fri, 12 Apr 2019 18:24:38 -0000 Subject: [Python-checkins] Allow Windows layout builds to fully skip code signing (GH-12808) Message-ID: 
https://github.com/python/cpython/commit/606c66a17faf34a4e74d4829e8fe5ad0d2879434 commit: 606c66a17faf34a4e74d4829e8fe5ad0d2879434 branch: master author: Steve Dower committer: GitHub date: 2019-04-12T11:24:15-07:00 summary: Allow Windows layout builds to fully skip code signing (GH-12808) files: M Tools/msi/make_cat.ps1 M Tools/msi/sdktools.psm1 diff --git a/Tools/msi/make_cat.ps1 b/Tools/msi/make_cat.ps1 index 70741439869a..cc3cd4a2b50c 100644 --- a/Tools/msi/make_cat.ps1 +++ b/Tools/msi/make_cat.ps1 @@ -16,6 +16,7 @@ #> param( [Parameter(Mandatory=$true)][string]$catalog, + [switch]$sign, [string]$description, [string]$certname, [string]$certsha1, @@ -31,4 +32,6 @@ MakeCat $catalog if (-not $?) { throw "Catalog compilation failed" } -Sign-File -certname $certname -certsha1 $certsha1 -certfile $certfile -description $description -files @($catalog -replace 'cdf$', 'cat') +if ($sign) { + Sign-File -certname $certname -certsha1 $certsha1 -certfile $certfile -description $description -files @($catalog -replace 'cdf$', 'cat') +} diff --git a/Tools/msi/sdktools.psm1 b/Tools/msi/sdktools.psm1 index 61edb3411760..8081b104d85a 100644 --- a/Tools/msi/sdktools.psm1 +++ b/Tools/msi/sdktools.psm1 @@ -31,6 +31,10 @@ function Sign-File { $certfile = $env:SigningCertificateFile; } + if (-not ($certsha1 -or $certname -or $certfile)) { + throw "No signing certificate specified" + } + foreach ($a in $files) { if ($certsha1) { SignTool sign /sha1 $certsha1 /fd sha256 /t http://timestamp.verisign.com/scripts/timestamp.dll /d $description $a @@ -38,8 +42,6 @@ function Sign-File { SignTool sign /a /n $certname /fd sha256 /t http://timestamp.verisign.com/scripts/timestamp.dll /d $description $a } elseif ($certfile) { SignTool sign /f $certfile /fd sha256 /t http://timestamp.verisign.com/scripts/timestamp.dll /d $description $a - } else { - SignTool sign /a /fd sha256 /t http://timestamp.verisign.com/scripts/timestamp.dll /d $description $a } } } From webhook-mailer at python.org Fri Apr 12 14:44:07 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 12 Apr 2019 18:44:07 -0000 Subject: [Python-checkins] Allow Windows layout builds to fully skip code signing (GH-12808) Message-ID: https://github.com/python/cpython/commit/c05c1165abe7614ab3530adf8bc6c6cdefa9d0af commit: c05c1165abe7614ab3530adf8bc6c6cdefa9d0af branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-12T11:44:00-07:00 summary: Allow Windows layout builds to fully skip code signing (GH-12808) (cherry picked from commit 606c66a17faf34a4e74d4829e8fe5ad0d2879434) Co-authored-by: Steve Dower files: M Tools/msi/make_cat.ps1 M Tools/msi/sdktools.psm1 diff --git a/Tools/msi/make_cat.ps1 b/Tools/msi/make_cat.ps1 index 70741439869a..cc3cd4a2b50c 100644 --- a/Tools/msi/make_cat.ps1 +++ b/Tools/msi/make_cat.ps1 @@ -16,6 +16,7 @@ #> param( [Parameter(Mandatory=$true)][string]$catalog, + [switch]$sign, [string]$description, [string]$certname, [string]$certsha1, @@ -31,4 +32,6 @@ MakeCat $catalog if (-not $?) 
{ throw "Catalog compilation failed" } -Sign-File -certname $certname -certsha1 $certsha1 -certfile $certfile -description $description -files @($catalog -replace 'cdf$', 'cat') +if ($sign) { + Sign-File -certname $certname -certsha1 $certsha1 -certfile $certfile -description $description -files @($catalog -replace 'cdf$', 'cat') +} diff --git a/Tools/msi/sdktools.psm1 b/Tools/msi/sdktools.psm1 index 61edb3411760..8081b104d85a 100644 --- a/Tools/msi/sdktools.psm1 +++ b/Tools/msi/sdktools.psm1 @@ -31,6 +31,10 @@ function Sign-File { $certfile = $env:SigningCertificateFile; } + if (-not ($certsha1 -or $certname -or $certfile)) { + throw "No signing certificate specified" + } + foreach ($a in $files) { if ($certsha1) { SignTool sign /sha1 $certsha1 /fd sha256 /t http://timestamp.verisign.com/scripts/timestamp.dll /d $description $a @@ -38,8 +42,6 @@ function Sign-File { SignTool sign /a /n $certname /fd sha256 /t http://timestamp.verisign.com/scripts/timestamp.dll /d $description $a } elseif ($certfile) { SignTool sign /f $certfile /fd sha256 /t http://timestamp.verisign.com/scripts/timestamp.dll /d $description $a - } else { - SignTool sign /a /fd sha256 /t http://timestamp.verisign.com/scripts/timestamp.dll /d $description $a } } } From webhook-mailer at python.org Fri Apr 12 15:27:50 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Fri, 12 Apr 2019 19:27:50 -0000 Subject: [Python-checkins] bpo-36618: Add -fmax-type-align=8 flag for clang (GH-12809) Message-ID: https://github.com/python/cpython/commit/23a683adf803eef405d248cc9c2a7eb08a7300e2 commit: 23a683adf803eef405d248cc9c2a7eb08a7300e2 branch: master author: Victor Stinner committer: GitHub date: 2019-04-12T21:27:37+02:00 summary: bpo-36618: Add -fmax-type-align=8 flag for clang (GH-12809) Add -fmax-type-align=8 to CFLAGS when clang compiler is detected. The pymalloc memory allocator aligns memory on 8 bytes. On x86-64, clang expects alignment on 16 bytes by default and so uses MOVAPS instruction which can lead to segmentation fault. Instruct clang that Python is limited to alignemnt on 8 bytes to use MOVUPS instruction instead: slower but don't trigger a SIGSEGV if the memory is not aligned on 16 bytes. Sadly, the flag must be expected to CFLAGS and not just CFLAGS_NODIST, since third party C extensions can have the same issue. files: A Misc/NEWS.d/next/Build/2019-04-12-19-49-10.bpo-36618.gcI9iq.rst M configure M configure.ac diff --git a/Misc/NEWS.d/next/Build/2019-04-12-19-49-10.bpo-36618.gcI9iq.rst b/Misc/NEWS.d/next/Build/2019-04-12-19-49-10.bpo-36618.gcI9iq.rst new file mode 100644 index 000000000000..597dd67b46e0 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2019-04-12-19-49-10.bpo-36618.gcI9iq.rst @@ -0,0 +1,8 @@ +Add ``-fmax-type-align=8`` to CFLAGS when clang compiler is detected. The +pymalloc memory allocator aligns memory on 8 bytes. On x86-64, clang expects +alignment on 16 bytes by default and so uses MOVAPS instruction which can +lead to segmentation fault. Instruct clang that Python is limited to +alignemnt on 8 bytes to use MOVUPS instruction instead: slower but don't +trigger a SIGSEGV if the memory is not aligned on 16 bytes. Sadly, the flag +must be expected to ``CFLAGS`` and not just ``CFLAGS_NODIST``, since third +party C extensions can have the same issue. diff --git a/configure b/configure index 72589fdb78c5..ac1e66a96bb6 100755 --- a/configure +++ b/configure @@ -6813,6 +6813,19 @@ esac # compiler and platform. BASECFLAGS tweaks need to be made even if the # user set OPT. 
+case $CC in + *clang*) + cc_is_clang=1 + ;; + *) + if $CC --version 2>&1 | grep -q clang + then + cc_is_clang=1 + else + cc_is_clang= + fi +esac + # tweak OPT based on compiler and platform, only if the user didn't set # it on the command line @@ -6826,19 +6839,6 @@ then WRAP="-fwrapv" fi - case $CC in - *clang*) - cc_is_clang=1 - ;; - *) - if $CC --version 2>&1 | grep -q clang - then - cc_is_clang=1 - else - cc_is_clang= - fi - esac - if test -n "${cc_is_clang}" then # Clang also needs -fwrapv @@ -6879,6 +6879,21 @@ then esac fi +if test -n "${cc_is_clang}" +then + # bpo-36618: Add -fmax-type-align=8 to CFLAGS when clang compiler is + # detected. The pymalloc memory allocator aligns memory on 8 bytes. On + # x86-64, clang expects alignment on 16 bytes by default and so uses MOVAPS + # instruction which can lead to segmentation fault. Instruct clang that + # Python is limited to alignemnt on 8 bytes to use MOVUPS instruction + # instead: slower but don't trigger a SIGSEGV if the memory is not aligned + # on 16 bytes. + # + # Sadly, the flag must be expected to CFLAGS and not just CFLAGS_NODIST, + # since third party C extensions can have the same issue. + CFLAGS="$CFLAGS -fmax-type-align=8" +fi + @@ -10200,6 +10215,7 @@ fi + if test "x$ac_cv_env_PKG_CONFIG_set" != "xset"; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}pkg-config", so it can be a program name with args. diff --git a/configure.ac b/configure.ac index 30e8587cb08f..863b34245ad6 100644 --- a/configure.ac +++ b/configure.ac @@ -1464,6 +1464,19 @@ esac # compiler and platform. BASECFLAGS tweaks need to be made even if the # user set OPT. +case $CC in + *clang*) + cc_is_clang=1 + ;; + *) + if $CC --version 2>&1 | grep -q clang + then + cc_is_clang=1 + else + cc_is_clang= + fi +esac + # tweak OPT based on compiler and platform, only if the user didn't set # it on the command line AC_SUBST(OPT) @@ -1477,19 +1490,6 @@ then WRAP="-fwrapv" fi - case $CC in - *clang*) - cc_is_clang=1 - ;; - *) - if $CC --version 2>&1 | grep -q clang - then - cc_is_clang=1 - else - cc_is_clang= - fi - esac - if test -n "${cc_is_clang}" then # Clang also needs -fwrapv @@ -1530,6 +1530,21 @@ then esac fi +if test -n "${cc_is_clang}" +then + # bpo-36618: Add -fmax-type-align=8 to CFLAGS when clang compiler is + # detected. The pymalloc memory allocator aligns memory on 8 bytes. On + # x86-64, clang expects alignment on 16 bytes by default and so uses MOVAPS + # instruction which can lead to segmentation fault. Instruct clang that + # Python is limited to alignemnt on 8 bytes to use MOVUPS instruction + # instead: slower but don't trigger a SIGSEGV if the memory is not aligned + # on 16 bytes. + # + # Sadly, the flag must be expected to CFLAGS and not just CFLAGS_NODIST, + # since third party C extensions can have the same issue. 
+ CFLAGS="$CFLAGS -fmax-type-align=8" +fi + AC_SUBST(BASECFLAGS) AC_SUBST(CFLAGS_NODIST) AC_SUBST(LDFLAGS_NODIST) From webhook-mailer at python.org Fri Apr 12 15:51:39 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Fri, 12 Apr 2019 19:51:39 -0000 Subject: [Python-checkins] bpo-36389: Add _PyObject_CheckConsistency() function (GH-12803) Message-ID: https://github.com/python/cpython/commit/0fc91eef34a1d9194904fa093c9fbd711af0f26c commit: 0fc91eef34a1d9194904fa093c9fbd711af0f26c branch: master author: Victor Stinner committer: GitHub date: 2019-04-12T21:51:34+02:00 summary: bpo-36389: Add _PyObject_CheckConsistency() function (GH-12803) Add a new _PyObject_CheckConsistency() function which can be used to help debugging. The function is available in release mode. Add a 'check_content' parameter to _PyDict_CheckConsistency(). files: M Include/cpython/object.h M Include/internal/pycore_object.h M Objects/dictobject.c M Objects/object.c M Objects/typeobject.c M Objects/unicodeobject.c diff --git a/Include/cpython/object.h b/Include/cpython/object.h index 64d196a722ee..ba52a4835823 100644 --- a/Include/cpython/object.h +++ b/Include/cpython/object.h @@ -446,6 +446,21 @@ PyAPI_FUNC(void) _PyObject_AssertFailed( int line, const char *function); +/* Check if an object is consistent. For example, ensure that the reference + counter is greater than or equal to 1, and ensure that ob_type is not NULL. + + Call _PyObject_AssertFailed() if the object is inconsistent. + + If check_content is zero, only check header fields: reduce the overhead. + + The function always return 1. The return value is just here to be able to + write: + + assert(_PyObject_CheckConsistency(obj, 1)); */ +PyAPI_FUNC(int) _PyObject_CheckConsistency( + PyObject *op, + int check_content); + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h index a88b626332f1..c95595358a9e 100644 --- a/Include/internal/pycore_object.h +++ b/Include/internal/pycore_object.h @@ -10,6 +10,10 @@ extern "C" { #include "pycore_pystate.h" /* _PyRuntime */ +PyAPI_FUNC(int) _PyType_CheckConsistency(PyTypeObject *type); +PyAPI_FUNC(int) _PyUnicode_CheckConsistency(PyObject *op, int check_content); +PyAPI_FUNC(int) _PyDict_CheckConsistency(PyObject *mp, int check_content); + /* Tell the GC to track this object. 
* * NB: While the object is tracked by the collector, it must be safe to call the diff --git a/Objects/dictobject.c b/Objects/dictobject.c index 9ff009f6aa4e..9b5c0a3be9ab 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -449,77 +449,77 @@ static PyObject *empty_values[1] = { NULL }; /* Uncomment to check the dict content in _PyDict_CheckConsistency() */ /* #define DEBUG_PYDICT */ +#ifdef DEBUG_PYDICT +# define ASSERT_CONSISTENT(op) assert(_PyDict_CheckConsistency((PyObject *)(op), 1)) +#else +# define ASSERT_CONSISTENT(op) assert(_PyDict_CheckConsistency((PyObject *)(op), 0)) +#endif -#ifndef NDEBUG -static int -_PyDict_CheckConsistency(PyDictObject *mp) + +int +_PyDict_CheckConsistency(PyObject *op, int check_content) { -#define ASSERT(expr) _PyObject_ASSERT((PyObject *)mp, (expr)) + _PyObject_ASSERT(op, PyDict_Check(op)); + PyDictObject *mp = (PyDictObject *)op; PyDictKeysObject *keys = mp->ma_keys; int splitted = _PyDict_HasSplitTable(mp); Py_ssize_t usable = USABLE_FRACTION(keys->dk_size); -#ifdef DEBUG_PYDICT - PyDictKeyEntry *entries = DK_ENTRIES(keys); - Py_ssize_t i; -#endif - ASSERT(0 <= mp->ma_used && mp->ma_used <= usable); - ASSERT(IS_POWER_OF_2(keys->dk_size)); - ASSERT(0 <= keys->dk_usable - && keys->dk_usable <= usable); - ASSERT(0 <= keys->dk_nentries - && keys->dk_nentries <= usable); - ASSERT(keys->dk_usable + keys->dk_nentries <= usable); + _PyObject_ASSERT(op, 0 <= mp->ma_used && mp->ma_used <= usable); + _PyObject_ASSERT(op, IS_POWER_OF_2(keys->dk_size)); + _PyObject_ASSERT(op, 0 <= keys->dk_usable && keys->dk_usable <= usable); + _PyObject_ASSERT(op, 0 <= keys->dk_nentries && keys->dk_nentries <= usable); + _PyObject_ASSERT(op, keys->dk_usable + keys->dk_nentries <= usable); if (!splitted) { /* combined table */ - ASSERT(keys->dk_refcnt == 1); + _PyObject_ASSERT(op, keys->dk_refcnt == 1); } -#ifdef DEBUG_PYDICT - for (i=0; i < keys->dk_size; i++) { - Py_ssize_t ix = dictkeys_get_index(keys, i); - ASSERT(DKIX_DUMMY <= ix && ix <= usable); - } + if (check_content) { + PyDictKeyEntry *entries = DK_ENTRIES(keys); + Py_ssize_t i; + + for (i=0; i < keys->dk_size; i++) { + Py_ssize_t ix = dictkeys_get_index(keys, i); + _PyObject_ASSERT(op, DKIX_DUMMY <= ix && ix <= usable); + } - for (i=0; i < usable; i++) { - PyDictKeyEntry *entry = &entries[i]; - PyObject *key = entry->me_key; + for (i=0; i < usable; i++) { + PyDictKeyEntry *entry = &entries[i]; + PyObject *key = entry->me_key; - if (key != NULL) { - if (PyUnicode_CheckExact(key)) { - Py_hash_t hash = ((PyASCIIObject *)key)->hash; - ASSERT(hash != -1); - ASSERT(entry->me_hash == hash); - } - else { - /* test_dict fails if PyObject_Hash() is called again */ - ASSERT(entry->me_hash != -1); + if (key != NULL) { + if (PyUnicode_CheckExact(key)) { + Py_hash_t hash = ((PyASCIIObject *)key)->hash; + _PyObject_ASSERT(op, hash != -1); + _PyObject_ASSERT(op, entry->me_hash == hash); + } + else { + /* test_dict fails if PyObject_Hash() is called again */ + _PyObject_ASSERT(op, entry->me_hash != -1); + } + if (!splitted) { + _PyObject_ASSERT(op, entry->me_value != NULL); + } } - if (!splitted) { - ASSERT(entry->me_value != NULL); + + if (splitted) { + _PyObject_ASSERT(op, entry->me_value == NULL); } } if (splitted) { - ASSERT(entry->me_value == NULL); + /* splitted table */ + for (i=0; i < mp->ma_used; i++) { + _PyObject_ASSERT(op, mp->ma_values[i] != NULL); + } } } - if (splitted) { - /* splitted table */ - for (i=0; i < mp->ma_used; i++) { - ASSERT(mp->ma_values[i] != NULL); - } - } -#endif - return 1; - -#undef 
ASSERT } -#endif static PyDictKeysObject *new_keys_object(Py_ssize_t size) @@ -614,7 +614,7 @@ new_dict(PyDictKeysObject *keys, PyObject **values) mp->ma_values = values; mp->ma_used = 0; mp->ma_version_tag = DICT_NEXT_VERSION(); - assert(_PyDict_CheckConsistency(mp)); + ASSERT_CONSISTENT(mp); return (PyObject *)mp; } @@ -675,7 +675,7 @@ clone_combined_dict(PyDictObject *orig) return NULL; } new->ma_used = orig->ma_used; - assert(_PyDict_CheckConsistency(new)); + ASSERT_CONSISTENT(new); if (_PyObject_GC_IS_TRACKED(orig)) { /* Maintain tracking. */ _PyObject_GC_TRACK(new); @@ -1075,7 +1075,7 @@ insertdict(PyDictObject *mp, PyObject *key, Py_hash_t hash, PyObject *value) mp->ma_keys->dk_usable--; mp->ma_keys->dk_nentries++; assert(mp->ma_keys->dk_usable >= 0); - assert(_PyDict_CheckConsistency(mp)); + ASSERT_CONSISTENT(mp); return 0; } @@ -1094,7 +1094,7 @@ insertdict(PyDictObject *mp, PyObject *key, Py_hash_t hash, PyObject *value) mp->ma_version_tag = DICT_NEXT_VERSION(); Py_XDECREF(old_value); /* which **CAN** re-enter (see issue #22653) */ - assert(_PyDict_CheckConsistency(mp)); + ASSERT_CONSISTENT(mp); Py_DECREF(key); return 0; @@ -1582,7 +1582,7 @@ delitem_common(PyDictObject *mp, Py_hash_t hash, Py_ssize_t ix, Py_DECREF(old_key); Py_DECREF(old_value); - assert(_PyDict_CheckConsistency(mp)); + ASSERT_CONSISTENT(mp); return 0; } @@ -1722,7 +1722,7 @@ PyDict_Clear(PyObject *op) assert(oldkeys->dk_refcnt == 1); dictkeys_decref(oldkeys); } - assert(_PyDict_CheckConsistency(mp)); + ASSERT_CONSISTENT(mp); } /* Internal version of PyDict_Next that returns a hash value in addition @@ -1852,7 +1852,7 @@ _PyDict_Pop_KnownHash(PyObject *dict, PyObject *key, Py_hash_t hash, PyObject *d ep->me_value = NULL; Py_DECREF(old_key); - assert(_PyDict_CheckConsistency(mp)); + ASSERT_CONSISTENT(mp); return old_value; } @@ -2434,7 +2434,7 @@ PyDict_MergeFromSeq2(PyObject *d, PyObject *seq2, int override) } i = 0; - assert(_PyDict_CheckConsistency((PyDictObject *)d)); + ASSERT_CONSISTENT(d); goto Return; Fail: Py_XDECREF(item); @@ -2586,7 +2586,7 @@ dict_merge(PyObject *a, PyObject *b, int override) /* Iterator completed, via error */ return -1; } - assert(_PyDict_CheckConsistency((PyDictObject *)a)); + ASSERT_CONSISTENT(a); return 0; } @@ -2950,7 +2950,7 @@ PyDict_SetDefault(PyObject *d, PyObject *key, PyObject *defaultobj) mp->ma_version_tag = DICT_NEXT_VERSION(); } - assert(_PyDict_CheckConsistency(mp)); + ASSERT_CONSISTENT(mp); return value; } @@ -3069,7 +3069,7 @@ dict_popitem_impl(PyDictObject *self) self->ma_keys->dk_nentries = i; self->ma_used--; self->ma_version_tag = DICT_NEXT_VERSION(); - assert(_PyDict_CheckConsistency(self)); + ASSERT_CONSISTENT(self); return res; } @@ -3275,7 +3275,7 @@ dict_new(PyTypeObject *type, PyObject *args, PyObject *kwds) Py_DECREF(self); return NULL; } - assert(_PyDict_CheckConsistency(d)); + ASSERT_CONSISTENT(d); return self; } diff --git a/Objects/object.c b/Objects/object.c index 3fad73c493db..e7ec7aec490f 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -2,6 +2,7 @@ /* Generic object operations; and implementation of None */ #include "Python.h" +#include "pycore_object.h" #include "pycore_pystate.h" #include "pycore_context.h" #include "frameobject.h" @@ -19,6 +20,28 @@ _Py_IDENTIFIER(__bytes__); _Py_IDENTIFIER(__dir__); _Py_IDENTIFIER(__isabstractmethod__); + +int +_PyObject_CheckConsistency(PyObject *op, int check_content) +{ + _PyObject_ASSERT(op, op != NULL); + _PyObject_ASSERT(op, !_PyObject_IsFreed(op)); + _PyObject_ASSERT(op, Py_REFCNT(op) >= 1); + + 
PyTypeObject *type = op->ob_type; + _PyObject_ASSERT(op, type != NULL); + _PyType_CheckConsistency(type); + + if (PyUnicode_Check(op)) { + _PyUnicode_CheckConsistency(op, check_content); + } + else if (PyDict_Check(op)) { + _PyDict_CheckConsistency(op, check_content); + } + return 1; +} + + #ifdef Py_REF_DEBUG Py_ssize_t _Py_RefTotal; @@ -2136,7 +2159,13 @@ _PyObject_AssertFailed(PyObject *obj, const char *expr, const char *msg, else if (_PyObject_IsFreed(obj)) { /* It seems like the object memory has been freed: don't access it to prevent a segmentation fault. */ - fprintf(stderr, "\n"); + fprintf(stderr, "\n"); + } + else if (Py_TYPE(obj) == NULL) { + fprintf(stderr, "\n"); + } + else if (_PyObject_IsFreed((PyObject *)Py_TYPE(obj))) { + fprintf(stderr, "\n", Py_TYPE(obj)); } else { /* Diplay the traceback where the object has been allocated. diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 4c3909c098c4..37df4d23e4c1 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -131,8 +131,7 @@ skip_signature(const char *doc) return NULL; } -#ifndef NDEBUG -static int +int _PyType_CheckConsistency(PyTypeObject *type) { #define ASSERT(expr) _PyObject_ASSERT((PyObject *)type, (expr)) @@ -142,14 +141,16 @@ _PyType_CheckConsistency(PyTypeObject *type) return 1; } + ASSERT(!_PyObject_IsFreed((PyObject *)type)); + ASSERT(Py_REFCNT(type) >= 1); + ASSERT(PyType_Check(type)); + ASSERT(!(type->tp_flags & Py_TPFLAGS_READYING)); - ASSERT(type->tp_mro != NULL && PyTuple_Check(type->tp_mro)); ASSERT(type->tp_dict != NULL); - return 1; + return 1; #undef ASSERT } -#endif static const char * _PyType_DocWithoutSignature(const char *name, const char *internal_doc) diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index e00dc37974f8..f6e68c94df55 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -401,23 +401,20 @@ PyUnicode_GetMax(void) #endif } -#ifdef Py_DEBUG int _PyUnicode_CheckConsistency(PyObject *op, int check_content) { -#define ASSERT(expr) _PyObject_ASSERT(op, (expr)) - PyASCIIObject *ascii; unsigned int kind; - ASSERT(PyUnicode_Check(op)); + _PyObject_ASSERT(op, PyUnicode_Check(op)); ascii = (PyASCIIObject *)op; kind = ascii->state.kind; if (ascii->state.ascii == 1 && ascii->state.compact == 1) { - ASSERT(kind == PyUnicode_1BYTE_KIND); - ASSERT(ascii->state.ready == 1); + _PyObject_ASSERT(op, kind == PyUnicode_1BYTE_KIND); + _PyObject_ASSERT(op, ascii->state.ready == 1); } else { PyCompactUnicodeObject *compact = (PyCompactUnicodeObject *)op; @@ -425,41 +422,41 @@ _PyUnicode_CheckConsistency(PyObject *op, int check_content) if (ascii->state.compact == 1) { data = compact + 1; - ASSERT(kind == PyUnicode_1BYTE_KIND - || kind == PyUnicode_2BYTE_KIND - || kind == PyUnicode_4BYTE_KIND); - ASSERT(ascii->state.ascii == 0); - ASSERT(ascii->state.ready == 1); - ASSERT (compact->utf8 != data); + _PyObject_ASSERT(op, kind == PyUnicode_1BYTE_KIND + || kind == PyUnicode_2BYTE_KIND + || kind == PyUnicode_4BYTE_KIND); + _PyObject_ASSERT(op, ascii->state.ascii == 0); + _PyObject_ASSERT(op, ascii->state.ready == 1); + _PyObject_ASSERT(op, compact->utf8 != data); } else { PyUnicodeObject *unicode = (PyUnicodeObject *)op; data = unicode->data.any; if (kind == PyUnicode_WCHAR_KIND) { - ASSERT(ascii->length == 0); - ASSERT(ascii->hash == -1); - ASSERT(ascii->state.compact == 0); - ASSERT(ascii->state.ascii == 0); - ASSERT(ascii->state.ready == 0); - ASSERT(ascii->state.interned == SSTATE_NOT_INTERNED); - ASSERT(ascii->wstr != NULL); - ASSERT(data == NULL); - 
ASSERT(compact->utf8 == NULL); + _PyObject_ASSERT(op, ascii->length == 0); + _PyObject_ASSERT(op, ascii->hash == -1); + _PyObject_ASSERT(op, ascii->state.compact == 0); + _PyObject_ASSERT(op, ascii->state.ascii == 0); + _PyObject_ASSERT(op, ascii->state.ready == 0); + _PyObject_ASSERT(op, ascii->state.interned == SSTATE_NOT_INTERNED); + _PyObject_ASSERT(op, ascii->wstr != NULL); + _PyObject_ASSERT(op, data == NULL); + _PyObject_ASSERT(op, compact->utf8 == NULL); } else { - ASSERT(kind == PyUnicode_1BYTE_KIND - || kind == PyUnicode_2BYTE_KIND - || kind == PyUnicode_4BYTE_KIND); - ASSERT(ascii->state.compact == 0); - ASSERT(ascii->state.ready == 1); - ASSERT(data != NULL); + _PyObject_ASSERT(op, kind == PyUnicode_1BYTE_KIND + || kind == PyUnicode_2BYTE_KIND + || kind == PyUnicode_4BYTE_KIND); + _PyObject_ASSERT(op, ascii->state.compact == 0); + _PyObject_ASSERT(op, ascii->state.ready == 1); + _PyObject_ASSERT(op, data != NULL); if (ascii->state.ascii) { - ASSERT (compact->utf8 == data); - ASSERT (compact->utf8_length == ascii->length); + _PyObject_ASSERT(op, compact->utf8 == data); + _PyObject_ASSERT(op, compact->utf8_length == ascii->length); } else - ASSERT (compact->utf8 != data); + _PyObject_ASSERT(op, compact->utf8 != data); } } if (kind != PyUnicode_WCHAR_KIND) { @@ -471,20 +468,20 @@ _PyUnicode_CheckConsistency(PyObject *op, int check_content) #endif ) { - ASSERT(ascii->wstr == data); - ASSERT(compact->wstr_length == ascii->length); + _PyObject_ASSERT(op, ascii->wstr == data); + _PyObject_ASSERT(op, compact->wstr_length == ascii->length); } else - ASSERT(ascii->wstr != data); + _PyObject_ASSERT(op, ascii->wstr != data); } if (compact->utf8 == NULL) - ASSERT(compact->utf8_length == 0); + _PyObject_ASSERT(op, compact->utf8_length == 0); if (ascii->wstr == NULL) - ASSERT(compact->wstr_length == 0); + _PyObject_ASSERT(op, compact->wstr_length == 0); } - /* check that the best kind is used */ - if (check_content && kind != PyUnicode_WCHAR_KIND) - { + + /* check that the best kind is used: O(n) operation */ + if (check_content && kind != PyUnicode_WCHAR_KIND) { Py_ssize_t i; Py_UCS4 maxchar = 0; void *data; @@ -499,27 +496,25 @@ _PyUnicode_CheckConsistency(PyObject *op, int check_content) } if (kind == PyUnicode_1BYTE_KIND) { if (ascii->state.ascii == 0) { - ASSERT(maxchar >= 128); - ASSERT(maxchar <= 255); + _PyObject_ASSERT(op, maxchar >= 128); + _PyObject_ASSERT(op, maxchar <= 255); } else - ASSERT(maxchar < 128); + _PyObject_ASSERT(op, maxchar < 128); } else if (kind == PyUnicode_2BYTE_KIND) { - ASSERT(maxchar >= 0x100); - ASSERT(maxchar <= 0xFFFF); + _PyObject_ASSERT(op, maxchar >= 0x100); + _PyObject_ASSERT(op, maxchar <= 0xFFFF); } else { - ASSERT(maxchar >= 0x10000); - ASSERT(maxchar <= MAX_UNICODE); + _PyObject_ASSERT(op, maxchar >= 0x10000); + _PyObject_ASSERT(op, maxchar <= MAX_UNICODE); } - ASSERT(PyUnicode_READ(kind, data, ascii->length) == 0); + _PyObject_ASSERT(op, PyUnicode_READ(kind, data, ascii->length) == 0); } return 1; - -#undef ASSERT } -#endif + static PyObject* unicode_result_wchar(PyObject *unicode) From webhook-mailer at python.org Fri Apr 12 15:54:10 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Fri, 12 Apr 2019 19:54:10 -0000 Subject: [Python-checkins] bpo-36611: Disable serialno field of debug memory allocators (#12796) Message-ID: https://github.com/python/cpython/commit/e8f9acf03484c6c3f163f04a76321419369c28aa commit: e8f9acf03484c6c3f163f04a76321419369c28aa branch: master author: Victor Stinner committer: GitHub date: 
2019-04-12T21:54:06+02:00 summary: bpo-36611: Disable serialno field of debug memory allocators (#12796) Omit serialno field from debug hooks on Python memory allocators to reduce the memory footprint by 5%. Enable tracemalloc to get the traceback where a memory block has been allocated when a fatal memory error is logged to decide where to put a breakpoint. Compile Python with PYMEM_DEBUG_SERIALNO defined to get back the field. files: A Misc/NEWS.d/next/Core and Builtins/2019-04-12-12-32-39.bpo-36611.zbo9WQ.rst M Lib/test/test_capi.py M Objects/obmalloc.c diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py index 33c98ac28bc5..31dab6a423e2 100644 --- a/Lib/test/test_capi.py +++ b/Lib/test/test_capi.py @@ -483,7 +483,7 @@ def test_buffer_overflow(self): r" at tail\+1: 0xfd\n" r" at tail\+2: 0xfd\n" r" .*\n" - r" The block was made by call #[0-9]+ to debug malloc/realloc.\n" + r"( The block was made by call #[0-9]+ to debug malloc/realloc.\n)?" r" Data at p: cd cd cd .*\n" r"\n" r"Enable tracemalloc to get the memory block allocation traceback\n" @@ -499,7 +499,7 @@ def test_api_misuse(self): r" 16 bytes originally requested\n" r" The [0-9] pad bytes at p-[0-9] are FORBIDDENBYTE, as expected.\n" r" The [0-9] pad bytes at tail={ptr} are FORBIDDENBYTE, as expected.\n" - r" The block was made by call #[0-9]+ to debug malloc/realloc.\n" + r"( The block was made by call #[0-9]+ to debug malloc/realloc.\n)?" r" Data at p: cd cd cd .*\n" r"\n" r"Enable tracemalloc to get the memory block allocation traceback\n" diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-04-12-12-32-39.bpo-36611.zbo9WQ.rst b/Misc/NEWS.d/next/Core and Builtins/2019-04-12-12-32-39.bpo-36611.zbo9WQ.rst new file mode 100644 index 000000000000..f55a9efc5d38 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2019-04-12-12-32-39.bpo-36611.zbo9WQ.rst @@ -0,0 +1,5 @@ +Debug memory allocators: disable serialno field by default from debug hooks on +Python memory allocators to reduce the memory footprint by 5%. Enable +:mod:`tracemalloc` to get the traceback where a memory block has been allocated +when a fatal memory error is logged to decide where to put a breakpoint. +Compile Python with ``PYMEM_DEBUG_SERIALNO`` defined to get back the field. diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c index be43c7a1c2b8..3ee143549d21 100644 --- a/Objects/obmalloc.c +++ b/Objects/obmalloc.c @@ -1926,6 +1926,10 @@ _Py_GetAllocatedBlocks(void) #define DEADBYTE 0xDD /* dead (newly freed) memory */ #define FORBIDDENBYTE 0xFD /* untouchable bytes at each end of a block */ +/* Uncomment this define to add the "serialno" field */ +/* #define PYMEM_DEBUG_SERIALNO */ + +#ifdef PYMEM_DEBUG_SERIALNO static size_t serialno = 0; /* incremented on each debug {m,re}alloc */ /* serialno is always incremented via calling this routine. The point is @@ -1936,9 +1940,16 @@ bumpserialno(void) { ++serialno; } +#endif #define SST SIZEOF_SIZE_T +#ifdef PYMEM_DEBUG_SERIALNO +# define PYMEM_DEBUG_EXTRA_BYTES 4 * SST +#else +# define PYMEM_DEBUG_EXTRA_BYTES 3 * SST +#endif + /* Read sizeof(size_t) bytes at p as a big-endian size_t. */ static size_t read_size_t(const void *p) @@ -1967,7 +1978,7 @@ write_size_t(void *p, size_t n) } } -/* Let S = sizeof(size_t). The debug malloc asks for 4*S extra bytes and +/* Let S = sizeof(size_t). 
The debug malloc asks for 4 * S extra bytes and fills them with useful stuff, here calling the underlying malloc's result p: p[0: S] @@ -1991,6 +2002,9 @@ p[2*S+n+S: 2*S+n+2*S] If "bad memory" is detected later, the serial number gives an excellent way to set a breakpoint on the next run, to capture the instant at which this block was passed out. + +If PYMEM_DEBUG_SERIALNO is not defined (default), the debug malloc only asks +for 3 * S extra bytes, and omits the last serialno field. */ static void * @@ -2000,21 +2014,24 @@ _PyMem_DebugRawAlloc(int use_calloc, void *ctx, size_t nbytes) uint8_t *p; /* base address of malloc'ed pad block */ uint8_t *data; /* p + 2*SST == pointer to data bytes */ uint8_t *tail; /* data + nbytes == pointer to tail pad bytes */ - size_t total; /* 2 * SST + nbytes + 2 * SST */ + size_t total; /* nbytes + PYMEM_DEBUG_EXTRA_BYTES */ - if (nbytes > (size_t)PY_SSIZE_T_MAX - 4 * SST) { + if (nbytes > (size_t)PY_SSIZE_T_MAX - PYMEM_DEBUG_EXTRA_BYTES) { /* integer overflow: can't represent total as a Py_ssize_t */ return NULL; } - total = nbytes + 4 * SST; + total = nbytes + PYMEM_DEBUG_EXTRA_BYTES; /* Layout: [SSSS IFFF CCCC...CCCC FFFF NNNN] - * ^--- p ^--- data ^--- tail + ^--- p ^--- data ^--- tail S: nbytes stored as size_t I: API identifier (1 byte) F: Forbidden bytes (size_t - 1 bytes before, size_t bytes after) C: Clean bytes used later to store actual data - N: Serial number stored as size_t */ + N: Serial number stored as size_t + + If PYMEM_DEBUG_SERIALNO is not defined (default), the last NNNN field + is omitted. */ if (use_calloc) { p = (uint8_t *)api->alloc.calloc(api->alloc.ctx, 1, total); @@ -2027,7 +2044,9 @@ _PyMem_DebugRawAlloc(int use_calloc, void *ctx, size_t nbytes) } data = p + 2*SST; +#ifdef PYMEM_DEBUG_SERIALNO bumpserialno(); +#endif /* at p, write size (SST bytes), id (1 byte), pad (SST-1 bytes) */ write_size_t(p, nbytes); @@ -2041,7 +2060,9 @@ _PyMem_DebugRawAlloc(int use_calloc, void *ctx, size_t nbytes) /* at tail, write pad (SST bytes) and serialno (SST bytes) */ tail = data + nbytes; memset(tail, FORBIDDENBYTE, SST); +#ifdef PYMEM_DEBUG_SERIALNO write_size_t(tail + SST, serialno); +#endif return data; } @@ -2081,7 +2102,7 @@ _PyMem_DebugRawFree(void *ctx, void *p) _PyMem_DebugCheckAddress(api->api_id, p); nbytes = read_size_t(q); - nbytes += 4 * SST; + nbytes += PYMEM_DEBUG_EXTRA_BYTES; memset(q, DEADBYTE, nbytes); api->alloc.free(api->alloc.ctx, q); } @@ -2101,7 +2122,6 @@ _PyMem_DebugRawRealloc(void *ctx, void *p, size_t nbytes) uint8_t *tail; /* data + nbytes == pointer to tail pad bytes */ size_t total; /* 2 * SST + nbytes + 2 * SST */ size_t original_nbytes; - size_t block_serialno; #define ERASED_SIZE 64 uint8_t save[2*ERASED_SIZE]; /* A copy of erased bytes. */ @@ -2110,47 +2130,57 @@ _PyMem_DebugRawRealloc(void *ctx, void *p, size_t nbytes) data = (uint8_t *)p; head = data - 2*SST; original_nbytes = read_size_t(head); - if (nbytes > (size_t)PY_SSIZE_T_MAX - 4*SST) { + if (nbytes > (size_t)PY_SSIZE_T_MAX - PYMEM_DEBUG_EXTRA_BYTES) { /* integer overflow: can't represent total as a Py_ssize_t */ return NULL; } - total = nbytes + 4*SST; + total = nbytes + PYMEM_DEBUG_EXTRA_BYTES; tail = data + original_nbytes; - block_serialno = read_size_t(tail + SST); +#ifdef PYMEM_DEBUG_SERIALNO + size_t block_serialno = read_size_t(tail + SST); +#endif /* Mark the header, the trailer, ERASED_SIZE bytes at the begin and ERASED_SIZE bytes at the end as dead and save the copy of erased bytes. 
*/ if (original_nbytes <= sizeof(save)) { memcpy(save, data, original_nbytes); - memset(data - 2*SST, DEADBYTE, original_nbytes + 4*SST); + memset(data - 2 * SST, DEADBYTE, + original_nbytes + PYMEM_DEBUG_EXTRA_BYTES); } else { memcpy(save, data, ERASED_SIZE); - memset(head, DEADBYTE, ERASED_SIZE + 2*SST); + memset(head, DEADBYTE, ERASED_SIZE + 2 * SST); memcpy(&save[ERASED_SIZE], tail - ERASED_SIZE, ERASED_SIZE); - memset(tail - ERASED_SIZE, DEADBYTE, ERASED_SIZE + 2*SST); + memset(tail - ERASED_SIZE, DEADBYTE, + ERASED_SIZE + PYMEM_DEBUG_EXTRA_BYTES - 2 * SST); } /* Resize and add decorations. */ r = (uint8_t *)api->alloc.realloc(api->alloc.ctx, head, total); if (r == NULL) { + /* if realloc() failed: rewrite header and footer which have + just been erased */ nbytes = original_nbytes; } else { head = r; +#ifdef PYMEM_DEBUG_SERIALNO bumpserialno(); block_serialno = serialno; +#endif } + data = head + 2*SST; write_size_t(head, nbytes); head[SST] = (uint8_t)api->api_id; memset(head + SST + 1, FORBIDDENBYTE, SST-1); - data = head + 2*SST; tail = data + nbytes; memset(tail, FORBIDDENBYTE, SST); +#ifdef PYMEM_DEBUG_SERIALNO write_size_t(tail + SST, block_serialno); +#endif /* Restore saved bytes. */ if (original_nbytes <= sizeof(save)) { @@ -2170,7 +2200,7 @@ _PyMem_DebugRawRealloc(void *ctx, void *p, size_t nbytes) } if (nbytes > original_nbytes) { - /* growing: mark new extra memory clean */ + /* growing: mark new extra memory clean */ memset(data + original_nbytes, CLEANBYTE, nbytes - original_nbytes); } @@ -2278,7 +2308,7 @@ _PyObject_DebugDumpAddress(const void *p) { const uint8_t *q = (const uint8_t *)p; const uint8_t *tail; - size_t nbytes, serial; + size_t nbytes; int i; int ok; char id; @@ -2347,9 +2377,11 @@ _PyObject_DebugDumpAddress(const void *p) } } - serial = read_size_t(tail + SST); +#ifdef PYMEM_DEBUG_SERIALNO + size_t serial = read_size_t(tail + SST); fprintf(stderr, " The block was made by call #%" PY_FORMAT_SIZE_T "u to debug malloc/realloc.\n", serial); +#endif if (nbytes > 0) { i = 0; @@ -2575,8 +2607,11 @@ _PyObject_DebugMallocStats(FILE *out) quantization += p * ((POOL_SIZE - POOL_OVERHEAD) % size); } fputc('\n', out); - if (_PyMem_DebugEnabled()) +#ifdef PYMEM_DEBUG_SERIALNO + if (_PyMem_DebugEnabled()) { (void)printone(out, "# times object malloc called", serialno); + } +#endif (void)printone(out, "# arenas allocated total", ntimes_arena_allocated); (void)printone(out, "# arenas reclaimed", ntimes_arena_allocated - narenas); (void)printone(out, "# arenas highwater mark", narenas_highwater); From webhook-mailer at python.org Fri Apr 12 15:58:28 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Fri, 12 Apr 2019 19:58:28 -0000 Subject: [Python-checkins] bpo-18748: test_io: silence destructor errors (GH-12805) Message-ID: https://github.com/python/cpython/commit/472f794a33221ea835a2fbf6c9f12aa2bd66d1b0 commit: 472f794a33221ea835a2fbf6c9f12aa2bd66d1b0 branch: master author: Victor Stinner committer: GitHub date: 2019-04-12T21:58:24+02:00 summary: bpo-18748: test_io: silence destructor errors (GH-12805) files: M Lib/test/test_io.py diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py index 811a446f92be..5406a2891bb2 100644 --- a/Lib/test/test_io.py +++ b/Lib/test/test_io.py @@ -991,6 +991,9 @@ def flush(self): # This would cause an assertion failure. 
self.assertRaises(OSError, f.close) + # Silence destructor error + R.flush = lambda self: None + class CIOTest(IOTest): @@ -1167,6 +1170,10 @@ def bad_close(): self.assertEqual(err.exception.__context__.args, ('flush',)) self.assertFalse(b.closed) + # Silence destructor error + raw.close = lambda: None + b.flush = lambda: None + def test_nonnormalized_close_error_on_close(self): # Issue #21677 raw = self.MockRawIO() @@ -1184,6 +1191,10 @@ def bad_close(): self.assertIn('non_existing_flush', str(err.exception.__context__)) self.assertFalse(b.closed) + # Silence destructor error + b.flush = lambda: None + raw.close = lambda: None + def test_multi_close(self): raw = self.MockRawIO() b = self.tp(raw) @@ -2039,6 +2050,9 @@ def reader_close(): self.assertFalse(reader.closed) self.assertTrue(writer.closed) + # Silence destructor error + reader.close = lambda: None + def test_writer_close_error_on_close(self): def writer_close(): writer_non_existing @@ -2053,6 +2067,9 @@ def writer_close(): self.assertTrue(reader.closed) self.assertFalse(writer.closed) + # Silence destructor error + writer.close = lambda: None + def test_reader_writer_close_error_on_close(self): def reader_close(): reader_non_existing @@ -2072,6 +2089,10 @@ def writer_close(): self.assertFalse(reader.closed) self.assertFalse(writer.closed) + # Silence destructor error + reader.close = lambda: None + writer.close = lambda: None + def test_isatty(self): class SelectableIsAtty(MockRawIO): def __init__(self, isatty): @@ -3270,6 +3291,10 @@ def bad_close(): self.assertEqual(err.exception.__context__.args, ('flush',)) self.assertFalse(txt.closed) + # Silence destructor error + buffer.close = lambda: None + txt.flush = lambda: None + def test_nonnormalized_close_error_on_close(self): # Issue #21677 buffer = self.BytesIO(self.testdata) @@ -3287,6 +3312,10 @@ def bad_close(): self.assertIn('non_existing_flush', str(err.exception.__context__)) self.assertFalse(txt.closed) + # Silence destructor error + buffer.close = lambda: None + txt.flush = lambda: None + def test_multi_close(self): txt = self.TextIOWrapper(self.BytesIO(self.testdata), encoding="ascii") txt.close() From webhook-mailer at python.org Fri Apr 12 18:26:52 2019 From: webhook-mailer at python.org (Steve Dower) Date: Fri, 12 Apr 2019 22:26:52 -0000 Subject: [Python-checkins] bpo-33922: Adding documentation for new "-64" suffix of Python launcher (GH-7849) Message-ID: https://github.com/python/cpython/commit/1e2ad6c275d2b09e76b7cbba7281d5a125a593c1 commit: 1e2ad6c275d2b09e76b7cbba7281d5a125a593c1 branch: master author: mrh1997 committer: Steve Dower date: 2019-04-12T15:26:47-07:00 summary: bpo-33922: Adding documentation for new "-64" suffix of Python launcher (GH-7849) Since bpo-30291 it is possible to specify the architecture of Python when using the launcher files: M Doc/using/windows.rst M Misc/ACKS diff --git a/Doc/using/windows.rst b/Doc/using/windows.rst index 0165fff09cc5..2f3eb0ea3f06 100644 --- a/Doc/using/windows.rst +++ b/Doc/using/windows.rst @@ -154,7 +154,9 @@ of available options is shown below. 
| DefaultJustForMeTargetDir | The default install directory for | :file:`%LocalAppData%\\\ | | | just-for-me installs | Programs\\PythonXY` or | | | | :file:`%LocalAppData%\\\ | -| | | Programs\\PythonXY-32` | +| | | Programs\\PythonXY-32` or| +| | | :file:`%LocalAppData%\\\ | +| | | Programs\\PythonXY-64` | +---------------------------+--------------------------------------+--------------------------+ | DefaultCustomTargetDir | The default custom install directory | (empty) | | | displayed in the UI | | @@ -762,9 +764,16 @@ on Windows which you hope will be useful on Unix, you should use one of the shebang lines starting with ``/usr``. Any of the above virtual commands can be suffixed with an explicit version -(either just the major version, or the major and minor version) - for example -``/usr/bin/python2.7`` - which will cause that specific version to be located -and used. +(either just the major version, or the major and minor version). +Furthermore the 32-bit version can be requested by adding "-32" after the +minor version. I.e. ``/usr/bin/python2.7-32`` will request usage of the +32-bit python 2.7. + +.. versionadded:: 3.7 + + Beginning with python launcher 3.7 it is possible to request 64-bit version + by the "-64" suffix. Furthermore it is possible to specify a major and + architecture without minor (i.e. ``/usr/bin/python3-64``). The ``/usr/bin/env`` form of shebang line has one further special property. Before looking for installed Python interpreters, this form will search the @@ -806,17 +815,18 @@ Customizing default Python versions In some cases, a version qualifier can be included in a command to dictate which version of Python will be used by the command. A version qualifier starts with a major version number and can optionally be followed by a period -('.') and a minor version specifier. If the minor qualifier is specified, it -may optionally be followed by "-32" to indicate the 32-bit implementation of -that version be used. +('.') and a minor version specifier. Furthermore it is possible to specifiy +if a 32 or 64 bit implementation shall be requested by adding "-32" or "-64". For example, a shebang line of ``#!python`` has no version qualifier, while ``#!python3`` has a version qualifier which specifies only a major version. -If no version qualifiers are found in a command, the environment variable -``PY_PYTHON`` can be set to specify the default version qualifier - the default -value is "2". Note this value could specify just a major version (e.g. "2") or -a major.minor qualifier (e.g. "2.6"), or even major.minor-32. +If no version qualifiers are found in a command, the environment +variable :envvar:`PY_PYTHON` can be set to specify the default version +qualifier. If it is not set, the default is "3". The variable can +specify any value that may be passed on the command line, such as "3", +"3.7", "3.7-32" or "3.7-64". (Note that the "-64" option is only +available with the launcher included with Python 3.7 or newer.) If no minor version qualifiers are found, the environment variable ``PY_PYTHON{major}`` (where ``{major}`` is the current major version qualifier @@ -834,8 +844,8 @@ of the specified version if available. This is so the behavior of the launcher can be predicted knowing only what versions are installed on the PC and without regard to the order in which they were installed (i.e., without knowing whether a 32 or 64-bit version of Python and corresponding launcher was -installed last). 
As noted above, an optional "-32" suffix can be used on a -version specifier to change this behaviour. +installed last). As noted above, an optional "-32" or "-64" suffix can be +used on a version specifier to change this behaviour. Examples: diff --git a/Misc/ACKS b/Misc/ACKS index 19c7e4305351..5d7181df6794 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -723,6 +723,7 @@ Ludwig H?hne Gerhard H?ring Fredrik H??rd Florian H?ch +Robert H?lzl Catalin Iacob Mihai Ibanescu Ali Ikinci From webhook-mailer at python.org Fri Apr 12 18:32:38 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 12 Apr 2019 22:32:38 -0000 Subject: [Python-checkins] bpo-33922: Adding documentation for new "-64" suffix of Python launcher (GH-7849) Message-ID: https://github.com/python/cpython/commit/395bb94a7f1c3ec9c29976738dfc6cb5d31f9aee commit: 395bb94a7f1c3ec9c29976738dfc6cb5d31f9aee branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-12T15:32:33-07:00 summary: bpo-33922: Adding documentation for new "-64" suffix of Python launcher (GH-7849) Since bpo-30291 it is possible to specify the architecture of Python when using the launcher (cherry picked from commit 1e2ad6c275d2b09e76b7cbba7281d5a125a593c1) Co-authored-by: mrh1997 files: M Doc/using/windows.rst M Misc/ACKS diff --git a/Doc/using/windows.rst b/Doc/using/windows.rst index 8654bc2b024a..a966d1fe67ad 100644 --- a/Doc/using/windows.rst +++ b/Doc/using/windows.rst @@ -154,7 +154,9 @@ of available options is shown below. | DefaultJustForMeTargetDir | The default install directory for | :file:`%LocalAppData%\\\ | | | just-for-me installs | Programs\\PythonXY` or | | | | :file:`%LocalAppData%\\\ | -| | | Programs\\PythonXY-32` | +| | | Programs\\PythonXY-32` or| +| | | :file:`%LocalAppData%\\\ | +| | | Programs\\PythonXY-64` | +---------------------------+--------------------------------------+--------------------------+ | DefaultCustomTargetDir | The default custom install directory | (empty) | | | displayed in the UI | | @@ -762,9 +764,16 @@ on Windows which you hope will be useful on Unix, you should use one of the shebang lines starting with ``/usr``. Any of the above virtual commands can be suffixed with an explicit version -(either just the major version, or the major and minor version) - for example -``/usr/bin/python2.7`` - which will cause that specific version to be located -and used. +(either just the major version, or the major and minor version). +Furthermore the 32-bit version can be requested by adding "-32" after the +minor version. I.e. ``/usr/bin/python2.7-32`` will request usage of the +32-bit python 2.7. + +.. versionadded:: 3.7 + + Beginning with python launcher 3.7 it is possible to request 64-bit version + by the "-64" suffix. Furthermore it is possible to specify a major and + architecture without minor (i.e. ``/usr/bin/python3-64``). The ``/usr/bin/env`` form of shebang line has one further special property. Before looking for installed Python interpreters, this form will search the @@ -806,17 +815,18 @@ Customizing default Python versions In some cases, a version qualifier can be included in a command to dictate which version of Python will be used by the command. A version qualifier starts with a major version number and can optionally be followed by a period -('.') and a minor version specifier. If the minor qualifier is specified, it -may optionally be followed by "-32" to indicate the 32-bit implementation of -that version be used. 
+('.') and a minor version specifier. Furthermore it is possible to specifiy +if a 32 or 64 bit implementation shall be requested by adding "-32" or "-64". For example, a shebang line of ``#!python`` has no version qualifier, while ``#!python3`` has a version qualifier which specifies only a major version. -If no version qualifiers are found in a command, the environment variable -``PY_PYTHON`` can be set to specify the default version qualifier - the default -value is "2". Note this value could specify just a major version (e.g. "2") or -a major.minor qualifier (e.g. "2.6"), or even major.minor-32. +If no version qualifiers are found in a command, the environment +variable :envvar:`PY_PYTHON` can be set to specify the default version +qualifier. If it is not set, the default is "3". The variable can +specify any value that may be passed on the command line, such as "3", +"3.7", "3.7-32" or "3.7-64". (Note that the "-64" option is only +available with the launcher included with Python 3.7 or newer.) If no minor version qualifiers are found, the environment variable ``PY_PYTHON{major}`` (where ``{major}`` is the current major version qualifier @@ -834,8 +844,8 @@ of the specified version if available. This is so the behavior of the launcher can be predicted knowing only what versions are installed on the PC and without regard to the order in which they were installed (i.e., without knowing whether a 32 or 64-bit version of Python and corresponding launcher was -installed last). As noted above, an optional "-32" suffix can be used on a -version specifier to change this behaviour. +installed last). As noted above, an optional "-32" or "-64" suffix can be +used on a version specifier to change this behaviour. Examples: diff --git a/Misc/ACKS b/Misc/ACKS index 193592290f0e..4581f32dd5f8 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -711,6 +711,7 @@ Ludwig H?hne Gerhard H?ring Fredrik H??rd Florian H?ch +Robert H?lzl Catalin Iacob Mihai Ibanescu Ali Ikinci From webhook-mailer at python.org Fri Apr 12 18:33:44 2019 From: webhook-mailer at python.org (Guido van Rossum) Date: Fri, 12 Apr 2019 22:33:44 -0000 Subject: [Python-checkins] bpo-35581: Document @typing.type_check_only (GH-11312) Message-ID: https://github.com/python/cpython/commit/1e8295402bf5e81d327ed2b5eb88a6b6de449d63 commit: 1e8295402bf5e81d327ed2b5eb88a6b6de449d63 branch: master author: Sebastian Rittau committer: Guido van Rossum date: 2019-04-12T15:33:40-07:00 summary: bpo-35581: Document @typing.type_check_only (GH-11312) files: A Misc/NEWS.d/next/Documentation/2018-12-25-12-56-57.bpo-35581.aA7r6T.rst M Doc/library/typing.rst diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst index fad9dc69431f..ed5f547e3ce3 100644 --- a/Doc/library/typing.rst +++ b/Doc/library/typing.rst @@ -951,6 +951,24 @@ The module defines the following classes, functions and decorators: This wraps the decorator with something that wraps the decorated function in :func:`no_type_check`. +.. decorator:: type_check_only + + Decorator to mark a class or function to be unavailable at runtime. + + This decorator is itself not available at runtime. It is mainly + intended to mark classes that are defined in type stub files if + an implementation returns an instance of a private class:: + + @type_check_only + class Response: # private or not available at runtime + code: int + def get_header(self, name: str) -> str: ... + + def fetch_response() -> Response: ... + + Note that returning instances of private classes is not recommended. 
+ It is usually preferable to make such classes public. + .. data:: Any Special type indicating an unconstrained type. diff --git a/Misc/NEWS.d/next/Documentation/2018-12-25-12-56-57.bpo-35581.aA7r6T.rst b/Misc/NEWS.d/next/Documentation/2018-12-25-12-56-57.bpo-35581.aA7r6T.rst new file mode 100644 index 000000000000..2fad3003e3b6 --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2018-12-25-12-56-57.bpo-35581.aA7r6T.rst @@ -0,0 +1 @@ + at typing.type_check_only now allows type stubs to mark functions and classes not available during runtime. \ No newline at end of file From webhook-mailer at python.org Fri Apr 12 18:48:02 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 12 Apr 2019 22:48:02 -0000 Subject: [Python-checkins] bpo-35581: Document @typing.type_check_only (GH-11312) Message-ID: https://github.com/python/cpython/commit/b759a2c5b9612a03c8b30514aa93444268931e5e commit: b759a2c5b9612a03c8b30514aa93444268931e5e branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-12T15:47:57-07:00 summary: bpo-35581: Document @typing.type_check_only (GH-11312) (cherry picked from commit 1e8295402bf5e81d327ed2b5eb88a6b6de449d63) Co-authored-by: Sebastian Rittau files: A Misc/NEWS.d/next/Documentation/2018-12-25-12-56-57.bpo-35581.aA7r6T.rst M Doc/library/typing.rst diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst index 6f1c85feb0eb..9f6757c2864c 100644 --- a/Doc/library/typing.rst +++ b/Doc/library/typing.rst @@ -941,6 +941,24 @@ The module defines the following classes, functions and decorators: This wraps the decorator with something that wraps the decorated function in :func:`no_type_check`. +.. decorator:: type_check_only + + Decorator to mark a class or function to be unavailable at runtime. + + This decorator is itself not available at runtime. It is mainly + intended to mark classes that are defined in type stub files if + an implementation returns an instance of a private class:: + + @type_check_only + class Response: # private or not available at runtime + code: int + def get_header(self, name: str) -> str: ... + + def fetch_response() -> Response: ... + + Note that returning instances of private classes is not recommended. + It is usually preferable to make such classes public. + .. data:: Any Special type indicating an unconstrained type. diff --git a/Misc/NEWS.d/next/Documentation/2018-12-25-12-56-57.bpo-35581.aA7r6T.rst b/Misc/NEWS.d/next/Documentation/2018-12-25-12-56-57.bpo-35581.aA7r6T.rst new file mode 100644 index 000000000000..2fad3003e3b6 --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2018-12-25-12-56-57.bpo-35581.aA7r6T.rst @@ -0,0 +1 @@ + at typing.type_check_only now allows type stubs to mark functions and classes not available during runtime. 
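A small hedged illustration of the behaviour documented above; the stub class in the comments is invented for the example, and the runtime check relies only on the documentation's statement that the decorator is not available at runtime:

    # @type_check_only is meant for stub (.pyi) files; in a stub one might write:
    #
    #     @type_check_only
    #     class _InternalResult:        # hypothetical private class
    #         value: int
    #
    #     def compute() -> _InternalResult: ...
    #
    # At runtime the typing module is not expected to export the name:
    import typing
    print(hasattr(typing, "type_check_only"))   # expected: False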
\ No newline at end of file From webhook-mailer at python.org Fri Apr 12 18:51:10 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Fri, 12 Apr 2019 22:51:10 -0000 Subject: [Python-checkins] bpo-36618: Don't add -fmax-type-align flag to old clang (GH-12811) Message-ID: https://github.com/python/cpython/commit/a304b136adda3575898d8b5debedcd48d5072272 commit: a304b136adda3575898d8b5debedcd48d5072272 branch: master author: Victor Stinner committer: GitHub date: 2019-04-13T00:51:07+02:00 summary: bpo-36618: Don't add -fmax-type-align flag to old clang (GH-12811) files: M Misc/NEWS.d/next/Build/2019-04-12-19-49-10.bpo-36618.gcI9iq.rst M configure M configure.ac diff --git a/Misc/NEWS.d/next/Build/2019-04-12-19-49-10.bpo-36618.gcI9iq.rst b/Misc/NEWS.d/next/Build/2019-04-12-19-49-10.bpo-36618.gcI9iq.rst index 597dd67b46e0..4408227b326c 100644 --- a/Misc/NEWS.d/next/Build/2019-04-12-19-49-10.bpo-36618.gcI9iq.rst +++ b/Misc/NEWS.d/next/Build/2019-04-12-19-49-10.bpo-36618.gcI9iq.rst @@ -4,5 +4,5 @@ alignment on 16 bytes by default and so uses MOVAPS instruction which can lead to segmentation fault. Instruct clang that Python is limited to alignemnt on 8 bytes to use MOVUPS instruction instead: slower but don't trigger a SIGSEGV if the memory is not aligned on 16 bytes. Sadly, the flag -must be expected to ``CFLAGS`` and not just ``CFLAGS_NODIST``, since third -party C extensions can have the same issue. +must be added to ``CFLAGS`` and not just ``CFLAGS_NODIST``, since third party C +extensions can have the same issue. diff --git a/configure b/configure index ac1e66a96bb6..9c7eded85359 100755 --- a/configure +++ b/configure @@ -6889,9 +6889,14 @@ then # instead: slower but don't trigger a SIGSEGV if the memory is not aligned # on 16 bytes. # - # Sadly, the flag must be expected to CFLAGS and not just CFLAGS_NODIST, + # Sadly, the flag must be added to CFLAGS and not just CFLAGS_NODIST, # since third party C extensions can have the same issue. - CFLAGS="$CFLAGS -fmax-type-align=8" + # + # Check if -fmax-type-align flag is supported (it's not supported by old + # clang versions): + if "$CC" -v --help 2>/dev/null |grep -- -fmax-type-align > /dev/null; then + CFLAGS="$CFLAGS -fmax-type-align=8" + fi fi diff --git a/configure.ac b/configure.ac index 863b34245ad6..6450519444c8 100644 --- a/configure.ac +++ b/configure.ac @@ -1540,9 +1540,14 @@ then # instead: slower but don't trigger a SIGSEGV if the memory is not aligned # on 16 bytes. # - # Sadly, the flag must be expected to CFLAGS and not just CFLAGS_NODIST, + # Sadly, the flag must be added to CFLAGS and not just CFLAGS_NODIST, # since third party C extensions can have the same issue. 
- CFLAGS="$CFLAGS -fmax-type-align=8" + # + # Check if -fmax-type-align flag is supported (it's not supported by old + # clang versions): + if "$CC" -v --help 2>/dev/null |grep -- -fmax-type-align > /dev/null; then + CFLAGS="$CFLAGS -fmax-type-align=8" + fi fi AC_SUBST(BASECFLAGS) From webhook-mailer at python.org Fri Apr 12 20:50:27 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Sat, 13 Apr 2019 00:50:27 -0000 Subject: [Python-checkins] bpo-36605: make tags: parse Modules/_io directory (GH-12789) (GH-12815) Message-ID: https://github.com/python/cpython/commit/44a2c4aaf2d0c03c70646eb16fbc6c1ba1689e69 commit: 44a2c4aaf2d0c03c70646eb16fbc6c1ba1689e69 branch: 2.7 author: Victor Stinner committer: GitHub date: 2019-04-13T02:50:24+02:00 summary: bpo-36605: make tags: parse Modules/_io directory (GH-12789) (GH-12815) "make tags" and "make TAGS" now also parse Modules/_io/*.c and Modules/_io/*.h. (cherry picked from commit 21a74a9d77c5ac628808b9faace18b824ca056f7) files: A Misc/NEWS.d/next/Build/2019-04-11-18-50-58.bpo-36605.gk5czf.rst M configure M configure.ac diff --git a/Misc/NEWS.d/next/Build/2019-04-11-18-50-58.bpo-36605.gk5czf.rst b/Misc/NEWS.d/next/Build/2019-04-11-18-50-58.bpo-36605.gk5czf.rst new file mode 100644 index 000000000000..4a558fa94d6f --- /dev/null +++ b/Misc/NEWS.d/next/Build/2019-04-11-18-50-58.bpo-36605.gk5czf.rst @@ -0,0 +1,2 @@ +``make tags`` and ``make TAGS`` now also parse ``Modules/_io/*.c`` and +``Modules/_io/*.h``. diff --git a/configure b/configure index ced0a0043fb6..aa361aa4c9ff 100755 --- a/configure +++ b/configure @@ -769,7 +769,6 @@ infodir docdir oldincludedir includedir -runstatedir localstatedir sharedstatedir sysconfdir @@ -881,7 +880,6 @@ datadir='${datarootdir}' sysconfdir='${prefix}/etc' sharedstatedir='${prefix}/com' localstatedir='${prefix}/var' -runstatedir='${localstatedir}/run' includedir='${prefix}/include' oldincludedir='/usr/include' docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' @@ -1134,15 +1132,6 @@ do | -silent | --silent | --silen | --sile | --sil) silent=yes ;; - -runstatedir | --runstatedir | --runstatedi | --runstated \ - | --runstate | --runstat | --runsta | --runst | --runs \ - | --run | --ru | --r) - ac_prev=runstatedir ;; - -runstatedir=* | --runstatedir=* | --runstatedi=* | --runstated=* \ - | --runstate=* | --runstat=* | --runsta=* | --runst=* | --runs=* \ - | --run=* | --ru=* | --r=*) - runstatedir=$ac_optarg ;; - -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) ac_prev=sbindir ;; -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ @@ -1280,7 +1269,7 @@ fi for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ datadir sysconfdir sharedstatedir localstatedir includedir \ oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ - libdir localedir mandir runstatedir + libdir localedir mandir do eval ac_val=\$$ac_var # Remove trailing slashes. 
@@ -1433,7 +1422,6 @@ Fine tuning of the installation directories: --sysconfdir=DIR read-only single-machine data [PREFIX/etc] --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] --localstatedir=DIR modifiable single-machine data [PREFIX/var] - --runstatedir=DIR modifiable per-process data [LOCALSTATEDIR/run] --libdir=DIR object code libraries [EPREFIX/lib] --includedir=DIR C header files [PREFIX/include] --oldincludedir=DIR C header files for non-gcc [/usr/include] @@ -15269,7 +15257,7 @@ do done -SRCDIRS="Parser Objects Python Modules" +SRCDIRS="Parser Objects Python Modules Modules/_io" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for build directories" >&5 $as_echo_n "checking for build directories... " >&6; } for dir in $SRCDIRS; do diff --git a/configure.ac b/configure.ac index 13b40f2edd80..344bc7a8bba6 100644 --- a/configure.ac +++ b/configure.ac @@ -4857,7 +4857,7 @@ do done AC_SUBST(SRCDIRS) -SRCDIRS="Parser Objects Python Modules" +SRCDIRS="Parser Objects Python Modules Modules/_io" AC_MSG_CHECKING(for build directories) for dir in $SRCDIRS; do if test ! -d $dir; then From webhook-mailer at python.org Fri Apr 12 20:50:34 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Sat, 13 Apr 2019 00:50:34 -0000 Subject: [Python-checkins] bpo-36605: make tags: parse Modules/_io directory (GH-12789) (GH-12814) Message-ID: https://github.com/python/cpython/commit/5403006c5c371649b92ab8a2cde742412c765640 commit: 5403006c5c371649b92ab8a2cde742412c765640 branch: 3.7 author: Victor Stinner committer: GitHub date: 2019-04-13T02:50:31+02:00 summary: bpo-36605: make tags: parse Modules/_io directory (GH-12789) (GH-12814) "make tags" and "make TAGS" now also parse Modules/_io/*.c and Modules/_io/*.h. (cherry picked from commit 21a74a9d77c5ac628808b9faace18b824ca056f7) files: A Misc/NEWS.d/next/Build/2019-04-11-18-50-58.bpo-36605.gk5czf.rst M configure M configure.ac diff --git a/Misc/NEWS.d/next/Build/2019-04-11-18-50-58.bpo-36605.gk5czf.rst b/Misc/NEWS.d/next/Build/2019-04-11-18-50-58.bpo-36605.gk5czf.rst new file mode 100644 index 000000000000..4a558fa94d6f --- /dev/null +++ b/Misc/NEWS.d/next/Build/2019-04-11-18-50-58.bpo-36605.gk5czf.rst @@ -0,0 +1,2 @@ +``make tags`` and ``make TAGS`` now also parse ``Modules/_io/*.c`` and +``Modules/_io/*.h``. 
diff --git a/configure b/configure index 18047ced4349..2db11e6e8667 100755 --- a/configure +++ b/configure @@ -784,7 +784,6 @@ infodir docdir oldincludedir includedir -runstatedir localstatedir sharedstatedir sysconfdir @@ -898,7 +897,6 @@ datadir='${datarootdir}' sysconfdir='${prefix}/etc' sharedstatedir='${prefix}/com' localstatedir='${prefix}/var' -runstatedir='${localstatedir}/run' includedir='${prefix}/include' oldincludedir='/usr/include' docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' @@ -1151,15 +1149,6 @@ do | -silent | --silent | --silen | --sile | --sil) silent=yes ;; - -runstatedir | --runstatedir | --runstatedi | --runstated \ - | --runstate | --runstat | --runsta | --runst | --runs \ - | --run | --ru | --r) - ac_prev=runstatedir ;; - -runstatedir=* | --runstatedir=* | --runstatedi=* | --runstated=* \ - | --runstate=* | --runstat=* | --runsta=* | --runst=* | --runs=* \ - | --run=* | --ru=* | --r=*) - runstatedir=$ac_optarg ;; - -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) ac_prev=sbindir ;; -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ @@ -1297,7 +1286,7 @@ fi for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ datadir sysconfdir sharedstatedir localstatedir includedir \ oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ - libdir localedir mandir runstatedir + libdir localedir mandir do eval ac_val=\$$ac_var # Remove trailing slashes. @@ -1450,7 +1439,6 @@ Fine tuning of the installation directories: --sysconfdir=DIR read-only single-machine data [PREFIX/etc] --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] --localstatedir=DIR modifiable single-machine data [PREFIX/var] - --runstatedir=DIR modifiable per-process data [LOCALSTATEDIR/run] --libdir=DIR object code libraries [EPREFIX/lib] --includedir=DIR C header files [PREFIX/include] --oldincludedir=DIR C header files for non-gcc [/usr/include] @@ -16739,7 +16727,7 @@ do done -SRCDIRS="Parser Objects Python Modules Programs" +SRCDIRS="Parser Objects Python Modules Modules/_io Programs" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for build directories" >&5 $as_echo_n "checking for build directories... " >&6; } for dir in $SRCDIRS; do diff --git a/configure.ac b/configure.ac index ad0f4d42b4b5..e5fb7e7b0b13 100644 --- a/configure.ac +++ b/configure.ac @@ -5391,7 +5391,7 @@ do done AC_SUBST(SRCDIRS) -SRCDIRS="Parser Objects Python Modules Programs" +SRCDIRS="Parser Objects Python Modules Modules/_io Programs" AC_MSG_CHECKING(for build directories) for dir in $SRCDIRS; do if test ! 
-d $dir; then From webhook-mailer at python.org Fri Apr 12 21:46:27 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Sat, 13 Apr 2019 01:46:27 -0000 Subject: [Python-checkins] Doc: define PY_SSIZE_T_CLEAN always (GH-12794) Message-ID: https://github.com/python/cpython/commit/c88feceb449d6e85d7e17ec36559206094d10d81 commit: c88feceb449d6e85d7e17ec36559206094d10d81 branch: master author: Inada Naoki committer: GitHub date: 2019-04-13T10:46:21+09:00 summary: Doc: define PY_SSIZE_T_CLEAN always (GH-12794) files: M Doc/c-api/intro.rst M Doc/extending/embedding.rst M Doc/extending/extending.rst M Doc/faq/extending.rst M Doc/includes/custom.c M Doc/includes/custom2.c M Doc/includes/custom3.c M Doc/includes/custom4.c M Doc/includes/run-func.c M Doc/includes/sublist.c diff --git a/Doc/c-api/intro.rst b/Doc/c-api/intro.rst index 6bb2356f694e..69aef0da04f3 100644 --- a/Doc/c-api/intro.rst +++ b/Doc/c-api/intro.rst @@ -48,7 +48,8 @@ Include Files All function, type and macro definitions needed to use the Python/C API are included in your code by the following line:: - #include "Python.h" + #define PY_SSIZE_T_CLEAN + #include This implies inclusion of the following standard headers: ````, ````, ````, ````, ```` and ```` @@ -60,6 +61,9 @@ This implies inclusion of the following standard headers: ````, headers on some systems, you *must* include :file:`Python.h` before any standard headers are included. + It is recommended to always define ``PY_SSIZE_T_CLEAN`` before including + ``Python.h``. See :ref:`arg-parsing` for a description of this macro. + All user visible names defined by Python.h (except those defined by the included standard headers) have one of the prefixes ``Py`` or ``_Py``. Names beginning with ``_Py`` are for internal use by the Python implementation and should not be diff --git a/Doc/extending/embedding.rst b/Doc/extending/embedding.rst index 7e4fc19db83b..13d83b72f82a 100644 --- a/Doc/extending/embedding.rst +++ b/Doc/extending/embedding.rst @@ -53,6 +53,7 @@ interface. This interface is intended to execute a Python script without needing to interact with the application directly. This can for example be used to perform some operation on a file. :: + #define PY_SSIZE_T_CLEAN #include int diff --git a/Doc/extending/extending.rst b/Doc/extending/extending.rst index 9fbd91f6a034..433178ab64d8 100644 --- a/Doc/extending/extending.rst +++ b/Doc/extending/extending.rst @@ -55,8 +55,9 @@ called ``spam``, the C file containing its implementation is called :file:`spammodule.c`; if the module name is very long, like ``spammify``, the module name can be just :file:`spammify.c`.) -The first line of our file can be:: +The first two lines of our file can be:: + #define PY_SSIZE_T_CLEAN #include which pulls in the Python API (you can add a comment describing the purpose of @@ -68,6 +69,9 @@ the module and a copyright notice if you like). headers on some systems, you *must* include :file:`Python.h` before any standard headers are included. + It is recommended to always define ``PY_SSIZE_T_CLEAN`` before including + ``Python.h``. See :ref:`parsetuple` for a description of this macro. + All user-visible symbols defined by :file:`Python.h` have a prefix of ``Py`` or ``PY``, except those defined in standard header files. For convenience, and since they are used extensively by the Python interpreter, ``"Python.h"`` @@ -729,7 +733,8 @@ it returns false and raises an appropriate exception. 
Here is an example module which uses keywords, based on an example by Geoff Philbrick (philbrick at hks.com):: - #include "Python.h" + #define PY_SSIZE_T_CLEAN /* Make "s#" use Py_ssize_t rather than int. */ + #include static PyObject * keywdarg_parrot(PyObject *self, PyObject *args, PyObject *keywds) @@ -1228,7 +1233,7 @@ The function :c:func:`spam_system` is modified in a trivial way:: In the beginning of the module, right after the line :: - #include "Python.h" + #include two more lines must be added:: diff --git a/Doc/faq/extending.rst b/Doc/faq/extending.rst index 74e1af6ef24d..2ad276587052 100644 --- a/Doc/faq/extending.rst +++ b/Doc/faq/extending.rst @@ -280,6 +280,7 @@ solution then is to call :c:func:`PyParser_ParseString` and test for ``e.error`` equal to ``E_EOF``, which means the input is incomplete. Here's a sample code fragment, untested, inspired by code from Alex Farber:: + #define PY_SSIZE_T_CLEAN #include #include #include @@ -318,6 +319,7 @@ complete example using the GNU readline library (you may want to ignore #include #include + #define PY_SSIZE_T_CLEAN #include #include #include diff --git a/Doc/includes/custom.c b/Doc/includes/custom.c index fb2c7b2a430e..13d16f5424ae 100644 --- a/Doc/includes/custom.c +++ b/Doc/includes/custom.c @@ -1,3 +1,4 @@ +#define PY_SSIZE_T_CLEAN #include typedef struct { diff --git a/Doc/includes/custom2.c b/Doc/includes/custom2.c index 51ab4b80d680..6477a19dafed 100644 --- a/Doc/includes/custom2.c +++ b/Doc/includes/custom2.c @@ -1,3 +1,4 @@ +#define PY_SSIZE_T_CLEAN #include #include "structmember.h" diff --git a/Doc/includes/custom3.c b/Doc/includes/custom3.c index 09e87355b91a..213d0864ce1c 100644 --- a/Doc/includes/custom3.c +++ b/Doc/includes/custom3.c @@ -1,3 +1,4 @@ +#define PY_SSIZE_T_CLEAN #include #include "structmember.h" diff --git a/Doc/includes/custom4.c b/Doc/includes/custom4.c index 0994d8fda0e5..b0b2906dbdc8 100644 --- a/Doc/includes/custom4.c +++ b/Doc/includes/custom4.c @@ -1,3 +1,4 @@ +#define PY_SSIZE_T_CLEAN #include #include "structmember.h" diff --git a/Doc/includes/run-func.c b/Doc/includes/run-func.c index 9caf1fdb2010..392f86d65ecc 100644 --- a/Doc/includes/run-func.c +++ b/Doc/includes/run-func.c @@ -1,3 +1,4 @@ +#define PY_SSIZE_T_CLEAN #include int diff --git a/Doc/includes/sublist.c b/Doc/includes/sublist.c index 376dddfac09c..76ff93948cfd 100644 --- a/Doc/includes/sublist.c +++ b/Doc/includes/sublist.c @@ -1,3 +1,4 @@ +#define PY_SSIZE_T_CLEAN #include typedef struct { From webhook-mailer at python.org Fri Apr 12 21:56:55 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 13 Apr 2019 01:56:55 -0000 Subject: [Python-checkins] Doc: define PY_SSIZE_T_CLEAN always (GH-12794) Message-ID: https://github.com/python/cpython/commit/be63df287a4a12ad86b4a2aec4358a1309f0488b commit: be63df287a4a12ad86b4a2aec4358a1309f0488b branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-12T18:56:50-07:00 summary: Doc: define PY_SSIZE_T_CLEAN always (GH-12794) (cherry picked from commit c88feceb449d6e85d7e17ec36559206094d10d81) Co-authored-by: Inada Naoki files: M Doc/c-api/intro.rst M Doc/extending/embedding.rst M Doc/extending/extending.rst M Doc/faq/extending.rst M Doc/includes/custom.c M Doc/includes/custom2.c M Doc/includes/custom3.c M Doc/includes/custom4.c M Doc/includes/run-func.c M Doc/includes/sublist.c diff --git a/Doc/c-api/intro.rst b/Doc/c-api/intro.rst index 15006100c736..330871bc2ae3 100644 --- a/Doc/c-api/intro.rst +++ 
b/Doc/c-api/intro.rst @@ -48,7 +48,8 @@ Include Files All function, type and macro definitions needed to use the Python/C API are included in your code by the following line:: - #include "Python.h" + #define PY_SSIZE_T_CLEAN + #include This implies inclusion of the following standard headers: ````, ````, ````, ````, ```` and ```` @@ -60,6 +61,9 @@ This implies inclusion of the following standard headers: ````, headers on some systems, you *must* include :file:`Python.h` before any standard headers are included. + It is recommended to always define ``PY_SSIZE_T_CLEAN`` before including + ``Python.h``. See :ref:`arg-parsing` for a description of this macro. + All user visible names defined by Python.h (except those defined by the included standard headers) have one of the prefixes ``Py`` or ``_Py``. Names beginning with ``_Py`` are for internal use by the Python implementation and should not be diff --git a/Doc/extending/embedding.rst b/Doc/extending/embedding.rst index 7e4fc19db83b..13d83b72f82a 100644 --- a/Doc/extending/embedding.rst +++ b/Doc/extending/embedding.rst @@ -53,6 +53,7 @@ interface. This interface is intended to execute a Python script without needing to interact with the application directly. This can for example be used to perform some operation on a file. :: + #define PY_SSIZE_T_CLEAN #include int diff --git a/Doc/extending/extending.rst b/Doc/extending/extending.rst index b788a5575b3f..afed3aabb79a 100644 --- a/Doc/extending/extending.rst +++ b/Doc/extending/extending.rst @@ -55,8 +55,9 @@ called ``spam``, the C file containing its implementation is called :file:`spammodule.c`; if the module name is very long, like ``spammify``, the module name can be just :file:`spammify.c`.) -The first line of our file can be:: +The first two lines of our file can be:: + #define PY_SSIZE_T_CLEAN #include which pulls in the Python API (you can add a comment describing the purpose of @@ -68,6 +69,9 @@ the module and a copyright notice if you like). headers on some systems, you *must* include :file:`Python.h` before any standard headers are included. + It is recommended to always define ``PY_SSIZE_T_CLEAN`` before including + ``Python.h``. See :ref:`parsetuple` for a description of this macro. + All user-visible symbols defined by :file:`Python.h` have a prefix of ``Py`` or ``PY``, except those defined in standard header files. For convenience, and since they are used extensively by the Python interpreter, ``"Python.h"`` @@ -729,7 +733,8 @@ it returns false and raises an appropriate exception. Here is an example module which uses keywords, based on an example by Geoff Philbrick (philbrick at hks.com):: - #include "Python.h" + #define PY_SSIZE_T_CLEAN /* Make "s#" use Py_ssize_t rather than int. */ + #include static PyObject * keywdarg_parrot(PyObject *self, PyObject *args, PyObject *keywds) @@ -1228,7 +1233,7 @@ The function :c:func:`spam_system` is modified in a trivial way:: In the beginning of the module, right after the line :: - #include "Python.h" + #include two more lines must be added:: diff --git a/Doc/faq/extending.rst b/Doc/faq/extending.rst index 74e1af6ef24d..2ad276587052 100644 --- a/Doc/faq/extending.rst +++ b/Doc/faq/extending.rst @@ -280,6 +280,7 @@ solution then is to call :c:func:`PyParser_ParseString` and test for ``e.error`` equal to ``E_EOF``, which means the input is incomplete. 
Here's a sample code fragment, untested, inspired by code from Alex Farber:: + #define PY_SSIZE_T_CLEAN #include #include #include @@ -318,6 +319,7 @@ complete example using the GNU readline library (you may want to ignore #include #include + #define PY_SSIZE_T_CLEAN #include #include #include diff --git a/Doc/includes/custom.c b/Doc/includes/custom.c index fb2c7b2a430e..13d16f5424ae 100644 --- a/Doc/includes/custom.c +++ b/Doc/includes/custom.c @@ -1,3 +1,4 @@ +#define PY_SSIZE_T_CLEAN #include typedef struct { diff --git a/Doc/includes/custom2.c b/Doc/includes/custom2.c index 51ab4b80d680..6477a19dafed 100644 --- a/Doc/includes/custom2.c +++ b/Doc/includes/custom2.c @@ -1,3 +1,4 @@ +#define PY_SSIZE_T_CLEAN #include #include "structmember.h" diff --git a/Doc/includes/custom3.c b/Doc/includes/custom3.c index 09e87355b91a..213d0864ce1c 100644 --- a/Doc/includes/custom3.c +++ b/Doc/includes/custom3.c @@ -1,3 +1,4 @@ +#define PY_SSIZE_T_CLEAN #include #include "structmember.h" diff --git a/Doc/includes/custom4.c b/Doc/includes/custom4.c index 0994d8fda0e5..b0b2906dbdc8 100644 --- a/Doc/includes/custom4.c +++ b/Doc/includes/custom4.c @@ -1,3 +1,4 @@ +#define PY_SSIZE_T_CLEAN #include #include "structmember.h" diff --git a/Doc/includes/run-func.c b/Doc/includes/run-func.c index 9caf1fdb2010..392f86d65ecc 100644 --- a/Doc/includes/run-func.c +++ b/Doc/includes/run-func.c @@ -1,3 +1,4 @@ +#define PY_SSIZE_T_CLEAN #include int diff --git a/Doc/includes/sublist.c b/Doc/includes/sublist.c index 376dddfac09c..76ff93948cfd 100644 --- a/Doc/includes/sublist.c +++ b/Doc/includes/sublist.c @@ -1,3 +1,4 @@ +#define PY_SSIZE_T_CLEAN #include typedef struct { From webhook-mailer at python.org Sat Apr 13 04:49:56 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Sat, 13 Apr 2019 08:49:56 -0000 Subject: [Python-checkins] bpo-35734: ipaddress: remove unused methods (GH-11591) Message-ID: https://github.com/python/cpython/commit/e59ec1b05d3e1487ca7754530d3748446c9b7dfd commit: e59ec1b05d3e1487ca7754530d3748446c9b7dfd branch: master author: R?mi Lapeyre committer: Inada Naoki date: 2019-04-13T17:49:34+09:00 summary: bpo-35734: ipaddress: remove unused methods (GH-11591) files: M Lib/ipaddress.py M Lib/test/test_ipaddress.py diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py index a88cf3d0b7c5..8c9d7406c447 100644 --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -1077,9 +1077,6 @@ class _BaseV4: # Equivalent to 255.255.255.255 or 32 bits of 1's. _ALL_ONES = (2**IPV4LENGTH) - 1 - # the valid octets for host and netmasks. only useful for IPv4. - _valid_mask_octets = frozenset({255, 254, 252, 248, 240, 224, 192, 128, 0}) - _max_prefixlen = IPV4LENGTH # There are only a handful of valid v4 netmasks, so we cache them all # when constructed (see _make_netmask()). @@ -1182,58 +1179,6 @@ def _string_from_ip_int(cls, ip_int): """ return '.'.join(map(str, ip_int.to_bytes(4, 'big'))) - def _is_valid_netmask(self, netmask): - """Verify that the netmask is valid. - - Args: - netmask: A string, either a prefix or dotted decimal - netmask. - - Returns: - A boolean, True if the prefix represents a valid IPv4 - netmask. 
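Since the private _is_valid_netmask/_is_hostmask helpers are removed here as unused, a hedged sketch of how the same check can be done through the public constructor (the helper name below is invented for illustration):

    import ipaddress

    def is_valid_ipv4_mask(mask: str) -> bool:
        # Let the IPv4Network constructor validate a netmask or hostmask string.
        try:
            ipaddress.IPv4Network(f"0.0.0.0/{mask}")
            return True
        except ValueError:
            return False

    print(is_valid_ipv4_mask("255.255.255.0"))    # True  (netmask)
    print(is_valid_ipv4_mask("0.0.0.255"))        # True  (hostmask form)
    print(is_valid_ipv4_mask("255.255.255.127"))  # False (neither)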
- - """ - mask = netmask.split('.') - if len(mask) == 4: - try: - for x in mask: - if int(x) not in self._valid_mask_octets: - return False - except ValueError: - # Found something that isn't an integer or isn't valid - return False - for idx, y in enumerate(mask): - if idx > 0 and y > mask[idx - 1]: - return False - return True - try: - netmask = int(netmask) - except ValueError: - return False - return 0 <= netmask <= self._max_prefixlen - - def _is_hostmask(self, ip_str): - """Test if the IP string is a hostmask (rather than a netmask). - - Args: - ip_str: A string, the potential hostmask. - - Returns: - A boolean, True if the IP string is a hostmask. - - """ - bits = ip_str.split('.') - try: - parts = [x for x in map(int, bits) if x in self._valid_mask_octets] - except ValueError: - return False - if len(parts) != len(bits): - return False - if parts[0] < parts[-1]: - return True - return False - def _reverse_pointer(self): """Return the reverse DNS pointer name for the IPv4 address. diff --git a/Lib/test/test_ipaddress.py b/Lib/test/test_ipaddress.py index 53f6f128443c..82daaff4d775 100644 --- a/Lib/test/test_ipaddress.py +++ b/Lib/test/test_ipaddress.py @@ -1040,27 +1040,12 @@ def testZeroNetmask(self): ipv4_zero_netmask = ipaddress.IPv4Interface('1.2.3.4/0') self.assertEqual(int(ipv4_zero_netmask.network.netmask), 0) self.assertEqual(ipv4_zero_netmask._prefix_from_prefix_string('0'), 0) - self.assertTrue(ipv4_zero_netmask._is_valid_netmask('0')) - self.assertTrue(ipv4_zero_netmask._is_valid_netmask('0.0.0.0')) - self.assertFalse(ipv4_zero_netmask._is_valid_netmask('invalid')) ipv6_zero_netmask = ipaddress.IPv6Interface('::1/0') self.assertEqual(int(ipv6_zero_netmask.network.netmask), 0) self.assertEqual(ipv6_zero_netmask._prefix_from_prefix_string('0'), 0) - def testIPv4NetAndHostmasks(self): - net = self.ipv4_network - self.assertFalse(net._is_valid_netmask('invalid')) - self.assertTrue(net._is_valid_netmask('128.128.128.128')) - self.assertFalse(net._is_valid_netmask('128.128.128.127')) - self.assertFalse(net._is_valid_netmask('128.128.128.255')) - self.assertTrue(net._is_valid_netmask('255.128.128.128')) - - self.assertFalse(net._is_hostmask('invalid')) - self.assertTrue(net._is_hostmask('128.255.255.255')) - self.assertFalse(net._is_hostmask('255.255.255.255')) - self.assertFalse(net._is_hostmask('1.2.3.4')) - + def testIPv4Net(self): net = ipaddress.IPv4Network('127.0.0.0/0.0.0.255') self.assertEqual(net.prefixlen, 24) From webhook-mailer at python.org Sat Apr 13 08:01:20 2019 From: webhook-mailer at python.org (Cheryl Sabella) Date: Sat, 13 Apr 2019 12:01:20 -0000 Subject: [Python-checkins] bpo-18610: Update wsgiref.validate docstring for wsgi.input read() (GH-11663) Message-ID: https://github.com/python/cpython/commit/f8716c88f13f035c126fc1db499ae0ea309c7ece commit: f8716c88f13f035c126fc1db499ae0ea309c7ece branch: master author: Cheryl Sabella committer: GitHub date: 2019-04-13T08:01:15-04:00 summary: bpo-18610: Update wsgiref.validate docstring for wsgi.input read() (GH-11663) files: M Lib/wsgiref/validate.py diff --git a/Lib/wsgiref/validate.py b/Lib/wsgiref/validate.py index 1c00bde220e7..48ac0070549b 100644 --- a/Lib/wsgiref/validate.py +++ b/Lib/wsgiref/validate.py @@ -77,7 +77,7 @@ * That wsgi.input is used properly: - - .read() is called with zero or one argument + - .read() is called with exactly one argument - That it returns a string From webhook-mailer at python.org Sat Apr 13 12:05:20 2019 From: webhook-mailer at python.org (Pablo Galindo) Date: Sat, 13 Apr 2019 
16:05:20 -0000 Subject: [Python-checkins] bpo-36623: Clean parser headers and include files (GH-12253) Message-ID: https://github.com/python/cpython/commit/f2cf1e3e2892a6326949c2570f1bb6d6c95715fb commit: f2cf1e3e2892a6326949c2570f1bb6d6c95715fb branch: master author: Pablo Galindo committer: GitHub date: 2019-04-13T17:05:14+01:00 summary: bpo-36623: Clean parser headers and include files (GH-12253) After the removal of pgen, multiple header and function prototypes that lack implementation or are unused are still lying around. files: A Misc/NEWS.d/next/Core and Builtins/2019-04-13-02-08-44.bpo-36623.HR_xhB.rst D Include/pgenheaders.h M Doc/whatsnew/3.8.rst M Include/bitset.h M Include/grammar.h M Makefile.pre.in M PCbuild/pythoncore.vcxproj M PCbuild/pythoncore.vcxproj.filters M Parser/acceler.c M Parser/grammar1.c M Parser/listnode.c M Parser/parser.c M Parser/parser.h M Parser/parsetok.c M Parser/pgen/grammar.py M Parser/tokenizer.c M Parser/tokenizer.h M Python/graminit.c M Python/strdup.c diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst index bf28e5ff4b06..39a0da5e61e9 100644 --- a/Doc/whatsnew/3.8.rst +++ b/Doc/whatsnew/3.8.rst @@ -814,6 +814,10 @@ Changes in the Python API by the installer). (See :issue:`36085`.) +* The header files and functions related to pgen have been removed after its + replacement by a pure Python implementation. (Contributed by Pablo Galindo + in :issue:`36623`.) + Changes in the C API -------------------- diff --git a/Include/bitset.h b/Include/bitset.h index b22fa77815cf..6a2ac9787eab 100644 --- a/Include/bitset.h +++ b/Include/bitset.h @@ -8,23 +8,14 @@ extern "C" { /* Bitset interface */ #define BYTE char - typedef BYTE *bitset; -bitset newbitset(int nbits); -void delbitset(bitset bs); #define testbit(ss, ibit) (((ss)[BIT2BYTE(ibit)] & BIT2MASK(ibit)) != 0) -int addbit(bitset bs, int ibit); /* Returns 0 if already set */ -int samebitset(bitset bs1, bitset bs2, int nbits); -void mergebitset(bitset bs1, bitset bs2, int nbits); #define BITSPERBYTE (8*sizeof(BYTE)) -#define NBYTES(nbits) (((nbits) + BITSPERBYTE - 1) / BITSPERBYTE) - #define BIT2BYTE(ibit) ((ibit) / BITSPERBYTE) #define BIT2SHIFT(ibit) ((ibit) % BITSPERBYTE) #define BIT2MASK(ibit) (1 << BIT2SHIFT(ibit)) -#define BYTE2BIT(ibyte) ((ibyte) * BITSPERBYTE) #ifdef __cplusplus } diff --git a/Include/grammar.h b/Include/grammar.h index 68b928c97189..7a6182bb76d5 100644 --- a/Include/grammar.h +++ b/Include/grammar.h @@ -66,27 +66,11 @@ typedef struct { } grammar; /* FUNCTIONS */ - -grammar *newgrammar(int start); -void freegrammar(grammar *g); -dfa *adddfa(grammar *g, int type, const char *name); -int addstate(dfa *d); -void addarc(dfa *d, int from, int to, int lbl); dfa *PyGrammar_FindDFA(grammar *g, int type); - -int addlabel(labellist *ll, int type, const char *str); -int findlabel(labellist *ll, int type, const char *str); const char *PyGrammar_LabelRepr(label *lb); -void translatelabels(grammar *g); - -void addfirstsets(grammar *g); - void PyGrammar_AddAccelerators(grammar *g); void PyGrammar_RemoveAccelerators(grammar *); -void printgrammar(grammar *g, FILE *fp); -void printnonterminals(grammar *g, FILE *fp); - #ifdef __cplusplus } #endif diff --git a/Include/pgenheaders.h b/Include/pgenheaders.h deleted file mode 100644 index dbc5e0a5f139..000000000000 --- a/Include/pgenheaders.h +++ /dev/null @@ -1,43 +0,0 @@ -#ifndef Py_PGENHEADERS_H -#define Py_PGENHEADERS_H -#ifdef __cplusplus -extern "C" { -#endif - - -/* Include files and extern declarations used by most of the parser. 
*/ - -#include "Python.h" - -PyAPI_FUNC(void) PySys_WriteStdout(const char *format, ...) - Py_GCC_ATTRIBUTE((format(printf, 1, 2))); -PyAPI_FUNC(void) PySys_WriteStderr(const char *format, ...) - Py_GCC_ATTRIBUTE((format(printf, 1, 2))); - -#define addarc _Py_addarc -#define addbit _Py_addbit -#define adddfa _Py_adddfa -#define addfirstsets _Py_addfirstsets -#define addlabel _Py_addlabel -#define addstate _Py_addstate -#define delbitset _Py_delbitset -#define dumptree _Py_dumptree -#define findlabel _Py_findlabel -#define freegrammar _Py_freegrammar -#define mergebitset _Py_mergebitset -#define meta_grammar _Py_meta_grammar -#define newbitset _Py_newbitset -#define newgrammar _Py_newgrammar -#define pgen _Py_pgen -#define printgrammar _Py_printgrammar -#define printnonterminals _Py_printnonterminals -#define printtree _Py_printtree -#define samebitset _Py_samebitset -#define showtree _Py_showtree -#define tok_dump _Py_tok_dump -#define translatelabels _Py_translatelabels - -#ifdef __cplusplus -} -#endif -#endif /* !Py_PGENHEADERS_H */ diff --git a/Makefile.pre.in b/Makefile.pre.in index 1cb8a590d45b..05c195767a8b 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1008,7 +1008,6 @@ PYTHON_HEADERS= \ $(srcdir)/Include/osdefs.h \ $(srcdir)/Include/osmodule.h \ $(srcdir)/Include/patchlevel.h \ - $(srcdir)/Include/pgenheaders.h \ $(srcdir)/Include/pyarena.h \ $(srcdir)/Include/pycapsule.h \ $(srcdir)/Include/pyctype.h \ diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-04-13-02-08-44.bpo-36623.HR_xhB.rst b/Misc/NEWS.d/next/Core and Builtins/2019-04-13-02-08-44.bpo-36623.HR_xhB.rst new file mode 100644 index 000000000000..cc90973e2964 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2019-04-13-02-08-44.bpo-36623.HR_xhB.rst @@ -0,0 +1,2 @@ +Remove parser headers and related function declarations that lack +implementations after the removal of pgen. diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index c9ff2f88d8e0..a980799461a3 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -175,7 +175,6 @@ - diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index 5dfa193f048a..f92433e3e0c9 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -267,9 +267,6 @@ Include - - Include - Include diff --git a/Parser/acceler.c b/Parser/acceler.c index 9b14263b461a..3a230c19bb9f 100644 --- a/Parser/acceler.c +++ b/Parser/acceler.c @@ -10,7 +10,7 @@ are not part of the static data structure written on graminit.[ch] by the parser generator. 
*/ -#include "pgenheaders.h" +#include "Python.h" #include "grammar.h" #include "node.h" #include "token.h" diff --git a/Parser/grammar1.c b/Parser/grammar1.c index 9c323911ab26..fec6d9ec0ee2 100644 --- a/Parser/grammar1.c +++ b/Parser/grammar1.c @@ -2,7 +2,6 @@ /* Grammar subroutines needed by parser */ #include "Python.h" -#include "pgenheaders.h" #include "grammar.h" #include "token.h" diff --git a/Parser/listnode.c b/Parser/listnode.c index 71300ae908ae..8f1a1163b63d 100644 --- a/Parser/listnode.c +++ b/Parser/listnode.c @@ -1,7 +1,7 @@ /* List a node on a file */ -#include "pgenheaders.h" +#include "Python.h" #include "token.h" #include "node.h" diff --git a/Parser/parser.c b/Parser/parser.c index fa4a8f011ff5..c21b6fdf466d 100644 --- a/Parser/parser.c +++ b/Parser/parser.c @@ -6,7 +6,6 @@ /* XXX To do: error recovery */ #include "Python.h" -#include "pgenheaders.h" #include "token.h" #include "grammar.h" #include "node.h" diff --git a/Parser/parser.h b/Parser/parser.h index aee1c86cb044..ebb06c2b1976 100644 --- a/Parser/parser.h +++ b/Parser/parser.h @@ -38,6 +38,11 @@ int PyParser_AddToken(parser_state *ps, int type, char *str, int *expected_ret); void PyGrammar_AddAccelerators(grammar *g); + +#define showtree _Py_showtree +#define printtree _Py_printtree +#define dumptree _Py_dumptree + #ifdef __cplusplus } #endif diff --git a/Parser/parsetok.c b/Parser/parsetok.c index ba33a9a0586f..31be0ebbde2d 100644 --- a/Parser/parsetok.c +++ b/Parser/parsetok.c @@ -1,7 +1,7 @@ /* Parser-tokenizer link implementation */ -#include "pgenheaders.h" +#include "Python.h" #include "tokenizer.h" #include "node.h" #include "grammar.h" diff --git a/Parser/pgen/grammar.py b/Parser/pgen/grammar.py index 340bf64f6d23..1ab9434fa887 100644 --- a/Parser/pgen/grammar.py +++ b/Parser/pgen/grammar.py @@ -61,7 +61,6 @@ def produce_graminit_h(self, writer): def produce_graminit_c(self, writer): writer("/* Generated by Parser/pgen */\n\n") - writer('#include "pgenheaders.h"\n') writer('#include "grammar.h"\n') writer("grammar _PyParser_Grammar;\n") diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c index 58dd1cd30b37..e8068f268074 100644 --- a/Parser/tokenizer.c +++ b/Parser/tokenizer.c @@ -2,7 +2,6 @@ /* Tokenizer implementation */ #include "Python.h" -#include "pgenheaders.h" #include #include diff --git a/Parser/tokenizer.h b/Parser/tokenizer.h index 06c7a14b70b0..92669bfd8a16 100644 --- a/Parser/tokenizer.h +++ b/Parser/tokenizer.h @@ -80,6 +80,8 @@ extern struct tok_state *PyTokenizer_FromFile(FILE *, const char*, extern void PyTokenizer_Free(struct tok_state *); extern int PyTokenizer_Get(struct tok_state *, char **, char **); +#define tok_dump _Py_tok_dump + #ifdef __cplusplus } #endif diff --git a/Python/graminit.c b/Python/graminit.c index 441502e90876..cd9003241700 100644 --- a/Python/graminit.c +++ b/Python/graminit.c @@ -1,6 +1,5 @@ /* Generated by Parser/pgen */ -#include "pgenheaders.h" #include "grammar.h" grammar _PyParser_Grammar; static arc arcs_0_0[3] = { diff --git a/Python/strdup.c b/Python/strdup.c index 99dc77417bd6..6ce171b21fe6 100644 --- a/Python/strdup.c +++ b/Python/strdup.c @@ -1,7 +1,5 @@ /* strdup() replacement (from stdwin, if you must know) */ -#include "pgenheaders.h" - char * strdup(const char *str) { From webhook-mailer at python.org Sat Apr 13 12:06:06 2019 From: webhook-mailer at python.org (Pablo Galindo) Date: Sat, 13 Apr 2019 16:06:06 -0000 Subject: [Python-checkins] Skip test_preadv_flags if RWF_HIPRI is not supported by the system (GH-12762) Message-ID: 
https://github.com/python/cpython/commit/46544f69bff1c3c4173d461be35993ca0109f622 commit: 46544f69bff1c3c4173d461be35993ca0109f622 branch: master author: Pablo Galindo committer: GitHub date: 2019-04-13T17:06:03+01:00 summary: Skip test_preadv_flags if RWF_HIPRI is not supported by the system (GH-12762) files: M Lib/test/test_posix.py diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py index fe21b21b3385..843402930ffc 100644 --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -310,6 +310,14 @@ def test_preadv_flags(self): buf = [bytearray(i) for i in [5, 3, 2]] self.assertEqual(posix.preadv(fd, buf, 3, os.RWF_HIPRI), 10) self.assertEqual([b't1tt2', b't3t', b'5t'], list(buf)) + except OSError as inst: + # Is possible that the macro RWF_HIPRI was defined at compilation time + # but the option is not supported by the kernel or the runtime libc shared + # library. + if inst.errno in {errno.EINVAL, errno.ENOTSUP}: + raise unittest.SkipTest("RWF_HIPRI is not supported by the current system") + else: + raise finally: os.close(fd) From webhook-mailer at python.org Sat Apr 13 12:23:28 2019 From: webhook-mailer at python.org (Pablo Galindo) Date: Sat, 13 Apr 2019 16:23:28 -0000 Subject: [Python-checkins] bpo-36427: Document that PyEval_RestoreThread and PyGILState_Ensure can terminate the calling thread (GH-12541) Message-ID: https://github.com/python/cpython/commit/fde9b33dfeedd4a4ed723b12d2330979dc684760 commit: fde9b33dfeedd4a4ed723b12d2330979dc684760 branch: master author: Pablo Galindo committer: GitHub date: 2019-04-13T17:23:24+01:00 summary: bpo-36427: Document that PyEval_RestoreThread and PyGILState_Ensure can terminate the calling thread (GH-12541) Calling these function from a thread when the runtime is finalizing will terminate the thread, even if the thread was not created by Python. Users can use _Py_IsFinalizing or sys.is_finalizing to check if the interpreter is in the process of being finalized before calling this function to avoid unwanted termination. files: M Doc/c-api/init.rst diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst index 7c1f0ffa44a3..7ef11228a33d 100644 --- a/Doc/c-api/init.rst +++ b/Doc/c-api/init.rst @@ -856,6 +856,12 @@ code, or when embedding the Python interpreter: created, the current thread must not have acquired it, otherwise deadlock ensues. + .. note:: + Calling this function from a thread when the runtime is finalizing + will terminate the thread, even if the thread was not created by Python. + You can use :c:func:`_Py_IsFinalizing` or :func:`sys.is_finalizing` to + check if the interpreter is in process of being finalized before calling + this function to avoid unwanted termination. .. c:function:: PyThreadState* PyThreadState_Get() @@ -903,6 +909,12 @@ with sub-interpreters: When the function returns, the current thread will hold the GIL and be able to call arbitrary Python code. Failure is a fatal error. + .. note:: + Calling this function from a thread when the runtime is finalizing + will terminate the thread, even if the thread was not created by Python. + You can use :c:func:`_Py_IsFinalizing` or :func:`sys.is_finalizing` to + check if the interpreter is in process of being finalized before calling + this function to avoid unwanted termination. .. 
c:function:: void PyGILState_Release(PyGILState_STATE) From webhook-mailer at python.org Sat Apr 13 12:25:23 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 13 Apr 2019 16:25:23 -0000 Subject: [Python-checkins] Skip test_preadv_flags if RWF_HIPRI is not supported by the system (GH-12762) Message-ID: https://github.com/python/cpython/commit/d28aaa7df8bcd46f4135d240d041b0b171b664cc commit: d28aaa7df8bcd46f4135d240d041b0b171b664cc branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-13T09:25:20-07:00 summary: Skip test_preadv_flags if RWF_HIPRI is not supported by the system (GH-12762) (cherry picked from commit 46544f69bff1c3c4173d461be35993ca0109f622) Co-authored-by: Pablo Galindo files: M Lib/test/test_posix.py diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py index 11180b7278c9..55077354589d 100644 --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -308,6 +308,14 @@ def test_preadv_flags(self): buf = [bytearray(i) for i in [5, 3, 2]] self.assertEqual(posix.preadv(fd, buf, 3, os.RWF_HIPRI), 10) self.assertEqual([b't1tt2', b't3t', b'5t'], list(buf)) + except OSError as inst: + # Is possible that the macro RWF_HIPRI was defined at compilation time + # but the option is not supported by the kernel or the runtime libc shared + # library. + if inst.errno in {errno.EINVAL, errno.ENOTSUP}: + raise unittest.SkipTest("RWF_HIPRI is not supported by the current system") + else: + raise finally: os.close(fd) From webhook-mailer at python.org Sat Apr 13 15:12:39 2019 From: webhook-mailer at python.org (Pablo Galindo) Date: Sat, 13 Apr 2019 19:12:39 -0000 Subject: [Python-checkins] bpo-36593: Fix isinstance check for Mock objects with spec executed under tracing (GH-12790) Message-ID: https://github.com/python/cpython/commit/830b43d03cc47a27a22a50d777f23c8e60820867 commit: 830b43d03cc47a27a22a50d777f23c8e60820867 branch: master author: Xtreak committer: Pablo Galindo date: 2019-04-13T20:12:33+01:00 summary: bpo-36593: Fix isinstance check for Mock objects with spec executed under tracing (GH-12790) In Python having a trace function in effect while mock is imported causes isinstance to be wrong for MagicMocks. This is due to the usage of super() in some class methods, as this sets the __class__ attribute. To avoid this, as a workaround, alias the usage of super . files: A Misc/NEWS.d/next/Library/2019-04-11-22-11-24.bpo-36598.hfzDUl.rst M Lib/unittest/mock.py M Lib/unittest/test/testmock/testmock.py diff --git a/Lib/unittest/mock.py b/Lib/unittest/mock.py index 8684f1dfa572..0e77f0e48943 100644 --- a/Lib/unittest/mock.py +++ b/Lib/unittest/mock.py @@ -739,7 +739,7 @@ def __delattr__(self, name): obj = self._mock_children.get(name, _missing) if name in self.__dict__: - super().__delattr__(name) + _safe_super(NonCallableMock, self).__delattr__(name) elif obj is _deleted: raise AttributeError(name) if obj is not _missing: diff --git a/Lib/unittest/test/testmock/testmock.py b/Lib/unittest/test/testmock/testmock.py index 66a5720d1432..37f14c37f47d 100644 --- a/Lib/unittest/test/testmock/testmock.py +++ b/Lib/unittest/test/testmock/testmock.py @@ -1847,6 +1847,44 @@ def foo(a, b): self.assertRaises(TypeError, mock.child, 1) self.assertEqual(mock.mock_calls, [call.child(1, 2)]) + def test_isinstance_under_settrace(self): + # bpo-36593 : __class__ is not set for a class that has __class__ + # property defined when it's used with sys.settrace(trace) set. 
+ # Delete the module to force reimport with tracing function set + # restore the old reference later since there are other tests that are + # dependent on unittest.mock.patch. In testpatch.PatchTest + # test_patch_dict_test_prefix and test_patch_test_prefix not restoring + # causes the objects patched to go out of sync + + old_patch = unittest.mock.patch + + # Directly using __setattr__ on unittest.mock causes current imported + # reference to be updated. Use a lambda so that during cleanup the + # re-imported new reference is updated. + self.addCleanup(lambda patch: setattr(unittest.mock, 'patch', patch), + old_patch) + + with patch.dict('sys.modules'): + del sys.modules['unittest.mock'] + + def trace(frame, event, arg): + return trace + + sys.settrace(trace) + self.addCleanup(sys.settrace, None) + + from unittest.mock import ( + Mock, MagicMock, NonCallableMock, NonCallableMagicMock + ) + + mocks = [ + Mock, MagicMock, NonCallableMock, NonCallableMagicMock + ] + + for mock in mocks: + obj = mock(spec=Something) + self.assertIsInstance(obj, Something) + if __name__ == '__main__': unittest.main() diff --git a/Misc/NEWS.d/next/Library/2019-04-11-22-11-24.bpo-36598.hfzDUl.rst b/Misc/NEWS.d/next/Library/2019-04-11-22-11-24.bpo-36598.hfzDUl.rst new file mode 100644 index 000000000000..2a7980209136 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-04-11-22-11-24.bpo-36598.hfzDUl.rst @@ -0,0 +1,2 @@ +Fix ``isinstance`` check for Mock objects with spec when the code is +executed under tracing. Patch by Karthikeyan Singaravelan. From webhook-mailer at python.org Sat Apr 13 15:32:01 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 13 Apr 2019 19:32:01 -0000 Subject: [Python-checkins] bpo-36593: Fix isinstance check for Mock objects with spec executed under tracing (GH-12790) Message-ID: https://github.com/python/cpython/commit/f3a9d722d77753f5110e35f46bd61732c0cb81c1 commit: f3a9d722d77753f5110e35f46bd61732c0cb81c1 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-13T12:31:58-07:00 summary: bpo-36593: Fix isinstance check for Mock objects with spec executed under tracing (GH-12790) In Python having a trace function in effect while mock is imported causes isinstance to be wrong for MagicMocks. This is due to the usage of super() in some class methods, as this sets the __class__ attribute. To avoid this, as a workaround, alias the usage of super . 
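A minimal sketch of the scenario covered by the new test, assuming unittest.mock has not yet been imported in the process (the Something class is just a stand-in for any spec target):

    import sys

    class Something:
        pass

    def trace(frame, event, arg):
        return trace

    # The problem only appeared when unittest.mock was first imported while a
    # trace function was active, so the trace function is installed first.
    sys.settrace(trace)
    try:
        from unittest import mock
        obj = mock.MagicMock(spec=Something)
        # With the fix applied the spec'd mock still passes the isinstance check.
        print(isinstance(obj, Something))   # True
    finally:
        sys.settrace(None)
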
(cherry picked from commit 830b43d03cc47a27a22a50d777f23c8e60820867) Co-authored-by: Xtreak files: A Misc/NEWS.d/next/Library/2019-04-11-22-11-24.bpo-36598.hfzDUl.rst M Lib/unittest/mock.py M Lib/unittest/test/testmock/testmock.py diff --git a/Lib/unittest/mock.py b/Lib/unittest/mock.py index 5b8e74414035..373e1d5f64d8 100644 --- a/Lib/unittest/mock.py +++ b/Lib/unittest/mock.py @@ -738,7 +738,7 @@ def __delattr__(self, name): obj = self._mock_children.get(name, _missing) if name in self.__dict__: - super().__delattr__(name) + _safe_super(NonCallableMock, self).__delattr__(name) elif obj is _deleted: raise AttributeError(name) if obj is not _missing: diff --git a/Lib/unittest/test/testmock/testmock.py b/Lib/unittest/test/testmock/testmock.py index 447a502b57d6..2f50236d1ece 100644 --- a/Lib/unittest/test/testmock/testmock.py +++ b/Lib/unittest/test/testmock/testmock.py @@ -1811,6 +1811,44 @@ def foo(a, b): self.assertRaises(TypeError, mock.child, 1) self.assertEqual(mock.mock_calls, [call.child(1, 2)]) + def test_isinstance_under_settrace(self): + # bpo-36593 : __class__ is not set for a class that has __class__ + # property defined when it's used with sys.settrace(trace) set. + # Delete the module to force reimport with tracing function set + # restore the old reference later since there are other tests that are + # dependent on unittest.mock.patch. In testpatch.PatchTest + # test_patch_dict_test_prefix and test_patch_test_prefix not restoring + # causes the objects patched to go out of sync + + old_patch = unittest.mock.patch + + # Directly using __setattr__ on unittest.mock causes current imported + # reference to be updated. Use a lambda so that during cleanup the + # re-imported new reference is updated. + self.addCleanup(lambda patch: setattr(unittest.mock, 'patch', patch), + old_patch) + + with patch.dict('sys.modules'): + del sys.modules['unittest.mock'] + + def trace(frame, event, arg): + return trace + + sys.settrace(trace) + self.addCleanup(sys.settrace, None) + + from unittest.mock import ( + Mock, MagicMock, NonCallableMock, NonCallableMagicMock + ) + + mocks = [ + Mock, MagicMock, NonCallableMock, NonCallableMagicMock + ] + + for mock in mocks: + obj = mock(spec=Something) + self.assertIsInstance(obj, Something) + if __name__ == '__main__': unittest.main() diff --git a/Misc/NEWS.d/next/Library/2019-04-11-22-11-24.bpo-36598.hfzDUl.rst b/Misc/NEWS.d/next/Library/2019-04-11-22-11-24.bpo-36598.hfzDUl.rst new file mode 100644 index 000000000000..2a7980209136 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-04-11-22-11-24.bpo-36598.hfzDUl.rst @@ -0,0 +1,2 @@ +Fix ``isinstance`` check for Mock objects with spec when the code is +executed under tracing. Patch by Karthikeyan Singaravelan. From webhook-mailer at python.org Sat Apr 13 22:49:33 2019 From: webhook-mailer at python.org (Pablo Galindo) Date: Sun, 14 Apr 2019 02:49:33 -0000 Subject: [Python-checkins] [3.7] bpo-36427: Document that PyEval_RestoreThread and PyGILState_Ensure can terminate the calling thread (GH-12541) (GH-12820) Message-ID: https://github.com/python/cpython/commit/7723d0545c3369e1b2601b207c250c70ce90b75e commit: 7723d0545c3369e1b2601b207c250c70ce90b75e branch: 3.7 author: Pablo Galindo committer: GitHub date: 2019-04-14T03:49:17+01:00 summary: [3.7] bpo-36427: Document that PyEval_RestoreThread and PyGILState_Ensure can terminate the calling thread (GH-12541) (GH-12820) Calling these function from a thread when the runtime is finalizing will terminate the thread, even if the thread was not created by Python. 
Users can use _Py_IsFinalizing or sys.is_finalizing to check if the interpreter is in the process of being finalized before calling this function to avoid unwanted termination. (cherry picked from commit fde9b33) files: M Doc/c-api/init.rst diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst index a88873bd8f23..93fcfe626078 100644 --- a/Doc/c-api/init.rst +++ b/Doc/c-api/init.rst @@ -856,6 +856,12 @@ code, or when embedding the Python interpreter: *NULL*. If the lock has been created, the current thread must not have acquired it, otherwise deadlock ensues. + .. note:: + Calling this function from a thread when the runtime is finalizing + will terminate the thread, even if the thread was not created by Python. + You can use :c:func:`_Py_IsFinalizing` or :func:`sys.is_finalizing` to + check if the interpreter is in process of being finalized before calling + this function to avoid unwanted termination. .. c:function:: PyThreadState* PyThreadState_Get() @@ -903,6 +909,12 @@ with sub-interpreters: When the function returns, the current thread will hold the GIL and be able to call arbitrary Python code. Failure is a fatal error. + .. note:: + Calling this function from a thread when the runtime is finalizing + will terminate the thread, even if the thread was not created by Python. + You can use :c:func:`_Py_IsFinalizing` or :func:`sys.is_finalizing` to + check if the interpreter is in process of being finalized before calling + this function to avoid unwanted termination. .. c:function:: void PyGILState_Release(PyGILState_STATE) From webhook-mailer at python.org Sun Apr 14 04:07:17 2019 From: webhook-mailer at python.org (Stefan Behnel) Date: Sun, 14 Apr 2019 08:07:17 -0000 Subject: [Python-checkins] bpo-36227: ElementTree.tostring() default_namespace and xml_declaration arguments (GH-12225) Message-ID: https://github.com/python/cpython/commit/ffca16e25a70fd44a87b13b379b5ec0c7a11e926 commit: ffca16e25a70fd44a87b13b379b5ec0c7a11e926 branch: master author: Bernt R?skar Brenna committer: Stefan Behnel date: 2019-04-14T10:07:02+02:00 summary: bpo-36227: ElementTree.tostring() default_namespace and xml_declaration arguments (GH-12225) Add new keyword arguments "default_namespace" and "xml_declaration" to functions ET.tostring() and ET.tostringlist(), as known from ElementTree.write(). files: A Misc/NEWS.d/next/Library/2019-03-07-20-02-18.bpo-36227.i2Z1XR.rst M Doc/library/xml.etree.elementtree.rst M Lib/test/test_xml_etree.py M Lib/xml/etree/ElementTree.py diff --git a/Doc/library/xml.etree.elementtree.rst b/Doc/library/xml.etree.elementtree.rst index 4a7cf6f09588..9bee0eadc289 100644 --- a/Doc/library/xml.etree.elementtree.rst +++ b/Doc/library/xml.etree.elementtree.rst @@ -594,6 +594,7 @@ Functions .. function:: tostring(element, encoding="us-ascii", method="xml", *, \ + xml_declaration=None, default_namespace=None, short_empty_elements=True) Generates a string representation of an XML element, including all @@ -601,14 +602,19 @@ Functions the output encoding (default is US-ASCII). Use ``encoding="unicode"`` to generate a Unicode string (otherwise, a bytestring is generated). *method* is either ``"xml"``, ``"html"`` or ``"text"`` (default is ``"xml"``). - *short_empty_elements* has the same meaning as in :meth:`ElementTree.write`. - Returns an (optionally) encoded string containing the XML data. + *xml_declaration*, *default_namespace* and *short_empty_elements* has the same + meaning as in :meth:`ElementTree.write`. Returns an (optionally) encoded string + containing the XML data. .. 
versionadded:: 3.4 The *short_empty_elements* parameter. + .. versionadded:: 3.8 + The *xml_declaration* and *default_namespace* parameters. + .. function:: tostringlist(element, encoding="us-ascii", method="xml", *, \ + xml_declaration=None, default_namespace=None, short_empty_elements=True) Generates a string representation of an XML element, including all @@ -616,16 +622,19 @@ Functions the output encoding (default is US-ASCII). Use ``encoding="unicode"`` to generate a Unicode string (otherwise, a bytestring is generated). *method* is either ``"xml"``, ``"html"`` or ``"text"`` (default is ``"xml"``). - *short_empty_elements* has the same meaning as in :meth:`ElementTree.write`. - Returns a list of (optionally) encoded strings containing the XML data. - It does not guarantee any specific sequence, except that - ``b"".join(tostringlist(element)) == tostring(element)``. + *xml_declaration*, *default_namespace* and *short_empty_elements* has the same + meaning as in :meth:`ElementTree.write`. Returns a list of (optionally) encoded + strings containing the XML data. It does not guarantee any specific sequence, + except that ``b"".join(tostringlist(element)) == tostring(element)``. .. versionadded:: 3.2 .. versionadded:: 3.4 The *short_empty_elements* parameter. + .. versionadded:: 3.8 + The *xml_declaration* and *default_namespace* parameters. + .. function:: XML(text, parser=None) diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py index 8a7ec0076ff0..bdcd4e0d19a7 100644 --- a/Lib/test/test_xml_etree.py +++ b/Lib/test/test_xml_etree.py @@ -9,6 +9,7 @@ import functools import html import io +import locale import operator import pickle import sys @@ -756,6 +757,128 @@ def test_writestring(self): elem = ET.fromstring("text") self.assertEqual(ET.tostring(elem), b'text') + def test_tostring_default_namespace(self): + elem = ET.XML('') + self.assertEqual( + ET.tostring(elem, encoding='unicode'), + '' + ) + self.assertEqual( + ET.tostring(elem, encoding='unicode', default_namespace='http://effbot.org/ns'), + '' + ) + + def test_tostring_default_namespace_different_namespace(self): + elem = ET.XML('') + self.assertEqual( + ET.tostring(elem, encoding='unicode', default_namespace='foobar'), + '' + ) + + def test_tostring_default_namespace_original_no_namespace(self): + elem = ET.XML('') + EXPECTED_MSG = '^cannot use non-qualified names with default_namespace option$' + with self.assertRaisesRegex(ValueError, EXPECTED_MSG): + ET.tostring(elem, encoding='unicode', default_namespace='foobar') + + def test_tostring_no_xml_declaration(self): + elem = ET.XML('') + self.assertEqual( + ET.tostring(elem, encoding='unicode'), + '' + ) + + def test_tostring_xml_declaration(self): + elem = ET.XML('') + self.assertEqual( + ET.tostring(elem, encoding='utf8', xml_declaration=True), + b"\n" + ) + + def test_tostring_xml_declaration_unicode_encoding(self): + elem = ET.XML('') + preferredencoding = locale.getpreferredencoding() + self.assertEqual( + f"\n", + ET.tostring(elem, encoding='unicode', xml_declaration=True) + ) + + def test_tostring_xml_declaration_cases(self): + elem = ET.XML('?') + preferredencoding = locale.getpreferredencoding() + TESTCASES = [ + # (expected_retval, encoding, xml_declaration) + # ... xml_declaration = None + (b'ø', None, None), + (b'\xc3\xb8', 'UTF-8', None), + (b'ø', 'US-ASCII', None), + (b"\n" + b"\xf8", 'ISO-8859-1', None), + ('?', 'unicode', None), + + # ... 
xml_declaration = False + (b"ø", None, False), + (b"\xc3\xb8", 'UTF-8', False), + (b"ø", 'US-ASCII', False), + (b"\xf8", 'ISO-8859-1', False), + ("?", 'unicode', False), + + # ... xml_declaration = True + (b"\n" + b"ø", None, True), + (b"\n" + b"\xc3\xb8", 'UTF-8', True), + (b"\n" + b"ø", 'US-ASCII', True), + (b"\n" + b"\xf8", 'ISO-8859-1', True), + (f"\n" + "?", 'unicode', True), + + ] + for expected_retval, encoding, xml_declaration in TESTCASES: + with self.subTest(f'encoding={encoding} ' + f'xml_declaration={xml_declaration}'): + self.assertEqual( + ET.tostring( + elem, + encoding=encoding, + xml_declaration=xml_declaration + ), + expected_retval + ) + + def test_tostringlist_default_namespace(self): + elem = ET.XML('') + self.assertEqual( + ''.join(ET.tostringlist(elem, encoding='unicode')), + '' + ) + self.assertEqual( + ''.join(ET.tostringlist(elem, encoding='unicode', default_namespace='http://effbot.org/ns')), + '' + ) + + def test_tostringlist_xml_declaration(self): + elem = ET.XML('') + self.assertEqual( + ''.join(ET.tostringlist(elem, encoding='unicode')), + '' + ) + self.assertEqual( + b''.join(ET.tostringlist(elem, xml_declaration=True)), + b"\n" + ) + + preferredencoding = locale.getpreferredencoding() + stringlist = ET.tostringlist(elem, encoding='unicode', xml_declaration=True) + self.assertEqual( + ''.join(stringlist), + f"\n" + ) + self.assertRegex(stringlist[0], r"^<\?xml version='1.0' encoding='.+'?>") + self.assertEqual(['', '', ''], stringlist[1:]) + def test_encoding(self): def check(encoding, body=''): xml = ("%s" % diff --git a/Lib/xml/etree/ElementTree.py b/Lib/xml/etree/ElementTree.py index b5ad8e1d1406..c9e2f3683502 100644 --- a/Lib/xml/etree/ElementTree.py +++ b/Lib/xml/etree/ElementTree.py @@ -1113,6 +1113,7 @@ def _escape_attrib_html(text): # -------------------------------------------------------------------- def tostring(element, encoding=None, method=None, *, + xml_declaration=None, default_namespace=None, short_empty_elements=True): """Generate string representation of XML element. @@ -1121,13 +1122,17 @@ def tostring(element, encoding=None, method=None, *, *element* is an Element instance, *encoding* is an optional output encoding defaulting to US-ASCII, *method* is an optional output which can - be one of "xml" (default), "html", "text" or "c14n". + be one of "xml" (default), "html", "text" or "c14n", *default_namespace* + sets the default XML namespace (for "xmlns"). Returns an (optionally) encoded string containing the XML data. 
""" stream = io.StringIO() if encoding == 'unicode' else io.BytesIO() - ElementTree(element).write(stream, encoding, method=method, + ElementTree(element).write(stream, encoding, + xml_declaration=xml_declaration, + default_namespace=default_namespace, + method=method, short_empty_elements=short_empty_elements) return stream.getvalue() @@ -1149,10 +1154,14 @@ def tell(self): return len(self.lst) def tostringlist(element, encoding=None, method=None, *, + xml_declaration=None, default_namespace=None, short_empty_elements=True): lst = [] stream = _ListDataStream(lst) - ElementTree(element).write(stream, encoding, method=method, + ElementTree(element).write(stream, encoding, + xml_declaration=xml_declaration, + default_namespace=default_namespace, + method=method, short_empty_elements=short_empty_elements) return lst diff --git a/Misc/NEWS.d/next/Library/2019-03-07-20-02-18.bpo-36227.i2Z1XR.rst b/Misc/NEWS.d/next/Library/2019-03-07-20-02-18.bpo-36227.i2Z1XR.rst new file mode 100644 index 000000000000..3b5b6cda09c8 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-03-07-20-02-18.bpo-36227.i2Z1XR.rst @@ -0,0 +1,2 @@ +Added support for keyword arguments `default_namespace` and `xml_declaration` in functions +ElementTree.tostring() and ElementTree.tostringlist(). From webhook-mailer at python.org Sun Apr 14 04:09:15 2019 From: webhook-mailer at python.org (Stefan Behnel) Date: Sun, 14 Apr 2019 08:09:15 -0000 Subject: [Python-checkins] bpo-30485: support a default prefix mapping in ElementPath by passing None as prefix (#1823) Message-ID: https://github.com/python/cpython/commit/e9927e1820caea01e576141d9a623ea394d43dad commit: e9927e1820caea01e576141d9a623ea394d43dad branch: master author: Stefan Behnel committer: GitHub date: 2019-04-14T10:09:09+02:00 summary: bpo-30485: support a default prefix mapping in ElementPath by passing None as prefix (#1823) files: A Misc/NEWS.d/next/Library/2019-04-13-23-42-33.bpo-30485.JHhjJS.rst M Doc/library/xml.etree.elementtree.rst M Lib/test/test_xml_etree.py M Lib/xml/etree/ElementPath.py diff --git a/Doc/library/xml.etree.elementtree.rst b/Doc/library/xml.etree.elementtree.rst index 9bee0eadc289..c83e719e959a 100644 --- a/Doc/library/xml.etree.elementtree.rst +++ b/Doc/library/xml.etree.elementtree.rst @@ -764,7 +764,8 @@ Element Objects Finds the first subelement matching *match*. *match* may be a tag name or a :ref:`path `. Returns an element instance or ``None``. *namespaces* is an optional mapping from namespace prefix - to full name. + to full name. Pass ``None`` as prefix to move all unprefixed tag names + in the expression into the given namespace. .. method:: findall(match, namespaces=None) @@ -772,7 +773,8 @@ Element Objects Finds all matching subelements, by tag name or :ref:`path `. Returns a list containing all matching elements in document order. *namespaces* is an optional mapping from - namespace prefix to full name. + namespace prefix to full name. Pass ``None`` as prefix to move all + unprefixed tag names in the expression into the given namespace. .. method:: findtext(match, default=None, namespaces=None) @@ -782,7 +784,8 @@ Element Objects of the first matching element, or *default* if no element was found. Note that if the matching element has no text content an empty string is returned. *namespaces* is an optional mapping from namespace prefix - to full name. + to full name. Pass ``None`` as prefix to move all unprefixed tag names + in the expression into the given namespace. .. 
method:: getchildren() diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py index bdcd4e0d19a7..2f7a3b60b22d 100644 --- a/Lib/test/test_xml_etree.py +++ b/Lib/test/test_xml_etree.py @@ -2463,6 +2463,12 @@ def test_findall_different_nsmaps(self): nsmap = {'xx': 'Y'} self.assertEqual(len(root.findall(".//xx:b", namespaces=nsmap)), 1) self.assertEqual(len(root.findall(".//b", namespaces=nsmap)), 2) + nsmap = {'xx': 'X', None: 'Y'} + self.assertEqual(len(root.findall(".//xx:b", namespaces=nsmap)), 2) + self.assertEqual(len(root.findall(".//b", namespaces=nsmap)), 1) + nsmap = {'xx': 'X', '': 'Y'} + with self.assertRaisesRegex(ValueError, 'namespace prefix'): + root.findall(".//xx:b", namespaces=nsmap) def test_bad_find(self): e = ET.XML(SAMPLE_XML) diff --git a/Lib/xml/etree/ElementPath.py b/Lib/xml/etree/ElementPath.py index ef32917b14d4..0e3854f9db22 100644 --- a/Lib/xml/etree/ElementPath.py +++ b/Lib/xml/etree/ElementPath.py @@ -71,16 +71,22 @@ ) def xpath_tokenizer(pattern, namespaces=None): + default_namespace = namespaces.get(None) if namespaces else None for token in xpath_tokenizer_re.findall(pattern): tag = token[1] - if tag and tag[0] != "{" and ":" in tag: - try: + if tag and tag[0] != "{": + if ":" in tag: prefix, uri = tag.split(":", 1) - if not namespaces: - raise KeyError - yield token[0], "{%s}%s" % (namespaces[prefix], uri) - except KeyError: - raise SyntaxError("prefix %r not found in prefix map" % prefix) from None + try: + if not namespaces: + raise KeyError + yield token[0], "{%s}%s" % (namespaces[prefix], uri) + except KeyError: + raise SyntaxError("prefix %r not found in prefix map" % prefix) from None + elif default_namespace: + yield token[0], "{%s}%s" % (default_namespace, tag) + else: + yield token else: yield token @@ -264,10 +270,19 @@ def __init__(self, root): def iterfind(elem, path, namespaces=None): # compile selector pattern - cache_key = (path, None if namespaces is None - else tuple(sorted(namespaces.items()))) if path[-1:] == "/": path = path + "*" # implicit all (FIXME: keep this?) + + cache_key = (path,) + if namespaces: + if '' in namespaces: + raise ValueError("empty namespace prefix must be passed as None, not the empty string") + if None in namespaces: + cache_key += (namespaces[None],) + tuple(sorted( + item for item in namespaces.items() if item[0] is not None)) + else: + cache_key += tuple(sorted(namespaces.items())) + try: selector = _cache[cache_key] except KeyError: diff --git a/Misc/NEWS.d/next/Library/2019-04-13-23-42-33.bpo-30485.JHhjJS.rst b/Misc/NEWS.d/next/Library/2019-04-13-23-42-33.bpo-30485.JHhjJS.rst new file mode 100644 index 000000000000..6c82efd3e009 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-04-13-23-42-33.bpo-30485.JHhjJS.rst @@ -0,0 +1,3 @@ +Path expressions in xml.etree.ElementTree can now avoid explicit namespace +prefixes for tags (or the "{namespace}tag" notation) by passing a default +namespace with a 'None' prefix. 
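Taken together with the tostring() change above, the new namespace handling can be exercised roughly like this (a sketch against the 3.8 API; the URI and tag names are made up for illustration):

    import xml.etree.ElementTree as ET

    root = ET.XML('<root xmlns="http://example.org/ns"><child/><child/></root>')

    # bpo-30485: a None prefix maps unprefixed tags in the path expression
    # into the given namespace, avoiding the "{uri}tag" notation.
    hits = root.findall('.//child', namespaces={None: 'http://example.org/ns'})
    print(len(hits))   # 2

    # bpo-36227: tostring()/tostringlist() now accept xml_declaration and
    # default_namespace, mirroring ElementTree.write().
    print(ET.tostring(root, encoding='unicode',
                      default_namespace='http://example.org/ns'))
    print(ET.tostring(root, encoding='utf8', xml_declaration=True))
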
From webhook-mailer at python.org Sun Apr 14 05:17:13 2019 From: webhook-mailer at python.org (Stefan Behnel) Date: Sun, 14 Apr 2019 09:17:13 -0000 Subject: [Python-checkins] bpo-31658: Make xml.sax.parse accepting Path objects (GH-8564) Message-ID: https://github.com/python/cpython/commit/929b70473829f04dedb8e802abcbd506926886e1 commit: 929b70473829f04dedb8e802abcbd506926886e1 branch: master author: Micka?l Schoentgen committer: Stefan Behnel date: 2019-04-14T11:16:54+02:00 summary: bpo-31658: Make xml.sax.parse accepting Path objects (GH-8564) files: A Misc/NEWS.d/next/Library/2018-07-30-12-00-15.bpo-31658._bx7a_.rst M Doc/library/xml.sax.reader.rst M Lib/test/test_sax.py M Lib/xml/sax/saxutils.py diff --git a/Doc/library/xml.sax.reader.rst b/Doc/library/xml.sax.reader.rst index 1b6e43145b90..113e9e93fb04 100644 --- a/Doc/library/xml.sax.reader.rst +++ b/Doc/library/xml.sax.reader.rst @@ -102,13 +102,17 @@ The :class:`XMLReader` interface supports the following methods: Process an input source, producing SAX events. The *source* object can be a system identifier (a string identifying the input source -- typically a file - name or a URL), a file-like object, or an :class:`InputSource` object. When + name or a URL), a :class:`pathlib.Path` or :term:`path-like ` + object, or an :class:`InputSource` object. When :meth:`parse` returns, the input is completely processed, and the parser object can be discarded or reset. .. versionchanged:: 3.5 Added support of character streams. + .. versionchanged:: 3.8 + Added support of path-like objects. + .. method:: XMLReader.getContentHandler() diff --git a/Lib/test/test_sax.py b/Lib/test/test_sax.py index 9addc06f20dd..da4eb1da3c6a 100644 --- a/Lib/test/test_sax.py +++ b/Lib/test/test_sax.py @@ -21,7 +21,7 @@ import shutil from urllib.error import URLError from test import support -from test.support import findfile, run_unittest, TESTFN +from test.support import findfile, run_unittest, FakePath, TESTFN TEST_XMLFILE = findfile("test.xml", subdir="xmltestdata") TEST_XMLFILE_OUT = findfile("test.xml.out", subdir="xmltestdata") @@ -182,6 +182,10 @@ def test_parse_bytes(self): with self.assertRaises(SAXException): self.check_parse(f) + def test_parse_path_object(self): + make_xml_file(self.data, 'utf-8', None) + self.check_parse(FakePath(TESTFN)) + def test_parse_InputSource(self): # accept data without declared but with explicitly specified encoding make_xml_file(self.data, 'iso-8859-1', None) @@ -397,6 +401,13 @@ def test_string(self): self.checkContent(prep.getByteStream(), b"This was read from a file.") + def test_path_objects(self): + # If the source is a Path object, use it as a system ID and open it. + prep = prepare_input_source(FakePath(self.file)) + self.assertIsNone(prep.getCharacterStream()) + self.checkContent(prep.getByteStream(), + b"This was read from a file.") + def test_binary_file(self): # If the source is a binary file-like object, use it as a byte # stream. 
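In practice, the prepare_input_source() change in the next hunk means a path-like source can be handed straight to the parser, as in this sketch (data.xml is only an illustrative filename):

    import pathlib
    import xml.sax

    # A Path object is now accepted as the source, alongside system-ID
    # strings, file objects and InputSource objects.
    xml.sax.parse(pathlib.Path('data.xml'), xml.sax.ContentHandler())
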
diff --git a/Lib/xml/sax/saxutils.py b/Lib/xml/sax/saxutils.py index b4fc2da76408..c1612ea1cebc 100644 --- a/Lib/xml/sax/saxutils.py +++ b/Lib/xml/sax/saxutils.py @@ -339,6 +339,8 @@ def prepare_input_source(source, base=""): """This function takes an InputSource and an optional base URL and returns a fully resolved InputSource object ready for reading.""" + if isinstance(source, os.PathLike): + source = os.fspath(source) if isinstance(source, str): source = xmlreader.InputSource(source) elif hasattr(source, "read"): diff --git a/Misc/NEWS.d/next/Library/2018-07-30-12-00-15.bpo-31658._bx7a_.rst b/Misc/NEWS.d/next/Library/2018-07-30-12-00-15.bpo-31658._bx7a_.rst new file mode 100644 index 000000000000..8b35060fd737 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2018-07-30-12-00-15.bpo-31658._bx7a_.rst @@ -0,0 +1,2 @@ +:func:`xml.sax.parse` now supports :term:`path-like `. +Patch by Micka?l Schoentgen. From webhook-mailer at python.org Sun Apr 14 13:32:11 2019 From: webhook-mailer at python.org (Gregory P. Smith) Date: Sun, 14 Apr 2019 17:32:11 -0000 Subject: [Python-checkins] bpo-16079: fix duplicate test method name in test_gzip. (GH-12827) Message-ID: https://github.com/python/cpython/commit/cd466559c4a312b3c1223a774ad4df19fc4f0407 commit: cd466559c4a312b3c1223a774ad4df19fc4f0407 branch: master author: Gregory P. Smith committer: GitHub date: 2019-04-14T10:32:07-07:00 summary: bpo-16079: fix duplicate test method name in test_gzip. (GH-12827) files: M Lib/test/test_gzip.py diff --git a/Lib/test/test_gzip.py b/Lib/test/test_gzip.py index 2c8f854c6436..3583b47336fb 100644 --- a/Lib/test/test_gzip.py +++ b/Lib/test/test_gzip.py @@ -746,7 +746,7 @@ def test_compress_stdin_outfile(self): self.assertEqual(out[:2], b"\x1f\x8b") @create_and_remove_directory(TEMPDIR) - def test_compress_infile_outfile(self): + def test_compress_infile_outfile_default(self): local_testgzip = os.path.join(TEMPDIR, 'testgzip') gzipname = local_testgzip + '.gz' self.assertFalse(os.path.exists(gzipname)) From webhook-mailer at python.org Sun Apr 14 13:50:57 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sun, 14 Apr 2019 17:50:57 -0000 Subject: [Python-checkins] bpo-16079: fix duplicate test method name in test_gzip. (GH-12827) Message-ID: https://github.com/python/cpython/commit/9f9e029bd2223ecba46eaefecadf0ac252d891f2 commit: 9f9e029bd2223ecba46eaefecadf0ac252d891f2 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-14T10:50:52-07:00 summary: bpo-16079: fix duplicate test method name in test_gzip. (GH-12827) (cherry picked from commit cd466559c4a312b3c1223a774ad4df19fc4f0407) Co-authored-by: Gregory P. 
Smith files: M Lib/test/test_gzip.py diff --git a/Lib/test/test_gzip.py b/Lib/test/test_gzip.py index b072ce4682c0..17ecda2089f2 100644 --- a/Lib/test/test_gzip.py +++ b/Lib/test/test_gzip.py @@ -735,7 +735,7 @@ def test_compress_stdin_outfile(self): self.assertEqual(out[:2], b"\x1f\x8b") @create_and_remove_directory(TEMPDIR) - def test_compress_infile_outfile(self): + def test_compress_infile_outfile_default(self): local_testgzip = os.path.join(TEMPDIR, 'testgzip') gzipname = local_testgzip + '.gz' self.assertFalse(os.path.exists(gzipname)) From webhook-mailer at python.org Sun Apr 14 15:12:39 2019 From: webhook-mailer at python.org (Stefan Behnel) Date: Sun, 14 Apr 2019 19:12:39 -0000 Subject: [Python-checkins] bpo-30485: Re-allow empty strings in ElementPath namespace mappings since they might actually be harmless and unused (and thus went undetected previously). (#12830) Message-ID: https://github.com/python/cpython/commit/3c5a858ec6a4e5851903762770fe526a46d3c351 commit: 3c5a858ec6a4e5851903762770fe526a46d3c351 branch: master author: Stefan Behnel committer: GitHub date: 2019-04-14T21:12:34+02:00 summary: bpo-30485: Re-allow empty strings in ElementPath namespace mappings since they might actually be harmless and unused (and thus went undetected previously). (#12830) files: M Lib/test/test_xml_etree.py M Lib/xml/etree/ElementPath.py diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py index 2f7a3b60b22d..f5b118b079ee 100644 --- a/Lib/test/test_xml_etree.py +++ b/Lib/test/test_xml_etree.py @@ -2466,9 +2466,6 @@ def test_findall_different_nsmaps(self): nsmap = {'xx': 'X', None: 'Y'} self.assertEqual(len(root.findall(".//xx:b", namespaces=nsmap)), 2) self.assertEqual(len(root.findall(".//b", namespaces=nsmap)), 1) - nsmap = {'xx': 'X', '': 'Y'} - with self.assertRaisesRegex(ValueError, 'namespace prefix'): - root.findall(".//xx:b", namespaces=nsmap) def test_bad_find(self): e = ET.XML(SAMPLE_XML) diff --git a/Lib/xml/etree/ElementPath.py b/Lib/xml/etree/ElementPath.py index 0e3854f9db22..4d231a7df656 100644 --- a/Lib/xml/etree/ElementPath.py +++ b/Lib/xml/etree/ElementPath.py @@ -275,8 +275,6 @@ def iterfind(elem, path, namespaces=None): cache_key = (path,) if namespaces: - if '' in namespaces: - raise ValueError("empty namespace prefix must be passed as None, not the empty string") if None in namespaces: cache_key += (namespaces[None],) + tuple(sorted( item for item in namespaces.items() if item[0] is not None)) From webhook-mailer at python.org Mon Apr 15 03:01:07 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Mon, 15 Apr 2019 07:01:07 -0000 Subject: [Python-checkins] bpo-27860: use cached_property (GH-12832) Message-ID: https://github.com/python/cpython/commit/2430d532e240dea55f0082d1e9bf2e0f3d7505be commit: 2430d532e240dea55f0082d1e9bf2e0f3d7505be branch: master author: Inada Naoki committer: GitHub date: 2019-04-15T16:01:00+09:00 summary: bpo-27860: use cached_property (GH-12832) * cached_property is more efficient than hand crafted cache. * In IPv[46]Network, `self.network.prefixlen` is same to `self._prefixlen`. files: M Lib/ipaddress.py M Lib/test/test_ipaddress.py diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py index 8c9d7406c447..7a3f36f3bc0f 100644 --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -597,15 +597,11 @@ def __reduce__(self): @functools.total_ordering class _BaseNetwork(_IPAddressBase): - """A generic IP network object. This IP class contains the version independent methods which are used by networks. 
- """ - def __init__(self, address): - self._cache = {} def __repr__(self): return '%s(%r)' % (self.__class__.__name__, str(self)) @@ -687,22 +683,14 @@ def overlaps(self, other): other.network_address in self or ( other.broadcast_address in self))) - @property + @functools.cached_property def broadcast_address(self): - x = self._cache.get('broadcast_address') - if x is None: - x = self._address_class(int(self.network_address) | - int(self.hostmask)) - self._cache['broadcast_address'] = x - return x + return self._address_class(int(self.network_address) | + int(self.hostmask)) - @property + @functools.cached_property def hostmask(self): - x = self._cache.get('hostmask') - if x is None: - x = self._address_class(int(self.netmask) ^ self._ALL_ONES) - self._cache['hostmask'] = x - return x + return self._address_class(int(self.netmask) ^ self._ALL_ONES) @property def with_prefixlen(self): @@ -1346,7 +1334,7 @@ def __init__(self, address): def __str__(self): return '%s/%d' % (self._string_from_ip_int(self._ip), - self.network.prefixlen) + self._prefixlen) def __eq__(self, other): address_equal = IPv4Address.__eq__(self, other) @@ -1413,7 +1401,6 @@ class IPv4Network(_BaseV4, _BaseNetwork): _address_class = IPv4Address def __init__(self, address, strict=True): - """Instantiate a new IPv4 network object. Args: @@ -1447,10 +1434,7 @@ def __init__(self, address, strict=True): an IPv4 address. ValueError: If strict is True and a network address is not supplied. - """ - _BaseNetwork.__init__(self, address) - # Constructing from a packed address or integer if isinstance(address, (int, bytes)): addr = address @@ -2020,7 +2004,7 @@ def __init__(self, address): def __str__(self): return '%s/%d' % (self._string_from_ip_int(self._ip), - self.network.prefixlen) + self._prefixlen) def __eq__(self, other): address_equal = IPv6Address.__eq__(self, other) @@ -2125,10 +2109,7 @@ def __init__(self, address, strict=True): an IPv6 address. ValueError: If strict was True and a network address was not supplied. 
- """ - _BaseNetwork.__init__(self, address) - # Constructing from a packed address or integer if isinstance(address, (int, bytes)): addr = address diff --git a/Lib/test/test_ipaddress.py b/Lib/test/test_ipaddress.py index 82daaff4d775..15317c944630 100644 --- a/Lib/test/test_ipaddress.py +++ b/Lib/test/test_ipaddress.py @@ -961,20 +961,6 @@ def testInternals(self): self.assertEqual(128, ipaddress._count_righthand_zero_bits(0, 128)) self.assertEqual("IPv4Network('1.2.3.0/24')", repr(self.ipv4_network)) - def testMissingNetworkVersion(self): - class Broken(ipaddress._BaseNetwork): - pass - broken = Broken('127.0.0.1') - with self.assertRaisesRegex(NotImplementedError, "Broken.*version"): - broken.version - - def testMissingAddressClass(self): - class Broken(ipaddress._BaseNetwork): - pass - broken = Broken('127.0.0.1') - with self.assertRaisesRegex(NotImplementedError, "Broken.*address"): - broken._address_class - def testGetNetwork(self): self.assertEqual(int(self.ipv4_network.network_address), 16909056) self.assertEqual(str(self.ipv4_network.network_address), '1.2.3.0') @@ -1986,25 +1972,22 @@ def testWithStar(self): def testNetworkElementCaching(self): # V4 - make sure we're empty - self.assertNotIn('network_address', self.ipv4_network._cache) - self.assertNotIn('broadcast_address', self.ipv4_network._cache) - self.assertNotIn('hostmask', self.ipv4_network._cache) + self.assertNotIn('broadcast_address', self.ipv4_network.__dict__) + self.assertNotIn('hostmask', self.ipv4_network.__dict__) # V4 - populate and test - self.assertEqual(self.ipv4_network.network_address, - ipaddress.IPv4Address('1.2.3.0')) self.assertEqual(self.ipv4_network.broadcast_address, ipaddress.IPv4Address('1.2.3.255')) self.assertEqual(self.ipv4_network.hostmask, ipaddress.IPv4Address('0.0.0.255')) # V4 - check we're cached - self.assertIn('broadcast_address', self.ipv4_network._cache) - self.assertIn('hostmask', self.ipv4_network._cache) + self.assertIn('broadcast_address', self.ipv4_network.__dict__) + self.assertIn('hostmask', self.ipv4_network.__dict__) # V6 - make sure we're empty - self.assertNotIn('broadcast_address', self.ipv6_network._cache) - self.assertNotIn('hostmask', self.ipv6_network._cache) + self.assertNotIn('broadcast_address', self.ipv6_network.__dict__) + self.assertNotIn('hostmask', self.ipv6_network.__dict__) # V6 - populate and test self.assertEqual(self.ipv6_network.network_address, @@ -2024,10 +2007,10 @@ def testNetworkElementCaching(self): ipaddress.IPv6Address('::ffff:ffff:ffff:ffff')) # V6 - check we're cached - self.assertIn('broadcast_address', self.ipv6_network._cache) - self.assertIn('hostmask', self.ipv6_network._cache) - self.assertIn('broadcast_address', self.ipv6_interface.network._cache) - self.assertIn('hostmask', self.ipv6_interface.network._cache) + self.assertIn('broadcast_address', self.ipv6_network.__dict__) + self.assertIn('hostmask', self.ipv6_network.__dict__) + self.assertIn('broadcast_address', self.ipv6_interface.network.__dict__) + self.assertIn('hostmask', self.ipv6_interface.network.__dict__) def testTeredo(self): # stolen from wikipedia From webhook-mailer at python.org Mon Apr 15 05:02:35 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 15 Apr 2019 09:02:35 -0000 Subject: [Python-checkins] bpo-31904: Don't build the _crypt extension on VxWorks (GH-12833) Message-ID: https://github.com/python/cpython/commit/236d0b75c41449a266201c683b4b0d6acdee02df commit: 236d0b75c41449a266201c683b4b0d6acdee02df branch: master author: pxinwr committer: Victor 
Stinner date: 2019-04-15T11:02:20+02:00 summary: bpo-31904: Don't build the _crypt extension on VxWorks (GH-12833) files: A Misc/NEWS.d/next/Build/2019-04-15-15-01-29.bpo-31904.38fdkg.rst M Doc/library/crypt.rst M setup.py diff --git a/Doc/library/crypt.rst b/Doc/library/crypt.rst index 43d4b5b749e4..d25c626a1758 100644 --- a/Doc/library/crypt.rst +++ b/Doc/library/crypt.rst @@ -30,6 +30,8 @@ the :manpage:`crypt(3)` routine in the running system. Therefore, any extensions available on the current implementation will also be available on this module. +.. availability:: Unix. Not available on VxWorks. + Hashing Methods --------------- diff --git a/Misc/NEWS.d/next/Build/2019-04-15-15-01-29.bpo-31904.38fdkg.rst b/Misc/NEWS.d/next/Build/2019-04-15-15-01-29.bpo-31904.38fdkg.rst new file mode 100644 index 000000000000..c82636ed7b5b --- /dev/null +++ b/Misc/NEWS.d/next/Build/2019-04-15-15-01-29.bpo-31904.38fdkg.rst @@ -0,0 +1 @@ +Don't build the ``_crypt`` extension on VxWorks. diff --git a/setup.py b/setup.py index 30caed5b51c1..9c83914fd907 100644 --- a/setup.py +++ b/setup.py @@ -973,17 +973,18 @@ def detect_readline_curses(self): def detect_crypt(self): # crypt module. + if VXWORKS: + # bpo-31904: crypt() function is not provided by VxWorks. + # DES_crypt() OpenSSL provides is too weak to implement + # the encryption. + return + if self.compiler.find_library_file(self.lib_dirs, 'crypt'): libs = ['crypt'] else: libs = [] - if not VXWORKS: - self.add(Extension('_crypt', ['_cryptmodule.c'], - libraries=libs)) - elif self.compiler.find_library_file(self.lib_dirs, 'OPENSSL'): - libs = ['OPENSSL'] - self.add(Extension('_crypt', ['_cryptmodule.c'], + self.add(Extension('_crypt', ['_cryptmodule.c'], libraries=libs)) def detect_socket(self): From webhook-mailer at python.org Mon Apr 15 05:06:28 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 15 Apr 2019 09:06:28 -0000 Subject: [Python-checkins] bpo-31904: Port the time module on VxWorks (GH-12305) Message-ID: https://github.com/python/cpython/commit/f1464f4d2ecf9b809ff768c523c5eea1abd31c55 commit: f1464f4d2ecf9b809ff768c523c5eea1abd31c55 branch: master author: pxinwr committer: Victor Stinner date: 2019-04-15T11:06:21+02:00 summary: bpo-31904: Port the time module on VxWorks (GH-12305) time.clock() is not available on VxWorks. files: A Misc/NEWS.d/next/Library/2019-03-13-16-48-42.bpo-31904.9sjd38.rst M Doc/library/time.rst M Lib/test/test_time.py M Modules/timemodule.c diff --git a/Doc/library/time.rst b/Doc/library/time.rst index baf92c1400ee..170f8dc629bf 100644 --- a/Doc/library/time.rst +++ b/Doc/library/time.rst @@ -153,6 +153,8 @@ Functions :c:func:`QueryPerformanceCounter`. The resolution is typically better than one microsecond. + .. availability:: Windows, Unix. Not available on VxWorks. + .. 
deprecated:: 3.3 The behaviour of this function depends on the platform: use :func:`perf_counter` or :func:`process_time` instead, depending on your diff --git a/Lib/test/test_time.py b/Lib/test/test_time.py index 136ad29e20ad..303918960b63 100644 --- a/Lib/test/test_time.py +++ b/Lib/test/test_time.py @@ -88,6 +88,8 @@ def check_ns(sec, ns): check_ns(time.clock_gettime(time.CLOCK_REALTIME), time.clock_gettime_ns(time.CLOCK_REALTIME)) + @unittest.skipUnless(hasattr(time, 'clock'), + 'need time.clock()') def test_clock(self): with self.assertWarns(DeprecationWarning): time.clock() @@ -549,7 +551,9 @@ def test_localtime_failure(self): self.assertRaises(ValueError, time.ctime, float("nan")) def test_get_clock_info(self): - clocks = ['clock', 'monotonic', 'perf_counter', 'process_time', 'time'] + clocks = ['monotonic', 'perf_counter', 'process_time', 'time'] + if hasattr(time, 'clock'): + clocks.append('clock') for name in clocks: if name == 'clock': diff --git a/Misc/NEWS.d/next/Library/2019-03-13-16-48-42.bpo-31904.9sjd38.rst b/Misc/NEWS.d/next/Library/2019-03-13-16-48-42.bpo-31904.9sjd38.rst new file mode 100644 index 000000000000..6fb5c89d41a5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-03-13-16-48-42.bpo-31904.9sjd38.rst @@ -0,0 +1 @@ +Add time module support and fix test_time faiures for VxWorks. diff --git a/Modules/timemodule.c b/Modules/timemodule.c index 724a064f5ceb..3df17ac4fb68 100644 --- a/Modules/timemodule.c +++ b/Modules/timemodule.c @@ -145,7 +145,7 @@ perf_counter(_Py_clock_info_t *info) return _PyFloat_FromPyTime(t); } -#if defined(MS_WINDOWS) || defined(HAVE_CLOCK) +#if (defined(MS_WINDOWS) || defined(HAVE_CLOCK)) && !defined(__VXWORKS__) #define PYCLOCK static PyObject* pyclock(_Py_clock_info_t *info) @@ -765,7 +765,7 @@ time_strftime(PyObject *self, PyObject *args) return NULL; } -#if defined(_MSC_VER) || (defined(__sun) && defined(__SVR4)) || defined(_AIX) +#if defined(_MSC_VER) || (defined(__sun) && defined(__SVR4)) || defined(_AIX) || defined(__VXWORKS__) if (buf.tm_year + 1900 < 1 || 9999 < buf.tm_year + 1900) { PyErr_SetString(PyExc_ValueError, "strftime() requires year in [1; 9999]"); @@ -1001,18 +1001,21 @@ time_mktime(PyObject *self, PyObject *tm_tuple) return NULL; } -#ifdef _AIX +#if defined(_AIX) || (defined(__VXWORKS__) && !defined(_WRS_CONFIG_LP64)) /* bpo-19748: AIX mktime() valid range is 00:00:00 UTC, January 1, 1970 to 03:14:07 UTC, January 19, 2038. Thanks to the workaround below, it is possible to support years in range [1902; 2037] */ if (tm.tm_year < 2 || tm.tm_year > 137) { /* bpo-19748: On AIX, mktime() does not report overflow error - for timestamp < -2^31 or timestamp > 2**31-1. */ + for timestamp < -2^31 or timestamp > 2**31-1. VxWorks has the + same issue when working in 32 bit mode. */ PyErr_SetString(PyExc_OverflowError, "mktime argument out of range"); return NULL; } +#endif +#ifdef _AIX /* bpo-34373: AIX mktime() has an integer overflow for years in range [1902; 1969]. 
Workaround the issue by using a year greater or equal than 1970 (tm_year >= 70): mktime() behaves correctly in that case From webhook-mailer at python.org Mon Apr 15 06:35:07 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 15 Apr 2019 10:35:07 -0000 Subject: [Python-checkins] bpo-36629: Add support.get_socket_conn_refused_errs() (GH-12834) Message-ID: https://github.com/python/cpython/commit/3c7931e514faf509a39c218c2c9f55efb434628f commit: 3c7931e514faf509a39c218c2c9f55efb434628f branch: master author: Victor Stinner committer: GitHub date: 2019-04-15T12:34:53+02:00 summary: bpo-36629: Add support.get_socket_conn_refused_errs() (GH-12834) Fix test_imap4_host_default_value() of test_imaplib: catch also errno.ENETUNREACH error. files: A Misc/NEWS.d/next/Tests/2019-04-15-11-57-39.bpo-36629.ySnaL3.rst M Lib/test/support/__init__.py M Lib/test/test_imaplib.py M Lib/test/test_socket.py diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 5bd15a2feae9..2bb561b4cee1 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -1477,6 +1477,22 @@ def __exit__(self, type_=None, value=None, traceback=None): ioerror_peer_reset = TransientResource(OSError, errno=errno.ECONNRESET) +def get_socket_conn_refused_errs(): + """ + Get the different socket error numbers ('errno') which can be received + when a connection is refused. + """ + errors = [errno.ECONNREFUSED] + if hasattr(errno, 'ENETUNREACH'): + # On Solaris, ENETUNREACH is returned sometimes instead of ECONNREFUSED + errors.append(errno.ENETUNREACH) + if hasattr(errno, 'EADDRNOTAVAIL'): + # bpo-31910: socket.create_connection() fails randomly + # with EADDRNOTAVAIL on Travis CI + errors.append(errno.EADDRNOTAVAIL) + return errors + + @contextlib.contextmanager def transient_internet(resource_name, *, timeout=30.0, errnos=()): """Return a context manager that raises ResourceDenied when various issues diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py index a060143e1f6b..aec36af6c525 100644 --- a/Lib/test/test_imaplib.py +++ b/Lib/test/test_imaplib.py @@ -81,14 +81,8 @@ def test_imap4_host_default_value(self): except socket.error: pass - expected_errnos = [ - # This is the exception that should be raised. - errno.ECONNREFUSED, - ] - if hasattr(errno, 'EADDRNOTAVAIL'): - # socket.create_connection() fails randomly with - # EADDRNOTAVAIL on Travis CI. - expected_errnos.append(errno.EADDRNOTAVAIL) + # This is the exception that should be raised. + expected_errnos = support.get_socket_conn_refused_errs() with self.assertRaises(OSError) as cm: imaplib.IMAP4() self.assertIn(cm.exception.errno, expected_errnos) diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py index b0bdb11d9028..815f9adce677 100644 --- a/Lib/test/test_socket.py +++ b/Lib/test/test_socket.py @@ -4804,14 +4804,7 @@ def test_create_connection(self): # On Solaris, ENETUNREACH is returned in this circumstance instead # of ECONNREFUSED. So, if that errno exists, add it to our list of # expected errnos. 
- expected_errnos = [ errno.ECONNREFUSED, ] - if hasattr(errno, 'ENETUNREACH'): - expected_errnos.append(errno.ENETUNREACH) - if hasattr(errno, 'EADDRNOTAVAIL'): - # bpo-31910: socket.create_connection() fails randomly - # with EADDRNOTAVAIL on Travis CI - expected_errnos.append(errno.EADDRNOTAVAIL) - + expected_errnos = support.get_socket_conn_refused_errs() self.assertIn(cm.exception.errno, expected_errnos) def test_create_connection_timeout(self): diff --git a/Misc/NEWS.d/next/Tests/2019-04-15-11-57-39.bpo-36629.ySnaL3.rst b/Misc/NEWS.d/next/Tests/2019-04-15-11-57-39.bpo-36629.ySnaL3.rst new file mode 100644 index 000000000000..0837a233d582 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2019-04-15-11-57-39.bpo-36629.ySnaL3.rst @@ -0,0 +1,2 @@ +Fix ``test_imap4_host_default_value()`` of ``test_imaplib``: catch also +:data:`errno.ENETUNREACH` error. From webhook-mailer at python.org Mon Apr 15 06:49:43 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 15 Apr 2019 10:49:43 -0000 Subject: [Python-checkins] bpo-36629: Add support.get_socket_conn_refused_errs() (GH-12834) (GH-12835) Message-ID: https://github.com/python/cpython/commit/28ed39e83e4c545fa1da89fd7691cace280296f7 commit: 28ed39e83e4c545fa1da89fd7691cace280296f7 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: Victor Stinner date: 2019-04-15T12:49:37+02:00 summary: bpo-36629: Add support.get_socket_conn_refused_errs() (GH-12834) (GH-12835) Fix test_imap4_host_default_value() of test_imaplib: catch also errno.ENETUNREACH error. (cherry picked from commit 3c7931e514faf509a39c218c2c9f55efb434628f) Co-authored-by: Victor Stinner files: A Misc/NEWS.d/next/Tests/2019-04-15-11-57-39.bpo-36629.ySnaL3.rst M Lib/test/support/__init__.py M Lib/test/test_imaplib.py M Lib/test/test_socket.py diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 9d6fd44cbc10..a7be4982e001 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -1409,6 +1409,22 @@ def __exit__(self, type_=None, value=None, traceback=None): ioerror_peer_reset = TransientResource(OSError, errno=errno.ECONNRESET) +def get_socket_conn_refused_errs(): + """ + Get the different socket error numbers ('errno') which can be received + when a connection is refused. + """ + errors = [errno.ECONNREFUSED] + if hasattr(errno, 'ENETUNREACH'): + # On Solaris, ENETUNREACH is returned sometimes instead of ECONNREFUSED + errors.append(errno.ENETUNREACH) + if hasattr(errno, 'EADDRNOTAVAIL'): + # bpo-31910: socket.create_connection() fails randomly + # with EADDRNOTAVAIL on Travis CI + errors.append(errno.EADDRNOTAVAIL) + return errors + + @contextlib.contextmanager def transient_internet(resource_name, *, timeout=30.0, errnos=()): """Return a context manager that raises ResourceDenied when various issues diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py index a060143e1f6b..aec36af6c525 100644 --- a/Lib/test/test_imaplib.py +++ b/Lib/test/test_imaplib.py @@ -81,14 +81,8 @@ def test_imap4_host_default_value(self): except socket.error: pass - expected_errnos = [ - # This is the exception that should be raised. - errno.ECONNREFUSED, - ] - if hasattr(errno, 'EADDRNOTAVAIL'): - # socket.create_connection() fails randomly with - # EADDRNOTAVAIL on Travis CI. - expected_errnos.append(errno.EADDRNOTAVAIL) + # This is the exception that should be raised. 
+ expected_errnos = support.get_socket_conn_refused_errs() with self.assertRaises(OSError) as cm: imaplib.IMAP4() self.assertIn(cm.exception.errno, expected_errnos) diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py index 381965289615..43929b355e6b 100644 --- a/Lib/test/test_socket.py +++ b/Lib/test/test_socket.py @@ -4720,14 +4720,7 @@ def test_create_connection(self): # On Solaris, ENETUNREACH is returned in this circumstance instead # of ECONNREFUSED. So, if that errno exists, add it to our list of # expected errnos. - expected_errnos = [ errno.ECONNREFUSED, ] - if hasattr(errno, 'ENETUNREACH'): - expected_errnos.append(errno.ENETUNREACH) - if hasattr(errno, 'EADDRNOTAVAIL'): - # bpo-31910: socket.create_connection() fails randomly - # with EADDRNOTAVAIL on Travis CI - expected_errnos.append(errno.EADDRNOTAVAIL) - + expected_errnos = support.get_socket_conn_refused_errs() self.assertIn(cm.exception.errno, expected_errnos) def test_create_connection_timeout(self): diff --git a/Misc/NEWS.d/next/Tests/2019-04-15-11-57-39.bpo-36629.ySnaL3.rst b/Misc/NEWS.d/next/Tests/2019-04-15-11-57-39.bpo-36629.ySnaL3.rst new file mode 100644 index 000000000000..0837a233d582 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2019-04-15-11-57-39.bpo-36629.ySnaL3.rst @@ -0,0 +1,2 @@ +Fix ``test_imap4_host_default_value()`` of ``test_imaplib``: catch also +:data:`errno.ENETUNREACH` error. From webhook-mailer at python.org Mon Apr 15 08:37:13 2019 From: webhook-mailer at python.org (Mark Dickinson) Date: Mon, 15 Apr 2019 12:37:13 -0000 Subject: [Python-checkins] bpo-36625: Remove obsolete comments from docstrings in fractions module (GH-12822) Message-ID: https://github.com/python/cpython/commit/a9a28808e5a03d2e68e421227c113a38edc40946 commit: a9a28808e5a03d2e68e421227c113a38edc40946 branch: master author: Jakub Molinski committer: Mark Dickinson date: 2019-04-15T13:37:04+01:00 summary: bpo-36625: Remove obsolete comments from docstrings in fractions module (GH-12822) Remove left-over references to Python 3.0 as the future in Fraction class docstrings. files: A Misc/NEWS.d/next/Documentation/2019-04-15-12-02-45.bpo-36625.x3LMCF.rst M Lib/fractions.py M Misc/ACKS diff --git a/Lib/fractions.py b/Lib/fractions.py index 4bbfc434f7d1..7443bd3e0c6a 100644 --- a/Lib/fractions.py +++ b/Lib/fractions.py @@ -512,16 +512,16 @@ def __trunc__(a): return a._numerator // a._denominator def __floor__(a): - """Will be math.floor(a) in 3.0.""" + """math.floor(a)""" return a.numerator // a.denominator def __ceil__(a): - """Will be math.ceil(a) in 3.0.""" + """math.ceil(a)""" # The negations cleverly convince floordiv to return the ceiling. return -(-a.numerator // a.denominator) def __round__(self, ndigits=None): - """Will be round(self, ndigits) in 3.0. + """round(self, ndigits) Rounds half toward even. 
""" diff --git a/Misc/ACKS b/Misc/ACKS index 5d7181df6794..393ea205ac1d 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1099,6 +1099,7 @@ Tim Mitchell Zubin Mithra Florian Mladitsch Doug Moen +Jakub Molinski Juliette Monsel The Dragon De Monsyne Bastien Montagne diff --git a/Misc/NEWS.d/next/Documentation/2019-04-15-12-02-45.bpo-36625.x3LMCF.rst b/Misc/NEWS.d/next/Documentation/2019-04-15-12-02-45.bpo-36625.x3LMCF.rst new file mode 100644 index 000000000000..af1a15733249 --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2019-04-15-12-02-45.bpo-36625.x3LMCF.rst @@ -0,0 +1 @@ +Remove obsolete comments from docstrings in fractions.Fraction From webhook-mailer at python.org Mon Apr 15 08:40:29 2019 From: webhook-mailer at python.org (Nick Coghlan) Date: Mon, 15 Apr 2019 12:40:29 -0000 Subject: [Python-checkins] Doc: update PendingDeprecationWarning explanation (GH-12837) Message-ID: https://github.com/python/cpython/commit/a3283efd30ad52b56d1046138523cbabc6c69daf commit: a3283efd30ad52b56d1046138523cbabc6c69daf branch: master author: Inada Naoki committer: Nick Coghlan date: 2019-04-15T22:40:23+10:00 summary: Doc: update PendingDeprecationWarning explanation (GH-12837) Keep the nudge towards DeprecationWarning, but remove the "Note" markup and generally shorten the description. Ref: https://github.com/python/cpython/pull/12505/files#r273978757 files: M Doc/library/exceptions.rst diff --git a/Doc/library/exceptions.rst b/Doc/library/exceptions.rst index 250938003c07..52a505e0a0ff 100644 --- a/Doc/library/exceptions.rst +++ b/Doc/library/exceptions.rst @@ -691,16 +691,13 @@ The following exceptions are used as warning categories; see the .. exception:: PendingDeprecationWarning - Base class for warnings about features which will be deprecated in the - future. + Base class for warnings about features which are obsolete and + expected to be deprecated in the future, but are not deprecated + at the moment. - .. note:: - PendingDeprecationWarning was introduced as an "ignored by default" - version of DeprecationWarning. But :exc:`DeprecationWarning` is also - ignored by default since Python 2.7 and 3.2. - There is not much difference between PendingDeprecationWarning and - DeprecationWarning nowadays. DeprecationWarning is recommended - in general. + This class is rarely used as emitting a warning about a possible + upcoming deprecation is unusual, and :exc:`DeprecationWarning` + is preferred for already active deprecations. .. exception:: SyntaxWarning From webhook-mailer at python.org Mon Apr 15 08:48:39 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 15 Apr 2019 12:48:39 -0000 Subject: [Python-checkins] Doc: update PendingDeprecationWarning explanation (GH-12837) Message-ID: https://github.com/python/cpython/commit/871ba6c848474959a506e0301628bf826ba11028 commit: 871ba6c848474959a506e0301628bf826ba11028 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-15T05:48:31-07:00 summary: Doc: update PendingDeprecationWarning explanation (GH-12837) Keep the nudge towards DeprecationWarning, but remove the "Note" markup and generally shorten the description. 
Ref: https://github.com/python/cpython/pull/12505/filesGH-r273978757 (cherry picked from commit a3283efd30ad52b56d1046138523cbabc6c69daf) Co-authored-by: Inada Naoki files: M Doc/library/exceptions.rst diff --git a/Doc/library/exceptions.rst b/Doc/library/exceptions.rst index 250938003c07..52a505e0a0ff 100644 --- a/Doc/library/exceptions.rst +++ b/Doc/library/exceptions.rst @@ -691,16 +691,13 @@ The following exceptions are used as warning categories; see the .. exception:: PendingDeprecationWarning - Base class for warnings about features which will be deprecated in the - future. + Base class for warnings about features which are obsolete and + expected to be deprecated in the future, but are not deprecated + at the moment. - .. note:: - PendingDeprecationWarning was introduced as an "ignored by default" - version of DeprecationWarning. But :exc:`DeprecationWarning` is also - ignored by default since Python 2.7 and 3.2. - There is not much difference between PendingDeprecationWarning and - DeprecationWarning nowadays. DeprecationWarning is recommended - in general. + This class is rarely used as emitting a warning about a possible + upcoming deprecation is unusual, and :exc:`DeprecationWarning` + is preferred for already active deprecations. .. exception:: SyntaxWarning From webhook-mailer at python.org Mon Apr 15 11:00:23 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 15 Apr 2019 15:00:23 -0000 Subject: [Python-checkins] bpo-35134: Add Include/cpython/pymem.h (GH-12840) Message-ID: https://github.com/python/cpython/commit/9820c07e4146e18bddc9ac1586cee7e542903de0 commit: 9820c07e4146e18bddc9ac1586cee7e542903de0 branch: master author: Victor Stinner committer: GitHub date: 2019-04-15T17:00:19+02:00 summary: bpo-35134: Add Include/cpython/pymem.h (GH-12840) Move unstable CPython API from Include/pymem.h into a new Include/cpython/pymem.h header file. files: A Include/cpython/pymem.h M Include/pymem.h diff --git a/Include/cpython/pymem.h b/Include/cpython/pymem.h new file mode 100644 index 000000000000..bd66506639ab --- /dev/null +++ b/Include/cpython/pymem.h @@ -0,0 +1,99 @@ +#ifndef Py_CPYTHON_PYMEM_H +# error "this header file must not be included directly" +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +PyAPI_FUNC(void *) PyMem_RawMalloc(size_t size); +PyAPI_FUNC(void *) PyMem_RawCalloc(size_t nelem, size_t elsize); +PyAPI_FUNC(void *) PyMem_RawRealloc(void *ptr, size_t new_size); +PyAPI_FUNC(void) PyMem_RawFree(void *ptr); + +/* Configure the Python memory allocators. Pass NULL to use default + allocators. */ +PyAPI_FUNC(int) _PyMem_SetupAllocators(const char *opt); + +/* Try to get the allocators name set by _PyMem_SetupAllocators(). 
*/ +PyAPI_FUNC(const char*) _PyMem_GetAllocatorsName(void); + +PyAPI_FUNC(void *) PyMem_Calloc(size_t nelem, size_t elsize); + +/* strdup() using PyMem_RawMalloc() */ +PyAPI_FUNC(char *) _PyMem_RawStrdup(const char *str); + +/* strdup() using PyMem_Malloc() */ +PyAPI_FUNC(char *) _PyMem_Strdup(const char *str); + +/* wcsdup() using PyMem_RawMalloc() */ +PyAPI_FUNC(wchar_t*) _PyMem_RawWcsdup(const wchar_t *str); + + +typedef enum { + /* PyMem_RawMalloc(), PyMem_RawRealloc() and PyMem_RawFree() */ + PYMEM_DOMAIN_RAW, + + /* PyMem_Malloc(), PyMem_Realloc() and PyMem_Free() */ + PYMEM_DOMAIN_MEM, + + /* PyObject_Malloc(), PyObject_Realloc() and PyObject_Free() */ + PYMEM_DOMAIN_OBJ +} PyMemAllocatorDomain; + +typedef struct { + /* user context passed as the first argument to the 4 functions */ + void *ctx; + + /* allocate a memory block */ + void* (*malloc) (void *ctx, size_t size); + + /* allocate a memory block initialized by zeros */ + void* (*calloc) (void *ctx, size_t nelem, size_t elsize); + + /* allocate or resize a memory block */ + void* (*realloc) (void *ctx, void *ptr, size_t new_size); + + /* release a memory block */ + void (*free) (void *ctx, void *ptr); +} PyMemAllocatorEx; + +/* Get the memory block allocator of the specified domain. */ +PyAPI_FUNC(void) PyMem_GetAllocator(PyMemAllocatorDomain domain, + PyMemAllocatorEx *allocator); + +/* Set the memory block allocator of the specified domain. + + The new allocator must return a distinct non-NULL pointer when requesting + zero bytes. + + For the PYMEM_DOMAIN_RAW domain, the allocator must be thread-safe: the GIL + is not held when the allocator is called. + + If the new allocator is not a hook (don't call the previous allocator), the + PyMem_SetupDebugHooks() function must be called to reinstall the debug hooks + on top on the new allocator. */ +PyAPI_FUNC(void) PyMem_SetAllocator(PyMemAllocatorDomain domain, + PyMemAllocatorEx *allocator); + +/* Setup hooks to detect bugs in the following Python memory allocator + functions: + + - PyMem_RawMalloc(), PyMem_RawRealloc(), PyMem_RawFree() + - PyMem_Malloc(), PyMem_Realloc(), PyMem_Free() + - PyObject_Malloc(), PyObject_Realloc() and PyObject_Free() + + Newly allocated memory is filled with the byte 0xCB, freed memory is filled + with the byte 0xDB. Additional checks: + + - detect API violations, ex: PyObject_Free() called on a buffer allocated + by PyMem_Malloc() + - detect write before the start of the buffer (buffer underflow) + - detect write after the end of the buffer (buffer overflow) + + The function does nothing if Python is not compiled is debug mode. */ +PyAPI_FUNC(void) PyMem_SetupDebugHooks(void); + +#ifdef __cplusplus +} +#endif diff --git a/Include/pymem.h b/Include/pymem.h index 93243f8553b8..07b380aa6e7f 100644 --- a/Include/pymem.h +++ b/Include/pymem.h @@ -11,21 +11,6 @@ extern "C" { #endif -#ifndef Py_LIMITED_API -PyAPI_FUNC(void *) PyMem_RawMalloc(size_t size); -PyAPI_FUNC(void *) PyMem_RawCalloc(size_t nelem, size_t elsize); -PyAPI_FUNC(void *) PyMem_RawRealloc(void *ptr, size_t new_size); -PyAPI_FUNC(void) PyMem_RawFree(void *ptr); - -/* Configure the Python memory allocators. Pass NULL to use default - allocators. */ -PyAPI_FUNC(int) _PyMem_SetupAllocators(const char *opt); - -/* Try to get the allocators name set by _PyMem_SetupAllocators(). */ -PyAPI_FUNC(const char*) _PyMem_GetAllocatorsName(void); -#endif /* !defined(Py_LIMITED_API) */ - - /* BEWARE: Each interface exports both functions and macros. 
Extension modules should @@ -65,23 +50,9 @@ PyAPI_FUNC(const char*) _PyMem_GetAllocatorsName(void); */ PyAPI_FUNC(void *) PyMem_Malloc(size_t size); -#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03050000 -PyAPI_FUNC(void *) PyMem_Calloc(size_t nelem, size_t elsize); -#endif PyAPI_FUNC(void *) PyMem_Realloc(void *ptr, size_t new_size); PyAPI_FUNC(void) PyMem_Free(void *ptr); -#ifndef Py_LIMITED_API -/* strdup() using PyMem_RawMalloc() */ -PyAPI_FUNC(char *) _PyMem_RawStrdup(const char *str); - -/* strdup() using PyMem_Malloc() */ -PyAPI_FUNC(char *) _PyMem_Strdup(const char *str); - -/* wcsdup() using PyMem_RawMalloc() */ -PyAPI_FUNC(wchar_t*) _PyMem_RawWcsdup(const wchar_t *str); -#endif - /* Macros. */ /* PyMem_MALLOC(0) means malloc(1). Some systems would return NULL @@ -130,72 +101,6 @@ PyAPI_FUNC(wchar_t*) _PyMem_RawWcsdup(const wchar_t *str); #define PyMem_Del PyMem_Free #define PyMem_DEL PyMem_FREE -#ifndef Py_LIMITED_API -typedef enum { - /* PyMem_RawMalloc(), PyMem_RawRealloc() and PyMem_RawFree() */ - PYMEM_DOMAIN_RAW, - - /* PyMem_Malloc(), PyMem_Realloc() and PyMem_Free() */ - PYMEM_DOMAIN_MEM, - - /* PyObject_Malloc(), PyObject_Realloc() and PyObject_Free() */ - PYMEM_DOMAIN_OBJ -} PyMemAllocatorDomain; - -typedef struct { - /* user context passed as the first argument to the 4 functions */ - void *ctx; - - /* allocate a memory block */ - void* (*malloc) (void *ctx, size_t size); - - /* allocate a memory block initialized by zeros */ - void* (*calloc) (void *ctx, size_t nelem, size_t elsize); - - /* allocate or resize a memory block */ - void* (*realloc) (void *ctx, void *ptr, size_t new_size); - - /* release a memory block */ - void (*free) (void *ctx, void *ptr); -} PyMemAllocatorEx; - -/* Get the memory block allocator of the specified domain. */ -PyAPI_FUNC(void) PyMem_GetAllocator(PyMemAllocatorDomain domain, - PyMemAllocatorEx *allocator); - -/* Set the memory block allocator of the specified domain. - - The new allocator must return a distinct non-NULL pointer when requesting - zero bytes. - - For the PYMEM_DOMAIN_RAW domain, the allocator must be thread-safe: the GIL - is not held when the allocator is called. - - If the new allocator is not a hook (don't call the previous allocator), the - PyMem_SetupDebugHooks() function must be called to reinstall the debug hooks - on top on the new allocator. */ -PyAPI_FUNC(void) PyMem_SetAllocator(PyMemAllocatorDomain domain, - PyMemAllocatorEx *allocator); - -/* Setup hooks to detect bugs in the following Python memory allocator - functions: - - - PyMem_RawMalloc(), PyMem_RawRealloc(), PyMem_RawFree() - - PyMem_Malloc(), PyMem_Realloc(), PyMem_Free() - - PyObject_Malloc(), PyObject_Realloc() and PyObject_Free() - - Newly allocated memory is filled with the byte 0xCB, freed memory is filled - with the byte 0xDB. Additional checks: - - - detect API violations, ex: PyObject_Free() called on a buffer allocated - by PyMem_Malloc() - - detect write before the start of the buffer (buffer underflow) - - detect write after the end of the buffer (buffer overflow) - - The function does nothing if Python is not compiled is debug mode. */ -PyAPI_FUNC(void) PyMem_SetupDebugHooks(void); -#endif /* Py_LIMITED_API */ - /* bpo-35053: expose _Py_tracemalloc_config for performance: _Py_NewReference() needs an efficient check to test if tracemalloc is tracing. 
@@ -231,6 +136,13 @@ PyAPI_DATA(struct _PyTraceMalloc_Config) _Py_tracemalloc_config; .max_nframe = 1, \ .use_domain = 0} + +#ifndef Py_LIMITED_API +# define Py_CPYTHON_PYMEM_H +# include "cpython/pymem.h" +# undef Py_CPYTHON_PYMEM_H +#endif + #ifdef __cplusplus } #endif From webhook-mailer at python.org Mon Apr 15 11:29:37 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 15 Apr 2019 15:29:37 -0000 Subject: [Python-checkins] bpo-35134: Add cpython/pymem.h to build system (GH-12842) Message-ID: https://github.com/python/cpython/commit/aba7d662abbb847f9f45c6db58242a9b4bf65bff commit: aba7d662abbb847f9f45c6db58242a9b4bf65bff branch: master author: Victor Stinner committer: GitHub date: 2019-04-15T17:29:31+02:00 summary: bpo-35134: Add cpython/pymem.h to build system (GH-12842) files: M Makefile.pre.in M PCbuild/pythoncore.vcxproj M PCbuild/pythoncore.vcxproj.filters diff --git a/Makefile.pre.in b/Makefile.pre.in index 05c195767a8b..cd7098cac72b 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1056,6 +1056,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/cpython/objimpl.h \ $(srcdir)/Include/cpython/pyerrors.h \ $(srcdir)/Include/cpython/pylifecycle.h \ + $(srcdir)/Include/cpython/pymem.h \ $(srcdir)/Include/cpython/pystate.h \ $(srcdir)/Include/cpython/tupleobject.h \ $(srcdir)/Include/cpython/unicodeobject.h \ diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index a980799461a3..a135e9326c78 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -117,6 +117,7 @@ + diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index f92433e3e0c9..913464656795 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -102,6 +102,9 @@ Include + + Include + Include From webhook-mailer at python.org Mon Apr 15 11:54:14 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 15 Apr 2019 15:54:14 -0000 Subject: [Python-checkins] bpo-36389: Cleanup gc.set_threshold() (GH-12844) Message-ID: https://github.com/python/cpython/commit/0810fa79885276114d1a94e2ce61da367ebb1ffc commit: 0810fa79885276114d1a94e2ce61da367ebb1ffc branch: master author: Victor Stinner committer: GitHub date: 2019-04-15T17:54:09+02:00 summary: bpo-36389: Cleanup gc.set_threshold() (GH-12844) Don't assign generations[2].threshold to generations[2].threshold: useless operation. 
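For context (not part of the commit), the API being cleaned up here is the usual threshold interface; a quick interactive sketch:

    >>> import gc
    >>> gc.set_threshold(700, 10, 10)   # thresholds for generations 0, 1 and 2
    >>> gc.get_threshold()
    (700, 10, 10)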
files: M Modules/gcmodule.c diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c index fad1356d6b44..a75d5fed95f1 100644 --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -1374,7 +1374,7 @@ gc_set_thresh(PyObject *self, PyObject *args) &_PyRuntime.gc.generations[1].threshold, &_PyRuntime.gc.generations[2].threshold)) return NULL; - for (i = 2; i < NUM_GENERATIONS; i++) { + for (i = 3; i < NUM_GENERATIONS; i++) { /* generations higher than 2 get the same threshold */ _PyRuntime.gc.generations[i].threshold = _PyRuntime.gc.generations[2].threshold; } @@ -1524,7 +1524,7 @@ gc_get_objects_impl(PyObject *module, Py_ssize_t generation) } /* If generation is passed, we extract only that generation */ - if (generation != -1) { + if (generation != -1) { if (generation >= NUM_GENERATIONS) { PyErr_Format(PyExc_ValueError, "generation parameter must be less than the number of " From webhook-mailer at python.org Mon Apr 15 12:23:47 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 15 Apr 2019 16:23:47 -0000 Subject: [Python-checkins] bpo-36348: IMAP4.logout() doesn't ignore exc (GH-12411) Message-ID: https://github.com/python/cpython/commit/74125a60b7a477451ff2b8385bfbce3fdaee8dbc commit: 74125a60b7a477451ff2b8385bfbce3fdaee8dbc branch: master author: Victor Stinner committer: GitHub date: 2019-04-15T18:23:20+02:00 summary: bpo-36348: IMAP4.logout() doesn't ignore exc (GH-12411) The imap.IMAP4.logout() method no longer ignores silently arbitrary exceptions. Changes: * The IMAP4.logout() method now expects a "BYE" untagged response, rather than relying on _check_bye() which raises a self.abort() exception. * IMAP4.__exit__() now does nothing if the client already logged out. * Add more debug info if test_logout() tests fail. files: A Misc/NEWS.d/next/Library/2019-03-18-16-16-55.bpo-36348.E0w_US.rst M Doc/library/imaplib.rst M Doc/whatsnew/3.8.rst M Lib/imaplib.py M Lib/test/test_imaplib.py diff --git a/Doc/library/imaplib.rst b/Doc/library/imaplib.rst index d0709f8b678e..f027f82ddebe 100644 --- a/Doc/library/imaplib.rst +++ b/Doc/library/imaplib.rst @@ -327,6 +327,9 @@ An :class:`IMAP4` instance has the following methods: Shutdown connection to server. Returns server ``BYE`` response. + .. versionchanged:: 3.8 + The method no longer ignores silently arbitrary exceptions. + .. method:: IMAP4.lsub(directory='""', pattern='*') diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst index 39a0da5e61e9..f866f9ccb8c1 100644 --- a/Doc/whatsnew/3.8.rst +++ b/Doc/whatsnew/3.8.rst @@ -709,6 +709,9 @@ Changes in Python behavior Changes in the Python API ------------------------- +* The :meth:`imap.IMAP4.logout` method no longer ignores silently arbitrary + exceptions. + * The function :func:`platform.popen` has been removed, it was deprecated since Python 3.3: use :func:`os.popen` instead. diff --git a/Lib/imaplib.py b/Lib/imaplib.py index dd237f7704ac..341ee25ae965 100644 --- a/Lib/imaplib.py +++ b/Lib/imaplib.py @@ -272,6 +272,9 @@ def __enter__(self): return self def __exit__(self, *args): + if self.state == "LOGOUT": + return + try: self.logout() except OSError: @@ -625,11 +628,8 @@ def logout(self): Returns server 'BYE' response. 
""" self.state = 'LOGOUT' - try: typ, dat = self._simple_command('LOGOUT') - except: typ, dat = 'NO', ['%s: %s' % sys.exc_info()[:2]] + typ, dat = self._simple_command('LOGOUT') self.shutdown() - if 'BYE' in self.untagged_responses: - return 'BYE', self.untagged_responses['BYE'] return typ, dat @@ -1012,16 +1012,17 @@ def _command(self, name, *args): def _command_complete(self, name, tag): + logout = (name == 'LOGOUT') # BYE is expected after LOGOUT - if name != 'LOGOUT': + if not logout: self._check_bye() try: - typ, data = self._get_tagged_response(tag) + typ, data = self._get_tagged_response(tag, expect_bye=logout) except self.abort as val: raise self.abort('command: %s => %s' % (name, val)) except self.error as val: raise self.error('command: %s => %s' % (name, val)) - if name != 'LOGOUT': + if not logout: self._check_bye() if typ == 'BAD': raise self.error('%s command error: %s %s' % (name, typ, data)) @@ -1117,7 +1118,7 @@ def _get_response(self): return resp - def _get_tagged_response(self, tag): + def _get_tagged_response(self, tag, expect_bye=False): while 1: result = self.tagged_commands[tag] @@ -1125,9 +1126,15 @@ def _get_tagged_response(self, tag): del self.tagged_commands[tag] return result + if expect_bye: + typ = 'BYE' + bye = self.untagged_responses.pop(typ, None) + if bye is not None: + # Server replies to the "LOGOUT" command with "BYE" + return (typ, bye) + # If we've seen a BYE at this point, the socket will be # closed, so report the BYE now. - self._check_bye() # Some have reported "unexpected response" exceptions. diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py index aec36af6c525..9305e47ee993 100644 --- a/Lib/test/test_imaplib.py +++ b/Lib/test/test_imaplib.py @@ -470,8 +470,8 @@ def test_logout(self): self.assertEqual(typ, 'OK') self.assertEqual(data[0], b'LOGIN completed') typ, data = client.logout() - self.assertEqual(typ, 'BYE') - self.assertEqual(data[0], b'IMAP4ref1 Server logging out') + self.assertEqual(typ, 'BYE', (typ, data)) + self.assertEqual(data[0], b'IMAP4ref1 Server logging out', (typ, data)) self.assertEqual(client.state, 'LOGOUT') def test_lsub(self): @@ -937,7 +937,7 @@ def test_logout(self): with transient_internet(self.host): rs = self.server.logout() self.server = None - self.assertEqual(rs[0], 'BYE') + self.assertEqual(rs[0], 'BYE', rs) @unittest.skipUnless(ssl, "SSL not available") @@ -995,7 +995,7 @@ def test_logout(self): with transient_internet(self.host): _server = self.imap_class(self.host, self.port) rs = _server.logout() - self.assertEqual(rs[0], 'BYE') + self.assertEqual(rs[0], 'BYE', rs) def test_ssl_context_certfile_exclusive(self): with transient_internet(self.host): diff --git a/Misc/NEWS.d/next/Library/2019-03-18-16-16-55.bpo-36348.E0w_US.rst b/Misc/NEWS.d/next/Library/2019-03-18-16-16-55.bpo-36348.E0w_US.rst new file mode 100644 index 000000000000..2320b4c05b53 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-03-18-16-16-55.bpo-36348.E0w_US.rst @@ -0,0 +1,2 @@ +The :meth:`imap.IMAP4.logout` method no longer ignores silently arbitrary +exceptions. 
From webhook-mailer at python.org Mon Apr 15 12:45:08 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 15 Apr 2019 16:45:08 -0000 Subject: [Python-checkins] bpo-36348: test_imaplib: add debug info (GH-12846) Message-ID: https://github.com/python/cpython/commit/2815bf5b1f39b9f677135473392887a8d261fc97 commit: 2815bf5b1f39b9f677135473392887a8d261fc97 branch: 3.7 author: Victor Stinner committer: GitHub date: 2019-04-15T18:45:01+02:00 summary: bpo-36348: test_imaplib: add debug info (GH-12846) Log more info if tests fail. files: M Lib/test/test_imaplib.py diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py index aec36af6c525..9305e47ee993 100644 --- a/Lib/test/test_imaplib.py +++ b/Lib/test/test_imaplib.py @@ -470,8 +470,8 @@ def test_logout(self): self.assertEqual(typ, 'OK') self.assertEqual(data[0], b'LOGIN completed') typ, data = client.logout() - self.assertEqual(typ, 'BYE') - self.assertEqual(data[0], b'IMAP4ref1 Server logging out') + self.assertEqual(typ, 'BYE', (typ, data)) + self.assertEqual(data[0], b'IMAP4ref1 Server logging out', (typ, data)) self.assertEqual(client.state, 'LOGOUT') def test_lsub(self): @@ -937,7 +937,7 @@ def test_logout(self): with transient_internet(self.host): rs = self.server.logout() self.server = None - self.assertEqual(rs[0], 'BYE') + self.assertEqual(rs[0], 'BYE', rs) @unittest.skipUnless(ssl, "SSL not available") @@ -995,7 +995,7 @@ def test_logout(self): with transient_internet(self.host): _server = self.imap_class(self.host, self.port) rs = _server.logout() - self.assertEqual(rs[0], 'BYE') + self.assertEqual(rs[0], 'BYE', rs) def test_ssl_context_certfile_exclusive(self): with transient_internet(self.host): From webhook-mailer at python.org Mon Apr 15 19:32:38 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Mon, 15 Apr 2019 23:32:38 -0000 Subject: [Python-checkins] bpo-27860: ipaddress: fix Interface missed some attributes (GH-12836) Message-ID: https://github.com/python/cpython/commit/6fa84bd12c4b83bee6a41b989363230d5c03b96c commit: 6fa84bd12c4b83bee6a41b989363230d5c03b96c branch: master author: Inada Naoki committer: GitHub date: 2019-04-16T08:32:28+09:00 summary: bpo-27860: ipaddress: fix Interface missed some attributes (GH-12836) IPv4Interface and IPv6Interface did not has netmask and hostmask attributes when its argument is bytes or int. This commit extracts method for constructors of Network and Interface, and ensure Interface class always provides them. files: M Lib/ipaddress.py M Lib/test/test_ipaddress.py diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py index 7a3f36f3bc0f..909a55de4f19 100644 --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -532,6 +532,30 @@ def _prefix_from_ip_string(cls, ip_str): except ValueError: cls._report_invalid_netmask(ip_str) + @classmethod + def _split_addr_prefix(cls, address): + """Helper function to parse address of Network/Interface. + + Arg: + address: Argument of Network/Interface. + + Returns: + (addr, prefix) tuple. + """ + # a packed address or integer + if isinstance(address, (bytes, int)): + return address, cls._max_prefixlen + + if not isinstance(address, tuple): + # Assume input argument to be string or any object representation + # which converts into a formatted IP prefix string. 
+ address = _split_optional_netmask(address) + + # Constructing from a tuple (addr, [mask]) + if len(address) > 1: + return address + return address[0], cls._max_prefixlen + def __reduce__(self): return self.__class__, (str(self),) @@ -1305,32 +1329,16 @@ def is_link_local(self): class IPv4Interface(IPv4Address): def __init__(self, address): - if isinstance(address, (bytes, int)): - IPv4Address.__init__(self, address) - self.network = IPv4Network(self._ip) - self._prefixlen = self._max_prefixlen - return - - if isinstance(address, tuple): - IPv4Address.__init__(self, address[0]) - if len(address) > 1: - self._prefixlen = int(address[1]) - else: - self._prefixlen = self._max_prefixlen - - self.network = IPv4Network(address, strict=False) - self.netmask = self.network.netmask - self.hostmask = self.network.hostmask - return - - addr = _split_optional_netmask(address) - IPv4Address.__init__(self, addr[0]) + addr, mask = self._split_addr_prefix(address) - self.network = IPv4Network(address, strict=False) + IPv4Address.__init__(self, addr) + self.network = IPv4Network((addr, mask), strict=False) + self.netmask = self.network.netmask self._prefixlen = self.network._prefixlen - self.netmask = self.network.netmask - self.hostmask = self.network.hostmask + @functools.cached_property + def hostmask(self): + return self.network.hostmask def __str__(self): return '%s/%d' % (self._string_from_ip_int(self._ip), @@ -1435,20 +1443,7 @@ def __init__(self, address, strict=True): ValueError: If strict is True and a network address is not supplied. """ - # Constructing from a packed address or integer - if isinstance(address, (int, bytes)): - addr = address - mask = self._max_prefixlen - # Constructing from a tuple (addr, [mask]) - elif isinstance(address, tuple): - addr = address[0] - mask = address[1] if len(address) > 1 else self._max_prefixlen - # Assume input argument to be string or any object representation - # which converts into a formatted IP prefix string. 
- else: - args = _split_optional_netmask(address) - addr = self._ip_int_from_string(args[0]) - mask = args[1] if len(args) == 2 else self._max_prefixlen + addr, mask = self._split_addr_prefix(address) self.network_address = IPv4Address(addr) self.netmask, self._prefixlen = self._make_netmask(mask) @@ -1979,28 +1974,16 @@ def sixtofour(self): class IPv6Interface(IPv6Address): def __init__(self, address): - if isinstance(address, (bytes, int)): - IPv6Address.__init__(self, address) - self.network = IPv6Network(self._ip) - self._prefixlen = self._max_prefixlen - return - if isinstance(address, tuple): - IPv6Address.__init__(self, address[0]) - if len(address) > 1: - self._prefixlen = int(address[1]) - else: - self._prefixlen = self._max_prefixlen - self.network = IPv6Network(address, strict=False) - self.netmask = self.network.netmask - self.hostmask = self.network.hostmask - return + addr, mask = self._split_addr_prefix(address) - addr = _split_optional_netmask(address) - IPv6Address.__init__(self, addr[0]) - self.network = IPv6Network(address, strict=False) + IPv6Address.__init__(self, addr) + self.network = IPv6Network((addr, mask), strict=False) self.netmask = self.network.netmask self._prefixlen = self.network._prefixlen - self.hostmask = self.network.hostmask + + @functools.cached_property + def hostmask(self): + return self.network.hostmask def __str__(self): return '%s/%d' % (self._string_from_ip_int(self._ip), @@ -2110,20 +2093,7 @@ def __init__(self, address, strict=True): ValueError: If strict was True and a network address was not supplied. """ - # Constructing from a packed address or integer - if isinstance(address, (int, bytes)): - addr = address - mask = self._max_prefixlen - # Constructing from a tuple (addr, [mask]) - elif isinstance(address, tuple): - addr = address[0] - mask = address[1] if len(address) > 1 else self._max_prefixlen - # Assume input argument to be string or any object representation - # which converts into a formatted IP prefix string. - else: - args = _split_optional_netmask(address) - addr = self._ip_int_from_string(args[0]) - mask = args[1] if len(args) == 2 else self._max_prefixlen + addr, mask = self._split_addr_prefix(address) self.network_address = IPv6Address(addr) self.netmask, self._prefixlen = self._make_netmask(mask) diff --git a/Lib/test/test_ipaddress.py b/Lib/test/test_ipaddress.py index 15317c944630..20316f15f8cf 100644 --- a/Lib/test/test_ipaddress.py +++ b/Lib/test/test_ipaddress.py @@ -399,7 +399,13 @@ class NetmaskTestMixin_v4(CommonTestMixin_v4): """Input validation on interfaces and networks is very similar""" def test_no_mask(self): - self.assertEqual(str(self.factory('1.2.3.4')), '1.2.3.4/32') + for address in ('1.2.3.4', 0x01020304, b'\x01\x02\x03\x04'): + net = self.factory(address) + self.assertEqual(str(net), '1.2.3.4/32') + self.assertEqual(str(net.netmask), '255.255.255.255') + self.assertEqual(str(net.hostmask), '0.0.0.0') + # IPv4Network has prefixlen, but IPv4Interface doesn't. + # Should we add it to IPv4Interface too? 
(bpo-36392) def test_split_netmask(self): addr = "1.2.3.4/32/24" @@ -527,6 +533,15 @@ def test_subnet_of_mixed_types(self): class NetmaskTestMixin_v6(CommonTestMixin_v6): """Input validation on interfaces and networks is very similar""" + def test_no_mask(self): + for address in ('::1', 1, b'\x00'*15 + b'\x01'): + net = self.factory(address) + self.assertEqual(str(net), '::1/128') + self.assertEqual(str(net.netmask), 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff') + self.assertEqual(str(net.hostmask), '::') + # IPv6Network has prefixlen, but IPv6Interface doesn't. + # Should we add it to IPv4Interface too? (bpo-36392) + def test_split_netmask(self): addr = "cafe:cafe::/128/190" with self.assertAddressError("Only one '/' permitted in %r" % addr): From webhook-mailer at python.org Tue Apr 16 08:53:15 2019 From: webhook-mailer at python.org (=?utf-8?q?St=C3=A9phane?= Wirtel) Date: Tue, 16 Apr 2019 12:53:15 -0000 Subject: [Python-checkins] bpo-36345: Update wsgiref example (GH-12562) Message-ID: https://github.com/python/cpython/commit/2b7f93b99afbe78e4e567d9252d9470d29f387c8 commit: 2b7f93b99afbe78e4e567d9252d9470d29f387c8 branch: master author: St?phane Wirtel committer: GitHub date: 2019-04-16T14:52:54+02:00 summary: bpo-36345: Update wsgiref example (GH-12562) Use literalinclude markup to include Tools/scripts/serve.py code. Tools/scripts/serve.py first argument on the command line is now optional. files: A Misc/NEWS.d/next/Documentation/2019-03-26-14-58-34.bpo-36345.r2stx3.rst M Doc/library/wsgiref.rst M Tools/scripts/serve.py diff --git a/Doc/library/wsgiref.rst b/Doc/library/wsgiref.rst index b85ec53c8ae5..ec5136742fa2 100644 --- a/Doc/library/wsgiref.rst +++ b/Doc/library/wsgiref.rst @@ -783,33 +783,7 @@ This is a working "Hello World" WSGI application:: httpd.serve_forever() -Example of a small wsgiref-based web server:: - - # Takes a path to serve from and an optional port number (defaults to 8000), - # then tries to serve files. Mime types are guessed from the file names, 404 - # errors are raised if the file is not found. - import sys - import os - import mimetypes - from wsgiref import simple_server, util - - def app(environ, respond): - fn = os.path.join(path, environ['PATH_INFO'][1:]) - if '.' not in fn.split(os.path.sep)[-1]: - fn = os.path.join(fn, 'index.html') - type = mimetypes.guess_type(fn)[0] - - if os.path.exists(fn): - respond('200 OK', [('Content-Type', type)]) - return util.FileWrapper(open(fn, "rb")) - else: - respond('404 Not Found', [('Content-Type', 'text/plain')]) - return [b'not found'] - - path = sys.argv[1] - port = int(sys.argv[2]) if len(sys.argv) > 2 else 8000 - with simple_server.make_server('', port, app) as httpd: - print("Serving {} on port {}, control-C to stop".format(path, port)) - - # Serve until process is killed - httpd.serve_forever() +Example of a WSGI application serving the current directory, accept optional +directory and port number (default: 8000) on the command line: + +.. literalinclude:: ../../Tools/scripts/serve.py diff --git a/Misc/NEWS.d/next/Documentation/2019-03-26-14-58-34.bpo-36345.r2stx3.rst b/Misc/NEWS.d/next/Documentation/2019-03-26-14-58-34.bpo-36345.r2stx3.rst new file mode 100644 index 000000000000..bbecc947cafa --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2019-03-26-14-58-34.bpo-36345.r2stx3.rst @@ -0,0 +1,3 @@ +Avoid the duplication of code from ``Tools/scripts/serve.py`` in using the +:rst:dir:`literalinclude` directive for the basic wsgiref-based web server in the +documentation of :mod:`wsgiref`. 
Contributed by Stéphane Wirtel. diff --git a/Tools/scripts/serve.py b/Tools/scripts/serve.py index dae21f2260ff..7ac9c1050783 100755 --- a/Tools/scripts/serve.py +++ b/Tools/scripts/serve.py @@ -25,11 +25,12 @@ def app(environ, respond): return [b'not found'] if __name__ == '__main__': - path = sys.argv[1] + path = sys.argv[1] if len(sys.argv) > 1 else os.getcwd() port = int(sys.argv[2]) if len(sys.argv) > 2 else 8000 httpd = simple_server.make_server('', port, app) print("Serving {} on port {}, control-C to stop".format(path, port)) try: httpd.serve_forever() except KeyboardInterrupt: - print("\b\bShutting down.") + print("Shutting down.") + httpd.server_close() From webhook-mailer at python.org Tue Apr 16 09:01:37 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 16 Apr 2019 13:01:37 -0000 Subject: [Python-checkins] bpo-36508: python-config don't export LINKFORSHARED (GH-12661) (GH-12748) Message-ID: https://github.com/python/cpython/commit/cd46b09b0863c787dd54c433fae52bd8bdfaecd0 commit: cd46b09b0863c787dd54c433fae52bd8bdfaecd0 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: Victor Stinner date: 2019-04-16T15:01:33+02:00 summary: bpo-36508: python-config don't export LINKFORSHARED (GH-12661) (GH-12748) python-config --ldflags no longer includes flags of the LINKFORSHARED variable. The LINKFORSHARED variable must only be used to build executables. (cherry picked from commit e65f01f78d7bda3013fc5be485afa87ff56511d9) Co-authored-by: Victor Stinner files: A Misc/NEWS.d/next/Build/2019-04-02-17-01-23.bpo-36508.SN5Y6N.rst M Makefile.pre.in M Misc/python-config.in M Misc/python-config.sh.in diff --git a/Makefile.pre.in b/Makefile.pre.in index 2d2e11f17209..babe4e041681 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -106,6 +106,8 @@ NO_AS_NEEDED= @NO_AS_NEEDED@ LDLAST= @LDLAST@ SGI_ABI= @SGI_ABI@ CCSHARED= @CCSHARED@ +# LINKFORSHARED are the flags passed to the $(CC) command that links +# the python executable -- this is only needed for a few systems LINKFORSHARED= @LINKFORSHARED@ ARFLAGS= @ARFLAGS@ # Extra C flags added for building the interpreter object files. diff --git a/Misc/NEWS.d/next/Build/2019-04-02-17-01-23.bpo-36508.SN5Y6N.rst b/Misc/NEWS.d/next/Build/2019-04-02-17-01-23.bpo-36508.SN5Y6N.rst new file mode 100644 index 000000000000..62f80840a044 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2019-04-02-17-01-23.bpo-36508.SN5Y6N.rst @@ -0,0 +1,3 @@ +``python-config --ldflags`` no longer includes flags of the +``LINKFORSHARED`` variable. The ``LINKFORSHARED`` variable must only be used +to build executables.
diff --git a/Misc/python-config.in b/Misc/python-config.in index e13da7543c9f..714415222798 100644 --- a/Misc/python-config.in +++ b/Misc/python-config.in @@ -55,8 +55,6 @@ for opt in opt_flags: if opt == '--ldflags': if not getvar('Py_ENABLE_SHARED'): libs.insert(0, '-L' + getvar('LIBPL')) - if not getvar('PYTHONFRAMEWORK'): - libs.extend(getvar('LINKFORSHARED').split()) print(' '.join(libs)) elif opt == '--extension-suffix': diff --git a/Misc/python-config.sh.in b/Misc/python-config.sh.in index d1d3275fa275..a3c479ce571f 100644 --- a/Misc/python-config.sh.in +++ b/Misc/python-config.sh.in @@ -44,7 +44,6 @@ ABIFLAGS="@ABIFLAGS@" LIBS="-lpython${VERSION}${ABIFLAGS} @LIBS@ $SYSLIBS" BASECFLAGS="@BASECFLAGS@" LDLIBRARY="@LDLIBRARY@" -LINKFORSHARED="@LINKFORSHARED@" OPT="@OPT@" PY_ENABLE_SHARED="@PY_ENABLE_SHARED@" LDVERSION="@LDVERSION@" @@ -89,15 +88,11 @@ do echo "$LIBS" ;; --ldflags) - LINKFORSHAREDUSED= - if [ -z "$PYTHONFRAMEWORK" ] ; then - LINKFORSHAREDUSED=$LINKFORSHARED - fi LIBPLUSED= if [ "$PY_ENABLE_SHARED" = "0" ] ; then LIBPLUSED="-L$LIBPL" fi - echo "$LIBPLUSED -L$libdir $LIBS $LINKFORSHAREDUSED" + echo "$LIBPLUSED -L$libdir $LIBS" ;; --extension-suffix) echo "$SO" From webhook-mailer at python.org Tue Apr 16 09:55:11 2019 From: webhook-mailer at python.org (Berker Peksag) Date: Tue, 16 Apr 2019 13:55:11 -0000 Subject: [Python-checkins] Add myself to CODEOWNERS for sqlite3 and urllib.robotparser (GH-12856) Message-ID: https://github.com/python/cpython/commit/2f5b44879f244fbb577bd97df844b7bd4b9a19a5 commit: 2f5b44879f244fbb577bd97df844b7bd4b9a19a5 branch: master author: Berker Peksag committer: GitHub date: 2019-04-16T16:54:56+03:00 summary: Add myself to CODEOWNERS for sqlite3 and urllib.robotparser (GH-12856) files: M .github/CODEOWNERS diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 40d2cc127690..2ca555b7fa50 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -43,6 +43,9 @@ Python/bootstrap_hash.c @python/crypto-team **/*imap* @python/email-team **/*poplib* @python/email-team +# SQLite 3 +**/*sqlite* @berkerpeksag + # subprocess **/*subprocess* @gpshead @@ -50,6 +53,9 @@ Python/bootstrap_hash.c @python/crypto-team /PC/ @python/windows-team /PCbuild/ @python/windows-team +# Urllib +**/*robotparser* @berkerpeksag + # Windows installer packages /Tools/msi/ @python/windows-team /Tools/nuget/ @python/windows-team From webhook-mailer at python.org Tue Apr 16 12:47:33 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 16 Apr 2019 16:47:33 -0000 Subject: [Python-checkins] =?utf-8?q?bpo-33783=3A_Use_proper_class_markup?= =?utf-8?q?_for_random=2ERandom_docs_=28GH=C3=A87817=29?= Message-ID: https://github.com/python/cpython/commit/31e8d69bfe7cf5d4ffe0967cb225d2a8a229cc97 commit: 31e8d69bfe7cf5d4ffe0967cb225d2a8a229cc97 branch: master author: Matthias Bussonnier committer: Victor Stinner date: 2019-04-16T18:47:11+02:00 summary: bpo-33783: Use proper class markup for random.Random docs (GH?7817) Signed-off-by: Matthias Bussonnier files: M Doc/library/random.rst diff --git a/Doc/library/random.rst b/Doc/library/random.rst index 79a7bddad497..fcedba4dbc20 100644 --- a/Doc/library/random.rst +++ b/Doc/library/random.rst @@ -310,6 +310,11 @@ be found in any statistics text. Alternative Generator --------------------- +.. class:: Random([seed]) + + Class that implements the default pseudo-random number generator used by the + :mod:`random` module. + .. 
class:: SystemRandom([seed]) Class that uses the :func:`os.urandom` function for generating random numbers From webhook-mailer at python.org Tue Apr 16 17:32:46 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 16 Apr 2019 21:32:46 -0000 Subject: [Python-checkins] bpo-33783: Use proper class markup for random.Random docs (GH-7817) (GH-12859) Message-ID: https://github.com/python/cpython/commit/a6fce19968cd39dfbc2684f97ca3184d9996f61e commit: a6fce19968cd39dfbc2684f97ca3184d9996f61e branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: Victor Stinner date: 2019-04-16T23:32:32+02:00 summary: bpo-33783: Use proper class markup for random.Random docs (GH-7817) (GH-12859) Signed-off-by: Matthias Bussonnier (cherry picked from commit 31e8d69bfe7cf5d4ffe0967cb225d2a8a229cc97) Co-authored-by: Matthias Bussonnier files: M Doc/library/random.rst diff --git a/Doc/library/random.rst b/Doc/library/random.rst index 7d051e185429..42979ffbe38e 100644 --- a/Doc/library/random.rst +++ b/Doc/library/random.rst @@ -310,6 +310,11 @@ be found in any statistics text. Alternative Generator --------------------- +.. class:: Random([seed]) + + Class that implements the default pseudo-random number generator used by the + :mod:`random` module. + .. class:: SystemRandom([seed]) Class that uses the :func:`os.urandom` function for generating random numbers From webhook-mailer at python.org Tue Apr 16 19:39:59 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Tue, 16 Apr 2019 23:39:59 -0000 Subject: [Python-checkins] bpo-36641: Add "const" to PyDoc_VAR macro (GH-12854) Message-ID: https://github.com/python/cpython/commit/926b0cb5f688808dc11448a0bf3e452d1b92c232 commit: 926b0cb5f688808dc11448a0bf3e452d1b92c232 branch: master author: Inada Naoki committer: GitHub date: 2019-04-17T08:39:46+09:00 summary: bpo-36641: Add "const" to PyDoc_VAR macro (GH-12854) It reduces "data" segment in python about 200KB. files: A Misc/NEWS.d/next/C API/2019-04-16-21-18-19.bpo-36641.pz-DIR.rst M Include/pymacro.h M Modules/_ssl.c diff --git a/Include/pymacro.h b/Include/pymacro.h index 3f6ddbe9977a..546f9c6e7020 100644 --- a/Include/pymacro.h +++ b/Include/pymacro.h @@ -67,7 +67,7 @@ /* Define macros for inline documentation. */ -#define PyDoc_VAR(name) static char name[] +#define PyDoc_VAR(name) static const char name[] #define PyDoc_STRVAR(name,str) PyDoc_VAR(name) = PyDoc_STR(str) #ifdef WITH_DOC_STRINGS #define PyDoc_STR(str) str diff --git a/Misc/NEWS.d/next/C API/2019-04-16-21-18-19.bpo-36641.pz-DIR.rst b/Misc/NEWS.d/next/C API/2019-04-16-21-18-19.bpo-36641.pz-DIR.rst new file mode 100644 index 000000000000..f92af63029be --- /dev/null +++ b/Misc/NEWS.d/next/C API/2019-04-16-21-18-19.bpo-36641.pz-DIR.rst @@ -0,0 +1,2 @@ +:c:macro:`PyDoc_VAR(name)` and :c:macro:`PyDoc_STRVAR(name,str)` now create +``static const char name[]`` instead of ``static char name[]``. Patch by Inada Naoki. 
diff --git a/Modules/_ssl.c b/Modules/_ssl.c index f8ae916735f4..e75e3466dd3f 100644 --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -557,7 +557,7 @@ SSLError_str(PyOSErrorObject *self) static PyType_Slot sslerror_type_slots[] = { {Py_tp_base, NULL}, /* Filled out in module init as it's not a constant */ - {Py_tp_doc, SSLError_doc}, + {Py_tp_doc, (void*)SSLError_doc}, {Py_tp_str, SSLError_str}, {0, 0}, }; From webhook-mailer at python.org Tue Apr 16 19:40:37 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Tue, 16 Apr 2019 23:40:37 -0000 Subject: [Python-checkins] bpo-36642: make unicodedata const (GH-12855) Message-ID: https://github.com/python/cpython/commit/6fec905de5c139017f36b212e54cac46959808fe commit: 6fec905de5c139017f36b212e54cac46959808fe branch: master author: Inada Naoki committer: GitHub date: 2019-04-17T08:40:34+09:00 summary: bpo-36642: make unicodedata const (GH-12855) files: M Modules/unicodedata.c M Modules/unicodedata_db.h M Modules/unicodename_db.h M Objects/unicodetype_db.h M Tools/unicode/makeunicodedata.py diff --git a/Modules/unicodedata.c b/Modules/unicodedata.c index 9ceab1b3db4f..7fdbf332ee74 100644 --- a/Modules/unicodedata.c +++ b/Modules/unicodedata.c @@ -1029,7 +1029,7 @@ _getucname(PyObject *self, Py_UCS4 code, char* buffer, int buflen, int offset; int i; int word; - unsigned char* w; + const unsigned char* w; if (code >= 0x110000) return 0; diff --git a/Modules/unicodedata_db.h b/Modules/unicodedata_db.h index 11c7dc87244a..66f81e311e29 100644 --- a/Modules/unicodedata_db.h +++ b/Modules/unicodedata_db.h @@ -699,7 +699,7 @@ static const char *decomp_prefix[] = { }; /* index tables for the database records */ #define SHIFT 7 -static unsigned short index1[] = { +static const unsigned short index1[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 41, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, @@ -1322,7 +1322,7 @@ static unsigned short index1[] = { 121, 260, }; -static unsigned short index2[] = { +static const unsigned short index2[] = { 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 3, 2, 4, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 3, 3, 2, 5, 6, 6, 7, 8, 7, 6, 6, 9, 10, 6, 11, 12, 13, 12, 12, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 12, 6, 15, 16, 15, 6, 6, 17, @@ -3260,7 +3260,7 @@ static unsigned short index2[] = { }; /* decomposition data */ -static unsigned int decomp_data[] = { +static const unsigned int decomp_data[] = { 0, 257, 32, 514, 32, 776, 259, 97, 514, 32, 772, 259, 50, 259, 51, 514, 32, 769, 258, 956, 514, 32, 807, 259, 49, 259, 111, 772, 49, 8260, 52, 772, 49, 8260, 50, 772, 51, 8260, 52, 512, 65, 768, 512, 65, 769, 512, @@ -4328,7 +4328,7 @@ static unsigned int decomp_data[] = { /* index tables for the decomposition data */ #define DECOMP_SHIFT 7 -static unsigned char decomp_index1[] = { +static const unsigned char decomp_index1[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 10, 11, 12, 0, 0, 0, 0, 13, 14, 15, 0, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0, 29, 30, 31, 32, 33, 34, @@ -4695,7 +4695,7 @@ static unsigned char decomp_index1[] = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, }; -static unsigned short decomp_index2[] = { +static const unsigned short decomp_index2[] = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, @@ -5512,7 +5512,7 @@ static unsigned short decomp_index2[] = { /* NFC pairs */ #define COMP_SHIFT 2 -static unsigned short comp_index[] = { +static const unsigned short comp_index[] = { 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 4, 5, 6, 7, 8, 9, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 0, 0, 12, 0, 13, 0, 0, @@ -5772,7 +5772,7 @@ static unsigned short comp_index[] = { 656, }; -static unsigned int comp_data[] = { +static const unsigned int comp_data[] = { 0, 0, 0, 0, 0, 0, 0, 8814, 0, 8800, 0, 0, 0, 0, 0, 8815, 0, 0, 192, 193, 194, 195, 256, 258, 550, 196, 7842, 197, 0, 461, 512, 514, 0, 0, 0, 7840, 0, 7680, 0, 0, 260, 0, 0, 0, 0, 0, 7682, 0, 0, 7684, 0, 0, 0, 0, 7686, 0, @@ -5982,7 +5982,7 @@ static const change_record change_records_3_2_0[] = { { 255, 19, 255, 255, 255, -1 }, { 1, 255, 255, 0, 255, 0 }, }; -static unsigned char changes_3_2_0_index[] = { +static const unsigned char changes_3_2_0_index[] = { 0, 1, 2, 2, 3, 4, 5, 6, 2, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 2, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 2, 2, 2, 38, 39, 2, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, @@ -6356,7 +6356,7 @@ static unsigned char changes_3_2_0_index[] = { 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, }; -static unsigned char changes_3_2_0_data[] = { +static const unsigned char changes_3_2_0_data[] = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, diff --git a/Modules/unicodename_db.h b/Modules/unicodename_db.h index 8fa87a02cd26..372616966aa4 100644 --- a/Modules/unicodename_db.h +++ b/Modules/unicodename_db.h @@ -3,7 +3,7 @@ #define NAME_MAXLEN 256 /* lexicon */ -static unsigned char lexicon[] = { +static const unsigned char lexicon[] = { 76, 69, 84, 84, 69, 210, 83, 73, 71, 206, 87, 73, 84, 200, 83, 77, 65, 76, 204, 83, 89, 76, 76, 65, 66, 76, 197, 67, 65, 80, 73, 84, 65, 204, 72, 73, 69, 82, 79, 71, 76, 89, 80, 200, 76, 65, 84, 73, 206, 65, 82, 65, @@ -6442,7 +6442,7 @@ static unsigned char lexicon[] = { 45, 68, 90, 85, 196, 45, 67, 72, 65, 210, 45, 67, 72, 65, 76, 128, }; -static unsigned int lexicon_offset[] = { +static const unsigned int lexicon_offset[] = { 0, 0, 6, 10, 14, 19, 27, 34, 44, 49, 55, 64, 66, 69, 81, 89, 102, 108, 113, 118, 124, 129, 137, 146, 157, 162, 167, 170, 174, 183, 189, 195, 201, 206, 214, 221, 229, 171, 232, 241, 242, 250, 256, 261, 266, 273, @@ -8012,7 +8012,7 @@ static unsigned int lexicon_offset[] = { /* code->name phrasebook */ #define phrasebook_shift 7 #define phrasebook_short 194 -static unsigned char phrasebook[] = { +static const unsigned char phrasebook[] = { 0, 205, 148, 236, 89, 78, 211, 61, 78, 31, 55, 239, 9, 55, 213, 44, 55, 251, 110, 251, 29, 50, 213, 139, 53, 213, 139, 250, 178, 98, 55, 244, 158, 231, 5, 234, 216, 204, 226, 205, 177, 17, 195, 79, 17, 100, 17, 102, @@ -19073,7 +19073,7 @@ static unsigned char phrasebook[] = { 73, 241, 124, 152, 154, }; -static unsigned short phrasebook_offset1[] = { +static const unsigned short phrasebook_offset1[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, @@ -19696,7 +19696,7 @@ static unsigned short 
phrasebook_offset1[] = { 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, }; -static unsigned int phrasebook_offset2[] = { +static const unsigned int phrasebook_offset2[] = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 6, 9, 11, 14, 17, 19, 21, 24, 27, 29, 31, 33, 35, 39, 41, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 69, 72, @@ -23496,7 +23496,7 @@ static unsigned int phrasebook_offset2[] = { }; /* name->code dictionary */ -static unsigned int code_hash[] = { +static const unsigned int code_hash[] = { 74224, 4851, 0, 0, 0, 0, 7929, 0, 0, 0, 0, 127931, 0, 42833, 983091, 12064, 0, 129548, 194597, 69850, 65842, 0, 0, 0, 78159, 68476, 72392, 1373, 0, 0, 5816, 0, 0, 4231, 0, 0, 4233, 4234, 4232, 68885, 70351, 0, diff --git a/Objects/unicodetype_db.h b/Objects/unicodetype_db.h index e64b5059342a..957bd4c62126 100644 --- a/Objects/unicodetype_db.h +++ b/Objects/unicodetype_db.h @@ -1750,7 +1750,7 @@ const Py_UCS4 _PyUnicode_ExtendedCase[] = { /* type indexes */ #define SHIFT 7 -static unsigned short index1[] = { +static const unsigned short index1[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 34, 35, 36, 37, 38, 39, 34, 34, 34, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, @@ -2359,7 +2359,7 @@ static unsigned short index1[] = { 126, 126, 126, 126, 126, 126, 126, 269, }; -static unsigned short index2[] = { +static const unsigned short index2[] = { 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 3, 3, 3, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 3, 3, 2, 4, 5, 5, 5, 5, 5, 5, 6, 5, 5, 5, 5, 5, 5, 6, 5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 6, 5, 5, 5, 5, 5, 5, 17, 17, 17, 17, diff --git a/Tools/unicode/makeunicodedata.py b/Tools/unicode/makeunicodedata.py index 9327693a1732..2550b8f940c2 100644 --- a/Tools/unicode/makeunicodedata.py +++ b/Tools/unicode/makeunicodedata.py @@ -1249,7 +1249,7 @@ def dump(self, file, trace=0): size = getsize(self.data) if trace: print(self.name+":", size*len(self.data), "bytes", file=sys.stderr) - file.write("static ") + file.write("static const ") if size == 1: file.write("unsigned char") elif size == 2: From webhook-mailer at python.org Wed Apr 17 05:46:55 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 17 Apr 2019 09:46:55 -0000 Subject: [Python-checkins] bpo-31904: Fix test_tabnanny on VxWorks (GH-12646) Message-ID: https://github.com/python/cpython/commit/36c41bc2017921321dbb19557f616a6bb7572c83 commit: 36c41bc2017921321dbb19557f616a6bb7572c83 branch: master author: Lihua Zhao <44661095+LihuaZhao at users.noreply.github.com> committer: Victor Stinner date: 2019-04-17T11:46:50+02:00 summary: bpo-31904: Fix test_tabnanny on VxWorks (GH-12646) Fix test_tabnanny on VxWorks: adjust ENOENT error message, use os.strerror(). 
files: A Misc/NEWS.d/next/Tests/2019-04-01-16-06-36.bpo-31904.peaceF.rst M Lib/test/test_tabnanny.py diff --git a/Lib/test/test_tabnanny.py b/Lib/test/test_tabnanny.py index 845096e63c26..81549d14ae2b 100644 --- a/Lib/test/test_tabnanny.py +++ b/Lib/test/test_tabnanny.py @@ -6,6 +6,8 @@ from unittest import TestCase, mock from unittest import mock import errno +import os +import sys import tabnanny import tokenize import tempfile @@ -233,8 +235,8 @@ def test_when_nannynag_error(self): def test_when_no_file(self): """A python file which does not exist actually in system.""" path = 'no_file.py' - err = f"{path!r}: I/O Error: [Errno {errno.ENOENT}] " \ - f"No such file or directory: {path!r}\n" + err = (f"{path!r}: I/O Error: [Errno {errno.ENOENT}] " + f"{os.strerror(errno.ENOENT)}: {path!r}\n") self.verify_tabnanny_check(path, err=err) def test_errored_directory(self): diff --git a/Misc/NEWS.d/next/Tests/2019-04-01-16-06-36.bpo-31904.peaceF.rst b/Misc/NEWS.d/next/Tests/2019-04-01-16-06-36.bpo-31904.peaceF.rst new file mode 100644 index 000000000000..6297717e0fc6 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2019-04-01-16-06-36.bpo-31904.peaceF.rst @@ -0,0 +1 @@ +Fix test_tabnanny on VxWorks: adjust ENOENT error message. From webhook-mailer at python.org Wed Apr 17 08:18:52 2019 From: webhook-mailer at python.org (Cheryl Sabella) Date: Wed, 17 Apr 2019 12:18:52 -0000 Subject: [Python-checkins] Clarify file-closing example in tutorial (GH-11652) Message-ID: https://github.com/python/cpython/commit/71ce03df9c643faa94fbdf73bbb4e99a9a62cbdc commit: 71ce03df9c643faa94fbdf73bbb4e99a9a62cbdc branch: master author: Colin Watson committer: Cheryl Sabella date: 2019-04-17T08:18:37-04:00 summary: Clarify file-closing example in tutorial (GH-11652) files: M Doc/tutorial/inputoutput.rst diff --git a/Doc/tutorial/inputoutput.rst b/Doc/tutorial/inputoutput.rst index 79427860f518..fc2bd5578c4c 100644 --- a/Doc/tutorial/inputoutput.rst +++ b/Doc/tutorial/inputoutput.rst @@ -322,6 +322,8 @@ equivalent :keyword:`try`\ -\ :keyword:`finally` blocks:: >>> with open('workfile') as f: ... read_data = f.read() + + >>> # We can check that the file has been automatically closed. >>> f.closed True From webhook-mailer at python.org Wed Apr 17 10:26:44 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 17 Apr 2019 14:26:44 -0000 Subject: [Python-checkins] bpo-35755: shutil.which() uses os.confstr("CS_PATH") (GH-12858) Message-ID: https://github.com/python/cpython/commit/228a3c99bdb2d02771bead66a0beabafad3a90d3 commit: 228a3c99bdb2d02771bead66a0beabafad3a90d3 branch: master author: Victor Stinner committer: GitHub date: 2019-04-17T16:26:36+02:00 summary: bpo-35755: shutil.which() uses os.confstr("CS_PATH") (GH-12858) shutil.which() and distutils.spawn.find_executable() now use os.confstr("CS_PATH") if available instead of os.defpath, if the PATH environment variable is not set. Don't use os.confstr("CS_PATH") nor os.defpath if the PATH environment variable is set to an empty string to mimick Unix 'which' command behavior. Changes: * find_executable() now starts by checking for the executable in the current working directly case. Add an explicit "if not path: return None". * Add tests for PATH='' (empty string), PATH=':' and for PATHEXT. 
files: A Misc/NEWS.d/next/Library/2019-04-16-17-50-39.bpo-35755.Fg4EXb.rst M Lib/distutils/spawn.py M Lib/distutils/tests/test_spawn.py M Lib/shutil.py M Lib/test/test_shutil.py diff --git a/Lib/distutils/spawn.py b/Lib/distutils/spawn.py index 538768809327..888327270e22 100644 --- a/Lib/distutils/spawn.py +++ b/Lib/distutils/spawn.py @@ -172,21 +172,32 @@ def find_executable(executable, path=None): A string listing directories separated by 'os.pathsep'; defaults to os.environ['PATH']. Returns the complete filename or None if not found. """ - if path is None: - path = os.environ.get('PATH', os.defpath) - - paths = path.split(os.pathsep) - base, ext = os.path.splitext(executable) - + _, ext = os.path.splitext(executable) if (sys.platform == 'win32') and (ext != '.exe'): executable = executable + '.exe' - if not os.path.isfile(executable): - for p in paths: - f = os.path.join(p, executable) - if os.path.isfile(f): - # the file exists, we have a shot at spawn working - return f - return None - else: + if os.path.isfile(executable): return executable + + if path is None: + path = os.environ.get('PATH', None) + if path is None: + try: + path = os.confstr("CS_PATH") + except (AttributeError, ValueError): + # os.confstr() or CS_PATH is not available + path = os.defpath + # bpo-35755: Don't use os.defpath if the PATH environment variable is + # set to an empty string to mimick Unix which command behavior + + # PATH='' doesn't match, whereas PATH=':' looks in the current directory + if not path: + return None + + paths = path.split(os.pathsep) + for p in paths: + f = os.path.join(p, executable) + if os.path.isfile(f): + # the file exists, we have a shot at spawn working + return f + return None diff --git a/Lib/distutils/tests/test_spawn.py b/Lib/distutils/tests/test_spawn.py index 0d455385d8ac..f9ae69ef86b3 100644 --- a/Lib/distutils/tests/test_spawn.py +++ b/Lib/distutils/tests/test_spawn.py @@ -87,11 +87,52 @@ def test_find_executable(self): rv = find_executable(dont_exist_program , path=tmp_dir) self.assertIsNone(rv) - # test os.defpath: missing PATH environment variable + # PATH='': no match, except in the current directory with test_support.EnvironmentVarGuard() as env: - with mock.patch('distutils.spawn.os.defpath', tmp_dir): - env.pop('PATH') + env['PATH'] = '' + with unittest.mock.patch('distutils.spawn.os.confstr', + return_value=tmp_dir, create=True), \ + unittest.mock.patch('distutils.spawn.os.defpath', + tmp_dir): + rv = find_executable(program) + self.assertIsNone(rv) + + # look in current directory + with test_support.change_cwd(tmp_dir): + rv = find_executable(program) + self.assertEqual(rv, program) + + # PATH=':': explicitly looks in the current directory + with test_support.EnvironmentVarGuard() as env: + env['PATH'] = os.pathsep + with unittest.mock.patch('distutils.spawn.os.confstr', + return_value='', create=True), \ + unittest.mock.patch('distutils.spawn.os.defpath', ''): + rv = find_executable(program) + self.assertIsNone(rv) + + # look in current directory + with test_support.change_cwd(tmp_dir): + rv = find_executable(program) + self.assertEqual(rv, program) + + # missing PATH: test os.confstr("CS_PATH") and os.defpath + with test_support.EnvironmentVarGuard() as env: + env.pop('PATH', None) + + # without confstr + with unittest.mock.patch('distutils.spawn.os.confstr', + side_effect=ValueError, + create=True), \ + unittest.mock.patch('distutils.spawn.os.defpath', + tmp_dir): + rv = find_executable(program) + self.assertEqual(rv, filename) + # with confstr + with 
unittest.mock.patch('distutils.spawn.os.confstr', + return_value=tmp_dir, create=True), \ + unittest.mock.patch('distutils.spawn.os.defpath', ''): rv = find_executable(program) self.assertEqual(rv, filename) diff --git a/Lib/shutil.py b/Lib/shutil.py index 7dd470dfaba4..34df9cc47463 100644 --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -1309,9 +1309,20 @@ def which(cmd, mode=os.F_OK | os.X_OK, path=None): use_bytes = isinstance(cmd, bytes) if path is None: - path = os.environ.get("PATH", os.defpath) + path = os.environ.get("PATH", None) + if path is None: + try: + path = os.confstr("CS_PATH") + except (AttributeError, ValueError): + # os.confstr() or CS_PATH is not available + path = os.defpath + # bpo-35755: Don't use os.defpath if the PATH environment variable is + # set to an empty string to mimick Unix which command behavior + + # PATH='' doesn't match, whereas PATH=':' looks in the current directory if not path: return None + if use_bytes: path = os.fsencode(path) path = path.split(os.fsencode(os.pathsep)) diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py index 678a190bcf5e..e709a5661bf3 100644 --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -1619,6 +1619,57 @@ def test_environ_path(self): rv = shutil.which(self.file) self.assertEqual(rv, self.temp_file.name) + def test_environ_path_empty(self): + # PATH='': no match + with support.EnvironmentVarGuard() as env: + env['PATH'] = '' + with unittest.mock.patch('os.confstr', return_value=self.dir, \ + create=True), \ + support.swap_attr(os, 'defpath', self.dir), \ + support.change_cwd(self.dir): + rv = shutil.which(self.file) + self.assertIsNone(rv) + + def test_environ_path_cwd(self): + expected_cwd = os.path.basename(self.temp_file.name) + if sys.platform == "win32": + curdir = os.curdir + if isinstance(expected_cwd, bytes): + curdir = os.fsencode(curdir) + expected_cwd = os.path.join(curdir, expected_cwd) + + # PATH=':': explicitly looks in the current directory + with support.EnvironmentVarGuard() as env: + env['PATH'] = os.pathsep + with unittest.mock.patch('os.confstr', return_value=self.dir, \ + create=True), \ + support.swap_attr(os, 'defpath', self.dir): + rv = shutil.which(self.file) + self.assertIsNone(rv) + + # look in current directory + with support.change_cwd(self.dir): + rv = shutil.which(self.file) + self.assertEqual(rv, expected_cwd) + + def test_environ_path_missing(self): + with support.EnvironmentVarGuard() as env: + env.pop('PATH', None) + + # without confstr + with unittest.mock.patch('os.confstr', side_effect=ValueError, \ + create=True), \ + support.swap_attr(os, 'defpath', self.dir): + rv = shutil.which(self.file) + self.assertEqual(rv, self.temp_file.name) + + # with confstr + with unittest.mock.patch('os.confstr', return_value=self.dir, \ + create=True), \ + support.swap_attr(os, 'defpath', ''): + rv = shutil.which(self.file) + self.assertEqual(rv, self.temp_file.name) + def test_empty_path(self): base_dir = os.path.dirname(self.dir) with support.change_cwd(path=self.dir), \ @@ -1633,6 +1684,23 @@ def test_empty_path_no_PATH(self): rv = shutil.which(self.file) self.assertIsNone(rv) + @unittest.skipUnless(sys.platform == "win32", 'test specific to Windows') + def test_pathext(self): + ext = ".xyz" + temp_filexyz = tempfile.NamedTemporaryFile(dir=self.temp_dir, + prefix="Tmp2", suffix=ext) + os.chmod(temp_filexyz.name, stat.S_IXUSR) + self.addCleanup(temp_filexyz.close) + + # strip path and extension + program = os.path.basename(temp_filexyz.name) + program = os.path.splitext(program)[0] + 
+ with support.EnvironmentVarGuard() as env: + env['PATHEXT'] = ext + rv = shutil.which(program, path=self.temp_dir) + self.assertEqual(rv, temp_filexyz.name) + class TestWhichBytes(TestWhich): def setUp(self): diff --git a/Misc/NEWS.d/next/Library/2019-04-16-17-50-39.bpo-35755.Fg4EXb.rst b/Misc/NEWS.d/next/Library/2019-04-16-17-50-39.bpo-35755.Fg4EXb.rst new file mode 100644 index 000000000000..8e92ffdec235 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-04-16-17-50-39.bpo-35755.Fg4EXb.rst @@ -0,0 +1,6 @@ +:func:`shutil.which` and :func:`distutils.spawn.find_executable` now use +``os.confstr("CS_PATH")`` if available instead of :data:`os.defpath`, if the +``PATH`` environment variable is not set. Moreover, don't use +``os.confstr("CS_PATH")`` nor :data:`os.defpath` if the ``PATH`` environment +variable is set to an empty string to mimick Unix ``which`` command +behavior. From webhook-mailer at python.org Wed Apr 17 11:05:44 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 17 Apr 2019 15:05:44 -0000 Subject: [Python-checkins] bpo-35755: Remove current directory from posixpath.defpath (GH-11586) Message-ID: https://github.com/python/cpython/commit/2c4c02f8a876fcf084575dcaf857a0236c81261a commit: 2c4c02f8a876fcf084575dcaf857a0236c81261a branch: master author: Victor Stinner committer: GitHub date: 2019-04-17T17:05:30+02:00 summary: bpo-35755: Remove current directory from posixpath.defpath (GH-11586) Document the change in a NEWS entry of the Security category. files: A Misc/NEWS.d/next/Security/2019-01-17-10-03-48.bpo-35755.GmllIs.rst M Lib/posixpath.py diff --git a/Lib/posixpath.py b/Lib/posixpath.py index 21ce72fd79cd..ecb4e5a8f707 100644 --- a/Lib/posixpath.py +++ b/Lib/posixpath.py @@ -18,7 +18,7 @@ extsep = '.' sep = '/' pathsep = ':' -defpath = ':/bin:/usr/bin' +defpath = '/bin:/usr/bin' altsep = None devnull = '/dev/null' diff --git a/Misc/NEWS.d/next/Security/2019-01-17-10-03-48.bpo-35755.GmllIs.rst b/Misc/NEWS.d/next/Security/2019-01-17-10-03-48.bpo-35755.GmllIs.rst new file mode 100644 index 000000000000..959aafd73449 --- /dev/null +++ b/Misc/NEWS.d/next/Security/2019-01-17-10-03-48.bpo-35755.GmllIs.rst @@ -0,0 +1,5 @@ +:func:`shutil.which` now uses ``os.confstr("CS_PATH")`` if available and if the +:envvar:`PATH` environment variable is not set. Remove also the current +directory from :data:`posixpath.defpath`. On Unix, :func:`shutil.which` and the +:mod:`subprocess` module no longer search the executable in the current +directory if the :envvar:`PATH` environment variable is not set. From webhook-mailer at python.org Wed Apr 17 11:33:46 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 17 Apr 2019 15:33:46 -0000 Subject: [Python-checkins] bpo-31904: Port test_cmd_line to VxWorks (#12648) Message-ID: https://github.com/python/cpython/commit/2954550818e5c23a082e6279eb326168230ebf04 commit: 2954550818e5c23a082e6279eb326168230ebf04 branch: master author: Lihua Zhao <44661095+LihuaZhao at users.noreply.github.com> committer: Victor Stinner date: 2019-04-17T17:33:25+02:00 summary: bpo-31904: Port test_cmd_line to VxWorks (#12648) subprocess.Popen doesn't support preexec on VxWorks. 
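For context, a minimal sketch (not part of the patch) of the kind of guard this change adds: the affected tests rely on subprocess.Popen()'s preexec_fn, which needs POSIX fork()/exec() semantics, so they have to be skipped on VxWorks. The test class and method names below are invented for illustration.

    import os
    import subprocess
    import sys
    import unittest

    @unittest.skipUnless(os.name == "posix", "preexec_fn needs POSIX semantics")
    @unittest.skipIf(sys.platform == "vxworks",
                     "subprocess.Popen() has no preexec support on VxWorks")
    class PreexecTest(unittest.TestCase):
        def test_preexec_child_hook(self):
            # preexec_fn runs in the child between fork() and exec().
            proc = subprocess.Popen([sys.executable, "-c", "pass"],
                                    preexec_fn=lambda: None)
            self.assertEqual(proc.wait(), 0)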
files: M Lib/test/test_cmd_line.py diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py index 21511b896cad..f7925eb795c7 100644 --- a/Lib/test/test_cmd_line.py +++ b/Lib/test/test_cmd_line.py @@ -369,6 +369,8 @@ def test_closed_stdout(self): # Issue #7111: Python should work without standard streams @unittest.skipIf(os.name != 'posix', "test needs POSIX semantics") + @unittest.skipIf(sys.platform == "vxworks", + "test needs preexec support in subprocess.Popen") def _test_no_stdio(self, streams): code = """if 1: import os, sys From webhook-mailer at python.org Wed Apr 17 11:41:39 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 17 Apr 2019 15:41:39 -0000 Subject: [Python-checkins] bpo-31904: Port test_resource to VxWorks (GH-12719) Message-ID: https://github.com/python/cpython/commit/693c104ae74feea11f0b51176dc91ecd153230c0 commit: 693c104ae74feea11f0b51176dc91ecd153230c0 branch: master author: Lihua Zhao <44661095+LihuaZhao at users.noreply.github.com> committer: Victor Stinner date: 2019-04-17T17:41:33+02:00 summary: bpo-31904: Port test_resource to VxWorks (GH-12719) Skip tests cases setting RLIMIT_FSIZE and RLIMIT_CPU on VxWorks. files: A Misc/NEWS.d/next/Tests/2019-04-08-09-24-36.bpo-31904.ab03ea.rst M Doc/library/resource.rst M Lib/test/test_resource.py diff --git a/Doc/library/resource.rst b/Doc/library/resource.rst index 2ed15c136736..3573da7ea2d7 100644 --- a/Doc/library/resource.rst +++ b/Doc/library/resource.rst @@ -76,6 +76,8 @@ this module for those platforms. ``setrlimit`` may also raise :exc:`error` if the underlying system call fails. + VxWorks only supports setting :data:`RLIMIT_NOFILE`. + .. function:: prlimit(pid, resource[, limits]) Combines :func:`setrlimit` and :func:`getrlimit` in one function and diff --git a/Lib/test/test_resource.py b/Lib/test/test_resource.py index 62c7963fe699..e5ece5284cf1 100644 --- a/Lib/test/test_resource.py +++ b/Lib/test/test_resource.py @@ -16,6 +16,8 @@ def test_args(self): self.assertRaises(TypeError, resource.setrlimit) self.assertRaises(TypeError, resource.setrlimit, 42, 42, 42) + @unittest.skipIf(sys.platform == "vxworks", + "setting RLIMIT_FSIZE is not supported on VxWorks") def test_fsize_ismax(self): try: (cur, max) = resource.getrlimit(resource.RLIMIT_FSIZE) @@ -110,6 +112,8 @@ def test_getrusage(self): pass # Issue 6083: Reference counting bug + @unittest.skipIf(sys.platform == "vxworks", + "setting RLIMIT_CPU is not supported on VxWorks") def test_setrusage_refcount(self): try: limits = resource.getrlimit(resource.RLIMIT_CPU) diff --git a/Misc/NEWS.d/next/Tests/2019-04-08-09-24-36.bpo-31904.ab03ea.rst b/Misc/NEWS.d/next/Tests/2019-04-08-09-24-36.bpo-31904.ab03ea.rst new file mode 100644 index 000000000000..2b361011abae --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2019-04-08-09-24-36.bpo-31904.ab03ea.rst @@ -0,0 +1 @@ +Port test_resource to VxWorks: skip tests cases setting RLIMIT_FSIZE and RLIMIT_CPU. 
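As a rough illustration of why these skips are needed (a sketch assuming a POSIX resource module, not code from the patch): on VxWorks only RLIMIT_NOFILE can be set, so attempts to set RLIMIT_FSIZE or RLIMIT_CPU must be avoided or caught. The helper name is hypothetical.

    import resource
    import sys

    def try_lower_fsize_limit(new_soft):
        # On VxWorks only RLIMIT_NOFILE is settable; skip other limits.
        if sys.platform == "vxworks":
            return False
        soft, hard = resource.getrlimit(resource.RLIMIT_FSIZE)
        try:
            resource.setrlimit(resource.RLIMIT_FSIZE, (new_soft, hard))
        except (ValueError, OSError):
            # Out of range for this platform, or not permitted.
            return False
        return True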
From webhook-mailer at python.org Wed Apr 17 11:44:13 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 17 Apr 2019 15:44:13 -0000 Subject: [Python-checkins] bpo-35755: Don't say "to mimick Unix which command behavior" (GH-12861) Message-ID: https://github.com/python/cpython/commit/197f0447e3bcfa4f529fedab09966d7e3d283979 commit: 197f0447e3bcfa4f529fedab09966d7e3d283979 branch: master author: Victor Stinner committer: GitHub date: 2019-04-17T17:44:06+02:00 summary: bpo-35755: Don't say "to mimick Unix which command behavior" (GH-12861) files: M Lib/distutils/spawn.py M Lib/shutil.py M Misc/NEWS.d/next/Library/2019-04-16-17-50-39.bpo-35755.Fg4EXb.rst diff --git a/Lib/distutils/spawn.py b/Lib/distutils/spawn.py index 888327270e22..d3a12c283397 100644 --- a/Lib/distutils/spawn.py +++ b/Lib/distutils/spawn.py @@ -188,7 +188,7 @@ def find_executable(executable, path=None): # os.confstr() or CS_PATH is not available path = os.defpath # bpo-35755: Don't use os.defpath if the PATH environment variable is - # set to an empty string to mimick Unix which command behavior + # set to an empty string # PATH='' doesn't match, whereas PATH=':' looks in the current directory if not path: diff --git a/Lib/shutil.py b/Lib/shutil.py index 34df9cc47463..6cfe3738f6eb 100644 --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -1317,7 +1317,7 @@ def which(cmd, mode=os.F_OK | os.X_OK, path=None): # os.confstr() or CS_PATH is not available path = os.defpath # bpo-35755: Don't use os.defpath if the PATH environment variable is - # set to an empty string to mimick Unix which command behavior + # set to an empty string # PATH='' doesn't match, whereas PATH=':' looks in the current directory if not path: diff --git a/Misc/NEWS.d/next/Library/2019-04-16-17-50-39.bpo-35755.Fg4EXb.rst b/Misc/NEWS.d/next/Library/2019-04-16-17-50-39.bpo-35755.Fg4EXb.rst index 8e92ffdec235..d84f63bf7b83 100644 --- a/Misc/NEWS.d/next/Library/2019-04-16-17-50-39.bpo-35755.Fg4EXb.rst +++ b/Misc/NEWS.d/next/Library/2019-04-16-17-50-39.bpo-35755.Fg4EXb.rst @@ -2,5 +2,4 @@ ``os.confstr("CS_PATH")`` if available instead of :data:`os.defpath`, if the ``PATH`` environment variable is not set. Moreover, don't use ``os.confstr("CS_PATH")`` nor :data:`os.defpath` if the ``PATH`` environment -variable is set to an empty string to mimick Unix ``which`` command -behavior. +variable is set to an empty string. From webhook-mailer at python.org Wed Apr 17 12:09:26 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 17 Apr 2019 16:09:26 -0000 Subject: [Python-checkins] bpo-32849: Fix is_valid_fd() on FreeBSD (GH-12852) Message-ID: https://github.com/python/cpython/commit/3092d6b2630e4d2bd200fbc3231c27a7cba4d6b2 commit: 3092d6b2630e4d2bd200fbc3231c27a7cba4d6b2 branch: master author: Victor Stinner committer: GitHub date: 2019-04-17T18:09:12+02:00 summary: bpo-32849: Fix is_valid_fd() on FreeBSD (GH-12852) Fix Python Initialization code on FreeBSD to detect properly when stdin file descriptor (fd 0) is invalid. On FreeBSD, fstat() must be used to check if stdin (fd 0) is valid. dup(0) doesn't fail if stdin is invalid in some cases. 
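A rough Python analogue of the check being fixed (illustrative only; the real change is the C function is_valid_fd() in Python/pylifecycle.c, shown below): os.fstat() reliably fails with EBADF for an unusable descriptor, whereas os.dup() can still succeed in the macOS and FreeBSD corner cases described above.

    import os

    def fd_is_valid(fd):
        # Prefer fstat(): it fails for an invalid descriptor even where
        # dup() would "succeed" (macOS Tiger pipes, FreeBSD stdin).
        if fd < 0:
            return False
        try:
            os.fstat(fd)
        except OSError:
            return False
        return True

    print(fd_is_valid(0))   # True while stdin (fd 0) is open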
files: A Misc/NEWS.d/next/Core and Builtins/2019-04-16-11-56-12.bpo-32849.aeSg-D.rst M Python/pylifecycle.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-04-16-11-56-12.bpo-32849.aeSg-D.rst b/Misc/NEWS.d/next/Core and Builtins/2019-04-16-11-56-12.bpo-32849.aeSg-D.rst new file mode 100644 index 000000000000..6a9a85c4b134 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2019-04-16-11-56-12.bpo-32849.aeSg-D.rst @@ -0,0 +1,2 @@ +Fix Python Initialization code on FreeBSD to detect properly when stdin file +descriptor (fd 0) is invalid. diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index ad1447256cc6..c7920ef6262d 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1664,26 +1664,34 @@ initsite(void) static int is_valid_fd(int fd) { -#ifdef __APPLE__ - /* bpo-30225: On macOS Tiger, when stdout is redirected to a pipe - and the other side of the pipe is closed, dup(1) succeed, whereas - fstat(1, &st) fails with EBADF. Prefer fstat() over dup() to detect - such error. */ - struct stat st; - return (fstat(fd, &st) == 0); -#else - int fd2; - if (fd < 0) +/* dup() is faster than fstat(): fstat() can require input/output operations, + whereas dup() doesn't. There is a low risk of EMFILE/ENFILE at Python + startup. Problem: dup() doesn't check if the file descriptor is valid on + some platforms. + + bpo-30225: On macOS Tiger, when stdout is redirected to a pipe and the other + side of the pipe is closed, dup(1) succeed, whereas fstat(1, &st) fails with + EBADF. FreeBSD has similar issue (bpo-32849). + + Only use dup() on platforms where dup() is enough to detect invalid FD in + corner cases: on Linux and Windows (bpo-32849). */ +#if defined(__linux__) || defined(MS_WINDOWS) + if (fd < 0) { return 0; + } + int fd2; + _Py_BEGIN_SUPPRESS_IPH - /* Prefer dup() over fstat(). fstat() can require input/output whereas - dup() doesn't, there is a low risk of EMFILE/ENFILE at Python - startup. */ fd2 = dup(fd); - if (fd2 >= 0) + if (fd2 >= 0) { close(fd2); + } _Py_END_SUPPRESS_IPH - return fd2 >= 0; + + return (fd2 >= 0); +#else + struct stat st; + return (fstat(fd, &st) == 0); #endif } From webhook-mailer at python.org Wed Apr 17 12:30:39 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 17 Apr 2019 16:30:39 -0000 Subject: [Python-checkins] bpo-32849: Fix is_valid_fd() on FreeBSD (GH-12852) (GH-12863) Message-ID: https://github.com/python/cpython/commit/b87a8073db73f9ffa96104e00c624052e34b11c7 commit: b87a8073db73f9ffa96104e00c624052e34b11c7 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: Victor Stinner date: 2019-04-17T18:30:27+02:00 summary: bpo-32849: Fix is_valid_fd() on FreeBSD (GH-12852) (GH-12863) Fix Python Initialization code on FreeBSD to detect properly when stdin file descriptor (fd 0) is invalid. On FreeBSD, fstat() must be used to check if stdin (fd 0) is valid. dup(0) doesn't fail if stdin is invalid in some cases. 
(cherry picked from commit 3092d6b2630e4d2bd200fbc3231c27a7cba4d6b2) Co-authored-by: Victor Stinner files: A Misc/NEWS.d/next/Core and Builtins/2019-04-16-11-56-12.bpo-32849.aeSg-D.rst M Python/pylifecycle.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-04-16-11-56-12.bpo-32849.aeSg-D.rst b/Misc/NEWS.d/next/Core and Builtins/2019-04-16-11-56-12.bpo-32849.aeSg-D.rst new file mode 100644 index 000000000000..6a9a85c4b134 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2019-04-16-11-56-12.bpo-32849.aeSg-D.rst @@ -0,0 +1,2 @@ +Fix Python Initialization code on FreeBSD to detect properly when stdin file +descriptor (fd 0) is invalid. diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index d8e6f8fa8995..55d1ba573443 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1672,26 +1672,34 @@ initsite(void) static int is_valid_fd(int fd) { -#ifdef __APPLE__ - /* bpo-30225: On macOS Tiger, when stdout is redirected to a pipe - and the other side of the pipe is closed, dup(1) succeed, whereas - fstat(1, &st) fails with EBADF. Prefer fstat() over dup() to detect - such error. */ - struct stat st; - return (fstat(fd, &st) == 0); -#else - int fd2; - if (fd < 0) +/* dup() is faster than fstat(): fstat() can require input/output operations, + whereas dup() doesn't. There is a low risk of EMFILE/ENFILE at Python + startup. Problem: dup() doesn't check if the file descriptor is valid on + some platforms. + + bpo-30225: On macOS Tiger, when stdout is redirected to a pipe and the other + side of the pipe is closed, dup(1) succeed, whereas fstat(1, &st) fails with + EBADF. FreeBSD has similar issue (bpo-32849). + + Only use dup() on platforms where dup() is enough to detect invalid FD in + corner cases: on Linux and Windows (bpo-32849). */ +#if defined(__linux__) || defined(MS_WINDOWS) + if (fd < 0) { return 0; + } + int fd2; + _Py_BEGIN_SUPPRESS_IPH - /* Prefer dup() over fstat(). fstat() can require input/output whereas - dup() doesn't, there is a low risk of EMFILE/ENFILE at Python - startup. */ fd2 = dup(fd); - if (fd2 >= 0) + if (fd2 >= 0) { close(fd2); + } _Py_END_SUPPRESS_IPH - return fd2 >= 0; + + return (fd2 >= 0); +#else + struct stat st; + return (fstat(fd, &st) == 0); #endif } From webhook-mailer at python.org Wed Apr 17 12:38:11 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 17 Apr 2019 16:38:11 -0000 Subject: [Python-checkins] [3.7] bpo-35755: shutil.which() uses os.confstr("CS_PATH") (GH-12862) Message-ID: https://github.com/python/cpython/commit/394b991e41a2a4ce3afc8e6fde44de46e73bbb34 commit: 394b991e41a2a4ce3afc8e6fde44de46e73bbb34 branch: 3.7 author: Victor Stinner committer: GitHub date: 2019-04-17T18:38:06+02:00 summary: [3.7] bpo-35755: shutil.which() uses os.confstr("CS_PATH") (GH-12862) * bpo-35755: shutil.which() uses os.confstr("CS_PATH") (GH-12858) shutil.which() and distutils.spawn.find_executable() now use os.confstr("CS_PATH") if available instead of os.defpath, if the PATH environment variable is not set. Don't use os.confstr("CS_PATH") nor os.defpath if the PATH environment variable is set to an empty string. Changes: * find_executable() now starts by checking for the executable in the current working directly case. Add an explicit "if not path: return None". * Add tests for PATH='' (empty string), PATH=':' and for PATHEXT. (cherry picked from commit 228a3c99bdb2d02771bead66a0beabafad3a90d3) * bpo-35755: Remove current directory from posixpath.defpath (GH-11586) Document the change in a NEWS entry of the Security category. 
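To restate the new lookup order as plain Python (a behavioural sketch only, with a hypothetical helper name, not the patch itself): the PATH environment variable wins when it is set, an empty PATH matches nothing, and only a missing PATH falls back to os.confstr("CS_PATH") and then os.defpath.

    import os

    def effective_search_path():
        # PATH wins when set; PATH='' matches nothing, PATH=':' searches
        # the current directory.  Only a missing PATH falls back to
        # confstr("CS_PATH") and then os.defpath.
        path = os.environ.get("PATH")
        if path is not None:
            return path or None
        try:
            return os.confstr("CS_PATH")
        except (AttributeError, ValueError):
            # os.confstr() or CS_PATH is not available on this platform.
            return os.defpath

    print(effective_search_path())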
(cherry picked from commit 2c4c02f8a876fcf084575dcaf857a0236c81261a) files: A Misc/NEWS.d/next/Library/2019-04-16-17-50-39.bpo-35755.Fg4EXb.rst A Misc/NEWS.d/next/Security/2019-01-17-10-03-48.bpo-35755.GmllIs.rst M Lib/distutils/spawn.py M Lib/distutils/tests/test_spawn.py M Lib/posixpath.py M Lib/shutil.py M Lib/test/test_shutil.py diff --git a/Lib/distutils/spawn.py b/Lib/distutils/spawn.py index 538768809327..d3a12c283397 100644 --- a/Lib/distutils/spawn.py +++ b/Lib/distutils/spawn.py @@ -172,21 +172,32 @@ def find_executable(executable, path=None): A string listing directories separated by 'os.pathsep'; defaults to os.environ['PATH']. Returns the complete filename or None if not found. """ - if path is None: - path = os.environ.get('PATH', os.defpath) - - paths = path.split(os.pathsep) - base, ext = os.path.splitext(executable) - + _, ext = os.path.splitext(executable) if (sys.platform == 'win32') and (ext != '.exe'): executable = executable + '.exe' - if not os.path.isfile(executable): - for p in paths: - f = os.path.join(p, executable) - if os.path.isfile(f): - # the file exists, we have a shot at spawn working - return f - return None - else: + if os.path.isfile(executable): return executable + + if path is None: + path = os.environ.get('PATH', None) + if path is None: + try: + path = os.confstr("CS_PATH") + except (AttributeError, ValueError): + # os.confstr() or CS_PATH is not available + path = os.defpath + # bpo-35755: Don't use os.defpath if the PATH environment variable is + # set to an empty string + + # PATH='' doesn't match, whereas PATH=':' looks in the current directory + if not path: + return None + + paths = path.split(os.pathsep) + for p in paths: + f = os.path.join(p, executable) + if os.path.isfile(f): + # the file exists, we have a shot at spawn working + return f + return None diff --git a/Lib/distutils/tests/test_spawn.py b/Lib/distutils/tests/test_spawn.py index 0d455385d8ac..f9ae69ef86b3 100644 --- a/Lib/distutils/tests/test_spawn.py +++ b/Lib/distutils/tests/test_spawn.py @@ -87,11 +87,52 @@ def test_find_executable(self): rv = find_executable(dont_exist_program , path=tmp_dir) self.assertIsNone(rv) - # test os.defpath: missing PATH environment variable + # PATH='': no match, except in the current directory with test_support.EnvironmentVarGuard() as env: - with mock.patch('distutils.spawn.os.defpath', tmp_dir): - env.pop('PATH') + env['PATH'] = '' + with unittest.mock.patch('distutils.spawn.os.confstr', + return_value=tmp_dir, create=True), \ + unittest.mock.patch('distutils.spawn.os.defpath', + tmp_dir): + rv = find_executable(program) + self.assertIsNone(rv) + + # look in current directory + with test_support.change_cwd(tmp_dir): + rv = find_executable(program) + self.assertEqual(rv, program) + + # PATH=':': explicitly looks in the current directory + with test_support.EnvironmentVarGuard() as env: + env['PATH'] = os.pathsep + with unittest.mock.patch('distutils.spawn.os.confstr', + return_value='', create=True), \ + unittest.mock.patch('distutils.spawn.os.defpath', ''): + rv = find_executable(program) + self.assertIsNone(rv) + + # look in current directory + with test_support.change_cwd(tmp_dir): + rv = find_executable(program) + self.assertEqual(rv, program) + + # missing PATH: test os.confstr("CS_PATH") and os.defpath + with test_support.EnvironmentVarGuard() as env: + env.pop('PATH', None) + + # without confstr + with unittest.mock.patch('distutils.spawn.os.confstr', + side_effect=ValueError, + create=True), \ + 
unittest.mock.patch('distutils.spawn.os.defpath', + tmp_dir): + rv = find_executable(program) + self.assertEqual(rv, filename) + # with confstr + with unittest.mock.patch('distutils.spawn.os.confstr', + return_value=tmp_dir, create=True), \ + unittest.mock.patch('distutils.spawn.os.defpath', ''): rv = find_executable(program) self.assertEqual(rv, filename) diff --git a/Lib/posixpath.py b/Lib/posixpath.py index ca578a5df35c..785aa728b365 100644 --- a/Lib/posixpath.py +++ b/Lib/posixpath.py @@ -18,7 +18,7 @@ extsep = '.' sep = '/' pathsep = ':' -defpath = ':/bin:/usr/bin' +defpath = '/bin:/usr/bin' altsep = None devnull = '/dev/null' diff --git a/Lib/shutil.py b/Lib/shutil.py index f32c66b3550c..b0a53dba3a34 100644 --- a/Lib/shutil.py +++ b/Lib/shutil.py @@ -1138,7 +1138,17 @@ def _access_check(fn, mode): return None if path is None: - path = os.environ.get("PATH", os.defpath) + path = os.environ.get("PATH", None) + if path is None: + try: + path = os.confstr("CS_PATH") + except (AttributeError, ValueError): + # os.confstr() or CS_PATH is not available + path = os.defpath + # bpo-35755: Don't use os.defpath if the PATH environment variable is + # set to an empty string + + # PATH='' doesn't match, whereas PATH=':' looks in the current directory if not path: return None path = path.split(os.pathsep) diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py index 6e2b1004d309..197dd130a964 100644 --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -1500,6 +1500,57 @@ def test_environ_path(self): rv = shutil.which(self.file) self.assertEqual(rv, self.temp_file.name) + def test_environ_path_empty(self): + # PATH='': no match + with support.EnvironmentVarGuard() as env: + env['PATH'] = '' + with unittest.mock.patch('os.confstr', return_value=self.dir, \ + create=True), \ + support.swap_attr(os, 'defpath', self.dir), \ + support.change_cwd(self.dir): + rv = shutil.which(self.file) + self.assertIsNone(rv) + + def test_environ_path_cwd(self): + expected_cwd = os.path.basename(self.temp_file.name) + if sys.platform == "win32": + curdir = os.curdir + if isinstance(expected_cwd, bytes): + curdir = os.fsencode(curdir) + expected_cwd = os.path.join(curdir, expected_cwd) + + # PATH=':': explicitly looks in the current directory + with support.EnvironmentVarGuard() as env: + env['PATH'] = os.pathsep + with unittest.mock.patch('os.confstr', return_value=self.dir, \ + create=True), \ + support.swap_attr(os, 'defpath', self.dir): + rv = shutil.which(self.file) + self.assertIsNone(rv) + + # look in current directory + with support.change_cwd(self.dir): + rv = shutil.which(self.file) + self.assertEqual(rv, expected_cwd) + + def test_environ_path_missing(self): + with support.EnvironmentVarGuard() as env: + env.pop('PATH', None) + + # without confstr + with unittest.mock.patch('os.confstr', side_effect=ValueError, \ + create=True), \ + support.swap_attr(os, 'defpath', self.dir): + rv = shutil.which(self.file) + self.assertEqual(rv, self.temp_file.name) + + # with confstr + with unittest.mock.patch('os.confstr', return_value=self.dir, \ + create=True), \ + support.swap_attr(os, 'defpath', ''): + rv = shutil.which(self.file) + self.assertEqual(rv, self.temp_file.name) + def test_empty_path(self): base_dir = os.path.dirname(self.dir) with support.change_cwd(path=self.dir), \ @@ -1514,6 +1565,23 @@ def test_empty_path_no_PATH(self): rv = shutil.which(self.file) self.assertIsNone(rv) + @unittest.skipUnless(sys.platform == "win32", 'test specific to Windows') + def test_pathext(self): + ext = ".xyz" + 
temp_filexyz = tempfile.NamedTemporaryFile(dir=self.temp_dir, + prefix="Tmp2", suffix=ext) + os.chmod(temp_filexyz.name, stat.S_IXUSR) + self.addCleanup(temp_filexyz.close) + + # strip path and extension + program = os.path.basename(temp_filexyz.name) + program = os.path.splitext(program)[0] + + with support.EnvironmentVarGuard() as env: + env['PATHEXT'] = ext + rv = shutil.which(program, path=self.temp_dir) + self.assertEqual(rv, temp_filexyz.name) + class TestMove(unittest.TestCase): diff --git a/Misc/NEWS.d/next/Library/2019-04-16-17-50-39.bpo-35755.Fg4EXb.rst b/Misc/NEWS.d/next/Library/2019-04-16-17-50-39.bpo-35755.Fg4EXb.rst new file mode 100644 index 000000000000..d84f63bf7b83 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-04-16-17-50-39.bpo-35755.Fg4EXb.rst @@ -0,0 +1,5 @@ +:func:`shutil.which` and :func:`distutils.spawn.find_executable` now use +``os.confstr("CS_PATH")`` if available instead of :data:`os.defpath`, if the +``PATH`` environment variable is not set. Moreover, don't use +``os.confstr("CS_PATH")`` nor :data:`os.defpath` if the ``PATH`` environment +variable is set to an empty string. diff --git a/Misc/NEWS.d/next/Security/2019-01-17-10-03-48.bpo-35755.GmllIs.rst b/Misc/NEWS.d/next/Security/2019-01-17-10-03-48.bpo-35755.GmllIs.rst new file mode 100644 index 000000000000..959aafd73449 --- /dev/null +++ b/Misc/NEWS.d/next/Security/2019-01-17-10-03-48.bpo-35755.GmllIs.rst @@ -0,0 +1,5 @@ +:func:`shutil.which` now uses ``os.confstr("CS_PATH")`` if available and if the +:envvar:`PATH` environment variable is not set. Remove also the current +directory from :data:`posixpath.defpath`. On Unix, :func:`shutil.which` and the +:mod:`subprocess` module no longer search the executable in the current +directory if the :envvar:`PATH` environment variable is not set. From webhook-mailer at python.org Wed Apr 17 17:02:50 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 17 Apr 2019 21:02:50 -0000 Subject: [Python-checkins] bpo-36635: Change pyport.h for Py_BUILD_CORE_MODULE define (GH-12853) Message-ID: https://github.com/python/cpython/commit/5c75f37d473140f0e0b7d9bf3a8c08343447ded1 commit: 5c75f37d473140f0e0b7d9bf3a8c08343447ded1 branch: master author: Victor Stinner committer: GitHub date: 2019-04-17T23:02:26+02:00 summary: bpo-36635: Change pyport.h for Py_BUILD_CORE_MODULE define (GH-12853) Change PyAPI_FUNC(type), PyAPI_DATA(type) and PyMODINIT_FUNC macros of pyport.h when Py_BUILD_CORE_MODULE is defined. The Py_BUILD_CORE_MODULE define must be now be used to build a C extension as a dynamic library accessing Python internals: export the PyInit_xxx() function in DLL exports on Windows. Changes: * Py_BUILD_CORE_BUILTIN and Py_BUILD_CORE_MODULE now imply Py_BUILD_CORE directy in pyport.h. * ceval.c compilation now fails with an error if Py_BUILD_CORE is not defined, just to ensure that Python is build with the correct defines. * setup.py now compiles _pickle.c with Py_BUILD_CORE_MODULE define. * setup.py compiles _json.c with Py_BUILD_CORE_MODULE define, rather than Py_BUILD_CORE_BUILTIN define * PCbuild/pythoncore.vcxproj: Add Py_BUILD_CORE_BUILTIN define. 
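As a concrete, hypothetical build recipe for the rule described above: an extension built outside the core that wants the internal headers would now define Py_BUILD_CORE_MODULE, which also keeps its PyInit function in the DLL exports on Windows. The project and file names below are invented for illustration, roughly mirroring what setup.py now does for _pickle and _json.

    from setuptools import Extension, setup

    setup(
        name="demo_internal",
        ext_modules=[
            Extension(
                "demo_internal",
                sources=["demo_internal.c"],
                # Py_BUILD_CORE_MODULE implies Py_BUILD_CORE and keeps
                # PyInit_demo_internal exported on Windows.
                define_macros=[("Py_BUILD_CORE_MODULE", "1")],
            )
        ],
    )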
files: A Misc/NEWS.d/next/Build/2019-04-16-13-58-52.bpo-36635.JKlzkf.rst M Include/internal/pycore_accu.h M Include/internal/pycore_atomic.h M Include/internal/pycore_ceval.h M Include/internal/pycore_condvar.h M Include/internal/pycore_context.h M Include/internal/pycore_coreconfig.h M Include/internal/pycore_getopt.h M Include/internal/pycore_gil.h M Include/internal/pycore_hamt.h M Include/internal/pycore_object.h M Include/internal/pycore_pathconfig.h M Include/internal/pycore_pyhash.h M Include/internal/pycore_pylifecycle.h M Include/internal/pycore_pymem.h M Include/internal/pycore_pystate.h M Include/internal/pycore_tupleobject.h M Include/internal/pycore_warnings.h M Include/pyport.h M Modules/Setup M Modules/_json.c M Modules/_pickle.c M Modules/_testcapimodule.c M PCbuild/pythoncore.vcxproj M Python/ceval.c M setup.py diff --git a/Include/internal/pycore_accu.h b/Include/internal/pycore_accu.h index 4350db58a269..d346222e4dd0 100644 --- a/Include/internal/pycore_accu.h +++ b/Include/internal/pycore_accu.h @@ -9,8 +9,8 @@ extern "C" { *** Its definition may be changed or removed at any moment. ***/ -#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN) -# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define" +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" #endif /* diff --git a/Include/internal/pycore_atomic.h b/Include/internal/pycore_atomic.h index 7aa7eed6f7c2..b3ec44c1bcfe 100644 --- a/Include/internal/pycore_atomic.h +++ b/Include/internal/pycore_atomic.h @@ -4,8 +4,8 @@ extern "C" { #endif -#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN) -# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define" +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" #endif #include "dynamic_annotations.h" diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h index 2ead96c7abe3..0bb19f1aa3b6 100644 --- a/Include/internal/pycore_ceval.h +++ b/Include/internal/pycore_ceval.h @@ -4,8 +4,8 @@ extern "C" { #endif -#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN) -# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define" +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" #endif #include "pycore_atomic.h" diff --git a/Include/internal/pycore_condvar.h b/Include/internal/pycore_condvar.h index a12b6994ad55..8b89d709510a 100644 --- a/Include/internal/pycore_condvar.h +++ b/Include/internal/pycore_condvar.h @@ -1,8 +1,8 @@ #ifndef Py_INTERNAL_CONDVAR_H #define Py_INTERNAL_CONDVAR_H -#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN) -# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define" +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" #endif #ifndef _POSIX_THREADS diff --git a/Include/internal/pycore_context.h b/Include/internal/pycore_context.h index 70701cdd11dc..5e1ba0d0393f 100644 --- a/Include/internal/pycore_context.h +++ b/Include/internal/pycore_context.h @@ -1,8 +1,8 @@ #ifndef Py_INTERNAL_CONTEXT_H #define Py_INTERNAL_CONTEXT_H -#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN) -# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define" +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" #endif #include "pycore_hamt.h" diff --git a/Include/internal/pycore_coreconfig.h b/Include/internal/pycore_coreconfig.h index 3a27628aa740..33538442043b 100644 --- 
a/Include/internal/pycore_coreconfig.h +++ b/Include/internal/pycore_coreconfig.h @@ -4,8 +4,8 @@ extern "C" { #endif -#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN) -# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN defined" +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" #endif diff --git a/Include/internal/pycore_getopt.h b/Include/internal/pycore_getopt.h index 0d1897c75a64..834b8c8a1409 100644 --- a/Include/internal/pycore_getopt.h +++ b/Include/internal/pycore_getopt.h @@ -1,8 +1,8 @@ #ifndef Py_INTERNAL_PYGETOPT_H #define Py_INTERNAL_PYGETOPT_H -#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN) -# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define" +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" #endif extern int _PyOS_opterr; diff --git a/Include/internal/pycore_gil.h b/Include/internal/pycore_gil.h index 014e75fd182f..7de316397b15 100644 --- a/Include/internal/pycore_gil.h +++ b/Include/internal/pycore_gil.h @@ -4,8 +4,8 @@ extern "C" { #endif -#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN) -# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define" +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" #endif #include "pycore_condvar.h" diff --git a/Include/internal/pycore_hamt.h b/Include/internal/pycore_hamt.h index 8b2ce1fc96c3..e65aef5e21a9 100644 --- a/Include/internal/pycore_hamt.h +++ b/Include/internal/pycore_hamt.h @@ -1,8 +1,8 @@ #ifndef Py_INTERNAL_HAMT_H #define Py_INTERNAL_HAMT_H -#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN) -# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define" +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" #endif #define _Py_HAMT_MAX_TREE_DEPTH 7 diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h index c95595358a9e..81548f819198 100644 --- a/Include/internal/pycore_object.h +++ b/Include/internal/pycore_object.h @@ -4,8 +4,8 @@ extern "C" { #endif -#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN) -# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN defined" +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" #endif #include "pycore_pystate.h" /* _PyRuntime */ diff --git a/Include/internal/pycore_pathconfig.h b/Include/internal/pycore_pathconfig.h index 80d86a0dd1b5..9eb8e88df767 100644 --- a/Include/internal/pycore_pathconfig.h +++ b/Include/internal/pycore_pathconfig.h @@ -4,8 +4,8 @@ extern "C" { #endif -#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN) -# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define" +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" #endif typedef struct _PyPathConfig { diff --git a/Include/internal/pycore_pyhash.h b/Include/internal/pycore_pyhash.h index babbc95b879e..a229f8d8b7f0 100644 --- a/Include/internal/pycore_pyhash.h +++ b/Include/internal/pycore_pyhash.h @@ -1,8 +1,8 @@ #ifndef Py_INTERNAL_HASH_H #define Py_INTERNAL_HASH_H -#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN) -# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define" +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" #endif uint64_t _Py_KeyedHash(uint64_t, const char *, Py_ssize_t); diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h index 
d837ea4fb33a..bfff24b80a9f 100644 --- a/Include/internal/pycore_pylifecycle.h +++ b/Include/internal/pycore_pylifecycle.h @@ -4,8 +4,8 @@ extern "C" { #endif -#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN) -# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define" +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" #endif /* True if the main interpreter thread exited due to an unhandled diff --git a/Include/internal/pycore_pymem.h b/Include/internal/pycore_pymem.h index 8da1bd9e304a..20f3b5e40067 100644 --- a/Include/internal/pycore_pymem.h +++ b/Include/internal/pycore_pymem.h @@ -4,8 +4,8 @@ extern "C" { #endif -#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN) -# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN defined" +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" #endif #include "objimpl.h" diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h index df3730f8014a..e1ce08d335b9 100644 --- a/Include/internal/pycore_pystate.h +++ b/Include/internal/pycore_pystate.h @@ -4,8 +4,8 @@ extern "C" { #endif -#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN) -# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define" +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" #endif #include "cpython/coreconfig.h" diff --git a/Include/internal/pycore_tupleobject.h b/Include/internal/pycore_tupleobject.h index d0c5b620d356..9fcfc5c6ec71 100644 --- a/Include/internal/pycore_tupleobject.h +++ b/Include/internal/pycore_tupleobject.h @@ -4,8 +4,8 @@ extern "C" { #endif -#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN) -# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define" +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" #endif #include "tupleobject.h" diff --git a/Include/internal/pycore_warnings.h b/Include/internal/pycore_warnings.h index 91bf90232f5c..73e5350aff14 100644 --- a/Include/internal/pycore_warnings.h +++ b/Include/internal/pycore_warnings.h @@ -4,8 +4,8 @@ extern "C" { #endif -#if !defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_BUILTIN) -# error "this header requires Py_BUILD_CORE or Py_BUILD_CORE_BUILTIN define" +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" #endif #include "object.h" diff --git a/Include/pyport.h b/Include/pyport.h index 4971a493ccee..075b36028b8a 100644 --- a/Include/pyport.h +++ b/Include/pyport.h @@ -5,6 +5,27 @@ #include + +/* Defines to build Python and its standard library: + * + * - Py_BUILD_CORE: Build Python core. Give access to Python internals, but + * should not be used by third-party modules. + * - Py_BUILD_CORE_BUILTIN: Build a Python stdlib module as a built-in module. + * - Py_BUILD_CORE_MODULE: Build a Python stdlib module as a dynamic library. + * + * Py_BUILD_CORE_BUILTIN and Py_BUILD_CORE_MODULE imply Py_BUILD_CORE. + * + * On Windows, Py_BUILD_CORE_MODULE exports "PyInit_xxx" symbol, whereas + * Py_BUILD_CORE_BUILTIN does not. + */ +#if defined(Py_BUILD_CORE_BUILTIN) && !defined(Py_BUILD_CORE) +# define Py_BUILD_CORE +#endif +#if defined(Py_BUILD_CORE_MODULE) && !defined(Py_BUILD_CORE) +# define Py_BUILD_CORE +#endif + + /************************************************************************** Symbols and macros to supply platform-independent interfaces to basic C language & library operations whose spellings vary across platforms. 
@@ -623,7 +644,7 @@ extern char * _getpty(int *, int, mode_t, int); /* only get special linkage if built as shared or platform is Cygwin */ #if defined(Py_ENABLE_SHARED) || defined(__CYGWIN__) # if defined(HAVE_DECLSPEC_DLL) -# if defined(Py_BUILD_CORE) || defined(Py_BUILD_CORE_BUILTIN) +# if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) # define PyAPI_FUNC(RTYPE) __declspec(dllexport) RTYPE # define PyAPI_DATA(RTYPE) extern __declspec(dllexport) RTYPE /* module init functions inside the core need no external linkage */ @@ -755,7 +776,7 @@ extern char * _getpty(int *, int, mode_t, int); #define PY_LITTLE_ENDIAN 1 #endif -#if defined(Py_BUILD_CORE) || defined(Py_BUILD_CORE_BUILTIN) +#ifdef Py_BUILD_CORE /* * Macros to protect CRT calls against instant termination when passed an * invalid parameter (issue23524). diff --git a/Misc/NEWS.d/next/Build/2019-04-16-13-58-52.bpo-36635.JKlzkf.rst b/Misc/NEWS.d/next/Build/2019-04-16-13-58-52.bpo-36635.JKlzkf.rst new file mode 100644 index 000000000000..6d346d22b807 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2019-04-16-13-58-52.bpo-36635.JKlzkf.rst @@ -0,0 +1,5 @@ +Change ``PyAPI_FUNC(type)``, ``PyAPI_DATA(type)`` and ``PyMODINIT_FUNC`` +macros of ``pyport.h`` when ``Py_BUILD_CORE_MODULE`` is defined. The +``Py_BUILD_CORE_MODULE`` define must be now be used to build a C extension +as a dynamic library accessing Python internals: export the PyInit_xxx() +function in DLL exports on Windows. diff --git a/Modules/Setup b/Modules/Setup index 11ddd0c7b202..03aa0f16be14 100644 --- a/Modules/Setup +++ b/Modules/Setup @@ -101,29 +101,29 @@ PYTHONPATH=$(COREPYTHONPATH) # This only contains the minimal set of modules required to run the # setup.py script in the root of the Python source tree. -posix -DPy_BUILD_CORE -I$(srcdir)/Include/internal posixmodule.c # posix (UNIX) system calls +posix -DPy_BUILD_CORE_BUILTIN -I$(srcdir)/Include/internal posixmodule.c # posix (UNIX) system calls errno errnomodule.c # posix (UNIX) errno values pwd pwdmodule.c # this is needed to find out the user's home dir # if $HOME is not set _sre _sre.c # Fredrik Lundh's new regular expressions _codecs _codecsmodule.c # access to the builtin codecs and codec registry _weakref _weakref.c # weak references -_functools -DPy_BUILD_CORE -I$(srcdir)/Include/internal _functoolsmodule.c # Tools for working with functions and callable objects +_functools -DPy_BUILD_CORE_BUILTIN -I$(srcdir)/Include/internal _functoolsmodule.c # Tools for working with functions and callable objects _operator _operator.c # operator.add() and similar goodies _collections _collectionsmodule.c # Container types _abc _abc.c # Abstract base classes itertools itertoolsmodule.c # Functions creating iterators for efficient looping atexit atexitmodule.c # Register functions to be run at interpreter-shutdown -_signal -DPy_BUILD_CORE -I$(srcdir)/Include/internal signalmodule.c +_signal -DPy_BUILD_CORE_BUILTIN -I$(srcdir)/Include/internal signalmodule.c _stat _stat.c # stat.h interface -time -DPy_BUILD_CORE -I$(srcdir)/Include/internal timemodule.c # -lm # time operations and variables -_thread -DPy_BUILD_CORE -I$(srcdir)/Include/internal _threadmodule.c # low-level threading interface +time -DPy_BUILD_CORE_BUILTIN -I$(srcdir)/Include/internal timemodule.c # -lm # time operations and variables +_thread -DPy_BUILD_CORE_BUILTIN -I$(srcdir)/Include/internal _threadmodule.c # low-level threading interface # access to ISO C locale support -_locale -DPy_BUILD_CORE _localemodule.c # -lintl +_locale -DPy_BUILD_CORE_BUILTIN 
_localemodule.c # -lintl # Standard I/O baseline -_io -DPy_BUILD_CORE -I$(srcdir)/Include/internal -I$(srcdir)/Modules/_io _io/_iomodule.c _io/iobase.c _io/fileio.c _io/bytesio.c _io/bufferedio.c _io/textio.c _io/stringio.c +_io -DPy_BUILD_CORE_BUILTIN -I$(srcdir)/Include/internal -I$(srcdir)/Modules/_io _io/_iomodule.c _io/iobase.c _io/fileio.c _io/bytesio.c _io/bufferedio.c _io/textio.c _io/stringio.c # faulthandler module faulthandler faulthandler.c diff --git a/Modules/_json.c b/Modules/_json.c index 94a7c0d2bf09..2d7c1bf1e1c7 100644 --- a/Modules/_json.c +++ b/Modules/_json.c @@ -1,8 +1,11 @@ - -/* Core extension modules are built-in on some platforms (e.g. Windows). */ -#ifdef Py_BUILD_CORE -#define Py_BUILD_CORE_BUILTIN -#undef Py_BUILD_CORE +/* JSON accelerator C extensor: _json module. + * + * It is built as a built-in module (Py_BUILD_CORE_BUILTIN define) on Windows + * and as an extension module (Py_BUILD_CORE_MODULE define) on other + * platforms. */ + +#if !defined(Py_BUILD_CORE_BUILTIN) && !defined(Py_BUILD_CORE_MODULE) +# error "Py_BUILD_CORE_BUILTIN or Py_BUILD_CORE_MODULE must be defined" #endif #include "Python.h" diff --git a/Modules/_pickle.c b/Modules/_pickle.c index 2b97294e1e86..f956a382ac53 100644 --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -1,8 +1,11 @@ +/* pickle accelerator C extensor: _pickle module. + * + * It is built as a built-in module (Py_BUILD_CORE_BUILTIN define) on Windows + * and as an extension module (Py_BUILD_CORE_MODULE define) on other + * platforms. */ -/* Core extension modules are built-in on some platforms (e.g. Windows). */ -#ifdef Py_BUILD_CORE -#define Py_BUILD_CORE_BUILTIN -#undef Py_BUILD_CORE +#if !defined(Py_BUILD_CORE_BUILTIN) && !defined(Py_BUILD_CORE_MODULE) +# error "Py_BUILD_CORE_BUILTIN or Py_BUILD_CORE_MODULE must be defined" #endif #include "Python.h" diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index 1180b4b176e9..ae960deba78c 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -5,6 +5,11 @@ * standard Python regression test, via Lib/test/test_capi.py. */ +/* The Visual Studio projects builds _testcapi with Py_BUILD_CORE_MODULE + define, but we only want to test the public C API, not the internal + C API. */ +#undef Py_BUILD_CORE_MODULE + #define PY_SSIZE_T_CLEAN #include "Python.h" diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index a135e9326c78..5c5a720ba0c8 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -86,7 +86,7 @@ /Zm200 %(AdditionalOptions) $(PySourcePath)Python;%(AdditionalIncludeDirectories) $(zlibDir);%(AdditionalIncludeDirectories) - _USRDLL;Py_BUILD_CORE;Py_ENABLE_SHARED;MS_DLL_ID="$(SysWinVer)";%(PreprocessorDefinitions) + _USRDLL;Py_BUILD_CORE;Py_BUILD_CORE_BUILTIN;Py_ENABLE_SHARED;MS_DLL_ID="$(SysWinVer)";%(PreprocessorDefinitions) _Py_HAVE_ZLIB;%(PreprocessorDefinitions) diff --git a/Python/ceval.c b/Python/ceval.c index 28e923219d38..342dc10af6a6 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -30,6 +30,10 @@ #define CHECKEXC 1 /* Double-check exception checking */ #endif +#if !defined(Py_BUILD_CORE) +# error "ceval.c must be build with Py_BUILD_CORE define for best performance" +#endif + /* Private API for the LOAD_METHOD opcode. 
*/ extern int _PyObject_GetMethod(PyObject *, PyObject *, PyObject **); diff --git a/setup.py b/setup.py index 9c83914fd907..c470719a6a80 100644 --- a/setup.py +++ b/setup.py @@ -725,13 +725,13 @@ def detect_simple_extensions(self): # heapq self.add(Extension("_heapq", ["_heapqmodule.c"])) # C-optimized pickle replacement - self.add(Extension("_pickle", ["_pickle.c"])) + self.add(Extension("_pickle", ["_pickle.c"], + extra_compile_args=['-D Py_BUILD_CORE_MODULE'])) # atexit self.add(Extension("atexit", ["atexitmodule.c"])) # _json speedups self.add(Extension("_json", ["_json.c"], - # pycore_accu.h requires Py_BUILD_CORE_BUILTIN - extra_compile_args=['-DPy_BUILD_CORE_BUILTIN'])) + extra_compile_args=['-D Py_BUILD_CORE_MODULE'])) # profiler (_lsprof is for cProfile.py) self.add(Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c'])) From webhook-mailer at python.org Wed Apr 17 17:31:39 2019 From: webhook-mailer at python.org (Steve Dower) Date: Wed, 17 Apr 2019 21:31:39 -0000 Subject: [Python-checkins] bpo-36649: Remove trailing spaces for registry keys when installed via the Store (GH-12865) Message-ID: https://github.com/python/cpython/commit/4c3efd9cd07194b5db2a60ae5951134cda8b69db commit: 4c3efd9cd07194b5db2a60ae5951134cda8b69db branch: master author: Steve Dower committer: GitHub date: 2019-04-17T14:31:32-07:00 summary: bpo-36649: Remove trailing spaces for registry keys when installed via the Store (GH-12865) files: A Misc/NEWS.d/next/Windows/2019-04-17-11-39-24.bpo-36649.arbzIo.rst M PC/layout/support/appxmanifest.py diff --git a/Misc/NEWS.d/next/Windows/2019-04-17-11-39-24.bpo-36649.arbzIo.rst b/Misc/NEWS.d/next/Windows/2019-04-17-11-39-24.bpo-36649.arbzIo.rst new file mode 100644 index 000000000000..8b44feb9f4c0 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2019-04-17-11-39-24.bpo-36649.arbzIo.rst @@ -0,0 +1 @@ +Remove trailing spaces for registry keys when installed via the Store. diff --git a/PC/layout/support/appxmanifest.py b/PC/layout/support/appxmanifest.py index c5dda70c7ef8..49a35fa1f046 100644 --- a/PC/layout/support/appxmanifest.py +++ b/PC/layout/support/appxmanifest.py @@ -159,21 +159,20 @@ def public(f): "SysVersion": VER_DOT, "Version": "{}.{}.{}".format(VER_MAJOR, VER_MINOR, VER_MICRO), "InstallPath": { - # I have no idea why the trailing spaces are needed, but they seem to be needed. 
- "": "[{AppVPackageRoot}][ ]", - "ExecutablePath": "[{AppVPackageRoot}]python.exe[ ]", - "WindowedExecutablePath": "[{AppVPackageRoot}]pythonw.exe[ ]", + "": "[{AppVPackageRoot}]", + "ExecutablePath": "[{AppVPackageRoot}]\\python.exe", + "WindowedExecutablePath": "[{AppVPackageRoot}]\\pythonw.exe", }, "Help": { "Main Python Documentation": { "_condition": lambda ns: ns.include_chm, - "": "[{{AppVPackageRoot}}]Doc\\{}[ ]".format( + "": "[{{AppVPackageRoot}}]\\Doc\\{}".format( PYTHON_CHM_NAME ), }, "Local Python Documentation": { "_condition": lambda ns: ns.include_html_doc, - "": "[{AppVPackageRoot}]Doc\\html\\index.html[ ]", + "": "[{AppVPackageRoot}]\\Doc\\html\\index.html", }, "Online Python Documentation": { "": "https://docs.python.org/{}".format(VER_DOT) @@ -181,7 +180,7 @@ def public(f): }, "Idle": { "_condition": lambda ns: ns.include_idle, - "": "[{AppVPackageRoot}]Lib\\idlelib\\idle.pyw[ ]", + "": "[{AppVPackageRoot}]\\Lib\\idlelib\\idle.pyw", }, } } From webhook-mailer at python.org Wed Apr 17 17:52:19 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 17 Apr 2019 21:52:19 -0000 Subject: [Python-checkins] bpo-36649: Remove trailing spaces for registry keys when installed via the Store (GH-12865) Message-ID: https://github.com/python/cpython/commit/0d4f16d283fe3b8a183775ac7ac193988d971ad5 commit: 0d4f16d283fe3b8a183775ac7ac193988d971ad5 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-17T14:52:14-07:00 summary: bpo-36649: Remove trailing spaces for registry keys when installed via the Store (GH-12865) (cherry picked from commit 4c3efd9cd07194b5db2a60ae5951134cda8b69db) Co-authored-by: Steve Dower files: A Misc/NEWS.d/next/Windows/2019-04-17-11-39-24.bpo-36649.arbzIo.rst M PC/layout/support/appxmanifest.py diff --git a/Misc/NEWS.d/next/Windows/2019-04-17-11-39-24.bpo-36649.arbzIo.rst b/Misc/NEWS.d/next/Windows/2019-04-17-11-39-24.bpo-36649.arbzIo.rst new file mode 100644 index 000000000000..8b44feb9f4c0 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2019-04-17-11-39-24.bpo-36649.arbzIo.rst @@ -0,0 +1 @@ +Remove trailing spaces for registry keys when installed via the Store. diff --git a/PC/layout/support/appxmanifest.py b/PC/layout/support/appxmanifest.py index c5dda70c7ef8..49a35fa1f046 100644 --- a/PC/layout/support/appxmanifest.py +++ b/PC/layout/support/appxmanifest.py @@ -159,21 +159,20 @@ def public(f): "SysVersion": VER_DOT, "Version": "{}.{}.{}".format(VER_MAJOR, VER_MINOR, VER_MICRO), "InstallPath": { - # I have no idea why the trailing spaces are needed, but they seem to be needed. 
- "": "[{AppVPackageRoot}][ ]", - "ExecutablePath": "[{AppVPackageRoot}]python.exe[ ]", - "WindowedExecutablePath": "[{AppVPackageRoot}]pythonw.exe[ ]", + "": "[{AppVPackageRoot}]", + "ExecutablePath": "[{AppVPackageRoot}]\\python.exe", + "WindowedExecutablePath": "[{AppVPackageRoot}]\\pythonw.exe", }, "Help": { "Main Python Documentation": { "_condition": lambda ns: ns.include_chm, - "": "[{{AppVPackageRoot}}]Doc\\{}[ ]".format( + "": "[{{AppVPackageRoot}}]\\Doc\\{}".format( PYTHON_CHM_NAME ), }, "Local Python Documentation": { "_condition": lambda ns: ns.include_html_doc, - "": "[{AppVPackageRoot}]Doc\\html\\index.html[ ]", + "": "[{AppVPackageRoot}]\\Doc\\html\\index.html", }, "Online Python Documentation": { "": "https://docs.python.org/{}".format(VER_DOT) @@ -181,7 +180,7 @@ def public(f): }, "Idle": { "_condition": lambda ns: ns.include_idle, - "": "[{AppVPackageRoot}]Lib\\idlelib\\idle.pyw[ ]", + "": "[{AppVPackageRoot}]\\Lib\\idlelib\\idle.pyw", }, } } From webhook-mailer at python.org Wed Apr 17 18:43:42 2019 From: webhook-mailer at python.org (Brett Cannon) Date: Wed, 17 Apr 2019 22:43:42 -0000 Subject: [Python-checkins] bpo-32913: Added re.Match.groupdict example to regex HOWTO (GH-5821) Message-ID: https://github.com/python/cpython/commit/a6de52c74d831e45ee0ff105196da8a58b9e43cd commit: a6de52c74d831e45ee0ff105196da8a58b9e43cd branch: master author: josh committer: Brett Cannon date: 2019-04-17T15:43:30-07:00 summary: bpo-32913: Added re.Match.groupdict example to regex HOWTO (GH-5821) files: A Misc/NEWS.d/next/Documentation/2018-02-22-15-48-16.bpo-32913.f3utho.rst M Doc/howto/regex.rst diff --git a/Doc/howto/regex.rst b/Doc/howto/regex.rst index d385d991344b..d574c3736b1c 100644 --- a/Doc/howto/regex.rst +++ b/Doc/howto/regex.rst @@ -942,6 +942,13 @@ given numbers, so you can retrieve information about a group in two ways:: >>> m.group(1) 'Lots' +Additionally, you can retrieve named groups as a dictionary with +:meth:`~re.Match.groupdict`:: + + >>> m = re.match(r'(?P\w+) (?P\w+)', 'Jane Doe') + >>> m.groupdict() + {'first': 'Jane', 'last': 'Doe'} + Named groups are handy because they let you use easily-remembered names, instead of having to remember numbers. Here's an example RE from the :mod:`imaplib` module:: diff --git a/Misc/NEWS.d/next/Documentation/2018-02-22-15-48-16.bpo-32913.f3utho.rst b/Misc/NEWS.d/next/Documentation/2018-02-22-15-48-16.bpo-32913.f3utho.rst new file mode 100644 index 000000000000..caa9590abbaf --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2018-02-22-15-48-16.bpo-32913.f3utho.rst @@ -0,0 +1 @@ +Added re.Match.groupdict example to regex HOWTO. From webhook-mailer at python.org Wed Apr 17 20:54:59 2019 From: webhook-mailer at python.org (Ivan Levkivskyi) Date: Thu, 18 Apr 2019 00:54:59 -0000 Subject: [Python-checkins] Fix wrong indentation of a paragraph in documentation (GH-12868) Message-ID: https://github.com/python/cpython/commit/9941f963fe085261aec25545fa9f0cc35f750120 commit: 9941f963fe085261aec25545fa9f0cc35f750120 branch: master author: cocoatomo committer: Ivan Levkivskyi date: 2019-04-18T01:54:51+01:00 summary: Fix wrong indentation of a paragraph in documentation (GH-12868) This paragraph doesn't seem to be a part of code, but merged into previous code block. 
files: M Doc/library/typing.rst diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst index ed5f547e3ce3..c2523ed52960 100644 --- a/Doc/library/typing.rst +++ b/Doc/library/typing.rst @@ -966,8 +966,8 @@ The module defines the following classes, functions and decorators: def fetch_response() -> Response: ... - Note that returning instances of private classes is not recommended. - It is usually preferable to make such classes public. + Note that returning instances of private classes is not recommended. + It is usually preferable to make such classes public. .. data:: Any From webhook-mailer at python.org Wed Apr 17 21:00:41 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 18 Apr 2019 01:00:41 -0000 Subject: [Python-checkins] Fix wrong indentation of a paragraph in documentation (GH-12868) Message-ID: https://github.com/python/cpython/commit/21c8caa16a64e95ee91e83c641e9d4a0844ddb74 commit: 21c8caa16a64e95ee91e83c641e9d4a0844ddb74 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-17T18:00:37-07:00 summary: Fix wrong indentation of a paragraph in documentation (GH-12868) This paragraph doesn't seem to be a part of code, but merged into previous code block. (cherry picked from commit 9941f963fe085261aec25545fa9f0cc35f750120) Co-authored-by: cocoatomo files: M Doc/library/typing.rst diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst index 9f6757c2864c..21258a58d0aa 100644 --- a/Doc/library/typing.rst +++ b/Doc/library/typing.rst @@ -956,8 +956,8 @@ The module defines the following classes, functions and decorators: def fetch_response() -> Response: ... - Note that returning instances of private classes is not recommended. - It is usually preferable to make such classes public. + Note that returning instances of private classes is not recommended. + It is usually preferable to make such classes public. .. 
data:: Any From webhook-mailer at python.org Wed Apr 17 21:06:11 2019 From: webhook-mailer at python.org (Steve Dower) Date: Thu, 18 Apr 2019 01:06:11 -0000 Subject: [Python-checkins] bpo-36638: Fix WindowsLoadTracker exception on some Windows versions (GH-12849) Message-ID: https://github.com/python/cpython/commit/264a0b40b030fc0ff919b8294df91bdaac853bfb commit: 264a0b40b030fc0ff919b8294df91bdaac853bfb branch: master author: Paul Monson committer: Steve Dower date: 2019-04-17T18:06:06-07:00 summary: bpo-36638: Fix WindowsLoadTracker exception on some Windows versions (GH-12849) files: M Lib/test/libregrtest/main.py diff --git a/Lib/test/libregrtest/main.py b/Lib/test/libregrtest/main.py index 18ef6d0609cd6..d20e1746781f0 100644 --- a/Lib/test/libregrtest/main.py +++ b/Lib/test/libregrtest/main.py @@ -626,8 +626,13 @@ def getloadavg_1m(): elif sys.platform == 'win32' and (self.ns.worker_args is None): from test.libregrtest.win_utils import WindowsLoadTracker - load_tracker = WindowsLoadTracker() - self.getloadavg = load_tracker.getloadavg + try: + load_tracker = WindowsLoadTracker() + self.getloadavg = load_tracker.getloadavg + except FileNotFoundError as error: + # Windows IoT Core and Windows Nano Server do not provide + # typeperf.exe for x64, x86 or ARM + print('Failed to create WindowsLoadTracker: {}'.format(error)) self.run_tests() self.display_result() From webhook-mailer at python.org Wed Apr 17 21:09:19 2019 From: webhook-mailer at python.org (Steve Dower) Date: Thu, 18 Apr 2019 01:09:19 -0000 Subject: [Python-checkins] bpo-36071 Add support for Windows ARM32 in ctypes/libffi (GH-12059) Message-ID: https://github.com/python/cpython/commit/11efd79076559cc6e4034bb36db73e5e4293f02d commit: 11efd79076559cc6e4034bb36db73e5e4293f02d branch: master author: Paul Monson committer: Steve Dower date: 2019-04-17T18:09:16-07:00 summary: bpo-36071 Add support for Windows ARM32 in ctypes/libffi (GH-12059) files: M Include/pyport.h M Include/pythonrun.h M Modules/_ctypes/callbacks.c M Modules/_ctypes/callproc.c M Modules/_ctypes/malloc_closure.c M PCbuild/pcbuild.sln M PCbuild/prepare_libffi.bat diff --git a/Include/pyport.h b/Include/pyport.h index 075b36028b8a..acbae5bafeb5 100644 --- a/Include/pyport.h +++ b/Include/pyport.h @@ -427,7 +427,7 @@ extern "C" { #endif /* get and set x87 control word for VisualStudio/x86 */ -#if defined(_MSC_VER) && defined(_M_IX86) /* x87 only supported in x86 */ +#if defined(_MSC_VER) && !defined(_WIN64) && !defined(_M_ARM) /* x87 not supported in 64-bit or ARM */ #define HAVE_PY_SET_53BIT_PRECISION 1 #define _Py_SET_53BIT_PRECISION_HEADER \ unsigned int old_387controlword, new_387controlword, out_387controlword diff --git a/Include/pythonrun.h b/Include/pythonrun.h index 6f0c6fc65543..e83846add981 100644 --- a/Include/pythonrun.h +++ b/Include/pythonrun.h @@ -165,7 +165,7 @@ PyAPI_DATA(PyThreadState*) _PyOS_ReadlineTState; to an 8k margin. 
*/ #define PYOS_STACK_MARGIN 2048 -#if defined(WIN32) && !defined(MS_WIN64) && defined(_MSC_VER) && _MSC_VER >= 1300 +#if defined(WIN32) && !defined(MS_WIN64) && !defined(_M_ARM) && defined(_MSC_VER) && _MSC_VER >= 1300 /* Enable stack checking under Microsoft C */ #define USE_STACKCHECK #endif diff --git a/Modules/_ctypes/callbacks.c b/Modules/_ctypes/callbacks.c index 2b7cb06ea8a9..9f793c2771bf 100644 --- a/Modules/_ctypes/callbacks.c +++ b/Modules/_ctypes/callbacks.c @@ -380,7 +380,7 @@ CThunkObject *_ctypes_alloc_callback(PyObject *callable, } cc = FFI_DEFAULT_ABI; -#if defined(MS_WIN32) && !defined(_WIN32_WCE) && !defined(MS_WIN64) +#if defined(MS_WIN32) && !defined(_WIN32_WCE) && !defined(MS_WIN64) && !defined(_M_ARM) if ((flags & FUNCFLAG_CDECL) == 0) cc = FFI_STDCALL; #endif diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c index 5a943d3c3708..1ad842eb3d40 100644 --- a/Modules/_ctypes/callproc.c +++ b/Modules/_ctypes/callproc.c @@ -737,12 +737,17 @@ of 1, 2, 4, 8, 16, 32, or 64 bits */ int can_return_struct_as_int(size_t s) { - return s == 1 || s == 2 || s == 4; + return s == 1 || s == 2 || s == 4; } int can_return_struct_as_sint64(size_t s) { - return s == 8; +#ifdef _M_ARM + // 8 byte structs cannot be returned in a register on ARM32 + return 0; +#else + return s == 8; +#endif } #endif @@ -807,7 +812,7 @@ static int _call_function_pointer(int flags, } cc = FFI_DEFAULT_ABI; -#if defined(MS_WIN32) && !defined(MS_WIN64) && !defined(_WIN32_WCE) +#if defined(MS_WIN32) && !defined(MS_WIN64) && !defined(_WIN32_WCE) && !defined(_M_ARM) if ((flags & FUNCFLAG_CDECL) == 0) cc = FFI_STDCALL; #endif diff --git a/Modules/_ctypes/malloc_closure.c b/Modules/_ctypes/malloc_closure.c index 8ad76497c7b3..f9cdb336958c 100644 --- a/Modules/_ctypes/malloc_closure.c +++ b/Modules/_ctypes/malloc_closure.c @@ -106,6 +106,11 @@ void *ffi_closure_alloc(size_t ignored, void** codeloc) return NULL; item = free_list; free_list = item->next; +#ifdef _M_ARM + // set Thumb bit so that blx is called correctly + *codeloc = (ITEM*)((uintptr_t)item | 1); +#else *codeloc = (void *)item; +#endif return (void *)item; } diff --git a/PCbuild/pcbuild.sln b/PCbuild/pcbuild.sln index 1bbfd180df14..e9239365b5c1 100644 --- a/PCbuild/pcbuild.sln +++ b/PCbuild/pcbuild.sln @@ -238,6 +238,7 @@ Global {0E9791DB-593A-465F-98BC-681011311617}.Release|x64.ActiveCfg = Release|x64 {0E9791DB-593A-465F-98BC-681011311617}.Release|x64.Build.0 = Release|x64 {0E9791DB-593A-465F-98BC-681011311618}.Debug|ARM.ActiveCfg = Debug|ARM + {0E9791DB-593A-465F-98BC-681011311618}.Debug|ARM.Build.0 = Debug|ARM {0E9791DB-593A-465F-98BC-681011311618}.Debug|Win32.ActiveCfg = Debug|Win32 {0E9791DB-593A-465F-98BC-681011311618}.Debug|Win32.Build.0 = Debug|Win32 {0E9791DB-593A-465F-98BC-681011311618}.Debug|x64.ActiveCfg = Debug|x64 @@ -255,6 +256,7 @@ Global {0E9791DB-593A-465F-98BC-681011311618}.PGUpdate|x64.ActiveCfg = PGUpdate|x64 {0E9791DB-593A-465F-98BC-681011311618}.PGUpdate|x64.Build.0 = PGUpdate|x64 {0E9791DB-593A-465F-98BC-681011311618}.Release|ARM.ActiveCfg = Release|ARM + {0E9791DB-593A-465F-98BC-681011311618}.Release|ARM.Build.0 = Release|ARM {0E9791DB-593A-465F-98BC-681011311618}.Release|Win32.ActiveCfg = Release|Win32 {0E9791DB-593A-465F-98BC-681011311618}.Release|Win32.Build.0 = Release|Win32 {0E9791DB-593A-465F-98BC-681011311618}.Release|x64.ActiveCfg = Release|x64 diff --git a/PCbuild/prepare_libffi.bat b/PCbuild/prepare_libffi.bat index 3df85130f48a..307739d874a2 100644 --- a/PCbuild/prepare_libffi.bat +++ 
b/PCbuild/prepare_libffi.bat @@ -24,6 +24,7 @@ echo. echo.Available flags: echo. -x64 build for x64 echo. -x86 build for x86 +echo. -arm32 build for arm32 echo. -? this help echo. --install-cygwin install cygwin to c:\cygwin exit /b 127 @@ -32,12 +33,14 @@ exit /b 127 set BUILD_X64= set BUILD_X86= +set BUILD_ARM32= set INSTALL_CYGWIN= :CheckOpts if "%1"=="" goto :CheckOptsDone if /I "%1"=="-x64" (set BUILD_X64=1) & shift & goto :CheckOpts if /I "%1"=="-x86" (set BUILD_X86=1) & shift & goto :CheckOpts +if /I "%1"=="-arm32" (set BUILD_ARM32=1) & shift & goto :CheckOpts if /I "%1"=="-?" goto :Usage if /I "%1"=="--install-cygwin" (set INSTALL_CYGWIN=1) & shift & goto :CheckOpts goto :Usage @@ -47,6 +50,7 @@ goto :Usage if NOT DEFINED BUILD_X64 if NOT DEFINED BUILD_X86 if NOT DEFINED BUILD_ARM32 ( set BUILD_X64=1 set BUILD_X86=1 + set BUILD_ARM32=1 ) if "%INSTALL_CYGWIN%"=="1" call :InstallCygwin @@ -83,8 +87,9 @@ echo. if not exist Makefile.in (%SH% -lc "(cd $LIBFFI_SOURCE; ./autogen.sh;)") -call :BuildOne x86 i686-pc-cygwin i686-pc-cygwin -call :BuildOne x64 x86_64-w64-cygwin x86_64-w64-cygwin +if "%BUILD_X64%"=="1" call :BuildOne x64 x86_64-w64-cygwin x86_64-w64-cygwin +if "%BUILD_X86%"=="1" call :BuildOne x86 i686-pc-cygwin i686-pc-cygwin +if "%BUILD_ARM32%"=="1" call :BuildOne x86_arm i686-pc-cygwin arm-w32-cygwin popd endlocal @@ -118,6 +123,12 @@ if /I "%VCVARS_PLATFORM%" EQU "x86" ( set ASSEMBLER= set SRC_ARCHITECTURE=x86 ) +if /I "%VCVARS_PLATFORM%" EQU "x86_arm" ( + set ARCH=arm32 + set ARTIFACTS=%LIBFFI_SOURCE%\arm-w32-cygwin + set ASSEMBLER=-marm + set SRC_ARCHITECTURE=ARM +) if NOT DEFINED LIBFFI_OUT set LIBFFI_OUT=%~dp0\..\externals\libffi set _LIBFFI_OUT=%LIBFFI_OUT%\%ARCH% From webhook-mailer at python.org Thu Apr 18 05:37:36 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 18 Apr 2019 09:37:36 -0000 Subject: [Python-checkins] bpo-36635: Add _testinternalcapi module (GH-12841) Message-ID: https://github.com/python/cpython/commit/23bace26ec265557697cf3b578b361c178070cd5 commit: 23bace26ec265557697cf3b578b361c178070cd5 branch: master author: Victor Stinner committer: GitHub date: 2019-04-18T11:37:26+02:00 summary: bpo-36635: Add _testinternalcapi module (GH-12841) Add a new _testinternalcapi module to test the internal C API. Move _Py_GetConfigsAsDict() function to the internal C API: _testembed now uses _testinternalcapi to access the function. 
files: A Misc/NEWS.d/next/Tests/2019-04-15-16-55-49.bpo-36635.__FTq9.rst A Modules/_testinternalcapi.c A PCbuild/_testinternalcapi.vcxproj A PCbuild/_testinternalcapi.vcxproj.filters M Include/cpython/coreconfig.h M Include/internal/pycore_coreconfig.h M Lib/test/pythoninfo.py M Modules/Setup M Modules/_testcapimodule.c M PCbuild/pcbuild.proj M PCbuild/pcbuild.sln M Programs/_testembed.c M setup.py diff --git a/Include/cpython/coreconfig.h b/Include/cpython/coreconfig.h index 7ce1a02e16c6..c1a72989a5e9 100644 --- a/Include/cpython/coreconfig.h +++ b/Include/cpython/coreconfig.h @@ -408,11 +408,6 @@ typedef struct { ._init_main = 1} /* Note: _PyCoreConfig_INIT sets other fields to 0/NULL */ - -/* --- Function used for testing ---------------------------------- */ - -PyAPI_FUNC(PyObject*) _Py_GetConfigsAsDict(void); - #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_coreconfig.h b/Include/internal/pycore_coreconfig.h index 33538442043b..e88a46314c89 100644 --- a/Include/internal/pycore_coreconfig.h +++ b/Include/internal/pycore_coreconfig.h @@ -110,6 +110,11 @@ PyAPI_FUNC(_PyInitError) _PyCoreConfig_Read(_PyCoreConfig *config, const _PyArgv *args); PyAPI_FUNC(void) _PyCoreConfig_Write(const _PyCoreConfig *config); + +/* --- Function used for testing ---------------------------------- */ + +PyAPI_FUNC(PyObject*) _Py_GetConfigsAsDict(void); + #ifdef __cplusplus } #endif diff --git a/Lib/test/pythoninfo.py b/Lib/test/pythoninfo.py index 79f7e82e0006..19f274a6b629 100644 --- a/Lib/test/pythoninfo.py +++ b/Lib/test/pythoninfo.py @@ -598,7 +598,7 @@ def collect_get_config(info_add): # Dump global configuration variables, _PyCoreConfig # and _PyMainInterpreterConfig try: - from _testcapi import get_configs + from _testinternalcapi import get_configs except ImportError: return diff --git a/Misc/NEWS.d/next/Tests/2019-04-15-16-55-49.bpo-36635.__FTq9.rst b/Misc/NEWS.d/next/Tests/2019-04-15-16-55-49.bpo-36635.__FTq9.rst new file mode 100644 index 000000000000..855d1cb27764 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2019-04-15-16-55-49.bpo-36635.__FTq9.rst @@ -0,0 +1 @@ +Add a new :mod:`_testinternalcapi` module to test the internal C API. 
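A minimal sketch of how the new module is meant to be exercised, mirroring the one-liner that Programs/_testembed.c now runs (this assumes a CPython build where the _testinternalcapi test extension was compiled; the try/except wrapper is illustrative, not part of the commit):

    import json
    try:
        import _testinternalcapi   # built only alongside the other test extensions
    except ImportError:
        _testinternalcapi = None

    if _testinternalcapi is not None:
        # get_configs() exposes the global configuration, _PyCoreConfig and
        # _PyMainInterpreterConfig as plain Python objects, so they can be
        # dumped as JSON exactly as _testembed.c does.
        print(json.dumps(_testinternalcapi.get_configs(), indent=2))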
diff --git a/Modules/Setup b/Modules/Setup index 03aa0f16be14..e729ab883f41 100644 --- a/Modules/Setup +++ b/Modules/Setup @@ -173,6 +173,7 @@ _symtable symtablemodule.c #_struct _struct.c # binary structure packing/unpacking #_weakref _weakref.c # basic weak reference support #_testcapi _testcapimodule.c # Python C API test module +#_testinternalcapi _testinternalcapi.c -I$(srcdir)/Include/internal -DPy_BUILD_CORE_MODULE # Python internal C API test module #_random _randommodule.c # Random number generator #_elementtree -I$(srcdir)/Modules/expat -DHAVE_EXPAT_CONFIG_H -DUSE_PYEXPAT_CAPI _elementtree.c # elementtree accelerator #_pickle _pickle.c # pickle accelerator diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index ae960deba78c..6f4eb53d7e92 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -4736,13 +4736,6 @@ decode_locale_ex(PyObject *self, PyObject *args) } -static PyObject * -get_configs(PyObject *self, PyObject *Py_UNUSED(args)) -{ - return _Py_GetConfigsAsDict(); -} - - #ifdef Py_REF_DEBUG static PyObject * negative_refcount(PyObject *self, PyObject *Py_UNUSED(args)) @@ -4990,7 +4983,6 @@ static PyMethodDef TestMethods[] = { {"bad_get", (PyCFunction)(void(*)(void))bad_get, METH_FASTCALL}, {"EncodeLocaleEx", encode_locale_ex, METH_VARARGS}, {"DecodeLocaleEx", decode_locale_ex, METH_VARARGS}, - {"get_configs", get_configs, METH_NOARGS}, #ifdef Py_REF_DEBUG {"negative_refcount", negative_refcount, METH_NOARGS}, #endif diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c new file mode 100644 index 000000000000..3a43ec16850f --- /dev/null +++ b/Modules/_testinternalcapi.c @@ -0,0 +1,45 @@ +/* + * C Extension module to test Python internal C APIs (Include/internal). + */ + +#if !defined(Py_BUILD_CORE_BUILTIN) && !defined(Py_BUILD_CORE_MODULE) +# error "Py_BUILD_CORE_BUILTIN or Py_BUILD_CORE_MODULE must be defined" +#endif + +#define PY_SSIZE_T_CLEAN + +#include "Python.h" +#include "pycore_coreconfig.h" + + +static PyObject * +get_configs(PyObject *self, PyObject *Py_UNUSED(args)) +{ + return _Py_GetConfigsAsDict(); +} + + +static PyMethodDef TestMethods[] = { + {"get_configs", get_configs, METH_NOARGS}, + {NULL, NULL} /* sentinel */ +}; + + +static struct PyModuleDef _testcapimodule = { + PyModuleDef_HEAD_INIT, + "_testinternalcapi", + NULL, + -1, + TestMethods, + NULL, + NULL, + NULL, + NULL +}; + + +PyMODINIT_FUNC +PyInit__testinternalcapi(void) +{ + return PyModule_Create(&_testcapimodule); +} diff --git a/PCbuild/_testinternalcapi.vcxproj b/PCbuild/_testinternalcapi.vcxproj new file mode 100644 index 000000000000..116d193a39cf --- /dev/null +++ b/PCbuild/_testinternalcapi.vcxproj @@ -0,0 +1,94 @@ +? + + + + Debug + ARM + + + Debug + Win32 + + + Debug + x64 + + + PGInstrument + ARM + + + PGInstrument + Win32 + + + PGInstrument + x64 + + + PGUpdate + ARM + + + PGUpdate + Win32 + + + PGUpdate + x64 + + + Release + ARM + + + Release + Win32 + + + Release + x64 + + + + {900342D7-516A-4469-B1AD-59A66E49A25F} + _testinternalcapi + Win32Proj + false + + + + + DynamicLibrary + NotSet + + + + .pyd + + + + + + + + + + <_ProjectFileVersion>10.0.30319.1 + + + + + + + + + + {cf7ac3d1-e2df-41d2-bea6-1e2556cdea26} + false + + + + + + diff --git a/PCbuild/_testinternalcapi.vcxproj.filters b/PCbuild/_testinternalcapi.vcxproj.filters new file mode 100644 index 000000000000..4644f230be1c --- /dev/null +++ b/PCbuild/_testinternalcapi.vcxproj.filters @@ -0,0 +1,13 @@ +? 
+ + + + {136fc5eb-7fe4-4486-8c6d-b49f37a00199} + + + + + Source Files + + + diff --git a/PCbuild/pcbuild.proj b/PCbuild/pcbuild.proj index 574c4f02913b..d16ddef89f62 100644 --- a/PCbuild/pcbuild.proj +++ b/PCbuild/pcbuild.proj @@ -62,13 +62,13 @@ - + false - + diff --git a/PCbuild/pcbuild.sln b/PCbuild/pcbuild.sln index e9239365b5c1..951dc932a8e5 100644 --- a/PCbuild/pcbuild.sln +++ b/PCbuild/pcbuild.sln @@ -37,6 +37,8 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_ssl", "_ssl.vcxproj", "{C6 EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_testcapi", "_testcapi.vcxproj", "{6901D91C-6E48-4BB7-9FEC-700C8131DF1D}" EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_testinternalcapi", "_testinternalcapi.vcxproj", "{900342D7-516A-4469-B1AD-59A66E49A25F}" +EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_testimportmultiple", "_testimportmultiple.vcxproj", "{36D0C52C-DF4E-45D0-8BC7-E294C3ABC781}" EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_tkinter", "_tkinter.vcxproj", "{4946ECAC-2E69-4BF8-A90A-F5136F5094DF}" diff --git a/Programs/_testembed.c b/Programs/_testembed.c index 7d71a961602d..3fc8e6d8c6b2 100644 --- a/Programs/_testembed.c +++ b/Programs/_testembed.c @@ -298,42 +298,13 @@ static int test_initialize_pymain(void) } -static int -dump_config_impl(void) -{ - PyObject *config = _Py_GetConfigsAsDict(); - if (config == NULL) { - return -1; - } - - PyObject *res; - PyObject *json = PyImport_ImportModule("json"); - if (json) { - res = PyObject_CallMethod(json, "dumps", "O", config); - Py_DECREF(json); - } - else { - res = NULL; - } - Py_CLEAR(config); - if (res == NULL) { - return -1; - } - - PySys_FormatStdout("%S\n", res); - Py_DECREF(res); - - return 0; -} - - static void dump_config(void) { - if (dump_config_impl() < 0) { - fprintf(stderr, "failed to dump the configuration:\n"); - PyErr_Print(); - } + (void) PyRun_SimpleStringFlags( + "import _testinternalcapi, json; " + "print(json.dumps(_testinternalcapi.get_configs()))", + 0); } diff --git a/setup.py b/setup.py index c470719a6a80..3d6404f89eef 100644 --- a/setup.py +++ b/setup.py @@ -814,6 +814,10 @@ def detect_test_extensions(self): self.add(Extension('_testcapi', ['_testcapimodule.c'], depends=['testcapi_long.h'])) + # Python Internal C API test module + self.add(Extension('_testinternalcapi', ['_testinternalcapi.c'], + extra_compile_args=['-D Py_BUILD_CORE_MODULE'])) + # Python PEP-3118 (buffer protocol) test module self.add(Extension('_testbuffer', ['_testbuffer.c'])) From webhook-mailer at python.org Thu Apr 18 08:43:19 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 18 Apr 2019 12:43:19 -0000 Subject: [Python-checkins] bpo-36651: Fixed Asyncio Event Loop documentation inconsistency (GH-12866) Message-ID: https://github.com/python/cpython/commit/7e954e7de4f3777b5ce239640bd2b76aced09561 commit: 7e954e7de4f3777b5ce239640bd2b76aced09561 branch: master author: Enrico Alarico Carbognani committer: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> date: 2019-04-18T05:43:14-07:00 summary: bpo-36651: Fixed Asyncio Event Loop documentation inconsistency (GH-12866) # [bpo-36651](https://bugs.python.org/issue36651): Fixed Asyncio Event Loop documentation inconsistency In the documentation for the call_later and the call_at methods there is a note which says that the delay cannot be longer than a day, but both methods have a note saying that this limitation was removed in Python 3.8 Here I fixed this issue by removing 
the pre-exising note and added a versionchanged. To test my changes I have rebuilt the documentation with ```make html```. I did not have any errors and the effected page displayed correctly on a browser. https://bugs.python.org/issue36651 files: M Doc/library/asyncio-eventloop.rst diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index 8d157fd5f599..bf7c93a86fd0 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -281,9 +281,9 @@ clocks to track time. the event loop's internal monotonic clock. .. note:: - - Timeouts (relative *delay* or absolute *when*) should not - exceed one day. + .. versionchanged:: 3.8 + In Python 3.7 and earlier timeouts (relative *delay* or absolute *when*) + should not exceed one day. This has been fixed in Python 3.8. .. seealso:: From webhook-mailer at python.org Thu Apr 18 08:49:29 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 18 Apr 2019 12:49:29 -0000 Subject: [Python-checkins] bpo-36651: Fixed Asyncio Event Loop documentation inconsistency (GH-12866) Message-ID: https://github.com/python/cpython/commit/d29b3dd9227cfc4a23f77e99d62e20e063272de1 commit: d29b3dd9227cfc4a23f77e99d62e20e063272de1 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-18T05:49:24-07:00 summary: bpo-36651: Fixed Asyncio Event Loop documentation inconsistency (GH-12866) GH- [bpo-36651](https://bugs.python.org/issue36651): Fixed Asyncio Event Loop documentation inconsistency In the documentation for the call_later and the call_at methods there is a note which says that the delay cannot be longer than a day, but both methods have a note saying that this limitation was removed in Python 3.8 Here I fixed this issue by removing the pre-exising note and added a versionchanged. To test my changes I have rebuilt the documentation with ```make html```. I did not have any errors and the effected page displayed correctly on a browser. https://bugs.python.org/issue36651 (cherry picked from commit 7e954e7de4f3777b5ce239640bd2b76aced09561) Co-authored-by: Enrico Alarico Carbognani files: M Doc/library/asyncio-eventloop.rst diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index d59cf055b614..b9a8b8941a46 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -281,9 +281,9 @@ clocks to track time. the event loop's internal monotonic clock. .. note:: - - Timeouts (relative *delay* or absolute *when*) should not - exceed one day. + .. versionchanged:: 3.8 + In Python 3.7 and earlier timeouts (relative *delay* or absolute *when*) + should not exceed one day. This has been fixed in Python 3.8. .. seealso:: From webhook-mailer at python.org Thu Apr 18 13:05:27 2019 From: webhook-mailer at python.org (Stefan Behnel) Date: Thu, 18 Apr 2019 17:05:27 -0000 Subject: [Python-checkins] bpo-30485: Change the prefix for defining the default namespace in ElementPath from None to '' since there is existing code that uses that and it's more convenient to have an all-string-keys dict (e.g. when sorting items etc.). 
(#12860) Message-ID: https://github.com/python/cpython/commit/e8113f51a8bdf33188ee30a1c038a298329e7bfa commit: e8113f51a8bdf33188ee30a1c038a298329e7bfa branch: master author: Stefan Behnel committer: GitHub date: 2019-04-18T19:05:03+02:00 summary: bpo-30485: Change the prefix for defining the default namespace in ElementPath from None to '' since there is existing code that uses that and it's more convenient to have an all-string-keys dict (e.g. when sorting items etc.). (#12860) files: M Doc/library/xml.etree.elementtree.rst M Lib/test/test_xml_etree.py M Lib/xml/etree/ElementPath.py M Misc/NEWS.d/next/Library/2019-04-13-23-42-33.bpo-30485.JHhjJS.rst diff --git a/Doc/library/xml.etree.elementtree.rst b/Doc/library/xml.etree.elementtree.rst index c83e719e959a..9e2c295867ca 100644 --- a/Doc/library/xml.etree.elementtree.rst +++ b/Doc/library/xml.etree.elementtree.rst @@ -764,7 +764,7 @@ Element Objects Finds the first subelement matching *match*. *match* may be a tag name or a :ref:`path `. Returns an element instance or ``None``. *namespaces* is an optional mapping from namespace prefix - to full name. Pass ``None`` as prefix to move all unprefixed tag names + to full name. Pass ``''`` as prefix to move all unprefixed tag names in the expression into the given namespace. @@ -773,7 +773,7 @@ Element Objects Finds all matching subelements, by tag name or :ref:`path `. Returns a list containing all matching elements in document order. *namespaces* is an optional mapping from - namespace prefix to full name. Pass ``None`` as prefix to move all + namespace prefix to full name. Pass ``''`` as prefix to move all unprefixed tag names in the expression into the given namespace. @@ -784,7 +784,7 @@ Element Objects of the first matching element, or *default* if no element was found. Note that if the matching element has no text content an empty string is returned. *namespaces* is an optional mapping from namespace prefix - to full name. Pass ``None`` as prefix to move all unprefixed tag names + to full name. Pass ``''`` as prefix to move all unprefixed tag names in the expression into the given namespace. 
diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py index f5b118b079ee..14ce32af8026 100644 --- a/Lib/test/test_xml_etree.py +++ b/Lib/test/test_xml_etree.py @@ -2463,7 +2463,7 @@ def test_findall_different_nsmaps(self): nsmap = {'xx': 'Y'} self.assertEqual(len(root.findall(".//xx:b", namespaces=nsmap)), 1) self.assertEqual(len(root.findall(".//b", namespaces=nsmap)), 2) - nsmap = {'xx': 'X', None: 'Y'} + nsmap = {'xx': 'X', '': 'Y'} self.assertEqual(len(root.findall(".//xx:b", namespaces=nsmap)), 2) self.assertEqual(len(root.findall(".//b", namespaces=nsmap)), 1) diff --git a/Lib/xml/etree/ElementPath.py b/Lib/xml/etree/ElementPath.py index 4d231a7df656..b670d58f3f01 100644 --- a/Lib/xml/etree/ElementPath.py +++ b/Lib/xml/etree/ElementPath.py @@ -71,7 +71,7 @@ ) def xpath_tokenizer(pattern, namespaces=None): - default_namespace = namespaces.get(None) if namespaces else None + default_namespace = namespaces.get('') if namespaces else None for token in xpath_tokenizer_re.findall(pattern): tag = token[1] if tag and tag[0] != "{": @@ -275,11 +275,7 @@ def iterfind(elem, path, namespaces=None): cache_key = (path,) if namespaces: - if None in namespaces: - cache_key += (namespaces[None],) + tuple(sorted( - item for item in namespaces.items() if item[0] is not None)) - else: - cache_key += tuple(sorted(namespaces.items())) + cache_key += tuple(sorted(namespaces.items())) try: selector = _cache[cache_key] diff --git a/Misc/NEWS.d/next/Library/2019-04-13-23-42-33.bpo-30485.JHhjJS.rst b/Misc/NEWS.d/next/Library/2019-04-13-23-42-33.bpo-30485.JHhjJS.rst index 6c82efd3e009..900edf8c7553 100644 --- a/Misc/NEWS.d/next/Library/2019-04-13-23-42-33.bpo-30485.JHhjJS.rst +++ b/Misc/NEWS.d/next/Library/2019-04-13-23-42-33.bpo-30485.JHhjJS.rst @@ -1,3 +1,3 @@ Path expressions in xml.etree.ElementTree can now avoid explicit namespace prefixes for tags (or the "{namespace}tag" notation) by passing a default -namespace with a 'None' prefix. +namespace with an empty string prefix. 
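A short usage sketch of the new convention (element names and the namespace URI below are illustrative only): the default namespace is now registered under the empty-string prefix, so the namespaces mapping contains only string keys.

    from xml.etree import ElementTree as ET

    root = ET.fromstring('<a xmlns="http://example.com/ns"><b/><b/></a>')

    # '' maps unprefixed tag names in the path to the default namespace.
    print(len(root.findall('b', namespaces={'': 'http://example.com/ns'})))   # 2
    # Without a default-namespace entry, an unprefixed 'b' matches nothing here.
    print(len(root.findall('b', namespaces={'xx': 'http://example.com/ns'}))) # 0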
From webhook-mailer at python.org Fri Apr 19 03:07:25 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Fri, 19 Apr 2019 07:07:25 -0000 Subject: [Python-checkins] ctypes: remove use of legacy unicode API (GH-12340) Message-ID: https://github.com/python/cpython/commit/9d062d690b768252204992fc6ab7c3873a87442d commit: 9d062d690b768252204992fc6ab7c3873a87442d branch: master author: Inada Naoki committer: GitHub date: 2019-04-19T16:07:19+09:00 summary: ctypes: remove use of legacy unicode API (GH-12340) PyUnicode_AsUnicodeAndSize() -> PyUnicode_AsWideChar() files: M Modules/_ctypes/_ctypes.c M Modules/_ctypes/cfield.c diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c index ac071bbb708b..13cf76a16d76 100644 --- a/Modules/_ctypes/_ctypes.c +++ b/Modules/_ctypes/_ctypes.c @@ -1304,8 +1304,6 @@ static int WCharArray_set_value(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored)) { Py_ssize_t result = 0; - Py_UNICODE *wstr; - Py_ssize_t len; if (value == NULL) { PyErr_SetString(PyExc_TypeError, @@ -1320,12 +1318,14 @@ WCharArray_set_value(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored } else Py_INCREF(value); - wstr = PyUnicode_AsUnicodeAndSize(value, &len); - if (wstr == NULL) + Py_ssize_t len = PyUnicode_AsWideChar(value, NULL, 0); + if (len < 0) { return -1; - if ((size_t)len > self->b_size/sizeof(wchar_t)) { - PyErr_SetString(PyExc_ValueError, - "string too long"); + } + // PyUnicode_AsWideChar() returns number of wchars including trailing null byte, + // when it is called with NULL. + if (((size_t)len-1) > self->b_size/sizeof(wchar_t)) { + PyErr_SetString(PyExc_ValueError, "string too long"); result = -1; goto done; } diff --git a/Modules/_ctypes/cfield.c b/Modules/_ctypes/cfield.c index 5f194e21550f..157c32fd9096 100644 --- a/Modules/_ctypes/cfield.c +++ b/Modules/_ctypes/cfield.c @@ -1229,9 +1229,6 @@ U_get(void *ptr, Py_ssize_t size) static PyObject * U_set(void *ptr, PyObject *value, Py_ssize_t length) { - Py_UNICODE *wstr; - Py_ssize_t size; - /* It's easier to calculate in characters than in bytes */ length /= sizeof(wchar_t); @@ -1242,9 +1239,14 @@ U_set(void *ptr, PyObject *value, Py_ssize_t length) return NULL; } - wstr = PyUnicode_AsUnicodeAndSize(value, &size); - if (wstr == NULL) + Py_ssize_t size = PyUnicode_AsWideChar(value, NULL, 0); + if (size < 0) { return NULL; + } + // PyUnicode_AsWideChar() returns number of wchars including trailing null byte, + // when it is called with NULL. + size--; + assert(size >= 0); if (size > length) { PyErr_Format(PyExc_ValueError, "string too long (%zd, maximum length %zd)", @@ -1421,16 +1423,18 @@ BSTR_set(void *ptr, PyObject *value, Py_ssize_t size) /* create a BSTR from value */ if (value) { - wchar_t* wvalue; Py_ssize_t wsize; - wvalue = PyUnicode_AsUnicodeAndSize(value, &wsize); - if (wvalue == NULL) + wchar_t *wvalue = PyUnicode_AsWideCharString(value, &wsize); + if (wvalue == NULL) { return NULL; + } if ((unsigned) wsize != wsize) { PyErr_SetString(PyExc_ValueError, "String too long for BSTR"); + PyMem_Free(wvalue); return NULL; } bstr = SysAllocStringLen(wvalue, (unsigned)wsize); + PyMem_Free(wvalue); } else bstr = NULL; From webhook-mailer at python.org Sat Apr 20 13:20:49 2019 From: webhook-mailer at python.org (Raymond Hettinger) Date: Sat, 20 Apr 2019 17:20:49 -0000 Subject: [Python-checkins] bpo-36650: Fix handling of empty keyword args in C version of lru_cache. 
(GH-12881) Message-ID: https://github.com/python/cpython/commit/14adbd45980f705cb6554ca17b8a66b56e105296 commit: 14adbd45980f705cb6554ca17b8a66b56e105296 branch: master author: Raymond Hettinger committer: GitHub date: 2019-04-20T07:20:44-10:00 summary: bpo-36650: Fix handling of empty keyword args in C version of lru_cache. (GH-12881) files: A Misc/NEWS.d/next/Library/2019-04-19-15-29-55.bpo-36650._EVdrz.rst M Lib/test/test_functools.py M Modules/_functoolsmodule.c diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index 4b2b9ab61fa7..98908405e140 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -1271,6 +1271,20 @@ def f(x): self.assertEqual(f(20), '.20.') self.assertEqual(f.cache_info().currsize, 10) + def test_lru_bug_36650(self): + # C version of lru_cache was treating a call with an empty **kwargs + # dictionary as being distinct from a call with no keywords at all. + # This did not result in an incorrect answer, but it did trigger + # an unexpected cache miss. + + @self.module.lru_cache() + def f(x): + pass + + f(0) + f(0, **{}) + self.assertEqual(f.cache_info().hits, 1) + def test_lru_hash_only_once(self): # To protect against weird reentrancy bugs and to improve # efficiency when faced with slow __hash__ methods, the diff --git a/Misc/NEWS.d/next/Library/2019-04-19-15-29-55.bpo-36650._EVdrz.rst b/Misc/NEWS.d/next/Library/2019-04-19-15-29-55.bpo-36650._EVdrz.rst new file mode 100644 index 000000000000..de10575fc272 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-04-19-15-29-55.bpo-36650._EVdrz.rst @@ -0,0 +1,4 @@ +The C version of functools.lru_cache() was treating calls with an empty +``**kwargs`` dictionary as being distinct from calls with no keywords at all. +This did not result in an incorrect answer, but it did trigger an unexpected +cache miss. diff --git a/Modules/_functoolsmodule.c b/Modules/_functoolsmodule.c index 3f1c01651ded..dcc9129fc6b1 100644 --- a/Modules/_functoolsmodule.c +++ b/Modules/_functoolsmodule.c @@ -750,8 +750,10 @@ lru_cache_make_key(PyObject *args, PyObject *kwds, int typed) PyObject *key, *keyword, *value; Py_ssize_t key_size, pos, key_pos, kwds_size; + kwds_size = kwds ? PyDict_GET_SIZE(kwds) : 0; + /* short path, key will match args anyway, which is a tuple */ - if (!typed && !kwds) { + if (!typed && !kwds_size) { if (PyTuple_GET_SIZE(args) == 1) { key = PyTuple_GET_ITEM(args, 0); if (PyUnicode_CheckExact(key) || PyLong_CheckExact(key)) { @@ -765,9 +767,6 @@ lru_cache_make_key(PyObject *args, PyObject *kwds, int typed) return args; } - kwds_size = kwds ? PyDict_GET_SIZE(kwds) : 0; - assert(kwds_size >= 0); - key_size = PyTuple_GET_SIZE(args); if (kwds_size) key_size += kwds_size * 2 + 1; From webhook-mailer at python.org Sat Apr 20 13:50:36 2019 From: webhook-mailer at python.org (Raymond Hettinger) Date: Sat, 20 Apr 2019 17:50:36 -0000 Subject: [Python-checkins] bpo-36650: Fix handling of empty keyword args in C version of lru_cache. (GH-12881) (GH-12888) Message-ID: https://github.com/python/cpython/commit/8b30ee843528d0f0e2bfc3307d86658915579c21 commit: 8b30ee843528d0f0e2bfc3307d86658915579c21 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: Raymond Hettinger date: 2019-04-20T07:50:32-10:00 summary: bpo-36650: Fix handling of empty keyword args in C version of lru_cache. 
(GH-12881) (GH-12888) (cherry picked from commit 14adbd45980f705cb6554ca17b8a66b56e105296) Co-authored-by: Raymond Hettinger files: A Misc/NEWS.d/next/Library/2019-04-19-15-29-55.bpo-36650._EVdrz.rst M Lib/test/test_functools.py M Modules/_functoolsmodule.c diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index a7625d609039..a17b8ede3374 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -1263,6 +1263,20 @@ def f(x): self.assertEqual(f(20), '.20.') self.assertEqual(f.cache_info().currsize, 10) + def test_lru_bug_36650(self): + # C version of lru_cache was treating a call with an empty **kwargs + # dictionary as being distinct from a call with no keywords at all. + # This did not result in an incorrect answer, but it did trigger + # an unexpected cache miss. + + @self.module.lru_cache() + def f(x): + pass + + f(0) + f(0, **{}) + self.assertEqual(f.cache_info().hits, 1) + def test_lru_hash_only_once(self): # To protect against weird reentrancy bugs and to improve # efficiency when faced with slow __hash__ methods, the diff --git a/Misc/NEWS.d/next/Library/2019-04-19-15-29-55.bpo-36650._EVdrz.rst b/Misc/NEWS.d/next/Library/2019-04-19-15-29-55.bpo-36650._EVdrz.rst new file mode 100644 index 000000000000..de10575fc272 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-04-19-15-29-55.bpo-36650._EVdrz.rst @@ -0,0 +1,4 @@ +The C version of functools.lru_cache() was treating calls with an empty +``**kwargs`` dictionary as being distinct from calls with no keywords at all. +This did not result in an incorrect answer, but it did trigger an unexpected +cache miss. diff --git a/Modules/_functoolsmodule.c b/Modules/_functoolsmodule.c index c6a92ed16959..c0578554a600 100644 --- a/Modules/_functoolsmodule.c +++ b/Modules/_functoolsmodule.c @@ -750,8 +750,10 @@ lru_cache_make_key(PyObject *args, PyObject *kwds, int typed) PyObject *key, *keyword, *value; Py_ssize_t key_size, pos, key_pos, kwds_size; + kwds_size = kwds ? PyDict_GET_SIZE(kwds) : 0; + /* short path, key will match args anyway, which is a tuple */ - if (!typed && !kwds) { + if (!typed && !kwds_size) { if (PyTuple_GET_SIZE(args) == 1) { key = PyTuple_GET_ITEM(args, 0); if (PyUnicode_CheckExact(key) || PyLong_CheckExact(key)) { @@ -765,9 +767,6 @@ lru_cache_make_key(PyObject *args, PyObject *kwds, int typed) return args; } - kwds_size = kwds ? PyDict_GET_SIZE(kwds) : 0; - assert(kwds_size >= 0); - key_size = PyTuple_GET_SIZE(args); if (kwds_size) key_size += kwds_size * 2 + 1; From webhook-mailer at python.org Sat Apr 20 19:06:42 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 20 Apr 2019 23:06:42 -0000 Subject: [Python-checkins] Fix typo (GH-12878) Message-ID: https://github.com/python/cpython/commit/3e986de0d65e78901b55d4e500b1d05c847b6d5e commit: 3e986de0d65e78901b55d4e500b1d05c847b6d5e branch: master author: Fredrik Averpil committer: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> date: 2019-04-20T16:06:38-07:00 summary: Fix typo (GH-12878) "sychronization" -> "synchronization" files: M Doc/library/asyncio-sync.rst diff --git a/Doc/library/asyncio-sync.rst b/Doc/library/asyncio-sync.rst index 18da18873dbf..e3f18ccb4341 100644 --- a/Doc/library/asyncio-sync.rst +++ b/Doc/library/asyncio-sync.rst @@ -17,7 +17,7 @@ those of the :mod:`threading` module with two important caveats: argument; use the :func:`asyncio.wait_for` function to perform operations with timeouts. 
-asyncio has the following basic sychronization primitives: +asyncio has the following basic synchronization primitives: * :class:`Lock` * :class:`Event` From webhook-mailer at python.org Sat Apr 20 19:12:26 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 20 Apr 2019 23:12:26 -0000 Subject: [Python-checkins] Fix typo (GH-12878) Message-ID: https://github.com/python/cpython/commit/307e7a426403bc91801e79625df359f89ccb5c03 commit: 307e7a426403bc91801e79625df359f89ccb5c03 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-20T16:12:23-07:00 summary: Fix typo (GH-12878) "sychronization" -> "synchronization" (cherry picked from commit 3e986de0d65e78901b55d4e500b1d05c847b6d5e) Co-authored-by: Fredrik Averpil files: M Doc/library/asyncio-sync.rst diff --git a/Doc/library/asyncio-sync.rst b/Doc/library/asyncio-sync.rst index 18b562970436..291310d71414 100644 --- a/Doc/library/asyncio-sync.rst +++ b/Doc/library/asyncio-sync.rst @@ -17,7 +17,7 @@ those of the :mod:`threading` module with two important caveats: argument; use the :func:`asyncio.wait_for` function to perform operations with timeouts. -asyncio has the following basic sychronization primitives: +asyncio has the following basic synchronization primitives: * :class:`Lock` * :class:`Event` From webhook-mailer at python.org Sun Apr 21 18:14:51 2019 From: webhook-mailer at python.org (Berker Peksag) Date: Sun, 21 Apr 2019 22:14:51 -0000 Subject: [Python-checkins] bpo-36645: Fix ambiguous formatting in re.sub() documentation (GH-12879) Message-ID: https://github.com/python/cpython/commit/5ebfa840a1c9967da299356733da41b532688988 commit: 5ebfa840a1c9967da299356733da41b532688988 branch: master author: mollison committer: Berker Peksag date: 2019-04-22T01:14:45+03:00 summary: bpo-36645: Fix ambiguous formatting in re.sub() documentation (GH-12879) files: M Doc/library/re.rst diff --git a/Doc/library/re.rst b/Doc/library/re.rst index 4ac5dee14071..5ef72b535ce8 100644 --- a/Doc/library/re.rst +++ b/Doc/library/re.rst @@ -908,6 +908,7 @@ form. Unknown escapes in *repl* consisting of ``'\'`` and an ASCII letter now are errors. + .. versionchanged:: 3.7 Empty matches for the pattern are replaced when adjacent to a previous non-empty match. From webhook-mailer at python.org Sun Apr 21 18:20:48 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sun, 21 Apr 2019 22:20:48 -0000 Subject: [Python-checkins] bpo-36645: Fix ambiguous formatting in re.sub() documentation (GH-12879) Message-ID: https://github.com/python/cpython/commit/71b88827f6ad368eafa17983bd979175d24da888 commit: 71b88827f6ad368eafa17983bd979175d24da888 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-21T15:20:45-07:00 summary: bpo-36645: Fix ambiguous formatting in re.sub() documentation (GH-12879) (cherry picked from commit 5ebfa840a1c9967da299356733da41b532688988) Co-authored-by: mollison files: M Doc/library/re.rst diff --git a/Doc/library/re.rst b/Doc/library/re.rst index dc3f428b8a19..2e6c7f715d20 100644 --- a/Doc/library/re.rst +++ b/Doc/library/re.rst @@ -902,6 +902,7 @@ form. Unknown escapes in *repl* consisting of ``'\'`` and an ASCII letter now are errors. + .. versionchanged:: 3.7 Empty matches for the pattern are replaced when adjacent to a previous non-empty match. 
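A small example of the behaviour the reworded note documents (output assumes CPython 3.7 or later):

    import re

    # Since 3.7, an empty match adjacent to a previous non-empty match is
    # also replaced, so the position after 'x' still produces a separator:
    print(re.sub('x*', '-', 'abxd'))   # '-a-b--d-'  (was '-a-b-d-' before 3.7)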
From webhook-mailer at python.org Sun Apr 21 21:47:10 2019 From: webhook-mailer at python.org (Berker Peksag) Date: Mon, 22 Apr 2019 01:47:10 -0000 Subject: [Python-checkins] bpo-24011: Use PyModule_Add{Object, IntMacro} in PyInit__signal() (GH-12765) Message-ID: https://github.com/python/cpython/commit/9541bd321a94f13dc41163a5d7a1a847816fac84 commit: 9541bd321a94f13dc41163a5d7a1a847816fac84 branch: master author: Joannah Nanjekye <33177550+nanjekyejoannah at users.noreply.github.com> committer: Berker Peksag date: 2019-04-22T04:47:06+03:00 summary: bpo-24011: Use PyModule_Add{Object,IntMacro} in PyInit__signal() (GH-12765) files: M Modules/signalmodule.c diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c index 4590017c170a..8c5a0d044ab6 100644 --- a/Modules/signalmodule.c +++ b/Modules/signalmodule.c @@ -1350,17 +1350,15 @@ PyInit__signal(void) d = PyModule_GetDict(m); x = DefaultHandler = PyLong_FromVoidPtr((void *)SIG_DFL); - if (!x || PyDict_SetItemString(d, "SIG_DFL", x) < 0) + if (PyModule_AddObject(m, "SIG_DFL", x)) goto finally; x = IgnoreHandler = PyLong_FromVoidPtr((void *)SIG_IGN); - if (!x || PyDict_SetItemString(d, "SIG_IGN", x) < 0) + if (PyModule_AddObject(m, "SIG_IGN", x)) goto finally; - x = PyLong_FromLong((long)NSIG); - if (!x || PyDict_SetItemString(d, "NSIG", x) < 0) + if (PyModule_AddIntMacro(m, NSIG)) goto finally; - Py_DECREF(x); #ifdef SIG_BLOCK if (PyModule_AddIntMacro(m, SIG_BLOCK)) @@ -1569,8 +1567,8 @@ PyInit__signal(void) #if defined (HAVE_SETITIMER) || defined (HAVE_GETITIMER) ItimerError = PyErr_NewException("signal.ItimerError", PyExc_OSError, NULL); - if (ItimerError != NULL) - PyDict_SetItemString(d, "ItimerError", ItimerError); + if (PyModule_AddObject(m, "ItimerError", ItimerError)) + goto finally; #endif #ifdef CTRL_C_EVENT From webhook-mailer at python.org Sun Apr 21 22:30:26 2019 From: webhook-mailer at python.org (Berker Peksag) Date: Mon, 22 Apr 2019 02:30:26 -0000 Subject: [Python-checkins] bpo-23078: Add support for {class, static}method to mock.create_autospec() (GH-11613) Message-ID: https://github.com/python/cpython/commit/9b21856b0fcda949de239edc7aa6cf3f2f4f77a3 commit: 9b21856b0fcda949de239edc7aa6cf3f2f4f77a3 branch: master author: Xtreak committer: Berker Peksag date: 2019-04-22T05:30:23+03:00 summary: bpo-23078: Add support for {class,static}method to mock.create_autospec() (GH-11613) Co-authored-by: Felipe files: A Misc/NEWS.d/next/Library/2019-01-18-23-10-10.bpo-23078.l4dFoj.rst M Lib/unittest/mock.py M Lib/unittest/test/testmock/testhelpers.py M Lib/unittest/test/testmock/testmock.py M Lib/unittest/test/testmock/testpatch.py diff --git a/Lib/unittest/mock.py b/Lib/unittest/mock.py index 0e77f0e48943..1636073ff009 100644 --- a/Lib/unittest/mock.py +++ b/Lib/unittest/mock.py @@ -29,7 +29,7 @@ import pprint import sys import builtins -from types import ModuleType +from types import ModuleType, MethodType from unittest.util import safe_repr from functools import wraps, partial @@ -122,6 +122,8 @@ def _copy_func_details(func, funcopy): def _callable(obj): if isinstance(obj, type): return True + if isinstance(obj, (staticmethod, classmethod, MethodType)): + return _callable(obj.__func__) if getattr(obj, '__call__', None) is not None: return True return False diff --git a/Lib/unittest/test/testmock/testhelpers.py b/Lib/unittest/test/testmock/testhelpers.py index 9f1bf2676bf5..0d03108aca55 100644 --- a/Lib/unittest/test/testmock/testhelpers.py +++ b/Lib/unittest/test/testmock/testhelpers.py @@ -5,7 +5,7 @@ from unittest.mock import ( 
call, _Call, create_autospec, MagicMock, - Mock, ANY, _CallList, patch, PropertyMock + Mock, ANY, _CallList, patch, PropertyMock, _callable ) from datetime import datetime @@ -1011,5 +1011,43 @@ def test_propertymock_returnvalue(self): self.assertNotIsInstance(returned, PropertyMock) +class TestCallablePredicate(unittest.TestCase): + + def test_type(self): + for obj in [str, bytes, int, list, tuple, SomeClass]: + self.assertTrue(_callable(obj)) + + def test_call_magic_method(self): + class Callable: + def __call__(self): + pass + instance = Callable() + self.assertTrue(_callable(instance)) + + def test_staticmethod(self): + class WithStaticMethod: + @staticmethod + def staticfunc(): + pass + self.assertTrue(_callable(WithStaticMethod.staticfunc)) + + def test_non_callable_staticmethod(self): + class BadStaticMethod: + not_callable = staticmethod(None) + self.assertFalse(_callable(BadStaticMethod.not_callable)) + + def test_classmethod(self): + class WithClassMethod: + @classmethod + def classfunc(cls): + pass + self.assertTrue(_callable(WithClassMethod.classfunc)) + + def test_non_callable_classmethod(self): + class BadClassMethod: + not_callable = classmethod(None) + self.assertFalse(_callable(BadClassMethod.not_callable)) + + if __name__ == '__main__': unittest.main() diff --git a/Lib/unittest/test/testmock/testmock.py b/Lib/unittest/test/testmock/testmock.py index 37f14c37f47d..bdaebbe66b74 100644 --- a/Lib/unittest/test/testmock/testmock.py +++ b/Lib/unittest/test/testmock/testmock.py @@ -1419,6 +1419,23 @@ def test_create_autospec_with_name(self): m = mock.create_autospec(object(), name='sweet_func') self.assertIn('sweet_func', repr(m)) + #Issue23078 + def test_create_autospec_classmethod_and_staticmethod(self): + class TestClass: + @classmethod + def class_method(cls): + pass + + @staticmethod + def static_method(): + pass + for method in ('class_method', 'static_method'): + with self.subTest(method=method): + mock_method = mock.create_autospec(getattr(TestClass, method)) + mock_method() + mock_method.assert_called_once_with() + self.assertRaises(TypeError, mock_method, 'extra_arg') + #Issue21238 def test_mock_unsafe(self): m = Mock() diff --git a/Lib/unittest/test/testmock/testpatch.py b/Lib/unittest/test/testmock/testpatch.py index 2c14360b2df5..51c66fec67fc 100644 --- a/Lib/unittest/test/testmock/testpatch.py +++ b/Lib/unittest/test/testmock/testpatch.py @@ -51,6 +51,14 @@ def g(self): pass foo = 'bar' + @staticmethod + def static_method(): + return 24 + + @classmethod + def class_method(cls): + return 42 + class Bar(object): def a(self): pass @@ -1023,6 +1031,18 @@ def test(mock_function): self.assertEqual(result, 3) + def test_autospec_staticmethod(self): + with patch('%s.Foo.static_method' % __name__, autospec=True) as method: + Foo.static_method() + method.assert_called_once_with() + + + def test_autospec_classmethod(self): + with patch('%s.Foo.class_method' % __name__, autospec=True) as method: + Foo.class_method() + method.assert_called_once_with() + + def test_autospec_with_new(self): patcher = patch('%s.function' % __name__, new=3, autospec=True) self.assertRaises(TypeError, patcher.start) diff --git a/Misc/NEWS.d/next/Library/2019-01-18-23-10-10.bpo-23078.l4dFoj.rst b/Misc/NEWS.d/next/Library/2019-01-18-23-10-10.bpo-23078.l4dFoj.rst new file mode 100644 index 000000000000..975cc9c0454c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-01-18-23-10-10.bpo-23078.l4dFoj.rst @@ -0,0 +1,2 @@ +Add support for :func:`classmethod` and :func:`staticmethod` to 
+:func:`unittest.mock.create_autospec`. Initial patch by Felipe Ochoa. From webhook-mailer at python.org Sun Apr 21 23:07:59 2019 From: webhook-mailer at python.org (Berker Peksag) Date: Mon, 22 Apr 2019 03:07:59 -0000 Subject: [Python-checkins] bpo-23078: Add support for {class, static}method to mock.create_autospec() (GH-11613) Message-ID: https://github.com/python/cpython/commit/15a57a3cadb992bb1752302333ff593e7eab284c commit: 15a57a3cadb992bb1752302333ff593e7eab284c branch: 3.7 author: Berker Peksag committer: GitHub date: 2019-04-22T06:07:56+03:00 summary: bpo-23078: Add support for {class,static}method to mock.create_autospec() (GH-11613) Co-authored-by: Felipe (cherry picked from commit 9b21856b0fcda949de239edc7aa6cf3f2f4f77a3) files: A Misc/NEWS.d/next/Library/2019-01-18-23-10-10.bpo-23078.l4dFoj.rst M Lib/unittest/mock.py M Lib/unittest/test/testmock/testhelpers.py M Lib/unittest/test/testmock/testmock.py M Lib/unittest/test/testmock/testpatch.py diff --git a/Lib/unittest/mock.py b/Lib/unittest/mock.py index 373e1d5f64d8..db99585c33d2 100644 --- a/Lib/unittest/mock.py +++ b/Lib/unittest/mock.py @@ -29,7 +29,7 @@ import pprint import sys import builtins -from types import ModuleType +from types import ModuleType, MethodType from functools import wraps, partial @@ -121,6 +121,8 @@ def _copy_func_details(func, funcopy): def _callable(obj): if isinstance(obj, type): return True + if isinstance(obj, (staticmethod, classmethod, MethodType)): + return _callable(obj.__func__) if getattr(obj, '__call__', None) is not None: return True return False diff --git a/Lib/unittest/test/testmock/testhelpers.py b/Lib/unittest/test/testmock/testhelpers.py index 745580ef79db..674de367e877 100644 --- a/Lib/unittest/test/testmock/testhelpers.py +++ b/Lib/unittest/test/testmock/testhelpers.py @@ -5,7 +5,7 @@ from unittest.mock import ( call, _Call, create_autospec, MagicMock, - Mock, ANY, _CallList, patch, PropertyMock + Mock, ANY, _CallList, patch, PropertyMock, _callable ) from datetime import datetime @@ -1002,5 +1002,43 @@ def test_propertymock_returnvalue(self): self.assertNotIsInstance(returned, PropertyMock) +class TestCallablePredicate(unittest.TestCase): + + def test_type(self): + for obj in [str, bytes, int, list, tuple, SomeClass]: + self.assertTrue(_callable(obj)) + + def test_call_magic_method(self): + class Callable: + def __call__(self): + pass + instance = Callable() + self.assertTrue(_callable(instance)) + + def test_staticmethod(self): + class WithStaticMethod: + @staticmethod + def staticfunc(): + pass + self.assertTrue(_callable(WithStaticMethod.staticfunc)) + + def test_non_callable_staticmethod(self): + class BadStaticMethod: + not_callable = staticmethod(None) + self.assertFalse(_callable(BadStaticMethod.not_callable)) + + def test_classmethod(self): + class WithClassMethod: + @classmethod + def classfunc(cls): + pass + self.assertTrue(_callable(WithClassMethod.classfunc)) + + def test_non_callable_classmethod(self): + class BadClassMethod: + not_callable = classmethod(None) + self.assertFalse(_callable(BadClassMethod.not_callable)) + + if __name__ == '__main__': unittest.main() diff --git a/Lib/unittest/test/testmock/testmock.py b/Lib/unittest/test/testmock/testmock.py index 2f50236d1ece..dab17651e057 100644 --- a/Lib/unittest/test/testmock/testmock.py +++ b/Lib/unittest/test/testmock/testmock.py @@ -1404,6 +1404,23 @@ def test_create_autospec_with_name(self): m = mock.create_autospec(object(), name='sweet_func') self.assertIn('sweet_func', repr(m)) + #Issue23078 + def 
test_create_autospec_classmethod_and_staticmethod(self): + class TestClass: + @classmethod + def class_method(cls): + pass + + @staticmethod + def static_method(): + pass + for method in ('class_method', 'static_method'): + with self.subTest(method=method): + mock_method = mock.create_autospec(getattr(TestClass, method)) + mock_method() + mock_method.assert_called_once_with() + self.assertRaises(TypeError, mock_method, 'extra_arg') + #Issue21238 def test_mock_unsafe(self): m = Mock() diff --git a/Lib/unittest/test/testmock/testpatch.py b/Lib/unittest/test/testmock/testpatch.py index c484adb60508..6358154b3e5e 100644 --- a/Lib/unittest/test/testmock/testpatch.py +++ b/Lib/unittest/test/testmock/testpatch.py @@ -51,6 +51,14 @@ def g(self): pass foo = 'bar' + @staticmethod + def static_method(): + return 24 + + @classmethod + def class_method(cls): + return 42 + class Bar(object): def a(self): pass @@ -1015,6 +1023,18 @@ def test(mock_function): self.assertEqual(result, 3) + def test_autospec_staticmethod(self): + with patch('%s.Foo.static_method' % __name__, autospec=True) as method: + Foo.static_method() + method.assert_called_once_with() + + + def test_autospec_classmethod(self): + with patch('%s.Foo.class_method' % __name__, autospec=True) as method: + Foo.class_method() + method.assert_called_once_with() + + def test_autospec_with_new(self): patcher = patch('%s.function' % __name__, new=3, autospec=True) self.assertRaises(TypeError, patcher.start) diff --git a/Misc/NEWS.d/next/Library/2019-01-18-23-10-10.bpo-23078.l4dFoj.rst b/Misc/NEWS.d/next/Library/2019-01-18-23-10-10.bpo-23078.l4dFoj.rst new file mode 100644 index 000000000000..975cc9c0454c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-01-18-23-10-10.bpo-23078.l4dFoj.rst @@ -0,0 +1,2 @@ +Add support for :func:`classmethod` and :func:`staticmethod` to +:func:`unittest.mock.create_autospec`. Initial patch by Felipe Ochoa. From webhook-mailer at python.org Mon Apr 22 07:08:29 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Mon, 22 Apr 2019 11:08:29 -0000 Subject: [Python-checkins] Doc: add the missing ".tp_flags" in type definition (GH-12902) Message-ID: https://github.com/python/cpython/commit/662ebd2ab2047aeae9689ad254b39915c38069fd commit: 662ebd2ab2047aeae9689ad254b39915c38069fd branch: master author: Wu Wei committer: Inada Naoki date: 2019-04-22T20:08:20+09:00 summary: Doc: add the missing ".tp_flags" in type definition (GH-12902) files: M Doc/extending/newtypes_tutorial.rst diff --git a/Doc/extending/newtypes_tutorial.rst b/Doc/extending/newtypes_tutorial.rst index bb8a40d0fb06..b4bf9b9e6f75 100644 --- a/Doc/extending/newtypes_tutorial.rst +++ b/Doc/extending/newtypes_tutorial.rst @@ -92,6 +92,7 @@ The second bit is the definition of the type object. 
:: .tp_doc = "Custom objects", .tp_basicsize = sizeof(CustomObject), .tp_itemsize = 0, + .tp_flags = Py_TPFLAGS_DEFAULT, .tp_new = PyType_GenericNew, }; From webhook-mailer at python.org Mon Apr 22 07:14:02 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Mon, 22 Apr 2019 11:14:02 -0000 Subject: [Python-checkins] bpo-36523: Add docstring to io.IOBase.writelines (GH-12683) Message-ID: https://github.com/python/cpython/commit/ab86521a9d9999731e39bd9056420bb7774fd144 commit: ab86521a9d9999731e39bd9056420bb7774fd144 branch: master author: Marcin Niemira committer: Inada Naoki date: 2019-04-22T20:13:51+09:00 summary: bpo-36523: Add docstring to io.IOBase.writelines (GH-12683) files: A Misc/NEWS.d/next/Documentation/2019-04-04-19-11-47.bpo-36523.sG1Tr4.rst M Lib/_pyio.py M Modules/_io/clinic/iobase.c.h M Modules/_io/iobase.c diff --git a/Lib/_pyio.py b/Lib/_pyio.py index e868fdc7cbc5..af2ce30c2780 100644 --- a/Lib/_pyio.py +++ b/Lib/_pyio.py @@ -551,6 +551,11 @@ def readlines(self, hint=None): return lines def writelines(self, lines): + """Write a list of lines to the stream. + + Line separators are not added, so it is usual for each of the lines + provided to have a line separator at the end. + """ self._checkClosed() for line in lines: self.write(line) diff --git a/Misc/NEWS.d/next/Documentation/2019-04-04-19-11-47.bpo-36523.sG1Tr4.rst b/Misc/NEWS.d/next/Documentation/2019-04-04-19-11-47.bpo-36523.sG1Tr4.rst new file mode 100644 index 000000000000..9355f607d760 --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2019-04-04-19-11-47.bpo-36523.sG1Tr4.rst @@ -0,0 +1 @@ +Add docstring for io.IOBase.writelines(). diff --git a/Modules/_io/clinic/iobase.c.h b/Modules/_io/clinic/iobase.c.h index a5c8eea3ec3a..ddaff7b5d135 100644 --- a/Modules/_io/clinic/iobase.c.h +++ b/Modules/_io/clinic/iobase.c.h @@ -242,7 +242,11 @@ _io__IOBase_readlines(PyObject *self, PyObject *const *args, Py_ssize_t nargs) PyDoc_STRVAR(_io__IOBase_writelines__doc__, "writelines($self, lines, /)\n" "--\n" -"\n"); +"\n" +"Write a list of lines to stream.\n" +"\n" +"Line separators are not added, so it is usual for each of the\n" +"lines provided to have a line separator at the end."); #define _IO__IOBASE_WRITELINES_METHODDEF \ {"writelines", (PyCFunction)_io__IOBase_writelines, METH_O, _io__IOBase_writelines__doc__}, @@ -311,4 +315,4 @@ _io__RawIOBase_readall(PyObject *self, PyObject *Py_UNUSED(ignored)) { return _io__RawIOBase_readall_impl(self); } -/*[clinic end generated code: output=60e43a7cbd9f314e input=a9049054013a1b77]*/ +/*[clinic end generated code: output=61b6ea7153ef9940 input=a9049054013a1b77]*/ diff --git a/Modules/_io/iobase.c b/Modules/_io/iobase.c index 3a8f16ae0b65..6a0d9bec5af3 100644 --- a/Modules/_io/iobase.c +++ b/Modules/_io/iobase.c @@ -751,11 +751,16 @@ _io__IOBase_readlines_impl(PyObject *self, Py_ssize_t hint) _io._IOBase.writelines lines: object / + +Write a list of lines to stream. + +Line separators are not added, so it is usual for each of the +lines provided to have a line separator at the end. 
[clinic start generated code]*/ static PyObject * _io__IOBase_writelines(PyObject *self, PyObject *lines) -/*[clinic end generated code: output=976eb0a9b60a6628 input=432e729a8450b3cb]*/ +/*[clinic end generated code: output=976eb0a9b60a6628 input=cac3fc8864183359]*/ { PyObject *iter, *res; From webhook-mailer at python.org Mon Apr 22 07:14:56 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 22 Apr 2019 11:14:56 -0000 Subject: [Python-checkins] Doc: add the missing ".tp_flags" in type definition (GH-12902) Message-ID: https://github.com/python/cpython/commit/8c49d713851e95bcc03de6226d1bd69741edae7c commit: 8c49d713851e95bcc03de6226d1bd69741edae7c branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-22T04:14:25-07:00 summary: Doc: add the missing ".tp_flags" in type definition (GH-12902) (cherry picked from commit 662ebd2ab2047aeae9689ad254b39915c38069fd) Co-authored-by: Wu Wei files: M Doc/extending/newtypes_tutorial.rst diff --git a/Doc/extending/newtypes_tutorial.rst b/Doc/extending/newtypes_tutorial.rst index ac48637bbee9..07c2ef718aed 100644 --- a/Doc/extending/newtypes_tutorial.rst +++ b/Doc/extending/newtypes_tutorial.rst @@ -92,6 +92,7 @@ The second bit is the definition of the type object. :: .tp_doc = "Custom objects", .tp_basicsize = sizeof(CustomObject), .tp_itemsize = 0, + .tp_flags = Py_TPFLAGS_DEFAULT, .tp_new = PyType_GenericNew, }; From webhook-mailer at python.org Mon Apr 22 08:08:32 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Mon, 22 Apr 2019 12:08:32 -0000 Subject: [Python-checkins] bpo-36523: Add docstring to io.IOBase.writelines (GH-12683) Message-ID: https://github.com/python/cpython/commit/1100ae8f3fb1d5a8f6f5638919196bd8fab66de9 commit: 1100ae8f3fb1d5a8f6f5638919196bd8fab66de9 branch: 3.7 author: Marcin Niemira committer: Inada Naoki date: 2019-04-22T21:08:24+09:00 summary: bpo-36523: Add docstring to io.IOBase.writelines (GH-12683) (cherry picked from commit ab86521a9d9999731e39bd9056420bb7774fd144) files: A Misc/NEWS.d/next/Documentation/2019-04-04-19-11-47.bpo-36523.sG1Tr4.rst M Lib/_pyio.py M Modules/_io/clinic/iobase.c.h M Modules/_io/iobase.c diff --git a/Lib/_pyio.py b/Lib/_pyio.py index 0c0cb84a48e2..afbd48e0005d 100644 --- a/Lib/_pyio.py +++ b/Lib/_pyio.py @@ -546,6 +546,11 @@ def readlines(self, hint=None): return lines def writelines(self, lines): + """Write a list of lines to the stream. + + Line separators are not added, so it is usual for each of the lines + provided to have a line separator at the end. + """ self._checkClosed() for line in lines: self.write(line) diff --git a/Misc/NEWS.d/next/Documentation/2019-04-04-19-11-47.bpo-36523.sG1Tr4.rst b/Misc/NEWS.d/next/Documentation/2019-04-04-19-11-47.bpo-36523.sG1Tr4.rst new file mode 100644 index 000000000000..9355f607d760 --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2019-04-04-19-11-47.bpo-36523.sG1Tr4.rst @@ -0,0 +1 @@ +Add docstring for io.IOBase.writelines(). 
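A quick illustration of the behaviour the new docstring describes (io.StringIO is used here only as a convenient in-memory stream):

    import io

    buf = io.StringIO()
    # writelines() does not add line separators; supply them yourself.
    buf.writelines(['first line\n', 'second line\n'])
    print(buf.getvalue(), end='')   # prints both lines exactly as written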
diff --git a/Modules/_io/clinic/iobase.c.h b/Modules/_io/clinic/iobase.c.h index e6f72cd5ff9e..66836bfdf45c 100644 --- a/Modules/_io/clinic/iobase.c.h +++ b/Modules/_io/clinic/iobase.c.h @@ -230,7 +230,11 @@ _io__IOBase_readlines(PyObject *self, PyObject *const *args, Py_ssize_t nargs) PyDoc_STRVAR(_io__IOBase_writelines__doc__, "writelines($self, lines, /)\n" "--\n" -"\n"); +"\n" +"Write a list of lines to stream.\n" +"\n" +"Line separators are not added, so it is usual for each of the\n" +"lines provided to have a line separator at the end."); #define _IO__IOBASE_WRITELINES_METHODDEF \ {"writelines", (PyCFunction)_io__IOBase_writelines, METH_O, _io__IOBase_writelines__doc__}, @@ -279,4 +283,4 @@ _io__RawIOBase_readall(PyObject *self, PyObject *Py_UNUSED(ignored)) { return _io__RawIOBase_readall_impl(self); } -/*[clinic end generated code: output=64989ec3dbf44a7c input=a9049054013a1b77]*/ +/*[clinic end generated code: output=6f8d078401fb9d48 input=a9049054013a1b77]*/ diff --git a/Modules/_io/iobase.c b/Modules/_io/iobase.c index 5b71732ef19c..6995c1570cdf 100644 --- a/Modules/_io/iobase.c +++ b/Modules/_io/iobase.c @@ -738,11 +738,16 @@ _io__IOBase_readlines_impl(PyObject *self, Py_ssize_t hint) _io._IOBase.writelines lines: object / + +Write a list of lines to stream. + +Line separators are not added, so it is usual for each of the +lines provided to have a line separator at the end. [clinic start generated code]*/ static PyObject * _io__IOBase_writelines(PyObject *self, PyObject *lines) -/*[clinic end generated code: output=976eb0a9b60a6628 input=432e729a8450b3cb]*/ +/*[clinic end generated code: output=976eb0a9b60a6628 input=cac3fc8864183359]*/ { PyObject *iter, *res; From webhook-mailer at python.org Mon Apr 22 09:29:20 2019 From: webhook-mailer at python.org (Berker Peksag) Date: Mon, 22 Apr 2019 13:29:20 -0000 Subject: [Python-checkins] bpo-36690: Fix typo in Tools/demo/rpython.py (GH-12903) Message-ID: https://github.com/python/cpython/commit/d59b662e490d3fae662c5f81fa5248f0445d2158 commit: d59b662e490d3fae662c5f81fa5248f0445d2158 branch: master author: ??? <752736341 at qq.com> committer: Berker Peksag date: 2019-04-22T16:28:57+03:00 summary: bpo-36690: Fix typo in Tools/demo/rpython.py (GH-12903) files: M Tools/demo/rpython.py diff --git a/Tools/demo/rpython.py b/Tools/demo/rpython.py index 8d7e2747636c..11f72cb3dd26 100755 --- a/Tools/demo/rpython.py +++ b/Tools/demo/rpython.py @@ -19,7 +19,7 @@ def main(): port = PORT i = host.find(':') if i >= 0: - port = int(port[i+1:]) + port = int(host[i+1:]) host = host[:i] command = ' '.join(sys.argv[2:]) with socket(AF_INET, SOCK_STREAM) as s: From webhook-mailer at python.org Mon Apr 22 09:54:37 2019 From: webhook-mailer at python.org (Berker Peksag) Date: Mon, 22 Apr 2019 13:54:37 -0000 Subject: [Python-checkins] bpo-36690: Fix typo in Tools/demo/rpython.py (GH-12903) Message-ID: https://github.com/python/cpython/commit/5407aaf18b8d33d0a327991db366457ac6fead2d commit: 5407aaf18b8d33d0a327991db366457ac6fead2d branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: Berker Peksag date: 2019-04-22T16:53:53+03:00 summary: bpo-36690: Fix typo in Tools/demo/rpython.py (GH-12903) (cherry picked from commit d59b662e490d3fae662c5f81fa5248f0445d2158) Co-authored-by: ??? 
<752736341 at qq.com> files: M Tools/demo/rpython.py diff --git a/Tools/demo/rpython.py b/Tools/demo/rpython.py index 5e7bc0a27d11..71d436c46dbd 100755 --- a/Tools/demo/rpython.py +++ b/Tools/demo/rpython.py @@ -19,7 +19,7 @@ def main(): port = PORT i = host.find(':') if i >= 0: - port = int(port[i+1:]) + port = int(host[i+1:]) host = host[:i] command = ' '.join(sys.argv[2:]) s = socket(AF_INET, SOCK_STREAM) From webhook-mailer at python.org Mon Apr 22 11:46:51 2019 From: webhook-mailer at python.org (Berker Peksag) Date: Mon, 22 Apr 2019 15:46:51 -0000 Subject: [Python-checkins] bpo-29734: Cleanup test_getfinalpathname_handles test (GH-12908) Message-ID: https://github.com/python/cpython/commit/6ef726af3ec106013c7c4261ddb306854f2b1778 commit: 6ef726af3ec106013c7c4261ddb306854f2b1778 branch: master author: Berker Peksag committer: GitHub date: 2019-04-22T18:46:28+03:00 summary: bpo-29734: Cleanup test_getfinalpathname_handles test (GH-12908) files: M Lib/test/test_os.py diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py index 4c620ccae9c8..bbadb81069b9 100644 --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -2337,19 +2337,10 @@ def test_unlink_removes_junction(self): @unittest.skipUnless(sys.platform == "win32", "Win32 specific tests") class Win32NtTests(unittest.TestCase): - def setUp(self): - from test import support - self.nt = support.import_module('nt') - pass - - def tearDown(self): - pass - def test_getfinalpathname_handles(self): - try: - import ctypes, ctypes.wintypes - except ImportError: - raise unittest.SkipTest('ctypes module is required for this test') + nt = support.import_module('nt') + ctypes = support.import_module('ctypes') + import ctypes.wintypes kernel = ctypes.WinDLL('Kernel32.dll', use_last_error=True) kernel.GetCurrentProcess.restype = ctypes.wintypes.HANDLE @@ -2368,21 +2359,23 @@ def test_getfinalpathname_handles(self): before_count = handle_count.value # The first two test the error path, __file__ tests the success path - filenames = [ r'\\?\C:', - r'\\?\NUL', - r'\\?\CONIN', - __file__ ] + filenames = [ + r'\\?\C:', + r'\\?\NUL', + r'\\?\CONIN', + __file__, + ] - for i in range(10): + for _ in range(10): for name in filenames: try: - tmp = self.nt._getfinalpathname(name) - except: + nt._getfinalpathname(name) + except Exception: # Failure is expected pass try: - tmp = os.stat(name) - except: + os.stat(name) + except Exception: pass ok = kernel.GetProcessHandleCount(hproc, ctypes.byref(handle_count)) From webhook-mailer at python.org Mon Apr 22 13:01:50 2019 From: webhook-mailer at python.org (Steve Dower) Date: Mon, 22 Apr 2019 17:01:50 -0000 Subject: [Python-checkins] bpo-9194: Fix the bounds checking in winreg.c's fixupMultiSZ() (GH-12687) Message-ID: https://github.com/python/cpython/commit/56ed86490cb8221c874d432461d77702437f63e5 commit: 56ed86490cb8221c874d432461d77702437f63e5 branch: master author: Zackery Spytz committer: Steve Dower date: 2019-04-22T10:01:32-07:00 summary: bpo-9194: Fix the bounds checking in winreg.c's fixupMultiSZ() (GH-12687) files: M PC/winreg.c diff --git a/PC/winreg.c b/PC/winreg.c index ae0c292b7172..28b316ae2f4c 100644 --- a/PC/winreg.c +++ b/PC/winreg.c @@ -521,7 +521,7 @@ fixupMultiSZ(wchar_t **str, wchar_t *data, int len) Q = data + len; for (P = data, i = 0; P < Q && *P != '\0'; P++, i++) { str[i] = P; - for(; *P != '\0'; P++) + for (; P < Q && *P != '\0'; P++) ; } } From webhook-mailer at python.org Mon Apr 22 13:09:37 2019 From: webhook-mailer at python.org (Steve Dower) Date: Mon, 22 Apr 2019 17:09:37 -0000 
Subject: [Python-checkins] bpo-36672: Fix a compiler warning in winreg.SetValue() (GH-12882) Message-ID: https://github.com/python/cpython/commit/34366b7f914eedbcc33aebe882098a2199ffaf82 commit: 34366b7f914eedbcc33aebe882098a2199ffaf82 branch: master author: Zackery Spytz committer: Steve Dower date: 2019-04-22T10:08:05-07:00 summary: bpo-36672: Fix a compiler warning in winreg.SetValue() (GH-12882) files: M PC/winreg.c diff --git a/PC/winreg.c b/PC/winreg.c index 28b316ae2f4c..5469fcba0444 100644 --- a/PC/winreg.c +++ b/PC/winreg.c @@ -1614,7 +1614,7 @@ winreg_SetValue_impl(PyObject *module, HKEY key, const Py_UNICODE *sub_key, } Py_BEGIN_ALLOW_THREADS - rc = RegSetValueW(key, sub_key, REG_SZ, value, value_length+1); + rc = RegSetValueW(key, sub_key, REG_SZ, value, (DWORD)(value_length + 1)); Py_END_ALLOW_THREADS if (rc != ERROR_SUCCESS) return PyErr_SetFromWindowsErrWithFunction(rc, "RegSetValue"); From webhook-mailer at python.org Mon Apr 22 13:21:28 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 22 Apr 2019 17:21:28 -0000 Subject: [Python-checkins] bpo-9194: Fix the bounds checking in winreg.c's fixupMultiSZ() (GH-12687) Message-ID: https://github.com/python/cpython/commit/7038deed09784a03e2a7bad500f0054d29876ae7 commit: 7038deed09784a03e2a7bad500f0054d29876ae7 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-22T10:20:33-07:00 summary: bpo-9194: Fix the bounds checking in winreg.c's fixupMultiSZ() (GH-12687) (cherry picked from commit 56ed86490cb8221c874d432461d77702437f63e5) Co-authored-by: Zackery Spytz files: M PC/winreg.c diff --git a/PC/winreg.c b/PC/winreg.c index e3801b257b07..96dd4ef059be 100644 --- a/PC/winreg.c +++ b/PC/winreg.c @@ -520,7 +520,7 @@ fixupMultiSZ(wchar_t **str, wchar_t *data, int len) Q = data + len; for (P = data, i = 0; P < Q && *P != '\0'; P++, i++) { str[i] = P; - for(; *P != '\0'; P++) + for (; P < Q && *P != '\0'; P++) ; } } From webhook-mailer at python.org Mon Apr 22 13:32:44 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 22 Apr 2019 17:32:44 -0000 Subject: [Python-checkins] bpo-36672: Fix a compiler warning in winreg.SetValue() (GH-12882) Message-ID: https://github.com/python/cpython/commit/36aecc0079af0ec65add8ffb5bcdea9a594baca4 commit: 36aecc0079af0ec65add8ffb5bcdea9a594baca4 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-22T10:31:46-07:00 summary: bpo-36672: Fix a compiler warning in winreg.SetValue() (GH-12882) (cherry picked from commit 34366b7f914eedbcc33aebe882098a2199ffaf82) Co-authored-by: Zackery Spytz files: M PC/winreg.c diff --git a/PC/winreg.c b/PC/winreg.c index 96dd4ef059be..1021609e582f 100644 --- a/PC/winreg.c +++ b/PC/winreg.c @@ -1609,7 +1609,7 @@ winreg_SetValue_impl(PyObject *module, HKEY key, const Py_UNICODE *sub_key, } Py_BEGIN_ALLOW_THREADS - rc = RegSetValueW(key, sub_key, REG_SZ, value, value_length+1); + rc = RegSetValueW(key, sub_key, REG_SZ, value, (DWORD)(value_length + 1)); Py_END_ALLOW_THREADS if (rc != ERROR_SUCCESS) return PyErr_SetFromWindowsErrWithFunction(rc, "RegSetValue"); From webhook-mailer at python.org Mon Apr 22 14:13:25 2019 From: webhook-mailer at python.org (Steve Dower) Date: Mon, 22 Apr 2019 18:13:25 -0000 Subject: [Python-checkins] bpo-33608: Normalize atomic macros so that they all expect an atomic struct (GH-12877) Message-ID: 
https://github.com/python/cpython/commit/264490797ad936868c54b3d4ceb0343e7ba4be76 commit: 264490797ad936868c54b3d4ceb0343e7ba4be76 branch: master author: Steve Dower committer: GitHub date: 2019-04-22T11:13:11-07:00 summary: bpo-33608: Normalize atomic macros so that they all expect an atomic struct (GH-12877) files: M Include/internal/pycore_atomic.h diff --git a/Include/internal/pycore_atomic.h b/Include/internal/pycore_atomic.h index b3ec44c1bcfe..336bc3fec27e 100644 --- a/Include/internal/pycore_atomic.h +++ b/Include/internal/pycore_atomic.h @@ -261,13 +261,13 @@ typedef struct _Py_atomic_int { #define _Py_atomic_store_64bit(ATOMIC_VAL, NEW_VAL, ORDER) \ switch (ORDER) { \ case _Py_memory_order_acquire: \ - _InterlockedExchange64_HLEAcquire((__int64 volatile*)ATOMIC_VAL, (__int64)NEW_VAL); \ + _InterlockedExchange64_HLEAcquire((__int64 volatile*)&((ATOMIC_VAL)->_value), (__int64)(NEW_VAL)); \ break; \ case _Py_memory_order_release: \ - _InterlockedExchange64_HLERelease((__int64 volatile*)ATOMIC_VAL, (__int64)NEW_VAL); \ + _InterlockedExchange64_HLERelease((__int64 volatile*)&((ATOMIC_VAL)->_value), (__int64)(NEW_VAL)); \ break; \ default: \ - _InterlockedExchange64((__int64 volatile*)ATOMIC_VAL, (__int64)NEW_VAL); \ + _InterlockedExchange64((__int64 volatile*)&((ATOMIC_VAL)->_value), (__int64)(NEW_VAL)); \ break; \ } #else @@ -277,13 +277,13 @@ typedef struct _Py_atomic_int { #define _Py_atomic_store_32bit(ATOMIC_VAL, NEW_VAL, ORDER) \ switch (ORDER) { \ case _Py_memory_order_acquire: \ - _InterlockedExchange_HLEAcquire((volatile long*)ATOMIC_VAL, (int)NEW_VAL); \ + _InterlockedExchange_HLEAcquire((volatile long*)&((ATOMIC_VAL)->_value), (int)(NEW_VAL)); \ break; \ case _Py_memory_order_release: \ - _InterlockedExchange_HLERelease((volatile long*)ATOMIC_VAL, (int)NEW_VAL); \ + _InterlockedExchange_HLERelease((volatile long*)&((ATOMIC_VAL)->_value), (int)(NEW_VAL)); \ break; \ default: \ - _InterlockedExchange((volatile long*)ATOMIC_VAL, (int)NEW_VAL); \ + _InterlockedExchange((volatile long*)&((ATOMIC_VAL)->_value), (int)(NEW_VAL)); \ break; \ } @@ -292,7 +292,7 @@ typedef struct _Py_atomic_int { gil_created() uses -1 as a sentinel value, if this returns a uintptr_t it will do an unsigned compare and crash */ -inline intptr_t _Py_atomic_load_64bit(volatile uintptr_t* value, int order) { +inline intptr_t _Py_atomic_load_64bit_impl(volatile uintptr_t* value, int order) { __int64 old; switch (order) { case _Py_memory_order_acquire: @@ -323,11 +323,14 @@ inline intptr_t _Py_atomic_load_64bit(volatile uintptr_t* value, int order) { return old; } +#define _Py_atomic_load_64bit(ATOMIC_VAL, ORDER) \ + _Py_atomic_load_64bit_impl((volatile uintptr_t*)&((ATOMIC_VAL)->_value), (ORDER)) + #else -#define _Py_atomic_load_64bit(ATOMIC_VAL, ORDER) *(ATOMIC_VAL) +#define _Py_atomic_load_64bit(ATOMIC_VAL, ORDER) ((ATOMIC_VAL)->_value) #endif -inline int _Py_atomic_load_32bit(volatile int* value, int order) { +inline int _Py_atomic_load_32bit_impl(volatile int* value, int order) { long old; switch (order) { case _Py_memory_order_acquire: @@ -358,16 +361,19 @@ inline int _Py_atomic_load_32bit(volatile int* value, int order) { return old; } +#define _Py_atomic_load_32bit(ATOMIC_VAL, ORDER) \ + _Py_atomic_load_32bit_impl((volatile int*)&((ATOMIC_VAL)->_value), (ORDER)) + #define _Py_atomic_store_explicit(ATOMIC_VAL, NEW_VAL, ORDER) \ if (sizeof((ATOMIC_VAL)->_value) == 8) { \ - _Py_atomic_store_64bit((volatile long long*)&((ATOMIC_VAL)->_value), NEW_VAL, ORDER) } else { \ - _Py_atomic_store_32bit((volatile 
long*)&((ATOMIC_VAL)->_value), NEW_VAL, ORDER) } + _Py_atomic_store_64bit((ATOMIC_VAL), NEW_VAL, ORDER) } else { \ + _Py_atomic_store_32bit((ATOMIC_VAL), NEW_VAL, ORDER) } #define _Py_atomic_load_explicit(ATOMIC_VAL, ORDER) \ ( \ sizeof((ATOMIC_VAL)->_value) == 8 ? \ - _Py_atomic_load_64bit((volatile long long*)&((ATOMIC_VAL)->_value), ORDER) : \ - _Py_atomic_load_32bit((volatile long*)&((ATOMIC_VAL)->_value), ORDER) \ + _Py_atomic_load_64bit((ATOMIC_VAL), ORDER) : \ + _Py_atomic_load_32bit((ATOMIC_VAL), ORDER) \ ) #elif defined(_M_ARM) || defined(_M_ARM64) typedef enum _Py_memory_order { @@ -422,7 +428,7 @@ typedef struct _Py_atomic_int { gil_created() uses -1 as a sentinel value, if this returns a uintptr_t it will do an unsigned compare and crash */ -inline intptr_t _Py_atomic_load_64bit(volatile uintptr_t* value, int order) { +inline intptr_t _Py_atomic_load_64bit_impl(volatile uintptr_t* value, int order) { uintptr_t old; switch (order) { case _Py_memory_order_acquire: @@ -453,11 +459,14 @@ inline intptr_t _Py_atomic_load_64bit(volatile uintptr_t* value, int order) { return old; } +#define _Py_atomic_load_64bit(ATOMIC_VAL, ORDER) \ + _Py_atomic_load_64bit_impl((volatile uintptr_t*)&((ATOMIC_VAL)->_value), (ORDER)) + #else -#define _Py_atomic_load_64bit(ATOMIC_VAL, ORDER) *(ATOMIC_VAL) +#define _Py_atomic_load_64bit(ATOMIC_VAL, ORDER) ((ATOMIC_VAL)->_value) #endif -inline int _Py_atomic_load_32bit(volatile int* value, int order) { +inline int _Py_atomic_load_32bit_impl(volatile int* value, int order) { int old; switch (order) { case _Py_memory_order_acquire: @@ -488,16 +497,19 @@ inline int _Py_atomic_load_32bit(volatile int* value, int order) { return old; } +#define _Py_atomic_load_32bit(ATOMIC_VAL, ORDER) \ + _Py_atomic_load_32bit_impl((volatile int*)&((ATOMIC_VAL)->_value), (ORDER)) + #define _Py_atomic_store_explicit(ATOMIC_VAL, NEW_VAL, ORDER) \ if (sizeof((ATOMIC_VAL)->_value) == 8) { \ - _Py_atomic_store_64bit(&((ATOMIC_VAL)->_value), NEW_VAL, ORDER) } else { \ - _Py_atomic_store_32bit(&((ATOMIC_VAL)->_value), NEW_VAL, ORDER) } + _Py_atomic_store_64bit((ATOMIC_VAL), (NEW_VAL), (ORDER)) } else { \ + _Py_atomic_store_32bit((ATOMIC_VAL), (NEW_VAL), (ORDER)) } #define _Py_atomic_load_explicit(ATOMIC_VAL, ORDER) \ ( \ sizeof((ATOMIC_VAL)->_value) == 8 ? \ - _Py_atomic_load_64bit(&((ATOMIC_VAL)->_value), ORDER) : \ - _Py_atomic_load_32bit(&((ATOMIC_VAL)->_value), ORDER) \ + _Py_atomic_load_64bit((ATOMIC_VAL), (ORDER)) : \ + _Py_atomic_load_32bit((ATOMIC_VAL), (ORDER)) \ ) #endif #else /* !gcc x86 !_msc_ver */ @@ -529,16 +541,16 @@ typedef struct _Py_atomic_int { /* Standardized shortcuts. 
*/ #define _Py_atomic_store(ATOMIC_VAL, NEW_VAL) \ - _Py_atomic_store_explicit(ATOMIC_VAL, NEW_VAL, _Py_memory_order_seq_cst) + _Py_atomic_store_explicit((ATOMIC_VAL), (NEW_VAL), _Py_memory_order_seq_cst) #define _Py_atomic_load(ATOMIC_VAL) \ - _Py_atomic_load_explicit(ATOMIC_VAL, _Py_memory_order_seq_cst) + _Py_atomic_load_explicit((ATOMIC_VAL), _Py_memory_order_seq_cst) /* Python-local extensions */ #define _Py_atomic_store_relaxed(ATOMIC_VAL, NEW_VAL) \ - _Py_atomic_store_explicit(ATOMIC_VAL, NEW_VAL, _Py_memory_order_relaxed) + _Py_atomic_store_explicit((ATOMIC_VAL), (NEW_VAL), _Py_memory_order_relaxed) #define _Py_atomic_load_relaxed(ATOMIC_VAL) \ - _Py_atomic_load_explicit(ATOMIC_VAL, _Py_memory_order_relaxed) + _Py_atomic_load_explicit((ATOMIC_VAL), _Py_memory_order_relaxed) #ifdef __cplusplus } From webhook-mailer at python.org Mon Apr 22 14:40:23 2019 From: webhook-mailer at python.org (Steve Dower) Date: Mon, 22 Apr 2019 18:40:23 -0000 Subject: [Python-checkins] Fixes platform.win32_ver on non-Windows platforms (GH-12912) Message-ID: https://github.com/python/cpython/commit/d307d05350e26a7a5f8f74db9af632a15215b50f commit: d307d05350e26a7a5f8f74db9af632a15215b50f branch: master author: Steve Dower committer: GitHub date: 2019-04-22T11:40:12-07:00 summary: Fixes platform.win32_ver on non-Windows platforms (GH-12912) files: M Lib/platform.py diff --git a/Lib/platform.py b/Lib/platform.py index 2ab68aed7861..21defd1095d2 100755 --- a/Lib/platform.py +++ b/Lib/platform.py @@ -339,10 +339,6 @@ def win32_ver(release='', version='', csd='', ptype=''): from sys import getwindowsversion except ImportError: return release, version, csd, ptype - try: - from winreg import OpenKeyEx, QueryValueEx, CloseKey, HKEY_LOCAL_MACHINE - except ImportError: - from _winreg import OpenKeyEx, QueryValueEx, CloseKey, HKEY_LOCAL_MACHINE winver = getwindowsversion() maj, min, build = winver.platform_version or winver[:3] @@ -368,16 +364,20 @@ def win32_ver(release='', version='', csd='', ptype=''): _WIN32_SERVER_RELEASES.get((maj, None)) or release) - key = None try: - key = OpenKeyEx(HKEY_LOCAL_MACHINE, - r'SOFTWARE\Microsoft\Windows NT\CurrentVersion') - ptype = QueryValueEx(key, 'CurrentType')[0] - except: + try: + import winreg + except ImportError: + import _winreg as winreg + except ImportError: pass - finally: - if key: - CloseKey(key) + else: + try: + cvkey = r'SOFTWARE\Microsoft\Windows NT\CurrentVersion' + with winreg.OpenKeyEx(HKEY_LOCAL_MACHINE, cvkey) as key: + ptype = QueryValueEx(key, 'CurrentType')[0] + except: + pass return release, version, csd, ptype From webhook-mailer at python.org Mon Apr 22 14:45:41 2019 From: webhook-mailer at python.org (Steve Dower) Date: Mon, 22 Apr 2019 18:45:41 -0000 Subject: [Python-checkins] bpo-36678: Rename duplicate tests in test_dataclasses (GH-12899) Message-ID: https://github.com/python/cpython/commit/be372d73b4c59026134a7e722ece34367c3bd3b6 commit: be372d73b4c59026134a7e722ece34367c3bd3b6 branch: master author: Windson yang committer: Steve Dower date: 2019-04-22T11:45:34-07:00 summary: bpo-36678: Rename duplicate tests in test_dataclasses (GH-12899) files: M Lib/test/test_dataclasses.py diff --git a/Lib/test/test_dataclasses.py b/Lib/test/test_dataclasses.py index 9c83459f09e7..d320a969876e 100755 --- a/Lib/test/test_dataclasses.py +++ b/Lib/test/test_dataclasses.py @@ -697,7 +697,7 @@ class C: y: int self.assertNotEqual(Point(1, 3), C(1, 3)) - def test_not_tuple(self): + def test_not_other_dataclass(self): # Test that some of the problems with 
namedtuple don't happen # here. @dataclass @@ -1403,7 +1403,7 @@ class GroupDict: self.assertEqual(asdict(gd), {'id': 0, 'users': {'first': {'name': 'Alice', 'id': 1}, 'second': {'name': 'Bob', 'id': 2}}}) - def test_helper_asdict_builtin_containers(self): + def test_helper_asdict_builtin_object_containers(self): @dataclass class Child: d: object @@ -1576,7 +1576,7 @@ class GroupDict: self.assertEqual(astuple(gt), (0, (('Alice', 1), ('Bob', 2)))) self.assertEqual(astuple(gd), (0, {'first': ('Alice', 1), 'second': ('Bob', 2)})) - def test_helper_astuple_builtin_containers(self): + def test_helper_astuple_builtin_object_containers(self): @dataclass class Child: d: object @@ -3242,18 +3242,6 @@ class E: "..D(f=TestReplace.test_recursive_repr_indirection_two" "..E(f=...)))") - def test_recursive_repr_two_attrs(self): - @dataclass - class C: - f: "C" - g: "C" - - c = C(None, None) - c.f = c - c.g = c - self.assertEqual(repr(c), "TestReplace.test_recursive_repr_two_attrs" - "..C(f=..., g=...)") - def test_recursive_repr_misc_attrs(self): @dataclass class C: From webhook-mailer at python.org Mon Apr 22 14:47:22 2019 From: webhook-mailer at python.org (Steve Dower) Date: Mon, 22 Apr 2019 18:47:22 -0000 Subject: [Python-checkins] bpo-36683: Rename duplicate test_io_encoding to test_pyio_encoding (GH-12896) Message-ID: https://github.com/python/cpython/commit/f51dd4feb0794b7659f281173da9d8a04317d134 commit: f51dd4feb0794b7659f281173da9d8a04317d134 branch: master author: Windson yang committer: Steve Dower date: 2019-04-22T11:46:27-07:00 summary: bpo-36683: Rename duplicate test_io_encoding to test_pyio_encoding (GH-12896) files: M Lib/test/test_utf8_mode.py diff --git a/Lib/test/test_utf8_mode.py b/Lib/test/test_utf8_mode.py index 2429b00459be..bdb93457cfc4 100644 --- a/Lib/test/test_utf8_mode.py +++ b/Lib/test/test_utf8_mode.py @@ -195,7 +195,7 @@ def check_io_encoding(self, module): def test_io_encoding(self): self.check_io_encoding('io') - def test_io_encoding(self): + def test_pyio_encoding(self): self.check_io_encoding('_pyio') def test_locale_getpreferredencoding(self): From webhook-mailer at python.org Mon Apr 22 14:48:51 2019 From: webhook-mailer at python.org (Steve Dower) Date: Mon, 22 Apr 2019 18:48:51 -0000 Subject: [Python-checkins] bpo-36682: Rename duplicate tests in test_sys_setprofile (GH-12895) Message-ID: https://github.com/python/cpython/commit/007d0b0188a16273a5850d89857ecef97c1f4595 commit: 007d0b0188a16273a5850d89857ecef97c1f4595 branch: master author: Windson yang committer: Steve Dower date: 2019-04-22T11:48:12-07:00 summary: bpo-36682: Rename duplicate tests in test_sys_setprofile (GH-12895) files: M Lib/test/test_sys_setprofile.py diff --git a/Lib/test/test_sys_setprofile.py b/Lib/test/test_sys_setprofile.py index c2ecf8eeed9f..b64bcbc5b686 100644 --- a/Lib/test/test_sys_setprofile.py +++ b/Lib/test/test_sys_setprofile.py @@ -351,7 +351,7 @@ def f(p): (1, 'return', f_ident)]) # Test an invalid call (bpo-34125) - def test_unbound_method_no_args(self): + def test_unbound_method_no_keyword_args(self): kwargs = {} def f(p): dict.get(**kwargs) @@ -360,7 +360,7 @@ def f(p): (1, 'return', f_ident)]) # Test an invalid call (bpo-34125) - def test_unbound_method_invalid_args(self): + def test_unbound_method_invalid_keyword_args(self): kwargs = {} def f(p): dict.get(print, 42, **kwargs) From webhook-mailer at python.org Mon Apr 22 14:50:35 2019 From: webhook-mailer at python.org (Steve Dower) Date: Mon, 22 Apr 2019 18:50:35 -0000 Subject: [Python-checkins] bpo-36681: Remove duplicate 
test_regression_29220 function (GH-12894) Message-ID: https://github.com/python/cpython/commit/3d6f61edb8a6161148b3cf3eeb291408cc91154a commit: 3d6f61edb8a6161148b3cf3eeb291408cc91154a branch: master author: Windson yang committer: Steve Dower date: 2019-04-22T11:49:11-07:00 summary: bpo-36681: Remove duplicate test_regression_29220 function (GH-12894) files: M Lib/test/test_logging.py diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py index 1805249e48bc..82cbedada472 100644 --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -315,12 +315,6 @@ def test_regression_22386(self): self.assertEqual(logging.getLevelName('INFO'), logging.INFO) self.assertEqual(logging.getLevelName(logging.INFO), 'INFO') - def test_regression_29220(self): - """See issue #29220 for more information.""" - logging.addLevelName(logging.INFO, '') - self.addCleanup(logging.addLevelName, logging.INFO, 'INFO') - self.assertEqual(logging.getLevelName(logging.INFO), '') - def test_issue27935(self): fatal = logging.getLevelName('FATAL') self.assertEqual(fatal, logging.FATAL) From webhook-mailer at python.org Mon Apr 22 14:52:11 2019 From: webhook-mailer at python.org (Steve Dower) Date: Mon, 22 Apr 2019 18:52:11 -0000 Subject: [Python-checkins] bpo-36680: Rename duplicate test_source_from_cache_path_like_arg function (GH-12893) Message-ID: https://github.com/python/cpython/commit/c442b1c486db5cb0aa589b43f73385d9cc5706e3 commit: c442b1c486db5cb0aa589b43f73385d9cc5706e3 branch: master author: Windson yang committer: Steve Dower date: 2019-04-22T11:50:24-07:00 summary: bpo-36680: Rename duplicate test_source_from_cache_path_like_arg function (GH-12893) files: M Lib/test/test_importlib/test_util.py diff --git a/Lib/test/test_importlib/test_util.py b/Lib/test/test_importlib/test_util.py index d134e3c3b04d..8739eea841d0 100644 --- a/Lib/test/test_importlib/test_util.py +++ b/Lib/test/test_importlib/test_util.py @@ -682,7 +682,7 @@ def test_sep_altsep_and_sep_cache_from_source(self): @unittest.skipIf(sys.implementation.cache_tag is None, 'requires sys.implementation.cache_tag not be None') - def test_source_from_cache_path_like_arg(self): + def test_cache_from_source_path_like_arg(self): path = pathlib.PurePath('foo', 'bar', 'baz', 'qux.py') expect = os.path.join('foo', 'bar', 'baz', '__pycache__', 'qux.{}.pyc'.format(self.tag)) From webhook-mailer at python.org Mon Apr 22 14:52:25 2019 From: webhook-mailer at python.org (Steve Dower) Date: Mon, 22 Apr 2019 18:52:25 -0000 Subject: [Python-checkins] bpo-36679: Rename duplicate test_class_getitem function (GH-12892) Message-ID: https://github.com/python/cpython/commit/d437012cdd4a38b5b3d05f139d5f0a28196e4769 commit: d437012cdd4a38b5b3d05f139d5f0a28196e4769 branch: master author: Windson yang committer: Steve Dower date: 2019-04-22T11:51:06-07:00 summary: bpo-36679: Rename duplicate test_class_getitem function (GH-12892) files: M Lib/test/test_genericclass.py diff --git a/Lib/test/test_genericclass.py b/Lib/test/test_genericclass.py index 37a87bc6815e..27420d4f2bad 100644 --- a/Lib/test/test_genericclass.py +++ b/Lib/test/test_genericclass.py @@ -158,7 +158,7 @@ def __class_getitem__(*args, **kwargs): self.assertEqual(getitem_args[0], (C, (int, str))) self.assertEqual(getitem_args[1], {}) - def test_class_getitem(self): + def test_class_getitem_format(self): class C: def __class_getitem__(cls, item): return f'C[{item.__name__}]' From webhook-mailer at python.org Mon Apr 22 15:01:27 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 22 
Apr 2019 19:01:27 -0000 Subject: [Python-checkins] Fixes platform.win32_ver on non-Windows platforms (GH-12912) Message-ID: https://github.com/python/cpython/commit/9344d74f7bf1039326e0e15c6d94f0d9e0f63d84 commit: 9344d74f7bf1039326e0e15c6d94f0d9e0f63d84 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-22T11:59:08-07:00 summary: Fixes platform.win32_ver on non-Windows platforms (GH-12912) (cherry picked from commit d307d05350e26a7a5f8f74db9af632a15215b50f) Co-authored-by: Steve Dower files: M Lib/platform.py diff --git a/Lib/platform.py b/Lib/platform.py index 0c6fc03efa9f..8ed2807df109 100755 --- a/Lib/platform.py +++ b/Lib/platform.py @@ -535,10 +535,6 @@ def win32_ver(release='', version='', csd='', ptype=''): from sys import getwindowsversion except ImportError: return release, version, csd, ptype - try: - from winreg import OpenKeyEx, QueryValueEx, CloseKey, HKEY_LOCAL_MACHINE - except ImportError: - from _winreg import OpenKeyEx, QueryValueEx, CloseKey, HKEY_LOCAL_MACHINE winver = getwindowsversion() maj, min, build = winver.platform_version or winver[:3] @@ -564,16 +560,20 @@ def win32_ver(release='', version='', csd='', ptype=''): _WIN32_SERVER_RELEASES.get((maj, None)) or release) - key = None try: - key = OpenKeyEx(HKEY_LOCAL_MACHINE, - r'SOFTWARE\Microsoft\Windows NT\CurrentVersion') - ptype = QueryValueEx(key, 'CurrentType')[0] - except: + try: + import winreg + except ImportError: + import _winreg as winreg + except ImportError: pass - finally: - if key: - CloseKey(key) + else: + try: + cvkey = r'SOFTWARE\Microsoft\Windows NT\CurrentVersion' + with winreg.OpenKeyEx(HKEY_LOCAL_MACHINE, cvkey) as key: + ptype = QueryValueEx(key, 'CurrentType')[0] + except: + pass return release, version, csd, ptype From webhook-mailer at python.org Mon Apr 22 19:36:01 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 22 Apr 2019 23:36:01 -0000 Subject: [Python-checkins] [2.7] bpo-9194: Fix the bounds checking in winreg.c's fixupMultiSZ() (GH-12687) (GH-12916) Message-ID: https://github.com/python/cpython/commit/84efbaecaf50b771cc7a95fd9dd9602bd31de305 commit: 84efbaecaf50b771cc7a95fd9dd9602bd31de305 branch: 2.7 author: Zackery Spytz committer: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> date: 2019-04-22T16:35:55-07:00 summary: [2.7] bpo-9194: Fix the bounds checking in winreg.c's fixupMultiSZ() (GH-12687) (GH-12916) (cherry picked from commit 56ed86490cb8221c874d432461d77702437f63e5) https://bugs.python.org/issue9194 files: M PC/_winreg.c diff --git a/PC/_winreg.c b/PC/_winreg.c index f0f8df33107b..3b887e075334 100644 --- a/PC/_winreg.c +++ b/PC/_winreg.c @@ -727,7 +727,7 @@ fixupMultiSZ(char **str, char *data, int len) Q = data + len; for (P = data, i = 0; P < Q && *P != '\0'; P++, i++) { str[i] = P; - for(; *P != '\0'; P++) + for (; P < Q && *P != '\0'; P++) ; } } From webhook-mailer at python.org Tue Apr 23 03:07:02 2019 From: webhook-mailer at python.org (Raymond Hettinger) Date: Tue, 23 Apr 2019 07:07:02 -0000 Subject: [Python-checkins] bpo-36546: Add statistics.quantiles() (#12710) Message-ID: https://github.com/python/cpython/commit/9013ccf6d8037f6ae78145a42d194141cb10d332 commit: 9013ccf6d8037f6ae78145a42d194141cb10d332 branch: master author: Raymond Hettinger committer: GitHub date: 2019-04-23T00:06:35-07:00 summary: bpo-36546: Add statistics.quantiles() (#12710) files: A Misc/NEWS.d/next/Library/2019-04-06-14-23-00.bpo-36546.YXjbyY.rst M 
Doc/library/statistics.rst M Doc/whatsnew/3.8.rst M Lib/statistics.py M Lib/test/test_statistics.py diff --git a/Doc/library/statistics.rst b/Doc/library/statistics.rst index 8bb2bdf7b697..b62bcfdffd0b 100644 --- a/Doc/library/statistics.rst +++ b/Doc/library/statistics.rst @@ -48,6 +48,7 @@ or sample. :func:`median_grouped` Median, or 50th percentile, of grouped data. :func:`mode` Single mode (most common value) of discrete or nominal data. :func:`multimode` List of modes (most common values) of discrete or nomimal data. +:func:`quantiles` Divide data into intervals with equal probability. ======================= =============================================================== Measures of spread @@ -499,6 +500,53 @@ However, for reading convenience, most of the examples show sorted sequences. :func:`pvariance` function as the *mu* parameter to get the variance of a sample. +.. function:: quantiles(dist, *, n=4, method='exclusive') + + Divide *dist* into *n* continuous intervals with equal probability. + Returns a list of ``n - 1`` cut points separating the intervals. + + Set *n* to 4 for quartiles (the default). Set *n* to 10 for deciles. Set + *n* to 100 for percentiles which gives the 99 cuts points that separate + *dist* in to 100 equal sized groups. Raises :exc:`StatisticsError` if *n* + is not least 1. + + The *dist* can be any iterable containing sample data or it can be an + instance of a class that defines an :meth:`~inv_cdf` method. + Raises :exc:`StatisticsError` if there are not at least two data points. + + For sample data, the cut points are linearly interpolated from the + two nearest data points. For example, if a cut point falls one-third + of the distance between two sample values, ``100`` and ``112``, the + cut-point will evaluate to ``104``. Other selection methods may be + offered in the future (for example choose ``100`` as the nearest + value or compute ``106`` as the midpoint). This might matter if + there are too few samples for a given number of cut points. + + If *method* is set to *inclusive*, *dist* is treated as population data. + The minimum value is treated as the 0th percentile and the maximum + value is treated as the 100th percentile. If *dist* is an instance of + a class that defines an :meth:`~inv_cdf` method, setting *method* + has no effect. + + .. doctest:: + + # Decile cut points for empirically sampled data + >>> data = [105, 129, 87, 86, 111, 111, 89, 81, 108, 92, 110, + ... 100, 75, 105, 103, 109, 76, 119, 99, 91, 103, 129, + ... 106, 101, 84, 111, 74, 87, 86, 103, 103, 106, 86, + ... 111, 75, 87, 102, 121, 111, 88, 89, 101, 106, 95, + ... 103, 107, 101, 81, 109, 104] + >>> [round(q, 1) for q in quantiles(data, n=10)] + [81.0, 86.2, 89.0, 99.4, 102.5, 103.6, 106.0, 109.8, 111.0] + + >>> # Quartile cut points for the standard normal distibution + >>> Z = NormalDist() + >>> [round(q, 4) for q in quantiles(Z, n=4)] + [-0.6745, 0.0, 0.6745] + + .. versionadded:: 3.8 + + Exceptions ---------- @@ -606,7 +654,7 @@ of applications in statistics. `_ between two normal distributions, giving a measure of agreement. Returns a value between 0.0 and 1.0 giving `the overlapping area for - two probability density functions + the two probability density functions `_. Instances of :class:`NormalDist` support addition, subtraction, @@ -649,8 +697,8 @@ of applications in statistics. 
For example, given `historical data for SAT exams `_ showing that scores are normally distributed with a mean of 1060 and a standard deviation of 192, -determine the percentage of students with scores between 1100 and 1200, after -rounding to the nearest whole number: +determine the percentage of students with test scores between 1100 and +1200, after rounding to the nearest whole number: .. doctest:: diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst index f866f9ccb8c1..bd7ad3f87cb5 100644 --- a/Doc/whatsnew/3.8.rst +++ b/Doc/whatsnew/3.8.rst @@ -337,6 +337,10 @@ Added :func:`statistics.geometric_mean()` Added :func:`statistics.multimode` that returns a list of the most common values. (Contributed by Raymond Hettinger in :issue:`35892`.) +Added :func:`statistics.quantiles` that divides data or a distribution +in to equiprobable intervals (e.g. quartiles, deciles, or percentiles). +(Contributed by Raymond Hettinger in :issue:`36546`.) + Added :class:`statistics.NormalDist`, a tool for creating and manipulating normal distributions of a random variable. (Contributed by Raymond Hettinger in :issue:`36018`.) diff --git a/Lib/statistics.py b/Lib/statistics.py index 262ad976b65c..05edfdf98e06 100644 --- a/Lib/statistics.py +++ b/Lib/statistics.py @@ -7,9 +7,9 @@ Calculating averages -------------------- -================== ============================================= +================== ================================================== Function Description -================== ============================================= +================== ================================================== mean Arithmetic mean (average) of data. geometric_mean Geometric mean of data. harmonic_mean Harmonic mean of data. @@ -19,7 +19,8 @@ median_grouped Median, or 50th percentile, of grouped data. mode Mode (most common value) of data. multimode List of modes (most common values of data). -================== ============================================= +quantiles Divide data into intervals with equal probability. +================== ================================================== Calculate the arithmetic mean ("the average") of data: @@ -78,7 +79,7 @@ """ -__all__ = [ 'StatisticsError', 'NormalDist', +__all__ = [ 'StatisticsError', 'NormalDist', 'quantiles', 'pstdev', 'pvariance', 'stdev', 'variance', 'median', 'median_low', 'median_high', 'median_grouped', 'mean', 'mode', 'multimode', 'harmonic_mean', 'fmean', @@ -562,6 +563,54 @@ def multimode(data): maxcount, mode_items = next(groupby(counts, key=itemgetter(1)), (0, [])) return list(map(itemgetter(0), mode_items)) +def quantiles(dist, *, n=4, method='exclusive'): + '''Divide *dist* into *n* continuous intervals with equal probability. + + Returns a list of (n - 1) cut points separating the intervals. + + Set *n* to 4 for quartiles (the default). Set *n* to 10 for deciles. + Set *n* to 100 for percentiles which gives the 99 cuts points that + separate *dist* in to 100 equal sized groups. + + The *dist* can be any iterable containing sample data or it can be + an instance of a class that defines an inv_cdf() method. For sample + data, the cut points are linearly interpolated between data points. + + If *method* is set to *inclusive*, *dist* is treated as population + data. The minimum value is treated as the 0th percentile and the + maximum value is treated as the 100th percentile. 
+ ''' + # Possible future API extensions: + # quantiles(data, already_sorted=True) + # quantiles(data, cut_points=[0.02, 0.25, 0.50, 0.75, 0.98]) + if n < 1: + raise StatisticsError('n must be at least 1') + if hasattr(dist, 'inv_cdf'): + return [dist.inv_cdf(i / n) for i in range(1, n)] + data = sorted(dist) + ld = len(data) + if ld < 2: + raise StatisticsError('must have at least two data points') + if method == 'inclusive': + m = ld - 1 + result = [] + for i in range(1, n): + j = i * m // n + delta = i*m - j*n + interpolated = (data[j] * (n - delta) + data[j+1] * delta) / n + result.append(interpolated) + return result + if method == 'exclusive': + m = ld + 1 + result = [] + for i in range(1, n): + j = i * m // n # rescale i to m/n + j = 1 if j < 1 else ld-1 if j > ld-1 else j # clamp to 1 .. ld-1 + delta = i*m - j*n # exact integer math + interpolated = (data[j-1] * (n - delta) + data[j] * delta) / n + result.append(interpolated) + return result + raise ValueError(f'Unknown method: {method!r}') # === Measures of spread === diff --git a/Lib/test/test_statistics.py b/Lib/test/test_statistics.py index 4d397eb1265d..c988d7fd8be7 100644 --- a/Lib/test/test_statistics.py +++ b/Lib/test/test_statistics.py @@ -3,6 +3,7 @@ """ +import bisect import collections import collections.abc import copy @@ -2038,6 +2039,7 @@ def test_compare_to_variance(self): expected = math.sqrt(statistics.variance(data)) self.assertEqual(self.func(data), expected) + class TestGeometricMean(unittest.TestCase): def test_basics(self): @@ -2126,6 +2128,146 @@ def test_special_values(self): with self.assertRaises(ValueError): geometric_mean([Inf, -Inf]) + +class TestQuantiles(unittest.TestCase): + + def test_specific_cases(self): + # Match results computed by hand and cross-checked + # against the PERCENTILE.EXC function in MS Excel. + quantiles = statistics.quantiles + data = [120, 200, 250, 320, 350] + random.shuffle(data) + for n, expected in [ + (1, []), + (2, [250.0]), + (3, [200.0, 320.0]), + (4, [160.0, 250.0, 335.0]), + (5, [136.0, 220.0, 292.0, 344.0]), + (6, [120.0, 200.0, 250.0, 320.0, 350.0]), + (8, [100.0, 160.0, 212.5, 250.0, 302.5, 335.0, 357.5]), + (10, [88.0, 136.0, 184.0, 220.0, 250.0, 292.0, 326.0, 344.0, 362.0]), + (12, [80.0, 120.0, 160.0, 200.0, 225.0, 250.0, 285.0, 320.0, 335.0, + 350.0, 365.0]), + (15, [72.0, 104.0, 136.0, 168.0, 200.0, 220.0, 240.0, 264.0, 292.0, + 320.0, 332.0, 344.0, 356.0, 368.0]), + ]: + self.assertEqual(expected, quantiles(data, n=n)) + self.assertEqual(len(quantiles(data, n=n)), n - 1) + self.assertEqual(list(map(float, expected)), + quantiles(map(Decimal, data), n=n)) + self.assertEqual(list(map(Decimal, expected)), + quantiles(map(Decimal, data), n=n)) + self.assertEqual(list(map(Fraction, expected)), + quantiles(map(Fraction, data), n=n)) + # Invariant under tranlation and scaling + def f(x): + return 3.5 * x - 1234.675 + exp = list(map(f, expected)) + act = quantiles(map(f, data), n=n) + self.assertTrue(all(math.isclose(e, a) for e, a in zip(exp, act))) + # Quartiles of a standard normal distribution + for n, expected in [ + (1, []), + (2, [0.0]), + (3, [-0.4307, 0.4307]), + (4 ,[-0.6745, 0.0, 0.6745]), + ]: + actual = quantiles(statistics.NormalDist(), n=n) + self.assertTrue(all(math.isclose(e, a, abs_tol=0.0001) + for e, a in zip(expected, actual))) + + def test_specific_cases_inclusive(self): + # Match results computed by hand and cross-checked + # against the PERCENTILE.INC function in MS Excel + # and against the quaatile() function in SciPy. 
+ quantiles = statistics.quantiles + data = [100, 200, 400, 800] + random.shuffle(data) + for n, expected in [ + (1, []), + (2, [300.0]), + (3, [200.0, 400.0]), + (4, [175.0, 300.0, 500.0]), + (5, [160.0, 240.0, 360.0, 560.0]), + (6, [150.0, 200.0, 300.0, 400.0, 600.0]), + (8, [137.5, 175, 225.0, 300.0, 375.0, 500.0,650.0]), + (10, [130.0, 160.0, 190.0, 240.0, 300.0, 360.0, 440.0, 560.0, 680.0]), + (12, [125.0, 150.0, 175.0, 200.0, 250.0, 300.0, 350.0, 400.0, + 500.0, 600.0, 700.0]), + (15, [120.0, 140.0, 160.0, 180.0, 200.0, 240.0, 280.0, 320.0, 360.0, + 400.0, 480.0, 560.0, 640.0, 720.0]), + ]: + self.assertEqual(expected, quantiles(data, n=n, method="inclusive")) + self.assertEqual(len(quantiles(data, n=n, method="inclusive")), n - 1) + self.assertEqual(list(map(float, expected)), + quantiles(map(Decimal, data), n=n, method="inclusive")) + self.assertEqual(list(map(Decimal, expected)), + quantiles(map(Decimal, data), n=n, method="inclusive")) + self.assertEqual(list(map(Fraction, expected)), + quantiles(map(Fraction, data), n=n, method="inclusive")) + # Invariant under tranlation and scaling + def f(x): + return 3.5 * x - 1234.675 + exp = list(map(f, expected)) + act = quantiles(map(f, data), n=n, method="inclusive") + self.assertTrue(all(math.isclose(e, a) for e, a in zip(exp, act))) + # Quartiles of a standard normal distribution + for n, expected in [ + (1, []), + (2, [0.0]), + (3, [-0.4307, 0.4307]), + (4 ,[-0.6745, 0.0, 0.6745]), + ]: + actual = quantiles(statistics.NormalDist(), n=n, method="inclusive") + self.assertTrue(all(math.isclose(e, a, abs_tol=0.0001) + for e, a in zip(expected, actual))) + + def test_equal_sized_groups(self): + quantiles = statistics.quantiles + total = 10_000 + data = [random.expovariate(0.2) for i in range(total)] + while len(set(data)) != total: + data.append(random.expovariate(0.2)) + data.sort() + + # Cases where the group size exactly divides the total + for n in (1, 2, 5, 10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000): + group_size = total // n + self.assertEqual( + [bisect.bisect(data, q) for q in quantiles(data, n=n)], + list(range(group_size, total, group_size))) + + # When the group sizes can't be exactly equal, they should + # differ by no more than one + for n in (13, 19, 59, 109, 211, 571, 1019, 1907, 5261, 9769): + group_sizes = {total // n, total // n + 1} + pos = [bisect.bisect(data, q) for q in quantiles(data, n=n)] + sizes = {q - p for p, q in zip(pos, pos[1:])} + self.assertTrue(sizes <= group_sizes) + + def test_error_cases(self): + quantiles = statistics.quantiles + StatisticsError = statistics.StatisticsError + with self.assertRaises(TypeError): + quantiles() # Missing arguments + with self.assertRaises(TypeError): + quantiles([10, 20, 30], 13, n=4) # Too many arguments + with self.assertRaises(TypeError): + quantiles([10, 20, 30], 4) # n is a positional argument + with self.assertRaises(StatisticsError): + quantiles([10, 20, 30], n=0) # n is zero + with self.assertRaises(StatisticsError): + quantiles([10, 20, 30], n=-1) # n is negative + with self.assertRaises(TypeError): + quantiles([10, 20, 30], n=1.5) # n is not an integer + with self.assertRaises(ValueError): + quantiles([10, 20, 30], method='X') # method is unknown + with self.assertRaises(StatisticsError): + quantiles([10], n=4) # not enough data points + with self.assertRaises(TypeError): + quantiles([10, None, 30], n=4) # data is non-numeric + + class TestNormalDist(unittest.TestCase): # General note on precision: The pdf(), cdf(), and overlap() methods diff --git 
a/Misc/NEWS.d/next/Library/2019-04-06-14-23-00.bpo-36546.YXjbyY.rst b/Misc/NEWS.d/next/Library/2019-04-06-14-23-00.bpo-36546.YXjbyY.rst new file mode 100644 index 000000000000..c69aadf3b69e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-04-06-14-23-00.bpo-36546.YXjbyY.rst @@ -0,0 +1 @@ +Add statistics.quantiles() From webhook-mailer at python.org Tue Apr 23 04:32:49 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 23 Apr 2019 08:32:49 -0000 Subject: [Python-checkins] [3.7] bpo-9566: Fix compiler warnings on Windows (GH-12920) Message-ID: https://github.com/python/cpython/commit/8a9a6b443c57e47e9d10ed7775479aada4dac719 commit: 8a9a6b443c57e47e9d10ed7775479aada4dac719 branch: 3.7 author: Victor Stinner committer: GitHub date: 2019-04-23T10:26:11+02:00 summary: [3.7] bpo-9566: Fix compiler warnings on Windows (GH-12920) * bpo-9566: Fix compiler warnings in gcmodule.c (GH-11010) Change PyDTrace_GC_DONE() argument type from int to Py_ssize_t. (cherry picked from commit edad38e3e05586ba58291f47756eb3fb808f5577) * bpo-30465: Fix C downcast warning on Windows in ast.c (#6593) ast.c: fstring_fix_node_location() downcasts a pointer difference to a C int. Replace int with Py_ssize_t to fix the compiler warning. (cherry picked from commit fb7e7992beec7f76cc2db77ab6ce1e86446bfccf) * bpo-9566: Fix compiler warnings in peephole.c (GH-10652) (cherry picked from commit 028f0ef4f3111d2b3fc5b971642e337ba7990873) * bpo-27645, sqlite: Fix integer overflow on sleep (#6594) Use the _PyTime_t type and round away from zero (ROUND_UP, _PyTime_ROUND_TIMEOUT) the sleep duration, when converting a Python object to seconds and then to milliseconds. Raise an OverflowError in case of overflow. Previously the (int)double conversion rounded towards zero (ROUND_DOWN). 
(cherry picked from commit ca405017d5e776a2e3d9291236e62d2e09489dd2) files: M Include/pydtrace.h M Modules/_sqlite/connection.c M Python/ast.c M Python/peephole.c diff --git a/Include/pydtrace.h b/Include/pydtrace.h index 037961d429c6..7a04278166b0 100644 --- a/Include/pydtrace.h +++ b/Include/pydtrace.h @@ -29,7 +29,7 @@ static inline void PyDTrace_LINE(const char *arg0, const char *arg1, int arg2) { static inline void PyDTrace_FUNCTION_ENTRY(const char *arg0, const char *arg1, int arg2) {} static inline void PyDTrace_FUNCTION_RETURN(const char *arg0, const char *arg1, int arg2) {} static inline void PyDTrace_GC_START(int arg0) {} -static inline void PyDTrace_GC_DONE(int arg0) {} +static inline void PyDTrace_GC_DONE(Py_ssize_t arg0) {} static inline void PyDTrace_INSTANCE_NEW_START(int arg0) {} static inline void PyDTrace_INSTANCE_NEW_DONE(int arg0) {} static inline void PyDTrace_INSTANCE_DELETE_START(int arg0) {} diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c index d43286a2d34d..9eb61c18f304 100644 --- a/Modules/_sqlite/connection.c +++ b/Modules/_sqlite/connection.c @@ -1462,17 +1462,33 @@ pysqlite_connection_backup(pysqlite_Connection *self, PyObject *args, PyObject * const char *name = "main"; int rc; int callback_error = 0; - double sleep_secs = 0.250; + PyObject *sleep_obj = NULL; + int sleep_ms = 250; sqlite3 *bck_conn; sqlite3_backup *bck_handle; static char *keywords[] = {"target", "pages", "progress", "name", "sleep", NULL}; - if (!PyArg_ParseTupleAndKeywords(args, kwds, "O!|$iOsd:backup", keywords, + if (!PyArg_ParseTupleAndKeywords(args, kwds, "O!|$iOsO:backup", keywords, &pysqlite_ConnectionType, &target, - &pages, &progress, &name, &sleep_secs)) { + &pages, &progress, &name, &sleep_obj)) { return NULL; } + if (sleep_obj != NULL) { + _PyTime_t sleep_secs; + if (_PyTime_FromSecondsObject(&sleep_secs, sleep_obj, + _PyTime_ROUND_TIMEOUT)) { + return NULL; + } + _PyTime_t ms = _PyTime_AsMilliseconds(sleep_secs, + _PyTime_ROUND_TIMEOUT); + if (ms < INT_MIN || ms > INT_MAX) { + PyErr_SetString(PyExc_OverflowError, "sleep is too large"); + return NULL; + } + sleep_ms = (int)ms; + } + if (!pysqlite_check_connection((pysqlite_Connection *)target)) { return NULL; } @@ -1532,7 +1548,7 @@ pysqlite_connection_backup(pysqlite_Connection *self, PyObject *args, PyObject * the engine could not make any progress */ if (rc == SQLITE_BUSY || rc == SQLITE_LOCKED) { Py_BEGIN_ALLOW_THREADS - sqlite3_sleep(sleep_secs * 1000.0); + sqlite3_sleep(sleep_ms); Py_END_ALLOW_THREADS } } while (rc == SQLITE_OK || rc == SQLITE_BUSY || rc == SQLITE_LOCKED); diff --git a/Python/ast.c b/Python/ast.c index 1e182c7d782a..ce61375ea951 100644 --- a/Python/ast.c +++ b/Python/ast.c @@ -4282,7 +4282,7 @@ fstring_fix_node_location(const node *parent, node *n, char *expr_str) break; start--; } - cols += substr - start; + cols += (int)(substr - start); /* Fix lineno in mulitline strings. 
*/ while ((substr = strchr(substr + 1, '\n'))) lines--; diff --git a/Python/peephole.c b/Python/peephole.c index a3b078fdf1d4..95b3dbb6bf51 100644 --- a/Python/peephole.c +++ b/Python/peephole.c @@ -152,6 +152,15 @@ fold_tuple_on_constants(_Py_CODEUNIT *codestr, Py_ssize_t codelen, PyTuple_SET_ITEM(newconst, i, constant); } + Py_ssize_t index = PyList_GET_SIZE(consts); +#if SIZEOF_SIZE_T > SIZEOF_INT + if ((size_t)index >= UINT_MAX - 1) { + Py_DECREF(newconst); + PyErr_SetString(PyExc_OverflowError, "too many constants"); + return -1; + } +#endif + /* Append folded constant onto consts */ if (PyList_Append(consts, newconst)) { Py_DECREF(newconst); @@ -160,7 +169,7 @@ fold_tuple_on_constants(_Py_CODEUNIT *codestr, Py_ssize_t codelen, Py_DECREF(newconst); return copy_op_arg(codestr, c_start, LOAD_CONST, - PyList_GET_SIZE(consts)-1, opcode_end); + (unsigned int)index, opcode_end); } static unsigned int * @@ -223,7 +232,7 @@ PyObject * PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names, PyObject *lnotab_obj) { - Py_ssize_t h, i, nexti, op_start, codelen, tgt; + Py_ssize_t h, i, nexti, op_start, tgt; unsigned int j, nops; unsigned char opcode, nextop; _Py_CODEUNIT *codestr = NULL; @@ -251,17 +260,22 @@ PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names, the peephole optimizer doesn't modify line numbers. */ assert(PyBytes_Check(code)); - codelen = PyBytes_GET_SIZE(code); - assert(codelen % sizeof(_Py_CODEUNIT) == 0); + Py_ssize_t codesize = PyBytes_GET_SIZE(code); + assert(codesize % sizeof(_Py_CODEUNIT) == 0); + Py_ssize_t codelen = codesize / sizeof(_Py_CODEUNIT); + if (codelen > INT_MAX) { + /* Python assembler is limited to INT_MAX: see assembler.a_offset in + compile.c. */ + goto exitUnchanged; + } /* Make a modifiable copy of the code string */ - codestr = (_Py_CODEUNIT *)PyMem_Malloc(codelen); + codestr = (_Py_CODEUNIT *)PyMem_Malloc(codesize); if (codestr == NULL) { PyErr_NoMemory(); goto exitError; } - memcpy(codestr, PyBytes_AS_STRING(code), codelen); - codelen /= sizeof(_Py_CODEUNIT); + memcpy(codestr, PyBytes_AS_STRING(code), codesize); blocks = markblocks(codestr, codelen); if (blocks == NULL) @@ -359,7 +373,11 @@ PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names, jump past it), and all conditional jumps pop their argument when they're not taken (so change the first jump to pop its argument when it's taken). */ - h = set_arg(codestr, i, (tgt + 1) * sizeof(_Py_CODEUNIT)); + Py_ssize_t arg = (tgt + 1); + /* cannot overflow: codelen <= INT_MAX */ + assert((size_t)arg <= UINT_MAX / sizeof(_Py_CODEUNIT)); + arg *= sizeof(_Py_CODEUNIT); + h = set_arg(codestr, i, (unsigned int)arg); j = opcode == JUMP_IF_TRUE_OR_POP ? 
POP_JUMP_IF_TRUE : POP_JUMP_IF_FALSE; } @@ -392,17 +410,20 @@ PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names, codestr[op_start] = PACKOPARG(RETURN_VALUE, 0); fill_nops(codestr, op_start + 1, i + 1); } else if (UNCONDITIONAL_JUMP(_Py_OPCODE(codestr[tgt]))) { - j = GETJUMPTGT(codestr, tgt); + size_t arg = GETJUMPTGT(codestr, tgt); if (opcode == JUMP_FORWARD) { /* JMP_ABS can go backwards */ opcode = JUMP_ABSOLUTE; } else if (!ABSOLUTE_JUMP(opcode)) { - if ((Py_ssize_t)j < i + 1) { + if (arg < (size_t)(i + 1)) { break; /* No backward relative jumps */ } - j -= i + 1; /* Calc relative jump addr */ + arg -= i + 1; /* Calc relative jump addr */ } - j *= sizeof(_Py_CODEUNIT); - copy_op_arg(codestr, op_start, opcode, j, i + 1); + /* cannot overflow: codelen <= INT_MAX */ + assert(arg <= (UINT_MAX / sizeof(_Py_CODEUNIT))); + arg *= sizeof(_Py_CODEUNIT); + copy_op_arg(codestr, op_start, opcode, + (unsigned int)arg, i + 1); } break; @@ -422,11 +443,14 @@ PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names, /* Fixup lnotab */ for (i = 0, nops = 0; i < codelen; i++) { - assert(i - nops <= INT_MAX); + size_t block = (size_t)i - nops; + /* cannot overflow: codelen <= INT_MAX */ + assert(block <= UINT_MAX); /* original code offset => new code offset */ - blocks[i] = i - nops; - if (_Py_OPCODE(codestr[i]) == NOP) + blocks[i] = (unsigned int)block; + if (_Py_OPCODE(codestr[i]) == NOP) { nops++; + } } cum_orig_offset = 0; last_offset = 0; @@ -473,12 +497,14 @@ PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names, j *= sizeof(_Py_CODEUNIT); break; } - nexti = i - op_start + 1; - if (instrsize(j) > nexti) + Py_ssize_t ilen = i - op_start + 1; + if (instrsize(j) > ilen) { goto exitUnchanged; - /* If instrsize(j) < nexti, we'll emit EXTENDED_ARG 0 */ - write_op_arg(codestr + h, opcode, j, nexti); - h += nexti; + } + assert(ilen <= INT_MAX); + /* If instrsize(j) < ilen, we'll emit EXTENDED_ARG 0 */ + write_op_arg(codestr + h, opcode, j, (int)ilen); + h += ilen; } assert(h + (Py_ssize_t)nops == codelen); From webhook-mailer at python.org Tue Apr 23 04:36:37 2019 From: webhook-mailer at python.org (Raymond Hettinger) Date: Tue, 23 Apr 2019 08:36:37 -0000 Subject: [Python-checkins] Add module specification: itemgetter -> operator.itemgetter (GH-12823) Message-ID: https://github.com/python/cpython/commit/b4c7f39bbf8f16a0da758601b33aec3ba531c8d6 commit: b4c7f39bbf8f16a0da758601b33aec3ba531c8d6 branch: master author: Jakub Molinski committer: Raymond Hettinger date: 2019-04-23T01:30:30-07:00 summary: Add module specification: itemgetter -> operator.itemgetter (GH-12823) files: M Doc/library/itertools.rst diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst index 3d4e5836cf20..b3a0a5f5192d 100644 --- a/Doc/library/itertools.rst +++ b/Doc/library/itertools.rst @@ -827,7 +827,7 @@ which incur interpreter overhead. "List unique elements, preserving order. Remember only the element just seen." # unique_justseen('AAAABBBCCDAABBB') --> A B C D A B # unique_justseen('ABBCcAD', str.lower) --> A B C A D - return map(next, map(itemgetter(1), groupby(iterable, key))) + return map(next, map(operator.itemgetter(1), groupby(iterable, key))) def iter_except(func, exception, first=None): """ Call a function repeatedly until an exception is raised. 
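For readers trying the corrected recipe above outside the docs, here is a minimal, self-contained sketch (assumes only the standard library; the driver lines after the recipe are illustrative and not part of the commit):

import operator
from itertools import groupby

def unique_justseen(iterable, key=None):
    "List unique elements, preserving order. Remember only the element just seen."
    # groupby() yields (key, group-iterator) pairs; operator.itemgetter(1)
    # picks the group iterator and next() takes its first element.
    return map(next, map(operator.itemgetter(1), groupby(iterable, key)))

print(list(unique_justseen('AAAABBBCCDAABBB')))     # ['A', 'B', 'C', 'D', 'A', 'B']
print(list(unique_justseen('ABBCcAD', str.lower)))  # ['A', 'B', 'C', 'A', 'D']
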
From webhook-mailer at python.org Tue Apr 23 04:39:10 2019 From: webhook-mailer at python.org (Raymond Hettinger) Date: Tue, 23 Apr 2019 08:39:10 -0000 Subject: [Python-checkins] Document that TestCase.assertCountEqual() can take iterables (GH-686) Message-ID: https://github.com/python/cpython/commit/39baace622564867f55cea49483dd1443b8655e3 commit: 39baace622564867f55cea49483dd1443b8655e3 branch: master author: jkleint committer: Raymond Hettinger date: 2019-04-23T01:34:29-07:00 summary: Document that TestCase.assertCountEqual() can take iterables (GH-686) files: M Lib/unittest/case.py diff --git a/Lib/unittest/case.py b/Lib/unittest/case.py index 972a4658b17b..8ff2546fc207 100644 --- a/Lib/unittest/case.py +++ b/Lib/unittest/case.py @@ -1244,9 +1244,8 @@ def assertDictContainsSubset(self, subset, dictionary, msg=None): def assertCountEqual(self, first, second, msg=None): - """An unordered sequence comparison asserting that the same elements, - regardless of order. If the same element occurs more than once, - it verifies that the elements occur the same number of times. + """Asserts that two iterables have the same elements, the same number of + times, without regard to order. self.assertEqual(Counter(list(first)), Counter(list(second))) From webhook-mailer at python.org Tue Apr 23 04:39:58 2019 From: webhook-mailer at python.org (Raymond Hettinger) Date: Tue, 23 Apr 2019 08:39:58 -0000 Subject: [Python-checkins] bpo-35904: Add missing fmean() entry to the summary table (GH-12919) Message-ID: https://github.com/python/cpython/commit/7280048690244e73b13f4f68b96c244bcb5434e8 commit: 7280048690244e73b13f4f68b96c244bcb5434e8 branch: master author: Raymond Hettinger committer: GitHub date: 2019-04-23T01:35:16-07:00 summary: bpo-35904: Add missing fmean() entry to the summary table (GH-12919) files: M Lib/statistics.py diff --git a/Lib/statistics.py b/Lib/statistics.py index 05edfdf98e06..4a0978cbcd9c 100644 --- a/Lib/statistics.py +++ b/Lib/statistics.py @@ -11,6 +11,7 @@ Function Description ================== ================================================== mean Arithmetic mean (average) of data. +fmean Fast, floating point arithmetic mean. geometric_mean Geometric mean of data. harmonic_mean Harmonic mean of data. median Median (middle value) of data. From webhook-mailer at python.org Tue Apr 23 04:56:26 2019 From: webhook-mailer at python.org (Raymond Hettinger) Date: Tue, 23 Apr 2019 08:56:26 -0000 Subject: [Python-checkins] bpo-36018: Make "seed" into a keyword only argument (GH-12921) Message-ID: https://github.com/python/cpython/commit/fb8c7d53326d137785ca311bfc48c8284da46770 commit: fb8c7d53326d137785ca311bfc48c8284da46770 branch: master author: Raymond Hettinger committer: GitHub date: 2019-04-23T01:46:18-07:00 summary: bpo-36018: Make "seed" into a keyword only argument (GH-12921) files: M Doc/library/statistics.rst M Lib/statistics.py diff --git a/Doc/library/statistics.rst b/Doc/library/statistics.rst index b62bcfdffd0b..fb7df4e7188a 100644 --- a/Doc/library/statistics.rst +++ b/Doc/library/statistics.rst @@ -607,7 +607,7 @@ of applications in statistics. :exc:`StatisticsError` because it takes at least one point to estimate a central value and at least two points to estimate dispersion. - .. method:: NormalDist.samples(n, seed=None) + .. method:: NormalDist.samples(n, *, seed=None) Generates *n* random samples for a given mean and standard deviation. Returns a :class:`list` of :class:`float` values. 
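To illustrate the keyword-only change documented above, a minimal usage sketch (assumes Python 3.8 with this commit applied; the mean and standard deviation are taken from the SAT example elsewhere in the same document, and the seed value is arbitrary):

from statistics import NormalDist

sat = NormalDist(1060, 192)
# After this change, "seed" must be passed by keyword;
# sat.samples(3, 42) would now raise TypeError.
scores = sat.samples(3, seed=8675309)  # a list of three floats
print(scores)
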
diff --git a/Lib/statistics.py b/Lib/statistics.py index 4a0978cbcd9c..19db8e828010 100644 --- a/Lib/statistics.py +++ b/Lib/statistics.py @@ -797,7 +797,7 @@ def from_samples(cls, data): xbar = fmean(data) return cls(xbar, stdev(data, xbar)) - def samples(self, n, seed=None): + def samples(self, n, *, seed=None): 'Generate *n* samples for a given mean and standard deviation.' gauss = random.gauss if seed is None else random.Random(seed).gauss mu, sigma = self.mu, self.sigma From webhook-mailer at python.org Tue Apr 23 05:30:53 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Tue, 23 Apr 2019 09:30:53 -0000 Subject: [Python-checkins] use `const` in graminit.c (GH-12713) Message-ID: https://github.com/python/cpython/commit/84b4784f12d4b8aab2b4cc5a4f5b64f45ec3e5d4 commit: 84b4784f12d4b8aab2b4cc5a4f5b64f45ec3e5d4 branch: master author: tyomitch committer: Inada Naoki date: 2019-04-23T18:29:57+09:00 summary: use `const` in graminit.c (GH-12713) files: M Include/grammar.h M Parser/pgen/grammar.py M Python/graminit.c diff --git a/Include/grammar.h b/Include/grammar.h index 7a6182bb76d5..faccae4657ee 100644 --- a/Include/grammar.h +++ b/Include/grammar.h @@ -13,7 +13,7 @@ extern "C" { typedef struct { int lb_type; - char *lb_str; + const char *lb_str; } label; #define EMPTY 0 /* Label number 0 is by definition the empty label */ @@ -22,7 +22,7 @@ typedef struct { typedef struct { int ll_nlabels; - label *ll_label; + const label *ll_label; } labellist; /* An arc from one state to another */ @@ -36,7 +36,7 @@ typedef struct { typedef struct { int s_narcs; - arc *s_arc; /* Array of arcs */ + const arc *s_arc; /* Array of arcs */ /* Optional accelerators */ int s_lower; /* Lowest label index */ @@ -59,8 +59,8 @@ typedef struct { typedef struct { int g_ndfas; - dfa *g_dfa; /* Array of DFAs */ - labellist g_ll; + const dfa *g_dfa; /* Array of DFAs */ + const labellist g_ll; int g_start; /* Start symbol of the grammar */ int g_accel; /* Set if accelerators present */ } grammar; diff --git a/Parser/pgen/grammar.py b/Parser/pgen/grammar.py index 1ab9434fa887..5cd652426b47 100644 --- a/Parser/pgen/grammar.py +++ b/Parser/pgen/grammar.py @@ -76,7 +76,7 @@ def produce_graminit_c(self, writer): def print_labels(self, writer): writer( - "static label labels[{n_labels}] = {{\n".format(n_labels=len(self.labels)) + "static const label labels[{n_labels}] = {{\n".format(n_labels=len(self.labels)) ) for label, name in self.labels: label_name = '"{}"'.format(name) if name is not None else 0 @@ -89,7 +89,7 @@ def print_labels(self, writer): def print_dfas(self, writer): self.print_states(writer) - writer("static dfa dfas[{}] = {{\n".format(len(self.dfas))) + writer("static const dfa dfas[{}] = {{\n".format(len(self.dfas))) for dfaindex, dfa_elem in enumerate(self.dfas.items()): symbol, (dfa, first_sets) = dfa_elem writer( @@ -131,7 +131,7 @@ def print_arcs(self, write, dfaindex, states): for stateindex, state in enumerate(states): narcs = len(state) write( - "static arc arcs_{dfa_index}_{state_index}[{n_arcs}] = {{\n".format( + "static const arc arcs_{dfa_index}_{state_index}[{n_arcs}] = {{\n".format( dfa_index=dfaindex, state_index=stateindex, n_arcs=narcs ) ) diff --git a/Python/graminit.c b/Python/graminit.c index cd9003241700..96e32aa8fd33 100644 --- a/Python/graminit.c +++ b/Python/graminit.c @@ -2,15 +2,15 @@ #include "grammar.h" grammar _PyParser_Grammar; -static arc arcs_0_0[3] = { +static const arc arcs_0_0[3] = { {2, 1}, {3, 2}, {4, 1}, }; -static arc arcs_0_1[1] = { +static const arc arcs_0_1[1] 
= { {0, 1}, }; -static arc arcs_0_2[1] = { +static const arc arcs_0_2[1] = { {2, 1}, }; static state states_0[3] = { @@ -18,26 +18,26 @@ static state states_0[3] = { {1, arcs_0_1}, {1, arcs_0_2}, }; -static arc arcs_1_0[3] = { +static const arc arcs_1_0[3] = { {44, 1}, {2, 0}, {45, 0}, }; -static arc arcs_1_1[1] = { +static const arc arcs_1_1[1] = { {0, 1}, }; static state states_1[2] = { {3, arcs_1_0}, {1, arcs_1_1}, }; -static arc arcs_2_0[1] = { +static const arc arcs_2_0[1] = { {47, 1}, }; -static arc arcs_2_1[2] = { +static const arc arcs_2_1[2] = { {44, 2}, {2, 1}, }; -static arc arcs_2_2[1] = { +static const arc arcs_2_2[1] = { {0, 2}, }; static state states_2[3] = { @@ -45,27 +45,27 @@ static state states_2[3] = { {2, arcs_2_1}, {1, arcs_2_2}, }; -static arc arcs_3_0[1] = { +static const arc arcs_3_0[1] = { {10, 1}, }; -static arc arcs_3_1[1] = { +static const arc arcs_3_1[1] = { {49, 2}, }; -static arc arcs_3_2[2] = { +static const arc arcs_3_2[2] = { {5, 3}, {2, 4}, }; -static arc arcs_3_3[2] = { +static const arc arcs_3_3[2] = { {50, 5}, {51, 6}, }; -static arc arcs_3_4[1] = { +static const arc arcs_3_4[1] = { {0, 4}, }; -static arc arcs_3_5[1] = { +static const arc arcs_3_5[1] = { {2, 4}, }; -static arc arcs_3_6[1] = { +static const arc arcs_3_6[1] = { {50, 5}, }; static state states_3[7] = { @@ -77,10 +77,10 @@ static state states_3[7] = { {1, arcs_3_5}, {1, arcs_3_6}, }; -static arc arcs_4_0[1] = { +static const arc arcs_4_0[1] = { {48, 1}, }; -static arc arcs_4_1[2] = { +static const arc arcs_4_1[2] = { {48, 1}, {0, 1}, }; @@ -88,15 +88,15 @@ static state states_4[2] = { {1, arcs_4_0}, {2, arcs_4_1}, }; -static arc arcs_5_0[1] = { +static const arc arcs_5_0[1] = { {52, 1}, }; -static arc arcs_5_1[3] = { +static const arc arcs_5_1[3] = { {54, 2}, {55, 2}, {56, 2}, }; -static arc arcs_5_2[1] = { +static const arc arcs_5_2[1] = { {0, 2}, }; static state states_5[3] = { @@ -104,13 +104,13 @@ static state states_5[3] = { {3, arcs_5_1}, {1, arcs_5_2}, }; -static arc arcs_6_0[1] = { +static const arc arcs_6_0[1] = { {38, 1}, }; -static arc arcs_6_1[1] = { +static const arc arcs_6_1[1] = { {56, 2}, }; -static arc arcs_6_2[1] = { +static const arc arcs_6_2[1] = { {0, 2}, }; static state states_6[3] = { @@ -118,33 +118,33 @@ static state states_6[3] = { {1, arcs_6_1}, {1, arcs_6_2}, }; -static arc arcs_7_0[1] = { +static const arc arcs_7_0[1] = { {19, 1}, }; -static arc arcs_7_1[1] = { +static const arc arcs_7_1[1] = { {40, 2}, }; -static arc arcs_7_2[1] = { +static const arc arcs_7_2[1] = { {57, 3}, }; -static arc arcs_7_3[2] = { +static const arc arcs_7_3[2] = { {58, 4}, {59, 5}, }; -static arc arcs_7_4[1] = { +static const arc arcs_7_4[1] = { {60, 6}, }; -static arc arcs_7_5[2] = { +static const arc arcs_7_5[2] = { {61, 7}, {62, 8}, }; -static arc arcs_7_6[1] = { +static const arc arcs_7_6[1] = { {59, 5}, }; -static arc arcs_7_7[1] = { +static const arc arcs_7_7[1] = { {62, 8}, }; -static arc arcs_7_8[1] = { +static const arc arcs_7_8[1] = { {0, 8}, }; static state states_7[9] = { @@ -158,17 +158,17 @@ static state states_7[9] = { {1, arcs_7_7}, {1, arcs_7_8}, }; -static arc arcs_8_0[1] = { +static const arc arcs_8_0[1] = { {5, 1}, }; -static arc arcs_8_1[2] = { +static const arc arcs_8_1[2] = { {50, 2}, {63, 3}, }; -static arc arcs_8_2[1] = { +static const arc arcs_8_2[1] = { {0, 2}, }; -static arc arcs_8_3[1] = { +static const arc arcs_8_3[1] = { {50, 2}, }; static state states_8[4] = { @@ -177,113 +177,113 @@ static state states_8[4] = { {1, arcs_8_2}, {1, arcs_8_3}, }; -static 
arc arcs_9_0[3] = { +static const arc arcs_9_0[3] = { {6, 1}, {64, 2}, {65, 3}, }; -static arc arcs_9_1[4] = { +static const arc arcs_9_1[4] = { {66, 4}, {61, 5}, {65, 6}, {0, 1}, }; -static arc arcs_9_2[1] = { +static const arc arcs_9_2[1] = { {65, 7}, }; -static arc arcs_9_3[4] = { +static const arc arcs_9_3[4] = { {66, 8}, {67, 9}, {61, 5}, {0, 3}, }; -static arc arcs_9_4[4] = { +static const arc arcs_9_4[4] = { {64, 2}, {61, 10}, {65, 11}, {0, 4}, }; -static arc arcs_9_5[1] = { +static const arc arcs_9_5[1] = { {0, 5}, }; -static arc arcs_9_6[3] = { +static const arc arcs_9_6[3] = { {66, 4}, {61, 5}, {0, 6}, }; -static arc arcs_9_7[3] = { +static const arc arcs_9_7[3] = { {66, 12}, {61, 5}, {0, 7}, }; -static arc arcs_9_8[5] = { +static const arc arcs_9_8[5] = { {6, 13}, {64, 2}, {61, 14}, {65, 3}, {0, 8}, }; -static arc arcs_9_9[1] = { +static const arc arcs_9_9[1] = { {60, 15}, }; -static arc arcs_9_10[3] = { +static const arc arcs_9_10[3] = { {64, 2}, {65, 11}, {0, 10}, }; -static arc arcs_9_11[4] = { +static const arc arcs_9_11[4] = { {66, 4}, {67, 16}, {61, 5}, {0, 11}, }; -static arc arcs_9_12[2] = { +static const arc arcs_9_12[2] = { {61, 5}, {0, 12}, }; -static arc arcs_9_13[4] = { +static const arc arcs_9_13[4] = { {66, 17}, {61, 5}, {65, 18}, {0, 13}, }; -static arc arcs_9_14[4] = { +static const arc arcs_9_14[4] = { {6, 13}, {64, 2}, {65, 3}, {0, 14}, }; -static arc arcs_9_15[3] = { +static const arc arcs_9_15[3] = { {66, 8}, {61, 5}, {0, 15}, }; -static arc arcs_9_16[1] = { +static const arc arcs_9_16[1] = { {60, 6}, }; -static arc arcs_9_17[4] = { +static const arc arcs_9_17[4] = { {64, 2}, {61, 19}, {65, 20}, {0, 17}, }; -static arc arcs_9_18[3] = { +static const arc arcs_9_18[3] = { {66, 17}, {61, 5}, {0, 18}, }; -static arc arcs_9_19[3] = { +static const arc arcs_9_19[3] = { {64, 2}, {65, 20}, {0, 19}, }; -static arc arcs_9_20[4] = { +static const arc arcs_9_20[4] = { {66, 17}, {67, 21}, {61, 5}, {0, 20}, }; -static arc arcs_9_21[1] = { +static const arc arcs_9_21[1] = { {60, 18}, }; static state states_9[22] = { @@ -310,17 +310,17 @@ static state states_9[22] = { {4, arcs_9_20}, {1, arcs_9_21}, }; -static arc arcs_10_0[1] = { +static const arc arcs_10_0[1] = { {40, 1}, }; -static arc arcs_10_1[2] = { +static const arc arcs_10_1[2] = { {59, 2}, {0, 1}, }; -static arc arcs_10_2[1] = { +static const arc arcs_10_2[1] = { {60, 3}, }; -static arc arcs_10_3[1] = { +static const arc arcs_10_3[1] = { {0, 3}, }; static state states_10[4] = { @@ -329,81 +329,81 @@ static state states_10[4] = { {1, arcs_10_2}, {1, arcs_10_3}, }; -static arc arcs_11_0[3] = { +static const arc arcs_11_0[3] = { {6, 1}, {64, 2}, {69, 3}, }; -static arc arcs_11_1[3] = { +static const arc arcs_11_1[3] = { {66, 4}, {69, 5}, {0, 1}, }; -static arc arcs_11_2[1] = { +static const arc arcs_11_2[1] = { {69, 6}, }; -static arc arcs_11_3[3] = { +static const arc arcs_11_3[3] = { {66, 7}, {67, 8}, {0, 3}, }; -static arc arcs_11_4[3] = { +static const arc arcs_11_4[3] = { {64, 2}, {69, 9}, {0, 4}, }; -static arc arcs_11_5[2] = { +static const arc arcs_11_5[2] = { {66, 4}, {0, 5}, }; -static arc arcs_11_6[2] = { +static const arc arcs_11_6[2] = { {66, 10}, {0, 6}, }; -static arc arcs_11_7[4] = { +static const arc arcs_11_7[4] = { {6, 11}, {64, 2}, {69, 3}, {0, 7}, }; -static arc arcs_11_8[1] = { +static const arc arcs_11_8[1] = { {60, 12}, }; -static arc arcs_11_9[3] = { +static const arc arcs_11_9[3] = { {66, 4}, {67, 13}, {0, 9}, }; -static arc arcs_11_10[1] = { +static const arc arcs_11_10[1] = { {0, 10}, }; 
-static arc arcs_11_11[3] = { +static const arc arcs_11_11[3] = { {66, 14}, {69, 15}, {0, 11}, }; -static arc arcs_11_12[2] = { +static const arc arcs_11_12[2] = { {66, 7}, {0, 12}, }; -static arc arcs_11_13[1] = { +static const arc arcs_11_13[1] = { {60, 5}, }; -static arc arcs_11_14[3] = { +static const arc arcs_11_14[3] = { {64, 2}, {69, 16}, {0, 14}, }; -static arc arcs_11_15[2] = { +static const arc arcs_11_15[2] = { {66, 14}, {0, 15}, }; -static arc arcs_11_16[3] = { +static const arc arcs_11_16[3] = { {66, 14}, {67, 17}, {0, 16}, }; -static arc arcs_11_17[1] = { +static const arc arcs_11_17[1] = { {60, 15}, }; static state states_11[18] = { @@ -426,39 +426,39 @@ static state states_11[18] = { {3, arcs_11_16}, {1, arcs_11_17}, }; -static arc arcs_12_0[1] = { +static const arc arcs_12_0[1] = { {40, 1}, }; -static arc arcs_12_1[1] = { +static const arc arcs_12_1[1] = { {0, 1}, }; static state states_12[2] = { {1, arcs_12_0}, {1, arcs_12_1}, }; -static arc arcs_13_0[2] = { +static const arc arcs_13_0[2] = { {3, 1}, {4, 1}, }; -static arc arcs_13_1[1] = { +static const arc arcs_13_1[1] = { {0, 1}, }; static state states_13[2] = { {2, arcs_13_0}, {1, arcs_13_1}, }; -static arc arcs_14_0[1] = { +static const arc arcs_14_0[1] = { {70, 1}, }; -static arc arcs_14_1[2] = { +static const arc arcs_14_1[2] = { {71, 2}, {2, 3}, }; -static arc arcs_14_2[2] = { +static const arc arcs_14_2[2] = { {2, 3}, {70, 1}, }; -static arc arcs_14_3[1] = { +static const arc arcs_14_3[1] = { {0, 3}, }; static state states_14[4] = { @@ -467,7 +467,7 @@ static state states_14[4] = { {2, arcs_14_2}, {1, arcs_14_3}, }; -static arc arcs_15_0[8] = { +static const arc arcs_15_0[8] = { {72, 1}, {73, 1}, {74, 1}, @@ -477,34 +477,34 @@ static arc arcs_15_0[8] = { {78, 1}, {79, 1}, }; -static arc arcs_15_1[1] = { +static const arc arcs_15_1[1] = { {0, 1}, }; static state states_15[2] = { {8, arcs_15_0}, {1, arcs_15_1}, }; -static arc arcs_16_0[1] = { +static const arc arcs_16_0[1] = { {80, 1}, }; -static arc arcs_16_1[4] = { +static const arc arcs_16_1[4] = { {67, 2}, {81, 3}, {82, 4}, {0, 1}, }; -static arc arcs_16_2[2] = { +static const arc arcs_16_2[2] = { {80, 5}, {83, 5}, }; -static arc arcs_16_3[1] = { +static const arc arcs_16_3[1] = { {0, 3}, }; -static arc arcs_16_4[2] = { +static const arc arcs_16_4[2] = { {47, 3}, {83, 3}, }; -static arc arcs_16_5[3] = { +static const arc arcs_16_5[3] = { {67, 2}, {61, 3}, {0, 5}, @@ -517,21 +517,21 @@ static state states_16[6] = { {2, arcs_16_4}, {3, arcs_16_5}, }; -static arc arcs_17_0[1] = { +static const arc arcs_17_0[1] = { {59, 1}, }; -static arc arcs_17_1[1] = { +static const arc arcs_17_1[1] = { {60, 2}, }; -static arc arcs_17_2[2] = { +static const arc arcs_17_2[2] = { {67, 3}, {0, 2}, }; -static arc arcs_17_3[2] = { +static const arc arcs_17_3[2] = { {47, 4}, {83, 4}, }; -static arc arcs_17_4[1] = { +static const arc arcs_17_4[1] = { {0, 4}, }; static state states_17[5] = { @@ -541,15 +541,15 @@ static state states_17[5] = { {2, arcs_17_3}, {1, arcs_17_4}, }; -static arc arcs_18_0[2] = { +static const arc arcs_18_0[2] = { {84, 1}, {60, 1}, }; -static arc arcs_18_1[2] = { +static const arc arcs_18_1[2] = { {66, 2}, {0, 1}, }; -static arc arcs_18_2[3] = { +static const arc arcs_18_2[3] = { {84, 1}, {60, 1}, {0, 2}, @@ -559,7 +559,7 @@ static state states_18[3] = { {2, arcs_18_1}, {3, arcs_18_2}, }; -static arc arcs_19_0[13] = { +static const arc arcs_19_0[13] = { {85, 1}, {86, 1}, {87, 1}, @@ -574,20 +574,20 @@ static arc arcs_19_0[13] = { {96, 1}, {97, 1}, }; -static arc 
arcs_19_1[1] = { +static const arc arcs_19_1[1] = { {0, 1}, }; static state states_19[2] = { {13, arcs_19_0}, {1, arcs_19_1}, }; -static arc arcs_20_0[1] = { +static const arc arcs_20_0[1] = { {20, 1}, }; -static arc arcs_20_1[1] = { +static const arc arcs_20_1[1] = { {98, 2}, }; -static arc arcs_20_2[1] = { +static const arc arcs_20_2[1] = { {0, 2}, }; static state states_20[3] = { @@ -595,58 +595,58 @@ static state states_20[3] = { {1, arcs_20_1}, {1, arcs_20_2}, }; -static arc arcs_21_0[1] = { +static const arc arcs_21_0[1] = { {29, 1}, }; -static arc arcs_21_1[1] = { +static const arc arcs_21_1[1] = { {0, 1}, }; static state states_21[2] = { {1, arcs_21_0}, {1, arcs_21_1}, }; -static arc arcs_22_0[5] = { +static const arc arcs_22_0[5] = { {99, 1}, {100, 1}, {101, 1}, {102, 1}, {103, 1}, }; -static arc arcs_22_1[1] = { +static const arc arcs_22_1[1] = { {0, 1}, }; static state states_22[2] = { {5, arcs_22_0}, {1, arcs_22_1}, }; -static arc arcs_23_0[1] = { +static const arc arcs_23_0[1] = { {16, 1}, }; -static arc arcs_23_1[1] = { +static const arc arcs_23_1[1] = { {0, 1}, }; static state states_23[2] = { {1, arcs_23_0}, {1, arcs_23_1}, }; -static arc arcs_24_0[1] = { +static const arc arcs_24_0[1] = { {18, 1}, }; -static arc arcs_24_1[1] = { +static const arc arcs_24_1[1] = { {0, 1}, }; static state states_24[2] = { {1, arcs_24_0}, {1, arcs_24_1}, }; -static arc arcs_25_0[1] = { +static const arc arcs_25_0[1] = { {31, 1}, }; -static arc arcs_25_1[2] = { +static const arc arcs_25_1[2] = { {80, 2}, {0, 1}, }; -static arc arcs_25_2[1] = { +static const arc arcs_25_2[1] = { {0, 2}, }; static state states_25[3] = { @@ -654,31 +654,31 @@ static state states_25[3] = { {2, arcs_25_1}, {1, arcs_25_2}, }; -static arc arcs_26_0[1] = { +static const arc arcs_26_0[1] = { {83, 1}, }; -static arc arcs_26_1[1] = { +static const arc arcs_26_1[1] = { {0, 1}, }; static state states_26[2] = { {1, arcs_26_0}, {1, arcs_26_1}, }; -static arc arcs_27_0[1] = { +static const arc arcs_27_0[1] = { {30, 1}, }; -static arc arcs_27_1[2] = { +static const arc arcs_27_1[2] = { {60, 2}, {0, 1}, }; -static arc arcs_27_2[2] = { +static const arc arcs_27_2[2] = { {22, 3}, {0, 2}, }; -static arc arcs_27_3[1] = { +static const arc arcs_27_3[1] = { {60, 4}, }; -static arc arcs_27_4[1] = { +static const arc arcs_27_4[1] = { {0, 4}, }; static state states_27[5] = { @@ -688,24 +688,24 @@ static state states_27[5] = { {1, arcs_27_3}, {1, arcs_27_4}, }; -static arc arcs_28_0[2] = { +static const arc arcs_28_0[2] = { {104, 1}, {105, 1}, }; -static arc arcs_28_1[1] = { +static const arc arcs_28_1[1] = { {0, 1}, }; static state states_28[2] = { {2, arcs_28_0}, {1, arcs_28_1}, }; -static arc arcs_29_0[1] = { +static const arc arcs_29_0[1] = { {25, 1}, }; -static arc arcs_29_1[1] = { +static const arc arcs_29_1[1] = { {106, 2}, }; -static arc arcs_29_2[1] = { +static const arc arcs_29_2[1] = { {0, 2}, }; static state states_29[3] = { @@ -713,35 +713,35 @@ static state states_29[3] = { {1, arcs_29_1}, {1, arcs_29_2}, }; -static arc arcs_30_0[1] = { +static const arc arcs_30_0[1] = { {22, 1}, }; -static arc arcs_30_1[3] = { +static const arc arcs_30_1[3] = { {107, 2}, {9, 2}, {49, 3}, }; -static arc arcs_30_2[4] = { +static const arc arcs_30_2[4] = { {107, 2}, {9, 2}, {25, 4}, {49, 3}, }; -static arc arcs_30_3[1] = { +static const arc arcs_30_3[1] = { {25, 4}, }; -static arc arcs_30_4[3] = { +static const arc arcs_30_4[3] = { {5, 5}, {6, 6}, {108, 6}, }; -static arc arcs_30_5[1] = { +static const arc arcs_30_5[1] = { {108, 7}, }; 
-static arc arcs_30_6[1] = { +static const arc arcs_30_6[1] = { {0, 6}, }; -static arc arcs_30_7[1] = { +static const arc arcs_30_7[1] = { {50, 6}, }; static state states_30[8] = { @@ -754,17 +754,17 @@ static state states_30[8] = { {1, arcs_30_6}, {1, arcs_30_7}, }; -static arc arcs_31_0[1] = { +static const arc arcs_31_0[1] = { {40, 1}, }; -static arc arcs_31_1[2] = { +static const arc arcs_31_1[2] = { {110, 2}, {0, 1}, }; -static arc arcs_31_2[1] = { +static const arc arcs_31_2[1] = { {40, 3}, }; -static arc arcs_31_3[1] = { +static const arc arcs_31_3[1] = { {0, 3}, }; static state states_31[4] = { @@ -773,17 +773,17 @@ static state states_31[4] = { {1, arcs_31_2}, {1, arcs_31_3}, }; -static arc arcs_32_0[1] = { +static const arc arcs_32_0[1] = { {49, 1}, }; -static arc arcs_32_1[2] = { +static const arc arcs_32_1[2] = { {110, 2}, {0, 1}, }; -static arc arcs_32_2[1] = { +static const arc arcs_32_2[1] = { {40, 3}, }; -static arc arcs_32_3[1] = { +static const arc arcs_32_3[1] = { {0, 3}, }; static state states_32[4] = { @@ -792,14 +792,14 @@ static state states_32[4] = { {1, arcs_32_2}, {1, arcs_32_3}, }; -static arc arcs_33_0[1] = { +static const arc arcs_33_0[1] = { {109, 1}, }; -static arc arcs_33_1[2] = { +static const arc arcs_33_1[2] = { {66, 2}, {0, 1}, }; -static arc arcs_33_2[2] = { +static const arc arcs_33_2[2] = { {109, 1}, {0, 2}, }; @@ -808,10 +808,10 @@ static state states_33[3] = { {2, arcs_33_1}, {2, arcs_33_2}, }; -static arc arcs_34_0[1] = { +static const arc arcs_34_0[1] = { {111, 1}, }; -static arc arcs_34_1[2] = { +static const arc arcs_34_1[2] = { {66, 0}, {0, 1}, }; @@ -819,10 +819,10 @@ static state states_34[2] = { {1, arcs_34_0}, {2, arcs_34_1}, }; -static arc arcs_35_0[1] = { +static const arc arcs_35_0[1] = { {40, 1}, }; -static arc arcs_35_1[2] = { +static const arc arcs_35_1[2] = { {107, 0}, {0, 1}, }; @@ -830,13 +830,13 @@ static state states_35[2] = { {1, arcs_35_0}, {2, arcs_35_1}, }; -static arc arcs_36_0[1] = { +static const arc arcs_36_0[1] = { {23, 1}, }; -static arc arcs_36_1[1] = { +static const arc arcs_36_1[1] = { {40, 2}, }; -static arc arcs_36_2[2] = { +static const arc arcs_36_2[2] = { {66, 1}, {0, 2}, }; @@ -845,13 +845,13 @@ static state states_36[3] = { {1, arcs_36_1}, {2, arcs_36_2}, }; -static arc arcs_37_0[1] = { +static const arc arcs_37_0[1] = { {27, 1}, }; -static arc arcs_37_1[1] = { +static const arc arcs_37_1[1] = { {40, 2}, }; -static arc arcs_37_2[2] = { +static const arc arcs_37_2[2] = { {66, 1}, {0, 2}, }; @@ -860,20 +860,20 @@ static state states_37[3] = { {1, arcs_37_1}, {2, arcs_37_2}, }; -static arc arcs_38_0[1] = { +static const arc arcs_38_0[1] = { {15, 1}, }; -static arc arcs_38_1[1] = { +static const arc arcs_38_1[1] = { {60, 2}, }; -static arc arcs_38_2[2] = { +static const arc arcs_38_2[2] = { {66, 3}, {0, 2}, }; -static arc arcs_38_3[1] = { +static const arc arcs_38_3[1] = { {60, 4}, }; -static arc arcs_38_4[1] = { +static const arc arcs_38_4[1] = { {0, 4}, }; static state states_38[5] = { @@ -883,7 +883,7 @@ static state states_38[5] = { {1, arcs_38_3}, {1, arcs_38_4}, }; -static arc arcs_39_0[9] = { +static const arc arcs_39_0[9] = { {112, 1}, {55, 1}, {53, 1}, @@ -894,22 +894,22 @@ static arc arcs_39_0[9] = { {116, 1}, {117, 1}, }; -static arc arcs_39_1[1] = { +static const arc arcs_39_1[1] = { {0, 1}, }; static state states_39[2] = { {9, arcs_39_0}, {1, arcs_39_1}, }; -static arc arcs_40_0[1] = { +static const arc arcs_40_0[1] = { {38, 1}, }; -static arc arcs_40_1[3] = { +static const arc arcs_40_1[3] = { {113, 
2}, {56, 2}, {117, 2}, }; -static arc arcs_40_2[1] = { +static const arc arcs_40_2[1] = { {0, 2}, }; static state states_40[3] = { @@ -917,30 +917,30 @@ static state states_40[3] = { {3, arcs_40_1}, {1, arcs_40_2}, }; -static arc arcs_41_0[1] = { +static const arc arcs_41_0[1] = { {24, 1}, }; -static arc arcs_41_1[1] = { +static const arc arcs_41_1[1] = { {118, 2}, }; -static arc arcs_41_2[1] = { +static const arc arcs_41_2[1] = { {59, 3}, }; -static arc arcs_41_3[1] = { +static const arc arcs_41_3[1] = { {119, 4}, }; -static arc arcs_41_4[3] = { +static const arc arcs_41_4[3] = { {120, 1}, {121, 5}, {0, 4}, }; -static arc arcs_41_5[1] = { +static const arc arcs_41_5[1] = { {59, 6}, }; -static arc arcs_41_6[1] = { +static const arc arcs_41_6[1] = { {119, 7}, }; -static arc arcs_41_7[1] = { +static const arc arcs_41_7[1] = { {0, 7}, }; static state states_41[8] = { @@ -953,29 +953,29 @@ static state states_41[8] = { {1, arcs_41_6}, {1, arcs_41_7}, }; -static arc arcs_42_0[1] = { +static const arc arcs_42_0[1] = { {33, 1}, }; -static arc arcs_42_1[1] = { +static const arc arcs_42_1[1] = { {118, 2}, }; -static arc arcs_42_2[1] = { +static const arc arcs_42_2[1] = { {59, 3}, }; -static arc arcs_42_3[1] = { +static const arc arcs_42_3[1] = { {119, 4}, }; -static arc arcs_42_4[2] = { +static const arc arcs_42_4[2] = { {121, 5}, {0, 4}, }; -static arc arcs_42_5[1] = { +static const arc arcs_42_5[1] = { {59, 6}, }; -static arc arcs_42_6[1] = { +static const arc arcs_42_6[1] = { {119, 7}, }; -static arc arcs_42_7[1] = { +static const arc arcs_42_7[1] = { {0, 7}, }; static state states_42[8] = { @@ -988,39 +988,39 @@ static state states_42[8] = { {1, arcs_42_6}, {1, arcs_42_7}, }; -static arc arcs_43_0[1] = { +static const arc arcs_43_0[1] = { {21, 1}, }; -static arc arcs_43_1[1] = { +static const arc arcs_43_1[1] = { {98, 2}, }; -static arc arcs_43_2[1] = { +static const arc arcs_43_2[1] = { {122, 3}, }; -static arc arcs_43_3[1] = { +static const arc arcs_43_3[1] = { {47, 4}, }; -static arc arcs_43_4[1] = { +static const arc arcs_43_4[1] = { {59, 5}, }; -static arc arcs_43_5[2] = { +static const arc arcs_43_5[2] = { {61, 6}, {119, 7}, }; -static arc arcs_43_6[1] = { +static const arc arcs_43_6[1] = { {119, 7}, }; -static arc arcs_43_7[2] = { +static const arc arcs_43_7[2] = { {121, 8}, {0, 7}, }; -static arc arcs_43_8[1] = { +static const arc arcs_43_8[1] = { {59, 9}, }; -static arc arcs_43_9[1] = { +static const arc arcs_43_9[1] = { {119, 10}, }; -static arc arcs_43_10[1] = { +static const arc arcs_43_10[1] = { {0, 10}, }; static state states_43[11] = { @@ -1036,47 +1036,47 @@ static state states_43[11] = { {1, arcs_43_9}, {1, arcs_43_10}, }; -static arc arcs_44_0[1] = { +static const arc arcs_44_0[1] = { {32, 1}, }; -static arc arcs_44_1[1] = { +static const arc arcs_44_1[1] = { {59, 2}, }; -static arc arcs_44_2[1] = { +static const arc arcs_44_2[1] = { {119, 3}, }; -static arc arcs_44_3[2] = { +static const arc arcs_44_3[2] = { {123, 4}, {124, 5}, }; -static arc arcs_44_4[1] = { +static const arc arcs_44_4[1] = { {59, 6}, }; -static arc arcs_44_5[1] = { +static const arc arcs_44_5[1] = { {59, 7}, }; -static arc arcs_44_6[1] = { +static const arc arcs_44_6[1] = { {119, 8}, }; -static arc arcs_44_7[1] = { +static const arc arcs_44_7[1] = { {119, 9}, }; -static arc arcs_44_8[1] = { +static const arc arcs_44_8[1] = { {0, 8}, }; -static arc arcs_44_9[4] = { +static const arc arcs_44_9[4] = { {121, 10}, {123, 4}, {124, 5}, {0, 9}, }; -static arc arcs_44_10[1] = { +static const arc arcs_44_10[1] = { 
{59, 11}, }; -static arc arcs_44_11[1] = { +static const arc arcs_44_11[1] = { {119, 12}, }; -static arc arcs_44_12[2] = { +static const arc arcs_44_12[2] = { {123, 4}, {0, 12}, }; @@ -1095,24 +1095,24 @@ static state states_44[13] = { {1, arcs_44_11}, {2, arcs_44_12}, }; -static arc arcs_45_0[1] = { +static const arc arcs_45_0[1] = { {34, 1}, }; -static arc arcs_45_1[1] = { +static const arc arcs_45_1[1] = { {125, 2}, }; -static arc arcs_45_2[2] = { +static const arc arcs_45_2[2] = { {66, 1}, {59, 3}, }; -static arc arcs_45_3[2] = { +static const arc arcs_45_3[2] = { {61, 4}, {119, 5}, }; -static arc arcs_45_4[1] = { +static const arc arcs_45_4[1] = { {119, 5}, }; -static arc arcs_45_5[1] = { +static const arc arcs_45_5[1] = { {0, 5}, }; static state states_45[6] = { @@ -1123,17 +1123,17 @@ static state states_45[6] = { {1, arcs_45_4}, {1, arcs_45_5}, }; -static arc arcs_46_0[1] = { +static const arc arcs_46_0[1] = { {60, 1}, }; -static arc arcs_46_1[2] = { +static const arc arcs_46_1[2] = { {110, 2}, {0, 1}, }; -static arc arcs_46_2[1] = { +static const arc arcs_46_2[1] = { {126, 3}, }; -static arc arcs_46_3[1] = { +static const arc arcs_46_3[1] = { {0, 3}, }; static state states_46[4] = { @@ -1142,21 +1142,21 @@ static state states_46[4] = { {1, arcs_46_2}, {1, arcs_46_3}, }; -static arc arcs_47_0[1] = { +static const arc arcs_47_0[1] = { {127, 1}, }; -static arc arcs_47_1[2] = { +static const arc arcs_47_1[2] = { {60, 2}, {0, 1}, }; -static arc arcs_47_2[2] = { +static const arc arcs_47_2[2] = { {110, 3}, {0, 2}, }; -static arc arcs_47_3[1] = { +static const arc arcs_47_3[1] = { {40, 4}, }; -static arc arcs_47_4[1] = { +static const arc arcs_47_4[1] = { {0, 4}, }; static state states_47[5] = { @@ -1166,20 +1166,20 @@ static state states_47[5] = { {1, arcs_47_3}, {1, arcs_47_4}, }; -static arc arcs_48_0[2] = { +static const arc arcs_48_0[2] = { {2, 1}, {4, 2}, }; -static arc arcs_48_1[1] = { +static const arc arcs_48_1[1] = { {128, 3}, }; -static arc arcs_48_2[1] = { +static const arc arcs_48_2[1] = { {0, 2}, }; -static arc arcs_48_3[1] = { +static const arc arcs_48_3[1] = { {45, 4}, }; -static arc arcs_48_4[2] = { +static const arc arcs_48_4[2] = { {129, 2}, {45, 4}, }; @@ -1190,17 +1190,17 @@ static state states_48[5] = { {1, arcs_48_3}, {2, arcs_48_4}, }; -static arc arcs_49_0[1] = { +static const arc arcs_49_0[1] = { {60, 1}, }; -static arc arcs_49_1[2] = { +static const arc arcs_49_1[2] = { {130, 2}, {0, 1}, }; -static arc arcs_49_2[1] = { +static const arc arcs_49_2[1] = { {60, 3}, }; -static arc arcs_49_3[1] = { +static const arc arcs_49_3[1] = { {0, 3}, }; static state states_49[4] = { @@ -1209,24 +1209,24 @@ static state states_49[4] = { {1, arcs_49_2}, {1, arcs_49_3}, }; -static arc arcs_50_0[2] = { +static const arc arcs_50_0[2] = { {131, 1}, {132, 2}, }; -static arc arcs_50_1[1] = { +static const arc arcs_50_1[1] = { {0, 1}, }; -static arc arcs_50_2[2] = { +static const arc arcs_50_2[2] = { {24, 3}, {0, 2}, }; -static arc arcs_50_3[1] = { +static const arc arcs_50_3[1] = { {132, 4}, }; -static arc arcs_50_4[1] = { +static const arc arcs_50_4[1] = { {121, 5}, }; -static arc arcs_50_5[1] = { +static const arc arcs_50_5[1] = { {60, 1}, }; static state states_50[6] = { @@ -1237,31 +1237,31 @@ static state states_50[6] = { {1, arcs_50_4}, {1, arcs_50_5}, }; -static arc arcs_51_0[2] = { +static const arc arcs_51_0[2] = { {134, 1}, {132, 1}, }; -static arc arcs_51_1[1] = { +static const arc arcs_51_1[1] = { {0, 1}, }; static state states_51[2] = { {2, arcs_51_0}, {1, arcs_51_1}, }; 
-static arc arcs_52_0[1] = { +static const arc arcs_52_0[1] = { {26, 1}, }; -static arc arcs_52_1[2] = { +static const arc arcs_52_1[2] = { {59, 2}, {68, 3}, }; -static arc arcs_52_2[1] = { +static const arc arcs_52_2[1] = { {60, 4}, }; -static arc arcs_52_3[1] = { +static const arc arcs_52_3[1] = { {59, 2}, }; -static arc arcs_52_4[1] = { +static const arc arcs_52_4[1] = { {0, 4}, }; static state states_52[5] = { @@ -1271,20 +1271,20 @@ static state states_52[5] = { {1, arcs_52_3}, {1, arcs_52_4}, }; -static arc arcs_53_0[1] = { +static const arc arcs_53_0[1] = { {26, 1}, }; -static arc arcs_53_1[2] = { +static const arc arcs_53_1[2] = { {59, 2}, {68, 3}, }; -static arc arcs_53_2[1] = { +static const arc arcs_53_2[1] = { {133, 4}, }; -static arc arcs_53_3[1] = { +static const arc arcs_53_3[1] = { {59, 2}, }; -static arc arcs_53_4[1] = { +static const arc arcs_53_4[1] = { {0, 4}, }; static state states_53[5] = { @@ -1294,10 +1294,10 @@ static state states_53[5] = { {1, arcs_53_3}, {1, arcs_53_4}, }; -static arc arcs_54_0[1] = { +static const arc arcs_54_0[1] = { {135, 1}, }; -static arc arcs_54_1[2] = { +static const arc arcs_54_1[2] = { {136, 0}, {0, 1}, }; @@ -1305,10 +1305,10 @@ static state states_54[2] = { {1, arcs_54_0}, {2, arcs_54_1}, }; -static arc arcs_55_0[1] = { +static const arc arcs_55_0[1] = { {137, 1}, }; -static arc arcs_55_1[2] = { +static const arc arcs_55_1[2] = { {138, 0}, {0, 1}, }; @@ -1316,14 +1316,14 @@ static state states_55[2] = { {1, arcs_55_0}, {2, arcs_55_1}, }; -static arc arcs_56_0[2] = { +static const arc arcs_56_0[2] = { {28, 1}, {139, 2}, }; -static arc arcs_56_1[1] = { +static const arc arcs_56_1[1] = { {137, 2}, }; -static arc arcs_56_2[1] = { +static const arc arcs_56_2[1] = { {0, 2}, }; static state states_56[3] = { @@ -1331,10 +1331,10 @@ static state states_56[3] = { {1, arcs_56_1}, {1, arcs_56_2}, }; -static arc arcs_57_0[1] = { +static const arc arcs_57_0[1] = { {126, 1}, }; -static arc arcs_57_1[2] = { +static const arc arcs_57_1[2] = { {140, 0}, {0, 1}, }; @@ -1342,7 +1342,7 @@ static state states_57[2] = { {1, arcs_57_0}, {2, arcs_57_1}, }; -static arc arcs_58_0[10] = { +static const arc arcs_58_0[10] = { {141, 1}, {142, 1}, {143, 1}, @@ -1354,14 +1354,14 @@ static arc arcs_58_0[10] = { {147, 2}, {28, 3}, }; -static arc arcs_58_1[1] = { +static const arc arcs_58_1[1] = { {0, 1}, }; -static arc arcs_58_2[2] = { +static const arc arcs_58_2[2] = { {28, 1}, {0, 2}, }; -static arc arcs_58_3[1] = { +static const arc arcs_58_3[1] = { {122, 1}, }; static state states_58[4] = { @@ -1370,13 +1370,13 @@ static state states_58[4] = { {2, arcs_58_2}, {1, arcs_58_3}, }; -static arc arcs_59_0[1] = { +static const arc arcs_59_0[1] = { {6, 1}, }; -static arc arcs_59_1[1] = { +static const arc arcs_59_1[1] = { {126, 2}, }; -static arc arcs_59_2[1] = { +static const arc arcs_59_2[1] = { {0, 2}, }; static state states_59[3] = { @@ -1384,10 +1384,10 @@ static state states_59[3] = { {1, arcs_59_1}, {1, arcs_59_2}, }; -static arc arcs_60_0[1] = { +static const arc arcs_60_0[1] = { {148, 1}, }; -static arc arcs_60_1[2] = { +static const arc arcs_60_1[2] = { {149, 0}, {0, 1}, }; @@ -1395,10 +1395,10 @@ static state states_60[2] = { {1, arcs_60_0}, {2, arcs_60_1}, }; -static arc arcs_61_0[1] = { +static const arc arcs_61_0[1] = { {150, 1}, }; -static arc arcs_61_1[2] = { +static const arc arcs_61_1[2] = { {151, 0}, {0, 1}, }; @@ -1406,10 +1406,10 @@ static state states_61[2] = { {1, arcs_61_0}, {2, arcs_61_1}, }; -static arc arcs_62_0[1] = { +static const arc 
arcs_62_0[1] = { {152, 1}, }; -static arc arcs_62_1[2] = { +static const arc arcs_62_1[2] = { {153, 0}, {0, 1}, }; @@ -1417,10 +1417,10 @@ static state states_62[2] = { {1, arcs_62_0}, {2, arcs_62_1}, }; -static arc arcs_63_0[1] = { +static const arc arcs_63_0[1] = { {154, 1}, }; -static arc arcs_63_1[3] = { +static const arc arcs_63_1[3] = { {155, 0}, {156, 0}, {0, 1}, @@ -1429,10 +1429,10 @@ static state states_63[2] = { {1, arcs_63_0}, {3, arcs_63_1}, }; -static arc arcs_64_0[1] = { +static const arc arcs_64_0[1] = { {157, 1}, }; -static arc arcs_64_1[3] = { +static const arc arcs_64_1[3] = { {7, 0}, {8, 0}, {0, 1}, @@ -1441,10 +1441,10 @@ static state states_64[2] = { {1, arcs_64_0}, {3, arcs_64_1}, }; -static arc arcs_65_0[1] = { +static const arc arcs_65_0[1] = { {158, 1}, }; -static arc arcs_65_1[6] = { +static const arc arcs_65_1[6] = { {159, 0}, {6, 0}, {160, 0}, @@ -1456,16 +1456,16 @@ static state states_65[2] = { {1, arcs_65_0}, {6, arcs_65_1}, }; -static arc arcs_66_0[4] = { +static const arc arcs_66_0[4] = { {7, 1}, {8, 1}, {37, 1}, {162, 2}, }; -static arc arcs_66_1[1] = { +static const arc arcs_66_1[1] = { {158, 2}, }; -static arc arcs_66_2[1] = { +static const arc arcs_66_2[1] = { {0, 2}, }; static state states_66[3] = { @@ -1473,17 +1473,17 @@ static state states_66[3] = { {1, arcs_66_1}, {1, arcs_66_2}, }; -static arc arcs_67_0[1] = { +static const arc arcs_67_0[1] = { {163, 1}, }; -static arc arcs_67_1[2] = { +static const arc arcs_67_1[2] = { {64, 2}, {0, 1}, }; -static arc arcs_67_2[1] = { +static const arc arcs_67_2[1] = { {158, 3}, }; -static arc arcs_67_3[1] = { +static const arc arcs_67_3[1] = { {0, 3}, }; static state states_67[4] = { @@ -1492,14 +1492,14 @@ static state states_67[4] = { {1, arcs_67_2}, {1, arcs_67_3}, }; -static arc arcs_68_0[2] = { +static const arc arcs_68_0[2] = { {39, 1}, {164, 2}, }; -static arc arcs_68_1[1] = { +static const arc arcs_68_1[1] = { {164, 2}, }; -static arc arcs_68_2[2] = { +static const arc arcs_68_2[2] = { {165, 2}, {0, 2}, }; @@ -1508,7 +1508,7 @@ static state states_68[3] = { {1, arcs_68_1}, {2, arcs_68_2}, }; -static arc arcs_69_0[10] = { +static const arc arcs_69_0[10] = { {5, 1}, {9, 2}, {11, 2}, @@ -1520,33 +1520,33 @@ static arc arcs_69_0[10] = { {41, 2}, {42, 5}, }; -static arc arcs_69_1[3] = { +static const arc arcs_69_1[3] = { {50, 2}, {166, 6}, {83, 6}, }; -static arc arcs_69_2[1] = { +static const arc arcs_69_2[1] = { {0, 2}, }; -static arc arcs_69_3[2] = { +static const arc arcs_69_3[2] = { {167, 2}, {166, 7}, }; -static arc arcs_69_4[2] = { +static const arc arcs_69_4[2] = { {168, 2}, {169, 8}, }; -static arc arcs_69_5[2] = { +static const arc arcs_69_5[2] = { {42, 5}, {0, 5}, }; -static arc arcs_69_6[1] = { +static const arc arcs_69_6[1] = { {50, 2}, }; -static arc arcs_69_7[1] = { +static const arc arcs_69_7[1] = { {167, 2}, }; -static arc arcs_69_8[1] = { +static const arc arcs_69_8[1] = { {168, 2}, }; static state states_69[9] = { @@ -1560,24 +1560,24 @@ static state states_69[9] = { {1, arcs_69_7}, {1, arcs_69_8}, }; -static arc arcs_70_0[2] = { +static const arc arcs_70_0[2] = { {118, 1}, {84, 1}, }; -static arc arcs_70_1[3] = { +static const arc arcs_70_1[3] = { {66, 2}, {170, 3}, {0, 1}, }; -static arc arcs_70_2[3] = { +static const arc arcs_70_2[3] = { {118, 4}, {84, 4}, {0, 2}, }; -static arc arcs_70_3[1] = { +static const arc arcs_70_3[1] = { {0, 3}, }; -static arc arcs_70_4[2] = { +static const arc arcs_70_4[2] = { {66, 2}, {0, 4}, }; @@ -1588,28 +1588,28 @@ static state states_70[5] = { {1, 
arcs_70_3}, {2, arcs_70_4}, }; -static arc arcs_71_0[3] = { +static const arc arcs_71_0[3] = { {5, 1}, {107, 2}, {14, 3}, }; -static arc arcs_71_1[2] = { +static const arc arcs_71_1[2] = { {50, 4}, {51, 5}, }; -static arc arcs_71_2[1] = { +static const arc arcs_71_2[1] = { {40, 4}, }; -static arc arcs_71_3[1] = { +static const arc arcs_71_3[1] = { {171, 6}, }; -static arc arcs_71_4[1] = { +static const arc arcs_71_4[1] = { {0, 4}, }; -static arc arcs_71_5[1] = { +static const arc arcs_71_5[1] = { {50, 4}, }; -static arc arcs_71_6[1] = { +static const arc arcs_71_6[1] = { {167, 4}, }; static state states_71[7] = { @@ -1621,14 +1621,14 @@ static state states_71[7] = { {1, arcs_71_5}, {1, arcs_71_6}, }; -static arc arcs_72_0[1] = { +static const arc arcs_72_0[1] = { {172, 1}, }; -static arc arcs_72_1[2] = { +static const arc arcs_72_1[2] = { {66, 2}, {0, 1}, }; -static arc arcs_72_2[2] = { +static const arc arcs_72_2[2] = { {172, 1}, {0, 2}, }; @@ -1637,23 +1637,23 @@ static state states_72[3] = { {2, arcs_72_1}, {2, arcs_72_2}, }; -static arc arcs_73_0[2] = { +static const arc arcs_73_0[2] = { {59, 1}, {60, 2}, }; -static arc arcs_73_1[3] = { +static const arc arcs_73_1[3] = { {173, 3}, {60, 4}, {0, 1}, }; -static arc arcs_73_2[2] = { +static const arc arcs_73_2[2] = { {59, 1}, {0, 2}, }; -static arc arcs_73_3[1] = { +static const arc arcs_73_3[1] = { {0, 3}, }; -static arc arcs_73_4[2] = { +static const arc arcs_73_4[2] = { {173, 3}, {0, 4}, }; @@ -1664,14 +1664,14 @@ static state states_73[5] = { {1, arcs_73_3}, {2, arcs_73_4}, }; -static arc arcs_74_0[1] = { +static const arc arcs_74_0[1] = { {59, 1}, }; -static arc arcs_74_1[2] = { +static const arc arcs_74_1[2] = { {60, 2}, {0, 1}, }; -static arc arcs_74_2[1] = { +static const arc arcs_74_2[1] = { {0, 2}, }; static state states_74[3] = { @@ -1679,15 +1679,15 @@ static state states_74[3] = { {2, arcs_74_1}, {1, arcs_74_2}, }; -static arc arcs_75_0[2] = { +static const arc arcs_75_0[2] = { {126, 1}, {84, 1}, }; -static arc arcs_75_1[2] = { +static const arc arcs_75_1[2] = { {66, 2}, {0, 1}, }; -static arc arcs_75_2[3] = { +static const arc arcs_75_2[3] = { {126, 1}, {84, 1}, {0, 2}, @@ -1697,14 +1697,14 @@ static state states_75[3] = { {2, arcs_75_1}, {3, arcs_75_2}, }; -static arc arcs_76_0[1] = { +static const arc arcs_76_0[1] = { {60, 1}, }; -static arc arcs_76_1[2] = { +static const arc arcs_76_1[2] = { {66, 2}, {0, 1}, }; -static arc arcs_76_2[2] = { +static const arc arcs_76_2[2] = { {60, 1}, {0, 2}, }; @@ -1713,61 +1713,61 @@ static state states_76[3] = { {2, arcs_76_1}, {2, arcs_76_2}, }; -static arc arcs_77_0[3] = { +static const arc arcs_77_0[3] = { {64, 1}, {84, 2}, {60, 3}, }; -static arc arcs_77_1[1] = { +static const arc arcs_77_1[1] = { {126, 4}, }; -static arc arcs_77_2[3] = { +static const arc arcs_77_2[3] = { {66, 5}, {170, 6}, {0, 2}, }; -static arc arcs_77_3[4] = { +static const arc arcs_77_3[4] = { {66, 5}, {59, 7}, {170, 6}, {0, 3}, }; -static arc arcs_77_4[3] = { +static const arc arcs_77_4[3] = { {66, 8}, {170, 6}, {0, 4}, }; -static arc arcs_77_5[3] = { +static const arc arcs_77_5[3] = { {84, 9}, {60, 9}, {0, 5}, }; -static arc arcs_77_6[1] = { +static const arc arcs_77_6[1] = { {0, 6}, }; -static arc arcs_77_7[1] = { +static const arc arcs_77_7[1] = { {60, 4}, }; -static arc arcs_77_8[3] = { +static const arc arcs_77_8[3] = { {64, 10}, {60, 11}, {0, 8}, }; -static arc arcs_77_9[2] = { +static const arc arcs_77_9[2] = { {66, 5}, {0, 9}, }; -static arc arcs_77_10[1] = { +static const arc arcs_77_10[1] = { {126, 
12}, }; -static arc arcs_77_11[1] = { +static const arc arcs_77_11[1] = { {59, 13}, }; -static arc arcs_77_12[2] = { +static const arc arcs_77_12[2] = { {66, 8}, {0, 12}, }; -static arc arcs_77_13[1] = { +static const arc arcs_77_13[1] = { {60, 12}, }; static state states_77[14] = { @@ -1786,30 +1786,30 @@ static state states_77[14] = { {2, arcs_77_12}, {1, arcs_77_13}, }; -static arc arcs_78_0[1] = { +static const arc arcs_78_0[1] = { {17, 1}, }; -static arc arcs_78_1[1] = { +static const arc arcs_78_1[1] = { {40, 2}, }; -static arc arcs_78_2[2] = { +static const arc arcs_78_2[2] = { {5, 3}, {59, 4}, }; -static arc arcs_78_3[2] = { +static const arc arcs_78_3[2] = { {50, 5}, {51, 6}, }; -static arc arcs_78_4[1] = { +static const arc arcs_78_4[1] = { {119, 7}, }; -static arc arcs_78_5[1] = { +static const arc arcs_78_5[1] = { {59, 4}, }; -static arc arcs_78_6[1] = { +static const arc arcs_78_6[1] = { {50, 5}, }; -static arc arcs_78_7[1] = { +static const arc arcs_78_7[1] = { {0, 7}, }; static state states_78[8] = { @@ -1822,14 +1822,14 @@ static state states_78[8] = { {1, arcs_78_6}, {1, arcs_78_7}, }; -static arc arcs_79_0[1] = { +static const arc arcs_79_0[1] = { {174, 1}, }; -static arc arcs_79_1[2] = { +static const arc arcs_79_1[2] = { {66, 2}, {0, 1}, }; -static arc arcs_79_2[2] = { +static const arc arcs_79_2[2] = { {174, 1}, {0, 2}, }; @@ -1838,21 +1838,21 @@ static state states_79[3] = { {2, arcs_79_1}, {2, arcs_79_2}, }; -static arc arcs_80_0[3] = { +static const arc arcs_80_0[3] = { {6, 1}, {64, 1}, {60, 2}, }; -static arc arcs_80_1[1] = { +static const arc arcs_80_1[1] = { {60, 3}, }; -static arc arcs_80_2[4] = { +static const arc arcs_80_2[4] = { {130, 1}, {67, 1}, {170, 3}, {0, 2}, }; -static arc arcs_80_3[1] = { +static const arc arcs_80_3[1] = { {0, 3}, }; static state states_80[4] = { @@ -1861,34 +1861,34 @@ static state states_80[4] = { {4, arcs_80_2}, {1, arcs_80_3}, }; -static arc arcs_81_0[2] = { +static const arc arcs_81_0[2] = { {170, 1}, {176, 1}, }; -static arc arcs_81_1[1] = { +static const arc arcs_81_1[1] = { {0, 1}, }; static state states_81[2] = { {2, arcs_81_0}, {1, arcs_81_1}, }; -static arc arcs_82_0[1] = { +static const arc arcs_82_0[1] = { {21, 1}, }; -static arc arcs_82_1[1] = { +static const arc arcs_82_1[1] = { {98, 2}, }; -static arc arcs_82_2[1] = { +static const arc arcs_82_2[1] = { {122, 3}, }; -static arc arcs_82_3[1] = { +static const arc arcs_82_3[1] = { {132, 4}, }; -static arc arcs_82_4[2] = { +static const arc arcs_82_4[2] = { {175, 5}, {0, 4}, }; -static arc arcs_82_5[1] = { +static const arc arcs_82_5[1] = { {0, 5}, }; static state states_82[6] = { @@ -1899,14 +1899,14 @@ static state states_82[6] = { {2, arcs_82_4}, {1, arcs_82_5}, }; -static arc arcs_83_0[2] = { +static const arc arcs_83_0[2] = { {38, 1}, {177, 2}, }; -static arc arcs_83_1[1] = { +static const arc arcs_83_1[1] = { {177, 2}, }; -static arc arcs_83_2[1] = { +static const arc arcs_83_2[1] = { {0, 2}, }; static state states_83[3] = { @@ -1914,17 +1914,17 @@ static state states_83[3] = { {1, arcs_83_1}, {1, arcs_83_2}, }; -static arc arcs_84_0[1] = { +static const arc arcs_84_0[1] = { {24, 1}, }; -static arc arcs_84_1[1] = { +static const arc arcs_84_1[1] = { {133, 2}, }; -static arc arcs_84_2[2] = { +static const arc arcs_84_2[2] = { {175, 3}, {0, 2}, }; -static arc arcs_84_3[1] = { +static const arc arcs_84_3[1] = { {0, 3}, }; static state states_84[4] = { @@ -1933,24 +1933,24 @@ static state states_84[4] = { {2, arcs_84_2}, {1, arcs_84_3}, }; -static arc arcs_85_0[1] = { 
+static const arc arcs_85_0[1] = { {40, 1}, }; -static arc arcs_85_1[1] = { +static const arc arcs_85_1[1] = { {0, 1}, }; static state states_85[2] = { {1, arcs_85_0}, {1, arcs_85_1}, }; -static arc arcs_86_0[1] = { +static const arc arcs_86_0[1] = { {35, 1}, }; -static arc arcs_86_1[2] = { +static const arc arcs_86_1[2] = { {179, 2}, {0, 1}, }; -static arc arcs_86_2[1] = { +static const arc arcs_86_2[1] = { {0, 2}, }; static state states_86[3] = { @@ -1958,14 +1958,14 @@ static state states_86[3] = { {2, arcs_86_1}, {1, arcs_86_2}, }; -static arc arcs_87_0[2] = { +static const arc arcs_87_0[2] = { {22, 1}, {80, 2}, }; -static arc arcs_87_1[1] = { +static const arc arcs_87_1[1] = { {60, 2}, }; -static arc arcs_87_2[1] = { +static const arc arcs_87_2[1] = { {0, 2}, }; static state states_87[3] = { @@ -1973,28 +1973,28 @@ static state states_87[3] = { {1, arcs_87_1}, {1, arcs_87_2}, }; -static arc arcs_88_0[2] = { +static const arc arcs_88_0[2] = { {2, 1}, {4, 2}, }; -static arc arcs_88_1[2] = { +static const arc arcs_88_1[2] = { {128, 3}, {61, 4}, }; -static arc arcs_88_2[1] = { +static const arc arcs_88_2[1] = { {0, 2}, }; -static arc arcs_88_3[1] = { +static const arc arcs_88_3[1] = { {45, 5}, }; -static arc arcs_88_4[1] = { +static const arc arcs_88_4[1] = { {2, 6}, }; -static arc arcs_88_5[2] = { +static const arc arcs_88_5[2] = { {129, 2}, {45, 5}, }; -static arc arcs_88_6[1] = { +static const arc arcs_88_6[1] = { {128, 3}, }; static state states_88[7] = { @@ -2006,14 +2006,14 @@ static state states_88[7] = { {2, arcs_88_5}, {1, arcs_88_6}, }; -static arc arcs_89_0[1] = { +static const arc arcs_89_0[1] = { {181, 1}, }; -static arc arcs_89_1[2] = { +static const arc arcs_89_1[2] = { {44, 2}, {2, 1}, }; -static arc arcs_89_2[1] = { +static const arc arcs_89_2[1] = { {0, 2}, }; static state states_89[3] = { @@ -2021,23 +2021,23 @@ static state states_89[3] = { {2, arcs_89_1}, {1, arcs_89_2}, }; -static arc arcs_90_0[1] = { +static const arc arcs_90_0[1] = { {5, 1}, }; -static arc arcs_90_1[2] = { +static const arc arcs_90_1[2] = { {50, 2}, {182, 3}, }; -static arc arcs_90_2[1] = { +static const arc arcs_90_2[1] = { {58, 4}, }; -static arc arcs_90_3[1] = { +static const arc arcs_90_3[1] = { {50, 2}, }; -static arc arcs_90_4[1] = { +static const arc arcs_90_4[1] = { {60, 5}, }; -static arc arcs_90_5[1] = { +static const arc arcs_90_5[1] = { {0, 5}, }; static state states_90[6] = { @@ -2048,50 +2048,50 @@ static state states_90[6] = { {1, arcs_90_4}, {1, arcs_90_5}, }; -static arc arcs_91_0[3] = { +static const arc arcs_91_0[3] = { {6, 1}, {64, 2}, {60, 3}, }; -static arc arcs_91_1[3] = { +static const arc arcs_91_1[3] = { {66, 4}, {60, 5}, {0, 1}, }; -static arc arcs_91_2[1] = { +static const arc arcs_91_2[1] = { {60, 6}, }; -static arc arcs_91_3[2] = { +static const arc arcs_91_3[2] = { {66, 7}, {0, 3}, }; -static arc arcs_91_4[2] = { +static const arc arcs_91_4[2] = { {64, 2}, {60, 5}, }; -static arc arcs_91_5[2] = { +static const arc arcs_91_5[2] = { {66, 4}, {0, 5}, }; -static arc arcs_91_6[1] = { +static const arc arcs_91_6[1] = { {0, 6}, }; -static arc arcs_91_7[4] = { +static const arc arcs_91_7[4] = { {6, 8}, {64, 2}, {60, 3}, {0, 7}, }; -static arc arcs_91_8[3] = { +static const arc arcs_91_8[3] = { {66, 9}, {60, 10}, {0, 8}, }; -static arc arcs_91_9[2] = { +static const arc arcs_91_9[2] = { {64, 2}, {60, 10}, }; -static arc arcs_91_10[2] = { +static const arc arcs_91_10[2] = { {66, 9}, {0, 10}, }; @@ -2108,7 +2108,7 @@ static state states_91[11] = { {2, arcs_91_9}, {2, 
arcs_91_10}, }; -static dfa dfas[92] = { +static const dfa dfas[92] = { {256, "single_input", 3, states_0, "\344\377\377\377\377\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, {257, "file_input", 2, states_1, @@ -2294,7 +2294,7 @@ static dfa dfas[92] = { {347, "typelist", 11, states_91, "\340\173\000\024\260\007\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, }; -static label labels[183] = { +static const label labels[183] = { {0, "EMPTY"}, {256, 0}, {4, 0}, From webhook-mailer at python.org Tue Apr 23 06:26:52 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 23 Apr 2019 10:26:52 -0000 Subject: [Python-checkins] bpo-36635, bpo-36696: Fix setup.py on AIX (GH-12922) Message-ID: https://github.com/python/cpython/commit/574913479f26b5ff48827861bce68281be01d16e commit: 574913479f26b5ff48827861bce68281be01d16e branch: master author: Victor Stinner committer: GitHub date: 2019-04-23T12:26:33+02:00 summary: bpo-36635, bpo-36696: Fix setup.py on AIX (GH-12922) xlc compiler doesn't support "-D define" flag only "-Ddefine". files: M setup.py diff --git a/setup.py b/setup.py index 3d6404f89eef..58c16e8ba49d 100644 --- a/setup.py +++ b/setup.py @@ -726,12 +726,12 @@ def detect_simple_extensions(self): self.add(Extension("_heapq", ["_heapqmodule.c"])) # C-optimized pickle replacement self.add(Extension("_pickle", ["_pickle.c"], - extra_compile_args=['-D Py_BUILD_CORE_MODULE'])) + extra_compile_args=['-DPy_BUILD_CORE_MODULE'])) # atexit self.add(Extension("atexit", ["atexitmodule.c"])) # _json speedups self.add(Extension("_json", ["_json.c"], - extra_compile_args=['-D Py_BUILD_CORE_MODULE'])) + extra_compile_args=['-DPy_BUILD_CORE_MODULE'])) # profiler (_lsprof is for cProfile.py) self.add(Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c'])) @@ -816,7 +816,7 @@ def detect_test_extensions(self): # Python Internal C API test module self.add(Extension('_testinternalcapi', ['_testinternalcapi.c'], - extra_compile_args=['-D Py_BUILD_CORE_MODULE'])) + extra_compile_args=['-DPy_BUILD_CORE_MODULE'])) # Python PEP-3118 (buffer protocol) test module self.add(Extension('_testbuffer', ['_testbuffer.c'])) From webhook-mailer at python.org Tue Apr 23 07:39:56 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Tue, 23 Apr 2019 11:39:56 -0000 Subject: [Python-checkins] fix warnings by adding more const (GH-12924) Message-ID: https://github.com/python/cpython/commit/09415ff0ebbbe97c8cd08ac1f94673d7a49c8018 commit: 09415ff0ebbbe97c8cd08ac1f94673d7a49c8018 branch: master author: Inada Naoki committer: GitHub date: 2019-04-23T20:39:37+09:00 summary: fix warnings by adding more const (GH-12924) files: M Include/grammar.h M Modules/parsermodule.c M Parser/acceler.c M Parser/grammar1.c M Parser/parser.c M Parser/parser.h diff --git a/Include/grammar.h b/Include/grammar.h index faccae4657ee..4b66b1e9b974 100644 --- a/Include/grammar.h +++ b/Include/grammar.h @@ -66,7 +66,7 @@ typedef struct { } grammar; /* FUNCTIONS */ -dfa *PyGrammar_FindDFA(grammar *g, int type); +const dfa *PyGrammar_FindDFA(grammar *g, int type); const char *PyGrammar_LabelRepr(label *lb); void PyGrammar_AddAccelerators(grammar *g); void PyGrammar_RemoveAccelerators(grammar *); diff --git a/Modules/parsermodule.c b/Modules/parsermodule.c index a215c7ecacd7..0f681622f288 100644 --- a/Modules/parsermodule.c +++ b/Modules/parsermodule.c @@ -644,7 +644,6 @@ validate_node(node *tree) { int type = TYPE(tree); int nch = NCH(tree); - dfa *nt_dfa; state *dfa_state; int pos, arc; @@ 
-654,7 +653,7 @@ validate_node(node *tree) PyErr_Format(parser_error, "Unrecognized node type %d.", TYPE(tree)); return 0; } - nt_dfa = &_PyParser_Grammar.g_dfa[type]; + const dfa *nt_dfa = &_PyParser_Grammar.g_dfa[type]; REQ(tree, nt_dfa->d_type); /* Run the DFA for this nonterminal. */ diff --git a/Parser/acceler.c b/Parser/acceler.c index 3a230c19bb9f..e515833e1dda 100644 --- a/Parser/acceler.c +++ b/Parser/acceler.c @@ -17,15 +17,14 @@ #include "parser.h" /* Forward references */ -static void fixdfa(grammar *, dfa *); +static void fixdfa(grammar *, const dfa *); static void fixstate(grammar *, state *); void PyGrammar_AddAccelerators(grammar *g) { - dfa *d; int i; - d = g->g_dfa; + const dfa *d = g->g_dfa; for (i = g->g_ndfas; --i >= 0; d++) fixdfa(g, d); g->g_accel = 1; @@ -34,10 +33,9 @@ PyGrammar_AddAccelerators(grammar *g) void PyGrammar_RemoveAccelerators(grammar *g) { - dfa *d; int i; g->g_accel = 0; - d = g->g_dfa; + const dfa *d = g->g_dfa; for (i = g->g_ndfas; --i >= 0; d++) { state *s; int j; @@ -51,7 +49,7 @@ PyGrammar_RemoveAccelerators(grammar *g) } static void -fixdfa(grammar *g, dfa *d) +fixdfa(grammar *g, const dfa *d) { state *s; int j; @@ -63,7 +61,7 @@ fixdfa(grammar *g, dfa *d) static void fixstate(grammar *g, state *s) { - arc *a; + const arc *a; int k; int *accel; int nl = g->g_ll.ll_nlabels; @@ -78,14 +76,14 @@ fixstate(grammar *g, state *s) a = s->s_arc; for (k = s->s_narcs; --k >= 0; a++) { int lbl = a->a_lbl; - label *l = &g->g_ll.ll_label[lbl]; + const label *l = &g->g_ll.ll_label[lbl]; int type = l->lb_type; if (a->a_arrow >= (1 << 7)) { printf("XXX too many states!\n"); continue; } if (ISNONTERMINAL(type)) { - dfa *d1 = PyGrammar_FindDFA(g, type); + const dfa *d1 = PyGrammar_FindDFA(g, type); int ibit; if (type - NT_OFFSET >= (1 << 7)) { printf("XXX too high nonterminal number!\n"); diff --git a/Parser/grammar1.c b/Parser/grammar1.c index fec6d9ec0ee2..e0b8fbb8b828 100644 --- a/Parser/grammar1.c +++ b/Parser/grammar1.c @@ -7,12 +7,11 @@ /* Return the DFA for the given type */ -dfa * +const dfa * PyGrammar_FindDFA(grammar *g, int type) { - dfa *d; /* Massive speed-up */ - d = &g->g_dfa[type - NT_OFFSET]; + const dfa *d = &g->g_dfa[type - NT_OFFSET]; assert(d->d_type == type); return d; } diff --git a/Parser/parser.c b/Parser/parser.c index c21b6fdf466d..227b9184f471 100644 --- a/Parser/parser.c +++ b/Parser/parser.c @@ -35,7 +35,7 @@ s_reset(stack *s) #define s_empty(s) ((s)->s_top == &(s)->s_base[MAXSTACK]) static int -s_push(stack *s, dfa *d, node *parent) +s_push(stack *s, const dfa *d, node *parent) { stackentry *top; if (s->s_top == s->s_base) { @@ -119,7 +119,7 @@ shift(stack *s, int type, char *str, int newstate, int lineno, int col_offset, } static int -push(stack *s, int type, dfa *d, int newstate, int lineno, int col_offset, +push(stack *s, int type, const dfa *d, int newstate, int lineno, int col_offset, int end_lineno, int end_col_offset) { int err; @@ -144,7 +144,7 @@ classify(parser_state *ps, int type, const char *str) int n = g->g_ll.ll_nlabels; if (type == NAME) { - label *l = g->g_ll.ll_label; + const label *l = g->g_ll.ll_label; int i; for (i = n; i > 0; i--, l++) { if (l->lb_type != NAME || l->lb_str == NULL || @@ -168,7 +168,7 @@ classify(parser_state *ps, int type, const char *str) } { - label *l = g->g_ll.ll_label; + const label *l = g->g_ll.ll_label; int i; for (i = n; i > 0; i--, l++) { if (l->lb_type == type && l->lb_str == NULL) { @@ -246,7 +246,7 @@ PyParser_AddToken(parser_state *ps, int type, char *str, /* Loop until the token 
is shifted or an error occurred */ for (;;) { /* Fetch the current dfa and state */ - dfa *d = ps->p_stack.s_top->s_dfa; + const dfa *d = ps->p_stack.s_top->s_dfa; state *s = &d->d_state[ps->p_stack.s_top->s_state]; D(printf(" DFA '%s', state %d:", @@ -260,7 +260,6 @@ PyParser_AddToken(parser_state *ps, int type, char *str, /* Push non-terminal */ int nt = (x >> 8) + NT_OFFSET; int arrow = x & ((1<<7)-1); - dfa *d1; if (nt == func_body_suite && !(ps->p_flags & PyCF_TYPE_COMMENTS)) { /* When parsing type comments is not requested, we can provide better errors about bad indentation @@ -268,7 +267,7 @@ PyParser_AddToken(parser_state *ps, int type, char *str, D(printf(" [switch func_body_suite to suite]")); nt = suite; } - d1 = PyGrammar_FindDFA( + const dfa *d1 = PyGrammar_FindDFA( ps->p_grammar, nt); if ((err = push(&ps->p_stack, nt, d1, arrow, lineno, col_offset, diff --git a/Parser/parser.h b/Parser/parser.h index ebb06c2b1976..b16075e7f29f 100644 --- a/Parser/parser.h +++ b/Parser/parser.h @@ -11,7 +11,7 @@ extern "C" { typedef struct { int s_state; /* State in current DFA */ - dfa *s_dfa; /* Current DFA */ + const dfa *s_dfa; /* Current DFA */ struct _node *s_parent; /* Where to add next node */ } stackentry; From webhook-mailer at python.org Tue Apr 23 07:56:12 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Tue, 23 Apr 2019 11:56:12 -0000 Subject: [Python-checkins] bpo-18372: Add missing PyObject_GC_Track() calls in the pickle module (GH-8505) Message-ID: https://github.com/python/cpython/commit/359bd4f61b9e1493081f4f67882554247b53926a commit: 359bd4f61b9e1493081f4f67882554247b53926a branch: master author: Zackery Spytz committer: Inada Naoki date: 2019-04-23T20:56:08+09:00 summary: bpo-18372: Add missing PyObject_GC_Track() calls in the pickle module (GH-8505) files: A Misc/NEWS.d/next/Core and Builtins/2018-12-08-03-40-43.bpo-18372.DT1nR0.rst M Modules/_pickle.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2018-12-08-03-40-43.bpo-18372.DT1nR0.rst b/Misc/NEWS.d/next/Core and Builtins/2018-12-08-03-40-43.bpo-18372.DT1nR0.rst new file mode 100644 index 000000000000..d8205b8d32d0 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2018-12-08-03-40-43.bpo-18372.DT1nR0.rst @@ -0,0 +1,2 @@ +Add missing :c:func:`PyObject_GC_Track` calls in the :mod:`pickle` module. +Patch by Zackery Spytz. 
diff --git a/Modules/_pickle.c b/Modules/_pickle.c index f956a382ac53..391ce5e923c6 100644 --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -1119,6 +1119,8 @@ _Pickler_New(void) Py_DECREF(self); return NULL; } + + PyObject_GC_Track(self); return self; } @@ -1496,6 +1498,7 @@ _Unpickler_New(void) return NULL; } + PyObject_GC_Track(self); return self; } From webhook-mailer at python.org Tue Apr 23 08:18:29 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 23 Apr 2019 12:18:29 -0000 Subject: [Python-checkins] bpo-18372: Add missing PyObject_GC_Track() calls in the pickle module (GH-8505) Message-ID: https://github.com/python/cpython/commit/c0f6f5370325459cadd90010530b1d300dce514e commit: c0f6f5370325459cadd90010530b1d300dce514e branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-23T05:18:15-07:00 summary: bpo-18372: Add missing PyObject_GC_Track() calls in the pickle module (GH-8505) (cherry picked from commit 359bd4f61b9e1493081f4f67882554247b53926a) Co-authored-by: Zackery Spytz files: A Misc/NEWS.d/next/Core and Builtins/2018-12-08-03-40-43.bpo-18372.DT1nR0.rst M Modules/_pickle.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2018-12-08-03-40-43.bpo-18372.DT1nR0.rst b/Misc/NEWS.d/next/Core and Builtins/2018-12-08-03-40-43.bpo-18372.DT1nR0.rst new file mode 100644 index 000000000000..d8205b8d32d0 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2018-12-08-03-40-43.bpo-18372.DT1nR0.rst @@ -0,0 +1,2 @@ +Add missing :c:func:`PyObject_GC_Track` calls in the :mod:`pickle` module. +Patch by Zackery Spytz. diff --git a/Modules/_pickle.c b/Modules/_pickle.c index 15e15cdf4506..c8b3ef70f521 100644 --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -1114,6 +1114,8 @@ _Pickler_New(void) Py_DECREF(self); return NULL; } + + PyObject_GC_Track(self); return self; } @@ -1491,6 +1493,7 @@ _Unpickler_New(void) return NULL; } + PyObject_GC_Track(self); return self; } From webhook-mailer at python.org Tue Apr 23 09:01:25 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Tue, 23 Apr 2019 13:01:25 -0000 Subject: [Python-checkins] replace 'sequencial argument' by 'positional' in doc (GH-12925) Message-ID: https://github.com/python/cpython/commit/29d018aa63b72161cfc67602dc3dbd386272da64 commit: 29d018aa63b72161cfc67602dc3dbd386272da64 branch: master author: Mathieu Dupuy committer: Inada Naoki date: 2019-04-23T22:01:09+09:00 summary: replace 'sequencial argument' by 'positional' in doc (GH-12925) files: M Doc/library/threading.rst diff --git a/Doc/library/threading.rst b/Doc/library/threading.rst index d7dbcb107dda..c58a6ad75d08 100644 --- a/Doc/library/threading.rst +++ b/Doc/library/threading.rst @@ -250,7 +250,7 @@ since it is impossible to detect the termination of alien threads. You may override this method in a subclass. The standard :meth:`run` method invokes the callable object passed to the object's constructor as - the *target* argument, if any, with sequential and keyword arguments taken + the *target* argument, if any, with positional and keyword arguments taken from the *args* and *kwargs* arguments, respectively. .. 
method:: join(timeout=None) From webhook-mailer at python.org Tue Apr 23 09:08:34 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 23 Apr 2019 13:08:34 -0000 Subject: [Python-checkins] replace 'sequencial argument' by 'positional' in doc (GH-12925) Message-ID: https://github.com/python/cpython/commit/e64d21b187871c35fb2a1d68b6a591ec26d86722 commit: e64d21b187871c35fb2a1d68b6a591ec26d86722 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-23T06:06:57-07:00 summary: replace 'sequencial argument' by 'positional' in doc (GH-12925) (cherry picked from commit 29d018aa63b72161cfc67602dc3dbd386272da64) Co-authored-by: Mathieu Dupuy files: M Doc/library/threading.rst diff --git a/Doc/library/threading.rst b/Doc/library/threading.rst index d7dbcb107dda..c58a6ad75d08 100644 --- a/Doc/library/threading.rst +++ b/Doc/library/threading.rst @@ -250,7 +250,7 @@ since it is impossible to detect the termination of alien threads. You may override this method in a subclass. The standard :meth:`run` method invokes the callable object passed to the object's constructor as - the *target* argument, if any, with sequential and keyword arguments taken + the *target* argument, if any, with positional and keyword arguments taken from the *args* and *kwargs* arguments, respectively. .. method:: join(timeout=None) From webhook-mailer at python.org Tue Apr 23 18:15:34 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 23 Apr 2019 22:15:34 -0000 Subject: [Python-checkins] bpo-36454: Fix test_time.test_monotonic() (GH-12929) Message-ID: https://github.com/python/cpython/commit/d246a6766b9d8cc625112906299c4cb019944300 commit: d246a6766b9d8cc625112906299c4cb019944300 branch: master author: Victor Stinner committer: GitHub date: 2019-04-24T00:15:12+02:00 summary: bpo-36454: Fix test_time.test_monotonic() (GH-12929) Change test_time.test_monotonic() to test only the lower bound of elapsed time after a sleep command rather than the upper bound. This prevents unnecessary test failures on slow buildbots. Patch by Victor Stinner. files: A Misc/NEWS.d/next/Tests/2019-04-23-17-48-11.bpo-36454.0q4lQz.rst M Lib/test/test_time.py diff --git a/Lib/test/test_time.py b/Lib/test/test_time.py index 303918960b63..42799b2a21ca 100644 --- a/Lib/test/test_time.py +++ b/Lib/test/test_time.py @@ -470,8 +470,9 @@ def test_monotonic(self): t2 = time.monotonic() dt = t2 - t1 self.assertGreater(t2, t1) - # Issue #20101: On some Windows machines, dt may be slightly low - self.assertTrue(0.45 <= dt <= 1.0, dt) + # bpo-20101: tolerate a difference of 50 ms because of bad timer + # resolution on Windows + self.assertTrue(0.450 <= dt) # monotonic() is a monotonic but non adjustable clock info = time.get_clock_info('monotonic') diff --git a/Misc/NEWS.d/next/Tests/2019-04-23-17-48-11.bpo-36454.0q4lQz.rst b/Misc/NEWS.d/next/Tests/2019-04-23-17-48-11.bpo-36454.0q4lQz.rst new file mode 100644 index 000000000000..151c7ab04040 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2019-04-23-17-48-11.bpo-36454.0q4lQz.rst @@ -0,0 +1,3 @@ +Change test_time.test_monotonic() to test only the lower bound of elapsed time +after a sleep command rather than the upper bound. This prevents unnecessary +test failures on slow buildbots. Patch by Victor Stinner. 
From webhook-mailer at python.org Tue Apr 23 18:36:00 2019 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 23 Apr 2019 22:36:00 -0000 Subject: [Python-checkins] bpo-36454: Fix test_time.test_monotonic() (GH-12929) Message-ID: https://github.com/python/cpython/commit/e1a6cf2824acb43dc80473e0d938db99856daa97 commit: e1a6cf2824acb43dc80473e0d938db99856daa97 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2019-04-23T15:35:55-07:00 summary: bpo-36454: Fix test_time.test_monotonic() (GH-12929) Change test_time.test_monotonic() to test only the lower bound of elapsed time after a sleep command rather than the upper bound. This prevents unnecessary test failures on slow buildbots. Patch by Victor Stinner. (cherry picked from commit d246a6766b9d8cc625112906299c4cb019944300) Co-authored-by: Victor Stinner files: A Misc/NEWS.d/next/Tests/2019-04-23-17-48-11.bpo-36454.0q4lQz.rst M Lib/test/test_time.py diff --git a/Lib/test/test_time.py b/Lib/test/test_time.py index ea455c0d0d13..4e31abf4ec8e 100644 --- a/Lib/test/test_time.py +++ b/Lib/test/test_time.py @@ -470,8 +470,9 @@ def test_monotonic(self): t2 = time.monotonic() dt = t2 - t1 self.assertGreater(t2, t1) - # Issue #20101: On some Windows machines, dt may be slightly low - self.assertTrue(0.45 <= dt <= 1.0, dt) + # bpo-20101: tolerate a difference of 50 ms because of bad timer + # resolution on Windows + self.assertTrue(0.450 <= dt) # monotonic() is a monotonic but non adjustable clock info = time.get_clock_info('monotonic') diff --git a/Misc/NEWS.d/next/Tests/2019-04-23-17-48-11.bpo-36454.0q4lQz.rst b/Misc/NEWS.d/next/Tests/2019-04-23-17-48-11.bpo-36454.0q4lQz.rst new file mode 100644 index 000000000000..151c7ab04040 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2019-04-23-17-48-11.bpo-36454.0q4lQz.rst @@ -0,0 +1,3 @@ +Change test_time.test_monotonic() to test only the lower bound of elapsed time +after a sleep command rather than the upper bound. This prevents unnecessary +test failures on slow buildbots. Patch by Victor Stinner.
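The two bpo-36454 commits above amount to a general rule for timing tests: after a sleep, assert only a lower bound on the elapsed time measured with a monotonic clock, never an upper bound, because a loaded or slow machine can legitimately take far longer than requested. The following is a minimal, self-contained sketch of that pattern; it is an illustration only, not code from the CPython test suite, and the class and method names are invented for the example.

import time
import unittest


class MonotonicSleepTest(unittest.TestCase):
    # Hypothetical example, not part of Lib/test/test_time.py.
    def test_sleep_lower_bound(self):
        t1 = time.monotonic()
        time.sleep(0.5)
        t2 = time.monotonic()
        dt = t2 - t1
        # The monotonic clock never goes backwards, so it must have advanced.
        self.assertGreater(t2, t1)
        # Assert only the lower bound, with a 50 ms tolerance for coarse
        # timers; an upper bound such as "dt <= 1.0" is what made the
        # original test flaky on slow buildbots.
        self.assertGreaterEqual(dt, 0.450)


if __name__ == "__main__":
    unittest.main()

The same lower-bound-only check is what the patch applies to test_monotonic() on both the master and 3.7 branches.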
From webhook-mailer at python.org Wed Apr 24 05:21:52 2019 From: webhook-mailer at python.org (Inada Naoki) Date: Wed, 24 Apr 2019 09:21:52 -0000 Subject: [Python-checkins] fix typo in gzip.py (GH-12928) Message-ID: https://github.com/python/cpython/commit/4f5a3493b534a95fbb01d593b1ffe320db6b395e commit: 4f5a3493b534a95fbb01d593b1ffe320db6b395e branch: master author: Maximilian N?the committer: Inada Naoki date: 2019-04-24T18:21:02+09:00 summary: fix typo in gzip.py (GH-12928) files: M Lib/gzip.py diff --git a/Lib/gzip.py b/Lib/gzip.py index 948fec293e23..7c8618741988 100644 --- a/Lib/gzip.py +++ b/Lib/gzip.py @@ -283,7 +283,7 @@ def read(self, size=-1): def read1(self, size=-1): """Implements BufferedIOBase.read1() - Reads up to a buffer's worth of data is size is negative.""" + Reads up to a buffer's worth of data if size is negative.""" self._check_not_closed() if self.mode != READ: import errno From webhook-mailer at python.org Wed Apr 24 10:11:12 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 24 Apr 2019 14:11:12 -0000 Subject: [Python-checkins] bpo-36707: Remove the "m" flag (pymalloc) from SOABI (GH-12931) Message-ID: https://github.com/python/cpython/commit/6c44fde3e03079e0c69f823dafbe04af50b5bd0d commit: 6c44fde3e03079e0c69f823dafbe04af50b5bd0d branch: master author: Victor Stinner committer: GitHub date: 2019-04-24T16:10:09+02:00 summary: bpo-36707: Remove the "m" flag (pymalloc) from SOABI (GH-12931) "./configure --with-pymalloc" no longer adds the "m" flag to SOABI (sys.implementation.cache_tag). Enabling or disabling pymalloc has no impact on the ABI. files: A Misc/NEWS.d/next/Build/2019-04-24-02-29-15.bpo-36707.8ZNB67.rst M configure M configure.ac diff --git a/Misc/NEWS.d/next/Build/2019-04-24-02-29-15.bpo-36707.8ZNB67.rst b/Misc/NEWS.d/next/Build/2019-04-24-02-29-15.bpo-36707.8ZNB67.rst new file mode 100644 index 000000000000..77bd4d73b2a2 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2019-04-24-02-29-15.bpo-36707.8ZNB67.rst @@ -0,0 +1,3 @@ +``./configure --with-pymalloc`` no longer adds the ``m`` flag to SOABI +(sys.implementation.cache_tag). Enabling or disabling pymalloc has no impact +on the ABI. diff --git a/configure b/configure index 9c7eded85359..e96c03ad4f59 100755 --- a/configure +++ b/configure @@ -11238,7 +11238,6 @@ then $as_echo "#define WITH_PYMALLOC 1" >>confdefs.h - ABIFLAGS="${ABIFLAGS}m" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $with_pymalloc" >&5 $as_echo "$with_pymalloc" >&6; } @@ -15086,12 +15085,13 @@ $as_echo "#define AC_APPLE_UNIVERSAL_BUILD 1" >>confdefs.h # * The Python implementation (always 'cpython-' for us) # * The major and minor version numbers # * --with-pydebug (adds a 'd') -# * --with-pymalloc (adds a 'm') -# * --with-wide-unicode (adds a 'u') # # Thus for example, Python 3.2 built with wide unicode, pydebug, and pymalloc, # would get a shared library ABI version tag of 'cpython-32dmu' and shared # libraries would be named 'foo.cpython-32dmu.so'. +# +# In Python 3.2 and older, --with-wide-unicode added a 'u' flag. +# In Python 3.7 and older, --with-pymalloc added a 'm' flag. { $as_echo "$as_me:${as_lineno-$LINENO}: checking ABIFLAGS" >&5 $as_echo_n "checking ABIFLAGS... 
" >&6; } diff --git a/configure.ac b/configure.ac index 6450519444c8..3f378c97345e 100644 --- a/configure.ac +++ b/configure.ac @@ -3400,7 +3400,6 @@ if test "$with_pymalloc" != "no" then AC_DEFINE(WITH_PYMALLOC, 1, [Define if you want to compile in Python-specific mallocs]) - ABIFLAGS="${ABIFLAGS}m" fi AC_MSG_RESULT($with_pymalloc) @@ -4601,12 +4600,13 @@ AC_C_BIGENDIAN # * The Python implementation (always 'cpython-' for us) # * The major and minor version numbers # * --with-pydebug (adds a 'd') -# * --with-pymalloc (adds a 'm') -# * --with-wide-unicode (adds a 'u') # # Thus for example, Python 3.2 built with wide unicode, pydebug, and pymalloc, # would get a shared library ABI version tag of 'cpython-32dmu' and shared # libraries would be named 'foo.cpython-32dmu.so'. +# +# In Python 3.2 and older, --with-wide-unicode added a 'u' flag. +# In Python 3.7 and older, --with-pymalloc added a 'm' flag. AC_SUBST(SOABI) AC_MSG_CHECKING(ABIFLAGS) AC_MSG_RESULT($ABIFLAGS) From webhook-mailer at python.org Wed Apr 24 10:47:51 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 24 Apr 2019 14:47:51 -0000 Subject: [Python-checkins] bpo-36710: Add runtime parameter to _PyThreadState_Init() (GH-12935) Message-ID: https://github.com/python/cpython/commit/8bb3230149538c25c1bacced5e64a3c071475f73 commit: 8bb3230149538c25c1bacced5e64a3c071475f73 branch: master author: Victor Stinner committer: GitHub date: 2019-04-24T16:47:40+02:00 summary: bpo-36710: Add runtime parameter to _PyThreadState_Init() (GH-12935) * Add 'runtime' parameter to _PyThreadState_Init() * Add 'gilstate' parameter to _PyGILState_NoteThreadState() * Move _PyThreadState_Init() and _PyThreadState_DeleteExcept() to the internal C API. files: M Include/cpython/pystate.h M Include/internal/pycore_pystate.h M Modules/_threadmodule.c M Python/pystate.c diff --git a/Include/cpython/pystate.h b/Include/cpython/pystate.h index a0953f03261d..2341dda3fabd 100644 --- a/Include/cpython/pystate.h +++ b/Include/cpython/pystate.h @@ -155,8 +155,6 @@ PyAPI_FUNC(PyInterpreterState *) _PyInterpreterState_Get(void); PyAPI_FUNC(int) _PyState_AddModule(PyObject*, struct PyModuleDef*); PyAPI_FUNC(void) _PyState_ClearModules(void); PyAPI_FUNC(PyThreadState *) _PyThreadState_Prealloc(PyInterpreterState *); -PyAPI_FUNC(void) _PyThreadState_Init(PyThreadState *); -PyAPI_FUNC(void) _PyThreadState_DeleteExcept(PyThreadState *tstate); PyAPI_FUNC(void) _PyGILState_Reinit(void); /* Similar to PyThreadState_Get(), but don't issue a fatal error diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h index e1ce08d335b9..509ca3634a3a 100644 --- a/Include/internal/pycore_pystate.h +++ b/Include/internal/pycore_pystate.h @@ -231,6 +231,11 @@ PyAPI_FUNC(void) _PyRuntime_Finalize(void); /* Other */ +PyAPI_FUNC(void) _PyThreadState_Init( + _PyRuntimeState *runtime, + PyThreadState *tstate); +PyAPI_FUNC(void) _PyThreadState_DeleteExcept(PyThreadState *tstate); + PyAPI_FUNC(_PyInitError) _PyInterpreterState_Enable(_PyRuntimeState *); PyAPI_FUNC(void) _PyInterpreterState_DeleteExceptMain(void); diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c index 73babaf9ca8f..3c02d8dd5145 100644 --- a/Modules/_threadmodule.c +++ b/Modules/_threadmodule.c @@ -994,7 +994,7 @@ t_bootstrap(void *boot_raw) tstate = boot->tstate; tstate->thread_id = PyThread_get_thread_ident(); - _PyThreadState_Init(tstate); + _PyThreadState_Init(&_PyRuntime, tstate); PyEval_AcquireThread(tstate); tstate->interp->num_threads++; res = PyObject_Call(boot->func, 
boot->args, boot->keyw); diff --git a/Python/pystate.c b/Python/pystate.c index a2464b6cf551..ef9d79288736 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -133,7 +133,9 @@ _PyRuntimeState_ReInitThreads(void) WAIT_LOCK) #define HEAD_UNLOCK() PyThread_release_lock(_PyRuntime.interpreters.mutex) -static void _PyGILState_NoteThreadState(PyThreadState* tstate); +/* Forward declaration */ +static void _PyGILState_NoteThreadState( + struct _gilstate_runtime_state *gilstate, PyThreadState* tstate); _PyInitError _PyInterpreterState_Enable(_PyRuntimeState *runtime) @@ -487,71 +489,74 @@ static PyThreadState * new_threadstate(PyInterpreterState *interp, int init) { PyThreadState *tstate = (PyThreadState *)PyMem_RawMalloc(sizeof(PyThreadState)); + if (tstate == NULL) { + return NULL; + } - if (_PyThreadState_GetFrame == NULL) + if (_PyThreadState_GetFrame == NULL) { _PyThreadState_GetFrame = threadstate_getframe; + } - if (tstate != NULL) { - tstate->interp = interp; - - tstate->frame = NULL; - tstate->recursion_depth = 0; - tstate->overflowed = 0; - tstate->recursion_critical = 0; - tstate->stackcheck_counter = 0; - tstate->tracing = 0; - tstate->use_tracing = 0; - tstate->gilstate_counter = 0; - tstate->async_exc = NULL; - tstate->thread_id = PyThread_get_thread_ident(); + tstate->interp = interp; - tstate->dict = NULL; + tstate->frame = NULL; + tstate->recursion_depth = 0; + tstate->overflowed = 0; + tstate->recursion_critical = 0; + tstate->stackcheck_counter = 0; + tstate->tracing = 0; + tstate->use_tracing = 0; + tstate->gilstate_counter = 0; + tstate->async_exc = NULL; + tstate->thread_id = PyThread_get_thread_ident(); - tstate->curexc_type = NULL; - tstate->curexc_value = NULL; - tstate->curexc_traceback = NULL; + tstate->dict = NULL; - tstate->exc_state.exc_type = NULL; - tstate->exc_state.exc_value = NULL; - tstate->exc_state.exc_traceback = NULL; - tstate->exc_state.previous_item = NULL; - tstate->exc_info = &tstate->exc_state; + tstate->curexc_type = NULL; + tstate->curexc_value = NULL; + tstate->curexc_traceback = NULL; - tstate->c_profilefunc = NULL; - tstate->c_tracefunc = NULL; - tstate->c_profileobj = NULL; - tstate->c_traceobj = NULL; + tstate->exc_state.exc_type = NULL; + tstate->exc_state.exc_value = NULL; + tstate->exc_state.exc_traceback = NULL; + tstate->exc_state.previous_item = NULL; + tstate->exc_info = &tstate->exc_state; - tstate->trash_delete_nesting = 0; - tstate->trash_delete_later = NULL; - tstate->on_delete = NULL; - tstate->on_delete_data = NULL; + tstate->c_profilefunc = NULL; + tstate->c_tracefunc = NULL; + tstate->c_profileobj = NULL; + tstate->c_traceobj = NULL; - tstate->coroutine_origin_tracking_depth = 0; + tstate->trash_delete_nesting = 0; + tstate->trash_delete_later = NULL; + tstate->on_delete = NULL; + tstate->on_delete_data = NULL; - tstate->coroutine_wrapper = NULL; - tstate->in_coroutine_wrapper = 0; + tstate->coroutine_origin_tracking_depth = 0; - tstate->async_gen_firstiter = NULL; - tstate->async_gen_finalizer = NULL; + tstate->coroutine_wrapper = NULL; + tstate->in_coroutine_wrapper = 0; - tstate->context = NULL; - tstate->context_ver = 1; + tstate->async_gen_firstiter = NULL; + tstate->async_gen_finalizer = NULL; - tstate->id = ++interp->tstate_next_unique_id; + tstate->context = NULL; + tstate->context_ver = 1; - if (init) - _PyThreadState_Init(tstate); + tstate->id = ++interp->tstate_next_unique_id; - HEAD_LOCK(); - tstate->prev = NULL; - tstate->next = interp->tstate_head; - if (tstate->next) - tstate->next->prev = tstate; - 
interp->tstate_head = tstate; - HEAD_UNLOCK(); + if (init) { + _PyThreadState_Init(&_PyRuntime, tstate); } + HEAD_LOCK(); + tstate->prev = NULL; + tstate->next = interp->tstate_head; + if (tstate->next) + tstate->next->prev = tstate; + interp->tstate_head = tstate; + HEAD_UNLOCK(); + return tstate; } @@ -568,9 +573,9 @@ _PyThreadState_Prealloc(PyInterpreterState *interp) } void -_PyThreadState_Init(PyThreadState *tstate) +_PyThreadState_Init(_PyRuntimeState *runtime, PyThreadState *tstate) { - _PyGILState_NoteThreadState(tstate); + _PyGILState_NoteThreadState(&runtime->gilstate, tstate); } PyObject* @@ -1037,17 +1042,23 @@ PyThreadState_IsCurrent(PyThreadState *tstate) Py_Initialize/Py_FinalizeEx */ void -_PyGILState_Init(PyInterpreterState *i, PyThreadState *t) +_PyGILState_Init(PyInterpreterState *interp, PyThreadState *tstate) { - assert(i && t); /* must init with valid states */ - if (PyThread_tss_create(&_PyRuntime.gilstate.autoTSSkey) != 0) { + /* must init with valid states */ + assert(interp != NULL); + assert(tstate != NULL); + + _PyRuntimeState *runtime = &_PyRuntime; + struct _gilstate_runtime_state *gilstate = &runtime->gilstate; + + if (PyThread_tss_create(&gilstate->autoTSSkey) != 0) { Py_FatalError("Could not allocate TSS entry"); } - _PyRuntime.gilstate.autoInterpreterState = i; - assert(PyThread_tss_get(&_PyRuntime.gilstate.autoTSSkey) == NULL); - assert(t->gilstate_counter == 0); + gilstate->autoInterpreterState = interp; + assert(PyThread_tss_get(&gilstate->autoTSSkey) == NULL); + assert(tstate->gilstate_counter == 0); - _PyGILState_NoteThreadState(t); + _PyGILState_NoteThreadState(gilstate, tstate); } PyInterpreterState * @@ -1104,13 +1115,14 @@ _PyGILState_Reinit(void) a better fix for SF bug #1010677 than the first one attempted). */ static void -_PyGILState_NoteThreadState(PyThreadState* tstate) +_PyGILState_NoteThreadState(struct _gilstate_runtime_state *gilstate, PyThreadState* tstate) { /* If autoTSSkey isn't initialized, this must be the very first threadstate created in Py_Initialize(). Don't do anything for now (we'll be back here when _PyGILState_Init is called). */ - if (!_PyRuntime.gilstate.autoInterpreterState) + if (!gilstate->autoInterpreterState) { return; + } /* Stick the thread state for this thread in thread specific storage. @@ -1124,10 +1136,8 @@ _PyGILState_NoteThreadState(PyThreadState* tstate) The first thread state created for that given OS level thread will "win", which seems reasonable behaviour. */ - if (PyThread_tss_get(&_PyRuntime.gilstate.autoTSSkey) == NULL) { - if ((PyThread_tss_set(&_PyRuntime.gilstate.autoTSSkey, (void *)tstate) - ) != 0) - { + if (PyThread_tss_get(&gilstate->autoTSSkey) == NULL) { + if ((PyThread_tss_set(&gilstate->autoTSSkey, (void *)tstate)) != 0) { Py_FatalError("Couldn't create autoTSSkey mapping"); } } From webhook-mailer at python.org Wed Apr 24 11:18:12 2019 From: webhook-mailer at python.org (Nick Coghlan) Date: Wed, 24 Apr 2019 15:18:12 -0000 Subject: [Python-checkins] bpo-30840: Document relative imports (#12831) Message-ID: https://github.com/python/cpython/commit/70bf713617e15fad390ed953e48b3c65d9bc90ec commit: 70bf713617e15fad390ed953e48b3c65d9bc90ec branch: master author: Joannah Nanjekye <33177550+nanjekyejoannah at users.noreply.github.com> committer: Nick Coghlan date: 2019-04-25T01:14:44+10:00 summary: bpo-30840: Document relative imports (#12831) * document relative imports * ?? Added by blurb_it. 
* fix indentation error * remove indentation * Document relative imports * Document relative imports * remove from ...package * Document relative imports * remove trailing space * Document relative imports * Document relative imports files: A Misc/NEWS.d/next/Documentation/2019-04-14-19-46-21.bpo-30840.R-JFzw.rst M Doc/reference/import.rst M Doc/reference/simple_stmts.rst diff --git a/Doc/reference/import.rst b/Doc/reference/import.rst index 9a0ab39d3b4a..88290c88bb35 100644 --- a/Doc/reference/import.rst +++ b/Doc/reference/import.rst @@ -921,6 +921,46 @@ it is sufficient to raise :exc:`ModuleNotFoundError` directly from ``None``. The latter indicates that the meta path search should continue, while raising an exception terminates it immediately. +.. _relativeimports: + +Package Relative Imports +======================== + +Relative imports use leading dots. A single leading dot indicates a relative +import, starting with the current package. Two or more leading dots indicate a +relative import to the parent(s) of the current package, one level per dot +after the first. For example, given the following package layout:: + + package/ + __init__.py + subpackage1/ + __init__.py + moduleX.py + moduleY.py + subpackage2/ + __init__.py + moduleZ.py + moduleA.py + +In either ``subpackage1/moduleX.py`` or ``subpackage1/__init__.py``, +the following are valid relative imports:: + + from .moduleY import spam + from .moduleY import spam as ham + from . import moduleY + from ..subpackage1 import moduleY + from ..subpackage2.moduleZ import eggs + from ..moduleA import foo + +Absolute imports may use either the ``import <>`` or ``from <> import <>`` +syntax, but relative imports may only use the second form; the reason +for this is that:: + + import XXX.YYY.ZZZ + +should expose ``XXX.YYY.ZZZ`` as a usable expression, but .moduleY is +not a valid expression. + Special considerations for __main__ =================================== diff --git a/Doc/reference/simple_stmts.rst b/Doc/reference/simple_stmts.rst index 00964afc6d3d..207057cbc124 100644 --- a/Doc/reference/simple_stmts.rst +++ b/Doc/reference/simple_stmts.rst @@ -828,7 +828,8 @@ exists. Two dots means up one package level. Three dots is up two levels, etc. So if you execute ``from . import mod`` from a module in the ``pkg`` package then you will end up importing ``pkg.mod``. If you execute ``from ..subpkg2 import mod`` from within ``pkg.subpkg1`` you will import ``pkg.subpkg2.mod``. -The specification for relative imports is contained within :pep:`328`. +The specification for relative imports is contained in +the :ref:`relativeimports` section. :func:`importlib.import_module` is provided to support applications that determine dynamically the modules to be loaded. 
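The relative-import rules documented above also apply when modules are resolved dynamically: importlib.import_module() accepts a leading-dot name only together with the *package* argument that anchors it. A small sketch using the hypothetical package layout from the documentation; that layout has to exist on sys.path for this to actually run::

    import importlib

    # equivalent to "from . import moduleY" inside package.subpackage1
    moduleY = importlib.import_module(".moduleY", package="package.subpackage1")

    # equivalent to "from ..subpackage2.moduleZ import eggs"
    moduleZ = importlib.import_module("..subpackage2.moduleZ",
                                      package="package.subpackage1")
    eggs = moduleZ.eggs
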
diff --git a/Misc/NEWS.d/next/Documentation/2019-04-14-19-46-21.bpo-30840.R-JFzw.rst b/Misc/NEWS.d/next/Documentation/2019-04-14-19-46-21.bpo-30840.R-JFzw.rst new file mode 100644 index 000000000000..210f54f2593e --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2019-04-14-19-46-21.bpo-30840.R-JFzw.rst @@ -0,0 +1 @@ +Document relative imports \ No newline at end of file From webhook-mailer at python.org Wed Apr 24 11:18:42 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 24 Apr 2019 15:18:42 -0000 Subject: [Python-checkins] bpo-36710: PyOS_AfterFork_Child() pass runtime parameter (GH-12936) Message-ID: https://github.com/python/cpython/commit/b930a2d2b1247bdba560db341ba90a9cbb538eb3 commit: b930a2d2b1247bdba560db341ba90a9cbb538eb3 branch: master author: Victor Stinner committer: GitHub date: 2019-04-24T17:14:33+02:00 summary: bpo-36710: PyOS_AfterFork_Child() pass runtime parameter (GH-12936) The PyOS_AfterFork_Child() function now pass a 'runtime' parameter to subfunctions. * Fix _PyRuntimeState_ReInitThreads(): use the correct memory allocator * Add runtime parameter to _PyRuntimeState_ReInitThreads(), _PyGILState_Reinit() and _PyInterpreterState_DeleteExceptMain() * Move _PyGILState_Reinit() to the internal C API. files: M Include/cpython/pystate.h M Include/internal/pycore_pystate.h M Modules/posixmodule.c M Python/pystate.c diff --git a/Include/cpython/pystate.h b/Include/cpython/pystate.h index 2341dda3fabd..94331f35e1bd 100644 --- a/Include/cpython/pystate.h +++ b/Include/cpython/pystate.h @@ -155,7 +155,6 @@ PyAPI_FUNC(PyInterpreterState *) _PyInterpreterState_Get(void); PyAPI_FUNC(int) _PyState_AddModule(PyObject*, struct PyModuleDef*); PyAPI_FUNC(void) _PyState_ClearModules(void); PyAPI_FUNC(PyThreadState *) _PyThreadState_Prealloc(PyInterpreterState *); -PyAPI_FUNC(void) _PyGILState_Reinit(void); /* Similar to PyThreadState_Get(), but don't issue a fatal error * if it is NULL. */ diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h index 509ca3634a3a..2c24f679dc02 100644 --- a/Include/internal/pycore_pystate.h +++ b/Include/internal/pycore_pystate.h @@ -185,9 +185,9 @@ typedef struct pyruntimestate { /* Note: _PyRuntimeState_INIT sets other fields to 0/NULL */ PyAPI_DATA(_PyRuntimeState) _PyRuntime; -PyAPI_FUNC(_PyInitError) _PyRuntimeState_Init(_PyRuntimeState *); -PyAPI_FUNC(void) _PyRuntimeState_Fini(_PyRuntimeState *); -PyAPI_FUNC(void) _PyRuntimeState_ReInitThreads(void); +PyAPI_FUNC(_PyInitError) _PyRuntimeState_Init(_PyRuntimeState *runtime); +PyAPI_FUNC(void) _PyRuntimeState_Fini(_PyRuntimeState *runtime); +PyAPI_FUNC(void) _PyRuntimeState_ReInitThreads(_PyRuntimeState *runtime); /* Initialize _PyRuntimeState. Return NULL on success, or return an error message on failure. 
*/ @@ -236,8 +236,10 @@ PyAPI_FUNC(void) _PyThreadState_Init( PyThreadState *tstate); PyAPI_FUNC(void) _PyThreadState_DeleteExcept(PyThreadState *tstate); -PyAPI_FUNC(_PyInitError) _PyInterpreterState_Enable(_PyRuntimeState *); -PyAPI_FUNC(void) _PyInterpreterState_DeleteExceptMain(void); +PyAPI_FUNC(_PyInitError) _PyInterpreterState_Enable(_PyRuntimeState *runtime); +PyAPI_FUNC(void) _PyInterpreterState_DeleteExceptMain(_PyRuntimeState *runtime); + +PyAPI_FUNC(void) _PyGILState_Reinit(_PyRuntimeState *runtime); #ifdef __cplusplus } diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index e8dbdcc94aa7..56ec3ee5a0ee 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -421,12 +421,13 @@ PyOS_AfterFork_Parent(void) void PyOS_AfterFork_Child(void) { - _PyGILState_Reinit(); - _PyInterpreterState_DeleteExceptMain(); + _PyRuntimeState *runtime = &_PyRuntime; + _PyGILState_Reinit(runtime); + _PyInterpreterState_DeleteExceptMain(runtime); PyEval_ReInitThreads(); _PyImport_ReInitLock(); _PySignal_AfterFork(); - _PyRuntimeState_ReInitThreads(); + _PyRuntimeState_ReInitThreads(runtime); run_at_forkers(_PyInterpreterState_Get()->after_forkers_child, 0); } diff --git a/Python/pystate.c b/Python/pystate.c index ef9d79288736..6aaf993cfe2c 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -108,23 +108,31 @@ _PyRuntimeState_Fini(_PyRuntimeState *runtime) */ void -_PyRuntimeState_ReInitThreads(void) +_PyRuntimeState_ReInitThreads(_PyRuntimeState *runtime) { // This was initially set in _PyRuntimeState_Init(). - _PyRuntime.main_thread = PyThread_get_thread_ident(); + runtime->main_thread = PyThread_get_thread_ident(); + + /* Force default allocator, since _PyRuntimeState_Fini() must + use the same allocator than this function. */ + PyMemAllocatorEx old_alloc; + _PyMem_SetDefaultAllocator(PYMEM_DOMAIN_RAW, &old_alloc); + + runtime->interpreters.mutex = PyThread_allocate_lock(); + runtime->interpreters.main->id_mutex = PyThread_allocate_lock(); + runtime->xidregistry.mutex = PyThread_allocate_lock(); + + PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &old_alloc); - _PyRuntime.interpreters.mutex = PyThread_allocate_lock(); - if (_PyRuntime.interpreters.mutex == NULL) { + if (runtime->interpreters.mutex == NULL) { Py_FatalError("Can't initialize lock for runtime interpreters"); } - _PyRuntime.interpreters.main->id_mutex = PyThread_allocate_lock(); - if (_PyRuntime.interpreters.main->id_mutex == NULL) { + if (runtime->interpreters.main->id_mutex == NULL) { Py_FatalError("Can't initialize ID lock for main interpreter"); } - _PyRuntime.xidregistry.mutex = PyThread_allocate_lock(); - if (_PyRuntime.xidregistry.mutex == NULL) { + if (runtime->xidregistry.mutex == NULL) { Py_FatalError("Can't initialize lock for cross-interpreter data registry"); } } @@ -290,20 +298,22 @@ PyInterpreterState_Delete(PyInterpreterState *interp) * is a current interpreter state, it *must* be the main interpreter. 
*/ void -_PyInterpreterState_DeleteExceptMain() +_PyInterpreterState_DeleteExceptMain(_PyRuntimeState *runtime) { + struct pyinterpreters *interpreters = &runtime->interpreters; + PyThreadState *tstate = PyThreadState_Swap(NULL); - if (tstate != NULL && tstate->interp != _PyRuntime.interpreters.main) { + if (tstate != NULL && tstate->interp != interpreters->main) { Py_FatalError("PyInterpreterState_DeleteExceptMain: not main interpreter"); } HEAD_LOCK(); - PyInterpreterState *interp = _PyRuntime.interpreters.head; - _PyRuntime.interpreters.head = NULL; + PyInterpreterState *interp = interpreters->head; + interpreters->head = NULL; while (interp != NULL) { - if (interp == _PyRuntime.interpreters.main) { - _PyRuntime.interpreters.main->next = NULL; - _PyRuntime.interpreters.head = interp; + if (interp == interpreters->main) { + interpreters->main->next = NULL; + interpreters->head = interp; interp = interp->next; continue; } @@ -319,7 +329,7 @@ _PyInterpreterState_DeleteExceptMain() } HEAD_UNLOCK(); - if (_PyRuntime.interpreters.head == NULL) { + if (interpreters->head == NULL) { Py_FatalError("PyInterpreterState_DeleteExceptMain: missing main"); } PyThreadState_Swap(tstate); @@ -1079,31 +1089,20 @@ _PyGILState_Fini(void) * don't reset TSS upon fork(), see issue #10517. */ void -_PyGILState_Reinit(void) +_PyGILState_Reinit(_PyRuntimeState *runtime) { - /* Force default allocator, since _PyRuntimeState_Fini() must - use the same allocator than this function. */ - PyMemAllocatorEx old_alloc; - _PyMem_SetDefaultAllocator(PYMEM_DOMAIN_RAW, &old_alloc); - - _PyRuntime.interpreters.mutex = PyThread_allocate_lock(); - - PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &old_alloc); - - if (_PyRuntime.interpreters.mutex == NULL) { - Py_FatalError("Can't initialize threads for interpreter"); - } - + struct _gilstate_runtime_state *gilstate = &runtime->gilstate; PyThreadState *tstate = PyGILState_GetThisThreadState(); - PyThread_tss_delete(&_PyRuntime.gilstate.autoTSSkey); - if (PyThread_tss_create(&_PyRuntime.gilstate.autoTSSkey) != 0) { + + PyThread_tss_delete(&gilstate->autoTSSkey); + if (PyThread_tss_create(&gilstate->autoTSSkey) != 0) { Py_FatalError("Could not allocate TSS entry"); } /* If the thread had an associated auto thread state, reassociate it with * the new key. 
*/ if (tstate && - PyThread_tss_set(&_PyRuntime.gilstate.autoTSSkey, (void *)tstate) != 0) + PyThread_tss_set(&gilstate->autoTSSkey, (void *)tstate) != 0) { Py_FatalError("Couldn't create autoTSSkey mapping"); } From webhook-mailer at python.org Wed Apr 24 11:29:22 2019 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 24 Apr 2019 15:29:22 -0000 Subject: [Python-checkins] bpo-36710: Add runtime variable to Py_FinalizeEx() (GH-12937) Message-ID: https://github.com/python/cpython/commit/8e91c246e468515b877690e090c73f496552541d commit: 8e91c246e468515b877690e090c73f496552541d branch: master author: Victor Stinner committer: GitHub date: 2019-04-24T17:24:01+02:00 summary: bpo-36710: Add runtime variable to Py_FinalizeEx() (GH-12937) * Add a 'runtime' variable to Py_FinalizeEx() rather than working directly on the global variable _PyRuntime * Add a 'runtime' parameter to _PyGC_Fini(), _PyGILState_Fini() and call_ll_exitfuncs() files: M Include/internal/pycore_pylifecycle.h M Modules/gcmodule.c M Python/pylifecycle.c M Python/pystate.c diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h index bfff24b80a9f..f07bc427bb88 100644 --- a/Include/internal/pycore_pylifecycle.h +++ b/Include/internal/pycore_pylifecycle.h @@ -8,6 +8,8 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif +#include "pycore_pystate.h" /* _PyRuntimeState */ + /* True if the main interpreter thread exited due to an unhandled * KeyboardInterrupt exception, suggesting the user pressed ^C. */ PyAPI_DATA(int) _Py_UnhandledKeyboardInterrupt; @@ -63,7 +65,7 @@ extern void PyAsyncGen_Fini(void); extern void _PyExc_Fini(void); extern void _PyImport_Fini(void); extern void _PyImport_Fini2(void); -extern void _PyGC_Fini(void); +extern void _PyGC_Fini(_PyRuntimeState *runtime); extern void _PyType_Fini(void); extern void _Py_HashRandomization_Fini(void); extern void _PyUnicode_Fini(void); @@ -73,7 +75,7 @@ extern void _PyHash_Fini(void); extern int _PyTraceMalloc_Fini(void); extern void _PyGILState_Init(PyInterpreterState *, PyThreadState *); -extern void _PyGILState_Fini(void); +extern void _PyGILState_Fini(_PyRuntimeState *runtime); PyAPI_FUNC(void) _PyGC_DumpShutdownStats(void); diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c index a75d5fed95f1..f36c7f5d5e40 100644 --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -1865,9 +1865,10 @@ _PyGC_DumpShutdownStats(void) } void -_PyGC_Fini(void) +_PyGC_Fini(_PyRuntimeState *runtime) { - Py_CLEAR(_PyRuntime.gc.callbacks); + struct _gc_runtime_state *gc = &runtime->gc; + Py_CLEAR(gc->callbacks); } /* for debugging */ diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index c7920ef6262d..fe4cb97a4740 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -65,7 +65,7 @@ static _PyInitError init_sys_streams(PyInterpreterState *interp); static _PyInitError initsigs(void); static void call_py_exitfuncs(PyInterpreterState *); static void wait_for_thread_shutdown(void); -static void call_ll_exitfuncs(void); +static void call_ll_exitfuncs(_PyRuntimeState *runtime); int _Py_UnhandledKeyboardInterrupt = 0; _PyRuntimeState _PyRuntime = _PyRuntimeState_INIT; @@ -1131,23 +1131,23 @@ flush_std_files(void) int Py_FinalizeEx(void) { - PyInterpreterState *interp; - PyThreadState *tstate; int status = 0; - if (!_PyRuntime.initialized) + _PyRuntimeState *runtime = &_PyRuntime; + if (!runtime->initialized) { return status; + } // Wrap up existing "threading"-module-created, non-daemon threads. 
wait_for_thread_shutdown(); - /* Get current thread state and interpreter pointer */ - tstate = _PyThreadState_GET(); - interp = tstate->interp; - // Make any remaining pending calls. _Py_FinishPendingCalls(); + /* Get current thread state and interpreter pointer */ + PyThreadState *tstate = _PyThreadState_GET(); + PyInterpreterState *interp = tstate->interp; + /* The interpreter is still entirely intact at this point, and the * exit funcs may be relying on that. In particular, if some thread * or exit func is still waiting to do an import, the import machinery @@ -1174,9 +1174,9 @@ Py_FinalizeEx(void) /* Remaining threads (e.g. daemon threads) will automatically exit after taking the GIL (in PyEval_RestoreThread()). */ - _PyRuntime.finalizing = tstate; - _PyRuntime.initialized = 0; - _PyRuntime.core_initialized = 0; + runtime->finalizing = tstate; + runtime->initialized = 0; + runtime->core_initialized = 0; /* Flush sys.stdout and sys.stderr */ if (flush_std_files() < 0) { @@ -1294,7 +1294,7 @@ Py_FinalizeEx(void) PyFloat_Fini(); PyDict_Fini(); PySlice_Fini(); - _PyGC_Fini(); + _PyGC_Fini(runtime); _Py_HashRandomization_Fini(); _PyArg_Fini(); PyAsyncGen_Fini(); @@ -1314,7 +1314,7 @@ Py_FinalizeEx(void) PyGrammar_RemoveAccelerators(&_PyParser_Grammar); /* Cleanup auto-thread-state */ - _PyGILState_Fini(); + _PyGILState_Fini(runtime); /* Delete current thread. After this, many C API calls become crashy. */ PyThreadState_Swap(NULL); @@ -1336,7 +1336,7 @@ Py_FinalizeEx(void) } #endif - call_ll_exitfuncs(); + call_ll_exitfuncs(runtime); _PyRuntime_Finalize(); return status; @@ -2223,10 +2223,11 @@ int Py_AtExit(void (*func)(void)) } static void -call