Python-checkins
May 2023
https://github.com/python/cpython/commit/03ad6624c2b4a30cccf6d5b7e2b9999e10…
commit: 03ad6624c2b4a30cccf6d5b7e2b9999e104444ae
branch: main
author: Victor Stinner <vstinner(a)python.org>
committer: vstinner <vstinner(a)python.org>
date: 2023-05-31T12:09:41Z
summary:
gh-105096: Deprecate wave getmarkers() method (#105098)
wave: Deprecate the getmark(), setmark() and getmarkers() methods of
the Wave_read and Wave_write classes. They will be removed in Python
3.15.
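To make the effect concrete, here is a minimal sketch (not part of the commit; the parameter values are arbitrary) of how the deprecated stubs behave on Python 3.13: they keep their old return values but now emit a DeprecationWarning first.

    import io
    import warnings
    import wave

    with wave.open(io.BytesIO(), "wb") as writer:
        writer.setnchannels(1)
        writer.setsampwidth(1)
        writer.setframerate(8000)
        with warnings.catch_warnings(record=True) as caught:
            warnings.simplefilter("always")
            # Still returns None, but a DeprecationWarning is emitted.
            assert writer.getmarkers() is None
        assert any(issubclass(w.category, DeprecationWarning) for w in caught)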
files:
A Misc/NEWS.d/next/Library/2023-05-30-17-39-03.gh-issue-105096.pw00FW.rst
M Doc/library/wave.rst
M Doc/whatsnew/3.13.rst
M Lib/test/test_wave.py
M Lib/wave.py
diff --git a/Doc/library/wave.rst b/Doc/library/wave.rst
index 9565ed926576..55b029bc742b 100644
--- a/Doc/library/wave.rst
+++ b/Doc/library/wave.rst
@@ -131,11 +131,19 @@ Wave_read Objects
Returns ``None``.
+ .. deprecated-removed:: 3.13 3.15
+ The method only existed for compatibility with the :mod:`!aifc` module
+ which has been removed in Python 3.13.
+
.. method:: getmark(id)
Raise an error.
+ .. deprecated-removed:: 3.13 3.15
+ The method only existed for compatibility with the :mod:`!aifc` module
+ which has been removed in Python 3.13.
+
The following two methods define a term "position" which is compatible between
them, and is otherwise implementation dependent.
diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst
index 0fd82d6188b3..6d0be3b258f6 100644
--- a/Doc/whatsnew/3.13.rst
+++ b/Doc/whatsnew/3.13.rst
@@ -115,6 +115,10 @@ Optimizations
Deprecated
==========
+* :mod:`wave`: Deprecate the ``getmark()``, ``setmark()`` and ``getmarkers()``
+ methods of the :class:`wave.Wave_read` and :class:`wave.Wave_write` classes.
+ They will be removed in Python 3.15.
+ (Contributed by Victor Stinner in :gh:`105096`.)
Removed
diff --git a/Lib/test/test_wave.py b/Lib/test/test_wave.py
index 6c3362857fc2..5e771c8de969 100644
--- a/Lib/test/test_wave.py
+++ b/Lib/test/test_wave.py
@@ -136,6 +136,32 @@ def test__all__(self):
not_exported = {'WAVE_FORMAT_PCM', 'WAVE_FORMAT_EXTENSIBLE', 'KSDATAFORMAT_SUBTYPE_PCM'}
support.check__all__(self, wave, not_exported=not_exported)
+ def test_read_deprecations(self):
+ filename = support.findfile('pluck-pcm8.wav', subdir='audiodata')
+ with wave.open(filename) as reader:
+ with self.assertWarns(DeprecationWarning):
+ with self.assertRaises(wave.Error):
+ reader.getmark('mark')
+ with self.assertWarns(DeprecationWarning):
+ self.assertIsNone(reader.getmarkers())
+
+ def test_write_deprecations(self):
+ with io.BytesIO(b'') as tmpfile:
+ with wave.open(tmpfile, 'wb') as writer:
+ writer.setnchannels(1)
+ writer.setsampwidth(1)
+ writer.setframerate(1)
+ writer.setcomptype('NONE', 'not compressed')
+
+ with self.assertWarns(DeprecationWarning):
+ with self.assertRaises(wave.Error):
+ writer.setmark(0, 0, 'mark')
+ with self.assertWarns(DeprecationWarning):
+ with self.assertRaises(wave.Error):
+ writer.getmark('mark')
+ with self.assertWarns(DeprecationWarning):
+ self.assertIsNone(writer.getmarkers())
+
class WaveLowLevelTest(unittest.TestCase):
diff --git a/Lib/wave.py b/Lib/wave.py
index 5177ecbef820..a34af244c3e2 100644
--- a/Lib/wave.py
+++ b/Lib/wave.py
@@ -342,9 +342,13 @@ def getparams(self):
self.getcomptype(), self.getcompname())
def getmarkers(self):
+ import warnings
+ warnings._deprecated("Wave_read.getmarkers", remove=(3, 15))
return None
def getmark(self, id):
+ import warnings
+ warnings._deprecated("Wave_read.getmark", remove=(3, 15))
raise Error('no marks')
def setpos(self, pos):
@@ -548,12 +552,18 @@ def getparams(self):
self._nframes, self._comptype, self._compname)
def setmark(self, id, pos, name):
+ import warnings
+ warnings._deprecated("Wave_write.setmark", remove=(3, 15))
raise Error('setmark() not supported')
def getmark(self, id):
+ import warnings
+ warnings._deprecated("Wave_write.getmark", remove=(3, 15))
raise Error('no marks')
def getmarkers(self):
+ import warnings
+ warnings._deprecated("Wave_write.getmarkers", remove=(3, 15))
return None
def tell(self):
diff --git a/Misc/NEWS.d/next/Library/2023-05-30-17-39-03.gh-issue-105096.pw00FW.rst b/Misc/NEWS.d/next/Library/2023-05-30-17-39-03.gh-issue-105096.pw00FW.rst
new file mode 100644
index 000000000000..bc82c13081f1
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-05-30-17-39-03.gh-issue-105096.pw00FW.rst
@@ -0,0 +1,3 @@
+:mod:`wave`: Deprecate the ``getmark()``, ``setmark()`` and ``getmarkers()``
+methods of the :class:`wave.Wave_read` and :class:`wave.Wave_write` classes.
+They will be removed in Python 3.15. Patch by Victor Stinner.
https://github.com/python/cpython/commit/01b42f9559b614d729c3f055d09269db13…
commit: 01b42f9559b614d729c3f055d09269db13d2433c
branch: 3.12
author: Victor Stinner <vstinner(a)python.org>
committer: vstinner <vstinner(a)python.org>
date: 2023-05-31T12:04:21Z
summary:
[3.12] gh-105096: Reformat wave documentation (#105136) (#105138)
gh-105096: Reformat wave documentation (#105136)
Add ".. class::" markups in the wave documentation.
* Also reformat wave.py (minor PEP 8 changes).
* Remove redundant "import struct": it's already imported at top
level.
* Remove wave.rst from .nitignore
(cherry picked from commit 85e5d03163cac106ac8ec142ef03f1349a48948b)
files:
M Doc/library/wave.rst
M Doc/tools/.nitignore
M Lib/wave.py
diff --git a/Doc/library/wave.rst b/Doc/library/wave.rst
index 04a28d97d619e..bb85dbe365c3f 100644
--- a/Doc/library/wave.rst
+++ b/Doc/library/wave.rst
@@ -11,8 +11,9 @@
--------------
-The :mod:`wave` module provides a convenient interface to the WAV sound format.
-Only PCM encoded wave files are supported.
+The :mod:`wave` module provides a convenient interface to the Waveform Audio
+"WAVE" (or "WAV") file format. Only uncompressed PCM encoded wave files are
+supported.
.. versionchanged:: 3.12
@@ -41,13 +42,12 @@ The :mod:`wave` module defines the following function and exception:
value for *mode*.
If you pass in a file-like object, the wave object will not close it when its
- :meth:`close` method is called; it is the caller's responsibility to close
+ ``close()`` method is called; it is the caller's responsibility to close
the file object.
The :func:`.open` function may be used in a :keyword:`with` statement. When
- the :keyword:`!with` block completes, the :meth:`Wave_read.close()
- <wave.Wave_read.close>` or :meth:`Wave_write.close()
- <wave.Wave_write.close()>` method is called.
+ the :keyword:`!with` block completes, the :meth:`Wave_read.close()` or
+ :meth:`Wave_write.close()` method is called.
.. versionchanged:: 3.4
Added support for unseekable files.
@@ -63,87 +63,91 @@ The :mod:`wave` module defines the following function and exception:
Wave_read Objects
-----------------
-Wave_read objects, as returned by :func:`.open`, have the following methods:
+.. class:: Wave_read
+ Read a WAV file.
-.. method:: Wave_read.close()
+ Wave_read objects, as returned by :func:`.open`, have the following methods:
- Close the stream if it was opened by :mod:`wave`, and make the instance
- unusable. This is called automatically on object collection.
+ .. method:: close()
-.. method:: Wave_read.getnchannels()
+ Close the stream if it was opened by :mod:`wave`, and make the instance
+ unusable. This is called automatically on object collection.
- Returns number of audio channels (``1`` for mono, ``2`` for stereo).
+ .. method:: getnchannels()
-.. method:: Wave_read.getsampwidth()
+ Returns number of audio channels (``1`` for mono, ``2`` for stereo).
- Returns sample width in bytes.
+ .. method:: getsampwidth()
-.. method:: Wave_read.getframerate()
+ Returns sample width in bytes.
- Returns sampling frequency.
+ .. method:: getframerate()
-.. method:: Wave_read.getnframes()
+ Returns sampling frequency.
- Returns number of audio frames.
+ .. method:: getnframes()
-.. method:: Wave_read.getcomptype()
+ Returns number of audio frames.
- Returns compression type (``'NONE'`` is the only supported type).
+ .. method:: getcomptype()
-.. method:: Wave_read.getcompname()
+ Returns compression type (``'NONE'`` is the only supported type).
- Human-readable version of :meth:`getcomptype`. Usually ``'not compressed'``
- parallels ``'NONE'``.
+ .. method:: getcompname()
-.. method:: Wave_read.getparams()
+ Human-readable version of :meth:`getcomptype`. Usually ``'not compressed'``
+ parallels ``'NONE'``.
- Returns a :func:`~collections.namedtuple` ``(nchannels, sampwidth,
- framerate, nframes, comptype, compname)``, equivalent to output of the
- :meth:`get\*` methods.
+ .. method:: getparams()
-.. method:: Wave_read.readframes(n)
+ Returns a :func:`~collections.namedtuple` ``(nchannels, sampwidth,
+ framerate, nframes, comptype, compname)``, equivalent to output of the
+ ``get*()`` methods.
- Reads and returns at most *n* frames of audio, as a :class:`bytes` object.
+ .. method:: readframes(n)
-.. method:: Wave_read.rewind()
+ Reads and returns at most *n* frames of audio, as a :class:`bytes` object.
- Rewind the file pointer to the beginning of the audio stream.
-The following two methods are defined for compatibility with the :mod:`aifc`
-module, and don't do anything interesting.
+ .. method:: rewind()
+ Rewind the file pointer to the beginning of the audio stream.
-.. method:: Wave_read.getmarkers()
+ The following two methods are defined for compatibility with the :mod:`aifc`
+ module, and don't do anything interesting.
- Returns ``None``.
+ .. method:: getmarkers()
-.. method:: Wave_read.getmark(id)
+ Returns ``None``.
- Raise an error.
-The following two methods define a term "position" which is compatible between
-them, and is otherwise implementation dependent.
+ .. method:: getmark(id)
+ Raise an error.
-.. method:: Wave_read.setpos(pos)
+ The following two methods define a term "position" which is compatible between
+ them, and is otherwise implementation dependent.
- Set the file pointer to the specified position.
+ .. method:: setpos(pos)
-.. method:: Wave_read.tell()
+ Set the file pointer to the specified position.
- Return current file pointer position.
+
+ .. method:: tell()
+
+ Return current file pointer position.
.. _wave-write-objects:
@@ -151,97 +155,100 @@ them, and is otherwise implementation dependent.
Wave_write Objects
------------------
-For seekable output streams, the ``wave`` header will automatically be updated
-to reflect the number of frames actually written. For unseekable streams, the
-*nframes* value must be accurate when the first frame data is written. An
-accurate *nframes* value can be achieved either by calling
-:meth:`~Wave_write.setnframes` or :meth:`~Wave_write.setparams` with the number
-of frames that will be written before :meth:`~Wave_write.close` is called and
-then using :meth:`~Wave_write.writeframesraw` to write the frame data, or by
-calling :meth:`~Wave_write.writeframes` with all of the frame data to be
-written. In the latter case :meth:`~Wave_write.writeframes` will calculate
-the number of frames in the data and set *nframes* accordingly before writing
-the frame data.
+.. class:: Wave_write
-Wave_write objects, as returned by :func:`.open`, have the following methods:
+ Write a WAV file.
-.. versionchanged:: 3.4
- Added support for unseekable files.
+ Wave_write objects, as returned by :func:`.open`.
+ For seekable output streams, the ``wave`` header will automatically be updated
+ to reflect the number of frames actually written. For unseekable streams, the
+ *nframes* value must be accurate when the first frame data is written. An
+ accurate *nframes* value can be achieved either by calling
+ :meth:`setnframes` or :meth:`setparams` with the number
+ of frames that will be written before :meth:`close` is called and
+ then using :meth:`writeframesraw` to write the frame data, or by
+ calling :meth:`writeframes` with all of the frame data to be
+ written. In the latter case :meth:`writeframes` will calculate
+ the number of frames in the data and set *nframes* accordingly before writing
+ the frame data.
-.. method:: Wave_write.close()
+ .. versionchanged:: 3.4
+ Added support for unseekable files.
- Make sure *nframes* is correct, and close the file if it was opened by
- :mod:`wave`. This method is called upon object collection. It will raise
- an exception if the output stream is not seekable and *nframes* does not
- match the number of frames actually written.
+ Wave_write objects have the following methods:
+ .. method:: close()
-.. method:: Wave_write.setnchannels(n)
+ Make sure *nframes* is correct, and close the file if it was opened by
+ :mod:`wave`. This method is called upon object collection. It will raise
+ an exception if the output stream is not seekable and *nframes* does not
+ match the number of frames actually written.
- Set the number of channels.
+ .. method:: setnchannels(n)
-.. method:: Wave_write.setsampwidth(n)
+ Set the number of channels.
- Set the sample width to *n* bytes.
+ .. method:: setsampwidth(n)
-.. method:: Wave_write.setframerate(n)
+ Set the sample width to *n* bytes.
- Set the frame rate to *n*.
- .. versionchanged:: 3.2
- A non-integral input to this method is rounded to the nearest
- integer.
+ .. method:: setframerate(n)
+ Set the frame rate to *n*.
-.. method:: Wave_write.setnframes(n)
+ .. versionchanged:: 3.2
+ A non-integral input to this method is rounded to the nearest
+ integer.
- Set the number of frames to *n*. This will be changed later if the number
- of frames actually written is different (this update attempt will
- raise an error if the output stream is not seekable).
+ .. method:: setnframes(n)
-.. method:: Wave_write.setcomptype(type, name)
+ Set the number of frames to *n*. This will be changed later if the number
+ of frames actually written is different (this update attempt will
+ raise an error if the output stream is not seekable).
- Set the compression type and description. At the moment, only compression type
- ``NONE`` is supported, meaning no compression.
+ .. method:: setcomptype(type, name)
-.. method:: Wave_write.setparams(tuple)
+ Set the compression type and description. At the moment, only compression type
+ ``NONE`` is supported, meaning no compression.
- The *tuple* should be ``(nchannels, sampwidth, framerate, nframes, comptype,
- compname)``, with values valid for the :meth:`set\*` methods. Sets all
- parameters.
+ .. method:: setparams(tuple)
-.. method:: Wave_write.tell()
+ The *tuple* should be ``(nchannels, sampwidth, framerate, nframes, comptype,
+ compname)``, with values valid for the ``set*()`` methods. Sets all
+ parameters.
- Return current position in the file, with the same disclaimer for the
- :meth:`Wave_read.tell` and :meth:`Wave_read.setpos` methods.
+ .. method:: tell()
-.. method:: Wave_write.writeframesraw(data)
+ Return current position in the file, with the same disclaimer for the
+ :meth:`Wave_read.tell` and :meth:`Wave_read.setpos` methods.
- Write audio frames, without correcting *nframes*.
- .. versionchanged:: 3.4
- Any :term:`bytes-like object` is now accepted.
+ .. method:: writeframesraw(data)
+ Write audio frames, without correcting *nframes*.
-.. method:: Wave_write.writeframes(data)
+ .. versionchanged:: 3.4
+ Any :term:`bytes-like object` is now accepted.
- Write audio frames and make sure *nframes* is correct. It will raise an
- error if the output stream is not seekable and the total number of frames
- that have been written after *data* has been written does not match the
- previously set value for *nframes*.
- .. versionchanged:: 3.4
- Any :term:`bytes-like object` is now accepted.
+ .. method:: writeframes(data)
+ Write audio frames and make sure *nframes* is correct. It will raise an
+ error if the output stream is not seekable and the total number of frames
+ that have been written after *data* has been written does not match the
+ previously set value for *nframes*.
-Note that it is invalid to set any parameters after calling :meth:`writeframes`
-or :meth:`writeframesraw`, and any attempt to do so will raise
-:exc:`wave.Error`.
+ .. versionchanged:: 3.4
+ Any :term:`bytes-like object` is now accepted.
+ Note that it is invalid to set any parameters after calling :meth:`writeframes`
+ or :meth:`writeframesraw`, and any attempt to do so will raise
+ :exc:`wave.Error`.
diff --git a/Doc/tools/.nitignore b/Doc/tools/.nitignore
index 1d3503bf06f08..3a34c0b2cbfff 100644
--- a/Doc/tools/.nitignore
+++ b/Doc/tools/.nitignore
@@ -236,7 +236,6 @@ Doc/library/urllib.error.rst
Doc/library/urllib.parse.rst
Doc/library/urllib.request.rst
Doc/library/uuid.rst
-Doc/library/wave.rst
Doc/library/weakref.rst
Doc/library/webbrowser.rst
Doc/library/winreg.rst
diff --git a/Lib/wave.py b/Lib/wave.py
index d5858e5d4b80d..4b0c683f6b5e2 100644
--- a/Lib/wave.py
+++ b/Lib/wave.py
@@ -92,6 +92,7 @@ class Error(Exception):
_wave_params = namedtuple('_wave_params',
'nchannels sampwidth framerate nframes comptype compname')
+
def _byteswap(data, width):
swapped_data = bytearray(len(data))
@@ -104,7 +105,6 @@ def _byteswap(data, width):
class _Chunk:
def __init__(self, file, align=True, bigendian=True, inclheader=False):
- import struct
self.closed = False
self.align = align # whether to align to word (2-byte) boundaries
if bigendian:
@@ -214,7 +214,6 @@ def skip(self):
raise EOFError
-
class Wave_read:
"""Variables used in this class:
@@ -411,6 +410,7 @@ def _read_fmt_chunk(self, chunk):
self._comptype = 'NONE'
self._compname = 'not compressed'
+
class Wave_write:
"""Variables used in this class:
@@ -638,6 +638,7 @@ def _patchheader(self):
self._file.seek(curpos, 0)
self._datalength = self._datawritten
+
def open(f, mode=None):
if mode is None:
if hasattr(f, 'mode'):
https://github.com/python/cpython/commit/58a2e0981642dcddf49daa776ff68a43d3…
commit: 58a2e0981642dcddf49daa776ff68a43d3498cee
branch: main
author: Victor Stinner <vstinner(a)python.org>
committer: vstinner <vstinner(a)python.org>
date: 2023-05-31T11:41:19Z
summary:
gh-62948: IOBase finalizer logs close() errors (#105104)
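As a rough illustration of the behaviour change (not taken from the commit; the class and error message are invented), an error raised by close() during finalization is now reported through sys.unraisablehook instead of being silently swallowed in release builds:

    import io
    import sys

    reports = []
    sys.unraisablehook = lambda args: reports.append(args.exc_type)

    class BrokenClose(io.RawIOBase):
        def close(self):
            # Simulated failure; exceptions raised here used to be ignored
            # by default in release builds.
            raise OSError("simulated close() failure")

    f = BrokenClose()
    del f             # the finalizer calls close(); the OSError goes to sys.unraisablehook
    print(reports)    # expected to contain OSError on Python 3.13+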
files:
A Misc/NEWS.d/next/Library/2023-05-30-18-45-02.gh-issue-62948.1-5wMR.rst
M Doc/whatsnew/3.13.rst
M Lib/_pyio.py
M Lib/test/test_io.py
M Modules/_io/iobase.c
diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst
index 7e29ed306c2d7..0fd82d6188b32 100644
--- a/Doc/whatsnew/3.13.rst
+++ b/Doc/whatsnew/3.13.rst
@@ -87,6 +87,15 @@ New Modules
Improved Modules
================
+io
+--
+
+The :class:`io.IOBase` finalizer now logs the ``close()`` method errors with
+:data:`sys.unraisablehook`. Previously, errors were ignored silently by default,
+and only logged in :ref:`Python Development Mode <devmode>` or on :ref:`Python
+built on debug mode <debug-build>`.
+(Contributed by Victor Stinner in :gh:`62948`.)
+
pathlib
-------
diff --git a/Lib/_pyio.py b/Lib/_pyio.py
index 7f247ff47c9e6..32698abac78d2 100644
--- a/Lib/_pyio.py
+++ b/Lib/_pyio.py
@@ -33,11 +33,8 @@
# Rebind for compatibility
BlockingIOError = BlockingIOError
-# Does io.IOBase finalizer log the exception if the close() method fails?
-# The exception is ignored silently by default in release build.
-_IOBASE_EMITS_UNRAISABLE = (hasattr(sys, "gettotalrefcount") or sys.flags.dev_mode)
# Does open() check its 'errors' argument?
-_CHECK_ERRORS = _IOBASE_EMITS_UNRAISABLE
+_CHECK_ERRORS = (hasattr(sys, "gettotalrefcount") or sys.flags.dev_mode)
def text_encoding(encoding, stacklevel=2):
@@ -416,18 +413,9 @@ def __del__(self):
if closed:
return
- if _IOBASE_EMITS_UNRAISABLE:
- self.close()
- else:
- # The try/except block is in case this is called at program
- # exit time, when it's possible that globals have already been
- # deleted, and then the close() call might fail. Since
- # there's nothing we can do about such failures and they annoy
- # the end users, we suppress the traceback.
- try:
- self.close()
- except:
- pass
+ # If close() fails, the caller logs the exception with
+ # sys.unraisablehook. close() must be called at the end at __del__().
+ self.close()
### Inquiries ###
diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py
index cc16804fe2182..26ae40d93c84e 100644
--- a/Lib/test/test_io.py
+++ b/Lib/test/test_io.py
@@ -66,10 +66,6 @@ def byteslike(*pos, **kw):
class EmptyStruct(ctypes.Structure):
pass
-# Does io.IOBase finalizer log the exception if the close() method fails?
-# The exception is ignored silently by default in release build.
-IOBASE_EMITS_UNRAISABLE = (support.Py_DEBUG or sys.flags.dev_mode)
-
def _default_chunk_size():
"""Get the default TextIOWrapper chunk size"""
@@ -1218,10 +1214,7 @@ def test_error_through_destructor(self):
with self.assertRaises(AttributeError):
self.tp(rawio).xyzzy
- if not IOBASE_EMITS_UNRAISABLE:
- self.assertIsNone(cm.unraisable)
- elif cm.unraisable is not None:
- self.assertEqual(cm.unraisable.exc_type, OSError)
+ self.assertEqual(cm.unraisable.exc_type, OSError)
def test_repr(self):
raw = self.MockRawIO()
@@ -3022,10 +3015,7 @@ def test_error_through_destructor(self):
with self.assertRaises(AttributeError):
self.TextIOWrapper(rawio, encoding="utf-8").xyzzy
- if not IOBASE_EMITS_UNRAISABLE:
- self.assertIsNone(cm.unraisable)
- elif cm.unraisable is not None:
- self.assertEqual(cm.unraisable.exc_type, OSError)
+ self.assertEqual(cm.unraisable.exc_type, OSError)
# Systematic tests of the text I/O API
diff --git a/Misc/NEWS.d/next/Library/2023-05-30-18-45-02.gh-issue-62948.1-5wMR.rst b/Misc/NEWS.d/next/Library/2023-05-30-18-45-02.gh-issue-62948.1-5wMR.rst
new file mode 100644
index 0000000000000..d6ba989329bce
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-05-30-18-45-02.gh-issue-62948.1-5wMR.rst
@@ -0,0 +1,4 @@
+The :class:`io.IOBase` finalizer now logs the ``close()`` method errors with
+:data:`sys.unraisablehook`. Previously, errors were ignored silently by default,
+and only logged in :ref:`Python Development Mode <devmode>` or on
+:ref:`Python built on debug mode <debug-build>`. Patch by Victor Stinner.
diff --git a/Modules/_io/iobase.c b/Modules/_io/iobase.c
index bcb498d9c5b5d..f98e75ce2d1ed 100644
--- a/Modules/_io/iobase.c
+++ b/Modules/_io/iobase.c
@@ -319,20 +319,8 @@ iobase_finalize(PyObject *self)
if (PyObject_SetAttr(self, &_Py_ID(_finalizing), Py_True))
PyErr_Clear();
res = PyObject_CallMethodNoArgs((PyObject *)self, &_Py_ID(close));
- /* Silencing I/O errors is bad, but printing spurious tracebacks is
- equally as bad, and potentially more frequent (because of
- shutdown issues). */
if (res == NULL) {
-#ifndef Py_DEBUG
- if (_Py_GetConfig()->dev_mode) {
- PyErr_WriteUnraisable(self);
- }
- else {
- PyErr_Clear();
- }
-#else
PyErr_WriteUnraisable(self);
-#endif
}
else {
Py_DECREF(res);
https://github.com/python/cpython/commit/85e5d03163cac106ac8ec142ef03f1349a…
commit: 85e5d03163cac106ac8ec142ef03f1349a48948b
branch: main
author: Victor Stinner <vstinner(a)python.org>
committer: vstinner <vstinner(a)python.org>
date: 2023-05-31T11:29:10Z
summary:
gh-105096: Reformat wave documentation (#105136)
Add ".. class::" markups in the wave documentation.
* Also reformat wave.py (minor PEP 8 changes).
* Remove redundant "import struct": it's already imported at top
level.
* Remove wave.rst from .nitignore
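The markup change does not alter the wave API itself; for readers skimming the diff, a small usage sketch of the Wave_read and Wave_write methods being re-documented ("input.wav" and "copy.wav" are placeholder file names):

    import wave

    with wave.open("input.wav", "rb") as r:
        params = r.getparams()                  # namedtuple: nchannels, sampwidth, framerate, ...
        frames = r.readframes(r.getnframes())   # all audio frames as bytes

    with wave.open("copy.wav", "wb") as w:
        w.setparams(params)                     # set all parameters at once
        w.writeframes(frames)                   # writeframes() keeps nframes consistent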
files:
M Doc/library/wave.rst
M Doc/tools/.nitignore
M Lib/wave.py
diff --git a/Doc/library/wave.rst b/Doc/library/wave.rst
index 4dcbc3d4c2d1..9565ed926576 100644
--- a/Doc/library/wave.rst
+++ b/Doc/library/wave.rst
@@ -11,8 +11,9 @@
--------------
-The :mod:`wave` module provides a convenient interface to the WAV sound format.
-Only PCM encoded wave files are supported.
+The :mod:`wave` module provides a convenient interface to the Waveform Audio
+"WAVE" (or "WAV") file format. Only uncompressed PCM encoded wave files are
+supported.
.. versionchanged:: 3.12
@@ -41,13 +42,12 @@ The :mod:`wave` module defines the following function and exception:
value for *mode*.
If you pass in a file-like object, the wave object will not close it when its
- :meth:`close` method is called; it is the caller's responsibility to close
+ ``close()`` method is called; it is the caller's responsibility to close
the file object.
The :func:`.open` function may be used in a :keyword:`with` statement. When
- the :keyword:`!with` block completes, the :meth:`Wave_read.close()
- <wave.Wave_read.close>` or :meth:`Wave_write.close()
- <wave.Wave_write.close()>` method is called.
+ the :keyword:`!with` block completes, the :meth:`Wave_read.close()` or
+ :meth:`Wave_write.close()` method is called.
.. versionchanged:: 3.4
Added support for unseekable files.
@@ -63,87 +63,91 @@ The :mod:`wave` module defines the following function and exception:
Wave_read Objects
-----------------
-Wave_read objects, as returned by :func:`.open`, have the following methods:
+.. class:: Wave_read
+ Read a WAV file.
-.. method:: Wave_read.close()
+ Wave_read objects, as returned by :func:`.open`, have the following methods:
- Close the stream if it was opened by :mod:`wave`, and make the instance
- unusable. This is called automatically on object collection.
+ .. method:: close()
-.. method:: Wave_read.getnchannels()
+ Close the stream if it was opened by :mod:`wave`, and make the instance
+ unusable. This is called automatically on object collection.
- Returns number of audio channels (``1`` for mono, ``2`` for stereo).
+ .. method:: getnchannels()
-.. method:: Wave_read.getsampwidth()
+ Returns number of audio channels (``1`` for mono, ``2`` for stereo).
- Returns sample width in bytes.
+ .. method:: getsampwidth()
-.. method:: Wave_read.getframerate()
+ Returns sample width in bytes.
- Returns sampling frequency.
+ .. method:: getframerate()
-.. method:: Wave_read.getnframes()
+ Returns sampling frequency.
- Returns number of audio frames.
+ .. method:: getnframes()
-.. method:: Wave_read.getcomptype()
+ Returns number of audio frames.
- Returns compression type (``'NONE'`` is the only supported type).
+ .. method:: getcomptype()
-.. method:: Wave_read.getcompname()
+ Returns compression type (``'NONE'`` is the only supported type).
- Human-readable version of :meth:`getcomptype`. Usually ``'not compressed'``
- parallels ``'NONE'``.
+ .. method:: getcompname()
-.. method:: Wave_read.getparams()
+ Human-readable version of :meth:`getcomptype`. Usually ``'not compressed'``
+ parallels ``'NONE'``.
- Returns a :func:`~collections.namedtuple` ``(nchannels, sampwidth,
- framerate, nframes, comptype, compname)``, equivalent to output of the
- :meth:`get\*` methods.
+ .. method:: getparams()
-.. method:: Wave_read.readframes(n)
+ Returns a :func:`~collections.namedtuple` ``(nchannels, sampwidth,
+ framerate, nframes, comptype, compname)``, equivalent to output of the
+ ``get*()`` methods.
- Reads and returns at most *n* frames of audio, as a :class:`bytes` object.
+ .. method:: readframes(n)
-.. method:: Wave_read.rewind()
+ Reads and returns at most *n* frames of audio, as a :class:`bytes` object.
- Rewind the file pointer to the beginning of the audio stream.
-The following two methods are defined for compatibility with the old :mod:`!aifc`
-module, and don't do anything interesting.
+ .. method:: rewind()
+ Rewind the file pointer to the beginning of the audio stream.
-.. method:: Wave_read.getmarkers()
+ The following two methods are defined for compatibility with the old :mod:`!aifc`
+ module, and don't do anything interesting.
- Returns ``None``.
+ .. method:: getmarkers()
-.. method:: Wave_read.getmark(id)
+ Returns ``None``.
- Raise an error.
-The following two methods define a term "position" which is compatible between
-them, and is otherwise implementation dependent.
+ .. method:: getmark(id)
+ Raise an error.
-.. method:: Wave_read.setpos(pos)
+ The following two methods define a term "position" which is compatible between
+ them, and is otherwise implementation dependent.
- Set the file pointer to the specified position.
+ .. method:: setpos(pos)
-.. method:: Wave_read.tell()
+ Set the file pointer to the specified position.
- Return current file pointer position.
+
+ .. method:: tell()
+
+ Return current file pointer position.
.. _wave-write-objects:
@@ -151,97 +155,100 @@ them, and is otherwise implementation dependent.
Wave_write Objects
------------------
-For seekable output streams, the ``wave`` header will automatically be updated
-to reflect the number of frames actually written. For unseekable streams, the
-*nframes* value must be accurate when the first frame data is written. An
-accurate *nframes* value can be achieved either by calling
-:meth:`~Wave_write.setnframes` or :meth:`~Wave_write.setparams` with the number
-of frames that will be written before :meth:`~Wave_write.close` is called and
-then using :meth:`~Wave_write.writeframesraw` to write the frame data, or by
-calling :meth:`~Wave_write.writeframes` with all of the frame data to be
-written. In the latter case :meth:`~Wave_write.writeframes` will calculate
-the number of frames in the data and set *nframes* accordingly before writing
-the frame data.
+.. class:: Wave_write
-Wave_write objects, as returned by :func:`.open`, have the following methods:
+ Write a WAV file.
-.. versionchanged:: 3.4
- Added support for unseekable files.
+ Wave_write objects, as returned by :func:`.open`.
+ For seekable output streams, the ``wave`` header will automatically be updated
+ to reflect the number of frames actually written. For unseekable streams, the
+ *nframes* value must be accurate when the first frame data is written. An
+ accurate *nframes* value can be achieved either by calling
+ :meth:`setnframes` or :meth:`setparams` with the number
+ of frames that will be written before :meth:`close` is called and
+ then using :meth:`writeframesraw` to write the frame data, or by
+ calling :meth:`writeframes` with all of the frame data to be
+ written. In the latter case :meth:`writeframes` will calculate
+ the number of frames in the data and set *nframes* accordingly before writing
+ the frame data.
-.. method:: Wave_write.close()
+ .. versionchanged:: 3.4
+ Added support for unseekable files.
- Make sure *nframes* is correct, and close the file if it was opened by
- :mod:`wave`. This method is called upon object collection. It will raise
- an exception if the output stream is not seekable and *nframes* does not
- match the number of frames actually written.
+ Wave_write objects have the following methods:
+ .. method:: close()
-.. method:: Wave_write.setnchannels(n)
+ Make sure *nframes* is correct, and close the file if it was opened by
+ :mod:`wave`. This method is called upon object collection. It will raise
+ an exception if the output stream is not seekable and *nframes* does not
+ match the number of frames actually written.
- Set the number of channels.
+ .. method:: setnchannels(n)
-.. method:: Wave_write.setsampwidth(n)
+ Set the number of channels.
- Set the sample width to *n* bytes.
+ .. method:: setsampwidth(n)
-.. method:: Wave_write.setframerate(n)
+ Set the sample width to *n* bytes.
- Set the frame rate to *n*.
- .. versionchanged:: 3.2
- A non-integral input to this method is rounded to the nearest
- integer.
+ .. method:: setframerate(n)
+ Set the frame rate to *n*.
-.. method:: Wave_write.setnframes(n)
+ .. versionchanged:: 3.2
+ A non-integral input to this method is rounded to the nearest
+ integer.
- Set the number of frames to *n*. This will be changed later if the number
- of frames actually written is different (this update attempt will
- raise an error if the output stream is not seekable).
+ .. method:: setnframes(n)
-.. method:: Wave_write.setcomptype(type, name)
+ Set the number of frames to *n*. This will be changed later if the number
+ of frames actually written is different (this update attempt will
+ raise an error if the output stream is not seekable).
- Set the compression type and description. At the moment, only compression type
- ``NONE`` is supported, meaning no compression.
+ .. method:: setcomptype(type, name)
-.. method:: Wave_write.setparams(tuple)
+ Set the compression type and description. At the moment, only compression type
+ ``NONE`` is supported, meaning no compression.
- The *tuple* should be ``(nchannels, sampwidth, framerate, nframes, comptype,
- compname)``, with values valid for the :meth:`set\*` methods. Sets all
- parameters.
+ .. method:: setparams(tuple)
-.. method:: Wave_write.tell()
+ The *tuple* should be ``(nchannels, sampwidth, framerate, nframes, comptype,
+ compname)``, with values valid for the ``set*()`` methods. Sets all
+ parameters.
- Return current position in the file, with the same disclaimer for the
- :meth:`Wave_read.tell` and :meth:`Wave_read.setpos` methods.
+ .. method:: tell()
-.. method:: Wave_write.writeframesraw(data)
+ Return current position in the file, with the same disclaimer for the
+ :meth:`Wave_read.tell` and :meth:`Wave_read.setpos` methods.
- Write audio frames, without correcting *nframes*.
- .. versionchanged:: 3.4
- Any :term:`bytes-like object` is now accepted.
+ .. method:: writeframesraw(data)
+ Write audio frames, without correcting *nframes*.
-.. method:: Wave_write.writeframes(data)
+ .. versionchanged:: 3.4
+ Any :term:`bytes-like object` is now accepted.
- Write audio frames and make sure *nframes* is correct. It will raise an
- error if the output stream is not seekable and the total number of frames
- that have been written after *data* has been written does not match the
- previously set value for *nframes*.
- .. versionchanged:: 3.4
- Any :term:`bytes-like object` is now accepted.
+ .. method:: writeframes(data)
+ Write audio frames and make sure *nframes* is correct. It will raise an
+ error if the output stream is not seekable and the total number of frames
+ that have been written after *data* has been written does not match the
+ previously set value for *nframes*.
-Note that it is invalid to set any parameters after calling :meth:`writeframes`
-or :meth:`writeframesraw`, and any attempt to do so will raise
-:exc:`wave.Error`.
+ .. versionchanged:: 3.4
+ Any :term:`bytes-like object` is now accepted.
+ Note that it is invalid to set any parameters after calling :meth:`writeframes`
+ or :meth:`writeframesraw`, and any attempt to do so will raise
+ :exc:`wave.Error`.
diff --git a/Doc/tools/.nitignore b/Doc/tools/.nitignore
index 2b1fc2bdce46..23aa30c956b3 100644
--- a/Doc/tools/.nitignore
+++ b/Doc/tools/.nitignore
@@ -226,7 +226,6 @@ Doc/library/urllib.error.rst
Doc/library/urllib.parse.rst
Doc/library/urllib.request.rst
Doc/library/uuid.rst
-Doc/library/wave.rst
Doc/library/weakref.rst
Doc/library/winreg.rst
Doc/library/winsound.rst
diff --git a/Lib/wave.py b/Lib/wave.py
index 76b73de1d67a..5177ecbef820 100644
--- a/Lib/wave.py
+++ b/Lib/wave.py
@@ -92,6 +92,7 @@ class Error(Exception):
_wave_params = namedtuple('_wave_params',
'nchannels sampwidth framerate nframes comptype compname')
+
def _byteswap(data, width):
swapped_data = bytearray(len(data))
@@ -104,7 +105,6 @@ def _byteswap(data, width):
class _Chunk:
def __init__(self, file, align=True, bigendian=True, inclheader=False):
- import struct
self.closed = False
self.align = align # whether to align to word (2-byte) boundaries
if bigendian:
@@ -214,7 +214,6 @@ def skip(self):
raise EOFError
-
class Wave_read:
"""Variables used in this class:
@@ -411,6 +410,7 @@ def _read_fmt_chunk(self, chunk):
self._comptype = 'NONE'
self._compname = 'not compressed'
+
class Wave_write:
"""Variables used in this class:
@@ -638,6 +638,7 @@ def _patchheader(self):
self._file.seek(curpos, 0)
self._datalength = self._datawritten
+
def open(f, mode=None):
if mode is None:
if hasattr(f, 'mode'):
https://github.com/python/cpython/commit/579c41c10224a004c3e89ed9088771325c…
commit: 579c41c10224a004c3e89ed9088771325c1c1a98
branch: main
author: Victor Stinner <vstinner(a)python.org>
committer: vstinner <vstinner(a)python.org>
date: 2023-05-31T11:17:06Z
summary:
gh-105107: Remove PyEval_CallFunction() function (#105108)
Remove 4 functions from the C API, deprecated in Python 3.9:
* PyEval_CallObjectWithKeywords()
* PyEval_CallObject()
* PyEval_CallFunction()
* PyEval_CallMethod()
Keep 3 functions in the stable ABI:
* PyEval_CallObjectWithKeywords()
* PyEval_CallFunction()
* PyEval_CallMethod()
files:
A Misc/NEWS.d/next/C API/2023-05-30-19-11-09.gh-issue-105107.YQwMnm.rst
M Doc/data/stable_abi.dat
M Doc/whatsnew/3.13.rst
M Include/ceval.h
M Misc/stable_abi.toml
M Objects/call.c
diff --git a/Doc/data/stable_abi.dat b/Doc/data/stable_abi.dat
index f112d268129f..f255d9586b9b 100644
--- a/Doc/data/stable_abi.dat
+++ b/Doc/data/stable_abi.dat
@@ -180,9 +180,6 @@ function,PyErr_WarnFormat,3.2,,
function,PyErr_WriteUnraisable,3.2,,
function,PyEval_AcquireLock,3.2,,
function,PyEval_AcquireThread,3.2,,
-function,PyEval_CallFunction,3.2,,
-function,PyEval_CallMethod,3.2,,
-function,PyEval_CallObjectWithKeywords,3.2,,
function,PyEval_EvalCode,3.2,,
function,PyEval_EvalCodeEx,3.2,,
function,PyEval_EvalFrame,3.2,,
diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst
index 13b9be1c8ee2..7e29ed306c2d 100644
--- a/Doc/whatsnew/3.13.rst
+++ b/Doc/whatsnew/3.13.rst
@@ -308,3 +308,18 @@ Deprecated
Removed
-------
+* Remove functions deprecated in Python 3.9.
+
+ * ``PyEval_CallObject()``, ``PyEval_CallObjectWithKeywords()``: use
+ :c:func:`PyObject_CallNoArgs` or :c:func:`PyObject_Call` instead.
+ Warning: :c:func:`PyObject_Call` positional arguments must be a
+ :class:`tuple` and must not be *NULL*, keyword arguments must be a
+ :class:`dict` or *NULL*, whereas removed functions checked arguments type
+ and accepted *NULL* positional and keyword arguments.
+ To replace ``PyEval_CallObjectWithKeywords(func, NULL, kwargs)`` with
+ :c:func:`PyObject_Call`, pass an empty tuple as positional arguments using
+ :c:func:`PyTuple_New(0) <PyTuple_New>`.
+ * ``PyEval_CallFunction()``: use :c:func:`PyObject_CallFunction` instead.
+ * ``PyEval_CallMethod()``: use :c:func:`PyObject_CallMethod` instead.
+
+ (Contributed by Victor Stinner in :gh:`105107`.)
diff --git a/Include/ceval.h b/Include/ceval.h
index ad4d909d6f2b..cf231b74f2bf 100644
--- a/Include/ceval.h
+++ b/Include/ceval.h
@@ -17,27 +17,6 @@ PyAPI_FUNC(PyObject *) PyEval_EvalCodeEx(PyObject *co,
PyObject *const *defs, int defc,
PyObject *kwdefs, PyObject *closure);
-/* PyEval_CallObjectWithKeywords(), PyEval_CallObject(), PyEval_CallFunction
- * and PyEval_CallMethod are deprecated. Since they are officially part of the
- * stable ABI (PEP 384), they must be kept for backward compatibility.
- * PyObject_Call(), PyObject_CallFunction() and PyObject_CallMethod() are
- * recommended to call a callable object.
- */
-
-Py_DEPRECATED(3.9) PyAPI_FUNC(PyObject *) PyEval_CallObjectWithKeywords(
- PyObject *callable,
- PyObject *args,
- PyObject *kwargs);
-
-/* Deprecated since PyEval_CallObjectWithKeywords is deprecated */
-#define PyEval_CallObject(callable, arg) \
- PyEval_CallObjectWithKeywords((callable), (arg), _PyObject_CAST(_Py_NULL))
-
-Py_DEPRECATED(3.9) PyAPI_FUNC(PyObject *) PyEval_CallFunction(
- PyObject *callable, const char *format, ...);
-Py_DEPRECATED(3.9) PyAPI_FUNC(PyObject *) PyEval_CallMethod(
- PyObject *obj, const char *name, const char *format, ...);
-
PyAPI_FUNC(PyObject *) PyEval_GetBuiltins(void);
PyAPI_FUNC(PyObject *) PyEval_GetGlobals(void);
PyAPI_FUNC(PyObject *) PyEval_GetLocals(void);
diff --git a/Misc/NEWS.d/next/C API/2023-05-30-19-11-09.gh-issue-105107.YQwMnm.rst b/Misc/NEWS.d/next/C API/2023-05-30-19-11-09.gh-issue-105107.YQwMnm.rst
new file mode 100644
index 000000000000..8423f4742ce2
--- /dev/null
+++ b/Misc/NEWS.d/next/C API/2023-05-30-19-11-09.gh-issue-105107.YQwMnm.rst
@@ -0,0 +1,9 @@
+Remove functions deprecated in Python 3.9.
+
+* ``PyEval_CallObject()``, ``PyEval_CallObjectWithKeywords()``: use
+ :c:func:`PyObject_CallNoArgs` and :c:func:`PyObject_Call` (positional
+ arguments must not be *NULL*) instead.
+* ``PyEval_CallFunction()``: use :c:func:`PyObject_CallFunction` instead.
+* ``PyEval_CallMethod()``: use :c:func:`PyObject_CallMethod` instead.
+
+Patch by Victor Stinner.
diff --git a/Misc/stable_abi.toml b/Misc/stable_abi.toml
index 1db98483f09f..ff3b09ca74c3 100644
--- a/Misc/stable_abi.toml
+++ b/Misc/stable_abi.toml
@@ -667,10 +667,13 @@
added = '3.2'
[function.PyEval_CallFunction]
added = '3.2'
+ abi_only = true
[function.PyEval_CallMethod]
added = '3.2'
+ abi_only = true
[function.PyEval_CallObjectWithKeywords]
added = '3.2'
+ abi_only = true
[function.PyEval_EvalCode]
added = '3.2'
[function.PyEval_EvalCodeEx]
diff --git a/Objects/call.c b/Objects/call.c
index e9002d350c92..4658cf1f56bb 100644
--- a/Objects/call.c
+++ b/Objects/call.c
@@ -426,8 +426,9 @@ _PyFunction_Vectorcall(PyObject *func, PyObject* const* stack,
/* --- More complex call functions -------------------------------- */
/* External interface to call any callable object.
- The args must be a tuple or NULL. The kwargs must be a dict or NULL. */
-PyObject *
+ The args must be a tuple or NULL. The kwargs must be a dict or NULL.
+ Function removed in Python 3.13 API but kept in the stable ABI. */
+PyAPI_FUNC(PyObject*)
PyEval_CallObjectWithKeywords(PyObject *callable,
PyObject *args, PyObject *kwargs)
{
@@ -583,9 +584,8 @@ PyObject_CallFunction(PyObject *callable, const char *format, ...)
/* PyEval_CallFunction is exact copy of PyObject_CallFunction.
- * This function is kept for backward compatibility.
- */
-PyObject *
+ Function removed in Python 3.13 API but kept in the stable ABI. */
+PyAPI_FUNC(PyObject*)
PyEval_CallFunction(PyObject *callable, const char *format, ...)
{
va_list va;
@@ -656,9 +656,8 @@ PyObject_CallMethod(PyObject *obj, const char *name, const char *format, ...)
/* PyEval_CallMethod is exact copy of PyObject_CallMethod.
- * This function is kept for backward compatibility.
- */
-PyObject *
+ Function removed in Python 3.13 API but kept in the stable ABI. */
+PyAPI_FUNC(PyObject*)
PyEval_CallMethod(PyObject *obj, const char *name, const char *format, ...)
{
PyThreadState *tstate = _PyThreadState_GET();
https://github.com/python/cpython/commit/2f8c22f1d6c22f018c78264937db66d52f…
commit: 2f8c22f1d6c22f018c78264937db66d52fb18869
branch: 3.12
author: Miss Islington (bot) <31488909+miss-islington(a)users.noreply.github.com>
committer: pablogsal <Pablogsal(a)gmail.com>
date: 2023-05-31T11:11:39+01:00
summary:
[3.12] gh-105042: Disable unmatched parens syntax error in python tokenize (GH-105061) (#105120)
gh-105042: Disable unmatched parens syntax error in python tokenize (GH-105061)
(cherry picked from commit 70f315c2d6de87b0514ce16cc00a91a5b60a6098)
Co-authored-by: Lysandros Nikolaou <lisandrosnik(a)gmail.com>
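A minimal sketch of the effect (not part of the commit; assumes a Python build that includes this fix): the tokenize module no longer raises SyntaxError for a closing parenthesis whose opener is on another line, which is what lets inspect.getsource() handle such lines. Note that compile() still rejects the same input.

    import io
    import tokenize

    src = "); x"
    tokens = list(tokenize.generate_tokens(io.StringIO(src).readline))
    print([(tokenize.tok_name[t.type], t.string) for t in tokens])
    # Expected to include OP ')' and OP ';' followed by NAME 'x'.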
files:
M Lib/test/inspect_fodder.py
M Lib/test/test_inspect.py
M Lib/test/test_tokenize.py
M Parser/tokenizer.c
M Python/Python-tokenize.c
diff --git a/Lib/test/inspect_fodder.py b/Lib/test/inspect_fodder.py
index 567dfbab80486..60ba7aa78394e 100644
--- a/Lib/test/inspect_fodder.py
+++ b/Lib/test/inspect_fodder.py
@@ -113,3 +113,8 @@ async def asyncf(self):
# after asyncf - line 113
# end of WhichComments - line 114
# after WhichComments - line 115
+
+# Test that getsource works on a line that includes
+# a closing parenthesis with the opening paren being in another line
+(
+); after_closing = lambda: 1
diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py
index a7bd680d0f5bc..6a49e3b5530e1 100644
--- a/Lib/test/test_inspect.py
+++ b/Lib/test/test_inspect.py
@@ -557,7 +557,8 @@ def test_getclasses(self):
def test_getfunctions(self):
functions = inspect.getmembers(mod, inspect.isfunction)
- self.assertEqual(functions, [('eggs', mod.eggs),
+ self.assertEqual(functions, [('after_closing', mod.after_closing),
+ ('eggs', mod.eggs),
('lobbest', mod.lobbest),
('spam', mod.spam)])
@@ -641,6 +642,7 @@ def test_getsource(self):
self.assertSourceEqual(git.abuse, 29, 39)
self.assertSourceEqual(mod.StupidGit, 21, 51)
self.assertSourceEqual(mod.lobbest, 75, 76)
+ self.assertSourceEqual(mod.after_closing, 120, 120)
def test_getsourcefile(self):
self.assertEqual(normcase(inspect.getsourcefile(mod.spam)), modfile)
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index cd11dddd0fe51..3adcc4e420671 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -1100,6 +1100,13 @@ def test_newline_after_parenthesized_block_with_comment(self):
NEWLINE '\\n' (4, 1) (4, 2)
""")
+ def test_closing_parenthesis_from_different_line(self):
+ self.check_tokenize("); x", """\
+ OP ')' (1, 0) (1, 1)
+ OP ';' (1, 1) (1, 2)
+ NAME 'x' (1, 3) (1, 4)
+ """)
+
class GenerateTokensTest(TokenizeTest):
def check_tokenize(self, s, expected):
# Format the tokens in s in a table format.
diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c
index 9058c67ada648..b6d63e150cfbc 100644
--- a/Parser/tokenizer.c
+++ b/Parser/tokenizer.c
@@ -2496,41 +2496,42 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t
case ')':
case ']':
case '}':
- if (!tok->level) {
- if (INSIDE_FSTRING(tok) && !current_tok->curly_bracket_depth && c == '}') {
- return MAKE_TOKEN(syntaxerror(tok, "f-string: single '}' is not allowed"));
- }
+ if (INSIDE_FSTRING(tok) && !current_tok->curly_bracket_depth && c == '}') {
+ return MAKE_TOKEN(syntaxerror(tok, "f-string: single '}' is not allowed"));
+ }
+ if (!tok->tok_extra_tokens && !tok->level) {
return MAKE_TOKEN(syntaxerror(tok, "unmatched '%c'", c));
}
- tok->level--;
- int opening = tok->parenstack[tok->level];
- if (!((opening == '(' && c == ')') ||
- (opening == '[' && c == ']') ||
- (opening == '{' && c == '}')))
- {
- /* If the opening bracket belongs to an f-string's expression
- part (e.g. f"{)}") and the closing bracket is an arbitrary
- nested expression, then instead of matching a different
- syntactical construct with it; we'll throw an unmatched
- parentheses error. */
- if (INSIDE_FSTRING(tok) && opening == '{') {
- assert(current_tok->curly_bracket_depth >= 0);
- int previous_bracket = current_tok->curly_bracket_depth - 1;
- if (previous_bracket == current_tok->curly_bracket_expr_start_depth) {
- return MAKE_TOKEN(syntaxerror(tok, "f-string: unmatched '%c'", c));
+ if (tok->level > 0) {
+ tok->level--;
+ int opening = tok->parenstack[tok->level];
+ if (!tok->tok_extra_tokens && !((opening == '(' && c == ')') ||
+ (opening == '[' && c == ']') ||
+ (opening == '{' && c == '}'))) {
+ /* If the opening bracket belongs to an f-string's expression
+ part (e.g. f"{)}") and the closing bracket is an arbitrary
+ nested expression, then instead of matching a different
+ syntactical construct with it; we'll throw an unmatched
+ parentheses error. */
+ if (INSIDE_FSTRING(tok) && opening == '{') {
+ assert(current_tok->curly_bracket_depth >= 0);
+ int previous_bracket = current_tok->curly_bracket_depth - 1;
+ if (previous_bracket == current_tok->curly_bracket_expr_start_depth) {
+ return MAKE_TOKEN(syntaxerror(tok, "f-string: unmatched '%c'", c));
+ }
+ }
+ if (tok->parenlinenostack[tok->level] != tok->lineno) {
+ return MAKE_TOKEN(syntaxerror(tok,
+ "closing parenthesis '%c' does not match "
+ "opening parenthesis '%c' on line %d",
+ c, opening, tok->parenlinenostack[tok->level]));
+ }
+ else {
+ return MAKE_TOKEN(syntaxerror(tok,
+ "closing parenthesis '%c' does not match "
+ "opening parenthesis '%c'",
+ c, opening));
}
- }
- if (tok->parenlinenostack[tok->level] != tok->lineno) {
- return MAKE_TOKEN(syntaxerror(tok,
- "closing parenthesis '%c' does not match "
- "opening parenthesis '%c' on line %d",
- c, opening, tok->parenlinenostack[tok->level]));
- }
- else {
- return MAKE_TOKEN(syntaxerror(tok,
- "closing parenthesis '%c' does not match "
- "opening parenthesis '%c'",
- c, opening));
}
}
diff --git a/Python/Python-tokenize.c b/Python/Python-tokenize.c
index 4eced66b61770..2de1daae8c0dd 100644
--- a/Python/Python-tokenize.c
+++ b/Python/Python-tokenize.c
@@ -82,7 +82,7 @@ _tokenizer_error(struct tok_state *tok)
msg = "invalid token";
break;
case E_EOF:
- if (tok->level) {
+ if (tok->level > 0) {
PyErr_Format(PyExc_SyntaxError,
"parenthesis '%c' was never closed",
tok->parenstack[tok->level-1]);
https://github.com/python/cpython/commit/c687946f6815a17bc5ceacaf3bbceba5b4…
commit: c687946f6815a17bc5ceacaf3bbceba5b41e73fd
branch: 3.12
author: Miss Islington (bot) <31488909+miss-islington(a)users.noreply.github.com>
committer: pablogsal <Pablogsal(a)gmail.com>
date: 2023-05-31T11:11:53+01:00
summary:
[3.12] gh-105069: Add a readline-like callable to the tokenizer to consume input iteratively (GH-105070) (#105119)
gh-105069: Add a readline-like callable to the tokenizer to consume input iteratively (GH-105070)
(cherry picked from commit 9216e69a87d16d871625721ed5a8aa302511f367)
Co-authored-by: Pablo Galindo Salgado <Pablogsal(a)gmail.com>
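For orientation, a small sketch (not part of the commit) of the readline-style interface involved: the public generate_tokens()/tokenize() API already consumes a zero-argument callable that returns one line per call, and after this change the same callable is fed to the C tokenizer iteratively instead of the whole source being materialized up front.

    import io
    import tokenize

    source = "x = 1\nprint(x)\n"
    readline = io.StringIO(source).readline   # any callable returning successive lines works

    for tok in tokenize.generate_tokens(readline):
        print(tokenize.tok_name[tok.type], repr(tok.string))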
files:
M Lib/inspect.py
M Lib/test/test_tokenize.py
M Lib/tokenize.py
M Parser/tokenizer.c
M Parser/tokenizer.h
M Python/Python-tokenize.c
M Python/clinic/Python-tokenize.c.h
diff --git a/Lib/inspect.py b/Lib/inspect.py
index 55530fc780b35..15eefdb6570be 100644
--- a/Lib/inspect.py
+++ b/Lib/inspect.py
@@ -2203,7 +2203,7 @@ def _signature_strip_non_python_syntax(signature):
add(string)
if (string == ','):
add(' ')
- clean_signature = ''.join(text).strip()
+ clean_signature = ''.join(text).strip().replace("\n", "")
return clean_signature, self_parameter
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index 3adcc4e420671..a9a2b7673887c 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -1,6 +1,6 @@
from test import support
from test.support import os_helper
-from tokenize import (tokenize, _tokenize, untokenize, NUMBER, NAME, OP,
+from tokenize import (tokenize, untokenize, NUMBER, NAME, OP,
STRING, ENDMARKER, ENCODING, tok_name, detect_encoding,
open as tokenize_open, Untokenizer, generate_tokens,
NEWLINE, _generate_tokens_from_c_tokenizer, DEDENT, TokenInfo)
@@ -51,6 +51,25 @@ def check_tokenize(self, s, expected):
[" ENCODING 'utf-8' (0, 0) (0, 0)"] +
expected.rstrip().splitlines())
+ def test_invalid_readline(self):
+ def gen():
+ yield "sdfosdg"
+ yield "sdfosdg"
+ with self.assertRaises(TypeError):
+ list(tokenize(gen().__next__))
+
+ def gen():
+ yield b"sdfosdg"
+ yield b"sdfosdg"
+ with self.assertRaises(TypeError):
+ list(generate_tokens(gen().__next__))
+
+ def gen():
+ yield "sdfosdg"
+ 1/0
+ with self.assertRaises(ZeroDivisionError):
+ list(generate_tokens(gen().__next__))
+
def test_implicit_newline(self):
# Make sure that the tokenizer puts in an implicit NEWLINE
# when the input lacks a trailing new line.
@@ -1161,7 +1180,8 @@ class TestTokenizerAdheresToPep0263(TestCase):
def _testFile(self, filename):
path = os.path.join(os.path.dirname(__file__), filename)
- TestRoundtrip.check_roundtrip(self, open(path, 'rb'))
+ with open(path, 'rb') as f:
+ TestRoundtrip.check_roundtrip(self, f)
def test_utf8_coding_cookie_and_no_utf8_bom(self):
f = 'tokenize_tests-utf8-coding-cookie-and-no-utf8-bom-sig.txt'
@@ -1206,7 +1226,8 @@ def readline():
yield b''
# skip the initial encoding token and the end tokens
- tokens = list(_tokenize(readline(), encoding='utf-8'))[:-2]
+ tokens = list(_generate_tokens_from_c_tokenizer(readline().__next__, encoding='utf-8',
+ extra_tokens=True))[:-2]
expected_tokens = [TokenInfo(3, '"ЉЊЈЁЂ"', (1, 0), (1, 7), '"ЉЊЈЁЂ"\n')]
self.assertEqual(tokens, expected_tokens,
"bytes not decoded with encoding")
@@ -1475,13 +1496,13 @@ def test_tokenize(self):
def mock_detect_encoding(readline):
return encoding, [b'first', b'second']
- def mock__tokenize(readline, encoding):
+ def mock__tokenize(readline, encoding, **kwargs):
nonlocal encoding_used
encoding_used = encoding
out = []
while True:
try:
- next_line = next(readline)
+ next_line = readline()
except StopIteration:
return out
if next_line:
@@ -1498,16 +1519,16 @@ def mock_readline():
return str(counter).encode()
orig_detect_encoding = tokenize_module.detect_encoding
- orig__tokenize = tokenize_module._tokenize
+ orig_c_token = tokenize_module._generate_tokens_from_c_tokenizer
tokenize_module.detect_encoding = mock_detect_encoding
- tokenize_module._tokenize = mock__tokenize
+ tokenize_module._generate_tokens_from_c_tokenizer = mock__tokenize
try:
results = tokenize(mock_readline)
self.assertEqual(list(results)[1:],
[b'first', b'second', b'1', b'2', b'3', b'4'])
finally:
tokenize_module.detect_encoding = orig_detect_encoding
- tokenize_module._tokenize = orig__tokenize
+ tokenize_module._generate_tokens_from_c_tokenizer = orig_c_token
self.assertEqual(encoding_used, encoding)
@@ -1834,12 +1855,33 @@ class CTokenizeTest(TestCase):
def check_tokenize(self, s, expected):
# Format the tokens in s in a table format.
# The ENDMARKER and final NEWLINE are omitted.
+ f = StringIO(s)
with self.subTest(source=s):
result = stringify_tokens_from_source(
- _generate_tokens_from_c_tokenizer(s), s
+ _generate_tokens_from_c_tokenizer(f.readline), s
)
self.assertEqual(result, expected.rstrip().splitlines())
+ def test_encoding(self):
+ def readline(encoding):
+ yield "1+1".encode(encoding)
+
+ expected = [
+ TokenInfo(type=NUMBER, string='1', start=(1, 0), end=(1, 1), line='1+1\n'),
+ TokenInfo(type=OP, string='+', start=(1, 1), end=(1, 2), line='1+1\n'),
+ TokenInfo(type=NUMBER, string='1', start=(1, 2), end=(1, 3), line='1+1\n'),
+ TokenInfo(type=NEWLINE, string='\n', start=(1, 3), end=(1, 4), line='1+1\n'),
+ TokenInfo(type=ENDMARKER, string='', start=(2, 0), end=(2, 0), line='')
+ ]
+ for encoding in ["utf-8", "latin-1", "utf-16"]:
+ with self.subTest(encoding=encoding):
+ tokens = list(_generate_tokens_from_c_tokenizer(
+ readline(encoding).__next__,
+ extra_tokens=True,
+ encoding=encoding,
+ ))
+ self.assertEqual(tokens, expected)
+
def test_int(self):
self.check_tokenize('0xff <= 255', """\
@@ -2675,43 +2717,44 @@ def test_unicode(self):
def test_invalid_syntax(self):
def get_tokens(string):
- return list(_generate_tokens_from_c_tokenizer(string))
-
- self.assertRaises(SyntaxError, get_tokens, "(1+2]")
- self.assertRaises(SyntaxError, get_tokens, "(1+2}")
- self.assertRaises(SyntaxError, get_tokens, "{1+2]")
-
- self.assertRaises(SyntaxError, get_tokens, "1_")
- self.assertRaises(SyntaxError, get_tokens, "1.2_")
- self.assertRaises(SyntaxError, get_tokens, "1e2_")
- self.assertRaises(SyntaxError, get_tokens, "1e+")
-
- self.assertRaises(SyntaxError, get_tokens, "\xa0")
- self.assertRaises(SyntaxError, get_tokens, "€")
-
- self.assertRaises(SyntaxError, get_tokens, "0b12")
- self.assertRaises(SyntaxError, get_tokens, "0b1_2")
- self.assertRaises(SyntaxError, get_tokens, "0b2")
- self.assertRaises(SyntaxError, get_tokens, "0b1_")
- self.assertRaises(SyntaxError, get_tokens, "0b")
- self.assertRaises(SyntaxError, get_tokens, "0o18")
- self.assertRaises(SyntaxError, get_tokens, "0o1_8")
- self.assertRaises(SyntaxError, get_tokens, "0o8")
- self.assertRaises(SyntaxError, get_tokens, "0o1_")
- self.assertRaises(SyntaxError, get_tokens, "0o")
- self.assertRaises(SyntaxError, get_tokens, "0x1_")
- self.assertRaises(SyntaxError, get_tokens, "0x")
- self.assertRaises(SyntaxError, get_tokens, "1_")
- self.assertRaises(SyntaxError, get_tokens, "012")
- self.assertRaises(SyntaxError, get_tokens, "1.2_")
- self.assertRaises(SyntaxError, get_tokens, "1e2_")
- self.assertRaises(SyntaxError, get_tokens, "1e+")
-
- self.assertRaises(SyntaxError, get_tokens, "'sdfsdf")
- self.assertRaises(SyntaxError, get_tokens, "'''sdfsdf''")
-
- self.assertRaises(SyntaxError, get_tokens, "("*1000+"a"+")"*1000)
- self.assertRaises(SyntaxError, get_tokens, "]")
+ the_string = StringIO(string)
+ return list(_generate_tokens_from_c_tokenizer(the_string.readline))
+
+ for case in [
+ "(1+2]",
+ "(1+2}",
+ "{1+2]",
+ "1_",
+ "1.2_",
+ "1e2_",
+ "1e+",
+
+ "\xa0",
+ "€",
+ "0b12",
+ "0b1_2",
+ "0b2",
+ "0b1_",
+ "0b",
+ "0o18",
+ "0o1_8",
+ "0o8",
+ "0o1_",
+ "0o",
+ "0x1_",
+ "0x",
+ "1_",
+ "012",
+ "1.2_",
+ "1e2_",
+ "1e+",
+ "'sdfsdf",
+ "'''sdfsdf''",
+ "("*1000+"a"+")"*1000,
+ "]",
+ ]:
+ with self.subTest(case=case):
+ self.assertRaises(SyntaxError, get_tokens, case)
def test_max_indent(self):
MAXINDENT = 100
@@ -2722,20 +2765,24 @@ def generate_source(indents):
return source
valid = generate_source(MAXINDENT - 1)
- tokens = list(_generate_tokens_from_c_tokenizer(valid))
+ the_input = StringIO(valid)
+ tokens = list(_generate_tokens_from_c_tokenizer(the_input.readline))
self.assertEqual(tokens[-2].type, DEDENT)
self.assertEqual(tokens[-1].type, ENDMARKER)
compile(valid, "<string>", "exec")
invalid = generate_source(MAXINDENT)
- self.assertRaises(SyntaxError, lambda: list(_generate_tokens_from_c_tokenizer(invalid)))
+ the_input = StringIO(invalid)
+ self.assertRaises(SyntaxError, lambda: list(_generate_tokens_from_c_tokenizer(the_input.readline)))
self.assertRaises(
IndentationError, compile, invalid, "<string>", "exec"
)
def test_continuation_lines_indentation(self):
def get_tokens(string):
- return [(kind, string) for (kind, string, *_) in _generate_tokens_from_c_tokenizer(string)]
+ the_string = StringIO(string)
+ return [(kind, string) for (kind, string, *_)
+ in _generate_tokens_from_c_tokenizer(the_string.readline)]
code = dedent("""
def fib(n):
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index 4895e94d1dfda..380dc2ab468b5 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -34,6 +34,7 @@
import sys
from token import *
from token import EXACT_TOKEN_TYPES
+import _tokenize
cookie_re = re.compile(r'^[ \t\f]*#.*?coding[:=][ \t]*([-\w.]+)', re.ASCII)
blank_re = re.compile(br'^[ \t\f]*(?:[#\r\n]|$)', re.ASCII)
@@ -443,12 +444,7 @@ def tokenize(readline):
# BOM will already have been stripped.
encoding = "utf-8"
yield TokenInfo(ENCODING, encoding, (0, 0), (0, 0), '')
- yield from _tokenize(rl_gen, encoding)
-
-def _tokenize(rl_gen, encoding):
- source = b"".join(rl_gen).decode(encoding)
- for token in _generate_tokens_from_c_tokenizer(source, extra_tokens=True):
- yield token
+ yield from _generate_tokens_from_c_tokenizer(rl_gen.__next__, encoding, extra_tokens=True)
def generate_tokens(readline):
"""Tokenize a source reading Python code as unicode strings.
@@ -456,16 +452,7 @@ def generate_tokens(readline):
This has the same API as tokenize(), except that it expects the *readline*
callable to return str objects instead of bytes.
"""
- def _gen():
- while True:
- try:
- line = readline()
- except StopIteration:
- return
- if not line:
- return
- yield line.encode()
- return _tokenize(_gen(), 'utf-8')
+ return _generate_tokens_from_c_tokenizer(readline, extra_tokens=True)
def main():
import argparse
@@ -502,9 +489,9 @@ def error(message, filename=None, location=None):
tokens = list(tokenize(f.readline))
else:
filename = "<stdin>"
- tokens = _tokenize(
+ tokens = _generate_tokens_from_c_tokenizer(
(x.encode('utf-8') for x in iter(sys.stdin.readline, "")
- ), "utf-8")
+ ), "utf-8", extra_tokens=True)
# Output the tokenization
@@ -531,10 +518,13 @@ def error(message, filename=None, location=None):
perror("unexpected error: %s" % err)
raise
-def _generate_tokens_from_c_tokenizer(source, extra_tokens=False):
+def _generate_tokens_from_c_tokenizer(source, encoding=None, extra_tokens=False):
"""Tokenize a source reading Python code as unicode strings using the internal C tokenizer"""
- import _tokenize as c_tokenizer
- for info in c_tokenizer.TokenizerIter(source, extra_tokens=extra_tokens):
+ if encoding is None:
+ it = _tokenize.TokenizerIter(source, extra_tokens=extra_tokens)
+ else:
+ it = _tokenize.TokenizerIter(source, encoding=encoding, extra_tokens=extra_tokens)
+ for info in it:
yield TokenInfo._make(info)
diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c
index b6d63e150cfbc..fae613e3a18c1 100644
--- a/Parser/tokenizer.c
+++ b/Parser/tokenizer.c
@@ -103,6 +103,7 @@ tok_new(void)
tok->filename = NULL;
tok->decoding_readline = NULL;
tok->decoding_buffer = NULL;
+ tok->readline = NULL;
tok->type_comments = 0;
tok->async_hacks = 0;
tok->async_def = 0;
@@ -139,8 +140,9 @@ static char *
error_ret(struct tok_state *tok) /* XXX */
{
tok->decoding_erred = 1;
- if (tok->fp != NULL && tok->buf != NULL) /* see _PyTokenizer_Free */
+ if ((tok->fp != NULL || tok->readline != NULL) && tok->buf != NULL) {/* see _PyTokenizer_Free */
PyMem_Free(tok->buf);
+ }
tok->buf = tok->cur = tok->inp = NULL;
tok->start = NULL;
tok->end = NULL;
@@ -900,6 +902,33 @@ _PyTokenizer_FromString(const char *str, int exec_input, int preserve_crlf)
return tok;
}
+struct tok_state *
+_PyTokenizer_FromReadline(PyObject* readline, const char* enc,
+ int exec_input, int preserve_crlf)
+{
+ struct tok_state *tok = tok_new();
+ if (tok == NULL)
+ return NULL;
+ if ((tok->buf = (char *)PyMem_Malloc(BUFSIZ)) == NULL) {
+ _PyTokenizer_Free(tok);
+ return NULL;
+ }
+ tok->cur = tok->inp = tok->buf;
+ tok->end = tok->buf + BUFSIZ;
+ tok->fp = NULL;
+ if (enc != NULL) {
+ tok->encoding = new_string(enc, strlen(enc), tok);
+ if (!tok->encoding) {
+ _PyTokenizer_Free(tok);
+ return NULL;
+ }
+ }
+ tok->decoding_state = STATE_NORMAL;
+ Py_INCREF(readline);
+ tok->readline = readline;
+ return tok;
+}
+
/* Set up tokenizer for UTF-8 string */
struct tok_state *
@@ -969,8 +998,9 @@ _PyTokenizer_Free(struct tok_state *tok)
}
Py_XDECREF(tok->decoding_readline);
Py_XDECREF(tok->decoding_buffer);
+ Py_XDECREF(tok->readline);
Py_XDECREF(tok->filename);
- if (tok->fp != NULL && tok->buf != NULL) {
+ if ((tok->readline != NULL || tok->fp != NULL ) && tok->buf != NULL) {
PyMem_Free(tok->buf);
}
if (tok->input) {
@@ -1021,6 +1051,71 @@ tok_readline_raw(struct tok_state *tok)
return 1;
}
+static int
+tok_readline_string(struct tok_state* tok) {
+ PyObject* line = NULL;
+ PyObject* raw_line = PyObject_CallNoArgs(tok->readline);
+ if (raw_line == NULL) {
+ if (PyErr_ExceptionMatches(PyExc_StopIteration)) {
+ PyErr_Clear();
+ return 1;
+ }
+ error_ret(tok);
+ goto error;
+ }
+ if(tok->encoding != NULL) {
+ if (!PyBytes_Check(raw_line)) {
+ PyErr_Format(PyExc_TypeError, "readline() returned a non-bytes object");
+ error_ret(tok);
+ goto error;
+ }
+ line = PyUnicode_Decode(PyBytes_AS_STRING(raw_line), PyBytes_GET_SIZE(raw_line),
+ tok->encoding, "replace");
+ Py_CLEAR(raw_line);
+ if (line == NULL) {
+ error_ret(tok);
+ goto error;
+ }
+ } else {
+ if(!PyUnicode_Check(raw_line)) {
+ PyErr_Format(PyExc_TypeError, "readline() returned a non-string object");
+ error_ret(tok);
+ goto error;
+ }
+ line = raw_line;
+ raw_line = NULL;
+ }
+ Py_ssize_t buflen;
+ const char* buf = PyUnicode_AsUTF8AndSize(line, &buflen);
+ if (buf == NULL) {
+ error_ret(tok);
+ goto error;
+ }
+
+ // Make room for the null terminator *and* potentially
+ // an extra newline character that we may need to artificially
+ // add.
+ size_t buffer_size = buflen + 2;
+ if (!tok_reserve_buf(tok, buffer_size)) {
+ goto error;
+ }
+ memcpy(tok->inp, buf, buflen);
+ tok->inp += buflen;
+ *tok->inp = '\0';
+
+ if (tok->start == NULL) {
+ tok->buf = tok->cur;
+ }
+ tok->line_start = tok->cur;
+
+ Py_DECREF(line);
+ return 1;
+error:
+ Py_XDECREF(raw_line);
+ Py_XDECREF(line);
+ return 0;
+}
+
static int
tok_underflow_string(struct tok_state *tok) {
char *end = strchr(tok->inp, '\n');
@@ -1195,6 +1290,38 @@ tok_underflow_file(struct tok_state *tok) {
return tok->done == E_OK;
}
+static int
+tok_underflow_readline(struct tok_state* tok) {
+ assert(tok->decoding_state == STATE_NORMAL);
+ assert(tok->fp == NULL && tok->input == NULL && tok->decoding_readline == NULL);
+ if (tok->start == NULL && !INSIDE_FSTRING(tok)) {
+ tok->cur = tok->inp = tok->buf;
+ }
+ if (!tok_readline_string(tok)) {
+ return 0;
+ }
+ if (tok->inp == tok->cur) {
+ tok->done = E_EOF;
+ return 0;
+ }
+ if (tok->inp[-1] != '\n') {
+ assert(tok->inp + 1 < tok->end);
+ /* Last line does not end in \n, fake one */
+ *tok->inp++ = '\n';
+ *tok->inp = '\0';
+ }
+
+ ADVANCE_LINENO();
+ /* The default encoding is UTF-8, so make sure we don't have any
+ non-UTF-8 sequences in it. */
+ if (!tok->encoding && !ensure_utf8(tok->cur, tok)) {
+ error_ret(tok);
+ return 0;
+ }
+ assert(tok->done == E_OK);
+ return tok->done == E_OK;
+}
+
#if defined(Py_DEBUG)
static void
print_escape(FILE *f, const char *s, Py_ssize_t size)
@@ -1238,7 +1365,10 @@ tok_nextc(struct tok_state *tok)
if (tok->done != E_OK) {
return EOF;
}
- if (tok->fp == NULL) {
+ if (tok->readline) {
+ rc = tok_underflow_readline(tok);
+ }
+ else if (tok->fp == NULL) {
rc = tok_underflow_string(tok);
}
else if (tok->prompt != NULL) {
diff --git a/Parser/tokenizer.h b/Parser/tokenizer.h
index 02749e355da81..600d4297b6865 100644
--- a/Parser/tokenizer.h
+++ b/Parser/tokenizer.h
@@ -109,6 +109,7 @@ struct tok_state {
expression (cf. issue 16806) */
PyObject *decoding_readline; /* open(...).readline */
PyObject *decoding_buffer;
+ PyObject *readline; /* readline() function */
const char* enc; /* Encoding for the current str. */
char* str; /* Source string being tokenized (if tokenizing from a string)*/
char* input; /* Tokenizer's newline translated copy of the string. */
@@ -137,6 +138,7 @@ struct tok_state {
extern struct tok_state *_PyTokenizer_FromString(const char *, int, int);
extern struct tok_state *_PyTokenizer_FromUTF8(const char *, int, int);
+extern struct tok_state *_PyTokenizer_FromReadline(PyObject*, const char*, int, int);
extern struct tok_state *_PyTokenizer_FromFile(FILE *, const char*,
const char *, const char *);
extern void _PyTokenizer_Free(struct tok_state *);
diff --git a/Python/Python-tokenize.c b/Python/Python-tokenize.c
index 2de1daae8c0dd..a7933b2d6b018 100644
--- a/Python/Python-tokenize.c
+++ b/Python/Python-tokenize.c
@@ -37,15 +37,17 @@ typedef struct
@classmethod
_tokenizer.tokenizeriter.__new__ as tokenizeriter_new
- source: str
+ readline: object
+ /
*
extra_tokens: bool
+ encoding: str(c_default="NULL") = 'utf-8'
[clinic start generated code]*/
static PyObject *
-tokenizeriter_new_impl(PyTypeObject *type, const char *source,
- int extra_tokens)
-/*[clinic end generated code: output=f6f9d8b4beec8106 input=90dc5b6a5df180c2]*/
+tokenizeriter_new_impl(PyTypeObject *type, PyObject *readline,
+ int extra_tokens, const char *encoding)
+/*[clinic end generated code: output=7501a1211683ce16 input=f7dddf8a613ae8bd]*/
{
tokenizeriterobject *self = (tokenizeriterobject *)type->tp_alloc(type, 0);
if (self == NULL) {
@@ -55,7 +57,7 @@ tokenizeriter_new_impl(PyTypeObject *type, const char *source,
if (filename == NULL) {
return NULL;
}
- self->tok = _PyTokenizer_FromUTF8(source, 1, 1);
+ self->tok = _PyTokenizer_FromReadline(readline, encoding, 1, 1);
if (self->tok == NULL) {
Py_DECREF(filename);
return NULL;
diff --git a/Python/clinic/Python-tokenize.c.h b/Python/clinic/Python-tokenize.c.h
index 7e779388a92db..28f5075826e36 100644
--- a/Python/clinic/Python-tokenize.c.h
+++ b/Python/clinic/Python-tokenize.c.h
@@ -9,8 +9,8 @@ preserve
static PyObject *
-tokenizeriter_new_impl(PyTypeObject *type, const char *source,
- int extra_tokens);
+tokenizeriter_new_impl(PyTypeObject *type, PyObject *readline,
+ int extra_tokens, const char *encoding);
static PyObject *
tokenizeriter_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
@@ -25,7 +25,7 @@ tokenizeriter_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
PyObject *ob_item[NUM_KEYWORDS];
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
- .ob_item = { &_Py_ID(source), &_Py_ID(extra_tokens), },
+ .ob_item = { &_Py_ID(extra_tokens), &_Py_ID(encoding), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -34,43 +34,50 @@ tokenizeriter_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"source", "extra_tokens", NULL};
+ static const char * const _keywords[] = {"", "extra_tokens", "encoding", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "tokenizeriter",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
PyObject * const *fastargs;
Py_ssize_t nargs = PyTuple_GET_SIZE(args);
- const char *source;
+ Py_ssize_t noptargs = nargs + (kwargs ? PyDict_GET_SIZE(kwargs) : 0) - 2;
+ PyObject *readline;
int extra_tokens;
+ const char *encoding = NULL;
fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, 1, 1, 1, argsbuf);
if (!fastargs) {
goto exit;
}
- if (!PyUnicode_Check(fastargs[0])) {
- _PyArg_BadArgument("tokenizeriter", "argument 'source'", "str", fastargs[0]);
+ readline = fastargs[0];
+ extra_tokens = PyObject_IsTrue(fastargs[1]);
+ if (extra_tokens < 0) {
goto exit;
}
- Py_ssize_t source_length;
- source = PyUnicode_AsUTF8AndSize(fastargs[0], &source_length);
- if (source == NULL) {
+ if (!noptargs) {
+ goto skip_optional_kwonly;
+ }
+ if (!PyUnicode_Check(fastargs[2])) {
+ _PyArg_BadArgument("tokenizeriter", "argument 'encoding'", "str", fastargs[2]);
goto exit;
}
- if (strlen(source) != (size_t)source_length) {
- PyErr_SetString(PyExc_ValueError, "embedded null character");
+ Py_ssize_t encoding_length;
+ encoding = PyUnicode_AsUTF8AndSize(fastargs[2], &encoding_length);
+ if (encoding == NULL) {
goto exit;
}
- extra_tokens = PyObject_IsTrue(fastargs[1]);
- if (extra_tokens < 0) {
+ if (strlen(encoding) != (size_t)encoding_length) {
+ PyErr_SetString(PyExc_ValueError, "embedded null character");
goto exit;
}
- return_value = tokenizeriter_new_impl(type, source, extra_tokens);
+skip_optional_kwonly:
+ return_value = tokenizeriter_new_impl(type, readline, extra_tokens, encoding);
exit:
return return_value;
}
-/*[clinic end generated code: output=940b564c67f6e0e2 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=48be65a2808bdfa6 input=a9049054013a1b77]*/
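
The user-visible effect of this change is that tokenize.generate_tokens() and the
private _generate_tokens_from_c_tokenizer() now drive the C tokenizer from a
readline callable instead of a pre-read source string. A minimal sketch of the new
calling convention (the StringIO source below is illustrative only, not taken from
the commit):

    import io
    from tokenize import generate_tokens

    source = "x = 1 + 2\n"                      # illustrative input
    readline = io.StringIO(source).readline     # any callable returning str lines

    # generate_tokens() expects a readline callable returning str;
    # internally it now feeds the C tokenizer one line at a time.
    for tok in generate_tokens(readline):
        print(tok.type, repr(tok.string), tok.start, tok.end)
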
May 31, 2023
https://github.com/python/cpython/commit/adccff3b3f9fbdb58cb4b8fde92456e6dd…
commit: adccff3b3f9fbdb58cb4b8fde92456e6dd078af0
branch: main
author: Inada Naoki <songofacandy(a)gmail.com>
committer: methane <songofacandy(a)gmail.com>
date: 2023-05-31T18:38:55+09:00
summary:
gh-104922: Make `PY_SSIZE_T_CLEAN` not mandatory again (#105051)
files:
A Misc/NEWS.d/next/C API/2023-05-29-16-09-27.gh-issue-104922.L23qaU.rst
M Doc/c-api/arg.rst
M Doc/extending/extending.rst
M Doc/whatsnew/3.13.rst
M Include/abstract.h
M Include/cpython/abstract.h
M Include/cpython/modsupport.h
M Include/modsupport.h
M Lib/test/test_capi/test_getargs.py
M Modules/_testcapi/getargs.c
M Modules/_testcapimodule.c
M Objects/call.c
M Python/getargs.c
M Python/modsupport.c
M Python/sysmodule.c
diff --git a/Doc/c-api/arg.rst b/Doc/c-api/arg.rst
index 9713431688d49..b7cdf293d2238 100644
--- a/Doc/c-api/arg.rst
+++ b/Doc/c-api/arg.rst
@@ -27,9 +27,18 @@ unit; the entry in (round) parentheses is the Python object type that matches
the format unit; and the entry in [square] brackets is the type of the C
variable(s) whose address should be passed.
+.. _arg-parsing-string-and-buffers:
+
Strings and buffers
-------------------
+.. note::
+
+ On Python 3.12 and older, the macro :c:macro:`!PY_SSIZE_T_CLEAN` must be
+ defined before including :file:`Python.h` to use all ``#`` variants of
+ formats (``s#``, ``y#``, etc.) explained below.
+ This is not necessary on Python 3.13 and later.
+
These formats allow accessing an object as a contiguous chunk of memory.
You don't have to provide raw storage for the returned unicode or bytes
area.
@@ -68,15 +77,6 @@ There are three ways strings and buffers can be converted to C:
whether the input object is immutable (e.g. whether it would honor a request
for a writable buffer, or whether another thread can mutate the data).
-.. note::
-
- For all ``#`` variants of formats (``s#``, ``y#``, etc.), the macro
- :c:macro:`PY_SSIZE_T_CLEAN` must be defined before including
- :file:`Python.h`. On Python 3.9 and older, the type of the length argument
- is :c:type:`Py_ssize_t` if the :c:macro:`PY_SSIZE_T_CLEAN` macro is defined,
- or int otherwise.
-
-
``s`` (:class:`str`) [const char \*]
Convert a Unicode object to a C pointer to a character string.
A pointer to an existing string is stored in the character pointer
diff --git a/Doc/extending/extending.rst b/Doc/extending/extending.rst
index d9bf4fd6c7ae0..c37b69f988a6b 100644
--- a/Doc/extending/extending.rst
+++ b/Doc/extending/extending.rst
@@ -70,7 +70,7 @@ the module and a copyright notice if you like).
headers are included.
It is recommended to always define ``PY_SSIZE_T_CLEAN`` before including
- ``Python.h``. See :ref:`parsetuple` for a description of this macro.
+ ``Python.h``. See :ref:`arg-parsing-string-and-buffers` for a description of this macro.
All user-visible symbols defined by :file:`Python.h` have a prefix of ``Py`` or
``PY``, except those defined in standard header files. For convenience, and
@@ -649,7 +649,7 @@ Note that any Python object references which are provided to the caller are
Some example calls::
- #define PY_SSIZE_T_CLEAN /* Make "s#" use Py_ssize_t rather than int. */
+ #define PY_SSIZE_T_CLEAN
#include <Python.h>
::
@@ -745,7 +745,7 @@ it returns false and raises an appropriate exception.
Here is an example module which uses keywords, based on an example by Geoff
Philbrick (philbrick(a)hks.com)::
- #define PY_SSIZE_T_CLEAN /* Make "s#" use Py_ssize_t rather than int. */
+ #define PY_SSIZE_T_CLEAN
#include <Python.h>
static PyObject *
diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst
index 44c0915492dcc..13b9be1c8ee23 100644
--- a/Doc/whatsnew/3.13.rst
+++ b/Doc/whatsnew/3.13.rst
@@ -292,6 +292,13 @@ C API Changes
New Features
------------
+* You no longer have to define the ``PY_SSIZE_T_CLEAN`` macro before including
+ :file:`Python.h` when using ``#`` formats in
+ :ref:`format codes <arg-parsing-string-and-buffers>`.
+ APIs accepting the format codes always use ``Py_ssize_t`` for ``#`` formats.
+ (Contributed by Inada Naoki in :gh:`104922`.)
+
+
Porting to Python 3.13
----------------------
diff --git a/Include/abstract.h b/Include/abstract.h
index 064b0300b51ea..b4c2bedef442b 100644
--- a/Include/abstract.h
+++ b/Include/abstract.h
@@ -135,12 +135,6 @@ extern "C" {
This function always succeeds. */
-#ifdef PY_SSIZE_T_CLEAN
-# define PyObject_CallFunction _PyObject_CallFunction_SizeT
-# define PyObject_CallMethod _PyObject_CallMethod_SizeT
-#endif
-
-
#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03090000
/* Call a callable Python object without any arguments */
PyAPI_FUNC(PyObject *) PyObject_CallNoArgs(PyObject *func);
@@ -195,15 +189,6 @@ PyAPI_FUNC(PyObject *) PyObject_CallMethod(PyObject *obj,
const char *name,
const char *format, ...);
-PyAPI_FUNC(PyObject *) _PyObject_CallFunction_SizeT(PyObject *callable,
- const char *format,
- ...);
-
-PyAPI_FUNC(PyObject *) _PyObject_CallMethod_SizeT(PyObject *obj,
- const char *name,
- const char *format,
- ...);
-
/* Call a callable Python object 'callable' with a variable number of C
arguments. The C arguments are provided as PyObject* values, terminated
by a NULL.
diff --git a/Include/cpython/abstract.h b/Include/cpython/abstract.h
index 3b27aab2fc479..992dd068db90e 100644
--- a/Include/cpython/abstract.h
+++ b/Include/cpython/abstract.h
@@ -4,10 +4,6 @@
/* === Object Protocol ================================================== */
-#ifdef PY_SSIZE_T_CLEAN
-# define _PyObject_CallMethodId _PyObject_CallMethodId_SizeT
-#endif
-
/* Convert keyword arguments from the FASTCALL (stack: C array, kwnames: tuple)
format to a Python dictionary ("kwargs" dict).
@@ -113,11 +109,6 @@ PyAPI_FUNC(PyObject *) _PyObject_CallMethodId(PyObject *obj,
_Py_Identifier *name,
const char *format, ...);
-PyAPI_FUNC(PyObject *) _PyObject_CallMethodId_SizeT(PyObject *obj,
- _Py_Identifier *name,
- const char *format,
- ...);
-
PyAPI_FUNC(PyObject *) _PyObject_CallMethodIdObjArgs(
PyObject *obj,
_Py_Identifier *name,
diff --git a/Include/cpython/modsupport.h b/Include/cpython/modsupport.h
index 88f34fe7513bf..a5d95d15440df 100644
--- a/Include/cpython/modsupport.h
+++ b/Include/cpython/modsupport.h
@@ -2,20 +2,6 @@
# error "this header file must not be included directly"
#endif
-/* If PY_SSIZE_T_CLEAN is defined, each functions treats #-specifier
- to mean Py_ssize_t */
-#ifdef PY_SSIZE_T_CLEAN
-#define _Py_VaBuildStack _Py_VaBuildStack_SizeT
-#else
-PyAPI_FUNC(PyObject *) _Py_VaBuildValue_SizeT(const char *, va_list);
-PyAPI_FUNC(PyObject **) _Py_VaBuildStack_SizeT(
- PyObject **small_stack,
- Py_ssize_t small_stack_len,
- const char *format,
- va_list va,
- Py_ssize_t *p_nargs);
-#endif
-
PyAPI_FUNC(int) _PyArg_UnpackStack(
PyObject *const *args,
Py_ssize_t nargs,
@@ -63,13 +49,6 @@ typedef struct _PyArg_Parser {
struct _PyArg_Parser *next;
} _PyArg_Parser;
-#ifdef PY_SSIZE_T_CLEAN
-#define _PyArg_ParseTupleAndKeywordsFast _PyArg_ParseTupleAndKeywordsFast_SizeT
-#define _PyArg_ParseStack _PyArg_ParseStack_SizeT
-#define _PyArg_ParseStackAndKeywords _PyArg_ParseStackAndKeywords_SizeT
-#define _PyArg_VaParseTupleAndKeywordsFast _PyArg_VaParseTupleAndKeywordsFast_SizeT
-#endif
-
PyAPI_FUNC(int) _PyArg_ParseTupleAndKeywordsFast(PyObject *, PyObject *,
struct _PyArg_Parser *, ...);
PyAPI_FUNC(int) _PyArg_ParseStack(
diff --git a/Include/modsupport.h b/Include/modsupport.h
index 4e369bd56b4d2..7d4cfe853aaa7 100644
--- a/Include/modsupport.h
+++ b/Include/modsupport.h
@@ -9,34 +9,17 @@ extern "C" {
#include <stdarg.h> // va_list
-/* If PY_SSIZE_T_CLEAN is defined, each functions treats #-specifier
- to mean Py_ssize_t */
-#ifdef PY_SSIZE_T_CLEAN
-#define PyArg_Parse _PyArg_Parse_SizeT
-#define PyArg_ParseTuple _PyArg_ParseTuple_SizeT
-#define PyArg_ParseTupleAndKeywords _PyArg_ParseTupleAndKeywords_SizeT
-#define PyArg_VaParse _PyArg_VaParse_SizeT
-#define PyArg_VaParseTupleAndKeywords _PyArg_VaParseTupleAndKeywords_SizeT
-#define Py_BuildValue _Py_BuildValue_SizeT
-#define Py_VaBuildValue _Py_VaBuildValue_SizeT
-#endif
-
-/* Due to a glitch in 3.2, the _SizeT versions weren't exported from the DLL. */
-#if !defined(PY_SSIZE_T_CLEAN) || !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03030000
PyAPI_FUNC(int) PyArg_Parse(PyObject *, const char *, ...);
PyAPI_FUNC(int) PyArg_ParseTuple(PyObject *, const char *, ...);
PyAPI_FUNC(int) PyArg_ParseTupleAndKeywords(PyObject *, PyObject *,
- const char *, char **, ...);
+ const char *, char **, ...);
PyAPI_FUNC(int) PyArg_VaParse(PyObject *, const char *, va_list);
PyAPI_FUNC(int) PyArg_VaParseTupleAndKeywords(PyObject *, PyObject *,
- const char *, char **, va_list);
-#endif
+ const char *, char **, va_list);
+
PyAPI_FUNC(int) PyArg_ValidateKeywordArguments(PyObject *);
PyAPI_FUNC(int) PyArg_UnpackTuple(PyObject *, const char *, Py_ssize_t, Py_ssize_t, ...);
PyAPI_FUNC(PyObject *) Py_BuildValue(const char *, ...);
-PyAPI_FUNC(PyObject *) _Py_BuildValue_SizeT(const char *, ...);
-
-
PyAPI_FUNC(PyObject *) Py_VaBuildValue(const char *, va_list);
// Add an attribute with name 'name' and value 'obj' to the module 'mod.
diff --git a/Lib/test/test_capi/test_getargs.py b/Lib/test/test_capi/test_getargs.py
index 3792d1a6515b4..01bc30779add7 100644
--- a/Lib/test/test_capi/test_getargs.py
+++ b/Lib/test/test_capi/test_getargs.py
@@ -901,23 +901,6 @@ def test_s_hash(self):
self.assertRaises(TypeError, getargs_s_hash, memoryview(b'memoryview'))
self.assertRaises(TypeError, getargs_s_hash, None)
- def test_s_hash_int(self):
- # "s#" without PY_SSIZE_T_CLEAN defined.
- from _testcapi import getargs_s_hash_int
- from _testcapi import getargs_s_hash_int2
- buf = bytearray([1, 2])
- self.assertRaises(SystemError, getargs_s_hash_int, buf, "abc")
- self.assertRaises(SystemError, getargs_s_hash_int, buf, x=42)
- self.assertRaises(SystemError, getargs_s_hash_int, buf, x="abc")
- self.assertRaises(SystemError, getargs_s_hash_int2, buf, ("abc",))
- self.assertRaises(SystemError, getargs_s_hash_int2, buf, x=42)
- self.assertRaises(SystemError, getargs_s_hash_int2, buf, x="abc")
- buf.append(3) # still mutable -- not locked by a buffer export
- # getargs_s_hash_int(buf) may not raise SystemError because skipitem()
- # is not called. But it is an implementation detail.
- # getargs_s_hash_int(buf)
- # getargs_s_hash_int2(buf)
-
def test_z(self):
from _testcapi import getargs_z
self.assertEqual(getargs_z('abc\xe9'), b'abc\xc3\xa9')
diff --git a/Misc/NEWS.d/next/C API/2023-05-29-16-09-27.gh-issue-104922.L23qaU.rst b/Misc/NEWS.d/next/C API/2023-05-29-16-09-27.gh-issue-104922.L23qaU.rst
new file mode 100644
index 0000000000000..ca56d0b4403b8
--- /dev/null
+++ b/Misc/NEWS.d/next/C API/2023-05-29-16-09-27.gh-issue-104922.L23qaU.rst
@@ -0,0 +1,3 @@
+``PY_SSIZE_T_CLEAN`` is no longer required to use ``'#'`` formats in APIs
+like :c:func:`PyArg_ParseTuple` and :c:func:`Py_BuildValue`. They use
+``Py_ssize_t`` for ``'#'`` regardless of ``PY_SSIZE_T_CLEAN``.
diff --git a/Modules/_testcapi/getargs.c b/Modules/_testcapi/getargs.c
index aa201319950de..95ef2d2a546d3 100644
--- a/Modules/_testcapi/getargs.c
+++ b/Modules/_testcapi/getargs.c
@@ -816,44 +816,6 @@ test_s_code(PyObject *self, PyObject *Py_UNUSED(ignored))
Py_RETURN_NONE;
}
-#undef PyArg_ParseTupleAndKeywords
-PyAPI_FUNC(int) PyArg_ParseTupleAndKeywords(PyObject *, PyObject *,
- const char *, char **, ...);
-
-static PyObject *
-getargs_s_hash_int(PyObject *self, PyObject *args, PyObject *kwargs)
-{
- static char *keywords[] = {"", "", "x", NULL};
- Py_buffer buf = {NULL};
- const char *s;
- int len;
- int i = 0;
- if (!PyArg_ParseTupleAndKeywords(args, kwargs, "w*|s#i", keywords,
- &buf, &s, &len, &i))
- {
- return NULL;
- }
- PyBuffer_Release(&buf);
- Py_RETURN_NONE;
-}
-
-static PyObject *
-getargs_s_hash_int2(PyObject *self, PyObject *args, PyObject *kwargs)
-{
- static char *keywords[] = {"", "", "x", NULL};
- Py_buffer buf = {NULL};
- const char *s;
- int len;
- int i = 0;
- if (!PyArg_ParseTupleAndKeywords(args, kwargs, "w*|(s#)i", keywords,
- &buf, &s, &len, &i))
- {
- return NULL;
- }
- PyBuffer_Release(&buf);
- Py_RETURN_NONE;
-}
-
static PyObject *
gh_99240_clear_args(PyObject *self, PyObject *args)
{
@@ -906,8 +868,6 @@ static PyMethodDef test_methods[] = {
{"getargs_positional_only_and_keywords", _PyCFunction_CAST(getargs_positional_only_and_keywords), METH_VARARGS|METH_KEYWORDS},
{"getargs_s", getargs_s, METH_VARARGS},
{"getargs_s_hash", getargs_s_hash, METH_VARARGS},
- {"getargs_s_hash_int", _PyCFunction_CAST(getargs_s_hash_int), METH_VARARGS|METH_KEYWORDS},
- {"getargs_s_hash_int2", _PyCFunction_CAST(getargs_s_hash_int2), METH_VARARGS|METH_KEYWORDS},
{"getargs_s_star", getargs_s_star, METH_VARARGS},
{"getargs_tuple", getargs_tuple, METH_VARARGS},
{"getargs_u", getargs_u, METH_VARARGS},
diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c
index 66c1cbabe0f8c..86b6dc3b36fe7 100644
--- a/Modules/_testcapimodule.c
+++ b/Modules/_testcapimodule.c
@@ -3266,8 +3266,6 @@ test_atexit(PyObject *self, PyObject *Py_UNUSED(args))
}
-static PyObject *test_buildvalue_issue38913(PyObject *, PyObject *);
-
static PyMethodDef TestMethods[] = {
{"set_errno", set_errno, METH_VARARGS},
{"test_config", test_config, METH_NOARGS},
@@ -3297,7 +3295,6 @@ static PyMethodDef TestMethods[] = {
{"getbuffer_with_null_view", getbuffer_with_null_view, METH_O},
{"PyBuffer_SizeFromFormat", test_PyBuffer_SizeFromFormat, METH_VARARGS},
{"test_buildvalue_N", test_buildvalue_N, METH_NOARGS},
- {"test_buildvalue_issue38913", test_buildvalue_issue38913, METH_NOARGS},
{"test_get_statictype_slots", test_get_statictype_slots, METH_NOARGS},
{"test_get_type_name", test_get_type_name, METH_NOARGS},
{"test_get_type_qualname", test_get_type_qualname, METH_NOARGS},
@@ -4067,47 +4064,3 @@ PyInit__testcapi(void)
PyState_AddModule(m, &_testcapimodule);
return m;
}
-
-/* Test the C API exposed when PY_SSIZE_T_CLEAN is not defined */
-
-#undef Py_BuildValue
-PyAPI_FUNC(PyObject *) Py_BuildValue(const char *, ...);
-
-static PyObject *
-test_buildvalue_issue38913(PyObject *self, PyObject *Py_UNUSED(ignored))
-{
- PyObject *res;
- const char str[] = "string";
- const Py_UNICODE unicode[] = L"unicode";
- assert(!PyErr_Occurred());
-
- res = Py_BuildValue("(s#O)", str, 1, Py_None);
- assert(res == NULL);
- if (!PyErr_ExceptionMatches(PyExc_SystemError)) {
- return NULL;
- }
- PyErr_Clear();
-
- res = Py_BuildValue("(z#O)", str, 1, Py_None);
- assert(res == NULL);
- if (!PyErr_ExceptionMatches(PyExc_SystemError)) {
- return NULL;
- }
- PyErr_Clear();
-
- res = Py_BuildValue("(y#O)", str, 1, Py_None);
- assert(res == NULL);
- if (!PyErr_ExceptionMatches(PyExc_SystemError)) {
- return NULL;
- }
- PyErr_Clear();
-
- res = Py_BuildValue("(u#O)", unicode, 1, Py_None);
- assert(res == NULL);
- if (!PyErr_ExceptionMatches(PyExc_SystemError)) {
- return NULL;
- }
- PyErr_Clear();
-
- Py_RETURN_NONE;
-}
diff --git a/Objects/call.c b/Objects/call.c
index 0d548dcd5e1ae..e9002d350c928 100644
--- a/Objects/call.c
+++ b/Objects/call.c
@@ -519,7 +519,7 @@ _PyObject_Call_Prepend(PyThreadState *tstate, PyObject *callable,
static PyObject *
_PyObject_CallFunctionVa(PyThreadState *tstate, PyObject *callable,
- const char *format, va_list va, int is_size_t)
+ const char *format, va_list va)
{
PyObject* small_stack[_PY_FASTCALL_SMALL_STACK];
const Py_ssize_t small_stack_len = Py_ARRAY_LENGTH(small_stack);
@@ -535,14 +535,8 @@ _PyObject_CallFunctionVa(PyThreadState *tstate, PyObject *callable,
return _PyObject_CallNoArgsTstate(tstate, callable);
}
- if (is_size_t) {
- stack = _Py_VaBuildStack_SizeT(small_stack, small_stack_len,
- format, va, &nargs);
- }
- else {
- stack = _Py_VaBuildStack(small_stack, small_stack_len,
- format, va, &nargs);
- }
+ stack = _Py_VaBuildStack(small_stack, small_stack_len,
+ format, va, &nargs);
if (stack == NULL) {
return NULL;
}
@@ -581,7 +575,7 @@ PyObject_CallFunction(PyObject *callable, const char *format, ...)
PyThreadState *tstate = _PyThreadState_GET();
va_start(va, format);
- result = _PyObject_CallFunctionVa(tstate, callable, format, va, 0);
+ result = _PyObject_CallFunctionVa(tstate, callable, format, va);
va_end(va);
return result;
@@ -599,21 +593,24 @@ PyEval_CallFunction(PyObject *callable, const char *format, ...)
PyThreadState *tstate = _PyThreadState_GET();
va_start(va, format);
- result = _PyObject_CallFunctionVa(tstate, callable, format, va, 0);
+ result = _PyObject_CallFunctionVa(tstate, callable, format, va);
va_end(va);
return result;
}
-PyObject *
+/* _PyObject_CallFunction_SizeT is exact copy of PyObject_CallFunction.
+ * This function must be kept because it is part of the stable ABI.
+ */
+PyAPI_FUNC(PyObject *) /* abi_only */
_PyObject_CallFunction_SizeT(PyObject *callable, const char *format, ...)
{
PyThreadState *tstate = _PyThreadState_GET();
va_list va;
va_start(va, format);
- PyObject *result = _PyObject_CallFunctionVa(tstate, callable, format, va, 1);
+ PyObject *result = _PyObject_CallFunctionVa(tstate, callable, format, va);
va_end(va);
return result;
@@ -621,7 +618,7 @@ _PyObject_CallFunction_SizeT(PyObject *callable, const char *format, ...)
static PyObject*
-callmethod(PyThreadState *tstate, PyObject* callable, const char *format, va_list va, int is_size_t)
+callmethod(PyThreadState *tstate, PyObject* callable, const char *format, va_list va)
{
assert(callable != NULL);
if (!PyCallable_Check(callable)) {
@@ -631,7 +628,7 @@ callmethod(PyThreadState *tstate, PyObject* callable, const char *format, va_lis
return NULL;
}
- return _PyObject_CallFunctionVa(tstate, callable, format, va, is_size_t);
+ return _PyObject_CallFunctionVa(tstate, callable, format, va);
}
PyObject *
@@ -650,7 +647,7 @@ PyObject_CallMethod(PyObject *obj, const char *name, const char *format, ...)
va_list va;
va_start(va, format);
- PyObject *retval = callmethod(tstate, callable, format, va, 0);
+ PyObject *retval = callmethod(tstate, callable, format, va);
va_end(va);
Py_DECREF(callable);
@@ -676,7 +673,7 @@ PyEval_CallMethod(PyObject *obj, const char *name, const char *format, ...)
va_list va;
va_start(va, format);
- PyObject *retval = callmethod(tstate, callable, format, va, 0);
+ PyObject *retval = callmethod(tstate, callable, format, va);
va_end(va);
Py_DECREF(callable);
@@ -700,7 +697,7 @@ _PyObject_CallMethod(PyObject *obj, PyObject *name,
va_list va;
va_start(va, format);
- PyObject *retval = callmethod(tstate, callable, format, va, 1);
+ PyObject *retval = callmethod(tstate, callable, format, va);
va_end(va);
Py_DECREF(callable);
@@ -724,7 +721,7 @@ _PyObject_CallMethodId(PyObject *obj, _Py_Identifier *name,
va_list va;
va_start(va, format);
- PyObject *retval = callmethod(tstate, callable, format, va, 0);
+ PyObject *retval = callmethod(tstate, callable, format, va);
va_end(va);
Py_DECREF(callable);
@@ -737,13 +734,15 @@ PyObject * _PyObject_CallMethodFormat(PyThreadState *tstate, PyObject *callable,
{
va_list va;
va_start(va, format);
- PyObject *retval = callmethod(tstate, callable, format, va, 0);
+ PyObject *retval = callmethod(tstate, callable, format, va);
va_end(va);
return retval;
}
-PyObject *
+// _PyObject_CallMethod_SizeT is exact copy of PyObject_CallMethod.
+// This function must be kept because it is part of the stable ABI.
+PyAPI_FUNC(PyObject *) /* abi_only */
_PyObject_CallMethod_SizeT(PyObject *obj, const char *name,
const char *format, ...)
{
@@ -759,31 +758,7 @@ _PyObject_CallMethod_SizeT(PyObject *obj, const char *name,
va_list va;
va_start(va, format);
- PyObject *retval = callmethod(tstate, callable, format, va, 1);
- va_end(va);
-
- Py_DECREF(callable);
- return retval;
-}
-
-
-PyObject *
-_PyObject_CallMethodId_SizeT(PyObject *obj, _Py_Identifier *name,
- const char *format, ...)
-{
- PyThreadState *tstate = _PyThreadState_GET();
- if (obj == NULL || name == NULL) {
- return null_error(tstate);
- }
-
- PyObject *callable = _PyObject_GetAttrId(obj, name);
- if (callable == NULL) {
- return NULL;
- }
-
- va_list va;
- va_start(va, format);
- PyObject *retval = callmethod(tstate, callable, format, va, 1);
+ PyObject *retval = callmethod(tstate, callable, format, va);
va_end(va);
Py_DECREF(callable);
diff --git a/Python/getargs.c b/Python/getargs.c
index 66dd90877fe6f..5639aba9bc9d8 100644
--- a/Python/getargs.c
+++ b/Python/getargs.c
@@ -12,44 +12,17 @@
#ifdef __cplusplus
extern "C" {
#endif
-int PyArg_Parse(PyObject *, const char *, ...);
-int PyArg_ParseTuple(PyObject *, const char *, ...);
-int PyArg_VaParse(PyObject *, const char *, va_list);
-
-int PyArg_ParseTupleAndKeywords(PyObject *, PyObject *,
- const char *, char **, ...);
-int PyArg_VaParseTupleAndKeywords(PyObject *, PyObject *,
- const char *, char **, va_list);
-
-int _PyArg_ParseTupleAndKeywordsFast(PyObject *, PyObject *,
- struct _PyArg_Parser *, ...);
-int _PyArg_VaParseTupleAndKeywordsFast(PyObject *, PyObject *,
- struct _PyArg_Parser *, va_list);
-
-#ifdef HAVE_DECLSPEC_DLL
-/* Export functions */
+
+/* Export Stable ABIs (abi only) */
PyAPI_FUNC(int) _PyArg_Parse_SizeT(PyObject *, const char *, ...);
-PyAPI_FUNC(int) _PyArg_ParseStack_SizeT(PyObject *const *args, Py_ssize_t nargs,
- const char *format, ...);
-PyAPI_FUNC(int) _PyArg_ParseStackAndKeywords_SizeT(PyObject *const *args, Py_ssize_t nargs,
- PyObject *kwnames,
- struct _PyArg_Parser *parser, ...);
PyAPI_FUNC(int) _PyArg_ParseTuple_SizeT(PyObject *, const char *, ...);
PyAPI_FUNC(int) _PyArg_ParseTupleAndKeywords_SizeT(PyObject *, PyObject *,
const char *, char **, ...);
-PyAPI_FUNC(PyObject *) _Py_BuildValue_SizeT(const char *, ...);
PyAPI_FUNC(int) _PyArg_VaParse_SizeT(PyObject *, const char *, va_list);
PyAPI_FUNC(int) _PyArg_VaParseTupleAndKeywords_SizeT(PyObject *, PyObject *,
const char *, char **, va_list);
-PyAPI_FUNC(int) _PyArg_ParseTupleAndKeywordsFast_SizeT(PyObject *, PyObject *,
- struct _PyArg_Parser *, ...);
-PyAPI_FUNC(int) _PyArg_VaParseTupleAndKeywordsFast_SizeT(PyObject *, PyObject *,
- struct _PyArg_Parser *, va_list);
-#endif
-
#define FLAG_COMPAT 1
-#define FLAG_SIZE_T 2
typedef int (*destr_t)(PyObject *, void *);
@@ -114,7 +87,7 @@ _PyArg_Parse_SizeT(PyObject *args, const char *format, ...)
va_list va;
va_start(va, format);
- retval = vgetargs1(args, format, &va, FLAG_COMPAT|FLAG_SIZE_T);
+ retval = vgetargs1(args, format, &va, FLAG_COMPAT);
va_end(va);
return retval;
}
@@ -132,14 +105,14 @@ PyArg_ParseTuple(PyObject *args, const char *format, ...)
return retval;
}
-PyAPI_FUNC(int)
+int
_PyArg_ParseTuple_SizeT(PyObject *args, const char *format, ...)
{
int retval;
va_list va;
va_start(va, format);
- retval = vgetargs1(args, format, &va, FLAG_SIZE_T);
+ retval = vgetargs1(args, format, &va, 0);
va_end(va);
return retval;
}
@@ -157,19 +130,6 @@ _PyArg_ParseStack(PyObject *const *args, Py_ssize_t nargs, const char *format, .
return retval;
}
-PyAPI_FUNC(int)
-_PyArg_ParseStack_SizeT(PyObject *const *args, Py_ssize_t nargs, const char *format, ...)
-{
- int retval;
- va_list va;
-
- va_start(va, format);
- retval = vgetargs1_impl(NULL, args, nargs, format, &va, FLAG_SIZE_T);
- va_end(va);
- return retval;
-}
-
-
int
PyArg_VaParse(PyObject *args, const char *format, va_list va)
{
@@ -183,7 +143,7 @@ PyArg_VaParse(PyObject *args, const char *format, va_list va)
return retval;
}
-PyAPI_FUNC(int)
+int
_PyArg_VaParse_SizeT(PyObject *args, const char *format, va_list va)
{
va_list lva;
@@ -191,7 +151,7 @@ _PyArg_VaParse_SizeT(PyObject *args, const char *format, va_list va)
va_copy(lva, va);
- retval = vgetargs1(args, format, &lva, FLAG_SIZE_T);
+ retval = vgetargs1(args, format, &lva, 0);
va_end(lva);
return retval;
}
@@ -657,13 +617,6 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags,
char *msgbuf, size_t bufsize, freelist_t *freelist)
{
#define RETURN_ERR_OCCURRED return msgbuf
- /* For # codes */
-#define REQUIRE_PY_SSIZE_T_CLEAN \
- if (!(flags & FLAG_SIZE_T)) { \
- PyErr_SetString(PyExc_SystemError, \
- "PY_SSIZE_T_CLEAN macro must be defined for '#' formats"); \
- RETURN_ERR_OCCURRED; \
- }
const char *format = *p_format;
char c = *format++;
@@ -917,7 +870,6 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags,
if (count < 0)
return converterr(buf, arg, msgbuf, bufsize);
if (*format == '#') {
- REQUIRE_PY_SSIZE_T_CLEAN;
Py_ssize_t *psize = va_arg(*p_va, Py_ssize_t*);
*psize = count;
format++;
@@ -961,7 +913,6 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags,
} else if (*format == '#') { /* a string or read-only bytes-like object */
/* "s#" or "z#" */
const void **p = (const void **)va_arg(*p_va, const char **);
- REQUIRE_PY_SSIZE_T_CLEAN;
Py_ssize_t *psize = va_arg(*p_va, Py_ssize_t*);
if (c == 'z' && arg == Py_None) {
@@ -1099,7 +1050,6 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags,
trailing 0-byte
*/
- REQUIRE_PY_SSIZE_T_CLEAN;
Py_ssize_t *psize = va_arg(*p_va, Py_ssize_t*);
format++;
@@ -1274,7 +1224,6 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags,
*p_format = format;
return NULL;
-#undef REQUIRE_PY_SSIZE_T_CLEAN
#undef RETURN_ERR_OCCURRED
}
@@ -1343,7 +1292,7 @@ PyArg_ParseTupleAndKeywords(PyObject *args,
return retval;
}
-PyAPI_FUNC(int)
+int
_PyArg_ParseTupleAndKeywords_SizeT(PyObject *args,
PyObject *keywords,
const char *format,
@@ -1363,7 +1312,7 @@ _PyArg_ParseTupleAndKeywords_SizeT(PyObject *args,
va_start(va, kwlist);
retval = vgetargskeywords(args, keywords, format,
- kwlist, &va, FLAG_SIZE_T);
+ kwlist, &va, 0);
va_end(va);
return retval;
}
@@ -1394,7 +1343,7 @@ PyArg_VaParseTupleAndKeywords(PyObject *args,
return retval;
}
-PyAPI_FUNC(int)
+int
_PyArg_VaParseTupleAndKeywords_SizeT(PyObject *args,
PyObject *keywords,
const char *format,
@@ -1415,7 +1364,7 @@ _PyArg_VaParseTupleAndKeywords_SizeT(PyObject *args,
va_copy(lva, va);
retval = vgetargskeywords(args, keywords, format,
- kwlist, &lva, FLAG_SIZE_T);
+ kwlist, &lva, 0);
va_end(lva);
return retval;
}
@@ -1433,7 +1382,7 @@ _PyArg_ParseTupleAndKeywordsFast(PyObject *args, PyObject *keywords,
return retval;
}
-PyAPI_FUNC(int)
+int
_PyArg_ParseTupleAndKeywordsFast_SizeT(PyObject *args, PyObject *keywords,
struct _PyArg_Parser *parser, ...)
{
@@ -1441,12 +1390,12 @@ _PyArg_ParseTupleAndKeywordsFast_SizeT(PyObject *args, PyObject *keywords,
va_list va;
va_start(va, parser);
- retval = vgetargskeywordsfast(args, keywords, parser, &va, FLAG_SIZE_T);
+ retval = vgetargskeywordsfast(args, keywords, parser, &va, 0);
va_end(va);
return retval;
}
-PyAPI_FUNC(int)
+int
_PyArg_ParseStackAndKeywords(PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames,
struct _PyArg_Parser *parser, ...)
{
@@ -1459,7 +1408,7 @@ _PyArg_ParseStackAndKeywords(PyObject *const *args, Py_ssize_t nargs, PyObject *
return retval;
}
-PyAPI_FUNC(int)
+int
_PyArg_ParseStackAndKeywords_SizeT(PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames,
struct _PyArg_Parser *parser, ...)
{
@@ -1467,7 +1416,7 @@ _PyArg_ParseStackAndKeywords_SizeT(PyObject *const *args, Py_ssize_t nargs, PyOb
va_list va;
va_start(va, parser);
- retval = vgetargskeywordsfast_impl(args, nargs, NULL, kwnames, parser, &va, FLAG_SIZE_T);
+ retval = vgetargskeywordsfast_impl(args, nargs, NULL, kwnames, parser, &va, 0);
va_end(va);
return retval;
}
@@ -1487,20 +1436,6 @@ _PyArg_VaParseTupleAndKeywordsFast(PyObject *args, PyObject *keywords,
return retval;
}
-PyAPI_FUNC(int)
-_PyArg_VaParseTupleAndKeywordsFast_SizeT(PyObject *args, PyObject *keywords,
- struct _PyArg_Parser *parser, va_list va)
-{
- int retval;
- va_list lva;
-
- va_copy(lva, va);
-
- retval = vgetargskeywordsfast(args, keywords, parser, &lva, FLAG_SIZE_T);
- va_end(lva);
- return retval;
-}
-
static void
error_unexpected_keyword_arg(PyObject *kwargs, PyObject *kwnames, PyObject *kwtuple, const char *fname)
{
@@ -2710,9 +2645,6 @@ skipitem(const char **p_format, va_list *p_va, int flags)
}
if (*format == '#') {
if (p_va != NULL) {
- if (!(flags & FLAG_SIZE_T)) {
- return "PY_SSIZE_T_CLEAN macro must be defined for '#' formats";
- }
(void) va_arg(*p_va, Py_ssize_t *);
}
format++;
diff --git a/Python/modsupport.c b/Python/modsupport.c
index be229c987b8a7..e2092ac2099d0 100644
--- a/Python/modsupport.c
+++ b/Python/modsupport.c
@@ -5,11 +5,9 @@
#include "pycore_abstract.h" // _PyIndex_Check()
#include "pycore_object.h" // _PyType_IsReady()
-#define FLAG_SIZE_T 1
typedef double va_double;
-static PyObject *va_build_value(const char *, va_list, int);
-static PyObject **va_build_stack(PyObject **small_stack, Py_ssize_t small_stack_len, const char *, va_list, int, Py_ssize_t*);
+static PyObject *va_build_value(const char *, va_list);
int
@@ -84,21 +82,21 @@ countformat(const char *format, char endchar)
/* Generic function to create a value -- the inverse of getargs() */
/* After an original idea and first implementation by Steven Miale */
-static PyObject *do_mktuple(const char**, va_list *, char, Py_ssize_t, int);
-static int do_mkstack(PyObject **, const char**, va_list *, char, Py_ssize_t, int);
-static PyObject *do_mklist(const char**, va_list *, char, Py_ssize_t, int);
-static PyObject *do_mkdict(const char**, va_list *, char, Py_ssize_t, int);
-static PyObject *do_mkvalue(const char**, va_list *, int);
+static PyObject *do_mktuple(const char**, va_list *, char, Py_ssize_t);
+static int do_mkstack(PyObject **, const char**, va_list *, char, Py_ssize_t);
+static PyObject *do_mklist(const char**, va_list *, char, Py_ssize_t);
+static PyObject *do_mkdict(const char**, va_list *, char, Py_ssize_t);
+static PyObject *do_mkvalue(const char**, va_list *);
static void
-do_ignore(const char **p_format, va_list *p_va, char endchar, Py_ssize_t n, int flags)
+do_ignore(const char **p_format, va_list *p_va, char endchar, Py_ssize_t n)
{
assert(PyErr_Occurred());
PyObject *v = PyTuple_New(n);
for (Py_ssize_t i = 0; i < n; i++) {
PyObject *exc = PyErr_GetRaisedException();
- PyObject *w = do_mkvalue(p_format, p_va, flags);
+ PyObject *w = do_mkvalue(p_format, p_va);
PyErr_SetRaisedException(exc);
if (w != NULL) {
if (v != NULL) {
@@ -121,7 +119,7 @@ do_ignore(const char **p_format, va_list *p_va, char endchar, Py_ssize_t n, int
}
static PyObject *
-do_mkdict(const char **p_format, va_list *p_va, char endchar, Py_ssize_t n, int flags)
+do_mkdict(const char **p_format, va_list *p_va, char endchar, Py_ssize_t n)
{
PyObject *d;
Py_ssize_t i;
@@ -130,27 +128,27 @@ do_mkdict(const char **p_format, va_list *p_va, char endchar, Py_ssize_t n, int
if (n % 2) {
PyErr_SetString(PyExc_SystemError,
"Bad dict format");
- do_ignore(p_format, p_va, endchar, n, flags);
+ do_ignore(p_format, p_va, endchar, n);
return NULL;
}
/* Note that we can't bail immediately on error as this will leak
refcounts on any 'N' arguments. */
if ((d = PyDict_New()) == NULL) {
- do_ignore(p_format, p_va, endchar, n, flags);
+ do_ignore(p_format, p_va, endchar, n);
return NULL;
}
for (i = 0; i < n; i+= 2) {
PyObject *k, *v;
- k = do_mkvalue(p_format, p_va, flags);
+ k = do_mkvalue(p_format, p_va);
if (k == NULL) {
- do_ignore(p_format, p_va, endchar, n - i - 1, flags);
+ do_ignore(p_format, p_va, endchar, n - i - 1);
Py_DECREF(d);
return NULL;
}
- v = do_mkvalue(p_format, p_va, flags);
+ v = do_mkvalue(p_format, p_va);
if (v == NULL || PyDict_SetItem(d, k, v) < 0) {
- do_ignore(p_format, p_va, endchar, n - i - 2, flags);
+ do_ignore(p_format, p_va, endchar, n - i - 2);
Py_DECREF(k);
Py_XDECREF(v);
Py_DECREF(d);
@@ -171,7 +169,7 @@ do_mkdict(const char **p_format, va_list *p_va, char endchar, Py_ssize_t n, int
}
static PyObject *
-do_mklist(const char **p_format, va_list *p_va, char endchar, Py_ssize_t n, int flags)
+do_mklist(const char **p_format, va_list *p_va, char endchar, Py_ssize_t n)
{
PyObject *v;
Py_ssize_t i;
@@ -181,13 +179,13 @@ do_mklist(const char **p_format, va_list *p_va, char endchar, Py_ssize_t n, int
refcounts on any 'N' arguments. */
v = PyList_New(n);
if (v == NULL) {
- do_ignore(p_format, p_va, endchar, n, flags);
+ do_ignore(p_format, p_va, endchar, n);
return NULL;
}
for (i = 0; i < n; i++) {
- PyObject *w = do_mkvalue(p_format, p_va, flags);
+ PyObject *w = do_mkvalue(p_format, p_va);
if (w == NULL) {
- do_ignore(p_format, p_va, endchar, n - i - 1, flags);
+ do_ignore(p_format, p_va, endchar, n - i - 1);
Py_DECREF(v);
return NULL;
}
@@ -206,7 +204,7 @@ do_mklist(const char **p_format, va_list *p_va, char endchar, Py_ssize_t n, int
static int
do_mkstack(PyObject **stack, const char **p_format, va_list *p_va,
- char endchar, Py_ssize_t n, int flags)
+ char endchar, Py_ssize_t n)
{
Py_ssize_t i;
@@ -216,9 +214,9 @@ do_mkstack(PyObject **stack, const char **p_format, va_list *p_va,
/* Note that we can't bail immediately on error as this will leak
refcounts on any 'N' arguments. */
for (i = 0; i < n; i++) {
- PyObject *w = do_mkvalue(p_format, p_va, flags);
+ PyObject *w = do_mkvalue(p_format, p_va);
if (w == NULL) {
- do_ignore(p_format, p_va, endchar, n - i - 1, flags);
+ do_ignore(p_format, p_va, endchar, n - i - 1);
goto error;
}
stack[i] = w;
@@ -242,7 +240,7 @@ do_mkstack(PyObject **stack, const char **p_format, va_list *p_va,
}
static PyObject *
-do_mktuple(const char **p_format, va_list *p_va, char endchar, Py_ssize_t n, int flags)
+do_mktuple(const char **p_format, va_list *p_va, char endchar, Py_ssize_t n)
{
PyObject *v;
Py_ssize_t i;
@@ -251,13 +249,13 @@ do_mktuple(const char **p_format, va_list *p_va, char endchar, Py_ssize_t n, int
/* Note that we can't bail immediately on error as this will leak
refcounts on any 'N' arguments. */
if ((v = PyTuple_New(n)) == NULL) {
- do_ignore(p_format, p_va, endchar, n, flags);
+ do_ignore(p_format, p_va, endchar, n);
return NULL;
}
for (i = 0; i < n; i++) {
- PyObject *w = do_mkvalue(p_format, p_va, flags);
+ PyObject *w = do_mkvalue(p_format, p_va);
if (w == NULL) {
- do_ignore(p_format, p_va, endchar, n - i - 1, flags);
+ do_ignore(p_format, p_va, endchar, n - i - 1);
Py_DECREF(v);
return NULL;
}
@@ -275,28 +273,21 @@ do_mktuple(const char **p_format, va_list *p_va, char endchar, Py_ssize_t n, int
}
static PyObject *
-do_mkvalue(const char **p_format, va_list *p_va, int flags)
+do_mkvalue(const char **p_format, va_list *p_va)
{
-#define ERROR_NEED_PY_SSIZE_T_CLEAN \
- { \
- PyErr_SetString(PyExc_SystemError, \
- "PY_SSIZE_T_CLEAN macro must be defined for '#' formats"); \
- return NULL; \
- }
-
for (;;) {
switch (*(*p_format)++) {
case '(':
return do_mktuple(p_format, p_va, ')',
- countformat(*p_format, ')'), flags);
+ countformat(*p_format, ')'));
case '[':
return do_mklist(p_format, p_va, ']',
- countformat(*p_format, ']'), flags);
+ countformat(*p_format, ']'));
case '{':
return do_mkdict(p_format, p_va, '}',
- countformat(*p_format, '}'), flags);
+ countformat(*p_format, '}'));
case 'b':
case 'B':
@@ -342,13 +333,7 @@ do_mkvalue(const char **p_format, va_list *p_va, int flags)
Py_ssize_t n;
if (**p_format == '#') {
++*p_format;
- if (flags & FLAG_SIZE_T) {
- n = va_arg(*p_va, Py_ssize_t);
- }
- else {
- n = va_arg(*p_va, int);
- ERROR_NEED_PY_SSIZE_T_CLEAN;
- }
+ n = va_arg(*p_va, Py_ssize_t);
}
else
n = -1;
@@ -392,13 +377,7 @@ do_mkvalue(const char **p_format, va_list *p_va, int flags)
Py_ssize_t n;
if (**p_format == '#') {
++*p_format;
- if (flags & FLAG_SIZE_T) {
- n = va_arg(*p_va, Py_ssize_t);
- }
- else {
- n = va_arg(*p_va, int);
- ERROR_NEED_PY_SSIZE_T_CLEAN;
- }
+ n = va_arg(*p_va, Py_ssize_t);
}
else
n = -1;
@@ -427,13 +406,7 @@ do_mkvalue(const char **p_format, va_list *p_va, int flags)
Py_ssize_t n;
if (**p_format == '#') {
++*p_format;
- if (flags & FLAG_SIZE_T) {
- n = va_arg(*p_va, Py_ssize_t);
- }
- else {
- n = va_arg(*p_va, int);
- ERROR_NEED_PY_SSIZE_T_CLEAN;
- }
+ n = va_arg(*p_va, Py_ssize_t);
}
else
n = -1;
@@ -499,8 +472,6 @@ do_mkvalue(const char **p_format, va_list *p_va, int flags)
}
}
-
-#undef ERROR_NEED_PY_SSIZE_T_CLEAN
}
@@ -510,18 +481,18 @@ Py_BuildValue(const char *format, ...)
va_list va;
PyObject* retval;
va_start(va, format);
- retval = va_build_value(format, va, 0);
+ retval = va_build_value(format, va);
va_end(va);
return retval;
}
-PyObject *
+PyAPI_FUNC(PyObject *) /* abi only */
_Py_BuildValue_SizeT(const char *format, ...)
{
va_list va;
PyObject* retval;
va_start(va, format);
- retval = va_build_value(format, va, FLAG_SIZE_T);
+ retval = va_build_value(format, va);
va_end(va);
return retval;
}
@@ -529,17 +500,17 @@ _Py_BuildValue_SizeT(const char *format, ...)
PyObject *
Py_VaBuildValue(const char *format, va_list va)
{
- return va_build_value(format, va, 0);
+ return va_build_value(format, va);
}
-PyObject *
+PyAPI_FUNC(PyObject *) /* abi only */
_Py_VaBuildValue_SizeT(const char *format, va_list va)
{
- return va_build_value(format, va, FLAG_SIZE_T);
+ return va_build_value(format, va);
}
static PyObject *
-va_build_value(const char *format, va_list va, int flags)
+va_build_value(const char *format, va_list va)
{
const char *f = format;
Py_ssize_t n = countformat(f, '\0');
@@ -553,9 +524,9 @@ va_build_value(const char *format, va_list va, int flags)
}
va_copy(lva, va);
if (n == 1) {
- retval = do_mkvalue(&f, &lva, flags);
+ retval = do_mkvalue(&f, &lva);
} else {
- retval = do_mktuple(&f, &lva, '\0', n, flags);
+ retval = do_mktuple(&f, &lva, '\0', n);
}
va_end(lva);
return retval;
@@ -564,20 +535,6 @@ va_build_value(const char *format, va_list va, int flags)
PyObject **
_Py_VaBuildStack(PyObject **small_stack, Py_ssize_t small_stack_len,
const char *format, va_list va, Py_ssize_t *p_nargs)
-{
- return va_build_stack(small_stack, small_stack_len, format, va, 0, p_nargs);
-}
-
-PyObject **
-_Py_VaBuildStack_SizeT(PyObject **small_stack, Py_ssize_t small_stack_len,
- const char *format, va_list va, Py_ssize_t *p_nargs)
-{
- return va_build_stack(small_stack, small_stack_len, format, va, FLAG_SIZE_T, p_nargs);
-}
-
-static PyObject **
-va_build_stack(PyObject **small_stack, Py_ssize_t small_stack_len,
- const char *format, va_list va, int flags, Py_ssize_t *p_nargs)
{
const char *f;
Py_ssize_t n;
@@ -609,7 +566,7 @@ va_build_stack(PyObject **small_stack, Py_ssize_t small_stack_len,
va_copy(lva, va);
f = format;
- res = do_mkstack(stack, &f, &lva, '\0', n, flags);
+ res = do_mkstack(stack, &f, &lva, '\0', n);
va_end(lva);
if (res < 0) {
diff --git a/Python/sysmodule.c b/Python/sysmodule.c
index 33147f012b611..32be7ec09ebfd 100644
--- a/Python/sysmodule.c
+++ b/Python/sysmodule.c
@@ -211,7 +211,7 @@ sys_audit_tstate(PyThreadState *ts, const char *event,
/* Initialize event args now */
if (argFormat && argFormat[0]) {
- eventArgs = _Py_VaBuildValue_SizeT(argFormat, vargs);
+ eventArgs = Py_VaBuildValue(argFormat, vargs);
if (eventArgs && !PyTuple_Check(eventArgs)) {
PyObject *argTuple = PyTuple_Pack(1, eventArgs);
Py_SETREF(eventArgs, argTuple);
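
A minimal sketch (not part of the commit) of what extension code looks like after
this change: the length argument for '#' format units is always Py_ssize_t, and no
macro needs to be defined before including Python.h. The function below is
hypothetical, for illustration only:

    #include <Python.h>   /* no #define PY_SSIZE_T_CLEAN needed on 3.13+ */

    /* Hypothetical example function, not from the commit. */
    static PyObject *
    example_len(PyObject *self, PyObject *args)
    {
        const char *s;
        Py_ssize_t len;   /* '#' now always writes a Py_ssize_t */

        if (!PyArg_ParseTuple(args, "s#", &s, &len)) {
            return NULL;
        }
        return PyLong_FromSsize_t(len);
    }
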
[3.12] gh-105091: stable_abi.py: Remove "Unixy" check from --all on other platforms (GH-105092) (GH-105133)
by encukou May 31, 2023
May 31, 2023
https://github.com/python/cpython/commit/4729100239ce5486fce0ff4d62dad52c30…
commit: 4729100239ce5486fce0ff4d62dad52c30e108c3
branch: 3.12
author: Miss Islington (bot) <31488909+miss-islington(a)users.noreply.github.com>
committer: encukou <encukou(a)gmail.com>
date: 2023-05-31T07:48:28Z
summary:
[3.12] gh-105091: stable_abi.py: Remove "Unixy" check from --all on other platforms (GH-105092) (GH-105133)
gh-105091: stable_abi.py: Remove "Unixy" check from --all on other platforms (GH-105092)
(cherry picked from commit 0656d23d82cd5b88e578a26c65dd4a64414c833b)
Co-authored-by: Petr Viktorin <encukou(a)gmail.com>
files:
M Tools/build/stable_abi.py
diff --git a/Tools/build/stable_abi.py b/Tools/build/stable_abi.py
index 88db93e935e9..42b2dd92307b 100644
--- a/Tools/build/stable_abi.py
+++ b/Tools/build/stable_abi.py
@@ -684,7 +684,8 @@ def main():
if args.all:
run_all_generators = True
- args.unixy_check = True
+ if UNIXY:
+ args.unixy_check = True
try:
file = args.file.open('rb')
[3.12] gh-103142: Update macOS installer to use OpenSSL 1.1.1u. (GH-105132)
by ned-deily May 31, 2023
May 31, 2023
https://github.com/python/cpython/commit/e3fcd9e4fa34f087985f0ab958c6effa20…
commit: e3fcd9e4fa34f087985f0ab958c6effa20bf071e
branch: 3.12
author: Miss Islington (bot) <31488909+miss-islington(a)users.noreply.github.com>
committer: ned-deily <nad(a)python.org>
date: 2023-05-31T06:18:55Z
summary:
[3.12] gh-103142: Update macOS installer to use OpenSSL 1.1.1u. (GH-105132)
(cherry picked from commit f90d3f68db720bd6d0deda8cc0030339ccd43858)
Co-authored-by: Ned Deily <nad(a)python.org>
files:
A Misc/NEWS.d/next/macOS/2023-05-30-23-30-46.gh-issue-103142.55lMXQ.rst
M Mac/BuildScript/build-installer.py
diff --git a/Mac/BuildScript/build-installer.py b/Mac/BuildScript/build-installer.py
index 177fc2044f69..9729759434a9 100755
--- a/Mac/BuildScript/build-installer.py
+++ b/Mac/BuildScript/build-installer.py
@@ -246,9 +246,9 @@ def library_recipes():
result.extend([
dict(
- name="OpenSSL 1.1.1t",
- url="https://www.openssl.org/source/openssl-1.1.1t.tar.gz",
- checksum='8dee9b24bdb1dcbf0c3d1e9b02fb8f6bf22165e807f45adeb7c9677536859d3b',
+ name="OpenSSL 1.1.1u",
+ url="https://www.openssl.org/source/openssl-1.1.1u.tar.gz",
+ checksum='e2f8d84b523eecd06c7be7626830370300fbcc15386bf5142d72758f6963ebc6',
buildrecipe=build_universal_openssl,
configure=None,
install=None,
diff --git a/Misc/NEWS.d/next/macOS/2023-05-30-23-30-46.gh-issue-103142.55lMXQ.rst b/Misc/NEWS.d/next/macOS/2023-05-30-23-30-46.gh-issue-103142.55lMXQ.rst
new file mode 100644
index 000000000000..1afd949d6a9f
--- /dev/null
+++ b/Misc/NEWS.d/next/macOS/2023-05-30-23-30-46.gh-issue-103142.55lMXQ.rst
@@ -0,0 +1 @@
+Update macOS installer to use OpenSSL 1.1.1u.