Python-checkins
January 2025: 1 participant, 705 discussions
[3.12] gh-128833: Patch `test_embed.test_specialized_static_code_gets_unspecialized_at_Py_FINALIZE` on Windows (#128860)
by ambv Jan. 15, 2025
https://github.com/python/cpython/commit/6c9b7e591b8d19fce53f8ad0e5d617a1c1842319
commit: 6c9b7e591b8d19fce53f8ad0e5d617a1c1842319
branch: 3.12
author: Bénédikt Tran <10796600+picnixz(a)users.noreply.github.com>
committer: ambv <lukasz(a)langa.pl>
date: 2025-01-15T14:22:45+01:00
summary:
[3.12] gh-128833: Patch `test_embed.test_specialized_static_code_gets_unspecialized_at_Py_FINALIZE` on Windows (#128860)
Hotfix for `test_embed`
files:
M Lib/test/test_embed.py
diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py
index 13713cf37b83a4..c931d160350545 100644
--- a/Lib/test/test_embed.py
+++ b/Lib/test/test_embed.py
@@ -348,12 +348,12 @@ def test_simple_initialization_api(self):
@support.requires_specialization
def test_specialized_static_code_gets_unspecialized_at_Py_FINALIZE(self):
# https://github.com/python/cpython/issues/92031
+ from test.test_dis import ADAPTIVE_WARMUP_DELAY
- code = textwrap.dedent("""\
+ code = textwrap.dedent(f"""\
import dis
import importlib._bootstrap
import opcode
- import test.test_dis
def is_specialized(f):
for instruction in dis.get_instructions(f, adaptive=True):
@@ -373,7 +373,7 @@ def is_specialized(f):
assert not is_specialized(func), "specialized instructions found"
- for i in range(test.test_dis.ADAPTIVE_WARMUP_DELAY):
+ for i in range({ADAPTIVE_WARMUP_DELAY}):
func(importlib._bootstrap, ["x"], lambda *args: None)
assert is_specialized(func), "no specialized instructions found"
https://github.com/python/cpython/commit/599be687ec7327c30c6469cf743aa4ee9e82232d
commit: 599be687ec7327c30c6469cf743aa4ee9e82232d
branch: main
author: Thomas Grainger <tagrain(a)gmail.com>
committer: serhiy-storchaka <storchaka(a)gmail.com>
date: 2025-01-15T15:05:59+02:00
summary:
gh-128816: Fix warnings in test_doctest (GH-128817)
* Fix a deprecation warning for using importlib.resources.abc.ResourceReader.
* Fix an import warning when importing readline (if it has not yet been imported).
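As context for the diff below: on current main, instantiating a subclass of importlib.abc.ResourceLoader emits a DeprecationWarning (see the importlib deprecation commit later in this digest), and that is the warning this change removes by dropping the base class. A minimal, illustrative sketch of triggering that warning (the class name here is made up):

import warnings
import importlib.abc

class WithResourceLoader(importlib.abc.MetaPathFinder, importlib.abc.ResourceLoader):
    def get_data(self, path):
        return b""

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    WithResourceLoader()  # expect a DeprecationWarning pointing at importlib.resources.abc.ResourceReader
print([w.category.__name__ for w in caught])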
files:
M Lib/test/test_doctest/test_doctest.py
diff --git a/Lib/test/test_doctest/test_doctest.py b/Lib/test/test_doctest/test_doctest.py
index b1e165fe16b54f..a4a49298bab3be 100644
--- a/Lib/test/test_doctest/test_doctest.py
+++ b/Lib/test/test_doctest/test_doctest.py
@@ -2860,7 +2860,7 @@ def test_testfile(): r"""
>>> _colorize.COLORIZE = save_colorize
"""
-class TestImporter(importlib.abc.MetaPathFinder, importlib.abc.ResourceLoader):
+class TestImporter(importlib.abc.MetaPathFinder):
def find_spec(self, fullname, path, target=None):
return importlib.util.spec_from_file_location(fullname, path, loader=self)
@@ -2869,6 +2869,12 @@ def get_data(self, path):
with open(path, mode='rb') as f:
return f.read()
+ def exec_module(self, module):
+ raise ImportError
+
+ def create_module(self, spec):
+ return None
+
class TestHook:
def __init__(self, pathdir):
https://github.com/python/cpython/commit/1a1056d394a489d229b26c06c7c79aa9c06696f6
commit: 1a1056d394a489d229b26c06c7c79aa9c06696f6
branch: main
author: Wang Ran (汪然) <wangr(a)smail.nju.edu.cn>
committer: kumaraditya303 <kumaraditya(a)python.org>
date: 2025-01-15T17:54:31+05:30
summary:
Fix typo in `Lib/asyncio/futures.py` (#128819)
files:
M Lib/asyncio/futures.py
diff --git a/Lib/asyncio/futures.py b/Lib/asyncio/futures.py
index c95fce035cd548..359b7a5e3f9eea 100644
--- a/Lib/asyncio/futures.py
+++ b/Lib/asyncio/futures.py
@@ -62,7 +62,7 @@ class Future:
# that it is not compatible by setting this to None.
# - It is set by __iter__() below so that Task.__step() can tell
# the difference between
- # `await Future()` or`yield from Future()` (correct) vs.
+ # `await Future()` or `yield from Future()` (correct) vs.
# `yield Future()` (incorrect).
_asyncio_future_blocking = False
gh-128438: Use `EnvironmentVarGuard` for `test_{builtin,io,locale}.py` (#128476)
by picnixz Jan. 15, 2025
https://github.com/python/cpython/commit/ae7f621c33c854334c69cf97260c7516170dcf0d
commit: ae7f621c33c854334c69cf97260c7516170dcf0d
branch: main
author: Yan Yanchii <yyanchiy(a)gmail.com>
committer: picnixz <10796600+picnixz(a)users.noreply.github.com>
date: 2025-01-15T10:38:43+01:00
summary:
gh-128438: Use `EnvironmentVarGuard` for `test_{builtin,io,locale}.py` (#128476)
Modifying locale-related environment variables in `Lib/test/test_builtin.py`,
`Lib/test/test_io.py` and `Lib/test/test_locale.py` is now achieved by using
an `EnvironmentVarGuard` context instead of an explicit `try-finally` block.
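A minimal sketch of the pattern, using the same environment keys as the tests (the surrounding test code is omitted):

from test.support.os_helper import EnvironmentVarGuard

with EnvironmentVarGuard() as env:
    # Changes made through `env` are rolled back when the block exits,
    # replacing the manual save/clear/restore of os.environ.
    for key in ('LC_ALL', 'LANG', 'LC_CTYPE'):
        env.unset(key)            # drop the variable for the duration of the test
    env.set('LC_CTYPE', 'UTF-8')  # or set a temporary value
    ...                           # locale-sensitive code under test runs here
# os.environ is back to its original contents at this point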
files:
M Lib/test/test_builtin.py
M Lib/test/test_io.py
M Lib/test/test_locale.py
diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py
index 5f4eac5267622f..73b139e405ae59 100644
--- a/Lib/test/test_builtin.py
+++ b/Lib/test/test_builtin.py
@@ -1567,14 +1567,12 @@ def test_open(self):
@unittest.skipIf(sys.flags.utf8_mode, "utf-8 mode is enabled")
def test_open_default_encoding(self):
- old_environ = dict(os.environ)
- try:
+ with EnvironmentVarGuard() as env:
# try to get a user preferred encoding different than the current
# locale encoding to check that open() uses the current locale
# encoding and not the user preferred encoding
for key in ('LC_ALL', 'LANG', 'LC_CTYPE'):
- if key in os.environ:
- del os.environ[key]
+ env.unset(key)
self.write_testfile()
current_locale_encoding = locale.getencoding()
@@ -1583,9 +1581,6 @@ def test_open_default_encoding(self):
fp = open(TESTFN, 'w')
with fp:
self.assertEqual(fp.encoding, current_locale_encoding)
- finally:
- os.environ.clear()
- os.environ.update(old_environ)
@support.requires_subprocess()
def test_open_non_inheritable(self):
diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py
index 33e0161241e87e..8c79d2c24a140a 100644
--- a/Lib/test/test_io.py
+++ b/Lib/test/test_io.py
@@ -2892,14 +2892,12 @@ def test_reconfigure_line_buffering(self):
@unittest.skipIf(sys.flags.utf8_mode, "utf-8 mode is enabled")
def test_default_encoding(self):
- old_environ = dict(os.environ)
- try:
+ with os_helper.EnvironmentVarGuard() as env:
# try to get a user preferred encoding different than the current
# locale encoding to check that TextIOWrapper() uses the current
# locale encoding and not the user preferred encoding
for key in ('LC_ALL', 'LANG', 'LC_CTYPE'):
- if key in os.environ:
- del os.environ[key]
+ env.unset(key)
current_locale_encoding = locale.getencoding()
b = self.BytesIO()
@@ -2907,9 +2905,6 @@ def test_default_encoding(self):
warnings.simplefilter("ignore", EncodingWarning)
t = self.TextIOWrapper(b)
self.assertEqual(t.encoding, current_locale_encoding)
- finally:
- os.environ.clear()
- os.environ.update(old_environ)
def test_encoding(self):
# Check the encoding attribute is always set, and valid
diff --git a/Lib/test/test_locale.py b/Lib/test/test_locale.py
index 00e93d8e78443d..c025ed4108fb58 100644
--- a/Lib/test/test_locale.py
+++ b/Lib/test/test_locale.py
@@ -1,5 +1,5 @@
from decimal import Decimal
-from test.support import verbose, is_android, is_emscripten, is_wasi
+from test.support import verbose, is_android, is_emscripten, is_wasi, os_helper
from test.support.warnings_helper import check_warnings
from test.support.import_helper import import_fresh_module
from unittest import mock
@@ -499,25 +499,16 @@ def test_defaults_UTF8(self):
else:
orig_getlocale = None
- orig_env = {}
try:
- for key in ('LC_ALL', 'LC_CTYPE', 'LANG', 'LANGUAGE'):
- if key in os.environ:
- orig_env[key] = os.environ[key]
- del os.environ[key]
+ with os_helper.EnvironmentVarGuard() as env:
+ for key in ('LC_ALL', 'LC_CTYPE', 'LANG', 'LANGUAGE'):
+ env.unset(key)
- os.environ['LC_CTYPE'] = 'UTF-8'
-
- with check_warnings(('', DeprecationWarning)):
- self.assertEqual(locale.getdefaultlocale(), (None, 'UTF-8'))
+ env.set('LC_CTYPE', 'UTF-8')
+ with check_warnings(('', DeprecationWarning)):
+ self.assertEqual(locale.getdefaultlocale(), (None, 'UTF-8'))
finally:
- for k in orig_env:
- os.environ[k] = orig_env[k]
-
- if 'LC_CTYPE' not in orig_env:
- del os.environ['LC_CTYPE']
-
if orig_getlocale is not None:
_locale._getdefaultlocale = orig_getlocale
gh-128473: Skip segfaulting `test_embed` tests when BOLT instrumented (gh-128474)
by corona10 Jan. 15, 2025
https://github.com/python/cpython/commit/6e4f64109b0eb6c9f1b50eb7dc5f647a1d901ff4
commit: 6e4f64109b0eb6c9f1b50eb7dc5f647a1d901ff4
branch: main
author: Zanie Blue <contact(a)zanie.dev>
committer: corona10 <donghee.na92(a)gmail.com>
date: 2025-01-15T10:49:02+09:00
summary:
gh-128473: Skip segfaulting `test_embed` tests when BOLT instrumented (gh-128474)
* Skip segfaulting `test_embed` tests when BOLT instrumented
Co-authored-by: Gregory Szorc <gregory.szorc(a)gmail.com>
* NEWS
---------
Co-authored-by: Gregory Szorc <gregory.szorc(a)gmail.com>
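A hedged sketch of the skip pattern the diff below applies. The real check is `support.check_bolt_optimized`; detecting a BOLT build from the configure arguments recorded by sysconfig, as shown here, is only an assumption for illustration:

import sysconfig
import unittest

def built_with_bolt():
    # Assumption: an --enable-bolt build records that flag in CONFIG_ARGS.
    return "--enable-bolt" in (sysconfig.get_config_var("CONFIG_ARGS") or "")

class EmbedTests(unittest.TestCase):
    @unittest.skipIf(built_with_bolt(), "segfaults on BOLT instrumented binaries")
    def test_init_default_config(self):
        ...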
files:
A Misc/NEWS.d/next/Tests/2025-01-04-02-41-41.gh-issue-128474.0b-tl4.rst
M Lib/test/test_embed.py
diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py
index 7110fb889f3c8e..1b55cd156d759d 100644
--- a/Lib/test/test_embed.py
+++ b/Lib/test/test_embed.py
@@ -940,6 +940,7 @@ def check_all_configs(self, testname, expected_config=None,
self.check_global_config(configs)
return configs
+ @unittest.skipIf(support.check_bolt_optimized, "segfaults on BOLT instrumented binaries")
def test_init_default_config(self):
self.check_all_configs("test_init_initialize_config", api=API_COMPAT)
@@ -1039,6 +1040,7 @@ def test_init_from_config(self):
self.check_all_configs("test_init_from_config", config, preconfig,
api=API_COMPAT)
+ @unittest.skipIf(support.check_bolt_optimized, "segfaults on BOLT instrumented binaries")
def test_init_compat_env(self):
preconfig = {
'allocator': ALLOCATOR_FOR_CONFIG,
@@ -1074,6 +1076,7 @@ def test_init_compat_env(self):
self.check_all_configs("test_init_compat_env", config, preconfig,
api=API_COMPAT)
+ @unittest.skipIf(support.check_bolt_optimized, "segfaults on BOLT instrumented binaries")
def test_init_python_env(self):
preconfig = {
'allocator': ALLOCATOR_FOR_CONFIG,
@@ -1772,6 +1775,7 @@ def test_init_set_config(self):
self.check_all_configs("test_init_set_config", config,
api=API_ISOLATED)
+ @unittest.skipIf(support.check_bolt_optimized, "segfaults on BOLT instrumented binaries")
def test_initconfig_api(self):
preconfig = {
'configure_locale': True,
diff --git a/Misc/NEWS.d/next/Tests/2025-01-04-02-41-41.gh-issue-128474.0b-tl4.rst b/Misc/NEWS.d/next/Tests/2025-01-04-02-41-41.gh-issue-128474.0b-tl4.rst
new file mode 100644
index 00000000000000..7dc807757c5ec0
--- /dev/null
+++ b/Misc/NEWS.d/next/Tests/2025-01-04-02-41-41.gh-issue-128474.0b-tl4.rst
@@ -0,0 +1,2 @@
+Disable ``test_embed`` test cases that segfault on BOLT instrument binaries.
+The tests are only disabled when BOLT is enabled.
gh-121604: Make sure all deprecated items in importlib raise DeprecationWarning (#128007)
by brettcannon Jan. 15, 2025
https://github.com/python/cpython/commit/bd3baa8b1a7755f17b2fc98c7fb7b872fec43af3
commit: bd3baa8b1a7755f17b2fc98c7fb7b872fec43af3
branch: main
author: Tomas R. <tomas.roun8(a)gmail.com>
committer: brettcannon <brett(a)python.org>
date: 2025-01-14T16:48:46-08:00
summary:
gh-121604: Make sure all deprecated items in importlib raise DeprecationWarning (#128007)
Co-authored-by: rashansmith <smith.rashan(a)gmail.com>
Co-authored-by: Stan Ulbrych <89152624+StanFromIreland(a)users.noreply.github.com>
Co-authored-by: Brett Cannon <brett(a)python.org>
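The machinery change relies on a module-level __getattr__ (PEP 562) so that touching the old constants emits a warning while normal imports stay silent. A generic sketch of that pattern, with made-up attribute names:

import warnings

_OLD_SUFFIXES = ['.example']  # kept under a private name so ordinary lookup misses it

def __getattr__(name):
    # Only called when regular module attribute lookup fails (PEP 562).
    if name == 'OLD_SUFFIXES':
        warnings.warn(f'{__name__}.OLD_SUFFIXES is deprecated; use NEW_SUFFIXES instead.',
                      DeprecationWarning, stacklevel=2)
        return _OLD_SUFFIXES
    raise AttributeError(f'module {__name__!r} has no attribute {name!r}')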
files:
A Misc/NEWS.d/next/Library/2024-12-16-22-20-38.gh-issue-121604.m3Xn4G.rst
M Lib/importlib/_bootstrap_external.py
M Lib/importlib/abc.py
M Lib/importlib/machinery.py
M Lib/inspect.py
M Lib/test/test_importlib/test_abc.py
M Lib/test/test_importlib/test_api.py
M Lib/test/test_importlib/test_windows.py
diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py
index fa36159711846f..697f7c55218a8f 100644
--- a/Lib/importlib/_bootstrap_external.py
+++ b/Lib/importlib/_bootstrap_external.py
@@ -716,6 +716,12 @@ def _search_registry(cls, fullname):
@classmethod
def find_spec(cls, fullname, path=None, target=None):
+ _warnings.warn('importlib.machinery.WindowsRegistryFinder is '
+ 'deprecated; use site configuration instead. '
+ 'Future versions of Python may not enable this '
+ 'finder by default.',
+ DeprecationWarning, stacklevel=2)
+
filepath = cls._search_registry(fullname)
if filepath is None:
return None
diff --git a/Lib/importlib/abc.py b/Lib/importlib/abc.py
index eea6b38af6fa13..bb2837d38d83f1 100644
--- a/Lib/importlib/abc.py
+++ b/Lib/importlib/abc.py
@@ -70,6 +70,15 @@ class ResourceLoader(Loader):
"""
+ def __init__(self):
+ import warnings
+ warnings.warn('importlib.abc.ResourceLoader is deprecated in '
+ 'favour of supporting resource loading through '
+ 'importlib.resources.abc.ResourceReader.',
+ DeprecationWarning, stacklevel=2)
+ super().__init__()
+
+
@abc.abstractmethod
def get_data(self, path):
"""Abstract method which when implemented should return the bytes for
@@ -199,6 +208,10 @@ class SourceLoader(_bootstrap_external.SourceLoader, ResourceLoader, ExecutionLo
def path_mtime(self, path):
"""Return the (int) modification time for the path (str)."""
+ import warnings
+ warnings.warn('SourceLoader.path_mtime is deprecated in favour of '
+ 'SourceLoader.path_stats().',
+ DeprecationWarning, stacklevel=2)
if self.path_stats.__func__ is SourceLoader.path_stats:
raise OSError
return int(self.path_stats(path)['mtime'])
diff --git a/Lib/importlib/machinery.py b/Lib/importlib/machinery.py
index 6e294d59bfdcb9..63d726445c3d96 100644
--- a/Lib/importlib/machinery.py
+++ b/Lib/importlib/machinery.py
@@ -3,9 +3,11 @@
from ._bootstrap import ModuleSpec
from ._bootstrap import BuiltinImporter
from ._bootstrap import FrozenImporter
-from ._bootstrap_external import (SOURCE_SUFFIXES, DEBUG_BYTECODE_SUFFIXES,
- OPTIMIZED_BYTECODE_SUFFIXES, BYTECODE_SUFFIXES,
- EXTENSION_SUFFIXES)
+from ._bootstrap_external import (
+ SOURCE_SUFFIXES, BYTECODE_SUFFIXES, EXTENSION_SUFFIXES,
+ DEBUG_BYTECODE_SUFFIXES as _DEBUG_BYTECODE_SUFFIXES,
+ OPTIMIZED_BYTECODE_SUFFIXES as _OPTIMIZED_BYTECODE_SUFFIXES
+)
from ._bootstrap_external import WindowsRegistryFinder
from ._bootstrap_external import PathFinder
from ._bootstrap_external import FileFinder
@@ -27,3 +29,22 @@ def all_suffixes():
'NamespaceLoader', 'OPTIMIZED_BYTECODE_SUFFIXES', 'PathFinder',
'SOURCE_SUFFIXES', 'SourceFileLoader', 'SourcelessFileLoader',
'WindowsRegistryFinder', 'all_suffixes']
+
+
+def __getattr__(name):
+ import warnings
+
+ if name == 'DEBUG_BYTECODE_SUFFIXES':
+ warnings.warn('importlib.machinery.DEBUG_BYTECODE_SUFFIXES is '
+ 'deprecated; use importlib.machinery.BYTECODE_SUFFIXES '
+ 'instead.',
+ DeprecationWarning, stacklevel=2)
+ return _DEBUG_BYTECODE_SUFFIXES
+ elif name == 'OPTIMIZED_BYTECODE_SUFFIXES':
+ warnings.warn('importlib.machinery.OPTIMIZED_BYTECODE_SUFFIXES is '
+ 'deprecated; use importlib.machinery.BYTECODE_SUFFIXES '
+ 'instead.',
+ DeprecationWarning, stacklevel=2)
+ return _OPTIMIZED_BYTECODE_SUFFIXES
+
+ raise AttributeError(f'module {__name__!r} has no attribute {name!r}')
diff --git a/Lib/inspect.py b/Lib/inspect.py
index 5b7c4df8927c87..facad478103668 100644
--- a/Lib/inspect.py
+++ b/Lib/inspect.py
@@ -858,8 +858,7 @@ def getsourcefile(object):
Return None if no way can be identified to get the source.
"""
filename = getfile(object)
- all_bytecode_suffixes = importlib.machinery.DEBUG_BYTECODE_SUFFIXES[:]
- all_bytecode_suffixes += importlib.machinery.OPTIMIZED_BYTECODE_SUFFIXES[:]
+ all_bytecode_suffixes = importlib.machinery.BYTECODE_SUFFIXES[:]
if any(filename.endswith(s) for s in all_bytecode_suffixes):
filename = (os.path.splitext(filename)[0] +
importlib.machinery.SOURCE_SUFFIXES[0])
diff --git a/Lib/test/test_importlib/test_abc.py b/Lib/test/test_importlib/test_abc.py
index 603125f6d926f6..00af2dd712425a 100644
--- a/Lib/test/test_importlib/test_abc.py
+++ b/Lib/test/test_importlib/test_abc.py
@@ -913,5 +913,37 @@ def test_universal_newlines(self):
SourceOnlyLoaderMock=SPLIT_SOL)
+class SourceLoaderDeprecationWarningsTests(unittest.TestCase):
+ """Tests SourceLoader deprecation warnings."""
+
+ def test_deprecated_path_mtime(self):
+ from importlib.abc import SourceLoader
+ class DummySourceLoader(SourceLoader):
+ def get_data(self, path):
+ return b''
+
+ def get_filename(self, fullname):
+ return 'foo.py'
+
+ def path_stats(self, path):
+ return {'mtime': 1}
+
+ loader = DummySourceLoader()
+ with self.assertWarns(DeprecationWarning):
+ loader.path_mtime('foo.py')
+
+
+class ResourceLoaderDeprecationWarningsTests(unittest.TestCase):
+ """Tests ResourceLoader deprecation warnings."""
+
+ def test_deprecated_resource_loader(self):
+ from importlib.abc import ResourceLoader
+ class DummyLoader(ResourceLoader):
+ def get_data(self, path):
+ return b''
+
+ with self.assertWarns(DeprecationWarning):
+ DummyLoader()
+
if __name__ == '__main__':
unittest.main()
diff --git a/Lib/test/test_importlib/test_api.py b/Lib/test/test_importlib/test_api.py
index 51ea5270b1a928..6035b2ca72efb9 100644
--- a/Lib/test/test_importlib/test_api.py
+++ b/Lib/test/test_importlib/test_api.py
@@ -492,5 +492,18 @@ def test_util(self):
support.check__all__(self, util['Source'], extra=extra)
+class TestDeprecations(unittest.TestCase):
+ def test_machinery_deprecated_attributes(self):
+ from importlib import machinery
+ attributes = (
+ 'DEBUG_BYTECODE_SUFFIXES',
+ 'OPTIMIZED_BYTECODE_SUFFIXES',
+ )
+ for attr in attributes:
+ with self.subTest(attr=attr):
+ with self.assertWarns(DeprecationWarning):
+ getattr(machinery, attr)
+
+
if __name__ == '__main__':
unittest.main()
diff --git a/Lib/test/test_importlib/test_windows.py b/Lib/test/test_importlib/test_windows.py
index 8a9a8fffcd10d4..f32680bdbeb9e3 100644
--- a/Lib/test/test_importlib/test_windows.py
+++ b/Lib/test/test_importlib/test_windows.py
@@ -104,6 +104,12 @@ def test_module_not_found(self):
spec = self.machinery.WindowsRegistryFinder.find_spec(self.test_module)
self.assertIsNone(spec)
+ def test_raises_deprecation_warning(self):
+ # WindowsRegistryFinder is not meant to be instantiated, so the
+ # deprecation warning is raised in the 'find_spec' method instead.
+ with self.assertWarns(DeprecationWarning):
+ self.machinery.WindowsRegistryFinder.find_spec('spam')
+
(Frozen_WindowsRegistryFinderTests,
Source_WindowsRegistryFinderTests
) = test_util.test_both(WindowsRegistryFinderTests, machinery=machinery)
diff --git a/Misc/NEWS.d/next/Library/2024-12-16-22-20-38.gh-issue-121604.m3Xn4G.rst b/Misc/NEWS.d/next/Library/2024-12-16-22-20-38.gh-issue-121604.m3Xn4G.rst
new file mode 100644
index 00000000000000..9a6fce8647cc6b
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-12-16-22-20-38.gh-issue-121604.m3Xn4G.rst
@@ -0,0 +1 @@
+Add missing Deprecation warnings for :attr:`importlib.machinery.DEBUG_BYTECODE_SUFFIXES`, :attr:`importlib.machinery.OPTIMIZED_BYTECODE_SUFFIXES`, :class:`importlib.machinery.WindowsRegistryFinder`, :class:`importlib.abc.ResourceLoader`, :meth:`importlib.abc.SourceLoader.path_mtime`.
gh-71339: Use new assertion methods in the multiprocessing tests (GH-128847)
by serhiy-storchaka Jan. 14, 2025
https://github.com/python/cpython/commit/b52de22ac345ad8583bcc57f963e26b35c2ee527
commit: b52de22ac345ad8583bcc57f963e26b35c2ee527
branch: main
author: Serhiy Storchaka <storchaka(a)gmail.com>
committer: serhiy-storchaka <storchaka(a)gmail.com>
date: 2025-01-15T01:17:11+02:00
summary:
gh-71339: Use new assertion methods in the multiprocessing tests (GH-128847)
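For reference, these are the newer unittest helpers this series switches to (added on the main branch for gh-71339); each line notes the assertTrue form it replaces. A small illustrative test case:

import unittest

class NewAssertions(unittest.TestCase):
    def test_helpers(self):
        self.assertStartsWith('spam.log', 'spam')  # was assertTrue(s.startswith('spam'))
        self.assertEndsWith('spam.log', '.log')    # was assertTrue(s.endswith('.log'))
        self.assertHasAttr([], 'append')           # was assertTrue(hasattr([], 'append'))
        self.assertNotHasAttr([], 'update')        # was assertFalse(hasattr([], 'update'))
        self.assertIsSubclass(bool, int)           # was assertTrue(issubclass(bool, int))

if __name__ == '__main__':
    unittest.main()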
files:
M Lib/test/_test_multiprocessing.py
diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py
index 38a03f3391d31d..4b7c3e7fa8bdd7 100644
--- a/Lib/test/_test_multiprocessing.py
+++ b/Lib/test/_test_multiprocessing.py
@@ -319,7 +319,7 @@ def test_current(self):
authkey = current.authkey
self.assertTrue(current.is_alive())
- self.assertTrue(not current.daemon)
+ self.assertFalse(current.daemon)
self.assertIsInstance(authkey, bytes)
self.assertTrue(len(authkey) > 0)
self.assertEqual(current.ident, os.getpid())
@@ -463,7 +463,7 @@ def test_process(self):
self.assertEqual(p.is_alive(), False)
self.assertEqual(p.daemon, True)
self.assertNotIn(p, self.active_children())
- self.assertTrue(type(self.active_children()) is list)
+ self.assertIs(type(self.active_children()), list)
self.assertEqual(p.exitcode, None)
p.start()
@@ -583,8 +583,8 @@ def test_cpu_count(self):
cpus = multiprocessing.cpu_count()
except NotImplementedError:
cpus = 1
- self.assertTrue(type(cpus) is int)
- self.assertTrue(cpus >= 1)
+ self.assertIsInstance(cpus, int)
+ self.assertGreaterEqual(cpus, 1)
def test_active_children(self):
self.assertEqual(type(self.active_children()), list)
@@ -2382,14 +2382,14 @@ def test_getobj_getlock(self):
self.assertEqual(lock, lock3)
arr4 = self.Value('i', 5, lock=False)
- self.assertFalse(hasattr(arr4, 'get_lock'))
- self.assertFalse(hasattr(arr4, 'get_obj'))
+ self.assertNotHasAttr(arr4, 'get_lock')
+ self.assertNotHasAttr(arr4, 'get_obj')
self.assertRaises(AttributeError, self.Value, 'i', 5, lock='navalue')
arr5 = self.RawValue('i', 5)
- self.assertFalse(hasattr(arr5, 'get_lock'))
- self.assertFalse(hasattr(arr5, 'get_obj'))
+ self.assertNotHasAttr(arr5, 'get_lock')
+ self.assertNotHasAttr(arr5, 'get_obj')
class _TestArray(BaseTestCase):
@@ -2462,14 +2462,14 @@ def test_getobj_getlock_obj(self):
self.assertEqual(lock, lock3)
arr4 = self.Array('i', range(10), lock=False)
- self.assertFalse(hasattr(arr4, 'get_lock'))
- self.assertFalse(hasattr(arr4, 'get_obj'))
+ self.assertNotHasAttr(arr4, 'get_lock')
+ self.assertNotHasAttr(arr4, 'get_obj')
self.assertRaises(AttributeError,
self.Array, 'i', range(10), lock='notalock')
arr5 = self.RawArray('i', range(10))
- self.assertFalse(hasattr(arr5, 'get_lock'))
- self.assertFalse(hasattr(arr5, 'get_obj'))
+ self.assertNotHasAttr(arr5, 'get_lock')
+ self.assertNotHasAttr(arr5, 'get_obj')
#
#
@@ -2657,8 +2657,8 @@ def test_namespace(self):
self.assertEqual((n.name, n.job), ('Bob', 'Builder'))
del n.job
self.assertEqual(str(n), "Namespace(name='Bob')")
- self.assertTrue(hasattr(n, 'name'))
- self.assertTrue(not hasattr(n, 'job'))
+ self.assertHasAttr(n, 'name')
+ self.assertNotHasAttr(n, 'job')
#
#
@@ -4938,13 +4938,9 @@ def test_import(self):
for name in modules:
__import__(name)
mod = sys.modules[name]
- self.assertTrue(hasattr(mod, '__all__'), name)
-
+ self.assertHasAttr(mod, '__all__', name)
for attr in mod.__all__:
- self.assertTrue(
- hasattr(mod, attr),
- '%r does not have attribute %r' % (mod, attr)
- )
+ self.assertHasAttr(mod, attr)
#
# Quick test that logging works -- does not test logging output
@@ -4957,7 +4953,7 @@ class _TestLogging(BaseTestCase):
def test_enable_logging(self):
logger = multiprocessing.get_logger()
logger.setLevel(util.SUBWARNING)
- self.assertTrue(logger is not None)
+ self.assertIsNotNone(logger)
logger.debug('this will not be printed')
logger.info('nor will this')
logger.setLevel(LOG_LEVEL)
@@ -5753,9 +5749,8 @@ def test_set_get(self):
self.assertEqual(multiprocessing.get_start_method(), method)
ctx = multiprocessing.get_context()
self.assertEqual(ctx.get_start_method(), method)
- self.assertTrue(type(ctx).__name__.lower().startswith(method))
- self.assertTrue(
- ctx.Process.__name__.lower().startswith(method))
+ self.assertStartsWith(type(ctx).__name__.lower(), method)
+ self.assertStartsWith(ctx.Process.__name__.lower(), method)
self.check_context(multiprocessing)
count += 1
finally:
@@ -5956,9 +5951,9 @@ def check_resource_tracker_death(self, signum, should_die):
if should_die:
self.assertEqual(len(all_warn), 1)
the_warn = all_warn[0]
- self.assertTrue(issubclass(the_warn.category, UserWarning))
- self.assertTrue("resource_tracker: process died"
- in str(the_warn.message))
+ self.assertIsSubclass(the_warn.category, UserWarning)
+ self.assertIn("resource_tracker: process died",
+ str(the_warn.message))
else:
self.assertEqual(len(all_warn), 0)
@@ -6163,8 +6158,8 @@ def is_alive(self):
Process=FailingForkProcess))
p.close()
p.join()
- self.assertFalse(
- any(process.is_alive() for process in forked_processes))
+ for process in forked_processes:
+ self.assertFalse(process.is_alive(), process)
@hashlib_helper.requires_hashdigest('sha256')
Jan. 14, 2025
https://github.com/python/cpython/commit/f7ceb317aec498823555885a4b7fed5e0244f300
commit: f7ceb317aec498823555885a4b7fed5e0244f300
branch: main
author: Serhiy Storchaka <storchaka(a)gmail.com>
committer: serhiy-storchaka <storchaka(a)gmail.com>
date: 2025-01-14T22:40:45+02:00
summary:
gh-71339: Use new assertion methods in test_logging (GH-128828)
files:
M Lib/test/test_logging.py
diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py
index 44c854f02a73c6..2e5f6475ae3b1e 100644
--- a/Lib/test/test_logging.py
+++ b/Lib/test/test_logging.py
@@ -1134,7 +1134,7 @@ def test_basic(self):
self.assertEqual(mailfrom, 'me')
self.assertEqual(rcpttos, ['you'])
self.assertIn('\nSubject: Log\n', data)
- self.assertTrue(data.endswith('\n\nHello \u2713'))
+ self.assertEndsWith(data, '\n\nHello \u2713')
h.close()
def process_message(self, *args):
@@ -3524,7 +3524,7 @@ def test_config14_ok(self):
self.assertEqual(h.foo, 'bar')
self.assertEqual(h.terminator, '!\n')
logging.warning('Exclamation')
- self.assertTrue(output.getvalue().endswith('Exclamation!\n'))
+ self.assertEndsWith(output.getvalue(), 'Exclamation!\n')
def test_config15_ok(self):
@@ -4281,7 +4281,7 @@ def test_queue_handler(self):
msg = self.next_message()
self.que_logger.warning(msg)
data = self.queue.get_nowait()
- self.assertTrue(isinstance(data, logging.LogRecord))
+ self.assertIsInstance(data, logging.LogRecord)
self.assertEqual(data.name, self.que_logger.name)
self.assertEqual((data.msg, data.args), (msg, None))
@@ -4879,14 +4879,14 @@ def test_formatting(self):
r.removeHandler(h)
h.close()
r = h.records[0]
- self.assertTrue(r.exc_text.startswith('Traceback (most recent '
- 'call last):\n'))
- self.assertTrue(r.exc_text.endswith('\nRuntimeError: '
- 'deliberate mistake'))
- self.assertTrue(r.stack_info.startswith('Stack (most recent '
- 'call last):\n'))
- self.assertTrue(r.stack_info.endswith('logging.exception(\'failed\', '
- 'stack_info=True)'))
+ self.assertStartsWith(r.exc_text,
+ 'Traceback (most recent call last):\n')
+ self.assertEndsWith(r.exc_text,
+ '\nRuntimeError: deliberate mistake')
+ self.assertStartsWith(r.stack_info,
+ 'Stack (most recent call last):\n')
+ self.assertEndsWith(r.stack_info,
+ "logging.exception('failed', stack_info=True)")
class LastResortTest(BaseTest):
@@ -5229,8 +5229,8 @@ class LogRecordTest(BaseTest):
def test_str_rep(self):
r = logging.makeLogRecord({})
s = str(r)
- self.assertTrue(s.startswith('<LogRecord: '))
- self.assertTrue(s.endswith('>'))
+ self.assertStartsWith(s, '<LogRecord: ')
+ self.assertEndsWith(s, '>')
def test_dict_arg(self):
h = RecordingHandler()
@@ -5880,14 +5880,14 @@ def test_extra_in_records(self):
self.adapter.critical('foo should be here')
self.assertEqual(len(self.recording.records), 1)
record = self.recording.records[0]
- self.assertTrue(hasattr(record, 'foo'))
+ self.assertHasAttr(record, 'foo')
self.assertEqual(record.foo, '1')
def test_extra_not_merged_by_default(self):
self.adapter.critical('foo should NOT be here', extra={'foo': 'nope'})
self.assertEqual(len(self.recording.records), 1)
record = self.recording.records[0]
- self.assertFalse(hasattr(record, 'foo'))
+ self.assertNotHasAttr(record, 'foo')
def test_extra_merged(self):
self.adapter = logging.LoggerAdapter(logger=self.logger,
@@ -5897,8 +5897,8 @@ def test_extra_merged(self):
self.adapter.critical('foo and bar should be here', extra={'bar': '2'})
self.assertEqual(len(self.recording.records), 1)
record = self.recording.records[0]
- self.assertTrue(hasattr(record, 'foo'))
- self.assertTrue(hasattr(record, 'bar'))
+ self.assertHasAttr(record, 'foo')
+ self.assertHasAttr(record, 'bar')
self.assertEqual(record.foo, '1')
self.assertEqual(record.bar, '2')
@@ -5910,7 +5910,7 @@ def test_extra_merged_log_call_has_precedence(self):
self.adapter.critical('foo shall be min', extra={'foo': '2'})
self.assertEqual(len(self.recording.records), 1)
record = self.recording.records[0]
- self.assertTrue(hasattr(record, 'foo'))
+ self.assertHasAttr(record, 'foo')
self.assertEqual(record.foo, '2')
@@ -6624,18 +6624,19 @@ def namer(filename):
p = '%s.log.' % prefix
for c in candidates:
d, fn = os.path.split(c)
- self.assertTrue(fn.startswith(p))
+ self.assertStartsWith(fn, p)
elif prefix.startswith('d.e'):
for c in candidates:
d, fn = os.path.split(c)
- self.assertTrue(fn.endswith('.log'), fn)
- self.assertTrue(fn.startswith(prefix + '.') and
- fn[len(prefix) + 2].isdigit())
+ self.assertEndsWith(fn, '.log')
+ self.assertStartsWith(fn, prefix + '.')
+ self.assertTrue(fn[len(prefix) + 2].isdigit())
elif prefix == 'g':
for c in candidates:
d, fn = os.path.split(c)
- self.assertTrue(fn.endswith('.oldlog'))
- self.assertTrue(fn.startswith('g') and fn[1].isdigit())
+ self.assertEndsWith(fn, '.oldlog')
+ self.assertStartsWith(fn, 'g')
+ self.assertTrue(fn[1].isdigit())
def test_compute_files_to_delete_same_filename_different_extensions(self):
# See GH-93205 for background
@@ -6673,7 +6674,7 @@ def test_compute_files_to_delete_same_filename_different_extensions(self):
matcher = re.compile(r"^\d{4}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}\Z")
for c in candidates:
d, fn = os.path.split(c)
- self.assertTrue(fn.startswith(prefix+'.'))
+ self.assertStartsWith(fn, prefix+'.')
suffix = fn[(len(prefix)+1):]
self.assertRegex(suffix, matcher)
gh-115999: Specialize `LOAD_ATTR` for instance and class receivers in free-threaded builds (#128164)
by mpage Jan. 14, 2025
https://github.com/python/cpython/commit/b5ee0258bf5bb60a5a5a65c64717853e06b64808
commit: b5ee0258bf5bb60a5a5a65c64717853e06b64808
branch: main
author: mpage <mpage(a)meta.com>
committer: mpage <mpage(a)cs.stanford.edu>
date: 2025-01-14T11:56:11-08:00
summary:
gh-115999: Specialize `LOAD_ATTR` for instance and class receivers in free-threaded builds (#128164)
Finish specialization for LOAD_ATTR in the free-threaded build by adding support for class and instance receivers.
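A hedged illustration of what "instance and class receivers" means here: attribute loads on an instance (obj.attr) and on a class object (C.attr). After enough calls the adaptive interpreter replaces the generic LOAD_ATTR with specialized forms, which dis can show; the warm-up count below is a generous guess, not the exact internal threshold:

import dis

class C:
    attr = 'spam'

def read_attrs(obj):
    return obj.attr, C.attr   # instance receiver and class receiver

for _ in range(64):           # warm up so the bytecode can specialize
    read_attrs(C())

# On a build with specialization enabled, specialized LOAD_ATTR_* forms appear here.
dis.dis(read_attrs, adaptive=True)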
files:
M Include/cpython/pystats.h
M Include/internal/pycore_dict.h
M Include/internal/pycore_opcode_metadata.h
M Include/internal/pycore_uop_metadata.h
M Lib/test/test_capi/test_type.py
M Lib/test/test_descr.py
M Lib/test/test_generated_cases.py
M Lib/test/test_opcache.py
M Objects/dictobject.c
M Python/bytecodes.c
M Python/executor_cases.c.h
M Python/generated_cases.c.h
M Python/optimizer_bytecodes.c
M Python/optimizer_cases.c.h
M Python/specialize.c
M Tools/cases_generator/analyzer.py
M Tools/cases_generator/generators_common.py
M Tools/cases_generator/stack.py
diff --git a/Include/cpython/pystats.h b/Include/cpython/pystats.h
index 29ef0c0e4d4e72..ee8885cda7b60d 100644
--- a/Include/cpython/pystats.h
+++ b/Include/cpython/pystats.h
@@ -31,7 +31,7 @@
#define PYSTATS_MAX_UOP_ID 512
-#define SPECIALIZATION_FAILURE_KINDS 36
+#define SPECIALIZATION_FAILURE_KINDS 37
/* Stats for determining who is calling PyEval_EvalFrame */
#define EVAL_CALL_TOTAL 0
diff --git a/Include/internal/pycore_dict.h b/Include/internal/pycore_dict.h
index 71927006d1cd48..74ac8f2148174c 100644
--- a/Include/internal/pycore_dict.h
+++ b/Include/internal/pycore_dict.h
@@ -114,6 +114,16 @@ extern Py_ssize_t _Py_dict_lookup_threadsafe_stackref(PyDictObject *mp, PyObject
extern Py_ssize_t _PyDict_LookupIndex(PyDictObject *, PyObject *);
extern Py_ssize_t _PyDictKeys_StringLookup(PyDictKeysObject* dictkeys, PyObject *key);
+
+/* Look up a string key in an all unicode dict keys, assign the keys object a version, and
+ * store it in version.
+ *
+ * Returns DKIX_ERROR if key is not a string or if the keys object is not all
+ * strings.
+ *
+ * Returns DKIX_EMPTY if the key is not present.
+ */
+extern Py_ssize_t _PyDictKeys_StringLookupAndVersion(PyDictKeysObject* dictkeys, PyObject *key, uint32_t *version);
extern Py_ssize_t _PyDictKeys_StringLookupSplit(PyDictKeysObject* dictkeys, PyObject *key);
PyAPI_FUNC(PyObject *)_PyDict_LoadGlobal(PyDictObject *, PyDictObject *, PyObject *);
PyAPI_FUNC(void) _PyDict_LoadGlobalStackRef(PyDictObject *, PyDictObject *, PyObject *, _PyStackRef *);
diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h
index 5365e2a5c6b8cd..0c0a6145bdbb27 100644
--- a/Include/internal/pycore_opcode_metadata.h
+++ b/Include/internal/pycore_opcode_metadata.h
@@ -1540,7 +1540,7 @@ int _PyOpcode_max_stack_effect(int opcode, int oparg, int *effect) {
return 0;
}
case LOAD_ATTR_WITH_HINT: {
- *effect = Py_MAX(0, (oparg & 1));
+ *effect = Py_MAX(1, (oparg & 1));
return 0;
}
case LOAD_BUILD_CLASS: {
diff --git a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h
index 721fa94da19615..5670fe26f72071 100644
--- a/Include/internal/pycore_uop_metadata.h
+++ b/Include/internal/pycore_uop_metadata.h
@@ -862,7 +862,7 @@ int _PyUop_num_popped(int opcode, int oparg)
case _CHECK_ATTR_WITH_HINT:
return 0;
case _LOAD_ATTR_WITH_HINT:
- return 1;
+ return 2;
case _LOAD_ATTR_SLOT_0:
return 1;
case _LOAD_ATTR_SLOT_1:
diff --git a/Lib/test/test_capi/test_type.py b/Lib/test/test_capi/test_type.py
index 54c83e09f892a0..92d056e802eeed 100644
--- a/Lib/test/test_capi/test_type.py
+++ b/Lib/test/test_capi/test_type.py
@@ -1,4 +1,4 @@
-from test.support import import_helper
+from test.support import import_helper, Py_GIL_DISABLED, refleak_helper
import unittest
_testcapi = import_helper.import_module('_testcapi')
@@ -37,6 +37,9 @@ class D(A, C): pass
# as well
type_freeze(D)
+ @unittest.skipIf(
+ Py_GIL_DISABLED and refleak_helper.hunting_for_refleaks(),
+ "Specialization failure triggers gh-127773")
def test_freeze_meta(self):
"""test PyType_Freeze() with overridden MRO"""
type_freeze = _testcapi.type_freeze
diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py
index 51f97bb51f7bd2..a7ebc9e8be0294 100644
--- a/Lib/test/test_descr.py
+++ b/Lib/test/test_descr.py
@@ -7,6 +7,7 @@
import random
import string
import sys
+import textwrap
import types
import unittest
import warnings
@@ -15,6 +16,7 @@
from copy import deepcopy
from contextlib import redirect_stdout
from test import support
+from test.support.script_helper import assert_python_ok
try:
import _testcapi
@@ -5222,6 +5224,7 @@ def test_type_lookup_mro_reference(self):
# Issue #14199: _PyType_Lookup() has to keep a strong reference to
# the type MRO because it may be modified during the lookup, if
# __bases__ is set during the lookup for example.
+ code = textwrap.dedent("""
class MyKey(object):
def __hash__(self):
return hash('mykey')
@@ -5237,12 +5240,29 @@ class Base2(object):
mykey = 'from Base2'
mykey2 = 'from Base2'
- with self.assertWarnsRegex(RuntimeWarning, 'X'):
- X = type('X', (Base,), {MyKey(): 5})
- # mykey is read from Base
- self.assertEqual(X.mykey, 'from Base')
- # mykey2 is read from Base2 because MyKey.__eq__ has set __bases__
- self.assertEqual(X.mykey2, 'from Base2')
+ X = type('X', (Base,), {MyKey(): 5})
+
+ bases_before = ",".join([c.__name__ for c in X.__bases__])
+ print(f"before={bases_before}")
+
+ # mykey is initially read from Base, however, the lookup will be perfomed
+ # again if specialization fails. The second lookup will use the new
+ # mro set by __eq__.
+ print(X.mykey)
+
+ bases_after = ",".join([c.__name__ for c in X.__bases__])
+ print(f"after={bases_after}")
+
+ # mykey2 is read from Base2 because MyKey.__eq__ has set __bases_
+ print(f"mykey2={X.mykey2}")
+ """)
+ _, out, err = assert_python_ok("-c", code)
+ err = err.decode()
+ self.assertRegex(err, "RuntimeWarning: .*X")
+ out = out.decode()
+ self.assertRegex(out, "before=Base")
+ self.assertRegex(out, "after=Base2")
+ self.assertRegex(out, "mykey2=from Base2")
class PicklingTests(unittest.TestCase):
diff --git a/Lib/test/test_generated_cases.py b/Lib/test/test_generated_cases.py
index 4a1c99edacb71a..7a50a29bb0126c 100644
--- a/Lib/test/test_generated_cases.py
+++ b/Lib/test/test_generated_cases.py
@@ -1639,12 +1639,16 @@ def test_escaping_call_next_to_cmacro(self):
"""
self.run_cases_test(input, output)
- def test_pop_dead_inputs_all_live(self):
+ def test_pystackref_frompyobject_new_next_to_cmacro(self):
input = """
- inst(OP, (a, b --)) {
- POP_DEAD_INPUTS();
- HAM(a, b);
- INPUTS_DEAD();
+ inst(OP, (-- out1, out2)) {
+ PyObject *obj = SPAM();
+ #ifdef Py_GIL_DISABLED
+ out1 = PyStackRef_FromPyObjectNew(obj);
+ #else
+ out1 = PyStackRef_FromPyObjectNew(obj);
+ #endif
+ out2 = PyStackRef_FromPyObjectNew(obj);
}
"""
output = """
@@ -1652,22 +1656,28 @@ def test_pop_dead_inputs_all_live(self):
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP);
- _PyStackRef a;
- _PyStackRef b;
- b = stack_pointer[-1];
- a = stack_pointer[-2];
- HAM(a, b);
- stack_pointer += -2;
+ _PyStackRef out1;
+ _PyStackRef out2;
+ PyObject *obj = SPAM();
+ #ifdef Py_GIL_DISABLED
+ out1 = PyStackRef_FromPyObjectNew(obj);
+ #else
+ out1 = PyStackRef_FromPyObjectNew(obj);
+ #endif
+ out2 = PyStackRef_FromPyObjectNew(obj);
+ stack_pointer[0] = out1;
+ stack_pointer[1] = out2;
+ stack_pointer += 2;
assert(WITHIN_STACK_BOUNDS());
DISPATCH();
}
"""
self.run_cases_test(input, output)
- def test_pop_dead_inputs_some_live(self):
+ def test_pop_input(self):
input = """
- inst(OP, (a, b, c --)) {
- POP_DEAD_INPUTS();
+ inst(OP, (a, b --)) {
+ POP_INPUT(b);
HAM(a);
INPUTS_DEAD();
}
@@ -1678,8 +1688,10 @@ def test_pop_dead_inputs_some_live(self):
next_instr += 1;
INSTRUCTION_STATS(OP);
_PyStackRef a;
- a = stack_pointer[-3];
- stack_pointer += -2;
+ _PyStackRef b;
+ b = stack_pointer[-1];
+ a = stack_pointer[-2];
+ stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
HAM(a);
stack_pointer += -1;
@@ -1689,29 +1701,23 @@ def test_pop_dead_inputs_some_live(self):
"""
self.run_cases_test(input, output)
- def test_pop_dead_inputs_with_output(self):
+ def test_pop_input_with_empty_stack(self):
input = """
- inst(OP, (a, b -- c)) {
- POP_DEAD_INPUTS();
- c = SPAM();
+ inst(OP, (--)) {
+ POP_INPUT(foo);
}
"""
- output = """
- TARGET(OP) {
- frame->instr_ptr = next_instr;
- next_instr += 1;
- INSTRUCTION_STATS(OP);
- _PyStackRef c;
- stack_pointer += -2;
- assert(WITHIN_STACK_BOUNDS());
- c = SPAM();
- stack_pointer[0] = c;
- stack_pointer += 1;
- assert(WITHIN_STACK_BOUNDS());
- DISPATCH();
+ with self.assertRaises(SyntaxError):
+ self.run_cases_test(input, "")
+
+ def test_pop_input_with_non_tos(self):
+ input = """
+ inst(OP, (a, b --)) {
+ POP_INPUT(a);
}
"""
- self.run_cases_test(input, output)
+ with self.assertRaises(SyntaxError):
+ self.run_cases_test(input, "")
def test_no_escaping_calls_in_branching_macros(self):
diff --git a/Lib/test/test_opcache.py b/Lib/test/test_opcache.py
index c7cd4c2e8a3146..b80ccbf17f1ee6 100644
--- a/Lib/test/test_opcache.py
+++ b/Lib/test/test_opcache.py
@@ -564,6 +564,16 @@ def instantiate():
instantiate()
+def make_deferred_ref_count_obj():
+ """Create an object that uses deferred reference counting.
+
+ Only objects that use deferred refence counting may be stored in inline
+ caches in free-threaded builds. This constructs a new class named Foo,
+ which uses deferred reference counting.
+ """
+ return type("Foo", (object,), {})
+
+
@threading_helper.requires_working_threading()
class TestRacesDoNotCrash(TestBase):
# Careful with these. Bigger numbers have a higher chance of catching bugs,
@@ -714,11 +724,11 @@ def write(items):
opname = "FOR_ITER_LIST"
self.assert_races_do_not_crash(opname, get_items, read, write)
- @requires_specialization
+ @requires_specialization_ft
def test_load_attr_class(self):
def get_items():
class C:
- a = object()
+ a = make_deferred_ref_count_obj()
items = []
for _ in range(self.ITEMS):
@@ -739,12 +749,45 @@ def write(items):
del item.a
except AttributeError:
pass
- item.a = object()
+ item.a = make_deferred_ref_count_obj()
opname = "LOAD_ATTR_CLASS"
self.assert_races_do_not_crash(opname, get_items, read, write)
- @requires_specialization
+ @requires_specialization_ft
+ def test_load_attr_class_with_metaclass_check(self):
+ def get_items():
+ class Meta(type):
+ pass
+
+ class C(metaclass=Meta):
+ a = make_deferred_ref_count_obj()
+
+ items = []
+ for _ in range(self.ITEMS):
+ item = C
+ items.append(item)
+ return items
+
+ def read(items):
+ for item in items:
+ try:
+ item.a
+ except AttributeError:
+ pass
+
+ def write(items):
+ for item in items:
+ try:
+ del item.a
+ except AttributeError:
+ pass
+ item.a = make_deferred_ref_count_obj()
+
+ opname = "LOAD_ATTR_CLASS_WITH_METACLASS_CHECK"
+ self.assert_races_do_not_crash(opname, get_items, read, write)
+
+ @requires_specialization_ft
def test_load_attr_getattribute_overridden(self):
def get_items():
class C:
@@ -774,7 +817,7 @@ def write(items):
opname = "LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN"
self.assert_races_do_not_crash(opname, get_items, read, write)
- @requires_specialization
+ @requires_specialization_ft
def test_load_attr_instance_value(self):
def get_items():
class C:
@@ -798,7 +841,7 @@ def write(items):
opname = "LOAD_ATTR_INSTANCE_VALUE"
self.assert_races_do_not_crash(opname, get_items, read, write)
- @requires_specialization
+ @requires_specialization_ft
def test_load_attr_method_lazy_dict(self):
def get_items():
class C(Exception):
@@ -828,7 +871,7 @@ def write(items):
opname = "LOAD_ATTR_METHOD_LAZY_DICT"
self.assert_races_do_not_crash(opname, get_items, read, write)
- @requires_specialization
+ @requires_specialization_ft
def test_load_attr_method_no_dict(self):
def get_items():
class C:
@@ -859,7 +902,7 @@ def write(items):
opname = "LOAD_ATTR_METHOD_NO_DICT"
self.assert_races_do_not_crash(opname, get_items, read, write)
- @requires_specialization
+ @requires_specialization_ft
def test_load_attr_method_with_values(self):
def get_items():
class C:
@@ -914,7 +957,7 @@ def write(items):
opname = "LOAD_ATTR_MODULE"
self.assert_races_do_not_crash(opname, get_items, read, write)
- @requires_specialization
+ @requires_specialization_ft
def test_load_attr_property(self):
def get_items():
class C:
@@ -944,7 +987,34 @@ def write(items):
opname = "LOAD_ATTR_PROPERTY"
self.assert_races_do_not_crash(opname, get_items, read, write)
- @requires_specialization
+ @requires_specialization_ft
+ def test_load_attr_slot(self):
+ def get_items():
+ class C:
+ __slots__ = ["a", "b"]
+
+ items = []
+ for i in range(self.ITEMS):
+ item = C()
+ item.a = i
+ item.b = i + self.ITEMS
+ items.append(item)
+ return items
+
+ def read(items):
+ for item in items:
+ item.a
+ item.b
+
+ def write(items):
+ for item in items:
+ item.a = 100
+ item.b = 200
+
+ opname = "LOAD_ATTR_SLOT"
+ self.assert_races_do_not_crash(opname, get_items, read, write)
+
+ @requires_specialization_ft
def test_load_attr_with_hint(self):
def get_items():
class C:
diff --git a/Objects/dictobject.c b/Objects/dictobject.c
index 2a054c3f2ae0ff..82789d5e56f523 100644
--- a/Objects/dictobject.c
+++ b/Objects/dictobject.c
@@ -1129,6 +1129,24 @@ dictkeys_generic_lookup(PyDictObject *mp, PyDictKeysObject* dk, PyObject *key, P
return do_lookup(mp, dk, key, hash, compare_generic);
}
+static bool
+check_keys_unicode(PyDictKeysObject *dk, PyObject *key)
+{
+ return PyUnicode_CheckExact(key) && (dk->dk_kind != DICT_KEYS_GENERAL);
+}
+
+static Py_ssize_t
+hash_unicode_key(PyObject *key)
+{
+ assert(PyUnicode_CheckExact(key));
+ Py_hash_t hash = unicode_get_hash(key);
+ if (hash == -1) {
+ hash = PyUnicode_Type.tp_hash(key);
+ assert(hash != -1);
+ }
+ return hash;
+}
+
#ifdef Py_GIL_DISABLED
static Py_ssize_t
unicodekeys_lookup_unicode_threadsafe(PyDictKeysObject* dk, PyObject *key,
@@ -1167,21 +1185,28 @@ unicodekeys_lookup_split(PyDictKeysObject* dk, PyObject *key, Py_hash_t hash)
Py_ssize_t
_PyDictKeys_StringLookup(PyDictKeysObject* dk, PyObject *key)
{
- DictKeysKind kind = dk->dk_kind;
- if (!PyUnicode_CheckExact(key) || kind == DICT_KEYS_GENERAL) {
+ if (!check_keys_unicode(dk, key)) {
return DKIX_ERROR;
}
- Py_hash_t hash = unicode_get_hash(key);
- if (hash == -1) {
- hash = PyUnicode_Type.tp_hash(key);
- if (hash == -1) {
- PyErr_Clear();
- return DKIX_ERROR;
- }
- }
+ Py_hash_t hash = hash_unicode_key(key);
return unicodekeys_lookup_unicode(dk, key, hash);
}
+Py_ssize_t
+_PyDictKeys_StringLookupAndVersion(PyDictKeysObject *dk, PyObject *key, uint32_t *version)
+{
+ if (!check_keys_unicode(dk, key)) {
+ return DKIX_ERROR;
+ }
+ Py_ssize_t ix;
+ Py_hash_t hash = hash_unicode_key(key);
+ LOCK_KEYS(dk);
+ ix = unicodekeys_lookup_unicode(dk, key, hash);
+ *version = _PyDictKeys_GetVersionForCurrentState(_PyInterpreterState_GET(), dk);
+ UNLOCK_KEYS(dk);
+ return ix;
+}
+
/* Like _PyDictKeys_StringLookup() but only works on split keys. Note
* that in free-threaded builds this locks the keys object as required.
*/
@@ -1926,6 +1951,16 @@ build_indices_unicode(PyDictKeysObject *keys, PyDictUnicodeEntry *ep, Py_ssize_t
}
}
+static void
+invalidate_and_clear_inline_values(PyDictValues *values)
+{
+ assert(values->embedded);
+ FT_ATOMIC_STORE_UINT8(values->valid, 0);
+ for (int i = 0; i < values->capacity; i++) {
+ FT_ATOMIC_STORE_PTR_RELEASE(values->values[i], NULL);
+ }
+}
+
/*
Restructure the table by allocating a new table and reinserting all
items again. When entries have been deleted, the new table may
@@ -2017,7 +2052,7 @@ dictresize(PyInterpreterState *interp, PyDictObject *mp,
if (oldvalues->embedded) {
assert(oldvalues->embedded == 1);
assert(oldvalues->valid == 1);
- FT_ATOMIC_STORE_UINT8(oldvalues->valid, 0);
+ invalidate_and_clear_inline_values(oldvalues);
}
else {
free_values(oldvalues, IS_DICT_SHARED(mp));
@@ -7007,7 +7042,13 @@ _PyObject_TryGetInstanceAttribute(PyObject *obj, PyObject *name, PyObject **attr
#ifdef Py_GIL_DISABLED
PyObject *value = _Py_atomic_load_ptr_acquire(&values->values[ix]);
- if (value == NULL || _Py_TryIncrefCompare(&values->values[ix], value)) {
+ if (value == NULL) {
+ if (FT_ATOMIC_LOAD_UINT8(values->valid)) {
+ *attr = NULL;
+ return true;
+ }
+ }
+ else if (_Py_TryIncrefCompare(&values->values[ix], value)) {
*attr = value;
return true;
}
@@ -7345,7 +7386,7 @@ _PyDict_DetachFromObject(PyDictObject *mp, PyObject *obj)
}
mp->ma_values = values;
- FT_ATOMIC_STORE_UINT8(_PyObject_InlineValues(obj)->valid, 0);
+ invalidate_and_clear_inline_values(_PyObject_InlineValues(obj));
assert(_PyObject_InlineValuesConsistencyCheck(obj));
ASSERT_CONSISTENT(mp);
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index cec530fefffefb..a906ded365650c 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -2190,18 +2190,23 @@ dummy_func(
PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
assert(Py_TYPE(owner_o)->tp_dictoffset < 0);
assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_INLINE_VALUES);
- DEOPT_IF(!_PyObject_InlineValues(owner_o)->valid);
+ DEOPT_IF(!FT_ATOMIC_LOAD_UINT8(_PyObject_InlineValues(owner_o)->valid));
}
split op(_LOAD_ATTR_INSTANCE_VALUE, (offset/1, owner -- attr, null if (oparg & 1))) {
PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
PyObject **value_ptr = (PyObject**)(((char *)owner_o) + offset);
- PyObject *attr_o = *value_ptr;
+ PyObject *attr_o = FT_ATOMIC_LOAD_PTR_ACQUIRE(*value_ptr);
DEOPT_IF(attr_o == NULL);
+ #ifdef Py_GIL_DISABLED
+ if (!_Py_TryIncrefCompareStackRef(value_ptr, attr_o, &attr)) {
+ DEOPT_IF(true);
+ }
+ #else
+ attr = PyStackRef_FromPyObjectNew(attr_o);
+ #endif
STAT_INC(LOAD_ATTR, hit);
- Py_INCREF(attr_o);
null = PyStackRef_NULL;
- attr = PyStackRef_FromPyObjectSteal(attr_o);
DECREF_INPUTS();
}
@@ -2227,9 +2232,8 @@ dummy_func(
assert(index < FT_ATOMIC_LOAD_SSIZE_RELAXED(mod_keys->dk_nentries));
PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(mod_keys) + index;
PyObject *attr_o = FT_ATOMIC_LOAD_PTR_RELAXED(ep->me_value);
- DEAD(mod_keys);
// Clear mod_keys from stack in case we need to deopt
- POP_DEAD_INPUTS();
+ POP_INPUT(mod_keys);
DEOPT_IF(attr_o == NULL);
#ifdef Py_GIL_DISABLED
int increfed = _Py_TryIncrefCompareStackRef(&ep->me_value, attr_o, &attr);
@@ -2251,30 +2255,50 @@ dummy_func(
_LOAD_ATTR_MODULE_FROM_KEYS +
unused/5;
- op(_CHECK_ATTR_WITH_HINT, (owner -- owner)) {
+ op(_CHECK_ATTR_WITH_HINT, (owner -- owner, dict: PyDictObject *)) {
PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_MANAGED_DICT);
- PyDictObject *dict = _PyObject_GetManagedDict(owner_o);
- EXIT_IF(dict == NULL);
- assert(PyDict_CheckExact((PyObject *)dict));
+ PyDictObject *dict_o = _PyObject_GetManagedDict(owner_o);
+ EXIT_IF(dict_o == NULL);
+ assert(PyDict_CheckExact((PyObject *)dict_o));
+ dict = dict_o;
}
- op(_LOAD_ATTR_WITH_HINT, (hint/1, owner -- attr, null if (oparg & 1))) {
- PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
+ op(_LOAD_ATTR_WITH_HINT, (hint/1, owner, dict: PyDictObject * -- attr, null if (oparg & 1))) {
PyObject *attr_o;
+ if (!LOCK_OBJECT(dict)) {
+ POP_INPUT(dict);
+ DEOPT_IF(true);
+ }
- PyDictObject *dict = _PyObject_GetManagedDict(owner_o);
- DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries);
+ if (hint >= (size_t)dict->ma_keys->dk_nentries) {
+ UNLOCK_OBJECT(dict);
+ POP_INPUT(dict);
+ DEOPT_IF(true);
+ }
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
- DEOPT_IF(!DK_IS_UNICODE(dict->ma_keys));
+ if (dict->ma_keys->dk_kind != DICT_KEYS_UNICODE) {
+ UNLOCK_OBJECT(dict);
+ POP_INPUT(dict);
+ DEOPT_IF(true);
+ }
PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint;
- DEOPT_IF(ep->me_key != name);
+ if (ep->me_key != name) {
+ UNLOCK_OBJECT(dict);
+ POP_INPUT(dict);
+ DEOPT_IF(true);
+ }
attr_o = ep->me_value;
- DEOPT_IF(attr_o == NULL);
+ if (attr_o == NULL) {
+ UNLOCK_OBJECT(dict);
+ POP_INPUT(dict);
+ DEOPT_IF(true);
+ }
STAT_INC(LOAD_ATTR, hit);
- Py_INCREF(attr_o);
- attr = PyStackRef_FromPyObjectSteal(attr_o);
+ attr = PyStackRef_FromPyObjectNew(attr_o);
+ UNLOCK_OBJECT(dict);
+ DEAD(dict);
null = PyStackRef_NULL;
DECREF_INPUTS();
}
@@ -2289,12 +2313,17 @@ dummy_func(
split op(_LOAD_ATTR_SLOT, (index/1, owner -- attr, null if (oparg & 1))) {
PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
- char *addr = (char *)owner_o + index;
- PyObject *attr_o = *(PyObject **)addr;
+ PyObject **addr = (PyObject **)((char *)owner_o + index);
+ PyObject *attr_o = FT_ATOMIC_LOAD_PTR(*addr);
DEOPT_IF(attr_o == NULL);
+ #ifdef Py_GIL_DISABLED
+ int increfed = _Py_TryIncrefCompareStackRef(addr, attr_o, &attr);
+ DEOPT_IF(!increfed);
+ #else
+ attr = PyStackRef_FromPyObjectNew(attr_o);
+ #endif
STAT_INC(LOAD_ATTR, hit);
null = PyStackRef_NULL;
- attr = PyStackRef_FromPyObjectNew(attr_o);
DECREF_INPUTS();
}
@@ -2309,7 +2338,7 @@ dummy_func(
EXIT_IF(!PyType_Check(owner_o));
assert(type_version != 0);
- EXIT_IF(((PyTypeObject *)owner_o)->tp_version_tag != type_version);
+ EXIT_IF(FT_ATOMIC_LOAD_UINT_RELAXED(((PyTypeObject *)owner_o)->tp_version_tag) != type_version);
}
split op(_LOAD_ATTR_CLASS, (descr/4, owner -- attr, null if (oparg & 1))) {
@@ -2363,7 +2392,7 @@ dummy_func(
DEOPT_IF(tstate->interp->eval_frame);
PyTypeObject *cls = Py_TYPE(owner_o);
assert(type_version != 0);
- DEOPT_IF(cls->tp_version_tag != type_version);
+ DEOPT_IF(FT_ATOMIC_LOAD_UINT_RELAXED(cls->tp_version_tag) != type_version);
assert(Py_IS_TYPE(getattribute, &PyFunction_Type));
PyFunctionObject *f = (PyFunctionObject *)getattribute;
assert(func_version != 0);
@@ -3281,13 +3310,15 @@ dummy_func(
op(_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT, (owner -- owner)) {
PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_INLINE_VALUES);
- DEOPT_IF(!_PyObject_InlineValues(owner_o)->valid);
+ PyDictValues *ivs = _PyObject_InlineValues(owner_o);
+ DEOPT_IF(!FT_ATOMIC_LOAD_UINT8(ivs->valid));
}
op(_GUARD_KEYS_VERSION, (keys_version/2, owner -- owner)) {
PyTypeObject *owner_cls = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner));
PyHeapTypeObject *owner_heap_type = (PyHeapTypeObject *)owner_cls;
- DEOPT_IF(owner_heap_type->ht_cached_keys->dk_version != keys_version);
+ PyDictKeysObject *keys = owner_heap_type->ht_cached_keys;
+ DEOPT_IF(FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != keys_version);
}
split op(_LOAD_ATTR_METHOD_WITH_VALUES, (descr/4, owner -- attr, self if (1))) {
@@ -3357,7 +3388,7 @@ dummy_func(
op(_CHECK_ATTR_METHOD_LAZY_DICT, (dictoffset/1, owner -- owner)) {
char *ptr = ((char *)PyStackRef_AsPyObjectBorrow(owner)) + MANAGED_DICT_OFFSET + dictoffset;
- PyObject *dict = *(PyObject **)ptr;
+ PyObject *dict = FT_ATOMIC_LOAD_PTR_ACQUIRE(*(PyObject **)ptr);
/* This object has a __dict__, just not yet created */
DEOPT_IF(dict != NULL);
}
diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h
index 1aa80f398d7470..cda01bb768c269 100644
--- a/Python/executor_cases.c.h
+++ b/Python/executor_cases.c.h
@@ -2652,7 +2652,7 @@
PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
assert(Py_TYPE(owner_o)->tp_dictoffset < 0);
assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_INLINE_VALUES);
- if (!_PyObject_InlineValues(owner_o)->valid) {
+ if (!FT_ATOMIC_LOAD_UINT8(_PyObject_InlineValues(owner_o)->valid)) {
UOP_STAT_INC(uopcode, miss);
JUMP_TO_JUMP_TARGET();
}
@@ -2668,15 +2668,23 @@
uint16_t offset = (uint16_t)CURRENT_OPERAND0();
PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
PyObject **value_ptr = (PyObject**)(((char *)owner_o) + offset);
- PyObject *attr_o = *value_ptr;
+ PyObject *attr_o = FT_ATOMIC_LOAD_PTR_ACQUIRE(*value_ptr);
if (attr_o == NULL) {
UOP_STAT_INC(uopcode, miss);
JUMP_TO_JUMP_TARGET();
}
+ #ifdef Py_GIL_DISABLED
+ if (!_Py_TryIncrefCompareStackRef(value_ptr, attr_o, &attr)) {
+ if (true) {
+ UOP_STAT_INC(uopcode, miss);
+ JUMP_TO_JUMP_TARGET();
+ }
+ }
+ #else
+ attr = PyStackRef_FromPyObjectNew(attr_o);
+ #endif
STAT_INC(LOAD_ATTR, hit);
- Py_INCREF(attr_o);
null = PyStackRef_NULL;
- attr = PyStackRef_FromPyObjectSteal(attr_o);
PyStackRef_CLOSE(owner);
stack_pointer[-1] = attr;
break;
@@ -2691,15 +2699,23 @@
uint16_t offset = (uint16_t)CURRENT_OPERAND0();
PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
PyObject **value_ptr = (PyObject**)(((char *)owner_o) + offset);
- PyObject *attr_o = *value_ptr;
+ PyObject *attr_o = FT_ATOMIC_LOAD_PTR_ACQUIRE(*value_ptr);
if (attr_o == NULL) {
UOP_STAT_INC(uopcode, miss);
JUMP_TO_JUMP_TARGET();
}
+ #ifdef Py_GIL_DISABLED
+ if (!_Py_TryIncrefCompareStackRef(value_ptr, attr_o, &attr)) {
+ if (true) {
+ UOP_STAT_INC(uopcode, miss);
+ JUMP_TO_JUMP_TARGET();
+ }
+ }
+ #else
+ attr = PyStackRef_FromPyObjectNew(attr_o);
+ #endif
STAT_INC(LOAD_ATTR, hit);
- Py_INCREF(attr_o);
null = PyStackRef_NULL;
- attr = PyStackRef_FromPyObjectSteal(attr_o);
PyStackRef_CLOSE(owner);
stack_pointer[-1] = attr;
stack_pointer[0] = null;
@@ -2778,55 +2794,88 @@
case _CHECK_ATTR_WITH_HINT: {
_PyStackRef owner;
+ PyDictObject *dict;
owner = stack_pointer[-1];
PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_MANAGED_DICT);
- PyDictObject *dict = _PyObject_GetManagedDict(owner_o);
- if (dict == NULL) {
+ PyDictObject *dict_o = _PyObject_GetManagedDict(owner_o);
+ if (dict_o == NULL) {
UOP_STAT_INC(uopcode, miss);
JUMP_TO_JUMP_TARGET();
}
- assert(PyDict_CheckExact((PyObject *)dict));
+ assert(PyDict_CheckExact((PyObject *)dict_o));
+ dict = dict_o;
+ stack_pointer[0].bits = (uintptr_t)dict;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
break;
}
case _LOAD_ATTR_WITH_HINT: {
+ PyDictObject *dict;
_PyStackRef owner;
_PyStackRef attr;
_PyStackRef null = PyStackRef_NULL;
oparg = CURRENT_OPARG();
- owner = stack_pointer[-1];
+ dict = (PyDictObject *)stack_pointer[-1].bits;
+ owner = stack_pointer[-2];
uint16_t hint = (uint16_t)CURRENT_OPERAND0();
- PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
PyObject *attr_o;
- PyDictObject *dict = _PyObject_GetManagedDict(owner_o);
+ if (!LOCK_OBJECT(dict)) {
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ if (true) {
+ UOP_STAT_INC(uopcode, miss);
+ JUMP_TO_JUMP_TARGET();
+ }
+ }
if (hint >= (size_t)dict->ma_keys->dk_nentries) {
- UOP_STAT_INC(uopcode, miss);
- JUMP_TO_JUMP_TARGET();
+ UNLOCK_OBJECT(dict);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ if (true) {
+ UOP_STAT_INC(uopcode, miss);
+ JUMP_TO_JUMP_TARGET();
+ }
}
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
- if (!DK_IS_UNICODE(dict->ma_keys)) {
- UOP_STAT_INC(uopcode, miss);
- JUMP_TO_JUMP_TARGET();
+ if (dict->ma_keys->dk_kind != DICT_KEYS_UNICODE) {
+ UNLOCK_OBJECT(dict);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ if (true) {
+ UOP_STAT_INC(uopcode, miss);
+ JUMP_TO_JUMP_TARGET();
+ }
}
PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint;
if (ep->me_key != name) {
- UOP_STAT_INC(uopcode, miss);
- JUMP_TO_JUMP_TARGET();
+ UNLOCK_OBJECT(dict);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ if (true) {
+ UOP_STAT_INC(uopcode, miss);
+ JUMP_TO_JUMP_TARGET();
+ }
}
attr_o = ep->me_value;
if (attr_o == NULL) {
- UOP_STAT_INC(uopcode, miss);
- JUMP_TO_JUMP_TARGET();
+ UNLOCK_OBJECT(dict);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ if (true) {
+ UOP_STAT_INC(uopcode, miss);
+ JUMP_TO_JUMP_TARGET();
+ }
}
STAT_INC(LOAD_ATTR, hit);
- Py_INCREF(attr_o);
- attr = PyStackRef_FromPyObjectSteal(attr_o);
+ attr = PyStackRef_FromPyObjectNew(attr_o);
+ UNLOCK_OBJECT(dict);
null = PyStackRef_NULL;
PyStackRef_CLOSE(owner);
- stack_pointer[-1] = attr;
- if (oparg & 1) stack_pointer[0] = null;
- stack_pointer += (oparg & 1);
+ stack_pointer[-2] = attr;
+ if (oparg & 1) stack_pointer[-1] = null;
+ stack_pointer += -1 + (oparg & 1);
assert(WITHIN_STACK_BOUNDS());
break;
}
@@ -2839,15 +2888,23 @@
owner = stack_pointer[-1];
uint16_t index = (uint16_t)CURRENT_OPERAND0();
PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
- char *addr = (char *)owner_o + index;
- PyObject *attr_o = *(PyObject **)addr;
+ PyObject **addr = (PyObject **)((char *)owner_o + index);
+ PyObject *attr_o = FT_ATOMIC_LOAD_PTR(*addr);
if (attr_o == NULL) {
UOP_STAT_INC(uopcode, miss);
JUMP_TO_JUMP_TARGET();
}
+ #ifdef Py_GIL_DISABLED
+ int increfed = _Py_TryIncrefCompareStackRef(addr, attr_o, &attr);
+ if (!increfed) {
+ UOP_STAT_INC(uopcode, miss);
+ JUMP_TO_JUMP_TARGET();
+ }
+ #else
+ attr = PyStackRef_FromPyObjectNew(attr_o);
+ #endif
STAT_INC(LOAD_ATTR, hit);
null = PyStackRef_NULL;
- attr = PyStackRef_FromPyObjectNew(attr_o);
PyStackRef_CLOSE(owner);
stack_pointer[-1] = attr;
break;
@@ -2861,15 +2918,23 @@
owner = stack_pointer[-1];
uint16_t index = (uint16_t)CURRENT_OPERAND0();
PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
- char *addr = (char *)owner_o + index;
- PyObject *attr_o = *(PyObject **)addr;
+ PyObject **addr = (PyObject **)((char *)owner_o + index);
+ PyObject *attr_o = FT_ATOMIC_LOAD_PTR(*addr);
if (attr_o == NULL) {
UOP_STAT_INC(uopcode, miss);
JUMP_TO_JUMP_TARGET();
}
+ #ifdef Py_GIL_DISABLED
+ int increfed = _Py_TryIncrefCompareStackRef(addr, attr_o, &attr);
+ if (!increfed) {
+ UOP_STAT_INC(uopcode, miss);
+ JUMP_TO_JUMP_TARGET();
+ }
+ #else
+ attr = PyStackRef_FromPyObjectNew(attr_o);
+ #endif
STAT_INC(LOAD_ATTR, hit);
null = PyStackRef_NULL;
- attr = PyStackRef_FromPyObjectNew(attr_o);
PyStackRef_CLOSE(owner);
stack_pointer[-1] = attr;
stack_pointer[0] = null;
@@ -2890,7 +2955,7 @@
JUMP_TO_JUMP_TARGET();
}
assert(type_version != 0);
- if (((PyTypeObject *)owner_o)->tp_version_tag != type_version) {
+ if (FT_ATOMIC_LOAD_UINT_RELAXED(((PyTypeObject *)owner_o)->tp_version_tag) != type_version) {
UOP_STAT_INC(uopcode, miss);
JUMP_TO_JUMP_TARGET();
}
@@ -3924,7 +3989,8 @@
owner = stack_pointer[-1];
PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_INLINE_VALUES);
- if (!_PyObject_InlineValues(owner_o)->valid) {
+ PyDictValues *ivs = _PyObject_InlineValues(owner_o);
+ if (!FT_ATOMIC_LOAD_UINT8(ivs->valid)) {
UOP_STAT_INC(uopcode, miss);
JUMP_TO_JUMP_TARGET();
}
@@ -3937,7 +4003,8 @@
uint32_t keys_version = (uint32_t)CURRENT_OPERAND0();
PyTypeObject *owner_cls = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner));
PyHeapTypeObject *owner_heap_type = (PyHeapTypeObject *)owner_cls;
- if (owner_heap_type->ht_cached_keys->dk_version != keys_version) {
+ PyDictKeysObject *keys = owner_heap_type->ht_cached_keys;
+ if (FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != keys_version) {
UOP_STAT_INC(uopcode, miss);
JUMP_TO_JUMP_TARGET();
}
@@ -4022,7 +4089,7 @@
owner = stack_pointer[-1];
uint16_t dictoffset = (uint16_t)CURRENT_OPERAND0();
char *ptr = ((char *)PyStackRef_AsPyObjectBorrow(owner)) + MANAGED_DICT_OFFSET + dictoffset;
- PyObject *dict = *(PyObject **)ptr;
+ PyObject *dict = FT_ATOMIC_LOAD_PTR_ACQUIRE(*(PyObject **)ptr);
/* This object has a __dict__, just not yet created */
if (dict != NULL) {
UOP_STAT_INC(uopcode, miss);
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index 810beb61d0db5e..81408380d6b2b8 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -5345,7 +5345,7 @@
PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
DEOPT_IF(!PyType_Check(owner_o), LOAD_ATTR);
assert(type_version != 0);
- DEOPT_IF(((PyTypeObject *)owner_o)->tp_version_tag != type_version, LOAD_ATTR);
+ DEOPT_IF(FT_ATOMIC_LOAD_UINT_RELAXED(((PyTypeObject *)owner_o)->tp_version_tag) != type_version, LOAD_ATTR);
}
/* Skip 2 cache entries */
// _LOAD_ATTR_CLASS
@@ -5380,7 +5380,7 @@
PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
DEOPT_IF(!PyType_Check(owner_o), LOAD_ATTR);
assert(type_version != 0);
- DEOPT_IF(((PyTypeObject *)owner_o)->tp_version_tag != type_version, LOAD_ATTR);
+ DEOPT_IF(FT_ATOMIC_LOAD_UINT_RELAXED(((PyTypeObject *)owner_o)->tp_version_tag) != type_version, LOAD_ATTR);
}
// _GUARD_TYPE_VERSION
{
@@ -5421,7 +5421,7 @@
DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR);
PyTypeObject *cls = Py_TYPE(owner_o);
assert(type_version != 0);
- DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR);
+ DEOPT_IF(FT_ATOMIC_LOAD_UINT_RELAXED(cls->tp_version_tag) != type_version, LOAD_ATTR);
assert(Py_IS_TYPE(getattribute, &PyFunction_Type));
PyFunctionObject *f = (PyFunctionObject *)getattribute;
assert(func_version != 0);
@@ -5463,19 +5463,24 @@
PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
assert(Py_TYPE(owner_o)->tp_dictoffset < 0);
assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_INLINE_VALUES);
- DEOPT_IF(!_PyObject_InlineValues(owner_o)->valid, LOAD_ATTR);
+ DEOPT_IF(!FT_ATOMIC_LOAD_UINT8(_PyObject_InlineValues(owner_o)->valid), LOAD_ATTR);
}
// _LOAD_ATTR_INSTANCE_VALUE
{
uint16_t offset = read_u16(&this_instr[4].cache);
PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
PyObject **value_ptr = (PyObject**)(((char *)owner_o) + offset);
- PyObject *attr_o = *value_ptr;
+ PyObject *attr_o = FT_ATOMIC_LOAD_PTR_ACQUIRE(*value_ptr);
DEOPT_IF(attr_o == NULL, LOAD_ATTR);
+ #ifdef Py_GIL_DISABLED
+ if (!_Py_TryIncrefCompareStackRef(value_ptr, attr_o, &attr)) {
+ DEOPT_IF(true, LOAD_ATTR);
+ }
+ #else
+ attr = PyStackRef_FromPyObjectNew(attr_o);
+ #endif
STAT_INC(LOAD_ATTR, hit);
- Py_INCREF(attr_o);
null = PyStackRef_NULL;
- attr = PyStackRef_FromPyObjectSteal(attr_o);
PyStackRef_CLOSE(owner);
}
/* Skip 5 cache entries */
@@ -5507,7 +5512,7 @@
{
uint16_t dictoffset = read_u16(&this_instr[4].cache);
char *ptr = ((char *)PyStackRef_AsPyObjectBorrow(owner)) + MANAGED_DICT_OFFSET + dictoffset;
- PyObject *dict = *(PyObject **)ptr;
+ PyObject *dict = FT_ATOMIC_LOAD_PTR_ACQUIRE(*(PyObject **)ptr);
/* This object has a __dict__, just not yet created */
DEOPT_IF(dict != NULL, LOAD_ATTR);
}
@@ -5586,14 +5591,16 @@
{
PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_INLINE_VALUES);
- DEOPT_IF(!_PyObject_InlineValues(owner_o)->valid, LOAD_ATTR);
+ PyDictValues *ivs = _PyObject_InlineValues(owner_o);
+ DEOPT_IF(!FT_ATOMIC_LOAD_UINT8(ivs->valid), LOAD_ATTR);
}
// _GUARD_KEYS_VERSION
{
uint32_t keys_version = read_u32(&this_instr[4].cache);
PyTypeObject *owner_cls = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner));
PyHeapTypeObject *owner_heap_type = (PyHeapTypeObject *)owner_cls;
- DEOPT_IF(owner_heap_type->ht_cached_keys->dk_version != keys_version, LOAD_ATTR);
+ PyDictKeysObject *keys = owner_heap_type->ht_cached_keys;
+ DEOPT_IF(FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != keys_version, LOAD_ATTR);
}
// _LOAD_ATTR_METHOD_WITH_VALUES
{
@@ -5716,14 +5723,16 @@
{
PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_INLINE_VALUES);
- DEOPT_IF(!_PyObject_InlineValues(owner_o)->valid, LOAD_ATTR);
+ PyDictValues *ivs = _PyObject_InlineValues(owner_o);
+ DEOPT_IF(!FT_ATOMIC_LOAD_UINT8(ivs->valid), LOAD_ATTR);
}
// _GUARD_KEYS_VERSION
{
uint32_t keys_version = read_u32(&this_instr[4].cache);
PyTypeObject *owner_cls = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner));
PyHeapTypeObject *owner_heap_type = (PyHeapTypeObject *)owner_cls;
- DEOPT_IF(owner_heap_type->ht_cached_keys->dk_version != keys_version, LOAD_ATTR);
+ PyDictKeysObject *keys = owner_heap_type->ht_cached_keys;
+ DEOPT_IF(FT_ATOMIC_LOAD_UINT32_RELAXED(keys->dk_version) != keys_version, LOAD_ATTR);
}
// _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES
{
@@ -5824,12 +5833,17 @@
{
uint16_t index = read_u16(&this_instr[4].cache);
PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
- char *addr = (char *)owner_o + index;
- PyObject *attr_o = *(PyObject **)addr;
+ PyObject **addr = (PyObject **)((char *)owner_o + index);
+ PyObject *attr_o = FT_ATOMIC_LOAD_PTR(*addr);
DEOPT_IF(attr_o == NULL, LOAD_ATTR);
+ #ifdef Py_GIL_DISABLED
+ int increfed = _Py_TryIncrefCompareStackRef(addr, attr_o, &attr);
+ DEOPT_IF(!increfed, LOAD_ATTR);
+ #else
+ attr = PyStackRef_FromPyObjectNew(attr_o);
+ #endif
STAT_INC(LOAD_ATTR, hit);
null = PyStackRef_NULL;
- attr = PyStackRef_FromPyObjectNew(attr_o);
PyStackRef_CLOSE(owner);
}
/* Skip 5 cache entries */
@@ -5846,6 +5860,7 @@
INSTRUCTION_STATS(LOAD_ATTR_WITH_HINT);
static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size");
_PyStackRef owner;
+ PyDictObject *dict;
_PyStackRef attr;
_PyStackRef null = PyStackRef_NULL;
/* Skip 1 cache entry */
@@ -5861,26 +5876,40 @@
{
PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
assert(Py_TYPE(owner_o)->tp_flags & Py_TPFLAGS_MANAGED_DICT);
- PyDictObject *dict = _PyObject_GetManagedDict(owner_o);
- DEOPT_IF(dict == NULL, LOAD_ATTR);
- assert(PyDict_CheckExact((PyObject *)dict));
+ PyDictObject *dict_o = _PyObject_GetManagedDict(owner_o);
+ DEOPT_IF(dict_o == NULL, LOAD_ATTR);
+ assert(PyDict_CheckExact((PyObject *)dict_o));
+ dict = dict_o;
}
// _LOAD_ATTR_WITH_HINT
{
uint16_t hint = read_u16(&this_instr[4].cache);
- PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
PyObject *attr_o;
- PyDictObject *dict = _PyObject_GetManagedDict(owner_o);
- DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries, LOAD_ATTR);
+ if (!LOCK_OBJECT(dict)) {
+ DEOPT_IF(true, LOAD_ATTR);
+ }
+ if (hint >= (size_t)dict->ma_keys->dk_nentries) {
+ UNLOCK_OBJECT(dict);
+ DEOPT_IF(true, LOAD_ATTR);
+ }
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
- DEOPT_IF(!DK_IS_UNICODE(dict->ma_keys), LOAD_ATTR);
+ if (dict->ma_keys->dk_kind != DICT_KEYS_UNICODE) {
+ UNLOCK_OBJECT(dict);
+ DEOPT_IF(true, LOAD_ATTR);
+ }
PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint;
- DEOPT_IF(ep->me_key != name, LOAD_ATTR);
+ if (ep->me_key != name) {
+ UNLOCK_OBJECT(dict);
+ DEOPT_IF(true, LOAD_ATTR);
+ }
attr_o = ep->me_value;
- DEOPT_IF(attr_o == NULL, LOAD_ATTR);
+ if (attr_o == NULL) {
+ UNLOCK_OBJECT(dict);
+ DEOPT_IF(true, LOAD_ATTR);
+ }
STAT_INC(LOAD_ATTR, hit);
- Py_INCREF(attr_o);
- attr = PyStackRef_FromPyObjectSteal(attr_o);
+ attr = PyStackRef_FromPyObjectNew(attr_o);
+ UNLOCK_OBJECT(dict);
null = PyStackRef_NULL;
PyStackRef_CLOSE(owner);
}
diff --git a/Python/optimizer_bytecodes.c b/Python/optimizer_bytecodes.c
index 788adecca8af80..4d96ada5acf00f 100644
--- a/Python/optimizer_bytecodes.c
+++ b/Python/optimizer_bytecodes.c
@@ -582,11 +582,17 @@ dummy_func(void) {
}
}
- op(_LOAD_ATTR_WITH_HINT, (hint/1, owner -- attr, null if (oparg & 1))) {
+ op(_CHECK_ATTR_WITH_HINT, (owner -- owner, dict)) {
+ dict = sym_new_not_null(ctx);
+ (void)owner;
+ }
+
+ op(_LOAD_ATTR_WITH_HINT, (hint/1, owner, dict -- attr, null if (oparg & 1))) {
attr = sym_new_not_null(ctx);
null = sym_new_null(ctx);
(void)hint;
(void)owner;
+ (void)dict;
}
op(_LOAD_ATTR_SLOT, (index/1, owner -- attr, null if (oparg & 1))) {
diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h
index 1a7cc6becfefb6..aff4493fdc4dd7 100644
--- a/Python/optimizer_cases.c.h
+++ b/Python/optimizer_cases.c.h
@@ -1250,22 +1250,33 @@
}
case _CHECK_ATTR_WITH_HINT: {
+ _Py_UopsSymbol *owner;
+ _Py_UopsSymbol *dict;
+ owner = stack_pointer[-1];
+ dict = sym_new_not_null(ctx);
+ (void)owner;
+ stack_pointer[0] = dict;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
break;
}
case _LOAD_ATTR_WITH_HINT: {
+ _Py_UopsSymbol *dict;
_Py_UopsSymbol *owner;
_Py_UopsSymbol *attr;
_Py_UopsSymbol *null = NULL;
- owner = stack_pointer[-1];
+ dict = stack_pointer[-1];
+ owner = stack_pointer[-2];
uint16_t hint = (uint16_t)this_instr->operand0;
attr = sym_new_not_null(ctx);
null = sym_new_null(ctx);
(void)hint;
(void)owner;
- stack_pointer[-1] = attr;
- if (oparg & 1) stack_pointer[0] = null;
- stack_pointer += (oparg & 1);
+ (void)dict;
+ stack_pointer[-2] = attr;
+ if (oparg & 1) stack_pointer[-1] = null;
+ stack_pointer += -1 + (oparg & 1);
assert(WITHIN_STACK_BOUNDS());
break;
}
diff --git a/Python/specialize.c b/Python/specialize.c
index 897005c4f1078d..8d9f19c8895187 100644
--- a/Python/specialize.c
+++ b/Python/specialize.c
@@ -3,6 +3,7 @@
#include "opcode.h"
#include "pycore_code.h"
+#include "pycore_critical_section.h"
#include "pycore_descrobject.h" // _PyMethodWrapper_Type
#include "pycore_dict.h" // DICT_KEYS_UNICODE
#include "pycore_function.h" // _PyFunction_GetVersionForCurrentState()
@@ -537,6 +538,7 @@ _PyCode_Quicken(_Py_CODEUNIT *instructions, Py_ssize_t size, PyObject *consts,
#define SPEC_FAIL_ATTR_BUILTIN_CLASS_METHOD_OBJ 33
#define SPEC_FAIL_ATTR_METACLASS_OVERRIDDEN 34
#define SPEC_FAIL_ATTR_SPLIT_DICT 35
+#define SPEC_FAIL_ATTR_DESCR_NOT_DEFERRED 36
/* Binary subscr and store subscr */
@@ -729,11 +731,8 @@ unspecialize(_Py_CODEUNIT *instr)
}
static int function_kind(PyCodeObject *code);
-#ifndef Py_GIL_DISABLED
static bool function_check_args(PyObject *o, int expected_argcount, int opcode);
static uint32_t function_get_version(PyObject *o, int opcode);
-static uint32_t type_get_version(PyTypeObject *t, int opcode);
-#endif
static int
specialize_module_load_attr_lock_held(PyDictObject *dict, _Py_CODEUNIT *instr, PyObject *name)
@@ -879,10 +878,11 @@ descriptor_is_class(PyObject *descriptor, PyObject *name)
(descriptor == _PyType_Lookup(&PyBaseObject_Type, name)));
}
-#ifndef Py_GIL_DISABLED
static DescriptorClassification
-analyze_descriptor_load(PyTypeObject *type, PyObject *name, PyObject **descr) {
+analyze_descriptor_load(PyTypeObject *type, PyObject *name, PyObject **descr, unsigned int *tp_version) {
bool has_getattr = false;
+ bool have_ga_version = false;
+ unsigned int ga_version;
getattrofunc getattro_slot = type->tp_getattro;
if (getattro_slot == PyObject_GenericGetAttr) {
/* Normal attribute lookup; */
@@ -892,24 +892,27 @@ analyze_descriptor_load(PyTypeObject *type, PyObject *name, PyObject **descr) {
getattro_slot == _Py_slot_tp_getattro) {
/* One or both of __getattribute__ or __getattr__ may have been
overridden See typeobject.c for why these functions are special. */
- PyObject *getattribute = _PyType_LookupRef(type, &_Py_ID(__getattribute__));
+ PyObject *getattribute = _PyType_LookupRefAndVersion(type,
+ &_Py_ID(__getattribute__), &ga_version);
+ have_ga_version = true;
PyInterpreterState *interp = _PyInterpreterState_GET();
bool has_custom_getattribute = getattribute != NULL &&
getattribute != interp->callable_cache.object__getattribute__;
- PyObject *getattr = _PyType_LookupRef(type, &_Py_ID(__getattr__));
+ PyObject *getattr = _PyType_Lookup(type, &_Py_ID(__getattr__));
has_getattr = getattr != NULL;
- Py_XDECREF(getattr);
if (has_custom_getattribute) {
if (getattro_slot == _Py_slot_tp_getattro &&
!has_getattr &&
Py_IS_TYPE(getattribute, &PyFunction_Type)) {
*descr = getattribute;
+ *tp_version = ga_version;
return GETATTRIBUTE_IS_PYTHON_FUNCTION;
}
/* Potentially both __getattr__ and __getattribute__ are set.
Too complicated */
Py_DECREF(getattribute);
*descr = NULL;
+ *tp_version = ga_version;
return GETSET_OVERRIDDEN;
}
/* Potentially has __getattr__ but no custom __getattribute__.
@@ -923,16 +926,18 @@ analyze_descriptor_load(PyTypeObject *type, PyObject *name, PyObject **descr) {
}
else {
*descr = NULL;
+ *tp_version = FT_ATOMIC_LOAD_UINT_RELAXED(type->tp_version_tag);
return GETSET_OVERRIDDEN;
}
- PyObject *descriptor = _PyType_LookupRef(type, name);
+ unsigned int descr_version;
+ PyObject *descriptor = _PyType_LookupRefAndVersion(type, name, &descr_version);
*descr = descriptor;
+ *tp_version = have_ga_version ? ga_version : descr_version;
if (descriptor_is_class(descriptor, name)) {
return DUNDER_CLASS;
}
return classify_descriptor(descriptor, has_getattr);
}
-#endif //!Py_GIL_DISABLED
static DescriptorClassification
analyze_descriptor_store(PyTypeObject *type, PyObject *name, PyObject **descr, unsigned int *tp_version)
@@ -952,12 +957,13 @@ analyze_descriptor_store(PyTypeObject *type, PyObject *name, PyObject **descr, u
static int
specialize_dict_access_inline(
PyObject *owner, _Py_CODEUNIT *instr, PyTypeObject *type,
- DescriptorClassification kind, PyObject *name, unsigned int tp_version,
+ PyObject *name, unsigned int tp_version,
int base_op, int values_op)
{
_PyAttrCache *cache = (_PyAttrCache *)(instr + 1);
PyDictKeysObject *keys = ((PyHeapTypeObject *)type)->ht_cached_keys;
assert(PyUnicode_CheckExact(name));
+ _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(owner);
Py_ssize_t index = _PyDictKeys_StringLookupSplit(keys, name);
assert (index != DKIX_ERROR);
if (index == DKIX_EMPTY) {
@@ -965,6 +971,7 @@ specialize_dict_access_inline(
return 0;
}
assert(index >= 0);
+ assert(_PyObject_InlineValues(owner)->valid);
char *value_addr = (char *)&_PyObject_InlineValues(owner)->values[index];
Py_ssize_t offset = value_addr - (char *)owner;
if (offset != (uint16_t)offset) {
@@ -980,10 +987,13 @@ specialize_dict_access_inline(
static int
specialize_dict_access_hint(
PyDictObject *dict, _Py_CODEUNIT *instr, PyTypeObject *type,
- DescriptorClassification kind, PyObject *name, unsigned int tp_version,
+ PyObject *name, unsigned int tp_version,
int base_op, int hint_op)
{
_PyAttrCache *cache = (_PyAttrCache *)(instr + 1);
+
+ _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(dict);
+
// We found an instance with a __dict__.
if (_PyDict_HasSplitTable(dict)) {
SPECIALIZATION_FAIL(base_op, SPEC_FAIL_ATTR_SPLIT_DICT);
@@ -1027,7 +1037,7 @@ specialize_dict_access(
PyDictObject *dict = _PyObject_GetManagedDict(owner);
if (dict == NULL) {
// managed dict, not materialized, inline values valid
- res = specialize_dict_access_inline(owner, instr, type, kind, name,
+ res = specialize_dict_access_inline(owner, instr, type, name,
tp_version, base_op, values_op);
}
else {
@@ -1047,16 +1057,19 @@ specialize_dict_access(
int res;
Py_BEGIN_CRITICAL_SECTION(dict);
// materialized managed dict
- res = specialize_dict_access_hint(dict, instr, type, kind, name,
+ res = specialize_dict_access_hint(dict, instr, type, name,
tp_version, base_op, hint_op);
Py_END_CRITICAL_SECTION();
return res;
}
}
-#ifndef Py_GIL_DISABLED
-static int specialize_attr_loadclassattr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* name,
- PyObject* descr, DescriptorClassification kind, bool is_method);
+static int
+specialize_attr_loadclassattr(PyObject *owner, _Py_CODEUNIT *instr,
+ PyObject *name, PyObject *descr,
+ unsigned int tp_version,
+ DescriptorClassification kind, bool is_method,
+ uint32_t shared_keys_version);
static int specialize_class_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* name);
/* Returns true if instances of obj's class are
@@ -1065,7 +1078,7 @@ static int specialize_class_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyOb
* For other objects, we check their actual dictionary.
*/
static bool
-instance_has_key(PyObject *obj, PyObject* name)
+instance_has_key(PyObject *obj, PyObject *name, uint32_t *shared_keys_version)
{
PyTypeObject *cls = Py_TYPE(obj);
if ((cls->tp_flags & Py_TPFLAGS_MANAGED_DICT) == 0) {
@@ -1073,36 +1086,38 @@ instance_has_key(PyObject *obj, PyObject* name)
}
if (cls->tp_flags & Py_TPFLAGS_INLINE_VALUES) {
PyDictKeysObject *keys = ((PyHeapTypeObject *)cls)->ht_cached_keys;
- Py_ssize_t index = _PyDictKeys_StringLookup(keys, name);
+ Py_ssize_t index =
+ _PyDictKeys_StringLookupAndVersion(keys, name, shared_keys_version);
return index >= 0;
}
PyDictObject *dict = _PyObject_GetManagedDict(obj);
if (dict == NULL || !PyDict_CheckExact(dict)) {
return false;
}
+ bool result;
+ Py_BEGIN_CRITICAL_SECTION(dict);
if (dict->ma_values) {
- return false;
+ result = false;
}
- Py_ssize_t index = _PyDict_LookupIndex(dict, name);
- if (index < 0) {
- return false;
+ else {
+ result = (_PyDict_LookupIndex(dict, name) >= 0);
}
- return true;
+ Py_END_CRITICAL_SECTION();
+ return result;
}
static int
-specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* name)
+do_specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* name,
+ bool shadow, uint32_t shared_keys_version,
+ DescriptorClassification kind, PyObject *descr, unsigned int tp_version)
{
_PyAttrCache *cache = (_PyAttrCache *)(instr + 1);
PyTypeObject *type = Py_TYPE(owner);
- bool shadow = instance_has_key(owner, name);
- PyObject *descr = NULL;
- DescriptorClassification kind = analyze_descriptor_load(type, name, &descr);
- Py_XDECREF(descr); // turn strong ref into a borrowed ref
- assert(descr != NULL || kind == ABSENT || kind == GETSET_OVERRIDDEN);
- if (type_get_version(type, LOAD_ATTR) == 0) {
+ if (tp_version == 0) {
+ SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_OUT_OF_VERSIONS);
return -1;
}
+ uint8_t oparg = FT_ATOMIC_LOAD_UINT8_RELAXED(instr->op.arg);
switch(kind) {
case OVERRIDING:
SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_OVERRIDING_DESCRIPTOR);
@@ -1112,9 +1127,10 @@ specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* na
if (shadow) {
goto try_instance;
}
- int oparg = instr->op.arg;
if (oparg & 1) {
- if (specialize_attr_loadclassattr(owner, instr, name, descr, kind, true)) {
+ if (specialize_attr_loadclassattr(owner, instr, name, descr,
+ tp_version, kind, true,
+ shared_keys_version)) {
return 0;
}
else {
@@ -1140,7 +1156,7 @@ specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* na
if (!function_check_args(fget, 1, LOAD_ATTR)) {
return -1;
}
- if (instr->op.arg & 1) {
+ if (oparg & 1) {
SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_METHOD);
return -1;
}
@@ -1149,8 +1165,14 @@ specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* na
SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_OTHER);
return -1;
}
- assert(type->tp_version_tag != 0);
- write_u32(lm_cache->type_version, type->tp_version_tag);
+ #ifdef Py_GIL_DISABLED
+ if (!_PyObject_HasDeferredRefcount(fget)) {
+ SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_DESCR_NOT_DEFERRED);
+ return -1;
+ }
+ #endif
+ assert(tp_version != 0);
+ write_u32(lm_cache->type_version, tp_version);
/* borrowed */
write_obj(lm_cache->descr, fget);
specialize(instr, LOAD_ATTR_PROPERTY);
@@ -1176,7 +1198,7 @@ specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* na
assert(dmem->type == Py_T_OBJECT_EX || dmem->type == _Py_T_OBJECT);
assert(offset > 0);
cache->index = (uint16_t)offset;
- write_u32(cache->version, type->tp_version_tag);
+ write_u32(cache->version, tp_version);
specialize(instr, LOAD_ATTR_SLOT);
return 0;
}
@@ -1185,7 +1207,7 @@ specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* na
Py_ssize_t offset = offsetof(PyObject, ob_type);
assert(offset == (uint16_t)offset);
cache->index = (uint16_t)offset;
- write_u32(cache->version, type->tp_version_tag);
+ write_u32(cache->version, tp_version);
specialize(instr, LOAD_ATTR_SLOT);
return 0;
}
@@ -1200,13 +1222,18 @@ specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* na
return -1;
case GETATTRIBUTE_IS_PYTHON_FUNCTION:
{
+ #ifndef Py_GIL_DISABLED
+ // In free-threaded builds it's possible for tp_getattro to change
+ // after the call to analyze_descriptor. That is fine: the version
+ // guard will fail.
assert(type->tp_getattro == _Py_slot_tp_getattro);
+ #endif
assert(Py_IS_TYPE(descr, &PyFunction_Type));
_PyLoadMethodCache *lm_cache = (_PyLoadMethodCache *)(instr + 1);
if (!function_check_args(descr, 2, LOAD_ATTR)) {
return -1;
}
- if (instr->op.arg & 1) {
+ if (oparg & 1) {
SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_METHOD);
return -1;
}
@@ -1219,10 +1246,16 @@ specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* na
SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_OTHER);
return -1;
}
+ #ifdef Py_GIL_DISABLED
+ if (!_PyObject_HasDeferredRefcount(descr)) {
+ SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_DESCR_NOT_DEFERRED);
+ return -1;
+ }
+ #endif
write_u32(lm_cache->keys_version, version);
/* borrowed */
write_obj(lm_cache->descr, descr);
- write_u32(lm_cache->type_version, type->tp_version_tag);
+ write_u32(lm_cache->type_version, tp_version);
specialize(instr, LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN);
return 0;
}
@@ -1237,8 +1270,10 @@ specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* na
if (shadow) {
goto try_instance;
}
- if ((instr->op.arg & 1) == 0) {
- if (specialize_attr_loadclassattr(owner, instr, name, descr, kind, false)) {
+ if ((oparg & 1) == 0) {
+ if (specialize_attr_loadclassattr(owner, instr, name, descr,
+ tp_version, kind, false,
+ shared_keys_version)) {
return 0;
}
}
@@ -1252,14 +1287,28 @@ specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* na
}
Py_UNREACHABLE();
try_instance:
- if (specialize_dict_access(owner, instr, type, kind, name, type->tp_version_tag,
+ if (specialize_dict_access(owner, instr, type, kind, name, tp_version,
LOAD_ATTR, LOAD_ATTR_INSTANCE_VALUE, LOAD_ATTR_WITH_HINT))
{
return 0;
}
return -1;
}
-#endif // Py_GIL_DISABLED
+
+static int
+specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* name)
+{
+ // 0 is not a valid version
+ uint32_t shared_keys_version = 0;
+ bool shadow = instance_has_key(owner, name, &shared_keys_version);
+ PyObject *descr = NULL;
+ unsigned int tp_version = 0;
+ PyTypeObject *type = Py_TYPE(owner);
+ DescriptorClassification kind = analyze_descriptor_load(type, name, &descr, &tp_version);
+ int result = do_specialize_instance_load_attr(owner, instr, name, shadow, shared_keys_version, kind, descr, tp_version);
+ Py_XDECREF(descr);
+ return result;
+}
void
_Py_Specialize_LoadAttr(_PyStackRef owner_st, _Py_CODEUNIT *instr, PyObject *name)
@@ -1281,20 +1330,10 @@ _Py_Specialize_LoadAttr(_PyStackRef owner_st, _Py_CODEUNIT *instr, PyObject *nam
fail = specialize_module_load_attr(owner, instr, name);
}
else if (PyType_Check(owner)) {
- #ifdef Py_GIL_DISABLED
- SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_EXPECTED_ERROR);
- fail = true;
- #else
fail = specialize_class_load_attr(owner, instr, name);
- #endif
}
else {
- #ifdef Py_GIL_DISABLED
- SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_EXPECTED_ERROR);
- fail = true;
- #else
fail = specialize_instance_load_attr(owner, instr, name);
- #endif
}
if (fail) {
@@ -1402,8 +1441,6 @@ _Py_Specialize_StoreAttr(_PyStackRef owner_st, _Py_CODEUNIT *instr, PyObject *na
return;
}
-#ifndef Py_GIL_DISABLED
-
#ifdef Py_STATS
static int
load_attr_fail_kind(DescriptorClassification kind)
@@ -1452,8 +1489,10 @@ specialize_class_load_attr(PyObject *owner, _Py_CODEUNIT *instr,
SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_METACLASS_OVERRIDDEN);
return -1;
}
- PyObject *metadescriptor = _PyType_Lookup(Py_TYPE(cls), name);
+ unsigned int meta_version = 0;
+ PyObject *metadescriptor = _PyType_LookupRefAndVersion(Py_TYPE(cls), name, &meta_version);
DescriptorClassification metakind = classify_descriptor(metadescriptor, false);
+ Py_XDECREF(metadescriptor);
switch (metakind) {
case METHOD:
case NON_DESCRIPTOR:
@@ -1468,38 +1507,52 @@ specialize_class_load_attr(PyObject *owner, _Py_CODEUNIT *instr,
}
PyObject *descr = NULL;
DescriptorClassification kind = 0;
- kind = analyze_descriptor_load(cls, name, &descr);
- Py_XDECREF(descr); // turn strong ref into a borrowed ref
- if (type_get_version(cls, LOAD_ATTR) == 0) {
+ unsigned int tp_version = 0;
+ kind = analyze_descriptor_load(cls, name, &descr, &tp_version);
+ if (tp_version == 0) {
+ SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_OUT_OF_VERSIONS);
+ Py_XDECREF(descr);
return -1;
}
bool metaclass_check = false;
if ((Py_TYPE(cls)->tp_flags & Py_TPFLAGS_IMMUTABLETYPE) == 0) {
metaclass_check = true;
- if (type_get_version(Py_TYPE(cls), LOAD_ATTR) == 0) {
+ if (meta_version == 0) {
+ SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_OUT_OF_VERSIONS);
+ Py_XDECREF(descr);
return -1;
}
}
switch (kind) {
case METHOD:
case NON_DESCRIPTOR:
- write_u32(cache->type_version, cls->tp_version_tag);
+ #ifdef Py_GIL_DISABLED
+ if (!_PyObject_HasDeferredRefcount(descr)) {
+ SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_DESCR_NOT_DEFERRED);
+ Py_XDECREF(descr);
+ return -1;
+ }
+ #endif
+ write_u32(cache->type_version, tp_version);
write_obj(cache->descr, descr);
if (metaclass_check) {
- write_u32(cache->keys_version, Py_TYPE(cls)->tp_version_tag);
+ write_u32(cache->keys_version, meta_version);
specialize(instr, LOAD_ATTR_CLASS_WITH_METACLASS_CHECK);
}
else {
specialize(instr, LOAD_ATTR_CLASS);
}
+ Py_XDECREF(descr);
return 0;
#ifdef Py_STATS
case ABSENT:
SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_EXPECTED_ERROR);
+ Py_XDECREF(descr);
return -1;
#endif
default:
SPECIALIZATION_FAIL(LOAD_ATTR, load_attr_fail_kind(kind));
+ Py_XDECREF(descr);
return -1;
}
}
@@ -1508,29 +1561,41 @@ specialize_class_load_attr(PyObject *owner, _Py_CODEUNIT *instr,
// can cause a significant drop in cache hits. A possible test is
// python.exe -m test_typing test_re test_dis test_zlib.
static int
-specialize_attr_loadclassattr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name,
-PyObject *descr, DescriptorClassification kind, bool is_method)
+specialize_attr_loadclassattr(PyObject *owner, _Py_CODEUNIT *instr,
+ PyObject *name, PyObject *descr,
+ unsigned int tp_version,
+ DescriptorClassification kind, bool is_method,
+ uint32_t shared_keys_version)
{
_PyLoadMethodCache *cache = (_PyLoadMethodCache *)(instr + 1);
PyTypeObject *owner_cls = Py_TYPE(owner);
assert(descr != NULL);
assert((is_method && kind == METHOD) || (!is_method && kind == NON_DESCRIPTOR));
- if (owner_cls->tp_flags & Py_TPFLAGS_INLINE_VALUES) {
- PyDictKeysObject *keys = ((PyHeapTypeObject *)owner_cls)->ht_cached_keys;
- assert(_PyDictKeys_StringLookup(keys, name) < 0);
- uint32_t keys_version = _PyDictKeys_GetVersionForCurrentState(
- _PyInterpreterState_GET(), keys);
- if (keys_version == 0) {
+
+ #ifdef Py_GIL_DISABLED
+ if (!_PyObject_HasDeferredRefcount(descr)) {
+ SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_DESCR_NOT_DEFERRED);
+ return 0;
+ }
+ #endif
+
+ unsigned long tp_flags = PyType_GetFlags(owner_cls);
+ if (tp_flags & Py_TPFLAGS_INLINE_VALUES) {
+ #ifndef Py_GIL_DISABLED
+ assert(_PyDictKeys_StringLookup(
+ ((PyHeapTypeObject *)owner_cls)->ht_cached_keys, name) < 0);
+ #endif
+ if (shared_keys_version == 0) {
SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_OUT_OF_VERSIONS);
return 0;
}
- write_u32(cache->keys_version, keys_version);
+ write_u32(cache->keys_version, shared_keys_version);
specialize(instr, is_method ? LOAD_ATTR_METHOD_WITH_VALUES : LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES);
}
else {
Py_ssize_t dictoffset;
- if (owner_cls->tp_flags & Py_TPFLAGS_MANAGED_DICT) {
+ if (tp_flags & Py_TPFLAGS_MANAGED_DICT) {
dictoffset = MANAGED_DICT_OFFSET;
}
else {
@@ -1576,13 +1641,11 @@ PyObject *descr, DescriptorClassification kind, bool is_method)
* PyType_Modified usages in typeobject.c). The MCACHE has been
* working since Python 2.6 and it's battle-tested.
*/
- write_u32(cache->type_version, owner_cls->tp_version_tag);
+ write_u32(cache->type_version, tp_version);
write_obj(cache->descr, descr);
return 1;
}
-#endif // Py_GIL_DISABLED
-
static void
specialize_load_global_lock_held(
@@ -1729,7 +1792,6 @@ function_kind(PyCodeObject *code) {
return SIMPLE_FUNCTION;
}
-#ifndef Py_GIL_DISABLED
/* Returning false indicates a failure. */
static bool
function_check_args(PyObject *o, int expected_argcount, int opcode)
@@ -1763,19 +1825,6 @@ function_get_version(PyObject *o, int opcode)
return version;
}
-/* Returning 0 indicates a failure. */
-static uint32_t
-type_get_version(PyTypeObject *t, int opcode)
-{
- uint32_t version = t->tp_version_tag;
- if (version == 0) {
- SPECIALIZATION_FAIL(opcode, SPEC_FAIL_OUT_OF_VERSIONS);
- return 0;
- }
- return version;
-}
-#endif // Py_GIL_DISABLED
-
void
_Py_Specialize_BinarySubscr(
_PyStackRef container_st, _PyStackRef sub_st, _Py_CODEUNIT *instr)
diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py
index 99896f32fd2b08..4013b503502df6 100644
--- a/Tools/cases_generator/analyzer.py
+++ b/Tools/cases_generator/analyzer.py
@@ -386,7 +386,7 @@ def find_assignment_target(node: parser.InstDef, idx: int) -> list[lexer.Token]:
"""Find the tokens that make up the left-hand side of an assignment"""
offset = 0
for tkn in reversed(node.block.tokens[: idx]):
- if tkn.kind in {"SEMI", "LBRACE", "RBRACE"}:
+ if tkn.kind in {"SEMI", "LBRACE", "RBRACE", "CMACRO"}:
return node.block.tokens[idx - offset : idx]
offset += 1
return []
diff --git a/Tools/cases_generator/generators_common.py b/Tools/cases_generator/generators_common.py
index 69d84183f1c7e6..f54afbb880d2fa 100644
--- a/Tools/cases_generator/generators_common.py
+++ b/Tools/cases_generator/generators_common.py
@@ -126,7 +126,7 @@ def __init__(self, out: CWriter):
"PyStackRef_AsPyObjectSteal": self.stackref_steal,
"DISPATCH": self.dispatch,
"INSTRUCTION_SIZE": self.instruction_size,
- "POP_DEAD_INPUTS": self.pop_dead_inputs,
+ "POP_INPUT": self.pop_input,
}
self.out = out
@@ -423,7 +423,7 @@ def save_stack(
self.emit_save(storage)
return True
- def pop_dead_inputs(
+ def pop_input(
self,
tkn: Token,
tkn_iter: TokenIterator,
@@ -432,9 +432,18 @@ def pop_dead_inputs(
inst: Instruction | None,
) -> bool:
next(tkn_iter)
+ name_tkn = next(tkn_iter)
+ name = name_tkn.text
next(tkn_iter)
next(tkn_iter)
- storage.pop_dead_inputs(self.out)
+ if not storage.inputs:
+ raise analysis_error("stack is empty", tkn)
+ tos = storage.inputs[-1]
+ if tos.name != name:
+ raise analysis_error(f"'{name} is not top of stack", name_tkn)
+ tos.defined = False
+ storage.clear_dead_inputs()
+ storage.flush(self.out)
return True
def emit_reload(self, storage: Storage) -> None:
diff --git a/Tools/cases_generator/stack.py b/Tools/cases_generator/stack.py
index 9471fe0e56f7d8..286f47d0cfb11b 100644
--- a/Tools/cases_generator/stack.py
+++ b/Tools/cases_generator/stack.py
@@ -512,10 +512,6 @@ def flush(self, out: CWriter, cast_type: str = "uintptr_t", extract_bits: bool =
self._push_defined_outputs()
self.stack.flush(out, cast_type, extract_bits)
- def pop_dead_inputs(self, out: CWriter, cast_type: str = "uintptr_t", extract_bits: bool = True) -> None:
- self.clear_dead_inputs()
- self.stack.flush(out, cast_type, extract_bits)
-
def save(self, out: CWriter) -> None:
assert self.spilled >= 0
if self.spilled == 0:
https://github.com/python/cpython/commit/1c13c56a34fc4c4d8969f0b6dc93d5208a…
commit: 1c13c56a34fc4c4d8969f0b6dc93d5208a50d61b
branch: main
author: Neil Schemenauer <nas-github(a)arctrix.com>
committer: nascheme <nas-github(a)arctrix.com>
date: 2025-01-14T11:43:42-08:00
summary:
gh-128384: Add locking to warnings.py. (gh-128386)
Co-authored-by: Kumar Aditya <kumaraditya(a)python.org>
files:
A Misc/NEWS.d/next/Library/2025-01-06-10-37-27.gh-issue-128384.V0xzwH.rst
M Include/internal/pycore_warnings.h
M Lib/test/test_warnings/__init__.py
M Lib/warnings.py
M Python/_warnings.c
M Python/clinic/_warnings.c.h
diff --git a/Include/internal/pycore_warnings.h b/Include/internal/pycore_warnings.h
index f9f6559312f4ef..672228cd6fbd19 100644
--- a/Include/internal/pycore_warnings.h
+++ b/Include/internal/pycore_warnings.h
@@ -14,7 +14,7 @@ struct _warnings_runtime_state {
PyObject *filters; /* List */
PyObject *once_registry; /* Dict */
PyObject *default_action; /* String */
- PyMutex mutex;
+ _PyRecursiveMutex lock;
long filters_version;
};
diff --git a/Lib/test/test_warnings/__init__.py b/Lib/test/test_warnings/__init__.py
index 4e3c877896f295..4bd164b8a9a82b 100644
--- a/Lib/test/test_warnings/__init__.py
+++ b/Lib/test/test_warnings/__init__.py
@@ -1521,7 +1521,7 @@ def test_late_resource_warning(self):
self.assertTrue(err.startswith(expected), ascii(err))
-class DeprecatedTests(unittest.TestCase):
+class DeprecatedTests(PyPublicAPITests):
def test_dunder_deprecated(self):
@deprecated("A will go away soon")
class A:
diff --git a/Lib/warnings.py b/Lib/warnings.py
index e83cde37ab2d1a..f20b01372dd7a4 100644
--- a/Lib/warnings.py
+++ b/Lib/warnings.py
@@ -185,24 +185,32 @@ def simplefilter(action, category=Warning, lineno=0, append=False):
raise ValueError("lineno must be an int >= 0")
_add_filter(action, None, category, None, lineno, append=append)
+def _filters_mutated():
+ # Even though this function is not part of the public API, it's used by
+ # a fair amount of user code.
+ with _lock:
+ _filters_mutated_lock_held()
+
def _add_filter(*item, append):
- # Remove possible duplicate filters, so new one will be placed
- # in correct place. If append=True and duplicate exists, do nothing.
- if not append:
- try:
- filters.remove(item)
- except ValueError:
- pass
- filters.insert(0, item)
- else:
- if item not in filters:
- filters.append(item)
- _filters_mutated()
+ with _lock:
+ if not append:
+ # Remove possible duplicate filters, so new one will be placed
+ # in correct place. If append=True and duplicate exists, do nothing.
+ try:
+ filters.remove(item)
+ except ValueError:
+ pass
+ filters.insert(0, item)
+ else:
+ if item not in filters:
+ filters.append(item)
+ _filters_mutated_lock_held()
def resetwarnings():
"""Clear the list of warning filters, so that no filters are active."""
- filters[:] = []
- _filters_mutated()
+ with _lock:
+ filters[:] = []
+ _filters_mutated_lock_held()
class _OptionError(Exception):
"""Exception used by option processing helpers."""
@@ -353,11 +361,6 @@ def warn_explicit(message, category, filename, lineno,
module = filename or "<unknown>"
if module[-3:].lower() == ".py":
module = module[:-3] # XXX What about leading pathname?
- if registry is None:
- registry = {}
- if registry.get('version', 0) != _filters_version:
- registry.clear()
- registry['version'] = _filters_version
if isinstance(message, Warning):
text = str(message)
category = message.__class__
@@ -365,52 +368,59 @@ def warn_explicit(message, category, filename, lineno,
text = message
message = category(message)
key = (text, category, lineno)
- # Quick test for common case
- if registry.get(key):
- return
- # Search the filters
- for item in filters:
- action, msg, cat, mod, ln = item
- if ((msg is None or msg.match(text)) and
- issubclass(category, cat) and
- (mod is None or mod.match(module)) and
- (ln == 0 or lineno == ln)):
- break
- else:
- action = defaultaction
- # Early exit actions
- if action == "ignore":
- return
+ with _lock:
+ if registry is None:
+ registry = {}
+ if registry.get('version', 0) != _filters_version:
+ registry.clear()
+ registry['version'] = _filters_version
+ # Quick test for common case
+ if registry.get(key):
+ return
+ # Search the filters
+ for item in filters:
+ action, msg, cat, mod, ln = item
+ if ((msg is None or msg.match(text)) and
+ issubclass(category, cat) and
+ (mod is None or mod.match(module)) and
+ (ln == 0 or lineno == ln)):
+ break
+ else:
+ action = defaultaction
+ # Early exit actions
+ if action == "ignore":
+ return
+
+ if action == "error":
+ raise message
+ # Other actions
+ if action == "once":
+ registry[key] = 1
+ oncekey = (text, category)
+ if onceregistry.get(oncekey):
+ return
+ onceregistry[oncekey] = 1
+ elif action in {"always", "all"}:
+ pass
+ elif action == "module":
+ registry[key] = 1
+ altkey = (text, category, 0)
+ if registry.get(altkey):
+ return
+ registry[altkey] = 1
+ elif action == "default":
+ registry[key] = 1
+ else:
+ # Unrecognized actions are errors
+ raise RuntimeError(
+ "Unrecognized action (%r) in warnings.filters:\n %s" %
+ (action, item))
# Prime the linecache for formatting, in case the
# "file" is actually in a zipfile or something.
import linecache
linecache.getlines(filename, module_globals)
- if action == "error":
- raise message
- # Other actions
- if action == "once":
- registry[key] = 1
- oncekey = (text, category)
- if onceregistry.get(oncekey):
- return
- onceregistry[oncekey] = 1
- elif action in {"always", "all"}:
- pass
- elif action == "module":
- registry[key] = 1
- altkey = (text, category, 0)
- if registry.get(altkey):
- return
- registry[altkey] = 1
- elif action == "default":
- registry[key] = 1
- else:
- # Unrecognized actions are errors
- raise RuntimeError(
- "Unrecognized action (%r) in warnings.filters:\n %s" %
- (action, item))
# Print message and context
msg = WarningMessage(message, category, filename, lineno, source)
_showwarnmsg(msg)
@@ -488,30 +498,32 @@ def __enter__(self):
if self._entered:
raise RuntimeError("Cannot enter %r twice" % self)
self._entered = True
- self._filters = self._module.filters
- self._module.filters = self._filters[:]
- self._module._filters_mutated()
- self._showwarning = self._module.showwarning
- self._showwarnmsg_impl = self._module._showwarnmsg_impl
+ with _lock:
+ self._filters = self._module.filters
+ self._module.filters = self._filters[:]
+ self._module._filters_mutated_lock_held()
+ self._showwarning = self._module.showwarning
+ self._showwarnmsg_impl = self._module._showwarnmsg_impl
+ if self._record:
+ log = []
+ self._module._showwarnmsg_impl = log.append
+ # Reset showwarning() to the default implementation to make sure
+ # that _showwarnmsg() calls _showwarnmsg_impl()
+ self._module.showwarning = self._module._showwarning_orig
+ else:
+ log = None
if self._filter is not None:
simplefilter(*self._filter)
- if self._record:
- log = []
- self._module._showwarnmsg_impl = log.append
- # Reset showwarning() to the default implementation to make sure
- # that _showwarnmsg() calls _showwarnmsg_impl()
- self._module.showwarning = self._module._showwarning_orig
- return log
- else:
- return None
+ return log
def __exit__(self, *exc_info):
if not self._entered:
raise RuntimeError("Cannot exit %r without entering first" % self)
- self._module.filters = self._filters
- self._module._filters_mutated()
- self._module.showwarning = self._showwarning
- self._module._showwarnmsg_impl = self._showwarnmsg_impl
+ with _lock:
+ self._module.filters = self._filters
+ self._module._filters_mutated_lock_held()
+ self._module.showwarning = self._showwarning
+ self._module._showwarnmsg_impl = self._showwarnmsg_impl
class deprecated:
@@ -701,18 +713,36 @@ def extract():
# If either if the compiled regexs are None, match anything.
try:
from _warnings import (filters, _defaultaction, _onceregistry,
- warn, warn_explicit, _filters_mutated)
+ warn, warn_explicit,
+ _filters_mutated_lock_held,
+ _acquire_lock, _release_lock,
+ )
defaultaction = _defaultaction
onceregistry = _onceregistry
_warnings_defaults = True
+
+ class _Lock:
+ def __enter__(self):
+ _acquire_lock()
+ return self
+
+ def __exit__(self, *args):
+ _release_lock()
+
+ _lock = _Lock()
+
except ImportError:
filters = []
defaultaction = "default"
onceregistry = {}
+ import _thread
+
+ _lock = _thread.RLock()
+
_filters_version = 1
- def _filters_mutated():
+ def _filters_mutated_lock_held():
global _filters_version
_filters_version += 1
diff --git a/Misc/NEWS.d/next/Library/2025-01-06-10-37-27.gh-issue-128384.V0xzwH.rst b/Misc/NEWS.d/next/Library/2025-01-06-10-37-27.gh-issue-128384.V0xzwH.rst
new file mode 100644
index 00000000000000..2ca592be20b681
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-01-06-10-37-27.gh-issue-128384.V0xzwH.rst
@@ -0,0 +1,5 @@
+Add locking to :mod:`warnings` to avoid some data races when free-threading
+is used. Change ``_warnings_runtime_state.mutex`` to be a recursive mutex
+and expose it to :mod:`warnings`, via the :func:`!_acquire_lock` and
+:func:`!_release_lock` functions. The lock is held when ``filters`` and
+``_filters_version`` are updated.
diff --git a/Python/_warnings.c b/Python/_warnings.c
index e05ba99e8eaec4..283f203c72c9bf 100644
--- a/Python/_warnings.c
+++ b/Python/_warnings.c
@@ -232,6 +232,61 @@ get_warnings_attr(PyInterpreterState *interp, PyObject *attr, int try_import)
return obj;
}
+static inline void
+warnings_lock(PyInterpreterState *interp)
+{
+ WarningsState *st = warnings_get_state(interp);
+ assert(st != NULL);
+ _PyRecursiveMutex_Lock(&st->lock);
+}
+
+static inline void
+warnings_unlock(PyInterpreterState *interp)
+{
+ WarningsState *st = warnings_get_state(interp);
+ assert(st != NULL);
+ _PyRecursiveMutex_Unlock(&st->lock);
+}
+
+static inline bool
+warnings_lock_held(WarningsState *st)
+{
+ return PyMutex_IsLocked(&st->lock.mutex);
+}
+
+/*[clinic input]
+_acquire_lock as warnings_acquire_lock
+
+[clinic start generated code]*/
+
+static PyObject *
+warnings_acquire_lock_impl(PyObject *module)
+/*[clinic end generated code: output=594313457d1bf8e1 input=46ec20e55acca52f]*/
+{
+ PyInterpreterState *interp = get_current_interp();
+ if (interp == NULL) {
+ return NULL;
+ }
+ warnings_lock(interp);
+ Py_RETURN_NONE;
+}
+
+/*[clinic input]
+_release_lock as warnings_release_lock
+
+[clinic start generated code]*/
+
+static PyObject *
+warnings_release_lock_impl(PyObject *module)
+/*[clinic end generated code: output=d73d5a8789396750 input=ea01bb77870c5693]*/
+{
+ PyInterpreterState *interp = get_current_interp();
+ if (interp == NULL) {
+ return NULL;
+ }
+ warnings_unlock(interp);
+ Py_RETURN_NONE;
+}
static PyObject *
get_once_registry(PyInterpreterState *interp)
@@ -239,7 +294,7 @@ get_once_registry(PyInterpreterState *interp)
WarningsState *st = warnings_get_state(interp);
assert(st != NULL);
- _Py_CRITICAL_SECTION_ASSERT_MUTEX_LOCKED(&st->mutex);
+ assert(warnings_lock_held(st));
PyObject *registry = GET_WARNINGS_ATTR(interp, onceregistry, 0);
if (registry == NULL) {
@@ -267,7 +322,7 @@ get_default_action(PyInterpreterState *interp)
WarningsState *st = warnings_get_state(interp);
assert(st != NULL);
- _Py_CRITICAL_SECTION_ASSERT_MUTEX_LOCKED(&st->mutex);
+ assert(warnings_lock_held(st));
PyObject *default_action = GET_WARNINGS_ATTR(interp, defaultaction, 0);
if (default_action == NULL) {
@@ -299,7 +354,7 @@ get_filter(PyInterpreterState *interp, PyObject *category,
WarningsState *st = warnings_get_state(interp);
assert(st != NULL);
- _Py_CRITICAL_SECTION_ASSERT_MUTEX_LOCKED(&st->mutex);
+ assert(warnings_lock_held(st));
PyObject *warnings_filters = GET_WARNINGS_ATTR(interp, filters, 0);
if (warnings_filters == NULL) {
@@ -399,7 +454,7 @@ already_warned(PyInterpreterState *interp, PyObject *registry, PyObject *key,
WarningsState *st = warnings_get_state(interp);
assert(st != NULL);
- _Py_CRITICAL_SECTION_ASSERT_MUTEX_LOCKED(&st->mutex);
+ assert(warnings_lock_held(st));
PyObject *version_obj;
if (PyDict_GetItemRef(registry, &_Py_ID(version), &version_obj) < 0) {
@@ -994,15 +1049,10 @@ do_warn(PyObject *message, PyObject *category, Py_ssize_t stack_level,
&filename, &lineno, &module, ®istry))
return NULL;
-#ifdef Py_GIL_DISABLED
- WarningsState *st = warnings_get_state(tstate->interp);
- assert(st != NULL);
-#endif
-
- Py_BEGIN_CRITICAL_SECTION_MUT(&st->mutex);
+ warnings_lock(tstate->interp);
res = warn_explicit(tstate, category, message, filename, lineno, module, registry,
NULL, source);
- Py_END_CRITICAL_SECTION();
+ warnings_unlock(tstate->interp);
Py_DECREF(filename);
Py_DECREF(registry);
Py_DECREF(module);
@@ -1151,27 +1201,22 @@ warnings_warn_explicit_impl(PyObject *module, PyObject *message,
}
}
-#ifdef Py_GIL_DISABLED
- WarningsState *st = warnings_get_state(tstate->interp);
- assert(st != NULL);
-#endif
-
- Py_BEGIN_CRITICAL_SECTION_MUT(&st->mutex);
+ warnings_lock(tstate->interp);
returned = warn_explicit(tstate, category, message, filename, lineno,
mod, registry, source_line, sourceobj);
- Py_END_CRITICAL_SECTION();
+ warnings_unlock(tstate->interp);
Py_XDECREF(source_line);
return returned;
}
/*[clinic input]
-_filters_mutated as warnings_filters_mutated
+_filters_mutated_lock_held as warnings_filters_mutated_lock_held
[clinic start generated code]*/
static PyObject *
-warnings_filters_mutated_impl(PyObject *module)
-/*[clinic end generated code: output=8ce517abd12b88f4 input=35ecbf08ee2491b2]*/
+warnings_filters_mutated_lock_held_impl(PyObject *module)
+/*[clinic end generated code: output=df5c84f044e856ec input=34208bf03d70e432]*/
{
PyInterpreterState *interp = get_current_interp();
if (interp == NULL) {
@@ -1181,14 +1226,17 @@ warnings_filters_mutated_impl(PyObject *module)
WarningsState *st = warnings_get_state(interp);
assert(st != NULL);
- Py_BEGIN_CRITICAL_SECTION_MUT(&st->mutex);
+ // Note that the lock must be held by the caller.
+ if (!warnings_lock_held(st)) {
+ PyErr_SetString(PyExc_RuntimeError, "warnings lock is not held");
+ return NULL;
+ }
+
st->filters_version++;
- Py_END_CRITICAL_SECTION();
Py_RETURN_NONE;
}
-
/* Function to issue a warning message; may raise an exception. */
static int
@@ -1303,15 +1351,10 @@ PyErr_WarnExplicitObject(PyObject *category, PyObject *message,
return -1;
}
-#ifdef Py_GIL_DISABLED
- WarningsState *st = warnings_get_state(tstate->interp);
- assert(st != NULL);
-#endif
-
- Py_BEGIN_CRITICAL_SECTION_MUT(&st->mutex);
+ warnings_lock(tstate->interp);
res = warn_explicit(tstate, category, message, filename, lineno,
module, registry, NULL, NULL);
- Py_END_CRITICAL_SECTION();
+ warnings_unlock(tstate->interp);
if (res == NULL)
return -1;
Py_DECREF(res);
@@ -1376,15 +1419,10 @@ PyErr_WarnExplicitFormat(PyObject *category,
PyObject *res;
PyThreadState *tstate = get_current_tstate();
if (tstate != NULL) {
-#ifdef Py_GIL_DISABLED
- WarningsState *st = warnings_get_state(tstate->interp);
- assert(st != NULL);
-#endif
-
- Py_BEGIN_CRITICAL_SECTION_MUT(&st->mutex);
+ warnings_lock(tstate->interp);
res = warn_explicit(tstate, category, message, filename, lineno,
module, registry, NULL, NULL);
- Py_END_CRITICAL_SECTION();
+ warnings_unlock(tstate->interp);
Py_DECREF(message);
if (res != NULL) {
Py_DECREF(res);
@@ -1464,7 +1502,9 @@ _PyErr_WarnUnawaitedCoroutine(PyObject *coro)
static PyMethodDef warnings_functions[] = {
WARNINGS_WARN_METHODDEF
WARNINGS_WARN_EXPLICIT_METHODDEF
- WARNINGS_FILTERS_MUTATED_METHODDEF
+ WARNINGS_FILTERS_MUTATED_LOCK_HELD_METHODDEF
+ WARNINGS_ACQUIRE_LOCK_METHODDEF
+ WARNINGS_RELEASE_LOCK_METHODDEF
/* XXX(brett.cannon): add showwarning? */
/* XXX(brett.cannon): Reasonable to add formatwarning? */
{NULL, NULL} /* sentinel */
diff --git a/Python/clinic/_warnings.c.h b/Python/clinic/_warnings.c.h
index 9a2c33f2ea8169..bcb4b344fa4370 100644
--- a/Python/clinic/_warnings.c.h
+++ b/Python/clinic/_warnings.c.h
@@ -9,6 +9,40 @@ preserve
#include "pycore_abstract.h" // _PyNumber_Index()
#include "pycore_modsupport.h" // _PyArg_UnpackKeywords()
+PyDoc_STRVAR(warnings_acquire_lock__doc__,
+"_acquire_lock($module, /)\n"
+"--\n"
+"\n");
+
+#define WARNINGS_ACQUIRE_LOCK_METHODDEF \
+ {"_acquire_lock", (PyCFunction)warnings_acquire_lock, METH_NOARGS, warnings_acquire_lock__doc__},
+
+static PyObject *
+warnings_acquire_lock_impl(PyObject *module);
+
+static PyObject *
+warnings_acquire_lock(PyObject *module, PyObject *Py_UNUSED(ignored))
+{
+ return warnings_acquire_lock_impl(module);
+}
+
+PyDoc_STRVAR(warnings_release_lock__doc__,
+"_release_lock($module, /)\n"
+"--\n"
+"\n");
+
+#define WARNINGS_RELEASE_LOCK_METHODDEF \
+ {"_release_lock", (PyCFunction)warnings_release_lock, METH_NOARGS, warnings_release_lock__doc__},
+
+static PyObject *
+warnings_release_lock_impl(PyObject *module);
+
+static PyObject *
+warnings_release_lock(PyObject *module, PyObject *Py_UNUSED(ignored))
+{
+ return warnings_release_lock_impl(module);
+}
+
PyDoc_STRVAR(warnings_warn__doc__,
"warn($module, /, message, category=None, stacklevel=1, source=None, *,\n"
" skip_file_prefixes=<unrepresentable>)\n"
@@ -230,20 +264,20 @@ warnings_warn_explicit(PyObject *module, PyObject *const *args, Py_ssize_t nargs
return return_value;
}
-PyDoc_STRVAR(warnings_filters_mutated__doc__,
-"_filters_mutated($module, /)\n"
+PyDoc_STRVAR(warnings_filters_mutated_lock_held__doc__,
+"_filters_mutated_lock_held($module, /)\n"
"--\n"
"\n");
-#define WARNINGS_FILTERS_MUTATED_METHODDEF \
- {"_filters_mutated", (PyCFunction)warnings_filters_mutated, METH_NOARGS, warnings_filters_mutated__doc__},
+#define WARNINGS_FILTERS_MUTATED_LOCK_HELD_METHODDEF \
+ {"_filters_mutated_lock_held", (PyCFunction)warnings_filters_mutated_lock_held, METH_NOARGS, warnings_filters_mutated_lock_held__doc__},
static PyObject *
-warnings_filters_mutated_impl(PyObject *module);
+warnings_filters_mutated_lock_held_impl(PyObject *module);
static PyObject *
-warnings_filters_mutated(PyObject *module, PyObject *Py_UNUSED(ignored))
+warnings_filters_mutated_lock_held(PyObject *module, PyObject *Py_UNUSED(ignored))
{
- return warnings_filters_mutated_impl(module);
+ return warnings_filters_mutated_lock_held_impl(module);
}
-/*[clinic end generated code: output=ed02c0f521a03a37 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=d9d32a8b59a30683 input=a9049054013a1b77]*/
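
The switch in pycore_warnings.h from a plain PyMutex to a _PyRecursiveMutex is what allows the same thread to re-acquire the warnings lock it already holds; with a non-recursive mutex that path would self-deadlock. The sketch below is illustrative only: it uses POSIX threads rather than the CPython lock API, and the "re-entry while warning" scenario in its comments is an assumption made for the example, not a path taken from the patch.

    /* Illustrative only (POSIX threads, not the CPython lock API): a
     * recursive mutex lets the same thread re-acquire a lock it already
     * holds, which a plain mutex would turn into a self-deadlock. */
    #include <pthread.h>
    #include <stdio.h>

    static pthread_mutex_t warnings_lock;

    static void
    bump_filters_version_locked(void)
    {
        /* Mirrors the _filters_mutated_lock_held() contract above:
         * only ever called with the lock already held. */
        puts("filters_version bumped");
    }

    static void
    warn_like_entry_point(int reentered)
    {
        pthread_mutex_lock(&warnings_lock);    /* as in warn()/warn_explicit() */
        if (!reentered) {
            /* Hypothetical: a hook invoked under the lock calls back in. */
            warn_like_entry_point(1);
        }
        bump_filters_version_locked();
        pthread_mutex_unlock(&warnings_lock);
    }

    int
    main(void)
    {
        pthread_mutexattr_t attr;
        pthread_mutexattr_init(&attr);
        pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE);
        pthread_mutex_init(&warnings_lock, &attr);
        pthread_mutexattr_destroy(&attr);
        warn_like_entry_point(0);
        pthread_mutex_destroy(&warnings_lock);
        return 0;
    }

Compiled with -pthread, this runs to completion; initializing the mutex without PTHREAD_MUTEX_RECURSIVE would hang on the nested lock, which is the failure mode the recursive lock in this commit avoids.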