Python-checkins
Threads by month
- ----- 2025 -----
- March
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2009 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2008 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2007 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2006 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2005 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2004 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2003 -----
- December
- November
- October
- September
- August
April 2020
[3.8] bpo-40431: Fix syntax typo in turtledemo (GH-19777) (GH-19784)
by Miss Islington (bot), April 29, 2020
https://github.com/python/cpython/commit/adb1f853482e75e81ae0ae7307318a1051…
commit: adb1f853482e75e81ae0ae7307318a1051ca46b5
branch: 3.7
author: Miss Islington (bot) <31488909+miss-islington(a)users.noreply.github.com>
committer: GitHub <noreply(a)github.com>
date: 2020-04-29T02:42:05-07:00
summary:
[3.8] bpo-40431: Fix syntax typo in turtledemo (GH-19777) (GH-19784)
[3.8] bpo-40431: Fix syntax typo in turtledemo (GH-19777)
* Addresses a syntax typo that mistakenly used an undefined string prefix due to a missing space.
(cherry picked from commit 49f70db83e2c62ad06805927f53f6c3e8f4b798e)
Co-authored-by: Miro Hrončok <miro(a)hroncok.cz>
(cherry picked from commit cc011b5190b63f0be561ddec38fc4cd9e60cbf6a)
Co-authored-by: Kyle Stanley <aeros167(a)gmail.com>
files:
M Lib/turtledemo/__main__.py
diff --git a/Lib/turtledemo/__main__.py b/Lib/turtledemo/__main__.py
index 17fe9a75e1c5e..12be5098dad27 100644
--- a/Lib/turtledemo/__main__.py
+++ b/Lib/turtledemo/__main__.py
@@ -272,7 +272,7 @@ def configGUI(self, start, stop, clear, txt="", color="blue"):
self.stop_btn.config(state=stop,
bg="#d00" if stop == NORMAL else "#fca")
self.clear_btn.config(state=clear,
- bg="#d00" if clear == NORMAL else"#fca")
+ bg="#d00" if clear == NORMAL else "#fca")
self.output_lbl.config(text=txt, fg=color)
def makeLoadDemoMenu(self, master):
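
The one-character fix above is easier to appreciate with a small illustration. The snippet below is hypothetical (not taken from turtledemo): only r, b, f, u and their combinations are legal string prefixes in Python, so a keyword glued to a quote reads like an undefined prefix, even though CPython's tokenizer still splits it into the keyword 'else' and a separate string literal.

# Illustrative sketch only -- not code from Lib/turtledemo/__main__.py.
data = b"bytes literal"                       # 'b' is a real string prefix
NORMAL = clear = "normal"
bg = "#d00" if clear == NORMAL else"#fca"     # pre-fix form: parses, but reads like an else"..." prefix
bg = "#d00" if clear == NORMAL else "#fca"    # post-fix form: the space removes the ambiguity
print(data, bg)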
April 29, 2020
https://github.com/python/cpython/commit/4db245ee9ddbe6c53d375de59a35ff59de…
commit: 4db245ee9ddbe6c53d375de59a35ff59dea2a8e0
branch: master
author: Pablo Galindo <Pablogsal(a)gmail.com>
committer: GitHub <noreply(a)github.com>
date: 2020-04-29T10:42:21+01:00
summary:
bpo-40334: refactor and cleanup for the PEG generators (GH-19775)
files:
M Parser/pegen/parse.c
M Parser/pegen/pegen.c
M Parser/pegen/pegen.h
M Tools/peg_generator/Makefile
M Tools/peg_generator/pegen/c_generator.py
M Tools/peg_generator/pegen/parser_generator.py
M Tools/peg_generator/pegen/python_generator.py
diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c
index b26f7327bd273..76dd6d31da05a 100644
--- a/Parser/pegen/parse.c
+++ b/Parser/pegen/parse.c
@@ -648,7 +648,7 @@ file_rule(Parser *p)
if (
(a = statements_rule(p), 1)
&&
- (endmarker_var = _PyPegen_endmarker_token(p))
+ (endmarker_var = _PyPegen_expect_token(p, ENDMARKER))
)
{
res = Module ( a , NULL , p -> arena );
@@ -712,7 +712,7 @@ eval_rule(Parser *p)
&&
(_loop0_1_var = _loop0_1_rule(p))
&&
- (endmarker_var = _PyPegen_endmarker_token(p))
+ (endmarker_var = _PyPegen_expect_token(p, ENDMARKER))
)
{
res = Expression ( a , p -> arena );
@@ -846,7 +846,7 @@ statement_newline_rule(Parser *p)
if (
(a = compound_stmt_rule(p))
&&
- (newline_var = _PyPegen_newline_token(p))
+ (newline_var = _PyPegen_expect_token(p, NEWLINE))
)
{
res = _PyPegen_singleton_seq ( p , a );
@@ -872,7 +872,7 @@ statement_newline_rule(Parser *p)
{ // NEWLINE
void *newline_var;
if (
- (newline_var = _PyPegen_newline_token(p))
+ (newline_var = _PyPegen_expect_token(p, NEWLINE))
)
{
Token *token = _PyPegen_get_last_nonnwhitespace_token(p);
@@ -895,7 +895,7 @@ statement_newline_rule(Parser *p)
{ // $
void *endmarker_var;
if (
- (endmarker_var = _PyPegen_endmarker_token(p))
+ (endmarker_var = _PyPegen_expect_token(p, ENDMARKER))
)
{
res = _PyPegen_interactive_exit ( p );
@@ -929,7 +929,7 @@ simple_stmt_rule(Parser *p)
&&
_PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 13)
&&
- (newline_var = _PyPegen_newline_token(p))
+ (newline_var = _PyPegen_expect_token(p, NEWLINE))
)
{
res = _PyPegen_singleton_seq ( p , a );
@@ -951,7 +951,7 @@ simple_stmt_rule(Parser *p)
&&
(opt_var = _PyPegen_expect_token(p, 13), 1)
&&
- (newline_var = _PyPegen_newline_token(p))
+ (newline_var = _PyPegen_expect_token(p, NEWLINE))
)
{
res = a;
@@ -2684,7 +2684,7 @@ for_stmt_rule(Parser *p)
void *literal;
expr_ty t;
if (
- (is_async = _PyPegen_async_token(p), 1)
+ (is_async = _PyPegen_expect_token(p, ASYNC), 1)
&&
(keyword = _PyPegen_expect_token(p, 517))
&&
@@ -2751,7 +2751,7 @@ with_stmt_rule(Parser *p)
void *literal_1;
void *literal_2;
if (
- (is_async = _PyPegen_async_token(p), 1)
+ (is_async = _PyPegen_expect_token(p, ASYNC), 1)
&&
(keyword = _PyPegen_expect_token(p, 519))
&&
@@ -2790,7 +2790,7 @@ with_stmt_rule(Parser *p)
void *keyword;
void *literal;
if (
- (is_async = _PyPegen_async_token(p), 1)
+ (is_async = _PyPegen_expect_token(p, ASYNC), 1)
&&
(keyword = _PyPegen_expect_token(p, 519))
&&
@@ -3263,7 +3263,7 @@ function_def_raw_rule(Parser *p)
expr_ty n;
void *params;
if (
- (is_async = _PyPegen_async_token(p), 1)
+ (is_async = _PyPegen_expect_token(p, ASYNC), 1)
&&
(keyword = _PyPegen_expect_token(p, 522))
&&
@@ -4002,13 +4002,13 @@ block_rule(Parser *p)
void *indent_var;
void *newline_var;
if (
- (newline_var = _PyPegen_newline_token(p))
+ (newline_var = _PyPegen_expect_token(p, NEWLINE))
&&
- (indent_var = _PyPegen_indent_token(p))
+ (indent_var = _PyPegen_expect_token(p, INDENT))
&&
(a = statements_rule(p))
&&
- (dedent_var = _PyPegen_dedent_token(p))
+ (dedent_var = _PyPegen_expect_token(p, DEDENT))
)
{
res = a;
@@ -6754,7 +6754,7 @@ await_primary_rule(Parser *p)
expr_ty a;
void *await_var;
if (
- (await_var = _PyPegen_await_token(p))
+ (await_var = _PyPegen_expect_token(p, AWAIT))
&&
(a = primary_rule(p))
)
@@ -9919,9 +9919,9 @@ invalid_block_rule(Parser *p)
{ // NEWLINE !INDENT
void *newline_var;
if (
- (newline_var = _PyPegen_newline_token(p))
+ (newline_var = _PyPegen_expect_token(p, NEWLINE))
&&
- _PyPegen_lookahead(0, _PyPegen_indent_token, p)
+ _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, INDENT)
)
{
res = RAISE_INDENTATION_ERROR ( "expected an indented block" );
@@ -10036,7 +10036,7 @@ _loop0_1_rule(Parser *p)
{ // NEWLINE
void *newline_var;
while (
- (newline_var = _PyPegen_newline_token(p))
+ (newline_var = _PyPegen_expect_token(p, NEWLINE))
)
{
res = newline_var;
@@ -10273,7 +10273,7 @@ _tmp_6_rule(Parser *p)
{ // ASYNC
void *async_var;
if (
- (async_var = _PyPegen_async_token(p))
+ (async_var = _PyPegen_expect_token(p, ASYNC))
)
{
res = async_var;
@@ -10345,7 +10345,7 @@ _tmp_8_rule(Parser *p)
{ // ASYNC
void *async_var;
if (
- (async_var = _PyPegen_async_token(p))
+ (async_var = _PyPegen_expect_token(p, ASYNC))
)
{
res = async_var;
@@ -10381,7 +10381,7 @@ _tmp_9_rule(Parser *p)
{ // ASYNC
void *async_var;
if (
- (async_var = _PyPegen_async_token(p))
+ (async_var = _PyPegen_expect_token(p, ASYNC))
)
{
res = async_var;
@@ -15068,7 +15068,7 @@ _tmp_128_rule(Parser *p)
&&
(f = named_expression_rule(p))
&&
- (newline_var = _PyPegen_newline_token(p))
+ (newline_var = _PyPegen_expect_token(p, NEWLINE))
)
{
res = f;
@@ -15257,7 +15257,7 @@ _tmp_134_rule(Parser *p)
void *keyword_1;
void *y;
if (
- (y = _PyPegen_async_token(p), 1)
+ (y = _PyPegen_expect_token(p, ASYNC), 1)
&&
(keyword = _PyPegen_expect_token(p, 517))
&&
diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c
index 39da2709991b9..942447b0f8fd1 100644
--- a/Parser/pegen/pegen.c
+++ b/Parser/pegen/pegen.c
@@ -692,16 +692,6 @@ _PyPegen_lookahead_with_name(int positive, expr_ty (func)(Parser *), Parser *p)
return (res != NULL) == positive;
}
-int
-_PyPegen_lookahead_with_string(int positive, void *(func)(Parser *, const char *), Parser *p,
- const char *arg)
-{
- int mark = p->mark;
- void *res = func(p, arg);
- p->mark = mark;
- return (res != NULL) == positive;
-}
-
int
_PyPegen_lookahead_with_int(int positive, Token *(func)(Parser *, int), Parser *p, int arg)
{
@@ -751,24 +741,6 @@ _PyPegen_get_last_nonnwhitespace_token(Parser *p)
return token;
}
-void *
-_PyPegen_async_token(Parser *p)
-{
- return _PyPegen_expect_token(p, ASYNC);
-}
-
-void *
-_PyPegen_await_token(Parser *p)
-{
- return _PyPegen_expect_token(p, AWAIT);
-}
-
-void *
-_PyPegen_endmarker_token(Parser *p)
-{
- return _PyPegen_expect_token(p, ENDMARKER);
-}
-
expr_ty
_PyPegen_name_token(Parser *p)
{
@@ -794,24 +766,6 @@ _PyPegen_string_token(Parser *p)
return _PyPegen_expect_token(p, STRING);
}
-void *
-_PyPegen_newline_token(Parser *p)
-{
- return _PyPegen_expect_token(p, NEWLINE);
-}
-
-void *
-_PyPegen_indent_token(Parser *p)
-{
- return _PyPegen_expect_token(p, INDENT);
-}
-
-void *
-_PyPegen_dedent_token(Parser *p)
-{
- return _PyPegen_expect_token(p, DEDENT);
-}
-
static PyObject *
parsenumber_raw(const char *s)
{
diff --git a/Parser/pegen/pegen.h b/Parser/pegen/pegen.h
index 0ac9b317efe59..99ec0f44e6518 100644
--- a/Parser/pegen/pegen.h
+++ b/Parser/pegen/pegen.h
@@ -104,7 +104,6 @@ int _PyPegen_update_memo(Parser *p, int mark, int type, void *node);
int _PyPegen_is_memoized(Parser *p, int type, void *pres);
int _PyPegen_lookahead_with_name(int, expr_ty (func)(Parser *), Parser *);
-int _PyPegen_lookahead_with_string(int, void *(func)(Parser *, const char *), Parser *, const char *);
int _PyPegen_lookahead_with_int(int, Token *(func)(Parser *, int), Parser *, int);
int _PyPegen_lookahead(int, void *(func)(Parser *), Parser *);
diff --git a/Tools/peg_generator/Makefile b/Tools/peg_generator/Makefile
index a37cbfcaa8551..c1219b9263851 100644
--- a/Tools/peg_generator/Makefile
+++ b/Tools/peg_generator/Makefile
@@ -33,7 +33,7 @@ dump: peg_extension/parse.c
$(PYTHON) -c "from peg_extension import parse; import ast; t = parse.parse_file('$(TESTFILE)', mode=1); print(ast.dump(t))"
regen-metaparser: pegen/metagrammar.gram pegen/*.py
- $(PYTHON) -m pegen -q -c pegen/metagrammar.gram -o pegen/grammar_parser.py
+ $(PYTHON) -m pegen -q python pegen/metagrammar.gram -o pegen/grammar_parser.py
# Note: These targets really depend on the generated shared object in peg_extension/parse.*.so but
# this has different names in different systems so we are abusing the implicit dependency on
diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py
index a01c3097c365b..a59da2ffae8e1 100644
--- a/Tools/peg_generator/pegen/c_generator.py
+++ b/Tools/peg_generator/pegen/c_generator.py
@@ -1,33 +1,36 @@
import ast
+from dataclasses import dataclass, field
import re
-from typing import Any, cast, Dict, IO, Optional, List, Text, Tuple, Set
+from typing import IO, Any, Dict, List, Optional, Set, Text, Tuple
+from enum import Enum
+from pegen import grammar
from pegen.grammar import (
+ Alt,
Cut,
+ Gather,
GrammarVisitor,
- Rhs,
- Alt,
+ Group,
+ Lookahead,
NamedItem,
NameLeaf,
- StringLeaf,
- Lookahead,
- PositiveLookahead,
NegativeLookahead,
Opt,
+ PositiveLookahead,
Repeat0,
Repeat1,
- Gather,
- Group,
+ Rhs,
Rule,
+ StringLeaf,
)
-from pegen import grammar
-from pegen.parser_generator import dedupe, ParserGenerator
+from pegen.parser_generator import ParserGenerator
EXTENSION_PREFIX = """\
#include "pegen.h"
"""
+
EXTENSION_SUFFIX = """
void *
_PyPegen_parse(Parser *p)
@@ -41,6 +44,43 @@
"""
+class NodeTypes(Enum):
+ NAME_TOKEN = 0
+ NUMBER_TOKEN = 1
+ STRING_TOKEN = 2
+ GENERIC_TOKEN = 3
+ KEYWORD = 4
+ CUT_OPERATOR = 5
+
+
+BASE_NODETYPES = {
+ "NAME": NodeTypes.NAME_TOKEN,
+ "NUMBER": NodeTypes.NUMBER_TOKEN,
+ "STRING": NodeTypes.STRING_TOKEN,
+}
+
+
+@dataclass
+class FunctionCall:
+ function: str
+ arguments: Optional[List[Any]] = None
+ assigned_variable: Optional[str] = None
+ nodetype: Optional[NodeTypes] = None
+ force_true: bool = False
+ metadata: Dict[str, Any] = field(default_factory=dict)
+
+ def __str__(self) -> str:
+ parts = []
+ parts.append(self.function)
+ if self.arguments:
+ parts.append(f"({', '.join(map(str, self.arguments))})")
+ if self.force_true:
+ parts.append(", 1")
+ if self.assigned_variable:
+ parts = ["(", self.assigned_variable, " = ", *parts, ")"]
+ return "".join(parts)
+
+
class CCallMakerVisitor(GrammarVisitor):
def __init__(
self,
@@ -54,28 +94,57 @@ def __init__(
self.cache: Dict[Any, Any] = {}
self.keyword_cache: Dict[str, int] = {}
- def keyword_helper(self, keyword: str) -> Tuple[str, str]:
+ def keyword_helper(self, keyword: str) -> FunctionCall:
if keyword not in self.keyword_cache:
self.keyword_cache[keyword] = self.gen.keyword_type()
- return "keyword", f"_PyPegen_expect_token(p, {self.keyword_cache[keyword]})"
+ return FunctionCall(
+ assigned_variable="keyword",
+ function="_PyPegen_expect_token",
+ arguments=["p", self.keyword_cache[keyword]],
+ nodetype=NodeTypes.KEYWORD,
+ )
- def visit_NameLeaf(self, node: NameLeaf) -> Tuple[str, str]:
+ def visit_NameLeaf(self, node: NameLeaf) -> FunctionCall:
name = node.value
if name in self.non_exact_tokens:
- name = name.lower()
- return f"{name}_var", f"_PyPegen_{name}_token(p)"
- return f"{name}_var", f"{name}_rule(p)"
+ if name in BASE_NODETYPES:
+ return FunctionCall(
+ assigned_variable=f"{name.lower()}_var",
+ function=f"_PyPegen_{name.lower()}_token",
+ arguments=["p"],
+ nodetype=BASE_NODETYPES[name],
+ metadata={"rulename": name.lower()},
+ )
+ return FunctionCall(
+ assigned_variable=f"{name.lower()}_var",
+ function=f"_PyPegen_expect_token",
+ arguments=["p", name],
+ nodetype=NodeTypes.GENERIC_TOKEN,
+ metadata={"rulename": name.lower()},
+ )
+
+ return FunctionCall(
+ assigned_variable=f"{name}_var",
+ function=f"{name}_rule",
+ arguments=["p"],
+ metadata={"rulename": name.lower()},
+ )
- def visit_StringLeaf(self, node: StringLeaf) -> Tuple[str, str]:
+ def visit_StringLeaf(self, node: StringLeaf) -> FunctionCall:
val = ast.literal_eval(node.value)
if re.match(r"[a-zA-Z_]\w*\Z", val): # This is a keyword
return self.keyword_helper(val)
else:
assert val in self.exact_tokens, f"{node.value} is not a known literal"
type = self.exact_tokens[val]
- return "literal", f"_PyPegen_expect_token(p, {type})"
+ return FunctionCall(
+ assigned_variable="literal",
+ function=f"_PyPegen_expect_token",
+ arguments=["p", type],
+ nodetype=NodeTypes.GENERIC_TOKEN,
+ )
- def visit_Rhs(self, node: Rhs) -> Tuple[Optional[str], str]:
+ def visit_Rhs(self, node: Rhs) -> FunctionCall:
def can_we_inline(node: Rhs) -> int:
if len(node.alts) != 1 or len(node.alts[0].items) != 1:
return False
@@ -90,65 +159,96 @@ def can_we_inline(node: Rhs) -> int:
self.cache[node] = self.visit(node.alts[0].items[0])
else:
name = self.gen.name_node(node)
- self.cache[node] = f"{name}_var", f"{name}_rule(p)"
+ self.cache[node] = FunctionCall(
+ assigned_variable=f"{name}_var",
+ function=f"{name}_rule",
+ arguments=["p"],
+ metadata={"rulename": name},
+ )
return self.cache[node]
- def visit_NamedItem(self, node: NamedItem) -> Tuple[Optional[str], str]:
- name, call = self.visit(node.item)
+ def visit_NamedItem(self, node: NamedItem) -> FunctionCall:
+ call = self.visit(node.item)
if node.name:
- name = node.name
- return name, call
-
- def lookahead_call_helper(self, node: Lookahead, positive: int) -> Tuple[None, str]:
- name, call = self.visit(node.node)
- func, args = call.split("(", 1)
- assert args[-1] == ")"
- args = args[:-1]
- if "name_token" in call:
- return None, f"_PyPegen_lookahead_with_name({positive}, {func}, {args})"
- elif not args.startswith("p,"):
- return None, f"_PyPegen_lookahead({positive}, {func}, {args})"
- elif args[2:].strip().isalnum():
- return None, f"_PyPegen_lookahead_with_int({positive}, {func}, {args})"
+ call.assigned_variable = node.name
+ return call
+
+ def lookahead_call_helper(self, node: Lookahead, positive: int) -> FunctionCall:
+ call = self.visit(node.node)
+ if call.nodetype == NodeTypes.NAME_TOKEN:
+ return FunctionCall(
+ function=f"_PyPegen_lookahead_with_name",
+ arguments=[positive, call.function, *call.arguments],
+ )
+ elif call.nodetype in {NodeTypes.GENERIC_TOKEN, NodeTypes.KEYWORD}:
+ return FunctionCall(
+ function=f"_PyPegen_lookahead_with_int",
+ arguments=[positive, call.function, *call.arguments],
+ )
else:
- return None, f"_PyPegen_lookahead_with_string({positive}, {func}, {args})"
+ return FunctionCall(
+ function=f"_PyPegen_lookahead",
+ arguments=[positive, call.function, *call.arguments],
+ )
- def visit_PositiveLookahead(self, node: PositiveLookahead) -> Tuple[None, str]:
+ def visit_PositiveLookahead(self, node: PositiveLookahead) -> FunctionCall:
return self.lookahead_call_helper(node, 1)
- def visit_NegativeLookahead(self, node: NegativeLookahead) -> Tuple[None, str]:
+ def visit_NegativeLookahead(self, node: NegativeLookahead) -> FunctionCall:
return self.lookahead_call_helper(node, 0)
- def visit_Opt(self, node: Opt) -> Tuple[str, str]:
- name, call = self.visit(node.node)
- return "opt_var", f"{call}, 1" # Using comma operator!
+ def visit_Opt(self, node: Opt) -> FunctionCall:
+ call = self.visit(node.node)
+ return FunctionCall(
+ assigned_variable="opt_var",
+ function=call.function,
+ arguments=call.arguments,
+ force_true=True,
+ )
- def visit_Repeat0(self, node: Repeat0) -> Tuple[str, str]:
+ def visit_Repeat0(self, node: Repeat0) -> FunctionCall:
if node in self.cache:
return self.cache[node]
name = self.gen.name_loop(node.node, False)
- self.cache[node] = f"{name}_var", f"{name}_rule(p)"
+ self.cache[node] = FunctionCall(
+ assigned_variable=f"{name}_var",
+ function=f"{name}_rule",
+ arguments=["p"],
+ metadata={"rulename": name},
+ )
return self.cache[node]
- def visit_Repeat1(self, node: Repeat1) -> Tuple[str, str]:
+ def visit_Repeat1(self, node: Repeat1) -> FunctionCall:
if node in self.cache:
return self.cache[node]
name = self.gen.name_loop(node.node, True)
- self.cache[node] = f"{name}_var", f"{name}_rule(p)"
+ self.cache[node] = FunctionCall(
+ assigned_variable=f"{name}_var",
+ function=f"{name}_rule",
+ arguments=["p"],
+ metadata={"rulename": name},
+ )
return self.cache[node]
- def visit_Gather(self, node: Gather) -> Tuple[str, str]:
+ def visit_Gather(self, node: Gather) -> FunctionCall:
if node in self.cache:
return self.cache[node]
name = self.gen.name_gather(node)
- self.cache[node] = f"{name}_var", f"{name}_rule(p)"
+ self.cache[node] = FunctionCall(
+ assigned_variable=f"{name}_var",
+ function=f"{name}_rule",
+ arguments=["p"],
+ metadata={"rulename": name},
+ )
return self.cache[node]
- def visit_Group(self, node: Group) -> Tuple[Optional[str], str]:
+ def visit_Group(self, node: Group) -> FunctionCall:
return self.visit(node.rhs)
- def visit_Cut(self, node: Cut) -> Tuple[str, str]:
- return "cut_var", "1"
+ def visit_Cut(self, node: Cut) -> FunctionCall:
+ return FunctionCall(
+ assigned_variable="cut_var", function="1", nodetype=NodeTypes.CUT_OPERATOR
+ )
class CParserGenerator(ParserGenerator, GrammarVisitor):
@@ -252,7 +352,6 @@ def generate(self, filename: str) -> None:
mode += 1
modulename = self.grammar.metas.get("modulename", "parse")
trailer = self.grammar.metas.get("trailer", EXTENSION_SUFFIX)
- keyword_cache = self.callmakervisitor.keyword_cache
if trailer:
self.print(trailer.rstrip("\n") % dict(mode=mode, modulename=modulename))
@@ -448,13 +547,11 @@ def visit_Rule(self, node: Rule) -> None:
self._handle_default_rule_body(node, rhs, result_type)
self.print("}")
- def visit_NamedItem(self, node: NamedItem, names: List[str]) -> None:
- name, call = self.callmakervisitor.visit(node)
- if not name:
- self.print(call)
- else:
- name = dedupe(name, names)
- self.print(f"({name} = {call})")
+ def visit_NamedItem(self, node: NamedItem) -> None:
+ call = self.callmakervisitor.visit(node)
+ if call.assigned_variable:
+ call.assigned_variable = self.dedupe(call.assigned_variable)
+ self.print(call)
def visit_Rhs(
self, node: Rhs, is_loop: bool, is_gather: bool, rulename: Optional[str]
@@ -464,7 +561,7 @@ def visit_Rhs(
for alt in node.alts:
self.visit(alt, is_loop=is_loop, is_gather=is_gather, rulename=rulename)
- def join_conditions(self, keyword: str, node: Any, names: List[str]) -> None:
+ def join_conditions(self, keyword: str, node: Any) -> None:
self.print(f"{keyword} (")
with self.indent():
first = True
@@ -473,7 +570,7 @@ def join_conditions(self, keyword: str, node: Any, names: List[str]) -> None:
first = False
else:
self.print("&&")
- self.visit(item, names=names)
+ self.visit(item)
self.print(")")
def emit_action(self, node: Alt, cleanup_code: Optional[str] = None) -> None:
@@ -492,29 +589,34 @@ def emit_action(self, node: Alt, cleanup_code: Optional[str] = None) -> None:
f'fprintf(stderr, "Hit with action [%d-%d]: %s\\n", mark, p->mark, "{node}");'
)
- def emit_default_action(self, is_gather: bool, names: List[str], node: Alt) -> None:
- if len(names) > 1:
+ def emit_default_action(self, is_gather: bool, node: Alt) -> None:
+ if len(self.local_variable_names) > 1:
if is_gather:
- assert len(names) == 2
- self.print(f"res = _PyPegen_seq_insert_in_front(p, {names[0]}, {names[1]});")
+ assert len(self.local_variable_names) == 2
+ self.print(
+ f"res = _PyPegen_seq_insert_in_front(p, "
+ f"{self.local_variable_names[0]}, {self.local_variable_names[1]});"
+ )
else:
if self.debug:
self.print(
f'fprintf(stderr, "Hit without action [%d:%d]: %s\\n", mark, p->mark, "{node}");'
)
- self.print(f"res = _PyPegen_dummy_name(p, {', '.join(names)});")
+ self.print(
+ f"res = _PyPegen_dummy_name(p, {', '.join(self.local_variable_names)});"
+ )
else:
if self.debug:
self.print(
f'fprintf(stderr, "Hit with default action [%d:%d]: %s\\n", mark, p->mark, "{node}");'
)
- self.print(f"res = {names[0]};")
+ self.print(f"res = {self.local_variable_names[0]};")
def emit_dummy_action(self) -> None:
self.print(f"res = _PyPegen_dummy_name(p);")
- def handle_alt_normal(self, node: Alt, is_gather: bool, names: List[str]) -> None:
- self.join_conditions(keyword="if", node=node, names=names)
+ def handle_alt_normal(self, node: Alt, is_gather: bool) -> None:
+ self.join_conditions(keyword="if", node=node)
self.print("{")
# We have parsed successfully all the conditions for the option.
with self.indent():
@@ -526,17 +628,15 @@ def handle_alt_normal(self, node: Alt, is_gather: bool, names: List[str]) -> Non
elif node.action:
self.emit_action(node)
else:
- self.emit_default_action(is_gather, names, node)
+ self.emit_default_action(is_gather, node)
# As the current option has parsed correctly, do not continue with the rest.
self.print(f"goto done;")
self.print("}")
- def handle_alt_loop(
- self, node: Alt, is_gather: bool, rulename: Optional[str], names: List[str]
- ) -> None:
+ def handle_alt_loop(self, node: Alt, is_gather: bool, rulename: Optional[str]) -> None:
# Condition of the main body of the alternative
- self.join_conditions(keyword="while", node=node, names=names)
+ self.join_conditions(keyword="while", node=node)
self.print("{")
# We have parsed successfully one item!
with self.indent():
@@ -548,7 +648,7 @@ def handle_alt_loop(
elif node.action:
self.emit_action(node, cleanup_code="PyMem_Free(children);")
else:
- self.emit_default_action(is_gather, names, node)
+ self.emit_default_action(is_gather, node)
# Add the result of rule to the temporary buffer of children. This buffer
# will populate later an asdl_seq with all elements to return.
@@ -580,47 +680,45 @@ def visit_Alt(
if v == "opt_var":
self.print("UNUSED(opt_var); // Silence compiler warnings")
- names: List[str] = []
- if is_loop:
- self.handle_alt_loop(node, is_gather, rulename, names)
- else:
- self.handle_alt_normal(node, is_gather, names)
+ with self.local_variable_context():
+ if is_loop:
+ self.handle_alt_loop(node, is_gather, rulename)
+ else:
+ self.handle_alt_normal(node, is_gather)
self.print("p->mark = mark;")
- if "cut_var" in names:
+ if "cut_var" in vars:
self.print("if (cut_var) return NULL;")
self.print("}")
- def collect_vars(self, node: Alt) -> Dict[str, Optional[str]]:
- names: List[str] = []
+ def collect_vars(self, node: Alt) -> Dict[Optional[str], Optional[str]]:
types = {}
- for item in node.items:
- name, type = self.add_var(item, names)
- types[name] = type
+ with self.local_variable_context():
+ for item in node.items:
+ name, type = self.add_var(item)
+ types[name] = type
return types
- def add_var(self, node: NamedItem, names: List[str]) -> Tuple[str, Optional[str]]:
- name: str
- call: str
- name, call = self.callmakervisitor.visit(node.item)
- type = None
- if not name:
- return name, type
- if name.startswith("cut"):
- return name, "int"
- if name.endswith("_var"):
- rulename = name[:-4]
- rule = self.rules.get(rulename)
- if rule is not None:
- if rule.is_loop() or rule.is_gather():
- type = "asdl_seq *"
- else:
- type = rule.type
- elif name.startswith("_loop") or name.startswith("_gather"):
+ def add_var(self, node: NamedItem) -> Tuple[Optional[str], Optional[str]]:
+ call = self.callmakervisitor.visit(node.item)
+ if not call.assigned_variable:
+ return None, None
+ if call.nodetype == NodeTypes.CUT_OPERATOR:
+ return call.assigned_variable, "int"
+
+ name = call.assigned_variable
+ rulename = call.metadata.get("rulename")
+
+ type: Optional[str] = None
+
+ assert self.all_rules is not None
+ if rulename and rulename in self.all_rules:
+ rule = self.all_rules.get(rulename)
+ if rule.is_loop() or rule.is_gather():
type = "asdl_seq *"
- elif name in ("name_var", "string_var", "number_var"):
- type = "expr_ty"
- if node.name:
- name = node.name
- name = dedupe(name, names)
- return name, type
+ else:
+ type = rule.type
+ elif call.nodetype in BASE_NODETYPES.values():
+ type = "expr_ty"
+
+ return self.dedupe(node.name if node.name else call.assigned_variable), type
diff --git a/Tools/peg_generator/pegen/parser_generator.py b/Tools/peg_generator/pegen/parser_generator.py
index 7851a7c90f4d5..3f6cdbe409d56 100644
--- a/Tools/peg_generator/pegen/parser_generator.py
+++ b/Tools/peg_generator/pegen/parser_generator.py
@@ -13,7 +13,6 @@
NamedItem,
Plain,
NameLeaf,
- StringLeaf,
Gather,
)
from pegen.grammar import GrammarError, GrammarVisitor
@@ -48,6 +47,18 @@ def __init__(self, grammar: Grammar, file: Optional[IO[Text]]):
self.todo = self.rules.copy() # Rules to generate
self.counter = 0 # For name_rule()/name_loop()
self.keyword_counter = 499 # For keyword_type()
+ self.all_rules: Optional[Dict[str, Rule]] = None # Rules + temporal rules
+ self._local_variable_stack: List[List[str]] = []
+
+ @contextlib.contextmanager
+ def local_variable_context(self) -> Iterator[None]:
+ self._local_variable_stack.append([])
+ yield
+ self._local_variable_stack.pop()
+
+ @property
+ def local_variable_names(self) -> List[str]:
+ return self._local_variable_stack[-1]
@abstractmethod
def generate(self, filename: str) -> None:
@@ -82,6 +93,7 @@ def collect_todo(self) -> None:
for rulename in todo:
self.todo[rulename].collect_todo(self)
done = set(alltodo)
+ self.all_rules = self.todo.copy()
def keyword_type(self) -> int:
self.keyword_counter += 1
@@ -109,26 +121,23 @@ def name_gather(self, node: Gather) -> str:
self.counter += 1
extra_function_name = f"_loop0_{self.counter}"
extra_function_alt = Alt(
- [NamedItem(None, node.separator), NamedItem("elem", node.node),], action="elem",
+ [NamedItem(None, node.separator), NamedItem("elem", node.node)], action="elem",
)
self.todo[extra_function_name] = Rule(
extra_function_name, None, Rhs([extra_function_alt]),
)
- alt = Alt(
- [NamedItem("elem", node.node), NamedItem("seq", NameLeaf(extra_function_name)),],
- )
+ alt = Alt([NamedItem("elem", node.node), NamedItem("seq", NameLeaf(extra_function_name))],)
self.todo[name] = Rule(name, None, Rhs([alt]),)
return name
-
-def dedupe(name: str, names: List[str]) -> str:
- origname = name
- counter = 0
- while name in names:
- counter += 1
- name = f"{origname}_{counter}"
- names.append(name)
- return name
+ def dedupe(self, name: str) -> str:
+ origname = name
+ counter = 0
+ while name in self.local_variable_names:
+ counter += 1
+ name = f"{origname}_{counter}"
+ self.local_variable_names.append(name)
+ return name
def compute_nullables(rules: Dict[str, Rule]) -> None:
@@ -153,13 +162,13 @@ def compute_left_recursives(
leaders = set(scc)
for start in scc:
for cycle in sccutils.find_cycles_in_scc(graph, scc, start):
- ## print("Cycle:", " -> ".join(cycle))
+ # print("Cycle:", " -> ".join(cycle))
leaders -= scc - set(cycle)
if not leaders:
raise ValueError(
f"SCC {scc} has no leadership candidate (no element is included in all cycles)"
)
- ## print("Leaders:", leaders)
+ # print("Leaders:", leaders)
leader = min(leaders) # Pick an arbitrary leader from the candidates.
rules[leader].leader = True
else:
diff --git a/Tools/peg_generator/pegen/python_generator.py b/Tools/peg_generator/pegen/python_generator.py
index b2891885f957e..bde27890c15a6 100644
--- a/Tools/peg_generator/pegen/python_generator.py
+++ b/Tools/peg_generator/pegen/python_generator.py
@@ -1,4 +1,4 @@
-from typing import Any, Dict, List, Optional, IO, Text, Tuple
+from typing import Any, Dict, Optional, IO, Text, Tuple
from pegen.grammar import (
Cut,
@@ -19,7 +19,7 @@
Alt,
)
from pegen import grammar
-from pegen.parser_generator import dedupe, ParserGenerator
+from pegen.parser_generator import ParserGenerator
MODULE_PREFIX = """\
#!/usr/bin/env python3.8
@@ -173,7 +173,7 @@ def visit_Rule(self, node: Rule) -> None:
else:
self.print("return None")
- def visit_NamedItem(self, node: NamedItem, names: List[str]) -> None:
+ def visit_NamedItem(self, node: NamedItem) -> None:
name, call = self.callmakervisitor.visit(node.item)
if node.name:
name = node.name
@@ -181,7 +181,7 @@ def visit_NamedItem(self, node: NamedItem, names: List[str]) -> None:
self.print(call)
else:
if name != "cut":
- name = dedupe(name, names)
+ name = self.dedupe(name)
self.print(f"({name} := {call})")
def visit_Rhs(self, node: Rhs, is_loop: bool = False, is_gather: bool = False) -> None:
@@ -191,34 +191,36 @@ def visit_Rhs(self, node: Rhs, is_loop: bool = False, is_gather: bool = False) -
self.visit(alt, is_loop=is_loop, is_gather=is_gather)
def visit_Alt(self, node: Alt, is_loop: bool, is_gather: bool) -> None:
- names: List[str] = []
- self.print("cut = False") # TODO: Only if needed.
- if is_loop:
- self.print("while (")
- else:
- self.print("if (")
- with self.indent():
- first = True
- for item in node.items:
- if first:
- first = False
- else:
- self.print("and")
- self.visit(item, names=names)
- self.print("):")
- with self.indent():
- action = node.action
- if not action:
- if is_gather:
- assert len(names) == 2
- action = f"[{names[0]}] + {names[1]}"
- else:
- action = f"[{', '.join(names)}]"
+ with self.local_variable_context():
+ self.print("cut = False") # TODO: Only if needed.
if is_loop:
- self.print(f"children.append({action})")
- self.print(f"mark = self.mark()")
+ self.print("while (")
else:
- self.print(f"return {action}")
- self.print("self.reset(mark)")
- # Skip remaining alternatives if a cut was reached.
- self.print("if cut: return None") # TODO: Only if needed.
+ self.print("if (")
+ with self.indent():
+ first = True
+ for item in node.items:
+ if first:
+ first = False
+ else:
+ self.print("and")
+ self.visit(item)
+ self.print("):")
+ with self.indent():
+ action = node.action
+ if not action:
+ if is_gather:
+ assert len(self.local_variable_names) == 2
+ action = (
+ f"[{self.local_variable_names[0]}] + {self.local_variable_names[1]}"
+ )
+ else:
+ action = f"[{', '.join(self.local_variable_names)}]"
+ if is_loop:
+ self.print(f"children.append({action})")
+ self.print(f"mark = self.mark()")
+ else:
+ self.print(f"return {action}")
+ self.print("self.reset(mark)")
+ # Skip remaining alternatives if a cut was reached.
+ self.print("if cut: return None") # TODO: Only if needed.
April 29, 2020
https://github.com/python/cpython/commit/9b64ef3ac7b434065dbff0048b9103999e…
commit: 9b64ef3ac7b434065dbff0048b9103999e4b491a
branch: master
author: Anthony Shaw <anthony.p.shaw(a)gmail.com>
committer: GitHub <noreply(a)github.com>
date: 2020-04-29T10:09:09+01:00
summary:
bpo-40432 Fix MSBuild project for Pegen grammars (#GH-9785)
* Update the source path of the pegen target within the Windows regen project.
Change the path to Windows path formats.
* Use the more reliable SetEnv task for Cpp Projects in MSBuild.
files:
M PCbuild/regen.vcxproj
diff --git a/PCbuild/regen.vcxproj b/PCbuild/regen.vcxproj
index 285a8a1b9e49c..c97536f7dd96d 100644
--- a/PCbuild/regen.vcxproj
+++ b/PCbuild/regen.vcxproj
@@ -168,7 +168,8 @@
</Target>
<Target Name="_RegenPegen" BeforeTargets="Build">
<!-- Regenerate Parser/pegen/parse.c -->
- <Exec Command=""$PYTHONPATH=$(srcdir)/Tools/peg_generator" "$(PythonExe)" -m pegen -q c "$(PySourcePath)Grammar\python.gram" "$(PySourcePath)Grammar\Tokens" -o "$(IntDir)parse.c"" />
+ <SetEnv Name="PYTHONPATH" Prefix="true" Value="$(PySourcePath)Tools\peg_generator\" />
+ <Exec Command=""$(PythonExe)" -m pegen -q c "$(PySourcePath)Grammar\python.gram" "$(PySourcePath)Grammar\Tokens" -o "$(IntDir)parse.c"" />
<Copy SourceFiles="$(IntDir)parse.c" DestinationFiles="$(PySourcePath)Parser\pegen\parse.c">
<Output TaskParameter="CopiedFiles" ItemName="_UpdatedParse" />
</Copy>
bpo-40275: Move transient_internet from test.support to socket_helper (GH-19711)
by Serhiy Storchaka, April 29, 2020
https://github.com/python/cpython/commit/bfb1cf44658934cbcd9707fb717d6770c7…
commit: bfb1cf44658934cbcd9707fb717d6770c78fbeb3
branch: master
author: Serhiy Storchaka <storchaka(a)gmail.com>
committer: GitHub <noreply(a)github.com>
date: 2020-04-29T10:36:20+03:00
summary:
bpo-40275: Move transient_internet from test.support to socket_helper (GH-19711)
files:
M Doc/library/test.rst
M Lib/test/support/__init__.py
M Lib/test/support/socket_helper.py
M Lib/test/test_httplib.py
M Lib/test/test_imaplib.py
M Lib/test/test_nntplib.py
M Lib/test/test_robotparser.py
M Lib/test/test_site.py
M Lib/test/test_smtpnet.py
M Lib/test/test_socket.py
M Lib/test/test_ssl.py
M Lib/test/test_timeout.py
M Lib/test/test_urllib2.py
M Lib/test/test_urllib2net.py
M Lib/test/test_urllibnet.py
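
Before the diff itself, a minimal usage sketch of the relocated helper: after this change, networked tests import transient_internet from test.support.socket_helper instead of test.support. The host and request below are illustrative assumptions; the point is that transient network failures surface as ResourceDenied (skipping the test) rather than as test failures.

# Sketch assuming CPython's test package is importable (it ships with CPython).
import urllib.request
from test.support import socket_helper

def check_python_org_reachable():
    # Flaky-network errors inside this block are converted into
    # test.support.ResourceDenied so the calling test is skipped, not failed.
    with socket_helper.transient_internet('www.python.org'):
        with urllib.request.urlopen('http://www.python.org/', timeout=30) as resp:
            return resp.status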
diff --git a/Doc/library/test.rst b/Doc/library/test.rst
index c2aaecc183e77..f7e6eba018161 100644
--- a/Doc/library/test.rst
+++ b/Doc/library/test.rst
@@ -314,7 +314,7 @@ The :mod:`test.support` module defines the following constants:
Usually, a timeout using :data:`INTERNET_TIMEOUT` should not mark a test as
failed, but skip the test instead: see
- :func:`~test.support.transient_internet`.
+ :func:`~test.support.socket_helper.transient_internet`.
Its default value is 1 minute.
@@ -759,12 +759,6 @@ The :mod:`test.support` module defines the following functions:
A context manager that temporarily sets the process umask.
-.. function:: transient_internet(resource_name, *, timeout=30.0, errnos=())
-
- A context manager that raises :exc:`ResourceDenied` when various issues
- with the internet connection manifest themselves as exceptions.
-
-
.. function:: disable_faulthandler()
A context manager that replaces ``sys.stderr`` with ``sys.__stderr__``.
@@ -1488,6 +1482,13 @@ The :mod:`test.support.socket_helper` module provides support for socket tests.
sockets.
+.. function:: transient_internet(resource_name, *, timeout=30.0, errnos=())
+
+ A context manager that raises :exc:`~test.support.ResourceDenied` when
+ various issues with the internet connection manifest themselves as
+ exceptions.
+
+
:mod:`test.support.script_helper` --- Utilities for the Python execution tests
==============================================================================
diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py
index ee5882f237cfc..bd2157496fe00 100644
--- a/Lib/test/support/__init__.py
+++ b/Lib/test/support/__init__.py
@@ -78,7 +78,7 @@
"requires_linux_version", "requires_mac_ver",
"check_syntax_error", "check_syntax_warning",
"TransientResource", "time_out", "socket_peer_reset", "ioerror_peer_reset",
- "transient_internet", "BasicTestRunner", "run_unittest", "run_doctest",
+ "BasicTestRunner", "run_unittest", "run_doctest",
"skip_unless_symlink", "requires_gzip", "requires_bz2", "requires_lzma",
"bigmemtest", "bigaddrspacetest", "cpython_only", "get_attribute",
"requires_IEEE_754", "skip_unless_xattr", "requires_zlib",
@@ -144,8 +144,6 @@
# option.
LONG_TIMEOUT = 5 * 60.0
-_NOT_SET = object()
-
class Error(Exception):
"""Base class for regression test exceptions."""
@@ -1386,90 +1384,6 @@ def __exit__(self, type_=None, value=None, traceback=None):
ioerror_peer_reset = TransientResource(OSError, errno=errno.ECONNRESET)
-@contextlib.contextmanager
-def transient_internet(resource_name, *, timeout=_NOT_SET, errnos=()):
- """Return a context manager that raises ResourceDenied when various issues
- with the Internet connection manifest themselves as exceptions."""
- import socket
- import nntplib
- import urllib.error
- if timeout is _NOT_SET:
- timeout = INTERNET_TIMEOUT
-
- default_errnos = [
- ('ECONNREFUSED', 111),
- ('ECONNRESET', 104),
- ('EHOSTUNREACH', 113),
- ('ENETUNREACH', 101),
- ('ETIMEDOUT', 110),
- # socket.create_connection() fails randomly with
- # EADDRNOTAVAIL on Travis CI.
- ('EADDRNOTAVAIL', 99),
- ]
- default_gai_errnos = [
- ('EAI_AGAIN', -3),
- ('EAI_FAIL', -4),
- ('EAI_NONAME', -2),
- ('EAI_NODATA', -5),
- # Encountered when trying to resolve IPv6-only hostnames
- ('WSANO_DATA', 11004),
- ]
-
- denied = ResourceDenied("Resource %r is not available" % resource_name)
- captured_errnos = errnos
- gai_errnos = []
- if not captured_errnos:
- captured_errnos = [getattr(errno, name, num)
- for (name, num) in default_errnos]
- gai_errnos = [getattr(socket, name, num)
- for (name, num) in default_gai_errnos]
-
- def filter_error(err):
- n = getattr(err, 'errno', None)
- if (isinstance(err, socket.timeout) or
- (isinstance(err, socket.gaierror) and n in gai_errnos) or
- (isinstance(err, urllib.error.HTTPError) and
- 500 <= err.code <= 599) or
- (isinstance(err, urllib.error.URLError) and
- (("ConnectionRefusedError" in err.reason) or
- ("TimeoutError" in err.reason) or
- ("EOFError" in err.reason))) or
- n in captured_errnos):
- if not verbose:
- sys.stderr.write(denied.args[0] + "\n")
- raise denied from err
-
- old_timeout = socket.getdefaulttimeout()
- try:
- if timeout is not None:
- socket.setdefaulttimeout(timeout)
- yield
- except nntplib.NNTPTemporaryError as err:
- if verbose:
- sys.stderr.write(denied.args[0] + "\n")
- raise denied from err
- except OSError as err:
- # urllib can wrap original socket errors multiple times (!), we must
- # unwrap to get at the original error.
- while True:
- a = err.args
- if len(a) >= 1 and isinstance(a[0], OSError):
- err = a[0]
- # The error can also be wrapped as args[1]:
- # except socket.error as msg:
- # raise OSError('socket error', msg).with_traceback(sys.exc_info()[2])
- elif len(a) >= 2 and isinstance(a[1], OSError):
- err = a[1]
- else:
- break
- filter_error(err)
- raise
- # XXX should we catch generic exceptions and look for their
- # __cause__ or __context__?
- finally:
- socket.setdefaulttimeout(old_timeout)
-
-
@contextlib.contextmanager
def captured_output(stream_name):
"""Return a context manager used by captured_stdout/stdin/stderr
diff --git a/Lib/test/support/socket_helper.py b/Lib/test/support/socket_helper.py
index 5f4a7f19a3223..b09c248cfccdf 100644
--- a/Lib/test/support/socket_helper.py
+++ b/Lib/test/support/socket_helper.py
@@ -1,7 +1,11 @@
+import contextlib
import errno
import socket
import unittest
+from .. import support
+
+
HOST = "localhost"
HOSTv4 = "127.0.0.1"
HOSTv6 = "::1"
@@ -175,3 +179,88 @@ def get_socket_conn_refused_errs():
if not IPV6_ENABLED:
errors.append(errno.EAFNOSUPPORT)
return errors
+
+
+_NOT_SET = object()
+
+@contextlib.contextmanager
+def transient_internet(resource_name, *, timeout=_NOT_SET, errnos=()):
+ """Return a context manager that raises ResourceDenied when various issues
+ with the Internet connection manifest themselves as exceptions."""
+ import nntplib
+ import urllib.error
+ if timeout is _NOT_SET:
+ timeout = support.INTERNET_TIMEOUT
+
+ default_errnos = [
+ ('ECONNREFUSED', 111),
+ ('ECONNRESET', 104),
+ ('EHOSTUNREACH', 113),
+ ('ENETUNREACH', 101),
+ ('ETIMEDOUT', 110),
+ # socket.create_connection() fails randomly with
+ # EADDRNOTAVAIL on Travis CI.
+ ('EADDRNOTAVAIL', 99),
+ ]
+ default_gai_errnos = [
+ ('EAI_AGAIN', -3),
+ ('EAI_FAIL', -4),
+ ('EAI_NONAME', -2),
+ ('EAI_NODATA', -5),
+ # Encountered when trying to resolve IPv6-only hostnames
+ ('WSANO_DATA', 11004),
+ ]
+
+ denied = support.ResourceDenied("Resource %r is not available" % resource_name)
+ captured_errnos = errnos
+ gai_errnos = []
+ if not captured_errnos:
+ captured_errnos = [getattr(errno, name, num)
+ for (name, num) in default_errnos]
+ gai_errnos = [getattr(socket, name, num)
+ for (name, num) in default_gai_errnos]
+
+ def filter_error(err):
+ n = getattr(err, 'errno', None)
+ if (isinstance(err, socket.timeout) or
+ (isinstance(err, socket.gaierror) and n in gai_errnos) or
+ (isinstance(err, urllib.error.HTTPError) and
+ 500 <= err.code <= 599) or
+ (isinstance(err, urllib.error.URLError) and
+ (("ConnectionRefusedError" in err.reason) or
+ ("TimeoutError" in err.reason) or
+ ("EOFError" in err.reason))) or
+ n in captured_errnos):
+ if not support.verbose:
+ sys.stderr.write(denied.args[0] + "\n")
+ raise denied from err
+
+ old_timeout = socket.getdefaulttimeout()
+ try:
+ if timeout is not None:
+ socket.setdefaulttimeout(timeout)
+ yield
+ except nntplib.NNTPTemporaryError as err:
+ if support.verbose:
+ sys.stderr.write(denied.args[0] + "\n")
+ raise denied from err
+ except OSError as err:
+ # urllib can wrap original socket errors multiple times (!), we must
+ # unwrap to get at the original error.
+ while True:
+ a = err.args
+ if len(a) >= 1 and isinstance(a[0], OSError):
+ err = a[0]
+ # The error can also be wrapped as args[1]:
+ # except socket.error as msg:
+ # raise OSError('socket error', msg).with_traceback(sys.exc_info()[2])
+ elif len(a) >= 2 and isinstance(a[1], OSError):
+ err = a[1]
+ else:
+ break
+ filter_error(err)
+ raise
+ # XXX should we catch generic exceptions and look for their
+ # __cause__ or __context__?
+ finally:
+ socket.setdefaulttimeout(old_timeout)
diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py
index 6b7a9dedf1a2a..e95487bcd45db 100644
--- a/Lib/test/test_httplib.py
+++ b/Lib/test/test_httplib.py
@@ -1629,7 +1629,7 @@ def test_networked(self):
# Default settings: requires a valid cert from a trusted CA
import ssl
support.requires('network')
- with support.transient_internet('self-signed.pythontest.net'):
+ with socket_helper.transient_internet('self-signed.pythontest.net'):
h = client.HTTPSConnection('self-signed.pythontest.net', 443)
with self.assertRaises(ssl.SSLError) as exc_info:
h.request('GET', '/')
@@ -1639,7 +1639,7 @@ def test_networked_noverification(self):
# Switch off cert verification
import ssl
support.requires('network')
- with support.transient_internet('self-signed.pythontest.net'):
+ with socket_helper.transient_internet('self-signed.pythontest.net'):
context = ssl._create_unverified_context()
h = client.HTTPSConnection('self-signed.pythontest.net', 443,
context=context)
@@ -1653,7 +1653,7 @@ def test_networked_noverification(self):
def test_networked_trusted_by_default_cert(self):
# Default settings: requires a valid cert from a trusted CA
support.requires('network')
- with support.transient_internet('www.python.org'):
+ with socket_helper.transient_internet('www.python.org'):
h = client.HTTPSConnection('www.python.org', 443)
h.request('GET', '/')
resp = h.getresponse()
@@ -1667,7 +1667,7 @@ def test_networked_good_cert(self):
import ssl
support.requires('network')
selfsigned_pythontestdotnet = 'self-signed.pythontest.net'
- with support.transient_internet(selfsigned_pythontestdotnet):
+ with socket_helper.transient_internet(selfsigned_pythontestdotnet):
context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
self.assertEqual(context.verify_mode, ssl.CERT_REQUIRED)
self.assertEqual(context.check_hostname, True)
@@ -1699,7 +1699,7 @@ def test_networked_bad_cert(self):
# We feed a "CA" cert that is unrelated to the server's cert
import ssl
support.requires('network')
- with support.transient_internet('self-signed.pythontest.net'):
+ with socket_helper.transient_internet('self-signed.pythontest.net'):
context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
context.load_verify_locations(CERT_localhost)
h = client.HTTPSConnection('self-signed.pythontest.net', 443, context=context)
diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py
index ce601565cf1a6..d1e3550868059 100644
--- a/Lib/test/test_imaplib.py
+++ b/Lib/test/test_imaplib.py
@@ -10,7 +10,7 @@
import threading
import socket
-from test.support import (reap_threads, verbose, transient_internet,
+from test.support import (reap_threads, verbose,
run_with_tz, run_with_locale, cpython_only)
from test.support import hashlib_helper
import unittest
@@ -968,16 +968,16 @@ class RemoteIMAPTest(unittest.TestCase):
imap_class = imaplib.IMAP4
def setUp(self):
- with transient_internet(self.host):
+ with socket_helper.transient_internet(self.host):
self.server = self.imap_class(self.host, self.port)
def tearDown(self):
if self.server is not None:
- with transient_internet(self.host):
+ with socket_helper.transient_internet(self.host):
self.server.logout()
def test_logincapa(self):
- with transient_internet(self.host):
+ with socket_helper.transient_internet(self.host):
for cap in self.server.capabilities:
self.assertIsInstance(cap, str)
self.assertIn('LOGINDISABLED', self.server.capabilities)
@@ -986,7 +986,7 @@ def test_logincapa(self):
self.assertEqual(rs[0], 'OK')
def test_logout(self):
- with transient_internet(self.host):
+ with socket_helper.transient_internet(self.host):
rs = self.server.logout()
self.server = None
self.assertEqual(rs[0], 'BYE', rs)
@@ -999,7 +999,7 @@ class RemoteIMAP_STARTTLSTest(RemoteIMAPTest):
def setUp(self):
super().setUp()
- with transient_internet(self.host):
+ with socket_helper.transient_internet(self.host):
rs = self.server.starttls()
self.assertEqual(rs[0], 'OK')
@@ -1039,24 +1039,24 @@ def check_logincapa(self, server):
server.logout()
def test_logincapa(self):
- with transient_internet(self.host):
+ with socket_helper.transient_internet(self.host):
_server = self.imap_class(self.host, self.port)
self.check_logincapa(_server)
def test_logout(self):
- with transient_internet(self.host):
+ with socket_helper.transient_internet(self.host):
_server = self.imap_class(self.host, self.port)
rs = _server.logout()
self.assertEqual(rs[0], 'BYE', rs)
def test_ssl_context_certfile_exclusive(self):
- with transient_internet(self.host):
+ with socket_helper.transient_internet(self.host):
self.assertRaises(
ValueError, self.imap_class, self.host, self.port,
certfile=CERTFILE, ssl_context=self.create_ssl_context())
def test_ssl_context_keyfile_exclusive(self):
- with transient_internet(self.host):
+ with socket_helper.transient_internet(self.host):
self.assertRaises(
ValueError, self.imap_class, self.host, self.port,
keyfile=CERTFILE, ssl_context=self.create_ssl_context())
diff --git a/Lib/test/test_nntplib.py b/Lib/test/test_nntplib.py
index 2a5a0b97eea63..8d296818e64f1 100644
--- a/Lib/test/test_nntplib.py
+++ b/Lib/test/test_nntplib.py
@@ -246,7 +246,7 @@ def wrap_methods(cls):
def wrap_meth(meth):
@functools.wraps(meth)
def wrapped(self):
- with support.transient_internet(self.NNTP_HOST):
+ with socket_helper.transient_internet(self.NNTP_HOST):
meth(self)
return wrapped
for name in dir(cls):
@@ -315,7 +315,7 @@ class NetworkedNNTPTests(NetworkedNNTPTestsMixin, unittest.TestCase):
@classmethod
def setUpClass(cls):
support.requires("network")
- with support.transient_internet(cls.NNTP_HOST):
+ with socket_helper.transient_internet(cls.NNTP_HOST):
try:
cls.server = cls.NNTP_CLASS(cls.NNTP_HOST,
timeout=support.INTERNET_TIMEOUT,
diff --git a/Lib/test/test_robotparser.py b/Lib/test/test_robotparser.py
index 9d4764ece2fd2..a3112b8fdf473 100644
--- a/Lib/test/test_robotparser.py
+++ b/Lib/test/test_robotparser.py
@@ -349,7 +349,7 @@ class NetworkTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
support.requires('network')
- with support.transient_internet(cls.base_url):
+ with socket_helper.transient_internet(cls.base_url):
cls.parser = urllib.robotparser.RobotFileParser(cls.robots_txt)
cls.parser.read()
diff --git a/Lib/test/test_site.py b/Lib/test/test_site.py
index 12e357cd9ba69..957e7a41d5466 100644
--- a/Lib/test/test_site.py
+++ b/Lib/test/test_site.py
@@ -7,6 +7,7 @@
import unittest
import test.support
from test import support
+from test.support import socket_helper
from test.support import (captured_stderr, TESTFN, EnvironmentVarGuard,
change_cwd)
import builtins
@@ -509,7 +510,7 @@ def test_license_exists_at_url(self):
url = license._Printer__data.split()[1]
req = urllib.request.Request(url, method='HEAD')
try:
- with test.support.transient_internet(url):
+ with socket_helper.transient_internet(url):
with urllib.request.urlopen(req) as data:
code = data.getcode()
except urllib.error.HTTPError as e:
diff --git a/Lib/test/test_smtpnet.py b/Lib/test/test_smtpnet.py
index b69cd9de62724..74a00a9d7cc58 100644
--- a/Lib/test/test_smtpnet.py
+++ b/Lib/test/test_smtpnet.py
@@ -1,5 +1,6 @@
import unittest
from test import support
+from test.support import socket_helper
import smtplib
import socket
@@ -28,7 +29,7 @@ def test_connect_starttls(self):
context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
context.check_hostname = False
context.verify_mode = ssl.CERT_NONE
- with support.transient_internet(self.testServer):
+ with socket_helper.transient_internet(self.testServer):
server = smtplib.SMTP(self.testServer, self.remotePort)
try:
server.starttls(context=context)
@@ -47,14 +48,14 @@ class SmtpSSLTest(unittest.TestCase):
def test_connect(self):
support.get_attribute(smtplib, 'SMTP_SSL')
- with support.transient_internet(self.testServer):
+ with socket_helper.transient_internet(self.testServer):
server = smtplib.SMTP_SSL(self.testServer, self.remotePort)
server.ehlo()
server.quit()
def test_connect_default_port(self):
support.get_attribute(smtplib, 'SMTP_SSL')
- with support.transient_internet(self.testServer):
+ with socket_helper.transient_internet(self.testServer):
server = smtplib.SMTP_SSL(self.testServer)
server.ehlo()
server.quit()
@@ -64,20 +65,20 @@ def test_connect_using_sslcontext(self):
context.check_hostname = False
context.verify_mode = ssl.CERT_NONE
support.get_attribute(smtplib, 'SMTP_SSL')
- with support.transient_internet(self.testServer):
+ with socket_helper.transient_internet(self.testServer):
server = smtplib.SMTP_SSL(self.testServer, self.remotePort, context=context)
server.ehlo()
server.quit()
def test_connect_using_sslcontext_verified(self):
- with support.transient_internet(self.testServer):
+ with socket_helper.transient_internet(self.testServer):
can_verify = check_ssl_verifiy(self.testServer, self.remotePort)
if not can_verify:
self.skipTest("SSL certificate can't be verified")
support.get_attribute(smtplib, 'SMTP_SSL')
context = ssl.create_default_context()
- with support.transient_internet(self.testServer):
+ with socket_helper.transient_internet(self.testServer):
server = smtplib.SMTP_SSL(self.testServer, self.remotePort, context=context)
server.ehlo()
server.quit()
diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py
index a70e28219ed23..87ae2e127a236 100755
--- a/Lib/test/test_socket.py
+++ b/Lib/test/test_socket.py
@@ -1533,7 +1533,7 @@ def test_getnameinfo(self):
def test_idna(self):
# Check for internet access before running test
# (issue #12804, issue #25138).
- with support.transient_internet('python.org'):
+ with socket_helper.transient_internet('python.org'):
socket.gethostbyname('python.org')
# these should all be successful
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
index dafdb6c08092b..5d496c6687614 100644
--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -2246,7 +2246,7 @@ class NetworkedTests(unittest.TestCase):
def test_timeout_connect_ex(self):
# Issue #12065: on a timeout, connect_ex() should return the original
# errno (mimicking the behaviour of non-SSL sockets).
- with support.transient_internet(REMOTE_HOST):
+ with socket_helper.transient_internet(REMOTE_HOST):
s = test_wrap_socket(socket.socket(socket.AF_INET),
cert_reqs=ssl.CERT_REQUIRED,
do_handshake_on_connect=False)
@@ -2259,7 +2259,7 @@ def test_timeout_connect_ex(self):
@unittest.skipUnless(socket_helper.IPV6_ENABLED, 'Needs IPv6')
def test_get_server_certificate_ipv6(self):
- with support.transient_internet('ipv6.google.com'):
+ with socket_helper.transient_internet('ipv6.google.com'):
_test_get_server_certificate(self, 'ipv6.google.com', 443)
_test_get_server_certificate_fail(self, 'ipv6.google.com', 443)
diff --git a/Lib/test/test_timeout.py b/Lib/test/test_timeout.py
index c0952c75e9913..ac803f5d63823 100644
--- a/Lib/test/test_timeout.py
+++ b/Lib/test/test_timeout.py
@@ -20,7 +20,7 @@ def resolve_address(host, port):
We must perform name resolution before timeout tests, otherwise it will be
performed by connect().
"""
- with support.transient_internet(host):
+ with socket_helper.transient_internet(host):
return socket.getaddrinfo(host, port, socket.AF_INET,
socket.SOCK_STREAM)[0][4]
@@ -230,12 +230,12 @@ def testConnectTimeout(self):
# All that hard work just to test if connect times out in 0.001s ;-)
self.addr_remote = blackhole
- with support.transient_internet(self.addr_remote[0]):
+ with socket_helper.transient_internet(self.addr_remote[0]):
self._sock_operation(1, 0.001, 'connect', self.addr_remote)
def testRecvTimeout(self):
# Test recv() timeout
- with support.transient_internet(self.addr_remote[0]):
+ with socket_helper.transient_internet(self.addr_remote[0]):
self.sock.connect(self.addr_remote)
self._sock_operation(1, 1.5, 'recv', 1024)
diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py
index e69ac3e2136a2..cbfa9ba60c2c8 100644
--- a/Lib/test/test_urllib2.py
+++ b/Lib/test/test_urllib2.py
@@ -1,5 +1,6 @@
import unittest
from test import support
+from test.support import socket_helper
from test import test_urllib
import os
@@ -1776,7 +1777,7 @@ class MyOtherHTTPHandler(urllib.request.HTTPHandler):
@unittest.skipUnless(support.is_resource_enabled('network'),
'test requires network access')
def test_issue16464(self):
- with support.transient_internet("http://www.example.com/"):
+ with socket_helper.transient_internet("http://www.example.com/"):
opener = urllib.request.build_opener()
request = urllib.request.Request("http://www.example.com/")
self.assertEqual(None, request.data)
diff --git a/Lib/test/test_urllib2net.py b/Lib/test/test_urllib2net.py
index b3a5e8974df32..ba4c500e8ec3e 100644
--- a/Lib/test/test_urllib2net.py
+++ b/Lib/test/test_urllib2net.py
@@ -1,5 +1,6 @@
import unittest
from test import support
+from test.support import socket_helper
from test.test_urllib2 import sanepathname2url
import os
@@ -86,7 +87,7 @@ def test_close(self):
# calling .close() on urllib2's response objects should close the
# underlying socket
url = support.TEST_HTTP_URL
- with support.transient_internet(url):
+ with socket_helper.transient_internet(url):
response = _urlopen_with_retry(url)
sock = response.fp
self.assertFalse(sock.closed)
@@ -159,7 +160,7 @@ def test_file(self):
def test_urlwithfrag(self):
urlwith_frag = "http://www.pythontest.net/index.html#frag"
- with support.transient_internet(urlwith_frag):
+ with socket_helper.transient_internet(urlwith_frag):
req = urllib.request.Request(urlwith_frag)
res = urllib.request.urlopen(req)
self.assertEqual(res.geturl(),
@@ -167,7 +168,7 @@ def test_urlwithfrag(self):
def test_redirect_url_withfrag(self):
redirect_url_with_frag = "http://www.pythontest.net/redir/with_frag/"
- with support.transient_internet(redirect_url_with_frag):
+ with socket_helper.transient_internet(redirect_url_with_frag):
req = urllib.request.Request(redirect_url_with_frag)
res = urllib.request.urlopen(req)
self.assertEqual(res.geturl(),
@@ -175,7 +176,7 @@ def test_redirect_url_withfrag(self):
def test_custom_headers(self):
url = support.TEST_HTTP_URL
- with support.transient_internet(url):
+ with socket_helper.transient_internet(url):
opener = urllib.request.build_opener()
request = urllib.request.Request(url)
self.assertFalse(request.header_items())
@@ -193,7 +194,7 @@ def test_sites_no_connection_close(self):
URL = 'http://www.imdb.com' # mangles Connection:close
- with support.transient_internet(URL):
+ with socket_helper.transient_internet(URL):
try:
with urllib.request.urlopen(URL) as res:
pass
@@ -223,7 +224,7 @@ def _test_urls(self, urls, handlers, retry=True):
else:
req = expected_err = None
- with support.transient_internet(url):
+ with socket_helper.transient_internet(url):
try:
f = urlopen(url, req, support.INTERNET_TIMEOUT)
# urllib.error.URLError is a subclass of OSError
@@ -265,7 +266,7 @@ def setUp(self):
def test_http_basic(self):
self.assertIsNone(socket.getdefaulttimeout())
url = support.TEST_HTTP_URL
- with support.transient_internet(url, timeout=None):
+ with socket_helper.transient_internet(url, timeout=None):
u = _urlopen_with_retry(url)
self.addCleanup(u.close)
self.assertIsNone(u.fp.raw._sock.gettimeout())
@@ -273,7 +274,7 @@ def test_http_basic(self):
def test_http_default_timeout(self):
self.assertIsNone(socket.getdefaulttimeout())
url = support.TEST_HTTP_URL
- with support.transient_internet(url):
+ with socket_helper.transient_internet(url):
socket.setdefaulttimeout(60)
try:
u = _urlopen_with_retry(url)
@@ -285,7 +286,7 @@ def test_http_default_timeout(self):
def test_http_no_timeout(self):
self.assertIsNone(socket.getdefaulttimeout())
url = support.TEST_HTTP_URL
- with support.transient_internet(url):
+ with socket_helper.transient_internet(url):
socket.setdefaulttimeout(60)
try:
u = _urlopen_with_retry(url, timeout=None)
@@ -296,7 +297,7 @@ def test_http_no_timeout(self):
def test_http_timeout(self):
url = support.TEST_HTTP_URL
- with support.transient_internet(url):
+ with socket_helper.transient_internet(url):
u = _urlopen_with_retry(url, timeout=120)
self.addCleanup(u.close)
self.assertEqual(u.fp.raw._sock.gettimeout(), 120)
@@ -306,7 +307,7 @@ def test_http_timeout(self):
@skip_ftp_test_on_travis
def test_ftp_basic(self):
self.assertIsNone(socket.getdefaulttimeout())
- with support.transient_internet(self.FTP_HOST, timeout=None):
+ with socket_helper.transient_internet(self.FTP_HOST, timeout=None):
u = _urlopen_with_retry(self.FTP_HOST)
self.addCleanup(u.close)
self.assertIsNone(u.fp.fp.raw._sock.gettimeout())
@@ -314,7 +315,7 @@ def test_ftp_basic(self):
@skip_ftp_test_on_travis
def test_ftp_default_timeout(self):
self.assertIsNone(socket.getdefaulttimeout())
- with support.transient_internet(self.FTP_HOST):
+ with socket_helper.transient_internet(self.FTP_HOST):
socket.setdefaulttimeout(60)
try:
u = _urlopen_with_retry(self.FTP_HOST)
@@ -326,7 +327,7 @@ def test_ftp_default_timeout(self):
@skip_ftp_test_on_travis
def test_ftp_no_timeout(self):
self.assertIsNone(socket.getdefaulttimeout())
- with support.transient_internet(self.FTP_HOST):
+ with socket_helper.transient_internet(self.FTP_HOST):
socket.setdefaulttimeout(60)
try:
u = _urlopen_with_retry(self.FTP_HOST, timeout=None)
@@ -337,7 +338,7 @@ def test_ftp_no_timeout(self):
@skip_ftp_test_on_travis
def test_ftp_timeout(self):
- with support.transient_internet(self.FTP_HOST):
+ with socket_helper.transient_internet(self.FTP_HOST):
u = _urlopen_with_retry(self.FTP_HOST, timeout=60)
self.addCleanup(u.close)
self.assertEqual(u.fp.fp.raw._sock.gettimeout(), 60)
diff --git a/Lib/test/test_urllibnet.py b/Lib/test/test_urllibnet.py
index 422d529a70074..28680aa6b2405 100644
--- a/Lib/test/test_urllibnet.py
+++ b/Lib/test/test_urllibnet.py
@@ -1,5 +1,6 @@
import unittest
from test import support
+from test.support import socket_helper
import contextlib
import socket
@@ -27,7 +28,7 @@ def testURLread(self):
self.addCleanup(urllib.request.urlcleanup)
domain = urllib.parse.urlparse(support.TEST_HTTP_URL).netloc
- with support.transient_internet(domain):
+ with socket_helper.transient_internet(domain):
f = urllib.request.urlopen(support.TEST_HTTP_URL)
f.read()
@@ -56,7 +57,7 @@ def setUp(self):
@contextlib.contextmanager
def urlopen(self, *args, **kwargs):
resource = args[0]
- with support.transient_internet(resource):
+ with socket_helper.transient_internet(resource):
r = urllib.request.urlopen(*args, **kwargs)
try:
yield r
@@ -98,7 +99,7 @@ def test_geturl(self):
def test_getcode(self):
# test getcode() with the fancy opener to get 404 error codes
URL = self.url + "XXXinvalidXXX"
- with support.transient_internet(URL):
+ with socket_helper.transient_internet(URL):
with self.assertWarns(DeprecationWarning):
open_url = urllib.request.FancyURLopener().open(URL)
try:
@@ -156,7 +157,7 @@ def setUp(self):
@contextlib.contextmanager
def urlretrieve(self, *args, **kwargs):
resource = args[0]
- with support.transient_internet(resource):
+ with socket_helper.transient_internet(resource):
file_location, info = urllib.request.urlretrieve(*args, **kwargs)
try:
yield file_location, info
April 29, 2020
https://github.com/python/cpython/commit/cc011b5190b63f0be561ddec38fc4cd9e6…
commit: cc011b5190b63f0be561ddec38fc4cd9e60cbf6a
branch: 3.8
author: Kyle Stanley <aeros167(a)gmail.com>
committer: GitHub <noreply(a)github.com>
date: 2020-04-28T23:11:18-04:00
summary:
[3.8] bpo-40431: Fix syntax typo in turtledemo (GH-19777) (#19784)
[3.8] bpo-40431: Fix syntax typo in turtledemo (GH-19777)
* Addresses a syntax typo that mistakenly used an undefined string prefix due to a missing space.
(cherry picked from commit 49f70db83e2c62ad06805927f53f6c3e8f4b798e)
Co-authored-by: Miro Hrončok <miro(a)hroncok.cz>
files:
M Lib/turtledemo/__main__.py
diff --git a/Lib/turtledemo/__main__.py b/Lib/turtledemo/__main__.py
index 17fe9a75e1c5e..12be5098dad27 100644
--- a/Lib/turtledemo/__main__.py
+++ b/Lib/turtledemo/__main__.py
@@ -272,7 +272,7 @@ def configGUI(self, start, stop, clear, txt="", color="blue"):
self.stop_btn.config(state=stop,
bg="#d00" if stop == NORMAL else "#fca")
self.clear_btn.config(state=clear,
- bg="#d00" if clear == NORMAL else"#fca")
+ bg="#d00" if clear == NORMAL else "#fca")
self.output_lbl.config(text=txt, fg=color)
def makeLoadDemoMenu(self, master):
bpo-40428: Remove references to Py*_ClearFreeList in the docs (GH-19783)
by Zackery Spytz April 29, 2020
April 29, 2020
https://github.com/python/cpython/commit/bb4a585d903e7fe0a46ded8c2ee3f47435…
commit: bb4a585d903e7fe0a46ded8c2ee3f47435ad6a66
branch: master
author: Zackery Spytz <zspytz(a)gmail.com>
committer: GitHub <noreply(a)github.com>
date: 2020-04-29T04:41:56+02:00
summary:
bpo-40428: Remove references to Py*_ClearFreeList in the docs (GH-19783)
They were removed from the C API in commit
ae00a5a88534fd45939f86c12e038da9fa6f9ed6.
files:
M Doc/c-api/contextvars.rst
M Doc/c-api/dict.rst
M Doc/c-api/float.rst
M Doc/c-api/list.rst
M Doc/c-api/method.rst
M Doc/c-api/set.rst
M Doc/c-api/tuple.rst
diff --git a/Doc/c-api/contextvars.rst b/Doc/c-api/contextvars.rst
index 38256a3b0f2a0..9c088814314a8 100644
--- a/Doc/c-api/contextvars.rst
+++ b/Doc/c-api/contextvars.rst
@@ -101,11 +101,6 @@ Context object management functions:
current context for the current thread. Returns ``0`` on success,
and ``-1`` on error.
-.. c:function:: int PyContext_ClearFreeList()
-
- Clear the context variable free list. Return the total number of
- freed items. This function always succeeds.
-
Context variable functions:
diff --git a/Doc/c-api/dict.rst b/Doc/c-api/dict.rst
index e48c11d336b8c..2fb29cdd61778 100644
--- a/Doc/c-api/dict.rst
+++ b/Doc/c-api/dict.rst
@@ -232,10 +232,3 @@ Dictionary Objects
for key, value in seq2:
if override or key not in a:
a[key] = value
-
-
-.. c:function:: int PyDict_ClearFreeList()
-
- Clear the free list. Return the total number of freed items.
-
- .. versionadded:: 3.3
diff --git a/Doc/c-api/float.rst b/Doc/c-api/float.rst
index bfc28a79ecfdc..b29937dbecdcf 100644
--- a/Doc/c-api/float.rst
+++ b/Doc/c-api/float.rst
@@ -76,8 +76,3 @@ Floating Point Objects
.. c:function:: double PyFloat_GetMin()
Return the minimum normalized positive float *DBL_MIN* as C :c:type:`double`.
-
-.. c:function:: int PyFloat_ClearFreeList()
-
- Clear the float free list. Return the number of items that could not
- be freed.
diff --git a/Doc/c-api/list.rst b/Doc/c-api/list.rst
index b247cdfba0187..0bc0785f200d4 100644
--- a/Doc/c-api/list.rst
+++ b/Doc/c-api/list.rst
@@ -142,10 +142,3 @@ List Objects
Return a new tuple object containing the contents of *list*; equivalent to
``tuple(list)``.
-
-
-.. c:function:: int PyList_ClearFreeList()
-
- Clear the free list. Return the total number of freed items.
-
- .. versionadded:: 3.3
diff --git a/Doc/c-api/method.rst b/Doc/c-api/method.rst
index b1862d796c9f4..0a5341cbbdf15 100644
--- a/Doc/c-api/method.rst
+++ b/Doc/c-api/method.rst
@@ -92,9 +92,3 @@ no longer available.
.. c:function:: PyObject* PyMethod_GET_SELF(PyObject *meth)
Macro version of :c:func:`PyMethod_Self` which avoids error checking.
-
-
-.. c:function:: int PyMethod_ClearFreeList()
-
- Clear the free list. Return the total number of freed items.
-
diff --git a/Doc/c-api/set.rst b/Doc/c-api/set.rst
index 54819e8fd6cbd..879f394d966cd 100644
--- a/Doc/c-api/set.rst
+++ b/Doc/c-api/set.rst
@@ -157,10 +157,3 @@ subtypes but not for instances of :class:`frozenset` or its subtypes.
.. c:function:: int PySet_Clear(PyObject *set)
Empty an existing set of all elements.
-
-
-.. c:function:: int PySet_ClearFreeList()
-
- Clear the free list. Return the total number of freed items.
-
- .. versionadded:: 3.3
diff --git a/Doc/c-api/tuple.rst b/Doc/c-api/tuple.rst
index 62bc9a565071d..c14cb2d38fd54 100644
--- a/Doc/c-api/tuple.rst
+++ b/Doc/c-api/tuple.rst
@@ -111,11 +111,6 @@ Tuple Objects
raises :exc:`MemoryError` or :exc:`SystemError`.
-.. c:function:: int PyTuple_ClearFreeList()
-
- Clear the free list. Return the total number of freed items.
-
-
Struct Sequence Objects
-----------------------
bpo-40334: Disallow invalid single statements in the new parser (GH-19774)
by Lysandros Nikolaou April 29, 2020
April 29, 2020
https://github.com/python/cpython/commit/6d6508765514c7c10719478a0430f5e47c…
commit: 6d6508765514c7c10719478a0430f5e47c9a96ac
branch: master
author: Lysandros Nikolaou <lisandrosnik(a)gmail.com>
committer: GitHub <noreply(a)github.com>
date: 2020-04-29T02:42:27+01:00
summary:
bpo-40334: Disallow invalid single statements in the new parser (GH-19774)
After parsing is done in single statement mode, the tokenizer buffer has to be checked for additional lines and a `SyntaxError` must be raised, in case there are any.
Co-authored-by: Pablo Galindo <Pablogsal(a)gmail.com>
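As an illustration only (not part of the commit), the behaviour the new check enforces can be seen from Python with compile() in 'single' mode:

    # Two statements in single-statement mode must be rejected.
    try:
        compile("1\n2", "<stdin>", "single")
    except SyntaxError as exc:
        print(exc)  # multiple statements found while compiling a single statement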
files:
M Lib/test/test_compile.py
M Parser/pegen/pegen.c
diff --git a/Lib/test/test_compile.py b/Lib/test/test_compile.py
index a507ac0914918..566ca27fca893 100644
--- a/Lib/test/test_compile.py
+++ b/Lib/test/test_compile.py
@@ -501,7 +501,6 @@ def test_single_statement(self):
self.compile_single("if x:\n f(x)\nelse:\n g(x)")
self.compile_single("class T:\n pass")
- @support.skip_if_new_parser('Pegen does not disallow multiline single stmts')
def test_bad_single_statement(self):
self.assertInvalidSingle('1\n2')
self.assertInvalidSingle('def f(): pass')
diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c
index ef95aacb7f084..39da2709991b9 100644
--- a/Parser/pegen/pegen.c
+++ b/Parser/pegen/pegen.c
@@ -911,6 +911,52 @@ _PyPegen_number_token(Parser *p)
p->arena);
}
+static int // bool
+newline_in_string(Parser *p, const char *cur)
+{
+ for (char c = *cur; cur >= p->tok->buf; c = *--cur) {
+ if (c == '\'' || c == '"') {
+ return 1;
+ }
+ }
+ return 0;
+}
+
+/* Check that the source for a single input statement really is a single
+ statement by looking at what is left in the buffer after parsing.
+ Trailing whitespace and comments are OK. */
+static int // bool
+bad_single_statement(Parser *p)
+{
+ const char *cur = strchr(p->tok->buf, '\n');
+
+ /* Newlines are allowed if preceded by a line continuation character
+ or if they appear inside a string. */
+ if (!cur || *(cur - 1) == '\\' || newline_in_string(p, cur)) {
+ return 0;
+ }
+ char c = *cur;
+
+ for (;;) {
+ while (c == ' ' || c == '\t' || c == '\n' || c == '\014') {
+ c = *++cur;
+ }
+
+ if (!c) {
+ return 0;
+ }
+
+ if (c != '#') {
+ return 1;
+ }
+
+ /* Suck up comment. */
+ while (c && c != '\n') {
+ c = *++cur;
+ }
+ }
+}
+
void
_PyPegen_Parser_Free(Parser *p)
{
@@ -1014,6 +1060,11 @@ _PyPegen_run_parser(Parser *p)
return NULL;
}
+ if (p->start_rule == Py_single_input && bad_single_statement(p)) {
+ p->tok->done = E_BADSINGLE; // This is not necessary for now, but might be in the future
+ return RAISE_SYNTAX_ERROR("multiple statements found while compiling a single statement");
+ }
+
return res;
}
April 29, 2020
https://github.com/python/cpython/commit/a4dfe8ede5a37576e17035dccfe109ba77…
commit: a4dfe8ede5a37576e17035dccfe109ba7752237e
branch: master
author: Victor Stinner <vstinner(a)python.org>
committer: GitHub <noreply(a)github.com>
date: 2020-04-29T03:32:06+02:00
summary:
bpo-39995: Fix concurrent.futures _ThreadWakeup (GH-19760)
Fix a race condition in concurrent.futures._ThreadWakeup: access to
_ThreadWakeup is now protected with the shutdown lock.
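A minimal sketch of the locking pattern the diff applies; the class below is a simplified stand-in for the private _ThreadWakeup (which really writes to a pipe), shown only to make the guarded-access idea concrete:

    import threading

    class WakeupSketch:
        """Simplified stand-in for concurrent.futures.process._ThreadWakeup."""
        def __init__(self):
            self._closed = False

        def wakeup(self):
            # Safe only while the caller holds the shutdown lock.
            if not self._closed:
                pass  # the real object writes a byte to its wakeup pipe

        def close(self):
            self._closed = True

    shutdown_lock = threading.Lock()
    wakeup = WakeupSketch()
    with shutdown_lock:  # wakeup(), clear() and close() are now all guarded like this
        wakeup.wakeup()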
files:
A Misc/NEWS.d/next/Library/2020-04-28-18-25-27.bpo-39995.WmA3Gk.rst
M Lib/concurrent/futures/process.py
diff --git a/Lib/concurrent/futures/process.py b/Lib/concurrent/futures/process.py
index 8e9b69a8f08b4..a76e2c9cf231a 100644
--- a/Lib/concurrent/futures/process.py
+++ b/Lib/concurrent/futures/process.py
@@ -90,6 +90,7 @@ def _python_exit():
_global_shutdown = True
items = list(_threads_wakeups.items())
for _, thread_wakeup in items:
+ # call not protected by ProcessPoolExecutor._shutdown_lock
thread_wakeup.wakeup()
for t, _ in items:
t.join()
@@ -157,8 +158,10 @@ def __init__(self, work_id, fn, args, kwargs):
class _SafeQueue(Queue):
"""Safe Queue set exception to the future object linked to a job"""
- def __init__(self, max_size=0, *, ctx, pending_work_items, thread_wakeup):
+ def __init__(self, max_size=0, *, ctx, pending_work_items, shutdown_lock,
+ thread_wakeup):
self.pending_work_items = pending_work_items
+ self.shutdown_lock = shutdown_lock
self.thread_wakeup = thread_wakeup
super().__init__(max_size, ctx=ctx)
@@ -167,7 +170,8 @@ def _on_queue_feeder_error(self, e, obj):
tb = traceback.format_exception(type(e), e, e.__traceback__)
e.__cause__ = _RemoteTraceback('\n"""\n{}"""'.format(''.join(tb)))
work_item = self.pending_work_items.pop(obj.work_id, None)
- self.thread_wakeup.wakeup()
+ with self.shutdown_lock:
+ self.thread_wakeup.wakeup()
# work_item can be None if another process terminated. In this
# case, the executor_manager_thread fails all work_items
# with BrokenProcessPool
@@ -268,6 +272,7 @@ def __init__(self, executor):
# A _ThreadWakeup to allow waking up the queue_manager_thread from the
# main Thread and avoid deadlocks caused by permanently locked queues.
self.thread_wakeup = executor._executor_manager_thread_wakeup
+ self.shutdown_lock = executor._shutdown_lock
# A weakref.ref to the ProcessPoolExecutor that owns this thread. Used
# to determine if the ProcessPoolExecutor has been garbage collected
@@ -275,10 +280,13 @@ def __init__(self, executor):
# When the executor gets garbage collected, the weakref callback
# will wake up the queue management thread so that it can terminate
# if there is no pending work item.
- def weakref_cb(_, thread_wakeup=self.thread_wakeup):
+ def weakref_cb(_,
+ thread_wakeup=self.thread_wakeup,
+ shutdown_lock=self.shutdown_lock):
mp.util.debug('Executor collected: triggering callback for'
' QueueManager wakeup')
- thread_wakeup.wakeup()
+ with shutdown_lock:
+ thread_wakeup.wakeup()
self.executor_reference = weakref.ref(executor, weakref_cb)
@@ -363,6 +371,7 @@ def wait_result_broken_or_wakeup(self):
# submitted, from the executor being shutdown/gc-ed, or from the
# shutdown of the python interpreter.
result_reader = self.result_queue._reader
+ assert not self.thread_wakeup._closed
wakeup_reader = self.thread_wakeup._reader
readers = [result_reader, wakeup_reader]
worker_sentinels = [p.sentinel for p in self.processes.values()]
@@ -380,7 +389,9 @@ def wait_result_broken_or_wakeup(self):
elif wakeup_reader in ready:
is_broken = False
- self.thread_wakeup.clear()
+
+ with self.shutdown_lock:
+ self.thread_wakeup.clear()
return result_item, is_broken, cause
@@ -500,7 +511,8 @@ def join_executor_internals(self):
# Release the queue's resources as soon as possible.
self.call_queue.close()
self.call_queue.join_thread()
- self.thread_wakeup.close()
+ with self.shutdown_lock:
+ self.thread_wakeup.close()
# If .join() is not called on the created processes then
# some ctx.Queue methods may deadlock on Mac OS X.
for p in self.processes.values():
@@ -619,6 +631,8 @@ def __init__(self, max_workers=None, mp_context=None,
# _result_queue to send wakeup signals to the executor_manager_thread
# as it could result in a deadlock if a worker process dies with the
# _result_queue write lock still acquired.
+ #
+ # _shutdown_lock must be locked to access _ThreadWakeup.
self._executor_manager_thread_wakeup = _ThreadWakeup()
# Create communication channels for the executor
@@ -629,6 +643,7 @@ def __init__(self, max_workers=None, mp_context=None,
self._call_queue = _SafeQueue(
max_size=queue_size, ctx=self._mp_context,
pending_work_items=self._pending_work_items,
+ shutdown_lock=self._shutdown_lock,
thread_wakeup=self._executor_manager_thread_wakeup)
# Killed worker processes can produce spurious "broken pipe"
# tracebacks in the queue's own worker thread. But we detect killed
@@ -718,12 +733,12 @@ def shutdown(self, wait=True, *, cancel_futures=False):
with self._shutdown_lock:
self._cancel_pending_futures = cancel_futures
self._shutdown_thread = True
+ if self._executor_manager_thread_wakeup is not None:
+ # Wake up queue management thread
+ self._executor_manager_thread_wakeup.wakeup()
- if self._executor_manager_thread:
- # Wake up queue management thread
- self._executor_manager_thread_wakeup.wakeup()
- if wait:
- self._executor_manager_thread.join()
+ if self._executor_manager_thread is not None and wait:
+ self._executor_manager_thread.join()
# To reduce the risk of opening too many files, remove references to
# objects that use file descriptors.
self._executor_manager_thread = None
@@ -732,8 +747,6 @@ def shutdown(self, wait=True, *, cancel_futures=False):
self._result_queue.close()
self._result_queue = None
self._processes = None
-
- if self._executor_manager_thread_wakeup:
- self._executor_manager_thread_wakeup = None
+ self._executor_manager_thread_wakeup = None
shutdown.__doc__ = _base.Executor.shutdown.__doc__
diff --git a/Misc/NEWS.d/next/Library/2020-04-28-18-25-27.bpo-39995.WmA3Gk.rst b/Misc/NEWS.d/next/Library/2020-04-28-18-25-27.bpo-39995.WmA3Gk.rst
new file mode 100644
index 0000000000000..24aff65736337
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2020-04-28-18-25-27.bpo-39995.WmA3Gk.rst
@@ -0,0 +1,2 @@
+Fix a race condition in concurrent.futures._ThreadWakeup: access to
+_ThreadWakeup is now protected with the shutdown lock.
April 29, 2020
https://github.com/python/cpython/commit/703647732359200c54f1d2e695cc3a06b9…
commit: 703647732359200c54f1d2e695cc3a06b9a96c9a
branch: master
author: Victor Stinner <vstinner(a)python.org>
committer: GitHub <noreply(a)github.com>
date: 2020-04-29T03:28:46+02:00
summary:
bpo-40421: Add PyFrame_GetBack() function (GH-19765)
New PyFrame_GetBack() function: get the frame next outer frame.
Replace frame->f_back with PyFrame_GetBack(frame) in most code but
frameobject.c, ceval.c and genobject.c.
files:
A Misc/NEWS.d/next/C API/2020-04-28-19-29-36.bpo-40421.3uIIaB.rst
M Doc/c-api/reflection.rst
M Doc/whatsnew/3.9.rst
M Include/cpython/frameobject.h
M Modules/_tracemalloc.c
M Objects/frameobject.c
M Python/_warnings.c
M Python/sysmodule.c
M Python/traceback.c
diff --git a/Doc/c-api/reflection.rst b/Doc/c-api/reflection.rst
index 21d9878609127..9207d86012c8b 100644
--- a/Doc/c-api/reflection.rst
+++ b/Doc/c-api/reflection.rst
@@ -31,6 +31,17 @@ Reflection
See also :c:func:`PyThreadState_GetFrame`.
+.. c:function:: int PyFrame_GetBack(PyFrameObject *frame)
+
+ Get the *frame* next outer frame.
+
+ Return a strong reference, or ``NULL`` if *frame* has no outer frame.
+
+ *frame* must not be ``NULL``.
+
+ .. versionadded:: 3.9
+
+
.. c:function:: int PyFrame_GetCode(PyFrameObject *frame)
Get the *frame* code.
diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst
index e26bd473e6189..0edb11419c43c 100644
--- a/Doc/whatsnew/3.9.rst
+++ b/Doc/whatsnew/3.9.rst
@@ -538,6 +538,7 @@ Build and C API Changes
=======================
* New :c:func:`PyFrame_GetCode` function: get a frame code.
+ New :c:func:`PyFrame_GetBack` function: get the frame next outer frame.
(Contributed by Victor Stinner in :issue:`40421`.)
* Add :c:func:`PyFrame_GetLineNumber` to the limited C API.
diff --git a/Include/cpython/frameobject.h b/Include/cpython/frameobject.h
index e32efac594718..36a51baae8784 100644
--- a/Include/cpython/frameobject.h
+++ b/Include/cpython/frameobject.h
@@ -77,6 +77,8 @@ PyAPI_FUNC(void) PyFrame_FastToLocals(PyFrameObject *);
PyAPI_FUNC(void) _PyFrame_DebugMallocStats(FILE *out);
+PyAPI_FUNC(PyFrameObject *) PyFrame_GetBack(PyFrameObject *frame);
+
#ifdef __cplusplus
}
#endif
diff --git a/Misc/NEWS.d/next/C API/2020-04-28-19-29-36.bpo-40421.3uIIaB.rst b/Misc/NEWS.d/next/C API/2020-04-28-19-29-36.bpo-40421.3uIIaB.rst
new file mode 100644
index 0000000000000..aadfb339b1711
--- /dev/null
+++ b/Misc/NEWS.d/next/C API/2020-04-28-19-29-36.bpo-40421.3uIIaB.rst
@@ -0,0 +1 @@
+New :c:func:`PyFrame_GetBack` function: get the frame next outer frame.
diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c
index 6f28f7f5757fa..ea7e0127366ab 100644
--- a/Modules/_tracemalloc.c
+++ b/Modules/_tracemalloc.c
@@ -3,7 +3,7 @@
#include "pycore_pymem.h" // _Py_tracemalloc_config
#include "pycore_traceback.h"
#include "hashtable.h"
-#include "frameobject.h"
+#include "frameobject.h" // PyFrame_GetBack()
#include "clinic/_tracemalloc.c.h"
/*[clinic input]
@@ -434,15 +434,19 @@ traceback_get_frames(traceback_t *traceback)
}
PyFrameObject *pyframe = PyThreadState_GetFrame(tstate);
- Py_XDECREF(pyframe); // use a borrowed reference
- for (; pyframe != NULL; pyframe = pyframe->f_back) {
+ for (; pyframe != NULL;) {
if (traceback->nframe < _Py_tracemalloc_config.max_nframe) {
tracemalloc_get_frame(pyframe, &traceback->frames[traceback->nframe]);
assert(traceback->frames[traceback->nframe].filename != NULL);
traceback->nframe++;
}
- if (traceback->total_nframe < UINT16_MAX)
+ if (traceback->total_nframe < UINT16_MAX) {
traceback->total_nframe++;
+ }
+
+ PyFrameObject *back = PyFrame_GetBack(pyframe);
+ Py_DECREF(pyframe);
+ pyframe = back;
}
}
diff --git a/Objects/frameobject.c b/Objects/frameobject.c
index 6d288b5b059d7..451c895a77c6b 100644
--- a/Objects/frameobject.c
+++ b/Objects/frameobject.c
@@ -1237,3 +1237,13 @@ PyFrame_GetCode(PyFrameObject *frame)
Py_INCREF(code);
return code;
}
+
+
+PyFrameObject*
+PyFrame_GetBack(PyFrameObject *frame)
+{
+ assert(frame != NULL);
+ PyFrameObject *back = frame->f_back;
+ Py_XINCREF(back);
+ return back;
+}
diff --git a/Python/_warnings.c b/Python/_warnings.c
index 7c15ce0ef89c3..4d65bb30c8e5c 100644
--- a/Python/_warnings.c
+++ b/Python/_warnings.c
@@ -3,7 +3,7 @@
#include "pycore_interp.h" // PyInterpreterState.warnings
#include "pycore_pyerrors.h"
#include "pycore_pystate.h" // _PyThreadState_GET()
-#include "frameobject.h"
+#include "frameobject.h" // PyFrame_GetBack()
#include "clinic/_warnings.c.h"
#define MODULE_NAME "_warnings"
@@ -815,7 +815,9 @@ static PyFrameObject *
next_external_frame(PyFrameObject *frame)
{
do {
- frame = frame->f_back;
+ PyFrameObject *back = PyFrame_GetBack(frame);
+ Py_DECREF(frame);
+ frame = back;
} while (frame != NULL && is_internal_frame(frame));
return frame;
@@ -831,12 +833,15 @@ setup_context(Py_ssize_t stack_level, PyObject **filename, int *lineno,
PyObject *globals;
/* Setup globals, filename and lineno. */
- PyFrameObject *f = _PyThreadState_GET()->frame;
+ PyThreadState *tstate = _PyThreadState_GET();
+ PyFrameObject *f = PyThreadState_GetFrame(tstate);
// Stack level comparisons to Python code is off by one as there is no
// warnings-related stack level to avoid.
if (stack_level <= 0 || is_internal_frame(f)) {
while (--stack_level > 0 && f != NULL) {
- f = f->f_back;
+ PyFrameObject *back = PyFrame_GetBack(f);
+ Py_DECREF(f);
+ f = back;
}
}
else {
@@ -857,6 +862,7 @@ setup_context(Py_ssize_t stack_level, PyObject **filename, int *lineno,
Py_DECREF(code);
Py_INCREF(*filename);
*lineno = PyFrame_GetLineNumber(f);
+ Py_DECREF(f);
}
*module = NULL;
@@ -868,7 +874,7 @@ setup_context(Py_ssize_t stack_level, PyObject **filename, int *lineno,
if (*registry == NULL) {
int rc;
- if (PyErr_Occurred()) {
+ if (_PyErr_Occurred(tstate)) {
goto handle_error;
}
*registry = PyDict_New();
@@ -887,7 +893,7 @@ setup_context(Py_ssize_t stack_level, PyObject **filename, int *lineno,
if (*module == Py_None || (*module != NULL && PyUnicode_Check(*module))) {
Py_INCREF(*module);
}
- else if (PyErr_Occurred()) {
+ else if (_PyErr_Occurred(tstate)) {
goto handle_error;
}
else {
diff --git a/Python/sysmodule.c b/Python/sysmodule.c
index 92ea5e7d637b9..914beb7e127fe 100644
--- a/Python/sysmodule.c
+++ b/Python/sysmodule.c
@@ -16,7 +16,7 @@ Data members:
#include "Python.h"
#include "code.h"
-#include "frameobject.h"
+#include "frameobject.h" // PyFrame_GetBack()
#include "pycore_ceval.h" // _Py_RecursionLimitLowerWaterMark()
#include "pycore_initconfig.h"
#include "pycore_object.h"
@@ -1787,14 +1787,17 @@ sys__getframe_impl(PyObject *module, int depth)
/*[clinic end generated code: output=d438776c04d59804 input=c1be8a6464b11ee5]*/
{
PyThreadState *tstate = _PyThreadState_GET();
- PyFrameObject *f = tstate->frame;
+ PyFrameObject *f = PyThreadState_GetFrame(tstate);
if (_PySys_Audit(tstate, "sys._getframe", "O", f) < 0) {
+ Py_DECREF(f);
return NULL;
}
while (depth > 0 && f != NULL) {
- f = f->f_back;
+ PyFrameObject *back = PyFrame_GetBack(f);
+ Py_DECREF(f);
+ f = back;
--depth;
}
if (f == NULL) {
@@ -1802,7 +1805,6 @@ sys__getframe_impl(PyObject *module, int depth)
"call stack is not deep enough");
return NULL;
}
- Py_INCREF(f);
return (PyObject*)f;
}
diff --git a/Python/traceback.c b/Python/traceback.c
index 438a2c4fce7ca..99b63af11f8be 100644
--- a/Python/traceback.c
+++ b/Python/traceback.c
@@ -4,7 +4,7 @@
#include "Python.h"
#include "code.h"
-#include "frameobject.h"
+#include "frameobject.h" // PyFrame_GetBack()
#include "structmember.h" // PyMemberDef
#include "osdefs.h" // SEP
#ifdef HAVE_FCNTL_H
@@ -798,22 +798,31 @@ dump_traceback(int fd, PyThreadState *tstate, int write_header)
PUTS(fd, "Stack (most recent call first):\n");
}
- frame = tstate->frame;
+ frame = PyThreadState_GetFrame(tstate);
if (frame == NULL) {
PUTS(fd, "<no Python frame>\n");
return;
}
depth = 0;
- while (frame != NULL) {
+ while (1) {
if (MAX_FRAME_DEPTH <= depth) {
+ Py_DECREF(frame);
PUTS(fd, " ...\n");
break;
}
- if (!PyFrame_Check(frame))
+ if (!PyFrame_Check(frame)) {
+ Py_DECREF(frame);
break;
+ }
dump_frame(fd, frame);
- frame = frame->f_back;
+ PyFrameObject *back = PyFrame_GetBack(frame);
+ Py_DECREF(frame);
+
+ if (back == NULL) {
+ break;
+ }
+ frame = back;
depth++;
}
}
bpo-40275: Move requires_hashdigest() to test.support.hashlib_helper (GH-19716)
by Hai Shi April 29, 2020
April 29, 2020
https://github.com/python/cpython/commit/66abe98a816de84f89e2de4aa78cf09056…
commit: 66abe98a816de84f89e2de4aa78cf09056227c25
branch: master
author: Hai Shi <shihai1992(a)gmail.com>
committer: GitHub <noreply(a)github.com>
date: 2020-04-29T03:11:29+02:00
summary:
bpo-40275: Move requires_hashdigest() to test.support.hashlib_helper (GH-19716)
Add a new test.support.hashlib_helper submodule.
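A short usage sketch of the relocated decorator; the test class and method names below are hypothetical, only the import path and decorator signature come from the diff:

    import unittest
    from test.support import hashlib_helper

    class ExampleHashTest(unittest.TestCase):
        @hashlib_helper.requires_hashdigest('md5', openssl=True)
        def test_needs_openssl_md5(self):
            # Skipped automatically when MD5 is missing or blocked (e.g. FIPS policy).
            self.assertTrue(True)

    if __name__ == "__main__":
        unittest.main()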
files:
A Lib/test/support/hashlib_helper.py
M Lib/test/support/__init__.py
M Lib/test/test_hashlib.py
M Lib/test/test_hmac.py
M Lib/test/test_imaplib.py
M Lib/test/test_poplib.py
M Lib/test/test_smtplib.py
M Lib/test/test_tarfile.py
M Lib/test/test_urllib2_localnet.py
diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py
index f48decc704cb8..ee5882f237cfc 100644
--- a/Lib/test/support/__init__.py
+++ b/Lib/test/support/__init__.py
@@ -11,7 +11,6 @@
import functools
import gc
import glob
-import hashlib
import importlib
import importlib.util
import locale
@@ -59,11 +58,6 @@
except ImportError:
resource = None
-try:
- import _hashlib
-except ImportError:
- _hashlib = None
-
__all__ = [
# globals
"PIPE_MAX_SIZE", "verbose", "max_memuse", "use_resources", "failfast",
@@ -81,7 +75,7 @@
"create_empty_file", "can_symlink", "fs_is_case_insensitive",
# unittest
"is_resource_enabled", "requires", "requires_freebsd_version",
- "requires_linux_version", "requires_mac_ver", "requires_hashdigest",
+ "requires_linux_version", "requires_mac_ver",
"check_syntax_error", "check_syntax_warning",
"TransientResource", "time_out", "socket_peer_reset", "ioerror_peer_reset",
"transient_internet", "BasicTestRunner", "run_unittest", "run_doctest",
@@ -685,36 +679,6 @@ def wrapper(*args, **kw):
return decorator
-def requires_hashdigest(digestname, openssl=None, usedforsecurity=True):
- """Decorator raising SkipTest if a hashing algorithm is not available
-
- The hashing algorithm could be missing or blocked by a strict crypto
- policy.
-
- If 'openssl' is True, then the decorator checks that OpenSSL provides
- the algorithm. Otherwise the check falls back to built-in
- implementations. The usedforsecurity flag is passed to the constructor.
-
- ValueError: [digital envelope routines: EVP_DigestInit_ex] disabled for FIPS
- ValueError: unsupported hash type md4
- """
- def decorator(func):
- @functools.wraps(func)
- def wrapper(*args, **kwargs):
- try:
- if openssl and _hashlib is not None:
- _hashlib.new(digestname, usedforsecurity=usedforsecurity)
- else:
- hashlib.new(digestname, usedforsecurity=usedforsecurity)
- except ValueError:
- raise unittest.SkipTest(
- f"hash digest '{digestname}' is not available."
- )
- return func(*args, **kwargs)
- return wrapper
- return decorator
-
-
def system_must_validate_cert(f):
"""Skip the test on TLS certificate validation failures."""
@functools.wraps(f)
diff --git a/Lib/test/support/hashlib_helper.py b/Lib/test/support/hashlib_helper.py
new file mode 100644
index 0000000000000..a28132a565a0b
--- /dev/null
+++ b/Lib/test/support/hashlib_helper.py
@@ -0,0 +1,38 @@
+import functools
+import hashlib
+import unittest
+
+try:
+ import _hashlib
+except ImportError:
+ _hashlib = None
+
+
+def requires_hashdigest(digestname, openssl=None, usedforsecurity=True):
+ """Decorator raising SkipTest if a hashing algorithm is not available
+
+ The hashing algorithm could be missing or blocked by a strict crypto
+ policy.
+
+ If 'openssl' is True, then the decorator checks that OpenSSL provides
+ the algorithm. Otherwise the check falls back to built-in
+ implementations. The usedforsecurity flag is passed to the constructor.
+
+ ValueError: [digital envelope routines: EVP_DigestInit_ex] disabled for FIPS
+ ValueError: unsupported hash type md4
+ """
+ def decorator(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ try:
+ if openssl and _hashlib is not None:
+ _hashlib.new(digestname, usedforsecurity=usedforsecurity)
+ else:
+ hashlib.new(digestname, usedforsecurity=usedforsecurity)
+ except ValueError:
+ raise unittest.SkipTest(
+ f"hash digest '{digestname}' is not available."
+ )
+ return func(*args, **kwargs)
+ return wrapper
+ return decorator
diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py
index 0e30b2fb11f29..33b687e0b4086 100644
--- a/Lib/test/test_hashlib.py
+++ b/Lib/test/test_hashlib.py
@@ -19,7 +19,6 @@
import warnings
from test import support
from test.support import _4G, bigmemtest, import_fresh_module
-from test.support import requires_hashdigest
from http.client import HTTPException
# Were we compiled --with-pydebug or with #define Py_DEBUG?
diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py
index 23c108f6e3c27..08086f0e78c83 100644
--- a/Lib/test/test_hmac.py
+++ b/Lib/test/test_hmac.py
@@ -6,7 +6,7 @@
import unittest.mock
import warnings
-from test.support import requires_hashdigest
+from test.support import hashlib_helper
def ignore_warning(func):
@@ -21,7 +21,7 @@ def wrapper(*args, **kwargs):
class TestVectorsTestCase(unittest.TestCase):
- @requires_hashdigest('md5', openssl=True)
+ @hashlib_helper.requires_hashdigest('md5', openssl=True)
def test_md5_vectors(self):
# Test the HMAC module against test vectors from the RFC.
@@ -79,7 +79,7 @@ def md5test(key, data, digest):
b"and Larger Than One Block-Size Data"),
"6f630fad67cda0ee1fb1f562db3aa53e")
- @requires_hashdigest('sha1', openssl=True)
+ @hashlib_helper.requires_hashdigest('sha1', openssl=True)
def test_sha_vectors(self):
def shatest(key, data, digest):
h = hmac.HMAC(key, data, digestmod=hashlib.sha1)
@@ -272,23 +272,23 @@ def hmactest(key, data, hexdigests):
'134676fb6de0446065c97440fa8c6a58',
})
- @requires_hashdigest('sha224', openssl=True)
+ @hashlib_helper.requires_hashdigest('sha224', openssl=True)
def test_sha224_rfc4231(self):
self._rfc4231_test_cases(hashlib.sha224, 'sha224', 28, 64)
- @requires_hashdigest('sha256', openssl=True)
+ @hashlib_helper.requires_hashdigest('sha256', openssl=True)
def test_sha256_rfc4231(self):
self._rfc4231_test_cases(hashlib.sha256, 'sha256', 32, 64)
- @requires_hashdigest('sha384', openssl=True)
+ @hashlib_helper.requires_hashdigest('sha384', openssl=True)
def test_sha384_rfc4231(self):
self._rfc4231_test_cases(hashlib.sha384, 'sha384', 48, 128)
- @requires_hashdigest('sha512', openssl=True)
+ @hashlib_helper.requires_hashdigest('sha512', openssl=True)
def test_sha512_rfc4231(self):
self._rfc4231_test_cases(hashlib.sha512, 'sha512', 64, 128)
- @requires_hashdigest('sha256')
+ @hashlib_helper.requires_hashdigest('sha256')
def test_legacy_block_size_warnings(self):
class MockCrazyHash(object):
"""Ain't no block_size attribute here."""
@@ -329,7 +329,7 @@ class ConstructorTestCase(unittest.TestCase):
"6c845b47f52b3b47f6590c502db7825aad757bf4fadc8fa972f7cd2e76a5bdeb"
)
- @requires_hashdigest('sha256')
+ @hashlib_helper.requires_hashdigest('sha256')
def test_normal(self):
# Standard constructor call.
try:
@@ -337,21 +337,21 @@ def test_normal(self):
except Exception:
self.fail("Standard constructor call raised exception.")
- @requires_hashdigest('sha256')
+ @hashlib_helper.requires_hashdigest('sha256')
def test_with_str_key(self):
# Pass a key of type str, which is an error, because it expects a key
# of type bytes
with self.assertRaises(TypeError):
h = hmac.HMAC("key", digestmod='sha256')
- @requires_hashdigest('sha256')
+ @hashlib_helper.requires_hashdigest('sha256')
def test_dot_new_with_str_key(self):
# Pass a key of type str, which is an error, because it expects a key
# of type bytes
with self.assertRaises(TypeError):
h = hmac.new("key", digestmod='sha256')
- @requires_hashdigest('sha256')
+ @hashlib_helper.requires_hashdigest('sha256')
def test_withtext(self):
# Constructor call with text.
try:
@@ -360,7 +360,7 @@ def test_withtext(self):
self.fail("Constructor call with text argument raised exception.")
self.assertEqual(h.hexdigest(), self.expected)
- @requires_hashdigest('sha256')
+ @hashlib_helper.requires_hashdigest('sha256')
def test_with_bytearray(self):
try:
h = hmac.HMAC(bytearray(b"key"), bytearray(b"hash this!"),
@@ -369,7 +369,7 @@ def test_with_bytearray(self):
self.fail("Constructor call with bytearray arguments raised exception.")
self.assertEqual(h.hexdigest(), self.expected)
- @requires_hashdigest('sha256')
+ @hashlib_helper.requires_hashdigest('sha256')
def test_with_memoryview_msg(self):
try:
h = hmac.HMAC(b"key", memoryview(b"hash this!"), digestmod="sha256")
@@ -377,7 +377,7 @@ def test_with_memoryview_msg(self):
self.fail("Constructor call with memoryview msg raised exception.")
self.assertEqual(h.hexdigest(), self.expected)
- @requires_hashdigest('sha256')
+ @hashlib_helper.requires_hashdigest('sha256')
def test_withmodule(self):
# Constructor call with text and digest module.
try:
@@ -388,7 +388,7 @@ def test_withmodule(self):
class SanityTestCase(unittest.TestCase):
- @requires_hashdigest('sha256')
+ @hashlib_helper.requires_hashdigest('sha256')
def test_exercise_all_methods(self):
# Exercising all methods once.
# This must not raise any exceptions
@@ -404,7 +404,7 @@ def test_exercise_all_methods(self):
class CopyTestCase(unittest.TestCase):
- @requires_hashdigest('sha256')
+ @hashlib_helper.requires_hashdigest('sha256')
def test_attributes(self):
# Testing if attributes are of same type.
h1 = hmac.HMAC(b"key", digestmod="sha256")
@@ -416,7 +416,7 @@ def test_attributes(self):
self.assertEqual(type(h1.outer), type(h2.outer),
"Types of outer don't match.")
- @requires_hashdigest('sha256')
+ @hashlib_helper.requires_hashdigest('sha256')
def test_realcopy(self):
# Testing if the copy method created a real copy.
h1 = hmac.HMAC(b"key", digestmod="sha256")
@@ -428,7 +428,7 @@ def test_realcopy(self):
self.assertTrue(id(h1.outer) != id(h2.outer),
"No real copy of the attribute 'outer'.")
- @requires_hashdigest('sha256')
+ @hashlib_helper.requires_hashdigest('sha256')
def test_equality(self):
# Testing if the copy has the same digests.
h1 = hmac.HMAC(b"key", digestmod="sha256")
diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py
index 69ee63b18c373..ce601565cf1a6 100644
--- a/Lib/test/test_imaplib.py
+++ b/Lib/test/test_imaplib.py
@@ -11,8 +11,8 @@
import socket
from test.support import (reap_threads, verbose, transient_internet,
- run_with_tz, run_with_locale, cpython_only,
- requires_hashdigest)
+ run_with_tz, run_with_locale, cpython_only)
+from test.support import hashlib_helper
import unittest
from unittest import mock
from datetime import datetime, timezone, timedelta
@@ -385,7 +385,7 @@ def cmd_AUTHENTICATE(self, tag, args):
self.assertEqual(code, 'OK')
self.assertEqual(server.response, b'ZmFrZQ==\r\n') # b64 encoded 'fake'
- @requires_hashdigest('md5')
+ @hashlib_helper.requires_hashdigest('md5')
def test_login_cram_md5_bytes(self):
class AuthHandler(SimpleIMAPHandler):
capabilities = 'LOGINDISABLED AUTH=CRAM-MD5'
@@ -403,7 +403,7 @@ def cmd_AUTHENTICATE(self, tag, args):
ret, _ = client.login_cram_md5("tim", b"tanstaaftanstaaf")
self.assertEqual(ret, "OK")
- @requires_hashdigest('md5')
+ @hashlib_helper.requires_hashdigest('md5')
def test_login_cram_md5_plain_text(self):
class AuthHandler(SimpleIMAPHandler):
capabilities = 'LOGINDISABLED AUTH=CRAM-MD5'
@@ -849,7 +849,7 @@ def cmd_AUTHENTICATE(self, tag, args):
b'ZmFrZQ==\r\n') # b64 encoded 'fake'
@reap_threads
- @requires_hashdigest('md5')
+ @hashlib_helper.requires_hashdigest('md5')
def test_login_cram_md5(self):
class AuthHandler(SimpleIMAPHandler):
diff --git a/Lib/test/test_poplib.py b/Lib/test/test_poplib.py
index d4877b1fbbc6b..b670afcf4e62e 100644
--- a/Lib/test/test_poplib.py
+++ b/Lib/test/test_poplib.py
@@ -13,6 +13,7 @@
from unittest import TestCase, skipUnless
from test import support as test_support
+from test.support import hashlib_helper
from test.support import socket_helper
HOST = socket_helper.HOST
@@ -311,11 +312,11 @@ def test_noop(self):
def test_rpop(self):
self.assertOK(self.client.rpop('foo'))
- @test_support.requires_hashdigest('md5')
+ @hashlib_helper.requires_hashdigest('md5')
def test_apop_normal(self):
self.assertOK(self.client.apop('foo', 'dummypassword'))
- @test_support.requires_hashdigest('md5')
+ @hashlib_helper.requires_hashdigest('md5')
def test_apop_REDOS(self):
# Replace welcome with very long evil welcome.
# NB The upper bound on welcome length is currently 2048.
diff --git a/Lib/test/test_smtplib.py b/Lib/test/test_smtplib.py
index d1ffb368a4f6f..c1bd2e291255b 100644
--- a/Lib/test/test_smtplib.py
+++ b/Lib/test/test_smtplib.py
@@ -20,9 +20,9 @@
import unittest
from test import support, mock_socket
+from test.support import hashlib_helper
from test.support import socket_helper
from test.support import threading_setup, threading_cleanup, join_thread
-from test.support import requires_hashdigest
from unittest.mock import Mock
HOST = socket_helper.HOST
@@ -1058,7 +1058,7 @@ def testAUTH_LOGIN(self):
self.assertEqual(resp, (235, b'Authentication Succeeded'))
smtp.close()
- @requires_hashdigest('md5')
+ @hashlib_helper.requires_hashdigest('md5')
def testAUTH_CRAM_MD5(self):
self.serv.add_feature("AUTH CRAM-MD5")
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost',
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py
index 99196f6043191..25e9e93604476 100644
--- a/Lib/test/test_tarfile.py
+++ b/Lib/test/test_tarfile.py
@@ -11,7 +11,7 @@
import tarfile
from test import support
-from test.support import script_helper, requires_hashdigest
+from test.support import script_helper
# Check for our compression modules.
try:
diff --git a/Lib/test/test_urllib2_localnet.py b/Lib/test/test_urllib2_localnet.py
index 8cfb214c9af9a..421b9f7de2e21 100644
--- a/Lib/test/test_urllib2_localnet.py
+++ b/Lib/test/test_urllib2_localnet.py
@@ -9,6 +9,7 @@
import hashlib
from test import support
+from test.support import hashlib_helper
try:
import ssl
@@ -322,7 +323,7 @@ class ProxyAuthTests(unittest.TestCase):
PASSWD = "test123"
REALM = "TestRealm"
- @support.requires_hashdigest("md5")
+ @hashlib_helper.requires_hashdigest("md5")
def setUp(self):
super(ProxyAuthTests, self).setUp()
# Ignore proxy bypass settings in the environment.