Change venv

Ambulance Clerc committed on 2023-05-31 08:31:22 +02:00
parent fb6f579089, commit fdbb52c96f
466 changed files with 25899 additions and 64721 deletions

View File

@@ -19,6 +19,23 @@ from .util.retry import Retry
from .util.timeout import Timeout
from .util.url import get_host
# === NOTE TO REPACKAGERS AND VENDORS ===
# Please delete this block, this logic is only
# for urllib3 being distributed via PyPI.
# See: https://github.com/urllib3/urllib3/issues/2680
try:
import urllib3_secure_extra # type: ignore # noqa: F401
except ImportError:
pass
else:
warnings.warn(
"'urllib3[secure]' extra is deprecated and will be removed "
"in a future release of urllib3 2.x. Read more in this issue: "
"https://github.com/urllib3/urllib3/issues/2680",
category=DeprecationWarning,
stacklevel=2,
)
__author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
__license__ = "MIT"
__version__ = __version__

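Editor's note: the import-time shim above means that merely importing urllib3 now warns when the obsolete 'urllib3[secure]' extra is installed. A minimal sketch of surfacing that warning in a test run (the filter setup is an assumption, not part of this diff, and it only works before urllib3 is first imported):

import warnings

with warnings.catch_warnings():
    warnings.simplefilter("error", DeprecationWarning)
    import urllib3  # raises here only if 'urllib3_secure_extra' is installed
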
View File

@@ -1,2 +1,2 @@
# This file is protected via CODEOWNERS
__version__ = "1.26.6"
__version__ = "1.26.15"

View File

@@ -51,15 +51,16 @@ from .exceptions import (
SubjectAltNameWarning,
SystemTimeWarning,
)
from .packages.ssl_match_hostname import CertificateError, match_hostname
from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection
from .util.ssl_ import (
assert_fingerprint,
create_urllib3_context,
is_ipaddress,
resolve_cert_reqs,
resolve_ssl_version,
ssl_wrap_socket,
)
from .util.ssl_match_hostname import CertificateError, match_hostname
log = logging.getLogger(__name__)
@@ -67,7 +68,7 @@ port_by_scheme = {"http": 80, "https": 443}
# When it comes time to update this value as a part of regular maintenance
# (ie test_recent_date is failing) update it to ~6 months before the current date.
RECENT_DATE = datetime.date(2020, 7, 1)
RECENT_DATE = datetime.date(2022, 1, 1)
_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
@@ -107,6 +108,10 @@ class HTTPConnection(_HTTPConnection, object):
#: Whether this connection verifies the host's certificate.
is_verified = False
#: Whether this proxy connection (if used) verifies the proxy host's
#: certificate.
proxy_is_verified = None
def __init__(self, *args, **kw):
if not six.PY2:
kw.pop("strict", None)
@@ -224,6 +229,11 @@ class HTTPConnection(_HTTPConnection, object):
)
def request(self, method, url, body=None, headers=None):
# Update the inner socket's timeout value to send the request.
# This only triggers if the connection is re-used.
if getattr(self, "sock", None) is not None:
self.sock.settimeout(self.timeout)
if headers is None:
headers = {}
else:
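
Editor's note: the hunk above fixes stale timeouts on keep-alive connections — a pooled socket keeps whatever timeout the previous request set unless it is re-applied. A minimal illustration of the rule on a plain socket (helper name is hypothetical):

import socket

def send_on_reused_socket(sock, payload, timeout):
    # A reused keep-alive socket still carries the previous request's
    # timeout; re-apply the current one before sending, as the patch does.
    sock.settimeout(timeout)
    sock.sendall(payload)
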
@@ -350,17 +360,15 @@ class HTTPSConnection(HTTPConnection):
def connect(self):
# Add certificate verification
conn = self._new_conn()
self.sock = conn = self._new_conn()
hostname = self.host
tls_in_tls = False
if self._is_using_tunnel():
if self.tls_in_tls_required:
conn = self._connect_tls_proxy(hostname, conn)
self.sock = conn = self._connect_tls_proxy(hostname, conn)
tls_in_tls = True
self.sock = conn
# Calls self._set_hostport(), so self.host is
# self._tunnel_host below.
self._tunnel()
@@ -490,14 +498,10 @@ class HTTPSConnection(HTTPConnection):
self.ca_cert_dir,
self.ca_cert_data,
)
# By default urllib3's SSLContext disables `check_hostname` and uses
# a custom check. For proxies we're good with relying on the default
# verification.
ssl_context.check_hostname = True
# If no cert was provided, use only the default options for server
# certificate validation
return ssl_wrap_socket(
socket = ssl_wrap_socket(
sock=conn,
ca_certs=self.ca_certs,
ca_cert_dir=self.ca_cert_dir,
@@ -506,8 +510,37 @@ class HTTPSConnection(HTTPConnection):
ssl_context=ssl_context,
)
if ssl_context.verify_mode != ssl.CERT_NONE and not getattr(
ssl_context, "check_hostname", False
):
# While urllib3 attempts to always turn off hostname matching from
# the TLS library, this cannot always be done. So we check whether
# the TLS Library still thinks it's matching hostnames.
cert = socket.getpeercert()
if not cert.get("subjectAltName", ()):
warnings.warn(
(
"Certificate for {0} has no `subjectAltName`, falling back to check for a "
"`commonName` for now. This feature is being removed by major browsers and "
"deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 "
"for details.)".format(hostname)
),
SubjectAltNameWarning,
)
_match_hostname(cert, hostname)
self.proxy_is_verified = ssl_context.verify_mode == ssl.CERT_REQUIRED
return socket
def _match_hostname(cert, asserted_hostname):
# Our upstream implementation of ssl.match_hostname()
# only applies this normalization to IP addresses so it doesn't
# match DNS SANs so we do the same thing!
stripped_hostname = asserted_hostname.strip("u[]")
if is_ipaddress(stripped_hostname):
asserted_hostname = stripped_hostname
try:
match_hostname(cert, asserted_hostname)
except CertificateError as e:

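Editor's note: `_match_hostname` above strips brackets (and a stray Python 2 "u" prefix) only when the result is an IP literal, mirroring how the vendored `match_hostname` normalizes IP-address SANs. A standalone sketch of that normalization, assuming only the standard library:

from ipaddress import ip_address

def normalize_asserted_hostname(asserted_hostname):
    stripped = asserted_hostname.strip("u[]")  # "[::1]" -> "::1"
    try:
        ip_address(stripped)
    except ValueError:
        return asserted_hostname  # a DNS name: leave it untouched
    return stripped
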
View File

@@ -2,6 +2,7 @@ from __future__ import absolute_import
import errno
import logging
import re
import socket
import sys
import warnings
@@ -35,7 +36,6 @@ from .exceptions import (
)
from .packages import six
from .packages.six.moves import queue
from .packages.ssl_match_hostname import CertificateError
from .request import RequestMethods
from .response import HTTPResponse
from .util.connection import is_connection_dropped
@@ -44,6 +44,7 @@ from .util.queue import LifoQueue
from .util.request import set_file_position
from .util.response import assert_header_parsing
from .util.retry import Retry
from .util.ssl_match_hostname import CertificateError
from .util.timeout import Timeout
from .util.url import Url, _encode_target
from .util.url import _normalize_host as normalize_host
@@ -301,8 +302,11 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
pass
except queue.Full:
# This should never happen if self.block == True
log.warning("Connection pool is full, discarding connection: %s", self.host)
log.warning(
"Connection pool is full, discarding connection: %s. Connection pool size: %s",
self.host,
self.pool.qsize(),
)
# Connection never got put back into the pool, close it.
if conn:
conn.close()
@@ -375,7 +379,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
timeout_obj = self._get_timeout(timeout)
timeout_obj.start_connect()
conn.timeout = timeout_obj.connect_timeout
conn.timeout = Timeout.resolve_default_timeout(timeout_obj.connect_timeout)
# Trigger any extra validation we need to do.
try:
@@ -745,7 +749,35 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
# Discard the connection for these exceptions. It will be
# replaced during the next _get_conn() call.
clean_exit = False
if isinstance(e, (BaseSSLError, CertificateError)):
def _is_ssl_error_message_from_http_proxy(ssl_error):
# We're trying to detect the message 'WRONG_VERSION_NUMBER' but
# SSLErrors are kinda all over the place when it comes to the message,
# so we try to cover our bases here!
message = " ".join(re.split("[^a-z]", str(ssl_error).lower()))
return (
"wrong version number" in message or "unknown protocol" in message
)
# Try to detect a common user error with proxies which is to
# set an HTTP proxy to be HTTPS when it should be 'http://'
# (ie {'http': 'http://proxy', 'https': 'https://proxy'})
# Instead we add a nice error message and point to a URL.
if (
isinstance(e, BaseSSLError)
and self.proxy
and _is_ssl_error_message_from_http_proxy(e)
and conn.proxy
and conn.proxy.scheme == "https"
):
e = ProxyError(
"Your proxy appears to only use HTTP and not HTTPS, "
"try changing your proxy URL to be HTTP. See: "
"https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
"#https-proxy-error-http-proxy",
SSLError(e),
)
elif isinstance(e, (BaseSSLError, CertificateError)):
e = SSLError(e)
elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
e = ProxyError("Cannot connect to proxy.", e)
@@ -830,7 +862,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
)
# Check if we should retry the HTTP response.
has_retry_after = bool(response.getheader("Retry-After"))
has_retry_after = bool(response.headers.get("Retry-After"))
if retries.is_retry(method, response.status, has_retry_after):
try:
retries = retries.increment(method, url, response=response, _pool=self)
@@ -1020,6 +1052,17 @@ class HTTPSConnectionPool(HTTPConnectionPool):
InsecureRequestWarning,
)
if getattr(conn, "proxy_is_verified", None) is False:
warnings.warn(
(
"Unverified HTTPS connection done to an HTTPS proxy. "
"Adding certificate verification is strongly advised. See: "
"https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
"#ssl-warnings"
),
InsecureRequestWarning,
)
def connection_from_url(url, **kw):
"""

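Editor's note: the proxy-misconfiguration heuristic above inspects only the SSLError's text. Restated on its own (the function name here is illustrative; the normalization is taken verbatim from the hunk):

import re

def looks_like_https_to_http_proxy(ssl_error):
    # OpenSSL error strings vary, so split on everything non-alphabetic
    # and search the normalized words for the two known markers.
    message = " ".join(re.split("[^a-z]", str(ssl_error).lower()))
    return "wrong version number" in message or "unknown protocol" in message
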
View File

@@ -48,7 +48,7 @@ from ctypes import (
)
from ctypes.util import find_library
from pip._vendor.urllib3.packages.six import raise_from
from ...packages.six import raise_from
if platform.system() != "Darwin":
raise ImportError("Only macOS is supported")

View File

@@ -188,6 +188,7 @@ def _cert_array_from_pem(pem_bundle):
# We only want to do that if an error occurs: otherwise, the caller
# should free.
CoreFoundation.CFRelease(cert_array)
raise
return cert_array

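Editor's note: the one-line fix above adds a bare `raise` so `_cert_array_from_pem` both releases the CoreFoundation array and propagates the error; previously the function fell through and returned an already-released object. The general shape of the pattern, with all names hypothetical:

def build_resource():
    res = allocate()  # hypothetical acquisition
    try:
        fill(res)     # hypothetical step that may fail
    except Exception:
        release(res)  # the error path owns cleanup...
        raise         # ...and must re-raise instead of returning `res`
    return res
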
View File

@@ -224,7 +224,7 @@ class AppEngineManager(RequestMethods):
)
# Check if we should retry the HTTP response.
has_retry_after = bool(http_response.getheader("Retry-After"))
has_retry_after = bool(http_response.headers.get("Retry-After"))
if retries.is_retry(method, http_response.status, has_retry_after):
retries = retries.increment(method, url, response=http_response, _pool=self)
log.debug("Retry: %s", url)

View File

@@ -69,7 +69,7 @@ class NTLMConnectionPool(HTTPSConnectionPool):
log.debug("Request headers: %s", headers)
conn.request("GET", self.authurl, None, headers)
res = conn.getresponse()
reshdr = dict(res.getheaders())
reshdr = dict(res.headers)
log.debug("Response status: %s %s", res.status, res.reason)
log.debug("Response headers: %s", reshdr)
log.debug("Response data: %s [...]", res.read(100))
@@ -101,7 +101,7 @@ class NTLMConnectionPool(HTTPSConnectionPool):
conn.request("GET", self.authurl, None, headers)
res = conn.getresponse()
log.debug("Response status: %s %s", res.status, res.reason)
log.debug("Response headers: %s", dict(res.getheaders()))
log.debug("Response headers: %s", dict(res.headers))
log.debug("Response data: %s [...]", res.read()[:100])
if res.status != 200:
if res.status == 401:

View File

@@ -28,8 +28,8 @@ like this:
.. code-block:: python
try:
import urllib3.contrib.pyopenssl
urllib3.contrib.pyopenssl.inject_into_urllib3()
import pip._vendor.urllib3.contrib.pyopenssl as pyopenssl
pyopenssl.inject_into_urllib3()
except ImportError:
pass
@@ -47,10 +47,10 @@ compression in Python 2 (see `CRIME attack`_).
"""
from __future__ import absolute_import
import OpenSSL.crypto
import OpenSSL.SSL
from cryptography import x509
from cryptography.hazmat.backends.openssl import backend as openssl_backend
from cryptography.hazmat.backends.openssl.x509 import _Certificate
try:
from cryptography.x509 import UnsupportedExtension
@@ -73,11 +73,20 @@ except ImportError: # Platform-specific: Python 3
import logging
import ssl
import sys
import warnings
from .. import util
from ..packages import six
from ..util.ssl_ import PROTOCOL_TLS_CLIENT
warnings.warn(
"'urllib3.contrib.pyopenssl' module is deprecated and will be removed "
"in a future release of urllib3 2.x. Read more in this issue: "
"https://github.com/urllib3/urllib3/issues/2680",
category=DeprecationWarning,
stacklevel=2,
)
__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
# SNI always works.
@@ -219,9 +228,8 @@ def get_subj_alt_name(peer_cert):
if hasattr(peer_cert, "to_cryptography"):
cert = peer_cert.to_cryptography()
else:
# This is technically using private APIs, but should work across all
# relevant versions before PyOpenSSL got a proper API for this.
cert = _Certificate(openssl_backend, peer_cert._x509)
der = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, peer_cert)
cert = x509.load_der_x509_certificate(der, openssl_backend)
# We want to find the SAN extension. Ask Cryptography to locate it (it's
# faster than looping in Python)
@@ -406,7 +414,6 @@ if _fileobject: # Platform-specific: Python 2
self._makefile_refs += 1
return _fileobject(self, mode, bufsize, close=True)
else: # Platform-specific: Python 3
makefile = backport_makefile

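Editor's note: the `get_subj_alt_name` change above swaps a private-API bridge (`_Certificate`) for a public round-trip through DER. A self-contained sketch of that conversion (assumes PyOpenSSL and `cryptography` are installed; modern `cryptography` no longer needs an explicit backend argument):

import OpenSSL.crypto
from cryptography import x509

def to_cryptography_cert(peer_cert):
    # Serialize the PyOpenSSL certificate to DER, then reload it with
    # `cryptography`, avoiding any private backend classes.
    der = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, peer_cert)
    return x509.load_der_x509_certificate(der)
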
View File

@@ -19,8 +19,8 @@ contrib module. So...here we are.
To use this module, simply import and inject it::
import urllib3.contrib.securetransport
urllib3.contrib.securetransport.inject_into_urllib3()
import pip._vendor.urllib3.contrib.securetransport as securetransport
securetransport.inject_into_urllib3()
Happy TLSing!
@@ -770,7 +770,6 @@ if _fileobject: # Platform-specific: Python 2
self._makefile_refs += 1
return _fileobject(self, mode, bufsize, close=True)
else: # Platform-specific: Python 3
def makefile(self, mode="r", buffering=None, *args, **kwargs):

View File

@@ -34,6 +34,7 @@ SSL_KEYWORDS = (
"ca_cert_dir",
"ssl_context",
"key_password",
"server_hostname",
)
# All known keyword arguments that could be provided to the pool manager, its

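Editor's note: with "server_hostname" added to SSL_KEYWORDS, the pool manager now keys pools on it like the other TLS options, so connections with different SNI overrides no longer share a pool. Hedged usage sketch (the kwarg is forwarded through the pool to the HTTPS connection):

import urllib3

# All TLS-related keys, including the new one, become part of the pool key.
pool = urllib3.PoolManager(server_hostname="internal.example")
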
View File

@@ -2,16 +2,16 @@ from __future__ import absolute_import
import io
import logging
import sys
import warnings
import zlib
from contextlib import contextmanager
from socket import error as SocketError
from socket import timeout as SocketTimeout
try:
import brotli
except ImportError:
brotli = None
brotli = None
from . import util
from ._collections import HTTPHeaderDict
from .connection import BaseSSLError, HTTPException
from .exceptions import (
@@ -478,6 +478,54 @@ class HTTPResponse(io.IOBase):
if self._original_response and self._original_response.isclosed():
self.release_conn()
def _fp_read(self, amt):
"""
Read a response with the thought that reading the number of bytes
larger than can fit in a 32-bit int at a time via SSL in some
known cases leads to an overflow error that has to be prevented
if `amt` or `self.length_remaining` indicate that a problem may
happen.
The known cases:
* 3.8 <= CPython < 3.9.7 because of a bug
https://github.com/urllib3/urllib3/issues/2513#issuecomment-1152559900.
* urllib3 injected with pyOpenSSL-backed SSL-support.
* CPython < 3.10 only when `amt` does not fit 32-bit int.
"""
assert self._fp
c_int_max = 2 ** 31 - 1
if (
(
(amt and amt > c_int_max)
or (self.length_remaining and self.length_remaining > c_int_max)
)
and not util.IS_SECURETRANSPORT
and (util.IS_PYOPENSSL or sys.version_info < (3, 10))
):
buffer = io.BytesIO()
# Besides `max_chunk_amt` being a maximum chunk size, it
# affects memory overhead of reading a response by this
# method in CPython.
# `c_int_max` equal to 2 GiB - 1 byte is the actual maximum
# chunk size that does not lead to an overflow error, but
# 256 MiB is a compromise.
max_chunk_amt = 2 ** 28
while amt is None or amt != 0:
if amt is not None:
chunk_amt = min(amt, max_chunk_amt)
amt -= chunk_amt
else:
chunk_amt = max_chunk_amt
data = self._fp.read(chunk_amt)
if not data:
break
buffer.write(data)
del data # to reduce peak memory usage by `max_chunk_amt`.
return buffer.getvalue()
else:
# StringIO doesn't like amt=None
return self._fp.read(amt) if amt is not None else self._fp.read()
def read(self, amt=None, decode_content=None, cache_content=False):
"""
Similar to :meth:`http.client.HTTPResponse.read`, but with two additional
@@ -510,13 +558,11 @@ class HTTPResponse(io.IOBase):
fp_closed = getattr(self._fp, "closed", False)
with self._error_catcher():
data = self._fp_read(amt) if not fp_closed else b""
if amt is None:
# cStringIO doesn't like amt=None
data = self._fp.read() if not fp_closed else b""
flush_decoder = True
else:
cache_content = False
data = self._fp.read(amt) if not fp_closed else b""
if (
amt != 0 and not data
): # Platform-specific: Buggy versions of Python.
@@ -612,9 +658,21 @@ class HTTPResponse(io.IOBase):
# Backwards-compatibility methods for http.client.HTTPResponse
def getheaders(self):
warnings.warn(
"HTTPResponse.getheaders() is deprecated and will be removed "
"in urllib3 v2.1.0. Instead access HTTPResponse.headers directly.",
category=DeprecationWarning,
stacklevel=2,
)
return self.headers
def getheader(self, name, default=None):
warnings.warn(
"HTTPResponse.getheader() is deprecated and will be removed "
"in urllib3 v2.1.0. Instead use HTTPResponse.headers.get(name, default).",
category=DeprecationWarning,
stacklevel=2,
)
return self.headers.get(name, default)
# Backwards compatibility for http.cookiejar

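Editor's note: the new `_fp_read` above works around SSL reads overflowing a 32-bit C int (CPython 3.8 to 3.9.6, or pyOpenSSL-backed reads) by capping each read at 256 MiB. The core loop, restated as a standalone helper under the assumption of any file-like `fp` (this sketch decrements by the bytes actually read, which is slightly stricter than the hunk):

import io

def read_in_chunks(fp, amt=None, max_chunk=2 ** 28):
    # Never ask the transport for more than 256 MiB at once; 2**31 - 1 is
    # the hard limit, 2**28 keeps peak memory reasonable.
    buffer = io.BytesIO()
    while amt is None or amt > 0:
        chunk = min(amt, max_chunk) if amt is not None else max_chunk
        data = fp.read(chunk)
        if not data:
            break
        buffer.write(data)
        if amt is not None:
            amt -= len(data)
    return buffer.getvalue()

The deprecation shims at the end of the hunk translate directly: `response.getheaders()` becomes `response.headers`, and `response.getheader(name, default)` becomes `response.headers.get(name, default)` — the same substitution this diff applies in connectionpool.py, appengine.py, ntlmpool.py, and retry.py.
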
View File

@@ -2,9 +2,8 @@ from __future__ import absolute_import
import socket
from pip._vendor.urllib3.exceptions import LocationParseError
from ..contrib import _appengine_environ
from ..exceptions import LocationParseError
from ..packages import six
from .wait import NoWayToWaitForSocketError, wait_for_read

View File

@@ -45,6 +45,7 @@ def create_proxy_ssl_context(
ssl_version=resolve_ssl_version(ssl_version),
cert_reqs=resolve_cert_reqs(cert_reqs),
)
if (
not ca_certs
and not ca_cert_dir

View File

@@ -13,12 +13,6 @@ SKIP_HEADER = "@@@SKIP_HEADER@@@"
SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"])
ACCEPT_ENCODING = "gzip,deflate"
try:
import brotli as _unused_module_brotli # noqa: F401
except ImportError:
pass
else:
ACCEPT_ENCODING += ",br"
_FAILEDTELL = object()

View File

@@ -69,6 +69,24 @@ class _RetryMeta(type):
)
cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT = value
@property
def BACKOFF_MAX(cls):
warnings.warn(
"Using 'Retry.BACKOFF_MAX' is deprecated and "
"will be removed in v2.0. Use 'Retry.DEFAULT_BACKOFF_MAX' instead",
DeprecationWarning,
)
return cls.DEFAULT_BACKOFF_MAX
@BACKOFF_MAX.setter
def BACKOFF_MAX(cls, value):
warnings.warn(
"Using 'Retry.BACKOFF_MAX' is deprecated and "
"will be removed in v2.0. Use 'Retry.DEFAULT_BACKOFF_MAX' instead",
DeprecationWarning,
)
cls.DEFAULT_BACKOFF_MAX = value
@six.add_metaclass(_RetryMeta)
class Retry(object):
@@ -181,7 +199,7 @@ class Retry(object):
seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer
than :attr:`Retry.BACKOFF_MAX`.
than :attr:`Retry.DEFAULT_BACKOFF_MAX`.
By default, backoff is disabled (set to 0).
@@ -220,7 +238,7 @@ class Retry(object):
DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"])
#: Maximum backoff time.
BACKOFF_MAX = 120
DEFAULT_BACKOFF_MAX = 120
def __init__(
self,
@@ -348,7 +366,7 @@ class Retry(object):
return 0
backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1))
return min(self.BACKOFF_MAX, backoff_value)
return min(self.DEFAULT_BACKOFF_MAX, backoff_value)
def parse_retry_after(self, retry_after):
# Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4
@@ -376,7 +394,7 @@ class Retry(object):
def get_retry_after(self, response):
"""Get the value of Retry-After in seconds."""
retry_after = response.getheader("Retry-After")
retry_after = response.headers.get("Retry-After")
if retry_after is None:
return None

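Editor's note: the rename from BACKOFF_MAX to DEFAULT_BACKOFF_MAX does not change the backoff curve. A worked example of the formula above with backoff_factor=0.5 (helper name illustrative):

def backoff_time(backoff_factor, consecutive_errors_len, backoff_max=120):
    if consecutive_errors_len <= 1:
        return 0
    return min(backoff_max, backoff_factor * (2 ** (consecutive_errors_len - 1)))

# 0.5 * 2**(n-1) gives 0, 1.0, 2.0, 4.0, ... seconds, capped at 120.
assert backoff_time(0.5, 3) == 2.0
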
View File

@@ -2,8 +2,8 @@ import io
import socket
import ssl
from pip._vendor.urllib3.exceptions import ProxySchemeUnsupported
from pip._vendor.urllib3.packages import six
from ..exceptions import ProxySchemeUnsupported
from ..packages import six
SSL_BLOCKSIZE = 16384

View File

@@ -2,9 +2,8 @@ from __future__ import absolute_import
import time
# The default socket timeout, used by httplib to indicate that no timeout was
# specified by the user
from socket import _GLOBAL_DEFAULT_TIMEOUT
# The default socket timeout, used by httplib to indicate that no timeout was; specified by the user
from socket import _GLOBAL_DEFAULT_TIMEOUT, getdefaulttimeout
from ..exceptions import TimeoutStateError
@@ -116,6 +115,10 @@ class Timeout(object):
# __str__ provided for backwards compatibility
__str__ = __repr__
@classmethod
def resolve_default_timeout(cls, timeout):
return getdefaulttimeout() if timeout is cls.DEFAULT_TIMEOUT else timeout
@classmethod
def _validate_timeout(cls, value, name):
"""Check that a timeout attribute is valid.

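Editor's note: `resolve_default_timeout` above turns the DEFAULT_TIMEOUT sentinel into the interpreter-wide socket default at use time, so `socket.setdefaulttimeout()` is honored even for pooled connections. A behavior sketch (the sentinel stand-in is an assumption):

import socket

_DEFAULT_TIMEOUT = object()  # stand-in for Timeout.DEFAULT_TIMEOUT

def resolve_default_timeout(timeout):
    return socket.getdefaulttimeout() if timeout is _DEFAULT_TIMEOUT else timeout

socket.setdefaulttimeout(7.0)
assert resolve_default_timeout(_DEFAULT_TIMEOUT) == 7.0
assert resolve_default_timeout(3.0) == 3.0
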
View File

@@ -50,7 +50,7 @@ _variations = [
"(?:(?:%(hex)s:){0,6}%(hex)s)?::",
]
UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~"
UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._\-~"
IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")"
ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+"
IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]"
@@ -63,7 +63,7 @@ IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT + "$")
BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT[2:-2] + "$")
ZONE_ID_RE = re.compile("(" + ZONE_ID_PAT + r")\]$")
_HOST_PORT_PAT = ("^(%s|%s|%s)(?::([0-9]{0,5}))?$") % (
_HOST_PORT_PAT = ("^(%s|%s|%s)(?::0*?(|0|[1-9][0-9]{0,4}))?$") % (
REG_NAME_PAT,
IPV4_PAT,
IPV6_ADDRZ_PAT,
@@ -279,6 +279,9 @@ def _normalize_host(host, scheme):
if scheme in NORMALIZABLE_SCHEMES:
is_ipv6 = IPV6_ADDRZ_RE.match(host)
if is_ipv6:
# IPv6 hosts of the form 'a::b%zone' are encoded in a URL as
# such per RFC 6874: 'a::b%25zone'. Unquote the ZoneID
# separator as necessary to return a valid RFC 4007 scoped IP.
match = ZONE_ID_RE.search(host)
if match:
start, end = match.span(1)
@@ -300,7 +303,7 @@ def _normalize_host(host, scheme):
def _idna_encode(name):
if name and any([ord(x) > 128 for x in name]):
if name and any(ord(x) >= 128 for x in name):
try:
from pip._vendor import idna
except ImportError:
@@ -331,7 +334,7 @@ def parse_url(url):
"""
Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
performed to parse incomplete urls. Fields not provided will be None.
This parser is RFC 3986 compliant.
This parser is RFC 3986 and RFC 6874 compliant.
The parser logic and helper functions are based heavily on
work done in the ``rfc3986`` module.

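Editor's note: two of the pattern changes above are easy to verify directly — "!" is no longer an unreserved character in Zone IDs, and the new port group strips leading zeros while still accepting port 0. A quick check of the port group (pattern copied from the hunk; the test strings are assumptions):

import re

_PORT_SUFFIX = re.compile(r"^(?::0*?(|0|[1-9][0-9]{0,4}))?$")

assert _PORT_SUFFIX.match(":0080").group(1) == "80"  # leading zeros dropped
assert _PORT_SUFFIX.match(":0").group(1) == "0"      # port 0 still parses
assert _PORT_SUFFIX.match("").group(1) is None       # no port given
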
View File

@@ -42,7 +42,6 @@ if sys.version_info >= (3, 5):
def _retry_on_intr(fn, timeout):
return fn(timeout)
else:
# Old and broken Pythons.
def _retry_on_intr(fn, timeout):