Change venv

This commit is contained in:
Ambulance Clerc
2023-05-31 08:31:22 +02:00
parent fb6f579089
commit fdbb52c96f
466 changed files with 25899 additions and 64721 deletions

View File

@@ -7,29 +7,46 @@ and eventually drop this after all usages are changed.
"""
import os
import sys
from typing import List
from pip._vendor import appdirs as _appdirs
from pip._vendor import platformdirs as _appdirs
def user_cache_dir(appname: str) -> str:
return _appdirs.user_cache_dir(appname, appauthor=False)
def _macos_user_config_dir(appname: str, roaming: bool = True) -> str:
# Use ~/Application Support/pip, if the directory exists.
path = _appdirs.user_data_dir(appname, appauthor=False, roaming=roaming)
if os.path.isdir(path):
return path
# Use a Linux-like ~/.config/pip, by default.
linux_like_path = "~/.config/"
if appname:
linux_like_path = os.path.join(linux_like_path, appname)
return os.path.expanduser(linux_like_path)
def user_config_dir(appname: str, roaming: bool = True) -> str:
path = _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming)
if _appdirs.system == "darwin" and not os.path.isdir(path):
path = os.path.expanduser("~/.config/")
if appname:
path = os.path.join(path, appname)
return path
if sys.platform == "darwin":
return _macos_user_config_dir(appname, roaming)
return _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming)
# for the discussion regarding site_config_dir locations
# see <https://github.com/pypa/pip/issues/1733>
def site_config_dirs(appname: str) -> List[str]:
if sys.platform == "darwin":
return [_appdirs.site_data_dir(appname, appauthor=False, multipath=True)]
dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True)
if _appdirs.system not in ["win32", "darwin"]:
# always look in /etc directly as well
return dirval.split(os.pathsep) + ["/etc"]
return [dirval]
if sys.platform == "win32":
return [dirval]
# Unix-y system. Look in /etc as well.
return dirval.split(os.pathsep) + ["/etc"]

View File

@@ -2,9 +2,10 @@
"""
import re
from typing import TYPE_CHECKING, List, Optional, Tuple
from typing import List, Optional, Tuple
from pip._vendor.packaging.tags import (
PythonVersion,
Tag,
compatible_tags,
cpython_tags,
@@ -14,10 +15,6 @@ from pip._vendor.packaging.tags import (
mac_platforms,
)
if TYPE_CHECKING:
from pip._vendor.packaging.tags import PythonVersion
_osx_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)")
@@ -95,7 +92,7 @@ def _expand_allowed_platforms(platforms: Optional[List[str]]) -> Optional[List[s
return result
def _get_python_version(version: str) -> "PythonVersion":
def _get_python_version(version: str) -> PythonVersion:
if len(version) > 1:
return int(version[0]), int(version[1:])
else:
@@ -132,7 +129,7 @@ def get_supported(
"""
supported: List[Tag] = []
python_version: Optional["PythonVersion"] = None
python_version: Optional[PythonVersion] = None
if version is not None:
python_version = _get_python_version(version)

View File

@@ -8,7 +8,7 @@ from typing import Any, Optional, TextIO, Type, Union
from pip._vendor.packaging.version import parse
from pip import __version__ as current_version
from pip import __version__ as current_version # NOTE: tests patch this name.
DEPRECATION_MSG_PREFIX = "DEPRECATION: "
@@ -53,52 +53,68 @@ def install_warning_logger() -> None:
def deprecated(
*,
reason: str,
replacement: Optional[str],
gone_in: Optional[str],
feature_flag: Optional[str] = None,
issue: Optional[int] = None,
) -> None:
"""Helper to deprecate existing functionality.
reason:
Textual reason shown to the user about why this functionality has
been deprecated.
been deprecated. Should be a complete sentence.
replacement:
Textual suggestion shown to the user about what alternative
functionality they can use.
gone_in:
The version of pip does this functionality should get removed in.
Raises errors if pip's current version is greater than or equal to
Raises an error if pip's current version is greater than or equal to
this.
feature_flag:
Command-line flag of the form --use-feature={feature_flag} for testing
upcoming functionality.
issue:
Issue number on the tracker that would serve as a useful place for
users to find related discussion and provide feedback.
Always pass replacement, gone_in and issue as keyword arguments for clarity
at the call site.
"""
# Construct a nice message.
# This is eagerly formatted as we want it to get logged as if someone
# typed this entire message out.
sentences = [
(reason, DEPRECATION_MSG_PREFIX + "{}"),
(gone_in, "pip {} will remove support for this functionality."),
(replacement, "A possible replacement is {}."),
# Determine whether or not the feature is already gone in this version.
is_gone = gone_in is not None and parse(current_version) >= parse(gone_in)
message_parts = [
(reason, f"{DEPRECATION_MSG_PREFIX}{{}}"),
(
gone_in,
"pip {} will enforce this behaviour change."
if not is_gone
else "Since pip {}, this is no longer supported.",
),
(
replacement,
"A possible replacement is {}.",
),
(
feature_flag,
"You can use the flag --use-feature={} to test the upcoming behaviour."
if not is_gone
else None,
),
(
issue,
(
"You can find discussion regarding this at "
"https://github.com/pypa/pip/issues/{}."
),
"Discussion can be found at https://github.com/pypa/pip/issues/{}",
),
]
message = " ".join(
template.format(val) for val, template in sentences if val is not None
format_str.format(value)
for value, format_str in message_parts
if format_str is not None and value is not None
)
# Raise as an error if it has to be removed.
if gone_in is not None and parse(current_version) >= parse(gone_in):
# Raise as an error if this behaviour is deprecated.
if is_gone:
raise PipDeprecationWarning(message)
warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)

View File

@@ -2,6 +2,7 @@ from typing import Optional
from pip._internal.models.direct_url import ArchiveInfo, DirectUrl, DirInfo, VcsInfo
from pip._internal.models.link import Link
from pip._internal.utils.urls import path_to_url
from pip._internal.vcs import vcs
@@ -28,6 +29,13 @@ def direct_url_as_pep440_direct_reference(direct_url: DirectUrl, name: str) -> s
return requirement
def direct_url_for_editable(source_dir: str) -> DirectUrl:
return DirectUrl(
url=path_to_url(source_dir),
info=DirInfo(editable=True),
)
def direct_url_from_link(
link: Link, source_dir: Optional[str] = None, link_is_in_wheel_cache: bool = False
) -> DirectUrl:

View File

@@ -1,42 +0,0 @@
from distutils.errors import DistutilsArgError
from distutils.fancy_getopt import FancyGetopt
from typing import Dict, List
_options = [
("exec-prefix=", None, ""),
("home=", None, ""),
("install-base=", None, ""),
("install-data=", None, ""),
("install-headers=", None, ""),
("install-lib=", None, ""),
("install-platlib=", None, ""),
("install-purelib=", None, ""),
("install-scripts=", None, ""),
("prefix=", None, ""),
("root=", None, ""),
("user", None, ""),
]
# typeshed doesn't permit Tuple[str, None, str], see python/typeshed#3469.
_distutils_getopt = FancyGetopt(_options) # type: ignore
def parse_distutils_args(args: List[str]) -> Dict[str, str]:
"""Parse provided arguments, returning an object that has the
matched arguments.
Any unknown arguments are ignored.
"""
result = {}
for arg in args:
try:
_, match = _distutils_getopt.getopt(args=[arg])
except DistutilsArgError:
# We don't care about any other options, which here may be
# considered unrecognized since our option list is not
# exhaustive.
pass
else:
result.update(match.__dict__)
return result

View File

@@ -14,7 +14,7 @@ BOMS: List[Tuple[bytes, str]] = [
(codecs.BOM_UTF32_LE, "utf-32-le"),
]
ENCODING_RE = re.compile(br"coding[:=]\s*([-\w.]+)")
ENCODING_RE = re.compile(rb"coding[:=]\s*([-\w.]+)")
def auto_decode(data: bytes) -> str:

View File

@@ -1,7 +1,23 @@
import itertools
import os
import shutil
import sys
from typing import List, Optional
from pip._internal.cli.main import main
from pip._internal.utils.compat import WINDOWS
_EXECUTABLE_NAMES = [
"pip",
f"pip{sys.version_info.major}",
f"pip{sys.version_info.major}.{sys.version_info.minor}",
]
if WINDOWS:
_allowed_extensions = {"", ".exe"}
_EXECUTABLE_NAMES = [
"".join(parts)
for parts in itertools.product(_EXECUTABLE_NAMES, _allowed_extensions)
]
def _wrapper(args: Optional[List[str]] = None) -> int:
@@ -25,3 +41,44 @@ def _wrapper(args: Optional[List[str]] = None) -> int:
"running pip directly.\n"
)
return main(args)
def get_best_invocation_for_this_pip() -> str:
"""Try to figure out the best way to invoke pip in the current environment."""
binary_directory = "Scripts" if WINDOWS else "bin"
binary_prefix = os.path.join(sys.prefix, binary_directory)
# Try to use pip[X[.Y]] names, if those executables for this environment are
# the first on PATH with that name.
path_parts = os.path.normcase(os.environ.get("PATH", "")).split(os.pathsep)
exe_are_in_PATH = os.path.normcase(binary_prefix) in path_parts
if exe_are_in_PATH:
for exe_name in _EXECUTABLE_NAMES:
found_executable = shutil.which(exe_name)
binary_executable = os.path.join(binary_prefix, exe_name)
if (
found_executable
and os.path.exists(binary_executable)
and os.path.samefile(
found_executable,
binary_executable,
)
):
return exe_name
# Use the `-m` invocation, if there's no "nice" invocation.
return f"{get_best_invocation_for_this_python()} -m pip"
def get_best_invocation_for_this_python() -> str:
"""Try to figure out the best way to invoke the current Python."""
exe = sys.executable
exe_name = os.path.basename(exe)
# Try to use the basename, if it's the first executable.
found_executable = shutil.which(exe_name)
if found_executable and os.path.samefile(found_executable, exe):
return exe_name
# Use the full executable name, because we couldn't find something simpler.
return exe

View File

@@ -2,12 +2,10 @@ import fnmatch
import os
import os.path
import random
import shutil
import stat
import sys
from contextlib import contextmanager
from tempfile import NamedTemporaryFile
from typing import Any, BinaryIO, Iterator, List, Union, cast
from typing import Any, BinaryIO, Generator, List, Union, cast
from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed
@@ -42,35 +40,8 @@ def check_path_owner(path: str) -> bool:
return False # assume we don't own the path
def copy2_fixed(src: str, dest: str) -> None:
"""Wrap shutil.copy2() but map errors copying socket files to
SpecialFileError as expected.
See also https://bugs.python.org/issue37700.
"""
try:
shutil.copy2(src, dest)
except OSError:
for f in [src, dest]:
try:
is_socket_file = is_socket(f)
except OSError:
# An error has already occurred. Another error here is not
# a problem and we can ignore it.
pass
else:
if is_socket_file:
raise shutil.SpecialFileError(f"`{f}` is a socket")
raise
def is_socket(path: str) -> bool:
return stat.S_ISSOCK(os.lstat(path).st_mode)
@contextmanager
def adjacent_tmp_file(path: str, **kwargs: Any) -> Iterator[BinaryIO]:
def adjacent_tmp_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]:
"""Return a file-like object pointing to a tmp file next to path.
The file is created securely and is ensured to be written to disk

View File

@@ -6,21 +6,20 @@ from typing import Tuple
from pip._internal.utils.misc import splitext
WHEEL_EXTENSION = ".whl"
BZ2_EXTENSIONS = (".tar.bz2", ".tbz") # type: Tuple[str, ...]
XZ_EXTENSIONS = (
BZ2_EXTENSIONS: Tuple[str, ...] = (".tar.bz2", ".tbz")
XZ_EXTENSIONS: Tuple[str, ...] = (
".tar.xz",
".txz",
".tlz",
".tar.lz",
".tar.lzma",
) # type: Tuple[str, ...]
ZIP_EXTENSIONS = (".zip", WHEEL_EXTENSION) # type: Tuple[str, ...]
TAR_EXTENSIONS = (".tar.gz", ".tgz", ".tar") # type: Tuple[str, ...]
)
ZIP_EXTENSIONS: Tuple[str, ...] = (".zip", WHEEL_EXTENSION)
TAR_EXTENSIONS: Tuple[str, ...] = (".tar.gz", ".tgz", ".tar")
ARCHIVE_EXTENSIONS = ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS
def is_archive_file(name):
# type: (str) -> bool
def is_archive_file(name: str) -> bool:
"""Return True if `name` is a considered as an archive file."""
ext = splitext(name)[1].lower()
if ext in ARCHIVE_EXTENSIONS:

View File

@@ -6,14 +6,12 @@ import sys
from typing import Optional, Tuple
def glibc_version_string():
# type: () -> Optional[str]
def glibc_version_string() -> Optional[str]:
"Returns glibc version string, or None if not using glibc."
return glibc_version_string_confstr() or glibc_version_string_ctypes()
def glibc_version_string_confstr():
# type: () -> Optional[str]
def glibc_version_string_confstr() -> Optional[str]:
"Primary implementation of glibc_version_string using os.confstr."
# os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
# to be broken or missing. This strategy is used in the standard library
@@ -30,8 +28,7 @@ def glibc_version_string_confstr():
return version
def glibc_version_string_ctypes():
# type: () -> Optional[str]
def glibc_version_string_ctypes() -> Optional[str]:
"Fallback implementation of glibc_version_string using ctypes."
try:
@@ -78,8 +75,7 @@ def glibc_version_string_ctypes():
# versions that was generated by pip 8.1.2 and earlier is useless and
# misleading. Solution: instead of using platform, use our code that actually
# works.
def libc_ver():
# type: () -> Tuple[str, str]
def libc_ver() -> Tuple[str, str]:
"""Try to determine the glibc version
Returns a tuple of strings (lib, version) which default to empty strings

View File

@@ -1,5 +1,5 @@
import hashlib
from typing import TYPE_CHECKING, BinaryIO, Dict, Iterator, List
from typing import TYPE_CHECKING, BinaryIO, Dict, Iterable, List, Optional
from pip._internal.exceptions import HashMismatch, HashMissing, InstallationError
from pip._internal.utils.misc import read_chunks
@@ -28,8 +28,7 @@ class Hashes:
"""
def __init__(self, hashes=None):
# type: (Dict[str, List[str]]) -> None
def __init__(self, hashes: Optional[Dict[str, List[str]]] = None) -> None:
"""
:param hashes: A dict of algorithm names pointing to lists of allowed
hex digests
@@ -41,8 +40,7 @@ class Hashes:
allowed[alg] = sorted(keys)
self._allowed = allowed
def __and__(self, other):
# type: (Hashes) -> Hashes
def __and__(self, other: "Hashes") -> "Hashes":
if not isinstance(other, Hashes):
return NotImplemented
@@ -62,21 +60,14 @@ class Hashes:
return Hashes(new)
@property
def digest_count(self):
# type: () -> int
def digest_count(self) -> int:
return sum(len(digests) for digests in self._allowed.values())
def is_hash_allowed(
self,
hash_name, # type: str
hex_digest, # type: str
):
# type: (...) -> bool
def is_hash_allowed(self, hash_name: str, hex_digest: str) -> bool:
"""Return whether the given hex digest is allowed."""
return hex_digest in self._allowed.get(hash_name, [])
def check_against_chunks(self, chunks):
# type: (Iterator[bytes]) -> None
def check_against_chunks(self, chunks: Iterable[bytes]) -> None:
"""Check good hashes against ones built from iterable of chunks of
data.
@@ -99,12 +90,10 @@ class Hashes:
return
self._raise(gots)
def _raise(self, gots):
# type: (Dict[str, _Hash]) -> NoReturn
def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn":
raise HashMismatch(self._allowed, gots)
def check_against_file(self, file):
# type: (BinaryIO) -> None
def check_against_file(self, file: BinaryIO) -> None:
"""Check good hashes against a file-like object
Raise HashMismatch if none match.
@@ -112,28 +101,27 @@ class Hashes:
"""
return self.check_against_chunks(read_chunks(file))
def check_against_path(self, path):
# type: (str) -> None
def check_against_path(self, path: str) -> None:
with open(path, "rb") as file:
return self.check_against_file(file)
def __nonzero__(self):
# type: () -> bool
def has_one_of(self, hashes: Dict[str, str]) -> bool:
"""Return whether any of the given hashes are allowed."""
for hash_name, hex_digest in hashes.items():
if self.is_hash_allowed(hash_name, hex_digest):
return True
return False
def __bool__(self) -> bool:
"""Return whether I know any known-good hashes."""
return bool(self._allowed)
def __bool__(self):
# type: () -> bool
return self.__nonzero__()
def __eq__(self, other):
# type: (object) -> bool
def __eq__(self, other: object) -> bool:
if not isinstance(other, Hashes):
return NotImplemented
return self._allowed == other._allowed
def __hash__(self):
# type: () -> int
def __hash__(self) -> int:
return hash(
",".join(
sorted(
@@ -153,13 +141,11 @@ class MissingHashes(Hashes):
"""
def __init__(self):
# type: () -> None
def __init__(self) -> None:
"""Don't offer the ``hashes`` kwarg."""
# Pass our favorite hash in to generate a "gotten hash". With the
# empty list, it will never match, so an error will always raise.
super().__init__(hashes={FAVORITE_HASH: []})
def _raise(self, gots):
# type: (Dict[str, _Hash]) -> NoReturn
def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn":
raise HashMissing(gots[FAVORITE_HASH].hexdigest())

View File

@@ -10,8 +10,7 @@ old to handle TLSv1.2.
import sys
def inject_securetransport():
# type: () -> None
def inject_securetransport() -> None:
# Only relevant on macOS
if sys.platform != "darwin":
return

View File

@@ -4,28 +4,30 @@ import logging
import logging.handlers
import os
import sys
import threading
from dataclasses import dataclass
from io import TextIOWrapper
from logging import Filter
from typing import IO, Any, Callable, Iterator, Optional, TextIO, Type, cast
from typing import Any, ClassVar, Generator, List, Optional, TextIO, Type
from pip._vendor.rich.console import (
Console,
ConsoleOptions,
ConsoleRenderable,
RenderableType,
RenderResult,
RichCast,
)
from pip._vendor.rich.highlighter import NullHighlighter
from pip._vendor.rich.logging import RichHandler
from pip._vendor.rich.segment import Segment
from pip._vendor.rich.style import Style
from pip._internal.utils._log import VERBOSE, getLogger
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX
from pip._internal.utils.misc import ensure_dir
try:
import threading
except ImportError:
import dummy_threading as threading # type: ignore
try:
from pip._vendor import colorama
# Lots of different errors can come from this, including SystemError and
# ImportError.
except Exception:
colorama = None
_log_state = threading.local()
subprocess_logger = getLogger("pip.subprocessor")
@@ -35,39 +37,22 @@ class BrokenStdoutLoggingError(Exception):
Raised if BrokenPipeError occurs for the stdout stream while logging.
"""
pass
def _is_broken_pipe_error(exc_class: Type[BaseException], exc: BaseException) -> bool:
if exc_class is BrokenPipeError:
return True
# BrokenPipeError manifests differently in Windows and non-Windows.
if WINDOWS:
# In Windows, a broken pipe can show up as EINVAL rather than EPIPE:
# On Windows, a broken pipe can show up as EINVAL rather than EPIPE:
# https://bugs.python.org/issue19612
# https://bugs.python.org/issue30418
def _is_broken_pipe_error(exc_class, exc):
# type: (Type[BaseException], BaseException) -> bool
"""See the docstring for non-Windows below."""
return (exc_class is BrokenPipeError) or (
isinstance(exc, OSError) and exc.errno in (errno.EINVAL, errno.EPIPE)
)
if not WINDOWS:
return False
else:
# Then we are in the non-Windows case.
def _is_broken_pipe_error(exc_class, exc):
# type: (Type[BaseException], BaseException) -> bool
"""
Return whether an exception is a broken pipe error.
Args:
exc_class: an exception class.
exc: an exception instance.
"""
return exc_class is BrokenPipeError
return isinstance(exc, OSError) and exc.errno in (errno.EINVAL, errno.EPIPE)
@contextlib.contextmanager
def indent_log(num=2):
# type: (int) -> Iterator[None]
def indent_log(num: int = 2) -> Generator[None, None, None]:
"""
A context manager which will cause the log output to be indented for any
log messages emitted inside it.
@@ -81,8 +66,7 @@ def indent_log(num=2):
_log_state.indentation -= num
def get_indentation():
# type: () -> int
def get_indentation() -> int:
return getattr(_log_state, "indentation", 0)
@@ -91,11 +75,10 @@ class IndentingFormatter(logging.Formatter):
def __init__(
self,
*args, # type: Any
add_timestamp=False, # type: bool
**kwargs, # type: Any
):
# type: (...) -> None
*args: Any,
add_timestamp: bool = False,
**kwargs: Any,
) -> None:
"""
A logging.Formatter that obeys the indent_log() context manager.
@@ -105,8 +88,7 @@ class IndentingFormatter(logging.Formatter):
self.add_timestamp = add_timestamp
super().__init__(*args, **kwargs)
def get_message_start(self, formatted, levelno):
# type: (str, int) -> str
def get_message_start(self, formatted: str, levelno: int) -> str:
"""
Return the start of the formatted log message (not counting the
prefix to add to each line).
@@ -122,8 +104,7 @@ class IndentingFormatter(logging.Formatter):
return "ERROR: "
def format(self, record):
# type: (logging.LogRecord) -> str
def format(self, record: logging.LogRecord) -> str:
"""
Calls the standard formatter, but will indent all of the log message
lines by our current indentation level.
@@ -140,85 +121,66 @@ class IndentingFormatter(logging.Formatter):
return formatted
def _color_wrap(*colors):
# type: (*str) -> Callable[[str], str]
def wrapped(inp):
# type: (str) -> str
return "".join(list(colors) + [inp, colorama.Style.RESET_ALL])
@dataclass
class IndentedRenderable:
renderable: RenderableType
indent: int
return wrapped
def __rich_console__(
self, console: Console, options: ConsoleOptions
) -> RenderResult:
segments = console.render(self.renderable, options)
lines = Segment.split_lines(segments)
for line in lines:
yield Segment(" " * self.indent)
yield from line
yield Segment("\n")
class ColorizedStreamHandler(logging.StreamHandler):
class RichPipStreamHandler(RichHandler):
KEYWORDS: ClassVar[Optional[List[str]]] = []
# Don't build up a list of colors if we don't have colorama
if colorama:
COLORS = [
# This needs to be in order from highest logging level to lowest.
(logging.ERROR, _color_wrap(colorama.Fore.RED)),
(logging.WARNING, _color_wrap(colorama.Fore.YELLOW)),
]
else:
COLORS = []
def __init__(self, stream=None, no_color=None):
# type: (Optional[TextIO], bool) -> None
super().__init__(stream)
self._no_color = no_color
if WINDOWS and colorama:
self.stream = colorama.AnsiToWin32(self.stream)
def _using_stdout(self):
# type: () -> bool
"""
Return whether the handler is using sys.stdout.
"""
if WINDOWS and colorama:
# Then self.stream is an AnsiToWin32 object.
stream = cast(colorama.AnsiToWin32, self.stream)
return stream.wrapped is sys.stdout
return self.stream is sys.stdout
def should_color(self):
# type: () -> bool
# Don't colorize things if we do not have colorama or if told not to
if not colorama or self._no_color:
return False
real_stream = (
self.stream
if not isinstance(self.stream, colorama.AnsiToWin32)
else self.stream.wrapped
def __init__(self, stream: Optional[TextIO], no_color: bool) -> None:
super().__init__(
console=Console(file=stream, no_color=no_color, soft_wrap=True),
show_time=False,
show_level=False,
show_path=False,
highlighter=NullHighlighter(),
)
# If the stream is a tty we should color it
if hasattr(real_stream, "isatty") and real_stream.isatty():
return True
# Our custom override on Rich's logger, to make things work as we need them to.
def emit(self, record: logging.LogRecord) -> None:
style: Optional[Style] = None
# If we have an ANSI term we should color it
if os.environ.get("TERM") == "ANSI":
return True
# If we are given a diagnostic error to present, present it with indentation.
assert isinstance(record.args, tuple)
if record.msg == "[present-rich] %s" and len(record.args) == 1:
rich_renderable = record.args[0]
assert isinstance(
rich_renderable, (ConsoleRenderable, RichCast, str)
), f"{rich_renderable} is not rich-console-renderable"
# If anything else we should not color it
return False
renderable: RenderableType = IndentedRenderable(
rich_renderable, indent=get_indentation()
)
else:
message = self.format(record)
renderable = self.render_message(record, message)
if record.levelno is not None:
if record.levelno >= logging.ERROR:
style = Style(color="red")
elif record.levelno >= logging.WARNING:
style = Style(color="yellow")
def format(self, record):
# type: (logging.LogRecord) -> str
msg = super().format(record)
try:
self.console.print(renderable, overflow="ignore", crop=False, style=style)
except Exception:
self.handleError(record)
if self.should_color():
for level, color in self.COLORS:
if record.levelno >= level:
msg = color(msg)
break
def handleError(self, record: logging.LogRecord) -> None:
"""Called when logging is unable to log some output."""
return msg
# The logging module says handleError() can be customized.
def handleError(self, record):
# type: (logging.LogRecord) -> None
exc_class, exc = sys.exc_info()[:2]
# If a broken pipe occurred while calling write() or flush() on the
# stdout stream in logging's Handler.emit(), then raise our special
@@ -227,7 +189,7 @@ class ColorizedStreamHandler(logging.StreamHandler):
if (
exc_class
and exc
and self._using_stdout()
and self.console.file is sys.stdout
and _is_broken_pipe_error(exc_class, exc)
):
raise BrokenStdoutLoggingError()
@@ -236,19 +198,16 @@ class ColorizedStreamHandler(logging.StreamHandler):
class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
def _open(self):
# type: () -> IO[Any]
def _open(self) -> TextIOWrapper:
ensure_dir(os.path.dirname(self.baseFilename))
return super()._open()
class MaxLevelFilter(Filter):
def __init__(self, level):
# type: (int) -> None
def __init__(self, level: int) -> None:
self.level = level
def filter(self, record):
# type: (logging.LogRecord) -> bool
def filter(self, record: logging.LogRecord) -> bool:
return record.levelno < self.level
@@ -258,15 +217,13 @@ class ExcludeLoggerFilter(Filter):
A logging Filter that excludes records from a logger (or its children).
"""
def filter(self, record):
# type: (logging.LogRecord) -> bool
def filter(self, record: logging.LogRecord) -> bool:
# The base Filter class allows only records from a logger (or its
# children).
return not super().filter(record)
def setup_logging(verbosity, no_color, user_log_file):
# type: (int, bool, Optional[str]) -> int
def setup_logging(verbosity: int, no_color: bool, user_log_file: Optional[str]) -> int:
"""Configures and sets up all of the logging
Returns the requested logging level, as its integer value.
@@ -308,7 +265,7 @@ def setup_logging(verbosity, no_color, user_log_file):
"stderr": "ext://sys.stderr",
}
handler_classes = {
"stream": "pip._internal.utils.logging.ColorizedStreamHandler",
"stream": "pip._internal.utils.logging.RichPipStreamHandler",
"file": "pip._internal.utils.logging.BetterRotatingFileHandler",
}
handlers = ["console", "console_errors", "console_subprocess"] + (
@@ -366,8 +323,8 @@ def setup_logging(verbosity, no_color, user_log_file):
"console_subprocess": {
"level": level,
"class": handler_classes["stream"],
"no_color": no_color,
"stream": log_streams["stderr"],
"no_color": no_color,
"filters": ["restrict_to_subprocess"],
"formatter": "indent",
},

View File

@@ -12,17 +12,18 @@ import posixpath
import shutil
import stat
import sys
import sysconfig
import urllib.parse
from io import StringIO
from itertools import filterfalse, tee, zip_longest
from types import TracebackType
from typing import (
Any,
AnyStr,
BinaryIO,
Callable,
Container,
ContextManager,
Dict,
Generator,
Iterable,
Iterator,
List,
@@ -31,20 +32,18 @@ from typing import (
Tuple,
Type,
TypeVar,
Union,
cast,
)
from pip._vendor.pkg_resources import Distribution
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed
from pip import __version__
from pip._internal.exceptions import CommandError
from pip._internal.locations import get_major_minor_version, site_packages, user_site
from pip._internal.utils.compat import WINDOWS, stdlib_pkgs
from pip._internal.utils.virtualenv import (
running_under_virtualenv,
virtualenv_no_global,
)
from pip._internal.exceptions import CommandError, ExternallyManagedEnvironment
from pip._internal.locations import get_major_minor_version
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.virtualenv import running_under_virtualenv
__all__ = [
"rmtree",
@@ -60,9 +59,10 @@ __all__ = [
"captured_stdout",
"ensure_dir",
"remove_auth_from_url",
"check_externally_managed",
"ConfiguredBuildBackendHookCaller",
]
logger = logging.getLogger(__name__)
T = TypeVar("T")
@@ -71,8 +71,7 @@ VersionInfo = Tuple[int, int, int]
NetlocTuple = Tuple[str, Tuple[Optional[str], Optional[str]]]
def get_pip_version():
# type: () -> str
def get_pip_version() -> str:
pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..")
pip_pkg_dir = os.path.abspath(pip_pkg_dir)
@@ -83,8 +82,7 @@ def get_pip_version():
)
def normalize_version_info(py_version_info):
# type: (Tuple[int, ...]) -> Tuple[int, int, int]
def normalize_version_info(py_version_info: Tuple[int, ...]) -> Tuple[int, int, int]:
"""
Convert a tuple of ints representing a Python version to one of length
three.
@@ -103,8 +101,7 @@ def normalize_version_info(py_version_info):
return cast("VersionInfo", py_version_info)
def ensure_dir(path):
# type: (AnyStr) -> None
def ensure_dir(path: str) -> None:
"""os.path.makedirs without EEXIST."""
try:
os.makedirs(path)
@@ -114,8 +111,7 @@ def ensure_dir(path):
raise
def get_prog():
# type: () -> str
def get_prog() -> str:
try:
prog = os.path.basename(sys.argv[0])
if prog in ("__main__.py", "-c"):
@@ -130,13 +126,11 @@ def get_prog():
# Retry every half second for up to 3 seconds
# Tenacity raises RetryError by default, explicitly raise the original exception
@retry(reraise=True, stop=stop_after_delay(3), wait=wait_fixed(0.5))
def rmtree(dir, ignore_errors=False):
# type: (AnyStr, bool) -> None
def rmtree(dir: str, ignore_errors: bool = False) -> None:
shutil.rmtree(dir, ignore_errors=ignore_errors, onerror=rmtree_errorhandler)
def rmtree_errorhandler(func, path, exc_info):
# type: (Callable[..., Any], str, ExcInfo) -> None
def rmtree_errorhandler(func: Callable[..., Any], path: str, exc_info: ExcInfo) -> None:
"""On Windows, the files in .svn are read-only, so when rmtree() tries to
remove them, an exception is thrown. We catch that here, remove the
read-only attribute, and hopefully continue without problems."""
@@ -156,8 +150,7 @@ def rmtree_errorhandler(func, path, exc_info):
raise
def display_path(path):
# type: (str) -> str
def display_path(path: str) -> str:
"""Gives the display value for a given path, making it relative to cwd
if possible."""
path = os.path.normcase(os.path.abspath(path))
@@ -166,8 +159,7 @@ def display_path(path):
return path
def backup_dir(dir, ext=".bak"):
# type: (str, str) -> str
def backup_dir(dir: str, ext: str = ".bak") -> str:
"""Figure out the name of a directory to back up the given dir to
(adding .bak, .bak2, etc)"""
n = 1
@@ -178,16 +170,14 @@ def backup_dir(dir, ext=".bak"):
return dir + extension
def ask_path_exists(message, options):
# type: (str, Iterable[str]) -> str
def ask_path_exists(message: str, options: Iterable[str]) -> str:
for action in os.environ.get("PIP_EXISTS_ACTION", "").split():
if action in options:
return action
return ask(message, options)
def _check_no_input(message):
# type: (str) -> None
def _check_no_input(message: str) -> None:
"""Raise an error if no input is allowed."""
if os.environ.get("PIP_NO_INPUT"):
raise Exception(
@@ -195,8 +185,7 @@ def _check_no_input(message):
)
def ask(message, options):
# type: (str, Iterable[str]) -> str
def ask(message: str, options: Iterable[str]) -> str:
"""Ask the message interactively, with the given possible responses"""
while 1:
_check_no_input(message)
@@ -211,22 +200,19 @@ def ask(message, options):
return response
def ask_input(message):
# type: (str) -> str
def ask_input(message: str) -> str:
"""Ask for input interactively."""
_check_no_input(message)
return input(message)
def ask_password(message):
# type: (str) -> str
def ask_password(message: str) -> str:
"""Ask for a password interactively."""
_check_no_input(message)
return getpass.getpass(message)
def strtobool(val):
# type: (str) -> int
def strtobool(val: str) -> int:
"""Convert a string representation of truth to true (1) or false (0).
True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
@@ -242,8 +228,7 @@ def strtobool(val):
raise ValueError(f"invalid truth value {val!r}")
def format_size(bytes):
# type: (float) -> str
def format_size(bytes: float) -> str:
if bytes > 1000 * 1000:
return "{:.1f} MB".format(bytes / 1000.0 / 1000)
elif bytes > 10 * 1000:
@@ -254,8 +239,7 @@ def format_size(bytes):
return "{} bytes".format(int(bytes))
def tabulate(rows):
# type: (Iterable[Iterable[Any]]) -> Tuple[List[str], List[int]]
def tabulate(rows: Iterable[Iterable[Any]]) -> Tuple[List[str], List[int]]:
"""Return a list of formatted rows and a list of column sizes.
For example::
@@ -286,8 +270,9 @@ def is_installable_dir(path: str) -> bool:
return False
def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE):
# type: (BinaryIO, int) -> Iterator[bytes]
def read_chunks(
file: BinaryIO, size: int = io.DEFAULT_BUFFER_SIZE
) -> Generator[bytes, None, None]:
"""Yield pieces of data from a file-like object until EOF."""
while True:
chunk = file.read(size)
@@ -296,8 +281,7 @@ def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE):
yield chunk
def normalize_path(path, resolve_symlinks=True):
# type: (str, bool) -> str
def normalize_path(path: str, resolve_symlinks: bool = True) -> str:
"""
Convert a path to its canonical, case-normalized, absolute version.
@@ -310,8 +294,7 @@ def normalize_path(path, resolve_symlinks=True):
return os.path.normcase(path)
def splitext(path):
# type: (str) -> Tuple[str, str]
def splitext(path: str) -> Tuple[str, str]:
"""Like os.path.splitext, but take off .tar too"""
base, ext = posixpath.splitext(path)
if base.lower().endswith(".tar"):
@@ -320,8 +303,7 @@ def splitext(path):
return base, ext
def renames(old, new):
# type: (str, str) -> None
def renames(old: str, new: str) -> None:
"""Like os.renames(), but handles renaming across devices."""
# Implementation borrowed from os.renames().
head, tail = os.path.split(new)
@@ -338,8 +320,7 @@ def renames(old, new):
pass
def is_local(path):
# type: (str) -> bool
def is_local(path: str) -> bool:
"""
Return True if path is within sys.prefix, if we're running in a virtualenv.
@@ -353,158 +334,15 @@ def is_local(path):
return path.startswith(normalize_path(sys.prefix))
def dist_is_local(dist):
# type: (Distribution) -> bool
"""
Return True if given Distribution object is installed locally
(i.e. within current virtualenv).
Always True if we're not in a virtualenv.
"""
return is_local(dist_location(dist))
def dist_in_usersite(dist):
# type: (Distribution) -> bool
"""
Return True if given Distribution is installed in user site.
"""
return dist_location(dist).startswith(normalize_path(user_site))
def dist_in_site_packages(dist):
# type: (Distribution) -> bool
"""
Return True if given Distribution is installed in
sysconfig.get_python_lib().
"""
return dist_location(dist).startswith(normalize_path(site_packages))
def dist_is_editable(dist):
# type: (Distribution) -> bool
"""
Return True if given Distribution is an editable install.
"""
for path_item in sys.path:
egg_link = os.path.join(path_item, dist.project_name + ".egg-link")
if os.path.isfile(egg_link):
return True
return False
def get_installed_distributions(
local_only=True, # type: bool
skip=stdlib_pkgs, # type: Container[str]
include_editables=True, # type: bool
editables_only=False, # type: bool
user_only=False, # type: bool
paths=None, # type: Optional[List[str]]
):
# type: (...) -> List[Distribution]
"""Return a list of installed Distribution objects.
Left for compatibility until direct pkg_resources uses are refactored out.
"""
from pip._internal.metadata import get_default_environment, get_environment
from pip._internal.metadata.pkg_resources import Distribution as _Dist
if paths is None:
env = get_default_environment()
else:
env = get_environment(paths)
dists = env.iter_installed_distributions(
local_only=local_only,
skip=skip,
include_editables=include_editables,
editables_only=editables_only,
user_only=user_only,
)
return [cast(_Dist, dist)._dist for dist in dists]
def get_distribution(req_name):
# type: (str) -> Optional[Distribution]
"""Given a requirement name, return the installed Distribution object.
This searches from *all* distributions available in the environment, to
match the behavior of ``pkg_resources.get_distribution()``.
Left for compatibility until direct pkg_resources uses are refactored out.
"""
from pip._internal.metadata import get_default_environment
from pip._internal.metadata.pkg_resources import Distribution as _Dist
dist = get_default_environment().get_distribution(req_name)
if dist is None:
return None
return cast(_Dist, dist)._dist
def egg_link_path(dist):
# type: (Distribution) -> Optional[str]
"""
Return the path for the .egg-link file if it exists, otherwise, None.
There's 3 scenarios:
1) not in a virtualenv
try to find in site.USER_SITE, then site_packages
2) in a no-global virtualenv
try to find in site_packages
3) in a yes-global virtualenv
try to find in site_packages, then site.USER_SITE
(don't look in global location)
For #1 and #3, there could be odd cases, where there's an egg-link in 2
locations.
This method will just return the first one found.
"""
sites = []
if running_under_virtualenv():
sites.append(site_packages)
if not virtualenv_no_global() and user_site:
sites.append(user_site)
else:
if user_site:
sites.append(user_site)
sites.append(site_packages)
for site in sites:
egglink = os.path.join(site, dist.project_name) + ".egg-link"
if os.path.isfile(egglink):
return egglink
return None
def dist_location(dist):
# type: (Distribution) -> str
"""
Get the site-packages location of this distribution. Generally
this is dist.location, except in the case of develop-installed
packages, where dist.location is the source code location, and we
want to know where the egg-link file is.
The returned location is normalized (in particular, with symlinks removed).
"""
egg_link = egg_link_path(dist)
if egg_link:
return normalize_path(egg_link)
return normalize_path(dist.location)
def write_output(msg, *args):
# type: (Any, Any) -> None
def write_output(msg: Any, *args: Any) -> None:
logger.info(msg, *args)
class StreamWrapper(StringIO):
orig_stream = None # type: TextIO
orig_stream: TextIO = None
@classmethod
def from_stream(cls, orig_stream):
# type: (TextIO) -> StreamWrapper
def from_stream(cls, orig_stream: TextIO) -> "StreamWrapper":
cls.orig_stream = orig_stream
return cls()
@@ -516,8 +354,7 @@ class StreamWrapper(StringIO):
@contextlib.contextmanager
def captured_output(stream_name):
# type: (str) -> Iterator[StreamWrapper]
def captured_output(stream_name: str) -> Generator[StreamWrapper, None, None]:
"""Return a context manager used by captured_stdout/stdin/stderr
that temporarily replaces the sys stream *stream_name* with a StringIO.
@@ -531,8 +368,7 @@ def captured_output(stream_name):
setattr(sys, stream_name, orig_stdout)
def captured_stdout():
# type: () -> ContextManager[StreamWrapper]
def captured_stdout() -> ContextManager[StreamWrapper]:
"""Capture the output of sys.stdout:
with captured_stdout() as stdout:
@@ -544,8 +380,7 @@ def captured_stdout():
return captured_output("stdout")
def captured_stderr():
# type: () -> ContextManager[StreamWrapper]
def captured_stderr() -> ContextManager[StreamWrapper]:
"""
See captured_stdout().
"""
@@ -553,16 +388,14 @@ def captured_stderr():
# Simulates an enum
def enum(*sequential, **named):
# type: (*Any, **Any) -> Type[Any]
def enum(*sequential: Any, **named: Any) -> Type[Any]:
enums = dict(zip(sequential, range(len(sequential))), **named)
reverse = {value: key for key, value in enums.items()}
enums["reverse_mapping"] = reverse
return type("Enum", (), enums)
def build_netloc(host, port):
# type: (str, Optional[int]) -> str
def build_netloc(host: str, port: Optional[int]) -> str:
"""
Build a netloc from a host-port pair
"""
@@ -574,8 +407,7 @@ def build_netloc(host, port):
return f"{host}:{port}"
def build_url_from_netloc(netloc, scheme="https"):
# type: (str, str) -> str
def build_url_from_netloc(netloc: str, scheme: str = "https") -> str:
"""
Build a full URL from a netloc.
"""
@@ -585,8 +417,7 @@ def build_url_from_netloc(netloc, scheme="https"):
return f"{scheme}://{netloc}"
def parse_netloc(netloc):
# type: (str) -> Tuple[str, Optional[int]]
def parse_netloc(netloc: str) -> Tuple[str, Optional[int]]:
"""
Return the host-port pair from a netloc.
"""
@@ -595,8 +426,7 @@ def parse_netloc(netloc):
return parsed.hostname, parsed.port
def split_auth_from_netloc(netloc):
# type: (str) -> NetlocTuple
def split_auth_from_netloc(netloc: str) -> NetlocTuple:
"""
Parse out and remove the auth information from a netloc.
@@ -609,7 +439,7 @@ def split_auth_from_netloc(netloc):
# behaves if more than one @ is present (which can be checked using
# the password attribute of urlsplit()'s return value).
auth, netloc = netloc.rsplit("@", 1)
pw = None # type: Optional[str]
pw: Optional[str] = None
if ":" in auth:
# Split from the left because that's how urllib.parse.urlsplit()
# behaves if more than one : is present (which again can be checked
@@ -625,8 +455,7 @@ def split_auth_from_netloc(netloc):
return netloc, (user, pw)
def redact_netloc(netloc):
# type: (str) -> str
def redact_netloc(netloc: str) -> str:
"""
Replace the sensitive data in a netloc with "****", if it exists.
@@ -648,8 +477,9 @@ def redact_netloc(netloc):
)
def _transform_url(url, transform_netloc):
# type: (str, Callable[[str], Tuple[Any, ...]]) -> Tuple[str, NetlocTuple]
def _transform_url(
url: str, transform_netloc: Callable[[str], Tuple[Any, ...]]
) -> Tuple[str, NetlocTuple]:
"""Transform and replace netloc in a url.
transform_netloc is a function taking the netloc and returning a
@@ -667,18 +497,15 @@ def _transform_url(url, transform_netloc):
return surl, cast("NetlocTuple", netloc_tuple)
def _get_netloc(netloc):
# type: (str) -> NetlocTuple
def _get_netloc(netloc: str) -> NetlocTuple:
return split_auth_from_netloc(netloc)
def _redact_netloc(netloc):
# type: (str) -> Tuple[str,]
def _redact_netloc(netloc: str) -> Tuple[str]:
return (redact_netloc(netloc),)
def split_auth_netloc_from_url(url):
# type: (str) -> Tuple[str, str, Tuple[str, str]]
def split_auth_netloc_from_url(url: str) -> Tuple[str, str, Tuple[str, str]]:
"""
Parse a url into separate netloc, auth, and url with no auth.
@@ -688,41 +515,31 @@ def split_auth_netloc_from_url(url):
return url_without_auth, netloc, auth
def remove_auth_from_url(url):
# type: (str) -> str
def remove_auth_from_url(url: str) -> str:
"""Return a copy of url with 'username:password@' removed."""
# username/pass params are passed to subversion through flags
# and are not recognized in the url.
return _transform_url(url, _get_netloc)[0]
def redact_auth_from_url(url):
# type: (str) -> str
def redact_auth_from_url(url: str) -> str:
"""Replace the password in a given url with ****."""
return _transform_url(url, _redact_netloc)[0]
class HiddenText:
def __init__(
self,
secret, # type: str
redacted, # type: str
):
# type: (...) -> None
def __init__(self, secret: str, redacted: str) -> None:
self.secret = secret
self.redacted = redacted
def __repr__(self):
# type: (...) -> str
def __repr__(self) -> str:
return "<HiddenText {!r}>".format(str(self))
def __str__(self):
# type: (...) -> str
def __str__(self) -> str:
return self.redacted
# This is useful for testing.
def __eq__(self, other):
# type: (Any) -> bool
def __eq__(self, other: Any) -> bool:
if type(self) != type(other):
return False
@@ -731,28 +548,25 @@ class HiddenText:
return self.secret == other.secret
def hide_value(value):
# type: (str) -> HiddenText
def hide_value(value: str) -> HiddenText:
return HiddenText(value, redacted="****")
def hide_url(url):
# type: (str) -> HiddenText
def hide_url(url: str) -> HiddenText:
redacted = redact_auth_from_url(url)
return HiddenText(url, redacted=redacted)
def protect_pip_from_modification_on_windows(modifying_pip):
# type: (bool) -> None
def protect_pip_from_modification_on_windows(modifying_pip: bool) -> None:
"""Protection of pip.exe from modification on Windows
On Windows, any operation modifying pip should be run as:
python -m pip ...
"""
pip_names = [
"pip.exe",
"pip{}.exe".format(sys.version_info[0]),
"pip{}.{}.exe".format(*sys.version_info[:2]),
"pip",
f"pip{sys.version_info.major}",
f"pip{sys.version_info.major}.{sys.version_info.minor}",
]
# See https://github.com/pypa/pip/issues/1299 for more discussion
@@ -769,14 +583,27 @@ def protect_pip_from_modification_on_windows(modifying_pip):
)
def is_console_interactive():
# type: () -> bool
def check_externally_managed() -> None:
"""Check whether the current environment is externally managed.
If the ``EXTERNALLY-MANAGED`` config file is found, the current environment
is considered externally managed, and an ExternallyManagedEnvironment is
raised.
"""
if running_under_virtualenv():
return
marker = os.path.join(sysconfig.get_path("stdlib"), "EXTERNALLY-MANAGED")
if not os.path.isfile(marker):
return
raise ExternallyManagedEnvironment.from_config(marker)
def is_console_interactive() -> bool:
"""Is this console interactive?"""
return sys.stdin is not None and sys.stdin.isatty()
def hash_file(path, blocksize=1 << 20):
# type: (str, int) -> Tuple[Any, int]
def hash_file(path: str, blocksize: int = 1 << 20) -> Tuple[Any, int]:
"""Return (hash, length) for path using hashlib.sha256()"""
h = hashlib.sha256()
@@ -788,21 +615,7 @@ def hash_file(path, blocksize=1 << 20):
return h, length
def is_wheel_installed():
# type: () -> bool
"""
Return whether the wheel package is installed.
"""
try:
import wheel # noqa: F401
except ImportError:
return False
return True
def pairwise(iterable):
# type: (Iterable[Any]) -> Iterator[Tuple[Any, Any]]
def pairwise(iterable: Iterable[Any]) -> Iterator[Tuple[Any, Any]]:
"""
Return paired elements.
@@ -814,10 +627,9 @@ def pairwise(iterable):
def partition(
pred, # type: Callable[[T], bool]
iterable, # type: Iterable[T]
):
# type: (...) -> Tuple[Iterable[T], Iterable[T]]
pred: Callable[[T], bool],
iterable: Iterable[T],
) -> Tuple[Iterable[T], Iterable[T]]:
"""
Use a predicate to partition entries into false entries and true entries,
like
@@ -826,3 +638,93 @@ def partition(
"""
t1, t2 = tee(iterable)
return filterfalse(pred, t1), filter(pred, t2)
class ConfiguredBuildBackendHookCaller(BuildBackendHookCaller):
def __init__(
self,
config_holder: Any,
source_dir: str,
build_backend: str,
backend_path: Optional[str] = None,
runner: Optional[Callable[..., None]] = None,
python_executable: Optional[str] = None,
):
super().__init__(
source_dir, build_backend, backend_path, runner, python_executable
)
self.config_holder = config_holder
def build_wheel(
self,
wheel_directory: str,
config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
metadata_directory: Optional[str] = None,
) -> str:
cs = self.config_holder.config_settings
return super().build_wheel(
wheel_directory, config_settings=cs, metadata_directory=metadata_directory
)
def build_sdist(
self,
sdist_directory: str,
config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
) -> str:
cs = self.config_holder.config_settings
return super().build_sdist(sdist_directory, config_settings=cs)
def build_editable(
self,
wheel_directory: str,
config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
metadata_directory: Optional[str] = None,
) -> str:
cs = self.config_holder.config_settings
return super().build_editable(
wheel_directory, config_settings=cs, metadata_directory=metadata_directory
)
def get_requires_for_build_wheel(
self, config_settings: Optional[Dict[str, Union[str, List[str]]]] = None
) -> List[str]:
cs = self.config_holder.config_settings
return super().get_requires_for_build_wheel(config_settings=cs)
def get_requires_for_build_sdist(
self, config_settings: Optional[Dict[str, Union[str, List[str]]]] = None
) -> List[str]:
cs = self.config_holder.config_settings
return super().get_requires_for_build_sdist(config_settings=cs)
def get_requires_for_build_editable(
self, config_settings: Optional[Dict[str, Union[str, List[str]]]] = None
) -> List[str]:
cs = self.config_holder.config_settings
return super().get_requires_for_build_editable(config_settings=cs)
def prepare_metadata_for_build_wheel(
self,
metadata_directory: str,
config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
_allow_fallback: bool = True,
) -> str:
cs = self.config_holder.config_settings
return super().prepare_metadata_for_build_wheel(
metadata_directory=metadata_directory,
config_settings=cs,
_allow_fallback=_allow_fallback,
)
def prepare_metadata_for_build_editable(
self,
metadata_directory: str,
config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
_allow_fallback: bool = True,
) -> str:
cs = self.config_holder.config_settings
return super().prepare_metadata_for_build_editable(
metadata_directory=metadata_directory,
config_settings=cs,
_allow_fallback=_allow_fallback,
)

View File

@@ -10,37 +10,29 @@ class KeyBasedCompareMixin:
__slots__ = ["_compare_key", "_defining_class"]
def __init__(self, key, defining_class):
# type: (Any, Type[KeyBasedCompareMixin]) -> None
def __init__(self, key: Any, defining_class: Type["KeyBasedCompareMixin"]) -> None:
self._compare_key = key
self._defining_class = defining_class
def __hash__(self):
# type: () -> int
def __hash__(self) -> int:
return hash(self._compare_key)
def __lt__(self, other):
# type: (Any) -> bool
def __lt__(self, other: Any) -> bool:
return self._compare(other, operator.__lt__)
def __le__(self, other):
# type: (Any) -> bool
def __le__(self, other: Any) -> bool:
return self._compare(other, operator.__le__)
def __gt__(self, other):
# type: (Any) -> bool
def __gt__(self, other: Any) -> bool:
return self._compare(other, operator.__gt__)
def __ge__(self, other):
# type: (Any) -> bool
def __ge__(self, other: Any) -> bool:
return self._compare(other, operator.__ge__)
def __eq__(self, other):
# type: (Any) -> bool
def __eq__(self, other: Any) -> bool:
return self._compare(other, operator.__eq__)
def _compare(self, other, method):
# type: (Any, Callable[[Any, Any], bool]) -> bool
def _compare(self, other: Any, method: Callable[[Any, Any], bool]) -> bool:
if not isinstance(other, self._defining_class):
return NotImplemented

View File

@@ -1,20 +1,19 @@
import functools
import logging
from email.message import Message
from email.parser import FeedParser
from typing import Optional, Tuple
import re
from typing import NewType, Optional, Tuple, cast
from pip._vendor import pkg_resources
from pip._vendor.packaging import specifiers, version
from pip._vendor.pkg_resources import Distribution
from pip._vendor.packaging.requirements import Requirement
from pip._internal.exceptions import NoneMetadataError
from pip._internal.utils.misc import display_path
NormalizedExtra = NewType("NormalizedExtra", str)
logger = logging.getLogger(__name__)
def check_requires_python(requires_python, version_info):
# type: (Optional[str], Tuple[int, ...]) -> bool
def check_requires_python(
requires_python: Optional[str], version_info: Tuple[int, ...]
) -> bool:
"""
Check if the given Python version matches a "Requires-Python" specifier.
@@ -35,55 +34,24 @@ def check_requires_python(requires_python, version_info):
return python_version in requires_python_specifier
def get_metadata(dist):
# type: (Distribution) -> Message
@functools.lru_cache(maxsize=512)
def get_requirement(req_string: str) -> Requirement:
"""Construct a packaging.Requirement object with caching"""
# Parsing requirement strings is expensive, and is also expected to happen
# with a low diversity of different arguments (at least relative the number
# constructed). This method adds a cache to requirement object creation to
# minimize repeated parsing of the same string to construct equivalent
# Requirement objects.
return Requirement(req_string)
def safe_extra(extra: str) -> NormalizedExtra:
"""Convert an arbitrary string to a standard 'extra' name
Any runs of non-alphanumeric characters are replaced with a single '_',
and the result is always lowercased.
This function is duplicated from ``pkg_resources``. Note that this is not
the same to either ``canonicalize_name`` or ``_egg_link_name``.
"""
:raises NoneMetadataError: if the distribution reports `has_metadata()`
True but `get_metadata()` returns None.
"""
metadata_name = "METADATA"
if isinstance(dist, pkg_resources.DistInfoDistribution) and dist.has_metadata(
metadata_name
):
metadata = dist.get_metadata(metadata_name)
elif dist.has_metadata("PKG-INFO"):
metadata_name = "PKG-INFO"
metadata = dist.get_metadata(metadata_name)
else:
logger.warning("No metadata found in %s", display_path(dist.location))
metadata = ""
if metadata is None:
raise NoneMetadataError(dist, metadata_name)
feed_parser = FeedParser()
# The following line errors out if with a "NoneType" TypeError if
# passed metadata=None.
feed_parser.feed(metadata)
return feed_parser.close()
def get_requires_python(dist):
# type: (pkg_resources.Distribution) -> Optional[str]
"""
Return the "Requires-Python" metadata for a distribution, or None
if not present.
"""
pkg_info_dict = get_metadata(dist)
requires_python = pkg_info_dict.get("Requires-Python")
if requires_python is not None:
# Convert to a str to satisfy the type checker, since requires_python
# can be a Header object.
requires_python = str(requires_python)
return requires_python
def get_installer(dist):
# type: (Distribution) -> str
if dist.has_metadata("INSTALLER"):
for line in dist.get_metadata_lines("INSTALLER"):
if line.strip():
return line.strip()
return ""
return cast(NormalizedExtra, re.sub("[^A-Za-z0-9.-]+", "_", extra).lower())

View File

@@ -1,101 +0,0 @@
"""Convenient parallelization of higher order functions.
This module provides two helper functions, with appropriate fallbacks on
Python 2 and on systems lacking support for synchronization mechanisms:
- map_multiprocess
- map_multithread
These helpers work like Python 3's map, with two differences:
- They don't guarantee the order of processing of
the elements of the iterable.
- The underlying process/thread pools chop the iterable into
a number of chunks, so that for very long iterables using
a large value for chunksize can make the job complete much faster
than using the default value of 1.
"""
__all__ = ["map_multiprocess", "map_multithread"]
from contextlib import contextmanager
from multiprocessing import Pool as ProcessPool
from multiprocessing import pool
from multiprocessing.dummy import Pool as ThreadPool
from typing import Callable, Iterable, Iterator, TypeVar, Union
from pip._vendor.requests.adapters import DEFAULT_POOLSIZE
Pool = Union[pool.Pool, pool.ThreadPool]
S = TypeVar("S")
T = TypeVar("T")
# On platforms without sem_open, multiprocessing[.dummy] Pool
# cannot be created.
try:
import multiprocessing.synchronize # noqa
except ImportError:
LACK_SEM_OPEN = True
else:
LACK_SEM_OPEN = False
# Incredibly large timeout to work around bpo-8296 on Python 2.
TIMEOUT = 2000000
@contextmanager
def closing(pool):
# type: (Pool) -> Iterator[Pool]
"""Return a context manager making sure the pool closes properly."""
try:
yield pool
finally:
# For Pool.imap*, close and join are needed
# for the returned iterator to begin yielding.
pool.close()
pool.join()
pool.terminate()
def _map_fallback(func, iterable, chunksize=1):
# type: (Callable[[S], T], Iterable[S], int) -> Iterator[T]
"""Make an iterator applying func to each element in iterable.
This function is the sequential fallback either on Python 2
where Pool.imap* doesn't react to KeyboardInterrupt
or when sem_open is unavailable.
"""
return map(func, iterable)
def _map_multiprocess(func, iterable, chunksize=1):
# type: (Callable[[S], T], Iterable[S], int) -> Iterator[T]
"""Chop iterable into chunks and submit them to a process pool.
For very long iterables using a large value for chunksize can make
the job complete much faster than using the default value of 1.
Return an unordered iterator of the results.
"""
with closing(ProcessPool()) as pool:
return pool.imap_unordered(func, iterable, chunksize)
def _map_multithread(func, iterable, chunksize=1):
# type: (Callable[[S], T], Iterable[S], int) -> Iterator[T]
"""Chop iterable into chunks and submit them to a thread pool.
For very long iterables using a large value for chunksize can make
the job complete much faster than using the default value of 1.
Return an unordered iterator of the results.
"""
with closing(ThreadPool(DEFAULT_POOLSIZE)) as pool:
return pool.imap_unordered(func, iterable, chunksize)
if LACK_SEM_OPEN:
map_multiprocess = map_multithread = _map_fallback
else:
map_multiprocess = _map_multiprocess
map_multithread = _map_multithread

View File

@@ -1,40 +0,0 @@
from typing import Dict, Iterable, List
from pip._vendor.pkg_resources import yield_lines
class DictMetadata:
"""IMetadataProvider that reads metadata files from a dictionary."""
def __init__(self, metadata):
# type: (Dict[str, bytes]) -> None
self._metadata = metadata
def has_metadata(self, name):
# type: (str) -> bool
return name in self._metadata
def get_metadata(self, name):
# type: (str) -> str
try:
return self._metadata[name].decode()
except UnicodeDecodeError as e:
# Mirrors handling done in pkg_resources.NullProvider.
e.reason += f" in {name} file"
raise
def get_metadata_lines(self, name):
# type: (str) -> Iterable[str]
return yield_lines(self.get_metadata(name))
def metadata_isdir(self, name):
# type: (str) -> bool
return False
def metadata_listdir(self, name):
# type: (str) -> List[str]
return []
def run_script(self, script_name, namespace):
# type: (str, str) -> None
pass

View File

@@ -1,30 +1,57 @@
import sys
import textwrap
from typing import List, Optional, Sequence
# Shim to wrap setup.py invocation with setuptools
#
# We set sys.argv[0] to the path to the underlying setup.py file so
# setuptools / distutils don't take the path to the setup.py to be "-c" when
# invoking via the shim. This avoids e.g. the following manifest_maker
# warning: "warning: manifest_maker: standard file '-c' not found".
_SETUPTOOLS_SHIM = (
"import io, os, sys, setuptools, tokenize; sys.argv[0] = {0!r}; __file__={0!r};"
"f = getattr(tokenize, 'open', open)(__file__) "
"if os.path.exists(__file__) "
"else io.StringIO('from setuptools import setup; setup()');"
"code = f.read().replace('\\r\\n', '\\n');"
"f.close();"
"exec(compile(code, __file__, 'exec'))"
)
# Note that __file__ is handled via two {!r} *and* %r, to ensure that paths on
# Windows are correctly handled (it should be "C:\\Users" not "C:\Users").
_SETUPTOOLS_SHIM = textwrap.dedent(
"""
exec(compile('''
# This is <pip-setuptools-caller> -- a caller that pip uses to run setup.py
#
# - It imports setuptools before invoking setup.py, to enable projects that directly
# import from `distutils.core` to work with newer packaging standards.
# - It provides a clear error message when setuptools is not installed.
# - It sets `sys.argv[0]` to the underlying `setup.py`, when invoking `setup.py` so
# setuptools doesn't think the script is `-c`. This avoids the following warning:
# manifest_maker: standard file '-c' not found".
# - It generates a shim setup.py, for handling setup.cfg-only projects.
import os, sys, tokenize
try:
import setuptools
except ImportError as error:
print(
"ERROR: Can not execute `setup.py` since setuptools is not available in "
"the build environment.",
file=sys.stderr,
)
sys.exit(1)
__file__ = %r
sys.argv[0] = __file__
if os.path.exists(__file__):
filename = __file__
with tokenize.open(__file__) as f:
setup_py_code = f.read()
else:
filename = "<auto-generated setuptools caller>"
setup_py_code = "from setuptools import setup; setup()"
exec(compile(setup_py_code, filename, "exec"))
''' % ({!r},), "<pip-setuptools-caller>", "exec"))
"""
).rstrip()
def make_setuptools_shim_args(
setup_py_path, # type: str
global_options=None, # type: Sequence[str]
no_user_config=False, # type: bool
unbuffered_output=False, # type: bool
):
# type: (...) -> List[str]
setup_py_path: str,
global_options: Optional[Sequence[str]] = None,
no_user_config: bool = False,
unbuffered_output: bool = False,
) -> List[str]:
"""
Get setuptools command arguments with shim wrapped setup file invocation.
@@ -46,12 +73,11 @@ def make_setuptools_shim_args(
def make_setuptools_bdist_wheel_args(
setup_py_path, # type: str
global_options, # type: Sequence[str]
build_options, # type: Sequence[str]
destination_dir, # type: str
):
# type: (...) -> List[str]
setup_py_path: str,
global_options: Sequence[str],
build_options: Sequence[str],
destination_dir: str,
) -> List[str]:
# NOTE: Eventually, we'd want to also -S to the flags here, when we're
# isolating. Currently, it breaks Python in virtualenvs, because it
# relies on site.py to find parts of the standard library outside the
@@ -65,10 +91,9 @@ def make_setuptools_bdist_wheel_args(
def make_setuptools_clean_args(
setup_py_path, # type: str
global_options, # type: Sequence[str]
):
# type: (...) -> List[str]
setup_py_path: str,
global_options: Sequence[str],
) -> List[str]:
args = make_setuptools_shim_args(
setup_py_path, global_options=global_options, unbuffered_output=True
)
@@ -77,15 +102,14 @@ def make_setuptools_clean_args(
def make_setuptools_develop_args(
setup_py_path, # type: str
global_options, # type: Sequence[str]
install_options, # type: Sequence[str]
no_user_config, # type: bool
prefix, # type: Optional[str]
home, # type: Optional[str]
use_user_site, # type: bool
):
# type: (...) -> List[str]
setup_py_path: str,
*,
global_options: Sequence[str],
no_user_config: bool,
prefix: Optional[str],
home: Optional[str],
use_user_site: bool,
) -> List[str]:
assert not (use_user_site and prefix)
args = make_setuptools_shim_args(
@@ -96,8 +120,6 @@ def make_setuptools_develop_args(
args += ["develop", "--no-deps"]
args += install_options
if prefix:
args += ["--prefix", prefix]
if home is not None:
@@ -110,11 +132,10 @@ def make_setuptools_develop_args(
def make_setuptools_egg_info_args(
setup_py_path, # type: str
egg_info_dir, # type: Optional[str]
no_user_config, # type: bool
):
# type: (...) -> List[str]
setup_py_path: str,
egg_info_dir: Optional[str],
no_user_config: bool,
) -> List[str]:
args = make_setuptools_shim_args(setup_py_path, no_user_config=no_user_config)
args += ["egg_info"]
@@ -123,51 +144,3 @@ def make_setuptools_egg_info_args(
args += ["--egg-base", egg_info_dir]
return args
def make_setuptools_install_args(
setup_py_path, # type: str
global_options, # type: Sequence[str]
install_options, # type: Sequence[str]
record_filename, # type: str
root, # type: Optional[str]
prefix, # type: Optional[str]
header_dir, # type: Optional[str]
home, # type: Optional[str]
use_user_site, # type: bool
no_user_config, # type: bool
pycompile, # type: bool
):
# type: (...) -> List[str]
assert not (use_user_site and prefix)
assert not (use_user_site and root)
args = make_setuptools_shim_args(
setup_py_path,
global_options=global_options,
no_user_config=no_user_config,
unbuffered_output=True,
)
args += ["install", "--record", record_filename]
args += ["--single-version-externally-managed"]
if root is not None:
args += ["--root", root]
if prefix is not None:
args += ["--prefix", prefix]
if home is not None:
args += ["--home", home]
if use_user_site:
args += ["--user", "--prefix="]
if pycompile:
args += ["--compile"]
else:
args += ["--no-compile"]
if header_dir:
args += ["--install-headers", header_dir]
args += install_options
return args

View File

@@ -2,25 +2,38 @@ import logging
import os
import shlex
import subprocess
from typing import Any, Callable, Iterable, List, Mapping, Optional, Union
from typing import (
TYPE_CHECKING,
Any,
Callable,
Iterable,
List,
Mapping,
Optional,
Union,
)
from pip._vendor.rich.markup import escape
from pip._internal.cli.spinners import SpinnerInterface, open_spinner
from pip._internal.exceptions import InstallationSubprocessError
from pip._internal.utils.logging import VERBOSE, subprocess_logger
from pip._internal.utils.misc import HiddenText
if TYPE_CHECKING:
# Literal was introduced in Python 3.8.
#
# TODO: Remove `if TYPE_CHECKING` when dropping support for Python 3.7.
from typing import Literal
CommandArgs = List[Union[str, HiddenText]]
LOG_DIVIDER = "----------------------------------------"
def make_command(*args):
# type: (Union[str, HiddenText, CommandArgs]) -> CommandArgs
def make_command(*args: Union[str, HiddenText, CommandArgs]) -> CommandArgs:
"""
Create a CommandArgs object.
"""
command_args = [] # type: CommandArgs
command_args: CommandArgs = []
for arg in args:
# Check for list instead of CommandArgs since CommandArgs is
# only known during type-checking.
@@ -33,8 +46,7 @@ def make_command(*args):
return command_args
def format_command_args(args):
# type: (Union[List[str], CommandArgs]) -> str
def format_command_args(args: Union[List[str], CommandArgs]) -> str:
"""
Format command arguments for display.
"""
@@ -49,64 +61,27 @@ def format_command_args(args):
)
def reveal_command_args(args: Union[List[str], CommandArgs]) -> List[str]:
    """Return the arguments in their raw, unredacted form.

    HiddenText entries are replaced by their ``secret`` value; plain
    strings pass through unchanged.
    """
    return [arg.secret if isinstance(arg, HiddenText) else arg for arg in args]
def make_subprocess_output_error(
    cmd_args: Union[List[str], CommandArgs],
    cwd: Optional[str],
    lines: List[str],
    exit_status: int,
) -> str:
    """Create and return the error message to use to log a subprocess error
    with command output.

    :param lines: A list of lines, each ending with a newline.
    """
    command = format_command_args(cmd_args)

    # We know the joined output value ends in a newline.
    output = "".join(lines)
    msg = (
        "Command errored out with exit status {exit_status}:\n"
        " command: {command_display}\n"
        " cwd: {cwd_display}\n"
        "Complete output ({line_count} lines):\n{output}{divider}"
    ).format(
        exit_status=exit_status,
        command_display=command,
        cwd_display=cwd,
        line_count=len(lines),
        output=output,
        divider=LOG_DIVIDER,
    )
    return msg
def call_subprocess(
cmd, # type: Union[List[str], CommandArgs]
show_stdout=False, # type: bool
cwd=None, # type: Optional[str]
on_returncode="raise", # type: str
extra_ok_returncodes=None, # type: Optional[Iterable[int]]
command_desc=None, # type: Optional[str]
extra_environ=None, # type: Optional[Mapping[str, Any]]
unset_environ=None, # type: Optional[Iterable[str]]
spinner=None, # type: Optional[SpinnerInterface]
log_failed_cmd=True, # type: Optional[bool]
stdout_only=False, # type: Optional[bool]
):
# type: (...) -> str
cmd: Union[List[str], CommandArgs],
show_stdout: bool = False,
cwd: Optional[str] = None,
on_returncode: 'Literal["raise", "warn", "ignore"]' = "raise",
extra_ok_returncodes: Optional[Iterable[int]] = None,
extra_environ: Optional[Mapping[str, Any]] = None,
unset_environ: Optional[Iterable[str]] = None,
spinner: Optional[SpinnerInterface] = None,
log_failed_cmd: Optional[bool] = True,
stdout_only: Optional[bool] = False,
*,
command_desc: str,
) -> str:
"""
Args:
show_stdout: if true, use INFO to log the subprocess's stderr and
@@ -141,7 +116,7 @@ def call_subprocess(
# replaced by INFO.
if show_stdout:
# Then log the subprocess output at INFO level.
log_subprocess = subprocess_logger.info
log_subprocess: Callable[..., None] = subprocess_logger.info
used_level = logging.INFO
else:
# Then log the subprocess output using VERBOSE. This also ensures
@@ -156,9 +131,6 @@ def call_subprocess(
# and we have a spinner.
use_spinner = not showing_subprocess and spinner is not None
if command_desc is None:
command_desc = format_command_args(cmd)
log_subprocess("Running command %s", command_desc)
env = os.environ.copy()
if extra_environ:
@@ -191,7 +163,7 @@ def call_subprocess(
proc.stdin.close()
# In this mode, stdout and stderr are in the same pipe.
while True:
line = proc.stdout.readline() # type: str
line: str = proc.stdout.readline()
if not line:
break
line = line.rstrip()
@@ -231,17 +203,25 @@ def call_subprocess(
spinner.finish("done")
if proc_had_error:
if on_returncode == "raise":
if not showing_subprocess and log_failed_cmd:
# Then the subprocess streams haven't been logged to the
# console yet.
msg = make_subprocess_output_error(
cmd_args=cmd,
cwd=cwd,
lines=all_output,
exit_status=proc.returncode,
error = InstallationSubprocessError(
command_description=command_desc,
exit_code=proc.returncode,
output_lines=all_output if not showing_subprocess else None,
)
if log_failed_cmd:
subprocess_logger.error("[present-rich] %s", error)
subprocess_logger.verbose(
"[bold magenta]full command[/]: [blue]%s[/]",
escape(format_command_args(cmd)),
extra={"markup": True},
)
subprocess_logger.error(msg)
raise InstallationSubprocessError(proc.returncode, command_desc)
subprocess_logger.verbose(
"[bold magenta]cwd[/]: %s",
escape(cwd or "[inherit]"),
extra={"markup": True},
)
raise error
elif on_returncode == "warn":
subprocess_logger.warning(
'Command "%s" had error code %s in %s',
@@ -256,23 +236,22 @@ def call_subprocess(
return output
def runner_with_spinner_message(message):
# type: (str) -> Callable[..., None]
def runner_with_spinner_message(message: str) -> Callable[..., None]:
"""Provide a subprocess_runner that shows a spinner message.
Intended for use with for pep517's Pep517HookCaller. Thus, the runner has
an API that matches what's expected by Pep517HookCaller.subprocess_runner.
Intended for use with for BuildBackendHookCaller. Thus, the runner has
an API that matches what's expected by BuildBackendHookCaller.subprocess_runner.
"""
def runner(
cmd, # type: List[str]
cwd=None, # type: Optional[str]
extra_environ=None, # type: Optional[Mapping[str, Any]]
):
# type: (...) -> None
cmd: List[str],
cwd: Optional[str] = None,
extra_environ: Optional[Mapping[str, Any]] = None,
) -> None:
with open_spinner(message) as spinner:
call_subprocess(
cmd,
command_desc=message,
cwd=cwd,
extra_environ=extra_environ,
spinner=spinner,

View File

@@ -4,7 +4,7 @@ import logging
import os.path
import tempfile
from contextlib import ExitStack, contextmanager
from typing import Any, Dict, Iterator, Optional, TypeVar, Union
from typing import Any, Dict, Generator, Optional, TypeVar, Union
from pip._internal.utils.misc import enum, rmtree
@@ -22,12 +22,11 @@ tempdir_kinds = enum(
)
_tempdir_manager = None # type: Optional[ExitStack]
_tempdir_manager: Optional[ExitStack] = None
@contextmanager
def global_tempdir_manager():
# type: () -> Iterator[None]
def global_tempdir_manager() -> Generator[None, None, None]:
global _tempdir_manager
with ExitStack() as stack:
old_tempdir_manager, _tempdir_manager = _tempdir_manager, stack
@@ -40,31 +39,27 @@ def global_tempdir_manager():
class TempDirectoryTypeRegistry:
    """Manages temp directory behavior"""

    def __init__(self) -> None:
        # Per-kind auto-delete overrides; kinds absent from this mapping
        # default to True (see get_delete).
        self._should_delete: Dict[str, bool] = {}

    def set_delete(self, kind: str, value: bool) -> None:
        """Indicate whether a TempDirectory of the given kind should be
        auto-deleted.
        """
        self._should_delete[kind] = value

    def get_delete(self, kind: str) -> bool:
        """Get configured auto-delete flag for a given TempDirectory type,
        default True.
        """
        return self._should_delete.get(kind, True)
# Scoped registry installed by tempdir_registry(); None outside that context.
_tempdir_registry: Optional[TempDirectoryTypeRegistry] = None
@contextmanager
def tempdir_registry():
# type: () -> Iterator[TempDirectoryTypeRegistry]
def tempdir_registry() -> Generator[TempDirectoryTypeRegistry, None, None]:
"""Provides a scoped global tempdir registry that can be used to dictate
whether directories should be deleted.
"""
@@ -107,10 +102,10 @@ class TempDirectory:
def __init__(
self,
path=None, # type: Optional[str]
delete=_default, # type: Union[bool, None, _Default]
kind="temp", # type: str
globally_managed=False, # type: bool
path: Optional[str] = None,
delete: Union[bool, None, _Default] = _default,
kind: str = "temp",
globally_managed: bool = False,
):
super().__init__()
@@ -139,21 +134,17 @@ class TempDirectory:
_tempdir_manager.enter_context(self)
@property
def path(self):
# type: () -> str
def path(self) -> str:
assert not self._deleted, f"Attempted to access deleted path: {self._path}"
return self._path
def __repr__(self):
# type: () -> str
def __repr__(self) -> str:
return f"<{self.__class__.__name__} {self.path!r}>"
def __enter__(self):
# type: (_T) -> _T
def __enter__(self: _T) -> _T:
return self
def __exit__(self, exc, value, tb):
# type: (Any, Any, Any) -> None
def __exit__(self, exc: Any, value: Any, tb: Any) -> None:
if self.delete is not None:
delete = self.delete
elif _tempdir_registry:
@@ -164,8 +155,7 @@ class TempDirectory:
if delete:
self.cleanup()
def _create(self, kind):
# type: (str) -> str
def _create(self, kind: str) -> str:
"""Create a temporary directory and store its path in self.path"""
# We realpath here because some systems have their default tmpdir
# symlinked to another directory. This tends to confuse build
@@ -175,8 +165,7 @@ class TempDirectory:
logger.debug("Created temporary directory: %s", path)
return path
def cleanup(self):
# type: () -> None
def cleanup(self) -> None:
"""Remove the temporary directory created and reset state"""
self._deleted = True
if not os.path.exists(self._path):
@@ -206,14 +195,12 @@ class AdjacentTempDirectory(TempDirectory):
# with leading '-' and invalid metadata
LEADING_CHARS = "-~.=%0123456789"
def __init__(self, original, delete=None):
# type: (str, Optional[bool]) -> None
def __init__(self, original: str, delete: Optional[bool] = None) -> None:
self.original = original.rstrip("/\\")
super().__init__(delete=delete)
@classmethod
def _generate_names(cls, name):
# type: (str) -> Iterator[str]
def _generate_names(cls, name: str) -> Generator[str, None, None]:
"""Generates a series of temporary names.
The algorithm replaces the leading characters in the name
@@ -238,8 +225,7 @@ class AdjacentTempDirectory(TempDirectory):
if new_name != name:
yield new_name
def _create(self, kind):
# type: (str) -> str
def _create(self, kind: str) -> str:
root, name = os.path.split(self.original)
for candidate in self._generate_names(name):
path = os.path.join(root, candidate)

View File

@@ -40,16 +40,14 @@ except ImportError:
logger.debug("lzma module is not available")
def current_umask() -> int:
    """Get the current umask which involves having to set it temporarily."""
    # os.umask both sets and returns the previous mask, so read it by
    # setting a throwaway value and immediately restoring the original.
    mask = os.umask(0)
    os.umask(mask)
    return mask
def split_leading_dir(path):
# type: (str) -> List[str]
def split_leading_dir(path: str) -> List[str]:
path = path.lstrip("/").lstrip("\\")
if "/" in path and (
("\\" in path and path.find("/") < path.find("\\")) or "\\" not in path
@@ -61,8 +59,7 @@ def split_leading_dir(path):
return [path, ""]
def has_leading_dir(paths):
# type: (Iterable[str]) -> bool
def has_leading_dir(paths: Iterable[str]) -> bool:
"""Returns true if all the paths have the same leading path name
(i.e., everything is in one subdirectory in an archive)"""
common_prefix = None
@@ -77,8 +74,7 @@ def has_leading_dir(paths):
return True
def is_within_directory(directory, target):
# type: (str, str) -> bool
def is_within_directory(directory: str, target: str) -> bool:
"""
Return true if the absolute path of target is within the directory
"""
@@ -89,8 +85,7 @@ def is_within_directory(directory, target):
return prefix == abs_directory
def set_extracted_file_to_default_mode_plus_executable(path: str) -> None:
    """
    Make file present at path have execute for user/group/world
    (chmod +x) is no-op on windows per python docs
    """
    # Default mode (0o777 minus the umask) plus the three execute bits.
    os.chmod(path, (0o777 & ~current_umask() | 0o111))
def zip_item_is_executable(info: ZipInfo) -> bool:
    """Return True if the zip entry is a regular file with any execute bit set."""
    # The Unix st_mode lives in the high 16 bits of external_attr.
    mode = info.external_attr >> 16
    # if mode and regular file and any execute permissions for
    # user/group/world?
    return bool(mode and stat.S_ISREG(mode) and mode & 0o111)
def unzip_file(filename, location, flatten=True):
# type: (str, str, bool) -> None
def unzip_file(filename: str, location: str, flatten: bool = True) -> None:
"""
Unzip the file (with path `filename`) to the destination `location`. All
files are written based on system defaults and umask (i.e. permissions are
@@ -153,8 +146,7 @@ def unzip_file(filename, location, flatten=True):
zipfp.close()
def untar_file(filename, location):
# type: (str, str) -> None
def untar_file(filename: str, location: str) -> None:
"""
Untar the file (with path `filename`) to the destination `location`.
All files are written based on system defaults and umask (i.e. permissions
@@ -196,8 +188,7 @@ def untar_file(filename, location):
ensure_dir(path)
elif member.issym():
try:
# https://github.com/python/typeshed/issues/2673
tar._extract_member(member, path) # type: ignore
tar._extract_member(member, path)
except Exception as exc:
# Some corrupt tar files seem to produce this
# (specifically bad symlinks)
@@ -236,11 +227,10 @@ def untar_file(filename, location):
def unpack_file(
filename, # type: str
location, # type: str
content_type=None, # type: Optional[str]
):
# type: (...) -> None
filename: str,
location: str,
content_type: Optional[str] = None,
) -> None:
filename = os.path.realpath(filename)
if (
content_type == "application/zip"

View File

@@ -7,15 +7,13 @@ from typing import Optional
from .compat import WINDOWS
def get_url_scheme(url: str) -> Optional[str]:
    """Return the lower-cased scheme of *url*, or None if it has no colon."""
    if ":" not in url:
        return None
    return url.split(":", 1)[0].lower()
def path_to_url(path):
# type: (str) -> str
def path_to_url(path: str) -> str:
"""
Convert a path to a file: URL. The path will be made absolute and have
quoted path parts.
@@ -25,8 +23,7 @@ def path_to_url(path):
return url
def url_to_path(url):
# type: (str) -> str
def url_to_path(url: str) -> str:
"""
Convert a file: URL to a path.
"""

View File

@@ -11,8 +11,7 @@ _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX = re.compile(
)
def _running_under_venv():
# type: () -> bool
def _running_under_venv() -> bool:
"""Checks if sys.base_prefix and sys.prefix match.
This handles PEP 405 compliant virtual environments.
@@ -20,8 +19,7 @@ def _running_under_venv():
return sys.prefix != getattr(sys, "base_prefix", sys.prefix)
def _running_under_regular_virtualenv():
# type: () -> bool
def _running_under_legacy_virtualenv() -> bool:
"""Checks if sys.real_prefix is set.
This handles virtual environments created with pypa's virtualenv.
@@ -30,14 +28,12 @@ def _running_under_regular_virtualenv():
return hasattr(sys, "real_prefix")
def running_under_virtualenv() -> bool:
    """True if we're running inside a virtual environment, False otherwise."""
    return _running_under_venv() or _running_under_legacy_virtualenv()
def _get_pyvenv_cfg_lines():
# type: () -> Optional[List[str]]
def _get_pyvenv_cfg_lines() -> Optional[List[str]]:
"""Reads {sys.prefix}/pyvenv.cfg and returns its contents as list of lines
Returns None, if it could not read/access the file.
@@ -52,8 +48,7 @@ def _get_pyvenv_cfg_lines():
return None
def _no_global_under_venv():
# type: () -> bool
def _no_global_under_venv() -> bool:
"""Check `{sys.prefix}/pyvenv.cfg` for system site-packages inclusion
PEP 405 specifies that when system site-packages are not supposed to be
@@ -82,8 +77,7 @@ def _no_global_under_venv():
return False
def _no_global_under_regular_virtualenv():
# type: () -> bool
def _no_global_under_legacy_virtualenv() -> bool:
"""Check if "no-global-site-packages.txt" exists beside site.py
This mirrors logic in pypa/virtualenv for determining whether system
@@ -97,15 +91,14 @@ def _no_global_under_regular_virtualenv():
return os.path.exists(no_global_site_packages_file)
def virtualenv_no_global() -> bool:
    """Returns a boolean, whether running in venv with no system site-packages."""
    # PEP 405 compliance needs to be checked first since virtualenv >=20 would
    # return True for both checks, but is only able to use the PEP 405 config.
    if _running_under_venv():
        return _no_global_under_venv()
    if _running_under_legacy_virtualenv():
        return _no_global_under_legacy_virtualenv()
    return False

View File

@@ -4,14 +4,12 @@
import logging
from email.message import Message
from email.parser import Parser
from typing import Dict, Tuple
from typing import Tuple
from zipfile import BadZipFile, ZipFile
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.pkg_resources import DistInfoDistribution, Distribution
from pip._internal.exceptions import UnsupportedWheel
from pip._internal.utils.pkg_resources import DictMetadata
VERSION_COMPATIBLE = (1, 0)
@@ -19,53 +17,7 @@ VERSION_COMPATIBLE = (1, 0)
logger = logging.getLogger(__name__)
class WheelMetadata(DictMetadata):
    """Metadata provider that maps metadata decoding exceptions to our
    internal exception type.
    """

    def __init__(self, metadata: Dict[str, bytes], wheel_name: str) -> None:
        super().__init__(metadata)
        # Retained only so decoding errors can name their origin.
        self._wheel_name = wheel_name

    def get_metadata(self, name: str) -> str:
        try:
            return super().get_metadata(name)
        except UnicodeDecodeError as e:
            # Augment the default error with the origin of the file.
            raise UnsupportedWheel(
                f"Error decoding metadata for {self._wheel_name}: {e}"
            )
def pkg_resources_distribution_for_wheel(
    wheel_zip: ZipFile, name: str, location: str
) -> Distribution:
    """Get a pkg_resources distribution given a wheel.

    :raises UnsupportedWheel: on any errors
    """
    info_dir, _ = parse_wheel(wheel_zip, name)

    metadata_files = [p for p in wheel_zip.namelist() if p.startswith(f"{info_dir}/")]

    metadata_text: Dict[str, bytes] = {}
    for path in metadata_files:
        _, metadata_name = path.split("/", 1)

        try:
            metadata_text[metadata_name] = read_wheel_metadata_file(wheel_zip, path)
        except UnsupportedWheel as e:
            raise UnsupportedWheel("{} has an invalid wheel, {}".format(name, str(e)))

    metadata = WheelMetadata(metadata_text, location)

    return DistInfoDistribution(location=location, metadata=metadata, project_name=name)
def parse_wheel(wheel_zip, name):
# type: (ZipFile, str) -> Tuple[str, Message]
def parse_wheel(wheel_zip: ZipFile, name: str) -> Tuple[str, Message]:
"""Extract information from the provided wheel, ensuring it meets basic
standards.
@@ -83,8 +35,7 @@ def parse_wheel(wheel_zip, name):
return info_dir, metadata
def wheel_dist_info_dir(source, name):
# type: (ZipFile, str) -> str
def wheel_dist_info_dir(source: ZipFile, name: str) -> str:
"""Returns the name of the contained .dist-info directory.
Raises AssertionError or UnsupportedWheel if not found, >1 found, or
@@ -117,8 +68,7 @@ def wheel_dist_info_dir(source, name):
return info_dir
def read_wheel_metadata_file(source, path):
# type: (ZipFile, str) -> bytes
def read_wheel_metadata_file(source: ZipFile, path: str) -> bytes:
try:
return source.read(path)
# BadZipFile for general corruption, KeyError for missing entry,
@@ -127,8 +77,7 @@ def read_wheel_metadata_file(source, path):
raise UnsupportedWheel(f"could not read {path!r} file: {e!r}")
def wheel_metadata(source, dist_info_dir):
# type: (ZipFile, str) -> Message
def wheel_metadata(source: ZipFile, dist_info_dir: str) -> Message:
"""Return the WHEEL metadata of an extracted wheel, if possible.
Otherwise, raise UnsupportedWheel.
"""
@@ -147,8 +96,7 @@ def wheel_metadata(source, dist_info_dir):
return Parser().parsestr(wheel_text)
def wheel_version(wheel_data):
# type: (Message) -> Tuple[int, ...]
def wheel_version(wheel_data: Message) -> Tuple[int, ...]:
"""Given WHEEL metadata, return the parsed Wheel-Version.
Otherwise, raise UnsupportedWheel.
"""
@@ -164,8 +112,7 @@ def wheel_version(wheel_data):
raise UnsupportedWheel(f"invalid Wheel-Version: {version!r}")
def check_compatibility(version, name):
# type: (Tuple[int, ...], str) -> None
def check_compatibility(version: Tuple[int, ...], name: str) -> None:
"""Raises errors or warns if called with an incompatible Wheel-Version.
pip should refuse to install a Wheel-Version that's a major series