Change venv
@@ -1,6 +1,6 @@
from typing import List, Optional

__version__ = "21.2.4"
__version__ = "23.1.2"


def main(args: Optional[List[str]] = None) -> int:
@@ -1,17 +1,15 @@
"""Build Environment used for isolation during sdist building
"""

import contextlib
import logging
import os
import pathlib
import site
import sys
import textwrap
import zipfile
from collections import OrderedDict
from sysconfig import get_paths
from types import TracebackType
from typing import TYPE_CHECKING, Iterable, Iterator, List, Optional, Set, Tuple, Type
from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type, Union

from pip._vendor.certifi import where
from pip._vendor.packaging.requirements import Requirement
@@ -19,8 +17,8 @@ from pip._vendor.packaging.version import Version

from pip import __file__ as pip_location
from pip._internal.cli.spinners import open_spinner
from pip._internal.locations import get_platlib, get_prefixed_libs, get_purelib
from pip._internal.metadata import get_environment
from pip._internal.locations import get_platlib, get_purelib, get_scheme
from pip._internal.metadata import get_default_environment, get_environment
from pip._internal.utils.subprocess import call_subprocess
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds

@@ -30,62 +28,68 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)


class _Prefix:
def _dedup(a: str, b: str) -> Union[Tuple[str], Tuple[str, str]]:
    return (a, b) if a != b else (a,)

    def __init__(self, path):
        # type: (str) -> None

class _Prefix:
    def __init__(self, path: str) -> None:
        self.path = path
        self.setup = False
        self.bin_dir = get_paths(
            'nt' if os.name == 'nt' else 'posix_prefix',
            vars={'base': path, 'platbase': path}
        )['scripts']
        self.lib_dirs = get_prefixed_libs(path)
        scheme = get_scheme("", prefix=path)
        self.bin_dir = scheme.scripts
        self.lib_dirs = _dedup(scheme.purelib, scheme.platlib)

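The new `_Prefix` derives its script and library directories from pip's own `get_scheme()` rather than raw `sysconfig.get_paths()`, and `_dedup` collapses `purelib`/`platlib` into a single entry when they point at the same directory. A minimal, self-contained sketch of that dedup behaviour (the paths are made up):

```python
from typing import Tuple, Union

def _dedup(a: str, b: str) -> Union[Tuple[str], Tuple[str, str]]:
    # Keep both paths only when they differ; on many installs purelib
    # and platlib are the same site-packages directory.
    return (a, b) if a != b else (a,)

print(_dedup("/tmp/overlay/lib/site-packages", "/tmp/overlay/lib/site-packages"))
# -> ('/tmp/overlay/lib/site-packages',)
print(_dedup("/tmp/overlay/lib/purelib", "/tmp/overlay/lib/platlib"))
# -> ('/tmp/overlay/lib/purelib', '/tmp/overlay/lib/platlib')
```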
@contextlib.contextmanager
def _create_standalone_pip() -> Iterator[str]:
    """Create a "standalone pip" zip file.
def get_runnable_pip() -> str:
    """Get a file to pass to a Python executable, to run the currently-running pip.

    The zip file's content is identical to the currently-running pip.
    It will be used to install requirements into the build environment.
    This is used to run a pip subprocess, for installing requirements into the build
    environment.
    """
    source = pathlib.Path(pip_location).resolve().parent

    # Return the current instance if `source` is not a directory. We can't build
    # a zip from this, and it likely means the instance is already standalone.
    if not source.is_dir():
        yield str(source)
        return
        # This would happen if someone is using pip from inside a zip file. In that
        # case, we can use that directly.
        return str(source)

    with TempDirectory(kind="standalone-pip") as tmp_dir:
        pip_zip = os.path.join(tmp_dir.path, "__env_pip__.zip")
        kwargs = {}
        if sys.version_info >= (3, 8):
            kwargs["strict_timestamps"] = False
        with zipfile.ZipFile(pip_zip, "w", **kwargs) as zf:
            for child in source.rglob("*"):
                zf.write(child, child.relative_to(source.parent).as_posix())
        yield os.path.join(pip_zip, "pip")
    return os.fsdecode(source / "__pip-runner__.py")

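`get_runnable_pip()` replaces the zip-building context manager: rather than packing pip's sources into `__env_pip__.zip`, it returns the path of the bundled `__pip-runner__.py`, which a child interpreter can execute directly. A hedged sketch of how a caller might use the returned path; this relies on pip's internal API and the requirement string is only an example:

```python
import subprocess
import sys

from pip._internal.build_env import get_runnable_pip  # internal API, not stable

# Run the currently-running pip in a child process, the way the build
# environment code does; "setuptools>=40.8.0" is an illustrative requirement.
cmd = [sys.executable, get_runnable_pip(), "install", "setuptools>=40.8.0"]
subprocess.run(cmd, check=True)
```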
def _get_system_sitepackages() -> Set[str]:
    """Get system site packages

    Usually from site.getsitepackages,
    but fallback on `get_purelib()/get_platlib()` if unavailable
    (e.g. in a virtualenv created by virtualenv<20)

    Returns normalized set of strings.
    """
    if hasattr(site, "getsitepackages"):
        system_sites = site.getsitepackages()
    else:
        # virtualenv < 20 overwrites site.py without getsitepackages
        # fallback on get_purelib/get_platlib.
        # this is known to miss things, but shouldn't in the cases
        # where getsitepackages() has been removed (inside a virtualenv)
        system_sites = [get_purelib(), get_platlib()]
    return {os.path.normcase(path) for path in system_sites}


class BuildEnvironment:
    """Creates and manages an isolated environment to install build deps
    """
    """Creates and manages an isolated environment to install build deps"""

    def __init__(self):
        # type: () -> None
        temp_dir = TempDirectory(
            kind=tempdir_kinds.BUILD_ENV, globally_managed=True
        )
    def __init__(self) -> None:
        temp_dir = TempDirectory(kind=tempdir_kinds.BUILD_ENV, globally_managed=True)

        self._prefixes = OrderedDict(
            (name, _Prefix(os.path.join(temp_dir.path, name)))
            for name in ('normal', 'overlay')
            for name in ("normal", "overlay")
        )

        self._bin_dirs = []  # type: List[str]
        self._lib_dirs = []  # type: List[str]
        self._bin_dirs: List[str] = []
        self._lib_dirs: List[str] = []
        for prefix in reversed(list(self._prefixes.values())):
            self._bin_dirs.append(prefix.bin_dir)
            self._lib_dirs.extend(prefix.lib_dirs)
@@ -93,15 +97,17 @@ class BuildEnvironment:
        # Customize site to:
        # - ensure .pth files are honored
        # - prevent access to system site packages
        system_sites = {
            os.path.normcase(site) for site in (get_purelib(), get_platlib())
        }
        self._site_dir = os.path.join(temp_dir.path, 'site')
        system_sites = _get_system_sitepackages()

        self._site_dir = os.path.join(temp_dir.path, "site")
        if not os.path.exists(self._site_dir):
            os.mkdir(self._site_dir)
        with open(os.path.join(self._site_dir, 'sitecustomize.py'), 'w') as fp:
            fp.write(textwrap.dedent(
                '''
        with open(
            os.path.join(self._site_dir, "sitecustomize.py"), "w", encoding="utf-8"
        ) as fp:
            fp.write(
                textwrap.dedent(
                    """
                import os, site, sys

                # First, drop system-sites related paths.
@@ -124,54 +130,64 @@ class BuildEnvironment:
                for path in {lib_dirs!r}:
                    assert not path in sys.path
                    site.addsitedir(path)
                '''
            ).format(system_sites=system_sites, lib_dirs=self._lib_dirs))
                """
                ).format(system_sites=system_sites, lib_dirs=self._lib_dirs)
            )

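The `sitecustomize.py` written above is produced by templating: `textwrap.dedent()` normalizes the indentation and `str.format()` bakes the computed `system_sites` and `lib_dirs` values into the generated source as Python literals (via `!r`). A small standalone illustration of that technique, with made-up paths:

```python
import textwrap

# {placeholder!r} embeds the repr() of a Python value, so the generated
# module contains a valid literal rather than an unquoted string.
template = textwrap.dedent(
    """
    import site, sys
    for path in {lib_dirs!r}:
        assert not path in sys.path
        site.addsitedir(path)
    """
)
source = template.format(lib_dirs=["/tmp/env/normal/lib", "/tmp/env/overlay/lib"])
print(source)  # valid Python with the list baked in as a literal
```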
    def __enter__(self):
        # type: () -> None
    def __enter__(self) -> None:
        self._save_env = {
            name: os.environ.get(name, None)
            for name in ('PATH', 'PYTHONNOUSERSITE', 'PYTHONPATH')
            for name in ("PATH", "PYTHONNOUSERSITE", "PYTHONPATH")
        }

        path = self._bin_dirs[:]
        old_path = self._save_env['PATH']
        old_path = self._save_env["PATH"]
        if old_path:
            path.extend(old_path.split(os.pathsep))

        pythonpath = [self._site_dir]

        os.environ.update({
            'PATH': os.pathsep.join(path),
            'PYTHONNOUSERSITE': '1',
            'PYTHONPATH': os.pathsep.join(pythonpath),
        })
        os.environ.update(
            {
                "PATH": os.pathsep.join(path),
                "PYTHONNOUSERSITE": "1",
                "PYTHONPATH": os.pathsep.join(pythonpath),
            }
        )

    def __exit__(
        self,
        exc_type,  # type: Optional[Type[BaseException]]
        exc_val,  # type: Optional[BaseException]
        exc_tb  # type: Optional[TracebackType]
    ):
        # type: (...) -> None
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        for varname, old_value in self._save_env.items():
            if old_value is None:
                os.environ.pop(varname, None)
            else:
                os.environ[varname] = old_value

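`__enter__` and `__exit__` make `BuildEnvironment` a context manager that temporarily rewrites `PATH`, `PYTHONNOUSERSITE` and `PYTHONPATH`, then restores each variable on exit, unsetting any that did not exist before. The same save/restore pattern, reduced to a standalone sketch:

```python
import os
from typing import Dict, Optional

class EnvPatch:
    """Minimal sketch of the save/restore pattern used above."""

    def __init__(self, updates: Dict[str, str]) -> None:
        self._updates = updates
        self._saved: Dict[str, Optional[str]] = {}

    def __enter__(self) -> None:
        # Remember the previous values (None means "was unset").
        self._saved = {k: os.environ.get(k) for k in self._updates}
        os.environ.update(self._updates)

    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
        for name, old in self._saved.items():
            if old is None:
                os.environ.pop(name, None)  # variable was unset before
            else:
                os.environ[name] = old

with EnvPatch({"PYTHONNOUSERSITE": "1"}):
    assert os.environ["PYTHONNOUSERSITE"] == "1"
```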
    def check_requirements(self, reqs):
        # type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]]
    def check_requirements(
        self, reqs: Iterable[str]
    ) -> Tuple[Set[Tuple[str, str]], Set[str]]:
        """Return 2 sets:
            - conflicting requirements: set of (installed, wanted) reqs tuples
            - missing requirements: set of reqs
        - conflicting requirements: set of (installed, wanted) reqs tuples
        - missing requirements: set of reqs
        """
        missing = set()
        conflicting = set()
        if reqs:
            env = get_environment(self._lib_dirs)
            env = (
                get_environment(self._lib_dirs)
                if hasattr(self, "_lib_dirs")
                else get_default_environment()
            )
            for req_str in reqs:
                req = Requirement(req_str)
                # We're explicitly evaluating with an empty extra value, since build
                # environments are not provided any mechanism to select specific extras.
                if req.marker is not None and not req.marker.evaluate({"extra": ""}):
                    continue
                dist = env.get_distribution(req.name)
                if not dist:
                    missing.add(req_str)
@@ -180,40 +196,31 @@ class BuildEnvironment:
                    installed_req_str = f"{req.name}=={dist.version}"
                else:
                    installed_req_str = f"{req.name}==={dist.version}"
                if dist.version not in req.specifier:
                if not req.specifier.contains(dist.version, prereleases=True):
                    conflicting.add((installed_req_str, req_str))
                # FIXME: Consider direct URL?
        return conflicting, missing

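The conflict test changes from the `in` operator to `specifier.contains(..., prereleases=True)`, so an installed pre-release no longer counts as a conflict when the specifier would otherwise admit it. A small illustration using the same vendored `packaging` API (the version strings are invented):

```python
from pip._vendor.packaging.requirements import Requirement

req = Requirement("setuptools>=40")
installed = "65.0.0rc1"  # hypothetical installed pre-release

# Old check: the membership test excludes pre-releases by default.
print(installed in req.specifier)                           # False
# New check: pre-releases are considered explicitly.
print(req.specifier.contains(installed, prereleases=True))  # True
```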
    def install_requirements(
        self,
        finder,  # type: PackageFinder
        requirements,  # type: Iterable[str]
        prefix_as_string,  # type: str
        message  # type: str
    ):
        # type: (...) -> None
        finder: "PackageFinder",
        requirements: Iterable[str],
        prefix_as_string: str,
        *,
        kind: str,
    ) -> None:
        prefix = self._prefixes[prefix_as_string]
        assert not prefix.setup
        prefix.setup = True
        if not requirements:
            return
        with contextlib.ExitStack() as ctx:
            # TODO: Remove this block when dropping 3.6 support. Python 3.6
            # lacks importlib.resources and pep517 has issues loading files in
            # a zip, so we fallback to the "old" method by adding the current
            # pip directory to the child process's sys.path.
            if sys.version_info < (3, 7):
                pip_runnable = os.path.dirname(pip_location)
            else:
                pip_runnable = ctx.enter_context(_create_standalone_pip())
            self._install_requirements(
                pip_runnable,
                finder,
                requirements,
                prefix,
                message,
            )
        self._install_requirements(
            get_runnable_pip(),
            finder,
            requirements,
            prefix,
            kind=kind,
        )

    @staticmethod
    def _install_requirements(
@@ -221,74 +228,84 @@ class BuildEnvironment:
        finder: "PackageFinder",
        requirements: Iterable[str],
        prefix: _Prefix,
        message: str,
        *,
        kind: str,
    ) -> None:
        args = [
            sys.executable, pip_runnable, 'install',
            '--ignore-installed', '--no-user', '--prefix', prefix.path,
            '--no-warn-script-location',
        ]  # type: List[str]
        args: List[str] = [
            sys.executable,
            pip_runnable,
            "install",
            "--ignore-installed",
            "--no-user",
            "--prefix",
            prefix.path,
            "--no-warn-script-location",
        ]
        if logger.getEffectiveLevel() <= logging.DEBUG:
            args.append('-v')
        for format_control in ('no_binary', 'only_binary'):
            args.append("-v")
        for format_control in ("no_binary", "only_binary"):
            formats = getattr(finder.format_control, format_control)
            args.extend(('--' + format_control.replace('_', '-'),
                         ','.join(sorted(formats or {':none:'}))))
            args.extend(
                (
                    "--" + format_control.replace("_", "-"),
                    ",".join(sorted(formats or {":none:"})),
                )
            )

        index_urls = finder.index_urls
        if index_urls:
            args.extend(['-i', index_urls[0]])
            args.extend(["-i", index_urls[0]])
            for extra_index in index_urls[1:]:
                args.extend(['--extra-index-url', extra_index])
                args.extend(["--extra-index-url", extra_index])
        else:
            args.append('--no-index')
            args.append("--no-index")
        for link in finder.find_links:
            args.extend(['--find-links', link])
            args.extend(["--find-links", link])

        for host in finder.trusted_hosts:
            args.extend(['--trusted-host', host])
            args.extend(["--trusted-host", host])
        if finder.allow_all_prereleases:
            args.append('--pre')
            args.append("--pre")
        if finder.prefer_binary:
            args.append('--prefer-binary')
        args.append('--')
            args.append("--prefer-binary")
        args.append("--")
        args.extend(requirements)
        extra_environ = {"_PIP_STANDALONE_CERT": where()}
        with open_spinner(message) as spinner:
            call_subprocess(args, spinner=spinner, extra_environ=extra_environ)
        with open_spinner(f"Installing {kind}") as spinner:
            call_subprocess(
                args,
                command_desc=f"pip subprocess to install {kind}",
                spinner=spinner,
                extra_environ=extra_environ,
            )


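Taken together, `_install_requirements` assembles a fully isolated pip invocation. Under the new code the spawned command line might look roughly like the following; every concrete value here is illustrative, not taken from the diff:

```python
# Hypothetical argv assembled by _install_requirements (values invented):
args = [
    "/usr/bin/python3",                # sys.executable
    "/site/pip/__pip-runner__.py",     # get_runnable_pip()
    "install",
    "--ignore-installed", "--no-user",
    "--prefix", "/tmp/pip-build-env-abc123/overlay",
    "--no-warn-script-location",
    "--no-binary", ":none:",           # from finder.format_control defaults
    "--only-binary", ":none:",
    "-i", "https://pypi.org/simple",
    "--",                              # everything after is a requirement
    "setuptools>=40.8.0", "wheel",
]
```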
class NoOpBuildEnvironment(BuildEnvironment):
    """A no-op drop-in replacement for BuildEnvironment
    """
    """A no-op drop-in replacement for BuildEnvironment"""

    def __init__(self):
        # type: () -> None
    def __init__(self) -> None:
        pass

    def __enter__(self):
        # type: () -> None
    def __enter__(self) -> None:
        pass

    def __exit__(
        self,
        exc_type,  # type: Optional[Type[BaseException]]
        exc_val,  # type: Optional[BaseException]
        exc_tb  # type: Optional[TracebackType]
    ):
        # type: (...) -> None
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        pass

    def cleanup(self):
        # type: () -> None
    def cleanup(self) -> None:
        pass

    def install_requirements(
        self,
        finder,  # type: PackageFinder
        requirements,  # type: Iterable[str]
        prefix_as_string,  # type: str
        message  # type: str
    ):
        # type: (...) -> None
        finder: "PackageFinder",
        requirements: Iterable[str],
        prefix_as_string: str,
        *,
        kind: str,
    ) -> None:
        raise NotImplementedError()

@@ -5,13 +5,14 @@ import hashlib
import json
import logging
import os
from typing import Any, Dict, List, Optional, Set
from pathlib import Path
from typing import Any, Dict, List, Optional

from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version
from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.exceptions import InvalidWheelFilename
from pip._internal.models.format_control import FormatControl
from pip._internal.models.direct_url import DirectUrl
from pip._internal.models.link import Link
from pip._internal.models.wheel import Wheel
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
@@ -19,9 +20,10 @@ from pip._internal.utils.urls import path_to_url

logger = logging.getLogger(__name__)

ORIGIN_JSON_NAME = "origin.json"

def _hash_dict(d):
    # type: (Dict[str, str]) -> str

def _hash_dict(d: Dict[str, str]) -> str:
    """Return a stable sha224 of a dictionary."""
    s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
    return hashlib.sha224(s.encode("ascii")).hexdigest()
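`_hash_dict` produces a stable cache key: `sort_keys=True` plus fixed separators make the JSON canonical, so logically equal dicts always hash to the same digest. For example:

```python
import hashlib
import json

def _hash_dict(d):
    """Return a stable sha224 of a dictionary (same logic as above)."""
    s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
    return hashlib.sha224(s.encode("ascii")).hexdigest()

# Key order does not affect the digest:
assert _hash_dict({"a": "1", "b": "2"}) == _hash_dict({"b": "2", "a": "1"})
```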
@@ -30,29 +32,16 @@ def _hash_dict(d):
class Cache:
    """An abstract class - provides cache directories for data from links


    :param cache_dir: The root of the cache.
    :param format_control: An object of FormatControl class to limit
        binaries being read from the cache.
    :param allowed_formats: which formats of files the cache should store.
        ('binary' and 'source' are the only allowed values)
    :param cache_dir: The root of the cache.
    """

    def __init__(self, cache_dir, format_control, allowed_formats):
        # type: (str, FormatControl, Set[str]) -> None
    def __init__(self, cache_dir: str) -> None:
        super().__init__()
        assert not cache_dir or os.path.isabs(cache_dir)
        self.cache_dir = cache_dir or None
        self.format_control = format_control
        self.allowed_formats = allowed_formats

        _valid_formats = {"source", "binary"}
        assert self.allowed_formats.union(_valid_formats) == _valid_formats

    def _get_cache_path_parts(self, link):
        # type: (Link) -> List[str]
        """Get parts of part that must be os.path.joined with cache_dir
        """
    def _get_cache_path_parts(self, link: Link) -> List[str]:
        """Get parts of part that must be os.path.joined with cache_dir"""

        # We want to generate an url to use as our cache key, we don't want to
        # just re-use the URL because it might have other items in the fragment
@@ -84,22 +73,11 @@ class Cache:

        return parts

    def _get_candidates(self, link, canonical_package_name):
        # type: (Link, str) -> List[Any]
        can_not_cache = (
            not self.cache_dir or
            not canonical_package_name or
            not link
        )
    def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]:
        can_not_cache = not self.cache_dir or not canonical_package_name or not link
        if can_not_cache:
            return []

        formats = self.format_control.get_allowed_formats(
            canonical_package_name
        )
        if not self.allowed_formats.intersection(formats):
            return []

        candidates = []
        path = self.get_path_for_link(link)
        if os.path.isdir(path):
@@ -107,19 +85,16 @@ class Cache:
                candidates.append((candidate, path))
        return candidates

    def get_path_for_link(self, link):
        # type: (Link) -> str
        """Return a directory to store cached items in for link.
        """
    def get_path_for_link(self, link: Link) -> str:
        """Return a directory to store cached items in for link."""
        raise NotImplementedError()

    def get(
        self,
        link,  # type: Link
        package_name,  # type: Optional[str]
        supported_tags,  # type: List[Tag]
    ):
        # type: (...) -> Link
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Link:
        """Returns a link to a cached item if it exists, otherwise returns the
        passed link.
        """
@@ -127,15 +102,12 @@ class Cache:


class SimpleWheelCache(Cache):
    """A cache of wheels for future installs.
    """
    """A cache of wheels for future installs."""

    def __init__(self, cache_dir, format_control):
        # type: (str, FormatControl) -> None
        super().__init__(cache_dir, format_control, {"binary"})
    def __init__(self, cache_dir: str) -> None:
        super().__init__(cache_dir)

    def get_path_for_link(self, link):
        # type: (Link) -> str
    def get_path_for_link(self, link: Link) -> str:
        """Return a directory to store cached wheels for link

        Because there are M wheels for any one sdist, we provide a directory
@@ -157,20 +129,17 @@ class SimpleWheelCache(Cache):

    def get(
        self,
        link,  # type: Link
        package_name,  # type: Optional[str]
        supported_tags,  # type: List[Tag]
    ):
        # type: (...) -> Link
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Link:
        candidates = []

        if not package_name:
            return link

        canonical_package_name = canonicalize_name(package_name)
        for wheel_name, wheel_dir in self._get_candidates(
            link, canonical_package_name
        ):
        for wheel_name, wheel_dir in self._get_candidates(link, canonical_package_name):
            try:
                wheel = Wheel(wheel_name)
            except InvalidWheelFilename:
@@ -179,7 +148,9 @@ class SimpleWheelCache(Cache):
                logger.debug(
                    "Ignoring cached wheel %s for %s as it "
                    "does not match the expected distribution name %s.",
                    wheel_name, link, package_name,
                    wheel_name,
                    link,
                    package_name,
                )
                continue
            if not wheel.supported(supported_tags):
@@ -201,27 +172,29 @@ class SimpleWheelCache(Cache):


class EphemWheelCache(SimpleWheelCache):
    """A SimpleWheelCache that creates its own temporary cache directory
    """
    """A SimpleWheelCache that creates its own temporary cache directory"""

    def __init__(self, format_control):
        # type: (FormatControl) -> None
    def __init__(self) -> None:
        self._temp_dir = TempDirectory(
            kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
            globally_managed=True,
        )

        super().__init__(self._temp_dir.path, format_control)
        super().__init__(self._temp_dir.path)


class CacheEntry:
    def __init__(
        self,
        link,  # type: Link
        persistent,  # type: bool
        link: Link,
        persistent: bool,
    ):
        self.link = link
        self.persistent = persistent
        self.origin: Optional[DirectUrl] = None
        origin_direct_url_path = Path(self.link.file_path).parent / ORIGIN_JSON_NAME
        if origin_direct_url_path.exists():
            self.origin = DirectUrl.from_json(origin_direct_url_path.read_text())


class WheelCache(Cache):
@@ -231,27 +204,23 @@ class WheelCache(Cache):
    when a certain link is not found in the simple wheel cache first.
    """

    def __init__(self, cache_dir, format_control):
        # type: (str, FormatControl) -> None
        super().__init__(cache_dir, format_control, {'binary'})
        self._wheel_cache = SimpleWheelCache(cache_dir, format_control)
        self._ephem_cache = EphemWheelCache(format_control)
    def __init__(self, cache_dir: str) -> None:
        super().__init__(cache_dir)
        self._wheel_cache = SimpleWheelCache(cache_dir)
        self._ephem_cache = EphemWheelCache()

    def get_path_for_link(self, link):
        # type: (Link) -> str
    def get_path_for_link(self, link: Link) -> str:
        return self._wheel_cache.get_path_for_link(link)

    def get_ephem_path_for_link(self, link):
        # type: (Link) -> str
    def get_ephem_path_for_link(self, link: Link) -> str:
        return self._ephem_cache.get_path_for_link(link)

    def get(
        self,
        link,  # type: Link
        package_name,  # type: Optional[str]
        supported_tags,  # type: List[Tag]
    ):
        # type: (...) -> Link
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Link:
        cache_entry = self.get_cache_entry(link, package_name, supported_tags)
        if cache_entry is None:
            return link
@@ -259,11 +228,10 @@ class WheelCache(Cache):

    def get_cache_entry(
        self,
        link,  # type: Link
        package_name,  # type: Optional[str]
        supported_tags,  # type: List[Tag]
    ):
        # type: (...) -> Optional[CacheEntry]
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Optional[CacheEntry]:
        """Returns a CacheEntry with a link to a cached item if it exists or
        None. The cache entry indicates if the item was found in the persistent
        or ephemeral cache.
@@ -285,3 +253,20 @@ class WheelCache(Cache):
            return CacheEntry(retval, persistent=False)

        return None

    @staticmethod
    def record_download_origin(cache_dir: str, download_info: DirectUrl) -> None:
        origin_path = Path(cache_dir) / ORIGIN_JSON_NAME
        if origin_path.is_file():
            origin = DirectUrl.from_json(origin_path.read_text())
            # TODO: use DirectUrl.equivalent when https://github.com/pypa/pip/pull/10564
            # is merged.
            if origin.url != download_info.url:
                logger.warning(
                    "Origin URL %s in cache entry %s does not match download URL %s. "
                    "This is likely a pip bug or a cache corruption issue.",
                    origin.url,
                    cache_dir,
                    download_info.url,
                )
        origin_path.write_text(download_info.to_json(), encoding="utf-8")

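The new `origin.json` sidecar records where a cached wheel was downloaded from, as a serialized `DirectUrl`. A hedged sketch of reading it back, mirroring what `CacheEntry.__init__` does above; the cache path is made up:

```python
from pathlib import Path

from pip._internal.models.direct_url import DirectUrl  # internal API

# Hypothetical wheel-cache directory containing origin.json:
cache_dir = Path.home() / ".cache/pip/wheels/ab/cd/ef"
origin_path = cache_dir / "origin.json"  # ORIGIN_JSON_NAME
if origin_path.is_file():
    origin = DirectUrl.from_json(origin_path.read_text())
    print(origin.url)  # URL the cached artifact was originally fetched from
```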
@@ -59,6 +59,14 @@ def autocomplete() -> None:
                print(dist)
        sys.exit(1)

    should_list_installables = (
        not current.startswith("-") and subcommand_name == "install"
    )
    if should_list_installables:
        for path in auto_complete_paths(current, "path"):
            print(path)
        sys.exit(1)

    subcommand = create_command(subcommand_name)

    for opt in subcommand.parser.option_list_all:
@@ -138,7 +146,7 @@ def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]:
    starting with ``current``.

    :param current: The word to be completed
    :param completion_type: path completion type(`file`, `path` or `dir`)i
    :param completion_type: path completion type(``file``, ``path`` or ``dir``)
    :return: A generator of regular files and/or directories
    """
    directory, filename = os.path.split(current)

@@ -1,5 +1,6 @@
"""Base Command class, and related routines"""

import functools
import logging
import logging.config
import optparse
@@ -7,7 +8,9 @@ import os
import sys
import traceback
from optparse import Values
from typing import Any, List, Optional, Tuple
from typing import Any, Callable, List, Optional, Tuple

from pip._vendor.rich import traceback as rich_traceback

from pip._internal.cli import cmdoptions
from pip._internal.cli.command_context import CommandContextMixIn
@@ -21,12 +24,12 @@ from pip._internal.cli.status_codes import (
from pip._internal.exceptions import (
    BadCommand,
    CommandError,
    DiagnosticPipError,
    InstallationError,
    NetworkConnectionError,
    PreviousBuildDirError,
    UninstallationError,
)
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
from pip._internal.utils.misc import get_prog, normalize_path
@@ -85,10 +88,10 @@ class Command(CommandContextMixIn):
        # are present.
        assert not hasattr(options, "no_index")

    def run(self, options: Values, args: List[Any]) -> int:
    def run(self, options: Values, args: List[str]) -> int:
        raise NotImplementedError

    def parse_args(self, args: List[str]) -> Tuple[Any, Any]:
    def parse_args(self, args: List[str]) -> Tuple[Values, List[str]]:
        # factored out for testability
        return self.parser.parse_args(args)

@@ -119,6 +122,15 @@ class Command(CommandContextMixIn):
            user_log_file=options.log,
        )

        always_enabled_features = set(options.features_enabled) & set(
            cmdoptions.ALWAYS_ENABLED_FEATURES
        )
        if always_enabled_features:
            logger.warning(
                "The following features are always enabled: %s. ",
                ", ".join(sorted(always_enabled_features)),
            )

        # TODO: Try to get these passing down from the command?
        #       without resorting to os.environ to hold these.
        #       This also affects isolated builds and it should.
@@ -148,67 +160,66 @@
            )
            options.cache_dir = None

        if getattr(options, "build_dir", None):
            deprecated(
                reason=(
                    "The -b/--build/--build-dir/--build-directory "
                    "option is deprecated and has no effect anymore."
                ),
                replacement=(
                    "use the TMPDIR/TEMP/TMP environment variable, "
                    "possibly combined with --no-clean"
                ),
                gone_in="21.3",
                issue=8333,
            )
        def intercepts_unhandled_exc(
            run_func: Callable[..., int]
        ) -> Callable[..., int]:
            @functools.wraps(run_func)
            def exc_logging_wrapper(*args: Any) -> int:
                try:
                    status = run_func(*args)
                    assert isinstance(status, int)
                    return status
                except DiagnosticPipError as exc:
                    logger.error("[present-rich] %s", exc)
                    logger.debug("Exception information:", exc_info=True)

        if "2020-resolver" in options.features_enabled:
            logger.warning(
                "--use-feature=2020-resolver no longer has any effect, "
                "since it is now the default dependency resolver in pip. "
                "This will become an error in pip 21.0."
            )
                    return ERROR
                except PreviousBuildDirError as exc:
                    logger.critical(str(exc))
                    logger.debug("Exception information:", exc_info=True)

                    return PREVIOUS_BUILD_DIR_ERROR
                except (
                    InstallationError,
                    UninstallationError,
                    BadCommand,
                    NetworkConnectionError,
                ) as exc:
                    logger.critical(str(exc))
                    logger.debug("Exception information:", exc_info=True)

                    return ERROR
                except CommandError as exc:
                    logger.critical("%s", exc)
                    logger.debug("Exception information:", exc_info=True)

                    return ERROR
                except BrokenStdoutLoggingError:
                    # Bypass our logger and write any remaining messages to
                    # stderr because stdout no longer works.
                    print("ERROR: Pipe to stdout was broken", file=sys.stderr)
                    if level_number <= logging.DEBUG:
                        traceback.print_exc(file=sys.stderr)

                    return ERROR
                except KeyboardInterrupt:
                    logger.critical("Operation cancelled by user")
                    logger.debug("Exception information:", exc_info=True)

                    return ERROR
                except BaseException:
                    logger.critical("Exception:", exc_info=True)

                    return UNKNOWN_ERROR

            return exc_logging_wrapper

        try:
            status = self.run(options, args)
            assert isinstance(status, int)
            return status
        except PreviousBuildDirError as exc:
            logger.critical(str(exc))
            logger.debug("Exception information:", exc_info=True)

            return PREVIOUS_BUILD_DIR_ERROR
        except (
            InstallationError,
            UninstallationError,
            BadCommand,
            NetworkConnectionError,
        ) as exc:
            logger.critical(str(exc))
            logger.debug("Exception information:", exc_info=True)

            return ERROR
        except CommandError as exc:
            logger.critical("%s", exc)
            logger.debug("Exception information:", exc_info=True)

            return ERROR
        except BrokenStdoutLoggingError:
            # Bypass our logger and write any remaining messages to stderr
            # because stdout no longer works.
            print("ERROR: Pipe to stdout was broken", file=sys.stderr)
            if level_number <= logging.DEBUG:
                traceback.print_exc(file=sys.stderr)

            return ERROR
        except KeyboardInterrupt:
            logger.critical("Operation cancelled by user")
            logger.debug("Exception information:", exc_info=True)

            return ERROR
        except BaseException:
            logger.critical("Exception:", exc_info=True)

            return UNKNOWN_ERROR
            if not options.debug_mode:
                run = intercepts_unhandled_exc(self.run)
            else:
                run = self.run
                rich_traceback.install(show_locals=True)
            return run(options, args)
        finally:
            self.handle_pip_version_check(options)

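The error handling that used to live inline in `_main` is now a local decorator, applied only when `--debug` is off; with `--debug` on, exceptions propagate and rich installs a traceback handler instead. The shape of the pattern, reduced to a standalone sketch with pip's numeric status codes inlined:

```python
import functools
from typing import Any, Callable

ERROR, UNKNOWN_ERROR = 1, 2  # mirror pip's status codes

def intercepts_unhandled_exc(run_func: Callable[..., int]) -> Callable[..., int]:
    # Translate exceptions into integer exit codes instead of propagating.
    @functools.wraps(run_func)
    def exc_logging_wrapper(*args: Any) -> int:
        try:
            return run_func(*args)
        except KeyboardInterrupt:
            return ERROR
        except BaseException:
            return UNKNOWN_ERROR
    return exc_logging_wrapper

def dispatch(run: Callable[..., int], debug_mode: bool) -> Callable[..., int]:
    # Mirrors the hunk: wrap unless the user asked for --debug.
    return run if debug_mode else intercepts_unhandled_exc(run)
```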
@@ -10,9 +10,10 @@ pass on state. To be consistent, all options will follow this design.
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

import importlib.util
import logging
import os
import textwrap
import warnings
from functools import partial
from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values
from textwrap import dedent
@@ -21,7 +22,6 @@ from typing import Any, Callable, Dict, Optional, Tuple
from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.cli.parser import ConfigOptionParser
from pip._internal.cli.progress_bars import BAR_TYPES
from pip._internal.exceptions import CommandError
from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
from pip._internal.models.format_control import FormatControl
@@ -30,6 +30,8 @@ from pip._internal.models.target_python import TargetPython
from pip._internal.utils.hashes import STRONG_HASHES
from pip._internal.utils.misc import strtobool

logger = logging.getLogger(__name__)


def raise_option_error(parser: OptionParser, option: Option, msg: str) -> None:
    """
@@ -57,32 +59,6 @@ def make_option_group(group: Dict[str, Any], parser: ConfigOptionParser) -> Opti
    return option_group


def check_install_build_global(
    options: Values, check_options: Optional[Values] = None
) -> None:
    """Disable wheels if per-setup.py call options are set.

    :param options: The OptionParser options to update.
    :param check_options: The options to check, if not supplied defaults to
        options.
    """
    if check_options is None:
        check_options = options

    def getname(n: str) -> Optional[Any]:
        return getattr(check_options, n, None)

    names = ["build_options", "global_options", "install_options"]
    if any(map(getname, names)):
        control = options.format_control
        control.disallow_binaries()
        warnings.warn(
            "Disabling all use of wheels due to the use of --build-option "
            "/ --global-option / --install-option.",
            stacklevel=2,
        )


def check_dist_restriction(options: Values, check_target: bool = False) -> None:
    """Function for determining if custom platform options are allowed.

@@ -151,6 +127,18 @@ help_: Callable[..., Option] = partial(
    help="Show help.",
)

debug_mode: Callable[..., Option] = partial(
    Option,
    "--debug",
    dest="debug_mode",
    action="store_true",
    default=False,
    help=(
        "Let unhandled exceptions propagate outside the main subroutine, "
        "instead of logging them to stderr."
    ),
)

isolated_mode: Callable[..., Option] = partial(
    Option,
    "--isolated",
@@ -165,13 +153,30 @@ isolated_mode: Callable[..., Option] = partial(

require_virtualenv: Callable[..., Option] = partial(
    Option,
    # Run only if inside a virtualenv, bail if not.
    "--require-virtualenv",
    "--require-venv",
    dest="require_venv",
    action="store_true",
    default=False,
    help=SUPPRESS_HELP,
    help=(
        "Allow pip to only run in a virtual environment; "
        "exit with an error otherwise."
    ),
)

override_externally_managed: Callable[..., Option] = partial(
    Option,
    "--break-system-packages",
    dest="override_externally_managed",
    action="store_true",
    help="Allow pip to modify an EXTERNALLY-MANAGED Python installation",
)

python: Callable[..., Option] = partial(
    Option,
    "--python",
    dest="python",
    help="Run pip with the specified Python interpreter.",
)

verbose: Callable[..., Option] = partial(
@@ -221,13 +226,9 @@ progress_bar: Callable[..., Option] = partial(
    "--progress-bar",
    dest="progress_bar",
    type="choice",
    choices=list(BAR_TYPES.keys()),
    choices=["on", "off"],
    default="on",
    help=(
        "Specify type of progress to be displayed ["
        + "|".join(BAR_TYPES.keys())
        + "] (default: %default)"
    ),
    help="Specify whether the progress bar should be used [on, off] (default: on)",
)

log: Callable[..., Option] = partial(
@@ -251,13 +252,26 @@ no_input: Callable[..., Option] = partial(
    help="Disable prompting for input.",
)

keyring_provider: Callable[..., Option] = partial(
    Option,
    "--keyring-provider",
    dest="keyring_provider",
    choices=["auto", "disabled", "import", "subprocess"],
    default="auto",
    help=(
        "Enable the credential lookup via the keyring library if user input is allowed."
        " Specify which mechanism to use [disabled, import, subprocess]."
        " (default: disabled)"
    ),
)

proxy: Callable[..., Option] = partial(
    Option,
    "--proxy",
    dest="proxy",
    type="str",
    default="",
    help="Specify a proxy in the form [user:passwd@]proxy.server:port.",
    help="Specify a proxy in the form scheme://[user:passwd@]proxy.server:port.",
)

retries: Callable[..., Option] = partial(
@@ -719,18 +733,6 @@ no_deps: Callable[..., Option] = partial(
    help="Don't install package dependencies.",
)

build_dir: Callable[..., Option] = partial(
    PipOption,
    "-b",
    "--build",
    "--build-dir",
    "--build-directory",
    dest="build_dir",
    type="path",
    metavar="dir",
    help=SUPPRESS_HELP,
)

ignore_requires_python: Callable[..., Option] = partial(
    Option,
    "--ignore-requires-python",
@@ -750,6 +752,15 @@ no_build_isolation: Callable[..., Option] = partial(
    "if this option is used.",
)

check_build_deps: Callable[..., Option] = partial(
    Option,
    "--check-build-dependencies",
    dest="check_build_deps",
    action="store_true",
    default=False,
    help="Check the build dependencies when PEP517 is used.",
)


def _handle_no_use_pep517(
    option: Option, opt: str, value: str, parser: OptionParser
@@ -772,6 +783,16 @@ def _handle_no_use_pep517(
    """
    raise_option_error(parser, option=option, msg=msg)

    # If user doesn't wish to use pep517, we check if setuptools and wheel are installed
    # and raise error if it is not.
    packages = ("setuptools", "wheel")
    if not all(importlib.util.find_spec(package) for package in packages):
        msg = (
            f"It is not possible to use --no-use-pep517 "
            f"without {' and '.join(packages)} installed."
        )
        raise_option_error(parser, option=option, msg=msg)

    # Otherwise, --no-use-pep517 was passed via the command-line.
    parser.values.use_pep517 = False

@@ -796,17 +817,38 @@ no_use_pep517: Any = partial(
    help=SUPPRESS_HELP,
)

install_options: Callable[..., Option] = partial(

def _handle_config_settings(
    option: Option, opt_str: str, value: str, parser: OptionParser
) -> None:
    key, sep, val = value.partition("=")
    if sep != "=":
        parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL")  # noqa
    dest = getattr(parser.values, option.dest)
    if dest is None:
        dest = {}
        setattr(parser.values, option.dest, dest)
    if key in dest:
        if isinstance(dest[key], list):
            dest[key].append(val)
        else:
            dest[key] = [dest[key], val]
    else:
        dest[key] = val


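The `-C/--config-settings` callback accumulates repeated keys into a list, so `-C key=a -C key=b` produces `{"key": ["a", "b"]}`. The accumulation rule on its own, as a runnable sketch:

```python
from typing import Dict, List, Union

Settings = Dict[str, Union[str, List[str]]]

def accumulate(dest: Settings, value: str) -> None:
    # Same rule as _handle_config_settings above.
    key, sep, val = value.partition("=")
    assert sep == "=", "settings must be KEY=VALUE"
    if key in dest:
        existing = dest[key]
        if isinstance(existing, list):
            existing.append(val)
        else:
            dest[key] = [existing, val]
    else:
        dest[key] = val

settings: Settings = {}
for arg in ("--build-option=-q", "--build-option=-j4", "editable_mode=strict"):
    accumulate(settings, arg)
print(settings)
# {'--build-option': ['-q', '-j4'], 'editable_mode': 'strict'}
```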
config_settings: Callable[..., Option] = partial(
    Option,
    "--install-option",
    dest="install_options",
    action="append",
    metavar="options",
    help="Extra arguments to be supplied to the setup.py install "
    'command (use like --install-option="--install-scripts=/usr/local/'
    'bin"). Use multiple --install-option options to pass multiple '
    "options to setup.py install. If you are using an option with a "
    "directory path, be sure to use absolute path.",
    "-C",
    "--config-settings",
    dest="config_settings",
    type=str,
    action="callback",
    callback=_handle_config_settings,
    metavar="settings",
    help="Configuration settings to be passed to the PEP 517 build backend. "
    "Settings take the form KEY=VALUE. Use multiple --config-settings options "
    "to pass multiple keys to the backend.",
)

build_options: Callable[..., Option] = partial(
@@ -855,6 +897,15 @@ disable_pip_version_check: Callable[..., Option] = partial(
    "of pip is available for download. Implied with --no-index.",
)

root_user_action: Callable[..., Option] = partial(
    Option,
    "--root-user-action",
    dest="root_user_action",
    default="warn",
    choices=["warn", "ignore"],
    help="Action if pip is run as a root user. By default, a warning message is shown.",
)


def _handle_merge_hash(
    option: Option, opt_str: str, value: str, parser: OptionParser
@@ -943,6 +994,11 @@ no_python_version_warning: Callable[..., Option] = partial(
)


# Features that are now always on. A warning is printed if they are used.
ALWAYS_ENABLED_FEATURES = [
    "no-binary-enable-wheel-cache",  # always on since 23.1
]

use_new_feature: Callable[..., Option] = partial(
    Option,
    "--use-feature",
@@ -950,7 +1006,11 @@ use_new_feature: Callable[..., Option] = partial(
    metavar="feature",
    action="append",
    default=[],
    choices=["2020-resolver", "fast-deps", "in-tree-build"],
    choices=[
        "fast-deps",
        "truststore",
    ]
    + ALWAYS_ENABLED_FEATURES,
    help="Enable new functionality, that may be backward incompatible.",
)

@@ -961,7 +1021,9 @@ use_deprecated_feature: Callable[..., Option] = partial(
    metavar="feature",
    action="append",
    default=[],
    choices=["legacy-resolver"],
    choices=[
        "legacy-resolver",
    ],
    help=("Enable deprecated functionality, that will be removed in the future."),
)

@@ -974,13 +1036,16 @@ general_group: Dict[str, Any] = {
    "name": "General Options",
    "options": [
        help_,
        debug_mode,
        isolated_mode,
        require_virtualenv,
        python,
        verbose,
        version,
        quiet,
        log,
        no_input,
        keyring_provider,
        proxy,
        retries,
        timeout,

@@ -1,5 +1,5 @@
from contextlib import ExitStack, contextmanager
from typing import ContextManager, Iterator, TypeVar
from typing import ContextManager, Generator, TypeVar

_T = TypeVar("_T", covariant=True)

@@ -11,7 +11,7 @@ class CommandContextMixIn:
        self._main_context = ExitStack()

    @contextmanager
    def main_context(self) -> Iterator[None]:
    def main_context(self) -> Generator[None, None, None]:
        assert not self._in_main_context

        self._in_main_context = True

@@ -4,6 +4,7 @@ import locale
import logging
import os
import sys
import warnings
from typing import List, Optional

from pip._internal.cli.autocompletion import autocomplete
@@ -46,6 +47,14 @@ def main(args: Optional[List[str]] = None) -> int:
    if args is None:
        args = sys.argv[1:]

    # Suppress the pkg_resources deprecation warning
    # Note - we use a module of .*pkg_resources to cover
    # the normal case (pip._vendor.pkg_resources) and the
    # devendored case (a bare pkg_resources)
    warnings.filterwarnings(
        action="ignore", category=DeprecationWarning, module=".*pkg_resources"
    )

    # Configure our deprecation warnings to be sent through loggers
    deprecation.install_warning_logger()

@@ -2,9 +2,11 @@
"""

import os
import subprocess
import sys
from typing import List, Tuple
from typing import List, Optional, Tuple

from pip._internal.build_env import get_runnable_pip
from pip._internal.cli import cmdoptions
from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip._internal.commands import commands_dict, get_similar_commands
@@ -45,6 +47,25 @@ def create_main_parser() -> ConfigOptionParser:
    return parser


def identify_python_interpreter(python: str) -> Optional[str]:
    # If the named file exists, use it.
    # If it's a directory, assume it's a virtual environment and
    # look for the environment's Python executable.
    if os.path.exists(python):
        if os.path.isdir(python):
            # bin/python for Unix, Scripts/python.exe for Windows
            # Try both in case of odd cases like cygwin.
            for exe in ("bin/python", "Scripts/python.exe"):
                py = os.path.join(python, exe)
                if os.path.exists(py):
                    return py
        else:
            return python

    # Could not find the interpreter specified
    return None


def parse_command(args: List[str]) -> Tuple[str, List[str]]:
    parser = create_main_parser()

@@ -57,6 +78,32 @@ def parse_command(args: List[str]) -> Tuple[str, List[str]]:
    #  args_else: ['install', '--user', 'INITools']
    general_options, args_else = parser.parse_args(args)

    # --python
    if general_options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
        # Re-invoke pip using the specified Python interpreter
        interpreter = identify_python_interpreter(general_options.python)
        if interpreter is None:
            raise CommandError(
                f"Could not locate Python interpreter {general_options.python}"
            )

        pip_cmd = [
            interpreter,
            get_runnable_pip(),
        ]
        pip_cmd.extend(args)

        # Set a flag so the child doesn't re-invoke itself, causing
        # an infinite loop.
        os.environ["_PIP_RUNNING_IN_SUBPROCESS"] = "1"
        returncode = 0
        try:
            proc = subprocess.run(pip_cmd)
            returncode = proc.returncode
        except (subprocess.SubprocessError, OSError) as exc:
            raise CommandError(f"Failed to run pip under {interpreter}: {exc}")
        sys.exit(returncode)

    # --version
    if general_options.version:
        sys.stdout.write(parser.version)

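The new `--python` flag re-executes pip under another interpreter by combining `identify_python_interpreter` with `get_runnable_pip`, using the `_PIP_RUNNING_IN_SUBPROCESS` environment variable to stop the child from re-invoking itself. A compressed, Unix-only sketch of that control flow; the paths are illustrative:

```python
import os
import subprocess
import sys
from typing import List

def reinvoke_under(venv_dir: str, args: List[str]) -> None:
    """Sketch of the --python re-invocation guard; venv_dir is illustrative."""
    if "_PIP_RUNNING_IN_SUBPROCESS" in os.environ:
        return  # we are the child already; fall through to normal parsing
    interpreter = os.path.join(venv_dir, "bin/python")  # cf. identify_python_interpreter
    os.environ["_PIP_RUNNING_IN_SUBPROCESS"] = "1"      # break the recursion
    proc = subprocess.run([interpreter, "/site/pip/__pip-runner__.py", *args])
    sys.exit(proc.returncode)
```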
@@ -6,7 +6,7 @@ import shutil
import sys
import textwrap
from contextlib import suppress
from typing import Any, Dict, Iterator, List, Tuple
from typing import Any, Dict, Generator, List, Tuple

from pip._internal.cli.status_codes import UNKNOWN_ERROR
from pip._internal.configuration import Configuration, ConfigurationError
@@ -175,7 +175,9 @@ class ConfigOptionParser(CustomOptionParser):
            print(f"An error occurred during configuration: {exc}")
            sys.exit(3)

    def _get_ordered_configuration_items(self) -> Iterator[Tuple[str, Any]]:
    def _get_ordered_configuration_items(
        self,
    ) -> Generator[Tuple[str, Any], None, None]:
        # Configuration gives keys in an unordered manner. Order them.
        override_order = ["global", self.name, ":env:"]

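Several signatures here move from `Iterator[X]` to `Generator[X, None, None]`. The two extra type parameters state that the generator neither accepts values via `.send()` nor returns one, for example:

```python
from typing import Generator, Iterator

def counts_iter(n: int) -> Iterator[int]:  # older, looser annotation
    yield from range(n)

def counts_gen(n: int) -> Generator[int, None, None]:  # newer, explicit form
    # yields int, accepts no sent value, returns None
    yield from range(n)
```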
@@ -1,250 +1,68 @@
|
||||
import itertools
|
||||
import sys
|
||||
from signal import SIGINT, default_int_handler, signal
|
||||
from typing import Any
|
||||
import functools
|
||||
from typing import Callable, Generator, Iterable, Iterator, Optional, Tuple
|
||||
|
||||
from pip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar
|
||||
from pip._vendor.progress.spinner import Spinner
|
||||
from pip._vendor.rich.progress import (
|
||||
BarColumn,
|
||||
DownloadColumn,
|
||||
FileSizeColumn,
|
||||
Progress,
|
||||
ProgressColumn,
|
||||
SpinnerColumn,
|
||||
TextColumn,
|
||||
TimeElapsedColumn,
|
||||
TimeRemainingColumn,
|
||||
TransferSpeedColumn,
|
||||
)
|
||||
|
||||
from pip._internal.utils.compat import WINDOWS
|
||||
from pip._internal.utils.logging import get_indentation
|
||||
from pip._internal.utils.misc import format_size
|
||||
|
||||
try:
|
||||
from pip._vendor import colorama
|
||||
# Lots of different errors can come from this, including SystemError and
|
||||
# ImportError.
|
||||
except Exception:
|
||||
colorama = None
|
||||
DownloadProgressRenderer = Callable[[Iterable[bytes]], Iterator[bytes]]
|
||||
|
||||
|
||||
def _select_progress_class(preferred: Bar, fallback: Bar) -> Bar:
|
||||
encoding = getattr(preferred.file, "encoding", None)
|
||||
def _rich_progress_bar(
|
||||
iterable: Iterable[bytes],
|
||||
*,
|
||||
bar_type: str,
|
||||
size: int,
|
||||
) -> Generator[bytes, None, None]:
|
||||
assert bar_type == "on", "This should only be used in the default mode."
|
||||
|
||||
# If we don't know what encoding this file is in, then we'll just assume
|
||||
# that it doesn't support unicode and use the ASCII bar.
|
||||
if not encoding:
|
||||
return fallback
|
||||
|
||||
# Collect all of the possible characters we want to use with the preferred
|
||||
# bar.
|
||||
characters = [
|
||||
getattr(preferred, "empty_fill", ""),
|
||||
getattr(preferred, "fill", ""),
|
||||
]
|
||||
characters += list(getattr(preferred, "phases", []))
|
||||
|
||||
# Try to decode the characters we're using for the bar using the encoding
|
||||
# of the given file, if this works then we'll assume that we can use the
|
||||
# fancier bar and if not we'll fall back to the plaintext bar.
|
||||
try:
|
||||
"".join(characters).encode(encoding)
|
||||
except UnicodeEncodeError:
|
||||
return fallback
|
||||
if not size:
|
||||
total = float("inf")
|
||||
columns: Tuple[ProgressColumn, ...] = (
|
||||
TextColumn("[progress.description]{task.description}"),
|
||||
SpinnerColumn("line", speed=1.5),
|
||||
FileSizeColumn(),
|
||||
TransferSpeedColumn(),
|
||||
TimeElapsedColumn(),
|
||||
)
|
||||
else:
|
||||
return preferred
|
||||
|
||||
|
||||
_BaseBar: Any = _select_progress_class(IncrementalBar, Bar)
|
||||
|
||||
|
||||
class InterruptibleMixin:
|
||||
"""
|
||||
Helper to ensure that self.finish() gets called on keyboard interrupt.
|
||||
|
||||
This allows downloads to be interrupted without leaving temporary state
|
||||
(like hidden cursors) behind.
|
||||
|
||||
This class is similar to the progress library's existing SigIntMixin
|
||||
helper, but as of version 1.2, that helper has the following problems:
|
||||
|
||||
1. It calls sys.exit().
|
||||
2. It discards the existing SIGINT handler completely.
|
||||
3. It leaves its own handler in place even after an uninterrupted finish,
|
||||
which will have unexpected delayed effects if the user triggers an
|
||||
unrelated keyboard interrupt some time after a progress-displaying
|
||||
download has already completed, for example.
|
||||
"""
|
||||
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
"""
|
||||
Save the original SIGINT handler for later.
|
||||
"""
|
||||
# https://github.com/python/mypy/issues/5887
|
||||
super().__init__(*args, **kwargs) # type: ignore
|
||||
|
||||
self.original_handler = signal(SIGINT, self.handle_sigint)
|
||||
|
||||
# If signal() returns None, the previous handler was not installed from
|
||||
# Python, and we cannot restore it. This probably should not happen,
|
||||
# but if it does, we must restore something sensible instead, at least.
|
||||
# The least bad option should be Python's default SIGINT handler, which
|
||||
# just raises KeyboardInterrupt.
|
||||
if self.original_handler is None:
|
||||
self.original_handler = default_int_handler
|
||||
|
||||
def finish(self) -> None:
|
||||
"""
|
||||
Restore the original SIGINT handler after finishing.
|
||||
|
||||
This should happen regardless of whether the progress display finishes
|
||||
        normally, or gets interrupted.
        """
        super().finish()  # type: ignore
        signal(SIGINT, self.original_handler)

    def handle_sigint(self, signum, frame):  # type: ignore
        """
        Call self.finish() before delegating to the original SIGINT handler.

        This handler should only be in place while the progress display is
        active.
        """
        self.finish()
        self.original_handler(signum, frame)


class SilentBar(Bar):
    def update(self) -> None:
        pass


class BlueEmojiBar(IncrementalBar):

    suffix = "%(percent)d%%"
    bar_prefix = " "
    bar_suffix = " "
    phases = ("\U0001F539", "\U0001F537", "\U0001F535")


class DownloadProgressMixin:
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # https://github.com/python/mypy/issues/5887
        super().__init__(*args, **kwargs)  # type: ignore
        self.message: str = (" " * (get_indentation() + 2)) + self.message

    @property
    def downloaded(self) -> str:
        return format_size(self.index)  # type: ignore

    @property
    def download_speed(self) -> str:
        # Avoid zero division errors...
        if self.avg == 0.0:  # type: ignore
            return "..."
        return format_size(1 / self.avg) + "/s"  # type: ignore

    @property
    def pretty_eta(self) -> str:
        if self.eta:  # type: ignore
            return f"eta {self.eta_td}"  # type: ignore
        return ""

    def iter(self, it):  # type: ignore
        for x in it:
            yield x
            # B305 is incorrectly raised here
            # https://github.com/PyCQA/flake8-bugbear/issues/59
            self.next(len(x))  # noqa: B305
        self.finish()


class WindowsMixin:
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # The Windows terminal does not support the hide/show cursor ANSI codes
        # even with colorama. So we'll ensure that hide_cursor is False on
        # Windows.
        # This call needs to go before the super() call, so that hide_cursor
        # is set in time. The base progress bar class writes the "hide cursor"
        # code to the terminal in its init, so if we don't set this soon
        # enough, we get a "hide" with no corresponding "show"...
        if WINDOWS and self.hide_cursor:  # type: ignore
            self.hide_cursor = False

        # https://github.com/python/mypy/issues/5887
        super().__init__(*args, **kwargs)  # type: ignore

        # Check if we are running on Windows and we have the colorama module,
        # if we do then wrap our file with it.
        if WINDOWS and colorama:
            self.file = colorama.AnsiToWin32(self.file)  # type: ignore
            # The progress code expects to be able to call self.file.isatty()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.isatty = lambda: self.file.wrapped.isatty()
            # The progress code expects to be able to call self.file.flush()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.flush = lambda: self.file.wrapped.flush()


class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin, DownloadProgressMixin):

    file = sys.stdout
    message = "%(percent)d%%"
    suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"


class DefaultDownloadProgressBar(BaseDownloadProgressBar, _BaseBar):
    pass


class DownloadSilentBar(BaseDownloadProgressBar, SilentBar):
    pass


class DownloadBar(BaseDownloadProgressBar, Bar):
    pass


class DownloadFillingCirclesBar(BaseDownloadProgressBar, FillingCirclesBar):
    pass


class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, BlueEmojiBar):
    pass


class DownloadProgressSpinner(
    WindowsMixin, InterruptibleMixin, DownloadProgressMixin, Spinner
):

    file = sys.stdout
    suffix = "%(downloaded)s %(download_speed)s"

    def next_phase(self) -> str:
        if not hasattr(self, "_phaser"):
            self._phaser = itertools.cycle(self.phases)
        return next(self._phaser)

    def update(self) -> None:
        message = self.message % self
        phase = self.next_phase()
        suffix = self.suffix % self
        line = "".join(
            [
                message,
                " " if message else "",
                phase,
                " " if suffix else "",
                suffix,
            ]
        )
        self.writeln(line)

        total = size
        columns = (
            TextColumn("[progress.description]{task.description}"),
            BarColumn(),
            DownloadColumn(),
            TransferSpeedColumn(),
            TextColumn("eta"),
            TimeRemainingColumn(),
        )

    progress = Progress(*columns, refresh_per_second=30)
    task_id = progress.add_task(" " * (get_indentation() + 2), total=total)
    with progress:
        for chunk in iterable:
            yield chunk
            progress.update(task_id, advance=len(chunk))

BAR_TYPES = {
    "off": (DownloadSilentBar, DownloadSilentBar),
    "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
    "ascii": (DownloadBar, DownloadProgressSpinner),
    "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
    "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner),
}

def DownloadProgressProvider(progress_bar, max=None):  # type: ignore
    if max is None or max == 0:
        return BAR_TYPES[progress_bar][1]().iter
    else:
        return BAR_TYPES[progress_bar][0](max=max).iter


def get_download_progress_renderer(
    *, bar_type: str, size: Optional[int] = None
) -> DownloadProgressRenderer:
    """Get an object that can be used to render the download progress.

    Returns a callable, that takes an iterable to "wrap".
    """
    if bar_type == "on":
        return functools.partial(_rich_progress_bar, bar_type=bar_type, size=size)
    else:
        return iter  # no-op, when passed an iterator

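A minimal usage sketch of the renderer factory above (not part of the diff); the chunk generator and the size are invented example values:

# Hedged sketch; assumes pip's internal API exactly as defined above.
from pip._internal.cli.progress_bars import get_download_progress_renderer

chunks = (b"x" * 1024 for _ in range(10))  # stand-in for a network stream
renderer = get_download_progress_renderer(bar_type="on", size=10 * 1024)
for chunk in renderer(chunks):
    pass  # each chunk would normally be written to a file here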
@@ -10,7 +10,7 @@ import os
import sys
from functools import partial
from optparse import Values
from typing import Any, List, Optional, Tuple
from typing import TYPE_CHECKING, Any, List, Optional, Tuple

from pip._internal.cache import WheelCache
from pip._internal.cli import cmdoptions
@@ -22,6 +22,7 @@ from pip._internal.index.package_finder import PackageFinder
from pip._internal.models.selection_prefs import SelectionPreferences
from pip._internal.models.target_python import TargetPython
from pip._internal.network.session import PipSession
from pip._internal.operations.build.build_tracker import BuildTracker
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req.constructors import (
    install_req_from_editable,
@@ -31,7 +32,6 @@ from pip._internal.req.constructors import (
)
from pip._internal.req.req_file import parse_requirements
from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_tracker import RequirementTracker
from pip._internal.resolution.base import BaseResolver
from pip._internal.self_outdated_check import pip_self_version_check
from pip._internal.utils.temp_dir import (
@@ -41,9 +41,33 @@ from pip._internal.utils.temp_dir import (
)
from pip._internal.utils.virtualenv import running_under_virtualenv

if TYPE_CHECKING:
    from ssl import SSLContext

logger = logging.getLogger(__name__)


def _create_truststore_ssl_context() -> Optional["SSLContext"]:
    if sys.version_info < (3, 10):
        raise CommandError("The truststore feature is only available for Python 3.10+")

    try:
        import ssl
    except ImportError:
        logger.warning("Disabling truststore since ssl support is missing")
        return None

    try:
        import truststore
    except ImportError:
        raise CommandError(
            "To use the truststore feature, 'truststore' must be installed into "
            "pip's current environment."
        )

    return truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)

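A rough sketch of what the returned context enables (not part of the diff); it assumes the third-party truststore package and Python 3.10+, and the URL is an example value:

# Hedged sketch; truststore.SSLContext validates against the OS trust store.
import ssl
import urllib.request

import truststore  # third-party; the same package the helper above imports

ctx = truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
with urllib.request.urlopen("https://pypi.org", context=ctx) as resp:
    print(resp.status)  # TLS verification used the OS trust store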
class SessionCommandMixin(CommandContextMixIn):

    """
@@ -83,15 +107,27 @@ class SessionCommandMixin(CommandContextMixIn):
        options: Values,
        retries: Optional[int] = None,
        timeout: Optional[int] = None,
        fallback_to_certifi: bool = False,
    ) -> PipSession:
        assert not options.cache_dir or os.path.isabs(options.cache_dir)
        cache_dir = options.cache_dir
        assert not cache_dir or os.path.isabs(cache_dir)

        if "truststore" in options.features_enabled:
            try:
                ssl_context = _create_truststore_ssl_context()
            except Exception:
                if not fallback_to_certifi:
                    raise
                ssl_context = None
        else:
            ssl_context = None

        session = PipSession(
            cache=(
                os.path.join(options.cache_dir, "http") if options.cache_dir else None
            ),
            cache=os.path.join(cache_dir, "http") if cache_dir else None,
            retries=retries if retries is not None else options.retries,
            trusted_hosts=options.trusted_hosts,
            index_urls=self._get_index_urls(options),
            ssl_context=ssl_context,
        )

        # Handle custom ca-bundles from the user
@@ -115,6 +151,7 @@ class SessionCommandMixin(CommandContextMixIn):

        # Determine if we can prompt the user for authentication or not
        session.auth.prompting = not options.no_input
        session.auth.keyring_provider = options.keyring_provider

        return session

@@ -141,7 +178,14 @@ class IndexGroupCommand(Command, SessionCommandMixin):

        # Otherwise, check if we're using the latest version of pip available.
        session = self._build_session(
            options, retries=0, timeout=min(5, options.timeout)
            options,
            retries=0,
            timeout=min(5, options.timeout),
            # This is set to ensure the function does not fail when truststore is
            # specified in use-feature but cannot be loaded. This usually raises a
            # CommandError and shows a nice user-facing error, but this function is not
            # called in that try-except block.
            fallback_to_certifi=True,
        )
        with session:
            pip_self_version_check(session, options)
@@ -172,9 +216,10 @@ def warn_if_run_as_root() -> None:
    # checks: https://mypy.readthedocs.io/en/stable/common_issues.html
    if sys.platform == "win32" or sys.platform == "cygwin":
        return
    if sys.platform == "darwin" or sys.platform == "linux":
        if os.getuid() != 0:
            return

    if os.getuid() != 0:
        return

    logger.warning(
        "Running pip as the 'root' user can result in broken permissions and "
        "conflicting behaviour with the system package manager. "
@@ -230,11 +275,12 @@ class RequirementCommand(IndexGroupCommand):
        cls,
        temp_build_dir: TempDirectory,
        options: Values,
        req_tracker: RequirementTracker,
        build_tracker: BuildTracker,
        session: PipSession,
        finder: PackageFinder,
        use_user_site: bool,
        download_dir: Optional[str] = None,
        verbosity: int = 0,
    ) -> RequirementPreparer:
        """
        Create a RequirementPreparer instance for the given parameters.
@@ -265,14 +311,15 @@ class RequirementCommand(IndexGroupCommand):
            src_dir=options.src_dir,
            download_dir=download_dir,
            build_isolation=options.build_isolation,
            req_tracker=req_tracker,
            check_build_deps=options.check_build_deps,
            build_tracker=build_tracker,
            session=session,
            progress_bar=options.progress_bar,
            finder=finder,
            require_hashes=options.require_hashes,
            use_user_site=use_user_site,
            lazy_wheel=lazy_wheel,
            in_tree_build="in-tree-build" in options.features_enabled,
            verbosity=verbosity,
        )

    @classmethod
@@ -363,10 +410,11 @@ class RequirementCommand(IndexGroupCommand):
        for req in args:
            req_to_add = install_req_from_line(
                req,
                None,
                comes_from=None,
                isolated=options.isolated_mode,
                use_pep517=options.use_pep517,
                user_supplied=True,
                config_settings=getattr(options, "config_settings", None),
            )
            requirements.append(req_to_add)

@@ -376,6 +424,7 @@ class RequirementCommand(IndexGroupCommand):
                user_supplied=True,
                isolated=options.isolated_mode,
                use_pep517=options.use_pep517,
                config_settings=getattr(options, "config_settings", None),
            )
            requirements.append(req_to_add)

@@ -389,6 +438,9 @@ class RequirementCommand(IndexGroupCommand):
                isolated=options.isolated_mode,
                use_pep517=options.use_pep517,
                user_supplied=True,
                config_settings=parsed_req.options.get("config_settings")
                if parsed_req.options
                else None,
            )
            requirements.append(req_to_add)

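A hedged sketch of constructing one requirement the way the hunk above does for command-line arguments (not part of the diff; the requirement string is invented):

from pip._internal.req.constructors import install_req_from_line

req_to_add = install_req_from_line(
    "requests>=2.28",  # hypothetical example requirement
    comes_from=None,
    isolated=False,
    use_pep517=None,
    user_supplied=True,
    config_settings=None,
)
print(req_to_add.name, str(req_to_add.specifier))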
@@ -3,9 +3,7 @@ import itertools
import logging
import sys
import time
from typing import IO, Iterator

from pip._vendor.progress import HIDE_CURSOR, SHOW_CURSOR
from typing import IO, Generator, Optional

from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.logging import get_indentation
@@ -25,7 +23,7 @@ class InteractiveSpinner(SpinnerInterface):
    def __init__(
        self,
        message: str,
        file: IO[str] = None,
        file: Optional[IO[str]] = None,
        spin_chars: str = "-\\|/",
        # Empirically, 8 updates/second looks nice
        min_update_interval_seconds: float = 0.125,
@@ -115,7 +113,7 @@ class RateLimiter:


@contextlib.contextmanager
def open_spinner(message: str) -> Iterator[SpinnerInterface]:
def open_spinner(message: str) -> Generator[SpinnerInterface, None, None]:
    # Interactive spinner goes directly to sys.stdout rather than being routed
    # through the logging system, but it acts like it has level INFO,
    # i.e. it's only displayed if we're at level INFO or better.
@@ -138,8 +136,12 @@ def open_spinner(message: str) -> Iterator[SpinnerInterface]:
        spinner.finish("done")


HIDE_CURSOR = "\x1b[?25l"
SHOW_CURSOR = "\x1b[?25h"


@contextlib.contextmanager
def hidden_cursor(file: IO[str]) -> Iterator[None]:
def hidden_cursor(file: IO[str]) -> Generator[None, None, None]:
    # The Windows terminal does not support the hide/show cursor ANSI codes,
    # even via colorama. So don't even try.
    if WINDOWS:

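A small sketch driving the spinner context manager defined in this file (not part of the diff; the sleep loop stands in for real work):

import time

from pip._internal.cli.spinners import open_spinner

with open_spinner("Doing example work") as spinner:
    for _ in range(10):
        time.sleep(0.1)  # stand-in for real work
        spinner.spin()   # advance one frame; finish("done") runs on exit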
@@ -3,87 +3,107 @@ Package containing all pip commands
"""

import importlib
from collections import OrderedDict, namedtuple
from collections import namedtuple
from typing import Any, Dict, Optional

from pip._internal.cli.base_command import Command

CommandInfo = namedtuple('CommandInfo', 'module_path, class_name, summary')
CommandInfo = namedtuple("CommandInfo", "module_path, class_name, summary")

# The ordering matters for help display.
# Also, even though the module path starts with the same
# "pip._internal.commands" prefix in each case, we include the full path
# because it makes testing easier (specifically when modifying commands_dict
# in test setup / teardown by adding info for a FakeCommand class defined
# in a test-related module).
# Finally, we need to pass an iterable of pairs here rather than a dict
# so that the ordering won't be lost when using Python 2.7.
commands_dict: Dict[str, CommandInfo] = OrderedDict([
    ('install', CommandInfo(
        'pip._internal.commands.install', 'InstallCommand',
        'Install packages.',
    )),
    ('download', CommandInfo(
        'pip._internal.commands.download', 'DownloadCommand',
        'Download packages.',
    )),
    ('uninstall', CommandInfo(
        'pip._internal.commands.uninstall', 'UninstallCommand',
        'Uninstall packages.',
    )),
    ('freeze', CommandInfo(
        'pip._internal.commands.freeze', 'FreezeCommand',
        'Output installed packages in requirements format.',
    )),
    ('list', CommandInfo(
        'pip._internal.commands.list', 'ListCommand',
        'List installed packages.',
    )),
    ('show', CommandInfo(
        'pip._internal.commands.show', 'ShowCommand',
        'Show information about installed packages.',
    )),
    ('check', CommandInfo(
        'pip._internal.commands.check', 'CheckCommand',
        'Verify installed packages have compatible dependencies.',
    )),
    ('config', CommandInfo(
        'pip._internal.commands.configuration', 'ConfigurationCommand',
        'Manage local and global configuration.',
    )),
    ('search', CommandInfo(
        'pip._internal.commands.search', 'SearchCommand',
        'Search PyPI for packages.',
    )),
    ('cache', CommandInfo(
        'pip._internal.commands.cache', 'CacheCommand',
        "Inspect and manage pip's wheel cache.",
    )),
    ('index', CommandInfo(
        'pip._internal.commands.index', 'IndexCommand',
        "Inspect information available from package indexes.",
    )),
    ('wheel', CommandInfo(
        'pip._internal.commands.wheel', 'WheelCommand',
        'Build wheels from your requirements.',
    )),
    ('hash', CommandInfo(
        'pip._internal.commands.hash', 'HashCommand',
        'Compute hashes of package archives.',
    )),
    ('completion', CommandInfo(
        'pip._internal.commands.completion', 'CompletionCommand',
        'A helper command used for command completion.',
    )),
    ('debug', CommandInfo(
        'pip._internal.commands.debug', 'DebugCommand',
        'Show information useful for debugging.',
    )),
    ('help', CommandInfo(
        'pip._internal.commands.help', 'HelpCommand',
        'Show help for commands.',
    )),
])

# This dictionary does a bunch of heavy lifting for help output:
# - Enables avoiding additional (costly) imports for presenting `--help`.
# - The ordering matters for help display.
#
# Even though the module path starts with the same "pip._internal.commands"
# prefix, the full path makes testing easier (specifically when modifying
# `commands_dict` in test setup / teardown).
commands_dict: Dict[str, CommandInfo] = {
    "install": CommandInfo(
        "pip._internal.commands.install",
        "InstallCommand",
        "Install packages.",
    ),
    "download": CommandInfo(
        "pip._internal.commands.download",
        "DownloadCommand",
        "Download packages.",
    ),
    "uninstall": CommandInfo(
        "pip._internal.commands.uninstall",
        "UninstallCommand",
        "Uninstall packages.",
    ),
    "freeze": CommandInfo(
        "pip._internal.commands.freeze",
        "FreezeCommand",
        "Output installed packages in requirements format.",
    ),
    "inspect": CommandInfo(
        "pip._internal.commands.inspect",
        "InspectCommand",
        "Inspect the python environment.",
    ),
    "list": CommandInfo(
        "pip._internal.commands.list",
        "ListCommand",
        "List installed packages.",
    ),
    "show": CommandInfo(
        "pip._internal.commands.show",
        "ShowCommand",
        "Show information about installed packages.",
    ),
    "check": CommandInfo(
        "pip._internal.commands.check",
        "CheckCommand",
        "Verify installed packages have compatible dependencies.",
    ),
    "config": CommandInfo(
        "pip._internal.commands.configuration",
        "ConfigurationCommand",
        "Manage local and global configuration.",
    ),
    "search": CommandInfo(
        "pip._internal.commands.search",
        "SearchCommand",
        "Search PyPI for packages.",
    ),
    "cache": CommandInfo(
        "pip._internal.commands.cache",
        "CacheCommand",
        "Inspect and manage pip's wheel cache.",
    ),
    "index": CommandInfo(
        "pip._internal.commands.index",
        "IndexCommand",
        "Inspect information available from package indexes.",
    ),
    "wheel": CommandInfo(
        "pip._internal.commands.wheel",
        "WheelCommand",
        "Build wheels from your requirements.",
    ),
    "hash": CommandInfo(
        "pip._internal.commands.hash",
        "HashCommand",
        "Compute hashes of package archives.",
    ),
    "completion": CommandInfo(
        "pip._internal.commands.completion",
        "CompletionCommand",
        "A helper command used for command completion.",
    ),
    "debug": CommandInfo(
        "pip._internal.commands.debug",
        "DebugCommand",
        "Show information useful for debugging.",
    ),
    "help": CommandInfo(
        "pip._internal.commands.help",
        "HelpCommand",
        "Show help for commands.",
    ),
}


def create_command(name: str, **kwargs: Any) -> Command:

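A sketch of how commands_dict supports the lazy loading described in the comment above, mirroring what pip's create_command does (not part of the diff; the "freeze" key is an arbitrary choice):

import importlib

from pip._internal.commands import commands_dict

module_path, class_name, summary = commands_dict["freeze"]
module = importlib.import_module(module_path)  # imported only when needed
command_class = getattr(module, class_name)
command = command_class(name="freeze", summary=summary)
print(command.summary)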
@@ -37,19 +37,18 @@ class CacheCommand(Command):
    """

    def add_options(self) -> None:

        self.cmd_opts.add_option(
            '--format',
            action='store',
            dest='list_format',
            "--format",
            action="store",
            dest="list_format",
            default="human",
            choices=('human', 'abspath'),
            help="Select the output format among: human (default) or abspath"
            choices=("human", "abspath"),
            help="Select the output format among: human (default) or abspath",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[Any]) -> int:
    def run(self, options: Values, args: List[str]) -> int:
        handlers = {
            "dir": self.get_cache_dir,
            "info": self.get_cache_info,
@@ -59,8 +58,7 @@ class CacheCommand(Command):
        }

        if not options.cache_dir:
            logger.error("pip cache commands can not "
                         "function since cache is disabled.")
            logger.error("pip cache commands can not function since cache is disabled.")
            return ERROR

        # Determine action
@@ -84,69 +82,73 @@ class CacheCommand(Command):

    def get_cache_dir(self, options: Values, args: List[Any]) -> None:
        if args:
            raise CommandError('Too many arguments')
            raise CommandError("Too many arguments")

        logger.info(options.cache_dir)

    def get_cache_info(self, options: Values, args: List[Any]) -> None:
        if args:
            raise CommandError('Too many arguments')
            raise CommandError("Too many arguments")

        num_http_files = len(self._find_http_files(options))
        num_packages = len(self._find_wheels(options, '*'))
        num_packages = len(self._find_wheels(options, "*"))

        http_cache_location = self._cache_dir(options, 'http')
        wheels_cache_location = self._cache_dir(options, 'wheels')
        http_cache_location = self._cache_dir(options, "http")
        wheels_cache_location = self._cache_dir(options, "wheels")
        http_cache_size = filesystem.format_directory_size(http_cache_location)
        wheels_cache_size = filesystem.format_directory_size(
            wheels_cache_location
        )
        wheels_cache_size = filesystem.format_directory_size(wheels_cache_location)

        message = textwrap.dedent("""
            Package index page cache location: {http_cache_location}
            Package index page cache size: {http_cache_size}
            Number of HTTP files: {num_http_files}
            Wheels location: {wheels_cache_location}
            Wheels size: {wheels_cache_size}
            Number of wheels: {package_count}
        """).format(
            http_cache_location=http_cache_location,
            http_cache_size=http_cache_size,
            num_http_files=num_http_files,
            wheels_cache_location=wheels_cache_location,
            package_count=num_packages,
            wheels_cache_size=wheels_cache_size,
        ).strip()
        message = (
            textwrap.dedent(
                """
                Package index page cache location: {http_cache_location}
                Package index page cache size: {http_cache_size}
                Number of HTTP files: {num_http_files}
                Locally built wheels location: {wheels_cache_location}
                Locally built wheels size: {wheels_cache_size}
                Number of locally built wheels: {package_count}
                """
            )
            .format(
                http_cache_location=http_cache_location,
                http_cache_size=http_cache_size,
                num_http_files=num_http_files,
                wheels_cache_location=wheels_cache_location,
                package_count=num_packages,
                wheels_cache_size=wheels_cache_size,
            )
            .strip()
        )

        logger.info(message)

    def list_cache_items(self, options: Values, args: List[Any]) -> None:
        if len(args) > 1:
            raise CommandError('Too many arguments')
            raise CommandError("Too many arguments")

        if args:
            pattern = args[0]
        else:
            pattern = '*'
            pattern = "*"

        files = self._find_wheels(options, pattern)
        if options.list_format == 'human':
        if options.list_format == "human":
            self.format_for_human(files)
        else:
            self.format_for_abspath(files)

    def format_for_human(self, files: List[str]) -> None:
        if not files:
            logger.info('Nothing cached.')
            logger.info("No locally built wheels cached.")
            return

        results = []
        for filename in files:
            wheel = os.path.basename(filename)
            size = filesystem.format_file_size(filename)
            results.append(f' - {wheel} ({size})')
        logger.info('Cache contents:\n')
        logger.info('\n'.join(sorted(results)))
            results.append(f" - {wheel} ({size})")
        logger.info("Cache contents:\n")
        logger.info("\n".join(sorted(results)))

    def format_for_abspath(self, files: List[str]) -> None:
        if not files:
@@ -156,23 +158,27 @@ class CacheCommand(Command):
        for filename in files:
            results.append(filename)

        logger.info('\n'.join(sorted(results)))
        logger.info("\n".join(sorted(results)))

    def remove_cache_items(self, options: Values, args: List[Any]) -> None:
        if len(args) > 1:
            raise CommandError('Too many arguments')
            raise CommandError("Too many arguments")

        if not args:
            raise CommandError('Please provide a pattern')
            raise CommandError("Please provide a pattern")

        files = self._find_wheels(options, args[0])

        # Only fetch http files if no specific pattern given
        if args[0] == '*':
        no_matching_msg = "No matching packages"
        if args[0] == "*":
            # Only fetch http files if no specific pattern given
            files += self._find_http_files(options)
        else:
            # Add the pattern to the log message
            no_matching_msg += ' for pattern "{}"'.format(args[0])

        if not files:
            raise CommandError('No matching packages')
            logger.warning(no_matching_msg)

        for filename in files:
            os.unlink(filename)
@@ -181,19 +187,19 @@ class CacheCommand(Command):

    def purge_cache(self, options: Values, args: List[Any]) -> None:
        if args:
            raise CommandError('Too many arguments')
            raise CommandError("Too many arguments")

        return self.remove_cache_items(options, ['*'])
        return self.remove_cache_items(options, ["*"])

    def _cache_dir(self, options: Values, subdir: str) -> str:
        return os.path.join(options.cache_dir, subdir)

    def _find_http_files(self, options: Values) -> List[str]:
        http_dir = self._cache_dir(options, 'http')
        return filesystem.find_files(http_dir, '*')
        http_dir = self._cache_dir(options, "http")
        return filesystem.find_files(http_dir, "*")

    def _find_wheels(self, options: Values, pattern: str) -> List[str]:
        wheel_dir = self._cache_dir(options, 'wheels')
        wheel_dir = self._cache_dir(options, "wheels")

        # The wheel filename format, as specified in PEP 427, is:
        # {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl

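An illustrative sketch of matching cached wheel files against the PEP 427 naming scheme noted above (not part of the diff; names and pattern are made up):

import fnmatch

cached = [
    "requests-2.28.1-py3-none-any.whl",
    "idna-3.4-py3-none-any.whl",
]
# A "requests" query becomes a glob over {distribution}-{version}-....whl
print(fnmatch.filter(cached, "requests-*.whl"))  # ['requests-2.28.1-py3-none-any.whl']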
@@ -1,6 +1,6 @@
import logging
from optparse import Values
from typing import Any, List
from typing import List

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
@@ -19,8 +19,7 @@ class CheckCommand(Command):
    usage = """
      %prog [options]"""

    def run(self, options: Values, args: List[Any]) -> int:
    def run(self, options: Values, args: List[str]) -> int:
        package_set, parsing_probs = create_package_set_from_installed()
        missing, conflicting = check_package_set(package_set)

@@ -29,7 +28,9 @@ class CheckCommand(Command):
            for dependency in missing[project_name]:
                write_output(
                    "%s %s requires %s, which is not installed.",
                    project_name, version, dependency[0],
                    project_name,
                    version,
                    dependency[0],
                )

        for project_name in conflicting:
@@ -37,7 +38,11 @@ class CheckCommand(Command):
            for dep_name, dep_version, req in conflicting[project_name]:
                write_output(
                    "%s %s has requirement %s, but you have %s %s.",
                    project_name, version, req, dep_name, dep_version,
                    project_name,
                    version,
                    req,
                    dep_name,
                    dep_version,
                )

        if missing or conflicting or parsing_probs:

@@ -12,7 +12,7 @@ BASE_COMPLETION = """
"""

COMPLETION_SCRIPTS = {
    'bash': """
    "bash": """
        _pip_completion()
        {{
            COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\
@@ -21,7 +21,7 @@ COMPLETION_SCRIPTS = {
        }}
        complete -o default -F _pip_completion {prog}
    """,
    'zsh': """
    "zsh": """
        function _pip_completion {{
            local words cword
            read -Ac words
@@ -32,7 +32,7 @@ COMPLETION_SCRIPTS = {
        }}
        compctl -K _pip_completion {prog}
    """,
    'fish': """
    "fish": """
        function __fish_complete_pip
            set -lx COMP_WORDS (commandline -o) ""
            set -lx COMP_CWORD ( \\
@@ -43,6 +43,28 @@ COMPLETION_SCRIPTS = {
        end
        complete -fa "(__fish_complete_pip)" -c {prog}
    """,
    "powershell": """
        if ((Test-Path Function:\\TabExpansion) -and -not `
            (Test-Path Function:\\_pip_completeBackup)) {{
            Rename-Item Function:\\TabExpansion _pip_completeBackup
        }}
        function TabExpansion($line, $lastWord) {{
            $lastBlock = [regex]::Split($line, '[|;]')[-1].TrimStart()
            if ($lastBlock.StartsWith("{prog} ")) {{
                $Env:COMP_WORDS=$lastBlock
                $Env:COMP_CWORD=$lastBlock.Split().Length - 1
                $Env:PIP_AUTO_COMPLETE=1
                (& {prog}).Split()
                Remove-Item Env:COMP_WORDS
                Remove-Item Env:COMP_CWORD
                Remove-Item Env:PIP_AUTO_COMPLETE
            }}
            elseif (Test-Path Function:\\_pip_completeBackup) {{
                # Fall back on existing tab expansion
                _pip_completeBackup $line $lastWord
            }}
        }}
    """,
}

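A self-contained sketch of how these doubled-brace templates render (not part of the diff; the template is a trimmed stand-in and "pip" is an example prog name):

import textwrap

template = """
    _pip_completion()
    {{
        COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" ) )
    }}
    complete -o default -F _pip_completion {prog}
"""
# {{ / }} escape to literal braces; only {prog} is substituted.
print(textwrap.dedent(template).format(prog="pip"))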
@@ -53,39 +75,52 @@ class CompletionCommand(Command):

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            '--bash', '-b',
            action='store_const',
            const='bash',
            dest='shell',
            help='Emit completion code for bash')
            "--bash",
            "-b",
            action="store_const",
            const="bash",
            dest="shell",
            help="Emit completion code for bash",
        )
        self.cmd_opts.add_option(
            '--zsh', '-z',
            action='store_const',
            const='zsh',
            dest='shell',
            help='Emit completion code for zsh')
            "--zsh",
            "-z",
            action="store_const",
            const="zsh",
            dest="shell",
            help="Emit completion code for zsh",
        )
        self.cmd_opts.add_option(
            '--fish', '-f',
            action='store_const',
            const='fish',
            dest='shell',
            help='Emit completion code for fish')
            "--fish",
            "-f",
            action="store_const",
            const="fish",
            dest="shell",
            help="Emit completion code for fish",
        )
        self.cmd_opts.add_option(
            "--powershell",
            "-p",
            action="store_const",
            const="powershell",
            dest="shell",
            help="Emit completion code for powershell",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Prints the completion code of the given shell"""
        shells = COMPLETION_SCRIPTS.keys()
        shell_options = ['--' + shell for shell in sorted(shells)]
        shell_options = ["--" + shell for shell in sorted(shells)]
        if options.shell in shells:
            script = textwrap.dedent(
                COMPLETION_SCRIPTS.get(options.shell, '').format(
                    prog=get_prog())
                COMPLETION_SCRIPTS.get(options.shell, "").format(prog=get_prog())
            )
            print(BASE_COMPLETION.format(script=script, shell=options.shell))
            return SUCCESS
        else:
            sys.stderr.write(
                'ERROR: You must pass {}\n' .format(' or '.join(shell_options))
                "ERROR: You must pass {}\n".format(" or ".join(shell_options))
            )
            return SUCCESS

@@ -27,14 +27,20 @@ class ConfigurationCommand(Command):

    - list: List the active configuration (or from the file specified)
    - edit: Edit the configuration file in an editor
    - get: Get the value associated with name
    - set: Set the name=value
    - unset: Unset the value associated with name
    - get: Get the value associated with command.option
    - set: Set the command.option=value
    - unset: Unset the value associated with command.option
    - debug: List the configuration files and values defined under them

    Configuration keys should be dot separated command and option name,
    with the special prefix "global" affecting any command. For example,
    "pip config set global.index-url https://example.org/" would configure
    the index url for all commands, but "pip config set download.timeout 10"
    would configure a 10 second timeout only for "pip download" commands.

    If none of --user, --global and --site are passed, a virtual
    environment configuration file is used if one is active and the file
    exists. Otherwise, all modifications happen on the to the user file by
    exists. Otherwise, all modifications happen to the user file by
    default.
    """

@@ -43,46 +49,46 @@ class ConfigurationCommand(Command):
        %prog [<file-option>] list
        %prog [<file-option>] [--editor <editor-path>] edit

        %prog [<file-option>] get name
        %prog [<file-option>] set name value
        %prog [<file-option>] unset name
        %prog [<file-option>] get command.option
        %prog [<file-option>] set command.option value
        %prog [<file-option>] unset command.option
        %prog [<file-option>] debug
    """

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            '--editor',
            dest='editor',
            action='store',
            "--editor",
            dest="editor",
            action="store",
            default=None,
            help=(
                'Editor to use to edit the file. Uses VISUAL or EDITOR '
                'environment variables if not provided.'
            )
                "Editor to use to edit the file. Uses VISUAL or EDITOR "
                "environment variables if not provided."
            ),
        )

        self.cmd_opts.add_option(
            '--global',
            dest='global_file',
            action='store_true',
            "--global",
            dest="global_file",
            action="store_true",
            default=False,
            help='Use the system-wide configuration file only'
            help="Use the system-wide configuration file only",
        )

        self.cmd_opts.add_option(
            '--user',
            dest='user_file',
            action='store_true',
            "--user",
            dest="user_file",
            action="store_true",
            default=False,
            help='Use the user configuration file only'
            help="Use the user configuration file only",
        )

        self.cmd_opts.add_option(
            '--site',
            dest='site_file',
            action='store_true',
            "--site",
            dest="site_file",
            action="store_true",
            default=False,
            help='Use the current environment configuration file only'
            help="Use the current environment configuration file only",
        )

        self.parser.insert_option_group(0, self.cmd_opts)
@@ -133,11 +139,15 @@ class ConfigurationCommand(Command):
        return SUCCESS

    def _determine_file(self, options: Values, need_value: bool) -> Optional[Kind]:
        file_options = [key for key, value in (
            (kinds.USER, options.user_file),
            (kinds.GLOBAL, options.global_file),
            (kinds.SITE, options.site_file),
        ) if value]
        file_options = [
            key
            for key, value in (
                (kinds.USER, options.user_file),
                (kinds.GLOBAL, options.global_file),
                (kinds.SITE, options.site_file),
            )
            if value
        ]

        if not file_options:
            if not need_value:
@@ -194,24 +204,22 @@ class ConfigurationCommand(Command):
        for fname in files:
            with indent_log():
                file_exists = os.path.exists(fname)
                write_output("%s, exists: %r",
                             fname, file_exists)
                write_output("%s, exists: %r", fname, file_exists)
                if file_exists:
                    self.print_config_file_values(variant)

    def print_config_file_values(self, variant: Kind) -> None:
        """Get key-value pairs from the file of a variant"""
        for name, value in self.configuration.\
                get_values_in_config(variant).items():
        for name, value in self.configuration.get_values_in_config(variant).items():
            with indent_log():
                write_output("%s: %s", name, value)

    def print_env_var_values(self) -> None:
        """Get key-values pairs present as environment variables"""
        write_output("%s:", 'env_var')
        write_output("%s:", "env_var")
        with indent_log():
            for key, value in sorted(self.configuration.get_environ_vars()):
                env_var = f'PIP_{key.upper()}'
                env_var = f"PIP_{key.upper()}"
                write_output("%s=%r", env_var, value)

    def open_in_editor(self, options: Values, args: List[str]) -> None:
@@ -220,21 +228,29 @@ class ConfigurationCommand(Command):
        fname = self.configuration.get_file_to_edit()
        if fname is None:
            raise PipError("Could not determine appropriate file.")
        elif '"' in fname:
            # This shouldn't happen, unless we see a username like that.
            # If that happens, we'd appreciate a pull request fixing this.
            raise PipError(
                f'Can not open an editor for a file name containing "\n{fname}'
            )

        try:
            subprocess.check_call([editor, fname])
            subprocess.check_call(f'{editor} "{fname}"', shell=True)
        except FileNotFoundError as e:
            if not e.filename:
                e.filename = editor
            raise
        except subprocess.CalledProcessError as e:
            raise PipError(
                "Editor Subprocess exited with exit code {}"
                .format(e.returncode)
                "Editor Subprocess exited with exit code {}".format(e.returncode)
            )

    def _get_n_args(self, args: List[str], example: str, n: int) -> Any:
        """Helper to make sure the command got the right number of arguments
        """
        """Helper to make sure the command got the right number of arguments"""
        if len(args) != n:
            msg = (
                'Got unexpected number of arguments, expected {}. '
                "Got unexpected number of arguments, expected {}. "
                '(example: "{} config {}")'
            ).format(n, get_prog(), example)
            raise PipError(msg)

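A sketch of the dot-separated "command.option" key convention that the docstring above describes (not part of the diff; the helper is invented for illustration):

def split_config_key(key):
    # "global.index-url" -> ("global", "index-url")
    command, _, option = key.partition(".")
    if not option:
        raise ValueError(f"expected command.option, got {key!r}")
    return command, option

print(split_config_key("global.index-url"))  # ('global', 'index-url')
print(split_config_key("download.timeout"))  # ('download', 'timeout')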
@@ -1,3 +1,4 @@
import importlib.resources
import locale
import logging
import os
@@ -10,7 +11,6 @@ import pip._vendor
from pip._vendor.certifi import where
from pip._vendor.packaging.version import parse as parse_version

from pip import __file__ as pip_location
from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import Command
from pip._internal.cli.cmdoptions import make_target_python
@@ -24,55 +24,46 @@ logger = logging.getLogger(__name__)


def show_value(name: str, value: Any) -> None:
    logger.info('%s: %s', name, value)
    logger.info("%s: %s", name, value)


def show_sys_implementation() -> None:
    logger.info('sys.implementation:')
    logger.info("sys.implementation:")
    implementation_name = sys.implementation.name
    with indent_log():
        show_value('name', implementation_name)
        show_value("name", implementation_name)


def create_vendor_txt_map() -> Dict[str, str]:
    vendor_txt_path = os.path.join(
        os.path.dirname(pip_location),
        '_vendor',
        'vendor.txt'
    )

    with open(vendor_txt_path) as f:
    with importlib.resources.open_text("pip._vendor", "vendor.txt") as f:
        # Purge non version specifying lines.
        # Also, remove any space prefix or suffixes (including comments).
        lines = [line.strip().split(' ', 1)[0]
                 for line in f.readlines() if '==' in line]
        lines = [
            line.strip().split(" ", 1)[0] for line in f.readlines() if "==" in line
        ]

    # Transform into "module" -> version dict.
    return dict(line.split('==', 1) for line in lines)  # type: ignore
    return dict(line.split("==", 1) for line in lines)

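The same parsing applied to a made-up vendor.txt snippet (not part of the diff), to show the resulting module-to-version map:

sample = """\
CacheControl==0.12.11  # comment after the pin is dropped by the split
colorama==0.4.6
# a comment-only line is skipped entirely (no "==")
"""
lines = [line.strip().split(" ", 1)[0] for line in sample.splitlines() if "==" in line]
print(dict(line.split("==", 1) for line in lines))
# {'CacheControl': '0.12.11', 'colorama': '0.4.6'}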
def get_module_from_module_name(module_name: str) -> ModuleType:
    # Module name can be uppercase in vendor.txt for some reason...
    module_name = module_name.lower()
    module_name = module_name.lower().replace("-", "_")
    # PATCH: setuptools is actually only pkg_resources.
    if module_name == 'setuptools':
        module_name = 'pkg_resources'
    if module_name == "setuptools":
        module_name = "pkg_resources"

    __import__(
        f'pip._vendor.{module_name}',
        globals(),
        locals(),
        level=0
    )
    __import__(f"pip._vendor.{module_name}", globals(), locals(), level=0)
    return getattr(pip._vendor, module_name)


def get_vendor_version_from_module(module_name: str) -> Optional[str]:
    module = get_module_from_module_name(module_name)
    version = getattr(module, '__version__', None)
    version = getattr(module, "__version__", None)

    if not version:
        # Try to find version in debundled module info.
        assert module.__file__ is not None
        env = get_environment([os.path.dirname(module.__file__)])
        dist = env.get_distribution(module_name)
        if dist:
@@ -86,20 +77,24 @@ def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None:
    a conflict or if the actual version could not be imported.
    """
    for module_name, expected_version in vendor_txt_versions.items():
        extra_message = ''
        extra_message = ""
        actual_version = get_vendor_version_from_module(module_name)
        if not actual_version:
            extra_message = ' (Unable to locate actual module version, using'\
                            ' vendor.txt specified version)'
            extra_message = (
                " (Unable to locate actual module version, using"
                " vendor.txt specified version)"
            )
            actual_version = expected_version
        elif parse_version(actual_version) != parse_version(expected_version):
            extra_message = ' (CONFLICT: vendor.txt suggests version should'\
                            ' be {})'.format(expected_version)
        logger.info('%s==%s%s', module_name, actual_version, extra_message)
            extra_message = (
                " (CONFLICT: vendor.txt suggests version should"
                " be {})".format(expected_version)
            )
        logger.info("%s==%s%s", module_name, actual_version, extra_message)


def show_vendor_versions() -> None:
    logger.info('vendored library versions:')
    logger.info("vendored library versions:")

    vendor_txt_versions = create_vendor_txt_map()
    with indent_log():
@@ -114,11 +109,11 @@ def show_tags(options: Values) -> None:

    # Display the target options that were explicitly provided.
    formatted_target = target_python.format_given()
    suffix = ''
    suffix = ""
    if formatted_target:
        suffix = f' (target: {formatted_target})'
        suffix = f" (target: {formatted_target})"

    msg = 'Compatible tags: {}{}'.format(len(tags), suffix)
    msg = "Compatible tags: {}{}".format(len(tags), suffix)
    logger.info(msg)

    if options.verbose < 1 and len(tags) > tag_limit:
@@ -133,8 +128,7 @@ def show_tags(options: Values) -> None:

    if tags_limited:
        msg = (
            '...\n'
            '[First {tag_limit} tags shown. Pass --verbose to show all.]'
            "...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"
        ).format(tag_limit=tag_limit)
        logger.info(msg)

@@ -142,20 +136,20 @@ def show_tags(options: Values) -> None:
def ca_bundle_info(config: Configuration) -> str:
    levels = set()
    for key, _ in config.items():
        levels.add(key.split('.')[0])
        levels.add(key.split(".")[0])

    if not levels:
        return "Not specified"

    levels_that_override_global = ['install', 'wheel', 'download']
    levels_that_override_global = ["install", "wheel", "download"]
    global_overriding_level = [
        level for level in levels if level in levels_that_override_global
    ]
    if not global_overriding_level:
        return 'global'
        return "global"

    if 'global' in levels:
        levels.remove('global')
    if "global" in levels:
        levels.remove("global")
    return ", ".join(levels)


@@ -180,20 +174,21 @@ class DebugCommand(Command):
            "details, since the output and options of this command may "
            "change without notice."
        )
        show_value('pip version', get_pip_version())
        show_value('sys.version', sys.version)
        show_value('sys.executable', sys.executable)
        show_value('sys.getdefaultencoding', sys.getdefaultencoding())
        show_value('sys.getfilesystemencoding', sys.getfilesystemencoding())
        show_value("pip version", get_pip_version())
        show_value("sys.version", sys.version)
        show_value("sys.executable", sys.executable)
        show_value("sys.getdefaultencoding", sys.getdefaultencoding())
        show_value("sys.getfilesystemencoding", sys.getfilesystemencoding())
        show_value(
            'locale.getpreferredencoding', locale.getpreferredencoding(),
            "locale.getpreferredencoding",
            locale.getpreferredencoding(),
        )
        show_value('sys.platform', sys.platform)
        show_value("sys.platform", sys.platform)
        show_sys_implementation()

        show_value("'cert' config value", ca_bundle_info(self.parser.config))
        show_value("REQUESTS_CA_BUNDLE", os.environ.get('REQUESTS_CA_BUNDLE'))
        show_value("CURL_CA_BUNDLE", os.environ.get('CURL_CA_BUNDLE'))
        show_value("REQUESTS_CA_BUNDLE", os.environ.get("REQUESTS_CA_BUNDLE"))
        show_value("CURL_CA_BUNDLE", os.environ.get("CURL_CA_BUNDLE"))
        show_value("pip._vendor.certifi.where()", where())
        show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED)

@@ -7,7 +7,8 @@ from pip._internal.cli import cmdoptions
from pip._internal.cli.cmdoptions import make_target_python
from pip._internal.cli.req_command import RequirementCommand, with_cleanup
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.req.req_tracker import get_requirement_tracker
from pip._internal.operations.build.build_tracker import get_build_tracker
from pip._internal.req.req_install import check_legacy_setup_py_options
from pip._internal.utils.misc import ensure_dir, normalize_path, write_output
from pip._internal.utils.temp_dir import TempDirectory

@@ -37,7 +38,6 @@ class DownloadCommand(RequirementCommand):
    def add_options(self) -> None:
        self.cmd_opts.add_option(cmdoptions.constraints())
        self.cmd_opts.add_option(cmdoptions.requirements())
        self.cmd_opts.add_option(cmdoptions.build_dir())
        self.cmd_opts.add_option(cmdoptions.no_deps())
        self.cmd_opts.add_option(cmdoptions.global_options())
        self.cmd_opts.add_option(cmdoptions.no_binary())
@@ -50,14 +50,18 @@ class DownloadCommand(RequirementCommand):
        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
        self.cmd_opts.add_option(cmdoptions.use_pep517())
        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
        self.cmd_opts.add_option(cmdoptions.check_build_deps())
        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())

        self.cmd_opts.add_option(
            '-d', '--dest', '--destination-dir', '--destination-directory',
            dest='download_dir',
            metavar='dir',
            "-d",
            "--dest",
            "--destination-dir",
            "--destination-directory",
            dest="download_dir",
            metavar="dir",
            default=os.curdir,
            help=("Download packages into <dir>."),
            help="Download packages into <dir>.",
        )

        cmdoptions.add_target_python_options(self.cmd_opts)
@@ -72,7 +76,6 @@ class DownloadCommand(RequirementCommand):

    @with_cleanup
    def run(self, options: Values, args: List[str]) -> int:

        options.ignore_installed = True
        # editable doesn't really make sense for `pip download`, but the bowels
        # of the RequirementSet code require that property.
@@ -93,7 +96,7 @@ class DownloadCommand(RequirementCommand):
            ignore_requires_python=options.ignore_requires_python,
        )

        req_tracker = self.enter_context(get_requirement_tracker())
        build_tracker = self.enter_context(get_build_tracker())

        directory = TempDirectory(
            delete=not options.no_clean,
@@ -102,15 +105,17 @@ class DownloadCommand(RequirementCommand):
        )

        reqs = self.get_requirements(args, options, finder, session)
        check_legacy_setup_py_options(options, reqs)

        preparer = self.make_requirement_preparer(
            temp_build_dir=directory,
            options=options,
            req_tracker=req_tracker,
            build_tracker=build_tracker,
            session=session,
            finder=finder,
            download_dir=options.download_dir,
            use_user_site=False,
            verbosity=self.verbosity,
        )

        resolver = self.make_resolver(
@@ -118,14 +123,13 @@ class DownloadCommand(RequirementCommand):
            finder=finder,
            options=options,
            ignore_requires_python=options.ignore_requires_python,
            use_pep517=options.use_pep517,
            py_version_info=options.python_version,
        )

        self.trace_basic_info(finder)

        requirement_set = resolver.resolve(
            reqs, check_supported_wheels=True
        )
        requirement_set = resolver.resolve(reqs, check_supported_wheels=True)

        downloaded: List[str] = []
        for req in requirement_set.requirements.values():
@@ -134,6 +138,6 @@ class DownloadCommand(RequirementCommand):
                preparer.save_linked_requirement(req)
                downloaded.append(req.name)
        if downloaded:
            write_output('Successfully downloaded %s', ' '.join(downloaded))
            write_output("Successfully downloaded %s", " ".join(downloaded))

        return SUCCESS

@@ -8,7 +8,7 @@ from pip._internal.cli.status_codes import SUCCESS
from pip._internal.operations.freeze import freeze
from pip._internal.utils.compat import stdlib_pkgs

DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'}
DEV_PKGS = {"pip", "setuptools", "distribute", "wheel"}


class FreezeCommand(Command):
@@ -24,39 +24,52 @@ class FreezeCommand(Command):

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            "-r",
            "--requirement",
            dest="requirements",
            action="append",
            default=[],
            metavar='file',
            help="Use the order in the given requirements file and its "
                 "comments when generating output. This option can be "
                 "used multiple times.")
            metavar="file",
            help=(
                "Use the order in the given requirements file and its "
                "comments when generating output. This option can be "
                "used multiple times."
            ),
        )
        self.cmd_opts.add_option(
            '-l', '--local',
            dest='local',
            action='store_true',
            "-l",
            "--local",
            dest="local",
            action="store_true",
            default=False,
            help='If in a virtualenv that has global access, do not output '
                 'globally-installed packages.')
            help=(
                "If in a virtualenv that has global access, do not output "
                "globally-installed packages."
            ),
        )
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            "--user",
            dest="user",
            action="store_true",
            default=False,
            help='Only output packages installed in user-site.')
            help="Only output packages installed in user-site.",
        )
        self.cmd_opts.add_option(cmdoptions.list_path())
        self.cmd_opts.add_option(
            '--all',
            dest='freeze_all',
            action='store_true',
            help='Do not skip these packages in the output:'
                 ' {}'.format(', '.join(DEV_PKGS)))
            "--all",
            dest="freeze_all",
            action="store_true",
            help=(
                "Do not skip these packages in the output:"
                " {}".format(", ".join(DEV_PKGS))
            ),
        )
        self.cmd_opts.add_option(
            '--exclude-editable',
            dest='exclude_editable',
            action='store_true',
            help='Exclude editable package from output.')
            "--exclude-editable",
            dest="exclude_editable",
            action="store_true",
            help="Exclude editable package from output.",
        )
        self.cmd_opts.add_option(cmdoptions.list_exclude())

        self.parser.insert_option_group(0, self.cmd_opts)
@@ -80,5 +93,5 @@ class FreezeCommand(Command):
            skip=skip,
            exclude_editable=options.exclude_editable,
        ):
            sys.stdout.write(line + '\n')
            sys.stdout.write(line + "\n")
        return SUCCESS

@@ -20,18 +20,21 @@ class HashCommand(Command):
    installs.
    """

    usage = '%prog [options] <file> ...'
    usage = "%prog [options] <file> ..."
    ignore_require_venv = True

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            '-a', '--algorithm',
            dest='algorithm',
            "-a",
            "--algorithm",
            dest="algorithm",
            choices=STRONG_HASHES,
            action='store',
            action="store",
            default=FAVORITE_HASH,
            help='The hash algorithm to use: one of {}'.format(
                ', '.join(STRONG_HASHES)))
            help="The hash algorithm to use: one of {}".format(
                ", ".join(STRONG_HASHES)
            ),
        )
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
@@ -41,14 +44,15 @@ class HashCommand(Command):

        algorithm = options.algorithm
        for path in args:
            write_output('%s:\n--hash=%s:%s',
                         path, algorithm, _hash_of_file(path, algorithm))
            write_output(
                "%s:\n--hash=%s:%s", path, algorithm, _hash_of_file(path, algorithm)
            )
        return SUCCESS


def _hash_of_file(path: str, algorithm: str) -> str:
    """Return the hash digest of a file."""
    with open(path, 'rb') as archive:
    with open(path, "rb") as archive:
        hash = hashlib.new(algorithm)
        for chunk in read_chunks(archive):
            hash.update(chunk)

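The same chunked hashing written against only the standard library (not part of the diff; the file name and chunk size are illustrative):

import hashlib

def hash_of_file(path: str, algorithm: str = "sha256") -> str:
    digest = hashlib.new(algorithm)
    with open(path, "rb") as f:
        # Read in fixed-size chunks so large archives never sit in memory.
        for chunk in iter(lambda: f.read(8192), b""):
            digest.update(chunk)
    return digest.hexdigest()

# print(hash_of_file("example-1.0-py3-none-any.whl"))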
@@ -33,7 +33,7 @@ class HelpCommand(Command):
        if guess:
            msg.append(f'maybe you meant "{guess}"')

        raise CommandError(' - '.join(msg))
        raise CommandError(" - ".join(msg))

    command = create_command(cmd_name)
    command.parser.print_help()

@@ -24,6 +24,7 @@ class IndexCommand(IndexGroupCommand):
    Inspect information available from package indexes.
    """

    ignore_require_venv = True
    usage = """
        %prog versions <package>
    """
@@ -44,7 +45,7 @@ class IndexCommand(IndexGroupCommand):
        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[Any]) -> int:
    def run(self, options: Values, args: List[str]) -> int:
        handlers = {
            "versions": self.get_available_package_versions,
        }
@@ -101,7 +102,7 @@ class IndexCommand(IndexGroupCommand):

    def get_available_package_versions(self, options: Values, args: List[Any]) -> None:
        if len(args) != 1:
            raise CommandError('You need to specify exactly one argument')
            raise CommandError("You need to specify exactly one argument")

        target_python = cmdoptions.make_target_python(options)
        query = args[0]
@@ -115,25 +116,24 @@ class IndexCommand(IndexGroupCommand):
        )

        versions: Iterable[Union[LegacyVersion, Version]] = (
            candidate.version
            for candidate in finder.find_all_candidates(query)
            candidate.version for candidate in finder.find_all_candidates(query)
        )

        if not options.pre:
            # Remove prereleases
            versions = (version for version in versions
                        if not version.is_prerelease)
            versions = (
                version for version in versions if not version.is_prerelease
            )
        versions = set(versions)

        if not versions:
            raise DistributionNotFound(
                'No matching distribution found for {}'.format(query))
                "No matching distribution found for {}".format(query)
            )

        formatted_versions = [str(ver) for ver in sorted(
            versions, reverse=True)]
        formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)]
        latest = formatted_versions[0]

        write_output('{} ({})'.format(query, latest))
        write_output('Available versions: {}'.format(
            ', '.join(formatted_versions)))
        write_output("{} ({})".format(query, latest))
        write_output("Available versions: {}".format(", ".join(formatted_versions)))
        print_dist_installation_info(query, latest)

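A sketch of the prerelease filtering above, using pip's vendored packaging library on invented version strings (not part of the diff):

from pip._vendor.packaging.version import Version

versions = {Version(v) for v in ["1.0", "2.0b1", "2.0"]}
stable = sorted((v for v in versions if not v.is_prerelease), reverse=True)
print(", ".join(str(v) for v in stable))  # 2.0, 1.0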
@@ -1,12 +1,13 @@
import errno
import json
import operator
import os
import shutil
import site
from optparse import SUPPRESS_HELP, Values
from typing import Iterable, List, Optional
from typing import List, Optional

from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.rich import print_json

from pip._internal.cache import WheelCache
from pip._internal.cli import cmdoptions
@@ -20,16 +21,19 @@ from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.exceptions import CommandError, InstallationError
from pip._internal.locations import get_scheme
from pip._internal.metadata import get_environment
from pip._internal.models.format_control import FormatControl
from pip._internal.models.installation_report import InstallationReport
from pip._internal.operations.build.build_tracker import get_build_tracker
from pip._internal.operations.check import ConflictDetails, check_install_conflicts
from pip._internal.req import install_given_reqs
from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_tracker import get_requirement_tracker
from pip._internal.req.req_install import (
InstallRequirement,
check_legacy_setup_py_options,
)
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.distutils_args import parse_distutils_args
from pip._internal.utils.filesystem import test_writable_dir
from pip._internal.utils.logging import getLogger
from pip._internal.utils.misc import (
check_externally_managed,
ensure_dir,
get_pip_version,
protect_pip_from_modification_on_windows,
@@ -40,24 +44,11 @@ from pip._internal.utils.virtualenv import (
running_under_virtualenv,
virtualenv_no_global,
)
from pip._internal.wheel_builder import (
BinaryAllowedPredicate,
build,
should_build_for_install_command,
)
from pip._internal.wheel_builder import build, should_build_for_install_command

logger = getLogger(__name__)


def get_check_binary_allowed(format_control: FormatControl) -> BinaryAllowedPredicate:
def check_binary_allowed(req: InstallRequirement) -> bool:
canonical_name = canonicalize_name(req.name or "")
allowed_formats = format_control.get_allowed_formats(canonical_name)
return "binary" in allowed_formats

return check_binary_allowed


class InstallCommand(RequirementCommand):
"""
Install packages from:
@@ -86,95 +77,129 @@ class InstallCommand(RequirementCommand):

self.cmd_opts.add_option(cmdoptions.editable())
self.cmd_opts.add_option(
'-t', '--target',
dest='target_dir',
metavar='dir',
"--dry-run",
action="store_true",
dest="dry_run",
default=False,
help=(
"Don't actually install anything, just print what would be. "
"Can be used in combination with --ignore-installed "
"to 'resolve' the requirements."
),
)
self.cmd_opts.add_option(
"-t",
"--target",
dest="target_dir",
metavar="dir",
default=None,
help='Install packages into <dir>. '
'By default this will not replace existing files/folders in '
'<dir>. Use --upgrade to replace existing packages in <dir> '
'with new versions.'
help=(
"Install packages into <dir>. "
"By default this will not replace existing files/folders in "
"<dir>. Use --upgrade to replace existing packages in <dir> "
"with new versions."
),
)
cmdoptions.add_target_python_options(self.cmd_opts)

self.cmd_opts.add_option(
'--user',
dest='use_user_site',
action='store_true',
help="Install to the Python user install directory for your "
"platform. Typically ~/.local/, or %APPDATA%\\Python on "
"Windows. (See the Python documentation for site.USER_BASE "
"for full details.)")
"--user",
dest="use_user_site",
action="store_true",
help=(
"Install to the Python user install directory for your "
"platform. Typically ~/.local/, or %APPDATA%\\Python on "
"Windows. (See the Python documentation for site.USER_BASE "
"for full details.)"
),
)
self.cmd_opts.add_option(
'--no-user',
dest='use_user_site',
action='store_false',
help=SUPPRESS_HELP)
"--no-user",
dest="use_user_site",
action="store_false",
help=SUPPRESS_HELP,
)
self.cmd_opts.add_option(
'--root',
dest='root_path',
metavar='dir',
"--root",
dest="root_path",
metavar="dir",
default=None,
help="Install everything relative to this alternate root "
"directory.")
help="Install everything relative to this alternate root directory.",
)
self.cmd_opts.add_option(
'--prefix',
dest='prefix_path',
metavar='dir',
"--prefix",
dest="prefix_path",
metavar="dir",
default=None,
help="Installation prefix where lib, bin and other top-level "
"folders are placed")

self.cmd_opts.add_option(cmdoptions.build_dir())
help=(
"Installation prefix where lib, bin and other top-level "
"folders are placed. Note that the resulting installation may "
"contain scripts and other resources which reference the "
"Python interpreter of pip, and not that of ``--prefix``. "
"See also the ``--python`` option if the intention is to "
"install packages into another (possibly pip-free) "
"environment."
),
)

self.cmd_opts.add_option(cmdoptions.src())

self.cmd_opts.add_option(
'-U', '--upgrade',
dest='upgrade',
action='store_true',
help='Upgrade all specified packages to the newest available '
'version. The handling of dependencies depends on the '
'upgrade-strategy used.'
"-U",
"--upgrade",
dest="upgrade",
action="store_true",
help=(
"Upgrade all specified packages to the newest available "
"version. The handling of dependencies depends on the "
"upgrade-strategy used."
),
)

self.cmd_opts.add_option(
'--upgrade-strategy',
dest='upgrade_strategy',
default='only-if-needed',
choices=['only-if-needed', 'eager'],
help='Determines how dependency upgrading should be handled '
'[default: %default]. '
'"eager" - dependencies are upgraded regardless of '
'whether the currently installed version satisfies the '
'requirements of the upgraded package(s). '
'"only-if-needed" - are upgraded only when they do not '
'satisfy the requirements of the upgraded package(s).'
"--upgrade-strategy",
dest="upgrade_strategy",
default="only-if-needed",
choices=["only-if-needed", "eager"],
help=(
"Determines how dependency upgrading should be handled "
"[default: %default]. "
'"eager" - dependencies are upgraded regardless of '
"whether the currently installed version satisfies the "
"requirements of the upgraded package(s). "
'"only-if-needed" - are upgraded only when they do not '
"satisfy the requirements of the upgraded package(s)."
),
)

self.cmd_opts.add_option(
'--force-reinstall',
dest='force_reinstall',
action='store_true',
help='Reinstall all packages even if they are already '
'up-to-date.')
"--force-reinstall",
dest="force_reinstall",
action="store_true",
help="Reinstall all packages even if they are already up-to-date.",
)

self.cmd_opts.add_option(
'-I', '--ignore-installed',
dest='ignore_installed',
action='store_true',
help='Ignore the installed packages, overwriting them. '
'This can break your system if the existing package '
'is of a different version or was installed '
'with a different package manager!'
"-I",
"--ignore-installed",
dest="ignore_installed",
action="store_true",
help=(
"Ignore the installed packages, overwriting them. "
"This can break your system if the existing package "
"is of a different version or was installed "
"with a different package manager!"
),
)

self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
self.cmd_opts.add_option(cmdoptions.no_build_isolation())
self.cmd_opts.add_option(cmdoptions.use_pep517())
self.cmd_opts.add_option(cmdoptions.no_use_pep517())
self.cmd_opts.add_option(cmdoptions.check_build_deps())
self.cmd_opts.add_option(cmdoptions.override_externally_managed())

self.cmd_opts.add_option(cmdoptions.install_options())
self.cmd_opts.add_option(cmdoptions.config_settings())
self.cmd_opts.add_option(cmdoptions.global_options())

self.cmd_opts.add_option(
@@ -206,12 +231,12 @@ class InstallCommand(RequirementCommand):
default=True,
help="Do not warn about broken dependencies",
)

self.cmd_opts.add_option(cmdoptions.no_binary())
self.cmd_opts.add_option(cmdoptions.only_binary())
self.cmd_opts.add_option(cmdoptions.prefer_binary())
self.cmd_opts.add_option(cmdoptions.require_hashes())
self.cmd_opts.add_option(cmdoptions.progress_bar())
self.cmd_opts.add_option(cmdoptions.root_user_action())

index_opts = cmdoptions.make_option_group(
cmdoptions.index_group,
@@ -221,20 +246,50 @@ class InstallCommand(RequirementCommand):
self.parser.insert_option_group(0, index_opts)
self.parser.insert_option_group(0, self.cmd_opts)

self.cmd_opts.add_option(
"--report",
dest="json_report_file",
metavar="file",
default=None,
help=(
"Generate a JSON file describing what pip did to install "
"the provided requirements. "
"Can be used in combination with --dry-run and --ignore-installed "
"to 'resolve' the requirements. "
"When - is used as file name it writes to stdout. "
"When writing to stdout, please combine with the --quiet option "
"to avoid mixing pip logging output with JSON output."
),
)

@with_cleanup
def run(self, options: Values, args: List[str]) -> int:
if options.use_user_site and options.target_dir is not None:
raise CommandError("Can not combine '--user' and '--target'")

cmdoptions.check_install_build_global(options)
# Check whether the environment we're installing into is externally
# managed, as specified in PEP 668. Specifying --root, --target, or
# --prefix disables the check, since there's no reliable way to locate
# the EXTERNALLY-MANAGED file for those cases. An exception is also
# made specifically for "--dry-run --report" for convenience.
installing_into_current_environment = (
not (options.dry_run and options.json_report_file)
and options.root_path is None
and options.target_dir is None
and options.prefix_path is None
)
if (
installing_into_current_environment
and not options.override_externally_managed
):
check_externally_managed()

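# Sketch of the PEP 668 marker check that check_externally_managed() relies
# on (an assumed simplification, not pip's implementation): an environment is
# externally managed when an EXTERNALLY-MANAGED file sits in sysconfig's
# stdlib directory.
import os
import sysconfig

def is_externally_managed() -> bool:  # hypothetical helper, not pip's API
    marker = os.path.join(sysconfig.get_path("stdlib"), "EXTERNALLY-MANAGED")
    return os.path.isfile(marker)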
upgrade_strategy = "to-satisfy-only"
if options.upgrade:
upgrade_strategy = options.upgrade_strategy

cmdoptions.check_dist_restriction(options, check_target=True)

install_options = options.install_options or []

logger.verbose("Using %s", get_pip_version())
options.use_user_site = decide_user_install(
options.use_user_site,
@@ -249,11 +304,14 @@ class InstallCommand(RequirementCommand):
if options.target_dir:
options.ignore_installed = True
options.target_dir = os.path.abspath(options.target_dir)
if (os.path.exists(options.target_dir) and not
os.path.isdir(options.target_dir)):
if (
# fmt: off
os.path.exists(options.target_dir) and
not os.path.isdir(options.target_dir)
# fmt: on
):
raise CommandError(
"Target path exists but is not a directory, will not "
"continue."
"Target path exists but is not a directory, will not continue."
)

# Create a target directory for using with the target option
@@ -272,9 +330,7 @@ class InstallCommand(RequirementCommand):
target_python=target_python,
ignore_requires_python=options.ignore_requires_python,
)
wheel_cache = WheelCache(options.cache_dir, options.format_control)

req_tracker = self.enter_context(get_requirement_tracker())
build_tracker = self.enter_context(get_build_tracker())

directory = TempDirectory(
delete=not options.no_clean,
@@ -284,18 +340,24 @@ class InstallCommand(RequirementCommand):

try:
reqs = self.get_requirements(args, options, finder, session)
check_legacy_setup_py_options(options, reqs)

reject_location_related_install_options(
reqs, options.install_options
)
wheel_cache = WheelCache(options.cache_dir)

# Only when installing is it permitted to use PEP 660.
# In other circumstances (pip wheel, pip download) we generate
# regular (i.e. non editable) metadata and wheels.
for req in reqs:
req.permit_editable_wheels = True

preparer = self.make_requirement_preparer(
temp_build_dir=directory,
options=options,
req_tracker=req_tracker,
build_tracker=build_tracker,
session=session,
finder=finder,
use_user_site=options.use_user_site,
verbosity=self.verbosity,
)
resolver = self.make_resolver(
preparer=preparer,
@@ -316,6 +378,26 @@ class InstallCommand(RequirementCommand):
reqs, check_supported_wheels=not options.target_dir
)

if options.json_report_file:
report = InstallationReport(requirement_set.requirements_to_install)
if options.json_report_file == "-":
print_json(data=report.to_dict())
else:
with open(options.json_report_file, "w", encoding="utf-8") as f:
json.dump(report.to_dict(), f, indent=2, ensure_ascii=False)

if options.dry_run:
would_install_items = sorted(
(r.metadata["name"], r.metadata["version"])
for r in requirement_set.requirements_to_install
)
if would_install_items:
write_output(
"Would install %s",
" ".join("-".join(item) for item in would_install_items),
)
return SUCCESS

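# Consuming the JSON report written above (illustrative; the file name
# "report.json" and the report's "install"/"metadata" keys are assumptions
# based on pip's installation-report format), after something like:
#   python -m pip install --dry-run --ignore-installed --report report.json SomeProject
import json

with open("report.json", encoding="utf-8") as f:
    report = json.load(f)
for item in report.get("install", []):
    print(item["metadata"]["name"], item["metadata"]["version"])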
try:
pip_req = requirement_set.get_requirement("pip")
except KeyError:
@@ -324,19 +406,12 @@ class InstallCommand(RequirementCommand):
# If we're not replacing an already installed pip,
# we're not modifying it.
modifying_pip = pip_req.satisfied_by is None
protect_pip_from_modification_on_windows(
modifying_pip=modifying_pip
)

check_binary_allowed = get_check_binary_allowed(
finder.format_control
)
protect_pip_from_modification_on_windows(modifying_pip=modifying_pip)

reqs_to_build = [
r for r in requirement_set.requirements.values()
if should_build_for_install_command(
r, check_binary_allowed
)
r
for r in requirement_set.requirements.values()
if should_build_for_install_command(r)
]

_, build_failures = build(
@@ -344,39 +419,23 @@ class InstallCommand(RequirementCommand):
wheel_cache=wheel_cache,
verify=True,
build_options=[],
global_options=[],
global_options=global_options,
)

# If we're using PEP 517, we cannot do a direct install
# so we fail here.
pep517_build_failure_names: List[str] = [
r.name # type: ignore
for r in build_failures if r.use_pep517
]
if pep517_build_failure_names:
if build_failures:
raise InstallationError(
"Could not build wheels for {} which use"
" PEP 517 and cannot be installed directly".format(
", ".join(pep517_build_failure_names)
"Could not build wheels for {}, which is required to "
"install pyproject.toml-based projects".format(
", ".join(r.name for r in build_failures) # type: ignore
)
)

# For now, we just warn about failures building legacy
# requirements, as we'll fall through to a direct
# install for those.
for r in build_failures:
if not r.use_pep517:
r.legacy_install_reason = 8368

to_install = resolver.get_installation_order(
requirement_set
)
to_install = resolver.get_installation_order(requirement_set)

# Check for conflicts in the package set we're installing.
conflicts: Optional[ConflictDetails] = None
should_warn_about_conflicts = (
not options.ignore_dependencies and
options.warn_about_conflicts
not options.ignore_dependencies and options.warn_about_conflicts
)
if should_warn_about_conflicts:
conflicts = self._determine_conflicts(to_install)
@@ -389,7 +448,6 @@ class InstallCommand(RequirementCommand):

installed = install_given_reqs(
to_install,
install_options,
global_options,
root=options.root_path,
home=target_temp_dir_path,
@@ -408,7 +466,7 @@ class InstallCommand(RequirementCommand):
)
env = get_environment(lib_locations)

installed.sort(key=operator.attrgetter('name'))
installed.sort(key=operator.attrgetter("name"))
items = []
for result in installed:
item = result.name
@@ -426,16 +484,19 @@ class InstallCommand(RequirementCommand):
resolver_variant=self.determine_resolver_variant(options),
)

installed_desc = ' '.join(items)
installed_desc = " ".join(items)
if installed_desc:
write_output(
'Successfully installed %s', installed_desc,
"Successfully installed %s",
installed_desc,
)
except OSError as error:
show_traceback = (self.verbosity >= 1)
show_traceback = self.verbosity >= 1

message = create_os_error_message(
error, show_traceback, options.use_user_site,
error,
show_traceback,
options.use_user_site,
)
logger.error(message, exc_info=show_traceback) # noqa

@@ -446,8 +507,8 @@ class InstallCommand(RequirementCommand):
self._handle_target_dir(
options.target_dir, target_temp_dir, options.upgrade
)

warn_if_run_as_root()
if options.root_user_action == "warn":
warn_if_run_as_root()
return SUCCESS

def _handle_target_dir(
@@ -461,7 +522,7 @@ class InstallCommand(RequirementCommand):

# Checking both purelib and platlib directories for installed
# packages to be moved to target directory
scheme = get_scheme('', home=target_temp_dir.path)
scheme = get_scheme("", home=target_temp_dir.path)
purelib_dir = scheme.purelib
platlib_dir = scheme.platlib
data_dir = scheme.data
@@ -483,18 +544,18 @@ class InstallCommand(RequirementCommand):
if os.path.exists(target_item_dir):
if not upgrade:
logger.warning(
'Target directory %s already exists. Specify '
'--upgrade to force replacement.',
target_item_dir
"Target directory %s already exists. Specify "
"--upgrade to force replacement.",
target_item_dir,
)
continue
if os.path.islink(target_item_dir):
logger.warning(
'Target directory %s already exists and is '
'a link. pip will not automatically replace '
'links, please remove if replacement is '
'desired.',
target_item_dir
"Target directory %s already exists and is "
"a link. pip will not automatically replace "
"links, please remove if replacement is "
"desired.",
target_item_dir,
)
continue
if os.path.isdir(target_item_dir):
@@ -502,10 +563,7 @@ class InstallCommand(RequirementCommand):
else:
os.remove(target_item_dir)

shutil.move(
os.path.join(lib_dir, item),
target_item_dir
)
shutil.move(os.path.join(lib_dir, item), target_item_dir)

def _determine_conflicts(
self, to_install: List[InstallRequirement]
@@ -567,7 +625,7 @@ class InstallCommand(RequirementCommand):
requirement=req,
dep_name=dep_name,
dep_version=dep_version,
you=("you" if resolver_variant == "2020-resolver" else "you'll")
you=("you" if resolver_variant == "2020-resolver" else "you'll"),
)
parts.append(message)

@@ -575,14 +633,14 @@


def get_lib_location_guesses(
user: bool = False,
home: Optional[str] = None,
root: Optional[str] = None,
isolated: bool = False,
prefix: Optional[str] = None
user: bool = False,
home: Optional[str] = None,
root: Optional[str] = None,
isolated: bool = False,
prefix: Optional[str] = None,
) -> List[str]:
scheme = get_scheme(
'',
"",
user=user,
home=home,
root=root,
@@ -594,8 +652,8 @@ def get_lib_location_guesses(

def site_packages_writable(root: Optional[str], isolated: bool) -> bool:
return all(
test_writable_dir(d) for d in set(
get_lib_location_guesses(root=root, isolated=isolated))
test_writable_dir(d)
for d in set(get_lib_location_guesses(root=root, isolated=isolated))
)
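# Minimal stand-in for the writability probe above (an assumed
# simplification; pip's test_writable_dir actually attempts a file creation
# rather than trusting os.access on every platform):
import os

def writable_dir(path: str) -> bool:  # hypothetical helper
    return os.path.isdir(path) and os.access(path, os.W_OK)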


@@ -653,51 +711,11 @@ def decide_user_install(
logger.debug("Non-user install because site-packages writeable")
return False

logger.info("Defaulting to user installation because normal site-packages "
"is not writeable")
return True


def reject_location_related_install_options(
requirements: List[InstallRequirement], options: Optional[List[str]]
) -> None:
"""If any location-changing --install-option arguments were passed for
requirements or on the command-line, then show a deprecation warning.
"""
def format_options(option_names: Iterable[str]) -> List[str]:
return ["--{}".format(name.replace("_", "-")) for name in option_names]

offenders = []

for requirement in requirements:
install_options = requirement.install_options
location_options = parse_distutils_args(install_options)
if location_options:
offenders.append(
"{!r} from {}".format(
format_options(location_options.keys()), requirement
)
)

if options:
location_options = parse_distutils_args(options)
if location_options:
offenders.append(
"{!r} from command line".format(
format_options(location_options.keys())
)
)

if not offenders:
return

raise CommandError(
"Location-changing options found in --install-option: {}."
" This is unsupported, use pip-level options like --user,"
" --prefix, --root, and --target instead.".format(
"; ".join(offenders)
)
logger.info(
"Defaulting to user installation because normal site-packages "
"is not writeable"
)
return True


def create_os_error_message(
@@ -727,18 +745,25 @@ def create_os_error_message(
permissions_part = "Check the permissions"

if not running_under_virtualenv() and not using_user_site:
parts.extend([
user_option_part, " or ",
permissions_part.lower(),
])
parts.extend(
[
user_option_part,
" or ",
permissions_part.lower(),
]
)
else:
parts.append(permissions_part)
parts.append(".\n")

# Suggest the user to enable Long Paths if path length is
# more than 260
if (WINDOWS and error.errno == errno.ENOENT and error.filename and
len(error.filename) > 260):
if (
WINDOWS
and error.errno == errno.ENOENT
and error.filename
and len(error.filename) > 260
):
parts.append(
"HINT: This error might have occurred since "
"this system does not have Windows Long Path "

@@ -1,7 +1,7 @@
import json
import logging
from optparse import Values
from typing import TYPE_CHECKING, Iterator, List, Optional, Sequence, Tuple, cast
from typing import TYPE_CHECKING, Generator, List, Optional, Sequence, Tuple, cast

from pip._vendor.packaging.utils import canonicalize_name

@@ -14,8 +14,8 @@ from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata import BaseDistribution, get_environment
from pip._internal.models.selection_prefs import SelectionPreferences
from pip._internal.network.session import PipSession
from pip._internal.utils.misc import stdlib_pkgs, tabulate, write_output
from pip._internal.utils.parallel import map_multithread
from pip._internal.utils.compat import stdlib_pkgs
from pip._internal.utils.misc import tabulate, write_output

if TYPE_CHECKING:
from pip._internal.metadata.base import DistributionVersion
@@ -26,6 +26,7 @@ if TYPE_CHECKING:
These will be populated during ``get_outdated()``. This is dirty but
makes the rest of the code much cleaner.
"""

latest_version: DistributionVersion
latest_filetype: str

@@ -48,77 +49,85 @@ class ListCommand(IndexGroupCommand):

def add_options(self) -> None:
self.cmd_opts.add_option(
'-o', '--outdated',
action='store_true',
"-o",
"--outdated",
action="store_true",
default=False,
help='List outdated packages')
self.cmd_opts.add_option(
'-u', '--uptodate',
action='store_true',
default=False,
help='List uptodate packages')
self.cmd_opts.add_option(
'-e', '--editable',
action='store_true',
default=False,
help='List editable projects.')
self.cmd_opts.add_option(
'-l', '--local',
action='store_true',
default=False,
help=('If in a virtualenv that has global access, do not list '
'globally-installed packages.'),
help="List outdated packages",
)
self.cmd_opts.add_option(
'--user',
dest='user',
action='store_true',
"-u",
"--uptodate",
action="store_true",
default=False,
help='Only output packages installed in user-site.')
help="List uptodate packages",
)
self.cmd_opts.add_option(
"-e",
"--editable",
action="store_true",
default=False,
help="List editable projects.",
)
self.cmd_opts.add_option(
"-l",
"--local",
action="store_true",
default=False,
help=(
"If in a virtualenv that has global access, do not list "
"globally-installed packages."
),
)
self.cmd_opts.add_option(
"--user",
dest="user",
action="store_true",
default=False,
help="Only output packages installed in user-site.",
)
self.cmd_opts.add_option(cmdoptions.list_path())
self.cmd_opts.add_option(
'--pre',
action='store_true',
"--pre",
action="store_true",
default=False,
help=("Include pre-release and development versions. By default, "
"pip only finds stable versions."),
help=(
"Include pre-release and development versions. By default, "
"pip only finds stable versions."
),
)

self.cmd_opts.add_option(
'--format',
action='store',
dest='list_format',
"--format",
action="store",
dest="list_format",
default="columns",
choices=('columns', 'freeze', 'json'),
help="Select the output format among: columns (default), freeze, "
"or json",
choices=("columns", "freeze", "json"),
help="Select the output format among: columns (default), freeze, or json",
)

self.cmd_opts.add_option(
'--not-required',
action='store_true',
dest='not_required',
help="List packages that are not dependencies of "
"installed packages.",
"--not-required",
action="store_true",
dest="not_required",
help="List packages that are not dependencies of installed packages.",
)

self.cmd_opts.add_option(
'--exclude-editable',
action='store_false',
dest='include_editable',
help='Exclude editable package from output.',
"--exclude-editable",
action="store_false",
dest="include_editable",
help="Exclude editable package from output.",
)
self.cmd_opts.add_option(
'--include-editable',
action='store_true',
dest='include_editable',
help='Include editable package from output.',
"--include-editable",
action="store_true",
dest="include_editable",
help="Include editable package from output.",
default=True,
)
self.cmd_opts.add_option(cmdoptions.list_exclude())
index_opts = cmdoptions.make_option_group(
cmdoptions.index_group, self.parser
)
index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser)

self.parser.insert_option_group(0, index_opts)
self.parser.insert_option_group(0, self.cmd_opts)
@@ -144,8 +153,12 @@ class ListCommand(IndexGroupCommand):

def run(self, options: Values, args: List[str]) -> int:
if options.outdated and options.uptodate:
raise CommandError("Options --outdated and --uptodate cannot be combined.")

if options.outdated and options.list_format == "freeze":
raise CommandError(
"Options --outdated and --uptodate cannot be combined.")
"List format 'freeze' can not be used with the --outdated option."
)

cmdoptions.check_list_path_option(options)

@@ -183,7 +196,8 @@ class ListCommand(IndexGroupCommand):
self, packages: "_ProcessedDists", options: Values
) -> "_ProcessedDists":
return [
dist for dist in self.iter_packages_latest_infos(packages, options)
dist
for dist in self.iter_packages_latest_infos(packages, options)
if dist.latest_version > dist.version
]

@@ -191,7 +205,8 @@ class ListCommand(IndexGroupCommand):
self, packages: "_ProcessedDists", options: Values
) -> "_ProcessedDists":
return [
dist for dist in self.iter_packages_latest_infos(packages, options)
dist
for dist in self.iter_packages_latest_infos(packages, options)
if dist.latest_version == dist.version
]

@@ -211,18 +226,21 @@ class ListCommand(IndexGroupCommand):

def iter_packages_latest_infos(
self, packages: "_ProcessedDists", options: Values
) -> Iterator["_DistWithLatestInfo"]:
) -> Generator["_DistWithLatestInfo", None, None]:
with self._build_session(options) as session:
finder = self._build_package_finder(options, session)

def latest_info(
dist: "_DistWithLatestInfo"
dist: "_DistWithLatestInfo",
) -> Optional["_DistWithLatestInfo"]:
all_candidates = finder.find_all_candidates(dist.canonical_name)
if not options.pre:
# Remove prereleases
all_candidates = [candidate for candidate in all_candidates
if not candidate.version.is_prerelease]
all_candidates = [
candidate
for candidate in all_candidates
if not candidate.version.is_prerelease
]

evaluator = finder.make_candidate_evaluator(
project_name=dist.canonical_name,
@@ -233,14 +251,14 @@ class ListCommand(IndexGroupCommand):

remote_version = best_candidate.version
if best_candidate.link.is_wheel:
typ = 'wheel'
typ = "wheel"
else:
typ = 'sdist'
typ = "sdist"
dist.latest_version = remote_version
dist.latest_filetype = typ
return dist

for dist in map_multithread(latest_info, packages):
for dist in map(latest_info, packages):
if dist is not None:
yield dist

@@ -251,17 +269,18 @@ class ListCommand(IndexGroupCommand):
packages,
key=lambda dist: dist.canonical_name,
)
if options.list_format == 'columns' and packages:
if options.list_format == "columns" and packages:
data, header = format_for_columns(packages, options)
self.output_package_listing_columns(data, header)
elif options.list_format == 'freeze':
elif options.list_format == "freeze":
for dist in packages:
if options.verbose >= 1:
write_output("%s==%s (%s)", dist.raw_name,
dist.version, dist.location)
write_output(
"%s==%s (%s)", dist.raw_name, dist.version, dist.location
)
else:
write_output("%s==%s", dist.raw_name, dist.version)
elif options.list_format == 'json':
elif options.list_format == "json":
write_output(format_for_json(packages, options))

def output_package_listing_columns(
@@ -275,7 +294,7 @@ class ListCommand(IndexGroupCommand):

# Create and add a separator.
if len(data) > 0:
pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes)))
pkg_strings.insert(1, " ".join(map(lambda x: "-" * x, sizes)))

for val in pkg_strings:
write_output(val)
@@ -288,19 +307,22 @@ def format_for_columns(
Convert the package data into something usable
by output_package_listing_columns.
"""
running_outdated = options.outdated
# Adjust the header for the `pip list --outdated` case.
if running_outdated:
header = ["Package", "Version", "Latest", "Type"]
else:
header = ["Package", "Version"]
header = ["Package", "Version"]

data = []
if options.verbose >= 1 or any(x.editable for x in pkgs):
running_outdated = options.outdated
if running_outdated:
header.extend(["Latest", "Type"])

has_editables = any(x.editable for x in pkgs)
if has_editables:
header.append("Editable project location")

if options.verbose >= 1:
header.append("Location")
if options.verbose >= 1:
header.append("Installer")

data = []
for proj in pkgs:
# if we're working on the 'outdated' list, separate out the
# latest_version and type
@@ -310,7 +332,10 @@ def format_for_columns(
row.append(str(proj.latest_version))
row.append(proj.latest_filetype)

if options.verbose >= 1 or proj.editable:
if has_editables:
row.append(proj.editable_project_location or "")

if options.verbose >= 1:
row.append(proj.location or "")
if options.verbose >= 1:
row.append(proj.installer)
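# Re-derivation of the header logic above as a self-contained function
# (illustrative only; the boolean parameters stand in for pip's Values
# object):
def build_header(outdated: bool, verbose: bool, has_editables: bool) -> list:
    header = ["Package", "Version"]
    if outdated:
        header.extend(["Latest", "Type"])
    if has_editables:
        header.append("Editable project location")
    if verbose:
        header.extend(["Location", "Installer"])
    return header

assert build_header(True, True, True) == [
    "Package", "Version", "Latest", "Type",
    "Editable project location", "Location", "Installer",
]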
@@ -324,14 +349,17 @@ def format_for_json(packages: "_ProcessedDists", options: Values) -> str:
data = []
for dist in packages:
info = {
'name': dist.raw_name,
'version': str(dist.version),
"name": dist.raw_name,
"version": str(dist.version),
}
if options.verbose >= 1:
info['location'] = dist.location or ""
info['installer'] = dist.installer
info["location"] = dist.location or ""
info["installer"] = dist.installer
if options.outdated:
info['latest_version'] = str(dist.latest_version)
info['latest_filetype'] = dist.latest_filetype
info["latest_version"] = str(dist.latest_version)
info["latest_filetype"] = dist.latest_filetype
editable_project_location = dist.editable_project_location
if editable_project_location:
info["editable_project_location"] = editable_project_location
data.append(info)
return json.dumps(data)

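# Usage sketch, not part of this commit: the JSON built above is what
# `pip list --format=json` prints, so it can be consumed like this:
import json
import subprocess

out = subprocess.run(
    ["python", "-m", "pip", "list", "--format=json"],
    check=True, capture_output=True, text=True,
).stdout
for pkg in json.loads(out):
    print(pkg["name"], pkg["version"])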
@@ -27,6 +27,7 @@ if TYPE_CHECKING:
summary: str
versions: List[str]


logger = logging.getLogger(__name__)


@@ -39,17 +40,19 @@ class SearchCommand(Command, SessionCommandMixin):

def add_options(self) -> None:
self.cmd_opts.add_option(
'-i', '--index',
dest='index',
metavar='URL',
"-i",
"--index",
dest="index",
metavar="URL",
default=PyPI.pypi_url,
help='Base URL of Python Package Index (default %default)')
help="Base URL of Python Package Index (default %default)",
)

self.parser.insert_option_group(0, self.cmd_opts)

def run(self, options: Values, args: List[str]) -> int:
if not args:
raise CommandError('Missing required argument (search query).')
raise CommandError("Missing required argument (search query).")
query = args
pypi_hits = self.search(query, options)
hits = transform_hits(pypi_hits)
@@ -71,7 +74,7 @@ class SearchCommand(Command, SessionCommandMixin):
transport = PipXmlrpcTransport(index_url, session)
pypi = xmlrpc.client.ServerProxy(index_url, transport)
try:
hits = pypi.search({'name': query, 'summary': query}, 'or')
hits = pypi.search({"name": query, "summary": query}, "or")
except xmlrpc.client.Fault as fault:
message = "XMLRPC request failed [code: {code}]\n{string}".format(
code=fault.faultCode,
@@ -90,22 +93,22 @@ def transform_hits(hits: List[Dict[str, str]]) -> List["TransformedHit"]:
"""
packages: Dict[str, "TransformedHit"] = OrderedDict()
for hit in hits:
name = hit['name']
summary = hit['summary']
version = hit['version']
name = hit["name"]
summary = hit["summary"]
version = hit["version"]

if name not in packages.keys():
packages[name] = {
'name': name,
'summary': summary,
'versions': [version],
"name": name,
"summary": summary,
"versions": [version],
}
else:
packages[name]['versions'].append(version)
packages[name]["versions"].append(version)

# if this is the highest version, replace summary and score
if version == highest_version(packages[name]['versions']):
packages[name]['summary'] = summary
if version == highest_version(packages[name]["versions"]):
packages[name]["summary"] = summary

return list(packages.values())

@@ -116,14 +119,17 @@ def print_dist_installation_info(name: str, latest: str) -> None:
if dist is not None:
with indent_log():
if dist.version == latest:
write_output('INSTALLED: %s (latest)', dist.version)
write_output("INSTALLED: %s (latest)", dist.version)
else:
write_output('INSTALLED: %s', dist.version)
write_output("INSTALLED: %s", dist.version)
if parse_version(latest).pre:
write_output('LATEST: %s (pre-release; install'
' with "pip install --pre")', latest)
write_output(
"LATEST: %s (pre-release; install"
" with `pip install --pre`)",
latest,
)
else:
write_output('LATEST: %s', latest)
write_output("LATEST: %s", latest)


def print_results(
@@ -134,25 +140,29 @@ def print_results(
if not hits:
return
if name_column_width is None:
name_column_width = max([
len(hit['name']) + len(highest_version(hit.get('versions', ['-'])))
for hit in hits
]) + 4
name_column_width = (
max(
[
len(hit["name"]) + len(highest_version(hit.get("versions", ["-"])))
for hit in hits
]
)
+ 4
)

for hit in hits:
name = hit['name']
summary = hit['summary'] or ''
latest = highest_version(hit.get('versions', ['-']))
name = hit["name"]
summary = hit["summary"] or ""
latest = highest_version(hit.get("versions", ["-"]))
if terminal_width is not None:
target_width = terminal_width - name_column_width - 5
if target_width > 10:
# wrap and indent summary to fit terminal
summary_lines = textwrap.wrap(summary, target_width)
summary = ('\n' + ' ' * (name_column_width + 3)).join(
summary_lines)
summary = ("\n" + " " * (name_column_width + 3)).join(summary_lines)

name_latest = f'{name} ({latest})'
line = f'{name_latest:{name_column_width}} - {summary}'
name_latest = f"{name} ({latest})"
line = f"{name_latest:{name_column_width}} - {summary}"
try:
write_output(line)
print_dist_installation_info(name, latest)

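# Worked example of the column-width rule above on made-up hits: the width is
# the longest name-plus-latest-version pair, plus 4 for padding. The last
# list entry stands in for highest_version() since each list is sorted here.
hits = [
    {"name": "pip", "versions": ["23.1.2"]},    # 3 + 6 = 9
    {"name": "wheel", "versions": ["0.40.0"]},  # 5 + 6 = 11
]
name_column_width = max(
    len(h["name"]) + len(h["versions"][-1]) for h in hits
) + 4
assert name_column_width == 15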
@@ -1,8 +1,6 @@
import csv
import logging
import pathlib
from optparse import Values
from typing import Iterator, List, NamedTuple, Optional, Tuple
from typing import Generator, Iterable, Iterator, List, NamedTuple, Optional

from pip._vendor.packaging.utils import canonicalize_name

@@ -27,23 +25,26 @@ class ShowCommand(Command):

def add_options(self) -> None:
self.cmd_opts.add_option(
'-f', '--files',
dest='files',
action='store_true',
"-f",
"--files",
dest="files",
action="store_true",
default=False,
help='Show the full list of installed files for each package.')
help="Show the full list of installed files for each package.",
)

self.parser.insert_option_group(0, self.cmd_opts)

def run(self, options: Values, args: List[str]) -> int:
if not args:
logger.warning('ERROR: Please provide a package name or names.')
logger.warning("ERROR: Please provide a package name or names.")
return ERROR
query = args

results = search_packages_info(query)
if not print_results(
results, list_files=options.files, verbose=options.verbose):
results, list_files=options.files, verbose=options.verbose
):
return ERROR
return SUCCESS

@@ -52,6 +53,7 @@ class _PackageInfo(NamedTuple):
name: str
version: str
location: str
editable_project_location: Optional[str]
requires: List[str]
required_by: List[str]
installer: str
@@ -59,6 +61,7 @@ class _PackageInfo(NamedTuple):
classifiers: List[str]
summary: str
homepage: str
project_urls: List[str]
author: str
author_email: str
license: str
@@ -66,34 +69,7 @@ class _PackageInfo(NamedTuple):
files: Optional[List[str]]


def _covert_legacy_entry(entry: Tuple[str, ...], info: Tuple[str, ...]) -> str:
"""Convert a legacy installed-files.txt path into modern RECORD path.

The legacy format stores paths relative to the info directory, while the
modern format stores paths relative to the package root, e.g. the
site-packages directory.

:param entry: Path parts of the installed-files.txt entry.
:param info: Path parts of the egg-info directory relative to package root.
:returns: The converted entry.

For best compatibility with symlinks, this does not use ``abspath()`` or
``Path.resolve()``, but tries to work with path parts:

1. While ``entry`` starts with ``..``, remove the equal amounts of parts
from ``info``; if ``info`` is empty, start appending ``..`` instead.
2. Join the two directly.
"""
while entry and entry[0] == "..":
if not info or info[-1] == "..":
info += ("..",)
else:
info = info[:-1]
entry = entry[1:]
return str(pathlib.Path(*info, *entry))
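# Worked example of the conversion algorithm removed above (inputs assumed):
# entry ("..", "..", "lib", "mod.py") with info ("pkg-1.0.egg-info",) first
# cancels one ".." against the info part, then keeps the second ".." because
# info is exhausted, giving "../lib/mod.py" ("..\\lib\\mod.py" on Windows).
import pathlib

entry = ("..", "..", "lib", "mod.py")
info = ("pkg-1.0.egg-info",)
while entry and entry[0] == "..":
    if not info or info[-1] == "..":
        info += ("..",)
    else:
        info = info[:-1]
    entry = entry[1:]
assert pathlib.Path(*info, *entry) == pathlib.Path("..", "lib", "mod.py")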
|
||||
|
||||
def search_packages_info(query: List[str]) -> Iterator[_PackageInfo]:
|
||||
def search_packages_info(query: List[str]) -> Generator[_PackageInfo, None, None]:
|
||||
"""
|
||||
Gather details from installed distributions. Print distribution name,
|
||||
version, location, and installed files. Installed files requires a
|
||||
@@ -102,53 +78,20 @@ def search_packages_info(query: List[str]) -> Iterator[_PackageInfo]:
|
||||
"""
|
||||
env = get_default_environment()
|
||||
|
||||
installed = {
|
||||
dist.canonical_name: dist
|
||||
for dist in env.iter_distributions()
|
||||
}
|
||||
installed = {dist.canonical_name: dist for dist in env.iter_all_distributions()}
|
||||
query_names = [canonicalize_name(name) for name in query]
|
||||
missing = sorted(
|
||||
[name for name, pkg in zip(query, query_names) if pkg not in installed]
|
||||
)
|
||||
if missing:
|
||||
logger.warning('Package(s) not found: %s', ', '.join(missing))
|
||||
logger.warning("Package(s) not found: %s", ", ".join(missing))
|
||||
|
||||
def _get_requiring_packages(current_dist: BaseDistribution) -> List[str]:
|
||||
return [
|
||||
def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]:
|
||||
return (
|
||||
dist.metadata["Name"] or "UNKNOWN"
|
||||
for dist in installed.values()
|
||||
if current_dist.canonical_name in {
|
||||
canonicalize_name(d.name) for d in dist.iter_dependencies()
|
||||
}
|
||||
]
|
||||
|
||||
def _files_from_record(dist: BaseDistribution) -> Optional[Iterator[str]]:
|
||||
try:
|
||||
text = dist.read_text('RECORD')
|
||||
except FileNotFoundError:
|
||||
return None
|
||||
# This extra Path-str cast normalizes entries.
|
||||
return (str(pathlib.Path(row[0])) for row in csv.reader(text.splitlines()))
|
||||
|
||||
def _files_from_legacy(dist: BaseDistribution) -> Optional[Iterator[str]]:
|
||||
try:
|
||||
text = dist.read_text('installed-files.txt')
|
||||
except FileNotFoundError:
|
||||
return None
|
||||
paths = (p for p in text.splitlines(keepends=False) if p)
|
||||
root = dist.location
|
||||
info = dist.info_directory
|
||||
if root is None or info is None:
|
||||
return paths
|
||||
try:
|
||||
info_rel = pathlib.Path(info).relative_to(root)
|
||||
except ValueError: # info is not relative to root.
|
||||
return paths
|
||||
if not info_rel.parts: # info *is* root.
|
||||
return paths
|
||||
return (
|
||||
_covert_legacy_entry(pathlib.Path(p).parts, info_rel.parts)
|
||||
for p in paths
|
||||
if current_dist.canonical_name
|
||||
in {canonicalize_name(d.name) for d in dist.iter_dependencies()}
|
||||
)
|
||||
|
||||
for query_name in query_names:
|
||||
@@ -157,13 +100,16 @@ def search_packages_info(query: List[str]) -> Iterator[_PackageInfo]:
|
||||
except KeyError:
|
||||
continue
|
||||
|
||||
requires = sorted((req.name for req in dist.iter_dependencies()), key=str.lower)
|
||||
required_by = sorted(_get_requiring_packages(dist), key=str.lower)
|
||||
|
||||
try:
|
||||
entry_points_text = dist.read_text('entry_points.txt')
|
||||
entry_points_text = dist.read_text("entry_points.txt")
|
||||
entry_points = entry_points_text.splitlines(keepends=False)
|
||||
except FileNotFoundError:
|
||||
entry_points = []
|
||||
|
||||
files_iter = _files_from_record(dist) or _files_from_legacy(dist)
|
||||
files_iter = dist.iter_declared_entries()
|
||||
if files_iter is None:
|
||||
files: Optional[List[str]] = None
|
||||
else:
|
||||
@@ -175,13 +121,15 @@ def search_packages_info(query: List[str]) -> Iterator[_PackageInfo]:
|
||||
name=dist.raw_name,
|
||||
version=str(dist.version),
|
||||
location=dist.location or "",
|
||||
requires=[req.name for req in dist.iter_dependencies()],
|
||||
required_by=_get_requiring_packages(dist),
|
||||
editable_project_location=dist.editable_project_location,
|
||||
requires=requires,
|
||||
required_by=required_by,
|
||||
installer=dist.installer,
|
||||
metadata_version=dist.metadata_version or "",
|
||||
classifiers=metadata.get_all("Classifier", []),
|
||||
summary=metadata.get("Summary", ""),
|
||||
homepage=metadata.get("Home-page", ""),
|
||||
project_urls=metadata.get_all("Project-URL", []),
|
||||
author=metadata.get("Author", ""),
|
||||
author_email=metadata.get("Author-email", ""),
|
||||
license=metadata.get("License", ""),
|
||||
@@ -191,7 +139,7 @@ def search_packages_info(query: List[str]) -> Iterator[_PackageInfo]:
|
||||
|
||||
|
||||
def print_results(
|
||||
distributions: Iterator[_PackageInfo],
|
||||
distributions: Iterable[_PackageInfo],
|
||||
list_files: bool,
|
||||
verbose: bool,
|
||||
) -> bool:
|
||||
@@ -212,8 +160,12 @@ def print_results(
|
||||
write_output("Author-email: %s", dist.author_email)
|
||||
write_output("License: %s", dist.license)
|
||||
write_output("Location: %s", dist.location)
|
||||
write_output("Requires: %s", ', '.join(dist.requires))
|
||||
write_output("Required-by: %s", ', '.join(dist.required_by))
|
||||
if dist.editable_project_location is not None:
|
||||
write_output(
|
||||
"Editable project location: %s", dist.editable_project_location
|
||||
)
|
||||
write_output("Requires: %s", ", ".join(dist.requires))
|
||||
write_output("Required-by: %s", ", ".join(dist.required_by))
|
||||
|
||||
if verbose:
|
||||
write_output("Metadata-Version: %s", dist.metadata_version)
|
||||
@@ -224,6 +176,9 @@ def print_results(
|
||||
write_output("Entry-points:")
|
||||
for entry in dist.entry_points:
|
||||
write_output(" %s", entry.strip())
|
||||
write_output("Project-URLs:")
|
||||
for project_url in dist.project_urls:
|
||||
write_output(" %s", project_url)
|
||||
if list_files:
|
||||
write_output("Files:")
|
||||
if dist.files is None:
|
||||
|
||||
@@ -4,6 +4,7 @@ from typing import List
|
||||
|
||||
from pip._vendor.packaging.utils import canonicalize_name
|
||||
|
||||
from pip._internal.cli import cmdoptions
|
||||
from pip._internal.cli.base_command import Command
|
||||
from pip._internal.cli.req_command import SessionCommandMixin, warn_if_run_as_root
|
||||
from pip._internal.cli.status_codes import SUCCESS
|
||||
@@ -13,7 +14,10 @@ from pip._internal.req.constructors import (
|
||||
install_req_from_line,
|
||||
install_req_from_parsed_requirement,
|
||||
)
|
||||
from pip._internal.utils.misc import protect_pip_from_modification_on_windows
|
||||
from pip._internal.utils.misc import (
|
||||
check_externally_managed,
|
||||
protect_pip_from_modification_on_windows,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -35,20 +39,26 @@ class UninstallCommand(Command, SessionCommandMixin):
|
||||
|
||||
def add_options(self) -> None:
|
||||
self.cmd_opts.add_option(
|
||||
'-r', '--requirement',
|
||||
dest='requirements',
|
||||
action='append',
|
||||
"-r",
|
||||
"--requirement",
|
||||
dest="requirements",
|
||||
action="append",
|
||||
default=[],
|
||||
metavar='file',
|
||||
help='Uninstall all the packages listed in the given requirements '
|
||||
'file. This option can be used multiple times.',
|
||||
metavar="file",
|
||||
help=(
|
||||
"Uninstall all the packages listed in the given requirements "
|
||||
"file. This option can be used multiple times."
|
||||
),
|
||||
)
|
||||
self.cmd_opts.add_option(
|
||||
'-y', '--yes',
|
||||
dest='yes',
|
||||
action='store_true',
|
||||
help="Don't ask for confirmation of uninstall deletions.")
|
||||
|
||||
"-y",
|
||||
"--yes",
|
||||
dest="yes",
|
||||
action="store_true",
|
||||
help="Don't ask for confirmation of uninstall deletions.",
|
||||
)
|
||||
self.cmd_opts.add_option(cmdoptions.root_user_action())
|
||||
self.cmd_opts.add_option(cmdoptions.override_externally_managed())
|
||||
self.parser.insert_option_group(0, self.cmd_opts)
|
||||
|
||||
def run(self, options: Values, args: List[str]) -> int:
|
||||
@@ -57,7 +67,8 @@ class UninstallCommand(Command, SessionCommandMixin):
|
||||
reqs_to_uninstall = {}
|
||||
for name in args:
|
||||
req = install_req_from_line(
|
||||
name, isolated=options.isolated_mode,
|
||||
name,
|
||||
isolated=options.isolated_mode,
|
||||
)
|
||||
if req.name:
|
||||
reqs_to_uninstall[canonicalize_name(req.name)] = req
|
||||
@@ -70,31 +81,33 @@ class UninstallCommand(Command, SessionCommandMixin):
|
||||
)
|
||||
for filename in options.requirements:
|
||||
for parsed_req in parse_requirements(
|
||||
filename,
|
||||
options=options,
|
||||
session=session):
|
||||
filename, options=options, session=session
|
||||
):
|
||||
req = install_req_from_parsed_requirement(
|
||||
parsed_req,
|
||||
isolated=options.isolated_mode
|
||||
parsed_req, isolated=options.isolated_mode
|
||||
)
|
||||
if req.name:
|
||||
reqs_to_uninstall[canonicalize_name(req.name)] = req
|
||||
if not reqs_to_uninstall:
|
||||
raise InstallationError(
|
||||
f'You must give at least one requirement to {self.name} (see '
|
||||
f"You must give at least one requirement to {self.name} (see "
|
||||
f'"pip help {self.name}")'
|
||||
)
|
||||
|
||||
if not options.override_externally_managed:
|
||||
check_externally_managed()
|
||||
|
||||
protect_pip_from_modification_on_windows(
|
||||
modifying_pip="pip" in reqs_to_uninstall
|
||||
)
|
||||
|
||||
for req in reqs_to_uninstall.values():
|
||||
uninstall_pathset = req.uninstall(
|
||||
auto_confirm=options.yes, verbose=self.verbosity > 0,
|
||||
auto_confirm=options.yes,
|
||||
verbose=self.verbosity > 0,
|
||||
)
|
||||
if uninstall_pathset:
|
||||
uninstall_pathset.commit()
|
||||
|
||||
warn_if_run_as_root()
|
||||
if options.root_user_action == "warn":
|
||||
warn_if_run_as_root()
|
||||
return SUCCESS
|
||||
|
||||
@@ -9,8 +9,11 @@ from pip._internal.cli import cmdoptions
from pip._internal.cli.req_command import RequirementCommand, with_cleanup
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.exceptions import CommandError
from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_tracker import get_requirement_tracker
from pip._internal.operations.build.build_tracker import get_build_tracker
from pip._internal.req.req_install import (
InstallRequirement,
check_legacy_setup_py_options,
)
from pip._internal.utils.misc import ensure_dir, normalize_path
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.wheel_builder import build, should_build_for_wheel_command
@@ -26,10 +29,8 @@ class WheelCommand(RequirementCommand):
recompiling your software during every install. For more details, see the
wheel docs: https://wheel.readthedocs.io/en/latest/

Requirements: setuptools>=0.8, and wheel.

'pip wheel' uses the bdist_wheel setuptools extension from the wheel
package to build individual wheels.
'pip wheel' uses the build system interface as described here:
https://pip.pypa.io/en/stable/reference/build-system/

"""

@@ -41,14 +42,16 @@ class WheelCommand(RequirementCommand):
%prog [options] <archive url/path> ..."""

def add_options(self) -> None:

self.cmd_opts.add_option(
'-w', '--wheel-dir',
dest='wheel_dir',
metavar='dir',
"-w",
"--wheel-dir",
dest="wheel_dir",
metavar="dir",
default=os.curdir,
help=("Build wheels into <dir>, where the default is the "
"current working directory."),
help=(
"Build wheels into <dir>, where the default is the "
"current working directory."
),
)
self.cmd_opts.add_option(cmdoptions.no_binary())
self.cmd_opts.add_option(cmdoptions.only_binary())
@@ -56,32 +59,35 @@ class WheelCommand(RequirementCommand):
self.cmd_opts.add_option(cmdoptions.no_build_isolation())
self.cmd_opts.add_option(cmdoptions.use_pep517())
self.cmd_opts.add_option(cmdoptions.no_use_pep517())
self.cmd_opts.add_option(cmdoptions.check_build_deps())
self.cmd_opts.add_option(cmdoptions.constraints())
self.cmd_opts.add_option(cmdoptions.editable())
self.cmd_opts.add_option(cmdoptions.requirements())
self.cmd_opts.add_option(cmdoptions.src())
self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
self.cmd_opts.add_option(cmdoptions.no_deps())
self.cmd_opts.add_option(cmdoptions.build_dir())
self.cmd_opts.add_option(cmdoptions.progress_bar())

self.cmd_opts.add_option(
'--no-verify',
dest='no_verify',
action='store_true',
"--no-verify",
dest="no_verify",
action="store_true",
default=False,
help="Don't verify if built wheel is valid.",
)

self.cmd_opts.add_option(cmdoptions.config_settings())
self.cmd_opts.add_option(cmdoptions.build_options())
self.cmd_opts.add_option(cmdoptions.global_options())

self.cmd_opts.add_option(
'--pre',
action='store_true',
"--pre",
action="store_true",
default=False,
help=("Include pre-release and development versions. By default, "
"pip only finds stable versions."),
help=(
"Include pre-release and development versions. By default, "
"pip only finds stable versions."
),
)

self.cmd_opts.add_option(cmdoptions.require_hashes())
@@ -96,17 +102,14 @@ class WheelCommand(RequirementCommand):

@with_cleanup
def run(self, options: Values, args: List[str]) -> int:
cmdoptions.check_install_build_global(options)

session = self.get_default_session(options)

finder = self._build_package_finder(options, session)
wheel_cache = WheelCache(options.cache_dir, options.format_control)

options.wheel_dir = normalize_path(options.wheel_dir)
ensure_dir(options.wheel_dir)

req_tracker = self.enter_context(get_requirement_tracker())
build_tracker = self.enter_context(get_build_tracker())

directory = TempDirectory(
delete=not options.no_clean,
@@ -115,15 +118,19 @@ class WheelCommand(RequirementCommand):
)

reqs = self.get_requirements(args, options, finder, session)
check_legacy_setup_py_options(options, reqs)

wheel_cache = WheelCache(options.cache_dir)

preparer = self.make_requirement_preparer(
temp_build_dir=directory,
options=options,
req_tracker=req_tracker,
build_tracker=build_tracker,
session=session,
finder=finder,
download_dir=options.wheel_dir,
use_user_site=False,
verbosity=self.verbosity,
)

resolver = self.make_resolver(
@@ -137,9 +144,7 @@ class WheelCommand(RequirementCommand):

self.trace_basic_info(finder)

requirement_set = resolver.resolve(
reqs, check_supported_wheels=True
)
requirement_set = resolver.resolve(reqs, check_supported_wheels=True)

reqs_to_build: List[InstallRequirement] = []
for req in requirement_set.requirements.values():
@@ -165,12 +170,11 @@ class WheelCommand(RequirementCommand):
except OSError as e:
logger.warning(
"Building wheel for %s failed: %s",
req.name, e,
req.name,
e,
)
build_failures.append(req)
if len(build_failures) != 0:
raise CommandError(
"Failed to build one or more wheels"
)
raise CommandError("Failed to build one or more wheels")

return SUCCESS

@@ -13,7 +13,6 @@ Some terminology:

import configparser
import locale
import logging
import os
import sys
from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple
@@ -24,41 +23,39 @@ from pip._internal.exceptions import (
)
from pip._internal.utils import appdirs
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.logging import getLogger
from pip._internal.utils.misc import ensure_dir, enum

RawConfigParser = configparser.RawConfigParser # Shorthand
Kind = NewType("Kind", str)

CONFIG_BASENAME = 'pip.ini' if WINDOWS else 'pip.conf'
CONFIG_BASENAME = "pip.ini" if WINDOWS else "pip.conf"
ENV_NAMES_IGNORED = "version", "help"

# The kinds of configurations there are.
kinds = enum(
USER="user", # User Specific
GLOBAL="global", # System Wide
SITE="site", # [Virtual] Environment Specific
ENV="env", # from PIP_CONFIG_FILE
USER="user", # User Specific
GLOBAL="global", # System Wide
SITE="site", # [Virtual] Environment Specific
ENV="env", # from PIP_CONFIG_FILE
ENV_VAR="env-var", # from Environment Variables
)
OVERRIDE_ORDER = kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR
VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.SITE

logger = logging.getLogger(__name__)
logger = getLogger(__name__)


# NOTE: Maybe use the optionx attribute to normalize keynames.
def _normalize_name(name):
# type: (str) -> str
"""Make a name consistent regardless of source (environment or file)
"""
name = name.lower().replace('_', '-')
if name.startswith('--'):
def _normalize_name(name: str) -> str:
"""Make a name consistent regardless of source (environment or file)"""
name = name.lower().replace("_", "-")
if name.startswith("--"):
name = name[2:] # only prefer long opts
return name


def _disassemble_key(name):
# type: (str) -> List[str]
def _disassemble_key(name: str) -> List[str]:
if "." not in name:
error_message = (
"Key does not contain dot separated section and key. "
@@ -68,22 +65,18 @@ def _disassemble_key(name):
return name.split(".", 1)


def get_configuration_files():
# type: () -> Dict[Kind, List[str]]
def get_configuration_files() -> Dict[Kind, List[str]]:
global_config_files = [
os.path.join(path, CONFIG_BASENAME)
for path in appdirs.site_config_dirs('pip')
os.path.join(path, CONFIG_BASENAME) for path in appdirs.site_config_dirs("pip")
]

site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME)
legacy_config_file = os.path.join(
os.path.expanduser('~'),
'pip' if WINDOWS else '.pip',
os.path.expanduser("~"),
"pip" if WINDOWS else ".pip",
CONFIG_BASENAME,
)
new_config_file = os.path.join(
appdirs.user_config_dir("pip"), CONFIG_BASENAME
)
new_config_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME)
return {
kinds.GLOBAL: global_config_files,
kinds.SITE: [site_config_file],
@@ -105,8 +98,7 @@ class Configuration:
and the data stored is also nice.
"""

def __init__(self, isolated, load_only=None):
# type: (bool, Optional[Kind]) -> None
def __init__(self, isolated: bool, load_only: Optional[Kind] = None) -> None:
super().__init__()

if load_only is not None and load_only not in VALID_LOAD_ONLY:
@@ -119,54 +111,50 @@ class Configuration:
self.load_only = load_only

# Because we keep track of where we got the data from
self._parsers = {
self._parsers: Dict[Kind, List[Tuple[str, RawConfigParser]]] = {
variant: [] for variant in OVERRIDE_ORDER
} # type: Dict[Kind, List[Tuple[str, RawConfigParser]]]
self._config = {
}
self._config: Dict[Kind, Dict[str, Any]] = {
variant: {} for variant in OVERRIDE_ORDER
} # type: Dict[Kind, Dict[str, Any]]
self._modified_parsers = [] # type: List[Tuple[str, RawConfigParser]]
}
self._modified_parsers: List[Tuple[str, RawConfigParser]] = []

def load(self):
# type: () -> None
"""Loads configuration from configuration files and environment
"""
def load(self) -> None:
"""Loads configuration from configuration files and environment"""
self._load_config_files()
if not self.isolated:
self._load_environment_vars()

def get_file_to_edit(self):
# type: () -> Optional[str]
"""Returns the file with highest priority in configuration
"""
assert self.load_only is not None, \
"Need to be specified a file to be editing"
def get_file_to_edit(self) -> Optional[str]:
"""Returns the file with highest priority in configuration"""
assert self.load_only is not None, "Need to be specified a file to be editing"

try:
return self._get_parser_to_modify()[0]
except IndexError:
return None

def items(self):
# type: () -> Iterable[Tuple[str, Any]]
def items(self) -> Iterable[Tuple[str, Any]]:
"""Returns key-value pairs like dict.items() representing the loaded
configuration
"""
return self._dictionary.items()

def get_value(self, key):
# type: (str) -> Any
"""Get a value from the configuration.
"""
def get_value(self, key: str) -> Any:
"""Get a value from the configuration."""
orig_key = key
key = _normalize_name(key)
try:
return self._dictionary[key]
except KeyError:
raise ConfigurationError(f"No such key - {key}")
# disassembling triggers a more useful error message than simply
# "No such key" in the case that the key isn't in the form command.option
_disassemble_key(key)
raise ConfigurationError(f"No such key - {orig_key}")

def set_value(self, key, value):
# type: (str, Any) -> None
"""Modify a value in the configuration.
"""
def set_value(self, key: str, value: Any) -> None:
"""Modify a value in the configuration."""
key = _normalize_name(key)
self._ensure_have_load_only()

assert self.load_only
@@ -183,21 +171,23 @@ class Configuration:
self._config[self.load_only][key] = value
self._mark_as_modified(fname, parser)

def unset_value(self, key):
# type: (str) -> None
def unset_value(self, key: str) -> None:
"""Unset a value in the configuration."""
orig_key = key
key = _normalize_name(key)
self._ensure_have_load_only()

assert self.load_only
if key not in self._config[self.load_only]:
raise ConfigurationError(f"No such key - {key}")
raise ConfigurationError(f"No such key - {orig_key}")

fname, parser = self._get_parser_to_modify()

if parser is not None:
section, name = _disassemble_key(key)
if not (parser.has_section(section)
and parser.remove_option(section, name)):
if not (
parser.has_section(section) and parser.remove_option(section, name)
):
# The option was not removed.
raise ConfigurationError(
"Fatal Internal error [id=1]. Please report as a bug."
@@ -210,10 +200,8 @@ class Configuration:

del self._config[self.load_only][key]

def save(self):
# type: () -> None
"""Save the current in-memory state.
"""
def save(self) -> None:
"""Save the current in-memory state."""
self._ensure_have_load_only()

for fname, parser in self._modified_parsers:
@@ -229,17 +217,14 @@ class Configuration:
# Private routines
#

def _ensure_have_load_only(self):
# type: () -> None
def _ensure_have_load_only(self) -> None:
if self.load_only is None:
raise ConfigurationError("Needed a specific file to be modifying.")
logger.debug("Will be working with %s variant only", self.load_only)

@property
def _dictionary(self):
# type: () -> Dict[str, Any]
"""A dictionary representing the loaded configuration.
"""
def _dictionary(self) -> Dict[str, Any]:
"""A dictionary representing the loaded configuration."""
# NOTE: Dictionaries are not populated if not loaded. So, conditionals
# are not needed here.
retval = {}
@@ -249,10 +234,8 @@ class Configuration:

return retval

def _load_config_files(self):
# type: () -> None
"""Loads configuration from configuration files
"""
def _load_config_files(self) -> None:
"""Loads configuration from configuration files"""
config_files = dict(self.iter_config_files())
if config_files[kinds.ENV][0:1] == [os.devnull]:
logger.debug(
@@ -266,9 +249,7 @@ class Configuration:
# If there's specific variant set in `load_only`, load only
# that variant, not the others.
if self.load_only is not None and variant != self.load_only:
logger.debug(
"Skipping file '%s' (variant: %s)", fname, variant
)
logger.debug("Skipping file '%s' (variant: %s)", fname, variant)
continue

parser = self._load_file(variant, fname)
@@ -276,9 +257,8 @@ class Configuration:
# Keeping track of the parsers used
self._parsers[variant].append((fname, parser))

def _load_file(self, variant, fname):
# type: (Kind, str) -> RawConfigParser
logger.debug("For variant '%s', will try loading '%s'", variant, fname)
def _load_file(self, variant: Kind, fname: str) -> RawConfigParser:
logger.verbose("For variant '%s', will try loading '%s'", variant, fname)
parser = self._construct_parser(fname)

for section in parser.sections():
@@ -287,22 +267,20 @@ class Configuration:

return parser

def _construct_parser(self, fname):
# type: (str) -> RawConfigParser
def _construct_parser(self, fname: str) -> RawConfigParser:
parser = configparser.RawConfigParser()
# If there is no such file, don't bother reading it but create the
# parser anyway, to hold the data.
# Doing this is useful when modifying and saving files, where we don't
# need to construct a parser.
if os.path.exists(fname):
locale_encoding = locale.getpreferredencoding(False)
try:
parser.read(fname)
parser.read(fname, encoding=locale_encoding)
except UnicodeDecodeError:
# See https://github.com/pypa/pip/issues/4963
raise ConfigurationFileCouldNotBeLoaded(
reason="contains invalid {} characters".format(
locale.getpreferredencoding(False)
),
reason=f"contains invalid {locale_encoding} characters",
fname=fname,
)
except configparser.Error as error:
@@ -310,16 +288,15 @@ class Configuration:
raise ConfigurationFileCouldNotBeLoaded(error=error)
return parser

def _load_environment_vars(self):
# type: () -> None
"""Loads configuration from environment variables
"""
def _load_environment_vars(self) -> None:
"""Loads configuration from environment variables"""
self._config[kinds.ENV_VAR].update(
self._normalized_keys(":env:", self.get_environ_vars())
)

def _normalized_keys(self, section, items):
# type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any]
def _normalized_keys(
self, section: str, items: Iterable[Tuple[str, Any]]
) -> Dict[str, Any]:
"""Normalizes items to construct a dictionary with normalized keys.

This routine is where the names become keys and are made the same
@@ -331,8 +308,7 @@ class Configuration:
normalized[key] = val
return normalized

def get_environ_vars(self):
# type: () -> Iterable[Tuple[str, str]]
def get_environ_vars(self) -> Iterable[Tuple[str, str]]:
"""Returns a generator with all environmental vars with prefix PIP_"""
for key, val in os.environ.items():
if key.startswith("PIP_"):
@@ -341,8 +317,7 @@ class Configuration:
yield name, val

# XXX: This is patched in the tests.
def iter_config_files(self):
# type: () -> Iterable[Tuple[Kind, List[str]]]
def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]:
"""Yields variant and configuration files associated with it.

This should be treated like items of a dictionary.
@@ -350,7 +325,7 @@ class Configuration:
# SMELL: Move the conditions out of this function

# environment variables have the lowest priority
config_file = os.environ.get('PIP_CONFIG_FILE', None)
config_file = os.environ.get("PIP_CONFIG_FILE", None)
if config_file is not None:
yield kinds.ENV, [config_file]
else:
@@ -372,13 +347,11 @@ class Configuration:
# finally virtualenv configuration first trumping others
yield kinds.SITE, config_files[kinds.SITE]

def get_values_in_config(self, variant):
# type: (Kind) -> Dict[str, Any]
def get_values_in_config(self, variant: Kind) -> Dict[str, Any]:
"""Get values present in a config file"""
return self._config[variant]

def _get_parser_to_modify(self):
# type: () -> Tuple[str, RawConfigParser]
def _get_parser_to_modify(self) -> Tuple[str, RawConfigParser]:
# Determine which parser to modify
assert self.load_only
parsers = self._parsers[self.load_only]
@@ -392,12 +365,10 @@ class Configuration:
return parsers[-1]

# XXX: This is patched in the tests.
def _mark_as_modified(self, fname, parser):
# type: (str, RawConfigParser) -> None
def _mark_as_modified(self, fname: str, parser: RawConfigParser) -> None:
file_parser_tuple = (fname, parser)
if file_parser_tuple not in self._modified_parsers:
self._modified_parsers.append(file_parser_tuple)

def __repr__(self):
# type: () -> str
def __repr__(self) -> str:
return f"{self.__class__.__name__}({self._dictionary!r})"

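Aside: a minimal sketch of driving the `Configuration` class refactored above. This is pip's internal API (not a stable public interface), and the key names used here are purely illustrative:

from pip._internal.configuration import Configuration, kinds

# Read-only use: load every variant, then iterate normalized key/value pairs.
config = Configuration(isolated=False)
config.load()
for key, value in config.items():
    print(key, value)

# Writing requires pinning one variant via load_only before load().
editor = Configuration(isolated=False, load_only=kinds.USER)
editor.load()
editor.set_value("global.timeout", "60")  # keys use the "section.key" form
editor.save()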
@@ -1,9 +1,7 @@
import abc
from typing import Optional

from pip._vendor.pkg_resources import Distribution

from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata.base import BaseDistribution
from pip._internal.req import InstallRequirement


@@ -28,11 +26,14 @@ class AbstractDistribution(metaclass=abc.ABCMeta):
self.req = req

@abc.abstractmethod
def get_pkg_resources_distribution(self) -> Optional[Distribution]:
def get_metadata_distribution(self) -> BaseDistribution:
raise NotImplementedError()

@abc.abstractmethod
def prepare_distribution_metadata(
self, finder: PackageFinder, build_isolation: bool
self,
finder: PackageFinder,
build_isolation: bool,
check_build_deps: bool,
) -> None:
raise NotImplementedError()

@@ -1,9 +1,6 @@
from typing import Optional

from pip._vendor.pkg_resources import Distribution

from pip._internal.distributions.base import AbstractDistribution
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata import BaseDistribution


class InstalledDistribution(AbstractDistribution):
@@ -13,10 +10,14 @@ class InstalledDistribution(AbstractDistribution):
been computed.
"""

def get_pkg_resources_distribution(self) -> Optional[Distribution]:
def get_metadata_distribution(self) -> BaseDistribution:
assert self.req.satisfied_by is not None, "not actually installed"
return self.req.satisfied_by

def prepare_distribution_metadata(
self, finder: PackageFinder, build_isolation: bool
self,
finder: PackageFinder,
build_isolation: bool,
check_build_deps: bool,
) -> None:
pass

@@ -1,12 +1,11 @@
import logging
from typing import Set, Tuple

from pip._vendor.pkg_resources import Distribution
from typing import Iterable, Set, Tuple

from pip._internal.build_env import BuildEnvironment
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.exceptions import InstallationError
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata import BaseDistribution
from pip._internal.utils.subprocess import runner_with_spinner_message

logger = logging.getLogger(__name__)
@@ -19,11 +18,14 @@ class SourceDistribution(AbstractDistribution):
generated, either using PEP 517 or using the legacy `setup.py egg_info`.
"""

def get_pkg_resources_distribution(self) -> Distribution:
def get_metadata_distribution(self) -> BaseDistribution:
return self.req.get_dist()

def prepare_distribution_metadata(
self, finder: PackageFinder, build_isolation: bool
self,
finder: PackageFinder,
build_isolation: bool,
check_build_deps: bool,
) -> None:
# Load pyproject.toml, to determine whether PEP 517 is to be used
self.req.load_pyproject_toml()
@@ -31,28 +33,34 @@ class SourceDistribution(AbstractDistribution):
# Set up the build isolation, if this requirement should be isolated
should_isolate = self.req.use_pep517 and build_isolation
if should_isolate:
self._setup_isolation(finder)

# Setup an isolated environment and install the build backend static
# requirements in it.
self._prepare_build_backend(finder)
# Check that if the requirement is editable, it either supports PEP 660 or
# has a setup.py or a setup.cfg. This cannot be done earlier because we need
# to setup the build backend to verify it supports build_editable, nor can
# it be done later, because we want to avoid installing build requirements
# needlessly. Doing it here also works around setuptools generating
# UNKNOWN.egg-info when running get_requires_for_build_wheel on a directory
# without setup.py nor setup.cfg.
self.req.isolated_editable_sanity_check()
# Install the dynamic build requirements.
self._install_build_reqs(finder)
# Check if the current environment provides build dependencies
should_check_deps = self.req.use_pep517 and check_build_deps
if should_check_deps:
pyproject_requires = self.req.pyproject_requires
assert pyproject_requires is not None
conflicting, missing = self.req.build_env.check_requirements(
pyproject_requires
)
if conflicting:
self._raise_conflicts("the backend dependencies", conflicting)
if missing:
self._raise_missing_reqs(missing)
self.req.prepare_metadata()

def _setup_isolation(self, finder: PackageFinder) -> None:
def _raise_conflicts(
conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]]
) -> None:
format_string = (
"Some build dependencies for {requirement} "
"conflict with {conflicting_with}: {description}."
)
error_message = format_string.format(
requirement=self.req,
conflicting_with=conflicting_with,
description=", ".join(
f"{installed} is incompatible with {wanted}"
for installed, wanted in sorted(conflicting)
),
)
raise InstallationError(error_message)

def _prepare_build_backend(self, finder: PackageFinder) -> None:
# Isolate in a BuildEnvironment and install the build-time
# requirements.
pyproject_requires = self.req.pyproject_requires
@@ -60,13 +68,13 @@ class SourceDistribution(AbstractDistribution):

self.req.build_env = BuildEnvironment()
self.req.build_env.install_requirements(
finder, pyproject_requires, "overlay", "Installing build dependencies"
finder, pyproject_requires, "overlay", kind="build dependencies"
)
conflicting, missing = self.req.build_env.check_requirements(
self.req.requirements_to_check
)
if conflicting:
_raise_conflicts("PEP 517/518 supported requirements", conflicting)
self._raise_conflicts("PEP 517/518 supported requirements", conflicting)
if missing:
logger.warning(
"Missing build requirements in pyproject.toml for %s.",
@@ -77,19 +85,66 @@ class SourceDistribution(AbstractDistribution):
"pip cannot fall back to setuptools without %s.",
" and ".join(map(repr, sorted(missing))),
)
# Install any extra build dependencies that the backend requests.
# This must be done in a second pass, as the pyproject.toml
# dependencies must be installed before we can call the backend.

def _get_build_requires_wheel(self) -> Iterable[str]:
with self.req.build_env:
runner = runner_with_spinner_message("Getting requirements to build wheel")
backend = self.req.pep517_backend
assert backend is not None
with backend.subprocess_runner(runner):
reqs = backend.get_requires_for_build_wheel()
return backend.get_requires_for_build_wheel()

conflicting, missing = self.req.build_env.check_requirements(reqs)
def _get_build_requires_editable(self) -> Iterable[str]:
with self.req.build_env:
runner = runner_with_spinner_message(
"Getting requirements to build editable"
)
backend = self.req.pep517_backend
assert backend is not None
with backend.subprocess_runner(runner):
return backend.get_requires_for_build_editable()

def _install_build_reqs(self, finder: PackageFinder) -> None:
# Install any extra build dependencies that the backend requests.
# This must be done in a second pass, as the pyproject.toml
# dependencies must be installed before we can call the backend.
if (
self.req.editable
and self.req.permit_editable_wheels
and self.req.supports_pyproject_editable()
):
build_reqs = self._get_build_requires_editable()
else:
build_reqs = self._get_build_requires_wheel()
conflicting, missing = self.req.build_env.check_requirements(build_reqs)
if conflicting:
_raise_conflicts("the backend dependencies", conflicting)
self._raise_conflicts("the backend dependencies", conflicting)
self.req.build_env.install_requirements(
finder, missing, "normal", "Installing backend dependencies"
finder, missing, "normal", kind="backend dependencies"
)

def _raise_conflicts(
self, conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]]
) -> None:
format_string = (
"Some build dependencies for {requirement} "
"conflict with {conflicting_with}: {description}."
)
error_message = format_string.format(
requirement=self.req,
conflicting_with=conflicting_with,
description=", ".join(
f"{installed} is incompatible with {wanted}"
for installed, wanted in sorted(conflicting_reqs)
),
)
raise InstallationError(error_message)

def _raise_missing_reqs(self, missing: Set[str]) -> None:
format_string = (
"Some build dependencies for {requirement} are missing: {missing}."
)
error_message = format_string.format(
requirement=self.req, missing=", ".join(map(repr, sorted(missing)))
)
raise InstallationError(error_message)

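Aside: the two-pass flow above (static requirements from pyproject.toml first, dynamic requirements from the backend second) can be sketched with pyproject_hooks directly. pip wraps this in a BuildEnvironment and subprocess runners, so this is a simplified illustration, and the source directory and backend name are assumptions:

from pyproject_hooks import BuildBackendHookCaller

# Pass 1 (not shown) installs [build-system].requires from pyproject.toml,
# making the backend importable inside the isolated environment.
hooks = BuildBackendHookCaller(".", "setuptools.build_meta")

# Pass 2 asks the backend for any extra, dynamically computed requirements.
print(hooks.get_requires_for_build_wheel())  # e.g. ["wheel"] on old setuptools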
@@ -1,10 +1,12 @@
from zipfile import ZipFile

from pip._vendor.pkg_resources import Distribution
from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.distributions.base import AbstractDistribution
from pip._internal.index.package_finder import PackageFinder
from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel
from pip._internal.metadata import (
BaseDistribution,
FilesystemWheel,
get_wheel_distribution,
)


class WheelDistribution(AbstractDistribution):
@@ -13,22 +15,20 @@ class WheelDistribution(AbstractDistribution):
This does not need any preparation as wheels can be directly unpacked.
"""

def get_pkg_resources_distribution(self) -> Distribution:
def get_metadata_distribution(self) -> BaseDistribution:
"""Loads the metadata from the wheel file into memory and returns a
Distribution that uses it, not relying on the wheel file or
requirement.
"""
# Set as part of preparation during download.
assert self.req.local_file_path
# Wheels are never unnamed.
assert self.req.name

with ZipFile(self.req.local_file_path, allowZip64=True) as z:
return pkg_resources_distribution_for_wheel(
z, self.req.name, self.req.local_file_path
)
assert self.req.local_file_path, "Set as part of preparation during download"
assert self.req.name, "Wheels are never unnamed"
wheel = FilesystemWheel(self.req.local_file_path)
return get_wheel_distribution(wheel, canonicalize_name(self.req.name))

def prepare_distribution_metadata(
self, finder: PackageFinder, build_isolation: bool
self,
finder: PackageFinder,
build_isolation: bool,
check_build_deps: bool,
) -> None:
pass

@@ -1,22 +1,181 @@
"""Exceptions used throughout package"""
"""Exceptions used throughout package.

This module MUST NOT try to import from anything within `pip._internal` to
operate. This is expected to be importable from any/all files within the
subpackage and, thus, should not depend on them.
"""

import configparser
import contextlib
import locale
import logging
import pathlib
import re
import sys
from itertools import chain, groupby, repeat
from typing import TYPE_CHECKING, Dict, List, Optional
from typing import TYPE_CHECKING, Dict, Iterator, List, Optional, Union

from pip._vendor.pkg_resources import Distribution
from pip._vendor.requests.models import Request, Response
from pip._vendor.rich.console import Console, ConsoleOptions, RenderResult
from pip._vendor.rich.markup import escape
from pip._vendor.rich.text import Text

if TYPE_CHECKING:
from hashlib import _Hash
from typing import Literal

from pip._internal.metadata import BaseDistribution
from pip._internal.req.req_install import InstallRequirement

logger = logging.getLogger(__name__)


#
# Scaffolding
#
def _is_kebab_case(s: str) -> bool:
return re.match(r"^[a-z]+(-[a-z]+)*$", s) is not None


def _prefix_with_indent(
s: Union[Text, str],
console: Console,
*,
prefix: str,
indent: str,
) -> Text:
if isinstance(s, Text):
text = s
else:
text = console.render_str(s)

return console.render_str(prefix, overflow="ignore") + console.render_str(
f"\n{indent}", overflow="ignore"
).join(text.split(allow_blank=True))


class PipError(Exception):
"""Base pip exception"""
"""The base pip error."""


class DiagnosticPipError(PipError):
"""An error, that presents diagnostic information to the user.

This contains a bunch of logic, to enable pretty presentation of our error
messages. Each error gets a unique reference. Each error can also include
additional context, a hint and/or a note -- which are presented with the
main error message in a consistent style.

This is adapted from the error output styling in `sphinx-theme-builder`.
"""

reference: str

def __init__(
self,
*,
kind: 'Literal["error", "warning"]' = "error",
reference: Optional[str] = None,
message: Union[str, Text],
context: Optional[Union[str, Text]],
hint_stmt: Optional[Union[str, Text]],
note_stmt: Optional[Union[str, Text]] = None,
link: Optional[str] = None,
) -> None:
# Ensure a proper reference is provided.
if reference is None:
assert hasattr(self, "reference"), "error reference not provided!"
reference = self.reference
assert _is_kebab_case(reference), "error reference must be kebab-case!"

self.kind = kind
self.reference = reference

self.message = message
self.context = context

self.note_stmt = note_stmt
self.hint_stmt = hint_stmt

self.link = link

super().__init__(f"<{self.__class__.__name__}: {self.reference}>")

def __repr__(self) -> str:
return (
f"<{self.__class__.__name__}("
f"reference={self.reference!r}, "
f"message={self.message!r}, "
f"context={self.context!r}, "
f"note_stmt={self.note_stmt!r}, "
f"hint_stmt={self.hint_stmt!r}"
")>"
)

def __rich_console__(
self,
console: Console,
options: ConsoleOptions,
) -> RenderResult:
colour = "red" if self.kind == "error" else "yellow"

yield f"[{colour} bold]{self.kind}[/]: [bold]{self.reference}[/]"
yield ""

if not options.ascii_only:
# Present the main message, with relevant context indented.
if self.context is not None:
yield _prefix_with_indent(
self.message,
console,
prefix=f"[{colour}]×[/] ",
indent=f"[{colour}]│[/] ",
)
yield _prefix_with_indent(
self.context,
console,
prefix=f"[{colour}]╰─>[/] ",
indent=f"[{colour}] [/] ",
)
else:
yield _prefix_with_indent(
self.message,
console,
prefix="[red]×[/] ",
indent=" ",
)
else:
yield self.message
if self.context is not None:
yield ""
yield self.context

if self.note_stmt is not None or self.hint_stmt is not None:
yield ""

if self.note_stmt is not None:
yield _prefix_with_indent(
self.note_stmt,
console,
prefix="[magenta bold]note[/]: ",
indent=" ",
)
if self.hint_stmt is not None:
yield _prefix_with_indent(
self.hint_stmt,
console,
prefix="[cyan bold]hint[/]: ",
indent=" ",
)

if self.link is not None:
yield ""
yield f"Link: {self.link}"


#
# Actual Errors
#
class ConfigurationError(PipError):
"""General exception in configuration"""

@@ -29,17 +188,54 @@ class UninstallationError(PipError):
"""General exception during uninstallation"""


class MissingPyProjectBuildRequires(DiagnosticPipError):
"""Raised when pyproject.toml has `build-system`, but no `build-system.requires`."""

reference = "missing-pyproject-build-system-requires"

def __init__(self, *, package: str) -> None:
super().__init__(
message=f"Can not process {escape(package)}",
context=Text(
"This package has an invalid pyproject.toml file.\n"
"The [build-system] table is missing the mandatory `requires` key."
),
note_stmt="This is an issue with the package mentioned above, not pip.",
hint_stmt=Text("See PEP 518 for the detailed specification."),
)


class InvalidPyProjectBuildRequires(DiagnosticPipError):
"""Raised when pyproject.toml has an invalid `build-system.requires`."""

reference = "invalid-pyproject-build-system-requires"
|
||||
|
||||
def __init__(self, *, package: str, reason: str) -> None:
|
||||
super().__init__(
|
||||
message=f"Can not process {escape(package)}",
|
||||
context=Text(
|
||||
"This package has an invalid `build-system.requires` key in "
|
||||
f"pyproject.toml.\n{reason}"
|
||||
),
|
||||
note_stmt="This is an issue with the package mentioned above, not pip.",
|
||||
hint_stmt=Text("See PEP 518 for the detailed specification."),
|
||||
)
|
||||
|
||||
|
||||
class NoneMetadataError(PipError):
|
||||
"""
|
||||
Raised when accessing "METADATA" or "PKG-INFO" metadata for a
|
||||
pip._vendor.pkg_resources.Distribution object and
|
||||
`dist.has_metadata('METADATA')` returns True but
|
||||
`dist.get_metadata('METADATA')` returns None (and similarly for
|
||||
"PKG-INFO").
|
||||
"""Raised when accessing a Distribution's "METADATA" or "PKG-INFO".
|
||||
|
||||
This signifies an inconsistency, when the Distribution claims to have
|
||||
the metadata file (if not, raise ``FileNotFoundError`` instead), but is
|
||||
not actually able to produce its content. This may be due to permission
|
||||
errors.
|
||||
"""
|
||||
|
||||
def __init__(self, dist, metadata_name):
|
||||
# type: (Distribution, str) -> None
|
||||
def __init__(
|
||||
self,
|
||||
dist: "BaseDistribution",
|
||||
metadata_name: str,
|
||||
) -> None:
|
||||
"""
|
||||
:param dist: A Distribution object.
|
||||
:param metadata_name: The name of the metadata being accessed
|
||||
@@ -48,28 +244,24 @@ class NoneMetadataError(PipError):
|
||||
self.dist = dist
|
||||
self.metadata_name = metadata_name
|
||||
|
||||
def __str__(self):
|
||||
# type: () -> str
|
||||
def __str__(self) -> str:
|
||||
# Use `dist` in the error message because its stringification
|
||||
# includes more information, like the version and location.
|
||||
return (
|
||||
'None {} metadata found for distribution: {}'.format(
|
||||
self.metadata_name, self.dist,
|
||||
)
|
||||
return "None {} metadata found for distribution: {}".format(
|
||||
self.metadata_name,
|
||||
self.dist,
|
||||
)
|
||||
|
||||
|
||||
class UserInstallationInvalid(InstallationError):
|
||||
"""A --user install is requested on an environment without user site."""
|
||||
|
||||
def __str__(self):
|
||||
# type: () -> str
|
||||
def __str__(self) -> str:
|
||||
return "User base directory is not specified"
|
||||
|
||||
|
||||
class InvalidSchemeCombination(InstallationError):
|
||||
def __str__(self):
|
||||
# type: () -> str
|
||||
def __str__(self) -> str:
|
||||
before = ", ".join(str(a) for a in self.args[:-1])
|
||||
return f"Cannot set {before} and {self.args[-1]} together"
|
||||
|
||||
@@ -102,8 +294,12 @@ class PreviousBuildDirError(PipError):
|
||||
class NetworkConnectionError(PipError):
|
||||
"""HTTP connection error"""
|
||||
|
||||
def __init__(self, error_msg, response=None, request=None):
|
||||
# type: (str, Response, Request) -> None
|
||||
def __init__(
|
||||
self,
|
||||
error_msg: str,
|
||||
response: Optional[Response] = None,
|
||||
request: Optional[Request] = None,
|
||||
) -> None:
|
||||
"""
|
||||
Initialize NetworkConnectionError with `request` and `response`
|
||||
objects.
|
||||
@@ -111,13 +307,15 @@ class NetworkConnectionError(PipError):
|
||||
self.response = response
|
||||
self.request = request
|
||||
self.error_msg = error_msg
|
||||
if (self.response is not None and not self.request and
|
||||
hasattr(response, 'request')):
|
||||
if (
|
||||
self.response is not None
|
||||
and not self.request
|
||||
and hasattr(response, "request")
|
||||
):
|
||||
self.request = self.response.request
|
||||
super().__init__(error_msg, response, request)
|
||||
|
||||
def __str__(self):
|
||||
# type: () -> str
|
||||
def __str__(self) -> str:
|
||||
return str(self.error_msg)
|
||||
|
||||
|
||||
@@ -129,74 +327,122 @@ class UnsupportedWheel(InstallationError):
|
||||
"""Unsupported wheel."""
|
||||
|
||||
|
||||
class InvalidWheel(InstallationError):
|
||||
"""Invalid (e.g. corrupt) wheel."""
|
||||
|
||||
def __init__(self, location: str, name: str):
|
||||
self.location = location
|
||||
self.name = name
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"Wheel '{self.name}' located at {self.location} is invalid."
|
||||
|
||||
|
||||
class MetadataInconsistent(InstallationError):
|
||||
"""Built metadata contains inconsistent information.
|
||||
|
||||
This is raised when the metadata contains values (e.g. name and version)
|
||||
that do not match the information previously obtained from sdist filename
|
||||
or user-supplied ``#egg=`` value.
|
||||
that do not match the information previously obtained from sdist filename,
|
||||
user-supplied ``#egg=`` value, or an install requirement name.
|
||||
"""
|
||||
def __init__(self, ireq, field, f_val, m_val):
|
||||
# type: (InstallRequirement, str, str, str) -> None
|
||||
|
||||
def __init__(
|
||||
self, ireq: "InstallRequirement", field: str, f_val: str, m_val: str
|
||||
) -> None:
|
||||
self.ireq = ireq
|
||||
self.field = field
|
||||
self.f_val = f_val
|
||||
self.m_val = m_val
|
||||
|
||||
def __str__(self):
|
||||
# type: () -> str
|
||||
template = (
|
||||
"Requested {} has inconsistent {}: "
|
||||
"filename has {!r}, but metadata has {!r}"
|
||||
)
|
||||
return template.format(self.ireq, self.field, self.f_val, self.m_val)
|
||||
|
||||
|
||||
class InstallationSubprocessError(InstallationError):
|
||||
"""A subprocess call failed during installation."""
|
||||
def __init__(self, returncode, description):
|
||||
# type: (int, str) -> None
|
||||
self.returncode = returncode
|
||||
self.description = description
|
||||
|
||||
def __str__(self):
|
||||
# type: () -> str
|
||||
def __str__(self) -> str:
|
||||
return (
|
||||
"Command errored out with exit status {}: {} "
|
||||
"Check the logs for full command output."
|
||||
).format(self.returncode, self.description)
|
||||
f"Requested {self.ireq} has inconsistent {self.field}: "
|
||||
f"expected {self.f_val!r}, but metadata has {self.m_val!r}"
|
||||
)
|
||||
|
||||
|
||||
class InstallationSubprocessError(DiagnosticPipError, InstallationError):
|
||||
"""A subprocess call failed."""
|
||||
|
||||
reference = "subprocess-exited-with-error"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
command_description: str,
|
||||
exit_code: int,
|
||||
output_lines: Optional[List[str]],
|
||||
) -> None:
|
||||
if output_lines is None:
|
||||
output_prompt = Text("See above for output.")
|
||||
else:
|
||||
output_prompt = (
|
||||
Text.from_markup(f"[red][{len(output_lines)} lines of output][/]\n")
|
||||
+ Text("".join(output_lines))
|
||||
+ Text.from_markup(R"[red]\[end of output][/]")
|
||||
)
|
||||
|
||||
super().__init__(
|
||||
message=(
|
||||
f"[green]{escape(command_description)}[/] did not run successfully.\n"
|
||||
f"exit code: {exit_code}"
|
||||
),
|
||||
context=output_prompt,
|
||||
hint_stmt=None,
|
||||
note_stmt=(
|
||||
"This error originates from a subprocess, and is likely not a "
|
||||
"problem with pip."
|
||||
),
|
||||
)
|
||||
|
||||
self.command_description = command_description
|
||||
self.exit_code = exit_code
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"{self.command_description} exited with {self.exit_code}"
|
||||
|
||||
|
||||
class MetadataGenerationFailed(InstallationSubprocessError, InstallationError):
|
||||
reference = "metadata-generation-failed"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
package_details: str,
|
||||
) -> None:
|
||||
super(InstallationSubprocessError, self).__init__(
|
||||
message="Encountered error while generating package metadata.",
|
||||
context=escape(package_details),
|
||||
hint_stmt="See above for details.",
|
||||
note_stmt="This is an issue with the package mentioned above, not pip.",
|
||||
)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return "metadata generation failed"
|
||||
|
||||
|
||||
class HashErrors(InstallationError):
|
||||
"""Multiple HashError instances rolled into one for reporting"""
|
||||
|
||||
def __init__(self):
|
||||
# type: () -> None
|
||||
self.errors = [] # type: List[HashError]
|
||||
def __init__(self) -> None:
|
||||
self.errors: List["HashError"] = []
|
||||
|
||||
def append(self, error):
|
||||
# type: (HashError) -> None
|
||||
def append(self, error: "HashError") -> None:
|
||||
self.errors.append(error)
|
||||
|
||||
def __str__(self):
|
||||
# type: () -> str
|
||||
def __str__(self) -> str:
|
||||
lines = []
|
||||
self.errors.sort(key=lambda e: e.order)
|
||||
for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
|
||||
lines.append(cls.head)
|
||||
lines.extend(e.body() for e in errors_of_cls)
|
||||
if lines:
|
||||
return '\n'.join(lines)
|
||||
return ''
|
||||
return "\n".join(lines)
|
||||
return ""
|
||||
|
||||
def __nonzero__(self):
|
||||
# type: () -> bool
|
||||
def __bool__(self) -> bool:
|
||||
return bool(self.errors)
|
||||
|
||||
def __bool__(self):
|
||||
# type: () -> bool
|
||||
return self.__nonzero__()
|
||||
|
||||
|
||||
class HashError(InstallationError):
|
||||
"""
|
||||
@@ -214,12 +460,12 @@ class HashError(InstallationError):
|
||||
typically available earlier.
|
||||
|
||||
"""
|
||||
req = None # type: Optional[InstallRequirement]
|
||||
head = ''
|
||||
order = -1 # type: int
|
||||
|
||||
def body(self):
|
||||
# type: () -> str
|
||||
req: Optional["InstallRequirement"] = None
|
||||
head = ""
|
||||
order: int = -1
|
||||
|
||||
def body(self) -> str:
|
||||
"""Return a summary of me for display under the heading.
|
||||
|
||||
This default implementation simply prints a description of the
|
||||
@@ -229,21 +475,19 @@ class HashError(InstallationError):
|
||||
its link already populated by the resolver's _populate_link().
|
||||
|
||||
"""
|
||||
return f' {self._requirement_name()}'
|
||||
return f" {self._requirement_name()}"
|
||||
|
||||
def __str__(self):
|
||||
# type: () -> str
|
||||
return f'{self.head}\n{self.body()}'
|
||||
def __str__(self) -> str:
|
||||
return f"{self.head}\n{self.body()}"
|
||||
|
||||
def _requirement_name(self):
|
||||
# type: () -> str
|
||||
def _requirement_name(self) -> str:
|
||||
"""Return a description of the requirement that triggered me.
|
||||
|
||||
This default implementation returns long description of the req, with
|
||||
line numbers
|
||||
|
||||
"""
|
||||
return str(self.req) if self.req else 'unknown package'
|
||||
return str(self.req) if self.req else "unknown package"
|
||||
|
||||
|
||||
class VcsHashUnsupported(HashError):
|
||||
@@ -251,8 +495,10 @@ class VcsHashUnsupported(HashError):
|
||||
we don't have a method for hashing those."""
|
||||
|
||||
order = 0
|
||||
head = ("Can't verify hashes for these requirements because we don't "
|
||||
"have a way to hash version control repositories:")
|
||||
head = (
|
||||
"Can't verify hashes for these requirements because we don't "
|
||||
"have a way to hash version control repositories:"
|
||||
)
|
||||
|
||||
|
||||
class DirectoryUrlHashUnsupported(HashError):
|
||||
@@ -260,32 +506,34 @@ class DirectoryUrlHashUnsupported(HashError):
|
||||
we don't have a method for hashing those."""
|
||||
|
||||
order = 1
|
||||
head = ("Can't verify hashes for these file:// requirements because they "
|
||||
"point to directories:")
|
||||
head = (
|
||||
"Can't verify hashes for these file:// requirements because they "
|
||||
"point to directories:"
|
||||
)
|
||||
|
||||
|
||||
class HashMissing(HashError):
|
||||
"""A hash was needed for a requirement but is absent."""
|
||||
|
||||
order = 2
|
||||
head = ('Hashes are required in --require-hashes mode, but they are '
|
||||
'missing from some requirements. Here is a list of those '
|
||||
'requirements along with the hashes their downloaded archives '
|
||||
'actually had. Add lines like these to your requirements files to '
|
||||
'prevent tampering. (If you did not enable --require-hashes '
|
||||
'manually, note that it turns on automatically when any package '
|
||||
'has a hash.)')
|
||||
head = (
|
||||
"Hashes are required in --require-hashes mode, but they are "
|
||||
"missing from some requirements. Here is a list of those "
|
||||
"requirements along with the hashes their downloaded archives "
|
||||
"actually had. Add lines like these to your requirements files to "
|
||||
"prevent tampering. (If you did not enable --require-hashes "
|
||||
"manually, note that it turns on automatically when any package "
|
||||
"has a hash.)"
|
||||
)
|
||||
|
||||
def __init__(self, gotten_hash):
|
||||
# type: (str) -> None
|
||||
def __init__(self, gotten_hash: str) -> None:
|
||||
"""
|
||||
:param gotten_hash: The hash of the (possibly malicious) archive we
|
||||
just downloaded
|
||||
"""
|
||||
self.gotten_hash = gotten_hash
|
||||
|
||||
def body(self):
|
||||
# type: () -> str
|
||||
def body(self) -> str:
|
||||
# Dodge circular import.
|
||||
from pip._internal.utils.hashes import FAVORITE_HASH
|
||||
|
||||
@@ -294,13 +542,16 @@ class HashMissing(HashError):
|
||||
# In the case of URL-based requirements, display the original URL
|
||||
# seen in the requirements file rather than the package name,
|
||||
# so the output can be directly copied into the requirements file.
|
||||
package = (self.req.original_link if self.req.original_link
|
||||
# In case someone feeds something downright stupid
|
||||
# to InstallRequirement's constructor.
|
||||
else getattr(self.req, 'req', None))
|
||||
return ' {} --hash={}:{}'.format(package or 'unknown package',
|
||||
FAVORITE_HASH,
|
||||
self.gotten_hash)
|
||||
package = (
|
||||
self.req.original_link
|
||||
if self.req.original_link
|
||||
# In case someone feeds something downright stupid
|
||||
# to InstallRequirement's constructor.
|
||||
else getattr(self.req, "req", None)
|
||||
)
|
||||
return " {} --hash={}:{}".format(
|
||||
package or "unknown package", FAVORITE_HASH, self.gotten_hash
|
||||
)
|
||||
|
||||
|
||||
class HashUnpinned(HashError):
|
||||
@@ -308,8 +559,10 @@ class HashUnpinned(HashError):
|
||||
version."""
|
||||
|
||||
order = 3
|
||||
head = ('In --require-hashes mode, all requirements must have their '
|
||||
'versions pinned with ==. These do not:')
|
||||
head = (
|
||||
"In --require-hashes mode, all requirements must have their "
|
||||
"versions pinned with ==. These do not:"
|
||||
)
|
||||
|
||||
|
||||
class HashMismatch(HashError):
|
||||
@@ -321,14 +574,16 @@ class HashMismatch(HashError):
|
||||
improve its error message.
|
||||
|
||||
"""
|
||||
order = 4
|
||||
head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
|
||||
'FILE. If you have updated the package versions, please update '
|
||||
'the hashes. Otherwise, examine the package contents carefully; '
|
||||
'someone may have tampered with them.')
|
||||
|
||||
def __init__(self, allowed, gots):
|
||||
# type: (Dict[str, List[str]], Dict[str, _Hash]) -> None
|
||||
order = 4
|
||||
head = (
|
||||
"THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS "
|
||||
"FILE. If you have updated the package versions, please update "
|
||||
"the hashes. Otherwise, examine the package contents carefully; "
|
||||
"someone may have tampered with them."
|
||||
)
|
||||
|
||||
def __init__(self, allowed: Dict[str, List[str]], gots: Dict[str, "_Hash"]) -> None:
|
||||
"""
|
||||
:param allowed: A dict of algorithm names pointing to lists of allowed
|
||||
hex digests
|
||||
@@ -338,13 +593,10 @@ class HashMismatch(HashError):
|
||||
self.allowed = allowed
|
||||
self.gots = gots
|
||||
|
||||
def body(self):
|
||||
# type: () -> str
|
||||
return ' {}:\n{}'.format(self._requirement_name(),
|
||||
self._hash_comparison())
|
||||
def body(self) -> str:
|
||||
return " {}:\n{}".format(self._requirement_name(), self._hash_comparison())
|
||||
|
||||
def _hash_comparison(self):
|
||||
# type: () -> str
|
||||
def _hash_comparison(self) -> str:
|
||||
"""
|
||||
Return a comparison of actual and expected hash values.
|
||||
|
||||
@@ -355,20 +607,22 @@ class HashMismatch(HashError):
|
||||
Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef
|
||||
|
||||
"""
|
||||
def hash_then_or(hash_name):
|
||||
# type: (str) -> chain[str]
|
||||
|
||||
def hash_then_or(hash_name: str) -> "chain[str]":
|
||||
# For now, all the decent hashes have 6-char names, so we can get
|
||||
# away with hard-coding space literals.
|
||||
return chain([hash_name], repeat(' or'))
|
||||
return chain([hash_name], repeat(" or"))
|
||||
|
||||
lines = [] # type: List[str]
|
||||
lines: List[str] = []
|
||||
for hash_name, expecteds in self.allowed.items():
|
||||
prefix = hash_then_or(hash_name)
|
||||
lines.extend((' Expected {} {}'.format(next(prefix), e))
|
||||
for e in expecteds)
|
||||
lines.append(' Got {}\n'.format(
|
||||
self.gots[hash_name].hexdigest()))
|
||||
return '\n'.join(lines)
|
||||
lines.extend(
|
||||
(" Expected {} {}".format(next(prefix), e)) for e in expecteds
|
||||
)
|
||||
lines.append(
|
||||
" Got {}\n".format(self.gots[hash_name].hexdigest())
|
||||
)
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
class UnsupportedPythonVersion(InstallationError):
@@ -377,21 +631,103 @@ class UnsupportedPythonVersion(InstallationError):


class ConfigurationFileCouldNotBeLoaded(ConfigurationError):
    """When there are errors while loading a configuration file
    """
    """When there are errors while loading a configuration file"""

    def __init__(self, reason="could not be loaded", fname=None, error=None):
        # type: (str, Optional[str], Optional[configparser.Error]) -> None
    def __init__(
        self,
        reason: str = "could not be loaded",
        fname: Optional[str] = None,
        error: Optional[configparser.Error] = None,
    ) -> None:
        super().__init__(error)
        self.reason = reason
        self.fname = fname
        self.error = error

    def __str__(self):
        # type: () -> str
    def __str__(self) -> str:
        if self.fname is not None:
            message_part = f" in {self.fname}."
        else:
            assert self.error is not None
            message_part = f".\n{self.error}\n"
        return f"Configuration file {self.reason}{message_part}"


_DEFAULT_EXTERNALLY_MANAGED_ERROR = f"""\
The Python environment under {sys.prefix} is managed externally, and may not be
manipulated by the user. Please use specific tooling from the distributor of
the Python installation to interact with this environment instead.
"""


class ExternallyManagedEnvironment(DiagnosticPipError):
    """The current environment is externally managed.

    This is raised when the current environment is externally managed, as
    defined by `PEP 668`_. The ``EXTERNALLY-MANAGED`` configuration is checked
    and displayed when the error is bubbled up to the user.

    :param error: The error message read from ``EXTERNALLY-MANAGED``.
    """

    reference = "externally-managed-environment"

    def __init__(self, error: Optional[str]) -> None:
        if error is None:
            context = Text(_DEFAULT_EXTERNALLY_MANAGED_ERROR)
        else:
            context = Text(error)
        super().__init__(
            message="This environment is externally managed",
            context=context,
            note_stmt=(
                "If you believe this is a mistake, please contact your "
                "Python installation or OS distribution provider. "
                "You can override this, at the risk of breaking your Python "
                "installation or OS, by passing --break-system-packages."
            ),
            hint_stmt=Text("See PEP 668 for the detailed specification."),
        )

    @staticmethod
    def _iter_externally_managed_error_keys() -> Iterator[str]:
        # LC_MESSAGES is in POSIX, but not the C standard. The most common
        # platform that does not implement this category is Windows, where
        # using other categories for console message localization is equally
        # unreliable, so we fall back to the locale-less vendor message. This
        # can always be re-evaluated when a vendor proposes a new alternative.
        try:
            category = locale.LC_MESSAGES
        except AttributeError:
            lang: Optional[str] = None
        else:
            lang, _ = locale.getlocale(category)
        if lang is not None:
            yield f"Error-{lang}"
            for sep in ("-", "_"):
                before, found, _ = lang.partition(sep)
                if not found:
                    continue
                yield f"Error-{before}"
        yield "Error"

    @classmethod
    def from_config(
        cls,
        config: Union[pathlib.Path, str],
    ) -> "ExternallyManagedEnvironment":
        parser = configparser.ConfigParser(interpolation=None)
        try:
            parser.read(config, encoding="utf-8")
            section = parser["externally-managed"]
            for key in cls._iter_externally_managed_error_keys():
                with contextlib.suppress(KeyError):
                    return cls(section[key])
        except KeyError:
            pass
        except (OSError, UnicodeDecodeError, configparser.ParsingError):
            from pip._internal.utils._log import VERBOSE

            exc_info = logger.isEnabledFor(VERBOSE)
            logger.warning("Failed to read %s", config, exc_info=exc_info)
        return cls(None)

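For illustration, a minimal standalone reader for an EXTERNALLY-MANAGED file (the path is hypothetical, and this sketch only checks the unlocalized "Error" key, unlike the locale-aware lookup above):

import configparser
import contextlib
from typing import Optional

def read_externally_managed_error(path: str) -> Optional[str]:
    parser = configparser.ConfigParser(interpolation=None)
    parser.read(path, encoding="utf-8")
    with contextlib.suppress(KeyError):
        # The file is INI-formatted with an [externally-managed] section.
        return parser["externally-managed"]["Error"]
    return None

# Hypothetical location; distributions place it alongside the stdlib.
print(read_externally_managed_error("/usr/lib/python3.11/EXTERNALLY-MANAGED"))
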
@@ -2,30 +2,32 @@
The main purpose of this module is to expose LinkCollector.collect_sources().
"""

import cgi
import collections
import email.message
import functools
import html
import itertools
import json
import logging
import os
import re
import urllib.parse
import urllib.request
import xml.etree.ElementTree
from html.parser import HTMLParser
from optparse import Values
from typing import (
    TYPE_CHECKING,
    Callable,
    Dict,
    Iterable,
    List,
    MutableMapping,
    NamedTuple,
    Optional,
    Sequence,
    Tuple,
    Union,
)

from pip._vendor import html5lib, requests
from pip._vendor import requests
from pip._vendor.requests import Response
from pip._vendor.requests.exceptions import RetryError, SSLError

@@ -35,14 +37,18 @@ from pip._internal.models.search_scope import SearchScope
from pip._internal.network.session import PipSession
from pip._internal.network.utils import raise_for_status
from pip._internal.utils.filetypes import is_archive_file
from pip._internal.utils.misc import pairwise, redact_auth_from_url
from pip._internal.utils.misc import redact_auth_from_url
from pip._internal.vcs import vcs

from .sources import CandidatesFromPage, LinkSource, build_source

if TYPE_CHECKING:
    from typing import Protocol
else:
    Protocol = object

logger = logging.getLogger(__name__)

HTMLElement = xml.etree.ElementTree.Element
ResponseHeaders = MutableMapping[str, str]


@@ -52,70 +58,90 @@ def _match_vcs_scheme(url: str) -> Optional[str]:
    Returns the matched VCS scheme, or None if there's no match.
    """
    for scheme in vcs.schemes:
        if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
        if url.lower().startswith(scheme) and url[len(scheme)] in "+:":
            return scheme
    return None


class _NotHTML(Exception):
class _NotAPIContent(Exception):
    def __init__(self, content_type: str, request_desc: str) -> None:
        super().__init__(content_type, request_desc)
        self.content_type = content_type
        self.request_desc = request_desc


def _ensure_html_header(response: Response) -> None:
    """Check the Content-Type header to ensure the response contains HTML.

    Raises `_NotHTML` if the content type is not text/html.
def _ensure_api_header(response: Response) -> None:
    """
    content_type = response.headers.get("Content-Type", "")
    if not content_type.lower().startswith("text/html"):
        raise _NotHTML(content_type, response.request.method)
    Check the Content-Type header to ensure the response contains a Simple
    API Response.

    Raises `_NotAPIContent` if the content type is not a valid content-type.
    """
    content_type = response.headers.get("Content-Type", "Unknown")

    content_type_l = content_type.lower()
    if content_type_l.startswith(
        (
            "text/html",
            "application/vnd.pypi.simple.v1+html",
            "application/vnd.pypi.simple.v1+json",
        )
    ):
        return

    raise _NotAPIContent(content_type, response.request.method)


class _NotHTTP(Exception):
    pass


def _ensure_html_response(url: str, session: PipSession) -> None:
    """Send a HEAD request to the URL, and ensure the response contains HTML.
def _ensure_api_response(url: str, session: PipSession) -> None:
    """
    Send a HEAD request to the URL, and ensure the response contains a Simple
    API Response.

    Raises `_NotHTTP` if the URL is not available for a HEAD request, or
    `_NotHTML` if the content type is not text/html.
    `_NotAPIContent` if the content type is not a valid content type.
    """
    scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url)
    if scheme not in {'http', 'https'}:
    if scheme not in {"http", "https"}:
        raise _NotHTTP()

    resp = session.head(url, allow_redirects=True)
    raise_for_status(resp)

    _ensure_html_header(resp)
    _ensure_api_header(resp)


def _get_html_response(url: str, session: PipSession) -> Response:
    """Access an HTML page with GET, and return the response.
def _get_simple_response(url: str, session: PipSession) -> Response:
    """Access a Simple API response with GET, and return the response.

    This consists of three parts:

    1. If the URL looks suspiciously like an archive, send a HEAD first to
       check the Content-Type is HTML, to avoid downloading a large file.
       Raise `_NotHTTP` if the content type cannot be determined, or
       `_NotHTML` if it is not HTML.
       check the Content-Type is HTML or Simple API, to avoid downloading a
       large file. Raise `_NotHTTP` if the content type cannot be determined, or
       `_NotAPIContent` if it is not HTML or a Simple API.
    2. Actually perform the request. Raise HTTP exceptions on network failures.
    3. Check the Content-Type header to make sure we got HTML, and raise
       `_NotHTML` otherwise.
    3. Check the Content-Type header to make sure we got a Simple API response,
       and raise `_NotAPIContent` otherwise.
    """
    if is_archive_file(Link(url).filename):
        _ensure_html_response(url, session=session)
        _ensure_api_response(url, session=session)

    logger.debug('Getting page %s', redact_auth_from_url(url))
    logger.debug("Getting page %s", redact_auth_from_url(url))

    resp = session.get(
        url,
        headers={
            "Accept": "text/html",
            "Accept": ", ".join(
                [
                    "application/vnd.pypi.simple.v1+json",
                    "application/vnd.pypi.simple.v1+html; q=0.1",
                    "text/html; q=0.01",
                ]
            ),
            # We don't want to blindly return cached data for
            # /simple/, because authors generally expect that
            # twine upload && pip install will function, but if
@@ -137,153 +163,52 @@ def _get_html_response(url: str, session: PipSession) -> Response:
    # The check for archives above only works if the url ends with
    # something that looks like an archive. However that is not a
    # requirement of an url. Unless we issue a HEAD request on every
    # url we cannot know ahead of time for sure if something is HTML
    # or not. However we can check after we've downloaded it.
    _ensure_html_header(resp)
    # url we cannot know ahead of time for sure if something is a
    # Simple API response or not. However we can check after we've
    # downloaded it.
    _ensure_api_header(resp)

    logger.debug(
        "Fetched page %s as %s",
        redact_auth_from_url(url),
        resp.headers.get("Content-Type", "Unknown"),
    )

    return resp


def _get_encoding_from_headers(headers: ResponseHeaders) -> Optional[str]:
    """Determine if we have any encoding information in our headers.
    """
    """Determine if we have any encoding information in our headers."""
    if headers and "Content-Type" in headers:
        content_type, params = cgi.parse_header(headers["Content-Type"])
        if "charset" in params:
            return params['charset']
        m = email.message.Message()
        m["content-type"] = headers["Content-Type"]
        charset = m.get_param("charset")
        if charset:
            return str(charset)
    return None


def _determine_base_url(document: HTMLElement, page_url: str) -> str:
    """Determine the HTML document's base URL.

    This looks for a ``<base>`` tag in the HTML document. If present, its href
    attribute denotes the base URL of anchor tags in the document. If there is
    no such tag (or if it does not have a valid href attribute), the HTML
    file's URL is used as the base URL.

    :param document: An HTML document representation. The current
        implementation expects the result of ``html5lib.parse()``.
    :param page_url: The URL of the HTML document.
    """
    for base in document.findall(".//base"):
        href = base.get("href")
        if href is not None:
            return href
    return page_url


def _clean_url_path_part(part: str) -> str:
    """
    Clean a "part" of a URL path (i.e. after splitting on "@" characters).
    """
    # We unquote prior to quoting to make sure nothing is double quoted.
    return urllib.parse.quote(urllib.parse.unquote(part))


def _clean_file_url_path(part: str) -> str:
    """
    Clean the first part of a URL path that corresponds to a local
    filesystem path (i.e. the first part after splitting on "@" characters).
    """
    # We unquote prior to quoting to make sure nothing is double quoted.
    # Also, on Windows the path part might contain a drive letter which
    # should not be quoted. On Linux where drive letters do not
    # exist, the colon should be quoted. We rely on urllib.request
    # to do the right thing here.
    return urllib.request.pathname2url(urllib.request.url2pathname(part))


# percent-encoded: /
_reserved_chars_re = re.compile('(@|%2F)', re.IGNORECASE)


def _clean_url_path(path: str, is_local_path: bool) -> str:
    """
    Clean the path portion of a URL.
    """
    if is_local_path:
        clean_func = _clean_file_url_path
    else:
        clean_func = _clean_url_path_part

    # Split on the reserved characters prior to cleaning so that
    # revision strings in VCS URLs are properly preserved.
    parts = _reserved_chars_re.split(path)

    cleaned_parts = []
    for to_clean, reserved in pairwise(itertools.chain(parts, [''])):
        cleaned_parts.append(clean_func(to_clean))
        # Normalize %xx escapes (e.g. %2f -> %2F)
        cleaned_parts.append(reserved.upper())

    return ''.join(cleaned_parts)


def _clean_link(url: str) -> str:
    """
    Make sure a link is fully quoted.
    For example, if ' ' occurs in the URL, it will be replaced with "%20",
    and without double-quoting other characters.
    """
    # Split the URL into parts according to the general structure
    # `scheme://netloc/path;parameters?query#fragment`.
    result = urllib.parse.urlparse(url)
    # If the netloc is empty, then the URL refers to a local filesystem path.
    is_local_path = not result.netloc
    path = _clean_url_path(result.path, is_local_path=is_local_path)
    return urllib.parse.urlunparse(result._replace(path=path))


def _create_link_from_element(
    anchor: HTMLElement,
    page_url: str,
    base_url: str,
) -> Optional[Link]:
    """
    Convert an anchor element in a simple repository page to a Link.
    """
    href = anchor.get("href")
    if not href:
        return None

    url = _clean_link(urllib.parse.urljoin(base_url, href))
    pyrequire = anchor.get('data-requires-python')
    pyrequire = html.unescape(pyrequire) if pyrequire else None

    yanked_reason = anchor.get('data-yanked')
    if yanked_reason:
        yanked_reason = html.unescape(yanked_reason)

    link = Link(
        url,
        comes_from=page_url,
        requires_python=pyrequire,
        yanked_reason=yanked_reason,
    )

    return link


class CacheablePageContent:
    def __init__(self, page: "HTMLPage") -> None:
    def __init__(self, page: "IndexContent") -> None:
        assert page.cache_link_parsing
        self.page = page

    def __eq__(self, other: object) -> bool:
        return (isinstance(other, type(self)) and
                self.page.url == other.page.url)
        return isinstance(other, type(self)) and self.page.url == other.page.url

    def __hash__(self) -> int:
        return hash(self.page.url)


def with_cached_html_pages(
    fn: Callable[["HTMLPage"], Iterable[Link]],
) -> Callable[["HTMLPage"], List[Link]]:
class ParseLinks(Protocol):
    def __call__(self, page: "IndexContent") -> Iterable[Link]:
        ...


def with_cached_index_content(fn: ParseLinks) -> ParseLinks:
    """
    Given a function that parses an Iterable[Link] from an HTMLPage, cache the
    function's result (keyed by CacheablePageContent), unless the HTMLPage
    Given a function that parses an Iterable[Link] from an IndexContent, cache the
    function's result (keyed by CacheablePageContent), unless the IndexContent
    `page` has `page.cache_link_parsing == False`.
    """

@@ -292,7 +217,7 @@ def with_cached_html_pages(
        return list(fn(cacheable_page.page))

    @functools.wraps(fn)
    def wrapper_wrapper(page: "HTMLPage") -> List[Link]:
    def wrapper_wrapper(page: "IndexContent") -> List[Link]:
        if page.cache_link_parsing:
            return wrapper(CacheablePageContent(page))
        return list(fn(page))
@@ -300,36 +225,42 @@ def with_cached_html_pages(
    return wrapper_wrapper


@with_cached_html_pages
def parse_links(page: "HTMLPage") -> Iterable[Link]:
@with_cached_index_content
def parse_links(page: "IndexContent") -> Iterable[Link]:
    """
    Parse an HTML document, and yield its anchor elements as Link objects.
    Parse a Simple API's Index Content, and yield its anchor elements as Link objects.
    """
    document = html5lib.parse(
        page.content,
        transport_encoding=page.encoding,
        namespaceHTMLElements=False,
    )

    content_type_l = page.content_type.lower()
    if content_type_l.startswith("application/vnd.pypi.simple.v1+json"):
        data = json.loads(page.content)
        for file in data.get("files", []):
            link = Link.from_json(file, page.url)
            if link is None:
                continue
            yield link
        return

    parser = HTMLLinkParser(page.url)
    encoding = page.encoding or "utf-8"
    parser.feed(page.content.decode(encoding))

    url = page.url
    base_url = _determine_base_url(document, url)
    for anchor in document.findall(".//a"):
        link = _create_link_from_element(
            anchor,
            page_url=url,
            base_url=base_url,
        )
    base_url = parser.base_url or url
    for anchor in parser.anchors:
        link = Link.from_element(anchor, page_url=url, base_url=base_url)
        if link is None:
            continue
        yield link

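The JSON branch of parse_links above maps directly onto PEP 691's file objects. A rough standalone sketch of fetching and walking one index page (assumes the requests package and a live index URL; illustrative only, not part of this commit):

import json
import requests

accept = ", ".join(
    [
        "application/vnd.pypi.simple.v1+json",
        "application/vnd.pypi.simple.v1+html; q=0.1",
        "text/html; q=0.01",
    ]
)
resp = requests.get("https://pypi.org/simple/pip/", headers={"Accept": accept})
if resp.headers.get("Content-Type", "").startswith("application/vnd.pypi.simple.v1+json"):
    # Each PEP 691 file object carries at least "filename" and "url".
    for file in json.loads(resp.content).get("files", []):
        print(file["filename"], file["url"])
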
class HTMLPage:
    """Represents one page, along with its URL"""
class IndexContent:
    """Represents one response (or page), along with its URL"""

    def __init__(
        self,
        content: bytes,
        content_type: str,
        encoding: Optional[str],
        url: str,
        cache_link_parsing: bool = True,
@@ -342,6 +273,7 @@ class HTMLPage:
            have this set to False, for example.
        """
        self.content = content
        self.content_type = content_type
        self.encoding = encoding
        self.url = url
        self.cache_link_parsing = cache_link_parsing
@@ -350,80 +282,115 @@ class HTMLPage:
        return redact_auth_from_url(self.url)


def _handle_get_page_fail(
class HTMLLinkParser(HTMLParser):
    """
    HTMLParser that keeps the first base HREF and a list of all anchor
    elements' attributes.
    """

    def __init__(self, url: str) -> None:
        super().__init__(convert_charrefs=True)

        self.url: str = url
        self.base_url: Optional[str] = None
        self.anchors: List[Dict[str, Optional[str]]] = []

    def handle_starttag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None:
        if tag == "base" and self.base_url is None:
            href = self.get_href(attrs)
            if href is not None:
                self.base_url = href
        elif tag == "a":
            self.anchors.append(dict(attrs))

    def get_href(self, attrs: List[Tuple[str, Optional[str]]]) -> Optional[str]:
        for name, value in attrs:
            if name == "href":
                return value
        return None

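HTMLLinkParser replaces the vendored html5lib parse; with convert_charrefs=True, entity references arrive already unescaped. A quick check of the class as defined above (illustrative; the URLs are placeholders):

parser = HTMLLinkParser("https://example.invalid/simple/demo/")
parser.feed('<base href="https://files.invalid/"><a href="demo-1.0.tar.gz">demo</a>')
print(parser.base_url)  # https://files.invalid/
print(parser.anchors)   # [{'href': 'demo-1.0.tar.gz'}]
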
def _handle_get_simple_fail(
    link: Link,
    reason: Union[str, Exception],
    meth: Optional[Callable[..., None]] = None
    meth: Optional[Callable[..., None]] = None,
) -> None:
    if meth is None:
        meth = logger.debug
    meth("Could not fetch URL %s: %s - skipping", link, reason)


def _make_html_page(response: Response, cache_link_parsing: bool = True) -> HTMLPage:
def _make_index_content(
    response: Response, cache_link_parsing: bool = True
) -> IndexContent:
    encoding = _get_encoding_from_headers(response.headers)
    return HTMLPage(
    return IndexContent(
        response.content,
        response.headers["Content-Type"],
        encoding=encoding,
        url=response.url,
        cache_link_parsing=cache_link_parsing)
        cache_link_parsing=cache_link_parsing,
    )


def _get_html_page(
    link: Link, session: Optional[PipSession] = None
) -> Optional["HTMLPage"]:
    if session is None:
        raise TypeError(
            "_get_html_page() missing 1 required keyword argument: 'session'"
        )

    url = link.url.split('#', 1)[0]
def _get_index_content(link: Link, *, session: PipSession) -> Optional["IndexContent"]:
    url = link.url.split("#", 1)[0]

    # Check for VCS schemes that do not support lookup as web pages.
    vcs_scheme = _match_vcs_scheme(url)
    if vcs_scheme:
        logger.warning('Cannot look at %s URL %s because it does not support '
                       'lookup as web pages.', vcs_scheme, link)
        logger.warning(
            "Cannot look at %s URL %s because it does not support lookup as web pages.",
            vcs_scheme,
            link,
        )
        return None

    # Tack index.html onto file:// URLs that point to directories
    scheme, _, path, _, _, _ = urllib.parse.urlparse(url)
    if (scheme == 'file' and os.path.isdir(urllib.request.url2pathname(path))):
    if scheme == "file" and os.path.isdir(urllib.request.url2pathname(path)):
        # add trailing slash if not present so urljoin doesn't trim
        # final segment
        if not url.endswith('/'):
            url += '/'
        url = urllib.parse.urljoin(url, 'index.html')
        logger.debug(' file: URL is directory, getting %s', url)
        if not url.endswith("/"):
            url += "/"
        # TODO: In the future, it would be nice if pip supported PEP 691
        #       style responses in the file:// URLs, however there's no
        #       standard file extension for application/vnd.pypi.simple.v1+json
        #       so we'll need to come up with something on our own.
        url = urllib.parse.urljoin(url, "index.html")
        logger.debug(" file: URL is directory, getting %s", url)

    try:
        resp = _get_html_response(url, session=session)
        resp = _get_simple_response(url, session=session)
    except _NotHTTP:
        logger.warning(
            'Skipping page %s because it looks like an archive, and cannot '
            'be checked by a HTTP HEAD request.', link,
            "Skipping page %s because it looks like an archive, and cannot "
            "be checked by a HTTP HEAD request.",
            link,
        )
    except _NotHTML as exc:
    except _NotAPIContent as exc:
        logger.warning(
            'Skipping page %s because the %s request got Content-Type: %s.'
            'The only supported Content-Type is text/html',
            link, exc.request_desc, exc.content_type,
            "Skipping page %s because the %s request got Content-Type: %s. "
            "The only supported Content-Types are application/vnd.pypi.simple.v1+json, "
            "application/vnd.pypi.simple.v1+html, and text/html",
            link,
            exc.request_desc,
            exc.content_type,
        )
    except NetworkConnectionError as exc:
        _handle_get_page_fail(link, exc)
        _handle_get_simple_fail(link, exc)
    except RetryError as exc:
        _handle_get_page_fail(link, exc)
        _handle_get_simple_fail(link, exc)
    except SSLError as exc:
        reason = "There was a problem confirming the ssl certificate: "
        reason += str(exc)
        _handle_get_page_fail(link, reason, meth=logger.info)
        _handle_get_simple_fail(link, reason, meth=logger.info)
    except requests.ConnectionError as exc:
        _handle_get_page_fail(link, f"connection error: {exc}")
        _handle_get_simple_fail(link, f"connection error: {exc}")
    except requests.Timeout:
        _handle_get_page_fail(link, "timed out")
        _handle_get_simple_fail(link, "timed out")
    else:
        return _make_html_page(resp,
                               cache_link_parsing=link.cache_link_parsing)
        return _make_index_content(resp, cache_link_parsing=link.cache_link_parsing)
    return None


@@ -451,9 +418,10 @@ class LinkCollector:

    @classmethod
    def create(
        cls, session: PipSession,
        cls,
        session: PipSession,
        options: Values,
        suppress_no_index: bool = False
        suppress_no_index: bool = False,
    ) -> "LinkCollector":
        """
        :param session: The Session to use to make requests.
@@ -463,8 +431,8 @@ class LinkCollector:
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index and not suppress_no_index:
            logger.debug(
                'Ignoring indexes: %s',
                ','.join(redact_auth_from_url(url) for url in index_urls),
                "Ignoring indexes: %s",
                ",".join(redact_auth_from_url(url) for url in index_urls),
            )
            index_urls = []

@@ -472,10 +440,13 @@ class LinkCollector:
        find_links = options.find_links or []

        search_scope = SearchScope.create(
            find_links=find_links, index_urls=index_urls,
            find_links=find_links,
            index_urls=index_urls,
            no_index=options.no_index,
        )
        link_collector = LinkCollector(
            session=session, search_scope=search_scope,
            session=session,
            search_scope=search_scope,
        )
        return link_collector

@@ -483,11 +454,11 @@ class LinkCollector:
    def find_links(self) -> List[str]:
        return self.search_scope.find_links

    def fetch_page(self, location: Link) -> Optional[HTMLPage]:
    def fetch_response(self, location: Link) -> Optional[IndexContent]:
        """
        Fetch an HTML page containing package links.
        """
        return _get_html_page(location, session=self.session)
        return _get_index_content(location, session=self.session)

    def collect_sources(
        self,

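A side note on the signature change above: the old _get_html_page simulated a required keyword argument with a runtime TypeError, while the new _get_index_content gets the same guarantee from the bare * marker. A toy illustration of the difference (names are hypothetical):

def fetch(url, *, session):
    # 'session' must be passed by keyword; Python enforces this at call time.
    return (url, session)

fetch("https://example.invalid/simple/", session=object())  # ok
# fetch("https://example.invalid/simple/", object())  # TypeError
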
@@ -1,13 +1,11 @@
"""Routines related to PyPI, indexes"""

# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

import enum
import functools
import itertools
import logging
import re
from typing import FrozenSet, Iterable, List, Optional, Set, Tuple, Union
from typing import TYPE_CHECKING, FrozenSet, Iterable, List, Optional, Set, Tuple, Union

from pip._vendor.packaging import specifiers
from pip._vendor.packaging.tags import Tag
@@ -37,17 +35,17 @@ from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import build_netloc
from pip._internal.utils.packaging import check_requires_python
from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS
from pip._internal.utils.urls import url_to_path

__all__ = ['FormatControl', 'BestCandidateResult', 'PackageFinder']
if TYPE_CHECKING:
    from pip._vendor.typing_extensions import TypeGuard

__all__ = ["FormatControl", "BestCandidateResult", "PackageFinder"]


logger = getLogger(__name__)

BuildTag = Union[Tuple[()], Tuple[int, str]]
CandidateSortingKey = (
    Tuple[int, int, int, _BaseVersion, Optional[int], BuildTag]
)
CandidateSortingKey = Tuple[int, int, int, _BaseVersion, Optional[int], BuildTag]


def _check_link_requires_python(
@@ -66,39 +64,54 @@ def _check_link_requires_python(
    """
    try:
        is_compatible = check_requires_python(
            link.requires_python, version_info=version_info,
            link.requires_python,
            version_info=version_info,
        )
    except specifiers.InvalidSpecifier:
        logger.debug(
            "Ignoring invalid Requires-Python (%r) for link: %s",
            link.requires_python, link,
            link.requires_python,
            link,
        )
    else:
        if not is_compatible:
            version = '.'.join(map(str, version_info))
            version = ".".join(map(str, version_info))
            if not ignore_requires_python:
                logger.verbose(
                    'Link requires a different Python (%s not in: %r): %s',
                    version, link.requires_python, link,
                    "Link requires a different Python (%s not in: %r): %s",
                    version,
                    link.requires_python,
                    link,
                )
                return False

            logger.debug(
                'Ignoring failed Requires-Python check (%s not in: %r) '
                'for link: %s',
                version, link.requires_python, link,
                "Ignoring failed Requires-Python check (%s not in: %r) for link: %s",
                version,
                link.requires_python,
                link,
            )

    return True


class LinkType(enum.Enum):
    candidate = enum.auto()
    different_project = enum.auto()
    yanked = enum.auto()
    format_unsupported = enum.auto()
    format_invalid = enum.auto()
    platform_mismatch = enum.auto()
    requires_python_mismatch = enum.auto()

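The enum above replaces the old (bool, Optional[str]) return protocol, so every rejection now carries a machine-readable reason. A toy standalone consumer of the new shape (hypothetical names; the real evaluate_link follows below):

import enum
from typing import Tuple

class Verdict(enum.Enum):
    candidate = enum.auto()
    yanked = enum.auto()

def evaluate(name: str) -> Tuple[Verdict, str]:
    # Detail is a version string for candidates, a human-readable reason otherwise.
    if name.endswith(".whl"):
        return (Verdict.candidate, "23.1.2")
    return (Verdict.yanked, "yanked for reason: <none given>")

result, detail = evaluate("pip-23.1.2-py3-none-any.whl")
assert result is Verdict.candidate and detail == "23.1.2"
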
class LinkEvaluator:

    """
    Responsible for evaluating links for a particular project.
    """

    _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')
    _py_version_re = re.compile(r"-py([123]\.?[0-9]?)$")

    # Don't include an allow_yanked default value to make sure each call
    # site considers whether yanked releases are allowed. This also causes
@@ -141,19 +154,20 @@ class LinkEvaluator:

        self.project_name = project_name

    def evaluate_link(self, link: Link) -> Tuple[bool, Optional[str]]:
    def evaluate_link(self, link: Link) -> Tuple[LinkType, str]:
        """
        Determine whether a link is a candidate for installation.

        :return: A tuple (is_candidate, result), where `result` is (1) a
            version string if `is_candidate` is True, and (2) if
            `is_candidate` is False, an optional string to log the reason
            the link fails to qualify.
        :return: A tuple (result, detail), where *result* is an enum
            representing whether the evaluation found a candidate, or the reason
            why one is not found. If a candidate is found, *detail* will be the
            candidate's version string; if one is not found, it contains the
            reason the link fails to qualify.
        """
        version = None
        if link.is_yanked and not self._allow_yanked:
            reason = link.yanked_reason or '<none given>'
            return (False, f'yanked for reason: {reason}')
            reason = link.yanked_reason or "<none given>"
            return (LinkType.yanked, f"yanked for reason: {reason}")

        if link.egg_fragment:
            egg_info = link.egg_fragment
@@ -161,77 +175,83 @@ class LinkEvaluator:
        else:
            egg_info, ext = link.splitext()
            if not ext:
                return (False, 'not a file')
                return (LinkType.format_unsupported, "not a file")
            if ext not in SUPPORTED_EXTENSIONS:
                return (False, f'unsupported archive format: {ext}')
                return (
                    LinkType.format_unsupported,
                    f"unsupported archive format: {ext}",
                )
            if "binary" not in self._formats and ext == WHEEL_EXTENSION:
                reason = 'No binaries permitted for {}'.format(
                    self.project_name)
                return (False, reason)
            if "macosx10" in link.path and ext == '.zip':
                return (False, 'macosx10 one')
                reason = f"No binaries permitted for {self.project_name}"
                return (LinkType.format_unsupported, reason)
            if "macosx10" in link.path and ext == ".zip":
                return (LinkType.format_unsupported, "macosx10 one")
            if ext == WHEEL_EXTENSION:
                try:
                    wheel = Wheel(link.filename)
                except InvalidWheelFilename:
                    return (False, 'invalid wheel filename')
                    return (
                        LinkType.format_invalid,
                        "invalid wheel filename",
                    )
                if canonicalize_name(wheel.name) != self._canonical_name:
                    reason = 'wrong project name (not {})'.format(
                        self.project_name)
                    return (False, reason)
                    reason = f"wrong project name (not {self.project_name})"
                    return (LinkType.different_project, reason)

                supported_tags = self._target_python.get_tags()
                if not wheel.supported(supported_tags):
                    # Include the wheel's tags in the reason string to
                    # simplify troubleshooting compatibility issues.
                    file_tags = wheel.get_formatted_file_tags()
                    file_tags = ", ".join(wheel.get_formatted_file_tags())
                    reason = (
                        "none of the wheel's tags ({}) are compatible "
                        "(run pip debug --verbose to show compatible tags)".format(
                            ', '.join(file_tags)
                        )
                        f"none of the wheel's tags ({file_tags}) are compatible "
                        f"(run pip debug --verbose to show compatible tags)"
                    )
                    return (False, reason)
                    return (LinkType.platform_mismatch, reason)

                version = wheel.version

        # This should be up by the self.ok_binary check, but see issue 2700.
        if "source" not in self._formats and ext != WHEEL_EXTENSION:
            reason = f'No sources permitted for {self.project_name}'
            return (False, reason)
            reason = f"No sources permitted for {self.project_name}"
            return (LinkType.format_unsupported, reason)

        if not version:
            version = _extract_version_from_fragment(
                egg_info, self._canonical_name,
                egg_info,
                self._canonical_name,
            )
        if not version:
            reason = f'Missing project version for {self.project_name}'
            return (False, reason)
            reason = f"Missing project version for {self.project_name}"
            return (LinkType.format_invalid, reason)

        match = self._py_version_re.search(version)
        if match:
            version = version[:match.start()]
            version = version[: match.start()]
            py_version = match.group(1)
            if py_version != self._target_python.py_version:
                return (False, 'Python version is incorrect')
                return (
                    LinkType.platform_mismatch,
                    "Python version is incorrect",
                )

        supports_python = _check_link_requires_python(
            link, version_info=self._target_python.py_version_info,
            link,
            version_info=self._target_python.py_version_info,
            ignore_requires_python=self._ignore_requires_python,
        )
        if not supports_python:
            # Return None for the reason text to suppress calling
            # _log_skipped_link().
            return (False, None)
            reason = f"{version} Requires-Python {link.requires_python}"
            return (LinkType.requires_python_mismatch, reason)

        logger.debug('Found link %s, version: %s', link, version)
        logger.debug("Found link %s, version: %s", link, version)

        return (True, version)
        return (LinkType.candidate, version)


def filter_unallowed_hashes(
    candidates: List[InstallationCandidate],
    hashes: Hashes,
    hashes: Optional[Hashes],
    project_name: str,
) -> List[InstallationCandidate]:
    """
@@ -251,8 +271,8 @@ def filter_unallowed_hashes(
    """
    if not hashes:
        logger.debug(
            'Given no hashes to check %s links for project %r: '
            'discarding no candidates',
            "Given no hashes to check %s links for project %r: "
            "discarding no candidates",
            len(candidates),
            project_name,
        )
@@ -282,22 +302,22 @@ def filter_unallowed_hashes(
    filtered = list(candidates)

    if len(filtered) == len(candidates):
        discard_message = 'discarding no candidates'
        discard_message = "discarding no candidates"
    else:
        discard_message = 'discarding {} non-matches:\n  {}'.format(
        discard_message = "discarding {} non-matches:\n  {}".format(
            len(non_matches),
            '\n  '.join(str(candidate.link) for candidate in non_matches)
            "\n  ".join(str(candidate.link) for candidate in non_matches),
        )

    logger.debug(
        'Checked %s links for project %r against %s hashes '
        '(%s matches, %s no digest): %s',
        "Checked %s links for project %r against %s hashes "
        "(%s matches, %s no digest): %s",
        len(candidates),
        project_name,
        hashes.digest_count,
        match_count,
        len(matches_or_no_digest) - match_count,
        discard_message
        discard_message,
    )

    return filtered
@@ -354,13 +374,11 @@ class BestCandidateResult:
        self.best_candidate = best_candidate

    def iter_all(self) -> Iterable[InstallationCandidate]:
        """Iterate through all candidates.
        """
        """Iterate through all candidates."""
        return iter(self._candidates)

    def iter_applicable(self) -> Iterable[InstallationCandidate]:
        """Iterate through the applicable candidates.
        """
        """Iterate through the applicable candidates."""
        return iter(self._applicable_candidates)


@@ -444,7 +462,8 @@ class CandidateEvaluator:
        allow_prereleases = self._allow_all_prereleases or None
        specifier = self._specifier
        versions = {
            str(v) for v in specifier.filter(
            str(v)
            for v in specifier.filter(
                # We turn the version object into a str here because otherwise
                # when we're debundled but setuptools isn't, Python will see
                # packaging.version.Version and
@@ -458,9 +477,7 @@ class CandidateEvaluator:
        }

        # Again, converting version to str to deal with debundling.
        applicable_candidates = [
            c for c in candidates if str(c.version) in versions
        ]
        applicable_candidates = [c for c in candidates if str(c.version) in versions]

        filtered_applicable_candidates = filter_unallowed_hashes(
            candidates=applicable_candidates,
@@ -509,9 +526,11 @@ class CandidateEvaluator:
        # can raise InvalidWheelFilename
        wheel = Wheel(link.filename)
        try:
            pri = -(wheel.find_most_preferred_tag(
                valid_tags, self._wheel_tag_preferences
            ))
            pri = -(
                wheel.find_most_preferred_tag(
                    valid_tags, self._wheel_tag_preferences
                )
            )
        except ValueError:
            raise UnsupportedWheel(
                "{} is not a supported wheel for this platform. It "
@@ -520,7 +539,8 @@ class CandidateEvaluator:
        if self._prefer_binary:
            binary_preference = 1
        if wheel.build_tag is not None:
            match = re.match(r'^(\d+)(.*)$', wheel.build_tag)
            match = re.match(r"^(\d+)(.*)$", wheel.build_tag)
            assert match is not None, "guaranteed by filename validation"
            build_tag_groups = match.groups()
            build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
        else:  # sdist
@@ -528,8 +548,12 @@ class CandidateEvaluator:
        has_allowed_hash = int(link.is_hash_allowed(self._hashes))
        yank_value = -1 * int(link.is_yanked)  # -1 for yanked.
        return (
            has_allowed_hash, yank_value, binary_preference, candidate.version,
            pri, build_tag,
            has_allowed_hash,
            yank_value,
            binary_preference,
            candidate.version,
            pri,
            build_tag,
        )

    def sort_best_candidate(
@@ -603,7 +627,7 @@ class PackageFinder:
        self.format_control = format_control

        # These are boring links that have already been logged somehow.
        self._logged_links: Set[Link] = set()
        self._logged_links: Set[Tuple[Link, LinkType, str]] = set()

        # Don't include an allow_yanked default value to make sure each call
        # site considers whether yanked releases are allowed. This also causes
@@ -680,6 +704,14 @@ class PackageFinder:
    def set_prefer_binary(self) -> None:
        self._candidate_prefs.prefer_binary = True

    def requires_python_skipped_reasons(self) -> List[str]:
        reasons = {
            detail
            for _, result, detail in self._logged_links
            if result == LinkType.requires_python_mismatch
        }
        return sorted(reasons)

    def make_link_evaluator(self, project_name: str) -> LinkEvaluator:
        canonical_name = canonicalize_name(project_name)
        formats = self.format_control.get_allowed_formats(canonical_name)
@@ -709,12 +741,13 @@ class PackageFinder:
            no_eggs.append(link)
        return no_eggs + eggs

    def _log_skipped_link(self, link: Link, reason: str) -> None:
        if link not in self._logged_links:
    def _log_skipped_link(self, link: Link, result: LinkType, detail: str) -> None:
        entry = (link, result, detail)
        if entry not in self._logged_links:
            # Put the link at the end so the reason is more visible and because
            # the link string is usually very long.
            logger.debug('Skipping link: %s: %s', reason, link)
            self._logged_links.add(link)
            logger.debug("Skipping link: %s: %s", detail, link)
            self._logged_links.add(entry)

    def get_install_candidate(
        self, link_evaluator: LinkEvaluator, link: Link
@@ -723,16 +756,15 @@ class PackageFinder:
        If the link is a candidate for install, convert it to an
        InstallationCandidate and return it. Otherwise, return None.
        """
        is_candidate, result = link_evaluator.evaluate_link(link)
        if not is_candidate:
            if result:
                self._log_skipped_link(link, reason=result)
        result, detail = link_evaluator.evaluate_link(link)
        if result != LinkType.candidate:
            self._log_skipped_link(link, result, detail)
            return None

        return InstallationCandidate(
            name=link_evaluator.project_name,
            link=link,
            version=result,
            version=detail,
        )

    def evaluate_links(
@@ -753,13 +785,14 @@ class PackageFinder:
        self, project_url: Link, link_evaluator: LinkEvaluator
    ) -> List[InstallationCandidate]:
        logger.debug(
            'Fetching project page and analyzing links: %s', project_url,
            "Fetching project page and analyzing links: %s",
            project_url,
        )
        html_page = self._link_collector.fetch_page(project_url)
        if html_page is None:
        index_response = self._link_collector.fetch_response(project_url)
        if index_response is None:
            return []

        page_links = list(parse_links(html_page))
        page_links = list(parse_links(index_response))

        with indent_log():
            package_links = self.evaluate_links(
@@ -809,7 +842,14 @@ class PackageFinder:
        )

        if logger.isEnabledFor(logging.DEBUG) and file_candidates:
            paths = [url_to_path(c.link.url) for c in file_candidates]
            paths = []
            for candidate in file_candidates:
                assert candidate.link.url  # we need to have a URL
                try:
                    paths.append(candidate.link.file_path)
                except Exception:
                    paths.append(candidate.link.url)  # it's not a local file

            logger.debug("Local files found: %s", ", ".join(paths))

        # This is an intentional priority ordering
@@ -821,8 +861,7 @@ class PackageFinder:
        specifier: Optional[specifiers.BaseSpecifier] = None,
        hashes: Optional[Hashes] = None,
    ) -> CandidateEvaluator:
        """Create a CandidateEvaluator object to use.
        """
        """Create a CandidateEvaluator object to use."""
        candidate_prefs = self._candidate_prefs
        return CandidateEvaluator.create(
            project_name=project_name,
@@ -867,75 +906,83 @@ class PackageFinder:
        """
        hashes = req.hashes(trust_internet=False)
        best_candidate_result = self.find_best_candidate(
            req.name, specifier=req.specifier, hashes=hashes,
            req.name,
            specifier=req.specifier,
            hashes=hashes,
        )
        best_candidate = best_candidate_result.best_candidate

        installed_version: Optional[_BaseVersion] = None
        if req.satisfied_by is not None:
            installed_version = parse_version(req.satisfied_by.version)
            installed_version = req.satisfied_by.version

        def _format_versions(cand_iter: Iterable[InstallationCandidate]) -> str:
            # This repeated parse_version and str() conversion is needed to
            # handle different vendoring sources from pip and pkg_resources.
            # If we stop using the pkg_resources provided specifier and start
            # using our own, we can drop the cast to str().
            return ", ".join(sorted(
                {str(c.version) for c in cand_iter},
                key=parse_version,
            )) or "none"
            return (
                ", ".join(
                    sorted(
                        {str(c.version) for c in cand_iter},
                        key=parse_version,
                    )
                )
                or "none"
            )

        if installed_version is None and best_candidate is None:
            logger.critical(
                'Could not find a version that satisfies the requirement %s '
                '(from versions: %s)',
                "Could not find a version that satisfies the requirement %s "
                "(from versions: %s)",
                req,
                _format_versions(best_candidate_result.iter_all()),
            )

            raise DistributionNotFound(
                'No matching distribution found for {}'.format(
                    req)
                "No matching distribution found for {}".format(req)
            )

        best_installed = False
        if installed_version and (
                best_candidate is None or
                best_candidate.version <= installed_version):
            best_installed = True
        def _should_install_candidate(
            candidate: Optional[InstallationCandidate],
        ) -> "TypeGuard[InstallationCandidate]":
            if installed_version is None:
                return True
            if best_candidate is None:
                return False
            return best_candidate.version > installed_version

        if not upgrade and installed_version is not None:
            if best_installed:
            if _should_install_candidate(best_candidate):
                logger.debug(
                    'Existing installed version (%s) is most up-to-date and '
                    'satisfies requirement',
                    installed_version,
                )
            else:
                logger.debug(
                    'Existing installed version (%s) satisfies requirement '
                    '(most up-to-date version is %s)',
                    "Existing installed version (%s) satisfies requirement "
                    "(most up-to-date version is %s)",
                    installed_version,
                    best_candidate.version,
                )
            else:
                logger.debug(
                    "Existing installed version (%s) is most up-to-date and "
                    "satisfies requirement",
                    installed_version,
                )
            return None

        if best_installed:
            # We have an existing version, and it's the best version
        if _should_install_candidate(best_candidate):
            logger.debug(
                'Installed version (%s) is most up-to-date (past versions: '
                '%s)',
                installed_version,
                "Using version %s (newest of versions: %s)",
                best_candidate.version,
                _format_versions(best_candidate_result.iter_applicable()),
            )
            raise BestVersionAlreadyInstalled
            return best_candidate

        # We have an existing version, and it's the best version
        logger.debug(
            'Using version %s (newest of versions: %s)',
            best_candidate.version,
            "Installed version (%s) is most up-to-date (past versions: %s)",
            installed_version,
            _format_versions(best_candidate_result.iter_applicable()),
        )
        return best_candidate
        raise BestVersionAlreadyInstalled


def _find_name_version_sep(fragment: str, canonical_name: str) -> int:

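_should_install_candidate above returns a TypeGuard so that, on the True branch, type checkers narrow the Optional candidate to a concrete InstallationCandidate. A self-contained illustration of the pattern (uses typing_extensions; on Python 3.10+ typing.TypeGuard works the same):

from typing import Optional

from typing_extensions import TypeGuard

def is_present(value: Optional[str]) -> TypeGuard[str]:
    return value is not None

maybe_version: Optional[str] = "23.1.2"
if is_present(maybe_version):
    # Checkers treat maybe_version as str here; no Optional unwrapping needed.
    print(maybe_version.upper())
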
@@ -171,7 +171,6 @@ def build_source(
    expand_dir: bool,
    cache_link_parsing: bool,
) -> Tuple[Optional[str], Optional[LinkSource]]:

    path: Optional[str] = None
    url: Optional[str] = None
    if os.path.exists(location):  # Is a local path.

@@ -4,14 +4,14 @@ import os
|
||||
import pathlib
|
||||
import sys
|
||||
import sysconfig
|
||||
from typing import Any, Dict, Iterator, List, Optional, Tuple
|
||||
from typing import Any, Dict, Generator, Optional, Tuple
|
||||
|
||||
from pip._internal.models.scheme import SCHEME_KEYS, Scheme
|
||||
from pip._internal.utils.compat import WINDOWS
|
||||
from pip._internal.utils.deprecation import deprecated
|
||||
from pip._internal.utils.virtualenv import running_under_virtualenv
|
||||
|
||||
from . import _distutils, _sysconfig
|
||||
from . import _sysconfig
|
||||
from .base import (
|
||||
USER_CACHE_DIR,
|
||||
get_major_minor_version,
|
||||
@@ -27,7 +27,6 @@ __all__ = [
|
||||
"get_bin_user",
|
||||
"get_major_minor_version",
|
||||
"get_platlib",
|
||||
"get_prefixed_libs",
|
||||
"get_purelib",
|
||||
"get_scheme",
|
||||
"get_src_prefix",
|
||||
@@ -38,20 +37,48 @@ __all__ = [
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
if os.environ.get("_PIP_LOCATIONS_NO_WARN_ON_MISMATCH"):
|
||||
_MISMATCH_LEVEL = logging.DEBUG
|
||||
else:
|
||||
_MISMATCH_LEVEL = logging.WARNING
|
||||
|
||||
_PLATLIBDIR: str = getattr(sys, "platlibdir", "lib")
|
||||
|
||||
_USE_SYSCONFIG_DEFAULT = sys.version_info >= (3, 10)
|
||||
|
||||
|
||||
def _should_use_sysconfig() -> bool:
|
||||
"""This function determines the value of _USE_SYSCONFIG.
|
||||
|
||||
By default, pip uses sysconfig on Python 3.10+.
|
||||
But Python distributors can override this decision by setting:
|
||||
sysconfig._PIP_USE_SYSCONFIG = True / False
|
||||
Rationale in https://github.com/pypa/pip/issues/10647
|
||||
|
||||
This is a function for testability, but should be constant during any one
|
||||
run.
|
||||
"""
|
||||
return bool(getattr(sysconfig, "_PIP_USE_SYSCONFIG", _USE_SYSCONFIG_DEFAULT))
|
||||
|
||||
|
||||
_USE_SYSCONFIG = _should_use_sysconfig()
|
||||
|
||||
if not _USE_SYSCONFIG:
|
||||
# Import distutils lazily to avoid deprecation warnings,
|
||||
# but import it soon enough that it is in memory and available during
|
||||
# a pip reinstall.
|
||||
from . import _distutils
|
||||
|
||||
# Be noisy about incompatibilities if this platforms "should" be using
|
||||
# sysconfig, but is explicitly opting out and using distutils instead.
|
||||
if _USE_SYSCONFIG_DEFAULT and not _USE_SYSCONFIG:
|
||||
_MISMATCH_LEVEL = logging.WARNING
|
||||
else:
|
||||
_MISMATCH_LEVEL = logging.DEBUG
|
||||
|
||||
|
||||
def _looks_like_bpo_44860() -> bool:
|
||||
"""The resolution to bpo-44860 will change this incorrect platlib.
|
||||
|
||||
See <https://bugs.python.org/issue44860>.
|
||||
"""
|
||||
from distutils.command.install import INSTALL_SCHEMES # type: ignore
|
||||
from distutils.command.install import INSTALL_SCHEMES
|
||||
|
||||
try:
|
||||
unix_user_platlib = INSTALL_SCHEMES["unix_user"]["platlib"]
|
||||
@@ -62,6 +89,8 @@ def _looks_like_bpo_44860() -> bool:
|
||||
|
||||
def _looks_like_red_hat_patched_platlib_purelib(scheme: Dict[str, str]) -> bool:
|
||||
platlib = scheme["platlib"]
|
||||
if "/$platlibdir/" in platlib:
|
||||
platlib = platlib.replace("/$platlibdir/", f"/{_PLATLIBDIR}/")
|
||||
if "/lib64/" not in platlib:
|
||||
return False
|
||||
unpatched = platlib.replace("/lib64/", "/lib/")
|
||||
@@ -74,7 +103,7 @@ def _looks_like_red_hat_lib() -> bool:
|
||||
|
||||
This is the only way I can see to tell a Red Hat-patched Python.
|
||||
"""
|
||||
from distutils.command.install import INSTALL_SCHEMES # type: ignore
|
||||
from distutils.command.install import INSTALL_SCHEMES
|
||||
|
||||
return all(
|
||||
k in INSTALL_SCHEMES
|
||||
@@ -86,7 +115,7 @@ def _looks_like_red_hat_lib() -> bool:
|
||||
@functools.lru_cache(maxsize=None)
|
||||
def _looks_like_debian_scheme() -> bool:
|
||||
"""Debian adds two additional schemes."""
|
||||
from distutils.command.install import INSTALL_SCHEMES # type: ignore
|
||||
from distutils.command.install import INSTALL_SCHEMES
|
||||
|
||||
return "deb_system" in INSTALL_SCHEMES and "unix_local" in INSTALL_SCHEMES
|
||||
|
||||
@@ -111,6 +140,22 @@ def _looks_like_red_hat_scheme() -> bool:
|
||||
)
|
||||
|
||||
|
||||
@functools.lru_cache(maxsize=None)
|
||||
def _looks_like_slackware_scheme() -> bool:
|
||||
"""Slackware patches sysconfig but fails to patch distutils and site.
|
||||
|
||||
Slackware changes sysconfig's user scheme to use ``"lib64"`` for the lib
|
||||
path, but does not do the same to the site module.
|
||||
"""
|
||||
if user_site is None: # User-site not available.
|
||||
return False
|
||||
try:
|
||||
paths = sysconfig.get_paths(scheme="posix_user", expand=False)
|
||||
except KeyError: # User-site not available.
|
||||
return False
|
||||
return "/lib64/" in paths["purelib"] and "/lib64/" not in user_site
|
||||
|
||||
|
||||
@functools.lru_cache(maxsize=None)
|
||||
def _looks_like_msys2_mingw_scheme() -> bool:
|
||||
"""MSYS2 patches distutils and sysconfig to use a UNIX-like scheme.
|
||||
@@ -129,9 +174,9 @@ def _looks_like_msys2_mingw_scheme() -> bool:
|
||||
)
|
||||
|
||||
|
||||
def _fix_abiflags(parts: Tuple[str]) -> Iterator[str]:
|
||||
def _fix_abiflags(parts: Tuple[str]) -> Generator[str, None, None]:
|
||||
ldversion = sysconfig.get_config_var("LDVERSION")
|
||||
abiflags: str = getattr(sys, "abiflags", None)
|
||||
abiflags = getattr(sys, "abiflags", None)
|
||||
|
||||
# LDVERSION does not end with sys.abiflags. Just return the path unchanged.
|
||||
if not ldversion or not abiflags or not ldversion.endswith(abiflags):
|
||||
@@ -190,7 +235,7 @@ def get_scheme(
|
||||
isolated: bool = False,
|
||||
prefix: Optional[str] = None,
|
||||
) -> Scheme:
|
||||
old = _distutils.get_scheme(
|
||||
new = _sysconfig.get_scheme(
|
||||
dist_name,
|
||||
user=user,
|
||||
home=home,
|
||||
@@ -198,7 +243,10 @@ def get_scheme(
|
||||
isolated=isolated,
|
||||
prefix=prefix,
|
||||
)
|
||||
new = _sysconfig.get_scheme(
|
||||
if _USE_SYSCONFIG:
|
||||
return new
|
||||
|
||||
old = _distutils.get_scheme(
|
||||
dist_name,
|
||||
user=user,
|
||||
home=home,
|
||||
@@ -263,6 +311,17 @@ def get_scheme(
|
||||
if skip_bpo_44860:
|
||||
continue
|
||||
|
||||
        # Slackware incorrectly patches posix_user to use lib64 instead of lib,
        # but not usersite to match the location.
        skip_slackware_user_scheme = (
            user
            and k in ("platlib", "purelib")
            and not WINDOWS
            and _looks_like_slackware_scheme()
        )
        if skip_slackware_user_scheme:
            continue

        # Both Debian and Red Hat patch Python to place the system site under
        # /usr/local instead of /usr. Debian also places lib in dist-packages
        # instead of site-packages, but the /usr/local check should cover it.
@@ -296,6 +355,18 @@ def get_scheme(
        if skip_msys2_mingw_bug:
            continue

        # CPython's POSIX install script invokes pip (via ensurepip) against the
        # interpreter located in the source tree, not the install site. This
        # triggers special logic in sysconfig that's not present in distutils.
        # https://github.com/python/cpython/blob/8c21941ddaf/Lib/sysconfig.py#L178-L194
        skip_cpython_build = (
            sysconfig.is_python_build(check_home=True)
            and not WINDOWS
            and k in ("headers", "include", "platinclude")
        )
        if skip_cpython_build:
            continue

        warning_contexts.append((old_v, new_v, f"scheme.{k}"))

    if not warning_contexts:
@@ -315,10 +386,12 @@ def get_scheme(
    )
    if any(default_old[k] != getattr(old, k) for k in SCHEME_KEYS):
        deprecated(
            "Configuring installation scheme with distutils config files "
            "is deprecated and will no longer work in the near future. If you "
            "are using a Homebrew or Linuxbrew Python, please see discussion "
            "at https://github.com/Homebrew/homebrew-core/issues/76621",
            reason=(
                "Configuring installation scheme with distutils config files "
                "is deprecated and will no longer work in the near future. If you "
                "are using a Homebrew or Linuxbrew Python, please see discussion "
                "at https://github.com/Homebrew/homebrew-core/issues/76621"
            ),
            replacement=None,
            gone_in=None,
        )
@@ -333,8 +406,11 @@ def get_scheme(


def get_bin_prefix() -> str:
    old = _distutils.get_bin_prefix()
    new = _sysconfig.get_bin_prefix()
    if _USE_SYSCONFIG:
        return new

    old = _distutils.get_bin_prefix()
    if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="bin_prefix"):
        _log_context()
    return old
@@ -363,8 +439,11 @@ def _looks_like_deb_system_dist_packages(value: str) -> bool:

def get_purelib() -> str:
    """Return the default pure-Python lib location."""
    old = _distutils.get_purelib()
    new = _sysconfig.get_purelib()
    if _USE_SYSCONFIG:
        return new

    old = _distutils.get_purelib()
    if _looks_like_deb_system_dist_packages(old):
        return old
    if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="purelib"):
@@ -374,35 +453,15 @@ def get_purelib() -> str:

def get_platlib() -> str:
    """Return the default platform-shared lib location."""
    old = _distutils.get_platlib()
    new = _sysconfig.get_platlib()
    if _USE_SYSCONFIG:
        return new

    from . import _distutils

    old = _distutils.get_platlib()
    if _looks_like_deb_system_dist_packages(old):
        return old
    if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="platlib"):
        _log_context()
    return old


def get_prefixed_libs(prefix: str) -> List[str]:
    """Return the lib locations under ``prefix``."""
    old_pure, old_plat = _distutils.get_prefixed_libs(prefix)
    new_pure, new_plat = _sysconfig.get_prefixed_libs(prefix)

    warned = [
        _warn_if_mismatch(
            pathlib.Path(old_pure),
            pathlib.Path(new_pure),
            key="prefixed-purelib",
        ),
        _warn_if_mismatch(
            pathlib.Path(old_plat),
            pathlib.Path(new_plat),
            key="prefixed-platlib",
        ),
    ]
    if any(warned):
        _log_context(prefix=prefix)

    if old_pure == old_plat:
        return [old_pure]
    return [old_pure, old_plat]

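The old/new comparison above boils down to asking distutils and sysconfig the same question and warning when they disagree. A minimal standalone sketch of that check (assuming a Python older than 3.12, where distutils is still importable):

import pathlib
import sysconfig
from distutils.sysconfig import get_python_lib  # removed in Python 3.12

old = pathlib.Path(get_python_lib(plat_specific=False))
new = pathlib.Path(sysconfig.get_paths()["purelib"])
if old != new:
    print(f"purelib mismatch: distutils={old}, sysconfig={new}")
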
@@ -3,6 +3,17 @@
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

# If pip's going to use distutils, it should not be using the copy that setuptools
# might have injected into the environment. This is done by removing the injected
# shim, if it's injected.
#
# See https://github.com/pypa/pip/issues/8761 for the original discussion and
# rationale for why this is done within pip.
try:
    __import__("_distutils_hack").remove_shim()
except (ImportError, AttributeError):
    pass

import logging
import os
import sys
@@ -10,7 +21,7 @@ from distutils.cmd import Command as DistutilsCommand
from distutils.command.install import SCHEME_KEYS
from distutils.command.install import install as distutils_install_command
from distutils.sysconfig import get_python_lib
from typing import Dict, List, Optional, Tuple, Union, cast
from typing import Dict, List, Optional, Union, cast

from pip._internal.models.scheme import Scheme
from pip._internal.utils.compat import WINDOWS
@@ -24,10 +35,10 @@ logger = logging.getLogger(__name__)
def distutils_scheme(
    dist_name: str,
    user: bool = False,
    home: str = None,
    root: str = None,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: str = None,
    prefix: Optional[str] = None,
    *,
    ignore_config_files: bool = False,
) -> Dict[str, str]:
@@ -84,7 +95,7 @@ def distutils_scheme(
    if home:
        prefix = home
    elif user:
        prefix = i.install_userbase  # type: ignore
        prefix = i.install_userbase
    else:
        prefix = i.prefix
    scheme["headers"] = os.path.join(
@@ -160,10 +171,3 @@ def get_purelib() -> str:

def get_platlib() -> str:
    return get_python_lib(plat_specific=True)


def get_prefixed_libs(prefix: str) -> Tuple[str, str]:
    return (
        get_python_lib(plat_specific=False, prefix=prefix),
        get_python_lib(plat_specific=True, prefix=prefix),
    )

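For context, the scheme dict produced by distutils_scheme() ultimately comes from a finalized distutils install command. A hedged, stdlib-only sketch of that derivation (again assuming a Python that still ships distutils; the project name is hypothetical):

from distutils.dist import Distribution

dist = Distribution({"name": "example"})  # hypothetical project name
install_cmd = dist.get_command_obj("install", create=True)
install_cmd.ensure_finalized()
scheme = {
    key: getattr(install_cmd, f"install_{key}")
    for key in ("purelib", "platlib", "headers", "scripts", "data")
}
print(scheme)
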
@@ -1,4 +1,3 @@
import distutils.util  # FIXME: For change_root.
import logging
import os
import sys
@@ -9,7 +8,7 @@ from pip._internal.exceptions import InvalidSchemeCombination, UserInstallationI
from pip._internal.models.scheme import SCHEME_KEYS, Scheme
from pip._internal.utils.virtualenv import running_under_virtualenv

from .base import get_major_minor_version, is_osx_framework
from .base import change_root, get_major_minor_version, is_osx_framework

logger = logging.getLogger(__name__)

@@ -194,7 +193,7 @@ def get_scheme(
    )
    if root is not None:
        for key in SCHEME_KEYS:
            value = distutils.util.change_root(root, getattr(scheme, key))
            value = change_root(root, getattr(scheme, key))
            setattr(scheme, key, value)
    return scheme

@@ -212,8 +211,3 @@ def get_purelib() -> str:

def get_platlib() -> str:
    return sysconfig.get_paths()["platlib"]


def get_prefixed_libs(prefix: str) -> typing.Tuple[str, str]:
    paths = sysconfig.get_paths(vars={"base": prefix, "platbase": prefix})
    return (paths["purelib"], paths["platlib"])

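The sysconfig variant resolves every scheme path by substituting the prefix into the active scheme's templates, and the same call works for any prefix. A small illustration (the prefix is hypothetical):

import sysconfig

prefix = "/opt/build-env"  # hypothetical isolation prefix
paths = sysconfig.get_paths(vars={"base": prefix, "platbase": prefix})
print(paths["purelib"], paths["platlib"], paths["scripts"])
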
@@ -5,6 +5,7 @@ import sys
import sysconfig
import typing

from pip._internal.exceptions import InstallationError
from pip._internal.utils import appdirs
from pip._internal.utils.virtualenv import running_under_virtualenv

@@ -12,7 +13,7 @@ from pip._internal.utils.virtualenv import running_under_virtualenv
USER_CACHE_DIR = appdirs.user_cache_dir("pip")

# FIXME doesn't account for venv linked to global site-packages
site_packages: typing.Optional[str] = sysconfig.get_path("purelib")
site_packages: str = sysconfig.get_path("purelib")


def get_major_minor_version() -> str:
@@ -23,6 +24,34 @@ def get_major_minor_version() -> str:
    return "{}.{}".format(*sys.version_info)


def change_root(new_root: str, pathname: str) -> str:
    """Return 'pathname' with 'new_root' prepended.

    If 'pathname' is relative, this is equivalent to os.path.join(new_root, pathname).
    Otherwise, it requires making 'pathname' relative and then joining the
    two, which is tricky on DOS/Windows and Mac OS.

    This is borrowed from Python's standard library's distutils module.
    """
    if os.name == "posix":
        if not os.path.isabs(pathname):
            return os.path.join(new_root, pathname)
        else:
            return os.path.join(new_root, pathname[1:])

    elif os.name == "nt":
        (drive, path) = os.path.splitdrive(pathname)
        if path[0] == "\\":
            path = path[1:]
        return os.path.join(new_root, path)

    else:
        raise InstallationError(
            f"Unknown platform: {os.name}\n"
            "Can not change root path prefix on unknown platform."
        )


def get_src_prefix() -> str:
    if running_under_virtualenv():
        src_prefix = os.path.join(sys.prefix, "src")

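A quick demonstration of the two interesting change_root() branches, using the platform-specific path modules directly so it runs anywhere:

import ntpath
import posixpath

# POSIX: an absolute path is re-rooted by stripping its leading "/".
assert posixpath.join("/tmp/root", "/usr/lib/python3"[1:]) == "/tmp/root/usr/lib/python3"

# Windows: the drive letter is dropped, then the leading backslash is stripped.
drive, path = ntpath.splitdrive("C:\\Python\\Lib")
assert ntpath.join("D:\\root", path[1:]) == "D:\\root\\Python\\Lib"
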
@@ -1,8 +1,7 @@
from typing import List, Optional


def main(args=None):
    # type: (Optional[List[str]]) -> int
def main(args: Optional[List[str]] = None) -> int:
    """This is preserved for old console scripts that may still be referencing
    it.

@@ -1,16 +1,70 @@
from typing import List, Optional
import contextlib
import functools
import os
import sys
from typing import TYPE_CHECKING, List, Optional, Type, cast

from .base import BaseDistribution, BaseEnvironment
from pip._internal.utils.misc import strtobool

from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel

if TYPE_CHECKING:
    from typing import Protocol
else:
    Protocol = object

__all__ = [
    "BaseDistribution",
    "BaseEnvironment",
    "FilesystemWheel",
    "MemoryWheel",
    "Wheel",
    "get_default_environment",
    "get_environment",
    "get_wheel_distribution",
    "select_backend",
]


def _should_use_importlib_metadata() -> bool:
    """Whether to use the ``importlib.metadata`` or ``pkg_resources`` backend.

    By default, pip uses ``importlib.metadata`` on Python 3.11+, and
    ``pkg_resources`` otherwise. This can be overridden in a couple of ways:

    * If environment variable ``_PIP_USE_IMPORTLIB_METADATA`` is set, it
      dictates whether ``importlib.metadata`` is used, regardless of Python
      version.
    * On Python 3.11+, Python distributors can patch ``importlib.metadata``
      to add a global constant ``_PIP_USE_IMPORTLIB_METADATA = False``. This
      makes pip use ``pkg_resources`` (unless the user set the aforementioned
      environment variable to *True*).
    """
    with contextlib.suppress(KeyError, ValueError):
        return bool(strtobool(os.environ["_PIP_USE_IMPORTLIB_METADATA"]))
    if sys.version_info < (3, 11):
        return False
    import importlib.metadata

    return bool(getattr(importlib.metadata, "_PIP_USE_IMPORTLIB_METADATA", True))


class Backend(Protocol):
    Distribution: Type[BaseDistribution]
    Environment: Type[BaseEnvironment]


@functools.lru_cache(maxsize=None)
def select_backend() -> Backend:
    if _should_use_importlib_metadata():
        from . import importlib

        return cast(Backend, importlib)
    from . import pkg_resources

    return cast(Backend, pkg_resources)


def get_default_environment() -> BaseEnvironment:
    """Get the default representation for the current environment.

@@ -18,9 +72,7 @@ def get_default_environment() -> BaseEnvironment:
    Environment instance should be built from ``sys.path`` and may use caching
    to share instance state across calls.
    """
    from .pkg_resources import Environment

    return Environment.default()
    return select_backend().Environment.default()


def get_environment(paths: Optional[List[str]]) -> BaseEnvironment:
@@ -30,12 +82,19 @@ def get_environment(paths: Optional[List[str]]) -> BaseEnvironment:
    given import paths. The backend must build a fresh instance representing
    the state of installed distributions when this function is called.
    """
    from .pkg_resources import Environment

    return Environment.from_paths(paths)
    return select_backend().Environment.from_paths(paths)


def get_wheel_distribution(wheel_path: str, canonical_name: str) -> BaseDistribution:
def get_directory_distribution(directory: str) -> BaseDistribution:
    """Get the distribution metadata representation in the specified directory.

    This returns a Distribution instance from the chosen backend based on
    the given on-disk ``.dist-info`` directory.
    """
    return select_backend().Distribution.from_directory(directory)


def get_wheel_distribution(wheel: Wheel, canonical_name: str) -> BaseDistribution:
    """Get the representation of the specified wheel's distribution metadata.

    This returns a Distribution instance from the chosen backend based on
@@ -43,6 +102,26 @@ def get_wheel_distribution(wheel_path: str, canonical_name: str) -> BaseDistribu

    :param canonical_name: Normalized project name of the given wheel.
    """
    from .pkg_resources import Distribution
    return select_backend().Distribution.from_wheel(wheel, canonical_name)

    return Distribution.from_wheel(wheel_path, canonical_name)

def get_metadata_distribution(
    metadata_contents: bytes,
    filename: str,
    canonical_name: str,
) -> BaseDistribution:
    """Get the dist representation of the specified METADATA file contents.

    This returns a Distribution instance from the chosen backend sourced from the data
    in `metadata_contents`.

    :param metadata_contents: Contents of a METADATA file within a dist, or one served
        via PEP 658.
    :param filename: Filename for the dist this metadata represents.
    :param canonical_name: Normalized project name of the given dist.
    """
    return select_backend().Distribution.from_metadata_file_contents(
        metadata_contents,
        filename,
        canonical_name,
    )

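The backend decision can be reproduced without importing pip at all. A sketch of the same precedence (environment variable first, then Python version); the truthy strings below approximate pip's strtobool():

import os
import sys

def backend_name() -> str:
    value = os.environ.get("_PIP_USE_IMPORTLIB_METADATA")
    if value is not None:
        truthy = value.strip().lower() in ("y", "yes", "t", "true", "on", "1")
        return "importlib" if truthy else "pkg_resources"
    return "importlib" if sys.version_info >= (3, 11) else "pkg_resources"

print(backend_name())
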
@@ -1,37 +1,56 @@
import csv
import email.message
import functools
import json
import logging
import pathlib
import re
import zipfile
from typing import (
    IO,
    TYPE_CHECKING,
    Any,
    Collection,
    Container,
    Dict,
    Iterable,
    Iterator,
    List,
    NamedTuple,
    Optional,
    Tuple,
    Union,
)

from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet
from pip._vendor.packaging.utils import NormalizedName
from pip._vendor.packaging.version import LegacyVersion, Version

from pip._internal.exceptions import NoneMetadataError
from pip._internal.locations import site_packages, user_site
from pip._internal.models.direct_url import (
    DIRECT_URL_METADATA_NAME,
    DirectUrl,
    DirectUrlValidationError,
)
from pip._internal.utils.misc import stdlib_pkgs  # TODO: Move definition here.
from pip._internal.utils.compat import stdlib_pkgs  # TODO: Move definition here.
from pip._internal.utils.egg_link import egg_link_path_from_sys_path
from pip._internal.utils.misc import is_local, normalize_path
from pip._internal.utils.packaging import safe_extra
from pip._internal.utils.urls import url_to_path

from ._json import msg_to_json

if TYPE_CHECKING:
    from typing import Protocol

    from pip._vendor.packaging.utils import NormalizedName
else:
    Protocol = object

DistributionVersion = Union[LegacyVersion, Version]

InfoPath = Union[str, pathlib.PurePath]

logger = logging.getLogger(__name__)


@@ -49,7 +68,89 @@ class BaseEntryPoint(Protocol):
        raise NotImplementedError()


def _convert_installed_files_path(
    entry: Tuple[str, ...],
    info: Tuple[str, ...],
) -> str:
    """Convert a legacy installed-files.txt path into modern RECORD path.

    The legacy format stores paths relative to the info directory, while the
    modern format stores paths relative to the package root, e.g. the
    site-packages directory.

    :param entry: Path parts of the installed-files.txt entry.
    :param info: Path parts of the egg-info directory relative to package root.
    :returns: The converted entry.

    For best compatibility with symlinks, this does not use ``abspath()`` or
    ``Path.resolve()``, but tries to work with path parts:

    1. While ``entry`` starts with ``..``, remove the equal amounts of parts
       from ``info``; if ``info`` is empty, start appending ``..`` instead.
    2. Join the two directly.
    """
    while entry and entry[0] == "..":
        if not info or info[-1] == "..":
            info += ("..",)
        else:
            info = info[:-1]
        entry = entry[1:]
    return str(pathlib.Path(*info, *entry))


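A worked example of that conversion, as a standalone sketch that re-implements the same loop (the entry and info tuples are hypothetical):

import pathlib

def convert(entry, info):
    # Same algorithm as _convert_installed_files_path() above.
    while entry and entry[0] == "..":
        info = info[:-1] if info and info[-1] != ".." else info + ("..",)
        entry = entry[1:]
    return str(pathlib.Path(*info, *entry))

# An entry recorded relative to the .egg-info dir, rebased onto site-packages:
assert convert(("..", "example", "__init__.py"), ("example-1.0.egg-info",)) == str(
    pathlib.Path("example", "__init__.py")
)
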
class RequiresEntry(NamedTuple):
    requirement: str
    extra: str
    marker: str


class BaseDistribution(Protocol):
    @classmethod
    def from_directory(cls, directory: str) -> "BaseDistribution":
        """Load the distribution from a metadata directory.

        :param directory: Path to a metadata directory, e.g. ``.dist-info``.
        """
        raise NotImplementedError()

    @classmethod
    def from_metadata_file_contents(
        cls,
        metadata_contents: bytes,
        filename: str,
        project_name: str,
    ) -> "BaseDistribution":
        """Load the distribution from the contents of a METADATA file.

        This is used to implement PEP 658 by generating a "shallow" dist object that can
        be used for resolution without downloading or building the actual dist yet.

        :param metadata_contents: The contents of a METADATA file.
        :param filename: File name for the dist with this metadata.
        :param project_name: Name of the project this dist represents.
        """
        raise NotImplementedError()

    @classmethod
    def from_wheel(cls, wheel: "Wheel", name: str) -> "BaseDistribution":
        """Load the distribution from a given wheel.

        :param wheel: A concrete wheel definition.
        :param name: File name of the wheel.

        :raises InvalidWheel: Whenever loading of the wheel causes a
            :py:exc:`zipfile.BadZipFile` exception to be thrown.
        :raises UnsupportedWheel: If the wheel is a valid zip, but malformed
            internally.
        """
        raise NotImplementedError()

    def __repr__(self) -> str:
        return f"{self.raw_name} {self.version} ({self.location})"

    def __str__(self) -> str:
        return f"{self.raw_name} {self.version}"

    @property
    def location(self) -> Optional[str]:
        """Where the distribution is loaded from.
@@ -65,8 +166,43 @@ class BaseDistribution(Protocol):
        raise NotImplementedError()

    @property
    def info_directory(self) -> Optional[str]:
        """Location of the .[egg|dist]-info directory.
    def editable_project_location(self) -> Optional[str]:
        """The project location for editable distributions.

        This is the directory where pyproject.toml or setup.py is located.
        None if the distribution is not installed in editable mode.
        """
        # TODO: this property is relatively costly to compute, memoize it ?
        direct_url = self.direct_url
        if direct_url:
            if direct_url.is_local_editable():
                return url_to_path(direct_url.url)
        else:
            # Search for an .egg-link file by walking sys.path, as it was
            # done before by dist_is_editable().
            egg_link_path = egg_link_path_from_sys_path(self.raw_name)
            if egg_link_path:
                # TODO: get project location from second line of egg_link file
                # (https://github.com/pypa/pip/issues/10243)
                return self.location
        return None

    @property
    def installed_location(self) -> Optional[str]:
        """The distribution's "installed" location.

        This should generally be a ``site-packages`` directory. This is
        usually ``dist.location``, except for legacy develop-installed packages,
        where ``dist.location`` is the source code location, and this is where
        the ``.egg-link`` file is.

        The returned location is normalized (in particular, with symlinks removed).
        """
        raise NotImplementedError()

    @property
    def info_location(self) -> Optional[str]:
        """Location of the .[egg|dist]-info directory or file.

        Similarly to ``location``, a string value is not necessarily a
        filesystem path. ``None`` means the distribution is created in-memory.
@@ -81,13 +217,80 @@ class BaseDistribution(Protocol):
        raise NotImplementedError()

    @property
    def canonical_name(self) -> "NormalizedName":
    def installed_by_distutils(self) -> bool:
        """Whether this distribution is installed with legacy distutils format.

        A distribution installed with "raw" distutils not patched by setuptools
        uses one single file at ``info_location`` to store metadata. We need to
        treat this specially on uninstallation.
        """
        info_location = self.info_location
        if not info_location:
            return False
        return pathlib.Path(info_location).is_file()

    @property
    def installed_as_egg(self) -> bool:
        """Whether this distribution is installed as an egg.

        This usually indicates the distribution was installed by (older versions
        of) easy_install.
        """
        location = self.location
        if not location:
            return False
        return location.endswith(".egg")

    @property
    def installed_with_setuptools_egg_info(self) -> bool:
        """Whether this distribution is installed with the ``.egg-info`` format.

        This usually indicates the distribution was installed with setuptools
        with an old pip version or with ``single-version-externally-managed``.

        Note that this ensures the metadata store is a directory. distutils can
        also install an ``.egg-info``, but as a file, not a directory. This
        property is *False* for that case. Also see ``installed_by_distutils``.
        """
        info_location = self.info_location
        if not info_location:
            return False
        if not info_location.endswith(".egg-info"):
            return False
        return pathlib.Path(info_location).is_dir()

    @property
    def installed_with_dist_info(self) -> bool:
        """Whether this distribution is installed with the "modern format".

        This indicates a "modern" installation, e.g. storing metadata in the
        ``.dist-info`` directory. This applies to installations made by
        setuptools (but through pip, not directly), or anything using the
        standardized build backend interface (PEP 517).
        """
        info_location = self.info_location
        if not info_location:
            return False
        if not info_location.endswith(".dist-info"):
            return False
        return pathlib.Path(info_location).is_dir()

    @property
    def canonical_name(self) -> NormalizedName:
        raise NotImplementedError()

    @property
    def version(self) -> DistributionVersion:
        raise NotImplementedError()

    @property
    def setuptools_filename(self) -> str:
        """Convert a project name to its setuptools-compatible filename.

        This is a copy of ``pkg_resources.to_filename()`` for compatibility.
        """
        return self.raw_name.replace("-", "_")

    @property
    def direct_url(self) -> Optional[DirectUrl]:
        """Obtain a DirectUrl from this distribution.
@@ -116,39 +319,102 @@ class BaseDistribution(Protocol):

    @property
    def installer(self) -> str:
        raise NotImplementedError()
        try:
            installer_text = self.read_text("INSTALLER")
        except (OSError, ValueError, NoneMetadataError):
            return ""  # Fail silently if the installer file cannot be read.
        for line in installer_text.splitlines():
            cleaned_line = line.strip()
            if cleaned_line:
                return cleaned_line
        return ""

    @property
    def requested(self) -> bool:
        return self.is_file("REQUESTED")

    @property
    def editable(self) -> bool:
        raise NotImplementedError()
        return bool(self.editable_project_location)

    @property
    def local(self) -> bool:
        raise NotImplementedError()
        """If distribution is installed in the current virtual environment.

        Always True if we're not in a virtualenv.
        """
        if self.installed_location is None:
            return False
        return is_local(self.installed_location)

    @property
    def in_usersite(self) -> bool:
        raise NotImplementedError()
        if self.installed_location is None or user_site is None:
            return False
        return self.installed_location.startswith(normalize_path(user_site))

    @property
    def in_site_packages(self) -> bool:
        if self.installed_location is None or site_packages is None:
            return False
        return self.installed_location.startswith(normalize_path(site_packages))

    def is_file(self, path: InfoPath) -> bool:
        """Check whether an entry in the info directory is a file."""
        raise NotImplementedError()

    def read_text(self, name: str) -> str:
        """Read a file in the .dist-info (or .egg-info) directory.
    def iter_distutils_script_names(self) -> Iterator[str]:
        """Find distutils 'scripts' entries metadata.

        Should raise ``FileNotFoundError`` if ``name`` does not exist in the
        metadata directory.
        If 'scripts' is supplied in ``setup.py``, distutils records those in the
        installed distribution's ``scripts`` directory, a file for each script.
        """
        raise NotImplementedError()

    def read_text(self, path: InfoPath) -> str:
        """Read a file in the info directory.

        :raise FileNotFoundError: If ``path`` does not exist in the directory.
        :raise NoneMetadataError: If ``path`` exists in the info directory, but
            cannot be read.
        """
        raise NotImplementedError()

    def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
        raise NotImplementedError()

    def _metadata_impl(self) -> email.message.Message:
        raise NotImplementedError()

    @functools.lru_cache(maxsize=1)
    def _metadata_cached(self) -> email.message.Message:
        # When we drop python 3.7 support, move this to the metadata property and use
        # functools.cached_property instead of lru_cache.
        metadata = self._metadata_impl()
        self._add_egg_info_requires(metadata)
        return metadata

    @property
    def metadata(self) -> email.message.Message:
        """Metadata of distribution parsed from e.g. METADATA or PKG-INFO."""
        raise NotImplementedError()
        """Metadata of distribution parsed from e.g. METADATA or PKG-INFO.

        This should return an empty message if the metadata file is unavailable.

        :raises NoneMetadataError: If the metadata file is available, but does
            not contain valid metadata.
        """
        return self._metadata_cached()

    @property
    def metadata_dict(self) -> Dict[str, Any]:
        """PEP 566 compliant JSON-serializable representation of METADATA or PKG-INFO.

        This should return an empty dict if the metadata file is unavailable.

        :raises NoneMetadataError: If the metadata file is available, but does
            not contain valid metadata.
        """
        return msg_to_json(self.metadata)

    @property
    def metadata_version(self) -> Optional[str]:
@@ -159,12 +425,159 @@ class BaseDistribution(Protocol):
    def raw_name(self) -> str:
        """Value of "Name:" in distribution metadata."""
        # The metadata should NEVER be missing the Name: key, but if it somehow
        # does not, fall back to the known canonical name.
        # does, fall back to the known canonical name.
        return self.metadata.get("Name", self.canonical_name)

    @property
    def requires_python(self) -> SpecifierSet:
        """Value of "Requires-Python:" in distribution metadata.

        If the key does not exist or contains an invalid value, an empty
        SpecifierSet should be returned.
        """
        value = self.metadata.get("Requires-Python")
        if value is None:
            return SpecifierSet()
        try:
            # Convert to str to satisfy the type checker; this can be a Header object.
            spec = SpecifierSet(str(value))
        except InvalidSpecifier as e:
            message = "Package %r has an invalid Requires-Python: %s"
            logger.warning(message, self.raw_name, e)
            return SpecifierSet()
        return spec

    def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
        """Dependencies of this distribution.

        For modern .dist-info distributions, this is the collection of
        "Requires-Dist:" entries in distribution metadata.
        """
        raise NotImplementedError()

    def iter_provided_extras(self) -> Iterable[str]:
        """Extras provided by this distribution.

        For modern .dist-info distributions, this is the collection of
        "Provides-Extra:" entries in distribution metadata.
        """
        raise NotImplementedError()

    def _iter_declared_entries_from_record(self) -> Optional[Iterator[str]]:
        try:
            text = self.read_text("RECORD")
        except FileNotFoundError:
            return None
        # This extra Path-str cast normalizes entries.
        return (str(pathlib.Path(row[0])) for row in csv.reader(text.splitlines()))

    def _iter_declared_entries_from_legacy(self) -> Optional[Iterator[str]]:
        try:
            text = self.read_text("installed-files.txt")
        except FileNotFoundError:
            return None
        paths = (p for p in text.splitlines(keepends=False) if p)
        root = self.location
        info = self.info_location
        if root is None or info is None:
            return paths
        try:
            info_rel = pathlib.Path(info).relative_to(root)
        except ValueError:  # info is not relative to root.
            return paths
        if not info_rel.parts:  # info *is* root.
            return paths
        return (
            _convert_installed_files_path(pathlib.Path(p).parts, info_rel.parts)
            for p in paths
        )

    def iter_declared_entries(self) -> Optional[Iterator[str]]:
        """Iterate through file entries declared in this distribution.

        For modern .dist-info distributions, this is the files listed in the
        ``RECORD`` metadata file. For legacy setuptools distributions, this
        comes from ``installed-files.txt``, with entries normalized to be
        compatible with the format used by ``RECORD``.

        :return: An iterator for listed entries, or None if the distribution
            contains neither ``RECORD`` nor ``installed-files.txt``.
        """
        return (
            self._iter_declared_entries_from_record()
            or self._iter_declared_entries_from_legacy()
        )

    def _iter_requires_txt_entries(self) -> Iterator[RequiresEntry]:
        """Parse a ``requires.txt`` in an egg-info directory.

        This is an INI-ish format where an egg-info stores dependencies. A
        section name describes an extra or environment marker, while each entry
        is an arbitrary string (not a key-value pair) representing a dependency
        as a requirement string (no markers).

        There is a construct in ``importlib.metadata`` called ``Sectioned`` that
        does mostly the same, but the format is currently considered private.
        """
        try:
            content = self.read_text("requires.txt")
        except FileNotFoundError:
            return
        extra = marker = ""  # Section-less entries don't have markers.
        for line in content.splitlines():
            line = line.strip()
            if not line or line.startswith("#"):  # Comment; ignored.
                continue
            if line.startswith("[") and line.endswith("]"):  # A section header.
                extra, _, marker = line.strip("[]").partition(":")
                continue
            yield RequiresEntry(requirement=line, extra=extra, marker=marker)

    def _iter_egg_info_extras(self) -> Iterable[str]:
        """Get extras from the egg-info directory."""
        known_extras = {""}
        for entry in self._iter_requires_txt_entries():
            if entry.extra in known_extras:
                continue
            known_extras.add(entry.extra)
            yield entry.extra

    def _iter_egg_info_dependencies(self) -> Iterable[str]:
        """Get distribution dependencies from the egg-info directory.

        To ease parsing, this converts a legacy dependency entry into a PEP 508
        requirement string. Like ``_iter_requires_txt_entries()``, there is code
        in ``importlib.metadata`` that does mostly the same, but it does not do
        exactly what we need.

        Namely, ``importlib.metadata`` does not normalize the extra name before
        putting it into the requirement string, which causes marker comparison
        to fail because the dist-info format does normalize. This is consistent
        in all currently available PEP 517 backends, although not standardized.
        """
        for entry in self._iter_requires_txt_entries():
            if entry.extra and entry.marker:
                marker = f'({entry.marker}) and extra == "{safe_extra(entry.extra)}"'
            elif entry.extra:
                marker = f'extra == "{safe_extra(entry.extra)}"'
            elif entry.marker:
                marker = entry.marker
            else:
                marker = ""
            if marker:
                yield f"{entry.requirement} ; {marker}"
            else:
                yield entry.requirement

    def _add_egg_info_requires(self, metadata: email.message.Message) -> None:
        """Add egg-info requires.txt information to the metadata."""
        if not metadata.get_all("Requires-Dist"):
            for dep in self._iter_egg_info_dependencies():
                metadata["Requires-Dist"] = dep
        if not metadata.get_all("Provides-Extra"):
            for extra in self._iter_egg_info_extras():
                metadata["Provides-Extra"] = extra

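End-to-end, the machinery above turns egg-info sections into PEP 508 strings. A self-contained sketch of the same conversion (the requires.txt content is hypothetical, and safe_extra() is approximated with str.lower()):

requires_txt = (
    "requests\n"
    "[socks]\n"
    "PySocks!=1.5.7,>=1.5.6\n"
    '[:python_version < "3.10"]\n'
    "typing-extensions\n"
)

extra = marker = ""
for line in requires_txt.splitlines():
    line = line.strip()
    if not line or line.startswith("#"):
        continue
    if line.startswith("[") and line.endswith("]"):
        extra, _, marker = line.strip("[]").partition(":")
        continue
    if extra and marker:
        print(f'{line} ; ({marker}) and extra == "{extra.lower()}"')
    elif extra:
        print(f'{line} ; extra == "{extra.lower()}"')
    elif marker:
        print(f"{line} ; {marker}")
    else:
        print(line)
# requests
# PySocks!=1.5.7,>=1.5.6 ; extra == "socks"
# typing-extensions ; python_version < "3.10"
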
class BaseEnvironment:
    """An environment containing distributions to introspect."""
@@ -178,7 +591,11 @@ class BaseEnvironment:
        raise NotImplementedError()

    def get_distribution(self, name: str) -> Optional["BaseDistribution"]:
        """Given a requirement name, return the installed distributions."""
        """Given a requirement name, return the installed distributions.

        The name may not be normalized. The implementation must canonicalize
        it for lookup.
        """
        raise NotImplementedError()

    def _iter_distributions(self) -> Iterator["BaseDistribution"]:
@@ -190,8 +607,8 @@ class BaseEnvironment:
        """
        raise NotImplementedError()

    def iter_distributions(self) -> Iterator["BaseDistribution"]:
        """Iterate through installed distributions."""
    def iter_all_distributions(self) -> Iterator[BaseDistribution]:
        """Iterate through all installed distributions without any filtering."""
        for dist in self._iter_distributions():
            # Make sure the distribution actually comes from a valid Python
            # packaging distribution. Pip's AdjacentTempDirectory leaves folders
@@ -221,6 +638,11 @@ class BaseEnvironment:
    ) -> Iterator[BaseDistribution]:
        """Return a list of installed distributions.

        This is based on ``iter_all_distributions()`` with additional filtering
        options. Note that ``iter_installed_distributions()`` without arguments
        is *not* equal to ``iter_all_distributions()``, since some of the
        configurations exclude packages by default.

        :param local_only: If True (default), only return installations
            local to the current virtualenv, if in a virtualenv.
        :param skip: An iterable of canonicalized project names to ignore;
@@ -230,7 +652,7 @@ class BaseEnvironment:
        :param user_only: If True, only report installations in the user
            site directory.
        """
        it = self.iter_distributions()
        it = self.iter_all_distributions()
        if local_only:
            it = (d for d in it if d.local)
        if not include_editables:
@@ -240,3 +662,27 @@ class BaseEnvironment:
        if user_only:
            it = (d for d in it if d.in_usersite)
        return (d for d in it if d.canonical_name not in skip)


class Wheel(Protocol):
    location: str

    def as_zipfile(self) -> zipfile.ZipFile:
        raise NotImplementedError()


class FilesystemWheel(Wheel):
    def __init__(self, location: str) -> None:
        self.location = location

    def as_zipfile(self) -> zipfile.ZipFile:
        return zipfile.ZipFile(self.location, allowZip64=True)


class MemoryWheel(Wheel):
    def __init__(self, location: str, stream: IO[bytes]) -> None:
        self.location = location
        self.stream = stream

    def as_zipfile(self) -> zipfile.ZipFile:
        return zipfile.ZipFile(self.stream, allowZip64=True)

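The Wheel protocol exists so the same metadata loader can serve both on-disk and in-memory wheels; either way the result reduces to a ZipFile. A hedged sketch of the two constructions with plain stdlib types (the wheel contents here are hypothetical):

import io
import zipfile

# On disk: the equivalent of FilesystemWheel("example.whl").as_zipfile() is
# zipfile.ZipFile("example.whl", allowZip64=True)  # hypothetical path

# In memory: the equivalent of MemoryWheel(location, stream).as_zipfile():
buf = io.BytesIO()
with zipfile.ZipFile(buf, "w") as zf:
    zf.writestr("example-1.0.dist-info/METADATA", "Metadata-Version: 2.1\nName: example\n")
with zipfile.ZipFile(buf, allowZip64=True) as zf:
    print(zf.read("example-1.0.dist-info/METADATA").decode())
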
@@ -1,29 +1,28 @@
import email.message
import email.parser
import logging
import os
import zipfile
from typing import (
    TYPE_CHECKING,
    Collection,
    Iterable,
    Iterator,
    List,
    NamedTuple,
    Optional,
)
from typing import Collection, Iterable, Iterator, List, Mapping, NamedTuple, Optional

from pip._vendor import pkg_resources
from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._vendor.packaging.version import parse as parse_version

from pip._internal.utils import misc  # TODO: Move definition here.
from pip._internal.utils.packaging import get_installer, get_metadata
from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel
from pip._internal.exceptions import InvalidWheel, NoneMetadataError, UnsupportedWheel
from pip._internal.utils.egg_link import egg_link_path_from_location
from pip._internal.utils.misc import display_path, normalize_path
from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file

from .base import BaseDistribution, BaseEntryPoint, BaseEnvironment, DistributionVersion

if TYPE_CHECKING:
    from pip._vendor.packaging.utils import NormalizedName
from .base import (
    BaseDistribution,
    BaseEntryPoint,
    BaseEnvironment,
    DistributionVersion,
    InfoPath,
    Wheel,
)

logger = logging.getLogger(__name__)

@@ -34,14 +33,101 @@ class EntryPoint(NamedTuple):
    group: str


class InMemoryMetadata:
    """IMetadataProvider that reads metadata files from a dictionary.

    This also maps metadata decoding exceptions to our internal exception type.
    """

    def __init__(self, metadata: Mapping[str, bytes], wheel_name: str) -> None:
        self._metadata = metadata
        self._wheel_name = wheel_name

    def has_metadata(self, name: str) -> bool:
        return name in self._metadata

    def get_metadata(self, name: str) -> str:
        try:
            return self._metadata[name].decode()
        except UnicodeDecodeError as e:
            # Augment the default error with the origin of the file.
            raise UnsupportedWheel(
                f"Error decoding metadata for {self._wheel_name}: {e} in {name} file"
            )

    def get_metadata_lines(self, name: str) -> Iterable[str]:
        return pkg_resources.yield_lines(self.get_metadata(name))

    def metadata_isdir(self, name: str) -> bool:
        return False

    def metadata_listdir(self, name: str) -> List[str]:
        return []

    def run_script(self, script_name: str, namespace: str) -> None:
        pass


class Distribution(BaseDistribution):
    def __init__(self, dist: pkg_resources.Distribution) -> None:
        self._dist = dist

    @classmethod
    def from_wheel(cls, path: str, name: str) -> "Distribution":
        with zipfile.ZipFile(path, allowZip64=True) as zf:
            dist = pkg_resources_distribution_for_wheel(zf, name, path)
    def from_directory(cls, directory: str) -> BaseDistribution:
        dist_dir = directory.rstrip(os.sep)

        # Build a PathMetadata object, from path to metadata. :wink:
        base_dir, dist_dir_name = os.path.split(dist_dir)
        metadata = pkg_resources.PathMetadata(base_dir, dist_dir)

        # Determine the correct Distribution object type.
        if dist_dir.endswith(".egg-info"):
            dist_cls = pkg_resources.Distribution
            dist_name = os.path.splitext(dist_dir_name)[0]
        else:
            assert dist_dir.endswith(".dist-info")
            dist_cls = pkg_resources.DistInfoDistribution
            dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0]

        dist = dist_cls(base_dir, project_name=dist_name, metadata=metadata)
        return cls(dist)

    @classmethod
    def from_metadata_file_contents(
        cls,
        metadata_contents: bytes,
        filename: str,
        project_name: str,
    ) -> BaseDistribution:
        metadata_dict = {
            "METADATA": metadata_contents,
        }
        dist = pkg_resources.DistInfoDistribution(
            location=filename,
            metadata=InMemoryMetadata(metadata_dict, filename),
            project_name=project_name,
        )
        return cls(dist)

    @classmethod
    def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution:
        try:
            with wheel.as_zipfile() as zf:
                info_dir, _ = parse_wheel(zf, name)
                metadata_dict = {
                    path.split("/", 1)[-1]: read_wheel_metadata_file(zf, path)
                    for path in zf.namelist()
                    if path.startswith(f"{info_dir}/")
                }
        except zipfile.BadZipFile as e:
            raise InvalidWheel(wheel.location, name) from e
        except UnsupportedWheel as e:
            raise UnsupportedWheel(f"{name} has an invalid wheel, {e}")
        dist = pkg_resources.DistInfoDistribution(
            location=wheel.location,
            metadata=InMemoryMetadata(metadata_dict, wheel.location),
            project_name=name,
        )
        return cls(dist)

    @property
@@ -49,41 +135,52 @@ class Distribution(BaseDistribution):
        return self._dist.location

    @property
    def info_directory(self) -> Optional[str]:
    def installed_location(self) -> Optional[str]:
        egg_link = egg_link_path_from_location(self.raw_name)
        if egg_link:
            location = egg_link
        elif self.location:
            location = self.location
        else:
            return None
        return normalize_path(location)

    @property
    def info_location(self) -> Optional[str]:
        return self._dist.egg_info

    @property
    def canonical_name(self) -> "NormalizedName":
    def installed_by_distutils(self) -> bool:
        # A distutils-installed distribution is provided by FileMetadata. This
        # provider has a "path" attribute not present anywhere else. Not the
        # best introspection logic, but pip has been doing this for a long time.
        try:
            return bool(self._dist._provider.path)
        except AttributeError:
            return False

    @property
    def canonical_name(self) -> NormalizedName:
        return canonicalize_name(self._dist.project_name)

    @property
    def version(self) -> DistributionVersion:
        return parse_version(self._dist.version)

    @property
    def installer(self) -> str:
        return get_installer(self._dist)
    def is_file(self, path: InfoPath) -> bool:
        return self._dist.has_metadata(str(path))

    @property
    def editable(self) -> bool:
        return misc.dist_is_editable(self._dist)
    def iter_distutils_script_names(self) -> Iterator[str]:
        yield from self._dist.metadata_listdir("scripts")

    @property
    def local(self) -> bool:
        return misc.dist_is_local(self._dist)

    @property
    def in_usersite(self) -> bool:
        return misc.dist_in_usersite(self._dist)

    @property
    def in_site_packages(self) -> bool:
        return misc.dist_in_site_packages(self._dist)

    def read_text(self, name: str) -> str:
    def read_text(self, path: InfoPath) -> str:
        name = str(path)
        if not self._dist.has_metadata(name):
            raise FileNotFoundError(name)
        return self._dist.get_metadata(name)
        content = self._dist.get_metadata(name)
        if content is None:
            raise NoneMetadataError(self, name)
        return content

    def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
        for group, entries in self._dist.get_entry_map().items():
@@ -91,15 +188,36 @@ class Distribution(BaseDistribution):
            name, _, value = str(entry_point).partition("=")
            yield EntryPoint(name=name.strip(), value=value.strip(), group=group)

    @property
    def metadata(self) -> email.message.Message:
        return get_metadata(self._dist)
    def _metadata_impl(self) -> email.message.Message:
        """
        :raises NoneMetadataError: if the distribution reports `has_metadata()`
            True but `get_metadata()` returns None.
        """
        if isinstance(self._dist, pkg_resources.DistInfoDistribution):
            metadata_name = "METADATA"
        else:
            metadata_name = "PKG-INFO"
        try:
            metadata = self.read_text(metadata_name)
        except FileNotFoundError:
            if self.location:
                displaying_path = display_path(self.location)
            else:
                displaying_path = repr(self.location)
            logger.warning("No metadata found in %s", displaying_path)
            metadata = ""
        feed_parser = email.parser.FeedParser()
        feed_parser.feed(metadata)
        return feed_parser.close()

    def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
        if extras:  # pkg_resources raises on invalid extras, so we sanitize.
            extras = frozenset(extras).intersection(self._dist.extras)
        return self._dist.requires(extras)

    def iter_provided_extras(self) -> Iterable[str]:
        return self._dist.extras


class Environment(BaseEnvironment):
    def __init__(self, ws: pkg_resources.WorkingSet) -> None:
@@ -113,6 +231,10 @@ class Environment(BaseEnvironment):
    def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment:
        return cls(pkg_resources.WorkingSet(paths))

    def _iter_distributions(self) -> Iterator[BaseDistribution]:
        for dist in self._ws:
            yield Distribution(dist)

    def _search_distribution(self, name: str) -> Optional[BaseDistribution]:
        """Find a distribution matching the ``name`` in the environment.
@@ -120,13 +242,12 @@ class Environment(BaseEnvironment):
        match the behavior of ``pkg_resources.get_distribution()``.
        """
        canonical_name = canonicalize_name(name)
        for dist in self.iter_distributions():
        for dist in self.iter_all_distributions():
            if dist.canonical_name == canonical_name:
                return dist
        return None

    def get_distribution(self, name: str) -> Optional[BaseDistribution]:

        # Search the distribution by looking through the working set.
        dist = self._search_distribution(name)
        if dist:
@@ -147,7 +268,3 @@ class Environment(BaseEnvironment):
        except pkg_resources.DistributionNotFound:
            return None
        return self._search_distribution(name)

    def _iter_distributions(self) -> Iterator[BaseDistribution]:
        for dist in self._ws:
            yield Distribution(dist)

@@ -5,8 +5,7 @@ from pip._internal.utils.models import KeyBasedCompareMixin


class InstallationCandidate(KeyBasedCompareMixin):
    """Represents a potential "candidate" for installation.
    """
    """Represents a potential "candidate" for installation."""

    __slots__ = ["name", "version", "link"]

@@ -17,15 +16,19 @@ class InstallationCandidate(KeyBasedCompareMixin):

        super().__init__(
            key=(self.name, self.version, self.link),
            defining_class=InstallationCandidate
            defining_class=InstallationCandidate,
        )

    def __repr__(self) -> str:
        return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
            self.name, self.version, self.link,
            self.name,
            self.version,
            self.link,
        )

    def __str__(self) -> str:
        return '{!r} candidate (version {} at {})'.format(
            self.name, self.version, self.link,
        return "{!r} candidate (version {} at {})".format(
            self.name,
            self.version,
            self.link,
        )
@@ -74,14 +74,10 @@ class VcsInfo:
        vcs: str,
        commit_id: str,
        requested_revision: Optional[str] = None,
        resolved_revision: Optional[str] = None,
        resolved_revision_type: Optional[str] = None,
    ) -> None:
        self.vcs = vcs
        self.requested_revision = requested_revision
        self.commit_id = commit_id
        self.resolved_revision = resolved_revision
        self.resolved_revision_type = resolved_revision_type

    @classmethod
    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]:
@@ -91,8 +87,6 @@ class VcsInfo:
            vcs=_get_required(d, str, "vcs"),
            commit_id=_get_required(d, str, "commit_id"),
            requested_revision=_get(d, str, "requested_revision"),
            resolved_revision=_get(d, str, "resolved_revision"),
            resolved_revision_type=_get(d, str, "resolved_revision_type"),
        )

    def _to_dict(self) -> Dict[str, Any]:
@@ -100,8 +94,6 @@ class VcsInfo:
            vcs=self.vcs,
            requested_revision=self.requested_revision,
            commit_id=self.commit_id,
            resolved_revision=self.resolved_revision,
            resolved_revision_type=self.resolved_revision_type,
        )


@@ -111,17 +103,42 @@ class ArchiveInfo:
    def __init__(
        self,
        hash: Optional[str] = None,
        hashes: Optional[Dict[str, str]] = None,
    ) -> None:
        # set hashes before hash, since the hash setter will further populate hashes
        self.hashes = hashes
        self.hash = hash

    @property
    def hash(self) -> Optional[str]:
        return self._hash

    @hash.setter
    def hash(self, value: Optional[str]) -> None:
        if value is not None:
            # Auto-populate the hashes key to upgrade to the new format automatically.
            # We don't back-populate the legacy hash key from hashes.
            try:
                hash_name, hash_value = value.split("=", 1)
            except ValueError:
                raise DirectUrlValidationError(
                    f"invalid archive_info.hash format: {value!r}"
                )
            if self.hashes is None:
                self.hashes = {hash_name: hash_value}
            elif hash_name not in self.hashes:
                self.hashes = self.hashes.copy()
                self.hashes[hash_name] = hash_value
        self._hash = value

    @classmethod
    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]:
        if d is None:
            return None
        return cls(hash=_get(d, str, "hash"))
        return cls(hash=_get(d, str, "hash"), hashes=_get(d, dict, "hashes"))

    def _to_dict(self) -> Dict[str, Any]:
        return _filter_none(hash=self.hash)
        return _filter_none(hash=self.hash, hashes=self.hashes)


class DirInfo:
@@ -137,9 +154,7 @@ class DirInfo:
    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["DirInfo"]:
        if d is None:
            return None
        return cls(
            editable=_get_required(d, bool, "editable", default=False)
        )
        return cls(editable=_get_required(d, bool, "editable", default=False))

    def _to_dict(self) -> Dict[str, Any]:
        return _filter_none(editable=self.editable or None)
@@ -149,7 +164,6 @@ InfoType = Union[ArchiveInfo, DirInfo, VcsInfo]


class DirectUrl:

    def __init__(
        self,
        url: str,
@@ -165,9 +179,9 @@ class DirectUrl:
            return netloc
        user_pass, netloc_no_user_pass = netloc.split("@", 1)
        if (
            isinstance(self.info, VcsInfo) and
            self.info.vcs == "git" and
            user_pass == "git"
            isinstance(self.info, VcsInfo)
            and self.info.vcs == "git"
            and user_pass == "git"
        ):
            return netloc
        if ENV_VAR_RE.match(user_pass):
@@ -218,3 +232,6 @@ class DirectUrl:

    def to_json(self) -> str:
        return json.dumps(self.to_dict(), sort_keys=True)

    def is_local_editable(self) -> bool:
        return isinstance(self.info, DirInfo) and self.info.editable

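The hash setter's upgrade path in one runnable sketch: a legacy "name=value" string is split once and mirrored into the hashes dict, and an existing entry for the same algorithm is never overwritten (the digests are hypothetical):

legacy = "sha256=deadbeef"  # hypothetical legacy archive_info.hash value
hashes = {"sha512": "cafe"}  # hypothetical pre-existing hashes dict

name, _, value = legacy.partition("=")
if name not in hashes:
    hashes = {**hashes, name: value}
print(hashes)  # {'sha512': 'cafe', 'sha256': 'deadbeef'}
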
@@ -6,15 +6,14 @@ from pip._internal.exceptions import CommandError


class FormatControl:
    """Helper for managing formats from which a package can be installed.
    """
    """Helper for managing formats from which a package can be installed."""

    __slots__ = ["no_binary", "only_binary"]

    def __init__(
        self,
        no_binary: Optional[Set[str]] = None,
        only_binary: Optional[Set[str]] = None
        only_binary: Optional[Set[str]] = None,
    ) -> None:
        if no_binary is None:
            no_binary = set()
@@ -31,35 +30,30 @@ class FormatControl:
        if self.__slots__ != other.__slots__:
            return False

        return all(
            getattr(self, k) == getattr(other, k)
            for k in self.__slots__
        )
        return all(getattr(self, k) == getattr(other, k) for k in self.__slots__)

    def __repr__(self) -> str:
        return "{}({}, {})".format(
            self.__class__.__name__,
            self.no_binary,
            self.only_binary
            self.__class__.__name__, self.no_binary, self.only_binary
        )

    @staticmethod
    def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None:
        if value.startswith('-'):
        if value.startswith("-"):
            raise CommandError(
                "--no-binary / --only-binary option requires 1 argument."
            )
        new = value.split(',')
        while ':all:' in new:
        new = value.split(",")
        while ":all:" in new:
            other.clear()
            target.clear()
            target.add(':all:')
            del new[:new.index(':all:') + 1]
            target.add(":all:")
            del new[: new.index(":all:") + 1]
            # Without a none, we want to discard everything as :all: covers it
            if ':none:' not in new:
            if ":none:" not in new:
                return
        for name in new:
            if name == ':none:':
            if name == ":none:":
                target.clear()
                continue
            name = canonicalize_name(name)
@@ -69,16 +63,18 @@ class FormatControl:
    def get_allowed_formats(self, canonical_name: str) -> FrozenSet[str]:
        result = {"binary", "source"}
        if canonical_name in self.only_binary:
            result.discard('source')
            result.discard("source")
        elif canonical_name in self.no_binary:
            result.discard('binary')
        elif ':all:' in self.only_binary:
            result.discard('source')
        elif ':all:' in self.no_binary:
            result.discard('binary')
            result.discard("binary")
        elif ":all:" in self.only_binary:
            result.discard("source")
        elif ":all:" in self.no_binary:
            result.discard("binary")
        return frozenset(result)

    def disallow_binaries(self) -> None:
        self.handle_mutual_excludes(
            ':all:', self.no_binary, self.only_binary,
            ":all:",
            self.no_binary,
            self.only_binary,
        )

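The :all:/:none: handling is easiest to see by example: :all: wipes both sides, and a later :none: re-clears the target, so only names after the last reset survive. A condensed rendition of handle_mutual_excludes() (canonicalize_name() omitted for brevity; the package names are hypothetical):

def handle(value, target, other):
    new = value.split(",")
    while ":all:" in new:
        other.clear()
        target.clear()
        target.add(":all:")
        del new[: new.index(":all:") + 1]
        if ":none:" not in new:
            return
    for name in new:
        if name == ":none:":
            target.clear()
            continue
        other.discard(name)
        target.add(name)

target, other = set(), set()
handle("pkg-a,:all:,pkg-b,:none:,pkg-c", target, other)
print(target)  # {'pkg-c'}
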
@@ -2,18 +2,16 @@ import urllib.parse


class PackageIndex:
    """Represents a Package Index and provides easier access to endpoints
    """
    """Represents a Package Index and provides easier access to endpoints"""

    __slots__ = ['url', 'netloc', 'simple_url', 'pypi_url',
                 'file_storage_domain']
    __slots__ = ["url", "netloc", "simple_url", "pypi_url", "file_storage_domain"]

    def __init__(self, url: str, file_storage_domain: str) -> None:
        super().__init__()
        self.url = url
        self.netloc = urllib.parse.urlsplit(url).netloc
        self.simple_url = self._url_for_path('simple')
        self.pypi_url = self._url_for_path('pypi')
        self.simple_url = self._url_for_path("simple")
        self.pypi_url = self._url_for_path("pypi")

        # This is part of a temporary hack used to block installs of PyPI
        # packages which depend on external urls only necessary until PyPI can
@@ -24,9 +22,7 @@ class PackageIndex:
        return urllib.parse.urljoin(self.url, path)


PyPI = PackageIndex(
    'https://pypi.org/', file_storage_domain='files.pythonhosted.org'
)
PyPI = PackageIndex("https://pypi.org/", file_storage_domain="files.pythonhosted.org")
TestPyPI = PackageIndex(
    'https://test.pypi.org/', file_storage_domain='test-files.pythonhosted.org'
    "https://test.pypi.org/", file_storage_domain="test-files.pythonhosted.org"
)

@@ -1,14 +1,28 @@
import functools
import itertools
import logging
import os
import posixpath
import re
import urllib.parse
from typing import TYPE_CHECKING, Dict, List, NamedTuple, Optional, Tuple, Union
from dataclasses import dataclass
from typing import (
    TYPE_CHECKING,
    Any,
    Dict,
    List,
    Mapping,
    NamedTuple,
    Optional,
    Tuple,
    Union,
)

from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.filetypes import WHEEL_EXTENSION
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.misc import (
    pairwise,
    redact_auth_from_url,
    split_auth_from_netloc,
    splitext,
@@ -17,38 +31,172 @@ from pip._internal.utils.models import KeyBasedCompareMixin
|
||||
from pip._internal.utils.urls import path_to_url, url_to_path
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pip._internal.index.collector import HTMLPage
|
||||
from pip._internal.index.collector import IndexContent
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
_SUPPORTED_HASHES = ("sha1", "sha224", "sha384", "sha256", "sha512", "md5")
|
||||
# Order matters, earlier hashes have a precedence over later hashes for what
|
||||
# we will pick to use.
|
||||
_SUPPORTED_HASHES = ("sha512", "sha384", "sha256", "sha224", "sha1", "md5")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class LinkHash:
|
||||
"""Links to content may have embedded hash values. This class parses those.
|
||||
|
||||
`name` must be any member of `_SUPPORTED_HASHES`.
|
||||
|
||||
This class can be converted to and from `ArchiveInfo`. While ArchiveInfo intends to
|
||||
be JSON-serializable to conform to PEP 610, this class contains the logic for
|
||||
parsing a hash name and value for correctness, and then checking whether that hash
|
||||
conforms to a schema with `.is_hash_allowed()`."""
|
||||
|
||||
name: str
|
||||
value: str
|
||||
|
||||
_hash_url_fragment_re = re.compile(
|
||||
# NB: we do not validate that the second group (.*) is a valid hex
|
||||
# digest. Instead, we simply keep that string in this class, and then check it
|
||||
# against Hashes when hash-checking is needed. This is easier to debug than
|
||||
# proactively discarding an invalid hex digest, as we handle incorrect hashes
|
||||
# and malformed hashes in the same place.
|
||||
r"[#&]({choices})=([^&]*)".format(
|
||||
choices="|".join(re.escape(hash_name) for hash_name in _SUPPORTED_HASHES)
|
||||
),
|
||||
)
|
||||
|
||||
def __post_init__(self) -> None:
|
||||
assert self.name in _SUPPORTED_HASHES
|
||||
|
||||
@classmethod
|
||||
def parse_pep658_hash(cls, dist_info_metadata: str) -> Optional["LinkHash"]:
|
||||
"""Parse a PEP 658 data-dist-info-metadata hash."""
|
||||
if dist_info_metadata == "true":
|
||||
return None
|
||||
name, sep, value = dist_info_metadata.partition("=")
|
||||
if not sep:
|
||||
return None
|
||||
if name not in _SUPPORTED_HASHES:
|
||||
return None
|
||||
return cls(name=name, value=value)
|
||||
|
||||
@classmethod
|
||||
@functools.lru_cache(maxsize=None)
|
||||
def find_hash_url_fragment(cls, url: str) -> Optional["LinkHash"]:
|
||||
"""Search a string for a checksum algorithm name and encoded output value."""
|
||||
match = cls._hash_url_fragment_re.search(url)
|
||||
if match is None:
|
||||
return None
|
||||
name, value = match.groups()
|
||||
return cls(name=name, value=value)
|
||||
|
||||
def as_dict(self) -> Dict[str, str]:
|
||||
return {self.name: self.value}
|
||||
|
||||
def as_hashes(self) -> Hashes:
|
||||
"""Return a Hashes instance which checks only for the current hash."""
|
||||
return Hashes({self.name: [self.value]})
|
||||
|
||||
def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
|
||||
"""
|
||||
Return True if the current hash is allowed by `hashes`.
|
||||
"""
|
||||
if hashes is None:
|
||||
return False
|
||||
return hashes.is_hash_allowed(self.name, hex_digest=self.value)
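A standalone sketch of the fragment parsing that find_hash_url_fragment performs, using the same pattern shape; the URL and digest are made up:

import re

_SUPPORTED_HASHES = ("sha512", "sha384", "sha256", "sha224", "sha1", "md5")
_hash_re = re.compile(
    r"[#&]({})=([^&]*)".format("|".join(map(re.escape, _SUPPORTED_HASHES)))
)

url = "https://example.org/pkg-1.0.tar.gz#sha256=deadbeef"
match = _hash_re.search(url)
assert match is not None
name, value = match.groups()
assert (name, value) == ("sha256", "deadbeef")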


def _clean_url_path_part(part: str) -> str:
    """
    Clean a "part" of a URL path (i.e. after splitting on "@" characters).
    """
    # We unquote prior to quoting to make sure nothing is double quoted.
    return urllib.parse.quote(urllib.parse.unquote(part))


def _clean_file_url_path(part: str) -> str:
    """
    Clean the first part of a URL path that corresponds to a local
    filesystem path (i.e. the first part after splitting on "@" characters).
    """
    # We unquote prior to quoting to make sure nothing is double quoted.
    # Also, on Windows the path part might contain a drive letter which
    # should not be quoted. On Linux where drive letters do not
    # exist, the colon should be quoted. We rely on urllib.request
    # to do the right thing here.
    return urllib.request.pathname2url(urllib.request.url2pathname(part))


# percent-encoded: /
_reserved_chars_re = re.compile("(@|%2F)", re.IGNORECASE)


def _clean_url_path(path: str, is_local_path: bool) -> str:
    """
    Clean the path portion of a URL.
    """
    if is_local_path:
        clean_func = _clean_file_url_path
    else:
        clean_func = _clean_url_path_part

    # Split on the reserved characters prior to cleaning so that
    # revision strings in VCS URLs are properly preserved.
    parts = _reserved_chars_re.split(path)

    cleaned_parts = []
    for to_clean, reserved in pairwise(itertools.chain(parts, [""])):
        cleaned_parts.append(clean_func(to_clean))
        # Normalize %xx escapes (e.g. %2f -> %2F)
        cleaned_parts.append(reserved.upper())

    return "".join(cleaned_parts)


def _ensure_quoted_url(url: str) -> str:
    """
    Make sure a link is fully quoted.
    For example, if ' ' occurs in the URL, it will be replaced with "%20",
    and without double-quoting other characters.
    """
    # Split the URL into parts according to the general structure
    # `scheme://netloc/path;parameters?query#fragment`.
    result = urllib.parse.urlparse(url)
    # If the netloc is empty, then the URL refers to a local filesystem path.
    is_local_path = not result.netloc
    path = _clean_url_path(result.path, is_local_path=is_local_path)
    return urllib.parse.urlunparse(result._replace(path=path))
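The unquote-then-quote round trip above is what prevents double-quoting. A minimal demonstration:

import urllib.parse

part = "some dir/file%20name"
cleaned = urllib.parse.quote(urllib.parse.unquote(part))
assert cleaned == "some%20dir/file%20name"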


class Link(KeyBasedCompareMixin):
    """Represents a parsed link from a Package Index's simple URL
    """
    """Represents a parsed link from a Package Index's simple URL"""

    __slots__ = [
        "_parsed_url",
        "_url",
        "_hashes",
        "comes_from",
        "requires_python",
        "yanked_reason",
        "dist_info_metadata",
        "cache_link_parsing",
        "egg_fragment",
    ]

    def __init__(
        self,
        url: str,
        comes_from: Optional[Union[str, "HTMLPage"]] = None,
        comes_from: Optional[Union[str, "IndexContent"]] = None,
        requires_python: Optional[str] = None,
        yanked_reason: Optional[str] = None,
        dist_info_metadata: Optional[str] = None,
        cache_link_parsing: bool = True,
        hashes: Optional[Mapping[str, str]] = None,
    ) -> None:
        """
        :param url: url of the resource pointed to (href of the link)
        :param comes_from: instance of HTMLPage where the link was found,
        :param comes_from: instance of IndexContent where the link was found,
            or string.
        :param requires_python: String containing the `Requires-Python`
            metadata field, specified in PEP 345. This may be specified by
@@ -60,15 +208,20 @@ class Link(KeyBasedCompareMixin):
            a simple repository HTML link. If the file has been yanked but
            no reason was provided, this should be the empty string. See
            PEP 592 for more information and the specification.
        :param dist_info_metadata: the metadata attached to the file, or None if no such
            metadata is provided. This is the value of the "data-dist-info-metadata"
            attribute, if present, in a simple repository HTML link. This may be parsed
            into its own `Link` by `self.metadata_link()`. See PEP 658 for more
            information and the specification.
        :param cache_link_parsing: A flag that is used elsewhere to determine
                                   whether resources retrieved from this link
                                   should be cached. PyPI index urls should
                                   generally have this set to False, for
                                   example.
            whether resources retrieved from this link should be cached. PyPI
            URLs should generally have this set to False, for example.
        :param hashes: A mapping of hash names to digests to allow us to
            determine the validity of a download.
        """

        # url can be a UNC windows share
        if url.startswith('\\\\'):
        if url.startswith("\\\\"):
            url = path_to_url(url)

        self._parsed_url = urllib.parse.urlsplit(url)
@@ -76,27 +229,99 @@ class Link(KeyBasedCompareMixin):
        # trying to set a new value.
        self._url = url

        link_hash = LinkHash.find_hash_url_fragment(url)
        hashes_from_link = {} if link_hash is None else link_hash.as_dict()
        if hashes is None:
            self._hashes = hashes_from_link
        else:
            self._hashes = {**hashes, **hashes_from_link}

        self.comes_from = comes_from
        self.requires_python = requires_python if requires_python else None
        self.yanked_reason = yanked_reason
        self.dist_info_metadata = dist_info_metadata

        super().__init__(key=url, defining_class=Link)

        self.cache_link_parsing = cache_link_parsing
        self.egg_fragment = self._egg_fragment()

    @classmethod
    def from_json(
        cls,
        file_data: Dict[str, Any],
        page_url: str,
    ) -> Optional["Link"]:
        """
        Convert a PyPI JSON document from a simple repository page into a Link.
        """
        file_url = file_data.get("url")
        if file_url is None:
            return None

        url = _ensure_quoted_url(urllib.parse.urljoin(page_url, file_url))
        pyrequire = file_data.get("requires-python")
        yanked_reason = file_data.get("yanked")
        dist_info_metadata = file_data.get("dist-info-metadata")
        hashes = file_data.get("hashes", {})

        # The Link.yanked_reason expects an empty string instead of a boolean.
        if yanked_reason and not isinstance(yanked_reason, str):
            yanked_reason = ""
        # The Link.yanked_reason expects None instead of False.
        elif not yanked_reason:
            yanked_reason = None

        return cls(
            url,
            comes_from=page_url,
            requires_python=pyrequire,
            yanked_reason=yanked_reason,
            hashes=hashes,
            dist_info_metadata=dist_info_metadata,
        )

    @classmethod
    def from_element(
        cls,
        anchor_attribs: Dict[str, Optional[str]],
        page_url: str,
        base_url: str,
    ) -> Optional["Link"]:
        """
        Convert an anchor element's attributes in a simple repository page to a Link.
        """
        href = anchor_attribs.get("href")
        if not href:
            return None

        url = _ensure_quoted_url(urllib.parse.urljoin(base_url, href))
        pyrequire = anchor_attribs.get("data-requires-python")
        yanked_reason = anchor_attribs.get("data-yanked")
        dist_info_metadata = anchor_attribs.get("data-dist-info-metadata")

        return cls(
            url,
            comes_from=page_url,
            requires_python=pyrequire,
            yanked_reason=yanked_reason,
            dist_info_metadata=dist_info_metadata,
        )

    def __str__(self) -> str:
        if self.requires_python:
            rp = f' (requires-python:{self.requires_python})'
            rp = f" (requires-python:{self.requires_python})"
        else:
            rp = ''
            rp = ""
        if self.comes_from:
            return '{} (from {}){}'.format(
                redact_auth_from_url(self._url), self.comes_from, rp)
            return "{} (from {}){}".format(
                redact_auth_from_url(self._url), self.comes_from, rp
            )
        else:
            return redact_auth_from_url(str(self._url))

    def __repr__(self) -> str:
        return f'<Link {self}>'
        return f"<Link {self}>"

    @property
    def url(self) -> str:
@@ -104,7 +329,7 @@ class Link(KeyBasedCompareMixin):

    @property
    def filename(self) -> str:
        path = self.path.rstrip('/')
        path = self.path.rstrip("/")
        name = posixpath.basename(path)
        if not name:
            # Make sure we don't leak auth information if the netloc
@@ -113,7 +338,7 @@ class Link(KeyBasedCompareMixin):
            return netloc

        name = urllib.parse.unquote(name)
        assert name, f'URL {self._url!r} produced no filename'
        assert name, f"URL {self._url!r} produced no filename"
        return name

    @property
@@ -136,7 +361,7 @@ class Link(KeyBasedCompareMixin):
        return urllib.parse.unquote(self._parsed_url.path)

    def splitext(self) -> Tuple[str, str]:
        return splitext(posixpath.basename(self.path.rstrip('/')))
        return splitext(posixpath.basename(self.path.rstrip("/")))

    @property
    def ext(self) -> str:
@@ -145,18 +370,34 @@ class Link(KeyBasedCompareMixin):
    @property
    def url_without_fragment(self) -> str:
        scheme, netloc, path, query, fragment = self._parsed_url
        return urllib.parse.urlunsplit((scheme, netloc, path, query, ''))
        return urllib.parse.urlunsplit((scheme, netloc, path, query, ""))

    _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')
    _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)")

    @property
    def egg_fragment(self) -> Optional[str]:
    # Per PEP 508.
    _project_name_re = re.compile(
        r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
    )

    def _egg_fragment(self) -> Optional[str]:
        match = self._egg_fragment_re.search(self._url)
        if not match:
            return None
        return match.group(1)

    _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')
        # An egg fragment looks like a PEP 508 project name, along with
        # an optional extras specifier. Anything else is invalid.
        project_name = match.group(1)
        if not self._project_name_re.match(project_name):
            deprecated(
                reason=f"{self} contains an egg fragment with a non-PEP 508 name",
                replacement="to use the req @ url syntax, and remove the egg fragment",
                gone_in="25.0",
                issue=11617,
            )

        return project_name

    _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)")

    @property
    def subdirectory_fragment(self) -> Optional[str]:
@@ -165,31 +406,37 @@ class Link(KeyBasedCompareMixin):
            return None
        return match.group(1)

    _hash_re = re.compile(
        r'({choices})=([a-f0-9]+)'.format(choices="|".join(_SUPPORTED_HASHES))
    )
    def metadata_link(self) -> Optional["Link"]:
        """Implementation of PEP 658 parsing."""
        # Note that Link.from_element() parsing the "data-dist-info-metadata" attribute
        # from an HTML anchor tag is typically how the Link.dist_info_metadata attribute
        # gets set.
        if self.dist_info_metadata is None:
            return None
        metadata_url = f"{self.url_without_fragment}.metadata"
        metadata_link_hash = LinkHash.parse_pep658_hash(self.dist_info_metadata)
        if metadata_link_hash is None:
            return Link(metadata_url)
        return Link(metadata_url, hashes=metadata_link_hash.as_dict())
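A small sketch of the PEP 658 URL derivation that metadata_link() performs; the wheel URL and digest are hypothetical:

wheel_url = "https://files.example.org/pkg-1.0-py3-none-any.whl"
data_dist_info_metadata = "sha256=" + "a" * 64

# The metadata file lives next to the wheel, with a ".metadata" suffix.
metadata_url = f"{wheel_url}.metadata"
name, _, value = data_dist_info_metadata.partition("=")
assert metadata_url.endswith(".whl.metadata")
assert (name, len(value)) == ("sha256", 64)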

    def as_hashes(self) -> Hashes:
        return Hashes({k: [v] for k, v in self._hashes.items()})

    @property
    def hash(self) -> Optional[str]:
        match = self._hash_re.search(self._url)
        if match:
            return match.group(2)
        return None
        return next(iter(self._hashes.values()), None)

    @property
    def hash_name(self) -> Optional[str]:
        match = self._hash_re.search(self._url)
        if match:
            return match.group(1)
        return None
        return next(iter(self._hashes), None)

    @property
    def show_url(self) -> str:
        return posixpath.basename(self._url.split('#', 1)[0].split('?', 1)[0])
        return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0])

    @property
    def is_file(self) -> bool:
        return self.scheme == 'file'
        return self.scheme == "file"

    def is_existing_dir(self) -> bool:
        return self.is_file and os.path.isdir(self.file_path)
@@ -210,19 +457,15 @@ class Link(KeyBasedCompareMixin):

    @property
    def has_hash(self) -> bool:
        return self.hash_name is not None
        return bool(self._hashes)

    def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
        """
        Return True if the link has a hash and it is allowed.
        Return True if the link has a hash and it is allowed by `hashes`.
        """
        if hashes is None or not self.has_hash:
        if hashes is None:
            return False
        # Assert non-None so mypy knows self.hash_name and self.hash are str.
        assert self.hash_name is not None
        assert self.hash is not None

        return hashes.is_hash_allowed(self.hash_name, hex_digest=self.hash)
        return any(hashes.is_hash_allowed(k, v) for k, v in self._hashes.items())


class _CleanResult(NamedTuple):

@@ -6,7 +6,7 @@ https://docs.python.org/3/install/index.html#alternate-installation.
"""


SCHEME_KEYS = ['platlib', 'purelib', 'headers', 'scripts', 'data']
SCHEME_KEYS = ["platlib", "purelib", "headers", "scripts", "data"]


class Scheme:

@@ -20,13 +20,14 @@ class SearchScope:
    Encapsulates the locations that pip is configured to search.
    """

    __slots__ = ["find_links", "index_urls"]
    __slots__ = ["find_links", "index_urls", "no_index"]

    @classmethod
    def create(
        cls,
        find_links: List[str],
        index_urls: List[str],
        no_index: bool,
    ) -> "SearchScope":
        """
        Create a SearchScope object after normalizing the `find_links`.
@@ -38,7 +39,7 @@ class SearchScope:
        # blindly normalize anything starting with a ~...
        built_find_links: List[str] = []
        for link in find_links:
            if link.startswith('~'):
            if link.startswith("~"):
                new_link = normalize_path(link)
                if os.path.exists(new_link):
                    link = new_link
@@ -49,33 +50,35 @@ class SearchScope:
        if not has_tls():
            for link in itertools.chain(index_urls, built_find_links):
                parsed = urllib.parse.urlparse(link)
                if parsed.scheme == 'https':
                if parsed.scheme == "https":
                    logger.warning(
                        'pip is configured with locations that require '
                        'TLS/SSL, however the ssl module in Python is not '
                        'available.'
                        "pip is configured with locations that require "
                        "TLS/SSL, however the ssl module in Python is not "
                        "available."
                    )
                    break

        return cls(
            find_links=built_find_links,
            index_urls=index_urls,
            no_index=no_index,
        )

    def __init__(
        self,
        find_links: List[str],
        index_urls: List[str],
        no_index: bool,
    ) -> None:
        self.find_links = find_links
        self.index_urls = index_urls
        self.no_index = no_index

    def get_formatted_locations(self) -> str:
        lines = []
        redacted_index_urls = []
        if self.index_urls and self.index_urls != [PyPI.simple_url]:
            for url in self.index_urls:

                redacted_index_url = redact_auth_from_url(url)

                # Parse the URL
@@ -88,20 +91,23 @@ class SearchScope:
                # exceptions for malformed URLs
                if not purl.scheme and not purl.netloc:
                    logger.warning(
                        'The index url "%s" seems invalid, '
                        'please provide a scheme.', redacted_index_url)
                        'The index url "%s" seems invalid, please provide a scheme.',
                        redacted_index_url,
                    )

                redacted_index_urls.append(redacted_index_url)

            lines.append('Looking in indexes: {}'.format(
                ', '.join(redacted_index_urls)))
            lines.append(
                "Looking in indexes: {}".format(", ".join(redacted_index_urls))
            )

        if self.find_links:
            lines.append(
                'Looking in links: {}'.format(', '.join(
                    redact_auth_from_url(url) for url in self.find_links))
                "Looking in links: {}".format(
                    ", ".join(redact_auth_from_url(url) for url in self.find_links)
                )
            )
        return '\n'.join(lines)
        return "\n".join(lines)

    def get_index_urls_locations(self, project_name: str) -> List[str]:
        """Returns the locations found via self.index_urls
@@ -112,15 +118,15 @@ class SearchScope:

        def mkurl_pypi_url(url: str) -> str:
            loc = posixpath.join(
                url,
                urllib.parse.quote(canonicalize_name(project_name)))
                url, urllib.parse.quote(canonicalize_name(project_name))
            )
            # For maximum compatibility with easy_install, ensure the path
            # ends in a trailing slash. Although this isn't in the spec
            # (and PyPI can handle it without the slash) some other index
            # implementations might break if they relied on easy_install's
            # behavior.
            if not loc.endswith('/'):
                loc = loc + '/'
            if not loc.endswith("/"):
                loc = loc + "/"
            return loc

        return [mkurl_pypi_url(url) for url in self.index_urls]
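To illustrate mkurl_pypi_url, the per-project URL is the index root joined with the canonicalized project name plus a trailing slash. A standalone sketch with a hypothetical index URL (pip uses its vendored copy of packaging; plain packaging is used here):

import posixpath
import urllib.parse

from packaging.utils import canonicalize_name

index_url = "https://example.org/simple"
loc = posixpath.join(index_url, urllib.parse.quote(canonicalize_name("My.Package")))
if not loc.endswith("/"):
    loc = loc + "/"
assert loc == "https://example.org/simple/my-package/"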

@@ -9,8 +9,13 @@ class SelectionPreferences:
    and installing files.
    """

    __slots__ = ['allow_yanked', 'allow_all_prereleases', 'format_control',
                 'prefer_binary', 'ignore_requires_python']
    __slots__ = [
        "allow_yanked",
        "allow_all_prereleases",
        "format_control",
        "prefer_binary",
        "ignore_requires_python",
    ]

    # Don't include an allow_yanked default value to make sure each call
    # site considers whether yanked releases are allowed. This also causes

@@ -53,7 +53,7 @@ class TargetPython:
        else:
            py_version_info = normalize_version_info(py_version_info)

        py_version = '.'.join(map(str, py_version_info[:2]))
        py_version = ".".join(map(str, py_version_info[:2]))

        self.abis = abis
        self.implementation = implementation
@@ -70,19 +70,18 @@ class TargetPython:
        """
        display_version = None
        if self._given_py_version_info is not None:
            display_version = '.'.join(
            display_version = ".".join(
                str(part) for part in self._given_py_version_info
            )

        key_values = [
            ('platforms', self.platforms),
            ('version_info', display_version),
            ('abis', self.abis),
            ('implementation', self.implementation),
            ("platforms", self.platforms),
            ("version_info", display_version),
            ("abis", self.abis),
            ("implementation", self.implementation),
        ]
        return ' '.join(
            f'{key}={value!r}' for key, value in key_values
            if value is not None
        return " ".join(
            f"{key}={value!r}" for key, value in key_values if value is not None
        )

    def get_tags(self) -> List[Tag]:

@@ -13,10 +13,10 @@ class Wheel:
    """A wheel file"""

    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?))
        ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
        r"""^(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]*?))
        ((-(?P<build>\d[^-]*?))?-(?P<pyver>[^\s-]+?)-(?P<abi>[^\s-]+?)-(?P<plat>[^\s-]+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE
        re.VERBOSE,
    )

    def __init__(self, filename: str) -> None:
@@ -25,23 +25,20 @@ class Wheel:
        """
        wheel_info = self.wheel_file_re.match(filename)
        if not wheel_info:
            raise InvalidWheelFilename(
                f"{filename} is not a valid wheel filename."
            )
            raise InvalidWheelFilename(f"{filename} is not a valid wheel filename.")
        self.filename = filename
        self.name = wheel_info.group('name').replace('_', '-')
        self.name = wheel_info.group("name").replace("_", "-")
        # we'll assume "_" means "-" due to wheel naming scheme
        # (https://github.com/pypa/pip/issues/1150)
        self.version = wheel_info.group('ver').replace('_', '-')
        self.build_tag = wheel_info.group('build')
        self.pyversions = wheel_info.group('pyver').split('.')
        self.abis = wheel_info.group('abi').split('.')
        self.plats = wheel_info.group('plat').split('.')
        self.version = wheel_info.group("ver").replace("_", "-")
        self.build_tag = wheel_info.group("build")
        self.pyversions = wheel_info.group("pyver").split(".")
        self.abis = wheel_info.group("abi").split(".")
        self.plats = wheel_info.group("plat").split(".")

        # All the tag combinations from this file
        self.file_tags = {
            Tag(x, y, z) for x in self.pyversions
            for y in self.abis for z in self.plats
            Tag(x, y, z) for x in self.pyversions for y in self.abis for z in self.plats
        }

    def get_formatted_file_tags(self) -> List[str]:
@@ -61,7 +58,10 @@ class Wheel:
        :raises ValueError: If none of the wheel's file tags match one of
            the supported tags.
        """
        return min(tags.index(tag) for tag in self.file_tags if tag in tags)
        try:
            return next(i for i, t in enumerate(tags) if t in self.file_tags)
        except StopIteration:
            raise ValueError()
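A quick check of the tightened filename regex from the hunk above (individual fields may no longer contain whitespace or hyphens):

import re

wheel_file_re = re.compile(
    r"""^(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]*?))
    ((-(?P<build>\d[^-]*?))?-(?P<pyver>[^\s-]+?)-(?P<abi>[^\s-]+?)-(?P<plat>[^\s-]+?)
    \.whl|\.dist-info)$""",
    re.VERBOSE,
)

m = wheel_file_re.match("pip-23.1.2-py3-none-any.whl")
assert m is not None
assert m.group("name") == "pip"
assert m.group("ver") == "23.1.2"
assert (m.group("pyver"), m.group("abi"), m.group("plat")) == ("py3", "none", "any")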

    def find_most_preferred_tag(
        self, tags: List[Tag], tag_to_priority: Dict[Tag, int]

@@ -3,9 +3,18 @@
Contains interface (MultiDomainBasicAuth) and associated glue code for
providing credentials in the context of network requests.
"""

import logging
import os
import shutil
import subprocess
import sysconfig
import typing
import urllib.parse
from typing import Any, Dict, List, Optional, Tuple
from abc import ABC, abstractmethod
from functools import lru_cache
from os.path import commonprefix
from pathlib import Path
from typing import Any, Dict, List, NamedTuple, Optional, Tuple

from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
from pip._vendor.requests.models import Request, Response
@@ -23,59 +32,204 @@ from pip._internal.vcs.versioncontrol import AuthInfo

logger = getLogger(__name__)

Credentials = Tuple[str, str, str]

try:
    import keyring
except ImportError:
    keyring = None
except Exception as exc:
    logger.warning(
        "Keyring is skipped due to an exception: %s",
        str(exc),
    )
    keyring = None
KEYRING_DISABLED = False


def get_keyring_auth(url: Optional[str], username: Optional[str]) -> Optional[AuthInfo]:
    """Return the tuple auth for a given url from keyring."""
    global keyring
    if not url or not keyring:
class Credentials(NamedTuple):
    url: str
    username: str
    password: str


class KeyRingBaseProvider(ABC):
    """Keyring base provider interface"""

    has_keyring: bool

    @abstractmethod
    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
        ...

    @abstractmethod
    def save_auth_info(self, url: str, username: str, password: str) -> None:
        ...


class KeyRingNullProvider(KeyRingBaseProvider):
    """Keyring null provider"""

    has_keyring = False

    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
        return None

    try:
        try:
            get_credential = keyring.get_credential
        except AttributeError:
            pass
        else:
    def save_auth_info(self, url: str, username: str, password: str) -> None:
        return None


class KeyRingPythonProvider(KeyRingBaseProvider):
    """Keyring interface which uses locally imported `keyring`"""

    has_keyring = True

    def __init__(self) -> None:
        import keyring

        self.keyring = keyring

    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
        # Support keyring's get_credential interface which supports getting
        # credentials without a username. This is only available for
        # keyring>=15.2.0.
        if hasattr(self.keyring, "get_credential"):
            logger.debug("Getting credentials from keyring for %s", url)
            cred = get_credential(url, username)
            cred = self.keyring.get_credential(url, username)
            if cred is not None:
                return cred.username, cred.password
            return None

        if username:
        if username is not None:
            logger.debug("Getting password from keyring for %s", url)
            password = keyring.get_password(url, username)
            password = self.keyring.get_password(url, username)
            if password:
                return username, password
        return None

    except Exception as exc:
        logger.warning(
            "Keyring is skipped due to an exception: %s",
            str(exc),
    def save_auth_info(self, url: str, username: str, password: str) -> None:
        self.keyring.set_password(url, username, password)
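As an illustration of the provider interface introduced above, a hedged sketch of a custom in-memory provider with the same method shape (purely hypothetical, e.g. for tests; AuthInfo is simplified to a tuple here):

from typing import Dict, Optional, Tuple

AuthInfo = Tuple[Optional[str], Optional[str]]


class InMemoryKeyRingProvider:
    """Mirrors KeyRingBaseProvider's shape with a dict as the backend."""

    has_keyring = True

    def __init__(self) -> None:
        self._store: Dict[Tuple[str, str], str] = {}

    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
        if username is None:
            return None
        password = self._store.get((url, username))
        return (username, password) if password is not None else None

    def save_auth_info(self, url: str, username: str, password: str) -> None:
        self._store[(url, username)] = password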


class KeyRingCliProvider(KeyRingBaseProvider):
    """Provider which uses `keyring` cli

    Instead of calling the keyring package installed alongside pip
    we call keyring on the command line which will enable pip to
    use whichever installation of keyring is available first in
    PATH.
    """

    has_keyring = True

    def __init__(self, cmd: str) -> None:
        self.keyring = cmd

    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
        # This is the default implementation of keyring.get_credential
        # https://github.com/jaraco/keyring/blob/97689324abcf01bd1793d49063e7ca01e03d7d07/keyring/backend.py#L134-L139
        if username is not None:
            password = self._get_password(url, username)
            if password is not None:
                return username, password
        return None

    def save_auth_info(self, url: str, username: str, password: str) -> None:
        return self._set_password(url, username, password)

    def _get_password(self, service_name: str, username: str) -> Optional[str]:
        """Mirror the implementation of keyring.get_password using cli"""
        if self.keyring is None:
            return None

        cmd = [self.keyring, "get", service_name, username]
        env = os.environ.copy()
        env["PYTHONIOENCODING"] = "utf-8"
        res = subprocess.run(
            cmd,
            stdin=subprocess.DEVNULL,
            stdout=subprocess.PIPE,
            env=env,
        )
        keyring = None
        return None
        if res.returncode:
            return None
        return res.stdout.decode("utf-8").strip(os.linesep)

    def _set_password(self, service_name: str, username: str, password: str) -> None:
        """Mirror the implementation of keyring.set_password using cli"""
        if self.keyring is None:
            return None
        env = os.environ.copy()
        env["PYTHONIOENCODING"] = "utf-8"
        subprocess.run(
            [self.keyring, "set", service_name, username],
            input=f"{password}{os.linesep}".encode("utf-8"),
            env=env,
            check=True,
        )
        return None


@lru_cache(maxsize=None)
def get_keyring_provider(provider: str) -> KeyRingBaseProvider:
    logger.verbose("Keyring provider requested: %s", provider)

    # keyring has previously failed and been disabled
    if KEYRING_DISABLED:
        provider = "disabled"
    if provider in ["import", "auto"]:
        try:
            impl = KeyRingPythonProvider()
            logger.verbose("Keyring provider set: import")
            return impl
        except ImportError:
            pass
        except Exception as exc:
            # In the event of an unexpected exception
            # we should warn the user
            msg = "Installed copy of keyring fails with exception %s"
            if provider == "auto":
                msg = msg + ", trying to find a keyring executable as a fallback"
            logger.warning(msg, exc, exc_info=logger.isEnabledFor(logging.DEBUG))
    if provider in ["subprocess", "auto"]:
        cli = shutil.which("keyring")
        if cli and cli.startswith(sysconfig.get_path("scripts")):
            # all code within this function is stolen from shutil.which implementation
            @typing.no_type_check
            def PATH_as_shutil_which_determines_it() -> str:
                path = os.environ.get("PATH", None)
                if path is None:
                    try:
                        path = os.confstr("CS_PATH")
                    except (AttributeError, ValueError):
                        # os.confstr() or CS_PATH is not available
                        path = os.defpath
                # bpo-35755: Don't use os.defpath if the PATH environment variable is
                # set to an empty string

                return path

            scripts = Path(sysconfig.get_path("scripts"))

            paths = []
            for path in PATH_as_shutil_which_determines_it().split(os.pathsep):
                p = Path(path)
                try:
                    if not p.samefile(scripts):
                        paths.append(path)
                except FileNotFoundError:
                    pass

            path = os.pathsep.join(paths)

            cli = shutil.which("keyring", path=path)

        if cli:
            logger.verbose("Keyring provider set: subprocess with executable %s", cli)
            return KeyRingCliProvider(cli)

    logger.verbose("Keyring provider set: disabled")
    return KeyRingNullProvider()
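The resolution order above is: an importable keyring module first, then a keyring executable on PATH, then the null provider. A condensed, standalone sketch of that fallback chain:

import shutil


def choose_provider(provider: str = "auto") -> str:
    # Mirrors the structure of get_keyring_provider(), minus logging
    # and the scripts-directory special case.
    if provider in ("import", "auto"):
        try:
            import keyring  # noqa: F401

            return "import"
        except ImportError:
            pass
    if provider in ("subprocess", "auto") and shutil.which("keyring"):
        return "subprocess"
    return "disabled"


print(choose_provider())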


class MultiDomainBasicAuth(AuthBase):
    def __init__(
        self, prompting: bool = True, index_urls: Optional[List[str]] = None
        self,
        prompting: bool = True,
        index_urls: Optional[List[str]] = None,
        keyring_provider: str = "auto",
    ) -> None:
        self.prompting = prompting
        self.index_urls = index_urls
        self.keyring_provider = keyring_provider  # type: ignore[assignment]
        self.passwords: Dict[str, AuthInfo] = {}
        # When the user is prompted to enter credentials and keyring is
        # available, we will offer to save them. If the user accepts,
@@ -84,6 +238,47 @@ class MultiDomainBasicAuth(AuthBase):
        # ``save_credentials`` to save these.
        self._credentials_to_save: Optional[Credentials] = None

    @property
    def keyring_provider(self) -> KeyRingBaseProvider:
        return get_keyring_provider(self._keyring_provider)

    @keyring_provider.setter
    def keyring_provider(self, provider: str) -> None:
        # The free function get_keyring_provider has been decorated with
        # functools.cache. If an exception occurs in get_keyring_auth that
        # cache will be cleared and keyring disabled, take that into account
        # if you want to remove this indirection.
        self._keyring_provider = provider

    @property
    def use_keyring(self) -> bool:
        # We won't use keyring when --no-input is passed unless
        # a specific provider is requested because it might require
        # user interaction
        return self.prompting or self._keyring_provider not in ["auto", "disabled"]

    def _get_keyring_auth(
        self,
        url: Optional[str],
        username: Optional[str],
    ) -> Optional[AuthInfo]:
        """Return the tuple auth for a given url from keyring."""
        # Do nothing if no url was provided
        if not url:
            return None

        try:
            return self.keyring_provider.get_auth_info(url, username)
        except Exception as exc:
            logger.warning(
                "Keyring is skipped due to an exception: %s",
                str(exc),
            )
            global KEYRING_DISABLED
            KEYRING_DISABLED = True
            get_keyring_provider.cache_clear()
            return None

    def _get_index_url(self, url: str) -> Optional[str]:
        """Return the original index URL matching the requested URL.

@@ -100,15 +295,42 @@ class MultiDomainBasicAuth(AuthBase):
        if not url or not self.index_urls:
            return None

        for u in self.index_urls:
            prefix = remove_auth_from_url(u).rstrip("/") + "/"
            if url.startswith(prefix):
                return u
        return None
        url = remove_auth_from_url(url).rstrip("/") + "/"
        parsed_url = urllib.parse.urlsplit(url)

        candidates = []

        for index in self.index_urls:
            index = index.rstrip("/") + "/"
            parsed_index = urllib.parse.urlsplit(remove_auth_from_url(index))
            if parsed_url == parsed_index:
                return index

            if parsed_url.netloc != parsed_index.netloc:
                continue

            candidate = urllib.parse.urlsplit(index)
            candidates.append(candidate)

        if not candidates:
            return None

        candidates.sort(
            reverse=True,
            key=lambda candidate: commonprefix(
                [
                    parsed_url.path,
                    candidate.path,
                ]
            ).rfind("/"),
        )

        return urllib.parse.urlunsplit(candidates[0])
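The new matching logic prefers the configured index whose path shares the longest directory prefix with the requested URL. A standalone sketch (URLs hypothetical):

import urllib.parse
from os.path import commonprefix

url = urllib.parse.urlsplit("https://example.org/pypi/dev/pkg/")
indexes = [
    urllib.parse.urlsplit("https://example.org/pypi/"),
    urllib.parse.urlsplit("https://example.org/pypi/dev/"),
]
indexes.sort(
    reverse=True,
    key=lambda candidate: commonprefix([url.path, candidate.path]).rfind("/"),
)
assert urllib.parse.urlunsplit(indexes[0]) == "https://example.org/pypi/dev/"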

    def _get_new_credentials(
        self,
        original_url: str,
        *,
        allow_netrc: bool = True,
        allow_keyring: bool = False,
    ) -> AuthInfo:
@@ -152,8 +374,8 @@ class MultiDomainBasicAuth(AuthBase):
            # The index url is more specific than the netloc, so try it first
            # fmt: off
            kr_auth = (
                get_keyring_auth(index_url, username) or
                get_keyring_auth(netloc, username)
                self._get_keyring_auth(index_url, username) or
                self._get_keyring_auth(netloc, username)
            )
            # fmt: on
            if kr_auth:
@@ -179,9 +401,16 @@ class MultiDomainBasicAuth(AuthBase):
        # Try to get credentials from original url
        username, password = self._get_new_credentials(original_url)

        # If credentials not found, use any stored credentials for this netloc
        if username is None and password is None:
            username, password = self.passwords.get(netloc, (None, None))
        # If credentials not found, use any stored credentials for this netloc.
        # Do this if either the username or the password is missing.
        # This accounts for the situation in which the user has specified
        # the username in the index url, but the password comes from keyring.
        if (username is None or password is None) and netloc in self.passwords:
            un, pw = self.passwords[netloc]
            # It is possible that the cached credentials are for a different username,
            # in which case the cache should be ignored.
            if username is None or username == un:
                username, password = un, pw

        if username is not None or password is not None:
            # Convert the username and password if they're None, so that
@@ -223,18 +452,23 @@ class MultiDomainBasicAuth(AuthBase):
    def _prompt_for_password(
        self, netloc: str
    ) -> Tuple[Optional[str], Optional[str], bool]:
        username = ask_input(f"User for {netloc}: ")
        username = ask_input(f"User for {netloc}: ") if self.prompting else None
        if not username:
            return None, None, False
        auth = get_keyring_auth(netloc, username)
        if auth and auth[0] is not None and auth[1] is not None:
            return auth[0], auth[1], False
        if self.use_keyring:
            auth = self._get_keyring_auth(netloc, username)
            if auth and auth[0] is not None and auth[1] is not None:
                return auth[0], auth[1], False
        password = ask_password("Password: ")
        return username, password, True

    # Factored out to allow for easy patching in tests
    def _should_save_password_to_keyring(self) -> bool:
        if not keyring:
        if (
            not self.prompting
            or not self.use_keyring
            or not self.keyring_provider.has_keyring
        ):
            return False
        return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y"

@@ -244,19 +478,22 @@ class MultiDomainBasicAuth(AuthBase):
        if resp.status_code != 401:
            return resp

        username, password = None, None

        # Query the keyring for credentials:
        if self.use_keyring:
            username, password = self._get_new_credentials(
                resp.url,
                allow_netrc=False,
                allow_keyring=True,
            )

        # We are not able to prompt the user so simply return the response
        if not self.prompting:
        if not self.prompting and not username and not password:
            return resp

        parsed = urllib.parse.urlparse(resp.url)

        # Query the keyring for credentials:
        username, password = self._get_new_credentials(
            resp.url,
            allow_netrc=False,
            allow_keyring=True,
        )

        # Prompt the user for a new username and password
        save = False
        if not username and not password:
@@ -269,7 +506,11 @@ class MultiDomainBasicAuth(AuthBase):

        # Prompt to save the password to keyring
        if save and self._should_save_password_to_keyring():
            self._credentials_to_save = (parsed.netloc, username, password)
            self._credentials_to_save = Credentials(
                url=parsed.netloc,
                username=username,
                password=password,
            )

        # Consume content and release the original connection to allow our new
        # request to reuse the same one.
@@ -302,15 +543,17 @@ class MultiDomainBasicAuth(AuthBase):

    def save_credentials(self, resp: Response, **kwargs: Any) -> None:
        """Response callback to save credentials on success."""
        assert keyring is not None, "should never reach here without keyring"
        if not keyring:
            return
        assert (
            self.keyring_provider.has_keyring
        ), "should never reach here without keyring"

        creds = self._credentials_to_save
        self._credentials_to_save = None
        if creds and resp.status_code < 400:
            try:
                logger.info("Saving credentials to keyring")
                keyring.set_password(*creds)
                self.keyring_provider.save_auth_info(
                    creds.url, creds.username, creds.password
                )
            except Exception:
                logger.exception("Failed to save credentials")

@@ -3,7 +3,7 @@

import os
from contextlib import contextmanager
from typing import Iterator, Optional
from typing import Generator, Optional

from pip._vendor.cachecontrol.cache import BaseCache
from pip._vendor.cachecontrol.caches import FileCache
@@ -18,7 +18,7 @@ def is_from_cache(response: Response) -> bool:


@contextmanager
def suppressed_cache_errors() -> Iterator[None]:
def suppressed_cache_errors() -> Generator[None, None, None]:
    """If we can't access the cache then we can just skip caching and process
    requests as if caching wasn't enabled.
    """
@@ -53,7 +53,7 @@ class SafeFileCache(BaseCache):
        with open(path, "rb") as f:
            return f.read()

    def set(self, key: str, value: bytes) -> None:
    def set(self, key: str, value: bytes, expires: Optional[int] = None) -> None:
        path = self._get_cache_path(key)
        with suppressed_cache_errors():
            ensure_dir(os.path.dirname(path))

@@ -1,6 +1,6 @@
"""Download files with progress indicators.
"""
import cgi
import email.message
import logging
import mimetypes
import os
@@ -8,7 +8,7 @@ from typing import Iterable, Optional, Tuple

from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response

from pip._internal.cli.progress_bars import DownloadProgressProvider
from pip._internal.cli.progress_bars import get_download_progress_renderer
from pip._internal.exceptions import NetworkConnectionError
from pip._internal.models.index import PyPI
from pip._internal.models.link import Link
@@ -65,7 +65,8 @@ def _prepare_download(
    if not show_progress:
        return chunks

    return DownloadProgressProvider(progress_bar, max=total_length)(chunks)
    renderer = get_download_progress_renderer(bar_type=progress_bar, size=total_length)
    return renderer(chunks)


def sanitize_content_filename(filename: str) -> str:
@@ -80,12 +81,13 @@ def parse_content_disposition(content_disposition: str, default_filename: str) -
    Parse the "filename" value from a Content-Disposition header, and
    return the default filename if the result is empty.
    """
    _type, params = cgi.parse_header(content_disposition)
    filename = params.get("filename")
    m = email.message.Message()
    m["content-type"] = content_disposition
    filename = m.get_param("filename")
    if filename:
        # We need to sanitize the filename to prevent directory traversal
        # in case the filename contains ".." path parts.
        filename = sanitize_content_filename(filename)
        filename = sanitize_content_filename(str(filename))
    return filename or default_filename
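The cgi module is deprecated (slated for removal in Python 3.13), hence the switch to email.message above. Note the header value is deliberately stored under Content-Type, since Message.get_param reads parameters from that header. A runnable check of the replacement parser:

import email.message

m = email.message.Message()
m["content-type"] = 'attachment; filename="pkg-1.0.tar.gz"'
assert m.get_param("filename") == "pkg-1.0.tar.gz"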


@@ -5,36 +5,36 @@ __all__ = ["HTTPRangeRequestUnsupported", "dist_from_wheel_url"]
from bisect import bisect_left, bisect_right
from contextlib import contextmanager
from tempfile import NamedTemporaryFile
from typing import Any, Dict, Iterator, List, Optional, Tuple
from zipfile import BadZipfile, ZipFile
from typing import Any, Dict, Generator, List, Optional, Tuple
from zipfile import BadZipFile, ZipFile

from pip._vendor.pkg_resources import Distribution
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response

from pip._internal.metadata import BaseDistribution, MemoryWheel, get_wheel_distribution
from pip._internal.network.session import PipSession
from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks
from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel


class HTTPRangeRequestUnsupported(Exception):
    pass


def dist_from_wheel_url(name: str, url: str, session: PipSession) -> Distribution:
    """Return a pkg_resources.Distribution from the given wheel URL.
def dist_from_wheel_url(name: str, url: str, session: PipSession) -> BaseDistribution:
    """Return a distribution object from the given wheel URL.

    This uses HTTP range requests to only fetch the potion of the wheel
    This uses HTTP range requests to only fetch the portion of the wheel
    containing metadata, just enough for the object to be constructed.
    If such requests are not supported, HTTPRangeRequestUnsupported
    is raised.
    """
    with LazyZipOverHTTP(url, session) as wheel:
    with LazyZipOverHTTP(url, session) as zf:
        # For read-only ZIP files, ZipFile only needs methods read,
        # seek, seekable and tell, not the whole IO protocol.
        zip_file = ZipFile(wheel)  # type: ignore
        wheel = MemoryWheel(zf.name, zf)  # type: ignore
        # After context manager exit, wheel.name
        # is an invalid file by intention.
        return pkg_resources_distribution_for_wheel(zip_file, name, wheel.name)
        return get_wheel_distribution(wheel, canonicalize_name(name))
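A hedged sketch of the underlying trick: ask the server for a byte range near the end of the file, where the ZIP central directory lives. The URL and tail size are hypothetical, plain requests stands in for pip's vendored session, and real code needs the fallbacks that LazyZipOverHTTP implements:

import requests

url = "https://files.example.org/pkg-1.0-py3-none-any.whl"
head = requests.head(url, allow_redirects=True)
if head.headers.get("Accept-Ranges") != "bytes":
    raise RuntimeError("range requests unsupported")
length = int(head.headers["Content-Length"])
# Fetch only the last 8 KiB, usually enough for the central directory.
start = max(length - 8192, 0)
tail = requests.get(url, headers={"Range": f"bytes={start}-"})
assert tail.status_code == 206  # Partial Content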


class LazyZipOverHTTP:
@@ -135,11 +135,11 @@ class LazyZipOverHTTP:
        self._file.__enter__()
        return self

    def __exit__(self, *exc: Any) -> Optional[bool]:
        return self._file.__exit__(*exc)
    def __exit__(self, *exc: Any) -> None:
        self._file.__exit__(*exc)

    @contextmanager
    def _stay(self) -> Iterator[None]:
    def _stay(self) -> Generator[None, None, None]:
        """Return a context manager keeping the position.

        At the end of the block, seek back to original position.
@@ -160,7 +160,7 @@ class LazyZipOverHTTP:
            # For read-only ZIP files, ZipFile only needs
            # methods read, seek, seekable and tell.
            ZipFile(self)  # type: ignore
        except BadZipfile:
        except BadZipFile:
            pass
        else:
            break
@@ -177,8 +177,8 @@ class LazyZipOverHTTP:

    def _merge(
        self, start: int, end: int, left: int, right: int
    ) -> Iterator[Tuple[int, int]]:
        """Return an iterator of intervals to be fetched.
    ) -> Generator[Tuple[int, int], None, None]:
        """Return a generator of intervals to be fetched.

        Args:
            start (int): Start of needed interval

@@ -2,17 +2,8 @@
network request configuration and behavior.
"""

# When mypy runs on Windows the call to distro.linux_distribution() is skipped
# resulting in the failure:
#
# error: unused 'type: ignore' comment
#
# If the upstream module adds typing, this comment should be removed. See
# https://github.com/nir0s/distro/pull/269
#
# mypy: warn-unused-ignores=False

import email.utils
import io
import ipaddress
import json
import logging
@@ -24,11 +15,23 @@ import subprocess
import sys
import urllib.parse
import warnings
from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple, Union
from typing import (
    TYPE_CHECKING,
    Any,
    Dict,
    Generator,
    List,
    Mapping,
    Optional,
    Sequence,
    Tuple,
    Union,
)

from pip._vendor import requests, urllib3
from pip._vendor.cachecontrol import CacheControlAdapter
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
from pip._vendor.cachecontrol import CacheControlAdapter as _BaseCacheControlAdapter
from pip._vendor.requests.adapters import DEFAULT_POOLBLOCK, BaseAdapter
from pip._vendor.requests.adapters import HTTPAdapter as _BaseHTTPAdapter
from pip._vendor.requests.models import PreparedRequest, Response
from pip._vendor.requests.structures import CaseInsensitiveDict
from pip._vendor.urllib3.connectionpool import ConnectionPool
@@ -46,6 +49,12 @@ from pip._internal.utils.glibc import libc_ver
from pip._internal.utils.misc import build_url_from_netloc, parse_netloc
from pip._internal.utils.urls import url_to_path

if TYPE_CHECKING:
    from ssl import SSLContext

    from pip._vendor.urllib3.poolmanager import PoolManager


logger = logging.getLogger(__name__)

SecureOrigin = Tuple[str, str, Optional[Union[int, str]]]
@@ -128,9 +137,8 @@ def user_agent() -> str:
    if sys.platform.startswith("linux"):
        from pip._vendor import distro

        # https://github.com/nir0s/distro/pull/269
        linux_distribution = distro.linux_distribution()  # type: ignore
        distro_infos = dict(
        linux_distribution = distro.name(), distro.version(), distro.codename()
        distro_infos: Dict[str, Any] = dict(
            filter(
                lambda x: x[1],
                zip(["name", "version", "id"], linux_distribution),
@@ -218,8 +226,11 @@ class LocalFSAdapter(BaseAdapter):
        try:
            stats = os.stat(pathname)
        except OSError as exc:
            # format the exception raised as a io.BytesIO object,
            # to return a better error message:
            resp.status_code = 404
            resp.raw = exc
            resp.reason = type(exc).__name__
            resp.raw = io.BytesIO(f"{resp.reason}: {exc}".encode("utf8"))
        else:
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
@@ -240,6 +251,48 @@ class LocalFSAdapter(BaseAdapter):
            pass


class _SSLContextAdapterMixin:
    """Mixin to add the ``ssl_context`` constructor argument to HTTP adapters.

    The additional argument is forwarded directly to the pool manager. This allows us
    to dynamically decide what SSL store to use at runtime, which is used to implement
    the optional ``truststore`` backend.
    """

    def __init__(
        self,
        *,
        ssl_context: Optional["SSLContext"] = None,
        **kwargs: Any,
    ) -> None:
        self._ssl_context = ssl_context
        super().__init__(**kwargs)

    def init_poolmanager(
        self,
        connections: int,
        maxsize: int,
        block: bool = DEFAULT_POOLBLOCK,
        **pool_kwargs: Any,
    ) -> "PoolManager":
        if self._ssl_context is not None:
            pool_kwargs.setdefault("ssl_context", self._ssl_context)
        return super().init_poolmanager(  # type: ignore[misc]
            connections=connections,
            maxsize=maxsize,
            block=block,
            **pool_kwargs,
        )


class HTTPAdapter(_SSLContextAdapterMixin, _BaseHTTPAdapter):
    pass


class CacheControlAdapter(_SSLContextAdapterMixin, _BaseCacheControlAdapter):
    pass
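A hedged sketch of the pattern these adapters enable: forwarding a custom SSLContext to the pool manager so every HTTPS connection uses it (the basis of the optional truststore backend). Plain requests stands in for pip's vendored copy:

import ssl
from typing import Any, Optional

import requests
from requests.adapters import DEFAULT_POOLBLOCK, HTTPAdapter


class SSLContextAdapter(HTTPAdapter):
    def __init__(self, *, ssl_context: Optional[ssl.SSLContext] = None, **kwargs: Any) -> None:
        # Must be set before super().__init__, which builds the pool manager.
        self._ssl_context = ssl_context
        super().__init__(**kwargs)

    def init_poolmanager(
        self, connections: int, maxsize: int, block: bool = DEFAULT_POOLBLOCK, **pool_kwargs: Any
    ):
        if self._ssl_context is not None:
            pool_kwargs.setdefault("ssl_context", self._ssl_context)
        return super().init_poolmanager(connections, maxsize, block=block, **pool_kwargs)


session = requests.Session()
session.mount("https://", SSLContextAdapter(ssl_context=ssl.create_default_context()))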


class InsecureHTTPAdapter(HTTPAdapter):
    def cert_verify(
        self,
@@ -263,7 +316,6 @@ class InsecureCacheControlAdapter(CacheControlAdapter):


class PipSession(requests.Session):

    timeout: Optional[int] = None

    def __init__(
@@ -273,6 +325,7 @@ class PipSession(requests.Session):
        cache: Optional[str] = None,
        trusted_hosts: Sequence[str] = (),
        index_urls: Optional[List[str]] = None,
        ssl_context: Optional["SSLContext"] = None,
        **kwargs: Any,
    ) -> None:
        """
@@ -325,13 +378,14 @@ class PipSession(requests.Session):
            secure_adapter = CacheControlAdapter(
                cache=SafeFileCache(cache),
                max_retries=retries,
                ssl_context=ssl_context,
            )
            self._trusted_host_adapter = InsecureCacheControlAdapter(
                cache=SafeFileCache(cache),
                max_retries=retries,
            )
        else:
            secure_adapter = HTTPAdapter(max_retries=retries)
            secure_adapter = HTTPAdapter(max_retries=retries, ssl_context=ssl_context)
            self._trusted_host_adapter = insecure_adapter

        self.mount("https://", secure_adapter)
@@ -369,12 +423,19 @@ class PipSession(requests.Session):
        if host_port not in self.pip_trusted_origins:
            self.pip_trusted_origins.append(host_port)

        self.mount(
            build_url_from_netloc(host, scheme="http") + "/", self._trusted_host_adapter
        )
        self.mount(build_url_from_netloc(host) + "/", self._trusted_host_adapter)
        if not host_port[1]:
            self.mount(
                build_url_from_netloc(host, scheme="http") + ":",
                self._trusted_host_adapter,
            )
            # Mount wildcard ports for the same host.
            self.mount(build_url_from_netloc(host) + ":", self._trusted_host_adapter)

    def iter_secure_origins(self) -> Iterator[SecureOrigin]:
    def iter_secure_origins(self) -> Generator[SecureOrigin, None, None]:
        yield from SECURE_ORIGINS
        for host, port in self.pip_trusted_origins:
            yield ("*", host, "*" if port is None else port)
@@ -403,7 +464,7 @@ class PipSession(requests.Session):
            continue

        try:
            addr = ipaddress.ip_address(origin_host)
            addr = ipaddress.ip_address(origin_host or "")
            network = ipaddress.ip_network(secure_host)
        except ValueError:
            # We don't have both a valid address or a valid network, so
@@ -449,6 +510,8 @@ class PipSession(requests.Session):
    def request(self, method: str, url: str, *args: Any, **kwargs: Any) -> Response:
        # Allow setting a default timeout on a session
        kwargs.setdefault("timeout", self.timeout)
        # Allow setting a default proxies on a session
        kwargs.setdefault("proxies", self.proxies)

        # Dispatch the actual request
        return super().request(method, url, *args, **kwargs)
||||
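As an aside on the trusted-host hunk above: requests matches mounted transport adapters by longest URL prefix, so registering both the "http://host/" and "http://host:" prefixes covers trusted-host URLs with and without an explicit port. A minimal sketch, assuming the standalone requests package is installed (the host name and adapter class are illustrative only):

import requests
from requests.adapters import HTTPAdapter

class TrustedHostAdapter(HTTPAdapter):  # stand-in for pip's insecure adapter
    pass

session = requests.Session()
session.mount("http://example.internal/", TrustedHostAdapter())  # default port, any path
session.mount("http://example.internal:", TrustedHostAdapter())  # any explicit port

# Longest-prefix matching selects the adapter for a ported URL:
assert isinstance(
    session.get_adapter("http://example.internal:8080/simple/"), TrustedHostAdapter
)
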
@@ -1,4 +1,4 @@
from typing import Dict, Iterator
from typing import Dict, Generator

from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response

@@ -56,7 +56,7 @@ def raise_for_status(resp: Response) -> None:

def response_chunks(
    response: Response, chunk_size: int = CONTENT_CHUNK_SIZE
) -> Iterator[bytes]:
) -> Generator[bytes, None, None]:
    """Given a requests Response, provide the data chunks."""
    try:
        # Special case for urllib3.

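The Iterator-to-Generator annotation change seen here (and repeated throughout this commit) does not alter runtime behavior; Generator[bytes, None, None] additionally records that the generator neither accepts .send() values nor returns a final value. A small self-contained illustration:

from typing import Generator, Iterator

def chunks_iter(data: bytes, size: int) -> Iterator[bytes]:
    for i in range(0, len(data), size):
        yield data[i : i + size]

def chunks_gen(data: bytes, size: int) -> Generator[bytes, None, None]:
    # Same behavior; only the declared send/return types are more explicit.
    for i in range(0, len(data), size):
        yield data[i : i + size]

assert list(chunks_iter(b"abcdef", 4)) == list(chunks_gen(b"abcdef", 4))
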
@@ -3,33 +3,37 @@

import os

from pip._vendor.pep517.wrappers import Pep517HookCaller
from pip._vendor.pyproject_hooks import BuildBackendHookCaller

from pip._internal.build_env import BuildEnvironment
from pip._internal.exceptions import (
    InstallationSubprocessError,
    MetadataGenerationFailed,
)
from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.temp_dir import TempDirectory


def generate_metadata(build_env, backend):
    # type: (BuildEnvironment, Pep517HookCaller) -> str
def generate_metadata(
    build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str
) -> str:
    """Generate metadata using mechanisms described in PEP 517.

    Returns the generated metadata directory.
    """
    metadata_tmpdir = TempDirectory(
        kind="modern-metadata", globally_managed=True
    )
    metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True)

    metadata_dir = metadata_tmpdir.path

    with build_env:
        # Note that Pep517HookCaller implements a fallback for
        # Note that BuildBackendHookCaller implements a fallback for
        # prepare_metadata_for_build_wheel, so we don't have to
        # consider the possibility that this hook doesn't exist.
        runner = runner_with_spinner_message("Preparing wheel metadata")
        runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)")
        with backend.subprocess_runner(runner):
            distinfo_dir = backend.prepare_metadata_for_build_wheel(
                metadata_dir
            )
            try:
                distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir)
            except InstallationSubprocessError as error:
                raise MetadataGenerationFailed(package_details=details) from error

    return os.path.join(metadata_dir, distinfo_dir)

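For context on the hunk above: BuildBackendHookCaller drives the PEP 517 hooks in a subprocess. A hedged sketch of invoking the same hook through the standalone pyproject_hooks package (the project path and backend name are placeholders; pip itself uses its vendored copy together with a spinner runner):

import tempfile

from pyproject_hooks import BuildBackendHookCaller

hooks = BuildBackendHookCaller("path/to/project", "setuptools.build_meta")
with tempfile.TemporaryDirectory() as tmp:
    # Returns the name of the .dist-info directory created inside tmp.
    distinfo_dir = hooks.prepare_metadata_for_build_wheel(tmp)
    print(distinfo_dir)
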
@@ -5,7 +5,12 @@ import logging
import os

from pip._internal.build_env import BuildEnvironment
from pip._internal.exceptions import InstallationError
from pip._internal.cli.spinners import open_spinner
from pip._internal.exceptions import (
    InstallationError,
    InstallationSubprocessError,
    MetadataGenerationFailed,
)
from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args
from pip._internal.utils.subprocess import call_subprocess
from pip._internal.utils.temp_dir import TempDirectory
@@ -13,49 +18,39 @@ from pip._internal.utils.temp_dir import TempDirectory
logger = logging.getLogger(__name__)


def _find_egg_info(directory):
    # type: (str) -> str
    """Find an .egg-info subdirectory in `directory`.
    """
    filenames = [
        f for f in os.listdir(directory) if f.endswith(".egg-info")
    ]
def _find_egg_info(directory: str) -> str:
    """Find an .egg-info subdirectory in `directory`."""
    filenames = [f for f in os.listdir(directory) if f.endswith(".egg-info")]

    if not filenames:
        raise InstallationError(
            f"No .egg-info directory found in {directory}"
        )
        raise InstallationError(f"No .egg-info directory found in {directory}")

    if len(filenames) > 1:
        raise InstallationError(
            "More than one .egg-info directory found in {}".format(
                directory
            )
            "More than one .egg-info directory found in {}".format(directory)
        )

    return os.path.join(directory, filenames[0])


def generate_metadata(
    build_env,  # type: BuildEnvironment
    setup_py_path,  # type: str
    source_dir,  # type: str
    isolated,  # type: bool
    details,  # type: str
):
    # type: (...) -> str
    build_env: BuildEnvironment,
    setup_py_path: str,
    source_dir: str,
    isolated: bool,
    details: str,
) -> str:
    """Generate metadata using setup.py-based defacto mechanisms.

    Returns the generated metadata directory.
    """
    logger.debug(
        'Running setup.py (path:%s) egg_info for package %s',
        setup_py_path, details,
        "Running setup.py (path:%s) egg_info for package %s",
        setup_py_path,
        details,
    )

    egg_info_dir = TempDirectory(
        kind="pip-egg-info", globally_managed=True
    ).path
    egg_info_dir = TempDirectory(kind="pip-egg-info", globally_managed=True).path

    args = make_setuptools_egg_info_args(
        setup_py_path,
@@ -64,11 +59,16 @@ def generate_metadata(
    )

    with build_env:
        call_subprocess(
            args,
            cwd=source_dir,
            command_desc='python setup.py egg_info',
        )
        with open_spinner("Preparing metadata (setup.py)") as spinner:
            try:
                call_subprocess(
                    args,
                    cwd=source_dir,
                    command_desc="python setup.py egg_info",
                    spinner=spinner,
                )
            except InstallationSubprocessError as error:
                raise MetadataGenerationFailed(package_details=details) from error

    # Return the .egg-info directory.
    return _find_egg_info(egg_info_dir)

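The try/except added above re-raises subprocess failures as MetadataGenerationFailed using "raise ... from error", which keeps the original failure attached as __cause__. A minimal stand-alone illustration (both exception classes here are stand-ins, not pip's):

class MetadataGenerationFailed(Exception):  # stand-in for pip's exception
    pass

try:
    try:
        raise RuntimeError("egg_info subprocess failed")  # stand-in failure
    except RuntimeError as error:
        raise MetadataGenerationFailed("package details") from error
except MetadataGenerationFailed as exc:
    assert isinstance(exc.__cause__, RuntimeError)  # original cause preserved
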
@@ -2,7 +2,7 @@ import logging
import os
from typing import Optional

from pip._vendor.pep517.wrappers import Pep517HookCaller
from pip._vendor.pyproject_hooks import BuildBackendHookCaller

from pip._internal.utils.subprocess import runner_with_spinner_message

@@ -10,22 +10,21 @@ logger = logging.getLogger(__name__)


def build_wheel_pep517(
    name,  # type: str
    backend,  # type: Pep517HookCaller
    metadata_directory,  # type: str
    tempd,  # type: str
):
    # type: (...) -> Optional[str]
    name: str,
    backend: BuildBackendHookCaller,
    metadata_directory: str,
    tempd: str,
) -> Optional[str]:
    """Build one InstallRequirement using the PEP 517 build process.

    Returns path to wheel if successfully built. Otherwise, returns None.
    """
    assert metadata_directory is not None
    try:
        logger.debug('Destination directory: %s', tempd)
        logger.debug("Destination directory: %s", tempd)

        runner = runner_with_spinner_message(
            f'Building wheel for {name} (PEP 517)'
            f"Building wheel for {name} (pyproject.toml)"
        )
        with backend.subprocess_runner(runner):
            wheel_name = backend.build_wheel(
@@ -33,6 +32,6 @@ def build_wheel_pep517(
                metadata_directory=metadata_directory,
            )
    except Exception:
        logger.error('Failed building wheel for %s', name)
        logger.error("Failed building wheel for %s", name)
        return None
    return os.path.join(tempd, wheel_name)

@@ -4,59 +4,51 @@ from typing import List, Optional

from pip._internal.cli.spinners import open_spinner
from pip._internal.utils.setuptools_build import make_setuptools_bdist_wheel_args
from pip._internal.utils.subprocess import (
    LOG_DIVIDER,
    call_subprocess,
    format_command_args,
)
from pip._internal.utils.subprocess import call_subprocess, format_command_args

logger = logging.getLogger(__name__)


def format_command_result(
    command_args,  # type: List[str]
    command_output,  # type: str
):
    # type: (...) -> str
    command_args: List[str],
    command_output: str,
) -> str:
    """Format command information for logging."""
    command_desc = format_command_args(command_args)
    text = f'Command arguments: {command_desc}\n'
    text = f"Command arguments: {command_desc}\n"

    if not command_output:
        text += 'Command output: None'
        text += "Command output: None"
    elif logger.getEffectiveLevel() > logging.DEBUG:
        text += 'Command output: [use --verbose to show]'
        text += "Command output: [use --verbose to show]"
    else:
        if not command_output.endswith('\n'):
            command_output += '\n'
        text += f'Command output:\n{command_output}{LOG_DIVIDER}'
        if not command_output.endswith("\n"):
            command_output += "\n"
        text += f"Command output:\n{command_output}"

    return text


def get_legacy_build_wheel_path(
    names,  # type: List[str]
    temp_dir,  # type: str
    name,  # type: str
    command_args,  # type: List[str]
    command_output,  # type: str
):
    # type: (...) -> Optional[str]
    names: List[str],
    temp_dir: str,
    name: str,
    command_args: List[str],
    command_output: str,
) -> Optional[str]:
    """Return the path to the wheel in the temporary build directory."""
    # Sort for determinism.
    names = sorted(names)
    if not names:
        msg = (
            'Legacy build of wheel for {!r} created no files.\n'
        ).format(name)
        msg = ("Legacy build of wheel for {!r} created no files.\n").format(name)
        msg += format_command_result(command_args, command_output)
        logger.warning(msg)
        return None

    if len(names) > 1:
        msg = (
            'Legacy build of wheel for {!r} created more than one file.\n'
            'Filenames (choosing first): {}\n'
            "Legacy build of wheel for {!r} created more than one file.\n"
            "Filenames (choosing first): {}\n"
        ).format(name, names)
        msg += format_command_result(command_args, command_output)
        logger.warning(msg)
@@ -65,14 +57,13 @@ def get_legacy_build_wheel_path(


def build_wheel_legacy(
    name,  # type: str
    setup_py_path,  # type: str
    source_dir,  # type: str
    global_options,  # type: List[str]
    build_options,  # type: List[str]
    tempd,  # type: str
):
    # type: (...) -> Optional[str]
    name: str,
    setup_py_path: str,
    source_dir: str,
    global_options: List[str],
    build_options: List[str],
    tempd: str,
) -> Optional[str]:
    """Build one unpacked package using the "legacy" build process.

    Returns path to wheel if successfully built. Otherwise, returns None.
@@ -84,19 +75,20 @@ def build_wheel_legacy(
        destination_dir=tempd,
    )

    spin_message = f'Building wheel for {name} (setup.py)'
    spin_message = f"Building wheel for {name} (setup.py)"
    with open_spinner(spin_message) as spinner:
        logger.debug('Destination directory: %s', tempd)
        logger.debug("Destination directory: %s", tempd)

        try:
            output = call_subprocess(
                wheel_args,
                command_desc="python setup.py bdist_wheel",
                cwd=source_dir,
                spinner=spinner,
            )
        except Exception:
            spinner.finish("error")
            logger.error('Failed building wheel for %s', name)
            logger.error("Failed building wheel for %s", name)
            return None

        names = os.listdir(tempd)

@@ -2,19 +2,16 @@
"""

import logging
from typing import TYPE_CHECKING, Callable, Dict, List, NamedTuple, Optional, Set, Tuple
from typing import Callable, Dict, List, NamedTuple, Optional, Set, Tuple

from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name

from pip._internal.distributions import make_distribution_for_install_requirement
from pip._internal.metadata import get_default_environment
from pip._internal.metadata.base import DistributionVersion
from pip._internal.req.req_install import InstallRequirement

if TYPE_CHECKING:
    from pip._vendor.packaging.utils import NormalizedName

logger = logging.getLogger(__name__)


@@ -24,12 +21,12 @@ class PackageDetails(NamedTuple):


# Shorthands
PackageSet = Dict['NormalizedName', PackageDetails]
Missing = Tuple['NormalizedName', Requirement]
Conflicting = Tuple['NormalizedName', DistributionVersion, Requirement]
PackageSet = Dict[NormalizedName, PackageDetails]
Missing = Tuple[NormalizedName, Requirement]
Conflicting = Tuple[NormalizedName, DistributionVersion, Requirement]

MissingDict = Dict['NormalizedName', List[Missing]]
ConflictingDict = Dict['NormalizedName', List[Conflicting]]
MissingDict = Dict[NormalizedName, List[Missing]]
ConflictingDict = Dict[NormalizedName, List[Conflicting]]
CheckResult = Tuple[MissingDict, ConflictingDict]
ConflictDetails = Tuple[PackageSet, CheckResult]

@@ -51,8 +48,9 @@ def create_package_set_from_installed() -> Tuple[PackageSet, bool]:
    return package_set, problems


def check_package_set(package_set, should_ignore=None):
    # type: (PackageSet, Optional[Callable[[str], bool]]) -> CheckResult
def check_package_set(
    package_set: PackageSet, should_ignore: Optional[Callable[[str], bool]] = None
) -> CheckResult:
    """Check if a package set is consistent

    If should_ignore is passed, it should be a callable that takes a
@@ -64,8 +62,8 @@ def check_package_set(package_set, should_ignore=None):

    for package_name, package_detail in package_set.items():
        # Info about dependencies of package_name
        missing_deps = set()  # type: Set[Missing]
        conflicting_deps = set()  # type: Set[Conflicting]
        missing_deps: Set[Missing] = set()
        conflicting_deps: Set[Conflicting] = set()

        if should_ignore and should_ignore(package_name):
            continue
@@ -77,7 +75,7 @@ def check_package_set(package_set, should_ignore=None):
            if name not in package_set:
                missed = True
                if req.marker is not None:
                    missed = req.marker.evaluate()
                    missed = req.marker.evaluate({"extra": ""})
                if missed:
                    missing_deps.add((name, req))
                continue
@@ -95,8 +93,7 @@ def check_package_set(package_set, should_ignore=None):
    return missing, conflicting


def check_install_conflicts(to_install):
    # type: (List[InstallRequirement]) -> ConflictDetails
def check_install_conflicts(to_install: List[InstallRequirement]) -> ConflictDetails:
    """For checking if the dependency graph would be consistent after \
    installing given requirements
    """
@@ -112,33 +109,32 @@ def check_install_conflicts(to_install):
        package_set,
        check_package_set(
            package_set, should_ignore=lambda name: name not in whitelist
        )
        ),
    )


def _simulate_installation_of(to_install, package_set):
    # type: (List[InstallRequirement], PackageSet) -> Set[NormalizedName]
    """Computes the version of packages after installing to_install.
    """
def _simulate_installation_of(
    to_install: List[InstallRequirement], package_set: PackageSet
) -> Set[NormalizedName]:
    """Computes the version of packages after installing to_install."""
    # Keep track of packages that were installed
    installed = set()

    # Modify it as installing requirement_set would (assuming no errors)
    for inst_req in to_install:
        abstract_dist = make_distribution_for_install_requirement(inst_req)
        dist = abstract_dist.get_pkg_resources_distribution()

        assert dist is not None
        name = canonicalize_name(dist.project_name)
        package_set[name] = PackageDetails(dist.parsed_version, dist.requires())
        dist = abstract_dist.get_metadata_distribution()
        name = dist.canonical_name
        package_set[name] = PackageDetails(dist.version, list(dist.iter_dependencies()))

        installed.add(name)

    return installed


def _create_whitelist(would_be_installed, package_set):
    # type: (Set[NormalizedName], PackageSet) -> Set[NormalizedName]
def _create_whitelist(
    would_be_installed: Set[NormalizedName], package_set: PackageSet
) -> Set[NormalizedName]:
    packages_affected = set(would_be_installed)

    for package_name in package_set:

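The evaluate({"extra": ""}) change above matters because dependency markers such as extra == "test" reference a variable that is not part of the default marker environment; supplying an explicit (empty) value makes extras-only requirements evaluate to False for a plain install instead of being miscounted as missing. A short sketch, assuming the packaging library is installed:

from packaging.requirements import Requirement

req = Requirement('pytest; extra == "test"')
assert req.marker is not None
assert req.marker.evaluate({"extra": "test"}) is True   # the extra was requested
assert req.marker.evaluate({"extra": ""}) is False      # plain install: not required
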
@@ -1,19 +1,8 @@
import collections
import logging
import os
from typing import (
    Container,
    Dict,
    Iterable,
    Iterator,
    List,
    NamedTuple,
    Optional,
    Set,
    Union,
)
from typing import Container, Dict, Generator, Iterable, List, NamedTuple, Optional, Set

from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import Version

@@ -30,22 +19,20 @@ logger = logging.getLogger(__name__)


class _EditableInfo(NamedTuple):
    requirement: Optional[str]
    editable: bool
    requirement: str
    comments: List[str]


def freeze(
    requirement=None,  # type: Optional[List[str]]
    local_only=False,  # type: bool
    user_only=False,  # type: bool
    paths=None,  # type: Optional[List[str]]
    isolated=False,  # type: bool
    exclude_editable=False,  # type: bool
    skip=()  # type: Container[str]
):
    # type: (...) -> Iterator[str]
    installations = {}  # type: Dict[str, FrozenRequirement]
    requirement: Optional[List[str]] = None,
    local_only: bool = False,
    user_only: bool = False,
    paths: Optional[List[str]] = None,
    isolated: bool = False,
    exclude_editable: bool = False,
    skip: Container[str] = (),
) -> Generator[str, None, None]:
    installations: Dict[str, FrozenRequirement] = {}

    dists = get_environment(paths).iter_installed_distributions(
        local_only=local_only,
@@ -63,42 +50,50 @@ def freeze(
    # should only be emitted once, even if the same option is in multiple
    # requirements files, so we need to keep track of what has been emitted
    # so that we don't emit it again if it's seen again
    emitted_options = set()  # type: Set[str]
    emitted_options: Set[str] = set()
    # keep track of which files a requirement is in so that we can
    # give an accurate warning if a requirement appears multiple times.
    req_files = collections.defaultdict(list)  # type: Dict[str, List[str]]
    req_files: Dict[str, List[str]] = collections.defaultdict(list)
    for req_file_path in requirement:
        with open(req_file_path) as req_file:
            for line in req_file:
                if (not line.strip() or
                        line.strip().startswith('#') or
                        line.startswith((
                            '-r', '--requirement',
                            '-f', '--find-links',
                            '-i', '--index-url',
                            '--pre',
                            '--trusted-host',
                            '--process-dependency-links',
                            '--extra-index-url',
                            '--use-feature'))):
                if (
                    not line.strip()
                    or line.strip().startswith("#")
                    or line.startswith(
                        (
                            "-r",
                            "--requirement",
                            "-f",
                            "--find-links",
                            "-i",
                            "--index-url",
                            "--pre",
                            "--trusted-host",
                            "--process-dependency-links",
                            "--extra-index-url",
                            "--use-feature",
                        )
                    )
                ):
                    line = line.rstrip()
                    if line not in emitted_options:
                        emitted_options.add(line)
                        yield line
                    continue

                if line.startswith('-e') or line.startswith('--editable'):
                    if line.startswith('-e'):
                if line.startswith("-e") or line.startswith("--editable"):
                    if line.startswith("-e"):
                        line = line[2:].strip()
                    else:
                        line = line[len('--editable'):].strip().lstrip('=')
                        line = line[len("--editable") :].strip().lstrip("=")
                    line_req = install_req_from_editable(
                        line,
                        isolated=isolated,
                    )
                else:
                    line_req = install_req_from_line(
                        COMMENT_RE.sub('', line).strip(),
                        COMMENT_RE.sub("", line).strip(),
                        isolated=isolated,
                    )

@@ -106,15 +101,15 @@ def freeze(
                    logger.info(
                        "Skipping line in requirement file [%s] because "
                        "it's not clear what it would install: %s",
                        req_file_path, line.strip(),
                        req_file_path,
                        line.strip(),
                    )
                    logger.info(
                        " (add #egg=PackageName to the URL to avoid"
                        " this warning)"
                    )
                else:
                    line_req_canonical_name = canonicalize_name(
                        line_req.name)
                    line_req_canonical_name = canonicalize_name(line_req.name)
                    if line_req_canonical_name not in installations:
                        # either it's not installed, or it is installed
                        # but has been processed already
@@ -123,14 +118,13 @@ def freeze(
                                "Requirement file [%s] contains %s, but "
                                "package %r is not installed",
                                req_file_path,
                                COMMENT_RE.sub('', line).strip(),
                                line_req.name
                                COMMENT_RE.sub("", line).strip(),
                                line_req.name,
                            )
                        else:
                            req_files[line_req.name].append(req_file_path)
                    else:
                        yield str(installations[
                            line_req_canonical_name]).rstrip()
                        yield str(installations[line_req_canonical_name]).rstrip()
                        del installations[line_req_canonical_name]
                        req_files[line_req.name].append(req_file_path)

@@ -138,42 +132,33 @@ def freeze(
    # single requirements file or in different requirements files).
    for name, files in req_files.items():
        if len(files) > 1:
            logger.warning("Requirement %s included multiple times [%s]",
                           name, ', '.join(sorted(set(files))))
            logger.warning(
                "Requirement %s included multiple times [%s]",
                name,
                ", ".join(sorted(set(files))),
            )

    yield(
        '## The following requirements were added by '
        'pip freeze:'
    )
    for installation in sorted(
            installations.values(), key=lambda x: x.name.lower()):
    yield ("## The following requirements were added by pip freeze:")
    for installation in sorted(installations.values(), key=lambda x: x.name.lower()):
        if installation.canonical_name not in skip:
            yield str(installation).rstrip()


def _format_as_name_version(dist: BaseDistribution) -> str:
    if isinstance(dist.version, Version):
        return f"{dist.raw_name}=={dist.version}"
    return f"{dist.raw_name}==={dist.version}"
    dist_version = dist.version
    if isinstance(dist_version, Version):
        return f"{dist.raw_name}=={dist_version}"
    return f"{dist.raw_name}==={dist_version}"


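_format_as_name_version above pins with "==" only when the version parses under PEP 440, and falls back to "===" (arbitrary, string-based equality) otherwise, so that non-standard versions still round-trip through a requirements file. A sketch of the same decision using try/except instead of pip's isinstance check (assuming the packaging library):

from packaging.version import InvalidVersion, Version

def format_pin(name: str, version: str) -> str:
    try:
        Version(version)
    except InvalidVersion:
        return f"{name}==={version}"  # arbitrary equality for odd versions
    return f"{name}=={version}"

assert format_pin("foo", "1.0.2") == "foo==1.0.2"
assert format_pin("bar", "1.0-custom") == "bar===1.0-custom"
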
def _get_editable_info(dist: BaseDistribution) -> _EditableInfo:
    """
    Compute and return values (req, editable, comments) for use in
    Compute and return values (req, comments) for use in
    FrozenRequirement.from_dist().
    """
    if not dist.editable:
        return _EditableInfo(requirement=None, editable=False, comments=[])
    if dist.location is None:
        display = _format_as_name_version(dist)
        logger.warning("Editable requirement not found on disk: %s", display)
        return _EditableInfo(
            requirement=None,
            editable=True,
            comments=[f"# Editable install not found ({display})"],
        )

    location = os.path.normcase(os.path.abspath(dist.location))
    editable_project_location = dist.editable_project_location
    assert editable_project_location
    location = os.path.normcase(os.path.abspath(editable_project_location))

    from pip._internal.vcs import RemoteNotFoundError, RemoteNotValidError, vcs

@@ -182,13 +167,13 @@ def _get_editable_info(dist: BaseDistribution) -> _EditableInfo:
    if vcs_backend is None:
        display = _format_as_name_version(dist)
        logger.debug(
            'No VCS found for editable requirement "%s" in: %r', display,
            'No VCS found for editable requirement "%s" in: %r',
            display,
            location,
        )
        return _EditableInfo(
            requirement=location,
            editable=True,
            comments=[f'# Editable install with no version control ({display})'],
            comments=[f"# Editable install with no version control ({display})"],
        )

    vcs_name = type(vcs_backend).__name__
@@ -199,50 +184,47 @@ def _get_editable_info(dist: BaseDistribution) -> _EditableInfo:
        display = _format_as_name_version(dist)
        return _EditableInfo(
            requirement=location,
            editable=True,
            comments=[f'# Editable {vcs_name} install with no remote ({display})'],
            comments=[f"# Editable {vcs_name} install with no remote ({display})"],
        )
    except RemoteNotValidError as ex:
        display = _format_as_name_version(dist)
        return _EditableInfo(
            requirement=location,
            editable=True,
            comments=[
                f"# Editable {vcs_name} install ({display}) with either a deleted "
                f"local remote or invalid URI:",
                f"# '{ex.url}'",
            ],
        )

    except BadCommand:
        logger.warning(
            'cannot determine version of editable source in %s '
            '(%s command not found in path)',
            "cannot determine version of editable source in %s "
            "(%s command not found in path)",
            location,
            vcs_backend.name,
        )
        return _EditableInfo(requirement=None, editable=True, comments=[])

        return _EditableInfo(requirement=location, comments=[])
    except InstallationError as exc:
        logger.warning(
            "Error when trying to get requirement for VCS system %s, "
            "falling back to uneditable format", exc
        )
        logger.warning("Error when trying to get requirement for VCS system %s", exc)
    else:
        return _EditableInfo(requirement=req, editable=True, comments=[])
        return _EditableInfo(requirement=req, comments=[])

    logger.warning('Could not determine repository location of %s', location)
    logger.warning("Could not determine repository location of %s", location)

    return _EditableInfo(
        requirement=None,
        editable=False,
        comments=['## !! Could not determine repository location'],
        requirement=location,
        comments=["## !! Could not determine repository location"],
    )


class FrozenRequirement:
    def __init__(self, name, req, editable, comments=()):
        # type: (str, Union[str, Requirement], bool, Iterable[str]) -> None
    def __init__(
        self,
        name: str,
        req: str,
        editable: bool,
        comments: Iterable[str] = (),
    ) -> None:
        self.name = name
        self.canonical_name = canonicalize_name(name)
        self.req = req
@@ -251,27 +233,23 @@ class FrozenRequirement:

    @classmethod
    def from_dist(cls, dist: BaseDistribution) -> "FrozenRequirement":
        # TODO `get_requirement_info` is taking care of editable requirements.
        # TODO This should be refactored when we will add detection of
        # editable that provide .dist-info metadata.
        req, editable, comments = _get_editable_info(dist)
        if req is None and not editable:
            # if PEP 610 metadata is present, attempt to use it
        editable = dist.editable
        if editable:
            req, comments = _get_editable_info(dist)
        else:
            comments = []
            direct_url = dist.direct_url
            if direct_url:
                req = direct_url_as_pep440_direct_reference(
                    direct_url, dist.raw_name
                )
                comments = []
        if req is None:
            # name==version requirement
            req = _format_as_name_version(dist)
                # if PEP 610 metadata is present, use it
                req = direct_url_as_pep440_direct_reference(direct_url, dist.raw_name)
            else:
                # name==version requirement
                req = _format_as_name_version(dist)

        return cls(dist.raw_name, req, editable, comments=comments)

    def __str__(self):
        # type: () -> str
    def __str__(self) -> str:
        req = self.req
        if self.editable:
            req = f'-e {req}'
        return '\n'.join(list(self.comments) + [str(req)]) + '\n'
            req = f"-e {req}"
        return "\n".join(list(self.comments) + [str(req)]) + "\n"

@@ -1,7 +1,7 @@
"""Legacy editable installation process, i.e. `setup.py develop`.
"""
import logging
from typing import List, Optional, Sequence
from typing import Optional, Sequence

from pip._internal.build_env import BuildEnvironment
from pip._internal.utils.logging import indent_log
@@ -12,27 +12,25 @@ logger = logging.getLogger(__name__)


def install_editable(
    install_options,  # type: List[str]
    global_options,  # type: Sequence[str]
    prefix,  # type: Optional[str]
    home,  # type: Optional[str]
    use_user_site,  # type: bool
    name,  # type: str
    setup_py_path,  # type: str
    isolated,  # type: bool
    build_env,  # type: BuildEnvironment
    unpacked_source_directory,  # type: str
):
    # type: (...) -> None
    *,
    global_options: Sequence[str],
    prefix: Optional[str],
    home: Optional[str],
    use_user_site: bool,
    name: str,
    setup_py_path: str,
    isolated: bool,
    build_env: BuildEnvironment,
    unpacked_source_directory: str,
) -> None:
    """Install a package in editable mode. Most arguments are pass-through
    to setuptools.
    """
    logger.info('Running setup.py develop for %s', name)
    logger.info("Running setup.py develop for %s", name)

    args = make_setuptools_develop_args(
        setup_py_path,
        global_options=global_options,
        install_options=install_options,
        no_user_config=isolated,
        prefix=prefix,
        home=home,
@@ -43,5 +41,6 @@ def install_editable(
    with build_env:
        call_subprocess(
            args,
            command_desc="python setup.py develop",
            cwd=unpacked_source_directory,
        )

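Note the bare "*" that now leads the install_editable signature above: every flag must be passed by keyword, so call sites cannot silently transpose the many positional booleans. Minimal illustration:

def configure(*, isolated: bool, use_user_site: bool) -> str:
    # The bare "*" rejects positional arguments entirely.
    return f"isolated={isolated}, use_user_site={use_user_site}"

configure(isolated=True, use_user_site=False)  # OK
# configure(True, False)  # would raise TypeError: takes 0 positional arguments
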
@@ -1,132 +0,0 @@
"""Legacy installation process, i.e. `setup.py install`.
"""

import logging
import os
import sys
from distutils.util import change_root
from typing import List, Optional, Sequence

from pip._internal.build_env import BuildEnvironment
from pip._internal.exceptions import InstallationError
from pip._internal.models.scheme import Scheme
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import ensure_dir
from pip._internal.utils.setuptools_build import make_setuptools_install_args
from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.temp_dir import TempDirectory

logger = logging.getLogger(__name__)


class LegacyInstallFailure(Exception):
    def __init__(self):
        # type: () -> None
        self.parent = sys.exc_info()


def write_installed_files_from_setuptools_record(
    record_lines: List[str],
    root: Optional[str],
    req_description: str,
) -> None:
    def prepend_root(path):
        # type: (str) -> str
        if root is None or not os.path.isabs(path):
            return path
        else:
            return change_root(root, path)

    for line in record_lines:
        directory = os.path.dirname(line)
        if directory.endswith('.egg-info'):
            egg_info_dir = prepend_root(directory)
            break
    else:
        message = (
            "{} did not indicate that it installed an "
            ".egg-info directory. Only setup.py projects "
            "generating .egg-info directories are supported."
        ).format(req_description)
        raise InstallationError(message)

    new_lines = []
    for line in record_lines:
        filename = line.strip()
        if os.path.isdir(filename):
            filename += os.path.sep
        new_lines.append(
            os.path.relpath(prepend_root(filename), egg_info_dir)
        )
    new_lines.sort()
    ensure_dir(egg_info_dir)
    inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
    with open(inst_files_path, 'w') as f:
        f.write('\n'.join(new_lines) + '\n')


def install(
    install_options,  # type: List[str]
    global_options,  # type: Sequence[str]
    root,  # type: Optional[str]
    home,  # type: Optional[str]
    prefix,  # type: Optional[str]
    use_user_site,  # type: bool
    pycompile,  # type: bool
    scheme,  # type: Scheme
    setup_py_path,  # type: str
    isolated,  # type: bool
    req_name,  # type: str
    build_env,  # type: BuildEnvironment
    unpacked_source_directory,  # type: str
    req_description,  # type: str
):
    # type: (...) -> bool

    header_dir = scheme.headers

    with TempDirectory(kind="record") as temp_dir:
        try:
            record_filename = os.path.join(temp_dir.path, 'install-record.txt')
            install_args = make_setuptools_install_args(
                setup_py_path,
                global_options=global_options,
                install_options=install_options,
                record_filename=record_filename,
                root=root,
                prefix=prefix,
                header_dir=header_dir,
                home=home,
                use_user_site=use_user_site,
                no_user_config=isolated,
                pycompile=pycompile,
            )

            runner = runner_with_spinner_message(
                f"Running setup.py install for {req_name}"
            )
            with indent_log(), build_env:
                runner(
                    cmd=install_args,
                    cwd=unpacked_source_directory,
                )

            if not os.path.exists(record_filename):
                logger.debug('Record file %s not found', record_filename)
                # Signal to the caller that we didn't install the new package
                return False

        except Exception:
            # Signal to the caller that we didn't install the new package
            raise LegacyInstallFailure

        # At this point, we have successfully installed the requirement.

        # We intentionally do not use any encoding to read the file because
        # setuptools writes the file using distutils.file_util.write_file,
        # which does not specify an encoding.
        with open(record_filename) as f:
            record_lines = f.read().splitlines()

    write_installed_files_from_setuptools_record(record_lines, root, req_description)
    return True
@@ -22,6 +22,7 @@ from typing import (
    BinaryIO,
    Callable,
    Dict,
    Generator,
    Iterable,
    Iterator,
    List,
@@ -38,11 +39,14 @@ from zipfile import ZipFile, ZipInfo
from pip._vendor.distlib.scripts import ScriptMaker
from pip._vendor.distlib.util import get_export_entry
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.six import ensure_str, ensure_text, reraise

from pip._internal.exceptions import InstallationError
from pip._internal.locations import get_major_minor_version
from pip._internal.metadata import BaseDistribution, get_wheel_distribution
from pip._internal.metadata import (
    BaseDistribution,
    FilesystemWheel,
    get_wheel_distribution,
)
from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl
from pip._internal.models.scheme import SCHEME_KEYS, Scheme
from pip._internal.utils.filesystem import adjacent_tmp_file, replace
@@ -59,62 +63,55 @@ if TYPE_CHECKING:
    from typing import Protocol

    class File(Protocol):
        src_record_path = None  # type: RecordPath
        dest_path = None  # type: str
        changed = None  # type: bool
        src_record_path: "RecordPath"
        dest_path: str
        changed: bool

        def save(self):
            # type: () -> None
        def save(self) -> None:
            pass


logger = logging.getLogger(__name__)

RecordPath = NewType('RecordPath', str)
RecordPath = NewType("RecordPath", str)
InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]]


def rehash(path, blocksize=1 << 20):
    # type: (str, int) -> Tuple[str, str]
def rehash(path: str, blocksize: int = 1 << 20) -> Tuple[str, str]:
    """Return (encoded_digest, length) for path using hashlib.sha256()"""
    h, length = hash_file(path, blocksize)
    digest = 'sha256=' + urlsafe_b64encode(
        h.digest()
    ).decode('latin1').rstrip('=')
    digest = "sha256=" + urlsafe_b64encode(h.digest()).decode("latin1").rstrip("=")
    return (digest, str(length))


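The digest format produced by rehash above follows the RECORD-file convention: sha256, urlsafe base64, with the trailing "=" padding stripped. A stdlib-only sketch of the same encoding over in-memory data:

import hashlib
from base64 import urlsafe_b64encode

def record_digest(data: bytes) -> str:
    digest = hashlib.sha256(data).digest()
    return "sha256=" + urlsafe_b64encode(digest).decode("latin1").rstrip("=")

print(record_digest(b"example"))  # "sha256=..." suitable for a RECORD row
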
def csv_io_kwargs(mode):
    # type: (str) -> Dict[str, Any]
def csv_io_kwargs(mode: str) -> Dict[str, Any]:
    """Return keyword arguments to properly open a CSV file
    in the given mode.
    """
    return {'mode': mode, 'newline': '', 'encoding': 'utf-8'}
    return {"mode": mode, "newline": "", "encoding": "utf-8"}


def fix_script(path):
    # type: (str) -> bool
def fix_script(path: str) -> bool:
    """Replace #!python with #!/path/to/python
    Return True if file was changed.
    """
    # XXX RECORD hashes will need to be updated
    assert os.path.isfile(path)

    with open(path, 'rb') as script:
    with open(path, "rb") as script:
        firstline = script.readline()
        if not firstline.startswith(b'#!python'):
        if not firstline.startswith(b"#!python"):
            return False
        exename = sys.executable.encode(sys.getfilesystemencoding())
        firstline = b'#!' + exename + os.linesep.encode("ascii")
        firstline = b"#!" + exename + os.linesep.encode("ascii")
        rest = script.read()
    with open(path, 'wb') as script:
    with open(path, "wb") as script:
        script.write(firstline)
        script.write(rest)
    return True


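A short usage note for csv_io_kwargs above: newline="" is what the csv module requires so it can do its own line-ending handling. A sketch of reading a RECORD file with those keyword arguments (the path is a placeholder):

import csv

def read_record_rows(path: str) -> list:
    # Equivalent to open(path, **csv_io_kwargs("r"))
    with open(path, mode="r", newline="", encoding="utf-8") as f:
        return list(csv.reader(f))
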
def wheel_root_is_purelib(metadata):
    # type: (Message) -> bool
def wheel_root_is_purelib(metadata: Message) -> bool:
    return metadata.get("Root-Is-Purelib", "").lower() == "true"


@@ -129,8 +126,7 @@ def get_entrypoints(dist: BaseDistribution) -> Tuple[Dict[str, str], Dict[str, s
    return console_scripts, gui_scripts


def message_about_scripts_not_on_PATH(scripts):
    # type: (Sequence[str]) -> Optional[str]
def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> Optional[str]:
    """Determine if any scripts are not on PATH and format a warning.
    Returns a warning message if one or more scripts are not on PATH,
    otherwise None.
@@ -139,7 +135,7 @@ def message_about_scripts_not_on_PATH(scripts):
        return None

    # Group scripts by the path they were installed in
    grouped_by_dir = collections.defaultdict(set)  # type: Dict[str, Set[str]]
    grouped_by_dir: Dict[str, Set[str]] = collections.defaultdict(set)
    for destfile in scripts:
        parent_dir = os.path.dirname(destfile)
        script_name = os.path.basename(destfile)
@@ -147,23 +143,26 @@ def message_about_scripts_not_on_PATH(scripts):

    # We don't want to warn for directories that are on PATH.
    not_warn_dirs = [
        os.path.normcase(i).rstrip(os.sep) for i in
        os.environ.get("PATH", "").split(os.pathsep)
        os.path.normcase(os.path.normpath(i)).rstrip(os.sep)
        for i in os.environ.get("PATH", "").split(os.pathsep)
    ]
    # If an executable sits with sys.executable, we don't warn for it.
    # This covers the case of venv invocations without activating the venv.
    not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
    warn_for = {
        parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items()
        if os.path.normcase(parent_dir) not in not_warn_dirs
    }  # type: Dict[str, Set[str]]
    not_warn_dirs.append(
        os.path.normcase(os.path.normpath(os.path.dirname(sys.executable)))
    )
    warn_for: Dict[str, Set[str]] = {
        parent_dir: scripts
        for parent_dir, scripts in grouped_by_dir.items()
        if os.path.normcase(os.path.normpath(parent_dir)) not in not_warn_dirs
    }
    if not warn_for:
        return None

    # Format a message
    msg_lines = []
    for parent_dir, dir_scripts in warn_for.items():
        sorted_scripts = sorted(dir_scripts)  # type: List[str]
        sorted_scripts: List[str] = sorted(dir_scripts)
        if len(sorted_scripts) == 1:
            start_text = "script {} is".format(sorted_scripts[0])
        else:
@@ -172,8 +171,9 @@ def message_about_scripts_not_on_PATH(scripts):
        )

        msg_lines.append(
            "The {} installed in '{}' which is not on PATH."
            .format(start_text, parent_dir)
            "The {} installed in '{}' which is not on PATH.".format(
                start_text, parent_dir
            )
        )

    last_line_fmt = (
@@ -200,8 +200,9 @@ def message_about_scripts_not_on_PATH(scripts):
    return "\n".join(msg_lines)


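The added os.path.normpath calls above prevent spurious "not on PATH" warnings when a PATH entry differs from the install directory only in redundant separators or trailing slashes. Quick illustration (POSIX-style paths):

import os.path

a = os.path.normcase(os.path.normpath("/usr/local//bin/"))
b = os.path.normcase(os.path.normpath("/usr/local/bin"))
assert a == b  # both normalize to "/usr/local/bin", so no spurious warning
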
def _normalized_outrows(outrows):
    # type: (Iterable[InstalledCSVRow]) -> List[Tuple[str, str, str]]
def _normalized_outrows(
    outrows: Iterable[InstalledCSVRow],
) -> List[Tuple[str, str, str]]:
    """Normalize the given rows of a RECORD file.

    Items in each row are converted into str. Rows are then sorted to make
@@ -221,69 +222,57 @@ def _normalized_outrows(outrows):
    # For additional background, see--
    # https://github.com/pypa/pip/issues/5868
    return sorted(
        (ensure_str(record_path, encoding='utf-8'), hash_, str(size))
        for record_path, hash_, size in outrows
        (record_path, hash_, str(size)) for record_path, hash_, size in outrows
    )


def _record_to_fs_path(record_path):
    # type: (RecordPath) -> str
    return record_path
def _record_to_fs_path(record_path: RecordPath, lib_dir: str) -> str:
    return os.path.join(lib_dir, record_path)


def _fs_to_record_path(path, relative_to=None):
    # type: (str, Optional[str]) -> RecordPath
    if relative_to is not None:
        # On Windows, do not handle relative paths if they belong to different
        # logical disks
        if os.path.splitdrive(path)[0].lower() == \
                os.path.splitdrive(relative_to)[0].lower():
            path = os.path.relpath(path, relative_to)
    path = path.replace(os.path.sep, '/')
    return cast('RecordPath', path)
def _fs_to_record_path(path: str, lib_dir: str) -> RecordPath:
    # On Windows, do not handle relative paths if they belong to different
    # logical disks
    if os.path.splitdrive(path)[0].lower() == os.path.splitdrive(lib_dir)[0].lower():
        path = os.path.relpath(path, lib_dir)


def _parse_record_path(record_column):
    # type: (str) -> RecordPath
    p = ensure_text(record_column, encoding='utf-8')
    return cast('RecordPath', p)
    path = path.replace(os.path.sep, "/")
    return cast("RecordPath", path)


def get_csv_rows_for_installed(
    old_csv_rows,  # type: List[List[str]]
    installed,  # type: Dict[RecordPath, RecordPath]
    changed,  # type: Set[RecordPath]
    generated,  # type: List[str]
    lib_dir,  # type: str
):
    # type: (...) -> List[InstalledCSVRow]
    old_csv_rows: List[List[str]],
    installed: Dict[RecordPath, RecordPath],
    changed: Set[RecordPath],
    generated: List[str],
    lib_dir: str,
) -> List[InstalledCSVRow]:
    """
    :param installed: A map from archive RECORD path to installation RECORD
    path.
    """
    installed_rows = []  # type: List[InstalledCSVRow]
    installed_rows: List[InstalledCSVRow] = []
    for row in old_csv_rows:
        if len(row) > 3:
            logger.warning('RECORD line has more than three elements: %s', row)
        old_record_path = _parse_record_path(row[0])
            logger.warning("RECORD line has more than three elements: %s", row)
        old_record_path = cast("RecordPath", row[0])
        new_record_path = installed.pop(old_record_path, old_record_path)
        if new_record_path in changed:
            digest, length = rehash(_record_to_fs_path(new_record_path))
            digest, length = rehash(_record_to_fs_path(new_record_path, lib_dir))
        else:
            digest = row[1] if len(row) > 1 else ''
            length = row[2] if len(row) > 2 else ''
            digest = row[1] if len(row) > 1 else ""
            length = row[2] if len(row) > 2 else ""
        installed_rows.append((new_record_path, digest, length))
    for f in generated:
        path = _fs_to_record_path(f, lib_dir)
        digest, length = rehash(f)
        installed_rows.append((path, digest, length))
    for installed_record_path in installed.values():
        installed_rows.append((installed_record_path, '', ''))
        installed_rows.append((installed_record_path, "", ""))
    return installed_rows


def get_console_script_specs(console):
    # type: (Dict[str, str]) -> List[str]
def get_console_script_specs(console: Dict[str, str]) -> List[str]:
    """
    Given the mapping from entrypoint name to callable, return the relevant
    console script specs.
@@ -326,62 +315,57 @@ def get_console_script_specs(console):
    # DEFAULT
    # - The default behavior is to install pip, pipX, pipX.Y, easy_install
    # and easy_install-X.Y.
    pip_script = console.pop('pip', None)
    pip_script = console.pop("pip", None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            scripts_to_generate.append('pip = ' + pip_script)
            scripts_to_generate.append("pip = " + pip_script)

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            scripts_to_generate.append(
                'pip{} = {}'.format(sys.version_info[0], pip_script)
                "pip{} = {}".format(sys.version_info[0], pip_script)
            )

        scripts_to_generate.append(
            f'pip{get_major_minor_version()} = {pip_script}'
        )
        scripts_to_generate.append(f"pip{get_major_minor_version()} = {pip_script}")
        # Delete any other versioned pip entry points
        pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
        pip_ep = [k for k in console if re.match(r"pip(\d+(\.\d+)?)?$", k)]
        for k in pip_ep:
            del console[k]
    easy_install_script = console.pop('easy_install', None)
    easy_install_script = console.pop("easy_install", None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            scripts_to_generate.append(
                'easy_install = ' + easy_install_script
            )
            scripts_to_generate.append("easy_install = " + easy_install_script)

        scripts_to_generate.append(
            'easy_install-{} = {}'.format(
            "easy_install-{} = {}".format(
                get_major_minor_version(), easy_install_script
            )
        )
        # Delete any other versioned easy_install entry points
        easy_install_ep = [
            k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
            k for k in console if re.match(r"easy_install(-\d+\.\d+)?$", k)
        ]
        for k in easy_install_ep:
            del console[k]

    # Generate the console entry points specified in the wheel
    scripts_to_generate.extend(starmap('{} = {}'.format, console.items()))
    scripts_to_generate.extend(starmap("{} = {}".format, console.items()))

    return scripts_to_generate


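The widened regular expression above is the interesting part of that hunk: the old pattern assumed single-digit version components and therefore failed to match "pip3.10" on Python 3.10 and later. Demonstration:

import re

old = re.compile(r"pip(\d(\.\d)?)?$")
new = re.compile(r"pip(\d+(\.\d+)?)?$")

assert old.match("pip3.8")
assert not old.match("pip3.10")  # the second digit of ".10" breaks the old pattern
assert new.match("pip3.10")
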
class ZipBackedFile:
    def __init__(self, src_record_path, dest_path, zip_file):
        # type: (RecordPath, str, ZipFile) -> None
    def __init__(
        self, src_record_path: RecordPath, dest_path: str, zip_file: ZipFile
    ) -> None:
        self.src_record_path = src_record_path
        self.dest_path = dest_path
        self._zip_file = zip_file
        self.changed = False

    def _getinfo(self):
        # type: () -> ZipInfo
    def _getinfo(self) -> ZipInfo:
        return self._zip_file.getinfo(self.src_record_path)

    def save(self):
        # type: () -> None
    def save(self) -> None:
        # directory creation is lazy and after file filtering
        # to ensure we don't install empty dirs; empty dirs can't be
        # uninstalled.
@@ -410,22 +394,19 @@ class ZipBackedFile:


class ScriptFile:
    def __init__(self, file):
        # type: (File) -> None
    def __init__(self, file: "File") -> None:
        self._file = file
        self.src_record_path = self._file.src_record_path
        self.dest_path = self._file.dest_path
        self.changed = False

    def save(self):
        # type: () -> None
    def save(self) -> None:
        self._file.save()
        self.changed = fix_script(self.dest_path)


class MissingCallableSuffix(InstallationError):
    def __init__(self, entry_point):
        # type: (str) -> None
    def __init__(self, entry_point: str) -> None:
        super().__init__(
            "Invalid script entry point: {} - A callable "
            "suffix is required. Cf https://packaging.python.org/"
@@ -434,31 +415,30 @@ class MissingCallableSuffix(InstallationError):
        )


def _raise_for_invalid_entrypoint(specification):
    # type: (str) -> None
def _raise_for_invalid_entrypoint(specification: str) -> None:
    entry = get_export_entry(specification)
    if entry is not None and entry.suffix is None:
        raise MissingCallableSuffix(str(entry))


class PipScriptMaker(ScriptMaker):
    def make(self, specification, options=None):
        # type: (str, Dict[str, Any]) -> List[str]
    def make(
        self, specification: str, options: Optional[Dict[str, Any]] = None
    ) -> List[str]:
        _raise_for_invalid_entrypoint(specification)
        return super().make(specification, options)


def _install_wheel(
    name,  # type: str
    wheel_zip,  # type: ZipFile
    wheel_path,  # type: str
    scheme,  # type: Scheme
    pycompile=True,  # type: bool
    warn_script_location=True,  # type: bool
    direct_url=None,  # type: Optional[DirectUrl]
    requested=False,  # type: bool
):
    # type: (...) -> None
    name: str,
    wheel_zip: ZipFile,
    wheel_path: str,
    scheme: Scheme,
    pycompile: bool = True,
    warn_script_location: bool = True,
    direct_url: Optional[DirectUrl] = None,
    requested: bool = False,
) -> None:
    """Install a wheel.

    :param name: Name of the project to install
@@ -485,33 +465,23 @@ def _install_wheel(
    # installed = files copied from the wheel to the destination
    # changed = files changed while installing (scripts #! line typically)
    # generated = files newly generated during the install (script wrappers)
    installed = {}  # type: Dict[RecordPath, RecordPath]
    changed = set()  # type: Set[RecordPath]
    generated = []  # type: List[str]
    installed: Dict[RecordPath, RecordPath] = {}
    changed: Set[RecordPath] = set()
    generated: List[str] = []

    def record_installed(srcfile, destfile, modified=False):
        # type: (RecordPath, str, bool) -> None
    def record_installed(
        srcfile: RecordPath, destfile: str, modified: bool = False
    ) -> None:
        """Map archive RECORD paths to installation RECORD paths."""
        newpath = _fs_to_record_path(destfile, lib_dir)
        installed[srcfile] = newpath
        if modified:
            changed.add(_fs_to_record_path(destfile))
            changed.add(newpath)

    def all_paths():
        # type: () -> Iterable[RecordPath]
        names = wheel_zip.namelist()
        # If a flag is set, names may be unicode in Python 2. We convert to
        # text explicitly so these are valid for lookup in RECORD.
        decoded_names = map(ensure_text, names)
        for name in decoded_names:
            yield cast("RecordPath", name)

    def is_dir_path(path):
        # type: (RecordPath) -> bool
    def is_dir_path(path: RecordPath) -> bool:
        return path.endswith("/")

    def assert_no_path_traversal(dest_dir_path, target_path):
        # type: (str, str) -> None
    def assert_no_path_traversal(dest_dir_path: str, target_path: str) -> None:
        if not is_within_directory(dest_dir_path, target_path):
            message = (
                "The wheel {!r} has a file {!r} trying to install"
@@ -521,10 +491,10 @@ def _install_wheel(
                message.format(wheel_path, target_path, dest_dir_path)
            )

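is_within_directory, relied on by assert_no_path_traversal above, is a pip-internal helper; below is a hedged, stdlib-only equivalent of the containment check it performs (illustrative, not pip's exact implementation; POSIX paths in the asserts):

import os.path

def is_within_directory(directory: str, target: str) -> bool:
    directory = os.path.abspath(directory)
    target = os.path.abspath(target)
    # The directory must remain the common path prefix after resolution.
    return os.path.commonpath([directory, target]) == directory

assert is_within_directory("/opt/venv/lib", "/opt/venv/lib/pkg/mod.py")
assert not is_within_directory("/opt/venv/lib", "/opt/venv/lib/../../etc/passwd")
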
def root_scheme_file_maker(zip_file, dest):
|
||||
# type: (ZipFile, str) -> Callable[[RecordPath], File]
|
||||
def make_root_scheme_file(record_path):
|
||||
# type: (RecordPath) -> File
|
||||
def root_scheme_file_maker(
|
||||
zip_file: ZipFile, dest: str
|
||||
) -> Callable[[RecordPath], "File"]:
|
||||
def make_root_scheme_file(record_path: RecordPath) -> "File":
|
||||
normed_path = os.path.normpath(record_path)
|
||||
dest_path = os.path.join(dest, normed_path)
|
||||
assert_no_path_traversal(dest, dest_path)
|
||||
@@ -532,17 +502,12 @@ def _install_wheel(
|
||||
|
||||
return make_root_scheme_file
|
||||
|
||||
def data_scheme_file_maker(zip_file, scheme):
|
||||
# type: (ZipFile, Scheme) -> Callable[[RecordPath], File]
|
||||
scheme_paths = {}
|
||||
for key in SCHEME_KEYS:
|
||||
encoded_key = ensure_text(key)
|
||||
scheme_paths[encoded_key] = ensure_text(
|
||||
getattr(scheme, key), encoding=sys.getfilesystemencoding()
|
||||
)
|
||||
def data_scheme_file_maker(
|
||||
zip_file: ZipFile, scheme: Scheme
|
||||
) -> Callable[[RecordPath], "File"]:
|
||||
scheme_paths = {key: getattr(scheme, key) for key in SCHEME_KEYS}
|
||||
|
||||
def make_data_scheme_file(record_path):
|
||||
# type: (RecordPath) -> File
|
||||
def make_data_scheme_file(record_path: RecordPath) -> "File":
|
||||
normed_path = os.path.normpath(record_path)
|
||||
try:
|
||||
_, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2)
|
||||
@@ -561,9 +526,7 @@ def _install_wheel(
|
||||
"Unknown scheme key used in {}: {} (for file {!r}). .data"
|
||||
" directory contents should be in subdirectories named"
|
||||
" with a valid scheme key ({})"
|
||||
).format(
|
||||
wheel_path, scheme_key, record_path, valid_scheme_keys
|
||||
)
|
||||
).format(wheel_path, scheme_key, record_path, valid_scheme_keys)
|
||||
raise InstallationError(message)
|
||||
|
||||
dest_path = os.path.join(scheme_path, dest_subpath)
|
||||
@@ -572,30 +535,19 @@ def _install_wheel(
|
||||
|
||||
return make_data_scheme_file
|
||||
|
||||
def is_data_scheme_path(path):
|
||||
# type: (RecordPath) -> bool
|
||||
def is_data_scheme_path(path: RecordPath) -> bool:
|
||||
return path.split("/", 1)[0].endswith(".data")
|
||||
|
||||
paths = all_paths()
|
||||
paths = cast(List[RecordPath], wheel_zip.namelist())
|
||||
file_paths = filterfalse(is_dir_path, paths)
|
||||
root_scheme_paths, data_scheme_paths = partition(
|
||||
is_data_scheme_path, file_paths
|
||||
)
|
||||
root_scheme_paths, data_scheme_paths = partition(is_data_scheme_path, file_paths)

make_root_scheme_file = root_scheme_file_maker(
wheel_zip,
ensure_text(lib_dir, encoding=sys.getfilesystemencoding()),
)
files = map(make_root_scheme_file, root_scheme_paths)
make_root_scheme_file = root_scheme_file_maker(wheel_zip, lib_dir)
files: Iterator[File] = map(make_root_scheme_file, root_scheme_paths)

def is_script_scheme_path(path):
# type: (RecordPath) -> bool
def is_script_scheme_path(path: RecordPath) -> bool:
parts = path.split("/", 2)
return (
len(parts) > 2 and
parts[0].endswith(".data") and
parts[1] == "scripts"
)
return len(parts) > 2 and parts[0].endswith(".data") and parts[1] == "scripts"

other_scheme_paths, script_scheme_paths = partition(
is_script_scheme_path, data_scheme_paths
@@ -606,30 +558,32 @@ def _install_wheel(
files = chain(files, other_scheme_files)

# Get the defined entry points
distribution = get_wheel_distribution(wheel_path, canonicalize_name(name))
distribution = get_wheel_distribution(
FilesystemWheel(wheel_path),
canonicalize_name(name),
)
console, gui = get_entrypoints(distribution)

def is_entrypoint_wrapper(file):
# type: (File) -> bool
def is_entrypoint_wrapper(file: "File") -> bool:
# EP, EP.exe and EP-script.py are scripts generated for
# entry point EP by setuptools
path = file.dest_path
name = os.path.basename(path)
if name.lower().endswith('.exe'):
if name.lower().endswith(".exe"):
matchname = name[:-4]
elif name.lower().endswith('-script.py'):
elif name.lower().endswith("-script.py"):
matchname = name[:-10]
elif name.lower().endswith(".pya"):
matchname = name[:-4]
else:
matchname = name
# Ignore setuptools-generated scripts
return (matchname in console or matchname in gui)
return matchname in console or matchname in gui
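For a console entry point `ep`, setuptools may have generated wrappers named `ep`, `ep.exe`, `ep-script.py`, or `ep.pya`; the suffix stripping above maps each back to `ep` so the wrapper can be skipped. A small illustration (the file names are hypothetical):

# Hypothetical illustration of the suffix stripping above.
for generated in ("ep", "ep.exe", "ep-script.py", "ep.pya"):
    name = generated.lower()
    if name.endswith(".exe") or name.endswith(".pya"):
        matchname = generated[:-4]
    elif name.endswith("-script.py"):
        matchname = generated[:-10]
    else:
        matchname = generated
    assert matchname == "ep"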

script_scheme_files = map(make_data_scheme_file, script_scheme_paths)
script_scheme_files = filterfalse(
is_entrypoint_wrapper, script_scheme_files
script_scheme_files: Iterator[File] = map(
make_data_scheme_file, script_scheme_paths
)
script_scheme_files = filterfalse(is_entrypoint_wrapper, script_scheme_files)
script_scheme_files = map(ScriptFile, script_scheme_files)
files = chain(files, script_scheme_files)

@@ -637,8 +591,7 @@ def _install_wheel(
file.save()
record_installed(file.src_record_path, file.dest_path, file.changed)

def pyc_source_file_paths():
# type: () -> Iterator[str]
def pyc_source_file_paths() -> Generator[str, None, None]:
# We de-duplicate installation paths, since there can be overlap (e.g.
# file in .data maps to same location as file in wheel root).
# Sorting installation paths makes it easier to reproduce and debug
@@ -647,30 +600,21 @@ def _install_wheel(
full_installed_path = os.path.join(lib_dir, installed_path)
if not os.path.isfile(full_installed_path):
continue
if not full_installed_path.endswith('.py'):
if not full_installed_path.endswith(".py"):
continue
yield full_installed_path

def pyc_output_path(path):
# type: (str) -> str
"""Return the path the pyc file would have been written to.
"""
def pyc_output_path(path: str) -> str:
"""Return the path the pyc file would have been written to."""
return importlib.util.cache_from_source(path)
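`importlib.util.cache_from_source` maps a source path to its PEP 3147 `__pycache__` location, which is how the freshly compiled pyc below gets recorded; for example:

import importlib.util

# Prints something like 'pkg/__pycache__/mod.cpython-310.pyc'; the exact
# tag depends on the interpreter running the code.
print(importlib.util.cache_from_source("pkg/mod.py"))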

# Compile all of the pyc files for the installed files
if pycompile:
with captured_stdout() as stdout:
with warnings.catch_warnings():
warnings.filterwarnings('ignore')
warnings.filterwarnings("ignore")
for path in pyc_source_file_paths():
# Python 2's `compileall.compile_file` requires a str in
# error cases, so we must convert to the native type.
path_arg = ensure_str(
path, encoding=sys.getfilesystemencoding()
)
success = compileall.compile_file(
path_arg, force=True, quiet=True
)
success = compileall.compile_file(path, force=True, quiet=True)
if success:
pyc_path = pyc_output_path(path)
assert os.path.exists(pyc_path)
@@ -689,7 +633,7 @@ def _install_wheel(
# Ensure we don't generate any variants for scripts because this is almost
# never what somebody wants.
# See https://bitbucket.org/pypa/distlib/issue/35/
maker.variants = {''}
maker.variants = {""}

# This is required because otherwise distlib creates scripts that are not
# executable.
@@ -699,14 +643,12 @@ def _install_wheel(
# Generate the console and GUI entry points specified in the wheel
scripts_to_generate = get_console_script_specs(console)

gui_scripts_to_generate = list(starmap('{} = {}'.format, gui.items()))
gui_scripts_to_generate = list(starmap("{} = {}".format, gui.items()))

generated_console_scripts = maker.make_multiple(scripts_to_generate)
generated.extend(generated_console_scripts)

generated.extend(
maker.make_multiple(gui_scripts_to_generate, {'gui': True})
)
generated.extend(maker.make_multiple(gui_scripts_to_generate, {"gui": True}))

if warn_script_location:
msg = message_about_scripts_not_on_PATH(generated_console_scripts)
@@ -716,8 +658,7 @@ def _install_wheel(
generated_file_mode = 0o666 & ~current_umask()

@contextlib.contextmanager
def _generate_file(path, **kwargs):
# type: (str, **Any) -> Iterator[BinaryIO]
def _generate_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]:
with adjacent_tmp_file(path, **kwargs) as f:
yield f
os.chmod(f.name, generated_file_mode)
@@ -726,9 +667,9 @@ def _install_wheel(
dest_info_dir = os.path.join(lib_dir, info_dir)

# Record pip as the installer
installer_path = os.path.join(dest_info_dir, 'INSTALLER')
installer_path = os.path.join(dest_info_dir, "INSTALLER")
with _generate_file(installer_path) as installer_file:
installer_file.write(b'pip\n')
installer_file.write(b"pip\n")
generated.append(installer_path)

# Record the PEP 610 direct URL reference
@@ -740,12 +681,12 @@ def _install_wheel(

# Record the REQUESTED file
if requested:
requested_path = os.path.join(dest_info_dir, 'REQUESTED')
requested_path = os.path.join(dest_info_dir, "REQUESTED")
with open(requested_path, "wb"):
pass
generated.append(requested_path)

record_text = distribution.read_text('RECORD')
record_text = distribution.read_text("RECORD")
record_rows = list(csv.reader(record_text.splitlines()))

rows = get_csv_rows_for_installed(
@@ -753,42 +694,38 @@ def _install_wheel(
installed=installed,
changed=changed,
generated=generated,
lib_dir=lib_dir)
lib_dir=lib_dir,
)

# Record details of all files installed
record_path = os.path.join(dest_info_dir, 'RECORD')
record_path = os.path.join(dest_info_dir, "RECORD")

with _generate_file(record_path, **csv_io_kwargs('w')) as record_file:
# The type mypy infers for record_file is different for Python 3
# (typing.IO[Any]) and Python 2 (typing.BinaryIO). We explicitly
# cast to typing.IO[str] as a workaround.
writer = csv.writer(cast('IO[str]', record_file))
with _generate_file(record_path, **csv_io_kwargs("w")) as record_file:
# Explicitly cast to typing.IO[str] as a workaround for the mypy error:
# "writer" has incompatible type "BinaryIO"; expected "_Writer"
writer = csv.writer(cast("IO[str]", record_file))
writer.writerows(_normalized_outrows(rows))
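Each RECORD row is `path,hash,size`, with the digest urlsafe-base64-encoded and unpadded per the wheel spec; a minimal sketch of building one row (the helper name is illustrative, not pip's):

import base64
import hashlib
import os

def record_row(path: str):
    # Hash and size in the format a RECORD file expects (PEP 376 / wheel spec).
    with open(path, "rb") as f:
        digest = hashlib.sha256(f.read()).digest()
    b64 = base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")
    return (path, f"sha256={b64}", str(os.path.getsize(path)))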


@contextlib.contextmanager
def req_error_context(req_description):
# type: (str) -> Iterator[None]
def req_error_context(req_description: str) -> Generator[None, None, None]:
try:
yield
except InstallationError as e:
message = "For req: {}. {}".format(req_description, e.args[0])
reraise(
InstallationError, InstallationError(message), sys.exc_info()[2]
)
raise InstallationError(message) from e
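The `raise ... from e` form replaces the Python 2-era `reraise` helper while still chaining the original exception; a self-contained usage sketch (with a stand-in `InstallationError`):

import contextlib

class InstallationError(Exception):
    pass

@contextlib.contextmanager
def req_error_context(req_description: str):
    try:
        yield
    except InstallationError as e:
        message = "For req: {}. {}".format(req_description, e.args[0])
        raise InstallationError(message) from e

try:
    with req_error_context("demo-req"):
        raise InstallationError("boom")
except InstallationError as err:
    assert err.__cause__ is not None  # the original error is preserved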


def install_wheel(
name, # type: str
wheel_path, # type: str
scheme, # type: Scheme
req_description, # type: str
pycompile=True, # type: bool
warn_script_location=True, # type: bool
direct_url=None, # type: Optional[DirectUrl]
requested=False, # type: bool
):
# type: (...) -> None
name: str,
wheel_path: str,
scheme: Scheme,
req_description: str,
pycompile: bool = True,
warn_script_location: bool = True,
direct_url: Optional[DirectUrl] = None,
requested: bool = False,
) -> None:
with ZipFile(wheel_path, allowZip64=True) as z:
with req_error_context(req_description):
_install_wheel(

@@ -8,10 +8,9 @@ import logging
import mimetypes
import os
import shutil
from typing import Dict, Iterable, List, Optional, Tuple
from typing import Dict, Iterable, List, Optional

from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.pkg_resources import Distribution

from pip._internal.distributions import make_distribution_for_install_requirement
from pip._internal.distributions.installed import InstalledDistribution
@@ -20,11 +19,14 @@ from pip._internal.exceptions import (
HashMismatch,
HashUnpinned,
InstallationError,
MetadataInconsistent,
NetworkConnectionError,
PreviousBuildDirError,
VcsHashUnsupported,
)
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata import BaseDistribution, get_metadata_distribution
from pip._internal.models.direct_url import ArchiveInfo
from pip._internal.models.link import Link
from pip._internal.models.wheel import Wheel
from pip._internal.network.download import BatchDownloader, Downloader
@@ -33,13 +35,20 @@ from pip._internal.network.lazy_wheel import (
dist_from_wheel_url,
)
from pip._internal.network.session import PipSession
from pip._internal.operations.build.build_tracker import BuildTracker
from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_tracker import RequirementTracker
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.filesystem import copy2_fixed
from pip._internal.utils.direct_url_helpers import (
direct_url_for_editable,
direct_url_from_link,
)
from pip._internal.utils.hashes import Hashes, MissingHashes
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import display_path, hide_url, is_installable_dir, rmtree
from pip._internal.utils.misc import (
display_path,
hash_file,
hide_url,
is_installable_dir,
)
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.unpacking import unpack_file
from pip._internal.vcs import vcs
@@ -48,30 +57,29 @@ logger = logging.getLogger(__name__)


def _get_prepared_distribution(
req, # type: InstallRequirement
req_tracker, # type: RequirementTracker
finder, # type: PackageFinder
build_isolation, # type: bool
):
# type: (...) -> Distribution
req: InstallRequirement,
build_tracker: BuildTracker,
finder: PackageFinder,
build_isolation: bool,
check_build_deps: bool,
) -> BaseDistribution:
"""Prepare a distribution for installation."""
abstract_dist = make_distribution_for_install_requirement(req)
with req_tracker.track(req):
abstract_dist.prepare_distribution_metadata(finder, build_isolation)
return abstract_dist.get_pkg_resources_distribution()
with build_tracker.track(req):
abstract_dist.prepare_distribution_metadata(
finder, build_isolation, check_build_deps
)
return abstract_dist.get_metadata_distribution()


def unpack_vcs_link(link, location):
# type: (Link, str) -> None
def unpack_vcs_link(link: Link, location: str, verbosity: int) -> None:
vcs_backend = vcs.get_backend_for_scheme(link.scheme)
assert vcs_backend is not None
vcs_backend.unpack(location, url=hide_url(link.url))
vcs_backend.unpack(location, url=hide_url(link.url), verbosity=verbosity)


class File:

def __init__(self, path, content_type):
# type: (str, Optional[str]) -> None
def __init__(self, path: str, content_type: Optional[str]) -> None:
self.path = path
if content_type is None:
self.content_type = mimetypes.guess_type(path)[0]
@@ -80,19 +88,16 @@ class File:


def get_http_url(
link, # type: Link
download, # type: Downloader
download_dir=None, # type: Optional[str]
hashes=None, # type: Optional[Hashes]
):
# type: (...) -> File
link: Link,
download: Downloader,
download_dir: Optional[str] = None,
hashes: Optional[Hashes] = None,
) -> File:
temp_dir = TempDirectory(kind="unpack", globally_managed=True)
# If a download dir is specified, is the file already downloaded there?
already_downloaded_path = None
if download_dir:
already_downloaded_path = _check_download_dir(
link, download_dir, hashes
)
already_downloaded_path = _check_download_dir(link, download_dir, hashes)

if already_downloaded_path:
from_path = already_downloaded_path
@@ -106,72 +111,14 @@ def get_http_url(
return File(from_path, content_type)


def _copy2_ignoring_special_files(src, dest):
# type: (str, str) -> None
"""Copying special files is not supported, but as a convenience to users
we skip errors copying them. This supports tools that may create e.g.
socket files in the project source directory.
"""
try:
copy2_fixed(src, dest)
except shutil.SpecialFileError as e:
# SpecialFileError may be raised due to either the source or
# destination. If the destination was the cause then we would actually
# care, but since the destination directory is deleted prior to
# copy we ignore all of them assuming it is caused by the source.
logger.warning(
"Ignoring special file error '%s' encountered copying %s to %s.",
str(e),
src,
dest,
)


def _copy_source_tree(source, target):
# type: (str, str) -> None
target_abspath = os.path.abspath(target)
target_basename = os.path.basename(target_abspath)
target_dirname = os.path.dirname(target_abspath)

def ignore(d, names):
# type: (str, List[str]) -> List[str]
skipped = [] # type: List[str]
if d == source:
# Pulling in those directories can potentially be very slow,
# exclude the following directories if they appear in the top
# level dir (and only it).
# See discussion at https://github.com/pypa/pip/pull/6770
skipped += ['.tox', '.nox']
if os.path.abspath(d) == target_dirname:
# Prevent an infinite recursion if the target is in source.
# This can happen when TMPDIR is set to ${PWD}/...
# and we copy PWD to TMPDIR.
skipped += [target_basename]
return skipped

shutil.copytree(
source,
target,
ignore=ignore,
symlinks=True,
copy_function=_copy2_ignoring_special_files,
)
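`shutil.copytree` calls the `ignore` callback once per visited directory with that directory and its entries, and skips whatever names the callback returns; a runnable sketch of the same top-level-only filtering (the directory layout is made up):

import os
import shutil
import tempfile

src = tempfile.mkdtemp()
os.mkdir(os.path.join(src, ".tox"))
open(os.path.join(src, "setup.py"), "w").close()

def ignore(d: str, names):
    # Skip .tox/.nox only when visiting the top-level source directory,
    # mirroring the check in _copy_source_tree above.
    return [".tox", ".nox"] if d == src else []

dest = os.path.join(tempfile.mkdtemp(), "copy")
shutil.copytree(src, dest, ignore=ignore)
assert not os.path.exists(os.path.join(dest, ".tox"))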


def get_file_url(
link, # type: Link
download_dir=None, # type: Optional[str]
hashes=None # type: Optional[Hashes]
):
# type: (...) -> File
"""Get file and optionally check its hash.
"""
link: Link, download_dir: Optional[str] = None, hashes: Optional[Hashes] = None
) -> File:
"""Get file and optionally check its hash."""
# If a download dir is specified, is the file already there and valid?
already_downloaded_path = None
if download_dir:
already_downloaded_path = _check_download_dir(
link, download_dir, hashes
)
already_downloaded_path = _check_download_dir(link, download_dir, hashes)

if already_downloaded_path:
from_path = already_downloaded_path
@@ -189,13 +136,13 @@ def get_file_url(


def unpack_url(
link, # type: Link
location, # type: str
download, # type: Downloader
download_dir=None, # type: Optional[str]
hashes=None, # type: Optional[Hashes]
):
# type: (...) -> Optional[File]
link: Link,
location: str,
download: Downloader,
verbosity: int,
download_dir: Optional[str] = None,
hashes: Optional[Hashes] = None,
) -> Optional[File]:
"""Unpack link into location, downloading if required.

:param hashes: A Hashes object, one of whose embedded hashes must match,
@@ -205,30 +152,10 @@ def unpack_url(
"""
# non-editable vcs urls
if link.is_vcs:
unpack_vcs_link(link, location)
unpack_vcs_link(link, location, verbosity=verbosity)
return None

# Once out-of-tree-builds are no longer supported, could potentially
# replace the below condition with `assert not link.is_existing_dir`
# - unpack_url does not need to be called for in-tree-builds.
#
# As further cleanup, _copy_source_tree and accompanying tests can
# be removed.
if link.is_existing_dir():
deprecated(
"A future pip version will change local packages to be built "
"in-place without first copying to a temporary directory. "
"We recommend you use --use-feature=in-tree-build to test "
"your packages with this new behavior before it becomes the "
"default.\n",
replacement=None,
gone_in="21.3",
issue=7555
)
if os.path.isdir(location):
rmtree(location)
_copy_source_tree(link.file_path, location)
return None
assert not link.is_existing_dir()

# file urls
if link.is_file:
@@ -251,10 +178,14 @@ def unpack_url(
return file


def _check_download_dir(link, download_dir, hashes):
# type: (Link, str, Optional[Hashes]) -> Optional[str]
""" Check download_dir for previously downloaded file with correct hash
If a correct file is found return its path else None
def _check_download_dir(
link: Link,
download_dir: str,
hashes: Optional[Hashes],
warn_on_hash_mismatch: bool = True,
) -> Optional[str]:
"""Check download_dir for previously downloaded file with correct hash
If a correct file is found return its path else None
"""
download_path = os.path.join(download_dir, link.filename)

@@ -262,46 +193,45 @@ def _check_download_dir(link, download_dir, hashes):
return None

# If already downloaded, does its hash match?
logger.info('File was already downloaded %s', download_path)
logger.info("File was already downloaded %s", download_path)
if hashes:
try:
hashes.check_against_path(download_path)
except HashMismatch:
logger.warning(
'Previously-downloaded file %s has bad hash. '
'Re-downloading.',
download_path
)
if warn_on_hash_mismatch:
logger.warning(
"Previously-downloaded file %s has bad hash. Re-downloading.",
download_path,
)
os.unlink(download_path)
return None
return download_path


class RequirementPreparer:
"""Prepares a Requirement
"""
"""Prepares a Requirement"""

def __init__(
self,
build_dir, # type: str
download_dir, # type: Optional[str]
src_dir, # type: str
build_isolation, # type: bool
req_tracker, # type: RequirementTracker
session, # type: PipSession
progress_bar, # type: str
finder, # type: PackageFinder
require_hashes, # type: bool
use_user_site, # type: bool
lazy_wheel, # type: bool
in_tree_build, # type: bool
):
# type: (...) -> None
build_dir: str,
download_dir: Optional[str],
src_dir: str,
build_isolation: bool,
check_build_deps: bool,
build_tracker: BuildTracker,
session: PipSession,
progress_bar: str,
finder: PackageFinder,
require_hashes: bool,
use_user_site: bool,
lazy_wheel: bool,
verbosity: int,
) -> None:
super().__init__()

self.src_dir = src_dir
self.build_dir = build_dir
self.req_tracker = req_tracker
self.build_tracker = build_tracker
self._session = session
self._download = Downloader(session, progress_bar)
self._batch_download = BatchDownloader(session, progress_bar)
@@ -314,6 +244,9 @@ class RequirementPreparer:
# Is build isolation allowed?
self.build_isolation = build_isolation

# Should check build dependencies?
self.check_build_deps = check_build_deps

# Should hash-checking be required?
self.require_hashes = require_hashes

@@ -323,35 +256,45 @@ class RequirementPreparer:
# Should wheels be downloaded lazily?
self.use_lazy_wheel = lazy_wheel

# Should in-tree builds be used for local paths?
self.in_tree_build = in_tree_build
# How verbose should underlying tooling be?
self.verbosity = verbosity

# Memoized downloaded files, as mapping of url: (path, mime type)
self._downloaded = {} # type: Dict[str, Tuple[str, str]]
# Memoized downloaded files, as mapping of url: path.
self._downloaded: Dict[str, str] = {}

# Previous "header" printed for a link-based InstallRequirement
self._previous_requirement_header = ("", "")

def _log_preparing_link(self, req):
# type: (InstallRequirement) -> None
def _log_preparing_link(self, req: InstallRequirement) -> None:
"""Provide context for the requirement being prepared."""
if req.link.is_file and not req.original_link_is_in_wheel_cache:
if req.link.is_file and not req.is_wheel_from_cache:
message = "Processing %s"
information = str(display_path(req.link.file_path))
else:
message = "Collecting %s"
information = str(req.req or req)

# If we used req.req, inject requirement source if available (this
# would already be included if we used req directly)
if req.req and req.comes_from:
if isinstance(req.comes_from, str):
comes_from: Optional[str] = req.comes_from
else:
comes_from = req.comes_from.from_path()
if comes_from:
information += f" (from {comes_from})"

if (message, information) != self._previous_requirement_header:
self._previous_requirement_header = (message, information)
logger.info(message, information)

if req.original_link_is_in_wheel_cache:
if req.is_wheel_from_cache:
with indent_log():
logger.info("Using cached %s", req.link.filename)

def _ensure_link_req_src_dir(self, req, parallel_builds):
# type: (InstallRequirement, bool) -> None
def _ensure_link_req_src_dir(
self, req: InstallRequirement, parallel_builds: bool
) -> None:
"""Ensure source_dir of a linked InstallRequirement."""
# Since source_dir is only set for editable requirements.
if req.link.is_wheel:
@@ -359,7 +302,7 @@ class RequirementPreparer:
# directory.
return
assert req.source_dir is None
if req.link.is_existing_dir() and self.in_tree_build:
if req.link.is_existing_dir():
# build local directories in-tree
req.source_dir = req.link.file_path
return
@@ -376,6 +319,7 @@ class RequirementPreparer:
# installation.
# FIXME: this won't upgrade when there's an existing
# package unpacked in `req.source_dir`
# TODO: this check is now probably dead code
if is_installable_dir(req.source_dir):
raise PreviousBuildDirError(
"pip can't proceed with requirements '{}' due to a"
@@ -385,8 +329,7 @@ class RequirementPreparer:
"Please delete it and try again.".format(req, req.source_dir)
)

def _get_linked_req_hashes(self, req):
# type: (InstallRequirement) -> Hashes
def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes:
# By the time this is called, the requirement's link should have
# been checked so we can tell what kind of requirements req is
# and raise some more informative errors than otherwise.
@@ -418,18 +361,72 @@ class RequirementPreparer:
# showing the user what the hash should be.
return req.hashes(trust_internet=False) or MissingHashes()

def _fetch_metadata_using_lazy_wheel(self, link):
# type: (Link) -> Optional[Distribution]
"""Fetch metadata using lazy wheel, if possible."""
if not self.use_lazy_wheel:
return None
def _fetch_metadata_only(
self,
req: InstallRequirement,
) -> Optional[BaseDistribution]:
if self.require_hashes:
logger.debug('Lazy wheel is not used as hash checking is required')
logger.debug(
"Metadata-only fetching is not used as hash checking is required",
)
return None
# Try PEP 658 metadata first, then fall back to lazy wheel if unavailable.
return self._fetch_metadata_using_link_data_attr(
req
) or self._fetch_metadata_using_lazy_wheel(req.link)

def _fetch_metadata_using_link_data_attr(
self,
req: InstallRequirement,
) -> Optional[BaseDistribution]:
"""Fetch metadata from the data-dist-info-metadata attribute, if possible."""
# (1) Get the link to the metadata file, if provided by the backend.
metadata_link = req.link.metadata_link()
if metadata_link is None:
return None
assert req.req is not None
logger.info(
"Obtaining dependency information for %s from %s",
req.req,
metadata_link,
)
# (2) Download the contents of the METADATA file, separate from the dist itself.
metadata_file = get_http_url(
metadata_link,
self._download,
hashes=metadata_link.as_hashes(),
)
with open(metadata_file.path, "rb") as f:
metadata_contents = f.read()
# (3) Generate a dist just from those file contents.
metadata_dist = get_metadata_distribution(
metadata_contents,
req.link.filename,
req.req.name,
)
# (4) Ensure the Name: field from the METADATA file matches the name from the
# install requirement.
#
# NB: raw_name will fall back to the name from the install requirement if
# the Name: field is not present, but it's noted in the raw_name docstring
# that that should NEVER happen anyway.
if metadata_dist.raw_name != req.req.name:
raise MetadataInconsistent(
req, "Name", req.req.name, metadata_dist.raw_name
)
return metadata_dist
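PEP 658 serves the wheel's METADATA file at the wheel's own URL plus a `.metadata` suffix, advertised through the `data-dist-info-metadata` attribute on the index page; `Link.metadata_link()` wraps that derivation together with any advertised hashes. The URL rule itself is just the following (the example URL is made up):

def metadata_url(wheel_url: str) -> str:
    # Per PEP 658: the metadata file lives next to the distribution file.
    return wheel_url + ".metadata"

assert (
    metadata_url("https://files.example/demo-1.0-py3-none-any.whl")
    == "https://files.example/demo-1.0-py3-none-any.whl.metadata"
)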

def _fetch_metadata_using_lazy_wheel(
self,
link: Link,
) -> Optional[BaseDistribution]:
"""Fetch metadata using lazy wheel, if possible."""
# --use-feature=fast-deps must be provided.
if not self.use_lazy_wheel:
return None
if link.is_file or not link.is_wheel:
logger.debug(
'Lazy wheel is not used as '
'%r does not points to a remote wheel',
"Lazy wheel is not used as %r does not point to a remote wheel",
link,
)
return None
@@ -437,22 +434,22 @@ class RequirementPreparer:
wheel = Wheel(link.filename)
name = canonicalize_name(wheel.name)
logger.info(
'Obtaining dependency information from %s %s',
name, wheel.version,
"Obtaining dependency information from %s %s",
name,
wheel.version,
)
url = link.url.split('#', 1)[0]
url = link.url.split("#", 1)[0]
try:
return dist_from_wheel_url(name, url, self._session)
except HTTPRangeRequestUnsupported:
logger.debug('%s does not support range requests', url)
logger.debug("%s does not support range requests", url)
return None
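`dist_from_wheel_url` works by issuing HTTP Range requests so that only the zip central directory and the `*.dist-info/METADATA` member are downloaded, never the whole wheel; `HTTPRangeRequestUnsupported` is raised when the server will not serve partial content. A minimal, illustrative probe for that capability (not pip's implementation):

import urllib.request

def supports_range_requests(url: str) -> bool:
    # Servers that honor Range reply 206 Partial Content to this request.
    request = urllib.request.Request(url, headers={"Range": "bytes=0-0"})
    with urllib.request.urlopen(request) as response:
        return response.status == 206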

def _complete_partial_requirements(
self,
partially_downloaded_reqs, # type: Iterable[InstallRequirement]
parallel_builds=False, # type: bool
):
# type: (...) -> None
partially_downloaded_reqs: Iterable[InstallRequirement],
parallel_builds: bool = False,
) -> None:
"""Download any requirements which were only fetched by metadata."""
# Download to a temporary directory. These will be copied over as
# needed for downstream 'download', 'wheel', and 'install' commands.
@@ -461,7 +458,7 @@ class RequirementPreparer:
# Map each link to the requirement that owns it. This allows us to set
# `req.local_file_path` on the appropriate requirement after passing
# all the links at once into BatchDownloader.
links_to_fully_download = {} # type: Dict[Link, InstallRequirement]
links_to_fully_download: Dict[Link, InstallRequirement] = {}
for req in partially_downloaded_reqs:
assert req.link
links_to_fully_download[req.link] = req
@@ -480,35 +477,47 @@ class RequirementPreparer:
for req in partially_downloaded_reqs:
self._prepare_linked_requirement(req, parallel_builds)

def prepare_linked_requirement(self, req, parallel_builds=False):
# type: (InstallRequirement, bool) -> Distribution
def prepare_linked_requirement(
self, req: InstallRequirement, parallel_builds: bool = False
) -> BaseDistribution:
"""Prepare a requirement to be obtained from req.link."""
assert req.link
link = req.link
self._log_preparing_link(req)
with indent_log():
# Check if the relevant file is already available
# in the download directory
file_path = None
if self.download_dir is not None and link.is_wheel:
if self.download_dir is not None and req.link.is_wheel:
hashes = self._get_linked_req_hashes(req)
file_path = _check_download_dir(req.link, self.download_dir, hashes)
file_path = _check_download_dir(
req.link,
self.download_dir,
hashes,
# When a locally built wheel has been found in cache, we don't warn
# about re-downloading when the already downloaded wheel hash does
# not match. This is because the hash must be checked against the
# original link, not the cached link. In that case the already
# downloaded file will be removed and re-fetched from cache (which
# implies a hash check against the cache entry's origin.json).
warn_on_hash_mismatch=not req.is_wheel_from_cache,
)

if file_path is not None:
# The file is already available, so mark it as downloaded
self._downloaded[req.link.url] = file_path, None
self._downloaded[req.link.url] = file_path
else:
# The file is not available, attempt to fetch only metadata
wheel_dist = self._fetch_metadata_using_lazy_wheel(link)
if wheel_dist is not None:
metadata_dist = self._fetch_metadata_only(req)
if metadata_dist is not None:
req.needs_more_preparation = True
return wheel_dist
return metadata_dist

# None of the optimizations worked, fully prepare the requirement
return self._prepare_linked_requirement(req, parallel_builds)

def prepare_linked_requirements_more(self, reqs, parallel_builds=False):
# type: (Iterable[InstallRequirement], bool) -> None
def prepare_linked_requirements_more(
self, reqs: Iterable[InstallRequirement], parallel_builds: bool = False
) -> None:
"""Prepare linked requirements more, if needed."""
reqs = [req for req in reqs if req.needs_more_preparation]
for req in reqs:
@@ -517,12 +526,12 @@ class RequirementPreparer:
hashes = self._get_linked_req_hashes(req)
file_path = _check_download_dir(req.link, self.download_dir, hashes)
if file_path is not None:
self._downloaded[req.link.url] = file_path, None
self._downloaded[req.link.url] = file_path
req.needs_more_preparation = False

# Prepare requirements we found were already downloaded for some
# reason. The other downloads will be completed separately.
partially_downloaded_reqs = [] # type: List[InstallRequirement]
partially_downloaded_reqs: List[InstallRequirement] = []
for req in reqs:
if req.needs_more_preparation:
partially_downloaded_reqs.append(req)
@@ -532,35 +541,87 @@ class RequirementPreparer:
# TODO: separate this part out from RequirementPreparer when the v1
# resolver can be removed!
self._complete_partial_requirements(
partially_downloaded_reqs, parallel_builds=parallel_builds,
partially_downloaded_reqs,
parallel_builds=parallel_builds,
)

def _prepare_linked_requirement(self, req, parallel_builds):
# type: (InstallRequirement, bool) -> Distribution
def _prepare_linked_requirement(
self, req: InstallRequirement, parallel_builds: bool
) -> BaseDistribution:
assert req.link
link = req.link

self._ensure_link_req_src_dir(req, parallel_builds)
hashes = self._get_linked_req_hashes(req)

if link.is_existing_dir() and self.in_tree_build:
if hashes and req.is_wheel_from_cache:
assert req.download_info is not None
assert link.is_wheel
assert link.is_file
# We need to verify hashes, and we have found the requirement in the cache
# of locally built wheels.
if (
isinstance(req.download_info.info, ArchiveInfo)
and req.download_info.info.hashes
and hashes.has_one_of(req.download_info.info.hashes)
):
# At this point we know the requirement was built from a hashable source
# artifact, and we verified that the cache entry's hash of the original
# artifact matches one of the hashes we expect. We don't verify hashes
# against the cached wheel, because the wheel is not the original.
hashes = None
else:
logger.warning(
"The hashes of the source archive found in cache entry "
"don't match, ignoring cached built wheel "
"and re-downloading source."
)
req.link = req.cached_wheel_source_link
link = req.link
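The `has_one_of` check above compares the user's `--hash` values against the origin hashes recorded in the cache entry's `origin.json`, not against the locally built wheel itself; a rough sketch of that comparison (data shapes simplified from pip's `Hashes` class):

def has_one_of(allowed: dict, recorded: dict) -> bool:
    # allowed: {"sha256": ["digest1", ...]} from requirement --hash options
    # recorded: {"sha256": "digest1"} from the cache entry's origin.json
    return any(
        recorded.get(alg) in digests for alg, digests in allowed.items()
    )

assert has_one_of({"sha256": ["abc123"]}, {"sha256": "abc123"})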

self._ensure_link_req_src_dir(req, parallel_builds)

if link.is_existing_dir():
local_file = None
elif link.url not in self._downloaded:
try:
local_file = unpack_url(
link, req.source_dir, self._download,
self.download_dir, hashes
link,
req.source_dir,
self._download,
self.verbosity,
self.download_dir,
hashes,
)
except NetworkConnectionError as exc:
raise InstallationError(
'Could not install requirement {} because of HTTP '
'error {} for URL {}'.format(req, exc, link)
"Could not install requirement {} because of HTTP "
"error {} for URL {}".format(req, exc, link)
)
else:
file_path, content_type = self._downloaded[link.url]
file_path = self._downloaded[link.url]
if hashes:
hashes.check_against_path(file_path)
local_file = File(file_path, content_type)
local_file = File(file_path, content_type=None)

# If download_info is set, we got it from the wheel cache.
if req.download_info is None:
# Editables don't go through this function (see
# prepare_editable_requirement).
assert not req.editable
req.download_info = direct_url_from_link(link, req.source_dir)
# Make sure we have a hash in download_info. If we got it as part of the
# URL, it will have been verified and we can rely on it. Otherwise we
# compute it from the downloaded file.
# FIXME: https://github.com/pypa/pip/issues/11943
if (
isinstance(req.download_info.info, ArchiveInfo)
and not req.download_info.info.hashes
and local_file
):
hash = hash_file(local_file.path)[0].hexdigest()
# We populate info.hash for backward compatibility.
# This will automatically populate info.hashes.
req.download_info.info.hash = f"sha256={hash}"

# For use in later processing,
# preserve the file path on the requirement.
@@ -568,12 +629,15 @@ class RequirementPreparer:
req.local_file_path = local_file.path

dist = _get_prepared_distribution(
req, self.req_tracker, self.finder, self.build_isolation,
req,
self.build_tracker,
self.finder,
self.build_isolation,
self.check_build_deps,
)
return dist

def save_linked_requirement(self, req):
# type: (InstallRequirement) -> None
def save_linked_requirement(self, req: InstallRequirement) -> None:
assert self.download_dir is not None
assert req.link is not None
link = req.link
@@ -584,8 +648,9 @@ class RequirementPreparer:

if link.is_existing_dir():
logger.debug(
'Not copying link to destination directory '
'since it is a directory: %s', link,
"Not copying link to destination directory "
"since it is a directory: %s",
link,
)
return
if req.local_file_path is None:
@@ -596,31 +661,35 @@ class RequirementPreparer:
if not os.path.exists(download_location):
shutil.copy(req.local_file_path, download_location)
download_path = display_path(download_location)
logger.info('Saved %s', download_path)
logger.info("Saved %s", download_path)

def prepare_editable_requirement(
self,
req, # type: InstallRequirement
):
# type: (...) -> Distribution
"""Prepare an editable requirement
"""
req: InstallRequirement,
) -> BaseDistribution:
"""Prepare an editable requirement."""
assert req.editable, "cannot prepare a non-editable req as editable"

logger.info('Obtaining %s', req)
logger.info("Obtaining %s", req)

with indent_log():
if self.require_hashes:
raise InstallationError(
'The editable requirement {} cannot be installed when '
'requiring hashes, because there is no single file to '
'hash.'.format(req)
"The editable requirement {} cannot be installed when "
"requiring hashes, because there is no single file to "
"hash.".format(req)
)
req.ensure_has_source_dir(self.src_dir)
req.update_editable()
assert req.source_dir
req.download_info = direct_url_for_editable(req.unpacked_source_directory)

dist = _get_prepared_distribution(
req, self.req_tracker, self.finder, self.build_isolation,
req,
self.build_tracker,
self.finder,
self.build_isolation,
self.check_build_deps,
)

req.check_if_exists(self.use_user_site)
@@ -629,27 +698,24 @@ class RequirementPreparer:

def prepare_installed_requirement(
self,
req, # type: InstallRequirement
skip_reason # type: str
):
# type: (...) -> Distribution
"""Prepare an already-installed requirement
"""
req: InstallRequirement,
skip_reason: str,
) -> BaseDistribution:
"""Prepare an already-installed requirement."""
assert req.satisfied_by, "req should have been satisfied but isn't"
assert skip_reason is not None, (
"did not get skip reason skipped but req.satisfied_by "
"is set to {}".format(req.satisfied_by)
)
logger.info(
'Requirement %s: %s (%s)',
skip_reason, req, req.satisfied_by.version
"Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version
)
with indent_log():
if self.require_hashes:
logger.debug(
'Since it is already installed, we are trusting this '
'package without checking its hash. To ensure a '
'completely repeatable environment, install into an '
'empty virtualenv.'
"Since it is already installed, we are trusting this "
"package without checking its hash. To ensure a "
"completely repeatable environment, install into an "
"empty virtualenv."
)
return InstalledDistribution(req).get_pkg_resources_distribution()
return InstalledDistribution(req).get_metadata_distribution()

@@ -1,3 +1,4 @@
import importlib.util
import os
from collections import namedtuple
from typing import Any, List, Optional
@@ -5,34 +6,29 @@ from typing import Any, List, Optional
from pip._vendor import tomli
from pip._vendor.packaging.requirements import InvalidRequirement, Requirement

from pip._internal.exceptions import InstallationError
from pip._internal.exceptions import (
InstallationError,
InvalidPyProjectBuildRequires,
MissingPyProjectBuildRequires,
)


def _is_list_of_str(obj):
# type: (Any) -> bool
return (
isinstance(obj, list) and
all(isinstance(item, str) for item in obj)
)
def _is_list_of_str(obj: Any) -> bool:
return isinstance(obj, list) and all(isinstance(item, str) for item in obj)


def make_pyproject_path(unpacked_source_directory):
# type: (str) -> str
return os.path.join(unpacked_source_directory, 'pyproject.toml')
def make_pyproject_path(unpacked_source_directory: str) -> str:
return os.path.join(unpacked_source_directory, "pyproject.toml")


BuildSystemDetails = namedtuple('BuildSystemDetails', [
'requires', 'backend', 'check', 'backend_path'
])
BuildSystemDetails = namedtuple(
"BuildSystemDetails", ["requires", "backend", "check", "backend_path"]
)


def load_pyproject_toml(
use_pep517, # type: Optional[bool]
pyproject_toml, # type: str
setup_py, # type: str
req_name # type: str
):
# type: (...) -> Optional[BuildSystemDetails]
use_pep517: Optional[bool], pyproject_toml: str, setup_py: str, req_name: str
) -> Optional[BuildSystemDetails]:
"""Load the pyproject.toml file.

Parameters:
@@ -57,9 +53,15 @@ def load_pyproject_toml(
has_pyproject = os.path.isfile(pyproject_toml)
has_setup = os.path.isfile(setup_py)

if not has_pyproject and not has_setup:
raise InstallationError(
f"{req_name} does not appear to be a Python project: "
f"neither 'setup.py' nor 'pyproject.toml' found."
)

if has_pyproject:
with open(pyproject_toml, encoding="utf-8") as f:
pp_toml = tomli.load(f)
pp_toml = tomli.loads(f.read())
build_system = pp_toml.get("build-system")
else:
build_system = None
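The switch from `tomli.load(f)` to `tomli.loads(f.read())` matters because newer tomli releases (like the stdlib `tomllib` that descends from them) accept only a binary file object in `load`, while `loads` takes the already-decoded text pip has here; for example:

import tomllib  # stdlib descendant of the vendored tomli (Python 3.11+)

text = '[build-system]\nrequires = ["setuptools>=40.8.0"]\n'
assert tomllib.loads(text)["build-system"]["requires"] == ["setuptools>=40.8.0"]
# tomllib.load(), by contrast, demands a file opened in binary mode:
# with open("pyproject.toml", "rb") as f:
#     tomllib.load(f)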
@@ -82,17 +84,26 @@ def load_pyproject_toml(
raise InstallationError(
"Disabling PEP 517 processing is invalid: "
"project specifies a build backend of {} "
"in pyproject.toml".format(
build_system["build-backend"]
)
"in pyproject.toml".format(build_system["build-backend"])
)
use_pep517 = True

# If we haven't worked out whether to use PEP 517 yet,
# and the user hasn't explicitly stated a preference,
# we do so if the project has a pyproject.toml file.
# we do so if the project has a pyproject.toml file
# or if we cannot import setuptools or wheels.

# We fall back to PEP 517 when setuptools or the wheel package is missing,
# so setuptools can be installed as a default build backend.
# For more info see:
# https://discuss.python.org/t/pip-without-setuptools-could-the-experience-be-improved/11810/9
# https://github.com/pypa/pip/issues/8559
elif use_pep517 is None:
use_pep517 = has_pyproject
use_pep517 = (
has_pyproject
or not importlib.util.find_spec("setuptools")
or not importlib.util.find_spec("wheel")
)
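`importlib.util.find_spec` returns None when a module cannot be imported, so the fallback above detects a missing setuptools or wheel without actually importing either; for example:

import importlib.util

# True in an environment where setuptools is installed, False otherwise.
print(importlib.util.find_spec("setuptools") is not None)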

# At this point, we know whether we're going to use PEP 517.
assert use_pep517 is not None
@@ -124,52 +135,37 @@ def load_pyproject_toml(

# Ensure that the build-system section in pyproject.toml conforms
# to PEP 518.
error_template = (
"{package} has a pyproject.toml file that does not comply "
"with PEP 518: {reason}"
)

# Specifying the build-system table but not the requires key is invalid
if "requires" not in build_system:
raise InstallationError(
error_template.format(package=req_name, reason=(
"it has a 'build-system' table but not "
"'build-system.requires' which is mandatory in the table"
))
)
raise MissingPyProjectBuildRequires(package=req_name)

# Error out if requires is not a list of strings
requires = build_system["requires"]
if not _is_list_of_str(requires):
raise InstallationError(error_template.format(
raise InvalidPyProjectBuildRequires(
package=req_name,
reason="'build-system.requires' is not a list of strings.",
))
reason="It is not a list of strings.",
)

# Each requirement must be valid as per PEP 508
for requirement in requires:
try:
Requirement(requirement)
except InvalidRequirement:
raise InstallationError(
error_template.format(
package=req_name,
reason=(
"'build-system.requires' contains an invalid "
"requirement: {!r}".format(requirement)
),
)
)
except InvalidRequirement as error:
raise InvalidPyProjectBuildRequires(
package=req_name,
reason=f"It contains an invalid requirement: {requirement!r}",
) from error

backend = build_system.get("build-backend")
backend_path = build_system.get("backend-path", [])
check = [] # type: List[str]
check: List[str] = []
if backend is None:
# If the user didn't specify a backend, we assume they want to use
# the setuptools backend. But we can't be sure they have included
# a version of setuptools which supplies the backend, or wheel
# (which is needed by the backend) in their requirements. So we
# make a note to check that those requirements are present once
# a version of setuptools which supplies the backend. So we
# make a note to check that this requirement is present once
# we have set up the environment.
# This is quite a lot of work to check for a very specific case. But
# the problem is, that case is potentially quite common - projects that
@@ -178,6 +174,6 @@ def load_pyproject_toml(
# tools themselves. The original PEP 518 code had a similar check (but
# implemented in a different way).
backend = "setuptools.build_meta:__legacy__"
check = ["setuptools>=40.8.0", "wheel"]
check = ["setuptools>=40.8.0"]

return BuildSystemDetails(requires, backend, check, backend_path)

@@ -1,6 +1,6 @@
import collections
import logging
from typing import Iterator, List, Optional, Sequence, Tuple
from typing import Generator, List, Optional, Sequence, Tuple

from pip._internal.utils.logging import indent_log

@@ -9,8 +9,10 @@ from .req_install import InstallRequirement
from .req_set import RequirementSet

__all__ = [
"RequirementSet", "InstallRequirement",
"parse_requirements", "install_given_reqs",
"RequirementSet",
"InstallRequirement",
"parse_requirements",
"install_given_reqs",
]

logger = logging.getLogger(__name__)
@@ -26,7 +28,7 @@ class InstallationResult:

def _validate_requirements(
requirements: List[InstallRequirement],
) -> Iterator[Tuple[str, InstallRequirement]]:
) -> Generator[Tuple[str, InstallRequirement], None, None]:
for req in requirements:
assert req.name, f"invalid to-be-installed requirement: {req}"
yield req.name, req
@@ -34,7 +36,6 @@ def _validate_requirements(

def install_given_reqs(
requirements: List[InstallRequirement],
install_options: List[str],
global_options: Sequence[str],
root: Optional[str],
home: Optional[str],
@@ -52,8 +53,8 @@ def install_given_reqs(

if to_install:
logger.info(
'Installing collected packages: %s',
', '.join(to_install.keys()),
"Installing collected packages: %s",
", ".join(to_install.keys()),
)

installed = []
@@ -61,17 +62,14 @@ def install_given_reqs(
with indent_log():
for req_name, requirement in to_install.items():
if requirement.should_reinstall:
logger.info('Attempting uninstall: %s', req_name)
logger.info("Attempting uninstall: %s", req_name)
with indent_log():
uninstalled_pathset = requirement.uninstall(
auto_confirm=True
)
uninstalled_pathset = requirement.uninstall(auto_confirm=True)
else:
uninstalled_pathset = None

try:
requirement.install(
install_options,
global_options,
root=root,
home=home,

@@ -11,28 +11,28 @@ InstallRequirement.
import logging
import os
import re
from typing import Any, Dict, Optional, Set, Tuple, Union
from typing import Dict, List, Optional, Set, Tuple, Union

from pip._vendor.packaging.markers import Marker
from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
from pip._vendor.packaging.specifiers import Specifier
from pip._vendor.pkg_resources import RequirementParseError, parse_requirements

from pip._internal.exceptions import InstallationError
from pip._internal.models.index import PyPI, TestPyPI
from pip._internal.models.link import Link
from pip._internal.models.wheel import Wheel
from pip._internal.pyproject import make_pyproject_path
from pip._internal.req.req_file import ParsedRequirement
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils.filetypes import is_archive_file
from pip._internal.utils.misc import is_installable_dir
from pip._internal.utils.packaging import get_requirement
from pip._internal.utils.urls import path_to_url
from pip._internal.vcs import is_url, vcs

__all__ = [
"install_req_from_editable", "install_req_from_line",
"parse_editable"
"install_req_from_editable",
"install_req_from_line",
"parse_editable",
]

logger = logging.getLogger(__name__)
@@ -40,7 +40,7 @@ operators = Specifier._operators.keys()


def _strip_extras(path: str) -> Tuple[str, Optional[str]]:
m = re.match(r'^(.+)(\[[^\]]+\])$', path)
m = re.match(r"^(.+)(\[[^\]]+\])$", path)
extras = None
if m:
path_no_extras = m.group(1)
@@ -54,7 +54,7 @@ def _strip_extras(path: str) -> Tuple[str, Optional[str]]:
def convert_extras(extras: Optional[str]) -> Set[str]:
if not extras:
return set()
return Requirement("placeholder" + extras.lower()).extras
return get_requirement("placeholder" + extras.lower()).extras


def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:
@@ -74,39 +74,23 @@ def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:
url_no_extras, extras = _strip_extras(url)

if os.path.isdir(url_no_extras):
setup_py = os.path.join(url_no_extras, 'setup.py')
setup_cfg = os.path.join(url_no_extras, 'setup.cfg')
if not os.path.exists(setup_py) and not os.path.exists(setup_cfg):
msg = (
'File "setup.py" or "setup.cfg" not found. Directory cannot be '
'installed in editable mode: {}'
.format(os.path.abspath(url_no_extras))
)
pyproject_path = make_pyproject_path(url_no_extras)
if os.path.isfile(pyproject_path):
msg += (
'\n(A "pyproject.toml" file was found, but editable '
'mode currently requires a setuptools-based build.)'
)
raise InstallationError(msg)

# Treating it as code that has already been checked out
url_no_extras = path_to_url(url_no_extras)

if url_no_extras.lower().startswith('file:'):
if url_no_extras.lower().startswith("file:"):
package_name = Link(url_no_extras).egg_fragment
if extras:
return (
package_name,
url_no_extras,
Requirement("placeholder" + extras.lower()).extras,
get_requirement("placeholder" + extras.lower()).extras,
)
else:
return package_name, url_no_extras, set()

for version_control in vcs:
if url.lower().startswith(f'{version_control}:'):
url = f'{version_control}+{url}'
if url.lower().startswith(f"{version_control}:"):
url = f"{version_control}+{url}"
break

link = Link(url)
@@ -114,9 +98,9 @@ def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:
if not link.is_vcs:
backends = ", ".join(vcs.all_schemes)
raise InstallationError(
f'{editable_req} is not a valid editable requirement. '
f'It should either be a path to a local project or a VCS URL '
f'(beginning with {backends}).'
f"{editable_req} is not a valid editable requirement. "
f"It should either be a path to a local project or a VCS URL "
f"(beginning with {backends})."
)

package_name = link.egg_fragment
@@ -128,43 +112,66 @@ def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:
return package_name, url, set()


def check_first_requirement_in_file(filename: str) -> None:
"""Check if file is parsable as a requirements file.

This is heavily based on ``pkg_resources.parse_requirements``, but
simplified to just check the first meaningful line.

:raises InvalidRequirement: If the first meaningful line cannot be parsed
as a requirement.
"""
with open(filename, encoding="utf-8", errors="ignore") as f:
# Create a steppable iterator, so we can handle \-continuations.
lines = (
line
for line in (line.strip() for line in f)
if line and not line.startswith("#") # Skip blank lines/comments.
)

for line in lines:
# Drop comments -- a hash without a space may be in a URL.
if " #" in line:
line = line[: line.find(" #")]
# If there is a line continuation, drop it, and append the next line.
if line.endswith("\\"):
line = line[:-2].strip() + next(lines, "")
Requirement(line)
return
|
||||
|
||||
|
||||
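# --- Editor's sketch (illustration, not part of the commit) -------------------
# check_first_requirement_in_file() only parses the first meaningful line, so a
# file whose first requirement is valid passes even if later lines are junk.
# Assuming the helper is importable from pip._internal.req.constructors:
import tempfile

from pip._internal.req.constructors import check_first_requirement_in_file

with tempfile.NamedTemporaryFile("w", suffix=".txt", delete=False) as f:
    f.write("# a comment\nrequests>=2.0\nthis !! line is never inspected\n")

check_first_requirement_in_file(f.name)  # no exception: "requests>=2.0" parses
# ------------------------------------------------------------------------------
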
def deduce_helpful_msg(req: str) -> str:
    """Returns a helpful msg in case the requirements file does not exist,
    or cannot be parsed.

    :param req: Requirements file path
    """
    msg = ""
    if os.path.exists(req):
        msg = " The path does exist. "
        # Try to parse and check if it is a requirements file.
        try:
            with open(req) as fp:
                # parse first line only
                next(parse_requirements(fp.read()))
                msg += (
                    "The argument you provided "
                    "({}) appears to be a"
                    " requirements file. If that is the"
                    " case, use the '-r' flag to install"
                    " the packages specified within it."
                ).format(req)
        except RequirementParseError:
            logger.debug(
                "Cannot parse '%s' as requirements file", req, exc_info=True
            )
    if not os.path.exists(req):
        return f" File '{req}' does not exist."
    msg = " The path does exist. "
    # Try to parse and check if it is a requirements file.
    try:
        check_first_requirement_in_file(req)
    except InvalidRequirement:
        logger.debug("Cannot parse '%s' as requirements file", req)
    else:
        msg += f" File '{req}' does not exist."
        msg += (
            f"The argument you provided "
            f"({req}) appears to be a"
            f" requirements file. If that is the"
            f" case, use the '-r' flag to install"
            f" the packages specified within it."
        )
    return msg


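# --- Editor's sketch (illustration, not part of the commit) -------------------
# What deduce_helpful_msg() above returns in the two main cases; the file names
# are illustrative only.
from pip._internal.req.constructors import deduce_helpful_msg

print(deduce_helpful_msg("no/such/file.txt"))  # " File '...' does not exist."
print(deduce_helpful_msg("requirements.txt"))  # suggests "-r" if the file parses
# ------------------------------------------------------------------------------
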
class RequirementParts:
    def __init__(
        self,
        requirement: Optional[Requirement],
        link: Optional[Link],
        markers: Optional[Marker],
        extras: Set[str],
        self,
        requirement: Optional[Requirement],
        link: Optional[Link],
        markers: Optional[Marker],
        extras: Set[str],
    ):
        self.requirement = requirement
        self.link = link
@@ -194,13 +201,16 @@ def parse_req_from_editable(editable_req: str) -> RequirementParts:
def install_req_from_editable(
    editable_req: str,
    comes_from: Optional[Union[InstallRequirement, str]] = None,
    *,
    use_pep517: Optional[bool] = None,
    isolated: bool = False,
    options: Optional[Dict[str, Any]] = None,
    global_options: Optional[List[str]] = None,
    hash_options: Optional[Dict[str, List[str]]] = None,
    constraint: bool = False,
    user_supplied: bool = False,
    permit_editable_wheels: bool = False,
    config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
) -> InstallRequirement:

    parts = parse_req_from_editable(editable_req)

    return InstallRequirement(
@@ -208,13 +218,14 @@ def install_req_from_editable(
        comes_from=comes_from,
        user_supplied=user_supplied,
        editable=True,
        permit_editable_wheels=permit_editable_wheels,
        link=parts.link,
        constraint=constraint,
        use_pep517=use_pep517,
        isolated=isolated,
        install_options=options.get("install_options", []) if options else [],
        global_options=options.get("global_options", []) if options else [],
        hash_options=options.get("hashes", {}) if options else {},
        global_options=global_options,
        hash_options=hash_options,
        config_settings=config_settings,
        extras=parts.extras,
    )

@@ -250,6 +261,8 @@ def _get_url_from_path(path: str, name: str) -> Optional[str]:
    if _looks_like_path(name) and os.path.isdir(path):
        if is_installable_dir(path):
            return path_to_url(path)
        # TODO: The is_installable_dir test here might not be necessary
        #       now that it is done in load_pyproject_toml too.
        raise InstallationError(
            f"Directory {name!r} is not installable. Neither 'setup.py' "
            "nor 'pyproject.toml' found."
@@ -258,24 +271,23 @@ def _get_url_from_path(path: str, name: str) -> Optional[str]:
        return None
    if os.path.isfile(path):
        return path_to_url(path)
    urlreq_parts = name.split('@', 1)
    urlreq_parts = name.split("@", 1)
    if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]):
        # If the path contains '@' and the part before it does not look
        # like a path, try to treat it as a PEP 440 URL req instead.
        return None
    logger.warning(
        'Requirement %r looks like a filename, but the '
        'file does not exist',
        name
        "Requirement %r looks like a filename, but the file does not exist",
        name,
    )
    return path_to_url(path)


def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementParts:
    if is_url(name):
        marker_sep = '; '
        marker_sep = "; "
    else:
        marker_sep = ';'
        marker_sep = ";"
    if marker_sep in name:
        name, markers_as_string = name.split(marker_sep, 1)
        markers_as_string = markers_as_string.strip()
@@ -302,9 +314,8 @@ def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementPar
    # it's a local file, dir, or url
    if link:
        # Handle relative file URLs
        if link.scheme == 'file' and re.search(r'\.\./', link.url):
            link = Link(
                path_to_url(os.path.normpath(os.path.abspath(link.path))))
        if link.scheme == "file" and re.search(r"\.\./", link.url):
            link = Link(path_to_url(os.path.normpath(os.path.abspath(link.path))))
        # wheel file
        if link.is_wheel:
            wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
@@ -323,25 +334,24 @@ def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementPar
    def with_source(text: str) -> str:
        if not line_source:
            return text
        return f'{text} (from {line_source})'
        return f"{text} (from {line_source})"

    def _parse_req_string(req_as_string: str) -> Requirement:
        try:
            req = Requirement(req_as_string)
            req = get_requirement(req_as_string)
        except InvalidRequirement:
            if os.path.sep in req_as_string:
                add_msg = "It looks like a path."
                add_msg += deduce_helpful_msg(req_as_string)
            elif ('=' in req_as_string and
                  not any(op in req_as_string for op in operators)):
            elif "=" in req_as_string and not any(
                op in req_as_string for op in operators
            ):
                add_msg = "= is not a valid operator. Did you mean == ?"
            else:
                add_msg = ''
            msg = with_source(
                f'Invalid requirement: {req_as_string!r}'
            )
                add_msg = ""
            msg = with_source(f"Invalid requirement: {req_as_string!r}")
            if add_msg:
                msg += f'\nHint: {add_msg}'
                msg += f"\nHint: {add_msg}"
            raise InstallationError(msg)
        else:
            # Deprecate extras after specifiers: "name>=1.0[extras]"
@@ -350,7 +360,7 @@ def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementPar
            # RequirementParts
            for spec in req.specifier:
                spec_str = str(spec)
                if spec_str.endswith(']'):
                if spec_str.endswith("]"):
                    msg = f"Extras after version '{spec_str}'."
                    raise InstallationError(msg)
        return req
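# --- Editor's sketch (illustration, not part of the commit) -------------------
# The "= is not a valid operator" hint above fires because packaging rejects a
# bare "=": only ==, !=, <=, >=, <, >, ~= and === are specifier operators.
from pip._vendor.packaging.requirements import InvalidRequirement, Requirement

try:
    Requirement("SomeProject=1.0")  # typo for SomeProject==1.0
except InvalidRequirement:
    print("= is not a valid operator. Did you mean == ?")
# ------------------------------------------------------------------------------
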
@@ -366,12 +376,15 @@ def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementPar
def install_req_from_line(
    name: str,
    comes_from: Optional[Union[str, InstallRequirement]] = None,
    *,
    use_pep517: Optional[bool] = None,
    isolated: bool = False,
    options: Optional[Dict[str, Any]] = None,
    global_options: Optional[List[str]] = None,
    hash_options: Optional[Dict[str, List[str]]] = None,
    constraint: bool = False,
    line_source: Optional[str] = None,
    user_supplied: bool = False,
    config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
) -> InstallRequirement:
    """Creates an InstallRequirement from a name, which might be a
    requirement, directory containing 'setup.py', filename, or URL.
@@ -382,11 +395,15 @@ def install_req_from_line(
    parts = parse_req_from_line(name, line_source)

    return InstallRequirement(
        parts.requirement, comes_from, link=parts.link, markers=parts.markers,
        use_pep517=use_pep517, isolated=isolated,
        install_options=options.get("install_options", []) if options else [],
        global_options=options.get("global_options", []) if options else [],
        hash_options=options.get("hashes", {}) if options else {},
        parts.requirement,
        comes_from,
        link=parts.link,
        markers=parts.markers,
        use_pep517=use_pep517,
        isolated=isolated,
        global_options=global_options,
        hash_options=hash_options,
        config_settings=config_settings,
        constraint=constraint,
        extras=parts.extras,
        user_supplied=user_supplied,
@@ -401,7 +418,7 @@ def install_req_from_req_string(
    user_supplied: bool = False,
) -> InstallRequirement:
    try:
        req = Requirement(req_string)
        req = get_requirement(req_string)
    except InvalidRequirement:
        raise InstallationError(f"Invalid requirement: '{req_string}'")

@@ -409,8 +426,12 @@ def install_req_from_req_string(
        PyPI.file_storage_domain,
        TestPyPI.file_storage_domain,
    ]
    if (req.url and comes_from and comes_from.link and
            comes_from.link.netloc in domains_not_allowed):
    if (
        req.url
        and comes_from
        and comes_from.link
        and comes_from.link.netloc in domains_not_allowed
    ):
        # Explicitly disallow pypi packages that depend on external urls
        raise InstallationError(
            "Packages installed from PyPI cannot depend on packages "
@@ -432,6 +453,7 @@ def install_req_from_parsed_requirement(
    isolated: bool = False,
    use_pep517: Optional[bool] = None,
    user_supplied: bool = False,
    config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
) -> InstallRequirement:
    if parsed_req.is_editable:
        req = install_req_from_editable(
@@ -441,6 +463,7 @@ def install_req_from_parsed_requirement(
            constraint=parsed_req.constraint,
            isolated=isolated,
            user_supplied=user_supplied,
            config_settings=config_settings,
        )

    else:
@@ -449,10 +472,18 @@ def install_req_from_parsed_requirement(
            comes_from=parsed_req.comes_from,
            use_pep517=use_pep517,
            isolated=isolated,
            options=parsed_req.options,
            global_options=(
                parsed_req.options.get("global_options", [])
                if parsed_req.options
                else []
            ),
            hash_options=(
                parsed_req.options.get("hashes", {}) if parsed_req.options else {}
            ),
            constraint=parsed_req.constraint,
            line_source=parsed_req.line_source,
            user_supplied=user_supplied,
            config_settings=config_settings,
        )
    return req

@@ -468,7 +499,8 @@ def install_req_from_link_and_ireq(
        markers=ireq.markers,
        use_pep517=ireq.use_pep517,
        isolated=ireq.isolated,
        install_options=ireq.install_options,
        global_options=ireq.global_options,
        hash_options=ireq.hash_options,
        config_settings=ireq.config_settings,
        user_supplied=ireq.user_supplied,
    )

@@ -2,13 +2,24 @@
Requirements file parsing
"""

import logging
import optparse
import os
import re
import shlex
import urllib.parse
from optparse import Values
from typing import TYPE_CHECKING, Any, Callable, Dict, Iterator, List, Optional, Tuple
from typing import (
    TYPE_CHECKING,
    Any,
    Callable,
    Dict,
    Generator,
    Iterable,
    List,
    Optional,
    Tuple,
)

from pip._internal.cli import cmdoptions
from pip._internal.exceptions import InstallationError, RequirementsFileParseError
@@ -25,20 +36,20 @@ if TYPE_CHECKING:

    from pip._internal.index.package_finder import PackageFinder

__all__ = ['parse_requirements']
__all__ = ["parse_requirements"]

ReqFileLines = Iterator[Tuple[int, str]]
ReqFileLines = Iterable[Tuple[int, str]]

LineParser = Callable[[str], Tuple[str, Values]]

SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
COMMENT_RE = re.compile(r'(^|\s+)#.*$')
SCHEME_RE = re.compile(r"^(http|https|file):", re.I)
COMMENT_RE = re.compile(r"(^|\s+)#.*$")

# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
# variable name consisting of only uppercase letters, digits or the '_'
# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
# 2013 Edition.
ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')
ENV_VAR_RE = re.compile(r"(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})")

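# --- Editor's sketch (illustration, not part of the commit) -------------------
# How a regex like ENV_VAR_RE drives ${VAR} expansion in requirement files;
# only uppercase/digit/underscore names match, per the comment above.
import os
import re

env_var_re = re.compile(r"(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})")

os.environ["INDEX_HOST"] = "pypi.example.org"  # illustrative value
line = "--index-url https://${INDEX_HOST}/simple"
for var, name in env_var_re.findall(line):
    line = line.replace(var, os.environ.get(name, var))
print(line)  # --index-url https://pypi.example.org/simple
# ------------------------------------------------------------------------------
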
SUPPORTED_OPTIONS: List[Callable[..., optparse.Option]] = [
    cmdoptions.index_url,
@@ -59,14 +70,16 @@ SUPPORTED_OPTIONS: List[Callable[..., optparse.Option]] = [

# options to be passed to requirements
SUPPORTED_OPTIONS_REQ: List[Callable[..., optparse.Option]] = [
    cmdoptions.install_options,
    cmdoptions.global_options,
    cmdoptions.hash,
    cmdoptions.config_settings,
]

# the 'dest' string values
SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]

logger = logging.getLogger(__name__)


class ParsedRequirement:
    def __init__(
@@ -119,7 +132,7 @@ def parse_requirements(
    finder: Optional["PackageFinder"] = None,
    options: Optional[optparse.Values] = None,
    constraint: bool = False,
) -> Iterator[ParsedRequirement]:
) -> Generator[ParsedRequirement, None, None]:
    """Parse a requirements file and yield ParsedRequirement instances.

    :param filename: Path or url of requirements file.
@@ -134,10 +147,7 @@ def parse_requirements(

    for parsed_line in parser.parse(filename, constraint):
        parsed_req = handle_line(
            parsed_line,
            options=options,
            finder=finder,
            session=session
            parsed_line, options=options, finder=finder, session=session
        )
        if parsed_req is not None:
            yield parsed_req
@@ -159,10 +169,11 @@ def handle_requirement_line(
    line: ParsedLine,
    options: Optional[optparse.Values] = None,
) -> ParsedRequirement:

    # preserve for the nested code path
    line_comes_from = '{} {} (line {})'.format(
        '-c' if line.constraint else '-r', line.filename, line.lineno,
    line_comes_from = "{} {} (line {})".format(
        "-c" if line.constraint else "-r",
        line.filename,
        line.lineno,
    )

    assert line.is_requirement
@@ -177,17 +188,13 @@ def handle_requirement_line(
            constraint=line.constraint,
        )
    else:
        if options:
            # Disable wheels if the user has specified build options
            cmdoptions.check_install_build_global(options, line.opts)

        # get the options that apply to requirements
        req_options = {}
        for dest in SUPPORTED_OPTIONS_REQ_DEST:
            if dest in line.opts.__dict__ and line.opts.__dict__[dest]:
                req_options[dest] = line.opts.__dict__[dest]

        line_source = f'line {line.lineno} of {line.filename}'
        line_source = f"line {line.lineno} of {line.filename}"
        return ParsedRequirement(
            requirement=line.requirement,
            is_editable=line.is_editable,
@@ -206,6 +213,12 @@ def handle_option_line(
    options: Optional[optparse.Values] = None,
    session: Optional[PipSession] = None,
) -> None:
    if opts.hashes:
        logger.warning(
            "%s line %s has --hash but no requirement, and will be ignored.",
            filename,
            lineno,
        )

    if options:
        # percolate options upward
@@ -213,19 +226,20 @@ def handle_option_line(
            options.require_hashes = opts.require_hashes
        if opts.features_enabled:
            options.features_enabled.extend(
                f for f in opts.features_enabled
                if f not in options.features_enabled
                f for f in opts.features_enabled if f not in options.features_enabled
            )

    # set finder options
    if finder:
        find_links = finder.find_links
        index_urls = finder.index_urls
        if opts.index_url:
            index_urls = [opts.index_url]
        no_index = finder.search_scope.no_index
        if opts.no_index is True:
            no_index = True
            index_urls = []
        if opts.extra_index_urls:
        if opts.index_url and not no_index:
            index_urls = [opts.index_url]
        if opts.extra_index_urls and not no_index:
            index_urls.extend(opts.extra_index_urls)
        if opts.find_links:
            # FIXME: it would be nice to keep track of the source
@@ -245,6 +259,7 @@ def handle_option_line(
        search_scope = SearchScope(
            find_links=find_links,
            index_urls=index_urls,
            no_index=no_index,
        )
        finder.search_scope = search_scope

@@ -256,7 +271,7 @@ def handle_option_line(

    if session:
        for host in opts.trusted_hosts or []:
            source = f'line {lineno} of {filename}'
            source = f"line {lineno} of {filename}"
            session.add_trusted_host(host, source=source)


@@ -313,18 +328,18 @@ class RequirementsFileParser:
        self._session = session
        self._line_parser = line_parser

    def parse(self, filename: str, constraint: bool) -> Iterator[ParsedLine]:
        """Parse a given file, yielding parsed lines.
        """
    def parse(
        self, filename: str, constraint: bool
    ) -> Generator[ParsedLine, None, None]:
        """Parse a given file, yielding parsed lines."""
        yield from self._parse_and_recurse(filename, constraint)

    def _parse_and_recurse(
        self, filename: str, constraint: bool
    ) -> Iterator[ParsedLine]:
    ) -> Generator[ParsedLine, None, None]:
        for line in self._parse_file(filename, constraint):
            if (
                not line.is_requirement and
                (line.opts.requirements or line.opts.constraints)
            if not line.is_requirement and (
                line.opts.requirements or line.opts.constraints
            ):
                # parse a nested requirements file
                if line.opts.requirements:
@@ -342,14 +357,17 @@ class RequirementsFileParser:
                elif not SCHEME_RE.search(req_path):
                    # do a join so relative paths work
                    req_path = os.path.join(
                        os.path.dirname(filename), req_path,
                        os.path.dirname(filename),
                        req_path,
                    )

                yield from self._parse_and_recurse(req_path, nested_constraint)
            else:
                yield line

    def _parse_file(self, filename: str, constraint: bool) -> Iterator[ParsedLine]:
    def _parse_file(
        self, filename: str, constraint: bool
    ) -> Generator[ParsedLine, None, None]:
        _, content = get_file_content(filename, self._session)

        lines_enum = preprocess(content)
@@ -359,7 +377,7 @@ class RequirementsFileParser:
                args_str, opts = self._line_parser(line)
            except OptionParsingError as e:
                # add offending line
                msg = f'Invalid requirement: {line}\n{e.msg}'
                msg = f"Invalid requirement: {line}\n{e.msg}"
                raise RequirementsFileParseError(msg)

            yield ParsedLine(
@@ -383,7 +401,12 @@ def get_line_parser(finder: Optional["PackageFinder"]) -> LineParser:

        args_str, options_str = break_args_options(line)

        opts, _ = parser.parse_args(shlex.split(options_str), defaults)
        try:
            options = shlex.split(options_str)
        except ValueError as e:
            raise OptionParsingError(f"Could not split options: {options_str}") from e

        opts, _ = parser.parse_args(options, defaults)

        return args_str, opts

@@ -395,16 +418,16 @@ def break_args_options(line: str) -> Tuple[str, str]:
    (and then optparse) the options, not the args.  args can contain markers
    which are corrupted by shlex.
    """
    tokens = line.split(' ')
    tokens = line.split(" ")
    args = []
    options = tokens[:]
    for token in tokens:
        if token.startswith('-') or token.startswith('--'):
        if token.startswith("-") or token.startswith("--"):
            break
        else:
            args.append(token)
            options.pop(0)
    return ' '.join(args), ' '.join(options)
    return " ".join(args), " ".join(options)


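# --- Editor's sketch (illustration, not part of the commit) -------------------
# break_args_options() above splits on the first "-"/"--" token: everything
# before it is the requirement, everything after is option text for optparse.
from pip._internal.req.req_file import break_args_options

args, options = break_args_options("requests>=2.0 --hash=sha256:deadbeef")
print(args)     # requests>=2.0
print(options)  # --hash=sha256:deadbeef
# ------------------------------------------------------------------------------
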
class OptionParsingError(Exception):
@@ -427,6 +450,7 @@ def build_parser() -> optparse.OptionParser:
    # that in our own exception.
    def parser_exit(self: Any, msg: str) -> "NoReturn":
        raise OptionParsingError(msg)

    # NOTE: mypy disallows assigning to a method
    # https://github.com/python/mypy/issues/2427
    parser.exit = parser_exit  # type: ignore
@@ -441,26 +465,26 @@ def join_lines(lines_enum: ReqFileLines) -> ReqFileLines:
    primary_line_number = None
    new_line: List[str] = []
    for line_number, line in lines_enum:
        if not line.endswith('\\') or COMMENT_RE.match(line):
        if not line.endswith("\\") or COMMENT_RE.match(line):
            if COMMENT_RE.match(line):
                # this ensures comments are always matched later
                line = ' ' + line
                line = " " + line
            if new_line:
                new_line.append(line)
                assert primary_line_number is not None
                yield primary_line_number, ''.join(new_line)
                yield primary_line_number, "".join(new_line)
                new_line = []
            else:
                yield line_number, line
        else:
            if not new_line:
                primary_line_number = line_number
            new_line.append(line.strip('\\'))
            new_line.append(line.strip("\\"))

    # last line contains \
    if new_line:
        assert primary_line_number is not None
        yield primary_line_number, ''.join(new_line)
        yield primary_line_number, "".join(new_line)

    # TODO: handle space after '\'.

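# --- Editor's sketch (illustration, not part of the commit) -------------------
# join_lines() above folds backslash-continued lines, keeping the first line's
# number, and prefixes bare comments with a space so COMMENT_RE still matches.
from pip._internal.req.req_file import join_lines

lines = [(1, "requests \\"), (2, "    >=2.0"), (3, "# a comment")]
print(list(join_lines(lines)))
# [(1, 'requests     >=2.0'), (3, ' # a comment')]
# ------------------------------------------------------------------------------
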
@@ -470,7 +494,7 @@ def ignore_comments(lines_enum: ReqFileLines) -> ReqFileLines:
    Strips comments and filters empty lines.
    """
    for line_number, line in lines_enum:
        line = COMMENT_RE.sub('', line)
        line = COMMENT_RE.sub("", line)
        line = line.strip()
        if line:
            yield line_number, line
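# --- Editor's sketch (illustration, not part of the commit) -------------------
# ignore_comments() above strips trailing comments and drops lines that end up
# empty, so only real requirement lines survive preprocessing.
from pip._internal.req.req_file import ignore_comments

lines = [(1, "requests>=2.0  # keep loose"), (2, ""), (3, "# only a comment")]
print(list(ignore_comments(lines)))
# [(1, 'requests>=2.0')]
# ------------------------------------------------------------------------------
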
@@ -514,15 +538,15 @@ def get_file_content(url: str, session: PipSession) -> Tuple[str, str]:
    scheme = get_url_scheme(url)

    # Pip has special support for file:// URLs (LocalFSAdapter).
    if scheme in ['http', 'https', 'file']:
    if scheme in ["http", "https", "file"]:
        resp = session.get(url)
        raise_for_status(resp)
        return resp.url, resp.text

    # Assume this is a bare path.
    try:
        with open(url, 'rb') as f:
        with open(url, "rb") as f:
            content = auto_decode(f.read())
    except OSError as exc:
        raise InstallationError(f'Could not open requirements file: {exc}')
        raise InstallationError(f"Could not open requirements file: {exc}")
    return url, content

@@ -1,55 +1,59 @@
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

import functools
import logging
import os
import shutil
import sys
import uuid
import zipfile
from typing import Any, Dict, Iterable, List, Optional, Sequence, Union
from optparse import Values
from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union

from pip._vendor import pkg_resources, six
from pip._vendor.packaging.markers import Marker
from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.specifiers import SpecifierSet
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import Version
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.pep517.wrappers import Pep517HookCaller
from pip._vendor.pkg_resources import Distribution
from pip._vendor.pyproject_hooks import BuildBackendHookCaller

from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment
from pip._internal.exceptions import InstallationError
from pip._internal.locations import get_scheme
from pip._internal.metadata import (
    BaseDistribution,
    get_default_environment,
    get_directory_distribution,
    get_wheel_distribution,
)
from pip._internal.metadata.base import FilesystemWheel
from pip._internal.models.direct_url import DirectUrl
from pip._internal.models.link import Link
from pip._internal.operations.build.metadata import generate_metadata
from pip._internal.operations.build.metadata_editable import generate_editable_metadata
from pip._internal.operations.build.metadata_legacy import (
    generate_metadata as generate_metadata_legacy,
)
from pip._internal.operations.install.editable_legacy import (
    install_editable as install_editable_legacy,
)
from pip._internal.operations.install.legacy import LegacyInstallFailure
from pip._internal.operations.install.legacy import install as install_legacy
from pip._internal.operations.install.wheel import install_wheel
from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
from pip._internal.req.req_uninstall import UninstallPathSet
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.direct_url_helpers import direct_url_from_link
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
    ConfiguredBuildBackendHookCaller,
    ask_path_exists,
    backup_dir,
    display_path,
    dist_in_site_packages,
    dist_in_usersite,
    get_distribution,
    hide_url,
    redact_auth_from_url,
)
from pip._internal.utils.packaging import get_metadata
from pip._internal.utils.packaging import safe_extra
from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
from pip._internal.utils.virtualenv import running_under_virtualenv
from pip._internal.vcs import vcs
@@ -57,32 +61,6 @@ from pip._internal.vcs import vcs
logger = logging.getLogger(__name__)


def _get_dist(metadata_directory: str) -> Distribution:
    """Return a pkg_resources.Distribution for the provided
    metadata directory.
    """
    dist_dir = metadata_directory.rstrip(os.sep)

    # Build a PathMetadata object, from path to metadata. :wink:
    base_dir, dist_dir_name = os.path.split(dist_dir)
    metadata = pkg_resources.PathMetadata(base_dir, dist_dir)

    # Determine the correct Distribution object type.
    if dist_dir.endswith(".egg-info"):
        dist_cls = pkg_resources.Distribution
        dist_name = os.path.splitext(dist_dir_name)[0]
    else:
        assert dist_dir.endswith(".dist-info")
        dist_cls = pkg_resources.DistInfoDistribution
        dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0]

    return dist_cls(
        base_dir,
        project_name=dist_name,
        metadata=metadata,
    )


class InstallRequirement:
    """
    Represents something that may be installed later on, may have information
@@ -99,19 +77,21 @@ class InstallRequirement:
        markers: Optional[Marker] = None,
        use_pep517: Optional[bool] = None,
        isolated: bool = False,
        install_options: Optional[List[str]] = None,
        *,
        global_options: Optional[List[str]] = None,
        hash_options: Optional[Dict[str, List[str]]] = None,
        config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
        constraint: bool = False,
        extras: Iterable[str] = (),
        extras: Collection[str] = (),
        user_supplied: bool = False,
        permit_editable_wheels: bool = False,
    ) -> None:
        assert req is None or isinstance(req, Requirement), req
        self.req = req
        self.comes_from = comes_from
        self.constraint = constraint
        self.editable = editable
        self.legacy_install_reason: Optional[int] = None
        self.permit_editable_wheels = permit_editable_wheels

        # source_dir is the local directory where the linked requirement is
        # located, or unpacked. In case unpacking is needed, creating and
@@ -122,15 +102,21 @@ class InstallRequirement:
        if self.editable:
            assert link
            if link.is_file:
                self.source_dir = os.path.normpath(
                    os.path.abspath(link.file_path)
                )
                self.source_dir = os.path.normpath(os.path.abspath(link.file_path))

        if link is None and req and req.url:
            # PEP 508 URL requirement
            link = Link(req.url)
        self.link = self.original_link = link
        self.original_link_is_in_wheel_cache = False

        # When this InstallRequirement is a wheel obtained from the cache of locally
        # built wheels, this is the source link corresponding to the cache entry, which
        # was used to download and build the cached wheel.
        self.cached_wheel_source_link: Optional[Link] = None

        # Information about the location of the artifact that was downloaded. This
        # property is guaranteed to be set in resolver results.
        self.download_info: Optional[DirectUrl] = None

        # Path to any downloaded or already-existing package.
        self.local_file_path: Optional[str] = None
@@ -140,18 +126,15 @@ class InstallRequirement:
        if extras:
            self.extras = extras
        elif req:
            self.extras = {
                pkg_resources.safe_extra(extra) for extra in req.extras
            }
            self.extras = {safe_extra(extra) for extra in req.extras}
        else:
            self.extras = set()
        if markers is None and req:
            markers = req.marker
        self.markers = markers

        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by: Optional[Distribution] = None
        # This holds the Distribution object if this requirement is already installed.
        self.satisfied_by: Optional[BaseDistribution] = None
        # Whether the installation process should try to uninstall an existing
        # distribution before installing this requirement.
        self.should_reinstall = False
@@ -160,9 +143,9 @@ class InstallRequirement:
        # Set to True after successful installation
        self.install_succeeded: Optional[bool] = None
        # Supplied options
        self.install_options = install_options if install_options else []
        self.global_options = global_options if global_options else []
        self.hash_options = hash_options if hash_options else {}
        self.config_settings = config_settings
        # Set to True after successful preparation of this requirement
        self.prepared = False
        # User-supplied requirements are explicitly requested for installation
@@ -186,7 +169,7 @@ class InstallRequirement:
        self.requirements_to_check: List[str] = []

        # The PEP 517 backend we should use to build the project
        self.pep517_backend: Optional[Pep517HookCaller] = None
        self.pep517_backend: Optional[BuildBackendHookCaller] = None

        # Are we using PEP 517 for this requirement?
        # After pyproject.toml has been loaded, the only valid values are True
@@ -202,36 +185,38 @@ class InstallRequirement:
        if self.req:
            s = str(self.req)
            if self.link:
                s += ' from {}'.format(redact_auth_from_url(self.link.url))
                s += " from {}".format(redact_auth_from_url(self.link.url))
        elif self.link:
            s = redact_auth_from_url(self.link.url)
        else:
            s = '<InstallRequirement>'
            s = "<InstallRequirement>"
        if self.satisfied_by is not None:
            s += ' in {}'.format(display_path(self.satisfied_by.location))
            if self.satisfied_by.location is not None:
                location = display_path(self.satisfied_by.location)
            else:
                location = "<memory>"
            s += f" in {location}"
        if self.comes_from:
            if isinstance(self.comes_from, str):
                comes_from: Optional[str] = self.comes_from
            else:
                comes_from = self.comes_from.from_path()
            if comes_from:
                s += f' (from {comes_from})'
                s += f" (from {comes_from})"
        return s

    def __repr__(self) -> str:
        return '<{} object: {} editable={!r}>'.format(
            self.__class__.__name__, str(self), self.editable)
        return "<{} object: {} editable={!r}>".format(
            self.__class__.__name__, str(self), self.editable
        )

    def format_debug(self) -> str:
        """An un-tested helper for getting state, for debugging.
        """
        """An un-tested helper for getting state, for debugging."""
        attributes = vars(self)
        names = sorted(attributes)

        state = (
            "{}={!r}".format(attr, attributes[attr]) for attr in sorted(names)
        )
        return '<{name} object: {{{state}}}>'.format(
        state = ("{}={!r}".format(attr, attributes[attr]) for attr in sorted(names))
        return "<{name} object: {{{state}}}>".format(
            name=self.__class__.__name__,
            state=", ".join(state),
        )
@@ -241,7 +226,19 @@ class InstallRequirement:
    def name(self) -> Optional[str]:
        if self.req is None:
            return None
        return pkg_resources.safe_name(self.req.name)
        return self.req.name

    @functools.lru_cache()  # use cached_property in python 3.8+
    def supports_pyproject_editable(self) -> bool:
        if not self.use_pep517:
            return False
        assert self.pep517_backend
        with self.build_env:
            runner = runner_with_spinner_message(
                "Checking if build backend supports build_editable"
            )
            with self.pep517_backend.subprocess_runner(runner):
                return "build_editable" in self.pep517_backend._supported_features()

    @property
    def specifier(self) -> SpecifierSet:
@@ -254,18 +251,17 @@ class InstallRequirement:
        For example, some-package==1.2 is pinned; some-package>1.2 is not.
        """
        specifiers = self.specifier
        return (len(specifiers) == 1 and
                next(iter(specifiers)).operator in {'==', '==='})
        return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="}

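# --- Editor's sketch (illustration, not part of the commit) -------------------
# "Pinned" above means exactly one specifier using == or ===; the same test,
# written against the vendored SpecifierSet directly:
from pip._vendor.packaging.specifiers import SpecifierSet

def _is_pinned(specifiers: SpecifierSet) -> bool:
    return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="}

assert _is_pinned(SpecifierSet("==1.2"))
assert not _is_pinned(SpecifierSet(">1.2"))
assert not _is_pinned(SpecifierSet(">=1.2,<2.0"))
# ------------------------------------------------------------------------------
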
    def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool:
        if not extras_requested:
            # Provide an extra to safely evaluate the markers
            # without matching any extra
            extras_requested = ('',)
            extras_requested = ("",)
        if self.markers is not None:
            return any(
                self.markers.evaluate({'extra': extra})
                for extra in extras_requested)
                self.markers.evaluate({"extra": extra}) for extra in extras_requested
            )
        else:
            return True

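# --- Editor's sketch (illustration, not part of the commit) -------------------
# match_markers() above evaluates the PEP 508 marker once per requested extra;
# passing extra == "" is the trick that matches "no extra requested".
from pip._vendor.packaging.markers import Marker

marker = Marker('extra == "test"')
print(marker.evaluate({"extra": "test"}))  # True: the "test" extra was requested
print(marker.evaluate({"extra": ""}))      # False: no extra requested
# ------------------------------------------------------------------------------
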
@@ -295,14 +291,18 @@ class InstallRequirement:

        """
        good_hashes = self.hash_options.copy()
        link = self.link if trust_internet else self.original_link
        if trust_internet:
            link = self.link
        elif self.original_link and self.user_supplied:
            link = self.original_link
        else:
            link = None
        if link and link.hash:
            good_hashes.setdefault(link.hash_name, []).append(link.hash)
        return Hashes(good_hashes)

    def from_path(self) -> Optional[str]:
        """Format a nice indicator to show where this "comes from"
        """
        """Format a nice indicator to show where this "comes from" """
        if self.req is None:
            return None
        s = str(self.req)
@@ -312,7 +312,7 @@ class InstallRequirement:
        else:
            comes_from = self.comes_from.from_path()
        if comes_from:
            s += '->' + comes_from
            s += "->" + comes_from
        return s

    def ensure_build_location(
@@ -345,7 +345,7 @@ class InstallRequirement:
        # FIXME: Is there a better place to create the build_dir? (hg and bzr
        # need this)
        if not os.path.exists(build_dir):
            logger.debug('Creating directory %s', build_dir)
            logger.debug("Creating directory %s", build_dir)
            os.makedirs(build_dir)
        actual_build_dir = os.path.join(build_dir, dir_name)
        # `None` indicates that we respect the globally-configured deletion
@@ -359,8 +359,7 @@ class InstallRequirement:
        ).path

    def _set_requirement(self) -> None:
        """Set requirement after generating metadata.
        """
        """Set requirement after generating metadata."""
        assert self.req is None
        assert self.metadata is not None
        assert self.source_dir is not None
@@ -372,11 +371,13 @@ class InstallRequirement:
            op = "==="

        self.req = Requirement(
            "".join([
                self.metadata["Name"],
                op,
                self.metadata["Version"],
            ])
            "".join(
                [
                    self.metadata["Name"],
                    op,
                    self.metadata["Version"],
                ]
            )
        )

    def warn_on_mismatching_name(self) -> None:
@@ -387,10 +388,12 @@ class InstallRequirement:

        # If we're here, there's a mismatch. Log a warning about it.
        logger.warning(
            'Generating metadata for package %s '
            'produced metadata for project name %s. Fix your '
            '#egg=%s fragments.',
            self.name, metadata_name, self.name
            "Generating metadata for package %s "
            "produced metadata for project name %s. Fix your "
            "#egg=%s fragments.",
            self.name,
            metadata_name,
            self.name,
        )
        self.req = Requirement(metadata_name)

@@ -401,30 +404,24 @@ class InstallRequirement:
        """
        if self.req is None:
            return
        existing_dist = get_distribution(self.req.name)
        existing_dist = get_default_environment().get_distribution(self.req.name)
        if not existing_dist:
            return

        # pkg_resources may contain a different copy of packaging.version from
        # pip if the downstream distributor does a poor job debundling pip.
        # We avoid existing_dist.parsed_version and let SpecifierSet.contains
        # parse the version instead.
        existing_version = existing_dist.version
        version_compatible = (
            existing_version is not None and
            self.req.specifier.contains(existing_version, prereleases=True)
        version_compatible = self.req.specifier.contains(
            existing_dist.version,
            prereleases=True,
        )
        if not version_compatible:
            self.satisfied_by = None
            if use_user_site:
                if dist_in_usersite(existing_dist):
                if existing_dist.in_usersite:
                    self.should_reinstall = True
                elif (running_under_virtualenv() and
                        dist_in_site_packages(existing_dist)):
                elif running_under_virtualenv() and existing_dist.in_site_packages:
                    raise InstallationError(
                        "Will not install to the user site because it will "
                        "lack sys.path precedence to {} in {}".format(
                            existing_dist.project_name, existing_dist.location)
                        f"Will not install to the user site because it will "
                        f"lack sys.path precedence to {existing_dist.raw_name} "
                        f"in {existing_dist.location}"
                    )
            else:
                self.should_reinstall = True
@@ -444,20 +441,33 @@ class InstallRequirement:
            return False
        return self.link.is_wheel

    @property
    def is_wheel_from_cache(self) -> bool:
        # When True, it means that this InstallRequirement is a local wheel file in the
        # cache of locally built wheels.
        return self.cached_wheel_source_link is not None

    # Things valid for sdists
    @property
    def unpacked_source_directory(self) -> str:
        return os.path.join(
            self.source_dir,
            self.link and self.link.subdirectory_fragment or '')
            self.source_dir, self.link and self.link.subdirectory_fragment or ""
        )

    @property
    def setup_py_path(self) -> str:
        assert self.source_dir, f"No source dir for {self}"
        setup_py = os.path.join(self.unpacked_source_directory, 'setup.py')
        setup_py = os.path.join(self.unpacked_source_directory, "setup.py")

        return setup_py

    @property
    def setup_cfg_path(self) -> str:
        assert self.source_dir, f"No source dir for {self}"
        setup_cfg = os.path.join(self.unpacked_source_directory, "setup.cfg")

        return setup_cfg

    @property
    def pyproject_toml_path(self) -> str:
        assert self.source_dir, f"No source dir for {self}"
@@ -472,13 +482,19 @@ class InstallRequirement:
        follow the PEP 517 or legacy (setup.py) code path.
        """
        pyproject_toml_data = load_pyproject_toml(
            self.use_pep517,
            self.pyproject_toml_path,
            self.setup_py_path,
            str(self)
            self.use_pep517, self.pyproject_toml_path, self.setup_py_path, str(self)
        )

        if pyproject_toml_data is None:
            if self.config_settings:
                deprecated(
                    reason=f"Config settings are ignored for project {self}.",
                    replacement=(
                        "to use --use-pep517 or add a "
                        "pyproject.toml file to the project"
                    ),
                    gone_in="23.3",
                )
            self.use_pep517 = False
            return

@@ -486,46 +502,69 @@ class InstallRequirement:
        requires, backend, check, backend_path = pyproject_toml_data
        self.requirements_to_check = check
        self.pyproject_requires = requires
        self.pep517_backend = Pep517HookCaller(
            self.unpacked_source_directory, backend, backend_path=backend_path,
        self.pep517_backend = ConfiguredBuildBackendHookCaller(
            self,
            self.unpacked_source_directory,
            backend,
            backend_path=backend_path,
        )

    def _generate_metadata(self) -> str:
        """Invokes metadata generator functions, with the required arguments.
    def isolated_editable_sanity_check(self) -> None:
"""Check that an editable requirement if valid for use with PEP 517/518.
|
||||
|
||||
This verifies that an editable that has a pyproject.toml either supports PEP 660
|
||||
or as a setup.py or a setup.cfg
|
||||
"""
|
||||
if not self.use_pep517:
|
||||
assert self.unpacked_source_directory
|
||||
|
||||
if not os.path.exists(self.setup_py_path):
|
||||
raise InstallationError(
|
||||
f'File "setup.py" not found for legacy project {self}.'
|
||||
)
|
||||
|
||||
return generate_metadata_legacy(
|
||||
build_env=self.build_env,
|
||||
setup_py_path=self.setup_py_path,
|
||||
source_dir=self.unpacked_source_directory,
|
||||
isolated=self.isolated,
|
||||
details=self.name or f"from {self.link}"
|
||||
if (
|
||||
self.editable
|
||||
and self.use_pep517
|
||||
and not self.supports_pyproject_editable()
|
||||
and not os.path.isfile(self.setup_py_path)
|
||||
and not os.path.isfile(self.setup_cfg_path)
|
||||
):
|
||||
raise InstallationError(
|
||||
f"Project {self} has a 'pyproject.toml' and its build "
|
||||
f"backend is missing the 'build_editable' hook. Since it does not "
|
||||
f"have a 'setup.py' nor a 'setup.cfg', "
|
||||
f"it cannot be installed in editable mode. "
|
||||
f"Consider using a build backend that supports PEP 660."
|
||||
)
|
||||
|
||||
assert self.pep517_backend is not None
|
||||
|
||||
return generate_metadata(
|
||||
build_env=self.build_env,
|
||||
backend=self.pep517_backend,
|
||||
)
|
||||
|
||||
def prepare_metadata(self) -> None:
|
||||
"""Ensure that project metadata is available.
|
||||
|
||||
Under PEP 517, call the backend hook to prepare the metadata.
|
||||
Under PEP 517 and PEP 660, call the backend hook to prepare the metadata.
|
||||
Under legacy processing, call setup.py egg-info.
|
||||
"""
|
||||
assert self.source_dir
|
||||
details = self.name or f"from {self.link}"
|
||||
|
||||
with indent_log():
|
||||
self.metadata_directory = self._generate_metadata()
|
||||
if self.use_pep517:
|
||||
assert self.pep517_backend is not None
|
||||
if (
|
||||
self.editable
|
||||
and self.permit_editable_wheels
|
||||
and self.supports_pyproject_editable()
|
||||
):
|
||||
self.metadata_directory = generate_editable_metadata(
|
||||
build_env=self.build_env,
|
||||
backend=self.pep517_backend,
|
||||
details=details,
|
||||
)
|
||||
else:
|
||||
self.metadata_directory = generate_metadata(
|
||||
build_env=self.build_env,
|
||||
backend=self.pep517_backend,
|
||||
details=details,
|
||||
)
|
||||
else:
|
||||
self.metadata_directory = generate_metadata_legacy(
|
||||
build_env=self.build_env,
|
||||
setup_py_path=self.setup_py_path,
|
||||
source_dir=self.unpacked_source_directory,
|
||||
isolated=self.isolated,
|
||||
details=details,
|
||||
)
|
||||
|
||||
# Act on the newly generated metadata, based on the name and version.
|
||||
if not self.name:
|
||||
@@ -537,26 +576,35 @@ class InstallRequirement:
|
||||
|
||||
@property
|
||||
def metadata(self) -> Any:
|
||||
if not hasattr(self, '_metadata'):
|
||||
self._metadata = get_metadata(self.get_dist())
|
||||
if not hasattr(self, "_metadata"):
|
||||
self._metadata = self.get_dist().metadata
|
||||
|
||||
return self._metadata
|
||||
|
||||
def get_dist(self) -> Distribution:
|
||||
return _get_dist(self.metadata_directory)
|
||||
def get_dist(self) -> BaseDistribution:
|
||||
if self.metadata_directory:
|
||||
return get_directory_distribution(self.metadata_directory)
|
||||
elif self.local_file_path and self.is_wheel:
|
||||
return get_wheel_distribution(
|
||||
FilesystemWheel(self.local_file_path), canonicalize_name(self.name)
|
||||
)
|
||||
raise AssertionError(
|
||||
f"InstallRequirement {self} has no metadata directory and no wheel: "
|
||||
f"can't make a distribution."
|
||||
)
|
||||
|
||||
def assert_source_matches_version(self) -> None:
|
||||
assert self.source_dir
|
||||
version = self.metadata['version']
|
||||
version = self.metadata["version"]
|
||||
if self.req.specifier and version not in self.req.specifier:
|
||||
logger.warning(
|
||||
'Requested %s, but installing version %s',
|
||||
"Requested %s, but installing version %s",
|
||||
self,
|
||||
version,
|
||||
)
|
||||
else:
|
||||
logger.debug(
|
||||
'Source in %s has version %s, which satisfies requirement %s',
|
||||
"Source in %s has version %s, which satisfies requirement %s",
|
||||
display_path(self.source_dir),
|
||||
version,
|
||||
self,
|
||||
@@ -589,14 +637,13 @@ class InstallRequirement:
|
||||
def update_editable(self) -> None:
|
||||
if not self.link:
|
||||
logger.debug(
|
||||
"Cannot update repository at %s; repository location is "
|
||||
"unknown",
|
||||
"Cannot update repository at %s; repository location is unknown",
|
||||
self.source_dir,
|
||||
)
|
||||
return
|
||||
assert self.editable
|
||||
assert self.source_dir
|
||||
if self.link.scheme == 'file':
|
||||
if self.link.scheme == "file":
|
||||
# Static paths don't get updated
|
||||
return
|
||||
vcs_backend = vcs.get_backend_for_scheme(self.link.scheme)
|
||||
@@ -604,7 +651,7 @@ class InstallRequirement:
|
||||
# So here, if it's neither a path nor a valid VCS URL, it's a bug.
|
||||
assert vcs_backend, f"Unsupported VCS URL {self.link.url}"
|
||||
hidden_url = hide_url(self.link.url)
|
||||
vcs_backend.obtain(self.source_dir, url=hidden_url)
|
||||
vcs_backend.obtain(self.source_dir, url=hidden_url, verbosity=0)
|
||||
|
||||
# Top-level Actions
|
||||
def uninstall(
|
||||
@@ -623,29 +670,28 @@ class InstallRequirement:
|
||||
|
||||
"""
|
||||
assert self.req
|
||||
dist = get_distribution(self.req.name)
|
||||
dist = get_default_environment().get_distribution(self.req.name)
|
||||
if not dist:
|
||||
logger.warning("Skipping %s as it is not installed.", self.name)
|
||||
return None
|
||||
logger.info('Found existing installation: %s', dist)
|
||||
logger.info("Found existing installation: %s", dist)
|
||||
|
||||
uninstalled_pathset = UninstallPathSet.from_dist(dist)
|
||||
uninstalled_pathset.remove(auto_confirm, verbose)
|
||||
return uninstalled_pathset
|
||||
|
||||
def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str:
|
||||
|
||||
def _clean_zip_name(name: str, prefix: str) -> str:
|
||||
assert name.startswith(prefix + os.path.sep), (
|
||||
f"name {name!r} doesn't start with prefix {prefix!r}"
|
||||
)
|
||||
name = name[len(prefix) + 1:]
|
||||
name = name.replace(os.path.sep, '/')
|
||||
assert name.startswith(
|
||||
prefix + os.path.sep
|
||||
), f"name {name!r} doesn't start with prefix {prefix!r}"
|
||||
name = name[len(prefix) + 1 :]
|
||||
name = name.replace(os.path.sep, "/")
|
||||
return name
|
||||
|
||||
path = os.path.join(parentdir, path)
|
||||
name = _clean_zip_name(path, rootdir)
|
||||
return self.name + '/' + name
|
||||
return self.name + "/" + name
|
||||
|
||||
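# --- Editor's sketch (illustration, not part of the commit) -------------------
# _clean_zip_name() above maps a filesystem path under rootdir to a
# forward-slash archive member name; the same transformation in isolation:
import os

def _clean_zip_name_sketch(name: str, prefix: str) -> str:
    assert name.startswith(prefix + os.path.sep)
    return name[len(prefix) + 1 :].replace(os.path.sep, "/")

root = os.path.join("build", "pkg")
path = os.path.join(root, "src", "module.py")
print(_clean_zip_name_sketch(path, root))  # src/module.py
# ------------------------------------------------------------------------------
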
def archive(self, build_dir: Optional[str]) -> None:
|
||||
"""Saves archive to provided build_dir.
|
||||
@@ -657,68 +703,72 @@ class InstallRequirement:
|
||||
return
|
||||
|
||||
create_archive = True
|
||||
archive_name = '{}-{}.zip'.format(self.name, self.metadata["version"])
|
||||
archive_name = "{}-{}.zip".format(self.name, self.metadata["version"])
|
||||
archive_path = os.path.join(build_dir, archive_name)
|
||||
|
||||
if os.path.exists(archive_path):
|
||||
response = ask_path_exists(
|
||||
'The file {} exists. (i)gnore, (w)ipe, '
|
||||
'(b)ackup, (a)bort '.format(
|
||||
display_path(archive_path)),
|
||||
('i', 'w', 'b', 'a'))
|
||||
if response == 'i':
|
||||
"The file {} exists. (i)gnore, (w)ipe, "
|
||||
"(b)ackup, (a)bort ".format(display_path(archive_path)),
|
||||
("i", "w", "b", "a"),
|
||||
)
|
||||
if response == "i":
|
||||
create_archive = False
|
||||
elif response == 'w':
|
||||
logger.warning('Deleting %s', display_path(archive_path))
|
||||
elif response == "w":
|
||||
logger.warning("Deleting %s", display_path(archive_path))
|
||||
os.remove(archive_path)
|
||||
elif response == 'b':
|
||||
elif response == "b":
|
||||
dest_file = backup_dir(archive_path)
|
||||
logger.warning(
|
||||
'Backing up %s to %s',
|
||||
"Backing up %s to %s",
|
||||
display_path(archive_path),
|
||||
display_path(dest_file),
|
||||
)
|
||||
shutil.move(archive_path, dest_file)
|
||||
elif response == 'a':
|
||||
elif response == "a":
|
||||
sys.exit(-1)
|
||||
|
||||
if not create_archive:
|
||||
return
|
||||
|
||||
zip_output = zipfile.ZipFile(
|
||||
archive_path, 'w', zipfile.ZIP_DEFLATED, allowZip64=True,
|
||||
archive_path,
|
||||
"w",
|
||||
zipfile.ZIP_DEFLATED,
|
||||
allowZip64=True,
|
||||
)
|
||||
with zip_output:
|
||||
dir = os.path.normcase(
|
||||
os.path.abspath(self.unpacked_source_directory)
|
||||
)
|
||||
dir = os.path.normcase(os.path.abspath(self.unpacked_source_directory))
|
||||
for dirpath, dirnames, filenames in os.walk(dir):
|
||||
for dirname in dirnames:
|
||||
dir_arcname = self._get_archive_name(
|
||||
dirname, parentdir=dirpath, rootdir=dir,
|
||||
dirname,
|
||||
parentdir=dirpath,
|
||||
rootdir=dir,
|
||||
)
|
||||
zipdir = zipfile.ZipInfo(dir_arcname + '/')
|
||||
zipdir = zipfile.ZipInfo(dir_arcname + "/")
|
||||
zipdir.external_attr = 0x1ED << 16 # 0o755
|
||||
zip_output.writestr(zipdir, '')
|
||||
zip_output.writestr(zipdir, "")
|
||||
for filename in filenames:
|
||||
file_arcname = self._get_archive_name(
|
||||
filename, parentdir=dirpath, rootdir=dir,
|
||||
filename,
|
||||
parentdir=dirpath,
|
||||
rootdir=dir,
|
||||
)
|
||||
filename = os.path.join(dirpath, filename)
|
||||
zip_output.write(filename, file_arcname)
|
||||
|
||||
logger.info('Saved %s', display_path(archive_path))
|
||||
logger.info("Saved %s", display_path(archive_path))
|
||||
|
||||
def install(
|
||||
self,
|
||||
install_options: List[str],
|
||||
global_options: Optional[Sequence[str]] = None,
|
||||
root: Optional[str] = None,
|
||||
home: Optional[str] = None,
|
||||
prefix: Optional[str] = None,
warn_script_location: bool = True,
use_user_site: bool = False,
pycompile: bool = True
pycompile: bool = True,
) -> None:
scheme = get_scheme(
self.name,
@@ -729,11 +779,9 @@ class InstallRequirement:
prefix=prefix,
)

global_options = global_options if global_options is not None else []
if self.editable:
if self.editable and not self.is_wheel:
install_editable_legacy(
install_options,
global_options,
global_options=global_options if global_options is not None else [],
prefix=prefix,
home=home,
use_user_site=use_user_site,
@@ -746,79 +794,23 @@ class InstallRequirement:
self.install_succeeded = True
return

if self.is_wheel:
assert self.local_file_path
direct_url = None
if self.original_link:
direct_url = direct_url_from_link(
self.original_link,
self.source_dir,
self.original_link_is_in_wheel_cache,
)
install_wheel(
self.name,
self.local_file_path,
scheme=scheme,
req_description=str(self.req),
pycompile=pycompile,
warn_script_location=warn_script_location,
direct_url=direct_url,
requested=self.user_supplied,
)
self.install_succeeded = True
return
assert self.is_wheel
assert self.local_file_path

# TODO: Why don't we do this for editable installs?

# Extend the list of global and install options passed on to
# the setup.py call with the ones from the requirements file.
# Options specified in requirements file override those
# specified on the command line, since the last option given
# to setup.py is the one that is used.
global_options = list(global_options) + self.global_options
install_options = list(install_options) + self.install_options

try:
success = install_legacy(
install_options=install_options,
global_options=global_options,
root=root,
home=home,
prefix=prefix,
use_user_site=use_user_site,
pycompile=pycompile,
scheme=scheme,
setup_py_path=self.setup_py_path,
isolated=self.isolated,
req_name=self.name,
build_env=self.build_env,
unpacked_source_directory=self.unpacked_source_directory,
req_description=str(self.req),
)
except LegacyInstallFailure as exc:
self.install_succeeded = False
six.reraise(*exc.parent)
except Exception:
self.install_succeeded = True
raise

self.install_succeeded = success

if success and self.legacy_install_reason == 8368:
deprecated(
reason=(
"{} was installed using the legacy 'setup.py install' "
"method, because a wheel could not be built for it.".
format(self.name)
),
replacement="to fix the wheel build issue reported above",
gone_in=None,
issue=8368,
)
install_wheel(
self.name,
self.local_file_path,
scheme=scheme,
req_description=str(self.req),
pycompile=pycompile,
warn_script_location=warn_script_location,
direct_url=self.download_info if self.original_link else None,
requested=self.user_supplied,
)
self.install_succeeded = True


def check_invalid_constraint_type(req: InstallRequirement) -> str:

# Check for unsupported forms
problem = ""
if not req.name:
@@ -844,3 +836,32 @@ def check_invalid_constraint_type(req: InstallRequirement) -> str:
)

return problem


def _has_option(options: Values, reqs: List[InstallRequirement], option: str) -> bool:
if getattr(options, option, None):
return True
for req in reqs:
if getattr(req, option, None):
return True
return False


def check_legacy_setup_py_options(
options: Values,
reqs: List[InstallRequirement],
) -> None:
has_build_options = _has_option(options, reqs, "build_options")
has_global_options = _has_option(options, reqs, "global_options")
if has_build_options or has_global_options:
deprecated(
reason="--build-option and --global-option are deprecated.",
issue=11859,
replacement="to use --config-settings",
gone_in="23.3",
)
logger.warning(
"Implying --no-binary=:all: due to the presence of "
"--build-option / --global-option. "
)
options.format_control.disallow_binaries()
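
The `_has_option` helper added here checks both the command line and each requirement line for a legacy option. A minimal sketch of the same behavior, using `SimpleNamespace` stand-ins instead of pip's real `Values` and `InstallRequirement` objects (the attribute values below are hypothetical):

from types import SimpleNamespace

def _has_option(options, reqs, option):
    # True if the option is set globally or on any single requirement.
    if getattr(options, option, None):
        return True
    return any(getattr(req, option, None) for req in reqs)

opts = SimpleNamespace(build_options=[], global_options=["--quiet"])
reqs = [SimpleNamespace(build_options=[], global_options=[])]
print(_has_option(opts, reqs, "build_options"))   # False: empty everywhere
print(_has_option(opts, reqs, "global_options"))  # True: set on the command line
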

@@ -1,22 +1,17 @@
import logging
from collections import OrderedDict
from typing import Dict, Iterable, List, Optional, Tuple
from typing import Dict, List

from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.exceptions import InstallationError
from pip._internal.models.wheel import Wheel
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils import compatibility_tags

logger = logging.getLogger(__name__)


class RequirementSet:

def __init__(self, check_supported_wheels: bool = True) -> None:
"""Create a RequirementSet.
"""
"""Create a RequirementSet."""

self.requirements: Dict[str, InstallRequirement] = OrderedDict()
self.check_supported_wheels = check_supported_wheels
@@ -28,7 +23,7 @@ class RequirementSet:
(req for req in self.requirements.values() if not req.comes_from),
key=lambda req: canonicalize_name(req.name or ""),
)
return ' '.join(str(req.req) for req in requirements)
return " ".join(str(req.req) for req in requirements)

def __repr__(self) -> str:
requirements = sorted(
@@ -36,11 +31,11 @@ class RequirementSet:
key=lambda req: canonicalize_name(req.name or ""),
)

format_string = '<{classname} object; {count} requirement(s): {reqs}>'
format_string = "<{classname} object; {count} requirement(s): {reqs}>"
return format_string.format(
classname=self.__class__.__name__,
count=len(requirements),
reqs=', '.join(str(req.req) for req in requirements),
reqs=", ".join(str(req.req) for req in requirements),
)

def add_unnamed_requirement(self, install_req: InstallRequirement) -> None:
@@ -53,128 +48,12 @@ class RequirementSet:
project_name = canonicalize_name(install_req.name)
self.requirements[project_name] = install_req

def add_requirement(
self,
install_req: InstallRequirement,
parent_req_name: Optional[str] = None,
extras_requested: Optional[Iterable[str]] = None
) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]:
"""Add install_req as a requirement to install.

:param parent_req_name: The name of the requirement that needed this
added. The name is used because when multiple unnamed requirements
resolve to the same name, we could otherwise end up with dependency
links that point outside the Requirements set. parent_req must
already be added. Note that None implies that this is a user
supplied requirement, vs an inferred one.
:param extras_requested: an iterable of extras used to evaluate the
environment markers.
:return: Additional requirements to scan. That is either [] if
the requirement is not applicable, or [install_req] if the
requirement is applicable and has just been added.
"""
# If the markers do not match, ignore this requirement.
if not install_req.match_markers(extras_requested):
logger.info(
"Ignoring %s: markers '%s' don't match your environment",
install_req.name, install_req.markers,
)
return [], None

# If the wheel is not supported, raise an error.
# Should check this after filtering out based on environment markers to
# allow specifying different wheels based on the environment/OS, in a
# single requirements file.
if install_req.link and install_req.link.is_wheel:
wheel = Wheel(install_req.link.filename)
tags = compatibility_tags.get_supported()
if (self.check_supported_wheels and not wheel.supported(tags)):
raise InstallationError(
"{} is not a supported wheel on this platform.".format(
wheel.filename)
)

# This next bit is really a sanity check.
assert not install_req.user_supplied or parent_req_name is None, (
"a user supplied req shouldn't have a parent"
)

# Unnamed requirements are scanned again and the requirement won't be
# added as a dependency until after scanning.
if not install_req.name:
self.add_unnamed_requirement(install_req)
return [install_req], None

try:
existing_req: Optional[InstallRequirement] = self.get_requirement(
install_req.name)
except KeyError:
existing_req = None

has_conflicting_requirement = (
parent_req_name is None and
existing_req and
not existing_req.constraint and
existing_req.extras == install_req.extras and
existing_req.req and
install_req.req and
existing_req.req.specifier != install_req.req.specifier
)
if has_conflicting_requirement:
raise InstallationError(
"Double requirement given: {} (already in {}, name={!r})"
.format(install_req, existing_req, install_req.name)
)

# When no existing requirement exists, add the requirement as a
# dependency and it will be scanned again after.
if not existing_req:
self.add_named_requirement(install_req)
# We'd want to rescan this requirement later
return [install_req], install_req

# Assume there's no need to scan, and that we've already
# encountered this for scanning.
if install_req.constraint or not existing_req.constraint:
return [], existing_req

does_not_satisfy_constraint = (
install_req.link and
not (
existing_req.link and
install_req.link.path == existing_req.link.path
)
)
if does_not_satisfy_constraint:
raise InstallationError(
"Could not satisfy constraints for '{}': "
"installation from path or url cannot be "
"constrained to a version".format(install_req.name)
)
# If we're now installing a constraint, mark the existing
# object for real installation.
existing_req.constraint = False
# If we're now installing a user supplied requirement,
# mark the existing object as such.
if install_req.user_supplied:
existing_req.user_supplied = True
existing_req.extras = tuple(sorted(
set(existing_req.extras) | set(install_req.extras)
))
logger.debug(
"Setting %s extras to: %s",
existing_req, existing_req.extras,
)
# Return the existing requirement for addition to the parent and
# scanning again.
return [existing_req], existing_req

def has_requirement(self, name: str) -> bool:
project_name = canonicalize_name(name)

return (
project_name in self.requirements and
not self.requirements[project_name].constraint
project_name in self.requirements
and not self.requirements[project_name].constraint
)

def get_requirement(self, name: str) -> InstallRequirement:
@@ -188,3 +67,16 @@ class RequirementSet:
@property
def all_requirements(self) -> List[InstallRequirement]:
return self.unnamed_requirements + list(self.requirements.values())

@property
def requirements_to_install(self) -> List[InstallRequirement]:
"""Return the list of requirements that need to be installed.

TODO remove this property together with the legacy resolver, since the new
resolver only returns requirements that need to be installed.
"""
return [
install_req
for install_req in self.all_requirements
if not install_req.constraint and not install_req.satisfied_by
]
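
When the same project is added twice, the `add_requirement` logic removed above promotes the existing constraint and unions the two extras sets. A minimal sketch of that merge rule, with plain tuples standing in for `InstallRequirement.extras`:

def merge_extras(existing, incoming):
    # Union the two extras sets, then sort for a stable, comparable value.
    return tuple(sorted(set(existing) | set(incoming)))

print(merge_extras(("security",), ("socks", "security")))
# ('security', 'socks')
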

@@ -1,130 +0,0 @@
import contextlib
import hashlib
import logging
import os
from types import TracebackType
from typing import Dict, Iterator, Optional, Set, Type, Union

from pip._internal.models.link import Link
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils.temp_dir import TempDirectory

logger = logging.getLogger(__name__)


@contextlib.contextmanager
def update_env_context_manager(**changes: str) -> Iterator[None]:
target = os.environ

# Save values from the target and change them.
non_existent_marker = object()
saved_values: Dict[str, Union[object, str]] = {}
for name, new_value in changes.items():
try:
saved_values[name] = target[name]
except KeyError:
saved_values[name] = non_existent_marker
target[name] = new_value

try:
yield
finally:
# Restore original values in the target.
for name, original_value in saved_values.items():
if original_value is non_existent_marker:
del target[name]
else:
assert isinstance(original_value, str)  # for mypy
target[name] = original_value


@contextlib.contextmanager
def get_requirement_tracker() -> Iterator["RequirementTracker"]:
root = os.environ.get('PIP_REQ_TRACKER')
with contextlib.ExitStack() as ctx:
if root is None:
root = ctx.enter_context(
TempDirectory(kind='req-tracker')
).path
ctx.enter_context(update_env_context_manager(PIP_REQ_TRACKER=root))
logger.debug("Initialized build tracking at %s", root)

with RequirementTracker(root) as tracker:
yield tracker


class RequirementTracker:

def __init__(self, root: str) -> None:
self._root = root
self._entries: Set[InstallRequirement] = set()
logger.debug("Created build tracker: %s", self._root)

def __enter__(self) -> "RequirementTracker":
logger.debug("Entered build tracker: %s", self._root)
return self

def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType]
) -> None:
self.cleanup()

def _entry_path(self, link: Link) -> str:
hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
return os.path.join(self._root, hashed)

def add(self, req: InstallRequirement) -> None:
"""Add an InstallRequirement to build tracking.
"""

assert req.link
# Get the file to write information about this requirement.
entry_path = self._entry_path(req.link)

# Try reading from the file. If it exists and can be read from, a build
# is already in progress, so a LookupError is raised.
try:
with open(entry_path) as fp:
contents = fp.read()
except FileNotFoundError:
pass
else:
message = '{} is already being built: {}'.format(
req.link, contents)
raise LookupError(message)

# If we're here, req should really not be building already.
assert req not in self._entries

# Start tracking this requirement.
with open(entry_path, 'w', encoding="utf-8") as fp:
fp.write(str(req))
self._entries.add(req)

logger.debug('Added %s to build tracker %r', req, self._root)

def remove(self, req: InstallRequirement) -> None:
"""Remove an InstallRequirement from build tracking.
"""

assert req.link
# Delete the created file and the corresponding entries.
os.unlink(self._entry_path(req.link))
self._entries.remove(req)

logger.debug('Removed %s from build tracker %r', req, self._root)

def cleanup(self) -> None:
for req in set(self._entries):
self.remove(req)

logger.debug("Removed build tracker: %r", self._root)

@contextlib.contextmanager
def track(self, req: InstallRequirement) -> Iterator[None]:
self.add(req)
yield
self.remove(req)
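
The deleted tracker above records "who is building what" as one file per link, named by a hash of the URL, so a second build of the same link can be refused. A minimal sketch of that entry-file protocol, independent of pip's types (the URL strings below are hypothetical; a simple `os.path.exists` check stands in for the tracker's open-then-read probe):

import hashlib
import os
import tempfile

root = tempfile.mkdtemp(prefix="build-tracker-")

def entry_path(url: str) -> str:
    # One file per link, named by a stable hash of the URL.
    return os.path.join(root, hashlib.sha224(url.encode()).hexdigest())

def add(url: str) -> None:
    path = entry_path(url)
    if os.path.exists(path):
        raise LookupError(f"{url} is already being built")
    with open(path, "w", encoding="utf-8") as fp:
        fp.write(url)

add("https://example.com/pkg-1.0.tar.gz")
try:
    add("https://example.com/pkg-1.0.tar.gz")  # second build attempt is refused
except LookupError as exc:
    print(exc)
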
@@ -1,67 +1,58 @@
import csv
import functools
import os
import sys
import sysconfig
from importlib.util import cache_from_source
from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Set, Tuple

from pip._vendor import pkg_resources
from pip._vendor.pkg_resources import Distribution
from typing import Any, Callable, Dict, Generator, Iterable, List, Optional, Set, Tuple

from pip._internal.exceptions import UninstallationError
from pip._internal.locations import get_bin_prefix, get_bin_user
from pip._internal.metadata import BaseDistribution
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.egg_link import egg_link_path_from_location
from pip._internal.utils.logging import getLogger, indent_log
from pip._internal.utils.misc import (
ask,
dist_in_usersite,
dist_is_local,
egg_link_path,
is_local,
normalize_path,
renames,
rmtree,
)
from pip._internal.utils.misc import ask, normalize_path, renames, rmtree
from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory
from pip._internal.utils.virtualenv import running_under_virtualenv

logger = getLogger(__name__)


def _script_names(dist: Distribution, script_name: str, is_gui: bool) -> List[str]:
def _script_names(
bin_dir: str, script_name: str, is_gui: bool
) -> Generator[str, None, None]:
"""Create the fully qualified name of the files created by
{console,gui}_scripts for the given ``dist``.
Returns the list of file names
"""
if dist_in_usersite(dist):
bin_dir = get_bin_user()
else:
bin_dir = get_bin_prefix()
exe_name = os.path.join(bin_dir, script_name)
paths_to_remove = [exe_name]
if WINDOWS:
paths_to_remove.append(exe_name + '.exe')
paths_to_remove.append(exe_name + '.exe.manifest')
if is_gui:
paths_to_remove.append(exe_name + '-script.pyw')
else:
paths_to_remove.append(exe_name + '-script.py')
return paths_to_remove
yield exe_name
if not WINDOWS:
return
yield f"{exe_name}.exe"
yield f"{exe_name}.exe.manifest"
if is_gui:
yield f"{exe_name}-script.pyw"
else:
yield f"{exe_name}-script.py"


def _unique(fn: Callable[..., Iterator[Any]]) -> Callable[..., Iterator[Any]]:
def _unique(
fn: Callable[..., Generator[Any, None, None]]
) -> Callable[..., Generator[Any, None, None]]:
@functools.wraps(fn)
def unique(*args: Any, **kw: Any) -> Iterator[Any]:
def unique(*args: Any, **kw: Any) -> Generator[Any, None, None]:
seen: Set[Any] = set()
for item in fn(*args, **kw):
if item not in seen:
seen.add(item)
yield item

return unique
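
A quick demonstration of what the `_unique` decorator above buys: duplicate yields from the wrapped generator are suppressed while first-seen order is preserved. A minimal sketch (the `paths` generator and its values are hypothetical):

import functools
from typing import Any, Callable, Generator, Set

def _unique(
    fn: Callable[..., Generator[Any, None, None]]
) -> Callable[..., Generator[Any, None, None]]:
    @functools.wraps(fn)
    def unique(*args: Any, **kw: Any) -> Generator[Any, None, None]:
        seen: Set[Any] = set()
        for item in fn(*args, **kw):
            if item not in seen:
                seen.add(item)
                yield item
    return unique

@_unique
def paths() -> Generator[str, None, None]:
    yield from ["/a", "/b", "/a", "/c", "/b"]

print(list(paths()))  # ['/a', '/b', '/c']
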


@_unique
def uninstallation_paths(dist: Distribution) -> Iterator[str]:
def uninstallation_paths(dist: BaseDistribution) -> Generator[str, None, None]:
"""
Yield all the uninstallation paths for dist based on RECORD-without-.py[co]

@@ -75,30 +66,32 @@ def uninstallation_paths(dist: Distribution) -> Iterator[str]:

https://packaging.python.org/specifications/recording-installed-packages/
"""
try:
r = csv.reader(dist.get_metadata_lines('RECORD'))
except FileNotFoundError as missing_record_exception:
msg = 'Cannot uninstall {dist}, RECORD file not found.'.format(dist=dist)
try:
installer = next(dist.get_metadata_lines('INSTALLER'))
if not installer or installer == 'pip':
raise ValueError()
except (OSError, StopIteration, ValueError):
dep = '{}=={}'.format(dist.project_name, dist.version)
msg += (" You might be able to recover from this via: "
"'pip install --force-reinstall --no-deps {}'.".format(dep))
location = dist.location
assert location is not None, "not installed"

entries = dist.iter_declared_entries()
if entries is None:
msg = "Cannot uninstall {dist}, RECORD file not found.".format(dist=dist)
installer = dist.installer
if not installer or installer == "pip":
dep = "{}=={}".format(dist.raw_name, dist.version)
msg += (
" You might be able to recover from this via: "
"'pip install --force-reinstall --no-deps {}'.".format(dep)
)
else:
msg += ' Hint: The package was installed by {}.'.format(installer)
raise UninstallationError(msg) from missing_record_exception
for row in r:
path = os.path.join(dist.location, row[0])
msg += " Hint: The package was installed by {}.".format(installer)
raise UninstallationError(msg)

for entry in entries:
path = os.path.join(location, entry)
yield path
if path.endswith('.py'):
if path.endswith(".py"):
dn, fn = os.path.split(path)
base = fn[:-3]
path = os.path.join(dn, base + '.pyc')
path = os.path.join(dn, base + ".pyc")
yield path
path = os.path.join(dn, base + '.pyo')
path = os.path.join(dn, base + ".pyo")
yield path
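
The loop above widens each RECORD entry ending in .py to its historical bytecode siblings, so stale compiled files are removed too. A minimal sketch of that expansion (output shown for POSIX path separators):

import os
from typing import Generator

def bytecode_siblings(path: str) -> Generator[str, None, None]:
    # Yield the path itself, plus legacy .pyc/.pyo names for .py sources.
    yield path
    if path.endswith(".py"):
        dn, fn = os.path.split(path)
        base = fn[:-3]
        yield os.path.join(dn, base + ".pyc")
        yield os.path.join(dn, base + ".pyo")

print(list(bytecode_siblings("pkg/module.py")))
# ['pkg/module.py', 'pkg/module.pyc', 'pkg/module.pyo']
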


@@ -112,8 +105,8 @@ def compact(paths: Iterable[str]) -> Set[str]:
short_paths: Set[str] = set()
for path in sorted(paths, key=len):
should_skip = any(
path.startswith(shortpath.rstrip("*")) and
path[len(shortpath.rstrip("*").rstrip(sep))] == sep
path.startswith(shortpath.rstrip("*"))
and path[len(shortpath.rstrip("*").rstrip(sep))] == sep
for shortpath in short_paths
)
if not should_skip:
@@ -136,18 +129,15 @@ def compress_for_rename(paths: Iterable[str]) -> Set[str]:
return os.path.normcase(os.path.join(*a))

for root in unchecked:
if any(os.path.normcase(root).startswith(w)
for w in wildcards):
if any(os.path.normcase(root).startswith(w) for w in wildcards):
# This directory has already been handled.
continue

all_files: Set[str] = set()
all_subdirs: Set[str] = set()
for dirname, subdirs, files in os.walk(root):
all_subdirs.update(norm_join(root, dirname, d)
for d in subdirs)
all_files.update(norm_join(root, dirname, f)
for f in files)
all_subdirs.update(norm_join(root, dirname, d) for d in subdirs)
all_files.update(norm_join(root, dirname, f) for f in files)
# If all the files we found are in our remaining set of files to
# remove, then remove them from the latter set and add a wildcard
# for the directory.
@@ -196,14 +186,14 @@ def compress_for_output_listing(paths: Iterable[str]) -> Tuple[Set[str], Set[str
continue

file_ = os.path.join(dirpath, fname)
if (os.path.isfile(file_) and
os.path.normcase(file_) not in _normcased_files):
if (
os.path.isfile(file_)
and os.path.normcase(file_) not in _normcased_files
):
# We are skipping this file. Add it to the set.
will_skip.add(file_)

will_remove = files | {
os.path.join(folder, "*") for folder in folders
}
will_remove = files | {os.path.join(folder, "*") for folder in folders}

return will_remove, will_skip
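
`compact` above keeps only the shortest covering prefixes so nested paths are not listed twice. A minimal sketch of the same idea, simplified to plain '/'-separated prefix matching (the real code also handles normcase and trailing "*" wildcards):

def compact(paths):
    # Keep a path only if no already-kept shorter path contains it.
    short = set()
    for path in sorted(paths, key=len):
        if not any(path == p or path.startswith(p + "/") for p in short):
            short.add(path)
    return short

print(compact({"/usr/lib/pkg", "/usr/lib/pkg/sub", "/usr/lib/pkg/sub/file.py"}))
# {'/usr/lib/pkg'}
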

@@ -211,6 +201,7 @@ def compress_for_output_listing(paths: Iterable[str]) -> Tuple[Set[str], Set[str
class StashedUninstallPathSet:
"""A set of file rename operations to stash files while
tentatively uninstalling them."""

def __init__(self) -> None:
# Mapping from source file root to [Adjacent]TempDirectory
# for files under that directory.
@@ -252,7 +243,7 @@ class StashedUninstallPathSet:
else:
# Did not find any suitable root
head = os.path.dirname(path)
save_dir = TempDirectory(kind='uninstall')
save_dir = TempDirectory(kind="uninstall")
self._save_dirs[head] = save_dir

relpath = os.path.relpath(path, head)
@@ -271,7 +262,7 @@ class StashedUninstallPathSet:
new_path = self._get_file_stash(path)

self._moves.append((path, new_path))
if (path_is_dir and os.path.isdir(new_path)):
if path_is_dir and os.path.isdir(new_path):
# If we're moving a directory, we need to
# remove the destination first or else it will be
# moved to inside the existing directory.
@@ -295,7 +286,7 @@ class StashedUninstallPathSet:

for new_path, path in self._moves:
try:
logger.debug('Replacing %s from %s', new_path, path)
logger.debug("Replacing %s from %s", new_path, path)
if os.path.isfile(new_path) or os.path.islink(new_path):
os.unlink(new_path)
elif os.path.isdir(new_path):
@@ -315,12 +306,17 @@ class StashedUninstallPathSet:
class UninstallPathSet:
"""A set of file paths to be removed in the uninstallation of a
requirement."""
def __init__(self, dist: Distribution) -> None:
self.paths: Set[str] = set()

def __init__(self, dist: BaseDistribution) -> None:
self._paths: Set[str] = set()
self._refuse: Set[str] = set()
self.pth: Dict[str, UninstallPthEntries] = {}
self.dist = dist
self._pth: Dict[str, UninstallPthEntries] = {}
self._dist = dist
self._moved_paths = StashedUninstallPathSet()
# Create local cache of normalize_path results. Creating an UninstallPathSet
# can result in hundreds/thousands of redundant calls to normalize_path with
# the same args, which hurts performance.
self._normalize_path_cached = functools.lru_cache()(normalize_path)

def _permitted(self, path: str) -> bool:
"""
@@ -328,70 +324,70 @@ class UninstallPathSet:
remove/modify, False otherwise.

"""
return is_local(path)
# aka is_local, but caching normalized sys.prefix
if not running_under_virtualenv():
return True
return path.startswith(self._normalize_path_cached(sys.prefix))
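
Wrapping a module-level helper with `functools.lru_cache` per instance, as `__init__` does above, memoizes repeated calls without sharing a cache across uninstalls. A minimal sketch of the pattern, with a hypothetical `normalize` helper standing in for pip's `normalize_path`:

import functools
import os.path

def normalize(path: str) -> str:
    # Stand-in for pip's normalize_path; imagine this being expensive.
    return os.path.normcase(os.path.realpath(path))

class Example:
    def __init__(self) -> None:
        # Per-instance cache: repeated paths are normalized only once.
        self._normalize_cached = functools.lru_cache()(normalize)

ex = Example()
ex._normalize_cached("/tmp/../tmp/x")
ex._normalize_cached("/tmp/../tmp/x")  # served from the cache
print(ex._normalize_cached.cache_info().hits)  # 1
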

def add(self, path: str) -> None:
head, tail = os.path.split(path)

# we normalize the head to resolve parent directory symlinks, but not
# the tail, since we only want to uninstall symlinks, not their targets
path = os.path.join(normalize_path(head), os.path.normcase(tail))
path = os.path.join(self._normalize_path_cached(head), os.path.normcase(tail))

if not os.path.exists(path):
return
if self._permitted(path):
self.paths.add(path)
self._paths.add(path)
else:
self._refuse.add(path)

# __pycache__ files can show up after 'installed-files.txt' is created,
# due to imports
if os.path.splitext(path)[1] == '.py':
if os.path.splitext(path)[1] == ".py":
self.add(cache_from_source(path))

def add_pth(self, pth_file: str, entry: str) -> None:
pth_file = normalize_path(pth_file)
pth_file = self._normalize_path_cached(pth_file)
if self._permitted(pth_file):
if pth_file not in self.pth:
self.pth[pth_file] = UninstallPthEntries(pth_file)
self.pth[pth_file].add(entry)
if pth_file not in self._pth:
self._pth[pth_file] = UninstallPthEntries(pth_file)
self._pth[pth_file].add(entry)
else:
self._refuse.add(pth_file)

def remove(self, auto_confirm: bool = False, verbose: bool = False) -> None:
"""Remove paths in ``self.paths`` with confirmation (unless
"""Remove paths in ``self._paths`` with confirmation (unless
``auto_confirm`` is True)."""

if not self.paths:
if not self._paths:
logger.info(
"Can't uninstall '%s'. No files were found to uninstall.",
self.dist.project_name,
self._dist.raw_name,
)
return

dist_name_version = (
self.dist.project_name + "-" + self.dist.version
)
logger.info('Uninstalling %s:', dist_name_version)
dist_name_version = f"{self._dist.raw_name}-{self._dist.version}"
logger.info("Uninstalling %s:", dist_name_version)

with indent_log():
if auto_confirm or self._allowed_to_proceed(verbose):
moved = self._moved_paths

for_rename = compress_for_rename(self.paths)
for_rename = compress_for_rename(self._paths)

for path in sorted(compact(for_rename)):
moved.stash(path)
logger.verbose('Removing file or directory %s', path)
logger.verbose("Removing file or directory %s", path)

for pth in self.pth.values():
for pth in self._pth.values():
pth.remove()

logger.info('Successfully uninstalled %s', dist_name_version)
logger.info("Successfully uninstalled %s", dist_name_version)

def _allowed_to_proceed(self, verbose: bool) -> bool:
"""Display which files would be deleted and prompt for confirmation
"""
"""Display which files would be deleted and prompt for confirmation"""

def _display(msg: str, paths: Iterable[str]) -> None:
if not paths:
@@ -403,32 +399,32 @@ class UninstallPathSet:
logger.info(path)

if not verbose:
will_remove, will_skip = compress_for_output_listing(self.paths)
will_remove, will_skip = compress_for_output_listing(self._paths)
else:
# In verbose mode, display all the files that are going to be
# deleted.
will_remove = set(self.paths)
will_remove = set(self._paths)
will_skip = set()

_display('Would remove:', will_remove)
_display('Would not remove (might be manually added):', will_skip)
_display('Would not remove (outside of prefix):', self._refuse)
_display("Would remove:", will_remove)
_display("Would not remove (might be manually added):", will_skip)
_display("Would not remove (outside of prefix):", self._refuse)
if verbose:
_display('Will actually move:', compress_for_rename(self.paths))
_display("Will actually move:", compress_for_rename(self._paths))

return ask('Proceed (Y/n)? ', ('y', 'n', '')) != 'n'
return ask("Proceed (Y/n)? ", ("y", "n", "")) != "n"

def rollback(self) -> None:
"""Rollback the changes previously made by remove()."""
if not self._moved_paths.can_rollback:
logger.error(
"Can't roll back %s; was not uninstalled",
self.dist.project_name,
self._dist.raw_name,
)
return
logger.info('Rolling back uninstall of %s', self.dist.project_name)
logger.info("Rolling back uninstall of %s", self._dist.raw_name)
self._moved_paths.rollback()
for pth in self.pth.values():
for pth in self._pth.values():
pth.rollback()

def commit(self) -> None:
@@ -436,132 +432,161 @@ class UninstallPathSet:
self._moved_paths.commit()

@classmethod
def from_dist(cls, dist: Distribution) -> "UninstallPathSet":
dist_path = normalize_path(dist.location)
if not dist_is_local(dist):
def from_dist(cls, dist: BaseDistribution) -> "UninstallPathSet":
dist_location = dist.location
info_location = dist.info_location
if dist_location is None:
logger.info(
"Not uninstalling %s since it is not installed",
dist.canonical_name,
)
return cls(dist)

normalized_dist_location = normalize_path(dist_location)
if not dist.local:
logger.info(
"Not uninstalling %s at %s, outside environment %s",
dist.key,
dist_path,
dist.canonical_name,
normalized_dist_location,
sys.prefix,
)
return cls(dist)

if dist_path in {p for p in {sysconfig.get_path("stdlib"),
sysconfig.get_path("platstdlib")}
if p}:
if normalized_dist_location in {
p
for p in {sysconfig.get_path("stdlib"), sysconfig.get_path("platstdlib")}
if p
}:
logger.info(
"Not uninstalling %s at %s, as it is in the standard library.",
dist.key,
dist_path,
dist.canonical_name,
normalized_dist_location,
)
return cls(dist)

paths_to_remove = cls(dist)
develop_egg_link = egg_link_path(dist)
develop_egg_link_egg_info = '{}.egg-info'.format(
pkg_resources.to_filename(dist.project_name))
egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
# Special case for distutils installed package
distutils_egg_info = getattr(dist._provider, 'path', None)
develop_egg_link = egg_link_path_from_location(dist.raw_name)

# Distribution is installed with metadata in a "flat" .egg-info
# directory. This means it is not a modern .dist-info installation, an
# egg, or legacy editable.
setuptools_flat_installation = (
dist.installed_with_setuptools_egg_info
and info_location is not None
and os.path.exists(info_location)
# If dist is editable and the location points to a ``.egg-info``,
# we are in fact in the legacy editable case.
and not info_location.endswith(f"{dist.setuptools_filename}.egg-info")
)

# Uninstall cases order do matter as in the case of 2 installs of the
# same package, pip needs to uninstall the currently detected version
if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
not dist.egg_info.endswith(develop_egg_link_egg_info)):
# if dist.egg_info.endswith(develop_egg_link_egg_info), we
# are in fact in the develop_egg_link case
paths_to_remove.add(dist.egg_info)
if dist.has_metadata('installed-files.txt'):
for installed_file in dist.get_metadata(
'installed-files.txt').splitlines():
path = os.path.normpath(
os.path.join(dist.egg_info, installed_file)
)
paths_to_remove.add(path)
if setuptools_flat_installation:
if info_location is not None:
paths_to_remove.add(info_location)
installed_files = dist.iter_declared_entries()
if installed_files is not None:
for installed_file in installed_files:
paths_to_remove.add(os.path.join(dist_location, installed_file))
# FIXME: need a test for this elif block
# occurs with --single-version-externally-managed/--record outside
# of pip
elif dist.has_metadata('top_level.txt'):
if dist.has_metadata('namespace_packages.txt'):
namespaces = dist.get_metadata('namespace_packages.txt')
else:
elif dist.is_file("top_level.txt"):
try:
namespace_packages = dist.read_text("namespace_packages.txt")
except FileNotFoundError:
namespaces = []
else:
namespaces = namespace_packages.splitlines(keepends=False)
for top_level_pkg in [
p for p
in dist.get_metadata('top_level.txt').splitlines()
if p and p not in namespaces]:
path = os.path.join(dist.location, top_level_pkg)
p
for p in dist.read_text("top_level.txt").splitlines()
if p and p not in namespaces
]:
path = os.path.join(dist_location, top_level_pkg)
paths_to_remove.add(path)
paths_to_remove.add(path + '.py')
paths_to_remove.add(path + '.pyc')
paths_to_remove.add(path + '.pyo')
paths_to_remove.add(f"{path}.py")
paths_to_remove.add(f"{path}.pyc")
paths_to_remove.add(f"{path}.pyo")

elif distutils_egg_info:
elif dist.installed_by_distutils:
raise UninstallationError(
"Cannot uninstall {!r}. It is a distutils installed project "
"and thus we cannot accurately determine which files belong "
"to it which would lead to only a partial uninstall.".format(
dist.project_name,
dist.raw_name,
)
)

elif dist.location.endswith('.egg'):
elif dist.installed_as_egg:
# package installed by easy_install
# We cannot match on dist.egg_name because it can slightly vary
# i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
paths_to_remove.add(dist.location)
easy_install_egg = os.path.split(dist.location)[1]
easy_install_pth = os.path.join(os.path.dirname(dist.location),
'easy-install.pth')
paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)
paths_to_remove.add(dist_location)
easy_install_egg = os.path.split(dist_location)[1]
easy_install_pth = os.path.join(
os.path.dirname(dist_location),
"easy-install.pth",
)
paths_to_remove.add_pth(easy_install_pth, "./" + easy_install_egg)

elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
elif dist.installed_with_dist_info:
for path in uninstallation_paths(dist):
paths_to_remove.add(path)

elif develop_egg_link:
# develop egg
# PEP 660 modern editable is handled in the ``.dist-info`` case
# above, so this only covers the setuptools-style editable.
with open(develop_egg_link) as fh:
link_pointer = os.path.normcase(fh.readline().strip())
assert (link_pointer == dist.location), (
'Egg-link {} does not match installed location of {} '
'(at {})'.format(
link_pointer, dist.project_name, dist.location)
normalized_link_pointer = paths_to_remove._normalize_path_cached(
link_pointer
)
assert os.path.samefile(
normalized_link_pointer, normalized_dist_location
), (
f"Egg-link {develop_egg_link} (to {link_pointer}) does not match "
f"installed location of {dist.raw_name} (at {dist_location})"
)
paths_to_remove.add(develop_egg_link)
easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
'easy-install.pth')
paths_to_remove.add_pth(easy_install_pth, dist.location)
easy_install_pth = os.path.join(
os.path.dirname(develop_egg_link), "easy-install.pth"
)
paths_to_remove.add_pth(easy_install_pth, dist_location)

else:
logger.debug(
'Not sure how to uninstall: %s - Check: %s',
dist, dist.location,
"Not sure how to uninstall: %s - Check: %s",
dist,
dist_location,
)

if dist.in_usersite:
bin_dir = get_bin_user()
else:
bin_dir = get_bin_prefix()

# find distutils scripts= scripts
if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
for script in dist.metadata_listdir('scripts'):
if dist_in_usersite(dist):
bin_dir = get_bin_user()
else:
bin_dir = get_bin_prefix()
try:
for script in dist.iter_distutils_script_names():
paths_to_remove.add(os.path.join(bin_dir, script))
if WINDOWS:
paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')
paths_to_remove.add(os.path.join(bin_dir, f"{script}.bat"))
except (FileNotFoundError, NotADirectoryError):
pass

# find console_scripts
_scripts_to_remove = []
console_scripts = dist.get_entry_map(group='console_scripts')
for name in console_scripts.keys():
_scripts_to_remove.extend(_script_names(dist, name, False))
# find gui_scripts
gui_scripts = dist.get_entry_map(group='gui_scripts')
for name in gui_scripts.keys():
_scripts_to_remove.extend(_script_names(dist, name, True))
# find console_scripts and gui_scripts
def iter_scripts_to_remove(
dist: BaseDistribution,
bin_dir: str,
) -> Generator[str, None, None]:
for entry_point in dist.iter_entry_points():
if entry_point.group == "console_scripts":
yield from _script_names(bin_dir, entry_point.name, False)
elif entry_point.group == "gui_scripts":
yield from _script_names(bin_dir, entry_point.name, True)

for s in _scripts_to_remove:
for s in iter_scripts_to_remove(dist, bin_dir):
paths_to_remove.add(s)

return paths_to_remove
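
The early guard in `from_dist` above refuses to touch anything that resolves into the interpreter's own library directories. A minimal sketch of that check, standing alone:

import sysconfig

def in_stdlib(location: str) -> bool:
    # Compare against stdlib/platstdlib, skipping paths sysconfig leaves unset.
    stdlib_paths = {
        p
        for p in (sysconfig.get_path("stdlib"), sysconfig.get_path("platstdlib"))
        if p
    }
    return location in stdlib_paths

print(in_stdlib(sysconfig.get_path("stdlib")))  # True
print(in_stdlib("/tmp/site-packages"))          # False
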
@@ -585,45 +610,41 @@ class UninstallPthEntries:
# have more than "\\sever\share". Valid examples: "\\server\share\" or
# "\\server\share\folder".
if WINDOWS and not os.path.splitdrive(entry)[0]:
entry = entry.replace('\\', '/')
entry = entry.replace("\\", "/")
self.entries.add(entry)

def remove(self) -> None:
logger.verbose('Removing pth entries from %s:', self.file)
logger.verbose("Removing pth entries from %s:", self.file)

# If the file doesn't exist, log a warning and return
if not os.path.isfile(self.file):
logger.warning(
"Cannot remove entries from nonexistent file %s", self.file
)
logger.warning("Cannot remove entries from nonexistent file %s", self.file)
return
with open(self.file, 'rb') as fh:
with open(self.file, "rb") as fh:
# windows uses '\r\n' with py3k, but uses '\n' with py2.x
lines = fh.readlines()
self._saved_lines = lines
if any(b'\r\n' in line for line in lines):
endline = '\r\n'
if any(b"\r\n" in line for line in lines):
endline = "\r\n"
else:
endline = '\n'
endline = "\n"
# handle missing trailing newline
if lines and not lines[-1].endswith(endline.encode("utf-8")):
lines[-1] = lines[-1] + endline.encode("utf-8")
for entry in self.entries:
try:
logger.verbose('Removing entry: %s', entry)
logger.verbose("Removing entry: %s", entry)
lines.remove((entry + endline).encode("utf-8"))
except ValueError:
pass
with open(self.file, 'wb') as fh:
with open(self.file, "wb") as fh:
fh.writelines(lines)

def rollback(self) -> bool:
if self._saved_lines is None:
logger.error(
'Cannot roll back changes to %s, none were made', self.file
)
logger.error("Cannot roll back changes to %s, none were made", self.file)
return False
logger.debug('Rolling %s back to previous state', self.file)
with open(self.file, 'wb') as fh:
logger.debug("Rolling %s back to previous state", self.file)
with open(self.file, "wb") as fh:
fh.writelines(self._saved_lines)
return True
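
The .pth rewrite above works on bytes so it can preserve whichever line ending the file already uses. A minimal sketch of that detection and entry removal (the entry strings here are hypothetical):

lines = [b"./pkg-1.0.egg\r\n", b"/src/project\r\n"]

# Detect the file's existing convention before rewriting it.
endline = "\r\n" if any(b"\r\n" in line for line in lines) else "\n"

entry = "/src/project"
try:
    lines.remove((entry + endline).encode("utf-8"))
except ValueError:
    pass  # entry was not present; nothing to remove

print(lines)  # [b'./pkg-1.0.egg\r\n']
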

@@ -1,9 +1,11 @@
from typing import Callable, List
from typing import Callable, List, Optional

from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_set import RequirementSet

InstallRequirementProvider = Callable[[str, InstallRequirement], InstallRequirement]
InstallRequirementProvider = Callable[
[str, Optional[InstallRequirement]], InstallRequirement
]


class BaseResolver:

@@ -20,7 +20,7 @@ from itertools import chain
from typing import DefaultDict, Iterable, List, Optional, Set, Tuple

from pip._vendor.packaging import specifiers
from pip._vendor.pkg_resources import Distribution
from pip._vendor.packaging.requirements import Requirement

from pip._internal.cache import WheelCache
from pip._internal.exceptions import (
@@ -28,10 +28,14 @@ from pip._internal.exceptions import (
DistributionNotFound,
HashError,
HashErrors,
InstallationError,
NoneMetadataError,
UnsupportedPythonVersion,
)
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata import BaseDistribution
from pip._internal.models.link import Link
from pip._internal.models.wheel import Wheel
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req.req_install import (
InstallRequirement,
@@ -39,10 +43,12 @@ from pip._internal.req.req_install import (
)
from pip._internal.req.req_set import RequirementSet
from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider
from pip._internal.utils import compatibility_tags
from pip._internal.utils.compatibility_tags import get_supported
from pip._internal.utils.direct_url_helpers import direct_url_from_link
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import dist_in_usersite, normalize_version_info
from pip._internal.utils.packaging import check_requires_python, get_requires_python
from pip._internal.utils.misc import normalize_version_info
from pip._internal.utils.packaging import check_requires_python

logger = logging.getLogger(__name__)

@@ -50,7 +56,7 @@ DiscoveredDependencies = DefaultDict[str, List[InstallRequirement]]


def _check_dist_requires_python(
dist: Distribution,
dist: BaseDistribution,
version_info: Tuple[int, int, int],
ignore_requires_python: bool = False,
) -> None:
@@ -66,14 +72,21 @@ def _check_dist_requires_python(
:raises UnsupportedPythonVersion: When the given Python version isn't
compatible.
"""
requires_python = get_requires_python(dist)
# This idiosyncratically converts the SpecifierSet to str and let
# check_requires_python then parse it again into SpecifierSet. But this
# is the legacy resolver so I'm just not going to bother refactoring.
try:
requires_python = str(dist.requires_python)
except FileNotFoundError as e:
raise NoneMetadataError(dist, str(e))
try:
is_compatible = check_requires_python(
requires_python, version_info=version_info
requires_python,
version_info=version_info,
)
except specifiers.InvalidSpecifier as exc:
logger.warning(
"Package %r has an invalid Requires-Python: %s", dist.project_name, exc
"Package %r has an invalid Requires-Python: %s", dist.raw_name, exc
)
return

@@ -84,7 +97,7 @@ def _check_dist_requires_python(
if ignore_requires_python:
logger.debug(
"Ignoring failed Requires-Python check for package %r: %s not in %r",
dist.project_name,
dist.raw_name,
version,
requires_python,
)
@@ -92,7 +105,7 @@ def _check_dist_requires_python(

raise UnsupportedPythonVersion(
"Package {!r} requires a different Python: {} not in {!r}".format(
dist.project_name, version, requires_python
dist.raw_name, version, requires_python
)
)

@@ -159,7 +172,7 @@ class Resolver(BaseResolver):
for req in root_reqs:
if req.constraint:
check_invalid_constraint_type(req)
requirement_set.add_requirement(req)
self._add_requirement_to_set(requirement_set, req)

# Actually prepare the files, and collect any exceptions. Most hash
# exceptions cannot be checked ahead of time, because
@@ -179,6 +192,124 @@ class Resolver(BaseResolver):

return requirement_set

def _add_requirement_to_set(
self,
requirement_set: RequirementSet,
install_req: InstallRequirement,
parent_req_name: Optional[str] = None,
extras_requested: Optional[Iterable[str]] = None,
) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]:
"""Add install_req as a requirement to install.

:param parent_req_name: The name of the requirement that needed this
added. The name is used because when multiple unnamed requirements
resolve to the same name, we could otherwise end up with dependency
links that point outside the Requirements set. parent_req must
already be added. Note that None implies that this is a user
supplied requirement, vs an inferred one.
:param extras_requested: an iterable of extras used to evaluate the
environment markers.
:return: Additional requirements to scan. That is either [] if
the requirement is not applicable, or [install_req] if the
requirement is applicable and has just been added.
"""
# If the markers do not match, ignore this requirement.
if not install_req.match_markers(extras_requested):
logger.info(
"Ignoring %s: markers '%s' don't match your environment",
install_req.name,
install_req.markers,
)
return [], None

# If the wheel is not supported, raise an error.
# Should check this after filtering out based on environment markers to
# allow specifying different wheels based on the environment/OS, in a
# single requirements file.
if install_req.link and install_req.link.is_wheel:
wheel = Wheel(install_req.link.filename)
tags = compatibility_tags.get_supported()
if requirement_set.check_supported_wheels and not wheel.supported(tags):
raise InstallationError(
"{} is not a supported wheel on this platform.".format(
wheel.filename
)
)

# This next bit is really a sanity check.
assert (
not install_req.user_supplied or parent_req_name is None
), "a user supplied req shouldn't have a parent"

# Unnamed requirements are scanned again and the requirement won't be
# added as a dependency until after scanning.
if not install_req.name:
requirement_set.add_unnamed_requirement(install_req)
return [install_req], None

try:
existing_req: Optional[
InstallRequirement
] = requirement_set.get_requirement(install_req.name)
except KeyError:
existing_req = None

has_conflicting_requirement = (
parent_req_name is None
and existing_req
and not existing_req.constraint
and existing_req.extras == install_req.extras
and existing_req.req
and install_req.req
and existing_req.req.specifier != install_req.req.specifier
)
if has_conflicting_requirement:
raise InstallationError(
"Double requirement given: {} (already in {}, name={!r})".format(
install_req, existing_req, install_req.name
)
)

# When no existing requirement exists, add the requirement as a
# dependency and it will be scanned again after.
if not existing_req:
requirement_set.add_named_requirement(install_req)
# We'd want to rescan this requirement later
return [install_req], install_req

# Assume there's no need to scan, and that we've already
# encountered this for scanning.
if install_req.constraint or not existing_req.constraint:
return [], existing_req

does_not_satisfy_constraint = install_req.link and not (
existing_req.link and install_req.link.path == existing_req.link.path
)
if does_not_satisfy_constraint:
raise InstallationError(
"Could not satisfy constraints for '{}': "
"installation from path or url cannot be "
"constrained to a version".format(install_req.name)
)
# If we're now installing a constraint, mark the existing
# object for real installation.
existing_req.constraint = False
# If we're now installing a user supplied requirement,
# mark the existing object as such.
if install_req.user_supplied:
existing_req.user_supplied = True
existing_req.extras = tuple(
sorted(set(existing_req.extras) | set(install_req.extras))
)
logger.debug(
"Setting %s extras to: %s",
existing_req,
existing_req.extras,
)
# Return the existing requirement for addition to the parent and
# scanning again.
return [existing_req], existing_req

def _is_upgrade_allowed(self, req: InstallRequirement) -> bool:
if self.upgrade_strategy == "to-satisfy-only":
return False
@@ -194,7 +325,7 @@ class Resolver(BaseResolver):
"""
# Don't uninstall the conflict if doing a user install and the
# conflict is not a user install.
if not self.use_user_site or dist_in_usersite(req.satisfied_by):
if not self.use_user_site or req.satisfied_by.in_usersite:
req.should_reinstall = True
req.satisfied_by = None

@@ -300,10 +431,18 @@ class Resolver(BaseResolver):
if cache_entry is not None:
logger.debug("Using cached wheel link: %s", cache_entry.link)
if req.link is req.original_link and cache_entry.persistent:
req.original_link_is_in_wheel_cache = True
req.cached_wheel_source_link = req.link
if cache_entry.origin is not None:
req.download_info = cache_entry.origin
else:
# Legacy cache entry that does not have origin.json.
# download_info may miss the archive_info.hashes field.
req.download_info = direct_url_from_link(
req.link, link_is_in_wheel_cache=cache_entry.persistent
)
req.link = cache_entry.link

def _get_dist_for(self, req: InstallRequirement) -> Distribution:
def _get_dist_for(self, req: InstallRequirement) -> BaseDistribution:
"""Takes a InstallRequirement and returns a single AbstractDist \
representing a prepared variant of the same.
"""
@@ -378,13 +517,14 @@ class Resolver(BaseResolver):

more_reqs: List[InstallRequirement] = []

def add_req(subreq: Distribution, extras_requested: Iterable[str]) -> None:
sub_install_req = self._make_install_req(
str(subreq),
req_to_install,
)
def add_req(subreq: Requirement, extras_requested: Iterable[str]) -> None:
# This idiosyncratically converts the Requirement to str and let
# make_install_req then parse it again into Requirement. But this is
# the legacy resolver so I'm just not going to bother refactoring.
sub_install_req = self._make_install_req(str(subreq), req_to_install)
parent_req_name = req_to_install.name
to_scan_again, add_to_parent = requirement_set.add_requirement(
to_scan_again, add_to_parent = self._add_requirement_to_set(
requirement_set,
sub_install_req,
parent_req_name=parent_req_name,
extras_requested=extras_requested,
@@ -401,7 +541,9 @@ class Resolver(BaseResolver):
# 'unnamed' requirements can only come from being directly
# provided by the user.
assert req_to_install.user_supplied
requirement_set.add_requirement(req_to_install, parent_req_name=None)
self._add_requirement_to_set(
requirement_set, req_to_install, parent_req_name=None
)

if not self.ignore_dependencies:
if req_to_install.extras:
@@ -410,15 +552,20 @@ class Resolver(BaseResolver):
",".join(req_to_install.extras),
)
missing_requested = sorted(
set(req_to_install.extras) - set(dist.extras)
set(req_to_install.extras) - set(dist.iter_provided_extras())
)
for missing in missing_requested:
logger.warning("%s does not provide the extra '%s'", dist, missing)
logger.warning(
"%s %s does not provide the extra '%s'",
dist.raw_name,
dist.version,
missing,
)

available_requested = sorted(
set(dist.extras) & set(req_to_install.extras)
set(dist.iter_provided_extras()) & set(req_to_install.extras)
)
for subreq in dist.requires(available_requested):
for subreq in dist.iter_dependencies(available_requested):
add_req(subreq, extras_requested=available_requested)

return more_reqs
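
The extras handling above is plain set algebra: requested-but-unavailable extras produce warnings, and only the intersection drives dependency expansion. A minimal sketch with literal strings in place of pip's metadata objects (the extras names are hypothetical):

requested = {"socks", "security", "doesnotexist"}
provided = {"socks", "security", "brotli"}

missing = sorted(requested - provided)    # warn about these
available = sorted(provided & requested)  # expand dependencies for these

print(missing)    # ['doesnotexist']
print(available)  # ['security', 'socks']
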

@@ -36,11 +36,8 @@ class Constraint:
links = frozenset([ireq.link]) if ireq.link else frozenset()
return Constraint(ireq.specifier, ireq.hashes(trust_internet=False), links)

def __nonzero__(self) -> bool:
return bool(self.specifier) or bool(self.hashes) or bool(self.links)

def __bool__(self) -> bool:
return self.__nonzero__()
return bool(self.specifier) or bool(self.hashes) or bool(self.links)
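
`__nonzero__` was the Python 2 spelling of truthiness; Python 3 only consults `__bool__`, so the indirection removed above was dead weight. A minimal sketch of the surviving protocol, with a hypothetical simplified constructor:

class Constraint:
    def __init__(self, specifier="", hashes=None, links=frozenset()):
        self.specifier, self.hashes, self.links = specifier, hashes or {}, links

    def __bool__(self) -> bool:
        # Truthy when any component actually constrains something.
        return bool(self.specifier) or bool(self.hashes) or bool(self.links)

print(bool(Constraint()))                 # False: empty constraint
print(bool(Constraint(specifier=">=1")))  # True
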
|
||||
|
||||
def __and__(self, other: InstallRequirement) -> "Constraint":
|
||||
if not isinstance(other, InstallRequirement):
|
||||
|
||||
@@ -2,13 +2,15 @@ import logging
import sys
from typing import TYPE_CHECKING, Any, FrozenSet, Iterable, Optional, Tuple, Union, cast

from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._vendor.packaging.version import Version
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.pkg_resources import Distribution

from pip._internal.exceptions import HashError, MetadataInconsistent
from pip._internal.exceptions import (
    HashError,
    InstallationSubprocessError,
    MetadataInconsistent,
)
from pip._internal.metadata import BaseDistribution
from pip._internal.models.link import Link, links_equivalent
from pip._internal.models.wheel import Wheel
from pip._internal.req.constructors import (
@@ -16,8 +18,8 @@ from pip._internal.req.constructors import (
    install_req_from_line,
)
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils.misc import dist_is_editable, normalize_version_info
from pip._internal.utils.packaging import get_requires_python
from pip._internal.utils.direct_url_helpers import direct_url_from_link
from pip._internal.utils.misc import normalize_version_info

from .base import Candidate, CandidateVersion, Requirement, format_name

@@ -63,14 +65,13 @@ def make_install_req_from_link(
        use_pep517=template.use_pep517,
        isolated=template.isolated,
        constraint=template.constraint,
        options=dict(
            install_options=template.install_options,
            global_options=template.global_options,
            hashes=template.hash_options,
        ),
        global_options=template.global_options,
        hash_options=template.hash_options,
        config_settings=template.config_settings,
    )
    ireq.original_link = template.original_link
    ireq.link = link
    ireq.extras = template.extras
    return ireq


@@ -78,31 +79,31 @@ def make_install_req_from_editable(
    link: Link, template: InstallRequirement
) -> InstallRequirement:
    assert template.editable, "template not editable"
    return install_req_from_editable(
    ireq = install_req_from_editable(
        link.url,
        user_supplied=template.user_supplied,
        comes_from=template.comes_from,
        use_pep517=template.use_pep517,
        isolated=template.isolated,
        constraint=template.constraint,
        options=dict(
            install_options=template.install_options,
            global_options=template.global_options,
            hashes=template.hash_options,
        ),
        permit_editable_wheels=template.permit_editable_wheels,
        global_options=template.global_options,
        hash_options=template.hash_options,
        config_settings=template.config_settings,
    )
    ireq.extras = template.extras
    return ireq


def make_install_req_from_dist(
    dist: Distribution, template: InstallRequirement
def _make_install_req_from_dist(
    dist: BaseDistribution, template: InstallRequirement
) -> InstallRequirement:
    project_name = canonicalize_name(dist.project_name)
    if template.req:
        line = str(template.req)
    elif template.link:
        line = f"{project_name} @ {template.link.url}"
        line = f"{dist.canonical_name} @ {template.link.url}"
    else:
        line = f"{project_name}=={dist.parsed_version}"
        line = f"{dist.canonical_name}=={dist.version}"
    ireq = install_req_from_line(
        line,
        user_supplied=template.user_supplied,
@@ -110,11 +111,9 @@ def make_install_req_from_dist(
        use_pep517=template.use_pep517,
        isolated=template.isolated,
        constraint=template.constraint,
        options=dict(
            install_options=template.install_options,
            global_options=template.global_options,
            hashes=template.hash_options,
        ),
        global_options=template.global_options,
        hash_options=template.hash_options,
        config_settings=template.config_settings,
    )
    ireq.satisfied_by = dist
    return ireq
@@ -136,6 +135,7 @@ class _InstallRequirementBackedCandidate(Candidate):
    found remote link (e.g. from pypi.org).
    """

    dist: BaseDistribution
    is_installed = False

    def __init__(
@@ -180,7 +180,7 @@ class _InstallRequirementBackedCandidate(Candidate):
    def project_name(self) -> NormalizedName:
        """The normalised name of the project the candidate refers to"""
        if self._name is None:
            self._name = canonicalize_name(self.dist.project_name)
            self._name = self.dist.canonical_name
        return self._name

    @property
@@ -190,7 +190,7 @@ class _InstallRequirementBackedCandidate(Candidate):
    @property
    def version(self) -> CandidateVersion:
        if self._version is None:
            self._version = parse_version(self.dist.version)
            self._version = self.dist.version
        return self._version

    def format_for_error(self) -> str:
@@ -200,29 +200,27 @@ class _InstallRequirementBackedCandidate(Candidate):
            self._link.file_path if self._link.is_file else self._link,
        )

    def _prepare_distribution(self) -> Distribution:
    def _prepare_distribution(self) -> BaseDistribution:
        raise NotImplementedError("Override in subclass")

    def _check_metadata_consistency(self, dist: Distribution) -> None:
    def _check_metadata_consistency(self, dist: BaseDistribution) -> None:
        """Check for consistency of project name and version of dist."""
        canonical_name = canonicalize_name(dist.project_name)
        if self._name is not None and self._name != canonical_name:
        if self._name is not None and self._name != dist.canonical_name:
            raise MetadataInconsistent(
                self._ireq,
                "name",
                self._name,
                dist.project_name,
                dist.canonical_name,
            )
        parsed_version = parse_version(dist.version)
        if self._version is not None and self._version != parsed_version:
        if self._version is not None and self._version != dist.version:
            raise MetadataInconsistent(
                self._ireq,
                "version",
                str(self._version),
                dist.version,
                str(dist.version),
            )

    def _prepare(self) -> Distribution:
    def _prepare(self) -> BaseDistribution:
        try:
            dist = self._prepare_distribution()
        except HashError as e:
@@ -231,26 +229,19 @@ class _InstallRequirementBackedCandidate(Candidate):
            # offending line to the user.
            e.req = self._ireq
            raise
        except InstallationSubprocessError as exc:
            # The output has been presented already, so don't duplicate it.
            exc.context = "See above for output."
            raise

        self._check_metadata_consistency(dist)
        return dist

    def _get_requires_python_dependency(self) -> Optional[Requirement]:
        requires_python = get_requires_python(self.dist)
        if requires_python is None:
            return None
        try:
            spec = SpecifierSet(requires_python)
        except InvalidSpecifier as e:
            message = "Package %r has an invalid Requires-Python: %s"
            logger.warning(message, self.name, e)
            return None
        return self._factory.make_requires_python_requirement(spec)

    def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
        requires = self.dist.requires() if with_requires else ()
        requires = self.dist.iter_dependencies() if with_requires else ()
        for r in requires:
            yield self._factory.make_requirement_from_spec(str(r), self._ireq)
        yield self._get_requires_python_dependency()
        yield self._factory.make_requires_python_requirement(self.dist.requires_python)

    def get_install_requirement(self) -> Optional[InstallRequirement]:
        return self._ireq
@@ -268,7 +259,7 @@ class LinkCandidate(_InstallRequirementBackedCandidate):
        version: Optional[CandidateVersion] = None,
    ) -> None:
        source_link = link
        cache_entry = factory.get_wheel_cache_entry(link, name)
        cache_entry = factory.get_wheel_cache_entry(source_link, name)
        if cache_entry is not None:
            logger.debug("Using cached wheel link: %s", cache_entry.link)
            link = cache_entry.link
@@ -285,12 +276,19 @@ class LinkCandidate(_InstallRequirementBackedCandidate):
                    version, wheel_version, name
                )

        if (
            cache_entry is not None
            and cache_entry.persistent
            and template.link is template.original_link
        ):
            ireq.original_link_is_in_wheel_cache = True
        if cache_entry is not None:
            assert ireq.link.is_wheel
            assert ireq.link.is_file
            if cache_entry.persistent and template.link is template.original_link:
                ireq.cached_wheel_source_link = source_link
            if cache_entry.origin is not None:
                ireq.download_info = cache_entry.origin
            else:
                # Legacy cache entry that does not have origin.json.
                # download_info may miss the archive_info.hashes field.
                ireq.download_info = direct_url_from_link(
                    source_link, link_is_in_wheel_cache=cache_entry.persistent
                )

        super().__init__(
            link=link,
@@ -301,10 +299,9 @@ class LinkCandidate(_InstallRequirementBackedCandidate):
            version=version,
        )

    def _prepare_distribution(self) -> Distribution:
        return self._factory.preparer.prepare_linked_requirement(
            self._ireq, parallel_builds=True
        )
    def _prepare_distribution(self) -> BaseDistribution:
        preparer = self._factory.preparer
        return preparer.prepare_linked_requirement(self._ireq, parallel_builds=True)


class EditableCandidate(_InstallRequirementBackedCandidate):
@@ -327,7 +324,7 @@ class EditableCandidate(_InstallRequirementBackedCandidate):
            version=version,
        )

    def _prepare_distribution(self) -> Distribution:
    def _prepare_distribution(self) -> BaseDistribution:
        return self._factory.preparer.prepare_editable_requirement(self._ireq)

@@ -337,17 +334,17 @@ class AlreadyInstalledCandidate(Candidate):

    def __init__(
        self,
        dist: Distribution,
        dist: BaseDistribution,
        template: InstallRequirement,
        factory: "Factory",
    ) -> None:
        self.dist = dist
        self._ireq = make_install_req_from_dist(dist, template)
        self._ireq = _make_install_req_from_dist(dist, template)
        self._factory = factory

        # This is just logging some messages, so we can do it eagerly.
        # The returned dist would be exactly the same as self.dist because we
        # set satisfied_by in make_install_req_from_dist.
        # set satisfied_by in _make_install_req_from_dist.
        # TODO: Supply reason based on force_reinstall and upgrade_strategy.
        skip_reason = "already satisfied"
        factory.preparer.prepare_installed_requirement(self._ireq, skip_reason)
@@ -371,7 +368,7 @@ class AlreadyInstalledCandidate(Candidate):

    @property
    def project_name(self) -> NormalizedName:
        return canonicalize_name(self.dist.project_name)
        return self.dist.canonical_name

    @property
    def name(self) -> str:
@@ -379,11 +376,11 @@ class AlreadyInstalledCandidate(Candidate):

    @property
    def version(self) -> CandidateVersion:
        return parse_version(self.dist.version)
        return self.dist.version

    @property
    def is_editable(self) -> bool:
        return dist_is_editable(self.dist)
        return self.dist.editable

    def format_for_error(self) -> str:
        return f"{self.name} {self.version} (Installed)"
@@ -391,7 +388,7 @@ class AlreadyInstalledCandidate(Candidate):
    def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
        if not with_requires:
            return
        for r in self.dist.requires():
        for r in self.dist.iter_dependencies():
            yield self._factory.make_requirement_from_spec(str(r), self._ireq)

    def get_install_requirement(self) -> Optional[InstallRequirement]:
@@ -491,8 +488,8 @@ class ExtrasCandidate(Candidate):

        # The user may have specified extras that the candidate doesn't
        # support. We ignore any unsupported extras here.
        valid_extras = self.extras.intersection(self.base.dist.extras)
        invalid_extras = self.extras.difference(self.base.dist.extras)
        valid_extras = self.extras.intersection(self.base.dist.iter_provided_extras())
        invalid_extras = self.extras.difference(self.base.dist.iter_provided_extras())
        for extra in sorted(invalid_extras):
            logger.warning(
                "%s %s does not provide the extra '%s'",
@@ -501,7 +498,7 @@ class ExtrasCandidate(Candidate):
                extra,
            )

        for r in self.base.dist.requires(valid_extras):
        for r in self.base.dist.iter_dependencies(valid_extras):
            requirement = factory.make_requirement_from_spec(
                str(r), self.base._ireq, valid_extras
            )

@@ -19,7 +19,6 @@ from typing import (
)

from pip._vendor.packaging.requirements import InvalidRequirement
from pip._vendor.packaging.requirements import Requirement as PackagingRequirement
from pip._vendor.packaging.specifiers import SpecifierSet
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._vendor.resolvelib import ResolutionImpossible
@@ -28,7 +27,6 @@ from pip._internal.cache import CacheEntry, WheelCache
from pip._internal.exceptions import (
    DistributionNotFound,
    InstallationError,
    InstallationSubprocessError,
    MetadataInconsistent,
    UnsupportedPythonVersion,
    UnsupportedWheel,
@@ -46,6 +44,7 @@ from pip._internal.req.req_install import (
from pip._internal.resolution.base import InstallRequirementProvider
from pip._internal.utils.compatibility_tags import get_supported
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.packaging import get_requirement
from pip._internal.utils.virtualenv import running_under_virtualenv

from .base import Candidate, CandidateVersion, Constraint, Requirement
@@ -158,10 +157,7 @@ class Factory:
        try:
            base = self._installed_candidate_cache[dist.canonical_name]
        except KeyError:
            from pip._internal.metadata.pkg_resources import Distribution as _Dist

            compat_dist = cast(_Dist, dist)._dist
            base = AlreadyInstalledCandidate(compat_dist, template, factory=self)
            base = AlreadyInstalledCandidate(dist, template, factory=self)
            self._installed_candidate_cache[dist.canonical_name] = base
        if not extras:
            return base
@@ -193,10 +189,16 @@ class Factory:
                        name=name,
                        version=version,
                    )
                except (InstallationSubprocessError, MetadataInconsistent) as e:
                    logger.warning("Discarding %s. %s", link, e)
                except MetadataInconsistent as e:
                    logger.info(
                        "Discarding [blue underline]%s[/]: [yellow]%s[reset]",
                        link,
                        e,
                        extra={"markup": True},
                    )
                    self._build_failures[link] = e
                    return None

            base: BaseCandidate = self._editable_candidate_cache[link]
        else:
            if link not in self._link_candidate_cache:
@@ -208,8 +210,13 @@ class Factory:
                        name=name,
                        version=version,
                    )
                except (InstallationSubprocessError, MetadataInconsistent) as e:
                    logger.warning("Discarding %s. %s", link, e)
                except MetadataInconsistent as e:
                    logger.info(
                        "Discarding [blue underline]%s[/]: [yellow]%s[reset]",
                        link,
                        e,
                        extra={"markup": True},
                    )
                    self._build_failures[link] = e
                    return None
            base = self._link_candidate_cache[link]
@@ -263,7 +270,7 @@ class Factory:
            extras=extras,
            template=template,
        )
        # The candidate is a known incompatiblity. Don't use it.
        # The candidate is a known incompatibility. Don't use it.
        if id(candidate) in incompatible_ids:
            return None
        return candidate
@@ -276,14 +283,27 @@ class Factory:
        )
        icans = list(result.iter_applicable())

        # PEP 592: Yanked releases must be ignored unless only yanked
        # releases can satisfy the version range. So if this is false,
        # all yanked icans need to be skipped.
        # PEP 592: Yanked releases are ignored unless the specifier
        # explicitly pins a version (via '==' or '===') that can be
        # solely satisfied by a yanked release.
        all_yanked = all(ican.link.is_yanked for ican in icans)

        def is_pinned(specifier: SpecifierSet) -> bool:
            for sp in specifier:
                if sp.operator == "===":
                    return True
                if sp.operator != "==":
                    continue
                if sp.version.endswith(".*"):
                    continue
                return True
            return False

        pinned = is_pinned(specifier)

        # PackageFinder returns earlier versions first, so we reverse.
        for ican in reversed(icans):
            if not all_yanked and ican.link.is_yanked:
            if not (all_yanked and pinned) and ican.link.is_yanked:
                continue
            func = functools.partial(
                self._make_candidate_from_link,
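
A brief aside on the hunk above: under the revised PEP 592 handling, yanked releases are only eligible when the user's specifier is an exact pin. A self-contained sketch of the pinning test, using the standalone packaging distribution (the specifier strings are invented for illustration):

    from packaging.specifiers import SpecifierSet

    def is_pinned(specifier: SpecifierSet) -> bool:
        # Mirrors the helper above: only "===", or "==" without a
        # trailing ".*", counts as a pin.
        for sp in specifier:
            if sp.operator == "===":
                return True
            if sp.operator == "==" and not sp.version.endswith(".*"):
                return True
        return False

    assert is_pinned(SpecifierSet("==1.2.3"))
    assert is_pinned(SpecifierSet("===1.2.3"))
    assert not is_pinned(SpecifierSet("==1.2.*"))
    assert not is_pinned(SpecifierSet(">=1.0,<2.0"))
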
@@ -350,7 +370,7 @@ class Factory:
    def find_candidates(
        self,
        identifier: str,
        requirements: Mapping[str, Iterator[Requirement]],
        requirements: Mapping[str, Iterable[Requirement]],
        incompatibilities: Mapping[str, Iterator[Candidate]],
        constraint: Constraint,
        prefers_installed: bool,
@@ -368,7 +388,7 @@ class Factory:
        # If the current identifier contains extras, add explicit candidates
        # from entries from extra-less identifier.
        with contextlib.suppress(InvalidRequirement):
            parsed_requirement = PackagingRequirement(identifier)
            parsed_requirement = get_requirement(identifier)
            explicit_candidates.update(
                self._iter_explicit_candidates_from_base(
                    requirements.get(parsed_requirement.name, ()),
@@ -377,7 +397,7 @@ class Factory:
        )

        # Add explicit candidates from constraints. We only do this if there are
        # kown ireqs, which represent requirements not already explicit. If
        # known ireqs, which represent requirements not already explicit. If
        # there are no ireqs, we're constraining already-explicit requirements,
        # which is handled later when we return the explicit candidates.
        if ireqs:
@@ -487,16 +507,20 @@ class Factory:
    def make_requirement_from_spec(
        self,
        specifier: str,
        comes_from: InstallRequirement,
        comes_from: Optional[InstallRequirement],
        requested_extras: Iterable[str] = (),
    ) -> Optional[Requirement]:
        ireq = self._make_install_req_from_spec(specifier, comes_from)
        return self._make_requirement_from_install_req(ireq, requested_extras)

    def make_requires_python_requirement(
        self, specifier: Optional[SpecifierSet]
        self,
        specifier: SpecifierSet,
    ) -> Optional[Requirement]:
        if self._ignore_requires_python or specifier is None:
        if self._ignore_requires_python:
            return None
        # Don't bother creating a dependency for an empty Requires-Python.
        if not str(specifier):
            return None
        return RequiresPythonRequirement(specifier, self._python_candidate)

@@ -511,7 +535,7 @@ class Factory:
        hash mismatches. Furthermore, cached wheels at present have
        nondeterministic contents due to file modification times.
        """
        if self._wheel_cache is None or self.preparer.require_hashes:
        if self._wheel_cache is None:
            return None
        return self._wheel_cache.get_cache_entry(
            link=link,
@@ -578,8 +602,15 @@ class Factory:
            req_disp = f"{req} (from {parent.name})"

        cands = self._finder.find_all_candidates(req.project_name)
        skipped_by_requires_python = self._finder.requires_python_skipped_reasons()
        versions = [str(v) for v in sorted({c.version for c in cands})]

        if skipped_by_requires_python:
            logger.critical(
                "Ignored the following versions that require a different python "
                "version: %s",
                "; ".join(skipped_by_requires_python) or "none",
            )
        logger.critical(
            "Could not find a version that satisfies the requirement %s "
            "(from versions: %s)",
@@ -601,7 +632,6 @@ class Factory:
        e: "ResolutionImpossible[Requirement, Candidate]",
        constraints: Dict[str, Constraint],
    ) -> InstallationError:

        assert e.causes, "Installation error reported with no cause"

        # If one of the things we can't solve is "we need Python X.Y",
@@ -614,7 +644,7 @@ class Factory:
        ]
        if requires_python_causes:
            # The comprehension above makes sure all Requirement instances are
            # RequiresPythonRequirement, so let's cast for convinience.
            # RequiresPythonRequirement, so let's cast for convenience.
            return self._report_requires_python_error(
                cast("Sequence[ConflictCause]", requires_python_causes),
            )
@@ -695,6 +725,6 @@ class Factory:

        return DistributionNotFound(
            "ResolutionImpossible: for help visit "
            "https://pip.pypa.io/en/latest/user_guide/"
            "#fixing-conflicting-dependencies"
            "https://pip.pypa.io/en/latest/topics/dependency-resolution/"
            "#dealing-with-dependency-conflicts"
        )

@@ -9,15 +9,30 @@ something.
"""

import functools
from typing import Callable, Iterator, Optional, Set, Tuple
from collections.abc import Sequence
from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, Set, Tuple

from pip._vendor.packaging.version import _BaseVersion
from pip._vendor.six.moves import collections_abc  # type: ignore

from .base import Candidate

IndexCandidateInfo = Tuple[_BaseVersion, Callable[[], Optional[Candidate]]]

if TYPE_CHECKING:
    SequenceCandidate = Sequence[Candidate]
else:
    # For compatibility: Python before 3.9 does not support using [] on the
    # Sequence class.
    #
    # >>> from collections.abc import Sequence
    # >>> Sequence[str]
    # Traceback (most recent call last):
    #   File "<stdin>", line 1, in <module>
    # TypeError: 'ABCMeta' object is not subscriptable
    #
    # TODO: Remove this block after dropping Python 3.8 support.
    SequenceCandidate = Sequence

def _iter_built(infos: Iterator[IndexCandidateInfo]) -> Iterator[Candidate]:
    """Iterator for ``FoundCandidates``.
@@ -90,7 +105,7 @@ def _iter_built_with_inserted(
    yield installed


class FoundCandidates(collections_abc.Sequence):
class FoundCandidates(SequenceCandidate):
    """A lazy sequence to provide candidates to the resolver.

    The intended usage is to return this from `find_matches()` so the resolver
@@ -111,7 +126,7 @@ class FoundCandidates(collections_abc.Sequence):
        self._prefers_installed = prefers_installed
        self._incompatible_ids = incompatible_ids

    def __getitem__(self, index: int) -> Candidate:
    def __getitem__(self, index: Any) -> Any:
        # Implemented to satisfy the ABC check. This is not needed by the
        # resolver, and should not be used by the provider either (for
        # performance reasons).
@@ -138,5 +153,3 @@ class FoundCandidates(collections_abc.Sequence):
        if self._prefers_installed and self._installed:
            return True
        return any(self)

    __nonzero__ = __bool__  # XXX: Python 2.

@@ -1,6 +1,15 @@
import collections
import math
from typing import TYPE_CHECKING, Dict, Iterable, Iterator, Mapping, Sequence, Union
from typing import (
    TYPE_CHECKING,
    Dict,
    Iterable,
    Iterator,
    Mapping,
    Sequence,
    TypeVar,
    Union,
)

from pip._vendor.resolvelib.providers import AbstractProvider

@@ -37,6 +46,35 @@ else:
# services to those objects (access to pip's finder and preparer).


D = TypeVar("D")
V = TypeVar("V")


def _get_with_identifier(
    mapping: Mapping[str, V],
    identifier: str,
    default: D,
) -> Union[D, V]:
    """Get item from a package name lookup mapping with a resolver identifier.

    This extra logic is needed when the target mapping is keyed by package
    name, which cannot be directly looked up with an identifier (which may
    contain requested extras). Additional logic is added to also look up a value
    by "cleaning up" the extras from the identifier.
    """
    if identifier in mapping:
        return mapping[identifier]
    # HACK: Theoretically we should check whether this identifier is a valid
    # "NAME[EXTRAS]" format, and parse out the name part with packaging or
    # some regular expression. But since pip's resolver only spits out three
    # kinds of identifiers: normalized PEP 503 names, normalized names plus
    # extras, and Requires-Python, we can cheat a bit here.
    name, open_bracket, _ = identifier.partition("[")
    if open_bracket and name in mapping:
        return mapping[name]
    return default

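
A quick illustration of the lookup fallback implemented above (the mapping contents are hypothetical):

    # Hypothetical user-requested ordering, keyed by project name.
    user_requested = {"requests": 1, "flask": 2}

    # A bare name is found directly.
    assert _get_with_identifier(user_requested, "requests", default=None) == 1
    # An extras identifier falls back to the extras-less name.
    assert _get_with_identifier(user_requested, "requests[socks]", default=None) == 1
    # Unknown identifiers get the default.
    assert _get_with_identifier(user_requested, "urllib3", default=None) is None
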
class PipProvider(_ProviderBase):
    """Pip's provider implementation for resolvelib.

@@ -71,28 +109,44 @@ class PipProvider(_ProviderBase):
        identifier: str,
        resolutions: Mapping[str, Candidate],
        candidates: Mapping[str, Iterator[Candidate]],
        information: Mapping[str, Iterator["PreferenceInformation"]],
        information: Mapping[str, Iterable["PreferenceInformation"]],
        backtrack_causes: Sequence["PreferenceInformation"],
    ) -> "Preference":
        """Produce a sort key for given requirement based on preference.

        The lower the return value is, the more preferred this group of
        arguments is.

        Currently pip considers the followings in order:
        Currently pip considers the following in order:

        * Prefer if any of the known requirements is "direct", e.g. points to an
          explicit URL.
        * If equal, prefer if any requirement is "pinned", i.e. contains
          operator ``===`` or ``==``.
        * If equal, calculate an approximate "depth" and resolve requirements
          closer to the user-specified requirements first.
          closer to the user-specified requirements first. If the depth cannot
          by determined (eg: due to no matching parents), it is considered
          infinite.
        * Order user-specified requirements by the order they are specified.
        * If equal, prefers "non-free" requirements, i.e. contains at least one
          operator, such as ``>=`` or ``<``.
        * If equal, order alphabetically for consistency (helps debuggability).
        """
        lookups = (r.get_candidate_lookup() for r, _ in information[identifier])
        candidate, ireqs = zip(*lookups)
        try:
            next(iter(information[identifier]))
        except StopIteration:
            # There is no information for this identifier, so there's no known
            # candidates.
            has_information = False
        else:
            has_information = True

        if has_information:
            lookups = (r.get_candidate_lookup() for r, _ in information[identifier])
            candidate, ireqs = zip(*lookups)
        else:
            candidate, ireqs = None, ()

        operators = [
            specifier.operator
            for specifier_set in (ireq.specifier for ireq in ireqs if ireq)
@@ -107,14 +161,17 @@ class PipProvider(_ProviderBase):
            requested_order: Union[int, float] = self._user_requested[identifier]
        except KeyError:
            requested_order = math.inf
            parent_depths = (
                self._known_depths[parent.name] if parent is not None else 0.0
                for _, parent in information[identifier]
            )
            inferred_depth = min(d for d in parent_depths) + 1.0
        self._known_depths[identifier] = inferred_depth
            if has_information:
                parent_depths = (
                    self._known_depths[parent.name] if parent is not None else 0.0
                    for _, parent in information[identifier]
                )
                inferred_depth = min(d for d in parent_depths) + 1.0
            else:
                inferred_depth = math.inf
        else:
            inferred_depth = 1.0
        self._known_depths[identifier] = inferred_depth

        requested_order = self._user_requested.get(identifier, math.inf)

@@ -122,49 +179,29 @@ class PipProvider(_ProviderBase):
        # free, so we always do it first to avoid needless work if it fails.
        requires_python = identifier == REQUIRES_PYTHON_IDENTIFIER

        # HACK: Setuptools have a very long and solid backward compatibility
        # track record, and extremely few projects would request a narrow,
        # non-recent version range of it since that would break a lot things.
        # (Most projects specify it only to request for an installer feature,
        # which does not work, but that's another topic.) Intentionally
        # delaying Setuptools helps reduce branches the resolver has to check.
        # This serves as a temporary fix for issues like "apache-airlfow[all]"
        # while we work on "proper" branch pruning techniques.
        delay_this = identifier == "setuptools"
        # Prefer the causes of backtracking on the assumption that the problem
        # resolving the dependency tree is related to the failures that caused
        # the backtracking
        backtrack_cause = self.is_backtrack_cause(identifier, backtrack_causes)

        return (
            not requires_python,
            delay_this,
            not direct,
            not pinned,
            not backtrack_cause,
            inferred_depth,
            requested_order,
            not unfree,
            identifier,
        )

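To make the resulting ordering concrete, here is a small self-contained sketch; the identifiers and flag values are invented, and the tuples mirror the new return value above:

    import math

    # (not requires_python, not direct, not pinned, not backtrack_cause,
    #  inferred_depth, requested_order, not unfree, identifier)
    keys = {
        "<Python>": (False, True, True, True, 0.0, math.inf, True, "<Python>"),
        "requests": (True, False, False, True, 1.0, 1, False, "requests"),
        "idna": (True, True, True, True, 2.0, math.inf, False, "idna"),
    }
    # Lower tuples sort first: the Requires-Python pseudo-requirement,
    # then the direct user requirement, then deeper dependencies.
    print(sorted(keys, key=keys.__getitem__))  # ['<Python>', 'requests', 'idna']
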
    def _get_constraint(self, identifier: str) -> Constraint:
        if identifier in self._constraints:
            return self._constraints[identifier]

        # HACK: Theoratically we should check whether this identifier is a valid
        # "NAME[EXTRAS]" format, and parse out the name part with packaging or
        # some regular expression. But since pip's resolver only spits out
        # three kinds of identifiers: normalized PEP 503 names, normalized names
        # plus extras, and Requires-Python, we can cheat a bit here.
        name, open_bracket, _ = identifier.partition("[")
        if open_bracket and name in self._constraints:
            return self._constraints[name]

        return Constraint.empty()

    def find_matches(
        self,
        identifier: str,
        requirements: Mapping[str, Iterator[Requirement]],
        incompatibilities: Mapping[str, Iterator[Candidate]],
    ) -> Iterable[Candidate]:
        def _eligible_for_upgrade(name: str) -> bool:
        def _eligible_for_upgrade(identifier: str) -> bool:
            """Are upgrades allowed for this project?

            This checks the upgrade strategy, and whether the project was one
@@ -178,13 +215,23 @@ class PipProvider(_ProviderBase):
            if self._upgrade_strategy == "eager":
                return True
            elif self._upgrade_strategy == "only-if-needed":
                return name in self._user_requested
                user_order = _get_with_identifier(
                    self._user_requested,
                    identifier,
                    default=None,
                )
                return user_order is not None
            return False

        constraint = _get_with_identifier(
            self._constraints,
            identifier,
            default=Constraint.empty(),
        )
        return self._factory.find_candidates(
            identifier=identifier,
            requirements=requirements,
            constraint=self._get_constraint(identifier),
            constraint=constraint,
            prefers_installed=(not _eligible_for_upgrade(identifier)),
            incompatibilities=incompatibilities,
        )
@@ -195,3 +242,14 @@ class PipProvider(_ProviderBase):
    def get_dependencies(self, candidate: Candidate) -> Sequence[Requirement]:
        with_requires = not self._ignore_dependencies
        return [r for r in candidate.iter_dependencies(with_requires) if r is not None]

    @staticmethod
    def is_backtrack_cause(
        identifier: str, backtrack_causes: Sequence["PreferenceInformation"]
    ) -> bool:
        for backtrack_cause in backtrack_causes:
            if identifier == backtrack_cause.requirement.name:
                return True
            if backtrack_cause.parent and identifier == backtrack_cause.parent.name:
                return True
        return False

@@ -11,9 +11,9 @@ logger = getLogger(__name__)

class PipReporter(BaseReporter):
    def __init__(self) -> None:
        self.backtracks_by_package: DefaultDict[str, int] = defaultdict(int)
        self.reject_count_by_package: DefaultDict[str, int] = defaultdict(int)

        self._messages_at_backtrack = {
        self._messages_at_reject_count = {
            1: (
                "pip is looking at multiple versions of {package_name} to "
                "determine which version is compatible with other "
@@ -27,22 +27,33 @@ class PipReporter(BaseReporter):
            13: (
                "This is taking longer than usual. You might need to provide "
                "the dependency resolver with stricter constraints to reduce "
                "runtime. If you want to abort this run, you can press "
                "Ctrl + C to do so. To improve how pip performs, tell us what "
                "happened here: https://pip.pypa.io/surveys/backtracking"
                "runtime. See https://pip.pypa.io/warnings/backtracking for "
                "guidance. If you want to abort this run, press Ctrl + C."
            ),
        }

    def backtracking(self, candidate: Candidate) -> None:
        self.backtracks_by_package[candidate.name] += 1
    def rejecting_candidate(self, criterion: Any, candidate: Candidate) -> None:
        self.reject_count_by_package[candidate.name] += 1

        count = self.backtracks_by_package[candidate.name]
        if count not in self._messages_at_backtrack:
        count = self.reject_count_by_package[candidate.name]
        if count not in self._messages_at_reject_count:
            return

        message = self._messages_at_backtrack[count]
        message = self._messages_at_reject_count[count]
        logger.info("INFO: %s", message.format(package_name=candidate.name))

        msg = "Will try a different candidate, due to conflict:"
        for req_info in criterion.information:
            req, parent = req_info.requirement, req_info.parent
            # Inspired by Factory.get_installation_error
            msg += "\n    "
            if parent:
                msg += f"{parent.name} {parent.version} depends on "
            else:
                msg += "The user requested "
            msg += req.format_for_error()
        logger.debug(msg)


class PipDebuggingReporter(BaseReporter):
    """A reporter that does an info log for every event it sees."""
@@ -62,8 +73,8 @@ class PipDebuggingReporter(BaseReporter):
    def adding_requirement(self, requirement: Requirement, parent: Candidate) -> None:
        logger.info("Reporter.adding_requirement(%r, %r)", requirement, parent)

    def backtracking(self, candidate: Candidate) -> None:
        logger.info("Reporter.backtracking(%r)", candidate)
    def rejecting_candidate(self, criterion: Any, candidate: Candidate) -> None:
        logger.info("Reporter.rejecting_candidate(%r, %r)", criterion, candidate)

    def pinning(self, candidate: Candidate) -> None:
        logger.info("Reporter.pinning(%r)", candidate)

@@ -21,12 +21,12 @@ class ExplicitRequirement(Requirement):

    @property
    def project_name(self) -> NormalizedName:
        # No need to canonicalise - the candidate did this
        # No need to canonicalize - the candidate did this
        return self.candidate.project_name

    @property
    def name(self) -> str:
        # No need to canonicalise - the candidate did this
        # No need to canonicalize - the candidate did this
        return self.candidate.name

    def format_for_error(self) -> str:
@@ -64,7 +64,6 @@ class SpecifierRequirement(Requirement):
        return format_name(self.project_name, self._extras)

    def format_for_error(self) -> str:

        # Convert comma-separated specifiers into "A, B, ..., F and G"
        # This makes the specifier a bit more "human readable", without
        # risking a change in meaning. (Hopefully! Not all edge cases have

@@ -19,8 +19,6 @@ from pip._internal.resolution.resolvelib.reporter import (
    PipDebuggingReporter,
    PipReporter,
)
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.filetypes import is_archive_file

from .base import Candidate, Requirement
from .factory import Factory
@@ -90,9 +88,9 @@ class Resolver(BaseResolver):
        )

        try:
            try_to_avoid_resolution_too_deep = 2000000
            limit_how_complex_resolution_can_be = 200000
            result = self._result = resolver.resolve(
                collected.requirements, max_rounds=try_to_avoid_resolution_too_deep
                collected.requirements, max_rounds=limit_how_complex_resolution_can_be
            )

        except ResolutionImpossible as e:
@@ -136,25 +134,6 @@ class Resolver(BaseResolver):
                    )
                    continue

                looks_like_sdist = (
                    is_archive_file(candidate.source_link.file_path)
                    and candidate.source_link.ext != ".zip"
                )
                if looks_like_sdist:
                    # is a local sdist -- show a deprecation warning!
                    reason = (
                        "Source distribution is being reinstalled despite an "
                        "installed package having the same name and version as "
                        "the installed package."
                    )
                    replacement = "use --force-reinstall"
                    deprecated(
                        reason=reason,
                        replacement=replacement,
                        gone_in="21.3",
                        issue=8711,
                    )

                # is a local sdist or path -- reinstall
                ireq.should_reinstall = True
            else:
@@ -192,17 +171,19 @@ class Resolver(BaseResolver):
        get installed one-by-one.

        The current implementation creates a topological ordering of the
        dependency graph, while breaking any cycles in the graph at arbitrary
        points. We make no guarantees about where the cycle would be broken,
        other than they would be broken.
        dependency graph, giving more weight to packages with less
        or no dependencies, while breaking any cycles in the graph at
        arbitrary points. We make no guarantees about where the cycle
        would be broken, other than it *would* be broken.
        """
        assert self._result is not None, "must call resolve() first"

        if not req_set.requirements:
            # Nothing is left to install, so we do not need an order.
            return []

        graph = self._result.graph
        weights = get_topological_weights(
            graph,
            expected_node_count=len(self._result.mapping) + 1,
        )
        weights = get_topological_weights(graph, set(req_set.requirements.keys()))

        sorted_items = sorted(
            req_set.requirements.items(),
@@ -213,23 +194,32 @@ class Resolver(BaseResolver):


def get_topological_weights(
    graph: "DirectedGraph[Optional[str]]", expected_node_count: int
    graph: "DirectedGraph[Optional[str]]", requirement_keys: Set[str]
) -> Dict[Optional[str], int]:
    """Assign weights to each node based on how "deep" they are.

    This implementation may change at any point in the future without prior
    notice.

    We take the length for the longest path to any node from root, ignoring any
    paths that contain a single node twice (i.e. cycles). This is done through
    a depth-first search through the graph, while keeping track of the path to
    the node.
    We first simplify the dependency graph by pruning any leaves and giving them
    the highest weight: a package without any dependencies should be installed
    first. This is done again and again in the same way, giving ever less weight
    to the newly found leaves. The loop stops when no leaves are left: all
    remaining packages have at least one dependency left in the graph.

    Then we continue with the remaining graph, by taking the length for the
    longest path to any node from root, ignoring any paths that contain a single
    node twice (i.e. cycles). This is done through a depth-first search through
    the graph, while keeping track of the path to the node.

    Cycles in the graph result would result in node being revisited while also
    being it's own path. In this case, take no action. This helps ensure we
    being on its own path. In this case, take no action. This helps ensure we
    don't get stuck in a cycle.

    When assigning weight, the longer path (i.e. larger length) is preferred.

    We are only interested in the weights of packages that are in the
    requirement_keys.
    """
    path: Set[Optional[str]] = set()
    weights: Dict[Optional[str], int] = {}
@@ -245,15 +235,49 @@ def get_topological_weights(
            visit(child)
        path.remove(node)

        if node not in requirement_keys:
            return

        last_known_parent_count = weights.get(node, 0)
        weights[node] = max(last_known_parent_count, len(path))

    # Simplify the graph, pruning leaves that have no dependencies.
    # This is needed for large graphs (say over 200 packages) because the
    # `visit` function is exponentially slower then, taking minutes.
    # See https://github.com/pypa/pip/issues/10557
    # We will loop until we explicitly break the loop.
    while True:
        leaves = set()
        for key in graph:
            if key is None:
                continue
            for _child in graph.iter_children(key):
                # This means we have at least one child
                break
            else:
                # No child.
                leaves.add(key)
        if not leaves:
            # We are done simplifying.
            break
        # Calculate the weight for the leaves.
        weight = len(graph) - 1
        for leaf in leaves:
            if leaf not in requirement_keys:
                continue
            weights[leaf] = weight
        # Remove the leaves from the graph, making it simpler.
        for leaf in leaves:
            graph.remove(leaf)

    # Visit the remaining graph.
    # `None` is guaranteed to be the root node by resolvelib.
    visit(None)

    # Sanity checks
    assert weights[None] == 0
    assert len(weights) == expected_node_count
    # Sanity check: all requirement keys should be in the weights,
    # and no other keys should be in the weights.
    difference = set(weights.keys()).difference(requirement_keys)
    assert not difference, difference

    return weights

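The pruning loop above is easiest to see on a toy graph. A rough standalone equivalent using a plain adjacency dict (the package names are invented; resolvelib's DirectedGraph is not used here):

    # graph maps each node to the set of its dependencies (children).
    graph = {"app": {"lib", "cli"}, "cli": {"lib"}, "lib": set()}
    weights = {}
    while True:
        # A leaf has no children left in the graph.
        leaves = {node for node, children in graph.items() if not children}
        if not leaves:
            break
        weight = len(graph) - 1  # ever smaller as the graph shrinks
        for leaf in leaves:
            weights[leaf] = weight
            del graph[leaf]
        for children in graph.values():
            children.difference_update(leaves)
    print(weights)  # {'lib': 2, 'cli': 1, 'app': 0}

Packages with the highest weight (the deepest leaves, here "lib") are installed first.
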
@@ -1,97 +1,149 @@
import datetime
import functools
import hashlib
import json
import logging
import optparse
import os.path
import sys
from typing import Any, Dict
from dataclasses import dataclass
from typing import Any, Callable, Dict, Optional

from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.rich.console import Group
from pip._vendor.rich.markup import escape
from pip._vendor.rich.text import Text

from pip._internal.index.collector import LinkCollector
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata import get_default_environment
from pip._internal.metadata.base import DistributionVersion
from pip._internal.models.selection_prefs import SelectionPreferences
from pip._internal.network.session import PipSession
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.entrypoints import (
    get_best_invocation_for_this_pip,
    get_best_invocation_for_this_python,
)
from pip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace
from pip._internal.utils.misc import ensure_dir

SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"


logger = logging.getLogger(__name__)


def _get_statefile_name(key):
    # type: (str) -> str
def _get_statefile_name(key: str) -> str:
    key_bytes = key.encode()
    name = hashlib.sha224(key_bytes).hexdigest()
    return name

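For example, the state file name is just a SHA-224 digest of the environment's key (the printed value and the cache layout comment are illustrative):

    import hashlib
    import sys

    # Mirrors _get_statefile_name with key=sys.prefix, the default key below.
    name = hashlib.sha224(sys.prefix.encode()).hexdigest()
    print(name)  # stored at <cache_dir>/selfcheck/<name>
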
class SelfCheckState:
    def __init__(self, cache_dir):
        # type: (str) -> None
        self.state = {}  # type: Dict[str, Any]
        self.statefile_path = None
    def __init__(self, cache_dir: str) -> None:
        self._state: Dict[str, Any] = {}
        self._statefile_path = None

        # Try to load the existing state
        if cache_dir:
            self.statefile_path = os.path.join(
            self._statefile_path = os.path.join(
                cache_dir, "selfcheck", _get_statefile_name(self.key)
            )
            try:
                with open(self.statefile_path, encoding="utf-8") as statefile:
                    self.state = json.load(statefile)
                with open(self._statefile_path, encoding="utf-8") as statefile:
                    self._state = json.load(statefile)
            except (OSError, ValueError, KeyError):
                # Explicitly suppressing exceptions, since we don't want to
                # error out if the cache file is invalid.
                pass

    @property
    def key(self):
        # type: () -> str
    def key(self) -> str:
        return sys.prefix

    def save(self, pypi_version, current_time):
        # type: (str, datetime.datetime) -> None
    def get(self, current_time: datetime.datetime) -> Optional[str]:
        """Check if we have a not-outdated version loaded already."""
        if not self._state:
            return None

        if "last_check" not in self._state:
            return None

        if "pypi_version" not in self._state:
            return None

        seven_days_in_seconds = 7 * 24 * 60 * 60

        # Determine if we need to refresh the state
        last_check = datetime.datetime.strptime(self._state["last_check"], _DATE_FMT)
        seconds_since_last_check = (current_time - last_check).total_seconds()
        if seconds_since_last_check > seven_days_in_seconds:
            return None

        return self._state["pypi_version"]

    def set(self, pypi_version: str, current_time: datetime.datetime) -> None:
        # If we do not have a path to cache in, don't bother saving.
        if not self.statefile_path:
        if not self._statefile_path:
            return

        # Check to make sure that we own the directory
        if not check_path_owner(os.path.dirname(self.statefile_path)):
        if not check_path_owner(os.path.dirname(self._statefile_path)):
            return

        # Now that we've ensured the directory is owned by this user, we'll go
        # ahead and make sure that all our directories are created.
        ensure_dir(os.path.dirname(self.statefile_path))
        ensure_dir(os.path.dirname(self._statefile_path))

        state = {
            # Include the key so it's easy to tell which pip wrote the
            # file.
            "key": self.key,
            "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
            "last_check": current_time.strftime(_DATE_FMT),
            "pypi_version": pypi_version,
        }

        text = json.dumps(state, sort_keys=True, separators=(",", ":"))

        with adjacent_tmp_file(self.statefile_path) as f:
        with adjacent_tmp_file(self._statefile_path) as f:
            f.write(text.encode())

        try:
            # Since we have a prefix-specific state file, we can just
            # overwrite whatever is there, no need to check.
            replace(f.name, self.statefile_path)
            replace(f.name, self._statefile_path)
        except OSError:
            # Best effort.
            pass


def was_installed_by_pip(pkg):
    # type: (str) -> bool
@dataclass
class UpgradePrompt:
    old: str
    new: str

    def __rich__(self) -> Group:
        if WINDOWS:
            pip_cmd = f"{get_best_invocation_for_this_python()} -m pip"
        else:
            pip_cmd = get_best_invocation_for_this_pip()

        notice = "[bold][[reset][blue]notice[reset][bold]][reset]"
        return Group(
            Text(),
            Text.from_markup(
                f"{notice} A new release of pip is available: "
                f"[red]{self.old}[reset] -> [green]{self.new}[reset]"
            ),
            Text.from_markup(
                f"{notice} To update, run: "
                f"[green]{escape(pip_cmd)} install --upgrade pip"
            ),
        )

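Because UpgradePrompt implements the __rich__ protocol, any rich console can render it directly. A minimal sketch, assuming the standalone rich distribution in place of pip's vendored copy, and made-up version numbers:

    from rich.console import Console

    # Rich calls __rich__ to obtain the renderable Group defined above.
    Console(stderr=True).print(UpgradePrompt(old="23.0", new="23.1.2"))
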
def was_installed_by_pip(pkg: str) -> bool:
    """Checks whether pkg was installed by pip

    This is used not to display the upgrade message when pip is in fact
@@ -101,8 +153,69 @@ def was_installed_by_pip(pkg):
    return dist is not None and "pip" == dist.installer


def pip_self_version_check(session, options):
    # type: (PipSession, optparse.Values) -> None
def _get_current_remote_pip_version(
    session: PipSession, options: optparse.Values
) -> Optional[str]:
    # Lets use PackageFinder to see what the latest pip version is
    link_collector = LinkCollector.create(
        session,
        options=options,
        suppress_no_index=True,
    )

    # Pass allow_yanked=False so we don't suggest upgrading to a
    # yanked version.
    selection_prefs = SelectionPreferences(
        allow_yanked=False,
        allow_all_prereleases=False,  # Explicitly set to False
    )

    finder = PackageFinder.create(
        link_collector=link_collector,
        selection_prefs=selection_prefs,
    )
    best_candidate = finder.find_best_candidate("pip").best_candidate
    if best_candidate is None:
        return None

    return str(best_candidate.version)


def _self_version_check_logic(
    *,
    state: SelfCheckState,
    current_time: datetime.datetime,
    local_version: DistributionVersion,
    get_remote_version: Callable[[], Optional[str]],
) -> Optional[UpgradePrompt]:
    remote_version_str = state.get(current_time)
    if remote_version_str is None:
        remote_version_str = get_remote_version()
        if remote_version_str is None:
            logger.debug("No remote pip version found")
            return None
        state.set(remote_version_str, current_time)

    remote_version = parse_version(remote_version_str)
    logger.debug("Remote version of pip: %s", remote_version)
    logger.debug("Local version of pip: %s", local_version)

    pip_installed_by_pip = was_installed_by_pip("pip")
    logger.debug("Was pip installed by pip? %s", pip_installed_by_pip)
    if not pip_installed_by_pip:
        return None  # Only suggest upgrade if pip is installed by pip.

    local_version_is_older = (
        local_version < remote_version
        and local_version.base_version != remote_version.base_version
    )
    if local_version_is_older:
        return UpgradePrompt(old=str(local_version), new=remote_version_str)

    return None

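Because the remote lookup is injected as a callable, the decision logic above can be exercised without any network access. A sketch under invented values (parse_version stands in for the installed distribution's version object here):

    import datetime

    prompt = _self_version_check_logic(
        state=SelfCheckState(cache_dir=""),  # empty cache_dir: nothing is persisted
        current_time=datetime.datetime.utcnow(),
        local_version=parse_version("23.0"),
        get_remote_version=lambda: "23.1.2",
    )
    # prompt is an UpgradePrompt when the local pip is older and was
    # itself installed by pip; otherwise None.
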
def pip_self_version_check(session: PipSession, options: optparse.Values) -> None:
    """Check for an update for pip.

    Limit the frequency of checks to once per week. State is stored either in
@@ -113,75 +226,17 @@ def pip_self_version_check(session, options):
    if not installed_dist:
        return

    pip_version = installed_dist.version
    pypi_version = None

    try:
        state = SelfCheckState(cache_dir=options.cache_dir)

        current_time = datetime.datetime.utcnow()
        # Determine if we need to refresh the state
        if "last_check" in state.state and "pypi_version" in state.state:
            last_check = datetime.datetime.strptime(
                state.state["last_check"],
                SELFCHECK_DATE_FMT
            )
            if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60:
                pypi_version = state.state["pypi_version"]

        # Refresh the version if we need to or just see if we need to warn
        if pypi_version is None:
            # Lets use PackageFinder to see what the latest pip version is
            link_collector = LinkCollector.create(
                session,
                options=options,
                suppress_no_index=True,
            )

            # Pass allow_yanked=False so we don't suggest upgrading to a
            # yanked version.
            selection_prefs = SelectionPreferences(
                allow_yanked=False,
                allow_all_prereleases=False,  # Explicitly set to False
            )

            finder = PackageFinder.create(
                link_collector=link_collector,
                selection_prefs=selection_prefs,
            )
            best_candidate = finder.find_best_candidate("pip").best_candidate
            if best_candidate is None:
                return
            pypi_version = str(best_candidate.version)

            # save that we've performed a check
            state.save(pypi_version, current_time)

        remote_version = parse_version(pypi_version)

        local_version_is_older = (
            pip_version < remote_version and
            pip_version.base_version != remote_version.base_version and
            was_installed_by_pip('pip')
        )

        # Determine if our pypi_version is older
        if not local_version_is_older:
            return

        # We cannot tell how the current pip is available in the current
        # command context, so be pragmatic here and suggest the command
        # that's always available. This does not accommodate spaces in
        # `sys.executable`.
        pip_cmd = f"{sys.executable} -m pip"
        logger.warning(
            "You are using pip version %s; however, version %s is "
            "available.\nYou should consider upgrading via the "
            "'%s install --upgrade pip' command.",
            pip_version, pypi_version, pip_cmd
        upgrade_prompt = _self_version_check_logic(
            state=SelfCheckState(cache_dir=options.cache_dir),
            current_time=datetime.datetime.utcnow(),
            local_version=installed_dist.version,
            get_remote_version=functools.partial(
                _get_current_remote_pip_version, session, options
            ),
        )
        if upgrade_prompt is not None:
            logger.warning("[present-rich] %s", upgrade_prompt)
    except Exception:
        logger.debug(
            "There was an error checking the latest version of pip",
            exc_info=True,
        )
        logger.warning("There was an error checking the latest version of pip.")
        logger.debug("See below for error", exc_info=True)

@@ -7,29 +7,46 @@ and eventually drop this after all usages are changed.
"""

import os
import sys
from typing import List

from pip._vendor import appdirs as _appdirs
from pip._vendor import platformdirs as _appdirs


def user_cache_dir(appname: str) -> str:
    return _appdirs.user_cache_dir(appname, appauthor=False)


def _macos_user_config_dir(appname: str, roaming: bool = True) -> str:
    # Use ~/Application Support/pip, if the directory exists.
    path = _appdirs.user_data_dir(appname, appauthor=False, roaming=roaming)
    if os.path.isdir(path):
        return path

    # Use a Linux-like ~/.config/pip, by default.
    linux_like_path = "~/.config/"
    if appname:
        linux_like_path = os.path.join(linux_like_path, appname)

    return os.path.expanduser(linux_like_path)


def user_config_dir(appname: str, roaming: bool = True) -> str:
    path = _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming)
    if _appdirs.system == "darwin" and not os.path.isdir(path):
        path = os.path.expanduser("~/.config/")
        if appname:
            path = os.path.join(path, appname)
        return path
    if sys.platform == "darwin":
        return _macos_user_config_dir(appname, roaming)

    return _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming)

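The net effect of user_config_dir across platforms, sketched with illustrative defaults (actual paths depend on the machine and environment variables):

    print(user_config_dir("pip"))
    # Linux:   ~/.config/pip (or under $XDG_CONFIG_HOME)
    # macOS:   ~/Library/Application Support/pip if that directory already
    #          exists, otherwise ~/.config/pip
    # Windows: %APPDATA%\pip
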
# for the discussion regarding site_config_dir locations
# see <https://github.com/pypa/pip/issues/1733>
def site_config_dirs(appname: str) -> List[str]:
    if sys.platform == "darwin":
        return [_appdirs.site_data_dir(appname, appauthor=False, multipath=True)]

    dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True)
    if _appdirs.system not in ["win32", "darwin"]:
        # always look in /etc directly as well
        return dirval.split(os.pathsep) + ["/etc"]
    return [dirval]
    if sys.platform == "win32":
        return [dirval]

    # Unix-y system. Look in /etc as well.
    return dirval.split(os.pathsep) + ["/etc"]

@@ -2,9 +2,10 @@
"""

import re
from typing import TYPE_CHECKING, List, Optional, Tuple
from typing import List, Optional, Tuple

from pip._vendor.packaging.tags import (
    PythonVersion,
    Tag,
    compatible_tags,
    cpython_tags,
@@ -14,10 +15,6 @@ from pip._vendor.packaging.tags import (
    mac_platforms,
)

if TYPE_CHECKING:
    from pip._vendor.packaging.tags import PythonVersion


_osx_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)")


@@ -95,7 +92,7 @@ def _expand_allowed_platforms(platforms: Optional[List[str]]) -> Optional[List[s
    return result


def _get_python_version(version: str) -> "PythonVersion":
def _get_python_version(version: str) -> PythonVersion:
    if len(version) > 1:
        return int(version[0]), int(version[1:])
    else:
@@ -132,7 +129,7 @@ def get_supported(
    """
    supported: List[Tag] = []

    python_version: Optional["PythonVersion"] = None
    python_version: Optional[PythonVersion] = None
    if version is not None:
        python_version = _get_python_version(version)
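The hunk above cuts off the else branch of _get_python_version; in context, the function turns a dotless version string into a tuple by splitting on the first character, so multi-digit minor versions parse correctly. A self-contained sketch (the single-element else branch is restored here as the plausible completion, an assumption since it is elided in the hunk):

    from typing import Tuple

    def _get_python_version(version: str) -> Tuple[int, ...]:
        if len(version) > 1:
            return int(version[0]), int(version[1:])
        else:
            return (int(version[0]),)

    assert _get_python_version("3") == (3,)
    assert _get_python_version("39") == (3, 9)
    assert _get_python_version("310") == (3, 10)  # "3" + "10", not 31.0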
@@ -8,7 +8,7 @@ from typing import Any, Optional, TextIO, Type, Union

from pip._vendor.packaging.version import parse

from pip import __version__ as current_version
from pip import __version__ as current_version  # NOTE: tests patch this name.

DEPRECATION_MSG_PREFIX = "DEPRECATION: "

@@ -53,52 +53,68 @@ def install_warning_logger() -> None:


def deprecated(
    *,
    reason: str,
    replacement: Optional[str],
    gone_in: Optional[str],
    feature_flag: Optional[str] = None,
    issue: Optional[int] = None,
) -> None:
    """Helper to deprecate existing functionality.

    reason:
        Textual reason shown to the user about why this functionality has
        been deprecated.
        been deprecated. Should be a complete sentence.
    replacement:
        Textual suggestion shown to the user about what alternative
        functionality they can use.
    gone_in:
        The version of pip in which this functionality should be removed.
        Raises errors if pip's current version is greater than or equal to
        Raises an error if pip's current version is greater than or equal to
        this.
    feature_flag:
        Command-line flag of the form --use-feature={feature_flag} for testing
        upcoming functionality.
    issue:
        Issue number on the tracker that would serve as a useful place for
        users to find related discussion and provide feedback.

    Always pass replacement, gone_in and issue as keyword arguments for clarity
    at the call site.
    """

    # Construct a nice message.
    # This is eagerly formatted as we want it to get logged as if someone
    # typed this entire message out.
    sentences = [
        (reason, DEPRECATION_MSG_PREFIX + "{}"),
        (gone_in, "pip {} will remove support for this functionality."),
        (replacement, "A possible replacement is {}."),
    # Determine whether or not the feature is already gone in this version.
    is_gone = gone_in is not None and parse(current_version) >= parse(gone_in)

    message_parts = [
        (reason, f"{DEPRECATION_MSG_PREFIX}{{}}"),
        (
            gone_in,
            "pip {} will enforce this behaviour change."
            if not is_gone
            else "Since pip {}, this is no longer supported.",
        ),
        (
            replacement,
            "A possible replacement is {}.",
        ),
        (
            feature_flag,
            "You can use the flag --use-feature={} to test the upcoming behaviour."
            if not is_gone
            else None,
        ),
        (
            issue,
            (
                "You can find discussion regarding this at "
                "https://github.com/pypa/pip/issues/{}."
            ),
            "Discussion can be found at https://github.com/pypa/pip/issues/{}",
        ),
    ]

    message = " ".join(
        template.format(val) for val, template in sentences if val is not None
        format_str.format(value)
        for value, format_str in message_parts
        if format_str is not None and value is not None
    )

    # Raise as an error if it has to be removed.
    if gone_in is not None and parse(current_version) >= parse(gone_in):
    # Raise as an error if this behaviour is deprecated.
    if is_gone:
        raise PipDeprecationWarning(message)

    warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)
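To illustrate the rewritten message assembly: each non-None part contributes one eagerly formatted sentence, and the wording flips once is_gone becomes true. A hedged example call (all values invented for illustration):

    from pip._internal.utils.deprecation import deprecated

    deprecated(
        reason="--example-option is deprecated.",  # invented option
        replacement="--other-option",              # invented replacement
        gone_in="99.0",                            # far-future version for the demo
        issue=1,                                   # placeholder issue number
    )
    # While the running pip is older than 99.0, this warns roughly:
    #   DEPRECATION: --example-option is deprecated. pip 99.0 will enforce
    #   this behaviour change. A possible replacement is --other-option.
    #   Discussion can be found at https://github.com/pypa/pip/issues/1
    # Once the running pip is >= gone_in, is_gone is true and
    # PipDeprecationWarning is raised instead of warned.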
@@ -2,6 +2,7 @@ from typing import Optional

from pip._internal.models.direct_url import ArchiveInfo, DirectUrl, DirInfo, VcsInfo
from pip._internal.models.link import Link
from pip._internal.utils.urls import path_to_url
from pip._internal.vcs import vcs

@@ -28,6 +29,13 @@ def direct_url_as_pep440_direct_reference(direct_url: DirectUrl, name: str) -> s
    return requirement


def direct_url_for_editable(source_dir: str) -> DirectUrl:
    return DirectUrl(
        url=path_to_url(source_dir),
        info=DirInfo(editable=True),
    )


def direct_url_from_link(
    link: Link, source_dir: Optional[str] = None, link_is_in_wheel_cache: bool = False
) -> DirectUrl:
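The new direct_url_for_editable helper records a PEP 610 direct URL for a local editable install. Roughly, under an assumed local path:

    from pip._internal.utils.direct_url_helpers import direct_url_for_editable

    direct_url = direct_url_for_editable("/home/user/project")  # path is illustrative
    print(direct_url.to_dict())
    # {'url': 'file:///home/user/project', 'dir_info': {'editable': True}}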
@@ -1,42 +0,0 @@
from distutils.errors import DistutilsArgError
from distutils.fancy_getopt import FancyGetopt
from typing import Dict, List

_options = [
    ("exec-prefix=", None, ""),
    ("home=", None, ""),
    ("install-base=", None, ""),
    ("install-data=", None, ""),
    ("install-headers=", None, ""),
    ("install-lib=", None, ""),
    ("install-platlib=", None, ""),
    ("install-purelib=", None, ""),
    ("install-scripts=", None, ""),
    ("prefix=", None, ""),
    ("root=", None, ""),
    ("user", None, ""),
]


# typeshed doesn't permit Tuple[str, None, str], see python/typeshed#3469.
_distutils_getopt = FancyGetopt(_options)  # type: ignore


def parse_distutils_args(args: List[str]) -> Dict[str, str]:
    """Parse provided arguments, returning an object that has the
    matched arguments.

    Any unknown arguments are ignored.
    """
    result = {}
    for arg in args:
        try:
            _, match = _distutils_getopt.getopt(args=[arg])
        except DistutilsArgError:
            # We don't care about any other options, which here may be
            # considered unrecognized since our option list is not
            # exhaustive.
            pass
        else:
            result.update(match.__dict__)
    return result
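For reference, the deleted helper filtered arbitrary command-line arguments down to the recognised distutils install options, silently dropping everything else. Given the definitions above, on a Python that still ships distutils (before 3.12) it behaved roughly like:

    parse_distutils_args(["--prefix=/opt/example", "--unknown-flag"])
    # -> {'prefix': '/opt/example'}   (the unknown flag is ignored)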
@@ -14,7 +14,7 @@ BOMS: List[Tuple[bytes, str]] = [
    (codecs.BOM_UTF32_LE, "utf-32-le"),
]

ENCODING_RE = re.compile(br"coding[:=]\s*([-\w.]+)")
ENCODING_RE = re.compile(rb"coding[:=]\s*([-\w.]+)")


def auto_decode(data: bytes) -> str:
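The ENCODING_RE change is purely cosmetic: br"..." and rb"..." spell the same raw bytes literal, the latter being the prefix order preferred by modern linters.

    assert br"coding[:=]\s*([-\w.]+)" == rb"coding[:=]\s*([-\w.]+)"  # identical bytes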
@@ -1,7 +1,23 @@
import itertools
import os
import shutil
import sys
from typing import List, Optional

from pip._internal.cli.main import main
from pip._internal.utils.compat import WINDOWS

_EXECUTABLE_NAMES = [
    "pip",
    f"pip{sys.version_info.major}",
    f"pip{sys.version_info.major}.{sys.version_info.minor}",
]
if WINDOWS:
    _allowed_extensions = {"", ".exe"}
    _EXECUTABLE_NAMES = [
        "".join(parts)
        for parts in itertools.product(_EXECUTABLE_NAMES, _allowed_extensions)
    ]


def _wrapper(args: Optional[List[str]] = None) -> int:
@@ -25,3 +41,44 @@ def _wrapper(args: Optional[List[str]] = None) -> int:
        "running pip directly.\n"
    )
    return main(args)


def get_best_invocation_for_this_pip() -> str:
    """Try to figure out the best way to invoke pip in the current environment."""
    binary_directory = "Scripts" if WINDOWS else "bin"
    binary_prefix = os.path.join(sys.prefix, binary_directory)

    # Try to use pip[X[.Y]] names, if those executables for this environment are
    # the first on PATH with that name.
    path_parts = os.path.normcase(os.environ.get("PATH", "")).split(os.pathsep)
    exe_are_in_PATH = os.path.normcase(binary_prefix) in path_parts
    if exe_are_in_PATH:
        for exe_name in _EXECUTABLE_NAMES:
            found_executable = shutil.which(exe_name)
            binary_executable = os.path.join(binary_prefix, exe_name)
            if (
                found_executable
                and os.path.exists(binary_executable)
                and os.path.samefile(
                    found_executable,
                    binary_executable,
                )
            ):
                return exe_name

    # Use the `-m` invocation, if there's no "nice" invocation.
    return f"{get_best_invocation_for_this_python()} -m pip"


def get_best_invocation_for_this_python() -> str:
    """Try to figure out the best way to invoke the current Python."""
    exe = sys.executable
    exe_name = os.path.basename(exe)

    # Try to use the basename, if it's the first executable.
    found_executable = shutil.which(exe_name)
    if found_executable and os.path.samefile(found_executable, exe):
        return exe_name

    # Use the full executable name, because we couldn't find something simpler.
    return exe
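Together, these additions let pip suggest a copy-pasteable upgrade command: a bare pip/pipX/pipX.Y name when this environment's scripts directory is first on PATH for that name, otherwise a `python -m pip` style fallback. Illustrative use of the internal helper (output depends on the environment):

    from pip._internal.utils.entrypoints import get_best_invocation_for_this_pip

    print(get_best_invocation_for_this_pip())
    # e.g. "pip3.11" inside an activated virtualenv, or
    # "/usr/local/bin/python3 -m pip" when no matching executable wins on PATH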
@@ -2,12 +2,10 @@ import fnmatch
import os
import os.path
import random
import shutil
import stat
import sys
from contextlib import contextmanager
from tempfile import NamedTemporaryFile
from typing import Any, BinaryIO, Iterator, List, Union, cast
from typing import Any, BinaryIO, Generator, List, Union, cast

from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed

@@ -42,35 +40,8 @@ def check_path_owner(path: str) -> bool:
    return False  # assume we don't own the path


def copy2_fixed(src: str, dest: str) -> None:
    """Wrap shutil.copy2() but map errors copying socket files to
    SpecialFileError as expected.

    See also https://bugs.python.org/issue37700.
    """
    try:
        shutil.copy2(src, dest)
    except OSError:
        for f in [src, dest]:
            try:
                is_socket_file = is_socket(f)
            except OSError:
                # An error has already occurred. Another error here is not
                # a problem and we can ignore it.
                pass
            else:
                if is_socket_file:
                    raise shutil.SpecialFileError(f"`{f}` is a socket")

        raise


def is_socket(path: str) -> bool:
    return stat.S_ISSOCK(os.lstat(path).st_mode)


@contextmanager
def adjacent_tmp_file(path: str, **kwargs: Any) -> Iterator[BinaryIO]:
def adjacent_tmp_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]:
    """Return a file-like object pointing to a tmp file next to path.

    The file is created securely and is ensured to be written to disk
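The adjacent_tmp_file signature change is type-only: a @contextmanager function is a generator, so its return annotation can spell out the yield/send/return types as Generator[BinaryIO, None, None] instead of the looser Iterator[BinaryIO]; runtime behaviour is unchanged. A minimal sketch of the same pattern:

    from contextlib import contextmanager
    from typing import BinaryIO, Generator

    @contextmanager
    def open_binary(path: str) -> Generator[BinaryIO, None, None]:
        f = open(path, "rb")
        try:
            yield f  # the yielded file object is the with-block's target
        finally:
            f.close()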
Some files were not shown because too many files have changed in this diff.